SQLObject-1.5.2/setup.cfg

[global]
command_packages = buildutils.pudge_command, buildutils.publish_command

[easy_install]

[pudge]
theme = pythonpaste.org
docs = docs/index.txt docs/Authors.txt docs/DeveloperGuide.txt docs/FAQ.txt
    docs/Inheritance.txt docs/News.txt docs/SQLBuilder.txt docs/SQLObject.txt
    docs/SelectResults.txt docs/TODO.txt docs/Versioning.txt docs/Views.txt
    docs/community.txt docs/download.txt docs/links.txt docs/sqlobject-admin.txt
doc_base = docs/
dest = docs/html
modules = sqlobject
exclude_modules = sqlobject.conftest sqlobject.tests
    sqlobject.inheritance.tests sqlobject.versioning.test
    sqlobject.wsgi_middleware
title = SQLObject
mailing_list_url = community.html
settings = normal_link_color=#039
    visited_color=#093
    hover_color=#ddf
    body_outer_bg_color=#46a
    body_border_color=#09f
    nav_container_color=#79f
    nav_button_color=#037
    nav_border_color=#05f
    no_about=true
    link1=SQLObject.html Documentation

[publish]
doc-dir = docs/html
make-dirs = 1
doc-dest = scp://web.sourceforge.net:/home/project-web/sqlobject/

[egg_info]
tag_build =
tag_date = 0
tag_svn_revision = 0

SQLObject-1.5.2/scripts/sqlobject-admin

#!/usr/bin/env python
import sys
import os

try:
    import pkg_resources
    pkg_resources.require('SQLObject>0.6.1')
except (ImportError, pkg_resources.DistributionNotFound):
    # Oh well, we tried...
    pass

try:
    import sqlobject.manager
except ImportError:
    try:
        here = __file__
    except NameError:
        here = sys.argv[0]
    updir = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(here))),
        'sqlobject')
    if os.path.exists(updir):
        sys.path.insert(0, os.path.dirname(updir))
    else:
        print 'I cannot find the sqlobject module'
        print 'If SQLObject is installed, you may need to set $PYTHONPATH'
        sys.exit(3)
    # Now we have to get rid of possibly stale modules from that import
    # up there
    for name, value in sys.modules.items():
        if name.startswith('sqlobject'):
            del sys.modules[name]

from sqlobject.manager import command
command.the_runner.run(sys.argv)

SQLObject-1.5.2/scripts/sqlobject-convertOldURI

#!/usr/bin/env python
import sys

try:
    uri = sys.argv[1]
except IndexError:
    sys.exit("Usage: %s old-style-URI" % sys.argv[0])

try:
    import pkg_resources
    pkg_resources.require('SQLObject>=1.0.0')
except (ImportError, pkg_resources.DistributionNotFound):
    pass

from sqlobject import connectionForURI

conn = connectionForURI(uri, oldUri=True)
print conn.uri()

SQLObject-1.5.2/sqlobject/cache.py

"""
This implements the instance caching in SQLObject.

Caching is relatively aggressive.  All objects are retained so long as
they are in memory, by keeping weak references to objects.  We also
keep other objects in a cache that doesn't allow them to be garbage
collected (unless caching is turned off).
"""

import threading
from weakref import ref
from time import time as now


class CacheFactory(object):

    """
    CacheFactory caches object creation.
Each object should be referenced by a single hashable ID (note tuples of hashable values are also hashable). """ def __init__(self, cullFrequency=100, cullFraction=2, cache=True): """ Every cullFrequency times that an item is retrieved from this cache, the cull method is called. The cull method then expires an arbitrary fraction of the cached objects. The idea is at no time will the cache be entirely emptied, placing a potentially high load at that moment, but everything object will have its time to go eventually. The fraction is given as an integer, and one in that many objects are expired (i.e., the default is 1/2 of objects are expired). By setting cache to False, items won't be cached. However, in all cases a weak reference is kept to created objects, and if the object hasn't been garbage collected it will be returned. """ self.cullFrequency = cullFrequency self.cullCount = 0 self.cullOffset = 0 self.cullFraction = cullFraction self.doCache = cache if self.doCache: self.cache = {} self.expiredCache = {} self.lock = threading.Lock() def tryGet(self, id): """ This returns None, or the object in cache. """ value = self.expiredCache.get(id) if value: # it's actually a weakref: return value() if not self.doCache: return None return self.cache.get(id) def get(self, id): """ This method can cause deadlocks! tryGet is safer This returns the object found in cache, or None. If None, then the cache will remain locked! This is so that the calling function can create the object in a threadsafe manner before releasing the lock. You should use this like (note that ``cache`` is actually a CacheSet object in this example):: obj = cache.get(some_id, my_class) if obj is None: try: obj = create_object(some_id) cache.put(some_id, my_class, obj) finally: cache.finishPut(cls) This method checks both the main cache (which retains references) and the 'expired' cache, which retains only weak references. """ if self.doCache: if self.cullCount > self.cullFrequency: # Two threads could hit the cull in a row, but # that's not so bad. At least by setting cullCount # back to zero right away we avoid this. The cull # method has a lock, so it's threadsafe. self.cullCount = 0 self.cull() else: self.cullCount = self.cullCount + 1 try: return self.cache[id] except KeyError: pass self.lock.acquire() try: val = self.cache[id] except KeyError: pass else: self.lock.release() return val try: val = self.expiredCache[id]() except KeyError: return None else: del self.expiredCache[id] if val is None: return None self.cache[id] = val self.lock.release() return val else: try: val = self.expiredCache[id]() if val is not None: return val except KeyError: pass self.lock.acquire() try: val = self.expiredCache[id]() except KeyError: return None else: if val is None: del self.expiredCache[id] return None self.lock.release() return val def put(self, id, obj): """ Puts an object into the cache. Should only be called after .get(), so that duplicate objects don't end up in the cache. """ if self.doCache: self.cache[id] = obj else: self.expiredCache[id] = ref(obj) def finishPut(self): """ Releases the lock that is retained when .get() is called and returns None. """ self.lock.release() def created(self, id, obj): """ Inserts and object into the cache. Should be used when no one else knows about the object yet, so there cannot be any object already in the cache. After a database INSERT is an example of this situation. """ if self.doCache: if self.cullCount > self.cullFrequency: # Two threads could hit the cull in a row, but # that's not so bad. 
At least by setting cullCount # back to zero right away we avoid this. The cull # method has a lock, so it's threadsafe. self.cullCount = 0 self.cull() else: self.cullCount = self.cullCount + 1 self.cache[id] = obj else: self.expiredCache[id] = ref(obj) def cull(self): """Runs through the cache and expires objects E.g., if ``cullFraction`` is 3, then every third object is moved to the 'expired' (aka weakref) cache. """ self.lock.acquire() try: #remove dead references from the expired cache keys = self.expiredCache.keys() for key in keys: if self.expiredCache[key]() is None: self.expiredCache.pop(key, None) keys = self.cache.keys() for i in xrange(self.cullOffset, len(keys), self.cullFraction): id = keys[i] # create a weakref, then remove from the cache obj = ref(self.cache[id]) del self.cache[id] #the object may have been gc'd when removed from the cache #above, no need to place in expiredCache if obj() is not None: self.expiredCache[id] = obj # This offset tries to balance out which objects we # expire, so no object will just hang out in the cache # forever. self.cullOffset = (self.cullOffset + 1) % self.cullFraction finally: self.lock.release() def clear(self): """ Removes everything from the cache. Warning! This can cause duplicate objects in memory. """ if self.doCache: self.cache.clear() self.expiredCache.clear() def expire(self, id): """ Expires a single object. Typically called after a delete. Doesn't even keep a weakref. (@@: bad name?) """ if not self.doCache: return self.lock.acquire() try: if id in self.cache: del self.cache[id] if id in self.expiredCache: del self.expiredCache[id] finally: self.lock.release() def expireAll(self): """ Expires all objects, moving them all into the expired/weakref cache. """ if not self.doCache: return self.lock.acquire() try: for key, value in self.cache.items(): self.expiredCache[key] = ref(value) self.cache = {} finally: self.lock.release() def allIDs(self): """ Returns the IDs of all objects in the cache. """ if self.doCache: all = self.cache.keys() else: all = [] for id, value in self.expiredCache.items(): if value(): all.append(id) return all def getAll(self): """ Return all the objects in the cache. """ if self.doCache: all = self.cache.values() else: all = [] for value in self.expiredCache.values(): if value(): all.append(value()) return all class CacheSet(object): """ A CacheSet is used to collect and maintain a series of caches. In SQLObject, there is one CacheSet per connection, and one Cache in the CacheSet for each class, since IDs are not unique across classes. It contains methods similar to Cache, but that take a ``cls`` argument. 
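    A minimal usage sketch -- purely illustrative; ``Person`` and
    ``load_person`` are hypothetical names, and in normal use SQLObject's
    connection objects drive this machinery themselves::

        caches = CacheSet(cache=True)
        obj = caches.get(1, Person)   # returns None and keeps the lock on a miss
        if obj is None:
            try:
                obj = load_person(1)              # hypothetical loader
                caches.put(1, Person, obj)
            finally:
                caches.finishPut(Person)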
""" def __init__(self, *args, **kw): self.caches = {} self.args = args self.kw = kw def get(self, id, cls): try: return self.caches[cls.__name__].get(id) except KeyError: self.caches[cls.__name__] = CacheFactory(*self.args, **self.kw) return self.caches[cls.__name__].get(id) def put(self, id, cls, obj): self.caches[cls.__name__].put(id, obj) def finishPut(self, cls): self.caches[cls.__name__].finishPut() def created(self, id, cls, obj): try: self.caches[cls.__name__].created(id, obj) except KeyError: self.caches[cls.__name__] = CacheFactory(*self.args, **self.kw) self.caches[cls.__name__].created(id, obj) def expire(self, id, cls): try: self.caches[cls.__name__].expire(id) except KeyError: pass def clear(self, cls=None): if cls is None: for cache in self.caches.values(): cache.clear() elif cls.__name__ in self.caches: self.caches[cls.__name__].clear() def tryGet(self, id, cls): return self.tryGetByName(id, cls.__name__) def tryGetByName(self, id, clsname): try: return self.caches[clsname].tryGet(id) except KeyError: return None def allIDs(self, cls): try: self.caches[cls.__name__].allIDs() except KeyError: return [] def allSubCaches(self): return self.caches.values() def allSubCachesByClassNames(self): return self.caches def weakrefAll(self, cls=None): """ Move all objects in the cls (or if not given, then in all classes) to the weakref dictionary, where they can be collected. """ if cls is None: for cache in self.caches.values(): cache.expireAll() elif cls.__name__ in self.caches: self.caches[cls.__name__].expireAll() def getAll(self, cls=None): """ Returns all instances in the cache for the given class or all classes. """ if cls is None: results = [] for cache in self.caches.values(): results.extend(cache.getAll()) return results elif cls.__name__ in self.caches: return self.caches[cls.__name__].getAll() else: return [] SQLObject-1.5.2/sqlobject/mysql/0000755000175000017500000000000012322476205016013 5ustar phdphd00000000000000SQLObject-1.5.2/sqlobject/mysql/__init__.py0000644000175000017500000000030711133142410020107 0ustar phdphd00000000000000from sqlobject.dbconnection import registerConnection #import mysqltypes def builder(): import mysqlconnection return mysqlconnection.MySQLConnection registerConnection(['mysql'], builder) SQLObject-1.5.2/sqlobject/mysql/mysqlconnection.py0000644000175000017500000003064211561535645021627 0ustar phdphd00000000000000from sqlobject import col from sqlobject.dbconnection import DBAPI from sqlobject.dberrors import * class ErrorMessage(str): def __new__(cls, e, append_msg=''): obj = str.__new__(cls, e[1] + append_msg) obj.code = int(e[0]) obj.module = e.__module__ obj.exception = e.__class__.__name__ return obj class MySQLConnection(DBAPI): supportTransactions = False dbName = 'mysql' schemes = [dbName] def __init__(self, db, user, password='', host='localhost', port=0, **kw): import MySQLdb, MySQLdb.constants.CR, MySQLdb.constants.ER self.module = MySQLdb self.host = host self.port = port self.db = db self.user = user self.password = password self.kw = {} for key in ("unix_socket", "init_command", "read_default_file", "read_default_group", "conv"): if key in kw: self.kw[key] = kw.pop(key) for key in ("connect_timeout", "compress", "named_pipe", "use_unicode", "client_flag", "local_infile"): if key in kw: self.kw[key] = int(kw.pop(key)) for key in ("ssl_key", "ssl_cert", "ssl_ca", "ssl_capath"): if key in kw: if "ssl" not in self.kw: self.kw["ssl"] = {} self.kw["ssl"][key[4:]] = kw.pop(key) if "charset" in kw: self.dbEncoding = self.kw["charset"] = 
kw.pop("charset") else: self.dbEncoding = None # MySQLdb < 1.2.1: only ascii # MySQLdb = 1.2.1: only unicode # MySQLdb > 1.2.1: both ascii and unicode self.need_unicode = (self.module.version_info[:3] >= (1, 2, 1)) and (self.module.version_info[:3] < (1, 2, 2)) DBAPI.__init__(self, **kw) @classmethod def _connectionFromParams(cls, user, password, host, port, path, args): return cls(db=path.strip('/'), user=user or '', password=password or '', host=host or 'localhost', port=port or 0, **args) def makeConnection(self): dbEncoding = self.dbEncoding if dbEncoding: from MySQLdb.connections import Connection if not hasattr(Connection, 'set_character_set'): # monkeypatch pre MySQLdb 1.2.1 def character_set_name(self): return dbEncoding + '_' + dbEncoding Connection.character_set_name = character_set_name try: conn = self.module.connect(host=self.host, port=self.port, db=self.db, user=self.user, passwd=self.password, **self.kw) if self.module.version_info[:3] >= (1, 2, 2): conn.ping(True) # Attempt to reconnect. This setting is persistent. except self.module.OperationalError, e: conninfo = "; used connection string: host=%(host)s, port=%(port)s, db=%(db)s, user=%(user)s" % self.__dict__ raise OperationalError(ErrorMessage(e, conninfo)) if hasattr(conn, 'autocommit'): conn.autocommit(bool(self.autoCommit)) if dbEncoding: if hasattr(conn, 'set_character_set'): # MySQLdb 1.2.1 and later conn.set_character_set(dbEncoding) else: # pre MySQLdb 1.2.1 # works along with monkeypatching code above conn.query("SET NAMES %s" % dbEncoding) return conn def _setAutoCommit(self, conn, auto): if hasattr(conn, 'autocommit'): conn.autocommit(auto) def _executeRetry(self, conn, cursor, query): if self.need_unicode and not isinstance(query, unicode): try: query = unicode(query, self.dbEncoding) except UnicodeError: pass # When a server connection is lost and a query is attempted, most of # the time the query will raise a SERVER_LOST exception, then at the # second attempt to execute it, the mysql lib will reconnect and # succeed. However is a few cases, the first attempt raises the # SERVER_GONE exception, the second attempt the SERVER_LOST exception # and only the third succeeds. Thus the 3 in the loop count. # If it doesn't reconnect even after 3 attempts, while the database is # up and running, it is because a 5.0.3 (or newer) server is used # which no longer permits autoreconnects by default. In that case a # reconnect flag must be set when making the connection to indicate # that autoreconnecting is desired. In MySQLdb 1.2.2 or newer this is # done by calling ping(True) on the connection. 
for count in range(3): try: return cursor.execute(query) except self.module.OperationalError, e: if e.args[0] in (self.module.constants.CR.SERVER_GONE_ERROR, self.module.constants.CR.SERVER_LOST): if count == 2: raise OperationalError(ErrorMessage(e)) if self.debug: self.printDebug(conn, str(e), 'ERROR') else: raise OperationalError(ErrorMessage(e)) except self.module.IntegrityError, e: msg = ErrorMessage(e) if e.args[0] == self.module.constants.ER.DUP_ENTRY: raise DuplicateEntryError(msg) else: raise IntegrityError(msg) except self.module.InternalError, e: raise InternalError(ErrorMessage(e)) except self.module.ProgrammingError, e: raise ProgrammingError(ErrorMessage(e)) except self.module.DataError, e: raise DataError(ErrorMessage(e)) except self.module.NotSupportedError, e: raise NotSupportedError(ErrorMessage(e)) except self.module.DatabaseError, e: raise DatabaseError(ErrorMessage(e)) except self.module.InterfaceError, e: raise InterfaceError(ErrorMessage(e)) except self.module.Warning, e: raise Warning(ErrorMessage(e)) except self.module.Error, e: raise Error(ErrorMessage(e)) def _queryInsertID(self, conn, soInstance, id, names, values): table = soInstance.sqlmeta.table idName = soInstance.sqlmeta.idName c = conn.cursor() if id is not None: names = [idName] + names values = [id] + values q = self._insertSQL(table, names, values) if self.debug: self.printDebug(conn, q, 'QueryIns') self._executeRetry(conn, c, q) if id is None: try: id = c.lastrowid except AttributeError: id = c.insert_id() if self.debugOutput: self.printDebug(conn, id, 'QueryIns', 'result') return id @classmethod def _queryAddLimitOffset(cls, query, start, end): if not start: return "%s LIMIT %i" % (query, end) if not end: return "%s LIMIT %i, -1" % (query, start) return "%s LIMIT %i, %i" % (query, start, end-start) def createReferenceConstraint(self, soClass, col): return col.mysqlCreateReferenceConstraint() def createColumn(self, soClass, col): return col.mysqlCreateSQL() def createIndexSQL(self, soClass, index): return index.mysqlCreateIndexSQL(soClass) def createIDColumn(self, soClass): if soClass.sqlmeta.idType == str: return '%s TEXT PRIMARY KEY' % soClass.sqlmeta.idName return '%s INT PRIMARY KEY AUTO_INCREMENT' % soClass.sqlmeta.idName def joinSQLType(self, join): return 'INT NOT NULL' def tableExists(self, tableName): try: # Use DESCRIBE instead of SHOW TABLES because SHOW TABLES # assumes there is a default database selected # which is not always True (for an embedded application, e.g.) 
self.query('DESCRIBE %s' % (tableName)) return True except ProgrammingError, e: if e[0].code == 1146: # ER_NO_SUCH_TABLE return False raise def addColumn(self, tableName, column): self.query('ALTER TABLE %s ADD COLUMN %s' % (tableName, column.mysqlCreateSQL())) def delColumn(self, sqlmeta, column): self.query('ALTER TABLE %s DROP COLUMN %s' % (sqlmeta.table, column.dbName)) def columnsFromSchema(self, tableName, soClass): colData = self.queryAll("SHOW COLUMNS FROM %s" % tableName) results = [] for field, t, nullAllowed, key, default, extra in colData: if field == soClass.sqlmeta.idName: continue colClass, kw = self.guessClass(t) if self.kw.get('use_unicode') and colClass is col.StringCol: colClass = col.UnicodeCol if self.dbEncoding: kw['dbEncoding'] = self.dbEncoding kw['name'] = soClass.sqlmeta.style.dbColumnToPythonAttr(field) kw['dbName'] = field # Since MySQL 5.0, 'NO' is returned in the NULL column (SQLObject expected '') kw['notNone'] = (nullAllowed.upper() != 'YES' and True or False) if default and t.startswith('int'): kw['default'] = int(default) elif default and t.startswith('float'): kw['default'] = float(default) elif default == 'CURRENT_TIMESTAMP' and t == 'timestamp': kw['default'] = None elif default and colClass is col.BoolCol: kw['default'] = int(default) and True or False else: kw['default'] = default # @@ skip key... # @@ skip extra... results.append(colClass(**kw)) return results def guessClass(self, t): if t.startswith('int'): return col.IntCol, {} elif t.startswith('enum'): values = [] for i in t[5:-1].split(','): # take the enum() off and split values.append(i[1:-1]) # remove the surrounding \' return col.EnumCol, {'enumValues': values} elif t.startswith('double'): return col.FloatCol, {} elif t.startswith('varchar'): colType = col.StringCol if self.kw.get('use_unicode', False): colType = col.UnicodeCol if t.endswith('binary'): return colType, {'length': int(t[8:-8]), 'char_binary': True} else: return colType, {'length': int(t[8:-1])} elif t.startswith('char'): if t.endswith('binary'): return col.StringCol, {'length': int(t[5:-8]), 'varchar': False, 'char_binary': True} else: return col.StringCol, {'length': int(t[5:-1]), 'varchar': False} elif t.startswith('datetime'): return col.DateTimeCol, {} elif t.startswith('date'): return col.DateCol, {} elif t.startswith('time'): return col.TimeCol, {} elif t.startswith('timestamp'): return col.TimestampCol, {} elif t.startswith('bool'): return col.BoolCol, {} elif t.startswith('tinyblob'): return col.BLOBCol, {"length": 2**8-1} elif t.startswith('tinytext'): return col.StringCol, {"length": 2**8-1, "varchar": True} elif t.startswith('blob'): return col.BLOBCol, {"length": 2**16-1} elif t.startswith('text'): return col.StringCol, {"length": 2**16-1, "varchar": True} elif t.startswith('mediumblob'): return col.BLOBCol, {"length": 2**24-1} elif t.startswith('mediumtext'): return col.StringCol, {"length": 2**24-1, "varchar": True} elif t.startswith('longblob'): return col.BLOBCol, {"length": 2**32} elif t.startswith('longtext'): return col.StringCol, {"length": 2**32, "varchar": True} else: return col.Col, {} def _createOrDropDatabase(self, op="CREATE"): self.query('%s DATABASE %s' % (op, self.db)) def createEmptyDatabase(self): self._createOrDropDatabase() def dropDatabase(self): self._createOrDropDatabase(op="DROP") SQLObject-1.5.2/sqlobject/declarative.py0000644000175000017500000001613711563772713017524 0ustar phdphd00000000000000""" Declarative objects. 
Declarative objects have a simple protocol: you can use classes in lieu of instances and they are equivalent, and any keyword arguments you give to the constructor will override those instance variables. (So if a class is received, we'll simply instantiate an instance with no arguments). You can provide a variable __unpackargs__ (a list of strings), and if the constructor is called with non-keyword arguments they will be interpreted as the given keyword arguments. If __unpackargs__ is ('*', name), then all the arguments will be put in a variable by that name. You can define a __classinit__(cls, new_attrs) method, which will be called when the class is created (including subclasses). Note: you can't use super() in __classinit__ because the class isn't bound to a name. As an analog to __classinit__, Declarative adds __instanceinit__ which is called with the same argument (new_attrs). This is like __init__, but after __unpackargs__ and other factors have been taken into account. If __mutableattributes__ is defined as a sequence of strings, these attributes will not be shared between superclasses and their subclasses. E.g., if you have a class variable that contains a list and you append to that list, changes to subclasses will effect superclasses unless you add the attribute here. Also defines classinstancemethod, which acts as either a class method or an instance method depending on where it is called. """ import copy import events import itertools counter = itertools.count() __all__ = ('classinstancemethod', 'DeclarativeMeta', 'Declarative') class classinstancemethod(object): """ Acts like a class method when called from a class, like an instance method when called by an instance. The method should take two arguments, 'self' and 'cls'; one of these will be None depending on how the method was called. 
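    A brief sketch (the ``Example`` class is made up for illustration)::

        class Example(object):
            @classinstancemethod
            def describe(self, cls):
                if self is None:
                    return 'called on the class %s' % cls.__name__
                return 'called on an instance of %s' % cls.__name__

        Example.describe()     # self is None, cls is Example
        Example().describe()   # self is the instance, cls is Example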
""" def __init__(self, func): self.func = func def __get__(self, obj, type=None): return _methodwrapper(self.func, obj=obj, type=type) class _methodwrapper(object): def __init__(self, func, obj, type): self.func = func self.obj = obj self.type = type def __call__(self, *args, **kw): assert not 'self' in kw and not 'cls' in kw, ( "You cannot use 'self' or 'cls' arguments to a " "classinstancemethod") return self.func(*((self.obj, self.type) + args), **kw) def __repr__(self): if self.obj is None: return ('' % (self.type.__name__, self.func.func_name)) else: return ('' % (self.type.__name__, self.func.func_name, self.obj)) class DeclarativeMeta(type): def __new__(meta, class_name, bases, new_attrs): post_funcs = [] early_funcs = [] events.send(events.ClassCreateSignal, bases[0], class_name, bases, new_attrs, post_funcs, early_funcs) cls = type.__new__(meta, class_name, bases, new_attrs) for func in early_funcs: func(cls) if '__classinit__' in new_attrs: cls.__classinit__ = staticmethod(cls.__classinit__.im_func) cls.__classinit__(cls, new_attrs) for func in post_funcs: func(cls) return cls class Declarative(object): __unpackargs__ = () __mutableattributes__ = () __metaclass__ = DeclarativeMeta __restrict_attributes__ = None def __classinit__(cls, new_attrs): cls.declarative_count = counter.next() for name in cls.__mutableattributes__: if name not in new_attrs: setattr(cls, copy.copy(getattr(cls, name))) def __instanceinit__(self, new_attrs): if self.__restrict_attributes__ is not None: for name in new_attrs: if name not in self.__restrict_attributes__: raise TypeError( '%s() got an unexpected keyword argument %r' % (self.__class__.__name__, name)) for name, value in new_attrs.items(): setattr(self, name, value) if 'declarative_count' not in new_attrs: self.declarative_count = counter.next() def __init__(self, *args, **kw): if self.__unpackargs__ and self.__unpackargs__[0] == '*': assert len(self.__unpackargs__) == 2, \ "When using __unpackargs__ = ('*', varname), you must only provide a single variable name (you gave %r)" % self.__unpackargs__ name = self.__unpackargs__[1] if name in kw: raise TypeError( "keyword parameter '%s' was given by position and name" % name) kw[name] = args else: if len(args) > len(self.__unpackargs__): raise TypeError( '%s() takes at most %i arguments (%i given)' % (self.__class__.__name__, len(self.__unpackargs__), len(args))) for name, arg in zip(self.__unpackargs__, args): if name in kw: raise TypeError( "keyword parameter '%s' was given by position and name" % name) kw[name] = arg if '__alsocopy' in kw: for name, value in kw['__alsocopy'].items(): if name not in kw: if name in self.__mutableattributes__: value = copy.copy(value) kw[name] = value del kw['__alsocopy'] self.__instanceinit__(kw) def __call__(self, *args, **kw): kw['__alsocopy'] = self.__dict__ return self.__class__(*args, **kw) @classinstancemethod def singleton(self, cls): if self: return self name = '_%s__singleton' % cls.__name__ if not hasattr(cls, name): setattr(cls, name, cls(declarative_count=cls.declarative_count)) return getattr(cls, name) @classinstancemethod def __repr__(self, cls): if self: name = '%s object' % self.__class__.__name__ v = self.__dict__.copy() else: name = '%s class' % cls.__name__ v = cls.__dict__.copy() if 'declarative_count' in v: name = '%s %i' % (name, v['declarative_count']) del v['declarative_count'] # @@: simplifying repr: #v = {} names = v.keys() args = [] for n in self._repr_vars(names): args.append('%s=%r' % (n, v[n])) if not args: return '<%s>' % name else: 
return '<%s %s>' % (name, ' '.join(args)) @staticmethod def _repr_vars(dictNames): names = [n for n in dictNames if not n.startswith('_') and n != 'declarative_count'] names.sort() return names def setup_attributes(cls, new_attrs): for name, value in new_attrs.items(): if hasattr(value, '__addtoclass__'): value.__addtoclass__(cls, name) SQLObject-1.5.2/sqlobject/dbconnection.py0000644000175000017500000011065512166333316017676 0ustar phdphd00000000000000import atexit from cgi import parse_qsl import inspect import new import os import sys import threading import types import urllib import warnings import weakref from cache import CacheSet import classregistry import col from converters import sqlrepr import main import sqlbuilder from util.threadinglocal import local as threading_local warnings.filterwarnings("ignore", "DB-API extension cursor.lastrowid used") _connections = {} def _closeConnection(ref): conn = ref() if conn is not None: conn.close() class ConsoleWriter: def __init__(self, connection, loglevel): # loglevel: None or empty string for stdout; or 'stderr' self.loglevel = loglevel or "stdout" self.dbEncoding = getattr(connection, "dbEncoding", None) or "ascii" def write(self, text): logfile = getattr(sys, self.loglevel) if isinstance(text, unicode): try: text = text.encode(self.dbEncoding) except UnicodeEncodeError: text = repr(text)[2:-1] # Remove u'...' from the repr logfile.write(text + '\n') class LogWriter: def __init__(self, connection, logger, loglevel): self.logger = logger self.loglevel = loglevel self.logmethod = getattr(logger, loglevel) def write(self, text): self.logmethod(text) def makeDebugWriter(connection, loggerName, loglevel): if not loggerName: return ConsoleWriter(connection, loglevel) import logging logger = logging.getLogger(loggerName) return LogWriter(connection, logger, loglevel) class Boolean(object): """A bool class that also understands some special string keywords (yes/no, true/false, on/off, 1/0)""" _keywords = {'1': True, 'yes': True, 'true': True, 'on': True, '0': False, 'no': False, 'false': False, 'off': False} def __new__(cls, value): try: return Boolean._keywords[value.lower()] except (AttributeError, KeyError): return bool(value) class DBConnection: def __init__(self, name=None, debug=False, debugOutput=False, cache=True, style=None, autoCommit=True, debugThreading=False, registry=None, logger=None, loglevel=None): self.name = name self.debug = Boolean(debug) self.debugOutput = Boolean(debugOutput) self.debugThreading = Boolean(debugThreading) self.debugWriter = makeDebugWriter(self, logger, loglevel) self.doCache = Boolean(cache) self.cache = CacheSet(cache=self.doCache) self.style = style self._connectionNumbers = {} self._connectionCount = 1 self.autoCommit = Boolean(autoCommit) self.registry = registry or None classregistry.registry(self.registry).addCallback(self.soClassAdded) registerConnectionInstance(self) atexit.register(_closeConnection, weakref.ref(self)) def oldUri(self): auth = getattr(self, 'user', '') or '' if auth: if self.password: auth = auth + ':' + self.password auth = auth + '@' else: assert not getattr(self, 'password', None), ( 'URIs cannot express passwords without usernames') uri = '%s://%s' % (self.dbName, auth) if self.host: uri += self.host if self.port: uri += ':%d' % self.port uri += '/' db = self.db if db.startswith('/'): db = db[1:] return uri + db def uri(self): auth = getattr(self, 'user', '') or '' if auth: auth = urllib.quote(auth) if self.password: auth = auth + ':' + urllib.quote(self.password) auth = 
auth + '@' else: assert not getattr(self, 'password', None), ( 'URIs cannot express passwords without usernames') uri = '%s://%s' % (self.dbName, auth) if self.host: uri += self.host if self.port: uri += ':%d' % self.port uri += '/' db = self.db if db.startswith('/'): db = db[1:] return uri + urllib.quote(db) @classmethod def connectionFromOldURI(cls, uri): return cls._connectionFromParams(*cls._parseOldURI(uri)) @classmethod def connectionFromURI(cls, uri): return cls._connectionFromParams(*cls._parseURI(uri)) @staticmethod def _parseOldURI(uri): schema, rest = uri.split(':', 1) assert rest.startswith('/'), "URIs must start with scheme:/ -- you did not include a / (in %r)" % rest if rest.startswith('/') and not rest.startswith('//'): host = None rest = rest[1:] elif rest.startswith('///'): host = None rest = rest[3:] else: rest = rest[2:] if rest.find('/') == -1: host = rest rest = '' else: host, rest = rest.split('/', 1) if host and host.find('@') != -1: user, host = host.rsplit('@', 1) if user.find(':') != -1: user, password = user.split(':', 1) else: password = None else: user = password = None if host and host.find(':') != -1: _host, port = host.split(':') try: port = int(port) except ValueError: raise ValueError, "port must be integer, got '%s' instead" % port if not (1 <= port <= 65535): raise ValueError, "port must be integer in the range 1-65535, got '%d' instead" % port host = _host else: port = None path = '/' + rest if os.name == 'nt': if (len(rest) > 1) and (rest[1] == '|'): path = "%s:%s" % (rest[0], rest[2:]) args = {} if path.find('?') != -1: path, arglist = path.split('?', 1) arglist = arglist.split('&') for single in arglist: argname, argvalue = single.split('=', 1) argvalue = urllib.unquote(argvalue) args[argname] = argvalue return user, password, host, port, path, args @staticmethod def _parseURI(uri): protocol, request = urllib.splittype(uri) user, password, port = None, None, None host, path = urllib.splithost(request) if host: # Python < 2.7 have a problem - splituser() calls unquote() too early #user, host = urllib.splituser(host) if '@' in host: user, host = host.split('@', 1) if user: user, password = [x and urllib.unquote(x) or None for x in urllib.splitpasswd(user)] host, port = urllib.splitport(host) if port: port = int(port) elif host == '': host = None # hash-tag is splitted but ignored path, tag = urllib.splittag(path) path, query = urllib.splitquery(path) path = urllib.unquote(path) if (os.name == 'nt') and (len(path) > 2): # Preserve backward compatibility with URIs like /C|/path; # replace '|' by ':' if path[2] == '|': path = "%s:%s" % (path[0:2], path[3:]) # Remove leading slash if (path[0] == '/') and (path[2] == ':'): path = path[1:] args = {} if query: for name, value in parse_qsl(query): args[name] = value return user, password, host, port, path, args def soClassAdded(self, soClass): """ This is called for each new class; we use this opportunity to create an instance method that is bound to the class and this connection. """ name = soClass.__name__ assert not hasattr(self, name), ( "Connection %r already has an attribute with the name " "%r (and you just created the conflicting class %r)" % (self, name, soClass)) setattr(self, name, ConnWrapper(soClass, self)) def expireAll(self): """ Expire all instances of objects for this connection. 
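        A small illustration (the URI is only an example)::

            connection = connectionForURI('sqlite:/:memory:')
            # ... load some instances through this connection ...
            connection.expireAll()
            # cached column values are dropped; they will be re-read
            # from the database on the next attribute access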
""" cache_set = self.cache cache_set.weakrefAll() for item in cache_set.getAll(): item.expire() class ConnWrapper(object): """ This represents a SQLObject class that is bound to a specific connection (instances have a connection instance variable, but classes are global, so this is binds the connection variable lazily when a class method is accessed) """ # @@: methods that take connection arguments should be explicitly # marked up instead of the implicit use of a connection argument # and inspect.getargspec() def __init__(self, soClass, connection): self._soClass = soClass self._connection = connection def __call__(self, *args, **kw): kw['connection'] = self._connection return self._soClass(*args, **kw) def __getattr__(self, attr): meth = getattr(self._soClass, attr) if not isinstance(meth, types.MethodType): # We don't need to wrap non-methods return meth try: takes_conn = meth.takes_connection except AttributeError: args, varargs, varkw, defaults = inspect.getargspec(meth) assert not varkw and not varargs, ( "I cannot tell whether I must wrap this method, " "because it takes **kw: %r" % meth) takes_conn = 'connection' in args meth.im_func.takes_connection = takes_conn if not takes_conn: return meth return ConnMethodWrapper(meth, self._connection) class ConnMethodWrapper(object): def __init__(self, method, connection): self._method = method self._connection = connection def __getattr__(self, attr): return getattr(self._method, attr) def __call__(self, *args, **kw): kw['connection'] = self._connection return self._method(*args, **kw) def __repr__(self): return '' % ( self._method, self._connection) class DBAPI(DBConnection): """ Subclass must define a `makeConnection()` method, which returns a newly-created connection object. ``queryInsertID`` must also be defined. 
""" dbName = None def __init__(self, **kw): self._pool = [] self._poolLock = threading.Lock() DBConnection.__init__(self, **kw) self._binaryType = type(self.module.Binary('')) def _runWithConnection(self, meth, *args): conn = self.getConnection() try: val = meth(conn, *args) finally: self.releaseConnection(conn) return val def getConnection(self): self._poolLock.acquire() try: if not self._pool: conn = self.makeConnection() self._connectionNumbers[id(conn)] = self._connectionCount self._connectionCount += 1 else: conn = self._pool.pop() if self.debug: s = 'ACQUIRE' if self._pool is not None: s += ' pool=[%s]' % ', '.join([str(self._connectionNumbers[id(v)]) for v in self._pool]) self.printDebug(conn, s, 'Pool') return conn finally: self._poolLock.release() def releaseConnection(self, conn, explicit=False): if self.debug: if explicit: s = 'RELEASE (explicit)' else: s = 'RELEASE (implicit, autocommit=%s)' % self.autoCommit if self._pool is None: s += ' no pooling' else: s += ' pool=[%s]' % ', '.join([str(self._connectionNumbers[id(v)]) for v in self._pool]) self.printDebug(conn, s, 'Pool') if self.supportTransactions and not explicit: if self.autoCommit == 'exception': if self.debug: self.printDebug(conn, 'auto/exception', 'ROLLBACK') conn.rollback() raise Exception, 'Object used outside of a transaction; implicit COMMIT or ROLLBACK not allowed' elif self.autoCommit: if self.debug: self.printDebug(conn, 'auto', 'COMMIT') if not getattr(conn, 'autocommit', False): conn.commit() else: if self.debug: self.printDebug(conn, 'auto', 'ROLLBACK') conn.rollback() if self._pool is not None: if conn not in self._pool: # @@: We can get duplicate releasing of connections with # the __del__ in Iteration (unfortunately, not sure why # it happens) self._pool.insert(0, conn) else: conn.close() def printDebug(self, conn, s, name, type='query'): if name == 'Pool' and self.debug != 'Pool': return if type == 'query': sep = ': ' else: sep = '->' s = repr(s) n = self._connectionNumbers[id(conn)] spaces = ' '*(8-len(name)) if self.debugThreading: threadName = threading.currentThread().getName() threadName = (':' + threadName + ' '*(8-len(threadName))) else: threadName = '' msg = '%(n)2i%(threadName)s/%(name)s%(spaces)s%(sep)s %(s)s' % locals() self.debugWriter.write(msg) def _executeRetry(self, conn, cursor, query): if self.debug: self.printDebug(conn, query, 'QueryR') return cursor.execute(query) def _query(self, conn, s): if self.debug: self.printDebug(conn, s, 'Query') self._executeRetry(conn, conn.cursor(), s) def query(self, s): return self._runWithConnection(self._query, s) def _queryAll(self, conn, s): if self.debug: self.printDebug(conn, s, 'QueryAll') c = conn.cursor() self._executeRetry(conn, c, s) value = c.fetchall() if self.debugOutput: self.printDebug(conn, value, 'QueryAll', 'result') return value def queryAll(self, s): return self._runWithConnection(self._queryAll, s) def _queryAllDescription(self, conn, s): """ Like queryAll, but returns (description, rows), where the description is cursor.description (which gives row types) """ if self.debug: self.printDebug(conn, s, 'QueryAllDesc') c = conn.cursor() self._executeRetry(conn, c, s) value = c.fetchall() if self.debugOutput: self.printDebug(conn, value, 'QueryAll', 'result') return c.description, value def queryAllDescription(self, s): return self._runWithConnection(self._queryAllDescription, s) def _queryOne(self, conn, s): if self.debug: self.printDebug(conn, s, 'QueryOne') c = conn.cursor() self._executeRetry(conn, c, s) value = c.fetchone() if 
self.debugOutput: self.printDebug(conn, value, 'QueryOne', 'result') return value def queryOne(self, s): return self._runWithConnection(self._queryOne, s) def _insertSQL(self, table, names, values): return ("INSERT INTO %s (%s) VALUES (%s)" % (table, ', '.join(names), ', '.join([self.sqlrepr(v) for v in values]))) def transaction(self): return Transaction(self) def queryInsertID(self, soInstance, id, names, values): return self._runWithConnection(self._queryInsertID, soInstance, id, names, values) def iterSelect(self, select): return select.IterationClass(self, self.getConnection(), select, keepConnection=False) def accumulateSelect(self, select, *expressions): """ Apply an accumulate function(s) (SUM, COUNT, MIN, AVG, MAX, etc...) to the select object. """ q = select.queryForSelect().newItems(expressions).unlimited().orderBy(None) q = self.sqlrepr(q) val = self.queryOne(q) if len(expressions) == 1: val = val[0] return val def queryForSelect(self, select): return self.sqlrepr(select.queryForSelect()) def _SO_createJoinTable(self, join): self.query(self._SO_createJoinTableSQL(join)) def _SO_createJoinTableSQL(self, join): return ('CREATE TABLE %s (\n%s %s,\n%s %s\n)' % (join.intermediateTable, join.joinColumn, self.joinSQLType(join), join.otherColumn, self.joinSQLType(join))) def _SO_dropJoinTable(self, join): self.query("DROP TABLE %s" % join.intermediateTable) def _SO_createIndex(self, soClass, index): self.query(self.createIndexSQL(soClass, index)) def createIndexSQL(self, soClass, index): assert 0, 'Implement in subclasses' def createTable(self, soClass): createSql, constraints = self.createTableSQL(soClass) self.query(createSql) return constraints def createReferenceConstraints(self, soClass): refConstraints = [self.createReferenceConstraint(soClass, column) \ for column in soClass.sqlmeta.columnList \ if isinstance(column, col.SOForeignKey)] refConstraintDefs = [constraint \ for constraint in refConstraints \ if constraint] return refConstraintDefs def createSQL(self, soClass): tableCreateSQLs = getattr(soClass.sqlmeta, 'createSQL', None) if tableCreateSQLs: assert isinstance(tableCreateSQLs,(str,list,dict,tuple)), ( '%s.sqlmeta.createSQL must be a str, list, dict or tuple.' % (soClass.__name__)) if isinstance(tableCreateSQLs, dict): tableCreateSQLs = tableCreateSQLs.get(soClass._connection.dbName, []) if isinstance(tableCreateSQLs, str): tableCreateSQLs = [tableCreateSQLs] if isinstance(tableCreateSQLs, tuple): tableCreateSQLs = list(tableCreateSQLs) assert isinstance(tableCreateSQLs,list), ( 'Unable to create a list from %s.sqlmeta.createSQL' % (soClass.__name__)) return tableCreateSQLs or [] def createTableSQL(self, soClass): constraints = self.createReferenceConstraints(soClass) extraSQL = self.createSQL(soClass) createSql = ('CREATE TABLE %s (\n%s\n)' % (soClass.sqlmeta.table, self.createColumns(soClass))) return createSql, constraints + extraSQL def createColumns(self, soClass): columnDefs = [self.createIDColumn(soClass)] \ + [self.createColumn(soClass, col) for col in soClass.sqlmeta.columnList] return ",\n".join([" %s" % c for c in columnDefs]) def createReferenceConstraint(self, soClass, col): assert 0, "Implement in subclasses" def createColumn(self, soClass, col): assert 0, "Implement in subclasses" def dropTable(self, tableName, cascade=False): self.query("DROP TABLE %s" % tableName) def clearTable(self, tableName): # 3-03 @@: Should this have a WHERE 1 = 1 or similar # clause? 
In some configurations without the WHERE clause # the query won't go through, but maybe we shouldn't override # that. self.query("DELETE FROM %s" % tableName) def createBinary(self, value): """ Create a binary object wrapper for the given database. """ # Default is Binary() function from the connection driver. return self.module.Binary(value) # The _SO_* series of methods are sorts of "friend" methods # with SQLObject. They grab values from the SQLObject instances # or classes freely, but keep the SQLObject class from accessing # the database directly. This way no SQL is actually created # in the SQLObject class. def _SO_update(self, so, values): self.query("UPDATE %s SET %s WHERE %s = (%s)" % (so.sqlmeta.table, ", ".join(["%s = (%s)" % (dbName, self.sqlrepr(value)) for dbName, value in values]), so.sqlmeta.idName, self.sqlrepr(so.id))) def _SO_selectOne(self, so, columnNames): return self._SO_selectOneAlt(so, columnNames, so.q.id==so.id) def _SO_selectOneAlt(self, so, columnNames, condition): if columnNames: columns = [isinstance(x, basestring) and sqlbuilder.SQLConstant(x) or x for x in columnNames] else: columns = None return self.queryOne(self.sqlrepr(sqlbuilder.Select(columns, staticTables=[so.sqlmeta.table], clause=condition))) def _SO_delete(self, so): self.query("DELETE FROM %s WHERE %s = (%s)" % (so.sqlmeta.table, so.sqlmeta.idName, self.sqlrepr(so.id))) def _SO_selectJoin(self, soClass, column, value): return self.queryAll("SELECT %s FROM %s WHERE %s = (%s)" % (soClass.sqlmeta.idName, soClass.sqlmeta.table, column, self.sqlrepr(value))) def _SO_intermediateJoin(self, table, getColumn, joinColumn, value): return self.queryAll("SELECT %s FROM %s WHERE %s = (%s)" % (getColumn, table, joinColumn, self.sqlrepr(value))) def _SO_intermediateDelete(self, table, firstColumn, firstValue, secondColumn, secondValue): self.query("DELETE FROM %s WHERE %s = (%s) AND %s = (%s)" % (table, firstColumn, self.sqlrepr(firstValue), secondColumn, self.sqlrepr(secondValue))) def _SO_intermediateInsert(self, table, firstColumn, firstValue, secondColumn, secondValue): self.query("INSERT INTO %s (%s, %s) VALUES (%s, %s)" % (table, firstColumn, secondColumn, self.sqlrepr(firstValue), self.sqlrepr(secondValue))) def _SO_columnClause(self, soClass, kw): ops = {None: "IS"} data = {} if 'id' in kw: data[soClass.sqlmeta.idName] = kw.pop('id') for key, col in soClass.sqlmeta.columns.items(): if key in kw: value = kw.pop(key) if col.from_python: value = col.from_python(value, sqlbuilder.SQLObjectState(soClass, connection=self)) data[col.dbName] = value elif col.foreignName in kw: obj = kw.pop(col.foreignName) if isinstance(obj, main.SQLObject): data[col.dbName] = obj.id else: data[col.dbName] = obj if kw: # pick the first key from kw to use to raise the error, raise TypeError, "got an unexpected keyword argument(s): %r" % kw.keys() if not data: return None return ' AND '.join( ['%s %s %s' % (dbName, ops.get(value, "="), self.sqlrepr(value)) for dbName, value in data.items()]) def sqlrepr(self, v): return sqlrepr(v, self.dbName) def __del__(self): self.close() def close(self): if not hasattr(self, '_pool'): # Probably there was an exception while creating this # instance, so it is incomplete. 
return if not self._pool: return self._poolLock.acquire() try: if not self._pool: # _pool could be filled in a different thread return conns = self._pool[:] self._pool[:] = [] for conn in conns: try: conn.close() except self.module.Error: pass del conn del conns finally: self._poolLock.release() def createEmptyDatabase(self): """ Create an empty database. """ raise NotImplementedError class Iteration(object): def __init__(self, dbconn, rawconn, select, keepConnection=False): self.dbconn = dbconn self.rawconn = rawconn self.select = select self.keepConnection = keepConnection self.cursor = rawconn.cursor() self.query = self.dbconn.queryForSelect(select) if dbconn.debug: dbconn.printDebug(rawconn, self.query, 'Select') self.dbconn._executeRetry(self.rawconn, self.cursor, self.query) def __iter__(self): return self def next(self): result = self.cursor.fetchone() if result is None: self._cleanup() raise StopIteration if result[0] is None: return None if self.select.ops.get('lazyColumns', 0): obj = self.select.sourceClass.get(result[0], connection=self.dbconn) return obj else: obj = self.select.sourceClass.get(result[0], selectResults=result[1:], connection=self.dbconn) return obj def _cleanup(self): if getattr(self, 'query', None) is None: # already cleaned up return self.query = None if not self.keepConnection: self.dbconn.releaseConnection(self.rawconn) self.dbconn = self.rawconn = self.select = self.cursor = None def __del__(self): self._cleanup() class Transaction(object): def __init__(self, dbConnection): # this is to skip __del__ in case of an exception in this __init__ self._obsolete = True self._dbConnection = dbConnection self._connection = dbConnection.getConnection() self._dbConnection._setAutoCommit(self._connection, 0) self.cache = CacheSet(cache=dbConnection.doCache) self._deletedCache = {} self._obsolete = False def assertActive(self): assert not self._obsolete, "This transaction has already gone through ROLLBACK; begin another transaction" def query(self, s): self.assertActive() return self._dbConnection._query(self._connection, s) def queryAll(self, s): self.assertActive() return self._dbConnection._queryAll(self._connection, s) def queryOne(self, s): self.assertActive() return self._dbConnection._queryOne(self._connection, s) def queryInsertID(self, soInstance, id, names, values): self.assertActive() return self._dbConnection._queryInsertID( self._connection, soInstance, id, names, values) def iterSelect(self, select): self.assertActive() # We can't keep the cursor open with results in a transaction, # because we might want to use the connection while we're # still iterating through the results. # @@: But would it be okay for psycopg, with threadsafety # level 2? return iter(list(select.IterationClass(self, self._connection, select, keepConnection=True))) def _SO_delete(self, inst): cls = inst.__class__.__name__ if not cls in self._deletedCache: self._deletedCache[cls] = [] self._deletedCache[cls].append(inst.id) meth = new.instancemethod(self._dbConnection._SO_delete.im_func, self, self.__class__) return meth(inst) def commit(self, close=False): if self._obsolete: # @@: is it okay to get extraneous commits? 
return if self._dbConnection.debug: self._dbConnection.printDebug(self._connection, '', 'COMMIT') self._connection.commit() subCaches = [(sub[0], sub[1].allIDs()) for sub in self.cache.allSubCachesByClassNames().items()] subCaches.extend([(x[0], x[1]) for x in self._deletedCache.items()]) for cls, ids in subCaches: for id in ids: inst = self._dbConnection.cache.tryGetByName(id, cls) if inst is not None: inst.expire() if close: self._makeObsolete() def rollback(self): if self._obsolete: # @@: is it okay to get extraneous rollbacks? return if self._dbConnection.debug: self._dbConnection.printDebug(self._connection, '', 'ROLLBACK') subCaches = [(sub, sub.allIDs()) for sub in self.cache.allSubCaches()] self._connection.rollback() for subCache, ids in subCaches: for id in ids: inst = subCache.tryGet(id) if inst is not None: inst.expire() self._makeObsolete() def __getattr__(self, attr): """ If nothing else works, let the parent connection handle it. Except with this transaction as 'self'. Poor man's acquisition? Bad programming? Okay, maybe. """ self.assertActive() attr = getattr(self._dbConnection, attr) try: func = attr.im_func except AttributeError: if isinstance(attr, ConnWrapper): return ConnWrapper(attr._soClass, self) else: return attr else: meth = new.instancemethod(func, self, self.__class__) return meth def _makeObsolete(self): self._obsolete = True if self._dbConnection.autoCommit: self._dbConnection._setAutoCommit(self._connection, 1) self._dbConnection.releaseConnection(self._connection, explicit=True) self._connection = None self._deletedCache = {} def begin(self): # @@: Should we do this, or should begin() be a no-op when we're # not already obsolete? assert self._obsolete, "You cannot begin a new transaction session without rolling back this one" self._obsolete = False self._connection = self._dbConnection.getConnection() self._dbConnection._setAutoCommit(self._connection, 0) def __del__(self): if self._obsolete: return self.rollback() def close(self): raise TypeError('You cannot just close transaction - you should either call rollback(), commit() or commit(close=True) to close the underlying connection.') class ConnectionHub(object): """ This object serves as a hub for connections, so that you can pass in a ConnectionHub to a SQLObject subclass as though it was a connection, but actually bind a real database connection later. You can also bind connections on a per-thread basis. You must hang onto the original ConnectionHub instance, as you cannot retrieve it again from the class or instance. To use the hub, do something like:: hub = ConnectionHub() class MyClass(SQLObject): _connection = hub hub.threadConnection = connectionFromURI('...') """ def __init__(self): self.threadingLocal = threading_local() def __get__(self, obj, type=None): # I'm a little surprised we have to do this, but apparently # the object's private dictionary of attributes doesn't # override this descriptor. 
if (obj is not None) and '_connection' in obj.__dict__: return obj.__dict__['_connection'] return self.getConnection() def __set__(self, obj, value): obj.__dict__['_connection'] = value def getConnection(self): try: return self.threadingLocal.connection except AttributeError: try: return self.processConnection except AttributeError: raise AttributeError( "No connection has been defined for this thread " "or process") def doInTransaction(self, func, *args, **kw): """ This routine can be used to run a function in a transaction, rolling the transaction back if any exception is raised from that function, and committing otherwise. Use like:: sqlhub.doInTransaction(process_request, os.environ) This will run ``process_request(os.environ)``. The return value will be preserved. """ # @@: In Python 2.5, something usable with with: should also # be added. try: old_conn = self.threadingLocal.connection old_conn_is_threading = True except AttributeError: old_conn = self.processConnection old_conn_is_threading = False conn = old_conn.transaction() if old_conn_is_threading: self.threadConnection = conn else: self.processConnection = conn try: try: value = func(*args, **kw) except: conn.rollback() raise else: conn.commit(close=True) return value finally: if old_conn_is_threading: self.threadConnection = old_conn else: self.processConnection = old_conn def _set_threadConnection(self, value): self.threadingLocal.connection = value def _get_threadConnection(self): return self.threadingLocal.connection def _del_threadConnection(self): del self.threadingLocal.connection threadConnection = property(_get_threadConnection, _set_threadConnection, _del_threadConnection) class ConnectionURIOpener(object): def __init__(self): self.schemeBuilders = {} self.instanceNames = {} self.cachedURIs = {} def registerConnection(self, schemes, builder): for uriScheme in schemes: assert not uriScheme in self.schemeBuilders \ or self.schemeBuilders[uriScheme] is builder, \ "A driver has already been registered for the URI scheme %s" % uriScheme self.schemeBuilders[uriScheme] = builder def registerConnectionInstance(self, inst): if inst.name: assert not inst.name in self.instanceNames \ or self.instanceNames[inst.name] is cls, \ "A instance has already been registered with the name %s" % inst.name assert inst.name.find(':') == -1, "You cannot include ':' in your class names (%r)" % cls.name self.instanceNames[inst.name] = inst def connectionForURI(self, uri, oldUri=False, **args): if args: if '?' not in uri: uri += '?' + urllib.urlencode(args) else: uri += '&' + urllib.urlencode(args) if uri in self.cachedURIs: return self.cachedURIs[uri] if uri.find(':') != -1: scheme, rest = uri.split(':', 1) connCls = self.dbConnectionForScheme(scheme) if oldUri: conn = connCls.connectionFromOldURI(uri) else: conn = connCls.connectionFromURI(uri) else: # We just have a name, not a URI assert uri in self.instanceNames, \ "No SQLObject driver exists under the name %s" % uri conn = self.instanceNames[uri] # @@: Do we care if we clobber another connection? 
self.cachedURIs[uri] = conn return conn def dbConnectionForScheme(self, scheme): assert scheme in self.schemeBuilders, ( "No SQLObject driver exists for %s (only %s)" % (scheme, ', '.join(self.schemeBuilders.keys()))) return self.schemeBuilders[scheme]() TheURIOpener = ConnectionURIOpener() registerConnection = TheURIOpener.registerConnection registerConnectionInstance = TheURIOpener.registerConnectionInstance connectionForURI = TheURIOpener.connectionForURI dbConnectionForScheme = TheURIOpener.dbConnectionForScheme # Register DB URI schemas import firebird import maxdb import mssql import mysql import postgres import rdbhost import sqlite import sybase SQLObject-1.5.2/sqlobject/postgres/0000755000175000017500000000000012322476205016514 5ustar phdphd00000000000000SQLObject-1.5.2/sqlobject/postgres/__init__.py0000644000175000017500000000031511133142424020614 0ustar phdphd00000000000000from sqlobject.dbconnection import registerConnection def builder(): import pgconnection return pgconnection.PostgresConnection registerConnection(['postgres', 'postgresql', 'psycopg'], builder) SQLObject-1.5.2/sqlobject/postgres/pgconnection.py0000644000175000017500000003651112222302075021552 0ustar phdphd00000000000000from sqlobject.dbconnection import DBAPI import re from sqlobject import col from sqlobject import sqlbuilder from sqlobject.converters import registerConverter from sqlobject.dberrors import * class ErrorMessage(str): def __new__(cls, e, append_msg=''): obj = str.__new__(cls, e[0] + append_msg) if e.__module__ == 'psycopg2': obj.code = getattr(e, 'pgcode', None) obj.error = getattr(e, 'pgerror', None) else: obj.code = obj.error = None obj.module = e.__module__ obj.exception = e.__class__.__name__ return obj class PostgresConnection(DBAPI): supportTransactions = True dbName = 'postgres' schemes = [dbName, 'postgresql'] def __init__(self, dsn=None, host=None, port=None, db=None, user=None, password=None, **kw): drivers = kw.pop('driver', None) or 'psycopg' for driver in drivers.split(','): driver = driver.strip() if not driver: continue try: if driver == 'psycopg2': import psycopg2 as psycopg elif driver == 'psycopg1': import psycopg elif driver == 'psycopg': try: import psycopg2 as psycopg except ImportError: import psycopg elif driver == 'pygresql': import pgdb self.module = pgdb else: raise ValueError('Unknown PostgreSQL driver "%s", expected psycopg2, psycopg1 or pygresql' % driver) except ImportError: pass else: break else: raise ImportError('Cannot find a PostgreSQL driver, tried %s' % drivers) if driver.startswith('psycopg'): self.module = psycopg # Register a converter for psycopg Binary type. 
registerConverter(type(psycopg.Binary('')), PsycoBinaryConverter) self.user = user self.host = host self.port = port self.db = db self.password = password self.dsn_dict = dsn_dict = {} if host: dsn_dict["host"] = host if port: if driver == 'pygresql': dsn_dict["host"] = "%s:%d" % (host, port) else: if psycopg.__version__.split('.')[0] == '1': dsn_dict["port"] = str(port) else: dsn_dict["port"] = port if db: dsn_dict["database"] = db if user: dsn_dict["user"] = user if password: dsn_dict["password"] = password sslmode = kw.pop("sslmode", None) if sslmode: dsn_dict["sslmode"] = sslmode self.use_dsn = dsn is not None if dsn is None: if driver == 'pygresql': dsn = '' if host: dsn += host dsn += ':' if db: dsn += db dsn += ':' if user: dsn += user dsn += ':' if password: dsn += password else: dsn = [] if db: dsn.append('dbname=%s' % db) if user: dsn.append('user=%s' % user) if password: dsn.append('password=%s' % password) if host: dsn.append('host=%s' % host) if port: dsn.append('port=%d' % port) if sslmode: dsn.append('sslmode=%s' % sslmode) dsn = ' '.join(dsn) self.driver = driver self.dsn = dsn self.unicodeCols = kw.pop('unicodeCols', False) self.schema = kw.pop('schema', None) self.dbEncoding = kw.pop("charset", None) DBAPI.__init__(self, **kw) @classmethod def _connectionFromParams(cls, user, password, host, port, path, args): path = path.strip('/') if (host is None) and path.count('/'): # Non-default unix socket path_parts = path.split('/') host = '/' + '/'.join(path_parts[:-1]) path = path_parts[-1] return cls(host=host, port=port, db=path, user=user, password=password, **args) def _setAutoCommit(self, conn, auto): # psycopg2 does not have an autocommit method. if hasattr(conn, 'autocommit'): try: conn.autocommit(auto) except TypeError: conn.autocommit = auto def makeConnection(self): try: if self.use_dsn: conn = self.module.connect(self.dsn) else: conn = self.module.connect(**self.dsn_dict) except self.module.OperationalError, e: raise OperationalError(ErrorMessage(e, "used connection string %r" % self.dsn)) # For printDebug in _executeRetry self._connectionNumbers[id(conn)] = self._connectionCount if self.autoCommit: self._setAutoCommit(conn, 1) c = conn.cursor() if self.schema: self._executeRetry(conn, c, "SET search_path TO " + self.schema) dbEncoding = self.dbEncoding if dbEncoding: self._executeRetry(conn, c, "SET client_encoding TO '%s'" % dbEncoding) return conn def _executeRetry(self, conn, cursor, query): if self.debug: self.printDebug(conn, query, 'QueryR') try: return cursor.execute(query) except self.module.OperationalError, e: raise OperationalError(ErrorMessage(e)) except self.module.IntegrityError, e: msg = ErrorMessage(e) if e.pgcode == '23505': raise DuplicateEntryError(msg) else: raise IntegrityError(msg) except self.module.InternalError, e: raise InternalError(ErrorMessage(e)) except self.module.ProgrammingError, e: raise ProgrammingError(ErrorMessage(e)) except self.module.DataError, e: raise DataError(ErrorMessage(e)) except self.module.NotSupportedError, e: raise NotSupportedError(ErrorMessage(e)) except self.module.DatabaseError, e: raise DatabaseError(ErrorMessage(e)) except self.module.InterfaceError, e: raise InterfaceError(ErrorMessage(e)) except self.module.Warning, e: raise Warning(ErrorMessage(e)) except self.module.Error, e: raise Error(ErrorMessage(e)) def _queryInsertID(self, conn, soInstance, id, names, values): table = soInstance.sqlmeta.table idName = soInstance.sqlmeta.idName sequenceName = soInstance.sqlmeta.idSequence or \ '%s_%s_seq' % (table, 
idName) c = conn.cursor() if id is not None: names = [idName] + names values = [id] + values if names and values: q = self._insertSQL(table, names, values) else: q = "INSERT INTO %s DEFAULT VALUES" % table if id is None: q += " RETURNING " + idName if self.debug: self.printDebug(conn, q, 'QueryIns') self._executeRetry(conn, c, q) if id is None: id = c.fetchone()[0] if self.debugOutput: self.printDebug(conn, id, 'QueryIns', 'result') return id @classmethod def _queryAddLimitOffset(cls, query, start, end): if not start: return "%s LIMIT %i" % (query, end) if not end: return "%s OFFSET %i" % (query, start) return "%s LIMIT %i OFFSET %i" % (query, end-start, start) def createColumn(self, soClass, col): return col.postgresCreateSQL() def createReferenceConstraint(self, soClass, col): return col.postgresCreateReferenceConstraint() def createIndexSQL(self, soClass, index): return index.postgresCreateIndexSQL(soClass) def createIDColumn(self, soClass): key_type = {int: "SERIAL", str: "TEXT"}[soClass.sqlmeta.idType] return '%s %s PRIMARY KEY' % (soClass.sqlmeta.idName, key_type) def dropTable(self, tableName, cascade=False): self.query("DROP TABLE %s %s" % (tableName, cascade and 'CASCADE' or '')) def joinSQLType(self, join): return 'INT NOT NULL' def tableExists(self, tableName): result = self.queryOne("SELECT COUNT(relname) FROM pg_class WHERE relname = %s" % self.sqlrepr(tableName)) return result[0] def addColumn(self, tableName, column): self.query('ALTER TABLE %s ADD COLUMN %s' % (tableName, column.postgresCreateSQL())) def delColumn(self, sqlmeta, column): self.query('ALTER TABLE %s DROP COLUMN %s' % (sqlmeta.table, column.dbName)) def columnsFromSchema(self, tableName, soClass): keyQuery = """ SELECT pg_catalog.pg_get_constraintdef(oid) as condef FROM pg_catalog.pg_constraint r WHERE r.conrelid = %s::regclass AND r.contype = 'f'""" colQuery = """ SELECT a.attname, pg_catalog.format_type(a.atttypid, a.atttypmod), a.attnotnull, (SELECT substring(d.adsrc for 128) FROM pg_catalog.pg_attrdef d WHERE d.adrelid=a.attrelid AND d.adnum = a.attnum) FROM pg_catalog.pg_attribute a WHERE a.attrelid =%s::regclass AND a.attnum > 0 AND NOT a.attisdropped ORDER BY a.attnum""" primaryKeyQuery = """ SELECT pg_index.indisprimary, pg_catalog.pg_get_indexdef(pg_index.indexrelid) FROM pg_catalog.pg_class c, pg_catalog.pg_class c2, pg_catalog.pg_index AS pg_index WHERE c.relname = %s AND c.oid = pg_index.indrelid AND pg_index.indexrelid = c2.oid AND pg_index.indisprimary """ keyData = self.queryAll(keyQuery % self.sqlrepr(tableName)) keyRE = re.compile(r"\((.+)\) REFERENCES (.+)\(") keymap = {} for (condef,) in keyData: match = keyRE.search(condef) if match: field, reftable = match.groups() keymap[field] = reftable.capitalize() primaryData = self.queryAll(primaryKeyQuery % self.sqlrepr(tableName)) primaryRE = re.compile(r'CREATE .*? 
USING .* \((.+?)\)') primaryKey = None for isPrimary, indexDef in primaryData: match = primaryRE.search(indexDef) assert match, "Unparseable contraint definition: %r" % indexDef assert primaryKey is None, "Already found primary key (%r), then found: %r" % (primaryKey, indexDef) primaryKey = match.group(1) if primaryKey is None: # VIEWs don't have PRIMARY KEYs - accept help from user primaryKey = soClass.sqlmeta.idName assert primaryKey, "No primary key found in table %r" % tableName if primaryKey.startswith('"'): assert primaryKey.endswith('"') primaryKey = primaryKey[1:-1] colData = self.queryAll(colQuery % self.sqlrepr(tableName)) results = [] if self.unicodeCols: client_encoding = self.queryOne("SHOW client_encoding")[0] for field, t, notnull, defaultstr in colData: if field == primaryKey: continue if field in keymap: colClass = col.ForeignKey kw = {'foreignKey': soClass.sqlmeta.style.dbTableToPythonClass(keymap[field])} name = soClass.sqlmeta.style.dbColumnToPythonAttr(field) if name.endswith('ID'): name = name[:-2] kw['name'] = name else: colClass, kw = self.guessClass(t) if self.unicodeCols and colClass is col.StringCol: colClass = col.UnicodeCol kw['dbEncoding'] = client_encoding kw['name'] = soClass.sqlmeta.style.dbColumnToPythonAttr(field) kw['dbName'] = field kw['notNone'] = notnull if defaultstr is not None: kw['default'] = self.defaultFromSchema(colClass, defaultstr) elif not notnull: kw['default'] = None results.append(colClass(**kw)) return results def guessClass(self, t): if t.count('point'): # poINT before INT return col.StringCol, {} elif t.count('int'): return col.IntCol, {} elif t.count('varying') or t.count('varchar'): if '(' in t: return col.StringCol, {'length': int(t[t.index('(')+1:-1])} else: # varchar without length in Postgres means any length return col.StringCol, {} elif t.startswith('character('): return col.StringCol, {'length': int(t[t.index('(')+1:-1]), 'varchar': False} elif t.count('float') or t.count('real') or t.count('double'): return col.FloatCol, {} elif t == 'text': return col.StringCol, {} elif t.startswith('timestamp'): return col.DateTimeCol, {} elif t.startswith('datetime'): return col.DateTimeCol, {} elif t.startswith('date'): return col.DateCol, {} elif t.startswith('bool'): return col.BoolCol, {} elif t.startswith('bytea'): return col.BLOBCol, {} else: return col.Col, {} def defaultFromSchema(self, colClass, defaultstr): """ If the default can be converted to a python constant, convert it. Otherwise return is as a sqlbuilder constant. """ if colClass == col.BoolCol: if defaultstr == 'false': return False elif defaultstr == 'true': return True return getattr(sqlbuilder.const, defaultstr) def _createOrDropDatabase(self, op="CREATE"): # We have to connect to *some* database, so we'll connect to # template1, which is a common open database. 
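# Illustrative usage sketch (editor's addition, not part of the original
# source; the URI is hypothetical).  This helper backs the
# createEmptyDatabase()/dropDatabase() methods defined below, e.g. for a
# throw-away test database:
#
#     conn = connectionForURI('postgres://user:pwd@localhost/myapp_test')
#     conn.createEmptyDatabase()   # CREATE DATABASE, issued via template1
#     # ... run the test suite against the fresh database ...
#     conn.dropDatabase()          # DROP DATABASE, issued via template1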
# @@: This doesn't use self.use_dsn or self.dsn_dict if self.driver == 'pygresql': dsn = '%s:template1:%s:%s' % ( self.host or '', self.user or '', self.password or '') else: dsn = 'dbname=template1' if self.user: dsn += ' user=%s' % self.user if self.password: dsn += ' password=%s' % self.password if self.host: dsn += ' host=%s' % self.host conn = self.module.connect(dsn) cur = conn.cursor() # We must close the transaction with a commit so that # the CREATE DATABASE can work (which can't be in a transaction): self._executeRetry(conn, cur, 'COMMIT') self._executeRetry(conn, cur, '%s DATABASE %s' % (op, self.db)) cur.close() conn.close() def createEmptyDatabase(self): self._createOrDropDatabase() def dropDatabase(self): self._createOrDropDatabase(op="DROP") # Converter for psycopg Binary type. def PsycoBinaryConverter(value, db): assert db == 'postgres' return str(value) SQLObject-1.5.2/sqlobject/converters.py0000644000175000017500000001356411771154334017426 0ustar phdphd00000000000000from array import array import datetime from decimal import Decimal import sys import time from types import ClassType, InstanceType, NoneType try: import mx.DateTime.ISO origISOStr = mx.DateTime.ISO.strGMT from mx.DateTime import DateTimeType, DateTimeDeltaType except ImportError: try: import DateTime.ISO origISOStr = DateTime.ISO.strGMT from DateTime import DateTimeType, DateTimeDeltaType except ImportError: origISOStr = None DateTimeType = None DateTimeDeltaType = None try: import Sybase NumericType=Sybase.NumericType except ImportError: NumericType = None ######################################## ## Quoting ######################################## sqlStringReplace = [ ("'", "''"), ('\\', '\\\\'), ('\000', '\\0'), ('\b', '\\b'), ('\n', '\\n'), ('\r', '\\r'), ('\t', '\\t'), ] def isoStr(val): """ Gets rid of time zone information (@@: should we convert to GMT?) 
""" val = origISOStr(val) if val.find('+') == -1: return val else: return val[:val.find('+')] class ConverterRegistry: def __init__(self): self.basic = {} self.klass = {} def registerConverter(self, typ, func): if type(typ) is ClassType: self.klass[typ] = func else: self.basic[typ] = func def lookupConverter(self, value, default=None): if type(value) is InstanceType: # lookup on klasses dict return self.klass.get(value.__class__, default) return self.basic.get(type(value), default) converters = ConverterRegistry() registerConverter = converters.registerConverter lookupConverter = converters.lookupConverter def StringLikeConverter(value, db): if isinstance(value, array): try: value = value.tounicode() except ValueError: value = value.tostring() elif isinstance(value, buffer): value = str(value) if db in ('mysql', 'postgres', 'rdbhost'): for orig, repl in sqlStringReplace: value = value.replace(orig, repl) elif db in ('sqlite', 'firebird', 'sybase', 'maxdb', 'mssql'): value = value.replace("'", "''") else: assert 0, "Database %s unknown" % db if db in ('postgres', 'rdbhost') and ('\\' in value): return "E'%s'" % value return "'%s'" % value registerConverter(str, StringLikeConverter) registerConverter(unicode, StringLikeConverter) registerConverter(array, StringLikeConverter) registerConverter(buffer, StringLikeConverter) def IntConverter(value, db): return repr(int(value)) registerConverter(int, IntConverter) def LongConverter(value, db): return str(value) registerConverter(long, LongConverter) if NumericType: registerConverter(NumericType, IntConverter) def BoolConverter(value, db): if db in ('postgres', 'rdbhost'): if value: return "'t'" else: return "'f'" else: if value: return '1' else: return '0' registerConverter(bool, BoolConverter) def FloatConverter(value, db): return repr(value) registerConverter(float, FloatConverter) if DateTimeType: def DateTimeConverter(value, db): return "'%s'" % isoStr(value) registerConverter(DateTimeType, DateTimeConverter) def TimeConverter(value, db): return "'%s'" % value.strftime("%T") registerConverter(DateTimeDeltaType, TimeConverter) def NoneConverter(value, db): return "NULL" registerConverter(NoneType, NoneConverter) def SequenceConverter(value, db): return "(%s)" % ", ".join([sqlrepr(v, db) for v in value]) registerConverter(tuple, SequenceConverter) registerConverter(list, SequenceConverter) registerConverter(dict, SequenceConverter) registerConverter(set, SequenceConverter) registerConverter(frozenset, SequenceConverter) if sys.version_info[:3] < (2, 6, 0): # Module sets was deprecated in Python 2.6 from sets import Set, ImmutableSet registerConverter(Set, SequenceConverter) registerConverter(ImmutableSet, SequenceConverter) if hasattr(time, 'struct_time'): def StructTimeConverter(value, db): return time.strftime("'%Y-%m-%d %H:%M:%S'", value) registerConverter(time.struct_time, StructTimeConverter) def DateTimeConverter(value, db): return "'%04d-%02d-%02d %02d:%02d:%02d'" % ( value.year, value.month, value.day, value.hour, value.minute, value.second) registerConverter(datetime.datetime, DateTimeConverter) def DateConverter(value, db): return "'%04d-%02d-%02d'" % (value.year, value.month, value.day) registerConverter(datetime.date, DateConverter) def TimeConverter(value, db): return "'%02d:%02d:%02d'" % (value.hour, value.minute, value.second) registerConverter(datetime.time, TimeConverter) def DecimalConverter(value, db): # See http://mail.python.org/pipermail/python-dev/2008-March/078189.html return str(value.to_eng_string()) # Convert to str 
to work around a bug in Python 2.5.2 registerConverter(Decimal, DecimalConverter) def TimedeltaConverter(value, db): return """INTERVAL '%d days %d seconds'""" % \ (value.days, value.seconds) registerConverter(datetime.timedelta, TimedeltaConverter) def sqlrepr(obj, db=None): try: reprFunc = obj.__sqlrepr__ except AttributeError: converter = lookupConverter(obj) if converter is None: raise ValueError, "Unknown SQL builtin type: %s for %s" % \ (type(obj), repr(obj)) return converter(obj, db) else: return reprFunc(db) def quote_str(s, db): if db in ('postgres', 'rdbhost') and ('\\' in s): return "E'%s'" % s return "'%s'" % s def unquote_str(s): if s[:2].upper().startswith("E'") and s.endswith("'"): return s[2:-1] elif s.startswith("'") and s.endswith("'"): return s[1:-1] else: return s SQLObject-1.5.2/sqlobject/manager/0000755000175000017500000000000012322476205016260 5ustar phdphd00000000000000SQLObject-1.5.2/sqlobject/manager/__init__.py0000644000175000017500000000000210372665117020366 0ustar phdphd00000000000000# SQLObject-1.5.2/sqlobject/manager/command.py0000755000175000017500000013760311563772713020276 0ustar phdphd00000000000000#!/usr/bin/env python import fnmatch import optparse import os import re import sys import textwrap import time import warnings try: from paste.deploy import appconfig except ImportError: appconfig = None import sqlobject from sqlobject import col from sqlobject.classregistry import findClass from sqlobject.declarative import DeclarativeMeta from sqlobject.util import moduleloader # It's not very unsafe to use tempnam like we are doing: warnings.filterwarnings( 'ignore', 'tempnam is a potential security risk.*', RuntimeWarning, '.*command', 28) def nowarning_tempnam(*args, **kw): return os.tempnam(*args, **kw) class SQLObjectVersionTable(sqlobject.SQLObject): """ This table is used to store information about the database and its version (used with record and update commands). """ class sqlmeta: table = 'sqlobject_db_version' version = col.StringCol() updated = col.DateTimeCol(default=col.DateTimeCol.now) def db_differences(soClass, conn): """ Returns the differences between a class and the table in a connection. Returns [] if no differences are found. This function does the best it can; it can miss many differences. """ # @@: Repeats a lot from CommandStatus.command, but it's hard # to actually factor out the display logic. Or I'm too lazy # to do so. 
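# Illustrative sketch (editor's addition; the Person class and the URI are
# hypothetical).  db_differences() is what the `status' and `record' commands
# below rely on; called directly it looks like:
#
#     conn = connectionForURI('postgres://localhost/myapp')
#     for diff in db_differences(Person, conn):
#         print '  %s: %s' % (Person.sqlmeta.table, diff)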
diffs = [] if not conn.tableExists(soClass.sqlmeta.table): if soClass.sqlmeta.columns: diffs.append('Does not exist in database') else: try: columns = conn.columnsFromSchema(soClass.sqlmeta.table, soClass) except AttributeError: # Database does not support reading columns pass else: existing = {} for col in columns: col = col.withClass(soClass) existing[col.dbName] = col missing = {} for col in soClass.sqlmeta.columnList: if col.dbName in existing: del existing[col.dbName] else: missing[col.dbName] = col for col in existing.values(): diffs.append('Database has extra column: %s' % col.dbName) for col in missing.values(): diffs.append('Database missing column: %s' % col.dbName) return diffs class CommandRunner(object): def __init__(self): self.commands = {} self.command_aliases = {} def run(self, argv): invoked_as = argv[0] args = argv[1:] for i in range(len(args)): if not args[i].startswith('-'): # this must be a command command = args[i].lower() del args[i] break else: # no command found self.invalid('No COMMAND given (try "%s help")' % os.path.basename(invoked_as)) real_command = self.command_aliases.get(command, command) if real_command not in self.commands.keys(): self.invalid('COMMAND %s unknown' % command) runner = self.commands[real_command]( invoked_as, command, args, self) runner.run() def register(self, command): name = command.name self.commands[name] = command for alias in command.aliases: self.command_aliases[alias] = name def invalid(self, msg, code=2): print msg sys.exit(code) the_runner = CommandRunner() register = the_runner.register def standard_parser(connection=True, simulate=True, interactive=False, find_modules=True): parser = optparse.OptionParser() parser.add_option('-v', '--verbose', help='Be verbose (multiple times for more verbosity)', action='count', dest='verbose', default=0) if simulate: parser.add_option('-n', '--simulate', help="Don't actually do anything (implies -v)", action='store_true', dest='simulate') if connection: parser.add_option('-c', '--connection', help="The database connection URI", metavar='URI', dest='connection_uri') parser.add_option('-f', '--config-file', help="The Paste config file that contains the database URI (in the database key)", metavar="FILE", dest="config_file") if find_modules: parser.add_option('-m', '--module', help="Module in which to find SQLObject classes", action='append', metavar='MODULE', dest='modules', default=[]) parser.add_option('-p', '--package', help="Package to search for SQLObject classes", action="append", metavar="PACKAGE", dest="packages", default=[]) parser.add_option('--class', help="Select only named classes (wildcards allowed)", action="append", metavar="NAME", dest="class_matchers", default=[]) if interactive: parser.add_option('-i', '--interactive', help="Ask before doing anything (use twice to be more careful)", action="count", dest="interactive", default=0) parser.add_option('--egg', help="Select modules from the given Egg, using sqlobject.txt", action="append", metavar="EGG_SPEC", dest="eggs", default=[]) return parser class Command(object): __metaclass__ = DeclarativeMeta min_args = 0 min_args_error = 'You must provide at least %(min_args)s arguments' max_args = 0 max_args_error = 'You must provide no more than %(max_args)s arguments' aliases = () required_args = [] description = None help = '' def orderClassesByDependencyLevel(self, classes): """ Return classes ordered by their depth in the class dependency tree (this is *not* the inheritance tree), from the top level (independant) classes to the 
deepest level. The dependency tree is defined by the foreign key relations. """ # @@: written as a self-contained function for now, to prevent # having to modify any core SQLObject component and namespace # contamination. # yemartin - 2006-08-08 class SQLObjectCircularReferenceError(Exception): pass def findReverseDependencies(cls): """ Return a list of classes that cls depends on. Note that "depends on" here mean "has a foreign key pointing to". """ depended = [] for col in cls.sqlmeta.columnList: if col.foreignKey: other = findClass(col.foreignKey, col.soClass.sqlmeta.registry) if (other is not cls) and (other not in depended): depended.append(other) return depended # Cache to save already calculated dependency levels. dependency_levels = {} def calculateDependencyLevel(cls, dependency_stack=[]): """ Recursively calculate the dependency level of cls, while using the dependency_stack to detect any circular reference. """ # Return value from the cache if already calculated if cls in dependency_levels: return dependency_levels[cls] # Check for circular references if cls in dependency_stack: dependency_stack.append(cls) raise SQLObjectCircularReferenceError, ( "Found a circular reference: %s " % (' --> '.join([x.__name__ for x in dependency_stack]))) dependency_stack.append(cls) # Recursively inspect dependent classes. depended = findReverseDependencies(cls) if depended: level = max([calculateDependencyLevel(x, dependency_stack) for x in depended]) + 1 else: level = 0 dependency_levels[cls] = level return level # Now simply calculate and sort by dependency levels: try: sorter = [] for cls in classes: level = calculateDependencyLevel(cls) sorter.append((level, cls)) sorter.sort() ordered_classes = [cls for level, cls in sorter] except SQLObjectCircularReferenceError, msg: # Failsafe: return the classes as-is if a circular reference # prevented the dependency levels to be calculated. print ("Warning: a circular reference was detected in the " "model. Unable to sort the classes by dependency: they " "will be treated in alphabetic order. This may or may " "not work depending on your database backend. 
" "The error was:\n%s" % msg) return classes return ordered_classes def __classinit__(cls, new_args): if cls.__bases__ == (object,): # This abstract base class return register(cls) def __init__(self, invoked_as, command_name, args, runner): self.invoked_as = invoked_as self.command_name = command_name self.raw_args = args self.runner = runner def run(self): self.parser.usage = "%%prog [options]\n%s" % self.summary if self.help: help = textwrap.fill( self.help, int(os.environ.get('COLUMNS', 80))-4) self.parser.usage += '\n' + help self.parser.prog = '%s %s' % ( os.path.basename(self.invoked_as), self.command_name) if self.description: self.parser.description = description self.options, self.args = self.parser.parse_args(self.raw_args) if (getattr(self.options, 'simulate', False) and not self.options.verbose): self.options.verbose = 1 if self.min_args is not None and len(self.args) < self.min_args: self.runner.invalid( self.min_args_error % {'min_args': self.min_args, 'actual_args': len(self.args)}) if self.max_args is not None and len(self.args) > self.max_args: self.runner.invalid( self.max_args_error % {'max_args': self.max_args, 'actual_args': len(self.args)}) for var_name, option_name in self.required_args: if not getattr(self.options, var_name, None): self.runner.invalid( 'You must provide the option %s' % option_name) conf = self.config() if conf and conf.get('sys_path'): update_sys_path(conf['sys_path'], self.options.verbose) if conf and conf.get('database'): conn = sqlobject.connectionForURI(conf['database']) sqlobject.sqlhub.processConnection = conn for egg_spec in getattr(self.options, 'eggs', []): self.load_options_from_egg(egg_spec) self.command() def classes(self, require_connection=True, require_some=False): all = [] conf = self.config() for module_name in self.options.modules: all.extend(self.classes_from_module( moduleloader.load_module(module_name))) for package_name in self.options.packages: all.extend(self.classes_from_package(package_name)) for egg_spec in self.options.eggs: all.extend(self.classes_from_egg(egg_spec)) if self.options.class_matchers: filtered = [] for soClass in all: name = soClass.__name__ for matcher in self.options.class_matchers: if fnmatch.fnmatch(name, matcher): filtered.append(soClass) break all = filtered conn = self.connection() if conn: for soClass in all: soClass._connection = conn else: missing = [] for soClass in all: try: if not soClass._connection: missing.append(soClass) except AttributeError: missing.append(soClass) if missing and require_connection: self.runner.invalid( 'These classes do not have connections set:\n * %s\n' 'You must indicate --connection=URI' % '\n * '.join([soClass.__name__ for soClass in missing])) if require_some and not all: print 'No classes found!' 
if self.options.modules: print 'Looked in modules: %s' % ', '.join(self.options.modules) else: print 'No modules specified' if self.options.packages: print 'Looked in packages: %s' % ', '.join(self.options.packages) else: print 'No packages specified' if self.options.class_matchers: print 'Matching class pattern: %s' % self.options.class_matches if self.options.eggs: print 'Looked in eggs: %s' % ', '.join(self.options.eggs) else: print 'No eggs specified' sys.exit(1) return self.orderClassesByDependencyLevel(all) def classes_from_module(self, module): all = [] if hasattr(module, 'soClasses'): for name_or_class in module.soClasses: if isinstance(name_or_class, str): name_or_class = getattr(module, name_or_class) all.append(name_or_class) else: for name in dir(module): value = getattr(module, name) if (isinstance(value, type) and issubclass(value, sqlobject.SQLObject) and value.__module__ == module.__name__): all.append(value) return all def connection(self): config = self.config() if config is not None: assert config.get('database'), ( "No database variable found in config file %s" % self.options.config_file) return sqlobject.connectionForURI(config['database']) elif getattr(self.options, 'connection_uri', None): return sqlobject.connectionForURI(self.options.connection_uri) else: return None def config(self): if not getattr(self.options, 'config_file', None): return None config_file = self.options.config_file if appconfig: if (not config_file.startswith('egg:') and not config_file.startswith('config:')): config_file = 'config:' + config_file return appconfig(config_file, relative_to=os.getcwd()) else: return self.ini_config(config_file) def ini_config(self, conf_fn): conf_section = 'main' if '#' in conf_fn: conf_fn, conf_section = conf_fn.split('#', 1) from ConfigParser import ConfigParser p = ConfigParser() # Case-sensitive: p.optionxform = str if not os.path.exists(conf_fn): # Stupid RawConfigParser doesn't give an error for # non-existant files: raise OSError( "Config file %s does not exist" % self.options.config_file) p.read([conf_fn]) p._defaults.setdefault( 'here', os.path.dirname(os.path.abspath(conf_fn))) possible_sections = [] for section in p.sections(): name = section.strip().lower() if (conf_section == name or (conf_section == name.split(':')[-1] and name.split(':')[0] in ('app', 'application'))): possible_sections.append(section) if not possible_sections: raise OSError( "Config file %s does not have a section [%s] or [*:%s]" % (conf_fn, conf_section, conf_section)) if len(possible_sections) > 1: raise OSError( "Config file %s has multiple sections matching %s: %s" % (conf_fn, conf_section, ', '.join(possible_sections))) config = {} for op in p.options(possible_sections[0]): config[op] = p.get(possible_sections[0], op) return config def classes_from_package(self, package_name): all = [] package = moduleloader.load_module(package_name) package_dir = os.path.dirname(package.__file__) def find_classes_in_file(arg, dir_name, filenames): if dir_name.startswith('.svn'): return filenames = filter(lambda fname: fname.endswith('.py') and fname != '__init__.py', filenames) for fname in filenames: module_name = os.path.join(dir_name, fname) module_name = module_name[module_name.find(package_name):] module_name = module_name.replace(os.path.sep,'.')[:-3] try: module = moduleloader.load_module(module_name) except ImportError, err: if self.options.verbose: print 'Could not import module "%s". 
Error was : "%s"' % (module_name, err) continue except Exception, exc: if self.options.verbose: print 'Unknown exception while processing module "%s" : "%s"' % (module_name, exc) continue classes = self.classes_from_module(module) all.extend(classes) os.path.walk(package_dir, find_classes_in_file, None) return all def classes_from_egg(self, egg_spec): modules = [] dist, conf = self.config_from_egg(egg_spec, warn_no_sqlobject=True) for mod in conf.get('db_module', '').split(','): mod = mod.strip() if not mod: continue if self.options.verbose: print 'Looking in module %s' % mod modules.extend(self.classes_from_module( moduleloader.load_module(mod))) return modules def load_options_from_egg(self, egg_spec): dist, conf = self.config_from_egg(egg_spec) if (hasattr(self.options, 'output_dir') and not self.options.output_dir and conf.get('history_dir')): dir = conf['history_dir'] dir = dir.replace('$base', dist.location) self.options.output_dir = dir def config_from_egg(self, egg_spec, warn_no_sqlobject=True): import pkg_resources dist = pkg_resources.get_distribution(egg_spec) if not dist.has_metadata('sqlobject.txt'): if warn_no_sqlobject: print 'No sqlobject.txt in %s egg info' % egg_spec return None, {} result = {} for line in dist.get_metadata_lines('sqlobject.txt'): line = line.strip() if not line or line.startswith('#'): continue name, value = line.split('=', 1) name = name.strip().lower() if name in result: print 'Warning: %s appears more than once in sqlobject.txt' % name result[name.strip().lower()] = value.strip() return dist, result def command(self): raise NotImplementedError def _get_prog_name(self): return os.path.basename(self.invoked_as) prog_name = property(_get_prog_name) def ask(self, prompt, safe=False, default=True): if self.options.interactive >= 2: default = safe if default: prompt += ' [Y/n]? ' else: prompt += ' [y/N]? ' while 1: response = raw_input(prompt).strip() if not response.strip(): return default if response and response[0].lower() in ('y', 'n'): return response[0].lower() == 'y' print 'Y or N please' def shorten_filename(self, fn): """ Shortens a filename to make it relative to the current directory (if it can). For display purposes. """ if fn.startswith(os.getcwd() + '/'): fn = fn[len(os.getcwd())+1:] return fn def open_editor(self, pretext, breaker=None, extension='.txt'): """ Open an editor with the given text. Return the new text, or None if no edits were made. If given, everything after `breaker` will be ignored. 
""" fn = nowarning_tempnam() + extension f = open(fn, 'w') f.write(pretext) f.close() print '$EDITOR %s' % fn os.system('$EDITOR %s' % fn) f = open(fn, 'r') content = f.read() f.close() if breaker: content = content.split(breaker)[0] pretext = pretext.split(breaker)[0] if content == pretext or not content.strip(): return None return content class CommandSQL(Command): name = 'sql' summary = 'Show SQL CREATE statements' parser = standard_parser(simulate=False) def command(self): classes = self.classes() allConstraints = [] for cls in classes: if self.options.verbose >= 1: print '-- %s from %s' % ( cls.__name__, cls.__module__) createSql, constraints = cls.createTableSQL() print createSql.strip() + ';\n' allConstraints.append(constraints) for constraints in allConstraints: if constraints: for constraint in constraints: if constraint: print constraint.strip() + ';\n' class CommandList(Command): name = 'list' summary = 'Show all SQLObject classes found' parser = standard_parser(simulate=False, connection=False) def command(self): if self.options.verbose >= 1: print 'Classes found:' classes = self.classes(require_connection=False) for soClass in classes: print '%s.%s' % (soClass.__module__, soClass.__name__) if self.options.verbose >= 1: print ' Table: %s' % soClass.sqlmeta.table class CommandCreate(Command): name = 'create' summary = 'Create tables' parser = standard_parser(interactive=True) parser.add_option('--create-db', action='store_true', dest='create_db', help="Create the database") def command(self): v = self.options.verbose created = 0 existing = 0 dbs_created = [] constraints = {} for soClass in self.classes(require_some=True): if (self.options.create_db and soClass._connection not in dbs_created): if not self.options.simulate: try: soClass._connection.createEmptyDatabase() except soClass._connection.module.ProgrammingError, e: if str(e).find('already exists') != -1: print 'Database already exists' else: raise else: print '(simulating; cannot create database)' dbs_created.append(soClass._connection) if soClass._connection not in constraints.keys(): constraints[soClass._connection] = [] exists = soClass._connection.tableExists(soClass.sqlmeta.table) if v >= 1: if exists: existing += 1 print '%s already exists.' % soClass.__name__ else: print 'Creating %s' % soClass.__name__ if v >= 2: sql, extra = soClass.createTableSQL() print sql if (not self.options.simulate and not exists): if self.options.interactive: if self.ask('Create %s' % soClass.__name__): created += 1 tableConstraints = soClass.createTable(applyConstraints=False) if tableConstraints: constraints[soClass._connection].append(tableConstraints) else: print 'Cancelled' else: created += 1 tableConstraints = soClass.createTable(applyConstraints=False) if tableConstraints: constraints[soClass._connection].append(tableConstraints) for connection in constraints.keys(): if v >= 2: print 'Creating constraints' for constraintList in constraints[connection]: for constraint in constraintList: if constraint: connection.query(constraint) if v >= 1: print '%i tables created (%i already exist)' % ( created, existing) class CommandDrop(Command): name = 'drop' summary = 'Drop tables' parser = standard_parser(interactive=True) def command(self): v = self.options.verbose dropped = 0 not_existing = 0 for soClass in reversed(self.classes()): exists = soClass._connection.tableExists(soClass.sqlmeta.table) if v >= 1: if exists: print 'Dropping %s' % soClass.__name__ else: not_existing += 1 print '%s does not exist.' 
% soClass.__name__ if (not self.options.simulate and exists): if self.options.interactive: if self.ask('Drop %s' % soClass.__name__): dropped += 1 soClass.dropTable() else: print 'Cancelled' else: dropped += 1 soClass.dropTable() if v >= 1: print '%i tables dropped (%i didn\'t exist)' % ( dropped, not_existing) class CommandStatus(Command): name = 'status' summary = 'Show status of classes vs. database' help = ('This command checks the SQLObject definition and checks if ' 'the tables in the database match. It can always test for ' 'missing tables, and on some databases can test for the ' 'existance of other tables. Column types are not currently ' 'checked.') parser = standard_parser(simulate=False) def print_class(self, soClass): if self.printed: return self.printed = True print 'Checking %s...' % soClass.__name__ def command(self): good = 0 bad = 0 missing_tables = 0 columnsFromSchema_warning = False for soClass in self.classes(require_some=True): conn = soClass._connection self.printed = False if self.options.verbose: self.print_class(soClass) if not conn.tableExists(soClass.sqlmeta.table): self.print_class(soClass) print ' Does not exist in database' missing_tables += 1 continue try: columns = conn.columnsFromSchema(soClass.sqlmeta.table, soClass) except AttributeError: if not columnsFromSchema_warning: print 'Database does not support reading columns' columnsFromSchema_warning = True good += 1 continue except AssertionError, e: print 'Cannot read db table %s: %s' % ( soClass.sqlmeta.table, e) continue existing = {} for col in columns: col = col.withClass(soClass) existing[col.dbName] = col missing = {} for col in soClass.sqlmeta.columnList: if col.dbName in existing: del existing[col.dbName] else: missing[col.dbName] = col if existing: self.print_class(soClass) for col in existing.values(): print ' Database has extra column: %s' % col.dbName if missing: self.print_class(soClass) for col in missing.values(): print ' Database missing column: %s' % col.dbName if existing or missing: bad += 1 else: good += 1 if self.options.verbose: print '%i in sync; %i out of sync; %i not in database' % ( good, bad, missing_tables) class CommandHelp(Command): name = 'help' summary = 'Show help' parser = optparse.OptionParser() max_args = 1 def command(self): if self.args: the_runner.run([self.invoked_as, self.args[0], '-h']) else: print 'Available commands:' print ' (use "%s help COMMAND" or "%s COMMAND -h" ' % ( self.prog_name, self.prog_name) print ' for more information)' items = the_runner.commands.items() items.sort() max_len = max([len(cn) for cn, c in items]) for command_name, command in items: print '%s:%s %s' % (command_name, ' '*(max_len-len(command_name)), command.summary) if command.aliases: print '%s (Aliases: %s)' % ( ' '*max_len, ', '.join(command.aliases)) class CommandExecute(Command): name = 'execute' summary = 'Execute SQL statements' help = ('Runs SQL statements directly in the database, with no ' 'intervention. Useful when used with a configuration file. ' 'Each argument is executed as an individual statement.') parser = standard_parser(find_modules=False) parser.add_option('--stdin', help="Read SQL from stdin (normally takes SQL from the command line)", dest="use_stdin", action="store_true") max_args = None def command(self): args = self.args if self.options.use_stdin: if self.options.verbose: print "Reading additional SQL from stdin (Ctrl-D or Ctrl-Z to finish)..." 
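# Illustrative usage sketch (editor's addition; URIs and SQL are hypothetical):
#
#     sqlobject-admin execute -c mysql://localhost/test "SELECT COUNT(*) FROM person"
#     echo "VACUUM;" | sqlobject-admin execute --stdin -c postgres://localhost/test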
args.append(sys.stdin.read()) self.conn = self.connection().getConnection() self.cursor = self.conn.cursor() for sql in args: self.execute_sql(sql) def execute_sql(self, sql): if self.options.verbose: print sql try: self.cursor.execute(sql) except Exception, e: if not self.options.verbose: print sql print "****Error:" print ' ', e return desc = self.cursor.description rows = self.cursor.fetchall() if self.options.verbose: if not self.cursor.rowcount: print "No rows accessed" else: print "%i rows accessed" % self.cursor.rowcount if desc: for name, type_code, display_size, internal_size, precision, scale, null_ok in desc: sys.stdout.write("%s\t" % name) sys.stdout.write("\n") for row in rows: for col in row: sys.stdout.write("%r\t" % col) sys.stdout.write("\n") print class CommandRecord(Command): name = 'record' summary = 'Record historical information about the database status' help = ('Record state of table definitions. The state of each ' 'table is written out to a separate file in a directory, ' 'and that directory forms a "version". A table is also ' 'added to your database (%s) that reflects the version the ' 'database is currently at. Use the upgrade command to ' 'sync databases with code.' % SQLObjectVersionTable.sqlmeta.table) parser = standard_parser() parser.add_option('--output-dir', help="Base directory for recorded definitions", dest="output_dir", metavar="DIR", default=None) parser.add_option('--no-db-record', help="Don't record version to database", dest="db_record", action="store_false", default=True) parser.add_option('--force-create', help="Create a new version even if appears to be " "identical to the last version", action="store_true", dest="force_create") parser.add_option('--name', help="The name to append to the version. The " "version should sort after previous versions (so " "any versions from the same day should come " "alphabetically before this version).", dest="version_name", metavar="NAME") parser.add_option('--force-db-version', help="Update the database version, and include no " "database information. This is for databases that " "were developed without any interaction with " "this tool, to create a 'beginning' revision.", metavar="VERSION_NAME", dest="force_db_version") parser.add_option('--edit', help="Open an editor for the upgrader in the last " "version (using $EDITOR).", action="store_true", dest="open_editor") version_regex = re.compile(r'^\d\d\d\d-\d\d-\d\d') def command(self): if self.options.force_db_version: self.command_force_db_version() return v = self.options.verbose sim = self.options.simulate classes = self.classes() if not classes: print "No classes found!" 
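# Illustrative usage sketch (editor's addition; directory, module and URI names
# are hypothetical).  A typical record/upgrade cycle looks like:
#
#     sqlobject-admin record -m myapp.model --output-dir=history -c postgres://localhost/myapp
#     # ... later, hand-write history/<version>/upgrade_postgres_<newversion>.sql ...
#     sqlobject-admin upgrade --output-dir=history -c postgres://localhost/myapp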
return output_dir = self.find_output_dir() version = os.path.basename(output_dir) print "Creating version %s" % version conns = [] files = {} for cls in self.classes(): dbName = cls._connection.dbName if cls._connection not in conns: conns.append(cls._connection) fn = os.path.join(cls.__name__ + '_' + dbName + '.sql') if sim: continue create, constraints = cls.createTableSQL() if constraints: constraints = '\n-- Constraints:\n%s\n' % ( '\n'.join(constraints)) else: constraints = '' files[fn] = ''.join([ '-- Exported definition from %s\n' % time.strftime('%Y-%m-%dT%H:%M:%S'), '-- Class %s.%s\n' % (cls.__module__, cls.__name__), '-- Database: %s\n' % dbName, create.strip(), '\n', constraints]) last_version_dir = self.find_last_version() if last_version_dir and not self.options.force_create: if v > 1: print "Checking %s to see if it is current" % last_version_dir files_copy = files.copy() for fn in os.listdir(last_version_dir): if not fn.endswith('.sql'): continue if not fn in files_copy: if v > 1: print "Missing file %s" % fn break f = open(os.path.join(last_version_dir, fn), 'r') content = f.read() f.close() if (self.strip_comments(files_copy[fn]) != self.strip_comments(content)): if v > 1: print "Content does not match: %s" % fn break del files_copy[fn] else: # No differences so far if not files_copy: # Used up all files print ("Current status matches version %s" % os.path.basename(last_version_dir)) return if v > 1: print "Extra files: %s" % ', '.join(files_copy.keys()) if v: print ("Current state does not match %s" % os.path.basename(last_version_dir)) if v > 1 and not last_version_dir: print "No last version to check" if not sim: os.mkdir(output_dir) if v: print 'Making directory %s' % self.shorten_filename(output_dir) files = files.items() files.sort() for fn, content in files: if v: print ' Writing %s' % self.shorten_filename(fn) if not sim: f = open(os.path.join(output_dir, fn), 'w') f.write(content) f.close() if self.options.db_record: all_diffs = [] for cls in self.classes(): for conn in conns: diffs = db_differences(cls, conn) for diff in diffs: if len(conns) > 1: diff = ' (%s).%s: %s' % ( conn.uri(), cls.sqlmeta.table, diff) else: diff = ' %s: %s' % (cls.sqlmeta.table, diff) all_diffs.append(diff) if all_diffs: print 'Database does not match schema:' print '\n'.join(all_diffs) for conn in conns: self.update_db(version, conn) else: all_diffs = [] if self.options.open_editor: if not last_version_dir: print ("Cannot edit upgrader because there is no " "previous version") else: breaker = ('-'*20 + ' lines below this will be ignored ' + '-'*20) pre_text = breaker + '\n' + '\n'.join(all_diffs) text = self.open_editor('\n\n' + pre_text, breaker=breaker, extension='.sql') if text is not None: fn = os.path.join(last_version_dir, 'upgrade_%s_%s.sql' % (dbName, version)) f = open(fn, 'w') f.write(text) f.close() print 'Wrote to %s' % fn def update_db(self, version, conn): v = self.options.verbose if not conn.tableExists(SQLObjectVersionTable.sqlmeta.table): if v: print ('Creating table %s' % SQLObjectVersionTable.sqlmeta.table) sql = SQLObjectVersionTable.createTableSQL(connection=conn) if v > 1: print sql if not self.options.simulate: SQLObjectVersionTable.createTable(connection=conn) if not self.options.simulate: SQLObjectVersionTable.clearTable(connection=conn) SQLObjectVersionTable( version=version, connection=conn) def strip_comments(self, sql): lines = [l for l in sql.splitlines() if not l.strip().startswith('--')] return '\n'.join(lines) def base_dir(self): base = 
self.options.output_dir if base is None: base = CONFIG.get('sqlobject_history_dir', '.') if not os.path.exists(base): print 'Creating history directory %s' % self.shorten_filename(base) if not self.options.simulate: os.makedirs(base) return base def find_output_dir(self): today = time.strftime('%Y-%m-%d', time.localtime()) if self.options.version_name: dir = os.path.join(self.base_dir(), today + '-' + self.options.version_name) if os.path.exists(dir): print ("Error, directory already exists: %s" % dir) sys.exit(1) return dir extra = '' while 1: dir = os.path.join(self.base_dir(), today + extra) if not os.path.exists(dir): return dir if not extra: extra = 'a' else: extra = chr(ord(extra)+1) def find_last_version(self): names = [] for fn in os.listdir(self.base_dir()): if not self.version_regex.search(fn): continue names.append(fn) if not names: return None names.sort() return os.path.join(self.base_dir(), names[-1]) def command_force_db_version(self): v = self.options.verbose sim = self.options.simulate version = self.options.force_db_version if not self.version_regex.search(version): print "Versions must be in the format YYYY-MM-DD..." print "You version %s does not fit this" % version return version_dir = os.path.join(self.base_dir(), version) if not os.path.exists(version_dir): if v: print 'Creating %s' % self.shorten_filename(version_dir) if not sim: os.mkdir(version_dir) elif v: print ('Directory %s exists' % self.shorten_filename(version_dir)) if self.options.db_record: self.update_db(version, self.connection()) class CommandUpgrade(CommandRecord): name = 'upgrade' summary = 'Update the database to a new version (as created by record)' help = ('This command runs scripts (that you write by hand) to ' 'upgrade a database. The database\'s current version is in ' 'the sqlobject_version table (use record --force-db-version ' 'if a database does not have a sqlobject_version table), ' 'and upgrade scripts are in the version directory you are ' 'upgrading FROM, named upgrade_DBNAME_VERSION.sql, like ' '"upgrade_mysql_2004-12-01b.sql".') parser = standard_parser(find_modules=False) parser.add_option('--upgrade-to', help="Upgrade to the given version (default: newest version)", dest="upgrade_to", metavar="VERSION") parser.add_option('--output-dir', help="Base directory for recorded definitions", dest="output_dir", metavar="DIR", default=None) upgrade_regex = re.compile(r'^upgrade_([a-z]*)_([^.]*)\.sql$', re.I) def command(self): v = self.options.verbose sim = self.options.simulate if self.options.upgrade_to: version_to = self.options.upgrade_to else: fname = self.find_last_version() if fname is None: print "No version exists, use 'record' command to create one" return version_to = os.path.basename(fname) current = self.current_version() if v: print 'Current version: %s' % current version_list = self.make_plan(current, version_to) if not version_list: print 'Database up to date' return if v: print 'Plan:' for next_version, upgrader in version_list: print ' Use %s to upgrade to %s' % ( self.shorten_filename(upgrader), next_version) conn = self.connection() for next_version, upgrader in version_list: f = open(upgrader) sql = f.read() f.close() if v: print "Running:" print sql print '-'*60 if not sim: try: conn.query(sql) except: print "Error in script: %s" % upgrader raise self.update_db(next_version, conn) print 'Done.' def current_version(self): conn = self.connection() if not conn.tableExists(SQLObjectVersionTable.sqlmeta.table): print 'No sqlobject_version table!' 
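# Illustrative note (editor's addition; the version name is hypothetical).
# A database created without this tool has no version table yet; it can be
# given a starting revision so that `upgrade' has a baseline:
#
#     sqlobject-admin record --force-db-version=2008-01-01 -c postgres://localhost/myapp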
sys.exit(1) versions = list(SQLObjectVersionTable.select(connection=conn)) if not versions: print 'No rows in sqlobject_version!' sys.exit(1) if len(versions) > 1: print 'Ambiguous sqlobject_version_table' sys.exit(1) return versions[0].version def make_plan(self, current, dest): if current == dest: return [] dbname = self.connection().dbName next_version, upgrader = self.best_upgrade(current, dest, dbname) if not upgrader: print 'No way to upgrade from %s to %s' % (current, dest) print ('(you need a %s/upgrade_%s_%s.sql script)' % (current, dbname, dest)) sys.exit(1) plan = [(next_version, upgrader)] if next_version == dest: return plan else: return plan + self.make_plan(next_version, dest) def best_upgrade(self, current, dest, target_dbname): current_dir = os.path.join(self.base_dir(), current) if self.options.verbose > 1: print ('Looking in %s for upgraders' % self.shorten_filename(current_dir)) upgraders = [] for fn in os.listdir(current_dir): match = self.upgrade_regex.search(fn) if not match: if self.options.verbose > 1: print 'Not an upgrade script: %s' % fn continue dbname = match.group(1) version = match.group(2) if dbname != target_dbname: if self.options.verbose > 1: print 'Not for this database: %s (want %s)' % ( dbname, target_dbname) continue if version > dest: if self.options.verbose > 1: print 'Version too new: %s (only want %s)' % ( version, dest) upgraders.append((version, os.path.join(current_dir, fn))) if not upgraders: if self.options.verbose > 1: print 'No upgraders found in %s' % current_dir return None, None upgraders.sort() return upgraders[-1] def update_sys_path(paths, verbose): if isinstance(paths, basestring): paths = [paths] for path in paths: path = os.path.abspath(path) if path not in sys.path: if verbose > 1: print 'Adding %s to path' % path sys.path.insert(0, path) if __name__ == '__main__': the_runner.run(sys.argv) SQLObject-1.5.2/sqlobject/__init__.py0000644000175000017500000000046311655241113016756 0ustar phdphd00000000000000"""SQLObject""" from __version__ import version, version_info from col import * from index import * from joins import * from main import * from sqlbuilder import AND, OR, NOT, IN, LIKE, RLIKE, DESC, CONTAINSSTRING, const, func from styles import * from dbconnection import connectionForURI import dberrors SQLObject-1.5.2/sqlobject/__version__.py0000644000175000017500000000022712322475604017504 0ustar phdphd00000000000000 version = '1.5.2' major = 1 minor = 5 micro = 2 release_level = 'final' serial = 0 version_info = (major, minor, micro, release_level, serial) SQLObject-1.5.2/sqlobject/inheritance/0000755000175000017500000000000012322476205017137 5ustar phdphd00000000000000SQLObject-1.5.2/sqlobject/inheritance/__init__.py0000644000175000017500000005470211563772713021271 0ustar phdphd00000000000000from sqlobject import dbconnection from sqlobject import classregistry from sqlobject import events from sqlobject import sqlbuilder from sqlobject.col import StringCol, ForeignKey from sqlobject.main import sqlmeta, SQLObject, SelectResults, \ makeProperties, unmakeProperties, getterName, setterName import iteration def tablesUsedSet(obj, db): if hasattr(obj, "tablesUsedSet"): return obj.tablesUsedSet(db) elif isinstance(obj, (tuple, list, set, frozenset)): s = set() for component in obj: s.update(tablesUsedSet(component, db)) return s else: return set() class InheritableSelectResults(SelectResults): IterationClass = iteration.InheritableIteration def __init__(self, sourceClass, clause, clauseTables=None, inheritedTables=None, **ops): if 
clause is None or isinstance(clause, str) and clause == 'all': clause = sqlbuilder.SQLTrueClause dbName = (ops.get('connection',None) or sourceClass._connection).dbName tablesSet = tablesUsedSet(clause, dbName) tablesSet.add(str(sourceClass.sqlmeta.table)) orderBy = ops.get('orderBy') if inheritedTables: for tableName in inheritedTables: tablesSet.add(str(tableName)) if orderBy and not isinstance(orderBy, basestring): tablesSet.update(tablesUsedSet(orderBy, dbName)) #DSM: if this class has a parent, we need to link it #DSM: and be sure the parent is in the table list. #DSM: The following code is before clauseTables #DSM: because if the user uses clauseTables #DSM: (and normal string SELECT), he must know what he wants #DSM: and will do himself the relationship between classes. if not isinstance(clause, str): tableRegistry = {} allClasses = classregistry.registry( sourceClass.sqlmeta.registry).allClasses() for registryClass in allClasses: if str(registryClass.sqlmeta.table) in tablesSet: #DSM: By default, no parents are needed for the clauses tableRegistry[registryClass] = registryClass tableRegistryCopy = tableRegistry.copy() for childClass in tableRegistryCopy: if childClass not in tableRegistry: continue currentClass = childClass while currentClass: if currentClass in tableRegistryCopy: if currentClass in tableRegistry: #DSM: Remove this class as it is a parent one #DSM: of a needed children del tableRegistry[currentClass] #DSM: Must keep the last parent needed #DSM: (to limit the number of join needed) tableRegistry[childClass] = currentClass currentClass = currentClass.sqlmeta.parentClass #DSM: Table registry contains only the last children #DSM: or standalone classes parentClause = [] for (currentClass, minParentClass) in tableRegistry.items(): while (currentClass != minParentClass) \ and currentClass.sqlmeta.parentClass: parentClass = currentClass.sqlmeta.parentClass parentClause.append(currentClass.q.id == parentClass.q.id) currentClass = parentClass tablesSet.add(str(currentClass.sqlmeta.table)) clause = reduce(sqlbuilder.AND, parentClause, clause) super(InheritableSelectResults, self).__init__(sourceClass, clause, clauseTables, **ops) def accumulateMany(self, *attributes, **kw): if kw.get("skipInherited"): return super(InheritableSelectResults, self).accumulateMany(*attributes) tables = [] for func_name, attribute in attributes: if not isinstance(attribute, basestring): tables.append(attribute.tableName) clone = self.__class__(self.sourceClass, self.clause, self.clauseTables, inheritedTables=tables, **self.ops) return clone.accumulateMany(skipInherited=True, *attributes) class InheritableSQLMeta(sqlmeta): @classmethod def addColumn(sqlmeta, columnDef, changeSchema=False, connection=None, childUpdate=False): soClass = sqlmeta.soClass #DSM: Try to add parent properties to the current class #DSM: Only do this once if possible at object creation and once for #DSM: each new dynamic column to refresh the current class if sqlmeta.parentClass: for col in sqlmeta.parentClass.sqlmeta.columnList: cname = col.name if cname == 'childName': continue if cname.endswith("ID"): cname = cname[:-2] setattr(soClass, getterName(cname), eval( 'lambda self: self._parent.%s' % cname)) if not col.immutable: def make_setfunc(cname): def setfunc(self, val): if not self.sqlmeta._creating and not getattr(self.sqlmeta, "row_update_sig_suppress", False): self.sqlmeta.send(events.RowUpdateSignal, self, {cname : val}) result = setattr(self._parent, cname, val) return setfunc setfunc = make_setfunc(cname) 
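# Illustrative sketch (editor's addition; the classes are hypothetical).  The
# getters/setters built here let a child instance read and write its parent's
# columns transparently:
#
#     class Person(InheritableSQLObject):
#         name = StringCol()
#     class Employee(Person):
#         department = StringCol()
#
#     e = Employee(name='Ann', department='R&D')  # 'name' lands in the person table
#     e.name = 'Anna'                             # delegated to self._parent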
setattr(soClass, setterName(cname), setfunc) if childUpdate: makeProperties(soClass) return if columnDef: super(InheritableSQLMeta, sqlmeta).addColumn(columnDef, changeSchema, connection) #DSM: Update each child class if needed and existing (only for new #DSM: dynamic column as no child classes exists at object creation) if columnDef and hasattr(soClass, "q"): q = getattr(soClass.q, columnDef.name, None) else: q = None for c in sqlmeta.childClasses.values(): c.sqlmeta.addColumn(columnDef, connection=connection, childUpdate=True) if q: setattr(c.q, columnDef.name, q) @classmethod def delColumn(sqlmeta, column, changeSchema=False, connection=None, childUpdate=False): if childUpdate: soClass = sqlmeta.soClass unmakeProperties(soClass) makeProperties(soClass) if isinstance(column, str): name = column else: name = column.name delattr(soClass, name) delattr(soClass.q, name) return super(InheritableSQLMeta, sqlmeta).delColumn(column, changeSchema, connection) #DSM: Update each child class if needed #DSM: and delete properties for this column for c in sqlmeta.childClasses.values(): c.sqlmeta.delColumn(column, changeSchema=changeSchema, connection=connection, childUpdate=True) @classmethod def addJoin(sqlmeta, joinDef, childUpdate=False): soClass = sqlmeta.soClass #DSM: Try to add parent properties to the current class #DSM: Only do this once if possible at object creation and once for #DSM: each new dynamic join to refresh the current class if sqlmeta.parentClass: for join in sqlmeta.parentClass.sqlmeta.joins: jname = join.joinMethodName jarn = join.addRemoveName setattr(soClass, getterName(jname), eval('lambda self: self._parent.%s' % jname)) if hasattr(join, 'remove'): setattr(soClass, 'remove' + jarn, eval('lambda self,o: self._parent.remove%s(o)' % jarn)) if hasattr(join, 'add'): setattr(soClass, 'add' + jarn, eval('lambda self,o: self._parent.add%s(o)' % jarn)) if childUpdate: makeProperties(soClass) return if joinDef: super(InheritableSQLMeta, sqlmeta).addJoin(joinDef) #DSM: Update each child class if needed and existing (only for new #DSM: dynamic join as no child classes exists at object creation) for c in sqlmeta.childClasses.values(): c.sqlmeta.addJoin(joinDef, childUpdate=True) @classmethod def delJoin(sqlmeta, joinDef, childUpdate=False): if childUpdate: soClass = sqlmeta.soClass unmakeProperties(soClass) makeProperties(soClass) return super(InheritableSQLMeta, sqlmeta).delJoin(joinDef) #DSM: Update each child class if needed #DSM: and delete properties for this join for c in sqlmeta.childClasses.values(): c.sqlmeta.delJoin(joinDef, childUpdate=True) @classmethod def getAllColumns(sqlmeta): columns = sqlmeta.columns.copy() sm = sqlmeta while sm.parentClass: columns.update(sm.parentClass.sqlmeta.columns) sm = sm.parentClass.sqlmeta return columns @classmethod def getColumns(sqlmeta): columns = sqlmeta.getAllColumns() if 'childName' in columns: del columns['childName'] return columns class InheritableSQLObject(SQLObject): sqlmeta = InheritableSQLMeta _inheritable = True SelectResultsClass = InheritableSelectResults def set(self, **kw): if self._parent: SQLObject.set(self, _suppress_set_sig=True, **kw) else: SQLObject.set(self, **kw) def __classinit__(cls, new_attrs): SQLObject.__classinit__(cls, new_attrs) # if we are a child class, add sqlbuilder fields from parents currentClass = cls.sqlmeta.parentClass while currentClass: for column in currentClass.sqlmeta.columnDefinitions.values(): if column.name == 'childName': continue if isinstance(column, ForeignKey): continue setattr(cls.q, 
column.name, getattr(currentClass.q, column.name)) currentClass = currentClass.sqlmeta.parentClass @classmethod def _SO_setupSqlmeta(cls, new_attrs, is_base): # Note: cannot use super(InheritableSQLObject, cls)._SO_setupSqlmeta - # InheritableSQLObject is not defined when it's __classinit__ # is run. Cannot use SQLObject._SO_setupSqlmeta, either: # the method would be bound to wrong class. if cls.__name__ == "InheritableSQLObject": call_super = super(cls, cls) else: # InheritableSQLObject must be in globals yet call_super = super(InheritableSQLObject, cls) call_super._SO_setupSqlmeta(new_attrs, is_base) sqlmeta = cls.sqlmeta sqlmeta.childClasses = {} # locate parent class and register this class in it's children sqlmeta.parentClass = None for superclass in cls.__bases__: if getattr(superclass, '_inheritable', False) \ and (superclass.__name__ != 'InheritableSQLObject'): if sqlmeta.parentClass: # already have a parent class; # cannot inherit from more than one raise NotImplementedError( "Multiple inheritance is not implemented") sqlmeta.parentClass = superclass superclass.sqlmeta.childClasses[cls.__name__] = cls if sqlmeta.parentClass: # remove inherited column definitions cls.sqlmeta.columns = {} cls.sqlmeta.columnList = [] cls.sqlmeta.columnDefinitions = {} # default inheritance child name if not sqlmeta.childName: sqlmeta.childName = cls.__name__ @classmethod def get(cls, id, connection=None, selectResults=None, childResults=None, childUpdate=False): val = super(InheritableSQLObject, cls).get(id, connection, selectResults) #DSM: If we are updating a child, we should never return a child... if childUpdate: return val #DSM: If this class has a child, return the child if 'childName' in cls.sqlmeta.columns: childName = val.childName if childName is not None: childClass = cls.sqlmeta.childClasses[childName] # If the class has no columns (which sometimes makes sense # and may be true for non-inheritable (leaf) classes only), # shunt the query to avoid almost meaningless SQL # like "SELECT NULL FROM child WHERE id=1". # This is based on assumption that child object exists # if parent object exists. (If it doesn't your database # is broken and that is a job for database maintenance.) if not (childResults or childClass.sqlmeta.columns): childResults = (None,) return childClass.get(id, connection=connection, selectResults=childResults) #DSM: Now, we know we are alone or the last child in a family... 
#DSM: It's time to find our parents inst = val while inst.sqlmeta.parentClass and not inst._parent: inst._parent = inst.sqlmeta.parentClass.get(id, connection=connection, childUpdate=True) inst = inst._parent #DSM: We can now return ourself return val @classmethod def _notifyFinishClassCreation(cls): sqlmeta = cls.sqlmeta # verify names of added columns if sqlmeta.parentClass: # FIXME: this does not check for grandparent column overrides parentCols = sqlmeta.parentClass.sqlmeta.columns.keys() for column in sqlmeta.columnList: if column.name == 'childName': raise AttributeError( "The column name 'childName' is reserved") if column.name in parentCols: raise AttributeError("The column '%s' is" " already defined in an inheritable parent" % column.name) # if this class is inheritable, add column for children distinction if cls._inheritable and (cls.__name__ != 'InheritableSQLObject'): sqlmeta.addColumn(StringCol(name='childName', # limit string length to get VARCHAR and not CLOB length=255, default=None)) if not sqlmeta.columnList: # There are no columns - call addColumn to propagate columns # from parent classes to children sqlmeta.addColumn(None) if not sqlmeta.joins: # There are no joins - call addJoin to propagate joins # from parent classes to children sqlmeta.addJoin(None) def _create(self, id, **kw): #DSM: If we were called by a children class, #DSM: we must retreive the properties dictionary. #DSM: Note: we can't use the ** call paremeter directly #DSM: as we must be able to delete items from the dictionary #DSM: (and our children must know that the items were removed!) if 'kw' in kw: kw = kw['kw'] #DSM: If we are the children of an inheritable class, #DSM: we must first create our parent if self.sqlmeta.parentClass: parentClass = self.sqlmeta.parentClass new_kw = {} parent_kw = {} for (name, value) in kw.items(): if (name != 'childName') and hasattr(parentClass, name): parent_kw[name] = value else: new_kw[name] = value kw = new_kw # Need to check that we have enough data to sucesfully # create the current subclass otherwise we will leave # the database in an inconsistent state. for col in self.sqlmeta.columnList: if (col._default == sqlbuilder.NoDefault) and \ (col.name not in kw) and (col.foreignName not in kw): raise TypeError, "%s() did not get expected keyword argument %s" % (self.__class__.__name__, col.name) parent_kw['childName'] = self.sqlmeta.childName self._parent = parentClass(kw=parent_kw, connection=self._connection) id = self._parent.id # TC: Create this record and catch all exceptions in order to destroy # TC: the parent if the child can not be created. try: super(InheritableSQLObject, self)._create(id, **kw) except: # If we are outside a transaction and this is a child, destroy the parent connection = self._connection if (not isinstance(connection, dbconnection.Transaction) and connection.autoCommit) and self.sqlmeta.parentClass: self._parent.destroySelf() #TC: Do we need to do this?? self._parent = None # TC: Reraise the original exception raise @classmethod def _findAlternateID(cls, name, dbName, value, connection=None): result = list(cls.selectBy(connection, **{name: value})) if not result: return result, None obj = result[0] return [obj.id], obj @classmethod def select(cls, clause=None, *args, **kwargs): parentClass = cls.sqlmeta.parentClass childUpdate = kwargs.pop('childUpdate', None) # childUpdate may have one of three values: # True: # select was issued by parent class to create child objects. # Execute select without modifications. 
# None (default): # select is run by application. If this class is inheritance # child, delegate query to the parent class to utilize # InheritableIteration optimizations. Selected records # are restricted to this (child) class by adding childName # filter to the where clause. # False: # select is delegated from inheritance child which is parent # of another class. Delegate the query to parent if possible, # but don't add childName restriction: selected records # will be filtered by join to the table filtered by childName. if (not childUpdate) and parentClass: if childUpdate is None: # this is the first parent in deep hierarchy addClause = parentClass.q.childName == cls.sqlmeta.childName # if the clause was one of TRUE varians, replace it if (clause is None) or (clause is sqlbuilder.SQLTrueClause) \ or (isinstance(clause, basestring) and (clause == 'all')): clause = addClause else: # patch WHERE condition: # change ID field of this class to ID of parent class # XXX the clause is patched in place; it would be better # to build a new one if we have to replace field clsID = cls.q.id parentID = parentClass.q.id def _get_patched(clause): if isinstance(clause, sqlbuilder.SQLOp): _patch_id_clause(clause) return None elif not isinstance(clause, sqlbuilder.Field): return None elif (clause.tableName == clsID.tableName) \ and (clause.fieldName == clsID.fieldName): return parentID else: return None def _patch_id_clause(clause): if not isinstance(clause, sqlbuilder.SQLOp): return expr = _get_patched(clause.expr1) if expr: clause.expr1 = expr expr = _get_patched(clause.expr2) if expr: clause.expr2 = expr _patch_id_clause(clause) # add childName filter clause = sqlbuilder.AND(clause, addClause) return parentClass.select(clause, childUpdate=False, *args, **kwargs) else: return super(InheritableSQLObject, cls).select( clause, *args, **kwargs) @classmethod def selectBy(cls, connection=None, **kw): clause = [] foreignColumns = {} currentClass = cls while currentClass: foreignColumns.update(dict([(column.foreignName, name) for (name, column) in currentClass.sqlmeta.columns.items() if column.foreignKey ])) currentClass = currentClass.sqlmeta.parentClass for name, value in kw.items(): if name in foreignColumns: name = foreignColumns[name] # translate "key" to "keyID" if isinstance(value, SQLObject): value = value.id currentClass = cls while currentClass: try: clause.append(getattr(currentClass.q, name) == value) break except AttributeError, err: pass currentClass = currentClass.sqlmeta.parentClass else: raise AttributeError("'%s' instance has no attribute '%s'" % (cls.__name__, name)) if clause: clause = reduce(sqlbuilder.AND, clause) else: clause = None # select all conn = connection or cls._connection return cls.SelectResultsClass(cls, clause, connection=conn) def destroySelf(self): #DSM: If this object has parents, recursivly kill them if hasattr(self, '_parent') and self._parent: self._parent.destroySelf() super(InheritableSQLObject, self).destroySelf() def _reprItems(self): items = super(InheritableSQLObject, self)._reprItems() # add parent attributes (if any) if self.sqlmeta.parentClass: items.extend(self._parent._reprItems()) # filter out our special column return [item for item in items if item[0] != 'childName'] __all__ = ['InheritableSQLObject'] SQLObject-1.5.2/sqlobject/inheritance/tests/0000755000175000017500000000000012322476205020301 5ustar phdphd00000000000000SQLObject-1.5.2/sqlobject/inheritance/tests/test_aggregates.py0000644000175000017500000000114010555427174024026 0ustar 
phdphd00000000000000from sqlobject import * from sqlobject.inheritance import * from sqlobject.tests.dbtest import * class TestAggregate1(InheritableSQLObject): value1 = IntCol() class TestAggregate2(TestAggregate1): value2 = IntCol() def test_aggregates(): setupClass([TestAggregate1, TestAggregate2]) TestAggregate1(value1=1) TestAggregate2(value1=2, value2=12) assert TestAggregate1.select().max("value1") == 2 assert TestAggregate2.select().max("value1") == 2 raises(Exception, TestAggregate2.select().max, "value2") assert TestAggregate2.select().max(TestAggregate2.q.value2) == 12 SQLObject-1.5.2/sqlobject/inheritance/tests/__init__.py0000644000175000017500000000000210372665115022405 0ustar phdphd00000000000000# SQLObject-1.5.2/sqlobject/inheritance/tests/test_inheritance.py0000644000175000017500000001035110555416720024205 0ustar phdphd00000000000000from py.test import raises from sqlobject import * from sqlobject.tests.dbtest import * from sqlobject.inheritance import InheritableSQLObject ######################################## ## Inheritance ######################################## class InheritablePerson(InheritableSQLObject): firstName = StringCol() lastName = StringCol(alternateID=True, length=255) class Employee(InheritablePerson): _inheritable = False position = StringCol() def setup(): setupClass(InheritablePerson) setupClass(Employee) Employee(firstName='Project', lastName='Leader', position='Project leader') InheritablePerson(firstName='Oneof', lastName='Authors') def test_creation_fail(): setup() kwargs ={'firstName':'John', 'lastname':'Doe'} raises(TypeError, Employee, **kwargs) persons = InheritablePerson.select(InheritablePerson.q.firstName == 'John') assert persons.count() == 0 def test_inheritance(): setup() persons = InheritablePerson.select() # all for person in persons: assert isinstance(person, InheritablePerson) if isinstance(person, Employee): assert not hasattr(person, "childName") else: assert hasattr(person, "childName") assert not person.childName def test_inheritance_select(): setup() persons = InheritablePerson.select(InheritablePerson.q.firstName <> None) assert persons.count() == 2 persons = InheritablePerson.select(InheritablePerson.q.firstName == "phd") assert persons.count() == 0 employees = Employee.select(Employee.q.firstName <> None) assert employees.count() == 1 employees = Employee.select(Employee.q.firstName == "phd") assert employees.count() == 0 employees = Employee.select(Employee.q.position <> None) assert employees.count() == 1 persons = InheritablePerson.selectBy(firstName="Project") assert persons.count() == 1 assert isinstance(persons[0], Employee) persons = Employee.selectBy(firstName="Project") assert persons.count() == 1 try: person = InheritablePerson.byLastName("Oneof") except: pass else: raise RuntimeError, "unknown person %s" % person person = InheritablePerson.byLastName("Leader") assert person.firstName == "Project" person = Employee.byLastName("Leader") assert person.firstName == "Project" persons = list(InheritablePerson.select(orderBy=InheritablePerson.q.lastName)) assert len(persons) == 2 persons = list(InheritablePerson.select(orderBy=(InheritablePerson.q.lastName, InheritablePerson.q.firstName))) assert len(persons) == 2 persons = list(Employee.select(orderBy=Employee.q.lastName)) assert len(persons) == 1 persons = list(Employee.select(orderBy=(Employee.q.lastName, Employee.q.firstName))) assert len(persons) == 1 persons = list(Employee.select(orderBy=Employee.q.position)) assert len(persons) == 1 persons = 
list(Employee.select(orderBy=(Employee.q.position, Employee.q.lastName))) assert len(persons) == 1 def test_addDelColumn(): setup() assert hasattr(InheritablePerson, "firstName") assert hasattr(Employee, "firstName") assert hasattr(InheritablePerson.q, "firstName") assert hasattr(Employee.q, "firstName") Employee.sqlmeta.addColumn(IntCol('runtime', default=None)) assert not hasattr(InheritablePerson, 'runtime') assert hasattr(Employee, 'runtime') assert not hasattr(InheritablePerson.q, 'runtime') assert hasattr(Employee.q, 'runtime') InheritablePerson.sqlmeta.addColumn(IntCol('runtime2', default=None)) assert hasattr(InheritablePerson, 'runtime2') assert hasattr(Employee, 'runtime2') assert hasattr(InheritablePerson.q, 'runtime2') assert hasattr(Employee.q, 'runtime2') Employee.sqlmeta.delColumn('runtime') assert not hasattr(InheritablePerson, 'runtime') assert not hasattr(Employee, 'runtime') assert not hasattr(InheritablePerson.q, 'runtime') assert not hasattr(Employee.q, 'runtime') InheritablePerson.sqlmeta.delColumn('runtime2') assert not hasattr(InheritablePerson, 'runtime2') assert not hasattr(Employee, 'runtime2') assert not hasattr(InheritablePerson.q, 'runtime2') assert not hasattr(Employee.q, 'runtime2') SQLObject-1.5.2/sqlobject/inheritance/tests/test_deep_inheritance.py0000644000175000017500000000617712203134056025204 0ustar phdphd00000000000000from py.test import raises from sqlobject import * from sqlobject.tests.dbtest import * from sqlobject.inheritance import InheritableSQLObject ######################################## ## Deep Inheritance ######################################## class DIPerson(InheritableSQLObject): firstName = StringCol(length=100) lastName = StringCol(alternateID=True, length=255) manager = ForeignKey("DIManager", default=None) class DIEmployee(DIPerson): position = StringCol(unique=True, length=100) class DIManager(DIEmployee): subdudes = MultipleJoin("DIPerson", joinColumn="manager_id") def test_creation_fail(): """ Try to create a Manager without specifying a position. This should fail without leaving any partial records in the database. """ setupClass([DIManager, DIEmployee, DIPerson]) kwargs ={'firstName': 'John', 'lastname': 'Doe'} raises(TypeError, DIManager, **kwargs) persons = DIEmployee.select(DIPerson.q.firstName == 'John') assert persons.count() == 0 def test_creation_fail2(): """ Try to create two Managers with the same position. This should fail without leaving any partial records in the database. 
""" setupClass([DIManager, DIEmployee, DIPerson]) kwargs ={'firstName': 'John', 'lastName': 'Doe', 'position': 'Project Manager'} DIManager(**kwargs) persons = DIEmployee.select(DIPerson.q.firstName == 'John') assert persons.count() == 1 kwargs ={'firstName': 'John', 'lastName': 'Doe II', 'position': 'Project Manager'} raises(Exception, DIManager, **kwargs) persons = DIPerson.select(DIPerson.q.firstName == 'John') assert persons.count() == 1 if not supports('transactions'): return transaction = DIPerson._connection.transaction() kwargs ={'firstName': 'John', 'lastName': 'Doe III', 'position': 'Project Manager'} raises(Exception, DIManager, connection=transaction, **kwargs) transaction.rollback() transaction.begin() persons = DIPerson.select(DIPerson.q.firstName == 'John', connection=transaction) assert persons.count() == 1 def test_deep_inheritance(): setupClass([DIManager, DIEmployee, DIPerson]) manager = DIManager(firstName='Project', lastName='Manager', position='Project Manager') manager_id = manager.id employee_id = DIEmployee(firstName='Project', lastName='Leader', position='Project leader', manager=manager).id person_id = DIPerson(firstName='Oneof', lastName='Authors', manager=manager).id conn = getConnection() cache = conn.cache cache.clear() managers = list(DIManager.select()) assert len(managers) == 1 cache.clear() employees = list(DIEmployee.select()) assert len(employees) == 2 cache.clear() persons = list(DIPerson.select()) assert len(persons) == 3 cache.clear() person = DIPerson.get(employee_id) assert isinstance(person, DIEmployee) person = DIPerson.get(manager_id) assert isinstance(person, DIEmployee) assert isinstance(person, DIManager) cache.clear() person = DIEmployee.get(manager_id) assert isinstance(person, DIManager) conn.close() SQLObject-1.5.2/sqlobject/inheritance/tests/test_foreignKey.py0000644000175000017500000000507512202715714024021 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * from sqlobject.inheritance import InheritableSQLObject class Note(SQLObject): text = StringCol() class PersonWithNotes(InheritableSQLObject): firstName = StringCol() lastName = StringCol() note = ForeignKey("Note", default=None) class Paper(SQLObject): content = StringCol() class EmployeeWithNotes(PersonWithNotes): _inheritable = False paper = ForeignKey("Paper", default=None) def test_foreignKey(): setupClass([Note, PersonWithNotes, Paper, EmployeeWithNotes], force=True) note = Note(text="person") PersonWithNotes(firstName='Oneof', lastName='Authors', note=note) note = Note(text="employee") EmployeeWithNotes(firstName='Project', lastName='Leader', note=note) paper = Paper(content="secret") EmployeeWithNotes(firstName='Senior', lastName='Clerk', paper=paper) PersonWithNotes(firstName='Some', lastName='Person') person = PersonWithNotes.get(1) assert isinstance(person, PersonWithNotes) and not isinstance(person, EmployeeWithNotes) assert person.note.text == "person" employee = EmployeeWithNotes.get(2) assert isinstance(employee, EmployeeWithNotes) assert employee.note.text == "employee" save_employee = employee persons = PersonWithNotes.select(PersonWithNotes.q.noteID <> None) assert persons.count() == 2 persons = PersonWithNotes.selectBy(noteID=person.note.id) assert persons.count() == 1 employee = EmployeeWithNotes.select(PersonWithNotes.q.noteID <> None) assert employee.count() == 1 persons = PersonWithNotes.selectBy(noteID=person.note.id) assert persons.count() == 1 persons = PersonWithNotes.selectBy(note=person.note) assert persons.count() == 1 
persons = PersonWithNotes.selectBy(note=None) assert persons.count() == 2 employee = EmployeeWithNotes.selectBy(paperID=None) assert employee.count() == 1 employee = EmployeeWithNotes.selectBy(paper=None) assert employee.count() == 1 employee = EmployeeWithNotes.selectBy(note=save_employee.note, paper=save_employee.paper) assert employee.count() == 1 employee = EmployeeWithNotes.selectBy() assert employee.count() == 2 class TestInheritableBase(InheritableSQLObject): pass class TestInheritableForeignKey(TestInheritableBase): base = ForeignKey("TestInheritableBase") def test_foreignKey2(): setupClass([TestInheritableBase, TestInheritableForeignKey]) test = TestInheritableBase() object = TestInheritableForeignKey(base=test) SQLObject-1.5.2/sqlobject/inheritance/tests/test_inheritance_tree.py0000644000175000017500000000202110555435614025222 0ustar phdphd00000000000000from sqlobject import * from sqlobject.inheritance import * from sqlobject.tests.dbtest import * ######################################## ## Inheritance Tree ######################################## class Tree1(InheritableSQLObject): aprop = StringCol(length=10) class Tree2(Tree1): bprop = StringCol(length=10) class Tree3(Tree1): cprop = StringCol(length=10) class Tree4(Tree2): dprop = StringCol(length=10) class Tree5(Tree2): eprop = StringCol(length=10) def test_tree(): setupClass([Tree1, Tree2, Tree3, Tree4, Tree5]) t1 = Tree1(aprop='t1') t2 = Tree2(aprop='t2', bprop='t2') t3 = Tree3(aprop='t3', cprop='t3') t4 = Tree4(aprop='t4', bprop='t4', dprop='t4') t5 = Tree5(aprop='t5', bprop='t5', eprop='t5') # find just the t5 out of childs from Tree2 assert t5 == Tree1.select(Tree2.q.childName == 'Tree5')[0] # t2,t4,t5 are all subclasses of Tree1 with t1 childName of 'Tree2' assert list(Tree1.select(Tree1.q.childName == 'Tree2', orderBy="aprop")) == [t2, t4, t5] SQLObject-1.5.2/sqlobject/inheritance/tests/test_indexes.py0000644000175000017500000000322312203134056023342 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * from sqlobject.inheritance import InheritableSQLObject class InheritedPersonIndexGet(InheritableSQLObject): first_name = StringCol(notNone=True, length=100) last_name = StringCol(notNone=True, length=100) age = IntCol() pk = DatabaseIndex(first_name, last_name, unique=True) class InheritedEmployeeIndexGet(InheritedPersonIndexGet): security_number = IntCol() experience = IntCol() sec_index = DatabaseIndex(security_number, unique=True) class InheritedSalesManIndexGet(InheritedEmployeeIndexGet): _inheritable = False skill = IntCol() def test_index_get_1(): setupClass([InheritedPersonIndexGet, InheritedEmployeeIndexGet, InheritedSalesManIndexGet]) InheritedSalesManIndexGet(first_name='Michael', last_name='Pallin', age=65, security_number=2304, experience=2, skill=10) InheritedEmployeeIndexGet(first_name='Eric', last_name='Idle', age=63, security_number=3402, experience=9) InheritedPersonIndexGet(first_name='Terry', last_name='Guilliam', age=64) InheritedPersonIndexGet.pk.get('Michael', 'Pallin') InheritedEmployeeIndexGet.pk.get('Michael', 'Pallin') InheritedSalesManIndexGet.pk.get('Michael', 'Pallin') InheritedPersonIndexGet.pk.get('Eric', 'Idle') InheritedEmployeeIndexGet.pk.get('Eric', 'Idle') InheritedPersonIndexGet.pk.get(first_name='Terry', last_name='Guilliam') InheritedEmployeeIndexGet.sec_index.get(2304) InheritedEmployeeIndexGet.sec_index.get(3402) InheritedSalesManIndexGet.sec_index.get(2304) InheritedSalesManIndexGet.sec_index.get(3402) 
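# A minimal illustrative sketch of the pattern exercised by the index and
# inheritance tests above; it is not part of the original test suite.  The
# SketchPerson/SketchEmployee names are hypothetical, and the helpers
# (setupClass) are assumed to come from sqlobject.tests.dbtest exactly as in
# the surrounding test modules.
from sqlobject import *
from sqlobject.inheritance import InheritableSQLObject
from sqlobject.tests.dbtest import *

class SketchPerson(InheritableSQLObject):
    firstName = StringCol(length=100)
    lastName = StringCol(length=100)
    # Unique index declared on the inheritable parent class.
    name_index = DatabaseIndex(firstName, lastName, unique=True)

class SketchEmployee(SketchPerson):
    position = StringCol(length=100)

def example_inherited_index():
    setupClass([SketchPerson, SketchEmployee])
    SketchPerson(firstName='Terry', lastName='Jones')
    SketchEmployee(firstName='Ada', lastName='Lovelace', position='Engineer')
    # The index declared on the parent is reachable from the child as well,
    # so either class can be used for the unique lookup.
    SketchPerson.name_index.get('Ada', 'Lovelace')
    SketchEmployee.name_index.get('Ada', 'Lovelace')
    # select() on the child adds a childName filter behind the scenes, so it
    # only returns employee rows, while the parent class sees both rows.
    assert SketchEmployee.select().count() == 1
    assert SketchPerson.select().count() == 2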
SQLObject-1.5.2/sqlobject/inheritance/tests/test_asdict.py0000644000175000017500000000312512202715714023160 0ustar phdphd00000000000000from sqlobject import * from sqlobject.inheritance import * from sqlobject.tests.dbtest import * ######################################## ## sqlmeta.asDict ######################################## class InheritablePersonAD(InheritableSQLObject): firstName = StringCol() lastName = StringCol(alternateID=True, length=255) class ManagerAD(InheritablePersonAD): department = StringCol() class EmployeeAD(InheritablePersonAD): _inheritable = False position = StringCol() def test_getColumns(): setupClass([InheritablePersonAD, ManagerAD, EmployeeAD]) for klass, columns in ( (InheritablePersonAD, ['firstName', 'lastName']), (ManagerAD, ['department', 'firstName', 'lastName']), (EmployeeAD, ['firstName', 'lastName', 'position'])): _columns = klass.sqlmeta.getColumns().keys() _columns.sort() assert _columns == columns def test_asDict(): setupClass([InheritablePersonAD, ManagerAD, EmployeeAD], force=True) InheritablePersonAD(firstName='Oneof', lastName='Authors') ManagerAD(firstName='ManagerAD', lastName='The', department='Dep') EmployeeAD(firstName='Project', lastName='Leader', position='Project leader') assert InheritablePersonAD.get(1).sqlmeta.asDict() == \ dict(firstName='Oneof', lastName='Authors', id=1) assert InheritablePersonAD.get(2).sqlmeta.asDict() == \ dict(firstName='ManagerAD', lastName='The', department='Dep', id=2) assert InheritablePersonAD.get(3).sqlmeta.asDict() == \ dict(firstName='Project', lastName='Leader', position='Project leader', id=3) SQLObject-1.5.2/sqlobject/inheritance/tests/testDestroyCascade.py0000644000175000017500000000073410477277513024466 0ustar phdphd00000000000000from sqlobject import * from sqlobject.inheritance import * from sqlobject.tests.dbtest import * class TestCascade1(InheritableSQLObject): dummy = IntCol() class TestCascade2(TestCascade1): c = ForeignKey('TestCascade3', cascade='null') class TestCascade3(SQLObject): dummy = IntCol() def test_destroySelf(): setupClass([TestCascade1, TestCascade3, TestCascade2]) c = TestCascade3(dummy=1) b = TestCascade2(cID=c.id, dummy=1) c.destroySelf() SQLObject-1.5.2/sqlobject/inheritance/iteration.py0000644000175000017500000000757611165443271021527 0ustar phdphd00000000000000from sqlobject import sqlbuilder from sqlobject.classregistry import findClass from sqlobject.dbconnection import Iteration class InheritableIteration(Iteration): # Default array size for cursor.fetchmany() defaultArraySize = 10000 def __init__(self, dbconn, rawconn, select, keepConnection=False): super(InheritableIteration, self).__init__(dbconn, rawconn, select, keepConnection) self.lazyColumns = select.ops.get('lazyColumns', False) self.cursor.arraysize = self.defaultArraySize self._results = [] # Find the index of the childName column childNameIdx = None columns = select.sourceClass.sqlmeta.columnList for i, column in enumerate(columns): if column.name == "childName": childNameIdx = i break self._childNameIdx = childNameIdx def next(self): if not self._results: self._results = list(self.cursor.fetchmany()) if not self.lazyColumns: self.fetchChildren() if not self._results: self._cleanup() raise StopIteration result = self._results[0] del self._results[0] if self.lazyColumns: obj = self.select.sourceClass.get(result[0], connection=self.dbconn) return obj else: id = result[0] if id in self._childrenResults: childResults = self._childrenResults[id] del self._childrenResults[id] else: childResults = None obj = 
self.select.sourceClass.get(id, selectResults=result[1:], childResults=childResults, connection=self.dbconn) return obj def fetchChildren(self): """Prefetch childrens' data Fetch childrens' data for every subclass in one big .select() to avoid .get() fetching it one by one. """ self._childrenResults = {} if self._childNameIdx is None: return childIdsNames = {} childNameIdx = self._childNameIdx for result in self._results: childName = result[childNameIdx+1] if childName: ids = childIdsNames.get(childName) if ids is None: ids = childIdsNames[childName] = [] ids.append(result[0]) dbconn = self.dbconn rawconn = self.rawconn cursor = rawconn.cursor() registry = self.select.sourceClass.sqlmeta.registry for childName, ids in childIdsNames.items(): klass = findClass(childName, registry) if len(ids) == 1: select = klass.select(klass.q.id == ids[0], childUpdate=True, connection=dbconn) else: select = klass.select(sqlbuilder.IN(klass.q.id, ids), childUpdate=True, connection=dbconn) query = dbconn.queryForSelect(select) if dbconn.debug: dbconn.printDebug(rawconn, query, 'Select children of the class %s' % childName) self.dbconn._executeRetry(rawconn, cursor, query) for result in cursor.fetchall(): # Inheritance child classes may have no own columns # (that makes sense when child class has a join # that does not apply to parent class objects). # In such cases result[1:] gives an empty tuple # which is interpreted as "no results fetched" in .get(). # So .get() issues another query which is absolutely # meaningless (like "SELECT NULL FROM child WHERE id=1"). # In order to avoid this, we replace empty results # with non-empty tuple. Extra values in selectResults # are Ok - they will be ignored by ._SO_selectInit(). self._childrenResults[result[0]] = result[1:] or (None,) SQLObject-1.5.2/sqlobject/main.py0000644000175000017500000020306512223766715016162 0ustar phdphd00000000000000""" SQLObject --------- :author: Ian Bicking SQLObject is a object-relational mapper. See SQLObject.html or SQLObject.txt for more. With the help by Oleg Broytman and many other contributors. See Authors.txt. This program is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA. """ import threading import weakref import sqlbuilder import dbconnection import col import styles import types import warnings import joins import index import classregistry import declarative import events from sresults import SelectResults from util.threadinglocal import local import sys if sys.version_info[:3] < (2, 4, 0): raise ImportError, "SQLObject requires Python 2.4.0 or later" """ This thread-local storage is needed for RowCreatedSignals. It gathers code-blocks to execute _after_ the whole hierachy of inherited SQLObjects is created. 
See SQLObject._create """ NoDefault = sqlbuilder.NoDefault class SQLObjectNotFound(LookupError): pass class SQLObjectIntegrityError(Exception): pass def makeProperties(obj): """ This function takes a dictionary of methods and finds methods named like: * _get_attr * _set_attr * _del_attr * _doc_attr Except for _doc_attr, these should be methods. It then creates properties from these methods, like property(_get_attr, _set_attr, _del_attr, _doc_attr). Missing methods are okay. """ if isinstance(obj, dict): def setFunc(var, value): obj[var] = value d = obj else: def setFunc(var, value): setattr(obj, var, value) d = obj.__dict__ props = {} for var, value in d.items(): if var.startswith('_set_'): props.setdefault(var[5:], {})['set'] = value elif var.startswith('_get_'): props.setdefault(var[5:], {})['get'] = value elif var.startswith('_del_'): props.setdefault(var[5:], {})['del'] = value elif var.startswith('_doc_'): props.setdefault(var[5:], {})['doc'] = value for var, setters in props.items(): if len(setters) == 1 and 'doc' in setters: continue if var in d: if isinstance(d[var], (types.MethodType, types.FunctionType)): warnings.warn( "I tried to set the property %r, but it was " "already set, as a method (%r). Methods have " "significantly different semantics than properties, " "and this may be a sign of a bug in your code." % (var, d[var])) continue setFunc(var, property(setters.get('get'), setters.get('set'), setters.get('del'), setters.get('doc'))) def unmakeProperties(obj): if isinstance(obj, dict): def delFunc(obj, var): del obj[var] d = obj else: delFunc = delattr d = obj.__dict__ for var, value in d.items(): if isinstance(value, property): for prop in [value.fget, value.fset, value.fdel]: if prop and not prop.__name__ in d: delFunc(obj, var) break def findDependencies(name, registry=None): depends = [] for klass in classregistry.registry(registry).allClasses(): if findDependantColumns(name, klass): depends.append(klass) else: for join in klass.sqlmeta.joins: if isinstance(join, joins.SORelatedJoin) and join.otherClassName == name: depends.append(klass) break return depends def findDependantColumns(name, klass): depends = [] for col in klass.sqlmeta.columnList: if col.foreignKey == name and col.cascade is not None: depends.append(col) return depends def _collectAttributes(cls, new_attrs, look_for_class): """Finds all attributes in `new_attrs` that are instances of `look_for_class`. The ``.name`` attribute is set for any matching objects. Returns them as a list. """ result = [] for attr, value in new_attrs.items(): if isinstance(value, look_for_class): value.name = attr delattr(cls, attr) result.append(value) return result class CreateNewSQLObject: """ Dummy singleton to use in place of an ID, to signal we want a new object. """ pass class sqlmeta(object): """ This object is the object we use to keep track of all sorts of information. Subclasses are made for each SQLObject subclass (dynamically if necessary), and instances are created to go alongside every SQLObject instance. """ table = None idName = None idSequence = None # This function is used to coerce IDs into the proper format, # so you should replace it with str, or another function, if you # aren't using integer IDs idType = int style = None lazyUpdate = False defaultOrder = None cacheValues = True registry = None fromDatabase = False # Default is false, but we set it to true for the *instance* # when necessary: (bad clever? 
maybe) expired = False # This is a mapping from column names to SOCol (or subclass) # instances: columns = {} columnList = [] # This is a mapping from column names to Col (or subclass) # instances; these objects don't have the logic that the SOCol # objects do, and are not attached to this class closely. columnDefinitions = {} # These are lists of the join and index objects: indexes = [] indexDefinitions = [] joins = [] joinDefinitions = [] # These attributes shouldn't be shared with superclasses: _unshared_attributes = ['table', 'columns', 'childName'] # These are internal bookkeeping attributes; the class-level # definition is a default for the instances, instances will # reset these values. # When an object is being created, it has an instance # variable _creating, which is true. This way all the # setters can be captured until the object is complete, # and then the row is inserted into the database. Once # that happens, _creating is deleted from the instance, # and only the class variable (which is always false) is # left. _creating = False _obsolete = False # Sometimes an intance is attached to a connection, not # globally available. In that case, self.sqlmeta._perConnection # will be true. It's false by default: _perConnection = False # Inheritance definitions: parentClass = None # A reference to the parent class childClasses = {} # References to child classes, keyed by childName childName = None # Class name for inheritance child object creation # Does the row require syncing? dirty = False # Default encoding for UnicodeCol's dbEncoding = None __metaclass__ = declarative.DeclarativeMeta def __classinit__(cls, new_attrs): for attr in cls._unshared_attributes: if attr not in new_attrs: setattr(cls, attr, None) declarative.setup_attributes(cls, new_attrs) def __init__(self, instance): self.instance = weakref.proxy(instance) @classmethod def send(cls, signal, *args, **kw): events.send(signal, cls.soClass, *args, **kw) @classmethod def setClass(cls, soClass): cls.soClass = soClass if not cls.style: cls.style = styles.defaultStyle try: if cls.soClass._connection and cls.soClass._connection.style: cls.style = cls.soClass._connection.style except AttributeError: pass if cls.table is None: cls.table = cls.style.pythonClassToDBTable(cls.soClass.__name__) if cls.idName is None: cls.idName = cls.style.idForTable(cls.table) # plainSetters are columns that haven't been overridden by the # user, so we can contact the database directly to set them. # Note that these can't set these in the SQLObject class # itself, because they specific to this subclass of SQLObject, # and cannot be shared among classes. 
cls._plainSetters = {} cls._plainGetters = {} cls._plainForeignSetters = {} cls._plainForeignGetters = {} cls._plainJoinGetters = {} cls._plainJoinAdders = {} cls._plainJoinRemovers = {} # This is a dictionary of columnName: columnObject # None of these objects can be shared with superclasses cls.columns = {} cls.columnList = [] # These, however, can be shared: cls.columnDefinitions = cls.columnDefinitions.copy() cls.indexes = [] cls.indexDefinitions = cls.indexDefinitions[:] cls.joins = [] cls.joinDefinitions = cls.joinDefinitions[:] ############################################################ ## Adding special values, like columns and indexes ############################################################ ######################################## ## Column handling ######################################## @classmethod def addColumn(cls, columnDef, changeSchema=False, connection=None): post_funcs = [] cls.send(events.AddColumnSignal, cls.soClass, connection, columnDef.name, columnDef, changeSchema, post_funcs) sqlmeta = cls soClass = cls.soClass del cls column = columnDef.withClass(soClass) name = column.name assert name != 'id', ( "The 'id' column is implicit, and should not be defined as " "a column") assert name not in sqlmeta.columns, ( "The class %s.%s already has a column %r (%r), you cannot " "add the column %r" % (soClass.__module__, soClass.__name__, name, sqlmeta.columnDefinitions[name], columnDef)) # Collect columns from the parent classes to test # if the column is not in a parent class parent_columns = [] for base in soClass.__bases__: if hasattr(base, "sqlmeta"): parent_columns.extend(base.sqlmeta.columns.keys()) if hasattr(soClass, name): assert (name in parent_columns) or (name == "childName"), ( "The class %s.%s already has a variable or method %r, you cannot " "add the column %r" % (soClass.__module__, soClass.__name__, name, name)) sqlmeta.columnDefinitions[name] = columnDef sqlmeta.columns[name] = column # A stable-ordered version of the list... sqlmeta.columnList.append(column) ################################################### # Create the getter function(s). We'll start by # creating functions like _SO_get_columnName, # then if there's no function named _get_columnName # we'll alias that to _SO_get_columnName. This # allows a sort of super call, even though there's # no superclass that defines the database access. if sqlmeta.cacheValues: # We create a method here, which is just a function # that takes "self" as the first argument. getter = eval('lambda self: self._SO_loadValue(%s)' % repr(instanceName(name))) else: # If we aren't caching values, we just call the # function _SO_getValue, which fetches from the # database. getter = eval('lambda self: self._SO_getValue(%s)' % repr(name)) setattr(soClass, rawGetterName(name), getter) # Here if the _get_columnName method isn't in the # definition, we add it with the default # _SO_get_columnName definition. if not hasattr(soClass, getterName(name)) or (name == 'childName'): setattr(soClass, getterName(name), getter) sqlmeta._plainGetters[name] = 1 ################################################# # Create the setter function(s) # Much like creating the getters, we will create # _SO_set_columnName methods, and then alias them # to _set_columnName if the user hasn't defined # those methods themself. 
# @@: This is lame; immutable right now makes it unsettable, # making the table read-only if not column.immutable: # We start by just using the _SO_setValue method setter = eval('lambda self, val: self._SO_setValue(%s, val, self.%s, self.%s)' % (repr(name), '_SO_from_python_%s' % name, '_SO_to_python_%s' % name)) setattr(soClass, '_SO_from_python_%s' % name, column.from_python) setattr(soClass, '_SO_to_python_%s' % name, column.to_python) setattr(soClass, rawSetterName(name), setter) # Then do the aliasing if not hasattr(soClass, setterName(name)) or (name == 'childName'): setattr(soClass, setterName(name), setter) # We keep track of setters that haven't been # overridden, because we can combine these # set columns into one SQL UPDATE query. sqlmeta._plainSetters[name] = 1 ################################################## # Here we check if the column is a foreign key, in # which case we need to make another method that # fetches the key and constructs the sister # SQLObject instance. if column.foreignKey: # We go through the standard _SO_get_columnName deal # we're giving the object, not the ID of the # object this time: origName = column.origName if sqlmeta.cacheValues: # self._SO_class_className is a reference # to the class in question. getter = eval('lambda self: self._SO_foreignKey(self._SO_loadValue(%r), self._SO_class_%s, %s)' % (instanceName(name), column.foreignKey, column.refColumn and repr(column.refColumn))) else: # Same non-caching version as above. getter = eval('lambda self: self._SO_foreignKey(self._SO_getValue(%s), self._SO_class_%s, %s)' % (repr(name), column.foreignKey, column.refColumn and repr(column.refColumn))) setattr(soClass, rawGetterName(origName), getter) # And we set the _get_columnName version if not hasattr(soClass, getterName(origName)): setattr(soClass, getterName(origName), getter) sqlmeta._plainForeignGetters[origName] = 1 if not column.immutable: # The setter just gets the ID of the object, # and then sets the real column. 
setter = eval('lambda self, val: setattr(self, %s, self._SO_getID(val, %s))' % (repr(name), column.refColumn and repr(column.refColumn))) setattr(soClass, rawSetterName(origName), setter) if not hasattr(soClass, setterName(origName)): setattr(soClass, setterName(origName), setter) sqlmeta._plainForeignSetters[origName] = 1 classregistry.registry(sqlmeta.registry).addClassCallback( column.foreignKey, lambda foreign, me, attr: setattr(me, attr, foreign), soClass, '_SO_class_%s' % column.foreignKey) if column.alternateMethodName: func = eval('lambda cls, val, connection=None: cls._SO_fetchAlternateID(%s, %s, val, connection=connection)' % (repr(column.name), repr(column.dbName))) setattr(soClass, column.alternateMethodName, classmethod(func)) if changeSchema: conn = connection or soClass._connection conn.addColumn(sqlmeta.table, column) if soClass._SO_finishedClassCreation: makeProperties(soClass) for func in post_funcs: func(soClass, column) @classmethod def addColumnsFromDatabase(sqlmeta, connection=None): soClass = sqlmeta.soClass conn = connection or soClass._connection for columnDef in conn.columnsFromSchema(sqlmeta.table, soClass): if columnDef.name not in sqlmeta.columnDefinitions: if isinstance(columnDef.name, unicode): columnDef.name = columnDef.name.encode('ascii') sqlmeta.addColumn(columnDef) @classmethod def delColumn(cls, column, changeSchema=False, connection=None): sqlmeta = cls soClass = sqlmeta.soClass if isinstance(column, str): if column in sqlmeta.columns: column = sqlmeta.columns[column] elif column+'ID' in sqlmeta.columns: column = sqlmeta.columns[column+'ID'] else: raise ValueError('Unknown column ' + column) if isinstance(column, col.Col): for c in sqlmeta.columns.values(): if column is c.columnDef: column = c break else: raise IndexError( "Column with definition %r not found" % column) post_funcs = [] cls.send(events.DeleteColumnSignal, cls.soClass, connection, column.name, column, post_funcs) name = column.name del sqlmeta.columns[name] del sqlmeta.columnDefinitions[name] sqlmeta.columnList.remove(column) delattr(soClass, rawGetterName(name)) if name in sqlmeta._plainGetters: delattr(soClass, getterName(name)) delattr(soClass, rawSetterName(name)) if name in sqlmeta._plainSetters: delattr(soClass, setterName(name)) if column.foreignKey: delattr(soClass, rawGetterName(soClass.sqlmeta.style.instanceIDAttrToAttr(name))) if name in sqlmeta._plainForeignGetters: delattr(soClass, getterName(name)) delattr(soClass, rawSetterName(soClass.sqlmeta.style.instanceIDAttrToAttr(name))) if name in sqlmeta._plainForeignSetters: delattr(soClass, setterName(name)) if column.alternateMethodName: delattr(soClass, column.alternateMethodName) if changeSchema: conn = connection or soClass._connection conn.delColumn(sqlmeta, column) if soClass._SO_finishedClassCreation: unmakeProperties(soClass) makeProperties(soClass) for func in post_funcs: func(soClass, column) ######################################## ## Join handling ######################################## @classmethod def addJoin(cls, joinDef): sqlmeta = cls soClass = cls.soClass # The name of the method we'll create. If it's # automatically generated, it's generated by the # join class. 
join = joinDef.withClass(soClass) meth = join.joinMethodName sqlmeta.joins.append(join) index = len(sqlmeta.joins)-1 if joinDef not in sqlmeta.joinDefinitions: sqlmeta.joinDefinitions.append(joinDef) # The function fetches the join by index, and # then lets the join object do the rest of the # work: func = eval('lambda self: self.sqlmeta.joins[%i].performJoin(self)' % index) # And we do the standard _SO_get_... _get_... deal setattr(soClass, rawGetterName(meth), func) if not hasattr(soClass, getterName(meth)): setattr(soClass, getterName(meth), func) sqlmeta._plainJoinGetters[meth] = 1 # Some joins allow you to remove objects from the # join. if hasattr(join, 'remove'): # Again, we let it do the remove, and we do the # standard naming trick. func = eval('lambda self, obj: self.sqlmeta.joins[%i].remove(self, obj)' % index) setattr(soClass, '_SO_remove' + join.addRemoveName, func) if not hasattr(soClass, 'remove' + join.addRemoveName): setattr(soClass, 'remove' + join.addRemoveName, func) sqlmeta._plainJoinRemovers[meth] = 1 # Some joins allow you to add objects. if hasattr(join, 'add'): # And again... func = eval('lambda self, obj: self.sqlmeta.joins[%i].add(self, obj)' % index) setattr(soClass, '_SO_add' + join.addRemoveName, func) if not hasattr(soClass, 'add' + join.addRemoveName): setattr(soClass, 'add' + join.addRemoveName, func) sqlmeta._plainJoinAdders[meth] = 1 if soClass._SO_finishedClassCreation: makeProperties(soClass) @classmethod def delJoin(sqlmeta, joinDef): soClass = sqlmeta.soClass for join in sqlmeta.joins: # previously deleted joins will be None, so it must # be skipped or it'll error out on the next line. if join is None: continue if joinDef is join.joinDef: break else: raise IndexError( "Join %r not found in class %r (from %r)" % (joinDef, soClass, sqlmeta.joins)) meth = join.joinMethodName sqlmeta.joinDefinitions.remove(joinDef) for i in range(len(sqlmeta.joins)): if sqlmeta.joins[i] is join: # Have to leave None, because we refer to joins # by index. sqlmeta.joins[i] = None delattr(soClass, rawGetterName(meth)) if meth in sqlmeta._plainJoinGetters: delattr(soClass, getterName(meth)) if hasattr(join, 'remove'): delattr(soClass, '_SO_remove' + join.addRemovePrefix) if meth in sqlmeta._plainJoinRemovers: delattr(soClass, 'remove' + join.addRemovePrefix) if hasattr(join, 'add'): delattr(soClass, '_SO_add' + join.addRemovePrefix) if meth in sqlmeta._plainJoinAdders: delattr(soClass, 'add' + join.addRemovePrefix) if soClass._SO_finishedClassCreation: unmakeProperties(soClass) makeProperties(soClass) ######################################## ## Indexes ######################################## @classmethod def addIndex(cls, indexDef): cls.indexDefinitions.append(indexDef) index = indexDef.withClass(cls.soClass) cls.indexes.append(index) setattr(cls.soClass, index.name, index) ######################################## ## Utility methods ######################################## @classmethod def getColumns(sqlmeta): return sqlmeta.columns.copy() def asDict(self): """ Return the object as a dictionary of columns to values. """ result = {} for key in self.getColumns(): result[key] = getattr(self.instance, key) result['id'] = self.instance.id return result @classmethod def expireAll(sqlmeta, connection=None): """ Expire all instances of this class. 
""" soClass = sqlmeta.soClass connection = connection or soClass._connection cache_set = connection.cache cache_set.weakrefAll(soClass) for item in cache_set.getAll(soClass): item.expire() sqlhub = dbconnection.ConnectionHub() # Turning it on gives earlier warning about things # that will be deprecated (having this off we won't flood people # with warnings right away). warnings_level = 1 exception_level = None # Current levels: # 1) Actively deprecated # 2) Deprecated after 1 # 3) Deprecated after 2 def deprecated(message, level=1, stacklevel=2): if exception_level is not None and exception_level <= level: raise NotImplementedError(message) if warnings_level is not None and warnings_level <= level: warnings.warn(message, DeprecationWarning, stacklevel=stacklevel) if sys.version_info[:3] < (2, 5, 0): deprecated("Support for Python 2.4 has been declared obsolete and will be removed in the next release of SQLObject") def setDeprecationLevel(warning=1, exception=None): """ Set the deprecation level for SQLObject. Low levels are more actively being deprecated. Any warning at a level at or below ``warning`` will give a warning. Any warning at a level at or below ``exception`` will give an exception. You can use a higher ``exception`` level for tests to help upgrade your code. ``None`` for either value means never warn or raise exceptions. The levels currently mean: 1) Deprecated in current version. Will be removed in next version. 2) Planned to deprecate in next version, remove later. 3) Planned to deprecate sometime, remove sometime much later. As the SQLObject versions progress, the deprecation level of specific features will go down, indicating the advancing nature of the feature's doom. We'll try to keep features at 1 for a major revision. As time continues there may be a level 0, which will give a useful error message (better than ``AttributeError``) but where the feature has been fully removed. """ global warnings_level, exception_level warnings_level = warning exception_level = exception class _sqlmeta_attr(object): def __init__(self, name, deprecation_level): self.name = name self.deprecation_level = deprecation_level def __get__(self, obj, type=None): if self.deprecation_level is not None: deprecated( 'Use of this attribute should be replaced with ' '.sqlmeta.%s' % self.name, level=self.deprecation_level) return getattr((type or obj).sqlmeta, self.name) _postponed_local = local() # SQLObject is the superclass for all SQLObject classes, of # course. All the deeper magic is done in MetaSQLObject, and # only lesser magic is done here. All the actual work is done # here, though -- just automatic method generation (like # methods and properties for each column) is done in # MetaSQLObject. class SQLObject(object): __metaclass__ = declarative.DeclarativeMeta _connection = sqlhub sqlmeta = sqlmeta #DSM: The _inheritable attribute controls wheter the class can by #DSM: inherited 'logically' with a foreignKey and a back reference. _inheritable = False # Is this class inheritable? 
_parent = None # A reference to the parent instance childName = None # Children name (to be able to get a subclass) # The law of Demeter: the class should not call another classes by name SelectResultsClass = SelectResults def __classinit__(cls, new_attrs): # This is true if we're initializing the SQLObject class, # instead of a subclass: is_base = cls.__bases__ == (object,) cls._SO_setupSqlmeta(new_attrs, is_base) implicitColumns = _collectAttributes(cls, new_attrs, col.Col) implicitJoins = _collectAttributes(cls, new_attrs, joins.Join) implicitIndexes = _collectAttributes(cls, new_attrs, index.DatabaseIndex) if not is_base: cls._SO_cleanDeprecatedAttrs(new_attrs) if '_connection' in new_attrs: connection = new_attrs['_connection'] del cls._connection assert 'connection' not in new_attrs elif 'connection' in new_attrs: connection = new_attrs['connection'] del cls.connection else: connection = None cls._SO_finishedClassCreation = False ###################################################### # Set some attributes to their defaults, if necessary. # First we get the connection: if not connection and not getattr(cls, '_connection', None): mod = sys.modules[cls.__module__] # See if there's a __connection__ global in # the module, use it if there is. if hasattr(mod, '__connection__'): connection = mod.__connection__ # Do not check hasattr(cls, '_connection') here - it is possible # SQLObject parent class has a connection attribute that came # from sqlhub, e.g.; check __dict__ only. if connection and ('_connection' not in cls.__dict__): cls.setConnection(connection) sqlmeta = cls.sqlmeta # We have to check if there are columns in the inherited # _columns where the attribute has been set to None in this # class. If so, then we need to remove that column from # _columns. for key in sqlmeta.columnDefinitions.keys(): if (key in new_attrs and new_attrs[key] is None): del sqlmeta.columnDefinitions[key] for column in sqlmeta.columnDefinitions.values(): sqlmeta.addColumn(column) for column in implicitColumns: sqlmeta.addColumn(column) # Now the class is in an essentially OK-state, so we can # set up any magic attributes: declarative.setup_attributes(cls, new_attrs) if sqlmeta.fromDatabase: sqlmeta.addColumnsFromDatabase() for j in implicitJoins: sqlmeta.addJoin(j) for i in implicitIndexes: sqlmeta.addIndex(i) order_getter = lambda o: o.creationOrder sqlmeta.columnList.sort(key=order_getter) sqlmeta.indexes.sort(key=order_getter) sqlmeta.indexDefinitions.sort(key=order_getter) # Joins cannot be sorted because addJoin created accessors # that remember indexes. #sqlmeta.joins.sort(key=order_getter) sqlmeta.joinDefinitions.sort(key=order_getter) # We don't setup the properties until we're finished with the # batch adding of all the columns... cls._notifyFinishClassCreation() cls._SO_finishedClassCreation = True makeProperties(cls) # We use the magic "q" attribute for accessing lazy # SQL where-clause generation. See the sql module for # more. if not is_base: cls.q = sqlbuilder.SQLObjectTable(cls) cls.j = sqlbuilder.SQLObjectTableWithJoins(cls) classregistry.registry(sqlmeta.registry).addClass(cls) @classmethod def _SO_setupSqlmeta(cls, new_attrs, is_base): """ This fixes up the sqlmeta attribute. It handles both the case where no sqlmeta was given (in which we need to create another subclass), or the sqlmeta given doesn't have the proper inheritance. Lastly it calls sqlmeta.setClass, which handles much of the setup. 
""" if ('sqlmeta' not in new_attrs and not is_base): # We have to create our own subclass, usually. # type(className, bases_tuple, attr_dict) creates a new subclass. cls.sqlmeta = type('sqlmeta', (cls.sqlmeta,), {}) if not issubclass(cls.sqlmeta, sqlmeta): # We allow no superclass and an object superclass, instead # of inheriting from sqlmeta; but in that case we replace # the class and just move over its attributes: assert cls.sqlmeta.__bases__ in ((), (object,)), ( "If you do not inherit your sqlmeta class from " "sqlobject.sqlmeta, it must not inherit from any other " "class (your sqlmeta inherits from: %s)" % cls.sqlmeta.__bases__) for base in cls.__bases__: superclass = getattr(base, 'sqlmeta', None) if superclass: break else: assert 0, ( "No sqlmeta class could be found in any superclass " "(while fixing up sqlmeta %r inheritance)" % cls.sqlmeta) values = dict(cls.sqlmeta.__dict__) for key in values.keys(): if key.startswith('__') and key.endswith('__'): # Magic values shouldn't be passed through: del values[key] cls.sqlmeta = type('sqlmeta', (superclass,), values) if not is_base: # Do not pollute the base sqlmeta class cls.sqlmeta.setClass(cls) @classmethod def _SO_cleanDeprecatedAttrs(cls, new_attrs): """ This removes attributes on SQLObject subclasses that have been deprecated; they are moved to the sqlmeta class, and a deprecation warning is given. """ for attr in (): if attr in new_attrs: deprecated("%r is deprecated and read-only; please do " "not use it in your classes until it is fully " "deprecated" % attr, level=1, stacklevel=5) @classmethod def get(cls, id, connection=None, selectResults=None): assert id is not None, 'None is not a possible id for %s' % cls.__name__ id = cls.sqlmeta.idType(id) if connection is None: cache = cls._connection.cache else: cache = connection.cache # This whole sequence comes from Cache.CacheFactory's # behavior, where a None returned means a cache miss. val = cache.get(id, cls) if val is None: try: val = cls(_SO_fetch_no_create=1) val._SO_validatorState = sqlbuilder.SQLObjectState(val) val._init(id, connection, selectResults) cache.put(id, cls, val) finally: cache.finishPut(cls) elif selectResults and not val.sqlmeta.dirty: val._SO_writeLock.acquire() try: val._SO_selectInit(selectResults) val.sqlmeta.expired = False finally: val._SO_writeLock.release() return val @classmethod def _notifyFinishClassCreation(cls): pass def _init(self, id, connection=None, selectResults=None): assert id is not None # This function gets called only when the object is # created, unlike __init__ which would be called # anytime the object was returned from cache. self.id = id self._SO_writeLock = threading.Lock() # If no connection was given, we'll inherit the class # instance variable which should have a _connection # attribute. if (connection is not None) and \ (getattr(self, '_connection', None) is not connection): self._connection = connection # Sometimes we need to know if this instance is # global or tied to a particular connection. 
# This flag tells us that: self.sqlmeta._perConnection = True if not selectResults: dbNames = [col.dbName for col in self.sqlmeta.columnList] selectResults = self._connection._SO_selectOne(self, dbNames) if not selectResults: raise SQLObjectNotFound, "The object %s by the ID %s does not exist" % (self.__class__.__name__, self.id) self._SO_selectInit(selectResults) self._SO_createValues = {} self.sqlmeta.dirty = False def _SO_loadValue(self, attrName): try: return getattr(self, attrName) except AttributeError: try: self._SO_writeLock.acquire() try: # Maybe, just in the moment since we got the lock, # some other thread did a _SO_loadValue and we # have the attribute! Let's try and find out! We # can keep trying this all day and still beat the # performance on the database call (okay, we can # keep trying this for a few msecs at least)... result = getattr(self, attrName) except AttributeError: pass else: return result self.sqlmeta.expired = False dbNames = [col.dbName for col in self.sqlmeta.columnList] selectResults = self._connection._SO_selectOne(self, dbNames) if not selectResults: raise SQLObjectNotFound, "The object %s by the ID %s has been deleted" % (self.__class__.__name__, self.id) self._SO_selectInit(selectResults) result = getattr(self, attrName) return result finally: self._SO_writeLock.release() def sync(self): if self.sqlmeta.lazyUpdate and self._SO_createValues: self.syncUpdate() self._SO_writeLock.acquire() try: dbNames = [col.dbName for col in self.sqlmeta.columnList] selectResults = self._connection._SO_selectOne(self, dbNames) if not selectResults: raise SQLObjectNotFound, "The object %s by the ID %s has been deleted" % (self.__class__.__name__, self.id) self._SO_selectInit(selectResults) self.sqlmeta.expired = False finally: self._SO_writeLock.release() def syncUpdate(self): if not self._SO_createValues: return self._SO_writeLock.acquire() try: if self.sqlmeta.columns: values = [(self.sqlmeta.columns[v[0]].dbName, v[1]) for v in self._SO_createValues.items()] self._connection._SO_update(self, values) self.sqlmeta.dirty = False self._SO_createValues = {} finally: self._SO_writeLock.release() post_funcs = [] self.sqlmeta.send(events.RowUpdatedSignal, self, post_funcs) for func in post_funcs: func(self) def expire(self): if self.sqlmeta.expired: return self._SO_writeLock.acquire() try: if self.sqlmeta.expired: return for column in self.sqlmeta.columnList: delattr(self, instanceName(column.name)) self.sqlmeta.expired = True self._connection.cache.expire(self.id, self.__class__) self._SO_createValues = {} finally: self._SO_writeLock.release() def _SO_setValue(self, name, value, from_python, to_python): # This is the place where we actually update the # database. # If we are _creating, the object doesn't yet exist # in the database, and we can't insert it until all # the parts are set. 
So we just keep them in a # dictionary until later: d = {name: value} if not self.sqlmeta._creating and not getattr(self.sqlmeta, "row_update_sig_suppress", False): self.sqlmeta.send(events.RowUpdateSignal, self, d) if len(d) != 1 or name not in d: # Already called RowUpdateSignal, don't call it again # inside .set() self.sqlmeta.row_update_sig_suppress = True self.set(**d) del self.sqlmeta.row_update_sig_suppress value = d[name] if from_python: dbValue = from_python(value, self._SO_validatorState) else: dbValue = value if to_python: value = to_python(dbValue, self._SO_validatorState) if self.sqlmeta._creating or self.sqlmeta.lazyUpdate: self.sqlmeta.dirty = True self._SO_createValues[name] = dbValue setattr(self, instanceName(name), value) return self._connection._SO_update( self, [(self.sqlmeta.columns[name].dbName, dbValue)]) if self.sqlmeta.cacheValues: setattr(self, instanceName(name), value) post_funcs = [] self.sqlmeta.send(events.RowUpdatedSignal, self, post_funcs) for func in post_funcs: func(self) def set(self, _suppress_set_sig=False, **kw): if not self.sqlmeta._creating and not getattr(self.sqlmeta, "row_update_sig_suppress", False) and not _suppress_set_sig: self.sqlmeta.send(events.RowUpdateSignal, self, kw) # set() is used to update multiple values at once, # potentially with one SQL statement if possible. # Filter out items that don't map to column names. # Those will be set directly on the object using # setattr(obj, name, value). is_column = lambda _c: _c in self.sqlmeta._plainSetters f_is_column = lambda item: is_column(item[0]) f_not_column = lambda item: not is_column(item[0]) items = kw.items() extra = dict(filter(f_not_column, items)) kw = dict(filter(f_is_column, items)) # _creating is special, see _SO_setValue if self.sqlmeta._creating or self.sqlmeta.lazyUpdate: for name, value in kw.items(): from_python = getattr(self, '_SO_from_python_%s' % name, None) if from_python: kw[name] = dbValue = from_python(value, self._SO_validatorState) else: dbValue = value to_python = getattr(self, '_SO_to_python_%s' % name, None) if to_python: value = to_python(dbValue, self._SO_validatorState) setattr(self, instanceName(name), value) self._SO_createValues.update(kw) for name, value in extra.items(): try: getattr(self.__class__, name) except AttributeError: if name not in self.sqlmeta.columns: raise TypeError, "%s.set() got an unexpected keyword argument %s" % (self.__class__.__name__, name) try: setattr(self, name, value) except AttributeError, e: raise AttributeError, '%s (with attribute %r)' % (e, name) self.sqlmeta.dirty = True return self._SO_writeLock.acquire() try: # We have to go through and see if the setters are # "plain", that is, if the user has changed their # definition in any way (put in something that # normalizes the value or checks for consistency, # for instance). If so then we have to use plain # old setattr() to change the value, since we can't # read the user's mind. We'll combine everything # else into a single UPDATE, if necessary. 
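            # (Illustration only -- the instance and column names are made up.
            #  A call such as
            #      person.set(firstName='John', lastName='Doe')
            #  where both columns still use their plain setters lands in
            #  ``toUpdate`` below and is flushed as a single UPDATE.)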
toUpdate = {} for name, value in kw.items(): from_python = getattr(self, '_SO_from_python_%s' % name, None) if from_python: dbValue = from_python(value, self._SO_validatorState) else: dbValue = value to_python = getattr(self, '_SO_to_python_%s' % name, None) if to_python: value = to_python(dbValue, self._SO_validatorState) if self.sqlmeta.cacheValues: setattr(self, instanceName(name), value) toUpdate[name] = dbValue for name, value in extra.items(): try: getattr(self.__class__, name) except AttributeError: if name not in self.sqlmeta.columns: raise TypeError, "%s.set() got an unexpected keyword argument %s" % (self.__class__.__name__, name) try: setattr(self, name, value) except AttributeError, e: raise AttributeError, '%s (with attribute %r)' % (e, name) if toUpdate: args = [(self.sqlmeta.columns[name].dbName, value) for name, value in toUpdate.items()] self._connection._SO_update(self, args) finally: self._SO_writeLock.release() post_funcs = [] self.sqlmeta.send(events.RowUpdatedSignal, self, post_funcs) for func in post_funcs: func(self) def _SO_selectInit(self, row): for col, colValue in zip(self.sqlmeta.columnList, row): if col.to_python: colValue = col.to_python(colValue, self._SO_validatorState) setattr(self, instanceName(col.name), colValue) def _SO_getValue(self, name): # Retrieves a single value from the database. Simple. assert not self.sqlmeta._obsolete, ( "%s with id %s has become obsolete" \ % (self.__class__.__name__, self.id)) # @@: do we really need this lock? #self._SO_writeLock.acquire() column = self.sqlmeta.columns[name] results = self._connection._SO_selectOne(self, [column.dbName]) #self._SO_writeLock.release() assert results != None, "%s with id %s is not in the database" \ % (self.__class__.__name__, self.id) value = results[0] if column.to_python: value = column.to_python(value, self._SO_validatorState) return value def _SO_foreignKey(self, value, joinClass, idName=None): if value is None: return None if self.sqlmeta._perConnection: connection = self._connection else: connection = None if idName is None: # Get by id return joinClass.get(value, connection=connection) return joinClass.select( getattr(joinClass.q, idName)==value, connection=connection).getOne() def __init__(self, **kw): # If we are the outmost constructor of a hiearchy of # InheritableSQLObjects (or simlpy _the_ constructor of a "normal" # SQLObject), we create a threadlocal list that collects the # RowCreatedSignals, and executes them if this very constructor is left try: _postponed_local.postponed_calls postponed_created = False except AttributeError: _postponed_local.postponed_calls = [] postponed_created = True try: # We shadow the sqlmeta class with an instance of sqlmeta # that points to us (our sqlmeta buddy object; where the # sqlmeta class is our class's buddy class) self.sqlmeta = self.__class__.sqlmeta(self) # The get() classmethod/constructor uses a magic keyword # argument when it wants an empty object, fetched from the # database. So we have nothing more to do in that case: if '_SO_fetch_no_create' in kw: return post_funcs = [] self.sqlmeta.send(events.RowCreateSignal, self, kw, post_funcs) # Pass the connection object along if we were given one. 
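        # (Illustration only -- ``Person`` is a made-up class.  A call like
        #      Person(name='Alice', connection=trans)
        #  ties the new instance to a specific connection or transaction; the
        #  keyword is popped below so it is never mistaken for a column value.)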
if 'connection' in kw: connection = kw.pop('connection') if getattr(self, '_connection', None) is not connection: self._connection = connection self.sqlmeta._perConnection = True self._SO_writeLock = threading.Lock() if 'id' in kw: id = self.sqlmeta.idType(kw['id']) del kw['id'] else: id = None self._create(id, **kw) for func in post_funcs: func(self) finally: # if we are the creator of the tl-storage, we # have to exectute and under all circumstances # remove the tl-storage if postponed_created: try: for func in _postponed_local.postponed_calls: func() finally: del _postponed_local.postponed_calls def _create(self, id, **kw): self.sqlmeta._creating = True self._SO_createValues = {} self._SO_validatorState = sqlbuilder.SQLObjectState(self) # First we do a little fix-up on the keywords we were # passed: for column in self.sqlmeta.columnList: # Then we check if the column wasn't passed in, and # if not we try to get the default. if column.name not in kw and column.foreignName not in kw: default = column.default # If we don't get it, it's an error: # If we specified an SQL DEFAULT, then we should use that if default is NoDefault: if column.defaultSQL is None: raise TypeError, "%s() did not get expected keyword argument '%s'" % (self.__class__.__name__, column.name) else: # There is defaultSQL for the column - do not put # the column to kw so that the backend creates the value continue # Otherwise we put it in as though they did pass # that keyword: kw[column.name] = default self.set(**kw) # Then we finalize the process: self._SO_finishCreate(id) def _SO_finishCreate(self, id=None): # Here's where an INSERT is finalized. # These are all the column values that were supposed # to be set, but were delayed until now: setters = self._SO_createValues.items() # Here's their database names: names = [self.sqlmeta.columns[v[0]].dbName for v in setters] values = [v[1] for v in setters] # Get rid of _SO_create*, we aren't creating anymore. # Doesn't have to be threadsafe because we're still in # new(), which doesn't need to be threadsafe. self.sqlmeta.dirty = False if not self.sqlmeta.lazyUpdate: del self._SO_createValues else: self._SO_createValues = {} del self.sqlmeta._creating # Do the insert -- most of the SQL in this case is left # up to DBConnection, since getting a new ID is # non-standard. 
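        # (How the new id comes back is backend-specific -- for example a
        #  sequence on PostgreSQL or AUTO_INCREMENT/last_insert_id() on
        #  MySQL -- which is why queryInsertID is a method of the connection.)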
id = self._connection.queryInsertID(self, id, names, values) cache = self._connection.cache cache.created(id, self.__class__, self) self._init(id) post_funcs = [] kw = dict([('class', self.__class__), ('id', id)]) def _send_RowCreatedSignal(): self.sqlmeta.send(events.RowCreatedSignal, self, kw, post_funcs) for func in post_funcs: func(self) _postponed_local.postponed_calls.append(_send_RowCreatedSignal) def _SO_getID(self, obj, refColumn=None): return getID(obj, refColumn) @classmethod def _findAlternateID(cls, name, dbName, value, connection=None): if isinstance(name, str): name = (name,) value = (value,) if len(name) != len(value): raise ValueError, "'column' and 'value' tuples must be of the same size" new_value = [] for n, v in zip(name, value): from_python = getattr(cls, '_SO_from_python_' + n) if from_python: v = from_python(v, sqlbuilder.SQLObjectState(cls, connection=connection)) new_value.append(v) condition = sqlbuilder.AND(*[getattr(cls.q, n)==v for n,v in zip(name, new_value)]) return (connection or cls._connection)._SO_selectOneAlt( cls, [cls.sqlmeta.idName] + [column.dbName for column in cls.sqlmeta.columnList], condition), None @classmethod def _SO_fetchAlternateID(cls, name, dbName, value, connection=None, idxName=None): result, obj = cls._findAlternateID(name, dbName, value, connection) if not result: if idxName is None: raise SQLObjectNotFound, "The %s by alternateID %s = %s does not exist" % (cls.__name__, name, repr(value)) else: names = [] for i in xrange(len(name)): names.append("%s = %s" % (name[i], repr(value[i]))) names = ', '.join(names) raise SQLObjectNotFound, "The %s by unique index %s(%s) does not exist" % (cls.__name__, idxName, names) if obj: return obj if connection: obj = cls.get(result[0], connection=connection, selectResults=result[1:]) else: obj = cls.get(result[0], selectResults=result[1:]) return obj @classmethod def _SO_depends(cls): return findDependencies(cls.__name__, cls.sqlmeta.registry) @classmethod def select(cls, clause=None, clauseTables=None, orderBy=NoDefault, limit=None, lazyColumns=False, reversed=False, distinct=False, connection=None, join=None, forUpdate=False): return cls.SelectResultsClass(cls, clause, clauseTables=clauseTables, orderBy=orderBy, limit=limit, lazyColumns=lazyColumns, reversed=reversed, distinct=distinct, connection=connection, join=join, forUpdate=forUpdate) @classmethod def selectBy(cls, connection=None, **kw): conn = connection or cls._connection return cls.SelectResultsClass(cls, conn._SO_columnClause(cls, kw), connection=conn) @classmethod def tableExists(cls, connection=None): conn = connection or cls._connection return conn.tableExists(cls.sqlmeta.table) @classmethod def dropTable(cls, ifExists=False, dropJoinTables=True, cascade=False, connection=None): conn = connection or cls._connection if ifExists and not cls.tableExists(connection=conn): return extra_sql = [] post_funcs = [] cls.sqlmeta.send(events.DropTableSignal, cls, connection, extra_sql, post_funcs) conn.dropTable(cls.sqlmeta.table, cascade) if dropJoinTables: cls.dropJoinTables(ifExists=ifExists, connection=conn) for sql in extra_sql: connection.query(sql) for func in post_funcs: func(cls, conn) @classmethod def createTable(cls, ifNotExists=False, createJoinTables=True, createIndexes=True, applyConstraints=True, connection=None): conn = connection or cls._connection if ifNotExists and cls.tableExists(connection=conn): return extra_sql = [] post_funcs = [] cls.sqlmeta.send(events.CreateTableSignal, cls, connection, extra_sql, post_funcs) constraints 
= conn.createTable(cls) if applyConstraints: for constraint in constraints: conn.query(constraint) else: extra_sql.extend(constraints) if createJoinTables: cls.createJoinTables(ifNotExists=ifNotExists, connection=conn) if createIndexes: cls.createIndexes(ifNotExists=ifNotExists, connection=conn) for func in post_funcs: func(cls, conn) return extra_sql @classmethod def createTableSQL(cls, createJoinTables=True, createIndexes=True, connection=None): conn = connection or cls._connection sql, constraints = conn.createTableSQL(cls) if createJoinTables: join_sql = cls.createJoinTablesSQL(connection=conn) if join_sql: sql += ';\n' + join_sql if createIndexes: index_sql = cls.createIndexesSQL(connection=conn) if index_sql: sql += ';\n' + index_sql return sql, constraints @classmethod def createJoinTables(cls, ifNotExists=False, connection=None): conn = connection or cls._connection for join in cls._getJoinsToCreate(): if (ifNotExists and conn.tableExists(join.intermediateTable)): continue conn._SO_createJoinTable(join) @classmethod def createJoinTablesSQL(cls, connection=None): conn = connection or cls._connection sql = [] for join in cls._getJoinsToCreate(): sql.append(conn._SO_createJoinTableSQL(join)) return ';\n'.join(sql) @classmethod def createIndexes(cls, ifNotExists=False, connection=None): conn = connection or cls._connection for index in cls.sqlmeta.indexes: if not index: continue conn._SO_createIndex(cls, index) @classmethod def createIndexesSQL(cls, connection=None): conn = connection or cls._connection sql = [] for index in cls.sqlmeta.indexes: if not index: continue sql.append(conn.createIndexSQL(cls, index)) return ';\n'.join(sql) @classmethod def _getJoinsToCreate(cls): joins = [] for join in cls.sqlmeta.joins: if not join: continue if not join.hasIntermediateTable() or not getattr(join, 'createRelatedTable', True): continue if join.soClass.__name__ > join.otherClass.__name__: continue joins.append(join) return joins @classmethod def dropJoinTables(cls, ifExists=False, connection=None): conn = connection or cls._connection for join in cls.sqlmeta.joins: if not join: continue if not join.hasIntermediateTable() or not getattr(join, 'createRelatedTable', True): continue if join.soClass.__name__ > join.otherClass.__name__: continue if ifExists and \ not conn.tableExists(join.intermediateTable): continue conn._SO_dropJoinTable(join) @classmethod def clearTable(cls, connection=None, clearJoinTables=True): # 3-03 @@: Maybe this should check the cache... but it's # kind of crude anyway, so... conn = connection or cls._connection conn.clearTable(cls.sqlmeta.table) if clearJoinTables: for join in cls._getJoinsToCreate(): conn.clearTable(join.intermediateTable) def destroySelf(self): post_funcs = [] self.sqlmeta.send(events.RowDestroySignal, self, post_funcs) # Kills this object. Kills it dead! 
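        # (Summary of the dependency handling below: rows in related-join
        #  intermediate tables are deleted first; then, for every class with
        #  a ForeignKey pointing at this one, cascade=True deletes the
        #  dependent rows, cascade='null' sets the reference to NULL, and
        #  cascade=False raises SQLObjectIntegrityError if dependents exist.)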
klass = self.__class__ # Free related joins on the base class for join in klass.sqlmeta.joins: if isinstance(join, joins.SORelatedJoin): q = "DELETE FROM %s WHERE %s=%d" % (join.intermediateTable, join.joinColumn, self.id) self._connection.query(q) depends = [] depends = self._SO_depends() for k in depends: # Free related joins for join in k.sqlmeta.joins: if isinstance(join, joins.SORelatedJoin) and join.otherClassName == klass.__name__: q = "DELETE FROM %s WHERE %s=%d" % (join.intermediateTable, join.otherColumn, self.id) self._connection.query(q) cols = findDependantColumns(klass.__name__, k) # Don't confuse the rest of the process if len(cols) == 0: continue query = [] delete = setnull = restrict = False for col in cols: if col.cascade == False: # Found a restriction restrict = True query.append(getattr(k.q, col.name) == self.id) if col.cascade == 'null': setnull = col.name elif col.cascade: delete = True assert delete or setnull or restrict, ( "Class %s depends on %s accoriding to " "findDependantColumns, but this seems inaccurate" % (k, klass)) query = sqlbuilder.OR(*query) results = k.select(query, connection=self._connection) if restrict: if results.count(): # Restrictions only apply if there are # matching records on the related table raise SQLObjectIntegrityError, ( "Tried to delete %s::%s but " "table %s has a restriction against it" % (klass.__name__, self.id, k.__name__)) else: for row in results: if delete: row.destroySelf() else: row.set(**{setnull: None}) self.sqlmeta._obsolete = True self._connection._SO_delete(self) self._connection.cache.expire(self.id, self.__class__) for func in post_funcs: func(self) post_funcs = [] self.sqlmeta.send(events.RowDestroyedSignal, self, post_funcs) for func in post_funcs: func(self) @classmethod def delete(cls, id, connection=None): obj = cls.get(id, connection=connection) obj.destroySelf() @classmethod def deleteMany(cls, where=NoDefault, connection=None): conn = connection or cls._connection conn.query(conn.sqlrepr(sqlbuilder.Delete(cls.sqlmeta.table, where))) @classmethod def deleteBy(cls, connection=None, **kw): conn = connection or cls._connection conn.query(conn.sqlrepr(sqlbuilder.Delete(cls.sqlmeta.table, conn._SO_columnClause(cls, kw)))) def __repr__(self): if not hasattr(self, 'id'): # Object initialization not finished. No attributes can be read. return '<%s (not initialized)>' % self.__class__.__name__ return '<%s %r %s>' \ % (self.__class__.__name__, self.id, ' '.join(['%s=%s' % (name, repr(value)) for name, value in self._reprItems()])) def __sqlrepr__(self, db): return str(self.id) @classmethod def sqlrepr(cls, value, connection=None): return (connection or cls._connection).sqlrepr(value) @classmethod def coerceID(cls, value): if isinstance(value, cls): return value.id else: return cls.sqlmeta.idType(value) def _reprItems(self): items = [] for col in self.sqlmeta.columnList: value = getattr(self, col.name) r = repr(value) if len(r) > 20: value = r[:17] + "..." 
+ r[-1] items.append((col.name, value)) return items @classmethod def setConnection(cls, value): if isinstance(value, basestring): value = dbconnection.connectionForURI(value) cls._connection = value def tablesUsedImmediate(self): return [self.__class__.q] # Comparison def __eq__(self, other): if self.__class__ is other.__class__: if self.id == other.id: return True return False def __ne__(self, other): return not self.__eq__(other) def __lt__(self, other): return NotImplemented def __le__(self, other): return NotImplemented def __gt__(self, other): return NotImplemented def __ge__(self, other): return NotImplemented # (De)serialization (pickle, etc.) def __getstate__(self): if self.sqlmeta._perConnection: from pickle import PicklingError raise PicklingError('Cannot pickle an SQLObject instance that has a per-instance connection') if self.sqlmeta.lazyUpdate and self._SO_createValues: self.syncUpdate() d = self.__dict__.copy() del d['sqlmeta'] del d['_SO_validatorState'] del d['_SO_writeLock'] del d['_SO_createValues'] return d def __setstate__(self, d): self.__init__(_SO_fetch_no_create=1) self._SO_validatorState = sqlbuilder.SQLObjectState(self) self._SO_writeLock = threading.Lock() self._SO_createValues = {} self.__dict__.update(d) cls = self.__class__ cache = self._connection.cache if cache.tryGet(self.id, cls) is not None: raise ValueError( "Cannot unpickle %s row with id=%s - a different instance with the id already exists in the cache" % (cls.__name__, self.id)) cache.created(self.id, cls, self) def setterName(name): return '_set_%s' % name def rawSetterName(name): return '_SO_set_%s' % name def getterName(name): return '_get_%s' % name def rawGetterName(name): return '_SO_get_%s' % name def instanceName(name): return '_SO_val_%s' % name ######################################## ## Utility functions (for external consumption) ######################################## def getID(obj, refColumn=None): if isinstance(obj, SQLObject): return getattr(obj, refColumn or 'id') elif isinstance(obj, int): return obj elif isinstance(obj, long): return int(obj) elif isinstance(obj, str): try: return int(obj) except ValueError: return obj elif obj is None: return None def getObject(obj, klass): if isinstance(obj, int): return klass(obj) elif isinstance(obj, long): return klass(int(obj)) elif isinstance(obj, str): return klass(int(obj)) elif obj is None: return None else: return obj __all__ = ['NoDefault', 'SQLObject', 'SQLObjectIntegrityError', 'SQLObjectNotFound', 'getID', 'getObject', 'sqlhub', 'sqlmeta', ] SQLObject-1.5.2/sqlobject/boundattributes.py0000644000175000017500000001117211561544577020454 0ustar phdphd00000000000000""" Bound attributes are attributes that are bound to a specific class and a specific name. In SQLObject a typical example is a column object, which knows its name and class. A bound attribute should define a method ``__addtoclass__(added_class, name)`` (attributes without this method will simply be treated as normal). The return value is ignored; if the attribute wishes to change the value in the class, it must call ``setattr(added_class, name, new_value)``. BoundAttribute is a class that facilitates lazy attribute creation. ``bind_attributes(cls, new_attrs)`` is a function that looks for attributes with this special method. ``new_attrs`` is a dictionary, as typically passed into ``__classinit__`` with declarative (calling ``bind_attributes`` in ``__classinit__`` would be typical). Note if you do this that attributes defined in a superclass will not be rebound in subclasses. 
If you want to rebind attributes in subclasses, use ``bind_attributes_local``, which adds a ``__bound_attributes__`` variable to your class to track these active attributes. """ __all__ = ['BoundAttribute', 'BoundFactory', 'bind_attributes', 'bind_attributes_local'] import declarative import events class BoundAttribute(declarative.Declarative): """ This is a declarative class that passes all the values given to it to another object. So you can pass it arguments (via __init__/__call__) or give it the equivalent of keyword arguments through subclassing. Then a bound object will be added in its place. To hook this other object in, override ``make_object(added_class, name, **attrs)`` and maybe ``set_object(added_class, name, **attrs)`` (the default implementation of ``set_object`` just resets the attribute to whatever ``make_object`` returned). Also see ``BoundFactory``. """ _private_variables = ( '_private_variables', '_all_attributes', '__classinit__', '__addtoclass__', '_add_attrs', 'set_object', 'make_object', 'clone_in_subclass', ) _all_attrs = () clone_for_subclass = True def __classinit__(cls, new_attrs): declarative.Declarative.__classinit__(cls, new_attrs) cls._all_attrs = cls._add_attrs(cls, new_attrs) def __instanceinit__(self, new_attrs): declarative.Declarative.__instanceinit__(self, new_attrs) self.__dict__['_all_attrs'] = self._add_attrs(self, new_attrs) @staticmethod def _add_attrs(this_object, new_attrs): private = this_object._private_variables all_attrs = list(this_object._all_attrs) for key in new_attrs.keys(): if key.startswith('_') or key in private: continue if key not in all_attrs: all_attrs.append(key) return tuple(all_attrs) @declarative.classinstancemethod def __addtoclass__(self, cls, added_class, attr_name): me = self or cls attrs = {} for name in me._all_attrs: attrs[name] = getattr(me, name) attrs['added_class'] = added_class attrs['attr_name'] = attr_name obj = me.make_object(**attrs) if self.clone_for_subclass: def on_rebind(new_class_name, bases, new_attrs, post_funcs, early_funcs): def rebind(new_class): me.set_object( new_class, attr_name, me.make_object(**attrs)) post_funcs.append(rebind) events.listen(receiver=on_rebind, soClass=added_class, signal=events.ClassCreateSignal, weak=False) me.set_object(added_class, attr_name, obj) @classmethod def set_object(cls, added_class, attr_name, obj): setattr(added_class, attr_name, obj) @classmethod def make_object(cls, added_class, attr_name, *args, **attrs): raise NotImplementedError def __setattr__(self, name, value): self.__dict__['_all_attrs'] = self._add_attrs(self, {name: value}) self.__dict__[name] = value class BoundFactory(BoundAttribute): """ This will bind the attribute to whatever is given by ``factory_class``. This factory should be a callable with the signature ``factory_class(added_class, attr_name, *args, **kw)``. The factory will be reinvoked (and the attribute rebound) for every subclassing. """ factory_class = None _private_variables = ( BoundAttribute._private_variables + ('factory_class',)) def make_object(cls, added_class, attr_name, *args, **kw): return cls.factory_class(added_class, attr_name, *args, **kw) SQLObject-1.5.2/sqlobject/col.py0000644000175000017500000015331112204667354016007 0ustar phdphd00000000000000""" Col -- SQLObject columns Note that each column object is named BlahBlahCol, and these are used in class definitions. But there's also a corresponding SOBlahBlahCol object, which is used in SQLObject *classes*. 
An explanation: when a SQLObject subclass is created, the metaclass looks through your class definition for any subclasses of Col. It collects them together, and indexes them to do all the database stuff you like, like the magic attributes and whatnot. It then asks the Col object to create an SOCol object (usually a subclass, actually). The SOCol object contains all the interesting logic, as well as a record of the attribute name you used and the class it is bound to (set by the metaclass). So, in summary: Col objects are what you define, but SOCol objects are what gets used. """ from array import array from itertools import count import re, time try: import cPickle as pickle except ImportError: import pickle import weakref from formencode import compound, validators from classregistry import findClass # Sadly the name "constraints" conflicts with many of the function # arguments in this module, so we rename it: import constraints as constrs import sqlbuilder from styles import capword NoDefault = sqlbuilder.NoDefault import datetime datetime_available = True try: from mx import DateTime except ImportError: try: import DateTime # old version of mxDateTime, or Zope's Version if we're running with Zope except ImportError: mxdatetime_available = False else: mxdatetime_available = True else: mxdatetime_available = True DATETIME_IMPLEMENTATION = "datetime" MXDATETIME_IMPLEMENTATION = "mxDateTime" if mxdatetime_available: if hasattr(DateTime, "Time"): DateTimeType = type(DateTime.now()) TimeType = type(DateTime.Time()) else: # Zope DateTimeType = type(DateTime.DateTime()) TimeType = type(DateTime.DateTime.Time(DateTime.DateTime())) default_datetime_implementation = DATETIME_IMPLEMENTATION __all__ = ["datetime_available", "mxdatetime_available", "default_datetime_implementation", "DATETIME_IMPLEMENTATION"] if mxdatetime_available: __all__.append("MXDATETIME_IMPLEMENTATION") creationOrder = count() ######################################## ## Columns ######################################## # Col is essentially a column definition, it doesn't have # much logic to it. class SOCol(object): def __init__(self, name, soClass, creationOrder, dbName=None, default=NoDefault, defaultSQL=None, foreignKey=None, alternateID=False, alternateMethodName=None, constraints=None, notNull=NoDefault, notNone=NoDefault, unique=NoDefault, sqlType=None, columnDef=None, validator=None, validator2=None, immutable=False, cascade=None, lazy=False, noCache=False, forceDBName=False, title=None, tags=[], origName=None, dbEncoding=None, extra_vars=None): super(SOCol, self).__init__() # This isn't strictly true, since we *could* use backquotes or # " or something (database-specific) around column names, but # why would anyone *want* to use a name like that? # @@: I suppose we could actually add backquotes to the # dbName if we needed to... if not forceDBName: assert sqlbuilder.sqlIdentifier(name), 'Name must be SQL-safe (letters, numbers, underscores): %s (or use forceDBName=True)' \ % repr(name) assert name != 'id', 'The column name "id" is reserved for SQLObject use (and is implicitly created).' 
assert name, "You must provide a name for all columns" self.columnDef = columnDef self.creationOrder = creationOrder self.immutable = immutable # cascade can be one of: # None: no constraint is generated # True: a CASCADE constraint is generated # False: a RESTRICT constraint is generated # 'null': a SET NULL trigger is generated if isinstance(cascade, str): assert cascade == 'null', ( "The only string value allowed for cascade is 'null' (you gave: %r)" % cascade) self.cascade = cascade if not isinstance(constraints, (list, tuple)): constraints = [constraints] self.constraints = self.autoConstraints() + constraints self.notNone = False if notNull is not NoDefault: self.notNone = notNull assert notNone is NoDefault or \ (not notNone) == (not notNull), \ "The notNull and notNone arguments are aliases, and must not conflict. You gave notNull=%r, notNone=%r" % (notNull, notNone) elif notNone is not NoDefault: self.notNone = notNone if self.notNone: self.constraints = [constrs.notNull] + self.constraints self.name = name self.soClass = soClass self._default = default self.defaultSQL = defaultSQL self.customSQLType = sqlType # deal with foreign keys self.foreignKey = foreignKey if self.foreignKey: if origName is not None: idname = soClass.sqlmeta.style.instanceAttrToIDAttr(origName) else: idname = soClass.sqlmeta.style.instanceAttrToIDAttr(name) if self.name != idname: self.foreignName = self.name self.name = idname else: self.foreignName = soClass.sqlmeta.style.instanceIDAttrToAttr(self.name) else: self.foreignName = None # if they don't give us a specific database name for # the column, we separate the mixedCase into mixed_case # and assume that. if dbName is None: self.dbName = soClass.sqlmeta.style.pythonAttrToDBColumn(self.name) else: self.dbName = dbName # alternateID means that this is a unique column that # can be used to identify rows self.alternateID = alternateID if unique is NoDefault: self.unique = alternateID else: self.unique = unique if self.unique and alternateMethodName is None: self.alternateMethodName = 'by' + capword(self.name) else: self.alternateMethodName = alternateMethodName _validators = self.createValidators() if validator: _validators.append(validator) if validator2: _validators.insert(0, validator2) _vlen = len(_validators) if _vlen: for _validator in _validators: _validator.soCol=weakref.proxy(self) if _vlen == 0: self.validator = None # Set sef.{from,to}_python elif _vlen == 1: self.validator = _validators[0] elif _vlen > 1: self.validator = compound.All.join(_validators[0], *_validators[1:]) self.noCache = noCache self.lazy = lazy # this is in case of ForeignKey, where we rename the column # and append an ID self.origName = origName or name self.title = title self.tags = tags self.dbEncoding = dbEncoding if extra_vars: for name, value in extra_vars.items(): setattr(self, name, value) def _set_validator(self, value): self._validator = value if self._validator: self.to_python = self._validator.to_python self.from_python = self._validator.from_python else: self.to_python = None self.from_python = None def _get_validator(self): return self._validator validator = property(_get_validator, _set_validator) def createValidators(self): """Create a list of validators for the column.""" return [] def autoConstraints(self): return [] def _get_default(self): # A default can be a callback or a plain value, # here we resolve the callback if self._default is NoDefault: return NoDefault elif hasattr(self._default, '__sqlrepr__'): return self._default elif callable(self._default): 
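            # (a callable default -- e.g., for a hypothetical column,
            #      createdOn = DateTimeCol(default=datetime.datetime.now)
            #  -- is resolved lazily, here, each time the default is needed,
            #  not once at class-definition time)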
return self._default() else: return self._default default = property(_get_default, None, None) def _get_joinName(self): return self.soClass.sqlmeta.style.instanceIDAttrToAttr(self.name) joinName = property(_get_joinName, None, None) def __repr__(self): r = '<%s %s' % (self.__class__.__name__, self.name) if self.default is not NoDefault: r += ' default=%s' % repr(self.default) if self.foreignKey: r += ' connected to %s' % self.foreignKey if self.alternateID: r += ' alternate ID' if self.notNone: r += ' not null' return r + '>' def createSQL(self): return ' '.join([self._sqlType()] + self._extraSQL()) def _extraSQL(self): result = [] if self.notNone or self.alternateID: result.append('NOT NULL') if self.unique or self.alternateID: result.append('UNIQUE') if self.defaultSQL is not None: result.append("DEFAULT %s" % self.defaultSQL) return result def _sqlType(self): if self.customSQLType is None: raise ValueError, ("Col %s (%s) cannot be used for automatic " "schema creation (too abstract)" % (self.name, self.__class__)) else: return self.customSQLType def _mysqlType(self): return self._sqlType() def _postgresType(self): return self._sqlType() def _sqliteType(self): # SQLite is naturally typeless, so as a fallback it uses # no type. try: return self._sqlType() except ValueError: return '' def _sybaseType(self): return self._sqlType() def _mssqlType(self): return self._sqlType() def _firebirdType(self): return self._sqlType() def _maxdbType(self): return self._sqlType() def mysqlCreateSQL(self): return ' '.join([self.dbName, self._mysqlType()] + self._extraSQL()) def postgresCreateSQL(self): return ' '.join([self.dbName, self._postgresType()] + self._extraSQL()) def sqliteCreateSQL(self): return ' '.join([self.dbName, self._sqliteType()] + self._extraSQL()) def sybaseCreateSQL(self): return ' '.join([self.dbName, self._sybaseType()] + self._extraSQL()) def mssqlCreateSQL(self, connection=None): self.connection = connection return ' '.join([self.dbName, self._mssqlType()] + self._extraSQL()) def firebirdCreateSQL(self): # Ian Sparks pointed out that fb is picky about the order # of the NOT NULL clause in a create statement. So, we handle # them differently for Enum columns. 
if not isinstance(self, SOEnumCol): return ' '.join([self.dbName, self._firebirdType()] + self._extraSQL()) else: return ' '.join([self.dbName] + [self._firebirdType()[0]] + self._extraSQL() + [self._firebirdType()[1]]) def maxdbCreateSQL(self): return ' '.join([self.dbName, self._maxdbType()] + self._extraSQL()) def __get__(self, obj, type=None): if obj is None: # class attribute, return the descriptor itself return self if obj.sqlmeta._obsolete: raise RuntimeError('The object <%s %s> is obsolete' % ( obj.__class__.__name__, obj.id)) if obj.sqlmeta.cacheColumns: columns = obj.sqlmeta._columnCache if columns is None: obj.sqlmeta.loadValues() try: return columns[name] except KeyError: return obj.sqlmeta.loadColumn(self) else: return obj.sqlmeta.loadColumn(self) def __set__(self, obj, value): if self.immutable: raise AttributeError("The column %s.%s is immutable" % (obj.__class__.__name__, self.name)) obj.sqlmeta.setColumn(self, value) def __delete__(self, obj): raise AttributeError("I can't be deleted from %r" % obj) def getDbEncoding(self, state, default='utf-8'): if self.dbEncoding: return self.dbEncoding dbEncoding = state.soObject.sqlmeta.dbEncoding if dbEncoding: return dbEncoding try: connection = state.connection or state.soObject._connection except AttributeError: dbEncoding = None else: dbEncoding = getattr(connection, "dbEncoding", None) if not dbEncoding: dbEncoding = default return dbEncoding class Col(object): baseClass = SOCol def __init__(self, name=None, **kw): super(Col, self).__init__() self.__dict__['_name'] = name self.__dict__['_kw'] = kw self.__dict__['creationOrder'] = creationOrder.next() self.__dict__['_extra_vars'] = {} def _set_name(self, value): assert self._name is None or self._name == value, ( "You cannot change a name after it has already been set " "(from %s to %s)" % (self.name, value)) self.__dict__['_name'] = value def _get_name(self): return self._name name = property(_get_name, _set_name) def withClass(self, soClass): return self.baseClass(soClass=soClass, name=self._name, creationOrder=self.creationOrder, columnDef=self, extra_vars=self._extra_vars, **self._kw) def __setattr__(self, var, value): if var == 'name': super(Col, self).__setattr__(var, value) return self._extra_vars[var] = value def __repr__(self): return '<%s %s %s>' % ( self.__class__.__name__, hex(abs(id(self)))[2:], self._name or '(unnamed)') class SOValidator(validators.Validator): def getDbEncoding(self, state, default='utf-8'): try: return self.dbEncoding except AttributeError: return self.soCol.getDbEncoding(state, default=default) class SOStringLikeCol(SOCol): """A common ancestor for SOStringCol and SOUnicodeCol""" def __init__(self, **kw): self.length = kw.pop('length', None) self.varchar = kw.pop('varchar', 'auto') self.char_binary = kw.pop('char_binary', None) # A hack for MySQL if not self.length: assert self.varchar == 'auto' or not self.varchar, \ "Without a length strings are treated as TEXT, not varchar" self.varchar = False elif self.varchar == 'auto': self.varchar = True super(SOStringLikeCol, self).__init__(**kw) def autoConstraints(self): constraints = [constrs.isString] if self.length is not None: constraints += [constrs.MaxLength(self.length)] return constraints def _sqlType(self): if self.customSQLType is not None: return self.customSQLType if not self.length: return 'TEXT' elif self.varchar: return 'VARCHAR(%i)' % self.length else: return 'CHAR(%i)' % self.length def _check_case_sensitive(self, db): if self.char_binary: raise ValueError, "%s does not support binary 
character columns" % db def _mysqlType(self): type = self._sqlType() if self.char_binary: type += " BINARY" return type def _postgresType(self): self._check_case_sensitive("PostgreSQL") return super(SOStringLikeCol, self)._postgresType() def _sqliteType(self): self._check_case_sensitive("SQLite") return super(SOStringLikeCol, self)._sqliteType() def _sybaseType(self): self._check_case_sensitive("SYBASE") type = self._sqlType() if not self.notNone and not self.alternateID: type += ' NULL' return type def _mssqlType(self): if self.customSQLType is not None: return self.customSQLType if not self.length: if self.connection and self.connection.can_use_max_types(): type = 'VARCHAR(MAX)' else: type = 'VARCHAR(4000)' elif self.varchar: type = 'VARCHAR(%i)' % self.length else: type = 'CHAR(%i)' % self.length if not self.notNone and not self.alternateID: type += ' NULL' return type def _firebirdType(self): self._check_case_sensitive("FireBird") if not self.length: return 'BLOB SUB_TYPE TEXT' else: return self._sqlType() def _maxdbType(self): self._check_case_sensitive("SAP DB/MaxDB") if not self.length: return 'LONG ASCII' else: return self._sqlType() class StringValidator(SOValidator): def to_python(self, value, state): if value is None: return None try: connection = state.connection or state.soObject._connection binaryType = connection._binaryType except AttributeError: binaryType = type(None) # Just a simple workaround dbEncoding = self.getDbEncoding(state, default='ascii') if isinstance(value, unicode): return value.encode(dbEncoding) if self.dataType and isinstance(value, self.dataType): return value if isinstance(value, (str, buffer, binaryType, sqlbuilder.SQLExpression)): return value if hasattr(value, '__unicode__'): return unicode(value).encode(dbEncoding) raise validators.Invalid("expected a str in the StringCol '%s', got %s %r instead" % \ (self.name, type(value), value), value, state) from_python = to_python class SOStringCol(SOStringLikeCol): def createValidators(self, dataType=None): return [StringValidator(name=self.name, dataType=dataType)] + \ super(SOStringCol, self).createValidators() class StringCol(Col): baseClass = SOStringCol class NQuoted(sqlbuilder.SQLExpression): def __init__(self, value): assert isinstance(value, unicode) self.value = value def __hash__(self): return hash(self.value) def __sqlrepr__(self, db): assert db == 'mssql' return "N" + sqlbuilder.sqlrepr(self.value, db) class UnicodeStringValidator(SOValidator): def to_python(self, value, state): if value is None: return None if isinstance(value, (unicode, sqlbuilder.SQLExpression)): return value if isinstance(value, str): return unicode(value, self.getDbEncoding(state)) if isinstance(value, array): # MySQL return unicode(value.tostring(), self.getDbEncoding(state)) if hasattr(value, '__unicode__'): return unicode(value) raise validators.Invalid("expected a str or a unicode in the UnicodeCol '%s', got %s %r instead" % \ (self.name, type(value), value), value, state) def from_python(self, value, state): if value is None: return None if isinstance(value, (str, sqlbuilder.SQLExpression)): return value if isinstance(value, unicode): try: connection = state.connection or state.soObject._connection except AttributeError: pass else: if connection.dbName == 'mssql': return NQuoted(value) return value.encode(self.getDbEncoding(state)) if hasattr(value, '__unicode__'): return unicode(value).encode(self.getDbEncoding(state)) raise validators.Invalid("expected a str or a unicode in the UnicodeCol '%s', got %s %r instead" % \ 
(self.name, type(value), value), value, state) class SOUnicodeCol(SOStringLikeCol): def _mssqlType(self): if self.customSQLType is not None: return self.customSQLType return 'N' + super(SOUnicodeCol, self)._mssqlType() def createValidators(self): return [UnicodeStringValidator(name=self.name)] + \ super(SOUnicodeCol, self).createValidators() class UnicodeCol(Col): baseClass = SOUnicodeCol class IntValidator(SOValidator): def to_python(self, value, state): if value is None: return None if isinstance(value, (int, long, sqlbuilder.SQLExpression)): return value for converter, attr_name in (int, '__int__'), (long, '__long__'): if hasattr(value, attr_name): try: return converter(value) except: break raise validators.Invalid("expected an int in the IntCol '%s', got %s %r instead" % \ (self.name, type(value), value), value, state) from_python = to_python class SOIntCol(SOCol): # 3-03 @@: support precision, maybe max and min directly def __init__(self, **kw): self.length = kw.pop('length', None) self.unsigned = bool(kw.pop('unsigned', None)) self.zerofill = bool(kw.pop('zerofill', None)) SOCol.__init__(self, **kw) def autoConstraints(self): return [constrs.isInt] def createValidators(self): return [IntValidator(name=self.name)] + \ super(SOIntCol, self).createValidators() def addSQLAttrs(self, str): _ret = str if str is None or len(str) < 1: return None if self.length >= 1: _ret = "%s(%d)" % (_ret, self.length) if self.unsigned: _ret = _ret + " UNSIGNED" if self.zerofill: _ret = _ret + " ZEROFILL" return _ret def _sqlType(self): return self.addSQLAttrs("INT") class IntCol(Col): baseClass = SOIntCol class SOTinyIntCol(SOIntCol): def _sqlType(self): return self.addSQLAttrs("TINYINT") class TinyIntCol(Col): baseClass = SOTinyIntCol class SOSmallIntCol(SOIntCol): def _sqlType(self): return self.addSQLAttrs("SMALLINT") class SmallIntCol(Col): baseClass = SOSmallIntCol class SOMediumIntCol(SOIntCol): def _sqlType(self): return self.addSQLAttrs("MEDIUMINT") class MediumIntCol(Col): baseClass = SOMediumIntCol class SOBigIntCol(SOIntCol): def _sqlType(self): return self.addSQLAttrs("BIGINT") class BigIntCol(Col): baseClass = SOBigIntCol class BoolValidator(SOValidator): def to_python(self, value, state): if value is None: return None if isinstance(value, (bool, sqlbuilder.SQLExpression)): return value if isinstance(value, (int, long)) or hasattr(value, '__nonzero__'): return bool(value) raise validators.Invalid("expected a bool or an int in the BoolCol '%s', got %s %r instead" % \ (self.name, type(value), value), value, state) from_python = to_python class SOBoolCol(SOCol): def autoConstraints(self): return [constrs.isBool] def createValidators(self): return [BoolValidator(name=self.name)] + \ super(SOBoolCol, self).createValidators() def _postgresType(self): return 'BOOL' def _mysqlType(self): return "BOOL" def _sybaseType(self): return "BIT" def _mssqlType(self): return "BIT" def _firebirdType(self): return 'INT' def _maxdbType(self): return "BOOLEAN" def _sqliteType(self): return "BOOLEAN" class BoolCol(Col): baseClass = SOBoolCol class FloatValidator(SOValidator): def to_python(self, value, state): if value is None: return None if isinstance(value, (float, int, long, sqlbuilder.SQLExpression)): return value for converter, attr_name in (float, '__float__'), (int, '__int__'), (long, '__long__'): if hasattr(value, attr_name): try: return converter(value) except: break raise validators.Invalid("expected a float in the FloatCol '%s', got %s %r instead" % \ (self.name, type(value), value), value, state) 
from_python = to_python class SOFloatCol(SOCol): # 3-03 @@: support precision (e.g., DECIMAL) def autoConstraints(self): return [constrs.isFloat] def createValidators(self): return [FloatValidator(name=self.name)] + \ super(SOFloatCol, self).createValidators() def _sqlType(self): return 'FLOAT' def _mysqlType(self): return "DOUBLE PRECISION" class FloatCol(Col): baseClass = SOFloatCol class SOKeyCol(SOCol): key_type = {int: "INT", str: "TEXT"} # 3-03 @@: this should have a simplified constructor # Should provide foreign key information for other DBs. def __init__(self, **kw): self.refColumn = kw.pop('refColumn', None) super(SOKeyCol, self).__init__(**kw) def _sqlType(self): return self.key_type[self.soClass.sqlmeta.idType] def _sybaseType(self): key_type = {int: "NUMERIC(18,0) NULL", str: "TEXT"} return key_type[self.soClass.sqlmeta.idType] def _mssqlType(self): key_type = {int: "INT NULL", str: "TEXT"} return key_type[self.soClass.sqlmeta.idType] class KeyCol(Col): baseClass = SOKeyCol class SOForeignKey(SOKeyCol): def __init__(self, **kw): foreignKey = kw['foreignKey'] style = kw['soClass'].sqlmeta.style if kw.get('name'): kw['origName'] = kw['name'] kw['name'] = style.instanceAttrToIDAttr(kw['name']) else: kw['name'] = style.instanceAttrToIDAttr(style.pythonClassToAttr(foreignKey)) super(SOForeignKey, self).__init__(**kw) def sqliteCreateSQL(self): sql = SOKeyCol.sqliteCreateSQL(self) other = findClass(self.foreignKey, self.soClass.sqlmeta.registry) tName = other.sqlmeta.table idName = self.refColumn or other.sqlmeta.idName if self.cascade is not None: if self.cascade == 'null': action = 'ON DELETE SET NULL' elif self.cascade: action = 'ON DELETE CASCADE' else: action = 'ON DELETE RESTRICT' else: action = '' constraint = ('CONSTRAINT %(colName)s_exists ' #'FOREIGN KEY(%(colName)s) ' 'REFERENCES %(tName)s(%(idName)s) ' '%(action)s' % {'tName': tName, 'colName': self.dbName, 'idName': idName, 'action': action}) sql = ' '.join([sql, constraint]) return sql def postgresCreateSQL(self): sql = SOKeyCol.postgresCreateSQL(self) return sql def postgresCreateReferenceConstraint(self): sTName = self.soClass.sqlmeta.table other = findClass(self.foreignKey, self.soClass.sqlmeta.registry) tName = other.sqlmeta.table idName = self.refColumn or other.sqlmeta.idName if self.cascade is not None: if self.cascade == 'null': action = 'ON DELETE SET NULL' elif self.cascade: action = 'ON DELETE CASCADE' else: action = 'ON DELETE RESTRICT' else: action = '' constraint = ('ALTER TABLE %(sTName)s ADD CONSTRAINT %(colName)s_exists ' 'FOREIGN KEY (%(colName)s) ' 'REFERENCES %(tName)s (%(idName)s) ' '%(action)s' % {'tName': tName, 'colName': self.dbName, 'idName': idName, 'action': action, 'sTName': sTName}) return constraint def mysqlCreateReferenceConstraint(self): sTName = self.soClass.sqlmeta.table sTLocalName = sTName.split('.')[-1] other = findClass(self.foreignKey, self.soClass.sqlmeta.registry) tName = other.sqlmeta.table idName = self.refColumn or other.sqlmeta.idName if self.cascade is not None: if self.cascade == 'null': action = 'ON DELETE SET NULL' elif self.cascade: action = 'ON DELETE CASCADE' else: action = 'ON DELETE RESTRICT' else: action = '' constraint = ('ALTER TABLE %(sTName)s ADD CONSTRAINT %(sTLocalName)s_%(colName)s_exists ' 'FOREIGN KEY (%(colName)s) ' 'REFERENCES %(tName)s (%(idName)s) ' '%(action)s' % {'tName': tName, 'colName': self.dbName, 'idName': idName, 'action': action, 'sTName': sTName, 'sTLocalName': sTLocalName}) return constraint def mysqlCreateSQL(self): return 
SOKeyCol.mysqlCreateSQL(self) def sybaseCreateSQL(self): sql = SOKeyCol.sybaseCreateSQL(self) other = findClass(self.foreignKey, self.soClass.sqlmeta.registry) tName = other.sqlmeta.table idName = self.refColumn or other.sqlmeta.idName reference = ('REFERENCES %(tName)s(%(idName)s) ' % {'tName':tName, 'idName':idName}) sql = ' '.join([sql, reference]) return sql def sybaseCreateReferenceConstraint(self): # @@: Code from above should be moved here return None def mssqlCreateSQL(self, connection=None): sql = SOKeyCol.mssqlCreateSQL(self, connection) other = findClass(self.foreignKey, self.soClass.sqlmeta.registry) tName = other.sqlmeta.table idName = self.refColumn or other.sqlmeta.idName reference = ('REFERENCES %(tName)s(%(idName)s) ' % {'tName':tName, 'idName':idName}) sql = ' '.join([sql, reference]) return sql def mssqlCreateReferenceConstraint(self): # @@: Code from above should be moved here return None def maxdbCreateSQL(self): other = findClass(self.foreignKey, self.soClass.sqlmeta.registry) fidName = self.dbName #I assume that foreign key name is identical to the id of the reference table sql = ' '.join([fidName, self._maxdbType()]) tName = other.sqlmeta.table idName = self.refColumn or other.sqlmeta.idName sql=sql + ',' + '\n' sql=sql + 'FOREIGN KEY (%s) REFERENCES %s(%s)'%(fidName,tName,idName) return sql def maxdbCreateReferenceConstraint(self): # @@: Code from above should be moved here return None class ForeignKey(KeyCol): baseClass = SOForeignKey def __init__(self, foreignKey=None, **kw): super(ForeignKey, self).__init__(foreignKey=foreignKey, **kw) class EnumValidator(SOValidator): def to_python(self, value, state): if value in self.enumValues: if isinstance(value, unicode): dbEncoding = self.getDbEncoding(state) value = value.encode(dbEncoding) return value elif not self.notNone and value is None: return None raise validators.Invalid("expected a member of %r in the EnumCol '%s', got %r instead" % \ (self.enumValues, self.name, value), value, state) from_python = to_python class SOEnumCol(SOCol): def __init__(self, **kw): self.enumValues = kw.pop('enumValues', None) assert self.enumValues is not None, \ 'You must provide an enumValues keyword argument' super(SOEnumCol, self).__init__(**kw) def autoConstraints(self): return [constrs.isString, constrs.InList(self.enumValues)] def createValidators(self): return [EnumValidator(name=self.name, enumValues=self.enumValues, notNone=self.notNone)] + \ super(SOEnumCol, self).createValidators() def _mysqlType(self): # We need to map None in the enum expression to an appropriate # condition on NULL if None in self.enumValues: return "ENUM(%s)" % ', '.join([sqlbuilder.sqlrepr(v, 'mysql') for v in self.enumValues if v is not None]) else: return "ENUM(%s) NOT NULL" % ', '.join([sqlbuilder.sqlrepr(v, 'mysql') for v in self.enumValues]) def _postgresType(self): length = max(map(self._getlength, self.enumValues)) enumValues = ', '.join([sqlbuilder.sqlrepr(v, 'postgres') for v in self.enumValues]) checkConstraint = "CHECK (%s in (%s))" % (self.dbName, enumValues) return "VARCHAR(%i) %s" % (length, checkConstraint) _sqliteType = _postgresType def _sybaseType(self): return self._postgresType() def _mssqlType(self): return self._postgresType() def _firebirdType(self): length = max(map(self._getlength, self.enumValues)) enumValues = ', '.join([sqlbuilder.sqlrepr(v, 'firebird') for v in self.enumValues]) checkConstraint = "CHECK (%s in (%s))" % (self.dbName, enumValues) #NB. 
Return a tuple, not a string here return "VARCHAR(%i)" % (length), checkConstraint def _maxdbType(self): raise TypeError("Enum type is not supported on MAX DB") def _getlength(self, obj): """ None counts as 0; everything else uses len() """ if obj is None: return 0 else: return len(obj) class EnumCol(Col): baseClass = SOEnumCol class SetValidator(SOValidator): """ Translates Python tuples into SQL comma-delimited SET strings. """ def to_python(self, value, state): if isinstance(value, str): return tuple(value.split(",")) raise validators.Invalid("expected a string in the SetCol '%s', got %s %r instead" % \ (self.name, type(value), value), value, state) def from_python(self, value, state): if isinstance(value, basestring): value = (value,) try: return ",".join(value) except: raise validators.Invalid("expected a string or a sequence of stringsin the SetCol '%s', got %s %r instead" % \ (self.name, type(value), value), value, state) class SOSetCol(SOCol): def __init__(self, **kw): self.setValues = kw.pop('setValues', None) assert self.setValues is not None, \ 'You must provide a setValues keyword argument' super(SOSetCol, self).__init__(**kw) def autoConstraints(self): return [constrs.isString, constrs.InList(self.setValues)] def createValidators(self): return [SetValidator(name=self.name, setValues=self.setValues)] + \ super(SOSetCol, self).createValidators() def _mysqlType(self): return "SET(%s)" % ', '.join([sqlbuilder.sqlrepr(v, 'mysql') for v in self.setValues]) class SetCol(Col): baseClass = SOSetCol class DateTimeValidator(validators.DateValidator): def to_python(self, value, state): if value is None: return None if isinstance(value, (datetime.datetime, datetime.date, datetime.time, sqlbuilder.SQLExpression)): return value if mxdatetime_available: if isinstance(value, DateTimeType): # convert mxDateTime instance to datetime if (self.format.find("%H") >= 0) or (self.format.find("%T")) >= 0: return datetime.datetime(value.year, value.month, value.day, value.hour, value.minute, int(value.second)) else: return datetime.date(value.year, value.month, value.day) elif isinstance(value, TimeType): # convert mxTime instance to time if self.format.find("%d") >= 0: return datetime.timedelta(seconds=value.seconds) else: return datetime.time(value.hour, value.minute, int(value.second)) try: stime = time.strptime(value, self.format) except: raise validators.Invalid("expected a date/time string of the '%s' format in the DateTimeCol '%s', got %s %r instead" % \ (self.format, self.name, type(value), value), value, state) return datetime.datetime(*stime[:6]) def from_python(self, value, state): if value is None: return None if isinstance(value, (datetime.datetime, datetime.date, datetime.time, sqlbuilder.SQLExpression)): return value if hasattr(value, "strftime"): return value.strftime(self.format) raise validators.Invalid("expected a datetime in the DateTimeCol '%s', got %s %r instead" % \ (self.name, type(value), value), value, state) if mxdatetime_available: class MXDateTimeValidator(validators.DateValidator): def to_python(self, value, state): if value is None: return None if isinstance(value, (DateTimeType, TimeType, sqlbuilder.SQLExpression)): return value if isinstance(value, datetime.datetime): return DateTime.DateTime(value.year, value.month, value.day, value.hour, value.minute, value.second) elif isinstance(value, datetime.date): return DateTime.Date(value.year, value.month, value.day) elif isinstance(value, datetime.time): return DateTime.Time(value.hour, value.minute, value.second) try: stime 
= time.strptime(value, self.format) except: raise validators.Invalid("expected a date/time string of the '%s' format in the DateTimeCol '%s', got %s %r instead" % \ (self.format, self.name, type(value), value), value, state) return DateTime.mktime(stime) def from_python(self, value, state): if value is None: return None if isinstance(value, (DateTimeType, TimeType, sqlbuilder.SQLExpression)): return value if hasattr(value, "strftime"): return value.strftime(self.format) raise validators.Invalid("expected a mxDateTime in the DateTimeCol '%s', got %s %r instead" % \ (self.name, type(value), value), value, state) class SODateTimeCol(SOCol): datetimeFormat = '%Y-%m-%d %H:%M:%S' def __init__(self, **kw): datetimeFormat = kw.pop('datetimeFormat', None) if datetimeFormat: self.datetimeFormat = datetimeFormat super(SODateTimeCol, self).__init__(**kw) def createValidators(self): _validators = super(SODateTimeCol, self).createValidators() if default_datetime_implementation == DATETIME_IMPLEMENTATION: validatorClass = DateTimeValidator elif default_datetime_implementation == MXDATETIME_IMPLEMENTATION: validatorClass = MXDateTimeValidator if default_datetime_implementation: _validators.insert(0, validatorClass(name=self.name, format=self.datetimeFormat)) return _validators def _mysqlType(self): return 'DATETIME' def _postgresType(self): return 'TIMESTAMP' def _sybaseType(self): return 'DATETIME' def _mssqlType(self): return 'DATETIME' def _sqliteType(self): return 'TIMESTAMP' def _firebirdType(self): return 'TIMESTAMP' def _maxdbType(self): return 'TIMESTAMP' class DateTimeCol(Col): baseClass = SODateTimeCol @staticmethod def now(): if default_datetime_implementation == DATETIME_IMPLEMENTATION: return datetime.datetime.now() elif default_datetime_implementation == MXDATETIME_IMPLEMENTATION: return DateTime.now() else: assert 0, ("No datetime implementation available " "(DATETIME_IMPLEMENTATION=%r)" % DATETIME_IMPLEMENTATION) class DateValidator(DateTimeValidator): def to_python(self, value, state): if isinstance(value, datetime.datetime): value = value.date() if isinstance(value, (datetime.date, sqlbuilder.SQLExpression)): return value value = super(DateValidator, self).to_python(value, state) if isinstance(value, datetime.datetime): value = value.date() return value from_python = to_python class SODateCol(SOCol): dateFormat = '%Y-%m-%d' def __init__(self, **kw): dateFormat = kw.pop('dateFormat', None) if dateFormat: self.dateFormat = dateFormat super(SODateCol, self).__init__(**kw) def createValidators(self): """Create a validator for the column. 
Can be overridden in descendants.""" _validators = super(SODateCol, self).createValidators() if default_datetime_implementation == DATETIME_IMPLEMENTATION: validatorClass = DateValidator elif default_datetime_implementation == MXDATETIME_IMPLEMENTATION: validatorClass = MXDateTimeValidator if default_datetime_implementation: _validators.insert(0, validatorClass(name=self.name, format=self.dateFormat)) return _validators def _mysqlType(self): return 'DATE' def _postgresType(self): return 'DATE' def _sybaseType(self): return self._postgresType() def _mssqlType(self): """ SQL Server doesn't have a DATE data type, to emulate we use a vc(10) """ return 'VARCHAR(10)' def _firebirdType(self): return 'DATE' def _maxdbType(self): return 'DATE' def _sqliteType(self): return 'DATE' class DateCol(Col): baseClass = SODateCol class TimeValidator(DateTimeValidator): def to_python(self, value, state): if isinstance(value, (datetime.time, sqlbuilder.SQLExpression)): return value if isinstance(value, datetime.timedelta): if value.days: raise validators.Invalid( "the value for the TimeCol '%s' must have days=0, it has days=%d" % (self.name, value.days), value, state) return datetime.time(*time.gmtime(value.seconds)[3:6]) value = super(TimeValidator, self).to_python(value, state) if isinstance(value, datetime.datetime): value = value.time() return value from_python = to_python class SOTimeCol(SOCol): timeFormat = '%H:%M:%S' def __init__(self, **kw): timeFormat = kw.pop('timeFormat', None) if timeFormat: self.timeFormat = timeFormat super(SOTimeCol, self).__init__(**kw) def createValidators(self): _validators = super(SOTimeCol, self).createValidators() if default_datetime_implementation == DATETIME_IMPLEMENTATION: validatorClass = TimeValidator elif default_datetime_implementation == MXDATETIME_IMPLEMENTATION: validatorClass = MXDateTimeValidator if default_datetime_implementation: _validators.insert(0, validatorClass(name=self.name, format=self.timeFormat)) return _validators def _mysqlType(self): return 'TIME' def _postgresType(self): return 'TIME' def _sybaseType(self): return 'TIME' def _sqliteType(self): return 'TIME' def _firebirdType(self): return 'TIME' def _maxdbType(self): return 'TIME' class TimeCol(Col): baseClass = SOTimeCol class SOTimestampCol(SODateTimeCol): """ Necessary to support MySQL's use of TIMESTAMP versus DATETIME types """ def __init__(self, **kw): if 'default' not in kw: kw['default'] = None SOCol.__init__(self, **kw) def _mysqlType(self): return 'TIMESTAMP' class TimestampCol(Col): baseClass = SOTimestampCol class TimedeltaValidator(SOValidator): def to_python(self, value, state): return value from_python = to_python class SOTimedeltaCol(SOCol): def _postgresType(self): return 'INTERVAL' def createValidators(self): return [TimedeltaValidator(name=self.name)] + \ super(SOTimedeltaCol, self).createValidators() class TimedeltaCol(Col): baseClass = SOTimedeltaCol from decimal import Decimal class DecimalValidator(SOValidator): def to_python(self, value, state): if value is None: return None if isinstance(value, (int, long, Decimal, sqlbuilder.SQLExpression)): return value if isinstance(value, float): value = str(value) try: connection = state.connection or state.soObject._connection except AttributeError: pass else: if hasattr(connection, "decimalSeparator"): value = value.replace(connection.decimalSeparator, ".") try: return Decimal(value) except: raise validators.Invalid("expected a Decimal in the DecimalCol '%s', got %s %r instead" % \ (self.name, type(value), value), value, state) def
from_python(self, value, state): if value is None: return None if isinstance(value, float): value = str(value) if isinstance(value, basestring): try: connection = state.connection or state.soObject._connection except AttributeError: pass else: if hasattr(connection, "decimalSeparator"): value = value.replace(connection.decimalSeparator, ".") try: return Decimal(value) except: raise validators.Invalid("can not parse Decimal value '%s' in the DecimalCol from '%s'" % (value, getattr(state, 'soObject', '(unknown)')), value, state) if isinstance(value, (int, long, Decimal, sqlbuilder.SQLExpression)): return value raise validators.Invalid("expected a Decimal in the DecimalCol '%s', got %s %r instead" % \ (self.name, type(value), value), value, state) class SODecimalCol(SOCol): def __init__(self, **kw): self.size = kw.pop('size', NoDefault) assert self.size is not NoDefault, \ "You must give a size argument" self.precision = kw.pop('precision', NoDefault) assert self.precision is not NoDefault, \ "You must give a precision argument" super(SODecimalCol, self).__init__(**kw) def _sqlType(self): return 'DECIMAL(%i, %i)' % (self.size, self.precision) def createValidators(self): return [DecimalValidator(name=self.name)] + \ super(SODecimalCol, self).createValidators() class DecimalCol(Col): baseClass = SODecimalCol class SOCurrencyCol(SODecimalCol): def __init__(self, **kw): pushKey(kw, 'size', 10) pushKey(kw, 'precision', 2) super(SOCurrencyCol, self).__init__(**kw) class CurrencyCol(DecimalCol): baseClass = SOCurrencyCol class DecimalStringValidator(DecimalValidator): def to_python(self, value, state): value = super(DecimalStringValidator, self).to_python(value, state) if self.precision and isinstance(value, Decimal): assert value < self.max, \ "Value must be less than %s" % int(self.max) value = value.quantize(self.precision) return value def from_python(self, value, state): value = super(DecimalStringValidator, self).from_python(value, state) if isinstance(value, Decimal): if self.precision: assert value < self.max, \ "Value must be less than %s" % int(self.max) value = value.quantize(self.precision) value = value.to_eng_string() elif isinstance(value, (int, long)): value = str(value) return value class SODecimalStringCol(SOStringCol): def __init__(self, **kw): self.size = kw.pop('size', NoDefault) assert (self.size is not NoDefault) and (self.size >= 0), \ "You must give a size argument as a positive integer" self.precision = kw.pop('precision', NoDefault) assert (self.precision is not NoDefault) and (self.precision >= 0), \ "You must give a precision argument as a positive integer" kw['length'] = int(self.size) + int(self.precision) self.quantize = kw.pop('quantize', False) assert isinstance(self.quantize, bool), \ "quantize argument must be Boolean True/False" super(SODecimalStringCol, self).__init__(**kw) def createValidators(self): if self.quantize: v = DecimalStringValidator(name=self.name, precision=Decimal(10) ** (-1 * int(self.precision)), max=Decimal(10) ** (int(self.size) - int(self.precision))) else: v = DecimalStringValidator(name=self.name, precision=0) return [v] + \ super(SODecimalStringCol, self).createValidators(dataType=Decimal) class DecimalStringCol(StringCol): baseClass = SODecimalStringCol class BinaryValidator(SOValidator): """ Validator for binary types. We're assuming that the per-database modules provide some form of wrapper type for binary conversion. 
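Roughly speaking (describing the implementation below): from_python wraps a byte string with the connection's createBinary() wrapper and remembers the (string, wrapper) pair, while to_python turns buffer/array/driver-specific binary values back into a plain string, decoding first on SQLite and reusing the cached pair when it matches.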
""" _cachedValue = None def to_python(self, value, state): if value is None: return None try: connection = state.connection or state.soObject._connection except AttributeError: dbName = None binaryType = type(None) # Just a simple workaround else: dbName = connection.dbName binaryType = connection._binaryType if isinstance(value, str): if dbName == "sqlite": value = connection.module.decode(value) return value if isinstance(value, (buffer, binaryType)): cachedValue = self._cachedValue if cachedValue and cachedValue[1] == value: return cachedValue[0] if isinstance(value, array): # MySQL return value.tostring() return str(value) # buffer => string raise validators.Invalid("expected a string in the BLOBCol '%s', got %s %r instead" % \ (self.name, type(value), value), value, state) def from_python(self, value, state): if value is None: return None connection = state.connection or state.soObject._connection binary = connection.createBinary(value) self._cachedValue = (value, binary) return binary class SOBLOBCol(SOStringCol): def __init__(self, **kw): # Change the default from 'auto' to False - this is a (mostly) binary column if 'varchar' not in kw: kw['varchar'] = False super(SOBLOBCol, self).__init__(**kw) def createValidators(self): return [BinaryValidator(name=self.name)] + \ super(SOBLOBCol, self).createValidators() def _mysqlType(self): length = self.length varchar = self.varchar if length >= 2**24: return varchar and "LONGTEXT" or "LONGBLOB" if length >= 2**16: return varchar and "MEDIUMTEXT" or "MEDIUMBLOB" if length >= 2**8: return varchar and "TEXT" or "BLOB" return varchar and "TINYTEXT" or "TINYBLOB" def _postgresType(self): return 'BYTEA' def _mssqlType(self): if self.connection and self.connection.can_use_max_types(): return 'VARBINARY(MAX)' else: return "IMAGE" class BLOBCol(StringCol): baseClass = SOBLOBCol class PickleValidator(BinaryValidator): """ Validator for pickle types. A pickle type is simply a binary type with hidden pickling, so that we can simply store any kind of stuff in a particular column. The support for this relies directly on the support for binary for your database. 
""" def to_python(self, value, state): if value is None: return None if isinstance(value, unicode): dbEncoding = self.getDbEncoding(state, default='ascii') value = value.encode(dbEncoding) if isinstance(value, str): return pickle.loads(value) raise validators.Invalid("expected a pickle string in the PickleCol '%s', got %s %r instead" % \ (self.name, type(value), value), value, state) def from_python(self, value, state): if value is None: return None return pickle.dumps(value, self.pickleProtocol) class SOPickleCol(SOBLOBCol): def __init__(self, **kw): self.pickleProtocol = kw.pop('pickleProtocol', pickle.HIGHEST_PROTOCOL) super(SOPickleCol, self).__init__(**kw) def createValidators(self): return [PickleValidator(name=self.name, pickleProtocol=self.pickleProtocol)] + \ super(SOPickleCol, self).createValidators() def _mysqlType(self): length = self.length if length >= 2**24: return "LONGBLOB" if length >= 2**16: return "MEDIUMBLOB" return "BLOB" class PickleCol(BLOBCol): baseClass = SOPickleCol def pushKey(kw, name, value): if not name in kw: kw[name] = value all = [] for key, value in globals().items(): if isinstance(value, type) and (issubclass(value, (Col, SOCol))): all.append(key) __all__.extend(all) del all SQLObject-1.5.2/sqlobject/mssql/0000755000175000017500000000000012322476205016005 5ustar phdphd00000000000000SQLObject-1.5.2/sqlobject/mssql/__init__.py0000644000175000017500000000026411133142377020117 0ustar phdphd00000000000000from sqlobject.dbconnection import registerConnection def builder(): import mssqlconnection return mssqlconnection.MSSQLConnection registerConnection(['mssql'], builder) SQLObject-1.5.2/sqlobject/mssql/mssqlconnection.py0000644000175000017500000002621712204406536021605 0ustar phdphd00000000000000from sqlobject.dbconnection import DBAPI from sqlobject import col import re class MSSQLConnection(DBAPI): supportTransactions = True dbName = 'mssql' schemes = [dbName] limit_re = re.compile('^\s*(select )(.*)', re.IGNORECASE) def __init__(self, db, user, password='', host='localhost', port=None, autoCommit=0, **kw): drivers = kw.pop('driver', None) or 'adodb,pymssql' for driver in drivers.split(','): driver = driver.strip() if not driver: continue try: if driver in ('adodb', 'adodbapi'): import adodbapi as sqlmodule elif driver == 'pymssql': import pymssql as sqlmodule else: raise ValueError('Unknown MSSQL driver "%s", expected adodb or pymssql' % driver) except ImportError: pass else: break else: raise ImportError('Cannot find an MSSQL driver, tried %s' % drivers) self.module = sqlmodule if sqlmodule.__name__ == 'adodbapi': self.dbconnection = sqlmodule.connect # ADO uses unicode only (AFAIK) self.usingUnicodeStrings = True # Need to use SQLNCLI provider for SQL Server Express Edition if kw.get("ncli"): conn_str = "Provider=SQLNCLI;" else: conn_str = "Provider=SQLOLEDB;" conn_str += "Data Source=%s;Initial Catalog=%s;" # MSDE does not allow SQL server login if kw.get("sspi"): conn_str += "Integrated Security=SSPI;Persist Security Info=False" self.make_conn_str = lambda keys: conn_str % (keys.host, keys.db) else: conn_str += "User Id=%s;Password=%s" self.make_conn_str = lambda keys: conn_str % (keys.host, keys.db, keys.user, keys.password) kw.pop("sspi", None) kw.pop("ncli", None) else: # pymssql self.dbconnection = sqlmodule.connect sqlmodule.Binary = lambda st: str(st) # don't know whether pymssql uses unicode self.usingUnicodeStrings = False def _make_conn_str(keys): keys_dict = {} for attr, value in ( ('user', keys.user), ('password', keys.password), ('host', 
keys.host), ('port', keys.port), ('database', keys.db), ): if value: keys_dict[attr] = value return keys_dict self.make_conn_str = _make_conn_str self.autoCommit=int(autoCommit) self.user = user self.password = password self.host = host self.port = port self.db = db self._can_use_max_types = None DBAPI.__init__(self, **kw) @classmethod def _connectionFromParams(cls, user, password, host, port, path, args): path = path.strip('/') return cls(user=user, password=password, host=host or 'localhost', port=port, db=path, **args) def insert_id(self, conn): """ insert_id method. """ c = conn.cursor() # converting the identity to an int is ugly, but it gets returned # as a decimal otherwise :S c.execute('SELECT CONVERT(INT, @@IDENTITY)') return c.fetchone()[0] def makeConnection(self): conn_descr = self.make_conn_str(self) if isinstance(conn_descr, dict): con = self.dbconnection(**conn_descr) else: con = self.dbconnection(conn_descr) cur = con.cursor() cur.execute('SET ANSI_NULLS ON') cur.execute("SELECT CAST('12345.21' AS DECIMAL(10, 2))") self.decimalSeparator = str(cur.fetchone()[0])[-3] cur.close() return con HAS_IDENTITY = """ select 1 from INFORMATION_SCHEMA.COLUMNS where TABLE_NAME = '%s' and COLUMNPROPERTY(object_id(TABLE_NAME), COLUMN_NAME, 'IsIdentity') = 1 """ def _hasIdentity(self, conn, table): query = self.HAS_IDENTITY % table c = conn.cursor() c.execute(query) r = c.fetchone() return r is not None def _queryInsertID(self, conn, soInstance, id, names, values): """ Insert the initial row with the given names and values, using id. """ table = soInstance.sqlmeta.table idName = soInstance.sqlmeta.idName c = conn.cursor() has_identity = self._hasIdentity(conn, table) if id is not None: names = [idName] + names values = [id] + values elif has_identity and idName in names: try: i = names.index( idName ) if i: del names[i] del values[i] except ValueError: pass if has_identity: if id is not None: c.execute('SET IDENTITY_INSERT %s ON' % table) else: c.execute('SET IDENTITY_INSERT %s OFF' % table) q = self._insertSQL(table, names, values) if self.debug: self.printDebug(conn, q, 'QueryIns') c.execute(q) if has_identity: c.execute('SET IDENTITY_INSERT %s OFF' % table) if id is None: id = self.insert_id(conn) if self.debugOutput: self.printDebug(conn, id, 'QueryIns', 'result') return id @classmethod def _queryAddLimitOffset(cls, query, start, end): if end and not start: limit_str = "SELECT TOP %i" % end match = cls.limit_re.match(query) if match and len(match.groups()) == 2: return ' '.join([limit_str, match.group(2)]) else: return query def createReferenceConstraint(self, soClass, col): return col.mssqlCreateReferenceConstraint() def createColumn(self, soClass, col): return col.mssqlCreateSQL(self) def createIDColumn(self, soClass): key_type = {int: "INT", str: "TEXT"}[soClass.sqlmeta.idType] return '%s %s IDENTITY UNIQUE' % (soClass.sqlmeta.idName, key_type) def createIndexSQL(self, soClass, index): return index.mssqlCreateIndexSQL(soClass) def joinSQLType(self, join): return 'INT NOT NULL' SHOW_TABLES="SELECT name FROM sysobjects WHERE type='U'" def tableExists(self, tableName): for (table,) in self.queryAll(self.SHOW_TABLES): if table.lower() == tableName.lower(): return True return False def addColumn(self, tableName, column): self.query('ALTER TABLE %s ADD %s' % (tableName, column.mssqlCreateSQL(self))) def delColumn(self, sqlmeta, column): self.query('ALTER TABLE %s DROP COLUMN %s' % (sqlmeta.table, column.dbName)) # precision and scale are taken from the column table so that we can create # decimal columns
if needed SHOW_COLUMNS = """ select name, length, ( select name from systypes where cast(xusertype as int)= cast(sc.xtype as int) ) datatype, prec, scale, isnullable, cdefault, m.text default_text, isnull(len(autoval),0) is_identity from syscolumns sc LEFT OUTER JOIN syscomments m on sc.cdefault = m.id AND m.colid = 1 where sc.id in (select id from sysobjects where name = '%s') order by colorder""" def columnsFromSchema(self, tableName, soClass): colData = self.queryAll(self.SHOW_COLUMNS % tableName) results = [] for field, size, t, precision, scale, nullAllowed, default, defaultText, is_identity in colData: if field == soClass.sqlmeta.idName: continue # precision is needed for decimal columns colClass, kw = self.guessClass(t, size, precision, scale) kw['name'] = str(soClass.sqlmeta.style.dbColumnToPythonAttr(field)) kw['dbName'] = str(field) kw['notNone'] = not nullAllowed if (defaultText): # Strip ( and ) defaultText = defaultText[1:-1] if defaultText[0] == "'": defaultText = defaultText[1:-1] else: if t == "int" : defaultText = int(defaultText) if t == "float" : defaultText = float(defaultText) if t == "numeric": defaultText = float(defaultText) # TODO need to access the "column" to_python method here--but the object doesn't exists yet # @@ skip key... kw['default'] = defaultText results.append(colClass(**kw)) return results def _setAutoCommit(self, conn, auto): #raise Exception(repr(auto)) return #conn.auto_commit = auto option = "ON" if auto == 0: option = "OFF" c = conn.cursor() c.execute("SET AUTOCOMMIT " + option) conn.setconnectoption(SQL.AUTOCOMMIT, option) # precision and scale is needed for decimal columns def guessClass(self, t, size, precision, scale): """ Here we take raw values coming out of syscolumns and map to SQLObject class types. 
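For instance (matching the branches below): 'int*' maps to IntCol; 'varchar' and 'char' map to StringCol, or to UnicodeCol when the driver works with unicode strings; 'datetime' maps to DateTimeCol; and 'decimal' maps to DecimalCol, whose size comes from the column's precision and whose precision comes from its scale.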
""" if t.startswith('int'): return col.IntCol, {} elif t.startswith('varchar'): if self.usingUnicodeStrings: return col.UnicodeCol, {'length': size} return col.StringCol, {'length': size} elif t.startswith('char'): if self.usingUnicodeStrings: return col.UnicodeCol, {'length': size, 'varchar': False} return col.StringCol, {'length': size, 'varchar': False} elif t.startswith('datetime'): return col.DateTimeCol, {} elif t.startswith('decimal'): return col.DecimalCol, {'size': precision, # be careful for awkward naming 'precision': scale} else: return col.Col, {} def server_version(self): try: server_version = self.queryAll("SELECT SERVERPROPERTY('productversion')")[0][0] server_version = server_version.split('.')[0] server_version = int(server_version) except: server_version = None # unknown self.server_version = server_version # cache it forever return server_version def can_use_max_types(self): if self._can_use_max_types is not None: return self._can_use_max_types server_version = self.server_version() self._can_use_max_types = can_use_max_types = \ (server_version is not None) and (server_version >= 9) return can_use_max_types SQLObject-1.5.2/sqlobject/tests/0000755000175000017500000000000012322476205016010 5ustar phdphd00000000000000SQLObject-1.5.2/sqlobject/tests/test_exceptions.py0000644000175000017500000000157612224002210021571 0ustar phdphd00000000000000from sqlobject import * from sqlobject.dberrors import * from sqlobject.tests.dbtest import * ######################################## ## Table aliases and self-joins ######################################## class TestException(SQLObject): name = StringCol(unique=True, length=100) class TestExceptionWithNonexistingTable(SQLObject): pass def test_exceptions(): if not supports("exceptions"): return setupClass(TestException) TestException(name="test") raises(DuplicateEntryError, TestException, name="test") connection = getConnection() if connection.module.__name__ != 'psycopg2': return TestExceptionWithNonexistingTable.setConnection(connection) try: list(TestExceptionWithNonexistingTable.select()) except ProgrammingError, e: assert e.args[0].code == '42P01' else: assert False, "DID NOT RAISE" SQLObject-1.5.2/sqlobject/tests/test_empty.py0000644000175000017500000000066710372665117020575 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * class EmptyClass(SQLObject): pass def test_empty(): if not supports('emptyTable'): return setupClass(EmptyClass) e1 = EmptyClass() e2 = EmptyClass() assert e1 != e2 assert e1.id != e2.id assert e1 in list(EmptyClass.select()) assert e2 in list(EmptyClass.select()) e1.destroySelf() assert list(EmptyClass.select()) == [e2] SQLObject-1.5.2/sqlobject/tests/test_transactions.py0000644000175000017500000000503412202716263022131 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * ######################################## ## Transaction test ######################################## class TestSOTrans(SQLObject): #_cacheValues = False class sqlmeta: defaultOrder = 'name' name = StringCol(length=10, alternateID=True, dbName='name_col') def test_transaction(): if not supports('transactions'): return setupClass(TestSOTrans) TestSOTrans(name='bob') TestSOTrans(name='tim') trans = TestSOTrans._connection.transaction() try: TestSOTrans._connection.autoCommit = 'exception' TestSOTrans(name='joe', connection=trans) trans.rollback() trans.begin() assert ([n.name for n in TestSOTrans.select(connection=trans)] == ['bob', 'tim']) b = 
TestSOTrans.byName('bob', connection=trans) b.name = 'robert' trans.commit() assert b.name == 'robert' b.name = 'bob' trans.rollback() trans.begin() assert b.name == 'robert' finally: TestSOTrans._connection.autoCommit = True def test_transaction_commit_sync(): if not supports('transactions'): return setupClass(TestSOTrans) trans = TestSOTrans._connection.transaction() try: TestSOTrans(name='bob') bOut = TestSOTrans.byName('bob') bIn = TestSOTrans.byName('bob', connection=trans) bIn.name = 'robert' assert bOut.name == 'bob' trans.commit() assert bOut.name == 'robert' finally: TestSOTrans._connection.autoCommit = True def test_transaction_delete(close=False): if not supports('transactions'): return setupClass(TestSOTrans) connection = TestSOTrans._connection if (connection.dbName == 'sqlite') and connection._memory: return # The following test requires a different connection trans = connection.transaction() try: TestSOTrans(name='bob') bIn = TestSOTrans.byName('bob', connection=trans) bIn.destroySelf() bOut = TestSOTrans.select(TestSOTrans.q.name=='bob') assert bOut.count() == 1 bOutInst = bOut[0] bOutID = bOutInst.id trans.commit(close=close) assert bOut.count() == 0 raises(SQLObjectNotFound, "TestSOTrans.get(bOutID)") raises(SQLObjectNotFound, "bOutInst.name") finally: trans.rollback() connection.autoCommit = True connection.close() def test_transaction_delete_with_close(): test_transaction_delete(close=True) SQLObject-1.5.2/sqlobject/tests/test_schema.py0000644000175000017500000000125412203470105020652 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * ######################################## ## Schema per connection ######################################## class TestSchema(SQLObject): foo = UnicodeCol(length=200) def test_connection_schema(): if not supports('schema'): return conn = getConnection() conn.schema = None conn.query('CREATE SCHEMA test') conn.schema = 'test' conn.query('SET search_path TO test') setupClass(TestSchema) assert TestSchema._connection is conn TestSchema(foo='bar') assert conn.queryAll("SELECT * FROM test.test_schema") conn.schema = None conn.query('SET search_path TO public') SQLObject-1.5.2/sqlobject/tests/test_basic.py0000644000175000017500000002235712201741025020502 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * class TestSO1(SQLObject): name = StringCol(length=50, dbName='name_col') name.title = 'Your Name' name.foobar = 1 passwd = StringCol(length=10) class sqlmeta: cacheValues = False def _set_passwd(self, passwd): self._SO_set_passwd(passwd.encode('rot13')) def setupGetters(cls): setupClass(cls) inserts(cls, [('bob', 'god'), ('sally', 'sordid'), ('dave', 'dremel'), ('fred', 'forgo')], 'name passwd') def test_case1(): setupGetters(TestSO1) bob = TestSO1.selectBy(name='bob')[0] assert bob.name == 'bob' assert bob.passwd == 'god'.encode('rot13') bobs = TestSO1.selectBy(name='bob')[:10] assert len(list(bobs)) == 1 def test_newline(): setupGetters(TestSO1) bob = TestSO1.selectBy(name='bob')[0] testString = 'hey\nyou\\can\'t you see me?\t' bob.name = testString bob.expire() assert bob.name == testString def test_count(): setupGetters(TestSO1) assert TestSO1.selectBy(name=None).count() == 0 assert TestSO1.selectBy(name='bob').count() == 1 assert TestSO1.select(TestSO1.q.name == 'bob').count() == 1 assert TestSO1.select().count() == len(list(TestSO1.select())) def test_getset(): setupGetters(TestSO1) bob = TestSO1.selectBy(name='bob')[0] assert bob.name == 'bob' bob.name = 'joe' assert 
bob.name == 'joe' bob.set(name='joebob', passwd='testtest') assert bob.name == 'joebob' def test_extra_vars(): setupGetters(TestSO1) col = TestSO1.sqlmeta.columns['name'] assert col.title == 'Your Name' assert col.foobar == 1 assert getattr(TestSO1.sqlmeta.columns['passwd'], 'title', None) is None class TestSO2(SQLObject): name = StringCol(length=50, dbName='name_col') passwd = StringCol(length=10) def _set_passwd(self, passwd): self._SO_set_passwd(passwd.encode('rot13')) def test_case2(): setupGetters(TestSO2) bob = TestSO2.selectBy(name='bob')[0] assert bob.name == 'bob' assert bob.passwd == 'god'.encode('rot13') class Student(SQLObject): is_smart = BoolCol() def test_boolCol(): setupClass(Student) student = Student(is_smart=False) assert student.is_smart == False student2 = Student(is_smart=1) assert student2.is_smart == True class TestSO3(SQLObject): name = StringCol(length=10, dbName='name_col') other = ForeignKey('TestSO4', default=None) other2 = KeyCol(foreignKey='TestSO4', default=None) class TestSO4(SQLObject): me = StringCol(length=10) def test_foreignKey(): setupClass([TestSO4, TestSO3]) test3_order = [col.name for col in TestSO3.sqlmeta.columnList] assert test3_order == ['name', 'otherID', 'other2ID'] tc3 = TestSO3(name='a') assert tc3.other is None assert tc3.other2 is None assert tc3.otherID is None assert tc3.other2ID is None tc4a = TestSO4(me='1') tc3.other = tc4a assert tc3.other == tc4a assert tc3.otherID == tc4a.id tc4b = TestSO4(me='2') tc3.other = tc4b.id assert tc3.other == tc4b assert tc3.otherID == tc4b.id tc4c = TestSO4(me='3') tc3.other2 = tc4c assert tc3.other2 == tc4c assert tc3.other2ID == tc4c.id tc4d = TestSO4(me='4') tc3.other2 = tc4d.id assert tc3.other2 == tc4d assert tc3.other2ID == tc4d.id tcc = TestSO3(name='b', other=tc4a) assert tcc.other == tc4a tcc2 = TestSO3(name='c', other=tc4a.id) assert tcc2.other == tc4a def test_selectBy(): setupClass([TestSO4, TestSO3]) tc4 = TestSO4(me='another') tc3 = TestSO3(name='sel', other=tc4) anothertc3 = TestSO3(name='not joined') assert tc3.other == tc4 assert list(TestSO3.selectBy(other=tc4)) == [tc3] assert list(TestSO3.selectBy(otherID=tc4.id)) == [tc3] assert TestSO3.selectBy(otherID=tc4.id)[0] == tc3 assert list(TestSO3.selectBy(otherID=tc4.id)[:10]) == [tc3] assert list(TestSO3.selectBy(other=tc4)[:10]) == [tc3] class TestSO5(SQLObject): name = StringCol(length=10, dbName='name_col') other = ForeignKey('TestSO6', default=None, cascade=True) another = ForeignKey('TestSO7', default=None, cascade=True) class TestSO6(SQLObject): name = StringCol(length=10, dbName='name_col') other = ForeignKey('TestSO7', default=None, cascade=True) class TestSO7(SQLObject): name = StringCol(length=10, dbName='name_col') def test_foreignKeyDestroySelfCascade(): setupClass([TestSO7, TestSO6, TestSO5]) tc5 = TestSO5(name='a') tc6a = TestSO6(name='1') tc5.other = tc6a tc7a = TestSO7(name='2') tc6a.other = tc7a tc5.another = tc7a assert tc5.other == tc6a assert tc5.otherID == tc6a.id assert tc6a.other == tc7a assert tc6a.otherID == tc7a.id assert tc5.other.other == tc7a assert tc5.other.otherID == tc7a.id assert tc5.another == tc7a assert tc5.anotherID == tc7a.id assert tc5.other.other == tc5.another assert TestSO5.select().count() == 1 assert TestSO6.select().count() == 1 assert TestSO7.select().count() == 1 tc6b = TestSO6(name='3') tc6c = TestSO6(name='4') tc7b = TestSO7(name='5') tc6b.other = tc7b tc6c.other = tc7b assert TestSO5.select().count() == 1 assert TestSO6.select().count() == 3 assert TestSO7.select().count() == 2 
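# The remaining steps below exercise the cascade=True foreign keys: deleting tc6b removes only that TestSO6 row, deleting tc7b then cascades to the TestSO6 row (tc6c) that still references it, and deleting tc7a finally cascades through tc6a down to tc5, leaving all three tables empty.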
tc6b.destroySelf() assert TestSO5.select().count() == 1 assert TestSO6.select().count() == 2 assert TestSO7.select().count() == 2 tc7b.destroySelf() assert TestSO5.select().count() == 1 assert TestSO6.select().count() == 1 assert TestSO7.select().count() == 1 tc7a.destroySelf() assert TestSO5.select().count() == 0 assert TestSO6.select().count() == 0 assert TestSO7.select().count() == 0 def testForeignKeyDropTableCascade(): if not supports('dropTableCascade'): return setupClass(TestSO7) setupClass(TestSO6) setupClass(TestSO5) tc5a = TestSO5(name='a') tc6a = TestSO6(name='1') tc5a.other = tc6a tc7a = TestSO7(name='2') tc6a.other = tc7a tc5a.another = tc7a tc5b = TestSO5(name='b') tc5c = TestSO5(name='c') tc6b = TestSO6(name='3') tc5c.other = tc6b assert TestSO5.select().count() == 3 assert TestSO6.select().count() == 2 assert TestSO7.select().count() == 1 TestSO7.dropTable(cascade=True) assert TestSO5.select().count() == 3 assert TestSO6.select().count() == 2 tc6a.destroySelf() assert TestSO5.select().count() == 2 assert TestSO6.select().count() == 1 tc6b.destroySelf() assert TestSO5.select().count() == 1 assert TestSO6.select().count() == 0 assert iter(TestSO5.select()).next() == tc5b tc6c = TestSO6(name='3') tc5b.other = tc6c assert TestSO5.select().count() == 1 assert TestSO6.select().count() == 1 tc6c.destroySelf() assert TestSO5.select().count() == 0 assert TestSO6.select().count() == 0 class TestSO8(SQLObject): name = StringCol(length=10, dbName='name_col') other = ForeignKey('TestSO9', default=None, cascade=False) class TestSO9(SQLObject): name = StringCol(length=10, dbName='name_col') def testForeignKeyDestroySelfRestrict(): setupClass([TestSO9, TestSO8]) tc8a = TestSO8(name='a') tc9a = TestSO9(name='1') tc8a.other = tc9a tc8b = TestSO8(name='b') tc9b = TestSO9(name='2') assert tc8a.other == tc9a assert tc8a.otherID == tc9a.id assert TestSO8.select().count() == 2 assert TestSO9.select().count() == 2 raises(Exception, tc9a.destroySelf) tc9b.destroySelf() assert TestSO8.select().count() == 2 assert TestSO9.select().count() == 1 tc8a.destroySelf() tc8b.destroySelf() tc9a.destroySelf() assert TestSO8.select().count() == 0 assert TestSO9.select().count() == 0 class TestSO10(SQLObject): name = StringCol() class TestSO11(SQLObject): name = StringCol() other = ForeignKey('TestSO10', default=None, cascade='null') def testForeignKeySetNull(): setupClass([TestSO10, TestSO11]) obj1 = TestSO10(name='foo') obj2 = TestSO10(name='bar') dep1 = TestSO11(name='xxx', other=obj1) dep2 = TestSO11(name='yyy', other=obj1) dep3 = TestSO11(name='zzz', other=obj2) for name in 'xxx', 'yyy', 'zzz': assert len(list(TestSO11.selectBy(name=name))) == 1 obj1.destroySelf() for name in 'xxx', 'yyy', 'zzz': assert len(list(TestSO11.selectBy(name=name))) == 1 assert dep1.other is None assert dep2.other is None assert dep3.other is obj2 def testAsDict(): setupGetters(TestSO1) bob = TestSO1.selectBy(name='bob')[0] assert bob.sqlmeta.asDict() == { 'passwd': 'tbq', 'name': 'bob', 'id': bob.id} def test_nonexisting_attr(): setupClass(Student) try: Student.select(Student.q.nonexisting) except AttributeError: pass else: assert 0, "Expected an AttributeError" class TestSO12(SQLObject): name = StringCol() value = IntCol(defaultSQL='1') def test_defaultSQL(): setupClass(TestSO12) test = TestSO12(name="test") assert test.value == 1 def test_connection_override(): sqlhub.processConnection = connectionForURI('sqlite:///db1') class TestSO13(SQLObject): _connection = connectionForURI('sqlite:///db2') assert TestSO13._connection.uri() 
== 'sqlite:///db2' del sqlhub.processConnection SQLObject-1.5.2/sqlobject/tests/test_reparent_sqlmeta.py0000644000175000017500000000154011561536165022775 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * real_sqlmeta = sqlmeta class Reparented1(SQLObject): class sqlmeta: table = 'reparented1' dummy = StringCol() class Reparented2(SQLObject): class sqlmeta(object): @classmethod def setClass(cls, soClass): # Well, it's pretty hard to call the superclass method # when it's a classmethod and it's not actually your # *current* superclass. Sigh real_sqlmeta.setClass.im_func(cls, soClass) cls.worked = True dummy = StringCol() def test_reparented(): setupClass([Reparented1, Reparented2]) assert Reparented1.sqlmeta.table == 'reparented1' assert issubclass(Reparented1.sqlmeta, real_sqlmeta) assert issubclass(Reparented2.sqlmeta, real_sqlmeta) assert Reparented2.sqlmeta.worked SQLObject-1.5.2/sqlobject/tests/test_aggregates.py0000644000175000017500000000403711014312646021531 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * # Test MIN, AVG, MAX, COUNT, SUM class IntAccumulator(SQLObject): value = IntCol() class FloatAccumulator(SQLObject): value = FloatCol() def test_integer(): setupClass(IntAccumulator) IntAccumulator(value=1) IntAccumulator(value=2) IntAccumulator(value=3) assert IntAccumulator.select().min(IntAccumulator.q.value) == 1 assert IntAccumulator.select().avg(IntAccumulator.q.value) == 2 assert IntAccumulator.select().max(IntAccumulator.q.value) == 3 assert IntAccumulator.select().sum(IntAccumulator.q.value) == 6 assert IntAccumulator.select(IntAccumulator.q.value > 1).max(IntAccumulator.q.value) == 3 assert IntAccumulator.select(IntAccumulator.q.value > 1).sum(IntAccumulator.q.value) == 5 def floatcmp(f1, f2): if abs(f1-f2) < 0.1: return 0 if f1 < f2: return 1 return -1 def test_float(): setupClass(FloatAccumulator) FloatAccumulator(value=1.2) FloatAccumulator(value=2.4) FloatAccumulator(value=3.8) assert floatcmp(FloatAccumulator.select().min(FloatAccumulator.q.value), 1.2) == 0 assert floatcmp(FloatAccumulator.select().avg(FloatAccumulator.q.value), 2.5) == 0 assert floatcmp(FloatAccumulator.select().max(FloatAccumulator.q.value), 3.8) == 0 assert floatcmp(FloatAccumulator.select().sum(FloatAccumulator.q.value), 7.4) == 0 def test_many(): setupClass(IntAccumulator) IntAccumulator(value=1) IntAccumulator(value=1) IntAccumulator(value=2) IntAccumulator(value=2) IntAccumulator(value=3) IntAccumulator(value=3) attribute = IntAccumulator.q.value assert IntAccumulator.select().accumulateMany( ("MIN", attribute), ("AVG", attribute), ("MAX", attribute), ("COUNT", attribute), ("SUM", attribute) ) == (1, 2, 3, 6, 12) assert IntAccumulator.select(distinct=True).accumulateMany( ("MIN", attribute), ("AVG", attribute), ("MAX", attribute), ("COUNT", attribute), ("SUM", attribute) ) == (1, 2, 3, 3, 6) SQLObject-1.5.2/sqlobject/tests/test_identity.py0000644000175000017500000000121612105765625021260 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * ######################################## ## Identity (MS SQL) ######################################## class TestIdentity(SQLObject): n = IntCol() def test_identity(): # (re)create table TestIdentity.dropTable(connection=getConnection(), ifExists=True) setupClass(TestIdentity) # insert without giving identity i1 = TestIdentity(n=100) # verify result i1get = TestIdentity.get(1) assert(i1get.n == 100) # insert while giving identity i2 = 
TestIdentity(id=2, n=200) # verify result i2get = TestIdentity.get(2) assert(i2get.n == 200) SQLObject-1.5.2/sqlobject/tests/__init__.py0000644000175000017500000000000210372665117020116 0ustar phdphd00000000000000# SQLObject-1.5.2/sqlobject/tests/test_cache.py0000644000175000017500000000266310510515140020460 0ustar phdphd00000000000000from sqlobject import * from dbtest import * from sqlobject.cache import CacheSet class Something(object): pass def test_purge1(): x = CacheSet() y = Something() obj = x.get(1, y.__class__) assert obj is None x.put(1, y.__class__, y) x.finishPut(y.__class__) j = x.get(1, y.__class__) assert j == y x.expire(1, y.__class__) j = x.get(1, y.__class__) assert j == None x.finishPut(y.__class__) j = x.get(1, y.__class__) assert j == None x.finishPut(y.__class__) class CacheTest(SQLObject): name = StringCol(alternateID=True, length=100) def test_cache(): setupClass(CacheTest) s = CacheTest(name='foo') obj_id = id(s) s_id = s.id assert CacheTest.get(s_id) is s assert not s.sqlmeta.expired CacheTest.sqlmeta.expireAll() assert s.sqlmeta.expired CacheTest.sqlmeta.expireAll() s1 = CacheTest.get(s_id) # We should have a new object: assert id(s1) != obj_id obj_id2 = id(s1) CacheTest._connection.expireAll() s2 = CacheTest.get(s_id) assert id(s2) != obj_id and id(s2) != obj_id2 def test_cache_cull(): setupClass(CacheTest) s = CacheTest(name='test_cache_create') list = [CacheTest(name='test_cache_create %d' % count) for count in range(s._connection.cache.caches['CacheTest'].cullFrequency)] assert len(s._connection.cache.caches['CacheTest'].cache) < s._connection.cache.caches['CacheTest'].cullFrequency SQLObject-1.5.2/sqlobject/tests/test_validation.py0000644000175000017500000000624112204667354021564 0ustar phdphd00000000000000from sqlobject import * from sqlobject.col import validators from sqlobject.tests.dbtest import * ######################################## ## Validation/conversion ######################################## class SOTestValidator(validators.Validator): def to_python(self, value, state): if value: self.save_value.append(value) return 1 return value def from_python(self, value, state): if value: self.save_value.append(value) return 2 return value validator1 = SOTestValidator(save_value=[]) validator2 = SOTestValidator(save_value=[]) class SOValidation(SQLObject): name = StringCol(validator=validators.PlainText(), default='x', dbName='name_col') name2 = StringCol(validator=validators.ConfirmType(type=str), default='y') name3 = IntCol(validator=validators.Wrapper(fromPython=int), default=100) name4 = FloatCol(default=2.718) name5 = PickleCol(default=None) name6 = BoolCol(default=None) name7 = UnicodeCol(default=None) name8 = IntCol(default=None) name9 = IntCol(validator=validator1, validator2=validator2, default=0) class SOValidationTest(object): def __init__(self, value): self.value = value class SOValidationTestUnicode(SOValidationTest): def __unicode__(self): return self.value class SOValidationTestInt(SOValidationTest): def __int__(self): return self.value class SOValidationTestBool(SOValidationTest): def __nonzero__(self): return self.value class SOValidationTestFloat(SOValidationTest): def __float__(self): return self.value class TestValidation: def setup_method(self, meth): setupClass(SOValidation) def test_validate(self): t = SOValidation(name='hey') raises(validators.Invalid, setattr, t, 'name', '!!!') t.name = 'you' assert t.name == 'you' def test_confirmType(self): t = SOValidation(name2='hey') raises(validators.Invalid, setattr, t, 'name2', 1) 
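# The remaining raises() calls below feed values of the wrong type into validated columns and expect validators.Invalid; the loop further down then checks that wrapper objects implementing __float__, __nonzero__, __unicode__ or __int__ are converted to the expected values rather than rejected.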
raises(validators.Invalid, setattr, t, 'name3', '1') raises(validators.Invalid, setattr, t, 'name4', '1') raises(validators.Invalid, setattr, t, 'name6', '1') raises(validators.Invalid, setattr, t, 'name7', 1) t.name2 = 'you' assert t.name2 == 'you' for name, cls, value in ( ('name4', SOValidationTestFloat, 1.1), ('name6', SOValidationTestBool, True), ('name7', SOValidationTestUnicode, u'test'), ('name8', SOValidationTestInt, 1)): setattr(t, name, cls(value)) assert getattr(t, name) == value def test_wrapType(self): t = SOValidation(name3=1) raises(validators.Invalid, setattr, t, 'name3', 'x') t.name3 = 1L assert t.name3 == 1 t.name3 = 0 assert t.name3 == 0 def test_emptyValue(self): t = SOValidation(name5={}) assert t.name5 == {} def test_validator2(self): t = SOValidation(name9=1) t = SOValidation(name9=2) assert validator1.save_value == [2, 2, 2, 2, 2, 2] assert validator2.save_value == [1, 1, 1, 2, 1, 1] SQLObject-1.5.2/sqlobject/tests/test_md5.py0000644000175000017500000000064112226305676020115 0ustar phdphd00000000000000from md5 import md5 ######################################## ## md5.md5 ######################################## def test_md5(): assert md5('').hexdigest() == 'd41d8cd98f00b204e9800998ecf8427e' assert md5('\n').hexdigest() == '68b329da9893e34099c7d8ad5cb9c940' assert md5('123').hexdigest() == '202cb962ac59075b964b07152d234b70' assert md5('123\n').hexdigest() == 'ba1f2511fc30423bdbb183fe33f3dd0f' SQLObject-1.5.2/sqlobject/tests/test_comparison.py0000644000175000017500000000074412035030155021567 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * class TestComparison(SQLObject): pass def test_eq(): setupClass(TestComparison, force=True) t1 = TestComparison() t2 = TestComparison() TestComparison._connection.cache.clear() t3 = TestComparison.get(1) t4 = TestComparison.get(2) assert t1.id == t3.id assert t2.id == t4.id assert t1 is not t3 assert t2 is not t4 assert t1 == t3 assert t2 == t4 assert t1 <> t2 SQLObject-1.5.2/sqlobject/tests/test_cyclic_reference.py0000644000175000017500000000360312202715763022711 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * class TestCyclicReferenceA(SQLObject): class sqlmeta(sqlmeta): idName = 'test_id_here' table = 'test_cyclic_reference_a_table' name = StringCol() number = IntCol() time = DateTimeCol() short = StringCol(length=10) blobcol = BLOBCol() fkeyb = ForeignKey('TestCyclicReferenceB') class TestCyclicReferenceB(SQLObject): class sqlmeta(sqlmeta): idName = 'test_id_here' table = 'test_cyclic_reference_b_table' name = StringCol() number = IntCol() time = DateTimeCol() short = StringCol(length=10) blobcol = BLOBCol() fkeya = ForeignKey('TestCyclicReferenceA') def test_cyclic_reference(): if not supports('dropTableCascade'): return conn = getConnection() TestCyclicReferenceA.setConnection(conn) TestCyclicReferenceB.setConnection(conn) TestCyclicReferenceA.dropTable(ifExists=True, cascade=True) assert not conn.tableExists(TestCyclicReferenceA.sqlmeta.table) TestCyclicReferenceB.dropTable(ifExists=True, cascade=True) assert not conn.tableExists(TestCyclicReferenceB.sqlmeta.table) constraints = TestCyclicReferenceA.createTable(ifNotExists=True, applyConstraints=False) assert conn.tableExists(TestCyclicReferenceA.sqlmeta.table) constraints += TestCyclicReferenceB.createTable(ifNotExists=True, applyConstraints=False) assert conn.tableExists(TestCyclicReferenceB.sqlmeta.table) for constraint in constraints: conn.query(constraint) 
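# Both tables now exist and point at each other through the foreign-key constraints applied separately above, so the teardown below relies on DROP TABLE ... CASCADE to remove them despite the circular references.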
TestCyclicReferenceA.dropTable(ifExists=True, cascade=True) assert not conn.tableExists(TestCyclicReferenceA.sqlmeta.table) TestCyclicReferenceB.dropTable(ifExists=True, cascade=True) assert not conn.tableExists(TestCyclicReferenceB.sqlmeta.table) SQLObject-1.5.2/sqlobject/tests/test_views.py0000644000175000017500000001241611570431751020563 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * from sqlobject.views import * class PhoneNumber(SQLObject): number = StringCol() calls = SQLMultipleJoin('PhoneCall') incoming = SQLMultipleJoin('PhoneCall', joinColumn='toID') class PhoneCall(SQLObject): phoneNumber = ForeignKey('PhoneNumber') to = ForeignKey('PhoneNumber') minutes = IntCol() class ViewPhoneCall(ViewSQLObject): class sqlmeta: idName = PhoneCall.q.id clause = PhoneCall.q.phoneNumberID==PhoneNumber.q.id minutes = IntCol(dbName=PhoneCall.q.minutes) number = StringCol(dbName=PhoneNumber.q.number) phoneNumber = ForeignKey('PhoneNumber', dbName=PhoneNumber.q.id) call = ForeignKey('PhoneCall', dbName=PhoneCall.q.id) class ViewPhone(ViewSQLObject): class sqlmeta: idName = PhoneNumber.q.id clause = PhoneCall.q.phoneNumberID==PhoneNumber.q.id minutes = IntCol(dbName=func.SUM(PhoneCall.q.minutes)) numberOfCalls = IntCol(dbName=func.COUNT(PhoneCall.q.phoneNumberID)) number = StringCol(dbName=PhoneNumber.q.number) phoneNumber = ForeignKey('PhoneNumber', dbName=PhoneNumber.q.id) calls = SQLMultipleJoin('PhoneCall', joinColumn='phoneNumberID') vCalls = SQLMultipleJoin('ViewPhoneCall', joinColumn='phoneNumberID', orderBy='id') class ViewPhoneMore(ViewSQLObject): ''' View on top of view ''' class sqlmeta: idName = ViewPhone.q.id clause = ViewPhone.q.id == PhoneCall.q.toID number = StringCol(dbName=ViewPhone.q.number) timesCalled = IntCol(dbName=func.COUNT(PhoneCall.q.toID)) timesCalledLong = IntCol(dbName=func.COUNT(PhoneCall.q.toID)) timesCalledLong.aggregateClause = PhoneCall.q.minutes>10 minutesCalled = IntCol(dbName=func.SUM(PhoneCall.q.minutes)) class ViewPhoneMore2(ViewPhoneMore): class sqlmeta: table = 'vpm' class ViewPhoneInnerAggregate(ViewPhone): twiceMinutes = IntCol(dbName=func.SUM(PhoneCall.q.minutes)*2) def setup_module(mod): setupClass([mod.PhoneNumber, mod.PhoneCall]) mod.ViewPhoneCall._connection = mod.PhoneNumber._connection mod.ViewPhone._connection = mod.PhoneNumber._connection mod.ViewPhoneMore._connection = mod.PhoneNumber._connection phones = inserts(mod.PhoneNumber, [('1234567890',), ('1111111111',)], 'number') calls = inserts(mod.PhoneCall, [(phones[0], phones[1], 5), (phones[0], phones[1], 20), (phones[1], phones[0], 10), (phones[1], phones[0], 25)], 'phoneNumber to minutes') mod.phones = phones mod.calls = calls mod.sqlrepr = mod.PhoneNumber._connection.sqlrepr def testSimpleVPC(): assert hasattr(ViewPhoneCall, 'minutes') assert hasattr(ViewPhoneCall, 'number') assert hasattr(ViewPhoneCall, 'phoneNumberID') def testColumnSQLVPC(): assert str(sqlrepr(ViewPhoneCall.q.id)) == 'view_phone_call.id' assert str(sqlrepr(ViewPhoneCall.q.minutes)) == 'view_phone_call.minutes' q = sqlrepr(ViewPhoneCall.q) assert q.count('phone_call.minutes AS minutes') assert q.count('phone_number.number AS number') def testAliasOverride(): assert str(sqlrepr(ViewPhoneMore2.q.id)) == 'vpm.id' def checkAttr(cls, id, attr, value): assert getattr(cls.get(id), attr) == value def testGetVPC(): checkAttr(ViewPhoneCall, calls[0].id, 'number', calls[0].phoneNumber.number) checkAttr(ViewPhoneCall, calls[0].id, 'minutes', calls[0].minutes) checkAttr(ViewPhoneCall, calls[0].id, 
'phoneNumber', calls[0].phoneNumber) checkAttr(ViewPhoneCall, calls[2].id, 'number', calls[2].phoneNumber.number) checkAttr(ViewPhoneCall, calls[2].id, 'minutes', calls[2].minutes) checkAttr(ViewPhoneCall, calls[2].id, 'phoneNumber', calls[2].phoneNumber) def testGetVP(): checkAttr(ViewPhone, phones[0].id, 'number', phones[0].number) checkAttr(ViewPhone, phones[0].id, 'minutes', phones[0].calls.sum(PhoneCall.q.minutes)) checkAttr(ViewPhone, phones[0].id, 'phoneNumber', phones[0]) def testGetVPM(): checkAttr(ViewPhoneMore, phones[0].id, 'number', phones[0].number) checkAttr(ViewPhoneMore, phones[0].id, 'minutesCalled', phones[0].incoming.sum(PhoneCall.q.minutes)) checkAttr(ViewPhoneMore, phones[0].id, 'timesCalled', phones[0].incoming.count()) checkAttr(ViewPhoneMore, phones[0].id, 'timesCalledLong', phones[0].incoming.filter(PhoneCall.q.minutes>10).count()) def testJoinView(): p = ViewPhone.get(phones[0].id) assert p.calls.count() == 2 assert p.vCalls.count() == 2 assert p.vCalls[0] == ViewPhoneCall.get(calls[0].id) def testInnerAggregate(): checkAttr(ViewPhoneInnerAggregate, phones[0].id, 'twiceMinutes', phones[0].calls.sum(PhoneCall.q.minutes)*2) def testSelect(): s = ViewPhone.select() assert s.count() == len(phones) s = ViewPhoneCall.select() assert s.count() == len(calls) def testSelect2(): s = ViewPhone.select(ViewPhone.q.number==phones[0].number) assert s.getOne().phoneNumber == phones[0] def testDistinctCount(): # This test is for SelectResults non-* based count when distinct # We're really just checking this doesn't raise anything due to lack of sqlrepr'ing assert ViewPhone.select(distinct=True).count() == 2 SQLObject-1.5.2/sqlobject/tests/test_columns_order.py0000644000175000017500000000063711453747464022316 0ustar phdphd00000000000000from sqlobject import * ######################################## ## Columns order ######################################## class SOColumnsOrder(SQLObject): name = StringCol() surname = StringCol() parname = StringCol() age = IntCol() def test_columns_order(): column_names = [c.name for c in SOColumnsOrder.sqlmeta.columnList] assert column_names == ['name', 'surname', 'parname', 'age'] SQLObject-1.5.2/sqlobject/tests/test_enum.py0000644000175000017500000000271312204667354020376 0ustar phdphd00000000000000from sqlobject import * from sqlobject.col import validators from sqlobject.tests.dbtest import * ######################################## ## Enum test ######################################## class Enum1(SQLObject): l = EnumCol(enumValues=['a', 'bcd', 'e']) def testBad(): setupClass(Enum1) for l in ['a', 'bcd', 'a', 'e']: Enum1(l=l) raises( (Enum1._connection.module.IntegrityError, Enum1._connection.module.ProgrammingError, validators.Invalid), Enum1, l='b') class EnumWithNone(SQLObject): l = EnumCol(enumValues=['a', 'bcd', 'e', None]) def testNone(): setupClass(EnumWithNone) for l in [None, 'a', 'bcd', 'a', 'e', None]: e = EnumWithNone(l=l) assert e.l == l class EnumWithDefaultNone(SQLObject): l = EnumCol(enumValues=['a', 'bcd', 'e', None], default=None) def testDefaultNone(): setupClass(EnumWithDefaultNone) e = EnumWithDefaultNone() assert e.l == None class EnumWithDefaultOther(SQLObject): l = EnumCol(enumValues=['a', 'bcd', 'e', None], default='a') def testDefaultOther(): setupClass(EnumWithDefaultOther) e = EnumWithDefaultOther() assert e.l == 'a' class EnumUnicode(SQLObject): n = UnicodeCol() l = EnumCol(enumValues=['a', 'b']) def testUnicode(): setupClass(EnumUnicode) EnumUnicode(n=u'a', l='a') EnumUnicode(n=u'b', l=u'b') 
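# The last two rows below combine a non-ASCII unicode value (u'\u201c', a curly quotation mark) in the UnicodeCol with str and unicode enum values, checking that EnumCol handling does not trip over the encoding.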
EnumUnicode(n=u'\u201c', l='a') EnumUnicode(n=u'\u201c', l=u'b') SQLObject-1.5.2/sqlobject/tests/test_ForeignKey.py0000644000175000017500000000533711644602102021464 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * from sqlobject.tests.dbtest import InstalledTestDatabase class TestComposerKey(SQLObject): name = StringCol() id2 = IntCol(default=None, unique=True) class TestWorkKey(SQLObject): class sqlmeta: idName = "work_id" composer = ForeignKey('TestComposerKey', cascade=True) title = StringCol() class TestWorkKey2(SQLObject): title = StringCol() class TestOtherColumn(SQLObject): key1 = ForeignKey('TestComposerKey', default=None) key2 = ForeignKey('TestComposerKey', refColumn='id2', default=None) def test1(): setupClass([TestComposerKey, TestWorkKey]) c = TestComposerKey(name='Mahler, Gustav') w1 = TestWorkKey(composer=c, title='Symphony No. 9') w2 = TestWorkKey(composer=None, title=None) # Select by usual way s = TestWorkKey.selectBy(composerID=c.id, title='Symphony No. 9') assert s.count() == 1 assert s[0]==w1 # selectBy object.id s = TestWorkKey.selectBy(composer=c.id, title='Symphony No. 9') assert s.count() == 1 assert s[0]==w1 # selectBy object s = TestWorkKey.selectBy(composer=c, title='Symphony No. 9') assert s.count() == 1 assert s[0]==w1 # selectBy id s = TestWorkKey.selectBy(id=w1.id) assert s.count() == 1 assert s[0]==w1 # is None handled correctly? s = TestWorkKey.selectBy(composer=None, title=None) assert s.count() == 1 assert s[0]==w2 s = TestWorkKey.selectBy() assert s.count() == 2 # select with objects s = TestWorkKey.select(TestWorkKey.q.composerID==c.id) assert s.count() == 1 assert s[0]==w1 s = TestWorkKey.select(TestWorkKey.q.composer==c.id) assert s.count() == 1 assert s[0]==w1 s = TestWorkKey.select(TestWorkKey.q.composerID==c) assert s.count() == 1 assert s[0]==w1 s = TestWorkKey.select(TestWorkKey.q.composer==c) assert s.count() == 1 assert s[0]==w1 s = TestWorkKey.select((TestWorkKey.q.composer==c) & \ (TestWorkKey.q.title=='Symphony No. 9')) assert s.count() == 1 assert s[0]==w1 def test2(): TestWorkKey._connection = getConnection() InstalledTestDatabase.drop(TestWorkKey) setupClass([TestComposerKey, TestWorkKey2], force=True) TestWorkKey2.sqlmeta.addColumn(ForeignKey('TestComposerKey'), changeSchema=True) def test_otherColumn(): setupClass([TestComposerKey, TestOtherColumn]) test_composer1 = TestComposerKey(name='Test1') test_composer2 = TestComposerKey(name='Test2', id2=2) test_fkey = TestOtherColumn(key1=test_composer1) test_other = TestOtherColumn(key2=test_composer2.id2) getConnection().cache.clear() assert test_fkey.key1 == test_composer1 assert test_other.key2 == test_composer2 SQLObject-1.5.2/sqlobject/tests/test_sqlbuilder_dbspecific.py0000644000175000017500000000335110616077605023751 0ustar phdphd00000000000000from sqlobject import * from sqlobject.sqlbuilder import * from sqlobject.tests.dbtest import * ''' Going to test that complex sqlbuilder constructions are never prematurely stringified. A straight-forward approach is to use Bools, since postgresql wants special formatting in queries. The test is whether a call to sqlrepr(x, 'postgres') includes the appropriate bool formatting throughout. 
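For example (an illustrative sketch, not one of the tests below; SBButton and sqlrepr are the names defined/imported in this module):

    clause = SBButton.q.activated == True
    sqlrepr(clause, 'postgres')   # should contain the PostgreSQL boolean literal 't'
    str(clause)                   # plain str() knows nothing about the backend

checkCount() below automates exactly this kind of comparison.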
''' class SBButton(SQLObject): activated = BoolCol() def makeClause(): return SBButton.q.activated==True def makeSelect(): return Select(SBButton.q.id, clause=makeClause()) def checkCount(q, c, msg=''): print "STRING:", str(q) print "POSTGR:", sqlrepr(q, 'postgres') assert sqlrepr(q, 'postgres').count("'t'") == c and sqlrepr(q, 'postgres') != str(q), msg def testSimple(): setupClass(SBButton) yield checkCount, makeClause(), 1 yield checkCount, makeSelect(), 1 def testMiscOps(): setupClass(SBButton) yield checkCount, AND(makeClause(), makeClause()), 2 yield checkCount, AND(makeClause(), EXISTS(makeSelect())), 2 def testAliased(): setupClass(SBButton) b = Alias(makeSelect(), 'b') yield checkCount, b, 1 yield checkCount, Select(b.q.id), 1 # Table1 & Table2 are treated individually in joins yield checkCount, JOIN(None, b), 1 yield checkCount, JOIN(b, SBButton), 1 yield checkCount, JOIN(SBButton, b), 1 yield checkCount, LEFTJOINOn(None, b, SBButton.q.id==b.q.id), 1 yield checkCount, LEFTJOINOn(b, SBButton, SBButton.q.id==b.q.id), 1 yield checkCount, LEFTJOINOn(SBButton, b, SBButton.q.id==b.q.id), 1 def testTablesUsedSResults(): setupClass(SBButton) yield checkCount, SBButton.select(makeClause()).queryForSelect(), 1 SQLObject-1.5.2/sqlobject/tests/test_string_id.py0000644000175000017500000000306011453747054021411 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * ######################################## ## String ID test ######################################## class SOStringID(SQLObject): class sqlmeta(sqlmeta): table = 'so_string_id' idType = str val = StringCol(alternateID=True) mysqlCreate = """ CREATE TABLE IF NOT EXISTS so_string_id ( id VARCHAR(50) PRIMARY KEY, val TEXT ) """ postgresCreate = """ CREATE TABLE so_string_id ( id VARCHAR(50) PRIMARY KEY, val TEXT ) """ sybaseCreate = """ CREATE TABLE so_string_id ( id VARCHAR(50) UNIQUE, val VARCHAR(50) NULL ) """ firebirdCreate = """ CREATE TABLE so_string_id ( id VARCHAR(50) NOT NULL PRIMARY KEY, val BLOB SUB_TYPE TEXT ) """ mssqlCreate = """ CREATE TABLE so_string_id ( id VARCHAR(50) PRIMARY KEY, val varchar(4000) ) """ sqliteCreate = postgresCreate mysqlDrop = """ DROP TABLE IF EXISTS so_string_id """ postgresDrop = """ DROP TABLE so_string_id """ sqliteDrop = postgresDrop firebirdDrop = postgresDrop mssqlDrop = postgresDrop def test_stringID(): setupClass(SOStringID) t1 = SOStringID(id='hey', val='whatever') t2 = SOStringID.byVal('whatever') assert t1 == t2 assert t1.val == t2.val assert t1.val == 'whatever' t1 = SOStringID(id='you', val='nowhere') t2 = SOStringID.get('you') assert t1 == t2 assert t1.val == t2.val assert t1.val == 'nowhere' SQLObject-1.5.2/sqlobject/tests/test_sqlbuilder.py0000644000175000017500000000540211664162515021574 0ustar phdphd00000000000000from sqlobject import * from sqlobject.sqlbuilder import * from sqlobject.tests.dbtest import * class TestSQLBuilder(SQLObject): name = StringCol() value = IntCol() def test_Select(): setupClass(TestSQLBuilder) select1 = Select([const.id, func.MAX(const.salary)], staticTables=['employees']) assert sqlrepr(select1) == 'SELECT id, MAX(salary) FROM employees' select2 = Select([TestSQLBuilder.q.name, TestSQLBuilder.q.value]) assert sqlrepr(select2) == 'SELECT test_sql_builder.name, test_sql_builder.value FROM test_sql_builder' union = Union(select1, select2) assert sqlrepr(union) == 'SELECT id, MAX(salary) FROM employees UNION SELECT test_sql_builder.name, test_sql_builder.value FROM test_sql_builder' union = 
Union(TestSQLBuilder.select().queryForSelect()) assert sqlrepr(union) == 'SELECT test_sql_builder.id, test_sql_builder.name, test_sql_builder.value FROM test_sql_builder WHERE 1 = 1' def test_empty_AND(): assert AND() == None assert AND(True) == True # sqlrepr() is needed because AND() returns an SQLExpression that overrides # comparison. The following # AND('x', 'y') == "foo bar" # is True! (-: Eeek! assert sqlrepr(AND(1, 2)) == sqlrepr(SQLOp("AND", 1, 2)) == "((1) AND (2))" assert sqlrepr(AND(1, 2, '3'), "sqlite") == \ sqlrepr(SQLOp("AND", 1, SQLOp("AND", 2, '3')), "sqlite") == \ "((1) AND ((2) AND ('3')))" def test_modulo(): setupClass(TestSQLBuilder) assert sqlrepr(TestSQLBuilder.q.value % 2 == 0, 'mysql') == \ "((MOD(test_sql_builder.value, 2)) = (0))" assert sqlrepr(TestSQLBuilder.q.value % 2 == 0, 'sqlite') == \ "(((test_sql_builder.value) % (2)) = (0))" def test_str_or_sqlrepr(): select = Select(['id', 'name'], staticTables=['employees'], where='value>0', orderBy='id') assert sqlrepr(select, 'sqlite') == \ 'SELECT id, name FROM employees WHERE value>0 ORDER BY id' select = Select(['id', 'name'], staticTables=['employees'], where='value>0', orderBy='id', lazyColumns=True) assert sqlrepr(select, 'sqlite') == \ 'SELECT id FROM employees WHERE value>0 ORDER BY id' insert = Insert('employees', values={'id': 1, 'name': 'test'}) assert sqlrepr(insert, 'sqlite') == \ "INSERT INTO employees (id, name) VALUES (1, 'test')" update = Update('employees', {'name': 'test'}, where='id=1') assert sqlrepr(update, 'sqlite') == \ "UPDATE employees SET name='test' WHERE id=1" delete = Delete('employees', where='id=1') assert sqlrepr(delete, 'sqlite') == \ "DELETE FROM employees WHERE id=1" raises(TypeError, Delete, 'employees') delete = Delete('employees', where=None) assert sqlrepr(delete, 'sqlite') == \ "DELETE FROM employees" SQLObject-1.5.2/sqlobject/tests/test_slice.py0000644000175000017500000000247411611114522020516 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * ######################################## ## Slicing tests ######################################## class Counter(SQLObject): number = IntCol(notNull=True) class TestSlice: def setup_method(self, meth): setupClass(Counter) for i in range(100): Counter(number=i) def counterEqual(self, counters, value): if not supports('limitSelect'): return assert [c.number for c in counters] == value def test_slice(self): self.counterEqual( Counter.select(None, orderBy='number'), range(100)) self.counterEqual( Counter.select(None, orderBy='number')[10:20], range(10, 20)) self.counterEqual( Counter.select(None, orderBy='number')[20:30][:5], range(20, 25)) self.counterEqual( Counter.select(None, orderBy='number')[20:30][1:5], range(21, 25)) self.counterEqual( Counter.select(None, orderBy='number')[:-10], range(0, 90)) self.counterEqual( Counter.select(None, orderBy='number', reversed=True), range(99, -1, -1)) self.counterEqual( Counter.select(None, orderBy='-number'), range(99, -1, -1)) SQLObject-1.5.2/sqlobject/tests/test_parse_uri.py0000644000175000017500000001140311533226272021411 0ustar phdphd00000000000000import os from sqlobject.dbconnection import DBConnection from sqlobject.sqlite.sqliteconnection import SQLiteConnection ######################################## ## Test _parseURI ######################################## def test_parse(): _parseURI = DBConnection._parseURI user, password, host, port, path, args = _parseURI("mysql://host/database") assert user is None assert password is None assert host == "host" 
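    # The URIs exercised in this test follow the general form
    #   scheme://user:password@host:port/database?param=value
    # with user and password percent-decoded by _parseURI (e.g. pass%20word ->
    # "pass word", us%3Aer -> "us:er") and query arguments returned in the args dict.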
assert port is None assert path == "/database" assert args == {} user, password, host, port, path, args = _parseURI("mysql://user:pass%20word@host/database?unix_socket=/var/mysql/socket") assert user == "user" assert password == "pass word" assert host == "host" assert port is None assert path == "/database" assert args == {"unix_socket": "/var/mysql/socket"} user, password, host, port, path, args = _parseURI("postgres://user@host/database") assert user == "user" assert password is None assert host == "host" assert port is None assert path == "/database" assert args == {} user, password, host, port, path, args = _parseURI("postgres://host:5432/database") assert user is None assert password is None assert host == "host" assert port == 5432 assert path == "/database" assert args == {} user, password, host, port, path, args = _parseURI("postgres:///full/path/to/socket/database") assert user is None assert password is None assert host is None assert port is None assert path == "/full/path/to/socket/database" assert args == {} user, password, host, port, path, args = _parseURI("postgres://us%3Aer:p%40ssword@host/database") assert user == "us:er" assert password == "p@ssword" assert host == "host" assert port is None assert path == "/database" assert args == {} user, password, host, port, path, args = _parseURI("sqlite:///full/path/to/database") assert user is None assert password is None assert host is None assert port is None assert path == "/full/path/to/database" assert args == {} user, password, host, port, path, args = _parseURI("sqlite:/:memory:") assert user is None assert password is None assert host is None assert port is None assert path == "/:memory:" assert args == {} if os.name == 'nt': user, password, host, port, path, args = _parseURI("sqlite:/C|/full/path/to/database") assert user is None assert password is None assert host is None assert port is None assert path == "/C:/full/path/to/database" assert args == {} user, password, host, port, path, args = _parseURI("sqlite:///C:/full/path/to/database") assert user is None assert password is None assert host is None assert port is None assert path == "/C:/full/path/to/database" assert args == {} def test_uri(): connection = DBConnection() connection.close = lambda: None connection.dbName, connection.host, connection.port, connection.user, connection.password, connection.db = 'mysql', 'host', None, None, None, 'database' assert connection.uri() == "mysql://host/database" connection.dbName, connection.host, connection.port, connection.user, connection.password, connection.db = 'mysql', 'host', None, 'user', 'pass word', 'database' assert connection.uri() == "mysql://user:pass%20word@host/database" connection.dbName, connection.host, connection.port, connection.user, connection.password, connection.db = 'postgres', 'host', None, 'user', None, 'database' assert connection.uri() == "postgres://user@host/database" connection.dbName, connection.host, connection.port, connection.user, connection.password, connection.db = 'postgres', 'host', 5432, None, None, 'database' assert connection.uri() == "postgres://host:5432/database" connection.dbName, connection.host, connection.port, connection.user, connection.password, connection.db = 'postgres', None, None, None, None, '/full/path/to/socket/database' assert connection.uri() == "postgres:///full/path/to/socket/database" connection.dbName, connection.host, connection.port, connection.user, connection.password, connection.db = 'postgres', 'host', None, 'us:er', 'p@ssword', 'database' assert 
connection.uri() == "postgres://us%3Aer:p%40ssword@host/database" connection = SQLiteConnection(None) connection.filename = '/full/path/to/database' assert connection.uri() == "sqlite:///full/path/to/database" connection.filename = ':memory:' assert connection.uri() == "sqlite:/:memory:" if os.name == 'nt': connection.filename = 'C:/full/path/to/database' assert connection.uri() == "sqlite:///C:/full/path/to/database" SQLObject-1.5.2/sqlobject/tests/test_default_style.py0000644000175000017500000000371311037642601022266 0ustar phdphd00000000000000""" Test the default styles, to guarantee consistency. """ from sqlobject import * from sqlobject.tests.dbtest import * from sqlobject.styles import Style, MixedCaseUnderscoreStyle, MixedCaseStyle #hash of styles versus the database names resulting from 'columns' below columns = ["ABCUpper", "abc_lower", "ABCamelCaseColumn"] styles = {Style:columns, MixedCaseUnderscoreStyle:["abc_upper", "abc_lower", "ab_camel_case_column"], MixedCaseStyle:["ABCUpper", "Abc_lower", "ABCamelCaseColumn"], } #hash of styles versus the database names resulting from a foreign key named 'FKey' fkey = ForeignKey("DefaultStyleTest", name="FKey") fkeys = {Style:"FKeyID", MixedCaseUnderscoreStyle:"f_key_id", MixedCaseStyle:"FKeyID", } def make_columns(): global columns columns = [] for col_name in columns: columns.append(StringCol(name=col_name, length=10)) def do_col_test(DefaultStyleTest, style, dbnames): DefaultStyleTest.sqlmeta.style = style() for col, old_dbname in zip(columns, dbnames): DefaultStyleTest.sqlmeta.addColumn(col) try: new_dbname = DefaultStyleTest.sqlmeta.columns[col.name].dbName assert new_dbname == old_dbname finally: if col.name in DefaultStyleTest.sqlmeta.columns: DefaultStyleTest.sqlmeta.delColumn(col) def do_fkey_test(DefaultStyleTest, style, dbname): DefaultStyleTest.sqlmeta.style = style() DefaultStyleTest.sqlmeta.addColumn(fkey) try: assert DefaultStyleTest.sqlmeta.columns.keys()[0] == "FKeyID" assert DefaultStyleTest.sqlmeta.columns.values()[0].dbName == dbname finally: DefaultStyleTest.sqlmeta.delColumn(fkey) class DefaultStyleTest(SQLObject): pass def test_default_styles(): make_columns() for style in styles: yield do_col_test, DefaultStyleTest, style, styles[style] yield do_fkey_test, DefaultStyleTest, style, fkeys[style] SQLObject-1.5.2/sqlobject/tests/test_inheritance.py0000644000175000017500000000113710372665117021721 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * ######################################## ## Inheritance ######################################## class Super(SQLObject): name = StringCol(length=10) class Sub(Super): name2 = StringCol(length=10) def test_super(): setupClass(Super) setupClass(Sub) s1 = Super(name='one') s2 = Super(name='two') s3 = Super.get(s1.id) assert s1 == s3 def test_sub(): setupClass(Super) setupClass(Sub) s1 = Sub(name='one', name2='1') s2 = Sub(name='two', name2='2') s3 = Sub.get(s1.id) assert s1 == s3 SQLObject-1.5.2/sqlobject/tests/test_pickle.py0000644000175000017500000000225612150430033020661 0ustar phdphd00000000000000import pickle from sqlobject import * from sqlobject.tests.dbtest import * ######################################## ## Pickle instances ######################################## class TestPickle(SQLObject): question = StringCol() answer = IntCol() test_question = 'The Ulimate Question of Life, the Universe and Everything' test_answer = 42 def test_pickleCol(): setupClass(TestPickle) connection = TestPickle._connection test = 
TestPickle(question=test_question, answer=test_answer) pickle_data = pickle.dumps(test, pickle.HIGHEST_PROTOCOL) connection.cache.clear() test = pickle.loads(pickle_data) test2 = connection.cache.tryGet(test.id, TestPickle) assert test2 is test assert test.question == test_question assert test.answer == test_answer if (connection.dbName == 'sqlite') and connection._memory: return # The following test requires a different connection test = TestPickle.get(test.id, connection=getConnection(registry='')) # to make a different DB URI # and open another connection raises(pickle.PicklingError, pickle.dumps, test, pickle.HIGHEST_PROTOCOL) SQLObject-1.5.2/sqlobject/tests/test_perConnection.py0000644000175000017500000000111112034057256022222 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * ######################################## ## Per-instance connection ######################################## class TestPerConnection(SQLObject): test = StringCol() def test_perConnection(): connection = getConnection() TestPerConnection.dropTable(connection=connection, ifExists=True) TestPerConnection.createTable(connection=connection) TestPerConnection(test='test', connection=connection) assert len(list(TestPerConnection.select(TestPerConnection.q.test=='test', connection=connection))) == 1 SQLObject-1.5.2/sqlobject/tests/test_auto.py0000644000175000017500000001450411645055424020400 0ustar phdphd00000000000000from datetime import datetime now = datetime.now from sqlobject import * from sqlobject.tests.dbtest import * from sqlobject import classregistry from py.test import raises ######################################## ## Dynamic column tests ######################################## class Person(SQLObject): class sqlmeta: defaultOrder = 'name' name = StringCol(length=100, dbName='name_col') class Phone(SQLObject): class sqlmeta: defaultOrder = 'phone' phone = StringCol(length=12) class TestPeople: def setup_method(self, meth): setupClass(Person) setupClass(Phone) for n in ['jane', 'tim', 'bob', 'jake']: Person(name=n) for p in ['555-555-5555', '555-394-2930', '444-382-4854']: Phone(phone=p) def test_defaultOrder(self): assert (list(Person.select('all')) == list(Person.select('all', orderBy=Person.sqlmeta.defaultOrder))) def test_dynamicColumn(self): nickname = StringCol('nickname', length=10) Person.sqlmeta.addColumn(nickname, changeSchema=True) n = Person(name='robert', nickname='bob') assert ([p.name for p in Person.select('all')] == ['bob', 'jake', 'jane', 'robert', 'tim']) Person.sqlmeta.delColumn(nickname, changeSchema=True) def test_dynamicJoin(self): col = KeyCol('person', foreignKey='Person') Phone.sqlmeta.addColumn(col, changeSchema=True) join = MultipleJoin('Phone') Person.sqlmeta.addJoin(join) for phone in Phone.select('all'): if phone.phone.startswith('555'): phone.person = Person.selectBy(name='tim')[0] else: phone.person = Person.selectBy(name='bob')[0] l = [p.phone for p in Person.selectBy(name='tim')[0].phones] l.sort() assert l == ['555-394-2930', '555-555-5555'] Phone.sqlmeta.delColumn(col, changeSchema=True) Person.sqlmeta.delJoin(join) def _test_collidingName(self): class CollidingName(SQLObject): expire = StringCol() def test_collidingName(self): raises(AssertionError, Person.sqlmeta.addColumn, StringCol(name="name")) raises(AssertionError, Person.sqlmeta.addColumn, StringCol(name="_init")) raises(AssertionError, Person.sqlmeta.addColumn, StringCol(name="expire")) raises(AssertionError, Person.sqlmeta.addColumn, StringCol(name="set")) 
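        # The names above collide with attributes the class already exposes:
        # "name" is an existing column on Person, while "_init", "expire" and
        # "set" shadow built-in SQLObject instance methods, so addColumn() is
        # expected to refuse them with an AssertionError.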
raises(AssertionError, self._test_collidingName) ######################################## ## Auto class generation ######################################## class TestAuto: mysqlCreate = """ CREATE TABLE IF NOT EXISTS auto_test ( auto_id INT AUTO_INCREMENT PRIMARY KEY, first_name VARCHAR(100), last_name VARCHAR(200) NOT NULL, age INT DEFAULT NULL, created DATETIME NOT NULL, happy char(1) DEFAULT 'Y' NOT NULL, long_field TEXT, wannahavefun TINYINT DEFAULT 0 NOT NULL ) """ postgresCreate = """ CREATE TABLE auto_test ( auto_id SERIAL PRIMARY KEY, first_name VARCHAR(100), last_name VARCHAR(200) NOT NULL, age INT DEFAULT 0, created TIMESTAMP NOT NULL, happy char(1) DEFAULT 'Y' NOT NULL, long_field TEXT, wannahavefun BOOL DEFAULT FALSE NOT NULL ) """ rdbhostCreate = """ CREATE TABLE auto_test ( auto_id SERIAL PRIMARY KEY, first_name VARCHAR(100), last_name VARCHAR(200) NOT NULL, age INT DEFAULT 0, created VARCHAR(40) NOT NULL, happy char(1) DEFAULT 'Y' NOT NULL, long_field TEXT, wannahavefun BOOL DEFAULT FALSE NOT NULL ) """ sqliteCreate = """ CREATE TABLE auto_test ( auto_id INTEGER PRIMARY KEY AUTOINCREMENT , first_name VARCHAR(100), last_name VARCHAR(200) NOT NULL, age INT DEFAULT NULL, created DATETIME NOT NULL, happy char(1) DEFAULT 'Y' NOT NULL, long_field TEXT, wannahavefun INT DEFAULT 0 NOT NULL ) """ sybaseCreate = """ CREATE TABLE auto_test ( auto_id integer, first_name VARCHAR(100), last_name VARCHAR(200) NOT NULL, age INT DEFAULT 0, created DATETIME NOT NULL, happy char(1) DEFAULT 'Y' NOT NULL, long_field TEXT, wannahavefun BIT default(0) NOT NULL ) """ mssqlCreate = """ CREATE TABLE auto_test ( auto_id int identity(1,1), first_name VARCHAR(100), last_name VARCHAR(200) NOT NULL, age INT DEFAULT 0, created DATETIME NOT NULL, happy char(1) DEFAULT 'Y' NOT NULL, long_field TEXT, wannahavefun BIT default(0) NOT NULL ) """ mysqlDrop = """ DROP TABLE IF EXISTS auto_test """ postgresDrop = """ DROP TABLE auto_test """ sqliteDrop = sybaseDrop = mssqlDrop = rdbhostDrop = postgresDrop def setup_method(self, meth): conn = getConnection() dbName = conn.dbName creator = getattr(self, dbName + 'Create', None) if creator: conn.query(creator) def teardown_method(self, meth): conn = getConnection() dbName = conn.dbName dropper = getattr(self, dbName + 'Drop', None) if dropper: conn.query(dropper) def test_classCreate(self): class AutoTest(SQLObject): _connection = getConnection() class sqlmeta(sqlmeta): idName = 'auto_id' fromDatabase = True john = AutoTest(firstName='john', lastName='doe', age=10, created=now(), wannahavefun=False, longField='x'*1000) jane = AutoTest(firstName='jane', lastName='doe', happy='N', created=now(), wannahavefun=True, longField='x'*1000) assert not john.wannahavefun assert jane.wannahavefun assert john.longField == 'x'*1000 assert jane.longField == 'x'*1000 del classregistry.registry( AutoTest.sqlmeta.registry).classes['AutoTest'] columns = AutoTest.sqlmeta.columns assert columns["lastName"].dbName == "last_name" assert columns["wannahavefun"].dbName == "wannahavefun" SQLObject-1.5.2/sqlobject/tests/test_sqlobject_admin.py0000644000175000017500000000211512201741025022545 0ustar phdphd00000000000000""" These Tests are not enabled yet, but here are some working examples of using createSQL so far. 
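As the classes below illustrate, sqlmeta.createSQL accepts several forms: a
single SQL string, a list of statements, or a dictionary keyed by database
backend name (each value again being a string or a list of strings). The extra
SQL is intended to be run after the table itself has been created.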
""" from sqlobject import * class Test1(SQLObject): class sqlmeta: createSQL = "CREATE SEQUENCE db_test1_seq;" test1 = StringCol() class Test2(SQLObject): class sqlmeta: createSQL = ["CREATE SEQUENCE db_test2_seq;", "ALTER TABLE test2 ADD CHECK(test2 != '');" ] test2 = StringCol() class Test3(SQLObject): class sqlmeta: createSQL = {'postgres': 'CREATE SEQUENCE db_test3_seq;', 'mysql': 'CREATE SEQUENCE db_test3_seq;'} test3 = StringCol() class Test4(SQLObject): class sqlmeta: createSQL = {'postgres': ['CREATE SEQUENCE db_test4_seq;', "ALTER TABLE test4 ADD CHECK(test4 != '');" ], 'mysql': 'CREATE SEQUENCE db_test4_seq;'} test4 = StringCol() class Test5(SQLObject): class sqlmeta: createSQL = {'mysql': 'CREATE SEQUENCE db_test5_seq;'} test5 = StringCol() SQLObject-1.5.2/sqlobject/tests/test_sqlbuilder_importproxy.py0000644000175000017500000000253010763017457024272 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * from sqlobject.views import * from sqlobject.sqlbuilder import ImportProxy, Alias def testSimple(): nyi = ImportProxy('NotYetImported') x = nyi.q.name class NotYetImported(SQLObject): name = StringCol(dbName='a_name') y = nyi.q.name assert str(x) == 'not_yet_imported.a_name' assert str(y) == 'not_yet_imported.a_name' def testAddition(): nyi = ImportProxy('NotYetImported2') x = nyi.q.name+nyi.q.name class NotYetImported2(SQLObject): name = StringCol(dbName='a_name') assert str(x) == '((not_yet_imported2.a_name) + (not_yet_imported2.a_name))' def testOnView(): nyi = ImportProxy('NotYetImportedV') x = nyi.q.name class NotYetImported3(SQLObject): name = StringCol(dbName='a_name') class NotYetImportedV(ViewSQLObject): class sqlmeta: idName = NotYetImported3.q.id name = StringCol(dbName=NotYetImported3.q.name) assert str(x) == 'not_yet_imported_v.name' def testAlias(): nyi = ImportProxy('NotYetImported4') y = Alias(nyi, 'y') x = y.q.name class NotYetImported4(SQLObject): name = StringCol(dbName='a_name') assert str(y) == 'not_yet_imported4 y' assert tablesUsedSet(x, None) == set(['not_yet_imported4 y']) assert str(x) == 'y.a_name' SQLObject-1.5.2/sqlobject/tests/test_csvexport.py0000644000175000017500000000514710372665117021472 0ustar phdphd00000000000000from sqlobject import * from dbtest import * import csv from StringIO import StringIO from sqlobject.util.csvexport import export_csv, export_csv_zip def assert_export(result, *args, **kw): f = StringIO() kw['writer'] = f export_csv(*args, **kw) s = f.getvalue().replace('\r\n', '\n') if result.strip() != s.strip(): print '**Expected:' print result print '**Got:' print s assert result.strip() == s.strip() class SimpleCSV(SQLObject): name = StringCol() address = StringCol() address.csvTitle = 'Street Address' hidden = StringCol() hidden.noCSV = True class ComplexCSV(SQLObject): fname = StringCol() lname = StringCol() age = IntCol() extraCSVColumns = [('name', 'Full Name'), 'initials'] # initials should end up at the end then: csvColumnOrder = ['name', 'fname', 'lname', 'age'] def _get_name(self): return self.fname + ' ' + self.lname def _get_initials(self): return self.fname[0] + self.lname[0] def test_simple(): setupClass(SimpleCSV) SimpleCSV(name='Bob', address='432W', hidden='boo') SimpleCSV(name='Joe', address='123W', hidden='arg') assert_export("""\ name,Street Address Bob,432W Joe,123W """, SimpleCSV, orderBy='name') assert_export("""\ name,Street Address Joe,123W Bob,432W """, SimpleCSV, orderBy='address') assert_export("""\ name,Street Address Joe,123W """, SimpleCSV.selectBy(name='Joe')) def 
test_complex(): setupClass(ComplexCSV) ComplexCSV(fname='John', lname='Doe', age=40) ComplexCSV(fname='Bob', lname='Dylan', age=60) ComplexCSV(fname='Harriet', lname='Tubman', age=160) assert_export("""\ Full Name,fname,lname,age,initials John Doe,John,Doe,40,JD Bob Dylan,Bob,Dylan,60,BD Harriet Tubman,Harriet,Tubman,160,HT """, ComplexCSV, orderBy='lname') assert_export("""\ Full Name,fname,lname,age,initials Bob Dylan,Bob,Dylan,60,BD John Doe,John,Doe,40,JD """, ComplexCSV.select(ComplexCSV.q.lname.startswith('D'), orderBy='fname')) def test_zip(): # Just exercise tests, doesn't actually test results setupClass(SimpleCSV) SimpleCSV(name='Bob', address='432W', hidden='boo') SimpleCSV(name='Joe', address='123W', hidden='arg') setupClass(ComplexCSV) ComplexCSV(fname='John', lname='Doe', age=40) ComplexCSV(fname='Bob', lname='Dylan', age=60) ComplexCSV(fname='Harriet', lname='Tubman', age=160) s = export_csv_zip([SimpleCSV, ComplexCSV]) assert isinstance(s, str) and s s = export_csv_zip([SimpleCSV.selectBy(name='Bob'), (ComplexCSV, list(ComplexCSV.selectBy(fname='John')))]) SQLObject-1.5.2/sqlobject/tests/test_create_drop.py0000644000175000017500000000166210372665117021722 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * class TestCreateDrop(SQLObject): class sqlmeta(sqlmeta): idName = 'test_id_here' table = 'test_create_drop_table' name = StringCol() number = IntCol() time = DateTimeCol() short = StringCol(length=10) blobcol = BLOBCol() def test_create_drop(): conn = getConnection() TestCreateDrop.setConnection(conn) TestCreateDrop.dropTable(ifExists=True) assert not conn.tableExists(TestCreateDrop.sqlmeta.table) TestCreateDrop.createTable(ifNotExists=True) assert conn.tableExists(TestCreateDrop.sqlmeta.table) TestCreateDrop.createTable(ifNotExists=True) assert conn.tableExists(TestCreateDrop.sqlmeta.table) TestCreateDrop.dropTable(ifExists=True) assert not conn.tableExists(TestCreateDrop.sqlmeta.table) TestCreateDrop.dropTable(ifExists=True) assert not conn.tableExists(TestCreateDrop.sqlmeta.table) SQLObject-1.5.2/sqlobject/tests/test_SQLRelatedJoin.py0000644000175000017500000000332312034270727022203 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * class Fighter(SQLObject): class sqlmeta: idName='fighter_id' # test on a non-standard way name = StringCol() tourtments = RelatedJoin('Tourtment') class Tourtment(SQLObject): class sqlmeta: table='competition' # test on a non-standard way name = StringCol() fightersAsList = RelatedJoin('Fighter') fightersAsSResult = SQLRelatedJoin('Fighter') def createAllTables(): setupClass(Fighter) setupClass(Tourtment) def test_1(): createAllTables() # create some tourtments t1=Tourtment(name='Tourtment #1') t2=Tourtment(name='Tourtment #2') t3=Tourtment(name='Tourtment #3') # create some fighters gokou=Fighter(name='gokou') vegeta=Fighter(name='vegeta') gohan=Fighter(name='gohan') trunks=Fighter(name='trunks') # relating them t1.addFighter(gokou) t1.addFighter(vegeta) t1.addFighter(gohan) t2.addFighter(gokou) t2.addFighter(vegeta) t2.addFighter(trunks) t3.addFighter(gohan) t3.addFighter(trunks) # do some selects for i, j in zip(t1.fightersAsList, t1.fightersAsSResult): assert i is j assert len(t2.fightersAsList) == t2.fightersAsSResult.count() def test_related_join_transaction(): if not supports('transactions'): return createAllTables() trans = Tourtment._connection.transaction() try: t1=Tourtment(name='Tourtment #1', connection=trans) t1.addFighter(Fighter(name='Jim', 
connection=trans)) assert t1.fightersAsSResult.count() == 1 assert t1.fightersAsSResult[0]._connection == trans finally: trans.commit(True) Tourtment._connection.autoCommit = True SQLObject-1.5.2/sqlobject/tests/test_unicode.py0000644000175000017500000000655512203476067021066 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * ######################################## ## Unicode columns ######################################## class TestUnicode(SQLObject): count = IntCol(alternateID=True) col1 = UnicodeCol(alternateID=True, length=100) col2 = UnicodeCol(dbEncoding='latin1') data = [u'\u00f0', u'test', 'ascii test'] items = [] def setup(): global items items = [] setupClass(TestUnicode) if TestUnicode._connection.dbName == 'postgres': TestUnicode._connection.query('SET client_encoding TO latin1') for i, s in enumerate(data): items.append(TestUnicode(count=i, col1=s, col2=s)) def test_create(): setup() for s, item in zip(data, items): assert item.col1 == s assert item.col2 == s conn = TestUnicode._connection rows = conn.queryAll(""" SELECT count, col1, col2 FROM test_unicode ORDER BY count """) for count, col1, col2 in rows: assert data[count].encode('utf-8') == col1 assert data[count].encode('latin1') == col2 def _test_select(): for i, value in enumerate(data): rows = list(TestUnicode.select(TestUnicode.q.col1 == value)) assert len(rows) == 1 rows = list(TestUnicode.select(TestUnicode.q.col2 == value)) assert len(rows) == 1 rows = list(TestUnicode.select(AND( TestUnicode.q.col1 == value, TestUnicode.q.col2 == value ))) assert len(rows) == 1 rows = list(TestUnicode.selectBy(col1=value)) assert len(rows) == 1 rows = list(TestUnicode.selectBy(col2=value)) assert len(rows) == 1 rows = list(TestUnicode.selectBy(col1=value, col2=value)) assert len(rows) == 1 row = TestUnicode.byCol1(value) assert row.count == i rows = list(TestUnicode.select(OR( TestUnicode.q.col1 == u'\u00f0', TestUnicode.q.col2 == u'test' ))) assert len(rows) == 2 rows = list(TestUnicode.selectBy(col1=u'\u00f0', col2=u'test')) assert len(rows) == 0 # starts/endswith/contains rows = list(TestUnicode.select(TestUnicode.q.col1.startswith("test"))) assert len(rows) == 1 rows = list(TestUnicode.select(TestUnicode.q.col1.endswith("test"))) assert len(rows) == 2 rows = list(TestUnicode.select(TestUnicode.q.col1.contains("test"))) assert len(rows) == 2 rows = list(TestUnicode.select(TestUnicode.q.col1.startswith(u"\u00f0"))) assert len(rows) == 1 rows = list(TestUnicode.select(TestUnicode.q.col1.endswith(u"\u00f0"))) assert len(rows) == 1 rows = list(TestUnicode.select(TestUnicode.q.col1.contains(u"\u00f0"))) assert len(rows) == 1 def test_select(): setup() _test_select() def test_dbEncoding(): setup() TestUnicode.sqlmeta.dbEncoding = 'utf-8' _test_select() TestUnicode.sqlmeta.dbEncoding = 'latin-1' raises(AssertionError, _test_select) TestUnicode.sqlmeta.dbEncoding = 'ascii' raises(UnicodeEncodeError, _test_select) TestUnicode.sqlmeta.dbEncoding = None TestUnicode._connection.dbEncoding = 'utf-8' _test_select() TestUnicode._connection.dbEncoding = 'latin-1' raises(AssertionError, _test_select) TestUnicode._connection.dbEncoding = 'ascii' raises(UnicodeEncodeError, _test_select) del TestUnicode.sqlmeta.dbEncoding TestUnicode._connection.dbEncoding = 'utf-8' SQLObject-1.5.2/sqlobject/tests/test_converters.py0000644000175000017500000001645511651572721021632 0ustar phdphd00000000000000from datetime import timedelta import sys from sqlobject.converters import registerConverter, sqlrepr, \ quote_str, 
unquote_str from sqlobject.sqlbuilder import SQLExpression, SQLObjectField, \ Select, Insert, Update, Delete, Replace, \ SQLTrueClauseClass, SQLConstant, SQLPrefix, SQLCall, SQLOp, \ _LikeQuoted class TestClass: def __repr__(self): return '' def TestClassConverter(value, db): return repr(value) registerConverter(TestClass, TestClassConverter) class NewTestClass: __metaclass__ = type def __repr__(self): return '' def NewTestClassConverter(value, db): return repr(value) registerConverter(NewTestClass, NewTestClassConverter) def _sqlrepr(self, db): return '<%s>' % self.__class__.__name__ SQLExpression.__sqlrepr__ = _sqlrepr ############################################################ ## Tests ############################################################ def test_simple_string(): assert sqlrepr('A String', 'firebird') == "'A String'" def test_string_newline(): assert sqlrepr('A String\nAnother', 'postgres') == "E'A String\\nAnother'" assert sqlrepr('A String\nAnother', 'sqlite') == "'A String\nAnother'" def test_string_tab(): assert sqlrepr('A String\tAnother', 'postgres') == "E'A String\\tAnother'" def test_string_r(): assert sqlrepr('A String\rAnother', 'postgres') == "E'A String\\rAnother'" def test_string_b(): assert sqlrepr('A String\bAnother', 'postgres') == "E'A String\\bAnother'" def test_string_000(): assert sqlrepr('A String\000Another', 'postgres') == "E'A String\\0Another'" def test_string_(): assert sqlrepr('A String\tAnother', 'postgres') == "E'A String\\tAnother'" assert sqlrepr('A String\'Another', 'firebird') == "'A String''Another'" def test_simple_unicode(): assert sqlrepr(u'A String', 'postgres') == "'A String'" def test_integer(): assert sqlrepr(10) == "10" def test_float(): assert sqlrepr(10.01) == "10.01" def test_none(): assert sqlrepr(None) == "NULL" def test_list(): assert sqlrepr(['one','two','three'], 'postgres') == "('one', 'two', 'three')" def test_tuple(): assert sqlrepr(('one','two','three'), 'postgres') == "('one', 'two', 'three')" def test_bool(): assert sqlrepr(True, 'postgres') == "'t'" assert sqlrepr(False, 'postgres') == "'f'" assert sqlrepr(True, 'mysql') == "1" assert sqlrepr(False, 'mysql') == "0" def test_datetime(): from datetime import datetime, date, time assert sqlrepr(datetime(2005, 7, 14, 13, 31, 2)) == "'2005-07-14 13:31:02'" assert sqlrepr(date(2005, 7, 14)) == "'2005-07-14'" assert sqlrepr(time(13, 31, 2)) == "'13:31:02'" # now dates before 1900 assert sqlrepr(datetime(1428, 7, 14, 13, 31, 2)) == "'1428-07-14 13:31:02'" assert sqlrepr(date(1428, 7, 14)) == "'1428-07-14'" def test_instance(): instance = TestClass() assert sqlrepr(instance) == repr(instance) def test_newstyle(): instance = NewTestClass() assert sqlrepr(instance) == repr(instance) def test_sqlexpr(): instance = SQLExpression() assert sqlrepr(instance) == repr(instance) def test_sqlobjectfield(): instance = SQLObjectField('test', 'test', 'test', None, None) assert sqlrepr(instance) == repr(instance) def test_select(): instance = Select('test') assert sqlrepr(instance, 'mysql') == "SELECT test" def test_insert(): # Single column, no keyword arguments. instance = Insert('test', [('test',)]) assert sqlrepr(instance, 'mysql') == "INSERT INTO test VALUES ('test')" # Multiple columns, no keyword arguments. instance2 = Insert('test', [('1st', '2nd', '3th', '4th')]) assert sqlrepr(instance2, 'postgres') == "INSERT INTO test VALUES ('1st', '2nd', '3th', '4th')" # Multiple rows, multiple columns, "valueList" keyword argument. 
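    # (valueList takes a sequence of row tuples, whereas the "values" keyword
    # used further down takes a single row; either way the rows are rendered
    # into one INSERT ... VALUES statement.)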
instance3 = Insert('test', valueList=[('a1', 'b1'), ('a2', 'b2'), ('a3', 'b3')]) assert sqlrepr(instance3, 'sqlite') == "INSERT INTO test VALUES ('a1', 'b1'), ('a2', 'b2'), ('a3', 'b3')" # Multiple columns, "values" keyword argument. instance4 = Insert('test', values=('v1', 'v2', 'v3')) assert sqlrepr(instance4, 'mysql') == "INSERT INTO test VALUES ('v1', 'v2', 'v3')" # Single column, "valueList" keyword argument. instance5 = Insert('test', valueList=[('v1',)]) assert sqlrepr(instance5, 'mysql') == "INSERT INTO test VALUES ('v1')" # Multiple rows, Multiple columns, template. instance6 = Insert('test', valueList=[('a1', 'b1'), ('a2', 'b2')], template=['col1', 'col2']) assert sqlrepr(instance6, 'mysql') == "INSERT INTO test (col1, col2) VALUES ('a1', 'b1'), ('a2', 'b2')" # Multiple columns, implicit template (dictionary value). instance7 = Insert('test', valueList=[{'col1': 'a1', 'col2': 'b1'}]) assert sqlrepr(instance7, 'mysql') == "INSERT INTO test (col2, col1) VALUES ('b1', 'a1')" # Multiple rows, Multiple columns, implicit template. instance8 = Insert('test', valueList=[{'col1': 'a1', 'col2': 'b1'}, {'col1': 'a2', 'col2': 'b2'}]) assert sqlrepr(instance8, 'mysql') == "INSERT INTO test (col2, col1) VALUES ('b1', 'a1'), ('b2', 'a2')" def test_update(): instance = Update('test', {'test':'test'}) assert sqlrepr(instance, 'mysql') == "UPDATE test SET test='test'" def test_delete(): instance = Delete('test', None) assert sqlrepr(instance, 'mysql') == "DELETE FROM test" def test_replace(): instance = Replace('test', {'test':'test'}) assert sqlrepr(instance, 'mysql') == "REPLACE test SET test='test'" def test_trueclause(): instance = SQLTrueClauseClass() assert sqlrepr(instance) == repr(instance) def test_op(): instance = SQLOp('and', 'this', 'that') assert sqlrepr(instance, 'mysql') == "(('this') AND ('that'))" def test_call(): instance = SQLCall('test', ('test',)) assert sqlrepr(instance, 'mysql') == "'test'('test')" def test_constant(): instance = SQLConstant('test') assert sqlrepr(instance) == repr(instance) def test_prefix(): instance = SQLPrefix('test', 'test') assert sqlrepr(instance, 'mysql') == "test 'test'" def test_dict(): assert sqlrepr({"key": "value"}, "sqlite") == "('key')" def test_sets(): try: set except NameError: pass else: assert sqlrepr(set([1])) == "(1)" if sys.version_info[:3] < (2, 6, 0): # Module sets was deprecated in Python 2.6 try: from sets import Set except ImportError: pass else: assert sqlrepr(Set([1])) == "(1)" def test_timedelta(): assert sqlrepr(timedelta(seconds=30*60)) == \ "INTERVAL '0 days 1800 seconds'" def test_quote_unquote_str(): assert quote_str('test%', 'postgres') == "'test%'" assert quote_str('test%', 'sqlite') == "'test%'" assert quote_str('test\%', 'postgres') == "E'test\\%'" assert quote_str('test\\%', 'sqlite') == "'test\%'" assert unquote_str("'test%'") == 'test%' assert unquote_str("'test\\%'") == 'test\\%' assert unquote_str("E'test\\%'") == 'test\\%' def test_like_quoted(): assert sqlrepr(_LikeQuoted('test'), 'postgres') == "'test'" assert sqlrepr(_LikeQuoted('test'), 'sqlite') == "'test'" assert sqlrepr(_LikeQuoted('test%'), 'postgres') == r"E'test\\%'" assert sqlrepr(_LikeQuoted('test%'), 'sqlite') == r"'test\%'" SQLObject-1.5.2/sqlobject/tests/test_new_joins.py0000644000175000017500000001172610372665117021430 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * ######################################## ## Joins ######################################## class PersonJoinerNew(SQLObject): name = 
StringCol(length=40, alternateID=True) addressJoiners = ManyToMany('AddressJoinerNew') class AddressJoinerNew(SQLObject): zip = StringCol(length=5, alternateID=True) personJoiners = ManyToMany('PersonJoinerNew') class ImplicitJoiningSONew(SQLObject): foo = ManyToMany('Bar') class ExplicitJoiningSONew(SQLObject): foo = OneToMany('Bar') class TestJoin: def setup_method(self, meth): setupClass(PersonJoinerNew) setupClass(AddressJoinerNew) for n in ['bob', 'tim', 'jane', 'joe', 'fred', 'barb']: PersonJoinerNew(name=n) for z in ['11111', '22222', '33333', '44444']: AddressJoinerNew(zip=z) def test_join(self): b = PersonJoinerNew.byName('bob') assert list(b.addressJoiners) == [] z = AddressJoinerNew.byZip('11111') b.addressJoiners.add(z) self.assertZipsEqual(b.addressJoiners, ['11111']) print str(z.personJoiners), repr(z.personJoiners) self.assertNamesEqual(z.personJoiners, ['bob']) z2 = AddressJoinerNew.byZip('22222') b.addressJoiners.add(z2) print str(b.addressJoiners) self.assertZipsEqual(b.addressJoiners, ['11111', '22222']) self.assertNamesEqual(z2.personJoiners, ['bob']) b.addressJoiners.remove(z) self.assertZipsEqual(b.addressJoiners, ['22222']) self.assertNamesEqual(z.personJoiners, []) def assertZipsEqual(self, zips, dest): assert [a.zip for a in zips] == dest def assertNamesEqual(self, people, dest): assert [p.name for p in people] == dest def test_joinAttributeWithUnderscores(self): # Make sure that the implicit setting of joinMethodName works assert hasattr(ImplicitJoiningSONew, 'foo') assert not hasattr(ImplicitJoiningSONew, 'bars') # And make sure explicit setting also works assert hasattr(ExplicitJoiningSONew, 'foo') assert not hasattr(ExplicitJoiningSONew, 'bars') class PersonJoinerNew2(SQLObject): name = StringCol('name', length=40, alternateID=True) addressJoiner2s = OneToMany('AddressJoinerNew2') class AddressJoinerNew2(SQLObject): class sqlmeta: defaultOrder = ['-zip', 'plus4'] zip = StringCol(length=5) plus4 = StringCol(length=4, default=None) personJoinerNew2 = ForeignKey('PersonJoinerNew2') class TestJoin2: def setup_method(self, meth): setupClass([PersonJoinerNew2, AddressJoinerNew2]) p1 = PersonJoinerNew2(name='bob') p2 = PersonJoinerNew2(name='sally') for z in ['11111', '22222', '33333']: a = AddressJoinerNew2(zip=z, personJoinerNew2=p1) #p1.addAddressJoinerNew2(a) AddressJoinerNew2(zip='00000', personJoinerNew2=p2) def test_basic(self): bob = PersonJoinerNew2.byName('bob') sally = PersonJoinerNew2.byName('sally') print bob.addressJoiner2s print bob assert len(list(bob.addressJoiner2s)) == 3 assert len(list(sally.addressJoiner2s)) == 1 bob.addressJoiner2s[0].destroySelf() assert len(list(bob.addressJoiner2s)) == 2 z = bob.addressJoiner2s[0] z.zip = 'xxxxx' id = z.id del z z = AddressJoinerNew2.get(id) assert z.zip == 'xxxxx' def test_defaultOrder(self): p1 = PersonJoinerNew2.byName('bob') assert ([i.zip for i in p1.addressJoiner2s] == ['33333', '22222', '11111']) _personJoiner3_getters = [] _personJoiner3_setters = [] class PersonJoinerNew3(SQLObject): name = StringCol('name', length=40, alternateID=True) addressJoinerNew3s = OneToMany('AddressJoinerNew3') class AddressJoinerNew3(SQLObject): zip = StringCol(length=5) personJoinerNew3 = ForeignKey('PersonJoinerNew3') def _get_personJoinerNew3(self): value = self._SO_get_personJoinerNew3() _personJoiner3_getters.append((self, value)) return value def _set_personJoinerNew3(self, value): self._SO_set_personJoinerNew3(value) _personJoiner3_setters.append((self, value)) class TestJoin3: def setup_method(self, meth): 
setupClass([PersonJoinerNew3, AddressJoinerNew3]) p1 = PersonJoinerNew3(name='bob') p2 = PersonJoinerNew3(name='sally') for z in ['11111', '22222', '33333']: a = AddressJoinerNew3(zip=z, personJoinerNew3=p1) AddressJoinerNew3(zip='00000', personJoinerNew3=p2) def test_accessors(self): assert len(list(_personJoiner3_getters)) == 0 assert len(list(_personJoiner3_setters)) == 4 bob = PersonJoinerNew3.byName('bob') for addressJoiner3 in bob.addressJoinerNew3s: addressJoiner3.personJoinerNew3 assert len(list(_personJoiner3_getters)) == 3 assert len(list(_personJoiner3_setters)) == 4 SQLObject-1.5.2/sqlobject/tests/test_setters.py0000644000175000017500000000125610372665117021123 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * class TestPlainAndNonPlainSetter(SQLObject): firstName = StringCol(length=50, dbName='fname_col', default=None) lastName = StringCol(length=50, dbName='lname_col', default=None) def _set_name(self, v): firstName, lastName = v.split() self.firstName = firstName self.lastName = lastName def _get_name(self): return "%s %s" % (self.firstName, self.lastName) def test_create(): setupClass(TestPlainAndNonPlainSetter) t = TestPlainAndNonPlainSetter(name='John Doe') assert t.firstName == 'John' assert t.lastName == 'Doe' assert t.name == 'John Doe'SQLObject-1.5.2/sqlobject/tests/test_lazy.py0000644000175000017500000001261211446440062020400 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * ######################################## ## Lazy updates ######################################## class Lazy(SQLObject): class sqlmeta: lazyUpdate = True name = StringCol() other = StringCol(default='nothing') third = StringCol(default='third') class TestLazyTest: def setup_method(self, meth): # All this stuff is so that we can track when the connection # does an actual update; we put in a new _SO_update method # that calls the original and sets an instance variable that # we can later check. 
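        # In other words, _SO_update is wrapped with a small hand-rolled spy:
        # _alternateUpdate() flips conn.didUpdate to True and then delegates to
        # the original method, and teardown_method() restores the original.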
setupClass(Lazy) self.conn = Lazy._connection self.conn.didUpdate = False self._oldUpdate = self.conn._SO_update newUpdate = ( lambda so, values, s=self, c=self.conn, o=self._oldUpdate: self._alternateUpdate(so, values, c, o)) self.conn._SO_update = newUpdate def teardown_method(self, meth): self.conn._SO_update = self._oldUpdate del self._oldUpdate def _alternateUpdate(self, so, values, conn, oldUpdate): conn.didUpdate = True return oldUpdate(so, values) def test_lazy(self): assert not self.conn.didUpdate obj = Lazy(name='tim') # We just did an insert, but not an update: assert not self.conn.didUpdate obj.set(name='joe') assert obj.sqlmeta.dirty assert obj.name == 'joe' assert not self.conn.didUpdate obj.syncUpdate() assert obj.name == 'joe' assert self.conn.didUpdate assert not obj.sqlmeta.dirty assert obj.name == 'joe' self.conn.didUpdate = False obj = Lazy(name='frank') obj.name = 'joe' assert not self.conn.didUpdate assert obj.sqlmeta.dirty assert obj.name == 'joe' obj.name = 'joe2' assert not self.conn.didUpdate assert obj.sqlmeta.dirty assert obj.name == 'joe2' obj.syncUpdate() assert obj.name == 'joe2' assert not obj.sqlmeta.dirty assert self.conn.didUpdate self.conn.didUpdate = False obj = Lazy(name='loaded') assert not obj.sqlmeta.dirty assert not self.conn.didUpdate assert obj.name == 'loaded' obj.name = 'unloaded' assert obj.sqlmeta.dirty assert obj.name == 'unloaded' assert not self.conn.didUpdate obj.sync() assert not obj.sqlmeta.dirty assert obj.name == 'unloaded' assert self.conn.didUpdate self.conn.didUpdate = False obj.name = 'whatever' assert obj.sqlmeta.dirty assert obj.name == 'whatever' assert not self.conn.didUpdate obj._SO_loadValue('name') assert obj.sqlmeta.dirty assert obj.name == 'whatever' assert not self.conn.didUpdate obj._SO_loadValue('other') assert obj.name == 'whatever' assert not self.conn.didUpdate obj.syncUpdate() assert self.conn.didUpdate self.conn.didUpdate = False # Now, check that get() doesn't screw # cached objects' validator state. obj_id = obj.id old_state = obj._SO_validatorState obj = Lazy.get(obj_id) assert not obj.sqlmeta.dirty assert not self.conn.didUpdate assert obj._SO_validatorState is old_state assert obj.name == 'whatever' obj.name = 'unloaded' assert obj.name == 'unloaded' assert obj.sqlmeta.dirty assert not self.conn.didUpdate # Fetch the object again with get() and # make sure sqlmeta.dirty is still set, as the # object should come from the cache. obj = Lazy.get(obj_id) assert obj.sqlmeta.dirty assert not self.conn.didUpdate assert obj.name == 'unloaded' obj.syncUpdate() assert self.conn.didUpdate assert not obj.sqlmeta.dirty self.conn.didUpdate = False # Then clear the cache, and try a get() # again, to make sure stuf like _SO_createdValues # is properly initialized. self.conn.cache.clear() obj = Lazy.get(obj_id) assert not obj.sqlmeta.dirty assert not self.conn.didUpdate assert obj.name == 'unloaded' obj.name = 'spongebob' assert obj.name == 'spongebob' assert obj.sqlmeta.dirty assert not self.conn.didUpdate obj.syncUpdate() assert self.conn.didUpdate assert not obj.sqlmeta.dirty self.conn.didUpdate = False obj = Lazy(name='last') assert not obj.sqlmeta.dirty obj.syncUpdate() assert not self.conn.didUpdate assert not obj.sqlmeta.dirty # Check that setting multiple values # actually works. 
This was broken # and just worked because we were testing # only one value at a time, so 'name' # had the right value after the for loop *wink* # Also, check that passing a name that is not # a valid column doesn't break, but instead # just does a plain setattr. obj.set(name='first', other='who', third='yes') assert obj.name == 'first' assert obj.other == 'who' assert obj.third == 'yes' assert obj.sqlmeta.dirty assert not self.conn.didUpdate obj.syncUpdate() assert self.conn.didUpdate assert not obj.sqlmeta.dirty SQLObject-1.5.2/sqlobject/tests/test_select_through.py0000644000175000017500000000336610616077605022455 0ustar phdphd00000000000000from sqlobject import * from sqlobject.sqlbuilder import * from sqlobject.tests.dbtest import * import py ''' Tests retrieving objects through a join/fk on a selectResults ''' class SRThrough1(SQLObject): three = ForeignKey('SRThrough3') twos = SQLMultipleJoin('SRThrough2', joinColumn='oneID') class SRThrough2(SQLObject): one = ForeignKey('SRThrough1') threes = SQLRelatedJoin('SRThrough3', addRemoveName='Three') class SRThrough3(SQLObject): name = StringCol() ones = SQLMultipleJoin('SRThrough1', joinColumn='threeID') twos = SQLRelatedJoin('SRThrough2') def setup_module(mod): setupClass([mod.SRThrough3, mod.SRThrough1, mod.SRThrough2]) threes = inserts(mod.SRThrough3, [('a',),('b',),('c',)], 'name') ones = inserts(mod.SRThrough1, [(threes[0].id,),(threes[0].id,),(threes[2].id,)], 'threeID') twos = inserts(mod.SRThrough2, [(ones[0].id,),(ones[1].id,),(ones[2].id,)], 'oneID') twos[0].addThree(threes[0]) twos[0].addThree(threes[1]) mod.threes = threes mod.twos = twos mod.ones = ones def testBadRef(): py.test.raises(AttributeError, 'threes[0].throughTo.four') def testThroughFK(): assert list(threes[0].ones.throughTo.three) == [threes[0]] def testThroughMultipleJoin(): assert list(threes[0].ones.throughTo.twos) == [twos[0], twos[1]] def testThroughRelatedJoin(): assert list(threes[0].twos.throughTo.threes) == [threes[0], threes[1]] assert list(SRThrough3.select(SRThrough3.q.id==threes[0].id).throughTo.twos) == list(threes[0].twos) def testThroughFKAndJoin(): assert list(threes[0].ones.throughTo.three.throughTo.twos) == [twos[0]]SQLObject-1.5.2/sqlobject/tests/test_NoneValuedResultItem.py0000644000175000017500000000151310372665117023504 0ustar phdphd00000000000000'''Test that selectResults handle NULL values from, for example, outer joins.''' from sqlobject import * from sqlobject.tests.dbtest import * class TestComposer(SQLObject): name = StringCol() class TestWork(SQLObject): class sqlmeta: idName = "work_id" composer = ForeignKey('TestComposer') title = StringCol() def test1(): setupClass([TestComposer, TestWork]) c = TestComposer(name='Mahler, Gustav') w = TestWork(composer=c, title='Symphony No. 
9') c2 = TestComposer(name='Bruckner, Anton') # but don't add any works for Bruckner # do a left join, a common use case that often involves NULL results s = TestWork.select(join=sqlbuilder.LEFTJOINOn(TestComposer, TestWork, TestComposer.q.id==TestWork.q.composerID)) assert tuple(s)==(w, None) SQLObject-1.5.2/sqlobject/tests/test_sorting.py0000644000175000017500000000505010514714624021107 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * class Names(SQLObject): class sqlmeta(sqlmeta): table = 'names_table' defaultOrder = ['lastName', 'firstName'] firstName = StringCol(length=30) lastName = StringCol(length=30) def setupNames(): setupClass(Names) inserts(Names, [('aj', 'baker'), ('joe', 'robbins'), ('tim', 'jackson'), ('joe', 'baker'), ('zoe', 'robbins')], schema='firstName lastName') def nameList(names): result = [] for name in names: result.append('%s %s' % (name.firstName, name.lastName)) return result def firstList(names): return [n.firstName for n in names] def test_defaultOrder(): setupNames() assert nameList(Names.select()) == \ ['aj baker', 'joe baker', 'tim jackson', 'joe robbins', 'zoe robbins'] def test_otherOrder(): setupNames() assert nameList(Names.select().orderBy(['firstName', 'lastName'])) == \ ['aj baker', 'joe baker', 'joe robbins', 'tim jackson', 'zoe robbins'] def test_untranslatedColumnOrder(): setupNames() assert nameList(Names.select().orderBy(['first_name', 'last_name'])) == \ ['aj baker', 'joe baker', 'joe robbins', 'tim jackson', 'zoe robbins'] def test_singleUntranslatedColumnOrder(): setupNames() assert firstList(Names.select().orderBy('firstName')) == \ ['aj', 'joe', 'joe', 'tim', 'zoe'] assert firstList(Names.select().orderBy('first_name')) == \ ['aj', 'joe', 'joe', 'tim', 'zoe'] assert firstList(Names.select().orderBy('-firstName')) == \ ['zoe', 'tim', 'joe', 'joe', 'aj'] assert firstList(Names.select().orderBy('-first_name')) == \ ['zoe', 'tim', 'joe', 'joe', 'aj'] assert firstList(Names.select().orderBy(Names.q.firstName)) == \ ['aj', 'joe', 'joe', 'tim', 'zoe'] assert firstList(Names.select().orderBy('firstName').reversed()) == \ ['zoe', 'tim', 'joe', 'joe', 'aj'] assert firstList(Names.select().orderBy('-firstName').reversed()) == \ ['aj', 'joe', 'joe', 'tim', 'zoe'] assert firstList(Names.select().orderBy(DESC(Names.q.firstName))) == \ ['zoe', 'tim', 'joe', 'joe', 'aj'] assert firstList(Names.select().orderBy(Names.q.firstName).reversed()) == \ ['zoe', 'tim', 'joe', 'joe', 'aj'] assert firstList(Names.select().orderBy(DESC(Names.q.firstName)).reversed()) == \ ['aj', 'joe', 'joe', 'tim', 'zoe'] SQLObject-1.5.2/sqlobject/tests/test_declarative.py0000644000175000017500000000363211561540724021712 0ustar phdphd00000000000000from sqlobject.declarative import * class A1(Declarative): a = 1 b = [] class A2(A1): a = 5 A3 = A2(b=5) def test_a_classes(): assert A1.a == 1 assert A1.singleton().a == 1 assert A1.b is A2.b assert A3.b == 5 assert A1.declarative_count == A1.singleton().declarative_count assert A1.declarative_count < A2.declarative_count assert A2.singleton() is not A1.singleton() assert A3.singleton().b == A3.b class B1(Declarative): attrs = [] def __classinit__(cls, new_attrs): Declarative.__classinit__(cls, new_attrs) cls.attrs = cls.add_attrs(cls.attrs, new_attrs) def __instanceinit__(self, new_attrs): Declarative.__instanceinit__(self, new_attrs) self.attrs = self.add_attrs(self.attrs, new_attrs) @staticmethod def add_attrs(old_attrs, new_attrs): old_attrs = old_attrs[:] for name in new_attrs.keys(): if (name in 
old_attrs or name.startswith('_') or name in ('add_attrs', 'declarative_count', 'attrs')): continue old_attrs.append(name) old_attrs.sort() return old_attrs c = 1 class B2(B1): g = 3 def __classinit__(cls, new_attrs): new_attrs['test'] = 'whatever' B1.__classinit__(cls, new_attrs) B3 = B2(c=5, d=3) B4 = B3(d=5) B5 = B1(a=1) def test_b_classes(): assert B1.attrs == ['c'] assert B1.c == 1 assert B2.attrs == ['c', 'g', 'test'] assert B3.d == 3 assert B4.d == 5 assert B5.a == 1 assert B5.attrs == ['a', 'c'] assert B3.attrs == ['c', 'd', 'g', 'test'] assert B4.attrs == ['c', 'd', 'g', 'test'] order = [B1, B1.singleton(), B2, B2.singleton(), B3, B3.singleton(), B4, B4.singleton(), B5, B5.singleton()] last = 0 for obj in order: assert obj.declarative_count >= last last = obj.declarative_count SQLObject-1.5.2/sqlobject/tests/test_indexes.py0000644000175000017500000000626112203134056021056 0ustar phdphd00000000000000from sqlobject import * from sqlobject.dberrors import * from sqlobject.tests.dbtest import * ######################################## ## Indexes ######################################## class SOIndex1(SQLObject): name = StringCol(length=100) number = IntCol() nameIndex = DatabaseIndex('name', unique=True) nameIndex2 = DatabaseIndex(name, number) nameIndex3 = DatabaseIndex({'column': name, 'length': 3}) class SOIndex2(SQLObject): name = StringCol(length=100) nameIndex = DatabaseIndex({'expression': 'lower(name)'}) def test_indexes_1(): setupClass(SOIndex1) n = 0 for name in 'blah blech boring yep yort snort'.split(): n += 1 SOIndex1(name=name, number=n) mod = SOIndex1._connection.module try: SOIndex1(name='blah', number=0) except ( mod.ProgrammingError, mod.IntegrityError, mod.OperationalError, mod.DatabaseError, ProgrammingError, IntegrityError, OperationalError, DatabaseError ): # expected pass else: assert 0, "Exception expected." 
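# test_indexes_2 below exercises an expression index (lower(name)); the
# supports('expressionIndex') guard skips it on backends that lack the feature
# (per the supportsMatrix in dbtest.py: mysql, sqlite, firebird, mssql).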
def test_indexes_2(): if not supports('expressionIndex'): return setupClass(SOIndex2) SOIndex2(name='') class PersonIndexGet(SQLObject): firstName = StringCol(length=100) lastName = StringCol(length=100) age = IntCol(alternateID=True) nameIndex = DatabaseIndex(firstName, lastName, unique=True) def test_index_get_1(): setupClass(PersonIndexGet, force=True) PersonIndexGet(firstName='Eric', lastName='Idle', age=62) PersonIndexGet(firstName='Terry', lastName='Gilliam', age=65) PersonIndexGet(firstName='John', lastName='Cleese', age=66) PersonIndexGet.get(1) PersonIndexGet.nameIndex.get('Terry', 'Gilliam') PersonIndexGet.nameIndex.get(firstName='John', lastName='Cleese') try: print PersonIndexGet.nameIndex.get(firstName='Graham', lastName='Chapman') except Exception, e: pass else: raise AssertError try: print PersonIndexGet.nameIndex.get('Terry', lastName='Gilliam') except Exception, e: pass else: raise AssertError try: print PersonIndexGet.nameIndex.get('Terry', 'Gilliam', 65) except Exception, e: pass else: raise AssertError try: print PersonIndexGet.nameIndex.get('Terry') except Exception, e: pass else: raise AssertError class PersonIndexGet2(SQLObject): name = StringCol(alternateID=True, length=100) age = IntCol() addresses = MultipleJoin('AddressIndexGet2') class AddressIndexGet2(SQLObject): person = ForeignKey('PersonIndexGet2', notNone=True) type = StringCol(notNone=True, length=100) street = StringCol(notNone=True) pk = DatabaseIndex(person, type, unique=True) def test_index_get_2(): setupClass([PersonIndexGet2, AddressIndexGet2]) p = PersonIndexGet2(name='Terry Guilliam', age=64) AddressIndexGet2(person=p, type='home', street='Terry Street 234') AddressIndexGet2(person=p, type='work', street='Guilliam Street 234') AddressIndexGet2.pk.get(p, 'work') AddressIndexGet2.pk.get(person=p, type='work') SQLObject-1.5.2/sqlobject/tests/dbtest.py0000644000175000017500000002316212230017303017637 0ustar phdphd00000000000000""" The framework for making database tests. """ import logging import os import re import sys from py.test import raises import sqlobject import sqlobject.conftest as conftest if sys.platform[:3] == "win": def getcwd(): return os.getcwd().replace('\\', '/') else: getcwd = os.getcwd """ supportsMatrix defines what database backends support what features. Each feature has a name, if you see a key like '+featureName' then only the databases listed support the feature. Conversely, '-featureName' means all databases *except* the ones listed support the feature. The databases are given by their SQLObject string name, separated by spaces. The function supports(featureName) returns True or False based on this, and you can use it like:: def test_featureX(): if not supports('featureX'): return """ supportsMatrix = { '+exceptions': 'mysql postgres sqlite', '-transactions': 'mysql rdbhost', '-dropTableCascade': 'sybase mssql mysql', '-expressionIndex': 'mysql sqlite firebird mssql', '-blobData': 'mssql rdbhost', '-decimalColumn': 'mssql', '-emptyTable': 'mssql', '-limitSelect' : 'mssql', '+schema' : 'postgres', '+memorydb': 'sqlite', } def setupClass(soClasses, force=False): """ Makes sure the classes have a corresponding and correct table. This won't recreate the table if it already exists. It will check that the table is properly defined (in case you change your table definition). You can provide a single class or a list of classes; if a list then classes will be created in the order you provide, and destroyed in the opposite order. 
So if class A depends on class B, then do setupClass([B, A]) and B won't be destroyed or cleared until after A is destroyed or cleared. If force is true, then the database will be recreated no matter what. """ global hub if not isinstance(soClasses, (list, tuple)): soClasses = [soClasses] connection = getConnection() for soClass in soClasses: ## This would be an alternate way to register connections... #try: # hub #except NameError: # hub = sqlobject.dbconnection.ConnectionHub() #soClass._connection = hub #hub.threadConnection = connection #hub.processConnection = connection soClass._connection = connection installOrClear(soClasses, force=force) return soClasses installedDBFilename = os.path.join(getcwd(), 'dbs_data.tmp') installedDBTracker = sqlobject.connectionForURI( 'sqlite:///' + installedDBFilename) def getConnection(**kw): name = getConnectionURI() conn = sqlobject.connectionForURI(name, **kw) if conftest.option.show_sql: conn.debug = True if conftest.option.show_sql_output: conn.debugOutput = True return conn def getConnectionURI(): name = conftest.option.Database if name in conftest.connectionShortcuts: name = conftest.connectionShortcuts[name] return name try: connection = getConnection() except Exception, e: # At least this module should be importable... print >> sys.stderr, ( "Could not open database: %s" % e) class InstalledTestDatabase(sqlobject.SQLObject): """ This table is set up in SQLite (always, regardless of --Database) and tracks what tables have been set up in the 'real' database. This way we don't keep recreating the tables over and over when there are multiple tests that use a table. """ _connection = installedDBTracker table_name = sqlobject.StringCol(notNull=True) createSQL = sqlobject.StringCol(notNull=True) connectionURI = sqlobject.StringCol(notNull=True) @classmethod def installOrClear(cls, soClasses, force=False): cls.setup() reversed = list(soClasses)[:] reversed.reverse() # If anything needs to be dropped, they all must be dropped # But if we're forcing it, then we'll always drop if force: any_drops = True else: any_drops = False for soClass in reversed: table = soClass.sqlmeta.table if not soClass._connection.tableExists(table): continue items = list(cls.selectBy( table_name=table, connectionURI=soClass._connection.uri())) if items: instance = items[0] sql = instance.createSQL else: sql = None newSQL, constraints = soClass.createTableSQL() if sql != newSQL: if sql is not None: instance.destroySelf() any_drops = True break for soClass in reversed: if soClass._connection.tableExists(soClass.sqlmeta.table): if any_drops: cls.drop(soClass) else: cls.clear(soClass) for soClass in soClasses: table = soClass.sqlmeta.table if not soClass._connection.tableExists(table): cls.install(soClass) @classmethod def install(cls, soClass): """ Creates the given table in its database. """ sql = getattr(soClass, soClass._connection.dbName + 'Create', None) all_extra = [] if sql: soClass._connection.query(sql) else: sql, extra_sql = soClass.createTableSQL() soClass.createTable(applyConstraints=False) all_extra.extend(extra_sql) cls(table_name=soClass.sqlmeta.table, createSQL=sql, connectionURI=soClass._connection.uri()) for extra_sql in all_extra: soClass._connection.query(extra_sql) @classmethod def drop(cls, soClass): """ Drops a the given table from its database """ sql = getattr(soClass, soClass._connection.dbName + 'Drop', None) if sql: soClass._connection.query(sql) else: soClass.dropTable() @classmethod def clear(cls, soClass): """ Removes all the rows from a table. 
""" soClass.clearTable() @classmethod def setup(cls): """ This sets up *this* table. """ if not cls._connection.tableExists(cls.sqlmeta.table): cls.createTable() installOrClear = InstalledTestDatabase.installOrClear class Dummy(object): """ Used for creating fake objects; a really poor 'mock object'. """ def __init__(self, **kw): for name, value in kw.items(): setattr(self, name, value) def inserts(cls, data, schema=None): """ Creates a bunch of rows. You can use it like:: inserts(Person, [{'fname': 'blah', 'lname': 'doe'}, ...]) Or:: inserts(Person, [('blah', 'doe')], schema= ['fname', 'lname']) If you give a single string for the `schema` then it'll split that string to get the list of column names. """ if schema: if isinstance(schema, str): schema = schema.split() keywordData = [] for item in data: itemDict = {} for name, value in zip(schema, item): itemDict[name] = value keywordData.append(itemDict) data = keywordData results = [] for args in data: results.append(cls(**args)) return results def supports(feature): dbName = connection.dbName support = supportsMatrix.get('+' + feature, None) notSupport = supportsMatrix.get('-' + feature, None) if support is not None and dbName in support.split(): return True elif support: return False if notSupport is not None and dbName in notSupport.split(): return False elif notSupport: return True assert notSupport is not None or support is not None, ( "The supportMatrix does not list this feature: %r" % feature) # To avoid name clashes: _inserts = inserts def setSQLiteConnectionFactory(TableClass, factory): from sqlobject.sqlite.sqliteconnection import SQLiteConnection conn = TableClass._connection TableClass._connection = SQLiteConnection( filename=conn.filename, name=conn.name, debug=conn.debug, debugOutput=conn.debugOutput, cache=conn.cache, style=conn.style, autoCommit=conn.autoCommit, debugThreading=conn.debugThreading, registry=conn.registry, factory=factory ) installOrClear([TableClass]) def deprecated_module(): sqlobject.main.warnings_level = None sqlobject.main.exception_level = None def setup_module(mod): # modules with '_old' test backward compatible methods, so they # don't get warnings or errors. 
mod_name = str(mod.__name__) if mod_name.endswith('/py'): mod_name = mod_name[:-3] if mod_name.endswith('_old'): sqlobject.main.warnings_level = None sqlobject.main.exception_level = None else: sqlobject.main.warnings_level = None sqlobject.main.exception_level = 0 def teardown_module(mod=None): sqlobject.main.warnings_level = None sqlobject.main.exception_level = 0 def setupLogging(): fmt = '[%(asctime)s] %(name)s %(levelname)s: %(message)s' formatter = logging.Formatter(fmt) hdlr = logging.StreamHandler(sys.stderr) hdlr.setFormatter(formatter) hdlr.setLevel(logging.NOTSET) logger = logging.getLogger() logger.addHandler(hdlr) __all__ = ['getConnection', 'getConnectionURI', 'setupClass', 'Dummy', 'raises', 'inserts', 'supports', 'deprecated_module', 'setup_module', 'teardown_module', 'setupLogging'] SQLObject-1.5.2/sqlobject/tests/test_style.py0000644000175000017500000000164010372665117020567 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * from sqlobject import styles class AnotherStyle(styles.MixedCaseUnderscoreStyle): def pythonAttrToDBColumn(self, attr): if attr.lower().endswith('id'): return 'id'+styles.MixedCaseUnderscoreStyle.pythonAttrToDBColumn(self, attr[:-2]) else: return styles.MixedCaseUnderscoreStyle.pythonAttrToDBColumn(self, attr) class SOStyleTest1(SQLObject): a = StringCol() st2 = ForeignKey('SOStyleTest2') class sqlmeta(sqlmeta): style = AnotherStyle() class SOStyleTest2(SQLObject): b = StringCol() class sqlmeta(sqlmeta): style = AnotherStyle() def test_style(): setupClass([SOStyleTest2, SOStyleTest1]) st1 = SOStyleTest1(a='something', st2=None) st2 = SOStyleTest2(b='whatever') st1.st2 = st2 assert st1.sqlmeta.columns['st2ID'].dbName == 'idst2' assert st1.st2 == st2 SQLObject-1.5.2/sqlobject/tests/test_picklecol.py0000644000175000017500000000201111334552470021361 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * ######################################## ## Pickle columns ######################################## class PickleData: pi = 3.14156 def __init__(self): self.question = 'The Ulimate Question of Life, the Universe and Everything' self.answer = 42 class PickleContainer(SQLObject): pickledata = PickleCol(default=None, length=65535) def test_pickleCol(): if not supports('blobData'): return setupClass([PickleContainer], force=True) mypickledata = PickleData() ctnr = PickleContainer(pickledata=mypickledata) iid = ctnr.id PickleContainer._connection.cache.clear() ctnr2 = PickleContainer.get(iid) s2 = ctnr2.pickledata assert isinstance(s2, PickleData) assert isinstance(s2.pi, float) assert isinstance(s2.question, str) assert isinstance(s2.answer, int) assert s2.pi == mypickledata.pi assert s2.question == mypickledata.question assert s2.answer == mypickledata.answer SQLObject-1.5.2/sqlobject/tests/test_joins.py0000644000175000017500000001121010372665117020543 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * ######################################## ## Joins ######################################## class PersonJoiner(SQLObject): name = StringCol(length=40, alternateID=True) addressJoiners = RelatedJoin('AddressJoiner') class AddressJoiner(SQLObject): zip = StringCol(length=5, alternateID=True) personJoiners = RelatedJoin('PersonJoiner') class ImplicitJoiningSO(SQLObject): foo = RelatedJoin('Bar') class ExplicitJoiningSO(SQLObject): foo = MultipleJoin('Bar', joinMethodName='foo') class TestJoin: def setup_method(self, meth): setupClass(PersonJoiner) 
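        # (Descriptive note: the loop below builds the shared fixture for every test
        # in this class -- six PersonJoiner rows and four AddressJoiner rows, initially
        # unlinked; the RelatedJoin links are added inside the individual tests.)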
setupClass(AddressJoiner) for n in ['bob', 'tim', 'jane', 'joe', 'fred', 'barb']: PersonJoiner(name=n) for z in ['11111', '22222', '33333', '44444']: AddressJoiner(zip=z) def test_join(self): b = PersonJoiner.byName('bob') assert b.addressJoiners == [] z = AddressJoiner.byZip('11111') b.addAddressJoiner(z) self.assertZipsEqual(b.addressJoiners, ['11111']) self.assertNamesEqual(z.personJoiners, ['bob']) z2 = AddressJoiner.byZip('22222') b.addAddressJoiner(z2) self.assertZipsEqual(b.addressJoiners, ['11111', '22222']) self.assertNamesEqual(z2.personJoiners, ['bob']) b.removeAddressJoiner(z) self.assertZipsEqual(b.addressJoiners, ['22222']) self.assertNamesEqual(z.personJoiners, []) def assertZipsEqual(self, zips, dest): assert [a.zip for a in zips] == dest def assertNamesEqual(self, people, dest): assert [p.name for p in people] == dest def test_joinAttributeWithUnderscores(self): # Make sure that the implicit setting of joinMethodName works assert hasattr(ImplicitJoiningSO, 'foo') assert not hasattr(ImplicitJoiningSO, 'bars') # And make sure explicit setting also works assert hasattr(ExplicitJoiningSO, 'foo') assert not hasattr(ExplicitJoiningSO, 'bars') class PersonJoiner2(SQLObject): name = StringCol('name', length=40, alternateID=True) addressJoiner2s = MultipleJoin('AddressJoiner2') class AddressJoiner2(SQLObject): class sqlmeta: defaultOrder = ['-zip', 'plus4'] zip = StringCol(length=5) plus4 = StringCol(length=4, default=None) personJoiner2 = ForeignKey('PersonJoiner2') class TestJoin2: def setup_method(self, meth): setupClass([PersonJoiner2, AddressJoiner2]) p1 = PersonJoiner2(name='bob') p2 = PersonJoiner2(name='sally') for z in ['11111', '22222', '33333']: a = AddressJoiner2(zip=z, personJoiner2=p1) #p1.addAddressJoiner2(a) AddressJoiner2(zip='00000', personJoiner2=p2) def test_basic(self): bob = PersonJoiner2.byName('bob') sally = PersonJoiner2.byName('sally') assert len(bob.addressJoiner2s) == 3 assert len(sally.addressJoiner2s) == 1 bob.addressJoiner2s[0].destroySelf() assert len(bob.addressJoiner2s) == 2 z = bob.addressJoiner2s[0] z.zip = 'xxxxx' id = z.id del z z = AddressJoiner2.get(id) assert z.zip == 'xxxxx' def test_defaultOrder(self): p1 = PersonJoiner2.byName('bob') assert ([i.zip for i in p1.addressJoiner2s] == ['33333', '22222', '11111']) _personJoiner3_getters = [] _personJoiner3_setters = [] class PersonJoiner3(SQLObject): name = StringCol('name', length=40, alternateID=True) addressJoiner3s = MultipleJoin('AddressJoiner3') class AddressJoiner3(SQLObject): zip = StringCol(length=5) personJoiner3 = ForeignKey('PersonJoiner3') def _get_personJoiner3(self): value = self._SO_get_personJoiner3() _personJoiner3_getters.append((self, value)) return value def _set_personJoiner3(self, value): self._SO_set_personJoiner3(value) _personJoiner3_setters.append((self, value)) class TestJoin3: def setup_method(self, meth): setupClass([PersonJoiner3, AddressJoiner3]) p1 = PersonJoiner3(name='bob') p2 = PersonJoiner3(name='sally') for z in ['11111', '22222', '33333']: a = AddressJoiner3(zip=z, personJoiner3=p1) AddressJoiner3(zip='00000', personJoiner3=p2) def test_accessors(self): assert len(_personJoiner3_getters) == 0 assert len(_personJoiner3_setters) == 4 bob = PersonJoiner3.byName('bob') for addressJoiner3 in bob.addressJoiner3s: addressJoiner3.personJoiner3 assert len(_personJoiner3_getters) == 3 assert len(_personJoiner3_setters) == 4 SQLObject-1.5.2/sqlobject/tests/test_subqueries.py0000644000175000017500000000711112167312042021603 0ustar phdphd00000000000000from sqlobject 
import * from sqlobject.sqlbuilder import * from sqlobject.tests.dbtest import * ######################################## ## Subqueries (subselects) ######################################## class TestIn1(SQLObject): col1 = StringCol() class TestIn2(SQLObject): col2 = StringCol() class TestOuter(SQLObject): fk = ForeignKey('TestIn1') def setup(): setupClass(TestIn1) setupClass(TestIn2) def insert(): setup() TestIn1(col1=None) TestIn1(col1='') TestIn1(col1="test") TestIn2(col2=None) TestIn2(col2='') TestIn2(col2="test") def test_1syntax_in(): setup() select = TestIn1.select(IN(TestIn1.q.col1, Select(TestIn2.q.col2))) assert str(select) == \ "SELECT test_in1.id, test_in1.col1 FROM test_in1 WHERE test_in1.col1 IN (SELECT test_in2.col2 FROM test_in2)" select = TestIn1.select(IN(TestIn1.q.col1, TestIn2.select())) assert str(select) == \ "SELECT test_in1.id, test_in1.col1 FROM test_in1 WHERE test_in1.col1 IN (SELECT test_in2.id FROM test_in2 WHERE 1 = 1)" def test_2perform_in(): insert() select = TestIn1.select(IN(TestIn1.q.col1, Select(TestIn2.q.col2))) assert select.count() == 2 def test_3syntax_exists(): setup() select = TestIn1.select(NOTEXISTS(Select(TestIn2.q.col2, where=(Outer(TestIn1).q.col1 == TestIn2.q.col2)))) assert str(select) == \ "SELECT test_in1.id, test_in1.col1 FROM test_in1 WHERE NOT EXISTS (SELECT test_in2.col2 FROM test_in2 WHERE ((test_in1.col1) = (test_in2.col2)))" setupClass(TestOuter) select = TestOuter.select(NOTEXISTS(Select(TestIn1.q.col1, where=(Outer(TestOuter).q.fk == TestIn1.q.id)))) assert str(select) == \ "SELECT test_outer.id, test_outer.fk_id FROM test_outer WHERE NOT EXISTS (SELECT test_in1.col1 FROM test_in1 WHERE ((test_outer.fk_id) = (test_in1.id)))" def test_4perform_exists(): insert() select = TestIn1.select(EXISTS(Select(TestIn2.q.col2, where=(Outer(TestIn1).q.col1 == TestIn2.q.col2)))) assert len(list(select)) == 2 setupClass(TestOuter) select = TestOuter.select(NOTEXISTS(Select(TestIn1.q.col1, where=(Outer(TestOuter).q.fkID == TestIn1.q.id)))) assert len(list(select)) == 0 def test_4syntax_direct(): setup() select = TestIn1.select(TestIn1.q.col1 == Select(TestIn2.q.col2, where=(TestIn2.q.col2 == "test"))) assert str(select) == \ "SELECT test_in1.id, test_in1.col1 FROM test_in1 WHERE ((test_in1.col1) = (SELECT test_in2.col2 FROM test_in2 WHERE ((test_in2.col2) = ('test'))))" def test_4perform_direct(): insert() select = TestIn1.select(TestIn1.q.col1 == Select(TestIn2.q.col2, where=(TestIn2.q.col2 == "test"))) assert select.count() == 1 def test_5perform_direct(): insert() select = TestIn1.select(TestIn1.q.col1 == Select(TestIn2.q.col2, where=(TestIn2.q.col2 == "test"))) assert select.count() == 1 def test_6syntax_join(): insert() j = LEFTOUTERJOINOn(TestIn2, TestIn1, TestIn1.q.col1==TestIn2.q.col2) select = TestIn1.select(TestIn1.q.col1 == Select(TestIn2.q.col2, where=(TestIn2.q.col2 == "test"), join=j)) assert str(select) == \ "SELECT test_in1.id, test_in1.col1 FROM test_in1 WHERE ((test_in1.col1) = (SELECT test_in2.col2 FROM test_in2 LEFT OUTER JOIN test_in1 ON ((test_in1.col1) = (test_in2.col2)) WHERE ((test_in2.col2) = ('test'))))" def test_6perform_join(): insert() j = LEFTOUTERJOINOn(TestIn2, TestIn1, TestIn1.q.col1==TestIn2.q.col2) select = TestIn1.select(TestIn1.q.col1 == Select(TestIn2.q.col2, where=(TestIn2.q.col2 == "test"), join=j)) assert select.count() == 1 SQLObject-1.5.2/sqlobject/tests/test_asdict.py0000644000175000017500000000065512035030154020664 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * 
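# sqlmeta.asDict() returns a plain dictionary of the row's column values keyed by
# attribute name, with the 'id' column included; the single test below pins that
# behaviour for a two-column table.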
######################################## ## sqlmeta.asDict() ######################################## class TestAsDict(SQLObject): name = StringCol(length=10) name2 = StringCol(length=10) def test_asDict(): setupClass(TestAsDict, force=True) t1 = TestAsDict(name='one', name2='1') assert t1.sqlmeta.asDict() == dict(name='one', name2='1', id=1) SQLObject-1.5.2/sqlobject/tests/test_sqlmeta_idName.py0000644000175000017500000000226611331564375022357 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * class myid_sqlmeta(sqlmeta): idName = "my_id" class TestSqlmeta1(SQLObject): class sqlmeta(myid_sqlmeta): pass class TestSqlmeta2(SQLObject): class sqlmeta(sqlmeta): style = MixedCaseStyle(longID=True) class TestSqlmeta3(SQLObject): class sqlmeta(myid_sqlmeta): style = MixedCaseStyle(longID=True) class TestSqlmeta4(SQLObject): class sqlmeta(myid_sqlmeta): idName = None style = MixedCaseStyle(longID=True) class longid_sqlmeta(sqlmeta): idName = "my_id" style = MixedCaseStyle(longID=True) class TestSqlmeta5(SQLObject): class sqlmeta(longid_sqlmeta): pass class TestSqlmeta6(SQLObject): class sqlmeta(longid_sqlmeta): idName = None def test_sqlmeta_inherited_idName(): setupClass([TestSqlmeta1, TestSqlmeta2]) assert TestSqlmeta1.sqlmeta.idName == "my_id" assert TestSqlmeta2.sqlmeta.idName == "TestSqlmeta2ID" assert TestSqlmeta3.sqlmeta.idName == "my_id" assert TestSqlmeta4.sqlmeta.idName == "TestSqlmeta4ID" assert TestSqlmeta5.sqlmeta.idName == "my_id" assert TestSqlmeta6.sqlmeta.idName == "TestSqlmeta6ID" SQLObject-1.5.2/sqlobject/tests/test_joins_conditional.py0000644000175000017500000000645210612135705023132 0ustar phdphd00000000000000from sqlobject import * from sqlobject.sqlbuilder import * from sqlobject.tests.dbtest import * ######################################## ## Condiotional joins ######################################## class TestJoin1(SQLObject): col1 = StringCol() class TestJoin2(SQLObject): col2 = StringCol() class TestJoin3(SQLObject): col3 = StringCol() class TestJoin4(SQLObject): col4 = StringCol() class TestJoin5(SQLObject): col5 = StringCol() def setup(): setupClass(TestJoin1) setupClass(TestJoin2) def test_1syntax(): setup() join = JOIN("table1", "table2") assert str(join) == "table1 JOIN table2" join = LEFTJOIN("table1", "table2") assert str(join) == "table1 LEFT JOIN table2" join = LEFTJOINOn("table1", "table2", "tabl1.col1 = table2.col2") assert getConnection().sqlrepr(join) == "table1 LEFT JOIN table2 ON tabl1.col1 = table2.col2" def test_2select_syntax(): setup() select = TestJoin1.select( join=LEFTJOINConditional(TestJoin1, TestJoin2, on_condition=(TestJoin1.q.col1 == TestJoin2.q.col2)) ) assert str(select) == \ "SELECT test_join1.id, test_join1.col1 FROM test_join1 LEFT JOIN test_join2 ON ((test_join1.col1) = (test_join2.col2)) WHERE 1 = 1" def test_3perform_join(): setup() TestJoin1(col1="test1") TestJoin1(col1="test2") TestJoin1(col1="test3") TestJoin2(col2="test1") TestJoin2(col2="test2") select = TestJoin1.select( join=LEFTJOINOn(TestJoin1, TestJoin2, TestJoin1.q.col1 == TestJoin2.q.col2) ) assert select.count() == 3 def test_4join_3tables_syntax(): setup() setupClass(TestJoin3) select = TestJoin1.select( join=LEFTJOIN(TestJoin2, TestJoin3) ) assert str(select) == \ "SELECT test_join1.id, test_join1.col1 FROM test_join1, test_join2 LEFT JOIN test_join3 WHERE 1 = 1" def test_5join_3tables_syntax2(): setup() setupClass(TestJoin3) select = TestJoin1.select( join=(LEFTJOIN(None, TestJoin2), LEFTJOIN(None, TestJoin3)) ) assert str(select) 
== \ "SELECT test_join1.id, test_join1.col1 FROM test_join1 LEFT JOIN test_join2 LEFT JOIN test_join3 WHERE 1 = 1" select = TestJoin1.select( join=(LEFTJOIN(TestJoin1, TestJoin2), LEFTJOIN(TestJoin1, TestJoin3)) ) assert str(select) == \ "SELECT test_join1.id, test_join1.col1 FROM test_join1 LEFT JOIN test_join2, test_join1 LEFT JOIN test_join3 WHERE 1 = 1" def test_6join_using(): setup() setupClass(TestJoin3) select = TestJoin1.select( join=LEFTJOINUsing(None, TestJoin2, [TestJoin2.q.id]) ) assert str(select) == \ "SELECT test_join1.id, test_join1.col1 FROM test_join1 LEFT JOIN test_join2 USING (test_join2.id) WHERE 1 = 1" def test_7join_on(): setup() setupClass(TestJoin3) setupClass(TestJoin4) setupClass(TestJoin5) select = TestJoin1.select(join=( LEFTJOINOn(TestJoin2, TestJoin3, TestJoin2.q.col2 == TestJoin3.q.col3), LEFTJOINOn(TestJoin4, TestJoin5, TestJoin4.q.col4 == TestJoin5.q.col5) )) assert str(select) == \ "SELECT test_join1.id, test_join1.col1 FROM test_join1, test_join2 LEFT JOIN test_join3 ON ((test_join2.col2) = (test_join3.col3)), test_join4 LEFT JOIN test_join5 ON ((test_join4.col4) = (test_join5.col5)) WHERE 1 = 1" SQLObject-1.5.2/sqlobject/tests/test_delete.py0000644000175000017500000000326710616077604020677 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * from test_basic import TestSO1, setupGetters ######################################## ## Delete during select ######################################## def testSelect(): setupGetters(TestSO1) for obj in TestSO1.select('all'): obj.destroySelf() assert list(TestSO1.select('all')) == [] ######################################## ## Delete many rows at once ######################################## def testDeleteMany(): setupGetters(TestSO1) TestSO1.deleteMany(OR(TestSO1.q.name=="bob", TestSO1.q.name=="fred")) assert len(list(TestSO1.select('all'))) == 2 def testDeleteBy(): setupGetters(TestSO1) TestSO1.deleteBy(name="dave") assert len(list(TestSO1.select())) == 3 ######################################## ## Delete without caching ######################################## class NoCache(SQLObject): name = StringCol() def testDestroySelf(): setupClass(NoCache) old = NoCache._connection.cache NoCache._connection.cache = cache.CacheSet(cache=False) value = NoCache(name='test') value.destroySelf() NoCache._connection.cache = old ######################################## ## Delete from related joins ######################################## class Service(SQLObject): groups = RelatedJoin("ServiceGroup") class ServiceGroup(SQLObject): services = RelatedJoin("Service") def testDeleteRelatedJoins(): setupClass([Service, ServiceGroup]) service = Service() service_group = ServiceGroup() service.addServiceGroup(service_group) service.destroySelf() service_group = ServiceGroup.get(service_group.id) assert len(service_group.services) == 0 SQLObject-1.5.2/sqlobject/tests/test_boundattributes.py0000644000175000017500000000323011604624061022632 0ustar phdphd00000000000000from sqlobject import declarative from sqlobject import boundattributes import py.test pytestmark = py.test.mark.skipif('True') class TestMe(object): #__metaclass__ = declarative.DeclarativeMeta #__classinit__ = boundattributes.bind_attributes_local pass class AttrReplace(boundattributes.BoundAttribute): __unpackargs__ = ('replace',) replace = None @declarative.classinstancemethod def make_object(self, cls, added_class, attr_name, **attrs): if not self: return cls.singleton().make_object( added_class, attr_name, **attrs) 
self.replace.added_class = added_class self.replace.name = attr_name assert attrs['replace'] is self.replace del attrs['replace'] self.replace.attrs = attrs return self.replace class Holder: def __init__(self, name): self.holder_name = name def __repr__(self): return '' % self.holder_name def test_1(): v1 = Holder('v1') v2 = Holder('v2') v3 = Holder('v3') class V2Class(AttrReplace): arg1 = 'nothing' arg2 = ['something'] class A1(TestMe): a = AttrReplace(v1) v = V2Class(v2) class inline(AttrReplace): replace = v3 arg3 = 'again' arg4 = 'so there' for n in ('a', 'v', 'inline'): assert getattr(A1, n).name == n assert getattr(A1, n).added_class is A1 assert A1.a is v1 assert A1.a.attrs == {} assert A1.v is v2 assert A1.v.attrs == {'arg1': 'nothing', 'arg2': ['something']} assert A1.inline is v3 assert A1.inline.attrs == {'arg3': 'again', 'arg4': 'so there'} SQLObject-1.5.2/sqlobject/tests/test_sqlite.py0000644000175000017500000000742612230020265020720 0ustar phdphd00000000000000import threading from sqlobject import * from sqlobject.tests.dbtest import * from sqlobject.tests.dbtest import setSQLiteConnectionFactory from test_basic import TestSO1 class SQLiteFactoryTest(SQLObject): name = StringCol() def test_sqlite_factory(): setupClass(SQLiteFactoryTest) if SQLiteFactoryTest._connection.dbName == "sqlite": if not SQLiteFactoryTest._connection.using_sqlite2: return factory = [None] def SQLiteConnectionFactory(sqlite): class MyConnection(sqlite.Connection): pass factory[0] = MyConnection return MyConnection setSQLiteConnectionFactory(SQLiteFactoryTest, SQLiteConnectionFactory) conn = SQLiteFactoryTest._connection.makeConnection() assert factory[0] assert isinstance(conn, factory[0]) def test_sqlite_factory_str(): setupClass(SQLiteFactoryTest) if SQLiteFactoryTest._connection.dbName == "sqlite": if not SQLiteFactoryTest._connection.using_sqlite2: return factory = [None] def SQLiteConnectionFactory(sqlite): class MyConnection(sqlite.Connection): pass factory[0] = MyConnection return MyConnection from sqlobject.sqlite import sqliteconnection sqliteconnection.SQLiteConnectionFactory = SQLiteConnectionFactory setSQLiteConnectionFactory(SQLiteFactoryTest, "SQLiteConnectionFactory") conn = SQLiteFactoryTest._connection.makeConnection() assert factory[0] assert isinstance(conn, factory[0]) del sqliteconnection.SQLiteConnectionFactory def test_sqlite_aggregate(): setupClass(SQLiteFactoryTest) if SQLiteFactoryTest._connection.dbName == "sqlite": if not SQLiteFactoryTest._connection.using_sqlite2: return def SQLiteConnectionFactory(sqlite): class MyConnection(sqlite.Connection): def __init__(self, *args, **kwargs): super(MyConnection, self).__init__(*args, **kwargs) self.create_aggregate("group_concat", 1, self.group_concat) class group_concat: def __init__(self): self.acc = [] def step(self, value): if isinstance(value, basestring): self.acc.append(value) else: self.acc.append(str(value)) def finalize(self): self.acc.sort() return ", ".join(self.acc) return MyConnection setSQLiteConnectionFactory(SQLiteFactoryTest, SQLiteConnectionFactory) SQLiteFactoryTest(name='sqlobject') SQLiteFactoryTest(name='sqlbuilder') assert SQLiteFactoryTest.select(orderBy="name").accumulateOne("group_concat", "name") == \ "sqlbuilder, sqlobject" def do_select(): list(TestSO1.select()) def test_sqlite_threaded(): setupClass(TestSO1) t = threading.Thread(target=do_select) t.start() t.join() # This should reuse the same connection as the connection # made above (at least will with most database drivers, but # this will cause an 
error in SQLite): do_select() def test_empty_string(): setupClass(TestSO1) test = TestSO1(name=None, passwd='') assert test.name is None assert test.passwd == '' def test_memorydb(): if not supports("memorydb"): return connection = getConnection() if connection.dbName != "sqlite": return if not connection._memory: return setupClass(TestSO1) connection.close() # create a new connection to an in-memory database TestSO1.setConnection(connection) TestSO1.createTable() SQLObject-1.5.2/sqlobject/tests/test_distinct.py0000644000175000017500000000143510372665117021252 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * ######################################## ## Distinct ######################################## class Distinct1(SQLObject): n = IntCol() class Distinct2(SQLObject): other = ForeignKey('Distinct1') def count(select): result = {} for ob in select: result[int(ob.n)] = result.get(int(ob.n), 0)+1 return result def test_distinct(): setupClass([Distinct1, Distinct2]) obs = [Distinct1(n=i) for i in range(3)] Distinct2(other=obs[0]) Distinct2(other=obs[0]) Distinct2(other=obs[1]) query = (Distinct2.q.otherID==Distinct1.q.id) sel = Distinct1.select(query) assert count(sel) == {0: 2, 1: 1} sel = Distinct1.select(query, distinct=True) assert count(sel) == {0: 1, 1:1} SQLObject-1.5.2/sqlobject/tests/test_aliases.py0000644000175000017500000000263110616144003021034 0ustar phdphd00000000000000from sqlobject import * from sqlobject.sqlbuilder import * from sqlobject.tests.dbtest import * ######################################## ## Table aliases and self-joins ######################################## class JoinAlias(SQLObject): name = StringCol() parent = StringCol() def test_1syntax(): setupClass(JoinAlias) alias = Alias(JoinAlias) select = JoinAlias.select(JoinAlias.q.parent == alias.q.name) assert str(select) == \ "SELECT join_alias.id, join_alias.name, join_alias.parent FROM join_alias, join_alias join_alias_alias1 WHERE ((join_alias.parent) = (join_alias_alias1.name))" def test_2perform_join(): setupClass(JoinAlias) JoinAlias(name="grandparent", parent=None) JoinAlias(name="parent", parent="grandparent") JoinAlias(name="child", parent="parent") alias = Alias(JoinAlias) select = JoinAlias.select(JoinAlias.q.parent == alias.q.name) assert select.count() == 2 def test_3joins(): setupClass(JoinAlias) alias = Alias(JoinAlias) select = JoinAlias.select((JoinAlias.q.name == 'a') & (alias.q.name == 'b'), join=LEFTJOINOn(None, alias, alias.q.name == 'c') ) assert str(select) == \ "SELECT join_alias.id, join_alias.name, join_alias.parent FROM join_alias LEFT JOIN join_alias join_alias_alias3 ON ((join_alias_alias3.name) = ('c')) WHERE (((join_alias.name) = ('a')) AND ((join_alias_alias3.name) = ('b')))" SQLObject-1.5.2/sqlobject/tests/test_sqlbuilder_joins_instances.py0000644000175000017500000000535212034047215025040 0ustar phdphd00000000000000from sqlobject import * from sqlobject.sqlbuilder import * from sqlobject.tests.dbtest import * ''' Testing for expressing join, foreign keys, and instance identity in SQLBuilder expressions. 
''' class SBPerson(SQLObject): name = StringCol() addresses = SQLMultipleJoin('SBAddress', joinColumn='personID') sharedAddresses = SQLRelatedJoin('SBAddress', addRemoveName='SharedAddress') class SBAddress(SQLObject): city = StringCol() person = ForeignKey('SBPerson') sharedPeople = SQLRelatedJoin('SBPerson') def setup_module(mod): setupClass([SBPerson, SBAddress]) mod.ppl = inserts(SBPerson, [('James',), ('Julia',)], 'name') mod.adds = inserts(SBAddress, [('London',mod.ppl[0].id), ('Chicago',mod.ppl[1].id), ('Abu Dhabi', mod.ppl[1].id)], 'city personID') mod.ppl[0].addSharedAddress(mod.adds[0]) mod.ppl[0].addSharedAddress(mod.adds[1]) mod.ppl[1].addSharedAddress(mod.adds[0]) def testJoin(): assert list(SBPerson.select(AND(SBPerson.q.id==SBAddress.q.personID, SBAddress.q.city=='London'))) == \ list(SBAddress.selectBy(city='London').throughTo.person) assert list(SBAddress.select(AND(SBPerson.q.id==SBAddress.q.personID, SBPerson.q.name=='Julia')).orderBy(SBAddress.q.city)) == \ list(SBPerson.selectBy(name='Julia').throughTo.addresses.orderBy(SBAddress.q.city)) def testRelatedJoin(): assert list(SBPerson.selectBy(name='Julia').throughTo.sharedAddresses) == \ list(ppl[1].sharedAddresses) def testInstance(): assert list(SBAddress.select(AND(SBPerson.q.id==SBAddress.q.personID, SBPerson.q.id==ppl[0].id))) == \ list(ppl[0].addresses) def testFK(): assert list(SBPerson.select(AND(SBAddress.j.person, SBAddress.q.city=='London'))) == \ list(SBPerson.select(AND(SBPerson.q.id==SBAddress.q.personID, SBAddress.q.city=='London'))) def testRelatedJoin2(): assert list(SBAddress.select(AND(SBAddress.j.sharedPeople, SBPerson.q.name=='Julia'))) == \ list(SBPerson.select(SBPerson.q.name=='Julia').throughTo.sharedAddresses) def testJoin2(): assert list(SBAddress.select(AND(SBPerson.j.addresses, SBPerson.q.name=='Julia')).orderBy(SBAddress.q.city)) == \ list(SBAddress.select(AND(SBPerson.q.id==SBAddress.q.personID, SBPerson.q.name=='Julia')).orderBy(SBAddress.q.city)) == \ list(SBPerson.selectBy(name='Julia').throughTo.addresses.orderBy(SBAddress.q.city)) def testFK2(): assert list(SBAddress.select(AND(SBAddress.j.person, SBPerson.q.name=='Julia'))) == \ list(SBAddress.select(AND(SBPerson.q.id==SBAddress.q.personID, SBPerson.q.name=='Julia'))) SQLObject-1.5.2/sqlobject/tests/test_combining_joins.py0000644000175000017500000000240510372665117022576 0ustar phdphd00000000000000from sqlobject import * from dbtest import * class ComplexGroup(SQLObject): name = StringCol() complexes = OneToMany('Complex') def _get_unit_models(self): q = self.complexes.clause & Complex.unit_models.clause return UnitModel.select(q) class Complex(SQLObject): name = StringCol() unit_models = ManyToMany('UnitModel') complex_group = ForeignKey('ComplexGroup') class UnitModel(SQLObject): class sqlmeta: defaultOrderBy = 'name' name = StringCol() complexes = ManyToMany('Complex') def test_join_sqlrepr(): setupClass([ComplexGroup, UnitModel, Complex]) cg1 = ComplexGroup(name='cg1') cg2 = ComplexGroup(name='cg2') c1 = Complex(name='c1', complex_group=cg1) c2 = Complex(name='c2', complex_group=cg2) c3 = Complex(name='c3', complex_group=cg2) u1 = UnitModel(name='u1') u2 = UnitModel(name='u2') u1.complexes.add(c1) u1.complexes.add(c2) u2.complexes.add(c2) u2.complexes.add(c3) assert list(Complex.selectBy(name='c1')) == [c1] assert list(cg1.unit_models) == [u1] assert list(cg2.unit_models) == [u1, u2, u2] assert list(cg2.unit_models.distinct()) == [u1, u2] assert list( cg2.unit_models.filter(UnitModel.q.name=='u1')) == [u1] 
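# Note: .filter() and .distinct() each return a new SelectResults, so they compose.
# An illustrative sketch only (assuming the same fixture as test_join_sqlrepr above,
# not an additional check run by the suite):
#     assert list(cg2.unit_models.filter(UnitModel.q.name=='u2').distinct()) == [u2]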
SQLObject-1.5.2/sqlobject/tests/test_paste.py0000644000175000017500000000436611604624061020543 0ustar phdphd00000000000000from dbtest import * from sqlobject import sqlhub, SQLObject, StringCol import py.test try: from sqlobject.wsgi_middleware import make_middleware except ImportError: pytestmark = py.test.mark.skipif('True') class NameOnly(SQLObject): name = StringCol() def makeapp(abort=False, begin=False, fail=False): def app(environ, start_response): NameOnly(name='app1') if fail == 'early': assert 0 start_response('200 OK', [('content-type', 'text/plain')]) if begin: environ['sqlobject.begin']() NameOnly(name='app2') if abort: environ['sqlobject.abort']() if fail: assert 0 return ['ok'] return app def makestack(abort=False, begin=False, fail=False, **kw): app = makeapp(abort=abort, begin=begin, fail=fail) app = make_middleware(app, {}, database=getConnectionURI(), **kw) return app def runapp(**kw): print '-'*8 app = makestack(**kw) env = {} def start_response(*args): pass try: list(app(env, start_response)) return True except AssertionError: return False def setup(): setupClass(NameOnly) getConnection().query('DELETE FROM name_only') NameOnly._connection = sqlhub def names(): names = [n.name for n in NameOnly.select(connection=getConnection())] names.sort() return names def test_fail(): setup() assert not runapp(fail=True, use_transaction=True) assert names() == [] setup() assert not runapp(fail=True, use_transaction=False) assert names() == ['app1', 'app2'] setup() assert not runapp(fail=True, begin=True, use_transaction=True) assert names() == ['app1'] def test_other(): setup() assert runapp(fail=False, begin=True, use_transaction=True) assert names() == ['app1', 'app2'] setup() # @@: Dammit, I can't get these to pass because I can't get the # stupid table to clear itself. setupClass() sucks. 
When I # fix it I'll take this disabling out: return assert names() == [] assert runapp(fail=False, begin=True, abort=True, use_transaction=True) assert names() == ['app1'] setup() assert runapp(use_transaction=True) assert names() == ['app1', 'app2'] SQLObject-1.5.2/sqlobject/tests/test_events.py0000644000175000017500000000636411467543504020744 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * from sqlobject import events from sqlobject.inheritance import InheritableSQLObject import sys class EventTester(SQLObject): name = StringCol() def make_watcher(): log = [] def watch(*args): log.append(args) watch.log = log return watch def make_listen(signal, cls=None): if cls is None: cls = EventTester watcher = make_watcher() events.listen(watcher, cls, signal) return watcher def test_create(): watcher = make_listen(events.ClassCreateSignal) class EventTesterSub1(EventTester): pass class EventTesterSub2(EventTesterSub1): pass assert len(watcher.log) == 2 assert len(watcher.log[0]) == 5 assert watcher.log[0][0] == 'EventTesterSub1' assert watcher.log[0][1] == (EventTester,) assert isinstance(watcher.log[0][2], dict) assert isinstance(watcher.log[0][3], list) def test_row_create(): setupClass(EventTester) watcher = make_listen(events.RowCreateSignal) row1 = EventTester(name='foo') row2 = EventTester(name='bar') assert len(watcher.log) == 2 assert watcher.log == [ (row1, {'name': 'foo'}, []), (row2, {'name': 'bar'}, [])] def test_row_destroy(): setupClass(EventTester) watcher = make_listen(events.RowDestroySignal) f = EventTester(name='foo') assert not watcher.log f.destroySelf() assert watcher.log == [(f, [])] def test_row_destroyed(): setupClass(EventTester) watcher = make_listen(events.RowDestroyedSignal) f = EventTester(name='foo') assert not watcher.log f.destroySelf() assert watcher.log == [(f, [])] def test_row_update(): setupClass(EventTester) watcher = make_listen(events.RowUpdateSignal) f = EventTester(name='bar') assert not watcher.log f.name = 'bar2' f.set(name='bar3') assert watcher.log == [ (f, {'name': 'bar2'}), (f, {'name': 'bar3'})] def test_row_updated(): setupClass(EventTester) watcher = make_listen(events.RowUpdatedSignal) f = EventTester(name='bar') assert not watcher.log f.name = 'bar2' f.set(name='bar3') assert watcher.log == [(f, []), (f, [])] def test_add_column(): setupClass(EventTester) watcher = make_listen(events.AddColumnSignal) events.summarize_events_by_sender() class NewEventTester(EventTester): name2 = StringCol() expect = ( NewEventTester, None, 'name2', NewEventTester.sqlmeta.columnDefinitions['name2'], False, []) print zip(watcher.log[1], expect) assert watcher.log[1] == expect class InheritableEventTestA(InheritableSQLObject): a = IntCol() class InheritableEventTestB(InheritableEventTestA): b = IntCol() class InheritableEventTestC(InheritableEventTestB): c = IntCol() def _query(instance): row = InheritableEventTestA.get(instance.id) assert isinstance(row, InheritableEventTestC) assert row.c == 3 def _signal(instance, kwargs, postfuncs): postfuncs.append(_query) def test_inheritance_row_created(): setupClass([InheritableEventTestA, InheritableEventTestB, InheritableEventTestC]) events.listen(_signal, InheritableEventTestA, events.RowCreatedSignal) InheritableEventTestC(a=1, b=2, c=3) SQLObject-1.5.2/sqlobject/tests/test_SQLMultipleJoin.py0000644000175000017500000000424412034270726022420 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * class Race(SQLObject): name = StringCol() 
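    # The two declarations below expose the same reverse relation in two ways:
    # MultipleJoin materialises a plain Python list, while SQLMultipleJoin yields a
    # lazy SelectResults that supports count(), max(), sum() and "in" checks;
    # test_1 below verifies the two stay in agreement.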
fightersAsList = MultipleJoin('RFighter', joinColumn="rf_id") fightersAsSResult = SQLMultipleJoin('RFighter', joinColumn="rf_id") class RFighter(SQLObject): name = StringCol() race = ForeignKey('Race', dbName="rf_id") power = IntCol() def createAllTables(): setupClass([Race, RFighter]) def test_1(): createAllTables() # create some races human=Race(name='human') saiyajin=Race(name='saiyajin') hibrid=Race(name='hibrid (human with sayajin)') namek=Race(name='namekuseijin') # create some fighters gokou=RFighter(name='Gokou (Kakaruto)', race=saiyajin, power=10) vegeta=RFighter(name='Vegeta', race=saiyajin, power=9) krilim=RFighter(name='Krilim', race=human, power=3) yancha=RFighter(name='Yancha', race=human, power=2) jackiechan=RFighter(name='Jackie Chan', race=human, power=2) gohan=RFighter(name='Gohan', race=hibrid, power=8) goten=RFighter(name='Goten', race=hibrid, power=7) trunks=RFighter(name='Trunks', race=hibrid, power=8) picollo=RFighter(name='Picollo', race=namek, power=6) neil=RFighter(name='Neil', race=namek, power=5) # testing the SQLMultipleJoin stuff for i, j in zip(human.fightersAsList, human.fightersAsSResult): assert i is j # the 2 ways should give the same result assert namek.fightersAsSResult.count() == len(namek.fightersAsList) assert saiyajin.fightersAsSResult.max('power') == 10 assert trunks in hibrid.fightersAsSResult assert picollo not in hibrid.fightersAsSResult assert str(hibrid.fightersAsSResult.sum('power')) == '23' def test_multiple_join_transaction(): if not supports('transactions'): return createAllTables() trans = Race._connection.transaction() try: namek=Race(name='namekuseijin', connection=trans) gokou=RFighter(name='Gokou (Kakaruto)', race=namek, power=10, connection=trans) assert namek.fightersAsSResult.count() == 1 assert namek.fightersAsSResult[0]._connection == trans finally: trans.commit(True) Race._connection.autoCommit = True SQLObject-1.5.2/sqlobject/tests/test_SingleJoin.py0000644000175000017500000000167610622103435021465 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * class PersonWithAlbum(SQLObject): name = StringCol() # albumNone returns the album or none albumNone = SingleJoin('PhotoAlbum', joinColumn='test_person_id') # albumInstance returns the album or an default album instance albumInstance = SingleJoin('PhotoAlbum', makeDefault=True, joinColumn='test_person_id') class PhotoAlbum(SQLObject): color = StringCol(default='red') person = ForeignKey('PersonWithAlbum', dbName='test_person_id') def test_1(): setupClass([PersonWithAlbum, PhotoAlbum]) person = PersonWithAlbum(name='Gokou (Kakarouto)') assert not person.albumNone # I don't created an album, this way it returns None assert isinstance(person.albumInstance, PhotoAlbum) album = PhotoAlbum(person=person) assert person.albumNone assert isinstance(person.albumNone, PhotoAlbum) assert isinstance(person.albumInstance, PhotoAlbum) SQLObject-1.5.2/sqlobject/tests/test_select.py0000644000175000017500000001375112203134056020700 0ustar phdphd00000000000000from sqlobject import * from sqlobject.sqlbuilder import func from sqlobject.main import SQLObjectIntegrityError from dbtest import * from dbtest import setSQLiteConnectionFactory class IterTest(SQLObject): name = StringCol(dbName='name_col', length=200) names = ('a', 'b', 'c') def setupIter(): setupClass(IterTest) for n in names: IterTest(name=n) def test_00_normal(): setupIter() count = 0 for test in IterTest.select(): count += 1 assert count == len(names) def test_00b_lazy(): setupIter() count = 0 for test 
in IterTest.select(lazyColumns=True): count += 1 assert count == len(names) def test_01_turn_to_list(): count = 0 for test in list(IterTest.select()): count += 1 assert count == len(names) def test_02_generator(): all = IterTest.select() count = 0 for i, test in enumerate(all): count += 1 assert count == len(names) def test_03_ranged_indexed(): all = IterTest.select() count = 0 for i in range(all.count()): test = all[i] count += 1 assert count == len(names) def test_04_indexed_ended_by_exception(): if not supports('limitSelect'): return all = IterTest.select() count = 0 try: while 1: test = all[count] count = count+1 # Stop the test if it's gone on too long if count > len(names): break except IndexError: pass assert count == len(names) def test_05_select_limit(): setupIter() assert len(list(IterTest.select(limit=2))) == 2 raises(AssertionError, IterTest.select(limit=2).count) def test_06_contains(): setupIter() assert len(list(IterTest.select(IterTest.q.name.startswith('a')))) == 1 assert len(list(IterTest.select(IterTest.q.name.contains('a')))) == 1 assert len(list(IterTest.select(IterTest.q.name.contains(func.lower('A'))))) == 1 assert len(list(IterTest.select(IterTest.q.name.contains("a'b")))) == 0 assert len(list(IterTest.select(IterTest.q.name.endswith('a')))) == 1 def test_07_contains_special(): setupClass(IterTest) a = IterTest(name='\\test') b = IterTest(name='100%') c = IterTest(name='test_') assert list(IterTest.select(IterTest.q.name.startswith('\\'))) == [a] assert list(IterTest.select(IterTest.q.name.contains('%'))) == [b] assert list(IterTest.select(IterTest.q.name.endswith('_'))) == [c] def test_select_getOne(): setupClass(IterTest) a = IterTest(name='a') b = IterTest(name='b') assert IterTest.selectBy(name='a').getOne() == a assert IterTest.select(IterTest.q.name=='b').getOne() == b assert IterTest.selectBy(name='c').getOne(None) is None raises(SQLObjectNotFound, 'IterTest.selectBy(name="c").getOne()') b2 = IterTest(name='b') raises(SQLObjectIntegrityError, 'IterTest.selectBy(name="b").getOne()') raises(SQLObjectIntegrityError, 'IterTest.selectBy(name="b").getOne(None)') def test_selectBy(): setupClass(IterTest) a = IterTest(name='a') b = IterTest(name='b') assert IterTest.selectBy().count() == 2 def test_selectBy_kwargs(): setupClass(IterTest) try: b = IterTest(nonexistant='b') except TypeError: return assert False, "IterTest(nonexistant='b') should raise TypeError" class UniqTest(SQLObject): name = StringCol(dbName='name_col', unique=True, length=100) def test_by_uniq(): setupClass(UniqTest) a = UniqTest(name='a') b = UniqTest(name='b') assert UniqTest.byName('a') is a assert UniqTest.byName('b') is b class Counter2(SQLObject): n1 = IntCol(notNull=True) n2 = IntCol(notNull=True) class TestSelect: def setup_method(self, meth): setupClass(Counter2) for i in range(10): for j in range(10): Counter2(n1=i, n2=j) def counterEqual(self, counters, value): assert [(c.n1, c.n2) for c in counters] == value def accumulateEqual(self, func, counters, value): assert func([c.n1 for c in counters]) == value def test_1(self): self.accumulateEqual(sum,Counter2.select(orderBy='n1'), sum(range(10)) * 10) def test_2(self): self.accumulateEqual(len,Counter2.select('all'), 100) def test_select_LIKE(): setupClass(IterTest) IterTest(name='sqlobject') IterTest(name='sqlbuilder') assert IterTest.select(LIKE(IterTest.q.name, "sql%")).count() == 2 assert IterTest.select(LIKE(IterTest.q.name, "sqlb%")).count() == 1 assert IterTest.select(LIKE(IterTest.q.name, "sqlb%")).count() == 1 assert 
IterTest.select(LIKE(IterTest.q.name, "sqlx%")).count() == 0 def test_select_RLIKE(): setupClass(IterTest) if IterTest._connection.dbName == "sqlite": if not IterTest._connection.using_sqlite2: return # Implement regexp() function for SQLite; only works with PySQLite2 import re def regexp(regexp, test): return bool(re.search(regexp, test)) def SQLiteConnectionFactory(sqlite): class MyConnection(sqlite.Connection): def __init__(self, *args, **kwargs): super(MyConnection, self).__init__(*args, **kwargs) self.create_function("regexp", 2, regexp) return MyConnection setSQLiteConnectionFactory(IterTest, SQLiteConnectionFactory) IterTest(name='sqlobject') IterTest(name='sqlbuilder') assert IterTest.select(RLIKE(IterTest.q.name, "^sql.*$")).count() == 2 assert IterTest.select(RLIKE(IterTest.q.name, "^sqlb.*$")).count() == 1 assert IterTest.select(RLIKE(IterTest.q.name, "^sqlb.*$")).count() == 1 assert IterTest.select(RLIKE(IterTest.q.name, "^sqlx.*$")).count() == 0 def test_select_sqlbuilder(): setupClass(IterTest) IterTest(name='sqlobject') IterTest.select(IterTest.q.name==u'sqlobject') def test_select_perConnection(): setupClass(IterTest) IterTest(name='a') assert not IterTest.select().getOne().sqlmeta._perConnection SQLObject-1.5.2/sqlobject/tests/test_constraints.py0000644000175000017500000000175010372665117022000 0ustar phdphd00000000000000from sqlobject.constraints import * from sqlobject.tests.dbtest import * def test_constraints(): obj = 'Test object' col = Dummy(name='col') isString(obj, col, 'blah') raises(BadValue, isString, obj, col, 1) # @@: Should this really be an error? raises(BadValue, isString, obj, col, u'test!') #isString(obj, col, u'test!') raises(BadValue, notNull, obj, col, None) raises(BadValue, isInt, obj, col, 1.1) isInt(obj, col, 1) isInt(obj, col, 1L) isFloat(obj, col, 1) isFloat(obj, col, 1L) isFloat(obj, col, 1.2) raises(BadValue, isFloat, obj, col, '1.0') # @@: Should test isBool, but I don't think isBool is right lst = InList(('a', 'b', 'c')) lst(obj, col, 'a') raises(BadValue, lst, obj, col, ('a', 'b', 'c')) raises(BadValue, lst, obj, col, 'A') maxlen = MaxLength(2) raises(BadValue, maxlen, obj, col, '123') maxlen(obj, col, '12') maxlen(obj, col, (1,)) raises(BadValue, maxlen, obj, col, 1) SQLObject-1.5.2/sqlobject/tests/test_psycopg_sslmode.py0000644000175000017500000000140611413402104022617 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * ######################################## ## Test PosgreSQL sslmode ######################################## class TestSSLMode(SQLObject): test = StringCol() def test_sslmode(): setupClass(TestSSLMode) connection = TestSSLMode._connection if (connection.dbName != 'postgres') or \ (not connection.module.__name__.startswith('psycopg')): # sslmode is only implemented by psycopg[12] PostgreSQL driver return connection = getConnection(sslmode='require') TestSSLMode._connection = connection test = TestSSLMode(test='test') # Connect to the DB to test sslmode connection.cache.clear() test = TestSSLMode.select()[0] assert test.test == 'test' SQLObject-1.5.2/sqlobject/tests/test_groupBy.py0000644000175000017500000000232112203134062021034 0ustar phdphd00000000000000from sqlobject import * from sqlobject.sqlbuilder import Select, func from sqlobject.tests.dbtest import * ######################################## ## groupBy ######################################## class GroupbyTest(SQLObject): name = StringCol() value = IntCol() def test_groupBy(): setupClass(GroupbyTest) GroupbyTest(name='a', value=1) 
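    # (Together with the two rows created below, this gives two rows named 'a' and
    # one named 'b', so grouping by name should produce counts of 2 and 1.)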
GroupbyTest(name='a', value=2) GroupbyTest(name='b', value=1) connection = getConnection() select = Select([GroupbyTest.q.name, func.COUNT(GroupbyTest.q.value)], groupBy=GroupbyTest.q.name, orderBy=GroupbyTest.q.name) sql = connection.sqlrepr(select) rows = connection.queryAll(sql) assert list(rows) == [('a', 2), ('b', 1)] def test_groupBy_list(): setupClass(GroupbyTest) GroupbyTest(name='a', value=1) GroupbyTest(name='a', value=2) GroupbyTest(name='b', value=1) connection = getConnection() select = Select([GroupbyTest.q.name, GroupbyTest.q.value], groupBy=[GroupbyTest.q.name, GroupbyTest.q.value], orderBy=[GroupbyTest.q.name, GroupbyTest.q.value]) sql = connection.sqlrepr(select) rows = connection.queryAll(sql) assert list(rows) == [('a', 1), ('a', 2), ('b', 1)] SQLObject-1.5.2/sqlobject/tests/test_datetime.py0000644000175000017500000000555711243552655021236 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * ######################################## ## Date/time columns ######################################## from sqlobject import col col.default_datetime_implementation = DATETIME_IMPLEMENTATION from datetime import datetime, date, time class DateTime1(SQLObject): col1 = DateTimeCol() col2 = DateCol() col3 = TimeCol() def test_dateTime(): setupClass(DateTime1) _now = datetime.now() dt1 = DateTime1(col1=_now, col2=_now, col3=_now.time()) assert isinstance(dt1.col1, datetime) assert dt1.col1.year == _now.year assert dt1.col1.month == _now.month assert dt1.col1.day == _now.day assert dt1.col1.hour == _now.hour assert dt1.col1.minute == _now.minute assert dt1.col1.second == int(_now.second) assert isinstance(dt1.col2, date) assert not isinstance(dt1.col2, datetime) assert dt1.col2.year == _now.year assert dt1.col2.month == _now.month assert dt1.col2.day == _now.day assert isinstance(dt1.col3, time) assert dt1.col3.hour == _now.hour assert dt1.col3.minute == _now.minute assert dt1.col3.second == int(_now.second) if mxdatetime_available: col.default_datetime_implementation = MXDATETIME_IMPLEMENTATION from mx.DateTime import now, Time dateFormat = None # use default connection = getConnection() if connection.dbName == "sqlite": if connection.using_sqlite2: # mxDateTime sends and PySQLite2 returns full date/time for dates dateFormat = "%Y-%m-%d %H:%M:%S" class DateTime2(SQLObject): col1 = DateTimeCol() col2 = DateCol(dateFormat=dateFormat) col3 = TimeCol() def test_mxDateTime(): setupClass(DateTime2) _now = now() dt2 = DateTime2(col1=_now, col2=_now, col3=Time(_now.hour, _now.minute, int(_now.second))) assert isinstance(dt2.col1, col.DateTimeType) assert dt2.col1.year == _now.year assert dt2.col1.month == _now.month assert dt2.col1.day == _now.day assert dt2.col1.hour == _now.hour assert dt2.col1.minute == _now.minute assert dt2.col1.second == int(_now.second) assert isinstance(dt2.col2, col.DateTimeType) assert dt2.col2.year == _now.year assert dt2.col2.month == _now.month assert dt2.col2.day == _now.day if getConnection().dbName == "sqlite": assert dt2.col2.hour == _now.hour assert dt2.col2.minute == _now.minute assert dt2.col2.second == int(_now.second) else: assert dt2.col2.hour == 0 assert dt2.col2.minute == 0 assert dt2.col2.second == 0 assert isinstance(dt2.col3, (col.DateTimeType, col.TimeType)) assert dt2.col3.hour == _now.hour assert dt2.col3.minute == _now.minute assert dt2.col3.second == int(_now.second) SQLObject-1.5.2/sqlobject/tests/test_blob.py0000644000175000017500000000105410372665117020344 0ustar phdphd00000000000000from sqlobject import * 
from sqlobject.tests.dbtest import * ######################################## ## BLOB columns ######################################## class ImageData(SQLObject): image = BLOBCol(default='emptydata', length=65535) def test_BLOBCol(): if not supports('blobData'): return setupClass(ImageData) data = ''.join([chr(x) for x in range(256)]) prof = ImageData() prof.image = data iid = prof.id ImageData._connection.cache.clear() prof2 = ImageData.get(iid) assert prof2.image == data SQLObject-1.5.2/sqlobject/tests/test_expire.py0000644000175000017500000000132410372665117020722 0ustar phdphd00000000000000from sqlobject import * from sqlobject.tests.dbtest import * ######################################## ## Expiring, syncing ######################################## class SyncTest(SQLObject): name = StringCol(length=50, alternateID=True, dbName='name_col') def test_expire(): setupClass(SyncTest) SyncTest(name='bob') SyncTest(name='tim') conn = SyncTest._connection b = SyncTest.byName('bob') conn.query("UPDATE sync_test SET name_col = 'robert' WHERE id = %i" % b.id) assert b.name == 'bob' b.expire() assert b.name == 'robert' conn.query("UPDATE sync_test SET name_col = 'bobby' WHERE id = %i" % b.id) b.sync() assert b.name == 'bobby' SQLObject-1.5.2/sqlobject/tests/test_decimal.py0000644000175000017500000000527411035703120021014 0ustar phdphd00000000000000# -*- coding: koi8-r -*- from decimal import Decimal from sqlobject import * from sqlobject.tests.dbtest import * ######################################## ## Decimal columns ######################################## class DecimalTable(SQLObject): name = UnicodeCol(length=255) col1 = DecimalCol(size=6, precision=4) col2 = DecimalStringCol(size=6, precision=4) col3 = DecimalStringCol(size=6, precision=4, quantize=True) if supports('decimalColumn'): def test_1_decimal(): setupClass(DecimalTable) d = DecimalTable(name='test', col1=21.12, col2='10.01', col3='10.01') # psycopg2 returns float as Decimal if isinstance(d.col1, Decimal): assert d.col1 == Decimal("21.12") else: assert d.col1 == 21.12 assert d.col2 == Decimal("10.01") assert DecimalTable.sqlmeta.columns['col2'].to_python('10.01', d._SO_validatorState) == Decimal("10.01") assert DecimalTable.sqlmeta.columns['col2'].from_python('10.01', d._SO_validatorState) == "10.01" assert d.col3 == Decimal("10.01") assert DecimalTable.sqlmeta.columns['col3'].to_python('10.01', d._SO_validatorState) == Decimal("10.01") assert DecimalTable.sqlmeta.columns['col3'].from_python('10.01', d._SO_validatorState) == "10.0100" def test_2_decimal(): setupClass(DecimalTable) d = DecimalTable(name='test', col1=Decimal("21.12"), col2=Decimal('10.01'), col3=Decimal('10.01')) assert d.col1 == Decimal("21.12") assert d.col2 == Decimal("10.01") assert DecimalTable.sqlmeta.columns['col2'].to_python(Decimal('10.01'), d._SO_validatorState) == Decimal("10.01") assert DecimalTable.sqlmeta.columns['col2'].from_python(Decimal('10.01'), d._SO_validatorState) == "10.01" assert d.col3 == Decimal("10.01") assert DecimalTable.sqlmeta.columns['col3'].to_python(Decimal('10.01'), d._SO_validatorState) == Decimal("10.01") assert DecimalTable.sqlmeta.columns['col3'].from_python(Decimal('10.01'), d._SO_validatorState) == "10.0100" # See http://mail.python.org/pipermail/python-dev/2008-March/078189.html if isinstance(Decimal(u'123').to_eng_string(), unicode): # a bug in Python 2.5.2 def test_3_unicode(): setupClass(DecimalTable) d = DecimalTable(name='test', col1=Decimal(u"21.12"), col2='10.01', col3='10.01') assert d.col1 == Decimal("21.12") d = 
DecimalTable(name=unicode('ÔÅÓÔ', 'koi8-r'), col1=Decimal(u"21.12"), col2='10.01', col3='10.01') assert d.col1 == Decimal("21.12") SQLObject-1.5.2/sqlobject/sresults.py0000644000175000017500000003377011771602610017114 0ustar phdphd00000000000000import dbconnection import joins import main import sqlbuilder __all__ = ['SelectResults'] class SelectResults(object): IterationClass = dbconnection.Iteration def __init__(self, sourceClass, clause, clauseTables=None, **ops): self.sourceClass = sourceClass if clause is None or isinstance(clause, str) and clause == 'all': clause = sqlbuilder.SQLTrueClause if not isinstance(clause, sqlbuilder.SQLExpression): clause = sqlbuilder.SQLConstant(clause) self.clause = clause self.ops = ops if ops.get('orderBy', sqlbuilder.NoDefault) is sqlbuilder.NoDefault: ops['orderBy'] = sourceClass.sqlmeta.defaultOrder orderBy = ops['orderBy'] if isinstance(orderBy, (tuple, list)): orderBy = map(self._mungeOrderBy, orderBy) else: orderBy = self._mungeOrderBy(orderBy) ops['dbOrderBy'] = orderBy if 'connection' in ops and ops['connection'] is None: del ops['connection'] if ops.get('limit', None): assert not ops.get('start', None) and not ops.get('end', None), \ "'limit' cannot be used with 'start' or 'end'" ops["start"] = 0 ops["end"] = ops.pop("limit") tablesSet = sqlbuilder.tablesUsedSet(self.clause, self._getConnection().dbName) if clauseTables: for table in clauseTables: tablesSet.add(table) self.clauseTables = clauseTables # Explicitly post-adding-in sqlmeta.table, sqlbuilder.Select will handle sqlrepr'ing and dupes self.tables = list(tablesSet) + [sourceClass.sqlmeta.table] def queryForSelect(self): columns = [self.sourceClass.q.id] + [getattr(self.sourceClass.q, x.name) for x in self.sourceClass.sqlmeta.columnList] query = sqlbuilder.Select(columns, where=self.clause, join=self.ops.get('join', sqlbuilder.NoDefault), distinct=self.ops.get('distinct',False), lazyColumns=self.ops.get('lazyColumns', False), start=self.ops.get('start', 0), end=self.ops.get('end', None), orderBy=self.ops.get('dbOrderBy',sqlbuilder.NoDefault), reversed=self.ops.get('reversed', False), staticTables=self.tables, forUpdate=self.ops.get('forUpdate', False)) return query def __repr__(self): return "<%s at %x>" % (self.__class__.__name__, id(self)) def _getConnection(self): return self.ops.get('connection') or self.sourceClass._connection def __str__(self): conn = self._getConnection() return conn.queryForSelect(self) def _mungeOrderBy(self, orderBy): if isinstance(orderBy, str) and orderBy.startswith('-'): orderBy = orderBy[1:] desc = True else: desc = False if isinstance(orderBy, basestring): if orderBy in self.sourceClass.sqlmeta.columns: val = getattr(self.sourceClass.q, self.sourceClass.sqlmeta.columns[orderBy].name) if desc: return sqlbuilder.DESC(val) else: return val else: orderBy = sqlbuilder.SQLConstant(orderBy) if desc: return sqlbuilder.DESC(orderBy) else: return orderBy else: return orderBy def clone(self, **newOps): ops = self.ops.copy() ops.update(newOps) return self.__class__(self.sourceClass, self.clause, self.clauseTables, **ops) def orderBy(self, orderBy): return self.clone(orderBy=orderBy) def connection(self, conn): return self.clone(connection=conn) def limit(self, limit): return self[:limit] def lazyColumns(self, value): return self.clone(lazyColumns=value) def reversed(self): return self.clone(reversed=not self.ops.get('reversed', False)) def distinct(self): return self.clone(distinct=True) def newClause(self, new_clause): return self.__class__(self.sourceClass, 
new_clause, self.clauseTables, **self.ops) def filter(self, filter_clause): if filter_clause is None: # None doesn't filter anything, it's just a no-op: return self clause = self.clause if isinstance(clause, basestring): clause = sqlbuilder.SQLConstant('(%s)' % clause) return self.newClause(sqlbuilder.AND(clause, filter_clause)) def __getitem__(self, value): if isinstance(value, slice): assert not value.step, "Slices do not support steps" if not value.start and not value.stop: # No need to copy, I'm immutable return self # Negative indexes aren't handled (and everything we # don't handle ourselves we just create a list to # handle) if (value.start and value.start < 0) \ or (value.stop and value.stop < 0): if value.start: if value.stop: return list(self)[value.start:value.stop] return list(self)[value.start:] return list(self)[:value.stop] if value.start: assert value.start >= 0 start = self.ops.get('start', 0) + value.start if value.stop is not None: assert value.stop >= 0 if value.stop < value.start: # an empty result: end = start else: end = value.stop + self.ops.get('start', 0) if self.ops.get('end', None) is not None and \ self.ops['end'] < end: # truncated by previous slice: end = self.ops['end'] else: end = self.ops.get('end', None) else: start = self.ops.get('start', 0) end = value.stop + start if self.ops.get('end', None) is not None \ and self.ops['end'] < end: end = self.ops['end'] return self.clone(start=start, end=end) else: if value < 0: return list(iter(self))[value] else: start = self.ops.get('start', 0) + value return list(self.clone(start=start, end=start+1))[0] def __iter__(self): # @@: This could be optimized, using a simpler algorithm # since we don't have to worry about garbage collection, # etc., like we do with .lazyIter() return iter(list(self.lazyIter())) def lazyIter(self): """ Returns an iterator that will lazily pull rows out of the database and return SQLObject instances """ conn = self._getConnection() return conn.iterSelect(self) def accumulate(self, *expressions): """ Use accumulate expression(s) to select result using another SQL select through current connection. Return the accumulate result """ conn = self._getConnection() exprs = [] for expr in expressions: if not isinstance(expr, sqlbuilder.SQLExpression): expr = sqlbuilder.SQLConstant(expr) exprs.append(expr) return conn.accumulateSelect(self, *exprs) def count(self): """ Counting elements of current select results """ assert not self.ops.get('start') and not self.ops.get('end'), \ "start/end/limit have no meaning with 'count'" assert not (self.ops.get('distinct') and (self.ops.get('start') or self.ops.get('end'))), \ "distinct-counting of sliced objects is not supported" if self.ops.get('distinct'): # Column must be specified, so we are using unique ID column. # COUNT(DISTINCT column) is supported by MySQL and PostgreSQL, # but not by SQLite. Perhaps more portable would be subquery: # SELECT COUNT(*) FROM (SELECT DISTINCT id FROM table) count = self.accumulate('COUNT(DISTINCT %s)' % self._getConnection().sqlrepr(self.sourceClass.q.id)) else: count = self.accumulate('COUNT(*)') if self.ops.get('start'): count -= self.ops['start'] if self.ops.get('end'): count = min(self.ops['end'] - self.ops.get('start', 0), count) return count def accumulateMany(self, *attributes): """ Making the expressions for count/sum/min/max/avg of a given select result attributes. 
`attributes` must be a list/tuple of pairs (func_name, attribute); `attribute` can be a column name (like 'a_column') or a dot-q attribute (like Table.q.aColumn) """ expressions = [] conn = self._getConnection() if self.ops.get('distinct'): distinct = 'DISTINCT ' else: distinct = '' for func_name, attribute in attributes: if not isinstance(attribute, str): attribute = conn.sqlrepr(attribute) expression = '%s(%s%s)' % (func_name, distinct, attribute) expressions.append(expression) return self.accumulate(*expressions) def accumulateOne(self, func_name, attribute): """ Making the sum/min/max/avg of a given select result attribute. `attribute` can be a column name (like 'a_column') or a dot-q attribute (like Table.q.aColumn) """ return self.accumulateMany((func_name, attribute)) def sum(self, attribute): return self.accumulateOne("SUM", attribute) def min(self, attribute): return self.accumulateOne("MIN", attribute) def avg(self, attribute): return self.accumulateOne("AVG", attribute) def max(self, attribute): return self.accumulateOne("MAX", attribute) def getOne(self, default=sqlbuilder.NoDefault): """ If a query is expected to only return a single value, using ``.getOne()`` will return just that value. If not results are found, ``SQLObjectNotFound`` will be raised, unless you pass in a default value (like ``.getOne(None)``). If more than one result is returned, ``SQLObjectIntegrityError`` will be raised. """ results = list(self) if not results: if default is sqlbuilder.NoDefault: raise main.SQLObjectNotFound( "No results matched the query for %s" % self.sourceClass.__name__) return default if len(results) > 1: raise main.SQLObjectIntegrityError( "More than one result returned from query: %s" % results) return results[0] def throughTo(self): class _throughTo_getter(object): def __init__(self, inst): self.sresult = inst def __getattr__(self, attr): return self.sresult._throughTo(attr) return _throughTo_getter(self) throughTo = property(throughTo) def _throughTo(self, attr): otherClass = None orderBy = sqlbuilder.NoDefault ref = self.sourceClass.sqlmeta.columns.get(attr.endswith('ID') and attr or attr+'ID', None) if ref and ref.foreignKey: otherClass, clause = self._throughToFK(ref) else: join = [x for x in self.sourceClass.sqlmeta.joins if x.joinMethodName==attr] if join: join = join[0] orderBy = join.orderBy if hasattr(join, 'otherColumn'): otherClass, clause = self._throughToRelatedJoin(join) else: otherClass, clause = self._throughToMultipleJoin(join) if not otherClass: raise AttributeError("throughTo argument (got %s) should be name of foreignKey or SQL*Join in %s" % (attr, self.sourceClass)) return otherClass.select(clause, orderBy=orderBy, connection=self._getConnection()) def _throughToFK(self, col): otherClass = getattr(self.sourceClass, "_SO_class_"+col.foreignKey) colName = col.name query = self.queryForSelect().newItems([sqlbuilder.ColumnAS(getattr(self.sourceClass.q, colName), colName)]).orderBy(None).distinct() query = sqlbuilder.Alias(query, "%s_%s" % (self.sourceClass.__name__, col.name)) return otherClass, otherClass.q.id==getattr(query.q, colName) def _throughToMultipleJoin(self, join): otherClass = join.otherClass colName = join.soClass.sqlmeta.style.dbColumnToPythonAttr(join.joinColumn) query = self.queryForSelect().newItems([sqlbuilder.ColumnAS(self.sourceClass.q.id, 'id')]).orderBy(None).distinct() query = sqlbuilder.Alias(query, "%s_%s" % (self.sourceClass.__name__, join.joinMethodName)) joinColumn = getattr(otherClass.q, colName) return otherClass, 
joinColumn==query.q.id def _throughToRelatedJoin(self, join): otherClass = join.otherClass intTable = sqlbuilder.Table(join.intermediateTable) colName = join.joinColumn query = self.queryForSelect().newItems([sqlbuilder.ColumnAS(self.sourceClass.q.id, 'id')]).orderBy(None).distinct() query = sqlbuilder.Alias(query, "%s_%s" % (self.sourceClass.__name__, join.joinMethodName)) clause = sqlbuilder.AND(otherClass.q.id == getattr(intTable, join.otherColumn), getattr(intTable, colName) == query.q.id) return otherClass, clause SQLObject-1.5.2/sqlobject/rdbhost/0000755000175000017500000000000012322476205016313 5ustar phdphd00000000000000SQLObject-1.5.2/sqlobject/rdbhost/__init__.py0000644000175000017500000000027411350206637020427 0ustar phdphd00000000000000from sqlobject.dbconnection import registerConnection def builder(): import rdbhostconnection return rdbhostconnection.RdbhostConnection registerConnection(['rdbhost'], builder) SQLObject-1.5.2/sqlobject/rdbhost/rdbhostconnection.py0000644000175000017500000000516311520027737022420 0ustar phdphd00000000000000""" This module written by David Keeney, 2009, 2010 Released under the LGPL for use with the SQLObject ORM library. """ import re from sqlobject import col from sqlobject import sqlbuilder from sqlobject.converters import registerConverter from sqlobject.dbconnection import DBAPI from sqlobject.postgres.pgconnection import PostgresConnection class RdbhostConnection(PostgresConnection): supportTransactions = False dbName = 'rdbhost' schemes = [dbName] def __init__(self, dsn=None, host=None, port=None, db=None, user=None, password=None, unicodeCols=False, driver='rdbhost', **kw): drivers = driver for driver in drivers.split(','): driver = driver.strip() if not driver: continue try: if driver == 'rdbhost': from rdbhdb import rdbhdb as rdb # monkey patch % escaping into Cursor._execute old_execute = getattr(rdb.Cursor, '_execute') setattr(rdb.Cursor, '_old_execute', old_execute) def _execute(self, query, *args): assert not any([a for a in args]) query = query.replace('%', '%%') self._old_execute(query, (), (), ()) setattr(rdb.Cursor, '_execute', _execute) self.module = rdb else: raise ValueError('Unknown Rdbhost driver %s' % driver) except ImportError: pass else: break else: raise ImportError('Cannot find the Rdbhost driver') self.user = user self.host = host self.port = port self.db = db self.password = password self.dsn_dict = dsn_dict = {} self.use_dsn = dsn is not None if host: dsn_dict["host"] = host if user: dsn_dict["role"] = user if password: dsn_dict["authcode"] = password if dsn is None: dsn = [] if db: dsn.append('dbname=%s' % db) if user: dsn.append('user=%s' % user) if password: dsn.append('password=%s' % password) if host: dsn.append('host=%s' % host) if port: dsn.append('port=%d' % port) dsn = ' '.join(dsn) self.dsn = dsn self.unicodeCols = unicodeCols self.schema = kw.pop('schema', None) self.dbEncoding = 'utf-8' DBAPI.__init__(self, **kw) SQLObject-1.5.2/sqlobject/versioning/0000755000175000017500000000000012322476205017031 5ustar phdphd00000000000000SQLObject-1.5.2/sqlobject/versioning/__init__.py0000644000175000017500000000737511563772713021167 0ustar phdphd00000000000000from sqlobject import * from datetime import datetime class Version(SQLObject): def restore(self): values = self.sqlmeta.asDict() del values['id'] del values['masterID'] del values['dateArchived'] for col in self.extraCols: del values[col] self.masterClass.get(self.masterID).set(**values) def nextVersion(self): version = self.select(AND(self.q.masterID == 
self.masterID, self.q.id > self.id), orderBy=self.q.id) if version.count(): return version[0] else: return self.master def getChangedFields(self): next = self.nextVersion() columns = self.masterClass.sqlmeta.columns fields = [] for column in columns: if column not in ["dateArchived", "id", "masterID"]: if getattr(self, column) != getattr(next, column): fields.append(column.title()) return fields @classmethod def select(cls, clause=None, *args, **kw): if not getattr(cls, '_connection', None): cls._connection = cls.masterClass._connection return super(Version, cls).select(clause, *args, **kw) def __getattr__(self, attr): if attr in self.__dict__: return self.__dict__[attr] else: return getattr(self.master, attr) def getColumns(columns, cls): for column, defi in cls.sqlmeta.columnDefinitions.items(): if column.endswith("ID") and isinstance(defi, ForeignKey): column = column[:-2] #remove incompatible constraints kwds = dict(defi._kw) for kw in ["alternateID", "unique"]: if kw in kwds: del kwds[kw] columns[column] = defi.__class__(**kwds) #ascend heirarchy if cls.sqlmeta.parentClass: getColumns(columns, cls.sqlmeta.parentClass) class Versioning(object): def __init__(self, extraCols = None): if extraCols: self.extraCols = extraCols else: self.extraCols = {} pass def __addtoclass__(self, soClass, name): self.name = name self.soClass = soClass attrs = {'dateArchived': DateTimeCol(default=datetime.now), 'master': ForeignKey(self.soClass.__name__), 'masterClass' : self.soClass, 'extraCols' : self.extraCols } getColumns (attrs, self.soClass) attrs.update(self.extraCols) self.versionClass = type(self.soClass.__name__+'Versions', (Version,), attrs) if '_connection' in self.soClass.__dict__: self.versionClass._connection = self.soClass.__dict__['_connection'] events.listen(self.createTable, soClass, events.CreateTableSignal) events.listen(self.rowUpdate, soClass, events.RowUpdateSignal) def createVersionTable(self, cls, conn): self.versionClass.createTable(ifNotExists=True, connection=conn) def createTable(self, soClass, connection, extra_sql, post_funcs): assert soClass is self.soClass post_funcs.append(self.createVersionTable) def rowUpdate(self, instance, kwargs): if instance.childName and instance.childName != self.soClass.__name__: return #if you want your child class versioned, version it. 
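        # RowUpdateSignal is emitted before the new values are written, so
        # asDict() below still reflects the pre-update state; that snapshot
        # is what gets stored as the archived version row.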
values = instance.sqlmeta.asDict() del values['id'] values['masterID'] = instance.id self.versionClass(connection=instance._connection, **values) def __get__(self, obj, type=None): if obj is None: return self return self.versionClass.select( self.versionClass.q.masterID==obj.id, connection=obj._connection) SQLObject-1.5.2/sqlobject/versioning/test/0000755000175000017500000000000012322476205020010 5ustar phdphd00000000000000SQLObject-1.5.2/sqlobject/versioning/test/__init__.py0000644000175000017500000000000010545230325022103 0ustar phdphd00000000000000SQLObject-1.5.2/sqlobject/versioning/test/test_version.py0000644000175000017500000001133611071242504023103 0ustar phdphd00000000000000from sqlobject import * from sqlobject.inheritance import InheritableSQLObject from sqlobject.versioning import Versioning from sqlobject.tests.dbtest import * class MyClass(SQLObject): name = StringCol() versions = Versioning() class Base(InheritableSQLObject): name = StringCol() value = IntCol(default=0) versions = Versioning() class Child(Base): toy = StringCol() class Government(InheritableSQLObject): name = StringCol() class Monarchy(Government): monarch = StringCol() versions = Versioning() class VChild(Base): weapon = StringCol() versions = Versioning() class HasForeign(SQLObject): foreign = ForeignKey("Base") versions = Versioning() def _set_extra(): return "read all about it" class Extra(SQLObject): name = StringCol() versions = Versioning(extraCols={'extra' : StringCol(default=_set_extra())}) class HasAltId(SQLObject): name = StringCol() altid = IntCol(alternateID=True) versions = Versioning() def setup(): classes = [MyClass, Base, Child, Government, Monarchy, VChild, Extra, HasAltId] if hasattr(HasForeign, "_connection"): classes.insert(0, HasForeign) else: classes.append(HasForeign) for cls in classes: if hasattr(cls, 'versions') and getattr(cls, "_connection", None) and \ cls._connection.tableExists(cls.sqlmeta.table): setupClass(cls.versions.versionClass) setupClass(cls) if hasattr(cls, 'versions'): setupClass(cls.versions.versionClass) for version in cls.versions.versionClass.select(): version.destroySelf() def test_versioning(): #the simple case setup() mc = MyClass(name='fleem') mc.set(name='morx') assert len(list(mc.versions)) == 1 assert mc.versions[0].name == "fleem" assert len(list(MyClass.select())) == 1 def test_inheritable_versioning(): setup() #base versioned, child unversioned base = Base(name='fleem') base.set(name='morx') assert len(list(base.versions)) == 1 assert base.versions[0].name == "fleem" assert len(list(Base.select())) == 1 child = Child(name='child', toy='nintendo') child.set(name='teenager', toy='guitar') assert len(list(child.versions)) == 0 #child versioned, base unversioned government = Government(name='canada') assert not hasattr(government, 'versions') monarchy = Monarchy(name='UK', monarch='king george iv') assert len(list(monarchy.versions)) == 0 monarchy.set(name='queen elisabeth ii') assert len(list(monarchy.versions)) == 1 assert monarchy.versions[0].name == "UK" assert len(list(Monarchy.select())) == 1 #both parent and child versioned num_base_versions = len(list(base.versions)) vchild = VChild(name='kid', weapon='slingshot') vchild.set(name='toon', weapon='dynamite') assert len(list(base.versions)) == num_base_versions assert len(list(vchild.versions)) == 1 vchild.name = "newname" #test setting using setattr directly rather than .set assert len(list(vchild.versions)) == 2 def test_restore(): setup() base = Base(name='fleem') base.set(name='morx') assert 
base.name == "morx" base.versions[0].restore() assert base.name == "fleem" monarchy = Monarchy(name='USA', monarch='Emperor Norton I') monarchy.set(name='morx') assert monarchy.name == "morx" monarchy.versions[0].restore() assert monarchy.name == "USA" assert monarchy.monarch == "Emperor Norton I" extra = Extra(name='fleem') extra.set(name='morx') assert extra.name == "morx" extra.versions[0].restore() assert extra.name == "fleem" def test_next(): setup() base = Base(name='first', value=1) base.set(name='second') base.set(name='third', value=2) version = base.versions[0] assert version.nextVersion() == base.versions[1] assert version.nextVersion().nextVersion() == base def test_get_changed(): setup() base = Base(name='first', value=1) base.set(name='second') base.set(name='third', value=2) assert base.versions[0].getChangedFields() == ['Name'] assert sorted(base.versions[1].getChangedFields()) == ['Name', 'Value'] def test_foreign_keys(): setup() base1 = Base(name='first', value=1) base2 = Base(name='first', value=1) has_foreign = HasForeign(foreign = base1) has_foreign.foreign = base2 assert has_foreign.versions[0].foreign == base1 def test_extra(): setup() extra = Extra(name='title') extra.name = 'new' assert extra.versions[0].extra == 'read all about it' assert sorted(extra.versions[0].getChangedFields()) == ['Name'] def test_altid(): setup() extra = HasAltId(name="fleem", altid=5) extra.name = "morx" SQLObject-1.5.2/sqlobject/util/0000755000175000017500000000000012322476205015623 5ustar phdphd00000000000000SQLObject-1.5.2/sqlobject/util/threadinglocal.py0000644000175000017500000000023011537673562021164 0ustar phdphd00000000000000try: from threading import local except ImportError: # No threads, so "thread local" means process-global class local(object): pass SQLObject-1.5.2/sqlobject/util/__init__.py0000644000175000017500000000000210372665120017723 0ustar phdphd00000000000000# SQLObject-1.5.2/sqlobject/util/csvimport.py0000644000175000017500000002560110372665120020226 0ustar phdphd00000000000000""" Import from a CSV file or directory of files. CSV files should have a header line that lists columns. Headers can also be appended with ``:type`` to indicate the type of the field. ``escaped`` is the default, though it can be overridden by the importer. Supported types: ``:python``: A python expression, run through ``eval()``. This can be a security risk, pass in ``allow_python=False`` if you don't want to allow it. ``:int``: Integer ``:float``: Float ``:str``: String ``:escaped``: A string with backslash escapes (note that you don't put quotation marks around the value) ``:base64``: A base64-encoded string ``:date``: ISO date, like YYYY-MM-DD; this can also be ``NOW+days`` or ``NOW-days`` ``:datetime``: ISO date/time like YYYY-MM-DDTHH:MM:SS (either T or a space can be used to separate the time, and seconds are optional). This can also be ``NOW+seconds`` or ``NOW-seconds`` ``:bool``: Converts true/false/yes/no/on/off/1/0 to boolean value ``:ref``: This will be resolved to the ID of the object named in this column (None if the column is empty). @@: Since there's no ordering, there's no way to promise the object already exists. You can also get back references to the objects if you have a special ``[name]`` column. Any column named ``[comment]`` or with no name will be ignored. In any column you can put ``[default]`` to exclude the value and use whatever default the class wants. ``[null]`` will use NULL. Lines that begin with ``[comment]`` are ignored. 
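For example (an illustrative sketch, not taken from any shipped data
file), a header row and a data row might look like::

    [name],title:str,count:int,added:date,owner:ref
    widget1,First widget,3,NOW-1,alice

Here ``widget1`` names the new object, ``added`` resolves to yesterday's
date, and ``owner`` is resolved to the ID of whatever object was given the
name ``alice``.  ``load_csv()`` returns a dictionary mapping class names to
lists of row dictionaries, e.g. ``{'Widget': [{'title': 'First widget',
...}]}`` (``Widget`` being a hypothetical class name).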
""" from datetime import datetime, date, timedelta import os import csv import types __all__ = ['load_csv_from_directory', 'load_csv', 'create_data'] DEFAULT_TYPE = 'escaped' def create_data(data, class_getter, keyorder=None): """ Create the ``data``, which is the return value from ``load_csv()``. Classes will be resolved with the callable ``class_getter``; or if ``class_getter`` is a module then the class names will be attributes of that. Returns a dictionary of ``{object_name: object(s)}``, using the names from the ``[name]`` columns (if there are any). If a name is used multiple times, you get a list of objects, not a single object. If ``keyorder`` is given, then the keys will be retrieved in that order. It can be a list/tuple of names, or a sorting function. If not given and ``class_getter`` is a module and has a ``soClasses`` function, then that will be used for the order. """ objects = {} classnames = data.keys() if (not keyorder and isinstance(class_getter, types.ModuleType) and hasattr(class_getter, 'soClasses')): keyorder = [c.__name__ for c in class_getter.soClasses] if not keyorder: classnames.sort() elif isinstance(keyorder, (list, tuple)): all = classnames classnames = [name for name in keyorder if name in classnames] for name in all: if name not in classnames: classnames.append(name) else: classnames.sort(keyorder) for classname in classnames: items = data[classname] if not items: continue if isinstance(class_getter, types.ModuleType): soClass = getattr(class_getter, classname) else: soClass = class_getter(classname) for item in items: for key, value in item.items(): if isinstance(value, Reference): resolved = objects.get(value.name) if not resolved: raise ValueError( "Object reference to %r does not have target" % value.name) elif (isinstance(resolved, list) and len(resolved) > 1): raise ValueError( "Object reference to %r is ambiguous (got %r)" % (value.name, resolved)) item[key] = resolved.id if '[name]' in item: name = item.pop('[name]').strip() else: name = None inst = soClass(**item) if name: if name in objects: if isinstance(objects[name], list): objects[name].append(inst) else: objects[name] = [objects[name], inst] else: objects[name] = inst return objects def load_csv_from_directory(directory, allow_python=True, default_type=DEFAULT_TYPE, allow_multiple_classes=True): """ Load the data from all the files in a directory. Filenames indicate the class, with ``general.csv`` for data not associated with a class. Return data just like ``load_csv`` does. This might cause problems on case-insensitive filesystems. """ results = {} for filename in os.listdir(directory): base, ext = os.path.splitext(filename) if ext.lower() != '.csv': continue f = open(os.path.join(directory, filename), 'rb') csvreader = csv.reader(f) data = load_csv(csvreader, allow_python=allow_python, default_type=default_type, default_class=base, allow_multiple_classes=allow_multiple_classes) f.close() for classname, items in data.items(): results.setdefault(classname, []).extend(items) return results def load_csv(csvreader, allow_python=True, default_type=DEFAULT_TYPE, default_class=None, allow_multiple_classes=True): """ Loads the CSV file, returning a list of dictionaries with types coerced. 
""" current_class = default_class current_headers = None results = {} for row in csvreader: if not [cell for cell in row if cell.strip()]: # empty row continue if row and row[0].strip() == 'CLASS:': if not allow_multiple_classes: raise ValueError( "CLASS: line in CSV file, but multiple classes are not allowed in this file (line: %r)" % row) if not row[1:]: raise ValueError( "CLASS: in line in CSV file, with no class name in next column (line: %r)" % row) current_class = row[1] current_headers = None continue if not current_class: raise ValueError( "No CLASS: line given, and there is no default class for this file (line: %r" % row) if current_headers is None: current_headers = _parse_headers(row, default_type) continue if row[0] == '[comment]': continue # Pad row with empty strings: row += ['']*(len(current_headers) - len(row)) row_converted = {} for value, (name, coercer, args) in zip(row, current_headers): if name is None: # Comment continue if value == '[default]': continue if value == '[null]': row_converted[name] = None continue args = (value,) + args row_converted[name] = coercer(*args) results.setdefault(current_class, []).append(row_converted) return results def _parse_headers(header_row, default_type): headers = [] for name in header_row: original_name = name if ':' in name: name, type = name.split(':', 1) else: type = default_type if type == 'python' and not allow_python: raise ValueError( ":python header given when python headers are not allowed (with header %r" % original_name) name = name.strip() if name == '[comment]' or not name: headers.append((None, None, None)) continue type = type.strip().lower() if '(' in type: type, arg = type.split('(', 1) if not arg.endswith(')'): raise ValueError( "Arguments (in ()'s) do not end with ): %r" % original_name) args = (arg[:-1],) else: args = () if name == '[name]': type = 'str' coercer, args = get_coercer(type) headers.append((name, coercer, args)) return headers _coercers = {} def get_coercer(type): if type not in _coercers: raise ValueError( "Coercion type %r not known (I know: %s)" % (type, ', '.join(_coercers.keys()))) return _coercers[type] def register_coercer(type, coercer, *args): _coercers[type] = (coercer, args) def identity(v): return v register_coercer('str', identity) register_coercer('string', identity) def decode_string(v, encoding): return v.decode(encoding) register_coercer('escaped', decode_string, 'string_escape') register_coercer('strescaped', decode_string, 'string_escape') register_coercer('base64', decode_string, 'base64') register_coercer('int', int) register_coercer('float', float) def parse_python(v): return eval(v, {}, {}) register_coercer('python', parse_python) def parse_date(v): v = v.strip() if not v: return None if v.startswith('NOW-') or v.startswith('NOW+'): days = int(v[3:]) now = date.today() return now+timedelta(days) else: parsed = time.strptime(v, '%Y-%m-%d') return date.fromtimestamp(time.mktime(parsed)) register_coercer('date', parse_date) def parse_datetime(v): v = v.strip() if not v: return None if v.startswith('NOW-') or v.startswith('NOW+'): seconds = int(v[3:]) now = datetime.now() return now+timedelta(0, seconds) else: fmts = ['%Y-%m-%dT%H:%M:%S', '%Y-%m-%d %H:%M:%S', '%Y-%m-%dT%H:%M', '%Y-%m-%d %H:%M'] for fmt in fmts[:-1]: try: parsed = time.strptime(v, fmt) break except ValueError: pass else: parsed = time.strptime(v, fmts[-1]) return datetime.fromtimestamp(time.mktime(parsed)) register_coercer('datetime', parse_datetime) class Reference(object): def __init__(self, name): self.name = 
name def parse_ref(v): if not v.strip(): return None else: return Reference(v) register_coercer('ref', parse_ref) def parse_bool(v): v = v.strip().lower() if v in ('y', 'yes', 't', 'true', 'on', '1'): return True elif v in ('n', 'no', 'f', 'false', 'off', '0'): return False raise ValueError( "Value is not boolean-like: %r" % value) register_coercer('bool', parse_bool) register_coercer('boolean', parse_bool) SQLObject-1.5.2/sqlobject/util/csvexport.py0000644000175000017500000001514610674477227020256 0ustar phdphd00000000000000""" Exports a SQLObject class (possibly annotated) to a CSV file. """ import os import csv try: from cStringIO import StringIO except ImportError: from StringIO import StringIO import sqlobject __all__ = ['export_csv', 'export_csv_zip'] def export_csv(soClass, select=None, writer=None, connection=None, orderBy=None): """ Export the SQLObject class ``soClass`` to a CSV file. ``soClass`` can also be a SelectResult object, as returned by ``.select()``. If it is a class, all objects will be retrieved, ordered by ``orderBy`` if given, or the ``.csvOrderBy`` attribute if present (but csvOrderBy will only be applied when no select result is given). You can also pass in select results (or simply a list of instances) in ``select`` -- if you have a list of objects (not a SelectResult instance, as produced by ``.select()``) then you must pass it in with ``select`` and pass the class in as the first argument. ``writer`` is a ``csv.writer()`` object, or a file-like object. If not given, the string of the file will be returned. Uses ``connection`` as the data source, if given, otherwise the default connection. Columns can be annotated with ``.csvTitle`` attributes, which will form the attributes of the columns, or 'title' (secondarily), or if nothing then the column attribute name. If a column has a ``.noCSV`` attribute which is true, then the column will be suppressed. Additionally a class can have an ``.extraCSVColumns`` attribute, which should be a list of strings/tuples. If a tuple, it should be like ``(attribute, title)``, otherwise it is the attribute, which will also be the title. These will be appended to the end of the CSV file; the attribute will be retrieved from instances. Also a ``.csvColumnOrder`` attribute can be on the class, which is the string names of attributes in the order they should be presented. 
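    A minimal, hypothetical call (``Person`` stands in for any SQLObject
    class)::

        csv_string = export_csv(Person, orderBy='name')

    With no ``writer`` given, the CSV text is returned as a string, as
    shown above.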
""" return_fileobj = None if not writer: return_fileobj = StringIO() writer = csv.writer(return_fileobj) elif not hasattr(writer, 'writerow'): writer = csv.writer(writer) if isinstance(soClass, sqlobject.SQLObject.SelectResultsClass): assert select is None, ( "You cannot pass in a select argument (%r) and a SelectResult argument (%r) for soClass" % (select, soClass)) select = soClass soClass = select.sourceClass elif select is None: select = soClass.select() if getattr(soClass, 'csvOrderBy', None): select = select.orderBy(soClass.csvOrderBy) if orderBy: select = select.orderBy(orderBy) if connection: select = select.connection(connection) _actually_export_csv(soClass, select, writer) if return_fileobj: # They didn't pass any writer or file object in, so we return # the string result: return return_fileobj.getvalue() def _actually_export_csv(soClass, select, writer): attributes, titles = _find_columns(soClass) writer.writerow(titles) for soInstance in select: row = [getattr(soInstance, attr) for attr in attributes] writer.writerow(row) def _find_columns(soClass): order = [] attrs = {} for col in soClass.sqlmeta.columnList: if getattr(col, 'noCSV', False): continue order.append(col.name) title = col.name if hasattr(col, 'csvTitle'): title = col.csvTitle elif getattr(col, 'title', None) is not None: title = col.title attrs[col.name] = title for attrDesc in getattr(soClass, 'extraCSVColumns', []): if isinstance(attrDesc, (list, tuple)): attr, title = attrDesc else: attr = title = attrDesc order.append(attr) attrs[attr] = title if hasattr(soClass, 'csvColumnOrder'): oldOrder = order order = soClass.csvColumnOrder for attr in order: if attr not in oldOrder: raise KeyError( "Attribute %r in csvColumnOrder (on class %r) does not exist as a column or in .extraCSVColumns (I have: %r)" % (attr, soClass, oldOrder)) oldOrder.remove(attr) order.extend(oldOrder) titles = [attrs[attr] for attr in order] return order, titles def export_csv_zip(soClasses, file=None, zip=None, filename_prefix='', connection=None): """ Export several SQLObject classes into a .zip file. Each item in the ``soClasses`` list may be a SQLObject class, select result, or ``(soClass, select)`` tuple. Each file in the zip will be named after the class name (with ``.csv`` appended), or using the filename in the ``.csvFilename`` attribute. If ``file`` is given, the zip will be written to that. ``file`` may be a string (a filename) or a file-like object. If not given, a string will be returnd. If ``zip`` is given, then the files will be written to that zip file. All filenames will be prefixed with ``filename_prefix`` (which may be a directory name, for instance). 
""" import zipfile close_file_when_finished = False close_zip_when_finished = True return_when_finished = False if file: if isinstance(file, basestring): close_when_finished = True file = open(file, 'wb') elif zip: close_zip_when_finished = False else: return_when_finished = True file = StringIO() if not zip: zip = zipfile.ZipFile(file, mode='w') try: _actually_export_classes(soClasses, zip, filename_prefix, connection) finally: if close_zip_when_finished: zip.close() if close_file_when_finished: file.close() if return_when_finished: return file.getvalue() def _actually_export_classes(soClasses, zip, filename_prefix, connection): for classDesc in soClasses: if isinstance(classDesc, (tuple, list)): soClass, select = classDesc elif isinstance(classDesc, sqlobject.SQLObject.SelectResultsClass): select = classDesc soClass = select.sourceClass else: soClass = classDesc select = None filename = getattr(soClass, 'csvFilename', soClass.__name__) if not os.path.splitext(filename)[1]: filename += '.csv' filename = filename_prefix + filename zip.writestr(filename, export_csv(soClass, select, connection=connection)) SQLObject-1.5.2/sqlobject/util/moduleloader.py0000644000175000017500000000255211563772713020666 0ustar phdphd00000000000000import sys import imp def load_module(module_name): mod = __import__(module_name) components = module_name.split('.') for comp in components[1:]: mod = getattr(mod, comp) return mod def load_module_from_name(filename, module_name): if module_name in sys.modules: return sys.modules[module_name] init_filename = os.path.join(os.path.dirname(filename), '__init__.py') if not os.path.exists(init_filename): try: f = open(init_filename, 'w') except (OSError, IOError), e: raise IOError( 'Cannot write __init__.py file into directory %s (%s)\n' % (os.path.dirname(filename), e)) f.write('#\n') f.close() fp = None if module_name in sys.modules: return sys.modules[module_name] if '.' in module_name: parent_name = '.'.join(module_name.split('.')[:-1]) base_name = module_name.split('.')[-1] parent = load_module_from_name(os.path.dirname(filename), parent_name) else: base_name = module_name fp = None try: fp, pathname, stuff = imp.find_module( base_name, [os.path.dirname(filename)]) module = imp.load_module(module_name, fp, pathname, stuff) finally: if fp is not None: fp.close() return module SQLObject-1.5.2/sqlobject/maxdb/0000755000175000017500000000000012322476205015741 5ustar phdphd00000000000000SQLObject-1.5.2/sqlobject/maxdb/maxdbconnection.py0000644000175000017500000002524711563772713021511 0ustar phdphd00000000000000""" Contributed by Edigram SAS, Paris France Tel:01 44 77 94 00 Ahmed MOHAMED ALI 27 April 2004 This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
connection creation sample:: __connection__ = DBConnection.maxdbConnection( host=hostname, database=dbname, user=user_name, password=user_password, autoCommit=1, debug=1) """ from sqlobject.dbconnection import DBAPI from sqlobject import col class maxdbException(Exception): def __init__(self, value): self.value = value def __str__(self): return repr(self.value) class LowerBoundOfSliceIsNotSupported(maxdbException): def __init__(self, value): maxdbException.__init__(self, '') class IncorrectIDStyleError(maxdbException) : def __init__(self,value): maxdbException.__init__( self, 'This primary key name is not in the expected style, ' 'please rename the column to %r or switch to another style' % value) class StyleMismatchError(maxdbException): def __init__(self, value): maxdbException.__init__( self, 'The name %r is only permitted for primary key, change the ' 'column name or switch to another style' % value) class PrimaryKeyNotFounded(maxdbException): def __init__(self, value): maxdbException.__init__( self, "No primary key was defined on table %r" % value) SAPDBMAX_ID_LENGTH=32 class MaxdbConnection(DBAPI): supportTransactions = True dbName = 'maxdb' schemes = [dbName] def __init__ (self, host='', port=None, user=None, password=None, database=None, autoCommit=1, sqlmode='internal', isolation=None, timeout=None, **kw): from sapdb import dbapi self.module = dbapi self.host = host self.port = port self.user = user self.password = password self.db = database self.autoCommit = autoCommit self.sqlmode = sqlmode self.isolation = isolation self.timeout = timeout DBAPI.__init__(self, **kw) @classmethod def _connectionFromParams(cls, auth, password, host, port, path, args): path = path.replace('/', os.path.sep) return cls(host, port, user=auth, password=password, database=path, **args) def _getConfigParams(self,sqlmode,auto): autocommit='off' if auto: autocommit='on' opt = {} opt["autocommit"] = autocommit opt["sqlmode"] = sqlmode if self.isolation: opt["isolation"]=self.isolation if self.timeout : opt["timeout"]=self.timeout return opt def _setAutoCommit(self, conn, auto): conn.close() conn.__init__(self.user, self.password, self.db, self.host, **self._getConfigParams(self.sqlmode, auto)) def createSequenceName(self,table): """ sequence name are builded with the concatenation of the table name with '_SEQ' word we truncate the name of the sequence_name because sapdb identifier cannot exceed 32 characters so that the name of the sequence does not exceed 32 characters """ return '%s_SEQ'%(table[:SAPDBMAX_ID_LENGTH -4]) def makeConnection(self): conn = self.module.Connection( self.user, self.password, self.db, self.host, **self._getConfigParams(self.sqlmode, self.autoCommit)) return conn def _queryInsertID(self, conn, soInstance, id, names, values): table = soInstance.sqlmeta.table idName = soInstance.sqlmeta.idName c = conn.cursor() if id is None: c.execute('SELECT %s.NEXTVAL FROM DUAL' % (self.createSequenceName(table))) id = c.fetchone()[0] names = [idName] + names values = [id] + values q = self._insertSQL(table, names, values) if self.debug: self.printDebug(conn, q, 'QueryIns') c.execute(q) if self.debugOutput: self.printDebug(conn, id, 'QueryIns', 'result') return id @classmethod def sqlAddLimit(cls,query,limit): sql = query sql = sql.replace("SELECT","SELECT ROWNO, ") if sql.find('WHERE') != -1: sql = sql + ' AND ' + limit else: sql = sql + 'WHERE ' + limit return sql @classmethod def _queryAddLimitOffset(cls, query, start, end): if start: raise LowerBoundOfSliceIsNotSupported limit = ' ROWNO <= %d 
' % (end) return cls.sqlAddLimit(query,limit) def createTable(self, soClass): #we create the table in a transaction because the addition of the #table and the sequence must be atomic #i tried to use the transaction class but i get a recursion limit error #t=self.transaction() # t.query('CREATE TABLE %s (\n%s\n)' % \ # (soClass.sqlmeta.table, self.createColumns(soClass))) # # t.query("CREATE SEQUENCE %s" % self.createSequenceName(soClass.sqlmeta.table)) # t.commit() #so use transaction when the problem will be solved self.query('CREATE TABLE %s (\n%s\n)' % \ (soClass.sqlmeta.table, self.createColumns(soClass))) self.query("CREATE SEQUENCE %s" % self.createSequenceName(soClass.sqlmeta.table)) return [] def createReferenceConstraint(self, soClass, col): return col.maxdbCreateReferenceConstraint() def createColumn(self, soClass, col): return col.maxdbCreateSQL() def createIDColumn(self, soClass): key_type = {int: "INT", str: "TEXT"}[soClass.sqlmeta.idType] return '%s %s PRIMARY KEY' % (soClass.sqlmeta.idName, key_type) def createIndexSQL(self, soClass, index): return index.maxdbCreateIndexSQL(soClass) def dropTable(self, tableName,cascade=False): #we drop the table in a transaction because the removal of the #table and the sequence must be atomic #i tried to use the transaction class but i get a recursion limit error # try: # t=self.transaction() # t.query("DROP TABLE %s" % tableName) # t.query("DROP SEQUENCE %s" % self.createSequenceName(tableName)) # t.commit() # except: # t.rollback() #so use transaction when the problem will be solved self.query("DROP TABLE %s" % tableName) self.query("DROP SEQUENCE %s" % self.createSequenceName(tableName)) def joinSQLType(self, join): return 'INT NOT NULL' def tableExists(self, tableName): for (table,) in self.queryAll("SELECT OBJECT_NAME FROM ALL_OBJECTS WHERE OBJECT_TYPE='TABLE'"): if table.lower() == tableName.lower(): return True return False def addColumn(self, tableName, column): self.query('ALTER TABLE %s ADD %s' % (tableName, column.maxdbCreateSQL())) def delColumn(self, sqlmeta, column): self.query('ALTER TABLE %s DROP COLUMN %s' % (sqlmeta.table, column.dbName)) GET_COLUMNS = """ SELECT COLUMN_NAME, NULLABLE, DATA_DEFAULT, DATA_TYPE, DATA_LENGTH, DATA_SCALE FROM USER_TAB_COLUMNS WHERE TABLE_NAME=UPPER('%s')""" GET_PK_AND_FK = """ SELECT constraint_cols.column_name, constraints.constraint_type, refname,reftablename FROM user_cons_columns constraint_cols INNER JOIN user_constraints constraints ON constraint_cols.constraint_name = constraints.constraint_name LEFT OUTER JOIN show_foreign_key fk ON constraint_cols.column_name = fk.columnname WHERE constraints.table_name =UPPER('%s')""" def columnsFromSchema(self, tableName, soClass): colData = self.queryAll(self.GET_COLUMNS % tableName) results = [] keymap = {} pkmap={} fkData = self.queryAll(self.GET_PK_AND_FK% tableName) for col, cons_type, refcol, reftable in fkData: col_name= col.lower() pkmap[col_name]=False if cons_type == 'R': keymap[col_name]=reftable.lower() elif cons_type == 'P': pkmap[col_name]=True if len(pkmap) == 0: raise PrimaryKeyNotFounded, tableName for (field, nullAllowed, default, data_type, data_len, data_scale) in colData: # id is defined as primary key --> ok # We let sqlobject raise error if the 'id' is used for another column field_name = field.lower() if (field_name == soClass.sqlmeta.idName) and pkmap[field_name]: continue colClass, kw = self.guessClass(data_type,data_len,data_scale) kw['name'] = field_name kw['dbName'] = field if nullAllowed == 'Y' : nullAllowed=False 
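            # MaxDB reports NULLABLE == 'Y' for columns that accept NULL, so
            # notNone (assigned just below) must be the inverse of that flag.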
else: nullAllowed=True kw['notNone'] = nullAllowed if default is not None: kw['default'] = default if field_name in keymap: kw['foreignKey'] = keymap[field_name] results.append(colClass(**kw)) return results _numericTypes=['INTEGER', 'INT','SMALLINT'] _dateTypes=['DATE','TIME','TIMESTAMP'] def guessClass(self, t, flength, fscale=None): """ An internal method that tries to figure out what Col subclass is appropriate given whatever introspective information is available -- both very database-specific. """ if t in self._numericTypes: return col.IntCol, {} # The type returned by the sapdb library for LONG is # SapDB_LongReader To get the data call the read member with # desired size (default =-1 means get all) elif t.find('LONG') != -1: return col.StringCol, {'length': flength, 'varchar': False} elif t in self._dateTypes: return col.DateTimeCol, {} elif t == 'FIXED': return CurrencyCol,{'size':flength, 'precision':fscale} else: return col.Col, {} SQLObject-1.5.2/sqlobject/maxdb/__init__.py0000644000175000017500000000027311133142363020046 0ustar phdphd00000000000000from sqlobject.dbconnection import registerConnection def builder(): import maxdbconnection return maxdbconnection.MaxdbConnection registerConnection(['maxdb','sapdb'],builder) SQLObject-1.5.2/sqlobject/maxdb/readme.txt0000644000175000017500000000125610372665115017746 0ustar phdphd00000000000000This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. Author: Edigram SA - Paris France Tel:0144779400 SAPDBAPI installation --------------------- The sapdb module can be downloaded from: Win32 ------- ftp://ftp.sap.com/pub/sapdb/bin/win/sapdb-python-win32-7.4.03.31a.zip Linux ------ ftp://ftp.sap.com/pub/sapdb/bin/linux/sapdb-python-linux-i386-7.4.03.31a.tgz uncompress the archive and add the sapdb directory path to your PYTHONPATH. SQLObject-1.5.2/sqlobject/index.py0000644000175000017500000001446711563772713016354 0ustar phdphd00000000000000from itertools import count from types import * from converters import sqlrepr creationOrder = count() class SODatabaseIndex(object): def __init__(self, soClass, name, columns, creationOrder, unique=False): self.soClass = soClass self.name = name self.descriptions = self.convertColumns(columns) self.creationOrder = creationOrder self.unique = unique def get(self, *args, **kw): if not self.unique: raise AttributeError, ( "'%s' object has no attribute 'get' (index is not unique)" % self.name) connection = kw.pop('connection', None) if args and kw: raise TypeError, "You cannot mix named and unnamed arguments" columns = [d['column'] for d in self.descriptions if 'column' in d] if kw and len(kw) != len(columns) or args and len(args) != len(columns): raise TypeError, ("get() takes exactly %d argument and an optional " "named argument 'connection' (%d given)" % ( len(columns), len(args)+len(kw))) if args: kw = {} for i in range(len(args)): if columns[i].foreignName is not None: kw[columns[i].foreignName] = args[i] else: kw[columns[i].name] = args[i] return self.soClass.selectBy(connection=connection, **kw).getOne() def convertColumns(self, columns): """ Converts all the columns to dictionary descriptors; dereferences string column names. 
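        For example, a plain string ``'name'`` is normalized to
        ``{'column': <the corresponding column object>}``, while a dict such
        as ``{'column': 'name', 'length': 10}`` is passed through with the
        string dereferenced to the column object.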
""" new = [] for desc in columns: if not isinstance(desc, dict): desc = {'column': desc} if 'expression' in desc: assert 'column' not in desc, ( 'You cannot provide both an expression and a column ' '(for %s in index %s in %s)' % (desc, self.name, self.soClass)) assert 'length' not in desc, ( 'length does not apply to expressions (for %s in ' 'index %s in %s)' % (desc, self.name, self.soClass)) new.append(desc) continue columnName = desc['column'] if not isinstance(columnName, str): columnName = columnName.name colDict = self.soClass.sqlmeta.columns if columnName not in colDict: for possible in colDict.values(): if possible.origName == columnName: column = possible break else: # None found raise ValueError, "The column by the name %r was not found in the class %r" % (columnName, self.soClass) else: column = colDict[columnName] desc['column'] = column new.append(desc) return new def getExpression(self, desc, db): if isinstance(desc['expression'], str): return desc['expression'] else: return sqlrepr(desc['expression'], db) def sqliteCreateIndexSQL(self, soClass): if self.unique: uniqueOrIndex = 'UNIQUE INDEX' else: uniqueOrIndex = 'INDEX' spec = [] for desc in self.descriptions: if 'expression' in desc: spec.append(self.getExpression(desc, 'sqlite')) else: spec.append(desc['column'].dbName) ret = 'CREATE %s %s_%s ON %s (%s)' % \ (uniqueOrIndex, self.soClass.sqlmeta.table, self.name, self.soClass.sqlmeta.table, ', '.join(spec)) return ret postgresCreateIndexSQL = maxdbCreateIndexSQL = mssqlCreateIndexSQL = sybaseCreateIndexSQL = firebirdCreateIndexSQL = sqliteCreateIndexSQL def mysqlCreateIndexSQL(self, soClass): if self.unique: uniqueOrIndex = 'UNIQUE' else: uniqueOrIndex = 'INDEX' spec = [] for desc in self.descriptions: if 'expression' in desc: spec.append(self.getExpression(desc, 'mysql')) elif 'length' in desc: spec.append('%s(%d)' % (desc['column'].dbName, desc['length'])) else: spec.append(desc['column'].dbName) return 'ALTER TABLE %s ADD %s %s (%s)' % \ (soClass.sqlmeta.table, uniqueOrIndex, self.name, ', '.join(spec)) class DatabaseIndex(object): """ This takes a variable number of parameters, each of which is a column for indexing. Each column may be a column object or the string name of the column (*not* the database name). You may also use dictionaries, to further customize the indexing of the column. The dictionary may have certain keys: 'column': The column object or string identifier. 'length': MySQL will only index the first N characters if this is given. For other databases this is ignored. 'expression': You can create an index based on an expression, e.g., 'lower(column)'. This can either be a string or a sqlbuilder expression. Further keys may be added to the column specs in the future. The class also take the keyword argument `unique`; if true then a UNIQUE index is created. 
""" baseClass = SODatabaseIndex def __init__(self, *columns, **kw): kw['columns'] = columns self.kw = kw self.creationOrder = creationOrder.next() def setName(self, value): assert self.kw.get('name') is None, "You cannot change a name after it has already been set (from %s to %s)" % (self.kw['name'], value) self.kw['name'] = value def _get_name(self): return self.kw['name'] def _set_name(self, value): self.setName(value) name = property(_get_name, _set_name) def withClass(self, soClass): return self.baseClass(soClass=soClass, creationOrder=self.creationOrder, **self.kw) def __repr__(self): return '<%s %s %s>' % ( self.__class__.__name__, hex(abs(id(self)))[2:], self.kw) __all__ = ['DatabaseIndex'] SQLObject-1.5.2/sqlobject/classregistry.py0000644000175000017500000001144711563772713020136 0ustar phdphd00000000000000""" classresolver.py 2 February 2004, Ian Bicking Resolves strings to classes, and runs callbacks when referenced classes are created. Classes are referred to only by name, not by module. So that identically-named classes can coexist, classes are put into individual registries, which are keyed on strings (names). These registries are created on demand. Use like:: >>> import classregistry >>> registry = classregistry.registry('MyModules') >>> def afterMyClassExists(cls): ... print 'Class finally exists:', cls >>> registry.addClassCallback('MyClass', afterMyClassExists) >>> class MyClass: ... pass >>> registry.addClass(MyClass) Class finally exists: MyClass """ class ClassRegistry(object): """ We'll be dealing with classes that reference each other, so class C1 may reference C2 (in a join), while C2 references C1 right back. Since classes are created in an order, there will be a point when C1 exists but C2 doesn't. So we deal with classes by name, and after each class is created we try to fix up any references by replacing the names with actual classes. Here we keep a dictionaries of class names to classes -- note that the classes might be spread among different modules, so since we pile them together names need to be globally unique, to just module unique. Like needSet below, the container dictionary is keyed by the class registry. """ def __init__(self, name): self.name = name self.classes = {} self.callbacks = {} self.genericCallbacks = [] def addClassCallback(self, className, callback, *args, **kw): """ Whenever a name is substituted for the class, you can register a callback that will be called when the needed class is created. If it's already been created, the callback will be called immediately. """ if className in self.classes: callback(self.classes[className], *args, **kw) else: self.callbacks.setdefault(className, []).append((callback, args, kw)) def addCallback(self, callback, *args, **kw): """ This callback is called for all classes, not just specific ones (like addClassCallback). """ self.genericCallbacks.append((callback, args, kw)) for cls in self.classes.values(): callback(cls, *args, **kw) def addClass(self, cls): """ Everytime a class is created, we add it to the registry, so that other classes can find it by name. We also call any callbacks that are waiting for the class. 
""" if cls.__name__ in self.classes: import sys other = self.classes[cls.__name__] raise ValueError( "class %s is already in the registry (other class is " "%r, from the module %s in %s; attempted new class is " "%r, from the module %s in %s)" % (cls.__name__, other, other.__module__, getattr(sys.modules.get(other.__module__), '__file__', '(unknown)'), cls, cls.__module__, getattr(sys.modules.get(cls.__module__), '__file__', '(unknown)'))) self.classes[cls.__name__] = cls if cls.__name__ in self.callbacks: for callback, args, kw in self.callbacks[cls.__name__]: callback(cls, *args, **kw) del self.callbacks[cls.__name__] for callback, args, kw in self.genericCallbacks: callback(cls, *args, **kw) def getClass(self, className): try: return self.classes[className] except KeyError: all = self.classes.keys() all.sort() raise KeyError( "No class %s found in the registry %s (these classes " "exist: %s)" % (className, self.name or '[default]', ', '.join(all))) def allClasses(self): return self.classes.values() class _MasterRegistry(object): """ This singleton holds all the class registries. There can be multiple registries to hold different unrelated sets of classes that reside in the same process. These registries are named with strings, and are created on demand. The MasterRegistry module global holds the singleton. """ def __init__(self): self.registries = {} def registry(self, item): if item not in self.registries: self.registries[item] = ClassRegistry(item) return self.registries[item] MasterRegistry = _MasterRegistry() registry = MasterRegistry.registry def findClass(name, class_registry=None): return registry(class_registry).getClass(name) SQLObject-1.5.2/sqlobject/joins.py0000644000175000017500000004266511645061726016364 0ustar phdphd00000000000000from itertools import count import classregistry import events import styles import sqlbuilder from styles import capword __all__ = ['MultipleJoin', 'SQLMultipleJoin', 'RelatedJoin', 'SQLRelatedJoin', 'SingleJoin', 'ManyToMany', 'OneToMany'] creationOrder = count() NoDefault = sqlbuilder.NoDefault def getID(obj): try: return obj.id except AttributeError: return int(obj) class Join(object): def __init__(self, otherClass=None, **kw): kw['otherClass'] = otherClass self.kw = kw self._joinMethodName = self.kw.pop('joinMethodName', None) self.creationOrder = creationOrder.next() def _set_joinMethodName(self, value): assert self._joinMethodName == value or self._joinMethodName is None, "You have already given an explicit joinMethodName (%s), and you are now setting it to %s" % (self._joinMethodName, value) self._joinMethodName = value def _get_joinMethodName(self): return self._joinMethodName joinMethodName = property(_get_joinMethodName, _set_joinMethodName) name = joinMethodName def withClass(self, soClass): if 'joinMethodName' in self.kw: self._joinMethodName = self.kw['joinMethodName'] del self.kw['joinMethodName'] return self.baseClass(creationOrder=self.creationOrder, soClass=soClass, joinDef=self, joinMethodName=self._joinMethodName, **self.kw) # A join is separate from a foreign key, i.e., it is # many-to-many, or one-to-many where the *other* class # has the foreign key. 
class SOJoin(object): def __init__(self, creationOrder, soClass=None, otherClass=None, joinColumn=None, joinMethodName=None, orderBy=NoDefault, joinDef=None): self.creationOrder = creationOrder self.soClass = soClass self.joinDef = joinDef self.otherClassName = otherClass classregistry.registry(soClass.sqlmeta.registry).addClassCallback( otherClass, self._setOtherClass) self.joinColumn = joinColumn self.joinMethodName = joinMethodName self._orderBy = orderBy if not self.joinColumn: # Here we set up the basic join, which is # one-to-many, where the other class points to # us. self.joinColumn = styles.getStyle( self.soClass).tableReference(self.soClass.sqlmeta.table) def orderBy(self): if self._orderBy is NoDefault: self._orderBy = self.otherClass.sqlmeta.defaultOrder return self._orderBy orderBy = property(orderBy) def _setOtherClass(self, cls): self.otherClass = cls def hasIntermediateTable(self): return False def _applyOrderBy(self, results, defaultSortClass): if self.orderBy is not None: results.sort(sorter(self.orderBy)) return results def sorter(orderBy): if isinstance(orderBy, (tuple, list)): if len(orderBy) == 1: orderBy = orderBy[0] else: fhead = sorter(orderBy[0]) frest = sorter(orderBy[1:]) return lambda a, b, fhead=fhead, frest=frest: fhead(a, b) or frest(a, b) if isinstance(orderBy, sqlbuilder.DESC) \ and isinstance(orderBy.expr, sqlbuilder.SQLObjectField): orderBy = '-' + orderBy.expr.original elif isinstance(orderBy, sqlbuilder.SQLObjectField): orderBy = orderBy.original # @@: but we don't handle more complex expressions for orderings if orderBy.startswith('-'): orderBy = orderBy[1:] reverse = True else: reverse = False def cmper(a, b, attr=orderBy, rev=reverse): a = getattr(a, attr) b = getattr(b, attr) if rev: a, b = b, a if a is None: if b is None: return 0 return -1 if b is None: return 1 return cmp(a, b) return cmper # This is a one-to-many class SOMultipleJoin(SOJoin): def __init__(self, addRemoveName=None, **kw): # addRemovePrefix is something like @@ SOJoin.__init__(self, **kw) # Here we generate the method names if not self.joinMethodName: name = self.otherClassName[0].lower() + self.otherClassName[1:] if name.endswith('s'): name = name + "es" else: name = name + "s" self.joinMethodName = name if addRemoveName: self.addRemoveName = addRemoveName else: self.addRemoveName = capword(self.otherClassName) def performJoin(self, inst): ids = inst._connection._SO_selectJoin( self.otherClass, self.joinColumn, inst.id) if inst.sqlmeta._perConnection: conn = inst._connection else: conn = None return self._applyOrderBy([self.otherClass.get(id, conn) for (id,) in ids if id is not None], self.otherClass) def _dbNameToPythonName(self): for column in self.otherClass.sqlmeta.columns.values(): if column.dbName == self.joinColumn: return column.name return self.soClass.sqlmeta.style.dbColumnToPythonAttr(self.joinColumn) class MultipleJoin(Join): baseClass = SOMultipleJoin class SOSQLMultipleJoin(SOMultipleJoin): def performJoin(self, inst): if inst.sqlmeta._perConnection: conn = inst._connection else: conn = None pythonColumn = self._dbNameToPythonName() results = self.otherClass.select(getattr(self.otherClass.q, pythonColumn) == inst.id, connection=conn) return results.orderBy(self.orderBy) class SQLMultipleJoin(Join): baseClass = SOSQLMultipleJoin # This is a many-to-many join, with an intermediary table class SORelatedJoin(SOMultipleJoin): def __init__(self, otherColumn=None, intermediateTable=None, createRelatedTable=True, **kw): self.intermediateTable = intermediateTable 
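        # intermediateTable may be passed as None; in that case
        # _setOtherRelatedClass later derives the default '<table1>_<table2>'
        # name from the two (sorted) table names.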
self.otherColumn = otherColumn self.createRelatedTable = createRelatedTable SOMultipleJoin.__init__(self, **kw) classregistry.registry( self.soClass.sqlmeta.registry).addClassCallback( self.otherClassName, self._setOtherRelatedClass) def _setOtherRelatedClass(self, otherClass): if not self.intermediateTable: names = [self.soClass.sqlmeta.table, otherClass.sqlmeta.table] names.sort() self.intermediateTable = '%s_%s' % (names[0], names[1]) if not self.otherColumn: self.otherColumn = self.soClass.sqlmeta.style.tableReference( otherClass.sqlmeta.table) def hasIntermediateTable(self): return True def performJoin(self, inst): ids = inst._connection._SO_intermediateJoin( self.intermediateTable, self.otherColumn, self.joinColumn, inst.id) if inst.sqlmeta._perConnection: conn = inst._connection else: conn = None return self._applyOrderBy([self.otherClass.get(id, conn) for (id,) in ids if id is not None], self.otherClass) def remove(self, inst, other): inst._connection._SO_intermediateDelete( self.intermediateTable, self.joinColumn, getID(inst), self.otherColumn, getID(other)) def add(self, inst, other): inst._connection._SO_intermediateInsert( self.intermediateTable, self.joinColumn, getID(inst), self.otherColumn, getID(other)) class RelatedJoin(MultipleJoin): baseClass = SORelatedJoin # helper classes to SQLRelatedJoin class OtherTableToJoin(sqlbuilder.SQLExpression): def __init__(self, otherTable, otherIdName, interTable, joinColumn): self.otherTable = otherTable self.otherIdName = otherIdName self.interTable = interTable self.joinColumn = joinColumn def tablesUsedImmediate(self): return [self.otherTable, self.interTable] def __sqlrepr__(self, db): return '%s.%s = %s.%s' % (self.otherTable, self.otherIdName, self.interTable, self.joinColumn) class JoinToTable(sqlbuilder.SQLExpression): def __init__(self, table, idName, interTable, joinColumn): self.table = table self.idName = idName self.interTable = interTable self.joinColumn = joinColumn def tablesUsedImmediate(self): return [self.table, self.interTable] def __sqlrepr__(self, db): return '%s.%s = %s.%s' % (self.interTable, self.joinColumn, self.table, self.idName) class TableToId(sqlbuilder.SQLExpression): def __init__(self, table, idName, idValue): self.table = table self.idName = idName self.idValue = idValue def tablesUsedImmediate(self): return [self.table] def __sqlrepr__(self, db): return '%s.%s = %s' % (self.table, self.idName, self.idValue) class SOSQLRelatedJoin(SORelatedJoin): def performJoin(self, inst): if inst.sqlmeta._perConnection: conn = inst._connection else: conn = None results = self.otherClass.select(sqlbuilder.AND( OtherTableToJoin( self.otherClass.sqlmeta.table, self.otherClass.sqlmeta.idName, self.intermediateTable, self.otherColumn ), JoinToTable( self.soClass.sqlmeta.table, self.soClass.sqlmeta.idName, self.intermediateTable, self.joinColumn ), TableToId(self.soClass.sqlmeta.table, self.soClass.sqlmeta.idName, inst.id), ), clauseTables=(self.soClass.sqlmeta.table, self.otherClass.sqlmeta.table, self.intermediateTable), connection=conn) return results.orderBy(self.orderBy) class SQLRelatedJoin(RelatedJoin): baseClass = SOSQLRelatedJoin class SOSingleJoin(SOMultipleJoin): def __init__(self, **kw): self.makeDefault = kw.pop('makeDefault', False) SOMultipleJoin.__init__(self, **kw) def performJoin(self, inst): if inst.sqlmeta._perConnection: conn = inst._connection else: conn = None pythonColumn = self._dbNameToPythonName() results = self.otherClass.select( getattr(self.otherClass.q, pythonColumn) == inst.id, 
connection=conn ) if results.count() == 0: if not self.makeDefault: return None else: kw = {self.soClass.sqlmeta.style.instanceIDAttrToAttr(pythonColumn): inst} return self.otherClass(**kw) # instanciating the otherClass with all else: return results[0] class SingleJoin(Join): baseClass = SOSingleJoin import boundattributes class SOManyToMany(object): def __init__(self, soClass, name, join, intermediateTable, joinColumn, otherColumn, createJoinTable, **attrs): self.name = name self.intermediateTable = intermediateTable self.joinColumn = joinColumn self.otherColumn = otherColumn self.createJoinTable = createJoinTable self.soClass = self.otherClass = None for name, value in attrs.items(): setattr(self, name, value) classregistry.registry( soClass.sqlmeta.registry).addClassCallback( join, self._setOtherClass) classregistry.registry( soClass.sqlmeta.registry).addClassCallback( soClass.__name__, self._setThisClass) def _setThisClass(self, soClass): self.soClass = soClass if self.soClass and self.otherClass: self._finishSet() def _setOtherClass(self, otherClass): self.otherClass = otherClass if self.soClass and self.otherClass: self._finishSet() def _finishSet(self): if self.intermediateTable is None: names = [self.soClass.sqlmeta.table, self.otherClass.sqlmeta.table] names.sort() self.intermediateTable = '%s_%s' % (names[0], names[1]) if not self.otherColumn: self.otherColumn = self.soClass.sqlmeta.style.tableReference( self.otherClass.sqlmeta.table) if not self.joinColumn: self.joinColumn = styles.getStyle( self.soClass).tableReference(self.soClass.sqlmeta.table) events.listen(self.event_CreateTableSignal, self.soClass, events.CreateTableSignal) events.listen(self.event_CreateTableSignal, self.otherClass, events.CreateTableSignal) self.clause = ( (self.otherClass.q.id == sqlbuilder.Field(self.intermediateTable, self.otherColumn)) & (sqlbuilder.Field(self.intermediateTable, self.joinColumn) == self.soClass.q.id)) def __get__(self, obj, type): if obj is None: return self query = ( (self.otherClass.q.id == sqlbuilder.Field(self.intermediateTable, self.otherColumn)) & (sqlbuilder.Field(self.intermediateTable, self.joinColumn) == obj.id)) select = self.otherClass.select(query) return _ManyToManySelectWrapper(obj, self, select) def event_CreateTableSignal(self, soClass, connection, extra_sql, post_funcs): if self.createJoinTable: post_funcs.append(self.event_CreateTableSignalPost) def event_CreateTableSignalPost(self, soClass, connection): if connection.tableExists(self.intermediateTable): return connection._SO_createJoinTable(self) class ManyToMany(boundattributes.BoundFactory): factory_class = SOManyToMany __restrict_attributes__ = ( 'join', 'intermediateTable', 'joinColumn', 'otherColumn', 'createJoinTable') __unpackargs__ = ('join',) # Default values: intermediateTable = None joinColumn = None otherColumn = None createJoinTable = True class _ManyToManySelectWrapper(object): def __init__(self, forObject, join, select): self.forObject = forObject self.join = join self.select = select def __getattr__(self, attr): # @@: This passes through private variable access too... should it? 
# Also magic methods, like __str__ return getattr(self.select, attr) def __repr__(self): return '<%s for: %s>' % (self.__class__.__name__, repr(self.select)) def __str__(self): return str(self.select) def __iter__(self): return iter(self.select) def __getitem__(self, key): return self.select[key] def add(self, obj): obj._connection._SO_intermediateInsert( self.join.intermediateTable, self.join.joinColumn, getID(self.forObject), self.join.otherColumn, getID(obj)) def remove(self, obj): obj._connection._SO_intermediateDelete( self.join.intermediateTable, self.join.joinColumn, getID(self.forObject), self.join.otherColumn, getID(obj)) def create(self, **kw): obj = self.join.otherClass(**kw) self.add(obj) return obj class SOOneToMany(object): def __init__(self, soClass, name, join, joinColumn, **attrs): self.soClass = soClass self.name = name self.joinColumn = joinColumn for name, value in attrs.items(): setattr(self, name, value) classregistry.registry( soClass.sqlmeta.registry).addClassCallback( join, self._setOtherClass) def _setOtherClass(self, otherClass): self.otherClass = otherClass if not self.joinColumn: self.joinColumn = styles.getStyle( self.soClass).tableReference(self.soClass.sqlmeta.table) self.clause = ( sqlbuilder.Field(self.otherClass.sqlmeta.table, self.joinColumn) == self.soClass.q.id) def __get__(self, obj, type): if obj is None: return self query = ( sqlbuilder.Field(self.otherClass.sqlmeta.table, self.joinColumn) == obj.id) select = self.otherClass.select(query) return _OneToManySelectWrapper(obj, self, select) class OneToMany(boundattributes.BoundFactory): factory_class = SOOneToMany __restrict_attributes__ = ( 'join', 'joinColumn') __unpackargs__ = ('join',) # Default values: joinColumn = None class _OneToManySelectWrapper(object): def __init__(self, forObject, join, select): self.forObject = forObject self.join = join self.select = select def __getattr__(self, attr): # @@: This passes through private variable access too... should it? # Also magic methods, like __str__ return getattr(self.select, attr) def __repr__(self): return '<%s for: %s>' % (self.__class__.__name__, repr(self.select)) def __str__(self): return str(self.select) def __iter__(self): return iter(self.select) def __getitem__(self, key): return self.select[key] def create(self, **kw): kw[self.join.joinColumn] = self.forObject.id return self.join.otherClass(**kw) SQLObject-1.5.2/sqlobject/conftest.py0000644000175000017500000000450311604624061017044 0ustar phdphd00000000000000""" This module is used by py.test to configure testing for this application. """ # Override some options (doesn't override command line): verbose = 0 exitfirst = True import py import os import sqlobject try: import pkg_resources except ImportError: # Python 2.2 pass else: pkg_resources.require('SQLObject') connectionShortcuts = { 'mysql': 'mysql://test@localhost/test', 'dbm': 'dbm:///data', 'postgres': 'postgres:///test', 'postgresql': 'postgres:///test', 'rdbhost': 'rdhbost://role:authcode@www.rdbhost.com/', 'pygresql': 'pygresql://localhost/test', 'sqlite': 'sqlite:/:memory:', 'sybase': 'sybase://test:test123@sybase/test?autoCommit=0', 'firebird': 'firebird://sysdba:masterkey@localhost/var/lib/firebird/data/test.gdb', 'mssql': 'mssql://sa:@127.0.0.1/test' } def pytest_addoption(parser): """Add the SQLObject options""" parser.addoption('-D', '--Database', action="store", dest="Database", default='sqlite', help="The database to run the tests under (default sqlite). 
" "Can also use an alias from: %s" % (', '.join(connectionShortcuts.keys()))) parser.addoption('-S', '--SQL', action="store_true", dest="show_sql", default=False, help="Show SQL from statements (when capturing stdout the " "SQL is only displayed when a test fails)") parser.addoption('-O', '--SQL-output', action="store_true", dest="show_sql_output", default=False, help="Show output from SQL statements (when capturing " "stdout the output is only displayed when a test fails)") parser.addoption('-E', '--events', action="store_true", dest="debug_events", default=False, help="Debug events (print information about events as they are " "sent)") option = None def pytest_configure(config): """Make cmdline arguments available to dbtest""" global option option = config.option class SQLObjectClass(py.test.collect.Class): def run(self): if (isinstance(self.obj, type) and issubclass(self.obj, sqlobject.SQLObject)): return [] return super(SQLObjectClass, self).run() Class = SQLObjectClass def setup_tests(): if option.debug_events: from sqlobject import events events.debug_events() SQLObject-1.5.2/sqlobject/firebird/0000755000175000017500000000000012322476205016434 5ustar phdphd00000000000000SQLObject-1.5.2/sqlobject/firebird/__init__.py0000644000175000017500000000031511133141724020536 0ustar phdphd00000000000000from sqlobject.dbconnection import registerConnection def builder(): import firebirdconnection return firebirdconnection.FirebirdConnection registerConnection(['firebird', 'interbase'], builder) SQLObject-1.5.2/sqlobject/firebird/firebirdconnection.py0000644000175000017500000004146011724210560022654 0ustar phdphd00000000000000import os import re import warnings from sqlobject import col from sqlobject.dbconnection import DBAPI kinterbasdb = None class FirebirdConnection(DBAPI): supportTransactions = False dbName = 'firebird' schemes = [dbName] limit_re = re.compile('^\s*(select )(.*)', re.IGNORECASE) def __init__(self, host, db, port='3050', user='sysdba', password='masterkey', autoCommit=1, dialect=None, role=None, charset=None, **kw): global kinterbasdb if kinterbasdb is None: import kinterbasdb # See http://kinterbasdb.sourceforge.net/dist_docs/usage.html # for an explanation; in short: use datetime, decimal and unicode. kinterbasdb.init(type_conv=200) self.module = kinterbasdb self.host = host self.port = port self.db = db self.user = user self.password = password if dialect: self.dialect = int(dialect) else: self.dialect = None self.role = role if charset: self.dbEncoding = charset.replace('-', '') # encoding defined by user in the connection string else: self.dbEncoding = charset self.defaultDbEncoding = '' # encoding defined during database creation and stored in the database DBAPI.__init__(self, **kw) @classmethod def _connectionFromParams(cls, auth, password, host, port, path, args): if not password: password = 'masterkey' if not auth: auth='sysdba' # check for alias using if (path[0] == '/') and path[-3:].lower() not in ('fdb', 'gdb'): path = path[1:] path = path.replace('/', os.sep) return cls(host, port=port, db=path, user=auth, password=password, **args) def _runWithConnection(self, meth, *args): if not self.autoCommit: return DBAPI._runWithConnection(self, meth, args) conn = self.getConnection() # @@: Horrible auto-commit implementation. Just horrible! 
try: conn.begin() except self.module.ProgrammingError: pass try: val = meth(conn, *args) try: conn.commit() except self.module.ProgrammingError: pass finally: self.releaseConnection(conn) return val def _setAutoCommit(self, conn, auto): # Only _runWithConnection does "autocommit", so we don't # need to worry about that. pass def makeConnection(self): extra = {} if self.dialect: extra['dialect'] = self.dialect return self.module.connect( host=self.host, port=self.port, database=self.db, user=self.user, password=self.password, role=self.role, charset=self.dbEncoding, **extra ) def _queryInsertID(self, conn, soInstance, id, names, values): """Firebird uses 'generators' to create new ids for a table. The users needs to create a generator named GEN_ for each table this method to work.""" table = soInstance.sqlmeta.table idName = soInstance.sqlmeta.idName sequenceName = soInstance.sqlmeta.idSequence or \ 'GEN_%s' % table c = conn.cursor() if id is None: c.execute('SELECT gen_id(%s,1) FROM rdb$database' % sequenceName) id = c.fetchone()[0] names = [idName] + names values = [id] + values q = self._insertSQL(table, names, values) if self.debug: self.printDebug(conn, q, 'QueryIns') c.execute(q) if self.debugOutput: self.printDebug(conn, id, 'QueryIns', 'result') return id @classmethod def _queryAddLimitOffset(cls, query, start, end): """Firebird slaps the limit and offset (actually 'first' and 'skip', respectively) statement right after the select.""" if not start: limit_str = "SELECT FIRST %i" % end if not end: limit_str = "SELECT SKIP %i" % start else: limit_str = "SELECT FIRST %i SKIP %i" % (end-start, start) match = cls.limit_re.match(query) if match and len(match.groups()) == 2: return ' '.join([limit_str, match.group(2)]) else: return query def createTable(self, soClass): self.query('CREATE TABLE %s (\n%s\n)' % \ (soClass.sqlmeta.table, self.createColumns(soClass))) self.query("CREATE GENERATOR GEN_%s" % soClass.sqlmeta.table) return [] def createReferenceConstraint(self, soClass, col): return None def createColumn(self, soClass, col): return col.firebirdCreateSQL() def createIDColumn(self, soClass): key_type = {int: "INT", str: "TEXT"}[soClass.sqlmeta.idType] return '%s %s NOT NULL PRIMARY KEY' % (soClass.sqlmeta.idName, key_type) def createIndexSQL(self, soClass, index): return index.firebirdCreateIndexSQL(soClass) def joinSQLType(self, join): return 'INT NOT NULL' def tableExists(self, tableName): # there's something in the database by this name...let's # assume it's a table. By default, fb 1.0 stores EVERYTHING # it cares about in uppercase. 
result = self.queryOne("SELECT COUNT(rdb$relation_name) FROM rdb$relations WHERE rdb$relation_name = '%s'" % tableName.upper()) return result[0] def addColumn(self, tableName, column): self.query('ALTER TABLE %s ADD %s' % (tableName, column.firebirdCreateSQL())) def dropTable(self, tableName, cascade=False): self.query("DROP TABLE %s" % tableName) self.query("DROP GENERATOR GEN_%s" % tableName) def delColumn(self, sqlmeta, column): self.query('ALTER TABLE %s DROP %s' % (sqlmeta.table, column.dbName)) def readDefaultEncodingFromDB(self): if self.defaultDbEncoding is "": # get out if encoding is known allready (can by None as well)) self.defaultDbEncoding = str(self.queryOne("SELECT rdb$character_set_name FROM rdb$database")[0].strip().lower()) # encoding defined during db creation if self.defaultDbEncoding == "none": self.defaultDbEncoding = None if self.dbEncoding != self.defaultDbEncoding: warningText = """\n Database charset: %s is different from connection charset: %s.\n""" % (self.defaultDbEncoding, self.dbEncoding) warnings.warn(warningText) #TODO: ??? print out the uri string, so user can see what is going on warningText = \ """\n Every CHAR or VARCHAR field can (or, better: must) have a character set defined in Firebird. In the case, field charset is not defined, SQLObject try to use a db default encoding instead. Firebird is unable to transliterate between character sets. So you must set the correct values on the server and on the client if everything is to work smoothely.\n""" warnings.warn(warningText) if not self.dbEncoding: # defined by user in the connection string self.dbEncoding = self.defaultDbEncoding warningText = """\n encoding: %s will be used as default for this connection\n""" % self.dbEncoding warnings.warn(warningText) def columnsFromSchema(self, tableName, soClass): """ Look at the given table and create Col instances (or subclasses of Col) for the fields it finds in that table. 
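        This introspection is what allows classes to be built from an
        existing schema, roughly (``Person`` is a hypothetical class whose
        table already exists)::

            class Person(SQLObject):
                class sqlmeta:
                    fromDatabase = True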
""" self.readDefaultEncodingFromDB() fieldQuery="""\ SELECT r.RDB$FIELD_NAME AS field_name, CASE f.RDB$FIELD_TYPE when 7 then 'smallint' when 8 then 'integer' when 16 then 'int64' when 9 then 'quad' when 10 then 'float' when 11 then 'd_float' when 17 then 'boolean' when 27 then 'double' when 12 then 'date' when 13 then 'time' when 35 then 'timestamp' when 261 then 'blob' when 37 then 'varchar' when 14 then 'char' when 40 then 'cstring' when 45 then 'blob_id' ELSE 'UNKNOWN' END AS field_type, case f.rdb$field_type when 7 then case f.rdb$field_sub_type when 1 then 'numeric' when 2 then 'decimal' end when 8 then case f.rdb$field_sub_type when 1 then 'numeric' when 2 then 'decimal' end when 16 then case f.rdb$field_sub_type when 1 then 'numeric' when 2 then 'decimal' else 'bigint' end when 14 then case f.rdb$field_sub_type when 0 then 'unspecified' when 1 then 'binary' when 3 then 'acl' else case when f.rdb$field_sub_type is null then 'unspecified' end end when 37 then case f.rdb$field_sub_type when 0 then 'unspecified' when 1 then 'text' when 3 then 'acl' else case when f.rdb$field_sub_type is null then 'unspecified' end end when 261 then case f.rdb$field_sub_type when 0 then 'unspecified' when 1 then 'text' when 2 then 'blr' when 3 then 'acl' when 4 then 'reserved' when 5 then 'encoded-meta-data' when 6 then 'irregular-finished-multi-db-tx' when 7 then 'transactional_description' when 8 then 'external_file_description' end end as "ActualSubType", f.RDB$FIELD_LENGTH AS field_length, f.RDB$FIELD_PRECISION AS field_precision, f.RDB$FIELD_SCALE AS field_scale, cset.RDB$CHARACTER_SET_NAME AS field_charset, coll.RDB$COLLATION_NAME AS field_collation, r.rdb$default_source, r.RDB$NULL_FLAG AS field_not_null_constraint, r.RDB$DESCRIPTION AS field_description FROM RDB$RELATION_FIELDS r LEFT JOIN RDB$FIELDS f ON r.RDB$FIELD_SOURCE = f.RDB$FIELD_NAME LEFT JOIN RDB$COLLATIONS coll ON f.RDB$COLLATION_ID = coll.RDB$COLLATION_ID LEFT JOIN RDB$CHARACTER_SETS cset ON f.RDB$CHARACTER_SET_ID = cset.RDB$CHARACTER_SET_ID WHERE r.RDB$RELATION_NAME='%s' -- table name ORDER BY r.RDB$FIELD_POSITION """ colData = self.queryAll(fieldQuery % tableName.upper()) results = [] for field, fieldType, fieldSubtype, fieldLength, fieldPrecision, fieldScale, fieldCharset, collationName, defaultSource, fieldNotNullConstraint, fieldDescription in colData: field = field.strip().lower() fieldType = fieldType.strip() if fieldCharset: fieldCharset = str(fieldCharset.strip()) if fieldCharset.startswith('UNICODE_FSS'): # 'UNICODE_FSS' is less strict Firebird/Interbase UTF8 definition fieldCharset = "UTF8" if fieldSubtype: fieldSubtype=fieldSubtype.strip() if fieldType == "int64": fieldType = fieldSubtype if defaultSource: # can look like: "DEFAULT 0", "DEFAULT 'default text'", None defaultSource = defaultSource.split(' ')[1] if defaultSource.startswith ("'") and defaultSource.endswith ("'"): defaultSource = str(defaultSource[1:-1]) elif fieldType in ("integer", "smallint", "bigint"): defaultSource=int(defaultSource) elif fieldType in ("float", "double"): defaultSource=float(defaultSource) #TODO: other types for defaultSource # elif fieldType == "datetime": idName = str(soClass.sqlmeta.idName or 'id').upper() if field.upper() == idName: continue if fieldScale: #PRECISION refers to the total number of digits, and SCALE refers to the number of digits to the right of the decimal point #Both numbers can be from 1 to 18 (SQL dialect 1: 1-15), but SCALE mustbe less than or equal to PRECISION if fieldScale > fieldLength: fieldScale = fieldLength 
colClass, kw = self.guessClass(fieldType, fieldLength, fieldCharset, fieldScale, ) kw['name'] = str(soClass.sqlmeta.style.dbColumnToPythonAttr(field).strip()) kw['dbName'] = str(field) kw['notNone'] = not fieldNotNullConstraint kw['default'] = defaultSource results.append(colClass(**kw)) return results def guessClass(self, t, flength, fCharset, fscale=None): """ An internal method that tries to figure out what Col subclass is appropriate given whatever introspective information is available -- both very database-specific. """ ##TODO: check if negative values are allowed for fscale if t == 'smallint': # -32,768 to +32,767, 16 bits return col.IntCol, {} elif t == 'integer': # -2,147,483,648 to +2,147,483,647, 32 bits return col.IntCol, {} elif t == 'bigint': # -2^63 to 2^63-1 or -9,223,372,036,854,775,808 to 9,223,372,036,854,775,807, 64 bits return col.IntCol, {} elif t == 'float': # 32 bits, 3.4x10^-38 to 3.4x10^38, 7 digit precision (7 significant decimals) return col.FloatCol, {} elif t == 'double': # 64 bits, 1.7x10^-308 to 1.7x10^308, 15 digit precision (15 significant decimals) return col.FloatCol, {} elif t == 'numeric': # Numeric and Decimal are internally stored as smallint, integer or bigint depending on the size. They can handle up to 18 digits. if (not flength or not fscale): # If neither PRECISION nor SCALE are specified, Firebird/InterBase defines the column as INTEGER instead of NUMERIC and stores only the integer portion of the value return col.IntCol, {} return col.DecimalCol, {'size': flength, 'precision': fscale} # check if negative values are allowed for fscale elif t == 'decimal': # Numeric and Decimal are internally stored as smallint, integer or bigint depending on the size. They can handle up to 18 digits. return col.DecimalCol, {'size': flength, 'precision': fscale} # check if negative values are allowed for fscale elif t == 'date': # 32 bits, 1 Jan 100. to 29 Feb 32768. return col.DateCol, {} elif t == 'time': # 32 bits, 00:00 to 23:59.9999 return col.TimeCol, {} elif t == 'timestamp': # 64 bits, 1 Jan 100 to 28 Feb 32768. 
return col.DateTimeCol, {} elif t == 'char': # 32767 bytes if fCharset and (fCharset != "NONE"): return col.UnicodeCol, {'length': flength, 'varchar': False, 'dbEncoding': fCharset} elif self.dbEncoding: return col.UnicodeCol, {'length': flength, 'varchar': False, 'dbEncoding': self.dbEncoding} else: return col.StringCol, {'length': flength, 'varchar': False} elif t == 'varchar': # 32767 bytes if fCharset and (fCharset != "NONE"): return col.UnicodeCol, {'length': flength, 'varchar': True, 'dbEncoding': fCharset} elif self.dbEncoding: return col.UnicodeCol, {'length': flength, 'varchar': True, 'dbEncoding': self.dbEncoding} else: return col.StringCol, {'length': flength, 'varchar': True} elif t == 'blob': # 32GB return col.BLOBCol, {} else: return col.Col, {} def createEmptyDatabase(self): self.module.create_database("CREATE DATABASE '%s' user '%s' password '%s'" % \ (self.db, self.user, self.password)) def dropDatabase(self): self.module.drop_database() SQLObject-1.5.2/sqlobject/sybase/0000755000175000017500000000000012322476205016134 5ustar phdphd00000000000000SQLObject-1.5.2/sqlobject/sybase/__init__.py0000644000175000017500000000027011133142452020235 0ustar phdphd00000000000000from sqlobject.dbconnection import registerConnection def builder(): import sybaseconnection return sybaseconnection.SybaseConnection registerConnection(['sybase'], builder) SQLObject-1.5.2/sqlobject/sybase/sybaseconnection.py0000644000175000017500000001326312134465736022071 0ustar phdphd00000000000000from sqlobject.dbconnection import DBAPI from sqlobject import col class SybaseConnection(DBAPI): supportTransactions = False dbName = 'sybase' schemes = [dbName] NumericType = None def __init__(self, db, user, password='', host='localhost', port=None, locking=1, **kw): db = db.strip('/') import Sybase Sybase._ctx.debug = 0 if SybaseConnection.NumericType is None: from Sybase import NumericType SybaseConnection.NumericType = NumericType from sqlobject.converters import registerConverter, IntConverter registerConverter(NumericType, IntConverter) self.module = Sybase self.locking = int(locking) self.host = host self.port = port self.db = db self.user = user self.password = password autoCommit = kw.get('autoCommit') if autoCommit: autoCommmit = int(autoCommit) else: autoCommit = None kw['autoCommit'] = autoCommit DBAPI.__init__(self, **kw) @classmethod def _connectionFromParams(cls, user, password, host, port, path, args): return cls(user=user, password=password, host=host or 'localhost', port=port, db=path, **args) def insert_id(self, conn): """ Sybase adapter/cursor does not support the insert_id method. 
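        The last generated identity value is therefore fetched explicitly
        with::

            SELECT @@IDENTITY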
""" c = conn.cursor() c.execute('SELECT @@IDENTITY') return c.fetchone()[0] def makeConnection(self): return self.module.connect(self.host, self.user, self.password, database=self.db, auto_commit=self.autoCommit, locking=self.locking) HAS_IDENTITY = """ SELECT col.name, col.status, obj.name FROM syscolumns col JOIN sysobjects obj ON obj.id = col.id WHERE obj.name = '%s' AND (col.status & 0x80) = 0x80 """ def _hasIdentity(self, conn, table): query = self.HAS_IDENTITY % table c = conn.cursor() c.execute(query) r = c.fetchone() return r is not None def _queryInsertID(self, conn, soInstance, id, names, values): table = soInstance.sqlmeta.table idName = soInstance.sqlmeta.idName c = conn.cursor() if id is not None: names = [idName] + names values = [id] + values has_identity = self._hasIdentity(conn, table) identity_insert_on = False if has_identity and (id is not None): identity_insert_on = True c.execute('SET IDENTITY_INSERT %s ON' % table) q = self._insertSQL(table, names, values) if self.debug: self.printDebug(conn, q, 'QueryIns') c.execute(q) if has_identity and identity_insert_on: c.execute('SET IDENTITY_INSERT %s OFF' % table) if id is None: id = self.insert_id(conn) if self.debugOutput: self.printDebug(conn, id, 'QueryIns', 'result') return id @classmethod def _queryAddLimitOffset(cls, query, start, end): # XXX Sybase doesn't support OFFSET if end: return "SET ROWCOUNT %i %s SET ROWCOUNT 0" % (end, query) return query def createReferenceConstraint(self, soClass, col): return None def createColumn(self, soClass, col): return col.sybaseCreateSQL() def createIDColumn(self, soClass): key_type = {int: "NUMERIC(18,0)", str: "TEXT"}[soClass.sqlmeta.idType] return '%s %s IDENTITY UNIQUE' % (soClass.sqlmeta.idName, key_type) def createIndexSQL(self, soClass, index): return index.sybaseCreateIndexSQL(soClass) def joinSQLType(self, join): return 'NUMERIC(18,0) NOT NULL' SHOW_TABLES="SELECT name FROM sysobjects WHERE type='U'" def tableExists(self, tableName): for (table,) in self.queryAll(self.SHOW_TABLES): if table.lower() == tableName.lower(): return True return False def addColumn(self, tableName, column): self.query('ALTER TABLE %s ADD COLUMN %s' % (tableName, column.sybaseCreateSQL())) def delColumn(self, sqlmeta, column): self.query('ALTER TABLE %s DROP COLUMN %s' % (sqlmeta.table, column.dbName)) SHOW_COLUMNS=('SELECT COLUMN_NAME, DATA_TYPE, IS_NULLABLE, COLUMN_DEFAULT FROM INFORMATION_SCHEMA.COLUMNS ' 'WHERE TABLE_NAME = \'%s\'') def columnsFromSchema(self, tableName, soClass): colData = self.queryAll(self.SHOW_COLUMNS % tableName) results = [] for field, t, nullAllowed, default in colData: if field == soClass.sqlmeta.idName: continue colClass, kw = self.guessClass(t) kw['name'] = soClass.sqlmeta.style.dbColumnToPythonAttr(field) kw['dbName'] = field kw['notNone'] = not nullAllowed kw['default'] = default # @@ skip key... # @@ skip extra... 
kw['forceDBName'] = True results.append(colClass(**kw)) return results def _setAutoCommit(self, conn, auto): conn.auto_commit = auto def guessClass(self, t): if t.startswith('int'): return col.IntCol, {} elif t.startswith('varchar'): return col.StringCol, {'length': int(t[8:-1])} elif t.startswith('char'): return col.StringCol, {'length': int(t[5:-1]), 'varchar': False} elif t.startswith('datetime'): return col.DateTimeCol, {} else: return col.Col, {} SQLObject-1.5.2/sqlobject/styles.py0000644000175000017500000001054710674745560016565 0ustar phdphd00000000000000import re __all__ = ["Style", "MixedCaseUnderscoreStyle", "DefaultStyle", "MixedCaseStyle"] class Style(object): """ The base Style class, and also the simplest implementation. No translation occurs -- column names and attribute names match, as do class names and table names (when using auto class or schema generation). """ def __init__(self, pythonAttrToDBColumn=None, dbColumnToPythonAttr=None, pythonClassToDBTable=None, dbTableToPythonClass=None, idForTable=None, longID=False): if pythonAttrToDBColumn: self.pythonAttrToDBColumn = lambda a, s=self: pythonAttrToDBColumn(s, a) if dbColumnToPythonAttr: self.dbColumnToPythonAttr = lambda a, s=self: dbColumnToPythonAttr(s, a) if pythonClassToDBTable: self.pythonClassToDBTable = lambda a, s=self: pythonClassToDBTable(s, a) if dbTableToPythonClass: self.dbTableToPythonClass = lambda a, s=self: dbTableToPythonClass(s, a) if idForTable: self.idForTable = lambda a, s=self: idForTable(s, a) self.longID = longID def pythonAttrToDBColumn(self, attr): return attr def dbColumnToPythonAttr(self, col): return col def pythonClassToDBTable(self, className): return className def dbTableToPythonClass(self, table): return table def idForTable(self, table): if self.longID: return self.tableReference(table) else: return 'id' def pythonClassToAttr(self, className): return lowerword(className) def instanceAttrToIDAttr(self, attr): return attr + "ID" def instanceIDAttrToAttr(self, attr): return attr[:-2] def tableReference(self, table): return table + "_id" class MixedCaseUnderscoreStyle(Style): """ This is the default style. Python attributes use mixedCase, while database columns use underscore_separated. """ def pythonAttrToDBColumn(self, attr): return mixedToUnder(attr) def dbColumnToPythonAttr(self, col): return underToMixed(col) def pythonClassToDBTable(self, className): return className[0].lower() \ + mixedToUnder(className[1:]) def dbTableToPythonClass(self, table): return table[0].upper() \ + underToMixed(table[1:]) def pythonClassToDBTableReference(self, className): return self.tableReference(self.pythonClassToDBTable(className)) def tableReference(self, table): return table + "_id" DefaultStyle = MixedCaseUnderscoreStyle class MixedCaseStyle(Style): """ This style leaves columns as mixed-case, and uses long ID names (like ProductID instead of simply id). 
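    Roughly, with illustrative names::

        pythonAttrToDBColumn('firstName')  -> 'FirstName'
        dbColumnToPythonAttr('FirstName')  -> 'firstName'
        tableReference('Product')          -> 'ProductID'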
""" def pythonAttrToDBColumn(self, attr): return capword(attr) def dbColumnToPythonAttr(self, col): return lowerword(col) def dbTableToPythonClass(self, table): return capword(table) def tableReference(self, table): return table + "ID" defaultStyle = DefaultStyle() def getStyle(soClass, dbConnection=None): if dbConnection is None: if hasattr(soClass, '_connection'): dbConnection = soClass._connection if hasattr(soClass.sqlmeta, 'style') and soClass.sqlmeta.style: return soClass.sqlmeta.style elif dbConnection and dbConnection.style: return dbConnection.style else: return defaultStyle ############################################################ ## Text utilities ############################################################ _mixedToUnderRE = re.compile(r'[A-Z]+') def mixedToUnder(s): if s.endswith('ID'): return mixedToUnder(s[:-2] + "_id") trans = _mixedToUnderRE.sub(mixedToUnderSub, s) if trans.startswith('_'): trans = trans[1:] return trans def mixedToUnderSub(match): m = match.group(0).lower() if len(m) > 1: return '_%s_%s' % (m[:-1], m[-1]) else: return '_%s' % m def capword(s): return s[0].upper() + s[1:] def lowerword(s): return s[0].lower() + s[1:] _underToMixedRE = re.compile('_.') def underToMixed(name): if name.endswith('_id'): return underToMixed(name[:-3] + "ID") return _underToMixedRE.sub(lambda m: m.group(0)[1].upper(), name) SQLObject-1.5.2/sqlobject/sqlite/0000755000175000017500000000000012322476205016147 5ustar phdphd00000000000000SQLObject-1.5.2/sqlobject/sqlite/__init__.py0000644000175000017500000000027011133142440020245 0ustar phdphd00000000000000from sqlobject.dbconnection import registerConnection def builder(): import sqliteconnection return sqliteconnection.SQLiteConnection registerConnection(['sqlite'], builder) SQLObject-1.5.2/sqlobject/sqlite/sqliteconnection.py0000644000175000017500000003640312322321175022103 0ustar phdphd00000000000000import base64 import os import thread import urllib from sqlobject.dbconnection import DBAPI, Boolean from sqlobject import col, sqlbuilder from sqlobject.dberrors import * sqlite2_Binary = None class ErrorMessage(str): def __new__(cls, e): obj = str.__new__(cls, e[0]) obj.code = None obj.module = e.__module__ obj.exception = e.__class__.__name__ return obj class SQLiteConnection(DBAPI): supportTransactions = True dbName = 'sqlite' schemes = [dbName] def __init__(self, filename, autoCommit=1, **kw): drivers = kw.pop('driver', None) or 'pysqlite2,sqlite3,sqlite' for driver in drivers.split(','): driver = driver.strip() if not driver: continue try: if driver in ('sqlite2', 'pysqlite2'): from pysqlite2 import dbapi2 as sqlite self.using_sqlite2 = True elif driver == 'sqlite3': import sqlite3 as sqlite self.using_sqlite2 = True elif driver in ('sqlite', 'sqlite1'): import sqlite self.using_sqlite2 = False else: raise ValueError('Unknown SQLite driver "%s", expected pysqlite2, sqlite3 or sqlite' % driver) except ImportError: pass else: break else: raise ImportError('Cannot find an SQLite driver, tried %s' % drivers) if self.using_sqlite2: sqlite.encode = base64.encodestring sqlite.decode = base64.decodestring self.module = sqlite self.filename = filename # full path to sqlite-db-file self._memory = filename == ':memory:' if self._memory and not self.using_sqlite2: raise ValueError("You must use sqlite2 to use in-memory databases") # connection options opts = {} if self.using_sqlite2: if autoCommit: opts["isolation_level"] = None global sqlite2_Binary if sqlite2_Binary is None: sqlite2_Binary = sqlite.Binary sqlite.Binary = lambda s: 
sqlite2_Binary(sqlite.encode(s)) if 'factory' in kw: factory = kw.pop('factory') if isinstance(factory, str): factory = globals()[factory] opts['factory'] = factory(sqlite) else: opts['autocommit'] = Boolean(autoCommit) if 'encoding' in kw: opts['encoding'] = kw.pop('encoding') if 'mode' in kw: opts['mode'] = int(kw.pop('mode'), 0) if 'timeout' in kw: if self.using_sqlite2: opts['timeout'] = float(kw.pop('timeout')) else: opts['timeout'] = int(float(kw.pop('timeout')) * 1000) if 'check_same_thread' in kw: opts["check_same_thread"] = Boolean(kw.pop('check_same_thread')) # use only one connection for sqlite - supports multiple) # cursors per connection self._connOptions = opts self.use_table_info = Boolean(kw.pop("use_table_info", True)) DBAPI.__init__(self, **kw) self._threadPool = {} self._threadOrigination = {} if self._memory: self.makeMemoryConnection() @classmethod def _connectionFromParams(cls, user, password, host, port, path, args): assert host is None and port is None, ( "SQLite can only be used locally (with a URI like " "sqlite:/file or sqlite:///file, not sqlite://%s%s)" % (host, port and ':%r' % port or '')) assert user is None and password is None, ( "You may not provide usernames or passwords for SQLite " "databases") if path == "/:memory:": path = ":memory:" return cls(filename=path, **args) def oldUri(self): path = self.filename if path == ":memory:": path = "/:memory:" else: path = "//" + path return 'sqlite:%s' % path def uri(self): path = self.filename if path == ":memory:": path = "/:memory:" else: if path.startswith('/'): path = "//" + path else: path = "///" + path path = urllib.quote(path) return 'sqlite:%s' % path def getConnection(self): # SQLite can't share connections between threads, and so can't # pool connections. Since we are isolating threads here, we # don't have to worry about locking as much. if self._memory: conn = self.makeConnection() self._connectionNumbers[id(conn)] = self._connectionCount self._connectionCount += 1 return conn threadid = thread.get_ident() if (self._pool is not None and threadid in self._threadPool): conn = self._threadPool[threadid] del self._threadPool[threadid] if conn in self._pool: self._pool.remove(conn) else: conn = self.makeConnection() if self._pool is not None: self._threadOrigination[id(conn)] = threadid self._connectionNumbers[id(conn)] = self._connectionCount self._connectionCount += 1 if self.debug: s = 'ACQUIRE' if self._pool is not None: s += ' pool=[%s]' % ', '.join([str(self._connectionNumbers[id(v)]) for v in self._pool]) self.printDebug(conn, s, 'Pool') return conn def releaseConnection(self, conn, explicit=False): if self._memory: return threadid = self._threadOrigination.get(id(conn)) DBAPI.releaseConnection(self, conn, explicit=explicit) if (self._pool is not None and threadid and threadid not in self._threadPool): self._threadPool[threadid] = conn else: if self._pool and conn in self._pool: self._pool.remove(conn) conn.close() def _setAutoCommit(self, conn, auto): if self.using_sqlite2: if auto: conn.isolation_level = None else: conn.isolation_level = "" else: conn.autocommit = auto def _setIsolationLevel(self, conn, level): if not self.using_sqlite2: return conn.isolation_level = level def makeMemoryConnection(self): self._memoryConn = self.module.connect( self.filename, **self._connOptions) # Convert text data from SQLite to str, not unicode - # SQLObject converts it to unicode itself. 
self._memoryConn.text_factory = str def makeConnection(self): if self._memory: return self._memoryConn conn = self.module.connect(self.filename, **self._connOptions) conn.text_factory = str # Convert text data to str, not unicode return conn def close(self): DBAPI.close(self) self._threadPool = {} if self._memory: self._memoryConn.close() self.makeMemoryConnection() def _executeRetry(self, conn, cursor, query): if self.debug: self.printDebug(conn, query, 'QueryR') try: return cursor.execute(query) except self.module.OperationalError, e: raise OperationalError(ErrorMessage(e)) except self.module.IntegrityError, e: msg = ErrorMessage(e) if msg.startswith('column') and msg.endswith('not unique') \ or msg.startswith('UNIQUE constraint failed:'): raise DuplicateEntryError(msg) else: raise IntegrityError(msg) except self.module.InternalError, e: raise InternalError(ErrorMessage(e)) except self.module.ProgrammingError, e: raise ProgrammingError(ErrorMessage(e)) except self.module.DataError, e: raise DataError(ErrorMessage(e)) except self.module.NotSupportedError, e: raise NotSupportedError(ErrorMessage(e)) except self.module.DatabaseError, e: raise DatabaseError(ErrorMessage(e)) except self.module.InterfaceError, e: raise InterfaceError(ErrorMessage(e)) except self.module.Warning, e: raise Warning(ErrorMessage(e)) except self.module.Error, e: raise Error(ErrorMessage(e)) def _queryInsertID(self, conn, soInstance, id, names, values): table = soInstance.sqlmeta.table idName = soInstance.sqlmeta.idName c = conn.cursor() if id is not None: names = [idName] + names values = [id] + values q = self._insertSQL(table, names, values) if self.debug: self.printDebug(conn, q, 'QueryIns') self._executeRetry(conn, c, q) # lastrowid is a DB-API extension from "PEP 0249": if id is None: id = int(c.lastrowid) if self.debugOutput: self.printDebug(conn, id, 'QueryIns', 'result') return id def _insertSQL(self, table, names, values): if not names: assert not values # INSERT INTO table () VALUES () isn't allowed in # SQLite (though it is in other databases) return ("INSERT INTO %s VALUES (NULL)" % table) else: return DBAPI._insertSQL(self, table, names, values) @classmethod def _queryAddLimitOffset(cls, query, start, end): if not start: return "%s LIMIT %i" % (query, end) if not end: return "%s LIMIT 0 OFFSET %i" % (query, start) return "%s LIMIT %i OFFSET %i" % (query, end-start, start) def createColumn(self, soClass, col): return col.sqliteCreateSQL() def createReferenceConstraint(self, soClass, col): return None def createIDColumn(self, soClass): return self._createIDColumn(soClass.sqlmeta) def _createIDColumn(self, sqlmeta): if sqlmeta.idType == str: return '%s TEXT PRIMARY KEY' % sqlmeta.idName return '%s INTEGER PRIMARY KEY AUTOINCREMENT' % sqlmeta.idName def joinSQLType(self, join): return 'INT NOT NULL' def tableExists(self, tableName): result = self.queryOne("SELECT tbl_name FROM sqlite_master WHERE type='table' AND tbl_name = '%s'" % tableName) # turn it into a boolean: return not not result def createIndexSQL(self, soClass, index): return index.sqliteCreateIndexSQL(soClass) def addColumn(self, tableName, column): self.query('ALTER TABLE %s ADD COLUMN %s' % (tableName, column.sqliteCreateSQL())) self.query('VACUUM %s' % tableName) def delColumn(self, sqlmeta, column): self.recreateTableWithoutColumn(sqlmeta, column) def recreateTableWithoutColumn(self, sqlmeta, column): new_name = sqlmeta.table + '_ORIGINAL' self.query('ALTER TABLE %s RENAME TO %s' % (sqlmeta.table, new_name)) cols = 
[self._createIDColumn(sqlmeta)] \ + [self.createColumn(None, col) for col in sqlmeta.columnList if col.name != column.name] cols = ",\n".join([" %s" % c for c in cols]) self.query('CREATE TABLE %s (\n%s\n)' % (sqlmeta.table, cols)) all_columns = ', '.join([sqlmeta.idName] + [col.dbName for col in sqlmeta.columnList]) self.query('INSERT INTO %s (%s) SELECT %s FROM %s' % ( sqlmeta.table, all_columns, all_columns, new_name)) self.query('DROP TABLE %s' % new_name) def columnsFromSchema(self, tableName, soClass): if self.use_table_info: return self._columnsFromSchemaTableInfo(tableName, soClass) else: return self._columnsFromSchemaParse(tableName, soClass) def _columnsFromSchemaTableInfo(self, tableName, soClass): colData = self.queryAll("PRAGMA table_info(%s)" % tableName) results = [] for index, field, t, nullAllowed, default, key in colData: if field == soClass.sqlmeta.idName: continue colClass, kw = self.guessClass(t) if default == 'NULL': nullAllowed = True default = None kw['name'] = soClass.sqlmeta.style.dbColumnToPythonAttr(field) kw['dbName'] = field kw['notNone'] = not nullAllowed kw['default'] = default # @@ skip key... # @@ skip extra... results.append(colClass(**kw)) return results def _columnsFromSchemaParse(self, tableName, soClass): colData = self.queryOne("SELECT sql FROM sqlite_master WHERE type='table' AND name='%s'" % tableName) if not colData: raise ValueError('The table %s was not found in the database. Load failed.' % tableName) colData = colData[0].split('(', 1)[1].strip()[:-2] while True: start = colData.find('(') if start == -1: break end = colData.find(')', start) if end == -1: break colData = colData[:start] + colData[end+1:] results = [] for colDesc in colData.split(','): parts = colDesc.strip().split(' ', 2) field = parts[0].strip() # skip comments if field.startswith('--'): continue # get rid of enclosing quotes if field[0] == field[-1] == '"': field = field[1:-1] if field == getattr(soClass.sqlmeta, 'idName', 'id'): continue colClass, kw = self.guessClass(parts[1].strip()) if len(parts) == 2: index_info = '' else: index_info = parts[2].strip().upper() kw['name'] = soClass.sqlmeta.style.dbColumnToPythonAttr(field) kw['dbName'] = field import re nullble = re.search(r'(\b\S*)\sNULL', index_info) default = re.search(r"DEFAULT\s((?:\d[\dA-FX.]*)|(?:'[^']*')|(?:#[^#]*#))", index_info) kw['notNone'] = nullble and nullble.group(1) == 'NOT' kw['default'] = default and default.group(1) # @@ skip key... # @@ skip extra... 
results.append(colClass(**kw)) return results def guessClass(self, t): t = t.upper() if t.find('INT') >= 0: return col.IntCol, {} elif t.find('TEXT') >= 0 or t.find('CHAR') >= 0 or t.find('CLOB') >= 0: return col.StringCol, {'length': 2**32-1} elif t.find('BLOB') >= 0: return col.BLOBCol, {"length": 2**32-1} elif t.find('REAL') >= 0 or t.find('FLOAT') >= 0: return col.FloatCol, {} elif t.find('DECIMAL') >= 0: return col.DecimalCol, {'size': None, 'precision': None} elif t.find('BOOL') >= 0: return col.BoolCol, {} else: return col.Col, {} def createEmptyDatabase(self): if self._memory: return open(self.filename, 'w').close() def dropDatabase(self): if self._memory: return os.unlink(self.filename) SQLObject-1.5.2/sqlobject/constraints.py0000644000175000017500000000351410674745606017606 0ustar phdphd00000000000000""" Constraints """ class BadValue(ValueError): def __init__(self, desc, obj, col, value, *args): self.desc = desc self.col = col # I want these objects to be garbage-collectable, so # I just keep their repr: self.obj = repr(obj) self.value = repr(value) fullDesc = "%s.%s %s (you gave: %s)" \ % (obj, col.name, desc, value) ValueError.__init__(self, fullDesc, *args) def isString(obj, col, value): if not isinstance(value, str): raise BadValue("only allows strings", obj, col, value) def notNull(obj, col, value): if value is None: raise BadValue("is defined NOT NULL", obj, col, value) def isInt(obj, col, value): if not isinstance(value, (int, long)): raise BadValue("only allows integers", obj, col, value) def isFloat(obj, col, value): if not isinstance(value, (int, long, float)): raise BadValue("only allows floating point numbers", obj, col, value) def isBool(obj, col, value): if not isinstance(value, bool): raise BadValue("only allows booleans", obj, col, value) class InList: def __init__(self, l): self.list = l def __call__(self, obj, col, value): if value not in self.list: raise BadValue("accepts only values in %s" % repr(self.list), obj, col, value) class MaxLength: def __init__(self, length): self.length = length def __call__(self, obj, col, value): try: length = len(value) except TypeError: raise BadValue("object does not have a length", obj, col, value) if length > self.length: raise BadValue("must be shorter in length than %s" % self.length, obj, col, value) SQLObject-1.5.2/sqlobject/dberrors.py0000644000175000017500000000115010510760460017033 0ustar phdphd00000000000000"""dberrors: database exception classes for SQLObject. These classes are dictated by the DB API v2.0: http://www.python.org/topics/database/DatabaseAPI-2.0.html """ class Error(StandardError): pass class Warning(StandardError): pass class InterfaceError(Error): pass class DatabaseError(Error): pass class InternalError(DatabaseError): pass class OperationalError(DatabaseError): pass class ProgrammingError(DatabaseError): pass class IntegrityError(DatabaseError): pass class DataError(DatabaseError): pass class NotSupportedError(DatabaseError): pass class DuplicateEntryError(IntegrityError): pass SQLObject-1.5.2/sqlobject/events.py0000644000175000017500000002732211467547020016535 0ustar phdphd00000000000000import sys import types from sqlobject.include.pydispatch import dispatcher from weakref import ref subclassClones = {} def listen(receiver, soClass, signal, alsoSubclasses=True, weak=True): """ Listen for the given ``signal`` on the SQLObject subclass ``soClass``, calling ``receiver()`` when ``send(soClass, signal, ...)`` is called. 
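    A rough usage sketch (assuming ``from sqlobject import events``;
    ``Person`` is a hypothetical SQLObject class, and ``RowUpdateSignal``
    below documents the arguments that signal passes)::

        def watch_update(instance, kwargs):
            print 'about to update', instance, kwargs

        events.listen(watch_update, Person, events.RowUpdateSignal)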
If ``alsoSubclasses`` is true, receiver will also be called when an event is fired on any subclass. """ dispatcher.connect(receiver, signal=signal, sender=soClass, weak=weak) weakReceiver = ref(receiver) subclassClones.setdefault(soClass, []).append((weakReceiver, signal)) # We export this function: send = dispatcher.send class Signal(object): """ Base event for all SQLObject events. In general the sender for these methods is the class, not the instance. """ class ClassCreateSignal(Signal): """ Signal raised after class creation. The sender is the superclass (in case of multiple superclasses, the first superclass). The arguments are ``(new_class_name, bases, new_attrs, post_funcs, early_funcs)``. ``new_attrs`` is a dictionary and may be modified (but ``new_class_name`` and ``bases`` are immutable). ``post_funcs`` is an initially-empty list that can have callbacks appended to it. Note: at the time this event is called, the new class has not yet been created. The functions in ``post_funcs`` will be called after the class is created, with the single arguments of ``(new_class)``. Also, ``early_funcs`` will be called at the soonest possible time after class creation (``post_funcs`` is called after the class's ``__classinit__``). """ def _makeSubclassConnections(new_class_name, bases, new_attrs, post_funcs, early_funcs): early_funcs.insert(0, _makeSubclassConnectionsPost) def _makeSubclassConnectionsPost(new_class): for cls in new_class.__bases__: for weakReceiver, signal in subclassClones.get(cls, []): receiver = weakReceiver() if not receiver: continue listen(receiver, new_class, signal) dispatcher.connect(_makeSubclassConnections, signal=ClassCreateSignal) # @@: Should there be a class reload event? This would allow modules # to be reloaded, possibly. Or it could even be folded into # ClassCreateSignal, since anything that listens to that needs to pay # attention to reloads (or else it is probably buggy). class RowCreateSignal(Signal): """ Called before an instance is created, with the class as the sender. Called with the arguments ``(instance, kwargs, post_funcs)``. There may be a ``connection`` argument. ``kwargs``may be usefully modified. ``post_funcs`` is a list of callbacks, intended to have functions appended to it, and are called with the arguments ``(new_instance)``. Note: this is not called when an instance is created from an existing database row. """ class RowCreatedSignal(Signal): """ Called after an instance is created, with the class as the sender. Called with the arguments ``(instance, kwargs, post_funcs)``. There may be a ``connection`` argument. ``kwargs``may be usefully modified. ``post_funcs`` is a list of callbacks, intended to have functions appended to it, and are called with the arguments ``(new_instance)``. Note: this is not called when an instance is created from an existing database row. """ # @@: An event for getting a row? But for each row, when doing a # select? For .sync, .syncUpdate, .expire? class RowDestroySignal(Signal): """ Called before an instance is deleted. Sender is the instance's class. Arguments are ``(instance, post_funcs)``. ``post_funcs`` is a list of callbacks, intended to have functions appended to it, and are called with arguments ``(instance)``. If any of the post_funcs raises an exception, the deletion is only affected if this will prevent a commit. You cannot cancel the delete, but you can raise an exception (which will probably cancel the delete, but also cause an uncaught exception if not expected). 
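    For illustration (``Person`` and ``log_removal`` are hypothetical)::

        def before_destroy(instance, post_funcs):
            # log_removal will later be called back as log_removal(instance)
            post_funcs.append(log_removal)

        events.listen(before_destroy, Person, events.RowDestroySignal)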
Note: this is not called when an instance is destroyed through garbage collection. @@: Should this allow ``instance`` to be a primary key, so that a row can be deleted without first fetching it? """ class RowDestroyedSignal(Signal): """ Called after an instance is deleted. Sender is the instance's class. Arguments are ``(instance)``. This is called before the post_funcs of RowDestroySignal Note: this is not called when an instance is destroyed through garbage collection. """ class RowUpdateSignal(Signal): """ Called when an instance is updated through a call to ``.set()`` (or a column attribute assignment). The arguments are ``(instance, kwargs)``. ``kwargs`` can be modified. This is run *before* the instance is updated; if you want to look at the current values, simply look at ``instance``. """ class RowUpdatedSignal(Signal): """ Called when an instance is updated through a call to ``.set()`` (or a column attribute assignment). The arguments are ``(instance, post_funcs)``. ``post_funcs`` is a list of callbacks, intended to have functions appended to it, and are called with the arguments ``(new_instance)``. This is run *after* the instance is updated; Works better with lazyUpdate = True. """ class AddColumnSignal(Signal): """ Called when a column is added to a class, with arguments ``(cls, connection, column_name, column_definition, changeSchema, post_funcs)``. This is called *after* the column has been added, and is called for each column after class creation. post_funcs are called with ``(cls, so_column_obj)`` """ class DeleteColumnSignal(Signal): """ Called when a column is removed from a class, with the arguments ``(cls, connection, column_name, so_column_obj, post_funcs)``. Like ``AddColumnSignal`` this is called after the action has been performed, and is called for subclassing (when a column is implicitly removed by setting it to ``None``). post_funcs are called with ``(cls, so_column_obj)`` """ # @@: Signals for indexes and joins? These are mostly event consumers, # though. class CreateTableSignal(Signal): """ Called when a table is created. If ``ifNotExists==True`` and the table exists, this event is not called. Called with ``(cls, connection, extra_sql, post_funcs)``. ``extra_sql`` is a list (which can be appended to) of extra SQL statements to be run after the table is created. ``post_funcs`` functions are called with ``(cls, connection)`` after the table has been created. Those functions are *not* called simply when constructing the SQL. """ class DropTableSignal(Signal): """ Called when a table is dropped. If ``ifExists==True`` and the table doesn't exist, this event is not called. Called with ``(cls, connection, extra_sql, post_funcs)``. ``post_funcs`` functions are called with ``(cls, connection)`` after the table has been dropped. """ ############################################################ ## Event Debugging ############################################################ def summarize_events_by_sender(sender=None, output=None, indent=0): """ Prints out a summary of the senders and listeners in the system, for debugging purposes. 
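    Typically called with no arguments, e.g. ``summarize_events_by_sender()``,
    which walks all registered senders and writes the report to stdout.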
""" if output is None: output = sys.stdout if sender is None: send_list = [ (deref(dispatcher.senders.get(sid)), listeners) for sid, listeners in dispatcher.connections.items() if deref(dispatcher.senders.get(sid))] for sender, listeners in sorted_items(send_list): real_sender = deref(sender) if not real_sender: continue header = 'Sender: %r' % real_sender print >> output, (' '*indent) + header print >> output, (' '*indent) + '='*len(header) summarize_events_by_sender(real_sender, output=output, indent=indent+2) else: for signal, receivers in sorted_items(dispatcher.connections.get(id(sender), [])): receivers = [deref(r) for r in receivers if deref(r)] header = 'Signal: %s (%i receivers)' % (sort_name(signal), len(receivers)) print >> output, (' '*indent) + header print >> output, (' '*indent) + '-'*len(header) for receiver in sorted(receivers, key=sort_name): print >> output, (' '*indent) + ' ' + nice_repr(receiver) def deref(value): if isinstance(value, dispatcher.WEAKREF_TYPES): return value() else: return value def sorted_items(a_dict): if isinstance(a_dict, dict): a_dict = a_dict.items() return sorted(a_dict, key=lambda t: sort_name(t[0])) def sort_name(value): if isinstance(value, type): return value.__name__ elif isinstance(value, types.FunctionType): return value.func_name else: return str(value) _real_dispatcher_send = dispatcher.send _real_dispatcher_sendExact = dispatcher.sendExact _real_dispatcher_disconnect = dispatcher.disconnect _real_dispatcher_connect = dispatcher.connect _debug_enabled = False def debug_events(): global _debug_enabled, send if _debug_enabled: return _debug_enabled = True dispatcher.send = send = _debug_send dispatcher.sendExact = _debug_sendExact dispatcher.disconnect = _debug_disconnect dispatcher.connect = _debug_connect def _debug_send(signal=dispatcher.Any, sender=dispatcher.Anonymous, *arguments, **named): print "send %s from %s: %s" % ( nice_repr(signal), nice_repr(sender), fmt_args(*arguments, **named)) return _real_dispatcher_send(signal, sender, *arguments, **named) def _debug_sendExact(signal=dispatcher.Any, sender=dispatcher.Anonymous, *arguments, **named): print "sendExact %s from %s: %s" % ( nice_repr(signal), nice_repr(sender), fmt_args(*arguments, **name)) return _real_dispatcher_sendExact(signal, sender, *arguments, **named) def _debug_connect(receiver, signal=dispatcher.Any, sender=dispatcher.Any, weak=True): print "connect %s to %s signal %s" % ( nice_repr(receiver), nice_repr(signal), nice_repr(sender)) return _real_dispatcher_connect(receiver, signal, sender, weak) def _debug_disconnect(receiver, signal=dispatcher.Any, sender=dispatcher.Any, weak=True): print "disconnecting %s from %s signal %s" % ( nice_repr(receiver), nice_repr(signal), nice_repr(sender)) return disconnect(receiver, signal, sender, weak) def fmt_args(*arguments, **name): args = [repr(a) for a in arguments] args.extend([ '%s=%r' % (n, v) for n, v in sorted(name.items())]) return ', '.join(args) def nice_repr(v): """ Like repr(), but nicer for debugging here. """ if isinstance(v, (types.ClassType, type)): return v.__module__ + '.' 
+ v.__name__ elif isinstance(v, types.FunctionType): if '__name__' in v.func_globals: if getattr(sys.modules[v.func_globals['__name__']], v.func_name, None) is v: return '%s.%s' % (v.func_globals['__name__'], v.func_name) return repr(v) elif isinstance(v, types.MethodType): return '%s.%s of %s' % ( nice_repr(v.im_class), v.im_func.func_name, nice_repr(v.im_self)) else: return repr(v) __all__ = ['listen', 'send'] for name, value in globals().items(): if isinstance(value, type) and issubclass(value, Signal): __all__.append(name) SQLObject-1.5.2/sqlobject/views.py0000644000175000017500000001254411641113565016363 0ustar phdphd00000000000000from sqlbuilder import * from main import SQLObject, sqlmeta import types, threading #### class ViewSQLObjectField(SQLObjectField): def __init__(self, alias, *arg): SQLObjectField.__init__(self, *arg) self.alias = alias def __sqlrepr__(self, db): return self.alias + "." + self.fieldName def tablesUsedImmediate(self): return [self.tableName] class ViewSQLObjectTable(SQLObjectTable): FieldClass = ViewSQLObjectField def __getattr__(self, attr): if attr == 'sqlmeta': raise AttributeError return SQLObjectTable.__getattr__(self, attr) def _getattrFromID(self, attr): return self.FieldClass(self.soClass.sqlmeta.alias, self.tableName, 'id', attr, self.soClass, None) def _getattrFromColumn(self, column, attr): return self.FieldClass(self.soClass.sqlmeta.alias, self.tableName, column.name, attr, self.soClass, column) class ViewSQLObject(SQLObject): """ A SQLObject class that derives all it's values from other SQLObject classes. Columns on subclasses should use SQLBuilder constructs for dbName, and sqlmeta should specify: * idName as a SQLBuilder construction * clause as SQLBuilder clause for specifying join conditions or other restrictions * table as an optional alternate name for the class alias See test_views.py for simple examples. """ def __classinit__(cls, new_attrs): SQLObject.__classinit__(cls, new_attrs) # like is_base if cls.__name__ != 'ViewSQLObject': dbName = hasattr(cls,'_connection') and (cls._connection and cls._connection.dbName) or None if getattr(cls.sqlmeta, 'table', None): cls.sqlmeta.alias = cls.sqlmeta.table else: cls.sqlmeta.alias = cls.sqlmeta.style.pythonClassToDBTable(cls.__name__) alias = cls.sqlmeta.alias columns = [ColumnAS(cls.sqlmeta.idName, 'id')] # {sqlrepr-key: [restriction, *aggregate-column]} aggregates = {'':[None]} inverseColumns = dict([(y,x) for x,y in cls.sqlmeta.columns.iteritems()]) for col in cls.sqlmeta.columnList: n = inverseColumns[col] ascol = ColumnAS(col.dbName, n) if isAggregate(col.dbName): restriction = getattr(col, 'aggregateClause',None) if restriction: restrictkey = sqlrepr(restriction, dbName) aggregates[restrictkey] = aggregates.get(restrictkey, [restriction]) + [ascol] else: aggregates[''].append(ascol) else: columns.append(ascol) metajoin = getattr(cls.sqlmeta, 'join', NoDefault) clause = getattr(cls.sqlmeta, 'clause', NoDefault) select = Select(columns, distinct=True, # @@ LDO check if this really mattered for performance # @@ Postgres (and MySQL?) extension! 
#distinctOn=cls.sqlmeta.idName, join=metajoin, clause=clause) aggregates = aggregates.values() #print cls.__name__, sqlrepr(aggregates, dbName) if aggregates != [[None]]: join = [] last_alias = "%s_base" % alias last_id = "id" last = Alias(select, last_alias) columns = [ColumnAS(SQLConstant("%s.%s"%(last_alias,x.expr2)), x.expr2) for x in columns] for i, agg in enumerate(aggregates): restriction = agg[0] if restriction is None: restriction = clause else: restriction = AND(clause, restriction) agg = agg[1:] agg_alias = "%s_%s" % (alias, i) agg_id = '%s_id'%agg_alias if not last.q.alias.endswith('base'): last = None new_alias = Alias(Select([ColumnAS(cls.sqlmeta.idName, agg_id)]+agg, groupBy=cls.sqlmeta.idName, join=metajoin, clause=restriction), agg_alias) agg_join = LEFTJOINOn(last, new_alias, "%s.%s = %s.%s" % (last_alias, last_id, agg_alias, agg_id)) join.append(agg_join) for col in agg: columns.append(ColumnAS(SQLConstant("%s.%s"%(agg_alias, col.expr2)),col.expr2)) last = new_alias last_alias = agg_alias last_id = agg_id select = Select(columns, join=join) cls.sqlmeta.table = Alias(select, alias) cls.q = ViewSQLObjectTable(cls) for n,col in cls.sqlmeta.columns.iteritems(): col.dbName = n def isAggregate(expr): if isinstance(expr, SQLCall): return True if isinstance(expr, SQLOp): return isAggregate(expr.expr1) or isAggregate(expr.expr2) return False ###### SQLObject-1.5.2/sqlobject/include/0000755000175000017500000000000012322476205016271 5ustar phdphd00000000000000SQLObject-1.5.2/sqlobject/include/__init__.py0000644000175000017500000000000210372665114020374 0ustar phdphd00000000000000# SQLObject-1.5.2/sqlobject/include/hashcol.py0000644000175000017500000000413411037640072020263 0ustar phdphd00000000000000__all__ = ['HashCol'] import sqlobject.col class DbHash: """ Presents a comparison object for hashes, allowing plain text to be automagically compared with the base content. """ def __init__( self, hash, hashMethod ): self.hash = hash self.hashMethod = hashMethod def __cmp__( self, other ): if other is None: if self.hash is None: return 0 return True if not isinstance( other, basestring ): raise TypeError( "A hash may only be compared with a string, or None." ) return cmp( self.hashMethod( other ), self.hash ) def __repr__( self ): return "" class HashValidator( sqlobject.col.StringValidator ): """ Provides formal SQLObject validation services for the HashCol. """ def to_python( self, value, state ): """ Passes out a hash object. """ if value is None: return None return DbHash( hash = value, hashMethod = self.hashMethod ) def from_python( self, value, state ): """ Store the given value as a MD5 hash, or None if specified. """ if value is None: return None return self.hashMethod( value ) class SOHashCol( sqlobject.col.SOStringCol ): """ The internal HashCol definition. By default, enforces a md5 digest. """ def __init__( self, **kw ): if 'hashMethod' not in kw: from md5 import md5 self.hashMethod = lambda v: md5( v ).hexdigest() if 'length' not in kw: kw['length'] = 32 else: self.hashMethod = kw['hashMethod'] del kw['hashMethod'] super( sqlobject.col.SOStringCol, self ).__init__( **kw ) def createValidators( self ): return [HashValidator( name=self.name, hashMethod=self.hashMethod )] + \ super( SOHashCol, self ).createValidators() class HashCol( sqlobject.col.StringCol ): """ End-user HashCol class. May be instantiated with 'hashMethod', a function which returns the string hash of any other string (i.e. basestring). 
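    A minimal usage sketch (``Member`` is a hypothetical class, and the example
    assumes a connection has already been configured; it is not part of this
    module)::

        from sqlobject import SQLObject
        from sqlobject.include.hashcol import HashCol

        class Member(SQLObject):
            password = HashCol()      # md5 hex digest, length 32 by default

        m = Member(password='secret')
        m.password == 'secret'        # True -- the plain text is hashed, then compared
        m.password == 'wrong'         # False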
""" baseClass = SOHashCol SQLObject-1.5.2/sqlobject/include/pydispatch/0000755000175000017500000000000012322476205020441 5ustar phdphd00000000000000SQLObject-1.5.2/sqlobject/include/pydispatch/dispatcher.py0000644000175000017500000003435511563772713023164 0ustar phdphd00000000000000"""Multiple-producer-multiple-consumer signal-dispatching dispatcher is the core of the PyDispatcher system, providing the primary API and the core logic for the system. Module attributes of note: Any -- Singleton used to signal either "Any Sender" or "Any Signal". See documentation of the _Any class. Anonymous -- Singleton used to signal "Anonymous Sender" See documentation of the _Anonymous class. Internal attributes: WEAKREF_TYPES -- tuple of types/classes which represent weak references to receivers, and thus must be de- referenced on retrieval to retrieve the callable object connections -- { senderkey (id) : { signal : [receivers...]}} senders -- { senderkey (id) : weakref(sender) } used for cleaning up sender references on sender deletion sendersBack -- { receiverkey (id) : [senderkey (id)...] } used for cleaning up receiver references on receiver deletion, (considerably speeds up the cleanup process vs. the original code.) """ from __future__ import generators import types, weakref import saferef, robustapply, errors __author__ = "Patrick K. O'Brien " __cvsid__ = "$Id: dispatcher.py,v 1.9 2005/09/17 04:55:57 mcfletch Exp $" __version__ = "$Revision: 1.9 $"[11:-2] try: True except NameError: True = 1==1 False = 1==0 class _Parameter: """Used to represent default parameter values.""" def __repr__(self): return self.__class__.__name__ class _Any(_Parameter): """Singleton used to signal either "Any Sender" or "Any Signal" The Any object can be used with connect, disconnect, send, or sendExact to signal that the parameter given Any should react to all senders/signals, not just a particular sender/signal. """ Any = _Any() class _Anonymous(_Parameter): """Singleton used to signal "Anonymous Sender" The Anonymous object is used to signal that the sender of a message is not specified (as distinct from being "any sender"). Registering callbacks for Anonymous will only receive messages sent without senders. Sending with anonymous will only send messages to those receivers registered for Any or Anonymous. Note: The default sender for connect is Any, while the default sender for send is Anonymous. This has the effect that if you do not specify any senders in either function then all messages are routed as though there was a single sender (Anonymous) being used everywhere. """ Anonymous = _Anonymous() WEAKREF_TYPES = (weakref.ReferenceType, saferef.BoundMethodWeakref) connections = {} senders = {} sendersBack = {} def connect(receiver, signal=Any, sender=Any, weak=True): """Connect receiver to sender for signal receiver -- a callable Python object which is to receive messages/signals/events. Receivers must be hashable objects. if weak is True, then receiver must be weak-referencable (more precisely saferef.safeRef() must be able to create a reference to the receiver). Receivers are fairly flexible in their specification, as the machinery in the robustApply module takes care of most of the details regarding figuring out appropriate subsets of the sent arguments to apply to a given receiver. 
Note: if receiver is itself a weak reference (a callable), it will be de-referenced by the system's machinery, so *generally* weak references are not suitable as receivers, though some use might be found for the facility whereby a higher-level library passes in pre-weakrefed receiver references. signal -- the signal to which the receiver should respond if Any, receiver will receive any signal from the indicated sender (which might also be Any, but is not necessarily Any). Otherwise must be a hashable Python object other than None (DispatcherError raised on None). sender -- the sender to which the receiver should respond if Any, receiver will receive the indicated signals from any sender. if Anonymous, receiver will only receive indicated signals from send/sendExact which do not specify a sender, or specify Anonymous explicitly as the sender. Otherwise can be any python object. weak -- whether to use weak references to the receiver By default, the module will attempt to use weak references to the receiver objects. If this parameter is false, then strong references will be used. returns None, may raise DispatcherTypeError """ if signal is None: raise errors.DispatcherTypeError( 'Signal cannot be None (receiver=%r sender=%r)'%( receiver,sender) ) if weak: receiver = saferef.safeRef(receiver, onDelete=_removeReceiver) senderkey = id(sender) if senderkey in connections: signals = connections[senderkey] else: connections[senderkey] = signals = {} # Keep track of senders for cleanup. # Is Anonymous something we want to clean up? if sender not in (None, Anonymous, Any): def remove(object, senderkey=senderkey): _removeSender(senderkey=senderkey) # Skip objects that can not be weakly referenced, which means # they won't be automatically cleaned up, but that's too bad. try: weakSender = weakref.ref(sender, remove) senders[senderkey] = weakSender except: pass receiverID = id(receiver) # get current set, remove any current references to # this receiver in the set, including back-references if signal in signals: receivers = signals[signal] _removeOldBackRefs(senderkey, signal, receiver, receivers) else: receivers = signals[signal] = [] try: current = sendersBack.get( receiverID ) if current is None: sendersBack[ receiverID ] = current = [] if senderkey not in current: current.append(senderkey) except: pass receivers.append(receiver) def disconnect(receiver, signal=Any, sender=Any, weak=True): """Disconnect receiver from sender for signal receiver -- the registered receiver to disconnect signal -- the registered signal to disconnect sender -- the registered sender to disconnect weak -- the weakref state to disconnect disconnect reverses the process of connect, the semantics for the individual elements are logically equivalent to a tuple of (receiver, signal, sender, weak) used as a key to be deleted from the internal routing tables. (The actual process is slightly more complex but the semantics are basically the same). Note: Using disconnect is not required to cleanup routing when an object is deleted, the framework will remove routes for deleted objects automatically. It's only necessary to disconnect if you want to stop routing to a live object. 
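    A short sketch of the connect/send/disconnect round trip, using the
    module-level functions (``log_update`` and ``some_object`` are hypothetical
    names)::

        def log_update(sender):
            print 'updated:', sender

        connect(log_update, signal='row_updated')
        send(signal='row_updated', sender=some_object)   # log_update is called
        disconnect(log_update, signal='row_updated')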
returns None, may raise DispatcherTypeError or DispatcherKeyError """ if signal is None: raise errors.DispatcherTypeError( 'Signal cannot be None (receiver=%r sender=%r)'%( receiver,sender) ) if weak: receiver = saferef.safeRef(receiver) senderkey = id(sender) try: signals = connections[senderkey] receivers = signals[signal] except KeyError: raise errors.DispatcherKeyError( """No receivers found for signal %r from sender %r""" %( signal, sender ) ) try: # also removes from receivers _removeOldBackRefs(senderkey, signal, receiver, receivers) except ValueError: raise errors.DispatcherKeyError( """No connection to receiver %s for signal %s from sender %s""" %( receiver, signal, sender ) ) _cleanupConnections(senderkey, signal) def getReceivers( sender = Any, signal = Any ): """Get list of receivers from global tables This utility function allows you to retrieve the raw list of receivers from the connections table for the given sender and signal pair. Note: there is no guarantee that this is the actual list stored in the connections table, so the value should be treated as a simple iterable/truth value rather than, for instance a list to which you might append new records. Normally you would use liveReceivers( getReceivers( ...)) to retrieve the actual receiver objects as an iterable object. """ try: return connections[id(sender)][signal] except KeyError: return [] def liveReceivers(receivers): """Filter sequence of receivers to get resolved, live receivers This is a generator which will iterate over the passed sequence, checking for weak references and resolving them, then returning all live receivers. """ for receiver in receivers: if isinstance( receiver, WEAKREF_TYPES): # Dereference the weak reference. receiver = receiver() if receiver is not None: yield receiver else: yield receiver def getAllReceivers( sender = Any, signal = Any ): """Get list of all receivers from global tables This gets all receivers which should receive the given signal from sender, each receiver should be produced only once by the resulting generator """ receivers = {} for set in ( # Get receivers that receive *this* signal from *this* sender. getReceivers( sender, signal ), # Add receivers that receive *any* signal from *this* sender. getReceivers( sender, Any ), # Add receivers that receive *this* signal from *any* sender. getReceivers( Any, signal ), # Add receivers that receive *any* signal from *any* sender. getReceivers( Any, Any ), ): for receiver in set: if receiver: # filter out dead instance-method weakrefs try: if not receiver in receivers: receivers[receiver] = 1 yield receiver except TypeError: # dead weakrefs raise TypeError on hash... pass def send(signal=Any, sender=Anonymous, *arguments, **named): """Send signal from sender to all connected receivers. signal -- (hashable) signal value, see connect for details sender -- the sender of the signal if Any, only receivers registered for Any will receive the message. if Anonymous, only receivers registered to receive messages from Anonymous or Any will receive the message Otherwise can be any python object (normally one registered with a connect if you actually want something to occur). arguments -- positional arguments which will be passed to *all* receivers. Note that this may raise TypeErrors if the receivers do not allow the particular arguments. Note also that arguments are applied before named arguments, so they should be used with care. 
named -- named arguments which will be filtered according to the parameters of the receivers to only provide those acceptable to the receiver. Return a list of tuple pairs [(receiver, response), ... ] if any receiver raises an error, the error propagates back through send, terminating the dispatch loop, so it is quite possible to not have all receivers called if a raises an error. """ # Call each receiver with whatever arguments it can accept. # Return a list of tuple pairs [(receiver, response), ... ]. responses = [] for receiver in liveReceivers(getAllReceivers(sender, signal)): response = robustapply.robustApply( receiver, signal=signal, sender=sender, *arguments, **named ) responses.append((receiver, response)) return responses def sendExact( signal=Any, sender=Anonymous, *arguments, **named ): """Send signal only to those receivers registered for exact message sendExact allows for avoiding Any/Anonymous registered handlers, sending only to those receivers explicitly registered for a particular signal on a particular sender. """ responses = [] for receiver in liveReceivers(getReceivers(sender, signal)): response = robustapply.robustApply( receiver, signal=signal, sender=sender, *arguments, **named ) responses.append((receiver, response)) return responses def _removeReceiver(receiver): """Remove receiver from connections.""" if not sendersBack: # During module cleanup the mapping will be replaced with None return False backKey = id(receiver) for senderkey in sendersBack.get(backKey,()): try: signals = connections[senderkey].keys() except KeyError,err: pass else: for signal in signals: try: receivers = connections[senderkey][signal] except KeyError: pass else: try: receivers.remove( receiver ) except Exception, err: pass _cleanupConnections(senderkey, signal) try: del sendersBack[ backKey ] except KeyError: pass def _cleanupConnections(senderkey, signal): """Delete any empty signals for senderkey. Delete senderkey if empty.""" try: receivers = connections[senderkey][signal] except: pass else: if not receivers: # No more connected receivers. Therefore, remove the signal. try: signals = connections[senderkey] except KeyError: pass else: del signals[signal] if not signals: # No more signal connections. Therefore, remove the sender. _removeSender(senderkey) def _removeSender(senderkey): """Remove senderkey from connections.""" _removeBackrefs(senderkey) try: del connections[senderkey] except KeyError: pass # Senderkey will only be in senders dictionary if sender # could be weakly referenced. try: del senders[senderkey] except: pass def _removeBackrefs( senderkey): """Remove all back-references to this senderkey""" try: signals = connections[senderkey] except KeyError: signals = None else: items = signals.items() def allReceivers( ): for signal,set in items: for item in set: yield item for receiver in allReceivers(): _killBackref( receiver, senderkey ) def _removeOldBackRefs(senderkey, signal, receiver, receivers): """Kill old sendersBack references from receiver This guards against multiple registration of the same receiver for a given signal and sender leaking memory as old back reference records build up. 
Also removes old receiver instance from receivers """ try: index = receivers.index(receiver) # need to scan back references here and remove senderkey except ValueError: return False else: oldReceiver = receivers[index] del receivers[index] found = 0 signals = connections.get(signal) if signals is not None: for sig,recs in connections.get(signal,{}).iteritems(): if sig != signal: for rec in recs: if rec is oldReceiver: found = 1 break if not found: _killBackref( oldReceiver, senderkey ) return True return False def _killBackref( receiver, senderkey ): """Do the actual removal of back reference from receiver to senderkey""" receiverkey = id(receiver) set = sendersBack.get( receiverkey, () ) while senderkey in set: try: set.remove( senderkey ) except: break if not set: try: del sendersBack[ receiverkey ] except KeyError: pass return True SQLObject-1.5.2/sqlobject/include/pydispatch/robust.py0000644000175000017500000000342310372665114022335 0ustar phdphd00000000000000"""Module implementing error-catching version of send (sendRobust)""" from dispatcher import Any, Anonymous, liveReceivers, getAllReceivers from robustapply import robustApply def sendRobust( signal=Any, sender=Anonymous, *arguments, **named ): """Send signal from sender to all connected receivers catching errors signal -- (hashable) signal value, see connect for details sender -- the sender of the signal if Any, only receivers registered for Any will receive the message. if Anonymous, only receivers registered to receive messages from Anonymous or Any will receive the message Otherwise can be any python object (normally one registered with a connect if you actually want something to occur). arguments -- positional arguments which will be passed to *all* receivers. Note that this may raise TypeErrors if the receivers do not allow the particular arguments. Note also that arguments are applied before named arguments, so they should be used with care. named -- named arguments which will be filtered according to the parameters of the receivers to only provide those acceptable to the receiver. Return a list of tuple pairs [(receiver, response), ... ] if any receiver raises an error (specifically any subclass of Exception), the error instance is returned as the result for that receiver. """ # Call each receiver with whatever arguments it can accept. # Return a list of tuple pairs [(receiver, response), ... ]. responses = [] for receiver in liveReceivers(getAllReceivers(sender, signal)): try: response = robustApply( receiver, signal=signal, sender=sender, *arguments, **named ) except Exception, err: responses.append((receiver, err)) else: responses.append((receiver, response)) return responses SQLObject-1.5.2/sqlobject/include/pydispatch/__init__.py0000644000175000017500000000025310372665114022554 0ustar phdphd00000000000000"""Multi-consumer multi-producer dispatching mechanism """ __version__ = "1.0.0" __author__ = "Patrick K. O'Brien" __license__ = "BSD-style, see license.txt for details" SQLObject-1.5.2/sqlobject/include/pydispatch/robustapply.py0000644000175000017500000000272011604624052023375 0ustar phdphd00000000000000"""Robust apply mechanism Provides a function "call", which can sort out what arguments a given callable object can take, and subset the given arguments to match only those which are acceptable. 
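A minimal sketch of the filtering behaviour (the receiver and the argument
values are hypothetical)::

    def on_update(instance, signal=None):
        print 'updated', instance

    # 'timestamp' is not an argument of on_update and has no **kwargs to land
    # in, so it is silently dropped; 'instance' and 'signal' are passed through.
    robustApply(on_update, instance=obj, signal='row-update', timestamp=t)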
""" def function( receiver ): """Get function-like callable object for given receiver returns (function_or_method, codeObject, fromMethod) If fromMethod is true, then the callable already has its first argument bound """ if hasattr(receiver, 'im_func'): return receiver, receiver.im_func.func_code, 1 elif hasattr(receiver, 'func_code'): return receiver, receiver.func_code, 0 elif hasattr(receiver, '__call__'): return function(receiver.__call__) else: raise ValueError('unknown reciever type %s %s'%(receiver, type(receiver))) def robustApply(receiver, *arguments, **named): """Call receiver with arguments and an appropriate subset of named """ receiver, codeObject, startIndex = function( receiver ) acceptable = codeObject.co_varnames[startIndex+len(arguments):codeObject.co_argcount] for name in codeObject.co_varnames[startIndex:startIndex+len(arguments)]: if name in named: raise TypeError( """Argument %r specified both positionally and as a keyword for calling %r"""% ( name, receiver, ) ) if not (codeObject.co_flags & 8): # fc does not have a **kwds type parameter, therefore # remove unacceptable arguments. for arg in named.keys(): if arg not in acceptable: del named[arg] return receiver(*arguments, **named) SQLObject-1.5.2/sqlobject/include/pydispatch/saferef.py0000644000175000017500000001360410372665114022434 0ustar phdphd00000000000000"""Refactored "safe reference" from dispatcher.py""" import weakref, traceback def safeRef(target, onDelete = None): """Return a *safe* weak reference to a callable target target -- the object to be weakly referenced, if it's a bound method reference, will create a BoundMethodWeakref, otherwise creates a simple weakref. onDelete -- if provided, will have a hard reference stored to the callable to be called after the safe reference goes out of scope with the reference object, (either a weakref or a BoundMethodWeakref) as argument. """ if hasattr(target, 'im_self'): if target.im_self is not None: # Turn a bound method into a BoundMethodWeakref instance. # Keep track of these instances for lookup by disconnect(). assert hasattr(target, 'im_func'), """safeRef target %r has im_self, but no im_func, don't know how to create reference"""%( target,) reference = BoundMethodWeakref( target=target, onDelete=onDelete ) return reference if callable(onDelete): return weakref.ref(target, onDelete) else: return weakref.ref( target ) class BoundMethodWeakref(object): """'Safe' and reusable weak references to instance methods BoundMethodWeakref objects provide a mechanism for referencing a bound method without requiring that the method object itself (which is normally a transient object) is kept alive. Instead, the BoundMethodWeakref object keeps weak references to both the object and the function which together define the instance method. Attributes: key -- the identity key for the reference, calculated by the class's calculateKey method applied to the target instance method deletionMethods -- sequence of callable objects taking single argument, a reference to this object which will be called when *either* the target object or target function is garbage collected (i.e. when this object becomes invalid). These are specified as the onDelete parameters of safeRef calls. weakSelf -- weak reference to the target object weakFunc -- weak reference to the target function Class Attributes: _allInstances -- class attribute pointing to all live BoundMethodWeakref objects indexed by the class's calculateKey(target) method applied to the target objects. 
This weak value dictionary is used to short-circuit creation so that multiple references to the same (object, function) pair produce the same BoundMethodWeakref instance. """ _allInstances = weakref.WeakValueDictionary() def __new__( cls, target, onDelete=None, *arguments,**named ): """Create new instance or return current instance Basically this method of construction allows us to short-circuit creation of references to already- referenced instance methods. The key corresponding to the target is calculated, and if there is already an existing reference, that is returned, with its deletionMethods attribute updated. Otherwise the new instance is created and registered in the table of already-referenced methods. """ key = cls.calculateKey(target) current =cls._allInstances.get(key) if current is not None: current.deletionMethods.append( onDelete) return current else: base = super( BoundMethodWeakref, cls).__new__( cls ) cls._allInstances[key] = base base.__init__( target, onDelete, *arguments,**named) return base def __init__(self, target, onDelete=None): """Return a weak-reference-like instance for a bound method target -- the instance-method target for the weak reference, must have im_self and im_func attributes and be reconstructable via: target.im_func.__get__( target.im_self ) which is true of built-in instance methods. onDelete -- optional callback which will be called when this weak reference ceases to be valid (i.e. either the object or the function is garbage collected). Should take a single argument, which will be passed a pointer to this object. """ def remove(weak, self=self): """Set self.isDead to true when method or instance is destroyed""" methods = self.deletionMethods[:] del self.deletionMethods[:] try: del self.__class__._allInstances[ self.key ] except KeyError: pass for function in methods: try: if callable( function ): function( self ) except Exception, e: try: traceback.print_exc() except AttributeError, err: print '''Exception during saferef %s cleanup function %s: %s'''%( self, function, e ) self.deletionMethods = [onDelete] self.key = self.calculateKey( target ) self.weakSelf = weakref.ref(target.im_self, remove) self.weakFunc = weakref.ref(target.im_func, remove) self.selfName = str(target.im_self) self.funcName = str(target.im_func.__name__) def calculateKey( cls, target ): """Calculate the reference key for this reference Currently this is a two-tuple of the id()'s of the target object and the target function respectively. """ return (id(target.im_self),id(target.im_func)) calculateKey = classmethod( calculateKey ) def __str__(self): """Give a friendly representation of the object""" return """%s( %s.%s )"""%( self.__class__.__name__, self.selfName, self.funcName, ) __repr__ = __str__ def __nonzero__( self ): """Whether we are still a valid reference""" return self() is not None def __cmp__( self, other ): """Compare with another reference""" if not isinstance (other,self.__class__): return cmp( self.__class__, type(other) ) return cmp( self.key, other.key) def __call__(self): """Return a strong reference to the bound method If the target cannot be retrieved, then will return None, otherwise returns a bound instance method for our object and function. Note: You may call this method any number of times, as it does not invalidate the reference. 
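        A short sketch (``obj`` is a hypothetical instance with a bound
        ``handler`` method)::

            ref = safeRef(obj.handler)   # returns a BoundMethodWeakref
            method = ref()               # strong reference, or None if obj is gone
            if method is not None:
                method()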
""" target = self.weakSelf() if target is not None: function = self.weakFunc() if function is not None: return function.__get__(target) return None SQLObject-1.5.2/sqlobject/include/pydispatch/README.txt0000644000175000017500000000060410372665114022141 0ustar phdphd00000000000000This is from PyDispatcher It was moved here because installation of PyDispatcher conflicts with RuleDispatch (they both use the dispatch top-level module), and I thought it would be easier to just put it here. Also, PyDispatcher is small and stable and doesn't need updating often. If the name conflict is resolved in the future, this package can go away. SQLObject-1.5.2/sqlobject/include/pydispatch/errors.py0000644000175000017500000000055310372665114022334 0ustar phdphd00000000000000"""Error types for dispatcher mechanism """ class DispatcherError(Exception): """Base class for all Dispatcher errors""" class DispatcherKeyError(KeyError, DispatcherError): """Error raised when unknown (sender,signal) set specified""" class DispatcherTypeError(TypeError, DispatcherError): """Error raised when inappropriate signal-type specified (None)""" SQLObject-1.5.2/sqlobject/wsgi_middleware.py0000644000175000017500000000654210372665120020374 0ustar phdphd00000000000000from paste.deploy.converters import asbool from paste.wsgilib import catch_errors from paste.util import import_string import sqlobject import threading def make_middleware(app, global_conf, database=None, use_transaction=False, hub=None): """ WSGI middleware that sets the connection for the request (using the database URI or connection object) and the given hub (or ``sqlobject.sqlhub`` if not given). If ``use_transaction`` is true, then the request will be run in a transaction. Applications can use the keys (which are all no-argument functions): ``sqlobject.get_connection()``: Returns the connection object ``sqlobject.abort()``: Aborts the transaction. Does not raise an error, but at the *end* of the request there will be a rollback. ``sqlobject.begin()``: Starts a transaction. First commits (or rolls back if aborted) if this is run in a transaction. ``sqlobject.in_transaction()``: Returns true or false, depending if we are currently in a transaction. 
""" use_transaction = asbool(use_transaction) if database is None: database = global_conf.get('database') if not database: raise ValueError( "You must provide a 'database' configuration value") if isinstance(hub, basestring): hub = import_string.eval_import(hub) if not hub: hub = sqlobject.sqlhub if isinstance(database, basestring): database = sqlobject.connectionForURI(database) return SQLObjectMiddleware(app, database, use_transaction, hub) class SQLObjectMiddleware(object): def __init__(self, app, conn, use_transaction, hub): self.app = app self.conn = conn self.use_transaction = use_transaction self.hub = hub def __call__(self, environ, start_response): conn = [self.conn] if self.use_transaction: conn[0] = conn[0].transaction() any_errors = [] use_transaction = [self.use_transaction] self.hub.threadConnection = conn[0] def abort(): assert use_transaction[0], ( "You cannot abort, because a transaction is not being used") any_errors.append(None) def begin(): if use_transaction[0]: if any_errors: conn[0].rollback() else: conn[0].commit() any_errors[:] = [] use_transaction[0] = True conn[0] = self.conn.transaction() self.hub.threadConnection = conn[0] def error(exc_info=None): any_errors.append(None) ok() def ok(): if use_transaction[0]: if any_errors: conn[0].rollback() else: conn[0].commit(close=True) self.hub.threadConnection = None def in_transaction(): return use_transaction[0] def get_connection(): return conn[0] environ['sqlobject.get_connection'] = get_connection environ['sqlobject.abort'] = abort environ['sqlobject.begin'] = begin environ['sqlobject.in_transaction'] = in_transaction return catch_errors(self.app, environ, start_response, error_callback=error, ok_callback=ok) SQLObject-1.5.2/sqlobject/sqlbuilder.py0000644000175000017500000013002512223766654017401 0ustar phdphd00000000000000""" sqlobject.sqlbuilder -------------------- :author: Ian Bicking Builds SQL expressions from normal Python expressions. Disclaimer ---------- This program is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA. Instructions ------------ To begin a SQL expression, you must use some sort of SQL object -- a field, table, or SQL statement (``SELECT``, ``INSERT``, etc.) You can then use normal operators, with the exception of: `and`, `or`, `not`, and `in`. You can use the `AND`, `OR`, `NOT`, and `IN` functions instead, or you can also use `&`, `|`, and `~` for `and`, `or`, and `not` respectively (however -- the precidence for these operators doesn't work as you would want, so you must use many parenthesis). To create a sql field, table, or constant/function, use the namespaces `table`, `const`, and `func`. For instance, ``table.address`` refers to the ``address`` table, and ``table.address.state`` refers to the ``state`` field in the address table. 
``const.NULL`` is the ``NULL`` SQL constant, and ``func.NOW()`` is the ``NOW()`` function call (`const` and `func` are actually identicle, but the two names are provided for clarity). Once you create this object, expressions formed with it will produce SQL statements. The ``sqlrepr(obj)`` function gets the SQL representation of these objects, as well as the proper SQL representation of basic Python types (None==NULL). There are a number of DB-specific SQL features that this does not implement. There are a bunch of normal ANSI features also not present. See the bottom of this module for some examples, and run it (i.e. ``python sql.py``) to see the results of those examples. """ ######################################## ## Constants ######################################## import fnmatch import operator import re import threading import types import weakref import classregistry from converters import registerConverter, sqlrepr, quote_str, unquote_str class VersionError(Exception): pass class NoDefault: pass class SQLObjectState(object): def __init__(self, soObject, connection=None): self.soObject = weakref.proxy(soObject) self.connection = connection safeSQLRE = re.compile(r'^[a-zA-Z_][a-zA-Z0-9_\.]*$') def sqlIdentifier(obj): # some db drivers return unicode column names return isinstance(obj, basestring) and bool(safeSQLRE.search(obj.strip())) def execute(expr, executor): if hasattr(expr, 'execute'): return expr.execute(executor) else: return expr def _str_or_sqlrepr(expr, db): if isinstance(expr, basestring): return expr return sqlrepr(expr, db) ######################################## ## Expression generation ######################################## class SQLExpression: def __add__(self, other): return SQLOp("+", self, other) def __radd__(self, other): return SQLOp("+", other, self) def __sub__(self, other): return SQLOp("-", self, other) def __rsub__(self, other): return SQLOp("-", other, self) def __mul__(self, other): return SQLOp("*", self, other) def __rmul__(self, other): return SQLOp("*", other, self) def __div__(self, other): return SQLOp("/", self, other) def __rdiv__(self, other): return SQLOp("/", other, self) def __pos__(self): return SQLPrefix("+", self) def __neg__(self): return SQLPrefix("-", self) def __pow__(self, other): return SQLConstant("POW")(self, other) def __rpow__(self, other): return SQLConstant("POW")(other, self) def __abs__(self): return SQLConstant("ABS")(self) def __mod__(self, other): return SQLModulo(self, other) def __rmod__(self, other): return SQLConstant("MOD")(other, self) def __lt__(self, other): return SQLOp("<", self, other) def __le__(self, other): return SQLOp("<=", self, other) def __gt__(self, other): return SQLOp(">", self, other) def __ge__(self, other): return SQLOp(">=", self, other) def __eq__(self, other): if other is None: return ISNULL(self) else: return SQLOp("=", self, other) def __ne__(self, other): if other is None: return ISNOTNULL(self) else: return SQLOp("<>", self, other) def __and__(self, other): return SQLOp("AND", self, other) def __rand__(self, other): return SQLOp("AND", other, self) def __or__(self, other): return SQLOp("OR", self, other) def __ror__(self, other): return SQLOp("OR", other, self) def __invert__(self): return SQLPrefix("NOT", self) def __call__(self, *args): return SQLCall(self, args) def __repr__(self): try: return self.__sqlrepr__(None) except AssertionError: return '<%s %s>' % ( self.__class__.__name__, hex(id(self))[2:]) def __str__(self): return repr(self) def __cmp__(self, other): raise VersionError, 
"Python 2.1+ required" def __rcmp__(self, other): raise VersionError, "Python 2.1+ required" def startswith(self, s): return STARTSWITH(self, s) def endswith(self, s): return ENDSWITH(self, s) def contains(self, s): return CONTAINSSTRING(self, s) def components(self): return [] def tablesUsed(self, db): return self.tablesUsedSet(db) def tablesUsedSet(self, db): tables = set() for table in self.tablesUsedImmediate(): if hasattr(table, '__sqlrepr__'): table = sqlrepr(table, db) tables.add(table) for component in self.components(): tables.update(tablesUsedSet(component, db)) return tables def tablesUsedImmediate(self): return [] ####################################### # Converter for SQLExpression instances ####################################### def SQLExprConverter(value, db): return value.__sqlrepr__() registerConverter(SQLExpression, SQLExprConverter) def tablesUsedSet(obj, db): if hasattr(obj, "tablesUsedSet"): return obj.tablesUsedSet(db) else: return {} operatorMap = { "+": operator.add, "/": operator.div, "-": operator.sub, "*": operator.mul, "<": operator.lt, "<=": operator.le, "=": operator.eq, "!=": operator.ne, ">=": operator.ge, ">": operator.gt, "IN": operator.contains, "IS": operator.eq, } class SQLOp(SQLExpression): def __init__(self, op, expr1, expr2): self.op = op.upper() self.expr1 = expr1 self.expr2 = expr2 def __sqlrepr__(self, db): s1 = sqlrepr(self.expr1, db) s2 = sqlrepr(self.expr2, db) if s1[0] != '(' and s1 != 'NULL': s1 = '(' + s1 + ')' if s2[0] != '(' and s2 != 'NULL': s2 = '(' + s2 + ')' return "(%s %s %s)" % (s1, self.op, s2) def components(self): return [self.expr1, self.expr2] def execute(self, executor): if self.op == "AND": return execute(self.expr1, executor) \ and execute(self.expr2, executor) elif self.op == "OR": return execute(self.expr1, executor) \ or execute(self.expr2, executor) else: return operatorMap[self.op.upper()](execute(self.expr1, executor), execute(self.expr2, executor)) class SQLModulo(SQLOp): def __init__(self, expr1, expr2): SQLOp.__init__(self, '%', expr1, expr2) def __sqlrepr__(self, db): if db == 'sqlite': return SQLOp.__sqlrepr__(self, db) s1 = sqlrepr(self.expr1, db) s2 = sqlrepr(self.expr2, db) return "MOD(%s, %s)" % (s1, s2) registerConverter(SQLOp, SQLExprConverter) registerConverter(SQLModulo, SQLExprConverter) class SQLCall(SQLExpression): def __init__(self, expr, args): self.expr = expr self.args = args def __sqlrepr__(self, db): return "%s%s" % (sqlrepr(self.expr, db), sqlrepr(self.args, db)) def components(self): return [self.expr] + list(self.args) def execute(self, executor): raise ValueError, "I don't yet know how to locally execute functions" registerConverter(SQLCall, SQLExprConverter) class SQLPrefix(SQLExpression): def __init__(self, prefix, expr): self.prefix = prefix self.expr = expr def __sqlrepr__(self, db): return "%s %s" % (self.prefix, sqlrepr(self.expr, db)) def components(self): return [self.expr] def execute(self, executor): expr = execute(self.expr, executor) if prefix == "+": return expr elif prefix == "-": return -expr elif prefix.upper() == "NOT": return not expr registerConverter(SQLPrefix, SQLExprConverter) class SQLConstant(SQLExpression): def __init__(self, const): self.const = const def __sqlrepr__(self, db): return self.const def execute(self, executor): raise ValueError, "I don't yet know how to execute SQL constants" registerConverter(SQLConstant, SQLExprConverter) class SQLTrueClauseClass(SQLExpression): def __sqlrepr__(self, db): return "1 = 1" def execute(self, executor): return 1 
SQLTrueClause = SQLTrueClauseClass() registerConverter(SQLTrueClauseClass, SQLExprConverter) ######################################## ## Namespaces ######################################## class Field(SQLExpression): def __init__(self, tableName, fieldName): self.tableName = tableName self.fieldName = fieldName def __sqlrepr__(self, db): return self.tableName + "." + self.fieldName def tablesUsedImmediate(self): return [self.tableName] def execute(self, executor): return executor.field(self.tableName, self.fieldName) class SQLObjectField(Field): def __init__(self, tableName, fieldName, original, soClass, column): Field.__init__(self, tableName, fieldName) self.original = original self.soClass = soClass self.column = column def _from_python(self, value): column = self.column if not isinstance(value, SQLExpression) and column and column.from_python: value = column.from_python(value, SQLObjectState(self.soClass)) return value def __eq__(self, other): if other is None: return ISNULL(self) other = self._from_python(other) return SQLOp('=', self, other) def __ne__(self, other): if other is None: return ISNOTNULL(self) other = self._from_python(other) return SQLOp('<>', self, other) def startswith(self, s): s = self._from_python(s) return STARTSWITH(self, s) def endswith(self, s): s = self._from_python(s) return ENDSWITH(self, s) def contains(self, s): s = self._from_python(s) return CONTAINSSTRING(self, s) registerConverter(SQLObjectField, SQLExprConverter) class Table(SQLExpression): FieldClass = Field def __init__(self, tableName): self.tableName = tableName def __getattr__(self, attr): if attr.startswith('__'): raise AttributeError return self.FieldClass(self.tableName, attr) def __sqlrepr__(self, db): return _str_or_sqlrepr(self.tableName, db) def execute(self, executor): raise ValueError, "Tables don't have values" class SQLObjectTable(Table): FieldClass = SQLObjectField def __init__(self, soClass): self.soClass = soClass assert soClass.sqlmeta.table, ( "Bad table name in class %r: %r" % (soClass, soClass.sqlmeta.table)) Table.__init__(self, soClass.sqlmeta.table) def __getattr__(self, attr): if attr.startswith('__'): raise AttributeError if attr == 'id': return self._getattrFromID(attr) elif attr in self.soClass.sqlmeta.columns: column = self.soClass.sqlmeta.columns[attr] return self._getattrFromColumn(column, attr) elif attr+'ID' in [k for (k, v) in self.soClass.sqlmeta.columns.items() if v.foreignKey]: attr += 'ID' column = self.soClass.sqlmeta.columns[attr] return self._getattrFromColumn(column, attr) else: raise AttributeError("%s instance has no attribute '%s'" % (self.soClass.__name__, attr)) def _getattrFromID(self, attr): return self.FieldClass(self.tableName, self.soClass.sqlmeta.idName, attr, self.soClass, None) def _getattrFromColumn(self, column, attr): return self.FieldClass(self.tableName, column.dbName, attr, self.soClass, column) class SQLObjectTableWithJoins(SQLObjectTable): def __getattr__(self, attr): if attr+'ID' in [k for (k, v) in self.soClass.sqlmeta.columns.items() if v.foreignKey]: column = self.soClass.sqlmeta.columns[attr+'ID'] return self._getattrFromForeignKey(column, attr) elif attr in [x.joinMethodName for x in self.soClass.sqlmeta.joins]: join = [x for x in self.soClass.sqlmeta.joins if x.joinMethodName == attr][0] return self._getattrFromJoin(join, attr) else: return SQLObjectTable.__getattr__(self, attr) def _getattrFromForeignKey(self, column, attr): ret = getattr(self, column.name) == \ getattr(self.soClass, '_SO_class_'+column.foreignKey).q.id return 
ret def _getattrFromJoin(self, join, attr): if hasattr(join, 'otherColumn'): return AND(join.otherClass.q.id == Field(join.intermediateTable, join.otherColumn), Field(join.intermediateTable, join.joinColumn) == self.soClass.q.id) else: return getattr(join.otherClass.q, join.joinColumn)==self.soClass.q.id class TableSpace: TableClass = Table def __getattr__(self, attr): if attr.startswith('__'): raise AttributeError return self.TableClass(attr) class ConstantSpace: def __getattr__(self, attr): if attr.startswith('__'): raise AttributeError return SQLConstant(attr) ######################################## ## Table aliases ######################################## class AliasField(Field): def __init__(self, tableName, fieldName, alias, aliasTable): Field.__init__(self, tableName, fieldName) self.alias = alias self.aliasTable = aliasTable def __sqlrepr__(self, db): fieldName = self.fieldName if isinstance(fieldName, SQLExpression): fieldName = sqlrepr(fieldName, db) return self.alias + "." + fieldName def tablesUsedImmediate(self): return [self.aliasTable] class AliasTable(Table): as_string = '' # set it to "AS" if your database requires it FieldClass = AliasField _alias_lock = threading.Lock() _alias_counter = 0 def __init__(self, table, alias=None): if hasattr(table, "sqlmeta"): tableName = SQLConstant(table.sqlmeta.table) elif isinstance(table, (Select, Union)): assert alias is not None, "Alias name cannot be constructed from Select instances, please provide 'alias' kw." tableName = Subquery('', table) table = None else: tableName = SQLConstant(table) table = None Table.__init__(self, tableName) self.table = table if alias is None: self._alias_lock.acquire() try: AliasTable._alias_counter += 1 alias = "%s_alias%d" % (tableName, AliasTable._alias_counter) finally: self._alias_lock.release() self.alias = alias def __getattr__(self, attr): if attr.startswith('__'): raise AttributeError if self.table: attr = getattr(self.table.q, attr).fieldName return self.FieldClass(self.tableName, attr, self.alias, self) def __sqlrepr__(self, db): return "%s %s %s" % (sqlrepr(self.tableName, db), self.as_string, self.alias) class Alias(SQLExpression): def __init__(self, table, alias=None): self.q = AliasTable(table, alias) def __sqlrepr__(self, db): return sqlrepr(self.q, db) def components(self): return [self.q] class Union(SQLExpression): def __init__(self, *tables): tabs = [] for t in tables: if not isinstance(t, SQLExpression) and hasattr(t, 'sqlmeta'): t = t.sqlmeta.table if isinstance(t, Alias): t = t.q if isinstance(t, Table): t = t.tableName if not isinstance(t, SQLExpression): t = SQLConstant(t) tabs.append(t) self.tables = tabs def __sqlrepr__(self, db): return " UNION ".join([str(sqlrepr(t, db)) for t in self.tables]) ######################################## ## SQL Statements ######################################## class Select(SQLExpression): def __init__(self, items=NoDefault, where=NoDefault, groupBy=NoDefault, having=NoDefault, orderBy=NoDefault, limit=NoDefault, join=NoDefault, lazyColumns=False, distinct=False, start=0, end=None, reversed=False, forUpdate=False, clause=NoDefault, staticTables=NoDefault, distinctOn=NoDefault): self.ops = {} if not isinstance(items, (list, tuple, types.GeneratorType)): items = [items] if clause is NoDefault and where is not NoDefault: clause = where if staticTables is NoDefault: staticTables = [] self.ops['items'] = items self.ops['clause'] = clause self.ops['groupBy'] = groupBy self.ops['having'] = having self.ops['orderBy'] = orderBy self.ops['limit'] = 
limit self.ops['join'] = join self.ops['lazyColumns'] = lazyColumns self.ops['distinct'] = distinct self.ops['distinctOn'] = distinctOn self.ops['start'] = start self.ops['end'] = end self.ops['reversed'] = reversed self.ops['forUpdate'] = forUpdate self.ops['staticTables'] = staticTables def clone(self, **newOps): ops = self.ops.copy() ops.update(newOps) return self.__class__(**ops) def newItems(self, items): return self.clone(items=items) def newClause(self, new_clause): return self.clone(clause=new_clause) def orderBy(self, orderBy): return self.clone(orderBy=orderBy) def unlimited(self): return self.clone(limit=NoDefault, start=0, end=None) def limit(self, limit): self.clone(limit=limit) def lazyColumns(self, value): return self.clone(lazyColumns=value) def reversed(self): return self.clone(reversed=not self.ops.get('reversed', False)) def distinct(self): return self.clone(distinct=True) def filter(self, filter_clause): if filter_clause is None: # None doesn't filter anything, it's just a no-op: return self clause = self.ops['clause'] if isinstance(clause, basestring): clause = SQLConstant('(%s)' % clause) return self.newClause(AND(clause, filter_clause)) def __sqlrepr__(self, db): select = "SELECT" if self.ops['distinct']: select += " DISTINCT" if self.ops['distinctOn'] is not NoDefault: select += " ON(%s)" % _str_or_sqlrepr(self.ops['distinctOn'], db) if not self.ops['lazyColumns']: select += " %s" % ", ".join([str(_str_or_sqlrepr(v, db)) for v in self.ops['items']]) else: select += " %s" % _str_or_sqlrepr(self.ops['items'][0], db) join = [] join_str = '' if self.ops['join'] is not NoDefault and self.ops['join'] is not None: _join = self.ops['join'] if isinstance(_join, str): join_str = " " + _join elif isinstance(_join, SQLJoin): join.append(_join) else: join.extend(_join) tables = set() for x in self.ops['staticTables']: if isinstance(x, SQLExpression): x = sqlrepr(x, db) tables.add(x) things = list(self.ops['items']) + join if self.ops['clause'] is not NoDefault: things.append(self.ops['clause']) for thing in things: if isinstance(thing, SQLExpression): tables.update(tablesUsedSet(thing, db)) for j in join: t1 = _str_or_sqlrepr(j.table1, db) if t1 in tables: tables.remove(t1) t2 = _str_or_sqlrepr(j.table2, db) if t2 in tables: tables.remove(t2) if tables: select += " FROM %s" % ", ".join(tables) elif join: select += " FROM" tablesYet = tables for j in join: if tablesYet and j.table1: sep = ", " else: sep = " " select += sep + sqlrepr(j, db) tablesYet = True if join_str: select += join_str if self.ops['clause'] is not NoDefault: select += " WHERE %s" % _str_or_sqlrepr(self.ops['clause'], db) if self.ops['groupBy'] is not NoDefault: groupBy = _str_or_sqlrepr(self.ops['groupBy'], db) if isinstance(self.ops['groupBy'], (list, tuple)): groupBy = groupBy[1:-1] # Remove parens select += " GROUP BY %s" % groupBy if self.ops['having'] is not NoDefault: select += " HAVING %s" % _str_or_sqlrepr(self.ops['having'], db) if self.ops['orderBy'] is not NoDefault and self.ops['orderBy'] is not None: orderBy = self.ops['orderBy'] if self.ops['reversed']: reverser = DESC else: reverser = lambda x: x if isinstance(orderBy, (list, tuple)): select += " ORDER BY %s" % ", ".join([_str_or_sqlrepr(reverser(x), db) for x in orderBy]) else: select += " ORDER BY %s" % _str_or_sqlrepr(reverser(orderBy), db) start, end = self.ops['start'], self.ops['end'] if self.ops['limit'] is not NoDefault: end = start + self.ops['limit'] if start or end: from dbconnection import dbConnectionForScheme select = 
dbConnectionForScheme(db)._queryAddLimitOffset(select, start, end) if self.ops['forUpdate']: select += " FOR UPDATE" return select registerConverter(Select, SQLExprConverter) class Insert(SQLExpression): def __init__(self, table, valueList=None, values=None, template=NoDefault): self.template = template self.table = table if valueList: if values: raise TypeError, "You may only give valueList *or* values" self.valueList = valueList else: self.valueList = [values] def __sqlrepr__(self, db): if not self.valueList: return '' insert = "INSERT INTO %s" % self.table allowNonDict = True template = self.template if (template is NoDefault) and isinstance(self.valueList[0], dict): template = self.valueList[0].keys() allowNonDict = False if template is not NoDefault: insert += " (%s)" % ", ".join(template) insert += " VALUES " listToJoin = [] listToJoin_app = listToJoin.append for value in self.valueList: if isinstance(value, dict): if template is NoDefault: raise TypeError, "You can't mix non-dictionaries with dictionaries in an INSERT if you don't provide a template (%s)" % repr(value) value = dictToList(template, value) elif not allowNonDict: raise TypeError, "You can't mix non-dictionaries with dictionaries in an INSERT if you don't provide a template (%s)" % repr(value) listToJoin_app("(%s)" % ", ".join([sqlrepr(v, db) for v in value])) insert = "%s%s" % (insert, ", ".join(listToJoin)) return insert registerConverter(Insert, SQLExprConverter) def dictToList(template, dict): list = [] for key in template: list.append(dict[key]) if len(dict.keys()) > len(template): raise TypeError, "Extra entries in dictionary that aren't asked for in template (template=%s, dict=%s)" % (repr(template), repr(dict)) return list class Update(SQLExpression): def __init__(self, table, values, template=NoDefault, where=NoDefault): self.table = table self.values = values self.template = template self.whereClause = where def __sqlrepr__(self, db): update = "%s %s" % (self.sqlName(), self.table) update += " SET" first = True if self.template is not NoDefault: for i in range(len(self.template)): if first: first = False else: update += "," update += " %s=%s" % (self.template[i], sqlrepr(self.values[i], db)) else: for key, value in self.values.items(): if first: first = False else: update += "," update += " %s=%s" % (key, sqlrepr(value, db)) if self.whereClause is not NoDefault: update += " WHERE %s" % _str_or_sqlrepr(self.whereClause, db) return update def sqlName(self): return "UPDATE" registerConverter(Update, SQLExprConverter) class Delete(SQLExpression): """To be safe, this will signal an error if there is no where clause, unless you pass in where=None to the constructor.""" def __init__(self, table, where=NoDefault): self.table = table if where is NoDefault: raise TypeError, "You must give a where clause or pass in None to indicate no where clause" self.whereClause = where def __sqlrepr__(self, db): whereClause = self.whereClause if whereClause is None: return "DELETE FROM %s" % self.table whereClause = _str_or_sqlrepr(whereClause, db) return "DELETE FROM %s WHERE %s" % (self.table, whereClause) registerConverter(Delete, SQLExprConverter) class Replace(Update): def sqlName(self): return "REPLACE" registerConverter(Replace, SQLExprConverter) ######################################## ## SQL Builtins ######################################## class DESC(SQLExpression): def __init__(self, expr): self.expr = expr def __sqlrepr__(self, db): if isinstance(self.expr, DESC): return sqlrepr(self.expr.expr, db) return '%s DESC' % 
sqlrepr(self.expr, db) def AND(*ops): if not ops: return None op1 = ops[0] ops = ops[1:] if ops: return SQLOp("AND", op1, AND(*ops)) else: return op1 def OR(*ops): if not ops: return None op1 = ops[0] ops = ops[1:] if ops: return SQLOp("OR", op1, OR(*ops)) else: return op1 def NOT(op): return SQLPrefix("NOT", op) def _IN(item, list): return SQLOp("IN", item, list) def IN(item, list): from sresults import SelectResults # Import here to avoid circular import if isinstance(list, SelectResults): query = list.queryForSelect() query.ops['items'] = [list.sourceClass.q.id] list = query if isinstance(list, Select): return INSubquery(item, list) else: return _IN(item, list) def NOTIN(item, list): if isinstance(list, Select): return NOTINSubquery(item, list) else: return NOT(_IN(item, list)) def STARTSWITH(expr, pattern): return LIKE(expr, _LikeQuoted(pattern) + '%', escape='\\') def ENDSWITH(expr, pattern): return LIKE(expr, '%' + _LikeQuoted(pattern), escape='\\') def CONTAINSSTRING(expr, pattern): return LIKE(expr, '%' + _LikeQuoted(pattern) + '%', escape='\\') def ISNULL(expr): return SQLOp("IS", expr, None) def ISNOTNULL(expr): return SQLOp("IS NOT", expr, None) class ColumnAS(SQLOp): ''' Just like SQLOp('AS', expr, name) except without the parentheses ''' def __init__(self, expr, name): if isinstance(name, basestring): name = SQLConstant(name) SQLOp.__init__(self, 'AS', expr, name) def __sqlrepr__(self, db): return "%s %s %s" % (sqlrepr(self.expr1, db), self.op, sqlrepr(self.expr2, db)) class _LikeQuoted: # It assumes prefix and postfix are strings; usually just a percent sign. # @@: I'm not sure what the quoting rules really are for all the # databases def __init__(self, expr): self.expr = expr self.prefix = '' self.postfix = '' def __radd__(self, s): self.prefix = s + self.prefix return self def __add__(self, s): self.postfix += s return self def __sqlrepr__(self, db): s = self.expr if isinstance(s, SQLExpression): values = [] if self.prefix: values.append(quote_str(self.prefix, db)) s = _quote_like_special(sqlrepr(s, db), db) values.append(s) if self.postfix: values.append(quote_str(self.postfix, db)) if db == "mysql": return "CONCAT(%s)" % ", ".join(values) else: return " || ".join(values) elif isinstance(s, basestring): s = _quote_like_special(unquote_str(sqlrepr(s, db)), db) return quote_str("%s%s%s" % (self.prefix, s, self.postfix), db) else: raise TypeError, "expected str, unicode or SQLExpression, got %s" % type(s) def _quote_like_special(s, db): if db in ('postgres', 'rdbhost'): escape = r'\\' else: escape = '\\' s = s.replace('\\', r'\\').replace('%', escape+'%').replace('_', escape+'_') return s ######################################## ## SQL JOINs ######################################## class SQLJoin(SQLExpression): def __init__(self, table1, table2, op=','): if hasattr(table1, 'sqlmeta'): table1 = table1.sqlmeta.table if hasattr(table2, 'sqlmeta'): table2 = table2.sqlmeta.table if isinstance(table1, str): table1 = SQLConstant(table1) if isinstance(table2, str): table2 = SQLConstant(table2) self.table1 = table1 self.table2 = table2 self.op = op def __sqlrepr__(self, db): if self.table1: return "%s%s %s" % (sqlrepr(self.table1, db), self.op, sqlrepr(self.table2, db)) else: return "%s %s" % (self.op, sqlrepr(self.table2, db)) registerConverter(SQLJoin, SQLExprConverter) def JOIN(table1, table2): return SQLJoin(table1, table2, " JOIN") def INNERJOIN(table1, table2): return SQLJoin(table1, table2, " INNER JOIN") def CROSSJOIN(table1, table2): return SQLJoin(table1, table2, " CROSS 
JOIN") def STRAIGHTJOIN(table1, table2): return SQLJoin(table1, table2, " STRAIGHT JOIN") def LEFTJOIN(table1, table2): return SQLJoin(table1, table2, " LEFT JOIN") def LEFTOUTERJOIN(table1, table2): return SQLJoin(table1, table2, " LEFT OUTER JOIN") def NATURALJOIN(table1, table2): return SQLJoin(table1, table2, " NATURAL JOIN") def NATURALLEFTJOIN(table1, table2): return SQLJoin(table1, table2, " NATURAL LEFT JOIN") def NATURALLEFTOUTERJOIN(table1, table2): return SQLJoin(table1, table2, " NATURAL LEFT OUTER JOIN") def RIGHTJOIN(table1, table2): return SQLJoin(table1, table2, " RIGHT JOIN") def RIGHTOUTERJOIN(table1, table2): return SQLJoin(table1, table2, " RIGHT OUTER JOIN") def NATURALRIGHTJOIN(table1, table2): return SQLJoin(table1, table2, " NATURAL RIGHT JOIN") def NATURALRIGHTOUTERJOIN(table1, table2): return SQLJoin(table1, table2, " NATURAL RIGHT OUTER JOIN") def FULLJOIN(table1, table2): return SQLJoin(table1, table2, " FULL JOIN") def FULLOUTERJOIN(table1, table2): return SQLJoin(table1, table2, " FULL OUTER JOIN") def NATURALFULLJOIN(table1, table2): return SQLJoin(table1, table2, " NATURAL FULL JOIN") def NATURALFULLOUTERJOIN(table1, table2): return SQLJoin(table1, table2, " NATURAL FULL OUTER JOIN") class SQLJoinConditional(SQLJoin): """Conditional JOIN""" def __init__(self, table1, table2, op, on_condition=None, using_columns=None): """For condition you must give on_condition or using_columns but not both on_condition can be a string or SQLExpression, for example Table1.q.col1 == Table2.q.col2 using_columns can be a string or a list of columns, e.g. (Table1.q.col1, Table2.q.col2) """ if not on_condition and not using_columns: raise TypeError, "You must give ON condition or USING columns" if on_condition and using_columns: raise TypeError, "You must give ON condition or USING columns but not both" SQLJoin.__init__(self, table1, table2, op) self.on_condition = on_condition self.using_columns = using_columns def __sqlrepr__(self, db): if self.on_condition: on_condition = self.on_condition if hasattr(on_condition, "__sqlrepr__"): on_condition = sqlrepr(on_condition, db) join = "%s %s ON %s" % (self.op, sqlrepr(self.table2, db), on_condition) if self.table1: join = "%s %s" % (sqlrepr(self.table1, db), join) return join elif self.using_columns: using_columns = [] for col in self.using_columns: if hasattr(col, "__sqlrepr__"): col = sqlrepr(col, db) using_columns.append(col) using_columns = ", ".join(using_columns) join = "%s %s USING (%s)" % (self.op, sqlrepr(self.table2, db), using_columns) if self.table1: join = "%s %s" % (sqlrepr(self.table1, db), join) return join else: RuntimeError, "Impossible error" registerConverter(SQLJoinConditional, SQLExprConverter) def INNERJOINConditional(table1, table2, on_condition=None, using_columns=None): return SQLJoinConditional(table1, table2, "INNER JOIN", on_condition, using_columns) def LEFTJOINConditional(table1, table2, on_condition=None, using_columns=None): return SQLJoinConditional(table1, table2, "LEFT JOIN", on_condition, using_columns) def LEFTOUTERJOINConditional(table1, table2, on_condition=None, using_columns=None): return SQLJoinConditional(table1, table2, "LEFT OUTER JOIN", on_condition, using_columns) def RIGHTJOINConditional(table1, table2, on_condition=None, using_columns=None): return SQLJoinConditional(table1, table2, "RIGHT JOIN", on_condition, using_columns) def RIGHTOUTERJOINConditional(table1, table2, on_condition=None, using_columns=None): return SQLJoinConditional(table1, table2, "RIGHT OUTER JOIN", on_condition, 
using_columns) def FULLJOINConditional(table1, table2, on_condition=None, using_columns=None): return SQLJoinConditional(table1, table2, "FULL JOIN", on_condition, using_columns) def FULLOUTERJOINConditional(table1, table2, on_condition=None, using_columns=None): return SQLJoinConditional(table1, table2, "FULL OUTER JOIN", on_condition, using_columns) class SQLJoinOn(SQLJoinConditional): """Conditional JOIN ON""" def __init__(self, table1, table2, op, on_condition): SQLJoinConditional.__init__(self, table1, table2, op, on_condition) registerConverter(SQLJoinOn, SQLExprConverter) class SQLJoinUsing(SQLJoinConditional): """Conditional JOIN USING""" def __init__(self, table1, table2, op, using_columns): SQLJoinConditional.__init__(self, table1, table2, op, None, using_columns) registerConverter(SQLJoinUsing, SQLExprConverter) def INNERJOINOn(table1, table2, on_condition): return SQLJoinOn(table1, table2, "INNER JOIN", on_condition) def LEFTJOINOn(table1, table2, on_condition): return SQLJoinOn(table1, table2, "LEFT JOIN", on_condition) def LEFTOUTERJOINOn(table1, table2, on_condition): return SQLJoinOn(table1, table2, "LEFT OUTER JOIN", on_condition) def RIGHTJOINOn(table1, table2, on_condition): return SQLJoinOn(table1, table2, "RIGHT JOIN", on_condition) def RIGHTOUTERJOINOn(table1, table2, on_condition): return SQLJoinOn(table1, table2, "RIGHT OUTER JOIN", on_condition) def FULLJOINOn(table1, table2, on_condition): return SQLJoinOn(table1, table2, "FULL JOIN", on_condition) def FULLOUTERJOINOn(table1, table2, on_condition): return SQLJoinOn(table1, table2, "FULL OUTER JOIN", on_condition) def INNERJOINUsing(table1, table2, using_columns): return SQLJoinUsing(table1, table2, "INNER JOIN", using_columns) def LEFTJOINUsing(table1, table2, using_columns): return SQLJoinUsing(table1, table2, "LEFT JOIN", using_columns) def LEFTOUTERJOINUsing(table1, table2, using_columns): return SQLJoinUsing(table1, table2, "LEFT OUTER JOIN", using_columns) def RIGHTJOINUsing(table1, table2, using_columns): return SQLJoinUsing(table1, table2, "RIGHT JOIN", using_columns) def RIGHTOUTERJOINUsing(table1, table2, using_columns): return SQLJoinUsing(table1, table2, "RIGHT OUTER JOIN", using_columns) def FULLJOINUsing(table1, table2, using_columns): return SQLJoinUsing(table1, table2, "FULL JOIN", using_columns) def FULLOUTERJOINUsing(table1, table2, using_columns): return SQLJoinUsing(table1, table2, "FULL OUTER JOIN", using_columns) ######################################## ## Subqueries (subselects) ######################################## class OuterField(SQLObjectField): def tablesUsedImmediate(self): return [] class OuterTable(SQLObjectTable): FieldClass = OuterField class Outer: def __init__(self, table): self.q = OuterTable(table) class LIKE(SQLExpression): op = "LIKE" def __init__(self, expr, string, escape=None): self.expr = expr self.string = string self.escape = escape def __sqlrepr__(self, db): escape = self.escape like = "%s %s (%s)" % (sqlrepr(self.expr, db), self.op, sqlrepr(self.string, db)) if escape is None: return "(%s)" % like else: return "(%s ESCAPE %s)" % (like, sqlrepr(escape, db)) def components(self): return [self.expr, self.string] def execute(self, executor): if not hasattr(self, '_regex'): # @@: Crude, not entirely accurate dest = self.string dest = dest.replace("%%", "\001") dest = dest.replace("*", "\002") dest = dest.replace("%", "*") dest = dest.replace("\001", "%") dest = dest.replace("\002", "[*]") self._regex = re.compile(fnmatch.translate(dest), re.I) return 
self._regex.search(execute(self.expr, executor)) class RLIKE(LIKE): op = "RLIKE" op_db = { 'firebird': 'RLIKE', 'maxdb': 'RLIKE', 'mysql': 'RLIKE', 'postgres': '~', 'rdbhost': '~', 'sqlite': 'REGEXP' } def _get_op(self, db): return self.op_db.get(db, 'LIKE') def __sqlrepr__(self, db): return "(%s %s (%s))" % ( sqlrepr(self.expr, db), self._get_op(db), sqlrepr(self.string, db) ) def execute(self, executor): self.op = self._get_op(self.db) return LIKE.execute(self, executor) class INSubquery(SQLExpression): op = "IN" def __init__(self, item, subquery): self.item = item self.subquery = subquery def components(self): return [self.item] def __sqlrepr__(self, db): return "%s %s (%s)" % (sqlrepr(self.item, db), self.op, sqlrepr(self.subquery, db)) class NOTINSubquery(INSubquery): op = "NOT IN" class Subquery(SQLExpression): def __init__(self, op, subquery): self.op = op self.subquery = subquery def __sqlrepr__(self, db): return "%s (%s)" % (self.op, sqlrepr(self.subquery, db)) def EXISTS(subquery): return Subquery("EXISTS", subquery) def NOTEXISTS(subquery): return Subquery("NOT EXISTS", subquery) def SOME(subquery): return Subquery("SOME", subquery) def ANY(subquery): return Subquery("ANY", subquery) def ALL(subquery): return Subquery("ALL", subquery) #### class ImportProxyField(SQLObjectField): def tablesUsedImmediate(self): return [str(self.tableName)] class ImportProxy(SQLExpression): '''Class to be used in column definitions that rely on other tables that might not yet be in a classregistry. ''' FieldClass = ImportProxyField def __init__(self, clsName, registry=None): self.tableName = _DelayClass(self, clsName) self.sqlmeta = _Delay_proxy(table=_DelayClass(self, clsName)) self.q = self self.soClass = None classregistry.registry(registry).addClassCallback(clsName, lambda foreign, me: setattr(me, 'soClass', foreign), self) def __nonzero__(self): return True def __getattr__(self, attr): if self.soClass is None: return _Delay(self, attr) return getattr(self.soClass.q, attr) class _Delay(SQLExpression): def __init__(self, proxy, attr): self.attr = attr self.proxy = proxy def __sqlrepr__(self, db): if self.proxy.soClass is None: return '_DELAYED_' + self.attr val = self._resolve() if isinstance(val, SQLExpression): val = sqlrepr(val, db) return val def tablesUsedImmediate(self): return getattr(self._resolve(), 'tablesUsedImmediate', lambda: [])() def components(self): return getattr(self._resolve(), 'components', lambda: [])() def _resolve(self): return getattr(self.proxy, self.attr) # For AliasTable etc def fieldName(self): class _aliasFieldName(SQLExpression): def __init__(self, proxy): self.proxy = proxy def __sqlrepr__(self, db): return self.proxy._resolve().fieldName return _aliasFieldName(self) fieldName = property(fieldName) class _DelayClass(_Delay): def _resolve(self): return self.proxy.soClass.sqlmeta.table class _Delay_proxy(object): def __init__(self, **kw): self.__dict__.update(kw) ###### ######################################## ## Global initializations ######################################## table = TableSpace() const = ConstantSpace() func = const ######################################## ## Testing ######################################## if __name__ == "__main__": tests = """ >>> AND(table.address.name == "Ian Bicking", table.address.zip > 30000) >>> table.address.name >>> AND(LIKE(table.address.name, "this"), IN(table.address.zip, [100, 200, 300])) >>> Select([table.address.name, table.address.state], where=LIKE(table.address.name, "%ian%")) >>> Select([table.user.name], 
where=AND(table.user.state == table.states.abbrev)) >>> Insert(table.address, [{"name": "BOB", "address": "3049 N. 18th St."}, {"name": "TIM", "address": "409 S. 10th St."}]) >>> Insert(table.address, [("BOB", "3049 N. 18th St."), ("TIM", "409 S. 10th St.")], template=('name', 'address')) >>> Delete(table.address, where="BOB"==table.address.name) >>> Update(table.address, {"lastModified": const.NOW()}) >>> Replace(table.address, [("BOB", "3049 N. 18th St."), ("TIM", "409 S. 10th St.")], template=('name', 'address')) """ for expr in tests.split('\n'): if not expr.strip(): continue if expr.startswith('>>> '): expr = expr[4:] SQLObject-1.5.2/SQLObject.egg-info/0000755000175000017500000000000012322476205016140 5ustar phdphd00000000000000SQLObject-1.5.2/SQLObject.egg-info/entry_points.txt0000644000175000017500000000013112322476200021424 0ustar phdphd00000000000000 [paste.filter_app_factory] main = sqlobject.wsgi_middleware:make_middleware SQLObject-1.5.2/SQLObject.egg-info/dependency_links.txt0000644000175000017500000000000112322476200022201 0ustar phdphd00000000000000 SQLObject-1.5.2/SQLObject.egg-info/SOURCES.txt0000644000175000017500000001343012322476201020021 0ustar phdphd00000000000000MANIFEST.in README.txt setup.cfg setup.py SQLObject.egg-info/PKG-INFO SQLObject.egg-info/SOURCES.txt SQLObject.egg-info/dependency_links.txt SQLObject.egg-info/entry_points.txt SQLObject.egg-info/requires.txt SQLObject.egg-info/top_level.txt debian/changelog debian/control debian/copyright debian/docs debian/examples debian/rules docs/Authors.txt docs/DeveloperGuide.txt docs/FAQ.txt docs/Inheritance.txt docs/LICENSE docs/News.txt docs/SQLBuilder.txt docs/SQLObject.txt docs/SelectResults.txt docs/TODO.txt docs/Versioning.txt docs/Views.txt docs/community.txt docs/default.css docs/download.txt docs/index.txt docs/interface.py docs/links.txt docs/rebuild docs/sqlobject-admin.txt docs/test.py docs/europython/europython_sqlobj.py docs/europython/main.css docs/europython/person.py docs/presentation-2004-11/sqlobject-and-database-programming.html docs/presentation-2004-11/ui/bodybg.gif docs/presentation-2004-11/ui/custom.css docs/presentation-2004-11/ui/framing.css docs/presentation-2004-11/ui/opera.css docs/presentation-2004-11/ui/pretty.css docs/presentation-2004-11/ui/print.css docs/presentation-2004-11/ui/s5-core.css docs/presentation-2004-11/ui/slides.css docs/presentation-2004-11/ui/slides.js ez_setup/README.txt ez_setup/__init__.py scripts/sqlobject-admin scripts/sqlobject-convertOldURI sqlobject/__init__.py sqlobject/__version__.py sqlobject/boundattributes.py sqlobject/cache.py sqlobject/classregistry.py sqlobject/col.py sqlobject/conftest.py sqlobject/constraints.py sqlobject/converters.py sqlobject/dbconnection.py sqlobject/dberrors.py sqlobject/declarative.py sqlobject/events.py sqlobject/index.py sqlobject/joins.py sqlobject/main.py sqlobject/sqlbuilder.py sqlobject/sresults.py sqlobject/styles.py sqlobject/views.py sqlobject/wsgi_middleware.py sqlobject/firebird/__init__.py sqlobject/firebird/firebirdconnection.py sqlobject/include/__init__.py sqlobject/include/hashcol.py sqlobject/include/pydispatch/README.txt sqlobject/include/pydispatch/__init__.py sqlobject/include/pydispatch/dispatcher.py sqlobject/include/pydispatch/errors.py sqlobject/include/pydispatch/robust.py sqlobject/include/pydispatch/robustapply.py sqlobject/include/pydispatch/saferef.py sqlobject/inheritance/__init__.py sqlobject/inheritance/iteration.py sqlobject/inheritance/tests/__init__.py 
sqlobject/inheritance/tests/testDestroyCascade.py sqlobject/inheritance/tests/test_aggregates.py sqlobject/inheritance/tests/test_asdict.py sqlobject/inheritance/tests/test_deep_inheritance.py sqlobject/inheritance/tests/test_foreignKey.py sqlobject/inheritance/tests/test_indexes.py sqlobject/inheritance/tests/test_inheritance.py sqlobject/inheritance/tests/test_inheritance_tree.py sqlobject/manager/__init__.py sqlobject/manager/command.py sqlobject/maxdb/__init__.py sqlobject/maxdb/maxdbconnection.py sqlobject/maxdb/readme.txt sqlobject/mssql/__init__.py sqlobject/mssql/mssqlconnection.py sqlobject/mysql/__init__.py sqlobject/mysql/mysqlconnection.py sqlobject/postgres/__init__.py sqlobject/postgres/pgconnection.py sqlobject/rdbhost/__init__.py sqlobject/rdbhost/rdbhostconnection.py sqlobject/sqlite/__init__.py sqlobject/sqlite/sqliteconnection.py sqlobject/sybase/__init__.py sqlobject/sybase/sybaseconnection.py sqlobject/tests/__init__.py sqlobject/tests/dbtest.py sqlobject/tests/test_ForeignKey.py sqlobject/tests/test_NoneValuedResultItem.py sqlobject/tests/test_SQLMultipleJoin.py sqlobject/tests/test_SQLRelatedJoin.py sqlobject/tests/test_SingleJoin.py sqlobject/tests/test_aggregates.py sqlobject/tests/test_aliases.py sqlobject/tests/test_asdict.py sqlobject/tests/test_auto.py sqlobject/tests/test_basic.py sqlobject/tests/test_blob.py sqlobject/tests/test_boundattributes.py sqlobject/tests/test_cache.py sqlobject/tests/test_columns_order.py sqlobject/tests/test_combining_joins.py sqlobject/tests/test_comparison.py sqlobject/tests/test_constraints.py sqlobject/tests/test_converters.py sqlobject/tests/test_create_drop.py sqlobject/tests/test_csvexport.py sqlobject/tests/test_cyclic_reference.py sqlobject/tests/test_datetime.py sqlobject/tests/test_decimal.py sqlobject/tests/test_declarative.py sqlobject/tests/test_default_style.py sqlobject/tests/test_delete.py sqlobject/tests/test_distinct.py sqlobject/tests/test_empty.py sqlobject/tests/test_enum.py sqlobject/tests/test_events.py sqlobject/tests/test_exceptions.py sqlobject/tests/test_expire.py sqlobject/tests/test_groupBy.py sqlobject/tests/test_identity.py sqlobject/tests/test_indexes.py sqlobject/tests/test_inheritance.py sqlobject/tests/test_joins.py sqlobject/tests/test_joins_conditional.py sqlobject/tests/test_lazy.py sqlobject/tests/test_md5.py sqlobject/tests/test_new_joins.py sqlobject/tests/test_parse_uri.py sqlobject/tests/test_paste.py sqlobject/tests/test_perConnection.py sqlobject/tests/test_pickle.py sqlobject/tests/test_picklecol.py sqlobject/tests/test_psycopg_sslmode.py sqlobject/tests/test_reparent_sqlmeta.py sqlobject/tests/test_schema.py sqlobject/tests/test_select.py sqlobject/tests/test_select_through.py sqlobject/tests/test_setters.py sqlobject/tests/test_slice.py sqlobject/tests/test_sorting.py sqlobject/tests/test_sqlbuilder.py sqlobject/tests/test_sqlbuilder_dbspecific.py sqlobject/tests/test_sqlbuilder_importproxy.py sqlobject/tests/test_sqlbuilder_joins_instances.py sqlobject/tests/test_sqlite.py sqlobject/tests/test_sqlmeta_idName.py sqlobject/tests/test_sqlobject_admin.py sqlobject/tests/test_string_id.py sqlobject/tests/test_style.py sqlobject/tests/test_subqueries.py sqlobject/tests/test_transactions.py sqlobject/tests/test_unicode.py sqlobject/tests/test_validation.py sqlobject/tests/test_views.py sqlobject/util/__init__.py sqlobject/util/csvexport.py sqlobject/util/csvimport.py sqlobject/util/moduleloader.py sqlobject/util/threadinglocal.py sqlobject/versioning/__init__.py 
sqlobject/versioning/test/__init__.py sqlobject/versioning/test/test_version.pySQLObject-1.5.2/SQLObject.egg-info/top_level.txt0000644000175000017500000000001212322476200020656 0ustar phdphd00000000000000sqlobject SQLObject-1.5.2/SQLObject.egg-info/requires.txt0000644000175000017500000000022512322476200020532 0ustar phdphd00000000000000FormEncode>=1.1.1 [sqlite] pysqlite [sapdb] sapdb [postgresql] psycopg [firebird] kinterbasdb [sybase] Sybase [mysql] MySQLdb [mssql] adodbapiSQLObject-1.5.2/SQLObject.egg-info/PKG-INFO0000644000175000017500000000237612322476200017240 0ustar phdphd00000000000000Metadata-Version: 1.1 Name: SQLObject Version: 1.5.2 Summary: Object-Relational Manager, aka database wrapper Home-page: http://sqlobject.org/ Author: Ian Bicking Author-email: ianb@colorstudy.com License: LGPL Download-URL: https://pypi.python.org/pypi/SQLObject/1.5.2 Description: SQLObject is a popular *Object Relational Manager* for providing an object interface to your database, with tables as classes, rows as instances, and columns as attributes. SQLObject includes a Python-object-based query language that makes SQL more abstract, and provides substantial database independence for applications. Supports MySQL, PostgreSQL, SQLite, Firebird, Sybase, MSSQL and MaxDB (SAPDB). For development see the `subversion repository `_ Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL) Classifier: Programming Language :: Python Classifier: Topic :: Database Classifier: Topic :: Database :: Front-Ends Classifier: Topic :: Software Development :: Libraries :: Python Modules SQLObject-1.5.2/setup.py0000755000175000017500000000727412172541154014406 0ustar phdphd00000000000000#!/usr/bin/env python from imp import load_source from os.path import abspath, dirname, isfile, join try: from ez_setup import use_setuptools use_setuptools() from setuptools import setup is_setuptools = True except ImportError: from distutils.core import setup is_setuptools = False versionpath = join(abspath(dirname(__file__)), "sqlobject", "__version__.py") load_source("sqlobject_version", versionpath) from sqlobject_version import version subpackages = ['firebird', 'include', 'include.pydispatch', 'inheritance', 'manager', 'maxdb', 'mysql', 'mssql', 'postgres', 'rdbhost', 'sqlite', 'sybase', 'util', 'versioning'] kw = {} if is_setuptools: kw['entry_points'] = """ [paste.filter_app_factory] main = sqlobject.wsgi_middleware:make_middleware """ setup(name="SQLObject", version=version, description="Object-Relational Manager, aka database wrapper", long_description="""\ SQLObject is a popular *Object Relational Manager* for providing an object interface to your database, with tables as classes, rows as instances, and columns as attributes. SQLObject includes a Python-object-based query language that makes SQL more abstract, and provides substantial database independence for applications. Supports MySQL, PostgreSQL, SQLite, Firebird, Sybase, MSSQL and MaxDB (SAPDB). 
For development see the `subversion repository `_ """, classifiers=[ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)", "Programming Language :: Python", "Topic :: Database", "Topic :: Database :: Front-Ends", "Topic :: Software Development :: Libraries :: Python Modules", ], author="Ian Bicking", author_email="ianb@colorstudy.com", url="http://sqlobject.org/", download_url="https://pypi.python.org/pypi/SQLObject/%s" % version, license="LGPL", packages=["sqlobject"] + ['sqlobject.%s' % package for package in subpackages], scripts=["scripts/sqlobject-admin", "scripts/sqlobject-convertOldURI"], install_requires=["FormEncode>=1.1.1"], extras_require={ 'mysql': ['MySQLdb'], 'postgresql': ['psycopg'], # or pgdb from PyGreSQL 'sqlite': ['pysqlite'], 'firebird': ['kinterbasdb'], 'sybase': ['Sybase'], 'mssql': ['adodbapi'], # or pymssql 'sapdb': ['sapdb'], }, **kw ) # Send announce to: # sqlobject-discuss@lists.sourceforge.net # python-announce@python.org # python-list@python.org # db-sig@python.org # Email tempate: """ @@ INTRO What's new in SQLObject ======================= @@ CHANGES For a more complete list, please see the news: http://sqlobject.org/docs/News.html What is SQLObject ================= SQLObject is an object-relational mapper. Your database tables are described as classes, and rows are instances of those classes. SQLObject is meant to be easy to use and quick to get started with. It currently supports MySQL through the `MySQLdb` package, PostgreSQL through the `psycopg` package, SQLite, Firebird, MaxDB (SAP DB), MS SQL Sybase and Rdbhost. It should support Python versions back to 2.4. Where is SQLObject ================== Site: http://sqlobject.org Mailing list: https://lists.sourceforge.net/mailman/listinfo/sqlobject-discuss Archives: http://news.gmane.org/gmane.comp.python.sqlobject Download: https://pypi.python.org/pypi/SQLObject/@@ News and changes: http://sqlobject.org/docs/News.html -- Ian Bicking / ianb@colorstudy.com / http://blog.ianbicking.org """ SQLObject-1.5.2/docs/0000755000175000017500000000000012322476205013610 5ustar phdphd00000000000000SQLObject-1.5.2/docs/download.txt0000644000175000017500000000344511606632651016171 0ustar phdphd00000000000000Download SQLObject ++++++++++++++++++ The latest releases are always available on the `Python Package Index `_, and is installable with `easy_install `_. You can install the latest release with:: easy_install -U SQLObject You can install the latest version of SQLObject with:: easy_install SQLObject==dev You can install the latest bug fixing branch with:: easy_install SQLObject==bugfix If you want to require a specific revision (because, for instance, you need a bugfix that hasn't appeared in a release), you can put this in your `setuptools `_ using ``setup.py`` file:: setup(... install_requires=["SQLObject==bugfix,>=0.7.1dev-r1485"], ) This says that you *need* revision 1485 or higher. But it also says that you can aquire the "bugfix" version to try to get that. In fact, when you install ``SQLObject==bugfix`` you will be installing a specific version, and "bugfix" is just a kind of label for a way of acquiring the version (it points to a branch in the repository). 
Repository ---------- The SQLObject `Subversion `_ repository is located at http://svn.colorstudy.com/SQLObject/trunk If you are using a command-line Subversion client, you can check it out like:: svn co http://svn.colorstudy.com/SQLObject/trunk SQLObject If you are on Windows you may want to use `TortoiseSVN `_. .. image:: http://sflogo.sourceforge.net/sflogo.php?group_id=74338&type=10 :target: http://sourceforge.net/projects/sqlobject :class: noborder :align: center :height: 15 :width: 80 :alt: Get SQLObject at SourceForge.net. Fast, secure and Free Open Source software downloads SQLObject-1.5.2/docs/interface.py0000644000175000017500000002671410707102005016121 0ustar phdphd00000000000000""" This is a not-very-formal outline of the interface that SQLObject provides. While its in the form of a formal interface, it doesn't use any interface system. """ class Interface(object): pass class ISQLObject(Interface): sqlmeta = """ A class or instance representing internal state and methods for introspecting this class. ``MyClass.sqlmeta`` is a class, and ``myInstance.sqlmeta`` is an instance of this class. So every instance gets its own instance of the metadata. This object follows the ``Isqlmeta`` interface. """ # classmethod def get(id, connection=None): """ Returns the object with the given `id`. If `connection` is given, then get the object from the given connection (otherwise use the default or configured connection) It raises ``SQLObjectNotFound`` if no row exists with that ID. """ # classmethod def selectBy(connection=None, **attrs): """ Performs a ``SELECT`` in the given `connection` (or default connection). Each of the keyword arguments should be a column, and the equality comparisons will be ``ANDed`` together to produce the result. """ # classmethod def dropTable(ifExists=False, dropJoinTables=True, cascade=False, connection=None): """ Drops this table from the database. If ``ifExists`` is true, then it is not an error if the table doesn't exist. Join tables (mapping tables for many-to-many joins) are dropped if this class comes alphabetically before the other join class, and if ``dropJoinTables`` is true. ``cascade`` is passed to the connection, and if true tries to drop tables that depend on this table. """ # classmethod def createTable(ifNotExists=False, createJoinTables=True, createIndexes=True, connection=None): """ Creates the table. If ``ifNotExists`` is true, then it is not an error if the table already exists. Join tables (mapping tables for many-to-many joins) are created if this class comes alphabetically before the other join class, and if ``createJoinTables`` is true. If ``createIndexes`` is true, indexes are also created. """ # classmethod def createTableSQL(createJoinTables=True, connection=None, createIndexes=True): """ Returns the SQL that would be sent with the analogous call to ``Class.createTable(...)`` """ def sync(): """ This will refetch the data from the database, putting it in sync with the database (in case another process has modified the database since this object was fetched). It will raise ``SQLObjectNotFound`` if the row has been deleted. This will call ``self.syncUpdate()`` if ``lazyUpdates`` are on. """ def syncUpdate(): """ If ``.sqlmeta.lazyUpdates`` is true, then this method must be called to push accumulated updates to the server. """ def expire(): """ This will remove all the column information from the object. The next time this information is used, a ``SELECT`` will be made to re-fetch the data. This is like a lazy ``.sync()``. 
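        A rough usage sketch (``Person`` is a hypothetical SQLObject
        subclass, used here only for illustration)::

            p = Person.get(1)
            p.expire()           # cached column values are dropped
            print p.firstName    # the next access re-fetches the row with a SELECT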
""" def set(**attrs): """ This sets many column attributes at once. ``obj.set(a=1, b=2)`` is equivalent to ``obj.a=1; obj.b=2``, except that it will be grouped into one ``UPDATE`` """ def destroySelf(): """ Deletes this row from the database. This is called on instances, not on the class. The object still persists, because objects cannot be deleted from the Python process (they can only be forgotten about, at which time they are garbage collected). The object becomes obsolete, and further activity on it will raise errors. """ def sqlrepr(obj, connection=None): """ Returns the SQL representation of the given object, for the configured database connection. """ class Isqlmeta(Interface): table = """ The name of the table in the database. This is derived from ``style`` and the class name if no explicit name is given. """ idName = """ The name of the primary key column in the database. This is derived from ``style`` if no explicit name is given. """ idType = """ A function that coerces/normalizes IDs when setting IDs. This is ``int`` by default (all IDs are normalized to integers). """ style = """ An instance of the ``IStyle`` interface. This maps Python identifiers to database names. """ lazyUpdate = """ A boolean (default false). If true, then setting attributes on instances (or using ``inst.set(...)`` will not send ``UPDATE`` queries immediately (you must call ``inst.syncUpdates()`` or ``inst.sync()`` first). """ defaultOrder = """ When selecting objects and not giving an explicit order, this attribute indicates the default ordering. It is like this value is passed to ``.select()`` and related methods; see those method's documentation for details. """ cacheValues = """ A boolean (default true). If true, then the values in the row are cached as long as the instance is kept (and ``inst.expire()`` is not called). If false, then every attribute access causes a ``SELECT`` (which is absurdly inefficient). """ registry = """ Because SQLObject uses strings to relate classes, and these strings do not respect module names, name clashes will occur if you put different systems together. This string value serves as a namespace for classes. """ fromDatabase = """ A boolean (default false). If true, then on class creation the database will be queried for the table's columns, and any missing columns (possible all columns) will be added automatically. """ columns = """ A dictionary of ``{columnName: anSOColInstance}``. You can get information on the columns via this read-only attribute. """ columnList = """ A list of the values in ``columns``. Sometimes a stable, ordered version of the columns is necessary; this is used for that. """ columnDefinitions = """ A dictionary like ``columns``, but contains the original column definitions (which are not class-specific, and have no logic). """ joins = """ A list of all the Join objects for this class. """ indexes = """ A list of all the indexes for this class. """ # Instance attributes expired = """ A boolean. If true, then the next time this object's column attributes are accessed a query will be run. """ # Methods def addColumn(columnDef, changeSchema=False, connection=None): """ Adds the described column to the table. If ``changeSchema`` is true, then an ``ALTER TABLE`` query is called to change the database. Attributes given in the body of the SQLObject subclass are collected and become calls to this method. 
""" def delColumn(column, changeSchema=False, connection=None): """ Removes the given column (either the definition from ``columnDefinition`` or the SOCol object from ``columns``). If ``changeSchema`` is true, then an ``ALTER TABLE`` query is made. """ def addColumnsFromDatabase(connection=None): """ Adds all the columns from the database that are not already defined. If the ``fromDatabase`` attribute is true, then this is called on class instantiation. """ def addJoin(joinDef): """ Adds a join to the class. """ def delJoin(joinDef): """ Removes a join from the class. """ def addIndex(indexDef): """ Adds the index to the class. """ def asDict(): """ Returns the SQLObject instance as a dictionary (column names as keys, column values as values). Use like:: ASQLObjectClass(a=1, b=2).asDict() Which should return ``{'a': 1, 'b': 2}``. Note: this is a *copy* of the object's columns; changing the dictionary will not effect the object it was created from. """ class ICol(Interface): def __init__(name=None, **kw): """ Creates a column definition. This is an object that describes a column, basically just holding the keywords for later creating an ``SOCol`` (or subclass) instance. Subclasses of ``Col`` (which implement this interface) typically create the related subclass of ``SOCol``. """ name = """ The name of the column. If this is not given in the constructor, ``SQLObject`` will set this attribute from the variable name this object is assigned to. """ class ISOCol(Interface): """ This is a column description that is bound to a single class. This cannot be shared by subclasses, so a new instance is created for every new class (in cases where classes share columns). These objects are created by ``Col`` instances, you do not create them directly. """ name = """ The name of the attribute that points to this column. This is the Python name of the column. """ columnDef = """ The ``Col`` object that created this instance. """ immutable = """ Boolean, default false. If true, then this column cannot be modified. It cannot even be modified at construction, rendering the table read-only. This will probably change in the future, as it renders the option rather useless. """ cascade = """ If a foreign key, then this indicates if deletes in that foreign table should cascade into this table. This can be true (deletes cascade), false (the default, they do not cascade), or ``'null'`` (this column is set to ``NULL`` if the foreign key is deleted). """ constraints = """ A list of ... @@? """ notNone = """ Boolean, default false. It true, then ``None`` (aka ``NULL``) is not allowed in this column. Also the ``notNull`` attribute can be used. """ foreignKey = """ If not None, then this column points to another table. The attribute is the name (a string) of that table/class. """ dbName = """ The name of this column in the database. """ alternateID = """ Boolean, default false. If true, then this column is assumed to be unique, and you can fetch individual rows based on this column's value. This will add a method ``byAttributeName`` to the parent SQLObject subclass. """ unique = """ Boolean, default false. If this column is unique; effects the database ``CREATE`` statement, and is implied by ``alternateID=True``. """ validator = """ A IValidator object. All setting of this column goes through the ``fromPython`` method of the validator. All getting of this column from the database goes through ``toPython``. """ default = """ A value that holds the default value for this column. 
If the default value passed in is a callable, then that value is called to return a default (a typical example being ``DateTime.now``). """ sqlType = """ The SQL type of the column, overriding the default column type. """ SQLObject-1.5.2/docs/FAQ.txt0000644000175000017500000005073211502551444014765 0ustar phdphd00000000000000+++++++++++++ SQLObject FAQ +++++++++++++ .. contents:: SQLExpression ------------- In `SomeTable.select(SomeTable.q.Foo > 30)` why doesn't the inner parameter, `SomeTable.q.Foo > 30`, get evaluated to some boolean value? `q` is an object that returns special attributes of type `sqlbuilder.SQLExpression`. SQLExpression is a special class that overrides almost all Python magic methods and upon any operation instead of evaluating it constructs another instance of SQLExpression that remembers what operation it has to do. Similar to a symbolic algebra. Example: SQLExpression("foo") > 30 produces SQLExpression("foo", ">", 30) (well, it really produces SQLExpression(SQLExpression("foo")...)) How does the select(...) method know what to do? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In short, select() recursively evaluates the top-most SQLExpression to a string: SQLExpression("foo", ">", 30) => "foo > 30" and passes the result as a string to the SQL backend. The longer but more detailed and correct explanation is that select() produces an instance of SelectResults_ class that upon being iterated over produces an instance of Iteration class that upon calling its next() method (it is iterator!) construct the SQL query string, passes it to the backend, fetches the results, wraps every row as SQLObject instance and passes them back to the user. .. _SelectResults: SelectResults.html For the details of the implementation see sqlobject/main.py for SQLObject, sqlobject/sqlbuilder.py for SQLExpression, sqlobject/dbconnection.py for DBConnection class (that constructs the query strings) and Iteration class, and different subdirectories of sqlobject for concrete implementations of connection classes - different backends require different query strings. Why there is no __len__? ------------------------ There are reasons why there is no __len__ method, though many people think having those make them feel more integrated into Python. One is that len(foo) is expected to be fast, but issuing a COUNT query can be slow. Worse, often this causes the database to do essentially redundant work when the actual query is performed (generally taking the len of a sequence is followed by accessing items from that sequence). Another is that list(foo) implicitly tries to do a len first, as an optimization (because len is expected to be cheap -- see previous point). Worse, it swallows *all* exceptions that occur during that call to __len__, so if it fails (e.g. there's a typo somewhere in the query), the original cause is silently discarded, and instead you're left with mysterious errors like "current transaction is aborted, commands ignored until end of transaction block" for no apparent reason. How can I do a LEFT JOIN? ------------------------- The short: you can't. You don't need to. That's a relational way of thinking, not an object way of thinking. But it's okay! It's not hard to do the same thing, even if it's not with the same query. For these examples, imagine you have a bunch of customers, with contacts. Not all customers have a contact, some have several. 
The left join would look like:: SELECT customer.id, customer.first_name, customer.last_name, contact.id, contact.address FROM customer LEFT JOIN contact ON contact.customer_id = customer.id Simple ~~~~~~ :: for customer in Customer.select(): print customer.firstName, customer.lastName for contact in customer.contacts: print ' ', contact.phoneNumber The effect is the same as the left join -- you get all the customers, and you get all their contacts. The problem, however, is that you will be executing more queries -- a query for each customer to fetch the contacts -- where with the left join you'd only do one query. The actual amount of information returned from the database will be the same. There's a good chance that this won't be significantly slower. I'd advise doing it this way unless you hit an actual performance problem. Efficient ~~~~~~~~~ Lets say you really don't want to do all those queries. Okay, fine:: custContacts = {} for contact in Contact.select(): custContacts.setdefault(contact.customerID, []).append(contact) for customer in Customer.select(): print customer.firstName, customer.lastName for contact in custContacts.get(customer.id, []): print ' ', contact.phoneNumber This way there will only be at most two queries. It's a little more crude, but this is an optimization, and optimizations often look less than pretty. But, say you don't want to get everyone, just some group of people (presumably a large enough group that you still need this optimization):: query = Customer.q.firstName.startswith('J') custContacts = {} for contact in Contact.select(AND(Contact.q.customerID == Customer.q.id, query)): custContacts.setdefault(contact.customerID, []).append(contact) for customer in Customer.select(query): print customer.firstName, customer.lastName for contact in custContacts.get(customer.id, []): print ' ', contact.phoneNumber SQL-wise ~~~~~~~~ Use LEFTJOIN() from SQLBuilder_. How can I join a table with itself? ----------------------------------- Use Alias from SQLBuilder_. See example_. .. _SQLBuilder: SQLBuilder.html .. _example: SQLObject.html#how-can-i-join-a-table-with-itself How can I define my own intermediate table in my Many-to-Many relationship? --------------------------------------------------------------------------- .. note:: In User and Role, SQLRelatedJoin is used with createRelatedTable=False so the intermediate table is not created automatically. We also set the intermediate table name with intermediateTable='user_roles'. UserRoles is the definition of our intermediate table. UserRoles creates a unique index to make sure we don't have duplicate data in the database. We also added an extra field called active which has a boolean value. The active column might be used to activate/deactivate a given role for a user in this example. Another common field to add in this an intermediate table might be a sort field. If you want to get a list of rows from the intermediate table directly add a MultipleJoin to User or Role class. We'll expand on the User and Role example and define our own UserRoles class which will be the intermediate table for the User and Role Many-to-Many relationship. Example:: >>> class User(SQLObject): ... class sqlmeta: ... table = "user_table" ... username = StringCol(alternateID=True, length=20) ... roles = SQLRelatedJoin('Role', ... intermediateTable='user_roles', ... createRelatedTable=False) >>> class Role(SQLObject): ... name = StringCol(alternateID=True, length=20) ... users = SQLRelatedJoin('User', ... intermediateTable='user_roles', ... 
createRelatedTable=False) >>> class UserRoles(SQLObject): ... class sqlmeta: ... table = "user_roles" ... user = ForeignKey('User', notNull=True, cascade=True) ... role = ForeignKey('Role', notNull=True, cascade=True) ... active = BoolCol(notNull=True, default=False) ... unique = index.DatabaseIndex(user, role, unique=True) How Does Inheritance Work? -------------------------- SQLObject is not intended to represent every Python inheritance structure in an RDBMS -- rather it is intended to represent RDBMS structures as Python objects. So lots of things you can do in Python you can't do with SQLObject classes. However, some form of inheritance is possible. One way of using this is to create local conventions. Perhaps:: class SiteSQLObject(SQLObject): _connection = DBConnection.MySQLConnection(user='test', db='test') _style = MixedCaseStyle() # And maybe you want a list of the columns, to autogenerate # forms from: def columns(self): return [col.name for col in self._columns] Since SQLObject doesn't have a firm introspection mechanism (at least not yet) the example shows the beginnings of a bit of ad hoc introspection (in this case exposing the ``_columns`` attribute in a more pleasing/public interface). However, this doesn't relate to *database* inheritance at all, since we didn't define any columns. What if we do? :: class Person(SQLObject): firstName = StringCol() lastName = StringCol() class Employee(Person): position = StringCol() Unfortunately, the resultant schema probably doesn't look like what you might have wanted:: CREATE TABLE person ( id INT PRIMARY KEY, first_name TEXT, last_name TEXT ); CREATE TABLE employee ( id INT PRIMARY KEY first_name TEXT, last_name TEXT, position TEXT ) All the columns from ``person`` are just repeated in the ``employee`` table. What's more, an ID for a Person is distinct from an ID for an employee, so for instance you must choose ``ForeignKey("Person")`` or ``ForeignKey("Employee")``, you can't have a foreign key that sometimes refers to one, and sometimes refers to the other. Altogether, not very useful. You probably want a ``person`` table, and then an ``employee`` table with a one-to-one relation between the two. Of course, you can have that, just create the appropriate classes/tables -- but it will appear as two distinct classes, and you'd have to do something like ``Person(1).employee.position``. Of course, you can always create the necessary shortcuts, like:: class Person(SQLObject): firstName = StringCol() lastName = StringCol() def _get_employee(self): value = Employee.selectBy(person=self) if value: return value[0] else: raise AttributeError, '%r is not an employee' % self def _get_isEmployee(self): value = Employee.selectBy(person=self) # turn into a bool: return not not value def _set_isEmployee(self, value): if value: # Make sure we are an employee... if not self.isEmployee: Empoyee.new(person=self, position=None) else: if self.isEmployee: self.employee.destroySelf() def _get_position(self): return self.employee.position def _set_position(self, value): self.employee.position = value class Employee(SQLObject): person = ForeignKey('Person') position = StringCol() There is also another kind of inheritance. See Inheritance.html_ .. _Inheritance.html: Inheritance.html Composite/Compound Attributes ----------------------------- A composite attribute is an attribute formed from two columns. 
For example:: CREATE TABLE invoice_item ( id INT PRIMARY KEY, amount NUMERIC(10, 2), currency CHAR(3) ); Now, you'll probably want to deal with one amount/currency value, instead of two columns. SQLObject doesn't directly support this, but it's easy (and encouraged) to do it on your own:: class InvoiceItem(SQLObject): amount = Currency() currency = StringChar(length=3) def _get_price(self): return Price(self.amount, self.currency) def _set_price(self, price): self.amount = price.amount self.currency = price.currency class Price(object): def __init__(self, amount, currency): self._amount = amount self._currency = currency def _get_amount(self): return self._amount amount = property(_get_amount) def _get_currency(self): return self._currency currency = property(_get_currency) def __repr__(self): return '' % (self.amount, self.currency) You'll note we go to some trouble to make sure that ``Price`` is an immutable object. This is important, because if ``Price`` wasn't and someone changed an attribute, the containing ``InvoiceItem`` instance wouldn't detect the change and update the database. (Also, since ``Price`` doesn't subclass ``SQLObject``, we have to be explicit about creating properties) Some people refer to this sort of class as a *Value Object*, that can be used similar to how an integer or string is used. You could also use a mutable composite class:: class Address(SQLObject): street = StringCol() city = StringCol() state = StringCol(length=2) latitude = FloatCol() longitude = FloatCol() def _init(self, id): SQLObject._init(self, id) self._coords = SOCoords(self) def _get_coords(self): return self._coords class SOCoords(object): def __init__(self, so): self._so = so def _get_latitude(self): return self._so.latitude def _set_latitude(self, value): self._so.latitude = value latitude = property(_get_latitude, set_latitude) def _get_longitude(self): return self._so.longitude def _set_longitude(self, value): self._so.longitude = value longitude = property(_get_longitude, set_longitude) Pretty much a proxy, really, but ``SOCoords`` could contain other logic, could interact with non-SQLObject-based latitude/longitude values, or could be used among several objects that have latitude/longitude columns. Non-Integer IDs --------------- Yes, you can use non-integer IDs. If you use non-integer IDs, you will not be able to use automatic ``CREATE TABLE`` generation (i.e., ``createTable``); SQLObject can create tables with int or str IDs. You also will have to give your own ID values when creating an object, like:: color = Something(id="blue", r=0, b=100, g=0) IDs are, and always will in future versions, be considered immutable. Right now that is not enforced; you can assign to the ``id`` attribute. But if you do you'll just mess everything up. This will probably be taken away sometime to avoid possibly confusing bugs (actually, assigning to ``id`` is almost certain to cause confusing bugs). If you are concerned about enforcing the type of IDs (which can be a problem even with integer IDs) you may want to do this:: def Color(SQLObject): def _init(self, id, connection=None): id = str(id) SQLObject._init(self, id, connection) Instead of ``str()`` you may use ``int()`` or whatever else you want. This will be resolved in a future version when ID column types can be declared like other columns. Additionally you can set idType=str in you SQLObject class. 
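Putting the pieces above together, a minimal sketch of a string-keyed class
might look like this (the ``Color`` class and its columns are invented for
the example; remember that you must supply the ``id`` yourself when creating
rows)::

    class Color(SQLObject):
        class sqlmeta:
            idType = str
        r = IntCol()
        g = IntCol()
        b = IntCol()

    Color.createTable(ifNotExists=True)   # str IDs are supported by createTable
    blue = Color(id="blue", r=0, g=0, b=100)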
Binary Values ------------- Binary values can be difficult to store in databases, as SQL doesn't have a widely-implemented way to express binaries as literals, and there's differing support in database. The module sqlobject.col defines validators and column classes that to some extent support binary values. There is BLOBCol that extends StringCol and allow to store binary values; currently it works only with PostgreSQL and MySQL. PickleCol extends BLOBCol and allows to store any object in the column; the column, naturally, pickles the object upon assignment and unpickles it upon retrieving the data from the DB. Another possible way to keep binary data in a database is by using encoding. Base 64 is a good encoding, reasonably compact but also safe. As an example, imagine you want to store images in the database:: class Image(SQLObject): data = StringCol() height = IntCol() width = IntCol() def _set_data(self, value): self._SO_set_data(value.encode('base64')) def _get_data(self, value): return self._SO_get_data().decode('base64') Reloading Modules ----------------- If you've tried to reload a module that defines SQLObject subclasses, you've probably encountered various odd errors. The short answer: you can't reload these modules. The long answer: reloading modules in Python doesn't work very well. Reloading actually means *re-running* the module. Every ``class`` statement creates a class -- but your old classes don't disappear. When you reload a module, new classes are created, and they take over the names in the module. SQLObject, however, doesn't search the names in a module to find a class. When you say ``ForeignKey('SomeClass')``, SQLObject looks for any SQLObject subclass anywhere with the name ``SomeClass``. This is to avoid problems with circular imports and circular dependencies, as tables have forward- and back-references, and other circular dependencies. SQLObject resolves these dependencies lazily. But when you reload a module, suddenly there will be two SQLObject classes in the process with the same name. SQLObject doesn't know that one of them is obsolete. And even if it did, it doesn't know every other place in the system that has a reference to that obsolete class. For this reason and several others, reloading modules is highly error-prone and difficult to support. Python Keywords --------------- If you have a table column that is a Python keyword, you should know that the Python attribute doesn't have to match the name of the column. See `Irregular Naming`_ in the documentation. .. _Irregular Naming: SQLObject.html#irregular-naming Lazy Updates and Insert ----------------------- `Lazy updates `_ allow you to defer sending ``UPDATES`` until you synchronize the object. However, there is no way to do a lazy insert; as soon as you create an instance the ``INSERT`` is executed. The reason for this limit is that each object needs a database ID, and in many databases you cannot attain an ID until you create a row. Mutually referencing tables --------------------------- How can I create mutually referencing tables? For the code:: class Person(SQLObject): role = ForeignKey("Role") class Role(SQLObject): person = ForeignKey("Person") Person.createTable() Role.createTable() Postgres raises ProgrammingError: ERROR: relation "role" does not exist. 
The correct way is to delay constraints creation until all tables are created:: class Person(SQLObject): role = ForeignKey("Role") class Role(SQLObject): person = ForeignKey("Person") constraints = Person.createTable(applyConstraints=False) constraints += Role.createTable(applyConstraints=False) for constraint in constraints: connection.query(constraint) What about GROUP BY, UNION, etc? -------------------------------- In short - not every query can be represented in SQLObject. SQLOBject's objects are instances of "table" clasess:: class MyTable(SQLObject): ... my_table_row = MyTable.get(id) Now my_table_row is an instance of MyTable class and represents a row in the my_table table. But for a statement with GROUP BY like this:: SELECT my_column, COUNT(*) FROM my_table GROUP BY my_column; there is no table, there is no corresponding "table" class, and SQLObject cannot return a list of meaningful objects. You can use a lower-level machinery available in SQLBuilder_. How to do mass-insertion? ------------------------- Mass-insertion using high-level API in SQLObject is slow. There are many reasons for that. First, on creation SQLObject instances pass all values through validators/converters which is convenient but takes time. Second, after an INSERT query SQLObject executes a SELECT query to get back autogenerated values (id and timestamps). Third, there is caching and cache maintaining. Most of this is unnecessary for mass-insertion, hence high-level API is unsuitable. Less convenient (no validators) but much faster API is Insert_ from SQLBuilder_. .. _Insert: SQLBuilder.html#insert How can I specify the MySQL engine to use, or tweak other SQL-engine specific features? --------------------------------------------------------------------------------------- You can *ALTER* the table just after creation using the ``sqlmeta`` attribute ``createSQL``, for example:: class SomeObject(SQLObject): class sqlmeta: createSQL = { 'mysql' : 'ALTER TABLE some_object ENGINE InnoDB' } # your columns here Maybe you want to specify the charset too? No problem:: class SomeObject(SQLObject): class sqlmeta: createSQL = { 'mysql' : [ 'ALTER TABLE some_object ENGINE InnoDB', '''ALTER TABLE some_object CHARACTER SET utf8 COLLATE utf8_estonian_ci'''] } .. image:: http://sflogo.sourceforge.net/sflogo.php?group_id=74338&type=10 :target: http://sourceforge.net/projects/sqlobject :class: noborder :align: center :height: 15 :width: 80 :alt: Get SQLObject at SourceForge.net. Fast, secure and Free Open Source software downloads SQLObject-1.5.2/docs/presentation-2004-11/0000755000175000017500000000000012322476205017125 5ustar phdphd00000000000000SQLObject-1.5.2/docs/presentation-2004-11/ui/0000755000175000017500000000000012322476205017542 5ustar phdphd00000000000000SQLObject-1.5.2/docs/presentation-2004-11/ui/bodybg.gif0000644000175000017500000002360710707102005021474 0ustar phdphd00000000000000GIF89aæNÄÿÀÀÀµµµ½½½ÆÆÆÎÎÎÖÖÖÞÞÞçççïïï÷÷÷çïïï÷÷÷ÿÿÆÎÎÎÖÖÖÞÞÞççµ½½½ÆÆç÷÷ÎÞÞÖççÞïïÆÖÖ½ÎÎÎçç!ù,æNÿà!ÏuÔó@ŽšB¤ÌôlM€µÔ|?)  
How can I specify the MySQL engine to use, or tweak other SQL-engine specific features?
----------------------------------------------------------------------------------------

You can *ALTER* the table just after creation using the ``sqlmeta``
attribute ``createSQL``, for example::

    class SomeObject(SQLObject):
        class sqlmeta:
            createSQL = { 'mysql' : 'ALTER TABLE some_object ENGINE InnoDB' }
        # your columns here

Maybe you want to specify the charset too? No problem::

    class SomeObject(SQLObject):
        class sqlmeta:
            createSQL = { 'mysql' : [
                'ALTER TABLE some_object ENGINE InnoDB',
                '''ALTER TABLE some_object CHARACTER SET utf8
                   COLLATE utf8_estonian_ci'''] }

.. image:: http://sflogo.sourceforge.net/sflogo.php?group_id=74338&type=10
   :target: http://sourceforge.net/projects/sqlobject
   :class: noborder
   :align: center
   :height: 15
   :width: 80
   :alt: Get SQLObject at SourceForge.net. Fast, secure and Free Open Source software downloads

SQLObject-1.5.2/docs/presentation-2004-11/0000755000175000017500000000000012322476205017125 5ustar phdphd00000000000000
SQLObject-1.5.2/docs/presentation-2004-11/ui/0000755000175000017500000000000012322476205017542 5ustar phdphd00000000000000
SQLObject-1.5.2/docs/presentation-2004-11/ui/bodybg.gif0000644000175000017500000002360710707102005021474 0ustar phdphd00000000000000
(binary GIF image data omitted)
SQLObject-1.5.2/docs/presentation-2004-11/ui/s5-core.css0000644000175000017500000000060610707102005021520 0ustar phdphd00000000000000
/* Do not edit or override these styles!
   The system will likely break if you do. */
div#header, div#footer, div.slide {position: absolute;}
html>body div#header, html>body div#footer, html>body div.slide {position: fixed;}
div.slide {visibility: hidden;}
#slide0 {visibility: visible;}
div#controls {position: absolute;}
#footer>div#controls {position: fixed;}
.handout {display: none;}
SQLObject-1.5.2/docs/presentation-2004-11/ui/framing.css0000644000175000017500000000173010707102005021675 0ustar phdphd00000000000000
/* The following styles size, place, and layer the slide components.
Edit these if you want to change the overall slide layout. The commented lines can be uncommented (and modified, if necessary) to help you with the rearrangement process. */ div#header, div#footer, div.slide {width: 100%; top: 0; left: 0;} div#header {top: 0; height: 2em; z-index: 1;} div#footer {top: auto; bottom: 0; height: 1.75em; z-index: 5;} div.slide {top: 0; width: 92%; padding: 3.5em 4% 4%; z-index: 2;} div#controls {left: 50%; top: 0; width: 50%; height: 100%; z-index: 1;} #footer>div#controls {bottom: 0; top: auto; height: auto;} div#controls form {position: absolute; bottom: 0; right: 0; width: 100%; margin: 0;} #currentSlide {position: absolute; width: 10%; left: 45%; bottom: 1.5em; z-index: 10;} html>body #currentSlide {position: fixed;} /* div#header {background: #FCC;} div#footer {background: #CCF;} div#controls {background: #BBD;} div#currentSlide {background: #FFC;} */ SQLObject-1.5.2/docs/presentation-2004-11/ui/pretty.css0000644000175000017500000000611010707102005021566 0ustar phdphd00000000000000/* Following are the presentation styles -- edit away! Note that the 'body' font size may have to be changed if the resolution is different than expected. */ /* url(bodybg.gif) */ body {background: #fff -16px 0 no-repeat; color: #000; font-size: 2em;} :link, :visited {text-decoration: none;} #controls :active {color: #88A !important;} #controls :focus {outline: 1px dotted #227;} h1, h2, h3, h4 {font-size: 100%; margin: 0; padding: 0; font-weight: inherit;} ul, pre {margin: 0; line-height: 1em;} html, body {margin: 0; padding: 0;} blockquote, q {font-style: italic;} blockquote {padding: 0 2em 0.5em; margin: 0 1.5em 0.5em; text-align: center; font-size: 1em;} blockquote p {margin: 0;} blockquote i {font-style: normal;} blockquote b {display: block; margin-top: 0.5em; font-weight: normal; font-size: smaller; font-style: normal;} blockquote b i {font-style: italic;} kbd {font-weight: bold; font-size: 1em;} sup {font-size: smaller; line-height: 1px;} code {padding: 2px 0.25em; font-weight: bold; color: #533;} code.bad, code del {color: red;} code.old {color: silver;} pre {padding: 0; margin: 0.25em 0 0.5em 0.5em; color: #533; font-size: 90%;} pre code {display: block;} ul {margin-left: 5%; margin-right: 7%; list-style: disc;} li {margin-top: 0.75em; margin-right: 0;} ul ul {line-height: 1;} ul ul li {margin: .2em; font-size: 85%; list-style: square;} img.leader {display: block; margin: 0 auto;} div#header, div#footer {background: #005; color: #BBC; font-family: Verdana, Helvetica, sans-serif;} div#header {background: #005 -16px 0 no-repeat; line-height: 1px;} div#footer {font-size: 0.5em; font-weight: bold; padding: 1em 0;} #footer h1, #footer h2 {display: block; padding: 0 1em;} #footer h2 {font-style: italic;} div.long {font-size: 0.75em;} .slide h1 {position: absolute; top: 0.0em; left: 0px; z-index: 1; margin: 0; padding: 0.3em 0 0 10px; white-space: nowrap; font: bold 150%/1em Helvetica, sans-serif; text-transform: capitalize; color: #DDE; background: #005;} .slide h3 {font-size: 130%;} h1 abbr {font-variant: small-caps;} div#controls {position: absolute; z-index: 1; left: 50%; top: 0; width: 50%; height: 100%; text-align: right;} #footer>div#controls {position: fixed; bottom: 0; padding: 1em 0; top: auto; height: auto;} div#controls form {position: absolute; bottom: 0; right: 0; width: 100%; margin: 0; padding: 0;} div#controls a {font-size: 1.7em; padding: 0; margin: 0 0.5em; background: #005; border: none; color: #779; cursor: pointer;} div#controls select {visibility: 
hidden; background: #DDD; color: #227;} div#controls div:hover select {visibility: visible;} #currentSlide {text-align: center; font-size: 0.5em; color: #77B;} #slide0 {padding-top: 3.5em; font-size: 90%;} #slide0 h1 {position: static; margin: 1em 0 1.33em; padding: 0; font: bold 2em Helvetica, sans-serif; white-space: normal; color: #000; background: transparent;} #slide0 h3 {margin-top: 0.5em; font-size: 1.5em;} #slide0 h4 {margin-top: 0; font-size: 1em;} ul.urls {list-style: none; display: inline; margin: 0;} .urls li {display: inline; margin: 0;} .note {display: none;} SQLObject-1.5.2/docs/presentation-2004-11/ui/opera.css0000644000175000017500000000032210707102005021344 0ustar phdphd00000000000000/* DO NOT CHANGE THESE unless you really want to break Opera Show */ div.slide { visibility: visible !important; position: static !important; page-break-before: always; } #slide0 {page-break-before: avoid;} SQLObject-1.5.2/docs/presentation-2004-11/ui/slides.js0000644000175000017500000001542310707102005021355 0ustar phdphd00000000000000// S5 slides.js -- released under CC by-sa 2.0 license // // Please see http://www.meyerweb.com/eric/tools/s5/credits.html for information // about all the wonderful and talented contributors to this code! var snum = 0; var smax = 1; var undef; var slcss = 1; var isIE = navigator.appName == 'Microsoft Internet Explorer' ? 1 : 0; var isOp = navigator.userAgent.indexOf('Opera') > -1 ? 1 : 0; var isGe = navigator.userAgent.indexOf('Gecko') > -1 && navigator.userAgent.indexOf('Safari') < 1 ? 1 : 0; var slideCSS = document.getElementById('slideProj').href; function isClass(object, className) { return (object.className.search('(^|\\s)' + className + '(\\s|$)') != -1); } function GetElementsWithClassName(elementName,className) { var allElements = document.getElementsByTagName(elementName); var elemColl = new Array(); for (i = 0; i< allElements.length; i++) { if (isClass(allElements[i], className)) { elemColl[elemColl.length] = allElements[i]; } } return elemColl; } function isParentOrSelf(element, id) { if (element == null || element.nodeName=='BODY') return false; else if (element.id == id) return true; else return isParentOrSelf(element.parentNode, id); } function nodeValue(node) { var result = ""; if (node.nodeType == 1) { var children = node.childNodes; for ( i = 0; i < children.length; ++i ) { result += nodeValue(children[i]); } } else if (node.nodeType == 3) { result = node.nodeValue; } return(result); } function slideLabel() { var slideColl = GetElementsWithClassName('div','slide'); var list = document.getElementById('jumplist'); smax = slideColl.length; for (n = 0; n < smax; n++) { var obj = slideColl[n]; var did = 'slide' + n.toString(); obj.setAttribute('id',did); if(isOp) continue; var otext = ''; var menu = obj.firstChild; if (!menu) continue; // to cope with empty slides while (menu && menu.nodeType == 3) { menu = menu.nextSibling; } if (!menu) continue; // to cope with slides with only text nodes var menunodes = menu.childNodes; for (o = 0; o < menunodes.length; o++) { otext += nodeValue(menunodes[o]); } list.options[list.length] = new Option(n+' : ' +otext,n); } } function currentSlide() { var cs; if (document.getElementById) { cs = document.getElementById('currentSlide'); } else { cs = document.currentSlide; } cs.innerHTML = '' + snum + '<\/span> ' + '\/<\/span> ' + '' + (smax-1) + '<\/span>'; if (snum == 0) { cs.style.visibility = 'hidden'; } else { cs.style.visibility = 'visible'; } } function go(inc) { if 
(document.getElementById("slideProj").disabled) return; var cid = 'slide' + snum; if (inc != 'j') { snum += inc; lmax = smax - 1; if (snum > lmax) snum = 0; if (snum < 0) snum = lmax; } else { snum = parseInt(document.getElementById('jumplist').value); } var nid = 'slide' + snum; var ne = document.getElementById(nid); if (!ne) { ne = document.getElementById('slide0'); snum = 0; } document.getElementById(cid).style.visibility = 'hidden'; ne.style.visibility = 'visible'; document.getElementById('jumplist').selectedIndex = snum; currentSlide(); } function toggle() { var slideColl = GetElementsWithClassName('div','slide'); var obj = document.getElementById('slideProj'); if (!obj.disabled) { obj.disabled = true; for (n = 0; n < smax; n++) { var slide = slideColl[n]; slide.style.visibility = 'visible'; } } else { obj.disabled = false; for (n = 0; n < smax; n++) { var slide = slideColl[n]; slide.style.visibility = 'hidden'; } slideColl[snum].style.visibility = 'visible'; } } function showHide(action) { var obj = document.getElementById('jumplist'); switch (action) { case 's': obj.style.visibility = 'visible'; break; case 'h': obj.style.visibility = 'hidden'; break; case 'k': if (obj.style.visibility != 'visible') { obj.style.visibility = 'visible'; } else { obj.style.visibility = 'hidden'; } break; } } // 'keys' code adapted from MozPoint (http://mozpoint.mozdev.org/) function keys(key) { if (!key) { key = event; key.which = key.keyCode; } switch (key.which) { case 10: // return case 13: // enter if (window.event && isParentOrSelf(window.event.srcElement, "controls")) return; if (key.target && isParentOrSelf(key.target, "controls")) return; case 32: // spacebar case 34: // page down case 39: // rightkey case 40: // downkey go(1); break; case 33: // page up case 37: // leftkey case 38: // upkey go(-1); break; case 84: // t toggle(); break; case 67: // c showHide('k'); break; } } function clicker(e) { var target; if (window.event) { target = window.event.srcElement; e = window.event; } else target = e.target; if (target.href != null || isParentOrSelf(target, 'controls')) return true; if (!e.which || e.which == 1) go(1); } function slideJump() { if (window.location.hash == null) return; var sregex = /^#slide(\d+)$/; var matches = sregex.exec(window.location.hash); var dest = null; if (matches != null) { dest = parseInt(matches[1]); } else { var target = window.location.hash.slice(1); var targetElement = null; var aelements = document.getElementsByTagName("a"); for (i = 0; i < aelements.length; i++) { var aelement = aelements[i]; if ( (aelement.name && aelement.name == target) || (aelement.id && aelement.id == target) ) { targetElement = aelement; break; } } while(targetElement != null && targetElement.nodeName != "body") { if (targetElement.className == "slide") break; targetElement = targetElement.parentNode; } if (targetElement != null && targetElement.className == "slide") { dest = parseInt(targetElement.id.slice(1)); } } if (dest != null) go(dest - snum); } function createControls() { controlsDiv = document.getElementById("controls"); if (!controlsDiv) return; controlsDiv.innerHTML = '
' + '
' + 'Ø<\/a>' + '