# Elixir-0.7.1/elixir/events.py
__all__ = [
'before_insert',
'after_insert',
'before_update',
'after_update',
'before_delete',
'after_delete',
'reconstructor'
]
def create_decorator(event_name):
def decorator(func):
if not hasattr(func, '_elixir_events'):
func._elixir_events = []
func._elixir_events.append(event_name)
return func
return decorator
before_insert = create_decorator('before_insert')
after_insert = create_decorator('after_insert')
before_update = create_decorator('before_update')
after_update = create_decorator('after_update')
before_delete = create_decorator('before_delete')
after_delete = create_decorator('after_delete')
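# Usage sketch (illustrative only; assumes an Elixir entity and fields defined
# elsewhere). Methods decorated this way are recorded in func._elixir_events
# and hooked up by Elixir when the entity's mapper is set up:
#
#     class Comment(Entity):
#         text = Field(UnicodeText)
#         created_at = Field(DateTime)
#
#         @before_insert
#         def set_creation_date(self):
#             self.created_at = datetime.datetime.now()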
try:
from sqlalchemy.orm import reconstructor
except ImportError:
def reconstructor(func):
raise Exception('The reconstructor method decorator is only '
'available with SQLAlchemy 0.5 and later')
# Elixir-0.7.1/elixir/statements.py
import sys
MUTATORS = '__elixir_mutators__'
class ClassMutator(object):
'''
DSL-style syntax
A ``ClassMutator`` object represents a DSL term.
'''
def __init__(self, handler):
'''
        Create a new ClassMutator, using the `handler` callable to process it
        when the time comes.
'''
self.handler = handler
# called when a mutator (eg. "has_field(...)") is parsed
def __call__(self, *args, **kwargs):
# self in this case is the "generic" mutator (eg "has_field")
# jam this mutator into the class's mutator list
class_locals = sys._getframe(1).f_locals
mutators = class_locals.setdefault(MUTATORS, [])
mutators.append((self, args, kwargs))
def process(self, entity, *args, **kwargs):
'''
Process one mutator. This version simply calls the handler callable,
but another mutator (sub)class could do more processing.
'''
self.handler(entity, *args, **kwargs)
#TODO: move this to the super class (to be created here) of EntityMeta
def process_mutators(entity):
'''
Apply all mutators of the given entity. That is, loop over all mutators
in the class's mutator list and process them.
'''
    # we don't use getattr here so as not to inadvertently inherit the
    # parent's mutators if the current entity hasn't defined any mutator.
mutators = entity.__dict__.get(MUTATORS, [])
for mutator, args, kwargs in mutators:
mutator.process(entity, *args, **kwargs)
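# Usage sketch (hypothetical DSL term, for illustration only): a ClassMutator
# called in a class body records itself in the class's mutator list, and
# process_mutators() later replays those calls against the finished class
# (Elixir's EntityMeta does this for entities):
#
#     def handle_nickname(entity, nickname):
#         entity.nickname = nickname
#     set_nickname = ClassMutator(handle_nickname)
#
#     class Person(object):
#         set_nickname('Bob')        # appended to __elixir_mutators__
#
#     process_mutators(Person)       # calls handle_nickname(Person, 'Bob')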
class Statement(ClassMutator):
def process(self, entity, *args, **kwargs):
builder = self.handler(entity, *args, **kwargs)
entity._descriptor.builders.append(builder)
class PropertyStatement(ClassMutator):
def process(self, entity, name, *args, **kwargs):
prop = self.handler(*args, **kwargs)
prop.attach(entity, name)
# Elixir-0.7.1/elixir/py23compat.py
# Some helper functions to get by without Python 2.4
# set
try:
set = set
except NameError:
from sets import Set as set
orig_cmp = cmp
# [].sort
def sort_list(l, cmp=None, key=None, reverse=False):
try:
l.sort(cmp, key, reverse)
except TypeError, e:
if not str(e).startswith('sort expected at most 1 arguments'):
raise
if cmp is None:
cmp = orig_cmp
if key is not None:
# the cmp=cmp parameter is required to get the original comparator
# into the lambda namespace
cmp = lambda self, other, cmp=cmp: cmp(key(self), key(other))
if reverse:
cmp = lambda self, other, cmp=cmp: -cmp(self,other)
l.sort(cmp)
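# Usage sketch: sort_list() emulates the Python 2.4 key/reverse keywords on
# Python 2.3, where list.sort() only accepts a comparison function:
#
#     people = [('alice', 25), ('bob', 30)]
#     sort_list(people, key=lambda p: p[1], reverse=True)
#     # people is now [('bob', 30), ('alice', 25)]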
# sorted
try:
sorted = sorted
except NameError:
    # the 'sorted' built-in doesn't exist in Python 2.3;
    # this provides a poor man's emulation of the sorted() built-in function
def sorted(l, cmp=None, key=None, reverse=False):
sorted_list = list(l)
sort_list(sorted_list, cmp, key, reverse)
return sorted_list
# rsplit
try:
''.rsplit
def rsplit(s, delim, maxsplit):
return s.rsplit(delim, maxsplit)
except AttributeError:
def rsplit(s, delim, maxsplit):
"""Return a list of the words of the string s, scanning s
from the end. To all intents and purposes, the resulting
list of words is the same as returned by split(), except
when the optional third argument maxsplit is explicitly
specified and nonzero. When maxsplit is nonzero, at most
maxsplit number of splits - the rightmost ones - occur,
and the remainder of the string is returned as the first
element of the list (thus, the list will have at most
maxsplit+1 elements). New in version 2.4.
>>> rsplit('foo.bar.baz', '.', 0)
['foo.bar.baz']
>>> rsplit('foo.bar.baz', '.', 1)
['foo.bar', 'baz']
>>> rsplit('foo.bar.baz', '.', 2)
['foo', 'bar', 'baz']
>>> rsplit('foo.bar.baz', '.', 99)
['foo', 'bar', 'baz']
"""
assert maxsplit >= 0
if maxsplit == 0: return [s]
# the following lines perform the function, but inefficiently.
# This may be adequate for compatibility purposes
items = s.split(delim)
if maxsplit < len(items):
items[:-maxsplit] = [delim.join(items[:-maxsplit])]
return items
# Elixir-0.7.1/elixir/__init__.py
'''
Elixir package
A declarative layer on top of the SQLAlchemy library. It is a fairly thin
wrapper, which provides
the ability to create simple Python classes that map directly to relational
database tables (this pattern is often referred to as the Active Record design
pattern), providing many of the benefits of traditional databases
without losing the convenience of Python objects.
Elixir is intended to replace the ActiveMapper SQLAlchemy extension and the
TurboEntity project, but does not intend to replace SQLAlchemy's core features,
and instead focuses on providing a simpler syntax for defining model objects
when you do not need the full expressiveness of SQLAlchemy's manual mapper
definitions.
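As a minimal sketch (not part of this package), a model could be declared and
set up like this:

.. sourcecode:: python

    from elixir import *

    metadata.bind = "sqlite:///:memory:"

    class Person(Entity):
        name = Field(Unicode(50))

    setup_all()
    create_all()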
'''
try:
set
except NameError:
from sets import Set as set
import sqlalchemy
from sqlalchemy.types import *
from elixir.options import using_options, using_table_options, \
using_mapper_options, options_defaults, \
using_options_defaults
from elixir.entity import Entity, EntityBase, EntityMeta, EntityDescriptor, \
setup_entities, cleanup_entities
from elixir.fields import has_field, Field
from elixir.relationships import belongs_to, has_one, has_many, \
has_and_belongs_to_many, \
ManyToOne, OneToOne, OneToMany, ManyToMany
from elixir.properties import has_property, GenericProperty, ColumnProperty, \
Synonym
from elixir.statements import Statement
from elixir.collection import EntityCollection, GlobalEntityCollection
__version__ = '0.7.1'
__all__ = ['Entity', 'EntityBase', 'EntityMeta', 'EntityCollection',
'entities',
'Field', 'has_field',
'has_property', 'GenericProperty', 'ColumnProperty', 'Synonym',
'belongs_to', 'has_one', 'has_many', 'has_and_belongs_to_many',
'ManyToOne', 'OneToOne', 'OneToMany', 'ManyToMany',
'using_options', 'using_table_options', 'using_mapper_options',
'options_defaults', 'using_options_defaults',
'metadata', 'session',
'create_all', 'drop_all',
'setup_all', 'cleanup_all',
'setup_entities', 'cleanup_entities'] + \
sqlalchemy.types.__all__
__doc_all__ = ['create_all', 'drop_all',
'setup_all', 'cleanup_all',
'metadata', 'session']
# default session
session = sqlalchemy.orm.scoped_session(sqlalchemy.orm.sessionmaker())
# default metadata
metadata = sqlalchemy.MetaData()
metadatas = set()
# default entity collection
entities = GlobalEntityCollection()
def create_all(*args, **kwargs):
'''Create the necessary tables for all declared entities'''
for md in metadatas:
md.create_all(*args, **kwargs)
def drop_all(*args, **kwargs):
'''Drop tables for all declared entities'''
for md in metadatas:
md.drop_all(*args, **kwargs)
def setup_all(create_tables=False, *args, **kwargs):
    '''Set up the tables and mappers of all entities in the default entity
    collection.
    This is called automatically when any entity of the collection is
    configured with the `autosetup` option and that entity is first accessed,
    instantiated (called), or when the create_all method of a metadata
    containing tables from any of those entities is called.
'''
setup_entities(entities)
# issue the "CREATE" SQL statements
if create_tables:
create_all(*args, **kwargs)
def cleanup_all(drop_tables=False, *args, **kwargs):
'''Clear all mappers, clear the session, and clear all metadatas.
Optionally drops the tables.
'''
session.close()
cleanup_entities(entities)
sqlalchemy.orm.clear_mappers()
entities.clear()
if drop_tables:
drop_all(*args, **kwargs)
for md in metadatas:
md.clear()
metadatas.clear()
# Elixir-0.7.1/elixir/relationships.py
'''
This module provides support for defining relationships between your Elixir
entities. Elixir currently supports two syntaxes to do so: the default
`Attribute-based syntax`_ which supports the following types of relationships:
ManyToOne_, OneToMany_, OneToOne_ and ManyToMany_, as well as a
`DSL-based syntax`_ which provides the following statements: belongs_to_,
has_many_, has_one_ and has_and_belongs_to_many_.
======================
Attribute-based syntax
======================
The first argument to all these "normal" relationship classes is the name of
the class (entity) you are relating to.
Following that first mandatory argument, any number of additional keyword
arguments can be specified for advanced behavior. See each relationship type
for a list of their specific keyword arguments. At this point, we'll just note
that all the arguments that are not specifically processed by Elixir, as
mentioned in the documentation below are passed on to the SQLAlchemy
``relation`` function. So, please refer to the SQLAlchemy documentation of the
``relation`` function for further details about which keyword arguments are
supported.
You should keep in mind that the following
keyword arguments are automatically generated by Elixir and should not be used
unless you want to override the value provided by Elixir: ``uselist``,
``remote_side``, ``secondary``, ``primaryjoin`` and ``secondaryjoin``.
Additionally, if you want a bidirectional relationship, you should define the
inverse relationship on the other entity explicitly (as opposed to how
SQLAlchemy's backrefs are defined). In non-ambiguous situations, Elixir will
match relationships together automatically. If there are several relationships
of the same type between two entities, Elixir is not able to determine which
relationship is the inverse of which, so you have to disambiguate the
situation by giving the name of the inverse relationship in the ``inverse``
keyword argument.
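For instance (an illustrative sketch with hypothetical entities), two
relationships of the same types between the same two entities could be
disambiguated like this:

.. sourcecode:: python

    class Person(Entity):
        sent_messages = OneToMany('Message', inverse='sender')
        received_messages = OneToMany('Message', inverse='recipient')

    class Message(Entity):
        sender = ManyToOne('Person', inverse='sent_messages')
        recipient = ManyToOne('Person', inverse='received_messages')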
Here is a detailed explanation of each relation type:
`ManyToOne`
-----------
Describes the child's side of a parent-child relationship. For example,
a `Pet` object may belong to its owner, who is a `Person`. This could be
expressed like so:
.. sourcecode:: python
class Pet(Entity):
owner = ManyToOne('Person')
Behind the scenes, assuming the primary key of the `Person` entity is
an integer column named `id`, the ``ManyToOne`` relationship will
automatically add an integer column named `owner_id` to the entity, with a
foreign key referencing the `id` column of the `Person` entity.
In addition to the keyword arguments inherited from SQLAlchemy's relation
function, ``ManyToOne`` relationships accept the following optional arguments
which will be directed to the created column:
+----------------------+------------------------------------------------------+
| Option Name | Description |
+======================+======================================================+
| ``colname`` | Specify a custom name for the foreign key column(s). |
| | This argument accepts either a single string or a |
| | list of strings. The number of strings passed must |
| | match the number of primary key columns of the target|
| | entity. If this argument is not used, the name of the|
| | column(s) is generated with the pattern |
| | defined in options.FKCOL_NAMEFORMAT, which is, by |
| | default: "%(relname)s_%(key)s", where relname is the |
| | name of the ManyToOne relationship, and 'key' is the |
| | name (key) of the primary column in the target |
|                      | entity. That is, in the above Pet/owner example,     |
| | the name of the column would be: "owner_id". |
+----------------------+------------------------------------------------------+
| ``required`` | Specify whether or not this field can be set to None |
| | (left without a value). Defaults to ``False``, |
| | unless the field is a primary key. |
+----------------------+------------------------------------------------------+
| ``primary_key`` | Specify whether or not the column(s) created by this |
| | relationship should act as a primary_key. |
| | Defaults to ``False``. |
+----------------------+------------------------------------------------------+
| ``column_kwargs`` | A dictionary holding any other keyword argument you |
| | might want to pass to the Column. |
+----------------------+------------------------------------------------------+
| ``target_column`` | Name (or list of names) of the target column(s). |
| | If this argument is not specified, the target entity |
| | primary key column(s) are used. |
+----------------------+------------------------------------------------------+
The following optional arguments are also supported to customize the
ForeignKeyConstraint that is created:
+----------------------+------------------------------------------------------+
| Option Name | Description |
+======================+======================================================+
| ``use_alter`` | If True, SQLAlchemy will add the constraint in a |
| | second SQL statement (as opposed to within the |
| | create table statement). This permits to define |
| | tables with a circular foreign key dependency |
| | between them. |
+----------------------+------------------------------------------------------+
| ``ondelete`` | Value for the foreign key constraint ondelete clause.|
| | May be one of: ``cascade``, ``restrict``, |
| | ``set null``, or ``set default``. |
+----------------------+------------------------------------------------------+
| ``onupdate`` | Value for the foreign key constraint onupdate clause.|
| | May be one of: ``cascade``, ``restrict``, |
| | ``set null``, or ``set default``. |
+----------------------+------------------------------------------------------+
| ``constraint_kwargs``| A dictionary holding any other keyword argument you |
| | might want to pass to the Constraint. |
+----------------------+------------------------------------------------------+
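For illustration, the column- and constraint-level options above might be
combined as follows (a sketch building on the Pet/Person example):

.. sourcecode:: python

    class Pet(Entity):
        owner = ManyToOne('Person', colname='owner_id', required=True,
                          ondelete='cascade')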
In some cases, you may want to declare the foreign key column explicitly,
instead of letting it be generated automatically. There are several reasons
for that: it could be because you want to declare it with precise arguments
and using column_kwargs makes your code ugly, or because the name of
your column conflicts with the property name (in which case an error is
thrown). In those cases, you can use the ``field`` argument to specify an
already-declared field to be used for the foreign key column.
For example, in the Pet example above, if you want the database column
(holding the foreign key) to be called 'owner', you should use the field
parameter to specify the field manually.
.. sourcecode:: python
class Pet(Entity):
owner_id = Field(Integer, colname='owner')
owner = ManyToOne('Person', field=owner_id)
+----------------------+------------------------------------------------------+
| Option Name | Description |
+======================+======================================================+
| ``field`` | Specify the previously-declared field to be used for |
| | the foreign key column. Use of this parameter is |
| | mutually exclusive with the colname and column_kwargs|
| | arguments. |
+----------------------+------------------------------------------------------+
Additionally, Elixir supports the belongs_to_ statement as an alternative,
DSL-based, syntax to define ManyToOne_ relationships.
`OneToMany`
-----------
Describes the parent's side of a parent-child relationship when there can be
several children. For example, a `Person` object has many children, each of
them being a `Person`. This could be expressed like so:
.. sourcecode:: python
class Person(Entity):
parent = ManyToOne('Person')
children = OneToMany('Person')
Note that a ``OneToMany`` relationship **cannot exist** without a
corresponding ``ManyToOne`` relationship in the other way. This is because the
``OneToMany`` relationship needs the foreign key created by the ``ManyToOne``
relationship.
In addition to keyword arguments inherited from SQLAlchemy, ``OneToMany``
relationships accept the following optional (keyword) arguments:
+--------------------+--------------------------------------------------------+
| Option Name | Description |
+====================+========================================================+
| ``order_by`` | Specify which field(s) should be used to sort the |
| | results given by accessing the relation field. |
| | Note that this sort order is only applied when loading |
| | objects from the database. Objects appended to the |
| | collection afterwards are not re-sorted in-memory on |
| | the fly. |
| | This argument accepts either a string or a list of |
| | strings, each corresponding to the name of a field in |
| | the target entity. These field names can optionally be |
| | prefixed by a minus (for descending order). |
+--------------------+--------------------------------------------------------+
| ``filter`` | Specify a filter criterion (as a clause element) for |
| | this relationship. This criterion will be ``and_`` ed |
| | with the normal join criterion (primaryjoin) generated |
| | by Elixir for the relationship. For example: |
| | boston_addresses = |
| | OneToMany('Address', filter=Address.city == 'Boston') |
+--------------------+--------------------------------------------------------+
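For example (an illustrative sketch), the ``order_by`` argument described
above could be used like this:

.. sourcecode:: python

    class Person(Entity):
        name = Field(Unicode(50))
        parent = ManyToOne('Person')
        children = OneToMany('Person', order_by='-name')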
Additionally, Elixir supports an alternate, DSL-based, syntax to define
OneToMany_ relationships, with the has_many_ statement.
`OneToOne`
----------
Describes the parent's side of a parent-child relationship when there is only
one child. For example, a `Car` object has one gear stick, which is
represented as a `GearStick` object. This could be expressed like so:
.. sourcecode:: python
class Car(Entity):
gear_stick = OneToOne('GearStick', inverse='car')
class GearStick(Entity):
car = ManyToOne('Car')
Note that a ``OneToOne`` relationship **cannot exist** without a corresponding
``ManyToOne`` relationship in the other way. This is because the ``OneToOne``
relationship needs the foreign_key created by the ``ManyToOne`` relationship.
Additionally, Elixir supports an alternate, DSL-based, syntax to define
OneToOne_ relationships, with the has_one_ statement.
`ManyToMany`
------------
Describes a relationship in which one kind of entity can be related to several
objects of the other kind but the objects of that other kind can be related to
several objects of the first kind. For example, an `Article` can have several
tags, but the same `Tag` can be used on several articles.
.. sourcecode:: python
class Article(Entity):
tags = ManyToMany('Tag')
class Tag(Entity):
articles = ManyToMany('Article')
Behind the scenes, the ``ManyToMany`` relationship will automatically create an
intermediate table to host its data.
Note that you don't necessarily need to define the inverse relationship. In
our example, even though we want tags to be usable on several articles, we
might not be interested in which articles correspond to a particular tag. In
that case, we could have omitted the `Tag` side of the relationship.
If your ``ManyToMany`` relationship is self-referential and the entity
containing it is autoloaded (and you don't intend to specify both the
primaryjoin and secondaryjoin arguments manually), you must specify at least
one of either the ``remote_colname`` or ``local_colname`` arguments.
In addition to keyword arguments inherited from SQLAlchemy, ``ManyToMany``
relationships accept the following optional (keyword) arguments:
+--------------------+--------------------------------------------------------+
| Option Name | Description |
+====================+========================================================+
| ``tablename`` | Specify a custom name for the intermediary table. This |
| | can be used both when the tables needs to be created |
| | and when the table is autoloaded/reflected from the |
| | database. If this argument is not used, a name will be |
| | automatically generated by Elixir depending on the name|
| | of the tables of the two entities of the relationship, |
| | the name of the relationship, and, if present, the name|
| | of its inverse. Even though this argument is optional, |
| | it is wise to use it if you are not sure what are the |
| | exact consequence of using a generated table name. |
+--------------------+--------------------------------------------------------+
| ``schema`` | Specify a custom schema for the intermediate table. |
| | This can be used both when the tables needs to |
| | be created and when the table is autoloaded/reflected |
| | from the database. |
+--------------------+--------------------------------------------------------+
| ``remote_colname`` | A string or list of strings specifying the names of |
| | the column(s) in the intermediary table which |
| | reference the "remote"/target entity's table. |
+--------------------+--------------------------------------------------------+
| ``local_colname`` | A string or list of strings specifying the names of |
| | the column(s) in the intermediary table which |
| | reference the "local"/current entity's table. |
+--------------------+--------------------------------------------------------+
| ``table`` | Use a manually created table. If this argument is |
| | used, Elixir won't generate a table for this |
| | relationship, and use the one given instead. |
+--------------------+--------------------------------------------------------+
| ``order_by`` | Specify which field(s) should be used to sort the |
| | results given by accessing the relation field. |
| | Note that this sort order is only applied when loading |
| | objects from the database. Objects appended to the |
| | collection afterwards are not re-sorted in-memory on |
| | the fly. |
| | This argument accepts either a string or a list of |
| | strings, each corresponding to the name of a field in |
| | the target entity. These field names can optionally be |
| | prefixed by a minus (for descending order). |
+--------------------+--------------------------------------------------------+
| ``ondelete`` | Value for the foreign key constraint ondelete clause. |
| | May be one of: ``cascade``, ``restrict``, |
| | ``set null``, or ``set default``. |
+--------------------+--------------------------------------------------------+
| ``onupdate`` | Value for the foreign key constraint onupdate clause. |
| | May be one of: ``cascade``, ``restrict``, |
| | ``set null``, or ``set default``. |
+--------------------+--------------------------------------------------------+
| ``table_kwargs`` | A dictionary holding any other keyword argument you |
| | might want to pass to the underlying Table object. |
+--------------------+--------------------------------------------------------+
| ``column_format`` | DEPRECATED. Specify an alternate format string for |
| | naming the |
| | columns in the mapping table. The default value is |
| | defined in ``elixir.options.M2MCOL_NAMEFORMAT``. You |
| | will be passed ``tablename``, ``key``, and ``entity`` |
| | as arguments to the format string. |
+--------------------+--------------------------------------------------------+
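To illustrate the self-referential case described above (a sketch with a
hypothetical `Person` entity):

.. sourcecode:: python

    class Person(Entity):
        friends = ManyToMany('Person',
                             tablename='friendships',
                             local_colname='person_id',
                             remote_colname='friend_id')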
================
DSL-based syntax
================
The following DSL statements provide an alternative way to define relationships
between your entities. The first argument to all those statements is the name
of the relationship, the second is the 'kind' of object you are relating to
(it is usually given using the ``of_kind`` keyword).
`belongs_to`
------------
The ``belongs_to`` statement is the DSL syntax equivalent to the ManyToOne_
relationship. As such, it supports all the same arguments as ManyToOne_
relationships.
.. sourcecode:: python
class Pet(Entity):
belongs_to('feeder', of_kind='Person')
belongs_to('owner', of_kind='Person', colname="owner_id")
`has_many`
----------
The ``has_many`` statement is the DSL syntax equivalent to the OneToMany_
relationship. As such, it supports all the same arguments as OneToMany_
relationships.
.. sourcecode:: python
class Person(Entity):
belongs_to('parent', of_kind='Person')
has_many('children', of_kind='Person')
There is also an alternate form of the ``has_many`` relationship that takes
only two keyword arguments: ``through`` and ``via`` in order to encourage a
richer form of many-to-many relationship that is an alternative to the
``has_and_belongs_to_many`` statement. Here is an example:
.. sourcecode:: python
class Person(Entity):
has_field('name', Unicode)
has_many('assignments', of_kind='Assignment')
has_many('projects', through='assignments', via='project')
class Assignment(Entity):
has_field('start_date', DateTime)
belongs_to('person', of_kind='Person')
belongs_to('project', of_kind='Project')
class Project(Entity):
has_field('title', Unicode)
has_many('assignments', of_kind='Assignment')
In the above example, a `Person` has many `projects` through the `Assignment`
relationship object, via a `project` attribute.
`has_one`
---------
The ``has_one`` statement is the DSL syntax equivalent to the OneToOne_
relationship. As such, it supports all the same arguments as OneToOne_
relationships.
.. sourcecode:: python
class Car(Entity):
has_one('gear_stick', of_kind='GearStick', inverse='car')
class GearStick(Entity):
belongs_to('car', of_kind='Car')
`has_and_belongs_to_many`
-------------------------
The ``has_and_belongs_to_many`` statement is the DSL syntax equivalent to the
ManyToMany_ relationship. As such, it supports all the same arguments as
ManyToMany_ relationships.
.. sourcecode:: python
class Article(Entity):
has_and_belongs_to_many('tags', of_kind='Tag')
class Tag(Entity):
has_and_belongs_to_many('articles', of_kind='Article')
'''
import warnings
from sqlalchemy import ForeignKeyConstraint, Column, Table, and_
from sqlalchemy.orm import relation, backref, class_mapper
from sqlalchemy.ext.associationproxy import association_proxy
import options
from elixir.statements import ClassMutator
from elixir.properties import Property
from elixir.entity import EntityMeta, DEBUG
__doc_all__ = []
class Relationship(Property):
'''
Base class for relationships.
'''
def __init__(self, of_kind, inverse=None, *args, **kwargs):
super(Relationship, self).__init__()
self.of_kind = of_kind
self.inverse_name = inverse
self._target = None
self.property = None # sqlalchemy property
self.backref = None # sqlalchemy backref
#TODO: unused for now
self.args = args
self.kwargs = kwargs
def attach(self, entity, name):
super(Relationship, self).attach(entity, name)
entity._descriptor.relationships.append(self)
def create_pk_cols(self):
self.create_keys(True)
def create_non_pk_cols(self):
self.create_keys(False)
def create_keys(self, pk):
'''
Subclasses (ie. concrete relationships) may override this method to
create foreign keys.
'''
def create_properties(self):
if self.property or self.backref:
return
kwargs = self.get_prop_kwargs()
if 'order_by' in kwargs:
kwargs['order_by'] = \
self.target._descriptor.translate_order_by(kwargs['order_by'])
# transform callable arguments
for arg in ('primaryjoin', 'secondaryjoin', 'remote_side',
'foreign_keys'):
kwarg = kwargs.get(arg, None)
if hasattr(kwarg, '__call__'):
kwargs[arg] = kwarg()
# viewonly relationships need to create "standalone" relations (ie
# shouldn't be a backref of another relation).
if self.inverse and not kwargs.get('viewonly', False):
# check if the inverse was already processed (and thus has already
# defined a backref we can use)
if self.inverse.backref:
# let the user override the backref argument
if 'backref' not in kwargs:
kwargs['backref'] = self.inverse.backref
else:
# SQLAlchemy doesn't like when 'secondary' is both defined on
# the relation and the backref
kwargs.pop('secondary', None)
# define backref for use by the inverse
self.backref = backref(self.name, **kwargs)
return
self.property = relation(self.target, **kwargs)
self.add_mapper_property(self.name, self.property)
def target(self):
if not self._target:
if isinstance(self.of_kind, basestring):
collection = self.entity._descriptor.collection
self._target = collection.resolve(self.of_kind, self.entity)
else:
self._target = self.of_kind
return self._target
target = property(target)
def inverse(self):
if not hasattr(self, '_inverse'):
if self.inverse_name:
desc = self.target._descriptor
inverse = desc.find_relationship(self.inverse_name)
if inverse is None:
raise Exception(
"Couldn't find a relationship named '%s' in "
"entity '%s' or its parent entities."
% (self.inverse_name, self.target.__name__))
assert self.match_type_of(inverse), \
"Relationships '%s' in entity '%s' and '%s' in entity " \
"'%s' cannot be inverse of each other because their " \
"types do not form a valid combination." % \
(self.name, self.entity.__name__,
self.inverse_name, self.target.__name__)
else:
check_reverse = not self.kwargs.get('viewonly', False)
if isinstance(self.target, EntityMeta):
inverse = self.target._descriptor.get_inverse_relation(
self, check_reverse=check_reverse)
else:
inverse = None
self._inverse = inverse
if inverse and not self.kwargs.get('viewonly', False):
inverse._inverse = self
return self._inverse
inverse = property(inverse)
def match_type_of(self, other):
return False
def is_inverse(self, other):
# viewonly relationships are not symmetrical: a viewonly relationship
# should have exactly one inverse (a ManyToOne relationship), but that
# inverse shouldn't have the viewonly relationship as its inverse.
return not other.kwargs.get('viewonly', False) and \
other is not self and \
self.match_type_of(other) and \
self.entity == other.target and \
other.entity == self.target and \
(self.inverse_name == other.name or not self.inverse_name) and \
(other.inverse_name == self.name or not other.inverse_name)
class ManyToOne(Relationship):
'''
'''
def __init__(self, of_kind,
column_kwargs=None,
colname=None, required=None, primary_key=None,
field=None,
constraint_kwargs=None,
use_alter=None, ondelete=None, onupdate=None,
target_column=None,
*args, **kwargs):
# 1) handle column-related args
# check that the column arguments don't conflict
assert not (field and (column_kwargs or colname)), \
"ManyToOne can accept the 'field' argument or column " \
"arguments ('colname' or 'column_kwargs') but not both!"
if colname and not isinstance(colname, list):
colname = [colname]
self.colname = colname or []
column_kwargs = column_kwargs or {}
# kwargs go by default to the relation(), so we need to manually
# extract those targeting the Column
if required is not None:
column_kwargs['nullable'] = not required
if primary_key is not None:
column_kwargs['primary_key'] = primary_key
# by default, created columns will have an index.
column_kwargs.setdefault('index', True)
self.column_kwargs = column_kwargs
if field and not isinstance(field, list):
field = [field]
self.field = field or []
# 2) handle constraint kwargs
constraint_kwargs = constraint_kwargs or {}
if use_alter is not None:
constraint_kwargs['use_alter'] = use_alter
if ondelete is not None:
constraint_kwargs['ondelete'] = ondelete
if onupdate is not None:
constraint_kwargs['onupdate'] = onupdate
self.constraint_kwargs = constraint_kwargs
# 3) misc arguments
if target_column and not isinstance(target_column, list):
target_column = [target_column]
self.target_column = target_column
self.foreign_key = []
self.primaryjoin_clauses = []
super(ManyToOne, self).__init__(of_kind, *args, **kwargs)
def match_type_of(self, other):
return isinstance(other, (OneToMany, OneToOne))
def target_table(self):
if isinstance(self.target, EntityMeta):
return self.target._descriptor.table
else:
return class_mapper(self.target).local_table
target_table = property(target_table)
def create_keys(self, pk):
'''
Find all primary keys on the target and create foreign keys on the
source accordingly.
'''
if self.foreign_key:
return
if self.column_kwargs.get('primary_key', False) != pk:
return
source_desc = self.entity._descriptor
if isinstance(self.target, EntityMeta):
# make sure the target has all its pk set up
self.target._descriptor.create_pk_cols()
#XXX: another option, instead of the FakeTable, would be to create an
# EntityDescriptor for the SA class.
target_table = self.target_table
if source_desc.autoload:
#TODO: allow target_column to be used as an alternative to
# specifying primaryjoin, to be consistent with non-autoloaded
# tables
if self.colname:
if 'primaryjoin' not in self.kwargs:
self.primaryjoin_clauses = \
_get_join_clauses(self.entity.table,
self.colname, None,
target_table)[0]
if not self.primaryjoin_clauses:
colnames = ', '.join(self.colname)
raise Exception(
"Couldn't find a foreign key constraint in table "
"'%s' using the following columns: %s."
% (self.entity.table.name, colnames))
if self.field:
raise NotImplementedError(
"'field' argument not allowed on autoloaded table "
"relationships.")
else:
fk_refcols = []
fk_colnames = []
if self.target_column is None:
target_columns = target_table.primary_key.columns
else:
target_columns = [target_table.columns[col]
for col in self.target_column]
if not target_columns:
raise Exception("No primary key found in target table ('%s') "
"for the '%s' relationship of the '%s' entity."
% (target_table.name, self.name,
self.entity.__name__))
if self.colname and \
len(self.colname) != len(target_columns):
raise Exception(
"The number of column names provided in the colname "
"keyword argument of the '%s' relationship of the "
"'%s' entity is not the same as the number of columns "
"of the primary key of '%s'."
% (self.name, self.entity.__name__,
self.target.__name__))
for key_num, target_col in enumerate(target_columns):
if self.field:
col = self.field[key_num].column
else:
if self.colname:
colname = self.colname[key_num]
else:
colname = options.FKCOL_NAMEFORMAT % \
{'relname': self.name,
'key': target_col.key}
# We can't add the column to the table directly as the
# table might not be created yet.
col = Column(colname, target_col.type,
**self.column_kwargs)
source_desc.add_column(col)
# If the column name was specified, and it is the same as
# this property's name, there is going to be a conflict.
# Don't allow this to happen.
if col.key == self.name:
raise ValueError(
"ManyToOne named '%s' in '%s' conficts "
" with the column of the same name. "
"You should probably define the foreign key "
"field manually and use the 'field' "
"argument on the ManyToOne relationship"
% (self.name, self.entity.__name__))
# Build the list of local columns which will be part of
# the foreign key
self.foreign_key.append(col)
# Store the names of those columns
fk_colnames.append(col.key)
# Build the list of column "paths" the foreign key will
# point to
fk_refcols.append("%s.%s" % \
(target_table.fullname, target_col.key))
# Build up the primary join. This is needed when you have
# several ManyToOne relationships between two objects
self.primaryjoin_clauses.append(col == target_col)
if 'name' not in self.constraint_kwargs:
# In some databases (at least MySQL) the constraint name needs
# to be unique for the whole database, instead of per table.
fk_name = options.CONSTRAINT_NAMEFORMAT % \
{'tablename': source_desc.tablename,
'colnames': '_'.join(fk_colnames)}
self.constraint_kwargs['name'] = fk_name
source_desc.add_constraint(
ForeignKeyConstraint(fk_colnames, fk_refcols,
**self.constraint_kwargs))
def get_prop_kwargs(self):
kwargs = {'uselist': False}
if self.entity.table is self.target_table:
# this is needed because otherwise SA has no way to know what is
# the direction of the relationship since both columns present in
# the primaryjoin belong to the same table. In other words, it is
# necessary to know if this particular relation
# is the many-to-one side, or the one-to-xxx side. The foreignkey
# doesn't help in this case.
kwargs['remote_side'] = \
[col for col in self.target_table.primary_key.columns]
if self.primaryjoin_clauses:
kwargs['primaryjoin'] = and_(*self.primaryjoin_clauses)
kwargs.update(self.kwargs)
return kwargs
class OneToOne(Relationship):
uselist = False
def __init__(self, of_kind, filter=None, *args, **kwargs):
self.filter = filter
if filter is not None:
# We set viewonly to True by default for filtered relationships,
# unless manually overridden.
# This is not strictly necessary, as SQLAlchemy allows non viewonly
# relationships with a custom join/filter. The example at:
# SADOCS/05/mappers.html#advdatamapping_relation_customjoin
# is not viewonly. Those relationships can be used as if the extra
# filter wasn't present when inserting. This can lead to a
# confusing behavior (if you insert data which doesn't match the
# extra criterion it'll get inserted anyway but you won't see it
# when you query back the attribute after a round-trip to the
# database).
if 'viewonly' not in kwargs:
kwargs['viewonly'] = True
super(OneToOne, self).__init__(of_kind, *args, **kwargs)
def match_type_of(self, other):
return isinstance(other, ManyToOne)
def create_keys(self, pk):
# make sure an inverse relationship exists
if self.inverse is None:
raise Exception(
"Couldn't find any relationship in '%s' which "
"match as inverse of the '%s' relationship "
"defined in the '%s' entity. If you are using "
"inheritance you "
"might need to specify inverse relationships "
"manually by using the 'inverse' argument."
% (self.target, self.name,
self.entity))
def get_prop_kwargs(self):
kwargs = {'uselist': self.uselist}
#TODO: for now, we don't break any test if we remove those 2 lines.
# So, we should either complete the selfref test to prove that they
# are indeed useful, or remove them. It might be they are indeed
# useless because the remote_side is already setup in the other way
# (ManyToOne).
if self.entity.table is self.target.table:
#FIXME: IF this code is of any use, it will probably break for
# autoloaded tables
kwargs['remote_side'] = self.inverse.foreign_key
# Contrary to ManyToMany relationships, we need to specify the join
        # clauses even if this relationship is not self-referential because
# there could be several ManyToOne from the target class to us.
joinclauses = self.inverse.primaryjoin_clauses
if self.filter:
# We need to make a copy of the joinclauses, to not add the filter
# on the backref
joinclauses = joinclauses[:] + [self.filter(self.target.table.c)]
if joinclauses:
kwargs['primaryjoin'] = and_(*joinclauses)
kwargs.update(self.kwargs)
return kwargs
class OneToMany(OneToOne):
uselist = True
class ManyToMany(Relationship):
uselist = True
def __init__(self, of_kind, tablename=None,
local_colname=None, remote_colname=None,
ondelete=None, onupdate=None,
table=None, schema=None,
column_format=None,
filter=None,
table_kwargs=None,
*args, **kwargs):
self.user_tablename = tablename
if local_colname and not isinstance(local_colname, list):
local_colname = [local_colname]
self.local_colname = local_colname or []
if remote_colname and not isinstance(remote_colname, list):
remote_colname = [remote_colname]
self.remote_colname = remote_colname or []
self.ondelete = ondelete
self.onupdate = onupdate
self.table = table
self.schema = schema
if column_format:
warnings.warn("The 'column_format' argument on ManyToMany "
"relationships is deprecated. Please use the 'local_colname' "
"and/or 'remote_colname' arguments if you want custom "
"column names for this table only, or modify "
"options.M2MCOL_NAMEFORMAT if you want a custom format for "
"all ManyToMany tables", DeprecationWarning, stacklevel=3)
self.column_format = column_format or options.M2MCOL_NAMEFORMAT
if not hasattr(self.column_format, '__call__'):
# we need to store the format in a variable so that the
# closure of the lambda is correct
format = self.column_format
self.column_format = lambda data: format % data
if options.MIGRATION_TO_07_AID:
self.column_format = \
migration_aid_m2m_column_formatter(
lambda data: options.OLD_M2MCOL_NAMEFORMAT % data,
self.column_format)
self.filter = filter
if filter is not None:
# We set viewonly to True by default for filtered relationships,
# unless manually overridden.
if 'viewonly' not in kwargs:
kwargs['viewonly'] = True
self.table_kwargs = table_kwargs or {}
self.primaryjoin_clauses = []
self.secondaryjoin_clauses = []
super(ManyToMany, self).__init__(of_kind, *args, **kwargs)
def get_table(self):
warnings.warn("The secondary_table attribute on ManyToMany objects is "
"deprecated. You should rather use the table attribute.",
DeprecationWarning, stacklevel=2)
return self.table
secondary_table = property(get_table)
def match_type_of(self, other):
return isinstance(other, ManyToMany)
def create_tables(self):
if self.table is not None:
if 'primaryjoin' not in self.kwargs or \
'secondaryjoin' not in self.kwargs:
self._build_join_clauses()
assert self.inverse is None or self.inverse.table is None or \
self.inverse.table is self.table
return
if self.inverse:
inverse = self.inverse
if inverse.table is not None:
self.table = inverse.table
self.primaryjoin_clauses = inverse.secondaryjoin_clauses
self.secondaryjoin_clauses = inverse.primaryjoin_clauses
return
assert not inverse.user_tablename or not self.user_tablename or \
inverse.user_tablename == self.user_tablename
assert not inverse.remote_colname or not self.local_colname or \
inverse.remote_colname == self.local_colname
assert not inverse.local_colname or not self.remote_colname or \
inverse.local_colname == self.remote_colname
assert not inverse.schema or not self.schema or \
inverse.schema == self.schema
assert not inverse.table_kwargs or not self.table_kwargs or \
inverse.table_kwargs == self.table_kwargs
self.user_tablename = inverse.user_tablename or self.user_tablename
self.local_colname = inverse.remote_colname or self.local_colname
self.remote_colname = inverse.local_colname or self.remote_colname
self.schema = inverse.schema or self.schema
# compute table_kwargs
complete_kwargs = options.options_defaults['table_options'].copy()
complete_kwargs.update(self.table_kwargs)
#needs: table_options['schema'], autoload, tablename, primary_keys,
#entity.__name__, table_fullname
e1_desc = self.entity._descriptor
e2_desc = self.target._descriptor
e1_schema = e1_desc.table_options.get('schema', None)
e2_schema = e2_desc.table_options.get('schema', None)
schema = (self.schema is not None) and self.schema or e1_schema
assert e1_schema == e2_schema or self.schema, \
"Schema %r for entity %s differs from schema %r of entity %s." \
" Consider using the schema-parameter. "\
% (e1_schema, self.entity.__name__,
e2_schema, self.target.__name__)
# First, we compute the name of the table. Note that some of the
# intermediary variables are reused later for the constraint
# names.
# We use the name of the relation for the first entity
# (instead of the name of its primary key), so that we can
# have two many-to-many relations between the same objects
# without having a table name collision.
source_part = "%s_%s" % (e1_desc.tablename, self.name)
# And we use only the name of the table of the second entity
# when there is no inverse, so that a many-to-many relation
# can be defined without an inverse.
if self.inverse:
target_part = "%s_%s" % (e2_desc.tablename, self.inverse.name)
else:
target_part = e2_desc.tablename
if self.user_tablename:
tablename = self.user_tablename
else:
            # We need to keep the table name consistent (independent of
# whether this relation or its inverse is setup first).
if self.inverse and source_part < target_part:
#XXX: use a different scheme for selfref (to not include the
# table name twice)?
tablename = "%s__%s" % (target_part, source_part)
else:
tablename = "%s__%s" % (source_part, target_part)
if options.MIGRATION_TO_07_AID:
oldname = (self.inverse and
e1_desc.tablename < e2_desc.tablename) and \
"%s__%s" % (target_part, source_part) or \
"%s__%s" % (source_part, target_part)
if oldname != tablename:
warnings.warn(
"The generated table name for the '%s' relationship "
"on the '%s' entity changed from '%s' (the name "
"generated by Elixir 0.6.1 and earlier) to '%s'. "
"You should either rename the table in the database "
"to the new name or use the tablename argument on the "
"relationship to force the old name: tablename='%s'!"
% (self.name, self.entity.__name__, oldname,
tablename, oldname))
if e1_desc.autoload:
if not e2_desc.autoload:
raise Exception(
"Entity '%s' is autoloaded and its '%s' "
"ManyToMany relationship points to "
"the '%s' entity which is not autoloaded"
% (self.entity.__name__, self.name,
self.target.__name__))
self.table = Table(tablename, e1_desc.metadata, autoload=True,
**complete_kwargs)
if 'primaryjoin' not in self.kwargs or \
'secondaryjoin' not in self.kwargs:
self._build_join_clauses()
else:
# We pre-compute the names of the foreign key constraints
# pointing to the source (local) entity's table and to the
# target's table
# In some databases (at least MySQL) the constraint names need
# to be unique for the whole database, instead of per table.
source_fk_name = "%s_fk" % source_part
if self.inverse:
target_fk_name = "%s_fk" % target_part
else:
target_fk_name = "%s_inverse_fk" % source_part
columns = []
constraints = []
for num, desc, fk_name, rel, inverse, colnames, join_clauses in (
(0, e1_desc, source_fk_name, self, self.inverse,
self.local_colname, self.primaryjoin_clauses),
(1, e2_desc, target_fk_name, self.inverse, self,
self.remote_colname, self.secondaryjoin_clauses)):
fk_colnames = []
fk_refcols = []
if colnames:
assert len(colnames) == len(desc.primary_keys)
else:
# The data generated here will be fed to the M2M column
# formatter to generate the name of the columns of the
# intermediate table for *one* side of the relationship,
# that is, from the intermediate table to the current
# entity, as stored in the "desc" variable.
data = {# A) relationships info
# the name of the rel going *from* the entity
# we are currently generating a column pointing
# *to*. This is generally *not* what you want to
# use. eg in a "Post" and "Tag" example, with
# relationships named 'tags' and 'posts', when
# creating the columns from the intermediate
# table to the "Post" entity, 'relname' will
# contain 'tags'.
'relname': rel and rel.name or 'inverse',
# the name of the inverse relationship. In the
# above example, 'inversename' will contain
# 'posts'.
'inversename': inverse and inverse.name
or 'inverse',
# is A == B?
'selfref': e1_desc is e2_desc,
# provided for backward compatibility, DO NOT USE!
'num': num,
# provided for backward compatibility, DO NOT USE!
'numifself': e1_desc is e2_desc and str(num + 1)
or '',
# B) target information (from the perspective of
# the intermediate table)
'target': desc.entity,
'entity': desc.entity.__name__.lower(),
'tablename': desc.tablename,
# C) current (intermediate) table name
'current_table': tablename
}
colnames = []
for pk_col in desc.primary_keys:
data.update(key=pk_col.key)
colnames.append(self.column_format(data))
for pk_col, colname in zip(desc.primary_keys, colnames):
col = Column(colname, pk_col.type, primary_key=True)
columns.append(col)
# Build the list of local columns which will be part
# of the foreign key.
fk_colnames.append(colname)
# Build the list of column "paths" the foreign key will
# point to
target_path = "%s.%s" % (desc.table_fullname, pk_col.key)
fk_refcols.append(target_path)
# Build join clauses (in case we have a self-ref)
if self.entity is self.target:
join_clauses.append(col == pk_col)
onupdate = rel and rel.onupdate
ondelete = rel and rel.ondelete
#FIXME: fk_name is misleading
constraints.append(
ForeignKeyConstraint(fk_colnames, fk_refcols,
name=fk_name, onupdate=onupdate,
ondelete=ondelete))
args = columns + constraints
self.table = Table(tablename, e1_desc.metadata,
schema=schema, *args, **complete_kwargs)
if DEBUG:
print self.table.repr2()
def _build_join_clauses(self):
# In the case we have a self-reference, we need to build join clauses
if self.entity is self.target:
if not self.local_colname and not self.remote_colname:
raise Exception(
"Self-referential ManyToMany "
"relationships in autoloaded entities need to have at "
"least one of either 'local_colname' or 'remote_colname' "
"argument specified. The '%s' relationship in the '%s' "
"entity doesn't have either."
% (self.name, self.entity.__name__))
self.primaryjoin_clauses, self.secondaryjoin_clauses = \
_get_join_clauses(self.table,
self.local_colname, self.remote_colname,
self.entity.table)
def get_prop_kwargs(self):
kwargs = {'secondary': self.table,
'uselist': self.uselist}
if self.filter:
# we need to make a copy of the joinclauses
secondaryjoin_clauses = self.secondaryjoin_clauses[:] + \
[self.filter(self.target.table.c)]
else:
secondaryjoin_clauses = self.secondaryjoin_clauses
if self.target is self.entity or self.filter:
kwargs['primaryjoin'] = and_(*self.primaryjoin_clauses)
kwargs['secondaryjoin'] = and_(*secondaryjoin_clauses)
kwargs.update(self.kwargs)
return kwargs
def is_inverse(self, other):
return super(ManyToMany, self).is_inverse(other) and \
(self.user_tablename == other.user_tablename or
(not self.user_tablename and not other.user_tablename))
def migration_aid_m2m_column_formatter(oldformatter, newformatter):
def debug_formatter(data):
old_name = oldformatter(data)
new_name = newformatter(data)
if new_name != old_name:
complete_data = data.copy()
complete_data.update(old_name=old_name,
new_name=new_name,
targetname=data['target'].__name__)
# Specifying a stacklevel is useless in this case as the name
# generation is triggered by setup_all(), not by the declaration
# of the offending relationship.
warnings.warn("The '%(old_name)s' column in the "
"'%(current_table)s' table, used as the "
"intermediate table for the '%(relname)s' "
"relationship on the '%(targetname)s' entity "
"was renamed to '%(new_name)s'."
% complete_data)
return new_name
return debug_formatter
def _get_join_clauses(local_table, local_cols1, local_cols2, target_table):
primary_join, secondary_join = [], []
cols1 = local_cols1[:]
cols1.sort()
cols1 = tuple(cols1)
if local_cols2 is not None:
cols2 = local_cols2[:]
cols2.sort()
cols2 = tuple(cols2)
else:
cols2 = None
# Build a map of fk constraints pointing to the correct table.
# The map is indexed on the local col names.
constraint_map = {}
for constraint in local_table.constraints:
if isinstance(constraint, ForeignKeyConstraint):
use_constraint = True
fk_colnames = []
# if all columns point to the correct table, we use the constraint
#TODO: check that it contains as many columns as the pk of the
#target entity, or even that it points to the actual pk columns
for fk in constraint.elements:
if fk.references(target_table):
# local column key
fk_colnames.append(fk.parent.key)
else:
use_constraint = False
if use_constraint:
fk_colnames.sort()
constraint_map[tuple(fk_colnames)] = constraint
    # Either the fk column names match explicitly with the columns given for
# one of the joins (primary or secondary), or we assume the current
# columns match because the columns for this join were not given and we
# know the other join is either not used (is None) or has an explicit
# match.
#TODO: rewrite this. Even with the comment, I don't even understand it myself.
for cols, constraint in constraint_map.iteritems():
if cols == cols1 or (cols != cols2 and
not cols1 and (cols2 in constraint_map or
cols2 is None)):
join = primary_join
elif cols == cols2 or (cols2 == () and cols1 in constraint_map):
join = secondary_join
else:
continue
for fk in constraint.elements:
join.append(fk.parent == fk.column)
return primary_join, secondary_join
def rel_mutator_handler(target):
def handler(entity, name, of_kind=None, through=None, via=None,
*args, **kwargs):
if through and via:
setattr(entity, name,
association_proxy(through, via, **kwargs))
return
elif through or via:
raise Exception("'through' and 'via' relationship keyword "
"arguments should be used in combination.")
rel = target(of_kind, *args, **kwargs)
rel.attach(entity, name)
return handler
belongs_to = ClassMutator(rel_mutator_handler(ManyToOne))
has_one = ClassMutator(rel_mutator_handler(OneToOne))
has_many = ClassMutator(rel_mutator_handler(OneToMany))
has_and_belongs_to_many = ClassMutator(rel_mutator_handler(ManyToMany))
# Elixir-0.7.1/elixir/collection.py
'''
Default entity collection implementation
'''
import sys
import re
from elixir.py23compat import rsplit
class BaseCollection(list):
def __init__(self, entities=None):
list.__init__(self)
if entities is not None:
self.extend(entities)
def extend(self, entities):
for e in entities:
self.append(e)
def clear(self):
del self[:]
def resolve_absolute(self, key, full_path, entity=None, root=None):
if root is None:
root = entity._descriptor.resolve_root
if root:
full_path = '%s.%s' % (root, full_path)
module_path, classname = rsplit(full_path, '.', 1)
module = sys.modules[module_path]
res = getattr(module, classname, None)
if res is None:
if entity is not None:
raise Exception("Couldn't resolve target '%s' <%s> in '%s'!"
% (key, full_path, entity.__name__))
else:
raise Exception("Couldn't resolve target '%s' <%s>!"
% (key, full_path))
return res
def __getattr__(self, key):
return self.resolve(key)
# default entity collection
class GlobalEntityCollection(BaseCollection):
def __init__(self, entities=None):
# _entities is a dict of entities keyed on their name.
self._entities = {}
super(GlobalEntityCollection, self).__init__(entities)
def append(self, entity):
'''
Add an entity to the collection.
'''
super(EntityCollection, self).append(entity)
existing_entities = self._entities.setdefault(entity.__name__, [])
existing_entities.append(entity)
def resolve(self, key, entity=None):
'''
Resolve a key to an Entity. The optional `entity` argument is the
"source" entity when resolving relationship targets.
'''
# Do we have a fully qualified entity name?
if '.' in key:
return self.resolve_absolute(key, key, entity)
else:
# Otherwise we look in the entities of this collection
res = self._entities.get(key, None)
if res is None:
if entity:
raise Exception("Couldn't resolve target '%s' in '%s'"
% (key, entity.__name__))
else:
raise Exception("This collection does not contain any "
"entity corresponding to the key '%s'!"
% key)
elif len(res) > 1:
raise Exception("'%s' resolves to several entities, you should"
" use the full path (including the full module"
" name) to that entity." % key)
else:
return res[0]
def clear(self):
self._entities = {}
super(GlobalEntityCollection, self).clear()
# backward compatible name
EntityCollection = GlobalEntityCollection
_leading_dots = re.compile('^([.]*).*$')
class RelativeEntityCollection(BaseCollection):
# the entity=None does not make any sense with a relative entity collection
def resolve(self, key, entity):
'''
        Resolve a key to an Entity. The `entity` argument is the
"source" entity when resolving relationship targets.
'''
full_path = key
if '.' not in key or key.startswith('.'):
# relative target
# any leading dot is stripped and with each dot removed,
# the entity_module is stripped of one more chunk (starting with
# the last one).
num_dots = _leading_dots.match(full_path).end(1)
full_path = full_path[num_dots:]
chunks = entity.__module__.split('.')
chunkstokeep = len(chunks) - num_dots
if chunkstokeep < 0:
raise Exception("Couldn't resolve relative target "
"'%s' relative to '%s'" % (key, entity.__module__))
entity_module = '.'.join(chunks[:chunkstokeep])
            if entity_module and entity_module != '__main__':
full_path = '%s.%s' % (entity_module, full_path)
root = ''
else:
root = None
return self.resolve_absolute(key, full_path, entity, root=root)
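    # Resolution sketch (hypothetical module names, for illustration): for an
    # entity defined in module 'myapp.sub.entities',
    #   'Person'         -> 'myapp.sub.entities.Person'
    #   '.Person'        -> 'myapp.sub.Person'      (one dot strips one chunk)
    #   '..model.Person' -> 'myapp.model.Person'    (two dots strip two chunks)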
def __getattr__(self, key):
raise NotImplementedError
# Elixir-0.7.1/elixir/fields.py
'''
This module provides support for defining the fields (columns) of your
entities. Elixir currently supports two syntaxes to do so: the default
`Attribute-based syntax`_ as well as the has_field_ DSL statement.
Attribute-based syntax
----------------------
Here is a quick example of how to use the object-oriented syntax.
.. sourcecode:: python
class Person(Entity):
id = Field(Integer, primary_key=True)
name = Field(String(50), required=True)
ssn = Field(String(50), unique=True)
biography = Field(Text)
join_date = Field(DateTime, default=datetime.datetime.now)
photo = Field(Binary, deferred=True)
_email = Field(String(20), colname='email', synonym='email')
def _set_email(self, email):
self._email = email
def _get_email(self):
return self._email
email = property(_get_email, _set_email)
The Field class takes one mandatory argument, which is its type. Please refer
to SQLAlchemy documentation for a list of `types supported by SQLAlchemy
`_.
Following that first mandatory argument, fields can take any number of
optional keyword arguments. Please note that all the **arguments** that are
**not specifically processed by Elixir**, as mentioned in the documentation
below, **are passed on to the SQLAlchemy ``Column`` object**. Please refer to
the `SQLAlchemy Column object's documentation
`_ for more details about other
supported keyword arguments.
The following Elixir-specific arguments are supported:
+-------------------+---------------------------------------------------------+
| Argument Name | Description |
+===================+=========================================================+
| ``required`` | Specify whether or not this field can be set to None |
| | (left without a value). Defaults to ``False``, unless |
| | the field is a primary key. |
+-------------------+---------------------------------------------------------+
| ``colname`` | Specify a custom name for the column of this field. By |
| | default the column will have the same name as the |
| | attribute. |
+-------------------+---------------------------------------------------------+
| ``deferred`` | Specify whether this particular column should be |
| | fetched by default (along with the other columns) when |
| | an instance of the entity is fetched from the database |
| | or rather only later on when this particular column is |
| | first referenced. This can be useful when one wants to |
| | avoid loading a large text or binary field into memory |
| | when it's not needed. Individual columns can be lazily |
| | loaded by themselves (by using ``deferred=True``) |
| | or placed into groups that lazy-load together (by using |
| | ``deferred`` = `"group_name"`). |
+-------------------+---------------------------------------------------------+
| ``synonym`` | Specify a synonym name for this field. The field will |
| | also be usable under that name in keyword-based Query |
| | functions such as filter_by. The Synonym class (see the |
| | `properties` module) provides a similar functionality |
| | with an (arguably) nicer syntax, but a limited scope. |
+-------------------+---------------------------------------------------------+
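Several large fields can also share a deferred group so that they are only
loaded together when one of them is first accessed (an illustrative sketch,
not taken from the original documentation):
.. sourcecode:: python
class Document(Entity):
id = Field(Integer, primary_key=True)
title = Field(String(100))
photo = Field(Binary, deferred='media')
scan = Field(Binary, deferred='media')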
has_field
---------
The `has_field` statement allows you to define fields one at a time.
The first argument is the name of the field, the second is its type. Following
these, any number of keyword arguments can be specified for additional
behavior. The following arguments are supported:
+-------------------+---------------------------------------------------------+
| Argument Name | Description |
+===================+=========================================================+
| ``through`` | Specify a relation name to go through. This field will |
| | not exist as a column on the database but will be a |
| | property which automatically proxy values to the |
| | ``attribute`` attribute of the object pointed to by the |
| | relation. If the ``attribute`` argument is not present, |
| | the name of the current field will be used. In an |
| | has_field statement, you can only proxy through a |
| | belongs_to or an has_one relationship. |
+-------------------+---------------------------------------------------------+
| ``attribute`` | Name of the "endpoint" attribute to proxy to. This |
| | should only be used in combination with the ``through`` |
| | argument. |
+-------------------+---------------------------------------------------------+
Here is a quick example of how to use ``has_field``.
.. sourcecode:: python
class Person(Entity):
has_field('id', Integer, primary_key=True)
has_field('name', String(50))
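The ``through`` and ``attribute`` arguments can be used to proxy a field of
a related entity. An illustrative sketch building on the example above (the
relationship is declared with the belongs_to statement mentioned earlier):
.. sourcecode:: python
class Address(Entity):
has_field('street', String(100))
has_field('owner_name', String(50), through='owner', attribute='name')
belongs_to('owner', of_kind='Person')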
'''
from sqlalchemy import Column
from sqlalchemy.orm import deferred, synonym
from sqlalchemy.ext.associationproxy import association_proxy
from elixir.statements import ClassMutator
from elixir.properties import Property
__doc_all__ = ['Field']
class Field(Property):
'''
Represents the definition of a 'field' on an entity.
This class represents a column on the table where the entity is stored.
'''
def __init__(self, type, *args, **kwargs):
super(Field, self).__init__()
self.colname = kwargs.pop('colname', None)
self.synonym = kwargs.pop('synonym', None)
self.deferred = kwargs.pop('deferred', False)
if 'required' in kwargs:
kwargs['nullable'] = not kwargs.pop('required')
self.type = type
self.primary_key = kwargs.get('primary_key', False)
self.column = None
self.property = None
self.args = args
self.kwargs = kwargs
def attach(self, entity, name):
# If no colname was defined (through the 'colname' kwarg), set
# it to the name of the attr.
if self.colname is None:
self.colname = name
super(Field, self).attach(entity, name)
def create_pk_cols(self):
if self.primary_key:
self.create_col()
def create_non_pk_cols(self):
if not self.primary_key:
self.create_col()
def create_col(self):
self.column = Column(self.colname, self.type,
*self.args, **self.kwargs)
self.add_table_column(self.column)
def create_properties(self):
if self.deferred:
group = None
if isinstance(self.deferred, basestring):
group = self.deferred
self.property = deferred(self.column, group=group)
elif self.name != self.colname:
# if the property name is different from the column name, we need
# to add an explicit property (otherwise nothing is needed as it's
# done automatically by SA)
self.property = self.column
if self.property is not None:
self.add_mapper_property(self.name, self.property)
if self.synonym:
self.add_mapper_property(self.synonym, synonym(self.name))
def has_field_handler(entity, name, *args, **kwargs):
if 'through' in kwargs:
setattr(entity, name,
association_proxy(kwargs.pop('through'),
kwargs.pop('attribute', name),
**kwargs))
return
field = Field(*args, **kwargs)
field.attach(entity, name)
has_field = ClassMutator(has_field_handler)
Elixir-0.7.1/elixir/properties.py 0000644 0001750 0001750 00000017634 11261351706 015112 0 ustar ged ged '''
This module provides support for defining properties on your entities. It
provides the `Property` class, which acts as a building block for common
properties such as fields and relationships (for those, please consult the
corresponding modules), as well as some more specialized properties,
such as `ColumnProperty` and `Synonym`. It also provides the GenericProperty
class which allows you to wrap any SQLAlchemy property, and its DSL-syntax
equivalent: has_property_.
`has_property`
--------------
The ``has_property`` statement allows you to define properties which rely on
their entity's table (and columns) being defined before they can be declared
themselves. The `has_property` statement takes two arguments: first the name of
the property to be defined and second a function (often given as an anonymous
lambda) taking one argument and returning the desired SQLAlchemy property. That
function will be called whenever the entity table is completely defined, and
will be given the .c attribute of the entity as argument (as a way to access
the entity columns).
Here is a quick example of how to use ``has_property``.
.. sourcecode:: python
class OrderLine(Entity):
has_field('quantity', Float)
has_field('unit_price', Float)
has_property('price',
lambda c: column_property(
(c.quantity * c.unit_price).label('price')))
'''
from elixir.statements import PropertyStatement
from sqlalchemy.orm import column_property, synonym
__doc_all__ = ['EntityBuilder', 'Property', 'GenericProperty',
'ColumnProperty']
class EntityBuilder(object):
'''
Abstract base class for all entity builders. An Entity builder is a class
of objects which can be added to an Entity (usually by using special
properties or statements) to "build" that entity. Building an entity
means adding columns to its "main" table, creating other tables, adding
properties to its mapper, and so on. To do so, an EntityBuilder must override
the corresponding method(s). This ensures the different operations happen
in the correct order (for example, that the table is fully created before
the mapper that uses it is defined).
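As an illustrative sketch (not part of the original documentation), a
minimal builder could override a single phase, here adding a column during
the 'create_non_pk_cols' phase. ``Column`` and ``DateTime`` are assumed to
be imported from SQLAlchemy, and such a builder would need to be appended
to the entity descriptor's ``builders`` list to take effect:
.. sourcecode:: python
class AddTimestampColumn(EntityBuilder):
def __init__(self, entity):
self.entity = entity
def create_non_pk_cols(self):
# add_table_column forwards the column to the entity's descriptor
self.add_table_column(Column('created_at', DateTime))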
'''
def create_pk_cols(self):
pass
def create_non_pk_cols(self):
pass
def before_table(self):
pass
def create_tables(self):
'''
Subclasses may override this method to create tables.
'''
def after_table(self):
pass
def create_properties(self):
'''
Subclasses may override this method to add properties to the involved
entity.
'''
def before_mapper(self):
pass
def after_mapper(self):
pass
def finalize(self):
pass
# helper methods
def add_table_column(self, column):
self.entity._descriptor.add_column(column)
def add_mapper_property(self, name, prop):
self.entity._descriptor.add_property(name, prop)
def add_mapper_extension(self, ext):
self.entity._descriptor.add_mapper_extension(ext)
class CounterMeta(type):
'''
A simple meta class which adds a ``_counter`` attribute to the instances of
the classes it is used on. This counter is simply incremented for each new
instance.
'''
counter = 0
def __call__(self, *args, **kwargs):
instance = type.__call__(self, *args, **kwargs)
instance._counter = CounterMeta.counter
CounterMeta.counter += 1
return instance
class Property(EntityBuilder):
'''
Abstract base class for all properties of an Entity.
'''
__metaclass__ = CounterMeta
def __init__(self, *args, **kwargs):
self.entity = None
self.name = None
def attach(self, entity, name):
"""Attach this property to its entity, using 'name' as name.
Properties will be attached in the order they were declared.
"""
self.entity = entity
self.name = name
# register this property as a builder
entity._descriptor.builders.append(self)
def __repr__(self):
return "Property(%s, %s)" % (self.name, self.entity)
class GenericProperty(Property):
'''
Generic catch-all class to wrap an SQLAlchemy property.
.. sourcecode:: python
class OrderLine(Entity):
quantity = Field(Float)
unit_price = Field(Numeric)
price = GenericProperty(lambda c: column_property(
(c.quantity * c.unit_price).label('price')))
'''
def __init__(self, prop, *args, **kwargs):
super(GenericProperty, self).__init__(*args, **kwargs)
self.prop = prop
#XXX: move this to Property?
self.args = args
self.kwargs = kwargs
def create_properties(self):
if hasattr(self.prop, '__call__'):
prop_value = self.prop(self.entity.table.c)
else:
prop_value = self.prop
prop_value = self.evaluate_property(prop_value)
self.add_mapper_property(self.name, prop_value)
def evaluate_property(self, prop):
if self.args or self.kwargs:
raise Exception('superfluous arguments passed to GenericProperty')
return prop
class ColumnProperty(GenericProperty):
'''
A specialized form of the GenericProperty to generate SQLAlchemy
``column_property``'s.
It takes a function (often given as an anonymous lambda) as its first
argument. Other arguments and keyword arguments are forwarded to the
column_property construct. That first-argument function must accept exactly
one argument and must return the desired (scalar-returning) SQLAlchemy
ClauseElement.
The function will be called whenever the entity table is completely
defined, and will be given
the .c attribute of the table of the entity as argument (as a way to
access the entity columns). The ColumnProperty will first wrap your
ClauseElement in an
"empty" label (ie it will be labelled automatically during queries),
then wrap that in a column_property.
.. sourcecode:: python
class OrderLine(Entity):
quantity = Field(Float)
unit_price = Field(Numeric)
price = ColumnProperty(lambda c: c.quantity * c.unit_price,
deferred=True)
Please look at the `corresponding SQLAlchemy
documentation `_ for details.
'''
def evaluate_property(self, prop):
return column_property(prop.label(None), *self.args, **self.kwargs)
class Synonym(GenericProperty):
'''
This class represents a synonym property of another property (column, ...)
of an entity. As opposed to the `synonym` kwarg to the Field class (which
shares the same goal), this class can be used to define a synonym of a
property defined in a parent class (of the current class). On the other
hand, it cannot define a synonym for the purpose of using a standard python
property in queries. See the Field class for details on that usage.
.. sourcecode:: python
class Person(Entity):
name = Field(String(30))
primary_email = Field(String(100))
email_address = Synonym('primary_email')
class User(Person):
user_name = Synonym('name')
password = Field(String(20))
'''
def evaluate_property(self, prop):
return synonym(prop, *self.args, **self.kwargs)
#class Composite(GenericProperty):
# def __init__(self, prop):
# super(GenericProperty, self).__init__()
# self.prop = prop
# def evaluate_property(self, prop):
# return composite(prop.label(self.name))
#start = Composite(Point, lambda c: (c.x1, c.y1))
#mapper(Vertex, vertices, properties={
# 'start':composite(Point, vertices.c.x1, vertices.c.y1),
# 'end':composite(Point, vertices.c.x2, vertices.c.y2)
#})
has_property = PropertyStatement(GenericProperty)
Elixir-0.7.1/elixir/entity.py 0000644 0001750 0001750 00000127040 11277334216 014227 0 ustar ged ged '''
This module provides the ``Entity`` base class, as well as its metaclass
``EntityMeta``.
'''
from py23compat import sorted
import sys
import types
import warnings
from copy import deepcopy
import sqlalchemy
from sqlalchemy import Table, Column, Integer, desc, ForeignKey, and_, \
ForeignKeyConstraint
from sqlalchemy.orm import MapperExtension, mapper, object_session, \
EXT_CONTINUE, polymorphic_union, ScopedSession, \
ColumnProperty
from sqlalchemy.sql import ColumnCollection
import elixir
from elixir.statements import process_mutators, MUTATORS
from elixir import options
from elixir.properties import Property
DEBUG = False
try:
from sqlalchemy.orm import EXT_PASS
SA05orlater = False
except ImportError:
SA05orlater = True
__doc_all__ = ['Entity', 'EntityMeta']
def session_mapper_factory(scoped_session):
def session_mapper(cls, *args, **kwargs):
if kwargs.pop('save_on_init', True):
old_init = cls.__init__
def __init__(self, *args, **kwargs):
old_init(self, *args, **kwargs)
scoped_session.add(self)
cls.__init__ = __init__
cls.query = scoped_session.query_property()
return mapper(cls, *args, **kwargs)
return session_mapper
class EntityDescriptor(object):
'''
EntityDescriptor describes fields and options needed for table creation.
'''
def __init__(self, entity):
self.entity = entity
self.parent = None
bases = []
for base in entity.__bases__:
if isinstance(base, EntityMeta):
if is_entity(base) and not is_abstract_entity(base):
if self.parent:
raise Exception(
'%s entity inherits from several entities, '
'and this is not supported.'
% self.entity.__name__)
else:
self.parent = base
bases.extend(base._descriptor.bases)
self.parent._descriptor.children.append(entity)
else:
bases.append(base)
self.bases = bases
if not is_entity(entity) or is_abstract_entity(entity):
return
# entity.__module__ is not always reliable (eg in mod_python)
self.module = sys.modules.get(entity.__module__)
self.builders = []
#XXX: use entity.__subclasses__ ?
self.children = []
# used for multi-table inheritance
self.join_condition = None
self.has_pk = False
self._pk_col_done = False
# columns and constraints waiting for a table to exist
self._columns = ColumnCollection()
self.constraints = []
# properties (it is only useful for checking dupe properties at the
# moment, and when adding properties before the mapper is created,
# which shouldn't happen).
self.properties = {}
#
self.relationships = []
# set default value for options
self.table_args = []
# base class(es) options_defaults
options_defaults = self.options_defaults()
complete_defaults = options.options_defaults.copy()
complete_defaults.update({
'metadata': elixir.metadata,
'session': elixir.session,
'collection': elixir.entities
})
# set default value for other options
for key in options.valid_options:
value = options_defaults.get(key, complete_defaults[key])
if isinstance(value, dict):
value = value.copy()
setattr(self, key, value)
# override options with module-level defaults defined
for key in ('metadata', 'session', 'collection'):
attr = '__%s__' % key
if hasattr(self.module, attr):
setattr(self, key, getattr(self.module, attr))
def options_defaults(self):
base_defaults = {}
for base in self.bases:
base_defaults.update(base._descriptor.options_defaults())
base_defaults.update(getattr(self.entity, 'options_defaults', {}))
return base_defaults
def setup_options(self):
'''
Set up any values that might depend on the "using_options" class
mutator. For example, the tablename or the metadata.
'''
elixir.metadatas.add(self.metadata)
if self.collection is not None:
self.collection.append(self.entity)
entity = self.entity
if self.parent:
if self.inheritance == 'single':
self.tablename = self.parent._descriptor.tablename
if not self.tablename:
if self.shortnames:
self.tablename = entity.__name__.lower()
else:
modulename = entity.__module__.replace('.', '_')
tablename = "%s_%s" % (modulename, entity.__name__)
self.tablename = tablename.lower()
elif hasattr(self.tablename, '__call__'):
self.tablename = self.tablename(entity)
if not self.identity:
if 'polymorphic_identity' in self.mapper_options:
self.identity = self.mapper_options['polymorphic_identity']
else:
#TODO: include module name (We could have b.Account inherit
# from a.Account)
self.identity = entity.__name__.lower()
elif 'polymorphic_identity' in self.mapper_options:
raise Exception('You cannot use the "identity" option and the '
'polymorphic_identity mapper option at the same '
'time.')
elif hasattr(self.identity, '__call__'):
self.identity = self.identity(entity)
if self.polymorphic:
if not isinstance(self.polymorphic, basestring):
self.polymorphic = options.DEFAULT_POLYMORPHIC_COL_NAME
#---------------------
# setup phase methods
def setup_autoload_table(self):
self.setup_table(True)
def create_pk_cols(self):
"""
Create primary_key columns. That is, call the 'create_pk_cols'
builders then add a primary key to the table if it hasn't already got
one and needs one.
This method is "semi-recursive" in some cases: it calls the
create_keys method on ManyToOne relationships and those in turn call
create_pk_cols on their target. It shouldn't be possible to have an
infinite loop since a loop of primary_keys is not a valid situation.
"""
if self._pk_col_done:
return
self.call_builders('create_pk_cols')
if not self.autoload:
if self.parent:
if self.inheritance == 'multi':
# Add columns with foreign keys to the parent's primary
# key columns
parent_desc = self.parent._descriptor
tablename = parent_desc.table_fullname
join_clauses = []
for pk_col in parent_desc.primary_keys:
colname = options.MULTIINHERITANCECOL_NAMEFORMAT % \
{'entity': self.parent.__name__.lower(),
'key': pk_col.key}
# It seems like SA ForeignKey is not happy being given
# a real column object when said column is not yet
# attached to a table
pk_col_name = "%s.%s" % (tablename, pk_col.key)
fk = ForeignKey(pk_col_name, ondelete='cascade')
col = Column(colname, pk_col.type, fk,
primary_key=True)
self.add_column(col)
join_clauses.append(col == pk_col)
self.join_condition = and_(*join_clauses)
elif self.inheritance == 'concrete':
# Copy primary key columns from the parent.
for col in self.parent._descriptor.columns:
if col.primary_key:
self.add_column(col.copy())
elif not self.has_pk and self.auto_primarykey:
if isinstance(self.auto_primarykey, basestring):
colname = self.auto_primarykey
else:
colname = options.DEFAULT_AUTO_PRIMARYKEY_NAME
self.add_column(
Column(colname, options.DEFAULT_AUTO_PRIMARYKEY_TYPE,
primary_key=True))
self._pk_col_done = True
def setup_relkeys(self):
self.call_builders('create_non_pk_cols')
def before_table(self):
self.call_builders('before_table')
def setup_table(self, only_autoloaded=False):
'''
Create a SQLAlchemy table-object with all columns that have been
defined up to this point.
'''
if self.entity.table is not None:
return
if self.autoload != only_autoloaded:
return
kwargs = self.table_options
if self.autoload:
args = self.table_args
kwargs['autoload'] = True
else:
if self.parent:
if self.inheritance == 'single':
# we know the parent is setup before the child
self.entity.table = self.parent.table
# re-add the entity columns to the parent entity so that
# they are added to the parent's table (whether the
# parent's table is already setup or not).
for col in self._columns:
self.parent._descriptor.add_column(col)
for constraint in self.constraints:
self.parent._descriptor.add_constraint(constraint)
return
elif self.inheritance == 'concrete':
#TODO: we should also copy columns from the parent table
# if the parent is a base (abstract?) entity (whatever the
# inheritance type -> elif will need to be changed)
# Copy all non-primary key columns from parent table
# (primary key columns have already been copied earlier).
for col in self.parent._descriptor.columns:
if not col.primary_key:
self.add_column(col.copy())
for con in self.parent._descriptor.constraints:
self.add_constraint(
ForeignKeyConstraint(
[e.parent.key for e in con.elements],
[e._get_colspec() for e in con.elements],
name=con.name, #TODO: modify it
onupdate=con.onupdate, ondelete=con.ondelete,
use_alter=con.use_alter))
if self.polymorphic and \
self.inheritance in ('single', 'multi') and \
self.children and not self.parent:
self.add_column(Column(self.polymorphic,
options.POLYMORPHIC_COL_TYPE))
if self.version_id_col:
if not isinstance(self.version_id_col, basestring):
self.version_id_col = options.DEFAULT_VERSION_ID_COL_NAME
self.add_column(Column(self.version_id_col, Integer))
args = list(self.columns) + self.constraints + self.table_args
self.entity.table = Table(self.tablename, self.metadata,
*args, **kwargs)
if DEBUG:
print self.entity.table.repr2()
def setup_reltables(self):
self.call_builders('create_tables')
def after_table(self):
self.call_builders('after_table')
def setup_events(self):
def make_proxy_method(methods):
def proxy_method(self, mapper, connection, instance):
for func in methods:
ret = func(instance)
# I couldn't commit myself to force people to
# systematically return EXT_CONTINUE in all their event
# methods.
# But not doing so diverges from how SQLAlchemy works.
# I should try to convince Mike to do EXT_CONTINUE by
# default, and stop processing as the special case.
# if ret != EXT_CONTINUE:
if ret is not None and ret != EXT_CONTINUE:
return ret
return EXT_CONTINUE
return proxy_method
# create a list of callbacks for each event
methods = {}
all_methods = getmembers(self.entity,
lambda a: isinstance(a, types.MethodType))
for name, method in all_methods:
for event in getattr(method, '_elixir_events', []):
event_methods = methods.setdefault(event, [])
event_methods.append(method)
if not methods:
return
# transform that list into methods themselves
for event in methods:
methods[event] = make_proxy_method(methods[event])
# create a custom mapper extension class, tailored to our entity
ext = type('EventMapperExtension', (MapperExtension,), methods)()
# then, make sure that the entity's mapper has our mapper extension
self.add_mapper_extension(ext)
def before_mapper(self):
self.call_builders('before_mapper')
def _get_children(self):
children = self.children[:]
for child in self.children:
children.extend(child._descriptor._get_children())
return children
def translate_order_by(self, order_by):
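# order_by accepts a column name or a list of column names; a leading '-'
# requests descending order, e.g. (illustrative) ['-birthdate', 'name']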
if isinstance(order_by, basestring):
order_by = [order_by]
order = []
for colname in order_by:
col = self.get_column(colname.strip('-'))
if colname.startswith('-'):
col = desc(col)
order.append(col)
return order
def setup_mapper(self):
'''
Initialize and assign a mapper to the entity.
At this point the mapper will usually have no properties, as they are
added later.
'''
if self.entity.mapper:
return
# for now we don't support the "abstract" parent class in a concrete
# inheritance scenario as demonstrated in
# sqlalchemy/test/orm/inheritance/concrete.py
# this should be added along other
kwargs = {}
if self.order_by:
kwargs['order_by'] = self.translate_order_by(self.order_by)
if self.version_id_col:
kwargs['version_id_col'] = self.get_column(self.version_id_col)
if self.inheritance in ('single', 'concrete', 'multi'):
if self.parent and \
(self.inheritance != 'concrete' or self.polymorphic):
# non-polymorphic concrete doesn't need this
kwargs['inherits'] = self.parent.mapper
if self.inheritance == 'multi' and self.parent:
kwargs['inherit_condition'] = self.join_condition
if self.polymorphic:
if self.children:
if self.inheritance == 'concrete':
keys = [(self.identity, self.entity.table)]
keys.extend([(child._descriptor.identity, child.table)
for child in self._get_children()])
# Having the same alias name for an entity and one of
# its children (which is itself a parent) shouldn't cause
# any problem because the join shouldn't be used at
# the same time. But in reality, some versions of SA
# do misbehave on this. Since it doesn't hurt to have
# different names anyway, here they go.
pjoin = polymorphic_union(
dict(keys), self.polymorphic,
'pjoin_%s' % self.identity)
kwargs['with_polymorphic'] = ('*', pjoin)
kwargs['polymorphic_on'] = \
getattr(pjoin.c, self.polymorphic)
elif not self.parent:
kwargs['polymorphic_on'] = \
self.get_column(self.polymorphic)
if self.children or self.parent:
kwargs['polymorphic_identity'] = self.identity
if self.parent and self.inheritance == 'concrete':
kwargs['concrete'] = True
if self.parent and self.inheritance == 'single':
args = []
else:
args = [self.entity.table]
# let user-defined kwargs override Elixir-generated ones, though that's
# not very useful since most of them expect Column instances.
kwargs.update(self.mapper_options)
#TODO: document this!
if 'primary_key' in kwargs:
cols = self.entity.table.c
kwargs['primary_key'] = [getattr(cols, colname) for
colname in kwargs['primary_key']]
# do the mapping
if self.session is None:
self.entity.mapper = mapper(self.entity, *args, **kwargs)
elif isinstance(self.session, ScopedSession):
session_mapper = session_mapper_factory(self.session)
self.entity.mapper = session_mapper(self.entity, *args, **kwargs)
else:
raise Exception("Failed to map entity '%s' with its table or "
"selectable. You can only bind an Entity to a "
"ScopedSession object or None for manual session "
"management."
% self.entity.__name__)
def after_mapper(self):
self.call_builders('after_mapper')
def setup_properties(self):
self.call_builders('create_properties')
def finalize(self):
self.call_builders('finalize')
self.entity._setup_done = True
#----------------
# helper methods
def call_builders(self, what):
for builder in self.builders:
if hasattr(builder, what):
getattr(builder, what)()
def add_column(self, col, check_duplicate=None):
'''When check_duplicate is None, the value of the allowcoloverride
option of the entity is used.
'''
if check_duplicate is None:
check_duplicate = not self.allowcoloverride
if col.key in self._columns:
if check_duplicate:
raise Exception("Column '%s' already exist in '%s' ! " %
(col.key, self.entity.__name__))
else:
del self._columns[col.key]
self._columns.add(col)
if col.primary_key:
self.has_pk = True
# Autosetup triggers shouldn't be active anymore at this point, so we
# can theoretically access the entity's table safely. But the problem
# is that if, for some reason, the trigger removal phase didn't
# happen, we'll get an infinite loop. So we just make sure we don't
# get one in any case.
table = type.__getattribute__(self.entity, 'table')
if table is not None:
if check_duplicate and col.key in table.columns.keys():
raise Exception("Column '%s' already exist in table '%s' ! " %
(col.key, table.name))
table.append_column(col)
if DEBUG:
print "table.append_column(%s)" % col
def add_constraint(self, constraint):
self.constraints.append(constraint)
table = self.entity.table
if table is not None:
table.append_constraint(constraint)
def add_property(self, name, property, check_duplicate=True):
if check_duplicate and name in self.properties:
raise Exception("property '%s' already exist in '%s' ! " %
(name, self.entity.__name__))
self.properties[name] = property
#FIXME: something like this is needed to propagate the relationships from
# parent entities to their children in a concrete inheritance scenario. But
# this doesn't work because of the backref matching code. In most cases
# (test_concrete.py) it doesn't even happen at all.
# if self.children and self.inheritance == 'concrete':
# for child in self.children:
# child._descriptor.add_property(name, property)
mapper = self.entity.mapper
if mapper:
mapper.add_property(name, property)
if DEBUG:
print "mapper.add_property('%s', %s)" % (name, repr(property))
def add_mapper_extension(self, extension):
extensions = self.mapper_options.get('extension', [])
if not isinstance(extensions, list):
extensions = [extensions]
extensions.append(extension)
self.mapper_options['extension'] = extensions
def get_column(self, key, check_missing=True):
#TODO: this needs to work whether the table is already setup or not
#TODO: support SA table/autoloaded entity
try:
return self.columns[key]
except KeyError:
if check_missing:
raise Exception("No column named '%s' found in the table of "
"the '%s' entity!"
% (key, self.entity.__name__))
def get_inverse_relation(self, rel, check_reverse=True):
'''
Return the inverse relation of rel, if any, None otherwise.
'''
matching_rel = None
for other_rel in self.relationships:
if rel.is_inverse(other_rel):
if matching_rel is None:
matching_rel = other_rel
else:
raise Exception(
"Several relations match as inverse of the '%s' "
"relation in entity '%s'. You should specify "
"inverse relations manually by using the inverse "
"keyword."
% (rel.name, rel.entity.__name__))
# When a matching inverse is found, we check that it has only
# one relation matching as its own inverse. We don't need the result
# of the method though. But we do need to be careful not to start an
# infinite recursive loop.
if matching_rel and check_reverse:
rel.entity._descriptor.get_inverse_relation(matching_rel, False)
return matching_rel
def find_relationship(self, name):
for rel in self.relationships:
if rel.name == name:
return rel
if self.parent:
return self.parent._descriptor.find_relationship(name)
else:
return None
#------------------------
# some useful properties
def table_fullname(self):
'''
Complete name of the table for the related entity.
Includes the schema name if there is one specified.
'''
schema = self.table_options.get('schema', None)
if schema is not None:
return "%s.%s" % (schema, self.tablename)
else:
return self.tablename
table_fullname = property(table_fullname)
def columns(self):
if self.entity.table is not None:
return self.entity.table.columns
else:
#FIXME: depending on the type of inheritance, we should also
# return the parent entity's columns (for example for order_by
# using a column defined in the parent).
return self._columns
columns = property(columns)
def primary_keys(self):
"""
Returns the list of primary key columns of the entity.
This property isn't valid before the "create_pk_cols" phase.
"""
if self.autoload:
return [col for col in self.entity.table.primary_key.columns]
else:
if self.parent and self.inheritance == 'single':
return self.parent._descriptor.primary_keys
else:
return [col for col in self.columns if col.primary_key]
primary_keys = property(primary_keys)
def table(self):
if self.entity.table is not None:
return self.entity.table
else:
return FakeTable(self)
table = property(table)
def primary_key_properties(self):
"""
Returns the list of (mapper) properties corresponding to the primary
key columns of the table of the entity.
This property caches its value, so it shouldn't be called before the
entity is fully set up.
"""
if not hasattr(self, '_pk_props'):
col_to_prop = {}
mapper = self.entity.mapper
for prop in mapper.iterate_properties:
if isinstance(prop, ColumnProperty):
for col in prop.columns:
for col in col.proxy_set:
col_to_prop[col] = prop
pk_cols = [c for c in mapper.mapped_table.c if c.primary_key]
self._pk_props = [col_to_prop[c] for c in pk_cols]
return self._pk_props
primary_key_properties = property(primary_key_properties)
class FakePK(object):
def __init__(self, descriptor):
self.descriptor = descriptor
def columns(self):
return self.descriptor.primary_keys
columns = property(columns)
class FakeTable(object):
def __init__(self, descriptor):
self.descriptor = descriptor
self.primary_key = FakePK(descriptor)
def columns(self):
return self.descriptor.columns
columns = property(columns)
def fullname(self):
'''
Complete name of the table for the related entity.
Includes the schema name if there is one specified.
'''
schema = self.descriptor.table_options.get('schema', None)
if schema is not None:
return "%s.%s" % (schema, self.descriptor.tablename)
else:
return self.descriptor.tablename
fullname = property(fullname)
class TriggerProxy(object):
"""
A class that serves as a "trigger"; accessing its attributes runs
the setup_all function.
Note that `setup_all` is called on each access of the attribute.
"""
def __init__(self, class_, attrname):
self.class_ = class_
self.attrname = attrname
def __getattr__(self, name):
elixir.setup_all()
#FIXME: it's possible to get an infinite loop here if setup_all doesn't
#remove the triggers for this entity. This can happen if the entity is
#not in the `entities` list for some reason.
proxied_attr = getattr(self.class_, self.attrname)
return getattr(proxied_attr, name)
def __repr__(self):
proxied_attr = getattr(self.class_, self.attrname)
return "" % (self.class_.__name__)
class TriggerAttribute(object):
def __init__(self, attrname):
self.attrname = attrname
def __get__(self, instance, owner):
#FIXME: it's possible to get an infinite loop here if setup_all doesn't
#remove the triggers for this entity. This can happen if the entity is
#not in the `entities` list for some reason.
elixir.setup_all()
return getattr(owner, self.attrname)
def is_entity(cls):
"""
Scan the base classes of `cls` to see if any is an instance of
EntityMeta. If we don't find any, it means it is either an unrelated class
or an entity base class (like the 'Entity' class).
"""
for base in cls.__bases__:
if isinstance(base, EntityMeta):
return True
return False
# Note that we don't use inspect.getmembers because of
# http://bugs.python.org/issue1785
# See also http://elixir.ematia.de/trac/changeset/262
def getmembers(object, predicate=None):
base_props = []
for key in dir(object):
try:
value = getattr(object, key)
except AttributeError:
continue
if not predicate or predicate(value):
base_props.append((key, value))
return base_props
def is_abstract_entity(dict_or_cls):
if not isinstance(dict_or_cls, dict):
dict_or_cls = dict_or_cls.__dict__
for mutator, args, kwargs in dict_or_cls.get(MUTATORS, []):
if 'abstract' in kwargs:
return kwargs['abstract']
return False
def instrument_class(cls):
"""
Instrument a class as an Entity. This is usually done automatically through
the EntityMeta metaclass.
"""
# Create the entity descriptor
desc = cls._descriptor = EntityDescriptor(cls)
# Process mutators
# We *do* want mutators to be processed for base/abstract classes
# (so that statements like using_options_defaults work).
process_mutators(cls)
# We do not want to do any more processing for base/abstract classes
# (Entity et al.).
if not is_entity(cls) or is_abstract_entity(cls):
return
cls.table = None
cls.mapper = None
# Copy the properties ('Property' instances) of the entity base class(es).
# We use getmembers (instead of __dict__) so that we also get the
# properties from the parents of the base class if any.
base_props = []
for base in cls.__bases__:
if isinstance(base, EntityMeta) and \
(not is_entity(base) or is_abstract_entity(base)):
base_props += [(name, deepcopy(attr)) for name, attr in
getmembers(base, lambda a: isinstance(a, Property))]
# Process attributes (using the assignment syntax), looking for
# 'Property' instances and attaching them to this entity.
properties = [(name, attr) for name, attr in cls.__dict__.iteritems()
if isinstance(attr, Property)]
sorted_props = sorted(base_props + properties,
key=lambda i: i[1]._counter)
for name, prop in sorted_props:
prop.attach(cls, name)
# setup misc options here (like tablename etc.)
desc.setup_options()
# create trigger proxies
# TODO: support entity_name... It makes sense only for autoloaded
# tables for now, and would make more sense if we support "external"
# tables
if desc.autosetup:
_install_autosetup_triggers(cls)
class EntityMeta(type):
"""
Entity meta class.
You should only use it directly if you want to define your own base class
for your entities (ie you don't want to use the provided 'Entity' class).
"""
def __init__(cls, name, bases, dict_):
instrument_class(cls)
def __call__(cls, *args, **kwargs):
if cls._descriptor.autosetup and not hasattr(cls, '_setup_done'):
elixir.setup_all()
return type.__call__(cls, *args, **kwargs)
def __setattr__(cls, key, value):
if isinstance(value, Property):
if hasattr(cls, '_setup_done'):
raise Exception('Cannot set attribute on a class after '
'setup_all')
else:
value.attach(cls, key)
else:
type.__setattr__(cls, key, value)
def _install_autosetup_triggers(cls, entity_name=None):
#TODO: move as much as possible of those "_private" values to the
# descriptor, so that we don't mess the initial class.
warnings.warn("The 'autosetup' option on entities is deprecated. "
"Please call setup_all() manually after all your entities have been "
"declared.", DeprecationWarning, stacklevel=4)
tablename = cls._descriptor.tablename
schema = cls._descriptor.table_options.get('schema', None)
cls._table_key = sqlalchemy.schema._get_table_key(tablename, schema)
table_proxy = TriggerProxy(cls, 'table')
md = cls._descriptor.metadata
md.tables[cls._table_key] = table_proxy
# We need to monkeypatch the metadata's table iterator method because
# otherwise it doesn't work if the setup is triggered by the
# metadata.create_all().
# This is because ManyToMany relationships add tables AFTER the list
# of tables that are going to be created is "computed"
# (metadata.tables.values()).
# see:
# - table_iterator method in MetaData class in sqlalchemy/schema.py
# - visit_metadata method in sqlalchemy/ansisql.py
if SA05orlater:
warnings.warn(
"The automatic setup via metadata.create_all() through "
"the autosetup option doesn't work with SQLAlchemy 0.5 and later!")
else:
# SA 0.6 does not use table_iterator anymore (it was already deprecated
# since SA 0.5.0)
original_table_iterator = md.table_iterator
if not hasattr(original_table_iterator,
'_non_elixir_patched_iterator'):
def table_iterator(*args, **kwargs):
elixir.setup_all()
return original_table_iterator(*args, **kwargs)
table_iterator.__doc__ = original_table_iterator.__doc__
table_iterator._non_elixir_patched_iterator = \
original_table_iterator
md.table_iterator = table_iterator
#TODO: we might want to add all columns that will be available as
#attributes on the class itself (in SA 0.4+). This is a pretty
#rare usecase, as people will normally hit the query attribute before the
#column attributes, but I've seen people hitting this problem...
for name in ('c', 'table', 'mapper', 'query'):
setattr(cls, name, TriggerAttribute(name))
cls._has_triggers = True
def _cleanup_autosetup_triggers(cls):
if not hasattr(cls, '_has_triggers'):
return
for name in ('table', 'mapper'):
setattr(cls, name, None)
for name in ('c', 'query'):
delattr(cls, name)
desc = cls._descriptor
md = desc.metadata
# the fake table could have already been removed (namely in a
# single table inheritance scenario)
md.tables.pop(cls._table_key, None)
# restore original table iterator if not done already
if not SA05orlater:
if hasattr(md.table_iterator, '_non_elixir_patched_iterator'):
md.table_iterator = \
md.table_iterator._non_elixir_patched_iterator
del cls._has_triggers
def setup_entities(entities):
'''Setup all entities in the list passed as argument'''
for entity in entities:
# delete all Elixir properties so that they don't interfere with
# SQLAlchemy. At this point they should have been converted to
# builders.
for name, attr in entity.__dict__.items():
if isinstance(attr, Property):
delattr(entity, name)
if entity._descriptor.autosetup:
_cleanup_autosetup_triggers(entity)
for method_name in (
'setup_autoload_table', 'create_pk_cols', 'setup_relkeys',
'before_table', 'setup_table', 'setup_reltables', 'after_table',
'setup_events',
'before_mapper', 'setup_mapper', 'after_mapper',
'setup_properties',
'finalize'):
# if DEBUG:
# print "=" * 40
# print method_name
# print "=" * 40
for entity in entities:
# print entity.__name__, "...",
if hasattr(entity, '_setup_done'):
# print "already done"
continue
method = getattr(entity._descriptor, method_name)
method()
# print "ok"
def cleanup_entities(entities):
"""
Try to revert the list of entities passed as argument to the state
they had just before their setup phase. It will not work entirely for
autosetup entities as we need to remove the autosetup triggers.
As of now, this function is *not* functional in that it doesn't revert to
the exact same state the entities were in before setup. For example, the
properties do not work yet as those would need to be regenerated (since the
columns they are based on are regenerated too -- and as such the
corresponding joins are not correct) but this doesn't happen because of
the way relationship setup is designed to be called only once (especially
the backref stuff in create_properties).
"""
for entity in entities:
desc = entity._descriptor
if desc.autosetup:
_cleanup_autosetup_triggers(entity)
if hasattr(entity, '_setup_done'):
del entity._setup_done
entity.table = None
entity.mapper = None
desc._pk_col_done = False
desc.has_pk = False
desc._columns = ColumnCollection()
desc.constraints = []
desc.properties = {}
class EntityBase(object):
"""
This class holds all methods of the "Entity" base class, but does not act
as a base class itself (it does not use the EntityMeta metaclass); rather,
it acts as a parent class for Entity. This is meant so that people who want
to provide their own base class but don't want to lose or copy-paste all
the methods of Entity can do so by inheriting from EntityBase:
.. sourcecode:: python
class MyBase(EntityBase):
__metaclass__ = EntityMeta
def myCustomMethod(self):
# do something great
"""
def __init__(self, **kwargs):
self.set(**kwargs)
def set(self, **kwargs):
for key, value in kwargs.iteritems():
setattr(self, key, value)
def update_or_create(cls, data, surrogate=True):
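"""
Look up a record using the primary key values present in `data` and
update it, or create a new record. With ``surrogate=True`` (the
default), the primary key is considered database-generated: a new
record is created when `data` carries no complete primary key, and it
is an error for `data` to carry a primary key matching no existing
record.
"""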
pk_props = cls._descriptor.primary_key_properties
# if all pk are present and not None
if not [1 for p in pk_props if data.get(p.key) is None]:
pk_tuple = tuple([data[prop.key] for prop in pk_props])
record = cls.query.get(pk_tuple)
if record is None:
if surrogate:
raise Exception("cannot create surrogate with pk")
else:
record = cls()
else:
if surrogate:
record = cls()
else:
raise Exception("cannot create non surrogate without pk")
record.from_dict(data)
return record
update_or_create = classmethod(update_or_create)
def from_dict(self, data):
"""
Update a mapped class with data from a JSON-style nested dict/list
structure.
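A sketch of the expected input shape (the entity, relation and field
names are purely illustrative):
.. sourcecode:: python
person.from_dict({'name': u'John',
'address': {'street': u'Main Street'}})
Scalar values are set as plain attributes, nested dicts update or create
the related single record, and lists of dicts update or create the
records of a list relationship.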
"""
# surrogate can be guessed from autoincrement/sequence but I guess
# that's not 100% reliable, so we'll need an override
mapper = sqlalchemy.orm.object_mapper(self)
for key, value in data.iteritems():
if isinstance(value, dict):
dbvalue = getattr(self, key)
rel_class = mapper.get_property(key).mapper.class_
pk_props = rel_class._descriptor.primary_key_properties
# If the data doesn't contain any pk, and the relationship
# already has a value, update that record.
if not [1 for p in pk_props if p.key in data] and \
dbvalue is not None:
dbvalue.from_dict(value)
else:
record = rel_class.update_or_create(value)
setattr(self, key, record)
elif isinstance(value, list) and \
value and isinstance(value[0], dict):
rel_class = mapper.get_property(key).mapper.class_
new_attr_value = []
for row in value:
if not isinstance(row, dict):
raise Exception(
'Cannot send mixed (dict/non dict) data '
'to list relationships in from_dict data.')
record = rel_class.update_or_create(row)
new_attr_value.append(record)
setattr(self, key, new_attr_value)
else:
setattr(self, key, value)
def to_dict(self, deep={}, exclude=[]):
"""Generate a JSON-style nested dict/list structure from an object."""
col_prop_names = [p.key for p in self.mapper.iterate_properties \
if isinstance(p, ColumnProperty)]
data = dict([(name, getattr(self, name))
for name in col_prop_names if name not in exclude])
for rname, rdeep in deep.iteritems():
dbdata = getattr(self, rname)
#FIXME: use attribute names (ie coltoprop) instead of column names
fks = self.mapper.get_property(rname).remote_side
exclude = [c.name for c in fks]
if dbdata is None:
data[rname] = None
elif isinstance(dbdata, list):
data[rname] = [o.to_dict(rdeep, exclude) for o in dbdata]
else:
data[rname] = dbdata.to_dict(rdeep, exclude)
return data
# session methods
def flush(self, *args, **kwargs):
return object_session(self).flush([self], *args, **kwargs)
def delete(self, *args, **kwargs):
return object_session(self).delete(self, *args, **kwargs)
def expire(self, *args, **kwargs):
return object_session(self).expire(self, *args, **kwargs)
def refresh(self, *args, **kwargs):
return object_session(self).refresh(self, *args, **kwargs)
def expunge(self, *args, **kwargs):
return object_session(self).expunge(self, *args, **kwargs)
# This bunch of session methods, along with all the query methods below
# only make sense when using a global/scoped/contextual session.
def _global_session(self):
return self._descriptor.session.registry()
_global_session = property(_global_session)
def merge(self, *args, **kwargs):
return self._global_session.merge(self, *args, **kwargs)
def save(self, *args, **kwargs):
return self._global_session.save(self, *args, **kwargs)
def update(self, *args, **kwargs):
return self._global_session.update(self, *args, **kwargs)
# only exist in SA < 0.5
# IMO, the replacement (session.add) doesn't sound good enough to be added
# here. For example: "o = Order(); o.add()" is not very telling. It's
# better to leave it as "session.add(o)"
def save_or_update(self, *args, **kwargs):
return self._global_session.save_or_update(self, *args, **kwargs)
# query methods
def get_by(cls, *args, **kwargs):
"""
Returns the first instance of this class matching the given criteria.
This is equivalent to:
session.query(MyClass).filter_by(...).first()
"""
return cls.query.filter_by(*args, **kwargs).first()
get_by = classmethod(get_by)
def get(cls, *args, **kwargs):
"""
Return the instance of this class based on the given identifier,
or None if not found. This is equivalent to:
session.query(MyClass).get(...)
"""
return cls.query.get(*args, **kwargs)
get = classmethod(get)
class Entity(EntityBase):
'''
The base class for all entities
All Elixir model objects should inherit from this class. Statements can
appear within the body of the definition of an entity to define its
fields, relationships, and other options.
Here is an example:
.. sourcecode:: python
class Person(Entity):
name = Field(Unicode(128))
birthdate = Field(DateTime, default=datetime.now)
Please note, that if you don't specify any primary keys, Elixir will
automatically create one called ``id``.
For further information, please refer to the provided examples or
tutorial.
'''
__metaclass__ = EntityMeta
Elixir-0.7.1/elixir/options.py 0000644 0001750 0001750 00000035663 11277270647 014426 0 ustar ged ged '''
This module provides support for defining several options on your Elixir
entities. There are three different kinds of options that can be set
up, and for this there are three different statements: using_options_,
using_table_options_ and using_mapper_options_.
Alternatively, these options can be set on all Elixir entities by modifying
the `options_defaults` dictionary before defining any entity.
`using_options`
---------------
The 'using_options' DSL statement allows you to set up some additional
behaviors on your model objects, including table names, ordering, and
more. To specify an option, simply supply the option as a keyword
argument to the statement, as follows:
.. sourcecode:: python
class Person(Entity):
name = Field(Unicode(64))
using_options(shortnames=True, order_by='name')
The list of supported arguments is as follows:
+---------------------+-------------------------------------------------------+
| Option Name | Description |
+=====================+=======================================================+
| ``inheritance`` | Specify the type of inheritance this entity must use. |
| | It can be one of ``single``, ``concrete`` or |
| | ``multi``. Defaults to ``single``. |
| | Note that polymorphic concrete inheritance is |
| | currently not implemented. See: |
| | http://www.sqlalchemy.org/docs/05/mappers.html |
| | #mapping-class-inheritance-hierarchies for an |
| | explanation of the different kinds of inheritances. |
+---------------------+-------------------------------------------------------+
| ``abstract`` | Set ``abstract=True`` to declare an abstract entity. |
| | Abstract base classes are useful when you want to put |
| | some common information into a number of other |
| | entities. An abstract entity will not be used to create |
| | any database table. Instead, when it is used as a base |
| | class for another entity, its fields will be added to |
| | those of the child class. |
+---------------------+-------------------------------------------------------+
| ``polymorphic`` | Whether the inheritance should be polymorphic or not. |
| | Defaults to ``True``. The column used to store the |
| | type of each row is named "row_type" by default. You |
| | can change this by passing the desired name for the |
| | column to this argument. |
+---------------------+-------------------------------------------------------+
| ``identity`` | Specify a custom polymorphic identity. When using |
| | polymorphic inheritance, this value (usually a |
| | string) will represent this particular entity (class) |
| | . It will be used to differentiate it from other |
| | entities (classes) in your inheritance hierarchy when |
| | loading from the database instances of different |
| | entities in that hierarchy at the same time. |
| | This value will be stored by default in the |
| | "row_type" column of the entity's table (see above). |
| | You can either provide a |
| | plain string or a callable. The callable will be |
| | given the entity (ie class) as argument and must |
| | return a value (usually a string) representing the |
| | polymorphic identity of that entity. |
| | By default, this value is automatically generated: it |
| | is the name of the entity lower-cased. |
+---------------------+-------------------------------------------------------+
| ``metadata`` | Specify a custom MetaData for this entity. |
| | By default, entities use the global |
| | ``elixir.metadata``. |
| | This option can also be set for all entities of a |
| | module by setting the ``__metadata__`` attribute of |
| | that module. |
+---------------------+-------------------------------------------------------+
| ``autoload`` | Automatically load column definitions from the |
| | existing database table. |
+---------------------+-------------------------------------------------------+
| ``tablename`` | Specify a custom tablename. You can either provide a |
| | plain string or a callable. The callable will be |
| | given the entity (ie class) as argument and must |
| | return a string representing the name of the table |
| | for that entity. By default, the tablename is |
| | automatically generated: it is a concatenation of the |
| | full module-path to the entity and the entity (class) |
| | name itself. The result is lower-cased and separated |
| | by underscores ("_"), eg.: for an entity named |
| | "MyEntity" in the module "project1.model", the |
| | generated table name will be |
| | "project1_model_myentity". |
+---------------------+-------------------------------------------------------+
| ``shortnames`` | Specify whether or not the automatically generated |
| | table names include the full module-path |
| | to the entity. If ``shortnames`` is ``True``, only |
| | the entity name is used. Defaults to ``False``. |
+---------------------+-------------------------------------------------------+
| ``auto_primarykey`` | If given as string, it will represent the |
| | auto-primary-key's column name. If this option |
| | is True, it will allow auto-creation of a primary |
| | key if there's no primary key defined for the |
| | corresponding entity. If this option is False, |
| | it will disallow auto-creation of a primary key. |
| | Defaults to ``True``. |
+---------------------+-------------------------------------------------------+
| ``version_id_col`` | If this option is True, it will create a version |
| | column automatically using the default name. If given |
| | as string, it will create the column using that name. |
| | This can be used to prevent concurrent modifications |
| | to the entity's table rows (i.e. it will raise an |
| | exception if it happens). Defaults to ``False``. |
+---------------------+-------------------------------------------------------+
| ``order_by`` | How to order select results. Either a string or a |
| | list of strings, composed of the field name, |
| | optionally lead by a minus (for descending order). |
+---------------------+-------------------------------------------------------+
| ``session`` | Specify a custom contextual session for this entity. |
| | By default, entities use the global |
| | ``elixir.session``. |
| | This option takes a ``ScopedSession`` object or |
| | ``None``. In the latter case your entity will be |
| | mapped using a non-contextual mapper which requires |
| | manual session management, as seen in pure SQLAlchemy.|
| | This option can also be set for all entities of a |
| | module by setting the ``__session__`` attribute of |
| | that module. |
+---------------------+-------------------------------------------------------+
| ``autosetup`` | DEPRECATED. Specify whether that entity will contain |
| | automatic setup triggers. |
| | That is, whether this entity will be |
| | automatically set up (along with all other entities |
| | which were already declared) if any of the following |
| | conditions happens: one of its attributes ('c', |
| | 'table', 'mapper' or 'query') is accessed, the entity |
| | is instantiated (called), or the create_all method of |
| | this entity's metadata is called. Defaults to ``False``. |
+---------------------+-------------------------------------------------------+
| ``allowcoloverride``| Specify whether it is allowed to override columns. |
| | By default, Elixir forbids you to add a column to an |
| | entity's table which already exists in that table. If |
| | you set this option to ``True`` it will skip that |
| | check. Use with care as it is easy to shoot oneself |
| | in the foot when overriding columns. |
+---------------------+-------------------------------------------------------+
For examples, please refer to the examples and unit tests.
`using_table_options`
---------------------
The 'using_table_options' DSL statement allows you to set up some
additional options on your entity table. It is meant only to handle the
options which are not supported directly by the 'using_options' statement.
Unlike the 'using_options' statement, these options are passed
directly to the underlying SQLAlchemy Table object (both non-keyword arguments
and keyword arguments) without any processing.
For further information, please refer to the `SQLAlchemy table's documentation
`_.
You might also be interested in the section about `constraints
`_.
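For example (an illustrative sketch; ``UniqueConstraint`` is assumed to be
imported from SQLAlchemy):
.. sourcecode:: python
class Person(Entity):
name = Field(Unicode(64))
using_table_options(UniqueConstraint('name'), mysql_engine='InnoDB')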
`using_mapper_options`
----------------------
The 'using_mapper_options' DSL statement allows you to set up some
additional options on your entity mapper. It is meant only to handle the
options which are not supported directly by the 'using_options' statement.
Unlike the 'using_options' statement, these options are passed
directly to the underlying SQLAlchemy mapper (as keyword arguments)
without any processing.
For further information, please refer to the `SQLAlchemy mapper
function's documentation
`_.
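For example (an illustrative sketch):
.. sourcecode:: python
class Person(Entity):
name = Field(Unicode(64))
using_mapper_options(save_on_init=False)
Here ``save_on_init`` is consumed by the session-aware mapper Elixir uses by
default; any other keyword argument is passed unchanged to the SQLAlchemy
``mapper()`` call.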
`using_options_defaults`
------------------------
The 'using_options_defaults' DSL statement allows you to set up some
default options on a custom base class. These will be used as the default value
for options of all its subclasses. Note that any option not set within the
using_options_defaults (nor specifically on a particular Entity) will use the
global defaults, so you don't have to provide a default value for all options,
but only those you want to change. Please also note that this statement does
not work on normal entities, and the normal using_options statement does not
work on base classes (because normal options do not and should not propagate to
the child classes).
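For example (an illustrative sketch):
.. sourcecode:: python
class MyBase(EntityBase):
__metaclass__ = EntityMeta
using_options_defaults(shortnames=True)
class Person(MyBase):
name = Field(Unicode(64))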
'''
from sqlalchemy import Integer, String
from elixir.statements import ClassMutator
__doc_all__ = ['options_defaults']
OLD_M2MCOL_NAMEFORMAT = "%(tablename)s_%(key)s%(numifself)s"
ALTERNATE_M2MCOL_NAMEFORMAT = "%(inversename)s_%(key)s"
def default_m2m_column_formatter(data):
if data['selfref']:
return ALTERNATE_M2MCOL_NAMEFORMAT % data
else:
return OLD_M2MCOL_NAMEFORMAT % data
NEW_M2MCOL_NAMEFORMAT = default_m2m_column_formatter
# format constants
FKCOL_NAMEFORMAT = "%(relname)s_%(key)s"
M2MCOL_NAMEFORMAT = NEW_M2MCOL_NAMEFORMAT
CONSTRAINT_NAMEFORMAT = "%(tablename)s_%(colnames)s_fk"
MULTIINHERITANCECOL_NAMEFORMAT = "%(entity)s_%(key)s"
# other global constants
DEFAULT_AUTO_PRIMARYKEY_NAME = "id"
DEFAULT_AUTO_PRIMARYKEY_TYPE = Integer
DEFAULT_VERSION_ID_COL_NAME = "row_version"
DEFAULT_POLYMORPHIC_COL_NAME = "row_type"
POLYMORPHIC_COL_SIZE = 40
POLYMORPHIC_COL_TYPE = String(POLYMORPHIC_COL_SIZE)
# debugging/migration help
MIGRATION_TO_07_AID = False
#
options_defaults = dict(
abstract=False,
autosetup=False,
inheritance='single',
polymorphic=True,
identity=None,
autoload=False,
tablename=None,
shortnames=False,
auto_primarykey=True,
version_id_col=False,
allowcoloverride=False,
order_by=None,
resolve_root=None,
mapper_options={},
table_options={}
)
valid_options = options_defaults.keys() + [
'metadata',
'session',
'collection'
]
def using_options_defaults_handler(entity, **kwargs):
for kwarg in kwargs:
if kwarg not in valid_options:
raise Exception("'%s' is not a valid option for Elixir entities."
% kwarg)
# We use __dict__ instead of hasattr so that we do not pick up the attribute
# from a parent class, which would make us update the parent's dict instead
# of creating a local dict.
if not entity.__dict__.get('options_defaults'):
entity.options_defaults = {}
entity.options_defaults.update(kwargs)
def using_options_handler(entity, *args, **kwargs):
for kwarg in kwargs:
if kwarg in valid_options:
setattr(entity._descriptor, kwarg, kwargs[kwarg])
else:
raise Exception("'%s' is not a valid option for Elixir entities."
% kwarg)
def using_table_options_handler(entity, *args, **kwargs):
entity._descriptor.table_args.extend(list(args))
entity._descriptor.table_options.update(kwargs)
def using_mapper_options_handler(entity, *args, **kwargs):
entity._descriptor.mapper_options.update(kwargs)
using_options_defaults = ClassMutator(using_options_defaults_handler)
using_options = ClassMutator(using_options_handler)
using_table_options = ClassMutator(using_table_options_handler)
using_mapper_options = ClassMutator(using_mapper_options_handler)
Elixir-0.7.1/elixir/ext/ 0000755 0001750 0001750 00000000000 11300243237 013122 5 ustar ged ged Elixir-0.7.1/elixir/ext/versioned.py 0000644 0001750 0001750 00000026034 11277333701 015510 0 ustar ged ged '''
A versioning plugin for Elixir.
Entities that are marked as versioned with the `acts_as_versioned` statement
will automatically have a history table created and a timestamp and version
column added to their tables. In addition, versioned entities are provided
with four new methods: revert, revert_to, compare_with and get_as_of, and one
new attribute: versions. Entities with compound primary keys are supported.
The `versions` attribute will contain a list of previous versions of the
instance, in increasing version number order.
The `get_as_of` method will retrieve a previous version of the instance "as of"
a specified datetime. If the current version is the most recent, it will be
returned.
The `revert` method will roll back the current instance to its previous
version, if possible. Once reverted, the current instance will be expired from
the session, and you will need to fetch it again to retrieve the now reverted
instance.
The `revert_to` method will roll back the current instance to the specified
version number, if possible. Once reverted, the current instance will be
expired from the session, and you will need to fetch it again to retrieve the
now reverted instance.
The `compare_with` method will compare the instance with a previous version. A
dictionary will be returned with each field difference as an element in the
dictionary where the key is the field name and the value is a tuple of the
format (current_value, version_value). Version instances also have a
`compare_with` method so that two versions can be compared.
Also included in the module is an `after_revert` decorator that can be used to
decorate methods on the versioned entity; those methods will be called after
the instance has been reverted.
The acts_as_versioned statement also accepts an optional `ignore` argument
that consists of a list of strings, specifying names of fields. Changes in
those fields will not result in a version increment. In addition, you can
pass in an optional `check_concurrent` argument, which will use SQLAlchemy's
built-in optimistic concurrency mechanisms.
Note that relationships that are stored in mapping tables will not be included
as part of the versioning process, and will need to be handled manually. Only
values within the entity's main table will be versioned into the history table.
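A minimal usage sketch (the ``Page`` entity, its fields and the SQLite URI are
made up for illustration):

.. sourcecode:: python

    from elixir import (Entity, Field, Unicode, metadata, session,
                        setup_all, create_all)
    from elixir.ext.versioned import acts_as_versioned, after_revert

    class Page(Entity):
        title = Field(Unicode(100))
        body = Field(Unicode)

        # changes to 'title' alone will not create a new version
        acts_as_versioned(ignore=['title'])

        @after_revert
        def log_revert(self):
            print 'Page %r was reverted' % self.title

    metadata.bind = 'sqlite://'
    setup_all()
    create_all()

    page = Page(title=u'Home', body=u'first draft')
    session.commit()
    page.body = u'second draft'
    session.commit()

    assert len(page.versions) == 2
    page.revert()   # back to the first draft; refetch the instance afterwards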
'''
from datetime import datetime
import inspect
from sqlalchemy import Table, Column, and_, desc
from sqlalchemy.orm import mapper, MapperExtension, EXT_CONTINUE, \
object_session
from elixir import Integer, DateTime
from elixir.statements import Statement
from elixir.properties import EntityBuilder
from elixir.entity import getmembers
__all__ = ['acts_as_versioned', 'after_revert']
__doc_all__ = []
#
# utility functions
#
def get_entity_where(instance):
clauses = []
for column in instance.table.primary_key.columns:
instance_value = getattr(instance, column.name)
clauses.append(column==instance_value)
return and_(*clauses)
def get_history_where(instance):
clauses = []
history_columns = instance.__history_table__.primary_key.columns
for column in instance.table.primary_key.columns:
instance_value = getattr(instance, column.name)
history_column = getattr(history_columns, column.name)
clauses.append(history_column==instance_value)
return and_(*clauses)
#
# a mapper extension to track versions on insert, update, and delete
#
class VersionedMapperExtension(MapperExtension):
def before_insert(self, mapper, connection, instance):
version_colname, timestamp_colname = \
instance.__class__.__versioned_column_names__
setattr(instance, version_colname, 1)
setattr(instance, timestamp_colname, datetime.now())
return EXT_CONTINUE
def before_update(self, mapper, connection, instance):
old_values = instance.table.select(get_entity_where(instance)) \
.execute().fetchone()
# SA might've flagged this for an update even though it didn't change.
# This occurs when a relation is updated, thus marking this instance
# for a save/update operation. We check here against the last version
# to ensure we really should save this version and update the version
# data.
ignored = instance.__class__.__ignored_fields__
version_colname, timestamp_colname = \
instance.__class__.__versioned_column_names__
for key in instance.table.c.keys():
if key in ignored:
continue
if getattr(instance, key) != old_values[key]:
# the instance was really updated, so we create a new version
dict_values = dict(old_values.items())
connection.execute(
instance.__class__.__history_table__.insert(), dict_values)
old_version = getattr(instance, version_colname)
setattr(instance, version_colname, old_version + 1)
setattr(instance, timestamp_colname, datetime.now())
break
return EXT_CONTINUE
def before_delete(self, mapper, connection, instance):
connection.execute(instance.__history_table__.delete(
get_history_where(instance)
))
return EXT_CONTINUE
versioned_mapper_extension = VersionedMapperExtension()
#
# the acts_as_versioned statement
#
class VersionedEntityBuilder(EntityBuilder):
def __init__(self, entity, ignore=None, check_concurrent=False,
column_names=None):
self.entity = entity
self.add_mapper_extension(versioned_mapper_extension)
#TODO: we should rather check that the version_id_col isn't set
# externally
self.check_concurrent = check_concurrent
# Changes in these fields will be ignored
if column_names is None:
column_names = ['version', 'timestamp']
entity.__versioned_column_names__ = column_names
if ignore is None:
ignore = []
ignore.extend(column_names)
entity.__ignored_fields__ = ignore
def create_non_pk_cols(self):
# add a version column to the entity, along with a timestamp
version_colname, timestamp_colname = \
self.entity.__versioned_column_names__
#XXX: fail in case the columns already exist?
#col_names = [col.name for col in self.entity._descriptor.columns]
#if version_colname not in col_names:
self.add_table_column(Column(version_colname, Integer))
#if timestamp_colname not in col_names:
self.add_table_column(Column(timestamp_colname, DateTime))
# add a concurrent_version column to the entity, if required
if self.check_concurrent:
self.entity._descriptor.version_id_col = 'concurrent_version'
# we copy columns from the main entity table, so we need it to exist first
def after_table(self):
entity = self.entity
version_colname, timestamp_colname = \
entity.__versioned_column_names__
# look for events
after_revert_events = []
for name, func in getmembers(entity, inspect.ismethod):
if getattr(func, '_elixir_after_revert', False):
after_revert_events.append(func)
# create a history table for the entity
skipped_columns = [version_colname]
if self.check_concurrent:
skipped_columns.append('concurrent_version')
columns = [
column.copy() for column in entity.table.c
if column.name not in skipped_columns
]
columns.append(Column(version_colname, Integer, primary_key=True))
table = Table(entity.table.name + '_history', entity.table.metadata,
*columns
)
entity.__history_table__ = table
# create an object that represents a version of this entity
class Version(object):
pass
# map the version class to the history table for this entity
Version.__name__ = entity.__name__ + 'Version'
Version.__versioned_entity__ = entity
mapper(Version, entity.__history_table__)
version_col = getattr(table.c, version_colname)
timestamp_col = getattr(table.c, timestamp_colname)
# attach utility methods and properties to the entity
def get_versions(self):
v = object_session(self).query(Version) \
.filter(get_history_where(self)) \
.order_by(version_col) \
.all()
# history contains all the previous records.
# Add the current one to the list to get all the versions
v.append(self)
return v
def get_as_of(self, dt):
# if our current version's timestamp is older than the given datetime,
# then the current version is the one in effect "as of" that time
if getattr(self, timestamp_colname) < dt:
return self
# otherwise, we need to look to the history table to get our
# older version
sess = object_session(self)
query = sess.query(Version) \
.filter(and_(get_history_where(self),
timestamp_col <= dt)) \
.order_by(desc(timestamp_col)).limit(1)
return query.first()
def revert_to(self, to_version):
if isinstance(to_version, Version):
to_version = getattr(to_version, version_colname)
old_version = table.select(and_(
get_history_where(self),
version_col == to_version
)).execute().fetchone()
entity.table.update(get_entity_where(self)).execute(
dict(old_version.items())
)
table.delete(and_(get_history_where(self),
version_col >= to_version)).execute()
self.expire()
for event in after_revert_events:
event(self)
def revert(self):
assert getattr(self, version_colname) > 1
self.revert_to(getattr(self, version_colname) - 1)
def compare_with(self, version):
differences = {}
for column in self.table.c:
if column.name in (version_colname, 'concurrent_version'):
continue
this = getattr(self, column.name)
that = getattr(version, column.name)
if this != that:
differences[column.name] = (this, that)
return differences
entity.versions = property(get_versions)
entity.get_as_of = get_as_of
entity.revert_to = revert_to
entity.revert = revert
entity.compare_with = compare_with
Version.compare_with = compare_with
acts_as_versioned = Statement(VersionedEntityBuilder)
def after_revert(func):
"""
Decorator for watching for revert events.
"""
func._elixir_after_revert = True
return func
Elixir-0.7.1/elixir/ext/__init__.py 0000644 0001750 0001750 00000000105 11261351706 015236 0 ustar ged ged '''
Ext package
Additional Elixir statements and functionality.
'''
Elixir-0.7.1/elixir/ext/list.py 0000644 0001750 0001750 00000022466 11261351706 014470 0 ustar ged ged '''
This extension is DEPRECATED. Please use the orderinglist SQLAlchemy
extension instead.
For details:
http://www.sqlalchemy.org/docs/05/reference/ext/orderinglist.html
For an Elixir example:
http://elixir.ematia.de/trac/wiki/Recipes/UsingEntityForOrderedList
or
http://elixir.ematia.de/trac/browser/elixir/0.7.0/tests/test_o2m.py#L155
An ordered-list plugin for Elixir to help you make an entity be able to be
managed in a list-like way. Much inspiration comes from the Ruby on Rails
acts_as_list plugin, which is currently more full-featured than this plugin.
Once you flag an entity with an `acts_as_list()` statement, an integer column
named `position` will be added to the entity and managed for you by the
plugin. You can pass an alternative column name to
the plugin using the `column_name` keyword argument.
In addition, your entity will get a series of new methods attached to it,
including:
+----------------------+------------------------------------------------------+
| Method Name | Description |
+======================+======================================================+
| ``move_lower`` | Move the item lower in the list |
+----------------------+------------------------------------------------------+
| ``move_higher`` | Move the item higher in the list |
+----------------------+------------------------------------------------------+
| ``move_to_bottom`` | Move the item to the bottom of the list |
+----------------------+------------------------------------------------------+
| ``move_to_top`` | Move the item to the top of the list |
+----------------------+------------------------------------------------------+
| ``move_to`` | Move the item to a specific position in the list |
+----------------------+------------------------------------------------------+
Sometimes, your entities that represent list items will be a part of different
lists. To implement this behavior, simply pass the `acts_as_list` statement a
callable that returns a "qualifier" SQLAlchemy expression. This expression will
be added to the generated WHERE clauses used by the plugin.
Example model usage:
.. sourcecode:: python
from elixir import *
from elixir.ext.list import acts_as_list
class ToDo(Entity):
subject = Field(String(128))
owner = ManyToOne('Person')
def qualify(self):
return ToDo.owner_id == self.owner_id
acts_as_list(qualifier=qualify)
class Person(Entity):
name = Field(String(64))
todos = OneToMany('ToDo', order_by='position')
The above example can then be used to manage ordered todo lists for people.
Note that you must set the `order_by` property on the `Person.todos` relation in
order for the relation to respect the ordering. Here is an example of using
this model in practice:
.. sourcecode:: python
p = Person.query.filter_by(name='Jonathan').one()
p.todos.append(ToDo(subject='Three'))
p.todos.append(ToDo(subject='Two'))
p.todos.append(ToDo(subject='One'))
session.commit(); session.clear()
p = Person.query.filter_by(name='Jonathan').one()
p.todos[0].move_to_bottom()
p.todos[2].move_to_top()
session.commit(); session.clear()
p = Person.query.filter_by(name='Jonathan').one()
assert p.todos[0].subject == 'One'
assert p.todos[1].subject == 'Two'
assert p.todos[2].subject == 'Three'
For more examples, refer to the unit tests for this plugin.
'''
from elixir.statements import Statement
from elixir.events import before_insert, before_delete
from sqlalchemy import Column, Integer, select, func, literal, and_
import warnings
__all__ = ['acts_as_list']
__doc_all__ = []
def get_entity_where(instance):
clauses = []
for column in instance.table.primary_key.columns:
instance_value = getattr(instance, column.name)
clauses.append(column == instance_value)
return and_(*clauses)
class ListEntityBuilder(object):
def __init__(self, entity, qualifier=None, column_name='position'):
warnings.warn("The acts_as_list extension is deprecated. Please use "
"SQLAlchemy's orderinglist extension instead",
DeprecationWarning, stacklevel=6)
self.entity = entity
self.qualifier_method = qualifier
self.column_name = column_name
def create_non_pk_cols(self):
if self.entity._descriptor.autoload:
for c in self.entity.table.c:
if c.name == self.column_name:
self.position_column = c
if not hasattr(self, 'position_column'):
raise Exception(
"Could not find column '%s' in autoloaded table '%s', "
"needed by entity '%s'." % (self.column_name,
self.entity.table.name, self.entity.__name__))
else:
self.position_column = Column(self.column_name, Integer)
self.entity._descriptor.add_column(self.position_column)
def after_table(self):
position_column = self.position_column
position_column_name = self.column_name
qualifier_method = self.qualifier_method
if not qualifier_method:
qualifier_method = lambda self: None
def _init_position(self):
s = select(
[(func.max(position_column)+1).label('value')],
qualifier_method(self)
).union(
select([literal(1).label('value')])
)
a = s.alias()
# we use a second func.max to get the maximum between 1 and the
# real max position if any exist
setattr(self, position_column_name, select([func.max(a.c.value)]))
# Note that this method could be rewritten more simply like below,
# but because this extension is going to be deprecated anyway,
# I don't want to risk breaking something I don't want to maintain.
# setattr(self, position_column_name, select(
# [func.coalesce(func.max(position_column), 0) + 1],
# qualifier_method(self)
# ))
_init_position = before_insert(_init_position)
def _shift_items(self):
self.table.update(
and_(
position_column > getattr(self, position_column_name),
qualifier_method(self)
),
values={
position_column : position_column - 1
}
).execute()
_shift_items = before_delete(_shift_items)
def move_to_bottom(self):
# move the items that were above this item up one
self.table.update(
and_(
position_column >= getattr(self, position_column_name),
qualifier_method(self)
),
values = {
position_column : position_column - 1
}
).execute()
# move this item to the max position
# MySQL does not support the correlated subquery, so we need to
# execute the query (through scalar()). See ticket #34.
self.table.update(
get_entity_where(self),
values={
position_column : select(
[func.max(position_column) + 1],
qualifier_method(self)
).scalar()
}
).execute()
def move_to_top(self):
self.move_to(1)
def move_to(self, position):
current_position = getattr(self, position_column_name)
# if the requested position equals the current one, there is nothing to do
if position == current_position:
return
# determine which direction we're moving
if position < current_position:
where = and_(
position <= position_column,
position_column < current_position,
qualifier_method(self)
)
modifier = 1
elif position > current_position:
where = and_(
current_position < position_column,
position_column <= position,
qualifier_method(self)
)
modifier = -1
# shift the items in between the current and new positions
self.table.update(where, values = {
position_column : position_column + modifier
}).execute()
# update this item's position to the desired position
self.table.update(get_entity_where(self)) \
.execute(**{position_column_name: position})
def move_lower(self):
# replace for ex.: p.todos.insert(x + 1, p.todos.pop(x))
self.move_to(getattr(self, position_column_name) + 1)
def move_higher(self):
self.move_to(getattr(self, position_column_name) - 1)
# attach new methods to entity
self.entity._init_position = _init_position
self.entity._shift_items = _shift_items
self.entity.move_lower = move_lower
self.entity.move_higher = move_higher
self.entity.move_to_bottom = move_to_bottom
self.entity.move_to_top = move_to_top
self.entity.move_to = move_to
acts_as_list = Statement(ListEntityBuilder)
Elixir-0.7.1/elixir/ext/associable.py 0000644 0001750 0001750 00000022251 11261351706 015612 0 ustar ged ged '''
Associable Elixir Statement Generator
==========
Associable
==========
About Polymorphic Associations
------------------------------
A frequent pattern in database schemas is the has_and_belongs_to_many, or a
many-to-many table. Quite often, multiple tables will refer to a single one,
creating quite a few many-to-many intermediate tables.
Polymorphic associations reduce the number of many-to-many tables by setting up
a table that allows relations to any other table in the database, and relating
it to the associable table. In some implementations, this layout does not
enforce referential integrity with database foreign key constraints; this
implementation uses an additional many-to-many table with foreign key
constraints to avoid that problem.
.. note:
SQLite does not support foreign key constraints, so referential integrity
can only be enforced using database backends with such support.
Elixir Statement Generator for Polymorphic Associations
-------------------------------------------------------
The ``associable`` function generates the intermediary tables for an Elixir
entity that should be associable with other Elixir entities and returns an
Elixir Statement for use with them. This automates the process of creating the
polymorphic association tables and ensuring their referential integrity.
Matching select_XXX and select_by_XXX methods are also added to the associated
entity, which allow queries to be run for the associated objects.
Example usage:
.. sourcecode:: python
class Tag(Entity):
name = Field(Unicode)
acts_as_taggable = associable(Tag)
class Entry(Entity):
title = Field(Unicode)
acts_as_taggable('tags')
class Article(Entity):
title = Field(Unicode)
acts_as_taggable('tags')
Or if one of the entities being associated should only have a single member of
the associated table:
.. sourcecode:: python
class Address(Entity):
street = Field(String(130))
city = Field(String(100))
is_addressable = associable(Address, 'addresses')
class Person(Entity):
name = Field(Unicode)
orders = OneToMany('Order')
is_addressable()
class Order(Entity):
order_num = Field(primary_key=True)
item_count = Field(Integer)
person = ManyToOne('Person')
is_addressable('address', uselist=False)
home = Address(street='123 Elm St.', city='Spooksville')
user = Person(name='Jane Doe')
user.addresses.append(home)
neworder = Order(item_count=4)
neworder.address = home
user.orders.append(neworder)
# Queries using the added helpers
Person.select_by_addresses(city='Cupertino')
Person.select_addresses(and_(Address.c.street=='132 Elm St',
Address.c.city=='Smallville'))
Statement Options
-----------------
The generated Elixir Statement has several options available:
+---------------+-------------------------------------------------------------+
| Option Name | Description |
+===============+=============================================================+
| ``name`` | Specify a custom name for the Entity attribute. This is |
| | used to declare the attribute used to access the associated |
| | table values. Otherwise, the name will use the plural_name |
| | provided to the associable call. |
+---------------+-------------------------------------------------------------+
| ``uselist`` | Whether or not the associated table should be represented |
| | as a list, or a single property. It should be set to False |
| | when the entity should only have a single associated |
| | entity. Defaults to True. |
+---------------+-------------------------------------------------------------+
| ``lazy``      | Determines whether the associated entity objects should    |
|               | be loaded lazily. Defaults to ``True``.                     |
+---------------+-------------------------------------------------------------+
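For instance, the ``name`` and ``lazy`` options described above might be used
like this (a sketch only; the entities and fields are made up):

.. sourcecode:: python

    from elixir import Entity, Field, Unicode
    from elixir.ext.associable import associable

    class Keyword(Entity):
        text = Field(Unicode(30))

    is_taggable = associable(Keyword, 'keywords')

    class Document(Entity):
        title = Field(Unicode(100))

        # expose the association as Document.tags (a list) and load the
        # underlying association eagerly rather than lazily
        is_taggable('tags', lazy=False)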
'''
from elixir.statements import Statement
import sqlalchemy as sa
__doc_all__ = ['associable']
def associable(assoc_entity, plural_name=None, lazy=True):
'''
Generate an associable Elixir Statement
'''
interface_name = assoc_entity._descriptor.tablename
able_name = interface_name + 'able'
if plural_name:
attr_name = "%s_rel" % plural_name
else:
plural_name = interface_name
attr_name = "%s_rel" % interface_name
class GenericAssoc(object):
def __init__(self, tablename):
self.type = tablename
#TODO: inherit from entity builder
class Associable(object):
"""An associable Elixir Statement object"""
def __init__(self, entity, name=None, uselist=True, lazy=True):
self.entity = entity
self.lazy = lazy
self.uselist = uselist
if name is None:
self.name = plural_name
else:
self.name = name
def after_table(self):
col = sa.Column('%s_assoc_id' % interface_name, sa.Integer,
sa.ForeignKey('%s.id' % able_name))
self.entity._descriptor.add_column(col)
if not hasattr(assoc_entity, '_assoc_table'):
metadata = assoc_entity._descriptor.metadata
association_table = sa.Table("%s" % able_name, metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('type', sa.String(40), nullable=False),
)
tablename = "%s_to_%s" % (able_name, interface_name)
association_to_table = sa.Table(tablename, metadata,
sa.Column('assoc_id', sa.Integer,
sa.ForeignKey(association_table.c.id,
ondelete="CASCADE"),
primary_key=True),
#FIXME: this assumes a single id col
sa.Column('%s_id' % interface_name, sa.Integer,
sa.ForeignKey(assoc_entity.table.c.id,
ondelete="RESTRICT"),
primary_key=True),
)
assoc_entity._assoc_table = association_table
assoc_entity._assoc_to_table = association_to_table
def after_mapper(self):
if not hasattr(assoc_entity, '_assoc_mapper'):
assoc_entity._assoc_mapper = sa.orm.mapper(
GenericAssoc, assoc_entity._assoc_table, properties={
'targets': sa.orm.relation(
assoc_entity,
secondary=assoc_entity._assoc_to_table,
lazy=lazy, backref='associations',
order_by=assoc_entity.mapper.order_by)
})
entity = self.entity
entity.mapper.add_property(
attr_name,
sa.orm.relation(GenericAssoc, lazy=self.lazy,
backref='_backref_%s' % entity.table.name)
)
if self.uselist:
def get(self):
if getattr(self, attr_name) is None:
setattr(self, attr_name,
GenericAssoc(entity.table.name))
return getattr(self, attr_name).targets
setattr(entity, self.name, property(get))
else:
# scalar based property decorator
def get(self):
attr = getattr(self, attr_name)
if attr is not None:
return attr.targets[0]
else:
return None
def set(self, value):
if getattr(self, attr_name) is None:
setattr(self, attr_name,
GenericAssoc(entity.table.name))
getattr(self, attr_name).targets = [value]
setattr(entity, self.name, property(get, set))
# self.name is both set via mapper synonym and the python
# property, but that's how synonym properties work.
# adding synonym property after "real" property otherwise it
# breaks when using SQLAlchemy > 0.4.1
entity.mapper.add_property(self.name, sa.orm.synonym(attr_name))
# add helper methods
def select_by(cls, **kwargs):
return cls.query.join([attr_name, 'targets']) \
.filter_by(**kwargs).all()
setattr(entity, 'select_by_%s' % self.name, classmethod(select_by))
def select(cls, *args, **kwargs):
return cls.query.join([attr_name, 'targets']) \
.filter(*args, **kwargs).all()
setattr(entity, 'select_%s' % self.name, classmethod(select))
return Statement(Associable)
Elixir-0.7.1/elixir/ext/encrypted.py 0000644 0001750 0001750 00000010352 11261351706 015501 0 ustar ged ged '''
An encryption plugin for Elixir utilizing the excellent PyCrypto library, which
can be downloaded here: http://www.amk.ca/python/code/crypto
Values for columns that are specified to be encrypted will be transparently
encrypted and safely encoded for storage in a unicode column using the powerful
and secure Blowfish Cipher using a specified "secret" which can be passed into
the plugin at class declaration time.
Example usage:
.. sourcecode:: python
from elixir import *
from elixir.ext.encrypted import acts_as_encrypted
class Person(Entity):
name = Field(Unicode)
password = Field(Unicode)
ssn = Field(Unicode)
acts_as_encrypted(for_fields=['password', 'ssn'],
with_secret='secret')
The above Person entity will automatically encrypt and decrypt the password and
ssn columns on save, update, and load. Different secrets can be specified on
an entity by entity basis, for added security.
**Important note**: instance attributes are encrypted in-place. This means that
if one of the encrypted attributes of an instance is accessed after the
instance has been flushed to the database (and thus encrypted), the value for
that attribute will be encrypted in the in-memory object in addition to the
database row.
'''
from Crypto.Cipher import Blowfish
from elixir.statements import Statement
from sqlalchemy.orm import MapperExtension, EXT_CONTINUE, EXT_STOP
try:
from sqlalchemy.orm import EXT_PASS
SA05orlater = False
except ImportError:
SA05orlater = True
__all__ = ['acts_as_encrypted']
__doc_all__ = []
#
# encryption and decryption functions
#
def encrypt_value(value, secret):
return Blowfish.new(secret, Blowfish.MODE_CFB) \
.encrypt(value).encode('string_escape')
def decrypt_value(value, secret):
return Blowfish.new(secret, Blowfish.MODE_CFB) \
.decrypt(value.decode('string_escape'))
#
# acts_as_encrypted statement
#
class ActsAsEncrypted(object):
def __init__(self, entity, for_fields=[], with_secret='abcdef'):
def perform_encryption(instance, encrypt=True):
encrypted = getattr(instance, '_elixir_encrypted', None)
if encrypted is encrypt:
# skipping encryption or decryption, as it is already done
return
else:
# marking instance as already encrypted/decrypted
instance._elixir_encrypted = encrypt
if encrypt:
func = encrypt_value
else:
func = decrypt_value
for column_name in for_fields:
current_value = getattr(instance, column_name)
if current_value:
setattr(instance, column_name,
func(current_value, with_secret))
def perform_decryption(instance):
perform_encryption(instance, encrypt=False)
class EncryptedMapperExtension(MapperExtension):
def before_insert(self, mapper, connection, instance):
perform_encryption(instance)
return EXT_CONTINUE
def before_update(self, mapper, connection, instance):
perform_encryption(instance)
return EXT_CONTINUE
if SA05orlater:
def reconstruct_instance(self, mapper, instance):
perform_decryption(instance)
# no special return value is required for
# reconstruct_instance, but you never know...
return EXT_CONTINUE
else:
def populate_instance(self, mapper, selectcontext, row,
instance, *args, **kwargs):
mapper.populate_instance(selectcontext, instance, row,
*args, **kwargs)
perform_decryption(instance)
# EXT_STOP because we already did populate the instance and
# the normal processing should not happen
return EXT_STOP
# make sure that the entity's mapper has our mapper extension
entity._descriptor.add_mapper_extension(EncryptedMapperExtension())
acts_as_encrypted = Statement(ActsAsEncrypted)
Elixir-0.7.1/elixir/ext/perform_ddl.py 0000644 0001750 0001750 00000006363 11277333630 016013 0 ustar ged ged '''
DDL statements for Elixir.
Entities having the perform_ddl statement will automatically execute the
given DDL statement at the given moment: either before or after the table
creation in SQL.
The 'when' argument can be either 'before-create' or 'after-create'.
The 'statement' argument can be one of:
- a single string statement
- a list of string statements, in which case, each of them will be executed
in turn.
- a callable which should take no argument and return either a single string
or a list of strings.
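For instance, the callable and list forms might be combined like this (a
sketch only; the index statement and entity are made up, and the '%(table)s'
and '%(fullname)s' constructs are explained just below):

.. sourcecode:: python

    def movie_indexes():
        # may return a single string or a list of strings
        return ["create index %(table)s_year_idx on %(fullname)s (year)"]

    class Movie(Entity):
        title = Field(Unicode(30), primary_key=True)
        year = Field(Integer)

        perform_ddl('after-create', movie_indexes)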
In each string statement, you may use the special '%(fullname)s' construct,
which will be replaced with the real table name (including the schema), in case
it is unknown to you. The self-explanatory '%(table)s' and '%(schema)s'
constructs may also be used here.
You would use this extension to handle SQL statements which Elixir does not
generate itself, like triggers.
.. sourcecode:: python
class Movie(Entity):
title = Field(Unicode(30), primary_key=True)
year = Field(Integer)
perform_ddl('after-create',
"insert into %(fullname)s values ('Alien', 1979)")
preload_data is a more specific statement meant to preload data into your
entity table from a list of tuples (of field values for each row).
.. sourcecode:: python
class Movie(Entity):
title = Field(Unicode(30), primary_key=True)
year = Field(Integer)
preload_data(('title', 'year'),
[(u'Alien', 1979), (u'Star Wars', 1977)])
preload_data(('year', 'title'),
[(1982, u'Blade Runner')])
preload_data(data=[(u'Batman', 1966)])
'''
from elixir.statements import Statement
from elixir.properties import EntityBuilder
from sqlalchemy import DDL
__all__ = ['perform_ddl', 'preload_data']
__doc_all__ = []
#
# the perform_ddl statement
#
class PerformDDLEntityBuilder(EntityBuilder):
def __init__(self, entity, when, statement, on=None, context=None):
self.entity = entity
self.when = when
self.statement = statement
self.on = on
self.context = context
def after_table(self):
statement = self.statement
if hasattr(statement, '__call__'):
statement = statement()
if not isinstance(statement, list):
statement = [statement]
for s in statement:
ddl = DDL(s, self.on, self.context)
ddl.execute_at(self.when, self.entity.table)
perform_ddl = Statement(PerformDDLEntityBuilder)
#
# the preload_data statement
#
class PreloadDataEntityBuilder(EntityBuilder):
def __init__(self, entity, columns=None, data=None):
self.entity = entity
self.columns = columns
self.data = data
def after_table(self):
all_columns = [col.name for col in self.entity.table.columns]
def onload(event, schema_item, connection):
columns = self.columns
if columns is None:
columns = all_columns
data = self.data
if hasattr(data, '__call__'):
data = data()
insert = schema_item.insert()
connection.execute(insert,
[dict(zip(columns, values)) for values in data])
self.entity.table.append_ddl_listener('after-create', onload)
preload_data = Statement(PreloadDataEntityBuilder)
Elixir-0.7.1/Elixir.egg-info/ 0000755 0001750 0001750 00000000000 11300243237 013754 5 ustar ged ged Elixir-0.7.1/Elixir.egg-info/requires.txt 0000644 0001750 0001750 00000000023 11300243237 016347 0 ustar ged ged SQLAlchemy >= 0.4.0 Elixir-0.7.1/Elixir.egg-info/PKG-INFO 0000644 0001750 0001750 00000003005 11300243237 015047 0 ustar ged ged Metadata-Version: 1.0
Name: Elixir
Version: 0.7.1
Summary: Declarative Mapper for SQLAlchemy
Home-page: http://elixir.ematia.de
Author: Gaetan de Menten, Daniel Haus and Jonathan LaCour
Author-email: sqlelixir@googlegroups.com
License: MIT License
Description:
Elixir
======
A declarative layer on top of SQLAlchemy. It is a fairly thin wrapper, which
provides the ability to create simple Python classes that map directly to
relational database tables (this pattern is often referred to as the Active
Record design pattern), providing many of the benefits of traditional
databases without losing the convenience of Python objects.
Elixir is intended to replace the ActiveMapper SQLAlchemy extension, and the
TurboEntity project but does not intend to replace SQLAlchemy's core features,
and instead focuses on providing a simpler syntax for defining model objects
when you do not need the full expressiveness of SQLAlchemy's manual mapper
definitions.
SVN version:
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Topic :: Database :: Front-Ends
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Elixir-0.7.1/Elixir.egg-info/SOURCES.txt 0000644 0001750 0001750 00000001163 11300243237 015641 0 ustar ged ged MANIFEST.in
README
setup.cfg
setup.py
Elixir.egg-info/PKG-INFO
Elixir.egg-info/SOURCES.txt
Elixir.egg-info/dependency_links.txt
Elixir.egg-info/requires.txt
Elixir.egg-info/top_level.txt
elixir/__init__.py
elixir/collection.py
elixir/entity.py
elixir/events.py
elixir/fields.py
elixir/options.py
elixir/properties.py
elixir/py23compat.py
elixir/relationships.py
elixir/statements.py
elixir/ext/__init__.py
elixir/ext/associable.py
elixir/ext/encrypted.py
elixir/ext/list.py
elixir/ext/perform_ddl.py
elixir/ext/versioned.py
tests/db1/__init__.py
tests/db1/a.py
tests/db1/b.py
tests/db1/c.py
tests/db2/__init__.py
tests/db2/a.py Elixir-0.7.1/Elixir.egg-info/top_level.txt 0000644 0001750 0001750 00000000015 11300243237 016502 0 ustar ged ged tests
elixir
Elixir-0.7.1/Elixir.egg-info/dependency_links.txt 0000644 0001750 0001750 00000000001 11300243237 020022 0 ustar ged ged
Elixir-0.7.1/README 0000644 0001750 0001750 00000001361 11261351707 011717 0 ustar ged ged -----
About
-----
Elixir is a declarative layer on top of the `SQLAlchemy library
`_. It is a fairly thin wrapper, which provides
the ability to create simple Python classes that map directly to relational
database tables (this pattern is often referred to as the Active Record design
pattern), providing many of the benefits of traditional databases
without losing the convenience of Python objects.
Elixir is intended to replace the ActiveMapper SQLAlchemy extension, and the
TurboEntity project but does not intend to replace SQLAlchemy's core features,
and instead focuses on providing a simpler syntax for defining model objects
when you do not need the full expressiveness of SQLAlchemy's manual mapper
definitions.
Elixir-0.7.1/MANIFEST.in 0000644 0001750 0001750 00000000026 11261351707 012572 0 ustar ged ged exclude release.howto
Elixir-0.7.1/tests/ 0000755 0001750 0001750 00000000000 11300243237 012170 5 ustar ged ged Elixir-0.7.1/tests/db2/ 0000755 0001750 0001750 00000000000 11300243237 012637 5 ustar ged ged Elixir-0.7.1/tests/db2/__init__.py 0000644 0001750 0001750 00000000011 11276616130 014750 0 ustar ged ged import a
Elixir-0.7.1/tests/db2/a.py 0000644 0001750 0001750 00000000131 11276616251 013440 0 ustar ged ged from elixir import Entity, ManyToMany
class A(Entity):
cs = ManyToMany('..db1.c.C')
Elixir-0.7.1/tests/db1/ 0000755 0001750 0001750 00000000000 11300243237 012636 5 ustar ged ged Elixir-0.7.1/tests/db1/b.py 0000644 0001750 0001750 00000000255 11276621773 013454 0 ustar ged ged from elixir import Entity, ManyToMany, using_options
class B(Entity):
using_options(resolve_root='tests.db1')
cs = ManyToMany('.c.C')
a1s = ManyToMany('a.A1')
Elixir-0.7.1/tests/db1/c.py 0000644 0001750 0001750 00000000166 11276616272 013454 0 ustar ged ged from elixir import Entity, ManyToMany
class C(Entity):
cs = ManyToMany('.b.B')
as_ = ManyToMany('..db2.a.A')
Elixir-0.7.1/tests/db1/__init__.py 0000644 0001750 0001750 00000000017 11276616045 014762 0 ustar ged ged import a, b, c
Elixir-0.7.1/tests/db1/a.py 0000644 0001750 0001750 00000000355 11276621753 013452 0 ustar ged ged from elixir import Entity, ManyToOne, OneToMany, ManyToMany, using_options
class A1(Entity):
using_options(resolve_root='tests.db1')
a2s = OneToMany('A2')
bs = ManyToMany('b.B')
class A2(Entity):
a1 = ManyToOne('A1')
Elixir-0.7.1/PKG-INFO 0000644 0001750 0001750 00000003005 11300243237 012121 0 ustar ged ged Metadata-Version: 1.0
Name: Elixir
Version: 0.7.1
Summary: Declarative Mapper for SQLAlchemy
Home-page: http://elixir.ematia.de
Author: Gaetan de Menten, Daniel Haus and Jonathan LaCour
Author-email: sqlelixir@googlegroups.com
License: MIT License
Description:
Elixir
======
A declarative layer on top of SQLAlchemy. It is a fairly thin wrapper, which
provides the ability to create simple Python classes that map directly to
relational database tables (this pattern is often referred to as the Active
Record design pattern), providing many of the benefits of traditional
databases without losing the convenience of Python objects.
Elixir is intended to replace the ActiveMapper SQLAlchemy extension, and the
TurboEntity project but does not intend to replace SQLAlchemy's core features,
and instead focuses on providing a simpler syntax for defining model objects
when you do not need the full expressiveness of SQLAlchemy's manual mapper
definitions.
SVN version:
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Topic :: Database :: Front-Ends
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Elixir-0.7.1/setup.cfg 0000644 0001750 0001750 00000000511 11300243237 012644 0 ustar ged ged [apydia]
trac_browser_url = http://elixir.ematia.de/trac/browser/elixir/tags/0.7.1
docformat = reStructuredText
title = Elixir
modules = elixir, elixir.ext.associable, elixir.ext.encrypted,
elixir.ext.list, elixir.ext.perform_ddl, elixir.ext.versioned,
theme = elixir
[egg_info]
tag_build =
tag_date = 0
tag_svn_revision = 0
Elixir-0.7.1/setup.py 0000644 0001750 0001750 00000003240 11261351752 012547 0 ustar ged ged from setuptools import setup, find_packages
setup(name="Elixir",
version="0.7.1",
description="Declarative Mapper for SQLAlchemy",
long_description="""
Elixir
======
A declarative layer on top of SQLAlchemy. It is a fairly thin wrapper, which
provides the ability to create simple Python classes that map directly to
relational database tables (this pattern is often referred to as the Active
Record design pattern), providing many of the benefits of traditional
databases without losing the convenience of Python objects.
Elixir is intended to replace the ActiveMapper SQLAlchemy extension, and the
TurboEntity project but does not intend to replace SQLAlchemy's core features,
and instead focuses on providing a simpler syntax for defining model objects
when you do not need the full expressiveness of SQLAlchemy's manual mapper
definitions.
SVN version:
""",
author="Gaetan de Menten, Daniel Haus and Jonathan LaCour",
author_email="sqlelixir@googlegroups.com",
url="http://elixir.ematia.de",
license = "MIT License",
install_requires = [
"SQLAlchemy >= 0.4.0"
],
packages=find_packages(exclude=['ez_setup', 'tests', 'examples']),
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Database :: Front-Ends",
"Topic :: Software Development :: Libraries :: Python Modules"
],
test_suite = 'nose.collector')