Updated SQLAlchemy + the new files

2017-04-15 16:33:29 +00:00
parent e3267d4bda
commit 4669737fe3
134 changed files with 66374 additions and 4528 deletions

1048
sqlalchemy/ext/automap.py Normal file

File diff suppressed because it is too large

559
sqlalchemy/ext/baked.py Normal file

@@ -0,0 +1,559 @@
# sqlalchemy/ext/baked.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Baked query extension.
Provides a creational pattern for the :class:`.query.Query` object which
allows the fully constructed object, Core select statement, and string
compiled result to be fully cached.
"""
from ..orm.query import Query
from ..orm import strategies, attributes, properties, \
strategy_options, util as orm_util, interfaces
from .. import log as sqla_log
from ..sql import util as sql_util, func, literal_column
from ..orm import exc as orm_exc
from .. import exc as sa_exc
from .. import util
import copy
import logging
log = logging.getLogger(__name__)
class BakedQuery(object):
"""A builder object for :class:`.query.Query` objects."""
__slots__ = 'steps', '_bakery', '_cache_key', '_spoiled'
def __init__(self, bakery, initial_fn, args=()):
self._cache_key = ()
self._update_cache_key(initial_fn, args)
self.steps = [initial_fn]
self._spoiled = False
self._bakery = bakery
@classmethod
def bakery(cls, size=200):
"""Construct a new bakery."""
_bakery = util.LRUCache(size)
def call(initial_fn, *args):
return cls(_bakery, initial_fn, args)
return call
def _clone(self):
b1 = BakedQuery.__new__(BakedQuery)
b1._cache_key = self._cache_key
b1.steps = list(self.steps)
b1._bakery = self._bakery
b1._spoiled = self._spoiled
return b1
def _update_cache_key(self, fn, args=()):
self._cache_key += (fn.__code__,) + args
def __iadd__(self, other):
if isinstance(other, tuple):
self.add_criteria(*other)
else:
self.add_criteria(other)
return self
def __add__(self, other):
if isinstance(other, tuple):
return self.with_criteria(*other)
else:
return self.with_criteria(other)
def add_criteria(self, fn, *args):
"""Add a criteria function to this :class:`.BakedQuery`.
This is equivalent to using the ``+=`` operator to
modify a :class:`.BakedQuery` in-place.
"""
self._update_cache_key(fn, args)
self.steps.append(fn)
return self
def with_criteria(self, fn, *args):
"""Add a criteria function to a :class:`.BakedQuery` cloned from this one.
This is equivalent to using the ``+`` operator to
produce a new :class:`.BakedQuery` with modifications.
"""
return self._clone().add_criteria(fn, *args)
def for_session(self, session):
"""Return a :class:`.Result` object for this :class:`.BakedQuery`.
This is equivalent to calling the :class:`.BakedQuery` as a
Python callable, e.g. ``result = my_baked_query(session)``.
"""
return Result(self, session)
def __call__(self, session):
return self.for_session(session)
def spoil(self, full=False):
"""Cancel any query caching that will occur on this BakedQuery object.
The BakedQuery can continue to be used normally; however, additional
creational functions will not be cached and will be called
on every invocation.
This is to support the case where a particular step in constructing
a baked query disqualifies the query from being cacheable, such
as a variant that relies upon some uncacheable value.
:param full: if False, only functions added to this
:class:`.BakedQuery` object subsequent to the spoil step will be
non-cached; the state of the :class:`.BakedQuery` up until
this point will be pulled from the cache. If True, then the
entire :class:`.Query` object is built from scratch each
time, with all creational functions being called on each
invocation.
"""
if not full:
_spoil_point = self._clone()
_spoil_point._cache_key += ('_query_only', )
self.steps = [_spoil_point._retrieve_baked_query]
self._spoiled = True
return self
def _retrieve_baked_query(self, session):
query = self._bakery.get(self._cache_key, None)
if query is None:
query = self._as_query(session)
self._bakery[self._cache_key] = query.with_session(None)
return query.with_session(session)
def _bake(self, session):
query = self._as_query(session)
context = query._compile_context()
self._bake_subquery_loaders(session, context)
context.session = None
context.query = query = context.query.with_session(None)
query._execution_options = query._execution_options.union(
{"compiled_cache": self._bakery}
)
# we'll be holding onto the query for some of its state,
# so delete some compilation-use-only attributes that can take up
# space
for attr in (
'_correlate', '_from_obj', '_mapper_adapter_map',
'_joinpath', '_joinpoint'):
query.__dict__.pop(attr, None)
self._bakery[self._cache_key] = context
return context
def _as_query(self, session):
query = self.steps[0](session)
for step in self.steps[1:]:
query = step(query)
return query
def _bake_subquery_loaders(self, session, context):
"""convert subquery eager loaders in the cache into baked queries.
For subquery eager loading to work, all we need here is that the
Query point to the correct session when it is run. However, since
we are "baking" anyway, we may as well also turn the query into
a "baked" query so that we save on performance too.
"""
context.attributes['baked_queries'] = baked_queries = []
for k, v in list(context.attributes.items()):
if isinstance(v, Query):
if 'subquery' in k:
bk = BakedQuery(self._bakery, lambda *args: v)
bk._cache_key = self._cache_key + k
bk._bake(session)
baked_queries.append((k, bk._cache_key, v))
del context.attributes[k]
def _unbake_subquery_loaders(self, session, context, params):
"""Retrieve subquery eager loaders stored by _bake_subquery_loaders
and turn them back into Result objects that will iterate just
like a Query object.
"""
for k, cache_key, query in context.attributes["baked_queries"]:
bk = BakedQuery(self._bakery,
lambda sess, q=query: q.with_session(sess))
bk._cache_key = cache_key
context.attributes[k] = bk.for_session(session).params(**params)
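# An illustrative sketch (assumed names: ``bakery`` created via
# BakedQuery.bakery(), ``User`` a mapped class) of how criteria functions
# accumulate on a BakedQuery via the operators above:
#
#     baked_query = bakery(lambda session: session.query(User))
#     # in place, equivalent to add_criteria():
#     baked_query += lambda q: q.filter(User.name == bindparam('username'))
#     # copied, equivalent to with_criteria():
#     ordered = baked_query + (lambda q: q.order_by(User.id))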
class Result(object):
"""Invokes a :class:`.BakedQuery` against a :class:`.Session`.
The :class:`.Result` object is where the actual :class:`.query.Query`
object gets created, or retrieved from the cache,
against a target :class:`.Session`, and is then invoked for results.
"""
__slots__ = 'bq', 'session', '_params'
def __init__(self, bq, session):
self.bq = bq
self.session = session
self._params = {}
def params(self, *args, **kw):
"""Specify parameters to be replaced into the string SQL statement."""
if len(args) == 1:
kw.update(args[0])
elif len(args) > 0:
raise sa_exc.ArgumentError(
"params() takes zero or one positional argument, "
"which is a dictionary.")
self._params.update(kw)
return self
def _as_query(self):
return self.bq._as_query(self.session).params(self._params)
def __str__(self):
return str(self._as_query())
def __iter__(self):
bq = self.bq
if bq._spoiled:
return iter(self._as_query())
baked_context = bq._bakery.get(bq._cache_key, None)
if baked_context is None:
baked_context = bq._bake(self.session)
context = copy.copy(baked_context)
context.session = self.session
context.attributes = context.attributes.copy()
bq._unbake_subquery_loaders(self.session, context, self._params)
context.statement.use_labels = True
if context.autoflush and not context.populate_existing:
self.session._autoflush()
return context.query.params(self._params).\
with_session(self.session)._execute_and_instances(context)
def count(self):
"""return the 'count'.
Equivalent to :meth:`.Query.count`.
Note this uses a subquery to ensure an accurate count regardless
of the structure of the original statement.
.. versionadded:: 1.1.6
"""
col = func.count(literal_column('*'))
bq = self.bq.with_criteria(lambda q: q.from_self(col))
return bq.for_session(self.session).params(self._params).scalar()
def scalar(self):
"""Return the first element of the first result or None
if no rows present. If multiple rows are returned,
raises MultipleResultsFound.
Equivalent to :meth:`.Query.scalar`.
.. versionadded:: 1.1.6
"""
try:
ret = self.one()
if not isinstance(ret, tuple):
return ret
return ret[0]
except orm_exc.NoResultFound:
return None
def first(self):
"""Return the first row.
Equivalent to :meth:`.Query.first`.
"""
bq = self.bq.with_criteria(lambda q: q.slice(0, 1))
ret = list(bq.for_session(self.session).params(self._params))
if len(ret) > 0:
return ret[0]
else:
return None
def one(self):
"""Return exactly one result or raise an exception.
Equivalent to :meth:`.Query.one`.
"""
try:
ret = self.one_or_none()
except orm_exc.MultipleResultsFound:
raise orm_exc.MultipleResultsFound(
"Multiple rows were found for one()")
else:
if ret is None:
raise orm_exc.NoResultFound("No row was found for one()")
return ret
def one_or_none(self):
"""Return one or zero results, or raise an exception for multiple
rows.
Equivalent to :meth:`.Query.one_or_none`.
.. versionadded:: 1.0.9
"""
ret = list(self)
l = len(ret)
if l == 1:
return ret[0]
elif l == 0:
return None
else:
raise orm_exc.MultipleResultsFound(
"Multiple rows were found for one_or_none()")
def all(self):
"""Return all rows.
Equivalent to :meth:`.Query.all`.
"""
return list(self)
def get(self, ident):
"""Retrieve an object based on identity.
Equivalent to :meth:`.Query.get`.
"""
query = self.bq.steps[0](self.session)
return query._get_impl(ident, self._load_on_ident)
def _load_on_ident(self, query, key):
"""Load the given identity key from the database."""
ident = key[1]
mapper = query._mapper_zero()
_get_clause, _get_params = mapper._get_clause
def setup(query):
_lcl_get_clause = _get_clause
q = query._clone()
q._get_condition()
q._order_by = None
# None present in ident - turn those comparisons
# into "IS NULL"
if None in ident:
nones = set([
_get_params[col].key for col, value in
zip(mapper.primary_key, ident) if value is None
])
_lcl_get_clause = sql_util.adapt_criterion_to_null(
_lcl_get_clause, nones)
_lcl_get_clause = q._adapt_clause(_lcl_get_clause, True, False)
q._criterion = _lcl_get_clause
return q
# cache the query against a key that includes
# which positions in the primary key are NULL
# (remember, we can map to an OUTER JOIN)
bq = self.bq
# add the clause we got from mapper._get_clause to the cache
# key so that if a race causes multiple calls to _get_clause,
# we've cached on ours
bq = bq._clone()
bq._cache_key += (_get_clause, )
bq = bq.with_criteria(setup, tuple(elem is None for elem in ident))
params = dict([
(_get_params[primary_key].key, id_val)
for id_val, primary_key in zip(ident, mapper.primary_key)
])
result = list(bq.for_session(self.session).params(**params))
l = len(result)
if l > 1:
raise orm_exc.MultipleResultsFound()
elif l:
return result[0]
else:
return None
def bake_lazy_loaders():
"""Enable the use of baked queries for all lazyloaders systemwide.
This operation should be safe for all lazy loaders, and will reduce
Python overhead for these operations.
"""
BakedLazyLoader._strategy_keys[:] = []
properties.RelationshipProperty.strategy_for(
lazy="select")(BakedLazyLoader)
properties.RelationshipProperty.strategy_for(
lazy=True)(BakedLazyLoader)
properties.RelationshipProperty.strategy_for(
lazy="baked_select")(BakedLazyLoader)
strategies.LazyLoader._strategy_keys[:] = BakedLazyLoader._strategy_keys[:]
def unbake_lazy_loaders():
"""Disable the use of baked queries for all lazyloaders systemwide.
This operation reverts the changes produced by :func:`.bake_lazy_loaders`.
"""
strategies.LazyLoader._strategy_keys[:] = []
BakedLazyLoader._strategy_keys[:] = []
properties.RelationshipProperty.strategy_for(
lazy="select")(strategies.LazyLoader)
properties.RelationshipProperty.strategy_for(
lazy=True)(strategies.LazyLoader)
properties.RelationshipProperty.strategy_for(
lazy="baked_select")(BakedLazyLoader)
assert strategies.LazyLoader._strategy_keys
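# Example: the two functions above swap the strategy used for lazy="select"
# relationships systemwide; a sketch of enabling and reverting:
#
#     from sqlalchemy.ext import baked
#     baked.bake_lazy_loaders()    # lazy loads now use cached baked queries
#     # ...
#     baked.unbake_lazy_loaders()  # restore the default LazyLoader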
@sqla_log.class_logger
@properties.RelationshipProperty.strategy_for(lazy="baked_select")
class BakedLazyLoader(strategies.LazyLoader):
def _emit_lazyload(self, session, state, ident_key, passive):
q = BakedQuery(
self.mapper._compiled_cache,
lambda session: session.query(self.mapper))
q.add_criteria(
lambda q: q._adapt_all_clauses()._with_invoke_all_eagers(False),
self.parent_property)
if not self.parent_property.bake_queries:
q.spoil(full=True)
if self.parent_property.secondary is not None:
q.add_criteria(
lambda q:
q.select_from(self.mapper, self.parent_property.secondary))
pending = not state.key
# don't autoflush on pending
if pending or passive & attributes.NO_AUTOFLUSH:
q.add_criteria(lambda q: q.autoflush(False))
if state.load_options:
q.spoil()
args = state.load_path[self.parent_property]
q.add_criteria(
lambda q:
q._with_current_path(args), args)
q.add_criteria(
lambda q: q._conditional_options(*state.load_options))
if self.use_get:
return q(session)._load_on_ident(
session.query(self.mapper), ident_key)
if self.parent_property.order_by:
q.add_criteria(
lambda q:
q.order_by(*util.to_list(self.parent_property.order_by)))
for rev in self.parent_property._reverse_property:
# reverse props that are MANYTOONE are loading *this*
# object from get(), so don't need to eager out to those.
if rev.direction is interfaces.MANYTOONE and \
rev._use_get and \
not isinstance(rev.strategy, strategies.LazyLoader):
q.add_criteria(
lambda q:
q.options(
strategy_options.Load.for_existing_path(
q._current_path[rev.parent]
).baked_lazyload(rev.key)
)
)
lazy_clause, params = self._generate_lazy_clause(state, passive)
if pending:
if orm_util._none_set.intersection(params.values()):
return None
q.add_criteria(lambda q: q.filter(lazy_clause))
result = q(session).params(**params).all()
if self.uselist:
return result
else:
l = len(result)
if l:
if l > 1:
util.warn(
"Multiple rows returned with "
"uselist=False for lazily-loaded attribute '%s' "
% self.parent_property)
return result[0]
else:
return None
@strategy_options.loader_option()
def baked_lazyload(loadopt, attr):
"""Indicate that the given attribute should be loaded using "lazy"
loading with a "baked" query used in the load.
"""
return loadopt.set_relationship_strategy(attr, {"lazy": "baked_select"})
@baked_lazyload._add_unbound_fn
def baked_lazyload(*keys):
return strategy_options._UnboundLoad._from_keys(
strategy_options._UnboundLoad.baked_lazyload, keys, False, {})
@baked_lazyload._add_unbound_all_fn
def baked_lazyload_all(*keys):
return strategy_options._UnboundLoad._from_keys(
strategy_options._UnboundLoad.baked_lazyload, keys, True, {})
baked_lazyload = baked_lazyload._unbound_fn
baked_lazyload_all = baked_lazyload_all._unbound_all_fn
bakery = BakedQuery.bakery
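# Example of typical bakery use; a sketch assuming a mapped class ``User``:
#
#     from sqlalchemy import bindparam
#     from sqlalchemy.ext import baked
#
#     bakery = baked.bakery()
#
#     def lookup_user(session, username):
#         baked_query = bakery(lambda s: s.query(User))
#         baked_query += lambda q: q.filter(
#             User.name == bindparam('username'))
#         # Query construction and SQL compilation are cached in the
#         # bakery; only the bound parameter value varies per call.
#         return baked_query(session).params(username=username).first()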

18
sqlalchemy/ext/declarative/__init__.py Normal file

@@ -0,0 +1,18 @@
# ext/declarative/__init__.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from .api import declarative_base, synonym_for, comparable_using, \
instrument_declarative, ConcreteBase, AbstractConcreteBase, \
DeclarativeMeta, DeferredReflection, has_inherited_table,\
declared_attr, as_declarative
__all__ = ['declarative_base', 'synonym_for', 'has_inherited_table',
'comparable_using', 'instrument_declarative', 'declared_attr',
'as_declarative',
'ConcreteBase', 'AbstractConcreteBase', 'DeclarativeMeta',
'DeferredReflection']

696
sqlalchemy/ext/declarative/api.py Normal file

@@ -0,0 +1,696 @@
# ext/declarative/api.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Public API functions and helpers for declarative."""
from ...schema import Table, MetaData, Column
from ...orm import synonym as _orm_synonym, \
comparable_property,\
interfaces, properties, attributes
from ...orm.util import polymorphic_union
from ...orm.base import _mapper_or_none
from ...util import OrderedDict, hybridmethod, hybridproperty
from ... import util
from ... import exc
import weakref
from .base import _as_declarative, \
_declarative_constructor,\
_DeferredMapperConfig, _add_attribute
from .clsregistry import _class_resolver
def instrument_declarative(cls, registry, metadata):
"""Given a class, configure the class declaratively,
using the given registry, which can be any dictionary, and
MetaData object.
"""
if '_decl_class_registry' in cls.__dict__:
raise exc.InvalidRequestError(
"Class %r already has been "
"instrumented declaratively" % cls)
cls._decl_class_registry = registry
cls.metadata = metadata
_as_declarative(cls, cls.__name__, cls.__dict__)
def has_inherited_table(cls):
"""Given a class, return True if any of the classes it inherits from has a
mapped table, otherwise return False.
This is used in declarative mixins to build attributes that behave
differently for the base class vs. a subclass in an inheritance
hierarchy.
.. seealso::
:ref:`decl_mixin_inheritance`
"""
for class_ in cls.__mro__[1:]:
if getattr(class_, '__table__', None) is not None:
return True
return False
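# Example: a sketch of the usual mixin pattern for has_inherited_table(),
# giving only the base of a joined-inheritance hierarchy its own table
# (``Tablename`` is an assumed mixin name):
#
#     class Tablename(object):
#         @declared_attr
#         def __tablename__(cls):
#             if has_inherited_table(cls):
#                 return None
#             return cls.__name__.lower()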
class DeclarativeMeta(type):
def __init__(cls, classname, bases, dict_):
if '_decl_class_registry' not in cls.__dict__:
_as_declarative(cls, classname, cls.__dict__)
type.__init__(cls, classname, bases, dict_)
def __setattr__(cls, key, value):
_add_attribute(cls, key, value)
def synonym_for(name, map_column=False):
"""Decorator, make a Python @property a query synonym for a column.
A decorator version of :func:`~sqlalchemy.orm.synonym`. The function being
decorated is the 'descriptor', otherwise passes its arguments through to
synonym()::
@synonym_for('col')
@property
def prop(self):
return 'special sauce'
The regular ``synonym()`` is also usable directly in a declarative setting
and may be convenient for read/write properties::
prop = synonym('col', descriptor=property(_read_prop, _write_prop))
"""
def decorate(fn):
return _orm_synonym(name, map_column=map_column, descriptor=fn)
return decorate
def comparable_using(comparator_factory):
"""Decorator, allow a Python @property to be used in query criteria.
This is a decorator front end to
:func:`~sqlalchemy.orm.comparable_property` that passes
through the comparator_factory and the function being decorated::
@comparable_using(MyComparatorType)
@property
def prop(self):
return 'special sauce'
The regular ``comparable_property()`` is also usable directly in a
declarative setting and may be convenient for read/write properties::
prop = comparable_property(MyComparatorType)
"""
def decorate(fn):
return comparable_property(comparator_factory, fn)
return decorate
class declared_attr(interfaces._MappedAttribute, property):
"""Mark a class-level method as representing the definition of
a mapped property or special declarative member name.
@declared_attr turns the attribute into a scalar-like
property that can be invoked from the uninstantiated class.
Declarative treats attributes specifically marked with
@declared_attr as returning a construct that is specific
to mapping or declarative table configuration. The name
of the attribute is that of what the non-dynamic version
of the attribute would be.
@declared_attr is more often than not applicable to mixins,
to define relationships that are to be applied to different
implementors of the class::
class ProvidesUser(object):
"A mixin that adds a 'user' relationship to classes."
@declared_attr
def user(self):
return relationship("User")
It also can be applied to mapped classes, such as to provide
a "polymorphic" scheme for inheritance::
class Employee(Base):
id = Column(Integer, primary_key=True)
type = Column(String(50), nullable=False)
@declared_attr
def __tablename__(cls):
return cls.__name__.lower()
@declared_attr
def __mapper_args__(cls):
if cls.__name__ == 'Employee':
return {
"polymorphic_on":cls.type,
"polymorphic_identity":"Employee"
}
else:
return {"polymorphic_identity":cls.__name__}
.. versionchanged:: 0.8 :class:`.declared_attr` can be used with
non-ORM or extension attributes, such as user-defined attributes
or :func:`.association_proxy` objects, which will be assigned
to the class at class construction time.
"""
def __init__(self, fget, cascading=False):
super(declared_attr, self).__init__(fget)
self.__doc__ = fget.__doc__
self._cascading = cascading
def __get__(desc, self, cls):
reg = cls.__dict__.get('_sa_declared_attr_reg', None)
if reg is None:
manager = attributes.manager_of_class(cls)
if manager is None:
util.warn(
"Unmanaged access of declarative attribute %s from "
"non-mapped class %s" %
(desc.fget.__name__, cls.__name__))
return desc.fget(cls)
elif desc in reg:
return reg[desc]
else:
reg[desc] = obj = desc.fget(cls)
return obj
@hybridmethod
def _stateful(cls, **kw):
return _stateful_declared_attr(**kw)
@hybridproperty
def cascading(cls):
"""Mark a :class:`.declared_attr` as cascading.
This is a special-use modifier which indicates that a column
or MapperProperty-based declared attribute should be configured
distinctly per mapped subclass, within a mapped-inheritance scenario.
Below, both MyClass as well as MySubClass will have a distinct
``id`` Column object established::
class HasIdMixin(object):
@declared_attr.cascading
def id(cls):
if has_inherited_table(cls):
return Column(ForeignKey('myclass.id'), primary_key=True)
else:
return Column(Integer, primary_key=True)
class MyClass(HasIdMixin, Base):
__tablename__ = 'myclass'
# ...
class MySubClass(MyClass):
""
# ...
The behavior of the above configuration is that ``MySubClass``
will refer to both its own ``id`` column as well as that of
``MyClass`` underneath the attribute named ``id``.
.. seealso::
:ref:`declarative_inheritance`
:ref:`mixin_inheritance_columns`
"""
return cls._stateful(cascading=True)
class _stateful_declared_attr(declared_attr):
def __init__(self, **kw):
self.kw = kw
def _stateful(self, **kw):
new_kw = self.kw.copy()
new_kw.update(kw)
return _stateful_declared_attr(**new_kw)
def __call__(self, fn):
return declared_attr(fn, **self.kw)
def declarative_base(bind=None, metadata=None, mapper=None, cls=object,
name='Base', constructor=_declarative_constructor,
class_registry=None,
metaclass=DeclarativeMeta):
r"""Construct a base class for declarative class definitions.
The new base class will be given a metaclass that produces
appropriate :class:`~sqlalchemy.schema.Table` objects and makes
the appropriate :func:`~sqlalchemy.orm.mapper` calls based on the
information provided declaratively in the class and any subclasses
of the class.
:param bind: An optional
:class:`~sqlalchemy.engine.Connectable`, will be assigned
the ``bind`` attribute on the :class:`~sqlalchemy.schema.MetaData`
instance.
:param metadata:
An optional :class:`~sqlalchemy.schema.MetaData` instance. All
:class:`~sqlalchemy.schema.Table` objects implicitly declared by
subclasses of the base will share this MetaData. A MetaData instance
will be created if none is provided. The
:class:`~sqlalchemy.schema.MetaData` instance will be available via the
`metadata` attribute of the generated declarative base class.
:param mapper:
An optional callable, defaults to :func:`~sqlalchemy.orm.mapper`. Will
be used to map subclasses to their Tables.
:param cls:
Defaults to :class:`object`. A type to use as the base for the generated
declarative base class. May be a class or tuple of classes.
:param name:
Defaults to ``Base``. The display name for the generated
class. Customizing this is not required, but can improve clarity in
tracebacks and debugging.
:param constructor:
Defaults to
:func:`~sqlalchemy.ext.declarative.base._declarative_constructor`, an
__init__ implementation that assigns \**kwargs for declared
fields and relationships to an instance. If ``None`` is supplied,
no __init__ will be provided and construction will fall back to
cls.__init__ by way of the normal Python semantics.
:param class_registry: optional dictionary that will serve as the
registry of class names-> mapped classes when string names
are used to identify classes inside of :func:`.relationship`
and others. Allows two or more declarative base classes
to share the same registry of class names for simplified
inter-base relationships.
:param metaclass:
Defaults to :class:`.DeclarativeMeta`. A metaclass or __metaclass__
compatible callable to use as the meta type of the generated
declarative base class.
.. versionchanged:: 1.1 if :paramref:`.declarative_base.cls` is a single class (rather
than a tuple), the constructed base class will inherit its docstring.
.. seealso::
:func:`.as_declarative`
"""
lcl_metadata = metadata or MetaData()
if bind:
lcl_metadata.bind = bind
if class_registry is None:
class_registry = weakref.WeakValueDictionary()
bases = not isinstance(cls, tuple) and (cls,) or cls
class_dict = dict(_decl_class_registry=class_registry,
metadata=lcl_metadata)
if isinstance(cls, type):
class_dict['__doc__'] = cls.__doc__
if constructor:
class_dict['__init__'] = constructor
if mapper:
class_dict['__mapper_cls__'] = mapper
return metaclass(name, bases, class_dict)
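# Example of typical declarative_base() use; a sketch with assumed names:
#
#     from sqlalchemy import Column, Integer, String
#
#     Base = declarative_base()
#
#     class User(Base):
#         __tablename__ = 'user'
#
#         id = Column(Integer, primary_key=True)
#         name = Column(String(50))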
def as_declarative(**kw):
"""
Class decorator for :func:`.declarative_base`.
Provides a syntactical shortcut to the ``cls`` argument
sent to :func:`.declarative_base`, allowing the base class
to be converted in-place to a "declarative" base::
from sqlalchemy.ext.declarative import as_declarative
@as_declarative()
class Base(object):
@declared_attr
def __tablename__(cls):
return cls.__name__.lower()
id = Column(Integer, primary_key=True)
class MyMappedClass(Base):
# ...
All keyword arguments passed to :func:`.as_declarative` are passed
along to :func:`.declarative_base`.
.. versionadded:: 0.8.3
.. seealso::
:func:`.declarative_base`
"""
def decorate(cls):
kw['cls'] = cls
kw['name'] = cls.__name__
return declarative_base(**kw)
return decorate
class ConcreteBase(object):
"""A helper class for 'concrete' declarative mappings.
:class:`.ConcreteBase` will use the :func:`.polymorphic_union`
function automatically, against all tables mapped as a subclass
to this class. The function is called via the
``__declare_last__()`` function, which is essentially
a hook for the :meth:`.after_configured` event.
:class:`.ConcreteBase` produces a mapped
table for the class itself. Compare to :class:`.AbstractConcreteBase`,
which does not.
Example::
from sqlalchemy.ext.declarative import ConcreteBase
class Employee(ConcreteBase, Base):
__tablename__ = 'employee'
employee_id = Column(Integer, primary_key=True)
name = Column(String(50))
__mapper_args__ = {
'polymorphic_identity':'employee',
'concrete':True}
class Manager(Employee):
__tablename__ = 'manager'
employee_id = Column(Integer, primary_key=True)
name = Column(String(50))
manager_data = Column(String(40))
__mapper_args__ = {
'polymorphic_identity':'manager',
'concrete':True}
.. seealso::
:class:`.AbstractConcreteBase`
:ref:`concrete_inheritance`
:ref:`inheritance_concrete_helpers`
"""
@classmethod
def _create_polymorphic_union(cls, mappers):
return polymorphic_union(OrderedDict(
(mp.polymorphic_identity, mp.local_table)
for mp in mappers
), 'type', 'pjoin')
@classmethod
def __declare_first__(cls):
m = cls.__mapper__
if m.with_polymorphic:
return
mappers = list(m.self_and_descendants)
pjoin = cls._create_polymorphic_union(mappers)
m._set_with_polymorphic(("*", pjoin))
m._set_polymorphic_on(pjoin.c.type)
class AbstractConcreteBase(ConcreteBase):
"""A helper class for 'concrete' declarative mappings.
:class:`.AbstractConcreteBase` will use the :func:`.polymorphic_union`
function automatically, against all tables mapped as a subclass
to this class. The function is called via the
``__declare_last__()`` function, which is essentially
a hook for the :meth:`.after_configured` event.
:class:`.AbstractConcreteBase` does produce a mapped class
for the base class, however it is not persisted to any table; it
is instead mapped directly to the "polymorphic" selectable directly
and is only used for selecting. Compare to :class:`.ConcreteBase`,
which does create a persisted table for the base class.
Example::
from sqlalchemy.ext.declarative import AbstractConcreteBase
class Employee(AbstractConcreteBase, Base):
pass
class Manager(Employee):
__tablename__ = 'manager'
employee_id = Column(Integer, primary_key=True)
name = Column(String(50))
manager_data = Column(String(40))
__mapper_args__ = {
'polymorphic_identity':'manager',
'concrete':True}
The abstract base class is handled by declarative in a special way;
at class configuration time, it behaves like a declarative mixin
or an ``__abstract__`` base class. Once classes are configured
and mappings are produced, it then gets mapped itself, but
after all of its descendants. This is a unique system of mapping
not found anywhere else in SQLAlchemy.
Using this approach, we can specify columns and properties
that will take effect on mapped subclasses, in the way that
we normally do as in :ref:`declarative_mixins`::
class Company(Base):
__tablename__ = 'company'
id = Column(Integer, primary_key=True)
class Employee(AbstractConcreteBase, Base):
employee_id = Column(Integer, primary_key=True)
@declared_attr
def company_id(cls):
return Column(ForeignKey('company.id'))
@declared_attr
def company(cls):
return relationship("Company")
class Manager(Employee):
__tablename__ = 'manager'
name = Column(String(50))
manager_data = Column(String(40))
__mapper_args__ = {
'polymorphic_identity':'manager',
'concrete':True}
When we make use of our mappings however, both ``Manager`` and
``Employee`` will have an independently usable ``.company`` attribute::
session.query(Employee).filter(Employee.company.has(id=5))
.. versionchanged:: 1.0.0 - The mechanics of :class:`.AbstractConcreteBase`
have been reworked to support relationships established directly
on the abstract base, without any special configurational steps.
.. seealso::
:class:`.ConcreteBase`
:ref:`concrete_inheritance`
:ref:`inheritance_concrete_helpers`
"""
__no_table__ = True
@classmethod
def __declare_first__(cls):
cls._sa_decl_prepare_nocascade()
@classmethod
def _sa_decl_prepare_nocascade(cls):
if getattr(cls, '__mapper__', None):
return
to_map = _DeferredMapperConfig.config_for_cls(cls)
# can't rely on 'self_and_descendants' here
# since technically an immediate subclass
# might not be mapped, but a subclass
# may be.
mappers = []
stack = list(cls.__subclasses__())
while stack:
klass = stack.pop()
stack.extend(klass.__subclasses__())
mn = _mapper_or_none(klass)
if mn is not None:
mappers.append(mn)
pjoin = cls._create_polymorphic_union(mappers)
# For columns that were declared on the class, these
# are normally ignored with the "__no_table__" mapping,
# unless they have a different attribute key vs. col name
# and are in the properties argument.
# In that case, ensure we update the properties entry
# to the correct column from the pjoin target table.
declared_cols = set(to_map.declared_columns)
for k, v in list(to_map.properties.items()):
if v in declared_cols:
to_map.properties[k] = pjoin.c[v.key]
to_map.local_table = pjoin
m_args = to_map.mapper_args_fn or dict
def mapper_args():
args = m_args()
args['polymorphic_on'] = pjoin.c.type
return args
to_map.mapper_args_fn = mapper_args
m = to_map.map()
for scls in cls.__subclasses__():
sm = _mapper_or_none(scls)
if sm and sm.concrete and cls in scls.__bases__:
sm._set_concrete_base(m)
class DeferredReflection(object):
"""A helper class for construction of mappings based on
a deferred reflection step.
Normally, declarative can be used with reflection by
setting a :class:`.Table` object using autoload=True
as the ``__table__`` attribute on a declarative class.
The caveat is that the :class:`.Table` must be fully
reflected, or at the very least have a primary key column,
at the point at which a normal declarative mapping is
constructed, meaning the :class:`.Engine` must be available
at class declaration time.
The :class:`.DeferredReflection` mixin moves the construction
of mappers to be at a later point, after a specific
method is called which first reflects all :class:`.Table`
objects created so far. Classes can define it as such::
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.declarative import DeferredReflection
Base = declarative_base()
class MyClass(DeferredReflection, Base):
__tablename__ = 'mytable'
Above, ``MyClass`` is not yet mapped. After a series of
classes have been defined in the above fashion, all tables
can be reflected and mappings created using
:meth:`.prepare`::
engine = create_engine("someengine://...")
DeferredReflection.prepare(engine)
The :class:`.DeferredReflection` mixin can be applied to individual
classes, used as the base for the declarative base itself,
or used in a custom abstract class. Using an abstract base
allows only a subset of classes to be prepared for a
particular prepare step, which is necessary for applications
that use more than one engine. For example, if an application
has two engines, you might use two bases, and prepare each
separately, e.g.::
class ReflectedOne(DeferredReflection, Base):
__abstract__ = True
class ReflectedTwo(DeferredReflection, Base):
__abstract__ = True
class MyClass(ReflectedOne):
__tablename__ = 'mytable'
class MyOtherClass(ReflectedOne):
__tablename__ = 'myothertable'
class YetAnotherClass(ReflectedTwo):
__tablename__ = 'yetanothertable'
# ... etc.
Above, the class hierarchies for ``ReflectedOne`` and
``ReflectedTwo`` can be configured separately::
ReflectedOne.prepare(engine_one)
ReflectedTwo.prepare(engine_two)
.. versionadded:: 0.8
"""
@classmethod
def prepare(cls, engine):
"""Reflect all :class:`.Table` objects for all current
:class:`.DeferredReflection` subclasses"""
to_map = _DeferredMapperConfig.classes_for_base(cls)
for thingy in to_map:
cls._sa_decl_prepare(thingy.local_table, engine)
thingy.map()
mapper = thingy.cls.__mapper__
metadata = mapper.class_.metadata
for rel in mapper._props.values():
if isinstance(rel, properties.RelationshipProperty) and \
rel.secondary is not None:
if isinstance(rel.secondary, Table):
cls._reflect_table(rel.secondary, engine)
elif isinstance(rel.secondary, _class_resolver):
rel.secondary._resolvers += (
cls._sa_deferred_table_resolver(engine, metadata),
)
@classmethod
def _sa_deferred_table_resolver(cls, engine, metadata):
def _resolve(key):
t1 = Table(key, metadata)
cls._reflect_table(t1, engine)
return t1
return _resolve
@classmethod
def _sa_decl_prepare(cls, local_table, engine):
# autoload Table, which is already
# present in the metadata. This
# will fill in db-loaded columns
# into the existing Table object.
if local_table is not None:
cls._reflect_table(local_table, engine)
@classmethod
def _reflect_table(cls, table, engine):
Table(table.name,
table.metadata,
extend_existing=True,
autoload_replace=False,
autoload=True,
autoload_with=engine,
schema=table.schema)

662
sqlalchemy/ext/declarative/base.py Normal file

@@ -0,0 +1,662 @@
# ext/declarative/base.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Internal implementation for declarative."""
from ...schema import Table, Column
from ...orm import mapper, class_mapper, synonym
from ...orm.interfaces import MapperProperty
from ...orm.properties import ColumnProperty, CompositeProperty
from ...orm.attributes import QueryableAttribute
from ...orm.base import _is_mapped_class
from ... import util, exc
from ...util import topological
from ...sql import expression
from ... import event
from . import clsregistry
import collections
import weakref
from sqlalchemy.orm import instrumentation
declared_attr = declarative_props = None
def _declared_mapping_info(cls):
# deferred mapping
if _DeferredMapperConfig.has_cls(cls):
return _DeferredMapperConfig.config_for_cls(cls)
# regular mapping
elif _is_mapped_class(cls):
return class_mapper(cls, configure=False)
else:
return None
def _resolve_for_abstract(cls):
if cls is object:
return None
if _get_immediate_cls_attr(cls, '__abstract__', strict=True):
for sup in cls.__bases__:
sup = _resolve_for_abstract(sup)
if sup is not None:
return sup
else:
return None
else:
return cls
def _get_immediate_cls_attr(cls, attrname, strict=False):
"""return an attribute of the class that is either present directly
on the class, i.e. not on a superclass, or is from a superclass but
this superclass is a mixin, that is, not a descendant of
the declarative base.
This is used to detect attributes that indicate something about
a mapped class independently from any mapped classes that it may
inherit from.
"""
if not issubclass(cls, object):
return None
for base in cls.__mro__:
_is_declarative_inherits = hasattr(base, '_decl_class_registry')
if attrname in base.__dict__ and (
base is cls or
((base in cls.__bases__ if strict else True)
and not _is_declarative_inherits)
):
return getattr(base, attrname)
else:
return None
def _as_declarative(cls, classname, dict_):
global declared_attr, declarative_props
if declared_attr is None:
from .api import declared_attr
declarative_props = (declared_attr, util.classproperty)
if _get_immediate_cls_attr(cls, '__abstract__', strict=True):
return
_MapperConfig.setup_mapping(cls, classname, dict_)
class _MapperConfig(object):
@classmethod
def setup_mapping(cls, cls_, classname, dict_):
defer_map = _get_immediate_cls_attr(
cls_, '_sa_decl_prepare_nocascade', strict=True) or \
hasattr(cls_, '_sa_decl_prepare')
if defer_map:
cfg_cls = _DeferredMapperConfig
else:
cfg_cls = _MapperConfig
cfg_cls(cls_, classname, dict_)
def __init__(self, cls_, classname, dict_):
self.cls = cls_
# dict_ will be a dictproxy, which we can't write to, and we need to!
self.dict_ = dict(dict_)
self.classname = classname
self.mapped_table = None
self.properties = util.OrderedDict()
self.declared_columns = set()
self.column_copies = {}
self._setup_declared_events()
# temporary registry. While early 1.0 versions
# set up the ClassManager here, by API contract
# we can't do that until there's a mapper.
self.cls._sa_declared_attr_reg = {}
self._scan_attributes()
clsregistry.add_class(self.classname, self.cls)
self._extract_mappable_attributes()
self._extract_declared_columns()
self._setup_table()
self._setup_inheritance()
self._early_mapping()
def _early_mapping(self):
self.map()
def _setup_declared_events(self):
if _get_immediate_cls_attr(self.cls, '__declare_last__'):
@event.listens_for(mapper, "after_configured")
def after_configured():
self.cls.__declare_last__()
if _get_immediate_cls_attr(self.cls, '__declare_first__'):
@event.listens_for(mapper, "before_configured")
def before_configured():
self.cls.__declare_first__()
def _scan_attributes(self):
cls = self.cls
dict_ = self.dict_
column_copies = self.column_copies
mapper_args_fn = None
table_args = inherited_table_args = None
tablename = None
for base in cls.__mro__:
class_mapped = base is not cls and \
_declared_mapping_info(base) is not None and \
not _get_immediate_cls_attr(
base, '_sa_decl_prepare_nocascade', strict=True)
if not class_mapped and base is not cls:
self._produce_column_copies(base)
for name, obj in vars(base).items():
if name == '__mapper_args__':
if not mapper_args_fn and (
not class_mapped or
isinstance(obj, declarative_props)
):
# don't even invoke __mapper_args__ until
# after we've determined everything about the
# mapped table.
# make a copy of it so a class-level dictionary
# is not overwritten when we update column-based
# arguments.
mapper_args_fn = lambda: dict(cls.__mapper_args__)
elif name == '__tablename__':
if not tablename and (
not class_mapped or
isinstance(obj, declarative_props)
):
tablename = cls.__tablename__
elif name == '__table_args__':
if not table_args and (
not class_mapped or
isinstance(obj, declarative_props)
):
table_args = cls.__table_args__
if not isinstance(
table_args, (tuple, dict, type(None))):
raise exc.ArgumentError(
"__table_args__ value must be a tuple, "
"dict, or None")
if base is not cls:
inherited_table_args = True
elif class_mapped:
if isinstance(obj, declarative_props):
util.warn("Regular (i.e. not __special__) "
"attribute '%s.%s' uses @declared_attr, "
"but owning class %s is mapped - "
"not applying to subclass %s."
% (base.__name__, name, base, cls))
continue
elif base is not cls:
# we're a mixin, abstract base, or something that is
# acting like that for now.
if isinstance(obj, Column):
# already copied columns to the mapped class.
continue
elif isinstance(obj, MapperProperty):
raise exc.InvalidRequestError(
"Mapper properties (i.e. deferred,"
"column_property(), relationship(), etc.) must "
"be declared as @declared_attr callables "
"on declarative mixin classes.")
elif isinstance(obj, declarative_props):
oldclassprop = isinstance(obj, util.classproperty)
if not oldclassprop and obj._cascading:
dict_[name] = column_copies[obj] = \
ret = obj.__get__(obj, cls)
setattr(cls, name, ret)
else:
if oldclassprop:
util.warn_deprecated(
"Use of sqlalchemy.util.classproperty on "
"declarative classes is deprecated.")
dict_[name] = column_copies[obj] = \
ret = getattr(cls, name)
if isinstance(ret, (Column, MapperProperty)) and \
ret.doc is None:
ret.doc = obj.__doc__
if inherited_table_args and not tablename:
table_args = None
self.table_args = table_args
self.tablename = tablename
self.mapper_args_fn = mapper_args_fn
def _produce_column_copies(self, base):
cls = self.cls
dict_ = self.dict_
column_copies = self.column_copies
# copy mixin columns to the mapped class
for name, obj in vars(base).items():
if isinstance(obj, Column):
if getattr(cls, name) is not obj:
# if column has been overridden
# (like by the InstrumentedAttribute of the
# superclass), skip
continue
elif obj.foreign_keys:
raise exc.InvalidRequestError(
"Columns with foreign keys to other columns "
"must be declared as @declared_attr callables "
"on declarative mixin classes. ")
elif name not in dict_ and not (
'__table__' in dict_ and
(obj.name or name) in dict_['__table__'].c
):
column_copies[obj] = copy_ = obj.copy()
copy_._creation_order = obj._creation_order
setattr(cls, name, copy_)
dict_[name] = copy_
def _extract_mappable_attributes(self):
cls = self.cls
dict_ = self.dict_
our_stuff = self.properties
for k in list(dict_):
if k in ('__table__', '__tablename__', '__mapper_args__'):
continue
value = dict_[k]
if isinstance(value, declarative_props):
value = getattr(cls, k)
elif isinstance(value, QueryableAttribute) and \
value.class_ is not cls and \
value.key != k:
# detect a QueryableAttribute that's already mapped being
# assigned elsewhere in userland, turn into a synonym()
value = synonym(value.key)
setattr(cls, k, value)
if (isinstance(value, tuple) and len(value) == 1 and
isinstance(value[0], (Column, MapperProperty))):
util.warn("Ignoring declarative-like tuple value of attribute "
"%s: possibly a copy-and-paste error with a comma "
"left at the end of the line?" % k)
continue
elif not isinstance(value, (Column, MapperProperty)):
# using @declared_attr for some object that
# isn't Column/MapperProperty; remove from the dict_
# and place the evaluated value onto the class.
if not k.startswith('__'):
dict_.pop(k)
setattr(cls, k, value)
continue
# we expect to see the name 'metadata' in some valid cases;
# however at this point we see it's assigned to something trying
# to be mapped, so raise for that.
elif k == 'metadata':
raise exc.InvalidRequestError(
"Attribute name 'metadata' is reserved "
"for the MetaData instance when using a "
"declarative base class."
)
prop = clsregistry._deferred_relationship(cls, value)
our_stuff[k] = prop
def _extract_declared_columns(self):
our_stuff = self.properties
# set up attributes in the order they were created
our_stuff.sort(key=lambda key: our_stuff[key]._creation_order)
# extract columns from the class dict
declared_columns = self.declared_columns
name_to_prop_key = collections.defaultdict(set)
for key, c in list(our_stuff.items()):
if isinstance(c, (ColumnProperty, CompositeProperty)):
for col in c.columns:
if isinstance(col, Column) and \
col.table is None:
_undefer_column_name(key, col)
if not isinstance(c, CompositeProperty):
name_to_prop_key[col.name].add(key)
declared_columns.add(col)
elif isinstance(c, Column):
_undefer_column_name(key, c)
name_to_prop_key[c.name].add(key)
declared_columns.add(c)
# if the column is the same name as the key,
# remove it from the explicit properties dict.
# the normal rules for assigning column-based properties
# will take over, including precedence of columns
# in multi-column ColumnProperties.
if key == c.key:
del our_stuff[key]
for name, keys in name_to_prop_key.items():
if len(keys) > 1:
util.warn(
"On class %r, Column object %r named "
"directly multiple times, "
"only one will be used: %s. "
"Consider using orm.synonym instead" %
(self.classname, name, (", ".join(sorted(keys))))
)
def _setup_table(self):
cls = self.cls
tablename = self.tablename
table_args = self.table_args
dict_ = self.dict_
declared_columns = self.declared_columns
declared_columns = self.declared_columns = sorted(
declared_columns, key=lambda c: c._creation_order)
table = None
if hasattr(cls, '__table_cls__'):
table_cls = util.unbound_method_to_callable(cls.__table_cls__)
else:
table_cls = Table
if '__table__' not in dict_:
if tablename is not None:
args, table_kw = (), {}
if table_args:
if isinstance(table_args, dict):
table_kw = table_args
elif isinstance(table_args, tuple):
if isinstance(table_args[-1], dict):
args, table_kw = table_args[0:-1], table_args[-1]
else:
args = table_args
autoload = dict_.get('__autoload__')
if autoload:
table_kw['autoload'] = True
cls.__table__ = table = table_cls(
tablename, cls.metadata,
*(tuple(declared_columns) + tuple(args)),
**table_kw)
else:
table = cls.__table__
if declared_columns:
for c in declared_columns:
if not table.c.contains_column(c):
raise exc.ArgumentError(
"Can't add additional column %r when "
"specifying __table__" % c.key
)
self.local_table = table
def _setup_inheritance(self):
table = self.local_table
cls = self.cls
table_args = self.table_args
declared_columns = self.declared_columns
for c in cls.__bases__:
c = _resolve_for_abstract(c)
if c is None:
continue
if _declared_mapping_info(c) is not None and \
not _get_immediate_cls_attr(
c, '_sa_decl_prepare_nocascade', strict=True):
self.inherits = c
break
else:
self.inherits = None
if table is None and self.inherits is None and \
not _get_immediate_cls_attr(cls, '__no_table__'):
raise exc.InvalidRequestError(
"Class %r does not have a __table__ or __tablename__ "
"specified and does not inherit from an existing "
"table-mapped class." % cls
)
elif self.inherits:
inherited_mapper = _declared_mapping_info(self.inherits)
inherited_table = inherited_mapper.local_table
inherited_mapped_table = inherited_mapper.mapped_table
if table is None:
# single table inheritance.
# ensure no table args
if table_args:
raise exc.ArgumentError(
"Can't place __table_args__ on an inherited class "
"with no table."
)
# add any columns declared here to the inherited table.
for c in declared_columns:
if c.primary_key:
raise exc.ArgumentError(
"Can't place primary key columns on an inherited "
"class with no table."
)
if c.name in inherited_table.c:
if inherited_table.c[c.name] is c:
continue
raise exc.ArgumentError(
"Column '%s' on class %s conflicts with "
"existing column '%s'" %
(c, cls, inherited_table.c[c.name])
)
inherited_table.append_column(c)
if inherited_mapped_table is not None and \
inherited_mapped_table is not inherited_table:
inherited_mapped_table._refresh_for_new_column(c)
def _prepare_mapper_arguments(self):
properties = self.properties
if self.mapper_args_fn:
mapper_args = self.mapper_args_fn()
else:
mapper_args = {}
# make sure that column copies are used rather
# than the original columns from any mixins
for k in ('version_id_col', 'polymorphic_on',):
if k in mapper_args:
v = mapper_args[k]
mapper_args[k] = self.column_copies.get(v, v)
assert 'inherits' not in mapper_args, \
"Can't specify 'inherits' explicitly with declarative mappings"
if self.inherits:
mapper_args['inherits'] = self.inherits
if self.inherits and not mapper_args.get('concrete', False):
# single or joined inheritance
# exclude any cols on the inherited table which are
# not mapped on the parent class, to avoid
# mapping columns specific to sibling/nephew classes
inherited_mapper = _declared_mapping_info(self.inherits)
inherited_table = inherited_mapper.local_table
if 'exclude_properties' not in mapper_args:
mapper_args['exclude_properties'] = exclude_properties = \
set(
[c.key for c in inherited_table.c
if c not in inherited_mapper._columntoproperty]
).union(
inherited_mapper.exclude_properties or ()
)
exclude_properties.difference_update(
[c.key for c in self.declared_columns])
# look through columns in the current mapper that
# are keyed to a propname different than the colname
# (if names were the same, we'd have popped it out above,
# in which case the mapper makes this combination).
# See if the superclass has a similar column property.
# If so, join them together.
for k, col in list(properties.items()):
if not isinstance(col, expression.ColumnElement):
continue
if k in inherited_mapper._props:
p = inherited_mapper._props[k]
if isinstance(p, ColumnProperty):
# note here we place the subclass column
# first. See [ticket:1892] for background.
properties[k] = [col] + p.columns
result_mapper_args = mapper_args.copy()
result_mapper_args['properties'] = properties
self.mapper_args = result_mapper_args
def map(self):
self._prepare_mapper_arguments()
if hasattr(self.cls, '__mapper_cls__'):
mapper_cls = util.unbound_method_to_callable(
self.cls.__mapper_cls__)
else:
mapper_cls = mapper
self.cls.__mapper__ = mp_ = mapper_cls(
self.cls,
self.local_table,
**self.mapper_args
)
del self.cls._sa_declared_attr_reg
return mp_
class _DeferredMapperConfig(_MapperConfig):
_configs = util.OrderedDict()
def _early_mapping(self):
pass
@property
def cls(self):
return self._cls()
@cls.setter
def cls(self, class_):
self._cls = weakref.ref(class_, self._remove_config_cls)
self._configs[self._cls] = self
@classmethod
def _remove_config_cls(cls, ref):
cls._configs.pop(ref, None)
@classmethod
def has_cls(cls, class_):
# 2.6 fails on weakref if class_ is an old style class
return isinstance(class_, type) and \
weakref.ref(class_) in cls._configs
@classmethod
def config_for_cls(cls, class_):
return cls._configs[weakref.ref(class_)]
@classmethod
def classes_for_base(cls, base_cls, sort=True):
classes_for_base = [m for m in cls._configs.values()
if issubclass(m.cls, base_cls)]
if not sort:
return classes_for_base
all_m_by_cls = dict(
(m.cls, m)
for m in classes_for_base
)
tuples = []
for m_cls in all_m_by_cls:
tuples.extend(
(all_m_by_cls[base_cls], all_m_by_cls[m_cls])
for base_cls in m_cls.__bases__
if base_cls in all_m_by_cls
)
return list(
topological.sort(
tuples,
classes_for_base
)
)
def map(self):
self._configs.pop(self._cls, None)
return super(_DeferredMapperConfig, self).map()
def _add_attribute(cls, key, value):
"""add an attribute to an existing declarative class.
This runs through the logic to determine MapperProperty,
adds it to the Mapper, adds a column to the mapped Table, etc.
"""
if '__mapper__' in cls.__dict__:
if isinstance(value, Column):
_undefer_column_name(key, value)
cls.__table__.append_column(value)
cls.__mapper__.add_property(key, value)
elif isinstance(value, ColumnProperty):
for col in value.columns:
if isinstance(col, Column) and col.table is None:
_undefer_column_name(key, col)
cls.__table__.append_column(col)
cls.__mapper__.add_property(key, value)
elif isinstance(value, MapperProperty):
cls.__mapper__.add_property(
key,
clsregistry._deferred_relationship(cls, value)
)
elif isinstance(value, QueryableAttribute) and value.key != key:
# detect a QueryableAttribute that's already mapped being
# assigned elsewhere in userland, turn into a synonym()
value = synonym(value.key)
cls.__mapper__.add_property(
key,
clsregistry._deferred_relationship(cls, value)
)
else:
type.__setattr__(cls, key, value)
else:
type.__setattr__(cls, key, value)
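# Example: _add_attribute() is reached via DeclarativeMeta.__setattr__, so
# runtime assignment onto an already-mapped class extends the mapping; a
# sketch with an assumed mapped class ``MyClass``:
#
#     MyClass.data = Column(String(50))  # appended to __table__ and mapper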
def _declarative_constructor(self, **kwargs):
"""A simple constructor that allows initialization from kwargs.
Sets attributes on the constructed instance using the names and
values in ``kwargs``.
Only keys that are present as
attributes of the instance's class are allowed. These could be,
for example, any mapped columns or relationships.
"""
cls_ = type(self)
for k in kwargs:
if not hasattr(cls_, k):
raise TypeError(
"%r is an invalid keyword argument for %s" %
(k, cls_.__name__))
setattr(self, k, kwargs[k])
_declarative_constructor.__name__ = '__init__'
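# Example: with the constructor above installed as the default __init__,
# mapped classes accept their attributes as keyword arguments; a sketch
# with an assumed ``User`` class:
#
#     u = User(name='ed')      # sets .name
#     u = User(nonesuch='ed')  # raises TypeError: invalid keyword argument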
def _undefer_column_name(key, column):
if column.key is None:
column.key = key
if column.name is None:
column.name = key

328
sqlalchemy/ext/declarative/clsregistry.py Normal file

@@ -0,0 +1,328 @@
# ext/declarative/clsregistry.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Routines to handle the string class registry used by declarative.
This system allows specification of classes and expressions used in
:func:`.relationship` using strings.
"""
from ...orm.properties import ColumnProperty, RelationshipProperty, \
SynonymProperty
from ...schema import _get_table_key
from ...orm import class_mapper, interfaces
from ... import util
from ... import inspection
from ... import exc
import weakref
# strong references to registries which we place in
# the _decl_class_registry, which is usually weak referencing.
# the internal registries here link to classes with weakrefs and remove
# themselves when all references to contained classes are removed.
_registries = set()
def add_class(classname, cls):
"""Add a class to the _decl_class_registry associated with the
given declarative class.
"""
if classname in cls._decl_class_registry:
# class already exists.
existing = cls._decl_class_registry[classname]
if not isinstance(existing, _MultipleClassMarker):
existing = \
cls._decl_class_registry[classname] = \
_MultipleClassMarker([cls, existing])
else:
cls._decl_class_registry[classname] = cls
try:
root_module = cls._decl_class_registry['_sa_module_registry']
except KeyError:
cls._decl_class_registry['_sa_module_registry'] = \
root_module = _ModuleMarker('_sa_module_registry', None)
tokens = cls.__module__.split(".")
# build up a tree like this:
# modulename: myapp.snacks.nuts
#
# myapp->snacks->nuts->(classes)
# snacks->nuts->(classes)
# nuts->(classes)
#
# this allows partial token paths to be used.
while tokens:
token = tokens.pop(0)
module = root_module.get_module(token)
for token in tokens:
module = module.get_module(token)
module.add_class(classname, cls)
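# Example: given a class ``myapp.snacks.nuts.Nut`` registered above, any of
# these partial token paths would resolve in a relationship() string:
#
#     relationship("Nut")
#     relationship("nuts.Nut")
#     relationship("myapp.snacks.nuts.Nut")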
class _MultipleClassMarker(object):
"""refers to multiple classes of the same name
within _decl_class_registry.
"""
__slots__ = 'on_remove', 'contents', '__weakref__'
def __init__(self, classes, on_remove=None):
self.on_remove = on_remove
self.contents = set([
weakref.ref(item, self._remove_item) for item in classes])
_registries.add(self)
def __iter__(self):
return (ref() for ref in self.contents)
def attempt_get(self, path, key):
if len(self.contents) > 1:
raise exc.InvalidRequestError(
"Multiple classes found for path \"%s\" "
"in the registry of this declarative "
"base. Please use a fully module-qualified path." %
(".".join(path + [key]))
)
else:
ref = list(self.contents)[0]
cls = ref()
if cls is None:
raise NameError(key)
return cls
def _remove_item(self, ref):
self.contents.remove(ref)
if not self.contents:
_registries.discard(self)
if self.on_remove:
self.on_remove()
def add_item(self, item):
# protect against class registration race condition against
# asynchronous garbage collection calling _remove_item,
# [ticket:3208]
modules = set([
cls.__module__ for cls in
[ref() for ref in self.contents] if cls is not None])
if item.__module__ in modules:
util.warn(
"This declarative base already contains a class with the "
"same class name and module name as %s.%s, and will "
"be replaced in the string-lookup table." % (
item.__module__,
item.__name__
)
)
self.contents.add(weakref.ref(item, self._remove_item))
class _ModuleMarker(object):
""""refers to a module name within
_decl_class_registry.
"""
__slots__ = 'parent', 'name', 'contents', 'mod_ns', 'path', '__weakref__'
def __init__(self, name, parent):
self.parent = parent
self.name = name
self.contents = {}
self.mod_ns = _ModNS(self)
if self.parent:
self.path = self.parent.path + [self.name]
else:
self.path = []
_registries.add(self)
def __contains__(self, name):
return name in self.contents
def __getitem__(self, name):
return self.contents[name]
def _remove_item(self, name):
self.contents.pop(name, None)
if not self.contents and self.parent is not None:
self.parent._remove_item(self.name)
_registries.discard(self)
def resolve_attr(self, key):
return getattr(self.mod_ns, key)
def get_module(self, name):
if name not in self.contents:
marker = _ModuleMarker(name, self)
self.contents[name] = marker
else:
marker = self.contents[name]
return marker
def add_class(self, name, cls):
if name in self.contents:
existing = self.contents[name]
existing.add_item(cls)
else:
existing = self.contents[name] = \
_MultipleClassMarker([cls],
on_remove=lambda: self._remove_item(name))
class _ModNS(object):
__slots__ = '__parent',
def __init__(self, parent):
self.__parent = parent
def __getattr__(self, key):
try:
value = self.__parent.contents[key]
except KeyError:
pass
else:
if value is not None:
if isinstance(value, _ModuleMarker):
return value.mod_ns
else:
assert isinstance(value, _MultipleClassMarker)
return value.attempt_get(self.__parent.path, key)
raise AttributeError("Module %r has no mapped classes "
"registered under the name %r" % (
self.__parent.name, key))
class _GetColumns(object):
__slots__ = 'cls',
def __init__(self, cls):
self.cls = cls
def __getattr__(self, key):
mp = class_mapper(self.cls, configure=False)
if mp:
if key not in mp.all_orm_descriptors:
raise exc.InvalidRequestError(
"Class %r does not have a mapped column named %r"
% (self.cls, key))
desc = mp.all_orm_descriptors[key]
if desc.extension_type is interfaces.NOT_EXTENSION:
prop = desc.property
if isinstance(prop, SynonymProperty):
key = prop.name
elif not isinstance(prop, ColumnProperty):
raise exc.InvalidRequestError(
"Property %r is not an instance of"
" ColumnProperty (i.e. does not correspond"
" directly to a Column)." % key)
return getattr(self.cls, key)
inspection._inspects(_GetColumns)(
lambda target: inspection.inspect(target.cls))
class _GetTable(object):
__slots__ = 'key', 'metadata'
def __init__(self, key, metadata):
self.key = key
self.metadata = metadata
def __getattr__(self, key):
return self.metadata.tables[
_get_table_key(key, self.key)
]
def _determine_container(key, value):
if isinstance(value, _MultipleClassMarker):
value = value.attempt_get([], key)
return _GetColumns(value)
class _class_resolver(object):
def __init__(self, cls, prop, fallback, arg):
self.cls = cls
self.prop = prop
self.arg = self._declarative_arg = arg
self.fallback = fallback
self._dict = util.PopulateDict(self._access_cls)
self._resolvers = ()
def _access_cls(self, key):
cls = self.cls
if key in cls._decl_class_registry:
return _determine_container(key, cls._decl_class_registry[key])
elif key in cls.metadata.tables:
return cls.metadata.tables[key]
elif key in cls.metadata._schemas:
return _GetTable(key, cls.metadata)
elif '_sa_module_registry' in cls._decl_class_registry and \
key in cls._decl_class_registry['_sa_module_registry']:
registry = cls._decl_class_registry['_sa_module_registry']
return registry.resolve_attr(key)
elif self._resolvers:
for resolv in self._resolvers:
value = resolv(key)
if value is not None:
return value
return self.fallback[key]
def __call__(self):
try:
x = eval(self.arg, globals(), self._dict)
if isinstance(x, _GetColumns):
return x.cls
else:
return x
except NameError as n:
raise exc.InvalidRequestError(
"When initializing mapper %s, expression %r failed to "
"locate a name (%r). If this is a class name, consider "
"adding this relationship() to the %r class after "
"both dependent classes have been defined." %
(self.prop.parent, self.arg, n.args[0], self.cls)
)
def _resolver(cls, prop):
import sqlalchemy
from sqlalchemy.orm import foreign, remote
fallback = sqlalchemy.__dict__.copy()
fallback.update({'foreign': foreign, 'remote': remote})
def resolve_arg(arg):
return _class_resolver(cls, prop, fallback, arg)
return resolve_arg
def _deferred_relationship(cls, prop):
if isinstance(prop, RelationshipProperty):
resolve_arg = _resolver(cls, prop)
for attr in ('argument', 'order_by', 'primaryjoin', 'secondaryjoin',
'secondary', '_user_defined_foreign_keys', 'remote_side'):
v = getattr(prop, attr)
if isinstance(v, util.string_types):
setattr(prop, attr, resolve_arg(v))
if prop.backref and isinstance(prop.backref, tuple):
key, kwargs = prop.backref
for attr in ('primaryjoin', 'secondaryjoin', 'secondary',
'foreign_keys', 'remote_side', 'order_by'):
if attr in kwargs and isinstance(kwargs[attr],
util.string_types):
kwargs[attr] = resolve_arg(kwargs[attr])
return prop
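# Example (hypothetical mapping, for illustration only): given
#
#     addresses = relationship("Address", order_by="Address.id")
#
# _deferred_relationship() wraps both strings in _class_resolver callables;
# each is invoked at mapper configuration time, once "Address" can be
# located in the declarative class registry.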

841
sqlalchemy/ext/hybrid.py Normal file
View File

@@ -0,0 +1,841 @@
# ext/hybrid.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
r"""Define attributes on ORM-mapped classes that have "hybrid" behavior.
"hybrid" means the attribute has distinct behaviors defined at the
class level and at the instance level.
The :mod:`~sqlalchemy.ext.hybrid` extension provides a special form of
method decorator; it is around 50 lines of code and has almost no
dependencies on the rest of SQLAlchemy. It can, in theory, work with
any descriptor-based expression system.
Consider a mapping ``Interval``, representing integer ``start`` and ``end``
values. We can define higher level functions on mapped classes that produce
SQL expressions at the class level, and Python expression evaluation at the
instance level. Below, each function decorated with :class:`.hybrid_method` or
:class:`.hybrid_property` may receive ``self`` as an instance of the class, or
as the class itself::
from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session, aliased
from sqlalchemy.ext.hybrid import hybrid_property, hybrid_method
Base = declarative_base()
class Interval(Base):
__tablename__ = 'interval'
id = Column(Integer, primary_key=True)
start = Column(Integer, nullable=False)
end = Column(Integer, nullable=False)
def __init__(self, start, end):
self.start = start
self.end = end
@hybrid_property
def length(self):
return self.end - self.start
@hybrid_method
def contains(self, point):
return (self.start <= point) & (point <= self.end)
@hybrid_method
def intersects(self, other):
return self.contains(other.start) | self.contains(other.end)
Above, the ``length`` property returns the difference between the
``end`` and ``start`` attributes. With an instance of ``Interval``,
this subtraction occurs in Python, using normal Python descriptor
mechanics::
>>> i1 = Interval(5, 10)
>>> i1.length
5
When dealing with the ``Interval`` class itself, the :class:`.hybrid_property`
descriptor evaluates the function body given the ``Interval`` class as
the argument, which when evaluated with SQLAlchemy expression mechanics
returns a new SQL expression::
>>> print(Interval.length)
interval."end" - interval.start
>>> print(Session().query(Interval).filter(Interval.length > 10))
SELECT interval.id AS interval_id, interval.start AS interval_start,
interval."end" AS interval_end
FROM interval
WHERE interval."end" - interval.start > :param_1
ORM methods such as :meth:`~.Query.filter_by` generally use ``getattr()`` to
locate attributes, so can also be used with hybrid attributes::
>>> print(Session().query(Interval).filter_by(length=5))
SELECT interval.id AS interval_id, interval.start AS interval_start,
interval."end" AS interval_end
FROM interval
WHERE interval."end" - interval.start = :param_1
The ``Interval`` class example also illustrates two methods,
``contains()`` and ``intersects()``, decorated with
:class:`.hybrid_method`. This decorator applies the same idea to
methods that :class:`.hybrid_property` applies to attributes. The
methods return boolean values, and take advantage of the Python ``|``
and ``&`` bitwise operators to produce equivalent instance-level and
SQL expression-level boolean behavior::
>>> i1.contains(6)
True
>>> i1.contains(15)
False
>>> i1.intersects(Interval(7, 18))
True
>>> i1.intersects(Interval(25, 29))
False
>>> print(Session().query(Interval).filter(Interval.contains(15)))
SELECT interval.id AS interval_id, interval.start AS interval_start,
interval."end" AS interval_end
FROM interval
WHERE interval.start <= :start_1 AND interval."end" > :end_1
>>> ia = aliased(Interval)
>>> print(Session().query(Interval, ia).filter(Interval.intersects(ia)))
SELECT interval.id AS interval_id, interval.start AS interval_start,
interval."end" AS interval_end, interval_1.id AS interval_1_id,
interval_1.start AS interval_1_start, interval_1."end" AS interval_1_end
FROM interval, interval AS interval_1
WHERE interval.start <= interval_1.start
AND interval."end" > interval_1.start
OR interval.start <= interval_1."end"
AND interval."end" > interval_1."end"
Defining Expression Behavior Distinct from Attribute Behavior
--------------------------------------------------------------
Our usage of the ``&`` and ``|`` bitwise operators above was
fortunate, considering our functions operated on two boolean values to
return a new one. In many cases, the construction of an in-Python
function and a SQLAlchemy SQL expression have enough differences that
two separate Python expressions should be defined. The
:mod:`~sqlalchemy.ext.hybrid` decorators define the
:meth:`.hybrid_property.expression` modifier for this purpose. As an
example we'll define the radius of the interval, which requires the
usage of the absolute value function::
from sqlalchemy import func
class Interval(object):
# ...
@hybrid_property
def radius(self):
return abs(self.length) / 2
@radius.expression
def radius(cls):
return func.abs(cls.length) / 2
Above, the Python function ``abs()`` is used for instance-level
operations, while the SQL function ``ABS()`` is used via the :data:`.func`
object for class-level expressions::
>>> i1.radius
2
>>> print(Session().query(Interval).filter(Interval.radius > 5))
SELECT interval.id AS interval_id, interval.start AS interval_start,
interval."end" AS interval_end
FROM interval
WHERE abs(interval."end" - interval.start) / :abs_1 > :param_1
Defining Setters
----------------
Hybrid properties can also define setter methods. If we wanted
``length`` above, when set, to modify the endpoint value::
class Interval(object):
# ...
@hybrid_property
def length(self):
return self.end - self.start
@length.setter
def length(self, value):
self.end = self.start + value
The ``length(self, value)`` method is now called upon set::
>>> i1 = Interval(5, 10)
>>> i1.length
5
>>> i1.length = 12
>>> i1.end
17
Working with Relationships
--------------------------
There's no essential difference when creating hybrids that work with
related objects as opposed to column-based data. The need for distinct
expressions tends to be greater. The two variants we'll illustrate
are the "join-dependent" hybrid, and the "correlated subquery" hybrid.
Join-Dependent Relationship Hybrid
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Consider the following declarative
mapping which relates a ``User`` to a ``SavingsAccount``::
from sqlalchemy import Column, Integer, ForeignKey, Numeric, String
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.hybrid import hybrid_property
Base = declarative_base()
class SavingsAccount(Base):
__tablename__ = 'account'
id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey('user.id'), nullable=False)
balance = Column(Numeric(15, 5))
class User(Base):
__tablename__ = 'user'
id = Column(Integer, primary_key=True)
name = Column(String(100), nullable=False)
accounts = relationship("SavingsAccount", backref="owner")
@hybrid_property
def balance(self):
if self.accounts:
return self.accounts[0].balance
else:
return None
@balance.setter
def balance(self, value):
if not self.accounts:
account = SavingsAccount(owner=self)
else:
account = self.accounts[0]
account.balance = value
@balance.expression
def balance(cls):
return SavingsAccount.balance
The above hybrid property ``balance`` works with the first
``SavingsAccount`` entry in the list of accounts for this user. The
in-Python getter/setter methods can treat ``accounts`` as a Python
list available on ``self``.
However, at the expression level, it's expected that the ``User`` class will
be used in a context where a join to
``SavingsAccount`` is present::
>>> print(Session().query(User, User.balance).
...     join(User.accounts).filter(User.balance > 5000))
SELECT "user".id AS user_id, "user".name AS user_name,
account.balance AS account_balance
FROM "user" JOIN account ON "user".id = account.user_id
WHERE account.balance > :balance_1
Note, however, that while the instance-level accessors need to worry
about whether ``self.accounts`` is even present, this issue expresses
itself differently at the SQL expression level, where we basically
would use an outer join::
>>> from sqlalchemy import or_
>>> print(Session().query(User, User.balance).outerjoin(User.accounts).
...     filter(or_(User.balance < 5000, User.balance == None)))
SELECT "user".id AS user_id, "user".name AS user_name,
account.balance AS account_balance
FROM "user" LEFT OUTER JOIN account ON "user".id = account.user_id
WHERE account.balance < :balance_1 OR account.balance IS NULL
Correlated Subquery Relationship Hybrid
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
We can, of course, forego being dependent on the enclosing query's usage
of joins in favor of the correlated subquery, which can portably be packed
into a single column expression. A correlated subquery is more portable, but
often performs more poorly at the SQL level. Using the same technique
illustrated at :ref:`mapper_column_property_sql_expressions`,
we can adjust our ``SavingsAccount`` example to aggregate the balances for
*all* accounts, and use a correlated subquery for the column expression::
from sqlalchemy import Column, Integer, ForeignKey, Numeric, String
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy import select, func
Base = declarative_base()
class SavingsAccount(Base):
__tablename__ = 'account'
id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey('user.id'), nullable=False)
balance = Column(Numeric(15, 5))
class User(Base):
__tablename__ = 'user'
id = Column(Integer, primary_key=True)
name = Column(String(100), nullable=False)
accounts = relationship("SavingsAccount", backref="owner")
@hybrid_property
def balance(self):
return sum(acc.balance for acc in self.accounts)
@balance.expression
def balance(cls):
return select([func.sum(SavingsAccount.balance)]).\
where(SavingsAccount.user_id==cls.id).\
label('total_balance')
The above recipe will give us the ``balance`` column which renders
a correlated SELECT::
>>> print(Session().query(User).filter(User.balance > 400))
SELECT "user".id AS user_id, "user".name AS user_name
FROM "user"
WHERE (SELECT sum(account.balance) AS sum_1
FROM account
WHERE account.user_id = "user".id) > :param_1
.. _hybrid_custom_comparators:
Building Custom Comparators
---------------------------
The hybrid property also includes a helper that allows construction of
custom comparators. A comparator object allows one to customize the
behavior of each SQLAlchemy expression operator individually. They
are useful when creating custom types that have some highly
idiosyncratic behavior on the SQL side.
The example class below allows case-insensitive comparisons on the attribute
named ``word_insensitive``::
from sqlalchemy.ext.hybrid import Comparator, hybrid_property
from sqlalchemy import func, Column, Integer, String
from sqlalchemy.orm import Session
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class CaseInsensitiveComparator(Comparator):
def __eq__(self, other):
return func.lower(self.__clause_element__()) == func.lower(other)
class SearchWord(Base):
__tablename__ = 'searchword'
id = Column(Integer, primary_key=True)
word = Column(String(255), nullable=False)
@hybrid_property
def word_insensitive(self):
return self.word.lower()
@word_insensitive.comparator
def word_insensitive(cls):
return CaseInsensitiveComparator(cls.word)
Above, SQL expressions against ``word_insensitive`` will apply the ``LOWER()``
SQL function to both sides::
>>> print(Session().query(SearchWord).filter_by(word_insensitive="Trucks"))
SELECT searchword.id AS searchword_id, searchword.word AS searchword_word
FROM searchword
WHERE lower(searchword.word) = lower(:lower_1)
The ``CaseInsensitiveComparator`` above implements part of the
:class:`.ColumnOperators` interface. A "coercion" operation like
lowercasing can be applied to all comparison operations (i.e. ``eq``,
``lt``, ``gt``, etc.) using :meth:`.Operators.operate`::
class CaseInsensitiveComparator(Comparator):
def operate(self, op, other):
return op(func.lower(self.__clause_element__()), func.lower(other))
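With the ``operate()`` version in place, the coercion is expected to apply to
every comparison operator, not just equality; a sketch of an inequality
following the same pattern as the examples above::

    >>> print(Session().query(SearchWord).filter(
    ...     SearchWord.word_insensitive > "Trucks"))
    SELECT searchword.id AS searchword_id, searchword.word AS searchword_word
    FROM searchword
    WHERE lower(searchword.word) > lower(:lower_1)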
Hybrid Value Objects
--------------------
Note in our previous example, if we were to compare the
``word_insensitive`` attribute of a ``SearchWord`` instance to a plain
Python string, the plain Python string would not be coerced to lower
case - the ``CaseInsensitiveComparator`` we built, being returned by
``@word_insensitive.comparator``, only applies to the SQL side.
A more comprehensive form of the custom comparator is to construct a
*Hybrid Value Object*. This technique applies the target value or
expression to a value object which is then returned by the accessor in
all cases. The value object allows control of all operations upon
the value as well as how compared values are treated, both on the SQL
expression side as well as the Python value side. Replacing the
previous ``CaseInsensitiveComparator`` class with a new
``CaseInsensitiveWord`` class::
class CaseInsensitiveWord(Comparator):
"Hybrid value representing a lower case representation of a word."
def __init__(self, word):
if isinstance(word, basestring):
self.word = word.lower()
elif isinstance(word, CaseInsensitiveWord):
self.word = word.word
else:
self.word = func.lower(word)
def operate(self, op, other):
if not isinstance(other, CaseInsensitiveWord):
other = CaseInsensitiveWord(other)
return op(self.word, other.word)
def __clause_element__(self):
return self.word
def __str__(self):
return self.word
key = 'word'
"Label to apply to Query tuple results"
Above, the ``CaseInsensitiveWord`` object represents ``self.word``,
which may be a SQL function, or may be a Python native. By
overriding ``operate()`` and ``__clause_element__()`` to work in terms
of ``self.word``, all comparison operations will work against the
"converted" form of ``word``, whether it be SQL side or Python side.
Our ``SearchWord`` class can now deliver the ``CaseInsensitiveWord``
object unconditionally from a single hybrid call::
class SearchWord(Base):
__tablename__ = 'searchword'
id = Column(Integer, primary_key=True)
word = Column(String(255), nullable=False)
@hybrid_property
def word_insensitive(self):
return CaseInsensitiveWord(self.word)
The ``word_insensitive`` attribute now has case-insensitive comparison
behavior universally, including SQL expression vs. Python expression
(note the Python value is converted to lower case on the Python side
here)::
>>> print(Session().query(SearchWord).filter_by(word_insensitive="Trucks"))
SELECT searchword.id AS searchword_id, searchword.word AS searchword_word
FROM searchword
WHERE lower(searchword.word) = :lower_1
SQL expression versus SQL expression::
>>> sw1 = aliased(SearchWord)
>>> sw2 = aliased(SearchWord)
>>> print(Session().query(
...     sw1.word_insensitive,
...     sw2.word_insensitive).
...     filter(
...         sw1.word_insensitive > sw2.word_insensitive
...     ))
SELECT lower(searchword_1.word) AS lower_1,
lower(searchword_2.word) AS lower_2
FROM searchword AS searchword_1, searchword AS searchword_2
WHERE lower(searchword_1.word) > lower(searchword_2.word)
Python only expression::
>>> ws1 = SearchWord(word="SomeWord")
>>> ws1.word_insensitive == "sOmEwOrD"
True
>>> ws1.word_insensitive == "XOmEwOrX"
False
>>> print(ws1.word_insensitive)
someword
The Hybrid Value pattern is very useful for any kind of value that may
have multiple representations, such as timestamps, time deltas, units
of measurement, currencies and encrypted passwords.
.. seealso::
`Hybrids and Value Agnostic Types
<http://techspot.zzzeek.org/2011/10/21/hybrids-and-value-agnostic-types/>`_
- on the techspot.zzzeek.org blog
`Value Agnostic Types, Part II
<http://techspot.zzzeek.org/2011/10/29/value-agnostic-types-part-ii/>`_ -
on the techspot.zzzeek.org blog
.. _hybrid_transformers:
Building Transformers
----------------------
A *transformer* is an object which can receive a :class:`.Query`
object and return a new one. The :class:`.Query` object includes a
method :meth:`.with_transformation` that returns a new :class:`.Query`
transformed by the given function.
We can combine this with the :class:`.Comparator` class to produce one type
of recipe which can both set up the FROM clause of a query as well as assign
filtering criterion.
Consider a mapped class ``Node``, which uses an adjacency list
to assemble into a hierarchical tree pattern::
from sqlalchemy import Column, Integer, ForeignKey
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class Node(Base):
__tablename__ = 'node'
id = Column(Integer, primary_key=True)
parent_id = Column(Integer, ForeignKey('node.id'))
parent = relationship("Node", remote_side=id)
Suppose we wanted to add an accessor ``grandparent``. This would
return the ``parent`` of ``Node.parent``. When we have an instance of
``Node``, this is simple::
from sqlalchemy.ext.hybrid import hybrid_property
class Node(Base):
# ...
@hybrid_property
def grandparent(self):
return self.parent.parent
For the expression, things are not so clear. We'd need to construct
a :class:`.Query` where we :meth:`~.Query.join` twice along
``Node.parent`` to get to the ``grandparent``. We can instead return
a transforming callable that we'll combine with the
:class:`.Comparator` class to receive any :class:`.Query` object, and
return a new one that's joined to the ``Node.parent`` attribute and
filtered based on the given criterion::
from sqlalchemy.ext.hybrid import Comparator
class GrandparentTransformer(Comparator):
def operate(self, op, other):
def transform(q):
cls = self.__clause_element__()
parent_alias = aliased(cls)
return q.join(parent_alias, cls.parent).\
filter(op(parent_alias.parent, other))
return transform
Base = declarative_base()
class Node(Base):
__tablename__ = 'node'
id = Column(Integer, primary_key=True)
parent_id = Column(Integer, ForeignKey('node.id'))
parent = relationship("Node", remote_side=id)
@hybrid_property
def grandparent(self):
return self.parent.parent
@grandparent.comparator
def grandparent(cls):
return GrandparentTransformer(cls)
The ``GrandparentTransformer`` overrides the core
:meth:`.Operators.operate` method at the base of the
:class:`.Comparator` hierarchy to return a query-transforming
callable, which then runs the given comparison operation in a
particular context. For example, in the case above, the ``operate``
method is called with the :attr:`.Operators.eq` callable as well as
the right side of the comparison, ``Node(id=5)``. A function
``transform`` is then returned which will transform a :class:`.Query`
first to join to ``Node.parent``, then to compare ``parent_alias``
using :attr:`.Operators.eq` against the left and right sides, passing
into :meth:`.Query.filter`:
.. sourcecode:: pycon+sql
>>> from sqlalchemy.orm import Session
>>> session = Session()
{sql}>>> session.query(Node).\
... with_transformation(Node.grandparent==Node(id=5)).\
... all()
SELECT node.id AS node_id, node.parent_id AS node_parent_id
FROM node JOIN node AS node_1 ON node_1.id = node.parent_id
WHERE :param_1 = node_1.parent_id
{stop}
We can modify the pattern to be more verbose but flexible by separating
the "join" step from the "filter" step. The tricky part here is ensuring
that successive instances of ``GrandparentTransformer`` use the same
:class:`.AliasedClass` object against ``Node``. Below we use a simple
memoizing approach that associates a ``GrandparentTransformer``
with each class::
class Node(Base):
# ...
@grandparent.comparator
def grandparent(cls):
# memoize a GrandparentTransformer
# per class
if '_gp' not in cls.__dict__:
cls._gp = GrandparentTransformer(cls)
return cls._gp
class GrandparentTransformer(Comparator):
def __init__(self, cls):
self.parent_alias = aliased(cls)
@property
def join(self):
def go(q):
return q.join(self.parent_alias, Node.parent)
return go
def operate(self, op, other):
return op(self.parent_alias.parent, other)
.. sourcecode:: pycon+sql
{sql}>>> session.query(Node).\
... with_transformation(Node.grandparent.join).\
... filter(Node.grandparent==Node(id=5))
SELECT node.id AS node_id, node.parent_id AS node_parent_id
FROM node JOIN node AS node_1 ON node_1.id = node.parent_id
WHERE :param_1 = node_1.parent_id
{stop}
The "transformer" pattern is an experimental pattern that starts
to make usage of some functional programming paradigms.
While it's only recommended for advanced and/or patient developers,
there's probably a whole lot of amazing things it can be used for.
"""
from .. import util
from ..orm import attributes, interfaces
HYBRID_METHOD = util.symbol('HYBRID_METHOD')
"""Symbol indicating an :class:`InspectionAttr` that's
of type :class:`.hybrid_method`.
Is assigned to the :attr:`.InspectionAttr.extension_type`
attribute.
.. seealso::
:attr:`.Mapper.all_orm_descriptors`
"""
HYBRID_PROPERTY = util.symbol('HYBRID_PROPERTY')
"""Symbol indicating an :class:`InspectionAttr` that's
of type :class:`.hybrid_method`.
Is assigned to the :attr:`.InspectionAttr.extension_type`
attibute.
.. seealso::
:attr:`.Mapper.all_orm_attributes`
"""
class hybrid_method(interfaces.InspectionAttrInfo):
"""A decorator which allows definition of a Python object method with both
instance-level and class-level behavior.
"""
is_attribute = True
extension_type = HYBRID_METHOD
def __init__(self, func, expr=None):
"""Create a new :class:`.hybrid_method`.
Usage is typically via decorator::
from sqlalchemy.ext.hybrid import hybrid_method
class SomeClass(object):
@hybrid_method
def value(self, x, y):
return self._value + x + y
@value.expression
def value(self, x, y):
return func.some_function(self._value, x, y)
"""
self.func = func
self.expression(expr or func)
def __get__(self, instance, owner):
if instance is None:
return self.expr.__get__(owner, owner.__class__)
else:
return self.func.__get__(instance, owner)
def expression(self, expr):
"""Provide a modifying decorator that defines a
SQL-expression producing method."""
self.expr = expr
if not self.expr.__doc__:
self.expr.__doc__ = self.func.__doc__
return self
class hybrid_property(interfaces.InspectionAttrInfo):
"""A decorator which allows definition of a Python descriptor with both
instance-level and class-level behavior.
"""
is_attribute = True
extension_type = HYBRID_PROPERTY
def __init__(self, fget, fset=None, fdel=None, expr=None):
"""Create a new :class:`.hybrid_property`.
Usage is typically via decorator::
from sqlalchemy.ext.hybrid import hybrid_property
class SomeClass(object):
@hybrid_property
def value(self):
return self._value
@value.setter
def value(self, value):
self._value = value
"""
self.fget = fget
self.fset = fset
self.fdel = fdel
self.expression(expr or fget)
util.update_wrapper(self, fget)
def __get__(self, instance, owner):
if instance is None:
return self.expr(owner)
else:
return self.fget(instance)
def __set__(self, instance, value):
if self.fset is None:
raise AttributeError("can't set attribute")
self.fset(instance, value)
def __delete__(self, instance):
if self.fdel is None:
raise AttributeError("can't delete attribute")
self.fdel(instance)
def setter(self, fset):
"""Provide a modifying decorator that defines a value-setter method."""
self.fset = fset
return self
def deleter(self, fdel):
"""Provide a modifying decorator that defines a
value-deletion method."""
self.fdel = fdel
return self
def expression(self, expr):
"""Provide a modifying decorator that defines a SQL-expression
producing method."""
def _expr(cls):
return ExprComparator(expr(cls), self)
util.update_wrapper(_expr, expr)
self.expr = _expr
return self.comparator(_expr)
def comparator(self, comparator):
"""Provide a modifying decorator that defines a custom
comparator producing method.
The return value of the decorated method should be an instance of
:class:`~.hybrid.Comparator`.
"""
proxy_attr = attributes.\
create_proxied_attribute(self)
def expr(owner):
return proxy_attr(
owner, self.__name__, self, comparator(owner),
doc=comparator.__doc__ or self.__doc__)
self.expr = expr
return self
class Comparator(interfaces.PropComparator):
"""A helper class that allows easy construction of custom
:class:`~.orm.interfaces.PropComparator`
classes for usage with hybrids."""
property = None
def __init__(self, expression):
self.expression = expression
def __clause_element__(self):
expr = self.expression
if hasattr(expr, '__clause_element__'):
expr = expr.__clause_element__()
return expr
def adapt_to_entity(self, adapt_to_entity):
# interesting....
return self
class ExprComparator(Comparator):
def __init__(self, expression, hybrid):
self.expression = expression
self.hybrid = hybrid
def __getattr__(self, key):
return getattr(self.expression, key)
@property
def info(self):
return self.hybrid.info
@property
def property(self):
return self.expression.property
def operate(self, op, *other, **kwargs):
return op(self.expression, *other, **kwargs)
def reverse_operate(self, op, other, **kwargs):
return op(other, self.expression, **kwargs)

349
sqlalchemy/ext/indexable.py Normal file
View File

@@ -0,0 +1,349 @@
# ext/indexable.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Define attributes on ORM-mapped classes that have "index" attributes for
columns with :class:`~.types.Indexable` types.
"index" means the attribute is associated with an element of an
:class:`~.types.Indexable` column, with a predefined index used to access it.
The :class:`~.types.Indexable` types include types such as
:class:`~.types.ARRAY`, :class:`~.types.JSON` and
:class:`~.postgresql.HSTORE`.
The :mod:`~sqlalchemy.ext.indexable` extension provides
a :class:`~.schema.Column`-like interface for any element of an
:class:`~.types.Indexable` typed column. In simple cases, it can be
treated as a :class:`~.schema.Column` - mapped attribute.
.. versionadded:: 1.1
Synopsis
========
Given a ``Person`` model with a primary key and a JSON data field:
while this field may have any number of elements encoded within it,
we would like to refer to the element called ``name`` individually
as a dedicated attribute which behaves like a standalone column::
from sqlalchemy import Column, JSON, Integer
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.indexable import index_property
Base = declarative_base()
class Person(Base):
__tablename__ = 'person'
id = Column(Integer, primary_key=True)
data = Column(JSON)
name = index_property('data', 'name')
Above, the ``name`` attribute now behaves like a mapped column. We
can compose a new ``Person`` and set the value of ``name``::
>>> person = Person(name='Alchemist')
The value is now accessible::
>>> person.name
'Alchemist'
Behind the scenes, the JSON field was initialized to a new blank dictionary
and the field was set::
>>> person.data
{"name": "Alchemist'}
The field is mutable in place::
>>> person.name = 'Renamed'
>>> person.name
'Renamed'
>>> person.data
{'name': 'Renamed'}
When using :class:`.index_property`, the change that we make to the indexable
structure is also automatically tracked as history; we no longer need
to use :class:`~.mutable.MutableDict` in order to track this change
for the unit of work.
Deletions work normally as well::
>>> del person.name
>>> person.data
{}
Above, deletion of ``person.name`` deletes the value from the dictionary,
but not the dictionary itself.
A missing key will produce ``AttributeError``::
>>> person = Person()
>>> person.name
Traceback (most recent call last):
...
AttributeError: 'name'
Unless you set a default value::
>>> class Person(Base):
...     __tablename__ = 'person'
...
...     id = Column(Integer, primary_key=True)
...     data = Column(JSON)
...
...     name = index_property('data', 'name', default=None)  # See default
>>> person = Person()
>>> print(person.name)
None
The attributes are also accessible at the class level.
Below, we illustrate ``Person.name`` used to generate
an indexed SQL criteria::
>>> from sqlalchemy.orm import Session
>>> session = Session()
>>> query = session.query(Person).filter(Person.name == 'Alchemist')
The above query is equivalent to::
>>> query = session.query(Person).filter(Person.data['name'] == 'Alchemist')
Multiple :class:`.index_property` objects can be chained to produce
multiple levels of indexing::
from sqlalchemy import Column, JSON, Integer
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.indexable import index_property
Base = declarative_base()
class Person(Base):
__tablename__ = 'person'
id = Column(Integer, primary_key=True)
data = Column(JSON)
birthday = index_property('data', 'birthday')
year = index_property('birthday', 'year')
month = index_property('birthday', 'month')
day = index_property('birthday', 'day')
Above, a query such as::
q = session.query(Person).filter(Person.year == '1980')
On a PostgreSQL backend, the above query will render as::
SELECT person.id, person.data
FROM person
WHERE person.data -> %(data_1)s -> %(param_1)s = %(param_2)s
Default Values
==============
:class:`.index_property` includes special behaviors for when the indexed
data structure does not exist, and a set operation is called:
* For an :class:`.index_property` that is given an integer index value,
the default data structure will be a Python list of ``None`` values,
long enough to contain the index; the value is then set at its
place in the list. This means for an index value of zero, the list
will be initialized to ``[None]`` before setting the given value,
and for an index value of five, the list will be initialized to
six elements, ``[None] * 6``, before setting the element at index
five to the given value. Note that an existing list is **not** extended
in place to receive a value; see the sketch after this list.
* for an :class:`.index_property` that is given any other kind of index
value (e.g. strings usually), a Python dictionary is used as the
default data structure.
* The default data structure can be set to any Python callable using the
:paramref:`.index_property.datatype` parameter, overriding the previous
rules.
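As a minimal sketch of the integer-index case (the ``Person`` model here is
an assumption for illustration, with an additional integer-indexed property
against the JSON field)::

    class Person(Base):
        __tablename__ = 'person'

        id = Column(Integer, primary_key=True)
        data = Column(JSON)

        # integer index; the default data structure is a list
        third = index_property('data', 2)

    p = Person()
    p.third = 'value'

    # "data" was first initialized to [None, None, None], i.e.
    # range(index + 1) elements, before index 2 was assigned:
    # p.data == [None, None, 'value']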
Subclassing
===========
:class:`.index_property` can be subclassed, in particular for the common
use case of providing coercion of values or SQL expressions as they are
accessed. Below is a common recipe for use with a PostgreSQL JSON type,
where we want to also include automatic casting plus ``astext()``::
class pg_json_property(index_property):
def __init__(self, attr_name, index, cast_type):
super(pg_json_property, self).__init__(attr_name, index)
self.cast_type = cast_type
def expr(self, model):
expr = super(pg_json_property, self).expr(model)
return expr.astext.cast(self.cast_type)
The above subclass can be used with the PostgreSQL-specific
version of :class:`.postgresql.JSON`::
from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.dialects.postgresql import JSON
Base = declarative_base()
class Person(Base):
__tablename__ = 'person'
id = Column(Integer, primary_key=True)
data = Column(JSON)
age = pg_json_property('data', 'age', Integer)
The ``age`` attribute at the instance level works as before; however
when rendering SQL, PostgreSQL's ``->>`` operator will be used
for indexed access, instead of the usual index operator ``->``::
>>> query = session.query(Person).filter(Person.age < 20)
The above query will render::
SELECT person.id, person.data
FROM person
WHERE CAST(person.data ->> %(data_1)s AS INTEGER) < %(param_1)s
"""
from __future__ import absolute_import
from sqlalchemy import inspect
from ..orm.attributes import flag_modified
from ..ext.hybrid import hybrid_property
__all__ = ['index_property']
class index_property(hybrid_property): # noqa
"""A property generator. The generated property describes an object
attribute that corresponds to an :class:`~.types.Indexable`
column.
.. versionadded:: 1.1
.. seealso::
:mod:`sqlalchemy.ext.indexable`
"""
_NO_DEFAULT_ARGUMENT = object()
def __init__(
self, attr_name, index, default=_NO_DEFAULT_ARGUMENT,
datatype=None, mutable=True, onebased=True):
"""Create a new :class:`.index_property`.
:param attr_name:
An attribute name of an `Indexable` typed column, or other
attribute that returns an indexable structure.
:param index:
The index to be used for getting and setting this value. This
should be the Python-side index value for integers.
:param default:
A value which will be returned instead of `AttributeError`
when there is no value at the given index.
:param datatype: default datatype to use when the field is empty.
By default, this is derived from the type of index used; a
Python list for an integer index, or a Python dictionary for
any other style of index. For a list, the list will be
initialized to a list of None values that is at least
``index`` elements long.
:param mutable: if False, writes and deletes to the attribute will
be disallowed.
:param onebased: assume the SQL representation of this value is
one-based; that is, the first index in SQL is 1, not zero.
"""
if mutable:
super(index_property, self).__init__(
self.fget, self.fset, self.fdel, self.expr
)
else:
super(index_property, self).__init__(
self.fget, None, None, self.expr
)
self.attr_name = attr_name
self.index = index
self.default = default
is_numeric = isinstance(index, int)
onebased = is_numeric and onebased
if datatype is not None:
self.datatype = datatype
else:
if is_numeric:
self.datatype = lambda: [None for x in range(index + 1)]
else:
self.datatype = dict
self.onebased = onebased
def _fget_default(self):
if self.default == self._NO_DEFAULT_ARGUMENT:
raise AttributeError(self.attr_name)
else:
return self.default
def fget(self, instance):
attr_name = self.attr_name
column_value = getattr(instance, attr_name)
if column_value is None:
return self._fget_default()
try:
value = column_value[self.index]
except (KeyError, IndexError):
return self._fget_default()
else:
return value
def fset(self, instance, value):
attr_name = self.attr_name
column_value = getattr(instance, attr_name, None)
if column_value is None:
column_value = self.datatype()
setattr(instance, attr_name, column_value)
column_value[self.index] = value
setattr(instance, attr_name, column_value)
if attr_name in inspect(instance).mapper.attrs:
flag_modified(instance, attr_name)
def fdel(self, instance):
attr_name = self.attr_name
column_value = getattr(instance, attr_name)
if column_value is None:
raise AttributeError(self.attr_name)
try:
del column_value[self.index]
except KeyError:
raise AttributeError(self.attr_name)
else:
setattr(instance, attr_name, column_value)
flag_modified(instance, attr_name)
def expr(self, model):
column = getattr(model, self.attr_name)
index = self.index
if self.onebased:
index += 1
return column[index]
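# Example (hypothetical, for illustration): with
#
#     first = index_property('data', 0)
#
# instance-level access reads data[0] in Python, while the SQL expression
# renders data[1], since onebased=True assumes the database's indexing
# starts at 1 (as with PostgreSQL ARRAY types).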

414
sqlalchemy/ext/instrumentation.py Normal file
View File

@@ -0,0 +1,414 @@
"""Extensible class instrumentation.
The :mod:`sqlalchemy.ext.instrumentation` package provides for alternate
systems of class instrumentation within the ORM. Class instrumentation
refers to how the ORM places attributes on the class which maintain
data and track changes to that data, as well as event hooks installed
on the class.
.. note::
The extension package is provided for the benefit of integration
with other object management packages, which already perform
their own instrumentation. It is not intended for general use.
For examples of how the instrumentation extension is used,
see the example :ref:`examples_instrumentation`.
.. versionchanged:: 0.8
The :mod:`sqlalchemy.orm.instrumentation` was split out so
that all functionality having to do with non-standard
instrumentation was moved out to :mod:`sqlalchemy.ext.instrumentation`.
When imported, the module installs itself within
:mod:`sqlalchemy.orm.instrumentation` so that it
takes effect, including recognition of
``__sa_instrumentation_manager__`` on mapped classes, as
well as :data:`.instrumentation_finders`
being used to determine class instrumentation resolution.
"""
from ..orm import instrumentation as orm_instrumentation
from ..orm.instrumentation import (
ClassManager, InstrumentationFactory, _default_state_getter,
_default_dict_getter, _default_manager_getter
)
from ..orm import attributes, collections, base as orm_base
from .. import util
from ..orm import exc as orm_exc
import weakref
INSTRUMENTATION_MANAGER = '__sa_instrumentation_manager__'
"""Attribute, elects custom instrumentation when present on a mapped class.
Allows a class to specify a slightly or wildly different technique for
tracking changes made to mapped attributes and collections.
Only one instrumentation implementation is allowed in a given object
inheritance hierarchy.
The value of this attribute must be a callable and will be passed a class
object. The callable must return one of:
- An instance of an InstrumentationManager or subclass
- An object implementing all or some of InstrumentationManager (TODO)
- A dictionary of callables, implementing all or some of the above (TODO)
- An instance of a ClassManager or subclass
This attribute is consulted by SQLAlchemy instrumentation
resolution, once the :mod:`sqlalchemy.ext.instrumentation` module
has been imported. If custom finders are installed in the global
instrumentation_finders list, they may or may not choose to honor this
attribute.
"""
def find_native_user_instrumentation_hook(cls):
"""Find user-specified instrumentation management for a class."""
return getattr(cls, INSTRUMENTATION_MANAGER, None)
instrumentation_finders = [find_native_user_instrumentation_hook]
"""An extensible sequence of callables which return instrumentation
implementations.
When a class is registered, each callable will be passed a class object.
If None is returned, the
next finder in the sequence is consulted. Otherwise the return must be an
instrumentation factory that follows the same guidelines as
sqlalchemy.ext.instrumentation.INSTRUMENTATION_MANAGER.
By default, the only finder is find_native_user_instrumentation_hook, which
searches for INSTRUMENTATION_MANAGER. If all finders return None, standard
ClassManager instrumentation is used.
"""
class ExtendedInstrumentationRegistry(InstrumentationFactory):
"""Extends :class:`.InstrumentationFactory` with additional
bookkeeping, to accommodate multiple types of
class managers.
"""
_manager_finders = weakref.WeakKeyDictionary()
_state_finders = weakref.WeakKeyDictionary()
_dict_finders = weakref.WeakKeyDictionary()
_extended = False
def _locate_extended_factory(self, class_):
for finder in instrumentation_finders:
factory = finder(class_)
if factory is not None:
manager = self._extended_class_manager(class_, factory)
return manager, factory
else:
return None, None
def _check_conflicts(self, class_, factory):
existing_factories = self._collect_management_factories_for(class_).\
difference([factory])
if existing_factories:
raise TypeError(
"multiple instrumentation implementations specified "
"in %s inheritance hierarchy: %r" % (
class_.__name__, list(existing_factories)))
def _extended_class_manager(self, class_, factory):
manager = factory(class_)
if not isinstance(manager, ClassManager):
manager = _ClassInstrumentationAdapter(class_, manager)
if factory != ClassManager and not self._extended:
# somebody invoked a custom ClassManager.
# reinstall global "getter" functions with the more
# expensive ones.
self._extended = True
_install_instrumented_lookups()
self._manager_finders[class_] = manager.manager_getter()
self._state_finders[class_] = manager.state_getter()
self._dict_finders[class_] = manager.dict_getter()
return manager
def _collect_management_factories_for(self, cls):
"""Return a collection of factories in play or specified for a
hierarchy.
Traverses the entire inheritance graph of a cls and returns a
collection of instrumentation factories for those classes. Factories
are extracted from active ClassManagers, if available, otherwise
instrumentation_finders is consulted.
"""
hierarchy = util.class_hierarchy(cls)
factories = set()
for member in hierarchy:
manager = self.manager_of_class(member)
if manager is not None:
factories.add(manager.factory)
else:
for finder in instrumentation_finders:
factory = finder(member)
if factory is not None:
break
else:
factory = None
factories.add(factory)
factories.discard(None)
return factories
def unregister(self, class_):
if class_ in self._manager_finders:
del self._manager_finders[class_]
del self._state_finders[class_]
del self._dict_finders[class_]
super(ExtendedInstrumentationRegistry, self).unregister(class_)
def manager_of_class(self, cls):
if cls is None:
return None
try:
finder = self._manager_finders.get(cls, _default_manager_getter)
except TypeError:
# due to weakref lookup on invalid object
return None
else:
return finder(cls)
def state_of(self, instance):
if instance is None:
raise AttributeError("None has no persistent state.")
return self._state_finders.get(
instance.__class__, _default_state_getter)(instance)
def dict_of(self, instance):
if instance is None:
raise AttributeError("None has no persistent state.")
return self._dict_finders.get(
instance.__class__, _default_dict_getter)(instance)
orm_instrumentation._instrumentation_factory = \
_instrumentation_factory = ExtendedInstrumentationRegistry()
orm_instrumentation.instrumentation_finders = instrumentation_finders
class InstrumentationManager(object):
"""User-defined class instrumentation extension.
:class:`.InstrumentationManager` can be subclassed in order
to change
how class instrumentation proceeds. This class exists for
the purposes of integration with other object management
frameworks which would like to entirely modify the
instrumentation methodology of the ORM, and is not intended
for regular usage. For interception of class instrumentation
events, see :class:`.InstrumentationEvents`.
The API for this class should be considered as semi-stable,
and may change slightly with new releases.
.. versionchanged:: 0.8
:class:`.InstrumentationManager` was moved from
:mod:`sqlalchemy.orm.instrumentation` to
:mod:`sqlalchemy.ext.instrumentation`.
"""
# r4361 added a mandatory (cls) constructor to this interface.
# given that, perhaps class_ should be dropped from all of these
# signatures.
def __init__(self, class_):
pass
def manage(self, class_, manager):
setattr(class_, '_default_class_manager', manager)
def dispose(self, class_, manager):
delattr(class_, '_default_class_manager')
def manager_getter(self, class_):
def get(cls):
return cls._default_class_manager
return get
def instrument_attribute(self, class_, key, inst):
pass
def post_configure_attribute(self, class_, key, inst):
pass
def install_descriptor(self, class_, key, inst):
setattr(class_, key, inst)
def uninstall_descriptor(self, class_, key):
delattr(class_, key)
def install_member(self, class_, key, implementation):
setattr(class_, key, implementation)
def uninstall_member(self, class_, key):
delattr(class_, key)
def instrument_collection_class(self, class_, key, collection_class):
return collections.prepare_instrumentation(collection_class)
def get_instance_dict(self, class_, instance):
return instance.__dict__
def initialize_instance_dict(self, class_, instance):
pass
def install_state(self, class_, instance, state):
setattr(instance, '_default_state', state)
def remove_state(self, class_, instance):
delattr(instance, '_default_state')
def state_getter(self, class_):
return lambda instance: getattr(instance, '_default_state')
def dict_getter(self, class_):
return lambda inst: self.get_instance_dict(class_, inst)
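# Sketch (hypothetical subclass, for illustration): an external framework
# might relocate where state is stored, e.g.
#
#     class MyInstrumentationManager(InstrumentationManager):
#         def install_state(self, class_, instance, state):
#             instance.__dict__['_my_state'] = state
#
#         def remove_state(self, class_, instance):
#             del instance.__dict__['_my_state']
#
#         def state_getter(self, class_):
#             return lambda instance: instance.__dict__['_my_state']
#
# electing it via __sa_instrumentation_manager__ = MyInstrumentationManager
# on the mapped class, per INSTRUMENTATION_MANAGER above.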
class _ClassInstrumentationAdapter(ClassManager):
"""Adapts a user-defined InstrumentationManager to a ClassManager."""
def __init__(self, class_, override):
self._adapted = override
self._get_state = self._adapted.state_getter(class_)
self._get_dict = self._adapted.dict_getter(class_)
ClassManager.__init__(self, class_)
def manage(self):
self._adapted.manage(self.class_, self)
def dispose(self):
self._adapted.dispose(self.class_)
def manager_getter(self):
return self._adapted.manager_getter(self.class_)
def instrument_attribute(self, key, inst, propagated=False):
ClassManager.instrument_attribute(self, key, inst, propagated)
if not propagated:
self._adapted.instrument_attribute(self.class_, key, inst)
def post_configure_attribute(self, key):
super(_ClassInstrumentationAdapter, self).post_configure_attribute(key)
self._adapted.post_configure_attribute(self.class_, key, self[key])
def install_descriptor(self, key, inst):
self._adapted.install_descriptor(self.class_, key, inst)
def uninstall_descriptor(self, key):
self._adapted.uninstall_descriptor(self.class_, key)
def install_member(self, key, implementation):
self._adapted.install_member(self.class_, key, implementation)
def uninstall_member(self, key):
self._adapted.uninstall_member(self.class_, key)
def instrument_collection_class(self, key, collection_class):
return self._adapted.instrument_collection_class(
self.class_, key, collection_class)
def initialize_collection(self, key, state, factory):
delegate = getattr(self._adapted, 'initialize_collection', None)
if delegate:
return delegate(key, state, factory)
else:
return ClassManager.initialize_collection(self, key,
state, factory)
def new_instance(self, state=None):
instance = self.class_.__new__(self.class_)
self.setup_instance(instance, state)
return instance
def _new_state_if_none(self, instance):
"""Install a default InstanceState if none is present.
A private convenience method used by the __init__ decorator.
"""
if self.has_state(instance):
return False
else:
return self.setup_instance(instance)
def setup_instance(self, instance, state=None):
self._adapted.initialize_instance_dict(self.class_, instance)
if state is None:
state = self._state_constructor(instance, self)
# the given instance is assumed to have no state
self._adapted.install_state(self.class_, instance, state)
return state
def teardown_instance(self, instance):
self._adapted.remove_state(self.class_, instance)
def has_state(self, instance):
try:
self._get_state(instance)
except orm_exc.NO_STATE:
return False
else:
return True
def state_getter(self):
return self._get_state
def dict_getter(self):
return self._get_dict
def _install_instrumented_lookups():
"""Replace global class/object management functions
with ExtendedInstrumentationRegistry implementations, which
allow multiple types of class managers to be present,
at the cost of performance.
This function is called only by ExtendedInstrumentationRegistry
and unit tests specific to this behavior.
The _reinstall_default_lookups() function can be called
after this one to re-establish the default functions.
"""
_install_lookups(
dict(
instance_state=_instrumentation_factory.state_of,
instance_dict=_instrumentation_factory.dict_of,
manager_of_class=_instrumentation_factory.manager_of_class
)
)
def _reinstall_default_lookups():
"""Restore simplified lookups."""
_install_lookups(
dict(
instance_state=_default_state_getter,
instance_dict=_default_dict_getter,
manager_of_class=_default_manager_getter
)
)
_instrumentation_factory._extended = False
def _install_lookups(lookups):
global instance_state, instance_dict, manager_of_class
instance_state = lookups['instance_state']
instance_dict = lookups['instance_dict']
manager_of_class = lookups['manager_of_class']
orm_base.instance_state = attributes.instance_state = \
orm_instrumentation.instance_state = instance_state
orm_base.instance_dict = attributes.instance_dict = \
orm_instrumentation.instance_dict = instance_dict
orm_base.manager_of_class = attributes.manager_of_class = \
orm_instrumentation.manager_of_class = manager_of_class

904
sqlalchemy/ext/mutable.py Normal file
View File

@@ -0,0 +1,904 @@
# ext/mutable.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
r"""Provide support for tracking of in-place changes to scalar values,
which are propagated into ORM change events on owning parent objects.
.. versionadded:: 0.7 :mod:`sqlalchemy.ext.mutable` replaces SQLAlchemy's
legacy approach to in-place mutations of scalar values; see
:ref:`07_migration_mutation_extension`.
.. _mutable_scalars:
Establishing Mutability on Scalar Column Values
===============================================
A typical example of a "mutable" structure is a Python dictionary.
Following the example introduced in :ref:`types_toplevel`, we
begin with a custom type that marshals Python dictionaries into
JSON strings before being persisted::
from sqlalchemy.types import TypeDecorator, VARCHAR
import json
class JSONEncodedDict(TypeDecorator):
"Represents an immutable structure as a json-encoded string."
impl = VARCHAR
def process_bind_param(self, value, dialect):
if value is not None:
value = json.dumps(value)
return value
def process_result_value(self, value, dialect):
if value is not None:
value = json.loads(value)
return value
The usage of ``json`` is only for the purposes of example. The
:mod:`sqlalchemy.ext.mutable` extension can be used
with any type whose target Python type may be mutable, including
:class:`.PickleType`, :class:`.postgresql.ARRAY`, etc.
When using the :mod:`sqlalchemy.ext.mutable` extension, the value itself
tracks all parents which reference it. Below, we illustrate a simple
version of the :class:`.MutableDict` dictionary object, which applies
the :class:`.Mutable` mixin to a plain Python dictionary::
from sqlalchemy.ext.mutable import Mutable
class MutableDict(Mutable, dict):
@classmethod
def coerce(cls, key, value):
"Convert plain dictionaries to MutableDict."
if not isinstance(value, MutableDict):
if isinstance(value, dict):
return MutableDict(value)
# this call will raise ValueError
return Mutable.coerce(key, value)
else:
return value
def __setitem__(self, key, value):
"Detect dictionary set events and emit change events."
dict.__setitem__(self, key, value)
self.changed()
def __delitem__(self, key):
"Detect dictionary del events and emit change events."
dict.__delitem__(self, key)
self.changed()
The above dictionary class takes the approach of subclassing the Python
built-in ``dict`` to produce a dict
subclass which routes all mutation events through ``__setitem__``. There are
variants on this approach, such as subclassing ``UserDict.UserDict`` or
``collections.MutableMapping``; the part that's important to this example is
that the :meth:`.Mutable.changed` method is called whenever an in-place
change to the data structure takes place.
We also redefine the :meth:`.Mutable.coerce` method which will be used to
convert any values that are not instances of ``MutableDict``, such
as the plain dictionaries returned by the ``json`` module, into the
appropriate type. Defining this method is optional; we could just as well
have created our ``JSONEncodedDict`` such that it always returns an instance
of ``MutableDict``, and additionally ensured that all calling code
uses ``MutableDict`` explicitly. When :meth:`.Mutable.coerce` is not
overridden, any values applied to a parent object which are not instances
of the mutable type will raise a ``ValueError``.
Our new ``MutableDict`` type offers a class method
:meth:`~.Mutable.as_mutable` which we can use within column metadata
to associate with types. This method grabs the given type object or
class and associates a listener that will detect all future mappings
of this type, applying event listening instrumentation to the mapped
attribute. For example, with classical table metadata::
from sqlalchemy import Table, Column, Integer
my_data = Table('my_data', metadata,
Column('id', Integer, primary_key=True),
Column('data', MutableDict.as_mutable(JSONEncodedDict))
)
Above, :meth:`~.Mutable.as_mutable` returns an instance of ``JSONEncodedDict``
(if the type object was not an instance already), which will intercept any
attributes which are mapped against this type. Below we establish a simple
mapping against the ``my_data`` table::
from sqlalchemy.orm import mapper
class MyDataClass(object):
pass
# associates mutation listeners with MyDataClass.data
mapper(MyDataClass, my_data)
The ``MyDataClass.data`` member will now be notified of in place changes
to its value.
There's no difference in usage when using declarative::
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class MyDataClass(Base):
__tablename__ = 'my_data'
id = Column(Integer, primary_key=True)
data = Column(MutableDict.as_mutable(JSONEncodedDict))
Any in-place changes to the ``MyDataClass.data`` member
will flag the attribute as "dirty" on the parent object::
>>> from sqlalchemy.orm import Session
>>> sess = Session()
>>> m1 = MyDataClass(data={'value1':'foo'})
>>> sess.add(m1)
>>> sess.commit()
>>> m1.data['value1'] = 'bar'
>>> m1 in sess.dirty
True
The ``MutableDict`` can be associated with all future instances
of ``JSONEncodedDict`` in one step, using
:meth:`~.Mutable.associate_with`. This is similar to
:meth:`~.Mutable.as_mutable` except it will intercept all occurrences
of ``JSONEncodedDict`` in all mappings unconditionally, without
the need to declare it individually::
MutableDict.associate_with(JSONEncodedDict)
class MyDataClass(Base):
__tablename__ = 'my_data'
id = Column(Integer, primary_key=True)
data = Column(JSONEncodedDict)
Supporting Pickling
--------------------
The :mod:`sqlalchemy.ext.mutable` extension relies upon the placement of a
``weakref.WeakKeyDictionary`` upon the value object, which
stores a mapping of parent mapped objects keyed to the attribute name under
which they are associated with this value. ``WeakKeyDictionary`` objects are
not picklable, due to the fact that they contain weakrefs and function
callbacks. In our case, this is a good thing, since if this dictionary were
picklable, it could lead to an excessively large pickle size for our value
objects that are pickled by themselves outside of the context of the parent.
The developer responsibility here is only to provide a ``__getstate__`` method
that excludes the :attr:`~MutableBase._parents` collection from the pickle
stream::
class MyMutableType(Mutable):
def __getstate__(self):
d = self.__dict__.copy()
d.pop('_parents', None)
return d
With our dictionary example, we need to return the contents of the dict itself
(and also restore them on ``__setstate__``)::
class MutableDict(Mutable, dict):
# ....
def __getstate__(self):
return dict(self)
def __setstate__(self, state):
self.update(state)
In the case that our mutable value object is pickled as it is attached to one
or more parent objects that are also part of the pickle, the :class:`.Mutable`
mixin will re-establish the :attr:`.Mutable._parents` collection on each value
object as the owning parents themselves are unpickled.
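As a brief sketch, assuming the ``MyDataClass`` mapping above, a parent
object can round-trip through pickle with mutation tracking intact
afterwards::

    import pickle

    m1 = MyDataClass(data={'value1': 'foo'})
    m2 = pickle.loads(pickle.dumps(m1))

    # _parents on m2.data has been re-established by the mixin, so
    # in-place changes will again flag m2 as "dirty" within a Session
    m2.data['value1'] = 'bar'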
.. _mutable_composites:
Establishing Mutability on Composites
=====================================
Composites are a special ORM feature which allow a single scalar attribute to
be assigned an object value which represents information "composed" from one
or more columns from the underlying mapped table. The usual example is that of
a geometric "point", and is introduced in :ref:`mapper_composite`.
.. versionchanged:: 0.7
The internals of :func:`.orm.composite` have been
greatly simplified and in-place mutation detection is no longer enabled by
default; instead, the user-defined value must detect changes on its own and
propagate them to all owning parents. The :mod:`sqlalchemy.ext.mutable`
extension provides the helper class :class:`.MutableComposite`, which is a
slight variant on the :class:`.Mutable` class.
As is the case with :class:`.Mutable`, the user-defined composite class
subclasses :class:`.MutableComposite` as a mixin, and detects and delivers
change events to its parents via the :meth:`.MutableComposite.changed` method.
In the case of a composite class, detection usually occurs via the use of
Python descriptors (i.e. ``@property``), or alternatively via the special
Python method ``__setattr__()``. Below we expand upon the ``Point`` class
introduced in :ref:`mapper_composite` to subclass :class:`.MutableComposite`
and to also route attribute set events via ``__setattr__`` to the
:meth:`.MutableComposite.changed` method::
from sqlalchemy.ext.mutable import MutableComposite
class Point(MutableComposite):
def __init__(self, x, y):
self.x = x
self.y = y
def __setattr__(self, key, value):
"Intercept set events"
# set the attribute
object.__setattr__(self, key, value)
# alert all parents to the change
self.changed()
def __composite_values__(self):
return self.x, self.y
def __eq__(self, other):
return isinstance(other, Point) and \
other.x == self.x and \
other.y == self.y
def __ne__(self, other):
return not self.__eq__(other)
The :class:`.MutableComposite` class uses a Python metaclass to automatically
establish listeners for any usage of :func:`.orm.composite` that specifies our
``Point`` type. Below, when ``Point`` is mapped to the ``Vertex`` class,
listeners are established which will route change events from ``Point``
objects to each of the ``Vertex.start`` and ``Vertex.end`` attributes::
from sqlalchemy.orm import composite, mapper
from sqlalchemy import Table, Column, Integer
vertices = Table('vertices', metadata,
Column('id', Integer, primary_key=True),
Column('x1', Integer),
Column('y1', Integer),
Column('x2', Integer),
Column('y2', Integer),
)
class Vertex(object):
pass
mapper(Vertex, vertices, properties={
'start': composite(Point, vertices.c.x1, vertices.c.y1),
'end': composite(Point, vertices.c.x2, vertices.c.y2)
})
Any in-place changes to the ``Vertex.start`` or ``Vertex.end`` members
will flag the attribute as "dirty" on the parent object::
>>> from sqlalchemy.orm import Session
>>> sess = Session()
>>> v1 = Vertex(start=Point(3, 4), end=Point(12, 15))
>>> sess.add(v1)
>>> sess.commit()
>>> v1.end.x = 8
>>> v1 in sess.dirty
True
Coercing Mutable Composites
---------------------------
The :meth:`.MutableBase.coerce` method is also supported on composite types.
In the case of :class:`.MutableComposite`, the :meth:`.MutableBase.coerce`
method is only called for attribute set operations, not load operations.
Overriding the :meth:`.MutableBase.coerce` method is essentially equivalent
to using a :func:`.validates` validation routine for all attributes which
make use of the custom composite type::
    class Point(MutableComposite):
        # other Point methods
        # ...

        @classmethod
        def coerce(cls, key, value):
            if isinstance(value, tuple):
                value = Point(*value)
            elif not isinstance(value, Point):
                raise ValueError("tuple or Point expected")
            return value
.. versionadded:: 0.7.10,0.8.0b2
Support for the :meth:`.MutableBase.coerce` method in conjunction with
objects of type :class:`.MutableComposite`.
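With the above in place, assigning a plain tuple to a composite attribute is
transparently converted; a brief illustration, using the ``Vertex`` mapping
shown previously::

    v1 = Vertex(start=Point(3, 4), end=Point(12, 15))

    # the tuple is passed through Point.coerce(), producing Point(8, 10)
    v1.end = (8, 10)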
Supporting Pickling
--------------------
As is the case with :class:`.Mutable`, the :class:`.MutableComposite` helper
class uses a ``weakref.WeakKeyDictionary`` available via the
:attr:`MutableBase._parents` attribute which isn't picklable. If we need to
pickle instances of ``Point`` or its owning class ``Vertex``, we at least need
to define a ``__getstate__`` that doesn't include the ``_parents`` dictionary.
Below we define both a ``__getstate__`` and a ``__setstate__`` that package up
the minimal form of our ``Point`` class::
class Point(MutableComposite):
# ...
def __getstate__(self):
return self.x, self.y
def __setstate__(self, state):
self.x, self.y = state
As with :class:`.Mutable`, the :class:`.MutableComposite` augments the
pickling process of the parent's object-relational state so that the
:attr:`MutableBase._parents` collection is restored to all ``Point`` objects.
"""
from ..orm.attributes import flag_modified
from .. import event, types
from ..orm import mapper, object_mapper, Mapper
from ..util import memoized_property
from ..sql.base import SchemaEventTarget
import weakref
class MutableBase(object):
"""Common base class to :class:`.Mutable`
and :class:`.MutableComposite`.
"""
@memoized_property
def _parents(self):
"""Dictionary of parent object->attribute name on the parent.
This attribute is a so-called "memoized" property. It initializes
itself with a new ``weakref.WeakKeyDictionary`` the first time
it is accessed, returning the same object upon subsequent access.
"""
return weakref.WeakKeyDictionary()
@classmethod
def coerce(cls, key, value):
"""Given a value, coerce it into the target type.
Can be overridden by custom subclasses to coerce incoming
data into a particular type.
By default, raises ``ValueError``.
This method is called in different scenarios depending on whether
the parent class is of type :class:`.Mutable` or of type
:class:`.MutableComposite`. In the case of the former, it is called
for both attribute-set operations as well as during ORM loading
operations. For the latter, it is only called during attribute-set
operations; the mechanics of the :func:`.composite` construct
handle coercion during load operations.
:param key: string name of the ORM-mapped attribute being set.
:param value: the incoming value.
:return: the method should return the coerced value, or raise
``ValueError`` if the coercion cannot be completed.
"""
if value is None:
return None
msg = "Attribute '%s' does not accept objects of type %s"
raise ValueError(msg % (key, type(value)))
@classmethod
def _get_listen_keys(cls, attribute):
"""Given a descriptor attribute, return a ``set()`` of the attribute
keys which indicate a change in the state of this attribute.
This is normally just ``set([attribute.key])``, but can be overridden
to provide for additional keys. E.g. a :class:`.MutableComposite`
augments this set with the attribute keys associated with the columns
that comprise the composite value.
This collection is consulted in the case of intercepting the
:meth:`.InstanceEvents.refresh` and
:meth:`.InstanceEvents.refresh_flush` events, which pass along a list
of attribute names that have been refreshed; the list is compared
against this set to determine if action needs to be taken.
.. versionadded:: 1.0.5
"""
return set([attribute.key])
@classmethod
def _listen_on_attribute(cls, attribute, coerce, parent_cls):
"""Establish this type as a mutation listener for the given
mapped descriptor.
"""
key = attribute.key
if parent_cls is not attribute.class_:
return
# rely on "propagate" here
parent_cls = attribute.class_
listen_keys = cls._get_listen_keys(attribute)
def load(state, *args):
"""Listen for objects loaded or refreshed.
Wrap the target data member's value with
``Mutable``.
"""
val = state.dict.get(key, None)
if val is not None:
if coerce:
val = cls.coerce(key, val)
state.dict[key] = val
val._parents[state.obj()] = key
        def load_attrs(state, ctx, attrs):
            # the refresh and refresh_flush events pass the names of the
            # refreshed attributes; re-wrap only when ours are among them
            if not attrs or listen_keys.intersection(attrs):
                load(state)
def set(target, value, oldvalue, initiator):
"""Listen for set/replace events on the target
data member.
Establish a weak reference to the parent object
on the incoming value, remove it for the one
outgoing.
"""
if value is oldvalue:
return value
if not isinstance(value, cls):
value = cls.coerce(key, value)
if value is not None:
value._parents[target.obj()] = key
if isinstance(oldvalue, cls):
oldvalue._parents.pop(target.obj(), None)
return value
        def pickle(state, state_dict):
            # stash mutable values into the pickled state so that
            # 'unpickle' can re-establish their '_parents' weakrefs
            val = state.dict.get(key, None)
            if val is not None:
                if 'ext.mutable.values' not in state_dict:
                    state_dict['ext.mutable.values'] = []
                state_dict['ext.mutable.values'].append(val)

        def unpickle(state, state_dict):
            # restore the weakref from each mutable value back to the
            # newly unpickled parent object
            if 'ext.mutable.values' in state_dict:
                for val in state_dict['ext.mutable.values']:
                    val._parents[state.obj()] = key
event.listen(parent_cls, 'load', load,
raw=True, propagate=True)
event.listen(parent_cls, 'refresh', load_attrs,
raw=True, propagate=True)
event.listen(parent_cls, 'refresh_flush', load_attrs,
raw=True, propagate=True)
event.listen(attribute, 'set', set,
raw=True, retval=True, propagate=True)
event.listen(parent_cls, 'pickle', pickle,
raw=True, propagate=True)
event.listen(parent_cls, 'unpickle', unpickle,
raw=True, propagate=True)
class Mutable(MutableBase):
"""Mixin that defines transparent propagation of change
events to a parent object.
See the example in :ref:`mutable_scalars` for usage information.
"""
def changed(self):
"""Subclasses should call this method whenever change events occur."""
for parent, key in self._parents.items():
flag_modified(parent, key)
@classmethod
def associate_with_attribute(cls, attribute):
"""Establish this type as a mutation listener for the given
mapped descriptor.
"""
cls._listen_on_attribute(attribute, True, attribute.class_)
@classmethod
def associate_with(cls, sqltype):
"""Associate this wrapper with all future mapped columns
of the given type.
This is a convenience method that calls
``associate_with_attribute`` automatically.
.. warning::
The listeners established by this method are *global*
to all mappers, and are *not* garbage collected. Only use
:meth:`.associate_with` for types that are permanent to an
application, not with ad-hoc types; otherwise this will cause unbounded
growth in memory usage.
"""
def listen_for_type(mapper, class_):
for prop in mapper.column_attrs:
if isinstance(prop.columns[0].type, sqltype):
cls.associate_with_attribute(getattr(class_, prop.key))
event.listen(mapper, 'mapper_configured', listen_for_type)
@classmethod
def as_mutable(cls, sqltype):
"""Associate a SQL type with this mutable Python type.
This establishes listeners that will detect ORM mappings against
the given type, adding mutation event trackers to those mappings.
The type is returned, unconditionally as an instance, so that
:meth:`.as_mutable` can be used inline::
Table('mytable', metadata,
Column('id', Integer, primary_key=True),
Column('data', MyMutableType.as_mutable(PickleType))
)
Note that the returned type is always an instance, even if a class
is given, and that only columns which are declared specifically with
that type instance receive additional instrumentation.
To associate a particular mutable type with all occurrences of a
particular type, use the :meth:`.Mutable.associate_with` classmethod
of the particular :class:`.Mutable` subclass to establish a global
association.
.. warning::
The listeners established by this method are *global*
to all mappers, and are *not* garbage collected. Only use
:meth:`.as_mutable` for types that are permanent to an application,
not with ad-hoc types; otherwise this will cause unbounded growth
in memory usage.
"""
sqltype = types.to_instance(sqltype)
# a SchemaType will be copied when the Column is copied,
# and we'll lose our ability to link that type back to the original.
# so track our original type w/ columns
if isinstance(sqltype, SchemaEventTarget):
@event.listens_for(sqltype, "before_parent_attach")
def _add_column_memo(sqltyp, parent):
parent.info['_ext_mutable_orig_type'] = sqltyp
schema_event_check = True
else:
schema_event_check = False
def listen_for_type(mapper, class_):
for prop in mapper.column_attrs:
if (
schema_event_check and
hasattr(prop.expression, 'info') and
prop.expression.info.get('_ext_mutable_orig_type')
is sqltype
) or (
prop.columns[0].type is sqltype
):
cls.associate_with_attribute(getattr(class_, prop.key))
event.listen(mapper, 'mapper_configured', listen_for_type)
return sqltype
class MutableComposite(MutableBase):
"""Mixin that defines transparent propagation of change
events on a SQLAlchemy "composite" object to its
owning parent or parents.
See the example in :ref:`mutable_composites` for usage information.
"""
@classmethod
def _get_listen_keys(cls, attribute):
return set([attribute.key]).union(attribute.property._attribute_keys)
def changed(self):
"""Subclasses should call this method whenever change events occur."""
for parent, key in self._parents.items():
prop = object_mapper(parent).get_property(key)
for value, attr_name in zip(
self.__composite_values__(),
prop._attribute_keys):
setattr(parent, attr_name, value)
def _setup_composite_listener():
def _listen_for_type(mapper, class_):
for prop in mapper.iterate_properties:
if (hasattr(prop, 'composite_class') and
isinstance(prop.composite_class, type) and
issubclass(prop.composite_class, MutableComposite)):
prop.composite_class._listen_on_attribute(
getattr(class_, prop.key), False, class_)
if not event.contains(Mapper, "mapper_configured", _listen_for_type):
event.listen(Mapper, 'mapper_configured', _listen_for_type)
_setup_composite_listener()
class MutableDict(Mutable, dict):
"""A dictionary type that implements :class:`.Mutable`.
The :class:`.MutableDict` object implements a dictionary that will
emit change events to the underlying mapping when the contents of
the dictionary are altered, including when values are added or removed.
Note that :class:`.MutableDict` does **not** apply mutable tracking to the
*values themselves* inside the dictionary. Therefore it is not a sufficient
solution for the use case of tracking deep changes to a *recursive*
dictionary structure, such as a JSON structure. To support this use case,
build a subclass of :class:`.MutableDict` that provides appropriate
coercion to the values placed in the dictionary so that they too are
"mutable", and emit events up to their parent structure.
.. versionadded:: 0.8
.. seealso::
:class:`.MutableList`
:class:`.MutableSet`
"""
def __setitem__(self, key, value):
"""Detect dictionary set events and emit change events."""
dict.__setitem__(self, key, value)
self.changed()
def setdefault(self, key, value):
result = dict.setdefault(self, key, value)
self.changed()
return result
def __delitem__(self, key):
"""Detect dictionary del events and emit change events."""
dict.__delitem__(self, key)
self.changed()
def update(self, *a, **kw):
dict.update(self, *a, **kw)
self.changed()
def pop(self, *arg):
result = dict.pop(self, *arg)
self.changed()
return result
def popitem(self):
result = dict.popitem(self)
self.changed()
return result
def clear(self):
dict.clear(self)
self.changed()
@classmethod
def coerce(cls, key, value):
"""Convert plain dictionary to instance of this class."""
if not isinstance(value, cls):
if isinstance(value, dict):
return cls(value)
return Mutable.coerce(key, value)
else:
return value
def __getstate__(self):
return dict(self)
def __setstate__(self, state):
self.update(state)
class MutableList(Mutable, list):
"""A list type that implements :class:`.Mutable`.
The :class:`.MutableList` object implements a list that will
emit change events to the underlying mapping when the contents of
the list are altered, including when values are added or removed.
Note that :class:`.MutableList` does **not** apply mutable tracking to the
*values themselves* inside the list. Therefore it is not a sufficient
solution for the use case of tracking deep changes to a *recursive*
mutable structure, such as a JSON structure. To support this use case,
build a subclass of :class:`.MutableList` that provides appropriate
coercion to the values placed in the list so that they too are
"mutable", and emit events up to their parent structure.
.. versionadded:: 1.1
.. seealso::
:class:`.MutableDict`
:class:`.MutableSet`
"""
def __setitem__(self, index, value):
"""Detect list set events and emit change events."""
list.__setitem__(self, index, value)
self.changed()
def __setslice__(self, start, end, value):
"""Detect list set events and emit change events."""
list.__setslice__(self, start, end, value)
self.changed()
def __delitem__(self, index):
"""Detect list del events and emit change events."""
list.__delitem__(self, index)
self.changed()
def __delslice__(self, start, end):
"""Detect list del events and emit change events."""
list.__delslice__(self, start, end)
self.changed()
def pop(self, *arg):
result = list.pop(self, *arg)
self.changed()
return result
def append(self, x):
list.append(self, x)
self.changed()
def extend(self, x):
list.extend(self, x)
self.changed()
def insert(self, i, x):
list.insert(self, i, x)
self.changed()
def remove(self, i):
list.remove(self, i)
self.changed()
def clear(self):
list.clear(self)
self.changed()
def sort(self):
list.sort(self)
self.changed()
def reverse(self):
list.reverse(self)
self.changed()
@classmethod
def coerce(cls, index, value):
"""Convert plain list to instance of this class."""
if not isinstance(value, cls):
if isinstance(value, list):
return cls(value)
return Mutable.coerce(index, value)
else:
return value
def __getstate__(self):
return list(self)
def __setstate__(self, state):
self[:] = state
class MutableSet(Mutable, set):
"""A set type that implements :class:`.Mutable`.
The :class:`.MutableSet` object implements a set that will
emit change events to the underlying mapping when the contents of
the set are altered, including when values are added or removed.
Note that :class:`.MutableSet` does **not** apply mutable tracking to the
*values themselves* inside the set. Therefore it is not a sufficient
solution for the use case of tracking deep changes to a *recursive*
mutable structure. To support this use case,
build a subclass of :class:`.MutableSet` that provides appropriate
coercion to the values placed in the set so that they too are
"mutable", and emit events up to their parent structure.
.. versionadded:: 1.1
.. seealso::
:class:`.MutableDict`
:class:`.MutableList`
"""
def update(self, *arg):
set.update(self, *arg)
self.changed()
def intersection_update(self, *arg):
set.intersection_update(self, *arg)
self.changed()
def difference_update(self, *arg):
set.difference_update(self, *arg)
self.changed()
def symmetric_difference_update(self, *arg):
set.symmetric_difference_update(self, *arg)
self.changed()
def add(self, elem):
set.add(self, elem)
self.changed()
def remove(self, elem):
set.remove(self, elem)
self.changed()
def discard(self, elem):
set.discard(self, elem)
self.changed()
def pop(self, *arg):
result = set.pop(self, *arg)
self.changed()
return result
def clear(self):
set.clear(self)
self.changed()
@classmethod
def coerce(cls, index, value):
"""Convert plain set to instance of this class."""
if not isinstance(value, cls):
if isinstance(value, set):
return cls(value)
return Mutable.coerce(index, value)
else:
return value
def __getstate__(self):
return set(self)
def __setstate__(self, state):
self.update(state)
def __reduce_ex__(self, proto):
return (self.__class__, (list(self), ))