2010-05-07 19:33:49 +02:00
|
|
|
# orm/query.py
|
2017-04-15 18:27:12 +02:00
|
|
|
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
|
|
|
|
# <see AUTHORS file>
|
2010-05-07 19:33:49 +02:00
|
|
|
#
|
|
|
|
# This module is part of SQLAlchemy and is released under
|
|
|
|
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
|
|
|
|
|
|
|
"""The Query class and support.
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
Defines the :class:`.Query` class, the central
|
|
|
|
construct used by the ORM to construct database queries.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
The :class:`.Query` class should not be confused with the
|
|
|
|
:class:`.Select` class, which defines database
|
|
|
|
SELECT operations at the SQL (non-ORM) level. ``Query`` differs from
|
|
|
|
``Select`` in that it returns ORM-mapped objects and interacts with an
|
|
|
|
ORM session, whereas the ``Select`` construct interacts directly with the
|
|
|
|
database to return iterable result sets.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
from itertools import chain
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
from . import (
|
|
|
|
attributes, interfaces, object_mapper, persistence,
|
|
|
|
exc as orm_exc, loading
|
|
|
|
)
|
|
|
|
from .base import _entity_descriptor, _is_aliased_class, \
|
|
|
|
_is_mapped_class, _orm_columns, _generative, InspectionAttr
|
|
|
|
from .path_registry import PathRegistry
|
|
|
|
from .util import (
|
|
|
|
AliasedClass, ORMAdapter, join as orm_join, with_parent, aliased
|
|
|
|
)
|
|
|
|
from .. import sql, util, log, exc as sa_exc, inspect, inspection
|
|
|
|
from ..sql.expression import _interpret_as_from
|
|
|
|
from ..sql import (
|
|
|
|
util as sql_util,
|
|
|
|
expression, visitors
|
|
|
|
)
|
|
|
|
from ..sql.base import ColumnCollection
|
|
|
|
from . import properties
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
__all__ = ['Query', 'QueryContext', 'aliased']
|
|
|
|
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
_path_registry = PathRegistry.root
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
@inspection._self_inspects
|
|
|
|
@log.class_logger
|
2010-05-07 19:33:49 +02:00
|
|
|
class Query(object):
|
2017-04-15 18:27:12 +02:00
|
|
|
"""ORM-level SQL construction object.
|
|
|
|
|
|
|
|
:class:`.Query` is the source of all SELECT statements generated by the
|
|
|
|
ORM, both those formulated by end-user query operations as well as by
|
|
|
|
high level internal operations such as related collection loading. It
|
|
|
|
features a generative interface whereby successive calls return a new
|
|
|
|
:class:`.Query` object, a copy of the former with additional
|
|
|
|
criteria and options associated with it.
|
|
|
|
|
|
|
|
:class:`.Query` objects are normally initially generated using the
|
|
|
|
:meth:`~.Session.query` method of :class:`.Session`, and in
|
|
|
|
less common cases by instantiating the :class:`.Query` directly and
|
|
|
|
associating with a :class:`.Session` using the :meth:`.Query.with_session`
|
|
|
|
method.
|
|
|
|
|
|
|
|
For a full walkthrough of :class:`.Query` usage, see the
|
|
|
|
:ref:`ormtutorial_toplevel`.
|
|
|
|
|
|
|
|
"""
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
_enable_eagerloads = True
|
|
|
|
_enable_assertions = True
|
|
|
|
_with_labels = False
|
|
|
|
_criterion = None
|
|
|
|
_yield_per = None
|
|
|
|
_order_by = False
|
|
|
|
_group_by = False
|
|
|
|
_having = None
|
|
|
|
_distinct = False
|
2017-04-15 18:27:12 +02:00
|
|
|
_prefixes = None
|
|
|
|
_suffixes = None
|
2010-05-07 19:33:49 +02:00
|
|
|
_offset = None
|
|
|
|
_limit = None
|
2017-04-15 18:27:12 +02:00
|
|
|
_for_update_arg = None
|
2010-05-07 19:33:49 +02:00
|
|
|
_statement = None
|
|
|
|
_correlate = frozenset()
|
|
|
|
_populate_existing = False
|
2017-04-15 18:27:12 +02:00
|
|
|
_invoke_all_eagers = True
|
2010-05-07 19:33:49 +02:00
|
|
|
_version_check = False
|
|
|
|
_autoflush = True
|
|
|
|
_only_load_props = None
|
|
|
|
_refresh_state = None
|
|
|
|
_from_obj = ()
|
2017-04-15 18:27:12 +02:00
|
|
|
_join_entities = ()
|
|
|
|
_select_from_entity = None
|
|
|
|
_mapper_adapter_map = {}
|
2010-05-07 19:33:49 +02:00
|
|
|
_filter_aliases = None
|
|
|
|
_from_obj_alias = None
|
2017-04-15 18:27:12 +02:00
|
|
|
_joinpath = _joinpoint = util.immutabledict()
|
|
|
|
_execution_options = util.immutabledict()
|
|
|
|
_params = util.immutabledict()
|
|
|
|
_attributes = util.immutabledict()
|
2010-05-07 19:33:49 +02:00
|
|
|
_with_options = ()
|
|
|
|
_with_hints = ()
|
2017-04-15 18:27:12 +02:00
|
|
|
_enable_single_crit = True
|
|
|
|
_orm_only_adapt = True
|
|
|
|
_orm_only_from_obj_alias = True
|
|
|
|
_current_path = _path_registry
|
|
|
|
_has_mapper_entities = False
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
def __init__(self, entities, session=None):
|
2017-04-15 18:27:12 +02:00
|
|
|
"""Construct a :class:`.Query` directly.
|
|
|
|
|
|
|
|
E.g.::
|
|
|
|
|
|
|
|
q = Query([User, Address], session=some_session)
|
|
|
|
|
|
|
|
The above is equivalent to::
|
|
|
|
|
|
|
|
q = some_session.query(User, Address)
|
|
|
|
|
|
|
|
:param entities: a sequence of entities and/or SQL expressions.
|
|
|
|
|
|
|
|
:param session: a :class:`.Session` with which the :class:`.Query`
|
|
|
|
will be associated. Optional; a :class:`.Query` can be associated
|
|
|
|
with a :class:`.Session` generatively via the
|
|
|
|
:meth:`.Query.with_session` method as well.
|
|
|
|
|
|
|
|
.. seealso::
|
|
|
|
|
|
|
|
:meth:`.Session.query`
|
|
|
|
|
|
|
|
:meth:`.Query.with_session`
|
|
|
|
|
|
|
|
"""
|
2010-05-07 19:33:49 +02:00
|
|
|
self.session = session
|
|
|
|
self._polymorphic_adapters = {}
|
|
|
|
self._set_entities(entities)
|
|
|
|
|
|
|
|
def _set_entities(self, entities, entity_wrapper=None):
|
|
|
|
if entity_wrapper is None:
|
|
|
|
entity_wrapper = _QueryEntity
|
|
|
|
self._entities = []
|
2017-04-15 18:27:12 +02:00
|
|
|
self._primary_entity = None
|
|
|
|
self._has_mapper_entities = False
|
2010-05-07 19:33:49 +02:00
|
|
|
for ent in util.to_list(entities):
|
|
|
|
entity_wrapper(self, ent)
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
self._set_entity_selectables(self._entities)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
def _set_entity_selectables(self, entities):
|
|
|
|
self._mapper_adapter_map = d = self._mapper_adapter_map.copy()
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
for ent in entities:
|
|
|
|
for entity in ent.entities:
|
|
|
|
if entity not in d:
|
2017-04-15 18:27:12 +02:00
|
|
|
ext_info = inspect(entity)
|
|
|
|
if not ext_info.is_aliased_class and \
|
|
|
|
ext_info.mapper.with_polymorphic:
|
|
|
|
if ext_info.mapper.mapped_table not in \
|
|
|
|
self._polymorphic_adapters:
|
|
|
|
self._mapper_loads_polymorphically_with(
|
|
|
|
ext_info.mapper,
|
|
|
|
sql_util.ColumnAdapter(
|
|
|
|
ext_info.selectable,
|
|
|
|
ext_info.mapper._equivalent_columns
|
|
|
|
)
|
|
|
|
)
|
|
|
|
aliased_adapter = None
|
|
|
|
elif ext_info.is_aliased_class:
|
|
|
|
aliased_adapter = ext_info._adapter
|
2010-05-07 19:33:49 +02:00
|
|
|
else:
|
2017-04-15 18:27:12 +02:00
|
|
|
aliased_adapter = None
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
d[entity] = (
|
|
|
|
ext_info,
|
|
|
|
aliased_adapter
|
|
|
|
)
|
|
|
|
ent.setup_entity(*d[entity])
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
def _mapper_loads_polymorphically_with(self, mapper, adapter):
|
|
|
|
for m2 in mapper._with_polymorphic_mappers or [mapper]:
|
2010-05-07 19:33:49 +02:00
|
|
|
self._polymorphic_adapters[m2] = adapter
|
|
|
|
for m in m2.iterate_to_root():
|
2017-04-15 18:27:12 +02:00
|
|
|
self._polymorphic_adapters[m.local_table] = adapter
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
def _set_select_from(self, obj, set_base_alias):
|
2010-05-07 19:33:49 +02:00
|
|
|
fa = []
|
2017-04-15 18:27:12 +02:00
|
|
|
select_from_alias = None
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
for from_obj in obj:
|
2017-04-15 18:27:12 +02:00
|
|
|
info = inspect(from_obj)
|
|
|
|
if hasattr(info, 'mapper') and \
|
|
|
|
(info.is_mapper or info.is_aliased_class):
|
|
|
|
self._select_from_entity = info
|
|
|
|
if set_base_alias and not info.is_aliased_class:
|
|
|
|
raise sa_exc.ArgumentError(
|
|
|
|
"A selectable (FromClause) instance is "
|
|
|
|
"expected when the base alias is being set.")
|
|
|
|
fa.append(info.selectable)
|
|
|
|
elif not info.is_selectable:
|
|
|
|
raise sa_exc.ArgumentError(
|
|
|
|
"argument is not a mapped class, mapper, "
|
|
|
|
"aliased(), or FromClause instance.")
|
|
|
|
else:
|
|
|
|
if isinstance(from_obj, expression.SelectBase):
|
|
|
|
from_obj = from_obj.alias()
|
|
|
|
if set_base_alias:
|
|
|
|
select_from_alias = from_obj
|
|
|
|
fa.append(from_obj)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
self._from_obj = tuple(fa)
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
if set_base_alias and \
|
|
|
|
len(self._from_obj) == 1 and \
|
|
|
|
isinstance(select_from_alias, expression.Alias):
|
2010-05-07 19:33:49 +02:00
|
|
|
equivs = self.__all_equivs()
|
2017-04-15 18:27:12 +02:00
|
|
|
self._from_obj_alias = sql_util.ColumnAdapter(
|
|
|
|
self._from_obj[0], equivs)
|
|
|
|
elif set_base_alias and \
|
|
|
|
len(self._from_obj) == 1 and \
|
|
|
|
hasattr(info, "mapper") and \
|
|
|
|
info.is_aliased_class:
|
|
|
|
self._from_obj_alias = info._adapter
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
def _reset_polymorphic_adapter(self, mapper):
|
|
|
|
for m2 in mapper._with_polymorphic_mappers:
|
|
|
|
self._polymorphic_adapters.pop(m2, None)
|
|
|
|
for m in m2.iterate_to_root():
|
|
|
|
self._polymorphic_adapters.pop(m.local_table, None)
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
def _adapt_polymorphic_element(self, element):
|
|
|
|
if "parententity" in element._annotations:
|
|
|
|
search = element._annotations['parententity']
|
|
|
|
alias = self._polymorphic_adapters.get(search, None)
|
|
|
|
if alias:
|
|
|
|
return alias.adapt_clause(element)
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
if isinstance(element, expression.FromClause):
|
|
|
|
search = element
|
|
|
|
elif hasattr(element, 'table'):
|
|
|
|
search = element.table
|
|
|
|
else:
|
2017-04-15 18:27:12 +02:00
|
|
|
return None
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
alias = self._polymorphic_adapters.get(search, None)
|
|
|
|
if alias:
|
|
|
|
return alias.adapt_clause(element)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
def _adapt_col_list(self, cols):
|
|
|
|
return [
|
|
|
|
self._adapt_clause(
|
|
|
|
expression._literal_as_label_reference(o),
|
|
|
|
True, True)
|
|
|
|
for o in cols
|
|
|
|
]
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
@_generative()
|
|
|
|
def _adapt_all_clauses(self):
|
2017-04-15 18:27:12 +02:00
|
|
|
self._orm_only_adapt = False
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
def _adapt_clause(self, clause, as_filter, orm_only):
|
2017-04-15 18:27:12 +02:00
|
|
|
"""Adapt incoming clauses to transformations which
|
|
|
|
have been applied within this query."""
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
adapters = []
|
2017-04-15 18:27:12 +02:00
|
|
|
# do we adapt all expression elements or only those
|
|
|
|
# tagged as 'ORM' constructs ?
|
|
|
|
if not self._orm_only_adapt:
|
|
|
|
orm_only = False
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
if as_filter and self._filter_aliases:
|
|
|
|
for fa in self._filter_aliases._visitor_iterator:
|
2017-04-15 18:27:12 +02:00
|
|
|
adapters.append(
|
|
|
|
(
|
|
|
|
orm_only, fa.replace
|
|
|
|
)
|
|
|
|
)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
if self._from_obj_alias:
|
2017-04-15 18:27:12 +02:00
|
|
|
# for the "from obj" alias, apply extra rule to the
|
|
|
|
# 'ORM only' check, if this query were generated from a
|
|
|
|
# subquery of itself, i.e. _from_selectable(), apply adaption
|
|
|
|
# to all SQL constructs.
|
|
|
|
adapters.append(
|
|
|
|
(
|
|
|
|
orm_only if self._orm_only_from_obj_alias else False,
|
|
|
|
self._from_obj_alias.replace
|
|
|
|
)
|
|
|
|
)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
if self._polymorphic_adapters:
|
2017-04-15 18:27:12 +02:00
|
|
|
adapters.append(
|
|
|
|
(
|
|
|
|
orm_only, self._adapt_polymorphic_element
|
|
|
|
)
|
|
|
|
)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
if not adapters:
|
|
|
|
return clause
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
def replace(elem):
|
|
|
|
for _orm_only, adapter in adapters:
|
|
|
|
# if 'orm only', look for ORM annotations
|
|
|
|
# in the element before adapting.
|
|
|
|
if not _orm_only or \
|
|
|
|
'_orm_adapt' in elem._annotations or \
|
|
|
|
"parententity" in elem._annotations:
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
e = adapter(elem)
|
|
|
|
if e is not None:
|
|
|
|
return e
|
|
|
|
|
|
|
|
return visitors.replacement_traverse(
|
|
|
|
clause,
|
|
|
|
{},
|
|
|
|
replace
|
|
|
|
)
|
|
|
|
|
|
|
|
def _query_entity_zero(self):
|
|
|
|
"""Return the first QueryEntity."""
|
2010-05-07 19:33:49 +02:00
|
|
|
return self._entities[0]
|
|
|
|
|
|
|
|
def _mapper_zero(self):
|
2017-04-15 18:27:12 +02:00
|
|
|
"""return the Mapper associated with the first QueryEntity."""
|
|
|
|
return self._entities[0].mapper
|
|
|
|
|
|
|
|
def _entity_zero(self):
|
|
|
|
"""Return the 'entity' (mapper or AliasedClass) associated
|
|
|
|
with the first QueryEntity, or alternatively the 'select from'
|
|
|
|
entity if specified."""
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
return self._select_from_entity \
|
|
|
|
if self._select_from_entity is not None \
|
|
|
|
else self._query_entity_zero().entity_zero
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
@property
|
|
|
|
def _mapper_entities(self):
|
|
|
|
for ent in self._entities:
|
2017-04-15 18:27:12 +02:00
|
|
|
if isinstance(ent, _MapperEntity):
|
2010-05-07 19:33:49 +02:00
|
|
|
yield ent
|
|
|
|
|
|
|
|
def _joinpoint_zero(self):
|
2017-04-15 18:27:12 +02:00
|
|
|
return self._joinpoint.get(
|
|
|
|
'_joinpoint_entity',
|
|
|
|
self._entity_zero()
|
|
|
|
)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
def _bind_mapper(self):
|
|
|
|
ezero = self._entity_zero()
|
|
|
|
if ezero is not None:
|
|
|
|
insp = inspect(ezero)
|
|
|
|
if not insp.is_clause_element:
|
|
|
|
return insp.mapper
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
return None
|
|
|
|
|
|
|
|
def _only_full_mapper_zero(self, methname):
|
|
|
|
if self._entities != [self._primary_entity]:
|
2010-05-07 19:33:49 +02:00
|
|
|
raise sa_exc.InvalidRequestError(
|
2017-04-15 18:27:12 +02:00
|
|
|
"%s() can only be used against "
|
|
|
|
"a single mapped class." % methname)
|
|
|
|
return self._primary_entity.entity_zero
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
def _only_entity_zero(self, rationale=None):
|
|
|
|
if len(self._entities) > 1:
|
|
|
|
raise sa_exc.InvalidRequestError(
|
2017-04-15 18:27:12 +02:00
|
|
|
rationale or
|
|
|
|
"This operation requires a Query "
|
|
|
|
"against a single mapper."
|
|
|
|
)
|
2010-05-07 19:33:49 +02:00
|
|
|
return self._entity_zero()
|
|
|
|
|
|
|
|
def __all_equivs(self):
|
|
|
|
equivs = {}
|
|
|
|
for ent in self._mapper_entities:
|
|
|
|
equivs.update(ent.mapper._equivalent_columns)
|
|
|
|
return equivs
|
|
|
|
|
|
|
|
def _get_condition(self):
|
2017-04-15 18:27:12 +02:00
|
|
|
return self._no_criterion_condition(
|
|
|
|
"get", order_by=False, distinct=False)
|
|
|
|
|
|
|
|
def _get_existing_condition(self):
|
|
|
|
self._no_criterion_assertion("get", order_by=False, distinct=False)
|
|
|
|
|
|
|
|
def _no_criterion_assertion(self, meth, order_by=True, distinct=True):
|
2010-05-07 19:33:49 +02:00
|
|
|
if not self._enable_assertions:
|
|
|
|
return
|
2017-04-15 18:27:12 +02:00
|
|
|
if self._criterion is not None or \
|
|
|
|
self._statement is not None or self._from_obj or \
|
2010-05-07 19:33:49 +02:00
|
|
|
self._limit is not None or self._offset is not None or \
|
2017-04-15 18:27:12 +02:00
|
|
|
self._group_by or (order_by and self._order_by) or \
|
|
|
|
(distinct and self._distinct):
|
2010-05-07 19:33:49 +02:00
|
|
|
raise sa_exc.InvalidRequestError(
|
2017-04-15 18:27:12 +02:00
|
|
|
"Query.%s() being called on a "
|
|
|
|
"Query with existing criterion. " % meth)
|
|
|
|
|
|
|
|
def _no_criterion_condition(self, meth, order_by=True, distinct=True):
|
|
|
|
self._no_criterion_assertion(meth, order_by, distinct)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
self._from_obj = ()
|
|
|
|
self._statement = self._criterion = None
|
|
|
|
self._order_by = self._group_by = self._distinct = False
|
|
|
|
|
|
|
|
def _no_clauseelement_condition(self, meth):
|
|
|
|
if not self._enable_assertions:
|
|
|
|
return
|
|
|
|
if self._order_by:
|
|
|
|
raise sa_exc.InvalidRequestError(
|
2017-04-15 18:27:12 +02:00
|
|
|
"Query.%s() being called on a "
|
|
|
|
"Query with existing criterion. " % meth)
|
2010-05-07 19:33:49 +02:00
|
|
|
self._no_criterion_condition(meth)
|
|
|
|
|
|
|
|
def _no_statement_condition(self, meth):
|
|
|
|
if not self._enable_assertions:
|
|
|
|
return
|
2017-04-15 18:27:12 +02:00
|
|
|
if self._statement is not None:
|
2010-05-07 19:33:49 +02:00
|
|
|
raise sa_exc.InvalidRequestError(
|
|
|
|
("Query.%s() being called on a Query with an existing full "
|
|
|
|
"statement - can't apply criterion.") % meth)
|
|
|
|
|
|
|
|
def _no_limit_offset(self, meth):
|
|
|
|
if not self._enable_assertions:
|
|
|
|
return
|
|
|
|
if self._limit is not None or self._offset is not None:
|
|
|
|
raise sa_exc.InvalidRequestError(
|
2017-04-15 18:27:12 +02:00
|
|
|
"Query.%s() being called on a Query which already has LIMIT "
|
|
|
|
"or OFFSET applied. To modify the row-limited results of a "
|
|
|
|
" Query, call from_self() first. "
|
|
|
|
"Otherwise, call %s() before limit() or offset() "
|
|
|
|
"are applied."
|
|
|
|
% (meth, meth)
|
2010-05-07 19:33:49 +02:00
|
|
|
)
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
def _get_options(self, populate_existing=None,
|
|
|
|
version_check=None,
|
|
|
|
only_load_props=None,
|
|
|
|
refresh_state=None):
|
2010-05-07 19:33:49 +02:00
|
|
|
if populate_existing:
|
|
|
|
self._populate_existing = populate_existing
|
|
|
|
if version_check:
|
|
|
|
self._version_check = version_check
|
|
|
|
if refresh_state:
|
|
|
|
self._refresh_state = refresh_state
|
|
|
|
if only_load_props:
|
|
|
|
self._only_load_props = set(only_load_props)
|
|
|
|
return self
|
|
|
|
|
|
|
|
def _clone(self):
|
|
|
|
cls = self.__class__
|
|
|
|
q = cls.__new__(cls)
|
|
|
|
q.__dict__ = self.__dict__.copy()
|
|
|
|
return q
|
|
|
|
|
|
|
|
@property
|
|
|
|
def statement(self):
|
|
|
|
"""The full SELECT statement represented by this Query.
|
2017-04-15 18:27:12 +02:00
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
The statement by default will not have disambiguating labels
|
|
|
|
applied to the construct unless with_labels(True) is called
|
|
|
|
first.
|
2017-04-15 18:27:12 +02:00
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
"""
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
stmt = self._compile_context(labels=self._with_labels).\
|
|
|
|
statement
|
|
|
|
if self._params:
|
|
|
|
stmt = stmt.params(self._params)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
# TODO: there's no tests covering effects of
|
|
|
|
# the annotation not being there
|
|
|
|
return stmt._annotate({'no_replacement_traverse': True})
|
|
|
|
|
|
|
|
def subquery(self, name=None, with_labels=False, reduce_columns=False):
|
|
|
|
"""return the full SELECT statement represented by
|
|
|
|
this :class:`.Query`, embedded within an :class:`.Alias`.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
Eager JOIN generation within the query is disabled.
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
:param name: string name to be assigned as the alias;
|
|
|
|
this is passed through to :meth:`.FromClause.alias`.
|
|
|
|
If ``None``, a name will be deterministically generated
|
|
|
|
at compile time.
|
|
|
|
|
|
|
|
:param with_labels: if True, :meth:`.with_labels` will be called
|
|
|
|
on the :class:`.Query` first to apply table-qualified labels
|
|
|
|
to all columns.
|
|
|
|
|
|
|
|
:param reduce_columns: if True, :meth:`.Select.reduce_columns` will
|
|
|
|
be called on the resulting :func:`.select` construct,
|
|
|
|
to remove same-named columns where one also refers to the other
|
|
|
|
via foreign key or WHERE clause equivalence.
|
|
|
|
|
|
|
|
.. versionchanged:: 0.8 the ``with_labels`` and ``reduce_columns``
|
|
|
|
keyword arguments were added.
|
|
|
|
|
|
|
|
"""
|
|
|
|
q = self.enable_eagerloads(False)
|
|
|
|
if with_labels:
|
|
|
|
q = q.with_labels()
|
|
|
|
q = q.statement
|
|
|
|
if reduce_columns:
|
|
|
|
q = q.reduce_columns()
|
|
|
|
return q.alias(name=name)
|
|
|
|
|
|
|
|
def cte(self, name=None, recursive=False):
|
|
|
|
r"""Return the full SELECT statement represented by this
|
|
|
|
:class:`.Query` represented as a common table expression (CTE).
|
|
|
|
|
|
|
|
Parameters and usage are the same as those of the
|
|
|
|
:meth:`.SelectBase.cte` method; see that method for
|
|
|
|
further details.
|
|
|
|
|
|
|
|
Here is the `PostgreSQL WITH
|
|
|
|
RECURSIVE example
|
|
|
|
<http://www.postgresql.org/docs/8.4/static/queries-with.html>`_.
|
|
|
|
Note that, in this example, the ``included_parts`` cte and the
|
|
|
|
``incl_alias`` alias of it are Core selectables, which
|
|
|
|
means the columns are accessed via the ``.c.`` attribute. The
|
|
|
|
``parts_alias`` object is an :func:`.orm.aliased` instance of the
|
|
|
|
``Part`` entity, so column-mapped attributes are available
|
|
|
|
directly::
|
|
|
|
|
|
|
|
from sqlalchemy.orm import aliased
|
|
|
|
|
|
|
|
class Part(Base):
|
|
|
|
__tablename__ = 'part'
|
|
|
|
part = Column(String, primary_key=True)
|
|
|
|
sub_part = Column(String, primary_key=True)
|
|
|
|
quantity = Column(Integer)
|
|
|
|
|
|
|
|
included_parts = session.query(
|
|
|
|
Part.sub_part,
|
|
|
|
Part.part,
|
|
|
|
Part.quantity).\
|
|
|
|
filter(Part.part=="our part").\
|
|
|
|
cte(name="included_parts", recursive=True)
|
|
|
|
|
|
|
|
incl_alias = aliased(included_parts, name="pr")
|
|
|
|
parts_alias = aliased(Part, name="p")
|
|
|
|
included_parts = included_parts.union_all(
|
|
|
|
session.query(
|
|
|
|
parts_alias.sub_part,
|
|
|
|
parts_alias.part,
|
|
|
|
parts_alias.quantity).\
|
|
|
|
filter(parts_alias.part==incl_alias.c.sub_part)
|
|
|
|
)
|
|
|
|
|
|
|
|
q = session.query(
|
|
|
|
included_parts.c.sub_part,
|
|
|
|
func.sum(included_parts.c.quantity).
|
|
|
|
label('total_quantity')
|
|
|
|
).\
|
|
|
|
group_by(included_parts.c.sub_part)
|
|
|
|
|
|
|
|
.. seealso::
|
|
|
|
|
|
|
|
:meth:`.HasCTE.cte`
|
|
|
|
|
|
|
|
"""
|
|
|
|
return self.enable_eagerloads(False).\
|
|
|
|
statement.cte(name=name, recursive=recursive)
|
|
|
|
|
|
|
|
def label(self, name):
|
|
|
|
"""Return the full SELECT statement represented by this
|
|
|
|
:class:`.Query`, converted
|
|
|
|
to a scalar subquery with a label of the given name.
|
|
|
|
|
|
|
|
Analogous to :meth:`sqlalchemy.sql.expression.SelectBase.label`.
|
|
|
|
|
|
|
|
.. versionadded:: 0.6.5
|
|
|
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
return self.enable_eagerloads(False).statement.label(name)
|
|
|
|
|
|
|
|
def as_scalar(self):
|
|
|
|
"""Return the full SELECT statement represented by this
|
|
|
|
:class:`.Query`, converted to a scalar subquery.
|
|
|
|
|
|
|
|
Analogous to :meth:`sqlalchemy.sql.expression.SelectBase.as_scalar`.
|
|
|
|
|
|
|
|
.. versionadded:: 0.6.5
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
"""
|
2017-04-15 18:27:12 +02:00
|
|
|
|
|
|
|
return self.enable_eagerloads(False).statement.as_scalar()
|
|
|
|
|
|
|
|
@property
|
|
|
|
def selectable(self):
|
|
|
|
"""Return the :class:`.Select` object emitted by this :class:`.Query`.
|
|
|
|
|
|
|
|
Used for :func:`.inspect` compatibility, this is equivalent to::
|
|
|
|
|
|
|
|
query.enable_eagerloads(False).with_labels().statement
|
|
|
|
|
|
|
|
"""
|
|
|
|
return self.__clause_element__()
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
def __clause_element__(self):
|
|
|
|
return self.enable_eagerloads(False).with_labels().statement
|
|
|
|
|
|
|
|
@_generative()
|
|
|
|
def enable_eagerloads(self, value):
|
2017-04-15 18:27:12 +02:00
|
|
|
"""Control whether or not eager joins and subqueries are
|
2010-05-07 19:33:49 +02:00
|
|
|
rendered.
|
|
|
|
|
|
|
|
When set to False, the returned Query will not render
|
|
|
|
eager joins regardless of :func:`~sqlalchemy.orm.joinedload`,
|
|
|
|
:func:`~sqlalchemy.orm.subqueryload` options
|
|
|
|
or mapper-level ``lazy='joined'``/``lazy='subquery'``
|
|
|
|
configurations.
|
|
|
|
|
|
|
|
This is used primarily when nesting the Query's
|
|
|
|
statement into a subquery or other
|
2017-04-15 18:27:12 +02:00
|
|
|
selectable, or when using :meth:`.Query.yield_per`.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
"""
|
|
|
|
self._enable_eagerloads = value
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
def _no_yield_per(self, message):
|
|
|
|
raise sa_exc.InvalidRequestError(
|
|
|
|
"The yield_per Query option is currently not "
|
|
|
|
"compatible with %s eager loading. Please "
|
|
|
|
"specify lazyload('*') or query.enable_eagerloads(False) in "
|
|
|
|
"order to "
|
|
|
|
"proceed with query.yield_per()." % message)
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
@_generative()
|
|
|
|
def with_labels(self):
|
|
|
|
"""Apply column labels to the return value of Query.statement.
|
|
|
|
|
|
|
|
Indicates that this Query's `statement` accessor should return
|
|
|
|
a SELECT statement that applies labels to all columns in the
|
|
|
|
form <tablename>_<columnname>; this is commonly used to
|
|
|
|
disambiguate columns from multiple tables which have the same
|
|
|
|
name.
|
|
|
|
|
|
|
|
When the `Query` actually issues SQL to load rows, it always
|
|
|
|
uses column labeling.
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
.. note:: The :meth:`.Query.with_labels` method *only* applies
|
|
|
|
the output of :attr:`.Query.statement`, and *not* to any of
|
|
|
|
the result-row invoking systems of :class:`.Query` itself, e.g.
|
|
|
|
:meth:`.Query.first`, :meth:`.Query.all`, etc. To execute
|
|
|
|
a query using :meth:`.Query.with_labels`, invoke the
|
|
|
|
:attr:`.Query.statement` using :meth:`.Session.execute`::
|
|
|
|
|
|
|
|
result = session.execute(query.with_labels().statement)
|
|
|
|
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
"""
|
|
|
|
self._with_labels = True
|
2017-04-15 18:27:12 +02:00
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
@_generative()
|
|
|
|
def enable_assertions(self, value):
|
|
|
|
"""Control whether assertions are generated.
|
2017-04-15 18:27:12 +02:00
|
|
|
|
|
|
|
When set to False, the returned Query will
|
|
|
|
not assert its state before certain operations,
|
2010-05-07 19:33:49 +02:00
|
|
|
including that LIMIT/OFFSET has not been applied
|
|
|
|
when filter() is called, no criterion exists
|
|
|
|
when get() is called, and no "from_statement()"
|
|
|
|
exists when filter()/order_by()/group_by() etc.
|
2017-04-15 18:27:12 +02:00
|
|
|
is called. This more permissive mode is used by
|
|
|
|
custom Query subclasses to specify criterion or
|
2010-05-07 19:33:49 +02:00
|
|
|
other modifiers outside of the usual usage patterns.
|
2017-04-15 18:27:12 +02:00
|
|
|
|
|
|
|
Care should be taken to ensure that the usage
|
2010-05-07 19:33:49 +02:00
|
|
|
pattern is even possible. A statement applied
|
|
|
|
by from_statement() will override any criterion
|
|
|
|
set by filter() or order_by(), for example.
|
2017-04-15 18:27:12 +02:00
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
"""
|
|
|
|
self._enable_assertions = value
|
2017-04-15 18:27:12 +02:00
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
@property
|
|
|
|
def whereclause(self):
|
2017-04-15 18:27:12 +02:00
|
|
|
"""A readonly attribute which returns the current WHERE criterion for
|
|
|
|
this Query.
|
|
|
|
|
|
|
|
This returned value is a SQL expression construct, or ``None`` if no
|
|
|
|
criterion has been established.
|
|
|
|
|
|
|
|
"""
|
2010-05-07 19:33:49 +02:00
|
|
|
return self._criterion
|
|
|
|
|
|
|
|
@_generative()
|
|
|
|
def _with_current_path(self, path):
|
2017-04-15 18:27:12 +02:00
|
|
|
"""indicate that this query applies to objects loaded
|
|
|
|
within a certain path.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
Used by deferred loaders (see strategies.py) which transfer
|
|
|
|
query options from an originating query to a newly generated
|
|
|
|
query intended for the deferred load.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
"""
|
|
|
|
self._current_path = path
|
|
|
|
|
|
|
|
@_generative(_no_clauseelement_condition)
|
2017-04-15 18:27:12 +02:00
|
|
|
def with_polymorphic(self,
|
|
|
|
cls_or_mappers,
|
|
|
|
selectable=None,
|
|
|
|
polymorphic_on=None):
|
|
|
|
"""Load columns for inheriting classes.
|
|
|
|
|
|
|
|
:meth:`.Query.with_polymorphic` applies transformations
|
|
|
|
to the "main" mapped class represented by this :class:`.Query`.
|
|
|
|
The "main" mapped class here means the :class:`.Query`
|
|
|
|
object's first argument is a full class, i.e.
|
|
|
|
``session.query(SomeClass)``. These transformations allow additional
|
|
|
|
tables to be present in the FROM clause so that columns for a
|
|
|
|
joined-inheritance subclass are available in the query, both for the
|
|
|
|
purposes of load-time efficiency as well as the ability to use
|
|
|
|
these columns at query time.
|
|
|
|
|
|
|
|
See the documentation section :ref:`with_polymorphic` for
|
|
|
|
details on how this method is used.
|
|
|
|
|
|
|
|
.. versionchanged:: 0.8
|
|
|
|
A new and more flexible function
|
|
|
|
:func:`.orm.with_polymorphic` supersedes
|
|
|
|
:meth:`.Query.with_polymorphic`, as it can apply the equivalent
|
|
|
|
functionality to any set of columns or classes in the
|
|
|
|
:class:`.Query`, not just the "zero mapper". See that
|
|
|
|
function for a description of arguments.
|
|
|
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
if not self._primary_entity:
|
|
|
|
raise sa_exc.InvalidRequestError(
|
|
|
|
"No primary mapper set up for this Query.")
|
|
|
|
entity = self._entities[0]._clone()
|
|
|
|
self._entities = [entity] + self._entities[1:]
|
|
|
|
entity.set_with_polymorphic(self,
|
|
|
|
cls_or_mappers,
|
|
|
|
selectable=selectable,
|
|
|
|
polymorphic_on=polymorphic_on)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
@_generative()
|
|
|
|
def yield_per(self, count):
|
2017-04-15 18:27:12 +02:00
|
|
|
r"""Yield only ``count`` rows at a time.
|
|
|
|
|
|
|
|
The purpose of this method is when fetching very large result sets
|
|
|
|
(> 10K rows), to batch results in sub-collections and yield them
|
|
|
|
out partially, so that the Python interpreter doesn't need to declare
|
|
|
|
very large areas of memory which is both time consuming and leads
|
|
|
|
to excessive memory use. The performance from fetching hundreds of
|
|
|
|
thousands of rows can often double when a suitable yield-per setting
|
|
|
|
(e.g. approximately 1000) is used, even with DBAPIs that buffer
|
|
|
|
rows (which are most).
|
|
|
|
|
|
|
|
The :meth:`.Query.yield_per` method **is not compatible with most
|
|
|
|
eager loading schemes, including subqueryload and joinedload with
|
|
|
|
collections**. For this reason, it may be helpful to disable
|
|
|
|
eager loads, either unconditionally with
|
|
|
|
:meth:`.Query.enable_eagerloads`::
|
|
|
|
|
|
|
|
q = sess.query(Object).yield_per(100).enable_eagerloads(False)
|
|
|
|
|
|
|
|
Or more selectively using :func:`.lazyload`; such as with
|
|
|
|
an asterisk to specify the default loader scheme::
|
|
|
|
|
|
|
|
q = sess.query(Object).yield_per(100).\
|
|
|
|
options(lazyload('*'), joinedload(Object.some_related))
|
|
|
|
|
|
|
|
.. warning::
|
|
|
|
|
|
|
|
Use this method with caution; if the same instance is
|
|
|
|
present in more than one batch of rows, end-user changes
|
|
|
|
to attributes will be overwritten.
|
|
|
|
|
|
|
|
In particular, it's usually impossible to use this setting
|
|
|
|
with eagerly loaded collections (i.e. any lazy='joined' or
|
|
|
|
'subquery') since those collections will be cleared for a
|
|
|
|
new load when encountered in a subsequent result batch.
|
|
|
|
In the case of 'subquery' loading, the full result for all
|
|
|
|
rows is fetched which generally defeats the purpose of
|
|
|
|
:meth:`~sqlalchemy.orm.query.Query.yield_per`.
|
|
|
|
|
|
|
|
Also note that while
|
|
|
|
:meth:`~sqlalchemy.orm.query.Query.yield_per` will set the
|
|
|
|
``stream_results`` execution option to True, currently
|
|
|
|
this is only understood by
|
|
|
|
:mod:`~sqlalchemy.dialects.postgresql.psycopg2`,
|
|
|
|
:mod:`~sqlalchemy.dialects.mysql.mysqldb` and
|
|
|
|
:mod:`~sqlalchemy.dialects.mysql.pymysql` dialects
|
|
|
|
which will stream results using server side cursors
|
|
|
|
instead of pre-buffer all rows for this query. Other
|
|
|
|
DBAPIs **pre-buffer all rows** before making them
|
|
|
|
available. The memory use of raw database rows is much less
|
|
|
|
than that of an ORM-mapped object, but should still be taken into
|
|
|
|
consideration when benchmarking.
|
|
|
|
|
|
|
|
.. seealso::
|
|
|
|
|
|
|
|
:meth:`.Query.enable_eagerloads`
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
"""
|
|
|
|
self._yield_per = count
|
2017-04-15 18:27:12 +02:00
|
|
|
self._execution_options = self._execution_options.union(
|
|
|
|
{"stream_results": True,
|
|
|
|
"max_row_buffer": count})
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
def get(self, ident):
|
|
|
|
"""Return an instance based on the given primary key identifier,
|
|
|
|
or ``None`` if not found.
|
|
|
|
|
|
|
|
E.g.::
|
|
|
|
|
|
|
|
my_user = session.query(User).get(5)
|
|
|
|
|
|
|
|
some_object = session.query(VersionedFoo).get((5, 10))
|
|
|
|
|
|
|
|
:meth:`~.Query.get` is special in that it provides direct
|
|
|
|
access to the identity map of the owning :class:`.Session`.
|
|
|
|
If the given primary key identifier is present
|
|
|
|
in the local identity map, the object is returned
|
|
|
|
directly from this collection and no SQL is emitted,
|
|
|
|
unless the object has been marked fully expired.
|
|
|
|
If not present,
|
|
|
|
a SELECT is performed in order to locate the object.
|
|
|
|
|
|
|
|
:meth:`~.Query.get` also will perform a check if
|
|
|
|
the object is present in the identity map and
|
|
|
|
marked as expired - a SELECT
|
|
|
|
is emitted to refresh the object as well as to
|
|
|
|
ensure that the row is still present.
|
|
|
|
If not, :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised.
|
|
|
|
|
|
|
|
:meth:`~.Query.get` is only used to return a single
|
|
|
|
mapped instance, not multiple instances or
|
|
|
|
individual column constructs, and strictly
|
|
|
|
on a single primary key value. The originating
|
|
|
|
:class:`.Query` must be constructed in this way,
|
|
|
|
i.e. against a single mapped entity,
|
|
|
|
with no additional filtering criterion. Loading
|
|
|
|
options via :meth:`~.Query.options` may be applied
|
|
|
|
however, and will be used if the object is not
|
|
|
|
yet locally present.
|
|
|
|
|
|
|
|
A lazy-loading, many-to-one attribute configured
|
|
|
|
by :func:`.relationship`, using a simple
|
|
|
|
foreign-key-to-primary-key criterion, will also use an
|
|
|
|
operation equivalent to :meth:`~.Query.get` in order to retrieve
|
|
|
|
the target value from the local identity map
|
|
|
|
before querying the database. See :doc:`/orm/loading_relationships`
|
|
|
|
for further details on relationship loading.
|
|
|
|
|
|
|
|
:param ident: A scalar or tuple value representing
|
|
|
|
the primary key. For a composite primary key,
|
|
|
|
the order of identifiers corresponds in most cases
|
|
|
|
to that of the mapped :class:`.Table` object's
|
|
|
|
primary key columns. For a :func:`.mapper` that
|
|
|
|
was given the ``primary key`` argument during
|
|
|
|
construction, the order of identifiers corresponds
|
|
|
|
to the elements present in this collection.
|
|
|
|
|
|
|
|
:return: The object instance, or ``None``.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
"""
|
2017-04-15 18:27:12 +02:00
|
|
|
return self._get_impl(ident, loading.load_on_ident)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
def _get_impl(self, ident, fallback_fn):
|
2010-05-07 19:33:49 +02:00
|
|
|
# convert composite types to individual args
|
|
|
|
if hasattr(ident, '__composite_values__'):
|
|
|
|
ident = ident.__composite_values__()
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
ident = util.to_list(ident)
|
|
|
|
|
|
|
|
mapper = self._only_full_mapper_zero("get")
|
|
|
|
|
|
|
|
if len(ident) != len(mapper.primary_key):
|
|
|
|
raise sa_exc.InvalidRequestError(
|
|
|
|
"Incorrect number of values in identifier to formulate "
|
|
|
|
"primary key for query.get(); primary key columns are %s" %
|
|
|
|
','.join("'%s'" % c for c in mapper.primary_key))
|
|
|
|
|
|
|
|
key = mapper.identity_key_from_primary_key(ident)
|
|
|
|
|
|
|
|
if not self._populate_existing and \
|
|
|
|
not mapper.always_refresh and \
|
|
|
|
self._for_update_arg is None:
|
|
|
|
|
|
|
|
instance = loading.get_from_identity(
|
|
|
|
self.session, key, attributes.PASSIVE_OFF)
|
|
|
|
if instance is not None:
|
|
|
|
self._get_existing_condition()
|
|
|
|
# reject calls for id in identity map but class
|
|
|
|
# mismatch.
|
|
|
|
if not issubclass(instance.__class__, mapper.class_):
|
|
|
|
return None
|
|
|
|
return instance
|
|
|
|
|
|
|
|
return fallback_fn(self, key)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
@_generative()
|
|
|
|
def correlate(self, *args):
|
2017-04-15 18:27:12 +02:00
|
|
|
"""Return a :class:`.Query` construct which will correlate the given
|
|
|
|
FROM clauses to that of an enclosing :class:`.Query` or
|
|
|
|
:func:`~.expression.select`.
|
|
|
|
|
|
|
|
The method here accepts mapped classes, :func:`.aliased` constructs,
|
|
|
|
and :func:`.mapper` constructs as arguments, which are resolved into
|
|
|
|
expression constructs, in addition to appropriate expression
|
|
|
|
constructs.
|
|
|
|
|
|
|
|
The correlation arguments are ultimately passed to
|
|
|
|
:meth:`.Select.correlate` after coercion to expression constructs.
|
|
|
|
|
|
|
|
The correlation arguments take effect in such cases
|
|
|
|
as when :meth:`.Query.from_self` is used, or when
|
|
|
|
a subquery as returned by :meth:`.Query.subquery` is
|
|
|
|
embedded in another :func:`~.expression.select` construct.
|
|
|
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
for s in args:
|
|
|
|
if s is None:
|
|
|
|
self._correlate = self._correlate.union([None])
|
|
|
|
else:
|
|
|
|
self._correlate = self._correlate.union(
|
|
|
|
sql_util.surface_selectables(_interpret_as_from(s))
|
|
|
|
)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
@_generative()
|
|
|
|
def autoflush(self, setting):
|
|
|
|
"""Return a Query with a specific 'autoflush' setting.
|
|
|
|
|
|
|
|
Note that a Session with autoflush=False will
|
|
|
|
not autoflush, even if this flag is set to True at the
|
|
|
|
Query level. Therefore this flag is usually used only
|
|
|
|
to disable autoflush for a specific Query.
|
|
|
|
|
|
|
|
"""
|
|
|
|
self._autoflush = setting
|
|
|
|
|
|
|
|
@_generative()
|
|
|
|
def populate_existing(self):
|
2017-04-15 18:27:12 +02:00
|
|
|
"""Return a :class:`.Query` that will expire and refresh all instances
|
|
|
|
as they are loaded, or reused from the current :class:`.Session`.
|
|
|
|
|
|
|
|
:meth:`.populate_existing` does not improve behavior when
|
|
|
|
the ORM is used normally - the :class:`.Session` object's usual
|
|
|
|
behavior of maintaining a transaction and expiring all attributes
|
|
|
|
after rollback or commit handles object state automatically.
|
|
|
|
This method is not intended for general use.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
"""
|
|
|
|
self._populate_existing = True
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
@_generative()
|
|
|
|
def _with_invoke_all_eagers(self, value):
|
|
|
|
"""Set the 'invoke all eagers' flag which causes joined- and
|
|
|
|
subquery loaders to traverse into already-loaded related objects
|
|
|
|
and collections.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
Default is that of :attr:`.Query._invoke_all_eagers`.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
"""
|
2017-04-15 18:27:12 +02:00
|
|
|
self._invoke_all_eagers = value
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
def with_parent(self, instance, property=None):
|
2017-04-15 18:27:12 +02:00
|
|
|
"""Add filtering criterion that relates the given instance
|
|
|
|
to a child object or collection, using its attribute state
|
|
|
|
as well as an established :func:`.relationship()`
|
|
|
|
configuration.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
The method uses the :func:`.with_parent` function to generate
|
|
|
|
the clause, the result of which is passed to :meth:`.Query.filter`.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
Parameters are the same as :func:`.with_parent`, with the exception
|
|
|
|
that the given property can be None, in which case a search is
|
|
|
|
performed against this :class:`.Query` object's target mapper.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
"""
|
2017-04-15 18:27:12 +02:00
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
if property is None:
|
2017-04-15 18:27:12 +02:00
|
|
|
mapper_zero = self._mapper_zero()
|
|
|
|
|
|
|
|
mapper = object_mapper(instance)
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
for prop in mapper.iterate_properties:
|
2017-04-15 18:27:12 +02:00
|
|
|
if isinstance(prop, properties.RelationshipProperty) and \
|
|
|
|
prop.mapper is mapper_zero:
|
|
|
|
property = prop
|
2010-05-07 19:33:49 +02:00
|
|
|
break
|
|
|
|
else:
|
|
|
|
raise sa_exc.InvalidRequestError(
|
2017-04-15 18:27:12 +02:00
|
|
|
"Could not locate a property which relates instances "
|
|
|
|
"of class '%s' to instances of class '%s'" %
|
|
|
|
(
|
|
|
|
self._mapper_zero().class_.__name__,
|
|
|
|
instance.__class__.__name__)
|
|
|
|
)
|
|
|
|
|
|
|
|
return self.filter(with_parent(instance, property))
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
@_generative()
|
|
|
|
def add_entity(self, entity, alias=None):
|
2017-04-15 18:27:12 +02:00
|
|
|
"""add a mapped entity to the list of result columns
|
|
|
|
to be returned."""
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
if alias is not None:
|
|
|
|
entity = aliased(entity, alias)
|
|
|
|
|
|
|
|
self._entities = list(self._entities)
|
|
|
|
m = _MapperEntity(self, entity)
|
2017-04-15 18:27:12 +02:00
|
|
|
self._set_entity_selectables([m])
|
|
|
|
|
|
|
|
@_generative()
|
|
|
|
def with_session(self, session):
|
|
|
|
"""Return a :class:`.Query` that will use the given :class:`.Session`.
|
|
|
|
|
|
|
|
While the :class:`.Query` object is normally instantiated using the
|
|
|
|
:meth:`.Session.query` method, it is legal to build the :class:`.Query`
|
|
|
|
directly without necessarily using a :class:`.Session`. Such a
|
|
|
|
:class:`.Query` object, or any :class:`.Query` already associated
|
|
|
|
with a different :class:`.Session`, can produce a new :class:`.Query`
|
|
|
|
object associated with a target session using this method::
|
|
|
|
|
|
|
|
from sqlalchemy.orm import Query
|
|
|
|
|
|
|
|
query = Query([MyClass]).filter(MyClass.id == 5)
|
|
|
|
|
|
|
|
result = query.with_session(my_session).one()
|
|
|
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
self.session = session
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
def from_self(self, *entities):
|
2017-04-15 18:27:12 +02:00
|
|
|
r"""return a Query that selects from this Query's
|
|
|
|
SELECT statement.
|
|
|
|
|
|
|
|
:meth:`.Query.from_self` essentially turns the SELECT statement
|
|
|
|
into a SELECT of itself. Given a query such as::
|
|
|
|
|
|
|
|
q = session.query(User).filter(User.name.like('e%'))
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
Given the :meth:`.Query.from_self` version::
|
|
|
|
|
|
|
|
q = session.query(User).filter(User.name.like('e%')).from_self()
|
|
|
|
|
|
|
|
This query renders as:
|
|
|
|
|
|
|
|
.. sourcecode:: sql
|
|
|
|
|
|
|
|
SELECT anon_1.user_id AS anon_1_user_id,
|
|
|
|
anon_1.user_name AS anon_1_user_name
|
|
|
|
FROM (SELECT "user".id AS user_id, "user".name AS user_name
|
|
|
|
FROM "user"
|
|
|
|
WHERE "user".name LIKE :name_1) AS anon_1
|
|
|
|
|
|
|
|
There are lots of cases where :meth:`.Query.from_self` may be useful.
|
|
|
|
A simple one is where above, we may want to apply a row LIMIT to
|
|
|
|
the set of user objects we query against, and then apply additional
|
|
|
|
joins against that row-limited set::
|
|
|
|
|
|
|
|
q = session.query(User).filter(User.name.like('e%')).\
|
|
|
|
limit(5).from_self().\
|
|
|
|
join(User.addresses).filter(Address.email.like('q%'))
|
|
|
|
|
|
|
|
The above query joins to the ``Address`` entity but only against the
|
|
|
|
first five results of the ``User`` query:
|
|
|
|
|
|
|
|
.. sourcecode:: sql
|
|
|
|
|
|
|
|
SELECT anon_1.user_id AS anon_1_user_id,
|
|
|
|
anon_1.user_name AS anon_1_user_name
|
|
|
|
FROM (SELECT "user".id AS user_id, "user".name AS user_name
|
|
|
|
FROM "user"
|
|
|
|
WHERE "user".name LIKE :name_1
|
|
|
|
LIMIT :param_1) AS anon_1
|
|
|
|
JOIN address ON anon_1.user_id = address.user_id
|
|
|
|
WHERE address.email LIKE :email_1
|
|
|
|
|
|
|
|
**Automatic Aliasing**
|
|
|
|
|
|
|
|
Another key behavior of :meth:`.Query.from_self` is that it applies
|
|
|
|
**automatic aliasing** to the entities inside the subquery, when
|
|
|
|
they are referenced on the outside. Above, if we continue to
|
|
|
|
refer to the ``User`` entity without any additional aliasing applied
|
|
|
|
to it, those references wil be in terms of the subquery::
|
|
|
|
|
|
|
|
q = session.query(User).filter(User.name.like('e%')).\
|
|
|
|
limit(5).from_self().\
|
|
|
|
join(User.addresses).filter(Address.email.like('q%')).\
|
|
|
|
order_by(User.name)
|
|
|
|
|
|
|
|
The ORDER BY against ``User.name`` is aliased to be in terms of the
|
|
|
|
inner subquery:
|
|
|
|
|
|
|
|
.. sourcecode:: sql
|
|
|
|
|
|
|
|
SELECT anon_1.user_id AS anon_1_user_id,
|
|
|
|
anon_1.user_name AS anon_1_user_name
|
|
|
|
FROM (SELECT "user".id AS user_id, "user".name AS user_name
|
|
|
|
FROM "user"
|
|
|
|
WHERE "user".name LIKE :name_1
|
|
|
|
LIMIT :param_1) AS anon_1
|
|
|
|
JOIN address ON anon_1.user_id = address.user_id
|
|
|
|
WHERE address.email LIKE :email_1 ORDER BY anon_1.user_name
|
|
|
|
|
|
|
|
The automatic aliasing feature only works in a **limited** way,
|
|
|
|
for simple filters and orderings. More ambitious constructions
|
|
|
|
such as referring to the entity in joins should prefer to use
|
|
|
|
explicit subquery objects, typically making use of the
|
|
|
|
:meth:`.Query.subquery` method to produce an explicit subquery object.
|
|
|
|
Always test the structure of queries by viewing the SQL to ensure
|
|
|
|
a particular structure does what's expected!
|
|
|
|
|
|
|
|
**Changing the Entities**
|
|
|
|
|
|
|
|
:meth:`.Query.from_self` also includes the ability to modify what
|
|
|
|
columns are being queried. In our example, we want ``User.id``
|
|
|
|
to be queried by the inner query, so that we can join to the
|
|
|
|
``Address`` entity on the outside, but we only wanted the outer
|
|
|
|
query to return the ``Address.email`` column::
|
|
|
|
|
|
|
|
q = session.query(User).filter(User.name.like('e%')).\
|
|
|
|
limit(5).from_self(Address.email).\
|
|
|
|
join(User.addresses).filter(Address.email.like('q%'))
|
|
|
|
|
|
|
|
yielding:
|
|
|
|
|
|
|
|
.. sourcecode:: sql
|
|
|
|
|
|
|
|
SELECT address.email AS address_email
|
|
|
|
FROM (SELECT "user".id AS user_id, "user".name AS user_name
|
|
|
|
FROM "user"
|
|
|
|
WHERE "user".name LIKE :name_1
|
|
|
|
LIMIT :param_1) AS anon_1
|
|
|
|
JOIN address ON anon_1.user_id = address.user_id
|
|
|
|
WHERE address.email LIKE :email_1
|
|
|
|
|
|
|
|
**Looking out for Inner / Outer Columns**
|
|
|
|
|
|
|
|
Keep in mind that when referring to columns that originate from
|
|
|
|
inside the subquery, we need to ensure they are present in the
|
|
|
|
columns clause of the subquery itself; this is an ordinary aspect of
|
|
|
|
SQL. For example, if we wanted to load from a joined entity inside
|
|
|
|
the subquery using :func:`.contains_eager`, we need to add those
|
|
|
|
columns. Below illustrates a join of ``Address`` to ``User``,
|
|
|
|
then a subquery, and then we'd like :func:`.contains_eager` to access
|
|
|
|
the ``User`` columns::
|
|
|
|
|
|
|
|
q = session.query(Address).join(Address.user).\
|
|
|
|
filter(User.name.like('e%'))
|
|
|
|
|
|
|
|
q = q.add_entity(User).from_self().\
|
|
|
|
options(contains_eager(Address.user))
|
|
|
|
|
|
|
|
We use :meth:`.Query.add_entity` above **before** we call
|
|
|
|
:meth:`.Query.from_self` so that the ``User`` columns are present
|
|
|
|
in the inner subquery, so that they are available to the
|
|
|
|
:func:`.contains_eager` modifier we are using on the outside,
|
|
|
|
producing:
|
|
|
|
|
|
|
|
.. sourcecode:: sql
|
|
|
|
|
|
|
|
SELECT anon_1.address_id AS anon_1_address_id,
|
|
|
|
anon_1.address_email AS anon_1_address_email,
|
|
|
|
anon_1.address_user_id AS anon_1_address_user_id,
|
|
|
|
anon_1.user_id AS anon_1_user_id,
|
|
|
|
anon_1.user_name AS anon_1_user_name
|
|
|
|
FROM (
|
|
|
|
SELECT address.id AS address_id,
|
|
|
|
address.email AS address_email,
|
|
|
|
address.user_id AS address_user_id,
|
|
|
|
"user".id AS user_id,
|
|
|
|
"user".name AS user_name
|
|
|
|
FROM address JOIN "user" ON "user".id = address.user_id
|
|
|
|
WHERE "user".name LIKE :name_1) AS anon_1
|
|
|
|
|
|
|
|
If we didn't call ``add_entity(User)``, but still asked
|
|
|
|
:func:`.contains_eager` to load the ``User`` entity, it would be
|
|
|
|
forced to add the table on the outside without the correct
|
|
|
|
join criteria - note the ``anon1, "user"`` phrase at
|
|
|
|
the end:
|
|
|
|
|
|
|
|
.. sourcecode:: sql
|
|
|
|
|
|
|
|
-- incorrect query
|
|
|
|
SELECT anon_1.address_id AS anon_1_address_id,
|
|
|
|
anon_1.address_email AS anon_1_address_email,
|
|
|
|
anon_1.address_user_id AS anon_1_address_user_id,
|
|
|
|
"user".id AS user_id,
|
|
|
|
"user".name AS user_name
|
|
|
|
FROM (
|
|
|
|
SELECT address.id AS address_id,
|
|
|
|
address.email AS address_email,
|
|
|
|
address.user_id AS address_user_id
|
|
|
|
FROM address JOIN "user" ON "user".id = address.user_id
|
|
|
|
WHERE "user".name LIKE :name_1) AS anon_1, "user"
|
|
|
|
|
|
|
|
:param \*entities: optional list of entities which will replace
|
|
|
|
those being selected.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
"""
|
|
|
|
fromclause = self.with_labels().enable_eagerloads(False).\
|
2017-04-15 18:27:12 +02:00
|
|
|
statement.correlate(None)
|
2010-05-07 19:33:49 +02:00
|
|
|
q = self._from_selectable(fromclause)
|
2017-04-15 18:27:12 +02:00
|
|
|
q._enable_single_crit = False
|
|
|
|
q._select_from_entity = self._entity_zero()
|
2010-05-07 19:33:49 +02:00
|
|
|
if entities:
|
|
|
|
q._set_entities(entities)
|
|
|
|
return q
|
2017-04-15 18:27:12 +02:00
|
|
|
|
|
|
|
@_generative()
|
|
|
|
def _set_enable_single_crit(self, val):
|
|
|
|
self._enable_single_crit = val
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
@_generative()
|
|
|
|
def _from_selectable(self, fromclause):
|
2017-04-15 18:27:12 +02:00
|
|
|
for attr in (
|
|
|
|
'_statement', '_criterion',
|
|
|
|
'_order_by', '_group_by',
|
|
|
|
'_limit', '_offset',
|
|
|
|
'_joinpath', '_joinpoint',
|
|
|
|
'_distinct', '_having',
|
|
|
|
'_prefixes', '_suffixes'
|
2010-05-07 19:33:49 +02:00
|
|
|
):
|
|
|
|
self.__dict__.pop(attr, None)
|
2017-04-15 18:27:12 +02:00
|
|
|
self._set_select_from([fromclause], True)
|
|
|
|
|
|
|
|
# this enables clause adaptation for non-ORM
|
|
|
|
# expressions.
|
|
|
|
self._orm_only_from_obj_alias = False
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
old_entities = self._entities
|
|
|
|
self._entities = []
|
|
|
|
for e in old_entities:
|
|
|
|
e.adapt_to_selectable(self, self._from_obj[0])
|
|
|
|
|
|
|
|
def values(self, *columns):
|
2017-04-15 18:27:12 +02:00
|
|
|
"""Return an iterator yielding result tuples corresponding
|
|
|
|
to the given list of columns"""
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
if not columns:
|
|
|
|
return iter(())
|
|
|
|
q = self._clone()
|
|
|
|
q._set_entities(columns, entity_wrapper=_ColumnEntity)
|
|
|
|
if not q._yield_per:
|
|
|
|
q._yield_per = 10
|
|
|
|
return iter(q)
|
|
|
|
_values = values
|
|
|
|
|
|
|
|
def value(self, column):
|
2017-04-15 18:27:12 +02:00
|
|
|
"""Return a scalar result corresponding to the given
|
|
|
|
column expression."""
|
2010-05-07 19:33:49 +02:00
|
|
|
try:
|
2017-04-15 18:27:12 +02:00
|
|
|
return next(self.values(column))[0]
|
2010-05-07 19:33:49 +02:00
|
|
|
except StopIteration:
|
|
|
|
return None
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
@_generative()
|
|
|
|
def with_entities(self, *entities):
|
|
|
|
"""Return a new :class:`.Query` replacing the SELECT list with the
|
|
|
|
given entities.
|
|
|
|
|
|
|
|
e.g.::
|
|
|
|
|
|
|
|
# Users, filtered on some arbitrary criterion
|
|
|
|
# and then ordered by related email address
|
|
|
|
q = session.query(User).\
|
|
|
|
join(User.address).\
|
|
|
|
filter(User.name.like('%ed%')).\
|
|
|
|
order_by(Address.email)
|
|
|
|
|
|
|
|
# given *only* User.id==5, Address.email, and 'q', what
|
|
|
|
# would the *next* User in the result be ?
|
|
|
|
subq = q.with_entities(Address.email).\
|
|
|
|
order_by(None).\
|
|
|
|
filter(User.id==5).\
|
|
|
|
subquery()
|
|
|
|
q = q.join((subq, subq.c.email < Address.email)).\
|
|
|
|
limit(1)
|
|
|
|
|
|
|
|
.. versionadded:: 0.6.5
|
|
|
|
|
|
|
|
"""
|
|
|
|
self._set_entities(entities)
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
@_generative()
|
|
|
|
def add_columns(self, *column):
|
2017-04-15 18:27:12 +02:00
|
|
|
"""Add one or more column expressions to the list
|
2010-05-07 19:33:49 +02:00
|
|
|
of result columns to be returned."""
|
|
|
|
|
|
|
|
self._entities = list(self._entities)
|
|
|
|
l = len(self._entities)
|
|
|
|
for c in column:
|
|
|
|
_ColumnEntity(self, c)
|
|
|
|
# _ColumnEntity may add many entities if the
|
|
|
|
# given arg is a FROM clause
|
2017-04-15 18:27:12 +02:00
|
|
|
self._set_entity_selectables(self._entities[l:])
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
@util.pending_deprecation("0.7",
|
|
|
|
":meth:`.add_column` is superseded "
|
|
|
|
"by :meth:`.add_columns`",
|
|
|
|
False)
|
2010-05-07 19:33:49 +02:00
|
|
|
def add_column(self, column):
|
2017-04-15 18:27:12 +02:00
|
|
|
"""Add a column expression to the list of result columns to be
|
|
|
|
returned.
|
|
|
|
|
|
|
|
Pending deprecation: :meth:`.add_column` will be superseded by
|
|
|
|
:meth:`.add_columns`.
|
|
|
|
|
|
|
|
"""
|
2010-05-07 19:33:49 +02:00
|
|
|
return self.add_columns(column)
|
|
|
|
|
|
|
|
def options(self, *args):
|
|
|
|
"""Return a new Query object, applying the given list of
|
2017-04-15 18:27:12 +02:00
|
|
|
mapper options.
|
|
|
|
|
|
|
|
Most supplied options regard changing how column- and
|
|
|
|
relationship-mapped attributes are loaded. See the sections
|
|
|
|
:ref:`deferred` and :doc:`/orm/loading_relationships` for reference
|
|
|
|
documentation.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
"""
|
|
|
|
return self._options(False, *args)
|
|
|
|
|
|
|
|
def _conditional_options(self, *args):
|
|
|
|
return self._options(True, *args)
|
|
|
|
|
|
|
|
@_generative()
|
|
|
|
def _options(self, conditional, *args):
|
|
|
|
# most MapperOptions write to the '_attributes' dictionary,
|
|
|
|
# so copy that as well
|
|
|
|
self._attributes = self._attributes.copy()
|
|
|
|
opts = tuple(util.flatten_iterator(args))
|
|
|
|
self._with_options = self._with_options + opts
|
|
|
|
if conditional:
|
|
|
|
for opt in opts:
|
|
|
|
opt.process_query_conditionally(self)
|
|
|
|
else:
|
|
|
|
for opt in opts:
|
|
|
|
opt.process_query(self)
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
def with_transformation(self, fn):
|
|
|
|
"""Return a new :class:`.Query` object transformed by
|
|
|
|
the given function.
|
|
|
|
|
|
|
|
E.g.::
|
|
|
|
|
|
|
|
def filter_something(criterion):
|
|
|
|
def transform(q):
|
|
|
|
return q.filter(criterion)
|
|
|
|
return transform
|
|
|
|
|
|
|
|
q = q.with_transformation(filter_something(x==5))
|
|
|
|
|
|
|
|
This allows ad-hoc recipes to be created for :class:`.Query`
|
|
|
|
objects. See the example at :ref:`hybrid_transformers`.
|
|
|
|
|
|
|
|
.. versionadded:: 0.7.4
|
|
|
|
|
|
|
|
"""
|
|
|
|
return fn(self)
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
@_generative()
|
2017-04-15 18:27:12 +02:00
|
|
|
def with_hint(self, selectable, text, dialect_name='*'):
|
|
|
|
"""Add an indexing or other executional context
|
|
|
|
hint for the given entity or selectable to
|
|
|
|
this :class:`.Query`.
|
|
|
|
|
|
|
|
Functionality is passed straight through to
|
|
|
|
:meth:`~sqlalchemy.sql.expression.Select.with_hint`,
|
|
|
|
with the addition that ``selectable`` can be a
|
|
|
|
:class:`.Table`, :class:`.Alias`, or ORM entity / mapped class
|
2010-05-07 19:33:49 +02:00
|
|
|
/etc.
|
2017-04-15 18:27:12 +02:00
|
|
|
|
|
|
|
.. seealso::
|
|
|
|
|
|
|
|
:meth:`.Query.with_statement_hint`
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
"""
|
2017-04-15 18:27:12 +02:00
|
|
|
if selectable is not None:
|
|
|
|
selectable = inspect(selectable).selectable
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
self._with_hints += ((selectable, text, dialect_name),)
|
2017-04-15 18:27:12 +02:00
|
|
|
|
|
|
|
def with_statement_hint(self, text, dialect_name='*'):
|
|
|
|
"""add a statement hint to this :class:`.Select`.
|
|
|
|
|
|
|
|
This method is similar to :meth:`.Select.with_hint` except that
|
|
|
|
it does not require an individual table, and instead applies to the
|
|
|
|
statement as a whole.
|
|
|
|
|
|
|
|
This feature calls down into :meth:`.Select.with_statement_hint`.
|
|
|
|
|
|
|
|
.. versionadded:: 1.0.0
|
|
|
|
|
|
|
|
.. seealso::
|
|
|
|
|
|
|
|
:meth:`.Query.with_hint`
|
|
|
|
|
|
|
|
"""
|
|
|
|
return self.with_hint(None, text, dialect_name)
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
@_generative()
|
|
|
|
def execution_options(self, **kwargs):
|
|
|
|
""" Set non-SQL options which take effect during execution.
|
2017-04-15 18:27:12 +02:00
|
|
|
|
|
|
|
The options are the same as those accepted by
|
|
|
|
:meth:`.Connection.execution_options`.
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
Note that the ``stream_results`` execution option is enabled
|
|
|
|
automatically if the :meth:`~sqlalchemy.orm.query.Query.yield_per()`
|
|
|
|
method is used.
|
|
|
|
|
|
|
|
"""
|
2017-04-15 18:27:12 +02:00
|
|
|
self._execution_options = self._execution_options.union(kwargs)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
@_generative()
|
|
|
|
def with_lockmode(self, mode):
|
2017-04-15 18:27:12 +02:00
|
|
|
"""Return a new :class:`.Query` object with the specified "locking mode",
|
|
|
|
which essentially refers to the ``FOR UPDATE`` clause.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
.. deprecated:: 0.9.0 superseded by :meth:`.Query.with_for_update`.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
:param mode: a string representing the desired locking mode.
|
|
|
|
Valid values are:
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
* ``None`` - translates to no lockmode
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
* ``'update'`` - translates to ``FOR UPDATE``
|
|
|
|
(standard SQL, supported by most dialects)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
* ``'update_nowait'`` - translates to ``FOR UPDATE NOWAIT``
|
|
|
|
(supported by Oracle, PostgreSQL 8.1 upwards)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
* ``'read'`` - translates to ``LOCK IN SHARE MODE`` (for MySQL),
|
|
|
|
and ``FOR SHARE`` (for PostgreSQL)
|
|
|
|
|
|
|
|
.. seealso::
|
|
|
|
|
|
|
|
:meth:`.Query.with_for_update` - improved API for
|
|
|
|
specifying the ``FOR UPDATE`` clause.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
"""
|
2017-04-15 18:27:12 +02:00
|
|
|
self._for_update_arg = LockmodeArg.parse_legacy_query(mode)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
@_generative()
|
|
|
|
def with_for_update(self, read=False, nowait=False, of=None,
|
|
|
|
skip_locked=False, key_share=False):
|
|
|
|
"""return a new :class:`.Query` with the specified options for the
|
|
|
|
``FOR UPDATE`` clause.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
The behavior of this method is identical to that of
|
|
|
|
:meth:`.SelectBase.with_for_update`. When called with no arguments,
|
|
|
|
the resulting ``SELECT`` statement will have a ``FOR UPDATE`` clause
|
|
|
|
appended. When additional arguments are specified, backend-specific
|
|
|
|
options such as ``FOR UPDATE NOWAIT`` or ``LOCK IN SHARE MODE``
|
|
|
|
can take effect.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
E.g.::
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
q = sess.query(User).with_for_update(nowait=True, of=User)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
The above query on a PostgreSQL backend will render like::
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
SELECT users.id AS users_id FROM users FOR UPDATE OF users NOWAIT
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
.. versionadded:: 0.9.0 :meth:`.Query.with_for_update` supersedes
|
|
|
|
the :meth:`.Query.with_lockmode` method.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
.. seealso::
|
|
|
|
|
|
|
|
:meth:`.GenerativeSelect.with_for_update` - Core level method with
|
|
|
|
full argument and behavioral description.
|
|
|
|
|
|
|
|
"""
|
|
|
|
self._for_update_arg = LockmodeArg(read=read, nowait=nowait, of=of,
|
|
|
|
skip_locked=skip_locked,
|
|
|
|
key_share=key_share)
|
|
|
|
|
|
|
|
@_generative()
|
|
|
|
def params(self, *args, **kwargs):
|
|
|
|
r"""add values for bind parameters which may have been
|
|
|
|
specified in filter().
|
|
|
|
|
|
|
|
parameters may be specified using \**kwargs, or optionally a single
|
|
|
|
dictionary as the first positional argument. The reason for both is
|
|
|
|
that \**kwargs is convenient, however some parameter dictionaries
|
|
|
|
contain unicode keys in which case \**kwargs cannot be used.
|
|
|
|
|
|
|
|
"""
|
|
|
|
if len(args) == 1:
|
|
|
|
kwargs.update(args[0])
|
|
|
|
elif len(args) > 0:
|
|
|
|
raise sa_exc.ArgumentError(
|
|
|
|
"params() takes zero or one positional argument, "
|
|
|
|
"which is a dictionary.")
|
|
|
|
self._params = self._params.copy()
|
|
|
|
self._params.update(kwargs)
|
|
|
|
|
|
|
|
@_generative(_no_statement_condition, _no_limit_offset)
|
|
|
|
def filter(self, *criterion):
|
|
|
|
r"""apply the given filtering criterion to a copy
|
|
|
|
of this :class:`.Query`, using SQL expressions.
|
|
|
|
|
|
|
|
e.g.::
|
|
|
|
|
|
|
|
session.query(MyClass).filter(MyClass.name == 'some name')
|
|
|
|
|
|
|
|
Multiple criteria may be specified as comma separated; the effect
|
|
|
|
is that they will be joined together using the :func:`.and_`
|
|
|
|
function::
|
|
|
|
|
|
|
|
session.query(MyClass).\
|
|
|
|
filter(MyClass.name == 'some name', MyClass.id > 5)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
The criterion is any SQL expression object applicable to the
|
|
|
|
WHERE clause of a select. String expressions are coerced
|
|
|
|
into SQL expression constructs via the :func:`.text` construct.
|
|
|
|
|
|
|
|
.. seealso::
|
|
|
|
|
|
|
|
:meth:`.Query.filter_by` - filter on keyword expressions.
|
|
|
|
|
|
|
|
"""
|
|
|
|
for criterion in list(criterion):
|
|
|
|
criterion = expression._expression_literal_as_text(criterion)
|
|
|
|
|
|
|
|
criterion = self._adapt_clause(criterion, True, True)
|
|
|
|
|
|
|
|
if self._criterion is not None:
|
|
|
|
self._criterion = self._criterion & criterion
|
2010-05-07 19:33:49 +02:00
|
|
|
else:
|
2017-04-15 18:27:12 +02:00
|
|
|
self._criterion = criterion
|
|
|
|
|
|
|
|
def filter_by(self, **kwargs):
|
|
|
|
r"""apply the given filtering criterion to a copy
|
|
|
|
of this :class:`.Query`, using keyword expressions.
|
|
|
|
|
|
|
|
e.g.::
|
|
|
|
|
|
|
|
session.query(MyClass).filter_by(name = 'some name')
|
|
|
|
|
|
|
|
Multiple criteria may be specified as comma separated; the effect
|
|
|
|
is that they will be joined together using the :func:`.and_`
|
|
|
|
function::
|
|
|
|
|
|
|
|
session.query(MyClass).\
|
|
|
|
filter_by(name = 'some name', id = 5)
|
|
|
|
|
|
|
|
The keyword expressions are extracted from the primary
|
|
|
|
entity of the query, or the last entity that was the
|
|
|
|
target of a call to :meth:`.Query.join`.
|
|
|
|
|
|
|
|
.. seealso::
|
|
|
|
|
|
|
|
:meth:`.Query.filter` - filter on SQL expressions.
|
|
|
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
clauses = [_entity_descriptor(self._joinpoint_zero(), key) == value
|
|
|
|
for key, value in kwargs.items()]
|
|
|
|
return self.filter(sql.and_(*clauses))
|
|
|
|
|
|
|
|
@_generative(_no_statement_condition, _no_limit_offset)
|
|
|
|
def order_by(self, *criterion):
|
|
|
|
"""apply one or more ORDER BY criterion to the query and return
|
|
|
|
the newly resulting ``Query``
|
|
|
|
|
|
|
|
All existing ORDER BY settings can be suppressed by
|
|
|
|
passing ``None`` - this will suppress any ORDER BY configured
|
|
|
|
on mappers as well.
|
|
|
|
|
|
|
|
Alternatively, passing False will reset ORDER BY and additionally
|
|
|
|
re-allow default mapper.order_by to take place. Note mapper.order_by
|
|
|
|
is deprecated.
|
|
|
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
if len(criterion) == 1:
|
|
|
|
if criterion[0] is False:
|
|
|
|
if '_order_by' in self.__dict__:
|
|
|
|
self._order_by = False
|
|
|
|
return
|
|
|
|
if criterion[0] is None:
|
|
|
|
self._order_by = None
|
|
|
|
return
|
|
|
|
|
|
|
|
criterion = self._adapt_col_list(criterion)
|
|
|
|
|
|
|
|
if self._order_by is False or self._order_by is None:
|
|
|
|
self._order_by = criterion
|
|
|
|
else:
|
|
|
|
self._order_by = self._order_by + criterion
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
@_generative(_no_statement_condition, _no_limit_offset)
|
|
|
|
def group_by(self, *criterion):
|
2017-04-15 18:27:12 +02:00
|
|
|
"""apply one or more GROUP BY criterion to the query and return
|
|
|
|
the newly resulting :class:`.Query`
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
All existing GROUP BY settings can be suppressed by
|
|
|
|
passing ``None`` - this will suppress any GROUP BY configured
|
|
|
|
on mappers as well.
|
|
|
|
|
|
|
|
.. versionadded:: 1.1 GROUP BY can be cancelled by passing None,
|
|
|
|
in the same way as ORDER BY.
|
|
|
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
if len(criterion) == 1:
|
|
|
|
if criterion[0] is None:
|
|
|
|
self._group_by = False
|
|
|
|
return
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
criterion = list(chain(*[_orm_columns(c) for c in criterion]))
|
2010-05-07 19:33:49 +02:00
|
|
|
criterion = self._adapt_col_list(criterion)
|
|
|
|
|
|
|
|
if self._group_by is False:
|
|
|
|
self._group_by = criterion
|
|
|
|
else:
|
|
|
|
self._group_by = self._group_by + criterion
|
|
|
|
|
|
|
|
@_generative(_no_statement_condition, _no_limit_offset)
|
|
|
|
def having(self, criterion):
|
2017-04-15 18:27:12 +02:00
|
|
|
r"""apply a HAVING criterion to the query and return the
|
|
|
|
newly resulting :class:`.Query`.
|
|
|
|
|
|
|
|
:meth:`~.Query.having` is used in conjunction with
|
|
|
|
:meth:`~.Query.group_by`.
|
|
|
|
|
|
|
|
HAVING criterion makes it possible to use filters on aggregate
|
|
|
|
functions like COUNT, SUM, AVG, MAX, and MIN, eg.::
|
|
|
|
|
|
|
|
q = session.query(User.id).\
|
|
|
|
join(User.addresses).\
|
|
|
|
group_by(User.id).\
|
|
|
|
having(func.count(Address.id) > 2)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
"""
|
|
|
|
|
|
|
|
criterion = expression._expression_literal_as_text(criterion)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
if criterion is not None and \
|
|
|
|
not isinstance(criterion, sql.ClauseElement):
|
|
|
|
raise sa_exc.ArgumentError(
|
|
|
|
"having() argument must be of type "
|
|
|
|
"sqlalchemy.sql.ClauseElement or string")
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
criterion = self._adapt_clause(criterion, True, True)
|
|
|
|
|
|
|
|
if self._having is not None:
|
|
|
|
self._having = self._having & criterion
|
|
|
|
else:
|
|
|
|
self._having = criterion
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
def _set_op(self, expr_fn, *q):
|
|
|
|
return self._from_selectable(
|
|
|
|
expr_fn(*([self] + list(q)))
|
|
|
|
)._set_enable_single_crit(False)
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
def union(self, *q):
|
|
|
|
"""Produce a UNION of this Query against one or more queries.
|
|
|
|
|
|
|
|
e.g.::
|
|
|
|
|
|
|
|
q1 = sess.query(SomeClass).filter(SomeClass.foo=='bar')
|
|
|
|
q2 = sess.query(SomeClass).filter(SomeClass.bar=='foo')
|
|
|
|
|
|
|
|
q3 = q1.union(q2)
|
|
|
|
|
|
|
|
The method accepts multiple Query objects so as to control
|
|
|
|
the level of nesting. A series of ``union()`` calls such as::
|
|
|
|
|
|
|
|
x.union(y).union(z).all()
|
|
|
|
|
|
|
|
will nest on each ``union()``, and produces::
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
SELECT * FROM (SELECT * FROM (SELECT * FROM X UNION
|
|
|
|
SELECT * FROM y) UNION SELECT * FROM Z)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
Whereas::
|
|
|
|
|
|
|
|
x.union(y, z).all()
|
|
|
|
|
|
|
|
produces::
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
SELECT * FROM (SELECT * FROM X UNION SELECT * FROM y UNION
|
|
|
|
SELECT * FROM Z)
|
|
|
|
|
|
|
|
Note that many database backends do not allow ORDER BY to
|
|
|
|
be rendered on a query called within UNION, EXCEPT, etc.
|
|
|
|
To disable all ORDER BY clauses including those configured
|
|
|
|
on mappers, issue ``query.order_by(None)`` - the resulting
|
|
|
|
:class:`.Query` object will not render ORDER BY within
|
|
|
|
its SELECT statement.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
"""
|
2017-04-15 18:27:12 +02:00
|
|
|
return self._set_op(expression.union, *q)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
def union_all(self, *q):
|
|
|
|
"""Produce a UNION ALL of this Query against one or more queries.
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See
|
|
|
|
that method for usage examples.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
"""
|
2017-04-15 18:27:12 +02:00
|
|
|
return self._set_op(expression.union_all, *q)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
def intersect(self, *q):
|
|
|
|
"""Produce an INTERSECT of this Query against one or more queries.
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See
|
|
|
|
that method for usage examples.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
"""
|
2017-04-15 18:27:12 +02:00
|
|
|
return self._set_op(expression.intersect, *q)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
def intersect_all(self, *q):
|
|
|
|
"""Produce an INTERSECT ALL of this Query against one or more queries.
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See
|
|
|
|
that method for usage examples.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
"""
|
2017-04-15 18:27:12 +02:00
|
|
|
return self._set_op(expression.intersect_all, *q)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
def except_(self, *q):
|
|
|
|
"""Produce an EXCEPT of this Query against one or more queries.
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See
|
|
|
|
that method for usage examples.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
"""
|
2017-04-15 18:27:12 +02:00
|
|
|
return self._set_op(expression.except_, *q)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
def except_all(self, *q):
|
|
|
|
"""Produce an EXCEPT ALL of this Query against one or more queries.
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See
|
|
|
|
that method for usage examples.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
"""
|
2017-04-15 18:27:12 +02:00
|
|
|
return self._set_op(expression.except_all, *q)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
def join(self, *props, **kwargs):
|
2017-04-15 18:27:12 +02:00
|
|
|
r"""Create a SQL JOIN against this :class:`.Query` object's criterion
|
|
|
|
and apply generatively, returning the newly resulting :class:`.Query`.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
**Simple Relationship Joins**
|
|
|
|
|
|
|
|
Consider a mapping between two classes ``User`` and ``Address``,
|
|
|
|
with a relationship ``User.addresses`` representing a collection
|
|
|
|
of ``Address`` objects associated with each ``User``. The most
|
|
|
|
common usage of :meth:`~.Query.join` is to create a JOIN along this
|
|
|
|
relationship, using the ``User.addresses`` attribute as an indicator
|
|
|
|
for how this should occur::
|
|
|
|
|
|
|
|
q = session.query(User).join(User.addresses)
|
|
|
|
|
|
|
|
Where above, the call to :meth:`~.Query.join` along ``User.addresses``
|
|
|
|
will result in SQL equivalent to::
|
|
|
|
|
|
|
|
SELECT user.* FROM user JOIN address ON user.id = address.user_id
|
|
|
|
|
|
|
|
In the above example we refer to ``User.addresses`` as passed to
|
|
|
|
:meth:`~.Query.join` as the *on clause*, that is, it indicates
|
|
|
|
how the "ON" portion of the JOIN should be constructed. For a
|
|
|
|
single-entity query such as the one above (i.e. we start by selecting
|
|
|
|
only from ``User`` and nothing else), the relationship can also be
|
|
|
|
specified by its string name::
|
|
|
|
|
|
|
|
q = session.query(User).join("addresses")
|
|
|
|
|
|
|
|
:meth:`~.Query.join` can also accommodate multiple
|
|
|
|
"on clause" arguments to produce a chain of joins, such as below
|
|
|
|
where a join across four related entities is constructed::
|
|
|
|
|
|
|
|
q = session.query(User).join("orders", "items", "keywords")
|
|
|
|
|
|
|
|
The above would be shorthand for three separate calls to
|
|
|
|
:meth:`~.Query.join`, each using an explicit attribute to indicate
|
|
|
|
the source entity::
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
q = session.query(User).\
|
|
|
|
join(User.orders).\
|
|
|
|
join(Order.items).\
|
|
|
|
join(Item.keywords)
|
|
|
|
|
|
|
|
**Joins to a Target Entity or Selectable**
|
|
|
|
|
|
|
|
A second form of :meth:`~.Query.join` allows any mapped entity
|
|
|
|
or core selectable construct as a target. In this usage,
|
|
|
|
:meth:`~.Query.join` will attempt
|
|
|
|
to create a JOIN along the natural foreign key relationship between
|
|
|
|
two entities::
|
|
|
|
|
|
|
|
q = session.query(User).join(Address)
|
|
|
|
|
|
|
|
The above calling form of :meth:`~.Query.join` will raise an error if
|
|
|
|
either there are no foreign keys between the two entities, or if
|
|
|
|
there are multiple foreign key linkages between them. In the
|
|
|
|
above calling form, :meth:`~.Query.join` is called upon to
|
|
|
|
create the "on clause" automatically for us. The target can
|
|
|
|
be any mapped entity or selectable, such as a :class:`.Table`::
|
|
|
|
|
|
|
|
q = session.query(User).join(addresses_table)
|
|
|
|
|
|
|
|
**Joins to a Target with an ON Clause**
|
|
|
|
|
|
|
|
The third calling form allows both the target entity as well
|
|
|
|
as the ON clause to be passed explicitly. Suppose for
|
|
|
|
example we wanted to join to ``Address`` twice, using
|
|
|
|
an alias the second time. We use :func:`~sqlalchemy.orm.aliased`
|
|
|
|
to create a distinct alias of ``Address``, and join
|
|
|
|
to it using the ``target, onclause`` form, so that the
|
|
|
|
alias can be specified explicitly as the target along with
|
|
|
|
the relationship to instruct how the ON clause should proceed::
|
|
|
|
|
|
|
|
a_alias = aliased(Address)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
q = session.query(User).\
|
|
|
|
join(User.addresses).\
|
|
|
|
join(a_alias, User.addresses).\
|
|
|
|
filter(Address.email_address=='ed@foo.com').\
|
|
|
|
filter(a_alias.email_address=='ed@bar.com')
|
|
|
|
|
|
|
|
Where above, the generated SQL would be similar to::
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
SELECT user.* FROM user
|
|
|
|
JOIN address ON user.id = address.user_id
|
|
|
|
JOIN address AS address_1 ON user.id=address_1.user_id
|
|
|
|
WHERE address.email_address = :email_address_1
|
|
|
|
AND address_1.email_address = :email_address_2
|
|
|
|
|
|
|
|
The two-argument calling form of :meth:`~.Query.join`
|
|
|
|
also allows us to construct arbitrary joins with SQL-oriented
|
|
|
|
"on clause" expressions, not relying upon configured relationships
|
|
|
|
at all. Any SQL expression can be passed as the ON clause
|
|
|
|
when using the two-argument form, which should refer to the target
|
|
|
|
entity in some way as well as an applicable source entity::
|
|
|
|
|
|
|
|
q = session.query(User).join(Address, User.id==Address.user_id)
|
|
|
|
|
|
|
|
.. versionchanged:: 0.7
|
|
|
|
In SQLAlchemy 0.6 and earlier, the two argument form of
|
|
|
|
:meth:`~.Query.join` requires the usage of a tuple:
|
|
|
|
``query(User).join((Address, User.id==Address.user_id))``\ .
|
|
|
|
This calling form is accepted in 0.7 and further, though
|
|
|
|
is not necessary unless multiple join conditions are passed to
|
|
|
|
a single :meth:`~.Query.join` call, which itself is also not
|
|
|
|
generally necessary as it is now equivalent to multiple
|
|
|
|
calls (this wasn't always the case).
|
|
|
|
|
|
|
|
**Advanced Join Targeting and Adaption**
|
|
|
|
|
|
|
|
There is a lot of flexibility in what the "target" can be when using
|
|
|
|
:meth:`~.Query.join`. As noted previously, it also accepts
|
|
|
|
:class:`.Table` constructs and other selectables such as
|
|
|
|
:func:`.alias` and :func:`.select` constructs, with either the one
|
|
|
|
or two-argument forms::
|
|
|
|
|
|
|
|
addresses_q = select([Address.user_id]).\
|
|
|
|
where(Address.email_address.endswith("@bar.com")).\
|
|
|
|
alias()
|
|
|
|
|
|
|
|
q = session.query(User).\
|
|
|
|
join(addresses_q, addresses_q.c.user_id==User.id)
|
|
|
|
|
|
|
|
:meth:`~.Query.join` also features the ability to *adapt* a
|
|
|
|
:meth:`~sqlalchemy.orm.relationship` -driven ON clause to the target
|
|
|
|
selectable. Below we construct a JOIN from ``User`` to a subquery
|
|
|
|
against ``Address``, allowing the relationship denoted by
|
|
|
|
``User.addresses`` to *adapt* itself to the altered target::
|
|
|
|
|
|
|
|
address_subq = session.query(Address).\
|
|
|
|
filter(Address.email_address == 'ed@foo.com').\
|
|
|
|
subquery()
|
|
|
|
|
|
|
|
q = session.query(User).join(address_subq, User.addresses)
|
|
|
|
|
|
|
|
Producing SQL similar to::
|
|
|
|
|
|
|
|
SELECT user.* FROM user
|
|
|
|
JOIN (
|
|
|
|
SELECT address.id AS id,
|
|
|
|
address.user_id AS user_id,
|
|
|
|
address.email_address AS email_address
|
|
|
|
FROM address
|
|
|
|
WHERE address.email_address = :email_address_1
|
|
|
|
) AS anon_1 ON user.id = anon_1.user_id
|
|
|
|
|
|
|
|
The above form allows one to fall back onto an explicit ON
|
|
|
|
clause at any time::
|
|
|
|
|
|
|
|
q = session.query(User).\
|
|
|
|
join(address_subq, User.id==address_subq.c.user_id)
|
|
|
|
|
|
|
|
**Controlling what to Join From**
|
|
|
|
|
|
|
|
While :meth:`~.Query.join` exclusively deals with the "right"
|
|
|
|
side of the JOIN, we can also control the "left" side, in those
|
|
|
|
cases where it's needed, using :meth:`~.Query.select_from`.
|
|
|
|
Below we construct a query against ``Address`` but can still
|
|
|
|
make usage of ``User.addresses`` as our ON clause by instructing
|
|
|
|
the :class:`.Query` to select first from the ``User``
|
|
|
|
entity::
|
|
|
|
|
|
|
|
q = session.query(Address).select_from(User).\
|
|
|
|
join(User.addresses).\
|
|
|
|
filter(User.name == 'ed')
|
|
|
|
|
|
|
|
Which will produce SQL similar to::
|
|
|
|
|
|
|
|
SELECT address.* FROM user
|
|
|
|
JOIN address ON user.id=address.user_id
|
|
|
|
WHERE user.name = :name_1
|
|
|
|
|
|
|
|
**Constructing Aliases Anonymously**
|
|
|
|
|
|
|
|
:meth:`~.Query.join` can construct anonymous aliases
|
|
|
|
using the ``aliased=True`` flag. This feature is useful
|
|
|
|
when a query is being joined algorithmically, such as
|
|
|
|
when querying self-referentially to an arbitrary depth::
|
|
|
|
|
|
|
|
q = session.query(Node).\
|
|
|
|
join("children", "children", aliased=True)
|
|
|
|
|
|
|
|
When ``aliased=True`` is used, the actual "alias" construct
|
|
|
|
is not explicitly available. To work with it, methods such as
|
|
|
|
:meth:`.Query.filter` will adapt the incoming entity to
|
|
|
|
the last join point::
|
|
|
|
|
|
|
|
q = session.query(Node).\
|
|
|
|
join("children", "children", aliased=True).\
|
|
|
|
filter(Node.name == 'grandchild 1')
|
|
|
|
|
|
|
|
When using automatic aliasing, the ``from_joinpoint=True``
|
|
|
|
argument can allow a multi-node join to be broken into
|
|
|
|
multiple calls to :meth:`~.Query.join`, so that
|
|
|
|
each path along the way can be further filtered::
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
q = session.query(Node).\
|
|
|
|
join("children", aliased=True).\
|
|
|
|
filter(Node.name='child 1').\
|
|
|
|
join("children", aliased=True, from_joinpoint=True).\
|
|
|
|
filter(Node.name == 'grandchild 1')
|
|
|
|
|
|
|
|
The filtering aliases above can then be reset back to the
|
|
|
|
original ``Node`` entity using :meth:`~.Query.reset_joinpoint`::
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
q = session.query(Node).\
|
|
|
|
join("children", "children", aliased=True).\
|
|
|
|
filter(Node.name == 'grandchild 1').\
|
|
|
|
reset_joinpoint().\
|
|
|
|
filter(Node.name == 'parent 1)
|
|
|
|
|
|
|
|
For an example of ``aliased=True``, see the distribution
|
|
|
|
example :ref:`examples_xmlpersistence` which illustrates
|
|
|
|
an XPath-like query system using algorithmic joins.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
:param \*props: A collection of one or more join conditions,
|
|
|
|
each consisting of a relationship-bound attribute or string
|
|
|
|
relationship name representing an "on clause", or a single
|
|
|
|
target entity, or a tuple in the form of ``(target, onclause)``.
|
|
|
|
A special two-argument calling form of the form ``target, onclause``
|
|
|
|
is also accepted.
|
|
|
|
:param aliased=False: If True, indicate that the JOIN target should be
|
|
|
|
anonymously aliased. Subsequent calls to :meth:`~.Query.filter`
|
|
|
|
and similar will adapt the incoming criterion to the target
|
|
|
|
alias, until :meth:`~.Query.reset_joinpoint` is called.
|
|
|
|
:param isouter=False: If True, the join used will be a left outer join,
|
|
|
|
just as if the :meth:`.Query.outerjoin` method were called. This
|
|
|
|
flag is here to maintain consistency with the same flag as accepted
|
|
|
|
by :meth:`.FromClause.join` and other Core constructs.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
.. versionadded:: 1.0.0
|
|
|
|
|
|
|
|
:param full=False: render FULL OUTER JOIN; implies ``isouter``.
|
|
|
|
|
|
|
|
.. versionadded:: 1.1
|
|
|
|
|
|
|
|
:param from_joinpoint=False: When using ``aliased=True``, a setting
|
|
|
|
of True here will cause the join to be from the most recent
|
|
|
|
joined target, rather than starting back from the original
|
|
|
|
FROM clauses of the query.
|
|
|
|
|
|
|
|
.. seealso::
|
|
|
|
|
|
|
|
:ref:`ormtutorial_joins` in the ORM tutorial.
|
|
|
|
|
|
|
|
:ref:`inheritance_toplevel` for details on how
|
|
|
|
:meth:`~.Query.join` is used for inheritance relationships.
|
|
|
|
|
|
|
|
:func:`.orm.join` - a standalone ORM-level join function,
|
|
|
|
used internally by :meth:`.Query.join`, which in previous
|
|
|
|
SQLAlchemy versions was the primary ORM-level joining interface.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
"""
|
2017-04-15 18:27:12 +02:00
|
|
|
aliased, from_joinpoint, isouter, full = kwargs.pop('aliased', False),\
|
|
|
|
kwargs.pop('from_joinpoint', False),\
|
|
|
|
kwargs.pop('isouter', False),\
|
|
|
|
kwargs.pop('full', False)
|
2010-05-07 19:33:49 +02:00
|
|
|
if kwargs:
|
2017-04-15 18:27:12 +02:00
|
|
|
raise TypeError("unknown arguments: %s" %
|
|
|
|
', '.join(sorted(kwargs)))
|
|
|
|
return self._join(props,
|
|
|
|
outerjoin=isouter, full=full,
|
|
|
|
create_aliases=aliased,
|
|
|
|
from_joinpoint=from_joinpoint)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
def outerjoin(self, *props, **kwargs):
|
|
|
|
"""Create a left outer join against this ``Query`` object's criterion
|
2017-04-15 18:27:12 +02:00
|
|
|
and apply generatively, returning the newly resulting ``Query``.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
Usage is the same as the ``join()`` method.
|
|
|
|
|
|
|
|
"""
|
2017-04-15 18:27:12 +02:00
|
|
|
aliased, from_joinpoint, full = kwargs.pop('aliased', False), \
|
|
|
|
kwargs.pop('from_joinpoint', False), \
|
|
|
|
kwargs.pop('full', False)
|
2010-05-07 19:33:49 +02:00
|
|
|
if kwargs:
|
2017-04-15 18:27:12 +02:00
|
|
|
raise TypeError("unknown arguments: %s" %
|
|
|
|
', '.join(sorted(kwargs)))
|
|
|
|
return self._join(props,
|
|
|
|
outerjoin=True, full=full, create_aliases=aliased,
|
|
|
|
from_joinpoint=from_joinpoint)
|
|
|
|
|
|
|
|
def _update_joinpoint(self, jp):
|
|
|
|
self._joinpoint = jp
|
|
|
|
# copy backwards to the root of the _joinpath
|
|
|
|
# dict, so that no existing dict in the path is mutated
|
|
|
|
while 'prev' in jp:
|
|
|
|
f, prev = jp['prev']
|
|
|
|
prev = prev.copy()
|
|
|
|
prev[f] = jp
|
|
|
|
jp['prev'] = (f, prev)
|
|
|
|
jp = prev
|
|
|
|
self._joinpath = jp
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
@_generative(_no_statement_condition, _no_limit_offset)
|
2017-04-15 18:27:12 +02:00
|
|
|
def _join(self, keys, outerjoin, full, create_aliases, from_joinpoint):
|
|
|
|
"""consumes arguments from join() or outerjoin(), places them into a
|
|
|
|
consistent format with which to form the actual JOIN constructs.
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
"""
|
|
|
|
|
|
|
|
if not from_joinpoint:
|
|
|
|
self._reset_joinpoint()
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
if len(keys) == 2 and \
|
|
|
|
isinstance(keys[0], (expression.FromClause,
|
|
|
|
type, AliasedClass)) and \
|
|
|
|
isinstance(keys[1], (str, expression.ClauseElement,
|
|
|
|
interfaces.PropComparator)):
|
|
|
|
# detect 2-arg form of join and
|
|
|
|
# convert to a tuple.
|
|
|
|
keys = (keys,)
|
|
|
|
|
|
|
|
keylist = util.to_list(keys)
|
|
|
|
for idx, arg1 in enumerate(keylist):
|
2010-05-07 19:33:49 +02:00
|
|
|
if isinstance(arg1, tuple):
|
2017-04-15 18:27:12 +02:00
|
|
|
# "tuple" form of join, multiple
|
|
|
|
# tuples are accepted as well. The simpler
|
|
|
|
# "2-arg" form is preferred. May deprecate
|
|
|
|
# the "tuple" usage.
|
2010-05-07 19:33:49 +02:00
|
|
|
arg1, arg2 = arg1
|
|
|
|
else:
|
|
|
|
arg2 = None
|
|
|
|
|
|
|
|
# determine onclause/right_entity. there
|
|
|
|
# is a little bit of legacy behavior still at work here
|
|
|
|
# which means they might be in either order. may possibly
|
|
|
|
# lock this down to (right_entity, onclause) in 0.6.
|
2017-04-15 18:27:12 +02:00
|
|
|
if isinstance(
|
|
|
|
arg1, (interfaces.PropComparator, util.string_types)):
|
2010-05-07 19:33:49 +02:00
|
|
|
right_entity, onclause = arg2, arg1
|
|
|
|
else:
|
|
|
|
right_entity, onclause = arg1, arg2
|
|
|
|
|
|
|
|
left_entity = prop = None
|
2017-04-15 18:27:12 +02:00
|
|
|
|
|
|
|
if isinstance(onclause, interfaces.PropComparator):
|
|
|
|
of_type = getattr(onclause, '_of_type', None)
|
|
|
|
else:
|
|
|
|
of_type = None
|
|
|
|
|
|
|
|
if isinstance(onclause, util.string_types):
|
2010-05-07 19:33:49 +02:00
|
|
|
left_entity = self._joinpoint_zero()
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
descriptor = _entity_descriptor(left_entity, onclause)
|
2010-05-07 19:33:49 +02:00
|
|
|
onclause = descriptor
|
2017-04-15 18:27:12 +02:00
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
# check for q.join(Class.propname, from_joinpoint=True)
|
|
|
|
# and Class is that of the current joinpoint
|
2017-04-15 18:27:12 +02:00
|
|
|
elif from_joinpoint and \
|
|
|
|
isinstance(onclause, interfaces.PropComparator):
|
|
|
|
left_entity = onclause._parententity
|
|
|
|
|
|
|
|
info = inspect(self._joinpoint_zero())
|
2010-05-07 19:33:49 +02:00
|
|
|
left_mapper, left_selectable, left_is_aliased = \
|
2017-04-15 18:27:12 +02:00
|
|
|
getattr(info, 'mapper', None), \
|
|
|
|
info.selectable, \
|
|
|
|
getattr(info, 'is_aliased_class', None)
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
if left_mapper is left_entity:
|
|
|
|
left_entity = self._joinpoint_zero()
|
2017-04-15 18:27:12 +02:00
|
|
|
descriptor = _entity_descriptor(left_entity,
|
|
|
|
onclause.key)
|
2010-05-07 19:33:49 +02:00
|
|
|
onclause = descriptor
|
|
|
|
|
|
|
|
if isinstance(onclause, interfaces.PropComparator):
|
|
|
|
if right_entity is None:
|
|
|
|
if of_type:
|
|
|
|
right_entity = of_type
|
|
|
|
else:
|
|
|
|
right_entity = onclause.property.mapper
|
2017-04-15 18:27:12 +02:00
|
|
|
|
|
|
|
left_entity = onclause._parententity
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
prop = onclause.property
|
2017-04-15 18:27:12 +02:00
|
|
|
if not isinstance(onclause, attributes.QueryableAttribute):
|
2010-05-07 19:33:49 +02:00
|
|
|
onclause = prop
|
|
|
|
|
|
|
|
if not create_aliases:
|
|
|
|
# check for this path already present.
|
|
|
|
# don't render in that case.
|
2017-04-15 18:27:12 +02:00
|
|
|
edge = (left_entity, right_entity, prop.key)
|
|
|
|
if edge in self._joinpoint:
|
|
|
|
# The child's prev reference might be stale --
|
|
|
|
# it could point to a parent older than the
|
|
|
|
# current joinpoint. If this is the case,
|
|
|
|
# then we need to update it and then fix the
|
|
|
|
# tree's spine with _update_joinpoint. Copy
|
|
|
|
# and then mutate the child, which might be
|
|
|
|
# shared by a different query object.
|
|
|
|
jp = self._joinpoint[edge].copy()
|
|
|
|
jp['prev'] = (edge, self._joinpoint)
|
|
|
|
self._update_joinpoint(jp)
|
|
|
|
|
|
|
|
if idx == len(keylist) - 1:
|
|
|
|
util.warn(
|
|
|
|
"Pathed join target %s has already "
|
|
|
|
"been joined to; skipping" % prop)
|
2010-05-07 19:33:49 +02:00
|
|
|
continue
|
|
|
|
|
|
|
|
elif onclause is not None and right_entity is None:
|
|
|
|
# TODO: no coverage here
|
|
|
|
raise NotImplementedError("query.join(a==b) not supported.")
|
2017-04-15 18:27:12 +02:00
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
self._join_left_to_right(
|
2017-04-15 18:27:12 +02:00
|
|
|
left_entity,
|
|
|
|
right_entity, onclause,
|
|
|
|
outerjoin, full, create_aliases, prop)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
def _join_left_to_right(self, left, right,
|
|
|
|
onclause, outerjoin, full, create_aliases, prop):
|
2010-05-07 19:33:49 +02:00
|
|
|
"""append a JOIN to the query's from clause."""
|
2017-04-15 18:27:12 +02:00
|
|
|
|
|
|
|
self._polymorphic_adapters = self._polymorphic_adapters.copy()
|
|
|
|
|
|
|
|
if left is None:
|
|
|
|
if self._from_obj:
|
|
|
|
left = self._from_obj[0]
|
|
|
|
elif self._entities:
|
|
|
|
left = self._entities[0].entity_zero_or_selectable
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
if left is None:
|
2017-04-15 18:27:12 +02:00
|
|
|
if self._entities:
|
|
|
|
problem = "Don't know how to join from %s" % self._entities[0]
|
|
|
|
else:
|
|
|
|
problem = "No entities to join from"
|
|
|
|
|
|
|
|
raise sa_exc.InvalidRequestError(
|
|
|
|
"%s; please use "
|
|
|
|
"select_from() to establish the left "
|
|
|
|
"entity/selectable of this join" % problem)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
if left is right and \
|
|
|
|
not create_aliases:
|
|
|
|
raise sa_exc.InvalidRequestError(
|
2017-04-15 18:27:12 +02:00
|
|
|
"Can't construct a join from %s to %s, they "
|
|
|
|
"are the same entity" %
|
|
|
|
(left, right))
|
|
|
|
|
|
|
|
l_info = inspect(left)
|
|
|
|
r_info = inspect(right)
|
|
|
|
|
|
|
|
overlap = False
|
|
|
|
if not create_aliases:
|
|
|
|
right_mapper = getattr(r_info, "mapper", None)
|
|
|
|
# if the target is a joined inheritance mapping,
|
|
|
|
# be more liberal about auto-aliasing.
|
|
|
|
if right_mapper and (
|
|
|
|
right_mapper.with_polymorphic or
|
|
|
|
isinstance(right_mapper.mapped_table, expression.Join)
|
|
|
|
):
|
|
|
|
for from_obj in self._from_obj or [l_info.selectable]:
|
|
|
|
if sql_util.selectables_overlap(
|
|
|
|
l_info.selectable, from_obj) and \
|
|
|
|
sql_util.selectables_overlap(
|
|
|
|
from_obj, r_info.selectable):
|
|
|
|
overlap = True
|
|
|
|
break
|
|
|
|
|
|
|
|
if (overlap or not create_aliases) and \
|
|
|
|
l_info.selectable is r_info.selectable:
|
|
|
|
raise sa_exc.InvalidRequestError(
|
|
|
|
"Can't join table/selectable '%s' to itself" %
|
|
|
|
l_info.selectable)
|
|
|
|
|
|
|
|
right, onclause = self._prepare_right_side(
|
|
|
|
r_info, right, onclause,
|
|
|
|
create_aliases,
|
|
|
|
prop, overlap)
|
|
|
|
|
|
|
|
# if joining on a MapperProperty path,
|
|
|
|
# track the path to prevent redundant joins
|
|
|
|
if not create_aliases and prop:
|
|
|
|
self._update_joinpoint({
|
|
|
|
'_joinpoint_entity': right,
|
|
|
|
'prev': ((left, right, prop.key), self._joinpoint)
|
|
|
|
})
|
|
|
|
else:
|
|
|
|
self._joinpoint = {'_joinpoint_entity': right}
|
|
|
|
|
|
|
|
self._join_to_left(l_info, left, right, onclause, outerjoin, full)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
def _prepare_right_side(self, r_info, right, onclause, create_aliases,
|
|
|
|
prop, overlap):
|
|
|
|
info = r_info
|
|
|
|
|
|
|
|
right_mapper, right_selectable, right_is_aliased = \
|
|
|
|
getattr(info, 'mapper', None), \
|
|
|
|
info.selectable, \
|
|
|
|
getattr(info, 'is_aliased_class', False)
|
|
|
|
|
|
|
|
if right_mapper:
|
|
|
|
self._join_entities += (info, )
|
|
|
|
|
|
|
|
if right_mapper and prop and \
|
|
|
|
not right_mapper.common_parent(prop.mapper):
|
2010-05-07 19:33:49 +02:00
|
|
|
raise sa_exc.InvalidRequestError(
|
2017-04-15 18:27:12 +02:00
|
|
|
"Join target %s does not correspond to "
|
|
|
|
"the right side of join condition %s" % (right, onclause)
|
2010-05-07 19:33:49 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
if not right_mapper and prop:
|
|
|
|
right_mapper = prop.mapper
|
|
|
|
|
|
|
|
need_adapter = False
|
|
|
|
|
|
|
|
if right_mapper and right is right_selectable:
|
2017-04-15 18:27:12 +02:00
|
|
|
if not right_selectable.is_derived_from(
|
|
|
|
right_mapper.mapped_table):
|
2010-05-07 19:33:49 +02:00
|
|
|
raise sa_exc.InvalidRequestError(
|
|
|
|
"Selectable '%s' is not derived from '%s'" %
|
2017-04-15 18:27:12 +02:00
|
|
|
(right_selectable.description,
|
|
|
|
right_mapper.mapped_table.description))
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
if isinstance(right_selectable, expression.SelectBase):
|
|
|
|
# TODO: this isn't even covered now!
|
2010-05-07 19:33:49 +02:00
|
|
|
right_selectable = right_selectable.alias()
|
2017-04-15 18:27:12 +02:00
|
|
|
need_adapter = True
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
right = aliased(right_mapper, right_selectable)
|
|
|
|
|
|
|
|
aliased_entity = right_mapper and \
|
2017-04-15 18:27:12 +02:00
|
|
|
not right_is_aliased and \
|
|
|
|
(
|
|
|
|
right_mapper.with_polymorphic and isinstance(
|
|
|
|
right_mapper._with_polymorphic_selectable,
|
|
|
|
expression.Alias)
|
|
|
|
or
|
|
|
|
overlap # test for overlap:
|
|
|
|
# orm/inheritance/relationships.py
|
|
|
|
# SelfReferentialM2MTest
|
|
|
|
)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
if not need_adapter and (create_aliases or aliased_entity):
|
2017-04-15 18:27:12 +02:00
|
|
|
right = aliased(right, flat=True)
|
2010-05-07 19:33:49 +02:00
|
|
|
need_adapter = True
|
|
|
|
|
|
|
|
# if an alias() of the right side was generated here,
|
|
|
|
# apply an adapter to all subsequent filter() calls
|
|
|
|
# until reset_joinpoint() is called.
|
|
|
|
if need_adapter:
|
2017-04-15 18:27:12 +02:00
|
|
|
self._filter_aliases = ORMAdapter(
|
|
|
|
right,
|
|
|
|
equivalents=right_mapper and
|
|
|
|
right_mapper._equivalent_columns or {},
|
|
|
|
chain_to=self._filter_aliases)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
# if the onclause is a ClauseElement, adapt it with any
|
2010-05-07 19:33:49 +02:00
|
|
|
# adapters that are in place right now
|
|
|
|
if isinstance(onclause, expression.ClauseElement):
|
|
|
|
onclause = self._adapt_clause(onclause, True, True)
|
2017-04-15 18:27:12 +02:00
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
# if an alias() on the right side was generated,
|
|
|
|
# which is intended to wrap a the right side in a subquery,
|
|
|
|
# ensure that columns retrieved from this target in the result
|
|
|
|
# set are also adapted.
|
2017-04-15 18:27:12 +02:00
|
|
|
if aliased_entity and not create_aliases:
|
|
|
|
self._mapper_loads_polymorphically_with(
|
|
|
|
right_mapper,
|
|
|
|
ORMAdapter(
|
|
|
|
right,
|
|
|
|
equivalents=right_mapper._equivalent_columns
|
|
|
|
)
|
|
|
|
)
|
|
|
|
|
|
|
|
return right, onclause
|
|
|
|
|
|
|
|
def _join_to_left(self, l_info, left, right, onclause, outerjoin, full):
|
|
|
|
info = l_info
|
|
|
|
left_mapper = getattr(info, 'mapper', None)
|
|
|
|
left_selectable = info.selectable
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
if self._from_obj:
|
|
|
|
replace_clause_index, clause = sql_util.find_join_source(
|
2017-04-15 18:27:12 +02:00
|
|
|
self._from_obj,
|
|
|
|
left_selectable)
|
2010-05-07 19:33:49 +02:00
|
|
|
if clause is not None:
|
2017-04-15 18:27:12 +02:00
|
|
|
try:
|
|
|
|
clause = orm_join(clause,
|
|
|
|
right,
|
|
|
|
onclause, isouter=outerjoin, full=full)
|
|
|
|
except sa_exc.ArgumentError as ae:
|
|
|
|
raise sa_exc.InvalidRequestError(
|
|
|
|
"Could not find a FROM clause to join from. "
|
|
|
|
"Tried joining to %s, but got: %s" % (right, ae))
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
self._from_obj = \
|
2017-04-15 18:27:12 +02:00
|
|
|
self._from_obj[:replace_clause_index] + \
|
|
|
|
(clause, ) + \
|
|
|
|
self._from_obj[replace_clause_index + 1:]
|
2010-05-07 19:33:49 +02:00
|
|
|
return
|
|
|
|
|
|
|
|
if left_mapper:
|
|
|
|
for ent in self._entities:
|
|
|
|
if ent.corresponds_to(left):
|
|
|
|
clause = ent.selectable
|
|
|
|
break
|
|
|
|
else:
|
|
|
|
clause = left
|
|
|
|
else:
|
2017-04-15 18:27:12 +02:00
|
|
|
clause = left_selectable
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
assert clause is not None
|
|
|
|
try:
|
|
|
|
clause = orm_join(
|
|
|
|
clause, right, onclause, isouter=outerjoin, full=full)
|
|
|
|
except sa_exc.ArgumentError as ae:
|
|
|
|
raise sa_exc.InvalidRequestError(
|
|
|
|
"Could not find a FROM clause to join from. "
|
|
|
|
"Tried joining to %s, but got: %s" % (right, ae))
|
2010-05-07 19:33:49 +02:00
|
|
|
self._from_obj = self._from_obj + (clause,)
|
|
|
|
|
|
|
|
def _reset_joinpoint(self):
|
|
|
|
self._joinpoint = self._joinpath
|
|
|
|
self._filter_aliases = None
|
|
|
|
|
|
|
|
@_generative(_no_statement_condition)
|
|
|
|
def reset_joinpoint(self):
|
2017-04-15 18:27:12 +02:00
|
|
|
"""Return a new :class:`.Query`, where the "join point" has
|
|
|
|
been reset back to the base FROM entities of the query.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
This method is usually used in conjunction with the
|
|
|
|
``aliased=True`` feature of the :meth:`~.Query.join`
|
|
|
|
method. See the example in :meth:`~.Query.join` for how
|
|
|
|
this is used.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
"""
|
|
|
|
self._reset_joinpoint()
|
|
|
|
|
|
|
|
@_generative(_no_clauseelement_condition)
|
|
|
|
def select_from(self, *from_obj):
|
2017-04-15 18:27:12 +02:00
|
|
|
r"""Set the FROM clause of this :class:`.Query` explicitly.
|
|
|
|
|
|
|
|
:meth:`.Query.select_from` is often used in conjunction with
|
|
|
|
:meth:`.Query.join` in order to control which entity is selected
|
|
|
|
from on the "left" side of the join.
|
|
|
|
|
|
|
|
The entity or selectable object here effectively replaces the
|
|
|
|
"left edge" of any calls to :meth:`~.Query.join`, when no
|
|
|
|
joinpoint is otherwise established - usually, the default "join
|
|
|
|
point" is the leftmost entity in the :class:`~.Query` object's
|
|
|
|
list of entities to be selected.
|
|
|
|
|
|
|
|
A typical example::
|
|
|
|
|
|
|
|
q = session.query(Address).select_from(User).\
|
|
|
|
join(User.addresses).\
|
|
|
|
filter(User.name == 'ed')
|
|
|
|
|
|
|
|
Which produces SQL equivalent to::
|
|
|
|
|
|
|
|
SELECT address.* FROM user
|
|
|
|
JOIN address ON user.id=address.user_id
|
|
|
|
WHERE user.name = :name_1
|
|
|
|
|
|
|
|
:param \*from_obj: collection of one or more entities to apply
|
|
|
|
to the FROM clause. Entities can be mapped classes,
|
|
|
|
:class:`.AliasedClass` objects, :class:`.Mapper` objects
|
|
|
|
as well as core :class:`.FromClause` elements like subqueries.
|
|
|
|
|
|
|
|
.. versionchanged:: 0.9
|
|
|
|
This method no longer applies the given FROM object
|
|
|
|
to be the selectable from which matching entities
|
|
|
|
select from; the :meth:`.select_entity_from` method
|
|
|
|
now accomplishes this. See that method for a description
|
|
|
|
of this behavior.
|
|
|
|
|
|
|
|
.. seealso::
|
|
|
|
|
|
|
|
:meth:`~.Query.join`
|
|
|
|
|
|
|
|
:meth:`.Query.select_entity_from`
|
|
|
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
self._set_select_from(from_obj, False)
|
|
|
|
|
|
|
|
@_generative(_no_clauseelement_condition)
|
|
|
|
def select_entity_from(self, from_obj):
|
|
|
|
r"""Set the FROM clause of this :class:`.Query` to a
|
|
|
|
core selectable, applying it as a replacement FROM clause
|
|
|
|
for corresponding mapped entities.
|
|
|
|
|
|
|
|
The :meth:`.Query.select_entity_from` method supplies an alternative
|
|
|
|
approach to the use case of applying an :func:`.aliased` construct
|
|
|
|
explicitly throughout a query. Instead of referring to the
|
|
|
|
:func:`.aliased` construct explicitly,
|
|
|
|
:meth:`.Query.select_entity_from` automatically *adapts* all occurences
|
|
|
|
of the entity to the target selectable.
|
|
|
|
|
|
|
|
Given a case for :func:`.aliased` such as selecting ``User``
|
|
|
|
objects from a SELECT statement::
|
|
|
|
|
|
|
|
select_stmt = select([User]).where(User.id == 7)
|
|
|
|
user_alias = aliased(User, select_stmt)
|
|
|
|
|
|
|
|
q = session.query(user_alias).\
|
|
|
|
filter(user_alias.name == 'ed')
|
|
|
|
|
|
|
|
Above, we apply the ``user_alias`` object explicitly throughout the
|
|
|
|
query. When it's not feasible for ``user_alias`` to be referenced
|
|
|
|
explicitly in many places, :meth:`.Query.select_entity_from` may be
|
|
|
|
used at the start of the query to adapt the existing ``User`` entity::
|
|
|
|
|
|
|
|
q = session.query(User).\
|
|
|
|
select_entity_from(select_stmt).\
|
|
|
|
filter(User.name == 'ed')
|
|
|
|
|
|
|
|
Above, the generated SQL will show that the ``User`` entity is
|
|
|
|
adapted to our statement, even in the case of the WHERE clause:
|
|
|
|
|
|
|
|
.. sourcecode:: sql
|
|
|
|
|
|
|
|
SELECT anon_1.id AS anon_1_id, anon_1.name AS anon_1_name
|
|
|
|
FROM (SELECT "user".id AS id, "user".name AS name
|
|
|
|
FROM "user"
|
|
|
|
WHERE "user".id = :id_1) AS anon_1
|
|
|
|
WHERE anon_1.name = :name_1
|
|
|
|
|
|
|
|
The :meth:`.Query.select_entity_from` method is similar to the
|
|
|
|
:meth:`.Query.select_from` method, in that it sets the FROM clause
|
|
|
|
of the query. The difference is that it additionally applies
|
|
|
|
adaptation to the other parts of the query that refer to the
|
|
|
|
primary entity. If above we had used :meth:`.Query.select_from`
|
|
|
|
instead, the SQL generated would have been:
|
|
|
|
|
|
|
|
.. sourcecode:: sql
|
|
|
|
|
|
|
|
-- uses plain select_from(), not select_entity_from()
|
|
|
|
SELECT "user".id AS user_id, "user".name AS user_name
|
|
|
|
FROM "user", (SELECT "user".id AS id, "user".name AS name
|
|
|
|
FROM "user"
|
|
|
|
WHERE "user".id = :id_1) AS anon_1
|
|
|
|
WHERE "user".name = :name_1
|
|
|
|
|
|
|
|
To supply textual SQL to the :meth:`.Query.select_entity_from` method,
|
|
|
|
we can make use of the :func:`.text` construct. However, the
|
|
|
|
:func:`.text` construct needs to be aligned with the columns of our
|
|
|
|
entity, which is achieved by making use of the
|
|
|
|
:meth:`.TextClause.columns` method::
|
|
|
|
|
|
|
|
text_stmt = text("select id, name from user").columns(
|
|
|
|
User.id, User.name)
|
|
|
|
q = session.query(User).select_entity_from(text_stmt)
|
|
|
|
|
|
|
|
:meth:`.Query.select_entity_from` itself accepts an :func:`.aliased`
|
|
|
|
object, so that the special options of :func:`.aliased` such as
|
|
|
|
:paramref:`.aliased.adapt_on_names` may be used within the
|
|
|
|
scope of the :meth:`.Query.select_entity_from` method's adaptation
|
|
|
|
services. Suppose
|
|
|
|
a view ``user_view`` also returns rows from ``user``. If
|
|
|
|
we reflect this view into a :class:`.Table`, this view has no
|
|
|
|
relationship to the :class:`.Table` to which we are mapped, however
|
|
|
|
we can use name matching to select from it::
|
|
|
|
|
|
|
|
user_view = Table('user_view', metadata,
|
|
|
|
autoload_with=engine)
|
|
|
|
user_view_alias = aliased(
|
|
|
|
User, user_view, adapt_on_names=True)
|
|
|
|
q = session.query(User).\
|
|
|
|
select_entity_from(user_view_alias).\
|
|
|
|
order_by(User.name)
|
|
|
|
|
|
|
|
.. versionchanged:: 1.1.7 The :meth:`.Query.select_entity_from`
|
|
|
|
method now accepts an :func:`.aliased` object as an alternative
|
|
|
|
to a :class:`.FromClause` object.
|
|
|
|
|
|
|
|
:param from_obj: a :class:`.FromClause` object that will replace
|
|
|
|
the FROM clause of this :class:`.Query`. It also may be an instance
|
|
|
|
of :func:`.aliased`.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
.. seealso::
|
|
|
|
|
|
|
|
:meth:`.Query.select_from`
|
|
|
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
self._set_select_from([from_obj], True)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
def __getitem__(self, item):
|
|
|
|
if isinstance(item, slice):
|
|
|
|
start, stop, step = util.decode_slice(item)
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
if isinstance(stop, int) and \
|
|
|
|
isinstance(start, int) and \
|
|
|
|
stop - start <= 0:
|
2010-05-07 19:33:49 +02:00
|
|
|
return []
|
|
|
|
|
|
|
|
# perhaps we should execute a count() here so that we
|
|
|
|
# can still use LIMIT/OFFSET ?
|
|
|
|
elif (isinstance(start, int) and start < 0) \
|
2017-04-15 18:27:12 +02:00
|
|
|
or (isinstance(stop, int) and stop < 0):
|
2010-05-07 19:33:49 +02:00
|
|
|
return list(self)[item]
|
|
|
|
|
|
|
|
res = self.slice(start, stop)
|
|
|
|
if step is not None:
|
|
|
|
return list(res)[None:None:item.step]
|
|
|
|
else:
|
|
|
|
return list(res)
|
|
|
|
else:
|
2017-04-15 18:27:12 +02:00
|
|
|
if item == -1:
|
|
|
|
return list(self)[-1]
|
|
|
|
else:
|
|
|
|
return list(self[item:item + 1])[0]
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
@_generative(_no_statement_condition)
|
|
|
|
def slice(self, start, stop):
|
2017-04-15 18:27:12 +02:00
|
|
|
"""Computes the "slice" of the :class:`.Query` represented by
|
|
|
|
the given indices and returns the resulting :class:`.Query`.
|
|
|
|
|
|
|
|
The start and stop indices behave like the argument to Python's
|
|
|
|
built-in :func:`range` function. This method provides an
|
|
|
|
alternative to using ``LIMIT``/``OFFSET`` to get a slice of the
|
|
|
|
query.
|
|
|
|
|
|
|
|
For example, ::
|
|
|
|
|
|
|
|
session.query(User).order_by(User.id).slice(1, 3)
|
|
|
|
|
|
|
|
renders as
|
|
|
|
|
|
|
|
.. sourcecode:: sql
|
|
|
|
|
|
|
|
SELECT users.id AS users_id,
|
|
|
|
users.name AS users_name
|
|
|
|
FROM users ORDER BY users.id
|
|
|
|
LIMIT ? OFFSET ?
|
|
|
|
(2, 1)
|
|
|
|
|
|
|
|
.. seealso::
|
|
|
|
|
|
|
|
:meth:`.Query.limit`
|
|
|
|
|
|
|
|
:meth:`.Query.offset`
|
|
|
|
|
|
|
|
"""
|
2010-05-07 19:33:49 +02:00
|
|
|
if start is not None and stop is not None:
|
|
|
|
self._offset = (self._offset or 0) + start
|
|
|
|
self._limit = stop - start
|
|
|
|
elif start is None and stop is not None:
|
|
|
|
self._limit = stop
|
|
|
|
elif start is not None and stop is None:
|
|
|
|
self._offset = (self._offset or 0) + start
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
if self._offset == 0:
|
|
|
|
self._offset = None
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
@_generative(_no_statement_condition)
|
|
|
|
def limit(self, limit):
|
|
|
|
"""Apply a ``LIMIT`` to the query and return the newly resulting
|
|
|
|
``Query``.
|
|
|
|
|
|
|
|
"""
|
|
|
|
self._limit = limit
|
|
|
|
|
|
|
|
@_generative(_no_statement_condition)
|
|
|
|
def offset(self, offset):
|
|
|
|
"""Apply an ``OFFSET`` to the query and return the newly resulting
|
|
|
|
``Query``.
|
|
|
|
|
|
|
|
"""
|
|
|
|
self._offset = offset
|
|
|
|
|
|
|
|
@_generative(_no_statement_condition)
|
2017-04-15 18:27:12 +02:00
|
|
|
def distinct(self, *criterion):
|
|
|
|
r"""Apply a ``DISTINCT`` to the query and return the newly resulting
|
|
|
|
``Query``.
|
|
|
|
|
|
|
|
|
|
|
|
.. note::
|
|
|
|
|
|
|
|
The :meth:`.distinct` call includes logic that will automatically
|
|
|
|
add columns from the ORDER BY of the query to the columns
|
|
|
|
clause of the SELECT statement, to satisfy the common need
|
|
|
|
of the database backend that ORDER BY columns be part of the
|
|
|
|
SELECT list when DISTINCT is used. These columns *are not*
|
|
|
|
added to the list of columns actually fetched by the
|
|
|
|
:class:`.Query`, however, so would not affect results.
|
|
|
|
The columns are passed through when using the
|
|
|
|
:attr:`.Query.statement` accessor, however.
|
|
|
|
|
|
|
|
:param \*expr: optional column expressions. When present,
|
|
|
|
the PostgreSQL dialect will render a ``DISTINCT ON (<expressions>>)``
|
|
|
|
construct.
|
|
|
|
|
|
|
|
"""
|
|
|
|
if not criterion:
|
|
|
|
self._distinct = True
|
|
|
|
else:
|
|
|
|
criterion = self._adapt_col_list(criterion)
|
|
|
|
if isinstance(self._distinct, list):
|
|
|
|
self._distinct += criterion
|
|
|
|
else:
|
|
|
|
self._distinct = criterion
|
|
|
|
|
|
|
|
@_generative()
|
|
|
|
def prefix_with(self, *prefixes):
|
|
|
|
r"""Apply the prefixes to the query and return the newly resulting
|
|
|
|
``Query``.
|
|
|
|
|
|
|
|
:param \*prefixes: optional prefixes, typically strings,
|
|
|
|
not using any commas. In particular is useful for MySQL keywords.
|
|
|
|
|
|
|
|
e.g.::
|
|
|
|
|
|
|
|
query = sess.query(User.name).\
|
|
|
|
prefix_with('HIGH_PRIORITY').\
|
|
|
|
prefix_with('SQL_SMALL_RESULT', 'ALL')
|
|
|
|
|
|
|
|
Would render::
|
|
|
|
|
|
|
|
SELECT HIGH_PRIORITY SQL_SMALL_RESULT ALL users.name AS users_name
|
|
|
|
FROM users
|
|
|
|
|
|
|
|
.. versionadded:: 0.7.7
|
|
|
|
|
|
|
|
.. seealso::
|
|
|
|
|
|
|
|
:meth:`.HasPrefixes.prefix_with`
|
|
|
|
|
|
|
|
"""
|
|
|
|
if self._prefixes:
|
|
|
|
self._prefixes += prefixes
|
|
|
|
else:
|
|
|
|
self._prefixes = prefixes
|
|
|
|
|
|
|
|
@_generative()
|
|
|
|
def suffix_with(self, *suffixes):
|
|
|
|
r"""Apply the suffix to the query and return the newly resulting
|
2010-05-07 19:33:49 +02:00
|
|
|
``Query``.
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
:param \*suffixes: optional suffixes, typically strings,
|
|
|
|
not using any commas.
|
|
|
|
|
|
|
|
.. versionadded:: 1.0.0
|
|
|
|
|
|
|
|
.. seealso::
|
|
|
|
|
|
|
|
:meth:`.Query.prefix_with`
|
|
|
|
|
|
|
|
:meth:`.HasSuffixes.suffix_with`
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
"""
|
2017-04-15 18:27:12 +02:00
|
|
|
if self._suffixes:
|
|
|
|
self._suffixes += suffixes
|
|
|
|
else:
|
|
|
|
self._suffixes = suffixes
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
def all(self):
|
|
|
|
"""Return the results represented by this ``Query`` as a list.
|
|
|
|
|
|
|
|
This results in an execution of the underlying query.
|
|
|
|
|
|
|
|
"""
|
|
|
|
return list(self)
|
|
|
|
|
|
|
|
@_generative(_no_clauseelement_condition)
|
|
|
|
def from_statement(self, statement):
|
|
|
|
"""Execute the given SELECT statement and return results.
|
|
|
|
|
|
|
|
This method bypasses all internal statement compilation, and the
|
|
|
|
statement is executed without modification.
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
The statement is typically either a :func:`~.expression.text`
|
|
|
|
or :func:`~.expression.select` construct, and should return the set
|
|
|
|
of columns
|
|
|
|
appropriate to the entity class represented by this :class:`.Query`.
|
|
|
|
|
|
|
|
.. seealso::
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
:ref:`orm_tutorial_literal_sql` - usage examples in the
|
|
|
|
ORM tutorial
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
"""
|
2017-04-15 18:27:12 +02:00
|
|
|
statement = expression._expression_literal_as_text(statement)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
if not isinstance(statement,
|
|
|
|
(expression.TextClause,
|
|
|
|
expression.SelectBase)):
|
|
|
|
raise sa_exc.ArgumentError(
|
|
|
|
"from_statement accepts text(), select(), "
|
|
|
|
"and union() objects only.")
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
self._statement = statement
|
|
|
|
|
|
|
|
def first(self):
|
2017-04-15 18:27:12 +02:00
|
|
|
"""Return the first result of this ``Query`` or
|
2010-05-07 19:33:49 +02:00
|
|
|
None if the result doesn't contain any row.
|
2017-04-15 18:27:12 +02:00
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
first() applies a limit of one within the generated SQL, so that
|
2017-04-15 18:27:12 +02:00
|
|
|
only one primary entity row is generated on the server side
|
|
|
|
(note this may consist of multiple result rows if join-loaded
|
2010-05-07 19:33:49 +02:00
|
|
|
collections are present).
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
Calling :meth:`.Query.first` results in an execution of the underlying query.
|
|
|
|
|
|
|
|
.. seealso::
|
|
|
|
|
|
|
|
:meth:`.Query.one`
|
|
|
|
|
|
|
|
:meth:`.Query.one_or_none`
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
"""
|
|
|
|
if self._statement is not None:
|
|
|
|
ret = list(self)[0:1]
|
|
|
|
else:
|
|
|
|
ret = list(self[0:1])
|
|
|
|
if len(ret) > 0:
|
|
|
|
return ret[0]
|
|
|
|
else:
|
|
|
|
return None
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
def one_or_none(self):
|
|
|
|
"""Return at most one result or raise an exception.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
Returns ``None`` if the query selects
|
|
|
|
no rows. Raises ``sqlalchemy.orm.exc.MultipleResultsFound``
|
2010-05-07 19:33:49 +02:00
|
|
|
if multiple object identities are returned, or if multiple
|
2017-04-15 18:27:12 +02:00
|
|
|
rows are returned for a query that returns only scalar values
|
|
|
|
as opposed to full identity-mapped entities.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
Calling :meth:`.Query.one_or_none` results in an execution of the
|
|
|
|
underlying query.
|
|
|
|
|
|
|
|
.. versionadded:: 1.0.9
|
|
|
|
|
|
|
|
Added :meth:`.Query.one_or_none`
|
|
|
|
|
|
|
|
.. seealso::
|
|
|
|
|
|
|
|
:meth:`.Query.first`
|
|
|
|
|
|
|
|
:meth:`.Query.one`
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
"""
|
|
|
|
ret = list(self)
|
2017-04-15 18:27:12 +02:00
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
l = len(ret)
|
|
|
|
if l == 1:
|
|
|
|
return ret[0]
|
|
|
|
elif l == 0:
|
2017-04-15 18:27:12 +02:00
|
|
|
return None
|
2010-05-07 19:33:49 +02:00
|
|
|
else:
|
2017-04-15 18:27:12 +02:00
|
|
|
raise orm_exc.MultipleResultsFound(
|
|
|
|
"Multiple rows were found for one_or_none()")
|
|
|
|
|
|
|
|
def one(self):
|
|
|
|
"""Return exactly one result or raise an exception.
|
|
|
|
|
|
|
|
Raises ``sqlalchemy.orm.exc.NoResultFound`` if the query selects
|
|
|
|
no rows. Raises ``sqlalchemy.orm.exc.MultipleResultsFound``
|
|
|
|
if multiple object identities are returned, or if multiple
|
|
|
|
rows are returned for a query that returns only scalar values
|
|
|
|
as opposed to full identity-mapped entities.
|
|
|
|
|
|
|
|
Calling :meth:`.one` results in an execution of the underlying query.
|
|
|
|
|
|
|
|
.. seealso::
|
|
|
|
|
|
|
|
:meth:`.Query.first`
|
|
|
|
|
|
|
|
:meth:`.Query.one_or_none`
|
|
|
|
|
|
|
|
"""
|
|
|
|
try:
|
|
|
|
ret = self.one_or_none()
|
|
|
|
except orm_exc.MultipleResultsFound:
|
2010-05-07 19:33:49 +02:00
|
|
|
raise orm_exc.MultipleResultsFound(
|
|
|
|
"Multiple rows were found for one()")
|
2017-04-15 18:27:12 +02:00
|
|
|
else:
|
|
|
|
if ret is None:
|
|
|
|
raise orm_exc.NoResultFound("No row was found for one()")
|
|
|
|
return ret
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
def scalar(self):
|
|
|
|
"""Return the first element of the first result or None
|
|
|
|
if no rows present. If multiple rows are returned,
|
|
|
|
raises MultipleResultsFound.
|
|
|
|
|
|
|
|
>>> session.query(Item).scalar()
|
|
|
|
<Item>
|
|
|
|
>>> session.query(Item.id).scalar()
|
|
|
|
1
|
|
|
|
>>> session.query(Item.id).filter(Item.id < 0).scalar()
|
|
|
|
None
|
|
|
|
>>> session.query(Item.id, Item.name).scalar()
|
|
|
|
1
|
|
|
|
>>> session.query(func.count(Parent.id)).scalar()
|
|
|
|
20
|
|
|
|
|
|
|
|
This results in an execution of the underlying query.
|
|
|
|
|
|
|
|
"""
|
|
|
|
try:
|
|
|
|
ret = self.one()
|
|
|
|
if not isinstance(ret, tuple):
|
|
|
|
return ret
|
|
|
|
return ret[0]
|
|
|
|
except orm_exc.NoResultFound:
|
|
|
|
return None
|
|
|
|
|
|
|
|
def __iter__(self):
|
|
|
|
context = self._compile_context()
|
|
|
|
context.statement.use_labels = True
|
|
|
|
if self._autoflush and not self._populate_existing:
|
|
|
|
self.session._autoflush()
|
|
|
|
return self._execute_and_instances(context)
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
def __str__(self):
|
|
|
|
context = self._compile_context()
|
|
|
|
try:
|
|
|
|
bind = self._get_bind_args(
|
|
|
|
context, self.session.get_bind) if self.session else None
|
|
|
|
except sa_exc.UnboundExecutionError:
|
|
|
|
bind = None
|
|
|
|
return str(context.statement.compile(bind))
|
|
|
|
|
|
|
|
def _connection_from_session(self, **kw):
|
|
|
|
conn = self.session.connection(**kw)
|
|
|
|
if self._execution_options:
|
|
|
|
conn = conn.execution_options(**self._execution_options)
|
|
|
|
return conn
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
def _execute_and_instances(self, querycontext):
|
2017-04-15 18:27:12 +02:00
|
|
|
conn = self._get_bind_args(
|
|
|
|
querycontext,
|
|
|
|
self._connection_from_session,
|
|
|
|
close_with_result=True)
|
|
|
|
|
|
|
|
result = conn.execute(querycontext.statement, self._params)
|
|
|
|
return loading.instances(querycontext.query, result, querycontext)
|
|
|
|
|
|
|
|
def _get_bind_args(self, querycontext, fn, **kw):
|
|
|
|
return fn(
|
|
|
|
mapper=self._bind_mapper(),
|
|
|
|
clause=querycontext.statement,
|
|
|
|
**kw
|
|
|
|
)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
@property
|
|
|
|
def column_descriptions(self):
|
|
|
|
"""Return metadata about the columns which would be
|
|
|
|
returned by this :class:`.Query`.
|
|
|
|
|
|
|
|
Format is a list of dictionaries::
|
|
|
|
|
|
|
|
user_alias = aliased(User, name='user2')
|
|
|
|
q = sess.query(User, User.id, user_alias)
|
|
|
|
|
|
|
|
# this expression:
|
|
|
|
q.column_descriptions
|
|
|
|
|
|
|
|
# would return:
|
|
|
|
[
|
|
|
|
{
|
|
|
|
'name':'User',
|
|
|
|
'type':User,
|
|
|
|
'aliased':False,
|
|
|
|
'expr':User,
|
|
|
|
'entity': User
|
|
|
|
},
|
|
|
|
{
|
|
|
|
'name':'id',
|
|
|
|
'type':Integer(),
|
|
|
|
'aliased':False,
|
|
|
|
'expr':User.id,
|
|
|
|
'entity': User
|
|
|
|
},
|
|
|
|
{
|
|
|
|
'name':'user2',
|
|
|
|
'type':User,
|
|
|
|
'aliased':True,
|
|
|
|
'expr':user_alias,
|
|
|
|
'entity': user_alias
|
|
|
|
}
|
|
|
|
]
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
"""
|
|
|
|
|
|
|
|
return [
|
|
|
|
{
|
|
|
|
'name': ent._label_name,
|
|
|
|
'type': ent.type,
|
|
|
|
'aliased': getattr(insp_ent, 'is_aliased_class', False),
|
|
|
|
'expr': ent.expr,
|
|
|
|
'entity':
|
|
|
|
getattr(insp_ent, "entity", None)
|
|
|
|
if ent.entity_zero is not None
|
|
|
|
and not insp_ent.is_clause_element
|
|
|
|
else None
|
|
|
|
}
|
|
|
|
for ent, insp_ent in [
|
|
|
|
(
|
|
|
|
_ent,
|
|
|
|
(inspect(_ent.entity_zero)
|
|
|
|
if _ent.entity_zero is not None else None)
|
|
|
|
)
|
|
|
|
for _ent in self._entities
|
|
|
|
]
|
|
|
|
]
|
|
|
|
|
|
|
|
def instances(self, cursor, __context=None):
|
|
|
|
"""Given a ResultProxy cursor as returned by connection.execute(),
|
|
|
|
return an ORM result as an iterator.
|
|
|
|
|
|
|
|
e.g.::
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
result = engine.execute("select * from users")
|
|
|
|
for u in session.query(User).instances(result):
|
|
|
|
print u
|
|
|
|
"""
|
|
|
|
context = __context
|
|
|
|
if context is None:
|
|
|
|
context = QueryContext(self)
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
return loading.instances(self, cursor, context)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
def merge_result(self, iterator, load=True):
|
|
|
|
"""Merge a result into this :class:`.Query` object's Session.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
Given an iterator returned by a :class:`.Query` of the same structure
|
|
|
|
as this one, return an identical iterator of results, with all mapped
|
|
|
|
instances merged into the session using :meth:`.Session.merge`. This
|
|
|
|
is an optimized method which will merge all mapped instances,
|
|
|
|
preserving the structure of the result rows and unmapped columns with
|
|
|
|
less method overhead than that of calling :meth:`.Session.merge`
|
|
|
|
explicitly for each value.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
The structure of the results is determined based on the column list of
|
|
|
|
this :class:`.Query` - if these do not correspond, unchecked errors
|
|
|
|
will occur.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
The 'load' argument is the same as that of :meth:`.Session.merge`.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
For an example of how :meth:`~.Query.merge_result` is used, see
|
|
|
|
the source code for the example :ref:`examples_caching`, where
|
|
|
|
:meth:`~.Query.merge_result` is used to efficiently restore state
|
|
|
|
from a cache back into a target :class:`.Session`.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
"""
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
return loading.merge_result(self, iterator, load)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
@property
|
|
|
|
def _select_args(self):
|
|
|
|
return {
|
|
|
|
'limit': self._limit,
|
|
|
|
'offset': self._offset,
|
|
|
|
'distinct': self._distinct,
|
|
|
|
'prefixes': self._prefixes,
|
|
|
|
'suffixes': self._suffixes,
|
|
|
|
'group_by': self._group_by or None,
|
|
|
|
'having': self._having
|
|
|
|
}
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
@property
|
|
|
|
def _should_nest_selectable(self):
|
|
|
|
kwargs = self._select_args
|
|
|
|
return (kwargs.get('limit') is not None or
|
|
|
|
kwargs.get('offset') is not None or
|
|
|
|
kwargs.get('distinct', False))
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
def exists(self):
|
|
|
|
"""A convenience method that turns a query into an EXISTS subquery
|
|
|
|
of the form EXISTS (SELECT 1 FROM ... WHERE ...).
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
e.g.::
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
q = session.query(User).filter(User.name == 'fred')
|
|
|
|
session.query(q.exists())
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
Producing SQL similar to::
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
SELECT EXISTS (
|
|
|
|
SELECT 1 FROM users WHERE users.name = :name_1
|
|
|
|
) AS anon_1
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
The EXISTS construct is usually used in the WHERE clause::
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
session.query(User.id).filter(q.exists()).scalar()
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
Note that some databases such as SQL Server don't allow an
|
|
|
|
EXISTS expression to be present in the columns clause of a
|
|
|
|
SELECT. To select a simple boolean value based on the exists
|
|
|
|
as a WHERE, use :func:`.literal`::
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
from sqlalchemy import literal
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
session.query(literal(True)).filter(q.exists()).scalar()
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
.. versionadded:: 0.8.1
|
|
|
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
# .add_columns() for the case that we are a query().select_from(X),
|
|
|
|
# so that ".statement" can be produced (#2995) but also without
|
|
|
|
# omitting the FROM clause from a query(X) (#2818);
|
|
|
|
# .with_only_columns() after we have a core select() so that
|
|
|
|
# we get just "SELECT 1" without any entities.
|
|
|
|
return sql.exists(self.add_columns('1').with_labels().
|
|
|
|
statement.with_only_columns([1]))
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
def count(self):
|
2017-04-15 18:27:12 +02:00
|
|
|
r"""Return a count of rows this Query would return.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
This generates the SQL for this Query as follows::
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
SELECT count(1) AS count_1 FROM (
|
|
|
|
SELECT <rest of query follows...>
|
|
|
|
) AS anon_1
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
.. versionchanged:: 0.7
|
|
|
|
The above scheme is newly refined as of 0.7b3.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
For fine grained control over specific columns
|
|
|
|
to count, to skip the usage of a subquery or
|
|
|
|
otherwise control of the FROM clause,
|
|
|
|
or to use other aggregate functions,
|
|
|
|
use :attr:`~sqlalchemy.sql.expression.func`
|
|
|
|
expressions in conjunction
|
|
|
|
with :meth:`~.Session.query`, i.e.::
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
from sqlalchemy import func
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
# count User records, without
|
|
|
|
# using a subquery.
|
|
|
|
session.query(func.count(User.id))
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
# return count of user "id" grouped
|
|
|
|
# by "name"
|
|
|
|
session.query(func.count(User.id)).\
|
|
|
|
group_by(User.name)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
from sqlalchemy import distinct
|
|
|
|
|
|
|
|
# count distinct "name" values
|
|
|
|
session.query(func.count(distinct(User.name)))
|
|
|
|
|
|
|
|
"""
|
|
|
|
col = sql.func.count(sql.literal_column('*'))
|
|
|
|
return self.from_self(col).scalar()
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
def delete(self, synchronize_session='evaluate'):
|
2017-04-15 18:27:12 +02:00
|
|
|
r"""Perform a bulk delete query.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
Deletes rows matched by this query from the database.
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
E.g.::
|
|
|
|
|
|
|
|
sess.query(User).filter(User.age == 25).\
|
|
|
|
delete(synchronize_session=False)
|
|
|
|
|
|
|
|
sess.query(User).filter(User.age == 25).\
|
|
|
|
delete(synchronize_session='evaluate')
|
|
|
|
|
|
|
|
.. warning:: The :meth:`.Query.delete` method is a "bulk" operation,
|
|
|
|
which bypasses ORM unit-of-work automation in favor of greater
|
|
|
|
performance. **Please read all caveats and warnings below.**
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
:param synchronize_session: chooses the strategy for the removal of
|
|
|
|
matched objects from the session. Valid values are:
|
2017-04-15 18:27:12 +02:00
|
|
|
|
|
|
|
``False`` - don't synchronize the session. This option is the most
|
2010-05-07 19:33:49 +02:00
|
|
|
efficient and is reliable once the session is expired, which
|
|
|
|
typically occurs after a commit(), or explicitly using
|
|
|
|
expire_all(). Before the expiration, objects may still remain in
|
|
|
|
the session which were in fact deleted which can lead to confusing
|
|
|
|
results if they are accessed via get() or already loaded
|
|
|
|
collections.
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
``'fetch'`` - performs a select query before the delete to find
|
2010-05-07 19:33:49 +02:00
|
|
|
objects that are matched by the delete query and need to be
|
|
|
|
removed from the session. Matched objects are removed from the
|
|
|
|
session.
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
``'evaluate'`` - Evaluate the query's criteria in Python straight
|
|
|
|
on the objects in the session. If evaluation of the criteria isn't
|
|
|
|
implemented, an error is raised.
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
The expression evaluator currently doesn't account for differing
|
|
|
|
string collations between the database and Python.
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
:return: the count of rows matched as returned by the database's
|
|
|
|
"row count" feature.
|
|
|
|
|
|
|
|
.. warning:: **Additional Caveats for bulk query deletes**
|
|
|
|
|
|
|
|
* This method does **not work for joined
|
|
|
|
inheritance mappings**, since the **multiple table
|
|
|
|
deletes are not supported by SQL** as well as that the
|
|
|
|
**join condition of an inheritance mapper is not
|
|
|
|
automatically rendered**. Care must be taken in any
|
|
|
|
multiple-table delete to first accommodate via some other means
|
|
|
|
how the related table will be deleted, as well as to
|
|
|
|
explicitly include the joining
|
|
|
|
condition between those tables, even in mappings where
|
|
|
|
this is normally automatic. E.g. if a class ``Engineer``
|
|
|
|
subclasses ``Employee``, a DELETE against the ``Employee``
|
|
|
|
table would look like::
|
|
|
|
|
|
|
|
session.query(Engineer).\
|
|
|
|
filter(Engineer.id == Employee.id).\
|
|
|
|
filter(Employee.name == 'dilbert').\
|
|
|
|
delete()
|
|
|
|
|
|
|
|
However the above SQL will not delete from the Engineer table,
|
|
|
|
unless an ON DELETE CASCADE rule is established in the database
|
|
|
|
to handle it.
|
|
|
|
|
|
|
|
Short story, **do not use this method for joined inheritance
|
|
|
|
mappings unless you have taken the additional steps to make
|
|
|
|
this feasible**.
|
|
|
|
|
|
|
|
* The polymorphic identity WHERE criteria is **not** included
|
|
|
|
for single- or
|
|
|
|
joined- table updates - this must be added **manually** even
|
|
|
|
for single table inheritance.
|
|
|
|
|
|
|
|
* The method does **not** offer in-Python cascading of
|
|
|
|
relationships - it is assumed that ON DELETE CASCADE/SET
|
|
|
|
NULL/etc. is configured for any foreign key references
|
|
|
|
which require it, otherwise the database may emit an
|
|
|
|
integrity violation if foreign key references are being
|
|
|
|
enforced.
|
|
|
|
|
|
|
|
After the DELETE, dependent objects in the
|
|
|
|
:class:`.Session` which were impacted by an ON DELETE
|
|
|
|
may not contain the current state, or may have been
|
|
|
|
deleted. This issue is resolved once the
|
|
|
|
:class:`.Session` is expired, which normally occurs upon
|
|
|
|
:meth:`.Session.commit` or can be forced by using
|
|
|
|
:meth:`.Session.expire_all`. Accessing an expired
|
|
|
|
object whose row has been deleted will invoke a SELECT
|
|
|
|
to locate the row; when the row is not found, an
|
|
|
|
:class:`~sqlalchemy.orm.exc.ObjectDeletedError` is
|
|
|
|
raised.
|
|
|
|
|
|
|
|
* The ``'fetch'`` strategy results in an additional
|
|
|
|
SELECT statement emitted and will significantly reduce
|
|
|
|
performance.
|
|
|
|
|
|
|
|
* The ``'evaluate'`` strategy performs a scan of
|
|
|
|
all matching objects within the :class:`.Session`; if the
|
|
|
|
contents of the :class:`.Session` are expired, such as
|
|
|
|
via a proceeding :meth:`.Session.commit` call, **this will
|
|
|
|
result in SELECT queries emitted for every matching object**.
|
|
|
|
|
|
|
|
* The :meth:`.MapperEvents.before_delete` and
|
|
|
|
:meth:`.MapperEvents.after_delete`
|
|
|
|
events **are not invoked** from this method. Instead, the
|
|
|
|
:meth:`.SessionEvents.after_bulk_delete` method is provided to
|
|
|
|
act upon a mass DELETE of entity rows.
|
|
|
|
|
|
|
|
.. seealso::
|
|
|
|
|
|
|
|
:meth:`.Query.update`
|
|
|
|
|
|
|
|
:ref:`inserts_and_updates` - Core SQL tutorial
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
"""
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
delete_op = persistence.BulkDelete.factory(
|
|
|
|
self, synchronize_session)
|
|
|
|
delete_op.exec_()
|
|
|
|
return delete_op.rowcount
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
def update(self, values, synchronize_session='evaluate', update_args=None):
|
|
|
|
r"""Perform a bulk update query.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
Updates rows matched by this query in the database.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
E.g.::
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
sess.query(User).filter(User.age == 25).\
|
|
|
|
update({User.age: User.age - 10}, synchronize_session=False)
|
|
|
|
|
|
|
|
sess.query(User).filter(User.age == 25).\
|
|
|
|
update({"age": User.age - 10}, synchronize_session='evaluate')
|
|
|
|
|
|
|
|
|
|
|
|
.. warning:: The :meth:`.Query.update` method is a "bulk" operation,
|
|
|
|
which bypasses ORM unit-of-work automation in favor of greater
|
|
|
|
performance. **Please read all caveats and warnings below.**
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
:param values: a dictionary with attributes names, or alternatively
|
|
|
|
mapped attributes or SQL expressions, as keys, and literal
|
|
|
|
values or sql expressions as values. If :ref:`parameter-ordered
|
|
|
|
mode <updates_order_parameters>` is desired, the values can be
|
|
|
|
passed as a list of 2-tuples;
|
|
|
|
this requires that the :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order`
|
|
|
|
flag is passed to the :paramref:`.Query.update.update_args` dictionary
|
|
|
|
as well.
|
|
|
|
|
|
|
|
.. versionchanged:: 1.0.0 - string names in the values dictionary
|
|
|
|
are now resolved against the mapped entity; previously, these
|
|
|
|
strings were passed as literal column names with no mapper-level
|
|
|
|
translation.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
:param synchronize_session: chooses the strategy to update the
|
2017-04-15 18:27:12 +02:00
|
|
|
attributes on objects in the session. Valid values are:
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
``False`` - don't synchronize the session. This option is the most
|
2010-05-07 19:33:49 +02:00
|
|
|
efficient and is reliable once the session is expired, which
|
|
|
|
typically occurs after a commit(), or explicitly using
|
|
|
|
expire_all(). Before the expiration, updated objects may still
|
|
|
|
remain in the session with stale values on their attributes, which
|
|
|
|
can lead to confusing results.
|
2017-04-15 18:27:12 +02:00
|
|
|
|
|
|
|
``'fetch'`` - performs a select query before the update to find
|
2010-05-07 19:33:49 +02:00
|
|
|
objects that are matched by the update query. The updated
|
|
|
|
attributes are expired on matched objects.
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
``'evaluate'`` - Evaluate the Query's criteria in Python straight
|
|
|
|
on the objects in the session. If evaluation of the criteria isn't
|
2010-05-07 19:33:49 +02:00
|
|
|
implemented, an exception is raised.
|
|
|
|
|
|
|
|
The expression evaluator currently doesn't account for differing
|
|
|
|
string collations between the database and Python.
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
:param update_args: Optional dictionary, if present will be passed
|
|
|
|
to the underlying :func:`.update` construct as the ``**kw`` for
|
|
|
|
the object. May be used to pass dialect-specific arguments such
|
|
|
|
as ``mysql_limit``, as well as other special arguments such as
|
|
|
|
:paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order`.
|
|
|
|
|
|
|
|
.. versionadded:: 1.0.0
|
|
|
|
|
|
|
|
:return: the count of rows matched as returned by the database's
|
|
|
|
"row count" feature.
|
|
|
|
|
|
|
|
.. warning:: **Additional Caveats for bulk query updates**
|
|
|
|
|
|
|
|
* The method does **not** offer in-Python cascading of
|
|
|
|
relationships - it is assumed that ON UPDATE CASCADE is
|
|
|
|
configured for any foreign key references which require
|
|
|
|
it, otherwise the database may emit an integrity
|
|
|
|
violation if foreign key references are being enforced.
|
|
|
|
|
|
|
|
After the UPDATE, dependent objects in the
|
|
|
|
:class:`.Session` which were impacted by an ON UPDATE
|
|
|
|
CASCADE may not contain the current state; this issue is
|
|
|
|
resolved once the :class:`.Session` is expired, which
|
|
|
|
normally occurs upon :meth:`.Session.commit` or can be
|
|
|
|
forced by using :meth:`.Session.expire_all`.
|
|
|
|
|
|
|
|
* The ``'fetch'`` strategy results in an additional
|
|
|
|
SELECT statement emitted and will significantly reduce
|
|
|
|
performance.
|
|
|
|
|
|
|
|
* The ``'evaluate'`` strategy performs a scan of
|
|
|
|
all matching objects within the :class:`.Session`; if the
|
|
|
|
contents of the :class:`.Session` are expired, such as
|
|
|
|
via a proceeding :meth:`.Session.commit` call, **this will
|
|
|
|
result in SELECT queries emitted for every matching object**.
|
|
|
|
|
|
|
|
* The method supports multiple table updates, as detailed
|
|
|
|
in :ref:`multi_table_updates`, and this behavior does
|
|
|
|
extend to support updates of joined-inheritance and
|
|
|
|
other multiple table mappings. However, the **join
|
|
|
|
condition of an inheritance mapper is not
|
|
|
|
automatically rendered**. Care must be taken in any
|
|
|
|
multiple-table update to explicitly include the joining
|
|
|
|
condition between those tables, even in mappings where
|
|
|
|
this is normally automatic. E.g. if a class ``Engineer``
|
|
|
|
subclasses ``Employee``, an UPDATE of the ``Engineer``
|
|
|
|
local table using criteria against the ``Employee``
|
|
|
|
local table might look like::
|
|
|
|
|
|
|
|
session.query(Engineer).\
|
|
|
|
filter(Engineer.id == Employee.id).\
|
|
|
|
filter(Employee.name == 'dilbert').\
|
|
|
|
update({"engineer_type": "programmer"})
|
|
|
|
|
|
|
|
* The polymorphic identity WHERE criteria is **not** included
|
|
|
|
for single- or
|
|
|
|
joined- table updates - this must be added **manually**, even
|
|
|
|
for single table inheritance.
|
|
|
|
|
|
|
|
* The :meth:`.MapperEvents.before_update` and
|
|
|
|
:meth:`.MapperEvents.after_update`
|
|
|
|
events **are not invoked from this method**. Instead, the
|
|
|
|
:meth:`.SessionEvents.after_bulk_update` method is provided to
|
|
|
|
act upon a mass UPDATE of entity rows.
|
|
|
|
|
|
|
|
.. seealso::
|
|
|
|
|
|
|
|
:meth:`.Query.delete`
|
|
|
|
|
|
|
|
:ref:`inserts_and_updates` - Core SQL tutorial
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
"""
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
update_args = update_args or {}
|
|
|
|
update_op = persistence.BulkUpdate.factory(
|
|
|
|
self, synchronize_session, values, update_args)
|
|
|
|
update_op.exec_()
|
|
|
|
return update_op.rowcount
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
def _compile_context(self, labels=True):
|
2017-04-15 18:27:12 +02:00
|
|
|
if self.dispatch.before_compile:
|
|
|
|
for fn in self.dispatch.before_compile:
|
|
|
|
new_query = fn(self)
|
|
|
|
if new_query is not None:
|
|
|
|
self = new_query
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
context = QueryContext(self)
|
|
|
|
|
|
|
|
if context.statement is not None:
|
|
|
|
return context
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
context.labels = labels
|
|
|
|
|
|
|
|
context._for_update_arg = self._for_update_arg
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
for entity in self._entities:
|
|
|
|
entity.setup_context(self, context)
|
2017-04-15 18:27:12 +02:00
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
for rec in context.create_eager_joins:
|
|
|
|
strategy = rec[0]
|
|
|
|
strategy(*rec[1:])
|
|
|
|
|
|
|
|
if context.from_clause:
|
2017-04-15 18:27:12 +02:00
|
|
|
# "load from explicit FROMs" mode,
|
|
|
|
# i.e. when select_from() or join() is used
|
|
|
|
context.froms = list(context.from_clause)
|
|
|
|
# else "load from discrete FROMs" mode,
|
|
|
|
# i.e. when each _MappedEntity has its own FROM
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
if self._enable_single_crit:
|
|
|
|
self._adjust_for_single_inheritance(context)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
if not context.primary_columns:
|
|
|
|
if self._only_load_props:
|
|
|
|
raise sa_exc.InvalidRequestError(
|
2017-04-15 18:27:12 +02:00
|
|
|
"No column-based properties specified for "
|
|
|
|
"refresh operation. Use session.expire() "
|
|
|
|
"to reload collections and related items.")
|
2010-05-07 19:33:49 +02:00
|
|
|
else:
|
|
|
|
raise sa_exc.InvalidRequestError(
|
2017-04-15 18:27:12 +02:00
|
|
|
"Query contains no columns with which to "
|
|
|
|
"SELECT from.")
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
if context.multi_row_eager_loaders and self._should_nest_selectable:
|
2017-04-15 18:27:12 +02:00
|
|
|
context.statement = self._compound_eager_statement(context)
|
|
|
|
else:
|
|
|
|
context.statement = self._simple_statement(context)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
return context
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
def _compound_eager_statement(self, context):
|
|
|
|
# for eager joins present and LIMIT/OFFSET/DISTINCT,
|
|
|
|
# wrap the query inside a select,
|
|
|
|
# then append eager joins onto that
|
|
|
|
|
|
|
|
if context.order_by:
|
|
|
|
order_by_col_expr = \
|
|
|
|
sql_util.expand_column_list_from_order_by(
|
|
|
|
context.primary_columns,
|
|
|
|
context.order_by
|
|
|
|
)
|
|
|
|
else:
|
|
|
|
context.order_by = None
|
|
|
|
order_by_col_expr = []
|
|
|
|
|
|
|
|
inner = sql.select(
|
|
|
|
context.primary_columns + order_by_col_expr,
|
|
|
|
context.whereclause,
|
|
|
|
from_obj=context.froms,
|
|
|
|
use_labels=context.labels,
|
|
|
|
# TODO: this order_by is only needed if
|
|
|
|
# LIMIT/OFFSET is present in self._select_args,
|
|
|
|
# else the application on the outside is enough
|
|
|
|
order_by=context.order_by,
|
|
|
|
**self._select_args
|
|
|
|
)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
for hint in self._with_hints:
|
|
|
|
inner = inner.with_hint(*hint)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
if self._correlate:
|
|
|
|
inner = inner.correlate(*self._correlate)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
inner = inner.alias()
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
equivs = self.__all_equivs()
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
context.adapter = sql_util.ColumnAdapter(inner, equivs)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
statement = sql.select(
|
|
|
|
[inner] + context.secondary_columns,
|
|
|
|
use_labels=context.labels)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
statement._for_update_arg = context._for_update_arg
|
|
|
|
|
|
|
|
from_clause = inner
|
|
|
|
for eager_join in context.eager_joins.values():
|
|
|
|
# EagerLoader places a 'stop_on' attribute on the join,
|
|
|
|
# giving us a marker as to where the "splice point" of
|
|
|
|
# the join should be
|
|
|
|
from_clause = sql_util.splice_joins(
|
|
|
|
from_clause,
|
|
|
|
eager_join, eager_join.stop_on)
|
|
|
|
|
|
|
|
statement.append_from(from_clause)
|
|
|
|
|
|
|
|
if context.order_by:
|
|
|
|
statement.append_order_by(
|
|
|
|
*context.adapter.copy_and_process(
|
|
|
|
context.order_by
|
|
|
|
)
|
|
|
|
)
|
|
|
|
|
|
|
|
statement.append_order_by(*context.eager_order_by)
|
|
|
|
return statement
|
|
|
|
|
|
|
|
def _simple_statement(self, context):
|
|
|
|
if not context.order_by:
|
|
|
|
context.order_by = None
|
|
|
|
|
|
|
|
if self._distinct is True and context.order_by:
|
|
|
|
context.primary_columns += \
|
|
|
|
sql_util.expand_column_list_from_order_by(
|
|
|
|
context.primary_columns,
|
|
|
|
context.order_by
|
|
|
|
)
|
|
|
|
context.froms += tuple(context.eager_joins.values())
|
|
|
|
|
|
|
|
statement = sql.select(
|
|
|
|
context.primary_columns +
|
|
|
|
context.secondary_columns,
|
|
|
|
context.whereclause,
|
|
|
|
from_obj=context.froms,
|
|
|
|
use_labels=context.labels,
|
|
|
|
order_by=context.order_by,
|
|
|
|
**self._select_args
|
|
|
|
)
|
|
|
|
statement._for_update_arg = context._for_update_arg
|
|
|
|
|
|
|
|
for hint in self._with_hints:
|
|
|
|
statement = statement.with_hint(*hint)
|
|
|
|
|
|
|
|
if self._correlate:
|
|
|
|
statement = statement.correlate(*self._correlate)
|
|
|
|
|
|
|
|
if context.eager_order_by:
|
|
|
|
statement.append_order_by(*context.eager_order_by)
|
|
|
|
return statement
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
def _adjust_for_single_inheritance(self, context):
|
|
|
|
"""Apply single-table-inheritance filtering.
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
For all distinct single-table-inheritance mappers represented in
|
|
|
|
the columns clause of this query, add criterion to the WHERE
|
|
|
|
clause of the given QueryContext such that only the appropriate
|
|
|
|
subtypes are selected from the total results.
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
"""
|
2017-04-15 18:27:12 +02:00
|
|
|
for (ext_info, adapter) in set(self._mapper_adapter_map.values()):
|
|
|
|
if ext_info in self._join_entities:
|
|
|
|
continue
|
|
|
|
single_crit = ext_info.mapper._single_table_criterion
|
2010-05-07 19:33:49 +02:00
|
|
|
if single_crit is not None:
|
|
|
|
if adapter:
|
|
|
|
single_crit = adapter.traverse(single_crit)
|
|
|
|
single_crit = self._adapt_clause(single_crit, False, False)
|
2017-04-15 18:27:12 +02:00
|
|
|
context.whereclause = sql.and_(
|
|
|
|
sql.True_._ifnone(context.whereclause),
|
|
|
|
single_crit)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
|
|
|
|
from ..sql.selectable import ForUpdateArg
|
|
|
|
|
|
|
|
|
|
|
|
class LockmodeArg(ForUpdateArg):
|
|
|
|
@classmethod
|
|
|
|
def parse_legacy_query(self, mode):
|
|
|
|
if mode in (None, False):
|
|
|
|
return None
|
|
|
|
|
|
|
|
if mode == "read":
|
|
|
|
read = True
|
|
|
|
nowait = False
|
|
|
|
elif mode == "update":
|
|
|
|
read = nowait = False
|
|
|
|
elif mode == "update_nowait":
|
|
|
|
nowait = True
|
|
|
|
read = False
|
|
|
|
else:
|
|
|
|
raise sa_exc.ArgumentError(
|
|
|
|
"Unknown with_lockmode argument: %r" % mode)
|
|
|
|
|
|
|
|
return LockmodeArg(read=read, nowait=nowait)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
|
|
|
|
class _QueryEntity(object):
|
|
|
|
"""represent an entity column returned within a Query result."""
|
|
|
|
|
|
|
|
def __new__(cls, *args, **kwargs):
|
|
|
|
if cls is _QueryEntity:
|
|
|
|
entity = args[1]
|
2017-04-15 18:27:12 +02:00
|
|
|
if not isinstance(entity, util.string_types) and \
|
|
|
|
_is_mapped_class(entity):
|
2010-05-07 19:33:49 +02:00
|
|
|
cls = _MapperEntity
|
2017-04-15 18:27:12 +02:00
|
|
|
elif isinstance(entity, Bundle):
|
|
|
|
cls = _BundleEntity
|
2010-05-07 19:33:49 +02:00
|
|
|
else:
|
|
|
|
cls = _ColumnEntity
|
|
|
|
return object.__new__(cls)
|
|
|
|
|
|
|
|
def _clone(self):
|
|
|
|
q = self.__class__.__new__(self.__class__)
|
|
|
|
q.__dict__ = self.__dict__.copy()
|
|
|
|
return q
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
class _MapperEntity(_QueryEntity):
|
|
|
|
"""mapper/class/AliasedClass entity"""
|
|
|
|
|
|
|
|
def __init__(self, query, entity):
|
2017-04-15 18:27:12 +02:00
|
|
|
if not query._primary_entity:
|
|
|
|
query._primary_entity = self
|
2010-05-07 19:33:49 +02:00
|
|
|
query._entities.append(self)
|
2017-04-15 18:27:12 +02:00
|
|
|
query._has_mapper_entities = True
|
2010-05-07 19:33:49 +02:00
|
|
|
self.entities = [entity]
|
2017-04-15 18:27:12 +02:00
|
|
|
self.expr = entity
|
|
|
|
|
|
|
|
supports_single_entity = True
|
|
|
|
|
|
|
|
use_id_for_hash = True
|
|
|
|
|
|
|
|
def setup_entity(self, ext_info, aliased_adapter):
|
|
|
|
self.mapper = ext_info.mapper
|
|
|
|
self.aliased_adapter = aliased_adapter
|
|
|
|
self.selectable = ext_info.selectable
|
|
|
|
self.is_aliased_class = ext_info.is_aliased_class
|
|
|
|
self._with_polymorphic = ext_info.with_polymorphic_mappers
|
|
|
|
self._polymorphic_discriminator = \
|
|
|
|
ext_info.polymorphic_on
|
|
|
|
self.entity_zero = ext_info
|
|
|
|
if ext_info.is_aliased_class:
|
|
|
|
self._label_name = self.entity_zero.name
|
2010-05-07 19:33:49 +02:00
|
|
|
else:
|
2017-04-15 18:27:12 +02:00
|
|
|
self._label_name = self.mapper.class_.__name__
|
|
|
|
self.path = self.entity_zero._path_registry
|
|
|
|
|
|
|
|
def set_with_polymorphic(self, query, cls_or_mappers,
|
|
|
|
selectable, polymorphic_on):
|
|
|
|
"""Receive an update from a call to query.with_polymorphic().
|
|
|
|
|
|
|
|
Note the newer style of using a free standing with_polymporphic()
|
|
|
|
construct doesn't make use of this method.
|
|
|
|
|
|
|
|
|
|
|
|
"""
|
|
|
|
if self.is_aliased_class:
|
|
|
|
# TODO: invalidrequest ?
|
|
|
|
raise NotImplementedError(
|
|
|
|
"Can't use with_polymorphic() against "
|
|
|
|
"an Aliased object"
|
|
|
|
)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
if cls_or_mappers is None:
|
|
|
|
query._reset_polymorphic_adapter(self.mapper)
|
|
|
|
return
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
mappers, from_obj = self.mapper._with_polymorphic_args(
|
|
|
|
cls_or_mappers, selectable)
|
2010-05-07 19:33:49 +02:00
|
|
|
self._with_polymorphic = mappers
|
2017-04-15 18:27:12 +02:00
|
|
|
self._polymorphic_discriminator = polymorphic_on
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
self.selectable = from_obj
|
|
|
|
query._mapper_loads_polymorphically_with(
|
|
|
|
self.mapper, sql_util.ColumnAdapter(
|
|
|
|
from_obj, self.mapper._equivalent_columns))
|
|
|
|
|
|
|
|
@property
|
|
|
|
def type(self):
|
|
|
|
return self.mapper.class_
|
|
|
|
|
|
|
|
@property
|
|
|
|
def entity_zero_or_selectable(self):
|
|
|
|
return self.entity_zero
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
def corresponds_to(self, entity):
|
2017-04-15 18:27:12 +02:00
|
|
|
if entity.is_aliased_class:
|
|
|
|
if self.is_aliased_class:
|
|
|
|
if entity._base_alias is self.entity_zero._base_alias:
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
elif self.is_aliased_class:
|
|
|
|
if self.entity_zero._use_mapper_path:
|
|
|
|
return entity in self._with_polymorphic
|
|
|
|
else:
|
|
|
|
return entity is self.entity_zero
|
|
|
|
|
|
|
|
return entity.common_parent(self.entity_zero)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
def adapt_to_selectable(self, query, sel):
|
|
|
|
query._entities.append(self)
|
|
|
|
|
|
|
|
def _get_entity_clauses(self, query, context):
|
2017-04-15 18:27:12 +02:00
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
adapter = None
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
if not self.is_aliased_class:
|
|
|
|
if query._polymorphic_adapters:
|
|
|
|
adapter = query._polymorphic_adapters.get(self.mapper, None)
|
|
|
|
else:
|
|
|
|
adapter = self.aliased_adapter
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
if adapter:
|
|
|
|
if query._from_obj_alias:
|
|
|
|
ret = adapter.wrap(query._from_obj_alias)
|
|
|
|
else:
|
|
|
|
ret = adapter
|
|
|
|
else:
|
|
|
|
ret = query._from_obj_alias
|
|
|
|
|
|
|
|
return ret
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
def row_processor(self, query, context, result):
|
2010-05-07 19:33:49 +02:00
|
|
|
adapter = self._get_entity_clauses(query, context)
|
|
|
|
|
|
|
|
if context.adapter and adapter:
|
|
|
|
adapter = adapter.wrap(context.adapter)
|
|
|
|
elif not adapter:
|
|
|
|
adapter = context.adapter
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
# polymorphic mappers which have concrete tables in
|
|
|
|
# their hierarchy usually
|
2010-05-07 19:33:49 +02:00
|
|
|
# require row aliasing unconditionally.
|
|
|
|
if not adapter and self.mapper._requires_row_aliasing:
|
2017-04-15 18:27:12 +02:00
|
|
|
adapter = sql_util.ColumnAdapter(
|
|
|
|
self.selectable,
|
|
|
|
self.mapper._equivalent_columns)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
if query._primary_entity is self:
|
|
|
|
only_load_props = query._only_load_props
|
|
|
|
refresh_state = context.refresh_state
|
2010-05-07 19:33:49 +02:00
|
|
|
else:
|
2017-04-15 18:27:12 +02:00
|
|
|
only_load_props = refresh_state = None
|
|
|
|
|
|
|
|
_instance = loading._instance_processor(
|
|
|
|
self.mapper,
|
|
|
|
context,
|
|
|
|
result,
|
|
|
|
self.path,
|
|
|
|
adapter,
|
|
|
|
only_load_props=only_load_props,
|
|
|
|
refresh_state=refresh_state,
|
|
|
|
polymorphic_discriminator=self._polymorphic_discriminator
|
|
|
|
)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
return _instance, self._label_name
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
def setup_context(self, query, context):
|
|
|
|
adapter = self._get_entity_clauses(query, context)
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
# if self._adapted_selectable is None:
|
2010-05-07 19:33:49 +02:00
|
|
|
context.froms += (self.selectable,)
|
|
|
|
|
|
|
|
if context.order_by is False and self.mapper.order_by:
|
|
|
|
context.order_by = self.mapper.order_by
|
|
|
|
|
|
|
|
# apply adaptation to the mapper's order_by if needed.
|
|
|
|
if adapter:
|
2017-04-15 18:27:12 +02:00
|
|
|
context.order_by = adapter.adapt_list(
|
|
|
|
util.to_list(
|
|
|
|
context.order_by
|
|
|
|
)
|
|
|
|
)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
loading._setup_entity_query(
|
|
|
|
context, self.mapper, self,
|
|
|
|
self.path, adapter, context.primary_columns,
|
|
|
|
with_polymorphic=self._with_polymorphic,
|
|
|
|
only_load_props=query._only_load_props,
|
|
|
|
polymorphic_discriminator=self._polymorphic_discriminator)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
def __str__(self):
|
|
|
|
return str(self.mapper)
|
|
|
|
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
@inspection._self_inspects
|
|
|
|
class Bundle(InspectionAttr):
|
|
|
|
"""A grouping of SQL expressions that are returned by a :class:`.Query`
|
|
|
|
under one namespace.
|
|
|
|
|
|
|
|
The :class:`.Bundle` essentially allows nesting of the tuple-based
|
|
|
|
results returned by a column-oriented :class:`.Query` object. It also
|
|
|
|
is extensible via simple subclassing, where the primary capability
|
|
|
|
to override is that of how the set of expressions should be returned,
|
|
|
|
allowing post-processing as well as custom return types, without
|
|
|
|
involving ORM identity-mapped classes.
|
|
|
|
|
|
|
|
.. versionadded:: 0.9.0
|
|
|
|
|
|
|
|
.. seealso::
|
|
|
|
|
|
|
|
:ref:`bundles`
|
|
|
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
single_entity = False
|
|
|
|
"""If True, queries for a single Bundle will be returned as a single
|
|
|
|
entity, rather than an element within a keyed tuple."""
|
|
|
|
|
|
|
|
is_clause_element = False
|
|
|
|
|
|
|
|
is_mapper = False
|
|
|
|
|
|
|
|
is_aliased_class = False
|
|
|
|
|
|
|
|
def __init__(self, name, *exprs, **kw):
|
|
|
|
r"""Construct a new :class:`.Bundle`.
|
|
|
|
|
|
|
|
e.g.::
|
|
|
|
|
|
|
|
bn = Bundle("mybundle", MyClass.x, MyClass.y)
|
|
|
|
|
|
|
|
for row in session.query(bn).filter(
|
|
|
|
bn.c.x == 5).filter(bn.c.y == 4):
|
|
|
|
print(row.mybundle.x, row.mybundle.y)
|
|
|
|
|
|
|
|
:param name: name of the bundle.
|
|
|
|
:param \*exprs: columns or SQL expressions comprising the bundle.
|
|
|
|
:param single_entity=False: if True, rows for this :class:`.Bundle`
|
|
|
|
can be returned as a "single entity" outside of any enclosing tuple
|
|
|
|
in the same manner as a mapped entity.
|
|
|
|
|
|
|
|
"""
|
|
|
|
self.name = self._label = name
|
|
|
|
self.exprs = exprs
|
|
|
|
self.c = self.columns = ColumnCollection()
|
|
|
|
self.columns.update((getattr(col, "key", col._label), col)
|
|
|
|
for col in exprs)
|
|
|
|
self.single_entity = kw.pop('single_entity', self.single_entity)
|
|
|
|
|
|
|
|
columns = None
|
|
|
|
"""A namespace of SQL expressions referred to by this :class:`.Bundle`.
|
|
|
|
|
|
|
|
e.g.::
|
|
|
|
|
|
|
|
bn = Bundle("mybundle", MyClass.x, MyClass.y)
|
|
|
|
|
|
|
|
q = sess.query(bn).filter(bn.c.x == 5)
|
|
|
|
|
|
|
|
Nesting of bundles is also supported::
|
|
|
|
|
|
|
|
b1 = Bundle("b1",
|
|
|
|
Bundle('b2', MyClass.a, MyClass.b),
|
|
|
|
Bundle('b3', MyClass.x, MyClass.y)
|
|
|
|
)
|
|
|
|
|
|
|
|
q = sess.query(b1).filter(
|
|
|
|
b1.c.b2.c.a == 5).filter(b1.c.b3.c.y == 9)
|
|
|
|
|
|
|
|
.. seealso::
|
|
|
|
|
|
|
|
:attr:`.Bundle.c`
|
|
|
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
c = None
|
|
|
|
"""An alias for :attr:`.Bundle.columns`."""
|
|
|
|
|
|
|
|
def _clone(self):
|
|
|
|
cloned = self.__class__.__new__(self.__class__)
|
|
|
|
cloned.__dict__.update(self.__dict__)
|
|
|
|
return cloned
|
|
|
|
|
|
|
|
def __clause_element__(self):
|
|
|
|
return expression.ClauseList(group=False, *self.c)
|
|
|
|
|
|
|
|
@property
|
|
|
|
def clauses(self):
|
|
|
|
return self.__clause_element__().clauses
|
|
|
|
|
|
|
|
def label(self, name):
|
|
|
|
"""Provide a copy of this :class:`.Bundle` passing a new label."""
|
|
|
|
|
|
|
|
cloned = self._clone()
|
|
|
|
cloned.name = name
|
|
|
|
return cloned
|
|
|
|
|
|
|
|
def create_row_processor(self, query, procs, labels):
|
|
|
|
"""Produce the "row processing" function for this :class:`.Bundle`.
|
|
|
|
|
|
|
|
May be overridden by subclasses.
|
|
|
|
|
|
|
|
.. seealso::
|
|
|
|
|
|
|
|
:ref:`bundles` - includes an example of subclassing.
|
|
|
|
|
|
|
|
"""
|
|
|
|
keyed_tuple = util.lightweight_named_tuple('result', labels)
|
|
|
|
|
|
|
|
def proc(row):
|
|
|
|
return keyed_tuple([proc(row) for proc in procs])
|
|
|
|
return proc
|
|
|
|
|
|
|
|
|
|
|
|
class _BundleEntity(_QueryEntity):
|
|
|
|
use_id_for_hash = False
|
|
|
|
|
|
|
|
def __init__(self, query, bundle, setup_entities=True):
|
|
|
|
query._entities.append(self)
|
|
|
|
self.bundle = self.expr = bundle
|
|
|
|
self.type = type(bundle)
|
|
|
|
self._label_name = bundle.name
|
|
|
|
self._entities = []
|
|
|
|
|
|
|
|
if setup_entities:
|
|
|
|
for expr in bundle.exprs:
|
|
|
|
if isinstance(expr, Bundle):
|
|
|
|
_BundleEntity(self, expr)
|
|
|
|
else:
|
|
|
|
_ColumnEntity(self, expr, namespace=self)
|
|
|
|
|
|
|
|
self.supports_single_entity = self.bundle.single_entity
|
|
|
|
|
|
|
|
@property
|
|
|
|
def entities(self):
|
|
|
|
entities = []
|
|
|
|
for ent in self._entities:
|
|
|
|
entities.extend(ent.entities)
|
|
|
|
return entities
|
|
|
|
|
|
|
|
@property
|
|
|
|
def entity_zero(self):
|
|
|
|
for ent in self._entities:
|
|
|
|
ezero = ent.entity_zero
|
|
|
|
if ezero is not None:
|
|
|
|
return ezero
|
2010-05-07 19:33:49 +02:00
|
|
|
else:
|
2017-04-15 18:27:12 +02:00
|
|
|
return None
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
def corresponds_to(self, entity):
|
|
|
|
# TODO: this seems to have no effect for
|
|
|
|
# _ColumnEntity either
|
|
|
|
return False
|
|
|
|
|
|
|
|
@property
|
|
|
|
def entity_zero_or_selectable(self):
|
|
|
|
for ent in self._entities:
|
|
|
|
ezero = ent.entity_zero_or_selectable
|
|
|
|
if ezero is not None:
|
|
|
|
return ezero
|
|
|
|
else:
|
|
|
|
return None
|
|
|
|
|
|
|
|
def adapt_to_selectable(self, query, sel):
|
|
|
|
c = _BundleEntity(query, self.bundle, setup_entities=False)
|
|
|
|
# c._label_name = self._label_name
|
|
|
|
# c.entity_zero = self.entity_zero
|
|
|
|
# c.entities = self.entities
|
|
|
|
|
|
|
|
for ent in self._entities:
|
|
|
|
ent.adapt_to_selectable(c, sel)
|
|
|
|
|
|
|
|
def setup_entity(self, ext_info, aliased_adapter):
|
|
|
|
for ent in self._entities:
|
|
|
|
ent.setup_entity(ext_info, aliased_adapter)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
def setup_context(self, query, context):
|
|
|
|
for ent in self._entities:
|
|
|
|
ent.setup_context(query, context)
|
|
|
|
|
|
|
|
def row_processor(self, query, context, result):
|
|
|
|
procs, labels = zip(
|
|
|
|
*[ent.row_processor(query, context, result)
|
|
|
|
for ent in self._entities]
|
|
|
|
)
|
|
|
|
|
|
|
|
proc = self.bundle.create_row_processor(query, procs, labels)
|
|
|
|
|
|
|
|
return proc, self._label_name
|
|
|
|
|
|
|
|
|
|
|
|
class _ColumnEntity(_QueryEntity):
|
|
|
|
"""Column/expression based entity."""
|
|
|
|
|
|
|
|
def __init__(self, query, column, namespace=None):
|
|
|
|
self.expr = column
|
|
|
|
self.namespace = namespace
|
|
|
|
search_entities = True
|
|
|
|
check_column = False
|
|
|
|
|
|
|
|
if isinstance(column, util.string_types):
|
|
|
|
column = sql.literal_column(column)
|
|
|
|
self._label_name = column.name
|
|
|
|
search_entities = False
|
|
|
|
check_column = True
|
|
|
|
_entity = None
|
|
|
|
elif isinstance(column, (
|
|
|
|
attributes.QueryableAttribute,
|
|
|
|
interfaces.PropComparator
|
|
|
|
)):
|
|
|
|
_entity = getattr(column, '_parententity', None)
|
|
|
|
if _entity is not None:
|
|
|
|
search_entities = False
|
|
|
|
self._label_name = column.key
|
|
|
|
column = column._query_clause_element()
|
|
|
|
check_column = True
|
|
|
|
if isinstance(column, Bundle):
|
|
|
|
_BundleEntity(query, column)
|
2010-05-07 19:33:49 +02:00
|
|
|
return
|
|
|
|
|
|
|
|
if not isinstance(column, sql.ColumnElement):
|
2017-04-15 18:27:12 +02:00
|
|
|
if hasattr(column, '_select_iterable'):
|
|
|
|
# break out an object like Table into
|
|
|
|
# individual columns
|
|
|
|
for c in column._select_iterable:
|
|
|
|
if c is column:
|
|
|
|
break
|
|
|
|
_ColumnEntity(query, c, namespace=column)
|
|
|
|
else:
|
|
|
|
return
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
raise sa_exc.InvalidRequestError(
|
2017-04-15 18:27:12 +02:00
|
|
|
"SQL expression, column, or mapped entity "
|
|
|
|
"expected - got '%r'" % (column, )
|
2010-05-07 19:33:49 +02:00
|
|
|
)
|
2017-04-15 18:27:12 +02:00
|
|
|
elif not check_column:
|
|
|
|
self._label_name = getattr(column, 'key', None)
|
|
|
|
search_entities = True
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
self.type = type_ = column.type
|
|
|
|
self.use_id_for_hash = not type_.hashable
|
|
|
|
|
|
|
|
# If the Column is unnamed, give it a
|
2010-05-07 19:33:49 +02:00
|
|
|
# label() so that mutable column expressions
|
|
|
|
# can be located in the result even
|
|
|
|
# if the expression's identity has been changed
|
2017-04-15 18:27:12 +02:00
|
|
|
# due to adaption.
|
|
|
|
|
|
|
|
if not column._label and not getattr(column, 'is_literal', False):
|
|
|
|
column = column.label(self._label_name)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
query._entities.append(self)
|
|
|
|
|
|
|
|
self.column = column
|
|
|
|
self.froms = set()
|
|
|
|
|
|
|
|
# look for ORM entities represented within the
|
|
|
|
# given expression. Try to count only entities
|
2017-04-15 18:27:12 +02:00
|
|
|
# for columns whose FROM object is in the actual list
|
2010-05-07 19:33:49 +02:00
|
|
|
# of FROMs for the overall expression - this helps
|
|
|
|
# subqueries which were built from ORM constructs from
|
|
|
|
# leaking out their entities into the main select construct
|
2017-04-15 18:27:12 +02:00
|
|
|
self.actual_froms = actual_froms = set(column._from_objects)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
if not search_entities:
|
|
|
|
self.entity_zero = _entity
|
|
|
|
if _entity:
|
|
|
|
self.entities = [_entity]
|
|
|
|
self.mapper = _entity.mapper
|
|
|
|
else:
|
|
|
|
self.entities = []
|
|
|
|
self.mapper = None
|
|
|
|
self._from_entities = set(self.entities)
|
|
|
|
else:
|
|
|
|
all_elements = [
|
|
|
|
elem for elem in sql_util.surface_column_elements(column)
|
|
|
|
if 'parententity' in elem._annotations
|
|
|
|
]
|
|
|
|
|
|
|
|
self.entities = util.unique_list([
|
|
|
|
elem._annotations['parententity']
|
|
|
|
for elem in all_elements
|
|
|
|
if 'parententity' in elem._annotations
|
|
|
|
])
|
|
|
|
|
|
|
|
self._from_entities = set([
|
|
|
|
elem._annotations['parententity']
|
|
|
|
for elem in all_elements
|
|
|
|
if 'parententity' in elem._annotations
|
|
|
|
and actual_froms.intersection(elem._from_objects)
|
|
|
|
])
|
|
|
|
if self.entities:
|
|
|
|
self.entity_zero = self.entities[0]
|
|
|
|
self.mapper = self.entity_zero.mapper
|
|
|
|
elif self.namespace is not None:
|
|
|
|
self.entity_zero = self.namespace
|
|
|
|
self.mapper = None
|
|
|
|
else:
|
|
|
|
self.entity_zero = None
|
|
|
|
self.mapper = None
|
|
|
|
|
|
|
|
supports_single_entity = False
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
@property
|
|
|
|
def entity_zero_or_selectable(self):
|
|
|
|
if self.entity_zero is not None:
|
|
|
|
return self.entity_zero
|
|
|
|
elif self.actual_froms:
|
|
|
|
return list(self.actual_froms)[0]
|
2010-05-07 19:33:49 +02:00
|
|
|
else:
|
2017-04-15 18:27:12 +02:00
|
|
|
return None
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
def adapt_to_selectable(self, query, sel):
|
2017-04-15 18:27:12 +02:00
|
|
|
c = _ColumnEntity(query, sel.corresponding_column(self.column))
|
|
|
|
c._label_name = self._label_name
|
|
|
|
c.entity_zero = self.entity_zero
|
|
|
|
c.entities = self.entities
|
|
|
|
|
|
|
|
def setup_entity(self, ext_info, aliased_adapter):
|
|
|
|
if 'selectable' not in self.__dict__:
|
|
|
|
self.selectable = ext_info.selectable
|
|
|
|
|
|
|
|
if self.actual_froms.intersection(ext_info.selectable._from_objects):
|
|
|
|
self.froms.add(ext_info.selectable)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
def corresponds_to(self, entity):
|
2017-04-15 18:27:12 +02:00
|
|
|
# TODO: just returning False here,
|
|
|
|
# no tests fail
|
2010-05-07 19:33:49 +02:00
|
|
|
if self.entity_zero is None:
|
|
|
|
return False
|
|
|
|
elif _is_aliased_class(entity):
|
2017-04-15 18:27:12 +02:00
|
|
|
# TODO: polymorphic subclasses ?
|
2010-05-07 19:33:49 +02:00
|
|
|
return entity is self.entity_zero
|
|
|
|
else:
|
|
|
|
return not _is_aliased_class(self.entity_zero) and \
|
2017-04-15 18:27:12 +02:00
|
|
|
entity.common_parent(self.entity_zero)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
def row_processor(self, query, context, result):
|
|
|
|
if ('fetch_column', self) in context.attributes:
|
|
|
|
column = context.attributes[('fetch_column', self)]
|
|
|
|
else:
|
|
|
|
column = query._adapt_clause(self.column, False, True)
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
if context.adapter:
|
|
|
|
column = context.adapter.columns[column]
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
getter = result._getter(column)
|
|
|
|
return getter, self._label_name
|
2010-05-07 19:33:49 +02:00
|
|
|
|
|
|
|
def setup_context(self, query, context):
|
2017-04-15 18:27:12 +02:00
|
|
|
column = query._adapt_clause(self.column, False, True)
|
2010-05-07 19:33:49 +02:00
|
|
|
context.froms += tuple(self.froms)
|
|
|
|
context.primary_columns.append(column)
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
context.attributes[('fetch_column', self)] = column
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
def __str__(self):
|
|
|
|
return str(self.column)
|
|
|
|
|
|
|
|
|
|
|
|
class QueryContext(object):
|
2017-04-15 18:27:12 +02:00
|
|
|
__slots__ = (
|
|
|
|
'multi_row_eager_loaders', 'adapter', 'froms', 'for_update',
|
|
|
|
'query', 'session', 'autoflush', 'populate_existing',
|
|
|
|
'invoke_all_eagers', 'version_check', 'refresh_state',
|
|
|
|
'primary_columns', 'secondary_columns', 'eager_order_by',
|
|
|
|
'eager_joins', 'create_eager_joins', 'propagate_options',
|
|
|
|
'attributes', 'statement', 'from_clause', 'whereclause',
|
|
|
|
'order_by', 'labels', '_for_update_arg', 'runid', 'partials'
|
|
|
|
)
|
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
def __init__(self, query):
|
|
|
|
|
|
|
|
if query._statement is not None:
|
2017-04-15 18:27:12 +02:00
|
|
|
if isinstance(query._statement, expression.SelectBase) and \
|
|
|
|
not query._statement._textual and \
|
|
|
|
not query._statement.use_labels:
|
2010-05-07 19:33:49 +02:00
|
|
|
self.statement = query._statement.apply_labels()
|
|
|
|
else:
|
|
|
|
self.statement = query._statement
|
|
|
|
else:
|
|
|
|
self.statement = None
|
|
|
|
self.from_clause = query._from_obj
|
|
|
|
self.whereclause = query._criterion
|
|
|
|
self.order_by = query._order_by
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
self.multi_row_eager_loaders = False
|
|
|
|
self.adapter = None
|
|
|
|
self.froms = ()
|
|
|
|
self.for_update = None
|
2010-05-07 19:33:49 +02:00
|
|
|
self.query = query
|
|
|
|
self.session = query.session
|
2017-04-15 18:27:12 +02:00
|
|
|
self.autoflush = query._autoflush
|
2010-05-07 19:33:49 +02:00
|
|
|
self.populate_existing = query._populate_existing
|
2017-04-15 18:27:12 +02:00
|
|
|
self.invoke_all_eagers = query._invoke_all_eagers
|
2010-05-07 19:33:49 +02:00
|
|
|
self.version_check = query._version_check
|
|
|
|
self.refresh_state = query._refresh_state
|
|
|
|
self.primary_columns = []
|
|
|
|
self.secondary_columns = []
|
|
|
|
self.eager_order_by = []
|
|
|
|
self.eager_joins = {}
|
|
|
|
self.create_eager_joins = []
|
2017-04-15 18:27:12 +02:00
|
|
|
self.propagate_options = set(o for o in query._with_options if
|
|
|
|
o.propagate_to_loaders)
|
2010-05-07 19:33:49 +02:00
|
|
|
self.attributes = query._attributes.copy()
|
|
|
|
|
2017-04-15 18:27:12 +02:00
|
|
|
|
2010-05-07 19:33:49 +02:00
|
|
|
class AliasOption(interfaces.MapperOption):
|
|
|
|
|
|
|
|
def __init__(self, alias):
|
2017-04-15 18:27:12 +02:00
|
|
|
r"""Return a :class:`.MapperOption` that will indicate to the :class:`.Query`
|
|
|
|
that the main table has been aliased.
|
|
|
|
|
|
|
|
This is a seldom-used option to suit the
|
|
|
|
very rare case that :func:`.contains_eager`
|
|
|
|
is being used in conjunction with a user-defined SELECT
|
|
|
|
statement that aliases the parent table. E.g.::
|
|
|
|
|
|
|
|
# define an aliased UNION called 'ulist'
|
|
|
|
ulist = users.select(users.c.user_id==7).\
|
|
|
|
union(users.select(users.c.user_id>7)).\
|
|
|
|
alias('ulist')
|
|
|
|
|
|
|
|
# add on an eager load of "addresses"
|
|
|
|
statement = ulist.outerjoin(addresses).\
|
|
|
|
select().apply_labels()
|
|
|
|
|
|
|
|
# create query, indicating "ulist" will be an
|
|
|
|
# alias for the main table, "addresses"
|
|
|
|
# property should be eager loaded
|
|
|
|
query = session.query(User).options(
|
|
|
|
contains_alias(ulist),
|
|
|
|
contains_eager(User.addresses))
|
|
|
|
|
|
|
|
# then get results via the statement
|
|
|
|
results = query.from_statement(statement).all()
|
|
|
|
|
|
|
|
:param alias: is the string name of an alias, or a
|
|
|
|
:class:`~.sql.expression.Alias` object representing
|
|
|
|
the alias.
|
|
|
|
|
|
|
|
"""
|
2010-05-07 19:33:49 +02:00
|
|
|
self.alias = alias
|
|
|
|
|
|
|
|
def process_query(self, query):
|
2017-04-15 18:27:12 +02:00
|
|
|
if isinstance(self.alias, util.string_types):
|
2010-05-07 19:33:49 +02:00
|
|
|
alias = query._mapper_zero().mapped_table.alias(self.alias)
|
|
|
|
else:
|
|
|
|
alias = self.alias
|
|
|
|
query._from_obj_alias = sql_util.ColumnAdapter(alias)
|