Updated SqlAlchemy + the new files

sqlalchemy/sql/annotation.py (new file, 203 lines)
@@ -0,0 +1,203 @@
# sql/annotation.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""The :class:`.Annotated` class and related routines; creates hash-equivalent
copies of SQL constructs which contain context-specific markers and
associations.

"""

from .. import util
from . import operators


class Annotated(object):
    """clones a ClauseElement and applies an 'annotations' dictionary.

    Unlike regular clones, this clone also mimics __hash__() and
    __cmp__() of the original element so that it takes its place
    in hashed collections.

    A reference to the original element is maintained, for the important
    reason of keeping its hash value current. When GC'ed, the
    hash value may be reused, causing conflicts.

    .. note:: The rationale for Annotated producing a brand new class,
       rather than placing the functionality directly within ClauseElement,
       is **performance**. The __hash__() method is absent on plain
       ClauseElement which leads to significantly reduced function call
       overhead, as the use of sets and dictionaries against ClauseElement
       objects is prevalent, but most are not "annotated".

    """

    def __new__(cls, *args):
        if not args:
            # clone constructor
            return object.__new__(cls)
        else:
            element, values = args
            # pull appropriate subclass from registry of annotated
            # classes
            try:
                cls = annotated_classes[element.__class__]
            except KeyError:
                cls = _new_annotation_type(element.__class__, cls)
            return object.__new__(cls)

    def __init__(self, element, values):
        self.__dict__ = element.__dict__.copy()
        self.__element = element
        self._annotations = values
        self._hash = hash(element)

    def _annotate(self, values):
        _values = self._annotations.copy()
        _values.update(values)
        return self._with_annotations(_values)

    def _with_annotations(self, values):
        clone = self.__class__.__new__(self.__class__)
        clone.__dict__ = self.__dict__.copy()
        clone._annotations = values
        return clone

    def _deannotate(self, values=None, clone=True):
        if values is None:
            return self.__element
        else:
            _values = self._annotations.copy()
            for v in values:
                _values.pop(v, None)
            return self._with_annotations(_values)

    def _compiler_dispatch(self, visitor, **kw):
        return self.__element.__class__._compiler_dispatch(
            self, visitor, **kw)

    @property
    def _constructor(self):
        return self.__element._constructor

    def _clone(self):
        clone = self.__element._clone()
        if clone is self.__element:
            # detect immutable, don't change anything
            return self
        else:
            # update the clone with any changes that have occurred
            # to this object's __dict__.
            clone.__dict__.update(self.__dict__)
            return self.__class__(clone, self._annotations)

    def __hash__(self):
        return self._hash

    def __eq__(self, other):
        if isinstance(self.__element, operators.ColumnOperators):
            return self.__element.__class__.__eq__(self, other)
        else:
            return hash(other) == hash(self)


# hard-generate Annotated subclasses. this technique
# is used instead of on-the-fly types (i.e. type.__new__())
# so that the resulting objects are pickleable.
annotated_classes = {}


def _deep_annotate(element, annotations, exclude=None):
    """Deep copy the given ClauseElement, annotating each element
    with the given annotations dictionary.

    Elements within the exclude collection will be cloned but not annotated.

    """
    def clone(elem):
        if exclude and \
                hasattr(elem, 'proxy_set') and \
                elem.proxy_set.intersection(exclude):
            newelem = elem._clone()
        elif annotations != elem._annotations:
            newelem = elem._annotate(annotations)
        else:
            newelem = elem
        newelem._copy_internals(clone=clone)
        return newelem

    if element is not None:
        element = clone(element)
    return element


def _deep_deannotate(element, values=None):
    """Deep copy the given element, removing annotations."""

    cloned = util.column_dict()

    def clone(elem):
        # if a values dict is given,
        # the elem must be cloned each time it appears,
        # as there may be different annotations in source
        # elements that are remaining. if totally
        # removing all annotations, can assume the same
        # slate...
        if values or elem not in cloned:
            newelem = elem._deannotate(values=values, clone=True)
            newelem._copy_internals(clone=clone)
            if not values:
                cloned[elem] = newelem
            return newelem
        else:
            return cloned[elem]

    if element is not None:
        element = clone(element)
    return element


def _shallow_annotate(element, annotations):
    """Annotate the given ClauseElement and copy its internals so that
    internal objects refer to the new annotated object.

    Basically used to apply a "dont traverse" annotation to a
    selectable, without digging throughout the whole
    structure wasting time.
    """
    element = element._annotate(annotations)
    element._copy_internals()
    return element


def _new_annotation_type(cls, base_cls):
    if issubclass(cls, Annotated):
        return cls
    elif cls in annotated_classes:
        return annotated_classes[cls]

    for super_ in cls.__mro__:
        # check if an Annotated subclass more specific than
        # the given base_cls is already registered, such
        # as AnnotatedColumnElement.
        if super_ in annotated_classes:
            base_cls = annotated_classes[super_]
            break

    annotated_classes[cls] = anno_cls = type(
        "Annotated%s" % cls.__name__,
        (base_cls, cls), {})
    globals()["Annotated%s" % cls.__name__] = anno_cls
    return anno_cls


def _prepare_annotations(target_hierarchy, base_cls):
    stack = [target_hierarchy]
    while stack:
        cls = stack.pop()
        stack.extend(cls.__subclasses__())

        _new_annotation_type(cls, base_cls)
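
# --- Illustrative sketch (editorial addition, not part of the original
# module): a minimal round trip through _deep_annotate()/_deep_deannotate(),
# assuming an ordinary ColumnClause produced by sqlalchemy.sql.column().
def _example_annotation_roundtrip():
    from sqlalchemy.sql import column

    col = column('x')
    annotated = _deep_annotate(col, {'entity': 'SomeEntity'})

    # the copy carries the annotations but hashes like the original,
    # so it can stand in for it inside sets and dictionaries
    assert annotated._annotations['entity'] == 'SomeEntity'
    assert hash(annotated) == hash(col)

    # stripping the annotations yields a plain element again
    plain = _deep_deannotate(annotated)
    assert not plain._annotations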
sqlalchemy/sql/base.py (new file, 633 lines)
@@ -0,0 +1,633 @@
|
||||
# sql/base.py
|
||||
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Foundational utilities common to many sql modules.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
from .. import util, exc
|
||||
import itertools
|
||||
from .visitors import ClauseVisitor
|
||||
import re
|
||||
import collections
|
||||
|
||||
PARSE_AUTOCOMMIT = util.symbol('PARSE_AUTOCOMMIT')
|
||||
NO_ARG = util.symbol('NO_ARG')
|
||||
|
||||
|
||||
class Immutable(object):
|
||||
"""mark a ClauseElement as 'immutable' when expressions are cloned."""
|
||||
|
||||
def unique_params(self, *optionaldict, **kwargs):
|
||||
raise NotImplementedError("Immutable objects do not support copying")
|
||||
|
||||
def params(self, *optionaldict, **kwargs):
|
||||
raise NotImplementedError("Immutable objects do not support copying")
|
||||
|
||||
def _clone(self):
|
||||
return self
|
||||
|
||||
|
||||
def _from_objects(*elements):
|
||||
return itertools.chain(*[element._from_objects for element in elements])
|
||||
|
||||
|
||||
@util.decorator
|
||||
def _generative(fn, *args, **kw):
|
||||
"""Mark a method as generative."""
|
||||
|
||||
self = args[0]._generate()
|
||||
fn(self, *args[1:], **kw)
|
||||
return self
|
||||
|
||||
|
||||
class _DialectArgView(collections.MutableMapping):
|
||||
"""A dictionary view of dialect-level arguments in the form
|
||||
<dialectname>_<argument_name>.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, obj):
|
||||
self.obj = obj
|
||||
|
||||
def _key(self, key):
|
||||
try:
|
||||
dialect, value_key = key.split("_", 1)
|
||||
except ValueError:
|
||||
raise KeyError(key)
|
||||
else:
|
||||
return dialect, value_key
|
||||
|
||||
def __getitem__(self, key):
|
||||
dialect, value_key = self._key(key)
|
||||
|
||||
try:
|
||||
opt = self.obj.dialect_options[dialect]
|
||||
except exc.NoSuchModuleError:
|
||||
raise KeyError(key)
|
||||
else:
|
||||
return opt[value_key]
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
try:
|
||||
dialect, value_key = self._key(key)
|
||||
except KeyError:
|
||||
raise exc.ArgumentError(
|
||||
"Keys must be of the form <dialectname>_<argname>")
|
||||
else:
|
||||
self.obj.dialect_options[dialect][value_key] = value
|
||||
|
||||
def __delitem__(self, key):
|
||||
dialect, value_key = self._key(key)
|
||||
del self.obj.dialect_options[dialect][value_key]
|
||||
|
||||
def __len__(self):
|
||||
return sum(len(args._non_defaults) for args in
|
||||
self.obj.dialect_options.values())
|
||||
|
||||
def __iter__(self):
|
||||
return (
|
||||
util.safe_kwarg("%s_%s" % (dialect_name, value_name))
|
||||
for dialect_name in self.obj.dialect_options
|
||||
for value_name in
|
||||
self.obj.dialect_options[dialect_name]._non_defaults
|
||||
)
|
||||
|
||||
|
||||
class _DialectArgDict(collections.MutableMapping):
|
||||
"""A dictionary view of dialect-level arguments for a specific
|
||||
dialect.
|
||||
|
||||
Maintains a separate collection of user-specified arguments
|
||||
and dialect-specified default arguments.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self._non_defaults = {}
|
||||
self._defaults = {}
|
||||
|
||||
def __len__(self):
|
||||
return len(set(self._non_defaults).union(self._defaults))
|
||||
|
||||
def __iter__(self):
|
||||
return iter(set(self._non_defaults).union(self._defaults))
|
||||
|
||||
def __getitem__(self, key):
|
||||
if key in self._non_defaults:
|
||||
return self._non_defaults[key]
|
||||
else:
|
||||
return self._defaults[key]
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
self._non_defaults[key] = value
|
||||
|
||||
def __delitem__(self, key):
|
||||
del self._non_defaults[key]
|
||||
|
||||
|
||||
class DialectKWArgs(object):
|
||||
"""Establish the ability for a class to have dialect-specific arguments
|
||||
with defaults and constructor validation.
|
||||
|
||||
The :class:`.DialectKWArgs` interacts with the
|
||||
:attr:`.DefaultDialect.construct_arguments` present on a dialect.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:attr:`.DefaultDialect.construct_arguments`
|
||||
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def argument_for(cls, dialect_name, argument_name, default):
|
||||
"""Add a new kind of dialect-specific keyword argument for this class.
|
||||
|
||||
E.g.::
|
||||
|
||||
Index.argument_for("mydialect", "length", None)
|
||||
|
||||
some_index = Index('a', 'b', mydialect_length=5)
|
||||
|
||||
The :meth:`.DialectKWArgs.argument_for` method is a per-argument
|
||||
way of adding extra arguments to the
|
||||
:attr:`.DefaultDialect.construct_arguments` dictionary. This
|
||||
dictionary provides a list of argument names accepted by various
|
||||
schema-level constructs on behalf of a dialect.
|
||||
|
||||
New dialects should typically specify this dictionary all at once as a
|
||||
data member of the dialect class. The use case for ad-hoc addition of
|
||||
argument names is typically for end-user code that is also using
|
||||
a custom compilation scheme which consumes the additional arguments.
|
||||
|
||||
:param dialect_name: name of a dialect. The dialect must be
|
||||
locatable, else a :class:`.NoSuchModuleError` is raised. The
|
||||
dialect must also include an existing
|
||||
:attr:`.DefaultDialect.construct_arguments` collection, indicating
|
||||
that it participates in the keyword-argument validation and default
|
||||
system, else :class:`.ArgumentError` is raised. If the dialect does
|
||||
not include this collection, then any keyword argument can be
|
||||
specified on behalf of this dialect already. All dialects packaged
|
||||
within SQLAlchemy include this collection, however for third party
|
||||
dialects, support may vary.
|
||||
|
||||
:param argument_name: name of the parameter.
|
||||
|
||||
:param default: default value of the parameter.
|
||||
|
||||
.. versionadded:: 0.9.4
|
||||
|
||||
"""
|
||||
|
||||
construct_arg_dictionary = DialectKWArgs._kw_registry[dialect_name]
|
||||
if construct_arg_dictionary is None:
|
||||
raise exc.ArgumentError(
|
||||
"Dialect '%s' does have keyword-argument "
|
||||
"validation and defaults enabled configured" %
|
||||
dialect_name)
|
||||
if cls not in construct_arg_dictionary:
|
||||
construct_arg_dictionary[cls] = {}
|
||||
construct_arg_dictionary[cls][argument_name] = default
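# Illustrative note (editorial addition, not in the original source): after
# the hypothetical registration shown in the docstring above, e.g.
#
#     Index.argument_for("mydialect", "length", None)
#     idx = Index('a', 'b', mydialect_length=5)
#
# the same value is reachable both in flat form,
# idx.dialect_kwargs['mydialect_length'] (or idx.kwargs, its synonym), and
# in nested form, idx.dialect_options['mydialect']['length'].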
|
||||
|
||||
@util.memoized_property
|
||||
def dialect_kwargs(self):
|
||||
"""A collection of keyword arguments specified as dialect-specific
|
||||
options to this construct.
|
||||
|
||||
The arguments are present here in their original ``<dialect>_<kwarg>``
|
||||
format. Only arguments that were actually passed are included;
|
||||
unlike the :attr:`.DialectKWArgs.dialect_options` collection, which
|
||||
contains all options known by this dialect including defaults.
|
||||
|
||||
The collection is also writable; keys are accepted of the
|
||||
form ``<dialect>_<kwarg>`` where the value will be assembled
|
||||
into the list of options.
|
||||
|
||||
.. versionadded:: 0.9.2
|
||||
|
||||
.. versionchanged:: 0.9.4 The :attr:`.DialectKWArgs.dialect_kwargs`
|
||||
collection is now writable.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:attr:`.DialectKWArgs.dialect_options` - nested dictionary form
|
||||
|
||||
"""
|
||||
return _DialectArgView(self)
|
||||
|
||||
@property
|
||||
def kwargs(self):
|
||||
"""A synonym for :attr:`.DialectKWArgs.dialect_kwargs`."""
|
||||
return self.dialect_kwargs
|
||||
|
||||
@util.dependencies("sqlalchemy.dialects")
|
||||
def _kw_reg_for_dialect(dialects, dialect_name):
|
||||
dialect_cls = dialects.registry.load(dialect_name)
|
||||
if dialect_cls.construct_arguments is None:
|
||||
return None
|
||||
return dict(dialect_cls.construct_arguments)
|
||||
_kw_registry = util.PopulateDict(_kw_reg_for_dialect)
|
||||
|
||||
def _kw_reg_for_dialect_cls(self, dialect_name):
|
||||
construct_arg_dictionary = DialectKWArgs._kw_registry[dialect_name]
|
||||
d = _DialectArgDict()
|
||||
|
||||
if construct_arg_dictionary is None:
|
||||
d._defaults.update({"*": None})
|
||||
else:
|
||||
for cls in reversed(self.__class__.__mro__):
|
||||
if cls in construct_arg_dictionary:
|
||||
d._defaults.update(construct_arg_dictionary[cls])
|
||||
return d
|
||||
|
||||
@util.memoized_property
|
||||
def dialect_options(self):
|
||||
"""A collection of keyword arguments specified as dialect-specific
|
||||
options to this construct.
|
||||
|
||||
This is a two-level nested registry, keyed to ``<dialect_name>``
|
||||
and ``<argument_name>``. For example, the ``postgresql_where``
|
||||
argument would be locatable as::
|
||||
|
||||
arg = my_object.dialect_options['postgresql']['where']
|
||||
|
||||
.. versionadded:: 0.9.2
|
||||
|
||||
.. seealso::
|
||||
|
||||
:attr:`.DialectKWArgs.dialect_kwargs` - flat dictionary form
|
||||
|
||||
"""
|
||||
|
||||
return util.PopulateDict(
|
||||
util.portable_instancemethod(self._kw_reg_for_dialect_cls)
|
||||
)
|
||||
|
||||
def _validate_dialect_kwargs(self, kwargs):
|
||||
# validate remaining kwargs that they all specify DB prefixes
|
||||
|
||||
if not kwargs:
|
||||
return
|
||||
|
||||
for k in kwargs:
|
||||
m = re.match('^(.+?)_(.+)$', k)
|
||||
if not m:
|
||||
raise TypeError(
|
||||
"Additional arguments should be "
|
||||
"named <dialectname>_<argument>, got '%s'" % k)
|
||||
dialect_name, arg_name = m.group(1, 2)
|
||||
|
||||
try:
|
||||
construct_arg_dictionary = self.dialect_options[dialect_name]
|
||||
except exc.NoSuchModuleError:
|
||||
util.warn(
|
||||
"Can't validate argument %r; can't "
|
||||
"locate any SQLAlchemy dialect named %r" %
|
||||
(k, dialect_name))
|
||||
self.dialect_options[dialect_name] = d = _DialectArgDict()
|
||||
d._defaults.update({"*": None})
|
||||
d._non_defaults[arg_name] = kwargs[k]
|
||||
else:
|
||||
if "*" not in construct_arg_dictionary and \
|
||||
arg_name not in construct_arg_dictionary:
|
||||
raise exc.ArgumentError(
|
||||
"Argument %r is not accepted by "
|
||||
"dialect %r on behalf of %r" % (
|
||||
k,
|
||||
dialect_name, self.__class__
|
||||
))
|
||||
else:
|
||||
construct_arg_dictionary[arg_name] = kwargs[k]
|
||||
|
||||
|
||||
class Generative(object):
|
||||
"""Allow a ClauseElement to generate itself via the
|
||||
@_generative decorator.
|
||||
|
||||
"""
|
||||
|
||||
def _generate(self):
|
||||
s = self.__class__.__new__(self.__class__)
|
||||
s.__dict__ = self.__dict__.copy()
|
||||
return s
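# Illustrative sketch (editorial addition, not part of the original source):
# how a subclass is meant to combine Generative with the @_generative
# decorator above - the decorated method mutates the copy made by
# _generate(), and the caller receives that copy back, leaving the original
# object untouched. The class and attribute names here are invented for the
# example.
class _ExampleGenerative(Generative):
    _limit = None

    @_generative
    def limit(self, value):
        # 'self' here is already the copy produced by _generate()
        self._limit = value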
|
||||
|
||||
|
||||
class Executable(Generative):
|
||||
"""Mark a ClauseElement as supporting execution.
|
||||
|
||||
:class:`.Executable` is a superclass for all "statement" types
|
||||
of objects, including :func:`select`, :func:`delete`, :func:`update`,
|
||||
:func:`insert`, :func:`text`.
|
||||
|
||||
"""
|
||||
|
||||
supports_execution = True
|
||||
_execution_options = util.immutabledict()
|
||||
_bind = None
|
||||
|
||||
@_generative
|
||||
def execution_options(self, **kw):
|
||||
""" Set non-SQL options for the statement which take effect during
|
||||
execution.
|
||||
|
||||
Execution options can be set on a per-statement or
|
||||
per :class:`.Connection` basis. Additionally, the
|
||||
:class:`.Engine` and ORM :class:`~.orm.query.Query` objects provide
|
||||
access to execution options which they in turn configure upon
|
||||
connections.
|
||||
|
||||
The :meth:`execution_options` method is generative. A new
|
||||
instance of this statement is returned that contains the options::
|
||||
|
||||
statement = select([table.c.x, table.c.y])
|
||||
statement = statement.execution_options(autocommit=True)
|
||||
|
||||
Note that only a subset of possible execution options can be applied
|
||||
to a statement - these include "autocommit" and "stream_results",
|
||||
but not "isolation_level" or "compiled_cache".
|
||||
See :meth:`.Connection.execution_options` for a full list of
|
||||
possible options.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:meth:`.Connection.execution_options()`
|
||||
|
||||
:meth:`.Query.execution_options()`
|
||||
|
||||
"""
|
||||
if 'isolation_level' in kw:
|
||||
raise exc.ArgumentError(
|
||||
"'isolation_level' execution option may only be specified "
|
||||
"on Connection.execution_options(), or "
|
||||
"per-engine using the isolation_level "
|
||||
"argument to create_engine()."
|
||||
)
|
||||
if 'compiled_cache' in kw:
|
||||
raise exc.ArgumentError(
|
||||
"'compiled_cache' execution option may only be specified "
|
||||
"on Connection.execution_options(), not per statement."
|
||||
)
|
||||
self._execution_options = self._execution_options.union(kw)
|
||||
|
||||
def execute(self, *multiparams, **params):
|
||||
"""Compile and execute this :class:`.Executable`."""
|
||||
e = self.bind
|
||||
if e is None:
|
||||
label = getattr(self, 'description', self.__class__.__name__)
|
||||
msg = ('This %s is not directly bound to a Connection or Engine. '
|
||||
'Use the .execute() method of a Connection or Engine '
|
||||
'to execute this construct.' % label)
|
||||
raise exc.UnboundExecutionError(msg)
|
||||
return e._execute_clauseelement(self, multiparams, params)
|
||||
|
||||
def scalar(self, *multiparams, **params):
|
||||
"""Compile and execute this :class:`.Executable`, returning the
|
||||
result's scalar representation.
|
||||
|
||||
"""
|
||||
return self.execute(*multiparams, **params).scalar()
|
||||
|
||||
@property
|
||||
def bind(self):
|
||||
"""Returns the :class:`.Engine` or :class:`.Connection` to
|
||||
which this :class:`.Executable` is bound, or None if none found.
|
||||
|
||||
This is a traversal which checks locally, then
|
||||
checks among the "from" clauses of associated objects
|
||||
until a bound engine or connection is found.
|
||||
|
||||
"""
|
||||
if self._bind is not None:
|
||||
return self._bind
|
||||
|
||||
for f in _from_objects(self):
|
||||
if f is self:
|
||||
continue
|
||||
engine = f.bind
|
||||
if engine is not None:
|
||||
return engine
|
||||
else:
|
||||
return None
|
||||
|
||||
|
||||
class SchemaEventTarget(object):
|
||||
"""Base class for elements that are the targets of :class:`.DDLEvents`
|
||||
events.
|
||||
|
||||
This includes :class:`.SchemaItem` as well as :class:`.SchemaType`.
|
||||
|
||||
"""
|
||||
|
||||
def _set_parent(self, parent):
|
||||
"""Associate with this SchemaEvent's parent object."""
|
||||
|
||||
def _set_parent_with_dispatch(self, parent):
|
||||
self.dispatch.before_parent_attach(self, parent)
|
||||
self._set_parent(parent)
|
||||
self.dispatch.after_parent_attach(self, parent)
|
||||
|
||||
|
||||
class SchemaVisitor(ClauseVisitor):
|
||||
"""Define the visiting for ``SchemaItem`` objects."""
|
||||
|
||||
__traverse_options__ = {'schema_visitor': True}
|
||||
|
||||
|
||||
class ColumnCollection(util.OrderedProperties):
|
||||
"""An ordered dictionary that stores a list of ColumnElement
|
||||
instances.
|
||||
|
||||
Overrides the ``__eq__()`` method to produce SQL clauses between
|
||||
sets of correlated columns.
|
||||
|
||||
"""
|
||||
|
||||
__slots__ = '_all_columns'
|
||||
|
||||
def __init__(self, *columns):
|
||||
super(ColumnCollection, self).__init__()
|
||||
object.__setattr__(self, '_all_columns', [])
|
||||
for c in columns:
|
||||
self.add(c)
|
||||
|
||||
def __str__(self):
|
||||
return repr([str(c) for c in self])
|
||||
|
||||
def replace(self, column):
|
||||
"""add the given column to this collection, removing unaliased
|
||||
versions of this column as well as existing columns with the
|
||||
same key.
|
||||
|
||||
e.g.::
|
||||
|
||||
t = Table('sometable', metadata, Column('col1', Integer))
|
||||
t.columns.replace(Column('col1', Integer, key='columnone'))
|
||||
|
||||
will remove the original 'col1' from the collection, and add
|
||||
the new column under the key 'columnone'.
|
||||
|
||||
Used by schema.Column to override columns during table reflection.
|
||||
|
||||
"""
|
||||
remove_col = None
|
||||
if column.name in self and column.key != column.name:
|
||||
other = self[column.name]
|
||||
if other.name == other.key:
|
||||
remove_col = other
|
||||
del self._data[other.key]
|
||||
|
||||
if column.key in self._data:
|
||||
remove_col = self._data[column.key]
|
||||
|
||||
self._data[column.key] = column
|
||||
if remove_col is not None:
|
||||
self._all_columns[:] = [column if c is remove_col
|
||||
else c for c in self._all_columns]
|
||||
else:
|
||||
self._all_columns.append(column)
|
||||
|
||||
def add(self, column):
|
||||
"""Add a column to this collection.
|
||||
|
||||
The key attribute of the column will be used as the hash key
|
||||
for this dictionary.
|
||||
|
||||
"""
|
||||
if not column.key:
|
||||
raise exc.ArgumentError(
|
||||
"Can't add unnamed column to column collection")
|
||||
self[column.key] = column
|
||||
|
||||
def __delitem__(self, key):
|
||||
raise NotImplementedError()
|
||||
|
||||
def __setattr__(self, key, object):
|
||||
raise NotImplementedError()
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
if key in self:
|
||||
|
||||
# this warning is primarily to catch select() statements
|
||||
# which have conflicting column names in their exported
|
||||
# columns collection
|
||||
|
||||
existing = self[key]
|
||||
if not existing.shares_lineage(value):
|
||||
util.warn('Column %r on table %r being replaced by '
|
||||
'%r, which has the same key. Consider '
|
||||
'use_labels for select() statements.' %
|
||||
(key, getattr(existing, 'table', None), value))
|
||||
|
||||
# pop out memoized proxy_set as this
|
||||
# operation may very well be occurring
|
||||
# in a _make_proxy operation
|
||||
util.memoized_property.reset(value, "proxy_set")
|
||||
|
||||
self._all_columns.append(value)
|
||||
self._data[key] = value
|
||||
|
||||
def clear(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
def remove(self, column):
|
||||
del self._data[column.key]
|
||||
self._all_columns[:] = [
|
||||
c for c in self._all_columns if c is not column]
|
||||
|
||||
def update(self, iter):
|
||||
cols = list(iter)
|
||||
all_col_set = set(self._all_columns)
|
||||
self._all_columns.extend(
|
||||
c for label, c in cols if c not in all_col_set)
|
||||
self._data.update((label, c) for label, c in cols)
|
||||
|
||||
def extend(self, iter):
|
||||
cols = list(iter)
|
||||
all_col_set = set(self._all_columns)
|
||||
self._all_columns.extend(c for c in cols if c not in all_col_set)
|
||||
self._data.update((c.key, c) for c in cols)
|
||||
|
||||
__hash__ = None
|
||||
|
||||
@util.dependencies("sqlalchemy.sql.elements")
|
||||
def __eq__(self, elements, other):
|
||||
l = []
|
||||
for c in getattr(other, "_all_columns", other):
|
||||
for local in self._all_columns:
|
||||
if c.shares_lineage(local):
|
||||
l.append(c == local)
|
||||
return elements.and_(*l)
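# Illustrative note (editorial addition, not in the original source): the
# comparison produces a conjunction over columns that share lineage, e.g.
# against an alias of the same table
#
#     t.c == t.alias('t2').c
#
# yields roughly "t2.id = t.id AND t2.name = t.name".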
|
||||
|
||||
def __contains__(self, other):
|
||||
if not isinstance(other, util.string_types):
|
||||
raise exc.ArgumentError("__contains__ requires a string argument")
|
||||
return util.OrderedProperties.__contains__(self, other)
|
||||
|
||||
def __getstate__(self):
|
||||
return {'_data': self._data,
|
||||
'_all_columns': self._all_columns}
|
||||
|
||||
def __setstate__(self, state):
|
||||
object.__setattr__(self, '_data', state['_data'])
|
||||
object.__setattr__(self, '_all_columns', state['_all_columns'])
|
||||
|
||||
def contains_column(self, col):
|
||||
return col in set(self._all_columns)
|
||||
|
||||
def as_immutable(self):
|
||||
return ImmutableColumnCollection(self._data, self._all_columns)
|
||||
|
||||
|
||||
class ImmutableColumnCollection(util.ImmutableProperties, ColumnCollection):
|
||||
def __init__(self, data, all_columns):
|
||||
util.ImmutableProperties.__init__(self, data)
|
||||
object.__setattr__(self, '_all_columns', all_columns)
|
||||
|
||||
extend = remove = util.ImmutableProperties._immutable
|
||||
|
||||
|
||||
class ColumnSet(util.ordered_column_set):
|
||||
def contains_column(self, col):
|
||||
return col in self
|
||||
|
||||
def extend(self, cols):
|
||||
for col in cols:
|
||||
self.add(col)
|
||||
|
||||
def __add__(self, other):
|
||||
return list(self) + list(other)
|
||||
|
||||
@util.dependencies("sqlalchemy.sql.elements")
|
||||
def __eq__(self, elements, other):
|
||||
l = []
|
||||
for c in other:
|
||||
for local in self:
|
||||
if c.shares_lineage(local):
|
||||
l.append(c == local)
|
||||
return elements.and_(*l)
|
||||
|
||||
def __hash__(self):
|
||||
return hash(tuple(x for x in self))
|
||||
|
||||
|
||||
def _bind_or_error(schemaitem, msg=None):
|
||||
bind = schemaitem.bind
|
||||
if not bind:
|
||||
name = schemaitem.__class__.__name__
|
||||
label = getattr(schemaitem, 'fullname',
|
||||
getattr(schemaitem, 'name', None))
|
||||
if label:
|
||||
item = '%s object %r' % (name, label)
|
||||
else:
|
||||
item = '%s object' % name
|
||||
if msg is None:
|
||||
msg = "%s is not bound to an Engine or Connection. "\
|
||||
"Execution can not proceed without a database to execute "\
|
||||
"against." % item
|
||||
raise exc.UnboundExecutionError(msg)
|
||||
return bind
|
||||
sqlalchemy/sql/crud.py (new file, 692 lines)
@@ -0,0 +1,692 @@
|
||||
# sql/crud.py
|
||||
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Functions used by compiler.py to determine the parameters rendered
|
||||
within INSERT and UPDATE statements.
|
||||
|
||||
"""
|
||||
from .. import util
|
||||
from .. import exc
|
||||
from . import dml
|
||||
from . import elements
|
||||
import operator
|
||||
|
||||
REQUIRED = util.symbol('REQUIRED', """
|
||||
Placeholder for the value within a :class:`.BindParameter`
|
||||
which is required to be present when the statement is passed
|
||||
to :meth:`.Connection.execute`.
|
||||
|
||||
This symbol is typically used when a :func:`.expression.insert`
|
||||
or :func:`.expression.update` statement is compiled without parameter
|
||||
values present.
|
||||
|
||||
""")
|
||||
|
||||
ISINSERT = util.symbol('ISINSERT')
|
||||
ISUPDATE = util.symbol('ISUPDATE')
|
||||
ISDELETE = util.symbol('ISDELETE')
|
||||
|
||||
|
||||
def _setup_crud_params(compiler, stmt, local_stmt_type, **kw):
|
||||
restore_isinsert = compiler.isinsert
|
||||
restore_isupdate = compiler.isupdate
|
||||
restore_isdelete = compiler.isdelete
|
||||
|
||||
should_restore = (
|
||||
restore_isinsert or restore_isupdate or restore_isdelete
|
||||
) or len(compiler.stack) > 1
|
||||
|
||||
if local_stmt_type is ISINSERT:
|
||||
compiler.isupdate = False
|
||||
compiler.isinsert = True
|
||||
elif local_stmt_type is ISUPDATE:
|
||||
compiler.isupdate = True
|
||||
compiler.isinsert = False
|
||||
elif local_stmt_type is ISDELETE:
|
||||
if not should_restore:
|
||||
compiler.isdelete = True
|
||||
else:
|
||||
assert False, "ISINSERT, ISUPDATE, or ISDELETE expected"
|
||||
|
||||
try:
|
||||
if local_stmt_type in (ISINSERT, ISUPDATE):
|
||||
return _get_crud_params(compiler, stmt, **kw)
|
||||
finally:
|
||||
if should_restore:
|
||||
compiler.isinsert = restore_isinsert
|
||||
compiler.isupdate = restore_isupdate
|
||||
compiler.isdelete = restore_isdelete
|
||||
|
||||
|
||||
def _get_crud_params(compiler, stmt, **kw):
|
||||
"""create a set of tuples representing column/string pairs for use
|
||||
in an INSERT or UPDATE statement.
|
||||
|
||||
Also generates the Compiled object's postfetch, prefetch, and
|
||||
returning column collections, used for default handling and ultimately
|
||||
populating the ResultProxy's prefetch_cols() and postfetch_cols()
|
||||
collections.
|
||||
|
||||
"""
|
||||
|
||||
compiler.postfetch = []
|
||||
compiler.insert_prefetch = []
|
||||
compiler.update_prefetch = []
|
||||
compiler.returning = []
|
||||
|
||||
# no parameters in the statement, no parameters in the
|
||||
# compiled params - return binds for all columns
|
||||
if compiler.column_keys is None and stmt.parameters is None:
|
||||
return [
|
||||
(c, _create_bind_param(
|
||||
compiler, c, None, required=True))
|
||||
for c in stmt.table.columns
|
||||
]
|
||||
|
||||
if stmt._has_multi_parameters:
|
||||
stmt_parameters = stmt.parameters[0]
|
||||
else:
|
||||
stmt_parameters = stmt.parameters
|
||||
|
||||
# getters - these are normally just column.key,
|
||||
# but in the case of mysql multi-table update, the rules for
|
||||
# .key must conditionally take tablename into account
|
||||
_column_as_key, _getattr_col_key, _col_bind_name = \
|
||||
_key_getters_for_crud_column(compiler, stmt)
|
||||
|
||||
# if we have statement parameters - set defaults in the
|
||||
# compiled params
|
||||
if compiler.column_keys is None:
|
||||
parameters = {}
|
||||
else:
|
||||
parameters = dict((_column_as_key(key), REQUIRED)
|
||||
for key in compiler.column_keys
|
||||
if not stmt_parameters or
|
||||
key not in stmt_parameters)
|
||||
|
||||
# create a list of column assignment clauses as tuples
|
||||
values = []
|
||||
|
||||
if stmt_parameters is not None:
|
||||
_get_stmt_parameters_params(
|
||||
compiler,
|
||||
parameters, stmt_parameters, _column_as_key, values, kw)
|
||||
|
||||
check_columns = {}
|
||||
|
||||
# special logic that only occurs for multi-table UPDATE
|
||||
# statements
|
||||
if compiler.isupdate and stmt._extra_froms and stmt_parameters:
|
||||
_get_multitable_params(
|
||||
compiler, stmt, stmt_parameters, check_columns,
|
||||
_col_bind_name, _getattr_col_key, values, kw)
|
||||
|
||||
if compiler.isinsert and stmt.select_names:
|
||||
_scan_insert_from_select_cols(
|
||||
compiler, stmt, parameters,
|
||||
_getattr_col_key, _column_as_key,
|
||||
_col_bind_name, check_columns, values, kw)
|
||||
else:
|
||||
_scan_cols(
|
||||
compiler, stmt, parameters,
|
||||
_getattr_col_key, _column_as_key,
|
||||
_col_bind_name, check_columns, values, kw)
|
||||
|
||||
if parameters and stmt_parameters:
|
||||
check = set(parameters).intersection(
|
||||
_column_as_key(k) for k in stmt_parameters
|
||||
).difference(check_columns)
|
||||
if check:
|
||||
raise exc.CompileError(
|
||||
"Unconsumed column names: %s" %
|
||||
(", ".join("%s" % c for c in check))
|
||||
)
|
||||
|
||||
if stmt._has_multi_parameters:
|
||||
values = _extend_values_for_multiparams(compiler, stmt, values, kw)
|
||||
|
||||
return values
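# Illustrative note (editorial addition, not in the original source): for a
# plain INSERT against a two-column table compiled without parameters, the
# list returned above looks roughly like
#
#     [(table.c.id, ':id'), (table.c.data, ':data')]
#
# i.e. (Column, rendered bind text) pairs that compiler.py stitches into the
# VALUES clause.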
|
||||
|
||||
|
||||
def _create_bind_param(
|
||||
compiler, col, value, process=True,
|
||||
required=False, name=None, **kw):
|
||||
if name is None:
|
||||
name = col.key
|
||||
bindparam = elements.BindParameter(
|
||||
name, value, type_=col.type, required=required)
|
||||
bindparam._is_crud = True
|
||||
if process:
|
||||
bindparam = bindparam._compiler_dispatch(compiler, **kw)
|
||||
return bindparam
|
||||
|
||||
|
||||
def _key_getters_for_crud_column(compiler, stmt):
|
||||
if compiler.isupdate and stmt._extra_froms:
|
||||
# when extra tables are present, refer to the columns
|
||||
# in those extra tables as table-qualified, including in
|
||||
# dictionaries and when rendering bind param names.
|
||||
# the "main" table of the statement remains unqualified,
|
||||
# allowing the most compatibility with a non-multi-table
|
||||
# statement.
|
||||
_et = set(stmt._extra_froms)
|
||||
|
||||
def _column_as_key(key):
|
||||
str_key = elements._column_as_key(key)
|
||||
if hasattr(key, 'table') and key.table in _et:
|
||||
return (key.table.name, str_key)
|
||||
else:
|
||||
return str_key
|
||||
|
||||
def _getattr_col_key(col):
|
||||
if col.table in _et:
|
||||
return (col.table.name, col.key)
|
||||
else:
|
||||
return col.key
|
||||
|
||||
def _col_bind_name(col):
|
||||
if col.table in _et:
|
||||
return "%s_%s" % (col.table.name, col.key)
|
||||
else:
|
||||
return col.key
|
||||
|
||||
else:
|
||||
_column_as_key = elements._column_as_key
|
||||
_getattr_col_key = _col_bind_name = operator.attrgetter("key")
|
||||
|
||||
return _column_as_key, _getattr_col_key, _col_bind_name
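# Illustrative note (editorial addition, not in the original source): given a
# multi-table UPDATE such as
#
#     users.update().\
#         where(users.c.id == addresses.c.user_id).\
#         values({users.c.name: 'ed', addresses.c.email: 'ed@foo.com'})
#
# (users/addresses being ordinary Table objects), the addresses column is
# keyed as ('addresses', 'email') in the parameter dictionaries and its bind
# renders as 'addresses_email', while columns of the statement's own table
# keep their plain .key.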
|
||||
|
||||
|
||||
def _scan_insert_from_select_cols(
|
||||
compiler, stmt, parameters, _getattr_col_key,
|
||||
_column_as_key, _col_bind_name, check_columns, values, kw):
|
||||
|
||||
need_pks, implicit_returning, \
|
||||
implicit_return_defaults, postfetch_lastrowid = \
|
||||
_get_returning_modifiers(compiler, stmt)
|
||||
|
||||
cols = [stmt.table.c[_column_as_key(name)]
|
||||
for name in stmt.select_names]
|
||||
|
||||
compiler._insert_from_select = stmt.select
|
||||
|
||||
add_select_cols = []
|
||||
if stmt.include_insert_from_select_defaults:
|
||||
col_set = set(cols)
|
||||
for col in stmt.table.columns:
|
||||
if col not in col_set and col.default:
|
||||
cols.append(col)
|
||||
|
||||
for c in cols:
|
||||
col_key = _getattr_col_key(c)
|
||||
if col_key in parameters and col_key not in check_columns:
|
||||
parameters.pop(col_key)
|
||||
values.append((c, None))
|
||||
else:
|
||||
_append_param_insert_select_hasdefault(
|
||||
compiler, stmt, c, add_select_cols, kw)
|
||||
|
||||
if add_select_cols:
|
||||
values.extend(add_select_cols)
|
||||
compiler._insert_from_select = compiler._insert_from_select._generate()
|
||||
compiler._insert_from_select._raw_columns = \
|
||||
tuple(compiler._insert_from_select._raw_columns) + tuple(
|
||||
expr for col, expr in add_select_cols)
|
||||
|
||||
|
||||
def _scan_cols(
|
||||
compiler, stmt, parameters, _getattr_col_key,
|
||||
_column_as_key, _col_bind_name, check_columns, values, kw):
|
||||
|
||||
need_pks, implicit_returning, \
|
||||
implicit_return_defaults, postfetch_lastrowid = \
|
||||
_get_returning_modifiers(compiler, stmt)
|
||||
|
||||
if stmt._parameter_ordering:
|
||||
parameter_ordering = [
|
||||
_column_as_key(key) for key in stmt._parameter_ordering
|
||||
]
|
||||
ordered_keys = set(parameter_ordering)
|
||||
cols = [
|
||||
stmt.table.c[key] for key in parameter_ordering
|
||||
] + [
|
||||
c for c in stmt.table.c if c.key not in ordered_keys
|
||||
]
|
||||
else:
|
||||
cols = stmt.table.columns
|
||||
|
||||
for c in cols:
|
||||
col_key = _getattr_col_key(c)
|
||||
|
||||
if col_key in parameters and col_key not in check_columns:
|
||||
|
||||
_append_param_parameter(
|
||||
compiler, stmt, c, col_key, parameters, _col_bind_name,
|
||||
implicit_returning, implicit_return_defaults, values, kw)
|
||||
|
||||
elif compiler.isinsert:
|
||||
if c.primary_key and \
|
||||
need_pks and \
|
||||
(
|
||||
implicit_returning or
|
||||
not postfetch_lastrowid or
|
||||
c is not stmt.table._autoincrement_column
|
||||
):
|
||||
|
||||
if implicit_returning:
|
||||
_append_param_insert_pk_returning(
|
||||
compiler, stmt, c, values, kw)
|
||||
else:
|
||||
_append_param_insert_pk(compiler, stmt, c, values, kw)
|
||||
|
||||
elif c.default is not None:
|
||||
|
||||
_append_param_insert_hasdefault(
|
||||
compiler, stmt, c, implicit_return_defaults,
|
||||
values, kw)
|
||||
|
||||
elif c.server_default is not None:
|
||||
if implicit_return_defaults and \
|
||||
c in implicit_return_defaults:
|
||||
compiler.returning.append(c)
|
||||
elif not c.primary_key:
|
||||
compiler.postfetch.append(c)
|
||||
elif implicit_return_defaults and \
|
||||
c in implicit_return_defaults:
|
||||
compiler.returning.append(c)
|
||||
elif c.primary_key and \
|
||||
c is not stmt.table._autoincrement_column and \
|
||||
not c.nullable:
|
||||
_warn_pk_with_no_anticipated_value(c)
|
||||
|
||||
elif compiler.isupdate:
|
||||
_append_param_update(
|
||||
compiler, stmt, c, implicit_return_defaults, values, kw)
|
||||
|
||||
|
||||
def _append_param_parameter(
|
||||
compiler, stmt, c, col_key, parameters, _col_bind_name,
|
||||
implicit_returning, implicit_return_defaults, values, kw):
|
||||
value = parameters.pop(col_key)
|
||||
if elements._is_literal(value):
|
||||
value = _create_bind_param(
|
||||
compiler, c, value, required=value is REQUIRED,
|
||||
name=_col_bind_name(c)
|
||||
if not stmt._has_multi_parameters
|
||||
else "%s_m0" % _col_bind_name(c),
|
||||
**kw
|
||||
)
|
||||
else:
|
||||
if isinstance(value, elements.BindParameter) and \
|
||||
value.type._isnull:
|
||||
value = value._clone()
|
||||
value.type = c.type
|
||||
|
||||
if c.primary_key and implicit_returning:
|
||||
compiler.returning.append(c)
|
||||
value = compiler.process(value.self_group(), **kw)
|
||||
elif implicit_return_defaults and \
|
||||
c in implicit_return_defaults:
|
||||
compiler.returning.append(c)
|
||||
value = compiler.process(value.self_group(), **kw)
|
||||
else:
|
||||
compiler.postfetch.append(c)
|
||||
value = compiler.process(value.self_group(), **kw)
|
||||
values.append((c, value))
|
||||
|
||||
|
||||
def _append_param_insert_pk_returning(compiler, stmt, c, values, kw):
|
||||
"""Create a primary key expression in the INSERT statement and
|
||||
possibly a RETURNING clause for it.
|
||||
|
||||
If the column has a Python-side default, we will create a bound
|
||||
parameter for it and "pre-execute" the Python function. If
|
||||
the column has a SQL expression default, or is a sequence,
|
||||
we will add it directly into the INSERT statement and add a
|
||||
RETURNING element to get the new value. If the column has a
|
||||
server side default or is marked as the "autoincrement" column,
|
||||
we will add a RETURNING element to get at the value.
|
||||
|
||||
If all the above tests fail, that indicates a primary key column with no
|
||||
noted default generation capabilities that has no parameter passed;
|
||||
raise an exception.
|
||||
|
||||
"""
|
||||
if c.default is not None:
|
||||
if c.default.is_sequence:
|
||||
if compiler.dialect.supports_sequences and \
|
||||
(not c.default.optional or
|
||||
not compiler.dialect.sequences_optional):
|
||||
proc = compiler.process(c.default, **kw)
|
||||
values.append((c, proc))
|
||||
compiler.returning.append(c)
|
||||
elif c.default.is_clause_element:
|
||||
values.append(
|
||||
(c, compiler.process(
|
||||
c.default.arg.self_group(), **kw))
|
||||
)
|
||||
compiler.returning.append(c)
|
||||
else:
|
||||
values.append(
|
||||
(c, _create_insert_prefetch_bind_param(compiler, c))
|
||||
)
|
||||
elif c is stmt.table._autoincrement_column or c.server_default is not None:
|
||||
compiler.returning.append(c)
|
||||
elif not c.nullable:
|
||||
# no .default, no .server_default, not autoincrement, we have
|
||||
# no indication this primary key column will have any value
|
||||
_warn_pk_with_no_anticipated_value(c)
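# Illustrative note (editorial addition, not in the original source): on a
# backend with RETURNING support (e.g. PostgreSQL), a table defined as
#
#     Table('t', m, Column('id', Integer, primary_key=True),
#           Column('x', Integer))
#
# compiles t.insert().values(x=5) to roughly
#
#     INSERT INTO t (x) VALUES (%(x)s) RETURNING t.id
#
# the primary key value being fetched via RETURNING rather than pre-executed.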
|
||||
|
||||
|
||||
def _create_insert_prefetch_bind_param(compiler, c, process=True, name=None):
|
||||
param = _create_bind_param(compiler, c, None, process=process, name=name)
|
||||
compiler.insert_prefetch.append(c)
|
||||
return param
|
||||
|
||||
|
||||
def _create_update_prefetch_bind_param(compiler, c, process=True, name=None):
|
||||
param = _create_bind_param(compiler, c, None, process=process, name=name)
|
||||
compiler.update_prefetch.append(c)
|
||||
return param
|
||||
|
||||
|
||||
class _multiparam_column(elements.ColumnElement):
|
||||
def __init__(self, original, index):
|
||||
self.key = "%s_m%d" % (original.key, index + 1)
|
||||
self.original = original
|
||||
self.default = original.default
|
||||
self.type = original.type
|
||||
|
||||
def __eq__(self, other):
|
||||
return isinstance(other, _multiparam_column) and \
|
||||
other.key == self.key and \
|
||||
other.original == self.original
|
||||
|
||||
|
||||
def _process_multiparam_default_bind(compiler, stmt, c, index, kw):
|
||||
|
||||
if not c.default:
|
||||
raise exc.CompileError(
|
||||
"INSERT value for column %s is explicitly rendered as a bound"
|
||||
"parameter in the VALUES clause; "
|
||||
"a Python-side value or SQL expression is required" % c)
|
||||
elif c.default.is_clause_element:
|
||||
return compiler.process(c.default.arg.self_group(), **kw)
|
||||
else:
|
||||
col = _multiparam_column(c, index)
|
||||
if isinstance(stmt, dml.Insert):
|
||||
return _create_insert_prefetch_bind_param(compiler, col)
|
||||
else:
|
||||
return _create_update_prefetch_bind_param(compiler, col)
|
||||
|
||||
|
||||
def _append_param_insert_pk(compiler, stmt, c, values, kw):
|
||||
"""Create a bound parameter in the INSERT statement to receive a
|
||||
'prefetched' default value.
|
||||
|
||||
The 'prefetched' value indicates that we are to invoke a Python-side
|
||||
default function or explicit SQL expression before the INSERT statement
|
||||
proceeds, so that we have a primary key value available.
|
||||
|
||||
if the column has no noted default generation capabilities, it has
|
||||
no value passed in either; raise an exception.
|
||||
|
||||
"""
|
||||
if (
|
||||
(
|
||||
# column has a Python-side default
|
||||
c.default is not None and
|
||||
(
|
||||
# and it won't be a Sequence
|
||||
not c.default.is_sequence or
|
||||
compiler.dialect.supports_sequences
|
||||
)
|
||||
)
|
||||
or
|
||||
(
|
||||
# column is the "autoincrement column"
|
||||
c is stmt.table._autoincrement_column and
|
||||
(
|
||||
# and it's either a "sequence" or a
|
||||
# pre-executable "autoincrement" sequence
|
||||
compiler.dialect.supports_sequences or
|
||||
compiler.dialect.preexecute_autoincrement_sequences
|
||||
)
|
||||
)
|
||||
):
|
||||
values.append(
|
||||
(c, _create_insert_prefetch_bind_param(compiler, c))
|
||||
)
|
||||
elif c.default is None and c.server_default is None and not c.nullable:
|
||||
# no .default, no .server_default, not autoincrement, we have
|
||||
# no indication this primary key column will have any value
|
||||
_warn_pk_with_no_anticipated_value(c)
|
||||
|
||||
|
||||
def _append_param_insert_hasdefault(
|
||||
compiler, stmt, c, implicit_return_defaults, values, kw):
|
||||
|
||||
if c.default.is_sequence:
|
||||
if compiler.dialect.supports_sequences and \
|
||||
(not c.default.optional or
|
||||
not compiler.dialect.sequences_optional):
|
||||
proc = compiler.process(c.default, **kw)
|
||||
values.append((c, proc))
|
||||
if implicit_return_defaults and \
|
||||
c in implicit_return_defaults:
|
||||
compiler.returning.append(c)
|
||||
elif not c.primary_key:
|
||||
compiler.postfetch.append(c)
|
||||
elif c.default.is_clause_element:
|
||||
proc = compiler.process(c.default.arg.self_group(), **kw)
|
||||
values.append((c, proc))
|
||||
|
||||
if implicit_return_defaults and \
|
||||
c in implicit_return_defaults:
|
||||
compiler.returning.append(c)
|
||||
elif not c.primary_key:
|
||||
# don't add primary key column to postfetch
|
||||
compiler.postfetch.append(c)
|
||||
else:
|
||||
values.append(
|
||||
(c, _create_insert_prefetch_bind_param(compiler, c))
|
||||
)
|
||||
|
||||
|
||||
def _append_param_insert_select_hasdefault(
|
||||
compiler, stmt, c, values, kw):
|
||||
|
||||
if c.default.is_sequence:
|
||||
if compiler.dialect.supports_sequences and \
|
||||
(not c.default.optional or
|
||||
not compiler.dialect.sequences_optional):
|
||||
proc = c.default
|
||||
values.append((c, proc.next_value()))
|
||||
elif c.default.is_clause_element:
|
||||
proc = c.default.arg.self_group()
|
||||
values.append((c, proc))
|
||||
else:
|
||||
values.append(
|
||||
(c, _create_insert_prefetch_bind_param(compiler, c, process=False))
|
||||
)
|
||||
|
||||
|
||||
def _append_param_update(
|
||||
compiler, stmt, c, implicit_return_defaults, values, kw):
|
||||
|
||||
if c.onupdate is not None and not c.onupdate.is_sequence:
|
||||
if c.onupdate.is_clause_element:
|
||||
values.append(
|
||||
(c, compiler.process(
|
||||
c.onupdate.arg.self_group(), **kw))
|
||||
)
|
||||
if implicit_return_defaults and \
|
||||
c in implicit_return_defaults:
|
||||
compiler.returning.append(c)
|
||||
else:
|
||||
compiler.postfetch.append(c)
|
||||
else:
|
||||
values.append(
|
||||
(c, _create_update_prefetch_bind_param(compiler, c))
|
||||
)
|
||||
elif c.server_onupdate is not None:
|
||||
if implicit_return_defaults and \
|
||||
c in implicit_return_defaults:
|
||||
compiler.returning.append(c)
|
||||
else:
|
||||
compiler.postfetch.append(c)
|
||||
elif implicit_return_defaults and \
|
||||
stmt._return_defaults is not True and \
|
||||
c in implicit_return_defaults:
|
||||
compiler.returning.append(c)
|
||||
|
||||
|
||||
def _get_multitable_params(
|
||||
compiler, stmt, stmt_parameters, check_columns,
|
||||
_col_bind_name, _getattr_col_key, values, kw):
|
||||
|
||||
normalized_params = dict(
|
||||
(elements._clause_element_as_expr(c), param)
|
||||
for c, param in stmt_parameters.items()
|
||||
)
|
||||
affected_tables = set()
|
||||
for t in stmt._extra_froms:
|
||||
for c in t.c:
|
||||
if c in normalized_params:
|
||||
affected_tables.add(t)
|
||||
check_columns[_getattr_col_key(c)] = c
|
||||
value = normalized_params[c]
|
||||
if elements._is_literal(value):
|
||||
value = _create_bind_param(
|
||||
compiler, c, value, required=value is REQUIRED,
|
||||
name=_col_bind_name(c))
|
||||
else:
|
||||
compiler.postfetch.append(c)
|
||||
value = compiler.process(value.self_group(), **kw)
|
||||
values.append((c, value))
|
||||
# determine tables which are actually to be updated - process onupdate
|
||||
# and server_onupdate for these
|
||||
for t in affected_tables:
|
||||
for c in t.c:
|
||||
if c in normalized_params:
|
||||
continue
|
||||
elif (c.onupdate is not None and not
|
||||
c.onupdate.is_sequence):
|
||||
if c.onupdate.is_clause_element:
|
||||
values.append(
|
||||
(c, compiler.process(
|
||||
c.onupdate.arg.self_group(),
|
||||
**kw)
|
||||
)
|
||||
)
|
||||
compiler.postfetch.append(c)
|
||||
else:
|
||||
values.append(
|
||||
(c, _create_update_prefetch_bind_param(
|
||||
compiler, c, name=_col_bind_name(c)))
|
||||
)
|
||||
elif c.server_onupdate is not None:
|
||||
compiler.postfetch.append(c)
|
||||
|
||||
|
||||
def _extend_values_for_multiparams(compiler, stmt, values, kw):
|
||||
values_0 = values
|
||||
values = [values]
|
||||
|
||||
values.extend(
|
||||
[
|
||||
(
|
||||
c,
|
||||
(_create_bind_param(
|
||||
compiler, c, row[c.key],
|
||||
name="%s_m%d" % (c.key, i + 1), **kw
|
||||
) if elements._is_literal(row[c.key])
|
||||
else compiler.process(
|
||||
row[c.key].self_group(), **kw))
|
||||
if c.key in row else
|
||||
_process_multiparam_default_bind(compiler, stmt, c, i, kw)
|
||||
)
|
||||
for (c, param) in values_0
|
||||
]
|
||||
for i, row in enumerate(stmt.parameters[1:])
|
||||
)
|
||||
return values
|
||||
|
||||
|
||||
def _get_stmt_parameters_params(
|
||||
compiler, parameters, stmt_parameters, _column_as_key, values, kw):
|
||||
for k, v in stmt_parameters.items():
|
||||
colkey = _column_as_key(k)
|
||||
if colkey is not None:
|
||||
parameters.setdefault(colkey, v)
|
||||
else:
|
||||
# a non-Column expression on the left side;
|
||||
# add it to values() in an "as-is" state,
|
||||
# coercing right side to bound param
|
||||
if elements._is_literal(v):
|
||||
v = compiler.process(
|
||||
elements.BindParameter(None, v, type_=k.type),
|
||||
**kw)
|
||||
else:
|
||||
v = compiler.process(v.self_group(), **kw)
|
||||
|
||||
values.append((k, v))
|
||||
|
||||
|
||||
def _get_returning_modifiers(compiler, stmt):
|
||||
need_pks = compiler.isinsert and \
|
||||
not compiler.inline and \
|
||||
not stmt._returning and \
|
||||
not stmt._has_multi_parameters
|
||||
|
||||
implicit_returning = need_pks and \
|
||||
compiler.dialect.implicit_returning and \
|
||||
stmt.table.implicit_returning
|
||||
|
||||
if compiler.isinsert:
|
||||
implicit_return_defaults = (implicit_returning and
|
||||
stmt._return_defaults)
|
||||
elif compiler.isupdate:
|
||||
implicit_return_defaults = (compiler.dialect.implicit_returning and
|
||||
stmt.table.implicit_returning and
|
||||
stmt._return_defaults)
|
||||
else:
|
||||
# this line is unused, currently we are always
|
||||
# isinsert or isupdate
|
||||
implicit_return_defaults = False # pragma: no cover
|
||||
|
||||
if implicit_return_defaults:
|
||||
if stmt._return_defaults is True:
|
||||
implicit_return_defaults = set(stmt.table.c)
|
||||
else:
|
||||
implicit_return_defaults = set(stmt._return_defaults)
|
||||
|
||||
postfetch_lastrowid = need_pks and compiler.dialect.postfetch_lastrowid
|
||||
|
||||
return need_pks, implicit_returning, \
|
||||
implicit_return_defaults, postfetch_lastrowid
|
||||
|
||||
|
||||
def _warn_pk_with_no_anticipated_value(c):
|
||||
msg = (
|
||||
"Column '%s.%s' is marked as a member of the "
|
||||
"primary key for table '%s', "
|
||||
"but has no Python-side or server-side default generator indicated, "
|
||||
"nor does it indicate 'autoincrement=True' or 'nullable=True', "
|
||||
"and no explicit value is passed. "
|
||||
"Primary key columns typically may not store NULL."
|
||||
%
|
||||
(c.table.fullname, c.name, c.table.fullname))
|
||||
if len(c.table.primary_key) > 1:
|
||||
msg += (
|
||||
" Note that as of SQLAlchemy 1.1, 'autoincrement=True' must be "
|
||||
"indicated explicitly for composite (e.g. multicolumn) primary "
|
||||
"keys if AUTO_INCREMENT/SERIAL/IDENTITY "
|
||||
"behavior is expected for one of the columns in the primary key. "
|
||||
"CREATE TABLE statements are impacted by this change as well on "
|
||||
"most backends.")
|
||||
util.warn(msg)
|
||||
sqlalchemy/sql/ddl.py (new file, 1100 lines)
File diff suppressed because it is too large

sqlalchemy/sql/default_comparator.py (new file, 308 lines)
@@ -0,0 +1,308 @@
|
||||
# sql/default_comparator.py
|
||||
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Default implementation of SQL comparison operations.
|
||||
"""
|
||||
|
||||
from .. import exc, util
|
||||
from . import type_api
|
||||
from . import operators
|
||||
from .elements import BindParameter, True_, False_, BinaryExpression, \
|
||||
Null, _const_expr, _clause_element_as_expr, \
|
||||
ClauseList, ColumnElement, TextClause, UnaryExpression, \
|
||||
collate, _is_literal, _literal_as_text, ClauseElement, and_, or_, \
|
||||
Slice, Visitable, _literal_as_binds
|
||||
from .selectable import SelectBase, Alias, Selectable, ScalarSelect
|
||||
|
||||
|
||||
def _boolean_compare(expr, op, obj, negate=None, reverse=False,
|
||||
_python_is_types=(util.NoneType, bool),
|
||||
result_type=None,
|
||||
**kwargs):
|
||||
|
||||
if result_type is None:
|
||||
result_type = type_api.BOOLEANTYPE
|
||||
|
||||
if isinstance(obj, _python_is_types + (Null, True_, False_)):
|
||||
|
||||
# allow x ==/!= True/False to be treated as a literal.
|
||||
# this comes out to "== / != true/false" or "1/0" if those
|
||||
# constants aren't supported and works on all platforms
|
||||
if op in (operators.eq, operators.ne) and \
|
||||
isinstance(obj, (bool, True_, False_)):
|
||||
return BinaryExpression(expr,
|
||||
_literal_as_text(obj),
|
||||
op,
|
||||
type_=result_type,
|
||||
negate=negate, modifiers=kwargs)
|
||||
elif op in (operators.is_distinct_from, operators.isnot_distinct_from):
|
||||
return BinaryExpression(expr,
|
||||
_literal_as_text(obj),
|
||||
op,
|
||||
type_=result_type,
|
||||
negate=negate, modifiers=kwargs)
|
||||
else:
|
||||
# all other None/True/False uses IS, IS NOT
|
||||
if op in (operators.eq, operators.is_):
|
||||
return BinaryExpression(expr, _const_expr(obj),
|
||||
operators.is_,
|
||||
negate=operators.isnot)
|
||||
elif op in (operators.ne, operators.isnot):
|
||||
return BinaryExpression(expr, _const_expr(obj),
|
||||
operators.isnot,
|
||||
negate=operators.is_)
|
||||
else:
|
||||
raise exc.ArgumentError(
|
||||
"Only '=', '!=', 'is_()', 'isnot()', "
|
||||
"'is_distinct_from()', 'isnot_distinct_from()' "
|
||||
"operators can be used with None/True/False")
|
||||
else:
|
||||
obj = _check_literal(expr, op, obj)
|
||||
|
||||
if reverse:
|
||||
return BinaryExpression(obj,
|
||||
expr,
|
||||
op,
|
||||
type_=result_type,
|
||||
negate=negate, modifiers=kwargs)
|
||||
else:
|
||||
return BinaryExpression(expr,
|
||||
obj,
|
||||
op,
|
||||
type_=result_type,
|
||||
negate=negate, modifiers=kwargs)
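# Illustrative note (editorial addition, not in the original source) of the
# dispatch above, for an ordinary column('x'):
#
#     column('x') == None    ->  "x IS NULL"
#     column('x') != None    ->  "x IS NOT NULL"
#     column('x') == True    ->  "x = true"   (or "x = 1" on dialects without
#                                              boolean literals)
#     column('x') == 5       ->  "x = :x_1"   (right side coerced to a bind)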
|
||||
|
||||
|
||||
def _binary_operate(expr, op, obj, reverse=False, result_type=None,
|
||||
**kw):
|
||||
obj = _check_literal(expr, op, obj)
|
||||
|
||||
if reverse:
|
||||
left, right = obj, expr
|
||||
else:
|
||||
left, right = expr, obj
|
||||
|
||||
if result_type is None:
|
||||
op, result_type = left.comparator._adapt_expression(
|
||||
op, right.comparator)
|
||||
|
||||
return BinaryExpression(
|
||||
left, right, op, type_=result_type, modifiers=kw)
|
||||
|
||||
|
||||
def _conjunction_operate(expr, op, other, **kw):
|
||||
if op is operators.and_:
|
||||
return and_(expr, other)
|
||||
elif op is operators.or_:
|
||||
return or_(expr, other)
|
||||
else:
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
def _scalar(expr, op, fn, **kw):
|
||||
return fn(expr)
|
||||
|
||||
|
||||
def _in_impl(expr, op, seq_or_selectable, negate_op, **kw):
|
||||
seq_or_selectable = _clause_element_as_expr(seq_or_selectable)
|
||||
|
||||
if isinstance(seq_or_selectable, ScalarSelect):
|
||||
return _boolean_compare(expr, op, seq_or_selectable,
|
||||
negate=negate_op)
|
||||
elif isinstance(seq_or_selectable, SelectBase):
|
||||
|
||||
# TODO: if we ever want to support (x, y, z) IN (select x,
|
||||
# y, z from table), we would need a multi-column version of
|
||||
# as_scalar() to produce a multi- column selectable that
|
||||
# does not export itself as a FROM clause
|
||||
|
||||
return _boolean_compare(
|
||||
expr, op, seq_or_selectable.as_scalar(),
|
||||
negate=negate_op, **kw)
|
||||
elif isinstance(seq_or_selectable, (Selectable, TextClause)):
|
||||
return _boolean_compare(expr, op, seq_or_selectable,
|
||||
negate=negate_op, **kw)
|
||||
elif isinstance(seq_or_selectable, ClauseElement):
|
||||
raise exc.InvalidRequestError(
|
||||
'in_() accepts'
|
||||
' either a list of expressions '
|
||||
'or a selectable: %r' % seq_or_selectable)
|
||||
|
||||
# Handle non selectable arguments as sequences
|
||||
args = []
|
||||
for o in seq_or_selectable:
|
||||
if not _is_literal(o):
|
||||
if not isinstance(o, operators.ColumnOperators):
|
||||
raise exc.InvalidRequestError(
|
||||
'in_() accepts'
|
||||
' either a list of expressions '
|
||||
'or a selectable: %r' % o)
|
||||
elif o is None:
|
||||
o = Null()
|
||||
else:
|
||||
o = expr._bind_param(op, o)
|
||||
args.append(o)
|
||||
if len(args) == 0:
|
||||
|
||||
# Special case handling for empty IN's, behave like
|
||||
# comparison against zero row selectable. We use != to
|
||||
# build the contradiction as it handles NULL values
|
||||
# appropriately, i.e. "not (x IN ())" should not return NULL
|
||||
# values for x.
|
||||
|
||||
util.warn('The IN-predicate on "%s" was invoked with an '
|
||||
'empty sequence. This results in a '
|
||||
'contradiction, which nonetheless can be '
|
||||
'expensive to evaluate. Consider alternative '
|
||||
'strategies for improved performance.' % expr)
|
||||
if op is operators.in_op:
|
||||
return expr != expr
|
||||
else:
|
||||
return expr == expr
|
||||
|
||||
return _boolean_compare(expr, op,
|
||||
ClauseList(*args).self_group(against=op),
|
||||
negate=negate_op)
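
# Illustrative usage sketch (not part of the original module) of the IN
# handling above, including the empty-sequence special case.  The column
# name "x" is made up.
from sqlalchemy.sql import column

print(column('x').in_([1, 2, 3]))   # x IN (:x_1, :x_2, :x_3)

# An empty sequence emits a warning and renders a contradiction ("x != x")
# so that the expression and its negation behave sensibly around NULLs.
print(column('x').in_([]))          # x != x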
def _getitem_impl(expr, op, other, **kw):
|
||||
if isinstance(expr.type, type_api.INDEXABLE):
|
||||
other = _check_literal(expr, op, other)
|
||||
return _binary_operate(expr, op, other, **kw)
|
||||
else:
|
||||
_unsupported_impl(expr, op, other, **kw)
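
# Illustrative sketch (not part of the original module): only "indexable"
# types (JSON, ARRAY and friends) accept the getitem operator; anything else
# falls through to _unsupported_impl.  Assumes SQLAlchemy 1.1+ for the JSON
# type; names are made up.
from sqlalchemy import JSON
from sqlalchemy.sql import column

doc = column('doc', JSON)
element = doc['name']          # allowed: JSON is an Indexable type

try:
    column('x')[0]             # plain NullType column: not indexable
except NotImplementedError as err:
    print(err)                 # Operator 'getitem' is not supported on this expression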
def _unsupported_impl(expr, op, *arg, **kw):
|
||||
raise NotImplementedError("Operator '%s' is not supported on "
|
||||
"this expression" % op.__name__)
|
||||
|
||||
|
||||
def _inv_impl(expr, op, **kw):
|
||||
"""See :meth:`.ColumnOperators.__inv__`."""
|
||||
if hasattr(expr, 'negation_clause'):
|
||||
return expr.negation_clause
|
||||
else:
|
||||
return expr._negate()
|
||||
|
||||
|
||||
def _neg_impl(expr, op, **kw):
|
||||
"""See :meth:`.ColumnOperators.__neg__`."""
|
||||
return UnaryExpression(expr, operator=operators.neg, type_=expr.type)
|
||||
|
||||
|
||||
def _match_impl(expr, op, other, **kw):
|
||||
"""See :meth:`.ColumnOperators.match`."""
|
||||
|
||||
return _boolean_compare(
|
||||
expr, operators.match_op,
|
||||
_check_literal(
|
||||
expr, operators.match_op, other),
|
||||
result_type=type_api.MATCHTYPE,
|
||||
negate=operators.notmatch_op
|
||||
if op is operators.match_op else operators.match_op,
|
||||
**kw
|
||||
)
|
||||
|
||||
|
||||
def _distinct_impl(expr, op, **kw):
|
||||
"""See :meth:`.ColumnOperators.distinct`."""
|
||||
return UnaryExpression(expr, operator=operators.distinct_op,
|
||||
type_=expr.type)
|
||||
|
||||
|
||||
def _between_impl(expr, op, cleft, cright, **kw):
|
||||
"""See :meth:`.ColumnOperators.between`."""
|
||||
return BinaryExpression(
|
||||
expr,
|
||||
ClauseList(
|
||||
_check_literal(expr, operators.and_, cleft),
|
||||
_check_literal(expr, operators.and_, cright),
|
||||
operator=operators.and_,
|
||||
group=False, group_contents=False),
|
||||
op,
|
||||
negate=operators.notbetween_op
|
||||
if op is operators.between_op
|
||||
else operators.between_op,
|
||||
modifiers=kw)
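
# Illustrative usage sketch (not part of the original module): between() and
# its negation as produced by the negate= pairing above.  The column name is
# made up.
from sqlalchemy.sql import column

crit = column('x').between(5, 10)
print(crit)     # x BETWEEN :x_1 AND :x_2
print(~crit)    # x NOT BETWEEN :x_1 AND :x_2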
def _collate_impl(expr, op, other, **kw):
|
||||
return collate(expr, other)
|
||||
|
||||
# a mapping of operators with the method they use, along with
|
||||
# their negated operator for comparison operators
|
||||
operator_lookup = {
|
||||
"and_": (_conjunction_operate,),
|
||||
"or_": (_conjunction_operate,),
|
||||
"inv": (_inv_impl,),
|
||||
"add": (_binary_operate,),
|
||||
"mul": (_binary_operate,),
|
||||
"sub": (_binary_operate,),
|
||||
"div": (_binary_operate,),
|
||||
"mod": (_binary_operate,),
|
||||
"truediv": (_binary_operate,),
|
||||
"custom_op": (_binary_operate,),
|
||||
"json_path_getitem_op": (_binary_operate, ),
|
||||
"json_getitem_op": (_binary_operate, ),
|
||||
"concat_op": (_binary_operate,),
|
||||
"lt": (_boolean_compare, operators.ge),
|
||||
"le": (_boolean_compare, operators.gt),
|
||||
"ne": (_boolean_compare, operators.eq),
|
||||
"gt": (_boolean_compare, operators.le),
|
||||
"ge": (_boolean_compare, operators.lt),
|
||||
"eq": (_boolean_compare, operators.ne),
|
||||
"is_distinct_from": (_boolean_compare, operators.isnot_distinct_from),
|
||||
"isnot_distinct_from": (_boolean_compare, operators.is_distinct_from),
|
||||
"like_op": (_boolean_compare, operators.notlike_op),
|
||||
"ilike_op": (_boolean_compare, operators.notilike_op),
|
||||
"notlike_op": (_boolean_compare, operators.like_op),
|
||||
"notilike_op": (_boolean_compare, operators.ilike_op),
|
||||
"contains_op": (_boolean_compare, operators.notcontains_op),
|
||||
"startswith_op": (_boolean_compare, operators.notstartswith_op),
|
||||
"endswith_op": (_boolean_compare, operators.notendswith_op),
|
||||
"desc_op": (_scalar, UnaryExpression._create_desc),
|
||||
"asc_op": (_scalar, UnaryExpression._create_asc),
|
||||
"nullsfirst_op": (_scalar, UnaryExpression._create_nullsfirst),
|
||||
"nullslast_op": (_scalar, UnaryExpression._create_nullslast),
|
||||
"in_op": (_in_impl, operators.notin_op),
|
||||
"notin_op": (_in_impl, operators.in_op),
|
||||
"is_": (_boolean_compare, operators.is_),
|
||||
"isnot": (_boolean_compare, operators.isnot),
|
||||
"collate": (_collate_impl,),
|
||||
"match_op": (_match_impl,),
|
||||
"notmatch_op": (_match_impl,),
|
||||
"distinct_op": (_distinct_impl,),
|
||||
"between_op": (_between_impl, ),
|
||||
"notbetween_op": (_between_impl, ),
|
||||
"neg": (_neg_impl,),
|
||||
"getitem": (_getitem_impl,),
|
||||
"lshift": (_unsupported_impl,),
|
||||
"rshift": (_unsupported_impl,),
|
||||
"contains": (_unsupported_impl,),
|
||||
}
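
# Rough sketch (not part of the original module) of how this table is meant
# to be consumed: the operator's __name__ selects the implementation, and any
# trailing entries (typically the negation operator for comparisons) are
# appended to the call.  The function name ``_dispatch`` is hypothetical; the
# actual dispatch lives in the comparator's operate()/reverse_operate().
def _dispatch(expr, op, *other, **kw):
    impl = operator_lookup[op.__name__]
    return impl[0](expr, op, *(other + impl[1:]), **kw)

# e.g. for "eq" this calls _boolean_compare(expr, operators.eq, other,
# operators.ne), wiring in the negation used when the expression is inverted.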
def _check_literal(expr, operator, other, bindparam_type=None):
|
||||
if isinstance(other, (ColumnElement, TextClause)):
|
||||
if isinstance(other, BindParameter) and \
|
||||
other.type._isnull:
|
||||
other = other._clone()
|
||||
other.type = expr.type
|
||||
return other
|
||||
elif hasattr(other, '__clause_element__'):
|
||||
other = other.__clause_element__()
|
||||
elif isinstance(other, type_api.TypeEngine.Comparator):
|
||||
other = other.expr
|
||||
|
||||
if isinstance(other, (SelectBase, Alias)):
|
||||
return other.as_scalar()
|
||||
elif not isinstance(other, Visitable):
|
||||
return expr._bind_param(operator, other, type_=bindparam_type)
|
||||
else:
|
||||
return other
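
# Illustrative usage sketch (not part of the original module): plain Python
# values compared against a typed column are coerced into bind parameters
# that adopt the column's type.  Names are made up.
from sqlalchemy import Integer
from sqlalchemy.sql import column

criterion = column('x', Integer) == 5
print(type(criterion.right).__name__)   # BindParameter
print(criterion.right.type)             # INTEGER  -- taken from the column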
851
sqlalchemy/sql/dml.py
Normal file
@@ -0,0 +1,851 @@
|
||||
# sql/dml.py
|
||||
# Copyright (C) 2009-2017 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
"""
|
||||
Provide :class:`.Insert`, :class:`.Update` and :class:`.Delete`.
|
||||
|
||||
"""
|
||||
|
||||
from .base import Executable, _generative, _from_objects, DialectKWArgs, \
|
||||
ColumnCollection
|
||||
from .elements import ClauseElement, _literal_as_text, Null, and_, _clone, \
|
||||
_column_as_key
|
||||
from .selectable import _interpret_as_from, _interpret_as_select, \
|
||||
HasPrefixes, HasCTE
|
||||
from .. import util
|
||||
from .. import exc
|
||||
|
||||
|
||||
class UpdateBase(
|
||||
HasCTE, DialectKWArgs, HasPrefixes, Executable, ClauseElement):
|
||||
"""Form the base for ``INSERT``, ``UPDATE``, and ``DELETE`` statements.
|
||||
|
||||
"""
|
||||
|
||||
__visit_name__ = 'update_base'
|
||||
|
||||
_execution_options = \
|
||||
Executable._execution_options.union({'autocommit': True})
|
||||
_hints = util.immutabledict()
|
||||
_parameter_ordering = None
|
||||
_prefixes = ()
|
||||
named_with_column = False
|
||||
|
||||
def _process_colparams(self, parameters):
|
||||
def process_single(p):
|
||||
if isinstance(p, (list, tuple)):
|
||||
return dict(
|
||||
(c.key, pval)
|
||||
for c, pval in zip(self.table.c, p)
|
||||
)
|
||||
else:
|
||||
return p
|
||||
|
||||
if self._preserve_parameter_order and parameters is not None:
|
||||
if not isinstance(parameters, list) or \
|
||||
(parameters and not isinstance(parameters[0], tuple)):
|
||||
raise ValueError(
|
||||
"When preserve_parameter_order is True, "
|
||||
"values() only accepts a list of 2-tuples")
|
||||
self._parameter_ordering = [key for key, value in parameters]
|
||||
|
||||
return dict(parameters), False
|
||||
|
||||
if (isinstance(parameters, (list, tuple)) and parameters and
|
||||
isinstance(parameters[0], (list, tuple, dict))):
|
||||
|
||||
if not self._supports_multi_parameters:
|
||||
raise exc.InvalidRequestError(
|
||||
"This construct does not support "
|
||||
"multiple parameter sets.")
|
||||
|
||||
return [process_single(p) for p in parameters], True
|
||||
else:
|
||||
return process_single(parameters), False
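
# Illustrative usage sketch (not part of the original module) of the
# parameter formats handled above; table and column names are made up.
from sqlalchemy import Table, Column, Integer, String, MetaData

meta = MetaData()
t = Table('t', meta,
          Column('id', Integer, primary_key=True),
          Column('a', Integer),
          Column('b', String))

# A plain tuple is zipped positionally against the table's columns:
ins = t.insert().values((1, 2, 'three'))

# With preserve_parameter_order, values() takes a list of 2-tuples and the
# rendered SET clause keeps exactly that order:
upd = t.update(preserve_parameter_order=True).values([('b', 'x'), ('a', 9)])
print(upd)    # UPDATE t SET b=:b, a=:a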
def params(self, *arg, **kw):
|
||||
"""Set the parameters for the statement.
|
||||
|
||||
This method raises ``NotImplementedError`` on the base class,
|
||||
and is overridden by :class:`.ValuesBase` to provide the
|
||||
SET/VALUES clause of UPDATE and INSERT.
|
||||
|
||||
"""
|
||||
raise NotImplementedError(
|
||||
"params() is not supported for INSERT/UPDATE/DELETE statements."
|
||||
" To set the values for an INSERT or UPDATE statement, use"
|
||||
" stmt.values(**parameters).")
|
||||
|
||||
def bind(self):
|
||||
"""Return a 'bind' linked to this :class:`.UpdateBase`
|
||||
or a :class:`.Table` associated with it.
|
||||
|
||||
"""
|
||||
return self._bind or self.table.bind
|
||||
|
||||
def _set_bind(self, bind):
|
||||
self._bind = bind
|
||||
bind = property(bind, _set_bind)
|
||||
|
||||
@_generative
|
||||
def returning(self, *cols):
|
||||
r"""Add a :term:`RETURNING` or equivalent clause to this statement.
|
||||
|
||||
e.g.::
|
||||
|
||||
stmt = table.update().\
|
||||
where(table.c.data == 'value').\
|
||||
values(status='X').\
|
||||
returning(table.c.server_flag,
|
||||
table.c.updated_timestamp)
|
||||
|
||||
for server_flag, updated_timestamp in connection.execute(stmt):
|
||||
print(server_flag, updated_timestamp)
|
||||
|
||||
The given collection of column expressions should be derived from
|
||||
the table that is
|
||||
the target of the INSERT, UPDATE, or DELETE. While :class:`.Column`
|
||||
objects are typical, the elements can also be expressions::
|
||||
|
||||
stmt = table.insert().returning(
|
||||
(table.c.first_name + " " + table.c.last_name).
|
||||
label('fullname'))
|
||||
|
||||
Upon compilation, a RETURNING clause, or database equivalent,
|
||||
will be rendered within the statement. For INSERT and UPDATE,
|
||||
the values are the newly inserted/updated values. For DELETE,
|
||||
the values are those of the rows which were deleted.
|
||||
|
||||
Upon execution, the values of the columns to be returned are made
|
||||
available via the result set and can be iterated using
|
||||
:meth:`.ResultProxy.fetchone` and similar. For DBAPIs which do not
|
||||
natively support returning values (i.e. cx_oracle), SQLAlchemy will
|
||||
approximate this behavior at the result level so that a reasonable
|
||||
amount of behavioral neutrality is provided.
|
||||
|
||||
Note that not all databases/DBAPIs
|
||||
support RETURNING. For those backends with no support,
|
||||
an exception is raised upon compilation and/or execution.
|
||||
For those who do support it, the functionality across backends
|
||||
varies greatly, including restrictions on executemany()
|
||||
and other statements which return multiple rows. Please
|
||||
read the documentation notes for the database in use in
|
||||
order to determine the availability of RETURNING.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:meth:`.ValuesBase.return_defaults` - an alternative method tailored
|
||||
towards efficient fetching of server-side defaults and triggers
|
||||
for single-row INSERTs or UPDATEs.
|
||||
|
||||
|
||||
"""
|
||||
self._returning = cols
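
# Illustrative sketch (not part of the original module): RETURNING on a
# DELETE, compiled against a backend that supports it.  Table and column
# names are made up.
from sqlalchemy import Table, Column, Integer, Boolean, MetaData
from sqlalchemy.dialects import postgresql

meta = MetaData()
users = Table('users', meta,
              Column('id', Integer, primary_key=True),
              Column('active', Boolean))

stmt = users.delete().where(users.c.active == False).returning(users.c.id)
print(stmt.compile(dialect=postgresql.dialect()))
# roughly: DELETE FROM users WHERE users.active = false RETURNING users.id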
@_generative
|
||||
def with_hint(self, text, selectable=None, dialect_name="*"):
|
||||
"""Add a table hint for a single table to this
|
||||
INSERT/UPDATE/DELETE statement.
|
||||
|
||||
.. note::
|
||||
|
||||
:meth:`.UpdateBase.with_hint` currently applies only to
|
||||
Microsoft SQL Server. For MySQL INSERT/UPDATE/DELETE hints, use
|
||||
:meth:`.UpdateBase.prefix_with`.
|
||||
|
||||
The text of the hint is rendered in the appropriate
|
||||
location for the database backend in use, relative
|
||||
to the :class:`.Table` that is the subject of this
|
||||
statement, or optionally to that of the given
|
||||
:class:`.Table` passed as the ``selectable`` argument.
|
||||
|
||||
The ``dialect_name`` option will limit the rendering of a particular
|
||||
hint to a particular backend. For example, to add a hint
|
||||
that only takes effect for SQL Server::
|
||||
|
||||
mytable.insert().with_hint("WITH (PAGLOCK)", dialect_name="mssql")
|
||||
|
||||
.. versionadded:: 0.7.6
|
||||
|
||||
:param text: Text of the hint.
|
||||
:param selectable: optional :class:`.Table` that specifies
|
||||
an element of the FROM clause within an UPDATE or DELETE
|
||||
to be the subject of the hint - applies only to certain backends.
|
||||
:param dialect_name: defaults to ``*``, if specified as the name
|
||||
of a particular dialect, will apply these hints only when
|
||||
that dialect is in use.
|
||||
"""
|
||||
if selectable is None:
|
||||
selectable = self.table
|
||||
|
||||
self._hints = self._hints.union(
|
||||
{(selectable, dialect_name): text})
|
||||
|
||||
|
||||
class ValuesBase(UpdateBase):
|
||||
"""Supplies support for :meth:`.ValuesBase.values` to
|
||||
INSERT and UPDATE constructs."""
|
||||
|
||||
__visit_name__ = 'values_base'
|
||||
|
||||
_supports_multi_parameters = False
|
||||
_has_multi_parameters = False
|
||||
_preserve_parameter_order = False
|
||||
select = None
|
||||
_post_values_clause = None
|
||||
|
||||
def __init__(self, table, values, prefixes):
|
||||
self.table = _interpret_as_from(table)
|
||||
self.parameters, self._has_multi_parameters = \
|
||||
self._process_colparams(values)
|
||||
if prefixes:
|
||||
self._setup_prefixes(prefixes)
|
||||
|
||||
@_generative
|
||||
def values(self, *args, **kwargs):
|
||||
r"""specify a fixed VALUES clause for an INSERT statement, or the SET
|
||||
clause for an UPDATE.
|
||||
|
||||
Note that the :class:`.Insert` and :class:`.Update` constructs support
|
||||
per-execution time formatting of the VALUES and/or SET clauses,
|
||||
based on the arguments passed to :meth:`.Connection.execute`.
|
||||
However, the :meth:`.ValuesBase.values` method can be used to "fix" a
|
||||
particular set of parameters into the statement.
|
||||
|
||||
Multiple calls to :meth:`.ValuesBase.values` will produce a new
|
||||
construct, each one with the parameter list modified to include
|
||||
the new parameters sent. In the typical case of a single
|
||||
dictionary of parameters, the newly passed keys will replace
|
||||
the same keys in the previous construct. In the case of a list-based
|
||||
"multiple values" construct, each new list of values is extended
|
||||
onto the existing list of values.
|
||||
|
||||
:param \**kwargs: key value pairs representing the string key
|
||||
of a :class:`.Column` mapped to the value to be rendered into the
|
||||
VALUES or SET clause::
|
||||
|
||||
users.insert().values(name="some name")
|
||||
|
||||
users.update().where(users.c.id==5).values(name="some name")
|
||||
|
||||
:param \*args: As an alternative to passing key/value parameters,
|
||||
a dictionary, tuple, or list of dictionaries or tuples can be passed
|
||||
as a single positional argument in order to form the VALUES or
|
||||
SET clause of the statement. The forms that are accepted vary
|
||||
based on whether this is an :class:`.Insert` or an :class:`.Update`
|
||||
construct.
|
||||
|
||||
For either an :class:`.Insert` or :class:`.Update` construct, a
|
||||
single dictionary can be passed, which works the same as that of
|
||||
the kwargs form::
|
||||
|
||||
users.insert().values({"name": "some name"})
|
||||
|
||||
users.update().values({"name": "some new name"})
|
||||
|
||||
Also for either form but more typically for the :class:`.Insert`
|
||||
construct, a tuple that contains an entry for every column in the
|
||||
table is also accepted::
|
||||
|
||||
users.insert().values((5, "some name"))
|
||||
|
||||
The :class:`.Insert` construct also supports being passed a list
|
||||
of dictionaries or full-table-tuples, which on the server will
|
||||
render the less common SQL syntax of "multiple values" - this
|
||||
syntax is supported on backends such as SQLite, PostgreSQL, MySQL,
|
||||
but not necessarily others::
|
||||
|
||||
users.insert().values([
|
||||
{"name": "some name"},
|
||||
{"name": "some other name"},
|
||||
{"name": "yet another name"},
|
||||
])
|
||||
|
||||
The above form would render a multiple VALUES statement similar to::
|
||||
|
||||
INSERT INTO users (name) VALUES
|
||||
(:name_1),
|
||||
(:name_2),
|
||||
(:name_3)
|
||||
|
||||
It is essential to note that **passing multiple values is
|
||||
NOT the same as using traditional executemany() form**. The above
|
||||
syntax is a **special** syntax not typically used. To emit an
|
||||
INSERT statement against multiple rows, the normal method is
|
||||
to pass a multiple values list to the :meth:`.Connection.execute`
|
||||
method, which is supported by all database backends and is generally
|
||||
more efficient for a very large number of parameters.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`execute_multiple` - an introduction to
|
||||
the traditional Core method of multiple parameter set
|
||||
invocation for INSERTs and other statements.
|
||||
|
||||
.. versionchanged:: 1.0.0 an INSERT that uses a multiple-VALUES
|
||||
clause, even a list of length one,
|
||||
implies that the :paramref:`.Insert.inline` flag is set to
|
||||
True, indicating that the statement will not attempt to fetch
|
||||
the "last inserted primary key" or other defaults. The
|
||||
statement deals with an arbitrary number of rows, so the
|
||||
:attr:`.ResultProxy.inserted_primary_key` accessor does not
|
||||
apply.
|
||||
|
||||
.. versionchanged:: 1.0.0 A multiple-VALUES INSERT now supports
|
||||
columns with Python side default values and callables in the
|
||||
same way as that of an "executemany" style of invocation; the
|
||||
callable is invoked for each row. See :ref:`bug_3288`
|
||||
for other details.
|
||||
|
||||
The :class:`.Update` construct supports a special form which is a
|
||||
list of 2-tuples, which when provided must be passed in conjunction
|
||||
with the
|
||||
:paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order`
|
||||
parameter.
|
||||
This form causes the UPDATE statement to render the SET clauses
|
||||
using the order of parameters given to :meth:`.Update.values`, rather
|
||||
than the ordering of columns given in the :class:`.Table`.
|
||||
|
||||
.. versionadded:: 1.0.10 - added support for parameter-ordered
|
||||
UPDATE statements via the
|
||||
:paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order`
|
||||
flag.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`updates_order_parameters` - full example of the
|
||||
:paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order`
|
||||
flag
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`inserts_and_updates` - SQL Expression
|
||||
Language Tutorial
|
||||
|
||||
:func:`~.expression.insert` - produce an ``INSERT`` statement
|
||||
|
||||
:func:`~.expression.update` - produce an ``UPDATE`` statement
|
||||
|
||||
"""
|
||||
if self.select is not None:
|
||||
raise exc.InvalidRequestError(
|
||||
"This construct already inserts from a SELECT")
|
||||
if self._has_multi_parameters and kwargs:
|
||||
raise exc.InvalidRequestError(
|
||||
"This construct already has multiple parameter sets.")
|
||||
|
||||
if args:
|
||||
if len(args) > 1:
|
||||
raise exc.ArgumentError(
|
||||
"Only a single dictionary/tuple or list of "
|
||||
"dictionaries/tuples is accepted positionally.")
|
||||
v = args[0]
|
||||
else:
|
||||
v = {}
|
||||
|
||||
if self.parameters is None:
|
||||
self.parameters, self._has_multi_parameters = \
|
||||
self._process_colparams(v)
|
||||
else:
|
||||
if self._has_multi_parameters:
|
||||
self.parameters = list(self.parameters)
|
||||
p, self._has_multi_parameters = self._process_colparams(v)
|
||||
if not self._has_multi_parameters:
|
||||
raise exc.ArgumentError(
|
||||
"Can't mix single-values and multiple values "
|
||||
"formats in one statement")
|
||||
|
||||
self.parameters.extend(p)
|
||||
else:
|
||||
self.parameters = self.parameters.copy()
|
||||
p, self._has_multi_parameters = self._process_colparams(v)
|
||||
if self._has_multi_parameters:
|
||||
raise exc.ArgumentError(
|
||||
"Can't mix single-values and multiple values "
|
||||
"formats in one statement")
|
||||
self.parameters.update(p)
|
||||
|
||||
if kwargs:
|
||||
if self._has_multi_parameters:
|
||||
raise exc.ArgumentError(
|
||||
"Can't pass kwargs and multiple parameter sets "
|
||||
"simultaneously")
|
||||
else:
|
||||
self.parameters.update(kwargs)
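
# Illustrative sketch (not part of the original module) contrasting the
# multiple-VALUES form described above with a traditional executemany.
# Table/column names are made up; the connection in the final comment is
# assumed to exist.
from sqlalchemy import Table, Column, Integer, String, MetaData
from sqlalchemy.dialects import postgresql

meta = MetaData()
users = Table('users', meta,
              Column('id', Integer, primary_key=True),
              Column('name', String))

# One INSERT statement carrying several VALUES groups (backend permitting):
multi = users.insert().values([{'name': 'a'}, {'name': 'b'}])
print(multi.compile(dialect=postgresql.dialect()))
# -> a single INSERT INTO users (name) VALUES (...), (...) statement

# The usual executemany form, generally preferred for large batches:
#     connection.execute(users.insert(), [{'name': 'a'}, {'name': 'b'}])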
@_generative
|
||||
def return_defaults(self, *cols):
|
||||
"""Make use of a :term:`RETURNING` clause for the purpose
|
||||
of fetching server-side expressions and defaults.
|
||||
|
||||
E.g.::
|
||||
|
||||
stmt = table.insert().values(data='newdata').return_defaults()
|
||||
|
||||
result = connection.execute(stmt)
|
||||
|
||||
server_created_at = result.returned_defaults['created_at']
|
||||
|
||||
When used against a backend that supports RETURNING, all column
|
||||
values generated by SQL expression or server-side-default will be
|
||||
added to any existing RETURNING clause, provided that
|
||||
:meth:`.UpdateBase.returning` is not used simultaneously. The column
|
||||
values will then be available on the result using the
|
||||
:attr:`.ResultProxy.returned_defaults` accessor as a dictionary,
|
||||
referring to values keyed to the :class:`.Column` object as well as
|
||||
its ``.key``.
|
||||
|
||||
This method differs from :meth:`.UpdateBase.returning` in these ways:
|
||||
|
||||
1. :meth:`.ValuesBase.return_defaults` is only intended for use with
|
||||
an INSERT or an UPDATE statement that matches exactly one row.
|
||||
While the RETURNING construct in the general sense supports
|
||||
multiple rows for a multi-row UPDATE or DELETE statement, or for
|
||||
special cases of INSERT that return multiple rows (e.g. INSERT from
|
||||
SELECT, multi-valued VALUES clause),
|
||||
:meth:`.ValuesBase.return_defaults` is intended only for an
|
||||
"ORM-style" single-row INSERT/UPDATE statement. The row returned
|
||||
by the statement is also consumed implicitly when
|
||||
:meth:`.ValuesBase.return_defaults` is used. By contrast,
|
||||
:meth:`.UpdateBase.returning` leaves the RETURNING result-set
|
||||
intact with a collection of any number of rows.
|
||||
|
||||
2. It is compatible with the existing logic to fetch auto-generated
|
||||
primary key values, also known as "implicit returning". Backends
|
||||
that support RETURNING will automatically make use of RETURNING in
|
||||
order to fetch the value of newly generated primary keys; while the
|
||||
:meth:`.UpdateBase.returning` method circumvents this behavior,
|
||||
:meth:`.ValuesBase.return_defaults` leaves it intact.
|
||||
|
||||
3. It can be called against any backend. Backends that don't support
|
||||
RETURNING will skip the usage of the feature, rather than raising
|
||||
an exception. The return value of
|
||||
:attr:`.ResultProxy.returned_defaults` will be ``None``
|
||||
|
||||
:meth:`.ValuesBase.return_defaults` is used by the ORM to provide
|
||||
an efficient implementation for the ``eager_defaults`` feature of
|
||||
:func:`.mapper`.
|
||||
|
||||
:param cols: optional list of column key names or :class:`.Column`
|
||||
objects. If omitted, all column expressions evaluated on the server
|
||||
are added to the returning list.
|
||||
|
||||
.. versionadded:: 0.9.0
|
||||
|
||||
.. seealso::
|
||||
|
||||
:meth:`.UpdateBase.returning`
|
||||
|
||||
:attr:`.ResultProxy.returned_defaults`
|
||||
|
||||
"""
|
||||
self._return_defaults = cols or True
|
||||
|
||||
|
||||
class Insert(ValuesBase):
|
||||
"""Represent an INSERT construct.
|
||||
|
||||
The :class:`.Insert` object is created using the
|
||||
:func:`~.expression.insert()` function.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`coretutorial_insert_expressions`
|
||||
|
||||
"""
|
||||
__visit_name__ = 'insert'
|
||||
|
||||
_supports_multi_parameters = True
|
||||
|
||||
def __init__(self,
|
||||
table,
|
||||
values=None,
|
||||
inline=False,
|
||||
bind=None,
|
||||
prefixes=None,
|
||||
returning=None,
|
||||
return_defaults=False,
|
||||
**dialect_kw):
|
||||
"""Construct an :class:`.Insert` object.
|
||||
|
||||
Similar functionality is available via the
|
||||
:meth:`~.TableClause.insert` method on
|
||||
:class:`~.schema.Table`.
|
||||
|
||||
:param table: :class:`.TableClause` which is the subject of the
|
||||
insert.
|
||||
|
||||
:param values: collection of values to be inserted; see
|
||||
:meth:`.Insert.values` for a description of allowed formats here.
|
||||
Can be omitted entirely; a :class:`.Insert` construct will also
|
||||
dynamically render the VALUES clause at execution time based on
|
||||
the parameters passed to :meth:`.Connection.execute`.
|
||||
|
||||
:param inline: if True, no attempt will be made to retrieve the
|
||||
SQL-generated default values to be provided within the statement;
|
||||
in particular,
|
||||
this allows SQL expressions to be rendered 'inline' within the
|
||||
statement without the need to pre-execute them beforehand; for
|
||||
backends that support "returning", this turns off the "implicit
|
||||
returning" feature for the statement.
|
||||
|
||||
If both `values` and compile-time bind parameters are present, the
|
||||
compile-time bind parameters override the information specified
|
||||
within `values` on a per-key basis.
|
||||
|
||||
The keys within `values` can be either
|
||||
:class:`~sqlalchemy.schema.Column` objects or their string
|
||||
identifiers. Each key may reference one of:
|
||||
|
||||
* a literal data value (i.e. string, number, etc.);
|
||||
* a Column object;
|
||||
* a SELECT statement.
|
||||
|
||||
If a ``SELECT`` statement is specified which references this
|
||||
``INSERT`` statement's table, the statement will be correlated
|
||||
against the ``INSERT`` statement.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`coretutorial_insert_expressions` - SQL Expression Tutorial
|
||||
|
||||
:ref:`inserts_and_updates` - SQL Expression Tutorial
|
||||
|
||||
"""
|
||||
ValuesBase.__init__(self, table, values, prefixes)
|
||||
self._bind = bind
|
||||
self.select = self.select_names = None
|
||||
self.include_insert_from_select_defaults = False
|
||||
self.inline = inline
|
||||
self._returning = returning
|
||||
self._validate_dialect_kwargs(dialect_kw)
|
||||
self._return_defaults = return_defaults
|
||||
|
||||
def get_children(self, **kwargs):
|
||||
if self.select is not None:
|
||||
return self.select,
|
||||
else:
|
||||
return ()
|
||||
|
||||
@_generative
|
||||
def from_select(self, names, select, include_defaults=True):
|
||||
"""Return a new :class:`.Insert` construct which represents
|
||||
an ``INSERT...FROM SELECT`` statement.
|
||||
|
||||
e.g.::
|
||||
|
||||
sel = select([table1.c.a, table1.c.b]).where(table1.c.c > 5)
|
||||
ins = table2.insert().from_select(['a', 'b'], sel)
|
||||
|
||||
:param names: a sequence of string column names or :class:`.Column`
|
||||
objects representing the target columns.
|
||||
:param select: a :func:`.select` construct, :class:`.FromClause`
|
||||
or other construct which resolves into a :class:`.FromClause`,
|
||||
such as an ORM :class:`.Query` object, etc. The order of
|
||||
columns returned from this FROM clause should correspond to the
|
||||
order of columns sent as the ``names`` parameter; while this
|
||||
is not checked before passing along to the database, the database
|
||||
would normally raise an exception if these column lists don't
|
||||
correspond.
|
||||
:param include_defaults: if True, non-server default values and
|
||||
SQL expressions as specified on :class:`.Column` objects
|
||||
(as documented in :ref:`metadata_defaults_toplevel`) not
|
||||
otherwise specified in the list of names will be rendered
|
||||
into the INSERT and SELECT statements, so that these values are also
|
||||
included in the data to be inserted.
|
||||
|
||||
.. note:: A Python-side default that uses a Python callable function
|
||||
will only be invoked **once** for the whole statement, and **not
|
||||
per row**.
|
||||
|
||||
.. versionadded:: 1.0.0 - :meth:`.Insert.from_select` now renders
|
||||
Python-side and SQL expression column defaults into the
|
||||
SELECT statement for columns otherwise not included in the
|
||||
list of column names.
|
||||
|
||||
.. versionchanged:: 1.0.0 an INSERT that uses FROM SELECT
|
||||
implies that the :paramref:`.insert.inline` flag is set to
|
||||
True, indicating that the statement will not attempt to fetch
|
||||
the "last inserted primary key" or other defaults. The statement
|
||||
deals with an arbitrary number of rows, so the
|
||||
:attr:`.ResultProxy.inserted_primary_key` accessor does not apply.
|
||||
|
||||
.. versionadded:: 0.8.3
|
||||
|
||||
"""
|
||||
if self.parameters:
|
||||
raise exc.InvalidRequestError(
|
||||
"This construct already inserts value expressions")
|
||||
|
||||
self.parameters, self._has_multi_parameters = \
|
||||
self._process_colparams(
|
||||
dict((_column_as_key(n), Null()) for n in names))
|
||||
|
||||
self.select_names = names
|
||||
self.inline = True
|
||||
self.include_insert_from_select_defaults = include_defaults
|
||||
self.select = _interpret_as_select(select)
|
||||
|
||||
def _copy_internals(self, clone=_clone, **kw):
|
||||
# TODO: coverage
|
||||
self.parameters = self.parameters.copy()
|
||||
if self.select is not None:
|
||||
self.select = _clone(self.select)
|
||||
|
||||
|
||||
class Update(ValuesBase):
|
||||
"""Represent an Update construct.
|
||||
|
||||
The :class:`.Update` object is created using the :func:`update()`
|
||||
function.
|
||||
|
||||
"""
|
||||
__visit_name__ = 'update'
|
||||
|
||||
def __init__(self,
|
||||
table,
|
||||
whereclause=None,
|
||||
values=None,
|
||||
inline=False,
|
||||
bind=None,
|
||||
prefixes=None,
|
||||
returning=None,
|
||||
return_defaults=False,
|
||||
preserve_parameter_order=False,
|
||||
**dialect_kw):
|
||||
r"""Construct an :class:`.Update` object.
|
||||
|
||||
E.g.::
|
||||
|
||||
from sqlalchemy import update
|
||||
|
||||
stmt = update(users).where(users.c.id==5).\
|
||||
values(name='user #5')
|
||||
|
||||
Similar functionality is available via the
|
||||
:meth:`~.TableClause.update` method on
|
||||
:class:`.Table`::
|
||||
|
||||
stmt = users.update().\
|
||||
where(users.c.id==5).\
|
||||
values(name='user #5')
|
||||
|
||||
:param table: A :class:`.Table` object representing the database
|
||||
table to be updated.
|
||||
|
||||
:param whereclause: Optional SQL expression describing the ``WHERE``
|
||||
condition of the ``UPDATE`` statement. Modern applications
|
||||
may prefer to use the generative :meth:`~Update.where()`
|
||||
method to specify the ``WHERE`` clause.
|
||||
|
||||
The WHERE clause can refer to multiple tables.
|
||||
For databases which support this, an ``UPDATE FROM`` clause will
|
||||
be generated, or on MySQL, a multi-table update. The statement
|
||||
will fail on databases that don't have support for multi-table
|
||||
update statements. A SQL-standard method of referring to
|
||||
additional tables in the WHERE clause is to use a correlated
|
||||
subquery::
|
||||
|
||||
users.update().values(name='ed').where(
|
||||
users.c.name==select([addresses.c.email_address]).\
|
||||
where(addresses.c.user_id==users.c.id).\
|
||||
as_scalar()
|
||||
)
|
||||
|
||||
.. versionchanged:: 0.7.4
|
||||
The WHERE clause can refer to multiple tables.
|
||||
|
||||
:param values:
|
||||
Optional dictionary which specifies the ``SET`` conditions of the
|
||||
``UPDATE``. If left as ``None``, the ``SET``
|
||||
conditions are determined from those parameters passed to the
|
||||
statement during the execution and/or compilation of the
|
||||
statement. When compiled standalone without any parameters,
|
||||
the ``SET`` clause is generated for all columns.
|
||||
|
||||
Modern applications may prefer to use the generative
|
||||
:meth:`.Update.values` method to set the values of the
|
||||
UPDATE statement.
|
||||
|
||||
:param inline:
|
||||
if True, SQL defaults present on :class:`.Column` objects via
|
||||
the ``default`` keyword will be compiled 'inline' into the statement
|
||||
and not pre-executed. This means that their values will not
|
||||
be available in the dictionary returned from
|
||||
:meth:`.ResultProxy.last_updated_params`.
|
||||
|
||||
:param preserve_parameter_order: if True, the update statement is
|
||||
expected to receive parameters **only** via the :meth:`.Update.values`
|
||||
method, and they must be passed as a Python ``list`` of 2-tuples.
|
||||
The rendered UPDATE statement will emit the SET clause for each
|
||||
referenced column maintaining this order.
|
||||
|
||||
.. versionadded:: 1.0.10
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`updates_order_parameters` - full example of the
|
||||
:paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order` flag
|
||||
|
||||
If both ``values`` and compile-time bind parameters are present, the
|
||||
compile-time bind parameters override the information specified
|
||||
within ``values`` on a per-key basis.
|
||||
|
||||
The keys within ``values`` can be either :class:`.Column`
|
||||
objects or their string identifiers (specifically the "key" of the
|
||||
:class:`.Column`, normally but not necessarily equivalent to
|
||||
its "name"). Normally, the
|
||||
:class:`.Column` objects used here are expected to be
|
||||
part of the target :class:`.Table` that is the table
|
||||
to be updated. However when using MySQL, a multiple-table
|
||||
UPDATE statement can refer to columns from any of
|
||||
the tables referred to in the WHERE clause.
|
||||
|
||||
The values referred to in ``values`` are typically:
|
||||
|
||||
* a literal data value (i.e. string, number, etc.)
|
||||
* a SQL expression, such as a related :class:`.Column`,
|
||||
a scalar-returning :func:`.select` construct,
|
||||
etc.
|
||||
|
||||
When combining :func:`.select` constructs within the values
|
||||
clause of an :func:`.update` construct,
|
||||
the subquery represented by the :func:`.select` should be
|
||||
*correlated* to the parent table, that is, providing criterion
|
||||
which links the table inside the subquery to the outer table
|
||||
being updated::
|
||||
|
||||
users.update().values(
|
||||
name=select([addresses.c.email_address]).\
|
||||
where(addresses.c.user_id==users.c.id).\
|
||||
as_scalar()
|
||||
)
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`inserts_and_updates` - SQL Expression
|
||||
Language Tutorial
|
||||
|
||||
|
||||
"""
|
||||
self._preserve_parameter_order = preserve_parameter_order
|
||||
ValuesBase.__init__(self, table, values, prefixes)
|
||||
self._bind = bind
|
||||
self._returning = returning
|
||||
if whereclause is not None:
|
||||
self._whereclause = _literal_as_text(whereclause)
|
||||
else:
|
||||
self._whereclause = None
|
||||
self.inline = inline
|
||||
self._validate_dialect_kwargs(dialect_kw)
|
||||
self._return_defaults = return_defaults
|
||||
|
||||
def get_children(self, **kwargs):
|
||||
if self._whereclause is not None:
|
||||
return self._whereclause,
|
||||
else:
|
||||
return ()
|
||||
|
||||
def _copy_internals(self, clone=_clone, **kw):
|
||||
# TODO: coverage
|
||||
self._whereclause = clone(self._whereclause, **kw)
|
||||
self.parameters = self.parameters.copy()
|
||||
|
||||
@_generative
|
||||
def where(self, whereclause):
|
||||
"""return a new update() construct with the given expression added to
|
||||
its WHERE clause, joined to the existing clause via AND, if any.
|
||||
|
||||
"""
|
||||
if self._whereclause is not None:
|
||||
self._whereclause = and_(self._whereclause,
|
||||
_literal_as_text(whereclause))
|
||||
else:
|
||||
self._whereclause = _literal_as_text(whereclause)
|
||||
|
||||
@property
|
||||
def _extra_froms(self):
|
||||
# TODO: this could be made memoized
|
||||
# if the memoization is reset on each generative call.
|
||||
froms = []
|
||||
seen = set([self.table])
|
||||
|
||||
if self._whereclause is not None:
|
||||
for item in _from_objects(self._whereclause):
|
||||
if not seen.intersection(item._cloned_set):
|
||||
froms.append(item)
|
||||
seen.update(item._cloned_set)
|
||||
|
||||
return froms
|
||||
|
||||
|
||||
class Delete(UpdateBase):
|
||||
"""Represent a DELETE construct.
|
||||
|
||||
The :class:`.Delete` object is created using the :func:`delete()`
|
||||
function.
|
||||
|
||||
"""
|
||||
|
||||
__visit_name__ = 'delete'
|
||||
|
||||
def __init__(self,
|
||||
table,
|
||||
whereclause=None,
|
||||
bind=None,
|
||||
returning=None,
|
||||
prefixes=None,
|
||||
**dialect_kw):
|
||||
"""Construct :class:`.Delete` object.
|
||||
|
||||
Similar functionality is available via the
|
||||
:meth:`~.TableClause.delete` method on
|
||||
:class:`~.schema.Table`.
|
||||
|
||||
:param table: The table to delete rows from.
|
||||
|
||||
:param whereclause: A :class:`.ClauseElement` describing the ``WHERE``
|
||||
condition of the ``DELETE`` statement. Note that the
|
||||
:meth:`~Delete.where()` generative method may be used instead.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`deletes` - SQL Expression Tutorial
|
||||
|
||||
"""
|
||||
self._bind = bind
|
||||
self.table = _interpret_as_from(table)
|
||||
self._returning = returning
|
||||
|
||||
if prefixes:
|
||||
self._setup_prefixes(prefixes)
|
||||
|
||||
if whereclause is not None:
|
||||
self._whereclause = _literal_as_text(whereclause)
|
||||
else:
|
||||
self._whereclause = None
|
||||
|
||||
self._validate_dialect_kwargs(dialect_kw)
|
||||
|
||||
def get_children(self, **kwargs):
|
||||
if self._whereclause is not None:
|
||||
return self._whereclause,
|
||||
else:
|
||||
return ()
|
||||
|
||||
@_generative
|
||||
def where(self, whereclause):
|
||||
"""Add the given WHERE clause to a newly returned delete construct."""
|
||||
|
||||
if self._whereclause is not None:
|
||||
self._whereclause = and_(self._whereclause,
|
||||
_literal_as_text(whereclause))
|
||||
else:
|
||||
self._whereclause = _literal_as_text(whereclause)
|
||||
|
||||
def _copy_internals(self, clone=_clone, **kw):
|
||||
# TODO: coverage
|
||||
self._whereclause = clone(self._whereclause, **kw)
|
||||
4403
sqlalchemy/sql/elements.py
Normal file
File diff suppressed because it is too large
146
sqlalchemy/sql/naming.py
Normal file
@@ -0,0 +1,146 @@
|
||||
# sqlalchemy/naming.py
|
||||
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Establish constraint and index naming conventions.
|
||||
|
||||
|
||||
"""
|
||||
|
||||
from .schema import Constraint, ForeignKeyConstraint, PrimaryKeyConstraint, \
|
||||
UniqueConstraint, CheckConstraint, Index, Table, Column
|
||||
from .. import event, events
|
||||
from .. import exc
|
||||
from .elements import _truncated_label, _defer_name, _defer_none_name, conv
|
||||
import re
|
||||
|
||||
|
||||
class ConventionDict(object):
|
||||
|
||||
def __init__(self, const, table, convention):
|
||||
self.const = const
|
||||
self._is_fk = isinstance(const, ForeignKeyConstraint)
|
||||
self.table = table
|
||||
self.convention = convention
|
||||
self._const_name = const.name
|
||||
|
||||
def _key_table_name(self):
|
||||
return self.table.name
|
||||
|
||||
def _column_X(self, idx):
|
||||
if self._is_fk:
|
||||
fk = self.const.elements[idx]
|
||||
return fk.parent
|
||||
else:
|
||||
return list(self.const.columns)[idx]
|
||||
|
||||
def _key_constraint_name(self):
|
||||
if isinstance(self._const_name, (type(None), _defer_none_name)):
|
||||
raise exc.InvalidRequestError(
|
||||
"Naming convention including "
|
||||
"%(constraint_name)s token requires that "
|
||||
"constraint is explicitly named."
|
||||
)
|
||||
if not isinstance(self._const_name, conv):
|
||||
self.const.name = None
|
||||
return self._const_name
|
||||
|
||||
def _key_column_X_name(self, idx):
|
||||
return self._column_X(idx).name
|
||||
|
||||
def _key_column_X_label(self, idx):
|
||||
return self._column_X(idx)._label
|
||||
|
||||
def _key_referred_table_name(self):
|
||||
fk = self.const.elements[0]
|
||||
refs = fk.target_fullname.split(".")
|
||||
if len(refs) == 3:
|
||||
refschema, reftable, refcol = refs
|
||||
else:
|
||||
reftable, refcol = refs
|
||||
return reftable
|
||||
|
||||
def _key_referred_column_X_name(self, idx):
|
||||
fk = self.const.elements[idx]
|
||||
refs = fk.target_fullname.split(".")
|
||||
if len(refs) == 3:
|
||||
refschema, reftable, refcol = refs
|
||||
else:
|
||||
reftable, refcol = refs
|
||||
return refcol
|
||||
|
||||
def __getitem__(self, key):
|
||||
if key in self.convention:
|
||||
return self.convention[key](self.const, self.table)
|
||||
elif hasattr(self, '_key_%s' % key):
|
||||
return getattr(self, '_key_%s' % key)()
|
||||
else:
|
||||
col_template = re.match(r".*_?column_(\d+)_.+", key)
|
||||
if col_template:
|
||||
idx = col_template.group(1)
|
||||
attr = "_key_" + key.replace(idx, "X")
|
||||
idx = int(idx)
|
||||
if hasattr(self, attr):
|
||||
return getattr(self, attr)(idx)
|
||||
raise KeyError(key)
|
||||
|
||||
_prefix_dict = {
|
||||
Index: "ix",
|
||||
PrimaryKeyConstraint: "pk",
|
||||
CheckConstraint: "ck",
|
||||
UniqueConstraint: "uq",
|
||||
ForeignKeyConstraint: "fk"
|
||||
}
|
||||
|
||||
|
||||
def _get_convention(dict_, key):
|
||||
|
||||
for super_ in key.__mro__:
|
||||
if super_ in _prefix_dict and _prefix_dict[super_] in dict_:
|
||||
return dict_[_prefix_dict[super_]]
|
||||
elif super_ in dict_:
|
||||
return dict_[super_]
|
||||
else:
|
||||
return None
|
||||
|
||||
|
||||
def _constraint_name_for_table(const, table):
|
||||
metadata = table.metadata
|
||||
convention = _get_convention(metadata.naming_convention, type(const))
|
||||
|
||||
if isinstance(const.name, conv):
|
||||
return const.name
|
||||
elif convention is not None and \
|
||||
not isinstance(const.name, conv) and \
|
||||
(
|
||||
const.name is None or
|
||||
"constraint_name" in convention or
|
||||
isinstance(const.name, _defer_name)):
|
||||
return conv(
|
||||
convention % ConventionDict(const, table,
|
||||
metadata.naming_convention)
|
||||
)
|
||||
elif isinstance(convention, _defer_none_name):
|
||||
return None
|
||||
|
||||
|
||||
@event.listens_for(Constraint, "after_parent_attach")
|
||||
@event.listens_for(Index, "after_parent_attach")
|
||||
def _constraint_name(const, table):
|
||||
if isinstance(table, Column):
|
||||
# for column-attached constraint, set another event
|
||||
# to link the column attached to the table as this constraint
|
||||
# associated with the table.
|
||||
event.listen(table, "after_parent_attach",
|
||||
lambda col, table: _constraint_name(const, table)
|
||||
)
|
||||
elif isinstance(table, Table):
|
||||
if isinstance(const.name, (conv, _defer_name)):
|
||||
return
|
||||
|
||||
newname = _constraint_name_for_table(const, table)
|
||||
if newname is not None:
|
||||
const.name = newname
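
# Illustrative sketch (not part of the original module): the token names in a
# naming_convention string (%(table_name)s, %(column_0_name)s, ...) map onto
# the _key_* methods of ConventionDict above.  Table/column names are made up.
from sqlalchemy import Table, Column, Integer, String, MetaData, UniqueConstraint

meta = MetaData(naming_convention={
    "uq": "uq_%(table_name)s_%(column_0_name)s",
})

user = Table('user', meta,
             Column('id', Integer, primary_key=True),
             Column('email', String),
             UniqueConstraint('email'))

uq = next(c for c in user.constraints if isinstance(c, UniqueConstraint))
print(uq.name)    # uq_user_email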
4027
sqlalchemy/sql/schema.py
Normal file
File diff suppressed because it is too large
3716
sqlalchemy/sql/selectable.py
Normal file
File diff suppressed because it is too large
2619
sqlalchemy/sql/sqltypes.py
Normal file
File diff suppressed because it is too large
1307
sqlalchemy/sql/type_api.py
Normal file
File diff suppressed because it is too large