Removed SqlAlchemy from repo

parent bae4a74196
commit a03ca8f299
@@ -1,146 +0,0 @@
# sqlalchemy/__init__.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php


from .sql import (
    alias,
    all_,
    and_,
    any_,
    asc,
    between,
    bindparam,
    case,
    cast,
    collate,
    column,
    delete,
    desc,
    distinct,
    except_,
    except_all,
    exists,
    extract,
    false,
    func,
    funcfilter,
    insert,
    intersect,
    intersect_all,
    join,
    lateral,
    literal,
    literal_column,
    modifier,
    not_,
    null,
    or_,
    outerjoin,
    outparam,
    over,
    select,
    subquery,
    table,
    tablesample,
    text,
    true,
    tuple_,
    type_coerce,
    union,
    union_all,
    update,
    within_group,
)


from .types import (
    ARRAY,
    BIGINT,
    BINARY,
    BLOB,
    BOOLEAN,
    BigInteger,
    Binary,
    Boolean,
    CHAR,
    CLOB,
    DATE,
    DATETIME,
    DECIMAL,
    Date,
    DateTime,
    Enum,
    FLOAT,
    Float,
    INT,
    INTEGER,
    Integer,
    Interval,
    JSON,
    LargeBinary,
    NCHAR,
    NVARCHAR,
    NUMERIC,
    Numeric,
    PickleType,
    REAL,
    SMALLINT,
    SmallInteger,
    String,
    TEXT,
    TIME,
    TIMESTAMP,
    Text,
    Time,
    TypeDecorator,
    Unicode,
    UnicodeText,
    VARBINARY,
    VARCHAR,
)


from .schema import (
    CheckConstraint,
    Column,
    ColumnDefault,
    Constraint,
    DefaultClause,
    FetchedValue,
    ForeignKey,
    ForeignKeyConstraint,
    Index,
    MetaData,
    PassiveDefault,
    PrimaryKeyConstraint,
    Sequence,
    Table,
    ThreadLocalMetaData,
    UniqueConstraint,
    DDL,
    BLANK_SCHEMA
)


from .inspection import inspect
from .engine import create_engine, engine_from_config

__version__ = '1.1.9'


def __go(lcls):
    global __all__

    from . import events
    from . import util as _sa_util

    import inspect as _inspect

    __all__ = sorted(name for name, obj in lcls.items()
                     if not (name.startswith('_') or _inspect.ismodule(obj)))

    _sa_util.dependencies.resolve_all("sqlalchemy")
__go(locals())
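For reference, the __go(locals()) idiom above computes __all__ at import time by filtering the module namespace. A minimal standalone sketch of the same pattern, with illustrative stand-in names rather than SQLAlchemy's real exports:

    import inspect as _inspect

    select = object()     # stand-ins for a re-exported public API
    Column = object()
    _internal = object()  # underscore names are excluded

    def _compute_all(lcls):
        # same filter as __go(): drop private names and imported modules
        return sorted(name for name, obj in lcls.items()
                      if not (name.startswith('_') or _inspect.ismodule(obj)))

    __all__ = _compute_all(dict(globals()))
    print(__all__)  # ['Column', 'select']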
@@ -1,696 +0,0 @@
/*
processors.c
Copyright (C) 2010-2017 the SQLAlchemy authors and contributors <see AUTHORS file>
Copyright (C) 2010-2011 Gaetan de Menten gdementen@gmail.com

This module is part of SQLAlchemy and is released under
the MIT License: http://www.opensource.org/licenses/mit-license.php
*/

#include <Python.h>
#include <datetime.h>

#define MODULE_NAME "cprocessors"
#define MODULE_DOC "Module containing C versions of data processing functions."

#if PY_VERSION_HEX < 0x02050000 && !defined(PY_SSIZE_T_MIN)
typedef int Py_ssize_t;
#define PY_SSIZE_T_MAX INT_MAX
#define PY_SSIZE_T_MIN INT_MIN
#endif

static PyObject *
int_to_boolean(PyObject *self, PyObject *arg)
{
    int l = 0;
    PyObject *res;

    if (arg == Py_None)
        Py_RETURN_NONE;

    l = PyObject_IsTrue(arg);
    if (l == 0) {
        res = Py_False;
    } else if (l == 1) {
        res = Py_True;
    } else {
        return NULL;
    }

    Py_INCREF(res);
    return res;
}

static PyObject *
to_str(PyObject *self, PyObject *arg)
{
    if (arg == Py_None)
        Py_RETURN_NONE;

    return PyObject_Str(arg);
}

static PyObject *
to_float(PyObject *self, PyObject *arg)
{
    if (arg == Py_None)
        Py_RETURN_NONE;

    return PyNumber_Float(arg);
}

static PyObject *
str_to_datetime(PyObject *self, PyObject *arg)
{
#if PY_MAJOR_VERSION >= 3
    PyObject *bytes;
    PyObject *err_bytes;
#endif
    const char *str;
    int numparsed;
    unsigned int year, month, day, hour, minute, second, microsecond = 0;
    PyObject *err_repr;

    if (arg == Py_None)
        Py_RETURN_NONE;

#if PY_MAJOR_VERSION >= 3
    bytes = PyUnicode_AsASCIIString(arg);
    if (bytes == NULL)
        str = NULL;
    else
        str = PyBytes_AS_STRING(bytes);
#else
    str = PyString_AsString(arg);
#endif
    if (str == NULL) {
        err_repr = PyObject_Repr(arg);
        if (err_repr == NULL)
            return NULL;
#if PY_MAJOR_VERSION >= 3
        err_bytes = PyUnicode_AsASCIIString(err_repr);
        if (err_bytes == NULL)
            return NULL;
        PyErr_Format(
                PyExc_ValueError,
                "Couldn't parse datetime string '%.200s' "
                "- value is not a string.",
                PyBytes_AS_STRING(err_bytes));
        Py_DECREF(err_bytes);
#else
        PyErr_Format(
                PyExc_ValueError,
                "Couldn't parse datetime string '%.200s' "
                "- value is not a string.",
                PyString_AsString(err_repr));
#endif
        Py_DECREF(err_repr);
        return NULL;
    }

    /* microseconds are optional */
    /*
    TODO: this is slightly less picky than the Python version which would
    not accept "2000-01-01 00:00:00.". I don't know which is better, but they
    should be coherent.
    */
    numparsed = sscanf(str, "%4u-%2u-%2u %2u:%2u:%2u.%6u", &year, &month, &day,
                       &hour, &minute, &second, &microsecond);
#if PY_MAJOR_VERSION >= 3
    Py_DECREF(bytes);
#endif
    if (numparsed < 6) {
        err_repr = PyObject_Repr(arg);
        if (err_repr == NULL)
            return NULL;
#if PY_MAJOR_VERSION >= 3
        err_bytes = PyUnicode_AsASCIIString(err_repr);
        if (err_bytes == NULL)
            return NULL;
        PyErr_Format(
                PyExc_ValueError,
                "Couldn't parse datetime string: %.200s",
                PyBytes_AS_STRING(err_bytes));
        Py_DECREF(err_bytes);
#else
        PyErr_Format(
                PyExc_ValueError,
                "Couldn't parse datetime string: %.200s",
                PyString_AsString(err_repr));
#endif
        Py_DECREF(err_repr);
        return NULL;
    }
    return PyDateTime_FromDateAndTime(year, month, day,
                                      hour, minute, second, microsecond);
}

static PyObject *
str_to_time(PyObject *self, PyObject *arg)
{
#if PY_MAJOR_VERSION >= 3
    PyObject *bytes;
    PyObject *err_bytes;
#endif
    const char *str;
    int numparsed;
    unsigned int hour, minute, second, microsecond = 0;
    PyObject *err_repr;

    if (arg == Py_None)
        Py_RETURN_NONE;

#if PY_MAJOR_VERSION >= 3
    bytes = PyUnicode_AsASCIIString(arg);
    if (bytes == NULL)
        str = NULL;
    else
        str = PyBytes_AS_STRING(bytes);
#else
    str = PyString_AsString(arg);
#endif
    if (str == NULL) {
        err_repr = PyObject_Repr(arg);
        if (err_repr == NULL)
            return NULL;

#if PY_MAJOR_VERSION >= 3
        err_bytes = PyUnicode_AsASCIIString(err_repr);
        if (err_bytes == NULL)
            return NULL;
        PyErr_Format(
                PyExc_ValueError,
                "Couldn't parse time string '%.200s' - value is not a string.",
                PyBytes_AS_STRING(err_bytes));
        Py_DECREF(err_bytes);
#else
        PyErr_Format(
                PyExc_ValueError,
                "Couldn't parse time string '%.200s' - value is not a string.",
                PyString_AsString(err_repr));
#endif
        Py_DECREF(err_repr);
        return NULL;
    }

    /* microseconds are optional */
    /*
    TODO: this is slightly less picky than the Python version which would
    not accept "00:00:00.". I don't know which is better, but they should be
    coherent.
    */
    numparsed = sscanf(str, "%2u:%2u:%2u.%6u", &hour, &minute, &second,
                       &microsecond);
#if PY_MAJOR_VERSION >= 3
    Py_DECREF(bytes);
#endif
    if (numparsed < 3) {
        err_repr = PyObject_Repr(arg);
        if (err_repr == NULL)
            return NULL;
#if PY_MAJOR_VERSION >= 3
        err_bytes = PyUnicode_AsASCIIString(err_repr);
        if (err_bytes == NULL)
            return NULL;
        PyErr_Format(
                PyExc_ValueError,
                "Couldn't parse time string: %.200s",
                PyBytes_AS_STRING(err_bytes));
        Py_DECREF(err_bytes);
#else
        PyErr_Format(
                PyExc_ValueError,
                "Couldn't parse time string: %.200s",
                PyString_AsString(err_repr));
#endif
        Py_DECREF(err_repr);
        return NULL;
    }
    return PyTime_FromTime(hour, minute, second, microsecond);
}

static PyObject *
str_to_date(PyObject *self, PyObject *arg)
{
#if PY_MAJOR_VERSION >= 3
    PyObject *bytes;
    PyObject *err_bytes;
#endif
    const char *str;
    int numparsed;
    unsigned int year, month, day;
    PyObject *err_repr;

    if (arg == Py_None)
        Py_RETURN_NONE;

#if PY_MAJOR_VERSION >= 3
    bytes = PyUnicode_AsASCIIString(arg);
    if (bytes == NULL)
        str = NULL;
    else
        str = PyBytes_AS_STRING(bytes);
#else
    str = PyString_AsString(arg);
#endif
    if (str == NULL) {
        err_repr = PyObject_Repr(arg);
        if (err_repr == NULL)
            return NULL;
#if PY_MAJOR_VERSION >= 3
        err_bytes = PyUnicode_AsASCIIString(err_repr);
        if (err_bytes == NULL)
            return NULL;
        PyErr_Format(
                PyExc_ValueError,
                "Couldn't parse date string '%.200s' - value is not a string.",
                PyBytes_AS_STRING(err_bytes));
        Py_DECREF(err_bytes);
#else
        PyErr_Format(
                PyExc_ValueError,
                "Couldn't parse date string '%.200s' - value is not a string.",
                PyString_AsString(err_repr));
#endif
        Py_DECREF(err_repr);
        return NULL;
    }

    numparsed = sscanf(str, "%4u-%2u-%2u", &year, &month, &day);
#if PY_MAJOR_VERSION >= 3
    Py_DECREF(bytes);
#endif
    if (numparsed != 3) {
        err_repr = PyObject_Repr(arg);
        if (err_repr == NULL)
            return NULL;
#if PY_MAJOR_VERSION >= 3
        err_bytes = PyUnicode_AsASCIIString(err_repr);
        if (err_bytes == NULL)
            return NULL;
        PyErr_Format(
                PyExc_ValueError,
                "Couldn't parse date string: %.200s",
                PyBytes_AS_STRING(err_bytes));
        Py_DECREF(err_bytes);
#else
        PyErr_Format(
                PyExc_ValueError,
                "Couldn't parse date string: %.200s",
                PyString_AsString(err_repr));
#endif
        Py_DECREF(err_repr);
        return NULL;
    }
    return PyDate_FromDate(year, month, day);
}


/***********
 * Structs *
 ***********/

typedef struct {
    PyObject_HEAD
    PyObject *encoding;
    PyObject *errors;
} UnicodeResultProcessor;

typedef struct {
    PyObject_HEAD
    PyObject *type;
    PyObject *format;
} DecimalResultProcessor;



/**************************
 * UnicodeResultProcessor *
 **************************/

static int
UnicodeResultProcessor_init(UnicodeResultProcessor *self, PyObject *args,
                            PyObject *kwds)
{
    PyObject *encoding, *errors = NULL;
    static char *kwlist[] = {"encoding", "errors", NULL};

#if PY_MAJOR_VERSION >= 3
    if (!PyArg_ParseTupleAndKeywords(args, kwds, "U|U:__init__", kwlist,
                                     &encoding, &errors))
        return -1;
#else
    if (!PyArg_ParseTupleAndKeywords(args, kwds, "S|S:__init__", kwlist,
                                     &encoding, &errors))
        return -1;
#endif

#if PY_MAJOR_VERSION >= 3
    encoding = PyUnicode_AsASCIIString(encoding);
#else
    Py_INCREF(encoding);
#endif
    self->encoding = encoding;

    if (errors) {
#if PY_MAJOR_VERSION >= 3
        errors = PyUnicode_AsASCIIString(errors);
#else
        Py_INCREF(errors);
#endif
    } else {
#if PY_MAJOR_VERSION >= 3
        errors = PyBytes_FromString("strict");
#else
        errors = PyString_FromString("strict");
#endif
        if (errors == NULL)
            return -1;
    }
    self->errors = errors;

    return 0;
}

static PyObject *
UnicodeResultProcessor_process(UnicodeResultProcessor *self, PyObject *value)
{
    const char *encoding, *errors;
    char *str;
    Py_ssize_t len;

    if (value == Py_None)
        Py_RETURN_NONE;

#if PY_MAJOR_VERSION >= 3
    if (PyBytes_AsStringAndSize(value, &str, &len))
        return NULL;

    encoding = PyBytes_AS_STRING(self->encoding);
    errors = PyBytes_AS_STRING(self->errors);
#else
    if (PyString_AsStringAndSize(value, &str, &len))
        return NULL;

    encoding = PyString_AS_STRING(self->encoding);
    errors = PyString_AS_STRING(self->errors);
#endif

    return PyUnicode_Decode(str, len, encoding, errors);
}

static PyObject *
UnicodeResultProcessor_conditional_process(UnicodeResultProcessor *self, PyObject *value)
{
    const char *encoding, *errors;
    char *str;
    Py_ssize_t len;

    if (value == Py_None)
        Py_RETURN_NONE;

#if PY_MAJOR_VERSION >= 3
    if (PyUnicode_Check(value) == 1) {
        Py_INCREF(value);
        return value;
    }

    if (PyBytes_AsStringAndSize(value, &str, &len))
        return NULL;

    encoding = PyBytes_AS_STRING(self->encoding);
    errors = PyBytes_AS_STRING(self->errors);
#else

    if (PyUnicode_Check(value) == 1) {
        Py_INCREF(value);
        return value;
    }

    if (PyString_AsStringAndSize(value, &str, &len))
        return NULL;


    encoding = PyString_AS_STRING(self->encoding);
    errors = PyString_AS_STRING(self->errors);
#endif

    return PyUnicode_Decode(str, len, encoding, errors);
}

static void
UnicodeResultProcessor_dealloc(UnicodeResultProcessor *self)
{
    Py_XDECREF(self->encoding);
    Py_XDECREF(self->errors);
#if PY_MAJOR_VERSION >= 3
    Py_TYPE(self)->tp_free((PyObject*)self);
#else
    self->ob_type->tp_free((PyObject*)self);
#endif
}

static PyMethodDef UnicodeResultProcessor_methods[] = {
    {"process", (PyCFunction)UnicodeResultProcessor_process, METH_O,
     "The value processor itself."},
    {"conditional_process", (PyCFunction)UnicodeResultProcessor_conditional_process, METH_O,
     "Conditional version of the value processor."},
    {NULL}  /* Sentinel */
};

static PyTypeObject UnicodeResultProcessorType = {
    PyVarObject_HEAD_INIT(NULL, 0)
    "sqlalchemy.cprocessors.UnicodeResultProcessor",    /* tp_name */
    sizeof(UnicodeResultProcessor),             /* tp_basicsize */
    0,                                          /* tp_itemsize */
    (destructor)UnicodeResultProcessor_dealloc, /* tp_dealloc */
    0,                                          /* tp_print */
    0,                                          /* tp_getattr */
    0,                                          /* tp_setattr */
    0,                                          /* tp_compare */
    0,                                          /* tp_repr */
    0,                                          /* tp_as_number */
    0,                                          /* tp_as_sequence */
    0,                                          /* tp_as_mapping */
    0,                                          /* tp_hash */
    0,                                          /* tp_call */
    0,                                          /* tp_str */
    0,                                          /* tp_getattro */
    0,                                          /* tp_setattro */
    0,                                          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,   /* tp_flags */
    "UnicodeResultProcessor objects",           /* tp_doc */
    0,                                          /* tp_traverse */
    0,                                          /* tp_clear */
    0,                                          /* tp_richcompare */
    0,                                          /* tp_weaklistoffset */
    0,                                          /* tp_iter */
    0,                                          /* tp_iternext */
    UnicodeResultProcessor_methods,             /* tp_methods */
    0,                                          /* tp_members */
    0,                                          /* tp_getset */
    0,                                          /* tp_base */
    0,                                          /* tp_dict */
    0,                                          /* tp_descr_get */
    0,                                          /* tp_descr_set */
    0,                                          /* tp_dictoffset */
    (initproc)UnicodeResultProcessor_init,      /* tp_init */
    0,                                          /* tp_alloc */
    0,                                          /* tp_new */
};

/**************************
 * DecimalResultProcessor *
 **************************/

static int
DecimalResultProcessor_init(DecimalResultProcessor *self, PyObject *args,
                            PyObject *kwds)
{
    PyObject *type, *format;

#if PY_MAJOR_VERSION >= 3
    if (!PyArg_ParseTuple(args, "OU", &type, &format))
#else
    if (!PyArg_ParseTuple(args, "OS", &type, &format))
#endif
        return -1;

    Py_INCREF(type);
    self->type = type;

    Py_INCREF(format);
    self->format = format;

    return 0;
}

static PyObject *
DecimalResultProcessor_process(DecimalResultProcessor *self, PyObject *value)
{
    PyObject *str, *result, *args;

    if (value == Py_None)
        Py_RETURN_NONE;

    /* Decimal does not accept float values directly */
    /* SQLite can also give us an integer here (see [ticket:2432]) */
    /* XXX: starting with Python 3.1, we could use Decimal.from_float(f),
       but the result wouldn't be the same */

    args = PyTuple_Pack(1, value);
    if (args == NULL)
        return NULL;

#if PY_MAJOR_VERSION >= 3
    str = PyUnicode_Format(self->format, args);
#else
    str = PyString_Format(self->format, args);
#endif

    Py_DECREF(args);
    if (str == NULL)
        return NULL;

    result = PyObject_CallFunctionObjArgs(self->type, str, NULL);
    Py_DECREF(str);
    return result;
}

static void
DecimalResultProcessor_dealloc(DecimalResultProcessor *self)
{
    Py_XDECREF(self->type);
    Py_XDECREF(self->format);
#if PY_MAJOR_VERSION >= 3
    Py_TYPE(self)->tp_free((PyObject*)self);
#else
    self->ob_type->tp_free((PyObject*)self);
#endif
}

static PyMethodDef DecimalResultProcessor_methods[] = {
    {"process", (PyCFunction)DecimalResultProcessor_process, METH_O,
     "The value processor itself."},
    {NULL}  /* Sentinel */
};

static PyTypeObject DecimalResultProcessorType = {
    PyVarObject_HEAD_INIT(NULL, 0)
    "sqlalchemy.DecimalResultProcessor",        /* tp_name */
    sizeof(DecimalResultProcessor),             /* tp_basicsize */
    0,                                          /* tp_itemsize */
    (destructor)DecimalResultProcessor_dealloc, /* tp_dealloc */
    0,                                          /* tp_print */
    0,                                          /* tp_getattr */
    0,                                          /* tp_setattr */
    0,                                          /* tp_compare */
    0,                                          /* tp_repr */
    0,                                          /* tp_as_number */
    0,                                          /* tp_as_sequence */
    0,                                          /* tp_as_mapping */
    0,                                          /* tp_hash */
    0,                                          /* tp_call */
    0,                                          /* tp_str */
    0,                                          /* tp_getattro */
    0,                                          /* tp_setattro */
    0,                                          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,   /* tp_flags */
    "DecimalResultProcessor objects",           /* tp_doc */
    0,                                          /* tp_traverse */
    0,                                          /* tp_clear */
    0,                                          /* tp_richcompare */
    0,                                          /* tp_weaklistoffset */
    0,                                          /* tp_iter */
    0,                                          /* tp_iternext */
    DecimalResultProcessor_methods,             /* tp_methods */
    0,                                          /* tp_members */
    0,                                          /* tp_getset */
    0,                                          /* tp_base */
    0,                                          /* tp_dict */
    0,                                          /* tp_descr_get */
    0,                                          /* tp_descr_set */
    0,                                          /* tp_dictoffset */
    (initproc)DecimalResultProcessor_init,      /* tp_init */
    0,                                          /* tp_alloc */
    0,                                          /* tp_new */
};

static PyMethodDef module_methods[] = {
    {"int_to_boolean", int_to_boolean, METH_O,
     "Convert an integer to a boolean."},
    {"to_str", to_str, METH_O,
     "Convert any value to its string representation."},
    {"to_float", to_float, METH_O,
     "Convert any value to its floating point representation."},
    {"str_to_datetime", str_to_datetime, METH_O,
     "Convert an ISO string to a datetime.datetime object."},
    {"str_to_time", str_to_time, METH_O,
     "Convert an ISO string to a datetime.time object."},
    {"str_to_date", str_to_date, METH_O,
     "Convert an ISO string to a datetime.date object."},
    {NULL, NULL, 0, NULL}        /* Sentinel */
};

#ifndef PyMODINIT_FUNC  /* declarations for DLL import/export */
#define PyMODINIT_FUNC void
#endif


#if PY_MAJOR_VERSION >= 3

static struct PyModuleDef module_def = {
    PyModuleDef_HEAD_INIT,
    MODULE_NAME,
    MODULE_DOC,
    -1,
    module_methods
};

#define INITERROR return NULL

PyMODINIT_FUNC
PyInit_cprocessors(void)

#else

#define INITERROR return

PyMODINIT_FUNC
initcprocessors(void)

#endif

{
    PyObject *m;

    UnicodeResultProcessorType.tp_new = PyType_GenericNew;
    if (PyType_Ready(&UnicodeResultProcessorType) < 0)
        INITERROR;

    DecimalResultProcessorType.tp_new = PyType_GenericNew;
    if (PyType_Ready(&DecimalResultProcessorType) < 0)
        INITERROR;

#if PY_MAJOR_VERSION >= 3
    m = PyModule_Create(&module_def);
#else
    m = Py_InitModule3(MODULE_NAME, module_methods, MODULE_DOC);
#endif
    if (m == NULL)
        INITERROR;

    PyDateTime_IMPORT;

    Py_INCREF(&UnicodeResultProcessorType);
    PyModule_AddObject(m, "UnicodeResultProcessor",
                       (PyObject *)&UnicodeResultProcessorType);

    Py_INCREF(&DecimalResultProcessorType);
    PyModule_AddObject(m, "DecimalResultProcessor",
                       (PyObject *)&DecimalResultProcessorType);

#if PY_MAJOR_VERSION >= 3
    return m;
#endif
}
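SQLAlchemy ships pure-Python fallbacks for these converters; the C module above is purely an accelerator. A hedged sketch of what two of them compute, written for illustration rather than copied from the fallback module:

    import datetime

    def int_to_boolean(value):
        # None passes through, mirroring the Py_RETURN_NONE checks above
        if value is None:
            return None
        return bool(value)

    def str_to_datetime(value):
        # mirrors the sscanf format "%4u-%2u-%2u %2u:%2u:%2u.%6u";
        # the microsecond field is optional, as noted in the C comment
        if value is None:
            return None
        date_part, _, time_part = value.partition(' ')
        year, month, day = (int(p) for p in date_part.split('-'))
        hms, _, micro = time_part.partition('.')
        hour, minute, second = (int(p) for p in hms.split(':'))
        return datetime.datetime(year, month, day, hour, minute, second,
                                 int(micro) if micro else 0)

    print(str_to_datetime("2000-01-01 12:30:45.123456"))
    # 2000-01-01 12:30:45.123456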
@@ -1,727 +0,0 @@
/*
resultproxy.c
Copyright (C) 2010-2017 the SQLAlchemy authors and contributors <see AUTHORS file>
Copyright (C) 2010-2011 Gaetan de Menten gdementen@gmail.com

This module is part of SQLAlchemy and is released under
the MIT License: http://www.opensource.org/licenses/mit-license.php
*/

#include <Python.h>

#define MODULE_NAME "cresultproxy"
#define MODULE_DOC "Module containing C versions of core ResultProxy classes."

#if PY_VERSION_HEX < 0x02050000 && !defined(PY_SSIZE_T_MIN)
typedef int Py_ssize_t;
#define PY_SSIZE_T_MAX INT_MAX
#define PY_SSIZE_T_MIN INT_MIN
typedef Py_ssize_t (*lenfunc)(PyObject *);
#define PyInt_FromSsize_t(x) PyInt_FromLong(x)
typedef intargfunc ssizeargfunc;
#endif


/***********
 * Structs *
 ***********/

typedef struct {
    PyObject_HEAD
    PyObject *parent;
    PyObject *row;
    PyObject *processors;
    PyObject *keymap;
} BaseRowProxy;

/****************
 * BaseRowProxy *
 ****************/

static PyObject *
safe_rowproxy_reconstructor(PyObject *self, PyObject *args)
{
    PyObject *cls, *state, *tmp;
    BaseRowProxy *obj;

    if (!PyArg_ParseTuple(args, "OO", &cls, &state))
        return NULL;

    obj = (BaseRowProxy *)PyObject_CallMethod(cls, "__new__", "O", cls);
    if (obj == NULL)
        return NULL;

    tmp = PyObject_CallMethod((PyObject *)obj, "__setstate__", "O", state);
    if (tmp == NULL) {
        Py_DECREF(obj);
        return NULL;
    }
    Py_DECREF(tmp);

    if (obj->parent == NULL || obj->row == NULL ||
            obj->processors == NULL || obj->keymap == NULL) {
        PyErr_SetString(PyExc_RuntimeError,
            "__setstate__ for BaseRowProxy subclasses must set values "
            "for parent, row, processors and keymap");
        Py_DECREF(obj);
        return NULL;
    }

    return (PyObject *)obj;
}

static int
BaseRowProxy_init(BaseRowProxy *self, PyObject *args, PyObject *kwds)
{
    PyObject *parent, *row, *processors, *keymap;

    if (!PyArg_UnpackTuple(args, "BaseRowProxy", 4, 4,
                           &parent, &row, &processors, &keymap))
        return -1;

    Py_INCREF(parent);
    self->parent = parent;

    if (!PySequence_Check(row)) {
        PyErr_SetString(PyExc_TypeError, "row must be a sequence");
        return -1;
    }
    Py_INCREF(row);
    self->row = row;

    if (!PyList_CheckExact(processors)) {
        PyErr_SetString(PyExc_TypeError, "processors must be a list");
        return -1;
    }
    Py_INCREF(processors);
    self->processors = processors;

    if (!PyDict_CheckExact(keymap)) {
        PyErr_SetString(PyExc_TypeError, "keymap must be a dict");
        return -1;
    }
    Py_INCREF(keymap);
    self->keymap = keymap;

    return 0;
}

/* We need the reduce method because otherwise the default implementation
 * does very weird stuff for pickle protocol 0 and 1. It calls
 * BaseRowProxy.__new__(RowProxy_instance) upon *pickling*.
 */
static PyObject *
BaseRowProxy_reduce(PyObject *self)
{
    PyObject *method, *state;
    PyObject *module, *reconstructor, *cls;

    method = PyObject_GetAttrString(self, "__getstate__");
    if (method == NULL)
        return NULL;

    state = PyObject_CallObject(method, NULL);
    Py_DECREF(method);
    if (state == NULL)
        return NULL;

    module = PyImport_ImportModule("sqlalchemy.engine.result");
    if (module == NULL)
        return NULL;

    reconstructor = PyObject_GetAttrString(module, "rowproxy_reconstructor");
    Py_DECREF(module);
    if (reconstructor == NULL) {
        Py_DECREF(state);
        return NULL;
    }

    cls = PyObject_GetAttrString(self, "__class__");
    if (cls == NULL) {
        Py_DECREF(reconstructor);
        Py_DECREF(state);
        return NULL;
    }

    return Py_BuildValue("(N(NN))", reconstructor, cls, state);
}

static void
BaseRowProxy_dealloc(BaseRowProxy *self)
{
    Py_XDECREF(self->parent);
    Py_XDECREF(self->row);
    Py_XDECREF(self->processors);
    Py_XDECREF(self->keymap);
#if PY_MAJOR_VERSION >= 3
    Py_TYPE(self)->tp_free((PyObject *)self);
#else
    self->ob_type->tp_free((PyObject *)self);
#endif
}

static PyObject *
BaseRowProxy_processvalues(PyObject *values, PyObject *processors, int astuple)
{
    Py_ssize_t num_values, num_processors;
    PyObject **valueptr, **funcptr, **resultptr;
    PyObject *func, *result, *processed_value, *values_fastseq;

    num_values = PySequence_Length(values);
    num_processors = PyList_Size(processors);
    if (num_values != num_processors) {
        PyErr_Format(PyExc_RuntimeError,
            "number of values in row (%d) differ from number of column "
            "processors (%d)",
            (int)num_values, (int)num_processors);
        return NULL;
    }

    if (astuple) {
        result = PyTuple_New(num_values);
    } else {
        result = PyList_New(num_values);
    }
    if (result == NULL)
        return NULL;

    values_fastseq = PySequence_Fast(values, "row must be a sequence");
    if (values_fastseq == NULL)
        return NULL;

    valueptr = PySequence_Fast_ITEMS(values_fastseq);
    funcptr = PySequence_Fast_ITEMS(processors);
    resultptr = PySequence_Fast_ITEMS(result);
    while (--num_values >= 0) {
        func = *funcptr;
        if (func != Py_None) {
            processed_value = PyObject_CallFunctionObjArgs(func, *valueptr,
                                                           NULL);
            if (processed_value == NULL) {
                Py_DECREF(values_fastseq);
                Py_DECREF(result);
                return NULL;
            }
            *resultptr = processed_value;
        } else {
            Py_INCREF(*valueptr);
            *resultptr = *valueptr;
        }
        valueptr++;
        funcptr++;
        resultptr++;
    }
    Py_DECREF(values_fastseq);
    return result;
}

static PyListObject *
BaseRowProxy_values(BaseRowProxy *self)
{
    return (PyListObject *)BaseRowProxy_processvalues(self->row,
                                                      self->processors, 0);
}

static PyObject *
BaseRowProxy_iter(BaseRowProxy *self)
{
    PyObject *values, *result;

    values = BaseRowProxy_processvalues(self->row, self->processors, 1);
    if (values == NULL)
        return NULL;

    result = PyObject_GetIter(values);
    Py_DECREF(values);
    if (result == NULL)
        return NULL;

    return result;
}

static Py_ssize_t
BaseRowProxy_length(BaseRowProxy *self)
{
    return PySequence_Length(self->row);
}

static PyObject *
BaseRowProxy_subscript(BaseRowProxy *self, PyObject *key)
{
    PyObject *processors, *values;
    PyObject *processor, *value, *processed_value;
    PyObject *row, *record, *result, *indexobject;
    PyObject *exc_module, *exception, *cstr_obj;
#if PY_MAJOR_VERSION >= 3
    PyObject *bytes;
#endif
    char *cstr_key;
    long index;
    int key_fallback = 0;
    int tuple_check = 0;

#if PY_MAJOR_VERSION < 3
    if (PyInt_CheckExact(key)) {
        index = PyInt_AS_LONG(key);
        if (index < 0)
            index += BaseRowProxy_length(self);
    } else
#endif

    if (PyLong_CheckExact(key)) {
        index = PyLong_AsLong(key);
        if ((index == -1) && PyErr_Occurred())
            /* -1 can be either the actual value, or an error flag. */
            return NULL;
        if (index < 0)
            index += BaseRowProxy_length(self);
    } else if (PySlice_Check(key)) {
        values = PyObject_GetItem(self->row, key);
        if (values == NULL)
            return NULL;

        processors = PyObject_GetItem(self->processors, key);
        if (processors == NULL) {
            Py_DECREF(values);
            return NULL;
        }

        result = BaseRowProxy_processvalues(values, processors, 1);
        Py_DECREF(values);
        Py_DECREF(processors);
        return result;
    } else {
        record = PyDict_GetItem((PyObject *)self->keymap, key);
        if (record == NULL) {
            record = PyObject_CallMethod(self->parent, "_key_fallback",
                                         "O", key);
            if (record == NULL)
                return NULL;
            key_fallback = 1;
        }

        indexobject = PyTuple_GetItem(record, 2);
        if (indexobject == NULL)
            return NULL;

        if (key_fallback) {
            Py_DECREF(record);
        }

        if (indexobject == Py_None) {
            exc_module = PyImport_ImportModule("sqlalchemy.exc");
            if (exc_module == NULL)
                return NULL;

            exception = PyObject_GetAttrString(exc_module,
                                               "InvalidRequestError");
            Py_DECREF(exc_module);
            if (exception == NULL)
                return NULL;

            cstr_obj = PyTuple_GetItem(record, 1);
            if (cstr_obj == NULL)
                return NULL;

            cstr_obj = PyObject_Str(cstr_obj);
            if (cstr_obj == NULL)
                return NULL;

            /*
            FIXME: raise encoding error exception (in both versions below)
            if the key contains non-ascii chars, instead of an
            InvalidRequestError without any message like in the
            python version.
            */


#if PY_MAJOR_VERSION >= 3
            bytes = PyUnicode_AsASCIIString(cstr_obj);
            if (bytes == NULL)
                return NULL;
            cstr_key = PyBytes_AS_STRING(bytes);
#else
            cstr_key = PyString_AsString(cstr_obj);
#endif
            if (cstr_key == NULL) {
                Py_DECREF(cstr_obj);
                return NULL;
            }
            Py_DECREF(cstr_obj);

            PyErr_Format(exception,
                    "Ambiguous column name '%.200s' in "
                    "result set column descriptions", cstr_key);
            return NULL;
        }

#if PY_MAJOR_VERSION >= 3
        index = PyLong_AsLong(indexobject);
#else
        index = PyInt_AsLong(indexobject);
#endif
        if ((index == -1) && PyErr_Occurred())
            /* -1 can be either the actual value, or an error flag. */
            return NULL;
    }
    processor = PyList_GetItem(self->processors, index);
    if (processor == NULL)
        return NULL;

    row = self->row;
    if (PyTuple_CheckExact(row)) {
        value = PyTuple_GetItem(row, index);
        tuple_check = 1;
    }
    else {
        value = PySequence_GetItem(row, index);
        tuple_check = 0;
    }

    if (value == NULL)
        return NULL;

    if (processor != Py_None) {
        processed_value = PyObject_CallFunctionObjArgs(processor, value, NULL);
        if (!tuple_check) {
            Py_DECREF(value);
        }
        return processed_value;
    } else {
        if (tuple_check) {
            Py_INCREF(value);
        }
        return value;
    }
}

static PyObject *
BaseRowProxy_getitem(PyObject *self, Py_ssize_t i)
{
    PyObject *index;

#if PY_MAJOR_VERSION >= 3
    index = PyLong_FromSsize_t(i);
#else
    index = PyInt_FromSsize_t(i);
#endif
    return BaseRowProxy_subscript((BaseRowProxy*)self, index);
}

static PyObject *
BaseRowProxy_getattro(BaseRowProxy *self, PyObject *name)
{
    PyObject *tmp;
#if PY_MAJOR_VERSION >= 3
    PyObject *err_bytes;
#endif

    if (!(tmp = PyObject_GenericGetAttr((PyObject *)self, name))) {
        if (!PyErr_ExceptionMatches(PyExc_AttributeError))
            return NULL;
        PyErr_Clear();
    }
    else
        return tmp;

    tmp = BaseRowProxy_subscript(self, name);
    if (tmp == NULL && PyErr_ExceptionMatches(PyExc_KeyError)) {

#if PY_MAJOR_VERSION >= 3
        err_bytes = PyUnicode_AsASCIIString(name);
        if (err_bytes == NULL)
            return NULL;
        PyErr_Format(
                PyExc_AttributeError,
                "Could not locate column in row for column '%.200s'",
                PyBytes_AS_STRING(err_bytes)
            );
#else
        PyErr_Format(
                PyExc_AttributeError,
                "Could not locate column in row for column '%.200s'",
                PyString_AsString(name)
            );
#endif
        return NULL;
    }
    return tmp;
}

/***********************
 * getters and setters *
 ***********************/

static PyObject *
BaseRowProxy_getparent(BaseRowProxy *self, void *closure)
{
    Py_INCREF(self->parent);
    return self->parent;
}

static int
BaseRowProxy_setparent(BaseRowProxy *self, PyObject *value, void *closure)
{
    PyObject *module, *cls;

    if (value == NULL) {
        PyErr_SetString(PyExc_TypeError,
                        "Cannot delete the 'parent' attribute");
        return -1;
    }

    module = PyImport_ImportModule("sqlalchemy.engine.result");
    if (module == NULL)
        return -1;

    cls = PyObject_GetAttrString(module, "ResultMetaData");
    Py_DECREF(module);
    if (cls == NULL)
        return -1;

    if (PyObject_IsInstance(value, cls) != 1) {
        PyErr_SetString(PyExc_TypeError,
                        "The 'parent' attribute value must be an instance of "
                        "ResultMetaData");
        return -1;
    }
    Py_DECREF(cls);
    Py_XDECREF(self->parent);
    Py_INCREF(value);
    self->parent = value;

    return 0;
}

static PyObject *
BaseRowProxy_getrow(BaseRowProxy *self, void *closure)
{
    Py_INCREF(self->row);
    return self->row;
}

static int
BaseRowProxy_setrow(BaseRowProxy *self, PyObject *value, void *closure)
{
    if (value == NULL) {
        PyErr_SetString(PyExc_TypeError,
                        "Cannot delete the 'row' attribute");
        return -1;
    }

    if (!PySequence_Check(value)) {
        PyErr_SetString(PyExc_TypeError,
                        "The 'row' attribute value must be a sequence");
        return -1;
    }

    Py_XDECREF(self->row);
    Py_INCREF(value);
    self->row = value;

    return 0;
}

static PyObject *
BaseRowProxy_getprocessors(BaseRowProxy *self, void *closure)
{
    Py_INCREF(self->processors);
    return self->processors;
}

static int
BaseRowProxy_setprocessors(BaseRowProxy *self, PyObject *value, void *closure)
{
    if (value == NULL) {
        PyErr_SetString(PyExc_TypeError,
                        "Cannot delete the 'processors' attribute");
        return -1;
    }

    if (!PyList_CheckExact(value)) {
        PyErr_SetString(PyExc_TypeError,
                        "The 'processors' attribute value must be a list");
        return -1;
    }

    Py_XDECREF(self->processors);
    Py_INCREF(value);
    self->processors = value;

    return 0;
}

static PyObject *
BaseRowProxy_getkeymap(BaseRowProxy *self, void *closure)
{
    Py_INCREF(self->keymap);
    return self->keymap;
}

static int
BaseRowProxy_setkeymap(BaseRowProxy *self, PyObject *value, void *closure)
{
    if (value == NULL) {
        PyErr_SetString(PyExc_TypeError,
                        "Cannot delete the 'keymap' attribute");
        return -1;
    }

    if (!PyDict_CheckExact(value)) {
        PyErr_SetString(PyExc_TypeError,
                        "The 'keymap' attribute value must be a dict");
        return -1;
    }

    Py_XDECREF(self->keymap);
    Py_INCREF(value);
    self->keymap = value;

    return 0;
}

static PyGetSetDef BaseRowProxy_getseters[] = {
    {"_parent",
     (getter)BaseRowProxy_getparent, (setter)BaseRowProxy_setparent,
     "ResultMetaData",
     NULL},
    {"_row",
     (getter)BaseRowProxy_getrow, (setter)BaseRowProxy_setrow,
     "Original row tuple",
     NULL},
    {"_processors",
     (getter)BaseRowProxy_getprocessors, (setter)BaseRowProxy_setprocessors,
     "list of type processors",
     NULL},
    {"_keymap",
     (getter)BaseRowProxy_getkeymap, (setter)BaseRowProxy_setkeymap,
     "Key to (processor, index) dict",
     NULL},
    {NULL}
};

static PyMethodDef BaseRowProxy_methods[] = {
    {"values", (PyCFunction)BaseRowProxy_values, METH_NOARGS,
     "Return the values represented by this BaseRowProxy as a list."},
    {"__reduce__", (PyCFunction)BaseRowProxy_reduce, METH_NOARGS,
     "Pickle support method."},
    {NULL}  /* Sentinel */
};

static PySequenceMethods BaseRowProxy_as_sequence = {
    (lenfunc)BaseRowProxy_length,       /* sq_length */
    0,                                  /* sq_concat */
    0,                                  /* sq_repeat */
    (ssizeargfunc)BaseRowProxy_getitem, /* sq_item */
    0,                                  /* sq_slice */
    0,                                  /* sq_ass_item */
    0,                                  /* sq_ass_slice */
    0,                                  /* sq_contains */
    0,                                  /* sq_inplace_concat */
    0,                                  /* sq_inplace_repeat */
};

static PyMappingMethods BaseRowProxy_as_mapping = {
    (lenfunc)BaseRowProxy_length,       /* mp_length */
    (binaryfunc)BaseRowProxy_subscript, /* mp_subscript */
    0                                   /* mp_ass_subscript */
};

static PyTypeObject BaseRowProxyType = {
    PyVarObject_HEAD_INIT(NULL, 0)
    "sqlalchemy.cresultproxy.BaseRowProxy",     /* tp_name */
    sizeof(BaseRowProxy),                       /* tp_basicsize */
    0,                                          /* tp_itemsize */
    (destructor)BaseRowProxy_dealloc,           /* tp_dealloc */
    0,                                          /* tp_print */
    0,                                          /* tp_getattr */
    0,                                          /* tp_setattr */
    0,                                          /* tp_compare */
    0,                                          /* tp_repr */
    0,                                          /* tp_as_number */
    &BaseRowProxy_as_sequence,                  /* tp_as_sequence */
    &BaseRowProxy_as_mapping,                   /* tp_as_mapping */
    0,                                          /* tp_hash */
    0,                                          /* tp_call */
    0,                                          /* tp_str */
    (getattrofunc)BaseRowProxy_getattro,        /* tp_getattro */
    0,                                          /* tp_setattro */
    0,                                          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,   /* tp_flags */
    "BaseRowProxy is a abstract base class for RowProxy",   /* tp_doc */
    0,                                          /* tp_traverse */
    0,                                          /* tp_clear */
    0,                                          /* tp_richcompare */
    0,                                          /* tp_weaklistoffset */
    (getiterfunc)BaseRowProxy_iter,             /* tp_iter */
    0,                                          /* tp_iternext */
    BaseRowProxy_methods,                       /* tp_methods */
    0,                                          /* tp_members */
    BaseRowProxy_getseters,                     /* tp_getset */
    0,                                          /* tp_base */
    0,                                          /* tp_dict */
    0,                                          /* tp_descr_get */
    0,                                          /* tp_descr_set */
    0,                                          /* tp_dictoffset */
    (initproc)BaseRowProxy_init,                /* tp_init */
    0,                                          /* tp_alloc */
    0                                           /* tp_new */
};

static PyMethodDef module_methods[] = {
    {"safe_rowproxy_reconstructor", safe_rowproxy_reconstructor, METH_VARARGS,
     "reconstruct a RowProxy instance from its pickled form."},
    {NULL, NULL, 0, NULL}        /* Sentinel */
};

#ifndef PyMODINIT_FUNC  /* declarations for DLL import/export */
#define PyMODINIT_FUNC void
#endif


#if PY_MAJOR_VERSION >= 3

static struct PyModuleDef module_def = {
    PyModuleDef_HEAD_INIT,
    MODULE_NAME,
    MODULE_DOC,
    -1,
    module_methods
};

#define INITERROR return NULL

PyMODINIT_FUNC
PyInit_cresultproxy(void)

#else

#define INITERROR return

PyMODINIT_FUNC
initcresultproxy(void)

#endif

{
    PyObject *m;

    BaseRowProxyType.tp_new = PyType_GenericNew;
    if (PyType_Ready(&BaseRowProxyType) < 0)
        INITERROR;

#if PY_MAJOR_VERSION >= 3
    m = PyModule_Create(&module_def);
#else
    m = Py_InitModule3(MODULE_NAME, module_methods, MODULE_DOC);
#endif
    if (m == NULL)
        INITERROR;

    Py_INCREF(&BaseRowProxyType);
    PyModule_AddObject(m, "BaseRowProxy", (PyObject *)&BaseRowProxyType);

#if PY_MAJOR_VERSION >= 3
    return m;
#endif
}
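The data model behind BaseRowProxy is easier to see in Python: a raw row sequence, a parallel list of per-column processors, and a keymap whose records carry the column index in slot 2 (the PyTuple_GetItem(record, 2) call above). An illustrative sketch, not SQLAlchemy's actual RowProxy:

    class RowSketch(object):
        def __init__(self, row, processors, keymap):
            self._row = row                # raw DBAPI row
            self._processors = processors  # one converter (or None) per column
            self._keymap = keymap          # key -> (processor, name, index)

        def __getitem__(self, key):
            index = key if isinstance(key, int) else self._keymap[key][2]
            proc = self._processors[index]
            value = self._row[index]
            return proc(value) if proc is not None else value

    row = RowSketch(("42", "alice"), [int, None],
                    {"id": (int, "id", 0), "name": (None, "name", 1)})
    print(row[0], row["name"])  # 42 alice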
@@ -1,225 +0,0 @@
/*
utils.c
Copyright (C) 2012-2017 the SQLAlchemy authors and contributors <see AUTHORS file>

This module is part of SQLAlchemy and is released under
the MIT License: http://www.opensource.org/licenses/mit-license.php
*/

#include <Python.h>

#define MODULE_NAME "cutils"
#define MODULE_DOC "Module containing C versions of utility functions."

/*
Given arguments from the calling form *multiparams, **params,
return a list of bind parameter structures, usually a list of
dictionaries.

In the case of 'raw' execution which accepts positional parameters,
it may be a list of tuples or lists.

*/
static PyObject *
distill_params(PyObject *self, PyObject *args)
{
    PyObject *multiparams, *params;
    PyObject *enclosing_list, *double_enclosing_list;
    PyObject *zero_element, *zero_element_item;
    Py_ssize_t multiparam_size, zero_element_length;

    if (!PyArg_UnpackTuple(args, "_distill_params", 2, 2, &multiparams, &params)) {
        return NULL;
    }

    if (multiparams != Py_None) {
        multiparam_size = PyTuple_Size(multiparams);
        if (multiparam_size < 0) {
            return NULL;
        }
    }
    else {
        multiparam_size = 0;
    }

    if (multiparam_size == 0) {
        if (params != Py_None && PyDict_Size(params) != 0) {
            enclosing_list = PyList_New(1);
            if (enclosing_list == NULL) {
                return NULL;
            }
            Py_INCREF(params);
            if (PyList_SetItem(enclosing_list, 0, params) == -1) {
                Py_DECREF(params);
                Py_DECREF(enclosing_list);
                return NULL;
            }
        }
        else {
            enclosing_list = PyList_New(0);
            if (enclosing_list == NULL) {
                return NULL;
            }
        }
        return enclosing_list;
    }
    else if (multiparam_size == 1) {
        zero_element = PyTuple_GetItem(multiparams, 0);
        if (PyTuple_Check(zero_element) || PyList_Check(zero_element)) {
            zero_element_length = PySequence_Length(zero_element);

            if (zero_element_length != 0) {
                zero_element_item = PySequence_GetItem(zero_element, 0);
                if (zero_element_item == NULL) {
                    return NULL;
                }
            }
            else {
                zero_element_item = NULL;
            }

            if (zero_element_length == 0 ||
                    (
                        PyObject_HasAttrString(zero_element_item, "__iter__") &&
                        !PyObject_HasAttrString(zero_element_item, "strip")
                    )
                ) {
                /*
                 * execute(stmt, [{}, {}, {}, ...])
                 * execute(stmt, [(), (), (), ...])
                 */
                Py_XDECREF(zero_element_item);
                Py_INCREF(zero_element);
                return zero_element;
            }
            else {
                /*
                 * execute(stmt, ("value", "value"))
                 */
                Py_XDECREF(zero_element_item);
                enclosing_list = PyList_New(1);
                if (enclosing_list == NULL) {
                    return NULL;
                }
                Py_INCREF(zero_element);
                if (PyList_SetItem(enclosing_list, 0, zero_element) == -1) {
                    Py_DECREF(zero_element);
                    Py_DECREF(enclosing_list);
                    return NULL;
                }
                return enclosing_list;
            }
        }
        else if (PyObject_HasAttrString(zero_element, "keys")) {
            /*
             * execute(stmt, {"key":"value"})
             */
            enclosing_list = PyList_New(1);
            if (enclosing_list == NULL) {
                return NULL;
            }
            Py_INCREF(zero_element);
            if (PyList_SetItem(enclosing_list, 0, zero_element) == -1) {
                Py_DECREF(zero_element);
                Py_DECREF(enclosing_list);
                return NULL;
            }
            return enclosing_list;
        } else {
            enclosing_list = PyList_New(1);
            if (enclosing_list == NULL) {
                return NULL;
            }
            double_enclosing_list = PyList_New(1);
            if (double_enclosing_list == NULL) {
                Py_DECREF(enclosing_list);
                return NULL;
            }
            Py_INCREF(zero_element);
            if (PyList_SetItem(enclosing_list, 0, zero_element) == -1) {
                Py_DECREF(zero_element);
                Py_DECREF(enclosing_list);
                Py_DECREF(double_enclosing_list);
                return NULL;
            }
            if (PyList_SetItem(double_enclosing_list, 0, enclosing_list) == -1) {
                Py_DECREF(zero_element);
                Py_DECREF(enclosing_list);
                Py_DECREF(double_enclosing_list);
                return NULL;
            }
            return double_enclosing_list;
        }
    }
    else {
        zero_element = PyTuple_GetItem(multiparams, 0);
        if (PyObject_HasAttrString(zero_element, "__iter__") &&
                !PyObject_HasAttrString(zero_element, "strip")
            ) {
            Py_INCREF(multiparams);
            return multiparams;
        }
        else {
            enclosing_list = PyList_New(1);
            if (enclosing_list == NULL) {
                return NULL;
            }
            Py_INCREF(multiparams);
            if (PyList_SetItem(enclosing_list, 0, multiparams) == -1) {
                Py_DECREF(multiparams);
                Py_DECREF(enclosing_list);
                return NULL;
            }
            return enclosing_list;
        }
    }
}

static PyMethodDef module_methods[] = {
    {"_distill_params", distill_params, METH_VARARGS,
     "Distill an execute() parameter structure."},
    {NULL, NULL, 0, NULL}        /* Sentinel */
};

#ifndef PyMODINIT_FUNC  /* declarations for DLL import/export */
#define PyMODINIT_FUNC void
#endif

#if PY_MAJOR_VERSION >= 3

static struct PyModuleDef module_def = {
    PyModuleDef_HEAD_INIT,
    MODULE_NAME,
    MODULE_DOC,
    -1,
    module_methods
};
#endif


#if PY_MAJOR_VERSION >= 3
PyMODINIT_FUNC
PyInit_cutils(void)
#else
PyMODINIT_FUNC
initcutils(void)
#endif
{
    PyObject *m;

#if PY_MAJOR_VERSION >= 3
    m = PyModule_Create(&module_def);
#else
    m = Py_InitModule3(MODULE_NAME, module_methods, MODULE_DOC);
#endif

#if PY_MAJOR_VERSION >= 3
    if (m == NULL)
        return NULL;
    return m;
#else
    if (m == NULL)
        return;
#endif
}
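The branch structure of distill_params reads more directly in Python. A hedged sketch of the same normalization rules (SQLAlchemy keeps a pure-Python twin of this function in the engine package; this is a paraphrase, not that code):

    def distill_params_sketch(multiparams, params):
        # normalize execute(*multiparams, **params) into a list of
        # bind-parameter structures
        if not multiparams:
            # execute(stmt) or execute(stmt, **params)
            return [params] if params else []
        elif len(multiparams) == 1:
            zero = multiparams[0]
            if isinstance(zero, (list, tuple)):
                if not zero or (hasattr(zero[0], '__iter__') and
                                not hasattr(zero[0], 'strip')):
                    # execute(stmt, [{...}, {...}]) - already a list of sets
                    return zero
                # execute(stmt, ("value", "value")) - one positional set
                return [zero]
            elif hasattr(zero, 'keys'):
                # execute(stmt, {"key": "value"}) - one dictionary
                return [zero]
            # single scalar - wrap twice, like double_enclosing_list above
            return [[zero]]
        else:
            if (hasattr(multiparams[0], '__iter__') and
                    not hasattr(multiparams[0], 'strip')):
                return multiparams
            return [multiparams]

    print(distill_params_sketch(({"x": 1},), {}))  # [{'x': 1}]
    print(distill_params_sketch((), {"x": 1}))     # [{'x': 1}]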
@@ -1,10 +0,0 @@
# connectors/__init__.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php


class Connector(object):
    pass
@@ -1,150 +0,0 @@
# connectors/mxodbc.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
Provide a SQLALchemy connector for the eGenix mxODBC commercial
Python adapter for ODBC. This is not a free product, but eGenix
provides SQLAlchemy with a license for use in continuous integration
testing.

This has been tested for use with mxODBC 3.1.2 on SQL Server 2005
and 2008, using the SQL Server Native driver. However, it is
possible for this to be used on other database platforms.

For more info on mxODBC, see http://www.egenix.com/

"""

import sys
import re
import warnings

from . import Connector


class MxODBCConnector(Connector):
    driver = 'mxodbc'

    supports_sane_multi_rowcount = False
    supports_unicode_statements = True
    supports_unicode_binds = True

    supports_native_decimal = True

    @classmethod
    def dbapi(cls):
        # this classmethod will normally be replaced by an instance
        # attribute of the same name, so this is normally only called once.
        cls._load_mx_exceptions()
        platform = sys.platform
        if platform == 'win32':
            from mx.ODBC import Windows as module
        # this can be the string "linux2", and possibly others
        elif 'linux' in platform:
            from mx.ODBC import unixODBC as module
        elif platform == 'darwin':
            from mx.ODBC import iODBC as module
        else:
            raise ImportError("Unrecognized platform for mxODBC import")
        return module

    @classmethod
    def _load_mx_exceptions(cls):
        """ Import mxODBC exception classes into the module namespace,
        as if they had been imported normally. This is done here
        to avoid requiring all SQLAlchemy users to install mxODBC.
        """
        global InterfaceError, ProgrammingError
        from mx.ODBC import InterfaceError
        from mx.ODBC import ProgrammingError

    def on_connect(self):
        def connect(conn):
            conn.stringformat = self.dbapi.MIXED_STRINGFORMAT
            conn.datetimeformat = self.dbapi.PYDATETIME_DATETIMEFORMAT
            conn.decimalformat = self.dbapi.DECIMAL_DECIMALFORMAT
            conn.errorhandler = self._error_handler()
        return connect

    def _error_handler(self):
        """ Return a handler that adjusts mxODBC's raised Warnings to
        emit Python standard warnings.
        """
        from mx.ODBC.Error import Warning as MxOdbcWarning

        def error_handler(connection, cursor, errorclass, errorvalue):
            if issubclass(errorclass, MxOdbcWarning):
                errorclass.__bases__ = (Warning,)
                warnings.warn(message=str(errorvalue),
                              category=errorclass,
                              stacklevel=2)
            else:
                raise errorclass(errorvalue)
        return error_handler

    def create_connect_args(self, url):
        """ Return a tuple of *args,**kwargs for creating a connection.

        The mxODBC 3.x connection constructor looks like this:

            connect(dsn, user='', password='',
                    clear_auto_commit=1, errorhandler=None)

        This method translates the values in the provided uri
        into args and kwargs needed to instantiate an mxODBC Connection.

        The arg 'errorhandler' is not used by SQLAlchemy and will
        not be populated.

        """
        opts = url.translate_connect_args(username='user')
        opts.update(url.query)
        args = opts.pop('host')
        opts.pop('port', None)
        opts.pop('database', None)
        return (args,), opts

    def is_disconnect(self, e, connection, cursor):
        # TODO: eGenix recommends checking connection.closed here
        # Does that detect dropped connections ?
        if isinstance(e, self.dbapi.ProgrammingError):
            return "connection already closed" in str(e)
        elif isinstance(e, self.dbapi.Error):
            return '[08S01]' in str(e)
        else:
            return False

    def _get_server_version_info(self, connection):
        # eGenix suggests using conn.dbms_version instead
        # of what we're doing here
        dbapi_con = connection.connection
        version = []
        r = re.compile(r'[.\-]')
        # 18 == pyodbc.SQL_DBMS_VER
        for n in r.split(dbapi_con.getinfo(18)[1]):
            try:
                version.append(int(n))
            except ValueError:
                version.append(n)
        return tuple(version)

    def _get_direct(self, context):
        if context:
            native_odbc_execute = context.execution_options.\
                get('native_odbc_execute', 'auto')
            # default to direct=True in all cases, is more generally
            # compatible especially with SQL Server
            return False if native_odbc_execute is True else True
        else:
            return True

    def do_executemany(self, cursor, statement, parameters, context=None):
        cursor.executemany(
            statement, parameters, direct=self._get_direct(context))

    def do_execute(self, cursor, statement, parameters, context=None):
        cursor.execute(statement, parameters, direct=self._get_direct(context))
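The create_connect_args contract above is straightforward to demonstrate: the URL's host is treated as the ODBC data source name and becomes the single positional argument, with the remaining fields passed through as keywords. A standalone sketch with illustrative values (the helper name is hypothetical, not part of this commit):

    def mxodbc_connect_args(host, user=None, password=None, **query):
        # host -> positional DSN; everything else -> keyword arguments,
        # paralleling opts.pop('host') in the method above
        opts = {k: v for k, v in dict(user=user, password=password,
                                      **query).items() if v is not None}
        return (host,), opts

    args, kwargs = mxodbc_connect_args("mydsn", user="scott", password="tiger")
    print(args, kwargs)  # ('mydsn',) {'user': 'scott', 'password': 'tiger'}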
@ -1,196 +0,0 @@
# connectors/pyodbc.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from . import Connector
from .. import util


import sys
import re


class PyODBCConnector(Connector):
    driver = 'pyodbc'

    supports_sane_multi_rowcount = False

    if util.py2k:
        # PyODBC unicode is broken on UCS-4 builds
        supports_unicode = sys.maxunicode == 65535
        supports_unicode_statements = supports_unicode

    supports_native_decimal = True
    default_paramstyle = 'named'

    # for non-DSN connections, this *may* be used to
    # hold the desired driver name
    pyodbc_driver_name = None

    # will be set to True after initialize()
    # if the freetds.so is detected
    freetds = False

    # will be set to the string version of
    # the FreeTDS driver if freetds is detected
    freetds_driver_version = None

    # will be set to True after initialize()
    # if the libessqlsrv.so is detected
    easysoft = False

    def __init__(self, supports_unicode_binds=None, **kw):
        super(PyODBCConnector, self).__init__(**kw)
        self._user_supports_unicode_binds = supports_unicode_binds

    @classmethod
    def dbapi(cls):
        return __import__('pyodbc')

    def create_connect_args(self, url):
        opts = url.translate_connect_args(username='user')
        opts.update(url.query)

        keys = opts

        query = url.query

        connect_args = {}
        for param in ('ansi', 'unicode_results', 'autocommit'):
            if param in keys:
                connect_args[param] = util.asbool(keys.pop(param))

        if 'odbc_connect' in keys:
            connectors = [util.unquote_plus(keys.pop('odbc_connect'))]
        else:
            def check_quote(token):
                if ";" in str(token):
                    token = "'%s'" % token
                return token

            keys = dict(
                (k, check_quote(v)) for k, v in keys.items()
            )

            dsn_connection = 'dsn' in keys or \
                ('host' in keys and 'database' not in keys)
            if dsn_connection:
                connectors = ['dsn=%s' % (keys.pop('host', '') or
                                          keys.pop('dsn', ''))]
            else:
                port = ''
                if 'port' in keys and 'port' not in query:
                    port = ',%d' % int(keys.pop('port'))

                connectors = []
                driver = keys.pop('driver', self.pyodbc_driver_name)
                if driver is None:
                    util.warn(
                        "No driver name specified; "
                        "this is expected by PyODBC when using "
                        "DSN-less connections")
                else:
                    connectors.append("DRIVER={%s}" % driver)

                connectors.extend(
                    [
                        'Server=%s%s' % (keys.pop('host', ''), port),
                        'Database=%s' % keys.pop('database', '')
                    ])

            user = keys.pop("user", None)
            if user:
                connectors.append("UID=%s" % user)
                connectors.append("PWD=%s" % keys.pop('password', ''))
            else:
                connectors.append("Trusted_Connection=Yes")

            # if set to 'Yes', the ODBC layer will try to automagically
            # convert textual data from your database encoding to your
            # client encoding. This should obviously be set to 'No' if
            # you query a cp1253 encoded database from a latin1 client...
            if 'odbc_autotranslate' in keys:
                connectors.append("AutoTranslate=%s" %
                                  keys.pop("odbc_autotranslate"))

            connectors.extend(['%s=%s' % (k, v) for k, v in keys.items()])

        return [[";".join(connectors)], connect_args]

    def is_disconnect(self, e, connection, cursor):
        if isinstance(e, self.dbapi.ProgrammingError):
            return "The cursor's connection has been closed." in str(e) or \
                'Attempt to use a closed connection.' in str(e)
        elif isinstance(e, self.dbapi.Error):
            return '[08S01]' in str(e)
        else:
            return False

    def initialize(self, connection):
        # determine FreeTDS first. can't issue SQL easily
        # without getting unicode_statements/binds set up.

        pyodbc = self.dbapi

        dbapi_con = connection.connection

        _sql_driver_name = dbapi_con.getinfo(pyodbc.SQL_DRIVER_NAME)
        self.freetds = bool(re.match(r".*libtdsodbc.*\.so", _sql_driver_name))
        self.easysoft = bool(re.match(r".*libessqlsrv.*\.so", _sql_driver_name))

        if self.freetds:
            self.freetds_driver_version = dbapi_con.getinfo(
                pyodbc.SQL_DRIVER_VER)

        self.supports_unicode_statements = (
            not util.py2k or
            (not self.freetds and not self.easysoft)
        )

        if self._user_supports_unicode_binds is not None:
            self.supports_unicode_binds = self._user_supports_unicode_binds
        elif util.py2k:
            self.supports_unicode_binds = (
                not self.freetds or self.freetds_driver_version >= '0.91'
            ) and not self.easysoft
        else:
            self.supports_unicode_binds = True

        # run other initialization which asks for user name, etc.
        super(PyODBCConnector, self).initialize(connection)

    def _dbapi_version(self):
        if not self.dbapi:
            return ()
        return self._parse_dbapi_version(self.dbapi.version)

    def _parse_dbapi_version(self, vers):
        m = re.match(
            r'(?:py.*-)?([\d\.]+)(?:-(\w+))?',
            vers
        )
        if not m:
            return ()
        vers = tuple([int(x) for x in m.group(1).split(".")])
        if m.group(2):
            vers += (m.group(2),)
        return vers

    def _get_server_version_info(self, connection):
        # NOTE: this function is not reliable, particularly when
        # freetds is in use. Implement database-specific server version
        # queries.
        dbapi_con = connection.connection
        version = []
        r = re.compile(r'[.\-]')
        for n in r.split(dbapi_con.getinfo(self.dbapi.SQL_DBMS_VER)):
            try:
                version.append(int(n))
            except ValueError:
                version.append(n)
        return tuple(version)
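
A minimal sketch of the ODBC connection strings ``create_connect_args``
builds (host names are made up, and the connector is called directly here
purely for illustration; normally a dialect class mixes it in). A host with
no database is treated as a DSN; host plus database produces a DSN-less
string::

    from sqlalchemy.engine.url import make_url

    connector = PyODBCConnector()

    # host with no database -> DSN connection
    args, kw = connector.create_connect_args(
        make_url("mssql+pyodbc://scott:tiger@mydsn"))
    # args[0] is roughly "dsn=mydsn;UID=scott;PWD=tiger"

    # host + database -> DSN-less; a driver= query argument avoids the warning
    args, kw = connector.create_connect_args(
        make_url("mssql+pyodbc://scott:tiger@dbhost:1433/mydb?driver=FreeTDS"))
    # args[0] is roughly
    # "DRIVER={FreeTDS};Server=dbhost,1433;Database=mydb;UID=scott;PWD=tiger"
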
@ -1,60 +0,0 @@
# connectors/zxJDBC.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

import sys
from . import Connector


class ZxJDBCConnector(Connector):
    driver = 'zxjdbc'

    supports_sane_rowcount = False
    supports_sane_multi_rowcount = False

    supports_unicode_binds = True
    supports_unicode_statements = sys.version > '2.5.0+'
    description_encoding = None
    default_paramstyle = 'qmark'

    jdbc_db_name = None
    jdbc_driver_name = None

    @classmethod
    def dbapi(cls):
        from com.ziclix.python.sql import zxJDBC
        return zxJDBC

    def _driver_kwargs(self):
        """Return kw arg dict to be sent to connect()."""
        return {}

    def _create_jdbc_url(self, url):
        """Create a JDBC url from a :class:`~sqlalchemy.engine.url.URL`"""
        return 'jdbc:%s://%s%s/%s' % (self.jdbc_db_name, url.host,
                                      url.port is not None
                                      and ':%s' % url.port or '',
                                      url.database)

    def create_connect_args(self, url):
        opts = self._driver_kwargs()
        opts.update(url.query)
        return [
            [self._create_jdbc_url(url),
             url.username, url.password,
             self.jdbc_driver_name],
            opts]

    def is_disconnect(self, e, connection, cursor):
        if not isinstance(e, self.dbapi.ProgrammingError):
            return False
        e = str(e)
        return 'connection is closed' in e or 'cursor is closed' in e

    def _get_server_version_info(self, connection):
        # use connection.connection.dbversion, and parse appropriately
        # to get a tuple
        raise NotImplementedError()
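
A sketch of the JDBC URL this connector assembles; since ``jdbc_db_name`` is
``None`` on the base class, the subclass here is hypothetical::

    from sqlalchemy.engine.url import make_url

    class MySQLZxJDBCConnector(ZxJDBCConnector):  # hypothetical subclass
        jdbc_db_name = 'mysql'
        jdbc_driver_name = 'com.mysql.jdbc.Driver'

    url = make_url("mysql+zxjdbc://scott:tiger@dbhost:3306/test")
    MySQLZxJDBCConnector()._create_jdbc_url(url)
    # -> 'jdbc:mysql://dbhost:3306/test'
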
@ -1,30 +0,0 @@
# databases/__init__.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Include imports from the sqlalchemy.dialects package for backwards
compatibility with pre 0.6 versions.

"""
from ..dialects.sqlite import base as sqlite
from ..dialects.postgresql import base as postgresql
postgres = postgresql
from ..dialects.mysql import base as mysql
from ..dialects.oracle import base as oracle
from ..dialects.firebird import base as firebird
from ..dialects.mssql import base as mssql
from ..dialects.sybase import base as sybase


__all__ = (
    'firebird',
    'mssql',
    'mysql',
    'postgresql',
    'sqlite',
    'oracle',
    'sybase',
)
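
The aliases above keep pre-0.6 import paths working; for example, the legacy
``postgres`` name resolves to the same module object as the current
``postgresql`` dialect::

    from sqlalchemy import databases
    from sqlalchemy.dialects.postgresql import base as postgresql

    assert databases.postgres is postgresql  # legacy alias, same module
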
@ -1,56 +0,0 @@
# dialects/__init__.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

__all__ = (
    'firebird',
    'mssql',
    'mysql',
    'oracle',
    'postgresql',
    'sqlite',
    'sybase',
)

from .. import util

_translates = {'postgres': 'postgresql'}


def _auto_fn(name):
    """default dialect importer.

    plugs into the :class:`.PluginLoader`
    as a first-hit system.

    """
    if "." in name:
        dialect, driver = name.split(".")
    else:
        dialect = name
        driver = "base"

    if dialect in _translates:
        translated = _translates[dialect]
        util.warn_deprecated(
            "The '%s' dialect name has been "
            "renamed to '%s'" % (dialect, translated)
        )
        dialect = translated
    try:
        module = __import__('sqlalchemy.dialects.%s' % (dialect, )).dialects
    except ImportError:
        return None

    module = getattr(module, dialect)
    if hasattr(module, driver):
        module = getattr(module, driver)
        return lambda: module.dialect
    else:
        return None

registry = util.PluginLoader("sqlalchemy.dialects", auto_fn=_auto_fn)

plugins = util.PluginLoader("sqlalchemy.plugins")
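
A sketch of how the ``registry`` above resolves names: ``PluginLoader`` tries
``_auto_fn`` first, so in-tree dialects load without any setuptools entry
point, and the legacy ``postgres`` name still resolves (with a deprecation
warning)::

    from sqlalchemy.dialects import registry

    # "dialect.driver" -> sqlalchemy.dialects.postgresql.psycopg2.dialect
    psycopg2_cls = registry.load("postgresql.psycopg2")

    # a bare name loads the dialect's "base" driver module
    sqlite_cls = registry.load("sqlite")

    # renamed dialect: warns via warn_deprecated, then loads postgresql
    pg_cls = registry.load("postgres")
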
@ -1,418 +0,0 @@
# access.py
# Copyright (C) 2007 Paul Johnston, paj@pajhome.org.uk
# Portions derived from jet2sql.py by Matt Keranen, mksql@yahoo.com
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
Support for the Microsoft Access database.

This dialect is *not* ported to SQLAlchemy 0.6.

This dialect is *not* tested on SQLAlchemy 0.6.


"""
from sqlalchemy import sql, schema, types, exc, pool
from sqlalchemy.sql import compiler, expression
from sqlalchemy.engine import default, base, reflection
from sqlalchemy import processors


class AcNumeric(types.Numeric):
    def get_col_spec(self):
        return "NUMERIC"

    def bind_processor(self, dialect):
        return processors.to_str

    def result_processor(self, dialect, coltype):
        return None


class AcFloat(types.Float):
    def get_col_spec(self):
        return "FLOAT"

    def bind_processor(self, dialect):
        """By converting to string, we can use Decimal types round-trip."""
        return processors.to_str


class AcInteger(types.Integer):
    def get_col_spec(self):
        return "INTEGER"


class AcTinyInteger(types.Integer):
    def get_col_spec(self):
        return "TINYINT"


class AcSmallInteger(types.SmallInteger):
    def get_col_spec(self):
        return "SMALLINT"


class AcDateTime(types.DateTime):
    def __init__(self, *a, **kw):
        super(AcDateTime, self).__init__(False)

    def get_col_spec(self):
        return "DATETIME"


class AcDate(types.Date):
    def __init__(self, *a, **kw):
        super(AcDate, self).__init__(False)

    def get_col_spec(self):
        return "DATETIME"


class AcText(types.Text):
    def get_col_spec(self):
        return "MEMO"


class AcString(types.String):
    def get_col_spec(self):
        return "TEXT" + (self.length and ("(%d)" % self.length) or "")


class AcUnicode(types.Unicode):
    def get_col_spec(self):
        return "TEXT" + (self.length and ("(%d)" % self.length) or "")

    def bind_processor(self, dialect):
        return None

    def result_processor(self, dialect, coltype):
        return None


class AcChar(types.CHAR):
    def get_col_spec(self):
        return "TEXT" + (self.length and ("(%d)" % self.length) or "")


class AcBinary(types.LargeBinary):
    def get_col_spec(self):
        return "BINARY"


class AcBoolean(types.Boolean):
    def get_col_spec(self):
        return "YESNO"


class AcTimeStamp(types.TIMESTAMP):
    def get_col_spec(self):
        return "TIMESTAMP"


class AccessExecutionContext(default.DefaultExecutionContext):
    def _has_implicit_sequence(self, column):
        if column.primary_key and column.autoincrement:
            if isinstance(column.type, types.Integer) and \
                    not column.foreign_keys:
                if column.default is None or \
                        (isinstance(column.default, schema.Sequence) and
                         column.default.optional):
                    return True
        return False

    def post_exec(self):
        """If we inserted into a row with a COUNTER column, fetch the ID"""

        if self.compiled.isinsert:
            tbl = self.compiled.statement.table
            if not hasattr(tbl, 'has_sequence'):
                tbl.has_sequence = None
                for column in tbl.c:
                    if getattr(column, 'sequence', False) or \
                            self._has_implicit_sequence(column):
                        tbl.has_sequence = column
                        break

            if bool(tbl.has_sequence):
                # TBD: for some reason _last_inserted_ids doesn't exist here
                # (but it does at the corresponding point in mssql???)
                self.cursor.execute("SELECT @@identity AS lastrowid")
                row = self.cursor.fetchone()
                self._last_inserted_ids = [int(row[0])]

        super(AccessExecutionContext, self).post_exec()


const, daoEngine = None, None


class AccessDialect(default.DefaultDialect):
    colspecs = {
        types.Unicode: AcUnicode,
        types.Integer: AcInteger,
        types.SmallInteger: AcSmallInteger,
        types.Numeric: AcNumeric,
        types.Float: AcFloat,
        types.DateTime: AcDateTime,
        types.Date: AcDate,
        types.String: AcString,
        types.LargeBinary: AcBinary,
        types.Boolean: AcBoolean,
        types.Text: AcText,
        types.CHAR: AcChar,
        types.TIMESTAMP: AcTimeStamp,
    }
    name = 'access'
    supports_sane_rowcount = False
    supports_sane_multi_rowcount = False

    ported_sqla_06 = False

    def type_descriptor(self, typeobj):
        newobj = types.adapt_type(typeobj, self.colspecs)
        return newobj

    def __init__(self, **params):
        super(AccessDialect, self).__init__(**params)
        self.text_as_varchar = False
        self._dtbs = None

    def dbapi(cls):
        import win32com.client, pythoncom

        global const, daoEngine
        if const is None:
            const = win32com.client.constants
            for suffix in (".36", ".35", ".30"):
                try:
                    daoEngine = win32com.client.gencache.EnsureDispatch(
                        "DAO.DBEngine" + suffix)
                    break
                except pythoncom.com_error:
                    pass
            else:
                raise exc.InvalidRequestError(
                    "Can't find a DB engine. Check "
                    "http://support.microsoft.com/kb/239114 for details.")

        import pyodbc as module
        return module
    dbapi = classmethod(dbapi)

    def create_connect_args(self, url):
        opts = url.translate_connect_args()
        connectors = ["Driver={Microsoft Access Driver (*.mdb)}"]
        connectors.append("Dbq=%s" % opts["database"])
        user = opts.get("username", None)
        if user:
            connectors.append("UID=%s" % user)
            connectors.append("PWD=%s" % opts.get("password", ""))
        return [[";".join(connectors)], {}]

    def last_inserted_ids(self):
        return self.context.last_inserted_ids

    def do_execute(self, cursor, statement, params, **kwargs):
        if params == {}:
            params = ()
        super(AccessDialect, self).do_execute(
            cursor, statement, params, **kwargs)

    def _execute(self, c, statement, parameters):
        try:
            if parameters == {}:
                parameters = ()
            c.execute(statement, parameters)
            self.context.rowcount = c.rowcount
        except Exception, e:
            raise exc.DBAPIError.instance(statement, parameters, e)

    def has_table(self, connection, tablename, schema=None):
        # This approach seems to be more reliable than using DAO
        try:
            connection.execute('select top 1 * from [%s]' % tablename)
            return True
        except Exception, e:
            return False

    def reflecttable(self, connection, table, include_columns):
        # This is defined in the function, as it relies on win32com constants
        # that aren't imported until the dbapi method is called
        if not hasattr(self, 'ischema_names'):
            self.ischema_names = {
                const.dbByte: AcBinary,
                const.dbInteger: AcInteger,
                const.dbLong: AcInteger,
                const.dbSingle: AcFloat,
                const.dbDouble: AcFloat,
                const.dbDate: AcDateTime,
                const.dbLongBinary: AcBinary,
                const.dbMemo: AcText,
                const.dbBoolean: AcBoolean,
                const.dbText: AcUnicode,  # All Access strings are unicode
                const.dbCurrency: AcNumeric,
            }

        # A fresh DAO connection is opened for each reflection.
        # This is necessary, so we get the latest updates.
        dtbs = daoEngine.OpenDatabase(connection.engine.url.database)

        try:
            for tbl in dtbs.TableDefs:
                if tbl.Name.lower() == table.name.lower():
                    break
            else:
                raise exc.NoSuchTableError(table.name)

            for col in tbl.Fields:
                coltype = self.ischema_names[col.Type]
                if col.Type == const.dbText:
                    coltype = coltype(col.Size)

                colargs = {
                    'nullable': not (col.Required or
                                     col.Attributes & const.dbAutoIncrField),
                }
                default = col.DefaultValue

                if col.Attributes & const.dbAutoIncrField:
                    colargs['default'] = schema.Sequence(col.Name + '_seq')
                elif default:
                    if col.Type == const.dbBoolean:
                        default = default == 'Yes' and '1' or '0'
                    colargs['server_default'] = schema.DefaultClause(
                        sql.text(default))

                table.append_column(schema.Column(col.Name, coltype, **colargs))

            # TBD: check constraints

            # Find primary key columns first
            for idx in tbl.Indexes:
                if idx.Primary:
                    for col in idx.Fields:
                        thecol = table.c[col.Name]
                        table.primary_key.add(thecol)
                        if isinstance(thecol.type, AcInteger) and \
                                not (thecol.default and
                                     isinstance(thecol.default.arg,
                                                schema.Sequence)):
                            thecol.autoincrement = False

            # Then add other indexes
            for idx in tbl.Indexes:
                if not idx.Primary:
                    if len(idx.Fields) == 1:
                        col = table.c[idx.Fields[0].Name]
                        if not col.primary_key:
                            col.index = True
                            col.unique = idx.Unique
                    else:
                        pass  # TBD: multi-column indexes

            for fk in dtbs.Relations:
                if fk.ForeignTable != table.name:
                    continue
                scols = [c.ForeignName for c in fk.Fields]
                rcols = ['%s.%s' % (fk.Table, c.Name) for c in fk.Fields]
                table.append_constraint(
                    schema.ForeignKeyConstraint(scols, rcols,
                                                link_to_name=True))

        finally:
            dtbs.Close()

    @reflection.cache
    def get_table_names(self, connection, schema=None, **kw):
        # A fresh DAO connection is opened for each reflection.
        # This is necessary, so we get the latest updates.
        dtbs = daoEngine.OpenDatabase(connection.engine.url.database)

        names = [t.Name for t in dtbs.TableDefs
                 if t.Name[:4] != "MSys" and t.Name[:4] != "~TMP"]
        dtbs.Close()
        return names


class AccessCompiler(compiler.SQLCompiler):
    extract_map = compiler.SQLCompiler.extract_map.copy()
    extract_map.update({
        'month': 'm',
        'day': 'd',
        'year': 'yyyy',
        'second': 's',
        'hour': 'h',
        'doy': 'y',
        'minute': 'n',
        'quarter': 'q',
        'dow': 'w',
        'week': 'ww'
    })

    def visit_select_precolumns(self, select):
        """Access puts TOP, its version of LIMIT, here"""
        s = select.distinct and "DISTINCT " or ""
        if select.limit:
            s += "TOP %s " % (select.limit)
        if select.offset:
            raise exc.InvalidRequestError(
                'Access does not support LIMIT with an offset')
        return s

    def limit_clause(self, select):
        """Limit in access is after the select keyword"""
        return ""

    def binary_operator_string(self, binary):
        """Access uses "mod" instead of "%" """
        return binary.operator == '%' and 'mod' or binary.operator

    def label_select_column(self, select, column, asfrom):
        if isinstance(column, expression.Function):
            return column.label()
        else:
            return super(AccessCompiler, self).label_select_column(
                select, column, asfrom)

    function_rewrites = {'current_date': 'now',
                         'current_timestamp': 'now',
                         'length': 'len',
                         }

    def visit_function(self, func):
        """Access function names differ from the ANSI SQL names;
        rewrite common ones"""
        func.name = self.function_rewrites.get(func.name, func.name)
        return super(AccessCompiler, self).visit_function(func)

    def for_update_clause(self, select):
        """FOR UPDATE is not supported by Access; silently ignore"""
        return ''

    # Strip schema
    def visit_table(self, table, asfrom=False, **kwargs):
        if asfrom:
            return self.preparer.quote(table.name, table.quote)
        else:
            return ""

    def visit_join(self, join, asfrom=False, **kwargs):
        return (self.process(join.left, asfrom=True) +
                (join.isouter and " LEFT OUTER JOIN " or " INNER JOIN ") +
                self.process(join.right, asfrom=True) + " ON " +
                self.process(join.onclause))

    def visit_extract(self, extract, **kw):
        field = self.extract_map.get(extract.field, extract.field)
        return 'DATEPART("%s", %s)' % (field, self.process(extract.expr, **kw))


class AccessDDLCompiler(compiler.DDLCompiler):
    def get_column_specification(self, column, **kwargs):
        colspec = self.preparer.format_column(column) + " " + \
            column.type.dialect_impl(self.dialect).get_col_spec()

        # install a sequence if we have an implicit IDENTITY column
        if (not getattr(column.table, 'has_sequence', False)) and \
                column.primary_key and column.autoincrement and \
                isinstance(column.type, types.Integer) and \
                not column.foreign_keys:
            if column.default is None or \
                    (isinstance(column.default, schema.Sequence) and
                     column.default.optional):
                column.sequence = schema.Sequence(column.name + '_seq')

        if not column.nullable:
            colspec += " NOT NULL"

        if hasattr(column, 'sequence'):
            column.table.has_sequence = column
            colspec = self.preparer.format_column(column) + " counter"
        else:
            default = self.get_column_default_string(column)
            if default is not None:
                colspec += " DEFAULT " + default

        return colspec

    def visit_drop_index(self, drop):
        index = drop.element
        self.append("\nDROP INDEX [%s].[%s]" %
                    (index.table.name,
                     self._validate_identifier(index.name, False)))


class AccessIdentifierPreparer(compiler.IdentifierPreparer):
    reserved_words = compiler.RESERVED_WORDS.copy()
    reserved_words.update(['value', 'text'])

    def __init__(self, dialect):
        super(AccessIdentifierPreparer, self).__init__(
            dialect, initial_quote='[', final_quote=']')


dialect = AccessDialect
dialect.poolclass = pool.SingletonThreadPool
dialect.statement_compiler = AccessCompiler
dialect.ddlcompiler = AccessDDLCompiler
dialect.preparer = AccessIdentifierPreparer
dialect.execution_ctx_cls = AccessExecutionContext
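
A sketch of the connection string ``create_connect_args`` above would build;
the file path is hypothetical, and this pre-0.6 dialect will not run on a
modern SQLAlchemy::

    from sqlalchemy.engine.url import make_url

    url = make_url("access:///C:/data/northwind.mdb")  # hypothetical path
    AccessDialect().create_connect_args(url)
    # -> [['Driver={Microsoft Access Driver (*.mdb)};'
    #      'Dbq=C:/data/northwind.mdb'], {}]
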
@ -1,21 +0,0 @@
# firebird/__init__.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from sqlalchemy.dialects.firebird import base, kinterbasdb, fdb

base.dialect = fdb.dialect

from sqlalchemy.dialects.firebird.base import \
    SMALLINT, BIGINT, FLOAT, DATE, TIME, \
    TEXT, NUMERIC, TIMESTAMP, VARCHAR, CHAR, BLOB, \
    dialect

__all__ = (
    'SMALLINT', 'BIGINT', 'FLOAT', 'DATE', 'TIME',
    'TEXT', 'NUMERIC', 'TIMESTAMP', 'VARCHAR', 'CHAR', 'BLOB',
    'dialect'
)
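
Because ``base.dialect`` is reassigned above, a bare ``firebird://`` URL
selects the fdb driver; a sketch (credentials and path are made up)::

    from sqlalchemy import create_engine

    engine = create_engine(
        "firebird://sysdba:masterkey@localhost//var/db/employee.fdb")
    engine.dialect.driver  # -> 'fdb'
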
@ -1,741 +0,0 @@
# firebird/base.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

r"""

.. dialect:: firebird
    :name: Firebird

Firebird Dialects
-----------------

Firebird offers two distinct dialects_ (not to be confused with a
SQLAlchemy ``Dialect``):

dialect 1
  This is the old syntax and behaviour, inherited from Interbase pre-6.0.

dialect 3
  This is the newer and supported syntax, introduced in Interbase 6.0.

The SQLAlchemy Firebird dialect detects these versions and
adjusts its representation of SQL accordingly. However,
support for dialect 1 is not well tested and probably has
incompatibilities.

Locking Behavior
----------------

Firebird locks tables aggressively. For this reason, a DROP TABLE may
hang until other transactions are released. SQLAlchemy does its best
to release transactions as quickly as possible. The most common cause
of hanging transactions is a non-fully consumed result set, i.e.::

    result = engine.execute("select * from table")
    row = result.fetchone()
    return

Where above, the ``ResultProxy`` has not been fully consumed. The
connection will be returned to the pool and the transactional state
rolled back once the Python garbage collector reclaims the objects
which hold onto the connection, which often occurs asynchronously.
The above use case can be alleviated by calling ``first()`` on the
``ResultProxy`` which will fetch the first row and immediately close
all remaining cursor/connection resources.

RETURNING support
-----------------

Firebird 2.0 supports returning a result set from inserts, and 2.1
extends that to deletes and updates. This is generically exposed by
the SQLAlchemy ``returning()`` method, such as::

    # INSERT..RETURNING
    result = table.insert().returning(table.c.col1, table.c.col2).\
        values(name='foo')
    print result.fetchall()

    # UPDATE..RETURNING
    raises = empl.update().returning(empl.c.id, empl.c.salary).\
        where(empl.c.sales>100).\
        values(dict(salary=empl.c.salary * 1.1))
    print raises.fetchall()


.. _dialects: http://mc-computing.com/Databases/Firebird/SQL_Dialect.html

"""

import datetime

from sqlalchemy import schema as sa_schema
from sqlalchemy import exc, types as sqltypes, sql, util
from sqlalchemy.sql import expression
from sqlalchemy.engine import base, default, reflection
from sqlalchemy.sql import compiler
from sqlalchemy.sql.elements import quoted_name

from sqlalchemy.types import (BIGINT, BLOB, DATE, FLOAT, INTEGER, NUMERIC,
                              SMALLINT, TEXT, TIME, TIMESTAMP, Integer)


RESERVED_WORDS = set([
    "active", "add", "admin", "after", "all", "alter", "and", "any", "as",
    "asc", "ascending", "at", "auto", "avg", "before", "begin", "between",
    "bigint", "bit_length", "blob", "both", "by", "case", "cast", "char",
    "character", "character_length", "char_length", "check", "close",
    "collate", "column", "commit", "committed", "computed", "conditional",
    "connect", "constraint", "containing", "count", "create", "cross",
    "cstring", "current", "current_connection", "current_date",
    "current_role", "current_time", "current_timestamp",
    "current_transaction", "current_user", "cursor", "database", "date",
    "day", "dec", "decimal", "declare", "default", "delete", "desc",
    "descending", "disconnect", "distinct", "do", "domain", "double",
    "drop", "else", "end", "entry_point", "escape", "exception",
    "execute", "exists", "exit", "external", "extract", "fetch", "file",
    "filter", "float", "for", "foreign", "from", "full", "function",
    "gdscode", "generator", "gen_id", "global", "grant", "group",
    "having", "hour", "if", "in", "inactive", "index", "inner",
    "input_type", "insensitive", "insert", "int", "integer", "into", "is",
    "isolation", "join", "key", "leading", "left", "length", "level",
    "like", "long", "lower", "manual", "max", "maximum_segment", "merge",
    "min", "minute", "module_name", "month", "names", "national",
    "natural", "nchar", "no", "not", "null", "numeric", "octet_length",
    "of", "on", "only", "open", "option", "or", "order", "outer",
    "output_type", "overflow", "page", "pages", "page_size", "parameter",
    "password", "plan", "position", "post_event", "precision", "primary",
    "privileges", "procedure", "protected", "rdb$db_key", "read", "real",
    "record_version", "recreate", "recursive", "references", "release",
    "reserv", "reserving", "retain", "returning_values", "returns",
    "revoke", "right", "rollback", "rows", "row_count", "savepoint",
    "schema", "second", "segment", "select", "sensitive", "set", "shadow",
    "shared", "singular", "size", "smallint", "snapshot", "some", "sort",
    "sqlcode", "stability", "start", "starting", "starts", "statistics",
    "sub_type", "sum", "suspend", "table", "then", "time", "timestamp",
    "to", "trailing", "transaction", "trigger", "trim", "uncommitted",
    "union", "unique", "update", "upper", "user", "using", "value",
    "values", "varchar", "variable", "varying", "view", "wait", "when",
    "where", "while", "with", "work", "write", "year",
])


class _StringType(sqltypes.String):
    """Base for Firebird string types."""

    def __init__(self, charset=None, **kw):
        self.charset = charset
        super(_StringType, self).__init__(**kw)


class VARCHAR(_StringType, sqltypes.VARCHAR):
    """Firebird VARCHAR type"""
    __visit_name__ = 'VARCHAR'

    def __init__(self, length=None, **kwargs):
        super(VARCHAR, self).__init__(length=length, **kwargs)


class CHAR(_StringType, sqltypes.CHAR):
    """Firebird CHAR type"""
    __visit_name__ = 'CHAR'

    def __init__(self, length=None, **kwargs):
        super(CHAR, self).__init__(length=length, **kwargs)


class _FBDateTime(sqltypes.DateTime):
    def bind_processor(self, dialect):
        def process(value):
            if type(value) == datetime.date:
                return datetime.datetime(value.year, value.month, value.day)
            else:
                return value
        return process

colspecs = {
    sqltypes.DateTime: _FBDateTime
}

ischema_names = {
    'SHORT': SMALLINT,
    'LONG': INTEGER,
    'QUAD': FLOAT,
    'FLOAT': FLOAT,
    'DATE': DATE,
    'TIME': TIME,
    'TEXT': TEXT,
    'INT64': BIGINT,
    'DOUBLE': FLOAT,
    'TIMESTAMP': TIMESTAMP,
    'VARYING': VARCHAR,
    'CSTRING': CHAR,
    'BLOB': BLOB,
}


# TODO: date conversion types (should be implemented as _FBDateTime,
# _FBDate, etc. as bind/result functionality is required)

class FBTypeCompiler(compiler.GenericTypeCompiler):
    def visit_boolean(self, type_, **kw):
        return self.visit_SMALLINT(type_, **kw)

    def visit_datetime(self, type_, **kw):
        return self.visit_TIMESTAMP(type_, **kw)

    def visit_TEXT(self, type_, **kw):
        return "BLOB SUB_TYPE 1"

    def visit_BLOB(self, type_, **kw):
        return "BLOB SUB_TYPE 0"

    def _extend_string(self, type_, basic):
        charset = getattr(type_, 'charset', None)
        if charset is None:
            return basic
        else:
            return '%s CHARACTER SET %s' % (basic, charset)

    def visit_CHAR(self, type_, **kw):
        basic = super(FBTypeCompiler, self).visit_CHAR(type_, **kw)
        return self._extend_string(type_, basic)

    def visit_VARCHAR(self, type_, **kw):
        if not type_.length:
            raise exc.CompileError(
                "VARCHAR requires a length on dialect %s" %
                self.dialect.name)
        basic = super(FBTypeCompiler, self).visit_VARCHAR(type_, **kw)
        return self._extend_string(type_, basic)


class FBCompiler(sql.compiler.SQLCompiler):
    """Firebird specific idiosyncrasies"""

    ansi_bind_rules = True

    # def visit_contains_op_binary(self, binary, operator, **kw):
    #     can't use CONTAINING b.c. it's case insensitive.

    # def visit_notcontains_op_binary(self, binary, operator, **kw):
    #     can't use NOT CONTAINING b.c. it's case insensitive.

    def visit_now_func(self, fn, **kw):
        return "CURRENT_TIMESTAMP"

    def visit_startswith_op_binary(self, binary, operator, **kw):
        return '%s STARTING WITH %s' % (
            binary.left._compiler_dispatch(self, **kw),
            binary.right._compiler_dispatch(self, **kw))

    def visit_notstartswith_op_binary(self, binary, operator, **kw):
        return '%s NOT STARTING WITH %s' % (
            binary.left._compiler_dispatch(self, **kw),
            binary.right._compiler_dispatch(self, **kw))

    def visit_mod_binary(self, binary, operator, **kw):
        return "mod(%s, %s)" % (
            self.process(binary.left, **kw),
            self.process(binary.right, **kw))

    def visit_alias(self, alias, asfrom=False, **kwargs):
        if self.dialect._version_two:
            return super(FBCompiler, self).\
                visit_alias(alias, asfrom=asfrom, **kwargs)
        else:
            # Override to not use the AS keyword which FB 1.5 does not like
            if asfrom:
                alias_name = isinstance(alias.name,
                                        expression._truncated_label) and \
                    self._truncated_identifier("alias",
                                               alias.name) or alias.name

                return self.process(
                    alias.original, asfrom=asfrom, **kwargs) + \
                    " " + \
                    self.preparer.format_alias(alias, alias_name)
            else:
                return self.process(alias.original, **kwargs)

    def visit_substring_func(self, func, **kw):
        s = self.process(func.clauses.clauses[0])
        start = self.process(func.clauses.clauses[1])
        if len(func.clauses.clauses) > 2:
            length = self.process(func.clauses.clauses[2])
            return "SUBSTRING(%s FROM %s FOR %s)" % (s, start, length)
        else:
            return "SUBSTRING(%s FROM %s)" % (s, start)

    def visit_length_func(self, function, **kw):
        if self.dialect._version_two:
            return "char_length" + self.function_argspec(function)
        else:
            return "strlen" + self.function_argspec(function)

    visit_char_length_func = visit_length_func

    def function_argspec(self, func, **kw):
        # TODO: this probably will need to be
        # narrowed to a fixed list; some no-arg functions
        # may require parens - see similar example in the oracle
        # dialect
        if func.clauses is not None and len(func.clauses):
            return self.process(func.clause_expr, **kw)
        else:
            return ""

    def default_from(self):
        return " FROM rdb$database"

    def visit_sequence(self, seq):
        return "gen_id(%s, 1)" % self.preparer.format_sequence(seq)

    def get_select_precolumns(self, select, **kw):
        """Called when building a ``SELECT`` statement; position is just
        before the column list. Firebird puts the limit and offset right
        after the ``SELECT`` keyword.
        """

        result = ""
        if select._limit_clause is not None:
            result += "FIRST %s " % self.process(select._limit_clause, **kw)
        if select._offset_clause is not None:
            result += "SKIP %s " % self.process(select._offset_clause, **kw)
        if select._distinct:
            result += "DISTINCT "
        return result

    def limit_clause(self, select, **kw):
        """Already taken care of in the `get_select_precolumns` method."""

        return ""

    def returning_clause(self, stmt, returning_cols):
        columns = [
            self._label_select_column(None, c, True, False, {})
            for c in expression._select_iterables(returning_cols)
        ]

        return 'RETURNING ' + ', '.join(columns)


class FBDDLCompiler(sql.compiler.DDLCompiler):
    """Firebird syntactic idiosyncrasies"""

    def visit_create_sequence(self, create):
        """Generate a ``CREATE GENERATOR`` statement for the sequence."""

        # no syntax for these
        # http://www.firebirdsql.org/manual/generatorguide-sqlsyntax.html
        if create.element.start is not None:
            raise NotImplementedError(
                "Firebird SEQUENCE doesn't support START WITH")
        if create.element.increment is not None:
            raise NotImplementedError(
                "Firebird SEQUENCE doesn't support INCREMENT BY")

        if self.dialect._version_two:
            return "CREATE SEQUENCE %s" % \
                self.preparer.format_sequence(create.element)
        else:
            return "CREATE GENERATOR %s" % \
                self.preparer.format_sequence(create.element)

    def visit_drop_sequence(self, drop):
        """Generate a ``DROP GENERATOR`` statement for the sequence."""

        if self.dialect._version_two:
            return "DROP SEQUENCE %s" % \
                self.preparer.format_sequence(drop.element)
        else:
            return "DROP GENERATOR %s" % \
                self.preparer.format_sequence(drop.element)


class FBIdentifierPreparer(sql.compiler.IdentifierPreparer):
    """Install Firebird specific reserved words."""

    reserved_words = RESERVED_WORDS
    illegal_initial_characters = compiler.ILLEGAL_INITIAL_CHARACTERS.union(
        ['_'])

    def __init__(self, dialect):
        super(FBIdentifierPreparer, self).__init__(dialect, omit_schema=True)


class FBExecutionContext(default.DefaultExecutionContext):
    def fire_sequence(self, seq, type_):
        """Get the next value from the sequence using ``gen_id()``."""

        return self._execute_scalar(
            "SELECT gen_id(%s, 1) FROM rdb$database" %
            self.dialect.identifier_preparer.format_sequence(seq),
            type_
        )


class FBDialect(default.DefaultDialect):
    """Firebird dialect"""

    name = 'firebird'

    max_identifier_length = 31

    supports_sequences = True
    sequences_optional = False
    supports_default_values = True
    postfetch_lastrowid = False

    supports_native_boolean = False

    requires_name_normalize = True
    supports_empty_insert = False

    statement_compiler = FBCompiler
    ddl_compiler = FBDDLCompiler
    preparer = FBIdentifierPreparer
    type_compiler = FBTypeCompiler
    execution_ctx_cls = FBExecutionContext

    colspecs = colspecs
    ischema_names = ischema_names

    construct_arguments = []

    # defaults to dialect ver. 3;
    # autodetected upon first connect
    _version_two = True

    def initialize(self, connection):
        super(FBDialect, self).initialize(connection)
        self._version_two = ('firebird' in self.server_version_info and
                             self.server_version_info >= (2, )) or \
                            ('interbase' in self.server_version_info and
                             self.server_version_info >= (6, ))

        if not self._version_two:
            # TODO: whatever other pre < 2.0 stuff goes here
            self.ischema_names = ischema_names.copy()
            self.ischema_names['TIMESTAMP'] = sqltypes.DATE
            self.colspecs = {
                sqltypes.DateTime: sqltypes.DATE
            }

        self.implicit_returning = self._version_two and \
            self.__dict__.get('implicit_returning', True)

    def normalize_name(self, name):
        # Remove trailing spaces: FB uses a CHAR() type,
        # that is padded with spaces
        name = name and name.rstrip()
        if name is None:
            return None
        elif name.upper() == name and \
                not self.identifier_preparer._requires_quotes(name.lower()):
            return name.lower()
        elif name.lower() == name:
            return quoted_name(name, quote=True)
        else:
            return name

    def denormalize_name(self, name):
        if name is None:
            return None
        elif name.lower() == name and \
                not self.identifier_preparer._requires_quotes(name.lower()):
            return name.upper()
        else:
            return name

    def has_table(self, connection, table_name, schema=None):
        """Return ``True`` if the given table exists, ignoring
        the `schema`."""

        tblqry = """
        SELECT 1 AS has_table FROM rdb$database
        WHERE EXISTS (SELECT rdb$relation_name
                      FROM rdb$relations
                      WHERE rdb$relation_name=?)
        """
        c = connection.execute(tblqry, [self.denormalize_name(table_name)])
        return c.first() is not None

    def has_sequence(self, connection, sequence_name, schema=None):
        """Return ``True`` if the given sequence (generator) exists."""

        genqry = """
        SELECT 1 AS has_sequence FROM rdb$database
        WHERE EXISTS (SELECT rdb$generator_name
                      FROM rdb$generators
                      WHERE rdb$generator_name=?)
        """
        c = connection.execute(genqry, [self.denormalize_name(sequence_name)])
        return c.first() is not None

    @reflection.cache
    def get_table_names(self, connection, schema=None, **kw):
        # there are two queries commonly mentioned for this.
        # this one, using view_blr, is at the Firebird FAQ among other places:
        # http://www.firebirdfaq.org/faq174/
        s = """
        select rdb$relation_name
        from rdb$relations
        where rdb$view_blr is null
        and (rdb$system_flag is null or rdb$system_flag = 0);
        """

        # the other query is this one. It's not clear if there's really
        # any difference between these two. This link:
        # http://www.alberton.info/firebird_sql_meta_info.html#.Ur3vXfZGni8
        # states them as interchangeable. Some discussion at [ticket:2898]
        # SELECT DISTINCT rdb$relation_name
        # FROM rdb$relation_fields
        # WHERE rdb$system_flag=0 AND rdb$view_context IS NULL

        return [self.normalize_name(row[0]) for row in connection.execute(s)]

    @reflection.cache
    def get_view_names(self, connection, schema=None, **kw):
        # see http://www.firebirdfaq.org/faq174/
        s = """
        select rdb$relation_name
        from rdb$relations
        where rdb$view_blr is not null
        and (rdb$system_flag is null or rdb$system_flag = 0);
        """
        return [self.normalize_name(row[0]) for row in connection.execute(s)]

    @reflection.cache
    def get_view_definition(self, connection, view_name, schema=None, **kw):
        qry = """
        SELECT rdb$view_source AS view_source
        FROM rdb$relations
        WHERE rdb$relation_name=?
        """
        rp = connection.execute(qry, [self.denormalize_name(view_name)])
        row = rp.first()
        if row:
            return row['view_source']
        else:
            return None

    @reflection.cache
    def get_pk_constraint(self, connection, table_name, schema=None, **kw):
        # Query to extract the PK/FK constrained fields of the given table
        keyqry = """
        SELECT se.rdb$field_name AS fname
        FROM rdb$relation_constraints rc
             JOIN rdb$index_segments se ON rc.rdb$index_name=se.rdb$index_name
        WHERE rc.rdb$constraint_type=? AND rc.rdb$relation_name=?
        """
        tablename = self.denormalize_name(table_name)
        # get primary key fields
        c = connection.execute(keyqry, ["PRIMARY KEY", tablename])
        pkfields = [self.normalize_name(r['fname']) for r in c.fetchall()]
        return {'constrained_columns': pkfields, 'name': None}

    @reflection.cache
    def get_column_sequence(self, connection,
                            table_name, column_name,
                            schema=None, **kw):
        tablename = self.denormalize_name(table_name)
        colname = self.denormalize_name(column_name)
        # Heuristic-query to determine the generator associated to a PK field
        genqry = """
        SELECT trigdep.rdb$depended_on_name AS fgenerator
        FROM rdb$dependencies tabdep
             JOIN rdb$dependencies trigdep
                  ON tabdep.rdb$dependent_name=trigdep.rdb$dependent_name
                     AND trigdep.rdb$depended_on_type=14
                     AND trigdep.rdb$dependent_type=2
             JOIN rdb$triggers trig ON
                  trig.rdb$trigger_name=tabdep.rdb$dependent_name
        WHERE tabdep.rdb$depended_on_name=?
          AND tabdep.rdb$depended_on_type=0
          AND trig.rdb$trigger_type=1
          AND tabdep.rdb$field_name=?
          AND (SELECT count(*)
               FROM rdb$dependencies trigdep2
               WHERE trigdep2.rdb$dependent_name =
                     trigdep.rdb$dependent_name) = 2
        """
        genr = connection.execute(genqry, [tablename, colname]).first()
        if genr is not None:
            return dict(name=self.normalize_name(genr['fgenerator']))

    @reflection.cache
    def get_columns(self, connection, table_name, schema=None, **kw):
        # Query to extract the details of all the fields of the given table
        tblqry = """
        SELECT r.rdb$field_name AS fname,
               r.rdb$null_flag AS null_flag,
               t.rdb$type_name AS ftype,
               f.rdb$field_sub_type AS stype,
               f.rdb$field_length/
                    COALESCE(cs.rdb$bytes_per_character,1) AS flen,
               f.rdb$field_precision AS fprec,
               f.rdb$field_scale AS fscale,
               COALESCE(r.rdb$default_source,
                        f.rdb$default_source) AS fdefault
        FROM rdb$relation_fields r
             JOIN rdb$fields f ON r.rdb$field_source=f.rdb$field_name
             JOIN rdb$types t
                  ON t.rdb$type=f.rdb$field_type AND
                     t.rdb$field_name='RDB$FIELD_TYPE'
             LEFT JOIN rdb$character_sets cs ON
                  f.rdb$character_set_id=cs.rdb$character_set_id
        WHERE f.rdb$system_flag=0 AND r.rdb$relation_name=?
        ORDER BY r.rdb$field_position
        """
        # get the PK, used to determine the eventual associated sequence
        pk_constraint = self.get_pk_constraint(connection, table_name)
        pkey_cols = pk_constraint['constrained_columns']

        tablename = self.denormalize_name(table_name)
        # get all of the fields for this table
        c = connection.execute(tblqry, [tablename])
        cols = []
        while True:
            row = c.fetchone()
            if row is None:
                break
            name = self.normalize_name(row['fname'])
            orig_colname = row['fname']

            # get the data type
            colspec = row['ftype'].rstrip()
            coltype = self.ischema_names.get(colspec)
            if coltype is None:
                util.warn("Did not recognize type '%s' of column '%s'" %
                          (colspec, name))
                coltype = sqltypes.NULLTYPE
            elif issubclass(coltype, Integer) and row['fprec'] != 0:
                coltype = NUMERIC(
                    precision=row['fprec'],
                    scale=row['fscale'] * -1)
            elif colspec in ('VARYING', 'CSTRING'):
                coltype = coltype(row['flen'])
            elif colspec == 'TEXT':
                coltype = TEXT(row['flen'])
            elif colspec == 'BLOB':
                if row['stype'] == 1:
                    coltype = TEXT()
                else:
                    coltype = BLOB()
            else:
                coltype = coltype()

            # does it have a default value?
            defvalue = None
            if row['fdefault'] is not None:
                # the value comes down as "DEFAULT 'value'": there may be
                # more than one whitespace around the "DEFAULT" keyword
                # and it may also be lower case
                # (see also http://tracker.firebirdsql.org/browse/CORE-356)
                defexpr = row['fdefault'].lstrip()
                assert defexpr[:8].rstrip().upper() == \
                    'DEFAULT', "Unrecognized default value: %s" % \
                    defexpr
                defvalue = defexpr[8:].strip()
                if defvalue == 'NULL':
                    # Redundant
                    defvalue = None
            col_d = {
                'name': name,
                'type': coltype,
                'nullable': not bool(row['null_flag']),
                'default': defvalue,
                'autoincrement': 'auto',
            }

            if orig_colname.lower() == orig_colname:
                col_d['quote'] = True

            # if the PK is a single field, try to see if it's linked to
            # a sequence through a trigger
            if len(pkey_cols) == 1 and name == pkey_cols[0]:
                seq_d = self.get_column_sequence(connection, tablename, name)
                if seq_d is not None:
                    col_d['sequence'] = seq_d

            cols.append(col_d)
        return cols

    @reflection.cache
    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
        # Query to extract the details of each UK/FK of the given table
        fkqry = """
        SELECT rc.rdb$constraint_name AS cname,
               cse.rdb$field_name AS fname,
               ix2.rdb$relation_name AS targetrname,
               se.rdb$field_name AS targetfname
        FROM rdb$relation_constraints rc
             JOIN rdb$indices ix1 ON ix1.rdb$index_name=rc.rdb$index_name
             JOIN rdb$indices ix2 ON ix2.rdb$index_name=ix1.rdb$foreign_key
             JOIN rdb$index_segments cse ON
                  cse.rdb$index_name=ix1.rdb$index_name
             JOIN rdb$index_segments se
                  ON se.rdb$index_name=ix2.rdb$index_name
                     AND se.rdb$field_position=cse.rdb$field_position
        WHERE rc.rdb$constraint_type=? AND rc.rdb$relation_name=?
        ORDER BY se.rdb$index_name, se.rdb$field_position
        """
        tablename = self.denormalize_name(table_name)

        c = connection.execute(fkqry, ["FOREIGN KEY", tablename])
        fks = util.defaultdict(lambda: {
            'name': None,
            'constrained_columns': [],
            'referred_schema': None,
            'referred_table': None,
            'referred_columns': []
        })

        for row in c:
            cname = self.normalize_name(row['cname'])
            fk = fks[cname]
            if not fk['name']:
                fk['name'] = cname
                fk['referred_table'] = self.normalize_name(row['targetrname'])
            fk['constrained_columns'].append(
                self.normalize_name(row['fname']))
            fk['referred_columns'].append(
                self.normalize_name(row['targetfname']))
        return list(fks.values())

    @reflection.cache
    def get_indexes(self, connection, table_name, schema=None, **kw):
        qry = """
        SELECT ix.rdb$index_name AS index_name,
               ix.rdb$unique_flag AS unique_flag,
               ic.rdb$field_name AS field_name
        FROM rdb$indices ix
             JOIN rdb$index_segments ic
                  ON ix.rdb$index_name=ic.rdb$index_name
             LEFT OUTER JOIN rdb$relation_constraints
                  ON rdb$relation_constraints.rdb$index_name =
                     ic.rdb$index_name
        WHERE ix.rdb$relation_name=? AND ix.rdb$foreign_key IS NULL
          AND rdb$relation_constraints.rdb$constraint_type IS NULL
        ORDER BY index_name, ic.rdb$field_position
        """
        c = connection.execute(qry, [self.denormalize_name(table_name)])

        indexes = util.defaultdict(dict)
        for row in c:
            indexrec = indexes[row['index_name']]
            if 'name' not in indexrec:
                indexrec['name'] = self.normalize_name(row['index_name'])
                indexrec['column_names'] = []
                indexrec['unique'] = bool(row['unique_flag'])

            indexrec['column_names'].append(
                self.normalize_name(row['field_name']))

        return list(indexes.values())
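
A sketch of the FIRST/SKIP pagination emitted by ``get_select_precolumns``
above (bind parameter names in the output may differ)::

    from sqlalchemy import Column, Integer, MetaData, Table, select
    from sqlalchemy.dialects import firebird

    t = Table('t', MetaData(), Column('id', Integer))
    stmt = select([t]).limit(10).offset(5)
    print(stmt.compile(dialect=firebird.dialect()))
    # SELECT FIRST :param_1 SKIP :param_2 t.id FROM t
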
@ -1,118 +0,0 @@
# firebird/fdb.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: firebird+fdb
    :name: fdb
    :dbapi: fdb
    :connectstring: firebird+fdb://user:password@host:port/path/to/db\
[?key=value&key=value...]
    :url: http://pypi.python.org/pypi/fdb/

fdb is a kinterbasdb compatible DBAPI for Firebird.

.. versionadded:: 0.8 - Support for the fdb Firebird driver.

.. versionchanged:: 0.9 - The fdb dialect is now the default dialect
   under the ``firebird://`` URL space, as ``fdb`` is now the official
   Python driver for Firebird.

Arguments
---------

The ``fdb`` dialect is based on the
:mod:`sqlalchemy.dialects.firebird.kinterbasdb` dialect, however it does not
accept every argument that Kinterbasdb does.

* ``enable_rowcount`` - True by default, setting this to False disables
  the usage of "cursor.rowcount" with the
  Kinterbasdb dialect, which SQLAlchemy ordinarily calls upon automatically
  after any UPDATE or DELETE statement. When disabled, SQLAlchemy's
  ResultProxy will return -1 for result.rowcount. The rationale here is
  that Kinterbasdb requires a second round trip to the database when
  .rowcount is called - since SQLA's resultproxy automatically closes
  the cursor after a non-result-returning statement, rowcount must be
  called, if at all, before the result object is returned. Additionally,
  cursor.rowcount may not return correct results with older versions
  of Firebird, and setting this flag to False will also cause the
  SQLAlchemy ORM to ignore its usage. The behavior can also be controlled
  on a per-execution basis using the ``enable_rowcount`` option with
  :meth:`.Connection.execution_options`::

      conn = engine.connect().execution_options(enable_rowcount=True)
      r = conn.execute(stmt)
      print r.rowcount

* ``retaining`` - False by default. Setting this to True will pass the
  ``retaining=True`` keyword argument to the ``.commit()`` and
  ``.rollback()`` methods of the DBAPI connection, which can improve
  performance in some situations, but apparently with significant caveats.
  Please read the fdb and/or kinterbasdb DBAPI documentation in order to
  understand the implications of this flag.

  .. versionadded:: 0.8.2 - ``retaining`` keyword argument specifying
     transaction retaining behavior - in 0.8 it defaults to ``True``
     for backwards compatibility.

  .. versionchanged:: 0.9.0 - the ``retaining`` flag defaults to ``False``.
     In 0.8 it defaulted to ``True``.

  .. seealso::

      http://pythonhosted.org/fdb/usage-guide.html#retaining-transactions
      - information on the "retaining" flag.

"""

from .kinterbasdb import FBDialect_kinterbasdb
from ... import util


class FBDialect_fdb(FBDialect_kinterbasdb):

    def __init__(self, enable_rowcount=True,
                 retaining=False, **kwargs):
        super(FBDialect_fdb, self).__init__(
            enable_rowcount=enable_rowcount,
            retaining=retaining, **kwargs)

    @classmethod
    def dbapi(cls):
        return __import__('fdb')

    def create_connect_args(self, url):
        opts = url.translate_connect_args(username='user')
        if opts.get('port'):
            opts['host'] = "%s/%s" % (opts['host'], opts['port'])
            del opts['port']
        opts.update(url.query)

        util.coerce_kw_type(opts, 'type_conv', int)

        return ([], opts)

    def _get_server_version_info(self, connection):
        """Get the version of the Firebird server used by a connection.

        Returns a tuple of (`major`, `minor`, `build`), three integers
        representing the version of the attached server.
        """

        # This is the simpler approach (the other uses the services api),
        # which for backward compatibility reasons returns a string like
        #   LI-V6.3.3.12981 Firebird 2.0
        # where the first version is a fake one resembling the old
        # Interbase signature.

        isc_info_firebird_version = 103
        fbconn = connection.connection

        version = fbconn.db_info(isc_info_firebird_version)

        return self._parse_version_info(version)

dialect = FBDialect_fdb
@ -1,184 +0,0 @@
# firebird/kinterbasdb.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: firebird+kinterbasdb
    :name: kinterbasdb
    :dbapi: kinterbasdb
    :connectstring: firebird+kinterbasdb://user:password@host:port/path/to/db\
[?key=value&key=value...]
    :url: http://firebirdsql.org/index.php?op=devel&sub=python

Arguments
---------

The Kinterbasdb backend accepts the ``enable_rowcount`` and ``retaining``
arguments accepted by the :mod:`sqlalchemy.dialects.firebird.fdb` dialect.
In addition, it also accepts the following:

* ``type_conv`` - select the kind of mapping done on the types: by default
  SQLAlchemy uses 200 with Unicode, datetime and decimal support. See
  the linked documents below for further information.

* ``concurrency_level`` - set the backend policy with regards to threading
  issues: by default SQLAlchemy uses policy 1. See the linked documents
  below for further information.

.. seealso::

    http://sourceforge.net/projects/kinterbasdb

    http://kinterbasdb.sourceforge.net/dist_docs/usage.html#adv_param_conv_dynamic_type_translation

    http://kinterbasdb.sourceforge.net/dist_docs/usage.html#special_issue_concurrency

"""

from .base import FBDialect, FBExecutionContext
from ... import util, types as sqltypes
from re import match
import decimal


class _kinterbasdb_numeric(object):
    def bind_processor(self, dialect):
        def process(value):
            if isinstance(value, decimal.Decimal):
                return str(value)
            else:
                return value
        return process


class _FBNumeric_kinterbasdb(_kinterbasdb_numeric, sqltypes.Numeric):
    pass


class _FBFloat_kinterbasdb(_kinterbasdb_numeric, sqltypes.Float):
    pass


class FBExecutionContext_kinterbasdb(FBExecutionContext):
    @property
    def rowcount(self):
        if self.execution_options.get('enable_rowcount',
                                      self.dialect.enable_rowcount):
            return self.cursor.rowcount
        else:
            return -1


class FBDialect_kinterbasdb(FBDialect):
    driver = 'kinterbasdb'
    supports_sane_rowcount = False
    supports_sane_multi_rowcount = False
    execution_ctx_cls = FBExecutionContext_kinterbasdb

    supports_native_decimal = True

    colspecs = util.update_copy(
        FBDialect.colspecs,
        {
            sqltypes.Numeric: _FBNumeric_kinterbasdb,
            sqltypes.Float: _FBFloat_kinterbasdb,
        }
    )

    def __init__(self, type_conv=200, concurrency_level=1,
                 enable_rowcount=True,
                 retaining=False, **kwargs):
        super(FBDialect_kinterbasdb, self).__init__(**kwargs)
        self.enable_rowcount = enable_rowcount
        self.type_conv = type_conv
        self.concurrency_level = concurrency_level
        self.retaining = retaining
        if enable_rowcount:
            self.supports_sane_rowcount = True

    @classmethod
    def dbapi(cls):
        return __import__('kinterbasdb')

    def do_execute(self, cursor, statement, parameters, context=None):
        # kinterbasdb does not accept a None, but wants an empty list
        # when there are no arguments.
        cursor.execute(statement, parameters or [])

    def do_rollback(self, dbapi_connection):
        dbapi_connection.rollback(self.retaining)

    def do_commit(self, dbapi_connection):
        dbapi_connection.commit(self.retaining)

    def create_connect_args(self, url):
        opts = url.translate_connect_args(username='user')
        if opts.get('port'):
            opts['host'] = "%s/%s" % (opts['host'], opts['port'])
            del opts['port']
        opts.update(url.query)

        util.coerce_kw_type(opts, 'type_conv', int)

        type_conv = opts.pop('type_conv', self.type_conv)
        concurrency_level = opts.pop('concurrency_level',
                                     self.concurrency_level)

        if self.dbapi is not None:
            initialized = getattr(self.dbapi, 'initialized', None)
            if initialized is None:
                # CVS rev 1.96 changed the name of the attribute:
                # http://kinterbasdb.cvs.sourceforge.net/viewvc/kinterbasdb/
                #   Kinterbasdb-3.0/__init__.py?r1=1.95&r2=1.96
                initialized = getattr(self.dbapi, '_initialized', False)
            if not initialized:
                self.dbapi.init(type_conv=type_conv,
                                concurrency_level=concurrency_level)
        return ([], opts)

    def _get_server_version_info(self, connection):
        """Get the version of the Firebird server used by a connection.

        Returns a tuple of (`major`, `minor`, `build`), three integers
        representing the version of the attached server.
        """

        # This is the simpler approach (the other uses the services api),
        # which for backward compatibility reasons returns a string like
        #   LI-V6.3.3.12981 Firebird 2.0
        # where the first version is a fake one resembling the old
        # Interbase signature.

        fbconn = connection.connection
        version = fbconn.server_version

        return self._parse_version_info(version)

    def _parse_version_info(self, version):
        m = match(
            r'\w+-V(\d+)\.(\d+)\.(\d+)\.(\d+)( \w+ (\d+)\.(\d+))?', version)
        if not m:
            raise AssertionError(
                "Could not determine version from string '%s'" % version)

        if m.group(5) is not None:
            return tuple([int(x) for x in m.group(6, 7, 4)] + ['firebird'])
        else:
            return tuple([int(x) for x in m.group(1, 2, 3)] + ['interbase'])

    def is_disconnect(self, e, connection, cursor):
        if isinstance(e, (self.dbapi.OperationalError,
                          self.dbapi.ProgrammingError)):
            msg = str(e)
            return ('Unable to complete network request to host' in msg or
                    'Invalid connection state' in msg or
                    'Invalid cursor state' in msg or
                    'connection shutdown' in msg)
        else:
            return False


dialect = FBDialect_kinterbasdb
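
# Illustration (editorial addition, not original code) of the version
# parsing above, using the sample banner quoted in the comments:
#
#   _parse_version_info("LI-V6.3.3.12981 Firebird 2.0")
#   # -> (2, 0, 12981, 'firebird')   branding present: group 5 matched
#
#   _parse_version_info("LI-V6.3.3.12981")
#   # -> (6, 3, 3, 'interbase')      no branding: old Interbase signature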
@ -1,3 +0,0 @@
from sqlalchemy.dialects.informix import base, informixdb

base.dialect = informixdb.dialect
@ -1,306 +0,0 @@
# informix.py
# Copyright (C) 2005,2006, 2007, 2008, 2009, 2010 Michael Bayer mike_mp@zzzcomputing.com
#
# coding: gbk
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Support for the Informix database.

This dialect is *not* tested on SQLAlchemy 0.6.

"""


import datetime

from sqlalchemy import sql, schema, exc, pool, util
from sqlalchemy.sql import compiler
from sqlalchemy.engine import default, reflection
from sqlalchemy import types as sqltypes


class InfoDateTime(sqltypes.DateTime):
    def bind_processor(self, dialect):
        def process(value):
            if value is not None:
                if value.microsecond:
                    value = value.replace(microsecond=0)
            return value
        return process


class InfoTime(sqltypes.Time):
    def bind_processor(self, dialect):
        def process(value):
            if value is not None:
                if value.microsecond:
                    value = value.replace(microsecond=0)
            return value
        return process

    def result_processor(self, dialect, coltype):
        def process(value):
            if isinstance(value, datetime.datetime):
                return value.time()
            else:
                return value
        return process


colspecs = {
    sqltypes.DateTime: InfoDateTime,
    sqltypes.Time: InfoTime,
}


ischema_names = {
    0: sqltypes.CHAR,          # CHAR
    1: sqltypes.SMALLINT,      # SMALLINT
    2: sqltypes.INTEGER,       # INT
    3: sqltypes.FLOAT,         # FLOAT
    4: sqltypes.Float,         # SMALLFLOAT
    5: sqltypes.DECIMAL,       # DECIMAL
    6: sqltypes.Integer,       # Serial
    7: sqltypes.DATE,          # DATE
    8: sqltypes.Numeric,       # MONEY
    10: sqltypes.DATETIME,     # DATETIME
    11: sqltypes.LargeBinary,  # BYTE
    12: sqltypes.TEXT,         # TEXT
    13: sqltypes.VARCHAR,      # VARCHAR
    15: sqltypes.NCHAR,        # NCHAR
    16: sqltypes.NVARCHAR,     # NVARCHAR
    17: sqltypes.Integer,      # INT8
    18: sqltypes.Integer,      # Serial8
    43: sqltypes.String,       # LVARCHAR
    -1: sqltypes.BLOB,         # BLOB, CLOB
}


class InfoTypeCompiler(compiler.GenericTypeCompiler):
    def visit_DATETIME(self, type_):
        return "DATETIME YEAR TO SECOND"

    def visit_TIME(self, type_):
        return "DATETIME HOUR TO SECOND"

    def visit_large_binary(self, type_):
        return "BYTE"

    def visit_boolean(self, type_):
        return "SMALLINT"


class InfoSQLCompiler(compiler.SQLCompiler):

    def default_from(self):
        return " from systables where tabname = 'systables' "

    def get_select_precolumns(self, select):
        s = select._distinct and "DISTINCT " or ""
        # Informix supports only a limit (FIRST n), no offset
        if select._limit:
            s += " FIRST %s " % select._limit
        return s

    def visit_select(self, select):
        # a column in the ORDER BY clause must appear in the SELECT list too

        def __label(c):
            try:
                return c._label.lower()
            except AttributeError:
                return ''

        # TODO: don't modify the original select, generate a new one
        a = [__label(c) for c in select._raw_columns]
        for c in select._order_by_clause.clauses:
            if __label(c) not in a:
                select.append_column(c)

        return compiler.SQLCompiler.visit_select(self, select)

    def limit_clause(self, select):
        if select._offset is not None and select._offset > 0:
            raise NotImplementedError("Informix does not support OFFSET")
        return ""

    def visit_function(self, func):
        if func.name.lower() == 'current_date':
            return "today"
        elif func.name.lower() == 'current_time':
            return "CURRENT HOUR TO SECOND"
        elif func.name.lower() in ('current_timestamp', 'now'):
            return "CURRENT YEAR TO SECOND"
        else:
            return compiler.SQLCompiler.visit_function(self, func)


class InfoDDLCompiler(compiler.DDLCompiler):
    def get_column_specification(self, column, first_pk=False):
        colspec = self.preparer.format_column(column)
        if column.primary_key and len(column.foreign_keys) == 0 and \
                column.autoincrement and \
                isinstance(column.type, sqltypes.Integer) and first_pk:
            colspec += " SERIAL"
        else:
            colspec += " " + self.dialect.type_compiler.process(column.type)
            default = self.get_column_default_string(column)
            if default is not None:
                colspec += " DEFAULT " + default

        if not column.nullable:
            colspec += " NOT NULL"

        return colspec


class InfoIdentifierPreparer(compiler.IdentifierPreparer):
    def __init__(self, dialect):
        super(InfoIdentifierPreparer, self).__init__(
            dialect, initial_quote="'")

    def format_constraint(self, constraint):
        # Informix doesn't support names for constraints
        return ''

    def _requires_quotes(self, value):
        return False


class InformixDialect(default.DefaultDialect):
    name = 'informix'

    max_identifier_length = 128  # adjusts at runtime based on server version

    type_compiler = InfoTypeCompiler
    statement_compiler = InfoSQLCompiler
    ddl_compiler = InfoDDLCompiler
    preparer = InfoIdentifierPreparer
    colspecs = colspecs
    ischema_names = ischema_names

    def initialize(self, connection):
        super(InformixDialect, self).initialize(connection)

        # http://www.querix.com/support/knowledge-base/error_number_message/error_200
        if self.server_version_info < (9, 2):
            self.max_identifier_length = 18
        else:
            self.max_identifier_length = 128

    def do_begin(self, connect):
        cu = connect.cursor()
        cu.execute('SET LOCK MODE TO WAIT')
        # cu.execute('SET ISOLATION TO REPEATABLE READ')

    @reflection.cache
    def get_table_names(self, connection, schema=None, **kw):
        s = "select tabname from systables"
        return [row[0] for row in connection.execute(s)]

    def has_table(self, connection, table_name, schema=None):
        cursor = connection.execute(
            """select tabname from systables where tabname=?""",
            table_name.lower())
        return cursor.first() is not None

    @reflection.cache
    def get_columns(self, connection, table_name, schema=None, **kw):
        c = connection.execute(
            """select colname, coltype, collength, t3.default, t1.colno from
            syscolumns as t1, systables as t2, OUTER sysdefaults as t3
            where t1.tabid = t2.tabid and t2.tabname=?
            and t3.tabid = t2.tabid and t3.colno = t1.colno
            order by t1.colno""", table_name.lower())
        columns = []
        for name, colattr, collength, default, colno in c.fetchall():
            name = name.lower()

            # in 7.31, coltype = 0x000
            #                       ^^-- column type
            #                      ^-- 1 not null, 0 null
            nullable, coltype = divmod(colattr, 256)
            if coltype not in (0, 13) and default:
                default = default.split()[-1]

            if coltype == 0 or coltype == 13:  # char, varchar
                coltype = ischema_names[coltype](collength)
                if default:
                    default = "'%s'" % default
            elif coltype == 5:  # decimal
                precision, scale = (collength & 0xFF00) >> 8, collength & 0xFF
                if scale == 255:
                    scale = 0
                coltype = sqltypes.Numeric(precision, scale)
            else:
                try:
                    coltype = ischema_names[coltype]
                except KeyError:
                    util.warn("Did not recognize type '%s' of column '%s'" %
                              (coltype, name))
                    coltype = sqltypes.NULLTYPE

            # TODO: nullability ??
            nullable = True

            column_info = dict(name=name, type=coltype, nullable=nullable,
                               default=default)
            columns.append(column_info)
        return columns
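
    # Worked example (editorial annotation, not original code): syscolumns
    # packs nullability into the high byte of coltype, so divmod() splits
    # it; e.g. a NOT NULL Serial column stored as colattr 262 gives:
    #
    #   nullable, coltype = divmod(262, 256)   # -> (1, 6): not null, Serial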

    @reflection.cache
    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
        # FK
        c = connection.execute(
            """select t1.constrname as cons_name, t1.constrtype as cons_type,
            t4.colname as local_column, t7.tabname as remote_table,
            t6.colname as remote_column
            from sysconstraints as t1, systables as t2,
            sysindexes as t3, syscolumns as t4,
            sysreferences as t5, syscolumns as t6, systables as t7,
            sysconstraints as t8, sysindexes as t9
            where t1.tabid = t2.tabid and t2.tabname=? and t1.constrtype = 'R'
            and t3.tabid = t2.tabid and t3.idxname = t1.idxname
            and t4.tabid = t2.tabid and t4.colno = t3.part1
            and t5.constrid = t1.constrid and t8.constrid = t5.primary
            and t6.tabid = t5.ptabid and t6.colno = t9.part1
            and t9.idxname = t8.idxname
            and t7.tabid = t5.ptabid""", table_name.lower())

        def fkey_rec():
            return {
                'name': None,
                'constrained_columns': [],
                'referred_schema': None,
                'referred_table': None,
                'referred_columns': []
            }

        fkeys = util.defaultdict(fkey_rec)

        for cons_name, cons_type, local_column, \
                remote_table, remote_column in c.fetchall():

            rec = fkeys[cons_name]
            rec['name'] = cons_name
            local_cols, remote_cols = \
                rec['constrained_columns'], rec['referred_columns']

            if not rec['referred_table']:
                rec['referred_table'] = remote_table

            local_cols.append(local_column)
            remote_cols.append(remote_column)

        return fkeys.values()

    @reflection.cache
    def get_primary_keys(self, connection, table_name, schema=None, **kw):
        c = connection.execute(
            """select t4.colname as local_column
            from sysconstraints as t1, systables as t2,
            sysindexes as t3, syscolumns as t4
            where t1.tabid = t2.tabid and t2.tabname=? and t1.constrtype = 'P'
            and t3.tabid = t2.tabid and t3.idxname = t1.idxname
            and t4.tabid = t2.tabid and t4.colno = t3.part1""",
            table_name.lower())
        return [r[0] for r in c.fetchall()]

    @reflection.cache
    def get_indexes(self, connection, table_name, schema, **kw):
        # TODO
        return []
@ -1,46 +0,0 @@
from sqlalchemy.dialects.informix.base import InformixDialect
from sqlalchemy.engine import default


class InformixExecutionContext_informixdb(default.DefaultExecutionContext):
    def post_exec(self):
        if self.isinsert:
            self._lastrowid = [self.cursor.sqlerrd[1]]


class InformixDialect_informixdb(InformixDialect):
    driver = 'informixdb'
    default_paramstyle = 'qmark'
    execution_context_cls = InformixExecutionContext_informixdb

    @classmethod
    def dbapi(cls):
        return __import__('informixdb')

    def create_connect_args(self, url):
        if url.host:
            dsn = '%s@%s' % (url.database, url.host)
        else:
            dsn = url.database

        if url.username:
            opt = {'user': url.username, 'password': url.password}
        else:
            opt = {}

        return ([dsn], opt)

    def _get_server_version_info(self, connection):
        # http://informixdb.sourceforge.net/manual.html#inspecting-version-numbers
        vers = connection.dbms_version

        # TODO: not tested
        return tuple([int(x) for x in vers.split('.')])

    def is_disconnect(self, e):
        if isinstance(e, self.dbapi.OperationalError):
            return 'closed the connection' in str(e) or \
                'connection not open' in str(e)
        else:
            return False


dialect = InformixDialect_informixdb
@ -1,3 +0,0 @@
from sqlalchemy.dialects.maxdb import base, sapdb

base.dialect = sapdb.dialect
File diff suppressed because it is too large
@ -1,17 +0,0 @@
from sqlalchemy.dialects.maxdb.base import MaxDBDialect


class MaxDBDialect_sapdb(MaxDBDialect):
    driver = 'sapdb'

    @classmethod
    def dbapi(cls):
        from sapdb import dbapi as _dbapi
        return _dbapi

    def create_connect_args(self, url):
        opts = url.translate_connect_args(username='user')
        opts.update(url.query)
        return [], opts


dialect = MaxDBDialect_sapdb
@ -1,27 +0,0 @@
# mssql/__init__.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from sqlalchemy.dialects.mssql import base, pyodbc, adodbapi, \
    pymssql, zxjdbc, mxodbc

base.dialect = pyodbc.dialect

from sqlalchemy.dialects.mssql.base import \
    INTEGER, BIGINT, SMALLINT, TINYINT, VARCHAR, NVARCHAR, CHAR, \
    NCHAR, TEXT, NTEXT, DECIMAL, NUMERIC, FLOAT, DATETIME, \
    DATETIME2, DATETIMEOFFSET, DATE, TIME, SMALLDATETIME, \
    BINARY, VARBINARY, BIT, REAL, IMAGE, TIMESTAMP, \
    MONEY, SMALLMONEY, UNIQUEIDENTIFIER, SQL_VARIANT, dialect


__all__ = (
    'INTEGER', 'BIGINT', 'SMALLINT', 'TINYINT', 'VARCHAR', 'NVARCHAR', 'CHAR',
    'NCHAR', 'TEXT', 'NTEXT', 'DECIMAL', 'NUMERIC', 'FLOAT', 'DATETIME',
    'DATETIME2', 'DATETIMEOFFSET', 'DATE', 'TIME', 'SMALLDATETIME',
    'BINARY', 'VARBINARY', 'BIT', 'REAL', 'IMAGE', 'TIMESTAMP',
    'MONEY', 'SMALLMONEY', 'UNIQUEIDENTIFIER', 'SQL_VARIANT', 'dialect'
)
@ -1,87 +0,0 @@
# mssql/adodbapi.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: mssql+adodbapi
    :name: adodbapi
    :dbapi: adodbapi
    :connectstring: mssql+adodbapi://<username>:<password>@<dsnname>
    :url: http://adodbapi.sourceforge.net/

.. note::

    The adodbapi dialect is not implemented in SQLAlchemy versions 0.6 and
    above at this time.

"""
import datetime
from sqlalchemy import types as sqltypes, util
from sqlalchemy.dialects.mssql.base import MSDateTime, MSDialect
import sys


class MSDateTime_adodbapi(MSDateTime):
    def result_processor(self, dialect, coltype):
        def process(value):
            # adodbapi will return datetimes with empty time
            # values as datetime.date() objects.
            # Promote them back to full datetime.datetime()
            if type(value) is datetime.date:
                return datetime.datetime(value.year, value.month, value.day)
            return value
        return process


class MSDialect_adodbapi(MSDialect):
    supports_sane_rowcount = True
    supports_sane_multi_rowcount = True
    supports_unicode = sys.maxunicode == 65535
    supports_unicode_statements = True
    driver = 'adodbapi'

    @classmethod
    def import_dbapi(cls):
        import adodbapi as module
        return module

    colspecs = util.update_copy(
        MSDialect.colspecs,
        {
            sqltypes.DateTime: MSDateTime_adodbapi
        }
    )

    def create_connect_args(self, url):
        def check_quote(token):
            if ";" in str(token):
                token = "'%s'" % token
            return token

        keys = dict(
            (k, check_quote(v)) for k, v in url.query.items()
        )

        connectors = ["Provider=SQLOLEDB"]
        if 'port' in keys:
            connectors.append("Data Source=%s, %s" %
                              (keys.get("host"), keys.get("port")))
        else:
            connectors.append("Data Source=%s" % keys.get("host"))
        connectors.append("Initial Catalog=%s" % keys.get("database"))
        user = keys.get("user")
        if user:
            connectors.append("User Id=%s" % user)
            connectors.append("Password=%s" % keys.get("password", ""))
        else:
            connectors.append("Integrated Security=SSPI")
        return [[";".join(connectors)], {}]

    def is_disconnect(self, e, connection, cursor):
        return isinstance(e, self.dbapi.adodbapi.DatabaseError) and \
            "'connection failure'" in str(e)


dialect = MSDialect_adodbapi
File diff suppressed because it is too large
@ -1,136 +0,0 @@
# mssql/information_schema.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

# TODO: should be using the sys. catalog with SQL Server, not information
# schema

from ... import Table, MetaData, Column
from ...types import String, Unicode, UnicodeText, Integer, TypeDecorator
from ... import cast
from ... import util
from ...sql import expression
from ...ext.compiler import compiles

ischema = MetaData()


class CoerceUnicode(TypeDecorator):
    impl = Unicode

    def process_bind_param(self, value, dialect):
        if util.py2k and isinstance(value, util.binary_type):
            value = value.decode(dialect.encoding)
        return value

    def bind_expression(self, bindvalue):
        return _cast_on_2005(bindvalue)


class _cast_on_2005(expression.ColumnElement):
    def __init__(self, bindvalue):
        self.bindvalue = bindvalue


@compiles(_cast_on_2005)
def _compile(element, compiler, **kw):
    from . import base
    if compiler.dialect.server_version_info < base.MS_2005_VERSION:
        return compiler.process(element.bindvalue, **kw)
    else:
        return compiler.process(cast(element.bindvalue, Unicode), **kw)
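
# Annotation (editorial addition, not original code): the effect of the
# hook above is that a CoerceUnicode bind parameter renders wrapped in a
# cast to Unicode (e.g. something like ``CAST(? AS NVARCHAR(max))``) on
# SQL Server 2005 and later, and as a plain ``?`` parameter on older
# servers.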

schemata = Table("SCHEMATA", ischema,
                 Column("CATALOG_NAME", CoerceUnicode, key="catalog_name"),
                 Column("SCHEMA_NAME", CoerceUnicode, key="schema_name"),
                 Column("SCHEMA_OWNER", CoerceUnicode, key="schema_owner"),
                 schema="INFORMATION_SCHEMA")

tables = Table("TABLES", ischema,
               Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"),
               Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
               Column("TABLE_NAME", CoerceUnicode, key="table_name"),
               Column(
                   "TABLE_TYPE", String(convert_unicode=True),
                   key="table_type"),
               schema="INFORMATION_SCHEMA")

columns = Table("COLUMNS", ischema,
                Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
                Column("TABLE_NAME", CoerceUnicode, key="table_name"),
                Column("COLUMN_NAME", CoerceUnicode, key="column_name"),
                Column("IS_NULLABLE", Integer, key="is_nullable"),
                Column("DATA_TYPE", String, key="data_type"),
                Column("ORDINAL_POSITION", Integer, key="ordinal_position"),
                Column("CHARACTER_MAXIMUM_LENGTH", Integer,
                       key="character_maximum_length"),
                Column("NUMERIC_PRECISION", Integer, key="numeric_precision"),
                Column("NUMERIC_SCALE", Integer, key="numeric_scale"),
                Column("COLUMN_DEFAULT", Integer, key="column_default"),
                Column("COLLATION_NAME", String, key="collation_name"),
                schema="INFORMATION_SCHEMA")

constraints = Table("TABLE_CONSTRAINTS", ischema,
                    Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
                    Column("TABLE_NAME", CoerceUnicode, key="table_name"),
                    Column("CONSTRAINT_NAME", CoerceUnicode,
                           key="constraint_name"),
                    Column("CONSTRAINT_TYPE", String(
                        convert_unicode=True), key="constraint_type"),
                    schema="INFORMATION_SCHEMA")

column_constraints = Table("CONSTRAINT_COLUMN_USAGE", ischema,
                           Column("TABLE_SCHEMA", CoerceUnicode,
                                  key="table_schema"),
                           Column("TABLE_NAME", CoerceUnicode,
                                  key="table_name"),
                           Column("COLUMN_NAME", CoerceUnicode,
                                  key="column_name"),
                           Column("CONSTRAINT_NAME", CoerceUnicode,
                                  key="constraint_name"),
                           schema="INFORMATION_SCHEMA")

key_constraints = Table("KEY_COLUMN_USAGE", ischema,
                        Column("TABLE_SCHEMA", CoerceUnicode,
                               key="table_schema"),
                        Column("TABLE_NAME", CoerceUnicode,
                               key="table_name"),
                        Column("COLUMN_NAME", CoerceUnicode,
                               key="column_name"),
                        Column("CONSTRAINT_NAME", CoerceUnicode,
                               key="constraint_name"),
                        Column("ORDINAL_POSITION", Integer,
                               key="ordinal_position"),
                        schema="INFORMATION_SCHEMA")

ref_constraints = Table("REFERENTIAL_CONSTRAINTS", ischema,
                        Column("CONSTRAINT_CATALOG", CoerceUnicode,
                               key="constraint_catalog"),
                        Column("CONSTRAINT_SCHEMA", CoerceUnicode,
                               key="constraint_schema"),
                        Column("CONSTRAINT_NAME", CoerceUnicode,
                               key="constraint_name"),
                        # TODO: is CATLOG misspelled ?
                        Column("UNIQUE_CONSTRAINT_CATLOG", CoerceUnicode,
                               key="unique_constraint_catalog"),
                        Column("UNIQUE_CONSTRAINT_SCHEMA", CoerceUnicode,
                               key="unique_constraint_schema"),
                        Column("UNIQUE_CONSTRAINT_NAME", CoerceUnicode,
                               key="unique_constraint_name"),
                        Column("MATCH_OPTION", String, key="match_option"),
                        Column("UPDATE_RULE", String, key="update_rule"),
                        Column("DELETE_RULE", String, key="delete_rule"),
                        schema="INFORMATION_SCHEMA")

views = Table("VIEWS", ischema,
              Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"),
              Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
              Column("TABLE_NAME", CoerceUnicode, key="table_name"),
              Column("VIEW_DEFINITION", CoerceUnicode, key="view_definition"),
              Column("CHECK_OPTION", String, key="check_option"),
              Column("IS_UPDATABLE", String, key="is_updatable"),
              schema="INFORMATION_SCHEMA")
@ -1,139 +0,0 @@
# mssql/mxodbc.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: mssql+mxodbc
    :name: mxODBC
    :dbapi: mxodbc
    :connectstring: mssql+mxodbc://<username>:<password>@<dsnname>
    :url: http://www.egenix.com/

Execution Modes
---------------

mxODBC features two styles of statement execution, using the
``cursor.execute()`` and ``cursor.executedirect()`` methods (the second being
an extension to the DBAPI specification). The former makes use of a particular
API call specific to the SQL Server Native Client ODBC driver known as
SQLDescribeParam, while the latter does not.

mxODBC apparently only makes repeated use of a single prepared statement
when SQLDescribeParam is used. The advantage to prepared statement reuse is
one of performance. The disadvantage is that SQLDescribeParam has a limited
set of scenarios in which bind parameters are understood, including that they
cannot be placed within the argument lists of function calls, anywhere outside
the FROM, or even within subqueries within the FROM clause - making the usage
of bind parameters within SELECT statements impossible for all but the most
simplistic statements.

For this reason, the mxODBC dialect uses the "native" mode by default only for
INSERT, UPDATE, and DELETE statements, and uses the escaped string mode for
all other statements.

This behavior can be controlled via
:meth:`~sqlalchemy.sql.expression.Executable.execution_options` using the
``native_odbc_execute`` flag with a value of ``True`` or ``False``, where a
value of ``True`` will unconditionally use native bind parameters and a value
of ``False`` will unconditionally use string-escaped parameters.

"""

from ... import types as sqltypes
from ...connectors.mxodbc import MxODBCConnector
from .pyodbc import MSExecutionContext_pyodbc, _MSNumeric_pyodbc
from .base import (MSDialect,
                   MSSQLStrictCompiler,
                   VARBINARY,
                   _MSDateTime, _MSDate, _MSTime)


class _MSNumeric_mxodbc(_MSNumeric_pyodbc):
    """Include pyodbc's numeric processor."""


class _MSDate_mxodbc(_MSDate):
    def bind_processor(self, dialect):
        def process(value):
            if value is not None:
                return "%s-%s-%s" % (value.year, value.month, value.day)
            else:
                return None
        return process


class _MSTime_mxodbc(_MSTime):
    def bind_processor(self, dialect):
        def process(value):
            if value is not None:
                return "%s:%s:%s" % (value.hour, value.minute, value.second)
            else:
                return None
        return process


class _VARBINARY_mxodbc(VARBINARY):

    """
    mxODBC Support for VARBINARY column types.

    This handles the special case for null VARBINARY values,
    which maps None values to the mx.ODBC.Manager.BinaryNull symbol.
    """

    def bind_processor(self, dialect):
        if dialect.dbapi is None:
            return None

        DBAPIBinary = dialect.dbapi.Binary

        def process(value):
            if value is not None:
                return DBAPIBinary(value)
            else:
                # should pull from mx.ODBC.Manager.BinaryNull
                return dialect.dbapi.BinaryNull
        return process


class MSExecutionContext_mxodbc(MSExecutionContext_pyodbc):
    """
    The pyodbc execution context is useful for enabling
    SELECT SCOPE_IDENTITY in cases where OUTPUT clause
    does not work (tables with insert triggers).
    """
    # todo - investigate whether the pyodbc execution context
    #        is really only being used in cases where OUTPUT
    #        won't work.


class MSDialect_mxodbc(MxODBCConnector, MSDialect):

    # this is only needed if "native ODBC" mode is used,
    # which is now disabled by default.
    # statement_compiler = MSSQLStrictCompiler

    execution_ctx_cls = MSExecutionContext_mxodbc

    # flag used by _MSNumeric_mxodbc
    _need_decimal_fix = True

    colspecs = {
        sqltypes.Numeric: _MSNumeric_mxodbc,
        sqltypes.DateTime: _MSDateTime,
        sqltypes.Date: _MSDate_mxodbc,
        sqltypes.Time: _MSTime_mxodbc,
        VARBINARY: _VARBINARY_mxodbc,
        sqltypes.LargeBinary: _VARBINARY_mxodbc,
    }

    def __init__(self, description_encoding=None, **params):
        super(MSDialect_mxodbc, self).__init__(**params)
        self.description_encoding = description_encoding


dialect = MSDialect_mxodbc
@ -1,97 +0,0 @@
# mssql/pymssql.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: mssql+pymssql
    :name: pymssql
    :dbapi: pymssql
    :connectstring: mssql+pymssql://<username>:<password>@<freetds_name>/?\
charset=utf8
    :url: http://pymssql.org/

pymssql is a Python module that provides a Python DBAPI interface around
`FreeTDS <http://www.freetds.org/>`_. Compatible builds are available for
Linux, MacOSX and Windows platforms.

"""
from .base import MSDialect
from ... import types as sqltypes, util, processors
import re


class _MSNumeric_pymssql(sqltypes.Numeric):
    def result_processor(self, dialect, type_):
        if not self.asdecimal:
            return processors.to_float
        else:
            return sqltypes.Numeric.result_processor(self, dialect, type_)


class MSDialect_pymssql(MSDialect):
    supports_sane_rowcount = False
    driver = 'pymssql'

    colspecs = util.update_copy(
        MSDialect.colspecs,
        {
            sqltypes.Numeric: _MSNumeric_pymssql,
            sqltypes.Float: sqltypes.Float,
        }
    )

    @classmethod
    def dbapi(cls):
        module = __import__('pymssql')
        # pymssql < 2.1.1 doesn't have a Binary method. we use string
        client_ver = tuple(int(x) for x in module.__version__.split("."))
        if client_ver < (2, 1, 1):
            # TODO: monkeypatching here is less than ideal
            module.Binary = lambda x: x if hasattr(x, 'decode') else str(x)

        if client_ver < (1, ):
            util.warn("The pymssql dialect expects at least "
                      "the 1.0 series of the pymssql DBAPI.")
        return module

    def __init__(self, **params):
        super(MSDialect_pymssql, self).__init__(**params)
        self.use_scope_identity = True

    def _get_server_version_info(self, connection):
        vers = connection.scalar("select @@version")
        m = re.match(
            r"Microsoft .*? - (\d+).(\d+).(\d+).(\d+)", vers)
        if m:
            return tuple(int(x) for x in m.group(1, 2, 3, 4))
        else:
            return None
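
    # Illustration (editorial annotation, not original code): against a
    # typical banner such as
    #   "Microsoft SQL Server 2012 - 11.0.2100.60 (X64) ..."
    # the pattern above captures ('11', '0', '2100', '60'), giving the
    # version tuple (11, 0, 2100, 60).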

    def create_connect_args(self, url):
        opts = url.translate_connect_args(username='user')
        opts.update(url.query)
        port = opts.pop('port', None)
        if port and 'host' in opts:
            opts['host'] = "%s:%s" % (opts['host'], port)
        return [[], opts]

    def is_disconnect(self, e, connection, cursor):
        for msg in (
            "Adaptive Server connection timed out",
            "Net-Lib error during Connection reset by peer",
            "message 20003",  # connection timeout
            "Error 10054",
            "Not connected to any MS SQL server",
            "Connection is closed",
            "message 20006",  # Write to the server failed
            "message 20017",  # Unexpected EOF from the server
        ):
            if msg in str(e):
                return True
        else:
            return False


dialect = MSDialect_pymssql
@ -1,292 +0,0 @@
# mssql/pyodbc.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

r"""
.. dialect:: mssql+pyodbc
    :name: PyODBC
    :dbapi: pyodbc
    :connectstring: mssql+pyodbc://<username>:<password>@<dsnname>
    :url: http://pypi.python.org/pypi/pyodbc/

Connecting to PyODBC
--------------------

The URL here is to be translated to PyODBC connection strings, as
detailed in `ConnectionStrings <https://code.google.com/p/pyodbc/wiki/ConnectionStrings>`_.

DSN Connections
^^^^^^^^^^^^^^^

A DSN-based connection is **preferred** overall when using ODBC. A
basic DSN-based connection looks like::

    engine = create_engine("mssql+pyodbc://scott:tiger@some_dsn")

The above URL will pass the following connection string to PyODBC::

    dsn=mydsn;UID=user;PWD=pass

If the username and password are omitted, the DSN form will also add
the ``Trusted_Connection=yes`` directive to the ODBC string.

Hostname Connections
^^^^^^^^^^^^^^^^^^^^

Hostname-based connections are **not preferred**, however they are supported.
The ODBC driver name must be explicitly specified::

    engine = create_engine("mssql+pyodbc://scott:tiger@myhost:port/databasename?driver=SQL+Server+Native+Client+10.0")

.. versionchanged:: 1.0.0 Hostname-based PyODBC connections now require the
   SQL Server driver name specified explicitly. SQLAlchemy cannot
   choose an optimal default here as it varies based on platform
   and installed drivers.

Other keywords interpreted by the Pyodbc dialect to be passed to
``pyodbc.connect()`` in both the DSN and hostname cases include:
``odbc_autotranslate``, ``ansi``, ``unicode_results``, ``autocommit``.

Pass through exact Pyodbc string
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

A PyODBC connection string can also be sent exactly as specified in
`ConnectionStrings <https://code.google.com/p/pyodbc/wiki/ConnectionStrings>`_
into the driver using the parameter ``odbc_connect``. The delimiters must be
URL escaped, however, as illustrated below using ``urllib.quote_plus``::

    import urllib
    params = urllib.quote_plus("DRIVER={SQL Server Native Client 10.0};SERVER=dagger;DATABASE=test;UID=user;PWD=password")

    engine = create_engine("mssql+pyodbc:///?odbc_connect=%s" % params)


Unicode Binds
-------------

The current state of PyODBC on a unix backend with FreeTDS and/or
EasySoft is poor regarding unicode; different OS platforms and versions of
UnixODBC versus IODBC versus FreeTDS/EasySoft versus PyODBC itself
dramatically alter how strings are received. The PyODBC dialect attempts to
use all the information it knows to determine whether or not a Python unicode
literal can be passed directly to the PyODBC driver or not; while SQLAlchemy
can encode these to bytestrings first, some users have reported that PyODBC
mis-handles bytestrings for certain encodings and requires a Python unicode
object, while the author has observed widespread cases where a Python unicode
is completely misinterpreted by PyODBC, particularly when dealing with
the information schema tables used in table reflection, and the value
must first be encoded to a bytestring.

It is for this reason that whether or not unicode literals for bound
parameters be sent to PyODBC can be controlled using the
``supports_unicode_binds`` parameter to ``create_engine()``. When
left at its default of ``None``, the PyODBC dialect will use its
best guess as to whether or not the driver deals with unicode literals
well. When ``False``, unicode literals will be encoded first, and when
``True`` unicode literals will be passed straight through. This is an interim
flag that hopefully should not be needed when the unicode situation stabilizes
for unix + PyODBC.

.. versionadded:: 0.7.7
    ``supports_unicode_binds`` parameter to ``create_engine()``\ .

Rowcount Support
----------------

Pyodbc only has partial support for rowcount. See the notes at
:ref:`mssql_rowcount_versioning` for important notes when using ORM
versioning.

"""

from .base import MSExecutionContext, MSDialect, VARBINARY
from ...connectors.pyodbc import PyODBCConnector
from ... import types as sqltypes, util, exc
import decimal
import re


class _ms_numeric_pyodbc(object):

    """Turns Decimals with adjusted() < 0 or > 7 into strings.

    The routines here are needed for older pyodbc versions
    as well as current mxODBC versions.

    """

    def bind_processor(self, dialect):

        super_process = super(_ms_numeric_pyodbc, self).\
            bind_processor(dialect)

        if not dialect._need_decimal_fix:
            return super_process

        def process(value):
            if self.asdecimal and \
                    isinstance(value, decimal.Decimal):

                adjusted = value.adjusted()
                if adjusted < 0:
                    return self._small_dec_to_string(value)
                elif adjusted > 7:
                    return self._large_dec_to_string(value)

            if super_process:
                return super_process(value)
            else:
                return value
        return process

    # these routines needed for older versions of pyodbc.
    # as of 2.1.8 this logic is integrated.

    def _small_dec_to_string(self, value):
        return "%s0.%s%s" % (
            (value < 0 and '-' or ''),
            '0' * (abs(value.adjusted()) - 1),
            "".join([str(nint) for nint in value.as_tuple()[1]]))

    def _large_dec_to_string(self, value):
        _int = value.as_tuple()[1]
        if 'E' in str(value):
            result = "%s%s%s" % (
                (value < 0 and '-' or ''),
                "".join([str(s) for s in _int]),
                "0" * (value.adjusted() - (len(_int) - 1)))
        else:
            if (len(_int) - 1) > value.adjusted():
                result = "%s%s.%s" % (
                    (value < 0 and '-' or ''),
                    "".join(
                        [str(s) for s in _int][0:value.adjusted() + 1]),
                    "".join(
                        [str(s) for s in _int][value.adjusted() + 1:]))
            else:
                result = "%s%s" % (
                    (value < 0 and '-' or ''),
                    "".join(
                        [str(s) for s in _int][0:value.adjusted() + 1]))
        return result
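
    # Worked examples (editorial annotation, not original code):
    #
    #   _small_dec_to_string(Decimal('1E-8'))        -> '0.00000001'
    #   _large_dec_to_string(Decimal('1E+12'))       -> '1000000000000'
    #   _large_dec_to_string(Decimal('123456789.5')) -> '123456789.5'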


class _MSNumeric_pyodbc(_ms_numeric_pyodbc, sqltypes.Numeric):
    pass


class _MSFloat_pyodbc(_ms_numeric_pyodbc, sqltypes.Float):
    pass


class _VARBINARY_pyodbc(VARBINARY):
    def bind_processor(self, dialect):
        if dialect.dbapi is None:
            return None

        DBAPIBinary = dialect.dbapi.Binary

        def process(value):
            if value is not None:
                return DBAPIBinary(value)
            else:
                # pyodbc-specific
                return dialect.dbapi.BinaryNull
        return process


class MSExecutionContext_pyodbc(MSExecutionContext):
    _embedded_scope_identity = False

    def pre_exec(self):
        """where appropriate, issue "select scope_identity()" in the same
        statement.

        Background on why "scope_identity()" is preferable to "@@identity":
        http://msdn.microsoft.com/en-us/library/ms190315.aspx

        Background on why we attempt to embed "scope_identity()" into the same
        statement as the INSERT:
        http://code.google.com/p/pyodbc/wiki/FAQs#How_do_I_retrieve_autogenerated/identity_values?

        """

        super(MSExecutionContext_pyodbc, self).pre_exec()

        # don't embed the scope_identity select into an
        # "INSERT .. DEFAULT VALUES"
        if self._select_lastrowid and \
                self.dialect.use_scope_identity and \
                len(self.parameters[0]):
            self._embedded_scope_identity = True

            self.statement += "; select scope_identity()"

    def post_exec(self):
        if self._embedded_scope_identity:
            # Fetch the last inserted id from the manipulated statement.
            # We may have to skip over a number of result sets with
            # no data (due to triggers, etc.)
            while True:
                try:
                    # fetchall() ensures the cursor is consumed
                    # without closing it (FreeTDS particularly)
                    row = self.cursor.fetchall()[0]
                    break
                except self.dialect.dbapi.Error:
                    # no way around this - nextset() consumes the previous set
                    # so we need to just keep flipping
                    self.cursor.nextset()

            self._lastrowid = int(row[0])
        else:
            super(MSExecutionContext_pyodbc, self).post_exec()


class MSDialect_pyodbc(PyODBCConnector, MSDialect):

    execution_ctx_cls = MSExecutionContext_pyodbc

    colspecs = util.update_copy(
        MSDialect.colspecs,
        {
            sqltypes.Numeric: _MSNumeric_pyodbc,
            sqltypes.Float: _MSFloat_pyodbc,
            VARBINARY: _VARBINARY_pyodbc,
            sqltypes.LargeBinary: _VARBINARY_pyodbc,
        }
    )

    def __init__(self, description_encoding=None, **params):
        if 'description_encoding' in params:
            self.description_encoding = params.pop('description_encoding')
        super(MSDialect_pyodbc, self).__init__(**params)
        self.use_scope_identity = self.use_scope_identity and \
            self.dbapi and \
            hasattr(self.dbapi.Cursor, 'nextset')
        self._need_decimal_fix = self.dbapi and \
            self._dbapi_version() < (2, 1, 8)

    def _get_server_version_info(self, connection):
        try:
            raw = connection.scalar("SELECT SERVERPROPERTY('ProductVersion')")
        except exc.DBAPIError:
            # SQL Server docs indicate this function isn't present prior to
            # 2008; additionally, unknown combinations of pyodbc aren't
            # able to run this query.
            return super(MSDialect_pyodbc, self).\
                _get_server_version_info(connection)
        else:
            version = []
            r = re.compile(r'[.\-]')
            for n in r.split(raw):
                try:
                    version.append(int(n))
                except ValueError:
                    version.append(n)
            return tuple(version)


dialect = MSDialect_pyodbc
@ -1,69 +0,0 @@
# mssql/zxjdbc.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: mssql+zxjdbc
    :name: zxJDBC for Jython
    :dbapi: zxjdbc
    :connectstring: mssql+zxjdbc://user:pass@host:port/dbname\
[?key=value&key=value...]
    :driverurl: http://jtds.sourceforge.net/

.. note:: Jython is not supported by current versions of SQLAlchemy. The
   zxjdbc dialect should be considered as experimental.

"""
from ...connectors.zxJDBC import ZxJDBCConnector
from .base import MSDialect, MSExecutionContext
from ... import engine


class MSExecutionContext_zxjdbc(MSExecutionContext):

    _embedded_scope_identity = False

    def pre_exec(self):
        super(MSExecutionContext_zxjdbc, self).pre_exec()
        # scope_identity after the fact returns null in jTDS so we must
        # embed it
        if self._select_lastrowid and self.dialect.use_scope_identity:
            self._embedded_scope_identity = True
            self.statement += "; SELECT scope_identity()"

    def post_exec(self):
        if self._embedded_scope_identity:
            while True:
                try:
                    row = self.cursor.fetchall()[0]
                    break
                except self.dialect.dbapi.Error:
                    self.cursor.nextset()
            self._lastrowid = int(row[0])

        if (self.isinsert or self.isupdate or self.isdelete) and \
                self.compiled.returning:
            self._result_proxy = engine.FullyBufferedResultProxy(self)

        if self._enable_identity_insert:
            table = self.dialect.identifier_preparer.format_table(
                self.compiled.statement.table)
            self.cursor.execute("SET IDENTITY_INSERT %s OFF" % table)


class MSDialect_zxjdbc(ZxJDBCConnector, MSDialect):
    jdbc_db_name = 'jtds:sqlserver'
    jdbc_driver_name = 'net.sourceforge.jtds.jdbc.Driver'

    execution_ctx_cls = MSExecutionContext_zxjdbc

    def _get_server_version_info(self, connection):
        return tuple(
            int(x)
            for x in connection.connection.dbversion.split('.')
        )


dialect = MSDialect_zxjdbc
@ -1,31 +0,0 @@
# mysql/__init__.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from . import base, mysqldb, oursql, \
    pyodbc, zxjdbc, mysqlconnector, pymysql, \
    gaerdbms, cymysql

# default dialect
base.dialect = mysqldb.dialect

from .base import \
    BIGINT, BINARY, BIT, BLOB, BOOLEAN, CHAR, DATE, DATETIME, \
    DECIMAL, DOUBLE, ENUM, \
    FLOAT, INTEGER, JSON, LONGBLOB, LONGTEXT, MEDIUMBLOB, \
    MEDIUMINT, MEDIUMTEXT, NCHAR, \
    NVARCHAR, NUMERIC, SET, SMALLINT, REAL, TEXT, TIME, TIMESTAMP, \
    TINYBLOB, TINYINT, TINYTEXT, \
    VARBINARY, VARCHAR, YEAR, dialect

__all__ = (
    'BIGINT', 'BINARY', 'BIT', 'BLOB', 'BOOLEAN', 'CHAR', 'DATE', 'DATETIME',
    'DECIMAL', 'DOUBLE', 'ENUM', 'FLOAT', 'INTEGER',
    'JSON', 'LONGBLOB', 'LONGTEXT', 'MEDIUMBLOB', 'MEDIUMINT', 'MEDIUMTEXT',
    'NCHAR', 'NVARCHAR', 'NUMERIC', 'SET', 'SMALLINT', 'REAL', 'TEXT', 'TIME',
    'TIMESTAMP', 'TINYBLOB', 'TINYINT', 'TINYTEXT', 'VARBINARY', 'VARCHAR',
    'YEAR', 'dialect'
)
File diff suppressed because it is too large
@ -1,87 +0,0 @@
# mysql/cymysql.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""

.. dialect:: mysql+cymysql
    :name: CyMySQL
    :dbapi: cymysql
    :connectstring: mysql+cymysql://<username>:<password>@<host>/<dbname>\
[?<options>]
    :url: https://github.com/nakagami/CyMySQL

"""
import re

from .mysqldb import MySQLDialect_mysqldb
from .base import (BIT, MySQLDialect)
from ... import util


class _cymysqlBIT(BIT):
    def result_processor(self, dialect, coltype):
        """Convert MySQL's 64 bit, variable length binary string to a
        long."""

        def process(value):
            if value is not None:
                v = 0
                for i in util.iterbytes(value):
                    v = v << 8 | i
                return v
            return value
        return process
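
    # Worked example (editorial annotation, not original code): the
    # shift/or loop above folds a big-endian byte string into an integer:
    #
    #   b'\x01\x02'  ->  (1 << 8) | 2  ->  258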


class MySQLDialect_cymysql(MySQLDialect_mysqldb):
    driver = 'cymysql'

    description_encoding = None
    supports_sane_rowcount = True
    supports_sane_multi_rowcount = False
    supports_unicode_statements = True

    colspecs = util.update_copy(
        MySQLDialect.colspecs,
        {
            BIT: _cymysqlBIT,
        }
    )

    @classmethod
    def dbapi(cls):
        return __import__('cymysql')

    def _get_server_version_info(self, connection):
        dbapi_con = connection.connection
        version = []
        r = re.compile(r'[.\-]')
        for n in r.split(dbapi_con.server_version):
            try:
                version.append(int(n))
            except ValueError:
                version.append(n)
        return tuple(version)

    def _detect_charset(self, connection):
        return connection.connection.charset

    def _extract_error_code(self, exception):
        return exception.errno

    def is_disconnect(self, e, connection, cursor):
        if isinstance(e, self.dbapi.OperationalError):
            return self._extract_error_code(e) in \
                (2006, 2013, 2014, 2045, 2055)
        elif isinstance(e, self.dbapi.InterfaceError):
            # if underlying connection is closed,
            # this is the error you get
            return True
        else:
            return False


dialect = MySQLDialect_cymysql
@ -1,311 +0,0 @@
# mysql/enumerated.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

import re

from .types import _StringType
from ... import exc, sql, util
from ... import types as sqltypes


class _EnumeratedValues(_StringType):
    def _init_values(self, values, kw):
        self.quoting = kw.pop('quoting', 'auto')

        if self.quoting == 'auto' and len(values):
            # What quoting character are we using?
            q = None
            for e in values:
                if len(e) == 0:
                    self.quoting = 'unquoted'
                    break
                elif q is None:
                    q = e[0]

                if len(e) == 1 or e[0] != q or e[-1] != q:
                    self.quoting = 'unquoted'
                    break
            else:
                self.quoting = 'quoted'

        if self.quoting == 'quoted':
            util.warn_deprecated(
                'Manually quoting %s value literals is deprecated. Supply '
                'unquoted values and use the quoting= option in cases of '
                'ambiguity.' % self.__class__.__name__)

            values = self._strip_values(values)

        self._enumerated_values = values
        length = max([len(v) for v in values] + [0])
        return values, length

    @classmethod
    def _strip_values(cls, values):
        strip_values = []
        for a in values:
            if a[0:1] == '"' or a[0:1] == "'":
                # strip enclosing quotes and unquote interior
                a = a[1:-1].replace(a[0] * 2, a[0])
            strip_values.append(a)
        return strip_values
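
    # Worked example (editorial annotation, not original code):
    #
    #   _EnumeratedValues._strip_values(["'a'", '"b"', "'it''s'"])
    #   # -> ['a', 'b', "it's"]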


class ENUM(sqltypes.Enum, _EnumeratedValues):
    """MySQL ENUM type."""

    __visit_name__ = 'ENUM'

    def __init__(self, *enums, **kw):
        """Construct an ENUM.

        E.g.::

          Column('myenum', ENUM("foo", "bar", "baz"))

        :param enums: The range of valid values for this ENUM.  Values will be
          quoted when generating the schema according to the quoting flag (see
          below).  This object may also be a PEP-435-compliant enumerated
          type.

          .. versionadded:: 1.1 added support for PEP-435-compliant enumerated
             types.

        :param strict: This flag has no effect.

         .. versionchanged:: 1.1 The MySQL ENUM type as well as the base Enum
            type now validates all Python data values.

        :param charset: Optional, a column-level character set for this string
          value.  Takes precedence over 'ascii' or 'unicode' short-hand.

        :param collation: Optional, a column-level collation for this string
          value.  Takes precedence over 'binary' short-hand.

        :param ascii: Defaults to False: short-hand for the ``latin1``
          character set, generates ASCII in schema.

        :param unicode: Defaults to False: short-hand for the ``ucs2``
          character set, generates UNICODE in schema.

        :param binary: Defaults to False: short-hand, pick the binary
          collation type that matches the column's character set.  Generates
          BINARY in schema.  This does not affect the type of data stored,
          only the collation of character data.

        :param quoting: Defaults to 'auto': automatically determine enum value
          quoting.  If all enum values are surrounded by the same quoting
          character, then use 'quoted' mode.  Otherwise, use 'unquoted' mode.

          'quoted': values in enums are already quoted, they will be used
          directly when generating the schema - this usage is deprecated.

          'unquoted': values in enums are not quoted, they will be escaped and
          surrounded by single quotes when generating the schema.

          Previous versions of this type always required manually quoted
          values to be supplied; future versions will always quote the string
          literals for you.  This is a transitional option.

        """

        kw.pop('strict', None)
        validate_strings = kw.pop("validate_strings", False)
        sqltypes.Enum.__init__(
            self, validate_strings=validate_strings, *enums)
        kw.pop('metadata', None)
        kw.pop('schema', None)
        kw.pop('name', None)
        kw.pop('quote', None)
        kw.pop('native_enum', None)
        kw.pop('inherit_schema', None)
        kw.pop('_create_events', None)
        _StringType.__init__(self, length=self.length, **kw)

    def _setup_for_values(self, values, objects, kw):
        values, length = self._init_values(values, kw)
        return sqltypes.Enum._setup_for_values(self, values, objects, kw)

    def _object_value_for_elem(self, elem):
        # mysql sends back a blank string for any value that
        # was persisted that was not in the enums; that is, it does no
        # validation on the incoming data, it "truncates" it to be
        # the blank string.  Return it straight.
        if elem == "":
            return elem
        else:
            return super(ENUM, self)._object_value_for_elem(elem)

    def __repr__(self):
        return util.generic_repr(
            self, to_inspect=[ENUM, _StringType, sqltypes.Enum])

    def adapt(self, cls, **kw):
        return sqltypes.Enum.adapt(self, cls, **kw)


class SET(_EnumeratedValues):
    """MySQL SET type."""

    __visit_name__ = 'SET'

    def __init__(self, *values, **kw):
        """Construct a SET.

        E.g.::

          Column('myset', SET("foo", "bar", "baz"))


        The list of potential values is required in the case that this
        set will be used to generate DDL for a table, or if the
        :paramref:`.SET.retrieve_as_bitwise` flag is set to True.

        :param values: The range of valid values for this SET.

        :param convert_unicode: Same flag as that of
         :paramref:`.String.convert_unicode`.

        :param collation: same as that of :paramref:`.String.collation`

        :param charset: same as that of :paramref:`.VARCHAR.charset`.

        :param ascii: same as that of :paramref:`.VARCHAR.ascii`.

        :param unicode: same as that of :paramref:`.VARCHAR.unicode`.

        :param binary: same as that of :paramref:`.VARCHAR.binary`.

        :param quoting: Defaults to 'auto': automatically determine set value
          quoting.  If all values are surrounded by the same quoting
          character, then use 'quoted' mode.  Otherwise, use 'unquoted' mode.

          'quoted': values in enums are already quoted, they will be used
          directly when generating the schema - this usage is deprecated.

          'unquoted': values in enums are not quoted, they will be escaped and
          surrounded by single quotes when generating the schema.

          Previous versions of this type always required manually quoted
          values to be supplied; future versions will always quote the string
          literals for you.  This is a transitional option.

          .. versionadded:: 0.9.0

        :param retrieve_as_bitwise: if True, the data for the set type will be
          persisted and selected using an integer value, where a set is coerced
          into a bitwise mask for persistence.  MySQL allows this mode which
          has the advantage of being able to store values unambiguously,
          such as the blank string ``''``.  The datatype will appear
          as the expression ``col + 0`` in a SELECT statement, so that the
          value is coerced into an integer value in result sets.
          This flag is required if one wishes
          to persist a set that can store the blank string ``''`` as a value.

          .. warning::

            When using :paramref:`.mysql.SET.retrieve_as_bitwise`, it is
            essential that the list of set values is expressed in the
            **exact same order** as exists on the MySQL database.

          .. versionadded:: 1.0.0


        """
        self.retrieve_as_bitwise = kw.pop('retrieve_as_bitwise', False)
        values, length = self._init_values(values, kw)
        self.values = tuple(values)
        if not self.retrieve_as_bitwise and '' in values:
            raise exc.ArgumentError(
                "Can't use the blank value '' in a SET without "
                "setting retrieve_as_bitwise=True")
        if self.retrieve_as_bitwise:
            self._bitmap = dict(
                (value, 2 ** idx)
                for idx, value in enumerate(self.values)
            )
            self._bitmap.update(
                (2 ** idx, value)
                for idx, value in enumerate(self.values)
            )
        kw.setdefault('length', length)
        super(SET, self).__init__(**kw)
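    # For SET('a', 'b', 'c', retrieve_as_bitwise=True), the mapping built
    # above is bidirectional (sketch):
    #
    #     'a' <-> 1, 'b' <-> 2, 'c' <-> 4
    #
    # so {'a', 'c'} persists as 1 | 4 == 5, and 5 is expanded back to
    # {'a', 'c'} via util.map_bits() in result_processor() below.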

    def column_expression(self, colexpr):
        if self.retrieve_as_bitwise:
            return sql.type_coerce(
                sql.type_coerce(colexpr, sqltypes.Integer) + 0,
                self
            )
        else:
            return colexpr

    def result_processor(self, dialect, coltype):
        if self.retrieve_as_bitwise:
            def process(value):
                if value is not None:
                    value = int(value)

                    return set(
                        util.map_bits(self._bitmap.__getitem__, value)
                    )
                else:
                    return None
        else:
            super_convert = super(SET, self).result_processor(dialect, coltype)

            def process(value):
                if isinstance(value, util.string_types):
                    # MySQLdb returns a string, let's parse
                    if super_convert:
                        value = super_convert(value)
                    return set(re.findall(r'[^,]+', value))
                else:
                    # mysql-connector-python does a naive
                    # split(",") which throws in an empty string
                    if value is not None:
                        value.discard('')
                    return value
        return process

    def bind_processor(self, dialect):
        super_convert = super(SET, self).bind_processor(dialect)
        if self.retrieve_as_bitwise:
            def process(value):
                if value is None:
                    return None
                elif isinstance(value, util.int_types + util.string_types):
                    if super_convert:
                        return super_convert(value)
                    else:
                        return value
                else:
                    int_value = 0
                    for v in value:
                        int_value |= self._bitmap[v]
                    return int_value
        else:

            def process(value):
                # accept strings and int (actually bitflag) values directly
                if value is not None and not isinstance(
                        value, util.int_types + util.string_types):
                    value = ",".join(value)

                if super_convert:
                    return super_convert(value)
                else:
                    return value
        return process

    def adapt(self, impltype, **kw):
        kw['retrieve_as_bitwise'] = self.retrieve_as_bitwise
        return util.constructor_copy(
            self, impltype,
            *self.values,
            **kw
        )
@ -1,102 +0,0 @@
# mysql/gaerdbms.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: mysql+gaerdbms
    :name: Google Cloud SQL
    :dbapi: rdbms
    :connectstring: mysql+gaerdbms:///<dbname>?instance=<instancename>
    :url: https://developers.google.com/appengine/docs/python/cloud-sql/\
developers-guide

This dialect is based primarily on the :mod:`.mysql.mysqldb` dialect with
minimal changes.

.. versionadded:: 0.7.8

.. deprecated:: 1.0 This dialect is **no longer necessary** for
    Google Cloud SQL; the MySQLdb dialect can be used directly.
    Cloud SQL now recommends creating connections via the
    mysql dialect using the URL format

    ``mysql+mysqldb://root@/<dbname>?unix_socket=/cloudsql/<projectid>:<instancename>``


Pooling
-------

Google App Engine connections appear to be randomly recycled,
so the dialect does not pool connections.  The :class:`.NullPool`
implementation is installed within the :class:`.Engine` by
default.

"""

import os

from .mysqldb import MySQLDialect_mysqldb
from ...pool import NullPool
import re
from sqlalchemy.util import warn_deprecated


def _is_dev_environment():
    return os.environ.get('SERVER_SOFTWARE', '').startswith('Development/')


class MySQLDialect_gaerdbms(MySQLDialect_mysqldb):

    @classmethod
    def dbapi(cls):

        warn_deprecated(
            "Google Cloud SQL now recommends creating connections via the "
            "MySQLdb dialect directly, using the URL format "
            "mysql+mysqldb://root@/<dbname>?unix_socket=/cloudsql/"
            "<projectid>:<instancename>"
        )

        # from django:
        # http://code.google.com/p/googleappengine/source/
        #     browse/trunk/python/google/storage/speckle/
        #     python/django/backend/base.py#118
        # see also [ticket:2649]
        # see also http://stackoverflow.com/q/14224679/34549
        from google.appengine.api import apiproxy_stub_map

        if _is_dev_environment():
            from google.appengine.api import rdbms_mysqldb
            return rdbms_mysqldb
        elif apiproxy_stub_map.apiproxy.GetStub('rdbms'):
            from google.storage.speckle.python.api import rdbms_apiproxy
            return rdbms_apiproxy
        else:
            from google.storage.speckle.python.api import rdbms_googleapi
            return rdbms_googleapi

    @classmethod
    def get_pool_class(cls, url):
        # Cloud SQL connections die at any moment
        return NullPool

    def create_connect_args(self, url):
        opts = url.translate_connect_args()
        if not _is_dev_environment():
            # 'dsn' and 'instance' are because we are skipping
            # the traditional google.api.rdbms wrapper
            opts['dsn'] = ''
            opts['instance'] = url.query['instance']
        return [], opts

    def _extract_error_code(self, exception):
        match = re.compile(r"^(\d+)L?:|^\((\d+)L?,").match(str(exception))
        # The rdbms api will wrap then re-raise some types of errors
        # making this regex return no matches.
        code = match.group(1) or match.group(2) if match else None
        if code:
            return int(code)
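    # A sketch of what the regex extracts (error string is illustrative):
    #
    #     >>> re.compile(r"^(\d+)L?:|^\((\d+)L?,").match(
    #     ...     "(1045, 'Access denied')").group(2)
    #     '1045'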

dialect = MySQLDialect_gaerdbms
@ -1,79 +0,0 @@
# mysql/json.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from __future__ import absolute_import

import json

from ...sql import elements
from ... import types as sqltypes
from ... import util


class JSON(sqltypes.JSON):
    """MySQL JSON type.

    MySQL supports JSON as of version 5.7.  Note that MariaDB does **not**
    support JSON at the time of this writing.

    The :class:`.mysql.JSON` type supports persistence of JSON values
    as well as the core index operations provided by :class:`.types.JSON`
    datatype, by adapting the operations to render the ``JSON_EXTRACT``
    function at the database level.

    .. versionadded:: 1.1

    """

    pass


class _FormatTypeMixin(object):
    def _format_value(self, value):
        raise NotImplementedError()

    def bind_processor(self, dialect):
        super_proc = self.string_bind_processor(dialect)

        def process(value):
            value = self._format_value(value)
            if super_proc:
                value = super_proc(value)
            return value

        return process

    def literal_processor(self, dialect):
        super_proc = self.string_literal_processor(dialect)

        def process(value):
            value = self._format_value(value)
            if super_proc:
                value = super_proc(value)
            return value

        return process


class JSONIndexType(_FormatTypeMixin, sqltypes.JSON.JSONIndexType):

    def _format_value(self, value):
        if isinstance(value, int):
            value = "$[%s]" % value
        else:
            value = '$."%s"' % value
        return value


class JSONPathType(_FormatTypeMixin, sqltypes.JSON.JSONPathType):
    def _format_value(self, value):
        return "$%s" % (
            "".join([
                "[%s]" % elem if isinstance(elem, int)
                else '."%s"' % elem for elem in value
            ])
        )
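# A sketch of the path rendering (path elements below are hypothetical):
#
#     >>> JSONPathType()._format_value([1, "name"])
#     '$[1]."name"'
#
# integer elements become array indexes, string elements become
# double-quoted member lookups, matching MySQL's JSON_EXTRACT path syntax.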
@ -1,203 +0,0 @@
# mysql/mysqlconnector.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: mysql+mysqlconnector
    :name: MySQL Connector/Python
    :dbapi: myconnpy
    :connectstring: mysql+mysqlconnector://<user>:<password>@\
<host>[:<port>]/<dbname>
    :url: http://dev.mysql.com/downloads/connector/python/


Unicode
-------

Please see :ref:`mysql_unicode` for current recommendations on unicode
handling.

"""

from .base import (MySQLDialect, MySQLExecutionContext,
                   MySQLCompiler, MySQLIdentifierPreparer,
                   BIT)

from ... import util
import re


class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext):

    def get_lastrowid(self):
        return self.cursor.lastrowid


class MySQLCompiler_mysqlconnector(MySQLCompiler):
    def visit_mod_binary(self, binary, operator, **kw):
        if self.dialect._mysqlconnector_double_percents:
            return self.process(binary.left, **kw) + " %% " + \
                self.process(binary.right, **kw)
        else:
            return self.process(binary.left, **kw) + " % " + \
                self.process(binary.right, **kw)

    def post_process_text(self, text):
        if self.dialect._mysqlconnector_double_percents:
            return text.replace('%', '%%')
        else:
            return text

    def escape_literal_column(self, text):
        if self.dialect._mysqlconnector_double_percents:
            return text.replace('%', '%%')
        else:
            return text
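    # Older Connector/Python (pre-2.0 on Python 2) treats "%" in statement
    # text as a pyformat placeholder marker even inside literals, so the
    # hooks above double it.  A sketch of the effect:
    #
    #     >>> "SELECT 'a%b'".replace('%', '%%')
    #     "SELECT 'a%%b'"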


class MySQLIdentifierPreparer_mysqlconnector(MySQLIdentifierPreparer):

    def _escape_identifier(self, value):
        value = value.replace(self.escape_quote, self.escape_to_quote)
        if self.dialect._mysqlconnector_double_percents:
            return value.replace("%", "%%")
        else:
            return value


class _myconnpyBIT(BIT):
    def result_processor(self, dialect, coltype):
        """MySQL-connector already converts mysql bits, so."""

        return None


class MySQLDialect_mysqlconnector(MySQLDialect):
    driver = 'mysqlconnector'

    supports_unicode_binds = True

    supports_sane_rowcount = True
    supports_sane_multi_rowcount = True

    supports_native_decimal = True

    default_paramstyle = 'format'
    execution_ctx_cls = MySQLExecutionContext_mysqlconnector
    statement_compiler = MySQLCompiler_mysqlconnector

    preparer = MySQLIdentifierPreparer_mysqlconnector

    colspecs = util.update_copy(
        MySQLDialect.colspecs,
        {
            BIT: _myconnpyBIT,
        }
    )

    @util.memoized_property
    def supports_unicode_statements(self):
        return util.py3k or self._mysqlconnector_version_info > (2, 0)

    @classmethod
    def dbapi(cls):
        from mysql import connector
        return connector

    def create_connect_args(self, url):
        opts = url.translate_connect_args(username='user')

        opts.update(url.query)

        util.coerce_kw_type(opts, 'allow_local_infile', bool)
        util.coerce_kw_type(opts, 'autocommit', bool)
        util.coerce_kw_type(opts, 'buffered', bool)
        util.coerce_kw_type(opts, 'compress', bool)
        util.coerce_kw_type(opts, 'connection_timeout', int)
        util.coerce_kw_type(opts, 'connect_timeout', int)
        util.coerce_kw_type(opts, 'consume_results', bool)
        util.coerce_kw_type(opts, 'force_ipv6', bool)
        util.coerce_kw_type(opts, 'get_warnings', bool)
        util.coerce_kw_type(opts, 'pool_reset_session', bool)
        util.coerce_kw_type(opts, 'pool_size', int)
        util.coerce_kw_type(opts, 'raise_on_warnings', bool)
        util.coerce_kw_type(opts, 'raw', bool)
        util.coerce_kw_type(opts, 'ssl_verify_cert', bool)
        util.coerce_kw_type(opts, 'use_pure', bool)
        util.coerce_kw_type(opts, 'use_unicode', bool)

        # unfortunately, MySQL/connector python refuses to release a
        # cursor without reading fully, so non-buffered isn't an option
        opts.setdefault('buffered', True)

        # FOUND_ROWS must be set in ClientFlag to enable
        # supports_sane_rowcount.
        if self.dbapi is not None:
            try:
                from mysql.connector.constants import ClientFlag
                client_flags = opts.get(
                    'client_flags', ClientFlag.get_default())
                client_flags |= ClientFlag.FOUND_ROWS
                opts['client_flags'] = client_flags
            except Exception:
                pass
        return [[], opts]

    @util.memoized_property
    def _mysqlconnector_version_info(self):
        if self.dbapi and hasattr(self.dbapi, '__version__'):
            m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?',
                         self.dbapi.__version__)
            if m:
                return tuple(
                    int(x)
                    for x in m.group(1, 2, 3)
                    if x is not None)
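    # A sketch of the parse (version strings are illustrative):
    #
    #     >>> re.match(r'(\d+)\.(\d+)(?:\.(\d+))?', '2.1.3').group(1, 2, 3)
    #     ('2', '1', '3')
    #
    # yielding (2, 1, 3); a two-part version such as '1.2' yields (1, 2)
    # because the unmatched third group is filtered out above.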

    @util.memoized_property
    def _mysqlconnector_double_percents(self):
        return not util.py3k and self._mysqlconnector_version_info < (2, 0)

    def _get_server_version_info(self, connection):
        dbapi_con = connection.connection
        version = dbapi_con.get_server_version()
        return tuple(version)

    def _detect_charset(self, connection):
        return connection.connection.charset

    def _extract_error_code(self, exception):
        return exception.errno

    def is_disconnect(self, e, connection, cursor):
        errnos = (2006, 2013, 2014, 2045, 2055, 2048)
        exceptions = (self.dbapi.OperationalError, self.dbapi.InterfaceError)
        if isinstance(e, exceptions):
            return e.errno in errnos or \
                "MySQL Connection not available." in str(e)
        else:
            return False

    def _compat_fetchall(self, rp, charset=None):
        return rp.fetchall()

    def _compat_fetchone(self, rp, charset=None):
        return rp.fetchone()

    _isolation_lookup = set(['SERIALIZABLE', 'READ UNCOMMITTED',
                             'READ COMMITTED', 'REPEATABLE READ',
                             'AUTOCOMMIT'])

    def _set_isolation_level(self, connection, level):
        if level == 'AUTOCOMMIT':
            connection.autocommit = True
        else:
            connection.autocommit = False
            super(MySQLDialect_mysqlconnector, self)._set_isolation_level(
                connection, level)


dialect = MySQLDialect_mysqlconnector
@ -1,228 +0,0 @@
# mysql/mysqldb.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""

.. dialect:: mysql+mysqldb
    :name: MySQL-Python
    :dbapi: mysqldb
    :connectstring: mysql+mysqldb://<user>:<password>@<host>[:<port>]/<dbname>
    :url: http://sourceforge.net/projects/mysql-python

.. _mysqldb_unicode:

Unicode
-------

Please see :ref:`mysql_unicode` for current recommendations on unicode
handling.

Py3K Support
------------

Currently, MySQLdb only runs on Python 2 and development has been stopped.
`mysqlclient`_ is a fork of MySQLdb and provides Python 3 support as well
as some bugfixes.

.. _mysqlclient: https://github.com/PyMySQL/mysqlclient-python

Using MySQLdb with Google Cloud SQL
-----------------------------------

Google Cloud SQL now recommends use of the MySQLdb dialect.  Connect
using a URL like the following::

    mysql+mysqldb://root@/<dbname>?unix_socket=/cloudsql/<projectid>:<instancename>

Server Side Cursors
-------------------

The mysqldb dialect supports server-side cursors. See :ref:`mysql_ss_cursors`.

"""

from .base import (MySQLDialect, MySQLExecutionContext,
                   MySQLCompiler, MySQLIdentifierPreparer)
from .base import TEXT
from ... import sql
from ... import util
import re


class MySQLExecutionContext_mysqldb(MySQLExecutionContext):

    @property
    def rowcount(self):
        if hasattr(self, '_rowcount'):
            return self._rowcount
        else:
            return self.cursor.rowcount


class MySQLCompiler_mysqldb(MySQLCompiler):
    def visit_mod_binary(self, binary, operator, **kw):
        return self.process(binary.left, **kw) + " %% " + \
            self.process(binary.right, **kw)

    def post_process_text(self, text):
        return text.replace('%', '%%')


class MySQLIdentifierPreparer_mysqldb(MySQLIdentifierPreparer):

    def _escape_identifier(self, value):
        value = value.replace(self.escape_quote, self.escape_to_quote)
        return value.replace("%", "%%")


class MySQLDialect_mysqldb(MySQLDialect):
    driver = 'mysqldb'
    supports_unicode_statements = True
    supports_sane_rowcount = True
    supports_sane_multi_rowcount = True

    supports_native_decimal = True

    default_paramstyle = 'format'
    execution_ctx_cls = MySQLExecutionContext_mysqldb
    statement_compiler = MySQLCompiler_mysqldb
    preparer = MySQLIdentifierPreparer_mysqldb

    def __init__(self, server_side_cursors=False, **kwargs):
        super(MySQLDialect_mysqldb, self).__init__(**kwargs)
        self.server_side_cursors = server_side_cursors

    @util.langhelpers.memoized_property
    def supports_server_side_cursors(self):
        try:
            cursors = __import__('MySQLdb.cursors').cursors
            self._sscursor = cursors.SSCursor
            return True
        except (ImportError, AttributeError):
            return False

    @classmethod
    def dbapi(cls):
        return __import__('MySQLdb')

    def do_executemany(self, cursor, statement, parameters, context=None):
        rowcount = cursor.executemany(statement, parameters)
        if context is not None:
            context._rowcount = rowcount

    def _check_unicode_returns(self, connection):
        # work around issue fixed in
        # https://github.com/farcepest/MySQLdb1/commit/cd44524fef63bd3fcb71947392326e9742d520e8
        # specific issue w/ the utf8_bin collation and unicode returns

        has_utf8_bin = self.server_version_info > (5, ) and \
            connection.scalar(
                "show collation where %s = 'utf8' and %s = 'utf8_bin'"
                % (
                    self.identifier_preparer.quote("Charset"),
                    self.identifier_preparer.quote("Collation")
                ))
        if has_utf8_bin:
            additional_tests = [
                sql.collate(sql.cast(
                    sql.literal_column(
                        "'test collated returns'"),
                    TEXT(charset='utf8')), "utf8_bin")
            ]
        else:
            additional_tests = []
        return super(MySQLDialect_mysqldb, self)._check_unicode_returns(
            connection, additional_tests)

    def create_connect_args(self, url):
        opts = url.translate_connect_args(database='db', username='user',
                                          password='passwd')
        opts.update(url.query)

        util.coerce_kw_type(opts, 'compress', bool)
        util.coerce_kw_type(opts, 'connect_timeout', int)
        util.coerce_kw_type(opts, 'read_timeout', int)
        util.coerce_kw_type(opts, 'client_flag', int)
        util.coerce_kw_type(opts, 'local_infile', int)
        # Note: using either of the below will cause all strings to be
        # returned as Unicode, both in raw SQL operations and with column
        # types like String and MSString.
        util.coerce_kw_type(opts, 'use_unicode', bool)
        util.coerce_kw_type(opts, 'charset', str)

        # Rich values 'cursorclass' and 'conv' are not supported via
        # query string.

        ssl = {}
        keys = ['ssl_ca', 'ssl_key', 'ssl_cert', 'ssl_capath', 'ssl_cipher']
        for key in keys:
            if key in opts:
                ssl[key[4:]] = opts[key]
                util.coerce_kw_type(ssl, key[4:], str)
                del opts[key]
        if ssl:
            opts['ssl'] = ssl

        # FOUND_ROWS must be set in CLIENT_FLAGS to enable
        # supports_sane_rowcount.
        client_flag = opts.get('client_flag', 0)
        if self.dbapi is not None:
            try:
                CLIENT_FLAGS = __import__(
                    self.dbapi.__name__ + '.constants.CLIENT'
                ).constants.CLIENT
                client_flag |= CLIENT_FLAGS.FOUND_ROWS
            except (AttributeError, ImportError):
                self.supports_sane_rowcount = False
            opts['client_flag'] = client_flag
        return [[], opts]
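    # A sketch of the resulting DBAPI arguments (URL is illustrative):
    #
    #     mysql+mysqldb://scott:tiger@localhost/test?ssl_ca=/path/ca.pem
    #
    # becomes roughly:
    #
    #     [], {'user': 'scott', 'passwd': 'tiger', 'host': 'localhost',
    #          'db': 'test', 'ssl': {'ca': '/path/ca.pem'},
    #          'client_flag': CLIENT_FLAGS.FOUND_ROWS}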

    def _get_server_version_info(self, connection):
        dbapi_con = connection.connection
        version = []
        r = re.compile(r'[.\-]')
        for n in r.split(dbapi_con.get_server_info()):
            try:
                version.append(int(n))
            except ValueError:
                version.append(n)
        return tuple(version)

    def _extract_error_code(self, exception):
        return exception.args[0]

    def _detect_charset(self, connection):
        """Sniff out the character set in use for connection results."""

        try:
            # note: the SQL here would be
            # "SHOW VARIABLES LIKE 'character_set%%'"
            cset_name = connection.connection.character_set_name
        except AttributeError:
            util.warn(
                "No 'character_set_name' can be detected with "
                "this MySQL-Python version; "
                "please upgrade to a recent version of MySQL-Python. "
                "Assuming latin1.")
            return 'latin1'
        else:
            return cset_name()

    _isolation_lookup = set(['SERIALIZABLE', 'READ UNCOMMITTED',
                             'READ COMMITTED', 'REPEATABLE READ',
                             'AUTOCOMMIT'])

    def _set_isolation_level(self, connection, level):
        if level == 'AUTOCOMMIT':
            connection.autocommit(True)
        else:
            connection.autocommit(False)
            super(MySQLDialect_mysqldb, self)._set_isolation_level(connection,
                                                                   level)


dialect = MySQLDialect_mysqldb
@ -1,254 +0,0 @@
# mysql/oursql.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""

.. dialect:: mysql+oursql
    :name: OurSQL
    :dbapi: oursql
    :connectstring: mysql+oursql://<user>:<password>@<host>[:<port>]/<dbname>
    :url: http://packages.python.org/oursql/

Unicode
-------

Please see :ref:`mysql_unicode` for current recommendations on unicode
handling.


"""

import re

from .base import (BIT, MySQLDialect, MySQLExecutionContext)
from ... import types as sqltypes, util


class _oursqlBIT(BIT):
    def result_processor(self, dialect, coltype):
        """oursql already converts mysql bits, so."""

        return None


class MySQLExecutionContext_oursql(MySQLExecutionContext):

    @property
    def plain_query(self):
        return self.execution_options.get('_oursql_plain_query', False)


class MySQLDialect_oursql(MySQLDialect):
    driver = 'oursql'

    if util.py2k:
        supports_unicode_binds = True
        supports_unicode_statements = True

    supports_native_decimal = True

    supports_sane_rowcount = True
    supports_sane_multi_rowcount = True
    execution_ctx_cls = MySQLExecutionContext_oursql

    colspecs = util.update_copy(
        MySQLDialect.colspecs,
        {
            sqltypes.Time: sqltypes.Time,
            BIT: _oursqlBIT,
        }
    )

    @classmethod
    def dbapi(cls):
        return __import__('oursql')

    def do_execute(self, cursor, statement, parameters, context=None):
        """Provide an implementation of
        *cursor.execute(statement, parameters)*."""

        if context and context.plain_query:
            cursor.execute(statement, plain_query=True)
        else:
            cursor.execute(statement, parameters)

    def do_begin(self, connection):
        connection.cursor().execute('BEGIN', plain_query=True)

    def _xa_query(self, connection, query, xid):
        if util.py2k:
            arg = connection.connection._escape_string(xid)
        else:
            charset = self._connection_charset
            arg = connection.connection._escape_string(
                xid.encode(charset)).decode(charset)
        arg = "'%s'" % arg
        connection.execution_options(
            _oursql_plain_query=True).execute(query % arg)

    # Because mysql is bad, these methods have to be
    # reimplemented to use _PlainQuery. Basically, some queries
    # refuse to return any data if they're run through
    # the parameterized query API, or refuse to be parameterized
    # in the first place.
    def do_begin_twophase(self, connection, xid):
        self._xa_query(connection, 'XA BEGIN %s', xid)

    def do_prepare_twophase(self, connection, xid):
        self._xa_query(connection, 'XA END %s', xid)
        self._xa_query(connection, 'XA PREPARE %s', xid)

    def do_rollback_twophase(self, connection, xid, is_prepared=True,
                             recover=False):
        if not is_prepared:
            self._xa_query(connection, 'XA END %s', xid)
        self._xa_query(connection, 'XA ROLLBACK %s', xid)

    def do_commit_twophase(self, connection, xid, is_prepared=True,
                           recover=False):
        if not is_prepared:
            self.do_prepare_twophase(connection, xid)
        self._xa_query(connection, 'XA COMMIT %s', xid)
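    # The resulting statement sequence for a successful two-phase commit,
    # with a made-up xid, is roughly:
    #
    #     XA BEGIN 'xid1'; ... work ...; XA END 'xid1';
    #     XA PREPARE 'xid1'; XA COMMIT 'xid1'
    #
    # each issued through the plain (non-parameterized) query path above.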

    # Q: why didn't we need all these "plain_query" overrides earlier ?
    # am i on a newer/older version of OurSQL ?
    def has_table(self, connection, table_name, schema=None):
        return MySQLDialect.has_table(
            self,
            connection.connect().execution_options(_oursql_plain_query=True),
            table_name,
            schema
        )

    def get_table_options(self, connection, table_name, schema=None, **kw):
        return MySQLDialect.get_table_options(
            self,
            connection.connect().execution_options(_oursql_plain_query=True),
            table_name,
            schema=schema,
            **kw
        )

    def get_columns(self, connection, table_name, schema=None, **kw):
        return MySQLDialect.get_columns(
            self,
            connection.connect().execution_options(_oursql_plain_query=True),
            table_name,
            schema=schema,
            **kw
        )

    def get_view_names(self, connection, schema=None, **kw):
        return MySQLDialect.get_view_names(
            self,
            connection.connect().execution_options(_oursql_plain_query=True),
            schema=schema,
            **kw
        )

    def get_table_names(self, connection, schema=None, **kw):
        return MySQLDialect.get_table_names(
            self,
            connection.connect().execution_options(_oursql_plain_query=True),
            schema
        )

    def get_schema_names(self, connection, **kw):
        return MySQLDialect.get_schema_names(
            self,
            connection.connect().execution_options(_oursql_plain_query=True),
            **kw
        )

    def initialize(self, connection):
        return MySQLDialect.initialize(
            self,
            connection.execution_options(_oursql_plain_query=True)
        )

    def _show_create_table(self, connection, table, charset=None,
                           full_name=None):
        return MySQLDialect._show_create_table(
            self,
            connection.contextual_connect(close_with_result=True).
            execution_options(_oursql_plain_query=True),
            table, charset, full_name
        )

    def is_disconnect(self, e, connection, cursor):
        if isinstance(e, self.dbapi.ProgrammingError):
            return e.errno is None and 'cursor' not in e.args[1] \
                and e.args[1].endswith('closed')
        else:
            return e.errno in (2006, 2013, 2014, 2045, 2055)

    def create_connect_args(self, url):
        opts = url.translate_connect_args(database='db', username='user',
                                          password='passwd')
        opts.update(url.query)

        util.coerce_kw_type(opts, 'port', int)
        util.coerce_kw_type(opts, 'compress', bool)
        util.coerce_kw_type(opts, 'autoping', bool)
        util.coerce_kw_type(opts, 'raise_on_warnings', bool)

        util.coerce_kw_type(opts, 'default_charset', bool)
        if opts.pop('default_charset', False):
            opts['charset'] = None
        else:
            util.coerce_kw_type(opts, 'charset', str)
        opts['use_unicode'] = opts.get('use_unicode', True)
        util.coerce_kw_type(opts, 'use_unicode', bool)

        # FOUND_ROWS must be set in CLIENT_FLAGS to enable
        # supports_sane_rowcount.
        opts.setdefault('found_rows', True)

        ssl = {}
        for key in ['ssl_ca', 'ssl_key', 'ssl_cert',
                    'ssl_capath', 'ssl_cipher']:
            if key in opts:
                ssl[key[4:]] = opts[key]
                util.coerce_kw_type(ssl, key[4:], str)
                del opts[key]
        if ssl:
            opts['ssl'] = ssl

        return [[], opts]

    def _get_server_version_info(self, connection):
        dbapi_con = connection.connection
        version = []
        r = re.compile(r'[.\-]')
        for n in r.split(dbapi_con.server_info):
            try:
                version.append(int(n))
            except ValueError:
                version.append(n)
        return tuple(version)

    def _extract_error_code(self, exception):
        return exception.errno

    def _detect_charset(self, connection):
        """Sniff out the character set in use for connection results."""

        return connection.connection.charset

    def _compat_fetchall(self, rp, charset=None):
        """oursql isn't super-broken like MySQLdb, yaaay."""
        return rp.fetchall()

    def _compat_fetchone(self, rp, charset=None):
        """oursql isn't super-broken like MySQLdb, yaaay."""
        return rp.fetchone()

    def _compat_first(self, rp, charset=None):
        return rp.first()


dialect = MySQLDialect_oursql
@ -1,70 +0,0 @@
# mysql/pymysql.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""

.. dialect:: mysql+pymysql
    :name: PyMySQL
    :dbapi: pymysql
    :connectstring: mysql+pymysql://<username>:<password>@<host>/<dbname>\
[?<options>]
    :url: http://www.pymysql.org/

Unicode
-------

Please see :ref:`mysql_unicode` for current recommendations on unicode
handling.

MySQL-Python Compatibility
--------------------------

The pymysql DBAPI is a pure Python port of the MySQL-python (MySQLdb) driver,
and targets 100% compatibility.  Most behavioral notes for MySQL-python apply
to the pymysql driver as well.

"""

from .mysqldb import MySQLDialect_mysqldb
from ...util import langhelpers, py3k


class MySQLDialect_pymysql(MySQLDialect_mysqldb):
    driver = 'pymysql'

    description_encoding = None

    # generally, these two values should be both True
    # or both False. PyMySQL unicode tests pass all the way back
    # to 0.4 either way. See [ticket:3337]
    supports_unicode_statements = True
    supports_unicode_binds = True

    def __init__(self, server_side_cursors=False, **kwargs):
        super(MySQLDialect_pymysql, self).__init__(**kwargs)
        self.server_side_cursors = server_side_cursors

    @langhelpers.memoized_property
    def supports_server_side_cursors(self):
        try:
            cursors = __import__('pymysql.cursors').cursors
            self._sscursor = cursors.SSCursor
            return True
        except (ImportError, AttributeError):
            return False

    @classmethod
    def dbapi(cls):
        return __import__('pymysql')

    if py3k:
        def _extract_error_code(self, exception):
            if isinstance(exception.args[0], Exception):
                exception = exception.args[0]
            return exception.args[0]
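            # On Python 3, PyMySQL can wrap the originating error, e.g.
            # err.args[0] may itself be OperationalError(2006, '... gone
            # away'), so the code is read from the innermost args.
            # (Sketch; the exact nesting depends on the PyMySQL version.)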

dialect = MySQLDialect_pymysql
@ -1,79 +0,0 @@
# mysql/pyodbc.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""


.. dialect:: mysql+pyodbc
    :name: PyODBC
    :dbapi: pyodbc
    :connectstring: mysql+pyodbc://<username>:<password>@<dsnname>
    :url: http://pypi.python.org/pypi/pyodbc/

.. note:: The PyODBC for MySQL dialect is not well supported, and
    is subject to unresolved character encoding issues
    which exist within the current ODBC drivers available.
    (see http://code.google.com/p/pyodbc/issues/detail?id=25).
    Other dialects for MySQL are recommended.

"""

from .base import MySQLDialect, MySQLExecutionContext
from ...connectors.pyodbc import PyODBCConnector
from ... import util
import re


class MySQLExecutionContext_pyodbc(MySQLExecutionContext):

    def get_lastrowid(self):
        cursor = self.create_cursor()
        cursor.execute("SELECT LAST_INSERT_ID()")
        lastrowid = cursor.fetchone()[0]
        cursor.close()
        return lastrowid


class MySQLDialect_pyodbc(PyODBCConnector, MySQLDialect):
    supports_unicode_statements = False
    execution_ctx_cls = MySQLExecutionContext_pyodbc

    pyodbc_driver_name = "MySQL"

    def __init__(self, **kw):
        # deal with http://code.google.com/p/pyodbc/issues/detail?id=25
        kw.setdefault('convert_unicode', True)
        super(MySQLDialect_pyodbc, self).__init__(**kw)

    def _detect_charset(self, connection):
        """Sniff out the character set in use for connection results."""

        # Prefer 'character_set_results' for the current connection over the
        # value in the driver.  SET NAMES or individual variable SETs will
        # change the charset without updating the driver's view of the world.
        #
        # If it's decided that issuing that sort of SQL leaves you SOL, then
        # this can prefer the driver value.
        rs = connection.execute("SHOW VARIABLES LIKE 'character_set%%'")
        opts = dict([(row[0], row[1]) for row in self._compat_fetchall(rs)])
        for key in ('character_set_connection', 'character_set'):
            if opts.get(key, None):
                return opts[key]

        util.warn("Could not detect the connection character set. "
                  "Assuming latin1.")
        return 'latin1'

    def _extract_error_code(self, exception):
        m = re.compile(r"\((\d+)\)").search(str(exception.args))
        # guard against messages carrying no "(errno)"; an unconditional
        # m.group(1) would raise AttributeError on a failed match
        c = m.group(1) if m else None
        if c:
            return int(c)
        else:
            return None
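    # A sketch of the extraction (exception text is illustrative):
    #
    #     >>> re.compile(r"\((\d+)\)").search(
    #     ...     "('HY000', '(2006) server has gone away')").group(1)
    #     '2006'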

dialect = MySQLDialect_pyodbc
@ -1,450 +0,0 @@
# mysql/reflection.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

import re
from ... import log, util
from ... import types as sqltypes
from .enumerated import _EnumeratedValues, SET
from .types import DATETIME, TIME, TIMESTAMP


class ReflectedState(object):
    """Stores raw information about a SHOW CREATE TABLE statement."""

    def __init__(self):
        self.columns = []
        self.table_options = {}
        self.table_name = None
        self.keys = []
        self.constraints = []


@log.class_logger
class MySQLTableDefinitionParser(object):
    """Parses the results of a SHOW CREATE TABLE statement."""

    def __init__(self, dialect, preparer):
        self.dialect = dialect
        self.preparer = preparer
        self._prep_regexes()

    def parse(self, show_create, charset):
        state = ReflectedState()
        state.charset = charset
        for line in re.split(r'\r?\n', show_create):
            if line.startswith('  ' + self.preparer.initial_quote):
                self._parse_column(line, state)
            # a regular table options line
            elif line.startswith(') '):
                self._parse_table_options(line, state)
            # an ANSI-mode table options line
            elif line == ')':
                pass
            elif line.startswith('CREATE '):
                self._parse_table_name(line, state)
            # Not present in real reflection, but may be if
            # loading from a file.
            elif not line:
                pass
            else:
                type_, spec = self._parse_constraints(line)
                if type_ is None:
                    util.warn("Unknown schema content: %r" % line)
                elif type_ == 'key':
                    state.keys.append(spec)
                elif type_ == 'constraint':
                    state.constraints.append(spec)
                else:
                    pass
        return state
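    # The dispatch above expects output shaped like this abridged sketch:
    #
    #     CREATE TABLE `t` (
    #       `id` int(11) NOT NULL AUTO_INCREMENT,
    #       PRIMARY KEY (`id`),
    #       CONSTRAINT `fk` FOREIGN KEY (`id`) REFERENCES `p` (`id`)
    #     ) ENGINE=InnoDB DEFAULT CHARSET=utf8
    #
    # column lines go to _parse_column(), KEY/CONSTRAINT lines to
    # _parse_constraints(), and the closing ") ..." line to
    # _parse_table_options().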

    def _parse_constraints(self, line):
        """Parse a KEY or CONSTRAINT line.

        :param line: A line of SHOW CREATE TABLE output
        """

        # KEY
        m = self._re_key.match(line)
        if m:
            spec = m.groupdict()
            # convert columns into name, length pairs
            spec['columns'] = self._parse_keyexprs(spec['columns'])
            return 'key', spec

        # CONSTRAINT
        m = self._re_constraint.match(line)
        if m:
            spec = m.groupdict()
            spec['table'] = \
                self.preparer.unformat_identifiers(spec['table'])
            spec['local'] = [c[0]
                             for c in self._parse_keyexprs(spec['local'])]
            spec['foreign'] = [c[0]
                               for c in self._parse_keyexprs(spec['foreign'])]
            return 'constraint', spec

        # PARTITION and SUBPARTITION
        m = self._re_partition.match(line)
        if m:
            # Punt!
            return 'partition', line

        # No match.
        return (None, line)

    def _parse_table_name(self, line, state):
        """Extract the table name.

        :param line: The first line of SHOW CREATE TABLE
        """

        regex, cleanup = self._pr_name
        m = regex.match(line)
        if m:
            state.table_name = cleanup(m.group('name'))

    def _parse_table_options(self, line, state):
        """Build a dictionary of all reflected table-level options.

        :param line: The final line of SHOW CREATE TABLE output.
        """

        options = {}

        if not line or line == ')':
            pass

        else:
            rest_of_line = line[:]
            for regex, cleanup in self._pr_options:
                m = regex.search(rest_of_line)
                if not m:
                    continue
                directive, value = m.group('directive'), m.group('val')
                if cleanup:
                    value = cleanup(value)
                options[directive.lower()] = value
                rest_of_line = regex.sub('', rest_of_line)

        for nope in ('auto_increment', 'data directory', 'index directory'):
            options.pop(nope, None)

        for opt, val in options.items():
            state.table_options['%s_%s' % (self.dialect.name, opt)] = val

    def _parse_column(self, line, state):
        """Extract column details.

        Falls back to a 'minimal support' variant if full parse fails.

        :param line: Any column-bearing line from SHOW CREATE TABLE
        """

        spec = None
        m = self._re_column.match(line)
        if m:
            spec = m.groupdict()
            spec['full'] = True
        else:
            m = self._re_column_loose.match(line)
            if m:
                spec = m.groupdict()
                spec['full'] = False
        if not spec:
            util.warn("Unknown column definition %r" % line)
            return
        if not spec['full']:
            util.warn("Incomplete reflection of column definition %r" % line)

        name, type_, args = spec['name'], spec['coltype'], spec['arg']

        try:
            col_type = self.dialect.ischema_names[type_]
        except KeyError:
            util.warn("Did not recognize type '%s' of column '%s'" %
                      (type_, name))
            col_type = sqltypes.NullType

        # Column type positional arguments eg. varchar(32)
        if args is None or args == '':
            type_args = []
        elif args[0] == "'" and args[-1] == "'":
            type_args = self._re_csv_str.findall(args)
        else:
            type_args = [int(v) for v in self._re_csv_int.findall(args)]

        # Column type keyword options
        type_kw = {}

        if issubclass(col_type, (DATETIME, TIME, TIMESTAMP)):
            if type_args:
                type_kw['fsp'] = type_args.pop(0)

        for kw in ('unsigned', 'zerofill'):
            if spec.get(kw, False):
                type_kw[kw] = True
        for kw in ('charset', 'collate'):
            if spec.get(kw, False):
                type_kw[kw] = spec[kw]
        if issubclass(col_type, _EnumeratedValues):
            type_args = _EnumeratedValues._strip_values(type_args)

            if issubclass(col_type, SET) and '' in type_args:
                type_kw['retrieve_as_bitwise'] = True

        type_instance = col_type(*type_args, **type_kw)

        col_kw = {}

        # NOT NULL
        col_kw['nullable'] = True
        # this can be "NULL" in the case of TIMESTAMP
        if spec.get('notnull', False) == 'NOT NULL':
            col_kw['nullable'] = False

        # AUTO_INCREMENT
        if spec.get('autoincr', False):
            col_kw['autoincrement'] = True
        elif issubclass(col_type, sqltypes.Integer):
            col_kw['autoincrement'] = False

        # DEFAULT
        default = spec.get('default', None)

        if default == 'NULL':
            # eliminates the need to deal with this later.
            default = None

        col_d = dict(name=name, type=type_instance, default=default)
        col_d.update(col_kw)
        state.columns.append(col_d)

    def _describe_to_create(self, table_name, columns):
        """Re-format DESCRIBE output as a SHOW CREATE TABLE string.

        DESCRIBE is a much simpler reflection and is sufficient for
        reflecting views for runtime use.  This method formats DDL
        for columns only- keys are omitted.

        :param columns: A sequence of DESCRIBE or SHOW COLUMNS 6-tuples.
          SHOW FULL COLUMNS FROM rows must be rearranged for use with
          this function.
        """

        buffer = []
        for row in columns:
            (name, col_type, nullable, default, extra) = \
                [row[i] for i in (0, 1, 2, 4, 5)]

            line = [' ']
            line.append(self.preparer.quote_identifier(name))
            line.append(col_type)
            if not nullable:
                line.append('NOT NULL')
            if default:
                if 'auto_increment' in default:
                    pass
                elif (col_type.startswith('timestamp') and
                        default.startswith('C')):
                    line.append('DEFAULT')
                    line.append(default)
                elif default == 'NULL':
                    line.append('DEFAULT')
                    line.append(default)
                else:
                    line.append('DEFAULT')
                    line.append("'%s'" % default.replace("'", "''"))
            if extra:
                line.append(extra)

            buffer.append(' '.join(line))

        return ''.join([('CREATE TABLE %s (\n' %
                         self.preparer.quote_identifier(table_name)),
                        ',\n'.join(buffer),
                        '\n) '])

    def _parse_keyexprs(self, identifiers):
        """Unpack '"col"(2),"col" ASC'-ish strings into components."""

        return self._re_keyexprs.findall(identifiers)
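    # A sketch of the unpacking (identifier string is illustrative;
    # "parser" stands for a MySQLTableDefinitionParser instance):
    #
    #     >>> parser._parse_keyexprs('`col`(2),`col2`,`col3`')
    #     [('col', '2'), ('col2', ''), ('col3', '')]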
|
||||
|
||||
def _prep_regexes(self):
|
||||
"""Pre-compile regular expressions."""
|
||||
|
||||
self._re_columns = []
|
||||
self._pr_options = []
|
||||
|
||||
_final = self.preparer.final_quote
|
||||
|
||||
quotes = dict(zip(('iq', 'fq', 'esc_fq'),
|
||||
[re.escape(s) for s in
|
||||
(self.preparer.initial_quote,
|
||||
_final,
|
||||
self.preparer._escape_identifier(_final))]))
|
||||
|
||||
self._pr_name = _pr_compile(
|
||||
r'^CREATE (?:\w+ +)?TABLE +'
|
||||
r'%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +\($' % quotes,
|
||||
self.preparer._unescape_identifier)
|
||||
|
||||
# `col`,`col2`(32),`col3`(15) DESC
|
||||
#
|
||||
# Note: ASC and DESC aren't reflected, so we'll punt...
|
||||
self._re_keyexprs = _re_compile(
|
||||
r'(?:'
|
||||
r'(?:%(iq)s((?:%(esc_fq)s|[^%(fq)s])+)%(fq)s)'
|
||||
r'(?:\((\d+)\))?(?=\,|$))+' % quotes)
|
||||
|
||||
# 'foo' or 'foo','bar' or 'fo,o','ba''a''r'
|
||||
self._re_csv_str = _re_compile(r'\x27(?:\x27\x27|[^\x27])*\x27')
|
||||
|
||||
# 123 or 123,456
|
||||
self._re_csv_int = _re_compile(r'\d+')
|
||||
|
||||
# `colname` <type> [type opts]
|
||||
# (NOT NULL | NULL)
|
||||
# DEFAULT ('value' | CURRENT_TIMESTAMP...)
|
||||
# COMMENT 'comment'
|
||||
# COLUMN_FORMAT (FIXED|DYNAMIC|DEFAULT)
|
||||
# STORAGE (DISK|MEMORY)
|
||||
self._re_column = _re_compile(
|
||||
r' '
|
||||
r'%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +'
|
||||
r'(?P<coltype>\w+)'
|
||||
r'(?:\((?P<arg>(?:\d+|\d+,\d+|'
|
||||
r'(?:\x27(?:\x27\x27|[^\x27])*\x27,?)+))\))?'
|
||||
r'(?: +(?P<unsigned>UNSIGNED))?'
|
||||
r'(?: +(?P<zerofill>ZEROFILL))?'
|
||||
r'(?: +CHARACTER SET +(?P<charset>[\w_]+))?'
|
||||
r'(?: +COLLATE +(?P<collate>[\w_]+))?'
|
||||
r'(?: +(?P<notnull>(?:NOT )?NULL))?'
|
||||
r'(?: +DEFAULT +(?P<default>'
|
||||
r'(?:NULL|\x27(?:\x27\x27|[^\x27])*\x27|\w+'
|
||||
r'(?: +ON UPDATE \w+)?)'
|
||||
r'))?'
|
||||
r'(?: +(?P<autoincr>AUTO_INCREMENT))?'
|
||||
r'(?: +COMMENT +(P<comment>(?:\x27\x27|[^\x27])+))?'
|
||||
r'(?: +COLUMN_FORMAT +(?P<colfmt>\w+))?'
|
||||
r'(?: +STORAGE +(?P<storage>\w+))?'
|
||||
r'(?: +(?P<extra>.*))?'
|
||||
r',?$'
|
||||
% quotes
|
||||
)
|
||||
|
||||
# Fallback, try to parse as little as possible
|
||||
self._re_column_loose = _re_compile(
|
||||
r' '
|
||||
r'%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +'
|
||||
r'(?P<coltype>\w+)'
|
||||
r'(?:\((?P<arg>(?:\d+|\d+,\d+|\x27(?:\x27\x27|[^\x27])+\x27))\))?'
|
||||
r'.*?(?P<notnull>(?:NOT )NULL)?'
|
||||
% quotes
|
||||
)
|
||||
|
||||
# (PRIMARY|UNIQUE|FULLTEXT|SPATIAL) INDEX `name` (USING (BTREE|HASH))?
|
||||
# (`col` (ASC|DESC)?, `col` (ASC|DESC)?)
|
||||
# KEY_BLOCK_SIZE size | WITH PARSER name
|
||||
self._re_key = _re_compile(
|
||||
r' '
|
||||
r'(?:(?P<type>\S+) )?KEY'
|
||||
r'(?: +%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s)?'
|
||||
r'(?: +USING +(?P<using_pre>\S+))?'
|
||||
r' +\((?P<columns>.+?)\)'
|
||||
r'(?: +USING +(?P<using_post>\S+))?'
|
||||
r'(?: +KEY_BLOCK_SIZE *[ =]? *(?P<keyblock>\S+))?'
|
||||
r'(?: +WITH PARSER +(?P<parser>\S+))?'
|
||||
r'(?: +COMMENT +(?P<comment>(\x27\x27|\x27([^\x27])*?\x27)+))?'
|
||||
r',?$'
|
||||
% quotes
|
||||
)
|
||||
|
||||
# CONSTRAINT `name` FOREIGN KEY (`local_col`)
|
||||
# REFERENCES `remote` (`remote_col`)
|
||||
# MATCH FULL | MATCH PARTIAL | MATCH SIMPLE
|
||||
# ON DELETE CASCADE ON UPDATE RESTRICT
|
||||
#
|
||||
# unique constraints come back as KEYs
|
||||
kw = quotes.copy()
|
||||
kw['on'] = 'RESTRICT|CASCADE|SET NULL|NOACTION'
|
||||
self._re_constraint = _re_compile(
|
||||
r' '
|
||||
r'CONSTRAINT +'
|
||||
r'%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +'
|
||||
r'FOREIGN KEY +'
|
||||
r'\((?P<local>[^\)]+?)\) REFERENCES +'
|
||||
r'(?P<table>%(iq)s[^%(fq)s]+%(fq)s'
|
||||
r'(?:\.%(iq)s[^%(fq)s]+%(fq)s)?) +'
|
||||
r'\((?P<foreign>[^\)]+?)\)'
|
||||
r'(?: +(?P<match>MATCH \w+))?'
|
||||
r'(?: +ON DELETE (?P<ondelete>%(on)s))?'
|
||||
r'(?: +ON UPDATE (?P<onupdate>%(on)s))?'
|
||||
% kw
|
||||
)
|
||||
|
||||
# PARTITION
|
||||
#
|
||||
# punt!
|
||||
self._re_partition = _re_compile(r'(?:.*)(?:SUB)?PARTITION(?:.*)')
|
||||
|
||||
# Table-level options (COLLATE, ENGINE, etc.)
|
||||
# Do the string options first, since they have quoted
|
||||
# strings we need to get rid of.
|
||||
for option in _options_of_type_string:
|
||||
self._add_option_string(option)
|
||||
|
||||
for option in ('ENGINE', 'TYPE', 'AUTO_INCREMENT',
|
||||
'AVG_ROW_LENGTH', 'CHARACTER SET',
|
||||
'DEFAULT CHARSET', 'CHECKSUM',
|
||||
'COLLATE', 'DELAY_KEY_WRITE', 'INSERT_METHOD',
|
||||
'MAX_ROWS', 'MIN_ROWS', 'PACK_KEYS', 'ROW_FORMAT',
|
||||
'KEY_BLOCK_SIZE'):
|
||||
self._add_option_word(option)
|
||||
|
||||
self._add_option_regex('UNION', r'\([^\)]+\)')
|
||||
self._add_option_regex('TABLESPACE', r'.*? STORAGE DISK')
|
||||
self._add_option_regex(
|
||||
'RAID_TYPE',
|
||||
r'\w+\s+RAID_CHUNKS\s*\=\s*\w+RAID_CHUNKSIZE\s*=\s*\w+')
|
||||
|
||||
_optional_equals = r'(?:\s*(?:=\s*)|\s+)'
|
||||
|
||||
def _add_option_string(self, directive):
|
||||
regex = (r'(?P<directive>%s)%s'
|
||||
r"'(?P<val>(?:[^']|'')*?)'(?!')" %
|
||||
(re.escape(directive), self._optional_equals))
|
||||
self._pr_options.append(_pr_compile(
|
||||
regex, lambda v: v.replace("\\\\", "\\").replace("''", "'")
|
||||
))
|
||||
|
||||
def _add_option_word(self, directive):
|
||||
regex = (r'(?P<directive>%s)%s'
|
||||
r'(?P<val>\w+)' %
|
||||
(re.escape(directive), self._optional_equals))
|
||||
self._pr_options.append(_pr_compile(regex))
|
||||
|
||||
def _add_option_regex(self, directive, regex):
|
||||
regex = (r'(?P<directive>%s)%s'
|
||||
r'(?P<val>%s)' %
|
||||
(re.escape(directive), self._optional_equals, regex))
|
||||
self._pr_options.append(_pr_compile(regex))
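# Hedged, standalone sketch of the option machinery above: a directive
# regex built the way _add_option_string builds one, paired with the
# cleanup callable that collapses doubled quotes.
import re

_directive, _opt_equals = 'COMMENT', r'(?:\s*(?:=\s*)|\s+)'
_regex = re.compile(
    r"(?P<directive>%s)%s'(?P<val>(?:[^']|'')*?)'(?!')"
    % (re.escape(_directive), _opt_equals),
    re.I | re.UNICODE)

def _cleanup(v):
    return v.replace("\\\\", "\\").replace("''", "'")

_m = _regex.search("ENGINE=InnoDB COMMENT='it''s a table'")
assert _cleanup(_m.group('val')) == "it's a table"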
|
||||
|
||||
_options_of_type_string = ('COMMENT', 'DATA DIRECTORY', 'INDEX DIRECTORY',
|
||||
'PASSWORD', 'CONNECTION')
|
||||
|
||||
|
||||
def _pr_compile(regex, cleanup=None):
|
||||
"""Prepare a 2-tuple of compiled regex and callable."""
|
||||
|
||||
return (_re_compile(regex), cleanup)
|
||||
|
||||
|
||||
def _re_compile(regex):
|
||||
"""Compile a string to regex, I and UNICODE."""
|
||||
|
||||
return re.compile(regex, re.I | re.UNICODE)
|
@@ -1,766 +0,0 @@
|
||||
# mysql/types.py
|
||||
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
import datetime
|
||||
from ... import exc, util
|
||||
from ... import types as sqltypes
|
||||
|
||||
|
||||
class _NumericType(object):
|
||||
"""Base for MySQL numeric types.
|
||||
|
||||
This is the base both for NUMERIC as well as INTEGER, hence
|
||||
it's a mixin.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, unsigned=False, zerofill=False, **kw):
|
||||
self.unsigned = unsigned
|
||||
self.zerofill = zerofill
|
||||
super(_NumericType, self).__init__(**kw)
|
||||
|
||||
def __repr__(self):
|
||||
return util.generic_repr(self,
|
||||
to_inspect=[_NumericType, sqltypes.Numeric])
|
||||
|
||||
|
||||
class _FloatType(_NumericType, sqltypes.Float):
|
||||
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
|
||||
if isinstance(self, (REAL, DOUBLE)) and \
|
||||
(
|
||||
(precision is None and scale is not None) or
|
||||
(precision is not None and scale is None)
|
||||
):
|
||||
raise exc.ArgumentError(
|
||||
"You must specify both precision and scale or omit "
|
||||
"both altogether.")
|
||||
super(_FloatType, self).__init__(
|
||||
precision=precision, asdecimal=asdecimal, **kw)
|
||||
self.scale = scale
|
||||
|
||||
def __repr__(self):
|
||||
return util.generic_repr(self, to_inspect=[_FloatType,
|
||||
_NumericType,
|
||||
sqltypes.Float])
|
||||
|
||||
|
||||
class _IntegerType(_NumericType, sqltypes.Integer):
|
||||
def __init__(self, display_width=None, **kw):
|
||||
self.display_width = display_width
|
||||
super(_IntegerType, self).__init__(**kw)
|
||||
|
||||
def __repr__(self):
|
||||
return util.generic_repr(self, to_inspect=[_IntegerType,
|
||||
_NumericType,
|
||||
sqltypes.Integer])
|
||||
|
||||
|
||||
class _StringType(sqltypes.String):
|
||||
"""Base for MySQL string types."""
|
||||
|
||||
def __init__(self, charset=None, collation=None,
|
||||
ascii=False, binary=False, unicode=False,
|
||||
national=False, **kw):
|
||||
self.charset = charset
|
||||
|
||||
# allow collate= or collation=
|
||||
kw.setdefault('collation', kw.pop('collate', collation))
|
||||
|
||||
self.ascii = ascii
|
||||
self.unicode = unicode
|
||||
self.binary = binary
|
||||
self.national = national
|
||||
super(_StringType, self).__init__(**kw)
|
||||
|
||||
def __repr__(self):
|
||||
return util.generic_repr(self,
|
||||
to_inspect=[_StringType, sqltypes.String])
|
||||
|
||||
|
||||
class _MatchType(sqltypes.Float, sqltypes.MatchType):
|
||||
def __init__(self, **kw):
|
||||
# TODO: float arguments?
|
||||
sqltypes.Float.__init__(self)
|
||||
sqltypes.MatchType.__init__(self)
|
||||
|
||||
|
||||
|
||||
class NUMERIC(_NumericType, sqltypes.NUMERIC):
|
||||
"""MySQL NUMERIC type."""
|
||||
|
||||
__visit_name__ = 'NUMERIC'
|
||||
|
||||
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
|
||||
"""Construct a NUMERIC.
|
||||
|
||||
:param precision: Total digits in this number. If scale and precision
|
||||
are both None, values are stored to limits allowed by the server.
|
||||
|
||||
:param scale: The number of digits after the decimal point.
|
||||
|
||||
:param unsigned: a boolean, optional.
|
||||
|
||||
:param zerofill: Optional. If true, values will be stored as strings
|
||||
left-padded with zeros. Note that this does not affect the values
|
||||
returned by the underlying database API, which continue to be
|
||||
numeric.
|
||||
|
||||
"""
|
||||
super(NUMERIC, self).__init__(precision=precision,
|
||||
scale=scale, asdecimal=asdecimal, **kw)
|
||||
|
||||
|
||||
class DECIMAL(_NumericType, sqltypes.DECIMAL):
|
||||
"""MySQL DECIMAL type."""
|
||||
|
||||
__visit_name__ = 'DECIMAL'
|
||||
|
||||
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
|
||||
"""Construct a DECIMAL.
|
||||
|
||||
:param precision: Total digits in this number. If scale and precision
|
||||
are both None, values are stored to limits allowed by the server.
|
||||
|
||||
:param scale: The number of digits after the decimal point.
|
||||
|
||||
:param unsigned: a boolean, optional.
|
||||
|
||||
:param zerofill: Optional. If true, values will be stored as strings
|
||||
left-padded with zeros. Note that this does not affect the values
|
||||
returned by the underlying database API, which continue to be
|
||||
numeric.
|
||||
|
||||
"""
|
||||
super(DECIMAL, self).__init__(precision=precision, scale=scale,
|
||||
asdecimal=asdecimal, **kw)
|
||||
|
||||
|
||||
class DOUBLE(_FloatType):
|
||||
"""MySQL DOUBLE type."""
|
||||
|
||||
__visit_name__ = 'DOUBLE'
|
||||
|
||||
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
|
||||
"""Construct a DOUBLE.
|
||||
|
||||
.. note::
|
||||
|
||||
The :class:`.DOUBLE` type by default converts from float
|
||||
to Decimal, using a truncation that defaults to 10 digits.
|
||||
Specify either ``scale=n`` or ``decimal_return_scale=n`` in order
|
||||
to change this scale, or ``asdecimal=False`` to return values
|
||||
directly as Python floating points.
|
||||
|
||||
:param precision: Total digits in this number. If scale and precision
|
||||
are both None, values are stored to limits allowed by the server.
|
||||
|
||||
:param scale: The number of digits after the decimal point.
|
||||
|
||||
:param unsigned: a boolean, optional.
|
||||
|
||||
:param zerofill: Optional. If true, values will be stored as strings
|
||||
left-padded with zeros. Note that this does not affect the values
|
||||
returned by the underlying database API, which continue to be
|
||||
numeric.
|
||||
|
||||
"""
|
||||
super(DOUBLE, self).__init__(precision=precision, scale=scale,
|
||||
asdecimal=asdecimal, **kw)
|
||||
|
||||
|
||||
class REAL(_FloatType, sqltypes.REAL):
|
||||
"""MySQL REAL type."""
|
||||
|
||||
__visit_name__ = 'REAL'
|
||||
|
||||
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
|
||||
"""Construct a REAL.
|
||||
|
||||
.. note::
|
||||
|
||||
The :class:`.REAL` type by default converts from float
|
||||
to Decimal, using a truncation that defaults to 10 digits.
|
||||
Specify either ``scale=n`` or ``decimal_return_scale=n`` in order
|
||||
to change this scale, or ``asdecimal=False`` to return values
|
||||
directly as Python floating points.
|
||||
|
||||
:param precision: Total digits in this number. If scale and precision
|
||||
are both None, values are stored to limits allowed by the server.
|
||||
|
||||
:param scale: The number of digits after the decimal point.
|
||||
|
||||
:param unsigned: a boolean, optional.
|
||||
|
||||
:param zerofill: Optional. If true, values will be stored as strings
|
||||
left-padded with zeros. Note that this does not affect the values
|
||||
returned by the underlying database API, which continue to be
|
||||
numeric.
|
||||
|
||||
"""
|
||||
super(REAL, self).__init__(precision=precision, scale=scale,
|
||||
asdecimal=asdecimal, **kw)
|
||||
|
||||
|
||||
class FLOAT(_FloatType, sqltypes.FLOAT):
|
||||
"""MySQL FLOAT type."""
|
||||
|
||||
__visit_name__ = 'FLOAT'
|
||||
|
||||
def __init__(self, precision=None, scale=None, asdecimal=False, **kw):
|
||||
"""Construct a FLOAT.
|
||||
|
||||
:param precision: Total digits in this number. If scale and precision
|
||||
are both None, values are stored to limits allowed by the server.
|
||||
|
||||
:param scale: The number of digits after the decimal point.
|
||||
|
||||
:param unsigned: a boolean, optional.
|
||||
|
||||
:param zerofill: Optional. If true, values will be stored as strings
|
||||
left-padded with zeros. Note that this does not affect the values
|
||||
returned by the underlying database API, which continue to be
|
||||
numeric.
|
||||
|
||||
"""
|
||||
super(FLOAT, self).__init__(precision=precision, scale=scale,
|
||||
asdecimal=asdecimal, **kw)
|
||||
|
||||
def bind_processor(self, dialect):
|
||||
return None
|
||||
|
||||
|
||||
class INTEGER(_IntegerType, sqltypes.INTEGER):
|
||||
"""MySQL INTEGER type."""
|
||||
|
||||
__visit_name__ = 'INTEGER'
|
||||
|
||||
def __init__(self, display_width=None, **kw):
|
||||
"""Construct an INTEGER.
|
||||
|
||||
:param display_width: Optional, maximum display width for this number.
|
||||
|
||||
:param unsigned: a boolean, optional.
|
||||
|
||||
:param zerofill: Optional. If true, values will be stored as strings
|
||||
left-padded with zeros. Note that this does not affect the values
|
||||
returned by the underlying database API, which continue to be
|
||||
numeric.
|
||||
|
||||
"""
|
||||
super(INTEGER, self).__init__(display_width=display_width, **kw)
|
||||
|
||||
|
||||
class BIGINT(_IntegerType, sqltypes.BIGINT):
|
||||
"""MySQL BIGINTEGER type."""
|
||||
|
||||
__visit_name__ = 'BIGINT'
|
||||
|
||||
def __init__(self, display_width=None, **kw):
|
||||
"""Construct a BIGINTEGER.
|
||||
|
||||
:param display_width: Optional, maximum display width for this number.
|
||||
|
||||
:param unsigned: a boolean, optional.
|
||||
|
||||
:param zerofill: Optional. If true, values will be stored as strings
|
||||
left-padded with zeros. Note that this does not affect the values
|
||||
returned by the underlying database API, which continue to be
|
||||
numeric.
|
||||
|
||||
"""
|
||||
super(BIGINT, self).__init__(display_width=display_width, **kw)
|
||||
|
||||
|
||||
class MEDIUMINT(_IntegerType):
|
||||
"""MySQL MEDIUMINTEGER type."""
|
||||
|
||||
__visit_name__ = 'MEDIUMINT'
|
||||
|
||||
def __init__(self, display_width=None, **kw):
|
||||
"""Construct a MEDIUMINTEGER
|
||||
|
||||
:param display_width: Optional, maximum display width for this number.
|
||||
|
||||
:param unsigned: a boolean, optional.
|
||||
|
||||
:param zerofill: Optional. If true, values will be stored as strings
|
||||
left-padded with zeros. Note that this does not affect the values
|
||||
returned by the underlying database API, which continue to be
|
||||
numeric.
|
||||
|
||||
"""
|
||||
super(MEDIUMINT, self).__init__(display_width=display_width, **kw)
|
||||
|
||||
|
||||
class TINYINT(_IntegerType):
|
||||
"""MySQL TINYINT type."""
|
||||
|
||||
__visit_name__ = 'TINYINT'
|
||||
|
||||
def __init__(self, display_width=None, **kw):
|
||||
"""Construct a TINYINT.
|
||||
|
||||
:param display_width: Optional, maximum display width for this number.
|
||||
|
||||
:param unsigned: a boolean, optional.
|
||||
|
||||
:param zerofill: Optional. If true, values will be stored as strings
|
||||
left-padded with zeros. Note that this does not affect the values
|
||||
returned by the underlying database API, which continue to be
|
||||
numeric.
|
||||
|
||||
"""
|
||||
super(TINYINT, self).__init__(display_width=display_width, **kw)
|
||||
|
||||
|
||||
class SMALLINT(_IntegerType, sqltypes.SMALLINT):
|
||||
"""MySQL SMALLINTEGER type."""
|
||||
|
||||
__visit_name__ = 'SMALLINT'
|
||||
|
||||
def __init__(self, display_width=None, **kw):
|
||||
"""Construct a SMALLINTEGER.
|
||||
|
||||
:param display_width: Optional, maximum display width for this number.
|
||||
|
||||
:param unsigned: a boolean, optional.
|
||||
|
||||
:param zerofill: Optional. If true, values will be stored as strings
|
||||
left-padded with zeros. Note that this does not affect the values
|
||||
returned by the underlying database API, which continue to be
|
||||
numeric.
|
||||
|
||||
"""
|
||||
super(SMALLINT, self).__init__(display_width=display_width, **kw)
|
||||
|
||||
|
||||
class BIT(sqltypes.TypeEngine):
|
||||
"""MySQL BIT type.
|
||||
|
||||
This type is for MySQL 5.0.3 or greater for MyISAM, and 5.0.5 or greater
|
||||
for MEMORY, InnoDB and BDB. For older versions, use a
|
||||
MSTinyInteger() type.
|
||||
|
||||
"""
|
||||
|
||||
__visit_name__ = 'BIT'
|
||||
|
||||
def __init__(self, length=None):
|
||||
"""Construct a BIT.
|
||||
|
||||
:param length: Optional, number of bits.
|
||||
|
||||
"""
|
||||
self.length = length
|
||||
|
||||
def result_processor(self, dialect, coltype):
|
||||
"""Convert a MySQL's 64 bit, variable length binary string to a long.
|
||||
|
||||
TODO: this is MySQLdb / pyodbc specific. OurSQL and mysqlconnector
|
||||
already do this, so this logic should be moved to those dialects.
|
||||
|
||||
"""
|
||||
|
||||
def process(value):
|
||||
if value is not None:
|
||||
v = 0
|
||||
for i in value:
|
||||
if not isinstance(i, int):
|
||||
i = ord(i) # convert byte to int on Python 2
|
||||
v = v << 8 | i
|
||||
return v
|
||||
return value
|
||||
return process
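# Quick standalone check of the fold above (an illustration, not part
# of the original source): MySQL's b'101' column value arrives as the
# byte string b'\x05' and comes out as the integer 5.
def _bits_to_int(value):
    v = 0
    for i in value:
        if not isinstance(i, int):
            i = ord(i)  # Python 2 iterates byte strings as 1-char strings
        v = v << 8 | i
    return v

assert _bits_to_int(b'\x05') == 5
assert _bits_to_int(b'\x01\x00') == 256  # multi-byte values fold big-endian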
|
||||
|
||||
|
||||
class TIME(sqltypes.TIME):
|
||||
"""MySQL TIME type. """
|
||||
|
||||
__visit_name__ = 'TIME'
|
||||
|
||||
def __init__(self, timezone=False, fsp=None):
|
||||
"""Construct a MySQL TIME type.
|
||||
|
||||
:param timezone: not used by the MySQL dialect.
|
||||
:param fsp: fractional seconds precision value.
|
||||
MySQL 5.6 supports storage of fractional seconds;
|
||||
this parameter will be used when emitting DDL
|
||||
for the TIME type.
|
||||
|
||||
.. note::
|
||||
|
||||
DBAPI driver support for fractional seconds may
|
||||
be limited; current support includes
|
||||
MySQL Connector/Python.
|
||||
|
||||
.. versionadded:: 0.8 The MySQL-specific TIME
|
||||
type as well as fractional seconds support.
|
||||
|
||||
"""
|
||||
super(TIME, self).__init__(timezone=timezone)
|
||||
self.fsp = fsp
|
||||
|
||||
def result_processor(self, dialect, coltype):
|
||||
time = datetime.time
|
||||
|
||||
def process(value):
|
||||
# convert from a timedelta value
|
||||
if value is not None:
|
||||
microseconds = value.microseconds
|
||||
seconds = value.seconds
|
||||
minutes = seconds // 60
|
||||
return time(minutes // 60,
|
||||
minutes % 60,
|
||||
seconds - minutes * 60,
|
||||
microsecond=microseconds)
|
||||
else:
|
||||
return None
|
||||
return process
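# Standalone sketch of the conversion performed above: MySQL drivers
# commonly return TIME columns as datetime.timedelta, which has no
# time-of-day representation of its own.
import datetime

_delta = datetime.timedelta(hours=13, minutes=5, seconds=30, microseconds=250)
_minutes = _delta.seconds // 60
assert datetime.time(_minutes // 60, _minutes % 60,
                     _delta.seconds - _minutes * 60,
                     microsecond=_delta.microseconds) == \
    datetime.time(13, 5, 30, 250)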
|
||||
|
||||
|
||||
class TIMESTAMP(sqltypes.TIMESTAMP):
|
||||
"""MySQL TIMESTAMP type.
|
||||
|
||||
"""
|
||||
|
||||
__visit_name__ = 'TIMESTAMP'
|
||||
|
||||
def __init__(self, timezone=False, fsp=None):
|
||||
"""Construct a MySQL TIMESTAMP type.
|
||||
|
||||
:param timezone: not used by the MySQL dialect.
|
||||
:param fsp: fractional seconds precision value.
|
||||
MySQL 5.6.4 supports storage of fractional seconds;
|
||||
this parameter will be used when emitting DDL
|
||||
for the TIMESTAMP type.
|
||||
|
||||
.. note::
|
||||
|
||||
DBAPI driver support for fractional seconds may
|
||||
be limited; current support includes
|
||||
MySQL Connector/Python.
|
||||
|
||||
.. versionadded:: 0.8.5 Added MySQL-specific :class:`.mysql.TIMESTAMP`
|
||||
with fractional seconds support.
|
||||
|
||||
"""
|
||||
super(TIMESTAMP, self).__init__(timezone=timezone)
|
||||
self.fsp = fsp
|
||||
|
||||
|
||||
class DATETIME(sqltypes.DATETIME):
|
||||
"""MySQL DATETIME type.
|
||||
|
||||
"""
|
||||
|
||||
__visit_name__ = 'DATETIME'
|
||||
|
||||
def __init__(self, timezone=False, fsp=None):
|
||||
"""Construct a MySQL DATETIME type.
|
||||
|
||||
:param timezone: not used by the MySQL dialect.
|
||||
:param fsp: fractional seconds precision value.
|
||||
MySQL 5.6.4 supports storage of fractional seconds;
|
||||
this parameter will be used when emitting DDL
|
||||
for the DATETIME type.
|
||||
|
||||
.. note::
|
||||
|
||||
DBAPI driver support for fractional seconds may
|
||||
be limited; current support includes
|
||||
MySQL Connector/Python.
|
||||
|
||||
.. versionadded:: 0.8.5 Added MySQL-specific :class:`.mysql.DATETIME`
|
||||
with fractional seconds support.
|
||||
|
||||
"""
|
||||
super(DATETIME, self).__init__(timezone=timezone)
|
||||
self.fsp = fsp
|
||||
|
||||
|
||||
class YEAR(sqltypes.TypeEngine):
|
||||
"""MySQL YEAR type, for single byte storage of years 1901-2155."""
|
||||
|
||||
__visit_name__ = 'YEAR'
|
||||
|
||||
def __init__(self, display_width=None):
|
||||
self.display_width = display_width
|
||||
|
||||
|
||||
class TEXT(_StringType, sqltypes.TEXT):
|
||||
"""MySQL TEXT type, for text up to 2^16 characters."""
|
||||
|
||||
__visit_name__ = 'TEXT'
|
||||
|
||||
def __init__(self, length=None, **kw):
|
||||
"""Construct a TEXT.
|
||||
|
||||
:param length: Optional, if provided the server may optimize storage
|
||||
by substituting the smallest TEXT type sufficient to store
|
||||
``length`` characters.
|
||||
|
||||
:param charset: Optional, a column-level character set for this string
|
||||
value. Takes precedence over the 'ascii' or 'unicode' short-hand.
|
||||
|
||||
:param collation: Optional, a column-level collation for this string
|
||||
value. Takes precedence over the 'binary' short-hand.
|
||||
|
||||
:param ascii: Defaults to False: short-hand for the ``latin1``
|
||||
character set, generates ASCII in schema.
|
||||
|
||||
:param unicode: Defaults to False: short-hand for the ``ucs2``
|
||||
character set, generates UNICODE in schema.
|
||||
|
||||
:param national: Optional. If true, use the server's configured
|
||||
national character set.
|
||||
|
||||
:param binary: Defaults to False: short-hand, pick the binary
|
||||
collation type that matches the column's character set. Generates
|
||||
BINARY in schema. This does not affect the type of data stored,
|
||||
only the collation of character data.
|
||||
|
||||
"""
|
||||
super(TEXT, self).__init__(length=length, **kw)
|
||||
|
||||
|
||||
class TINYTEXT(_StringType):
|
||||
"""MySQL TINYTEXT type, for text up to 2^8 characters."""
|
||||
|
||||
__visit_name__ = 'TINYTEXT'
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
"""Construct a TINYTEXT.
|
||||
|
||||
:param charset: Optional, a column-level character set for this string
|
||||
value. Takes precedence over the 'ascii' or 'unicode' short-hand.
|
||||
|
||||
:param collation: Optional, a column-level collation for this string
|
||||
value. Takes precedence over the 'binary' short-hand.
|
||||
|
||||
:param ascii: Defaults to False: short-hand for the ``latin1``
|
||||
character set, generates ASCII in schema.
|
||||
|
||||
:param unicode: Defaults to False: short-hand for the ``ucs2``
|
||||
character set, generates UNICODE in schema.
|
||||
|
||||
:param national: Optional. If true, use the server's configured
|
||||
national character set.
|
||||
|
||||
:param binary: Defaults to False: short-hand, pick the binary
|
||||
collation type that matches the column's character set. Generates
|
||||
BINARY in schema. This does not affect the type of data stored,
|
||||
only the collation of character data.
|
||||
|
||||
"""
|
||||
super(TINYTEXT, self).__init__(**kwargs)
|
||||
|
||||
|
||||
class MEDIUMTEXT(_StringType):
|
||||
"""MySQL MEDIUMTEXT type, for text up to 2^24 characters."""
|
||||
|
||||
__visit_name__ = 'MEDIUMTEXT'
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
"""Construct a MEDIUMTEXT.
|
||||
|
||||
:param charset: Optional, a column-level character set for this string
|
||||
value. Takes precedence over the 'ascii' or 'unicode' short-hand.
|
||||
|
||||
:param collation: Optional, a column-level collation for this string
|
||||
value. Takes precedence over the 'binary' short-hand.
|
||||
|
||||
:param ascii: Defaults to False: short-hand for the ``latin1``
|
||||
character set, generates ASCII in schema.
|
||||
|
||||
:param unicode: Defaults to False: short-hand for the ``ucs2``
|
||||
character set, generates UNICODE in schema.
|
||||
|
||||
:param national: Optional. If true, use the server's configured
|
||||
national character set.
|
||||
|
||||
:param binary: Defaults to False: short-hand, pick the binary
|
||||
collation type that matches the column's character set. Generates
|
||||
BINARY in schema. This does not affect the type of data stored,
|
||||
only the collation of character data.
|
||||
|
||||
"""
|
||||
super(MEDIUMTEXT, self).__init__(**kwargs)
|
||||
|
||||
|
||||
class LONGTEXT(_StringType):
|
||||
"""MySQL LONGTEXT type, for text up to 2^32 characters."""
|
||||
|
||||
__visit_name__ = 'LONGTEXT'
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
"""Construct a LONGTEXT.
|
||||
|
||||
:param charset: Optional, a column-level character set for this string
|
||||
value. Takes precedence over the 'ascii' or 'unicode' short-hand.
|
||||
|
||||
:param collation: Optional, a column-level collation for this string
|
||||
value. Takes precedence over the 'binary' short-hand.
|
||||
|
||||
:param ascii: Defaults to False: short-hand for the ``latin1``
|
||||
character set, generates ASCII in schema.
|
||||
|
||||
:param unicode: Defaults to False: short-hand for the ``ucs2``
|
||||
character set, generates UNICODE in schema.
|
||||
|
||||
:param national: Optional. If true, use the server's configured
|
||||
national character set.
|
||||
|
||||
:param binary: Defaults to False: short-hand, pick the binary
|
||||
collation type that matches the column's character set. Generates
|
||||
BINARY in schema. This does not affect the type of data stored,
|
||||
only the collation of character data.
|
||||
|
||||
"""
|
||||
super(LONGTEXT, self).__init__(**kwargs)
|
||||
|
||||
|
||||
class VARCHAR(_StringType, sqltypes.VARCHAR):
|
||||
"""MySQL VARCHAR type, for variable-length character data."""
|
||||
|
||||
__visit_name__ = 'VARCHAR'
|
||||
|
||||
def __init__(self, length=None, **kwargs):
|
||||
"""Construct a VARCHAR.
|
||||
|
||||
:param charset: Optional, a column-level character set for this string
|
||||
value. Takes precedence over the 'ascii' or 'unicode' short-hand.
|
||||
|
||||
:param collation: Optional, a column-level collation for this string
|
||||
value. Takes precedence over the 'binary' short-hand.
|
||||
|
||||
:param ascii: Defaults to False: short-hand for the ``latin1``
|
||||
character set, generates ASCII in schema.
|
||||
|
||||
:param unicode: Defaults to False: short-hand for the ``ucs2``
|
||||
character set, generates UNICODE in schema.
|
||||
|
||||
:param national: Optional. If true, use the server's configured
|
||||
national character set.
|
||||
|
||||
:param binary: Defaults to False: short-hand, pick the binary
|
||||
collation type that matches the column's character set. Generates
|
||||
BINARY in schema. This does not affect the type of data stored,
|
||||
only the collation of character data.
|
||||
|
||||
"""
|
||||
super(VARCHAR, self).__init__(length=length, **kwargs)
|
||||
|
||||
|
||||
class CHAR(_StringType, sqltypes.CHAR):
|
||||
"""MySQL CHAR type, for fixed-length character data."""
|
||||
|
||||
__visit_name__ = 'CHAR'
|
||||
|
||||
def __init__(self, length=None, **kwargs):
|
||||
"""Construct a CHAR.
|
||||
|
||||
:param length: Maximum data length, in characters.
|
||||
|
||||
:param binary: Optional, use the default binary collation for the
|
||||
national character set. This does not affect the type of data
|
||||
stored, use a BINARY type for binary data.
|
||||
|
||||
:param collation: Optional, request a particular collation. Must be
|
||||
compatible with the national character set.
|
||||
|
||||
"""
|
||||
super(CHAR, self).__init__(length=length, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def _adapt_string_for_cast(self, type_):
|
||||
# copy the given string type into a CHAR
|
||||
# for the purposes of rendering a CAST expression
|
||||
type_ = sqltypes.to_instance(type_)
|
||||
if isinstance(type_, sqltypes.CHAR):
|
||||
return type_
|
||||
elif isinstance(type_, _StringType):
|
||||
return CHAR(
|
||||
length=type_.length,
|
||||
charset=type_.charset,
|
||||
collation=type_.collation,
|
||||
ascii=type_.ascii,
|
||||
binary=type_.binary,
|
||||
unicode=type_.unicode,
|
||||
national=False # not supported in CAST
|
||||
)
|
||||
else:
|
||||
return CHAR(length=type_.length)
|
||||
|
||||
|
||||
class NVARCHAR(_StringType, sqltypes.NVARCHAR):
|
||||
"""MySQL NVARCHAR type.
|
||||
|
||||
For variable-length character data in the server's configured national
|
||||
character set.
|
||||
"""
|
||||
|
||||
__visit_name__ = 'NVARCHAR'
|
||||
|
||||
def __init__(self, length=None, **kwargs):
|
||||
"""Construct an NVARCHAR.
|
||||
|
||||
:param length: Maximum data length, in characters.
|
||||
|
||||
:param binary: Optional, use the default binary collation for the
|
||||
national character set. This does not affect the type of data
|
||||
stored, use a BINARY type for binary data.
|
||||
|
||||
:param collation: Optional, request a particular collation. Must be
|
||||
compatible with the national character set.
|
||||
|
||||
"""
|
||||
kwargs['national'] = True
|
||||
super(NVARCHAR, self).__init__(length=length, **kwargs)
|
||||
|
||||
|
||||
class NCHAR(_StringType, sqltypes.NCHAR):
|
||||
"""MySQL NCHAR type.
|
||||
|
||||
For fixed-length character data in the server's configured national
|
||||
character set.
|
||||
"""
|
||||
|
||||
__visit_name__ = 'NCHAR'
|
||||
|
||||
def __init__(self, length=None, **kwargs):
|
||||
"""Construct an NCHAR.
|
||||
|
||||
:param length: Maximum data length, in characters.
|
||||
|
||||
:param binary: Optional, use the default binary collation for the
|
||||
national character set. This does not affect the type of data
|
||||
stored, use a BINARY type for binary data.
|
||||
|
||||
:param collation: Optional, request a particular collation. Must be
|
||||
compatible with the national character set.
|
||||
|
||||
"""
|
||||
kwargs['national'] = True
|
||||
super(NCHAR, self).__init__(length=length, **kwargs)
|
||||
|
||||
|
||||
class TINYBLOB(sqltypes._Binary):
|
||||
"""MySQL TINYBLOB type, for binary data up to 2^8 bytes."""
|
||||
|
||||
__visit_name__ = 'TINYBLOB'
|
||||
|
||||
|
||||
class MEDIUMBLOB(sqltypes._Binary):
|
||||
"""MySQL MEDIUMBLOB type, for binary data up to 2^24 bytes."""
|
||||
|
||||
__visit_name__ = 'MEDIUMBLOB'
|
||||
|
||||
|
||||
class LONGBLOB(sqltypes._Binary):
|
||||
"""MySQL LONGBLOB type, for binary data up to 2^32 bytes."""
|
||||
|
||||
__visit_name__ = 'LONGBLOB'
|
@@ -1,117 +0,0 @@
|
||||
# mysql/zxjdbc.py
|
||||
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""
|
||||
|
||||
.. dialect:: mysql+zxjdbc
|
||||
:name: zxjdbc for Jython
|
||||
:dbapi: zxjdbc
|
||||
:connectstring: mysql+zxjdbc://<user>:<password>@<hostname>[:<port>]/\
|
||||
<database>
|
||||
:driverurl: http://dev.mysql.com/downloads/connector/j/
|
||||
|
||||
.. note:: Jython is not supported by current versions of SQLAlchemy. The
|
||||
zxjdbc dialect should be considered as experimental.
|
||||
|
||||
Character Sets
|
||||
--------------
|
||||
|
||||
SQLAlchemy zxjdbc dialects pass unicode straight through to the
|
||||
zxjdbc/JDBC layer. To allow multiple character sets to be sent from the
|
||||
MySQL Connector/J JDBC driver, by default SQLAlchemy sets its
|
||||
``characterEncoding`` connection property to ``UTF-8``. It may be
|
||||
overridden via a ``create_engine`` URL parameter.
|
||||
|
||||
"""
|
||||
import re
|
||||
|
||||
from ... import types as sqltypes, util
|
||||
from ...connectors.zxJDBC import ZxJDBCConnector
|
||||
from .base import BIT, MySQLDialect, MySQLExecutionContext
|
||||
|
||||
|
||||
class _ZxJDBCBit(BIT):
|
||||
def result_processor(self, dialect, coltype):
|
||||
"""Converts boolean or byte arrays from MySQL Connector/J to longs."""
|
||||
def process(value):
|
||||
if value is None:
|
||||
return value
|
||||
if isinstance(value, bool):
|
||||
return int(value)
|
||||
v = 0
|
||||
for i in value:
|
||||
v = v << 8 | (i & 0xff)
|
||||
value = v
|
||||
return value
|
||||
return process
|
||||
|
||||
|
||||
class MySQLExecutionContext_zxjdbc(MySQLExecutionContext):
|
||||
def get_lastrowid(self):
|
||||
cursor = self.create_cursor()
|
||||
cursor.execute("SELECT LAST_INSERT_ID()")
|
||||
lastrowid = cursor.fetchone()[0]
|
||||
cursor.close()
|
||||
return lastrowid
|
||||
|
||||
|
||||
class MySQLDialect_zxjdbc(ZxJDBCConnector, MySQLDialect):
|
||||
jdbc_db_name = 'mysql'
|
||||
jdbc_driver_name = 'com.mysql.jdbc.Driver'
|
||||
|
||||
execution_ctx_cls = MySQLExecutionContext_zxjdbc
|
||||
|
||||
colspecs = util.update_copy(
|
||||
MySQLDialect.colspecs,
|
||||
{
|
||||
sqltypes.Time: sqltypes.Time,
|
||||
BIT: _ZxJDBCBit
|
||||
}
|
||||
)
|
||||
|
||||
def _detect_charset(self, connection):
|
||||
"""Sniff out the character set in use for connection results."""
|
||||
# Prefer 'character_set_results' for the current connection over the
|
||||
# value in the driver. SET NAMES or individual variable SETs will
|
||||
# change the charset without updating the driver's view of the world.
|
||||
#
|
||||
# If it's decided that issuing that sort of SQL leaves you SOL, then
|
||||
# this can prefer the driver value.
|
||||
rs = connection.execute("SHOW VARIABLES LIKE 'character_set%%'")
|
||||
opts = dict((row[0], row[1]) for row in self._compat_fetchall(rs))
|
||||
for key in ('character_set_connection', 'character_set'):
|
||||
if opts.get(key, None):
|
||||
return opts[key]
|
||||
|
||||
util.warn("Could not detect the connection character set. "
|
||||
"Assuming latin1.")
|
||||
return 'latin1'
|
||||
|
||||
def _driver_kwargs(self):
|
||||
"""return kw arg dict to be sent to connect()."""
|
||||
return dict(characterEncoding='UTF-8', yearIsDateType='false')
|
||||
|
||||
def _extract_error_code(self, exception):
|
||||
# e.g.: DBAPIError: (Error) Table 'test.u2' doesn't exist
|
||||
# [SQLCode: 1146], [SQLState: 42S02] 'DESCRIBE `u2`' ()
|
||||
m = re.compile(r"\[SQLCode\: (\d+)\]").search(str(exception.args))
|
||||
c = m.group(1) if m is not None else None  # guard: message may lack a SQLCode
|
||||
if c:
|
||||
return int(c)
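# Hedged example of the SQLCode extraction above; the args tuple is
# fabricated to match the shape shown in the comment.
import re

_args = ("(Error) Table 'test.u2' doesn't exist "
         "[SQLCode: 1146], [SQLState: 42S02]",)
_m = re.compile(r"\[SQLCode\: (\d+)\]").search(str(_args))
assert _m is not None and int(_m.group(1)) == 1146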
|
||||
|
||||
def _get_server_version_info(self, connection):
|
||||
dbapi_con = connection.connection
|
||||
version = []
|
||||
r = re.compile(r'[.\-]')
|
||||
for n in r.split(dbapi_con.dbversion):
|
||||
try:
|
||||
version.append(int(n))
|
||||
except ValueError:
|
||||
version.append(n)
|
||||
return tuple(version)
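# Illustration only: splitting a typical Connector/J dbversion string
# on '.' and '-' as done above, keeping non-numeric fragments as-is.
import re

_version = []
for _n in re.compile(r'[.\-]').split('5.7.19-log'):
    try:
        _version.append(int(_n))
    except ValueError:
        _version.append(_n)
assert tuple(_version) == (5, 7, 19, 'log')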
|
||||
|
||||
dialect = MySQLDialect_zxjdbc
|
@@ -1,24 +0,0 @@
|
||||
# oracle/__init__.py
|
||||
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
from sqlalchemy.dialects.oracle import base, cx_oracle, zxjdbc
|
||||
|
||||
base.dialect = cx_oracle.dialect
|
||||
|
||||
from sqlalchemy.dialects.oracle.base import \
|
||||
VARCHAR, NVARCHAR, CHAR, DATE, NUMBER,\
|
||||
BLOB, BFILE, CLOB, NCLOB, TIMESTAMP, RAW,\
|
||||
FLOAT, DOUBLE_PRECISION, LONG, dialect, INTERVAL,\
|
||||
VARCHAR2, NVARCHAR2, ROWID
|
||||
|
||||
|
||||
__all__ = (
|
||||
'VARCHAR', 'NVARCHAR', 'CHAR', 'DATE', 'NUMBER',
|
||||
'BLOB', 'BFILE', 'CLOB', 'NCLOB', 'TIMESTAMP', 'RAW',
|
||||
'FLOAT', 'DOUBLE_PRECISION', 'LONG', 'dialect', 'INTERVAL',
|
||||
'VARCHAR2', 'NVARCHAR2', 'ROWID'
|
||||
)
|
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,235 +0,0 @@
|
||||
# oracle/zxjdbc.py
|
||||
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""
|
||||
.. dialect:: oracle+zxjdbc
|
||||
:name: zxJDBC for Jython
|
||||
:dbapi: zxjdbc
|
||||
:connectstring: oracle+zxjdbc://user:pass@host/dbname
|
||||
:driverurl: http://www.oracle.com/technetwork/database/features/jdbc/index-091264.html
|
||||
|
||||
.. note:: Jython is not supported by current versions of SQLAlchemy. The
|
||||
zxjdbc dialect should be considered as experimental.
|
||||
|
||||
"""
|
||||
import decimal
|
||||
import re
|
||||
|
||||
from sqlalchemy import sql, types as sqltypes, util
|
||||
from sqlalchemy.connectors.zxJDBC import ZxJDBCConnector
|
||||
from sqlalchemy.dialects.oracle.base import (OracleCompiler,
|
||||
OracleDialect,
|
||||
OracleExecutionContext)
|
||||
from sqlalchemy.engine import result as _result
|
||||
from sqlalchemy.sql import expression
|
||||
import collections
|
||||
|
||||
SQLException = zxJDBC = None
|
||||
|
||||
|
||||
class _ZxJDBCDate(sqltypes.Date):
|
||||
|
||||
def result_processor(self, dialect, coltype):
|
||||
def process(value):
|
||||
if value is None:
|
||||
return None
|
||||
else:
|
||||
return value.date()
|
||||
return process
|
||||
|
||||
|
||||
class _ZxJDBCNumeric(sqltypes.Numeric):
|
||||
|
||||
def result_processor(self, dialect, coltype):
|
||||
# XXX: does the dialect return Decimal or not???
|
||||
# if it does (in all cases), we could use a None processor as well as
|
||||
# the to_float generic processor
|
||||
if self.asdecimal:
|
||||
def process(value):
|
||||
if isinstance(value, decimal.Decimal):
|
||||
return value
|
||||
else:
|
||||
return decimal.Decimal(str(value))
|
||||
else:
|
||||
def process(value):
|
||||
if isinstance(value, decimal.Decimal):
|
||||
return float(value)
|
||||
else:
|
||||
return value
|
||||
return process
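# Standalone note on the str() round-trip above: building Decimal from
# str(value) avoids exposing binary floating-point noise, which
# Decimal(float) would preserve exactly.
import decimal

assert decimal.Decimal(str(1.1)) == decimal.Decimal('1.1')
assert decimal.Decimal(1.1) != decimal.Decimal('1.1')  # float noise kept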
|
||||
|
||||
|
||||
class OracleCompiler_zxjdbc(OracleCompiler):
|
||||
|
||||
def returning_clause(self, stmt, returning_cols):
|
||||
self.returning_cols = list(
|
||||
expression._select_iterables(returning_cols))
|
||||
|
||||
# within_columns_clause=False so that labels (foo AS bar) don't render
|
||||
columns = [self.process(c, within_columns_clause=False)
|
||||
for c in self.returning_cols]
|
||||
|
||||
if not hasattr(self, 'returning_parameters'):
|
||||
self.returning_parameters = []
|
||||
|
||||
binds = []
|
||||
for i, col in enumerate(self.returning_cols):
|
||||
dbtype = col.type.dialect_impl(
|
||||
self.dialect).get_dbapi_type(self.dialect.dbapi)
|
||||
self.returning_parameters.append((i + 1, dbtype))
|
||||
|
||||
bindparam = sql.bindparam(
|
||||
"ret_%d" % i, value=ReturningParam(dbtype))
|
||||
self.binds[bindparam.key] = bindparam
|
||||
binds.append(
|
||||
self.bindparam_string(self._truncate_bindparam(bindparam)))
|
||||
|
||||
return 'RETURNING ' + ', '.join(columns) + " INTO " + ", ".join(binds)
|
||||
|
||||
|
||||
class OracleExecutionContext_zxjdbc(OracleExecutionContext):
|
||||
|
||||
def pre_exec(self):
|
||||
if hasattr(self.compiled, 'returning_parameters'):
|
||||
# prepare a zxJDBC statement so we can grab its underlying
|
||||
# OraclePreparedStatement's getReturnResultSet later
|
||||
self.statement = self.cursor.prepare(self.statement)
|
||||
|
||||
def get_result_proxy(self):
|
||||
if hasattr(self.compiled, 'returning_parameters'):
|
||||
rrs = None
|
||||
try:
|
||||
try:
|
||||
rrs = self.statement.__statement__.getReturnResultSet()
|
||||
next(rrs)
|
||||
except SQLException as sqle:
|
||||
msg = '%s [SQLCode: %d]' % (
|
||||
sqle.getMessage(), sqle.getErrorCode())
|
||||
if sqle.getSQLState() is not None:
|
||||
msg += ' [SQLState: %s]' % sqle.getSQLState()
|
||||
raise zxJDBC.Error(msg)
|
||||
else:
|
||||
row = tuple(
|
||||
self.cursor.datahandler.getPyObject(
|
||||
rrs, index, dbtype)
|
||||
for index, dbtype in
|
||||
self.compiled.returning_parameters)
|
||||
return ReturningResultProxy(self, row)
|
||||
finally:
|
||||
if rrs is not None:
|
||||
try:
|
||||
rrs.close()
|
||||
except SQLException:
|
||||
pass
|
||||
self.statement.close()
|
||||
|
||||
return _result.ResultProxy(self)
|
||||
|
||||
def create_cursor(self):
|
||||
cursor = self._dbapi_connection.cursor()
|
||||
cursor.datahandler = self.dialect.DataHandler(cursor.datahandler)
|
||||
return cursor
|
||||
|
||||
|
||||
class ReturningResultProxy(_result.FullyBufferedResultProxy):
|
||||
|
||||
"""ResultProxy backed by the RETURNING ResultSet results."""
|
||||
|
||||
def __init__(self, context, returning_row):
|
||||
self._returning_row = returning_row
|
||||
super(ReturningResultProxy, self).__init__(context)
|
||||
|
||||
def _cursor_description(self):
|
||||
ret = []
|
||||
for c in self.context.compiled.returning_cols:
|
||||
if hasattr(c, 'name'):
|
||||
ret.append((c.name, c.type))
|
||||
else:
|
||||
ret.append((c.anon_label, c.type))
|
||||
return ret
|
||||
|
||||
def _buffer_rows(self):
|
||||
return collections.deque([self._returning_row])
|
||||
|
||||
|
||||
class ReturningParam(object):
|
||||
|
||||
"""A bindparam value representing a RETURNING parameter.
|
||||
|
||||
Specially handled by OracleReturningDataHandler.
|
||||
"""
|
||||
|
||||
def __init__(self, type):
|
||||
self.type = type
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, ReturningParam):
|
||||
return self.type == other.type
|
||||
return NotImplemented
|
||||
|
||||
def __ne__(self, other):
|
||||
if isinstance(other, ReturningParam):
|
||||
return self.type != other.type
|
||||
return NotImplemented
|
||||
|
||||
def __repr__(self):
|
||||
kls = self.__class__
|
||||
return '<%s.%s object at 0x%x type=%s>' % (
|
||||
kls.__module__, kls.__name__, id(self), self.type)
|
||||
|
||||
|
||||
class OracleDialect_zxjdbc(ZxJDBCConnector, OracleDialect):
|
||||
jdbc_db_name = 'oracle'
|
||||
jdbc_driver_name = 'oracle.jdbc.OracleDriver'
|
||||
|
||||
statement_compiler = OracleCompiler_zxjdbc
|
||||
execution_ctx_cls = OracleExecutionContext_zxjdbc
|
||||
|
||||
colspecs = util.update_copy(
|
||||
OracleDialect.colspecs,
|
||||
{
|
||||
sqltypes.Date: _ZxJDBCDate,
|
||||
sqltypes.Numeric: _ZxJDBCNumeric
|
||||
}
|
||||
)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(OracleDialect_zxjdbc, self).__init__(*args, **kwargs)
|
||||
global SQLException, zxJDBC
|
||||
from java.sql import SQLException
|
||||
from com.ziclix.python.sql import zxJDBC
|
||||
from com.ziclix.python.sql.handler import OracleDataHandler
|
||||
|
||||
class OracleReturningDataHandler(OracleDataHandler):
|
||||
"""zxJDBC DataHandler that specially handles ReturningParam."""
|
||||
|
||||
def setJDBCObject(self, statement, index, object, dbtype=None):
|
||||
if type(object) is ReturningParam:
|
||||
statement.registerReturnParameter(index, object.type)
|
||||
elif dbtype is None:
|
||||
OracleDataHandler.setJDBCObject(
|
||||
self, statement, index, object)
|
||||
else:
|
||||
OracleDataHandler.setJDBCObject(
|
||||
self, statement, index, object, dbtype)
|
||||
self.DataHandler = OracleReturningDataHandler
|
||||
|
||||
def initialize(self, connection):
|
||||
super(OracleDialect_zxjdbc, self).initialize(connection)
|
||||
self.implicit_returning = \
|
||||
connection.connection.driverversion >= '10.2'
|
||||
|
||||
def _create_jdbc_url(self, url):
|
||||
return 'jdbc:oracle:thin:@%s:%s:%s' % (
|
||||
url.host, url.port or 1521, url.database)
|
||||
|
||||
def _get_server_version_info(self, connection):
|
||||
version = re.search(
|
||||
r'Release ([\d\.]+)', connection.connection.dbversion).group(1)
|
||||
return tuple(int(x) for x in version.split('.'))
|
||||
|
||||
dialect = OracleDialect_zxjdbc
|
@@ -1,10 +0,0 @@
|
||||
# backwards compat with the old name
|
||||
from sqlalchemy.util import warn_deprecated
|
||||
|
||||
warn_deprecated(
|
||||
"The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'. "
|
||||
"The new URL format is postgresql[+driver]://<user>:<pass>@<host>/<dbname>"
|
||||
)
|
||||
|
||||
from sqlalchemy.dialects.postgresql import *
|
||||
from sqlalchemy.dialects.postgresql import base
|
@@ -1,36 +0,0 @@
|
||||
# postgresql/__init__.py
|
||||
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
from . import base, psycopg2, pg8000, pypostgresql, pygresql, \
|
||||
zxjdbc, psycopg2cffi
|
||||
|
||||
base.dialect = psycopg2.dialect
|
||||
|
||||
from .base import \
|
||||
INTEGER, BIGINT, SMALLINT, VARCHAR, CHAR, TEXT, NUMERIC, FLOAT, REAL, \
|
||||
INET, CIDR, UUID, BIT, MACADDR, OID, DOUBLE_PRECISION, TIMESTAMP, TIME, \
|
||||
DATE, BYTEA, BOOLEAN, INTERVAL, ENUM, dialect, TSVECTOR, DropEnumType, \
|
||||
CreateEnumType
|
||||
from .hstore import HSTORE, hstore
|
||||
from .json import JSON, JSONB
|
||||
from .array import array, ARRAY, Any, All
|
||||
from .ext import aggregate_order_by, ExcludeConstraint, array_agg
|
||||
from .dml import insert, Insert
|
||||
|
||||
from .ranges import INT4RANGE, INT8RANGE, NUMRANGE, DATERANGE, TSRANGE, \
|
||||
TSTZRANGE
|
||||
|
||||
__all__ = (
|
||||
'INTEGER', 'BIGINT', 'SMALLINT', 'VARCHAR', 'CHAR', 'TEXT', 'NUMERIC',
|
||||
'FLOAT', 'REAL', 'INET', 'CIDR', 'UUID', 'BIT', 'MACADDR', 'OID',
|
||||
'DOUBLE_PRECISION', 'TIMESTAMP', 'TIME', 'DATE', 'BYTEA', 'BOOLEAN',
|
||||
'INTERVAL', 'ARRAY', 'ENUM', 'dialect', 'array', 'HSTORE',
|
||||
'hstore', 'INT4RANGE', 'INT8RANGE', 'NUMRANGE', 'DATERANGE',
|
||||
'TSRANGE', 'TSTZRANGE', 'json', 'JSON', 'JSONB', 'Any', 'All',
|
||||
'DropEnumType', 'CreateEnumType', 'ExcludeConstraint',
|
||||
'aggregate_order_by', 'array_agg', 'insert', 'Insert'
|
||||
)
|
@@ -1,314 +0,0 @@
|
||||
# postgresql/array.py
|
||||
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
from .base import ischema_names
|
||||
from ...sql import expression, operators
|
||||
from ...sql.base import SchemaEventTarget
|
||||
from ... import types as sqltypes
|
||||
|
||||
try:
|
||||
from uuid import UUID as _python_UUID
|
||||
except ImportError:
|
||||
_python_UUID = None
|
||||
|
||||
|
||||
def Any(other, arrexpr, operator=operators.eq):
|
||||
"""A synonym for the :meth:`.ARRAY.Comparator.any` method.
|
||||
|
||||
This method is legacy and is here for backwards-compatibility.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:func:`.expression.any_`
|
||||
|
||||
"""
|
||||
|
||||
return arrexpr.any(other, operator)
|
||||
|
||||
|
||||
def All(other, arrexpr, operator=operators.eq):
|
||||
"""A synonym for the :meth:`.ARRAY.Comparator.all` method.
|
||||
|
||||
This method is legacy and is here for backwards-compatibility.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:func:`.expression.all_`
|
||||
|
||||
"""
|
||||
|
||||
return arrexpr.all(other, operator)
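# Hedged usage sketch: the legacy helpers above delegate to the
# comparator methods, so both spellings render the same SQL.
# `demo_table` is hypothetical and exists only for this illustration.
from sqlalchemy import Column, Integer, MetaData, Table
from sqlalchemy.dialects.postgresql import ARRAY as PGARRAY

demo_table = Table('demo', MetaData(), Column('data', PGARRAY(Integer)))

legacy = Any(7, demo_table.c.data)
modern = demo_table.c.data.any(7)
# both should compile to roughly: %(param_1)s = ANY (demo.data)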
|
||||
|
||||
|
||||
class array(expression.Tuple):
|
||||
|
||||
"""A PostgreSQL ARRAY literal.
|
||||
|
||||
This is used to produce ARRAY literals in SQL expressions, e.g.::
|
||||
|
||||
from sqlalchemy.dialects.postgresql import array
|
||||
from sqlalchemy.dialects import postgresql
|
||||
from sqlalchemy import select, func
|
||||
|
||||
stmt = select([
|
||||
array([1,2]) + array([3,4,5])
|
||||
])
|
||||
|
||||
print stmt.compile(dialect=postgresql.dialect())
|
||||
|
||||
Produces the SQL::
|
||||
|
||||
SELECT ARRAY[%(param_1)s, %(param_2)s] ||
|
||||
ARRAY[%(param_3)s, %(param_4)s, %(param_5)s] AS anon_1
|
||||
|
||||
An instance of :class:`.array` will always have the datatype
|
||||
:class:`.ARRAY`. The "inner" type of the array is inferred from
|
||||
the values present, unless the ``type_`` keyword argument is passed::
|
||||
|
||||
array(['foo', 'bar'], type_=CHAR)
|
||||
|
||||
.. versionadded:: 0.8 Added the :class:`~.postgresql.array` literal type.
|
||||
|
||||
See also:
|
||||
|
||||
:class:`.postgresql.ARRAY`
|
||||
|
||||
"""
|
||||
__visit_name__ = 'array'
|
||||
|
||||
def __init__(self, clauses, **kw):
|
||||
super(array, self).__init__(*clauses, **kw)
|
||||
self.type = ARRAY(self.type)
|
||||
|
||||
def _bind_param(self, operator, obj, _assume_scalar=False, type_=None):
|
||||
if _assume_scalar or operator is operators.getitem:
|
||||
# if getitem->slice were called, Indexable produces
|
||||
# a Slice object from that
|
||||
assert isinstance(obj, int)
|
||||
return expression.BindParameter(
|
||||
None, obj, _compared_to_operator=operator,
|
||||
type_=type_,
|
||||
_compared_to_type=self.type, unique=True)
|
||||
|
||||
else:
|
||||
return array([
|
||||
self._bind_param(operator, o, _assume_scalar=True, type_=type_)
|
||||
for o in obj])
|
||||
|
||||
def self_group(self, against=None):
|
||||
if (against in (
|
||||
operators.any_op, operators.all_op, operators.getitem)):
|
||||
return expression.Grouping(self)
|
||||
else:
|
||||
return self
|
||||
|
||||
|
||||
CONTAINS = operators.custom_op("@>", precedence=5)
|
||||
|
||||
CONTAINED_BY = operators.custom_op("<@", precedence=5)
|
||||
|
||||
OVERLAP = operators.custom_op("&&", precedence=5)
|
||||
|
||||
|
||||
class ARRAY(SchemaEventTarget, sqltypes.ARRAY):
|
||||
|
||||
"""PostgreSQL ARRAY type.
|
||||
|
||||
.. versionchanged:: 1.1 The :class:`.postgresql.ARRAY` type is now
|
||||
a subclass of the core :class:`.types.ARRAY` type.
|
||||
|
||||
The :class:`.postgresql.ARRAY` type is constructed in the same way
|
||||
as the core :class:`.types.ARRAY` type; a member type is required, and a
|
||||
number of dimensions is recommended if the type is to be used for more
|
||||
than one dimension::
|
||||
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
mytable = Table("mytable", metadata,
|
||||
Column("data", postgresql.ARRAY(Integer, dimensions=2))
|
||||
)
|
||||
|
||||
The :class:`.postgresql.ARRAY` type provides all operations defined on the
|
||||
core :class:`.types.ARRAY` type, including support for "dimensions", indexed
|
||||
access, and simple matching such as :meth:`.types.ARRAY.Comparator.any`
|
||||
and :meth:`.types.ARRAY.Comparator.all`. The :class:`.postgresql.ARRAY` class also
|
||||
provides PostgreSQL-specific methods for containment operations, including
|
||||
:meth:`.postgresql.ARRAY.Comparator.contains`,
|
||||
:meth:`.postgresql.ARRAY.Comparator.contained_by`,
|
||||
and :meth:`.postgresql.ARRAY.Comparator.overlap`, e.g.::
|
||||
|
||||
mytable.c.data.contains([1, 2])
|
||||
|
||||
The :class:`.postgresql.ARRAY` type may not be supported on all
|
||||
PostgreSQL DBAPIs; it is currently known to work on psycopg2 only.
|
||||
|
||||
Additionally, the :class:`.postgresql.ARRAY` type does not work directly in
|
||||
conjunction with the :class:`.ENUM` type. For a workaround, see the
|
||||
special type at :ref:`postgresql_array_of_enum`.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:class:`.types.ARRAY` - base array type
|
||||
|
||||
:class:`.postgresql.array` - produces a literal array value.
|
||||
|
||||
"""
|
||||
|
||||
class Comparator(sqltypes.ARRAY.Comparator):
|
||||
|
||||
"""Define comparison operations for :class:`.ARRAY`.
|
||||
|
||||
Note that these operations are in addition to those provided
|
||||
by the base :class:`.types.ARRAY.Comparator` class, including
|
||||
:meth:`.types.ARRAY.Comparator.any` and
|
||||
:meth:`.types.ARRAY.Comparator.all`.
|
||||
|
||||
"""
|
||||
|
||||
def contains(self, other, **kwargs):
|
||||
"""Boolean expression. Test if elements are a superset of the
|
||||
elements of the argument array expression.
|
||||
"""
|
||||
return self.operate(CONTAINS, other, result_type=sqltypes.Boolean)
|
||||
|
||||
def contained_by(self, other):
|
||||
"""Boolean expression. Test if elements are a proper subset of the
|
||||
elements of the argument array expression.
|
||||
"""
|
||||
return self.operate(
|
||||
CONTAINED_BY, other, result_type=sqltypes.Boolean)
|
||||
|
||||
def overlap(self, other):
|
||||
"""Boolean expression. Test if array has elements in common with
|
||||
an argument array expression.
|
||||
"""
|
||||
return self.operate(OVERLAP, other, result_type=sqltypes.Boolean)
|
||||
|
||||
comparator_factory = Comparator
|
||||
|
||||
def __init__(self, item_type, as_tuple=False, dimensions=None,
|
||||
zero_indexes=False):
|
||||
"""Construct an ARRAY.
|
||||
|
||||
E.g.::
|
||||
|
||||
Column('myarray', ARRAY(Integer))
|
||||
|
||||
Arguments are:
|
||||
|
||||
:param item_type: The data type of items of this array. Note that
|
||||
dimensionality is irrelevant here, so multi-dimensional arrays like
|
||||
``INTEGER[][]``, are constructed as ``ARRAY(Integer)``, not as
|
||||
``ARRAY(ARRAY(Integer))`` or such.
|
||||
|
||||
:param as_tuple=False: Specify whether return results
|
||||
should be converted to tuples from lists. DBAPIs such
|
||||
as psycopg2 return lists by default. When tuples are
|
||||
returned, the results are hashable.
|
||||
|
||||
:param dimensions: if non-None, the ARRAY will assume a fixed
|
||||
number of dimensions. This will cause the DDL emitted for this
|
||||
ARRAY to include the exact number of bracket clauses ``[]``,
|
||||
and will also optimize the performance of the type overall.
|
||||
Note that PG arrays are always implicitly "non-dimensioned",
|
||||
meaning they can store any number of dimensions no matter how
|
||||
they were declared.
|
||||
|
||||
:param zero_indexes=False: when True, index values will be converted
|
||||
between Python zero-based and PostgreSQL one-based indexes, e.g.
|
||||
a value of one will be added to all index values before passing
|
||||
to the database.
|
||||
|
||||
.. versionadded:: 0.9.5
|
||||
|
||||
|
||||
"""
|
||||
if isinstance(item_type, ARRAY):
|
||||
raise ValueError("Do not nest ARRAY types; ARRAY(basetype) "
|
||||
"handles multi-dimensional arrays of basetype")
|
||||
if isinstance(item_type, type):
|
||||
item_type = item_type()
|
||||
self.item_type = item_type
|
||||
self.as_tuple = as_tuple
|
||||
self.dimensions = dimensions
|
||||
self.zero_indexes = zero_indexes
|
||||
|
||||
@property
|
||||
def hashable(self):
|
||||
return self.as_tuple
|
||||
|
||||
@property
|
||||
def python_type(self):
|
||||
return list
|
||||
|
||||
def compare_values(self, x, y):
|
||||
return x == y
|
||||
|
||||
def _set_parent(self, column):
|
||||
"""Support SchemaEventTarget"""
|
||||
|
||||
if isinstance(self.item_type, SchemaEventTarget):
|
||||
self.item_type._set_parent(column)
|
||||
|
||||
def _set_parent_with_dispatch(self, parent):
|
||||
"""Support SchemaEventTarget"""
|
||||
|
||||
if isinstance(self.item_type, SchemaEventTarget):
|
||||
self.item_type._set_parent_with_dispatch(parent)
|
||||
|
||||
def _proc_array(self, arr, itemproc, dim, collection):
|
||||
if dim is None:
|
||||
arr = list(arr)
|
||||
if dim == 1 or dim is None and (
|
||||
# this has to be (list, tuple), or at least
|
||||
# not hasattr('__iter__'), since Py3K strings
|
||||
# etc. have __iter__
|
||||
not arr or not isinstance(arr[0], (list, tuple))):
|
||||
if itemproc:
|
||||
return collection(itemproc(x) for x in arr)
|
||||
else:
|
||||
return collection(arr)
|
||||
else:
|
||||
return collection(
|
||||
self._proc_array(
|
||||
x, itemproc,
|
||||
dim - 1 if dim is not None else None,
|
||||
collection)
|
||||
for x in arr
|
||||
)
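# Minimal standalone re-implementation sketch of the recursion above
# (the itemproc-is-None and dim-normalization branches are dropped for
# brevity): apply an item processor through two fixed dimensions.
def _proc(arr, itemproc, dim, collection):
    if dim == 1 or (dim is None and (
            not arr or not isinstance(arr[0], (list, tuple)))):
        return collection(itemproc(x) for x in arr)
    return collection(
        _proc(x, itemproc, dim - 1 if dim is not None else None, collection)
        for x in arr)

assert _proc([[1, 2], [3, 4]], str, 2, list) == [['1', '2'], ['3', '4']]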
|
||||
|
||||
def bind_processor(self, dialect):
|
||||
item_proc = self.item_type.dialect_impl(dialect).\
|
||||
bind_processor(dialect)
|
||||
|
||||
def process(value):
|
||||
if value is None:
|
||||
return value
|
||||
else:
|
||||
return self._proc_array(
|
||||
value,
|
||||
item_proc,
|
||||
self.dimensions,
|
||||
list)
|
||||
return process
|
||||
|
||||
def result_processor(self, dialect, coltype):
|
||||
item_proc = self.item_type.dialect_impl(dialect).\
|
||||
result_processor(dialect, coltype)
|
||||
|
||||
def process(value):
|
||||
if value is None:
|
||||
return value
|
||||
else:
|
||||
return self._proc_array(
|
||||
value,
|
||||
item_proc,
|
||||
self.dimensions,
|
||||
tuple if self.as_tuple else list)
|
||||
return process
|
||||
|
||||
ischema_names['_array'] = ARRAY
|
File diff suppressed because it is too large
@@ -1,213 +0,0 @@
|
||||
# postgresql/on_conflict.py
|
||||
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
from ...sql.elements import ClauseElement, _literal_as_binds
|
||||
from ...sql.dml import Insert as StandardInsert
|
||||
from ...sql.expression import alias
|
||||
from ...sql import schema
|
||||
from ...util.langhelpers import public_factory
|
||||
from ...sql.base import _generative
|
||||
from ... import util
|
||||
from . import ext
|
||||
|
||||
__all__ = ('Insert', 'insert')
|
||||
|
||||
|
||||
class Insert(StandardInsert):
|
||||
"""PostgreSQL-specific implementation of INSERT.
|
||||
|
||||
Adds methods for PG-specific syntaxes such as ON CONFLICT.
|
||||
|
||||
.. versionadded:: 1.1
|
||||
|
||||
"""
|
||||
|
||||
@util.memoized_property
|
||||
def excluded(self):
|
||||
"""Provide the ``excluded`` namespace for an ON CONFLICT statement
|
||||
|
||||
PG's ON CONFLICT clause allows reference to the row that would
|
||||
be inserted, known as ``excluded``. This attribute provides
|
||||
all columns in this row to be referenceable.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`postgresql_insert_on_conflict` - example of how
|
||||
to use :attr:`.Insert.excluded`
|
||||
|
||||
"""
|
||||
return alias(self.table, name='excluded').columns
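# Hedged usage sketch for the `excluded` namespace; `my_table` is a
# hypothetical table used only for this illustration.
from sqlalchemy import Column, Integer, MetaData, String, Table
from sqlalchemy.dialects.postgresql import insert

my_table = Table('my_table', MetaData(),
                 Column('id', Integer, primary_key=True),
                 Column('data', String))

stmt = insert(my_table).values(id=1, data='inserted value')
stmt = stmt.on_conflict_do_update(
    index_elements=['id'],
    set_=dict(data=stmt.excluded.data))
# renders roughly: INSERT INTO my_table (id, data) VALUES (...)
#   ON CONFLICT (id) DO UPDATE SET data = excluded.data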
|
||||
|
||||
@_generative
|
||||
def on_conflict_do_update(
|
||||
self,
|
||||
constraint=None, index_elements=None,
|
||||
index_where=None, set_=None, where=None):
|
||||
"""
|
||||
Specifies a DO UPDATE SET action for ON CONFLICT clause.
|
||||
|
||||
Either the ``constraint`` or ``index_elements`` argument is
|
||||
required, but only one of these can be specified.
|
||||
|
||||
:param constraint:
|
||||
The name of a unique or exclusion constraint on the table,
|
||||
or the constraint object itself if it has a .name attribute.
|
||||
|
||||
:param index_elements:
|
||||
A sequence consisting of string column names, :class:`.Column`
|
||||
objects, or other column expression objects that will be used
|
||||
to infer a target index.
|
||||
|
||||
:param index_where:
|
||||
Additional WHERE criterion that can be used to infer a
|
||||
conditional target index.
|
||||
|
||||
:param set_:
|
||||
Required argument. A dictionary or other mapping object
|
||||
with column names as keys and expressions or literals as values,
|
||||
specifying the ``SET`` actions to take.
|
||||
If the target :class:`.Column` specifies a ".key" attribute distinct
|
||||
from the column name, that key should be used.
|
||||
|
||||
.. warning:: This dictionary does **not** take into account
|
||||
Python-specified default UPDATE values or generation functions,
|
||||
e.g. those specified using :paramref:`.Column.onupdate`.
|
||||
These values will not be exercised for an ON CONFLICT style of
|
||||
UPDATE, unless they are manually specified in the
|
||||
:paramref:`.Insert.on_conflict_do_update.set_` dictionary.
|
||||
|
||||
:param where:
|
||||
Optional argument. If present, can be a literal SQL
|
||||
string or an acceptable expression for a ``WHERE`` clause
|
||||
that restricts the rows affected by ``DO UPDATE SET``. Rows
|
||||
not meeting the ``WHERE`` condition will not be updated
|
||||
(effectively a ``DO NOTHING`` for those rows).
|
||||
|
||||
.. versionadded:: 1.1
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`postgresql_insert_on_conflict`
|
||||
|
||||
"""
|
||||
self._post_values_clause = OnConflictDoUpdate(
|
||||
constraint, index_elements, index_where, set_, where)
|
||||
return self
|
||||
|
||||
@_generative
|
||||
def on_conflict_do_nothing(
|
||||
self,
|
||||
constraint=None, index_elements=None, index_where=None):
|
||||
"""
|
||||
Specifies a DO NOTHING action for ON CONFLICT clause.
|
||||
|
||||
The ``constraint`` and ``index_elements`` arguments
|
||||
are optional, but only one of these can be specified.
|
||||
|
||||
:param constraint:
|
||||
The name of a unique or exclusion constraint on the table,
|
||||
or the constraint object itself if it has a .name attribute.
|
||||
|
||||
:param index_elements:
|
||||
A sequence consisting of string column names, :class:`.Column`
|
||||
objects, or other column expression objects that will be used
|
||||
to infer a target index.
|
||||
|
||||
:param index_where:
|
||||
Additional WHERE criterion that can be used to infer a
|
||||
conditional target index.
|
||||
|
||||
.. versionadded:: 1.1
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`postgresql_insert_on_conflict`
|
||||
|
||||
"""
|
||||
self._post_values_clause = OnConflictDoNothing(
|
||||
constraint, index_elements, index_where)
|
||||
return self
|
||||
|
||||
insert = public_factory(Insert, '.dialects.postgresql.insert')
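

def _example_upsert(users_table, connection):
    # Hedged usage sketch, not part of the original module: shows how the
    # generative methods above compose.  ``users_table`` with columns
    # ``id`` and ``name`` is hypothetical.
    stmt = insert(users_table).values(id=1, name='alice')
    stmt = stmt.on_conflict_do_update(
        index_elements=['id'],
        # reuse the proposed row via the ``excluded`` namespace
        set_={'name': stmt.excluded.name})
    connection.execute(stmt)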


class OnConflictClause(ClauseElement):
    def __init__(
            self,
            constraint=None,
            index_elements=None,
            index_where=None):

        if constraint is not None:
            if not isinstance(constraint, util.string_types) and \
                    isinstance(constraint, (
                        schema.Index, schema.Constraint,
                        ext.ExcludeConstraint)):
                constraint = getattr(constraint, 'name') or constraint

        if constraint is not None:
            if index_elements is not None:
                raise ValueError(
                    "'constraint' and 'index_elements' are mutually exclusive")

            if isinstance(constraint, util.string_types):
                self.constraint_target = constraint
                self.inferred_target_elements = None
                self.inferred_target_whereclause = None
            elif isinstance(constraint, schema.Index):
                index_elements = constraint.expressions
                index_where = \
                    constraint.dialect_options['postgresql'].get("where")
            elif isinstance(constraint, ext.ExcludeConstraint):
                index_elements = constraint.columns
                index_where = constraint.where
            else:
                index_elements = constraint.columns
                index_where = \
                    constraint.dialect_options['postgresql'].get("where")

        if index_elements is not None:
            self.constraint_target = None
            self.inferred_target_elements = index_elements
            self.inferred_target_whereclause = index_where
        elif constraint is None:
            self.constraint_target = self.inferred_target_elements = \
                self.inferred_target_whereclause = None


class OnConflictDoNothing(OnConflictClause):
    __visit_name__ = 'on_conflict_do_nothing'


class OnConflictDoUpdate(OnConflictClause):
    __visit_name__ = 'on_conflict_do_update'

    def __init__(
            self,
            constraint=None,
            index_elements=None,
            index_where=None,
            set_=None,
            where=None):
        super(OnConflictDoUpdate, self).__init__(
            constraint=constraint,
            index_elements=index_elements,
            index_where=index_where)

        if self.inferred_target_elements is None and \
                self.constraint_target is None:
            raise ValueError(
                "Either constraint or index_elements, "
                "but not both, must be specified unless DO NOTHING")

        if (not isinstance(set_, dict) or not set_):
            raise ValueError("set parameter must be a non-empty dictionary")
        self.update_values_to_set = [
            (key, value)
            for key, value in set_.items()
        ]
        self.update_whereclause = where
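

def _example_conflict_targets(users_table, connection):
    # Hedged sketch, not part of the original module: the two mutually
    # exclusive ways of naming a conflict target that OnConflictClause
    # resolves above.  The constraint name 'uq_users_email' and the
    # ``email`` column are hypothetical.
    stmt = insert(users_table).values(email='a@example.com')

    # explicit constraint name -> ON CONFLICT ON CONSTRAINT uq_users_email
    connection.execute(
        stmt.on_conflict_do_nothing(constraint='uq_users_email'))

    # inferred index -> ON CONFLICT (email)
    connection.execute(
        stmt.on_conflict_do_nothing(index_elements=['email']))

    # passing both raises ValueError: "'constraint' and 'index_elements'
    # are mutually exclusive"
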
@ -1,218 +0,0 @@
# postgresql/ext.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from ...sql import expression
from ...sql import elements
from ...sql import functions
from ...sql.schema import ColumnCollectionConstraint
from .array import ARRAY


class aggregate_order_by(expression.ColumnElement):
    """Represent a PostgreSQL aggregate order by expression.

    E.g.::

        from sqlalchemy.dialects.postgresql import aggregate_order_by
        expr = func.array_agg(aggregate_order_by(table.c.a, table.c.b.desc()))
        stmt = select([expr])

    would represent the expression::

        SELECT array_agg(a ORDER BY b DESC) FROM table;

    Similarly::

        expr = func.string_agg(
            table.c.a,
            aggregate_order_by(literal_column("','"), table.c.a)
        )
        stmt = select([expr])

    Would represent::

        SELECT string_agg(a, ',' ORDER BY a) FROM table;

    .. versionadded:: 1.1

    .. seealso::

        :class:`.array_agg`

    """

    __visit_name__ = 'aggregate_order_by'

    def __init__(self, target, order_by):
        self.target = elements._literal_as_binds(target)
        self.order_by = elements._literal_as_binds(order_by)

    def self_group(self, against=None):
        return self

    def get_children(self, **kwargs):
        return self.target, self.order_by

    def _copy_internals(self, clone=elements._clone, **kw):
        self.target = clone(self.target, **kw)
        self.order_by = clone(self.order_by, **kw)

    @property
    def _from_objects(self):
        return self.target._from_objects + self.order_by._from_objects


class ExcludeConstraint(ColumnCollectionConstraint):
    """A table-level EXCLUDE constraint.

    Defines an EXCLUDE constraint as described in the `postgres
    documentation`__.

    __ http://www.postgresql.org/docs/9.0/\
static/sql-createtable.html#SQL-CREATETABLE-EXCLUDE
    """

    __visit_name__ = 'exclude_constraint'

    where = None

    def __init__(self, *elements, **kw):
        r"""
        Create an :class:`.ExcludeConstraint` object.

        E.g.::

            const = ExcludeConstraint(
                (Column('period'), '&&'),
                (Column('group'), '='),
                where=(Column('group') != 'some group')
            )

        The constraint is normally embedded into the :class:`.Table` construct
        directly, or added later using :meth:`.append_constraint`::

            some_table = Table(
                'some_table', metadata,
                Column('id', Integer, primary_key=True),
                Column('period', TSRANGE()),
                Column('group', String)
            )

            some_table.append_constraint(
                ExcludeConstraint(
                    (some_table.c.period, '&&'),
                    (some_table.c.group, '='),
                    where=some_table.c.group != 'some group',
                    name='some_table_excl_const'
                )
            )

        :param \*elements:
          A sequence of 2-tuples of the form ``(column, operator)`` where
          "column" is a SQL expression element or a raw SQL string, most
          typically a :class:`.Column` object,
          and "operator" is a string containing the operator to use.

          .. note::

                A plain string passed for the value of "column" is interpreted
                as an arbitrary SQL expression; when passing a plain string,
                any necessary quoting and escaping syntaxes must be applied
                manually. In order to specify a column name when a
                :class:`.Column` object is not available, while ensuring that
                any necessary quoting rules take effect, an ad-hoc
                :class:`.Column` or :func:`.sql.expression.column` object may
                be used.

        :param name:
          Optional, the in-database name of this constraint.

        :param deferrable:
          Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when
          issuing DDL for this constraint.

        :param initially:
          Optional string. If set, emit INITIALLY <value> when issuing DDL
          for this constraint.

        :param using:
          Optional string. If set, emit USING <index_method> when issuing DDL
          for this constraint. Defaults to 'gist'.

        :param where:
          Optional SQL expression construct or literal SQL string.
          If set, emit WHERE <predicate> when issuing DDL
          for this constraint.

          .. note::

                A plain string passed here is interpreted as an arbitrary SQL
                expression; when passing a plain string, any necessary quoting
                and escaping syntaxes must be applied manually.

        """
        columns = []
        render_exprs = []
        self.operators = {}

        expressions, operators = zip(*elements)

        for (expr, column, strname, add_element), operator in zip(
            self._extract_col_expression_collection(expressions),
            operators
        ):
            if add_element is not None:
                columns.append(add_element)

            name = column.name if column is not None else strname

            if name is not None:
                # backwards compat
                self.operators[name] = operator

            expr = expression._literal_as_text(expr)

            render_exprs.append(
                (expr, name, operator)
            )

        self._render_exprs = render_exprs
        ColumnCollectionConstraint.__init__(
            self,
            *columns,
            name=kw.get('name'),
            deferrable=kw.get('deferrable'),
            initially=kw.get('initially')
        )
        self.using = kw.get('using', 'gist')
        where = kw.get('where')
        if where is not None:
            self.where = expression._literal_as_text(where)

    def copy(self, **kw):
        elements = [(col, self.operators[col])
                    for col in self.columns.keys()]
        c = self.__class__(*elements,
                           name=self.name,
                           deferrable=self.deferrable,
                           initially=self.initially,
                           where=self.where,
                           using=self.using)
        c.dispatch._update(self.dispatch)
        return c


def array_agg(*arg, **kw):
    """PostgreSQL-specific form of :class:`.array_agg`, ensures
    return type is :class:`.postgresql.ARRAY` and not
    the plain :class:`.types.ARRAY`.

    .. versionadded:: 1.1

    """
    kw['type_'] = ARRAY(functions._type_from_args(arg))
    return functions.func.array_agg(*arg, **kw)
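

def _example_ordered_array_agg(table, connection):
    # Hedged usage sketch, not part of the original module: combining the
    # PG-specific array_agg() above with aggregate_order_by, so that the
    # result type is postgresql.ARRAY.  ``table.c.a`` and ``table.c.b``
    # are hypothetical columns.
    from sqlalchemy import select

    expr = array_agg(aggregate_order_by(table.c.a, table.c.b.desc()))
    # renders: SELECT array_agg(a ORDER BY b DESC) FROM table
    return connection.execute(select([expr])).scalar()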
@ -1,420 +0,0 @@
# postgresql/hstore.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

import re

from .base import ischema_names
from .array import ARRAY
from ... import types as sqltypes
from ...sql import functions as sqlfunc
from ...sql import operators
from ... import util

__all__ = ('HSTORE', 'hstore')

idx_precedence = operators._PRECEDENCE[operators.json_getitem_op]

GETITEM = operators.custom_op(
    "->", precedence=idx_precedence, natural_self_precedent=True,
    eager_grouping=True
)

HAS_KEY = operators.custom_op(
    "?", precedence=idx_precedence, natural_self_precedent=True,
    eager_grouping=True
)

HAS_ALL = operators.custom_op(
    "?&", precedence=idx_precedence, natural_self_precedent=True,
    eager_grouping=True
)

HAS_ANY = operators.custom_op(
    "?|", precedence=idx_precedence, natural_self_precedent=True,
    eager_grouping=True
)

CONTAINS = operators.custom_op(
    "@>", precedence=idx_precedence, natural_self_precedent=True,
    eager_grouping=True
)

CONTAINED_BY = operators.custom_op(
    "<@", precedence=idx_precedence, natural_self_precedent=True,
    eager_grouping=True
)


class HSTORE(sqltypes.Indexable, sqltypes.Concatenable, sqltypes.TypeEngine):
    """Represent the PostgreSQL HSTORE type.

    The :class:`.HSTORE` type stores dictionaries containing strings, e.g.::

        data_table = Table('data_table', metadata,
                           Column('id', Integer, primary_key=True),
                           Column('data', HSTORE)
                           )

        with engine.connect() as conn:
            conn.execute(
                data_table.insert(),
                data={"key1": "value1", "key2": "value2"}
            )

    :class:`.HSTORE` provides for a wide range of operations, including:

    * Index operations::

        data_table.c.data['some key'] == 'some value'

    * Containment operations::

        data_table.c.data.has_key('some key')

        data_table.c.data.has_all(['one', 'two', 'three'])

    * Concatenation::

        data_table.c.data + {"k1": "v1"}

    For a full list of special methods see
    :class:`.HSTORE.comparator_factory`.

    For usage with the SQLAlchemy ORM, it may be desirable to combine
    the usage of :class:`.HSTORE` with the :class:`.MutableDict` dictionary
    provided by the :mod:`sqlalchemy.ext.mutable`
    extension. This extension will allow "in-place" changes to the
    dictionary, e.g. addition of new keys or replacement/removal of existing
    keys to/from the current dictionary, to produce events which will be
    detected by the unit of work::

        from sqlalchemy.ext.mutable import MutableDict

        class MyClass(Base):
            __tablename__ = 'data_table'

            id = Column(Integer, primary_key=True)
            data = Column(MutableDict.as_mutable(HSTORE))

        my_object = session.query(MyClass).one()

        # in-place mutation, requires Mutable extension
        # in order for the ORM to detect
        my_object.data['some_key'] = 'some value'

        session.commit()

    When the :mod:`sqlalchemy.ext.mutable` extension is not used, the ORM
    will not be alerted to any changes to the contents of an existing
    dictionary, unless that dictionary value is re-assigned to the
    HSTORE-attribute itself, thus generating a change event.

    .. versionadded:: 0.8

    .. seealso::

        :class:`.hstore` - render the PostgreSQL ``hstore()`` function.

    """

    __visit_name__ = 'HSTORE'
    hashable = False
    text_type = sqltypes.Text()

    def __init__(self, text_type=None):
        """Construct a new :class:`.HSTORE`.

        :param text_type: the type that should be used for indexed values.
         Defaults to :class:`.types.Text`.

         .. versionadded:: 1.1.0

        """
        if text_type is not None:
            self.text_type = text_type

    class Comparator(
            sqltypes.Indexable.Comparator, sqltypes.Concatenable.Comparator):
        """Define comparison operations for :class:`.HSTORE`."""

        def has_key(self, other):
            """Boolean expression.  Test for presence of a key.  Note that the
            key may be a SQLA expression.
            """
            return self.operate(HAS_KEY, other, result_type=sqltypes.Boolean)

        def has_all(self, other):
            """Boolean expression.  Test for presence of all keys in the
            hstore.
            """
            return self.operate(HAS_ALL, other, result_type=sqltypes.Boolean)

        def has_any(self, other):
            """Boolean expression.  Test for presence of any key in the
            hstore.
            """
            return self.operate(HAS_ANY, other, result_type=sqltypes.Boolean)

        def contains(self, other, **kwargs):
            """Boolean expression.  Test if keys (or array) are a superset
            of / contain the keys of the argument hstore expression.
            """
            return self.operate(CONTAINS, other, result_type=sqltypes.Boolean)

        def contained_by(self, other):
            """Boolean expression.  Test if keys are a proper subset of the
            keys of the argument hstore expression.
            """
            return self.operate(
                CONTAINED_BY, other, result_type=sqltypes.Boolean)

        def _setup_getitem(self, index):
            return GETITEM, index, self.type.text_type

        def defined(self, key):
            """Boolean expression.  Test for presence of a non-NULL value for
            the key.  Note that the key may be a SQLA expression.
            """
            return _HStoreDefinedFunction(self.expr, key)

        def delete(self, key):
            """HStore expression.  Returns the contents of this hstore with
            the given key deleted.  Note that the key may be a SQLA
            expression.
            """
            if isinstance(key, dict):
                key = _serialize_hstore(key)
            return _HStoreDeleteFunction(self.expr, key)

        def slice(self, array):
            """HStore expression.  Returns a subset of an hstore defined by
            array of keys.
            """
            return _HStoreSliceFunction(self.expr, array)

        def keys(self):
            """Text array expression.  Returns array of keys."""
            return _HStoreKeysFunction(self.expr)

        def vals(self):
            """Text array expression.  Returns array of values."""
            return _HStoreValsFunction(self.expr)

        def array(self):
            """Text array expression.  Returns array of alternating keys and
            values.
            """
            return _HStoreArrayFunction(self.expr)

        def matrix(self):
            """Text array expression.  Returns array of [key, value] pairs."""
            return _HStoreMatrixFunction(self.expr)

    comparator_factory = Comparator

    def bind_processor(self, dialect):
        if util.py2k:
            encoding = dialect.encoding

            def process(value):
                if isinstance(value, dict):
                    return _serialize_hstore(value).encode(encoding)
                else:
                    return value
        else:
            def process(value):
                if isinstance(value, dict):
                    return _serialize_hstore(value)
                else:
                    return value
        return process

    def result_processor(self, dialect, coltype):
        if util.py2k:
            encoding = dialect.encoding

            def process(value):
                if value is not None:
                    return _parse_hstore(value.decode(encoding))
                else:
                    return value
        else:
            def process(value):
                if value is not None:
                    return _parse_hstore(value)
                else:
                    return value
        return process


ischema_names['hstore'] = HSTORE
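

def _example_hstore_operators(data_table, connection):
    # Hedged usage sketch, not part of the original module: a few of the
    # Comparator operations defined above, against the hypothetical
    # ``data_table`` from the HSTORE docstring.
    from sqlalchemy import select

    # WHERE data ? 'some key'
    connection.execute(
        select([data_table]).where(data_table.c.data.has_key('some key')))

    # SELECT data -> 'some key'  (typed as ``text_type``, Text by default)
    connection.execute(select([data_table.c.data['some key']]))

    # WHERE data @> '"k1"=>"v1"'  (containment)
    connection.execute(
        select([data_table]).where(data_table.c.data.contains({'k1': 'v1'})))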


class hstore(sqlfunc.GenericFunction):
    """Construct an hstore value within a SQL expression using the
    PostgreSQL ``hstore()`` function.

    The :class:`.hstore` function accepts one or two arguments as described
    in the PostgreSQL documentation.

    E.g.::

        from sqlalchemy.dialects.postgresql import array, hstore

        select([hstore('key1', 'value1')])

        select([
            hstore(
                array(['key1', 'key2', 'key3']),
                array(['value1', 'value2', 'value3'])
            )
        ])

    .. versionadded:: 0.8

    .. seealso::

        :class:`.HSTORE` - the PostgreSQL ``HSTORE`` datatype.

    """
    type = HSTORE
    name = 'hstore'


class _HStoreDefinedFunction(sqlfunc.GenericFunction):
    type = sqltypes.Boolean
    name = 'defined'


class _HStoreDeleteFunction(sqlfunc.GenericFunction):
    type = HSTORE
    name = 'delete'


class _HStoreSliceFunction(sqlfunc.GenericFunction):
    type = HSTORE
    name = 'slice'


class _HStoreKeysFunction(sqlfunc.GenericFunction):
    type = ARRAY(sqltypes.Text)
    name = 'akeys'


class _HStoreValsFunction(sqlfunc.GenericFunction):
    type = ARRAY(sqltypes.Text)
    name = 'avals'


class _HStoreArrayFunction(sqlfunc.GenericFunction):
    type = ARRAY(sqltypes.Text)
    name = 'hstore_to_array'


class _HStoreMatrixFunction(sqlfunc.GenericFunction):
    type = ARRAY(sqltypes.Text)
    name = 'hstore_to_matrix'


#
# parsing.  note that none of this is used with the psycopg2 backend,
# which provides its own native extensions.
#

# My best guess at the parsing rules of hstore literals, since no formal
# grammar is given.  This is mostly reverse engineered from PG's input parser
# behavior.
HSTORE_PAIR_RE = re.compile(r"""
(
  "(?P<key> (\\ . | [^"])* )"       # Quoted key
)
[ ]* => [ ]*    # Pair operator, optional adjoining whitespace
(
    (?P<value_null> NULL )          # NULL value
  | "(?P<value> (\\ . | [^"])* )"   # Quoted value
)
""", re.VERBOSE)

HSTORE_DELIMITER_RE = re.compile(r"""
[ ]* , [ ]*
""", re.VERBOSE)


def _parse_error(hstore_str, pos):
    """format an unmarshalling error."""

    ctx = 20
    hslen = len(hstore_str)

    parsed_tail = hstore_str[max(pos - ctx - 1, 0):min(pos, hslen)]
    residual = hstore_str[min(pos, hslen):min(pos + ctx + 1, hslen)]

    if len(parsed_tail) > ctx:
        parsed_tail = '[...]' + parsed_tail[1:]
    if len(residual) > ctx:
        residual = residual[:-1] + '[...]'

    return "After %r, could not parse residual at position %d: %r" % (
        parsed_tail, pos, residual)


def _parse_hstore(hstore_str):
    """Parse an hstore from its literal string representation.

    Attempts to approximate PG's hstore input parsing rules as closely as
    possible. Although currently this is not strictly necessary, since the
    current implementation of hstore's output syntax is stricter than what it
    accepts as input, the documentation makes no guarantees that will always
    be the case.

    """
    result = {}
    pos = 0
    pair_match = HSTORE_PAIR_RE.match(hstore_str)

    while pair_match is not None:
        key = pair_match.group('key').replace(r'\"', '"').replace(
            "\\\\", "\\")
        if pair_match.group('value_null'):
            value = None
        else:
            value = pair_match.group('value').replace(
                r'\"', '"').replace("\\\\", "\\")
        result[key] = value

        pos += pair_match.end()

        delim_match = HSTORE_DELIMITER_RE.match(hstore_str[pos:])
        if delim_match is not None:
            pos += delim_match.end()

        pair_match = HSTORE_PAIR_RE.match(hstore_str[pos:])

    if pos != len(hstore_str):
        raise ValueError(_parse_error(hstore_str, pos))

    return result


def _serialize_hstore(val):
    """Serialize a dictionary into an hstore literal.  Keys and values must
    both be strings (except None for values).

    """
    def esc(s, position):
        if position == 'value' and s is None:
            return 'NULL'
        elif isinstance(s, util.string_types):
            return '"%s"' % s.replace("\\", "\\\\").replace('"', r'\"')
        else:
            raise ValueError("%r in %s position is not a string." %
                             (s, position))

    return ', '.join('%s=>%s' % (esc(k, 'key'), esc(v, 'value'))
                     for k, v in val.items())
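

def _example_hstore_roundtrip():
    # Hedged sketch, not part of the original module: the two helpers
    # above are inverses for plain string dicts (serialization order
    # follows dict insertion order on modern Pythons).
    literal = _serialize_hstore({'a': '1', 'b': None})
    assert literal == '"a"=>"1", "b"=>NULL'
    assert _parse_hstore(literal) == {'a': '1', 'b': None}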
@ -1,301 +0,0 @@
# postgresql/json.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from __future__ import absolute_import

import json
import collections

from .base import ischema_names, colspecs
from ... import types as sqltypes
from ...sql import operators
from ...sql import elements
from ... import util

__all__ = ('JSON', 'JSONB')

idx_precedence = operators._PRECEDENCE[operators.json_getitem_op]

ASTEXT = operators.custom_op(
    "->>", precedence=idx_precedence, natural_self_precedent=True,
    eager_grouping=True
)

JSONPATH_ASTEXT = operators.custom_op(
    "#>>", precedence=idx_precedence, natural_self_precedent=True,
    eager_grouping=True
)


HAS_KEY = operators.custom_op(
    "?", precedence=idx_precedence, natural_self_precedent=True,
    eager_grouping=True
)

HAS_ALL = operators.custom_op(
    "?&", precedence=idx_precedence, natural_self_precedent=True,
    eager_grouping=True
)

HAS_ANY = operators.custom_op(
    "?|", precedence=idx_precedence, natural_self_precedent=True,
    eager_grouping=True
)

CONTAINS = operators.custom_op(
    "@>", precedence=idx_precedence, natural_self_precedent=True,
    eager_grouping=True
)

CONTAINED_BY = operators.custom_op(
    "<@", precedence=idx_precedence, natural_self_precedent=True,
    eager_grouping=True
)


class JSONPathType(sqltypes.JSON.JSONPathType):
    def bind_processor(self, dialect):
        super_proc = self.string_bind_processor(dialect)

        def process(value):
            assert isinstance(value, collections.Sequence)
            tokens = [util.text_type(elem) for elem in value]
            value = "{%s}" % (", ".join(tokens))
            if super_proc:
                value = super_proc(value)
            return value

        return process

    def literal_processor(self, dialect):
        super_proc = self.string_literal_processor(dialect)

        def process(value):
            assert isinstance(value, collections.Sequence)
            tokens = [util.text_type(elem) for elem in value]
            value = "{%s}" % (", ".join(tokens))
            if super_proc:
                value = super_proc(value)
            return value

        return process

colspecs[sqltypes.JSON.JSONPathType] = JSONPathType
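

def _example_json_path_rendering():
    # Hedged sketch, not part of the original module: the processors
    # above render a path sequence as a PostgreSQL path literal before
    # any string-level processing runs.
    tokens = [util.text_type(elem) for elem in ('key_1', 'key_2', 5)]
    assert "{%s}" % (", ".join(tokens)) == '{key_1, key_2, 5}'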


class JSON(sqltypes.JSON):
    """Represent the PostgreSQL JSON type.

    This type is a specialization of the Core-level :class:`.types.JSON`
    type.   Be sure to read the documentation for :class:`.types.JSON` for
    important tips regarding treatment of NULL values and ORM use.

    .. versionchanged:: 1.1 :class:`.postgresql.JSON` is now a PostgreSQL-
       specific specialization of the new :class:`.types.JSON` type.

    The operators provided by the PostgreSQL version of :class:`.JSON`
    include:

    * Index operations (the ``->`` operator)::

        data_table.c.data['some key']

        data_table.c.data[5]


    * Index operations returning text (the ``->>`` operator)::

        data_table.c.data['some key'].astext == 'some value'

    * Index operations with CAST
      (equivalent to ``CAST(col ->> ['some key'] AS <type>)``)::

        data_table.c.data['some key'].astext.cast(Integer) == 5

    * Path index operations (the ``#>`` operator)::

        data_table.c.data[('key_1', 'key_2', 5, ..., 'key_n')]

    * Path index operations returning text (the ``#>>`` operator)::

        data_table.c.data[('key_1', 'key_2', 5, ..., 'key_n')].astext == \
            'some value'

    .. versionchanged:: 1.1  The :meth:`.ColumnElement.cast` operator on
       JSON objects now requires that the :attr:`.JSON.Comparator.astext`
       modifier be called explicitly, if the cast works only from a textual
       string.

    Index operations return an expression object whose type defaults to
    :class:`.JSON`, so that further JSON-oriented instructions
    may be called upon the result type.

    Custom serializers and deserializers are specified at the dialect level,
    that is, using :func:`.create_engine`.  The reason for this is that when
    using psycopg2, the DBAPI only allows serializers at the per-cursor
    or per-connection level.   E.g.::

        engine = create_engine("postgresql://scott:tiger@localhost/test",
                               json_serializer=my_serialize_fn,
                               json_deserializer=my_deserialize_fn
                               )

    When using the psycopg2 dialect, the json_deserializer is registered
    against the database using ``psycopg2.extras.register_default_json``.

    .. seealso::

        :class:`.types.JSON` - Core level JSON type

        :class:`.JSONB`

    """

    astext_type = sqltypes.Text()

    def __init__(self, none_as_null=False, astext_type=None):
        """Construct a :class:`.JSON` type.

        :param none_as_null: if True, persist the value ``None`` as a
         SQL NULL value, not the JSON encoding of ``null``.   Note that
         when this flag is False, the :func:`.null` construct can still
         be used to persist a NULL value::

             from sqlalchemy import null
             conn.execute(table.insert(), data=null())

         .. versionchanged:: 0.9.8 - Added ``none_as_null``, and :func:`.null`
            is now supported in order to persist a NULL value.

         .. seealso::

              :attr:`.JSON.NULL`

        :param astext_type: the type to use for the
         :attr:`.JSON.Comparator.astext`
         accessor on indexed attributes.  Defaults to :class:`.types.Text`.

         .. versionadded:: 1.1

        """
        super(JSON, self).__init__(none_as_null=none_as_null)
        if astext_type is not None:
            self.astext_type = astext_type

    class Comparator(sqltypes.JSON.Comparator):
        """Define comparison operations for :class:`.JSON`."""

        @property
        def astext(self):
            """On an indexed expression, use the "astext" (e.g. "->>")
            conversion when rendered in SQL.

            E.g.::

                select([data_table.c.data['some key'].astext])

            .. seealso::

                :meth:`.ColumnElement.cast`

            """

            if isinstance(self.expr.right.type, sqltypes.JSON.JSONPathType):
                return self.expr.left.operate(
                    JSONPATH_ASTEXT,
                    self.expr.right, result_type=self.type.astext_type)
            else:
                return self.expr.left.operate(
                    ASTEXT, self.expr.right, result_type=self.type.astext_type)

    comparator_factory = Comparator


colspecs[sqltypes.JSON] = JSON
ischema_names['json'] = JSON
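

def _example_json_astext(data_table, connection):
    # Hedged usage sketch, not part of the original module: the astext
    # comparator above picks ->> for single keys and #>> for path tuples.
    # ``data_table`` is the hypothetical table from the docstring.
    from sqlalchemy import select, Integer

    # data ->> 'some key' = 'some value'
    connection.execute(
        select([data_table]).where(
            data_table.c.data['some key'].astext == 'some value'))

    # CAST(data ->> 'count' AS INTEGER) = 5
    connection.execute(
        select([data_table]).where(
            data_table.c.data['count'].astext.cast(Integer) == 5))

    # data #>> '{key_1, key_2}' = 'some value'
    connection.execute(
        select([data_table]).where(
            data_table.c.data[('key_1', 'key_2')].astext == 'some value'))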


class JSONB(JSON):
    """Represent the PostgreSQL JSONB type.

    The :class:`.JSONB` type stores arbitrary JSONB format data, e.g.::

        data_table = Table('data_table', metadata,
                           Column('id', Integer, primary_key=True),
                           Column('data', JSONB)
                           )

        with engine.connect() as conn:
            conn.execute(
                data_table.insert(),
                data={"key1": "value1", "key2": "value2"}
            )

    The :class:`.JSONB` type includes all operations provided by
    :class:`.JSON`, including the same behaviors for indexing operations.
    It also adds additional operators specific to JSONB, including
    :meth:`.JSONB.Comparator.has_key`, :meth:`.JSONB.Comparator.has_all`,
    :meth:`.JSONB.Comparator.has_any`, :meth:`.JSONB.Comparator.contains`,
    and :meth:`.JSONB.Comparator.contained_by`.

    Like the :class:`.JSON` type, the :class:`.JSONB` type does not detect
    in-place changes when used with the ORM, unless the
    :mod:`sqlalchemy.ext.mutable` extension is used.

    Custom serializers and deserializers
    are shared with the :class:`.JSON` class, using the ``json_serializer``
    and ``json_deserializer`` keyword arguments.  These must be specified
    at the dialect level using :func:`.create_engine`.  When using
    psycopg2, the serializers are associated with the jsonb type using
    ``psycopg2.extras.register_default_jsonb`` on a per-connection basis,
    in the same way that ``psycopg2.extras.register_default_json`` is used
    to register these handlers with the json type.

    .. versionadded:: 0.9.7

    .. seealso::

        :class:`.JSON`

    """

    __visit_name__ = 'JSONB'

    class Comparator(JSON.Comparator):
        """Define comparison operations for :class:`.JSONB`."""

        def has_key(self, other):
            """Boolean expression.  Test for presence of a key.  Note that the
            key may be a SQLA expression.
            """
            return self.operate(HAS_KEY, other, result_type=sqltypes.Boolean)

        def has_all(self, other):
            """Boolean expression.  Test for presence of all keys in jsonb
            """
            return self.operate(HAS_ALL, other, result_type=sqltypes.Boolean)

        def has_any(self, other):
            """Boolean expression.  Test for presence of any key in jsonb
            """
            return self.operate(HAS_ANY, other, result_type=sqltypes.Boolean)

        def contains(self, other, **kwargs):
            """Boolean expression.  Test if keys (or array) are a superset
            of / contain the keys of the argument jsonb expression.
            """
            return self.operate(CONTAINS, other, result_type=sqltypes.Boolean)

        def contained_by(self, other):
            """Boolean expression.  Test if keys are a proper subset of the
            keys of the argument jsonb expression.
            """
            return self.operate(
                CONTAINED_BY, other, result_type=sqltypes.Boolean)

    comparator_factory = Comparator

ischema_names['jsonb'] = JSONB
@ -1,265 +0,0 @@
# postgresql/pg8000.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors <see AUTHORS
# file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: postgresql+pg8000
    :name: pg8000
    :dbapi: pg8000
    :connectstring: \
postgresql+pg8000://user:password@host:port/dbname[?key=value&key=value...]
    :url: https://pythonhosted.org/pg8000/


.. _pg8000_unicode:

Unicode
-------

pg8000 will encode / decode string values between it and the server using the
PostgreSQL ``client_encoding`` parameter; by default this is the value in
the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``.
Typically, this can be changed to ``utf-8``, as a more useful default::

    #client_encoding = sql_ascii # actually, defaults to database
                                 # encoding
    client_encoding = utf8

The ``client_encoding`` can be overridden for a session by executing
the SQL::

    SET CLIENT_ENCODING TO 'utf8';

SQLAlchemy will execute this SQL on all new connections based on the value
passed to :func:`.create_engine` using the ``client_encoding`` parameter::

    engine = create_engine(
        "postgresql+pg8000://user:pass@host/dbname", client_encoding='utf8')


.. _pg8000_isolation_level:

pg8000 Transaction Isolation Level
----------------------------------

The pg8000 dialect offers the same isolation level settings as that
of the :ref:`psycopg2 <psycopg2_isolation_level>` dialect:

* ``READ COMMITTED``
* ``READ UNCOMMITTED``
* ``REPEATABLE READ``
* ``SERIALIZABLE``
* ``AUTOCOMMIT``

.. versionadded:: 0.9.5 support for AUTOCOMMIT isolation level when using
   pg8000.

.. seealso::

    :ref:`postgresql_isolation_level`

    :ref:`psycopg2_isolation_level`


"""
from ... import util, exc
import decimal
from ... import processors
from ... import types as sqltypes
from .base import (
    PGDialect, PGCompiler, PGIdentifierPreparer, PGExecutionContext,
    _DECIMAL_TYPES, _FLOAT_TYPES, _INT_TYPES)
import re
from sqlalchemy.dialects.postgresql.json import JSON


class _PGNumeric(sqltypes.Numeric):
    def result_processor(self, dialect, coltype):
        if self.asdecimal:
            if coltype in _FLOAT_TYPES:
                return processors.to_decimal_processor_factory(
                    decimal.Decimal, self._effective_decimal_return_scale)
            elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
                # pg8000 returns Decimal natively for 1700
                return None
            else:
                raise exc.InvalidRequestError(
                    "Unknown PG numeric type: %d" % coltype)
        else:
            if coltype in _FLOAT_TYPES:
                # pg8000 returns float natively for 701
                return None
            elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
                return processors.to_float
            else:
                raise exc.InvalidRequestError(
                    "Unknown PG numeric type: %d" % coltype)


class _PGNumericNoBind(_PGNumeric):
    def bind_processor(self, dialect):
        return None


class _PGJSON(JSON):

    def result_processor(self, dialect, coltype):
        if dialect._dbapi_version > (1, 10, 1):
            return None  # Has native JSON
        else:
            return super(_PGJSON, self).result_processor(dialect, coltype)


class PGExecutionContext_pg8000(PGExecutionContext):
    pass


class PGCompiler_pg8000(PGCompiler):
    def visit_mod_binary(self, binary, operator, **kw):
        return self.process(binary.left, **kw) + " %% " + \
            self.process(binary.right, **kw)

    def post_process_text(self, text):
        if '%%' in text:
            util.warn("The SQLAlchemy postgresql dialect "
                      "now automatically escapes '%' in text() "
                      "expressions to '%%'.")
        return text.replace('%', '%%')


class PGIdentifierPreparer_pg8000(PGIdentifierPreparer):
    def _escape_identifier(self, value):
        value = value.replace(self.escape_quote, self.escape_to_quote)
        return value.replace('%', '%%')
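

def _example_percent_escaping():
    # Hedged sketch, not part of the original module: pg8000 uses the
    # "format" paramstyle, so a literal '%' in SQL text or in a quoted
    # identifier must be doubled to avoid being read as a parameter
    # marker -- this mirrors what post_process_text() and
    # _escape_identifier() above do.
    text = "SELECT 'discount: 10%'"
    assert text.replace('%', '%%') == "SELECT 'discount: 10%%'"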


class PGDialect_pg8000(PGDialect):
    driver = 'pg8000'

    supports_unicode_statements = True

    supports_unicode_binds = True

    default_paramstyle = 'format'
    supports_sane_multi_rowcount = True
    execution_ctx_cls = PGExecutionContext_pg8000
    statement_compiler = PGCompiler_pg8000
    preparer = PGIdentifierPreparer_pg8000
    description_encoding = 'use_encoding'

    colspecs = util.update_copy(
        PGDialect.colspecs,
        {
            sqltypes.Numeric: _PGNumericNoBind,
            sqltypes.Float: _PGNumeric,
            JSON: _PGJSON,
            sqltypes.JSON: _PGJSON
        }
    )

    def __init__(self, client_encoding=None, **kwargs):
        PGDialect.__init__(self, **kwargs)
        self.client_encoding = client_encoding

    def initialize(self, connection):
        self.supports_sane_multi_rowcount = self._dbapi_version >= (1, 9, 14)
        super(PGDialect_pg8000, self).initialize(connection)

    @util.memoized_property
    def _dbapi_version(self):
        if self.dbapi and hasattr(self.dbapi, '__version__'):
            return tuple(
                [
                    int(x) for x in re.findall(
                        r'(\d+)(?:[-\.]?|$)', self.dbapi.__version__)])
        else:
            return (99, 99, 99)
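
    def _example_dbapi_version_parse(self):
        # Hedged sketch, not part of the original class: how the regex in
        # _dbapi_version above reduces a version string to an int tuple.
        assert tuple(
            int(x) for x in re.findall(r'(\d+)(?:[-\.]?|$)', '1.10.6')
        ) == (1, 10, 6)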

    @classmethod
    def dbapi(cls):
        return __import__('pg8000')

    def create_connect_args(self, url):
        opts = url.translate_connect_args(username='user')
        if 'port' in opts:
            opts['port'] = int(opts['port'])
        opts.update(url.query)
        return ([], opts)

    def is_disconnect(self, e, connection, cursor):
        return "connection is closed" in str(e)

    def set_isolation_level(self, connection, level):
        level = level.replace('_', ' ')

        # adjust for ConnectionFairy possibly being present
        if hasattr(connection, 'connection'):
            connection = connection.connection

        if level == 'AUTOCOMMIT':
            connection.autocommit = True
        elif level in self._isolation_lookup:
            connection.autocommit = False
            cursor = connection.cursor()
            cursor.execute(
                "SET SESSION CHARACTERISTICS AS TRANSACTION "
                "ISOLATION LEVEL %s" % level)
            cursor.execute("COMMIT")
            cursor.close()
        else:
            raise exc.ArgumentError(
                "Invalid value '%s' for isolation_level. "
                "Valid isolation levels for %s are %s or AUTOCOMMIT" %
                (level, self.name, ", ".join(self._isolation_lookup))
            )

    def set_client_encoding(self, connection, client_encoding):
        # adjust for ConnectionFairy possibly being present
        if hasattr(connection, 'connection'):
            connection = connection.connection

        cursor = connection.cursor()
        cursor.execute("SET CLIENT_ENCODING TO '" + client_encoding + "'")
        cursor.execute("COMMIT")
        cursor.close()

    def do_begin_twophase(self, connection, xid):
        connection.connection.tpc_begin((0, xid, ''))

    def do_prepare_twophase(self, connection, xid):
        connection.connection.tpc_prepare()

    def do_rollback_twophase(
            self, connection, xid, is_prepared=True, recover=False):
        connection.connection.tpc_rollback((0, xid, ''))

    def do_commit_twophase(
            self, connection, xid, is_prepared=True, recover=False):
        connection.connection.tpc_commit((0, xid, ''))

    def do_recover_twophase(self, connection):
        return [row[1] for row in connection.connection.tpc_recover()]

    def on_connect(self):
        fns = []
        if self.client_encoding is not None:
            def on_connect(conn):
                self.set_client_encoding(conn, self.client_encoding)
            fns.append(on_connect)

        if self.isolation_level is not None:
            def on_connect(conn):
                self.set_isolation_level(conn, self.isolation_level)
            fns.append(on_connect)

        if len(fns) > 0:
            def on_connect(conn):
                for fn in fns:
                    fn(conn)
            return on_connect
        else:
            return None

dialect = PGDialect_pg8000
@ -1,702 +0,0 @@
|
||||
# postgresql/psycopg2.py
|
||||
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""
|
||||
.. dialect:: postgresql+psycopg2
|
||||
:name: psycopg2
|
||||
:dbapi: psycopg2
|
||||
:connectstring: postgresql+psycopg2://user:password@host:port/dbname\
|
||||
[?key=value&key=value...]
|
||||
:url: http://pypi.python.org/pypi/psycopg2/
|
||||
|
||||
psycopg2 Connect Arguments
|
||||
-----------------------------------
|
||||
|
||||
psycopg2-specific keyword arguments which are accepted by
|
||||
:func:`.create_engine()` are:
|
||||
|
||||
* ``server_side_cursors``: Enable the usage of "server side cursors" for SQL
|
||||
statements which support this feature. What this essentially means from a
|
||||
psycopg2 point of view is that the cursor is created using a name, e.g.
|
||||
``connection.cursor('some name')``, which has the effect that result rows
|
||||
are not immediately pre-fetched and buffered after statement execution, but
|
||||
are instead left on the server and only retrieved as needed. SQLAlchemy's
|
||||
:class:`~sqlalchemy.engine.ResultProxy` uses special row-buffering
|
||||
behavior when this feature is enabled, such that groups of 100 rows at a
|
||||
time are fetched over the wire to reduce conversational overhead.
|
||||
Note that the :paramref:`.Connection.execution_options.stream_results`
|
||||
execution option is a more targeted
|
||||
way of enabling this mode on a per-execution basis.
|
||||
* ``use_native_unicode``: Enable the usage of Psycopg2 "native unicode" mode
|
||||
per connection. True by default.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`psycopg2_disable_native_unicode`
|
||||
|
||||
* ``isolation_level``: This option, available for all PostgreSQL dialects,
|
||||
includes the ``AUTOCOMMIT`` isolation level when using the psycopg2
|
||||
dialect.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`psycopg2_isolation_level`
|
||||
|
||||
* ``client_encoding``: sets the client encoding in a libpq-agnostic way,
|
||||
using psycopg2's ``set_client_encoding()`` method.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`psycopg2_unicode`
|
||||
|
||||
Unix Domain Connections
|
||||
------------------------
|
||||
|
||||
psycopg2 supports connecting via Unix domain connections. When the ``host``
|
||||
portion of the URL is omitted, SQLAlchemy passes ``None`` to psycopg2,
|
||||
which specifies Unix-domain communication rather than TCP/IP communication::
|
||||
|
||||
create_engine("postgresql+psycopg2://user:password@/dbname")
|
||||
|
||||
By default, the socket file used is to connect to a Unix-domain socket
|
||||
in ``/tmp``, or whatever socket directory was specified when PostgreSQL
|
||||
was built. This value can be overridden by passing a pathname to psycopg2,
|
||||
using ``host`` as an additional keyword argument::
|
||||
|
||||
create_engine("postgresql+psycopg2://user:password@/dbname?\
|
||||
host=/var/lib/postgresql")
|
||||
|
||||
See also:
|
||||
|
||||
`PQconnectdbParams <http://www.postgresql.org/docs/9.1/static/\
|
||||
libpq-connect.html#LIBPQ-PQCONNECTDBPARAMS>`_
|
||||
|
||||
.. _psycopg2_execution_options:
|
||||
|
||||
Per-Statement/Connection Execution Options
|
||||
-------------------------------------------
|
||||
|
||||
The following DBAPI-specific options are respected when used with
|
||||
:meth:`.Connection.execution_options`, :meth:`.Executable.execution_options`,
|
||||
:meth:`.Query.execution_options`, in addition to those not specific to DBAPIs:
|
||||
|
||||
* ``isolation_level`` - Set the transaction isolation level for the lifespan of a
|
||||
:class:`.Connection` (can only be set on a connection, not a statement
|
||||
or query). See :ref:`psycopg2_isolation_level`.
|
||||
|
||||
* ``stream_results`` - Enable or disable usage of psycopg2 server side cursors -
|
||||
this feature makes use of "named" cursors in combination with special
|
||||
result handling methods so that result rows are not fully buffered.
|
||||
If ``None`` or not set, the ``server_side_cursors`` option of the
|
||||
:class:`.Engine` is used.
|
||||
|
||||
* ``max_row_buffer`` - when using ``stream_results``, an integer value that
|
||||
specifies the maximum number of rows to buffer at a time. This is
|
||||
interpreted by the :class:`.BufferedRowResultProxy`, and if omitted the
|
||||
buffer will grow to ultimately store 1000 rows at a time.
|
||||
|
||||
.. versionadded:: 1.0.6
|
||||
|
||||
.. _psycopg2_unicode:
|
||||
|
||||
Unicode with Psycopg2
|
||||
----------------------
|
||||
|
||||
By default, the psycopg2 driver uses the ``psycopg2.extensions.UNICODE``
|
||||
extension, such that the DBAPI receives and returns all strings as Python
|
||||
Unicode objects directly - SQLAlchemy passes these values through without
|
||||
change. Psycopg2 here will encode/decode string values based on the
|
||||
current "client encoding" setting; by default this is the value in
|
||||
the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``.
|
||||
Typically, this can be changed to ``utf8``, as a more useful default::
|
||||
|
||||
# postgresql.conf file
|
||||
|
||||
# client_encoding = sql_ascii # actually, defaults to database
|
||||
# encoding
|
||||
client_encoding = utf8
|
||||
|
||||
A second way to affect the client encoding is to set it within Psycopg2
|
||||
locally. SQLAlchemy will call psycopg2's
|
||||
:meth:`psycopg2:connection.set_client_encoding` method
|
||||
on all new connections based on the value passed to
|
||||
:func:`.create_engine` using the ``client_encoding`` parameter::
|
||||
|
||||
# set_client_encoding() setting;
|
||||
# works for *all* PostgreSQL versions
|
||||
engine = create_engine("postgresql://user:pass@host/dbname",
|
||||
client_encoding='utf8')
|
||||
|
||||
This overrides the encoding specified in the PostgreSQL client configuration.
|
||||
When using the parameter in this way, the psycopg2 driver emits
|
||||
``SET client_encoding TO 'utf8'`` on the connection explicitly, and works
|
||||
in all PostgreSQL versions.
|
||||
|
||||
Note that the ``client_encoding`` setting as passed to :func:`.create_engine`
|
||||
is **not the same** as the more recently added ``client_encoding`` parameter
|
||||
now supported by libpq directly. This is enabled when ``client_encoding``
|
||||
is passed directly to ``psycopg2.connect()``, and from SQLAlchemy is passed
|
||||
using the :paramref:`.create_engine.connect_args` parameter::
|
||||
|
||||
# libpq direct parameter setting;
|
||||
# only works for PostgreSQL **9.1 and above**
|
||||
engine = create_engine("postgresql://user:pass@host/dbname",
|
||||
connect_args={'client_encoding': 'utf8'})
|
||||
|
||||
# using the query string is equivalent
|
||||
engine = create_engine("postgresql://user:pass@host/dbname?client_encoding=utf8")
|
||||
|
||||
The above parameter was only added to libpq as of version 9.1 of PostgreSQL,
|
||||
so using the previous method is better for cross-version support.
|
||||
|
||||
.. _psycopg2_disable_native_unicode:
|
||||
|
||||
Disabling Native Unicode
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
SQLAlchemy can also be instructed to skip the usage of the psycopg2
|
||||
``UNICODE`` extension and to instead utilize its own unicode encode/decode
|
||||
services, which are normally reserved only for those DBAPIs that don't
|
||||
fully support unicode directly. Passing ``use_native_unicode=False`` to
|
||||
:func:`.create_engine` will disable usage of ``psycopg2.extensions.UNICODE``.
|
||||
SQLAlchemy will instead encode data itself into Python bytestrings on the way
|
||||
in and coerce from bytes on the way back,
|
||||
using the value of the :func:`.create_engine` ``encoding`` parameter, which
|
||||
defaults to ``utf-8``.
|
||||
SQLAlchemy's own unicode encode/decode functionality is steadily becoming
|
||||
obsolete as most DBAPIs now support unicode fully.
|
||||
|
||||
Bound Parameter Styles
|
||||
----------------------
|
||||
|
||||
The default parameter style for the psycopg2 dialect is "pyformat", where
|
||||
SQL is rendered using ``%(paramname)s`` style. This format has the limitation
|
||||
that it does not accommodate the unusual case of parameter names that
|
||||
actually contain percent or parenthesis symbols; as SQLAlchemy in many cases
|
||||
generates bound parameter names based on the name of a column, the presence
|
||||
of these characters in a column name can lead to problems.
|
||||
|
||||
There are two solutions to the issue of a :class:`.schema.Column` that contains
|
||||
one of these characters in its name. One is to specify the
|
||||
:paramref:`.schema.Column.key` for columns that have such names::
|
||||
|
||||
measurement = Table('measurement', metadata,
|
||||
Column('Size (meters)', Integer, key='size_meters')
|
||||
)
|
||||
|
||||
Above, an INSERT statement such as ``measurement.insert()`` will use
|
||||
``size_meters`` as the parameter name, and a SQL expression such as
|
||||
``measurement.c.size_meters > 10`` will derive the bound parameter name
|
||||
from the ``size_meters`` key as well.
|
||||
|
||||
.. versionchanged:: 1.0.0 - SQL expressions will use :attr:`.Column.key`
|
||||
as the source of naming when anonymous bound parameters are created
|
||||
in SQL expressions; previously, this behavior only applied to
|
||||
:meth:`.Table.insert` and :meth:`.Table.update` parameter names.
|
||||
|
||||
The other solution is to use a positional format; psycopg2 allows use of the
|
||||
"format" paramstyle, which can be passed to
|
||||
:paramref:`.create_engine.paramstyle`::
|
||||
|
||||
engine = create_engine(
|
||||
'postgresql://scott:tiger@localhost:5432/test', paramstyle='format')
|
||||
|
||||
With the above engine, instead of a statement like::
|
||||
|
||||
INSERT INTO measurement ("Size (meters)") VALUES (%(Size (meters))s)
|
||||
{'Size (meters)': 1}
|
||||
|
||||
we instead see::
|
||||
|
||||
INSERT INTO measurement ("Size (meters)") VALUES (%s)
|
||||
(1, )
|
||||
|
||||
Where above, the dictionary style is converted into a tuple with positional
|
||||
style.
|
||||
|
||||
|
||||
Transactions
|
||||
------------
|
||||
|
||||
The psycopg2 dialect fully supports SAVEPOINT and two-phase commit operations.
|
||||
|
||||
.. _psycopg2_isolation_level:
|
||||
|
||||
Psycopg2 Transaction Isolation Level
|
||||
-------------------------------------
|
||||
|
||||
As discussed in :ref:`postgresql_isolation_level`,
|
||||
all PostgreSQL dialects support setting of transaction isolation level
|
||||
both via the ``isolation_level`` parameter passed to :func:`.create_engine`,
|
||||
as well as the ``isolation_level`` argument used by
|
||||
:meth:`.Connection.execution_options`. When using the psycopg2 dialect, these
|
||||
options make use of psycopg2's ``set_isolation_level()`` connection method,
|
||||
rather than emitting a PostgreSQL directive; this is because psycopg2's
|
||||
API-level setting is always emitted at the start of each transaction in any
|
||||
case.
|
||||
|
||||
The psycopg2 dialect supports these constants for isolation level:
|
||||
|
||||
* ``READ COMMITTED``
|
||||
* ``READ UNCOMMITTED``
|
||||
* ``REPEATABLE READ``
|
||||
* ``SERIALIZABLE``
|
||||
* ``AUTOCOMMIT``
|
||||
|
||||
.. versionadded:: 0.8.2 support for AUTOCOMMIT isolation level when using
|
||||
psycopg2.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`postgresql_isolation_level`
|
||||
|
||||
:ref:`pg8000_isolation_level`
|
||||
|
||||
|
||||
NOTICE logging
|
||||
---------------
|
||||
|
||||
The psycopg2 dialect will log PostgreSQL NOTICE messages via the
|
||||
``sqlalchemy.dialects.postgresql`` logger::
|
||||
|
||||
import logging
|
||||
logging.getLogger('sqlalchemy.dialects.postgresql').setLevel(logging.INFO)
|
||||
|
||||
.. _psycopg2_hstore::
|
||||
|
||||
HSTORE type
|
||||
------------
|
||||
|
||||
The ``psycopg2`` DBAPI includes an extension to natively handle marshalling of
|
||||
the HSTORE type. The SQLAlchemy psycopg2 dialect will enable this extension
|
||||
by default when psycopg2 version 2.4 or greater is used, and
|
||||
it is detected that the target database has the HSTORE type set up for use.
|
||||
In other words, when the dialect makes the first
|
||||
connection, a sequence like the following is performed:
|
||||
|
||||
1. Request the available HSTORE oids using
|
||||
``psycopg2.extras.HstoreAdapter.get_oids()``.
|
||||
If this function returns a list of HSTORE identifiers, we then determine
|
||||
that the ``HSTORE`` extension is present.
|
||||
This function is **skipped** if the version of psycopg2 installed is
|
||||
less than version 2.4.
|
||||
|
||||
2. If the ``use_native_hstore`` flag is at its default of ``True``, and
|
||||
we've detected that ``HSTORE`` oids are available, the
|
||||
``psycopg2.extensions.register_hstore()`` extension is invoked for all
|
||||
connections.
|
||||
|
||||
The ``register_hstore()`` extension has the effect of **all Python
|
||||
dictionaries being accepted as parameters regardless of the type of target
|
||||
column in SQL**. The dictionaries are converted by this extension into a
|
||||
textual HSTORE expression. If this behavior is not desired, disable the
|
||||
use of the hstore extension by setting ``use_native_hstore`` to ``False`` as
|
||||
follows::
|
||||
|
||||
engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test",
|
||||
use_native_hstore=False)
|
||||
|
||||
The ``HSTORE`` type is **still supported** when the
|
||||
``psycopg2.extensions.register_hstore()`` extension is not used. It merely
|
||||
means that the coercion between Python dictionaries and the HSTORE
|
||||
string format, on both the parameter side and the result side, will take
|
||||
place within SQLAlchemy's own marshalling logic, and not that of ``psycopg2``
|
||||
which may be more performant.
|
||||
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
import re
|
||||
import logging
|
||||
|
||||
from ... import util, exc
|
||||
import decimal
|
||||
from ... import processors
|
||||
from ...engine import result as _result
|
||||
from ...sql import expression
|
||||
from ... import types as sqltypes
|
||||
from .base import PGDialect, PGCompiler, \
|
||||
PGIdentifierPreparer, PGExecutionContext, \
|
||||
ENUM, _DECIMAL_TYPES, _FLOAT_TYPES,\
|
||||
_INT_TYPES, UUID
|
||||
from .hstore import HSTORE
|
||||
from .json import JSON, JSONB
|
||||
|
||||
try:
|
||||
from uuid import UUID as _python_UUID
|
||||
except ImportError:
|
||||
_python_UUID = None
|
||||
|
||||
|
||||
logger = logging.getLogger('sqlalchemy.dialects.postgresql')
|
||||
|
||||
|
||||
class _PGNumeric(sqltypes.Numeric):
    def bind_processor(self, dialect):
        return None

    def result_processor(self, dialect, coltype):
        if self.asdecimal:
            if coltype in _FLOAT_TYPES:
                return processors.to_decimal_processor_factory(
                    decimal.Decimal,
                    self._effective_decimal_return_scale)
            elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
                # pg8000 returns Decimal natively for 1700
                return None
            else:
                raise exc.InvalidRequestError(
                    "Unknown PG numeric type: %d" % coltype)
        else:
            if coltype in _FLOAT_TYPES:
                # pg8000 returns float natively for 701
                return None
            elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
                return processors.to_float
            else:
                raise exc.InvalidRequestError(
                    "Unknown PG numeric type: %d" % coltype)


class _PGEnum(ENUM):
    def result_processor(self, dialect, coltype):
        if self.native_enum and util.py2k and self.convert_unicode is True:
            # we can't easily use PG's extensions here because
            # the OID is on the fly, and we need to give it a python
            # function anyway - not really worth it.
            self.convert_unicode = "force_nocheck"
        return super(_PGEnum, self).result_processor(dialect, coltype)


class _PGHStore(HSTORE):
    def bind_processor(self, dialect):
        if dialect._has_native_hstore:
            return None
        else:
            return super(_PGHStore, self).bind_processor(dialect)

    def result_processor(self, dialect, coltype):
        if dialect._has_native_hstore:
            return None
        else:
            return super(_PGHStore, self).result_processor(dialect, coltype)


class _PGJSON(JSON):

    def result_processor(self, dialect, coltype):
        if dialect._has_native_json:
            return None
        else:
            return super(_PGJSON, self).result_processor(dialect, coltype)


class _PGJSONB(JSONB):

    def result_processor(self, dialect, coltype):
        if dialect._has_native_jsonb:
            return None
        else:
            return super(_PGJSONB, self).result_processor(dialect, coltype)


class _PGUUID(UUID):
    def bind_processor(self, dialect):
        if not self.as_uuid and dialect.use_native_uuid:
            def process(value):
                if value is not None:
                    value = _python_UUID(value)
                return value
            return process

    def result_processor(self, dialect, coltype):
        if not self.as_uuid and dialect.use_native_uuid:
            def process(value):
                if value is not None:
                    value = str(value)
                return value
            return process


_server_side_id = util.counter()


class PGExecutionContext_psycopg2(PGExecutionContext):
    def create_server_side_cursor(self):
        # use server-side cursors:
        # http://lists.initd.org/pipermail/psycopg/2007-January/005251.html
        ident = "c_%s_%s" % (hex(id(self))[2:],
                             hex(_server_side_id())[2:])
        return self._dbapi_connection.cursor(ident)

    def get_result_proxy(self):
        # TODO: ouch
        if logger.isEnabledFor(logging.INFO):
            self._log_notices(self.cursor)

        if self._is_server_side:
            return _result.BufferedRowResultProxy(self)
        else:
            return _result.ResultProxy(self)

    def _log_notices(self, cursor):
        for notice in cursor.connection.notices:
            # NOTICE messages have a
            # newline character at the end
            logger.info(notice.rstrip())

        cursor.connection.notices[:] = []


class PGCompiler_psycopg2(PGCompiler):
    def visit_mod_binary(self, binary, operator, **kw):
        # double the percent sign, as psycopg2's pyformat paramstyle
        # would otherwise treat a lone "%" as a parameter marker
        return self.process(binary.left, **kw) + " %% " + \
            self.process(binary.right, **kw)

    def post_process_text(self, text):
        return text.replace('%', '%%')


class PGIdentifierPreparer_psycopg2(PGIdentifierPreparer):
    def _escape_identifier(self, value):
        value = value.replace(self.escape_quote, self.escape_to_quote)
        return value.replace('%', '%%')


class PGDialect_psycopg2(PGDialect):
    driver = 'psycopg2'
    if util.py2k:
        supports_unicode_statements = False

    supports_server_side_cursors = True

    default_paramstyle = 'pyformat'
    # set to true based on psycopg2 version
    supports_sane_multi_rowcount = False
    execution_ctx_cls = PGExecutionContext_psycopg2
    statement_compiler = PGCompiler_psycopg2
    preparer = PGIdentifierPreparer_psycopg2
    psycopg2_version = (0, 0)

    FEATURE_VERSION_MAP = dict(
        native_json=(2, 5),
        native_jsonb=(2, 5, 4),
        sane_multi_rowcount=(2, 0, 9),
        array_oid=(2, 4, 3),
        hstore_adapter=(2, 4)
    )

    _has_native_hstore = False
    _has_native_json = False
    _has_native_jsonb = False

    engine_config_types = PGDialect.engine_config_types.union([
        ('use_native_unicode', util.asbool),
    ])

    colspecs = util.update_copy(
        PGDialect.colspecs,
        {
            sqltypes.Numeric: _PGNumeric,
            ENUM: _PGEnum,  # needs force_unicode
            sqltypes.Enum: _PGEnum,  # needs force_unicode
            HSTORE: _PGHStore,
            JSON: _PGJSON,
            sqltypes.JSON: _PGJSON,
            JSONB: _PGJSONB,
            UUID: _PGUUID
        }
    )

    def __init__(self, server_side_cursors=False, use_native_unicode=True,
                 client_encoding=None,
                 use_native_hstore=True, use_native_uuid=True,
                 **kwargs):
        PGDialect.__init__(self, **kwargs)
        self.server_side_cursors = server_side_cursors
        self.use_native_unicode = use_native_unicode
        self.use_native_hstore = use_native_hstore
        self.use_native_uuid = use_native_uuid
        self.supports_unicode_binds = use_native_unicode
        self.client_encoding = client_encoding
        if self.dbapi and hasattr(self.dbapi, '__version__'):
            m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?',
                         self.dbapi.__version__)
            if m:
                self.psycopg2_version = tuple(
                    int(x)
                    for x in m.group(1, 2, 3)
                    if x is not None)

    def initialize(self, connection):
        super(PGDialect_psycopg2, self).initialize(connection)
        self._has_native_hstore = self.use_native_hstore and \
            self._hstore_oids(connection.connection) \
            is not None
        self._has_native_json = \
            self.psycopg2_version >= self.FEATURE_VERSION_MAP['native_json']
        self._has_native_jsonb = \
            self.psycopg2_version >= self.FEATURE_VERSION_MAP['native_jsonb']

        # http://initd.org/psycopg/docs/news.html#what-s-new-in-psycopg-2-0-9
        self.supports_sane_multi_rowcount = \
            self.psycopg2_version >= \
            self.FEATURE_VERSION_MAP['sane_multi_rowcount']

    @classmethod
    def dbapi(cls):
        import psycopg2
        return psycopg2

    @classmethod
    def _psycopg2_extensions(cls):
        from psycopg2 import extensions
        return extensions

    @classmethod
    def _psycopg2_extras(cls):
        from psycopg2 import extras
        return extras

    @util.memoized_property
    def _isolation_lookup(self):
        extensions = self._psycopg2_extensions()
        return {
            'AUTOCOMMIT': extensions.ISOLATION_LEVEL_AUTOCOMMIT,
            'READ COMMITTED': extensions.ISOLATION_LEVEL_READ_COMMITTED,
            'READ UNCOMMITTED': extensions.ISOLATION_LEVEL_READ_UNCOMMITTED,
            'REPEATABLE READ': extensions.ISOLATION_LEVEL_REPEATABLE_READ,
            'SERIALIZABLE': extensions.ISOLATION_LEVEL_SERIALIZABLE
        }

    def set_isolation_level(self, connection, level):
        try:
            level = self._isolation_lookup[level.replace('_', ' ')]
        except KeyError:
            raise exc.ArgumentError(
                "Invalid value '%s' for isolation_level. "
                "Valid isolation levels for %s are %s" %
                (level, self.name, ", ".join(self._isolation_lookup))
            )

        connection.set_isolation_level(level)

    def on_connect(self):
        extras = self._psycopg2_extras()
        extensions = self._psycopg2_extensions()

        fns = []
        if self.client_encoding is not None:
            def on_connect(conn):
                conn.set_client_encoding(self.client_encoding)
            fns.append(on_connect)

        if self.isolation_level is not None:
            def on_connect(conn):
                self.set_isolation_level(conn, self.isolation_level)
            fns.append(on_connect)

        if self.dbapi and self.use_native_uuid:
            def on_connect(conn):
                extras.register_uuid(None, conn)
            fns.append(on_connect)

        if self.dbapi and self.use_native_unicode:
            def on_connect(conn):
                extensions.register_type(extensions.UNICODE, conn)
                extensions.register_type(extensions.UNICODEARRAY, conn)
            fns.append(on_connect)

        if self.dbapi and self.use_native_hstore:
            def on_connect(conn):
                hstore_oids = self._hstore_oids(conn)
                if hstore_oids is not None:
                    oid, array_oid = hstore_oids
                    kw = {'oid': oid}
                    if util.py2k:
                        kw['unicode'] = True
                    if self.psycopg2_version >= \
                            self.FEATURE_VERSION_MAP['array_oid']:
                        kw['array_oid'] = array_oid
                    extras.register_hstore(conn, **kw)
            fns.append(on_connect)

        if self.dbapi and self._json_deserializer:
            def on_connect(conn):
                if self._has_native_json:
                    extras.register_default_json(
                        conn, loads=self._json_deserializer)
                if self._has_native_jsonb:
                    extras.register_default_jsonb(
                        conn, loads=self._json_deserializer)
            fns.append(on_connect)

        if fns:
            def on_connect(conn):
                for fn in fns:
                    fn(conn)
            return on_connect
        else:
            return None

    @util.memoized_instancemethod
    def _hstore_oids(self, conn):
        if self.psycopg2_version >= self.FEATURE_VERSION_MAP['hstore_adapter']:
            extras = self._psycopg2_extras()
            oids = extras.HstoreAdapter.get_oids(conn)
            if oids is not None and oids[0]:
                return oids[0:2]
        return None

    def create_connect_args(self, url):
        opts = url.translate_connect_args(username='user')
        if 'port' in opts:
            opts['port'] = int(opts['port'])
        opts.update(url.query)
        return ([], opts)

    def is_disconnect(self, e, connection, cursor):
        if isinstance(e, self.dbapi.Error):
            # check the "closed" flag. this might not be
            # present on old psycopg2 versions. Also,
            # this flag doesn't actually help in a lot of disconnect
            # situations, so don't rely on it.
            if getattr(connection, 'closed', False):
                return True

            # checks based on strings. in the case that .closed
            # didn't cut it, fall back onto these.
            str_e = str(e).partition("\n")[0]
            for msg in [
                # these error messages from libpq: interfaces/libpq/fe-misc.c
                # and interfaces/libpq/fe-secure.c.
                'terminating connection',
                'closed the connection',
                'connection not open',
                'could not receive data from server',
                'could not send data to server',
                # psycopg2 client errors, psycopg2/connection.h,
                # psycopg2/cursor.h
                'connection already closed',
                'cursor already closed',
                # not sure where this path is originally from, it may
                # be obsolete.  It really says "losed", not "closed".
                'losed the connection unexpectedly',
                # these can occur in newer SSL
                'connection has been closed unexpectedly',
                'SSL SYSCALL error: Bad file descriptor',
                'SSL SYSCALL error: EOF detected',
                'SSL error: decryption failed or bad record mac',
            ]:
                idx = str_e.find(msg)
                if idx >= 0 and '"' not in str_e[:idx]:
                    return True
        return False

dialect = PGDialect_psycopg2
@ -1,61 +0,0 @@
# postgresql/psycopg2cffi.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: postgresql+psycopg2cffi
    :name: psycopg2cffi
    :dbapi: psycopg2cffi
    :connectstring: \
postgresql+psycopg2cffi://user:password@host:port/dbname\
[?key=value&key=value...]
    :url: http://pypi.python.org/pypi/psycopg2cffi/

``psycopg2cffi`` is an adaptation of ``psycopg2``, using CFFI for the C
layer. This makes it suitable for use in e.g. PyPy. Documentation
is as per ``psycopg2``.

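A connection is established the same way as with ``psycopg2``; only the
driver name in the URL changes (credentials here are illustrative)::

    from sqlalchemy import create_engine

    engine = create_engine(
        "postgresql+psycopg2cffi://scott:tiger@localhost/test")
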
.. versionadded:: 1.0.0

.. seealso::

    :mod:`sqlalchemy.dialects.postgresql.psycopg2`

"""
from .psycopg2 import PGDialect_psycopg2


class PGDialect_psycopg2cffi(PGDialect_psycopg2):
    driver = 'psycopg2cffi'
    supports_unicode_statements = True

    # psycopg2cffi's first release is 2.5.0, but reports
    # __version__ as 2.4.4.  Subsequent releases seem to have
    # fixed this.

    FEATURE_VERSION_MAP = dict(
        native_json=(2, 4, 4),
        native_jsonb=(2, 7, 1),
        sane_multi_rowcount=(2, 4, 4),
        array_oid=(2, 4, 4),
        hstore_adapter=(2, 4, 4)
    )

    @classmethod
    def dbapi(cls):
        return __import__('psycopg2cffi')

    @classmethod
    def _psycopg2_extensions(cls):
        root = __import__('psycopg2cffi', fromlist=['extensions'])
        return root.extensions

    @classmethod
    def _psycopg2_extras(cls):
        root = __import__('psycopg2cffi', fromlist=['extras'])
        return root.extras


dialect = PGDialect_psycopg2cffi
@ -1,243 +0,0 @@
# postgresql/pygresql.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: postgresql+pygresql
    :name: pygresql
    :dbapi: pgdb
    :connectstring: postgresql+pygresql://user:password@host:port/dbname\
[?key=value&key=value...]
    :url: http://www.pygresql.org/
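
A minimal connection sketch (credentials are illustrative)::

    from sqlalchemy import create_engine

    engine = create_engine(
        "postgresql+pygresql://scott:tiger@localhost/test")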
"""
|
||||
|
||||
import decimal
|
||||
import re
|
||||
|
||||
from ... import exc, processors, util
|
||||
from ...types import Numeric, JSON as Json
|
||||
from ...sql.elements import Null
|
||||
from .base import PGDialect, PGCompiler, PGIdentifierPreparer, \
|
||||
_DECIMAL_TYPES, _FLOAT_TYPES, _INT_TYPES, UUID
|
||||
from .hstore import HSTORE
|
||||
from .json import JSON, JSONB
|
||||
|
||||
|
||||
class _PGNumeric(Numeric):
|
||||
|
||||
def bind_processor(self, dialect):
|
||||
return None
|
||||
|
||||
def result_processor(self, dialect, coltype):
|
||||
if not isinstance(coltype, int):
|
||||
coltype = coltype.oid
|
||||
if self.asdecimal:
|
||||
if coltype in _FLOAT_TYPES:
|
||||
return processors.to_decimal_processor_factory(
|
||||
decimal.Decimal,
|
||||
self._effective_decimal_return_scale)
|
||||
elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
|
||||
# PyGreSQL returns Decimal natively for 1700 (numeric)
|
||||
return None
|
||||
else:
|
||||
raise exc.InvalidRequestError(
|
||||
"Unknown PG numeric type: %d" % coltype)
|
||||
else:
|
||||
if coltype in _FLOAT_TYPES:
|
||||
# PyGreSQL returns float natively for 701 (float8)
|
||||
return None
|
||||
elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
|
||||
return processors.to_float
|
||||
else:
|
||||
raise exc.InvalidRequestError(
|
||||
"Unknown PG numeric type: %d" % coltype)
|
||||
|
||||
|
||||
class _PGHStore(HSTORE):
|
||||
|
||||
def bind_processor(self, dialect):
|
||||
if not dialect.has_native_hstore:
|
||||
return super(_PGHStore, self).bind_processor(dialect)
|
||||
hstore = dialect.dbapi.Hstore
|
||||
def process(value):
|
||||
if isinstance(value, dict):
|
||||
return hstore(value)
|
||||
return value
|
||||
return process
|
||||
|
||||
def result_processor(self, dialect, coltype):
|
||||
if not dialect.has_native_hstore:
|
||||
return super(_PGHStore, self).result_processor(dialect, coltype)
|
||||
|
||||
|
||||
class _PGJSON(JSON):
|
||||
|
||||
def bind_processor(self, dialect):
|
||||
if not dialect.has_native_json:
|
||||
return super(_PGJSON, self).bind_processor(dialect)
|
||||
json = dialect.dbapi.Json
|
||||
|
||||
def process(value):
|
||||
if value is self.NULL:
|
||||
value = None
|
||||
elif isinstance(value, Null) or (
|
||||
value is None and self.none_as_null):
|
||||
return None
|
||||
if value is None or isinstance(value, (dict, list)):
|
||||
return json(value)
|
||||
return value
|
||||
|
||||
return process
|
||||
|
||||
def result_processor(self, dialect, coltype):
|
||||
if not dialect.has_native_json:
|
||||
return super(_PGJSON, self).result_processor(dialect, coltype)
|
||||
|
||||
|
||||
class _PGJSONB(JSONB):
|
||||
|
||||
def bind_processor(self, dialect):
|
||||
if not dialect.has_native_json:
|
||||
return super(_PGJSONB, self).bind_processor(dialect)
|
||||
json = dialect.dbapi.Json
|
||||
|
||||
def process(value):
|
||||
if value is self.NULL:
|
||||
value = None
|
||||
elif isinstance(value, Null) or (
|
||||
value is None and self.none_as_null):
|
||||
return None
|
||||
if value is None or isinstance(value, (dict, list)):
|
||||
return json(value)
|
||||
return value
|
||||
|
||||
return process
|
||||
|
||||
def result_processor(self, dialect, coltype):
|
||||
if not dialect.has_native_json:
|
||||
return super(_PGJSONB, self).result_processor(dialect, coltype)
|
||||
|
||||
|
||||
class _PGUUID(UUID):
|
||||
|
||||
def bind_processor(self, dialect):
|
||||
if not dialect.has_native_uuid:
|
||||
return super(_PGUUID, self).bind_processor(dialect)
|
||||
uuid = dialect.dbapi.Uuid
|
||||
|
||||
def process(value):
|
||||
if value is None:
|
||||
return None
|
||||
if isinstance(value, (str, bytes)):
|
||||
if len(value) == 16:
|
||||
return uuid(bytes=value)
|
||||
return uuid(value)
|
||||
if isinstance(value, int):
|
||||
return uuid(int=value)
|
||||
return value
|
||||
|
||||
return process
|
||||
|
||||
def result_processor(self, dialect, coltype):
|
||||
if not dialect.has_native_uuid:
|
||||
return super(_PGUUID, self).result_processor(dialect, coltype)
|
||||
if not self.as_uuid:
|
||||
def process(value):
|
||||
if value is not None:
|
||||
return str(value)
|
||||
return process
|
||||
|
||||
|
||||
class _PGCompiler(PGCompiler):
|
||||
|
||||
def visit_mod_binary(self, binary, operator, **kw):
|
||||
return self.process(binary.left, **kw) + " %% " + \
|
||||
self.process(binary.right, **kw)
|
||||
|
||||
def post_process_text(self, text):
|
||||
return text.replace('%', '%%')
|
||||
|
||||
|
||||
class _PGIdentifierPreparer(PGIdentifierPreparer):
|
||||
|
||||
def _escape_identifier(self, value):
|
||||
value = value.replace(self.escape_quote, self.escape_to_quote)
|
||||
return value.replace('%', '%%')
|
||||
|
||||
|
||||
class PGDialect_pygresql(PGDialect):
|
||||
|
||||
driver = 'pygresql'
|
||||
|
||||
statement_compiler = _PGCompiler
|
||||
preparer = _PGIdentifierPreparer
|
||||
|
||||
@classmethod
|
||||
def dbapi(cls):
|
||||
import pgdb
|
||||
return pgdb
|
||||
|
||||
colspecs = util.update_copy(
|
||||
PGDialect.colspecs,
|
||||
{
|
||||
Numeric: _PGNumeric,
|
||||
HSTORE: _PGHStore,
|
||||
Json: _PGJSON,
|
||||
JSON: _PGJSON,
|
||||
JSONB: _PGJSONB,
|
||||
UUID: _PGUUID,
|
||||
}
|
||||
)
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super(PGDialect_pygresql, self).__init__(**kwargs)
|
||||
try:
|
||||
version = self.dbapi.version
|
||||
m = re.match(r'(\d+)\.(\d+)', version)
|
||||
version = (int(m.group(1)), int(m.group(2)))
|
||||
except (AttributeError, ValueError, TypeError):
|
||||
version = (0, 0)
|
||||
self.dbapi_version = version
|
||||
if version < (5, 0):
|
||||
has_native_hstore = has_native_json = has_native_uuid = False
|
||||
if version != (0, 0):
|
||||
util.warn("PyGreSQL is only fully supported by SQLAlchemy"
|
||||
" since version 5.0.")
|
||||
else:
|
||||
self.supports_unicode_statements = True
|
||||
self.supports_unicode_binds = True
|
||||
has_native_hstore = has_native_json = has_native_uuid = True
|
||||
self.has_native_hstore = has_native_hstore
|
||||
self.has_native_json = has_native_json
|
||||
self.has_native_uuid = has_native_uuid
|
||||
|
||||
def create_connect_args(self, url):
|
||||
opts = url.translate_connect_args(username='user')
|
||||
if 'port' in opts:
|
||||
opts['host'] = '%s:%s' % (
|
||||
opts.get('host', '').rsplit(':', 1)[0], opts.pop('port'))
|
||||
opts.update(url.query)
|
||||
return [], opts
|
||||
|
||||
def is_disconnect(self, e, connection, cursor):
|
||||
if isinstance(e, self.dbapi.Error):
|
||||
if not connection:
|
||||
return False
|
||||
try:
|
||||
connection = connection.connection
|
||||
except AttributeError:
|
||||
pass
|
||||
else:
|
||||
if not connection:
|
||||
return False
|
||||
try:
|
||||
return connection.closed
|
||||
except AttributeError: # PyGreSQL < 5.0
|
||||
return connection._cnx is None
|
||||
return False
|
||||
|
||||
|
||||
dialect = PGDialect_pygresql
|
@ -1,97 +0,0 @@
# postgresql/pypostgresql.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: postgresql+pypostgresql
    :name: py-postgresql
    :dbapi: pypostgresql
    :connectstring: postgresql+pypostgresql://user:password@host:port/dbname\
[?key=value&key=value...]
    :url: http://python.projects.pgfoundry.org/

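A minimal connection sketch (credentials are illustrative)::

    from sqlalchemy import create_engine

    engine = create_engine(
        "postgresql+pypostgresql://scott:tiger@localhost/test")
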
"""
|
||||
from ... import util
|
||||
from ... import types as sqltypes
|
||||
from .base import PGDialect, PGExecutionContext
|
||||
from ... import processors
|
||||
|
||||
|
||||
class PGNumeric(sqltypes.Numeric):
|
||||
def bind_processor(self, dialect):
|
||||
return processors.to_str
|
||||
|
||||
def result_processor(self, dialect, coltype):
|
||||
if self.asdecimal:
|
||||
return None
|
||||
else:
|
||||
return processors.to_float
|
||||
|
||||
|
||||
class PGExecutionContext_pypostgresql(PGExecutionContext):
|
||||
pass
|
||||
|
||||
|
||||
class PGDialect_pypostgresql(PGDialect):
|
||||
driver = 'pypostgresql'
|
||||
|
||||
supports_unicode_statements = True
|
||||
supports_unicode_binds = True
|
||||
description_encoding = None
|
||||
default_paramstyle = 'pyformat'
|
||||
|
||||
# requires trunk version to support sane rowcounts
|
||||
# TODO: use dbapi version information to set this flag appropriately
|
||||
supports_sane_rowcount = True
|
||||
supports_sane_multi_rowcount = False
|
||||
|
||||
execution_ctx_cls = PGExecutionContext_pypostgresql
|
||||
colspecs = util.update_copy(
|
||||
PGDialect.colspecs,
|
||||
{
|
||||
sqltypes.Numeric: PGNumeric,
|
||||
|
||||
# prevents PGNumeric from being used
|
||||
sqltypes.Float: sqltypes.Float,
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def dbapi(cls):
|
||||
from postgresql.driver import dbapi20
|
||||
return dbapi20
|
||||
|
||||
_DBAPI_ERROR_NAMES = [
|
||||
"Error",
|
||||
"InterfaceError", "DatabaseError", "DataError",
|
||||
"OperationalError", "IntegrityError", "InternalError",
|
||||
"ProgrammingError", "NotSupportedError"
|
||||
]
|
||||
|
||||
@util.memoized_property
|
||||
def dbapi_exception_translation_map(self):
|
||||
if self.dbapi is None:
|
||||
return {}
|
||||
|
||||
return dict(
|
||||
(getattr(self.dbapi, name).__name__, name)
|
||||
for name in self._DBAPI_ERROR_NAMES
|
||||
)
|
||||
|
||||
def create_connect_args(self, url):
|
||||
opts = url.translate_connect_args(username='user')
|
||||
if 'port' in opts:
|
||||
opts['port'] = int(opts['port'])
|
||||
else:
|
||||
opts['port'] = 5432
|
||||
opts.update(url.query)
|
||||
return ([], opts)
|
||||
|
||||
def is_disconnect(self, e, connection, cursor):
|
||||
return "connection is closed" in str(e)
|
||||
|
||||
dialect = PGDialect_pypostgresql
|
@ -1,168 +0,0 @@
# Copyright (C) 2013-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from .base import ischema_names
from ... import types as sqltypes

__all__ = ('INT4RANGE', 'INT8RANGE', 'NUMRANGE',
           'DATERANGE', 'TSRANGE', 'TSTZRANGE')


class RangeOperators(object):
    """
    This mixin provides functionality for the Range Operators
    listed in Table 9-44 of the `postgres documentation`__ for Range
    Functions and Operators. It is used by all the range types
    provided in the ``postgres`` dialect and can likely be used for
    any range types you create yourself.

    __ http://www.postgresql.org/docs/devel/static/functions-range.html

    No extra support is provided for the Range Functions listed in
    Table 9-45 of the postgres documentation. For these, the normal
    :func:`~sqlalchemy.sql.expression.func` object should be used.

    .. versionadded:: 0.8.2  Support for PostgreSQL RANGE operations.

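    A short, hypothetical usage sketch (the table and values are
    illustrative)::

        from sqlalchemy import Table, Column, Integer, MetaData
        from sqlalchemy.dialects.postgresql import INT4RANGE

        metadata = MetaData()
        seats = Table('seats', metadata,
                      Column('id', Integer, primary_key=True),
                      Column('block', INT4RANGE))

        # rows whose range contains the value 7
        stmt = seats.select().where(seats.c.block.contains(7))
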
"""
|
||||
|
||||
class comparator_factory(sqltypes.Concatenable.Comparator):
|
||||
"""Define comparison operations for range types."""
|
||||
|
||||
def __ne__(self, other):
|
||||
"Boolean expression. Returns true if two ranges are not equal"
|
||||
return self.expr.op('<>')(other)
|
||||
|
||||
def contains(self, other, **kw):
|
||||
"""Boolean expression. Returns true if the right hand operand,
|
||||
which can be an element or a range, is contained within the
|
||||
column.
|
||||
"""
|
||||
return self.expr.op('@>')(other)
|
||||
|
||||
def contained_by(self, other):
|
||||
"""Boolean expression. Returns true if the column is contained
|
||||
within the right hand operand.
|
||||
"""
|
||||
return self.expr.op('<@')(other)
|
||||
|
||||
def overlaps(self, other):
|
||||
"""Boolean expression. Returns true if the column overlaps
|
||||
(has points in common with) the right hand operand.
|
||||
"""
|
||||
return self.expr.op('&&')(other)
|
||||
|
||||
def strictly_left_of(self, other):
|
||||
"""Boolean expression. Returns true if the column is strictly
|
||||
left of the right hand operand.
|
||||
"""
|
||||
return self.expr.op('<<')(other)
|
||||
|
||||
__lshift__ = strictly_left_of
|
||||
|
||||
def strictly_right_of(self, other):
|
||||
"""Boolean expression. Returns true if the column is strictly
|
||||
right of the right hand operand.
|
||||
"""
|
||||
return self.expr.op('>>')(other)
|
||||
|
||||
__rshift__ = strictly_right_of
|
||||
|
||||
def not_extend_right_of(self, other):
|
||||
"""Boolean expression. Returns true if the range in the column
|
||||
does not extend right of the range in the operand.
|
||||
"""
|
||||
return self.expr.op('&<')(other)
|
||||
|
||||
def not_extend_left_of(self, other):
|
||||
"""Boolean expression. Returns true if the range in the column
|
||||
does not extend left of the range in the operand.
|
||||
"""
|
||||
return self.expr.op('&>')(other)
|
||||
|
||||
def adjacent_to(self, other):
|
||||
"""Boolean expression. Returns true if the range in the column
|
||||
is adjacent to the range in the operand.
|
||||
"""
|
||||
return self.expr.op('-|-')(other)
|
||||
|
||||
def __add__(self, other):
|
||||
"""Range expression. Returns the union of the two ranges.
|
||||
Will raise an exception if the resulting range is not
|
||||
contigous.
|
||||
"""
|
||||
return self.expr.op('+')(other)
|
||||
|
||||
|
||||
class INT4RANGE(RangeOperators, sqltypes.TypeEngine):
|
||||
"""Represent the PostgreSQL INT4RANGE type.
|
||||
|
||||
.. versionadded:: 0.8.2
|
||||
|
||||
"""
|
||||
|
||||
__visit_name__ = 'INT4RANGE'
|
||||
|
||||
ischema_names['int4range'] = INT4RANGE
|
||||
|
||||
|
||||
class INT8RANGE(RangeOperators, sqltypes.TypeEngine):
|
||||
"""Represent the PostgreSQL INT8RANGE type.
|
||||
|
||||
.. versionadded:: 0.8.2
|
||||
|
||||
"""
|
||||
|
||||
__visit_name__ = 'INT8RANGE'
|
||||
|
||||
ischema_names['int8range'] = INT8RANGE
|
||||
|
||||
|
||||
class NUMRANGE(RangeOperators, sqltypes.TypeEngine):
|
||||
"""Represent the PostgreSQL NUMRANGE type.
|
||||
|
||||
.. versionadded:: 0.8.2
|
||||
|
||||
"""
|
||||
|
||||
__visit_name__ = 'NUMRANGE'
|
||||
|
||||
ischema_names['numrange'] = NUMRANGE
|
||||
|
||||
|
||||
class DATERANGE(RangeOperators, sqltypes.TypeEngine):
|
||||
"""Represent the PostgreSQL DATERANGE type.
|
||||
|
||||
.. versionadded:: 0.8.2
|
||||
|
||||
"""
|
||||
|
||||
__visit_name__ = 'DATERANGE'
|
||||
|
||||
ischema_names['daterange'] = DATERANGE
|
||||
|
||||
|
||||
class TSRANGE(RangeOperators, sqltypes.TypeEngine):
|
||||
"""Represent the PostgreSQL TSRANGE type.
|
||||
|
||||
.. versionadded:: 0.8.2
|
||||
|
||||
"""
|
||||
|
||||
__visit_name__ = 'TSRANGE'
|
||||
|
||||
ischema_names['tsrange'] = TSRANGE
|
||||
|
||||
|
||||
class TSTZRANGE(RangeOperators, sqltypes.TypeEngine):
|
||||
"""Represent the PostgreSQL TSTZRANGE type.
|
||||
|
||||
.. versionadded:: 0.8.2
|
||||
|
||||
"""
|
||||
|
||||
__visit_name__ = 'TSTZRANGE'
|
||||
|
||||
ischema_names['tstzrange'] = TSTZRANGE
|
@ -1,46 +0,0 @@
# postgresql/zxjdbc.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: postgresql+zxjdbc
    :name: zxJDBC for Jython
    :dbapi: zxjdbc
    :connectstring: postgresql+zxjdbc://scott:tiger@localhost/db
    :driverurl: http://jdbc.postgresql.org/

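A minimal connection sketch under Jython (credentials are illustrative)::

    from sqlalchemy import create_engine

    engine = create_engine("postgresql+zxjdbc://scott:tiger@localhost/db")
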
"""
|
||||
from ...connectors.zxJDBC import ZxJDBCConnector
|
||||
from .base import PGDialect, PGExecutionContext
|
||||
|
||||
|
||||
class PGExecutionContext_zxjdbc(PGExecutionContext):
|
||||
|
||||
def create_cursor(self):
|
||||
cursor = self._dbapi_connection.cursor()
|
||||
cursor.datahandler = self.dialect.DataHandler(cursor.datahandler)
|
||||
return cursor
|
||||
|
||||
|
||||
class PGDialect_zxjdbc(ZxJDBCConnector, PGDialect):
|
||||
jdbc_db_name = 'postgresql'
|
||||
jdbc_driver_name = 'org.postgresql.Driver'
|
||||
|
||||
execution_ctx_cls = PGExecutionContext_zxjdbc
|
||||
|
||||
supports_native_decimal = True
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(PGDialect_zxjdbc, self).__init__(*args, **kwargs)
|
||||
from com.ziclix.python.sql.handler import PostgresqlDataHandler
|
||||
self.DataHandler = PostgresqlDataHandler
|
||||
|
||||
def _get_server_version_info(self, connection):
|
||||
parts = connection.connection.dbversion.split('.')
|
||||
return tuple(int(x) for x in parts)
|
||||
|
||||
dialect = PGDialect_zxjdbc
|
@ -1,20 +0,0 @@
# sqlite/__init__.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from sqlalchemy.dialects.sqlite import base, pysqlite, pysqlcipher

# default dialect
base.dialect = pysqlite.dialect

from sqlalchemy.dialects.sqlite.base import (
    BLOB, BOOLEAN, CHAR, DATE, DATETIME, DECIMAL, FLOAT, INTEGER, REAL,
    NUMERIC, SMALLINT, TEXT, TIME, TIMESTAMP, VARCHAR, dialect,
)

__all__ = ('BLOB', 'BOOLEAN', 'CHAR', 'DATE', 'DATETIME', 'DECIMAL',
           'FLOAT', 'INTEGER', 'NUMERIC', 'SMALLINT', 'TEXT', 'TIME',
           'TIMESTAMP', 'VARCHAR', 'REAL', 'dialect')
File diff suppressed because it is too large
@ -1,130 +0,0 @@
# sqlite/pysqlcipher.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: sqlite+pysqlcipher
    :name: pysqlcipher
    :dbapi: pysqlcipher
    :connectstring: sqlite+pysqlcipher://:passphrase/file_path[?kdf_iter=<iter>]
    :url: https://pypi.python.org/pypi/pysqlcipher

``pysqlcipher`` is a fork of the standard ``pysqlite`` driver to make
use of the `SQLCipher <https://www.zetetic.net/sqlcipher>`_ backend.

``pysqlcipher3`` is a fork of ``pysqlcipher`` for Python 3. This dialect
will attempt to import it if ``pysqlcipher`` is not present.

.. versionadded:: 1.1.4 - added fallback import for pysqlcipher3

.. versionadded:: 0.9.9 - added pysqlcipher dialect

Driver
------

The driver here is the `pysqlcipher <https://pypi.python.org/pypi/pysqlcipher>`_
driver, which makes use of the SQLCipher engine. This system essentially
introduces new PRAGMA commands to SQLite which allow the setting of a
passphrase and other encryption parameters, allowing the database
file to be encrypted.

``pysqlcipher3`` is a fork of ``pysqlcipher`` with support for Python 3;
the driver is otherwise the same.

Connect Strings
---------------

The format of the connect string is in every way the same as that
of the :mod:`~sqlalchemy.dialects.sqlite.pysqlite` driver, except that the
"password" field is now accepted, which should contain a passphrase::

    e = create_engine('sqlite+pysqlcipher://:testing@/foo.db')

For an absolute file path, two leading slashes should be used for the
database name::

    e = create_engine('sqlite+pysqlcipher://:testing@//path/to/foo.db')

A selection of additional encryption-related pragmas supported by SQLCipher
as documented at https://www.zetetic.net/sqlcipher/sqlcipher-api/ can be passed
in the query string, and will result in that PRAGMA being called for each
new connection. Currently, ``cipher``, ``kdf_iter``,
``cipher_page_size`` and ``cipher_use_hmac`` are supported::

    e = create_engine('sqlite+pysqlcipher://:testing@/foo.db?cipher=aes-256-cfb&kdf_iter=64000')


Pooling Behavior
----------------

The driver makes a change to the default pool behavior of pysqlite
as described in :ref:`pysqlite_threading_pooling`. The pysqlcipher driver
has been observed to be significantly slower on connection than the
pysqlite driver, most likely due to the encryption overhead, so the
dialect here defaults to using the :class:`.SingletonThreadPool`
implementation, instead of the :class:`.NullPool` pool used by pysqlite.
As always, the pool implementation is entirely configurable using the
:paramref:`.create_engine.poolclass` parameter; the :class:`.StaticPool` may
be more feasible for single-threaded use, or :class:`.NullPool` may be used
to prevent unencrypted connections from being held open for long periods of
time, at the expense of slower startup time for new connections.

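For example, to opt back into :class:`.NullPool` (a minimal sketch; the
passphrase and path are illustrative)::

    from sqlalchemy import create_engine
    from sqlalchemy.pool import NullPool

    e = create_engine('sqlite+pysqlcipher://:testing@/foo.db',
                      poolclass=NullPool)
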
"""
|
||||
from __future__ import absolute_import
|
||||
from .pysqlite import SQLiteDialect_pysqlite
|
||||
from ...engine import url as _url
|
||||
from ... import pool
|
||||
|
||||
|
||||
class SQLiteDialect_pysqlcipher(SQLiteDialect_pysqlite):
|
||||
driver = 'pysqlcipher'
|
||||
|
||||
pragmas = ('kdf_iter', 'cipher', 'cipher_page_size', 'cipher_use_hmac')
|
||||
|
||||
@classmethod
|
||||
def dbapi(cls):
|
||||
try:
|
||||
from pysqlcipher import dbapi2 as sqlcipher
|
||||
except ImportError as e:
|
||||
try:
|
||||
from pysqlcipher3 import dbapi2 as sqlcipher
|
||||
except ImportError:
|
||||
raise e
|
||||
return sqlcipher
|
||||
|
||||
@classmethod
|
||||
def get_pool_class(cls, url):
|
||||
return pool.SingletonThreadPool
|
||||
|
||||
def connect(self, *cargs, **cparams):
|
||||
passphrase = cparams.pop('passphrase', '')
|
||||
|
||||
pragmas = dict(
|
||||
(key, cparams.pop(key, None)) for key in
|
||||
self.pragmas
|
||||
)
|
||||
|
||||
conn = super(SQLiteDialect_pysqlcipher, self).\
|
||||
connect(*cargs, **cparams)
|
||||
conn.execute('pragma key="%s"' % passphrase)
|
||||
for prag, value in pragmas.items():
|
||||
if value is not None:
|
||||
conn.execute('pragma %s="%s"' % (prag, value))
|
||||
|
||||
return conn
|
||||
|
||||
def create_connect_args(self, url):
|
||||
super_url = _url.URL(
|
||||
url.drivername, username=url.username,
|
||||
host=url.host, database=url.database, query=url.query)
|
||||
c_args, opts = super(SQLiteDialect_pysqlcipher, self).\
|
||||
create_connect_args(super_url)
|
||||
opts['passphrase'] = url.password
|
||||
return c_args, opts
|
||||
|
||||
dialect = SQLiteDialect_pysqlcipher
|
@ -1,377 +0,0 @@
# sqlite/pysqlite.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

r"""
.. dialect:: sqlite+pysqlite
    :name: pysqlite
    :dbapi: sqlite3
    :connectstring: sqlite+pysqlite:///file_path
    :url: http://docs.python.org/library/sqlite3.html

Note that ``pysqlite`` is the same driver as the ``sqlite3``
module included with the Python distribution.

Driver
------

When using Python 2.5 and above, the built in ``sqlite3`` driver is
already installed and no additional installation is needed. Otherwise,
the ``pysqlite2`` driver needs to be present. This is the same driver as
``sqlite3``, just with a different name.

The ``pysqlite2`` driver will be loaded first, and if not found, ``sqlite3``
is loaded. This allows an explicitly installed pysqlite driver to take
precedence over the built in one. As with all dialects, a specific
DBAPI module may be provided to :func:`~sqlalchemy.create_engine()` to control
this explicitly::

    from sqlite3 import dbapi2 as sqlite
    e = create_engine('sqlite+pysqlite:///file.db', module=sqlite)


Connect Strings
---------------

The file specification for the SQLite database is taken as the "database"
portion of the URL. Note that the format of a SQLAlchemy url is::

    driver://user:pass@host/database

This means that the actual filename to be used starts with the characters to
the **right** of the third slash. So connecting to a relative filepath
looks like::

    # relative path
    e = create_engine('sqlite:///path/to/database.db')

An absolute path, which is denoted by starting with a slash, means you
need **four** slashes::

    # absolute path
    e = create_engine('sqlite:////path/to/database.db')

To use a Windows path, regular drive specifications and backslashes can be
used. Double backslashes are probably needed::

    # absolute path on Windows
    e = create_engine('sqlite:///C:\\path\\to\\database.db')

The sqlite ``:memory:`` identifier is the default if no filepath is
present. Specify ``sqlite://`` and nothing else::

    # in-memory database
    e = create_engine('sqlite://')

Compatibility with sqlite3 "native" date and datetime types
-----------------------------------------------------------

The pysqlite driver includes the sqlite3.PARSE_DECLTYPES and
sqlite3.PARSE_COLNAMES options, which have the effect that any column
or expression explicitly cast as "date" or "timestamp" will be converted
to a Python date or datetime object. The date and datetime types provided
with the pysqlite dialect are not currently compatible with these options,
since they render the ISO date/datetime including microseconds, which
pysqlite's driver does not. Additionally, SQLAlchemy does not at
this time automatically render the "cast" syntax required for the
freestanding functions "current_timestamp" and "current_date" to return
datetime/date types natively. Unfortunately, pysqlite
does not provide the standard DBAPI types in ``cursor.description``,
leaving SQLAlchemy with no way to detect these types on the fly
without expensive per-row type checks.

Keeping in mind that pysqlite's parsing option is not recommended,
nor should be necessary, for use with SQLAlchemy, usage of PARSE_DECLTYPES
can be forced if one configures "native_datetime=True" on create_engine()::

    engine = create_engine('sqlite://',
                           connect_args={'detect_types':
                                         sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES},
                           native_datetime=True
                           )

With this flag enabled, the DATE and TIMESTAMP types (but note - not the
DATETIME or TIME types...confused yet?) will not perform any bind parameter
or result processing. Execution of "func.current_date()" will return a string.
"func.current_timestamp()" is registered as returning a DATETIME type in
SQLAlchemy, so this function still receives SQLAlchemy-level result
processing.

.. _pysqlite_threading_pooling:

Threading/Pooling Behavior
---------------------------

Pysqlite's default behavior is to prohibit the usage of a single connection
in more than one thread. This is originally intended to work with older
versions of SQLite that did not support multithreaded operation under
various circumstances. In particular, older SQLite versions
did not allow a ``:memory:`` database to be used in multiple threads
under any circumstances.

Pysqlite does include a now-undocumented flag known as
``check_same_thread`` which will disable this check; however, note that
pysqlite connections are still not safe to use concurrently in multiple
threads. In particular, any statement execution calls would need to be
externally mutexed, as Pysqlite does not provide for thread-safe propagation
of error messages among other things. So while even ``:memory:`` databases
can be shared among threads in modern SQLite, Pysqlite doesn't provide enough
thread-safety to make this usage worth it.

SQLAlchemy sets up pooling to work with Pysqlite's default behavior:

* When a ``:memory:`` SQLite database is specified, the dialect by default
  will use :class:`.SingletonThreadPool`. This pool maintains a single
  connection per thread, so that all access to the engine within the current
  thread use the same ``:memory:`` database - other threads would access a
  different ``:memory:`` database.
* When a file-based database is specified, the dialect will use
  :class:`.NullPool` as the source of connections. This pool closes and
  discards connections which are returned to the pool immediately. SQLite
  file-based connections have extremely low overhead, so pooling is not
  necessary. The scheme also prevents a connection from being used again in
  a different thread and works best with SQLite's coarse-grained file locking.

.. versionchanged:: 0.7
    Default selection of :class:`.NullPool` for SQLite file-based databases.
    Previous versions selected :class:`.SingletonThreadPool` by
    default for all SQLite databases.

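To override either default, pass :paramref:`.create_engine.poolclass`
explicitly (a minimal sketch)::

    from sqlalchemy import create_engine
    from sqlalchemy.pool import NullPool

    # force NullPool even for a :memory: database
    engine = create_engine('sqlite://', poolclass=NullPool)
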
Using a Memory Database in Multiple Threads
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

To use a ``:memory:`` database in a multithreaded scenario, the same
connection object must be shared among threads, since the database exists
only within the scope of that connection. The
:class:`.StaticPool` implementation will maintain a single connection
globally, and the ``check_same_thread`` flag can be passed to Pysqlite
as ``False``::

    from sqlalchemy.pool import StaticPool
    engine = create_engine('sqlite://',
                           connect_args={'check_same_thread': False},
                           poolclass=StaticPool)

Note that using a ``:memory:`` database in multiple threads requires a recent
version of SQLite.

Using Temporary Tables with SQLite
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Due to the way SQLite deals with temporary tables, if you wish to use a
temporary table in a file-based SQLite database across multiple checkouts
from the connection pool, such as when using an ORM :class:`.Session` where
the temporary table should continue to remain after :meth:`.Session.commit` or
:meth:`.Session.rollback` is called, a pool which maintains a single
connection must be used. Use :class:`.SingletonThreadPool` if the scope is
only needed within the current thread, or :class:`.StaticPool` if the scope
is needed within multiple threads for this case::

    # maintain the same connection per thread
    from sqlalchemy.pool import SingletonThreadPool
    engine = create_engine('sqlite:///mydb.db',
                           poolclass=SingletonThreadPool)


    # maintain the same connection across all threads
    from sqlalchemy.pool import StaticPool
    engine = create_engine('sqlite:///mydb.db',
                           poolclass=StaticPool)

Note that :class:`.SingletonThreadPool` should be configured for the number
of threads that are to be used; beyond that number, connections will be
closed out in a non-deterministic way.

Unicode
-------

The pysqlite driver only returns Python ``unicode`` objects in result sets,
never plain strings, and accommodates ``unicode`` objects within bound
parameter values in all cases. Regardless of the SQLAlchemy string type in
use, string-based result values will be Python ``unicode`` in Python 2.
The :class:`.Unicode` type should still be used to indicate those columns that
require unicode, however, so that non-``unicode`` values passed inadvertently
will emit a warning. Pysqlite will emit an error if a non-``unicode`` string
is passed containing non-ASCII characters.

.. _pysqlite_serializable:

Serializable isolation / Savepoints / Transactional DDL
-------------------------------------------------------

In the section :ref:`sqlite_concurrency`, we refer to the pysqlite
driver's assortment of issues that prevent several features of SQLite
from working correctly. The pysqlite DBAPI driver has several
long-standing bugs which impact the correctness of its transactional
behavior. In its default mode of operation, SQLite features such as
SERIALIZABLE isolation, transactional DDL, and SAVEPOINT support are
non-functional, and in order to use these features, workarounds must
be taken.

The issue is essentially that the driver attempts to second-guess the user's
intent, failing to start transactions and sometimes ending them prematurely, in
an effort to minimize the SQLite database's file locking behavior, even
though SQLite itself uses "shared" locks for read-only activities.

SQLAlchemy chooses to not alter this behavior by default, as it is the
long-expected behavior of the pysqlite driver; if and when the pysqlite
driver attempts to repair these issues, that will be more of a driver towards
defaults for SQLAlchemy.

The good news is that with a few events, we can implement transactional
support fully, by disabling pysqlite's feature entirely and emitting BEGIN
ourselves. This is achieved using two event listeners::

    from sqlalchemy import create_engine, event

    engine = create_engine("sqlite:///myfile.db")

    @event.listens_for(engine, "connect")
    def do_connect(dbapi_connection, connection_record):
        # disable pysqlite's emitting of the BEGIN statement entirely.
        # also stops it from emitting COMMIT before any DDL.
        dbapi_connection.isolation_level = None

    @event.listens_for(engine, "begin")
    def do_begin(conn):
        # emit our own BEGIN
        conn.execute("BEGIN")

Above, we intercept a new pysqlite connection and disable any transactional
integration. Then, at the point at which SQLAlchemy knows that transaction
scope is to begin, we emit ``"BEGIN"`` ourselves.

When we take control of ``"BEGIN"``, we can also control directly SQLite's
locking modes, introduced at `BEGIN TRANSACTION <http://sqlite.org/lang_transaction.html>`_,
by adding the desired locking mode to our ``"BEGIN"``::

    @event.listens_for(engine, "begin")
    def do_begin(conn):
        conn.execute("BEGIN EXCLUSIVE")

.. seealso::

    `BEGIN TRANSACTION <http://sqlite.org/lang_transaction.html>`_ - on the SQLite site

    `sqlite3 SELECT does not BEGIN a transaction <http://bugs.python.org/issue9924>`_ - on the Python bug tracker

    `sqlite3 module breaks transactions and potentially corrupts data <http://bugs.python.org/issue10740>`_ - on the Python bug tracker

"""

from sqlalchemy.dialects.sqlite.base import SQLiteDialect, DATETIME, DATE
from sqlalchemy import exc, pool
from sqlalchemy import types as sqltypes
from sqlalchemy import util

import os


class _SQLite_pysqliteTimeStamp(DATETIME):
    def bind_processor(self, dialect):
        if dialect.native_datetime:
            return None
        else:
            return DATETIME.bind_processor(self, dialect)

    def result_processor(self, dialect, coltype):
        if dialect.native_datetime:
            return None
        else:
            return DATETIME.result_processor(self, dialect, coltype)


class _SQLite_pysqliteDate(DATE):
    def bind_processor(self, dialect):
        if dialect.native_datetime:
            return None
        else:
            return DATE.bind_processor(self, dialect)

    def result_processor(self, dialect, coltype):
        if dialect.native_datetime:
            return None
        else:
            return DATE.result_processor(self, dialect, coltype)


class SQLiteDialect_pysqlite(SQLiteDialect):
    default_paramstyle = 'qmark'

    colspecs = util.update_copy(
        SQLiteDialect.colspecs,
        {
            sqltypes.Date: _SQLite_pysqliteDate,
            sqltypes.TIMESTAMP: _SQLite_pysqliteTimeStamp,
        }
    )

    if not util.py2k:
        description_encoding = None

    driver = 'pysqlite'

    def __init__(self, **kwargs):
        SQLiteDialect.__init__(self, **kwargs)

        if self.dbapi is not None:
            sqlite_ver = self.dbapi.version_info
            if sqlite_ver < (2, 1, 3):
                util.warn(
                    ("The installed version of pysqlite2 (%s) is out-dated "
                     "and will cause errors in some cases.  Version 2.1.3 "
                     "or greater is recommended.") %
                    '.'.join([str(subver) for subver in sqlite_ver]))

    @classmethod
    def dbapi(cls):
        try:
            from pysqlite2 import dbapi2 as sqlite
        except ImportError as e:
            try:
                from sqlite3 import dbapi2 as sqlite  # try 2.5+ stdlib name.
            except ImportError:
                raise e
        return sqlite

    @classmethod
    def get_pool_class(cls, url):
        if url.database and url.database != ':memory:':
            return pool.NullPool
        else:
            return pool.SingletonThreadPool

    def _get_server_version_info(self, connection):
        return self.dbapi.sqlite_version_info

    def create_connect_args(self, url):
        if url.username or url.password or url.host or url.port:
            raise exc.ArgumentError(
                "Invalid SQLite URL: %s\n"
                "Valid SQLite URL forms are:\n"
                " sqlite:///:memory: (or, sqlite://)\n"
                " sqlite:///relative/path/to/file.db\n"
                " sqlite:////absolute/path/to/file.db" % (url,))
        filename = url.database or ':memory:'
        if filename != ':memory:':
            filename = os.path.abspath(filename)

        opts = url.query.copy()
        util.coerce_kw_type(opts, 'timeout', float)
        util.coerce_kw_type(opts, 'isolation_level', str)
        util.coerce_kw_type(opts, 'detect_types', int)
        util.coerce_kw_type(opts, 'check_same_thread', bool)
        util.coerce_kw_type(opts, 'cached_statements', int)

        return ([filename], opts)

    def is_disconnect(self, e, connection, cursor):
        return isinstance(e, self.dbapi.ProgrammingError) and \
            "Cannot operate on a closed database." in str(e)

dialect = SQLiteDialect_pysqlite
@ -1,28 +0,0 @@
# sybase/__init__.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from sqlalchemy.dialects.sybase import base, pysybase, pyodbc

# default dialect
base.dialect = pyodbc.dialect

from .base import CHAR, VARCHAR, TIME, NCHAR, NVARCHAR,\
    TEXT, DATE, DATETIME, FLOAT, NUMERIC,\
    BIGINT, INT, INTEGER, SMALLINT, BINARY,\
    VARBINARY, UNITEXT, UNICHAR, UNIVARCHAR,\
    IMAGE, BIT, MONEY, SMALLMONEY, TINYINT,\
    dialect


__all__ = (
    'CHAR', 'VARCHAR', 'TIME', 'NCHAR', 'NVARCHAR',
    'TEXT', 'DATE', 'DATETIME', 'FLOAT', 'NUMERIC',
    'BIGINT', 'INT', 'INTEGER', 'SMALLINT', 'BINARY',
    'VARBINARY', 'UNITEXT', 'UNICHAR', 'UNIVARCHAR',
    'IMAGE', 'BIT', 'MONEY', 'SMALLMONEY', 'TINYINT',
    'dialect'
)
@ -1,821 +0,0 @@
# sybase/base.py
# Copyright (C) 2010-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
# get_select_precolumns(), limit_clause() implementation
# copyright (C) 2007 Fisch Asset Management
# AG http://www.fam.ch, with coding by Alexander Houben
# alexander.houben@thor-solutions.ch
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""

.. dialect:: sybase
    :name: Sybase

.. note::

    The Sybase dialect functions on current SQLAlchemy versions
    but is not regularly tested, and may have many issues and
    caveats not currently handled.

"""
import operator
import re

from sqlalchemy.sql import compiler, expression, text, bindparam
from sqlalchemy.engine import default, base, reflection
from sqlalchemy import types as sqltypes
from sqlalchemy.sql import operators as sql_operators
from sqlalchemy import schema as sa_schema
from sqlalchemy import util, sql, exc

from sqlalchemy.types import CHAR, VARCHAR, TIME, NCHAR, NVARCHAR,\
    TEXT, DATE, DATETIME, FLOAT, NUMERIC,\
    BIGINT, INT, INTEGER, SMALLINT, BINARY,\
    VARBINARY, DECIMAL, TIMESTAMP, Unicode,\
    UnicodeText, REAL

RESERVED_WORDS = set([
    "add", "all", "alter", "and",
    "any", "as", "asc", "backup",
    "begin", "between", "bigint", "binary",
    "bit", "bottom", "break", "by",
    "call", "capability", "cascade", "case",
    "cast", "char", "char_convert", "character",
    "check", "checkpoint", "close", "comment",
    "commit", "connect", "constraint", "contains",
    "continue", "convert", "create", "cross",
    "cube", "current", "current_timestamp", "current_user",
    "cursor", "date", "dbspace", "deallocate",
    "dec", "decimal", "declare", "default",
    "delete", "deleting", "desc", "distinct",
    "do", "double", "drop", "dynamic",
    "else", "elseif", "encrypted", "end",
    "endif", "escape", "except", "exception",
    "exec", "execute", "existing", "exists",
    "externlogin", "fetch", "first", "float",
    "for", "force", "foreign", "forward",
    "from", "full", "goto", "grant",
    "group", "having", "holdlock", "identified",
    "if", "in", "index", "index_lparen",
    "inner", "inout", "insensitive", "insert",
    "inserting", "install", "instead", "int",
    "integer", "integrated", "intersect", "into",
    "iq", "is", "isolation", "join",
    "key", "lateral", "left", "like",
    "lock", "login", "long", "match",
    "membership", "message", "mode", "modify",
    "natural", "new", "no", "noholdlock",
    "not", "notify", "null", "numeric",
    "of", "off", "on", "open",
    "option", "options", "or", "order",
    "others", "out", "outer", "over",
    "passthrough", "precision", "prepare", "primary",
    "print", "privileges", "proc", "procedure",
    "publication", "raiserror", "readtext", "real",
    "reference", "references", "release", "remote",
    "remove", "rename", "reorganize", "resource",
    "restore", "restrict", "return", "revoke",
    "right", "rollback", "rollup", "save",
    "savepoint", "scroll", "select", "sensitive",
    "session", "set", "setuser", "share",
    "smallint", "some", "sqlcode", "sqlstate",
    "start", "stop", "subtrans", "subtransaction",
    "synchronize", "syntax_error", "table", "temporary",
    "then", "time", "timestamp", "tinyint",
    "to", "top", "tran", "trigger",
    "truncate", "tsequal", "unbounded", "union",
    "unique", "unknown", "unsigned", "update",
    "updating", "user", "using", "validate",
    "values", "varbinary", "varchar", "variable",
    "varying", "view", "wait", "waitfor",
    "when", "where", "while", "window",
    "with", "with_cube", "with_lparen", "with_rollup",
    "within", "work", "writetext",
])


class _SybaseUnitypeMixin(object):
    """these types appear to return a buffer object."""

    def result_processor(self, dialect, coltype):
        def process(value):
            if value is not None:
                return str(value)  # decode("ucs-2")
            else:
                return None
        return process


class UNICHAR(_SybaseUnitypeMixin, sqltypes.Unicode):
    __visit_name__ = 'UNICHAR'


class UNIVARCHAR(_SybaseUnitypeMixin, sqltypes.Unicode):
    __visit_name__ = 'UNIVARCHAR'


class UNITEXT(_SybaseUnitypeMixin, sqltypes.UnicodeText):
    __visit_name__ = 'UNITEXT'


class TINYINT(sqltypes.Integer):
    __visit_name__ = 'TINYINT'


class BIT(sqltypes.TypeEngine):
    __visit_name__ = 'BIT'


class MONEY(sqltypes.TypeEngine):
    __visit_name__ = "MONEY"


class SMALLMONEY(sqltypes.TypeEngine):
    __visit_name__ = "SMALLMONEY"


class UNIQUEIDENTIFIER(sqltypes.TypeEngine):
    __visit_name__ = "UNIQUEIDENTIFIER"


class IMAGE(sqltypes.LargeBinary):
    __visit_name__ = 'IMAGE'


class SybaseTypeCompiler(compiler.GenericTypeCompiler):
    def visit_large_binary(self, type_, **kw):
        return self.visit_IMAGE(type_)

    def visit_boolean(self, type_, **kw):
        return self.visit_BIT(type_)

    def visit_unicode(self, type_, **kw):
        return self.visit_NVARCHAR(type_)

    def visit_UNICHAR(self, type_, **kw):
        return "UNICHAR(%d)" % type_.length

    def visit_UNIVARCHAR(self, type_, **kw):
        return "UNIVARCHAR(%d)" % type_.length

    def visit_UNITEXT(self, type_, **kw):
        return "UNITEXT"

    def visit_TINYINT(self, type_, **kw):
        return "TINYINT"

    def visit_IMAGE(self, type_, **kw):
        return "IMAGE"

    def visit_BIT(self, type_, **kw):
        return "BIT"

    def visit_MONEY(self, type_, **kw):
        return "MONEY"

    def visit_SMALLMONEY(self, type_, **kw):
        return "SMALLMONEY"

    def visit_UNIQUEIDENTIFIER(self, type_, **kw):
        return "UNIQUEIDENTIFIER"

ischema_names = {
    'bigint': BIGINT,
    'int': INTEGER,
    'integer': INTEGER,
    'smallint': SMALLINT,
    'tinyint': TINYINT,
    'unsigned bigint': BIGINT,  # TODO: unsigned flags
    'unsigned int': INTEGER,  # TODO: unsigned flags
    'unsigned smallint': SMALLINT,  # TODO: unsigned flags
    'numeric': NUMERIC,
    'decimal': DECIMAL,
    'dec': DECIMAL,
    'float': FLOAT,
    'double': NUMERIC,  # TODO
    'double precision': NUMERIC,  # TODO
    'real': REAL,
    'smallmoney': SMALLMONEY,
    'money': MONEY,
    'smalldatetime': DATETIME,
    'datetime': DATETIME,
    'date': DATE,
    'time': TIME,
    'char': CHAR,
    'character': CHAR,
    'varchar': VARCHAR,
    'character varying': VARCHAR,
    'char varying': VARCHAR,
    'unichar': UNICHAR,
    'unicode character': UNIVARCHAR,
    'nchar': NCHAR,
    'national char': NCHAR,
    'national character': NCHAR,
    'nvarchar': NVARCHAR,
    'nchar varying': NVARCHAR,
    'national char varying': NVARCHAR,
    'national character varying': NVARCHAR,
    'text': TEXT,
    'unitext': UNITEXT,
    'binary': BINARY,
    'varbinary': VARBINARY,
    'image': IMAGE,
    'bit': BIT,

    # not in documentation for ASE 15.7
    'long varchar': TEXT,  # TODO
    'timestamp': TIMESTAMP,
    'uniqueidentifier': UNIQUEIDENTIFIER,

}


class SybaseInspector(reflection.Inspector):

    def __init__(self, conn):
        reflection.Inspector.__init__(self, conn)

    def get_table_id(self, table_name, schema=None):
        """Return the table id from `table_name` and `schema`."""

        return self.dialect.get_table_id(self.bind, table_name, schema,
                                         info_cache=self.info_cache)


class SybaseExecutionContext(default.DefaultExecutionContext):
    _enable_identity_insert = False

    def set_ddl_autocommit(self, connection, value):
        """Must be implemented by subclasses to accommodate DDL executions.

        "connection" is the raw unwrapped DBAPI connection.  "value"
        is True or False.  when True, the connection should be configured
        such that a DDL can take place subsequently.  when False,
        a DDL has taken place and the connection should be resumed
        into non-autocommit mode.

        """
        raise NotImplementedError()

    def pre_exec(self):
        if self.isinsert:
            tbl = self.compiled.statement.table
            seq_column = tbl._autoincrement_column
            insert_has_sequence = seq_column is not None

            if insert_has_sequence:
                self._enable_identity_insert = \
                    seq_column.key in self.compiled_parameters[0]
            else:
                self._enable_identity_insert = False

            if self._enable_identity_insert:
                self.cursor.execute(
                    "SET IDENTITY_INSERT %s ON" %
                    self.dialect.identifier_preparer.format_table(tbl))

        if self.isddl:
            # TODO: to enhance this, we can detect "ddl in tran" on the
            # database settings.  this error message should be improved to
            # include a note about that.
            if not self.should_autocommit:
                raise exc.InvalidRequestError(
                    "The Sybase dialect only supports "
                    "DDL in 'autocommit' mode at this time.")

            self.root_connection.engine.logger.info(
                "AUTOCOMMIT (Assuming no Sybase 'ddl in tran')")

            self.set_ddl_autocommit(
                self.root_connection.connection.connection,
                True)

    def post_exec(self):
        if self.isddl:
            self.set_ddl_autocommit(self.root_connection, False)

        if self._enable_identity_insert:
            self.cursor.execute(
                "SET IDENTITY_INSERT %s OFF" %
                self.dialect.identifier_preparer.
                format_table(self.compiled.statement.table)
            )

    def get_lastrowid(self):
        cursor = self.create_cursor()
        cursor.execute("SELECT @@identity AS lastrowid")
        lastrowid = cursor.fetchone()[0]
        cursor.close()
        return lastrowid
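
The net effect of ``pre_exec()``/``post_exec()`` above, sketched for a
hypothetical table ``t`` whose ``id`` column is the implicit IDENTITY
column (the emitted statements are shown as comments)::

    from sqlalchemy import Table, Column, Integer, String, MetaData

    m = MetaData()
    t = Table('t', m,
              Column('id', Integer, primary_key=True),
              Column('name', String(30)))

    # supplying an explicit "id" value makes pre_exec() bracket the
    # statement with IDENTITY_INSERT, restored again by post_exec():
    #   SET IDENTITY_INSERT t ON
    #   INSERT INTO t (id, name) VALUES (?, ?)
    #   SET IDENTITY_INSERT t OFF
    stmt = t.insert().values(id=7, name='x')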


class SybaseSQLCompiler(compiler.SQLCompiler):
    ansi_bind_rules = True

    extract_map = util.update_copy(
        compiler.SQLCompiler.extract_map,
        {
            'doy': 'dayofyear',
            'dow': 'weekday',
            'milliseconds': 'millisecond'
        })

    def get_select_precolumns(self, select, **kw):
        s = select._distinct and "DISTINCT " or ""
        # TODO: don't think Sybase supports
        # bind params for FIRST / TOP
        limit = select._limit
        if limit:
            # if select._limit == 1:
            #     s += "FIRST "
            # else:
            #     s += "TOP %s " % (select._limit,)
            s += "TOP %s " % (limit,)
        offset = select._offset
        if offset:
            raise NotImplementedError("Sybase ASE does not support OFFSET")
        return s

    def get_from_hint_text(self, table, text):
        return text

    def limit_clause(self, select, **kw):
        # Limit in sybase is after the select keyword
        return ""

    def visit_extract(self, extract, **kw):
        field = self.extract_map.get(extract.field, extract.field)
        return 'DATEPART("%s", %s)' % (
            field, self.process(extract.expr, **kw))

    def visit_now_func(self, fn, **kw):
        return "GETDATE()"

    def for_update_clause(self, select):
        # "FOR UPDATE" is only allowed on "DECLARE CURSOR"
        # which SQLAlchemy doesn't use
        return ''

    def order_by_clause(self, select, **kw):
        kw['literal_binds'] = True
        order_by = self.process(select._order_by_clause, **kw)

        # SybaseSQL only allows ORDER BY in subqueries if there is a LIMIT
        if order_by and (not self.is_subquery() or select._limit):
            return " ORDER BY " + order_by
        else:
            return ""
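
Roughly, the compiler hooks above render as follows (a sketch against a
hypothetical table ``t``; exact labeling and whitespace may differ)::

    from sqlalchemy import (MetaData, Table, Column, Integer, DateTime,
                            select, extract)

    m = MetaData()
    t = Table('t', m, Column('id', Integer), Column('created_at', DateTime))

    # ._limit becomes a TOP prefix via get_select_precolumns();
    # limit_clause() itself contributes nothing
    q = select([t.c.id]).limit(5)
    #   SELECT TOP 5 t.id FROM t

    # extract() maps 'dow' to 'weekday' through extract_map
    q2 = select([extract('dow', t.c.created_at)])
    #   SELECT DATEPART("weekday", t.created_at) AS anon_1 FROM t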


class SybaseDDLCompiler(compiler.DDLCompiler):
    def get_column_specification(self, column, **kwargs):
        colspec = self.preparer.format_column(column) + " " + \
            self.dialect.type_compiler.process(
                column.type, type_expression=column)

        if column.table is None:
            raise exc.CompileError(
                "The Sybase dialect requires Table-bound "
                "columns in order to generate DDL")
        seq_col = column.table._autoincrement_column

        # install a IDENTITY Sequence if we have an implicit IDENTITY column
        if seq_col is column:
            sequence = isinstance(column.default, sa_schema.Sequence) \
                and column.default
            if sequence:
                start, increment = sequence.start or 1, \
                    sequence.increment or 1
            else:
                start, increment = 1, 1
            if (start, increment) == (1, 1):
                colspec += " IDENTITY"
            else:
                # TODO: need correct syntax for this
                colspec += " IDENTITY(%s,%s)" % (start, increment)
        else:
            default = self.get_column_default_string(column)
            if default is not None:
                colspec += " DEFAULT " + default

        if column.nullable is not None:
            if not column.nullable or column.primary_key:
                colspec += " NOT NULL"
            else:
                colspec += " NULL"

        return colspec

    def visit_drop_index(self, drop):
        index = drop.element
        return "\nDROP INDEX %s.%s" % (
            self.preparer.quote_identifier(index.table.name),
            self._prepared_index_name(drop.element,
                                      include_schema=False)
        )


class SybaseIdentifierPreparer(compiler.IdentifierPreparer):
    reserved_words = RESERVED_WORDS


class SybaseDialect(default.DefaultDialect):
    name = 'sybase'
    supports_unicode_statements = False
    supports_sane_rowcount = False
    supports_sane_multi_rowcount = False

    supports_native_boolean = False
    supports_unicode_binds = False
    postfetch_lastrowid = True

    colspecs = {}
    ischema_names = ischema_names

    type_compiler = SybaseTypeCompiler
    statement_compiler = SybaseSQLCompiler
    ddl_compiler = SybaseDDLCompiler
    preparer = SybaseIdentifierPreparer
    inspector = SybaseInspector

    construct_arguments = []

    def _get_default_schema_name(self, connection):
        return connection.scalar(
            text("SELECT user_name() as user_name",
                 typemap={'user_name': Unicode})
        )

    def initialize(self, connection):
        super(SybaseDialect, self).initialize(connection)
        if self.server_version_info is not None and\
                self.server_version_info < (15, ):
            self.max_identifier_length = 30
        else:
            self.max_identifier_length = 255

    def get_table_id(self, connection, table_name, schema=None, **kw):
        """Fetch the id for schema.table_name.

        Several reflection methods require the table id.  The idea for using
        this method is that it can be fetched one time and cached for
        subsequent calls.

        """

        table_id = None
        if schema is None:
            schema = self.default_schema_name

        TABLEID_SQL = text("""
          SELECT o.id AS id
          FROM sysobjects o JOIN sysusers u ON o.uid=u.uid
          WHERE u.name = :schema_name
              AND o.name = :table_name
              AND o.type in ('U', 'V')
        """)

        if util.py2k:
            if isinstance(schema, unicode):
                schema = schema.encode("ascii")
            if isinstance(table_name, unicode):
                table_name = table_name.encode("ascii")
        result = connection.execute(TABLEID_SQL,
                                    schema_name=schema,
                                    table_name=table_name)
        table_id = result.scalar()
        if table_id is None:
            raise exc.NoSuchTableError(table_name)
        return table_id

    @reflection.cache
    def get_columns(self, connection, table_name, schema=None, **kw):
        table_id = self.get_table_id(connection, table_name, schema,
                                     info_cache=kw.get("info_cache"))

        COLUMN_SQL = text("""
          SELECT col.name AS name,
                 t.name AS type,
                 (col.status & 8) AS nullable,
                 (col.status & 128) AS autoincrement,
                 com.text AS 'default',
                 col.prec AS precision,
                 col.scale AS scale,
                 col.length AS length
          FROM systypes t, syscolumns col LEFT OUTER JOIN syscomments com ON
              col.cdefault = com.id
          WHERE col.usertype = t.usertype
              AND col.id = :table_id
          ORDER BY col.colid
        """)

        results = connection.execute(COLUMN_SQL, table_id=table_id)

        columns = []
        for (name, type_, nullable, autoincrement, default, precision, scale,
             length) in results:
            col_info = self._get_column_info(name, type_, bool(nullable),
                                             bool(autoincrement),
                                             default, precision, scale,
                                             length)
            columns.append(col_info)

        return columns

    def _get_column_info(self, name, type_, nullable, autoincrement, default,
                         precision, scale, length):

        coltype = self.ischema_names.get(type_, None)

        kwargs = {}

        if coltype in (NUMERIC, DECIMAL):
            args = (precision, scale)
        elif coltype == FLOAT:
            args = (precision,)
        elif coltype in (CHAR, VARCHAR, UNICHAR, UNIVARCHAR, NCHAR, NVARCHAR):
            args = (length,)
        else:
            args = ()

        if coltype:
            coltype = coltype(*args, **kwargs)
            # is this necessary
            # if is_array:
            #     coltype = ARRAY(coltype)
        else:
            util.warn("Did not recognize type '%s' of column '%s'" %
                      (type_, name))
            coltype = sqltypes.NULLTYPE

        if default:
            default = default.replace("DEFAULT", "").strip()
            default = re.sub("^'(.*)'$", lambda m: m.group(1), default)
        else:
            default = None

        column_info = dict(name=name, type=coltype, nullable=nullable,
                           default=default, autoincrement=autoincrement)
        return column_info

    @reflection.cache
    def get_foreign_keys(self, connection, table_name, schema=None, **kw):

        table_id = self.get_table_id(connection, table_name, schema,
                                     info_cache=kw.get("info_cache"))

        table_cache = {}
        column_cache = {}
        foreign_keys = []

        table_cache[table_id] = {"name": table_name, "schema": schema}

        COLUMN_SQL = text("""
          SELECT c.colid AS id, c.name AS name
          FROM syscolumns c
          WHERE c.id = :table_id
        """)

        results = connection.execute(COLUMN_SQL, table_id=table_id)
        columns = {}
        for col in results:
            columns[col["id"]] = col["name"]
        column_cache[table_id] = columns

        REFCONSTRAINT_SQL = text("""
          SELECT o.name AS name, r.reftabid AS reftable_id,
            r.keycnt AS 'count',
            r.fokey1 AS fokey1, r.fokey2 AS fokey2, r.fokey3 AS fokey3,
            r.fokey4 AS fokey4, r.fokey5 AS fokey5, r.fokey6 AS fokey6,
            r.fokey7 AS fokey7, r.fokey8 AS fokey8, r.fokey9 AS fokey9,
            r.fokey10 AS fokey10, r.fokey11 AS fokey11, r.fokey12 AS fokey12,
            r.fokey13 AS fokey13, r.fokey14 AS fokey14, r.fokey15 AS fokey15,
            r.fokey16 AS fokey16,
            r.refkey1 AS refkey1, r.refkey2 AS refkey2, r.refkey3 AS refkey3,
            r.refkey4 AS refkey4, r.refkey5 AS refkey5, r.refkey6 AS refkey6,
            r.refkey7 AS refkey7, r.refkey8 AS refkey8, r.refkey9 AS refkey9,
            r.refkey10 AS refkey10, r.refkey11 AS refkey11,
            r.refkey12 AS refkey12, r.refkey13 AS refkey13,
            r.refkey14 AS refkey14, r.refkey15 AS refkey15,
            r.refkey16 AS refkey16
          FROM sysreferences r JOIN sysobjects o on r.tableid = o.id
          WHERE r.tableid = :table_id
        """)
        referential_constraints = connection.execute(
            REFCONSTRAINT_SQL, table_id=table_id).fetchall()

        REFTABLE_SQL = text("""
          SELECT o.name AS name, u.name AS 'schema'
          FROM sysobjects o JOIN sysusers u ON o.uid = u.uid
          WHERE o.id = :table_id
        """)

        for r in referential_constraints:
            reftable_id = r["reftable_id"]

            if reftable_id not in table_cache:
                c = connection.execute(REFTABLE_SQL, table_id=reftable_id)
                reftable = c.fetchone()
                c.close()
                table_info = {"name": reftable["name"], "schema": None}
                if (schema is not None or
                        reftable["schema"] != self.default_schema_name):
                    table_info["schema"] = reftable["schema"]

                table_cache[reftable_id] = table_info
                results = connection.execute(COLUMN_SQL, table_id=reftable_id)
                reftable_columns = {}
                for col in results:
                    reftable_columns[col["id"]] = col["name"]
                column_cache[reftable_id] = reftable_columns

            reftable = table_cache[reftable_id]
            reftable_columns = column_cache[reftable_id]

            constrained_columns = []
            referred_columns = []
            for i in range(1, r["count"] + 1):
                constrained_columns.append(columns[r["fokey%i" % i]])
                referred_columns.append(reftable_columns[r["refkey%i" % i]])

            fk_info = {
                "constrained_columns": constrained_columns,
                "referred_schema": reftable["schema"],
                "referred_table": reftable["name"],
                "referred_columns": referred_columns,
                "name": r["name"]
            }

            foreign_keys.append(fk_info)

        return foreign_keys

    @reflection.cache
    def get_indexes(self, connection, table_name, schema=None, **kw):
        table_id = self.get_table_id(connection, table_name, schema,
                                     info_cache=kw.get("info_cache"))

        INDEX_SQL = text("""
          SELECT object_name(i.id) AS table_name,
                 i.keycnt AS 'count',
                 i.name AS name,
                 (i.status & 0x2) AS 'unique',
                 index_col(object_name(i.id), i.indid, 1) AS col_1,
                 index_col(object_name(i.id), i.indid, 2) AS col_2,
                 index_col(object_name(i.id), i.indid, 3) AS col_3,
                 index_col(object_name(i.id), i.indid, 4) AS col_4,
                 index_col(object_name(i.id), i.indid, 5) AS col_5,
                 index_col(object_name(i.id), i.indid, 6) AS col_6,
                 index_col(object_name(i.id), i.indid, 7) AS col_7,
                 index_col(object_name(i.id), i.indid, 8) AS col_8,
                 index_col(object_name(i.id), i.indid, 9) AS col_9,
                 index_col(object_name(i.id), i.indid, 10) AS col_10,
                 index_col(object_name(i.id), i.indid, 11) AS col_11,
                 index_col(object_name(i.id), i.indid, 12) AS col_12,
                 index_col(object_name(i.id), i.indid, 13) AS col_13,
                 index_col(object_name(i.id), i.indid, 14) AS col_14,
                 index_col(object_name(i.id), i.indid, 15) AS col_15,
                 index_col(object_name(i.id), i.indid, 16) AS col_16
          FROM sysindexes i, sysobjects o
          WHERE o.id = i.id
            AND o.id = :table_id
            AND (i.status & 2048) = 0
            AND i.indid BETWEEN 1 AND 254
        """)

        results = connection.execute(INDEX_SQL, table_id=table_id)
        indexes = []
        for r in results:
            column_names = []
            for i in range(1, r["count"]):
                column_names.append(r["col_%i" % (i,)])
            index_info = {"name": r["name"],
                          "unique": bool(r["unique"]),
                          "column_names": column_names}
            indexes.append(index_info)

        return indexes

    @reflection.cache
    def get_pk_constraint(self, connection, table_name, schema=None, **kw):
        table_id = self.get_table_id(connection, table_name, schema,
                                     info_cache=kw.get("info_cache"))

        PK_SQL = text("""
          SELECT object_name(i.id) AS table_name,
                 i.keycnt AS 'count',
                 i.name AS name,
                 index_col(object_name(i.id), i.indid, 1) AS pk_1,
                 index_col(object_name(i.id), i.indid, 2) AS pk_2,
                 index_col(object_name(i.id), i.indid, 3) AS pk_3,
                 index_col(object_name(i.id), i.indid, 4) AS pk_4,
                 index_col(object_name(i.id), i.indid, 5) AS pk_5,
                 index_col(object_name(i.id), i.indid, 6) AS pk_6,
                 index_col(object_name(i.id), i.indid, 7) AS pk_7,
                 index_col(object_name(i.id), i.indid, 8) AS pk_8,
                 index_col(object_name(i.id), i.indid, 9) AS pk_9,
                 index_col(object_name(i.id), i.indid, 10) AS pk_10,
                 index_col(object_name(i.id), i.indid, 11) AS pk_11,
                 index_col(object_name(i.id), i.indid, 12) AS pk_12,
                 index_col(object_name(i.id), i.indid, 13) AS pk_13,
                 index_col(object_name(i.id), i.indid, 14) AS pk_14,
                 index_col(object_name(i.id), i.indid, 15) AS pk_15,
                 index_col(object_name(i.id), i.indid, 16) AS pk_16
          FROM sysindexes i, sysobjects o
          WHERE o.id = i.id
            AND o.id = :table_id
            AND (i.status & 2048) = 2048
            AND i.indid BETWEEN 1 AND 254
        """)

        results = connection.execute(PK_SQL, table_id=table_id)
        pks = results.fetchone()
        results.close()

        constrained_columns = []
        if pks:
            for i in range(1, pks["count"] + 1):
                constrained_columns.append(pks["pk_%i" % (i,)])
            return {"constrained_columns": constrained_columns,
                    "name": pks["name"]}
        else:
            return {"constrained_columns": [], "name": None}

    @reflection.cache
    def get_schema_names(self, connection, **kw):

        SCHEMA_SQL = text("SELECT u.name AS name FROM sysusers u")

        schemas = connection.execute(SCHEMA_SQL)

        return [s["name"] for s in schemas]

    @reflection.cache
    def get_table_names(self, connection, schema=None, **kw):
        if schema is None:
            schema = self.default_schema_name

        TABLE_SQL = text("""
          SELECT o.name AS name
          FROM sysobjects o JOIN sysusers u ON o.uid = u.uid
          WHERE u.name = :schema_name
            AND o.type = 'U'
        """)

        if util.py2k:
            if isinstance(schema, unicode):
                schema = schema.encode("ascii")

        tables = connection.execute(TABLE_SQL, schema_name=schema)

        return [t["name"] for t in tables]

    @reflection.cache
    def get_view_definition(self, connection, view_name, schema=None, **kw):
        if schema is None:
            schema = self.default_schema_name

        VIEW_DEF_SQL = text("""
          SELECT c.text
          FROM syscomments c JOIN sysobjects o ON c.id = o.id
          WHERE o.name = :view_name
            AND o.type = 'V'
        """)

        if util.py2k:
            if isinstance(view_name, unicode):
                view_name = view_name.encode("ascii")

        view = connection.execute(VIEW_DEF_SQL, view_name=view_name)

        return view.scalar()

    @reflection.cache
    def get_view_names(self, connection, schema=None, **kw):
        if schema is None:
            schema = self.default_schema_name

        VIEW_SQL = text("""
          SELECT o.name AS name
          FROM sysobjects o JOIN sysusers u ON o.uid = u.uid
          WHERE u.name = :schema_name
            AND o.type = 'V'
        """)

        if util.py2k:
            if isinstance(schema, unicode):
                schema = schema.encode("ascii")
        views = connection.execute(VIEW_SQL, schema_name=schema)

        return [v["name"] for v in views]

    def has_table(self, connection, table_name, schema=None):
        try:
            self.get_table_id(connection, table_name, schema)
        except exc.NoSuchTableError:
            return False
        else:
            return True
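
Taken together, the reflection methods above back the standard Inspector
interface; a minimal sketch (the URL and table names are hypothetical)::

    from sqlalchemy import create_engine, inspect

    engine = create_engine("sybase+pyodbc://scott:tiger@mydsn")
    insp = inspect(engine)   # resolves to the SybaseInspector defined above

    insp.get_table_names(schema='dbo')
    insp.get_columns('some_table')       # dicts: name, type, nullable, ...
    insp.get_foreign_keys('some_table')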
@ -1,33 +0,0 @@
# sybase/mxodbc.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""

.. dialect:: sybase+mxodbc
    :name: mxODBC
    :dbapi: mxodbc
    :connectstring: sybase+mxodbc://<username>:<password>@<dsnname>
    :url: http://www.egenix.com/

.. note::

    This dialect is a stub only and is likely non functional at this time.


"""
from sqlalchemy.dialects.sybase.base import SybaseDialect
from sqlalchemy.dialects.sybase.base import SybaseExecutionContext
from sqlalchemy.connectors.mxodbc import MxODBCConnector


class SybaseExecutionContext_mxodbc(SybaseExecutionContext):
    pass


class SybaseDialect_mxodbc(MxODBCConnector, SybaseDialect):
    execution_ctx_cls = SybaseExecutionContext_mxodbc

dialect = SybaseDialect_mxodbc
@ -1,86 +0,0 @@
# sybase/pyodbc.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: sybase+pyodbc
    :name: PyODBC
    :dbapi: pyodbc
    :connectstring: sybase+pyodbc://<username>:<password>@<dsnname>\
[/<database>]
    :url: http://pypi.python.org/pypi/pyodbc/


Unicode Support
---------------

The pyodbc driver currently supports usage of these Sybase types with
Unicode or multibyte strings::

    CHAR
    NCHAR
    NVARCHAR
    TEXT
    VARCHAR

Currently *not* supported are::

    UNICHAR
    UNITEXT
    UNIVARCHAR

"""

from sqlalchemy.dialects.sybase.base import SybaseDialect,\
    SybaseExecutionContext
from sqlalchemy.connectors.pyodbc import PyODBCConnector
from sqlalchemy import types as sqltypes, processors
import decimal


class _SybNumeric_pyodbc(sqltypes.Numeric):
    """Turns Decimals with adjusted() < -6 into floats.

    It's not yet known how to get decimals with many
    significant digits or very large adjusted() into Sybase
    via pyodbc.

    """

    def bind_processor(self, dialect):
        super_process = super(_SybNumeric_pyodbc, self).\
            bind_processor(dialect)

        def process(value):
            if self.asdecimal and \
                    isinstance(value, decimal.Decimal):

                if value.adjusted() < -6:
                    return processors.to_float(value)

            if super_process:
                return super_process(value)
            else:
                return value
        return process


class SybaseExecutionContext_pyodbc(SybaseExecutionContext):
    def set_ddl_autocommit(self, connection, value):
        if value:
            connection.autocommit = True
        else:
            connection.autocommit = False


class SybaseDialect_pyodbc(PyODBCConnector, SybaseDialect):
    execution_ctx_cls = SybaseExecutionContext_pyodbc

    colspecs = {
        sqltypes.Numeric: _SybNumeric_pyodbc,
    }

dialect = SybaseDialect_pyodbc
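
To see why ``adjusted() < -6`` is the cutoff used by ``_SybNumeric_pyodbc``,
note that ``adjusted()`` is the exponent of the most significant digit
(a pure-Python illustration)::

    import decimal

    decimal.Decimal("0.0000001").adjusted()   # -7  -> converted to float
    decimal.Decimal("0.001").adjusted()       # -3  -> passed on as Decimal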
@ -1,102 +0,0 @@
# sybase/pysybase.py
# Copyright (C) 2010-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
.. dialect:: sybase+pysybase
    :name: Python-Sybase
    :dbapi: Sybase
    :connectstring: sybase+pysybase://<username>:<password>@<dsn>/\
[database name]
    :url: http://python-sybase.sourceforge.net/

Unicode Support
---------------

The python-sybase driver does not appear to support non-ASCII strings of any
kind at this time.

"""

from sqlalchemy import types as sqltypes, processors
from sqlalchemy.dialects.sybase.base import SybaseDialect, \
    SybaseExecutionContext, SybaseSQLCompiler


class _SybNumeric(sqltypes.Numeric):
    def result_processor(self, dialect, type_):
        if not self.asdecimal:
            return processors.to_float
        else:
            return sqltypes.Numeric.result_processor(self, dialect, type_)


class SybaseExecutionContext_pysybase(SybaseExecutionContext):

    def set_ddl_autocommit(self, dbapi_connection, value):
        if value:
            # call commit() on the Sybase connection directly,
            # to avoid any side effects of calling a Connection
            # transactional method inside of pre_exec()
            dbapi_connection.commit()

    def pre_exec(self):
        SybaseExecutionContext.pre_exec(self)

        for param in self.parameters:
            for key in list(param):
                param["@" + key] = param[key]
                del param[key]


class SybaseSQLCompiler_pysybase(SybaseSQLCompiler):
    def bindparam_string(self, name, **kw):
        return "@" + name


class SybaseDialect_pysybase(SybaseDialect):
    driver = 'pysybase'
    execution_ctx_cls = SybaseExecutionContext_pysybase
    statement_compiler = SybaseSQLCompiler_pysybase

    colspecs = {
        sqltypes.Numeric: _SybNumeric,
        sqltypes.Float: sqltypes.Float
    }

    @classmethod
    def dbapi(cls):
        import Sybase
        return Sybase

    def create_connect_args(self, url):
        opts = url.translate_connect_args(username='user', password='passwd')

        return ([opts.pop('host')], opts)

    def do_executemany(self, cursor, statement, parameters, context=None):
        # calling python-sybase executemany yields:
        # TypeError: string too long for buffer
        for param in parameters:
            cursor.execute(statement, param)

    def _get_server_version_info(self, connection):
        vers = connection.scalar("select @@version_number")
        # i.e. 15500, 15000, 12500 == (15, 5, 0, 0), (15, 0, 0, 0),
        # (12, 5, 0, 0)
        return (vers / 1000, vers % 1000 / 100, vers % 100 / 10, vers % 10)

    def is_disconnect(self, e, connection, cursor):
        if isinstance(e, (self.dbapi.OperationalError,
                          self.dbapi.ProgrammingError)):
            msg = str(e)
            return ('Unable to complete network request to host' in msg or
                    'Invalid connection state' in msg or
                    'Invalid cursor state' in msg)
        else:
            return False

dialect = SybaseDialect_pysybase
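
The arithmetic in ``_get_server_version_info()`` unpacks the version scalar
like so (Python 2 integer division, matching the module's py2k assumptions)::

    vers = 15500
    (vers / 1000, vers % 1000 / 100, vers % 100 / 10, vers % 10)
    # -> (15, 5, 0, 0);  12500 -> (12, 5, 0, 0)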
@ -1,145 +0,0 @@
Rules for Migrating TypeEngine classes to 0.6
---------------------------------------------

1. the TypeEngine classes are used for:

    a. Specifying behavior which needs to occur for bind parameters
    or result row columns.

    b. Specifying types that are entirely specific to the database
    in use and have no analogue in the sqlalchemy.types package.

    c. Specifying types where there is an analogue in sqlalchemy.types,
    but the database in use takes vendor-specific flags for those
    types.

    d. If a TypeEngine class doesn't provide any of this, it should be
    *removed* from the dialect.

2. the TypeEngine classes are *no longer* used for generating DDL.  Dialects
now have a TypeCompiler subclass which uses the same visit_XXX model as
other compilers.

3. the "ischema_names" and "colspecs" dictionaries are now required members on
the Dialect class.

4. The names of types within dialects are now important.  If a dialect-specific
type is a subclass of an existing generic type and is only provided for
bind/result behavior, the current mixed case naming can remain, i.e.
_PGNumeric for Numeric - in this case, end users would never need to use
_PGNumeric directly.  However, if a dialect-specific type is specifying a type
*or* arguments that are not present generically, it should match the real name
of the type on that backend, in uppercase.  E.g. postgresql.INET, mysql.ENUM,
postgresql.ARRAY.

Or follow this handy flowchart:

    1. Is the type meant to provide bind/result behavior to a generic
       (i.e. MixedCase) type in types.py?

         yes -> name the type using _MixedCase, i.e. _OracleBoolean.  It
                stays private to the dialect and is invoked *only* via
                the colspecs dict.  Subclass the closest MixedCase type
                in types.py, i.e. class _DateTime(types.DateTime).

         no  -> go to 2.

    2. Is the type the same name as an UPPERCASE type in types.py?

         yes -> go to 3.

         no  -> go to 4.

    3. Does your type need special behavior or arguments?

         no  -> don't make a type; make sure the dialect's base.py
                imports the types.py UPPERCASE name into its namespace
                (i.e. BIT, NCHAR, INTERVAL).  Users can import it.

         yes -> go to 4.

    4. Name the type identically as that within the DB, using UPPERCASE.
       Users can import it.  Is the name of this type identical to an
       UPPERCASE name in types.py?

         yes -> the type should subclass the UPPERCASE type in types.py
                (i.e. class BLOB(types.BLOB)).

         no  -> subclass the closest MixedCase type in types.py, or
                TypeEngine directly, i.e. class DATETIME2(types.DateTime),
                class BIT(types.TypeEngine).

Example 1.  pysqlite needs bind/result processing for the DateTime type in
types.py, which applies to all DateTimes and subclasses.  It's named
_SLDateTime and subclasses types.DateTime.

Example 2.  MS-SQL has a TIME type which takes a non-standard "precision"
argument that is rendered within DDL.  So it's named TIME in the MS-SQL
dialect's base.py, and subclasses types.TIME.  Users can then say
mssql.TIME(precision=10).

Example 3.  MS-SQL dialects also need special bind/result processing for
dates.  But the DATE type doesn't render DDL differently than that of a plain
DATE, i.e. it takes no special arguments.  Therefore we are just adding
behavior to types.Date, so it's named _MSDate in the MS-SQL dialect's base.py,
and subclasses types.Date.

Example 4.  MySQL has a SET type, there's no analogue for this in types.py.
So MySQL names it SET in the dialect's base.py, and it subclasses
types.String, since it ultimately deals with strings.

Example 5.  PostgreSQL has a DATETIME type.  The DBAPIs handle dates
correctly, and no special arguments are used in PG's DDL beyond what types.py
provides.  PostgreSQL dialect therefore imports types.DATETIME into its
base.py.

Ideally one should be able to specify a schema using names imported completely
from a dialect, all matching the real name on that backend:

   from sqlalchemy.dialects.postgresql import base as pg

   t = Table('mytable', metadata,
             Column('id', pg.INTEGER, primary_key=True),
             Column('name', pg.VARCHAR(300)),
             Column('inetaddr', pg.INET)
   )

where above, the INTEGER and VARCHAR types are ultimately from
sqlalchemy.types, but the PG dialect makes them available in its own
namespace.

5. "colspecs" now is a dictionary of generic or uppercased types from
sqlalchemy.types linked to types specified in the dialect.  Again, if a type
in the dialect does not specify any special behavior for bind_processor() or
result_processor() and does not indicate a special type only available in this
database, it must be *removed* from the module and from this dictionary.

6. "ischema_names" indicates string descriptions of types as returned from the
database linked to TypeEngine classes.

    a. The string name should be matched to the most specific type possible
    within sqlalchemy.types, unless there is no matching type within
    sqlalchemy.types in which case it points to a dialect type.  *It doesn't
    matter* if the dialect has its own subclass of that type with special
    bind/result behavior - reflect to the types.py UPPERCASE type as much as
    possible.  With very few exceptions, all types should reflect to an
    UPPERCASE type.

    b. If the dialect contains a matching dialect-specific type that takes
    extra arguments which the generic one does not, then point to the
    dialect-specific type.  E.g. mssql.VARCHAR takes a "collation" parameter
    which should be preserved.

7. DDL, or what was formerly issued by "get_col_spec()", is now handled
exclusively by a subclass of compiler.GenericTypeCompiler.

    a. your TypeCompiler class will receive generic and uppercase types from
    sqlalchemy.types.  Do not assume the presence of dialect-specific
    attributes on these types.

    b. the visit_UPPERCASE methods on GenericTypeCompiler should *not* be
    overridden with methods that produce a different DDL name.  Uppercase
    types don't do any kind of "guessing" - if visit_TIMESTAMP is called, the
    DDL should render as TIMESTAMP in all cases, regardless of whether or not
    that type is legal on the backend database.

    c. the visit_UPPERCASE methods *should* be overridden with methods that
    add additional arguments and flags to those types.

    d. the visit_lowercase methods are overridden to provide an interpretation
    of a generic type.  E.g. visit_large_binary() might be overridden to say
    "return self.visit_BIT(type_)".

    e. visit_lowercase methods should *never* render strings directly - it
    should always be via calling a visit_UPPERCASE() method.
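
As a compact illustration of the rules above (a sketch only; the dialect,
type names and the "fancytext" reflection string are invented for the
example)::

    from sqlalchemy import types
    from sqlalchemy.sql import compiler

    # rule 4: a bind/result-only tweak to a generic type -> _MixedCase,
    # private to the dialect, wired up exclusively through colspecs (rule 5)
    class _MyDialectNumeric(types.Numeric):
        def bind_processor(self, dialect):
            return super(_MyDialectNumeric, self).bind_processor(dialect)

    # rule 4: a backend-only type -> named as on the backend, UPPERCASE
    class FANCYTEXT(types.TypeEngine):
        __visit_name__ = 'FANCYTEXT'

    # rule 7: DDL comes from the type compiler, not the type itself
    class MyDialectTypeCompiler(compiler.GenericTypeCompiler):
        def visit_FANCYTEXT(self, type_, **kw):
            return "FANCYTEXT"

    colspecs = {types.Numeric: _MyDialectNumeric}    # rule 5
    ischema_names = {'fancytext': FANCYTEXT}         # rule 6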
@ -1,434 +0,0 @@
|
||||
# engine/__init__.py
|
||||
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""SQL connections, SQL execution and high-level DB-API interface.
|
||||
|
||||
The engine package defines the basic components used to interface
|
||||
DB-API modules with higher-level statement construction,
|
||||
connection-management, execution and result contexts. The primary
|
||||
"entry point" class into this package is the Engine and its public
|
||||
constructor ``create_engine()``.
|
||||
|
||||
This package includes:
|
||||
|
||||
base.py
|
||||
Defines interface classes and some implementation classes which
|
||||
comprise the basic components used to interface between a DB-API,
|
||||
constructed and plain-text statements, connections, transactions,
|
||||
and results.
|
||||
|
||||
default.py
|
||||
Contains default implementations of some of the components defined
|
||||
in base.py. All current database dialects use the classes in
|
||||
default.py as base classes for their own database-specific
|
||||
implementations.
|
||||
|
||||
strategies.py
|
||||
The mechanics of constructing ``Engine`` objects are represented
|
||||
here. Defines the ``EngineStrategy`` class which represents how
|
||||
to go from arguments specified to the ``create_engine()``
|
||||
function, to a fully constructed ``Engine``, including
|
||||
initialization of connection pooling, dialects, and specific
|
||||
subclasses of ``Engine``.
|
||||
|
||||
threadlocal.py
|
||||
The ``TLEngine`` class is defined here, which is a subclass of
|
||||
the generic ``Engine`` and tracks ``Connection`` and
|
||||
``Transaction`` objects against the identity of the current
|
||||
thread. This allows certain programming patterns based around
|
||||
the concept of a "thread-local connection" to be possible.
|
||||
The ``TLEngine`` is created by using the "threadlocal" engine
|
||||
strategy in conjunction with the ``create_engine()`` function.
|
||||
|
||||
url.py
|
||||
Defines the ``URL`` class which represents the individual
|
||||
components of a string URL passed to ``create_engine()``. Also
|
||||
defines a basic module-loading strategy for the dialect specifier
|
||||
within a URL.
|
||||
"""
|
||||
|
||||
from .interfaces import (
|
||||
Connectable,
|
||||
CreateEnginePlugin,
|
||||
Dialect,
|
||||
ExecutionContext,
|
||||
ExceptionContext,
|
||||
|
||||
# backwards compat
|
||||
Compiled,
|
||||
TypeCompiler
|
||||
)
|
||||
|
||||
from .base import (
|
||||
Connection,
|
||||
Engine,
|
||||
NestedTransaction,
|
||||
RootTransaction,
|
||||
Transaction,
|
||||
TwoPhaseTransaction,
|
||||
)
|
||||
|
||||
from .result import (
|
||||
BaseRowProxy,
|
||||
BufferedColumnResultProxy,
|
||||
BufferedColumnRow,
|
||||
BufferedRowResultProxy,
|
||||
FullyBufferedResultProxy,
|
||||
ResultProxy,
|
||||
RowProxy,
|
||||
)
|
||||
|
||||
from .util import (
|
||||
connection_memoize
|
||||
)
|
||||
|
||||
|
||||
from . import util, strategies
|
||||
|
||||
# backwards compat
|
||||
from ..sql import ddl
|
||||
|
||||
default_strategy = 'plain'
|
||||
|
||||
|
||||
def create_engine(*args, **kwargs):
|
||||
"""Create a new :class:`.Engine` instance.
|
||||
|
||||
The standard calling form is to send the URL as the
|
||||
first positional argument, usually a string
|
||||
that indicates database dialect and connection arguments::
|
||||
|
||||
|
||||
engine = create_engine("postgresql://scott:tiger@localhost/test")
|
||||
|
||||
Additional keyword arguments may then follow it which
|
||||
establish various options on the resulting :class:`.Engine`
|
||||
and its underlying :class:`.Dialect` and :class:`.Pool`
|
||||
constructs::
|
||||
|
||||
engine = create_engine("mysql://scott:tiger@hostname/dbname",
|
||||
encoding='latin1', echo=True)
|
||||
|
||||
The string form of the URL is
|
||||
``dialect[+driver]://user:password@host/dbname[?key=value..]``, where
|
||||
``dialect`` is a database name such as ``mysql``, ``oracle``,
|
||||
``postgresql``, etc., and ``driver`` the name of a DBAPI, such as
|
||||
``psycopg2``, ``pyodbc``, ``cx_oracle``, etc. Alternatively,
|
||||
the URL can be an instance of :class:`~sqlalchemy.engine.url.URL`.
|
||||
|
||||
``**kwargs`` takes a wide variety of options which are routed
|
||||
towards their appropriate components. Arguments may be specific to
|
||||
the :class:`.Engine`, the underlying :class:`.Dialect`, as well as the
|
||||
:class:`.Pool`. Specific dialects also accept keyword arguments that
|
||||
are unique to that dialect. Here, we describe the parameters
|
||||
that are common to most :func:`.create_engine()` usage.
|
||||
|
||||
Once established, the newly resulting :class:`.Engine` will
|
||||
request a connection from the underlying :class:`.Pool` once
|
||||
:meth:`.Engine.connect` is called, or a method which depends on it
|
||||
such as :meth:`.Engine.execute` is invoked. The :class:`.Pool` in turn
|
||||
will establish the first actual DBAPI connection when this request
|
||||
is received. The :func:`.create_engine` call itself does **not**
|
||||
establish any actual DBAPI connections directly.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:doc:`/core/engines`
|
||||
|
||||
:doc:`/dialects/index`
|
||||
|
||||
:ref:`connections_toplevel`
|
||||
|
||||
:param case_sensitive=True: if False, result column names
|
||||
will match in a case-insensitive fashion, that is,
|
||||
``row['SomeColumn']``.
|
||||
|
||||
.. versionchanged:: 0.8
|
||||
By default, result row names match case-sensitively.
|
||||
In version 0.7 and prior, all matches were case-insensitive.
|
||||
|
||||
:param connect_args: a dictionary of options which will be
|
||||
passed directly to the DBAPI's ``connect()`` method as
|
||||
additional keyword arguments. See the example
|
||||
at :ref:`custom_dbapi_args`.
|
||||
|
||||
:param convert_unicode=False: if set to True, sets
|
||||
the default behavior of ``convert_unicode`` on the
|
||||
:class:`.String` type to ``True``, regardless
|
||||
of a setting of ``False`` on an individual
|
||||
:class:`.String` type, thus causing all :class:`.String`
|
||||
-based columns
|
||||
to accommodate Python ``unicode`` objects. This flag
|
||||
is useful as an engine-wide setting when using a
|
||||
DBAPI that does not natively support Python
|
||||
``unicode`` objects and raises an error when
|
||||
one is received (such as pyodbc with FreeTDS).
|
||||
|
||||
See :class:`.String` for further details on
|
||||
what this flag indicates.
|
||||
|
||||
:param creator: a callable which returns a DBAPI connection.
|
||||
This creation function will be passed to the underlying
|
||||
connection pool and will be used to create all new database
|
||||
connections. Usage of this function causes connection
|
||||
parameters specified in the URL argument to be bypassed.
|
||||
|
||||
:param echo=False: if True, the Engine will log all statements
|
||||
as well as a repr() of their parameter lists to the engines
|
||||
logger, which defaults to sys.stdout. The ``echo`` attribute of
|
||||
``Engine`` can be modified at any time to turn logging on and
|
||||
off. If set to the string ``"debug"``, result rows will be
|
||||
printed to the standard output as well. This flag ultimately
|
||||
controls a Python logger; see :ref:`dbengine_logging` for
|
||||
information on how to configure logging directly.
|
||||
|
||||
:param echo_pool=False: if True, the connection pool will log
|
||||
all checkouts/checkins to the logging stream, which defaults to
|
||||
sys.stdout. This flag ultimately controls a Python logger; see
|
||||
:ref:`dbengine_logging` for information on how to configure logging
|
||||
directly.
|
||||
|
||||
:param encoding: Defaults to ``utf-8``. This is the string
|
||||
encoding used by SQLAlchemy for string encode/decode
|
||||
operations which occur within SQLAlchemy, **outside of
|
||||
the DBAPI.** Most modern DBAPIs feature some degree of
|
||||
direct support for Python ``unicode`` objects,
|
||||
what you see in Python 2 as a string of the form
|
||||
``u'some string'``. For those scenarios where the
|
||||
DBAPI is detected as not supporting a Python ``unicode``
|
||||
object, this encoding is used to determine the
|
||||
source/destination encoding. It is **not used**
|
||||
for those cases where the DBAPI handles unicode
|
||||
directly.
|
||||
|
||||
To properly configure a system to accommodate Python
|
||||
``unicode`` objects, the DBAPI should be
|
||||
configured to handle unicode to the greatest
|
||||
degree as is appropriate - see
|
||||
the notes on unicode pertaining to the specific
|
||||
target database in use at :ref:`dialect_toplevel`.
|
||||
|
||||
Areas where string encoding may need to be accommodated
|
||||
outside of the DBAPI include zero or more of:
|
||||
|
||||
* the values passed to bound parameters, corresponding to
|
||||
the :class:`.Unicode` type or the :class:`.String` type
|
||||
when ``convert_unicode`` is ``True``;
|
||||
* the values returned in result set columns corresponding
|
||||
to the :class:`.Unicode` type or the :class:`.String`
|
||||
type when ``convert_unicode`` is ``True``;
|
||||
* the string SQL statement passed to the DBAPI's
|
||||
``cursor.execute()`` method;
|
||||
* the string names of the keys in the bound parameter
|
||||
dictionary passed to the DBAPI's ``cursor.execute()``
|
||||
as well as ``cursor.setinputsizes()`` methods;
|
||||
* the string column names retrieved from the DBAPI's
|
||||
``cursor.description`` attribute.
|
||||
|
||||
When using Python 3, the DBAPI is required to support
|
||||
*all* of the above values as Python ``unicode`` objects,
|
||||
which in Python 3 are just known as ``str``. In Python 2,
|
||||
the DBAPI does not specify unicode behavior at all,
|
||||
so SQLAlchemy must make decisions for each of the above
|
||||
values on a per-DBAPI basis - implementations are
|
||||
completely inconsistent in their behavior.
|
||||
|
||||
:param execution_options: Dictionary execution options which will
|
||||
be applied to all connections. See
|
||||
:meth:`~sqlalchemy.engine.Connection.execution_options`
|
||||
|
||||
:param implicit_returning=True: When ``True``, a RETURNING-
|
||||
compatible construct, if available, will be used to
|
||||
fetch newly generated primary key values when a single row
|
||||
INSERT statement is emitted with no existing returning()
|
||||
clause. This applies to those backends which support RETURNING
|
||||
or a compatible construct, including PostgreSQL, Firebird, Oracle,
|
||||
Microsoft SQL Server. Set this to ``False`` to disable
|
||||
the automatic usage of RETURNING.
|
||||
|
||||
:param isolation_level: this string parameter is interpreted by various
|
||||
dialects in order to affect the transaction isolation level of the
|
||||
database connection. The parameter essentially accepts some subset of
|
||||
these string arguments: ``"SERIALIZABLE"``, ``"REPEATABLE_READ"``,
|
||||
``"READ_COMMITTED"``, ``"READ_UNCOMMITTED"`` and ``"AUTOCOMMIT"``.
|
||||
Behavior here varies per backend, and
|
||||
individual dialects should be consulted directly.
|
||||
|
||||
Note that the isolation level can also be set on a per-:class:`.Connection`
|
||||
basis as well, using the
|
||||
:paramref:`.Connection.execution_options.isolation_level`
|
||||
feature.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:attr:`.Connection.default_isolation_level` - view default level
|
||||
|
||||
:paramref:`.Connection.execution_options.isolation_level`
|
||||
- set per :class:`.Connection` isolation level
|
||||
|
||||
:ref:`SQLite Transaction Isolation <sqlite_isolation_level>`
|
||||
|
||||
:ref:`PostgreSQL Transaction Isolation <postgresql_isolation_level>`
|
||||
|
||||
:ref:`MySQL Transaction Isolation <mysql_isolation_level>`
|
||||
|
||||
:ref:`session_transaction_isolation` - for the ORM
|
||||
|
||||
:param label_length=None: optional integer value which limits
|
||||
the size of dynamically generated column labels to that many
|
||||
characters. If less than 6, labels are generated as
|
||||
"_(counter)". If ``None``, the value of
|
||||
``dialect.max_identifier_length`` is used instead.
|
||||
|
||||
:param listeners: A list of one or more
|
||||
:class:`~sqlalchemy.interfaces.PoolListener` objects which will
|
||||
receive connection pool events.
|
||||
|
||||
:param logging_name: String identifier which will be used within
|
||||
the "name" field of logging records generated within the
|
||||
"sqlalchemy.engine" logger. Defaults to a hexstring of the
|
||||
object's id.
|
||||
|
||||
    :param max_overflow=10: the number of connections to allow in
      connection pool "overflow", that is connections that can be
      opened above and beyond the pool_size setting, which defaults
      to five. This is only used with :class:`~sqlalchemy.pool.QueuePool`.

    :param module=None: reference to a Python module object (the module
      itself, not its string name). Specifies an alternate DBAPI module to
      be used by the engine's dialect. Each sub-dialect references a
      specific DBAPI which will be imported before first connect. This
      parameter causes the import to be bypassed, and the given module to
      be used instead. Can be used for testing of DBAPIs as well as to
      inject "mock" DBAPI implementations into the :class:`.Engine`.

    :param paramstyle=None: The `paramstyle <http://legacy.python.org/dev/peps/pep-0249/#paramstyle>`_
      to use when rendering bound parameters. This style defaults to the
      one recommended by the DBAPI itself, which is retrieved from the
      ``.paramstyle`` attribute of the DBAPI. However, most DBAPIs accept
      more than one paramstyle, and in particular it may be desirable
      to change a "named" paramstyle into a "positional" one, or vice versa.
      When this attribute is passed, it should be one of the values
      ``"qmark"``, ``"numeric"``, ``"named"``, ``"format"`` or
      ``"pyformat"``, and should correspond to a parameter style known
      to be supported by the DBAPI in use.

    :param pool=None: an already-constructed instance of
      :class:`~sqlalchemy.pool.Pool`, such as a
      :class:`~sqlalchemy.pool.QueuePool` instance. If non-None, this
      pool will be used directly as the underlying connection pool
      for the engine, bypassing whatever connection parameters are
      present in the URL argument. For information on constructing
      connection pools manually, see :ref:`pooling_toplevel`.

    :param poolclass=None: a :class:`~sqlalchemy.pool.Pool`
      subclass, which will be used to create a connection pool
      instance using the connection parameters given in the URL. Note
      this differs from ``pool`` in that you don't actually
      instantiate the pool in this case, you just indicate what type
      of pool to be used.

    :param pool_logging_name: String identifier which will be used within
      the "name" field of logging records generated within the
      "sqlalchemy.pool" logger. Defaults to a hexstring of the object's
      id.

    :param pool_size=5: the number of connections to keep open
      inside the connection pool. This is used with
      :class:`~sqlalchemy.pool.QueuePool` as
      well as :class:`~sqlalchemy.pool.SingletonThreadPool`. With
      :class:`~sqlalchemy.pool.QueuePool`, a ``pool_size`` setting
      of 0 indicates no limit; to disable pooling, set ``poolclass`` to
      :class:`~sqlalchemy.pool.NullPool` instead.

    :param pool_recycle=-1: this setting causes the pool to recycle
      connections after the given number of seconds has passed. It
      defaults to -1, or no timeout. For example, setting to 3600
      means connections will be recycled after one hour. Note that
      MySQL in particular will disconnect automatically if no
      activity is detected on a connection for eight hours (although
      this is configurable with the MySQLDB connection itself and the
      server configuration as well).

    :param pool_reset_on_return='rollback': set the "reset on return"
      behavior of the pool, which is whether ``rollback()``,
      ``commit()``, or nothing is called upon connections
      being returned to the pool. See the docstring for
      ``reset_on_return`` at :class:`.Pool`.

      .. versionadded:: 0.7.6

    :param pool_timeout=30: number of seconds to wait before giving
      up on getting a connection from the pool. This is only used
      with :class:`~sqlalchemy.pool.QueuePool`.

    :param strategy='plain': selects alternate engine implementations.
      Currently available are:

      * the ``threadlocal`` strategy, which is described in
        :ref:`threadlocal_strategy`;
      * the ``mock`` strategy, which dispatches all statement
        execution to a function passed as the argument ``executor``.
        See `example in the FAQ
        <http://docs.sqlalchemy.org/en/latest/faq/metadata_schema.html#how-can-i-get-the-create-table-drop-table-output-as-a-string>`_.

    :param executor=None: a function taking arguments
      ``(sql, *multiparams, **params)``, to which the ``mock`` strategy will
      dispatch all statement execution. Used only by ``strategy='mock'``.

    """

    strategy = kwargs.pop('strategy', default_strategy)
    strategy = strategies.strategies[strategy]
    return strategy.create(*args, **kwargs)
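A minimal usage sketch of the pooling parameters documented above (the DSN is a placeholder; QueuePool is assumed as the default pool class):

    from sqlalchemy import create_engine

    engine = create_engine(
        'postgresql://scott:tiger@localhost/test',  # hypothetical URL
        pool_size=5,        # connections held open in the pool
        max_overflow=10,    # extra connections allowed beyond pool_size
        pool_recycle=3600,  # recycle connections after one hour
        pool_timeout=30,    # seconds to wait for a pooled connection
    )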


def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
    """Create a new Engine instance using a configuration dictionary.

    The dictionary is typically produced from a config file.

    The keys of interest to ``engine_from_config()`` should be prefixed, e.g.
    ``sqlalchemy.url``, ``sqlalchemy.echo``, etc. The 'prefix' argument
    indicates the prefix to be searched for. Each matching key (after the
    prefix is stripped) is treated as though it were the corresponding keyword
    argument to a :func:`.create_engine` call.

    The only required key is (assuming the default prefix) ``sqlalchemy.url``,
    which provides the :ref:`database URL <database_urls>`.

    A select set of keyword arguments will be "coerced" to their
    expected type based on string values. The set of arguments
    is extensible per-dialect using the ``engine_config_types`` accessor.

    :param configuration: A dictionary (typically produced from a config file,
      but this is not a requirement). Items whose keys start with the value
      of 'prefix' will have that prefix stripped, and will then be passed to
      :func:`.create_engine`.

    :param prefix: Prefix to match and then strip from keys
      in 'configuration'.

    :param kwargs: Each keyword argument to ``engine_from_config()`` itself
      overrides the corresponding item taken from the 'configuration'
      dictionary. Keyword arguments should *not* be prefixed.

    """

    options = dict((key[len(prefix):], configuration[key])
                   for key in configuration
                   if key.startswith(prefix))
    options['_coerce_config'] = True
    options.update(kwargs)
    url = options.pop('url')
    return create_engine(url, **options)
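A hedged sketch of the prefix handling described above (key names follow the default ``sqlalchemy.`` prefix; values are illustrative):

    config = {
        'sqlalchemy.url': 'sqlite:///example.db',   # the one required key
        'sqlalchemy.echo': 'true',                  # coerced from string to bool
        'sqlalchemy.pool_recycle': '3600',          # coerced from string to int
    }
    # equivalent to create_engine('sqlite:///example.db', echo=True, pool_recycle=3600)
    engine = engine_from_config(config)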


__all__ = (
    'create_engine',
    'engine_from_config',
)
File diff suppressed because it is too large
@ -1,128 +0,0 @@
# engine/ddl.py
# Copyright (C) 2009, 2010 Michael Bayer mike_mp@zzzcomputing.com
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Routines to handle CREATE/DROP workflow."""

from sqlalchemy import engine, schema
from sqlalchemy.sql import util as sql_util


class DDLBase(schema.SchemaVisitor):
    def __init__(self, connection):
        self.connection = connection

class SchemaGenerator(DDLBase):
    def __init__(self, dialect, connection, checkfirst=False, tables=None, **kwargs):
        super(SchemaGenerator, self).__init__(connection, **kwargs)
        self.checkfirst = checkfirst
        self.tables = tables and set(tables) or None
        self.preparer = dialect.identifier_preparer
        self.dialect = dialect

    def _can_create(self, table):
        self.dialect.validate_identifier(table.name)
        if table.schema:
            self.dialect.validate_identifier(table.schema)
        return not self.checkfirst or not self.dialect.has_table(self.connection, table.name, schema=table.schema)

    def visit_metadata(self, metadata):
        if self.tables:
            tables = self.tables
        else:
            tables = metadata.tables.values()
        collection = [t for t in sql_util.sort_tables(tables) if self._can_create(t)]

        for listener in metadata.ddl_listeners['before-create']:
            listener('before-create', metadata, self.connection, tables=collection)

        for table in collection:
            self.traverse_single(table)

        for listener in metadata.ddl_listeners['after-create']:
            listener('after-create', metadata, self.connection, tables=collection)

    def visit_table(self, table):
        for listener in table.ddl_listeners['before-create']:
            listener('before-create', table, self.connection)

        for column in table.columns:
            if column.default is not None:
                self.traverse_single(column.default)

        self.connection.execute(schema.CreateTable(table))

        if hasattr(table, 'indexes'):
            for index in table.indexes:
                self.traverse_single(index)

        for listener in table.ddl_listeners['after-create']:
            listener('after-create', table, self.connection)

    def visit_sequence(self, sequence):
        if self.dialect.supports_sequences:
            if ((not self.dialect.sequences_optional or
                 not sequence.optional) and
                (not self.checkfirst or
                 not self.dialect.has_sequence(self.connection, sequence.name, schema=sequence.schema))):
                self.connection.execute(schema.CreateSequence(sequence))

    def visit_index(self, index):
        self.connection.execute(schema.CreateIndex(index))


class SchemaDropper(DDLBase):
    def __init__(self, dialect, connection, checkfirst=False, tables=None, **kwargs):
        super(SchemaDropper, self).__init__(connection, **kwargs)
        self.checkfirst = checkfirst
        self.tables = tables
        self.preparer = dialect.identifier_preparer
        self.dialect = dialect

    def visit_metadata(self, metadata):
        if self.tables:
            tables = self.tables
        else:
            tables = metadata.tables.values()
        collection = [t for t in reversed(sql_util.sort_tables(tables)) if self._can_drop(t)]

        for listener in metadata.ddl_listeners['before-drop']:
            listener('before-drop', metadata, self.connection, tables=collection)

        for table in collection:
            self.traverse_single(table)

        for listener in metadata.ddl_listeners['after-drop']:
            listener('after-drop', metadata, self.connection, tables=collection)

    def _can_drop(self, table):
        self.dialect.validate_identifier(table.name)
        if table.schema:
            self.dialect.validate_identifier(table.schema)
        return not self.checkfirst or self.dialect.has_table(self.connection, table.name, schema=table.schema)

    def visit_index(self, index):
        self.connection.execute(schema.DropIndex(index))

    def visit_table(self, table):
        for listener in table.ddl_listeners['before-drop']:
            listener('before-drop', table, self.connection)

        for column in table.columns:
            if column.default is not None:
                self.traverse_single(column.default)

        self.connection.execute(schema.DropTable(table))

        for listener in table.ddl_listeners['after-drop']:
            listener('after-drop', table, self.connection)

    def visit_sequence(self, sequence):
        if self.dialect.supports_sequences:
            if ((not self.dialect.sequences_optional or
                 not sequence.optional) and
                (not self.checkfirst or
                 self.dialect.has_sequence(self.connection, sequence.name, schema=sequence.schema))):
                self.connection.execute(schema.DropSequence(sequence))
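These visitor classes are normally driven indirectly; a sketch of the typical entry point in this era of the codebase, assuming an in-memory SQLite engine:

    from sqlalchemy import MetaData, Table, Column, Integer, create_engine

    engine = create_engine('sqlite://')
    meta = MetaData()
    users = Table('users', meta, Column('id', Integer, primary_key=True))

    # create_all()/drop_all() walk the metadata with SchemaGenerator and
    # SchemaDropper, emitting CREATE/DROP statements in dependency order.
    meta.create_all(bind=engine)
    meta.drop_all(bind=engine)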
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -1,843 +0,0 @@
# engine/reflection.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Provides an abstraction for obtaining database schema information.

Usage Notes:

Here are some general conventions when accessing the low level inspector
methods such as get_table_names, get_columns, etc.

1. Inspector methods return lists of dicts in most cases for the following
   reasons:

   * They're both standard types that can be serialized.
   * Using a dict instead of a tuple allows easy expansion of attributes.
   * Using a list for the outer structure maintains order and is easy to work
     with (e.g. list comprehension [d['name'] for d in cols]).

2. Records that contain a name, such as the column name in a column record
   use the key 'name'. So for most return values, each record will have a
   'name' attribute.
"""

from .. import exc, sql
from ..sql import schema as sa_schema
from .. import util
from ..sql.type_api import TypeEngine
from ..util import deprecated
from ..util import topological
from .. import inspection
from .base import Connectable


@util.decorator
def cache(fn, self, con, *args, **kw):
    info_cache = kw.get('info_cache', None)
    if info_cache is None:
        return fn(self, con, *args, **kw)
    key = (
        fn.__name__,
        tuple(a for a in args if isinstance(a, util.string_types)),
        tuple((k, v) for k, v in kw.items() if
              isinstance(v,
                         util.string_types + util.int_types + (float, )
                         )
              )
    )
    ret = info_cache.get(key)
    if ret is None:
        ret = fn(self, con, *args, **kw)
        info_cache[key] = ret
    return ret
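The decorator above memoizes dialect reflection calls in the ``info_cache`` dictionary, keyed by method name plus string/numeric arguments. A sketch of the effect, assuming a table named 'users' exists in the target database:

    from sqlalchemy import create_engine, inspect

    insp = inspect(create_engine('sqlite:///example.db'))
    insp.get_columns('users')  # queries the database
    insp.get_columns('users')  # answered from insp.info_cache via @cache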


class Inspector(object):
    """Performs database schema inspection.

    The Inspector acts as a proxy to the reflection methods of the
    :class:`~sqlalchemy.engine.interfaces.Dialect`, providing a
    consistent interface as well as caching support for previously
    fetched metadata.

    A :class:`.Inspector` object is usually created via the
    :func:`.inspect` function::

        from sqlalchemy import inspect, create_engine
        engine = create_engine('...')
        insp = inspect(engine)

    The inspection method above is equivalent to using the
    :meth:`.Inspector.from_engine` method, i.e.::

        engine = create_engine('...')
        insp = Inspector.from_engine(engine)

    Where above, the :class:`~sqlalchemy.engine.interfaces.Dialect` may opt
    to return an :class:`.Inspector` subclass that provides additional
    methods specific to the dialect's target database.

    """

    def __init__(self, bind):
        """Initialize a new :class:`.Inspector`.

        :param bind: a :class:`~sqlalchemy.engine.Connectable`,
          which is typically an instance of
          :class:`~sqlalchemy.engine.Engine` or
          :class:`~sqlalchemy.engine.Connection`.

        For a dialect-specific instance of :class:`.Inspector`, see
        :meth:`.Inspector.from_engine`

        """
        # this might not be a connection, it could be an engine.
        self.bind = bind

        # set the engine
        if hasattr(bind, 'engine'):
            self.engine = bind.engine
        else:
            self.engine = bind

        if self.engine is bind:
            # if engine, ensure initialized
            bind.connect().close()

        self.dialect = self.engine.dialect
        self.info_cache = {}

    @classmethod
    def from_engine(cls, bind):
        """Construct a new dialect-specific Inspector object from the given
        engine or connection.

        :param bind: a :class:`~sqlalchemy.engine.Connectable`,
          which is typically an instance of
          :class:`~sqlalchemy.engine.Engine` or
          :class:`~sqlalchemy.engine.Connection`.

        This method differs from a direct constructor call of
        :class:`.Inspector` in that the
        :class:`~sqlalchemy.engine.interfaces.Dialect` is given a chance to
        provide a dialect-specific :class:`.Inspector` instance, which may
        provide additional methods.

        See the example at :class:`.Inspector`.

        """
        if hasattr(bind.dialect, 'inspector'):
            return bind.dialect.inspector(bind)
        return Inspector(bind)

    @inspection._inspects(Connectable)
    def _insp(bind):
        return Inspector.from_engine(bind)

    @property
    def default_schema_name(self):
        """Return the default schema name presented by the dialect
        for the current engine's database user.

        E.g. this is typically ``public`` for PostgreSQL and ``dbo``
        for SQL Server.

        """
        return self.dialect.default_schema_name

    def get_schema_names(self):
        """Return all schema names.
        """

        if hasattr(self.dialect, 'get_schema_names'):
            return self.dialect.get_schema_names(self.bind,
                                                 info_cache=self.info_cache)
        return []

    def get_table_names(self, schema=None, order_by=None):
        """Return all table names within a particular schema.

        The names are expected to be real tables only, not views.
        Views are instead returned using the :meth:`.Inspector.get_view_names`
        method.


        :param schema: Schema name. If ``schema`` is left at ``None``, the
          database's default schema is
          used, else the named schema is searched. If the database does not
          support named schemas, behavior is undefined if ``schema`` is not
          passed as ``None``. For special quoting, use :class:`.quoted_name`.

        :param order_by: Optional, may be the string "foreign_key" to sort
          the result on foreign key dependencies. Does not automatically
          resolve cycles, and will raise :class:`.CircularDependencyError`
          if cycles exist.

          .. deprecated:: 1.0.0 - see
             :meth:`.Inspector.get_sorted_table_and_fkc_names` for a version
             of this which resolves foreign key cycles between tables
             automatically.

          .. versionchanged:: 0.8 the "foreign_key" sorting sorts tables
             in order of dependee to dependent; that is, in creation
             order, rather than in drop order. This is to maintain
             consistency with similar features such as
             :attr:`.MetaData.sorted_tables` and :func:`.util.sort_tables`.

        .. seealso::

            :meth:`.Inspector.get_sorted_table_and_fkc_names`

            :attr:`.MetaData.sorted_tables`

        """

        if hasattr(self.dialect, 'get_table_names'):
            tnames = self.dialect.get_table_names(
                self.bind, schema, info_cache=self.info_cache)
        else:
            tnames = self.engine.table_names(schema)
        if order_by == 'foreign_key':
            tuples = []
            for tname in tnames:
                for fkey in self.get_foreign_keys(tname, schema):
                    if tname != fkey['referred_table']:
                        tuples.append((fkey['referred_table'], tname))
            tnames = list(topological.sort(tuples, tnames))
        return tnames
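A short usage sketch of the inspection methods above (the database URL is a placeholder):

    from sqlalchemy import create_engine, inspect

    engine = create_engine('sqlite:///example.db')
    insp = inspect(engine)
    print(insp.get_schema_names())
    print(insp.get_table_names())  # real tables only; views come from get_view_names()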

    def get_sorted_table_and_fkc_names(self, schema=None):
        """Return dependency-sorted table and foreign key constraint names
        within a particular schema.

        This will yield 2-tuples of
        ``(tablename, [(tname, fkname), (tname, fkname), ...])``
        consisting of table names in CREATE order grouped with the foreign key
        constraint names that are not detected as belonging to a cycle.
        The final element
        will be ``(None, [(tname, fkname), (tname, fkname), ..])``
        which will consist of remaining
        foreign key constraint names that would require a separate CREATE
        step after-the-fact, based on dependencies between tables.

        .. versionadded:: 1.0.0

        .. seealso::

            :meth:`.Inspector.get_table_names`

            :func:`.sort_tables_and_constraints` - similar method which works
            with an already-given :class:`.MetaData`.

        """
        if hasattr(self.dialect, 'get_table_names'):
            tnames = self.dialect.get_table_names(
                self.bind, schema, info_cache=self.info_cache)
        else:
            tnames = self.engine.table_names(schema)

        tuples = set()
        remaining_fkcs = set()

        fknames_for_table = {}
        for tname in tnames:
            fkeys = self.get_foreign_keys(tname, schema)
            fknames_for_table[tname] = set(
                [fk['name'] for fk in fkeys]
            )
            for fkey in fkeys:
                if tname != fkey['referred_table']:
                    tuples.add((fkey['referred_table'], tname))
        try:
            candidate_sort = list(topological.sort(tuples, tnames))
        except exc.CircularDependencyError as err:
            for edge in err.edges:
                tuples.remove(edge)
                remaining_fkcs.update(
                    (edge[1], fkc)
                    for fkc in fknames_for_table[edge[1]]
                )

            candidate_sort = list(topological.sort(tuples, tnames))
        return [
            (tname, fknames_for_table[tname].difference(remaining_fkcs))
            for tname in candidate_sort
        ] + [(None, list(remaining_fkcs))]
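A hedged sketch of consuming the structure this method returns:

    for tname, fkcs in insp.get_sorted_table_and_fkc_names():
        if tname is not None:
            print('create', tname, 'with inline constraints', fkcs)
        else:
            # final element: constraints in cycles, to be added after all tables
            print('deferred foreign key constraints:', fkcs)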

    def get_temp_table_names(self):
        """return a list of temporary table names for the current bind.

        This method is unsupported by most dialects; currently
        only SQLite implements it.

        .. versionadded:: 1.0.0

        """
        return self.dialect.get_temp_table_names(
            self.bind, info_cache=self.info_cache)

    def get_temp_view_names(self):
        """return a list of temporary view names for the current bind.

        This method is unsupported by most dialects; currently
        only SQLite implements it.

        .. versionadded:: 1.0.0

        """
        return self.dialect.get_temp_view_names(
            self.bind, info_cache=self.info_cache)

    def get_table_options(self, table_name, schema=None, **kw):
        """Return a dictionary of options specified when the table of the
        given name was created.

        This currently includes some options that apply to MySQL tables.

        :param table_name: string name of the table. For special quoting,
          use :class:`.quoted_name`.

        :param schema: string schema name; if omitted, uses the default schema
          of the database connection. For special quoting,
          use :class:`.quoted_name`.

        """
        if hasattr(self.dialect, 'get_table_options'):
            return self.dialect.get_table_options(
                self.bind, table_name, schema,
                info_cache=self.info_cache, **kw)
        return {}

    def get_view_names(self, schema=None):
        """Return all view names in `schema`.

        :param schema: Optional, retrieve names from a non-default schema.
          For special quoting, use :class:`.quoted_name`.

        """

        return self.dialect.get_view_names(self.bind, schema,
                                           info_cache=self.info_cache)

    def get_view_definition(self, view_name, schema=None):
        """Return definition for `view_name`.

        :param schema: Optional, retrieve names from a non-default schema.
          For special quoting, use :class:`.quoted_name`.

        """

        return self.dialect.get_view_definition(
            self.bind, view_name, schema, info_cache=self.info_cache)

    def get_columns(self, table_name, schema=None, **kw):
        """Return information about columns in `table_name`.

        Given a string `table_name` and an optional string `schema`, return
        column information as a list of dicts with these keys:

        * ``name`` - the column's name

        * ``type`` - the type of this column; an instance of
          :class:`~sqlalchemy.types.TypeEngine`

        * ``nullable`` - boolean flag if the column is NULL or NOT NULL

        * ``default`` - the column's server default value - this is returned
          as a string SQL expression.

        * ``attrs`` - dict containing optional column attributes

        :param table_name: string name of the table. For special quoting,
          use :class:`.quoted_name`.

        :param schema: string schema name; if omitted, uses the default schema
          of the database connection. For special quoting,
          use :class:`.quoted_name`.

        :return: list of dictionaries, each representing the definition of
          a database column.

        """

        col_defs = self.dialect.get_columns(self.bind, table_name, schema,
                                            info_cache=self.info_cache,
                                            **kw)
        for col_def in col_defs:
            # make this easy and only return instances for coltype
            coltype = col_def['type']
            if not isinstance(coltype, TypeEngine):
                col_def['type'] = coltype()
        return col_defs
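Illustrating the per-column dictionary shape documented above (table name is a placeholder):

    for col in insp.get_columns('users'):
        # e.g. {'name': 'id', 'type': INTEGER(), 'nullable': False, 'default': None}
        print(col['name'], col['type'], col['nullable'])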

    @deprecated('0.7', 'Call to deprecated method get_primary_keys.'
                ' Use get_pk_constraint instead.')
    def get_primary_keys(self, table_name, schema=None, **kw):
        """Return information about primary keys in `table_name`.

        Given a string `table_name`, and an optional string `schema`, return
        primary key information as a list of column names.
        """

        return self.dialect.get_pk_constraint(self.bind, table_name, schema,
                                              info_cache=self.info_cache,
                                              **kw)['constrained_columns']

    def get_pk_constraint(self, table_name, schema=None, **kw):
        """Return information about primary key constraint on `table_name`.

        Given a string `table_name`, and an optional string `schema`, return
        primary key information as a dictionary with these keys:

        constrained_columns
          a list of column names that make up the primary key

        name
          optional name of the primary key constraint.

        :param table_name: string name of the table. For special quoting,
          use :class:`.quoted_name`.

        :param schema: string schema name; if omitted, uses the default schema
          of the database connection. For special quoting,
          use :class:`.quoted_name`.

        """
        return self.dialect.get_pk_constraint(self.bind, table_name, schema,
                                              info_cache=self.info_cache,
                                              **kw)

    def get_foreign_keys(self, table_name, schema=None, **kw):
        """Return information about foreign_keys in `table_name`.

        Given a string `table_name`, and an optional string `schema`, return
        foreign key information as a list of dicts with these keys:

        constrained_columns
          a list of column names that make up the foreign key

        referred_schema
          the name of the referred schema

        referred_table
          the name of the referred table

        referred_columns
          a list of column names in the referred table that correspond to
          constrained_columns

        name
          optional name of the foreign key constraint.

        :param table_name: string name of the table. For special quoting,
          use :class:`.quoted_name`.

        :param schema: string schema name; if omitted, uses the default schema
          of the database connection. For special quoting,
          use :class:`.quoted_name`.

        """

        return self.dialect.get_foreign_keys(self.bind, table_name, schema,
                                             info_cache=self.info_cache,
                                             **kw)

    def get_indexes(self, table_name, schema=None, **kw):
        """Return information about indexes in `table_name`.

        Given a string `table_name` and an optional string `schema`, return
        index information as a list of dicts with these keys:

        name
          the index's name

        column_names
          list of column names in order

        unique
          boolean

        dialect_options
          dict of dialect-specific index options. May not be present
          for all dialects.

          .. versionadded:: 1.0.0

        :param table_name: string name of the table. For special quoting,
          use :class:`.quoted_name`.

        :param schema: string schema name; if omitted, uses the default schema
          of the database connection. For special quoting,
          use :class:`.quoted_name`.

        """

        return self.dialect.get_indexes(self.bind, table_name,
                                        schema,
                                        info_cache=self.info_cache, **kw)

    def get_unique_constraints(self, table_name, schema=None, **kw):
        """Return information about unique constraints in `table_name`.

        Given a string `table_name` and an optional string `schema`, return
        unique constraint information as a list of dicts with these keys:

        name
          the unique constraint's name

        column_names
          list of column names in order

        :param table_name: string name of the table. For special quoting,
          use :class:`.quoted_name`.

        :param schema: string schema name; if omitted, uses the default schema
          of the database connection. For special quoting,
          use :class:`.quoted_name`.

        .. versionadded:: 0.8.4

        """

        return self.dialect.get_unique_constraints(
            self.bind, table_name, schema, info_cache=self.info_cache, **kw)

    def get_check_constraints(self, table_name, schema=None, **kw):
        """Return information about check constraints in `table_name`.

        Given a string `table_name` and an optional string `schema`, return
        check constraint information as a list of dicts with these keys:

        name
          the check constraint's name

        sqltext
          the check constraint's SQL expression

        :param table_name: string name of the table. For special quoting,
          use :class:`.quoted_name`.

        :param schema: string schema name; if omitted, uses the default schema
          of the database connection. For special quoting,
          use :class:`.quoted_name`.

        .. versionadded:: 1.1.0

        """

        return self.dialect.get_check_constraints(
            self.bind, table_name, schema, info_cache=self.info_cache, **kw)

    def reflecttable(self, table, include_columns, exclude_columns=(),
                     _extend_on=None):
        """Given a Table object, load its internal constructs based on
        introspection.

        This is the underlying method used by most dialects to produce
        table reflection. Direct usage is like::

            from sqlalchemy import create_engine, MetaData, Table
            from sqlalchemy.engine import reflection

            engine = create_engine('...')
            meta = MetaData()
            user_table = Table('user', meta)
            insp = Inspector.from_engine(engine)
            insp.reflecttable(user_table, None)

        :param table: a :class:`~sqlalchemy.schema.Table` instance.
        :param include_columns: a list of string column names to include
          in the reflection process. If ``None``, all columns are reflected.

        """

        if _extend_on is not None:
            if table in _extend_on:
                return
            else:
                _extend_on.add(table)

        dialect = self.bind.dialect

        schema = self.bind.schema_for_object(table)

        table_name = table.name

        # get table-level arguments that are specifically
        # intended for reflection, e.g. oracle_resolve_synonyms.
        # these are unconditionally passed to related Table
        # objects
        reflection_options = dict(
            (k, table.dialect_kwargs.get(k))
            for k in dialect.reflection_options
            if k in table.dialect_kwargs
        )

        # reflect table options, like mysql_engine
        tbl_opts = self.get_table_options(
            table_name, schema, **table.dialect_kwargs)
        if tbl_opts:
            # add additional kwargs to the Table if the dialect
            # returned them
            table._validate_dialect_kwargs(tbl_opts)

        if util.py2k:
            if isinstance(schema, str):
                schema = schema.decode(dialect.encoding)
            if isinstance(table_name, str):
                table_name = table_name.decode(dialect.encoding)

        found_table = False
        cols_by_orig_name = {}

        for col_d in self.get_columns(
                table_name, schema, **table.dialect_kwargs):
            found_table = True

            self._reflect_column(
                table, col_d, include_columns,
                exclude_columns, cols_by_orig_name)

        if not found_table:
            raise exc.NoSuchTableError(table.name)

        self._reflect_pk(
            table_name, schema, table, cols_by_orig_name, exclude_columns)

        self._reflect_fk(
            table_name, schema, table, cols_by_orig_name,
            exclude_columns, _extend_on, reflection_options)

        self._reflect_indexes(
            table_name, schema, table, cols_by_orig_name,
            include_columns, exclude_columns, reflection_options)

        self._reflect_unique_constraints(
            table_name, schema, table, cols_by_orig_name,
            include_columns, exclude_columns, reflection_options)

        self._reflect_check_constraints(
            table_name, schema, table, cols_by_orig_name,
            include_columns, exclude_columns, reflection_options)

    def _reflect_column(
            self, table, col_d, include_columns,
            exclude_columns, cols_by_orig_name):

        orig_name = col_d['name']

        table.dispatch.column_reflect(self, table, col_d)

        # fetch name again as column_reflect is allowed to
        # change it
        name = col_d['name']
        if (include_columns and name not in include_columns) \
                or (exclude_columns and name in exclude_columns):
            return

        coltype = col_d['type']

        col_kw = dict(
            (k, col_d[k])
            for k in ['nullable', 'autoincrement', 'quote', 'info', 'key']
            if k in col_d
        )

        colargs = []
        if col_d.get('default') is not None:
            default = col_d['default']
            if isinstance(default, sql.elements.TextClause):
                default = sa_schema.DefaultClause(default, _reflected=True)
            elif not isinstance(default, sa_schema.FetchedValue):
                default = sa_schema.DefaultClause(
                    sql.text(col_d['default']), _reflected=True)

            colargs.append(default)

        if 'sequence' in col_d:
            self._reflect_col_sequence(col_d, colargs)

        cols_by_orig_name[orig_name] = col = \
            sa_schema.Column(name, coltype, *colargs, **col_kw)

        if col.key in table.primary_key:
            col.primary_key = True
        table.append_column(col)

    def _reflect_col_sequence(self, col_d, colargs):
        if 'sequence' in col_d:
            # TODO: mssql and sybase are using this.
            seq = col_d['sequence']
            sequence = sa_schema.Sequence(seq['name'], 1, 1)
            if 'start' in seq:
                sequence.start = seq['start']
            if 'increment' in seq:
                sequence.increment = seq['increment']
            colargs.append(sequence)

    def _reflect_pk(
            self, table_name, schema, table,
            cols_by_orig_name, exclude_columns):
        pk_cons = self.get_pk_constraint(
            table_name, schema, **table.dialect_kwargs)
        if pk_cons:
            pk_cols = [
                cols_by_orig_name[pk]
                for pk in pk_cons['constrained_columns']
                if pk in cols_by_orig_name and pk not in exclude_columns
            ]

            # update pk constraint name
            table.primary_key.name = pk_cons.get('name')

            # tell the PKConstraint to re-initialize
            # its column collection
            table.primary_key._reload(pk_cols)

    def _reflect_fk(
            self, table_name, schema, table, cols_by_orig_name,
            exclude_columns, _extend_on, reflection_options):
        fkeys = self.get_foreign_keys(
            table_name, schema, **table.dialect_kwargs)
        for fkey_d in fkeys:
            conname = fkey_d['name']
            # look for columns by orig name in cols_by_orig_name,
            # but support columns that are in-Python only as fallback
            constrained_columns = [
                cols_by_orig_name[c].key
                if c in cols_by_orig_name else c
                for c in fkey_d['constrained_columns']
            ]
            if exclude_columns and set(constrained_columns).intersection(
                    exclude_columns):
                continue
            referred_schema = fkey_d['referred_schema']
            referred_table = fkey_d['referred_table']
            referred_columns = fkey_d['referred_columns']
            refspec = []
            if referred_schema is not None:
                sa_schema.Table(referred_table, table.metadata,
                                autoload=True, schema=referred_schema,
                                autoload_with=self.bind,
                                _extend_on=_extend_on,
                                **reflection_options
                                )
                for column in referred_columns:
                    refspec.append(".".join(
                        [referred_schema, referred_table, column]))
            else:
                sa_schema.Table(referred_table, table.metadata, autoload=True,
                                autoload_with=self.bind,
                                schema=sa_schema.BLANK_SCHEMA,
                                _extend_on=_extend_on,
                                **reflection_options
                                )
                for column in referred_columns:
                    refspec.append(".".join([referred_table, column]))
            if 'options' in fkey_d:
                options = fkey_d['options']
            else:
                options = {}
            table.append_constraint(
                sa_schema.ForeignKeyConstraint(constrained_columns, refspec,
                                               conname, link_to_name=True,
                                               **options))

    def _reflect_indexes(
            self, table_name, schema, table, cols_by_orig_name,
            include_columns, exclude_columns, reflection_options):
        # Indexes
        indexes = self.get_indexes(table_name, schema)
        for index_d in indexes:
            name = index_d['name']
            columns = index_d['column_names']
            unique = index_d['unique']
            flavor = index_d.get('type', 'index')
            dialect_options = index_d.get('dialect_options', {})

            duplicates = index_d.get('duplicates_constraint')
            if include_columns and \
                    not set(columns).issubset(include_columns):
                util.warn(
                    "Omitting %s key for (%s), key covers omitted columns." %
                    (flavor, ', '.join(columns)))
                continue
            if duplicates:
                continue
            # look for columns by orig name in cols_by_orig_name,
            # but support columns that are in-Python only as fallback
            idx_cols = []
            for c in columns:
                try:
                    idx_col = cols_by_orig_name[c] \
                        if c in cols_by_orig_name else table.c[c]
                except KeyError:
                    util.warn(
                        "%s key '%s' was not located in "
                        "columns for table '%s'" % (
                            flavor, c, table_name
                        ))
                else:
                    idx_cols.append(idx_col)

            sa_schema.Index(
                name, *idx_cols,
                **dict(list(dialect_options.items()) + [('unique', unique)])
            )

    def _reflect_unique_constraints(
            self, table_name, schema, table, cols_by_orig_name,
            include_columns, exclude_columns, reflection_options):

        # Unique Constraints
        try:
            constraints = self.get_unique_constraints(table_name, schema)
        except NotImplementedError:
            # optional dialect feature
            return

        for const_d in constraints:
            conname = const_d['name']
            columns = const_d['column_names']
            duplicates = const_d.get('duplicates_index')
            if include_columns and \
                    not set(columns).issubset(include_columns):
                util.warn(
                    "Omitting unique constraint key for (%s), "
                    "key covers omitted columns." %
                    ', '.join(columns))
                continue
            if duplicates:
                continue
            # look for columns by orig name in cols_by_orig_name,
            # but support columns that are in-Python only as fallback
            constrained_cols = []
            for c in columns:
                try:
                    constrained_col = cols_by_orig_name[c] \
                        if c in cols_by_orig_name else table.c[c]
                except KeyError:
                    util.warn(
                        "unique constraint key '%s' was not located in "
                        "columns for table '%s'" % (c, table_name))
                else:
                    constrained_cols.append(constrained_col)
            table.append_constraint(
                sa_schema.UniqueConstraint(*constrained_cols, name=conname))

    def _reflect_check_constraints(
            self, table_name, schema, table, cols_by_orig_name,
            include_columns, exclude_columns, reflection_options):
        try:
            constraints = self.get_check_constraints(table_name, schema)
        except NotImplementedError:
            # optional dialect feature
            return

        for const_d in constraints:
            table.append_constraint(
                sa_schema.CheckConstraint(**const_d))
File diff suppressed because it is too large
@ -1,283 +0,0 @@
# engine/strategies.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Strategies for creating new instances of Engine types.

These are semi-private implementation classes which provide the
underlying behavior for the "strategy" keyword argument available on
:func:`~sqlalchemy.engine.create_engine`. Current available options are
``plain``, ``threadlocal``, and ``mock``.

New strategies can be added via new ``EngineStrategy`` classes.
"""

from operator import attrgetter

from sqlalchemy.engine import base, threadlocal, url
from sqlalchemy import util, event
from sqlalchemy import pool as poollib
from sqlalchemy.sql import schema

strategies = {}


class EngineStrategy(object):
    """An adaptor that processes input arguments and produces an Engine.

    Provides a ``create`` method that receives input arguments and
    produces an instance of base.Engine or a subclass.

    """

    def __init__(self):
        strategies[self.name] = self

    def create(self, *args, **kwargs):
        """Given arguments, returns a new Engine instance."""

        raise NotImplementedError()


class DefaultEngineStrategy(EngineStrategy):
    """Base class for built-in strategies."""

    def create(self, name_or_url, **kwargs):
        # create url.URL object
        u = url.make_url(name_or_url)

        plugins = u._instantiate_plugins(kwargs)

        u.query.pop('plugin', None)

        entrypoint = u._get_entrypoint()
        dialect_cls = entrypoint.get_dialect_cls(u)

        if kwargs.pop('_coerce_config', False):
            def pop_kwarg(key, default=None):
                value = kwargs.pop(key, default)
                if key in dialect_cls.engine_config_types:
                    value = dialect_cls.engine_config_types[key](value)
                return value
        else:
            pop_kwarg = kwargs.pop

        dialect_args = {}
        # consume dialect arguments from kwargs
        for k in util.get_cls_kwargs(dialect_cls):
            if k in kwargs:
                dialect_args[k] = pop_kwarg(k)

        dbapi = kwargs.pop('module', None)
        if dbapi is None:
            dbapi_args = {}
            for k in util.get_func_kwargs(dialect_cls.dbapi):
                if k in kwargs:
                    dbapi_args[k] = pop_kwarg(k)
            dbapi = dialect_cls.dbapi(**dbapi_args)

        dialect_args['dbapi'] = dbapi

        for plugin in plugins:
            plugin.handle_dialect_kwargs(dialect_cls, dialect_args)

        # create dialect
        dialect = dialect_cls(**dialect_args)

        # assemble connection arguments
        (cargs, cparams) = dialect.create_connect_args(u)
        cparams.update(pop_kwarg('connect_args', {}))
        cargs = list(cargs)  # allow mutability

        # look for existing pool or create
        pool = pop_kwarg('pool', None)
        if pool is None:
            def connect(connection_record=None):
                if dialect._has_events:
                    for fn in dialect.dispatch.do_connect:
                        connection = fn(
                            dialect, connection_record, cargs, cparams)
                        if connection is not None:
                            return connection
                return dialect.connect(*cargs, **cparams)

            creator = pop_kwarg('creator', connect)

            poolclass = pop_kwarg('poolclass', None)
            if poolclass is None:
                poolclass = dialect_cls.get_pool_class(u)
            pool_args = {
                'dialect': dialect
            }

            # consume pool arguments from kwargs, translating a few of
            # the arguments
            translate = {'logging_name': 'pool_logging_name',
                         'echo': 'echo_pool',
                         'timeout': 'pool_timeout',
                         'recycle': 'pool_recycle',
                         'events': 'pool_events',
                         'use_threadlocal': 'pool_threadlocal',
                         'reset_on_return': 'pool_reset_on_return'}
            for k in util.get_cls_kwargs(poolclass):
                tk = translate.get(k, k)
                if tk in kwargs:
                    pool_args[k] = pop_kwarg(tk)

            for plugin in plugins:
                plugin.handle_pool_kwargs(poolclass, pool_args)

            pool = poolclass(creator, **pool_args)
        else:
            if isinstance(pool, poollib._DBProxy):
                pool = pool.get_pool(*cargs, **cparams)
            else:
                pool = pool

            pool._dialect = dialect

        # create engine.
        engineclass = self.engine_cls
        engine_args = {}
        for k in util.get_cls_kwargs(engineclass):
            if k in kwargs:
                engine_args[k] = pop_kwarg(k)

        _initialize = kwargs.pop('_initialize', True)

        # all kwargs should be consumed
        if kwargs:
            raise TypeError(
                "Invalid argument(s) %s sent to create_engine(), "
                "using configuration %s/%s/%s. Please check that the "
                "keyword arguments are appropriate for this combination "
                "of components." % (','.join("'%s'" % k for k in kwargs),
                                    dialect.__class__.__name__,
                                    pool.__class__.__name__,
                                    engineclass.__name__))

        engine = engineclass(pool, dialect, u, **engine_args)

        if _initialize:
            do_on_connect = dialect.on_connect()
            if do_on_connect:
                def on_connect(dbapi_connection, connection_record):
                    conn = getattr(
                        dbapi_connection, '_sqla_unwrap', dbapi_connection)
                    if conn is None:
                        return
                    do_on_connect(conn)

                event.listen(pool, 'first_connect', on_connect)
                event.listen(pool, 'connect', on_connect)

            def first_connect(dbapi_connection, connection_record):
                c = base.Connection(engine, connection=dbapi_connection,
                                    _has_events=False)
                c._execution_options = util.immutabledict()
                dialect.initialize(c)
            event.listen(pool, 'first_connect', first_connect, once=True)

        dialect_cls.engine_created(engine)
        if entrypoint is not dialect_cls:
            entrypoint.engine_created(engine)

        for plugin in plugins:
            plugin.engine_created(engine)

        return engine


class PlainEngineStrategy(DefaultEngineStrategy):
    """Strategy for configuring a regular Engine."""

    name = 'plain'
    engine_cls = base.Engine

PlainEngineStrategy()


class ThreadLocalEngineStrategy(DefaultEngineStrategy):
    """Strategy for configuring an Engine with threadlocal behavior."""

    name = 'threadlocal'
    engine_cls = threadlocal.TLEngine

ThreadLocalEngineStrategy()


class MockEngineStrategy(EngineStrategy):
    """Strategy for configuring an Engine-like object with mocked execution.

    Produces a single mock Connectable object which dispatches
    statement execution to a passed-in function.

    """

    name = 'mock'

    def create(self, name_or_url, executor, **kwargs):
        # create url.URL object
        u = url.make_url(name_or_url)

        dialect_cls = u.get_dialect()

        dialect_args = {}
        # consume dialect arguments from kwargs
        for k in util.get_cls_kwargs(dialect_cls):
            if k in kwargs:
                dialect_args[k] = kwargs.pop(k)

        # create dialect
        dialect = dialect_cls(**dialect_args)

        return MockEngineStrategy.MockConnection(dialect, executor)

    class MockConnection(base.Connectable):
        def __init__(self, dialect, execute):
            self._dialect = dialect
            self.execute = execute

        engine = property(lambda s: s)
        dialect = property(attrgetter('_dialect'))
        name = property(lambda s: s._dialect.name)

        schema_for_object = schema._schema_getter(None)

        def contextual_connect(self, **kwargs):
            return self

        def execution_options(self, **kw):
            return self

        def compiler(self, statement, parameters, **kwargs):
            return self._dialect.compiler(
                statement, parameters, engine=self, **kwargs)

        def create(self, entity, **kwargs):
            kwargs['checkfirst'] = False
            from sqlalchemy.engine import ddl

            ddl.SchemaGenerator(
                self.dialect, self, **kwargs).traverse_single(entity)

        def drop(self, entity, **kwargs):
            kwargs['checkfirst'] = False
            from sqlalchemy.engine import ddl
            ddl.SchemaDropper(
                self.dialect, self, **kwargs).traverse_single(entity)

        def _run_visitor(self, visitorcallable, element,
                         connection=None,
                         **kwargs):
            kwargs['checkfirst'] = False
            visitorcallable(self.dialect, self,
                            **kwargs).traverse_single(element)

        def execute(self, object, *multiparams, **params):
            raise NotImplementedError()

MockEngineStrategy()
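Connecting the ``mock`` strategy back to the ``executor`` parameter documented in create_engine(), a sketch of the DDL-as-string pattern referenced in the FAQ link above:

    from sqlalchemy import create_engine, MetaData

    def dump(sql, *multiparams, **params):
        print(sql.compile(dialect=engine.dialect))

    engine = create_engine('postgresql://', strategy='mock', executor=dump)
    metadata = MetaData()  # tables defined on this metadata would print as CREATE TABLE
    metadata.create_all(engine, checkfirst=False)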
@ -1,138 +0,0 @@
# engine/threadlocal.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Provides a thread-local transactional wrapper around the root Engine class.

The ``threadlocal`` module is invoked when using the
``strategy="threadlocal"`` flag with :func:`~sqlalchemy.engine.create_engine`.
This module is semi-private and is invoked automatically when the threadlocal
engine strategy is used.
"""

from .. import util
from . import base
import weakref


class TLConnection(base.Connection):

    def __init__(self, *arg, **kw):
        super(TLConnection, self).__init__(*arg, **kw)
        self.__opencount = 0

    def _increment_connect(self):
        self.__opencount += 1
        return self

    def close(self):
        if self.__opencount == 1:
            base.Connection.close(self)
        self.__opencount -= 1

    def _force_close(self):
        self.__opencount = 0
        base.Connection.close(self)


class TLEngine(base.Engine):
    """An Engine that includes support for thread-local managed
    transactions.

    """
    _tl_connection_cls = TLConnection

    def __init__(self, *args, **kwargs):
        super(TLEngine, self).__init__(*args, **kwargs)
        self._connections = util.threading.local()

    def contextual_connect(self, **kw):
        if not hasattr(self._connections, 'conn'):
            connection = None
        else:
            connection = self._connections.conn()

        if connection is None or connection.closed:
            # guards against pool-level reapers, if desired.
            # or not connection.connection.is_valid:
            connection = self._tl_connection_cls(
                self,
                self._wrap_pool_connect(
                    self.pool.connect, connection),
                **kw)
            self._connections.conn = weakref.ref(connection)

        return connection._increment_connect()

    def begin_twophase(self, xid=None):
        if not hasattr(self._connections, 'trans'):
            self._connections.trans = []
        self._connections.trans.append(
            self.contextual_connect().begin_twophase(xid=xid))
        return self

    def begin_nested(self):
        if not hasattr(self._connections, 'trans'):
            self._connections.trans = []
        self._connections.trans.append(
            self.contextual_connect().begin_nested())
        return self

    def begin(self):
        if not hasattr(self._connections, 'trans'):
            self._connections.trans = []
        self._connections.trans.append(self.contextual_connect().begin())
        return self

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        if type is None:
            self.commit()
        else:
            self.rollback()

    def prepare(self):
        if not hasattr(self._connections, 'trans') or \
                not self._connections.trans:
            return
        self._connections.trans[-1].prepare()

    def commit(self):
        if not hasattr(self._connections, 'trans') or \
                not self._connections.trans:
            return
        trans = self._connections.trans.pop(-1)
        trans.commit()

    def rollback(self):
        if not hasattr(self._connections, 'trans') or \
                not self._connections.trans:
            return
        trans = self._connections.trans.pop(-1)
        trans.rollback()

    def dispose(self):
        self._connections = util.threading.local()
        super(TLEngine, self).dispose()

    @property
    def closed(self):
        return not hasattr(self._connections, 'conn') or \
            self._connections.conn() is None or \
            self._connections.conn().closed

    def close(self):
        if not self.closed:
            self.contextual_connect().close()
            connection = self._connections.conn()
            connection._force_close()
            del self._connections.conn
            self._connections.trans = []

    def __repr__(self):
        return 'TLEngine(%r)' % self.url
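A hedged sketch of the thread-local transaction pattern TLEngine supports; begin()/commit() operate on the calling thread's implicit connection (the table here is hypothetical):

    engine = create_engine('sqlite://', strategy='threadlocal')
    engine.begin()
    try:
        engine.execute("insert into t (x) values (1)")
        engine.commit()
    except Exception:
        engine.rollback()
        raise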
@ -1,261 +0,0 @@
|
||||
# engine/url.py
|
||||
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Provides the :class:`~sqlalchemy.engine.url.URL` class which encapsulates
|
||||
information about a database connection specification.
|
||||
|
||||
The URL object is created automatically when
|
||||
:func:`~sqlalchemy.engine.create_engine` is called with a string
|
||||
argument; alternatively, the URL is a public-facing construct which can
|
||||
be used directly and is also accepted directly by ``create_engine()``.
|
||||
"""
|
||||
|
||||
import re
|
||||
from .. import exc, util
|
||||
from . import Dialect
|
||||
from ..dialects import registry, plugins
|
||||
|
||||
|
||||
class URL(object):
|
||||
"""
|
||||
Represent the components of a URL used to connect to a database.
|
||||
|
||||
This object is suitable to be passed directly to a
|
||||
:func:`~sqlalchemy.create_engine` call. The fields of the URL are parsed
|
||||
from a string by the :func:`.make_url` function. the string
|
||||
format of the URL is an RFC-1738-style string.
|
||||
|
||||
All initialization parameters are available as public attributes.
|
||||
|
||||
:param drivername: the name of the database backend.
|
||||
This name will correspond to a module in sqlalchemy/databases
|
||||
or a third party plug-in.
|
||||
|
||||
:param username: The user name.
|
||||
|
||||
:param password: database password.
|
||||
|
||||
:param host: The name of the host.
|
||||
|
||||
:param port: The port number.
|
||||
|
||||
:param database: The database name.
|
||||
|
||||
:param query: A dictionary of options to be passed to the
|
||||
dialect and/or the DBAPI upon connect.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, drivername, username=None, password=None,
|
||||
host=None, port=None, database=None, query=None):
|
||||
self.drivername = drivername
|
||||
self.username = username
|
||||
self.password = password
|
||||
self.host = host
|
||||
if port is not None:
|
||||
self.port = int(port)
|
||||
else:
|
||||
self.port = None
|
||||
self.database = database
|
||||
self.query = query or {}
|
||||
|
||||
def __to_string__(self, hide_password=True):
|
||||
s = self.drivername + "://"
|
||||
if self.username is not None:
|
||||
s += _rfc_1738_quote(self.username)
|
||||
if self.password is not None:
|
||||
s += ':' + ('***' if hide_password
|
||||
else _rfc_1738_quote(self.password))
|
||||
s += "@"
|
||||
if self.host is not None:
|
||||
if ':' in self.host:
|
||||
s += "[%s]" % self.host
|
||||
else:
|
||||
s += self.host
|
||||
if self.port is not None:
|
||||
s += ':' + str(self.port)
|
||||
if self.database is not None:
|
||||
s += '/' + self.database
|
||||
if self.query:
|
||||
keys = list(self.query)
|
||||
keys.sort()
|
||||
s += '?' + "&".join("%s=%s" % (k, self.query[k]) for k in keys)
|
||||
return s
|
||||
|
||||
def __str__(self):
|
||||
return self.__to_string__(hide_password=False)
|
||||
|
||||
def __repr__(self):
|
||||
return self.__to_string__()
|
||||
|
||||
def __hash__(self):
|
||||
return hash(str(self))
|
||||
|
||||
def __eq__(self, other):
|
||||
return \
|
||||
isinstance(other, URL) and \
|
||||
self.drivername == other.drivername and \
|
||||
self.username == other.username and \
|
||||
self.password == other.password and \
|
||||
self.host == other.host and \
|
||||
self.database == other.database and \
|
||||
self.query == other.query
|
||||
|
||||
def get_backend_name(self):
|
||||
if '+' not in self.drivername:
|
||||
return self.drivername
|
||||
else:
|
||||
return self.drivername.split('+')[0]
|
||||
|
||||
def get_driver_name(self):
|
||||
if '+' not in self.drivername:
|
||||
return self.get_dialect().driver
|
||||
else:
|
||||
return self.drivername.split('+')[1]

    def _instantiate_plugins(self, kwargs):
        plugin_names = util.to_list(self.query.get('plugin', ()))

        return [
            plugins.load(plugin_name)(self, kwargs)
            for plugin_name in plugin_names
        ]

    def _get_entrypoint(self):
        """Return the "entry point" dialect class.

        This is normally the dialect itself except in the case when the
        returned class implements the get_dialect_cls() method.

        """
        if '+' not in self.drivername:
            name = self.drivername
        else:
            name = self.drivername.replace('+', '.')
        cls = registry.load(name)
        # check for legacy dialects that
        # would return a module with 'dialect' as the
        # actual class
        if hasattr(cls, 'dialect') and \
                isinstance(cls.dialect, type) and \
                issubclass(cls.dialect, Dialect):
            return cls.dialect
        else:
            return cls

    def get_dialect(self):
        """Return the SQLAlchemy database dialect class corresponding
        to this URL's driver name.
        """
        entrypoint = self._get_entrypoint()
        dialect_cls = entrypoint.get_dialect_cls(self)
        return dialect_cls

    def translate_connect_args(self, names=[], **kw):
        r"""Translate url attributes into a dictionary of connection arguments.

        Returns attributes of this url (`host`, `database`, `username`,
        `password`, `port`) as a plain dictionary.  The attribute names are
        used as the keys by default.  Unset or false attributes are omitted
        from the final dictionary.

        :param \**kw: Optional, alternate key names for url attributes.

        :param names: Deprecated.  Same purpose as the keyword-based alternate
            names, but correlates the name to the original positionally.
        """

        translated = {}
        attribute_names = ['host', 'database', 'username', 'password', 'port']
        for sname in attribute_names:
            if names:
                name = names.pop(0)
            elif sname in kw:
                name = kw[sname]
            else:
                name = sname
            if name is not None and getattr(self, sname, False):
                translated[name] = getattr(self, sname)
        return translated
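A quick illustration (results assuming the parsing below; the key renames are arbitrary):

    url = make_url("postgresql://scott:tiger@localhost:5432/test")
    url.translate_connect_args(username='user', database='db')
    # -> {'host': 'localhost', 'db': 'test', 'user': 'scott',
    #     'password': 'tiger', 'port': 5432}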


def make_url(name_or_url):
    """Given a string or unicode instance, produce a new URL instance.

    The given string is parsed according to the RFC 1738 spec.  If an
    existing URL object is passed, just returns the object.
    """

    if isinstance(name_or_url, util.string_types):
        return _parse_rfc1738_args(name_or_url)
    else:
        return name_or_url
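For example (a sketch; the query string is split off of the database token by the parser below):

    url = make_url("mysql+pymysql://scott:tiger@localhost/test?charset=utf8")
    url.drivername   # 'mysql+pymysql'
    url.database     # 'test'
    url.query        # {'charset': 'utf8'}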


def _parse_rfc1738_args(name):
    pattern = re.compile(r'''
            (?P<name>[\w\+]+)://
            (?:
                (?P<username>[^:/]*)
                (?::(?P<password>.*))?
            @)?
            (?:
                (?:
                    \[(?P<ipv6host>[^/]+)\] |
                    (?P<ipv4host>[^/:]+)
                )?
                (?::(?P<port>[^/]*))?
            )?
            (?:/(?P<database>.*))?
            ''', re.X)

    m = pattern.match(name)
    if m is not None:
        components = m.groupdict()
        if components['database'] is not None:
            tokens = components['database'].split('?', 2)
            components['database'] = tokens[0]
            query = (
                len(tokens) > 1 and dict(util.parse_qsl(tokens[1]))) or None
            if util.py2k and query is not None:
                query = dict((k.encode('ascii'), query[k]) for k in query)
        else:
            query = None
        components['query'] = query

        if components['username'] is not None:
            components['username'] = _rfc_1738_unquote(components['username'])

        if components['password'] is not None:
            components['password'] = _rfc_1738_unquote(components['password'])

        ipv4host = components.pop('ipv4host')
        ipv6host = components.pop('ipv6host')
        components['host'] = ipv4host or ipv6host
        name = components.pop('name')
        return URL(name, **components)
    else:
        raise exc.ArgumentError(
            "Could not parse rfc1738 URL from string '%s'" % name)


def _rfc_1738_quote(text):
    return re.sub(r'[:@/]', lambda m: "%%%X" % ord(m.group(0)), text)


def _rfc_1738_unquote(text):
    return util.unquote(text)
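Only the three URL-delimiter characters ``:@/`` are escaped on the way out; for example (values illustrative):

    _rfc_1738_quote("user@example")      # 'user%40example'
    _rfc_1738_unquote("user%40example")  # 'user@example'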


def _parse_keyvalue_args(name):
    m = re.match(r'(\w+)://(.*)', name)
    if m is not None:
        (name, args) = m.group(1, 2)
        opts = dict(util.parse_qsl(args))
        # pass the key/value options as keyword arguments; splatting the
        # dict positionally (*opts) would send only the keys to URL()
        return URL(name, **opts)
    else:
        return None
@ -1,74 +0,0 @@
# engine/util.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from .. import util


def connection_memoize(key):
    """Decorator, memoize a function in a connection.info stash.

    Only applicable to functions which take no arguments other than a
    connection.  The memo will be stored in ``connection.info[key]``.
    """

    @util.decorator
    def decorated(fn, self, connection):
        connection = connection.connect()
        try:
            return connection.info[key]
        except KeyError:
            connection.info[key] = val = fn(self, connection)
            return val

    return decorated
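A hypothetical sketch of usage on a dialect method (the key name and query are invented):

    @connection_memoize('_server_version_string')
    def server_version_string(self, connection):
        # computed once per connection, then served from connection.info
        return connection.execute("SELECT version()").scalar()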


def py_fallback():
    def _distill_params(multiparams, params):
        """Given arguments from the calling form *multiparams, **params,
        return a list of bind parameter structures, usually a list of
        dictionaries.

        In the case of 'raw' execution which accepts positional parameters,
        it may be a list of tuples or lists.

        """

        if not multiparams:
            if params:
                return [params]
            else:
                return []
        elif len(multiparams) == 1:
            zero = multiparams[0]
            if isinstance(zero, (list, tuple)):
                if not zero or hasattr(zero[0], '__iter__') and \
                        not hasattr(zero[0], 'strip'):
                    # execute(stmt, [{}, {}, {}, ...])
                    # execute(stmt, [(), (), (), ...])
                    return zero
                else:
                    # execute(stmt, ("value", "value"))
                    return [zero]
            elif hasattr(zero, 'keys'):
                # execute(stmt, {"key":"value"})
                return [zero]
            else:
                # execute(stmt, "value")
                return [[zero]]
        else:
            if hasattr(multiparams[0], '__iter__') and \
                    not hasattr(multiparams[0], 'strip'):
                return multiparams
            else:
                return [multiparams]

    return locals()

try:
    from sqlalchemy.cutils import _distill_params
except ImportError:
    globals().update(py_fallback())
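Illustrative results from the Python fallback above (shapes only, no real statements involved):

    _distill_params((), {"x": 1})                  # -> [{'x': 1}]
    _distill_params(([{"x": 1}, {"x": 2}],), {})   # -> [{'x': 1}, {'x': 2}]
    _distill_params(("a", "b"), {})                # -> [('a', 'b')]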
@ -1,11 +0,0 @@
# event/__init__.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from .api import CANCEL, NO_RETVAL, listen, listens_for, remove, contains
from .base import Events, dispatcher
from .attr import RefCollection
from .legacy import _legacy_signature
@ -1,188 +0,0 @@
# event/api.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Public API functions for the event system.

"""
from __future__ import absolute_import

from .. import util, exc
from .base import _registrars
from .registry import _EventKey

CANCEL = util.symbol('CANCEL')
NO_RETVAL = util.symbol('NO_RETVAL')


def _event_key(target, identifier, fn):
    for evt_cls in _registrars[identifier]:
        tgt = evt_cls._accept_with(target)
        if tgt is not None:
            return _EventKey(target, identifier, fn, tgt)
    else:
        raise exc.InvalidRequestError("No such event '%s' for target '%s'" %
                                      (identifier, target))


def listen(target, identifier, fn, *args, **kw):
    """Register a listener function for the given target.

    e.g.::

        from sqlalchemy import event
        from sqlalchemy.schema import UniqueConstraint

        def unique_constraint_name(const, table):
            const.name = "uq_%s_%s" % (
                table.name,
                list(const.columns)[0].name
            )
        event.listen(
            UniqueConstraint,
            "after_parent_attach",
            unique_constraint_name)


    A given function can also be invoked for only the first invocation
    of the event using the ``once`` argument::

        def on_config():
            do_config()

        event.listen(Mapper, "before_configure", on_config, once=True)

    .. versionadded:: 0.9.4 Added ``once=True`` to :func:`.event.listen`
       and :func:`.event.listens_for`.

    .. note::

        The :func:`.listen` function cannot be called at the same time
        that the target event is being run.   This has implications
        for thread safety, and also means an event cannot be added
        from inside the listener function for itself.  The list of
        events to be run are present inside of a mutable collection
        that can't be changed during iteration.

        Event registration and removal is not intended to be a "high
        velocity" operation; it is a configurational operation.  For
        systems that need to quickly associate and deassociate with
        events at high scale, use a mutable structure that is handled
        from inside of a single listener.

        .. versionchanged:: 1.0.0 - a ``collections.deque()`` object is now
           used as the container for the list of events, which explicitly
           disallows collection mutation while the collection is being
           iterated.

    .. seealso::

        :func:`.listens_for`

        :func:`.remove`

    """

    _event_key(target, identifier, fn).listen(*args, **kw)


def listens_for(target, identifier, *args, **kw):
    """Decorate a function as a listener for the given target + identifier.

    e.g.::

        from sqlalchemy import event
        from sqlalchemy.schema import UniqueConstraint

        @event.listens_for(UniqueConstraint, "after_parent_attach")
        def unique_constraint_name(const, table):
            const.name = "uq_%s_%s" % (
                table.name,
                list(const.columns)[0].name
            )

    A given function can also be invoked for only the first invocation
    of the event using the ``once`` argument::

        @event.listens_for(Mapper, "before_configure", once=True)
        def on_config():
            do_config()


    .. versionadded:: 0.9.4 Added ``once=True`` to :func:`.event.listen`
       and :func:`.event.listens_for`.

    .. seealso::

        :func:`.listen` - general description of event listening

    """
    def decorate(fn):
        listen(target, identifier, fn, *args, **kw)
        return fn
    return decorate


def remove(target, identifier, fn):
    """Remove an event listener.

    The arguments here should match exactly those which were sent to
    :func:`.listen`; all the event registration which proceeded as a result
    of this call will be reverted by calling :func:`.remove` with the same
    arguments.

    e.g.::

        # if a function was registered like this...
        @event.listens_for(SomeMappedClass, "before_insert", propagate=True)
        def my_listener_function(*arg):
            pass

        # ... it's removed like this
        event.remove(SomeMappedClass, "before_insert", my_listener_function)

    Above, the listener function associated with ``SomeMappedClass`` was also
    propagated to subclasses of ``SomeMappedClass``; the :func:`.remove`
    function will revert all of these operations.

    .. versionadded:: 0.9.0

    .. note::

        The :func:`.remove` function cannot be called at the same time
        that the target event is being run.   This has implications
        for thread safety, and also means an event cannot be removed
        from inside the listener function for itself.  The list of
        events to be run are present inside of a mutable collection
        that can't be changed during iteration.

        Event registration and removal is not intended to be a "high
        velocity" operation; it is a configurational operation.  For
        systems that need to quickly associate and deassociate with
        events at high scale, use a mutable structure that is handled
        from inside of a single listener.

        .. versionchanged:: 1.0.0 - a ``collections.deque()`` object is now
           used as the container for the list of events, which explicitly
           disallows collection mutation while the collection is being
           iterated.

    .. seealso::

        :func:`.listen`

    """
    _event_key(target, identifier, fn).remove()


def contains(target, identifier, fn):
    """Return True if the given target/ident/fn is set up to listen.

    .. versionadded:: 0.9.0

    """

    return _event_key(target, identifier, fn).contains()
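A minimal round-trip sketch of the three functions together (the SQLite engine and listener are chosen arbitrarily):

    from sqlalchemy import create_engine, event

    engine = create_engine("sqlite://")

    def on_connect(dbapi_connection, connection_record):
        pass

    event.listen(engine, "connect", on_connect)
    assert event.contains(engine, "connect", on_connect)
    event.remove(engine, "connect", on_connect)
    assert not event.contains(engine, "connect", on_connect)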
@ -1,373 +0,0 @@
# event/attr.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Attribute implementation for _Dispatch classes.

The various listener targets for a particular event class are represented
as attributes, which refer to collections of listeners to be fired off.
These collections can exist at the class level as well as at the instance
level.  An event is fired off using code like this::

    some_object.dispatch.first_connect(arg1, arg2)

Above, ``some_object.dispatch`` would be an instance of ``_Dispatch`` and
``first_connect`` is typically an instance of ``_ListenerCollection``
if event listeners are present, or ``_EmptyListener`` if none are present.

The attribute mechanics here spend effort trying to ensure listener functions
are available with a minimum of function call overhead, that unnecessary
objects aren't created (i.e. many empty per-instance listener collections),
as well as that everything is garbage collectable when owning references are
lost.  Other features such as "propagation" of listener functions across
many ``_Dispatch`` instances, "joining" of multiple ``_Dispatch`` instances,
as well as support for subclass propagation (e.g. events assigned to
``Pool`` vs. ``QueuePool``) are all implemented here.

"""

from __future__ import absolute_import, with_statement

from .. import util
from ..util import threading
from . import registry
from . import legacy
from itertools import chain
import weakref
import collections


class RefCollection(util.MemoizedSlots):
    __slots__ = 'ref',

    def _memoized_attr_ref(self):
        return weakref.ref(self, registry._collection_gced)


class _ClsLevelDispatch(RefCollection):
    """Class-level events on :class:`._Dispatch` classes."""

    __slots__ = ('name', 'arg_names', 'has_kw',
                 'legacy_signatures', '_clslevel', '__weakref__')

    def __init__(self, parent_dispatch_cls, fn):
        self.name = fn.__name__
        argspec = util.inspect_getargspec(fn)
        self.arg_names = argspec.args[1:]
        self.has_kw = bool(argspec.keywords)
        self.legacy_signatures = list(reversed(
            sorted(
                getattr(fn, '_legacy_signatures', []),
                key=lambda s: s[0]
            )
        ))
        fn.__doc__ = legacy._augment_fn_docs(self, parent_dispatch_cls, fn)

        self._clslevel = weakref.WeakKeyDictionary()

    def _adjust_fn_spec(self, fn, named):
        if named:
            fn = self._wrap_fn_for_kw(fn)
        if self.legacy_signatures:
            try:
                argspec = util.get_callable_argspec(fn, no_self=True)
            except TypeError:
                pass
            else:
                fn = legacy._wrap_fn_for_legacy(self, fn, argspec)
        return fn

    def _wrap_fn_for_kw(self, fn):
        def wrap_kw(*args, **kw):
            argdict = dict(zip(self.arg_names, args))
            argdict.update(kw)
            return fn(**argdict)
        return wrap_kw

    def insert(self, event_key, propagate):
        target = event_key.dispatch_target
        assert isinstance(target, type), \
            "Class-level Event targets must be classes."
        stack = [target]
        while stack:
            cls = stack.pop(0)
            stack.extend(cls.__subclasses__())
            if cls is not target and cls not in self._clslevel:
                self.update_subclass(cls)
            else:
                if cls not in self._clslevel:
                    self._clslevel[cls] = collections.deque()
                self._clslevel[cls].appendleft(event_key._listen_fn)
        registry._stored_in_collection(event_key, self)

    def append(self, event_key, propagate):
        target = event_key.dispatch_target
        assert isinstance(target, type), \
            "Class-level Event targets must be classes."

        stack = [target]
        while stack:
            cls = stack.pop(0)
            stack.extend(cls.__subclasses__())
            if cls is not target and cls not in self._clslevel:
                self.update_subclass(cls)
            else:
                if cls not in self._clslevel:
                    self._clslevel[cls] = collections.deque()
                self._clslevel[cls].append(event_key._listen_fn)
        registry._stored_in_collection(event_key, self)

    def update_subclass(self, target):
        if target not in self._clslevel:
            self._clslevel[target] = collections.deque()
        clslevel = self._clslevel[target]
        for cls in target.__mro__[1:]:
            if cls in self._clslevel:
                clslevel.extend([
                    fn for fn
                    in self._clslevel[cls]
                    if fn not in clslevel
                ])

    def remove(self, event_key):
        target = event_key.dispatch_target
        stack = [target]
        while stack:
            cls = stack.pop(0)
            stack.extend(cls.__subclasses__())
            if cls in self._clslevel:
                self._clslevel[cls].remove(event_key._listen_fn)
        registry._removed_from_collection(event_key, self)

    def clear(self):
        """Clear all class level listeners"""

        to_clear = set()
        for dispatcher in self._clslevel.values():
            to_clear.update(dispatcher)
            dispatcher.clear()
        registry._clear(self, to_clear)

    def for_modify(self, obj):
        """Return an event collection which can be modified.

        For _ClsLevelDispatch at the class level of
        a dispatcher, this returns self.

        """
        return self


class _InstanceLevelDispatch(RefCollection):
    __slots__ = ()

    def _adjust_fn_spec(self, fn, named):
        return self.parent._adjust_fn_spec(fn, named)


class _EmptyListener(_InstanceLevelDispatch):
    """Serves as a proxy interface to the events
    served by a _ClsLevelDispatch, when there are no
    instance-level events present.

    Is replaced by _ListenerCollection when instance-level
    events are added.

    """

    propagate = frozenset()
    listeners = ()

    __slots__ = 'parent', 'parent_listeners', 'name'

    def __init__(self, parent, target_cls):
        if target_cls not in parent._clslevel:
            parent.update_subclass(target_cls)
        self.parent = parent  # _ClsLevelDispatch
        self.parent_listeners = parent._clslevel[target_cls]
        self.name = parent.name

    def for_modify(self, obj):
        """Return an event collection which can be modified.

        For _EmptyListener at the instance level of
        a dispatcher, this generates a new
        _ListenerCollection, applies it to the instance,
        and returns it.

        """
        result = _ListenerCollection(self.parent, obj._instance_cls)
        if getattr(obj, self.name) is self:
            setattr(obj, self.name, result)
        else:
            assert isinstance(getattr(obj, self.name), _JoinedListener)
        return result

    def _needs_modify(self, *args, **kw):
        raise NotImplementedError("need to call for_modify()")

    exec_once = insert = append = remove = clear = _needs_modify

    def __call__(self, *args, **kw):
        """Execute this event."""

        for fn in self.parent_listeners:
            fn(*args, **kw)

    def __len__(self):
        return len(self.parent_listeners)

    def __iter__(self):
        return iter(self.parent_listeners)

    def __bool__(self):
        return bool(self.parent_listeners)

    __nonzero__ = __bool__


class _CompoundListener(_InstanceLevelDispatch):
    __slots__ = '_exec_once_mutex', '_exec_once'

    def _memoized_attr__exec_once_mutex(self):
        return threading.Lock()

    def exec_once(self, *args, **kw):
        """Execute this event, but only if it has not been
        executed already for this collection."""

        if not self._exec_once:
            with self._exec_once_mutex:
                if not self._exec_once:
                    try:
                        self(*args, **kw)
                    finally:
                        self._exec_once = True
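The re-test of ``self._exec_once`` under the mutex is the classic double-checked locking pattern; a standalone sketch (names hypothetical):

    import threading

    class OneShot(object):
        def __init__(self, fn):
            self._fn = fn
            self._done = False
            self._mutex = threading.Lock()

        def __call__(self, *args, **kw):
            if not self._done:             # cheap unlocked fast path
                with self._mutex:
                    if not self._done:     # re-check: another thread may have
                        try:               # fired while we waited on the lock
                            self._fn(*args, **kw)
                        finally:
                            self._done = True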

    def __call__(self, *args, **kw):
        """Execute this event."""

        for fn in self.parent_listeners:
            fn(*args, **kw)
        for fn in self.listeners:
            fn(*args, **kw)

    def __len__(self):
        return len(self.parent_listeners) + len(self.listeners)

    def __iter__(self):
        return chain(self.parent_listeners, self.listeners)

    def __bool__(self):
        return bool(self.listeners or self.parent_listeners)

    __nonzero__ = __bool__


class _ListenerCollection(_CompoundListener):
    """Instance-level attributes on instances of :class:`._Dispatch`.

    Represents a collection of listeners.

    As of 0.7.9, _ListenerCollection is only first
    created via the _EmptyListener.for_modify() method.

    """

    __slots__ = (
        'parent_listeners', 'parent', 'name', 'listeners',
        'propagate', '__weakref__')

    def __init__(self, parent, target_cls):
        if target_cls not in parent._clslevel:
            parent.update_subclass(target_cls)
        self._exec_once = False
        self.parent_listeners = parent._clslevel[target_cls]
        self.parent = parent
        self.name = parent.name
        self.listeners = collections.deque()
        self.propagate = set()

    def for_modify(self, obj):
        """Return an event collection which can be modified.

        For _ListenerCollection at the instance level of
        a dispatcher, this returns self.

        """
        return self

    def _update(self, other, only_propagate=True):
        """Populate from the listeners in another :class:`_Dispatch`
        object."""

        existing_listeners = self.listeners
        existing_listener_set = set(existing_listeners)
        self.propagate.update(other.propagate)
        other_listeners = [l for l
                           in other.listeners
                           if l not in existing_listener_set
                           and not only_propagate or l in self.propagate
                           ]

        existing_listeners.extend(other_listeners)

        to_associate = other.propagate.union(other_listeners)
        registry._stored_in_collection_multi(self, other, to_associate)

    def insert(self, event_key, propagate):
        if event_key.prepend_to_list(self, self.listeners):
            if propagate:
                self.propagate.add(event_key._listen_fn)

    def append(self, event_key, propagate):
        if event_key.append_to_list(self, self.listeners):
            if propagate:
                self.propagate.add(event_key._listen_fn)

    def remove(self, event_key):
        self.listeners.remove(event_key._listen_fn)
        self.propagate.discard(event_key._listen_fn)
        registry._removed_from_collection(event_key, self)

    def clear(self):
        registry._clear(self, self.listeners)
        self.propagate.clear()
        self.listeners.clear()


class _JoinedListener(_CompoundListener):
    __slots__ = 'parent', 'name', 'local', 'parent_listeners'

    def __init__(self, parent, name, local):
        self._exec_once = False
        self.parent = parent
        self.name = name
        self.local = local
        self.parent_listeners = self.local

    @property
    def listeners(self):
        return getattr(self.parent, self.name)

    def _adjust_fn_spec(self, fn, named):
        return self.local._adjust_fn_spec(fn, named)

    def for_modify(self, obj):
        self.local = self.parent_listeners = self.local.for_modify(obj)
        return self

    def insert(self, event_key, propagate):
        self.local.insert(event_key, propagate)

    def append(self, event_key, propagate):
        self.local.append(event_key, propagate)

    def remove(self, event_key):
        self.local.remove(event_key)

    def clear(self):
        raise NotImplementedError()
@ -1,289 +0,0 @@
# event/base.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Base implementation classes.

The public-facing ``Events`` serves as the base class for an event interface;
its public attributes represent different kinds of events.   These attributes
are mirrored onto a ``_Dispatch`` class, which serves as a container for
collections of listener functions.   These collections are represented both
at the class level of a particular ``_Dispatch`` class as well as within
instances of ``_Dispatch``.

"""
from __future__ import absolute_import

import weakref

from .. import util
from .attr import _JoinedListener, \
    _EmptyListener, _ClsLevelDispatch

_registrars = util.defaultdict(list)


def _is_event_name(name):
    return not name.startswith('_') and name != 'dispatch'


class _UnpickleDispatch(object):
    """Serializable callable that re-generates an instance of
    :class:`_Dispatch` given a particular :class:`.Events` subclass.

    """

    def __call__(self, _instance_cls):
        for cls in _instance_cls.__mro__:
            if 'dispatch' in cls.__dict__:
                return cls.__dict__['dispatch'].\
                    dispatch_cls._for_class(_instance_cls)
        else:
            raise AttributeError("No class with a 'dispatch' member present.")


class _Dispatch(object):
    """Mirror the event listening definitions of an Events class with
    listener collections.

    Classes which define a "dispatch" member will return a
    non-instantiated :class:`._Dispatch` subclass when the member
    is accessed at the class level.  When the "dispatch" member is
    accessed at the instance level of its owner, an instance
    of the :class:`._Dispatch` class is returned.

    A :class:`._Dispatch` class is generated for each :class:`.Events`
    class defined, by the :func:`._create_dispatcher_class` function.
    The original :class:`.Events` classes remain untouched.
    This decouples the construction of :class:`.Events` subclasses from
    the implementation used by the event internals, and allows
    inspecting tools like Sphinx to work in an unsurprising
    way against the public API.

    """

    # in one ORM edge case, an attribute is added to _Dispatch,
    # so __dict__ is used in just that case and potentially others.
    __slots__ = '_parent', '_instance_cls', '__dict__', '_empty_listeners'

    _empty_listener_reg = weakref.WeakKeyDictionary()

    def __init__(self, parent, instance_cls=None):
        self._parent = parent
        self._instance_cls = instance_cls
        if instance_cls:
            try:
                self._empty_listeners = self._empty_listener_reg[instance_cls]
            except KeyError:
                self._empty_listeners = \
                    self._empty_listener_reg[instance_cls] = dict(
                        (ls.name, _EmptyListener(ls, instance_cls))
                        for ls in parent._event_descriptors
                    )
        else:
            self._empty_listeners = {}

    def __getattr__(self, name):
        # assign EmptyListeners as attributes on demand
        # to reduce startup time for new dispatch objects
        try:
            ls = self._empty_listeners[name]
        except KeyError:
            raise AttributeError(name)
        else:
            setattr(self, ls.name, ls)
            return ls

    @property
    def _event_descriptors(self):
        for k in self._event_names:
            yield getattr(self, k)

    def _for_class(self, instance_cls):
        return self.__class__(self, instance_cls)

    def _for_instance(self, instance):
        instance_cls = instance.__class__
        return self._for_class(instance_cls)

    @property
    def _listen(self):
        return self._events._listen

    def _join(self, other):
        """Create a 'join' of this :class:`._Dispatch` and another.

        This new dispatcher will dispatch events to both
        :class:`._Dispatch` objects.

        """
        if '_joined_dispatch_cls' not in self.__class__.__dict__:
            cls = type(
                "Joined%s" % self.__class__.__name__,
                (_JoinedDispatcher, ), {'__slots__': self._event_names}
            )

            self.__class__._joined_dispatch_cls = cls
        return self._joined_dispatch_cls(self, other)

    def __reduce__(self):
        return _UnpickleDispatch(), (self._instance_cls, )

    def _update(self, other, only_propagate=True):
        """Populate from the listeners in another :class:`_Dispatch`
        object."""
        for ls in other._event_descriptors:
            if isinstance(ls, _EmptyListener):
                continue
            getattr(self, ls.name).\
                for_modify(self)._update(ls, only_propagate=only_propagate)

    def _clear(self):
        for ls in self._event_descriptors:
            ls.for_modify(self).clear()


class _EventMeta(type):
    """Intercept new Event subclasses and create
    associated _Dispatch classes."""

    def __init__(cls, classname, bases, dict_):
        _create_dispatcher_class(cls, classname, bases, dict_)
        return type.__init__(cls, classname, bases, dict_)


def _create_dispatcher_class(cls, classname, bases, dict_):
    """Create a :class:`._Dispatch` class corresponding to an
    :class:`.Events` class."""

    # there's all kinds of ways to do this,
    # i.e. make a Dispatch class that shares the '_listen' method
    # of the Event class, this is the straight monkeypatch.
    if hasattr(cls, 'dispatch'):
        dispatch_base = cls.dispatch.__class__
    else:
        dispatch_base = _Dispatch

    event_names = [k for k in dict_ if _is_event_name(k)]
    dispatch_cls = type("%sDispatch" % classname,
                        (dispatch_base, ), {'__slots__': event_names})

    dispatch_cls._event_names = event_names

    dispatch_inst = cls._set_dispatch(cls, dispatch_cls)
    for k in dispatch_cls._event_names:
        setattr(dispatch_inst, k, _ClsLevelDispatch(cls, dict_[k]))
        _registrars[k].append(cls)

    for super_ in dispatch_cls.__bases__:
        if issubclass(super_, _Dispatch) and super_ is not _Dispatch:
            for ls in super_._events.dispatch._event_descriptors:
                setattr(dispatch_inst, ls.name, ls)
                dispatch_cls._event_names.append(ls.name)

    if getattr(cls, '_dispatch_target', None):
        cls._dispatch_target.dispatch = dispatcher(cls)
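A minimal sketch of the full wiring, assuming the internal machinery above behaves as traced (all names hypothetical): defining an ``Events`` subclass with a ``_dispatch_target`` gives the target class a ``dispatch`` descriptor, after which the public API functions work with it:

    from sqlalchemy import event
    from sqlalchemy.event import Events

    class MyTarget(object):
        pass

    class MyTargetEvents(Events):
        _dispatch_target = MyTarget

        def on_thing(self, target, value):
            """Fires when a thing happens on the target."""

    @event.listens_for(MyTarget, "on_thing")
    def handler(target, value):
        print(value)

    t = MyTarget()
    t.dispatch.on_thing(t, "hello")   # invokes handler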


def _remove_dispatcher(cls):
    for k in cls.dispatch._event_names:
        _registrars[k].remove(cls)
        if not _registrars[k]:
            del _registrars[k]


class Events(util.with_metaclass(_EventMeta, object)):
    """Define event listening functions for a particular target type."""

    @staticmethod
    def _set_dispatch(cls, dispatch_cls):
        # this allows an Events subclass to define additional utility
        # methods made available to the target via
        # "self.dispatch._events.<utilitymethod>"
        # @staticmethod to allow easy "super" calls while in a metaclass
        # constructor.
        cls.dispatch = dispatch_cls(None)
        dispatch_cls._events = cls
        return cls.dispatch

    @classmethod
    def _accept_with(cls, target):
        # Mapper, ClassManager, Session override this to
        # also accept classes, scoped_sessions, sessionmakers, etc.
        if hasattr(target, 'dispatch') and (

            isinstance(target.dispatch, cls.dispatch.__class__) or

            (
                isinstance(target.dispatch, type) and
                issubclass(target.dispatch, cls.dispatch.__class__)
            ) or

            (
                isinstance(target.dispatch, _JoinedDispatcher) and
                isinstance(target.dispatch.parent, cls.dispatch.__class__)
            )

        ):
            return target
        else:
            return None

    @classmethod
    def _listen(cls, event_key, propagate=False, insert=False, named=False):
        event_key.base_listen(propagate=propagate, insert=insert, named=named)

    @classmethod
    def _remove(cls, event_key):
        event_key.remove()

    @classmethod
    def _clear(cls):
        cls.dispatch._clear()


class _JoinedDispatcher(object):
    """Represent a connection between two _Dispatch objects."""

    __slots__ = 'local', 'parent', '_instance_cls'

    def __init__(self, local, parent):
        self.local = local
        self.parent = parent
        self._instance_cls = self.local._instance_cls

    def __getattr__(self, name):
        # assign _JoinedListeners as attributes on demand
        # to reduce startup time for new dispatch objects
        ls = getattr(self.local, name)
        jl = _JoinedListener(self.parent, ls.name, ls)
        setattr(self, ls.name, jl)
        return jl

    @property
    def _listen(self):
        return self.parent._listen


class dispatcher(object):
    """Descriptor used by target classes to
    deliver the _Dispatch class at the class level
    and produce new _Dispatch instances for target
    instances.

    """

    def __init__(self, events):
        self.dispatch_cls = events.dispatch
        self.events = events

    def __get__(self, obj, cls):
        if obj is None:
            return self.dispatch_cls
        obj.__dict__['dispatch'] = disp = self.dispatch_cls._for_instance(obj)
        return disp
@ -1,169 +0,0 @@
# event/legacy.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Routines to handle adaption of legacy call signatures,
generation of deprecation notes and docstrings.

"""

from .. import util


def _legacy_signature(since, argnames, converter=None):
    def leg(fn):
        if not hasattr(fn, '_legacy_signatures'):
            fn._legacy_signatures = []
        fn._legacy_signatures.append((since, argnames, converter))
        return fn
    return leg
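A hypothetical sketch of how an Events method would declare an older calling form (names and version invented); listeners written against the old (x, y) form keep working:

    @_legacy_signature("0.9", ["x", "y"])
    def on_compute(self, x, y, z):
        """Fires with x, y and, as of 0.9, z."""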


def _wrap_fn_for_legacy(dispatch_collection, fn, argspec):
    for since, argnames, conv in dispatch_collection.legacy_signatures:
        if argnames[-1] == "**kw":
            has_kw = True
            argnames = argnames[0:-1]
        else:
            has_kw = False

        if len(argnames) == len(argspec.args) \
                and has_kw is bool(argspec.keywords):

            if conv:
                assert not has_kw

                def wrap_leg(*args):
                    return fn(*conv(*args))
            else:
                def wrap_leg(*args, **kw):
                    argdict = dict(zip(dispatch_collection.arg_names, args))
                    args = [argdict[name] for name in argnames]
                    if has_kw:
                        return fn(*args, **kw)
                    else:
                        return fn(*args)
            return wrap_leg
    else:
        return fn


def _indent(text, indent):
    return "\n".join(
        indent + line
        for line in text.split("\n")
    )


def _standard_listen_example(dispatch_collection, sample_target, fn):
    example_kw_arg = _indent(
        "\n".join(
            "%(arg)s = kw['%(arg)s']" % {"arg": arg}
            for arg in dispatch_collection.arg_names[0:2]
        ),
        "    ")
    if dispatch_collection.legacy_signatures:
        current_since = max(since for since, args, conv
                            in dispatch_collection.legacy_signatures)
    else:
        current_since = None
    text = (
        "from sqlalchemy import event\n\n"
        "# standard decorator style%(current_since)s\n"
        "@event.listens_for(%(sample_target)s, '%(event_name)s')\n"
        "def receive_%(event_name)s("
        "%(named_event_arguments)s%(has_kw_arguments)s):\n"
        "    \"listen for the '%(event_name)s' event\"\n"
        "\n    # ... (event handling logic) ...\n"
    )

    if len(dispatch_collection.arg_names) > 3:
        text += (

            "\n# named argument style (new in 0.9)\n"
            "@event.listens_for("
            "%(sample_target)s, '%(event_name)s', named=True)\n"
            "def receive_%(event_name)s(**kw):\n"
            "    \"listen for the '%(event_name)s' event\"\n"
            "%(example_kw_arg)s\n"
            "\n    # ... (event handling logic) ...\n"
        )

    text %= {
        "current_since": " (arguments as of %s)" %
        current_since if current_since else "",
        "event_name": fn.__name__,
        "has_kw_arguments": ", **kw" if dispatch_collection.has_kw else "",
        "named_event_arguments": ", ".join(dispatch_collection.arg_names),
        "example_kw_arg": example_kw_arg,
        "sample_target": sample_target
    }
    return text


def _legacy_listen_examples(dispatch_collection, sample_target, fn):
    text = ""
    for since, args, conv in dispatch_collection.legacy_signatures:
        text += (
            "\n# legacy calling style (pre-%(since)s)\n"
            "@event.listens_for(%(sample_target)s, '%(event_name)s')\n"
            "def receive_%(event_name)s("
            "%(named_event_arguments)s%(has_kw_arguments)s):\n"
            "    \"listen for the '%(event_name)s' event\"\n"
            "\n    # ... (event handling logic) ...\n" % {
                "since": since,
                "event_name": fn.__name__,
                "has_kw_arguments": " **kw"
                if dispatch_collection.has_kw else "",
                "named_event_arguments": ", ".join(args),
                "sample_target": sample_target
            }
        )
    return text


def _version_signature_changes(dispatch_collection):
    since, args, conv = dispatch_collection.legacy_signatures[0]
    return (
        "\n.. versionchanged:: %(since)s\n"
        "    The ``%(event_name)s`` event now accepts the \n"
        "    arguments ``%(named_event_arguments)s%(has_kw_arguments)s``.\n"
        "    Listener functions which accept the previous argument \n"
        "    signature(s) listed above will be automatically \n"
        "    adapted to the new signature." % {
            "since": since,
            "event_name": dispatch_collection.name,
            "named_event_arguments": ", ".join(dispatch_collection.arg_names),
            "has_kw_arguments": ", **kw" if dispatch_collection.has_kw else ""
        }
    )


def _augment_fn_docs(dispatch_collection, parent_dispatch_cls, fn):
    header = ".. container:: event_signatures\n\n"\
        "     Example argument forms::\n"\
        "\n"

    sample_target = getattr(parent_dispatch_cls, "_target_class_doc", "obj")
    text = (
        header +
        _indent(
            _standard_listen_example(
                dispatch_collection, sample_target, fn),
            " " * 8)
    )
    if dispatch_collection.legacy_signatures:
        text += _indent(
            _legacy_listen_examples(
                dispatch_collection, sample_target, fn),
            " " * 8)

        text += _version_signature_changes(dispatch_collection)

    return util.inject_docstring_text(fn.__doc__,
                                      text,
                                      1
                                      )
@ -1,262 +0,0 @@
# event/registry.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Provides managed registration services on behalf of :func:`.listen`
arguments.

By "managed registration", we mean that event listening functions and
other objects can be added to various collections in such a way that their
membership in all those collections can be revoked at once, based on
an equivalent :class:`._EventKey`.

"""

from __future__ import absolute_import

import weakref
import collections
import types
from .. import exc, util


_key_to_collection = collections.defaultdict(dict)
"""
Given an original listen() argument, can locate all
listener collections and the listener fn contained

(target, identifier, fn) -> {
    ref(listenercollection) -> ref(listener_fn)
    ref(listenercollection) -> ref(listener_fn)
    ref(listenercollection) -> ref(listener_fn)
}
"""

_collection_to_key = collections.defaultdict(dict)
"""
Given a _ListenerCollection or _ClsLevelListener, can locate
all the original listen() arguments and the listener fn contained

ref(listenercollection) -> {
    ref(listener_fn) -> (target, identifier, fn),
    ref(listener_fn) -> (target, identifier, fn),
    ref(listener_fn) -> (target, identifier, fn),
}
"""


def _collection_gced(ref):
    # defaultdict, so can't get a KeyError
    if not _collection_to_key or ref not in _collection_to_key:
        return
    listener_to_key = _collection_to_key.pop(ref)
    for key in listener_to_key.values():
        if key in _key_to_collection:
            # defaultdict, so can't get a KeyError
            dispatch_reg = _key_to_collection[key]
            dispatch_reg.pop(ref)
            if not dispatch_reg:
                _key_to_collection.pop(key)


def _stored_in_collection(event_key, owner):
    key = event_key._key

    dispatch_reg = _key_to_collection[key]

    owner_ref = owner.ref
    listen_ref = weakref.ref(event_key._listen_fn)

    if owner_ref in dispatch_reg:
        return False

    dispatch_reg[owner_ref] = listen_ref

    listener_to_key = _collection_to_key[owner_ref]
    listener_to_key[listen_ref] = key

    return True


def _removed_from_collection(event_key, owner):
    key = event_key._key

    dispatch_reg = _key_to_collection[key]

    listen_ref = weakref.ref(event_key._listen_fn)

    owner_ref = owner.ref
    dispatch_reg.pop(owner_ref, None)
    if not dispatch_reg:
        del _key_to_collection[key]

    if owner_ref in _collection_to_key:
        listener_to_key = _collection_to_key[owner_ref]
        listener_to_key.pop(listen_ref)


def _stored_in_collection_multi(newowner, oldowner, elements):
    if not elements:
        return

    oldowner = oldowner.ref
    newowner = newowner.ref

    old_listener_to_key = _collection_to_key[oldowner]
    new_listener_to_key = _collection_to_key[newowner]

    for listen_fn in elements:
        listen_ref = weakref.ref(listen_fn)
        key = old_listener_to_key[listen_ref]
        dispatch_reg = _key_to_collection[key]
        if newowner in dispatch_reg:
            assert dispatch_reg[newowner] == listen_ref
        else:
            dispatch_reg[newowner] = listen_ref

        new_listener_to_key[listen_ref] = key


def _clear(owner, elements):
    if not elements:
        return

    owner = owner.ref
    listener_to_key = _collection_to_key[owner]
    for listen_fn in elements:
        listen_ref = weakref.ref(listen_fn)
        key = listener_to_key[listen_ref]
        dispatch_reg = _key_to_collection[key]
        dispatch_reg.pop(owner, None)

        if not dispatch_reg:
            del _key_to_collection[key]


class _EventKey(object):
    """Represent :func:`.listen` arguments.
    """

    __slots__ = (
        'target', 'identifier', 'fn', 'fn_key', 'fn_wrap', 'dispatch_target'
    )

    def __init__(self, target, identifier,
                 fn, dispatch_target, _fn_wrap=None):
        self.target = target
        self.identifier = identifier
        self.fn = fn
        if isinstance(fn, types.MethodType):
            self.fn_key = id(fn.__func__), id(fn.__self__)
        else:
            self.fn_key = id(fn)
        self.fn_wrap = _fn_wrap
        self.dispatch_target = dispatch_target

    @property
    def _key(self):
        return (id(self.target), self.identifier, self.fn_key)

    def with_wrapper(self, fn_wrap):
        if fn_wrap is self._listen_fn:
            return self
        else:
            return _EventKey(
                self.target,
                self.identifier,
                self.fn,
                self.dispatch_target,
                _fn_wrap=fn_wrap
            )

    def with_dispatch_target(self, dispatch_target):
        if dispatch_target is self.dispatch_target:
            return self
        else:
            return _EventKey(
                self.target,
                self.identifier,
                self.fn,
                dispatch_target,
                _fn_wrap=self.fn_wrap
            )

    def listen(self, *args, **kw):
        once = kw.pop("once", False)
        named = kw.pop("named", False)

        target, identifier, fn = \
            self.dispatch_target, self.identifier, self._listen_fn

        dispatch_collection = getattr(target.dispatch, identifier)

        adjusted_fn = dispatch_collection._adjust_fn_spec(fn, named)

        self = self.with_wrapper(adjusted_fn)

        if once:
            self.with_wrapper(
                util.only_once(self._listen_fn)).listen(*args, **kw)
        else:
            self.dispatch_target.dispatch._listen(self, *args, **kw)

    def remove(self):
        key = self._key

        if key not in _key_to_collection:
            raise exc.InvalidRequestError(
                "No listeners found for event %s / %r / %s " %
                (self.target, self.identifier, self.fn)
            )
        dispatch_reg = _key_to_collection.pop(key)

        for collection_ref, listener_ref in dispatch_reg.items():
            collection = collection_ref()
            listener_fn = listener_ref()
            if collection is not None and listener_fn is not None:
                collection.remove(self.with_wrapper(listener_fn))

    def contains(self):
        """Return True if this event key is registered to listen.
        """
        return self._key in _key_to_collection

    def base_listen(self, propagate=False, insert=False,
                    named=False):

        target, identifier, fn = \
            self.dispatch_target, self.identifier, self._listen_fn

        dispatch_collection = getattr(target.dispatch, identifier)

        if insert:
            dispatch_collection.\
                for_modify(target.dispatch).insert(self, propagate)
        else:
            dispatch_collection.\
                for_modify(target.dispatch).append(self, propagate)

    @property
    def _listen_fn(self):
        return self.fn_wrap or self.fn

    def append_to_list(self, owner, list_):
        if _stored_in_collection(self, owner):
            list_.append(self._listen_fn)
            return True
        else:
            return False

    def remove_from_list(self, owner, list_):
        _removed_from_collection(self, owner)
        list_.remove(self._listen_fn)

    def prepend_to_list(self, owner, list_):
        if _stored_in_collection(self, owner):
            list_.appendleft(self._listen_fn)
            return True
        else:
            return False
sqlalchemy/events.py (1173 lines): file diff suppressed because it is too large.
@ -1,388 +0,0 @@
|
||||
# sqlalchemy/exc.py
|
||||
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Exceptions used with SQLAlchemy.
|
||||
|
||||
The base exception class is :exc:`.SQLAlchemyError`. Exceptions which are
|
||||
raised as a result of DBAPI exceptions are all subclasses of
|
||||
:exc:`.DBAPIError`.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
class SQLAlchemyError(Exception):
|
||||
"""Generic error class."""
|
||||
|
||||
|
||||
class ArgumentError(SQLAlchemyError):
|
||||
"""Raised when an invalid or conflicting function argument is supplied.
|
||||
|
||||
This error generally corresponds to construction time state errors.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
class ObjectNotExecutableError(ArgumentError):
|
||||
"""Raised when an object is passed to .execute() that can't be
|
||||
executed as SQL.
|
||||
|
||||
.. versionadded:: 1.1
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, target):
|
||||
super(ObjectNotExecutableError, self).__init__(
|
||||
"Not an executable object: %r" % target
|
||||
)
|
||||
|
||||
|
||||
class NoSuchModuleError(ArgumentError):
|
||||
"""Raised when a dynamically-loaded module (usually a database dialect)
|
||||
of a particular name cannot be located."""
|
||||
|
||||
|
||||
class NoForeignKeysError(ArgumentError):
|
||||
"""Raised when no foreign keys can be located between two selectables
|
||||
during a join."""
|
||||
|
||||
|
||||
class AmbiguousForeignKeysError(ArgumentError):
|
||||
"""Raised when more than one foreign key matching can be located
|
||||
between two selectables during a join."""
|
||||
|
||||
|
||||
class CircularDependencyError(SQLAlchemyError):
|
||||
"""Raised by topological sorts when a circular dependency is detected.
|
||||
|
||||
There are two scenarios where this error occurs:
|
||||
|
||||
* In a Session flush operation, if two objects are mutually dependent
|
||||
on each other, they can not be inserted or deleted via INSERT or
|
||||
DELETE statements alone; an UPDATE will be needed to post-associate
|
||||
or pre-deassociate one of the foreign key constrained values.
|
||||
The ``post_update`` flag described at :ref:`post_update` can resolve
|
||||
this cycle.
|
||||
* In a :attr:`.MetaData.sorted_tables` operation, two :class:`.ForeignKey`
|
||||
or :class:`.ForeignKeyConstraint` objects mutually refer to each
|
||||
other. Apply the ``use_alter=True`` flag to one or both,
|
||||
see :ref:`use_alter`.
|
||||
|
||||
"""
|
||||
def __init__(self, message, cycles, edges, msg=None):
|
||||
if msg is None:
|
||||
message += " (%s)" % ", ".join(repr(s) for s in cycles)
|
||||
else:
|
||||
message = msg
|
||||
SQLAlchemyError.__init__(self, message)
|
||||
self.cycles = cycles
|
||||
self.edges = edges
|
||||
|
||||
def __reduce__(self):
|
||||
return self.__class__, (None, self.cycles,
|
||||
self.edges, self.args[0])
|
||||
|
||||
|
||||
class CompileError(SQLAlchemyError):
|
||||
"""Raised when an error occurs during SQL compilation"""
|
||||
|
||||
|
||||
class UnsupportedCompilationError(CompileError):
|
||||
"""Raised when an operation is not supported by the given compiler.
|
||||
|
||||
|
||||
.. versionadded:: 0.8.3
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, compiler, element_type):
|
||||
super(UnsupportedCompilationError, self).__init__(
|
||||
"Compiler %r can't render element of type %s" %
|
||||
(compiler, element_type))
|
||||
|
||||
|
||||
class IdentifierError(SQLAlchemyError):
|
||||
"""Raised when a schema name is beyond the max character limit"""
|
||||
|
||||
|
||||
class DisconnectionError(SQLAlchemyError):
|
||||
"""A disconnect is detected on a raw DB-API connection.
|
||||
|
||||
This error is raised and consumed internally by a connection pool. It can
|
||||
be raised by the :meth:`.PoolEvents.checkout` event so that the host pool
|
||||
forces a retry; the exception will be caught three times in a row before
|
||||
the pool gives up and raises :class:`~sqlalchemy.exc.InvalidRequestError`
|
||||
regarding the connection attempt.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
class TimeoutError(SQLAlchemyError):
|
||||
"""Raised when a connection pool times out on getting a connection."""
|
||||
|
||||
|
||||
class InvalidRequestError(SQLAlchemyError):
|
||||
"""SQLAlchemy was asked to do something it can't do.
|
||||
|
||||
This error generally corresponds to runtime state errors.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
class NoInspectionAvailable(InvalidRequestError):
|
||||
"""A subject passed to :func:`sqlalchemy.inspection.inspect` produced
|
||||
no context for inspection."""
|
||||
|
||||
|
||||
class ResourceClosedError(InvalidRequestError):
|
||||
"""An operation was requested from a connection, cursor, or other
|
||||
object that's in a closed state."""
|
||||
|
||||
|
||||
class NoSuchColumnError(KeyError, InvalidRequestError):
|
||||
"""A nonexistent column is requested from a ``RowProxy``."""
|
||||
|
||||
|
||||
class NoReferenceError(InvalidRequestError):
|
||||
"""Raised by ``ForeignKey`` to indicate a reference cannot be resolved."""
|
||||
|
||||
|
||||
class NoReferencedTableError(NoReferenceError):
|
||||
"""Raised by ``ForeignKey`` when the referred ``Table`` cannot be
|
||||
located.
|
||||
|
||||
"""
|
||||
def __init__(self, message, tname):
|
||||
NoReferenceError.__init__(self, message)
|
||||
self.table_name = tname
|
||||
|
||||
def __reduce__(self):
|
||||
return self.__class__, (self.args[0], self.table_name)
|
||||
|
||||
|
||||
class NoReferencedColumnError(NoReferenceError):
|
||||
"""Raised by ``ForeignKey`` when the referred ``Column`` cannot be
|
||||
located.
|
||||
|
||||
"""
|
||||
def __init__(self, message, tname, cname):
|
||||
NoReferenceError.__init__(self, message)
|
||||
self.table_name = tname
|
||||
self.column_name = cname
|
||||
|
||||
def __reduce__(self):
|
||||
return self.__class__, (self.args[0], self.table_name,
|
||||
self.column_name)
|
||||
|
||||
|
||||
class NoSuchTableError(InvalidRequestError):
|
||||
"""Table does not exist or is not visible to a connection."""
|
||||
|
||||
|
||||
class UnboundExecutionError(InvalidRequestError):
|
||||
"""SQL was attempted without a database connection to execute it on."""
|
||||
|
||||
|
||||
class DontWrapMixin(object):
|
||||
"""A mixin class which, when applied to a user-defined Exception class,
|
||||
will not be wrapped inside of :exc:`.StatementError` if the error is
|
||||
emitted within the process of executing a statement.
|
||||
|
||||
E.g.::
|
||||
|
||||
from sqlalchemy.exc import DontWrapMixin
|
||||
|
||||
class MyCustomException(Exception, DontWrapMixin):
|
||||
pass
|
||||
|
||||
class MySpecialType(TypeDecorator):
|
||||
impl = String
|
||||
|
||||
def process_bind_param(self, value, dialect):
|
||||
if value == 'invalid':
|
||||
raise MyCustomException("invalid!")
|
||||
|
||||
"""
|
||||
|
||||
# Moved to orm.exc; compatibility definition installed by orm import until 0.6
|
||||
UnmappedColumnError = None


class StatementError(SQLAlchemyError):
    """An error occurred during execution of a SQL statement.

    :class:`StatementError` wraps the exception raised
    during execution, and features :attr:`.statement`
    and :attr:`.params` attributes which supply context regarding
    the specifics of the statement which had an issue.

    The wrapped exception object is available in
    the :attr:`.orig` attribute.

    """

    statement = None
    """The string SQL statement being invoked when this exception occurred."""

    params = None
    """The parameter list being used when this exception occurred."""

    orig = None
    """The DBAPI exception object."""

    def __init__(self, message, statement, params, orig):
        SQLAlchemyError.__init__(self, message)
        self.statement = statement
        self.params = params
        self.orig = orig
        self.detail = []

    def add_detail(self, msg):
        self.detail.append(msg)

    def __reduce__(self):
        return self.__class__, (self.args[0], self.statement,
                                self.params, self.orig)

    def __str__(self):
        from sqlalchemy.sql import util

        details = [SQLAlchemyError.__str__(self)]
        if self.statement:
            details.append("[SQL: %r]" % self.statement)
            if self.params:
                params_repr = util._repr_params(self.params, 10)
                details.append("[parameters: %r]" % params_repr)
        return ' '.join([
            "(%s)" % det for det in self.detail
        ] + details)

    def __unicode__(self):
        return self.__str__()
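

# --- Editor's example (not part of the original module) ---------------------
# A minimal sketch of how the StatementError attributes defined above are
# typically consumed by calling code.  ``connection`` and ``stmt`` are
# hypothetical placeholders, not names from this module.
def _example_statement_error_handling(connection, stmt):
    try:
        return connection.execute(stmt)
    except StatementError as err:
        # .statement, .params and .orig were populated by __init__ above
        print("failed SQL: %r" % (err.statement,))
        print("parameters: %r" % (err.params,))
        print("underlying DBAPI exception: %r" % (err.orig,))
        raise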


class DBAPIError(StatementError):
    """Raised when the execution of a database operation fails.

    Wraps exceptions raised by the DB-API underlying the
    database operation.  Driver-specific implementations of the standard
    DB-API exception types are wrapped by matching sub-types of SQLAlchemy's
    :class:`DBAPIError` when possible.  DB-API's ``Error`` type maps to
    :class:`DBAPIError` in SQLAlchemy, otherwise the names are identical.
    Note that there is no guarantee that different DB-API implementations
    will raise the same exception type for any given error condition.

    :class:`DBAPIError` features :attr:`~.StatementError.statement`
    and :attr:`~.StatementError.params` attributes which supply context
    regarding the specifics of the statement which had an issue, for the
    typical case when the error was raised within the context of
    emitting a SQL statement.

    The wrapped exception object is available in the
    :attr:`~.StatementError.orig` attribute.  Its type and properties are
    DB-API implementation specific.

    """

    @classmethod
    def instance(cls, statement, params,
                 orig, dbapi_base_err,
                 connection_invalidated=False,
                 dialect=None):
        # Don't ever wrap these, just return them directly as if
        # DBAPIError didn't exist.
        if (isinstance(orig, BaseException) and
                not isinstance(orig, Exception)) or \
                isinstance(orig, DontWrapMixin):
            return orig

        if orig is not None:
            # not a DBAPI error, statement is present.
            # raise a StatementError
            if not isinstance(orig, dbapi_base_err) and statement:
                return StatementError(
                    "(%s.%s) %s" %
                    (orig.__class__.__module__, orig.__class__.__name__,
                     orig),
                    statement, params, orig
                )

            glob = globals()
            for super_ in orig.__class__.__mro__:
                name = super_.__name__
                if dialect:
                    name = dialect.dbapi_exception_translation_map.get(
                        name, name)
                if name in glob and issubclass(glob[name], DBAPIError):
                    cls = glob[name]
                    break

        return cls(statement, params, orig, connection_invalidated)

    def __reduce__(self):
        return self.__class__, (self.statement, self.params,
                                self.orig, self.connection_invalidated)

    def __init__(self, statement, params, orig, connection_invalidated=False):
        try:
            text = str(orig)
        except Exception as e:
            text = 'Error in str() of DB-API-generated exception: ' + str(e)
        StatementError.__init__(
            self,
            '(%s.%s) %s' % (
                orig.__class__.__module__, orig.__class__.__name__, text, ),
            statement,
            params,
            orig
        )
        self.connection_invalidated = connection_invalidated
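

# --- Editor's example (not part of the original module) ---------------------
# A hedged illustration of the DBAPIError.instance() lookup above: the class
# names in the driver exception's __mro__ are matched against the wrapper
# classes defined below, falling back to DBAPIError itself.  The "driver"
# classes here are stand-ins, not a real DBAPI.
def _example_wrap_driver_exception():
    class Error(Exception):
        """Pretend DB-API base ``Error`` class."""

    class IntegrityError(Error):
        """Pretend driver class; its name matches a wrapper defined below."""

    driver_exc = IntegrityError("UNIQUE constraint failed")
    wrapped = DBAPIError.instance(
        "INSERT INTO t (id) VALUES (?)", (1,), driver_exc, Error)
    # ``wrapped`` is sqlalchemy.exc.IntegrityError, selected by the
    # globals()/__mro__ scan in DBAPIError.instance()
    return wrapped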


class InterfaceError(DBAPIError):
    """Wraps a DB-API InterfaceError."""


class DatabaseError(DBAPIError):
    """Wraps a DB-API DatabaseError."""


class DataError(DatabaseError):
    """Wraps a DB-API DataError."""


class OperationalError(DatabaseError):
    """Wraps a DB-API OperationalError."""


class IntegrityError(DatabaseError):
    """Wraps a DB-API IntegrityError."""


class InternalError(DatabaseError):
    """Wraps a DB-API InternalError."""


class ProgrammingError(DatabaseError):
    """Wraps a DB-API ProgrammingError."""


class NotSupportedError(DatabaseError):
    """Wraps a DB-API NotSupportedError."""


# Warnings

class SADeprecationWarning(DeprecationWarning):
    """Issued once per usage of a deprecated API."""


class SAPendingDeprecationWarning(PendingDeprecationWarning):
    """Issued once per usage of a deprecated API."""


class SAWarning(RuntimeWarning):
    """Issued at runtime."""
@@ -1,11 +0,0 @@
# ext/__init__.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from .. import util as _sa_util

_sa_util.dependencies.resolve_all("sqlalchemy.ext")
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,559 +0,0 @@
# sqlalchemy/ext/baked.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Baked query extension.

Provides a creational pattern for the :class:`.query.Query` object which
allows the fully constructed object, Core select statement, and string
compiled result to be fully cached.


"""

from ..orm.query import Query
from ..orm import strategies, attributes, properties, \
    strategy_options, util as orm_util, interfaces
from .. import log as sqla_log
from ..sql import util as sql_util, func, literal_column
from ..orm import exc as orm_exc
from .. import exc as sa_exc
from .. import util

import copy
import logging

log = logging.getLogger(__name__)


class BakedQuery(object):
    """A builder object for :class:`.query.Query` objects."""

    __slots__ = 'steps', '_bakery', '_cache_key', '_spoiled'

    def __init__(self, bakery, initial_fn, args=()):
        self._cache_key = ()
        self._update_cache_key(initial_fn, args)
        self.steps = [initial_fn]
        self._spoiled = False
        self._bakery = bakery

    @classmethod
    def bakery(cls, size=200):
        """Construct a new bakery."""

        _bakery = util.LRUCache(size)

        def call(initial_fn, *args):
            return cls(_bakery, initial_fn, args)

        return call

    def _clone(self):
        b1 = BakedQuery.__new__(BakedQuery)
        b1._cache_key = self._cache_key
        b1.steps = list(self.steps)
        b1._bakery = self._bakery
        b1._spoiled = self._spoiled
        return b1

    def _update_cache_key(self, fn, args=()):
        self._cache_key += (fn.__code__,) + args

    def __iadd__(self, other):
        if isinstance(other, tuple):
            self.add_criteria(*other)
        else:
            self.add_criteria(other)
        return self

    def __add__(self, other):
        if isinstance(other, tuple):
            return self.with_criteria(*other)
        else:
            return self.with_criteria(other)

    def add_criteria(self, fn, *args):
        """Add a criteria function to this :class:`.BakedQuery`.

        This is equivalent to using the ``+=`` operator to
        modify a :class:`.BakedQuery` in-place.

        """
        self._update_cache_key(fn, args)
        self.steps.append(fn)
        return self

    def with_criteria(self, fn, *args):
        """Add a criteria function to a :class:`.BakedQuery` cloned from
        this one.

        This is equivalent to using the ``+`` operator to
        produce a new :class:`.BakedQuery` with modifications.

        """
        return self._clone().add_criteria(fn, *args)

    def for_session(self, session):
        """Return a :class:`.Result` object for this :class:`.BakedQuery`.

        This is equivalent to calling the :class:`.BakedQuery` as a
        Python callable, e.g. ``result = my_baked_query(session)``.

        """
        return Result(self, session)

    def __call__(self, session):
        return self.for_session(session)

    def spoil(self, full=False):
        """Cancel any query caching that will occur on this BakedQuery object.

        The BakedQuery can continue to be used normally, however additional
        creational functions will not be cached; they will be called
        on every invocation.

        This is to support the case where a particular step in constructing
        a baked query disqualifies the query from being cacheable, such
        as a variant that relies upon some uncacheable value.

        :param full: if False, only functions added to this
         :class:`.BakedQuery` object subsequent to the spoil step will be
         non-cached; the state of the :class:`.BakedQuery` up until
         this point will be pulled from the cache.  If True, then the
         entire :class:`.Query` object is built from scratch each
         time, with all creational functions being called on each
         invocation.

        """
        if not full:
            _spoil_point = self._clone()
            _spoil_point._cache_key += ('_query_only', )
            self.steps = [_spoil_point._retrieve_baked_query]
        self._spoiled = True
        return self

    def _retrieve_baked_query(self, session):
        query = self._bakery.get(self._cache_key, None)
        if query is None:
            query = self._as_query(session)
            self._bakery[self._cache_key] = query.with_session(None)
        return query.with_session(session)

    def _bake(self, session):
        query = self._as_query(session)

        context = query._compile_context()
        self._bake_subquery_loaders(session, context)
        context.session = None
        context.query = query = context.query.with_session(None)
        query._execution_options = query._execution_options.union(
            {"compiled_cache": self._bakery}
        )
        # we'll be holding onto the query for some of its state,
        # so delete some compilation-use-only attributes that can take up
        # space
        for attr in (
                '_correlate', '_from_obj', '_mapper_adapter_map',
                '_joinpath', '_joinpoint'):
            query.__dict__.pop(attr, None)
        self._bakery[self._cache_key] = context
        return context

    def _as_query(self, session):
        query = self.steps[0](session)

        for step in self.steps[1:]:
            query = step(query)
        return query

    def _bake_subquery_loaders(self, session, context):
        """convert subquery eager loaders in the cache into baked queries.

        For subquery eager loading to work, all we need here is that the
        Query point to the correct session when it is run.  However, since
        we are "baking" anyway, we may as well also turn the query into
        a "baked" query so that we save on performance too.

        """
        context.attributes['baked_queries'] = baked_queries = []
        for k, v in list(context.attributes.items()):
            if isinstance(v, Query):
                if 'subquery' in k:
                    bk = BakedQuery(self._bakery, lambda *args: v)
                    bk._cache_key = self._cache_key + k
                    bk._bake(session)
                    baked_queries.append((k, bk._cache_key, v))
                del context.attributes[k]

    def _unbake_subquery_loaders(self, session, context, params):
        """Retrieve subquery eager loaders stored by _bake_subquery_loaders
        and turn them back into Result objects that will iterate just
        like a Query object.

        """
        for k, cache_key, query in context.attributes["baked_queries"]:
            bk = BakedQuery(self._bakery,
                            lambda sess, q=query: q.with_session(sess))
            bk._cache_key = cache_key
            context.attributes[k] = bk.for_session(session).params(**params)
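

# --- Editor's example (not part of the original module) ---------------------
# A hedged usage sketch for the BakedQuery builder above.  ``User`` is a
# hypothetical mapped class; ``bindparam`` would come from the top-level
# ``sqlalchemy`` package.  Kept commented out since those names are not
# defined in this module:
#
#     bakery = BakedQuery.bakery()
#
#     def lookup_user(session, username):
#         baked = bakery(lambda session: session.query(User))
#         # each function's __code__ feeds _update_cache_key(), so the
#         # constructed Query and its compiled SQL are cached after the
#         # first call
#         baked += lambda q: q.filter(User.name == bindparam('username'))
#         return baked(session).params(username=username).first()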


class Result(object):
    """Invokes a :class:`.BakedQuery` against a :class:`.Session`.

    The :class:`.Result` object is where the actual :class:`.query.Query`
    object gets created, or retrieved from the cache,
    against a target :class:`.Session`, and is then invoked for results.

    """
    __slots__ = 'bq', 'session', '_params'

    def __init__(self, bq, session):
        self.bq = bq
        self.session = session
        self._params = {}

    def params(self, *args, **kw):
        """Specify parameters to be replaced into the string SQL statement."""

        if len(args) == 1:
            kw.update(args[0])
        elif len(args) > 0:
            raise sa_exc.ArgumentError(
                "params() takes zero or one positional argument, "
                "which is a dictionary.")
        self._params.update(kw)
        return self

    def _as_query(self):
        return self.bq._as_query(self.session).params(self._params)

    def __str__(self):
        return str(self._as_query())

    def __iter__(self):
        bq = self.bq
        if bq._spoiled:
            return iter(self._as_query())

        baked_context = bq._bakery.get(bq._cache_key, None)
        if baked_context is None:
            baked_context = bq._bake(self.session)

        context = copy.copy(baked_context)
        context.session = self.session
        context.attributes = context.attributes.copy()

        bq._unbake_subquery_loaders(self.session, context, self._params)

        context.statement.use_labels = True
        if context.autoflush and not context.populate_existing:
            self.session._autoflush()
        return context.query.params(self._params).\
            with_session(self.session)._execute_and_instances(context)

    def count(self):
        """return the 'count'.

        Equivalent to :meth:`.Query.count`.

        Note this uses a subquery to ensure an accurate count regardless
        of the structure of the original statement.

        .. versionadded:: 1.1.6

        """
        col = func.count(literal_column('*'))
        bq = self.bq.with_criteria(lambda q: q.from_self(col))
        return bq.for_session(self.session).params(self._params).scalar()

    def scalar(self):
        """Return the first element of the first result or None
        if no rows present.  If multiple rows are returned,
        raises MultipleResultsFound.

        Equivalent to :meth:`.Query.scalar`.

        .. versionadded:: 1.1.6

        """
        try:
            ret = self.one()
            if not isinstance(ret, tuple):
                return ret
            return ret[0]
        except orm_exc.NoResultFound:
            return None

    def first(self):
        """Return the first row.

        Equivalent to :meth:`.Query.first`.

        """
        bq = self.bq.with_criteria(lambda q: q.slice(0, 1))
        ret = list(bq.for_session(self.session).params(self._params))
        if len(ret) > 0:
            return ret[0]
        else:
            return None

    def one(self):
        """Return exactly one result or raise an exception.

        Equivalent to :meth:`.Query.one`.

        """
        try:
            ret = self.one_or_none()
        except orm_exc.MultipleResultsFound:
            raise orm_exc.MultipleResultsFound(
                "Multiple rows were found for one()")
        else:
            if ret is None:
                raise orm_exc.NoResultFound("No row was found for one()")
            return ret

    def one_or_none(self):
        """Return one or zero results, or raise an exception for multiple
        rows.

        Equivalent to :meth:`.Query.one_or_none`.

        .. versionadded:: 1.0.9

        """
        ret = list(self)

        l = len(ret)
        if l == 1:
            return ret[0]
        elif l == 0:
            return None
        else:
            raise orm_exc.MultipleResultsFound(
                "Multiple rows were found for one_or_none()")

    def all(self):
        """Return all rows.

        Equivalent to :meth:`.Query.all`.

        """
        return list(self)

    def get(self, ident):
        """Retrieve an object based on identity.

        Equivalent to :meth:`.Query.get`.

        """
        query = self.bq.steps[0](self.session)
        return query._get_impl(ident, self._load_on_ident)

    def _load_on_ident(self, query, key):
        """Load the given identity key from the database."""

        ident = key[1]

        mapper = query._mapper_zero()

        _get_clause, _get_params = mapper._get_clause

        def setup(query):
            _lcl_get_clause = _get_clause
            q = query._clone()
            q._get_condition()
            q._order_by = None

            # None present in ident - turn those comparisons
            # into "IS NULL"
            if None in ident:
                nones = set([
                    _get_params[col].key for col, value in
                    zip(mapper.primary_key, ident) if value is None
                ])
                _lcl_get_clause = sql_util.adapt_criterion_to_null(
                    _lcl_get_clause, nones)

            _lcl_get_clause = q._adapt_clause(_lcl_get_clause, True, False)
            q._criterion = _lcl_get_clause
            return q

        # cache the query against a key that includes
        # which positions in the primary key are NULL
        # (remember, we can map to an OUTER JOIN)
        bq = self.bq

        # add the clause we got from mapper._get_clause to the cache
        # key so that if a race causes multiple calls to _get_clause,
        # we've cached on ours
        bq = bq._clone()
        bq._cache_key += (_get_clause, )

        bq = bq.with_criteria(setup, tuple(elem is None for elem in ident))

        params = dict([
            (_get_params[primary_key].key, id_val)
            for id_val, primary_key in zip(ident, mapper.primary_key)
        ])

        result = list(bq.for_session(self.session).params(**params))
        l = len(result)
        if l > 1:
            raise orm_exc.MultipleResultsFound()
        elif l:
            return result[0]
        else:
            return None
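

# --- Editor's example (not part of the original module) ---------------------
# How the Result methods above mirror the Query API; assuming ``baked`` and
# ``session`` from the previous sketch:
#
#     result = baked(session).params(username='ed')
#     result.all()    # like Query.all(); iterates the cached context
#     result.first()  # adds slice(0, 1) as a further cached criteria step
#     result.count()  # wraps the statement in a counting subquery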


def bake_lazy_loaders():
    """Enable the use of baked queries for all lazyloaders systemwide.

    This operation should be safe for all lazy loaders, and will reduce
    Python overhead for these operations.

    """
    BakedLazyLoader._strategy_keys[:] = []

    properties.RelationshipProperty.strategy_for(
        lazy="select")(BakedLazyLoader)
    properties.RelationshipProperty.strategy_for(
        lazy=True)(BakedLazyLoader)
    properties.RelationshipProperty.strategy_for(
        lazy="baked_select")(BakedLazyLoader)

    strategies.LazyLoader._strategy_keys[:] = BakedLazyLoader._strategy_keys[:]


def unbake_lazy_loaders():
    """Disable the use of baked queries for all lazyloaders systemwide.

    This operation reverts the changes produced by :func:`.bake_lazy_loaders`.

    """
    strategies.LazyLoader._strategy_keys[:] = []
    BakedLazyLoader._strategy_keys[:] = []

    properties.RelationshipProperty.strategy_for(
        lazy="select")(strategies.LazyLoader)
    properties.RelationshipProperty.strategy_for(
        lazy=True)(strategies.LazyLoader)
    properties.RelationshipProperty.strategy_for(
        lazy="baked_select")(BakedLazyLoader)
    assert strategies.LazyLoader._strategy_keys


@sqla_log.class_logger
@properties.RelationshipProperty.strategy_for(lazy="baked_select")
class BakedLazyLoader(strategies.LazyLoader):

    def _emit_lazyload(self, session, state, ident_key, passive):
        q = BakedQuery(
            self.mapper._compiled_cache,
            lambda session: session.query(self.mapper))
        q.add_criteria(
            lambda q: q._adapt_all_clauses()._with_invoke_all_eagers(False),
            self.parent_property)

        if not self.parent_property.bake_queries:
            q.spoil(full=True)

        if self.parent_property.secondary is not None:
            q.add_criteria(
                lambda q:
                q.select_from(self.mapper, self.parent_property.secondary))

        pending = not state.key

        # don't autoflush on pending
        if pending or passive & attributes.NO_AUTOFLUSH:
            q.add_criteria(lambda q: q.autoflush(False))

        if state.load_options:
            q.spoil()
            args = state.load_path[self.parent_property]
            q.add_criteria(
                lambda q:
                q._with_current_path(args), args)
            q.add_criteria(
                lambda q: q._conditional_options(*state.load_options))

        if self.use_get:
            return q(session)._load_on_ident(
                session.query(self.mapper), ident_key)

        if self.parent_property.order_by:
            q.add_criteria(
                lambda q:
                q.order_by(*util.to_list(self.parent_property.order_by)))

        for rev in self.parent_property._reverse_property:
            # reverse props that are MANYTOONE are loading *this*
            # object from get(), so don't need to eager out to those.
            if rev.direction is interfaces.MANYTOONE and \
                    rev._use_get and \
                    not isinstance(rev.strategy, strategies.LazyLoader):

                q.add_criteria(
                    lambda q:
                    q.options(
                        strategy_options.Load.for_existing_path(
                            q._current_path[rev.parent]
                        ).baked_lazyload(rev.key)
                    )
                )

        lazy_clause, params = self._generate_lazy_clause(state, passive)

        if pending:
            if orm_util._none_set.intersection(params.values()):
                return None

        q.add_criteria(lambda q: q.filter(lazy_clause))
        result = q(session).params(**params).all()
        if self.uselist:
            return result
        else:
            l = len(result)
            if l:
                if l > 1:
                    util.warn(
                        "Multiple rows returned with "
                        "uselist=False for lazily-loaded attribute '%s' "
                        % self.parent_property)

                return result[0]
            else:
                return None


@strategy_options.loader_option()
def baked_lazyload(loadopt, attr):
    """Indicate that the given attribute should be loaded using "lazy"
    loading with a "baked" query used in the load.

    """
    return loadopt.set_relationship_strategy(attr, {"lazy": "baked_select"})


@baked_lazyload._add_unbound_fn
def baked_lazyload(*keys):
    return strategy_options._UnboundLoad._from_keys(
        strategy_options._UnboundLoad.baked_lazyload, keys, False, {})


@baked_lazyload._add_unbound_all_fn
def baked_lazyload_all(*keys):
    return strategy_options._UnboundLoad._from_keys(
        strategy_options._UnboundLoad.baked_lazyload, keys, True, {})

baked_lazyload = baked_lazyload._unbound_fn
baked_lazyload_all = baked_lazyload_all._unbound_all_fn

bakery = BakedQuery.bakery
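

# --- Editor's example (not part of the original module) ---------------------
# A hedged sketch of the loader options defined above; ``User.addresses``
# is a hypothetical relationship:
#
#     from sqlalchemy.ext.baked import baked_lazyload, bake_lazy_loaders
#
#     # opt a single relationship into baked lazy loading
#     session.query(User).options(baked_lazyload(User.addresses))
#
#     # or switch every lazy loader to BakedLazyLoader systemwide
#     bake_lazy_loaders()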
@@ -1,474 +0,0 @@
# ext/compiler.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

r"""Provides an API for creation of custom ClauseElements and compilers.

Synopsis
========

Usage involves the creation of one or more
:class:`~sqlalchemy.sql.expression.ClauseElement` subclasses and one or
more callables defining its compilation::

    from sqlalchemy.ext.compiler import compiles
    from sqlalchemy.sql.expression import ColumnClause

    class MyColumn(ColumnClause):
        pass

    @compiles(MyColumn)
    def compile_mycolumn(element, compiler, **kw):
        return "[%s]" % element.name

Above, ``MyColumn`` extends :class:`~sqlalchemy.sql.expression.ColumnClause`,
the base expression element for named column objects.  The ``compiles``
decorator registers itself with the ``MyColumn`` class so that it is invoked
when the object is compiled to a string::

    from sqlalchemy import select

    s = select([MyColumn('x'), MyColumn('y')])
    print str(s)

Produces::

    SELECT [x], [y]

Dialect-specific compilation rules
==================================

Compilers can also be made dialect-specific.  The appropriate compiler will
be invoked for the dialect in use::

    from sqlalchemy.schema import DDLElement

    class AlterColumn(DDLElement):

        def __init__(self, column, cmd):
            self.column = column
            self.cmd = cmd

    @compiles(AlterColumn)
    def visit_alter_column(element, compiler, **kw):
        return "ALTER COLUMN %s ..." % element.column.name

    @compiles(AlterColumn, 'postgresql')
    def visit_alter_column(element, compiler, **kw):
        return "ALTER TABLE %s ALTER COLUMN %s ..." % (element.table.name,
                                                       element.column.name)

The second ``visit_alter_column`` will be invoked when any ``postgresql``
dialect is used.
Compiling sub-elements of a custom expression construct
=======================================================

The ``compiler`` argument is the
:class:`~sqlalchemy.engine.interfaces.Compiled` object in use.  This object
can be inspected for any information about the in-progress compilation,
including ``compiler.dialect``, ``compiler.statement`` etc.  The
:class:`~sqlalchemy.sql.compiler.SQLCompiler` and
:class:`~sqlalchemy.sql.compiler.DDLCompiler` both include a ``process()``
method which can be used for compilation of embedded attributes::

    from sqlalchemy.sql.expression import Executable, ClauseElement

    class InsertFromSelect(Executable, ClauseElement):
        def __init__(self, table, select):
            self.table = table
            self.select = select

    @compiles(InsertFromSelect)
    def visit_insert_from_select(element, compiler, **kw):
        return "INSERT INTO %s (%s)" % (
            compiler.process(element.table, asfrom=True),
            compiler.process(element.select)
        )

    insert = InsertFromSelect(t1, select([t1]).where(t1.c.x > 5))
    print insert

Produces::

    "INSERT INTO mytable (SELECT mytable.x, mytable.y, mytable.z
    FROM mytable WHERE mytable.x > :x_1)"

.. note::

    The above ``InsertFromSelect`` construct is only an example, this actual
    functionality is already available using the
    :meth:`.Insert.from_select` method.

.. note::

    The above ``InsertFromSelect`` construct probably wants to have
    "autocommit" enabled.  See :ref:`enabling_compiled_autocommit` for this
    step.

Cross Compiling between SQL and DDL compilers
---------------------------------------------

SQL and DDL constructs are each compiled using different base compilers -
``SQLCompiler`` and ``DDLCompiler``.  A common need is to access the
compilation rules of SQL expressions from within a DDL expression.  The
``DDLCompiler`` includes an accessor ``sql_compiler`` for this reason, such
as below where we generate a CHECK constraint that embeds a SQL expression::

    @compiles(MyConstraint)
    def compile_my_constraint(constraint, ddlcompiler, **kw):
        return "CONSTRAINT %s CHECK (%s)" % (
            constraint.name,
            ddlcompiler.sql_compiler.process(
                constraint.expression, literal_binds=True)
        )

Above, we add an additional flag to the process step as called by
:meth:`.SQLCompiler.process`, which is the ``literal_binds`` flag.  This
indicates that any SQL expression which refers to a :class:`.BindParameter`
object or other "literal" object such as those which refer to strings or
integers should be rendered **in-place**, rather than being referred to as
a bound parameter; when emitting DDL, bound parameters are typically not
supported.


.. _enabling_compiled_autocommit:

Enabling Autocommit on a Construct
==================================

Recall from the section :ref:`autocommit` that the :class:`.Engine`, when
asked to execute a construct in the absence of a user-defined transaction,
detects if the given construct represents DML or DDL, that is, a data
modification or data definition statement, which requires (or may require,
in the case of DDL) that the transaction generated by the DBAPI be committed
(recall that DBAPI always has a transaction going on regardless of what
SQLAlchemy does).  Checking for this is actually accomplished by checking for
the "autocommit" execution option on the construct.  When building a
construct like an INSERT derivation, a new DDL type, or perhaps a stored
procedure that alters data, the "autocommit" option needs to be set in order
for the statement to function with "connectionless" execution
(as described in :ref:`dbengine_implicit`).

Currently a quick way to do this is to subclass :class:`.Executable`, then
add the "autocommit" flag to the ``_execution_options`` dictionary (note this
is a "frozen" dictionary which supplies a generative ``union()`` method)::

    from sqlalchemy.sql.expression import Executable, ClauseElement

    class MyInsertThing(Executable, ClauseElement):
        _execution_options = \
            Executable._execution_options.union({'autocommit': True})

More succinctly, if the construct is truly similar to an INSERT, UPDATE, or
DELETE, :class:`.UpdateBase` can be used, which already is a subclass
of :class:`.Executable`, :class:`.ClauseElement` and includes the
``autocommit`` flag::

    from sqlalchemy.sql.expression import UpdateBase

    class MyInsertThing(UpdateBase):
        def __init__(self, ...):
            ...

DDL elements that subclass :class:`.DDLElement` already have the
"autocommit" flag turned on.

Changing the default compilation of existing constructs
=======================================================

The compiler extension applies just as well to the existing constructs.  When
overriding the compilation of a built in SQL construct, the @compiles
decorator is invoked upon the appropriate class (be sure to use the class,
i.e. ``Insert`` or ``Select``, instead of the creation function such
as ``insert()`` or ``select()``).

Within the new compilation function, to get at the "original" compilation
routine, use the appropriate visit_XXX method - this is
because compiler.process() will call upon the overriding routine and cause
an endless loop.  Such as, to add "prefix" to all insert statements::

    from sqlalchemy.sql.expression import Insert

    @compiles(Insert)
    def prefix_inserts(insert, compiler, **kw):
        return compiler.visit_insert(insert.prefix_with("some prefix"), **kw)

The above compiler will prefix all INSERT statements with "some prefix" when
compiled.

.. _type_compilation_extension:

Changing Compilation of Types
=============================

``compiler`` works for types, too, such as below where we implement the
MS-SQL specific 'max' keyword for ``String``/``VARCHAR``::

    @compiles(String, 'mssql')
    @compiles(VARCHAR, 'mssql')
    def compile_varchar(element, compiler, **kw):
        if element.length == 'max':
            return "VARCHAR('max')"
        else:
            return compiler.visit_VARCHAR(element, **kw)

    foo = Table('foo', metadata,
        Column('data', VARCHAR('max'))
    )

Subclassing Guidelines
======================

A big part of using the compiler extension is subclassing SQLAlchemy
expression constructs.  To make this easier, the expression and
schema packages feature a set of "bases" intended for common tasks.
A synopsis is as follows:

* :class:`~sqlalchemy.sql.expression.ClauseElement` - This is the root
  expression class.  Any SQL expression can be derived from this base, and is
  probably the best choice for longer constructs such as specialized INSERT
  statements.

* :class:`~sqlalchemy.sql.expression.ColumnElement` - The root of all
  "column-like" elements.  Anything that you'd place in the "columns" clause
  of a SELECT statement (as well as order by and group by) can derive from
  this - the object will automatically have Python "comparison" behavior.

  :class:`~sqlalchemy.sql.expression.ColumnElement` classes want to have a
  ``type`` member which is the expression's return type.  This can be
  established at the instance level in the constructor, or at the class level
  if it's generally constant::

      class timestamp(ColumnElement):
          type = TIMESTAMP()

* :class:`~sqlalchemy.sql.functions.FunctionElement` - This is a hybrid of a
  ``ColumnElement`` and a "from clause" like object, and represents a SQL
  function or stored procedure type of call.  Since most databases support
  statements along the line of "SELECT FROM <some function>",
  ``FunctionElement`` adds in the ability to be used in the FROM clause of a
  ``select()`` construct::

      from sqlalchemy.sql.expression import FunctionElement

      class coalesce(FunctionElement):
          name = 'coalesce'

      @compiles(coalesce)
      def compile(element, compiler, **kw):
          return "coalesce(%s)" % compiler.process(element.clauses)

      @compiles(coalesce, 'oracle')
      def compile(element, compiler, **kw):
          if len(element.clauses) > 2:
              raise TypeError("coalesce only supports two arguments on Oracle")
          return "nvl(%s)" % compiler.process(element.clauses)

* :class:`~sqlalchemy.schema.DDLElement` - The root of all DDL expressions,
  like CREATE TABLE, ALTER TABLE, etc.  Compilation of ``DDLElement``
  subclasses is issued by a ``DDLCompiler`` instead of a ``SQLCompiler``.
  ``DDLElement`` also features ``Table`` and ``MetaData`` event hooks via the
  ``execute_at()`` method, allowing the construct to be invoked during CREATE
  TABLE and DROP TABLE sequences.

* :class:`~sqlalchemy.sql.expression.Executable` - This is a mixin which
  should be used with any expression class that represents a "standalone"
  SQL statement that can be passed directly to an ``execute()`` method.  It
  is already implicit within ``DDLElement`` and ``FunctionElement``.

Further Examples
================

"UTC timestamp" function
------------------------

A function that works like "CURRENT_TIMESTAMP" except applies the
appropriate conversions so that the time is in UTC time.  Timestamps are
best stored in relational databases as UTC, without time zones.  UTC so
that your database doesn't think time has gone backwards in the hour when
daylight savings ends, without timezones because timezones are like
character encodings - they're best applied only at the endpoints of an
application (i.e. convert to UTC upon user input, re-apply desired
timezone upon display).

For PostgreSQL and Microsoft SQL Server::

    from sqlalchemy.sql import expression
    from sqlalchemy.ext.compiler import compiles
    from sqlalchemy.types import DateTime

    class utcnow(expression.FunctionElement):
        type = DateTime()

    @compiles(utcnow, 'postgresql')
    def pg_utcnow(element, compiler, **kw):
        return "TIMEZONE('utc', CURRENT_TIMESTAMP)"

    @compiles(utcnow, 'mssql')
    def ms_utcnow(element, compiler, **kw):
        return "GETUTCDATE()"

Example usage::

    from sqlalchemy import (
        Table, Column, Integer, String, DateTime, MetaData
    )
    metadata = MetaData()
    event = Table("event", metadata,
        Column("id", Integer, primary_key=True),
        Column("description", String(50), nullable=False),
        Column("timestamp", DateTime, server_default=utcnow())
    )

"GREATEST" function
-------------------

The "GREATEST" function is given any number of arguments and returns the one
that is of the highest value - it's equivalent to Python's ``max``
function.  A SQL standard version versus a CASE based version which only
accommodates two arguments::

    from sqlalchemy.sql import expression
    from sqlalchemy.ext.compiler import compiles
    from sqlalchemy.types import Numeric

    class greatest(expression.FunctionElement):
        type = Numeric()
        name = 'greatest'

    @compiles(greatest)
    def default_greatest(element, compiler, **kw):
        return compiler.visit_function(element)

    @compiles(greatest, 'sqlite')
    @compiles(greatest, 'mssql')
    @compiles(greatest, 'oracle')
    def case_greatest(element, compiler, **kw):
        arg1, arg2 = list(element.clauses)
        return "CASE WHEN %s > %s THEN %s ELSE %s END" % (
            compiler.process(arg1),
            compiler.process(arg2),
            compiler.process(arg1),
            compiler.process(arg2),
        )

Example usage::

    Session.query(Account).\
        filter(
            greatest(
                Account.checking_balance,
                Account.savings_balance) > 10000
        )

"false" expression
------------------

Render a "false" constant expression, rendering as "0" on platforms that
don't have a "false" constant::

    from sqlalchemy.sql import expression
    from sqlalchemy.ext.compiler import compiles

    class sql_false(expression.ColumnElement):
        pass

    @compiles(sql_false)
    def default_false(element, compiler, **kw):
        return "false"

    @compiles(sql_false, 'mssql')
    @compiles(sql_false, 'mysql')
    @compiles(sql_false, 'oracle')
    def int_false(element, compiler, **kw):
        return "0"

Example usage::

    from sqlalchemy import select, union_all

    exp = union_all(
        select([users.c.name, sql_false().label("enrolled")]),
        select([customers.c.name, customers.c.enrolled])
    )

"""
from .. import exc
from ..sql import visitors


def compiles(class_, *specs):
    """Register a function as a compiler for a
    given :class:`.ClauseElement` type."""

    def decorate(fn):
        # get an existing @compiles handler
        existing = class_.__dict__.get('_compiler_dispatcher', None)

        # get the original handler.  All ClauseElement classes have one
        # of these, but some TypeEngine classes will not.
        existing_dispatch = getattr(class_, '_compiler_dispatch', None)

        if not existing:
            existing = _dispatcher()

            if existing_dispatch:
                def _wrap_existing_dispatch(element, compiler, **kw):
                    try:
                        return existing_dispatch(element, compiler, **kw)
                    except exc.UnsupportedCompilationError:
                        raise exc.CompileError(
                            "%s construct has no default "
                            "compilation handler." % type(element))
                existing.specs['default'] = _wrap_existing_dispatch

            # TODO: why is the lambda needed ?
            setattr(class_, '_compiler_dispatch',
                    lambda *arg, **kw: existing(*arg, **kw))
            setattr(class_, '_compiler_dispatcher', existing)

        if specs:
            for s in specs:
                existing.specs[s] = fn

        else:
            existing.specs['default'] = fn
        return fn
    return decorate


def deregister(class_):
    """Remove all custom compilers associated with a given
    :class:`.ClauseElement` type."""

    if hasattr(class_, '_compiler_dispatcher'):
        # regenerate default _compiler_dispatch
        visitors._generate_dispatch(class_)
        # remove custom directive
        del class_._compiler_dispatcher


class _dispatcher(object):
    def __init__(self):
        self.specs = {}

    def __call__(self, element, compiler, **kw):
        # TODO: yes, this could also switch off of DBAPI in use.
        fn = self.specs.get(compiler.dialect.name, None)
        if not fn:
            try:
                fn = self.specs['default']
            except KeyError:
                raise exc.CompileError(
                    "%s construct has no default "
                    "compilation handler." % type(element))

        return fn(element, compiler, **kw)
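

# --- Editor's example (not part of the original module) ---------------------
# End-to-end use of the registration machinery above: @compiles installs a
# _dispatcher on the class, and deregister() restores default compilation.
def _example_register_and_deregister():
    from sqlalchemy.sql.expression import ColumnClause

    class MyColumn(ColumnClause):
        pass

    @compiles(MyColumn)
    def _compile_mycolumn(element, compiler, **kw):
        # bracket-quote the column name, as in the synopsis above
        return "[%s]" % element.name

    # ... MyColumn('x') now renders as [x] within any statement ...

    deregister(MyColumn)  # regenerates the default _compiler_dispatch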
@@ -1,940 +0,0 @@
"""
Synopsis
========

SQLAlchemy object-relational configuration involves the use of
:class:`~sqlalchemy.schema.Table`, :func:`~sqlalchemy.orm.mapper`, and
class objects to define the three areas of configuration.
:mod:`~sqlalchemy.ext.declarative` allows all three types of
configuration to be expressed declaratively on an individual
mapped class.  Regular SQLAlchemy schema elements and ORM constructs
are used in most cases.

As a simple example::

    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class SomeClass(Base):
        __tablename__ = 'some_table'
        id = Column(Integer, primary_key=True)
        name = Column(String(50))

Above, the :func:`declarative_base` callable returns a new base class
from which all mapped classes should inherit.  When the class definition
is completed, a new :class:`~sqlalchemy.schema.Table` and
:class:`~sqlalchemy.orm.mapper` will have been generated, accessible
via the ``__table__`` and ``__mapper__`` attributes on the ``SomeClass``
class.

Defining Attributes
===================

In the above example, the :class:`~sqlalchemy.schema.Column` objects are
automatically named with the name of the attribute to which they are
assigned.

They can also be explicitly named, and that name does not have to be
the same as the name assigned on the class.
The column will be assigned to the :class:`~sqlalchemy.schema.Table` using
the given name, and mapped to the class using the attribute name::

    class SomeClass(Base):
        __tablename__ = 'some_table'
        id = Column("some_table_id", Integer, primary_key=True)
        name = Column("name", String(50))

Attributes may be added to the class after its construction, and they will
be added to the underlying :class:`~sqlalchemy.schema.Table` and
:func:`~sqlalchemy.orm.mapper()` definitions as appropriate::

    SomeClass.data = Column('data', Unicode)
    SomeClass.related = relationship(RelatedInfo)

Classes which are mapped explicitly using
:func:`~sqlalchemy.orm.mapper()` can interact freely with declarative
classes.

It is recommended, though not required, that all tables
share the same underlying :class:`~sqlalchemy.schema.MetaData` object,
so that string-configured :class:`~sqlalchemy.schema.ForeignKey`
references can be resolved without issue.

Association of Metadata and Engine
==================================

The :func:`declarative_base` base class contains a
:class:`~sqlalchemy.schema.MetaData` object where newly
defined :class:`~sqlalchemy.schema.Table` objects are collected.  This
is accessed via the :class:`~sqlalchemy.schema.MetaData` class level
accessor, so to create tables we can say::

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)

The :class:`~sqlalchemy.engine.base.Engine` created above may also be
directly associated with the declarative base class using the ``bind``
keyword argument, where it will be associated with the underlying
:class:`~sqlalchemy.schema.MetaData` object and allow SQL operations
involving that metadata and its tables to make use of that engine
automatically::

    Base = declarative_base(bind=create_engine('sqlite://'))

Alternatively, by way of the normal
:class:`~sqlalchemy.schema.MetaData` behaviour, the ``bind`` attribute
of the class level accessor can be assigned at any time as follows::

    Base.metadata.bind = create_engine('sqlite://')

The :func:`declarative_base` can also receive a pre-created
:class:`~sqlalchemy.schema.MetaData` object, which allows a
declarative setup to be associated with an already
existing traditional collection of :class:`~sqlalchemy.schema.Table`
objects::

    mymetadata = MetaData()
    Base = declarative_base(metadata=mymetadata)

Configuring Relationships
=========================

Relationships to other classes are done in the usual way, with the added
feature that the class specified to :func:`~sqlalchemy.orm.relationship`
may be a string name (note that :func:`~sqlalchemy.orm.relationship` is
only available as of SQLAlchemy 0.6beta2, and in all prior versions is known
as :func:`~sqlalchemy.orm.relation`,
including 0.5 and 0.4).  The "class registry" associated with ``Base``
is used at mapper compilation time to resolve the name into the actual
class object, which is expected to have been defined once the mapper
configuration is used::

    class User(Base):
        __tablename__ = 'users'

        id = Column(Integer, primary_key=True)
        name = Column(String(50))
        addresses = relationship("Address", backref="user")

    class Address(Base):
        __tablename__ = 'addresses'

        id = Column(Integer, primary_key=True)
        email = Column(String(50))
        user_id = Column(Integer, ForeignKey('users.id'))

Column constructs, since they are just that, are immediately usable,
as below where we define a primary join condition on the ``Address``
class using them::

    class Address(Base):
        __tablename__ = 'addresses'

        id = Column(Integer, primary_key=True)
        email = Column(String(50))
        user_id = Column(Integer, ForeignKey('users.id'))
        user = relationship(User, primaryjoin=user_id == User.id)

In addition to the main argument for :func:`~sqlalchemy.orm.relationship`,
other arguments which depend upon the columns present on an as-yet
undefined class may also be specified as strings.  These strings are
evaluated as Python expressions.  The full namespace available within
this evaluation includes all classes mapped for this declarative base,
as well as the contents of the ``sqlalchemy`` package, including
expression functions like :func:`~sqlalchemy.sql.expression.desc` and
:attr:`~sqlalchemy.sql.expression.func`::

    class User(Base):
        # ....
        addresses = relationship("Address",
                                 order_by="desc(Address.email)",
                                 primaryjoin="Address.user_id==User.id")

As an alternative to string-based attributes, attributes may also be
defined after all classes have been created.  Just add them to the target
class after the fact::

    User.addresses = relationship(Address,
                                  primaryjoin=Address.user_id == User.id)

Configuring Many-to-Many Relationships
======================================

There's nothing special about many-to-many with declarative.  The
``secondary`` argument to :func:`~sqlalchemy.orm.relationship` still
requires a :class:`~sqlalchemy.schema.Table` object, not a declarative
class.  The :class:`~sqlalchemy.schema.Table` should share the same
:class:`~sqlalchemy.schema.MetaData` object used by the declarative
base::

    keywords = Table(
        'keywords', Base.metadata,
        Column('author_id', Integer, ForeignKey('authors.id')),
        Column('keyword_id', Integer, ForeignKey('keywords.id'))
    )

    class Author(Base):
        __tablename__ = 'authors'
        id = Column(Integer, primary_key=True)
        keywords = relationship("Keyword", secondary=keywords)

You should generally **not** map a class and also specify its table in
a many-to-many relationship, since the ORM may issue duplicate INSERT and
DELETE statements.

Defining Synonyms
=================

Synonyms are introduced in :ref:`synonyms`.  To define a getter/setter
which proxies to an underlying attribute, use
:func:`~sqlalchemy.orm.synonym` with the ``descriptor`` argument::

    class MyClass(Base):
        __tablename__ = 'sometable'

        _attr = Column('attr', String)

        def _get_attr(self):
            return self._some_attr

        def _set_attr(self, attr):
            self._some_attr = attr

        attr = synonym('_attr', descriptor=property(_get_attr, _set_attr))

The above synonym is then usable as an instance attribute as well as a
class-level expression construct::

    x = MyClass()
    x.attr = "some value"
    session.query(MyClass).filter(MyClass.attr == 'some other value').all()

For simple getters, the :func:`synonym_for` decorator can be used in
conjunction with ``@property``::

    class MyClass(Base):
        __tablename__ = 'sometable'

        _attr = Column('attr', String)

        @synonym_for('_attr')
        @property
        def attr(self):
            return self._some_attr

Similarly, :func:`comparable_using` is a front end for the
:func:`~sqlalchemy.orm.comparable_property` ORM function::

    class MyClass(Base):
        __tablename__ = 'sometable'

        name = Column('name', String)

        @comparable_using(MyUpperCaseComparator)
        @property
        def uc_name(self):
            return self.name.upper()

Table Configuration
===================

Table arguments other than the name, metadata, and mapped Column
arguments are specified using the ``__table_args__`` class attribute.
This attribute accommodates both positional as well as keyword
arguments that are normally sent to the
:class:`~sqlalchemy.schema.Table` constructor.
The attribute can be specified in one of two forms.  One is as a
dictionary::

    class MyClass(Base):
        __tablename__ = 'sometable'
        __table_args__ = {'mysql_engine': 'InnoDB'}

The other, a tuple of the form
``(arg1, arg2, ..., {kwarg1: value, ...})``, which allows positional
arguments to be specified as well (usually constraints)::

    class MyClass(Base):
        __tablename__ = 'sometable'
        __table_args__ = (
            ForeignKeyConstraint(['id'], ['remote_table.id']),
            UniqueConstraint('foo'),
            {'autoload': True}
        )

Note that the keyword parameters dictionary is required in the tuple
form even if empty.

As an alternative to ``__tablename__``, a direct
:class:`~sqlalchemy.schema.Table` construct may be used.  The
:class:`~sqlalchemy.schema.Column` objects, which in this case require
their names, will be added to the mapping just like a regular mapping
to a table::

    class MyClass(Base):
        __table__ = Table('my_table', Base.metadata,
            Column('id', Integer, primary_key=True),
            Column('name', String(50))
        )

Mapper Configuration
|
||||
====================
|
||||
|
||||
Configuration of mappers is done with the
|
||||
:func:`~sqlalchemy.orm.mapper` function and all the possible mapper
|
||||
configuration parameters can be found in the documentation for that
|
||||
function.
|
||||
|
||||
:func:`~sqlalchemy.orm.mapper` is still used by declaratively mapped
|
||||
classes and keyword parameters to the function can be passed by
|
||||
placing them in the ``__mapper_args__`` class variable::
|
||||
|
||||
class Widget(Base):
|
||||
__tablename__ = 'widgets'
|
||||
id = Column(Integer, primary_key=True)
|
||||
|
||||
        __mapper_args__ = {'extension': MyWidgetExtension()}

Inheritance Configuration
=========================

Declarative supports all three forms of inheritance as intuitively
as possible. The ``inherits`` mapper keyword argument is not needed
as declarative will determine this from the class itself. The various
"polymorphic" keyword arguments are specified using ``__mapper_args__``.

Joined Table Inheritance
~~~~~~~~~~~~~~~~~~~~~~~~

Joined table inheritance is defined as a subclass that defines its own
table::

    class Person(Base):
        __tablename__ = 'people'
        id = Column(Integer, primary_key=True)
        discriminator = Column('type', String(50))
        __mapper_args__ = {'polymorphic_on': discriminator}

    class Engineer(Person):
        __tablename__ = 'engineers'
        __mapper_args__ = {'polymorphic_identity': 'engineer'}
        id = Column(Integer, ForeignKey('people.id'), primary_key=True)
        primary_language = Column(String(50))

Note that above, the ``Engineer.id`` attribute, since it shares the
same attribute name as the ``Person.id`` attribute, will in fact
represent the ``people.id`` and ``engineers.id`` columns together, and
will render inside a query as ``"people.id"``.

To provide the ``Engineer`` class with an attribute that represents
only the ``engineers.id`` column, give it a different attribute name::

    class Engineer(Person):
        __tablename__ = 'engineers'
        __mapper_args__ = {'polymorphic_identity': 'engineer'}
        engineer_id = Column('id', Integer, ForeignKey('people.id'), primary_key=True)
        primary_language = Column(String(50))

Single Table Inheritance
~~~~~~~~~~~~~~~~~~~~~~~~

Single table inheritance is defined as a subclass that does not have
its own table; you just leave out the ``__table__`` and ``__tablename__``
attributes::

    class Person(Base):
        __tablename__ = 'people'
        id = Column(Integer, primary_key=True)
        discriminator = Column('type', String(50))
        __mapper_args__ = {'polymorphic_on': discriminator}

    class Engineer(Person):
        __mapper_args__ = {'polymorphic_identity': 'engineer'}
        primary_language = Column(String(50))

When the above mappers are configured, the ``Person`` class is mapped
to the ``people`` table *before* the ``primary_language`` column is
defined, and this column will not be included in its own mapping.
When ``Engineer`` then defines the ``primary_language`` column, the
column is added to the ``people`` table so that it is included in the
mapping for ``Engineer`` and is also part of the table's full set of
columns. Columns which are not mapped to ``Person`` are also excluded
from any other single or joined inheriting classes using the
``exclude_properties`` mapper argument. Below, ``Manager`` will have
all the attributes of ``Person`` and ``Manager`` but *not* the
``primary_language`` attribute of ``Engineer``::

    class Manager(Person):
        __mapper_args__ = {'polymorphic_identity': 'manager'}
        golf_swing = Column(String(50))

The attribute exclusion logic is provided by the
``exclude_properties`` mapper argument, and declarative's default
behavior can be disabled by passing an explicit ``exclude_properties``
collection (empty or otherwise) to the ``__mapper_args__``.
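
For example, passing an empty collection disables the exclusion
entirely (a minimal sketch; the ``Engineer`` mapping above is assumed)::

    class Manager(Person):
        __mapper_args__ = {
            'polymorphic_identity': 'manager',
            'exclude_properties': ()}
        golf_swing = Column(String(50))

With an empty ``exclude_properties``, ``Manager`` would also map the
``primary_language`` column that ``Engineer`` added to ``people``.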

Concrete Table Inheritance
~~~~~~~~~~~~~~~~~~~~~~~~~~

Concrete inheritance is defined as a subclass which has its own table
and sets the ``concrete`` keyword argument to ``True``::

    class Person(Base):
        __tablename__ = 'people'
        id = Column(Integer, primary_key=True)
        name = Column(String(50))

    class Engineer(Person):
        __tablename__ = 'engineers'
        __mapper_args__ = {'concrete': True}
        id = Column(Integer, primary_key=True)
        primary_language = Column(String(50))
        name = Column(String(50))

Usage of an abstract base class is a little less straightforward as it
requires usage of :func:`~sqlalchemy.orm.util.polymorphic_union`::

    engineers = Table('engineers', Base.metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(50)),
        Column('primary_language', String(50))
    )
    managers = Table('managers', Base.metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(50)),
        Column('golf_swing', String(50))
    )

    punion = polymorphic_union({
        'engineer': engineers,
        'manager': managers
    }, 'type', 'punion')

    class Person(Base):
        __table__ = punion
        __mapper_args__ = {'polymorphic_on': punion.c.type}

    class Engineer(Person):
        __table__ = engineers
        __mapper_args__ = {'polymorphic_identity': 'engineer', 'concrete': True}

    class Manager(Person):
        __table__ = managers
        __mapper_args__ = {'polymorphic_identity': 'manager', 'concrete': True}


Mix-in Classes
==============

A common need when using :mod:`~sqlalchemy.ext.declarative` is to
share some functionality, often a set of columns, across many
classes. The normal Python idiom would be to put this common code into
a base class and have all the other classes subclass this class.

When using :mod:`~sqlalchemy.ext.declarative`, this need is met by
using a "mix-in class". A mix-in class is one that isn't mapped to a
table and doesn't subclass the declarative :class:`Base`. For example::

    class MyMixin(object):

        __table_args__ = {'mysql_engine': 'InnoDB'}
        __mapper_args__ = dict(always_refresh=True)
        id = Column(Integer, primary_key=True)

        def foo(self):
            return 'bar' + str(self.id)

    class MyModel(Base, MyMixin):
        __tablename__ = 'test'
        name = Column(String(1000), nullable=False, index=True)

As the above example shows, ``__table_args__`` and ``__mapper_args__``
can both be abstracted out into a mix-in if you use common values for
these across many classes.

However, particularly in the case of ``__table_args__``, you may want
to combine some parameters from several mix-ins with those you wish to
define on the class itself. To help with this, a
:func:`~sqlalchemy.util.classproperty` decorator is provided that lets
you implement a class property with a function. For example::

    from sqlalchemy.util import classproperty

    class MySQLSettings:
        __table_args__ = {'mysql_engine': 'InnoDB'}

    class MyOtherMixin:
        __table_args__ = {'info': 'foo'}

    class MyModel(Base, MySQLSettings, MyOtherMixin):
        __tablename__ = 'my_model'

        @classproperty
        def __table_args__(self):
            args = dict()
            args.update(MySQLSettings.__table_args__)
            args.update(MyOtherMixin.__table_args__)
            return args

        id = Column(Integer, primary_key=True)


Class Constructor
=================

As a convenience feature, the :func:`declarative_base` sets a default
constructor on classes which takes keyword arguments, and assigns them
to the named attributes::

    e = Engineer(primary_language='python')
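
Only keyword arguments that match mapped attributes are accepted;
anything else raises ``TypeError``. A minimal sketch, assuming the
``Engineer`` mapping above::

    e = Engineer(primary_language='python')  # sets the attribute
    e = Engineer(nonexistent='x')            # raises TypeError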

Sessions
========

Note that ``declarative`` does nothing special with sessions, and is
only intended as an easier way to configure mappers and
:class:`~sqlalchemy.schema.Table` objects. A typical application
setup using :func:`~sqlalchemy.orm.scoped_session` might look like::

    engine = create_engine('postgresql://scott:tiger@localhost/test')
    Session = scoped_session(sessionmaker(autocommit=False,
                                          autoflush=False,
                                          bind=engine))
    Base = declarative_base()

Mapped instances are then used with the
:class:`~sqlalchemy.orm.session.Session` in the usual way.
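
A minimal usage sketch, assuming the ``Engineer`` mapping from above::

    session = Session()
    session.add(Engineer(primary_language='python'))
    session.commit()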

"""

from sqlalchemy.schema import Table, Column, MetaData
from sqlalchemy.orm import synonym as _orm_synonym, mapper, comparable_property, class_mapper
from sqlalchemy.orm.interfaces import MapperProperty
from sqlalchemy.orm.properties import RelationshipProperty, ColumnProperty
from sqlalchemy.orm.util import _is_mapped_class
from sqlalchemy import util, exceptions
from sqlalchemy.sql import util as sql_util


__all__ = 'declarative_base', 'synonym_for', 'comparable_using', 'instrument_declarative'


def instrument_declarative(cls, registry, metadata):
    """Given a class, configure the class declaratively,
    using the given registry, which can be any dictionary, and
    MetaData object.

    """
    if '_decl_class_registry' in cls.__dict__:
        raise exceptions.InvalidRequestError(
            "Class %r already has been "
            "instrumented declaratively" % cls)
    cls._decl_class_registry = registry
    cls.metadata = metadata
    _as_declarative(cls, cls.__name__, cls.__dict__)
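
# Illustrative usage sketch (not part of the original module): applying
# declarative instrumentation to a plain class by hand. The class
# ``Widget`` and its attributes are hypothetical.
#
#     registry = {}
#     metadata = MetaData()
#
#     class Widget(object):
#         __tablename__ = 'widgets'
#         id = Column(Integer, primary_key=True)
#
#     instrument_declarative(Widget, registry, metadata)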


def _as_declarative(cls, classname, dict_):
    # dict_ will be a dictproxy, which we can't write to, and we need to!
    dict_ = dict(dict_)

    column_copies = dict()
    unmapped_mixins = False
    for base in cls.__bases__:
        names = dir(base)
        if not _is_mapped_class(base):
            unmapped_mixins = True
            for name in names:
                obj = getattr(base, name, None)
                if isinstance(obj, Column):
                    if obj.foreign_keys:
                        raise exceptions.InvalidRequestError(
                            "Columns with foreign keys to other columns "
                            "are not allowed on declarative mixins at this time."
                        )
                    dict_[name] = column_copies[obj] = obj.copy()
                elif isinstance(obj, RelationshipProperty):
                    raise exceptions.InvalidRequestError(
                        "relationships are not allowed on "
                        "declarative mixins at this time.")

    # doing it this way enables these attributes to be descriptors
    get_mapper_args = '__mapper_args__' in dict_
    get_table_args = '__table_args__' in dict_
    if unmapped_mixins:
        get_mapper_args = get_mapper_args or getattr(cls, '__mapper_args__', None)
        get_table_args = get_table_args or getattr(cls, '__table_args__', None)
        tablename = getattr(cls, '__tablename__', None)
        if tablename:
            # subtle: if tablename is a descriptor here, we actually
            # put the wrong value in, but it serves as a marker to get
            # the right value...
            dict_['__tablename__'] = tablename

    # now that we know whether or not to get these, get them from the class
    # if we should, enabling them to be decorators
    mapper_args = get_mapper_args and cls.__mapper_args__ or {}
    table_args = get_table_args and cls.__table_args__ or None

    # make sure that column copies are used rather than the original columns
    # from any mixins
    for k, v in mapper_args.iteritems():
        mapper_args[k] = column_copies.get(v, v)

    cls._decl_class_registry[classname] = cls
    our_stuff = util.OrderedDict()
    for k in dict_:
        value = dict_[k]
        if (isinstance(value, tuple) and len(value) == 1 and
                isinstance(value[0], (Column, MapperProperty))):
            util.warn("Ignoring declarative-like tuple value of attribute "
                      "%s: possibly a copy-and-paste error with a comma "
                      "left at the end of the line?" % k)
            continue
        if not isinstance(value, (Column, MapperProperty)):
            continue
        prop = _deferred_relationship(cls, value)
        our_stuff[k] = prop

    # set up attributes in the order they were created
    our_stuff.sort(key=lambda key: our_stuff[key]._creation_order)

    # extract columns from the class dict
    cols = []
    for key, c in our_stuff.iteritems():
        if isinstance(c, ColumnProperty):
            for col in c.columns:
                if isinstance(col, Column) and col.table is None:
                    _undefer_column_name(key, col)
                    cols.append(col)
        elif isinstance(c, Column):
            _undefer_column_name(key, c)
            cols.append(c)
            # if the column is the same name as the key,
            # remove it from the explicit properties dict.
            # the normal rules for assigning column-based properties
            # will take over, including precedence of columns
            # in multi-column ColumnProperties.
            if key == c.key:
                del our_stuff[key]

    table = None
    if '__table__' not in dict_:
        if '__tablename__' in dict_:
            # see above: if __tablename__ is a descriptor, this
            # means we get the right value used!
            tablename = cls.__tablename__

            if isinstance(table_args, dict):
                args, table_kw = (), table_args
            elif isinstance(table_args, tuple):
                args = table_args[0:-1]
                table_kw = table_args[-1]
                if len(table_args) < 2 or not isinstance(table_kw, dict):
                    raise exceptions.ArgumentError(
                        "Tuple form of __table_args__ is "
                        "(arg1, arg2, arg3, ..., {'kw1':val1, 'kw2':val2, ...})"
                    )
            else:
                args, table_kw = (), {}

            autoload = dict_.get('__autoload__')
            if autoload:
                table_kw['autoload'] = True

            cls.__table__ = table = Table(tablename, cls.metadata,
                                          *(tuple(cols) + tuple(args)),
                                          **table_kw)
    else:
        table = cls.__table__
        if cols:
            for c in cols:
                if not table.c.contains_column(c):
                    raise exceptions.ArgumentError(
                        "Can't add additional column %r when "
                        "specifying __table__" % c.key
                    )

    if 'inherits' not in mapper_args:
        for c in cls.__bases__:
            if _is_mapped_class(c):
                mapper_args['inherits'] = cls._decl_class_registry.get(
                    c.__name__, None)
                break

    if hasattr(cls, '__mapper_cls__'):
        mapper_cls = util.unbound_method_to_callable(cls.__mapper_cls__)
    else:
        mapper_cls = mapper

    if table is None and 'inherits' not in mapper_args:
        raise exceptions.InvalidRequestError(
            "Class %r does not have a __table__ or __tablename__ "
            "specified and does not inherit from an existing table-mapped class." % cls
        )

    elif 'inherits' in mapper_args and not mapper_args.get('concrete', False):
        inherited_mapper = class_mapper(mapper_args['inherits'], compile=False)
        inherited_table = inherited_mapper.local_table
        if 'inherit_condition' not in mapper_args and table is not None:
            # figure out the inherit condition with relaxed rules
            # about nonexistent tables, to allow for ForeignKeys to
            # not-yet-defined tables (since we know for sure that our
            # parent table is defined within the same MetaData)
            mapper_args['inherit_condition'] = sql_util.join_condition(
                mapper_args['inherits'].__table__, table,
                ignore_nonexistent_tables=True)

        if table is None:
            # single table inheritance.
            # ensure no table args
            if table_args is not None:
                raise exceptions.ArgumentError(
                    "Can't place __table_args__ on an inherited class with no table."
                )

            # add any columns declared here to the inherited table.
            for c in cols:
                if c.primary_key:
                    raise exceptions.ArgumentError(
                        "Can't place primary key columns on an inherited class with no table."
                    )
                if c.name in inherited_table.c:
                    raise exceptions.ArgumentError(
                        "Column '%s' on class %s conflicts with existing column '%s'" %
                        (c, cls, inherited_table.c[c.name])
                    )
                inherited_table.append_column(c)

        # single or joined inheritance
        # exclude any cols on the inherited table which are not mapped on the
        # parent class, to avoid
        # mapping columns specific to sibling/nephew classes
        inherited_mapper = class_mapper(mapper_args['inherits'], compile=False)
        inherited_table = inherited_mapper.local_table

        if 'exclude_properties' not in mapper_args:
            mapper_args['exclude_properties'] = exclude_properties = \
                set([c.key for c in inherited_table.c
                     if c not in inherited_mapper._columntoproperty])
            exclude_properties.difference_update([c.key for c in cols])

    cls.__mapper__ = mapper_cls(cls, table, properties=our_stuff, **mapper_args)


class DeclarativeMeta(type):
    def __init__(cls, classname, bases, dict_):
        if '_decl_class_registry' in cls.__dict__:
            return type.__init__(cls, classname, bases, dict_)

        _as_declarative(cls, classname, cls.__dict__)
        return type.__init__(cls, classname, bases, dict_)

    def __setattr__(cls, key, value):
        if '__mapper__' in cls.__dict__:
            if isinstance(value, Column):
                _undefer_column_name(key, value)
                cls.__table__.append_column(value)
                cls.__mapper__.add_property(key, value)
            elif isinstance(value, ColumnProperty):
                for col in value.columns:
                    if isinstance(col, Column) and col.table is None:
                        _undefer_column_name(key, col)
                        cls.__table__.append_column(col)
                cls.__mapper__.add_property(key, value)
            elif isinstance(value, MapperProperty):
                cls.__mapper__.add_property(key, _deferred_relationship(cls, value))
            else:
                type.__setattr__(cls, key, value)
        else:
            type.__setattr__(cls, key, value)


class _GetColumns(object):
    def __init__(self, cls):
        self.cls = cls

    def __getattr__(self, key):
        mapper = class_mapper(self.cls, compile=False)
        if mapper:
            prop = mapper.get_property(key)
            if not isinstance(prop, ColumnProperty):
                raise exceptions.InvalidRequestError(
                    "Property %r is not an instance of"
                    " ColumnProperty (i.e. does not correspond"
                    " directly to a Column)." % key)
        return getattr(self.cls, key)


def _deferred_relationship(cls, prop):
    def resolve_arg(arg):
        import sqlalchemy

        def access_cls(key):
            if key in cls._decl_class_registry:
                return _GetColumns(cls._decl_class_registry[key])
            elif key in cls.metadata.tables:
                return cls.metadata.tables[key]
            else:
                return sqlalchemy.__dict__[key]

        d = util.PopulateDict(access_cls)
        def return_cls():
            try:
                x = eval(arg, globals(), d)

                if isinstance(x, _GetColumns):
                    return x.cls
                else:
                    return x
            except NameError, n:
                raise exceptions.InvalidRequestError(
                    "When compiling mapper %s, expression %r failed to locate a name (%r). "
                    "If this is a class name, consider adding this relationship() to the %r "
                    "class after both dependent classes have been defined." % (
                        prop.parent, arg, n.args[0], cls))
        return return_cls

    if isinstance(prop, RelationshipProperty):
        for attr in ('argument', 'order_by', 'primaryjoin', 'secondaryjoin',
                     'secondary', '_foreign_keys', 'remote_side'):
            v = getattr(prop, attr)
            if isinstance(v, basestring):
                setattr(prop, attr, resolve_arg(v))

        if prop.backref and isinstance(prop.backref, tuple):
            key, kwargs = prop.backref
            for attr in ('primaryjoin', 'secondaryjoin', 'secondary',
                         'foreign_keys', 'remote_side', 'order_by'):
                if attr in kwargs and isinstance(kwargs[attr], basestring):
                    kwargs[attr] = resolve_arg(kwargs[attr])

    return prop


def synonym_for(name, map_column=False):
    """Decorator, make a Python @property a query synonym for a column.

    A decorator version of :func:`~sqlalchemy.orm.synonym`. The function being
    decorated is the 'descriptor', otherwise passes its arguments through
    to synonym()::

        @synonym_for('col')
        @property
        def prop(self):
            return 'special sauce'

    The regular ``synonym()`` is also usable directly in a declarative setting
    and may be convenient for read/write properties::

        prop = synonym('col', descriptor=property(_read_prop, _write_prop))

    """
    def decorate(fn):
        return _orm_synonym(name, map_column=map_column, descriptor=fn)
    return decorate


def comparable_using(comparator_factory):
    """Decorator, allow a Python @property to be used in query criteria.

    This is a decorator front end to
    :func:`~sqlalchemy.orm.comparable_property` that passes
    through the comparator_factory and the function being decorated::

        @comparable_using(MyComparatorType)
        @property
        def prop(self):
            return 'special sauce'

    The regular ``comparable_property()`` is also usable directly in a
    declarative setting and may be convenient for read/write properties::

        prop = comparable_property(MyComparatorType)

    """
    def decorate(fn):
        return comparable_property(comparator_factory, fn)
    return decorate


def _declarative_constructor(self, **kwargs):
    """A simple constructor that allows initialization from kwargs.

    Sets attributes on the constructed instance using the names and
    values in ``kwargs``.

    Only keys that are present as
    attributes of the instance's class are allowed. These could be,
    for example, any mapped columns or relationships.
    """
    for k in kwargs:
        if not hasattr(type(self), k):
            raise TypeError(
                "%r is an invalid keyword argument for %s" %
                (k, type(self).__name__))
        setattr(self, k, kwargs[k])
_declarative_constructor.__name__ = '__init__'


def declarative_base(bind=None, metadata=None, mapper=None, cls=object,
                     name='Base', constructor=_declarative_constructor,
                     metaclass=DeclarativeMeta):
    """Construct a base class for declarative class definitions.

    The new base class will be given a metaclass that produces
    appropriate :class:`~sqlalchemy.schema.Table` objects and makes
    the appropriate :func:`~sqlalchemy.orm.mapper` calls based on the
    information provided declaratively in the class and any subclasses
    of the class.

    :param bind: An optional
      :class:`~sqlalchemy.engine.base.Connectable`, will be assigned
      the ``bind`` attribute on the :class:`~sqlalchemy.MetaData`
      instance.

    :param metadata:
      An optional :class:`~sqlalchemy.MetaData` instance. All
      :class:`~sqlalchemy.schema.Table` objects implicitly declared by
      subclasses of the base will share this MetaData. A MetaData instance
      will be created if none is provided. The
      :class:`~sqlalchemy.MetaData` instance will be available via the
      `metadata` attribute of the generated declarative base class.

    :param mapper:
      An optional callable, defaults to :func:`~sqlalchemy.orm.mapper`. Will be
      used to map subclasses to their Tables.

    :param cls:
      Defaults to :class:`object`. A type to use as the base for the generated
      declarative base class. May be a class or tuple of classes.

    :param name:
      Defaults to ``Base``. The display name for the generated
      class. Customizing this is not required, but can improve clarity in
      tracebacks and debugging.

    :param constructor:
      Defaults to
      :func:`~sqlalchemy.ext.declarative._declarative_constructor`, an
      __init__ implementation that assigns \**kwargs for declared
      fields and relationships to an instance. If ``None`` is supplied,
      no __init__ will be provided and construction will fall back to
      cls.__init__ by way of the normal Python semantics.

    :param metaclass:
      Defaults to :class:`DeclarativeMeta`. A metaclass or __metaclass__
      compatible callable to use as the meta type of the generated
      declarative base class.

    """
    lcl_metadata = metadata or MetaData()
    if bind:
        lcl_metadata.bind = bind

    bases = not isinstance(cls, tuple) and (cls,) or cls
    class_dict = dict(_decl_class_registry=dict(),
                      metadata=lcl_metadata)

    if constructor:
        class_dict['__init__'] = constructor
    if mapper:
        class_dict['__mapper_cls__'] = mapper

    return metaclass(name, bases, class_dict)
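
# Illustrative sketch (not part of the original module): typical use of the
# generated base; ``Base.metadata`` is the MetaData shared by all subclasses.
#
#     Base = declarative_base()
#
#     class Widget(Base):
#         __tablename__ = 'widgets'
#         id = Column(Integer, primary_key=True)
#
#     Base.metadata.create_all(engine)   # 'engine' assumed created elsewhere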


def _undefer_column_name(key, column):
    if column.key is None:
        column.key = key
    if column.name is None:
        column.name = key
@ -1,18 +0,0 @@
# ext/declarative/__init__.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from .api import declarative_base, synonym_for, comparable_using, \
    instrument_declarative, ConcreteBase, AbstractConcreteBase, \
    DeclarativeMeta, DeferredReflection, has_inherited_table, \
    declared_attr, as_declarative


__all__ = ['declarative_base', 'synonym_for', 'has_inherited_table',
           'comparable_using', 'instrument_declarative', 'declared_attr',
           'as_declarative',
           'ConcreteBase', 'AbstractConcreteBase', 'DeclarativeMeta',
           'DeferredReflection']
@ -1,696 +0,0 @@
# ext/declarative/api.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Public API functions and helpers for declarative."""


from ...schema import Table, MetaData, Column
from ...orm import synonym as _orm_synonym, \
    comparable_property, \
    interfaces, properties, attributes
from ...orm.util import polymorphic_union
from ...orm.base import _mapper_or_none
from ...util import OrderedDict, hybridmethod, hybridproperty
from ... import util
from ... import exc
import weakref

from .base import _as_declarative, \
    _declarative_constructor, \
    _DeferredMapperConfig, _add_attribute
from .clsregistry import _class_resolver


def instrument_declarative(cls, registry, metadata):
    """Given a class, configure the class declaratively,
    using the given registry, which can be any dictionary, and
    MetaData object.

    """
    if '_decl_class_registry' in cls.__dict__:
        raise exc.InvalidRequestError(
            "Class %r already has been "
            "instrumented declaratively" % cls)
    cls._decl_class_registry = registry
    cls.metadata = metadata
    _as_declarative(cls, cls.__name__, cls.__dict__)


def has_inherited_table(cls):
    """Given a class, return True if any of the classes it inherits from has a
    mapped table, otherwise return False.

    This is used in declarative mixins to build attributes that behave
    differently for the base class vs. a subclass in an inheritance
    hierarchy.

    .. seealso::

        :ref:`decl_mixin_inheritance`

    """
    for class_ in cls.__mro__[1:]:
        if getattr(class_, '__table__', None) is not None:
            return True
    return False
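
# Illustrative sketch (not part of the original module): a mixin can use
# has_inherited_table() to produce a table name only for classes at the
# base of a hierarchy, so that subclasses fall back to single table
# inheritance.  The mixin name here is hypothetical.
#
#     class Tablename(object):
#         @declared_attr
#         def __tablename__(cls):
#             if has_inherited_table(cls):
#                 return None
#             return cls.__name__.lower()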


class DeclarativeMeta(type):
    def __init__(cls, classname, bases, dict_):
        if '_decl_class_registry' not in cls.__dict__:
            _as_declarative(cls, classname, cls.__dict__)
        type.__init__(cls, classname, bases, dict_)

    def __setattr__(cls, key, value):
        _add_attribute(cls, key, value)


def synonym_for(name, map_column=False):
    """Decorator, make a Python @property a query synonym for a column.

    A decorator version of :func:`~sqlalchemy.orm.synonym`. The function being
    decorated is the 'descriptor', otherwise passes its arguments through to
    synonym()::

        @synonym_for('col')
        @property
        def prop(self):
            return 'special sauce'

    The regular ``synonym()`` is also usable directly in a declarative setting
    and may be convenient for read/write properties::

        prop = synonym('col', descriptor=property(_read_prop, _write_prop))

    """
    def decorate(fn):
        return _orm_synonym(name, map_column=map_column, descriptor=fn)
    return decorate


def comparable_using(comparator_factory):
    """Decorator, allow a Python @property to be used in query criteria.

    This is a decorator front end to
    :func:`~sqlalchemy.orm.comparable_property` that passes
    through the comparator_factory and the function being decorated::

        @comparable_using(MyComparatorType)
        @property
        def prop(self):
            return 'special sauce'

    The regular ``comparable_property()`` is also usable directly in a
    declarative setting and may be convenient for read/write properties::

        prop = comparable_property(MyComparatorType)

    """
    def decorate(fn):
        return comparable_property(comparator_factory, fn)
    return decorate


class declared_attr(interfaces._MappedAttribute, property):
    """Mark a class-level method as representing the definition of
    a mapped property or special declarative member name.

    @declared_attr turns the attribute into a scalar-like
    property that can be invoked from the uninstantiated class.
    Declarative treats attributes specifically marked with
    @declared_attr as returning a construct that is specific
    to mapping or declarative table configuration. The name
    of the attribute is that of what the non-dynamic version
    of the attribute would be.

    @declared_attr is more often than not applicable to mixins,
    to define relationships that are to be applied to different
    implementors of the class::

        class ProvidesUser(object):
            "A mixin that adds a 'user' relationship to classes."

            @declared_attr
            def user(self):
                return relationship("User")

    It also can be applied to mapped classes, such as to provide
    a "polymorphic" scheme for inheritance::

        class Employee(Base):
            id = Column(Integer, primary_key=True)
            type = Column(String(50), nullable=False)

            @declared_attr
            def __tablename__(cls):
                return cls.__name__.lower()

            @declared_attr
            def __mapper_args__(cls):
                if cls.__name__ == 'Employee':
                    return {
                        "polymorphic_on": cls.type,
                        "polymorphic_identity": "Employee"
                    }
                else:
                    return {"polymorphic_identity": cls.__name__}

    .. versionchanged:: 0.8 :class:`.declared_attr` can be used with
       non-ORM or extension attributes, such as user-defined attributes
       or :func:`.association_proxy` objects, which will be assigned
       to the class at class construction time.


    """

    def __init__(self, fget, cascading=False):
        super(declared_attr, self).__init__(fget)
        self.__doc__ = fget.__doc__
        self._cascading = cascading

    def __get__(desc, self, cls):
        # the decorated function is invoked once per class and its result
        # is memoized in the class-local '_sa_declared_attr_reg' dictionary
        reg = cls.__dict__.get('_sa_declared_attr_reg', None)
        if reg is None:
            manager = attributes.manager_of_class(cls)
            if manager is None:
                util.warn(
                    "Unmanaged access of declarative attribute %s from "
                    "non-mapped class %s" %
                    (desc.fget.__name__, cls.__name__))
            return desc.fget(cls)
        elif desc in reg:
            return reg[desc]
        else:
            reg[desc] = obj = desc.fget(cls)
            return obj

    @hybridmethod
    def _stateful(cls, **kw):
        return _stateful_declared_attr(**kw)

    @hybridproperty
    def cascading(cls):
        """Mark a :class:`.declared_attr` as cascading.

        This is a special-use modifier which indicates that a column
        or MapperProperty-based declared attribute should be configured
        distinctly per mapped subclass, within a mapped-inheritance scenario.

        Below, both MyClass as well as MySubClass will have a distinct
        ``id`` Column object established::

            class HasIdMixin(object):
                @declared_attr.cascading
                def id(cls):
                    if has_inherited_table(cls):
                        return Column(ForeignKey('myclass.id'), primary_key=True)
                    else:
                        return Column(Integer, primary_key=True)

            class MyClass(HasIdMixin, Base):
                __tablename__ = 'myclass'
                # ...

            class MySubClass(MyClass):
                ""
                # ...

        The behavior of the above configuration is that ``MySubClass``
        will refer to both its own ``id`` column as well as that of
        ``MyClass`` underneath the attribute named ``id``.

        .. seealso::

            :ref:`declarative_inheritance`

            :ref:`mixin_inheritance_columns`


        """
        return cls._stateful(cascading=True)


class _stateful_declared_attr(declared_attr):
    def __init__(self, **kw):
        self.kw = kw

    def _stateful(self, **kw):
        new_kw = self.kw.copy()
        new_kw.update(kw)
        return _stateful_declared_attr(**new_kw)

    def __call__(self, fn):
        return declared_attr(fn, **self.kw)


def declarative_base(bind=None, metadata=None, mapper=None, cls=object,
                     name='Base', constructor=_declarative_constructor,
                     class_registry=None,
                     metaclass=DeclarativeMeta):
    r"""Construct a base class for declarative class definitions.

    The new base class will be given a metaclass that produces
    appropriate :class:`~sqlalchemy.schema.Table` objects and makes
    the appropriate :func:`~sqlalchemy.orm.mapper` calls based on the
    information provided declaratively in the class and any subclasses
    of the class.

    :param bind: An optional
      :class:`~sqlalchemy.engine.Connectable`, will be assigned
      the ``bind`` attribute on the :class:`~sqlalchemy.schema.MetaData`
      instance.

    :param metadata:
      An optional :class:`~sqlalchemy.schema.MetaData` instance. All
      :class:`~sqlalchemy.schema.Table` objects implicitly declared by
      subclasses of the base will share this MetaData. A MetaData instance
      will be created if none is provided. The
      :class:`~sqlalchemy.schema.MetaData` instance will be available via the
      `metadata` attribute of the generated declarative base class.

    :param mapper:
      An optional callable, defaults to :func:`~sqlalchemy.orm.mapper`. Will
      be used to map subclasses to their Tables.

    :param cls:
      Defaults to :class:`object`. A type to use as the base for the generated
      declarative base class. May be a class or tuple of classes.

    :param name:
      Defaults to ``Base``. The display name for the generated
      class. Customizing this is not required, but can improve clarity in
      tracebacks and debugging.

    :param constructor:
      Defaults to
      :func:`~sqlalchemy.ext.declarative.base._declarative_constructor`, an
      __init__ implementation that assigns \**kwargs for declared
      fields and relationships to an instance. If ``None`` is supplied,
      no __init__ will be provided and construction will fall back to
      cls.__init__ by way of the normal Python semantics.

    :param class_registry: optional dictionary that will serve as the
      registry of class names-> mapped classes when string names
      are used to identify classes inside of :func:`.relationship`
      and others. Allows two or more declarative base classes
      to share the same registry of class names for simplified
      inter-base relationships.

    :param metaclass:
      Defaults to :class:`.DeclarativeMeta`. A metaclass or __metaclass__
      compatible callable to use as the meta type of the generated
      declarative base class.

    .. versionchanged:: 1.1 if :paramref:`.declarative_base.cls` is a
       single class (rather than a tuple), the constructed base class will
       inherit its docstring.

    .. seealso::

        :func:`.as_declarative`

    """
    lcl_metadata = metadata or MetaData()
    if bind:
        lcl_metadata.bind = bind

    if class_registry is None:
        class_registry = weakref.WeakValueDictionary()

    bases = not isinstance(cls, tuple) and (cls,) or cls
    class_dict = dict(_decl_class_registry=class_registry,
                      metadata=lcl_metadata)

    if isinstance(cls, type):
        class_dict['__doc__'] = cls.__doc__

    if constructor:
        class_dict['__init__'] = constructor
    if mapper:
        class_dict['__mapper_cls__'] = mapper

    return metaclass(name, bases, class_dict)


def as_declarative(**kw):
    """
    Class decorator for :func:`.declarative_base`.

    Provides a syntactical shortcut to the ``cls`` argument
    sent to :func:`.declarative_base`, allowing the base class
    to be converted in-place to a "declarative" base::

        from sqlalchemy.ext.declarative import as_declarative

        @as_declarative()
        class Base(object):
            @declared_attr
            def __tablename__(cls):
                return cls.__name__.lower()
            id = Column(Integer, primary_key=True)

        class MyMappedClass(Base):
            # ...

    All keyword arguments passed to :func:`.as_declarative` are passed
    along to :func:`.declarative_base`.

    .. versionadded:: 0.8.3

    .. seealso::

        :func:`.declarative_base`

    """
    def decorate(cls):
        kw['cls'] = cls
        kw['name'] = cls.__name__
        return declarative_base(**kw)

    return decorate


class ConcreteBase(object):
    """A helper class for 'concrete' declarative mappings.

    :class:`.ConcreteBase` will use the :func:`.polymorphic_union`
    function automatically, against all tables mapped as a subclass
    to this class. The function is called via the
    ``__declare_first__()`` hook, which is essentially
    a hook for the :meth:`.before_configured` event.

    :class:`.ConcreteBase` produces a mapped
    table for the class itself. Compare to :class:`.AbstractConcreteBase`,
    which does not.

    Example::

        from sqlalchemy.ext.declarative import ConcreteBase

        class Employee(ConcreteBase, Base):
            __tablename__ = 'employee'
            employee_id = Column(Integer, primary_key=True)
            name = Column(String(50))
            __mapper_args__ = {
                'polymorphic_identity': 'employee',
                'concrete': True}

        class Manager(Employee):
            __tablename__ = 'manager'
            employee_id = Column(Integer, primary_key=True)
            name = Column(String(50))
            manager_data = Column(String(40))
            __mapper_args__ = {
                'polymorphic_identity': 'manager',
                'concrete': True}

    .. seealso::

        :class:`.AbstractConcreteBase`

        :ref:`concrete_inheritance`

        :ref:`inheritance_concrete_helpers`


    """

    @classmethod
    def _create_polymorphic_union(cls, mappers):
        return polymorphic_union(OrderedDict(
            (mp.polymorphic_identity, mp.local_table)
            for mp in mappers
        ), 'type', 'pjoin')

    @classmethod
    def __declare_first__(cls):
        m = cls.__mapper__
        if m.with_polymorphic:
            return

        mappers = list(m.self_and_descendants)
        pjoin = cls._create_polymorphic_union(mappers)
        m._set_with_polymorphic(("*", pjoin))
        m._set_polymorphic_on(pjoin.c.type)
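
# Illustrative sketch (not part of the original module): with the Employee /
# Manager mapping from the ConcreteBase docstring above, polymorphic loading
# works against the generated "pjoin" union without further configuration.
#
#     session.query(Employee).all()   # returns Employee and Manager instances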


class AbstractConcreteBase(ConcreteBase):
    """A helper class for 'concrete' declarative mappings.

    :class:`.AbstractConcreteBase` will use the :func:`.polymorphic_union`
    function automatically, against all tables mapped as a subclass
    to this class. The function is called via the
    ``__declare_first__()`` hook, which is essentially
    a hook for the :meth:`.before_configured` event.

    :class:`.AbstractConcreteBase` does produce a mapped class
    for the base class, however it is not persisted to any table; it
    is instead mapped directly to the "polymorphic" selectable
    and is only used for selecting. Compare to :class:`.ConcreteBase`,
    which does create a persisted table for the base class.

    Example::

        from sqlalchemy.ext.declarative import AbstractConcreteBase

        class Employee(AbstractConcreteBase, Base):
            pass

        class Manager(Employee):
            __tablename__ = 'manager'
            employee_id = Column(Integer, primary_key=True)
            name = Column(String(50))
            manager_data = Column(String(40))

            __mapper_args__ = {
                'polymorphic_identity': 'manager',
                'concrete': True}

    The abstract base class is handled by declarative in a special way;
    at class configuration time, it behaves like a declarative mixin
    or an ``__abstract__`` base class. Once classes are configured
    and mappings are produced, it then gets mapped itself, but
    after all of its descendants. This is a system of mapping
    not found in any other SQLAlchemy feature.

    Using this approach, we can specify columns and properties
    that will take effect on mapped subclasses, in the way that
    we normally do as in :ref:`declarative_mixins`::

        class Company(Base):
            __tablename__ = 'company'
            id = Column(Integer, primary_key=True)

        class Employee(AbstractConcreteBase, Base):
            employee_id = Column(Integer, primary_key=True)

            @declared_attr
            def company_id(cls):
                return Column(ForeignKey('company.id'))

            @declared_attr
            def company(cls):
                return relationship("Company")

        class Manager(Employee):
            __tablename__ = 'manager'

            name = Column(String(50))
            manager_data = Column(String(40))

            __mapper_args__ = {
                'polymorphic_identity': 'manager',
                'concrete': True}

    When we make use of our mappings however, both ``Manager`` and
    ``Employee`` will have an independently usable ``.company`` attribute::

        session.query(Employee).filter(Employee.company.has(id=5))

    .. versionchanged:: 1.0.0 - The mechanics of :class:`.AbstractConcreteBase`
       have been reworked to support relationships established directly
       on the abstract base, without any special configurational steps.

    .. seealso::

        :class:`.ConcreteBase`

        :ref:`concrete_inheritance`

        :ref:`inheritance_concrete_helpers`


    """

    __no_table__ = True

    @classmethod
    def __declare_first__(cls):
        cls._sa_decl_prepare_nocascade()

    @classmethod
    def _sa_decl_prepare_nocascade(cls):
        if getattr(cls, '__mapper__', None):
            return

        to_map = _DeferredMapperConfig.config_for_cls(cls)

        # can't rely on 'self_and_descendants' here
        # since technically an immediate subclass
        # might not be mapped, but a more remote
        # subclass may be.
        mappers = []
        stack = list(cls.__subclasses__())
        while stack:
            klass = stack.pop()
            stack.extend(klass.__subclasses__())
            mn = _mapper_or_none(klass)
            if mn is not None:
                mappers.append(mn)
        pjoin = cls._create_polymorphic_union(mappers)

        # For columns that were declared on the class, these
        # are normally ignored with the "__no_table__" mapping,
        # unless they have a different attribute key vs. col name
        # and are in the properties argument.
        # In that case, ensure we update the properties entry
        # to the correct column from the pjoin target table.
        declared_cols = set(to_map.declared_columns)
        for k, v in list(to_map.properties.items()):
            if v in declared_cols:
                to_map.properties[k] = pjoin.c[v.key]

        to_map.local_table = pjoin

        m_args = to_map.mapper_args_fn or dict

        def mapper_args():
            args = m_args()
            args['polymorphic_on'] = pjoin.c.type
            return args
        to_map.mapper_args_fn = mapper_args

        m = to_map.map()

        for scls in cls.__subclasses__():
            sm = _mapper_or_none(scls)
            if sm and sm.concrete and cls in scls.__bases__:
                sm._set_concrete_base(m)


class DeferredReflection(object):
    """A helper class for construction of mappings based on
    a deferred reflection step.

    Normally, declarative can be used with reflection by
    setting a :class:`.Table` object using autoload=True
    as the ``__table__`` attribute on a declarative class.
    The caveat is that the :class:`.Table` must be fully
    reflected, or at the very least have a primary key column,
    at the point at which a normal declarative mapping is
    constructed, meaning the :class:`.Engine` must be available
    at class declaration time.

    The :class:`.DeferredReflection` mixin moves the construction
    of mappers to be at a later point, after a specific
    method is called which first reflects all :class:`.Table`
    objects created so far. Classes can define it as such::

        from sqlalchemy.ext.declarative import declarative_base
        from sqlalchemy.ext.declarative import DeferredReflection
        Base = declarative_base()

        class MyClass(DeferredReflection, Base):
            __tablename__ = 'mytable'

    Above, ``MyClass`` is not yet mapped. After a series of
    classes have been defined in the above fashion, all tables
    can be reflected and mappings created using
    :meth:`.prepare`::

        engine = create_engine("someengine://...")
        DeferredReflection.prepare(engine)

    The :class:`.DeferredReflection` mixin can be applied to individual
    classes, used as the base for the declarative base itself,
    or used in a custom abstract class. Using an abstract base
    allows only a subset of classes to be prepared for a
    particular prepare step, which is necessary for applications
    that use more than one engine. For example, if an application
    has two engines, you might use two bases, and prepare each
    separately, e.g.::

        class ReflectedOne(DeferredReflection, Base):
            __abstract__ = True

        class ReflectedTwo(DeferredReflection, Base):
            __abstract__ = True

        class MyClass(ReflectedOne):
            __tablename__ = 'mytable'

        class MyOtherClass(ReflectedOne):
            __tablename__ = 'myothertable'

        class YetAnotherClass(ReflectedTwo):
            __tablename__ = 'yetanothertable'

        # ... etc.

    Above, the class hierarchies for ``ReflectedOne`` and
    ``ReflectedTwo`` can be configured separately::

        ReflectedOne.prepare(engine_one)
        ReflectedTwo.prepare(engine_two)

    .. versionadded:: 0.8

    """
    @classmethod
    def prepare(cls, engine):
        """Reflect all :class:`.Table` objects for all current
        :class:`.DeferredReflection` subclasses"""

        to_map = _DeferredMapperConfig.classes_for_base(cls)
        for thingy in to_map:
            cls._sa_decl_prepare(thingy.local_table, engine)
            thingy.map()
            mapper = thingy.cls.__mapper__
            metadata = mapper.class_.metadata
            for rel in mapper._props.values():
                if isinstance(rel, properties.RelationshipProperty) and \
                        rel.secondary is not None:
                    if isinstance(rel.secondary, Table):
                        cls._reflect_table(rel.secondary, engine)
                    elif isinstance(rel.secondary, _class_resolver):
                        rel.secondary._resolvers += (
                            cls._sa_deferred_table_resolver(engine, metadata),
                        )

    @classmethod
    def _sa_deferred_table_resolver(cls, engine, metadata):
        def _resolve(key):
            t1 = Table(key, metadata)
            cls._reflect_table(t1, engine)
            return t1
        return _resolve

    @classmethod
    def _sa_decl_prepare(cls, local_table, engine):
        # autoload Table, which is already
        # present in the metadata. This
        # will fill in db-loaded columns
        # into the existing Table object.
        if local_table is not None:
            cls._reflect_table(local_table, engine)

    @classmethod
    def _reflect_table(cls, table, engine):
        Table(table.name,
              table.metadata,
              extend_existing=True,
              autoload_replace=False,
              autoload=True,
              autoload_with=engine,
              schema=table.schema)