# ext/serializer.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Serializer/Deserializer objects for usage with SQLAlchemy query structures,
allowing "contextual" deserialization.

Any SQLAlchemy query structure, either based on sqlalchemy.sql.*
or sqlalchemy.orm.* can be used.  The mappers, Tables, Columns, Session
etc. which are referenced by the structure are not persisted in serialized
form, but are instead re-associated with the query structure
when it is deserialized.

Usage is nearly the same as that of the standard Python pickle module::

    from sqlalchemy.ext.serializer import loads, dumps
    metadata = MetaData(bind=some_engine)
    Session = scoped_session(sessionmaker())

    # ... define mappers

    query = Session.query(MyClass).
        filter(MyClass.somedata=='foo').order_by(MyClass.sortkey)

    # pickle the query
    serialized = dumps(query)

    # unpickle.  Pass in metadata + scoped_session
    query2 = loads(serialized, metadata, Session)

    print(query2.all())

Similar restrictions as when using raw pickle apply; mapped classes must
themselves be pickleable, meaning they are importable from a module-level
namespace.

The serializer module is only appropriate for query structures.  It is not
needed for:

* instances of user-defined classes.  These contain no references to engines,
  sessions or expression constructs in the typical case and can be serialized
  directly.

* Table metadata that is to be loaded entirely from the serialized structure
  (i.e. is not already declared in the application).  Regular
  pickle.loads()/dumps() can be used to fully dump any ``MetaData`` object,
  typically one which was reflected from an existing database at some previous
  point in time.  The serializer module is specifically for the opposite case,
  where the Table metadata is already present in memory.

"""
from ..orm import class_mapper
from ..orm.session import Session
from ..orm.mapper import Mapper
from ..orm.interfaces import MapperProperty
from ..orm.attributes import QueryableAttribute
from .. import Table, Column
from ..engine import Engine
from ..util import pickle, byte_buffer, b64encode, b64decode, text_type
import re
__all__ = ['Serializer', 'Deserializer', 'dumps', 'loads']
def Serializer(*args, **kw):
    """Return a ``pickle.Pickler`` that externalizes SQLAlchemy constructs.

    All positional and keyword arguments are passed through to
    ``pickle.Pickler``.  The returned pickler carries a ``persistent_id``
    hook that replaces mappers, mapped attributes, tables, columns,
    sessions and engines with string tokens, so the pickle stream holds
    no direct reference to them; ``Deserializer`` resolves the tokens
    back against live objects.
    """
    pickler = pickle.Pickler(*args, **kw)

    def persistent_id(obj):
        # Encode each recognized SQLAlchemy construct as a "type:payload"
        # token.  Returning None tells pickle to serialize the object
        # normally.  (Renamed from ``id`` to avoid shadowing the builtin.)
        if isinstance(obj, QueryableAttribute):
            cls = obj.impl.class_
            key = obj.impl.key
            # The owning class is itself pickled (it must be importable)
            # and base64-encoded so the token stays a single string.
            ident = "attribute:" + key + ":" + b64encode(pickle.dumps(cls))
        elif isinstance(obj, Mapper) and not obj.non_primary:
            ident = "mapper:" + b64encode(pickle.dumps(obj.class_))
        elif isinstance(obj, MapperProperty) and not obj.parent.non_primary:
            ident = "mapperprop:" + b64encode(pickle.dumps(obj.parent.class_)) + \
                ":" + obj.key
        elif isinstance(obj, Table):
            # Tables are resolved by key against the target MetaData on load.
            ident = "table:" + text_type(obj.key)
        elif isinstance(obj, Column) and isinstance(obj.table, Table):
            ident = "column:" + \
                text_type(obj.table.key) + ":" + text_type(obj.key)
        elif isinstance(obj, Session):
            # Sessions and engines carry no payload; the deserializer
            # substitutes whatever session/engine the caller supplies.
            ident = "session:"
        elif isinstance(obj, Engine):
            ident = "engine:"
        else:
            return None
        return ident

    pickler.persistent_id = persistent_id
    return pickler
# Matches the persistent-id tokens produced by Serializer, capturing the
# token type (group 1) and its payload (group 2).  "mapperprop" must be
# listed before "mapper" so the longer alternative wins.
our_ids = re.compile(
    r'(mapperprop|mapper|table|column|session|attribute|engine):(.*)')
def Deserializer(file, metadata=None, scoped_session=None, engine=None):
    """Return a ``pickle.Unpickler`` that re-links SQLAlchemy constructs.

    :param file: file-like object containing a stream produced by
        :func:`Serializer`.
    :param metadata: ``MetaData`` used to resolve ``table:`` and
        ``column:`` tokens.
    :param scoped_session: scoped session used for ``session:`` tokens
        and, if bound, as an engine source.
    :param engine: explicit ``Engine`` for ``engine:`` tokens.
    """
    unpickler = pickle.Unpickler(file)

    def get_engine():
        # Resolution order: explicit engine, then the session's bind,
        # then the MetaData's bind; None if nothing is available.
        if engine:
            return engine
        elif scoped_session and scoped_session().bind:
            return scoped_session().bind
        elif metadata and metadata.bind:
            return metadata.bind
        else:
            return None

    def persistent_load(ident):
        # Called by pickle for each persistent id written by Serializer.
        # (Parameter renamed from ``id`` to avoid shadowing the builtin;
        # pickle invokes this hook positionally, so callers are unaffected.)
        m = our_ids.match(text_type(ident))
        if not m:
            return None
        else:
            type_, args = m.group(1, 2)
            if type_ == 'attribute':
                key, clsarg = args.split(":")
                # NOTE: pickle.loads runs arbitrary code; only deserialize
                # streams from a trusted source.
                cls = pickle.loads(b64decode(clsarg))
                return getattr(cls, key)
            elif type_ == "mapper":
                cls = pickle.loads(b64decode(args))
                return class_mapper(cls)
            elif type_ == "mapperprop":
                mapper, keyname = args.split(':')
                cls = pickle.loads(b64decode(mapper))
                return class_mapper(cls).attrs[keyname]
            elif type_ == "table":
                return metadata.tables[args]
            elif type_ == "column":
                table, colname = args.split(':')
                return metadata.tables[table].c[colname]
            elif type_ == "session":
                return scoped_session()
            elif type_ == "engine":
                return get_engine()
            else:
                raise Exception("Unknown token: %s" % type_)
    unpickler.persistent_load = persistent_load
    return unpickler
def dumps(obj, protocol=0):
    """Serialize *obj* with :func:`Serializer` and return the bytes.

    :param obj: any picklable query structure (see module docstring).
    :param protocol: pickle protocol number; defaults to 0 for
        backward compatibility with existing serialized streams.
    """
    buf = byte_buffer()
    pickler = Serializer(buf, protocol)
    pickler.dump(obj)
    return buf.getvalue()
def loads(data, metadata=None, scoped_session=None, engine=None):
    """Deserialize bytes produced by :func:`dumps`.

    :param data: the serialized byte string.
    :param metadata: ``MetaData`` for resolving table/column tokens.
    :param scoped_session: scoped session for session tokens.
    :param engine: explicit ``Engine`` for engine tokens.
    :return: the reconstructed query structure.
    """
    buf = byte_buffer(data)
    unpickler = Deserializer(buf, metadata, scoped_session, engine)
    return unpickler.load()