1 | """Serializer/Deserializer objects for usage with SQLAlchemy structures. |
---|
2 | |
---|
3 | Any SQLAlchemy structure, including Tables, Columns, expressions, mappers, |
---|
4 | Query objects etc. can be serialized in a minimally-sized format, |
---|
5 | and deserialized when given a Metadata and optional ScopedSession object |
---|
6 | to use as context on the way out. |
---|
7 | |
---|
8 | Usage is nearly the same as that of the standard Python pickle module:: |
---|
9 | |
---|
10 | from sqlalchemy.ext.serializer import loads, dumps |
---|
11 | metadata = MetaData(bind=some_engine) |
---|
12 | Session = scoped_session(sessionmaker()) |
---|
13 | |
---|
14 | # ... define mappers |
---|
15 | |
---|
16 | query = Session.query(MyClass).filter(MyClass.somedata=='foo').order_by(MyClass.sortkey) |
---|
17 | |
---|
18 | # pickle the query |
---|
19 | serialized = dumps(query) |
---|
20 | |
---|
21 | # unpickle. Pass in metadata + scoped_session |
---|
22 | query2 = loads(serialized, metadata, Session) |
---|
23 | |
---|
24 | print query2.all() |
---|
25 | |
---|
26 | Similar restrictions as when using raw pickle apply; mapped classes must be |
---|
27 | themselves be pickleable, meaning they are importable from a module-level |
---|
28 | namespace. |
---|
29 | |
---|
30 | Note that instances of user-defined classes do not require this extension |
---|
31 | in order to be pickled; these contain no references to engines, sessions |
---|
32 | or expression constructs in the typical case and can be serialized directly. |
---|
33 | This module is specifically for ORM and expression constructs. |
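
For example (a sketch reusing the hypothetical ``MyClass`` mapping from
above), an individual instance can typically be round-tripped with the
standard pickle module on its own, while the Query requires ``dumps`` and
``loads`` from this extension::

    import pickle

    someobject = Session.query(MyClass).first()
    copy = pickle.loads(pickle.dumps(someobject))    # plain pickle suffices

    query = Session.query(MyClass).filter(MyClass.somedata == 'foo')
    query2 = loads(dumps(query), metadata, Session)  # extension required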

"""

from sqlalchemy.orm import class_mapper, Query
from sqlalchemy.orm.session import Session
from sqlalchemy.orm.mapper import Mapper
from sqlalchemy.orm.attributes import QueryableAttribute
from sqlalchemy import Table, Column
from sqlalchemy.engine import Engine
from sqlalchemy.util import pickle
import re
import base64
from cStringIO import StringIO

__all__ = ['Serializer', 'Deserializer', 'dumps', 'loads']

def Serializer(*args, **kw):
    pickler = pickle.Pickler(*args, **kw)

    def persistent_id(obj):
        # Return a string token for constructs that should be re-resolved at
        # load time rather than pickled by value; return None to let pickle
        # handle the object normally.
        #print "serializing:", repr(obj)
        if isinstance(obj, QueryableAttribute):
            cls = obj.impl.class_
            key = obj.impl.key
            id = "attribute:" + key + ":" + base64.b64encode(pickle.dumps(cls))
        elif isinstance(obj, Mapper) and not obj.non_primary:
            id = "mapper:" + base64.b64encode(pickle.dumps(obj.class_))
        elif isinstance(obj, Table):
            id = "table:" + str(obj)
        elif isinstance(obj, Column) and isinstance(obj.table, Table):
            id = "column:" + str(obj.table) + ":" + obj.key
        elif isinstance(obj, Session):
            id = "session:"
        elif isinstance(obj, Engine):
            id = "engine:"
        else:
            return None
        return id

    pickler.persistent_id = persistent_id
    return pickler
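
# For illustration, the persistent id tokens produced by persistent_id()
# above look roughly like the following ("users" and "name" are hypothetical
# table/column names):
#
#   "table:users"
#   "column:users:name"
#   "mapper:<base64-encoded pickle of the mapped class>"
#   "attribute:name:<base64-encoded pickle of the mapped class>"
#   "session:"
#   "engine:"
#
# Deserializer below matches these tokens with the our_ids regular expression
# and resolves them against the MetaData, scoped_session and/or Engine
# supplied at load time.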

our_ids = re.compile(r'(mapper|table|column|session|attribute|engine):(.*)')

def Deserializer(file, metadata=None, scoped_session=None, engine=None):
    unpickler = pickle.Unpickler(file)

    def get_engine():
        # Prefer an explicitly passed engine, then the scoped_session's bind,
        # then the MetaData's bind.
        if engine:
            return engine
        elif scoped_session and scoped_session().bind:
            return scoped_session().bind
        elif metadata and metadata.bind:
            return metadata.bind
        else:
            return None

    def persistent_load(id):
        # Resolve the tokens generated by Serializer's persistent_id() back
        # into live objects, using the context given to Deserializer().
        m = our_ids.match(id)
        if not m:
            return None
        else:
            type_, args = m.group(1, 2)
            if type_ == 'attribute':
                key, clsarg = args.split(":")
                cls = pickle.loads(base64.b64decode(clsarg))
                return getattr(cls, key)
            elif type_ == "mapper":
                cls = pickle.loads(base64.b64decode(args))
                return class_mapper(cls)
            elif type_ == "table":
                return metadata.tables[args]
            elif type_ == "column":
                table, colname = args.split(':')
                return metadata.tables[table].c[colname]
            elif type_ == "session":
                return scoped_session()
            elif type_ == "engine":
                return get_engine()
            else:
                raise Exception("Unknown token: %s" % type_)

    unpickler.persistent_load = persistent_load
    return unpickler

def dumps(obj):
    """Serialize the given object to a string, using Serializer."""
    buf = StringIO()
    pickler = Serializer(buf)
    pickler.dump(obj)
    return buf.getvalue()

def loads(data, metadata=None, scoped_session=None, engine=None):
    """Deserialize the given string, resolving references to tables, columns,
    mappers, the session and the engine against the given context."""
    buf = StringIO(data)
    unpickler = Deserializer(buf, metadata, scoped_session, engine)
    return unpickler.load()
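
# Example usage (a sketch): ``users`` here is a hypothetical Table attached
# to the MetaData passed to loads(), and that MetaData is assumed to be
# bound to an engine so the restored expression can be executed implicitly:
#
#     expr = users.select(users.c.name == 'ed')
#     expr2 = loads(dumps(expr), metadata)
#     print expr2.execute().fetchall()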