root/galaxy-central/eggs/SQLAlchemy-0.5.6_dev_r6498-py2.6.egg/sqlalchemy/ext/serializer.py

リビジョン 3, 4.4 KB (コミッタ: kohda, 14 年 前)

Install Unix tools  http://hannonlab.cshl.edu/galaxy_unix_tools/galaxy.html

行番号 
1"""Serializer/Deserializer objects for usage with SQLAlchemy structures.
2
3Any SQLAlchemy structure, including Tables, Columns, expressions, mappers,
4Query objects etc. can be serialized in a minimally-sized format,
5and deserialized when given a Metadata and optional ScopedSession object
6to use as context on the way out.
7
8Usage is nearly the same as that of the standard Python pickle module::
9
10    from sqlalchemy.ext.serializer import loads, dumps
11    metadata = MetaData(bind=some_engine)
12    Session = scoped_session(sessionmaker())
13   
14    # ... define mappers
15   
16    query = Session.query(MyClass).filter(MyClass.somedata=='foo').order_by(MyClass.sortkey)
17   
18    # pickle the query
19    serialized = dumps(query)
20   
21    # unpickle.  Pass in metadata + scoped_session
22    query2 = loads(serialized, metadata, Session)
23   
24    print query2.all()
25
Similar restrictions as when using raw pickle apply; mapped classes must
themselves be pickleable, meaning they are importable from a module-level
namespace.
29
30Note that instances of user-defined classes do not require this extension
31in order to be pickled; these contain no references to engines, sessions
32or expression constructs in the typical case and can be serialized directly.
33This module is specifically for ORM and expression constructs.
34
35"""
36
37from sqlalchemy.orm import class_mapper, Query
38from sqlalchemy.orm.session import Session
39from sqlalchemy.orm.mapper import Mapper
40from sqlalchemy.orm.attributes import QueryableAttribute
41from sqlalchemy import Table, Column
42from sqlalchemy.engine import Engine
43from sqlalchemy.util import pickle
44import re
45import base64
46from cStringIO import StringIO
47
48__all__ = ['Serializer', 'Deserializer', 'dumps', 'loads']
49
def Serializer(*args, **kw):
    """Return a ``pickle.Pickler`` that tokenizes SQLAlchemy constructs.

    All arguments are passed through to ``pickle.Pickler``.  The returned
    pickler carries a ``persistent_id`` hook that replaces mappers, tables,
    columns, sessions, engines and instrumented attributes with compact
    ``"<type>:<args>"`` string tokens instead of pickling them by value;
    ``Deserializer`` resolves those tokens back against a live context.
    """
    pickler = pickle.Pickler(*args, **kw)

    def persistent_id(obj):
        # Return a token string for known SQLAlchemy constructs, or None
        # so that pickle serializes ``obj`` in the normal way.
        # (Renamed from ``id`` to avoid shadowing the builtin.)
        if isinstance(obj, QueryableAttribute):
            cls = obj.impl.class_
            key = obj.impl.key
            # The owning class is itself pickled and base64-wrapped so the
            # whole token remains a single printable, colon-delimited string.
            ident = "attribute:" + key + ":" + base64.b64encode(pickle.dumps(cls))
        elif isinstance(obj, Mapper) and not obj.non_primary:
            ident = "mapper:" + base64.b64encode(pickle.dumps(obj.class_))
        elif isinstance(obj, Table):
            ident = "table:" + str(obj)
        elif isinstance(obj, Column) and isinstance(obj.table, Table):
            ident = "column:" + str(obj.table) + ":" + obj.key
        elif isinstance(obj, Session):
            ident = "session:"
        elif isinstance(obj, Engine):
            ident = "engine:"
        else:
            return None
        return ident

    pickler.persistent_id = persistent_id
    return pickler
75   
76our_ids = re.compile(r'(mapper|table|column|session|attribute|engine):(.*)')
77
def Deserializer(file, metadata=None, scoped_session=None, engine=None):
    """Return a ``pickle.Unpickler`` that resolves Serializer tokens.

    *metadata*, *scoped_session* and *engine* supply the context used to
    turn the ``"<type>:<args>"`` persistent-id tokens written by
    ``Serializer`` back into live tables, columns, mappers, attributes,
    sessions and engines.
    """
    unpickler = pickle.Unpickler(file)

    def get_engine():
        # Resolution order: explicit engine, then the session's bind,
        # then the metadata's bind; None when no context provides one.
        if engine:
            return engine
        if scoped_session and scoped_session().bind:
            return scoped_session().bind
        if metadata and metadata.bind:
            return metadata.bind
        return None

    def persistent_load(token):
        match = our_ids.match(token)
        if not match:
            # Not one of ours; let pickle handle it.
            return None
        type_, args = match.group(1, 2)
        if type_ == 'attribute':
            key, clsarg = args.split(":")
            owner = pickle.loads(base64.b64decode(clsarg))
            return getattr(owner, key)
        if type_ == "mapper":
            return class_mapper(pickle.loads(base64.b64decode(args)))
        if type_ == "table":
            return metadata.tables[args]
        if type_ == "column":
            table, colname = args.split(':')
            return metadata.tables[table].c[colname]
        if type_ == "session":
            return scoped_session()
        if type_ == "engine":
            return get_engine()
        raise Exception("Unknown token: %s" % type_)

    unpickler.persistent_load = persistent_load
    return unpickler
117
def dumps(obj):
    """Serialize *obj* with a Serializer pickler and return the bytes."""
    out = StringIO()
    Serializer(out).dump(obj)
    return out.getvalue()
123   
def loads(data, metadata=None, scoped_session=None, engine=None):
    """Deserialize a string produced by ``dumps``.

    *metadata*, *scoped_session* and *engine* are forwarded to
    ``Deserializer`` as the resolution context.
    """
    unpickler = Deserializer(StringIO(data), metadata, scoped_session, engine)
    return unpickler.load()
128   
129   
Note: リポジトリブラウザについてのヘルプは TracBrowser を参照してください。