root/galaxy-central/eggs/Mako-0.2.5-py2.6.egg/mako/util.py

Revision 3, 7.9 KB (committer: kohda, 14 years ago)

Install Unix tools  http://hannonlab.cshl.edu/galaxy_unix_tools/galaxy.html

# util.py
# Copyright (C) 2006, 2007, 2008, 2009 Michael Bayer mike_mp@zzzcomputing.com
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

import sys
try:
    Set = set
except:
    import sets
    Set = sets.Set

try:
    from cStringIO import StringIO
except:
    from StringIO import StringIO

import codecs, re, weakref, os, time

try:
    import threading
    import thread
except ImportError:
    import dummy_threading as threading
    import dummy_thread as thread

if sys.platform.startswith('win') or sys.platform.startswith('java'):
    time_func = time.clock
else:
    time_func = time.time

def verify_directory(dir):
    """create and/or verify a filesystem directory."""

    tries = 0

    while not os.path.exists(dir):
        try:
            tries += 1
            os.makedirs(dir, 0775)
        except:
            if tries > 5:
                raise

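# Illustrative usage sketch (not part of the original Mako source): verify_directory
# retries os.makedirs until the path exists, so calling it on a directory that is
# already there is a no-op. The nested path below is invented for the example.
def _example_verify_directory():
    import tempfile
    target = os.path.join(tempfile.mkdtemp(), 'mako_cache', 'templates')
    verify_directory(target)   # creates the missing directories
    verify_directory(target)   # second call finds them and returns immediately
    assert os.path.isdir(target)
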
class SetLikeDict(dict):
    """a dictionary that has some setlike methods on it"""
    def union(self, other):
        """produce a 'union' of this dict and another (at the key level).

        values in the second dict take precedence over that of the first"""
        x = SetLikeDict(**self)
        x.update(other)
        return x

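# Illustrative usage sketch (not part of the original Mako source): union() merges
# two dicts at the key level, with the right-hand argument winning on key conflicts.
# The sample keys below are invented for the example.
def _example_setlikedict_union():
    defaults = SetLikeDict(encoding='ascii', strict=True)
    overrides = {'encoding': 'utf-8'}
    merged = defaults.union(overrides)
    assert merged['encoding'] == 'utf-8' and merged['strict'] is True
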
class FastEncodingBuffer(object):
    """a very rudimentary buffer that is faster than StringIO, but doesn't crash on unicode data like cStringIO."""

    def __init__(self, encoding=None, errors='strict', unicode=False):
        self.data = []
        self.encoding = encoding
        if unicode:
            self.delim = u''
        else:
            self.delim = ''
        self.unicode = unicode
        self.errors = errors
        self.write = self.data.append

    def getvalue(self):
        if self.encoding:
            return self.delim.join(self.data).encode(self.encoding, self.errors)
        else:
            return self.delim.join(self.data)

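# Illustrative usage sketch (not part of the original Mako source): the buffer just
# appends fragments to a list and joins (and optionally encodes) them once in
# getvalue(), which is what makes it cheaper than StringIO for write-heavy rendering.
def _example_fast_encoding_buffer():
    buf = FastEncodingBuffer(encoding='utf-8', unicode=True)
    buf.write(u'caf')
    buf.write(u'\xe9')
    assert buf.getvalue() == u'caf\xe9'.encode('utf-8')
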
class LRUCache(dict):
    """A dictionary-like object that stores a limited number of items, discarding
    lesser used items periodically.

    this is a rewrite of LRUCache from Myghty to use a periodic timestamp-based
    paradigm so that synchronization is not really needed.  the size management
    is inexact.
    """

    class _Item(object):
        def __init__(self, key, value):
            self.key = key
            self.value = value
            self.timestamp = time_func()
        def __repr__(self):
            return repr(self.value)

    def __init__(self, capacity, threshold=.5):
        self.capacity = capacity
        self.threshold = threshold

    def __getitem__(self, key):
        item = dict.__getitem__(self, key)
        item.timestamp = time_func()
        return item.value

    def values(self):
        return [i.value for i in dict.values(self)]

    def setdefault(self, key, value):
        if key in self:
            return self[key]
        else:
            self[key] = value
            return value

    def __setitem__(self, key, value):
        item = dict.get(self, key)
        if item is None:
            item = self._Item(key, value)
            dict.__setitem__(self, key, item)
        else:
            item.value = value
        self._manage_size()

    def _manage_size(self):
        while len(self) > self.capacity + self.capacity * self.threshold:
            bytime = dict.values(self)
            bytime.sort(lambda a, b: cmp(b.timestamp, a.timestamp))
            for item in bytime[self.capacity:]:
                try:
                    del self[item.key]
                except KeyError:
                    # if we couldn't find a key, most likely some other thread broke in
                    # on us. loop around and try again
                    break

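# Illustrative usage sketch (not part of the original Mako source): the cache only
# trims itself once it grows past capacity * (1 + threshold), and when it does it
# keeps roughly the `capacity` items with the freshest access timestamps.
def _example_lru_cache():
    cache = LRUCache(capacity=2, threshold=.5)
    cache['a'] = 1
    cache['b'] = 2
    cache['c'] = 3   # len == 3 == 2 + 2 * .5, still no trim
    cache['d'] = 4   # len == 4 > 3, the least recently touched items are dropped
    assert len(cache) <= 3   # which keys survive depends on their timestamps
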
# Regexp to match python magic encoding line
_PYTHON_MAGIC_COMMENT_re = re.compile(
    r'[ \t\f]* \# .* coding[=:][ \t]*([-\w.]+)',
    re.VERBOSE)

def parse_encoding(fp):
    """Deduce the encoding of a source file from magic comment.

    It does this in the same way as the `Python interpreter`__

    .. __: http://docs.python.org/ref/encodings.html

    The ``fp`` argument should be a seekable file object.
    """
    pos = fp.tell()
    fp.seek(0)
    try:
        line1 = fp.readline()
        has_bom = line1.startswith(codecs.BOM_UTF8)
        if has_bom:
            line1 = line1[len(codecs.BOM_UTF8):]

        m = _PYTHON_MAGIC_COMMENT_re.match(line1)
        if not m:
            try:
                import parser
                parser.suite(line1)
            except (ImportError, SyntaxError):
                # Either it's a real syntax error, in which case the source
                # is not valid python source, or line2 is a continuation of
                # line1, in which case we don't want to scan line2 for a magic
                # comment.
                pass
            else:
                line2 = fp.readline()
                m = _PYTHON_MAGIC_COMMENT_re.match(line2)

        if has_bom:
            if m:
                raise SyntaxError, \
                      "python refuses to compile code with both a UTF8" \
                      " byte-order-mark and a magic encoding comment"
            return 'utf_8'
        elif m:
            return m.group(1)
        else:
            return None
    finally:
        fp.seek(pos)

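# Illustrative usage sketch (not part of the original Mako source): given a seekable
# file-like object whose first or second line carries a PEP 263 magic comment,
# parse_encoding returns the declared encoding and restores the file position.
def _example_parse_encoding():
    fp = StringIO("#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nx = 1\n")
    assert parse_encoding(fp) == 'utf-8'
    assert parse_encoding(StringIO("x = 1\n")) is None
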
def sorted_dict_repr(d):
    """repr() a dictionary with the keys in order.

    Used by the lexer unit test to compare parse trees based on strings.

    """
    keys = d.keys()
    keys.sort()
    return "{" + ", ".join(["%r: %r" % (k, d[k]) for k in keys]) + "}"

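# Illustrative usage sketch (not part of the original Mako source): sorting the keys
# gives a repr that is stable regardless of dict iteration order.
def _example_sorted_dict_repr():
    assert sorted_dict_repr({'b': 2, 'a': 1}) == "{'a': 1, 'b': 2}"
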
def restore__ast(_ast):
    """Attempt to restore the required classes to the _ast module if it
    appears to be missing them
    """
    if hasattr(_ast, 'AST'):
        return
    _ast.PyCF_ONLY_AST = 2 << 9
    m = compile("""\
def foo(): pass
class Bar(object): pass
if False: pass
baz = 'mako'
1 + 2 - 3 * 4 / 5
6 // 7 % 8 << 9 >> 10
11 & 12 ^ 13 | 14
15 and 16 or 17
-baz + (not +18) - ~17
baz and 'foo' or 'bar'
(mako is baz == baz) is not baz != mako
mako > baz < mako >= baz <= mako
mako in baz not in mako""", '<unknown>', 'exec', _ast.PyCF_ONLY_AST)
    _ast.Module = type(m)

    for cls in _ast.Module.__mro__:
        if cls.__name__ == 'mod':
            _ast.mod = cls
        elif cls.__name__ == 'AST':
            _ast.AST = cls

    _ast.FunctionDef = type(m.body[0])
    _ast.ClassDef = type(m.body[1])
    _ast.If = type(m.body[2])

    _ast.Name = type(m.body[3].targets[0])
    _ast.Store = type(m.body[3].targets[0].ctx)
    _ast.Str = type(m.body[3].value)

    _ast.Sub = type(m.body[4].value.op)
    _ast.Add = type(m.body[4].value.left.op)
    _ast.Div = type(m.body[4].value.right.op)
    _ast.Mult = type(m.body[4].value.right.left.op)

    _ast.RShift = type(m.body[5].value.op)
    _ast.LShift = type(m.body[5].value.left.op)
    _ast.Mod = type(m.body[5].value.left.left.op)
    _ast.FloorDiv = type(m.body[5].value.left.left.left.op)

    _ast.BitOr = type(m.body[6].value.op)
    _ast.BitXor = type(m.body[6].value.left.op)
    _ast.BitAnd = type(m.body[6].value.left.left.op)

    _ast.Or = type(m.body[7].value.op)
    _ast.And = type(m.body[7].value.values[0].op)

    _ast.Invert = type(m.body[8].value.right.op)
    _ast.Not = type(m.body[8].value.left.right.op)
    _ast.UAdd = type(m.body[8].value.left.right.operand.op)
    _ast.USub = type(m.body[8].value.left.left.op)

    _ast.Or = type(m.body[9].value.op)
    _ast.And = type(m.body[9].value.values[0].op)

    _ast.IsNot = type(m.body[10].value.ops[0])
    _ast.NotEq = type(m.body[10].value.ops[1])
    _ast.Is = type(m.body[10].value.left.ops[0])
    _ast.Eq = type(m.body[10].value.left.ops[1])

    _ast.Gt = type(m.body[11].value.ops[0])
    _ast.Lt = type(m.body[11].value.ops[1])
    _ast.GtE = type(m.body[11].value.ops[2])
    _ast.LtE = type(m.body[11].value.ops[3])

    _ast.In = type(m.body[12].value.ops[0])
    _ast.NotIn = type(m.body[12].value.ops[1])
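
# Illustrative usage sketch (not part of the original Mako source): on a normal
# CPython the _ast module already exposes AST, so restore__ast() returns
# immediately; the reconstruction above only runs on interpreters whose _ast
# module is missing those classes.
def _example_restore__ast():
    import _ast
    restore__ast(_ast)
    assert hasattr(_ast, 'AST') and hasattr(_ast, 'FunctionDef')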