"""Beaker utilities"""
try:
    import thread as _thread
    import threading as _threading
except ImportError:
    import dummy_thread as _thread
    import dummy_threading as _threading

from datetime import datetime, timedelta
import os
import string
import types
import weakref
import warnings

try:
    Set = set
except NameError:
    from sets import Set
try:
    from hashlib import sha1
except ImportError:
    from sha import sha as sha1

from beaker.converters import asbool

try:
    from base64 import b64encode, b64decode
except ImportError:
    import binascii

    _translation = [chr(_x) for _x in range(256)]

    # From Python 2.5 base64.py
    def _translate(s, altchars):
        translation = _translation[:]
        for k, v in altchars.items():
            translation[ord(k)] = v
        return s.translate(''.join(translation))

    def b64encode(s, altchars=None):
        """Encode a string using Base64.

        s is the string to encode. Optional altchars must be a string of at least
        length 2 (additional characters are ignored) which specifies an
        alternative alphabet for the '+' and '/' characters. This allows an
        application to e.g. generate url or filesystem safe Base64 strings.

        The encoded string is returned.
        """
        # Strip off the trailing newline
        encoded = binascii.b2a_base64(s)[:-1]
        if altchars is not None:
            return _translate(encoded, {'+': altchars[0], '/': altchars[1]})
        return encoded

    def b64decode(s, altchars=None):
        """Decode a Base64 encoded string.

        s is the string to decode. Optional altchars must be a string of at least
        length 2 (additional characters are ignored) which specifies the
        alternative alphabet used instead of the '+' and '/' characters.

        The decoded string is returned. A TypeError is raised if s is
        incorrectly padded or if there are non-alphabet characters present in
        the string.
        """
        if altchars is not None:
            s = _translate(s, {altchars[0]: '+', altchars[1]: '/'})
        try:
            return binascii.a2b_base64(s)
        except binascii.Error, msg:
            # Transform this exception for consistency
            raise TypeError(msg)

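# Usage sketch for the fallback helpers above (comment only, not executed):
# passing altchars substitutes the given pair for '+' and '/', matching the
# behaviour of the stdlib base64 module when it is available.
#
#     safe = b64encode(raw_bytes, '-_')         # raw_bytes: any byte string
#     assert b64decode(safe, '-_') == raw_bytes
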
try:
    from threading import local as _tlocal
except ImportError:
    try:
        from dummy_threading import local as _tlocal
    except ImportError:
        class _tlocal(object):
            def __init__(self):
                self.__dict__['_tdict'] = {}

            def __delattr__(self, key):
                try:
                    del self._tdict[(_thread.get_ident(), key)]
                except KeyError:
                    raise AttributeError(key)

            def __getattr__(self, key):
                try:
                    return self._tdict[(_thread.get_ident(), key)]
                except KeyError:
                    raise AttributeError(key)

            def __setattr__(self, key, value):
                self._tdict[(_thread.get_ident(), key)] = value


__all__ = ["ThreadLocal", "Registry", "WeakValuedRegistry", "SyncDict",
           "encoded_path", "verify_directory"]


def verify_directory(dir):
    """Verify that a directory exists, creating it if necessary; tries to
    ignore collisions with other threads and processes."""

    tries = 0
    while not os.access(dir, os.F_OK):
        try:
            tries += 1
            os.makedirs(dir)
        except:
            # Another thread or process may have created the directory first;
            # retry, but give up after several failed attempts.
            if tries > 5:
                raise


def deprecated(func, message):
    def deprecated_method(*args, **kargs):
        warnings.warn(message, DeprecationWarning, 2)
        return func(*args, **kargs)
    try:
        deprecated_method.__name__ = func.__name__
    except TypeError:  # Python < 2.4
        pass
    deprecated_method.__doc__ = "%s\n\n%s" % (message, func.__doc__)
    return deprecated_method

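# Usage sketch (comment only, not executed); ``old_function`` is a
# hypothetical callable kept for backwards compatibility:
#
#     old_function = deprecated(old_function,
#                               "old_function is deprecated; use new_function")
#
# Calling the wrapped ``old_function`` emits a DeprecationWarning and then
# delegates to the original callable.
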
class ThreadLocal(_tlocal):
    """Stores a value on a per-thread basis."""

    def put(self, value):
        self.value = value

    def has(self):
        return hasattr(self, 'value')

    def get(self, default=None):
        return getattr(self, 'value', default)

    def remove(self):
        del self.value

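# Usage sketch (comment only, not executed): each thread sees its own value;
# ``some_object`` is a hypothetical per-request object.
#
#     request_context = ThreadLocal()
#     request_context.put(some_object)
#     if request_context.has():
#         obj = request_context.get()
#     request_context.remove()
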
class SyncDict(object):
    """
    An efficient/threadsafe singleton map algorithm, a.k.a.
    "get a value based on this key, and create if not found or not
    valid" paradigm:

        exists && isvalid ? get : create

    Works with weakref dictionaries and the LRUCache to handle items
    asynchronously disappearing from the dictionary.

    Use Python 2.3.3 or greater!  A major bug was fixed in Nov. 2003
    that was driving me nuts with garbage collection/weakrefs in this
    section.

    """
    def __init__(self):
        self.mutex = _thread.allocate_lock()
        self.dict = {}

    def get(self, key, createfunc, *args, **kwargs):
        try:
            if self.has_key(key):
                return self.dict[key]
            else:
                return self.sync_get(key, createfunc, *args, **kwargs)
        except KeyError:
            return self.sync_get(key, createfunc, *args, **kwargs)

    def sync_get(self, key, createfunc, *args, **kwargs):
        self.mutex.acquire()
        try:
            try:
                # Check again while holding the lock; another thread may have
                # created the value in the meantime.
                if self.has_key(key):
                    return self.dict[key]
                else:
                    return self._create(key, createfunc, *args, **kwargs)
            except KeyError:
                return self._create(key, createfunc, *args, **kwargs)
        finally:
            self.mutex.release()

    def _create(self, key, createfunc, *args, **kwargs):
        self[key] = obj = createfunc(*args, **kwargs)
        return obj

    def has_key(self, key):
        return self.dict.has_key(key)

    def __contains__(self, key):
        return self.dict.__contains__(key)

    def __getitem__(self, key):
        return self.dict.__getitem__(key)

    def __setitem__(self, key, value):
        self.dict.__setitem__(key, value)

    def __delitem__(self, key):
        return self.dict.__delitem__(key)

    def clear(self):
        self.dict.clear()


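# Usage sketch (comment only, not executed); ``make_connection`` is a
# hypothetical factory called only when 'db' is absent from the dictionary:
#
#     pool = SyncDict()
#     conn = pool.get('db', make_connection, 'some-dsn')
#
# Because sync_get re-checks the key while holding the mutex, concurrent
# callers share a single created value instead of each invoking the factory.
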
class WeakValuedRegistry(SyncDict):
    def __init__(self):
        self.mutex = _threading.RLock()
        self.dict = weakref.WeakValueDictionary()


def encoded_path(root, identifiers, extension=".enc", depth=3,
                 digest_filenames=True):
    """Generate a unique file-accessible path from the given list of
    identifiers starting at the given root directory."""
    ident = string.join(identifiers, "_")

    if digest_filenames:
        ident = sha1(ident).hexdigest()

    ident = os.path.basename(ident)

    # Fan the file out into nested directories named after the leading
    # characters of the ident, so a single directory does not fill up.
    tokens = []
    for d in range(1, depth):
        tokens.append(ident[0:d])

    dir = os.path.join(root, *tokens)
    verify_directory(dir)

    return os.path.join(dir, ident + extension)


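# Usage sketch (comment only, not executed); '/tmp/container_data' stands in
# for a real storage root:
#
#     path = encoded_path('/tmp/container_data', ['mynamespace', 'mykey'])
#
# The identifiers are joined with '_', SHA1-hashed (digest_filenames=True),
# and placed under subdirectories named after the first one and two characters
# of the hash, e.g. /tmp/container_data/a/ab/ab3f...enc; the directories are
# created on demand via verify_directory().
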
def verify_options(opt, types, error):
    if not isinstance(opt, types):
        if not isinstance(types, tuple):
            types = (types,)
        coerced = False
        # Try to coerce the option into each acceptable type in turn,
        # stopping at the first coercion that succeeds.
        for typ in types:
            try:
                if typ in (list, tuple):
                    opt = [x.strip() for x in opt.split(',')]
                else:
                    if typ == bool:
                        typ = asbool
                    opt = typ(opt)
                coerced = True
            except:
                pass
            if coerced:
                break
        if not coerced:
            raise Exception(error)
    return opt


def verify_rules(params, ruleset):
    for key, types, message in ruleset:
        if key in params:
            params[key] = verify_options(params[key], types, message)
    return params


def coerce_session_params(params):
    rules = [
        ('data_dir', (str, types.NoneType), "data_dir must be a string "
         "referring to a directory."),
        ('lock_dir', (str,), "lock_dir must be a string referring to a "
         "directory."),
        ('type', (str, types.NoneType), "Session type must be a string."),
        ('cookie_expires', (bool, datetime, timedelta), "Cookie expires was "
         "not a boolean, datetime, or timedelta instance."),
        ('cookie_domain', (str, types.NoneType), "Cookie domain must be a "
         "string."),
        ('id', (str,), "Session id must be a string."),
        ('key', (str,), "Session key must be a string."),
        ('secret', (str, types.NoneType), "Session secret must be a string."),
        ('validate_key', (str, types.NoneType), "Session validate_key must "
         "be a string."),
        ('encrypt_key', (str, types.NoneType), "Session encrypt_key must "
         "be a string."),
        ('secure', (bool, types.NoneType), "Session secure must be a boolean."),
        ('timeout', (int, types.NoneType), "Session timeout must be an "
         "integer."),
        ('auto', (bool, types.NoneType), "Session auto must be a boolean."),
    ]
    return verify_rules(params, rules)


def coerce_cache_params(params):
    rules = [
        ('data_dir', (str, types.NoneType), "data_dir must be a string "
         "referring to a directory."),
        ('lock_dir', (str,), "lock_dir must be a string referring to a "
         "directory."),
        ('type', (str,), "Cache type must be a string."),
        ('enabled', (bool, types.NoneType), "enabled must be true/false "
         "if present."),
        ('expire', (int, types.NoneType), "expire must be an integer "
         "representing how many seconds the cache is valid for."),
        ('regions', (list, tuple, types.NoneType), "Regions must be a "
         "comma separated list of valid regions."),
    ]
    return verify_rules(params, rules)


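# Usage sketch (comment only, not executed): string values, as read from an
# ini-style config file, are coerced in place into their declared types.
#
#     coerce_cache_params({'type': 'file', 'expire': '300',
#                          'enabled': 'true', 'regions': 'short, long'})
#     # -> {'type': 'file', 'expire': 300, 'enabled': True,
#     #     'regions': ['short', 'long']}
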
def parse_cache_config_options(config, include_defaults=True):
    """Parse configuration options and validate for use with the
    CacheManager"""
    # Load default cache options
    if include_defaults:
        options = dict(type='memory', data_dir=None, expire=None,
                       log_file=None)
    else:
        options = {}
    for key, val in config.iteritems():
        if key.startswith('beaker.cache.'):
            options[key[13:]] = val
        if key.startswith('cache.'):
            options[key[6:]] = val
    coerce_cache_params(options)

    # Set cache to enabled if not turned off
    if 'enabled' not in options:
        options['enabled'] = True

    # Configure region dict if regions are available
    regions = options.pop('regions', None)
    if regions:
        region_configs = {}
        for region in regions:
            # Setup the default cache options
            region_options = dict(data_dir=options.get('data_dir'),
                                  lock_dir=options.get('lock_dir'),
                                  type=options.get('type'),
                                  enabled=options['enabled'],
                                  expire=options.get('expire'))
            region_len = len(region) + 1
            for key in options.keys():
                if key.startswith('%s.' % region):
                    region_options[key[region_len:]] = options.pop(key)
            coerce_cache_params(region_options)
            region_configs[region] = region_options
        options['cache_regions'] = region_configs
    return options

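# Usage sketch (comment only, not executed); the paths are hypothetical.
# 'cache.'-prefixed keys are collected and coerced, and any '<region>.'-
# prefixed keys are split out into a 'cache_regions' dictionary.
#
#     parse_cache_config_options({
#         'cache.type': 'file',
#         'cache.data_dir': '/tmp/cache/data',
#         'cache.lock_dir': '/tmp/cache/lock',
#         'cache.regions': 'short',
#         'cache.short.expire': '60',
#     })
#     # -> includes type='file', enabled=True and
#     #    cache_regions={'short': {'type': 'file', 'expire': 60, ...}}
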
def func_namespace(func):
    """Generates a unique namespace for a function"""
    kls = None
    if hasattr(func, 'im_func'):
        kls = func.im_class
        func = func.im_func

    if kls:
        return '%s.%s' % (kls.__module__, kls.__name__)
    else:
        return func.__module__
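
# Usage sketch (comment only, not executed), assuming this module is
# importable as beaker.util: Python 2 methods expose im_func/im_class, so they
# are namespaced by their class, while plain functions fall back to the
# module name.
#
#     func_namespace(SyncDict.get)        # -> 'beaker.util.SyncDict'
#     func_namespace(verify_directory)    # -> 'beaker.util'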