# mapper.py
# Copyright (C) 2005, 2006, 2007, 2008, 2009 Michael Bayer mike_mp@zzzcomputing.com
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Logic to map Python classes to and from selectables.

Defines the :class:`~sqlalchemy.orm.mapper.Mapper` class, the central configurational
unit which associates a class with a database table.

This is a semi-private module; the main configurational API of the ORM is
available in :mod:`~sqlalchemy.orm`.

"""

import types
import weakref
import operator
from itertools import chain
from collections import deque

from sqlalchemy import sql, util, log, exc as sa_exc
from sqlalchemy.sql import expression, visitors, operators, util as sqlutil
from sqlalchemy.orm import attributes, exc, sync
from sqlalchemy.orm.interfaces import (
    MapperProperty, EXT_CONTINUE, PropComparator
    )
from sqlalchemy.orm.util import (
    ExtensionCarrier, _INSTRUMENTOR, _class_to_mapper, _state_has_identity,
    _state_mapper, class_mapper, instance_str, state_str,
    )

__all__ = (
    'Mapper',
    '_mapper_registry',
    'class_mapper',
    'object_mapper',
    )

_mapper_registry = weakref.WeakKeyDictionary()
_new_mappers = False
_already_compiling = False

# a list of MapperExtensions that will be installed in all mappers by default
global_extensions = []

# a constant returned by _get_attr_by_column to indicate
# this mapper is not handling an attribute for a particular
# column
NO_ATTRIBUTE = util.symbol('NO_ATTRIBUTE')

# lock used to synchronize the "mapper compile" step
_COMPILE_MUTEX = util.threading.RLock()

# initialize these lazily
ColumnProperty = None
SynonymProperty = None
ComparableProperty = None
RelationProperty = None
ConcreteInheritedProperty = None
_expire_state = None
_state_session = None

class Mapper(object):
    """Define the correlation of class attributes to database table
    columns.

    Instances of this class should be constructed via the
    :func:`~sqlalchemy.orm.mapper` function.

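    A minimal usage sketch (``User`` and the ``users`` Table below are
    assumed to be defined elsewhere; mapping is normally done once per
    class)::

        from sqlalchemy.orm import mapper

        mapper(User, users, properties={
            'fullname': users.c.user_fullname,
        })
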
    """
    def __init__(self,
                 class_,
                 local_table,
                 properties = None,
                 primary_key = None,
                 non_primary = False,
                 inherits = None,
                 inherit_condition = None,
                 inherit_foreign_keys = None,
                 extension = None,
                 order_by = False,
                 always_refresh = False,
                 version_id_col = None,
                 polymorphic_on=None,
                 _polymorphic_map=None,
                 polymorphic_identity=None,
                 polymorphic_fetch=None,
                 concrete=False,
                 select_table=None,
                 with_polymorphic=None,
                 allow_null_pks=False,
                 batch=True,
                 column_prefix=None,
                 include_properties=None,
                 exclude_properties=None,
                 eager_defaults=False):
        """Construct a new mapper.

        Mappers are normally constructed via the :func:`~sqlalchemy.orm.mapper`
        function. See that function's documentation for details.

        """

        self.class_ = util.assert_arg_type(class_, type, 'class_')

        self.class_manager = None

        self.primary_key_argument = primary_key
        self.non_primary = non_primary

        if order_by:
            self.order_by = util.to_list(order_by)
        else:
            self.order_by = order_by

        self.always_refresh = always_refresh
        self.version_id_col = version_id_col
        self.concrete = concrete
        self.single = False
        self.inherits = inherits
        self.local_table = local_table
        self.inherit_condition = inherit_condition
        self.inherit_foreign_keys = inherit_foreign_keys
        self.extension = extension
        self._init_properties = properties or {}
        self.allow_null_pks = allow_null_pks
        self.delete_orphans = []
        self.batch = batch
        self.eager_defaults = eager_defaults
        self.column_prefix = column_prefix
        self.polymorphic_on = polymorphic_on
        self._dependency_processors = []
        self._validators = {}
        self._clause_adapter = None
        self._requires_row_aliasing = False
        self._inherits_equated_pairs = None


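        # normalize the polymorphic loading arguments: both the deprecated
        # select_table argument and with_polymorphic end up stored as a
        # (<mappers/classes or '*'>, <selectable or None>) tuple below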
        self.select_table = select_table
        if select_table:
            util.warn_deprecated('select_table option is deprecated. Use with_polymorphic=("*", selectable) '
                                 'instead.')

            if with_polymorphic:
                raise sa_exc.ArgumentError("select_table can't be used with "
                                           "with_polymorphic (they define conflicting settings)")
            self.with_polymorphic = ('*', select_table)
        else:
            if with_polymorphic == '*':
                self.with_polymorphic = ('*', None)
            elif isinstance(with_polymorphic, (tuple, list)):
                if isinstance(with_polymorphic[0], (basestring, tuple, list)):
                    self.with_polymorphic = with_polymorphic
                else:
                    self.with_polymorphic = (with_polymorphic, None)
            elif with_polymorphic is not None:
                raise sa_exc.ArgumentError("Invalid setting for with_polymorphic")
            else:
                self.with_polymorphic = None

        if isinstance(self.local_table, expression._SelectBaseMixin):
            util.warn("mapper %s creating an alias for the given "
                      "selectable. References to the original selectable "
                      "may be misinterpreted by queries, polymorphic_on, etc. "
                      " Consider passing an explicit selectable.alias() construct instead." % self)
            self.local_table = self.local_table.alias()

        if self.with_polymorphic and isinstance(self.with_polymorphic[1], expression._SelectBaseMixin):
            self.with_polymorphic = (self.with_polymorphic[0], self.with_polymorphic[1].alias())

        # our 'polymorphic identity', a string name that when located in a result set row
        # indicates this Mapper should be used to construct the object instance for that row.
        self.polymorphic_identity = polymorphic_identity

        if polymorphic_fetch:
            util.warn_deprecated('polymorphic_fetch option is deprecated. Unloaded columns '
                                 'load as deferred in all cases; loading can be controlled '
                                 'using the "with_polymorphic" option.')

        # a dictionary of 'polymorphic identity' names, associating those names with
        # Mappers that will be used to construct object instances upon a select operation.
        if _polymorphic_map is None:
            self.polymorphic_map = {}
        else:
            self.polymorphic_map = _polymorphic_map

        self.include_properties = include_properties
        self.exclude_properties = exclude_properties

        self.compiled = False

        self._configure_inheritance()
        self._configure_extensions()
        self._configure_class_instrumentation()
        self._configure_properties()
        self._configure_pks()
        global _new_mappers
        _new_mappers = True
        self._log("constructed")

    # configurational / mutating methods. not threadsafe
    # except for compile().

    def _configure_inheritance(self):
207 | """Configure settings related to inherting and/or inherited mappers being present.""" |
---|

        # a set of all mappers which inherit from this one.
        self._inheriting_mappers = set()

        if self.inherits:
            if isinstance(self.inherits, type):
                self.inherits = class_mapper(self.inherits, compile=False)
            if not issubclass(self.class_, self.inherits.class_):
                raise sa_exc.ArgumentError(
                        "Class '%s' does not inherit from '%s'" %
                        (self.class_.__name__, self.inherits.class_.__name__))
            if self.non_primary != self.inherits.non_primary:
                np = not self.non_primary and "primary" or "non-primary"
                raise sa_exc.ArgumentError("Inheritance of %s mapper for class '%s' is "
                                           "only allowed from a %s mapper" % (np, self.class_.__name__, np))
            # inherit_condition is optional.
            if self.local_table is None:
                self.local_table = self.inherits.local_table
                self.mapped_table = self.inherits.mapped_table
                self.single = True
            elif not self.local_table is self.inherits.local_table:
                if self.concrete:
                    self.mapped_table = self.local_table
                    for mapper in self.iterate_to_root():
                        if mapper.polymorphic_on:
                            mapper._requires_row_aliasing = True
                else:
                    if not self.inherit_condition:
                        # figure out inherit condition from our table to the immediate table
                        # of the inherited mapper, not its full table which could pull in other
                        # stuff we don't want (allows test/inheritance.InheritTest4 to pass)
                        self.inherit_condition = sqlutil.join_condition(self.inherits.local_table, self.local_table)
                    self.mapped_table = sql.join(self.inherits.mapped_table, self.local_table, self.inherit_condition)

                    fks = util.to_set(self.inherit_foreign_keys)
                    self._inherits_equated_pairs = sqlutil.criterion_as_pairs(self.mapped_table.onclause, consider_as_foreign_keys=fks)
            else:
                self.mapped_table = self.local_table

            if self.polymorphic_identity is not None and not self.concrete:
                self._identity_class = self.inherits._identity_class
            else:
                self._identity_class = self.class_

            if self.version_id_col is None:
                self.version_id_col = self.inherits.version_id_col

            for mapper in self.iterate_to_root():
                util.reset_memoized(mapper, '_equivalent_columns')
                util.reset_memoized(mapper, '_sorted_tables')

            if self.order_by is False and not self.concrete and self.inherits.order_by is not False:
                self.order_by = self.inherits.order_by

            self.polymorphic_map = self.inherits.polymorphic_map
            self.batch = self.inherits.batch
            self.inherits._inheriting_mappers.add(self)
            self.base_mapper = self.inherits.base_mapper
            self._all_tables = self.inherits._all_tables

            if self.polymorphic_identity is not None:
                self.polymorphic_map[self.polymorphic_identity] = self
                if not self.polymorphic_on:
                    for mapper in self.iterate_to_root():
                        # try to set up polymorphic on using corresponding_column(); else leave
                        # as None
                        if mapper.polymorphic_on:
                            self.polymorphic_on = self.mapped_table.corresponding_column(mapper.polymorphic_on)
                            break
        else:
            self._all_tables = set()
            self.base_mapper = self
            self.mapped_table = self.local_table
            if self.polymorphic_identity is not None:
                self.polymorphic_map[self.polymorphic_identity] = self
            self._identity_class = self.class_

        if self.mapped_table is None:
            raise sa_exc.ArgumentError("Mapper '%s' does not have a mapped_table specified." % self)

    def _configure_extensions(self):
        """Go through the global_extensions list as well as the list
        of ``MapperExtensions`` specified for this ``Mapper`` and
        create a linked list of those extensions.

        """
        extlist = util.OrderedSet()

        extension = self.extension
        if extension:
            for ext_obj in util.to_list(extension):
                # local MapperExtensions have already instrumented the class
                extlist.add(ext_obj)

        if self.inherits:
            for ext in self.inherits.extension:
                if ext not in extlist:
                    extlist.add(ext)
        else:
            for ext in global_extensions:
                if isinstance(ext, type):
                    ext = ext()
                if ext not in extlist:
                    extlist.add(ext)

        self.extension = ExtensionCarrier()
        for ext in extlist:
            self.extension.append(ext)

    def _configure_class_instrumentation(self):
        """If this mapper is to be a primary mapper (i.e. the
        non_primary flag is not set), associate this Mapper with the
        given class_ and entity name.

        Subsequent calls to ``class_mapper()`` for the class_/entity
        name combination will return this mapper. Also decorate the
        `__init__` method on the mapped class to include optional
        auto-session attachment logic.

        """
        manager = attributes.manager_of_class(self.class_)

        if self.non_primary:
            if not manager or manager.mapper is None:
                raise sa_exc.InvalidRequestError(
                    "Class %s has no primary mapper configured. Configure "
                    "a primary mapper first before setting up a non primary "
335 | "Mapper.") |
---|
            self.class_manager = manager
            _mapper_registry[self] = True
            return

        if manager is not None:
            assert manager.class_ is self.class_
            if manager.mapper:
                raise sa_exc.ArgumentError(
                    "Class '%s' already has a primary mapper defined. "
                    "Use non_primary=True to "
                    "create a non primary Mapper. clear_mappers() will "
                    "remove *all* current mappers from all classes." %
                    self.class_)
            #else:
                # a ClassManager may already exist as
                # ClassManager.instrument_attribute() creates
                # new managers for each subclass if they don't yet exist.

        _mapper_registry[self] = True

        self.extension.instrument_class(self, self.class_)

        if manager is None:
            manager = attributes.register_class(self.class_,
                deferred_scalar_loader = _load_scalar_attributes
            )

        self.class_manager = manager

        manager.mapper = self

        # The remaining members can be added by any mapper, e_name None or not.
        if manager.info.get(_INSTRUMENTOR, False):
            return

        event_registry = manager.events
        event_registry.add_listener('on_init', _event_on_init)
        event_registry.add_listener('on_init_failure', _event_on_init_failure)
        event_registry.add_listener('on_resurrect', _event_on_resurrect)

        for key, method in util.iterate_attributes(self.class_):
            if isinstance(method, types.FunctionType):
                if hasattr(method, '__sa_reconstructor__'):
                    event_registry.add_listener('on_load', method)
                elif hasattr(method, '__sa_validators__'):
                    for name in method.__sa_validators__:
                        self._validators[name] = method

        if 'reconstruct_instance' in self.extension:
            def reconstruct(instance):
                self.extension.reconstruct_instance(self, instance)
            event_registry.add_listener('on_load', reconstruct)

        manager.info[_INSTRUMENTOR] = self

    def dispose(self):
        # Disable any attribute-based compilation.
        self.compiled = True

        if hasattr(self, '_compile_failed'):
            del self._compile_failed

        if not self.non_primary and self.class_manager.mapper is self:
            attributes.unregister_class(self.class_)

    def _configure_pks(self):

        self.tables = sqlutil.find_tables(self.mapped_table)

        if not self.tables:
            raise sa_exc.InvalidRequestError("Could not find any Table objects in mapped table '%s'" % str(self.mapped_table))

        self._pks_by_table = {}
        self._cols_by_table = {}

        all_cols = util.column_set(chain(*[col.proxy_set for col in self._columntoproperty]))
        pk_cols = util.column_set(c for c in all_cols if c.primary_key)

        # identify primary key columns which are also mapped by this mapper.
        tables = set(self.tables + [self.mapped_table])
        self._all_tables.update(tables)
        for t in tables:
            if t.primary_key and pk_cols.issuperset(t.primary_key):
                # ordering is important since it determines the ordering of mapper.primary_key (and therefore query.get())
                self._pks_by_table[t] = util.ordered_column_set(t.primary_key).intersection(pk_cols)
            self._cols_by_table[t] = util.ordered_column_set(t.c).intersection(all_cols)

        # determine cols that aren't expressed within our tables; mark these
        # as "read only" properties which are refreshed upon INSERT/UPDATE
        self._readonly_props = set(
            self._columntoproperty[col]
            for col in self._columntoproperty
            if not hasattr(col, 'table') or col.table not in self._cols_by_table)

        # if explicit PK argument sent, add those columns to the primary key mappings
        if self.primary_key_argument:
            for k in self.primary_key_argument:
                if k.table not in self._pks_by_table:
                    self._pks_by_table[k.table] = util.OrderedSet()
                self._pks_by_table[k.table].add(k)

        if self.mapped_table not in self._pks_by_table or len(self._pks_by_table[self.mapped_table]) == 0:
            raise sa_exc.ArgumentError("Mapper %s could not assemble any primary "
                                       "key columns for mapped table '%s'" % (self, self.mapped_table.description))

        if self.inherits and not self.concrete and not self.primary_key_argument:
            # if inheriting, the "primary key" for this mapper is that of the inheriting (unless concrete or explicit)
            self.primary_key = self.inherits.primary_key
        else:
            # determine primary key from argument or mapped_table pks - reduce to the minimal set of columns
            if self.primary_key_argument:
                primary_key = sqlutil.reduce_columns(
                    [self.mapped_table.corresponding_column(c) for c in self.primary_key_argument],
                    ignore_nonexistent_tables=True)
            else:
                primary_key = sqlutil.reduce_columns(
                    self._pks_by_table[self.mapped_table], ignore_nonexistent_tables=True)

            if len(primary_key) == 0:
                raise sa_exc.ArgumentError("Mapper %s could not assemble any primary "
                                           "key columns for mapped table '%s'" % (self, self.mapped_table.description))

            self.primary_key = primary_key
            self._log("Identified primary key columns: " + str(primary_key))

    def _configure_properties(self):

        # Column and other ClauseElement objects which are mapped
        self.columns = self.c = util.OrderedProperties()

        # object attribute names mapped to MapperProperty objects
        self._props = util.OrderedDict()

        # table columns mapped to lists of MapperProperty objects
        # using a list allows a single column to be defined as
        # populating multiple object attributes
        self._columntoproperty = util.column_dict()

        # load custom properties
        if self._init_properties:
            for key, prop in self._init_properties.iteritems():
                self._configure_property(key, prop, False)

        # pull properties from the inherited mapper if any.
        if self.inherits:
            for key, prop in self.inherits._props.iteritems():
                if key not in self._props and not self._should_exclude(key, key, local=False):
                    self._adapt_inherited_property(key, prop, False)

        # create properties for each column in the mapped table,
        # for those columns which don't already map to a property
        for column in self.mapped_table.columns:
            if column in self._columntoproperty:
                continue

            column_key = (self.column_prefix or '') + column.key

            if self._should_exclude(column.key, column_key, local=self.local_table.c.contains_column(column)):
                continue

            # adjust the "key" used for this column to that
            # of the inheriting mapper
            for mapper in self.iterate_to_root():
                if column in mapper._columntoproperty:
                    column_key = mapper._columntoproperty[column].key

            self._configure_property(column_key, column, init=False, setparent=True)

        # do a special check for the "discriminator" column, as it may only be present
        # in the 'with_polymorphic' selectable but we need it for the base mapper
        if self.polymorphic_on and self.polymorphic_on not in self._columntoproperty:
            col = self.mapped_table.corresponding_column(self.polymorphic_on)
            if not col:
                dont_instrument = True
                col = self.polymorphic_on
            else:
                dont_instrument = False
            if self._should_exclude(col.key, col.key, local=False):
                raise sa_exc.InvalidRequestError("Cannot exclude or override the discriminator column %r" % col.key)
            self._configure_property(col.key, ColumnProperty(col, _no_instrument=dont_instrument), init=False, setparent=True)

    def _adapt_inherited_property(self, key, prop, init):
        if not self.concrete:
            self._configure_property(key, prop, init=False, setparent=False)
        elif key not in self._props:
            self._configure_property(key, ConcreteInheritedProperty(), init=init, setparent=True)

    def _configure_property(self, key, prop, init=True, setparent=True):
        self._log("_configure_property(%s, %s)" % (key, prop.__class__.__name__))

        if not isinstance(prop, MapperProperty):
            # we were passed a Column or a list of Columns; generate a ColumnProperty
            columns = util.to_list(prop)
            column = columns[0]
            if not expression.is_column(column):
                raise sa_exc.ArgumentError("%s=%r is not an instance of MapperProperty or Column" % (key, prop))

            prop = self._props.get(key, None)

            if isinstance(prop, ColumnProperty):
                # TODO: the "property already exists" case is still not well defined here.
                # assuming single-column, etc.

                if prop.parent is not self:
                    # existing ColumnProperty from an inheriting mapper.
                    # make a copy and append our column to it
                    prop = prop.copy()
                prop.columns.append(column)
                self._log("appending to existing ColumnProperty %s" % (key))
            elif prop is None or isinstance(prop, ConcreteInheritedProperty):
                mapped_column = []
                for c in columns:
                    mc = self.mapped_table.corresponding_column(c)
                    if not mc:
                        mc = self.local_table.corresponding_column(c)
                        if mc:
                            # if the column is in the local table but not the mapped table,
                            # this corresponds to adding a column after the fact to the local table.
                            # [ticket:1523]
                            self.mapped_table._reset_exported()
                        mc = self.mapped_table.corresponding_column(c)
                        if not mc:
                            raise sa_exc.ArgumentError("Column '%s' is not represented in mapper's table. "
                                                       "Use the `column_property()` function to force this column "
                                                       "to be mapped as a read-only attribute." % c)
                    mapped_column.append(mc)
                prop = ColumnProperty(*mapped_column)
            else:
                raise sa_exc.ArgumentError("WARNING: column '%s' conflicts with property '%r'. "
                                           "To resolve this, map the column to the class under a different "
                                           "name in the 'properties' dictionary. Or, to remove all awareness "
                                           "of the column entirely (including its availability as a foreign key), "
                                           "use the 'include_properties' or 'exclude_properties' mapper arguments "
                                           "to control specifically which table columns get mapped." % (column.key, prop))

        if isinstance(prop, ColumnProperty):
            col = self.mapped_table.corresponding_column(prop.columns[0])

            # if the column is not present in the mapped table,
            # test if a column has been added after the fact to the parent table
            # (or their parent, etc.)
            # [ticket:1570]
            if col is None and self.inherits:
                path = [self]
                for m in self.inherits.iterate_to_root():
                    col = m.local_table.corresponding_column(prop.columns[0])
                    if col is not None:
                        for m2 in path:
                            m2.mapped_table._reset_exported()
                        col = self.mapped_table.corresponding_column(prop.columns[0])
                        break
                    path.append(m)

            # otherwise, col might not be present! the selectable given
            # to the mapper need not include "deferred"
            # columns (included in zblog tests)
            if col is None:
                col = prop.columns[0]

                # column is coming in after _readonly_props was initialized; check
                # for 'readonly'
                if hasattr(self, '_readonly_props') and \
                        (not hasattr(col, 'table') or col.table not in self._cols_by_table):
                    self._readonly_props.add(prop)

            else:
                # if column is coming in after _cols_by_table was initialized, ensure the col is in the
                # right set
                if hasattr(self, '_cols_by_table') and col.table in self._cols_by_table and col not in self._cols_by_table[col.table]:
                    self._cols_by_table[col.table].add(col)

            # if this ColumnProperty represents the "polymorphic discriminator"
            # column, mark it. We'll need this when rendering columns
            # in SELECT statements.
            if not hasattr(prop, '_is_polymorphic_discriminator'):
                prop._is_polymorphic_discriminator = (col is self.polymorphic_on or prop.columns[0] is self.polymorphic_on)

            self.columns[key] = col
            for col in prop.columns:
                for col in col.proxy_set:
                    self._columntoproperty[col] = prop

        elif isinstance(prop, (ComparableProperty, SynonymProperty)) and setparent:
            if prop.descriptor is None:
                desc = getattr(self.class_, key, None)
                if self._is_userland_descriptor(desc):
                    prop.descriptor = desc
            if getattr(prop, 'map_column', False):
                if key not in self.mapped_table.c:
                    raise sa_exc.ArgumentError(
                        "Can't compile synonym '%s': no column on table '%s' named '%s'"
                        % (prop.name, self.mapped_table.description, key))
                self._configure_property(prop.name, ColumnProperty(self.mapped_table.c[key]), init=init, setparent=setparent)

        self._props[key] = prop
        prop.key = key

        if setparent:
            prop.set_parent(self)

            if not self.non_primary:
                prop.instrument_class(self)

        for mapper in self._inheriting_mappers:
            mapper._adapt_inherited_property(key, prop, init)

        if init:
            prop.init()
            prop.post_instrument_class(self)


    def compile(self):
        """Compile this mapper and all other non-compiled mappers.

        This method checks the local compiled status as well as for
        any new mappers that have been defined, and is safe to call
        repeatedly.

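        A brief usage sketch (``SomeClass`` and ``some_table`` are assumed
        to be defined elsewhere)::

            from sqlalchemy.orm import mapper

            m = mapper(SomeClass, some_table)
            m.compile()     # safe to call repeatedly
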
        """
        global _new_mappers
        if self.compiled and not _new_mappers:
            return self

        _COMPILE_MUTEX.acquire()
        try:
            try:
                global _already_compiling
                if _already_compiling:
                    # re-entrance to compile() occurs rarely, when a class-mapped construct is
                    # used within a ForeignKey, something that is possible
                    # when using the declarative layer
                    self._post_configure_properties()
                    return
                _already_compiling = True
                try:

                    # double-check inside mutex
                    if self.compiled and not _new_mappers:
                        return self

                    # initialize properties on all mappers
                    # note that _mapper_registry is unordered, which
                    # may randomly conceal/reveal issues related to
                    # the order of mapper compilation
                    for mapper in list(_mapper_registry):
                        if getattr(mapper, '_compile_failed', False):
                            raise sa_exc.InvalidRequestError("One or more mappers failed to compile. Exception was probably "
                                                             "suppressed within a hasattr() call. "
                                                             "Message was: %s" % mapper._compile_failed)
                        if not mapper.compiled:
                            mapper._post_configure_properties()

                    _new_mappers = False
                    return self
                finally:
                    _already_compiling = False
            except:
                import sys
                exc = sys.exc_info()[1]
                self._compile_failed = exc
                raise
        finally:
            _COMPILE_MUTEX.release()

    def _post_configure_properties(self):
        """Call the ``init()`` method on all ``MapperProperties``
        attached to this mapper.

        This is a deferred configuration step which is intended
        to execute once all mappers have been constructed.

        """

        self._log("_post_configure_properties() started")
        l = [(key, prop) for key, prop in self._props.iteritems()]
        for key, prop in l:
            self._log("initialize prop " + key)

            if prop.parent is self and not prop._compile_started:
                prop.init()

            if prop._compile_finished:
                prop.post_instrument_class(self)

        self._log("_post_configure_properties() complete")
        self.compiled = True

    def add_properties(self, dict_of_properties):
        """Add the given dictionary of properties to this mapper,
        using `add_property`.

        """
        for key, value in dict_of_properties.iteritems():
            self.add_property(key, value)

    def add_property(self, key, prop):
        """Add an individual MapperProperty to this mapper.

        If the mapper has not been compiled yet, just adds the
        property to the initial properties dictionary sent to the
        constructor. If this Mapper has already been compiled, then
        the given MapperProperty is compiled immediately.

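        A short sketch, assuming ``m`` is an existing Mapper for a class
        mapped to a ``users_table`` Table, and ``Address`` is another
        mapped class::

            from sqlalchemy.orm import relation

            m.add_property('email', users_table.c.email_address)
            m.add_property('addresses', relation(Address))
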
        """
        self._init_properties[key] = prop
        self._configure_property(key, prop, init=self.compiled)


    # class formatting / logging.

    def _log(self, msg):
        if self._should_log_info:
            self.logger.info(
                "(" + self.class_.__name__ +
                "|" +
                (self.local_table and self.local_table.description or str(self.local_table)) +
                (self.non_primary and "|non-primary" or "") + ") " +
                msg)

    def _log_debug(self, msg):
        if self._should_log_debug:
            self.logger.debug(
                "(" + self.class_.__name__ +
                "|" +
                (self.local_table and self.local_table.description or str(self.local_table)) +
                (self.non_primary and "|non-primary" or "") + ") " +
                msg)

    def __repr__(self):
        return '<Mapper at 0x%x; %s>' % (
            id(self), self.class_.__name__)

    def __str__(self):
        return "Mapper|" + self.class_.__name__ + "|" + \
            (self.local_table and self.local_table.description or str(self.local_table)) + \
            (self.non_primary and "|non-primary" or "")

    # informational / status

    def _is_orphan(self, state):
        o = False
        for mapper in self.iterate_to_root():
            for (key, cls) in mapper.delete_orphans:
                if attributes.manager_of_class(cls).has_parent(
                        state, key, optimistic=_state_has_identity(state)):
                    return False
            o = o or bool(mapper.delete_orphans)
        return o

    def has_property(self, key):
        return key in self._props

    def get_property(self, key, resolve_synonyms=False, raiseerr=True):
        """return a MapperProperty associated with the given key."""

        self.compile()
        return self._get_property(key, resolve_synonyms=resolve_synonyms, raiseerr=raiseerr)

    def _get_property(self, key, resolve_synonyms=False, raiseerr=True):
        prop = self._props.get(key, None)
        if resolve_synonyms:
            while isinstance(prop, SynonymProperty):
                prop = self._props.get(prop.name, None)
        if prop is None and raiseerr:
            raise sa_exc.InvalidRequestError("Mapper '%s' has no property '%s'" % (str(self), key))
        return prop

    @property
    def iterate_properties(self):
        """return an iterator of all MapperProperty objects."""
        self.compile()
        return self._props.itervalues()

    def _mappers_from_spec(self, spec, selectable):
        """given a with_polymorphic() argument, return the set of mappers it represents.

        Trims the list of mappers to just those represented within the given selectable, if present.
        This helps some more legacy-ish mappings.

        """
        if spec == '*':
            mappers = list(self.polymorphic_iterator())
        elif spec:
            mappers = [_class_to_mapper(m) for m in util.to_list(spec)]
            for m in mappers:
                if not m.isa(self):
                    raise sa_exc.InvalidRequestError("%r does not inherit from %r" % (m, self))
        else:
            mappers = []

        if selectable:
            tables = set(sqlutil.find_tables(selectable, include_aliases=True))
            mappers = [m for m in mappers if m.local_table in tables]

        return mappers

    def _selectable_from_mappers(self, mappers):
        """given a list of mappers (assumed to be within this mapper's inheritance hierarchy),
        construct an outerjoin amongst those mappers' mapped tables.

        """

        from_obj = self.mapped_table
        for m in mappers:
            if m is self:
                continue
            if m.concrete:
                raise sa_exc.InvalidRequestError("'with_polymorphic()' requires 'selectable' argument when concrete-inheriting mappers are used.")
            elif not m.single:
                from_obj = from_obj.outerjoin(m.local_table, m.inherit_condition)

        return from_obj

    @property
    def _single_table_criterion(self):
        if self.single and \
            self.inherits and \
            self.polymorphic_on and \
            self.polymorphic_identity is not None:
            return self.polymorphic_on.in_(
                m.polymorphic_identity
                for m in self.polymorphic_iterator())
        else:
            return None


    @util.memoized_property
    def _with_polymorphic_mappers(self):
        if not self.with_polymorphic:
            return [self]
        return self._mappers_from_spec(*self.with_polymorphic)

    @util.memoized_property
    def _with_polymorphic_selectable(self):
        if not self.with_polymorphic:
            return self.mapped_table

        spec, selectable = self.with_polymorphic
        if selectable:
            return selectable
        else:
            return self._selectable_from_mappers(self._mappers_from_spec(spec, selectable))

    def _with_polymorphic_args(self, spec=None, selectable=False):
        if self.with_polymorphic:
            if not spec:
                spec = self.with_polymorphic[0]
            if selectable is False:
                selectable = self.with_polymorphic[1]

        mappers = self._mappers_from_spec(spec, selectable)
        if selectable:
            return mappers, selectable
        else:
            return mappers, self._selectable_from_mappers(mappers)

    def _iterate_polymorphic_properties(self, mappers=None):
        """Return an iterator of MapperProperty objects which will render into a SELECT."""

        if mappers is None:
            mappers = self._with_polymorphic_mappers

        if not mappers:
            for c in self.iterate_properties:
                yield c
        else:
            # in the polymorphic case, filter out discriminator columns
            # from other mappers, as these are sometimes dependent on that
            # mapper's polymorphic selectable (which we don't want rendered)
            for c in util.unique_list(
                chain(*[list(mapper.iterate_properties) for mapper in [self] + mappers])
            ):
                if getattr(c, '_is_polymorphic_discriminator', False) and \
                    (not self.polymorphic_on or c.columns[0] is not self.polymorphic_on):
                    continue
                yield c

    @property
    def properties(self):
        raise NotImplementedError("Public collection of MapperProperty objects is "
                                  "provided by the get_property() and iterate_properties accessors.")

    @util.memoized_property
    def _get_clause(self):
        """create a "get clause" based on the primary key. this is used
        by query.get() and many-to-one lazyloads to load this item
        by primary key.

        """
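        # one anonymous bindparam is generated per primary key column; the
        # accompanying dictionary maps each primary key column to its
        # bindparam so callers can plug in values later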
        params = [(primary_key, sql.bindparam(None, type_=primary_key.type)) for primary_key in self.primary_key]
        return sql.and_(*[k==v for (k, v) in params]), util.column_dict(params)

    @util.memoized_property
    def _equivalent_columns(self):
        """Create a map of all *equivalent* columns, based on
        the determination of column pairs that are equated to
        one another based on inherit condition. This is designed
        to work with the queries that util.polymorphic_union
        comes up with, which often don't include the columns from
        the base table directly (including the subclass table columns
        only).

        The resulting structure is a dictionary of columns mapped
        to lists of equivalent columns, i.e.

        {
            tablea.col1:
                set([tableb.col1, tablec.col1]),
            tablea.col2:
                set([tabled.col2])
        }

        """
        result = util.column_dict()
        def visit_binary(binary):
            if binary.operator == operators.eq:
                if binary.left in result:
                    result[binary.left].add(binary.right)
                else:
                    result[binary.left] = util.column_set((binary.right,))
                if binary.right in result:
                    result[binary.right].add(binary.left)
                else:
                    result[binary.right] = util.column_set((binary.left,))
        for mapper in self.base_mapper.polymorphic_iterator():
            if mapper.inherit_condition:
                visitors.traverse(mapper.inherit_condition, {}, {'binary':visit_binary})

        return result

    def _is_userland_descriptor(self, obj):
        return not isinstance(obj, (MapperProperty, attributes.InstrumentedAttribute)) and hasattr(obj, '__get__')

    def _should_exclude(self, name, assigned_name, local):
        """determine whether a particular property should be implicitly present on the class.

        This occurs when properties are propagated from an inherited class, or are
        applied from the columns present in the mapped table.

        """

        # check for descriptors, either local or from
        # an inherited class
        if local:
            if self.class_.__dict__.get(assigned_name, None)\
                and self._is_userland_descriptor(self.class_.__dict__[assigned_name]):
                return True
        else:
            if getattr(self.class_, assigned_name, None)\
                and self._is_userland_descriptor(getattr(self.class_, assigned_name)):
                return True

        if (self.include_properties is not None and
            name not in self.include_properties):
            self._log("not including property %s" % (name))
            return True

        if (self.exclude_properties is not None and
            name in self.exclude_properties):
            self._log("excluding property %s" % (name))
            return True

        return False

    def common_parent(self, other):
1001 | """Return true if the given mapper shares a common inherited parent as this mapper.""" |
---|

        return self.base_mapper is other.base_mapper

    def _canload(self, state, allow_subtypes):
        s = self.primary_mapper()
        if self.polymorphic_on or allow_subtypes:
            return _state_mapper(state).isa(s)
        else:
            return _state_mapper(state) is s

    def isa(self, other):
1013 | """Return True if the this mapper inherits from the given mapper.""" |
---|

        m = self
        while m and m is not other:
            m = m.inherits
        return bool(m)

    def iterate_to_root(self):
        m = self
        while m:
            yield m
            m = m.inherits

    def polymorphic_iterator(self):
        """Iterate through the collection including this mapper and
        all descendant mappers.

        This includes not just the immediately inheriting mappers but
        all their inheriting mappers as well.

        To iterate through an entire hierarchy, use
        ``mapper.base_mapper.polymorphic_iterator()``.

        """
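        # breadth-first traversal of the inheritance hierarchy, starting
        # with this mapper and then visiting each inheriting mapper in turn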
        stack = deque([self])
        while stack:
            item = stack.popleft()
            yield item
            stack.extend(item._inheriting_mappers)

    def primary_mapper(self):
        """Return the primary mapper corresponding to this mapper's class key (class)."""

        return self.class_manager.mapper

    def identity_key_from_row(self, row, adapter=None):
        """Return an identity-map key for use in storing/retrieving an
        item from the identity map.

        row
            A ``sqlalchemy.engine.base.RowProxy`` instance or a
            dictionary mapping result-set ``ColumnElement``
            instances to their values within a row.

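        As an illustration (a sketch only; the exact contents depend on the
        mapped class and its primary key), a ``User`` class with a single
        integer primary key of value 5 would yield ``(User, (5,))``.
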
        """
        pk_cols = self.primary_key
        if adapter:
            pk_cols = [adapter.columns[c] for c in pk_cols]

        return (self._identity_class, tuple(row[column] for column in pk_cols))

    def identity_key_from_primary_key(self, primary_key):
        """Return an identity-map key for use in storing/retrieving an
        item from an identity map.

        primary_key
            A list of values indicating the identifier.

        """
        return (self._identity_class, tuple(util.to_list(primary_key)))

    def identity_key_from_instance(self, instance):
        """Return the identity key for the given instance, based on
        its primary key attributes.

        This value is typically also found on the instance state under the
        attribute name `key`.

        """
        return self.identity_key_from_primary_key(self.primary_key_from_instance(instance))

    def _identity_key_from_state(self, state):
        return self.identity_key_from_primary_key(self._primary_key_from_state(state))

    def primary_key_from_instance(self, instance):
        """Return the list of primary key values for the given
        instance.

        """
        state = attributes.instance_state(instance)
        return self._primary_key_from_state(state)

    def _primary_key_from_state(self, state):
        return [self._get_state_attr_by_column(state, column) for column in self.primary_key]

    def _get_col_to_prop(self, column):
        try:
            return self._columntoproperty[column]
        except KeyError:
            prop = self._props.get(column.key, None)
            if prop:
                raise exc.UnmappedColumnError("Column '%s.%s' is not available, due to conflicting property '%s':%s" % (column.table.name, column.name, column.key, repr(prop)))
            else:
                raise exc.UnmappedColumnError("No column %s is configured on mapper %s..." % (column, self))

    # TODO: improve names?
    def _get_state_attr_by_column(self, state, column):
        return self._get_col_to_prop(column).getattr(state, column)

    def _set_state_attr_by_column(self, state, column, value):
        return self._get_col_to_prop(column).setattr(state, value, column)

    def _get_committed_attr_by_column(self, obj, column):
        state = attributes.instance_state(obj)
        return self._get_committed_state_attr_by_column(state, column)

    def _get_committed_state_attr_by_column(self, state, column, passive=False):
        return self._get_col_to_prop(column).getcommitted(state, column, passive=passive)

    def _optimized_get_statement(self, state, attribute_names):
        """assemble a WHERE clause which retrieves a given state by primary key, using a minimized set of tables.

        Applies to a joined-table inheritance mapper where the
        requested attribute names are only present on joined tables,
        not the base table. The WHERE clause attempts to include
        only those tables to minimize joins.

        """
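        # strategy: collect the tables containing the requested attributes,
        # then walk the inheritance chain from the base mapper downwards,
        # cloning each inherit_condition and replacing columns from tables
        # that are not being loaded with literal bind values taken from the
        # already-loaded (committed) state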
        props = self._props

        tables = set(chain(*
            (sqlutil.find_tables(props[key].columns[0], check_columns=True)
            for key in attribute_names)
        ))

        if self.base_mapper.local_table in tables:
            return None

        class ColumnsNotAvailable(Exception):
            pass

        def visit_binary(binary):
            leftcol = binary.left
            rightcol = binary.right
            if leftcol is None or rightcol is None:
                return

            if leftcol.table not in tables:
                leftval = self._get_committed_state_attr_by_column(state, leftcol, passive=True)
                if leftval is attributes.PASSIVE_NORESULT:
                    raise ColumnsNotAvailable()
                binary.left = sql.bindparam(None, leftval, type_=binary.right.type)
            elif rightcol.table not in tables:
                rightval = self._get_committed_state_attr_by_column(state, rightcol, passive=True)
                if rightval is attributes.PASSIVE_NORESULT:
                    raise ColumnsNotAvailable()
                binary.right = sql.bindparam(None, rightval, type_=binary.right.type)

        allconds = []

        try:
            start = False
            for mapper in reversed(list(self.iterate_to_root())):
                if mapper.local_table in tables:
                    start = True
                if start and not mapper.single:
                    allconds.append(visitors.cloned_traverse(mapper.inherit_condition, {}, {'binary':visit_binary}))
        except ColumnsNotAvailable:
            return None

        cond = sql.and_(*allconds)

        return sql.select([props[key].columns[0] for key in attribute_names], cond, use_labels=True)

    def cascade_iterator(self, type_, state, halt_on=None):
        """Iterate each element and its mapper in an object graph,
        for all relations that meet the given cascade rule.

        ``type\_``:
            The name of the cascade rule (i.e. save-update, delete,
            etc.)

        ``state``:
            The lead InstanceState. Child items will be processed per
            the relations defined for this object's mapper.

        The return values are object instances; this provides a strong
        reference so that they don't fall out of scope immediately.

        """
        visited_instances = util.IdentitySet()
        visitables = [(self._props.itervalues(), 'property', state)]

        while visitables:
            iterator, item_type, parent_state = visitables[-1]
            try:
                if item_type == 'property':
                    prop = iterator.next()
                    visitables.append((prop.cascade_iterator(type_, parent_state, visited_instances, halt_on), 'mapper', None))
                elif item_type == 'mapper':
                    instance, instance_mapper, corresponding_state = iterator.next()
                    yield (instance, instance_mapper)
                    visitables.append((instance_mapper._props.itervalues(), 'property', corresponding_state))
            except StopIteration:
                visitables.pop()

    # persistence

    @util.memoized_property
    def _sorted_tables(self):
        table_to_mapper = {}
        for mapper in self.base_mapper.polymorphic_iterator():
            for t in mapper.tables:
                table_to_mapper[t] = mapper

        sorted_ = sqlutil.sort_tables(table_to_mapper.iterkeys())
        ret = util.OrderedDict()
        for t in sorted_:
            ret[t] = table_to_mapper[t]
        return ret
1223 | |
---|
1224 | def _save_obj(self, states, uowtransaction, postupdate=False, post_update_cols=None, single=False): |
---|
1225 | """Issue ``INSERT`` and/or ``UPDATE`` statements for a list of objects. |
---|
1226 | |
---|
1227 | This is called within the context of a UOWTransaction during a |
---|
1228 | flush operation. |
---|
1229 | |
---|
1230 | `_save_obj` issues SQL statements not just for instances mapped |
---|
1231 | directly by this mapper, but for instances mapped by all |
---|
1232 | inheriting mappers as well. This is to maintain proper insert |
---|
1233 | ordering among a polymorphic chain of instances. Therefore |
---|
1234 | _save_obj is typically called only on a *base mapper*, or a |
---|
1235 | mapper which does not inherit from any other mapper. |
---|
1236 | |
---|
1237 | """ |
---|
1238 | if self._should_log_debug: |
---|
1239 | self._log_debug("_save_obj() start, " + (single and "non-batched" or "batched")) |
---|
1240 | |
---|
1241 | # if batch=false, call _save_obj separately for each object |
---|
1242 | if not single and not self.batch: |
---|
1243 | for state in _sort_states(states): |
---|
1244 | self._save_obj([state], uowtransaction, postupdate=postupdate, post_update_cols=post_update_cols, single=True) |
---|
1245 | return |
---|
1246 | |
---|
1247 | # if session has a connection callable, |
---|
1248 | # organize individual states with the connection to use for insert/update |
---|
1249 | tups = [] |
---|
1250 | if 'connection_callable' in uowtransaction.mapper_flush_opts: |
---|
1251 | connection_callable = uowtransaction.mapper_flush_opts['connection_callable'] |
---|
1252 | for state in _sort_states(states): |
---|
1253 | m = _state_mapper(state) |
---|
1254 | tups.append( |
---|
1255 | ( |
---|
1256 | state, |
---|
1257 | m, |
---|
1258 | connection_callable(self, state.obj()), |
---|
1259 | _state_has_identity(state), |
---|
1260 | state.key or m._identity_key_from_state(state) |
---|
1261 | ) |
---|
1262 | ) |
---|
1263 | else: |
---|
1264 | connection = uowtransaction.transaction.connection(self) |
---|
1265 | for state in _sort_states(states): |
---|
1266 | m = _state_mapper(state) |
---|
1267 | tups.append( |
---|
1268 | ( |
---|
1269 | state, |
---|
1270 | m, |
---|
1271 | connection, |
---|
1272 | _state_has_identity(state), |
---|
1273 | state.key or m._identity_key_from_state(state) |
---|
1274 | ) |
---|
1275 | ) |
---|
1276 | |
---|
1277 | if not postupdate: |
---|
1278 | # call before_XXX extensions |
---|
1279 | for state, mapper, connection, has_identity, instance_key in tups: |
---|
1280 | if not has_identity: |
---|
1281 | if 'before_insert' in mapper.extension: |
---|
1282 | mapper.extension.before_insert(mapper, connection, state.obj()) |
---|
1283 | else: |
---|
1284 | if 'before_update' in mapper.extension: |
---|
1285 | mapper.extension.before_update(mapper, connection, state.obj()) |
---|
1286 | |
---|
1287 | row_switches = set() |
---|
1288 | if not postupdate: |
---|
1289 | for state, mapper, connection, has_identity, instance_key in tups: |
---|
1290 | # detect if we have a "pending" instance (i.e. has no instance_key attached to it), |
---|
1291 | # and another instance with the same identity key already exists as persistent. convert to an |
---|
1292 | # UPDATE if so. |
---|
1293 | if not has_identity and instance_key in uowtransaction.session.identity_map: |
---|
1294 | instance = uowtransaction.session.identity_map[instance_key] |
---|
1295 | existing = attributes.instance_state(instance) |
---|
1296 | if not uowtransaction.is_deleted(existing): |
---|
1297 | raise exc.FlushError( |
---|
1298 | "New instance %s with identity key %s conflicts with persistent instance %s" % |
---|
1299 | (state_str(state), instance_key, state_str(existing))) |
---|
1300 | if self._should_log_debug: |
---|
1301 | self._log_debug( |
---|
1302 | "detected row switch for identity %s. will update %s, remove %s from " |
---|
1303 | "transaction" % (instance_key, state_str(state), state_str(existing))) |
---|
1304 | |
---|
1305 | # remove the "delete" flag from the existing element |
---|
1306 | uowtransaction.set_row_switch(existing) |
---|
1307 | row_switches.add(state) |
---|
1308 | |
---|
1309 | table_to_mapper = self._sorted_tables |
---|
1310 | |
---|
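| # walk the mapped tables in foreign key dependency order, collecting INSERT
---|
| # and UPDATE parameter sets for each state which has columns in that table.
---|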
1311 | for table in table_to_mapper.iterkeys(): |
---|
1312 | insert = [] |
---|
1313 | update = [] |
---|
1314 | |
---|
1315 | for state, mapper, connection, has_identity, instance_key in tups: |
---|
1316 | if table not in mapper._pks_by_table: |
---|
1317 | continue |
---|
1318 | |
---|
1319 | pks = mapper._pks_by_table[table] |
---|
1320 | |
---|
1321 | if self._should_log_debug: |
---|
1322 | self._log_debug("_save_obj() table '%s' instance %s identity %s" % |
---|
1323 | (table.name, state_str(state), str(instance_key))) |
---|
1324 | |
---|
1325 | isinsert = not has_identity and not postupdate and state not in row_switches |
---|
1326 | |
---|
1327 | params = {} |
---|
1328 | value_params = {} |
---|
1329 | hasdata = False |
---|
1330 | |
---|
1331 | if isinsert: |
---|
1332 | for col in mapper._cols_by_table[table]: |
---|
1333 | if col is mapper.version_id_col: |
---|
1334 | params[col.key] = 1 |
---|
1335 | elif mapper.polymorphic_on and mapper.polymorphic_on.shares_lineage(col): |
---|
1336 | if self._should_log_debug: |
---|
1337 | self._log_debug( |
---|
1338 | "Using polymorphic identity '%s' for insert column '%s'" % |
---|
1339 | (mapper.polymorphic_identity, col.key)) |
---|
1340 | value = mapper.polymorphic_identity |
---|
1341 | if ((col.default is None and |
---|
1342 | col.server_default is None) or |
---|
1343 | value is not None): |
---|
1344 | params[col.key] = value |
---|
1345 | elif col in pks: |
---|
1346 | value = mapper._get_state_attr_by_column(state, col) |
---|
1347 | if value is not None: |
---|
1348 | params[col.key] = value |
---|
1349 | else: |
---|
1350 | value = mapper._get_state_attr_by_column(state, col) |
---|
1351 | if ((col.default is None and |
---|
1352 | col.server_default is None) or |
---|
1353 | value is not None): |
---|
1354 | if isinstance(value, sql.ClauseElement): |
---|
1355 | value_params[col] = value |
---|
1356 | else: |
---|
1357 | params[col.key] = value |
---|
1358 | insert.append((state, params, mapper, connection, value_params)) |
---|
1359 | else: |
---|
1360 | for col in mapper._cols_by_table[table]: |
---|
1361 | if col is mapper.version_id_col: |
---|
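| # bind the current version counter value under the column's label, to be
---|
| # matched in the UPDATE's WHERE clause; the incremented value becomes the
---|
| # new column value.
---|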
1362 | params[col._label] = mapper._get_state_attr_by_column(state, col) |
---|
1363 | params[col.key] = params[col._label] + 1 |
---|
1364 | for prop in mapper._columntoproperty.itervalues(): |
---|
1365 | history = attributes.get_state_history(state, prop.key, passive=True) |
---|
1366 | if history.added: |
---|
1367 | hasdata = True |
---|
1368 | elif mapper.polymorphic_on and mapper.polymorphic_on.shares_lineage(col) and col not in pks: |
---|
1369 | pass |
---|
1370 | else: |
---|
1371 | if post_update_cols is not None and col not in post_update_cols: |
---|
1372 | if col in pks: |
---|
1373 | params[col._label] = mapper._get_state_attr_by_column(state, col) |
---|
1374 | continue |
---|
1375 | |
---|
1376 | prop = mapper._columntoproperty[col] |
---|
1377 | history = attributes.get_state_history(state, prop.key, passive=True) |
---|
1378 | if history.added: |
---|
1379 | if isinstance(history.added[0], sql.ClauseElement): |
---|
1380 | value_params[col] = history.added[0] |
---|
1381 | else: |
---|
1382 | params[col.key] = prop.get_col_value(col, history.added[0]) |
---|
1383 | if col in pks: |
---|
1384 | if history.deleted: |
---|
1385 | params[col._label] = prop.get_col_value(col, history.deleted[0]) |
---|
1386 | hasdata = True |
---|
1387 | else: |
---|
1388 | # row switch logic can reach us here |
---|
1389 | # remove the pk from the update params so the update doesn't |
---|
1390 | # attempt to include the pk in the update statement |
---|
1391 | del params[col.key] |
---|
1392 | params[col._label] = prop.get_col_value(col, history.added[0]) |
---|
1393 | else: |
---|
1394 | hasdata = True |
---|
1395 | elif col in pks: |
---|
1396 | params[col._label] = mapper._get_state_attr_by_column(state, col) |
---|
1397 | if hasdata: |
---|
1398 | update.append((state, params, mapper, connection, value_params)) |
---|
1399 | |
---|
1400 | if update: |
---|
1401 | mapper = table_to_mapper[table] |
---|
1402 | clause = sql.and_() |
---|
1403 | |
---|
1404 | for col in mapper._pks_by_table[table]: |
---|
1405 | clause.clauses.append(col == sql.bindparam(col._label, type_=col.type)) |
---|
1406 | |
---|
1407 | if mapper.version_id_col and table.c.contains_column(mapper.version_id_col): |
---|
1408 | clause.clauses.append(mapper.version_id_col == sql.bindparam(mapper.version_id_col._label, type_=mapper.version_id_col.type))
---|
1409 | |
---|
1410 | statement = table.update(clause) |
---|
1411 | rows = 0 |
---|
1412 | for state, params, mapper, connection, value_params in update: |
---|
1413 | c = connection.execute(statement.values(value_params), params) |
---|
1414 | mapper._postfetch(uowtransaction, connection, table, state, c, c.last_updated_params(), value_params) |
---|
1415 | |
---|
1416 | rows += c.rowcount |
---|
1417 | |
---|
1418 | if c.supports_sane_rowcount() and rows != len(update): |
---|
1419 | raise exc.ConcurrentModificationError("Updated rowcount %d does not match number of objects updated %d" % (rows, len(update))) |
---|
1420 | |
---|
1421 | if insert: |
---|
1422 | statement = table.insert() |
---|
1423 | for state, params, mapper, connection, value_params in insert: |
---|
1424 | c = connection.execute(statement.values(value_params), params) |
---|
1425 | primary_key = c.last_inserted_ids() |
---|
1426 | |
---|
1427 | if primary_key is not None: |
---|
1428 | # set primary key attributes |
---|
1429 | for i, col in enumerate(mapper._pks_by_table[table]): |
---|
1430 | if mapper._get_state_attr_by_column(state, col) is None and len(primary_key) > i: |
---|
1431 | mapper._set_state_attr_by_column(state, col, primary_key[i]) |
---|
1432 | mapper._postfetch(uowtransaction, connection, table, state, c, c.last_inserted_params(), value_params) |
---|
1433 | |
---|
1434 | # synchronize newly inserted ids from one table to the next |
---|
1435 | # TODO: this performs some unnecessary attribute transfers |
---|
1436 | # from an attribute to itself, since the attribute is often mapped |
---|
1437 | # to multiple, equivalent columns. it also may fire off more |
---|
1438 | # than needed overall. |
---|
1439 | for m in mapper.iterate_to_root(): |
---|
1440 | if m._inherits_equated_pairs: |
---|
1441 | sync.populate(state, m, state, m, m._inherits_equated_pairs) |
---|
1442 | |
---|
1443 | if not postupdate: |
---|
1444 | for state, mapper, connection, has_identity, instance_key in tups: |
---|
1445 | |
---|
1446 | # expire readonly attributes |
---|
1447 | readonly = state.unmodified.intersection( |
---|
1448 | p.key for p in mapper._readonly_props |
---|
1449 | ) |
---|
1450 | |
---|
1451 | if readonly: |
---|
1452 | _expire_state(state, readonly) |
---|
1453 | |
---|
1454 | # if specified, eagerly refresh whatever has |
---|
1455 | # been expired. |
---|
1456 | if self.eager_defaults and state.unloaded: |
---|
1457 | state.key = self._identity_key_from_state(state) |
---|
1458 | uowtransaction.session.query(self)._get( |
---|
1459 | state.key, refresh_state=state, |
---|
1460 | only_load_props=state.unloaded) |
---|
1461 | |
---|
1462 | # call after_XXX extensions |
---|
1463 | if not has_identity: |
---|
1464 | if 'after_insert' in mapper.extension: |
---|
1465 | mapper.extension.after_insert(mapper, connection, state.obj()) |
---|
1466 | else: |
---|
1467 | if 'after_update' in mapper.extension: |
---|
1468 | mapper.extension.after_update(mapper, connection, state.obj()) |
---|
1469 | |
---|
1470 | def _postfetch(self, uowtransaction, connection, table, state, resultproxy, params, value_params): |
---|
1471 | """Expire attributes in need of newly persisted database state.""" |
---|
1472 | |
---|
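| # prefetch columns had their defaults evaluated client-side and their values
---|
| # are present in the bound parameters; postfetch columns were generated
---|
| # server-side and are expired below so they refresh on next access.
---|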
1473 | postfetch_cols = resultproxy.postfetch_cols() |
---|
1474 | generated_cols = list(resultproxy.prefetch_cols()) |
---|
1475 | |
---|
1476 | if self.polymorphic_on: |
---|
1477 | po = table.corresponding_column(self.polymorphic_on) |
---|
1478 | if po: |
---|
1479 | generated_cols.append(po) |
---|
1480 | |
---|
1481 | if self.version_id_col: |
---|
1482 | generated_cols.append(self.version_id_col) |
---|
1483 | |
---|
1484 | for c in generated_cols: |
---|
1485 | if c.key in params and c in self._columntoproperty: |
---|
1486 | self._set_state_attr_by_column(state, c, params[c.key]) |
---|
1487 | |
---|
1488 | deferred_props = [self._columntoproperty[c].key for c in postfetch_cols]
---|
1489 | |
---|
1490 | if deferred_props: |
---|
1491 | _expire_state(state, deferred_props) |
---|
1492 | |
---|
1493 | def _delete_obj(self, states, uowtransaction): |
---|
1494 | """Issue ``DELETE`` statements for a list of objects. |
---|
1495 | |
---|
1496 | This is called within the context of a UOWTransaction during a |
---|
1497 | flush operation. |
---|
1498 | |
---|
1499 | """ |
---|
1500 | if self._should_log_debug: |
---|
1501 | self._log_debug("_delete_obj() start") |
---|
1502 | |
---|
1503 | if 'connection_callable' in uowtransaction.mapper_flush_opts: |
---|
1504 | connection_callable = uowtransaction.mapper_flush_opts['connection_callable'] |
---|
1505 | tups = [(state, _state_mapper(state), connection_callable(self, state.obj())) for state in _sort_states(states)] |
---|
1506 | else: |
---|
1507 | connection = uowtransaction.transaction.connection(self) |
---|
1508 | tups = [(state, _state_mapper(state), connection) for state in _sort_states(states)] |
---|
1509 | |
---|
1510 | for state, mapper, connection in tups: |
---|
1511 | if 'before_delete' in mapper.extension: |
---|
1512 | mapper.extension.before_delete(mapper, connection, state.obj()) |
---|
1513 | |
---|
1514 | table_to_mapper = self._sorted_tables |
---|
1515 | |
---|
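| # issue DELETEs in reverse foreign key dependency order, so rows in
---|
| # dependent (child) tables are removed before the rows they reference.
---|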
1516 | for table in reversed(table_to_mapper.keys()): |
---|
1517 | delete = {} |
---|
1518 | for state, mapper, connection in tups: |
---|
1519 | if table not in mapper._pks_by_table: |
---|
1520 | continue |
---|
1521 | |
---|
1522 | params = {} |
---|
1523 | if not _state_has_identity(state): |
---|
1524 | continue |
---|
1525 | else: |
---|
1526 | delete.setdefault(connection, []).append(params) |
---|
1527 | for col in mapper._pks_by_table[table]: |
---|
1528 | params[col.key] = mapper._get_state_attr_by_column(state, col) |
---|
1529 | if mapper.version_id_col and table.c.contains_column(mapper.version_id_col): |
---|
1530 | params[mapper.version_id_col.key] = mapper._get_state_attr_by_column(state, mapper.version_id_col) |
---|
1531 | |
---|
1532 | for connection, del_objects in delete.iteritems(): |
---|
1533 | mapper = table_to_mapper[table] |
---|
1534 | clause = sql.and_() |
---|
1535 | for col in mapper._pks_by_table[table]: |
---|
1536 | clause.clauses.append(col == sql.bindparam(col.key, type_=col.type)) |
---|
1537 | if mapper.version_id_col and table.c.contains_column(mapper.version_id_col): |
---|
1538 | clause.clauses.append( |
---|
1539 | mapper.version_id_col == |
---|
1540 | sql.bindparam(mapper.version_id_col.key, type_=mapper.version_id_col.type)) |
---|
1541 | statement = table.delete(clause) |
---|
1542 | c = connection.execute(statement, del_objects) |
---|
1543 | if c.supports_sane_multi_rowcount() and c.rowcount != len(del_objects): |
---|
1544 | raise exc.ConcurrentModificationError("Deleted rowcount %d does not match " |
---|
1545 | "number of objects deleted %d" % (c.rowcount, len(del_objects))) |
---|
1546 | |
---|
1547 | for state, mapper, connection in tups: |
---|
1548 | if 'after_delete' in mapper.extension: |
---|
1549 | mapper.extension.after_delete(mapper, connection, state.obj()) |
---|
1550 | |
---|
1551 | def _register_dependencies(self, uowcommit): |
---|
1552 | """Register ``DependencyProcessor`` instances with a |
---|
1553 | ``unitofwork.UOWTransaction``. |
---|
1554 | |
---|
1555 | This calls `register_dependencies` on all attached
---|
1556 | ``MapperProperty`` instances. |
---|
1557 | |
---|
1558 | """ |
---|
1559 | for dep in self._props.values() + self._dependency_processors: |
---|
1560 | dep.register_dependencies(uowcommit) |
---|
1561 | |
---|
1562 | def _register_processors(self, uowcommit): |
---|
1563 | for dep in self._props.values() + self._dependency_processors: |
---|
1564 | dep.register_processors(uowcommit) |
---|
1565 | |
---|
1566 | # result set conversion |
---|
1567 | |
---|
1568 | def _instance_processor(self, context, path, adapter, polymorphic_from=None, extension=None, only_load_props=None, refresh_state=None, polymorphic_discriminator=None): |
---|
1569 | """Produce a mapper level row processor callable which processes rows into mapped instances.""" |
---|
1570 | |
---|
1571 | pk_cols = self.primary_key |
---|
1572 | |
---|
1573 | if polymorphic_from or refresh_state: |
---|
1574 | polymorphic_on = None |
---|
1575 | else: |
---|
1576 | polymorphic_on = polymorphic_discriminator or self.polymorphic_on |
---|
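| # cache of row processors keyed by discriminator value, built lazily for
---|
| # subclass mappers as their discriminator values are encountered in rows.
---|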
1577 | polymorphic_instances = util.PopulateDict(self._configure_subclass_mapper(context, path, adapter)) |
---|
1578 | |
---|
1579 | version_id_col = self.version_id_col |
---|
1580 | |
---|
1581 | if adapter: |
---|
1582 | pk_cols = [adapter.columns[c] for c in pk_cols] |
---|
1583 | if polymorphic_on: |
---|
1584 | polymorphic_on = adapter.columns[polymorphic_on] |
---|
1585 | if version_id_col: |
---|
1586 | version_id_col = adapter.columns[version_id_col] |
---|
1587 | |
---|
1588 | identity_class = self._identity_class |
---|
1589 | def identity_key(row): |
---|
1590 | return (identity_class, tuple(row[column] for column in pk_cols)) |
---|
1591 | |
---|
1592 | new_populators = [] |
---|
1593 | existing_populators = [] |
---|
1594 | |
---|
1595 | def populate_state(state, dict_, row, isnew, only_load_props, **flags): |
---|
1596 | if isnew: |
---|
1597 | if context.propagate_options: |
---|
1598 | state.load_options = context.propagate_options |
---|
1599 | if state.load_options: |
---|
1600 | state.load_path = context.query._current_path + path |
---|
1601 | |
---|
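| # attribute-level populators are built once, from the first row received,
---|
| # and shared across all subsequent rows by filling in the closed-over lists.
---|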
1602 | if not new_populators: |
---|
1603 | new_populators[:], existing_populators[:] = self._populators(context, path, row, adapter) |
---|
1604 | |
---|
1605 | if isnew: |
---|
1606 | populators = new_populators |
---|
1607 | else: |
---|
1608 | populators = existing_populators |
---|
1609 | |
---|
1610 | if only_load_props: |
---|
1611 | populators = [p for p in populators if p[0] in only_load_props] |
---|
1612 | |
---|
1613 | for key, populator in populators: |
---|
1614 | populator(state, dict_, row, isnew=isnew, **flags) |
---|
1615 | |
---|
1616 | session_identity_map = context.session.identity_map |
---|
1617 | |
---|
1618 | if not extension: |
---|
1619 | extension = self.extension |
---|
1620 | |
---|
1621 | translate_row = extension.get('translate_row', None) |
---|
1622 | create_instance = extension.get('create_instance', None) |
---|
1623 | populate_instance = extension.get('populate_instance', None) |
---|
1624 | append_result = extension.get('append_result', None) |
---|
1625 | populate_existing = context.populate_existing or self.always_refresh |
---|
1626 | |
---|
1627 | def _instance(row, result): |
---|
1628 | if translate_row: |
---|
1629 | ret = translate_row(self, context, row) |
---|
1630 | if ret is not EXT_CONTINUE: |
---|
1631 | row = ret |
---|
1632 | |
---|
1633 | if polymorphic_on: |
---|
1634 | discriminator = row[polymorphic_on] |
---|
1635 | if discriminator is not None: |
---|
1636 | _instance = polymorphic_instances[discriminator] |
---|
1637 | if _instance: |
---|
1638 | return _instance(row, result) |
---|
1639 | |
---|
1640 | # determine identity key |
---|
1641 | if refresh_state: |
---|
1642 | identitykey = refresh_state.key |
---|
1643 | if identitykey is None: |
---|
1644 | # super-rare condition; a refresh is being called |
---|
1645 | # on a non-instance-key instance; this is meant to only |
---|
1646 | # occur within a flush() |
---|
1647 | identitykey = self._identity_key_from_state(refresh_state) |
---|
1648 | else: |
---|
1649 | identitykey = identity_key(row) |
---|
1650 | |
---|
1651 | if identitykey in session_identity_map: |
---|
1652 | instance = session_identity_map[identitykey] |
---|
1653 | state = attributes.instance_state(instance) |
---|
1654 | dict_ = attributes.instance_dict(instance) |
---|
1655 | |
---|
1656 | if self._should_log_debug: |
---|
1657 | self._log_debug("_instance(): using existing instance %s identity %s" % |
---|
1658 | (instance_str(instance), identitykey)) |
---|
1659 | |
---|
1660 | isnew = state.runid != context.runid |
---|
1661 | currentload = not isnew |
---|
1662 | loaded_instance = False |
---|
1663 | |
---|
1664 | if not currentload and version_id_col and context.version_check and \ |
---|
1665 | self._get_state_attr_by_column(state, self.version_id_col) != row[version_id_col]: |
---|
1666 | raise exc.ConcurrentModificationError( |
---|
1667 | "Instance '%s' version of %s does not match %s" |
---|
1668 | % (state_str(state), self._get_state_attr_by_column(state, self.version_id_col), row[version_id_col])) |
---|
1669 | elif refresh_state: |
---|
1670 | # out-of-band refresh_state detected (i.e. it's not in the session.identity_map)
---|
1671 | # honor it anyway. this can happen if a _get() occurs within save_obj(), such as |
---|
1672 | # when eager_defaults is True. |
---|
1673 | state = refresh_state |
---|
1674 | instance = state.obj() |
---|
1675 | dict_ = attributes.instance_dict(instance) |
---|
1676 | isnew = state.runid != context.runid |
---|
1677 | currentload = True |
---|
1678 | loaded_instance = False |
---|
1679 | else: |
---|
1680 | if self._should_log_debug: |
---|
1681 | self._log_debug("_instance(): identity key %s not in session" % (identitykey,)) |
---|
1682 | |
---|
1683 | if self.allow_null_pks: |
---|
1684 | for x in identitykey[1]: |
---|
1685 | if x is not None: |
---|
1686 | break |
---|
1687 | else: |
---|
1688 | return None |
---|
1689 | else: |
---|
1690 | if None in identitykey[1]: |
---|
1691 | return None |
---|
1692 | isnew = True |
---|
1693 | currentload = True |
---|
1694 | loaded_instance = True |
---|
1695 | |
---|
1696 | if create_instance: |
---|
1697 | instance = create_instance(self, context, row, self.class_) |
---|
1698 | if instance is EXT_CONTINUE: |
---|
1699 | instance = self.class_manager.new_instance() |
---|
1700 | else: |
---|
1701 | manager = attributes.manager_of_class(instance.__class__) |
---|
1702 | # TODO: if manager is None, raise a friendly error about |
---|
1703 | # returning instances of unmapped types |
---|
1704 | manager.setup_instance(instance) |
---|
1705 | else: |
---|
1706 | instance = self.class_manager.new_instance() |
---|
1707 | |
---|
1708 | if self._should_log_debug: |
---|
1709 | self._log_debug("_instance(): created new instance %s identity %s" % |
---|
1710 | (instance_str(instance), identitykey)) |
---|
1711 | |
---|
1712 | dict_ = attributes.instance_dict(instance) |
---|
1713 | state = attributes.instance_state(instance) |
---|
1714 | state.key = identitykey |
---|
1715 | |
---|
1716 | # manually adding instance to session. for a complete add, |
---|
1717 | # session._finalize_loaded() must be called. |
---|
1718 | state.session_id = context.session.hash_key |
---|
1719 | session_identity_map.add(state) |
---|
1720 | |
---|
1721 | if currentload or populate_existing: |
---|
1722 | if isnew: |
---|
1723 | state.runid = context.runid |
---|
1724 | context.progress[state] = dict_ |
---|
1725 | |
---|
1726 | if not populate_instance or \ |
---|
1727 | populate_instance(self, context, row, instance, |
---|
1728 | only_load_props=only_load_props, instancekey=identitykey, isnew=isnew) is EXT_CONTINUE: |
---|
1729 | populate_state(state, dict_, row, isnew, only_load_props) |
---|
1730 | |
---|
1731 | else: |
---|
1732 | # populate attributes on non-loading instances which have been expired |
---|
1733 | # TODO: apply eager loads to un-lazy loaded collections ? |
---|
1734 | if state in context.partials or state.unloaded: |
---|
1735 | |
---|
1736 | if state in context.partials: |
---|
1737 | isnew = False |
---|
1738 | (d_, attrs) = context.partials[state] |
---|
1739 | else: |
---|
1740 | isnew = True |
---|
1741 | attrs = state.unloaded |
---|
1742 | context.partials[state] = (dict_, attrs) #<-- allow query.instances to commit the subset of attrs |
---|
1743 | |
---|
1744 | if not populate_instance or \ |
---|
1745 | populate_instance(self, context, row, instance, |
---|
1746 | only_load_props=attrs, instancekey=identitykey, isnew=isnew) is EXT_CONTINUE: |
---|
1747 | populate_state(state, dict_, row, isnew, attrs, instancekey=identitykey) |
---|
1748 | |
---|
1749 | if loaded_instance: |
---|
1750 | state._run_on_load(instance) |
---|
1751 | |
---|
1752 | if result is not None and \ |
---|
1753 | (not append_result or |
---|
1754 | append_result(self, context, row, instance, result, instancekey=identitykey, isnew=isnew) is EXT_CONTINUE): |
---|
1755 | result.append(instance) |
---|
1756 | |
---|
1757 | return instance |
---|
1758 | return _instance |
---|
1759 | |
---|
1760 | def _populators(self, context, path, row, adapter): |
---|
1761 | """Produce a collection of attribute level row processor callables.""" |
---|
1762 | |
---|
1763 | new_populators, existing_populators = [], [] |
---|
1764 | for prop in self._props.itervalues(): |
---|
1765 | newpop, existingpop = prop.create_row_processor(context, path, self, row, adapter) |
---|
1766 | if newpop: |
---|
1767 | new_populators.append((prop.key, newpop)) |
---|
1768 | if existingpop: |
---|
1769 | existing_populators.append((prop.key, existingpop)) |
---|
1770 | return new_populators, existing_populators |
---|
1771 | |
---|
1772 | def _configure_subclass_mapper(self, context, path, adapter): |
---|
1773 | """Produce a mapper level row processor callable factory for mappers inheriting this one.""" |
---|
1774 | |
---|
1775 | def configure_subclass_mapper(discriminator): |
---|
1776 | try: |
---|
1777 | mapper = self.polymorphic_map[discriminator] |
---|
1778 | except KeyError: |
---|
1779 | raise AssertionError("No such polymorphic_identity %r is defined" % discriminator) |
---|
1780 | if mapper is self: |
---|
1781 | return None |
---|
1782 | return mapper._instance_processor(context, path, adapter, polymorphic_from=self) |
---|
1783 | return configure_subclass_mapper |
---|
1784 | |
---|
1785 | log.class_logger(Mapper) |
---|
1786 | |
---|
1787 | |
---|
1788 | def reconstructor(fn): |
---|
1789 | """Decorate a method as the 'reconstructor' hook. |
---|
1790 | |
---|
1791 | Designates a method as the "reconstructor", an ``__init__``-like |
---|
1792 | method that will be called by the ORM after the instance has been |
---|
1793 | loaded from the database or otherwise reconstituted. |
---|
1794 | |
---|
1795 | The reconstructor will be invoked with no arguments. Scalar |
---|
1796 | (non-collection) database-mapped attributes of the instance will |
---|
1797 | be available for use within the function. Eagerly-loaded |
---|
1798 | collections are generally not yet available and will usually only |
---|
1799 | contain the first element. ORM state changes made to objects at |
---|
1800 | this stage will not be recorded for the next flush() operation, so |
---|
1801 | the activity within a reconstructor should be conservative. |
---|
1802 | |
---|
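| A minimal usage sketch follows; ``MyClass`` stands in for a hypothetical
---|
| class that is otherwise mapped via ``mapper()``::
---|
|
---|
|     class MyClass(object):
---|
|         @reconstructor
---|
|         def init_on_load(self):
---|
|             # rebuild transient, non-persisted state after a load
---|
|             self._cache = {}
---|
|
---|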
1803 | """ |
---|
1804 | fn.__sa_reconstructor__ = True |
---|
1805 | return fn |
---|
1806 | |
---|
1807 | def validates(*names): |
---|
1808 | """Decorate a method as a 'validator' for one or more named properties. |
---|
1809 | |
---|
1810 | Designates a method as a validator, a method which receives the |
---|
1811 | name of the attribute as well as a value to be assigned, or, in the
---|
1812 | case of a collection, a value to be added to the collection. The function
---|
1813 | can then raise validation exceptions to halt the operation, or can
---|
1814 | modify or replace the value before proceeding. The function
---|
1815 | should otherwise return the given value.
---|
1816 | |
---|
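| A minimal usage sketch follows; ``EmailAddress`` and its ``email``
---|
| attribute are hypothetical, standing in for a class otherwise mapped
---|
| via ``mapper()``::
---|
|
---|
|     class EmailAddress(object):
---|
|         @validates('email')
---|
|         def validate_email(self, key, value):
---|
|             if '@' not in value:
---|
|                 raise ValueError("invalid email address")
---|
|             return value
---|
|
---|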
1817 | """ |
---|
1818 | def wrap(fn): |
---|
1819 | fn.__sa_validators__ = names |
---|
1820 | return fn |
---|
1821 | return wrap |
---|
1822 | |
---|
1823 | def _event_on_init(state, instance, args, kwargs): |
---|
1824 | """Trigger mapper compilation and run init_instance hooks.""" |
---|
1825 | |
---|
1826 | instrumenting_mapper = state.manager.info[_INSTRUMENTOR] |
---|
1827 | # compile() always compiles all mappers |
---|
1828 | instrumenting_mapper.compile() |
---|
1829 | if 'init_instance' in instrumenting_mapper.extension: |
---|
1830 | instrumenting_mapper.extension.init_instance( |
---|
1831 | instrumenting_mapper, instrumenting_mapper.class_, |
---|
1832 | state.manager.events.original_init, |
---|
1833 | instance, args, kwargs) |
---|
1834 | |
---|
1835 | def _event_on_init_failure(state, instance, args, kwargs): |
---|
1836 | """Run init_failed hooks.""" |
---|
1837 | |
---|
1838 | instrumenting_mapper = state.manager.info[_INSTRUMENTOR] |
---|
1839 | if 'init_failed' in instrumenting_mapper.extension: |
---|
1840 | util.warn_exception( |
---|
1841 | instrumenting_mapper.extension.init_failed, |
---|
1842 | instrumenting_mapper, instrumenting_mapper.class_, |
---|
1843 | state.manager.events.original_init, instance, args, kwargs) |
---|
1844 | |
---|
1845 | def _event_on_resurrect(state, instance): |
---|
1846 | # re-populate the primary key elements |
---|
1847 | # of the dict based on the mapping. |
---|
1848 | instrumenting_mapper = state.manager.info[_INSTRUMENTOR] |
---|
1849 | for col, val in zip(instrumenting_mapper.primary_key, state.key[1]): |
---|
1850 | instrumenting_mapper._set_state_attr_by_column(state, col, val) |
---|
1851 | |
---|
1852 | |
---|
1853 | def _sort_states(states): |
---|
1854 | return sorted(states, key=operator.attrgetter('sort_key')) |
---|
1855 | |
---|
1856 | def _load_scalar_attributes(state, attribute_names): |
---|
1857 | """initiate a column-based attribute refresh operation.""" |
---|
1858 | |
---|
1859 | mapper = _state_mapper(state) |
---|
1860 | session = _state_session(state) |
---|
1861 | if not session: |
---|
1862 | raise sa_exc.UnboundExecutionError("Instance %s is not bound to a Session; " |
---|
1863 | "attribute refresh operation cannot proceed" % (state_str(state))) |
---|
1864 | |
---|
1865 | has_key = _state_has_identity(state) |
---|
1866 | |
---|
1867 | result = False |
---|
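| # for non-concrete inheritance scenarios, first try an "optimized" statement
---|
| # which selects only from the table(s) containing the expired attributes.
---|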
1868 | if mapper.inherits and not mapper.concrete: |
---|
1869 | statement = mapper._optimized_get_statement(state, attribute_names) |
---|
1870 | if statement: |
---|
1871 | result = session.query(mapper).from_statement(statement)._get(None, only_load_props=attribute_names, refresh_state=state) |
---|
1872 | |
---|
1873 | if result is False: |
---|
1874 | if has_key: |
---|
1875 | identity_key = state.key |
---|
1876 | else: |
---|
1877 | identity_key = mapper._identity_key_from_state(state) |
---|
1878 | result = session.query(mapper)._get(identity_key, refresh_state=state, only_load_props=attribute_names) |
---|
1879 | |
---|
1880 | # if instance is pending, a refresh operation may not complete (even if PK attributes are assigned) |
---|
1881 | if has_key and result is None: |
---|
1882 | raise exc.ObjectDeletedError("Instance '%s' has been deleted." % state_str(state)) |
---|