| 1 | # orm/query.py |
|---|
| 2 | # Copyright (C) 2005, 2006, 2007, 2008, 2009 Michael Bayer mike_mp@zzzcomputing.com |
|---|
| 3 | # |
|---|
| 4 | # This module is part of SQLAlchemy and is released under |
|---|
| 5 | # the MIT License: http://www.opensource.org/licenses/mit-license.php |
|---|
| 6 | |
|---|
| 7 | """The Query class and support. |
|---|
| 8 | |
|---|
| 9 | Defines the :class:`~sqlalchemy.orm.query.Query` class, the central construct used by |
|---|
| 10 | the ORM to construct database queries. |
|---|
| 11 | |
|---|
| 12 | The ``Query`` class should not be confused with the |
|---|
| 13 | :class:`~sqlalchemy.sql.expression.Select` class, which defines database SELECT |
|---|
| 14 | operations at the SQL (non-ORM) level. ``Query`` differs from ``Select`` in |
|---|
| 15 | that it returns ORM-mapped objects and interacts with an ORM session, whereas |
|---|
| 16 | the ``Select`` construct interacts directly with the database to return |
|---|
| 17 | iterable result sets. |
|---|
| 18 | |
|---|
| 19 | """ |
|---|
| 20 | |
|---|
| 21 | from itertools import chain |
|---|
| 22 | from operator import itemgetter |
|---|
| 23 | |
|---|
| 24 | from sqlalchemy import sql, util, log, schema |
|---|
| 25 | from sqlalchemy import exc as sa_exc |
|---|
| 26 | from sqlalchemy.orm import exc as orm_exc |
|---|
| 27 | from sqlalchemy.sql import util as sql_util |
|---|
| 28 | from sqlalchemy.sql import expression, visitors, operators |
|---|
| 29 | from sqlalchemy.orm import ( |
|---|
| 30 | attributes, interfaces, mapper, object_mapper, evaluator, |
|---|
| 31 | ) |
|---|
| 32 | from sqlalchemy.orm.util import ( |
|---|
| 33 | AliasedClass, ORMAdapter, _entity_descriptor, _entity_info, |
|---|
| 34 | _is_aliased_class, _is_mapped_class, _orm_columns, _orm_selectable, |
|---|
| 35 | join as orm_join, |
|---|
| 36 | ) |
|---|
| 37 | |
|---|
| 38 | |
|---|
| 39 | __all__ = ['Query', 'QueryContext', 'aliased'] |
|---|
| 40 | |
|---|
| 41 | |
|---|
| 42 | aliased = AliasedClass |
|---|
| 43 | |
|---|
| 44 | def _generative(*assertions): |
|---|
| 45 | """Mark a method as generative.""" |
|---|
| 46 | |
|---|
| 47 | @util.decorator |
|---|
| 48 | def generate(fn, *args, **kw): |
|---|
| 49 | self = args[0]._clone() |
|---|
| 50 | for assertion in assertions: |
|---|
| 51 | assertion(self, fn.func_name) |
|---|
| 52 | fn(self, *args[1:], **kw) |
|---|
| 53 | return self |
|---|
| 54 | return generate |
|---|
| 55 | |
|---|
| 56 | class Query(object): |
|---|
| 57 | """ORM-level SQL construction object.""" |
|---|
| 58 | |
|---|
| 59 | _enable_eagerloads = True |
|---|
| 60 | _enable_assertions = True |
|---|
| 61 | _with_labels = False |
|---|
| 62 | _criterion = None |
|---|
| 63 | _yield_per = None |
|---|
| 64 | _lockmode = None |
|---|
| 65 | _order_by = False |
|---|
| 66 | _group_by = False |
|---|
| 67 | _having = None |
|---|
| 68 | _distinct = False |
|---|
| 69 | _offset = None |
|---|
| 70 | _limit = None |
|---|
| 71 | _statement = None |
|---|
| 72 | _joinpoint = None |
|---|
| 73 | _correlate = frozenset() |
|---|
| 74 | _populate_existing = False |
|---|
| 75 | _version_check = False |
|---|
| 76 | _autoflush = True |
|---|
| 77 | _current_path = () |
|---|
| 78 | _only_load_props = None |
|---|
| 79 | _refresh_state = None |
|---|
| 80 | _from_obj = () |
|---|
| 81 | _filter_aliases = None |
|---|
| 82 | _from_obj_alias = None |
|---|
| 83 | _currenttables = frozenset() |
|---|
| 84 | |
|---|
| 85 | def __init__(self, entities, session=None): |
|---|
| 86 | self.session = session |
|---|
| 87 | |
|---|
| 88 | self._with_options = [] |
|---|
| 89 | self._params = {} |
|---|
| 90 | self._attributes = {} |
|---|
| 91 | self._polymorphic_adapters = {} |
|---|
| 92 | self._set_entities(entities) |
|---|
| 93 | |
|---|
| 94 | def _set_entities(self, entities, entity_wrapper=None): |
|---|
| 95 | if entity_wrapper is None: |
|---|
| 96 | entity_wrapper = _QueryEntity |
|---|
| 97 | self._entities = [] |
|---|
| 98 | for ent in util.to_list(entities): |
|---|
| 99 | entity_wrapper(self, ent) |
|---|
| 100 | |
|---|
| 101 | self._setup_aliasizers(self._entities) |
|---|
| 102 | |
|---|
| 103 | def _setup_aliasizers(self, entities): |
|---|
| 104 | if hasattr(self, '_mapper_adapter_map'): |
|---|
| 105 | # usually safe to share a single map, but copying to prevent |
|---|
| 106 | # subtle leaks if end-user is reusing base query with arbitrary |
|---|
| 107 | # number of aliased() objects |
|---|
| 108 | self._mapper_adapter_map = d = self._mapper_adapter_map.copy() |
|---|
| 109 | else: |
|---|
| 110 | self._mapper_adapter_map = d = {} |
|---|
| 111 | |
|---|
| 112 | for ent in entities: |
|---|
| 113 | for entity in ent.entities: |
|---|
| 114 | if entity not in d: |
|---|
| 115 | mapper, selectable, is_aliased_class = _entity_info(entity) |
|---|
| 116 | if not is_aliased_class and mapper.with_polymorphic: |
|---|
| 117 | with_polymorphic = mapper._with_polymorphic_mappers |
|---|
| 118 | self.__mapper_loads_polymorphically_with(mapper, |
|---|
| 119 | sql_util.ColumnAdapter(selectable, mapper._equivalent_columns)) |
|---|
| 120 | adapter = None |
|---|
| 121 | elif is_aliased_class: |
|---|
| 122 | adapter = sql_util.ColumnAdapter(selectable, mapper._equivalent_columns) |
|---|
| 123 | with_polymorphic = None |
|---|
| 124 | else: |
|---|
| 125 | with_polymorphic = adapter = None |
|---|
| 126 | |
|---|
| 127 | d[entity] = (mapper, adapter, selectable, is_aliased_class, with_polymorphic) |
|---|
| 128 | ent.setup_entity(entity, *d[entity]) |
|---|
| 129 | |
|---|
| 130 | def __mapper_loads_polymorphically_with(self, mapper, adapter): |
|---|
| 131 | for m2 in mapper._with_polymorphic_mappers: |
|---|
| 132 | self._polymorphic_adapters[m2] = adapter |
|---|
| 133 | for m in m2.iterate_to_root(): |
|---|
| 134 | self._polymorphic_adapters[m.mapped_table] = self._polymorphic_adapters[m.local_table] = adapter |
|---|
| 135 | |
|---|
| 136 | def _set_select_from(self, from_obj): |
|---|
| 137 | if isinstance(from_obj, expression._SelectBaseMixin): |
|---|
| 138 | from_obj = from_obj.alias() |
|---|
| 139 | |
|---|
| 140 | self._from_obj = (from_obj,) |
|---|
| 141 | equivs = self.__all_equivs() |
|---|
| 142 | |
|---|
| 143 | if isinstance(from_obj, expression.Alias): |
|---|
| 144 | self._from_obj_alias = sql_util.ColumnAdapter(from_obj, equivs) |
|---|
| 145 | |
|---|
| 146 | def _get_polymorphic_adapter(self, entity, selectable): |
|---|
| 147 | self.__mapper_loads_polymorphically_with(entity.mapper, |
|---|
| 148 | sql_util.ColumnAdapter(selectable, entity.mapper._equivalent_columns)) |
|---|
| 149 | |
|---|
| 150 | def _reset_polymorphic_adapter(self, mapper): |
|---|
| 151 | for m2 in mapper._with_polymorphic_mappers: |
|---|
| 152 | self._polymorphic_adapters.pop(m2, None) |
|---|
| 153 | for m in m2.iterate_to_root(): |
|---|
| 154 | self._polymorphic_adapters.pop(m.mapped_table, None) |
|---|
| 155 | self._polymorphic_adapters.pop(m.local_table, None) |
|---|
| 156 | |
|---|
| 157 | def _reset_joinpoint(self): |
|---|
| 158 | self._joinpoint = None |
|---|
| 159 | self._filter_aliases = None |
|---|
| 160 | |
|---|
| 161 | def __adapt_polymorphic_element(self, element): |
|---|
| 162 | if isinstance(element, expression.FromClause): |
|---|
| 163 | search = element |
|---|
| 164 | elif hasattr(element, 'table'): |
|---|
| 165 | search = element.table |
|---|
| 166 | else: |
|---|
| 167 | search = None |
|---|
| 168 | |
|---|
| 169 | if search: |
|---|
| 170 | alias = self._polymorphic_adapters.get(search, None) |
|---|
| 171 | if alias: |
|---|
| 172 | return alias.adapt_clause(element) |
|---|
| 173 | |
|---|
| 174 | def __replace_element(self, adapters): |
|---|
| 175 | def replace(elem): |
|---|
| 176 | if '_halt_adapt' in elem._annotations: |
|---|
| 177 | return elem |
|---|
| 178 | |
|---|
| 179 | for adapter in adapters: |
|---|
| 180 | e = adapter(elem) |
|---|
| 181 | if e: |
|---|
| 182 | return e |
|---|
| 183 | return replace |
|---|
| 184 | |
|---|
| 185 | def __replace_orm_element(self, adapters): |
|---|
| 186 | def replace(elem): |
|---|
| 187 | if '_halt_adapt' in elem._annotations: |
|---|
| 188 | return elem |
|---|
| 189 | |
|---|
| 190 | if "_orm_adapt" in elem._annotations or "parententity" in elem._annotations: |
|---|
| 191 | for adapter in adapters: |
|---|
| 192 | e = adapter(elem) |
|---|
| 193 | if e: |
|---|
| 194 | return e |
|---|
| 195 | return replace |
|---|
| 196 | |
|---|
| 197 | @_generative() |
|---|
| 198 | def _adapt_all_clauses(self): |
|---|
| 199 | self._disable_orm_filtering = True |
|---|
| 200 | |
|---|
| 201 | def _adapt_clause(self, clause, as_filter, orm_only): |
|---|
| 202 | adapters = [] |
|---|
| 203 | if as_filter and self._filter_aliases: |
|---|
| 204 | adapters.append(self._filter_aliases.replace) |
|---|
| 205 | |
|---|
| 206 | if self._from_obj_alias: |
|---|
| 207 | adapters.append(self._from_obj_alias.replace) |
|---|
| 208 | |
|---|
| 209 | if self._polymorphic_adapters: |
|---|
| 210 | adapters.append(self.__adapt_polymorphic_element) |
|---|
| 211 | |
|---|
| 212 | if not adapters: |
|---|
| 213 | return clause |
|---|
| 214 | |
|---|
| 215 | if getattr(self, '_disable_orm_filtering', not orm_only): |
|---|
| 216 | return visitors.replacement_traverse( |
|---|
| 217 | clause, |
|---|
| 218 | {'column_collections':False}, |
|---|
| 219 | self.__replace_element(adapters) |
|---|
| 220 | ) |
|---|
| 221 | else: |
|---|
| 222 | return visitors.replacement_traverse( |
|---|
| 223 | clause, |
|---|
| 224 | {'column_collections':False}, |
|---|
| 225 | self.__replace_orm_element(adapters) |
|---|
| 226 | ) |
|---|
| 227 | |
|---|
| 228 | def _entity_zero(self): |
|---|
| 229 | return self._entities[0] |
|---|
| 230 | |
|---|
| 231 | def _mapper_zero(self): |
|---|
| 232 | return self._entity_zero().entity_zero |
|---|
| 233 | |
|---|
| 234 | def _extension_zero(self): |
|---|
| 235 | ent = self._entity_zero() |
|---|
| 236 | return getattr(ent, 'extension', ent.mapper.extension) |
|---|
| 237 | |
|---|
| 238 | @property |
|---|
| 239 | def _mapper_entities(self): |
|---|
| 240 | # TODO: this is wrong, its hardcoded to "priamry entity" when |
|---|
| 241 | # for the case of __all_equivs() it should not be |
|---|
| 242 | # the name of this accessor is wrong too |
|---|
| 243 | for ent in self._entities: |
|---|
| 244 | if hasattr(ent, 'primary_entity'): |
|---|
| 245 | yield ent |
|---|
| 246 | |
|---|
| 247 | def _joinpoint_zero(self): |
|---|
| 248 | return self._joinpoint or self._entity_zero().entity_zero |
|---|
| 249 | |
|---|
| 250 | def _mapper_zero_or_none(self): |
|---|
| 251 | if not getattr(self._entities[0], 'primary_entity', False): |
|---|
| 252 | return None |
|---|
| 253 | return self._entities[0].mapper |
|---|
| 254 | |
|---|
| 255 | def _only_mapper_zero(self, rationale=None): |
|---|
| 256 | if len(self._entities) > 1: |
|---|
| 257 | raise sa_exc.InvalidRequestError( |
|---|
| 258 | rationale or "This operation requires a Query against a single mapper." |
|---|
| 259 | ) |
|---|
| 260 | return self._mapper_zero() |
|---|
| 261 | |
|---|
| 262 | def _only_entity_zero(self, rationale=None): |
|---|
| 263 | if len(self._entities) > 1: |
|---|
| 264 | raise sa_exc.InvalidRequestError( |
|---|
| 265 | rationale or "This operation requires a Query against a single mapper." |
|---|
| 266 | ) |
|---|
| 267 | return self._entity_zero() |
|---|
| 268 | |
|---|
| 269 | def _generate_mapper_zero(self): |
|---|
| 270 | if not getattr(self._entities[0], 'primary_entity', False): |
|---|
| 271 | raise sa_exc.InvalidRequestError("No primary mapper set up for this Query.") |
|---|
| 272 | entity = self._entities[0]._clone() |
|---|
| 273 | self._entities = [entity] + self._entities[1:] |
|---|
| 274 | return entity |
|---|
| 275 | |
|---|
| 276 | def __all_equivs(self): |
|---|
| 277 | equivs = {} |
|---|
| 278 | for ent in self._mapper_entities: |
|---|
| 279 | equivs.update(ent.mapper._equivalent_columns) |
|---|
| 280 | return equivs |
|---|
| 281 | |
|---|
| 282 | def _no_criterion_condition(self, meth): |
|---|
| 283 | if not self._enable_assertions: |
|---|
| 284 | return |
|---|
| 285 | if self._criterion or self._statement or self._from_obj or \ |
|---|
| 286 | self._limit is not None or self._offset is not None or \ |
|---|
| 287 | self._group_by: |
|---|
| 288 | raise sa_exc.InvalidRequestError("Query.%s() being called on a Query with existing criterion. " % meth) |
|---|
| 289 | |
|---|
| 290 | self._from_obj = () |
|---|
| 291 | self._statement = self._criterion = None |
|---|
| 292 | self._order_by = self._group_by = self._distinct = False |
|---|
| 293 | |
|---|
| 294 | def _no_clauseelement_condition(self, meth): |
|---|
| 295 | if not self._enable_assertions: |
|---|
| 296 | return |
|---|
| 297 | if self._order_by: |
|---|
| 298 | raise sa_exc.InvalidRequestError("Query.%s() being called on a Query with existing criterion. " % meth) |
|---|
| 299 | self._no_criterion_condition(meth) |
|---|
| 300 | |
|---|
| 301 | def _no_statement_condition(self, meth): |
|---|
| 302 | if not self._enable_assertions: |
|---|
| 303 | return |
|---|
| 304 | if self._statement: |
|---|
| 305 | raise sa_exc.InvalidRequestError( |
|---|
| 306 | ("Query.%s() being called on a Query with an existing full " |
|---|
| 307 | "statement - can't apply criterion.") % meth) |
|---|
| 308 | |
|---|
| 309 | def _no_limit_offset(self, meth): |
|---|
| 310 | if not self._enable_assertions: |
|---|
| 311 | return |
|---|
| 312 | if self._limit is not None or self._offset is not None: |
|---|
| 313 | raise sa_exc.InvalidRequestError( |
|---|
| 314 | "Query.%s() being called on a Query which already has LIMIT or OFFSET applied. " |
|---|
| 315 | "To modify the row-limited results of a Query, call from_self() first. " |
|---|
| 316 | "Otherwise, call %s() before limit() or offset() are applied." % (meth, meth) |
|---|
| 317 | ) |
|---|
| 318 | |
|---|
| 319 | def _no_select_modifiers(self, meth): |
|---|
| 320 | if not self._enable_assertions: |
|---|
| 321 | return |
|---|
| 322 | for attr, methname, notset in ( |
|---|
| 323 | ('_limit', 'limit()', None), |
|---|
| 324 | ('_offset', 'offset()', None), |
|---|
| 325 | ('_order_by', 'order_by()', False), |
|---|
| 326 | ('_group_by', 'group_by()', False), |
|---|
| 327 | ('_distinct', 'distinct()', False), |
|---|
| 328 | ): |
|---|
| 329 | if getattr(self, attr) is not notset: |
|---|
| 330 | raise sa_exc.InvalidRequestError( |
|---|
| 331 | "Can't call Query.%s() when %s has been called" % (meth, methname) |
|---|
| 332 | ) |
|---|
| 333 | |
|---|
| 334 | def _get_options(self, populate_existing=None, |
|---|
| 335 | version_check=None, |
|---|
| 336 | only_load_props=None, |
|---|
| 337 | refresh_state=None): |
|---|
| 338 | if populate_existing: |
|---|
| 339 | self._populate_existing = populate_existing |
|---|
| 340 | if version_check: |
|---|
| 341 | self._version_check = version_check |
|---|
| 342 | if refresh_state: |
|---|
| 343 | self._refresh_state = refresh_state |
|---|
| 344 | if only_load_props: |
|---|
| 345 | self._only_load_props = set(only_load_props) |
|---|
| 346 | return self |
|---|
| 347 | |
|---|
| 348 | def _clone(self): |
|---|
| 349 | cls = self.__class__ |
|---|
| 350 | q = cls.__new__(cls) |
|---|
| 351 | q.__dict__ = self.__dict__.copy() |
|---|
| 352 | return q |
|---|
| 353 | |
|---|
| 354 | @property |
|---|
| 355 | def statement(self): |
|---|
| 356 | """The full SELECT statement represented by this Query.""" |
|---|
| 357 | |
|---|
| 358 | return self._compile_context(labels=self._with_labels).\ |
|---|
| 359 | statement._annotate({'_halt_adapt': True}) |
|---|
| 360 | |
|---|
| 361 | def subquery(self): |
|---|
| 362 | """return the full SELECT statement represented by this Query, embedded within an Alias. |
|---|
| 363 | |
|---|
| 364 | Eager JOIN generation within the query is disabled. |
|---|
| 365 | |
|---|
| 366 | """ |
|---|
| 367 | return self.enable_eagerloads(False).statement.alias() |
|---|
| 368 | |
|---|
| 369 | def __clause_element__(self): |
|---|
| 370 | return self.enable_eagerloads(False).statement |
|---|
| 371 | |
|---|
| 372 | @_generative() |
|---|
| 373 | def enable_eagerloads(self, value): |
|---|
| 374 | """Control whether or not eager joins are rendered. |
|---|
| 375 | |
|---|
| 376 | When set to False, the returned Query will not render |
|---|
| 377 | eager joins regardless of eagerload() options |
|---|
| 378 | or mapper-level lazy=False configurations. |
|---|
| 379 | |
|---|
| 380 | This is used primarily when nesting the Query's |
|---|
| 381 | statement into a subquery or other |
|---|
| 382 | selectable. |
|---|
| 383 | |
|---|
| 384 | """ |
|---|
| 385 | self._enable_eagerloads = value |
|---|
| 386 | |
|---|
| 387 | @_generative() |
|---|
| 388 | def with_labels(self): |
|---|
| 389 | """Apply column labels to the return value of Query.statement. |
|---|
| 390 | |
|---|
| 391 | Indicates that this Query's `statement` accessor should return |
|---|
| 392 | a SELECT statement that applies labels to all columns in the |
|---|
| 393 | form <tablename>_<columnname>; this is commonly used to |
|---|
| 394 | disambiguate columns from multiple tables which have the same |
|---|
| 395 | name. |
|---|
| 396 | |
|---|
| 397 | When the `Query` actually issues SQL to load rows, it always |
|---|
| 398 | uses column labeling. |
|---|
| 399 | |
|---|
| 400 | """ |
|---|
| 401 | self._with_labels = True |
|---|
| 402 | |
|---|
| 403 | @_generative() |
|---|
| 404 | def enable_assertions(self, value): |
|---|
| 405 | """Control whether assertions are generated. |
|---|
| 406 | |
|---|
| 407 | When set to False, the returned Query will |
|---|
| 408 | not assert its state before certain operations, |
|---|
| 409 | including that LIMIT/OFFSET has not been applied |
|---|
| 410 | when filter() is called, no criterion exists |
|---|
| 411 | when get() is called, and no "from_statement()" |
|---|
| 412 | exists when filter()/order_by()/group_by() etc. |
|---|
| 413 | is called. This more permissive mode is used by |
|---|
| 414 | custom Query subclasses to specify criterion or |
|---|
| 415 | other modifiers outside of the usual usage patterns. |
|---|
| 416 | |
|---|
| 417 | Care should be taken to ensure that the usage |
|---|
| 418 | pattern is even possible. A statement applied |
|---|
| 419 | by from_statement() will override any criterion |
|---|
| 420 | set by filter() or order_by(), for example. |
|---|
| 421 | |
|---|
| 422 | """ |
|---|
| 423 | self._enable_assertions = value |
|---|
| 424 | |
|---|
| 425 | @property |
|---|
| 426 | def whereclause(self): |
|---|
| 427 | """The WHERE criterion for this Query.""" |
|---|
| 428 | return self._criterion |
|---|
| 429 | |
|---|
| 430 | @_generative() |
|---|
| 431 | def _with_current_path(self, path): |
|---|
| 432 | """indicate that this query applies to objects loaded within a certain path. |
|---|
| 433 | |
|---|
| 434 | Used by deferred loaders (see strategies.py) which transfer query |
|---|
| 435 | options from an originating query to a newly generated query intended |
|---|
| 436 | for the deferred load. |
|---|
| 437 | |
|---|
| 438 | """ |
|---|
| 439 | self._current_path = path |
|---|
| 440 | |
|---|
| 441 | @_generative(_no_clauseelement_condition) |
|---|
| 442 | def with_polymorphic(self, cls_or_mappers, selectable=None, discriminator=None): |
|---|
| 443 | """Load columns for descendant mappers of this Query's mapper. |
|---|
| 444 | |
|---|
| 445 | Using this method will ensure that each descendant mapper's |
|---|
| 446 | tables are included in the FROM clause, and will allow filter() |
|---|
| 447 | criterion to be used against those tables. The resulting |
|---|
| 448 | instances will also have those columns already loaded so that |
|---|
| 449 | no "post fetch" of those columns will be required. |
|---|
| 450 | |
|---|
| 451 | :param cls_or_mappers: a single class or mapper, or list of class/mappers, |
|---|
| 452 | which inherit from this Query's mapper. Alternatively, it |
|---|
| 453 | may also be the string ``'*'``, in which case all descending |
|---|
| 454 | mappers will be added to the FROM clause. |
|---|
| 455 | |
|---|
| 456 | :param selectable: a table or select() statement that will |
|---|
| 457 | be used in place of the generated FROM clause. This argument |
|---|
| 458 | is required if any of the desired mappers use concrete table |
|---|
| 459 | inheritance, since SQLAlchemy currently cannot generate UNIONs |
|---|
| 460 | among tables automatically. If used, the ``selectable`` |
|---|
| 461 | argument must represent the full set of tables and columns mapped |
|---|
| 462 | by every desired mapper. Otherwise, the unaccounted mapped columns |
|---|
| 463 | will result in their table being appended directly to the FROM |
|---|
| 464 | clause which will usually lead to incorrect results. |
|---|
| 465 | |
|---|
| 466 | :param discriminator: a column to be used as the "discriminator" |
|---|
| 467 | column for the given selectable. If not given, the polymorphic_on |
|---|
| 468 | attribute of the mapper will be used, if any. This is useful |
|---|
| 469 | for mappers that don't have polymorphic loading behavior by default, |
|---|
| 470 | such as concrete table mappers. |
|---|
| 471 | |
|---|
| 472 | """ |
|---|
| 473 | entity = self._generate_mapper_zero() |
|---|
| 474 | entity.set_with_polymorphic(self, cls_or_mappers, selectable=selectable, discriminator=discriminator) |
|---|
| 475 | |
|---|
| 476 | @_generative() |
|---|
| 477 | def yield_per(self, count): |
|---|
| 478 | """Yield only ``count`` rows at a time. |
|---|
| 479 | |
|---|
| 480 | WARNING: use this method with caution; if the same instance is present |
|---|
| 481 | in more than one batch of rows, end-user changes to attributes will be |
|---|
| 482 | overwritten. |
|---|
| 483 | |
|---|
| 484 | In particular, it's usually impossible to use this setting with |
|---|
| 485 | eagerly loaded collections (i.e. any lazy=False) since those |
|---|
| 486 | collections will be cleared for a new load when encountered in a |
|---|
| 487 | subsequent result batch. |
|---|
| 488 | |
|---|
| 489 | """ |
|---|
| 490 | self._yield_per = count |
|---|
| 491 | |
|---|
| 492 | def get(self, ident): |
|---|
| 493 | """Return an instance of the object based on the given identifier, or None if not found. |
|---|
| 494 | |
|---|
| 495 | The `ident` argument is a scalar or tuple of primary key column values |
|---|
| 496 | in the order of the table def's primary key columns. |
|---|
| 497 | |
|---|
| 498 | """ |
|---|
| 499 | |
|---|
| 500 | # convert composite types to individual args |
|---|
| 501 | if hasattr(ident, '__composite_values__'): |
|---|
| 502 | ident = ident.__composite_values__() |
|---|
| 503 | |
|---|
| 504 | key = self._only_mapper_zero( |
|---|
| 505 | "get() can only be used against a single mapped class." |
|---|
| 506 | ).identity_key_from_primary_key(ident) |
|---|
| 507 | return self._get(key, ident) |
|---|
| 508 | |
|---|
| 509 | @classmethod |
|---|
| 510 | @util.deprecated('Deprecated. Use sqlalchemy.orm.with_parent ' |
|---|
| 511 | 'in conjunction with filter().') |
|---|
| 512 | def query_from_parent(cls, instance, property, **kwargs): |
|---|
| 513 | """Return a new Query with criterion corresponding to a parent instance. |
|---|
| 514 | |
|---|
| 515 | Return a newly constructed Query object, with criterion corresponding |
|---|
| 516 | to a relationship to the given parent instance. |
|---|
| 517 | |
|---|
| 518 | instance |
|---|
| 519 | a persistent or detached instance which is related to class |
|---|
| 520 | represented by this query. |
|---|
| 521 | |
|---|
| 522 | property |
|---|
| 523 | string name of the property which relates this query's class to the |
|---|
| 524 | instance. |
|---|
| 525 | |
|---|
| 526 | \**kwargs |
|---|
| 527 | all extra keyword arguments are propagated to the constructor of |
|---|
| 528 | Query. |
|---|
| 529 | |
|---|
| 530 | """ |
|---|
| 531 | mapper = object_mapper(instance) |
|---|
| 532 | prop = mapper.get_property(property, resolve_synonyms=True) |
|---|
| 533 | target = prop.mapper |
|---|
| 534 | criterion = prop.compare(operators.eq, instance, value_is_parent=True) |
|---|
| 535 | return Query(target, **kwargs).filter(criterion) |
|---|
| 536 | |
|---|
| 537 | @_generative() |
|---|
| 538 | def correlate(self, *args): |
|---|
| 539 | self._correlate = self._correlate.union(_orm_selectable(s) for s in args) |
|---|
| 540 | |
|---|
| 541 | @_generative() |
|---|
| 542 | def autoflush(self, setting): |
|---|
| 543 | """Return a Query with a specific 'autoflush' setting. |
|---|
| 544 | |
|---|
| 545 | Note that a Session with autoflush=False will |
|---|
| 546 | not autoflush, even if this flag is set to True at the |
|---|
| 547 | Query level. Therefore this flag is usually used only |
|---|
| 548 | to disable autoflush for a specific Query. |
|---|
| 549 | |
|---|
| 550 | """ |
|---|
| 551 | self._autoflush = setting |
|---|
| 552 | |
|---|
| 553 | @_generative() |
|---|
| 554 | def populate_existing(self): |
|---|
| 555 | """Return a Query that will refresh all instances loaded. |
|---|
| 556 | |
|---|
| 557 | This includes all entities accessed from the database, including |
|---|
| 558 | secondary entities, eagerly-loaded collection items. |
|---|
| 559 | |
|---|
| 560 | All changes present on entities which are already present in the |
|---|
| 561 | session will be reset and the entities will all be marked "clean". |
|---|
| 562 | |
|---|
| 563 | An alternative to populate_existing() is to expire the Session |
|---|
| 564 | fully using session.expire_all(). |
|---|
| 565 | |
|---|
| 566 | """ |
|---|
| 567 | self._populate_existing = True |
|---|
| 568 | |
|---|
| 569 | def with_parent(self, instance, property=None): |
|---|
| 570 | """Add a join criterion corresponding to a relationship to the given |
|---|
| 571 | parent instance. |
|---|
| 572 | |
|---|
| 573 | instance |
|---|
| 574 | a persistent or detached instance which is related to class |
|---|
| 575 | represented by this query. |
|---|
| 576 | |
|---|
| 577 | property |
|---|
| 578 | string name of the property which relates this query's class to the |
|---|
| 579 | instance. if None, the method will attempt to find a suitable |
|---|
| 580 | property. |
|---|
| 581 | |
|---|
| 582 | Currently, this method only works with immediate parent relationships, |
|---|
| 583 | but in the future may be enhanced to work across a chain of parent |
|---|
| 584 | mappers. |
|---|
| 585 | |
|---|
| 586 | """ |
|---|
| 587 | from sqlalchemy.orm import properties |
|---|
| 588 | mapper = object_mapper(instance) |
|---|
| 589 | if property is None: |
|---|
| 590 | for prop in mapper.iterate_properties: |
|---|
| 591 | if isinstance(prop, properties.PropertyLoader) and prop.mapper is self._mapper_zero(): |
|---|
| 592 | break |
|---|
| 593 | else: |
|---|
| 594 | raise sa_exc.InvalidRequestError( |
|---|
| 595 | "Could not locate a property which relates instances " |
|---|
| 596 | "of class '%s' to instances of class '%s'" % |
|---|
| 597 | (self._mapper_zero().class_.__name__, instance.__class__.__name__) |
|---|
| 598 | ) |
|---|
| 599 | else: |
|---|
| 600 | prop = mapper.get_property(property, resolve_synonyms=True) |
|---|
| 601 | return self.filter(prop.compare(operators.eq, instance, value_is_parent=True)) |
|---|
| 602 | |
|---|
| 603 | @_generative() |
|---|
| 604 | def add_entity(self, entity, alias=None): |
|---|
| 605 | """add a mapped entity to the list of result columns to be returned.""" |
|---|
| 606 | |
|---|
| 607 | if alias: |
|---|
| 608 | entity = aliased(entity, alias) |
|---|
| 609 | |
|---|
| 610 | self._entities = list(self._entities) |
|---|
| 611 | m = _MapperEntity(self, entity) |
|---|
| 612 | self._setup_aliasizers([m]) |
|---|
| 613 | |
|---|
| 614 | def from_self(self, *entities): |
|---|
| 615 | """return a Query that selects from this Query's SELECT statement. |
|---|
| 616 | |
|---|
| 617 | \*entities - optional list of entities which will replace |
|---|
| 618 | those being selected. |
|---|
| 619 | |
|---|
| 620 | """ |
|---|
| 621 | fromclause = self.with_labels().enable_eagerloads(False).statement.correlate(None) |
|---|
| 622 | q = self._from_selectable(fromclause) |
|---|
| 623 | if entities: |
|---|
| 624 | q._set_entities(entities) |
|---|
| 625 | return q |
|---|
| 626 | |
|---|
| 627 | _from_self = from_self |
|---|
| 628 | |
|---|
| 629 | @_generative() |
|---|
| 630 | def _from_selectable(self, fromclause): |
|---|
| 631 | self._statement = self._criterion = None |
|---|
| 632 | self._order_by = self._group_by = self._distinct = False |
|---|
| 633 | self._limit = self._offset = None |
|---|
| 634 | self._set_select_from(fromclause) |
|---|
| 635 | |
|---|
| 636 | def values(self, *columns): |
|---|
| 637 | """Return an iterator yielding result tuples corresponding to the given list of columns""" |
|---|
| 638 | |
|---|
| 639 | if not columns: |
|---|
| 640 | return iter(()) |
|---|
| 641 | q = self._clone() |
|---|
| 642 | q._set_entities(columns, entity_wrapper=_ColumnEntity) |
|---|
| 643 | if not q._yield_per: |
|---|
| 644 | q._yield_per = 10 |
|---|
| 645 | return iter(q) |
|---|
| 646 | _values = values |
|---|
| 647 | |
|---|
| 648 | def value(self, column): |
|---|
| 649 | """Return a scalar result corresponding to the given column expression.""" |
|---|
| 650 | try: |
|---|
| 651 | return self.values(column).next()[0] |
|---|
| 652 | except StopIteration: |
|---|
| 653 | return None |
|---|
| 654 | |
|---|
| 655 | @_generative() |
|---|
| 656 | def add_column(self, column): |
|---|
| 657 | """Add a SQL ColumnElement to the list of result columns to be returned.""" |
|---|
| 658 | |
|---|
| 659 | self._entities = list(self._entities) |
|---|
| 660 | l = len(self._entities) |
|---|
| 661 | _ColumnEntity(self, column) |
|---|
| 662 | # _ColumnEntity may add many entities if the |
|---|
| 663 | # given arg is a FROM clause |
|---|
| 664 | self._setup_aliasizers(self._entities[l:]) |
|---|
| 665 | |
|---|
| 666 | def options(self, *args): |
|---|
| 667 | """Return a new Query object, applying the given list of |
|---|
| 668 | MapperOptions. |
|---|
| 669 | |
|---|
| 670 | """ |
|---|
| 671 | return self._options(False, *args) |
|---|
| 672 | |
|---|
| 673 | def _conditional_options(self, *args): |
|---|
| 674 | return self._options(True, *args) |
|---|
| 675 | |
|---|
| 676 | @_generative() |
|---|
| 677 | def _options(self, conditional, *args): |
|---|
| 678 | # most MapperOptions write to the '_attributes' dictionary, |
|---|
| 679 | # so copy that as well |
|---|
| 680 | self._attributes = self._attributes.copy() |
|---|
| 681 | opts = [o for o in util.flatten_iterator(args)] |
|---|
| 682 | self._with_options = self._with_options + opts |
|---|
| 683 | if conditional: |
|---|
| 684 | for opt in opts: |
|---|
| 685 | opt.process_query_conditionally(self) |
|---|
| 686 | else: |
|---|
| 687 | for opt in opts: |
|---|
| 688 | opt.process_query(self) |
|---|
| 689 | |
|---|
| 690 | @_generative() |
|---|
| 691 | def with_lockmode(self, mode): |
|---|
| 692 | """Return a new Query object with the specified locking mode.""" |
|---|
| 693 | |
|---|
| 694 | self._lockmode = mode |
|---|
| 695 | |
|---|
| 696 | @_generative() |
|---|
| 697 | def params(self, *args, **kwargs): |
|---|
| 698 | """add values for bind parameters which may have been specified in filter(). |
|---|
| 699 | |
|---|
| 700 | parameters may be specified using \**kwargs, or optionally a single dictionary |
|---|
| 701 | as the first positional argument. The reason for both is that \**kwargs is |
|---|
| 702 | convenient, however some parameter dictionaries contain unicode keys in which case |
|---|
| 703 | \**kwargs cannot be used. |
|---|
| 704 | |
|---|
| 705 | """ |
|---|
| 706 | if len(args) == 1: |
|---|
| 707 | kwargs.update(args[0]) |
|---|
| 708 | elif len(args) > 0: |
|---|
| 709 | raise sa_exc.ArgumentError("params() takes zero or one positional argument, which is a dictionary.") |
|---|
| 710 | self._params = self._params.copy() |
|---|
| 711 | self._params.update(kwargs) |
|---|
| 712 | |
|---|
| 713 | @_generative(_no_statement_condition, _no_limit_offset) |
|---|
| 714 | def filter(self, criterion): |
|---|
| 715 | """apply the given filtering criterion to the query and return the newly resulting ``Query`` |
|---|
| 716 | |
|---|
| 717 | the criterion is any sql.ClauseElement applicable to the WHERE clause of a select. |
|---|
| 718 | |
|---|
| 719 | """ |
|---|
| 720 | if isinstance(criterion, basestring): |
|---|
| 721 | criterion = sql.text(criterion) |
|---|
| 722 | |
|---|
| 723 | if criterion is not None and not isinstance(criterion, sql.ClauseElement): |
|---|
| 724 | raise sa_exc.ArgumentError("filter() argument must be of type sqlalchemy.sql.ClauseElement or string") |
|---|
| 725 | |
|---|
| 726 | criterion = self._adapt_clause(criterion, True, True) |
|---|
| 727 | |
|---|
| 728 | if self._criterion is not None: |
|---|
| 729 | self._criterion = self._criterion & criterion |
|---|
| 730 | else: |
|---|
| 731 | self._criterion = criterion |
|---|
| 732 | |
|---|
| 733 | def filter_by(self, **kwargs): |
|---|
| 734 | """apply the given filtering criterion to the query and return the newly resulting ``Query``.""" |
|---|
| 735 | |
|---|
| 736 | clauses = [_entity_descriptor(self._joinpoint_zero(), key)[0] == value |
|---|
| 737 | for key, value in kwargs.iteritems()] |
|---|
| 738 | |
|---|
| 739 | return self.filter(sql.and_(*clauses)) |
|---|
| 740 | |
|---|
| 741 | |
|---|
| 742 | @_generative(_no_statement_condition, _no_limit_offset) |
|---|
| 743 | @util.accepts_a_list_as_starargs(list_deprecation='pending') |
|---|
| 744 | def order_by(self, *criterion): |
|---|
| 745 | """apply one or more ORDER BY criterion to the query and return the newly resulting ``Query``""" |
|---|
| 746 | |
|---|
| 747 | if len(criterion) == 1 and criterion[0] is None: |
|---|
| 748 | self._order_by = None |
|---|
| 749 | else: |
|---|
| 750 | criterion = [self._adapt_clause(expression._literal_as_text(o), True, True) for o in criterion] |
|---|
| 751 | |
|---|
| 752 | if self._order_by is False or self._order_by is None: |
|---|
| 753 | self._order_by = criterion |
|---|
| 754 | else: |
|---|
| 755 | self._order_by = self._order_by + criterion |
|---|
| 756 | |
|---|
| 757 | @_generative(_no_statement_condition, _no_limit_offset) |
|---|
| 758 | @util.accepts_a_list_as_starargs(list_deprecation='pending') |
|---|
| 759 | def group_by(self, *criterion): |
|---|
| 760 | """apply one or more GROUP BY criterion to the query and return the newly resulting ``Query``""" |
|---|
| 761 | |
|---|
| 762 | criterion = list(chain(*[_orm_columns(c) for c in criterion])) |
|---|
| 763 | |
|---|
| 764 | criterion = [self._adapt_clause(expression._literal_as_text(o), True, True) for o in criterion] |
|---|
| 765 | |
|---|
| 766 | if self._group_by is False: |
|---|
| 767 | self._group_by = criterion |
|---|
| 768 | else: |
|---|
| 769 | self._group_by = self._group_by + criterion |
|---|
| 770 | |
|---|
| 771 | @_generative(_no_statement_condition, _no_limit_offset) |
|---|
| 772 | def having(self, criterion): |
|---|
| 773 | """apply a HAVING criterion to the query and return the newly resulting ``Query``.""" |
|---|
| 774 | |
|---|
| 775 | if isinstance(criterion, basestring): |
|---|
| 776 | criterion = sql.text(criterion) |
|---|
| 777 | |
|---|
| 778 | if criterion is not None and not isinstance(criterion, sql.ClauseElement): |
|---|
| 779 | raise sa_exc.ArgumentError("having() argument must be of type sqlalchemy.sql.ClauseElement or string") |
|---|
| 780 | |
|---|
| 781 | criterion = self._adapt_clause(criterion, True, True) |
|---|
| 782 | |
|---|
| 783 | if self._having is not None: |
|---|
| 784 | self._having = self._having & criterion |
|---|
| 785 | else: |
|---|
| 786 | self._having = criterion |
|---|
| 787 | |
|---|
| 788 | def union(self, *q): |
|---|
| 789 | """Produce a UNION of this Query against one or more queries. |
|---|
| 790 | |
|---|
| 791 | e.g.:: |
|---|
| 792 | |
|---|
| 793 | q1 = sess.query(SomeClass).filter(SomeClass.foo=='bar') |
|---|
| 794 | q2 = sess.query(SomeClass).filter(SomeClass.bar=='foo') |
|---|
| 795 | |
|---|
| 796 | q3 = q1.union(q2) |
|---|
| 797 | |
|---|
| 798 | The method accepts multiple Query objects so as to control |
|---|
| 799 | the level of nesting. A series of ``union()`` calls such as:: |
|---|
| 800 | |
|---|
| 801 | x.union(y).union(z).all() |
|---|
| 802 | |
|---|
| 803 | will nest on each ``union()``, and produces:: |
|---|
| 804 | |
|---|
| 805 | SELECT * FROM (SELECT * FROM (SELECT * FROM X UNION SELECT * FROM y) UNION SELECT * FROM Z) |
|---|
| 806 | |
|---|
| 807 | Whereas:: |
|---|
| 808 | |
|---|
| 809 | x.union(y, z).all() |
|---|
| 810 | |
|---|
| 811 | produces:: |
|---|
| 812 | |
|---|
| 813 | SELECT * FROM (SELECT * FROM X UNION SELECT * FROM y UNION SELECT * FROM Z) |
|---|
| 814 | |
|---|
| 815 | """ |
|---|
| 816 | return self._from_selectable( |
|---|
| 817 | expression.union(*([self]+ list(q)))) |
|---|
| 818 | |
|---|
| 819 | def union_all(self, *q): |
|---|
| 820 | """Produce a UNION ALL of this Query against one or more queries. |
|---|
| 821 | |
|---|
| 822 | Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See that |
|---|
| 823 | method for usage examples. |
|---|
| 824 | |
|---|
| 825 | """ |
|---|
| 826 | return self._from_selectable( |
|---|
| 827 | expression.union_all(*([self]+ list(q))) |
|---|
| 828 | ) |
|---|
| 829 | |
|---|
| 830 | def intersect(self, *q): |
|---|
| 831 | """Produce an INTERSECT of this Query against one or more queries. |
|---|
| 832 | |
|---|
| 833 | Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See that |
|---|
| 834 | method for usage examples. |
|---|
| 835 | |
|---|
| 836 | """ |
|---|
| 837 | return self._from_selectable( |
|---|
| 838 | expression.intersect(*([self]+ list(q))) |
|---|
| 839 | ) |
|---|
| 840 | |
|---|
| 841 | def intersect_all(self, *q): |
|---|
| 842 | """Produce an INTERSECT ALL of this Query against one or more queries. |
|---|
| 843 | |
|---|
| 844 | Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See that |
|---|
| 845 | method for usage examples. |
|---|
| 846 | |
|---|
| 847 | """ |
|---|
| 848 | return self._from_selectable( |
|---|
| 849 | expression.intersect_all(*([self]+ list(q))) |
|---|
| 850 | ) |
|---|
| 851 | |
|---|
| 852 | def except_(self, *q): |
|---|
| 853 | """Produce an EXCEPT of this Query against one or more queries. |
|---|
| 854 | |
|---|
| 855 | Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See that |
|---|
| 856 | method for usage examples. |
|---|
| 857 | |
|---|
| 858 | """ |
|---|
| 859 | return self._from_selectable( |
|---|
| 860 | expression.except_(*([self]+ list(q))) |
|---|
| 861 | ) |
|---|
| 862 | |
|---|
| 863 | def except_all(self, *q): |
|---|
| 864 | """Produce an EXCEPT ALL of this Query against one or more queries. |
|---|
| 865 | |
|---|
| 866 | Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See that |
|---|
| 867 | method for usage examples. |
|---|
| 868 | |
|---|
| 869 | """ |
|---|
| 870 | return self._from_selectable( |
|---|
| 871 | expression.except_all(*([self]+ list(q))) |
|---|
| 872 | ) |
|---|
| 873 | |
|---|
| 874 | @util.accepts_a_list_as_starargs(list_deprecation='pending') |
|---|
| 875 | def join(self, *props, **kwargs): |
|---|
| 876 | """Create a join against this ``Query`` object's criterion |
|---|
| 877 | and apply generatively, returning the newly resulting ``Query``. |
|---|
| 878 | |
|---|
| 879 | Each element in \*props may be: |
|---|
| 880 | |
|---|
| 881 | * a string property name, i.e. "rooms". This will join along the |
|---|
| 882 | relation of the same name from this Query's "primary" mapper, if |
|---|
| 883 | one is present. |
|---|
| 884 | |
|---|
| 885 | * a class-mapped attribute, i.e. Houses.rooms. This will create a |
|---|
| 886 | join from "Houses" table to that of the "rooms" relation. |
|---|
| 887 | |
|---|
| 888 | * a 2-tuple containing a target class or selectable, and an "ON" |
|---|
| 889 | clause. The ON clause can be the property name/ attribute like |
|---|
| 890 | above, or a SQL expression. |
|---|
| 891 | |
|---|
| 892 | e.g.:: |
|---|
| 893 | |
|---|
| 894 | # join along string attribute names |
|---|
| 895 | session.query(Company).join('employees') |
|---|
| 896 | session.query(Company).join('employees', 'tasks') |
|---|
| 897 | |
|---|
| 898 | # join the Person entity to an alias of itself, |
|---|
| 899 | # along the "friends" relation |
|---|
| 900 | PAlias = aliased(Person) |
|---|
| 901 | session.query(Person).join((Palias, Person.friends)) |
|---|
| 902 | |
|---|
| 903 | # join from Houses to the "rooms" attribute on the |
|---|
| 904 | # "Colonials" subclass of Houses, then join to the |
|---|
| 905 | # "closets" relation on Room |
|---|
| 906 | session.query(Houses).join(Colonials.rooms, Room.closets) |
|---|
| 907 | |
|---|
| 908 | # join from Company entities to the "employees" collection, |
|---|
| 909 | # using "people JOIN engineers" as the target. Then join |
|---|
| 910 | # to the "computers" collection on the Engineer entity. |
|---|
| 911 | session.query(Company).join((people.join(engineers), 'employees'), Engineer.computers) |
|---|
| 912 | |
|---|
| 913 | # join from Articles to Keywords, using the "keywords" attribute. |
|---|
| 914 | # assume this is a many-to-many relation. |
|---|
| 915 | session.query(Article).join(Article.keywords) |
|---|
| 916 | |
|---|
| 917 | # same thing, but spelled out entirely explicitly |
|---|
| 918 | # including the association table. |
|---|
| 919 | session.query(Article).join( |
|---|
| 920 | (article_keywords, Articles.id==article_keywords.c.article_id), |
|---|
| 921 | (Keyword, Keyword.id==article_keywords.c.keyword_id) |
|---|
| 922 | ) |
|---|
| 923 | |
|---|
| 924 | \**kwargs include: |
|---|
| 925 | |
|---|
| 926 | aliased - when joining, create anonymous aliases of each table. This is |
|---|
| 927 | used for self-referential joins or multiple joins to the same table. |
|---|
| 928 | Consider usage of the aliased(SomeClass) construct as a more explicit |
|---|
| 929 | approach to this. |
|---|
| 930 | |
|---|
| 931 | from_joinpoint - when joins are specified using string property names, |
|---|
| 932 | locate the property from the mapper found in the most recent previous |
|---|
| 933 | join() call, instead of from the root entity. |
|---|
| 934 | |
|---|
| 935 | """ |
|---|
| 936 | aliased, from_joinpoint = kwargs.pop('aliased', False), kwargs.pop('from_joinpoint', False) |
|---|
| 937 | if kwargs: |
|---|
| 938 | raise TypeError("unknown arguments: %s" % ','.join(kwargs.iterkeys())) |
|---|
| 939 | return self._join(props, outerjoin=False, create_aliases=aliased, from_joinpoint=from_joinpoint) |
|---|
| 940 | |
|---|
| 941 | @util.accepts_a_list_as_starargs(list_deprecation='pending') |
|---|
| 942 | def outerjoin(self, *props, **kwargs): |
|---|
| 943 | """Create a left outer join against this ``Query`` object's criterion |
|---|
| 944 | and apply generatively, retunring the newly resulting ``Query``. |
|---|
| 945 | |
|---|
| 946 | Usage is the same as the ``join()`` method. |
|---|
| 947 | |
|---|
| 948 | """ |
|---|
| 949 | aliased, from_joinpoint = kwargs.pop('aliased', False), kwargs.pop('from_joinpoint', False) |
|---|
| 950 | if kwargs: |
|---|
| 951 | raise TypeError("unknown arguments: %s" % ','.join(kwargs.iterkeys())) |
|---|
| 952 | return self._join(props, outerjoin=True, create_aliases=aliased, from_joinpoint=from_joinpoint) |
|---|
| 953 | |
|---|
| 954 | @_generative(_no_statement_condition, _no_limit_offset) |
|---|
| 955 | def _join(self, keys, outerjoin, create_aliases, from_joinpoint): |
|---|
| 956 | |
|---|
| 957 | # copy collections that may mutate so they do not affect |
|---|
| 958 | # the copied-from query. |
|---|
| 959 | self._currenttables = set(self._currenttables) |
|---|
| 960 | self._polymorphic_adapters = self._polymorphic_adapters.copy() |
|---|
| 961 | |
|---|
| 962 | # start from the beginning unless from_joinpoint is set. |
|---|
| 963 | if not from_joinpoint: |
|---|
| 964 | self._reset_joinpoint() |
|---|
| 965 | |
|---|
| 966 | clause = replace_clause_index = None |
|---|
| 967 | |
|---|
| 968 | # after the method completes, |
|---|
| 969 | # the query's joinpoint will be set to this. |
|---|
| 970 | right_entity = None |
|---|
| 971 | |
|---|
| 972 | for arg1 in util.to_list(keys): |
|---|
| 973 | aliased_entity = False |
|---|
| 974 | alias_criterion = False |
|---|
| 975 | left_entity = right_entity |
|---|
| 976 | prop = of_type = right_entity = right_mapper = None |
|---|
| 977 | |
|---|
| 978 | # distinguish between tuples, scalar args |
|---|
| 979 | if isinstance(arg1, tuple): |
|---|
| 980 | arg1, arg2 = arg1 |
|---|
| 981 | else: |
|---|
| 982 | arg2 = None |
|---|
| 983 | |
|---|
| 984 | # determine onclause/right_entity. there |
|---|
| 985 | # is a little bit of legacy behavior still at work here |
|---|
| 986 | # which means they might be in either order. may possibly |
|---|
| 987 | # lock this down to (right_entity, onclause) in 0.6. |
|---|
| 988 | if isinstance(arg2, (interfaces.PropComparator, basestring)): |
|---|
| 989 | onclause = arg2 |
|---|
| 990 | right_entity = arg1 |
|---|
| 991 | elif isinstance(arg1, (interfaces.PropComparator, basestring)): |
|---|
| 992 | onclause = arg1 |
|---|
| 993 | right_entity = arg2 |
|---|
| 994 | else: |
|---|
| 995 | onclause = arg2 |
|---|
| 996 | right_entity = arg1 |
|---|
| 997 | |
|---|
| 998 | # extract info from the onclause argument, determine |
|---|
| 999 | # left_entity and right_entity. |
|---|
| 1000 | if isinstance(onclause, interfaces.PropComparator): |
|---|
| 1001 | of_type = getattr(onclause, '_of_type', None) |
|---|
| 1002 | prop = onclause.property |
|---|
| 1003 | descriptor = onclause |
|---|
| 1004 | |
|---|
| 1005 | if not left_entity: |
|---|
| 1006 | left_entity = onclause.parententity |
|---|
| 1007 | |
|---|
| 1008 | if of_type: |
|---|
| 1009 | right_mapper = of_type |
|---|
| 1010 | else: |
|---|
| 1011 | right_mapper = prop.mapper |
|---|
| 1012 | |
|---|
| 1013 | if not right_entity: |
|---|
| 1014 | right_entity = right_mapper |
|---|
| 1015 | |
|---|
| 1016 | elif isinstance(onclause, basestring): |
|---|
| 1017 | if not left_entity: |
|---|
| 1018 | left_entity = self._joinpoint_zero() |
|---|
| 1019 | |
|---|
| 1020 | descriptor, prop = _entity_descriptor(left_entity, onclause) |
|---|
| 1021 | right_mapper = prop.mapper |
|---|
| 1022 | |
|---|
| 1023 | if not right_entity: |
|---|
| 1024 | right_entity = right_mapper |
|---|
| 1025 | elif not left_entity: |
|---|
| 1026 | left_entity = self._joinpoint_zero() |
|---|
| 1027 | |
|---|
| 1028 | if not clause and self._from_obj: |
|---|
| 1029 | mp, left_selectable, is_aliased_class = _entity_info(left_entity) |
|---|
| 1030 | |
|---|
| 1031 | replace_clause_index, clause = sql_util.find_join_source(self._from_obj, left_selectable) |
|---|
| 1032 | if not clause: |
|---|
| 1033 | clause = left_selectable |
|---|
| 1034 | |
|---|
| 1035 | if not clause and left_entity: |
|---|
| 1036 | for ent in self._entities: |
|---|
| 1037 | if ent.corresponds_to(left_entity): |
|---|
| 1038 | clause = ent.selectable |
|---|
| 1039 | break |
|---|
| 1040 | |
|---|
| 1041 | # TODO: |
|---|
| 1042 | # this provides one kind of "backwards join" |
|---|
| 1043 | # tested in test/orm/query.py. |
|---|
| 1044 | # removal of this has been considered, but maybe not |
|---|
| 1045 | # see [ticket:1445] |
|---|
| 1046 | if not clause: |
|---|
| 1047 | if isinstance(onclause, interfaces.PropComparator): |
|---|
| 1048 | clause = onclause.__clause_element__() |
|---|
| 1049 | |
|---|
| 1050 | if not clause: |
|---|
| 1051 | raise sa_exc.InvalidRequestError("Could not find a FROM clause to join from") |
|---|
| 1052 | |
|---|
| 1053 | # if we have a MapperProperty and the onclause is not already |
|---|
| 1054 | # an instrumented descriptor. this catches of_type() |
|---|
| 1055 | # PropComparators and string-based on clauses. |
|---|
| 1056 | if prop and not isinstance(onclause, attributes.QueryableAttribute): |
|---|
| 1057 | onclause = prop |
|---|
| 1058 | |
|---|
| 1059 | # start looking at the right side of the join |
|---|
| 1060 | |
|---|
| 1061 | mp, right_selectable, is_aliased_class = _entity_info(right_entity) |
|---|
| 1062 | |
|---|
| 1063 | if mp is not None and right_mapper is not None and not mp.common_parent(right_mapper): |
|---|
| 1064 | raise sa_exc.InvalidRequestError( |
|---|
| 1065 | "Join target %s does not correspond to the right side of join condition %s" % (right_entity, onclause) |
|---|
| 1066 | ) |
|---|
| 1067 | |
|---|
| 1068 | if not right_mapper and mp: |
|---|
| 1069 | right_mapper = mp |
|---|
| 1070 | |
|---|
| 1071 | # determine if we need to wrap the right hand side in an alias. |
|---|
| 1072 | # this occurs based on the create_aliases flag, or if the target |
|---|
| 1073 | # is a selectable, Join, or polymorphically-loading mapper |
|---|
| 1074 | if right_mapper and not is_aliased_class: |
|---|
| 1075 | if right_entity is right_selectable: |
|---|
| 1076 | |
|---|
| 1077 | if not right_selectable.is_derived_from(right_mapper.mapped_table): |
|---|
| 1078 | raise sa_exc.InvalidRequestError( |
|---|
| 1079 | "Selectable '%s' is not derived from '%s'" % |
|---|
| 1080 | (right_selectable.description, right_mapper.mapped_table.description)) |
|---|
| 1081 | |
|---|
| 1082 | if not isinstance(right_selectable, expression.Alias): |
|---|
| 1083 | right_selectable = right_selectable.alias() |
|---|
| 1084 | |
|---|
| 1085 | right_entity = aliased(right_mapper, right_selectable) |
|---|
| 1086 | alias_criterion = True |
|---|
| 1087 | |
|---|
| 1088 | elif create_aliases: |
|---|
| 1089 | right_entity = aliased(right_mapper) |
|---|
| 1090 | alias_criterion = True |
|---|
| 1091 | |
|---|
| 1092 | elif right_mapper.with_polymorphic or isinstance(right_mapper.mapped_table, expression.Join): |
|---|
| 1093 | right_entity = aliased(right_mapper) |
|---|
| 1094 | alias_criterion = True |
|---|
| 1095 | aliased_entity = True |
|---|
| 1096 | |
|---|
| 1097 | elif prop: |
|---|
| 1098 | # for joins across plain relation()s, try not to specify the |
|---|
| 1099 | # same joins twice. the _currenttables collection tracks |
|---|
| 1100 | # what plain mapped tables we've joined to already. |
|---|
| 1101 | |
|---|
| 1102 | if prop.table in self._currenttables: |
|---|
| 1103 | if prop.secondary is not None and prop.secondary not in self._currenttables: |
|---|
| 1104 | # TODO: this check is not strong enough for different paths to the same endpoint which |
|---|
| 1105 | # does not use secondary tables |
|---|
| 1106 | raise sa_exc.InvalidRequestError("Can't join to property '%s'; a path to this " |
|---|
| 1107 | "table along a different secondary table already " |
|---|
| 1108 | "exists. Use the `alias=True` argument to `join()`." % descriptor) |
|---|
| 1109 | continue |
|---|
| 1110 | |
|---|
| 1111 | if prop.secondary: |
|---|
| 1112 | self._currenttables.add(prop.secondary) |
|---|
| 1113 | self._currenttables.add(prop.table) |
|---|
| 1114 | |
|---|
| 1115 | if of_type: |
|---|
| 1116 | right_entity = of_type |
|---|
| 1117 | else: |
|---|
| 1118 | right_entity = prop.mapper |
|---|
| 1119 | |
|---|
| 1120 | # create adapters to the right side, if we've created aliases |
|---|
| 1121 | if alias_criterion: |
|---|
| 1122 | right_adapter = ORMAdapter(right_entity, |
|---|
| 1123 | equivalents=right_mapper._equivalent_columns, chain_to=self._filter_aliases) |
|---|
| 1124 | |
|---|
| 1125 | # if the onclause is a ClauseElement, adapt it with our right |
|---|
| 1126 | # adapter, then with our query-wide adaptation if any. |
|---|
| 1127 | if isinstance(onclause, expression.ClauseElement): |
|---|
| 1128 | if alias_criterion: |
|---|
| 1129 | onclause = right_adapter.traverse(onclause) |
|---|
| 1130 | onclause = self._adapt_clause(onclause, False, True) |
|---|
| 1131 | |
|---|
| 1132 | # determine if we want _ORMJoin to alias the onclause |
|---|
| 1133 | # to the given left side. This is used if we're joining against a |
|---|
| 1134 | # select_from() selectable, from_self() call, or the onclause |
|---|
| 1135 | # has been resolved into a MapperProperty. Otherwise we assume |
|---|
| 1136 | # the onclause itself contains more specific information on how to |
|---|
| 1137 | # construct the onclause. |
|---|
| 1138 | join_to_left = not is_aliased_class or \ |
|---|
| 1139 | onclause is prop or \ |
|---|
| 1140 | self._from_obj_alias and clause is self._from_obj[0] |
|---|
| 1141 | |
|---|
| 1142 | # create the join |
|---|
| 1143 | clause = orm_join(clause, right_entity, onclause, isouter=outerjoin, join_to_left=join_to_left) |
|---|
| 1144 | |
|---|
| 1145 | # set up state for the query as a whole |
|---|
| 1146 | if alias_criterion: |
|---|
| 1147 | # adapt filter() calls based on our right side adaptation |
|---|
| 1148 | self._filter_aliases = right_adapter |
|---|
| 1149 | |
|---|
| 1150 | # if a polymorphic entity was aliased, establish that |
|---|
| 1151 | # so that MapperEntity/ColumnEntity can pick up on it |
|---|
| 1152 | # and adapt when it renders columns and fetches them from results |
|---|
| 1153 | if aliased_entity: |
|---|
| 1154 | self.__mapper_loads_polymorphically_with( |
|---|
| 1155 | right_mapper, |
|---|
| 1156 | ORMAdapter(right_entity, equivalents=right_mapper._equivalent_columns) |
|---|
| 1157 | ) |
|---|
| 1158 | |
|---|
| 1159 | if replace_clause_index is not None: |
|---|
| 1160 | l = list(self._from_obj) |
|---|
| 1161 | l[replace_clause_index] = clause |
|---|
| 1162 | self._from_obj = tuple(l) |
|---|
| 1163 | else: |
|---|
| 1164 | self._from_obj = self._from_obj + (clause,) |
|---|
| 1165 | |
|---|
| 1166 | # future joins with from_joinpoint=True join from our established right_entity. |
|---|
| 1167 | self._joinpoint = right_entity |
|---|
| 1168 | |
|---|
| 1169 | @_generative(_no_statement_condition) |
|---|
| 1170 | def reset_joinpoint(self): |
|---|
| 1171 | """return a new Query reset the 'joinpoint' of this Query reset |
|---|
| 1172 | back to the starting mapper. Subsequent generative calls will |
|---|
| 1173 | be constructed from the new joinpoint. |
|---|
| 1174 | |
|---|
| 1175 | Note that each call to join() or outerjoin() also starts from |
|---|
| 1176 | the root. |
|---|
| 1177 | |
|---|
| 1178 | """ |
|---|
| 1179 | self._reset_joinpoint() |
|---|
| 1180 | |
|---|
| 1181 | @_generative(_no_clauseelement_condition) |
|---|
| 1182 | def select_from(self, from_obj): |
|---|
| 1183 | """Set the `from_obj` parameter of the query and return the newly |
|---|
| 1184 | resulting ``Query``. This replaces the table which this Query selects |
|---|
| 1185 | from with the given table. |
|---|
| 1186 | |
|---|
| 1187 | |
|---|
| 1188 | `from_obj` is a single table or selectable. |
|---|
| 1189 | |
|---|
| 1190 | """ |
|---|
| 1191 | |
|---|
| 1192 | if isinstance(from_obj, (tuple, list)): |
|---|
| 1193 | # from_obj is actually a list again as of 0.5.3. so this restriction here |
|---|
| 1194 | # is somewhat artificial, but is still in place since select_from() implies aliasing all further |
|---|
| 1195 | # criterion against what's placed here, and its less complex to only |
|---|
| 1196 | # keep track of a single aliased FROM element being selected against. This could in theory be opened |
|---|
| 1197 | # up again to more complexity. |
|---|
| 1198 | util.warn_deprecated("select_from() now accepts a single Selectable as its argument, which replaces any existing FROM criterion.") |
|---|
| 1199 | from_obj = from_obj[-1] |
|---|
| 1200 | if not isinstance(from_obj, expression.FromClause): |
|---|
| 1201 | raise sa_exc.ArgumentError("select_from() accepts FromClause objects only.") |
|---|
| 1202 | self._set_select_from(from_obj) |
|---|
| 1203 | |
|---|
| 1204 | def __getitem__(self, item): |
|---|
| 1205 | if isinstance(item, slice): |
|---|
| 1206 | start, stop, step = util.decode_slice(item) |
|---|
| 1207 | |
|---|
| 1208 | if isinstance(stop, int) and isinstance(start, int) and stop - start <= 0: |
|---|
| 1209 | return [] |
|---|
| 1210 | |
|---|
| 1211 | # perhaps we should execute a count() here so that we |
|---|
| 1212 | # can still use LIMIT/OFFSET ? |
|---|
| 1213 | elif (isinstance(start, int) and start < 0) \ |
|---|
| 1214 | or (isinstance(stop, int) and stop < 0): |
|---|
| 1215 | return list(self)[item] |
|---|
| 1216 | |
|---|
| 1217 | res = self.slice(start, stop) |
|---|
| 1218 | if step is not None: |
|---|
| 1219 | return list(res)[None:None:item.step] |
|---|
| 1220 | else: |
|---|
| 1221 | return list(res) |
|---|
| 1222 | else: |
|---|
| 1223 | return list(self[item:item+1])[0] |
|---|
| 1224 | |
|---|
| 1225 | @_generative(_no_statement_condition) |
|---|
| 1226 | def slice(self, start, stop): |
|---|
| 1227 | """apply LIMIT/OFFSET to the ``Query`` based on a range and return the newly resulting ``Query``.""" |
|---|
| 1228 | if start is not None and stop is not None: |
|---|
| 1229 | self._offset = (self._offset or 0) + start |
|---|
| 1230 | self._limit = stop - start |
|---|
| 1231 | elif start is None and stop is not None: |
|---|
| 1232 | self._limit = stop |
|---|
| 1233 | elif start is not None and stop is None: |
|---|
| 1234 | self._offset = (self._offset or 0) + start |
|---|
| 1235 | |
|---|
| 1236 | @_generative(_no_statement_condition) |
|---|
| 1237 | def limit(self, limit): |
|---|
| 1238 | """Apply a ``LIMIT`` to the query and return the newly resulting |
|---|
| 1239 | |
|---|
| 1240 | ``Query``. |
|---|
| 1241 | |
|---|
| 1242 | """ |
|---|
| 1243 | self._limit = limit |
|---|
| 1244 | |
|---|
| 1245 | @_generative(_no_statement_condition) |
|---|
| 1246 | def offset(self, offset): |
|---|
| 1247 | """Apply an ``OFFSET`` to the query and return the newly resulting |
|---|
| 1248 | ``Query``. |
|---|
| 1249 | |
|---|
| 1250 | """ |
|---|
| 1251 | self._offset = offset |
|---|
| 1252 | |
|---|
| 1253 | @_generative(_no_statement_condition) |
|---|
| 1254 | def distinct(self): |
|---|
| 1255 | """Apply a ``DISTINCT`` to the query and return the newly resulting |
|---|
| 1256 | ``Query``. |
|---|
| 1257 | |
|---|
| 1258 | """ |
|---|
| 1259 | self._distinct = True |
|---|
| 1260 | |
|---|
| 1261 | def all(self): |
|---|
| 1262 | """Return the results represented by this ``Query`` as a list. |
|---|
| 1263 | |
|---|
| 1264 | This results in an execution of the underlying query. |
|---|
| 1265 | |
|---|
| 1266 | """ |
|---|
| 1267 | return list(self) |
|---|
| 1268 | |
|---|
| 1269 | @_generative(_no_clauseelement_condition) |
|---|
| 1270 | def from_statement(self, statement): |
|---|
| 1271 | """Execute the given SELECT statement and return results. |
|---|
| 1272 | |
|---|
| 1273 | This method bypasses all internal statement compilation, and the |
|---|
| 1274 | statement is executed without modification. |
|---|
| 1275 | |
|---|
| 1276 | The statement argument is either a string, a ``select()`` construct, |
|---|
| 1277 | or a ``text()`` construct, and should return the set of columns |
|---|
| 1278 | appropriate to the entity class represented by this ``Query``. |
|---|
| 1279 | |
|---|
| 1280 | Also see the ``instances()`` method. |
|---|
| 1281 | |
|---|
| 1282 | """ |
|---|
| 1283 | if isinstance(statement, basestring): |
|---|
| 1284 | statement = sql.text(statement) |
|---|
| 1285 | |
|---|
| 1286 | if not isinstance(statement, (expression._TextClause, expression._SelectBaseMixin)): |
|---|
| 1287 | raise sa_exc.ArgumentError("from_statement accepts text(), select(), and union() objects only.") |
|---|
| 1288 | |
|---|
| 1289 | self._statement = statement |
|---|
| 1290 | |
|---|
| 1291 | def first(self): |
|---|
| 1292 | """Return the first result of this ``Query`` or None if the result doesn't contain any row. |
|---|
| 1293 | |
|---|
| 1294 | This results in an execution of the underlying query. |
|---|
| 1295 | |
|---|
| 1296 | """ |
|---|
| 1297 | if self._statement: |
|---|
| 1298 | ret = list(self)[0:1] |
|---|
| 1299 | else: |
|---|
| 1300 | ret = list(self[0:1]) |
|---|
| 1301 | if len(ret) > 0: |
|---|
| 1302 | return ret[0] |
|---|
| 1303 | else: |
|---|
| 1304 | return None |
|---|
| 1305 | |
|---|
| 1306 | def one(self): |
|---|
| 1307 | """Return exactly one result or raise an exception. |
|---|
| 1308 | |
|---|
| 1309 | Raises ``sqlalchemy.orm.exc.NoResultFound`` if the query selects no rows. |
|---|
| 1310 | Raises ``sqlalchemy.orm.exc.MultipleResultsFound`` if multiple rows are |
|---|
| 1311 | selected. |
|---|
| 1312 | |
|---|
| 1313 | This results in an execution of the underlying query. |
|---|
| 1314 | |
|---|
| 1315 | """ |
|---|
| 1316 | if self._statement: |
|---|
| 1317 | raise sa_exc.InvalidRequestError( |
|---|
| 1318 | "one() not available when from_statement() is used; " |
|---|
| 1319 | "use `first()` instead.") |
|---|
| 1320 | |
|---|
| 1321 | ret = list(self[0:2]) |
|---|
| 1322 | |
|---|
| 1323 | if len(ret) == 1: |
|---|
| 1324 | return ret[0] |
|---|
| 1325 | elif len(ret) == 0: |
|---|
| 1326 | raise orm_exc.NoResultFound("No row was found for one()") |
|---|
| 1327 | else: |
|---|
| 1328 | raise orm_exc.MultipleResultsFound( |
|---|
| 1329 | "Multiple rows were found for one()") |
|---|
| 1330 | |
|---|
| 1331 | def scalar(self): |
|---|
| 1332 | """Return the first element of the first result or None. |
|---|
| 1333 | |
|---|
| 1334 | >>> session.query(Item).scalar() |
|---|
| 1335 | <Item> |
|---|
| 1336 | >>> session.query(Item.id).scalar() |
|---|
| 1337 | 1 |
|---|
| 1338 | >>> session.query(Item.id).filter(Item.id < 0).scalar() |
|---|
| 1339 | None |
|---|
| 1340 | >>> session.query(Item.id, Item.name).scalar() |
|---|
| 1341 | 1 |
|---|
| 1342 | >>> session.query(func.count(Parent.id)).scalar() |
|---|
| 1343 | 20 |
|---|
| 1344 | |
|---|
| 1345 | This results in an execution of the underlying query. |
|---|
| 1346 | |
|---|
| 1347 | """ |
|---|
| 1348 | try: |
|---|
| 1349 | ret = list(self)[0] |
|---|
| 1350 | if not isinstance(ret, tuple): |
|---|
| 1351 | return ret |
|---|
| 1352 | return ret[0] |
|---|
| 1353 | except IndexError: |
|---|
| 1354 | return None |
|---|
| 1355 | |
|---|
| 1356 | def __iter__(self): |
|---|
| 1357 | context = self._compile_context() |
|---|
| 1358 | context.statement.use_labels = True |
|---|
| 1359 | if self._autoflush and not self._populate_existing: |
|---|
| 1360 | self.session._autoflush() |
|---|
| 1361 | return self._execute_and_instances(context) |
|---|
| 1362 | |
|---|
| 1363 | def _execute_and_instances(self, querycontext): |
|---|
| 1364 | result = self.session.execute(querycontext.statement, params=self._params, mapper=self._mapper_zero_or_none()) |
|---|
| 1365 | return self.instances(result, querycontext) |
|---|
| 1366 | |
|---|
| 1367 | def instances(self, cursor, __context=None): |
|---|
| 1368 | """Given a ResultProxy cursor as returned by connection.execute(), return an ORM result as an iterator. |
|---|
| 1369 | |
|---|
| 1370 | e.g.:: |
|---|
| 1371 | |
|---|
| 1372 | result = engine.execute("select * from users") |
|---|
| 1373 | for u in session.query(User).instances(result): |
|---|
| 1374 | print u |
|---|
| 1375 | """ |
|---|
| 1376 | session = self.session |
|---|
| 1377 | |
|---|
| 1378 | context = __context |
|---|
| 1379 | if context is None: |
|---|
| 1380 | context = QueryContext(self) |
|---|
| 1381 | |
|---|
| 1382 | context.runid = _new_runid() |
|---|
| 1383 | |
|---|
| 1384 | filtered = bool(list(self._mapper_entities)) |
|---|
| 1385 | single_entity = filtered and len(self._entities) == 1 |
|---|
| 1386 | |
|---|
| 1387 | if filtered: |
|---|
| 1388 | if single_entity: |
|---|
| 1389 | filter = lambda x: util.unique_list(x, util.IdentitySet) |
|---|
| 1390 | else: |
|---|
| 1391 | filter = util.unique_list |
|---|
| 1392 | else: |
|---|
| 1393 | filter = None |
|---|
| 1394 | |
|---|
| 1395 | custom_rows = single_entity and 'append_result' in self._entities[0].extension |
|---|
| 1396 | |
|---|
| 1397 | (process, labels) = zip(*[query_entity.row_processor(self, context, custom_rows) for query_entity in self._entities]) |
|---|
| 1398 | |
|---|
| 1399 | if not single_entity: |
|---|
| 1400 | labels = dict((label, property(itemgetter(i))) |
|---|
| 1401 | for i, label in enumerate(labels) |
|---|
| 1402 | if label) |
|---|
| 1403 | rowtuple = type.__new__(type, "RowTuple", (tuple,), labels) |
|---|
| 1404 | rowtuple.keys = labels.keys |
|---|
| 1405 | |
|---|
| 1406 | while True: |
|---|
| 1407 | context.progress = {} |
|---|
| 1408 | context.partials = {} |
|---|
| 1409 | |
|---|
| 1410 | if self._yield_per: |
|---|
| 1411 | fetch = cursor.fetchmany(self._yield_per) |
|---|
| 1412 | if not fetch: |
|---|
| 1413 | break |
|---|
| 1414 | else: |
|---|
| 1415 | fetch = cursor.fetchall() |
|---|
| 1416 | |
|---|
| 1417 | if custom_rows: |
|---|
| 1418 | rows = [] |
|---|
| 1419 | for row in fetch: |
|---|
| 1420 | process[0](context, row, rows) |
|---|
| 1421 | elif single_entity: |
|---|
| 1422 | rows = [process[0](context, row) for row in fetch] |
|---|
| 1423 | else: |
|---|
| 1424 | rows = [rowtuple(proc(context, row) for proc in process) |
|---|
| 1425 | for row in fetch] |
|---|
| 1426 | |
|---|
| 1427 | if filter: |
|---|
| 1428 | rows = filter(rows) |
|---|
| 1429 | |
|---|
| 1430 | if context.refresh_state and self._only_load_props and context.refresh_state in context.progress: |
|---|
| 1431 | context.refresh_state.commit(context.refresh_state.dict, self._only_load_props) |
|---|
| 1432 | context.progress.pop(context.refresh_state) |
|---|
| 1433 | |
|---|
| 1434 | session._finalize_loaded(context.progress) |
|---|
| 1435 | |
|---|
| 1436 | for ii, (dict_, attrs) in context.partials.items(): |
|---|
| 1437 | ii.commit(dict_, attrs) |
|---|
| 1438 | |
|---|
| 1439 | for row in rows: |
|---|
| 1440 | yield row |
|---|
| 1441 | |
|---|
| 1442 | if not self._yield_per: |
|---|
| 1443 | break |
|---|
| 1444 | iterate_instances = util.deprecated()(instances) |
|---|
| 1445 | |
|---|
| 1446 | def _get(self, key=None, ident=None, refresh_state=None, lockmode=None, only_load_props=None): |
|---|
| 1447 | lockmode = lockmode or self._lockmode |
|---|
| 1448 | if not self._populate_existing and not refresh_state and not self._mapper_zero().always_refresh and lockmode is None: |
|---|
| 1449 | try: |
|---|
| 1450 | instance = self.session.identity_map[key] |
|---|
| 1451 | state = attributes.instance_state(instance) |
|---|
| 1452 | if state.expired: |
|---|
| 1453 | try: |
|---|
| 1454 | state() |
|---|
| 1455 | except orm_exc.ObjectDeletedError: |
|---|
| 1456 | self.session._remove_newly_deleted(state) |
|---|
| 1457 | return None |
|---|
| 1458 | return instance |
|---|
| 1459 | except KeyError: |
|---|
| 1460 | pass |
|---|
| 1461 | |
|---|
| 1462 | if ident is None: |
|---|
| 1463 | if key is not None: |
|---|
| 1464 | ident = key[1] |
|---|
| 1465 | else: |
|---|
| 1466 | ident = util.to_list(ident) |
|---|
| 1467 | |
|---|
| 1468 | if refresh_state is None: |
|---|
| 1469 | q = self._clone() |
|---|
| 1470 | q._no_criterion_condition("get") |
|---|
| 1471 | else: |
|---|
| 1472 | q = self._clone() |
|---|
| 1473 | |
|---|
| 1474 | if ident is not None: |
|---|
| 1475 | mapper = q._mapper_zero() |
|---|
| 1476 | params = {} |
|---|
| 1477 | (_get_clause, _get_params) = mapper._get_clause |
|---|
| 1478 | |
|---|
| 1479 | _get_clause = q._adapt_clause(_get_clause, True, False) |
|---|
| 1480 | q._criterion = _get_clause |
|---|
| 1481 | |
|---|
| 1482 | for i, primary_key in enumerate(mapper.primary_key): |
|---|
| 1483 | try: |
|---|
| 1484 | params[_get_params[primary_key].key] = ident[i] |
|---|
| 1485 | except IndexError: |
|---|
| 1486 | raise sa_exc.InvalidRequestError("Could not find enough values to formulate primary key for " |
|---|
| 1487 | "query.get(); primary key columns are %s" % ', '.join("'%s'" % c for c in mapper.primary_key)) |
|---|
| 1488 | q._params = params |
|---|
| 1489 | |
|---|
| 1490 | if lockmode is not None: |
|---|
| 1491 | q._lockmode = lockmode |
|---|
| 1492 | q._get_options( |
|---|
| 1493 | populate_existing=bool(refresh_state), |
|---|
| 1494 | version_check=(lockmode is not None), |
|---|
| 1495 | only_load_props=only_load_props, |
|---|
| 1496 | refresh_state=refresh_state) |
|---|
| 1497 | q._order_by = None |
|---|
| 1498 | try: |
|---|
| 1499 | # call using all() to avoid LIMIT compilation complexity |
|---|
| 1500 | return q.all()[0] |
|---|
| 1501 | except IndexError: |
|---|
| 1502 | return None |
|---|
| 1503 | |
|---|
| 1504 | @property |
|---|
| 1505 | def _select_args(self): |
|---|
| 1506 | return { |
|---|
| 1507 | 'limit':self._limit, |
|---|
| 1508 | 'offset':self._offset, |
|---|
| 1509 | 'distinct':self._distinct, |
|---|
| 1510 | 'group_by':self._group_by or None, |
|---|
| 1511 | 'having':self._having or None |
|---|
| 1512 | } |
|---|
| 1513 | |
|---|
| 1514 | @property |
|---|
| 1515 | def _should_nest_selectable(self): |
|---|
| 1516 | kwargs = self._select_args |
|---|
| 1517 | return (kwargs.get('limit') is not None or |
|---|
| 1518 | kwargs.get('offset') is not None or |
|---|
| 1519 | kwargs.get('distinct', False)) |
|---|
| 1520 | |
|---|
| 1521 | def count(self): |
|---|
| 1522 | """Return a count of rows this Query would return. |
|---|
| 1523 | |
|---|
| 1524 | For simple entity queries, count() issues |
|---|
| 1525 | a SELECT COUNT, and will specifically count the primary |
|---|
| 1526 | key column of the first entity only. If the query uses |
|---|
| 1527 | LIMIT, OFFSET, or DISTINCT, count() will wrap the statement |
|---|
| 1528 | generated by this Query in a subquery, from which a SELECT COUNT |
|---|
| 1529 | is issued, so that the contract of "how many rows |
|---|
| 1530 | would be returned?" is honored. |
|---|
| 1531 | |
|---|
| 1532 | For queries that request specific columns or expressions, |
|---|
| 1533 | count() again makes no assumptions about those expressions |
|---|
| 1534 | and will wrap everything in a subquery. Therefore, |
|---|
| 1535 | ``Query.count()`` is usually not what you want in this case. |
|---|
| 1536 | To count specific columns, often in conjunction with |
|---|
| 1537 | GROUP BY, use ``func.count()`` as an individual column expression |
|---|
| 1538 | instead of ``Query.count()``. See the ORM tutorial |
|---|
| 1539 | for an example. |
|---|
| 1540 | |
|---|
| 1541 | """ |
|---|
| 1542 | should_nest = [self._should_nest_selectable] |
|---|
| 1543 | def ent_cols(ent): |
|---|
| 1544 | if isinstance(ent, _MapperEntity): |
|---|
| 1545 | return ent.mapper.primary_key |
|---|
| 1546 | else: |
|---|
| 1547 | should_nest[0] = True |
|---|
| 1548 | return [ent.column] |
|---|
| 1549 | |
|---|
| 1550 | return self._col_aggregate(sql.literal_column('1'), sql.func.count, |
|---|
| 1551 | nested_cols=chain(*[ent_cols(ent) for ent in self._entities]), |
|---|
| 1552 | should_nest = should_nest[0] |
|---|
| 1553 | ) |
|---|
| 1554 | |
|---|
| 1555 | def _col_aggregate(self, col, func, nested_cols=None, should_nest=False): |
|---|
| 1556 | context = QueryContext(self) |
|---|
| 1557 | |
|---|
| 1558 | for entity in self._entities: |
|---|
| 1559 | entity.setup_context(self, context) |
|---|
| 1560 | |
|---|
| 1561 | if context.from_clause: |
|---|
| 1562 | from_obj = list(context.from_clause) |
|---|
| 1563 | else: |
|---|
| 1564 | from_obj = context.froms |
|---|
| 1565 | |
|---|
| 1566 | self._adjust_for_single_inheritance(context) |
|---|
| 1567 | |
|---|
| 1568 | whereclause = context.whereclause |
|---|
| 1569 | |
|---|
| 1570 | if should_nest: |
|---|
| 1571 | if not nested_cols: |
|---|
| 1572 | nested_cols = [col] |
|---|
| 1573 | else: |
|---|
| 1574 | nested_cols = list(nested_cols) |
|---|
| 1575 | s = sql.select(nested_cols, whereclause, from_obj=from_obj, use_labels=True, **self._select_args) |
|---|
| 1576 | s = s.alias() |
|---|
| 1577 | s = sql.select([func(s.corresponding_column(col) or col)]).select_from(s) |
|---|
| 1578 | else: |
|---|
| 1579 | s = sql.select([func(col)], whereclause, from_obj=from_obj, **self._select_args) |
|---|
| 1580 | |
|---|
| 1581 | if self._autoflush and not self._populate_existing: |
|---|
| 1582 | self.session._autoflush() |
|---|
| 1583 | return self.session.scalar(s, params=self._params, mapper=self._mapper_zero()) |
|---|
| 1584 | |
|---|
| 1585 | def delete(self, synchronize_session='fetch'): |
|---|
| 1586 | """Perform a bulk delete query. |
|---|
| 1587 | |
|---|
| 1588 | Deletes rows matched by this query from the database. |
|---|
| 1589 | |
|---|
| 1590 | :param synchronize_session: chooses the strategy for the removal of matched |
|---|
| 1591 | objects from the session. Valid values are: |
|---|
| 1592 | |
|---|
| 1593 | False |
|---|
| 1594 | don't synchronize the session. This option is the most efficient and is reliable |
|---|
| 1595 | once the session is expired, which typically occurs after a commit(). Before |
|---|
| 1596 | the expiration, objects may still remain in the session which were in fact deleted |
|---|
| 1597 | which can lead to confusing results if they are accessed via get() or already |
|---|
| 1598 | loaded collections. |
|---|
| 1599 | |
|---|
| 1600 | 'fetch' |
|---|
| 1601 | performs a select query before the delete to find objects that are matched |
|---|
| 1602 | by the delete query and need to be removed from the session. Matched objects |
|---|
| 1603 | are removed from the session. 'fetch' is the default strategy. |
|---|
| 1604 | |
|---|
| 1605 | 'evaluate' |
|---|
| 1606 | experimental feature. Tries to evaluate the querys criteria in Python |
|---|
| 1607 | straight on the objects in the session. If evaluation of the criteria isn't |
|---|
| 1608 | implemented, the 'fetch' strategy will be used as a fallback. |
|---|
| 1609 | |
|---|
| 1610 | The expression evaluator currently doesn't account for differing string |
|---|
| 1611 | collations between the database and Python. |
|---|
| 1612 | |
|---|
| 1613 | Returns the number of rows deleted, excluding any cascades. |
|---|
| 1614 | |
|---|
| 1615 | The method does *not* offer in-Python cascading of relations - it is assumed that |
|---|
| 1616 | ON DELETE CASCADE is configured for any foreign key references which require it. |
|---|
| 1617 | The Session needs to be expired (occurs automatically after commit(), or call expire_all()) |
|---|
| 1618 | in order for the state of dependent objects subject to delete or delete-orphan cascade to be |
|---|
| 1619 | correctly represented. |
|---|
| 1620 | |
|---|
| 1621 | Also, the ``before_delete()`` and ``after_delete()`` :class:`~sqlalchemy.orm.interfaces.MapperExtension` |
|---|
| 1622 | methods are not called from this method. For a delete hook here, use the |
|---|
| 1623 | ``after_bulk_delete()`` :class:`~sqlalchemy.orm.interfaces.MapperExtension` method. |
|---|
| 1624 | |
|---|
| 1625 | """ |
|---|
| 1626 | #TODO: lots of duplication and ifs - probably needs to be refactored to strategies |
|---|
| 1627 | #TODO: cascades need handling. |
|---|
| 1628 | |
|---|
| 1629 | if synchronize_session not in [False, 'evaluate', 'fetch']: |
|---|
| 1630 | raise sa_exc.ArgumentError("Valid strategies for session synchronization are False, 'evaluate' and 'fetch'") |
|---|
| 1631 | self._no_select_modifiers("delete") |
|---|
| 1632 | |
|---|
| 1633 | self = self.enable_eagerloads(False) |
|---|
| 1634 | |
|---|
| 1635 | context = self._compile_context() |
|---|
| 1636 | if len(context.statement.froms) != 1 or not isinstance(context.statement.froms[0], schema.Table): |
|---|
| 1637 | raise sa_exc.ArgumentError("Only deletion via a single table query is currently supported") |
|---|
| 1638 | primary_table = context.statement.froms[0] |
|---|
| 1639 | |
|---|
| 1640 | session = self.session |
|---|
| 1641 | |
|---|
| 1642 | if synchronize_session == 'evaluate': |
|---|
| 1643 | try: |
|---|
| 1644 | evaluator_compiler = evaluator.EvaluatorCompiler() |
|---|
| 1645 | eval_condition = evaluator_compiler.process(self.whereclause) |
|---|
| 1646 | except evaluator.UnevaluatableError: |
|---|
| 1647 | synchronize_session = 'fetch' |
|---|
| 1648 | |
|---|
| 1649 | delete_stmt = sql.delete(primary_table, context.whereclause) |
|---|
| 1650 | |
|---|
| 1651 | if synchronize_session == 'fetch': |
|---|
| 1652 | #TODO: use RETURNING when available |
|---|
| 1653 | select_stmt = context.statement.with_only_columns(primary_table.primary_key) |
|---|
| 1654 | matched_rows = session.execute(select_stmt, params=self._params).fetchall() |
|---|
| 1655 | |
|---|
| 1656 | if self._autoflush: |
|---|
| 1657 | session._autoflush() |
|---|
| 1658 | result = session.execute(delete_stmt, params=self._params) |
|---|
| 1659 | |
|---|
| 1660 | if synchronize_session == 'evaluate': |
|---|
| 1661 | target_cls = self._mapper_zero().class_ |
|---|
| 1662 | |
|---|
| 1663 | #TODO: detect when the where clause is a trivial primary key match |
|---|
| 1664 | objs_to_expunge = [obj for (cls, pk),obj in session.identity_map.iteritems() |
|---|
| 1665 | if issubclass(cls, target_cls) and eval_condition(obj)] |
|---|
| 1666 | for obj in objs_to_expunge: |
|---|
| 1667 | session._remove_newly_deleted(attributes.instance_state(obj)) |
|---|
| 1668 | elif synchronize_session == 'fetch': |
|---|
| 1669 | target_mapper = self._mapper_zero() |
|---|
| 1670 | for primary_key in matched_rows: |
|---|
| 1671 | identity_key = target_mapper.identity_key_from_primary_key(list(primary_key)) |
|---|
| 1672 | if identity_key in session.identity_map: |
|---|
| 1673 | session._remove_newly_deleted(attributes.instance_state(session.identity_map[identity_key])) |
|---|
| 1674 | |
|---|
| 1675 | for ext in session.extensions: |
|---|
| 1676 | ext.after_bulk_delete(session, self, context, result) |
|---|
| 1677 | |
|---|
| 1678 | return result.rowcount |
|---|
| 1679 | |
|---|
| 1680 | def update(self, values, synchronize_session='expire'): |
|---|
| 1681 | """Perform a bulk update query. |
|---|
| 1682 | |
|---|
| 1683 | Updates rows matched by this query in the database. |
|---|
| 1684 | |
|---|
| 1685 | :param values: a dictionary with attributes names as keys and literal values or sql expressions |
|---|
| 1686 | as values. |
|---|
| 1687 | |
|---|
| 1688 | :param synchronize_session: chooses the strategy to update the |
|---|
| 1689 | attributes on objects in the session. Valid values are: |
|---|
| 1690 | |
|---|
| 1691 | False |
|---|
| 1692 | don't synchronize the session. Use this when you don't need to use the |
|---|
| 1693 | session after the update or you can be sure that none of the matched objects |
|---|
| 1694 | are in the session. |
|---|
| 1695 | |
|---|
| 1696 | 'expire' |
|---|
| 1697 | performs a select query before the update to find objects that are matched |
|---|
| 1698 | by the update query. The updated attributes are expired on matched objects. |
|---|
| 1699 | |
|---|
| 1700 | 'evaluate' |
|---|
| 1701 | experimental feature. Tries to evaluate the querys criteria in Python |
|---|
| 1702 | straight on the objects in the session. If evaluation of the criteria isn't |
|---|
| 1703 | implemented, the 'expire' strategy will be used as a fallback. |
|---|
| 1704 | |
|---|
| 1705 | The expression evaluator currently doesn't account for differing string |
|---|
| 1706 | collations between the database and Python. |
|---|
| 1707 | |
|---|
| 1708 | Returns the number of rows matched by the update. |
|---|
| 1709 | |
|---|
| 1710 | The method does *not* offer in-Python cascading of relations - it is assumed that |
|---|
| 1711 | ON UPDATE CASCADE is configured for any foreign key references which require it. |
|---|
| 1712 | The Session needs to be expired (occurs automatically after commit(), or call expire_all()) |
|---|
| 1713 | in order for the state of dependent objects subject foreign key cascade to be |
|---|
| 1714 | correctly represented. |
|---|
| 1715 | |
|---|
| 1716 | Also, the ``before_update()`` and ``after_update()`` :class:`~sqlalchemy.orm.interfaces.MapperExtension` |
|---|
| 1717 | methods are not called from this method. For an update hook here, use the |
|---|
| 1718 | ``after_bulk_update()`` :class:`~sqlalchemy.orm.interfaces.SessionExtension` method. |
|---|
| 1719 | |
|---|
| 1720 | """ |
|---|
| 1721 | |
|---|
| 1722 | #TODO: value keys need to be mapped to corresponding sql cols and instr.attr.s to string keys |
|---|
| 1723 | #TODO: updates of manytoone relations need to be converted to fk assignments |
|---|
| 1724 | #TODO: cascades need handling. |
|---|
| 1725 | |
|---|
| 1726 | self._no_select_modifiers("update") |
|---|
| 1727 | if synchronize_session not in [False, 'evaluate', 'expire']: |
|---|
| 1728 | raise sa_exc.ArgumentError("Valid strategies for session synchronization are False, 'evaluate' and 'expire'") |
|---|
| 1729 | |
|---|
| 1730 | self = self.enable_eagerloads(False) |
|---|
| 1731 | |
|---|
| 1732 | context = self._compile_context() |
|---|
| 1733 | if len(context.statement.froms) != 1 or not isinstance(context.statement.froms[0], schema.Table): |
|---|
| 1734 | raise sa_exc.ArgumentError("Only update via a single table query is currently supported") |
|---|
| 1735 | primary_table = context.statement.froms[0] |
|---|
| 1736 | |
|---|
| 1737 | session = self.session |
|---|
| 1738 | |
|---|
| 1739 | if synchronize_session == 'evaluate': |
|---|
| 1740 | try: |
|---|
| 1741 | evaluator_compiler = evaluator.EvaluatorCompiler() |
|---|
| 1742 | eval_condition = evaluator_compiler.process(self.whereclause) |
|---|
| 1743 | |
|---|
| 1744 | value_evaluators = {} |
|---|
| 1745 | for key,value in values.items(): |
|---|
| 1746 | key = expression._column_as_key(key) |
|---|
| 1747 | value_evaluators[key] = evaluator_compiler.process(expression._literal_as_binds(value)) |
|---|
| 1748 | except evaluator.UnevaluatableError: |
|---|
| 1749 | synchronize_session = 'expire' |
|---|
| 1750 | |
|---|
| 1751 | update_stmt = sql.update(primary_table, context.whereclause, values) |
|---|
| 1752 | |
|---|
| 1753 | if synchronize_session == 'expire': |
|---|
| 1754 | select_stmt = context.statement.with_only_columns(primary_table.primary_key) |
|---|
| 1755 | matched_rows = session.execute(select_stmt, params=self._params).fetchall() |
|---|
| 1756 | |
|---|
| 1757 | if self._autoflush: |
|---|
| 1758 | session._autoflush() |
|---|
| 1759 | result = session.execute(update_stmt, params=self._params) |
|---|
| 1760 | |
|---|
| 1761 | if synchronize_session == 'evaluate': |
|---|
| 1762 | target_cls = self._mapper_zero().class_ |
|---|
| 1763 | |
|---|
| 1764 | for (cls, pk),obj in session.identity_map.iteritems(): |
|---|
| 1765 | evaluated_keys = value_evaluators.keys() |
|---|
| 1766 | |
|---|
| 1767 | if issubclass(cls, target_cls) and eval_condition(obj): |
|---|
| 1768 | state, dict_ = attributes.instance_state(obj), attributes.instance_dict(obj) |
|---|
| 1769 | |
|---|
| 1770 | # only evaluate unmodified attributes |
|---|
| 1771 | to_evaluate = state.unmodified.intersection(evaluated_keys) |
|---|
| 1772 | for key in to_evaluate: |
|---|
| 1773 | dict_[key] = value_evaluators[key](obj) |
|---|
| 1774 | |
|---|
| 1775 | state.commit(dict_, list(to_evaluate)) |
|---|
| 1776 | |
|---|
| 1777 | # expire attributes with pending changes (there was no autoflush, so they are overwritten) |
|---|
| 1778 | state.expire_attributes(set(evaluated_keys).difference(to_evaluate)) |
|---|
| 1779 | |
|---|
| 1780 | elif synchronize_session == 'expire': |
|---|
| 1781 | target_mapper = self._mapper_zero() |
|---|
| 1782 | |
|---|
| 1783 | for primary_key in matched_rows: |
|---|
| 1784 | identity_key = target_mapper.identity_key_from_primary_key(list(primary_key)) |
|---|
| 1785 | if identity_key in session.identity_map: |
|---|
| 1786 | session.expire(session.identity_map[identity_key], values.keys()) |
|---|
| 1787 | |
|---|
| 1788 | for ext in session.extensions: |
|---|
| 1789 | ext.after_bulk_update(session, self, context, result) |
|---|
| 1790 | |
|---|
| 1791 | return result.rowcount |
|---|
| 1792 | |
|---|
| 1793 | def _compile_context(self, labels=True): |
|---|
| 1794 | context = QueryContext(self) |
|---|
| 1795 | |
|---|
| 1796 | if context.statement: |
|---|
| 1797 | return context |
|---|
| 1798 | |
|---|
| 1799 | if self._lockmode: |
|---|
| 1800 | try: |
|---|
| 1801 | for_update = {'read': 'read', |
|---|
| 1802 | 'update': True, |
|---|
| 1803 | 'update_nowait': 'nowait', |
|---|
| 1804 | None: False}[self._lockmode] |
|---|
| 1805 | except KeyError: |
|---|
| 1806 | raise sa_exc.ArgumentError("Unknown lockmode '%s'" % self._lockmode) |
|---|
| 1807 | else: |
|---|
| 1808 | for_update = False |
|---|
| 1809 | |
|---|
| 1810 | for entity in self._entities: |
|---|
| 1811 | entity.setup_context(self, context) |
|---|
| 1812 | |
|---|
| 1813 | eager_joins = context.eager_joins.values() |
|---|
| 1814 | |
|---|
| 1815 | if context.from_clause: |
|---|
| 1816 | froms = list(context.from_clause) # "load from explicit FROMs" mode, i.e. when select_from() or join() is used |
|---|
| 1817 | else: |
|---|
| 1818 | froms = context.froms # "load from discrete FROMs" mode, i.e. when each _MappedEntity has its own FROM |
|---|
| 1819 | |
|---|
| 1820 | self._adjust_for_single_inheritance(context) |
|---|
| 1821 | |
|---|
| 1822 | if not context.primary_columns: |
|---|
| 1823 | if self._only_load_props: |
|---|
| 1824 | raise sa_exc.InvalidRequestError("No column-based properties specified for refresh operation." |
|---|
| 1825 | " Use session.expire() to reload collections and related items.") |
|---|
| 1826 | else: |
|---|
| 1827 | raise sa_exc.InvalidRequestError("Query contains no columns with which to SELECT from.") |
|---|
| 1828 | |
|---|
| 1829 | if eager_joins and self._should_nest_selectable: |
|---|
| 1830 | # for eager joins present and LIMIT/OFFSET/DISTINCT, wrap the query inside a select, |
|---|
| 1831 | # then append eager joins onto that |
|---|
| 1832 | |
|---|
| 1833 | if context.order_by: |
|---|
| 1834 | order_by_col_expr = list(chain(*[sql_util.find_columns(o) for o in context.order_by])) |
|---|
| 1835 | else: |
|---|
| 1836 | context.order_by = None |
|---|
| 1837 | order_by_col_expr = [] |
|---|
| 1838 | |
|---|
| 1839 | inner = sql.select( |
|---|
| 1840 | context.primary_columns + order_by_col_expr, |
|---|
| 1841 | context.whereclause, |
|---|
| 1842 | from_obj=froms, |
|---|
| 1843 | use_labels=labels, |
|---|
| 1844 | correlate=False, |
|---|
| 1845 | order_by=context.order_by, |
|---|
| 1846 | **self._select_args |
|---|
| 1847 | ) |
|---|
| 1848 | |
|---|
| 1849 | if self._correlate: |
|---|
| 1850 | inner = inner.correlate(*self._correlate) |
|---|
| 1851 | |
|---|
| 1852 | inner = inner.alias() |
|---|
| 1853 | |
|---|
| 1854 | equivs = self.__all_equivs() |
|---|
| 1855 | |
|---|
| 1856 | context.adapter = sql_util.ColumnAdapter(inner, equivs) |
|---|
| 1857 | |
|---|
| 1858 | statement = sql.select([inner] + context.secondary_columns, for_update=for_update, use_labels=labels) |
|---|
| 1859 | |
|---|
| 1860 | from_clause = inner |
|---|
| 1861 | for eager_join in eager_joins: |
|---|
| 1862 | # EagerLoader places a 'stop_on' attribute on the join, |
|---|
| 1863 | # giving us a marker as to where the "splice point" of the join should be |
|---|
| 1864 | from_clause = sql_util.splice_joins(from_clause, eager_join, eager_join.stop_on) |
|---|
| 1865 | |
|---|
| 1866 | statement.append_from(from_clause) |
|---|
| 1867 | |
|---|
| 1868 | if context.order_by: |
|---|
| 1869 | statement.append_order_by(*context.adapter.copy_and_process(context.order_by)) |
|---|
| 1870 | |
|---|
| 1871 | statement.append_order_by(*context.eager_order_by) |
|---|
| 1872 | else: |
|---|
| 1873 | if not context.order_by: |
|---|
| 1874 | context.order_by = None |
|---|
| 1875 | |
|---|
| 1876 | if self._distinct and context.order_by: |
|---|
| 1877 | order_by_col_expr = list(chain(*[sql_util.find_columns(o) for o in context.order_by])) |
|---|
| 1878 | context.primary_columns += order_by_col_expr |
|---|
| 1879 | |
|---|
| 1880 | froms += context.eager_joins.values() |
|---|
| 1881 | |
|---|
| 1882 | statement = sql.select( |
|---|
| 1883 | context.primary_columns + context.secondary_columns, |
|---|
| 1884 | context.whereclause, |
|---|
| 1885 | from_obj=froms, |
|---|
| 1886 | use_labels=labels, |
|---|
| 1887 | for_update=for_update, |
|---|
| 1888 | correlate=False, |
|---|
| 1889 | order_by=context.order_by, |
|---|
| 1890 | **self._select_args |
|---|
| 1891 | ) |
|---|
| 1892 | |
|---|
| 1893 | if self._correlate: |
|---|
| 1894 | statement = statement.correlate(*self._correlate) |
|---|
| 1895 | |
|---|
| 1896 | if context.eager_order_by: |
|---|
| 1897 | statement.append_order_by(*context.eager_order_by) |
|---|
| 1898 | |
|---|
| 1899 | context.statement = statement |
|---|
| 1900 | |
|---|
| 1901 | return context |
|---|
| 1902 | |
|---|
| 1903 | def _adjust_for_single_inheritance(self, context): |
|---|
| 1904 | """Apply single-table-inheritance filtering. |
|---|
| 1905 | |
|---|
| 1906 | For all distinct single-table-inheritance mappers represented in the |
|---|
| 1907 | columns clause of this query, add criterion to the WHERE clause of the |
|---|
| 1908 | given QueryContext such that only the appropriate subtypes are |
|---|
| 1909 | selected from the total results. |
|---|
| 1910 | |
|---|
| 1911 | """ |
|---|
| 1912 | for entity, (mapper, adapter, s, i, w) in self._mapper_adapter_map.iteritems(): |
|---|
| 1913 | single_crit = mapper._single_table_criterion |
|---|
| 1914 | if single_crit: |
|---|
| 1915 | if adapter: |
|---|
| 1916 | single_crit = adapter.traverse(single_crit) |
|---|
| 1917 | single_crit = self._adapt_clause(single_crit, False, False) |
|---|
| 1918 | context.whereclause = sql.and_(context.whereclause, single_crit) |
|---|
| 1919 | |
|---|
| 1920 | def __str__(self): |
|---|
| 1921 | return str(self._compile_context().statement) |
|---|
| 1922 | |
|---|
| 1923 | |
|---|
| 1924 | class _QueryEntity(object): |
|---|
| 1925 | """represent an entity column returned within a Query result.""" |
|---|
| 1926 | |
|---|
| 1927 | def __new__(cls, *args, **kwargs): |
|---|
| 1928 | if cls is _QueryEntity: |
|---|
| 1929 | entity = args[1] |
|---|
| 1930 | if not isinstance(entity, basestring) and _is_mapped_class(entity): |
|---|
| 1931 | cls = _MapperEntity |
|---|
| 1932 | else: |
|---|
| 1933 | cls = _ColumnEntity |
|---|
| 1934 | return object.__new__(cls) |
|---|
| 1935 | |
|---|
| 1936 | def _clone(self): |
|---|
| 1937 | q = self.__class__.__new__(self.__class__) |
|---|
| 1938 | q.__dict__ = self.__dict__.copy() |
|---|
| 1939 | return q |
|---|
| 1940 | |
|---|
| 1941 | class _MapperEntity(_QueryEntity): |
|---|
| 1942 | """mapper/class/AliasedClass entity""" |
|---|
| 1943 | |
|---|
| 1944 | def __init__(self, query, entity): |
|---|
| 1945 | self.primary_entity = not query._entities |
|---|
| 1946 | query._entities.append(self) |
|---|
| 1947 | |
|---|
| 1948 | self.entities = [entity] |
|---|
| 1949 | self.entity_zero = entity |
|---|
| 1950 | |
|---|
| 1951 | def setup_entity(self, entity, mapper, adapter, from_obj, is_aliased_class, with_polymorphic): |
|---|
| 1952 | self.mapper = mapper |
|---|
| 1953 | self.extension = self.mapper.extension |
|---|
| 1954 | self.adapter = adapter |
|---|
| 1955 | self.selectable = from_obj |
|---|
| 1956 | self._with_polymorphic = with_polymorphic |
|---|
| 1957 | self._polymorphic_discriminator = None |
|---|
| 1958 | self.is_aliased_class = is_aliased_class |
|---|
| 1959 | if is_aliased_class: |
|---|
| 1960 | self.path_entity = self.entity = self.entity_zero = entity |
|---|
| 1961 | else: |
|---|
| 1962 | self.path_entity = mapper.base_mapper |
|---|
| 1963 | self.entity = self.entity_zero = mapper |
|---|
| 1964 | |
|---|
| 1965 | def set_with_polymorphic(self, query, cls_or_mappers, selectable, discriminator): |
|---|
| 1966 | if cls_or_mappers is None: |
|---|
| 1967 | query._reset_polymorphic_adapter(self.mapper) |
|---|
| 1968 | return |
|---|
| 1969 | |
|---|
| 1970 | mappers, from_obj = self.mapper._with_polymorphic_args(cls_or_mappers, selectable) |
|---|
| 1971 | self._with_polymorphic = mappers |
|---|
| 1972 | self._polymorphic_discriminator = discriminator |
|---|
| 1973 | |
|---|
| 1974 | # TODO: do the wrapped thing here too so that with_polymorphic() can be |
|---|
| 1975 | # applied to aliases |
|---|
| 1976 | if not self.is_aliased_class: |
|---|
| 1977 | self.selectable = from_obj |
|---|
| 1978 | self.adapter = query._get_polymorphic_adapter(self, from_obj) |
|---|
| 1979 | |
|---|
| 1980 | def corresponds_to(self, entity): |
|---|
| 1981 | if _is_aliased_class(entity): |
|---|
| 1982 | return entity is self.path_entity |
|---|
| 1983 | else: |
|---|
| 1984 | return entity.base_mapper is self.path_entity |
|---|
| 1985 | |
|---|
| 1986 | def _get_entity_clauses(self, query, context): |
|---|
| 1987 | |
|---|
| 1988 | adapter = None |
|---|
| 1989 | if not self.is_aliased_class and query._polymorphic_adapters: |
|---|
| 1990 | adapter = query._polymorphic_adapters.get(self.mapper, None) |
|---|
| 1991 | |
|---|
| 1992 | if not adapter and self.adapter: |
|---|
| 1993 | adapter = self.adapter |
|---|
| 1994 | |
|---|
| 1995 | if adapter: |
|---|
| 1996 | if query._from_obj_alias: |
|---|
| 1997 | ret = adapter.wrap(query._from_obj_alias) |
|---|
| 1998 | else: |
|---|
| 1999 | ret = adapter |
|---|
| 2000 | else: |
|---|
| 2001 | ret = query._from_obj_alias |
|---|
| 2002 | |
|---|
| 2003 | return ret |
|---|
| 2004 | |
|---|
| 2005 | def row_processor(self, query, context, custom_rows): |
|---|
| 2006 | adapter = self._get_entity_clauses(query, context) |
|---|
| 2007 | |
|---|
| 2008 | if context.adapter and adapter: |
|---|
| 2009 | adapter = adapter.wrap(context.adapter) |
|---|
| 2010 | elif not adapter: |
|---|
| 2011 | adapter = context.adapter |
|---|
| 2012 | |
|---|
| 2013 | # polymorphic mappers which have concrete tables in their hierarchy usually |
|---|
| 2014 | # require row aliasing unconditionally. |
|---|
| 2015 | if not adapter and self.mapper._requires_row_aliasing: |
|---|
| 2016 | adapter = sql_util.ColumnAdapter(self.selectable, self.mapper._equivalent_columns) |
|---|
| 2017 | |
|---|
| 2018 | if self.primary_entity: |
|---|
| 2019 | _instance = self.mapper._instance_processor(context, (self.path_entity,), adapter, |
|---|
| 2020 | extension=self.extension, only_load_props=query._only_load_props, refresh_state=context.refresh_state, |
|---|
| 2021 | polymorphic_discriminator=self._polymorphic_discriminator |
|---|
| 2022 | ) |
|---|
| 2023 | else: |
|---|
| 2024 | _instance = self.mapper._instance_processor(context, (self.path_entity,), adapter, |
|---|
| 2025 | polymorphic_discriminator=self._polymorphic_discriminator) |
|---|
| 2026 | |
|---|
| 2027 | if custom_rows: |
|---|
| 2028 | def main(context, row, result): |
|---|
| 2029 | _instance(row, result) |
|---|
| 2030 | else: |
|---|
| 2031 | def main(context, row): |
|---|
| 2032 | return _instance(row, None) |
|---|
| 2033 | |
|---|
| 2034 | if self.is_aliased_class: |
|---|
| 2035 | entname = self.entity._sa_label_name |
|---|
| 2036 | else: |
|---|
| 2037 | entname = self.mapper.class_.__name__ |
|---|
| 2038 | |
|---|
| 2039 | return main, entname |
|---|
| 2040 | |
|---|
| 2041 | def setup_context(self, query, context): |
|---|
| 2042 | adapter = self._get_entity_clauses(query, context) |
|---|
| 2043 | |
|---|
| 2044 | context.froms.append(self.selectable) |
|---|
| 2045 | |
|---|
| 2046 | if context.order_by is False and self.mapper.order_by: |
|---|
| 2047 | context.order_by = self.mapper.order_by |
|---|
| 2048 | |
|---|
| 2049 | # apply adaptation to the mapper's order_by if needed. |
|---|
| 2050 | if adapter: |
|---|
| 2051 | context.order_by = adapter.adapt_list(util.to_list(context.order_by)) |
|---|
| 2052 | |
|---|
| 2053 | for value in self.mapper._iterate_polymorphic_properties(self._with_polymorphic): |
|---|
| 2054 | if query._only_load_props and value.key not in query._only_load_props: |
|---|
| 2055 | continue |
|---|
| 2056 | value.setup( |
|---|
| 2057 | context, |
|---|
| 2058 | self, |
|---|
| 2059 | (self.path_entity,), |
|---|
| 2060 | adapter, |
|---|
| 2061 | only_load_props=query._only_load_props, |
|---|
| 2062 | column_collection=context.primary_columns |
|---|
| 2063 | ) |
|---|
| 2064 | |
|---|
| 2065 | if self._polymorphic_discriminator: |
|---|
| 2066 | if adapter: |
|---|
| 2067 | pd = adapter.columns[self._polymorphic_discriminator] |
|---|
| 2068 | else: |
|---|
| 2069 | pd = self._polymorphic_discriminator |
|---|
| 2070 | context.primary_columns.append(pd) |
|---|
| 2071 | |
|---|
| 2072 | def __str__(self): |
|---|
| 2073 | return str(self.mapper) |
|---|
| 2074 | |
|---|
| 2075 | |
|---|
| 2076 | class _ColumnEntity(_QueryEntity): |
|---|
| 2077 | """Column/expression based entity.""" |
|---|
| 2078 | |
|---|
| 2079 | def __init__(self, query, column): |
|---|
| 2080 | if isinstance(column, basestring): |
|---|
| 2081 | column = sql.literal_column(column) |
|---|
| 2082 | self._result_label = column.name |
|---|
| 2083 | elif isinstance(column, attributes.QueryableAttribute): |
|---|
| 2084 | self._result_label = column.key |
|---|
| 2085 | column = column.__clause_element__() |
|---|
| 2086 | else: |
|---|
| 2087 | self._result_label = getattr(column, 'key', None) |
|---|
| 2088 | |
|---|
| 2089 | if not isinstance(column, expression.ColumnElement) and hasattr(column, '_select_iterable'): |
|---|
| 2090 | for c in column._select_iterable: |
|---|
| 2091 | if c is column: |
|---|
| 2092 | break |
|---|
| 2093 | _ColumnEntity(query, c) |
|---|
| 2094 | |
|---|
| 2095 | if c is not column: |
|---|
| 2096 | return |
|---|
| 2097 | |
|---|
| 2098 | if not isinstance(column, sql.ColumnElement): |
|---|
| 2099 | raise sa_exc.InvalidRequestError( |
|---|
| 2100 | "SQL expression, column, or mapped entity expected - got '%r'" % column |
|---|
| 2101 | ) |
|---|
| 2102 | |
|---|
| 2103 | # if the Column is unnamed, give it a |
|---|
| 2104 | # label() so that mutable column expressions |
|---|
| 2105 | # can be located in the result even |
|---|
| 2106 | # if the expression's identity has been changed |
|---|
| 2107 | # due to adaption |
|---|
| 2108 | if not column._label: |
|---|
| 2109 | column = column.label(None) |
|---|
| 2110 | |
|---|
| 2111 | query._entities.append(self) |
|---|
| 2112 | |
|---|
| 2113 | self.column = column |
|---|
| 2114 | self.froms = set() |
|---|
| 2115 | |
|---|
| 2116 | # look for ORM entities represented within the |
|---|
| 2117 | # given expression. Try to count only entities |
|---|
| 2118 | # for columns whos FROM object is in the actual list |
|---|
| 2119 | # of FROMs for the overall expression - this helps |
|---|
| 2120 | # subqueries which were built from ORM constructs from |
|---|
| 2121 | # leaking out their entities into the main select construct |
|---|
| 2122 | actual_froms = set(column._from_objects) |
|---|
| 2123 | |
|---|
| 2124 | self.entities = util.OrderedSet( |
|---|
| 2125 | elem._annotations['parententity'] |
|---|
| 2126 | for elem in visitors.iterate(column, {}) |
|---|
| 2127 | if 'parententity' in elem._annotations |
|---|
| 2128 | and actual_froms.intersection(elem._from_objects) |
|---|
| 2129 | ) |
|---|
| 2130 | |
|---|
| 2131 | if self.entities: |
|---|
| 2132 | self.entity_zero = list(self.entities)[0] |
|---|
| 2133 | else: |
|---|
| 2134 | self.entity_zero = None |
|---|
| 2135 | |
|---|
| 2136 | def setup_entity(self, entity, mapper, adapter, from_obj, is_aliased_class, with_polymorphic): |
|---|
| 2137 | self.selectable = from_obj |
|---|
| 2138 | self.froms.add(from_obj) |
|---|
| 2139 | |
|---|
| 2140 | def corresponds_to(self, entity): |
|---|
| 2141 | if self.entity_zero is None: |
|---|
| 2142 | return False |
|---|
| 2143 | elif _is_aliased_class(entity): |
|---|
| 2144 | return entity is self.entity_zero |
|---|
| 2145 | else: |
|---|
| 2146 | return not _is_aliased_class(self.entity_zero) and \ |
|---|
| 2147 | entity.base_mapper.common_parent(self.entity_zero) |
|---|
| 2148 | |
|---|
| 2149 | def _resolve_expr_against_query_aliases(self, query, expr, context): |
|---|
| 2150 | return query._adapt_clause(expr, False, True) |
|---|
| 2151 | |
|---|
| 2152 | def row_processor(self, query, context, custom_rows): |
|---|
| 2153 | column = self._resolve_expr_against_query_aliases(query, self.column, context) |
|---|
| 2154 | |
|---|
| 2155 | if context.adapter: |
|---|
| 2156 | column = context.adapter.columns[column] |
|---|
| 2157 | |
|---|
| 2158 | def proc(context, row): |
|---|
| 2159 | return row[column] |
|---|
| 2160 | |
|---|
| 2161 | return (proc, self._result_label) |
|---|
| 2162 | |
|---|
| 2163 | def setup_context(self, query, context): |
|---|
| 2164 | column = self._resolve_expr_against_query_aliases(query, self.column, context) |
|---|
| 2165 | context.froms += list(self.froms) |
|---|
| 2166 | context.primary_columns.append(column) |
|---|
| 2167 | |
|---|
| 2168 | def __str__(self): |
|---|
| 2169 | return str(self.column) |
|---|
| 2170 | |
|---|
| 2171 | log.class_logger(Query) |
|---|
| 2172 | |
|---|
| 2173 | class QueryContext(object): |
|---|
| 2174 | def __init__(self, query): |
|---|
| 2175 | |
|---|
| 2176 | if query._statement: |
|---|
| 2177 | if isinstance(query._statement, expression._SelectBaseMixin) and not query._statement.use_labels: |
|---|
| 2178 | self.statement = query._statement.apply_labels() |
|---|
| 2179 | else: |
|---|
| 2180 | self.statement = query._statement |
|---|
| 2181 | else: |
|---|
| 2182 | self.statement = None |
|---|
| 2183 | self.from_clause = query._from_obj |
|---|
| 2184 | self.whereclause = query._criterion |
|---|
| 2185 | self.order_by = query._order_by |
|---|
| 2186 | if self.order_by: |
|---|
| 2187 | self.order_by = [expression._literal_as_text(o) for o in util.to_list(self.order_by)] |
|---|
| 2188 | |
|---|
| 2189 | self.query = query |
|---|
| 2190 | self.session = query.session |
|---|
| 2191 | self.populate_existing = query._populate_existing |
|---|
| 2192 | self.version_check = query._version_check |
|---|
| 2193 | self.refresh_state = query._refresh_state |
|---|
| 2194 | self.primary_columns = [] |
|---|
| 2195 | self.secondary_columns = [] |
|---|
| 2196 | self.eager_order_by = [] |
|---|
| 2197 | self.enable_eagerloads = query._enable_eagerloads |
|---|
| 2198 | self.eager_joins = {} |
|---|
| 2199 | self.froms = [] |
|---|
| 2200 | self.adapter = None |
|---|
| 2201 | |
|---|
| 2202 | self.options = set(query._with_options) |
|---|
| 2203 | self.propagate_options = self.options.difference(o for o in self.options if not o.propagate_to_loaders) |
|---|
| 2204 | self.attributes = query._attributes.copy() |
|---|
| 2205 | |
|---|
| 2206 | class AliasOption(interfaces.MapperOption): |
|---|
| 2207 | |
|---|
| 2208 | def __init__(self, alias): |
|---|
| 2209 | self.alias = alias |
|---|
| 2210 | |
|---|
| 2211 | def process_query(self, query): |
|---|
| 2212 | if isinstance(self.alias, basestring): |
|---|
| 2213 | alias = query._mapper_zero().mapped_table.alias(self.alias) |
|---|
| 2214 | else: |
|---|
| 2215 | alias = self.alias |
|---|
| 2216 | query._from_obj_alias = sql_util.ColumnAdapter(alias) |
|---|
| 2217 | |
|---|
| 2218 | |
|---|
| 2219 | _runid = 1L |
|---|
| 2220 | _id_lock = util.threading.Lock() |
|---|
| 2221 | |
|---|
| 2222 | def _new_runid(): |
|---|
| 2223 | global _runid |
|---|
| 2224 | _id_lock.acquire() |
|---|
| 2225 | try: |
|---|
| 2226 | _runid += 1 |
|---|
| 2227 | return _runid |
|---|
| 2228 | finally: |
|---|
| 2229 | _id_lock.release() |
|---|