1# orm/strategies.py
2# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
3# <see AUTHORS file>
4#
5# This module is part of SQLAlchemy and is released under
6# the MIT License: http://www.opensource.org/licenses/mit-license.php
8"""sqlalchemy.orm.interfaces.LoaderStrategy
9 implementations, and related MapperOptions."""
10from __future__ import absolute_import
12import collections
13import itertools
15from . import attributes
16from . import exc as orm_exc
17from . import interfaces
18from . import loading
19from . import properties
20from . import query
21from . import unitofwork
22from . import util as orm_util
23from .base import _DEFER_FOR_STATE
24from .base import _SET_DEFERRED_EXPIRED
25from .interfaces import LoaderStrategy
26from .interfaces import StrategizedProperty
27from .session import _state_session
28from .state import InstanceState
29from .util import _none_set
30from .util import aliased
31from .. import event
32from .. import exc as sa_exc
33from .. import inspect
34from .. import log
35from .. import sql
36from .. import util
37from ..sql import util as sql_util
38from ..sql import visitors
41def _register_attribute(
42 prop,
43 mapper,
44 useobject,
45 compare_function=None,
46 typecallable=None,
47 callable_=None,
48 proxy_property=None,
49 active_history=False,
50 impl_class=None,
51 **kw
52):
54 attribute_ext = list(util.to_list(prop.extension, default=[]))
56 listen_hooks = []
58 uselist = useobject and prop.uselist
60 if useobject and prop.single_parent:
61 listen_hooks.append(single_parent_validator)
63 if prop.key in prop.parent.validators:
64 fn, opts = prop.parent.validators[prop.key]
65 listen_hooks.append(
66 lambda desc, prop: orm_util._validator_events(
67 desc, prop.key, fn, **opts
68 )
69 )
71 if useobject:
72 listen_hooks.append(unitofwork.track_cascade_events)
74 # need to assemble backref listeners
 75 # after the single_parent_validator and mapper validators
76 if useobject:
77 backref = prop.back_populates
78 if backref and prop._effective_sync_backref:
79 listen_hooks.append(
80 lambda desc, prop: attributes.backref_listeners(
81 desc, backref, uselist
82 )
83 )
85 # a single MapperProperty is shared down a class inheritance
86 # hierarchy, so we set up attribute instrumentation and backref event
87 # for each mapper down the hierarchy.
89 # typically, "mapper" is the same as prop.parent, due to the way
90 # the configure_mappers() process runs, however this is not strongly
91 # enforced, and in the case of a second configure_mappers() run the
92 # mapper here might not be prop.parent; also, a subclass mapper may
93 # be called here before a superclass mapper. That is, can't depend
94 # on mappers not already being set up so we have to check each one.
96 for m in mapper.self_and_descendants:
97 if prop is m._props.get(
98 prop.key
99 ) and not m.class_manager._attr_has_impl(prop.key):
101 desc = attributes.register_attribute_impl(
102 m.class_,
103 prop.key,
104 parent_token=prop,
105 uselist=uselist,
106 compare_function=compare_function,
107 useobject=useobject,
108 extension=attribute_ext,
109 trackparent=useobject
110 and (
111 prop.single_parent
112 or prop.direction is interfaces.ONETOMANY
113 ),
114 typecallable=typecallable,
115 callable_=callable_,
116 active_history=active_history,
117 impl_class=impl_class,
118 send_modified_events=not useobject or not prop.viewonly,
119 doc=prop.doc,
120 **kw
121 )
123 for hook in listen_hooks:
124 hook(desc, prop)
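# ---------------------------------------------------------------------------
# Illustrative, self-contained sketch (editorial addition, not part of this
# module): the ``prop.parent.validators`` hook wired up above is what backs
# the user-facing ``@validates`` decorator.  The ``Account`` model below is
# hypothetical and only shows how such a validator is declared.
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import validates

Base = declarative_base()


class Account(Base):
    __tablename__ = "account"

    id = Column(Integer, primary_key=True)
    email = Column(String)

    @validates("email")
    def _validate_email(self, key, value):
        # runs via the validator listener that _register_attribute() attaches
        if "@" not in value:
            raise ValueError("invalid email address")
        return value
# ---------------------------------------------------------------------------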
127@properties.ColumnProperty.strategy_for(instrument=False, deferred=False)
128class UninstrumentedColumnLoader(LoaderStrategy):
129 """Represent a non-instrumented MapperProperty.
131 The polymorphic_on argument of mapper() often results in this,
132 if the argument is against the with_polymorphic selectable.
134 """
136 __slots__ = ("columns",)
138 def __init__(self, parent, strategy_key):
139 super(UninstrumentedColumnLoader, self).__init__(parent, strategy_key)
140 self.columns = self.parent_property.columns
142 def setup_query(
143 self,
144 context,
145 query_entity,
146 path,
147 loadopt,
148 adapter,
149 column_collection=None,
150 **kwargs
151 ):
152 for c in self.columns:
153 if adapter:
154 c = adapter.columns[c]
155 column_collection.append(c)
157 def create_row_processor(
158 self, context, path, loadopt, mapper, result, adapter, populators
159 ):
160 pass
163@log.class_logger
164@properties.ColumnProperty.strategy_for(instrument=True, deferred=False)
165class ColumnLoader(LoaderStrategy):
166 """Provide loading behavior for a :class:`.ColumnProperty`."""
168 __slots__ = "columns", "is_composite"
170 def __init__(self, parent, strategy_key):
171 super(ColumnLoader, self).__init__(parent, strategy_key)
172 self.columns = self.parent_property.columns
173 self.is_composite = hasattr(self.parent_property, "composite_class")
175 def setup_query(
176 self,
177 context,
178 query_entity,
179 path,
180 loadopt,
181 adapter,
182 column_collection,
183 memoized_populators,
184 **kwargs
185 ):
187 for c in self.columns:
188 if adapter:
189 c = adapter.columns[c]
190 column_collection.append(c)
192 fetch = self.columns[0]
193 if adapter:
194 fetch = adapter.columns[fetch]
195 memoized_populators[self.parent_property] = fetch
197 def init_class_attribute(self, mapper):
198 self.is_class_level = True
199 coltype = self.columns[0].type
200 # TODO: check all columns ? check for foreign key as well?
201 active_history = (
202 self.parent_property.active_history
203 or self.columns[0].primary_key
204 or mapper.version_id_col in set(self.columns)
205 )
207 _register_attribute(
208 self.parent_property,
209 mapper,
210 useobject=False,
211 compare_function=coltype.compare_values,
212 active_history=active_history,
213 )
215 def create_row_processor(
216 self, context, path, loadopt, mapper, result, adapter, populators
217 ):
218 # look through list of columns represented here
219 # to see which, if any, is present in the row.
220 for col in self.columns:
221 if adapter:
222 col = adapter.columns[col]
223 getter = result._getter(col, False)
224 if getter:
225 populators["quick"].append((self.key, getter))
226 break
227 else:
228 populators["expire"].append((self.key, True))
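# ---------------------------------------------------------------------------
# Illustrative, self-contained sketch (editorial addition, not part of this
# module): ordinary mapped columns use the ColumnLoader strategy above by
# default -- the column is placed in the SELECT and populated straight from
# the result row via the "quick" populator.  Model names are hypothetical.
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session

Base = declarative_base()


class Employee(Base):
    __tablename__ = "employee"

    id = Column(Integer, primary_key=True)
    name = Column(String)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = Session(engine)
session.add(Employee(name="spongebob"))
session.commit()
session.expunge_all()

emp = session.query(Employee).first()
print(emp.name)  # already loaded by the row processor; no extra SQL here
# ---------------------------------------------------------------------------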
231@log.class_logger
232@properties.ColumnProperty.strategy_for(query_expression=True)
233class ExpressionColumnLoader(ColumnLoader):
234 def __init__(self, parent, strategy_key):
235 super(ExpressionColumnLoader, self).__init__(parent, strategy_key)
237 null = sql.null()
238 self._have_default_expression = any(
239 not c.compare(null) for c in self.parent_property.columns
240 )
242 def setup_query(
243 self,
244 context,
245 query_entity,
246 path,
247 loadopt,
248 adapter,
249 column_collection,
250 memoized_populators,
251 **kwargs
252 ):
253 columns = None
254 if loadopt and "expression" in loadopt.local_opts:
255 columns = [loadopt.local_opts["expression"]]
256 elif self._have_default_expression:
257 columns = self.parent_property.columns
259 if columns is None:
260 return
262 for c in columns:
263 if adapter:
264 c = adapter.columns[c]
265 column_collection.append(c)
267 fetch = columns[0]
268 if adapter:
269 fetch = adapter.columns[fetch]
270 memoized_populators[self.parent_property] = fetch
272 def create_row_processor(
273 self, context, path, loadopt, mapper, result, adapter, populators
274 ):
275 # look through list of columns represented here
276 # to see which, if any, is present in the row.
277 if loadopt and "expression" in loadopt.local_opts:
278 columns = [loadopt.local_opts["expression"]]
280 for col in columns:
281 if adapter:
282 col = adapter.columns[col]
283 getter = result._getter(col, False)
284 if getter:
285 populators["quick"].append((self.key, getter))
286 break
287 else:
288 populators["expire"].append((self.key, True))
290 def init_class_attribute(self, mapper):
291 self.is_class_level = True
293 _register_attribute(
294 self.parent_property,
295 mapper,
296 useobject=False,
297 compare_function=self.columns[0].type.compare_values,
298 accepts_scalar_loader=False,
299 )
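# ---------------------------------------------------------------------------
# Illustrative, self-contained sketch (editorial addition, not part of this
# module): ExpressionColumnLoader backs query_expression(); the expression is
# only rendered in the SELECT when a with_expression() option (or a default
# expression) supplies one.  The Point model is hypothetical.
from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session, query_expression, with_expression

Base = declarative_base()


class Point(Base):
    __tablename__ = "point"

    id = Column(Integer, primary_key=True)
    x = Column(Integer)
    y = Column(Integer)
    total = query_expression()


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = Session(engine)
session.add(Point(x=3, y=4))
session.commit()
session.expunge_all()

p = (
    session.query(Point)
    .options(with_expression(Point.total, Point.x + Point.y))
    .first()
)
print(p.total)  # 7; without the option the attribute stays at its default
# ---------------------------------------------------------------------------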
302@log.class_logger
303@properties.ColumnProperty.strategy_for(deferred=True, instrument=True)
304@properties.ColumnProperty.strategy_for(do_nothing=True)
305class DeferredColumnLoader(LoaderStrategy):
306 """Provide loading behavior for a deferred :class:`.ColumnProperty`."""
308 __slots__ = "columns", "group"
310 def __init__(self, parent, strategy_key):
311 super(DeferredColumnLoader, self).__init__(parent, strategy_key)
312 if hasattr(self.parent_property, "composite_class"):
313 raise NotImplementedError(
314 "Deferred loading for composite " "types not implemented yet"
315 )
316 self.columns = self.parent_property.columns
317 self.group = self.parent_property.group
319 def create_row_processor(
320 self, context, path, loadopt, mapper, result, adapter, populators
321 ):
323 # this path currently does not check the result
324 # for the column; this is because in most cases we are
325 # working just with the setup_query() directive which does
326 # not support this, and the behavior here should be consistent.
327 if not self.is_class_level:
328 set_deferred_for_local_state = (
329 self.parent_property._deferred_column_loader
330 )
331 populators["new"].append((self.key, set_deferred_for_local_state))
332 else:
333 populators["expire"].append((self.key, False))
335 def init_class_attribute(self, mapper):
336 self.is_class_level = True
338 _register_attribute(
339 self.parent_property,
340 mapper,
341 useobject=False,
342 compare_function=self.columns[0].type.compare_values,
343 callable_=self._load_for_state,
344 expire_missing=False,
345 )
347 def setup_query(
348 self,
349 context,
350 query_entity,
351 path,
352 loadopt,
353 adapter,
354 column_collection,
355 memoized_populators,
356 only_load_props=None,
357 **kw
358 ):
360 if (
361 (
362 loadopt
363 and "undefer_pks" in loadopt.local_opts
364 and set(self.columns).intersection(
365 self.parent._should_undefer_in_wildcard
366 )
367 )
368 or (
369 loadopt
370 and self.group
371 and loadopt.local_opts.get(
372 "undefer_group_%s" % self.group, False
373 )
374 )
375 or (only_load_props and self.key in only_load_props)
376 ):
377 self.parent_property._get_strategy(
378 (("deferred", False), ("instrument", True))
379 ).setup_query(
380 context,
381 query_entity,
382 path,
383 loadopt,
384 adapter,
385 column_collection,
386 memoized_populators,
387 **kw
388 )
389 elif self.is_class_level:
390 memoized_populators[self.parent_property] = _SET_DEFERRED_EXPIRED
391 else:
392 memoized_populators[self.parent_property] = _DEFER_FOR_STATE
394 def _load_for_state(self, state, passive):
395 if not state.key:
396 return attributes.ATTR_EMPTY
398 if not passive & attributes.SQL_OK:
399 return attributes.PASSIVE_NO_RESULT
401 localparent = state.manager.mapper
403 if self.group:
404 toload = [
405 p.key
406 for p in localparent.iterate_properties
407 if isinstance(p, StrategizedProperty)
408 and isinstance(p.strategy, DeferredColumnLoader)
409 and p.group == self.group
410 ]
411 else:
412 toload = [self.key]
414 # narrow the keys down to just those which have no history
415 group = [k for k in toload if k in state.unmodified]
417 session = _state_session(state)
418 if session is None:
419 raise orm_exc.DetachedInstanceError(
420 "Parent instance %s is not bound to a Session; "
421 "deferred load operation of attribute '%s' cannot proceed"
422 % (orm_util.state_str(state), self.key)
423 )
425 query = session.query(localparent)
426 if (
427 loading.load_on_ident(
428 query, state.key, only_load_props=group, refresh_state=state
429 )
430 is None
431 ):
432 raise orm_exc.ObjectDeletedError(state)
434 return attributes.ATTR_WAS_SET
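# ---------------------------------------------------------------------------
# Illustrative, self-contained sketch (editorial addition, not part of this
# module): the deferred path above fires when a deferred() column is first
# accessed, or is bypassed entirely when undefer()/undefer_group() options
# are present.  The Report model is hypothetical.
from sqlalchemy import Column, Integer, Text, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session, deferred, undefer

Base = declarative_base()


class Report(Base):
    __tablename__ = "report"

    id = Column(Integer, primary_key=True)
    title = Column(Text)
    body = deferred(Column(Text), group="content")


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = Session(engine)
session.add(Report(title="summary", body="a very large document"))
session.commit()
session.expunge_all()

report = session.query(Report).first()
report.body  # triggers _load_for_state() -> a second SELECT for the column

report = session.query(Report).options(undefer("body")).first()
# with undefer(), setup_query() above delegates to the plain ColumnLoader
# ---------------------------------------------------------------------------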
437class LoadDeferredColumns(object):
438 """serializable loader object used by DeferredColumnLoader"""
440 def __init__(self, key):
441 self.key = key
443 def __call__(self, state, passive=attributes.PASSIVE_OFF):
444 key = self.key
446 localparent = state.manager.mapper
447 prop = localparent._props[key]
448 strategy = prop._strategies[DeferredColumnLoader]
449 return strategy._load_for_state(state, passive)
452class AbstractRelationshipLoader(LoaderStrategy):
453 """LoaderStratgies which deal with related objects."""
455 __slots__ = "mapper", "target", "uselist", "entity"
457 def __init__(self, parent, strategy_key):
458 super(AbstractRelationshipLoader, self).__init__(parent, strategy_key)
459 self.mapper = self.parent_property.mapper
460 self.entity = self.parent_property.entity
461 self.target = self.parent_property.target
462 self.uselist = self.parent_property.uselist
465@log.class_logger
466@properties.RelationshipProperty.strategy_for(do_nothing=True)
467class DoNothingLoader(LoaderStrategy):
468 """Relationship loader that makes no change to the object's state.
470 Compared to NoLoader, this loader does not initialize the
471 collection/attribute to empty/none; the usual default LazyLoader will
472 take effect.
474 """
477@log.class_logger
478@properties.RelationshipProperty.strategy_for(lazy="noload")
479@properties.RelationshipProperty.strategy_for(lazy=None)
480class NoLoader(AbstractRelationshipLoader):
481 """Provide loading behavior for a :class:`.RelationshipProperty`
482 with "lazy=None".
484 """
486 __slots__ = ()
488 def init_class_attribute(self, mapper):
489 self.is_class_level = True
491 _register_attribute(
492 self.parent_property,
493 mapper,
494 useobject=True,
495 typecallable=self.parent_property.collection_class,
496 )
498 def create_row_processor(
499 self, context, path, loadopt, mapper, result, adapter, populators
500 ):
501 def invoke_no_load(state, dict_, row):
502 if self.uselist:
503 state.manager.get_impl(self.key).initialize(state, dict_)
504 else:
505 dict_[self.key] = None
507 populators["new"].append((self.key, invoke_no_load))
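# ---------------------------------------------------------------------------
# Illustrative, self-contained sketch (editorial addition, not part of this
# module): lazy="noload" (or lazy=None) selects NoLoader above, so the
# attribute is simply initialized to an empty collection or None and no SQL
# is ever emitted for it.  Parent/Child are hypothetical models.
from sqlalchemy import Column, ForeignKey, Integer, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session, relationship

Base = declarative_base()


class Parent(Base):
    __tablename__ = "parent"

    id = Column(Integer, primary_key=True)
    children = relationship("Child", lazy="noload")


class Child(Base):
    __tablename__ = "child"

    id = Column(Integer, primary_key=True)
    parent_id = Column(Integer, ForeignKey("parent.id"))


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = Session(engine)
parent = Parent()
session.add(parent)
session.commit()
session.add(Child(parent_id=parent.id))
session.commit()
session.expunge_all()

parent = session.query(Parent).first()
print(parent.children)  # [] -- invoke_no_load() ran; no lazy SELECT occurs
# ---------------------------------------------------------------------------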
510@log.class_logger
511@properties.RelationshipProperty.strategy_for(lazy=True)
512@properties.RelationshipProperty.strategy_for(lazy="select")
513@properties.RelationshipProperty.strategy_for(lazy="raise")
514@properties.RelationshipProperty.strategy_for(lazy="raise_on_sql")
515@properties.RelationshipProperty.strategy_for(lazy="baked_select")
516class LazyLoader(AbstractRelationshipLoader, util.MemoizedSlots):
517 """Provide loading behavior for a :class:`.RelationshipProperty`
518 with "lazy=True", that is loads when first accessed.
520 """
522 __slots__ = (
523 "_lazywhere",
524 "_rev_lazywhere",
525 "use_get",
526 "is_aliased_class",
527 "_bind_to_col",
528 "_equated_columns",
529 "_rev_bind_to_col",
530 "_rev_equated_columns",
531 "_simple_lazy_clause",
532 "_raise_always",
533 "_raise_on_sql",
534 "_bakery",
535 )
537 def __init__(self, parent, strategy_key):
538 super(LazyLoader, self).__init__(parent, strategy_key)
539 self._raise_always = self.strategy_opts["lazy"] == "raise"
540 self._raise_on_sql = self.strategy_opts["lazy"] == "raise_on_sql"
542 self.is_aliased_class = inspect(self.entity).is_aliased_class
544 join_condition = self.parent_property._join_condition
545 (
546 self._lazywhere,
547 self._bind_to_col,
548 self._equated_columns,
549 ) = join_condition.create_lazy_clause()
551 (
552 self._rev_lazywhere,
553 self._rev_bind_to_col,
554 self._rev_equated_columns,
555 ) = join_condition.create_lazy_clause(reverse_direction=True)
557 self.logger.info("%s lazy loading clause %s", self, self._lazywhere)
559 # determine if our "lazywhere" clause is the same as the mapper's
560 # get() clause. then we can just use mapper.get()
561 #
562 # TODO: the "not self.uselist" can be taken out entirely; a m2o
563 # load that populates for a list (very unusual, but is possible with
564 # the API) can still set for "None" and the attribute system will
565 # populate as an empty list.
566 self.use_get = (
567 not self.is_aliased_class
568 and not self.uselist
569 and self.entity._get_clause[0].compare(
570 self._lazywhere,
571 use_proxies=True,
572 equivalents=self.mapper._equivalent_columns,
573 )
574 )
576 if self.use_get:
577 for col in list(self._equated_columns):
578 if col in self.mapper._equivalent_columns:
579 for c in self.mapper._equivalent_columns[col]:
580 self._equated_columns[c] = self._equated_columns[col]
582 self.logger.info(
583 "%s will use query.get() to " "optimize instance loads", self
584 )
586 def init_class_attribute(self, mapper):
587 self.is_class_level = True
589 active_history = (
590 self.parent_property.active_history
591 or self.parent_property.direction is not interfaces.MANYTOONE
592 or not self.use_get
593 )
595 # MANYTOONE currently only needs the
596 # "old" value for delete-orphan
597 # cascades. the required _SingleParentValidator
598 # will enable active_history
599 # in that case. otherwise we don't need the
600 # "old" value during backref operations.
601 _register_attribute(
602 self.parent_property,
603 mapper,
604 useobject=True,
605 callable_=self._load_for_state,
606 typecallable=self.parent_property.collection_class,
607 active_history=active_history,
608 )
610 def _memoized_attr__simple_lazy_clause(self):
611 criterion, bind_to_col = (self._lazywhere, self._bind_to_col)
613 params = []
615 def visit_bindparam(bindparam):
616 bindparam.unique = False
618 visitors.traverse(criterion, {}, {"bindparam": visit_bindparam})
620 def visit_bindparam(bindparam):
621 if bindparam._identifying_key in bind_to_col:
622 params.append(
623 (
624 bindparam.key,
625 bind_to_col[bindparam._identifying_key],
626 None,
627 )
628 )
629 elif bindparam.callable is None:
630 params.append((bindparam.key, None, bindparam.value))
632 criterion = visitors.cloned_traverse(
633 criterion, {}, {"bindparam": visit_bindparam}
634 )
636 return criterion, params
638 def _generate_lazy_clause(self, state, passive):
639 criterion, param_keys = self._simple_lazy_clause
641 if state is None:
642 return sql_util.adapt_criterion_to_null(
643 criterion, [key for key, ident, value in param_keys]
644 )
646 mapper = self.parent_property.parent
648 o = state.obj() # strong ref
649 dict_ = attributes.instance_dict(o)
651 if passive & attributes.INIT_OK:
652 passive ^= attributes.INIT_OK
654 params = {}
655 for key, ident, value in param_keys:
656 if ident is not None:
657 if passive and passive & attributes.LOAD_AGAINST_COMMITTED:
658 value = mapper._get_committed_state_attr_by_column(
659 state, dict_, ident, passive
660 )
661 else:
662 value = mapper._get_state_attr_by_column(
663 state, dict_, ident, passive
664 )
666 params[key] = value
668 return criterion, params
670 def _invoke_raise_load(self, state, passive, lazy):
671 raise sa_exc.InvalidRequestError(
672 "'%s' is not available due to lazy='%s'" % (self, lazy)
673 )
675 def _load_for_state(self, state, passive):
677 if not state.key and (
678 (
679 not self.parent_property.load_on_pending
680 and not state._load_pending
681 )
682 or not state.session_id
683 ):
684 return attributes.ATTR_EMPTY
686 pending = not state.key
687 primary_key_identity = None
689 if (not passive & attributes.SQL_OK and not self.use_get) or (
690 not passive & attributes.NON_PERSISTENT_OK and pending
691 ):
692 return attributes.PASSIVE_NO_RESULT
694 if (
695 # we were given lazy="raise"
696 self._raise_always
697 # the no_raise history-related flag was not passed
698 and not passive & attributes.NO_RAISE
699 and (
700 # if we are use_get and related_object_ok is disabled,
701 # which means we are at most looking in the identity map
702 # for history purposes or otherwise returning
703 # PASSIVE_NO_RESULT, don't raise. This is also a
704 # history-related flag
705 not self.use_get
706 or passive & attributes.RELATED_OBJECT_OK
707 )
708 ):
710 self._invoke_raise_load(state, passive, "raise")
712 session = _state_session(state)
713 if not session:
714 if passive & attributes.NO_RAISE:
715 return attributes.PASSIVE_NO_RESULT
717 raise orm_exc.DetachedInstanceError(
718 "Parent instance %s is not bound to a Session; "
719 "lazy load operation of attribute '%s' cannot proceed"
720 % (orm_util.state_str(state), self.key)
721 )
723 # if we have a simple primary key load, check the
724 # identity map without generating a Query at all
725 if self.use_get:
726 primary_key_identity = self._get_ident_for_use_get(
727 session, state, passive
728 )
729 if attributes.PASSIVE_NO_RESULT in primary_key_identity:
730 return attributes.PASSIVE_NO_RESULT
731 elif attributes.NEVER_SET in primary_key_identity:
732 return attributes.NEVER_SET
734 if _none_set.issuperset(primary_key_identity):
735 return None
737 # look for this identity in the identity map. Delegate to the
738 # Query class in use, as it may have special rules for how it
739 # does this, including how it decides what the correct
740 # identity_token would be for this identity.
741 instance = session.query()._identity_lookup(
742 self.entity,
743 primary_key_identity,
744 passive=passive,
745 lazy_loaded_from=state,
746 )
748 if instance is not None:
749 if instance is attributes.PASSIVE_CLASS_MISMATCH:
750 return None
751 else:
752 return instance
753 elif (
754 not passive & attributes.SQL_OK
755 or not passive & attributes.RELATED_OBJECT_OK
756 ):
757 return attributes.PASSIVE_NO_RESULT
759 return self._emit_lazyload(
760 session, state, primary_key_identity, passive
761 )
763 def _get_ident_for_use_get(self, session, state, passive):
764 instance_mapper = state.manager.mapper
766 if passive & attributes.LOAD_AGAINST_COMMITTED:
767 get_attr = instance_mapper._get_committed_state_attr_by_column
768 else:
769 get_attr = instance_mapper._get_state_attr_by_column
771 dict_ = state.dict
773 return [
774 get_attr(state, dict_, self._equated_columns[pk], passive=passive)
775 for pk in self.mapper.primary_key
776 ]
778 @util.dependencies("sqlalchemy.ext.baked")
779 def _memoized_attr__bakery(self, baked):
780 return baked.bakery(size=50)
782 @util.dependencies("sqlalchemy.orm.strategy_options")
783 def _emit_lazyload(
784 self, strategy_options, session, state, primary_key_identity, passive
785 ):
786 # emit lazy load now using BakedQuery, to cut way down on the overhead
787 # of generating queries.
788 # there are two big things we are trying to guard against here:
789 #
790 # 1. two different lazy loads that need to have a different result,
791 # being cached on the same key. The results between two lazy loads
792 # can be different due to the options passed to the query, which
793 # take effect for descendant objects. Therefore we have to make
794 # sure paths and load options generate good cache keys, and if they
795 # don't, we don't cache.
796 # 2. a lazy load that gets cached on a key that includes some
797 # "throwaway" object, like a per-query AliasedClass, meaning
798 # the cache key will never be seen again and the cache itself
799 # will fill up. (the cache is an LRU cache, so while we won't
800 # run out of memory, it will perform terribly when it's full. A
801 # warning is emitted if this occurs.) We must prevent the
802 # generation of a cache key that is including a throwaway object
803 # in the key.
805 # note that "lazy='select'" and "lazy=True" make two separate
806 # lazy loaders. Currently the LRU cache is local to the LazyLoader,
807 # however add ourselves to the initial cache key just to future
808 # proof in case it moves
809 q = self._bakery(lambda session: session.query(self.entity), self)
811 q.add_criteria(
812 lambda q: q._adapt_all_clauses()._with_invoke_all_eagers(False),
813 self.parent_property,
814 )
816 if not self.parent_property.bake_queries:
817 q.spoil(full=True)
819 if self.parent_property.secondary is not None:
820 q.add_criteria(
821 lambda q: q.select_from(
822 self.mapper, self.parent_property.secondary
823 )
824 )
826 pending = not state.key
828 # don't autoflush on pending
829 if pending or passive & attributes.NO_AUTOFLUSH:
830 q.add_criteria(lambda q: q.autoflush(False))
832 if state.load_options:
833 # here, if any of the options cannot return a cache key,
834 # the BakedQuery "spoils" and caching will not occur. a path
835 # that features Cls.attribute.of_type(some_alias) will cancel
836 # caching, for example, since "some_alias" is user-defined and
837 # is usually a throwaway object.
838 effective_path = state.load_path[self.parent_property]
840 q._add_lazyload_options(state.load_options, effective_path)
842 if self.use_get:
843 if self._raise_on_sql:
844 self._invoke_raise_load(state, passive, "raise_on_sql")
846 return (
847 q(session)
848 .with_post_criteria(lambda q: q._set_lazyload_from(state))
849 ._load_on_pk_identity(
850 session.query(self.mapper), primary_key_identity
851 )
852 )
854 if self.parent_property.order_by:
855 q.add_criteria(
856 lambda q: q.order_by(
857 *util.to_list(self.parent_property.order_by)
858 )
859 )
861 for rev in self.parent_property._reverse_property:
862 # reverse props that are MANYTOONE are loading *this*
863 # object from get(), so don't need to eager out to those.
864 if (
865 rev.direction is interfaces.MANYTOONE
866 and rev._use_get
867 and not isinstance(rev.strategy, LazyLoader)
868 ):
870 q.add_criteria(
871 lambda q: q.options(
872 strategy_options.Load.for_existing_path(
873 q._current_path[rev.parent]
874 ).lazyload(rev.key)
875 )
876 )
878 lazy_clause, params = self._generate_lazy_clause(state, passive)
880 if pending:
881 if util.has_intersection(orm_util._none_set, params.values()):
882 return None
884 elif util.has_intersection(orm_util._never_set, params.values()):
885 return None
887 if self._raise_on_sql:
888 self._invoke_raise_load(state, passive, "raise_on_sql")
890 q.add_criteria(lambda q: q.filter(lazy_clause))
892 # set parameters in the query such that we don't overwrite
893 # parameters that are already set within it
894 def set_default_params(q):
895 params.update(q._params)
896 q._params = params
897 return q
899 result = (
900 q(session)
901 .with_post_criteria(lambda q: q._set_lazyload_from(state))
902 .with_post_criteria(set_default_params)
903 .all()
904 )
905 if self.uselist:
906 return result
907 else:
908 l = len(result)
909 if l:
910 if l > 1:
911 util.warn(
912 "Multiple rows returned with "
913 "uselist=False for lazily-loaded attribute '%s' "
914 % self.parent_property
915 )
917 return result[0]
918 else:
919 return None
921 def create_row_processor(
922 self, context, path, loadopt, mapper, result, adapter, populators
923 ):
924 key = self.key
926 if not self.is_class_level:
927 # we are not the primary manager for this attribute
928 # on this class - set up a
929 # per-instance lazyloader, which will override the
930 # class-level behavior.
931 # this currently only happens when using a
932 # "lazyload" option on a "no load"
933 # attribute - "eager" attributes always have a
934 # class-level lazyloader installed.
935 set_lazy_callable = (
936 InstanceState._instance_level_callable_processor
937 )(mapper.class_manager, LoadLazyAttribute(key, self), key)
939 populators["new"].append((self.key, set_lazy_callable))
940 elif context.populate_existing or mapper.always_refresh:
942 def reset_for_lazy_callable(state, dict_, row):
943 # we are the primary manager for this attribute on
944 # this class - reset its
945 # per-instance attribute state, so that the class-level
946 # lazy loader is
947 # executed when next referenced on this instance.
948 # this is needed in
949 # populate_existing() types of scenarios to reset
950 # any existing state.
951 state._reset(dict_, key)
953 populators["new"].append((self.key, reset_for_lazy_callable))
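# ---------------------------------------------------------------------------
# Illustrative, self-contained sketch (editorial addition, not part of this
# module): LazyLoader above implements the default lazy="select" behavior
# (plus lazy="raise" / "raise_on_sql").  For a simple many-to-one, the
# use_get optimization means the load can often be satisfied from the
# identity map without emitting SQL.  Customer/Order are hypothetical models.
from sqlalchemy import Column, ForeignKey, Integer, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session, relationship

Base = declarative_base()


class Customer(Base):
    __tablename__ = "customer"

    id = Column(Integer, primary_key=True)


class Order(Base):
    __tablename__ = "orders"

    id = Column(Integer, primary_key=True)
    customer_id = Column(Integer, ForeignKey("customer.id"))
    # default lazy="select"; using lazy="raise" instead would make the access
    # below raise InvalidRequestError via _invoke_raise_load()
    customer = relationship("Customer")


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = Session(engine)
customer = Customer()
session.add(customer)
session.flush()
session.add(Order(customer_id=customer.id))
session.commit()
session.expunge_all()

order = session.query(Order).first()
order.customer  # lazy load; with use_get, an identity-map hit emits no SQL
# ---------------------------------------------------------------------------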
956class LoadLazyAttribute(object):
957 """serializable loader object used by LazyLoader"""
959 def __init__(self, key, initiating_strategy):
960 self.key = key
961 self.strategy_key = initiating_strategy.strategy_key
963 def __call__(self, state, passive=attributes.PASSIVE_OFF):
964 key = self.key
965 instance_mapper = state.manager.mapper
966 prop = instance_mapper._props[key]
967 strategy = prop._strategies[self.strategy_key]
969 return strategy._load_for_state(state, passive)
972@properties.RelationshipProperty.strategy_for(lazy="immediate")
973class ImmediateLoader(AbstractRelationshipLoader):
974 __slots__ = ()
976 def init_class_attribute(self, mapper):
977 self.parent_property._get_strategy(
978 (("lazy", "select"),)
979 ).init_class_attribute(mapper)
981 def setup_query(
982 self,
983 context,
984 entity,
985 path,
986 loadopt,
987 adapter,
988 column_collection=None,
989 parentmapper=None,
990 **kwargs
991 ):
992 pass
994 def create_row_processor(
995 self, context, path, loadopt, mapper, result, adapter, populators
996 ):
997 def load_immediate(state, dict_, row):
998 state.get_impl(self.key).get(state, dict_)
1000 populators["delayed"].append((self.key, load_immediate))
1003@log.class_logger
1004@properties.RelationshipProperty.strategy_for(lazy="subquery")
1005class SubqueryLoader(AbstractRelationshipLoader):
1006 __slots__ = ("join_depth",)
1008 def __init__(self, parent, strategy_key):
1009 super(SubqueryLoader, self).__init__(parent, strategy_key)
1010 self.join_depth = self.parent_property.join_depth
1012 def init_class_attribute(self, mapper):
1013 self.parent_property._get_strategy(
1014 (("lazy", "select"),)
1015 ).init_class_attribute(mapper)
1017 def setup_query(
1018 self,
1019 context,
1020 entity,
1021 path,
1022 loadopt,
1023 adapter,
1024 column_collection=None,
1025 parentmapper=None,
1026 **kwargs
1027 ):
1029 if not context.query._enable_eagerloads:
1030 return
1031 elif context.query._yield_per:
1032 context.query._no_yield_per("subquery")
1034 path = path[self.parent_property]
1036 # build up a path indicating the path from the leftmost
1037 # entity to the thing we're subquery loading.
1038 with_poly_entity = path.get(
1039 context.attributes, "path_with_polymorphic", None
1040 )
1041 if with_poly_entity is not None:
1042 effective_entity = with_poly_entity
1043 else:
1044 effective_entity = self.entity
1046 subq_path = context.attributes.get(
1047 ("subquery_path", None), orm_util.PathRegistry.root
1048 )
1050 subq_path = subq_path + path
1052 # if not via query option, check for
1053 # a cycle
1054 if not path.contains(context.attributes, "loader"):
1055 if self.join_depth:
1056 if (
1057 (
1058 context.query._current_path.length
1059 if context.query._current_path
1060 else 0
1061 )
1062 + path.length
1063 ) / 2 > self.join_depth:
1064 return
1065 elif subq_path.contains_mapper(self.mapper):
1066 return
1068 (
1069 leftmost_mapper,
1070 leftmost_attr,
1071 leftmost_relationship,
1072 ) = self._get_leftmost(subq_path)
1074 orig_query = context.attributes.get(
1075 ("orig_query", SubqueryLoader), context.query
1076 )
1078 # generate a new Query from the original, then
1079 # produce a subquery from it.
1080 left_alias = self._generate_from_original_query(
1081 orig_query,
1082 leftmost_mapper,
1083 leftmost_attr,
1084 leftmost_relationship,
1085 entity.entity_zero,
1086 )
1088 # generate another Query that will join the
1089 # left alias to the target relationships.
1090 # basically doing a longhand
1091 # "from_self()". (from_self() itself not quite industrial
1092 # strength enough for all contingencies...but very close)
1093 q = orig_query.session.query(effective_entity)
1094 q._attributes = {
1095 ("orig_query", SubqueryLoader): orig_query,
1096 ("subquery_path", None): subq_path,
1097 }
1099 q = q._set_enable_single_crit(False)
1100 to_join, local_attr, parent_alias = self._prep_for_joins(
1101 left_alias, subq_path
1102 )
1104 q = q.add_columns(*local_attr)
1105 q = self._apply_joins(
1106 q, to_join, left_alias, parent_alias, effective_entity
1107 )
1109 q = self._setup_options(q, subq_path, orig_query, effective_entity)
1110 q = self._setup_outermost_orderby(q)
1112 # add new query to attributes to be picked up
1113 # by create_row_processor
1114 path.set(context.attributes, "subquery", q)
1116 def _get_leftmost(self, subq_path):
1117 subq_path = subq_path.path
1118 subq_mapper = orm_util._class_to_mapper(subq_path[0])
1120 # determine attributes of the leftmost mapper
1121 if (
1122 self.parent.isa(subq_mapper)
1123 and self.parent_property is subq_path[1]
1124 ):
1125 leftmost_mapper, leftmost_prop = self.parent, self.parent_property
1126 else:
1127 leftmost_mapper, leftmost_prop = subq_mapper, subq_path[1]
1129 leftmost_cols = leftmost_prop.local_columns
1131 leftmost_attr = [
1132 getattr(
1133 subq_path[0].entity, leftmost_mapper._columntoproperty[c].key
1134 )
1135 for c in leftmost_cols
1136 ]
1138 return leftmost_mapper, leftmost_attr, leftmost_prop
1140 def _generate_from_original_query(
1141 self,
1142 orig_query,
1143 leftmost_mapper,
1144 leftmost_attr,
1145 leftmost_relationship,
1146 orig_entity,
1147 ):
1148 # reformat the original query
1149 # to look only for significant columns
1150 q = orig_query._clone().correlate(None)
1152 # set the query's "FROM" list explicitly to what the
1153 # FROM list would be in any case, as we will be limiting
1154 # the columns in the SELECT list which may no longer include
1155 # all entities mentioned in things like WHERE, JOIN, etc.
1156 if not q._from_obj:
1157 q._set_select_from(
1158 list(
1159 set(
1160 [
1161 ent["entity"]
1162 for ent in orig_query.column_descriptions
1163 if ent["entity"] is not None
1164 ]
1165 )
1166 ),
1167 False,
1168 )
1170 # select from the identity columns of the outer (specifically, these
1171 # are the 'local_cols' of the property). This will remove
1172 # other columns from the query that might suggest the right entity
1173 # which is why we do _set_select_from above.
1174 target_cols = q._adapt_col_list(leftmost_attr)
1175 q._set_entities(target_cols)
1177 distinct_target_key = leftmost_relationship.distinct_target_key
1179 if distinct_target_key is True:
1180 q._distinct = True
1181 elif distinct_target_key is None:
1182 # if target_cols refer to a non-primary key or only
1183 # part of a composite primary key, set the q as distinct
1184 for t in set(c.table for c in target_cols):
1185 if not set(target_cols).issuperset(t.primary_key):
1186 q._distinct = True
1187 break
1189 if q._order_by is False:
1190 q._order_by = leftmost_mapper.order_by
1192 # don't need ORDER BY if no limit/offset
1193 if q._limit is None and q._offset is None:
1194 q._order_by = None
1196 # the original query now becomes a subquery
1197 # which we'll join onto.
1199 embed_q = q.with_labels().subquery()
1200 left_alias = orm_util.AliasedClass(
1201 leftmost_mapper, embed_q, use_mapper_path=True
1202 )
1203 return left_alias
1205 def _prep_for_joins(self, left_alias, subq_path):
1206 # figure out what's being joined. a.k.a. the fun part
1207 to_join = []
1208 pairs = list(subq_path.pairs())
1210 for i, (mapper, prop) in enumerate(pairs):
1211 if i > 0:
1212 # look at the previous mapper in the chain -
1213 # if it is as or more specific than this prop's
1214 # mapper, use that instead.
1215 # note we have an assumption here that
1216 # the non-first element is always going to be a mapper,
1217 # not an AliasedClass
1219 prev_mapper = pairs[i - 1][1].mapper
1220 to_append = prev_mapper if prev_mapper.isa(mapper) else mapper
1221 else:
1222 to_append = mapper
1224 to_join.append((to_append, prop.key))
1226 # determine the immediate parent class we are joining from,
1227 # which needs to be aliased.
1229 if len(to_join) < 2:
1230 # in the case of a one level eager load, this is the
1231 # leftmost "left_alias".
1232 parent_alias = left_alias
1233 else:
1234 info = inspect(to_join[-1][0])
1235 if info.is_aliased_class:
1236 parent_alias = info.entity
1237 else:
1238 # alias a plain mapper as we may be
1239 # joining multiple times
1240 parent_alias = orm_util.AliasedClass(
1241 info.entity, use_mapper_path=True
1242 )
1244 local_cols = self.parent_property.local_columns
1246 local_attr = [
1247 getattr(parent_alias, self.parent._columntoproperty[c].key)
1248 for c in local_cols
1249 ]
1250 return to_join, local_attr, parent_alias
1252 def _apply_joins(
1253 self, q, to_join, left_alias, parent_alias, effective_entity
1254 ):
1256 ltj = len(to_join)
1257 if ltj == 1:
1258 to_join = [
1259 getattr(left_alias, to_join[0][1]).of_type(effective_entity)
1260 ]
1261 elif ltj == 2:
1262 to_join = [
1263 getattr(left_alias, to_join[0][1]).of_type(parent_alias),
1264 getattr(parent_alias, to_join[-1][1]).of_type(
1265 effective_entity
1266 ),
1267 ]
1268 elif ltj > 2:
1269 middle = [
1270 (
1271 orm_util.AliasedClass(item[0])
1272 if not inspect(item[0]).is_aliased_class
1273 else item[0].entity,
1274 item[1],
1275 )
1276 for item in to_join[1:-1]
1277 ]
1278 inner = []
1280 while middle:
1281 item = middle.pop(0)
1282 attr = getattr(item[0], item[1])
1283 if middle:
1284 attr = attr.of_type(middle[0][0])
1285 else:
1286 attr = attr.of_type(parent_alias)
1288 inner.append(attr)
1290 to_join = (
1291 [getattr(left_alias, to_join[0][1]).of_type(inner[0].parent)]
1292 + inner
1293 + [
1294 getattr(parent_alias, to_join[-1][1]).of_type(
1295 effective_entity
1296 )
1297 ]
1298 )
1300 for attr in to_join:
1301 q = q.join(attr, from_joinpoint=True)
1302 return q
1304 def _setup_options(self, q, subq_path, orig_query, effective_entity):
1305 # propagate loader options etc. to the new query.
1306 # these will fire relative to subq_path.
1307 q = q._with_current_path(subq_path)
1308 q = q._conditional_options(*orig_query._with_options)
1309 if orig_query._populate_existing:
1310 q._populate_existing = orig_query._populate_existing
1312 return q
1314 def _setup_outermost_orderby(self, q):
1315 if self.parent_property.order_by:
1316 # if there's an ORDER BY, alias it the same
1317 # way joinedloader does, but we have to pull out
1318 # the "eagerjoin" from the query.
1319 # this really only picks up the "secondary" table
1320 # right now.
1321 eagerjoin = q._from_obj[0]
1322 eager_order_by = eagerjoin._target_adapter.copy_and_process(
1323 util.to_list(self.parent_property.order_by)
1324 )
1325 q = q.order_by(*eager_order_by)
1326 return q
1328 class _SubqCollections(object):
1329 """Given a :class:`_query.Query` used to emit the "subquery load",
1330 provide a load interface that executes the query at the
1331 first moment a value is needed.
1333 """
1335 _data = None
1337 def __init__(self, subq):
1338 self.subq = subq
1340 def get(self, key, default):
1341 if self._data is None:
1342 self._load()
1343 return self._data.get(key, default)
1345 def _load(self):
1346 self._data = collections.defaultdict(list)
1347 for k, v in itertools.groupby(self.subq, lambda x: x[1:]):
1348 self._data[k].extend(vv[0] for vv in v)
1350 def loader(self, state, dict_, row):
1351 if self._data is None:
1352 self._load()
1354 def create_row_processor(
1355 self, context, path, loadopt, mapper, result, adapter, populators
1356 ):
1357 if not self.parent.class_manager[self.key].impl.supports_population:
1358 raise sa_exc.InvalidRequestError(
1359 "'%s' does not support object "
1360 "population - eager loading cannot be applied." % self
1361 )
1363 path = path[self.parent_property]
1365 subq = path.get(context.attributes, "subquery")
1367 if subq is None:
1368 return
1370 assert subq.session is context.session, (
1371 "Subquery session doesn't refer to that of "
1372 "our context. Are there broken context caching "
1373 "schemes being used?"
1374 )
1376 local_cols = self.parent_property.local_columns
1378 # cache the loaded collections in the context
1379 # so that inheriting mappers don't re-load when they
1380 # call upon create_row_processor again
1381 collections = path.get(context.attributes, "collections")
1382 if collections is None:
1383 collections = self._SubqCollections(subq)
1384 path.set(context.attributes, "collections", collections)
1386 if adapter:
1387 local_cols = [adapter.columns[c] for c in local_cols]
1389 if self.uselist:
1390 self._create_collection_loader(
1391 context, collections, local_cols, populators
1392 )
1393 else:
1394 self._create_scalar_loader(
1395 context, collections, local_cols, populators
1396 )
1398 def _create_collection_loader(
1399 self, context, collections, local_cols, populators
1400 ):
1401 def load_collection_from_subq(state, dict_, row):
1402 collection = collections.get(
1403 tuple([row[col] for col in local_cols]), ()
1404 )
1405 state.get_impl(self.key).set_committed_value(
1406 state, dict_, collection
1407 )
1409 def load_collection_from_subq_existing_row(state, dict_, row):
1410 if self.key not in dict_:
1411 load_collection_from_subq(state, dict_, row)
1413 populators["new"].append((self.key, load_collection_from_subq))
1414 populators["existing"].append(
1415 (self.key, load_collection_from_subq_existing_row)
1416 )
1418 if context.invoke_all_eagers:
1419 populators["eager"].append((self.key, collections.loader))
1421 def _create_scalar_loader(
1422 self, context, collections, local_cols, populators
1423 ):
1424 def load_scalar_from_subq(state, dict_, row):
1425 collection = collections.get(
1426 tuple([row[col] for col in local_cols]), (None,)
1427 )
1428 if len(collection) > 1:
1429 util.warn(
1430 "Multiple rows returned with "
1431 "uselist=False for eagerly-loaded attribute '%s' " % self
1432 )
1434 scalar = collection[0]
1435 state.get_impl(self.key).set_committed_value(state, dict_, scalar)
1437 def load_scalar_from_subq_existing_row(state, dict_, row):
1438 if self.key not in dict_:
1439 load_scalar_from_subq(state, dict_, row)
1441 populators["new"].append((self.key, load_scalar_from_subq))
1442 populators["existing"].append(
1443 (self.key, load_scalar_from_subq_existing_row)
1444 )
1445 if context.invoke_all_eagers:
1446 populators["eager"].append((self.key, collections.loader))
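# ---------------------------------------------------------------------------
# Illustrative, self-contained sketch (editorial addition, not part of this
# module): the "subquery" strategy above is selected with lazy="subquery" or
# the subqueryload() option; the original query is restated as a subquery
# joined to the related table, and _SubqCollections groups the rows per
# parent.  User/Address are hypothetical models.
from sqlalchemy import Column, ForeignKey, Integer, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session, relationship, subqueryload

Base = declarative_base()


class User(Base):
    __tablename__ = "users"

    id = Column(Integer, primary_key=True)
    addresses = relationship("Address")


class Address(Base):
    __tablename__ = "addresses"

    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey("users.id"))


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = Session(engine)
session.add(User(addresses=[Address(), Address()]))
session.commit()
session.expunge_all()

# two statements total: one for the User rows, one subquery-joined SELECT
# that loads every related Address for those users up front
users = session.query(User).options(subqueryload(User.addresses)).all()
print(len(users[0].addresses))  # 2 -- populated without per-row lazy loads
# ---------------------------------------------------------------------------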
1449@log.class_logger
1450@properties.RelationshipProperty.strategy_for(lazy="joined")
1451@properties.RelationshipProperty.strategy_for(lazy=False)
1452class JoinedLoader(AbstractRelationshipLoader):
1453 """Provide loading behavior for a :class:`.RelationshipProperty`
1454 using joined eager loading.
1456 """
1458 __slots__ = "join_depth", "_aliased_class_pool"
1460 def __init__(self, parent, strategy_key):
1461 super(JoinedLoader, self).__init__(parent, strategy_key)
1462 self.join_depth = self.parent_property.join_depth
1463 self._aliased_class_pool = []
1465 def init_class_attribute(self, mapper):
1466 self.parent_property._get_strategy(
1467 (("lazy", "select"),)
1468 ).init_class_attribute(mapper)
1470 def setup_query(
1471 self,
1472 context,
1473 query_entity,
1474 path,
1475 loadopt,
1476 adapter,
1477 column_collection=None,
1478 parentmapper=None,
1479 chained_from_outerjoin=False,
1480 **kwargs
1481 ):
1482 """Add a left outer join to the statement that's being constructed."""
1484 if not context.query._enable_eagerloads:
1485 return
1486 elif context.query._yield_per and self.uselist:
1487 context.query._no_yield_per("joined collection")
1489 path = path[self.parent_property]
1491 with_polymorphic = None
1493 user_defined_adapter = (
1494 self._init_user_defined_eager_proc(loadopt, context)
1495 if loadopt
1496 else False
1497 )
1499 if user_defined_adapter is not False:
1500 (
1501 clauses,
1502 adapter,
1503 add_to_collection,
1504 ) = self._setup_query_on_user_defined_adapter(
1505 context, query_entity, path, adapter, user_defined_adapter
1506 )
1507 else:
1508 # if not via query option, check for
1509 # a cycle
1510 if not path.contains(context.attributes, "loader"):
1511 if self.join_depth:
1512 if path.length / 2 > self.join_depth:
1513 return
1514 elif path.contains_mapper(self.mapper):
1515 return
1517 (
1518 clauses,
1519 adapter,
1520 add_to_collection,
1521 chained_from_outerjoin,
1522 ) = self._generate_row_adapter(
1523 context,
1524 query_entity,
1525 path,
1526 loadopt,
1527 adapter,
1528 column_collection,
1529 parentmapper,
1530 chained_from_outerjoin,
1531 )
1533 with_poly_entity = path.get(
1534 context.attributes, "path_with_polymorphic", None
1535 )
1536 if with_poly_entity is not None:
1537 with_polymorphic = inspect(
1538 with_poly_entity
1539 ).with_polymorphic_mappers
1540 else:
1541 with_polymorphic = None
1543 path = path[self.entity]
1545 loading._setup_entity_query(
1546 context,
1547 self.mapper,
1548 query_entity,
1549 path,
1550 clauses,
1551 add_to_collection,
1552 with_polymorphic=with_polymorphic,
1553 parentmapper=self.mapper,
1554 chained_from_outerjoin=chained_from_outerjoin,
1555 )
1557 if with_poly_entity is not None and None in set(
1558 context.secondary_columns
1559 ):
1560 raise sa_exc.InvalidRequestError(
1561 "Detected unaliased columns when generating joined "
1562 "load. Make sure to use aliased=True or flat=True "
1563 "when using joined loading with with_polymorphic()."
1564 )
1566 def _init_user_defined_eager_proc(self, loadopt, context):
1568 # check if the opt applies at all
1569 if "eager_from_alias" not in loadopt.local_opts:
1570 # nope
1571 return False
1573 path = loadopt.path.parent
1575 # the option applies. check if the "user_defined_eager_row_processor"
1576 # has been built up.
1577 adapter = path.get(
1578 context.attributes, "user_defined_eager_row_processor", False
1579 )
1580 if adapter is not False:
1581 # just return it
1582 return adapter
1584 # otherwise figure it out.
1585 alias = loadopt.local_opts["eager_from_alias"]
1586 root_mapper, prop = path[-2:]
1588 if alias is not None:
1589 if isinstance(alias, str):
1590 alias = prop.target.alias(alias)
1591 adapter = sql_util.ColumnAdapter(
1592 alias, equivalents=prop.mapper._equivalent_columns
1593 )
1594 else:
1595 if path.contains(context.attributes, "path_with_polymorphic"):
1596 with_poly_entity = path.get(
1597 context.attributes, "path_with_polymorphic"
1598 )
1599 adapter = orm_util.ORMAdapter(
1600 with_poly_entity,
1601 equivalents=prop.mapper._equivalent_columns,
1602 )
1603 else:
1604 adapter = context.query._polymorphic_adapters.get(
1605 prop.mapper, None
1606 )
1607 path.set(
1608 context.attributes, "user_defined_eager_row_processor", adapter
1609 )
1611 return adapter
1613 def _setup_query_on_user_defined_adapter(
1614 self, context, entity, path, adapter, user_defined_adapter
1615 ):
1617 # apply some more wrapping to the "user defined adapter"
1618 # if we are setting up the query for SQL render.
1619 adapter = entity._get_entity_clauses(context.query, context)
1621 if adapter and user_defined_adapter:
1622 user_defined_adapter = user_defined_adapter.wrap(adapter)
1623 path.set(
1624 context.attributes,
1625 "user_defined_eager_row_processor",
1626 user_defined_adapter,
1627 )
1628 elif adapter:
1629 user_defined_adapter = adapter
1630 path.set(
1631 context.attributes,
1632 "user_defined_eager_row_processor",
1633 user_defined_adapter,
1634 )
1636 add_to_collection = context.primary_columns
1637 return user_defined_adapter, adapter, add_to_collection
1639 def _gen_pooled_aliased_class(self, context):
1640 # keep a local pool of AliasedClass objects that get re-used.
1641 # we need one unique AliasedClass per query per appearance of our
1642 # entity in the query.
1644 if inspect(self.entity).is_aliased_class:
1645 alt_selectable = inspect(self.entity).selectable
1646 else:
1647 alt_selectable = None
1649 key = ("joinedloader_ac", self)
1650 if key not in context.attributes:
1651 context.attributes[key] = idx = 0
1652 else:
1653 context.attributes[key] = idx = context.attributes[key] + 1
1655 if idx >= len(self._aliased_class_pool):
1656 to_adapt = orm_util.AliasedClass(
1657 self.mapper,
1658 alias=alt_selectable.alias(flat=True)
1659 if alt_selectable is not None
1660 else None,
1661 flat=True,
1662 use_mapper_path=True,
1663 )
1665 # load up the .columns collection on the Alias() before
1666 # the object becomes shared among threads. this prevents
1667 # races for column identities.
1668 inspect(to_adapt).selectable.c
1670 self._aliased_class_pool.append(to_adapt)
1672 return self._aliased_class_pool[idx]
1674 def _generate_row_adapter(
1675 self,
1676 context,
1677 entity,
1678 path,
1679 loadopt,
1680 adapter,
1681 column_collection,
1682 parentmapper,
1683 chained_from_outerjoin,
1684 ):
1685 with_poly_entity = path.get(
1686 context.attributes, "path_with_polymorphic", None
1687 )
1688 if with_poly_entity:
1689 to_adapt = with_poly_entity
1690 else:
1691 to_adapt = self._gen_pooled_aliased_class(context)
1693 clauses = inspect(to_adapt)._memo(
1694 ("joinedloader_ormadapter", self),
1695 orm_util.ORMAdapter,
1696 to_adapt,
1697 equivalents=self.mapper._equivalent_columns,
1698 adapt_required=True,
1699 allow_label_resolve=False,
1700 anonymize_labels=True,
1701 )
1703 assert clauses.aliased_class is not None
1705 if self.parent_property.uselist:
1706 context.multi_row_eager_loaders = True
1708 innerjoin = (
1709 loadopt.local_opts.get("innerjoin", self.parent_property.innerjoin)
1710 if loadopt is not None
1711 else self.parent_property.innerjoin
1712 )
1714 if not innerjoin:
1715 # if this is an outer join, all non-nested eager joins from
1716 # this path must also be outer joins
1717 chained_from_outerjoin = True
1719 context.create_eager_joins.append(
1720 (
1721 self._create_eager_join,
1722 entity,
1723 path,
1724 adapter,
1725 parentmapper,
1726 clauses,
1727 innerjoin,
1728 chained_from_outerjoin,
1729 )
1730 )
1732 add_to_collection = context.secondary_columns
1733 path.set(context.attributes, "eager_row_processor", clauses)
1735 return clauses, adapter, add_to_collection, chained_from_outerjoin
1737 def _create_eager_join(
1738 self,
1739 context,
1740 query_entity,
1741 path,
1742 adapter,
1743 parentmapper,
1744 clauses,
1745 innerjoin,
1746 chained_from_outerjoin,
1747 ):
1749 if parentmapper is None:
1750 localparent = query_entity.mapper
1751 else:
1752 localparent = parentmapper
1754 # whether or not the Query will wrap the selectable in a subquery,
1755 # and then attach eager load joins to that (i.e., in the case of
1756 # LIMIT/OFFSET etc.)
1757 should_nest_selectable = (
1758 context.multi_row_eager_loaders
1759 and context.query._should_nest_selectable
1760 )
1762 query_entity_key = None
1764 if (
1765 query_entity not in context.eager_joins
1766 and not should_nest_selectable
1767 and context.from_clause
1768 ):
1769 indexes = sql_util.find_left_clause_that_matches_given(
1770 context.from_clause, query_entity.selectable
1771 )
1773 if len(indexes) > 1:
1774 # for the eager load case, I can't reproduce this right
1775 # now. For query.join() I can.
1776 raise sa_exc.InvalidRequestError(
1777 "Can't identify which query entity in which to joined "
1778 "eager load from. Please use an exact match when "
1779 "specifying the join path."
1780 )
1782 if indexes:
1783 clause = context.from_clause[indexes[0]]
1784 # join to an existing FROM clause on the query.
1785 # key it to its list index in the eager_joins dict.
1786 # Query._compile_context will adapt as needed and
1787 # append to the FROM clause of the select().
1788 query_entity_key, default_towrap = indexes[0], clause
1790 if query_entity_key is None:
1791 query_entity_key, default_towrap = (
1792 query_entity,
1793 query_entity.selectable,
1794 )
1796 towrap = context.eager_joins.setdefault(
1797 query_entity_key, default_towrap
1798 )
1800 if adapter:
1801 if getattr(adapter, "aliased_class", None):
1802 # joining from an adapted entity. The adapted entity
1803 # might be a "with_polymorphic", so resolve that to our
1804 # specific mapper's entity before looking for our attribute
1805 # name on it.
1806 efm = inspect(adapter.aliased_class)._entity_for_mapper(
1807 localparent
1808 if localparent.isa(self.parent)
1809 else self.parent
1810 )
1812 # look for our attribute on the adapted entity, else fall back
1813 # to our straight property
1814 onclause = getattr(efm.entity, self.key, self.parent_property)
1815 else:
1816 onclause = getattr(
1817 orm_util.AliasedClass(
1818 self.parent, adapter.selectable, use_mapper_path=True
1819 ),
1820 self.key,
1821 self.parent_property,
1822 )
1824 else:
1825 onclause = self.parent_property
1827 assert clauses.aliased_class is not None
1829 attach_on_outside = (
1830 not chained_from_outerjoin
1831 or not innerjoin
1832 or innerjoin == "unnested"
1833 or query_entity.entity_zero.represents_outer_join
1834 )
1836 if attach_on_outside:
1837 # this is the "classic" eager join case.
1838 eagerjoin = orm_util._ORMJoin(
1839 towrap,
1840 clauses.aliased_class,
1841 onclause,
1842 isouter=not innerjoin
1843 or query_entity.entity_zero.represents_outer_join
1844 or (chained_from_outerjoin and isinstance(towrap, sql.Join)),
1845 _left_memo=self.parent,
1846 _right_memo=self.mapper,
1847 )
1848 else:
1849 # all other cases are innerjoin=='nested' approach
1850 eagerjoin = self._splice_nested_inner_join(
1851 path, towrap, clauses, onclause
1852 )
1854 context.eager_joins[query_entity_key] = eagerjoin
1856 # send a hint to the Query as to where it may "splice" this join
1857 eagerjoin.stop_on = query_entity.selectable
1859 if not parentmapper:
1860 # for parentclause that is the non-eager end of the join,
1861 # ensure all the parent cols in the primaryjoin are actually
1862 # in the
1863 # columns clause (i.e. are not deferred), so that aliasing applied
1864 # by the Query propagates those columns outward.
1865 # This has the effect
1866 # of "undefering" those columns.
1867 for col in sql_util._find_columns(
1868 self.parent_property.primaryjoin
1869 ):
1870 if localparent.persist_selectable.c.contains_column(col):
1871 if adapter:
1872 col = adapter.columns[col]
1873 context.primary_columns.append(col)
1875 if self.parent_property.order_by:
1876 context.eager_order_by += (
1877 eagerjoin._target_adapter.copy_and_process
1878 )(util.to_list(self.parent_property.order_by))
1880 def _splice_nested_inner_join(
1881 self, path, join_obj, clauses, onclause, splicing=False
1882 ):
1884 if splicing is False:
1885 # first call is always handed a join object
1886 # from the outside
1887 assert isinstance(join_obj, orm_util._ORMJoin)
1888 elif isinstance(join_obj, sql.selectable.FromGrouping):
1889 return self._splice_nested_inner_join(
1890 path, join_obj.element, clauses, onclause, splicing
1891 )
1892 elif not isinstance(join_obj, orm_util._ORMJoin):
1893 if path[-2] is splicing:
1894 return orm_util._ORMJoin(
1895 join_obj,
1896 clauses.aliased_class,
1897 onclause,
1898 isouter=False,
1899 _left_memo=splicing,
1900 _right_memo=path[-1].mapper,
1901 )
1902 else:
1903 # only here if splicing == True
1904 return None
1906 target_join = self._splice_nested_inner_join(
1907 path, join_obj.right, clauses, onclause, join_obj._right_memo
1908 )
1909 if target_join is None:
1910 right_splice = False
1911 target_join = self._splice_nested_inner_join(
1912 path, join_obj.left, clauses, onclause, join_obj._left_memo
1913 )
1914 if target_join is None:
1915 # should only return None when recursively called,
1916 # e.g. splicing==True
1917 assert (
1918 splicing is not False
1919 ), "assertion failed attempting to produce joined eager loads"
1920 return None
1921 else:
1922 right_splice = True
1924 if right_splice:
1925 # for a right splice, attempt to flatten out
1926 # a JOIN b JOIN c JOIN .. to avoid needless
1927 # parenthesis nesting
1928 if not join_obj.isouter and not target_join.isouter:
1929 eagerjoin = join_obj._splice_into_center(target_join)
1930 else:
1931 eagerjoin = orm_util._ORMJoin(
1932 join_obj.left,
1933 target_join,
1934 join_obj.onclause,
1935 isouter=join_obj.isouter,
1936 _left_memo=join_obj._left_memo,
1937 )
1938 else:
1939 eagerjoin = orm_util._ORMJoin(
1940 target_join,
1941 join_obj.right,
1942 join_obj.onclause,
1943 isouter=join_obj.isouter,
1944 _right_memo=join_obj._right_memo,
1945 )
1947 eagerjoin._target_adapter = target_join._target_adapter
1948 return eagerjoin
1950 def _create_eager_adapter(self, context, result, adapter, path, loadopt):
1951 user_defined_adapter = (
1952 self._init_user_defined_eager_proc(loadopt, context)
1953 if loadopt
1954 else False
1955 )
1957 if user_defined_adapter is not False:
1958 decorator = user_defined_adapter
1959 # user defined eagerloads are part of the "primary"
1960 # portion of the load.
1961 # the adapters applied to the Query should be honored.
1962 if context.adapter and decorator:
1963 decorator = decorator.wrap(context.adapter)
1964 elif context.adapter:
1965 decorator = context.adapter
1966 else:
1967 decorator = path.get(context.attributes, "eager_row_processor")
1968 if decorator is None:
1969 return False
1971 if self.mapper._result_has_identity_key(result, decorator):
1972 return decorator
1973 else:
1974 # no identity key - don't return a row
1975 # processor, will cause a degrade to lazy
1976 return False
1978 def create_row_processor(
1979 self, context, path, loadopt, mapper, result, adapter, populators
1980 ):
1981 if not self.parent.class_manager[self.key].impl.supports_population:
1982 raise sa_exc.InvalidRequestError(
1983 "'%s' does not support object "
1984 "population - eager loading cannot be applied." % self
1985 )
1987 our_path = path[self.parent_property]
1989 eager_adapter = self._create_eager_adapter(
1990 context, result, adapter, our_path, loadopt
1991 )
1993 if eager_adapter is not False:
1994 key = self.key
1996 _instance = loading._instance_processor(
1997 self.mapper,
1998 context,
1999 result,
2000 our_path[self.entity],
2001 eager_adapter,
2002 )
2004 if not self.uselist:
2005 self._create_scalar_loader(context, key, _instance, populators)
2006 else:
2007 self._create_collection_loader(
2008 context, key, _instance, populators
2009 )
2010 else:
2011 self.parent_property._get_strategy(
2012 (("lazy", "select"),)
2013 ).create_row_processor(
2014 context, path, loadopt, mapper, result, adapter, populators
2015 )
2017 def _create_collection_loader(self, context, key, _instance, populators):
2018 def load_collection_from_joined_new_row(state, dict_, row):
2019 collection = attributes.init_state_collection(state, dict_, key)
2020 result_list = util.UniqueAppender(
2021 collection, "append_without_event"
2022 )
2023 context.attributes[(state, key)] = result_list
2024 inst = _instance(row)
2025 if inst is not None:
2026 result_list.append(inst)
2028 def load_collection_from_joined_existing_row(state, dict_, row):
2029 if (state, key) in context.attributes:
2030 result_list = context.attributes[(state, key)]
2031 else:
2032 # the (state, key) appender key can be absent from
2033 # context.attributes with isnew=False when self-referential
2034 # eager loading is used; the same instance may be present
2035 # in two distinct sets of result columns
2036 collection = attributes.init_state_collection(
2037 state, dict_, key
2038 )
2039 result_list = util.UniqueAppender(
2040 collection, "append_without_event"
2041 )
2042 context.attributes[(state, key)] = result_list
2043 inst = _instance(row)
2044 if inst is not None:
2045 result_list.append(inst)
2047 def load_collection_from_joined_exec(state, dict_, row):
2048 _instance(row)
2050 populators["new"].append(
2051 (self.key, load_collection_from_joined_new_row)
2052 )
2053 populators["existing"].append(
2054 (self.key, load_collection_from_joined_existing_row)
2055 )
2056 if context.invoke_all_eagers:
2057 populators["eager"].append(
2058 (self.key, load_collection_from_joined_exec)
2059 )
2061 def _create_scalar_loader(self, context, key, _instance, populators):
2062 def load_scalar_from_joined_new_row(state, dict_, row):
2063 # set a scalar object instance directly on the parent
2064 # object, bypassing InstrumentedAttribute event handlers.
2065 dict_[key] = _instance(row)
2067 def load_scalar_from_joined_existing_row(state, dict_, row):
2068 # call _instance on the row, even though the object has
2069 # been created, so that we further descend into properties
2070 existing = _instance(row)
2072 # conflicting value already loaded, this shouldn't happen
2073 if key in dict_:
2074 if existing is not dict_[key]:
2075 util.warn(
2076 "Multiple rows returned with "
2077 "uselist=False for eagerly-loaded attribute '%s' "
2078 % self
2079 )
2080 else:
2081 # this case occurs when one row has multiple loads of the
2082 # same entity (e.g. via aliasing), and one load has an
2083 # attribute that the other doesn't.
2084 dict_[key] = existing
2086 def load_scalar_from_joined_exec(state, dict_, row):
2087 _instance(row)
2089 populators["new"].append((self.key, load_scalar_from_joined_new_row))
2090 populators["existing"].append(
2091 (self.key, load_scalar_from_joined_existing_row)
2092 )
2093 if context.invoke_all_eagers:
2094 populators["eager"].append(
2095 (self.key, load_scalar_from_joined_exec)
2096 )
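# The "Multiple rows returned" warning above applies to scalar
# (uselist=False) joined eager loads.  A hedged sketch, assuming a
# hypothetical one-to-one mapping against a declarative ``Base``, with a
# ``Detail`` class holding a foreign key to "parent":
#
#     class Parent(Base):
#         __tablename__ = "parent"
#         id = Column(Integer, primary_key=True)
#         detail = relationship("Detail", uselist=False, lazy="joined")
#
# if the "detail" table actually holds two rows for the same parent, the
# warning is emitted and the first row loaded remains on the attribute.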
2099@log.class_logger
2100@properties.RelationshipProperty.strategy_for(lazy="selectin")
2101class SelectInLoader(AbstractRelationshipLoader, util.MemoizedSlots):
2102 __slots__ = (
2103 "join_depth",
2104 "omit_join",
2105 "_parent_alias",
2106 "_query_info",
2107 "_fallback_query_info",
2108 "_bakery",
2109 )
2111 query_info = collections.namedtuple(
2112 "queryinfo",
2113 [
2114 "load_only_child",
2115 "load_with_join",
2116 "in_expr",
2117 "pk_cols",
2118 "zero_idx",
2119 "child_lookup_cols",
2120 ],
2121 )
2123 _chunksize = 500
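# _chunksize bounds how many primary key values go into a single IN
# expression; larger result sets are loaded over several SELECT statements.
# A rough sketch of the chunking arithmetic used by the loaders further
# below, using the default of 500:
#
#     keys = list(range(1200))
#     chunks = [keys[i : i + 500] for i in range(0, len(keys), 500)]
#     assert [len(c) for c in chunks] == [500, 500, 200]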
2125 def __init__(self, parent, strategy_key):
2126 super(SelectInLoader, self).__init__(parent, strategy_key)
2127 self.join_depth = self.parent_property.join_depth
2128 is_m2o = self.parent_property.direction is interfaces.MANYTOONE
2130 if self.parent_property.omit_join is not None:
2131 self.omit_join = self.parent_property.omit_join
2132 else:
2133 lazyloader = self.parent_property._get_strategy(
2134 (("lazy", "select"),)
2135 )
2136 if is_m2o:
2137 self.omit_join = lazyloader.use_get
2138 else:
2139 self.omit_join = self.parent._get_clause[0].compare(
2140 lazyloader._rev_lazywhere,
2141 use_proxies=True,
2142 equivalents=self.parent._equivalent_columns,
2143 )
2145 if self.omit_join:
2146 if is_m2o:
2147 self._query_info = self._init_for_omit_join_m2o()
2148 self._fallback_query_info = self._init_for_join()
2149 else:
2150 self._query_info = self._init_for_omit_join()
2151 else:
2152 self._query_info = self._init_for_join()
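# The omit_join decision above changes the shape of the per-chunk SELECT.
# As an illustrative sketch only (table and column names are hypothetical),
# for selectinload(A.bs) the omit-join form queries the related table
# directly by its foreign key column, roughly
#
#     SELECT b.a_id, b.* FROM b WHERE b.a_id IN (...)
#
# whereas the join form selects from an alias of the parent, roughly
#
#     SELECT a_1.id, b.* FROM a AS a_1 JOIN b ON a_1.id = b.a_id
#     WHERE a_1.id IN (...)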
2154 def _init_for_omit_join(self):
2155 pk_to_fk = dict(
2156 self.parent_property._join_condition.local_remote_pairs
2157 )
2158 pk_to_fk.update(
2159 (equiv, pk_to_fk[k])
2160 for k in list(pk_to_fk)
2161 for equiv in self.parent._equivalent_columns.get(k, ())
2162 )
2164 pk_cols = fk_cols = [
2165 pk_to_fk[col] for col in self.parent.primary_key if col in pk_to_fk
2166 ]
2167 if len(fk_cols) > 1:
2168 in_expr = sql.tuple_(*fk_cols)
2169 zero_idx = False
2170 else:
2171 in_expr = fk_cols[0]
2172 zero_idx = True
2174 return self.query_info(False, False, in_expr, pk_cols, zero_idx, None)
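# With a composite of local/remote columns the IN criteria becomes a SQL
# tuple.  A minimal sketch, assuming a hypothetical ``B`` entity with
# composite foreign key columns ``a_id1`` / ``a_id2`` (tuple IN requires
# backend support, e.g. PostgreSQL or MySQL):
#
#     from sqlalchemy import tuple_
#
#     in_expr = tuple_(B.a_id1, B.a_id2)
#     criteria = in_expr.in_([(1, 10), (2, 20)])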
2176 def _init_for_omit_join_m2o(self):
2177 pk_cols = self.mapper.primary_key
2178 if len(pk_cols) > 1:
2179 in_expr = sql.tuple_(*pk_cols)
2180 zero_idx = False
2181 else:
2182 in_expr = pk_cols[0]
2183 zero_idx = True
2185 lazyloader = self.parent_property._get_strategy((("lazy", "select"),))
2186 lookup_cols = [lazyloader._equated_columns[pk] for pk in pk_cols]
2188 return self.query_info(
2189 True, False, in_expr, pk_cols, zero_idx, lookup_cols
2190 )
2192 def _init_for_join(self):
2193 self._parent_alias = aliased(self.parent.class_)
2194 pa_insp = inspect(self._parent_alias)
2195 pk_cols = [
2196 pa_insp._adapt_element(col) for col in self.parent.primary_key
2197 ]
2198 if len(pk_cols) > 1:
2199 in_expr = sql.tuple_(*pk_cols)
2200 zero_idx = False
2201 else:
2202 in_expr = pk_cols[0]
2203 zero_idx = True
2204 return self.query_info(False, True, in_expr, pk_cols, zero_idx, None)
2206 def init_class_attribute(self, mapper):
2207 self.parent_property._get_strategy(
2208 (("lazy", "select"),)
2209 ).init_class_attribute(mapper)
2211 @util.dependencies("sqlalchemy.ext.baked")
2212 def _memoized_attr__bakery(self, baked):
2213 return baked.bakery(size=50)
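# The bakery caches the compiled form of the selectin SELECT between
# invocations.  A short sketch of the same public baked-query API, assuming
# an existing Session and a hypothetical mapped ``User`` class:
#
#     from sqlalchemy import bindparam
#     from sqlalchemy.ext import baked
#
#     bakery = baked.bakery(size=50)
#     bq = bakery(lambda session: session.query(User))
#     bq += lambda q: q.filter(
#         User.id.in_(bindparam("ids", expanding=True))
#     )
#     users = bq(session).params(ids=[1, 2, 3]).all()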
2215 def create_row_processor(
2216 self, context, path, loadopt, mapper, result, adapter, populators
2217 ):
2218 if not self.parent.class_manager[self.key].impl.supports_population:
2219 raise sa_exc.InvalidRequestError(
2220 "'%s' does not support object "
2221 "population - eager loading cannot be applied." % self
2222 )
2224 selectin_path = (
2225 context.query._current_path or orm_util.PathRegistry.root
2226 ) + path
2228 if not orm_util._entity_isa(path[-1], self.parent):
2229 return
2231 if loading.PostLoad.path_exists(
2232 context, selectin_path, self.parent_property
2233 ):
2234 return
2236 path_w_prop = path[self.parent_property]
2237 selectin_path_w_prop = selectin_path[self.parent_property]
2239 # build up a path indicating the path from the leftmost
2240 # entity to the thing we're selectin loading.
2241 with_poly_entity = path_w_prop.get(
2242 context.attributes, "path_with_polymorphic", None
2243 )
2245 if with_poly_entity is not None:
2246 effective_entity = with_poly_entity
2247 else:
2248 effective_entity = self.entity
2250 if not path_w_prop.contains(context.attributes, "loader"):
2251 if self.join_depth:
2252 if selectin_path_w_prop.length / 2 > self.join_depth:
2253 return
2254 elif selectin_path_w_prop.contains_mapper(self.mapper):
2255 return
2257 loading.PostLoad.callable_for_path(
2258 context,
2259 selectin_path,
2260 self.parent,
2261 self.parent_property,
2262 self._load_for_path,
2263 effective_entity,
2264 )
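# For self-referential relationships the join_depth / contains_mapper
# checks above keep the eager load from recursing indefinitely.  A brief
# sketch, assuming a hypothetical adjacency-list ``Node`` mapping:
#
#     class Node(Base):
#         __tablename__ = "node"
#         id = Column(Integer, primary_key=True)
#         parent_id = Column(Integer, ForeignKey("node.id"))
#         children = relationship("Node", lazy="selectin", join_depth=2)
#
# children and grandchildren are loaded via SELECT IN; deeper levels fall
# back to lazy loading.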
2266 @util.dependencies("sqlalchemy.ext.baked")
2267 def _load_for_path(
2268 self, baked, context, path, states, load_only, effective_entity
2269 ):
2271 if load_only and self.key not in load_only:
2272 return
2274 query_info = self._query_info
2276 if query_info.load_only_child:
2277 our_states = collections.defaultdict(list)
2278 none_states = []
2280 mapper = self.parent
2282 for state, overwrite in states:
2283 state_dict = state.dict
2284 related_ident = tuple(
2285 mapper._get_state_attr_by_column(
2286 state,
2287 state_dict,
2288 lk,
2289 passive=attributes.PASSIVE_NO_FETCH,
2290 )
2291 for lk in query_info.child_lookup_cols
2292 )
2293 # if the loaded parent objects do not have the foreign key
2294 # to the related item loaded, then degrade into the joined
2295 # version of selectinload
2296 if attributes.PASSIVE_NO_RESULT in related_ident:
2297 query_info = self._fallback_query_info
2298 break
2300 # organize states into lists keyed to particular foreign
2301 # key values.
2302 if None not in related_ident:
2303 our_states[related_ident].append(
2304 (state, state_dict, overwrite)
2305 )
2306 else:
2307 # For FK values that have None, add them to a
2308 # separate collection that will be populated separately
2309 none_states.append((state, state_dict, overwrite))
2311 # note the above conditional may have changed query_info
2312 if not query_info.load_only_child:
2313 our_states = [
2314 (state.key[1], state, state.dict, overwrite)
2315 for state, overwrite in states
2316 ]
2318 pk_cols = query_info.pk_cols
2319 in_expr = query_info.in_expr
2321 if not query_info.load_with_join:
2322 # in "omit join" mode, the primary key column and the
2323 # "in" expression are in terms of the related entity. So
2324 # if the related entity is polymorphic or otherwise aliased,
2325 # we need to adapt our "pk_cols" and "in_expr" to that
2326 # entity. in non-"omit join" mode, these are against the
2327 # parent entity and do not need adaptation.
2328 insp = inspect(effective_entity)
2329 if insp.is_aliased_class:
2330 pk_cols = [insp._adapt_element(col) for col in pk_cols]
2331 in_expr = insp._adapt_element(in_expr)
2334 q = self._bakery(
2335 lambda session: session.query(
2336 query.Bundle("pk", *pk_cols), effective_entity
2337 ),
2338 self,
2339 )
2341 if not query_info.load_with_join:
2342 # the Bundle we have in the "omit_join" case is against raw, non
2343 # annotated columns, so to ensure the Query knows its primary
2344 # entity, we add it explicitly. If we made the Bundle against
2345 # annotated columns, we hit a performance issue in this specific
2346 # case, which is detailed in issue #4347.
2347 q.add_criteria(lambda q: q.select_from(effective_entity))
2348 else:
2349 # in the non-omit_join case, the Bundle is against the annotated/
2350 # mapped column of the parent entity, but the #4347 issue does not
2351 # occur in this case.
2352 pa = self._parent_alias
2353 q.add_criteria(
2354 lambda q: q.select_from(pa).join(
2355 getattr(pa, self.parent_property.key).of_type(
2356 effective_entity
2357 )
2358 )
2359 )
2361 # the IN criteria against the expanding "primary_keys" bindparam is
2362 # the same whether we load via the child or via the parent
2363 q.add_criteria(
2364 lambda q: q.filter(
2365 in_expr.in_(sql.bindparam("primary_keys", expanding=True))
2366 )
2367 )
2374 orig_query = context.query
2376 q._add_lazyload_options(
2377 orig_query._with_options, path[self.parent_property]
2378 )
2380 if orig_query._populate_existing:
2381 q.add_criteria(lambda q: q.populate_existing())
2383 if self.parent_property.order_by:
2384 if not query_info.load_with_join:
2385 eager_order_by = self.parent_property.order_by
2386 if insp.is_aliased_class:
2387 eager_order_by = [
2388 insp._adapt_element(elem) for elem in eager_order_by
2389 ]
2390 q.add_criteria(lambda q: q.order_by(*eager_order_by))
2391 else:
2393 def _setup_outermost_orderby(q):
2394 # imitate the same method that subquery eager loading uses,
2395 # looking for the adapted "secondary" table
2396 eagerjoin = q._from_obj[0]
2398 return q.order_by(
2399 *eagerjoin._target_adapter.copy_and_process(
2400 util.to_list(self.parent_property.order_by)
2401 )
2402 )
2404 q.add_criteria(_setup_outermost_orderby)
2406 if query_info.load_only_child:
2407 self._load_via_child(
2408 our_states, none_states, query_info, q, context
2409 )
2410 else:
2411 self._load_via_parent(our_states, query_info, q, context)
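# End to end this produces the two-query pattern of selectin loading.  A
# hedged sketch with a hypothetical ``A`` / ``A.bs`` relationship:
#
#     from sqlalchemy.orm import selectinload
#
#     a_rows = session.query(A).options(selectinload(A.bs)).all()
#
# the first statement loads the A rows; this method then issues one extra
# SELECT per chunk of at most _chunksize primary keys and assigns the
# related objects onto each A.bs collection.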
2413 def _load_via_child(self, our_states, none_states, query_info, q, context):
2414 uselist = self.uselist
2416 # this sort is really for the benefit of the unit tests
2417 our_keys = sorted(our_states)
2418 while our_keys:
2419 chunk = our_keys[0 : self._chunksize]
2420 our_keys = our_keys[self._chunksize :]
2421 data = {
2422 k: v
2423 for k, v in q(context.session).params(
2424 primary_keys=[
2425 key[0] if query_info.zero_idx else key for key in chunk
2426 ]
2427 )
2428 }
2430 for key in chunk:
2431 # for a real foreign key and no concurrent changes to the
2432 # DB while running this method, "key" is always present in
2433 # data. However, for primaryjoins without real foreign keys,
2434 # a non-None lookup value may still match no related row.
2436 related_obj = data.get(key, None)
2437 for state, dict_, overwrite in our_states[key]:
2438 if not overwrite and self.key in dict_:
2439 continue
2441 state.get_impl(self.key).set_committed_value(
2442 state,
2443 dict_,
2444 related_obj if not uselist else [related_obj],
2445 )
2446 # populate none states with empty value / collection
2447 for state, dict_, overwrite in none_states:
2448 if not overwrite and self.key in dict_:
2449 continue
2451 # note it's OK if this is a uselist=True attribute, the empty
2452 # collection will be populated
2453 state.get_impl(self.key).set_committed_value(state, dict_, None)
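# A usage-level sketch of the many-to-one ("load only child") path above,
# assuming a hypothetical ``B.a`` relationship loaded with selectinload:
#
#     from sqlalchemy.orm import selectinload
#
#     b_rows = session.query(B).options(selectinload(B.a)).all()
#
# the B rows load first; their distinct foreign key values become the
# "primary_keys" IN list, the matching A rows are fetched and mapped back
# onto each B.a, and None is assigned where the foreign key was NULL.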
2455 def _load_via_parent(self, our_states, query_info, q, context):
2456 uselist = self.uselist
2457 _empty_result = () if uselist else None
2459 while our_states:
2460 chunk = our_states[0 : self._chunksize]
2461 our_states = our_states[self._chunksize :]
2463 primary_keys = [
2464 key[0] if query_info.zero_idx else key
2465 for key, state, state_dict, overwrite in chunk
2466 ]
2468 data = collections.defaultdict(list)
2469 for k, v in itertools.groupby(
2470 q(context.session).params(primary_keys=primary_keys),
2471 lambda x: x[0],
2472 ):
2473 data[k].extend(vv[1] for vv in v)
2475 for key, state, state_dict, overwrite in chunk:
2477 if not overwrite and self.key in state_dict:
2478 continue
2480 collection = data.get(key, _empty_result)
2482 if not uselist and collection:
2483 if len(collection) > 1:
2484 util.warn(
2485 "Multiple rows returned with "
2486 "uselist=False for eagerly-loaded "
2487 "attribute '%s' " % self
2488 )
2489 state.get_impl(self.key).set_committed_value(
2490 state, state_dict, collection[0]
2491 )
2492 else:
2493 # with uselist=False and no matching row, "collection" here is
2494 # the None _empty_result, so the committed value becomes None
2495 state.get_impl(self.key).set_committed_value(
2496 state, state_dict, collection
2497 )
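# A standalone sketch of the grouping step above, with hypothetical rows of
# the form (parent key, related object):
#
#     import collections
#     import itertools
#
#     rows = [((1,), "b1"), ((1,), "b2"), ((2,), "b3")]
#     data = collections.defaultdict(list)
#     for k, v in itertools.groupby(rows, lambda x: x[0]):
#         data[k].extend(vv[1] for vv in v)
#     assert data == {(1,): ["b1", "b2"], (2,): ["b3"]}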
2500def single_parent_validator(desc, prop):
2501 def _do_check(state, value, oldvalue, initiator):
2502 if value is not None and initiator.key == prop.key:
2503 hasparent = initiator.hasparent(attributes.instance_state(value))
2504 if hasparent and oldvalue is not value:
2505 raise sa_exc.InvalidRequestError(
2506 "Instance %s is already associated with an instance "
2507 "of %s via its %s attribute, and is only allowed a "
2508 "single parent."
2509 % (orm_util.instance_str(value), state.class_, prop),
2510 code="bbf1",
2511 )
2512 return value
2514 def append(state, value, initiator):
2515 return _do_check(state, value, None, initiator)
2517 def set_(state, value, oldvalue, initiator):
2518 return _do_check(state, value, oldvalue, initiator)
2520 event.listen(
2521 desc, "append", append, raw=True, retval=True, active_history=True
2522 )
2523 event.listen(desc, "set", set_, raw=True, retval=True, active_history=True)
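# single_parent=True is most often used with a many-to-one or many-to-many
# relationship that also specifies delete-orphan cascade.  A minimal sketch
# with hypothetical declarative classes; re-parenting a child without
# detaching it first raises the InvalidRequestError above (code "bbf1"):
#
#     class Child(Base):
#         __tablename__ = "child"
#         id = Column(Integer, primary_key=True)
#
#     class Parent(Base):
#         __tablename__ = "parent"
#         id = Column(Integer, primary_key=True)
#         child_id = Column(Integer, ForeignKey("child.id"))
#         child = relationship(
#             "Child", single_parent=True, cascade="all, delete-orphan"
#         )
#
#     p1 = Parent(child=Child())
#     p2 = Parent()
#     p2.child = p1.child  # raises InvalidRequestError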