Coverage for src/typedal/core.py: 99%
872 statements
coverage.py v7.3.2, created at 2023-11-20 17:04 +0100
1"""
2Core functionality of TypeDAL.
3"""
4import contextlib
5import csv
6import datetime as dt
7import inspect
8import json
9import math
10import types
11import typing
12import warnings
13from collections import defaultdict
14from decimal import Decimal
15from pathlib import Path
16from typing import Any, Optional
18import pydal
19from pydal._globals import DEFAULT
20from pydal.objects import Field as _Field
21from pydal.objects import Query as _Query
22from pydal.objects import Row, Rows
23from pydal.objects import Table as _Table
24from typing_extensions import Self
26from .helpers import (
27 DummyQuery,
28 all_annotations,
29 all_dict,
30 as_lambda,
31 extract_type_optional,
32 filter_out,
33 instanciate,
34 is_union,
35 looks_like,
36 mktable,
37 origin_is_subclass,
38 to_snake,
39 unwrap_type,
40)
41from .types import (
42 AfterDeleteCallable,
43 AfterInsertCallable,
44 AfterUpdateCallable,
45 BeforeDeleteCallable,
46 BeforeInsertCallable,
47 BeforeUpdateCallable,
48 CacheMetadata,
49 Expression,
50 Field,
51 Metadata,
52 PaginateDict,
53 Pagination,
54 Query,
55 _Types,
56)
58# use typing.cast(type, ...) to make mypy happy with unions
59T_annotation = typing.Type[Any] | types.UnionType
60T_Query = typing.Union["Table", Query, bool, None, "TypedTable", typing.Type["TypedTable"]]
61T_Value = typing.TypeVar("T_Value") # actual type of the Field (via Generic)
62T_MetaInstance = typing.TypeVar("T_MetaInstance", bound="TypedTable") # bound="TypedTable"; bound="TableMeta"
63T = typing.TypeVar("T")
65BASIC_MAPPINGS: dict[T_annotation, str] = {
66 str: "string",
67 int: "integer",
68 bool: "boolean",
69 bytes: "blob",
70 float: "double",
71 object: "json",
72 Decimal: "decimal(10,2)",
73 dt.date: "date",
74 dt.time: "time",
75 dt.datetime: "datetime",
76}
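# Rough illustration of how these mappings get applied (a sketch; `Person` and its
# fields are hypothetical, not part of this module):
#
#   class Person(TypedTable):
#       name: str          # -> Field("name", "string", notnull=True)
#       age: int | None    # -> Field("age", "integer", notnull=False)
#       balance: Decimal   # -> Field("balance", "decimal(10,2)", notnull=True)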
79def is_typed_field(cls: Any) -> typing.TypeGuard["TypedField[Any]"]:
80 """
81 Is `cls` an instance or subclass of TypedField?
83 Deprecated
84 """
85 return (
86 isinstance(cls, TypedField)
87 or isinstance(typing.get_origin(cls), type)
88 and issubclass(typing.get_origin(cls), TypedField)
89 )
92JOIN_OPTIONS = typing.Literal["left", "inner", None]
93DEFAULT_JOIN_OPTION: JOIN_OPTIONS = "left"
95# table-ish parameter:
96P_Table = typing.Union[typing.Type["TypedTable"], pydal.objects.Table]
98Condition: typing.TypeAlias = typing.Optional[
99 typing.Callable[
100 # self, other -> Query
101 [P_Table, P_Table],
102 Query | bool,
103 ]
104]
106OnQuery: typing.TypeAlias = typing.Optional[
107 typing.Callable[
108 # self, other -> list of .on statements
109 [P_Table, P_Table],
110 list[Expression],
111 ]
112]
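# Example shapes of these callables (hedged sketch; `Tagged` and `tag` are illustrative
# names, not defined in this module):
#
#   condition = lambda self, other: self.id == other.author       # -> a single Query
#   on = lambda self, tag: [Tagged.on(Tagged.tag == tag.id)]      # -> list of .on() expressions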
114To_Type = typing.TypeVar("To_Type", type[Any], typing.Type[Any], str)
117class Relationship(typing.Generic[To_Type]):
118 """
119 Define a relationship to another table.
120 """
122 _type: To_Type
123 table: typing.Type["TypedTable"] | type | str
124 condition: Condition
125 on: OnQuery
126 multiple: bool
127 join: JOIN_OPTIONS
129 def __init__(
130 self,
131 _type: To_Type,
132 condition: Condition = None,
133 join: JOIN_OPTIONS = None,
134 on: OnQuery = None,
135 ):
136 """
137 Should not be called directly, use relationship() instead!
138 """
139 if condition and on:
140 warnings.warn(f"Relation | Both specified! {condition=} {on=} {_type=}")
141 raise ValueError("Please specify either a condition or an 'on' statement for this relationship!")
143 self._type = _type
144 self.condition = condition
145 self.join = "left" if on else join # .on is always left join!
146 self.on = on
148 if args := typing.get_args(_type):
149 self.table = unwrap_type(args[0])
150 self.multiple = True
151 else:
152 self.table = _type
153 self.multiple = False
155 if isinstance(self.table, str):
156 self.table = TypeDAL.to_snake(self.table)
158 def clone(self, **update: Any) -> "Relationship[To_Type]":
159 """
160 Create a copy of the relationship, possibly updated.
161 """
162 return self.__class__(
163 update.get("_type") or self._type,
164 update.get("condition") or self.condition,
165 update.get("join") or self.join,
166 update.get("on") or self.on,
167 )
169 def __repr__(self) -> str:
170 """
171 Representation of the relationship.
172 """
173 if callback := self.condition or self.on:
174 src_code = inspect.getsource(callback).strip()
175 else:
176 cls_name = self._type if isinstance(self._type, str) else self._type.__name__ # type: ignore
177 src_code = f"to {cls_name} (missing condition)"
179 join = f":{self.join}" if self.join else ""
180 return f"<Relationship{join} {src_code}>"
182 def get_table(self, db: "TypeDAL") -> typing.Type["TypedTable"]:
183 """
184 Get the table this relationship is bound to.
185 """
186 table = self.table # can be a string because db wasn't available yet
187 if isinstance(table, str):
188 if mapped := db._class_map.get(table):
189 # yay
190 return mapped
192 # boo, fall back to untyped table but pretend it is typed:
193 return typing.cast(typing.Type["TypedTable"], db[table]) # eh close enough!
195 return table
197 def get_table_name(self) -> str:
198 """
199 Get the name of the table this relationship is bound to.
200 """
201 if isinstance(self.table, str):
202 return self.table
204 if isinstance(self.table, pydal.objects.Table):
205 return str(self.table)
207 # else: typed table
208 try:
209 table = self.table._ensure_table_defined() if issubclass(self.table, TypedTable) else self.table
210 except Exception: # pragma: no cover
211 table = self.table
213 return str(table)
215 def __get__(self, instance: Any, owner: Any) -> typing.Optional[list[Any]] | "Relationship[To_Type]":
216 """
217 Relationship is a descriptor class, which can be returned from a class but not an instance.
219 For an instance, using .join() will replace the Relationship with the actual data.
220 If you forgot to join, a warning will be shown and empty data will be returned.
221 """
222 if not instance:
223 # relationship queried on class, that's allowed
224 return self
226 warnings.warn(
227 "Trying to get data from a relationship object! Did you forget to join it?", category=RuntimeWarning
228 )
229 if self.multiple:
230 return []
231 else:
232 return None
235def relationship(
236 _type: To_Type, condition: Condition = None, join: JOIN_OPTIONS = None, on: OnQuery = None
237) -> Relationship[To_Type]:
238 """
239 Define a relationship to another table, when its id is not stored in the current table.
241 Example:
242 class User(TypedTable):
243 name: str
245 posts = relationship(list["Post"], condition=lambda self, post: self.id == post.author, join='left')
247 class Post(TypedTable):
248 title: str
249 author: User
251 User.join("posts").first() # User instance with list[Post] in .posts
253 Here, Post stores the User ID, but `relationship(list["Post"])` still allows you to get the user's posts.
254 In this case, the join strategy is set to LEFT so users without posts are also still selected.
256 For complex queries with a pivot table, an `on` can be set instead of `condition`:
257 class User(TypedTable):
258 ...
260 tags = relationship(list["Tag"], on=lambda self, tag: [
261 Tagged.on(Tagged.entity == entity.gid),
262 Tag.on((Tagged.tag == tag.id)),
263 ])
265 If you tried to capture this in a single 'condition', pydal would create a cross join, which is much less efficient.
266 """
267 return Relationship(_type, condition, join, on)
270def _generate_relationship_condition(
271 _: typing.Type["TypedTable"], key: str, field: typing.Union["TypedField[Any]", "Table", typing.Type["TypedTable"]]
272) -> Condition:
273 origin = typing.get_origin(field)
274 # else: generic
276 if origin == list:
277 # field = typing.get_args(field)[0] # actual field
278 # return lambda _self, _other: cls[key].contains(field)
280 return lambda _self, _other: _self[key].contains(_other.id)
281 else:
282 # normal reference
283 # return lambda _self, _other: cls[key] == field.id
284 return lambda _self, _other: _self[key] == _other.id
287def to_relationship(
288 cls: typing.Type["TypedTable"] | type[Any],
289 key: str,
290 field: typing.Union["TypedField[Any]", "Table", typing.Type["TypedTable"]],
291) -> typing.Optional[Relationship[Any]]:
292 """
293 Used to automatically create relationship instance for reference fields.
295 Example:
296 class MyTable(TypedTable):
297 reference: OtherTable
299 `reference` contains the id of an OtherTable row.
300 MyTable.relationships should have 'reference' as a relationship, so `MyTable.join('reference')` should work.
302 This function will automatically perform this logic (called in db.define):
303 to_relationship(MyTable, 'reference', OtherTable) -> Relationship[OtherTable]
305 Also works for list:reference (list[OtherTable]) and TypedField[OtherTable].
306 """
307 if looks_like(field, TypedField):
308 if args := typing.get_args(field):
309 field = args[0]
310 else:
311 # weird
312 return None
314 field, optional = extract_type_optional(field)
316 try:
317 condition = _generate_relationship_condition(cls, key, field)
318 except Exception as e: # pragma: no cover
319 warnings.warn("Could not generate Relationship condition", source=e)
320 condition = None
322 if not condition: # pragma: no cover
323 # something went wrong, not a valid relationship
324 warnings.warn(f"Invalid relationship for {cls.__name__}.{key}: {field}")
325 return None
327 join = "left" if optional or typing.get_origin(field) == list else "inner"
329 return Relationship(typing.cast(type[TypedTable], field), condition, typing.cast(JOIN_OPTIONS, join))
332class TypeDAL(pydal.DAL): # type: ignore
333 """
334 Drop-in replacement for pyDAL with a layer that converts class-based table definitions to classic pydal define_table calls.
335 """
337 def __init__(
338 self,
339 uri: str | Path = "sqlite://dummy.db",
340 pool_size: int = 0,
341 folder: Optional[str | Path] = None,
342 db_codec: str = "UTF-8",
343 check_reserved: Optional[list[str]] = None,
344 migrate: bool = True,
345 fake_migrate: bool = False,
346 migrate_enabled: bool = True,
347 fake_migrate_all: bool = False,
348 decode_credentials: bool = False,
349 driver_args: Optional[dict[str, Any]] = None,
350 adapter_args: Optional[dict[str, Any]] = None,
351 attempts: int = 5,
352 auto_import: bool = False,
353 bigint_id: bool = False,
354 debug: bool = False,
355 lazy_tables: bool = False,
356 db_uid: Optional[str] = None,
357 after_connection: typing.Callable[..., Any] = None,
358 tables: Optional[list[str]] = None,
359 ignore_field_case: bool = True,
360 entity_quoting: bool = True,
361 table_hash: Optional[str] = None,
362 enable_typedal_caching: bool = True,
363 ) -> None:
364 """
365 Adds some internal tables after calling pydal's default init.
367 Set enable_typedal_caching to False to disable this behavior.
368 """
369 if folder:
370 Path(folder).mkdir(exist_ok=True)
372 super().__init__(
373 str(uri),
374 pool_size,
375 str(folder),
376 db_codec,
377 check_reserved,
378 migrate,
379 fake_migrate,
380 migrate_enabled,
381 fake_migrate_all,
382 decode_credentials,
383 driver_args,
384 adapter_args,
385 attempts,
386 auto_import,
387 bigint_id,
388 debug,
389 lazy_tables,
390 db_uid,
391 after_connection,
392 tables,
393 ignore_field_case,
394 entity_quoting,
395 table_hash,
396 )
398 if enable_typedal_caching:
399 self.try_define(_TypedalCache)
400 self.try_define(_TypedalCacheDependency)
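# Typical construction (a sketch; the uri and folder shown are assumptions, not
# defaults mandated by this module):
#
#   db = TypeDAL("sqlite://storage.sqlite", folder="databases")
#   # the two internal cache tables above are defined automatically unless
#   # enable_typedal_caching=False is passed.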
402 def try_define(self, model: typing.Type[T]) -> typing.Type[T]:
403 """
404 Try to define a model with migrate or fall back to fake migrate.
405 """
406 try:
407 return self.define(model, migrate=True)
408 except Exception as e:
409 warnings.warn(f"{model} could not be migrated, try faking", source=e, category=RuntimeWarning)
410 return self.define(model, migrate=False, fake_migrate=True, redefine=True)
412 default_kwargs: typing.ClassVar[typing.Dict[str, Any]] = {
413 # fields are 'required' (notnull) by default:
414 "notnull": True,
415 }
417 # maps table name to typedal class, for resolving future references
418 _class_map: typing.ClassVar[dict[str, typing.Type["TypedTable"]]] = {}
420 def _define(self, cls: typing.Type[T], **kwargs: Any) -> typing.Type[T]:
421 # todo: new relationship item added should also invalidate (previously unrelated) cache result
423 # todo: option to enable/disable cache dependency behavior:
424 # - don't set _before_update and _before_delete
425 # - don't add TypedalCacheDependency entry
426 # - don't invalidate other item on new row of this type
428 # when __future__.annotations is implemented, cls.__annotations__ will not work anymore as below.
429 # proper way to handle this would be (but gives error right now due to Table implementing magic methods):
430 # typing.get_type_hints(cls, globalns=None, localns=None)
432 # dirty way (with evil eval):
433 # [eval(v) for k, v in cls.__annotations__.items()]
434 # this however also stops working when variables outside this scope or even references to other
435 # objects are used. So for now, this package will NOT work when from __future__ import annotations is used,
436 # and might break in the future, when this annotations behavior is enabled by default.
438 # non-annotated variables have to be passed to define_table as kwargs
439 full_dict = all_dict(cls) # includes properties from parents (e.g. useful for mixins)
441 tablename = self.to_snake(cls.__name__)
442 # grab annotations of cls and its parents:
443 annotations = all_annotations(cls)
444 # extend with `prop = TypedField()` 'annotations':
445 annotations |= {k: typing.cast(type, v) for k, v in full_dict.items() if is_typed_field(v)}
446 # remove internal stuff:
447 annotations = {k: v for k, v in annotations.items() if not k.startswith("_")}
449 typedfields: dict[str, TypedField[Any]] = {
450 k: instanciate(v, True) for k, v in annotations.items() if is_typed_field(v)
451 }
453 relationships: dict[str, type[Relationship[Any]]] = filter_out(annotations, Relationship)
455 fields = {fname: self._to_field(fname, ftype) for fname, ftype in annotations.items()}
457 # ! don't use full_dict here:
458 other_kwargs = kwargs | {
459 k: v for k, v in cls.__dict__.items() if k not in annotations and not k.startswith("_")
460 }
462 for key in typedfields.keys() - full_dict.keys():
463 # typed fields that haven't been added to the object yet
464 setattr(cls, key, typedfields[key])
466 # start with base classes and overwrite with current class:
467 relationships = filter_out(full_dict, Relationship) | relationships | filter_out(other_kwargs, Relationship)
469 # DEPRECATED: Relationship as annotation is currently not supported!
470 # ensure they are all instances and
471 # not mix of instances (`= relationship()`) and classes (`: Relationship[...]`):
472 # relationships = {
473 # k: v if isinstance(v, Relationship) else to_relationship(cls, k, v) for k, v in relationships.items()
474 # }
476 # keys of implicit references (also relationships):
477 reference_field_keys = [k for k, v in fields.items() if v.type.split(" ")[0] in ("list:reference", "reference")]
479 # add implicit relationships:
480 # User; list[User]; TypedField[User]; TypedField[list[User]]
481 relationships |= {
482 k: new_relationship
483 for k in reference_field_keys
484 if k not in relationships and (new_relationship := to_relationship(cls, k, annotations[k]))
485 }
487 cache_dependency = other_kwargs.pop("cache_dependency", True)
489 table: Table = self.define_table(tablename, *fields.values(), **other_kwargs)
491 for name, typed_field in typedfields.items():
492 field = fields[name]
493 typed_field.bind(field, table)
495 if issubclass(cls, TypedTable):
496 cls.__set_internals__(
497 db=self,
498 table=table,
499 # by now, all relationships should be instances!
500 relationships=typing.cast(dict[str, Relationship[Any]], relationships),
501 )
502 self._class_map[str(table)] = cls
503 else:
504 warnings.warn("db.define used without inheriting TypedTable. This could lead to strange problems!")
506 if not tablename.startswith("typedal_") and cache_dependency:
507 table._before_update.append(lambda s, _: _remove_cache(s, tablename))
508 table._before_delete.append(lambda s: _remove_cache(s, tablename))
510 return cls
512 @typing.overload
513 def define(self, maybe_cls: None = None, **kwargs: Any) -> typing.Callable[[typing.Type[T]], typing.Type[T]]:
514 """
515 Typing Overload for define without a class.
517 @db.define()
518 class MyTable(TypedTable): ...
519 """
521 @typing.overload
522 def define(self, maybe_cls: typing.Type[T], **kwargs: Any) -> typing.Type[T]:
523 """
524 Typing Overload for define with a class.
526 @db.define
527 class MyTable(TypedTable): ...
528 """
530 def define(
531 self, maybe_cls: typing.Type[T] | None = None, **kwargs: Any
532 ) -> typing.Type[T] | typing.Callable[[typing.Type[T]], typing.Type[T]]:
533 """
534 Can be used as a decorator on a class that inherits `TypedTable`, \
535 or as a regular method if you need to define your classes before you have access to a 'db' instance.
537 You can also pass extra arguments to db.define_table.
538 See http://www.web2py.com/books/default/chapter/29/06/the-database-abstraction-layer#Table-constructor
540 Example:
541 @db.define
542 class Person(TypedTable):
543 ...
545 class Article(TypedTable):
546 ...
548 # at a later time:
549 db.define(Article)
551 Returns:
552 the result of pydal.define_table
553 """
555 def wrapper(cls: typing.Type[T]) -> typing.Type[T]:
556 return self._define(cls, **kwargs)
558 if maybe_cls:
559 return wrapper(maybe_cls)
561 return wrapper
563 # def drop(self, table_name: str) -> None:
564 # """
565 # Remove a table by name (both on the database level and the typedal level).
566 # """
567 # # drop calls TypedTable.drop() and removes it from the `_class_map`
568 # if cls := self._class_map.pop(table_name, None):
569 # cls.drop()
571 # def drop_all(self, max_retries: int = None) -> None:
572 # """
573 # Remove all tables and keep doing so until everything is gone!
574 # """
575 # retries = 0
576 # if max_retries is None:
577 # max_retries = len(self.tables)
578 #
579 # while self.tables:
580 # retries += 1
581 # for table in self.tables:
582 # self.drop(table)
583 #
584 # if retries > max_retries:
585 # raise RuntimeError("Could not delete all tables")
587 def __call__(self, *_args: T_Query, **kwargs: Any) -> "TypedSet":
588 """
589 A db instance can be called directly to perform a query.
591 Usually, only a query is passed.
593 Example:
594 db(query).select()
596 """
597 args = list(_args)
598 if args:
599 cls = args[0]
600 if isinstance(cls, bool):
601 raise ValueError("Don't actually pass a bool to db()! Use a query instead.")
603 if isinstance(cls, type) and issubclass(type(cls), type) and issubclass(cls, TypedTable):
604 # table defined without @db.define decorator!
605 _cls: typing.Type[TypedTable] = cls
606 args[0] = _cls.id != None
608 _set = super().__call__(*args, **kwargs)
609 return typing.cast(TypedSet, _set)
611 @classmethod
612 def _build_field(cls, name: str, _type: str, **kw: Any) -> Field:
613 return Field(name, _type, **{**cls.default_kwargs, **kw})
615 @classmethod
616 def _annotation_to_pydal_fieldtype(
617 cls, _ftype: T_annotation, mut_kw: typing.MutableMapping[str, Any]
618 ) -> Optional[str]:
619 # ftype can be a union or type. typing.cast is sometimes used to tell mypy when it's not a union.
620 ftype = typing.cast(type, _ftype) # cast from typing.Type to type to make mypy happy
622 if isinstance(ftype, str):
623 # extract type from string
624 ftype = typing.get_args(typing.Type[ftype])[0]._evaluate(
625 localns=locals(), globalns=globals(), recursive_guard=frozenset()
626 )
628 if mapping := BASIC_MAPPINGS.get(ftype):
629 # basic types
630 return mapping
631 elif isinstance(ftype, _Table):
632 # db.table
633 return f"reference {ftype._tablename}"
634 elif issubclass(type(ftype), type) and issubclass(ftype, TypedTable):
635 # SomeTable
636 snakename = cls.to_snake(ftype.__name__)
637 return f"reference {snakename}"
638 elif isinstance(ftype, TypedField):
639 # FieldType(type, ...)
640 return ftype._to_field(mut_kw)
641 elif origin_is_subclass(ftype, TypedField):
642 # TypedField[int]
643 return cls._annotation_to_pydal_fieldtype(typing.get_args(ftype)[0], mut_kw)
644 elif isinstance(ftype, types.GenericAlias) and typing.get_origin(ftype) in (list, TypedField):
645 # list[str] -> str -> string -> list:string
646 _child_type = typing.get_args(ftype)[0]
647 _child_type = cls._annotation_to_pydal_fieldtype(_child_type, mut_kw)
648 return f"list:{_child_type}"
649 elif is_union(ftype):
650 # str | int -> UnionType
651 # typing.Union[str | int] -> typing._UnionGenericAlias
653 # Optional[type] == type | None
655 match typing.get_args(ftype):
656 case (_child_type, _Types.NONETYPE) | (_Types.NONETYPE, _child_type):
657 # good union of Nullable
659 # if a field is optional, it is nullable:
660 mut_kw["notnull"] = False
661 return cls._annotation_to_pydal_fieldtype(_child_type, mut_kw)
662 case _:
663 # two types is not supported by the db!
664 return None
665 else:
666 return None
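# A few concrete conversions this method performs (sketch; `Author` is a hypothetical
# TypedTable subclass):
#
#   str                      -> "string"
#   int | None               -> "integer"            (and notnull=False is set)
#   list[str]                -> "list:string"
#   Author                   -> "reference author"
#   TypedField[list[Author]] -> "list:reference author"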
668 @classmethod
669 def _to_field(cls, fname: str, ftype: type, **kw: Any) -> Field:
670 """
671 Convert an annotation into a pydal Field.
673 Args:
674 fname: name of the property
675 ftype: annotation of the property
676 kw: when using TypedField or a function returning it (e.g. StringField),
677 keyword args can be used to pass any other settings you would normally to a pydal Field
679 -> pydal.Field(fname, ftype, **kw)
681 Example:
682 class MyTable:
683 fname: ftype
684 id: int
685 name: str
686 reference: Table
687 other: TypedField(str, default="John Doe") # default will be in kwargs
688 """
689 fname = cls.to_snake(fname)
691 if converted_type := cls._annotation_to_pydal_fieldtype(ftype, kw):
692 return cls._build_field(fname, converted_type, **kw)
693 else:
694 raise NotImplementedError(f"Unsupported type {ftype}/{type(ftype)}")
696 @staticmethod
697 def to_snake(camel: str) -> str:
698 """
699 Moved to helpers, kept as a static method for legacy reasons.
700 """
701 return to_snake(camel)
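# e.g. to_snake("MyTable") is expected to yield "my_table" (see helpers.to_snake),
# which is why a class `MyTable(TypedTable)` ends up as the database table "my_table".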
704class TableProtocol(typing.Protocol): # pragma: no cover
705 """
706 Make mypy happy.
707 """
709 id: int # noqa: A003
711 def __getitem__(self, item: str) -> Field:
712 """
713 Tell mypy a Table supports dictionary notation for columns.
714 """
717class Table(_Table, TableProtocol): # type: ignore
718 """
719 Make mypy happy.
720 """
723class TableMeta(type):
724 """
725 This metaclass contains functionality on table classes that doesn't exist on their instances.
727 Example:
728 class MyTable(TypedTable):
729 some_field: TypedField[int]
731 MyTable.update_or_insert(...) # should work
733 MyTable.some_field # -> Field, can be used to query etc.
735 row = MyTable.first() # returns instance of MyTable
737 # row.update_or_insert(...) # shouldn't work!
739 row.some_field # -> int, with actual data
741 """
743 # set up by db.define:
744 # _db: TypeDAL | None = None
745 # _table: Table | None = None
746 _db: TypeDAL | None = None
747 _table: Table | None = None
748 _relationships: dict[str, Relationship[Any]] | None = None
750 #########################
751 # TypeDAL custom logic: #
752 #########################
754 def __set_internals__(self, db: pydal.DAL, table: Table, relationships: dict[str, Relationship[Any]]) -> None:
755 """
756 Store the related database and pydal table for later usage.
757 """
758 self._db = db
759 self._table = table
760 self._relationships = relationships
762 def __getattr__(self, col: str) -> Optional[Field]:
763 """
764 Magic method used by TypedTableMeta to get a database field with dot notation on a class.
766 Example:
767 SomeTypedTable.col -> db.table.col (via TypedTableMeta.__getattr__)
769 """
770 if self._table:
771 return getattr(self._table, col, None)
773 return None
775 def _ensure_table_defined(self) -> Table:
776 if not self._table:
777 raise EnvironmentError("@define or db.define is not called on this class yet!")
778 return self._table
780 def __iter__(self) -> typing.Generator[Field, None, None]:
781 """
782 Loop through the columns of this model.
783 """
784 table = self._ensure_table_defined()
785 yield from iter(table)
787 def __getitem__(self, item: str) -> Field:
788 """
789 Allow dict notation to get a column of this table (-> Field instance).
790 """
791 table = self._ensure_table_defined()
792 return table[item]
794 def __str__(self) -> str:
795 """
796 Normally, just returns the underlying table name, but with a fallback if the model is unbound.
797 """
798 if self._table:
799 return str(self._table)
800 else:
801 return f"<unbound table {self.__name__}>"
803 def from_row(self: typing.Type[T_MetaInstance], row: pydal.objects.Row) -> T_MetaInstance:
804 """
805 Create a model instance from a pydal row.
806 """
807 return self(row)
809 def all(self: typing.Type[T_MetaInstance]) -> "TypedRows[T_MetaInstance]": # noqa: A003
810 """
811 Return all rows for this model.
812 """
813 return self.collect()
815 def __json__(self: typing.Type[T_MetaInstance], instance: T_MetaInstance | None = None) -> dict[str, Any]:
816 """
817 Convert to a json-dumpable dict.
819 as_dict is not fully json-dumpable, so use as_json and json.loads to ensure it is dumpable (and loadable).
820 todo: can this be optimized?
822 See Also:
823 https://github.com/jeff-hykin/json_fix
824 """
825 string = instance.as_json() if instance else self.as_json()
827 return typing.cast(dict[str, Any], json.loads(string))
829 def get_relationships(self) -> dict[str, Relationship[Any]]:
830 """
831 Return the registered relationships of the current model.
832 """
833 return self._relationships or {}
835 ##########################
836 # TypeDAL Modified Logic #
837 ##########################
839 def insert(self: typing.Type[T_MetaInstance], **fields: Any) -> T_MetaInstance:
840 """
841 This is only called when db.define is not used as a decorator.
843 cls.__table functions as 'self'
845 Args:
846 **fields: anything you want to insert in the database
848 Returns: the new row as an instance of this model.
850 """
851 table = self._ensure_table_defined()
853 result = table.insert(**fields)
854 # it already is an int but mypy doesn't understand that
855 return self(result)
857 def _insert(self, **fields: Any) -> str:
858 table = self._ensure_table_defined()
860 return str(table._insert(**fields))
862 def bulk_insert(self: typing.Type[T_MetaInstance], items: list[dict[str, Any]]) -> "TypedRows[T_MetaInstance]":
863 """
864 Insert multiple rows, returns a TypedRows set of new instances.
865 """
866 table = self._ensure_table_defined()
867 result = table.bulk_insert(items)
868 return self.where(lambda row: row.id.belongs(result)).collect()
870 def update_or_insert(
871 self: typing.Type[T_MetaInstance], query: T_Query | dict[str, Any] = DEFAULT, **values: Any
872 ) -> T_MetaInstance:
873 """
874 Update a row if query matches, else insert a new one.
876 Returns the created or updated instance.
877 """
878 table = self._ensure_table_defined()
880 if query is DEFAULT:
881 record = table(**values)
882 elif isinstance(query, dict):
883 record = table(**query)
884 else:
885 record = table(query)
887 if not record:
888 return self.insert(**values)
890 record.update_record(**values)
891 return self(record)
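# Usage sketch (names are illustrative, not part of this module):
#
#   person = Person.update_or_insert(dict(name="Alice"), name="Alice", age=31)
#   # matches on the dict (or a Query, or the **values when omitted), updates the row
#   # if found, inserts otherwise, and returns a Person instance either way.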
893 def validate_and_insert(
894 self: typing.Type[T_MetaInstance], **fields: Any
895 ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]:
896 """
897 Validate input data and then insert a row.
899 Returns a tuple of (the created instance, a dict of errors).
900 """
901 table = self._ensure_table_defined()
902 result = table.validate_and_insert(**fields)
903 if row_id := result.get("id"):
904 return self(row_id), None
905 else:
906 return None, result.get("errors")
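# Usage sketch (illustrative):
#
#   person, errors = Person.validate_and_insert(name="Bob")
#   if errors:
#       ...  # dict of field -> error message; person is None in that case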
908 def validate_and_update(
909 self: typing.Type[T_MetaInstance], query: Query, **fields: Any
910 ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]:
911 """
912 Validate input data and then update max 1 row.
914 Returns a tuple of (the updated instance, a dict of errors).
915 """
916 table = self._ensure_table_defined()
918 try:
919 result = table.validate_and_update(query, **fields)
920 except Exception as e:
921 result = {"errors": {"exception": str(e)}}
923 if errors := result.get("errors"):
924 return None, errors
925 elif row_id := result.get("id"):
926 return self(row_id), None
927 else: # pragma: no cover
928 # update on query without result (shouldn't happen)
929 return None, None
931 def validate_and_update_or_insert(
932 self: typing.Type[T_MetaInstance], query: Query, **fields: Any
933 ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]:
934 """
935 Validate input data and then update or insert (on max 1 row).
937 Returns a tuple of (the updated/created instance, a dict of errors).
938 """
939 table = self._ensure_table_defined()
940 result = table.validate_and_update_or_insert(query, **fields)
942 if errors := result.get("errors"):
943 return None, errors
944 elif row_id := result.get("id"):
945 return self(row_id), None
946 else: # pragma: no cover
947 # update on query without result (shouldn't happen)
948 return None, None
950 def select(self: typing.Type[T_MetaInstance], *a: Any, **kw: Any) -> "QueryBuilder[T_MetaInstance]":
951 """
952 See QueryBuilder.select!
953 """
954 return QueryBuilder(self).select(*a, **kw)
956 def paginate(self: typing.Type[T_MetaInstance], limit: int, page: int = 1) -> "PaginatedRows[T_MetaInstance]":
957 """
958 See QueryBuilder.paginate!
959 """
960 return QueryBuilder(self).paginate(limit=limit, page=page)
962 def chunk(
963 self: typing.Type[T_MetaInstance], chunk_size: int
964 ) -> typing.Generator["TypedRows[T_MetaInstance]", Any, None]:
965 """
966 See QueryBuilder.chunk!
967 """
968 return QueryBuilder(self).chunk(chunk_size)
970 def where(self: typing.Type[T_MetaInstance], *a: Any, **kw: Any) -> "QueryBuilder[T_MetaInstance]":
971 """
972 See QueryBuilder.where!
973 """
974 return QueryBuilder(self).where(*a, **kw)
976 def cache(self: typing.Type[T_MetaInstance], *deps: Any, **kwargs: Any) -> "QueryBuilder[T_MetaInstance]":
977 """
978 See QueryBuilder.cache!
979 """
980 return QueryBuilder(self).cache(*deps, **kwargs)
982 def count(self: typing.Type[T_MetaInstance]) -> int:
983 """
984 See QueryBuilder.count!
985 """
986 return QueryBuilder(self).count()
988 def first(self: typing.Type[T_MetaInstance]) -> T_MetaInstance | None:
989 """
990 See QueryBuilder.first!
991 """
992 return QueryBuilder(self).first()
994 def join(
995 self: typing.Type[T_MetaInstance],
996 *fields: str | typing.Type["TypedTable"],
997 method: JOIN_OPTIONS = None,
998 on: OnQuery | list[Expression] | Expression = None,
999 condition: Condition = None,
1000 ) -> "QueryBuilder[T_MetaInstance]":
1001 """
1002 See QueryBuilder.join!
1003 """
1004 return QueryBuilder(self).join(*fields, on=on, condition=condition, method=method)
1006 def collect(self: typing.Type[T_MetaInstance], verbose: bool = False) -> "TypedRows[T_MetaInstance]":
1007 """
1008 See QueryBuilder.collect!
1009 """
1010 return QueryBuilder(self).collect(verbose=verbose)
1012 @property
1013 def ALL(cls) -> pydal.objects.SQLALL:
1014 """
1015 Select all fields for this table.
1016 """
1017 table = cls._ensure_table_defined()
1019 return table.ALL
1021 ##########################
1022 # TypeDAL Shadowed Logic #
1023 ##########################
1024 fields: list[str]
1026 # other table methods:
1028 def drop(self, mode: str = "") -> None:
1029 """
1030 Remove the underlying table.
1031 """
1032 table = self._ensure_table_defined()
1033 table.drop(mode)
1035 def create_index(self, name: str, *fields: Field | str, **kwargs: Any) -> bool:
1036 """
1037 Add an index on some columns of this table.
1038 """
1039 table = self._ensure_table_defined()
1040 result = table.create_index(name, *fields, **kwargs)
1041 return typing.cast(bool, result)
1043 def drop_index(self, name: str, if_exists: bool = False) -> bool:
1044 """
1045 Remove an index from this table.
1046 """
1047 table = self._ensure_table_defined()
1048 result = table.drop_index(name, if_exists)
1049 return typing.cast(bool, result)
1051 def import_from_csv_file(
1052 self,
1053 csvfile: typing.TextIO,
1054 id_map: dict[str, str] = None,
1055 null: Any = "<NULL>",
1056 unique: str = "uuid",
1057 id_offset: dict[str, int] = None, # id_offset used only when id_map is None
1058 transform: typing.Callable[[dict[Any, Any]], dict[Any, Any]] = None,
1059 validate: bool = False,
1060 encoding: str = "utf-8",
1061 delimiter: str = ",",
1062 quotechar: str = '"',
1063 quoting: int = csv.QUOTE_MINIMAL,
1064 restore: bool = False,
1065 **kwargs: Any,
1066 ) -> None:
1067 """
1068 Load a csv file into the database.
1069 """
1070 table = self._ensure_table_defined()
1071 table.import_from_csv_file(
1072 csvfile,
1073 id_map=id_map,
1074 null=null,
1075 unique=unique,
1076 id_offset=id_offset,
1077 transform=transform,
1078 validate=validate,
1079 encoding=encoding,
1080 delimiter=delimiter,
1081 quotechar=quotechar,
1082 quoting=quoting,
1083 restore=restore,
1084 **kwargs,
1085 )
1087 def on(self, query: Query | bool) -> Expression:
1088 """
1089 Shadow Table.on.
1091 Used for joins.
1093 See Also:
1094 http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#One-to-many-relation
1095 """
1096 table = self._ensure_table_defined()
1097 return typing.cast(Expression, table.on(query))
1099 def with_alias(self, alias: str) -> _Table:
1100 """
1101 Shadow Table.with_alias.
1103 Useful for joins when joining the same table multiple times.
1105 See Also:
1106 http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#One-to-many-relation
1107 """
1108 table = self._ensure_table_defined()
1109 return table.with_alias(alias)
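# Because these methods live on the metaclass, they can be chained straight off a
# model class (sketch; `Person` and the "posts" relationship are illustrative):
#
#   rows = Person.where(Person.age > 18).join("posts").collect()
#   first = Person.where(lambda p: p.name == "Alice").first()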
1111 # @typing.dataclass_transform()
1114class TypedField(typing.Generic[T_Value]): # pragma: no cover
1115 """
1116 Typed version of pydal.Field, which will be converted to a normal Field in the background.
1117 """
1119 # will be set by .bind on db.define
1120 name = ""
1121 _db: Optional[pydal.DAL] = None
1122 _rname: Optional[str] = None
1123 _table: Optional[Table] = None
1124 _field: Optional[Field] = None
1126 _type: T_annotation
1127 kwargs: Any
1129 def __init__(self, _type: typing.Type[T_Value] | types.UnionType = str, /, **settings: Any) -> None: # type: ignore
1130 """
1131 A TypedFieldType should not be initialized manually; TypedField (from `fields.py`) should be used instead!
1132 """
1133 self._type = _type
1134 self.kwargs = settings
1135 super().__init__()
1137 @typing.overload
1138 def __get__(self, instance: T_MetaInstance, owner: typing.Type[T_MetaInstance]) -> T_Value: # pragma: no cover
1139 """
1140 row.field -> (actual data).
1141 """
1143 @typing.overload
1144 def __get__(self, instance: None, owner: "typing.Type[TypedTable]") -> "TypedField[T_Value]": # pragma: no cover
1145 """
1146 Table.field -> Field.
1147 """
1149 def __get__(
1150 self, instance: T_MetaInstance | None, owner: typing.Type[T_MetaInstance]
1151 ) -> typing.Union[T_Value, "TypedField[T_Value]"]:
1152 """
1153 Since this class is a Descriptor field, \
1154 it returns something else depending on if it's called on a class or instance.
1156 (this is mostly for mypy/typing)
1157 """
1158 if instance:
1159 # this is only reached in a very specific case:
1160 # an instance of the object was created with a specific set of fields selected (excluding the current one)
1161 # in that case, no value was stored in the owner -> return None (since the field was not selected)
1162 return typing.cast(T_Value, None) # cast as T_Value so mypy understands it for selected fields
1163 else:
1164 # getting as class -> return actual field so pydal understands it when using in query etc.
1165 return typing.cast(TypedField[T_Value], self._field) # pretend it's still typed for IDE support
1167 def __str__(self) -> str:
1168 """
1169 String representation of a Typed Field.
1171 If `type` is set explicitly (e.g. TypedField(str, type="text")), that type is used: `TypedField.text`,
1172 otherwise the type annotation is used (e.g. TypedField(str) -> TypedField.str)
1173 """
1174 return str(self._field) if self._field else ""
1176 def __repr__(self) -> str:
1177 """
1178 More detailed string representation of a Typed Field.
1180 Uses __str__ and adds the provided extra options (kwargs) in the representation.
1181 """
1182 s = self.__str__()
1184 if "type" in self.kwargs:
1185 # manual type in kwargs supplied
1186 t = self.kwargs["type"]
1187 elif issubclass(type, type(self._type)):
1188 # normal type, str.__name__ = 'str'
1189 t = getattr(self._type, "__name__", str(self._type))
1190 elif t_args := typing.get_args(self._type):
1191 # list[str] -> 'str'
1192 t = t_args[0].__name__
1193 else: # pragma: no cover
1194 # fallback - something else, may not even happen, I'm not sure
1195 t = self._type
1197 s = f"TypedField[{t}].{s}" if s else f"TypedField[{t}]"
1199 kw = self.kwargs.copy()
1200 kw.pop("type", None)
1201 return f"<{s} with options {kw}>"
1203 def _to_field(self, extra_kwargs: typing.MutableMapping[str, Any]) -> Optional[str]:
1204 """
1205 Convert a Typed Field instance to a pydal.Field.
1206 """
1207 other_kwargs = self.kwargs.copy()
1208 extra_kwargs.update(other_kwargs)
1209 return extra_kwargs.pop("type", False) or TypeDAL._annotation_to_pydal_fieldtype(self._type, extra_kwargs)
1211 def bind(self, field: pydal.objects.Field, table: pydal.objects.Table) -> None:
1212 """
1213 Bind the right db/table/field info to this class, so queries can be made using `Class.field == ...`.
1214 """
1215 self._table = table
1216 self._field = field
1218 def __getattr__(self, key: str) -> Any:
1219 """
1220 If the regular getattribute does not work, try to get info from the related Field.
1221 """
1222 with contextlib.suppress(AttributeError):
1223 return super().__getattribute__(key)
1225 # try on actual field:
1226 return getattr(self._field, key)
1228 def __eq__(self, other: Any) -> Query:
1229 """
1230 Performing == on a Field will result in a Query.
1231 """
1232 return typing.cast(Query, self._field == other)
1234 def __ne__(self, other: Any) -> Query:
1235 """
1236 Performing != on a Field will result in a Query.
1237 """
1238 return typing.cast(Query, self._field != other)
1240 def __gt__(self, other: Any) -> Query:
1241 """
1242 Performing > on a Field will result in a Query.
1243 """
1244 return typing.cast(Query, self._field > other)
1246 def __lt__(self, other: Any) -> Query:
1247 """
1248 Performing < on a Field will result in a Query.
1249 """
1250 return typing.cast(Query, self._field < other)
1252 def __ge__(self, other: Any) -> Query:
1253 """
1254 Performing >= on a Field will result in a Query.
1255 """
1256 return typing.cast(Query, self._field >= other)
1258 def __le__(self, other: Any) -> Query:
1259 """
1260 Performing <= on a Field will result in a Query.
1261 """
1262 return typing.cast(Query, self._field <= other)
1264 def __hash__(self) -> int:
1265 """
1266 Shadow Field.__hash__.
1267 """
1268 return hash(self._field)
1270 def __invert__(self) -> Expression:
1271 """
1272 Performing ~ on a Field will result in an Expression.
1273 """
1274 if not self._field: # pragma: no cover
1275 raise ValueError("Unbound Field can not be inverted!")
1277 return typing.cast(Expression, ~self._field)
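# Sketch of how TypedField behaves as a descriptor (illustrative model, not part of
# this module):
#
#   class Person(TypedTable):
#       name = TypedField(str, default="John Doe")
#
#   Person.name == "Alice"   # on the class -> a pydal Query, usable in db(...)
#   Person.first().name      # on an instance -> the stored str value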
1280class TypedTable(metaclass=TableMeta):
1281 """
1282 Enhanced modeling system on top of pydal's Table that adds typing and additional functionality.
1283 """
1285 # set up by 'new':
1286 _row: Row | None = None
1288 _with: list[str]
1290 id: "TypedField[int]" # noqa: A003
1292 _before_insert: list[BeforeInsertCallable]
1293 _after_insert: list[AfterInsertCallable]
1294 _before_update: list[BeforeUpdateCallable]
1295 _after_update: list[AfterUpdateCallable]
1296 _before_delete: list[BeforeDeleteCallable]
1297 _after_delete: list[AfterDeleteCallable]
1299 def _setup_instance_methods(self) -> None:
1300 self.as_dict = self._as_dict # type: ignore
1301 self.__json__ = self.as_json = self._as_json # type: ignore
1302 # self.as_yaml = self._as_yaml # type: ignore
1303 self.as_xml = self._as_xml # type: ignore
1305 self.update = self._update # type: ignore
1307 self.delete_record = self._delete_record # type: ignore
1308 self.update_record = self._update_record # type: ignore
1310 def __new__(
1311 cls, row_or_id: typing.Union[Row, Query, pydal.objects.Set, int, str, None, "TypedTable"] = None, **filters: Any
1312 ) -> "TypedTable":
1313 """
1314 Create a typed row (TypedTable) instance from an existing row, ID or query.
1316 Examples:
1317 MyTable(1)
1318 MyTable(id=1)
1319 MyTable(MyTable.id == 1)
1320 """
1321 table = cls._ensure_table_defined()
1322 inst = super().__new__(cls)
1324 if isinstance(row_or_id, TypedTable):
1325 # existing typed table instance!
1326 return row_or_id
1327 elif isinstance(row_or_id, pydal.objects.Row):
1328 row = row_or_id
1329 elif row_or_id is not None:
1330 row = table(row_or_id, **filters)
1331 elif filters:
1332 row = table(**filters)
1333 else:
1334 # dummy object
1335 return inst
1337 if not row:
1338 return None # type: ignore
1340 inst._row = row
1341 inst.__dict__.update(row)
1342 inst._setup_instance_methods()
1343 return inst
1345 def __iter__(self) -> typing.Generator[Any, None, None]:
1346 """
1347 Allows looping through the columns.
1348 """
1349 row = self._ensure_matching_row()
1350 yield from iter(row)
1352 def __getitem__(self, item: str) -> Any:
1353 """
1354 Allows dictionary notation to get columns.
1355 """
1356 if item in self.__dict__:
1357 return self.__dict__.get(item)
1359 # fallback to lookup in row
1360 if self._row:
1361 return self._row[item]
1363 # nothing found!
1364 raise KeyError(item)
1366 def __getattr__(self, item: str) -> Any:
1367 """
1368 Allows dot notation to get columns.
1369 """
1370 if value := self.get(item):
1371 return value
1373 raise AttributeError(item)
1375 def get(self, item: str, default: Any = None) -> Any:
1376 """
1377 Try to get a column from this instance, else return default.
1378 """
1379 try:
1380 return self.__getitem__(item)
1381 except KeyError:
1382 return default
1384 def __setitem__(self, key: str, value: Any) -> None:
1385 """
1386 Data can both be updated via dot and dict notation.
1387 """
1388 return setattr(self, key, value)
1390 def __int__(self) -> int:
1391 """
1392 Calling int on a model instance will return its id.
1393 """
1394 return getattr(self, "id", 0)
1396 def __bool__(self) -> bool:
1397 """
1398 If the instance has an underlying row with data, it is truthy.
1399 """
1400 return bool(getattr(self, "_row", False))
1402 def _ensure_matching_row(self) -> Row:
1403 if not getattr(self, "_row", None):
1404 raise EnvironmentError("Trying to access non-existent row. Maybe it was deleted or not yet initialized?")
1405 return self._row
1407 def __repr__(self) -> str:
1408 """
1409 String representation of the model instance.
1410 """
1411 model_name = self.__class__.__name__
1412 model_data = {}
1414 if self._row:
1415 model_data = self._row.as_json()
1417 details = model_name
1418 details += f"({model_data})"
1420 if relationships := getattr(self, "_with", []):
1421 details += f" + {relationships}"
1423 return f"<{details}>"
1425 # serialization
1426 # underscore variants work for class instances (set up by _setup_instance_methods)
1428 @classmethod
1429 def as_dict(cls, flat: bool = False, sanitize: bool = True) -> dict[str, Any]:
1430 """
1431 Dump the object to a plain dict.
1433 Can be used as both a class or instance method:
1434 - dumps the table info if it's a class
1435 - dumps the row info if it's an instance (see _as_dict)
1436 """
1437 table = cls._ensure_table_defined()
1438 result = table.as_dict(flat, sanitize)
1439 return typing.cast(dict[str, Any], result)
1441 @classmethod
1442 def as_json(cls, sanitize: bool = True) -> str:
1443 """
1444 Dump the object to json.
1446 Can be used as both a class or instance method:
1447 - dumps the table info if it's a class
1448 - dumps the row info if it's an instance (see _as_json)
1449 """
1450 table = cls._ensure_table_defined()
1451 return typing.cast(str, table.as_json(sanitize))
1453 @classmethod
1454 def as_xml(cls, sanitize: bool = True) -> str: # pragma: no cover
1455 """
1456 Dump the object to xml.
1458 Can be used as both a class or instance method:
1459 - dumps the table info if it's a class
1460 - dumps the row info if it's an instance (see _as_xml)
1461 """
1462 table = cls._ensure_table_defined()
1463 return typing.cast(str, table.as_xml(sanitize))
1465 @classmethod
1466 def as_yaml(cls, sanitize: bool = True) -> str:
1467 """
1468 Dump the object to yaml.
1470 Can be used as both a class or instance method:
1471 - dumps the table info if it's a class
1472 - dumps the row info if it's an instance (see _as_yaml)
1473 """
1474 table = cls._ensure_table_defined()
1475 return typing.cast(str, table.as_yaml(sanitize))
1477 def _as_dict(
1478 self, datetime_to_str: bool = False, custom_types: typing.Iterable[type] | type | None = None
1479 ) -> dict[str, Any]:
1480 row = self._ensure_matching_row()
1481 result = row.as_dict(datetime_to_str=datetime_to_str, custom_types=custom_types)
1483 if _with := getattr(self, "_with", None):
1484 for relationship in _with:
1485 data = self.get(relationship)
1486 if isinstance(data, list):
1487 data = [_.as_dict() if getattr(_, "as_dict", None) else _ for _ in data]
1488 elif data:
1489 data = data.as_dict()
1491 result[relationship] = data
1493 return typing.cast(dict[str, Any], result)
1495 def _as_json(
1496 self,
1497 mode: str = "object",
1498 default: typing.Callable[[Any], Any] = None,
1499 colnames: list[str] = None,
1500 serialize: bool = True,
1501 **kwargs: Any,
1502 ) -> str:
1503 row = self._ensure_matching_row()
1504 return typing.cast(str, row.as_json(mode, default, colnames, serialize, **kwargs))
1506 def _as_xml(self, sanitize: bool = True) -> str: # pragma: no cover
1507 row = self._ensure_matching_row()
1508 return typing.cast(str, row.as_xml(sanitize))
1510 # def _as_yaml(self, sanitize: bool = True) -> str:
1511 # row = self._ensure_matching_row()
1512 # return typing.cast(str, row.as_yaml(sanitize))
1514 def __setattr__(self, key: str, value: Any) -> None:
1515 """
1516 When setting a property on a Typed Table model instance, also update the underlying row.
1517 """
1518 if self._row and key in self._row.__dict__ and not callable(value):
1519 # enables `row.key = value; row.update_record()`
1520 self._row[key] = value
1522 super().__setattr__(key, value)
1524 @classmethod
1525 def update(cls: typing.Type[T_MetaInstance], query: Query, **fields: Any) -> T_MetaInstance | None:
1526 """
1527 Update one record.
1529 Example:
1530 MyTable.update(MyTable.id == 1, name="NewName") -> MyTable
1531 """
1532 # todo: update multiple?
1533 if record := cls(query):
1534 return record.update_record(**fields)
1535 else:
1536 return None
1538 def _update(self: T_MetaInstance, **fields: Any) -> T_MetaInstance:
1539 row = self._ensure_matching_row()
1540 row.update(**fields)
1541 self.__dict__.update(**fields)
1542 return self
1544 def _update_record(self: T_MetaInstance, **fields: Any) -> T_MetaInstance:
1545 row = self._ensure_matching_row()
1546 new_row = row.update_record(**fields)
1547 self.update(**new_row)
1548 return self
1550 def update_record(self: T_MetaInstance, **fields: Any) -> T_MetaInstance: # pragma: no cover
1551 """
1552 Here as a placeholder for _update_record.
1554 Will be replaced on instance creation!
1555 """
1556 return self._update_record(**fields)
1558 def _delete_record(self) -> int:
1559 """
1560 Actual logic in `pydal.helpers.classes.RecordDeleter`.
1561 """
1562 row = self._ensure_matching_row()
1563 result = row.delete_record()
1564 self.__dict__ = {} # empty self, since row is no more.
1565 self._row = None # just to be sure
1566 self._setup_instance_methods()
1567 # ^ instance methods might've been deleted by emptying dict,
1568 # but we still want .as_dict to show an error, not the table's as_dict.
1569 return typing.cast(int, result)
1571 def delete_record(self) -> int: # pragma: no cover
1572 """
1573 Here as a placeholder for _delete_record.
1575 Will be replaced on instance creation!
1576 """
1577 return self._delete_record()
1579 # __del__ is also called on the end of a scope so don't remove records on every del!!
1581 # pickling:
1582 def __setstate__(self, state: dict[str, Any]) -> None:
1583 """
1584 Used by dill when loading from a bytestring.
1585 """
1586 # as_dict also includes table info, so dump as json to only get the actual row data
1587 # then create a new (more empty) row object:
1588 state["_row"] = Row(json.loads(state["_row"]))
1589 self.__dict__ |= state
1591 def __getstate__(self) -> dict[str, Any]:
1592 """
1593 State to save when pickling.
1595 Prevents db connection from being pickled.
1596 Similar to as_dict but without changing the data of the relationships (dill does that recursively)
1597 """
1598 row = self._ensure_matching_row()
1599 result: dict[str, Any] = row.as_dict()
1601 if _with := getattr(self, "_with", None):
1602 for relationship in _with:
1603 data = self.get(relationship)
1605 result[relationship] = data
1607 result["_row"] = self._row.as_json() if self._row else ""
1608 return result
1611# backwards compat:
1612TypedRow = TypedTable
1615class TypedRows(typing.Collection[T_MetaInstance], Rows):
1616 """
1617 Slightly enhanced and typed functionality on top of pydal Rows (the result of a select).
1618 """
1620 records: dict[int, T_MetaInstance]
1621 # _rows: Rows
1622 model: typing.Type[T_MetaInstance]
1623 metadata: Metadata
1625 # pseudo-properties: actually stored in _rows
1626 db: TypeDAL
1627 colnames: list[str]
1628 fields: list[Field]
1629 colnames_fields: list[Field]
1630 response: list[tuple[Any, ...]]
1632 def __init__(
1633 self,
1634 rows: Rows,
1635 model: typing.Type[T_MetaInstance],
1636 records: dict[int, T_MetaInstance] = None,
1637 metadata: Metadata = None,
1638 ) -> None:
1639 """
1640 Should not be called manually!
1642 Normally, the `records` from an existing `Rows` object are used
1643 but these can be overwritten with a `records` dict.
1644 `metadata` can be any (un)structured data
1645 `model` is a TypedTable class
1646 """
1647 records = records or {row.id: model(row) for row in rows}
1648 super().__init__(rows.db, records, rows.colnames, rows.compact, rows.response, rows.fields)
1649 self.model = model
1650 self.metadata = metadata or {}
1652 def __len__(self) -> int:
1653 """
1654 Return the count of rows.
1655 """
1656 return len(self.records)
1658 def __iter__(self) -> typing.Iterator[T_MetaInstance]:
1659 """
1660 Loop through the rows.
1661 """
1662 yield from self.records.values()
1664 def __contains__(self, ind: Any) -> bool:
1665 """
1666 Check if an id exists in this result set.
1667 """
1668 return ind in self.records
1670 def first(self) -> T_MetaInstance | None:
1671 """
1672 Get the row with the lowest id.
1673 """
1674 if not self.records:
1675 return None
1677 return next(iter(self))
1679 def last(self) -> T_MetaInstance | None:
1680 """
1681 Get the row with the highest id.
1682 """
1683 if not self.records:
1684 return None
1686 max_id = max(self.records.keys())
1687 return self[max_id]
1689 def find(
1690 self, f: typing.Callable[[T_MetaInstance], Query], limitby: tuple[int, int] = None
1691 ) -> "TypedRows[T_MetaInstance]":
1692 """
1693 Returns a new Rows object, a subset of the original object, filtered by the function `f`.
1694 """
1695 if not self.records:
1696 return self.__class__(self, self.model, {})
1698 records = {}
1699 if limitby:
1700 _min, _max = limitby
1701 else:
1702 _min, _max = 0, len(self)
1703 count = 0
1704 for i, row in self.records.items():
1705 if f(row):
1706 if _min <= count:
1707 records[i] = row
1708 count += 1
1709 if count == _max:
1710 break
1712 return self.__class__(self, self.model, records)
1714 def exclude(self, f: typing.Callable[[T_MetaInstance], Query]) -> "TypedRows[T_MetaInstance]":
1715 """
1716 Removes elements from the calling Rows object, filtered by the function `f`, \
1717 and returns a new Rows object containing the removed elements.
1718 """
1719 if not self.records:
1720 return self.__class__(self, self.model, {})
1721 removed = {}
1722 to_remove = []
1723 for i in self.records:
1724 row = self[i]
1725 if f(row):
1726 removed[i] = self.records[i]
1727 to_remove.append(i)
1729 [self.records.pop(i) for i in to_remove]
1731 return self.__class__(
1732 self,
1733 self.model,
1734 removed,
1735 )
1737 def sort(self, f: typing.Callable[[T_MetaInstance], Any], reverse: bool = False) -> list[T_MetaInstance]:
1738 """
1739 Returns a list of sorted elements (not sorted in place).
1740 """
1741 return [r for (r, s) in sorted(zip(self.records.values(), self), key=lambda r: f(r[1]), reverse=reverse)]
1743 def __str__(self) -> str:
1744 """
1745 Simple string representation.
1746 """
1747 return f"<TypedRows with {len(self)} records>"
1749 def __repr__(self) -> str:
1750 """
1751 Print a table on repr().
1752 """
1753 data = self.as_dict()
1754 headers = list(next(iter(data.values())).keys())
1755 return mktable(data, headers)
1757 def group_by_value(
1758 self, *fields: "str | Field | TypedField[T]", one_result: bool = False, **kwargs: Any
1759 ) -> dict[T, list[T_MetaInstance]]:
1760 """
1761 Group the rows by a specific field (which will be the dict key).
1762 """
1763 kwargs["one_result"] = one_result
1764 result = super().group_by_value(*fields, **kwargs)
1765 return typing.cast(dict[T, list[T_MetaInstance]], result)
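# Usage sketch (illustrative field name):
#
#   by_author = rows.group_by_value("author")   # -> {author_value: [row, ...], ...}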
1767 def column(self, column: str = None) -> list[Any]:
1768 """
1769 Get a list of all values in a specific column.
1771 Example:
1772 rows.column('name') -> ['Name 1', 'Name 2', ...]
1773 """
1774 return typing.cast(list[Any], super().column(column))
1776 def as_csv(self) -> str:
1777 """
1778 Dump the data to csv.
1779 """
1780 return typing.cast(str, super().as_csv())
1782 def as_dict(
1783 self,
1784 key: str = None,
1785 compact: bool = False,
1786 storage_to_dict: bool = False,
1787 datetime_to_str: bool = False,
1788 custom_types: list[type] = None,
1789 ) -> dict[int, dict[str, Any]]:
1790 """
1791 Get the data in a dict of dicts.
1792 """
1793 if any([key, compact, storage_to_dict, datetime_to_str, custom_types]):
1794 # functionality not guaranteed
1795 return typing.cast(
1796 dict[int, dict[str, Any]],
1797 super().as_dict(
1798 key or "id",
1799 compact,
1800 storage_to_dict,
1801 datetime_to_str,
1802 custom_types,
1803 ),
1804 )
1806 return {k: v.as_dict() for k, v in self.records.items()}
1808 def as_json(self, mode: str = "object", default: typing.Callable[[Any], Any] = None) -> str:
1809 """
1810 Turn the data into a dict and then dump to JSON.
1811 """
1812 return typing.cast(str, super().as_json(mode=mode, default=default))
1814 def json(self, mode: str = "object", default: typing.Callable[[Any], Any] = None) -> str:
1815 """
1816 Turn the data into a dict and then dump to JSON.
1817 """
1818 return typing.cast(str, super().as_json(mode=mode, default=default))
1820 def as_list(
1821 self,
1822 compact: bool = False,
1823 storage_to_dict: bool = False,
1824 datetime_to_str: bool = False,
1825 custom_types: list[type] = None,
1826 ) -> list[dict[str, Any]]:
1827 """
1828 Get the data in a list of dicts.
1829 """
1830 if any([compact, storage_to_dict, datetime_to_str, custom_types]):
1831 return typing.cast(
1832 list[dict[str, Any]], super().as_list(compact, storage_to_dict, datetime_to_str, custom_types)
1833 )
1834 return [_.as_dict() for _ in self.records.values()]
1836 def __getitem__(self, item: int) -> T_MetaInstance:
1837 """
1838 You can get a specific row by ID from a TypedRows by using rows[idx] notation.
1840 Since pydal's implementation differs (they expect a list instead of a dict with id keys),
1841 using rows[0] will return the first row, regardless of its id.
1842 """
1843 try:
1844 return self.records[item]
1845 except KeyError as e:
1846 if item == 0 and (row := self.first()):
1847 # special case: pydal internals think Rows.records is a list, not a dict
1848 return row
1850 raise e
1852 def get(self, item: int) -> typing.Optional[T_MetaInstance]:
1853 """
1854 Get a row by ID, or receive None if it isn't in this result set.
1855 """
1856 return self.records.get(item)
1858 def join(
1859 self,
1860 field: "Field | TypedField[Any]",
1861 name: str = None,
1862 constraint: Query = None,
1863 fields: list[str | Field] = None,
1864 orderby: Optional[str | Field] = None,
1865 ) -> T_MetaInstance:
1866 """
1867 This can be used to JOIN with some relationships after the initial select.
1869 Using the QueryBuilder's .join() method is preferred!
1870 """
1871 result = super().join(field, name, constraint, fields or [], orderby)
1872 return typing.cast(T_MetaInstance, result)
1874 def export_to_csv_file(
1875 self,
1876 ofile: typing.TextIO,
1877 null: Any = "<NULL>",
1878 delimiter: str = ",",
1879 quotechar: str = '"',
1880 quoting: int = csv.QUOTE_MINIMAL,
1881 represent: bool = False,
1882 colnames: list[str] = None,
1883 write_colnames: bool = True,
1884 *args: Any,
1885 **kwargs: Any,
1886 ) -> None:
1887 """
1888 Shadow export_to_csv_file from Rows, but with typing.
1890 See http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#Exporting-and-importing-data
1891 """
1892 super().export_to_csv_file(
1893 ofile,
1894 null,
1895 *args,
1896 delimiter=delimiter,
1897 quotechar=quotechar,
1898 quoting=quoting,
1899 represent=represent,
1900 colnames=colnames or self.colnames,
1901 write_colnames=write_colnames,
1902 **kwargs,
1903 )
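export_to_csv_file writes to any file-like object. A sketch, assuming `rows` is a previously collected TypedRows:

```
import io

# Hypothetical example; `rows` is a TypedRows[Person] collected earlier.
buffer = io.StringIO()
rows.export_to_csv_file(buffer, delimiter=";", write_colnames=True)
print(buffer.getvalue())

# writing straight to a file on disk works the same way:
with open("people.csv", "w", newline="") as f:
    rows.export_to_csv_file(f)
```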
1905 @classmethod
1906 def from_rows(
1907 cls, rows: Rows, model: typing.Type[T_MetaInstance], metadata: Metadata = None
1908 ) -> "TypedRows[T_MetaInstance]":
1909 """
1910 Internal method to convert a Rows object to a TypedRows.
1911 """
1912 return cls(rows, model, metadata=metadata)
1914 def __json__(self) -> dict[str, Any]:
1915 """
1916 For json-fix.
1917 """
1918 return typing.cast(dict[str, Any], self.as_dict())
1920 def __getstate__(self) -> dict[str, Any]:
1921 """
1922 Used by dill to dump to bytes (excludes the db connection etc.).
1923 """
1924 return {
1925 "metadata": json.dumps(self.metadata, default=str),
1926 "records": self.records,
1927 }
1929 def __setstate__(self, state: dict[str, Any]) -> None:
1930 """
1931 Used by dill when loading from a bytestring.
1932 """
1933 state["metadata"] = json.loads(state["metadata"])
1934 self.__dict__.update(state)
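`__getstate__`/`__setstate__` are what allow the caching module to serialize a result set with dill. A roundtrip sketch (dill is a third-party package; `rows` is assumed to be a collected TypedRows):

```
import dill

# Hypothetical example; `rows` is a TypedRows[Person] collected earlier.
blob = dill.dumps(rows)      # __getstate__: metadata dumped to JSON + records
restored = dill.loads(blob)  # __setstate__: metadata parsed back into a dict
```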
1937from .caching import ( # noqa: E402
1938 _remove_cache,
1939 _TypedalCache,
1940 _TypedalCacheDependency,
1941 create_and_hash_cache_key,
1942 get_expire,
1943 load_from_cache,
1944 save_to_cache,
1945)
1948class QueryBuilder(typing.Generic[T_MetaInstance]):
1949 """
1950 Abstraction on top of pydal's query system.
1951 """
1953 model: typing.Type[T_MetaInstance]
1954 query: Query
1955 select_args: list[Any]
1956 select_kwargs: dict[str, Any]
1957 relationships: dict[str, Relationship[Any]]
1958 metadata: Metadata
1960 def __init__(
1961 self,
1962 model: typing.Type[T_MetaInstance],
1963 add_query: Optional[Query] = None,
1964 select_args: Optional[list[Any]] = None,
1965 select_kwargs: Optional[dict[str, Any]] = None,
1966 relationships: dict[str, Relationship[Any]] = None,
1967 metadata: Metadata = None,
1968 ):
1969 """
1970 Normally, you wouldn't initialize a QueryBuilder manually, but obtain one via a method on a TypedTable.
1972 Example:
1973 MyTable.where(...) -> QueryBuilder[MyTable]
1974 """
1975 self.model = model
1976 table = model._ensure_table_defined()
1977 default_query = typing.cast(Query, table.id > 0)
1978 self.query = add_query or default_query
1979 self.select_args = select_args or []
1980 self.select_kwargs = select_kwargs or {}
1981 self.relationships = relationships or {}
1982 self.metadata = metadata or {}
1984 def __str__(self) -> str:
1985 """
1986 Simple string representation for the query builder.
1987 """
1988 return f"QueryBuilder for {self.model}"
1990 def __repr__(self) -> str:
1991 """
1992 Advanced string representation for the query builder.
1993 """
1994 return (
1995 f"<QueryBuilder for {self.model} with "
1996 f"{len(self.select_args)} select args; "
1997 f"{len(self.select_kwargs)} select kwargs; "
1998 f"{len(self.relationships)} relationships; "
1999 f"query: {bool(self.query)}; "
2000 f"metadata: {self.metadata}; "
2001 f">"
2002 )
2004 def __bool__(self) -> bool:
2005 """
2006 The QueryBuilder is truthy if its query matches at least one row.
2007 """
2008 return self.count() > 0
2010 def _extend(
2011 self,
2012 add_query: Optional[Query] = None,
2013 overwrite_query: Optional[Query] = None,
2014 select_args: Optional[list[Any]] = None,
2015 select_kwargs: Optional[dict[str, Any]] = None,
2016 relationships: dict[str, Relationship[Any]] = None,
2017 metadata: Metadata = None,
2018 ) -> "QueryBuilder[T_MetaInstance]":
2019 return QueryBuilder(
2020 self.model,
2021 (add_query & self.query) if add_query else overwrite_query or self.query,
2022 (self.select_args + select_args) if select_args else self.select_args,
2023 (self.select_kwargs | select_kwargs) if select_kwargs else self.select_kwargs,
2024 (self.relationships | relationships) if relationships else self.relationships,
2025 (self.metadata | (metadata or {})) if metadata else self.metadata,
2026 )
2028 def select(self, *fields: Any, **options: Any) -> "QueryBuilder[T_MetaInstance]":
2029 """
2030 Fields: database columns by name ('id'), by field reference (table.id) or other (e.g. table.ALL).
2032 Options:
2033 paraphrased from the web2py pydal docs,
2034 For more info, see http://www.web2py.com/books/default/chapter/29/06/the-database-abstraction-layer#orderby-groupby-limitby-distinct-having-orderby_on_limitby-join-left-cache
2036 orderby: field(s) to order by. Supported:
2037 table.name - sort by name, ascending
2038 ~table.name - sort by name, descending
2039 <random> - sort randomly
2040 table.name|table.id - sort by two fields (first name, then id)
2042 groupby, having: together with orderby:
2043 groupby can be a field (e.g. table.name) to group records by
2044 having can be a query, only those `having` the condition are grouped
2046 limitby: tuple of min and max. When using the query builder, .paginate(limit, page) is recommended.
2047 distinct: bool/field. Only select rows that differ
2048 orderby_on_limitby (bool, default: True): by default, an implicit orderby is added when doing limitby.
2049 join: othertable.on(query) - do an INNER JOIN. Using TypeDAL relationships with .join() is recommended!
2050 left: othertable.on(query) - do a LEFT JOIN. Using TypeDAL relationships with .join() is recommended!
2051 cache: cache the query result to speed up repeated queries; e.g. (cache=(cache.ram, 3600), cacheable=True)
2052 """
2053 return self._extend(select_args=list(fields), select_kwargs=options)
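A hedged sketch of `.select()` with a few of the options listed above, assuming a hypothetical `Person` model with `name` and `age` fields:

```
# Hypothetical example; Person is a TypedTable model with `name` and `age`.
rows = (
    Person.where(lambda p: p.age >= 18)
    .select(Person.id, Person.name, orderby=~Person.name, distinct=True)
    .collect()
)
```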
2055 def where(
2056 self,
2057 *queries_or_lambdas: Query | typing.Callable[[typing.Type[T_MetaInstance]], Query],
2058 **filters: Any,
2059 ) -> "QueryBuilder[T_MetaInstance]":
2060 """
2061 Extend the builder's query.
2063 Can be used in multiple ways:
2064 .where(Query) -> with a direct query such as `Table.id == 5`
2065 .where(lambda table: table.id == 5) -> with a query via a lambda
2066 .where(id=5) -> via keyword arguments
2068 When chaining multiple .where() calls, they will be ANDed:
2069 .where(lambda table: table.id == 5).where(lambda table: table.id == 6) == (table.id == 5) & (table.id == 6)
2070 When passing multiple queries to a single .where, they will be ORed:
2071 .where(lambda table: table.id == 5, lambda table: table.id == 6) == (table.id == 5) | (table.id == 6)
2072 """
2073 new_query = self.query
2074 table = self.model._ensure_table_defined()
2076 for field, value in filters.items():
2077 new_query &= table[field] == value
2079 subquery: DummyQuery | Query = DummyQuery()
2080 for query_or_lambda in queries_or_lambdas:
2081 if isinstance(query_or_lambda, _Query):
2082 subquery |= typing.cast(Query, query_or_lambda)
2083 elif callable(query_or_lambda):
2084 if result := query_or_lambda(self.model):
2085 subquery |= result
2086 elif isinstance(query_or_lambda, (Field, _Field)) or is_typed_field(query_or_lambda):
2087 subquery |= typing.cast(Query, query_or_lambda != None)
2088 else:
2089 raise ValueError(f"Unexpected query type ({type(query_or_lambda)}).")
2091 if subquery:
2092 new_query &= subquery
2094 return self._extend(overwrite_query=new_query)
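A sketch of the three `.where()` forms and the AND/OR semantics described in the docstring, assuming a hypothetical `Person` model with `name` and `age` fields:

```
# Hypothetical example; Person is assumed to be a TypedTable model that was
# already registered via @define / db.define.
q1 = Person.where(Person.age == 42)        # direct query
q2 = Person.where(lambda p: p.age == 42)   # query built in a lambda
q3 = Person.where(age=42)                  # keyword filter

# chained .where() calls are ANDed:
adults_named_bob = Person.where(lambda p: p.age >= 18).where(name="Bob")

# multiple queries passed to a single .where() are ORed:
bob_or_alice = Person.where(lambda p: p.name == "Bob", lambda p: p.name == "Alice")
```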
2096 def join(
2097 self,
2098 *fields: str | typing.Type[TypedTable],
2099 method: JOIN_OPTIONS = None,
2100 on: OnQuery | list[Expression] | Expression = None,
2101 condition: Condition = None,
2102 ) -> "QueryBuilder[T_MetaInstance]":
2103 """
2104 Include relationship fields in the result.
2106 `fields` can be names of Relationships on the current model.
2107 If no fields are passed, all will be used.
2109 By default, the `method` defined in the relationship is used.
2110 This can be overwritten with the `method` keyword argument (left or inner)
2111 """
2112 # todo: allow limiting amount of related rows returned for join?
2114 relationships = self.model.get_relationships()
2116 if condition and on:
2117 raise ValueError("condition and on can not be used together!")
2118 elif condition:
2119 if len(fields) != 1:
2120 raise ValueError("join(field, condition=...) can only be used with exactly one field!")
2122 if isinstance(condition, pydal.objects.Query):
2123 condition = as_lambda(condition)
2125 relationships = {str(fields[0]): relationship(fields[0], condition=condition, join=method)}
2126 elif on:
2127 if len(fields) != 1:
2128 raise ValueError("join(field, on=...) can only be used with exactly one field!")
2130 if isinstance(on, pydal.objects.Expression):
2131 on = [on]
2133 if isinstance(on, list):
2134 on = as_lambda(on)
2135 relationships = {str(fields[0]): relationship(fields[0], on=on, join=method)}
2137 else:
2138 if fields:
2139 # only join on the specified relationships
2140 relationships = {str(k): relationships[str(k)] for k in fields}
2142 if method:
2143 relationships = {str(k): r.clone(join=method) for k, r in relationships.items()}
2145 return self._extend(relationships=relationships)
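A sketch of the three ways to `.join()`, assuming a hypothetical `Person` model with a relationship named "pets" and a related `Pet` model with an `owner` reference (all names are illustrative):

```
# Hypothetical example; "pets", Pet and Pet.owner are illustrative names.
# join on every defined relationship, using each relationship's own method:
rows = Person.where(lambda p: p.age >= 18).join().collect()

# join only on the "pets" relationship, forcing a LEFT join:
rows = Person.where(lambda p: p.age >= 18).join("pets", method="left").collect()

# ad-hoc join with an explicit condition (exactly one field/table allowed):
rows = (
    Person.where(lambda p: p.age >= 18)
    .join(Pet, condition=lambda person, pet: person.id == pet.owner)
    .collect()
)
```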
2147 def cache(
2148 self, *deps: Any, expires_at: Optional[dt.datetime] = None, ttl: Optional[int | dt.timedelta] = None
2149 ) -> "QueryBuilder[T_MetaInstance]":
2150 """
2151 Enable caching for this query to load repeated calls from a dill-serialized cached row \
2152 instead of executing the SQL and collecting matching rows again.
2153 """
2154 existing = self.metadata.get("cache", {})
2156 metadata: Metadata = {}
2158 cache_meta = typing.cast(
2159 CacheMetadata,
2160 self.metadata.get("cache", {})
2161 | {
2162 "enabled": True,
2163 "depends_on": existing.get("depends_on", []) + [str(_) for _ in deps],
2164 "expires_at": get_expire(expires_at=expires_at, ttl=ttl),
2165 },
2166 )
2168 metadata["cache"] = cache_meta
2169 return self._extend(metadata=metadata)
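A sketch of `.cache()`, assuming the TypeDAL cache tables are set up for this database and a hypothetical `Person` model exists:

```
import datetime as dt

# Hypothetical example; requires TypeDAL's cache tables to be available.
# cache the result for an hour (ttl in seconds or as a timedelta):
rows = Person.where(lambda p: p.age >= 18).cache(ttl=3600).collect()

# or with an explicit expiry moment:
rows = (
    Person.where(lambda p: p.age >= 18)
    .cache(expires_at=dt.datetime.now() + dt.timedelta(hours=1))
    .collect()
)
```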
2171 def _get_db(self) -> TypeDAL:
2172 if db := self.model._db:
2173 return db
2174 else: # pragma: no cover
2175 raise EnvironmentError("@define or db.define is not called on this class yet!")
2177 def _select_arg_convert(self, arg: Any) -> Any:
2178 # typedfield are not really used at runtime anymore, but leave it in for safety:
2179 if isinstance(arg, TypedField): # pragma: no cover
2180 arg = arg._field
2182 return arg
2184 def delete(self) -> list[int]:
2185 """
2186 Based on the current query, delete rows and return a list of deleted IDs.
2187 """
2188 db = self._get_db()
2189 removed_ids = [_.id for _ in db(self.query).select("id")]
2190 if db(self.query).delete():
2191 # success!
2192 return removed_ids
2194 return []
2196 def _delete(self) -> str:
2197 db = self._get_db()
2198 return str(db(self.query)._delete())
2200 def update(self, **fields: Any) -> list[int]:
2201 """
2202 Based on the current query, update `fields` and return a list of updated IDs.
2203 """
2204 # todo: limit?
2205 db = self._get_db()
2206 updated_ids = db(self.query).select("id").column("id")
2207 if db(self.query).update(**fields):
2208 # success!
2209 return updated_ids
2211 return []
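Both mutation helpers return the affected ids. A sketch with a hypothetical `Person` model:

```
# Hypothetical example; Person is assumed to be a registered TypedTable model.
deleted_ids = Person.where(lambda p: p.age < 0).delete()  # e.g. [3, 7]
updated_ids = Person.where(name="Bob").update(age=43)     # e.g. [1, 5]
```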
2213 def _update(self, **fields: Any) -> str:
2214 db = self._get_db()
2215 return str(db(self.query)._update(**fields))
2217 def _before_query(self, mut_metadata: Metadata, add_id: bool = True) -> tuple[Query, list[Any], dict[str, Any]]:
2218 select_args = [self._select_arg_convert(_) for _ in self.select_args] or [self.model.ALL]
2219 select_kwargs = self.select_kwargs.copy()
2220 query = self.query
2221 model = self.model
2222 mut_metadata["query"] = query
2223 # require at least id of main table:
2224 select_fields = ", ".join([str(_) for _ in select_args])
2225 tablename = str(model)
2227 if add_id and f"{tablename}.id" not in select_fields:
2228 # specific fields were selected, but the required ID of the main table is missing.
2229 select_args.append(model.id)
2231 if self.relationships:
2232 query, select_args = self._handle_relationships_pre_select(query, select_args, select_kwargs, mut_metadata)
2234 return query, select_args, select_kwargs
2236 def to_sql(self, add_id: bool = False) -> str:
2237 """
2238 Generate the SQL for the built query.
2239 """
2240 db = self._get_db()
2242 query, select_args, select_kwargs = self._before_query({}, add_id=add_id)
2244 return str(db(query)._select(*select_args, **select_kwargs))
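`to_sql()` is handy for debugging the query a builder would execute; a sketch (the exact output depends on the database dialect):

```
# Hypothetical example; prints something along the lines of
# SELECT ... FROM person WHERE (person.age >= 18);
print(Person.where(lambda p: p.age >= 18).to_sql())
```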
2246 def _collect(self) -> str:
2247 """
2248 Alias for to_sql, pydal-like syntax.
2249 """
2250 return self.to_sql()
2252 def _collect_cached(self, metadata: Metadata) -> "TypedRows[T_MetaInstance] | None":
2253 expires_at = metadata["cache"].get("expires_at")
2254 metadata["cache"] |= {
2255 # key is partly dependent on cache metadata, but not on these:
2256 "key": None,
2257 "status": None,
2258 "cached_at": None,
2259 "expires_at": None,
2260 } # type: ignore
2262 _, key = create_and_hash_cache_key(
2263 self.model,
2264 metadata,
2265 self.query,
2266 self.select_args,
2267 self.select_kwargs,
2268 self.relationships.keys(),
2269 )
2271 # re-set after creating key:
2272 metadata["cache"]["expires_at"] = expires_at
2273 metadata["cache"]["key"] = key
2275 return load_from_cache(key)
2277 def collect(
2278 self, verbose: bool = False, _to: typing.Type["TypedRows[Any]"] = None, add_id: bool = True
2279 ) -> "TypedRows[T_MetaInstance]":
2280 """
2281 Execute the built query and turn it into model instances, while handling relationships.
2282 """
2283 if _to is None:
2284 _to = TypedRows
2286 db = self._get_db()
2287 metadata = typing.cast(Metadata, self.metadata.copy())
2289 if metadata.get("cache", {}).get("enabled") and (result := self._collect_cached(metadata)):
2290 return result
2292 query, select_args, select_kwargs = self._before_query(metadata, add_id=add_id)
2294 metadata["sql"] = db(query)._select(*select_args, **select_kwargs)
2296 if verbose: # pragma: no cover
2297 print(metadata["sql"])
2299 rows: Rows = db(query).select(*select_args, **select_kwargs)
2301 metadata["final_query"] = str(query)
2302 metadata["final_args"] = [str(_) for _ in select_args]
2303 metadata["final_kwargs"] = select_kwargs
2305 if verbose: # pragma: no cover
2306 print(rows)
2308 if not self.relationships:
2309 # easy
2310 typed_rows = _to.from_rows(rows, self.model, metadata=metadata)
2312 else:
2313 # harder: try to match rows to the objects they belong to
2314 # assume structure of {'table': <data>} per row.
2315 # if that's not the case, return default behavior again
2316 typed_rows = self._collect_with_relationships(rows, metadata=metadata, _to=_to)
2318 # only saves if requested in metadata:
2319 return save_to_cache(typed_rows, rows)
2321 def _handle_relationships_pre_select(
2322 self,
2323 query: Query,
2324 select_args: list[Any],
2325 select_kwargs: dict[str, Any],
2326 metadata: Metadata,
2327 ) -> tuple[Query, list[Any]]:
2328 db = self._get_db()
2329 model = self.model
2331 metadata["relationships"] = set(self.relationships.keys())
2333 # query = self._update_query_for_inner(db, model, query)
2334 join = []
2335 for key, relation in self.relationships.items():
2336 if not relation.condition or relation.join != "inner":
2337 continue
2339 other = relation.get_table(db)
2340 other = other.with_alias(f"{key}_{hash(relation)}")
2341 join.append(other.on(relation.condition(model, other)))
2343 if limitby := select_kwargs.pop("limitby", None):
2344 # if limitby + relationships:
2345 # 1. get IDs of main table entries that match 'query'
2346 # 2. change query to .belongs(id)
2347 # 3. add joins etc
2349 kwargs = {"limitby": limitby}
2351 if join:
2352 kwargs["join"] = join
2354 ids = db(query)._select(model.id, **kwargs)
2355 query = model.id.belongs(ids)
2356 metadata["ids"] = ids
2358 if join:
2359 select_kwargs["join"] = join
2361 left = []
2363 for key, relation in self.relationships.items():
2364 other = relation.get_table(db)
2365 method: JOIN_OPTIONS = relation.join or DEFAULT_JOIN_OPTION
2367 select_fields = ", ".join([str(_) for _ in select_args])
2368 pre_alias = str(other)
2370 if f"{other}." not in select_fields:
2371 # no fields of other selected. add .ALL:
2372 select_args.append(other.ALL)
2373 elif f"{other}.id" not in select_fields:
2374 # fields of other selected, but required ID is missing.
2375 select_args.append(other.id)
2377 if relation.on:
2378 # if it has a .on, it's always a left join!
2379 on = relation.on(model, other)
2380 if not isinstance(on, list): # pragma: no cover
2381 on = [on]
2383 left.extend(on)
2384 elif method == "left":
2385 # .on not given, generate it:
2386 other = other.with_alias(f"{key}_{hash(relation)}")
2387 condition = typing.cast(Query, relation.condition(model, other))
2388 left.append(other.on(condition))
2389 else:
2390 # else: inner join (handled earlier)
2391 other = other.with_alias(f"{key}_{hash(relation)}") # only for replace
2392 # other = other.with_alias(f"{key}_{hash(relation)}")
2393 # query &= relation.condition(model, other)
2395 # if no fields of 'other' are included, add other.ALL
2396 # else: only add other.id if missing
2397 select_fields = ", ".join([str(_) for _ in select_args])
2399 post_alias = str(other).split(" AS ")[-1]
2400 if pre_alias != post_alias:
2401 # replace .select's with aliased:
2402 select_fields = select_fields.replace(
2403 f"{pre_alias}.",
2404 f"{post_alias}.",
2405 )
2407 select_args = select_fields.split(", ")
2409 select_kwargs["left"] = left
2410 return query, select_args
2412 def _collect_with_relationships(
2413 self, rows: Rows, metadata: Metadata, _to: typing.Type["TypedRows[Any]"] = None
2414 ) -> "TypedRows[T_MetaInstance]":
2415 """
2416 Transform the raw rows into Typed Table model instances.
2417 """
2418 db = self._get_db()
2419 main_table = self.model._ensure_table_defined()
2421 records = {}
2422 seen_relations: dict[str, set[str]] = defaultdict(set) # main id -> set of col + id for relation
2424 for row in rows:
2425 main = row[main_table]
2426 main_id = main.id
2428 if main_id not in records:
2429 records[main_id] = self.model(main)
2430 records[main_id]._with = list(self.relationships.keys())
2432 # set up all relationship defaults (once)
2433 for col, relationship in self.relationships.items():
2434 records[main_id][col] = [] if relationship.multiple else None
2436 # now add other relationship data
2437 for column, relation in self.relationships.items():
2438 relationship_column = f"{column}_{hash(relation)}"
2440 # relationship_column works for aliases with the same target column.
2441 # if col + relationship not in the row, just use the regular name.
2443 relation_data = (
2444 row[relationship_column] if relationship_column in row else row[relation.get_table_name()]
2445 )
2447 if relation_data.id is None:
2448 # always skip None ids
2449 continue
2451 if f"{column}-{relation_data.id}" in seen_relations[main_id]:
2452 # speed up duplicates
2453 continue
2454 else:
2455 seen_relations[main_id].add(f"{column}-{relation_data.id}")
2457 relation_table = relation.get_table(db)
2458 # ideally an instance of a typed table, or a regular row otherwise:
2459 instance = relation_table(relation_data) if looks_like(relation_table, TypedTable) else relation_data
2461 if relation.multiple:
2462 # create list of T
2463 if not isinstance(records[main_id].get(column), list): # pragma: no cover
2464 # should already be set up before!
2465 setattr(records[main_id], column, [])
2467 records[main_id][column].append(instance)
2468 else:
2469 # create single T
2470 records[main_id][column] = instance
2472 return _to(rows, self.model, records, metadata=metadata)
2474 def collect_or_fail(self, exception: Exception = None) -> "TypedRows[T_MetaInstance]":
2475 """
2476 Call .collect() and raise an error if nothing found.
2478 Basically unwraps the Optional type.
2479 """
2480 if result := self.collect():
2481 return result
2483 if not exception:
2484 exception = ValueError("Nothing found!")
2486 raise exception
2488 def __iter__(self) -> typing.Generator[T_MetaInstance, None, None]:
2489 """
2490 You can start iterating a QueryBuilder object before calling collect, for ease of use.
2491 """
2492 yield from self.collect()
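A sketch of the three ways to materialize a builder, assuming a hypothetical `Person` model:

```
# Hypothetical example; Person is assumed to be a registered TypedTable model.
builder = Person.where(lambda p: p.age >= 18)

rows = builder.collect()          # TypedRows[Person]
rows = builder.collect_or_fail()  # raises ValueError("Nothing found!") if empty

for person in builder:            # iterating implicitly calls .collect()
    print(person.id, person.name)
```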
2494 def count(self) -> int:
2495 """
2496 Return the amount of rows matching the current query.
2497 """
2498 db = self._get_db()
2499 model = self.model
2500 query = self.query
2502 for key, relation in self.relationships.items():
2503 if not relation.condition or relation.join != "inner":
2504 continue
2506 other = relation.get_table(db)
2507 other = other.with_alias(f"{key}_{hash(relation)}")
2508 query &= relation.condition(model, other)
2510 return db(query).count()
2512 def __paginate(
2513 self,
2514 limit: int,
2515 page: int = 1,
2516 ) -> "QueryBuilder[T_MetaInstance]":
2517 _from = limit * (page - 1)
2518 _to = limit * page
2520 available = self.count()
2522 metadata: Metadata = {}
2524 metadata["pagination"] = {
2525 "limit": limit,
2526 "current_page": page,
2527 "max_page": math.ceil(available / limit),
2528 "rows": available,
2529 "min_max": (_from, _to),
2530 }
2532 return self._extend(select_kwargs={"limitby": (_from, _to)}, metadata=metadata)
2534 def paginate(self, limit: int, page: int = 1, verbose: bool = False) -> "PaginatedRows[T_MetaInstance]":
2535 """
2536 Paginate transforms the more readable `page` and `limit` to pydal's internal limit and offset.
2538 Note: when using relationships, this limit is only applied to the 'main' table and any number of extra rows \
2539 can be loaded with relationship data!
2540 """
2541 builder = self.__paginate(limit, page)
2543 rows = typing.cast(PaginatedRows[T_MetaInstance], builder.collect(verbose=verbose, _to=PaginatedRows))
2545 rows._query_builder = builder
2546 return rows
2548 def _paginate(
2549 self,
2550 limit: int,
2551 page: int = 1,
2552 ) -> str:
2553 builder = self.__paginate(limit, page)
2554 return builder._collect()
2556 def chunk(self, chunk_size: int) -> typing.Generator["TypedRows[T_MetaInstance]", Any, None]:
2557 """
2558 Generator that yields rows from a paginated source in chunks.
2560 This function retrieves rows from a paginated data source in chunks of the
2561 specified `chunk_size` and yields them as TypedRows.
2563 Example:
2564 ```
2565 for chunk_of_rows in Table.where(SomeTable.id > 5).chunk(100):
2566 for row in chunk_of_rows:
2567 # Process each row within the chunk.
2568 pass
2569 ```
2570 """
2571 page = 1
2573 while rows := self.__paginate(chunk_size, page).collect():
2574 yield rows
2575 page += 1
2577 def first(self, verbose: bool = False) -> T_MetaInstance | None:
2578 """
2579 Get the first row matching the currently built query.
2581 Also applies pagination (limit 1), since it would be a waste to select more rows than needed.
2582 """
2583 if row := self.paginate(page=1, limit=1, verbose=verbose).first():
2584 return self.model.from_row(row)
2585 else:
2586 return None
2588 def _first(self) -> str:
2589 return self._paginate(page=1, limit=1)
2591 def first_or_fail(self, exception: Exception = None, verbose: bool = False) -> T_MetaInstance:
2592 """
2593 Call .first() and raise an error if nothing found.
2595 Basically unwraps the Optional type.
2596 """
2597 if inst := self.first(verbose=verbose):
2598 return inst
2600 if not exception:
2601 exception = ValueError("Nothing found!")
2603 raise exception
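A sketch of `.first()` versus `.first_or_fail()`, assuming a hypothetical `Person` model:

```
# Hypothetical example; Person is assumed to be a registered TypedTable model.
maybe_bob = Person.where(name="Bob").first()      # Person | None
bob = Person.where(name="Bob").first_or_fail()    # Person, or ValueError
```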
2606S = typing.TypeVar("S")
2609class PaginatedRows(TypedRows[T_MetaInstance]):
2610 """
2611 Extension on top of rows that is used when calling .paginate() instead of .collect().
2612 """
2614 _query_builder: QueryBuilder[T_MetaInstance]
2616 @property
2617 def data(self) -> list[T_MetaInstance]:
2618 """
2619 Get the underlying data.
2620 """
2621 return list(self.records.values())
2623 @property
2624 def pagination(self) -> Pagination:
2625 """
2626 Get all page info.
2627 """
2628 pagination_data = self.metadata["pagination"]
2630 has_next_page = pagination_data["current_page"] < pagination_data["max_page"]
2631 has_prev_page = pagination_data["current_page"] > 1
2632 return {
2633 "total_items": pagination_data["rows"],
2634 "current_page": pagination_data["current_page"],
2635 "per_page": pagination_data["limit"],
2636 "total_pages": pagination_data["max_page"],
2637 "has_next_page": has_next_page,
2638 "has_prev_page": has_prev_page,
2639 "next_page": pagination_data["current_page"] + 1 if has_next_page else None,
2640 "prev_page": pagination_data["current_page"] - 1 if has_prev_page else None,
2641 }
2643 def next(self) -> Self: # noqa: A003
2644 """
2645 Get the next page.
2646 """
2647 data = self.metadata["pagination"]
2648 if data["current_page"] >= data["max_page"]:
2649 raise StopIteration("Final Page")
2651 return self._query_builder.paginate(limit=data["limit"], page=data["current_page"] + 1)
2653 def previous(self) -> Self:
2654 """
2655 Get the previous page.
2656 """
2657 data = self.metadata["pagination"]
2658 if data["current_page"] <= 1:
2659 raise StopIteration("First Page")
2661 return self._query_builder.paginate(limit=data["limit"], page=data["current_page"] - 1)
2663 def as_dict(self, *_: Any, **__: Any) -> PaginateDict: # type: ignore
2664 """
2665 Convert to a dictionary with pagination info and original data.
2667 All arguments are ignored!
2668 """
2669 return {"data": super().as_dict(), "pagination": self.pagination}
2672class TypedSet(pydal.objects.Set): # type: ignore # pragma: no cover
2673 """
2674 Used to make pydal Set more typed.
2676 This class is not actually used, only 'cast' by TypeDAL.__call__
2677 """
2679 def count(self, distinct: bool = None, cache: dict[str, Any] = None) -> int:
2680 """
2681 Count returns an int.
2682 """
2683 result = super().count(distinct, cache)
2684 return typing.cast(int, result)
2686 def select(self, *fields: Any, **attributes: Any) -> TypedRows[T_MetaInstance]:
2687 """
2688 Select returns a TypedRows of a user defined table.
2690 Example:
2691 result: TypedRows[MyTable] = db(MyTable.id > 0).select()
2693 for row in result:
2694 typing.reveal_type(row) # MyTable
2695 """
2696 rows = super().select(*fields, **attributes)
2697 return typing.cast(TypedRows[T_MetaInstance], rows)