Coverage for src/typedal/core.py: 100%
856 statements
coverage.py v7.3.2, created at 2023-11-02 14:59 +0100
1"""
2Core functionality of TypeDAL.
3"""
4import contextlib
5import csv
6import datetime as dt
7import inspect
8import json
9import math
10import types
11import typing
12import warnings
13from collections import defaultdict
14from decimal import Decimal
15from typing import Any, Optional
17import pydal
18from pydal._globals import DEFAULT
19from pydal.objects import Field as _Field
20from pydal.objects import Query as _Query
21from pydal.objects import Row, Rows
22from pydal.objects import Table as _Table
23from typing_extensions import Self
25from .helpers import (
26 DummyQuery,
27 all_annotations,
28 all_dict,
29 as_lambda,
30 extract_type_optional,
31 filter_out,
32 instanciate,
33 is_union,
34 looks_like,
35 mktable,
36 origin_is_subclass,
37 to_snake,
38 unwrap_type,
39)
40from .types import (
41 CacheMetadata,
42 Expression,
43 Field,
44 Metadata,
45 PaginateDict,
46 Pagination,
47 Query,
48 _Types,
49)
51# use typing.cast(type, ...) to make mypy happy with unions
52T_annotation = typing.Type[Any] | types.UnionType
53T_Query = typing.Union["Table", Query, bool, None, "TypedTable", typing.Type["TypedTable"]]
54T_Value = typing.TypeVar("T_Value") # actual type of the Field (via Generic)
55T_MetaInstance = typing.TypeVar("T_MetaInstance", bound="TypedTable") # bound="TypedTable"; bound="TableMeta"
56T = typing.TypeVar("T")
58BASIC_MAPPINGS: dict[T_annotation, str] = {
59 str: "string",
60 int: "integer",
61 bool: "boolean",
62 bytes: "blob",
63 float: "double",
64 object: "json",
65 Decimal: "decimal(10,2)",
66 dt.date: "date",
67 dt.time: "time",
68 dt.datetime: "datetime",
69}
72def is_typed_field(cls: Any) -> typing.TypeGuard["TypedField[Any]"]:
73 """
74 Is `cls` an instance or subclass of TypedField?
76 Deprecated
77 """
78 return (
79 isinstance(cls, TypedField)
80 or isinstance(typing.get_origin(cls), type)
81 and issubclass(typing.get_origin(cls), TypedField)
82 )
85JOIN_OPTIONS = typing.Literal["left", "inner", None]
86DEFAULT_JOIN_OPTION: JOIN_OPTIONS = "left"
88# table-ish parameter:
89P_Table = typing.Union[typing.Type["TypedTable"], pydal.objects.Table]
91Condition: typing.TypeAlias = typing.Optional[
92 typing.Callable[
93 # self, other -> Query
94 [P_Table, P_Table],
95 Query | bool,
96 ]
97]
99OnQuery: typing.TypeAlias = typing.Optional[
100 typing.Callable[
101 # self, other -> list of .on statements
102 [P_Table, P_Table],
103 list[Expression],
104 ]
105]
107To_Type = typing.TypeVar("To_Type", type[Any], typing.Type[Any], str)
110class Relationship(typing.Generic[To_Type]):
111 """
112 Define a relationship to another table.
113 """
115 _type: To_Type
116 table: typing.Type["TypedTable"] | type | str
117 condition: Condition
118 on: OnQuery
119 multiple: bool
120 join: JOIN_OPTIONS
122 def __init__(
123 self,
124 _type: To_Type,
125 condition: Condition = None,
126 join: JOIN_OPTIONS = None,
127 on: OnQuery = None,
128 ):
129 """
130 Should not be called directly, use relationship() instead!
131 """
132 if condition and on:
133 warnings.warn(f"Relation | Both specified! {condition=} {on=} {_type=}")
134 raise ValueError("Please specify either a condition or an 'on' statement for this relationship!")
136 self._type = _type
137 self.condition = condition
138 self.join = "left" if on else join # .on is always left join!
139 self.on = on
141 if args := typing.get_args(_type):
142 self.table = unwrap_type(args[0])
143 self.multiple = True
144 else:
145 self.table = _type
146 self.multiple = False
148 if isinstance(self.table, str):
149 self.table = TypeDAL.to_snake(self.table)
151 def clone(self, **update: Any) -> "Relationship[To_Type]":
152 """
153 Create a copy of the relationship, possibly updated.
154 """
155 return self.__class__(
156 update.get("_type") or self._type,
157 update.get("condition") or self.condition,
158 update.get("join") or self.join,
159 update.get("on") or self.on,
160 )
162 def __repr__(self) -> str:
163 """
164 Representation of the relationship.
165 """
166 if callback := self.condition or self.on:
167 src_code = inspect.getsource(callback).strip()
168 else:
169 cls_name = self._type if isinstance(self._type, str) else self._type.__name__ # type: ignore
170 src_code = f"to {cls_name} (missing condition)"
172 join = f":{self.join}" if self.join else ""
173 return f"<Relationship{join} {src_code}>"
175 def get_table(self, db: "TypeDAL") -> typing.Type["TypedTable"]:
176 """
177 Get the table this relationship is bound to.
178 """
179 table = self.table # can be a string because db wasn't available yet
180 if isinstance(table, str):
181 if mapped := db._class_map.get(table):
182 # yay
183 return mapped
185 # boo, fall back to untyped table but pretend it is typed:
186 return typing.cast(typing.Type["TypedTable"], db[table]) # eh close enough!
188 return table
190 def get_table_name(self) -> str:
191 """
192 Get the name of the table this relationship is bound to.
193 """
194 if isinstance(self.table, str):
195 return self.table
197 if isinstance(self.table, pydal.objects.Table):
198 return str(self.table)
200 # else: typed table
201 try:
202 table = self.table._ensure_table_defined() if issubclass(self.table, TypedTable) else self.table
203 except Exception: # pragma: no cover
204 table = self.table
206 return str(table)
208 def __get__(self, instance: Any, owner: Any) -> typing.Optional[list[Any]] | "Relationship[To_Type]":
209 """
210 Relationship is a descriptor class, which can be returned from a class but not an instance.
212 For an instance, using .join() will replace the Relationship with the actual data.
213 If you forgot to join, a warning will be shown and empty data will be returned.
214 """
215 if not instance:
216 # relationship queried on class, that's allowed
217 return self
219 warnings.warn(
220 "Trying to get data from a relationship object! Did you forget to join it?", category=RuntimeWarning
221 )
222 if self.multiple:
223 return []
224 else:
225 return None
228def relationship(
229 _type: To_Type, condition: Condition = None, join: JOIN_OPTIONS = None, on: OnQuery = None
230) -> Relationship[To_Type]:
231 """
232 Define a relationship to another table, when its id is not stored in the current table.
234 Example:
235 class User(TypedTable):
236 name: str
238 posts = relationship(list["Post"], condition=lambda self, post: self.id == post.author, join='left')
240 class Post(TypedTable):
241 title: str
242 author: User
244 User.join("posts").first() # User instance with list[Post] in .posts
246 Here, Post stores the User ID, but `relationship(list["Post"])` still allows you to get the user's posts.
247 In this case, the join strategy is set to LEFT so users without posts are also still selected.
249 For complex queries with a pivot table, an `on` can be set instead of `condition`:
250 class User(TypedTable):
251 ...
253 tags = relationship(list["Tag"], on=lambda self, tag: [
254 Tagged.on(Tagged.entity == self.gid),
255 Tag.on((Tagged.tag == tag.id)),
256 ])
258 If you tried to capture this in a single 'condition', pydal would create a cross join, which is much less efficient.
259 """
260 return Relationship(_type, condition, join, on)
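# A minimal usage sketch of relationship() + join(), assuming hypothetical
# Author/Book models purely for illustration:
#
#   @db.define
#   class Author(TypedTable):
#       name: str
#
#       books = relationship(list["Book"], condition=lambda self, book: self.id == book.author)
#
#   @db.define
#   class Book(TypedTable):
#       title: str
#       author: Author
#
#   author = Author.join("books").first()  # -> Author instance with list[Book] in .books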
263def _generate_relationship_condition(
264 _: typing.Type["TypedTable"], key: str, field: typing.Union["TypedField[Any]", "Table", typing.Type["TypedTable"]]
265) -> Condition:
266 origin = typing.get_origin(field)
267 # else: generic
269 if origin == list:
270 # field = typing.get_args(field)[0] # actual field
271 # return lambda _self, _other: cls[key].contains(field)
273 return lambda _self, _other: _self[key].contains(_other.id)
274 else:
275 # normal reference
276 # return lambda _self, _other: cls[key] == field.id
277 return lambda _self, _other: _self[key] == _other.id
280def to_relationship(
281 cls: typing.Type["TypedTable"] | type[Any],
282 key: str,
283 field: typing.Union["TypedField[Any]", "Table", typing.Type["TypedTable"]],
284) -> typing.Optional[Relationship[Any]]:
285 """
286 Used to automatically create relationship instance for reference fields.
288 Example:
289 class MyTable(TypedTable):
290 reference: OtherTable
292 `reference` contains the id of an OtherTable row.
293 MyTable.relationships should have 'reference' as a relationship, so `MyTable.join('reference')` should work.
295 This function will automatically perform this logic (called in db.define):
296 to_relationship(MyTable, 'reference', OtherTable) -> Relationship[OtherTable]
298 Also works for list:reference (list[OtherTable]) and TypedField[OtherTable].
299 """
300 if looks_like(field, TypedField):
301 if args := typing.get_args(field):
302 field = args[0]
303 else:
304 # weird
305 return None
307 field, optional = extract_type_optional(field)
309 try:
310 condition = _generate_relationship_condition(cls, key, field)
311 except Exception as e: # pragma: no cover
312 warnings.warn("Could not generate Relationship condition", source=e)
313 condition = None
315 if not condition: # pragma: no cover
316 # something went wrong, not a valid relationship
317 warnings.warn(f"Invalid relationship for {cls.__name__}.{key}: {field}")
318 return None
320 join = "left" if optional or typing.get_origin(field) == list else "inner"
322 return Relationship(typing.cast(type[TypedTable], field), condition, typing.cast(JOIN_OPTIONS, join))
325class TypeDAL(pydal.DAL): # type: ignore
326 """
327 Drop-in replacement for pyDAL with a layer that converts class-based table definitions to classical pydal define_table calls.
328 """
330 def __init__(
331 self,
332 uri: str = "sqlite://dummy.db",
333 pool_size: int = 0,
334 folder: Optional[str] = None,
335 db_codec: str = "UTF-8",
336 check_reserved: Optional[list[str]] = None,
337 migrate: bool = True,
338 fake_migrate: bool = False,
339 migrate_enabled: bool = True,
340 fake_migrate_all: bool = False,
341 decode_credentials: bool = False,
342 driver_args: Optional[dict[str, Any]] = None,
343 adapter_args: Optional[dict[str, Any]] = None,
344 attempts: int = 5,
345 auto_import: bool = False,
346 bigint_id: bool = False,
347 debug: bool = False,
348 lazy_tables: bool = False,
349 db_uid: Optional[str] = None,
350 after_connection: typing.Callable[..., Any] = None,
351 tables: Optional[list[str]] = None,
352 ignore_field_case: bool = True,
353 entity_quoting: bool = True,
354 table_hash: Optional[str] = None,
355 enable_typedal_caching: bool = True,
356 ) -> None:
357 """
358 Adds some internal tables after calling pydal's default init.
360 Set enable_typedal_caching to False to disable this behavior.
361 """
362 super().__init__(
363 uri,
364 pool_size,
365 folder,
366 db_codec,
367 check_reserved,
368 migrate,
369 fake_migrate,
370 migrate_enabled,
371 fake_migrate_all,
372 decode_credentials,
373 driver_args,
374 adapter_args,
375 attempts,
376 auto_import,
377 bigint_id,
378 debug,
379 lazy_tables,
380 db_uid,
381 after_connection,
382 tables,
383 ignore_field_case,
384 entity_quoting,
385 table_hash,
386 )
388 if enable_typedal_caching:
389 self.define(_TypedalCache, migrate=True)
390 self.define(_TypedalCacheDependency, migrate=True)
392 default_kwargs: typing.ClassVar[typing.Dict[str, Any]] = {
393 # fields are 'required' (notnull) by default:
394 "notnull": True,
395 }
397 # maps table name to typedal class, for resolving future references
398 _class_map: typing.ClassVar[dict[str, typing.Type["TypedTable"]]] = {}
400 def _define(self, cls: typing.Type[T], **kwargs: Any) -> typing.Type[T]:
401 # todo: new relationship item added should also invalidate (previously unrelated) cache result
403 # todo: option to enable/disable cache dependency behavior:
404 # - don't set _before_update and _before_delete
405 # - don't add TypedalCacheDependency entry
406 # - don't invalidate other item on new row of this type
408 # when __future__.annotations is implemented, cls.__annotations__ will not work anymore as below.
409 # proper way to handle this would be (but gives error right now due to Table implementing magic methods):
410 # typing.get_type_hints(cls, globalns=None, localns=None)
412 # dirty way (with evil eval):
413 # [eval(v) for k, v in cls.__annotations__.items()]
414 # this however also stops working when variables outside this scope or even references to other
415 # objects are used. So for now, this package will NOT work when from __future__ import annotations is used,
416 # and might break in the future, when this annotations behavior is enabled by default.
418 # non-annotated variables have to be passed to define_table as kwargs
419 full_dict = all_dict(cls) # includes properties from parents (e.g. useful for mixins)
421 tablename = self.to_snake(cls.__name__)
422 # grab annotations of cls and its parents:
423 annotations = all_annotations(cls)
424 # extend with `prop = TypedField()` 'annotations':
425 annotations |= {k: typing.cast(type, v) for k, v in full_dict.items() if is_typed_field(v)}
426 # remove internal stuff:
427 annotations = {k: v for k, v in annotations.items() if not k.startswith("_")}
429 typedfields: dict[str, TypedField[Any]] = {
430 k: instanciate(v, True) for k, v in annotations.items() if is_typed_field(v)
431 }
433 relationships: dict[str, type[Relationship[Any]]] = filter_out(annotations, Relationship)
435 fields = {fname: self._to_field(fname, ftype) for fname, ftype in annotations.items()}
437 # ! don't use full_dict here:
438 other_kwargs = kwargs | {
439 k: v for k, v in cls.__dict__.items() if k not in annotations and not k.startswith("_")
440 }
442 for key in typedfields.keys() - full_dict.keys():
443 # typed fields that haven't been added to the object yet
444 setattr(cls, key, typedfields[key])
446 # start with base classes and overwrite with current class:
447 relationships = filter_out(full_dict, Relationship) | relationships | filter_out(other_kwargs, Relationship)
449 # DEPRECATED: Relationship as annotation is currently not supported!
450 # ensure they are all instances and
451 # not mix of instances (`= relationship()`) and classes (`: Relationship[...]`):
452 # relationships = {
453 # k: v if isinstance(v, Relationship) else to_relationship(cls, k, v) for k, v in relationships.items()
454 # }
456 # keys of implicit references (also relationships):
457 reference_field_keys = [k for k, v in fields.items() if v.type.split(" ")[0] in ("list:reference", "reference")]
459 # add implicit relationships:
460 # User; list[User]; TypedField[User]; TypedField[list[User]]
461 relationships |= {
462 k: new_relationship
463 for k in reference_field_keys
464 if k not in relationships and (new_relationship := to_relationship(cls, k, annotations[k]))
465 }
467 table: Table = self.define_table(tablename, *fields.values(), **other_kwargs)
469 for name, typed_field in typedfields.items():
470 field = fields[name]
471 typed_field.bind(field, table)
473 if issubclass(cls, TypedTable):
474 cls.__set_internals__(
475 db=self,
476 table=table,
477 # by now, all relationships should be instances!
478 relationships=typing.cast(dict[str, Relationship[Any]], relationships),
479 )
480 self._class_map[str(table)] = cls
481 else:
482 warnings.warn("db.define used without inheriting TypedTable. This could lead to strange problems!")
484 if not tablename.startswith("typedal_"):
485 # todo: config
486 table._before_update.append(lambda s, _: _remove_cache(s, tablename))
487 table._before_delete.append(lambda s: _remove_cache(s, tablename))
489 return cls
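# A rough sketch of what _define produces for a simple model (hypothetical Person
# class and field names, for illustration only):
#
#   @db.define
#   class Person(TypedTable):
#       name: str
#       age: int | None
#
#   # is translated (approximately) into:
#   #   db.define_table("person",
#   #                   Field("name", "string", notnull=True),
#   #                   Field("age", "integer", notnull=False))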
491 @typing.overload
492 def define(self, maybe_cls: None = None, **kwargs: Any) -> typing.Callable[[typing.Type[T]], typing.Type[T]]:
493 """
494 Typing Overload for define without a class.
496 @db.define()
497 class MyTable(TypedTable): ...
498 """
500 @typing.overload
501 def define(self, maybe_cls: typing.Type[T], **kwargs: Any) -> typing.Type[T]:
502 """
503 Typing Overload for define with a class.
505 @db.define
506 class MyTable(TypedTable): ...
507 """
509 def define(
510 self, maybe_cls: typing.Type[T] | None = None, **kwargs: Any
511 ) -> typing.Type[T] | typing.Callable[[typing.Type[T]], typing.Type[T]]:
512 """
513 Can be used as a decorator on a class that inherits `TypedTable`, \
514 or as a regular method if you need to define your classes before you have access to a 'db' instance.
516 You can also pass extra arguments to db.define_table.
517 See http://www.web2py.com/books/default/chapter/29/06/the-database-abstraction-layer#Table-constructor
519 Example:
520 @db.define
521 class Person(TypedTable):
522 ...
524 class Article(TypedTable):
525 ...
527 # at a later time:
528 db.define(Article)
530 Returns:
531 the result of pydal.define_table
532 """
534 def wrapper(cls: typing.Type[T]) -> typing.Type[T]:
535 return self._define(cls, **kwargs)
537 if maybe_cls:
538 return wrapper(maybe_cls)
540 return wrapper
542 # def drop(self, table_name: str) -> None:
543 # """
544 # Remove a table by name (both on the database level and the typedal level).
545 # """
546 # # drop calls TypedTable.drop() and removes it from the `_class_map`
547 # if cls := self._class_map.pop(table_name, None):
548 # cls.drop()
550 # def drop_all(self, max_retries: int = None) -> None:
551 # """
552 # Remove all tables and keep doing so until everything is gone!
553 # """
554 # retries = 0
555 # if max_retries is None:
556 # max_retries = len(self.tables)
557 #
558 # while self.tables:
559 # retries += 1
560 # for table in self.tables:
561 # self.drop(table)
562 #
563 # if retries > max_retries:
564 # raise RuntimeError("Could not delete all tables")
566 def __call__(self, *_args: T_Query, **kwargs: Any) -> "TypedSet":
567 """
568 A db instance can be called directly to perform a query.
570 Usually, only a query is passed.
572 Example:
573 db(query).select()
575 """
576 args = list(_args)
577 if args:
578 cls = args[0]
579 if isinstance(cls, bool):
580 raise ValueError("Don't actually pass a bool to db()! Use a query instead.")
582 if isinstance(cls, type) and issubclass(type(cls), type) and issubclass(cls, TypedTable):
583 # table defined without @db.define decorator!
584 _cls: typing.Type[TypedTable] = cls
585 args[0] = _cls.id != None
587 _set = super().__call__(*args, **kwargs)
588 return typing.cast(TypedSet, _set)
590 @classmethod
591 def _build_field(cls, name: str, _type: str, **kw: Any) -> Field:
592 return Field(name, _type, **{**cls.default_kwargs, **kw})
594 @classmethod
595 def _annotation_to_pydal_fieldtype(
596 cls, _ftype: T_annotation, mut_kw: typing.MutableMapping[str, Any]
597 ) -> Optional[str]:
598 # ftype can be a union or type. typing.cast is sometimes used to tell mypy when it's not a union.
599 ftype = typing.cast(type, _ftype) # cast from typing.Type to type to make mypy happy
601 if isinstance(ftype, str):
602 # extract type from string
603 ftype = typing.get_args(typing.Type[ftype])[0]._evaluate(
604 localns=locals(), globalns=globals(), recursive_guard=frozenset()
605 )
607 if mapping := BASIC_MAPPINGS.get(ftype):
608 # basic types
609 return mapping
610 elif isinstance(ftype, _Table):
611 # db.table
612 return f"reference {ftype._tablename}"
613 elif issubclass(type(ftype), type) and issubclass(ftype, TypedTable):
614 # SomeTable
615 snakename = cls.to_snake(ftype.__name__)
616 return f"reference {snakename}"
617 elif isinstance(ftype, TypedField):
618 # FieldType(type, ...)
619 return ftype._to_field(mut_kw)
620 elif origin_is_subclass(ftype, TypedField):
621 # TypedField[int]
622 return cls._annotation_to_pydal_fieldtype(typing.get_args(ftype)[0], mut_kw)
623 elif isinstance(ftype, types.GenericAlias) and typing.get_origin(ftype) in (list, TypedField):
624 # list[str] -> str -> string -> list:string
625 _child_type = typing.get_args(ftype)[0]
626 _child_type = cls._annotation_to_pydal_fieldtype(_child_type, mut_kw)
627 return f"list:{_child_type}"
628 elif is_union(ftype):
629 # str | int -> UnionType
630 # typing.Union[str | int] -> typing._UnionGenericAlias
632 # Optional[type] == type | None
634 match typing.get_args(ftype):
635 case (_child_type, _Types.NONETYPE) | (_Types.NONETYPE, _child_type):
636 # good union of Nullable
638 # if a field is optional, it is nullable:
639 mut_kw["notnull"] = False
640 return cls._annotation_to_pydal_fieldtype(_child_type, mut_kw)
641 case _:
642 # a union of two concrete types is not supported by the db!
643 return None
644 else:
645 return None
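# Illustrative annotation -> pydal field type conversions handled by the method above
# (OtherTable is a hypothetical model, used only for the reference example):
#
#   str                 -> "string"
#   dt.datetime         -> "datetime"
#   OtherTable          -> "reference other_table"   (class name via to_snake)
#   list[str]           -> "list:string"
#   int | None          -> "integer"                 (and notnull is set to False)
#   TypedField[int]     -> "integer"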
647 @classmethod
648 def _to_field(cls, fname: str, ftype: type, **kw: Any) -> Field:
649 """
650 Convert an annotation into a pydal Field.
652 Args:
653 fname: name of the property
654 ftype: annotation of the property
655 kw: when using TypedField or a function returning it (e.g. StringField),
656 keyword args can be used to pass any other settings you would normally to a pydal Field
658 -> pydal.Field(fname, ftype, **kw)
660 Example:
661 class MyTable:
662 fname: ftype
663 id: int
664 name: str
665 reference: Table
666 other: TypedField(str, default="John Doe") # default will be in kwargs
667 """
668 fname = cls.to_snake(fname)
670 if converted_type := cls._annotation_to_pydal_fieldtype(ftype, kw):
671 return cls._build_field(fname, converted_type, **kw)
672 else:
673 raise NotImplementedError(f"Unsupported type {ftype}/{type(ftype)}")
675 @staticmethod
676 def to_snake(camel: str) -> str:
677 """
678 Moved to helpers, kept as a static method for legacy reasons.
679 """
680 return to_snake(camel)
683class TableProtocol(typing.Protocol): # pragma: no cover
684 """
685 Make mypy happy.
686 """
688 id: int # noqa: A003
690 def __getitem__(self, item: str) -> Field:
691 """
692 Tell mypy a Table supports dictionary notation for columns.
693 """
696class Table(_Table, TableProtocol): # type: ignore
697 """
698 Make mypy happy.
699 """
702class TableMeta(type):
703 """
704 This metaclass contains functionality for table classes that doesn't exist on their instances.
706 Example:
707 class MyTable(TypedTable):
708 some_field: TypedField[int]
710 MyTable.update_or_insert(...) # should work
712 MyTable.some_field # -> Field, can be used to query etc.
714 row = MyTable.first() # returns instance of MyTable
716 # row.update_or_insert(...) # shouldn't work!
718 row.some_field # -> int, with actual data
720 """
722 # set up by db.define:
723 # _db: TypeDAL | None = None
724 # _table: Table | None = None
725 _db: TypeDAL | None = None
726 _table: Table | None = None
727 _relationships: dict[str, Relationship[Any]] | None = None
729 #########################
730 # TypeDAL custom logic: #
731 #########################
733 def __set_internals__(self, db: pydal.DAL, table: Table, relationships: dict[str, Relationship[Any]]) -> None:
734 """
735 Store the related database and pydal table for later usage.
736 """
737 self._db = db
738 self._table = table
739 self._relationships = relationships
741 def __getattr__(self, col: str) -> Optional[Field]:
742 """
743 Magic method used by TableMeta to get a database field with dot notation on a class.
745 Example:
746 SomeTypedTable.col -> db.table.col (via TableMeta.__getattr__)
748 """
749 if self._table:
750 return getattr(self._table, col, None)
752 return None
754 def _ensure_table_defined(self) -> Table:
755 if not self._table:
756 raise EnvironmentError("@define or db.define is not called on this class yet!")
757 return self._table
759 def __iter__(self) -> typing.Generator[Field, None, None]:
760 """
761 Loop through the columns of this model.
762 """
763 table = self._ensure_table_defined()
764 yield from iter(table)
766 def __getitem__(self, item: str) -> Field:
767 """
768 Allow dict notation to get a column of this table (-> Field instance).
769 """
770 table = self._ensure_table_defined()
771 return table[item]
773 def __str__(self) -> str:
774 """
775 Normally, just returns the underlying table name, but with a fallback if the model is unbound.
776 """
777 if self._table:
778 return str(self._table)
779 else:
780 return f"<unbound table {self.__name__}>"
782 def from_row(self: typing.Type[T_MetaInstance], row: pydal.objects.Row) -> T_MetaInstance:
783 """
784 Create a model instance from a pydal row.
785 """
786 return self(row)
788 def all(self: typing.Type[T_MetaInstance]) -> "TypedRows[T_MetaInstance]": # noqa: A003
789 """
790 Return all rows for this model.
791 """
792 return self.collect()
794 def __json__(self: typing.Type[T_MetaInstance], instance: T_MetaInstance | None = None) -> dict[str, Any]:
795 """
796 Convert to a json-dumpable dict.
798 as_dict is not fully json-dumpable, so use as_json and json.loads to ensure it is dumpable (and loadable).
799 todo: can this be optimized?
801 See Also:
802 https://github.com/jeff-hykin/json_fix
803 """
804 string = instance.as_json() if instance else self.as_json()
806 return typing.cast(dict[str, Any], json.loads(string))
808 def get_relationships(self) -> dict[str, Relationship[Any]]:
809 """
810 Return the registered relationships of the current model.
811 """
812 return self._relationships or {}
814 ##########################
815 # TypeDAL Modified Logic #
816 ##########################
818 def insert(self: typing.Type[T_MetaInstance], **fields: Any) -> T_MetaInstance:
819 """
820 This is only called when db.define is not used as a decorator.
822 cls.__table functions as 'self'
824 Args:
825 **fields: anything you want to insert in the database
827 Returns: an instance of this model for the newly inserted row.
829 """
830 table = self._ensure_table_defined()
832 result = table.insert(**fields)
833 # it already is an int but mypy doesn't understand that
834 return self(result)
836 def _insert(self, **fields: Any) -> str:
837 table = self._ensure_table_defined()
839 return str(table._insert(**fields))
841 def bulk_insert(self: typing.Type[T_MetaInstance], items: list[dict[str, Any]]) -> "TypedRows[T_MetaInstance]":
842 """
843 Insert multiple rows, returns a TypedRows set of new instances.
844 """
845 table = self._ensure_table_defined()
846 result = table.bulk_insert(items)
847 return self.where(lambda row: row.id.belongs(result)).collect()
849 def update_or_insert(
850 self: typing.Type[T_MetaInstance], query: T_Query | dict[str, Any] = DEFAULT, **values: Any
851 ) -> T_MetaInstance:
852 """
853 Update a row if query matches, else insert a new one.
855 Returns the created or updated instance.
856 """
857 table = self._ensure_table_defined()
859 if query is DEFAULT:
860 record = table(**values)
861 elif isinstance(query, dict):
862 record = table(**query)
863 else:
864 record = table(query)
866 if not record:
867 return self.insert(**values)
869 record.update_record(**values)
870 return self(record)
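# A short usage sketch for update_or_insert (hypothetical Person model and fields):
#
#   Person.update_or_insert(Person.name == "Alice", name="Alice", age=31)  # query form
#   Person.update_or_insert(dict(name="Bob"), name="Bob", age=40)          # dict form
#   Person.update_or_insert(name="Carol")                                  # values double as the lookup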
872 def validate_and_insert(
873 self: typing.Type[T_MetaInstance], **fields: Any
874 ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]:
875 """
876 Validate input data and then insert a row.
878 Returns a tuple of (the created instance, a dict of errors).
879 """
880 table = self._ensure_table_defined()
881 result = table.validate_and_insert(**fields)
882 if row_id := result.get("id"):
883 return self(row_id), None
884 else:
885 return None, result.get("errors")
887 def validate_and_update(
888 self: typing.Type[T_MetaInstance], query: Query, **fields: Any
889 ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]:
890 """
891 Validate input data and then update max 1 row.
893 Returns a tuple of (the updated instance, a dict of errors).
894 """
895 table = self._ensure_table_defined()
897 try:
898 result = table.validate_and_update(query, **fields)
899 except Exception as e:
900 result = {"errors": {"exception": str(e)}}
902 if errors := result.get("errors"):
903 return None, errors
904 elif row_id := result.get("id"):
905 return self(row_id), None
906 else: # pragma: no cover
907 # update on query without result (shouldn't happen)
908 return None, None
910 def validate_and_update_or_insert(
911 self: typing.Type[T_MetaInstance], query: Query, **fields: Any
912 ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]:
913 """
914 Validate input data and then update or insert (on max 1 row).
916 Returns a tuple of (the updated/created instance, a dict of errors).
917 """
918 table = self._ensure_table_defined()
919 result = table.validate_and_update_or_insert(query, **fields)
921 if errors := result.get("errors"):
922 return None, errors
923 elif row_id := result.get("id"):
924 return self(row_id), None
925 else: # pragma: no cover
926 # update on query without result (shouldn't happen)
927 return None, None
929 def select(self: typing.Type[T_MetaInstance], *a: Any, **kw: Any) -> "QueryBuilder[T_MetaInstance]":
930 """
931 See QueryBuilder.select!
932 """
933 return QueryBuilder(self).select(*a, **kw)
935 def paginate(self: typing.Type[T_MetaInstance], limit: int, page: int = 1) -> "PaginatedRows[T_MetaInstance]":
936 """
937 See QueryBuilder.paginate!
938 """
939 return QueryBuilder(self).paginate(limit=limit, page=page)
941 def chunk(
942 self: typing.Type[T_MetaInstance], chunk_size: int
943 ) -> typing.Generator["TypedRows[T_MetaInstance]", Any, None]:
944 """
945 See QueryBuilder.chunk!
946 """
947 return QueryBuilder(self).chunk(chunk_size)
949 def where(self: typing.Type[T_MetaInstance], *a: Any, **kw: Any) -> "QueryBuilder[T_MetaInstance]":
950 """
951 See QueryBuilder.where!
952 """
953 return QueryBuilder(self).where(*a, **kw)
955 def cache(self: typing.Type[T_MetaInstance], *deps: Any, **kwargs: Any) -> "QueryBuilder[T_MetaInstance]":
956 """
957 See QueryBuilder.cache!
958 """
959 return QueryBuilder(self).cache(*deps, **kwargs)
961 def count(self: typing.Type[T_MetaInstance]) -> int:
962 """
963 See QueryBuilder.count!
964 """
965 return QueryBuilder(self).count()
967 def first(self: typing.Type[T_MetaInstance]) -> T_MetaInstance | None:
968 """
969 See QueryBuilder.first!
970 """
971 return QueryBuilder(self).first()
973 def join(
974 self: typing.Type[T_MetaInstance],
975 *fields: str | typing.Type["TypedTable"],
976 method: JOIN_OPTIONS = None,
977 on: OnQuery | list[Expression] | Expression = None,
978 condition: Condition = None,
979 ) -> "QueryBuilder[T_MetaInstance]":
980 """
981 See QueryBuilder.join!
982 """
983 return QueryBuilder(self).join(*fields, on=on, condition=condition, method=method)
985 def collect(self: typing.Type[T_MetaInstance], verbose: bool = False) -> "TypedRows[T_MetaInstance]":
986 """
987 See QueryBuilder.collect!
988 """
989 return QueryBuilder(self).collect(verbose=verbose)
991 @property
992 def ALL(cls) -> pydal.objects.SQLALL:
993 """
994 Select all fields for this table.
995 """
996 table = cls._ensure_table_defined()
998 return table.ALL
1000 ##########################
1001 # TypeDAL Shadowed Logic #
1002 ##########################
1003 fields: list[str]
1005 # other table methods:
1007 def drop(self, mode: str = "") -> None:
1008 """
1009 Remove the underlying table.
1010 """
1011 table = self._ensure_table_defined()
1012 table.drop(mode)
1014 def create_index(self, name: str, *fields: Field | str, **kwargs: Any) -> bool:
1015 """
1016 Add an index on some columns of this table.
1017 """
1018 table = self._ensure_table_defined()
1019 result = table.create_index(name, *fields, **kwargs)
1020 return typing.cast(bool, result)
1022 def drop_index(self, name: str, if_exists: bool = False) -> bool:
1023 """
1024 Remove an index from this table.
1025 """
1026 table = self._ensure_table_defined()
1027 result = table.drop_index(name, if_exists)
1028 return typing.cast(bool, result)
1030 def import_from_csv_file(
1031 self,
1032 csvfile: typing.TextIO,
1033 id_map: dict[str, str] = None,
1034 null: Any = "<NULL>",
1035 unique: str = "uuid",
1036 id_offset: dict[str, int] = None, # id_offset used only when id_map is None
1037 transform: typing.Callable[[dict[Any, Any]], dict[Any, Any]] = None,
1038 validate: bool = False,
1039 encoding: str = "utf-8",
1040 delimiter: str = ",",
1041 quotechar: str = '"',
1042 quoting: int = csv.QUOTE_MINIMAL,
1043 restore: bool = False,
1044 **kwargs: Any,
1045 ) -> None:
1046 """
1047 Load a csv file into the database.
1048 """
1049 table = self._ensure_table_defined()
1050 table.import_from_csv_file(
1051 csvfile,
1052 id_map=id_map,
1053 null=null,
1054 unique=unique,
1055 id_offset=id_offset,
1056 transform=transform,
1057 validate=validate,
1058 encoding=encoding,
1059 delimiter=delimiter,
1060 quotechar=quotechar,
1061 quoting=quoting,
1062 restore=restore,
1063 **kwargs,
1064 )
1066 def on(self, query: Query | bool) -> Expression:
1067 """
1068 Shadow Table.on.
1070 Used for joins.
1072 See Also:
1073 http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#One-to-many-relation
1074 """
1075 table = self._ensure_table_defined()
1076 return typing.cast(Expression, table.on(query))
1078 def with_alias(self, alias: str) -> _Table:
1079 """
1080 Shadow Table.with_alias.
1082 Useful for joins when joining the same table multiple times.
1084 See Also:
1085 http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#One-to-many-relation
1086 """
1087 table = self._ensure_table_defined()
1088 return table.with_alias(alias)
1090 # @typing.dataclass_transform()
1093class TypedField(typing.Generic[T_Value]): # pragma: no cover
1094 """
1095 Typed version of pydal.Field, which will be converted to a normal Field in the background.
1096 """
1098 # will be set by .bind on db.define
1099 name = ""
1100 _db: Optional[pydal.DAL] = None
1101 _rname: Optional[str] = None
1102 _table: Optional[Table] = None
1103 _field: Optional[Field] = None
1105 _type: T_annotation
1106 kwargs: Any
1108 def __init__(self, _type: typing.Type[T_Value] | types.UnionType = str, /, **settings: Any) -> None: # type: ignore
1109 """
1110 A TypedField should not be instantiated manually; the field helpers from `fields.py` (e.g. StringField) should be used instead!
1111 """
1112 self._type = _type
1113 self.kwargs = settings
1114 super().__init__()
1116 @typing.overload
1117 def __get__(self, instance: T_MetaInstance, owner: typing.Type[T_MetaInstance]) -> T_Value: # pragma: no cover
1118 """
1119 row.field -> (actual data).
1120 """
1122 @typing.overload
1123 def __get__(self, instance: None, owner: "typing.Type[TypedTable]") -> "TypedField[T_Value]": # pragma: no cover
1124 """
1125 Table.field -> Field.
1126 """
1128 def __get__(
1129 self, instance: T_MetaInstance | None, owner: typing.Type[T_MetaInstance]
1130 ) -> typing.Union[T_Value, "TypedField[T_Value]"]:
1131 """
1132 Since this class is a Descriptor field, \
1133 it returns something else depending on if it's called on a class or instance.
1135 (this is mostly for mypy/typing)
1136 """
1137 if instance:
1138 # this is only reached in a very specific case:
1139 # an instance of the object was created with a specific set of fields selected (excluding the current one)
1140 # in that case, no value was stored in the owner -> return None (since the field was not selected)
1141 return typing.cast(T_Value, None) # cast as T_Value so mypy understands it for selected fields
1142 else:
1143 # getting as class -> return actual field so pydal understands it when using in query etc.
1144 return typing.cast(TypedField[T_Value], self._field) # pretend it's still typed for IDE support
1146 def __str__(self) -> str:
1147 """
1148 String representation of a Typed Field.
1150 If `type` is set explicitly (e.g. TypedField(str, type="text")), that type is used: `TypedField.text`,
1151 otherwise the type annotation is used (e.g. TypedField(str) -> TypedField.str)
1152 """
1153 return str(self._field) if self._field else ""
1155 def __repr__(self) -> str:
1156 """
1157 More detailed string representation of a Typed Field.
1159 Uses __str__ and adds the provided extra options (kwargs) in the representation.
1160 """
1161 s = self.__str__()
1163 if "type" in self.kwargs:
1164 # manual type in kwargs supplied
1165 t = self.kwargs["type"]
1166 elif issubclass(type, type(self._type)):
1167 # normal type, str.__name__ = 'str'
1168 t = getattr(self._type, "__name__", str(self._type))
1169 elif t_args := typing.get_args(self._type):
1170 # list[str] -> 'str'
1171 t = t_args[0].__name__
1172 else: # pragma: no cover
1173 # fallback - something else, may not even happen, I'm not sure
1174 t = self._type
1176 s = f"TypedField[{t}].{s}" if s else f"TypedField[{t}]"
1178 kw = self.kwargs.copy()
1179 kw.pop("type", None)
1180 return f"<{s} with options {kw}>"
1182 def _to_field(self, extra_kwargs: typing.MutableMapping[str, Any]) -> Optional[str]:
1183 """
1184 Convert a Typed Field instance to a pydal.Field.
1185 """
1186 other_kwargs = self.kwargs.copy()
1187 extra_kwargs.update(other_kwargs)
1188 return extra_kwargs.pop("type", False) or TypeDAL._annotation_to_pydal_fieldtype(self._type, extra_kwargs)
1190 def bind(self, field: pydal.objects.Field, table: pydal.objects.Table) -> None:
1191 """
1192 Bind the right db/table/field info to this class, so queries can be made using `Class.field == ...`.
1193 """
1194 self._table = table
1195 self._field = field
1197 def __getattr__(self, key: str) -> Any:
1198 """
1199 If the regular getattribute does not work, try to get info from the related Field.
1200 """
1201 with contextlib.suppress(AttributeError):
1202 return super().__getattribute__(key)
1204 # try on actual field:
1205 return getattr(self._field, key)
1207 def __eq__(self, other: Any) -> Query:
1208 """
1209 Performing == on a Field will result in a Query.
1210 """
1211 return typing.cast(Query, self._field == other)
1213 def __ne__(self, other: Any) -> Query:
1214 """
1215 Performing != on a Field will result in a Query.
1216 """
1217 return typing.cast(Query, self._field != other)
1219 def __gt__(self, other: Any) -> Query:
1220 """
1221 Performing > on a Field will result in a Query.
1222 """
1223 return typing.cast(Query, self._field > other)
1225 def __lt__(self, other: Any) -> Query:
1226 """
1227 Performing < on a Field will result in a Query.
1228 """
1229 return typing.cast(Query, self._field < other)
1231 def __ge__(self, other: Any) -> Query:
1232 """
1233 Performing >= on a Field will result in a Query.
1234 """
1235 return typing.cast(Query, self._field >= other)
1237 def __le__(self, other: Any) -> Query:
1238 """
1239 Performing <= on a Field will result in a Query.
1240 """
1241 return typing.cast(Query, self._field <= other)
1243 def __hash__(self) -> int:
1244 """
1245 Shadow Field.__hash__.
1246 """
1247 return hash(self._field)
1249 def __invert__(self) -> Expression:
1250 """
1251 Performing ~ on a Field will result in an Expression.
1252 """
1253 if not self._field: # pragma: no cover
1254 raise ValueError("Unbound Field can not be inverted!")
1256 return typing.cast(Expression, ~self._field)
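# A sketch of declaring and querying TypedField columns (hypothetical Product model):
#
#   @db.define
#   class Product(TypedTable):
#       name: TypedField[str]
#       price: TypedField[float] = TypedField(float, default=0.0)
#
#   # accessed on the class, the descriptor yields the bound pydal Field, so the
#   # comparison operators above produce Query objects:
#   cheap = Product.where(Product.price < 10).collect()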
1259class TypedTable(metaclass=TableMeta):
1260 """
1261 Enhanced modeling system on top of pydal's Table that adds typing and additional functionality.
1262 """
1264 # set up by 'new':
1265 _row: Row | None = None
1267 _with: list[str]
1269 id: "TypedField[int]" # noqa: A003
1271 def _setup_instance_methods(self) -> None:
1272 self.as_dict = self._as_dict # type: ignore
1273 self.__json__ = self.as_json = self._as_json # type: ignore
1274 # self.as_yaml = self._as_yaml # type: ignore
1275 self.as_xml = self._as_xml # type: ignore
1277 self.update = self._update # type: ignore
1279 self.delete_record = self._delete_record # type: ignore
1280 self.update_record = self._update_record # type: ignore
1282 def __new__(
1283 cls, row_or_id: typing.Union[Row, Query, pydal.objects.Set, int, str, None, "TypedTable"] = None, **filters: Any
1284 ) -> "TypedTable":
1285 """
1286 Create a Typed Rows model instance from an existing row, ID or query.
1288 Examples:
1289 MyTable(1)
1290 MyTable(id=1)
1291 MyTable(MyTable.id == 1)
1292 """
1293 table = cls._ensure_table_defined()
1294 inst = super().__new__(cls)
1296 if isinstance(row_or_id, TypedTable):
1297 # existing typed table instance!
1298 return row_or_id
1299 elif isinstance(row_or_id, pydal.objects.Row):
1300 row = row_or_id
1301 elif row_or_id is not None:
1302 row = table(row_or_id, **filters)
1303 elif filters:
1304 row = table(**filters)
1305 else:
1306 # dummy object
1307 return inst
1309 if not row:
1310 return None # type: ignore
1312 inst._row = row
1313 inst.__dict__.update(row)
1314 inst._setup_instance_methods()
1315 return inst
1317 def __iter__(self) -> typing.Generator[Any, None, None]:
1318 """
1319 Allows looping through the columns.
1320 """
1321 row = self._ensure_matching_row()
1322 yield from iter(row)
1324 def __getitem__(self, item: str) -> Any:
1325 """
1326 Allows dictionary notation to get columns.
1327 """
1328 if item in self.__dict__:
1329 return self.__dict__.get(item)
1331 # fallback to lookup in row
1332 if self._row:
1333 return self._row[item]
1335 # nothing found!
1336 raise KeyError(item)
1338 def __getattr__(self, item: str) -> Any:
1339 """
1340 Allows dot notation to get columns.
1341 """
1342 if value := self.get(item):
1343 return value
1345 raise AttributeError(item)
1347 def get(self, item: str, default: Any = None) -> Any:
1348 """
1349 Try to get a column from this instance, else return default.
1350 """
1351 try:
1352 return self.__getitem__(item)
1353 except KeyError:
1354 return default
1356 def __setitem__(self, key: str, value: Any) -> None:
1357 """
1358 Data can both be updated via dot and dict notation.
1359 """
1360 return setattr(self, key, value)
1362 def __int__(self) -> int:
1363 """
1364 Calling int on a model instance will return its id.
1365 """
1366 return getattr(self, "id", 0)
1368 def __bool__(self) -> bool:
1369 """
1370 If the instance has an underlying row with data, it is truthy.
1371 """
1372 return bool(getattr(self, "_row", False))
1374 def _ensure_matching_row(self) -> Row:
1375 if not getattr(self, "_row", None):
1376 raise EnvironmentError("Trying to access non-existent row. Maybe it was deleted or not yet initialized?")
1377 return self._row
1379 def __repr__(self) -> str:
1380 """
1381 String representation of the model instance.
1382 """
1383 model_name = self.__class__.__name__
1384 model_data = {}
1386 if self._row:
1387 model_data = self._row.as_json()
1389 details = model_name
1390 details += f"({model_data})"
1392 if relationships := getattr(self, "_with", []):
1393 details += f" + {relationships}"
1395 return f"<{details}>"
1397 # serialization
1398 # underscore variants work for class instances (set up by _setup_instance_methods)
1400 @classmethod
1401 def as_dict(cls, flat: bool = False, sanitize: bool = True) -> dict[str, Any]:
1402 """
1403 Dump the object to a plain dict.
1405 Can be used as both a class or instance method:
1406 - dumps the table info if it's a class
1407 - dumps the row info if it's an instance (see _as_dict)
1408 """
1409 table = cls._ensure_table_defined()
1410 result = table.as_dict(flat, sanitize)
1411 return typing.cast(dict[str, Any], result)
1413 @classmethod
1414 def as_json(cls, sanitize: bool = True) -> str:
1415 """
1416 Dump the object to json.
1418 Can be used as both a class or instance method:
1419 - dumps the table info if it's a class
1420 - dumps the row info if it's an instance (see _as_json)
1421 """
1422 table = cls._ensure_table_defined()
1423 return typing.cast(str, table.as_json(sanitize))
1425 @classmethod
1426 def as_xml(cls, sanitize: bool = True) -> str: # pragma: no cover
1427 """
1428 Dump the object to xml.
1430 Can be used as both a class or instance method:
1431 - dumps the table info if it's a class
1432 - dumps the row info if it's an instance (see _as_xml)
1433 """
1434 table = cls._ensure_table_defined()
1435 return typing.cast(str, table.as_xml(sanitize))
1437 @classmethod
1438 def as_yaml(cls, sanitize: bool = True) -> str:
1439 """
1440 Dump the object to yaml.
1442 Can be used as both a class or instance method:
1443 - dumps the table info if it's a class
1444 - dumps the row info if it's an instance (see _as_yaml)
1445 """
1446 table = cls._ensure_table_defined()
1447 return typing.cast(str, table.as_yaml(sanitize))
1449 def _as_dict(
1450 self, datetime_to_str: bool = False, custom_types: typing.Iterable[type] | type | None = None
1451 ) -> dict[str, Any]:
1452 row = self._ensure_matching_row()
1453 result = row.as_dict(datetime_to_str=datetime_to_str, custom_types=custom_types)
1455 if _with := getattr(self, "_with", None):
1456 for relationship in _with:
1457 data = self.get(relationship)
1458 if isinstance(data, list):
1459 data = [_.as_dict() if getattr(_, "as_dict", None) else _ for _ in data]
1460 elif data:
1461 data = data.as_dict()
1463 result[relationship] = data
1465 return typing.cast(dict[str, Any], result)
1467 def _as_json(
1468 self,
1469 mode: str = "object",
1470 default: typing.Callable[[Any], Any] = None,
1471 colnames: list[str] = None,
1472 serialize: bool = True,
1473 **kwargs: Any,
1474 ) -> str:
1475 row = self._ensure_matching_row()
1476 return typing.cast(str, row.as_json(mode, default, colnames, serialize, **kwargs))
1478 def _as_xml(self, sanitize: bool = True) -> str: # pragma: no cover
1479 row = self._ensure_matching_row()
1480 return typing.cast(str, row.as_xml(sanitize))
1482 # def _as_yaml(self, sanitize: bool = True) -> str:
1483 # row = self._ensure_matching_row()
1484 # return typing.cast(str, row.as_yaml(sanitize))
1486 def __setattr__(self, key: str, value: Any) -> None:
1487 """
1488 When setting a property on a Typed Table model instance, also update the underlying row.
1489 """
1490 if self._row and key in self._row.__dict__ and not callable(value):
1491 # enables `row.key = value; row.update_record()`
1492 self._row[key] = value
1494 super().__setattr__(key, value)
1496 @classmethod
1497 def update(cls: typing.Type[T_MetaInstance], query: Query, **fields: Any) -> T_MetaInstance | None:
1498 """
1499 Update one record.
1501 Example:
1502 MyTable.update(MyTable.id == 1, name="NewName") -> MyTable
1503 """
1504 # todo: update multiple?
1505 if record := cls(query):
1506 return record.update_record(**fields)
1507 else:
1508 return None
1510 def _update(self: T_MetaInstance, **fields: Any) -> T_MetaInstance:
1511 row = self._ensure_matching_row()
1512 row.update(**fields)
1513 self.__dict__.update(**fields)
1514 return self
1516 def _update_record(self: T_MetaInstance, **fields: Any) -> T_MetaInstance:
1517 row = self._ensure_matching_row()
1518 new_row = row.update_record(**fields)
1519 self.update(**new_row)
1520 return self
1522 def update_record(self: T_MetaInstance, **fields: Any) -> T_MetaInstance: # pragma: no cover
1523 """
1524 Here as a placeholder for _update_record.
1526 Will be replaced on instance creation!
1527 """
1528 return self._update_record(**fields)
1530 def _delete_record(self) -> int:
1531 """
1532 Actual logic in `pydal.helpers.classes.RecordDeleter`.
1533 """
1534 row = self._ensure_matching_row()
1535 result = row.delete_record()
1536 self.__dict__ = {} # empty self, since row is no more.
1537 self._row = None # just to be sure
1538 self._setup_instance_methods()
1539 # ^ instance methods might've been deleted by emptying dict,
1540 # but we still want .as_dict to show an error, not the table's as_dict.
1541 return typing.cast(int, result)
1543 def delete_record(self) -> int: # pragma: no cover
1544 """
1545 Here as a placeholder for _delete_record.
1547 Will be replaced on instance creation!
1548 """
1549 return self._delete_record()
1551 # __del__ is also called at the end of a scope, so don't remove records on every del!
1553 # pickling:
1554 def __setstate__(self, state: dict[str, Any]) -> None:
1555 """
1556 Used by dill when loading from a bytestring.
1557 """
1558 # as_dict also includes table info, so dump as json to only get the actual row data
1559 # then create a new (more empty) row object:
1560 state["_row"] = Row(json.loads(state["_row"]))
1561 self.__dict__ |= state
1563 def __getstate__(self) -> dict[str, Any]:
1564 """
1565 State to save when pickling.
1567 Prevents db connection from being pickled.
1568 Similar to as_dict but without changing the data of the relationships (dill does that recursively)
1569 """
1570 row = self._ensure_matching_row()
1571 result: dict[str, Any] = row.as_dict()
1573 if _with := getattr(self, "_with", None):
1574 for relationship in _with:
1575 data = self.get(relationship)
1577 result[relationship] = data
1579 result["_row"] = self._row.as_json() if self._row else ""
1580 return result
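# A minimal round-trip sketch with dill, which drives __getstate__/__setstate__:
#
#   import dill
#   blob = dill.dumps(row)        # __getstate__ drops the db connection
#   restored = dill.loads(blob)   # __setstate__ rebuilds the Row from its JSON dump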
1583# backwards compat:
1584TypedRow = TypedTable
1587class TypedRows(typing.Collection[T_MetaInstance], Rows):
1588 """
1589 Slightly enhanced and typed functionality on top of pydal Rows (the result of a select).
1590 """
1592 records: dict[int, T_MetaInstance]
1593 # _rows: Rows
1594 model: typing.Type[T_MetaInstance]
1595 metadata: Metadata
1597 # pseudo-properties: actually stored in _rows
1598 db: TypeDAL
1599 colnames: list[str]
1600 fields: list[Field]
1601 colnames_fields: list[Field]
1602 response: list[tuple[Any, ...]]
1604 def __init__(
1605 self,
1606 rows: Rows,
1607 model: typing.Type[T_MetaInstance],
1608 records: dict[int, T_MetaInstance] = None,
1609 metadata: Metadata = None,
1610 ) -> None:
1611 """
1612 Should not be called manually!
1614 Normally, the `records` from an existing `Rows` object are used
1615 but these can be overwritten with a `records` dict.
1616 `metadata` can be any (un)structured data
1617 `model` is a Typed Table class
1618 """
1619 records = records or {row.id: model(row) for row in rows}
1620 super().__init__(rows.db, records, rows.colnames, rows.compact, rows.response, rows.fields)
1621 self.model = model
1622 self.metadata = metadata or {}
1624 def __len__(self) -> int:
1625 """
1626 Return the count of rows.
1627 """
1628 return len(self.records)
1630 def __iter__(self) -> typing.Iterator[T_MetaInstance]:
1631 """
1632 Loop through the rows.
1633 """
1634 yield from self.records.values()
1636 def __contains__(self, ind: Any) -> bool:
1637 """
1638 Check if an id exists in this result set.
1639 """
1640 return ind in self.records
1642 def first(self) -> T_MetaInstance | None:
1643 """
1644 Get the row with the lowest id.
1645 """
1646 if not self.records:
1647 return None
1649 return next(iter(self))
1651 def last(self) -> T_MetaInstance | None:
1652 """
1653 Get the row with the highest id.
1654 """
1655 if not self.records:
1656 return None
1658 max_id = max(self.records.keys())
1659 return self[max_id]
1661 def find(
1662 self, f: typing.Callable[[T_MetaInstance], Query], limitby: tuple[int, int] = None
1663 ) -> "TypedRows[T_MetaInstance]":
1664 """
1665 Returns a new Rows object, a subset of the original object, filtered by the function `f`.
1666 """
1667 if not self.records:
1668 return self.__class__(self, self.model, {})
1670 records = {}
1671 if limitby:
1672 _min, _max = limitby
1673 else:
1674 _min, _max = 0, len(self)
1675 count = 0
1676 for i, row in self.records.items():
1677 if f(row):
1678 if _min <= count:
1679 records[i] = row
1680 count += 1
1681 if count == _max:
1682 break
1684 return self.__class__(self, self.model, records)
1686 def exclude(self, f: typing.Callable[[T_MetaInstance], Query]) -> "TypedRows[T_MetaInstance]":
1687 """
1688 Removes elements from the calling Rows object, filtered by the function `f`, \
1689 and returns a new Rows object containing the removed elements.
1690 """
1691 if not self.records:
1692 return self.__class__(self, self.model, {})
1693 removed = {}
1694 to_remove = []
1695 for i in self.records:
1696 row = self[i]
1697 if f(row):
1698 removed[i] = self.records[i]
1699 to_remove.append(i)
1701 [self.records.pop(i) for i in to_remove]
1703 return self.__class__(
1704 self,
1705 self.model,
1706 removed,
1707 )
1709 def sort(self, f: typing.Callable[[T_MetaInstance], Any], reverse: bool = False) -> list[T_MetaInstance]:
1710 """
1711 Returns a list of sorted elements (not sorted in place).
1712 """
1713 return [r for (r, s) in sorted(zip(self.records.values(), self), key=lambda r: f(r[1]), reverse=reverse)]
1715 def __str__(self) -> str:
1716 """
1717 Simple string representation.
1718 """
1719 return f"<TypedRows with {len(self)} records>"
1721 def __repr__(self) -> str:
1722 """
1723 Print a table on repr().
1724 """
1725 data = self.as_dict()
1726 headers = list(next(iter(data.values())).keys())
1727 return mktable(data, headers)
1729 def group_by_value(
1730 self, *fields: "str | Field | TypedField[T]", one_result: bool = False, **kwargs: Any
1731 ) -> dict[T, list[T_MetaInstance]]:
1732 """
1733 Group the rows by a specific field (which will be the dict key).
1734 """
1735 kwargs["one_result"] = one_result
1736 result = super().group_by_value(*fields, **kwargs)
1737 return typing.cast(dict[T, list[T_MetaInstance]], result)
1739 def column(self, column: str = None) -> list[Any]:
1740 """
1741 Get a list of all values in a specific column.
1743 Example:
1744 rows.column('name') -> ['Name 1', 'Name 2', ...]
1745 """
1746 return typing.cast(list[Any], super().column(column))
1748 def as_csv(self) -> str:
1749 """
1750 Dump the data to csv.
1751 """
1752 return typing.cast(str, super().as_csv())
1754 def as_dict(
1755 self,
1756 key: str = None,
1757 compact: bool = False,
1758 storage_to_dict: bool = False,
1759 datetime_to_str: bool = False,
1760 custom_types: list[type] = None,
1761 ) -> dict[int, dict[str, Any]]:
1762 """
1763 Get the data in a dict of dicts.
1764 """
1765 if any([key, compact, storage_to_dict, datetime_to_str, custom_types]):
1766 # functionality not guaranteed
1767 return typing.cast(
1768 dict[int, dict[str, Any]],
1769 super().as_dict(
1770 key or "id",
1771 compact,
1772 storage_to_dict,
1773 datetime_to_str,
1774 custom_types,
1775 ),
1776 )
1778 return {k: v.as_dict() for k, v in self.records.items()}
1780 def as_json(self, mode: str = "object", default: typing.Callable[[Any], Any] = None) -> str:
1781 """
1782 Turn the data into a dict and then dump to JSON.
1783 """
1784 return typing.cast(str, super().as_json(mode=mode, default=default))
1786 def json(self, mode: str = "object", default: typing.Callable[[Any], Any] = None) -> str:
1787 """
1788 Turn the data into a dict and then dump to JSON.
1789 """
1790 return typing.cast(str, super().as_json(mode=mode, default=default))
1792 def as_list(
1793 self,
1794 compact: bool = False,
1795 storage_to_dict: bool = False,
1796 datetime_to_str: bool = False,
1797 custom_types: list[type] = None,
1798 ) -> list[dict[str, Any]]:
1799 """
1800 Get the data in a list of dicts.
1801 """
1802 if any([compact, storage_to_dict, datetime_to_str, custom_types]):
1803 return typing.cast(
1804 list[dict[str, Any]], super().as_list(compact, storage_to_dict, datetime_to_str, custom_types)
1805 )
1806 return [_.as_dict() for _ in self.records.values()]
1808 def __getitem__(self, item: int) -> T_MetaInstance:
1809 """
1810 You can get a specific row by ID from a TypedRows by using rows[id] notation.
1812 Since pydal's implementation differs (they expect a list instead of a dict with id keys),
1813 using rows[0] will return the first row, regardless of its id.
1814 """
1815 try:
1816 return self.records[item]
1817 except KeyError as e:
1818 if item == 0 and (row := self.first()):
1819 # special case: pydal internals think Rows.records is a list, not a dict
1820 return row
1822 raise e
1824 def get(self, item: int) -> typing.Optional[T_MetaInstance]:
1825 """
1826 Get a row by ID, or receive None if it isn't in this result set.
1827 """
1828 return self.records.get(item)
1830 def join(
1831 self,
1832 field: "Field | TypedField[Any]",
1833 name: str = None,
1834 constraint: Query = None,
1835 fields: list[str | Field] = None,
1836 orderby: Optional[str | Field] = None,
1837 ) -> T_MetaInstance:
1838 """
1839 This can be used to JOIN with some relationships after the initial select.
1841 Using the QueryBuilder's .join() method is preferred!
1842 """
1843 result = super().join(field, name, constraint, fields or [], orderby)
1844 return typing.cast(T_MetaInstance, result)
1846 def export_to_csv_file(
1847 self,
1848 ofile: typing.TextIO,
1849 null: Any = "<NULL>",
1850 delimiter: str = ",",
1851 quotechar: str = '"',
1852 quoting: int = csv.QUOTE_MINIMAL,
1853 represent: bool = False,
1854 colnames: list[str] = None,
1855 write_colnames: bool = True,
1856 *args: Any,
1857 **kwargs: Any,
1858 ) -> None:
1859 """
1860 Shadow export_to_csv_file from Rows, but with typing.
1862 See http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#Exporting-and-importing-data
1863 """
1864 super().export_to_csv_file(
1865 ofile,
1866 null,
1867 *args,
1868 delimiter=delimiter,
1869 quotechar=quotechar,
1870 quoting=quoting,
1871 represent=represent,
1872 colnames=colnames or self.colnames,
1873 write_colnames=write_colnames,
1874 **kwargs,
1875 )
1877 @classmethod
1878 def from_rows(
1879 cls, rows: Rows, model: typing.Type[T_MetaInstance], metadata: Metadata = None
1880 ) -> "TypedRows[T_MetaInstance]":
1881 """
1882 Internal method to convert a Rows object to a TypedRows.
1883 """
1884 return cls(rows, model, metadata=metadata)
1886 def __json__(self) -> dict[str, Any]:
1887 """
1888 For json-fix.
1889 """
1890 return typing.cast(dict[str, Any], self.as_dict())
1892 def __getstate__(self) -> dict[str, Any]:
1893 """
1894 Used by dill to dump to bytes (excludes the db connection etc.).
1895 """
1896 return {
1897 "metadata": json.dumps(self.metadata, default=str),
1898 "records": self.records,
1899 }
1901 def __setstate__(self, state: dict[str, Any]) -> None:
1902 """
1903 Used by dill when loading from a bytestring.
1904 """
1905 state["metadata"] = json.loads(state["metadata"])
1906 self.__dict__.update(state)
1909from .caching import ( # noqa: E402
1910 _remove_cache,
1911 _TypedalCache,
1912 _TypedalCacheDependency,
1913 create_and_hash_cache_key,
1914 get_expire,
1915 load_from_cache,
1916 save_to_cache,
1917)
1920class QueryBuilder(typing.Generic[T_MetaInstance]):
1921 """
1922 Abstraction on top of pydal's query system.
1923 """
1925 model: typing.Type[T_MetaInstance]
1926 query: Query
1927 select_args: list[Any]
1928 select_kwargs: dict[str, Any]
1929 relationships: dict[str, Relationship[Any]]
1930 metadata: Metadata
1932 def __init__(
1933 self,
1934 model: typing.Type[T_MetaInstance],
1935 add_query: Optional[Query] = None,
1936 select_args: Optional[list[Any]] = None,
1937 select_kwargs: Optional[dict[str, Any]] = None,
1938 relationships: dict[str, Relationship[Any]] = None,
1939 metadata: Metadata = None,
1940 ):
1941 """
1942 Normally, you wouldn't initialize a QueryBuilder manually, but start from a method on a TypedTable.
1944 Example:
1945 MyTable.where(...) -> QueryBuilder[MyTable]
1946 """
1947 self.model = model
1948 table = model._ensure_table_defined()
1949 default_query = typing.cast(Query, table.id > 0)
1950 self.query = add_query or default_query
1951 self.select_args = select_args or []
1952 self.select_kwargs = select_kwargs or {}
1953 self.relationships = relationships or {}
1954 self.metadata = metadata or {}
1956 def __str__(self) -> str:
1957 """
1958 Simple string representation for the query builder.
1959 """
1960 return f"QueryBuilder for {self.model}"
1962 def __repr__(self) -> str:
1963 """
1964 Advanced string representation for the query builder.
1965 """
1966 return (
1967 f"<QueryBuilder for {self.model} with "
1968 f"{len(self.select_args)} select args; "
1969 f"{len(self.select_kwargs)} select kwargs; "
1970 f"{len(self.relationships)} relationships; "
1971 f"query: {bool(self.query)}; "
1972 f"metadata: {self.metadata}; "
1973 f">"
1974 )
1976 def __bool__(self) -> bool:
1977 """
1978 The QueryBuilder is truthy if its query matches at least one row.
1979 """
1980 return self.count() > 0
1982 def _extend(
1983 self,
1984 add_query: Optional[Query] = None,
1985 overwrite_query: Optional[Query] = None,
1986 select_args: Optional[list[Any]] = None,
1987 select_kwargs: Optional[dict[str, Any]] = None,
1988 relationships: dict[str, Relationship[Any]] = None,
1989 metadata: Metadata = None,
1990 ) -> "QueryBuilder[T_MetaInstance]":
1991 return QueryBuilder(
1992 self.model,
1993 (add_query & self.query) if add_query else overwrite_query or self.query,
1994 (self.select_args + select_args) if select_args else self.select_args,
1995 (self.select_kwargs | select_kwargs) if select_kwargs else self.select_kwargs,
1996 (self.relationships | relationships) if relationships else self.relationships,
1997 (self.metadata | (metadata or {})) if metadata else self.metadata,
1998 )
2000 def select(self, *fields: Any, **options: Any) -> "QueryBuilder[T_MetaInstance]":
2001 """
2002 Fields: database columns by name ('id'), by field reference (table.id) or other (e.g. table.ALL).
2004 Options:
2005 paraphrased from the web2py pydal docs,
2006 For more info, see http://www.web2py.com/books/default/chapter/29/06/the-database-abstraction-layer#orderby-groupby-limitby-distinct-having-orderby_on_limitby-join-left-cache
2008 orderby: field(s) to order by. Supported:
2009 table.name - sort by name, ascending
2010 ~table.name - sort by name, descending
2011 <random> - sort randomly
2012 table.name|table.id - sort by two fields (first name, then id)
2014 groupby, having: together with orderby:
2015 groupby can be a field (e.g. table.name) to group records by
2016 having can be a query; only groups satisfying the `having` condition are returned
2018 limitby: tuple of min and max. When using the query builder, .paginate(limit, page) is recommended.
2019 distinct: bool/field. Only select rows that differ
2020 orderby_on_limitby (bool, default: True): by default, an implicit orderby is added when doing limitby.
2021 join: othertable.on(query) - do an INNER JOIN. Using TypeDAL relationships with .join() is recommended!
2022 left: othertable.on(query) - do a LEFT JOIN. Using TypeDAL relationships with .join() is recommended!
2023 cache: cache the query result to speed up repeated queries; e.g. (cache=(cache.ram, 3600), cacheable=True)
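Example (illustrative sketch; assumes a `MyTable` model with a `name` field):
    rows = MyTable.where(lambda t: t.id > 0).select(MyTable.id, MyTable.name, orderby=~MyTable.name).collect()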
2024 """
2025 return self._extend(select_args=list(fields), select_kwargs=options)
2027 def where(
2028 self,
2029 *queries_or_lambdas: Query | typing.Callable[[typing.Type[T_MetaInstance]], Query],
2030 **filters: Any,
2031 ) -> "QueryBuilder[T_MetaInstance]":
2032 """
2033 Extend the builder's query.
2035 Can be used in multiple ways:
2036 .where(Query) -> with a direct query such as `Table.id == 5`
2037 .where(lambda table: table.id == 5) -> with a query via a lambda
2038 .where(id=5) -> via keyword arguments
2040 When chaining multiple .where() calls, they will be ANDed:
2041 .where(lambda table: table.id == 5).where(lambda table: table.id == 6) == (table.id == 5) & (table.id == 6)
2042 When passing multiple queries to a single .where(), they will be ORed:
2043 .where(lambda table: table.id == 5, lambda table: table.id == 6) == (table.id == 5) | (table.id == 6)
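Example (illustrative sketch; assumes a `MyTable` model with a `name` field):
    MyTable.where(lambda t: t.name == "Alice", lambda t: t.name == "Bob").where(id=5)
    # -> ((name == "Alice") OR (name == "Bob")) AND (id == 5)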
2044 """
2045 new_query = self.query
2046 table = self.model._ensure_table_defined()
2048 for field, value in filters.items():
2049 new_query &= table[field] == value
2051 subquery: DummyQuery | Query = DummyQuery()
2052 for query_or_lambda in queries_or_lambdas:
2053 if isinstance(query_or_lambda, _Query):
2054 subquery |= typing.cast(Query, query_or_lambda)
2055 elif callable(query_or_lambda):
2056 if result := query_or_lambda(self.model):
2057 subquery |= result
2058 elif isinstance(query_or_lambda, (Field, _Field)) or is_typed_field(query_or_lambda):
2059 subquery |= typing.cast(Query, query_or_lambda != None)
2060 else:
2061 raise ValueError(f"Unexpected query type ({type(query_or_lambda)}).")
2063 if subquery:
2064 new_query &= subquery
2066 return self._extend(overwrite_query=new_query)
2068 def join(
2069 self,
2070 *fields: str | typing.Type[TypedTable],
2071 method: JOIN_OPTIONS = None,
2072 on: OnQuery | list[Expression] | Expression = None,
2073 condition: Condition = None,
2074 ) -> "QueryBuilder[T_MetaInstance]":
2075 """
2076 Include relationship fields in the result.
2078 `fields` can be names of Relationships on the current model.
2079 If no fields are passed, all will be used.
2081 By default, the join `method` defined in the relationship is used.
2082 This can be overwritten with the `method` keyword argument ('left' or 'inner').
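Example (illustrative sketch; assumes a `MyTable` model with an 'author' relationship):
    MyTable.where(lambda t: t.id > 0).join()                           # join on all defined relationships
    MyTable.where(lambda t: t.id > 0).join("author", method="inner")   # only 'author', forced INNER JOIN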
2083 """
2084 # todo: allow limiting amount of related rows returned for join?
2086 relationships = self.model.get_relationships()
2088 if condition and on:
2089 raise ValueError("condition and on can not be used together!")
2090 elif condition:
2091 if len(fields) != 1:
2092 raise ValueError("join(field, condition=...) can only be used with exactly one field!")
2094 if isinstance(condition, pydal.objects.Query):
2095 condition = as_lambda(condition)
2097 relationships = {str(fields[0]): relationship(fields[0], condition=condition, join=method)}
2098 elif on:
2099 if len(fields) != 1:
2100 raise ValueError("join(field, on=...) can only be used with exactly one field!")
2102 if isinstance(on, pydal.objects.Expression):
2103 on = [on]
2105 if isinstance(on, list):
2106 on = as_lambda(on)
2107 relationships = {str(fields[0]): relationship(fields[0], on=on, join=method)}
2109 else:
2110 if fields:
2111 # limit the join to the given relationship fields (otherwise all relationships are joined)
2112 relationships = {str(k): relationships[str(k)] for k in fields}
2114 if method:
2115 relationships = {str(k): r.clone(join=method) for k, r in relationships.items()}
2117 return self._extend(relationships=relationships)
2119 def cache(
2120 self, *deps: Any, expires_at: Optional[dt.datetime] = None, ttl: Optional[int | dt.timedelta] = None
2121 ) -> "QueryBuilder[T_MetaInstance]":
2122 """
2123 Enable caching for this query, so repeated calls are loaded from a dill-serialized cache \
2124 instead of executing the SQL and collecting matching rows again.
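Example (illustrative sketch; caches the collected result for an hour):
    rows = MyTable.where(lambda t: t.id > 0).cache(ttl=3600).collect()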
2125 """
2126 existing = self.metadata.get("cache", {})
2128 metadata: Metadata = {}
2130 cache_meta = typing.cast(
2131 CacheMetadata,
2132 self.metadata.get("cache", {})
2133 | {
2134 "enabled": True,
2135 "depends_on": existing.get("depends_on", []) + [str(_) for _ in deps],
2136 "expires_at": get_expire(expires_at=expires_at, ttl=ttl),
2137 },
2138 )
2140 metadata["cache"] = cache_meta
2141 return self._extend(metadata=metadata)
2143 def _get_db(self) -> TypeDAL:
2144 if db := self.model._db:
2145 return db
2146 else: # pragma: no cover
2147 raise EnvironmentError("@define or db.define is not called on this class yet!")
2149 def _select_arg_convert(self, arg: Any) -> Any:
2150 # typedfield are not really used at runtime anymore, but leave it in for safety:
2151 if isinstance(arg, TypedField): # pragma: no cover
2152 arg = arg._field
2154 return arg
2156 def delete(self) -> list[int]:
2157 """
2158 Based on the current query, delete rows and return a list of deleted IDs.
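Example (illustrative sketch):
    deleted_ids = MyTable.where(lambda t: t.id > 100).delete()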
2159 """
2160 db = self._get_db()
2161 removed_ids = [_.id for _ in db(self.query).select("id")]
2162 if db(self.query).delete():
2163 # success!
2164 return removed_ids
2166 return []
2168 def _delete(self) -> str:
2169 db = self._get_db()
2170 return str(db(self.query)._delete())
2172 def update(self, **fields: Any) -> list[int]:
2173 """
2174 Based on the current query, update `fields` and return a list of updated IDs.
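Example (illustrative sketch; assumes a `MyTable` model with a `name` field):
    updated_ids = MyTable.where(lambda t: t.id == 5).update(name="New Name")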
2175 """
2176 # todo: limit?
2177 db = self._get_db()
2178 updated_ids = db(self.query).select("id").column("id")
2179 if db(self.query).update(**fields):
2180 # success!
2181 return updated_ids
2183 return []
2185 def _update(self, **fields: Any) -> str:
2186 db = self._get_db()
2187 return str(db(self.query)._update(**fields))
2189 def _before_query(self, mut_metadata: Metadata, add_id: bool = True) -> tuple[Query, list[Any], dict[str, Any]]:
2190 select_args = [self._select_arg_convert(_) for _ in self.select_args] or [self.model.ALL]
2191 select_kwargs = self.select_kwargs.copy()
2192 query = self.query
2193 model = self.model
2194 mut_metadata["query"] = query
2195 # require at least id of main table:
2196 select_fields = ", ".join([str(_) for _ in select_args])
2197 tablename = str(model)
2199 if add_id and f"{tablename}.id" not in select_fields:
2200 # the main table's required ID is missing from the selected fields; add it.
2201 select_args.append(model.id)
2203 if self.relationships:
2204 query, select_args = self._handle_relationships_pre_select(query, select_args, select_kwargs, mut_metadata)
2206 return query, select_args, select_kwargs
2208 def to_sql(self, add_id: bool = False) -> str:
2209 """
2210 Generate the SQL for the built query.
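Example (illustrative sketch; inspect the generated SQL without executing it):
    sql = MyTable.where(lambda t: t.id > 0).to_sql()
    print(sql)  # SELECT ... FROM ... WHERE ...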
2211 """
2212 db = self._get_db()
2214 query, select_args, select_kwargs = self._before_query({}, add_id=add_id)
2216 return str(db(query)._select(*select_args, **select_kwargs))
2218 def _collect(self) -> str:
2219 """
2220 Alias for to_sql, pydal-like syntax.
2221 """
2222 return self.to_sql()
2224 def _collect_cached(self, metadata: Metadata) -> "TypedRows[T_MetaInstance] | None":
2225 expires_at = metadata["cache"].get("expires_at")
2226 metadata["cache"] |= {
2227 # the key is partly dependent on cache metadata, but not on these fields:
2228 "key": None,
2229 "status": None,
2230 "cached_at": None,
2231 "expires_at": None,
2232 } # type: ignore
2234 _, key = create_and_hash_cache_key(
2235 self.model,
2236 metadata,
2237 self.query,
2238 self.select_args,
2239 self.select_kwargs,
2240 self.relationships.keys(),
2241 )
2243 # re-set after creating key:
2244 metadata["cache"]["expires_at"] = expires_at
2245 metadata["cache"]["key"] = key
2247 return load_from_cache(key)
2249 def collect(
2250 self, verbose: bool = False, _to: typing.Type["TypedRows[Any]"] = None, add_id: bool = True
2251 ) -> "TypedRows[T_MetaInstance]":
2252 """
2253 Execute the built query and turn it into model instances, while handling relationships.
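Example (illustrative sketch; assumes a `MyTable` model with an 'author' relationship):
    rows = MyTable.where(lambda t: t.id > 0).join("author").collect()
    for row in rows:
        print(row.id, row.author)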
2254 """
2255 if _to is None:
2256 _to = TypedRows
2258 db = self._get_db()
2259 metadata = typing.cast(Metadata, self.metadata.copy())
2261 if metadata.get("cache", {}).get("enabled") and (result := self._collect_cached(metadata)):
2262 return result
2264 query, select_args, select_kwargs = self._before_query(metadata, add_id=add_id)
2266 metadata["sql"] = db(query)._select(*select_args, **select_kwargs)
2268 if verbose: # pragma: no cover
2269 print(metadata["sql"])
2271 rows: Rows = db(query).select(*select_args, **select_kwargs)
2273 metadata["final_query"] = str(query)
2274 metadata["final_args"] = [str(_) for _ in select_args]
2275 metadata["final_kwargs"] = select_kwargs
2277 if verbose: # pragma: no cover
2278 print(rows)
2280 if not self.relationships:
2281 # easy
2282 typed_rows = _to.from_rows(rows, self.model, metadata=metadata)
2284 else:
2285 # harder: try to match rows to the objects they belong to
2286 # assume structure of {'table': <data>} per row.
2287 # if that's not the case, return default behavior again
2288 typed_rows = self._collect_with_relationships(rows, metadata=metadata, _to=_to)
2290 # only saves if requested in metadata:
2291 return save_to_cache(typed_rows, rows)
2293 def _handle_relationships_pre_select(
2294 self,
2295 query: Query,
2296 select_args: list[Any],
2297 select_kwargs: dict[str, Any],
2298 metadata: Metadata,
2299 ) -> tuple[Query, list[Any]]:
2300 db = self._get_db()
2301 model = self.model
2303 metadata["relationships"] = set(self.relationships.keys())
2305 # query = self._update_query_for_inner(db, model, query)
2306 join = []
2307 for key, relation in self.relationships.items():
2308 if not relation.condition or relation.join != "inner":
2309 continue
2311 other = relation.get_table(db)
2312 other = other.with_alias(f"{key}_{hash(relation)}")
2313 join.append(other.on(relation.condition(model, other)))
2315 if limitby := select_kwargs.pop("limitby", None):
2316 # if limitby + relationships:
2317 # 1. get IDs of main table entries that match 'query'
2318 # 2. change query to .belongs(id)
2319 # 3. add joins etc
2321 kwargs = {"limitby": limitby}
2323 if join:
2324 kwargs["join"] = join
2326 ids = db(query)._select(model.id, **kwargs)
2327 query = model.id.belongs(ids)
2328 metadata["ids"] = ids
2330 if join:
2331 select_kwargs["join"] = join
2333 left = []
2335 for key, relation in self.relationships.items():
2336 other = relation.get_table(db)
2337 method: JOIN_OPTIONS = relation.join or DEFAULT_JOIN_OPTION
2339 select_fields = ", ".join([str(_) for _ in select_args])
2340 pre_alias = str(other)
2342 if f"{other}." not in select_fields:
2343 # no fields of other selected. add .ALL:
2344 select_args.append(other.ALL)
2345 elif f"{other}.id" not in select_fields:
2346 # fields of other selected, but required ID is missing.
2347 select_args.append(other.id)
2349 if relation.on:
2350 # if it has a .on, it's always a left join!
2351 on = relation.on(model, other)
2352 if not isinstance(on, list): # pragma: no cover
2353 on = [on]
2355 left.extend(on)
2356 elif method == "left":
2357 # .on not given, generate it:
2358 other = other.with_alias(f"{key}_{hash(relation)}")
2359 condition = typing.cast(Query, relation.condition(model, other))
2360 left.append(other.on(condition))
2361 else:
2362 # else: inner join (handled earlier)
2363 other = other.with_alias(f"{key}_{hash(relation)}") # only for replace
2364 # other = other.with_alias(f"{key}_{hash(relation)}")
2365 # query &= relation.condition(model, other)
2367 # rebuild the select field string so that,
2368 # if 'other' was aliased above, the original table name can be replaced below:
2369 select_fields = ", ".join([str(_) for _ in select_args])
2371 post_alias = str(other).split(" AS ")[-1]
2372 if pre_alias != post_alias:
2373 # replace .select's with aliased:
2374 select_fields = select_fields.replace(
2375 f"{pre_alias}.",
2376 f"{post_alias}.",
2377 )
2379 select_args = select_fields.split(", ")
2381 select_kwargs["left"] = left
2382 return query, select_args
2384 def _collect_with_relationships(
2385 self, rows: Rows, metadata: Metadata, _to: typing.Type["TypedRows[Any]"] = None
2386 ) -> "TypedRows[T_MetaInstance]":
2387 """
2388 Transform the raw rows into TypedTable model instances.
2389 """
2390 db = self._get_db()
2391 main_table = self.model._ensure_table_defined()
2393 records = {}
2394 seen_relations: dict[str, set[str]] = defaultdict(set) # main id -> set of col + id for relation
2396 for row in rows:
2397 main = row[main_table]
2398 main_id = main.id
2400 if main_id not in records:
2401 records[main_id] = self.model(main)
2402 records[main_id]._with = list(self.relationships.keys())
2404 # set up all relationship defaults (once)
2405 for col, relationship in self.relationships.items():
2406 records[main_id][col] = [] if relationship.multiple else None
2408 # now add other relationship data
2409 for column, relation in self.relationships.items():
2410 relationship_column = f"{column}_{hash(relation)}"
2412 # relationship_column works for aliases with the same target column.
2413 # if col + relationship not in the row, just use the regular name.
2415 relation_data = (
2416 row[relationship_column] if relationship_column in row else row[relation.get_table_name()]
2417 )
2419 if relation_data.id is None:
2420 # always skip None ids
2421 continue
2423 if f"{column}-{relation_data.id}" in seen_relations[main_id]:
2424 # skip already-seen relation rows (speed-up)
2425 continue
2426 else:
2427 seen_relations[main_id].add(f"{column}-{relation_data.id}")
2429 relation_table = relation.get_table(db)
2430 # hopefully an instance of a typed table and a regular row otherwise:
2431 instance = relation_table(relation_data) if looks_like(relation_table, TypedTable) else relation_data
2433 if relation.multiple:
2434 # create list of T
2435 if not isinstance(records[main_id].get(column), list): # pragma: no cover
2436 # should already be set up before!
2437 setattr(records[main_id], column, [])
2439 records[main_id][column].append(instance)
2440 else:
2441 # create single T
2442 records[main_id][column] = instance
2444 return _to(rows, self.model, records, metadata=metadata)
2446 def collect_or_fail(self, exception: Exception = None) -> "TypedRows[T_MetaInstance]":
2447 """
2448 Call .collect() and raise an error if nothing found.
2450 Basically unwraps the Optional return type.
2451 """
2452 if result := self.collect():
2453 return result
2455 if not exception:
2456 exception = ValueError("Nothing found!")
2458 raise exception
2460 def __iter__(self) -> typing.Generator[T_MetaInstance, None, None]:
2461 """
2462 You can iterate a QueryBuilder object directly; .collect() is then called implicitly, for ease of use.
2463 """
2464 yield from self.collect()
2466 def count(self) -> int:
2467 """
2468 Return the number of rows matching the current query.
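Example (illustrative sketch):
    n = MyTable.where(lambda t: t.id > 0).count()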
2469 """
2470 db = self._get_db()
2471 model = self.model
2472 query = self.query
2474 for key, relation in self.relationships.items():
2475 if not relation.condition or relation.join != "inner":
2476 continue
2478 other = relation.get_table(db)
2479 other = other.with_alias(f"{key}_{hash(relation)}")
2480 query &= relation.condition(model, other)
2482 return db(query).count()
2484 def __paginate(
2485 self,
2486 limit: int,
2487 page: int = 1,
2488 ) -> "QueryBuilder[T_MetaInstance]":
2489 _from = limit * (page - 1)
2490 _to = limit * page
2492 available = self.count()
2494 metadata: Metadata = {}
2496 metadata["pagination"] = {
2497 "limit": limit,
2498 "current_page": page,
2499 "max_page": math.ceil(available / limit),
2500 "rows": available,
2501 "min_max": (_from, _to),
2502 }
2504 return self._extend(select_kwargs={"limitby": (_from, _to)}, metadata=metadata)
2506 def paginate(self, limit: int, page: int = 1, verbose: bool = False) -> "PaginatedRows[T_MetaInstance]":
2507 """
2508 Paginate transforms the more readable `page` and `limit` into pydal's internal limitby (offset, limit).
2510 Note: when using relationships, this limit is only applied to the 'main' table and any number of extra rows \
2511 can be loaded with relationship data!
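Example (illustrative sketch):
    page = MyTable.where(lambda t: t.id > 0).paginate(limit=10, page=2)
    page.data        # the rows for page 2
    page.pagination  # dict with total_pages, has_next_page, etc.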
2512 """
2513 builder = self.__paginate(limit, page)
2515 rows = typing.cast(PaginatedRows[T_MetaInstance], builder.collect(verbose=verbose, _to=PaginatedRows))
2517 rows._query_builder = builder
2518 return rows
2520 def _paginate(
2521 self,
2522 limit: int,
2523 page: int = 1,
2524 ) -> str:
2525 builder = self.__paginate(limit, page)
2526 return builder._collect()
2528 def chunk(self, chunk_size: int) -> typing.Generator["TypedRows[T_MetaInstance]", Any, None]:
2529 """
2530 Generator that yields rows in chunks of `chunk_size`.
2532 Each chunk is retrieved via pagination under the hood and
2533 yielded as a TypedRows instance.
2535 Example:
2536 ```
2537 for chunk_of_rows in Table.where(SomeTable.id > 5).chunk(100):
2538 for row in chunk_of_rows:
2539 # Process each row within the chunk.
2540 pass
2541 ```
2542 """
2543 page = 1
2545 while rows := self.__paginate(chunk_size, page).collect():
2546 yield rows
2547 page += 1
2549 def first(self, verbose: bool = False) -> T_MetaInstance | None:
2550 """
2551 Get the first row matching the currently built query.
2553 Also applies pagination (limit 1), since it would be a waste to select more rows than needed.
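Example (illustrative sketch):
    row = MyTable.where(lambda t: t.id > 5).first()  # a MyTable instance or None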
2554 """
2555 if row := self.paginate(page=1, limit=1, verbose=verbose).first():
2556 return self.model.from_row(row)
2557 else:
2558 return None
2560 def _first(self) -> str:
2561 return self._paginate(page=1, limit=1)
2563 def first_or_fail(self, exception: Exception = None, verbose: bool = False) -> T_MetaInstance:
2564 """
2565 Call .first() and raise an error if nothing found.
2567 Basically unwraps the Optional return type.
2568 """
2569 if inst := self.first(verbose=verbose):
2570 return inst
2572 if not exception:
2573 exception = ValueError("Nothing found!")
2575 raise exception
2578S = typing.TypeVar("S")
2581class PaginatedRows(TypedRows[T_MetaInstance]):
2582 """
2583 Extension on top of rows that is used when calling .paginate() instead of .collect().
2584 """
2586 _query_builder: QueryBuilder[T_MetaInstance]
2588 @property
2589 def data(self) -> list[T_MetaInstance]:
2590 """
2591 Get the underlying data.
2592 """
2593 return list(self.records.values())
2595 @property
2596 def pagination(self) -> Pagination:
2597 """
2598 Get all page info.
2599 """
2600 pagination_data = self.metadata["pagination"]
2602 has_next_page = pagination_data["current_page"] < pagination_data["max_page"]
2603 has_prev_page = pagination_data["current_page"] > 1
2604 return {
2605 "total_items": pagination_data["rows"],
2606 "current_page": pagination_data["current_page"],
2607 "per_page": pagination_data["limit"],
2608 "total_pages": pagination_data["max_page"],
2609 "has_next_page": has_next_page,
2610 "has_prev_page": has_prev_page,
2611 "next_page": pagination_data["current_page"] + 1 if has_next_page else None,
2612 "prev_page": pagination_data["current_page"] - 1 if has_prev_page else None,
2613 }
2615 def next(self) -> Self: # noqa: A003
2616 """
2617 Get the next page.
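Example (illustrative sketch):
    page1 = MyTable.where(lambda t: t.id > 0).paginate(limit=10, page=1)
    page2 = page1.next()  # raises StopIteration("Final Page") when exhausted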
2618 """
2619 data = self.metadata["pagination"]
2620 if data["current_page"] >= data["max_page"]:
2621 raise StopIteration("Final Page")
2623 return self._query_builder.paginate(limit=data["limit"], page=data["current_page"] + 1)
2625 def previous(self) -> Self:
2626 """
2627 Get the previous page.
2628 """
2629 data = self.metadata["pagination"]
2630 if data["current_page"] <= 1:
2631 raise StopIteration("First Page")
2633 return self._query_builder.paginate(limit=data["limit"], page=data["current_page"] - 1)
2635 def as_dict(self, *_: Any, **__: Any) -> PaginateDict: # type: ignore
2636 """
2637 Convert to a dictionary with pagination info and original data.
2639 All arguments are ignored!
2640 """
2641 return {"data": super().as_dict(), "pagination": self.pagination}
2644class TypedSet(pydal.objects.Set): # type: ignore # pragma: no cover
2645 """
2646 Used to make pydal Set more typed.
2648 This class is never instantiated; TypeDAL.__call__ only 'casts' its result to it for typing purposes.
2649 """
2651 def count(self, distinct: bool = None, cache: dict[str, Any] = None) -> int:
2652 """
2653 Count returns an int.
2654 """
2655 result = super().count(distinct, cache)
2656 return typing.cast(int, result)
2658 def select(self, *fields: Any, **attributes: Any) -> TypedRows[T_MetaInstance]:
2659 """
2660 Select returns a TypedRows of a user-defined table.
2662 Example:
2663 result: TypedRows[MyTable] = db(MyTable.id > 0).select()
2665 for row in result:
2666 typing.reveal_type(row) # MyTable
2667 """
2668 rows = super().select(*fields, **attributes)
2669 return typing.cast(TypedRows[T_MetaInstance], rows)