Coverage for src/typedal/core.py: 100%
903 statements
coverage.py v7.4.4, created at 2024-04-02 16:17 +0200
1"""
2Core functionality of TypeDAL.
3"""
5import contextlib
6import csv
7import datetime as dt
8import inspect
9import json
10import math
11import types
12import typing
13import warnings
14from collections import defaultdict
15from decimal import Decimal
16from pathlib import Path
17from typing import Any, Optional
19import pydal
20from pydal._globals import DEFAULT
21from pydal.objects import Field as _Field
22from pydal.objects import Query as _Query
23from pydal.objects import Row
24from pydal.objects import Table as _Table
25from typing_extensions import Self
27from .config import TypeDALConfig, load_config
28from .helpers import (
29 DummyQuery,
30 all_annotations,
31 all_dict,
32 as_lambda,
33 extract_type_optional,
34 filter_out,
35 instanciate,
36 is_union,
37 looks_like,
38 mktable,
39 origin_is_subclass,
40 to_snake,
41 unwrap_type,
42)
43from .serializers import as_json
44from .types import (
45 AfterDeleteCallable,
46 AfterInsertCallable,
47 AfterUpdateCallable,
48 AnyDict,
49 BeforeDeleteCallable,
50 BeforeInsertCallable,
51 BeforeUpdateCallable,
52 CacheMetadata,
53 Expression,
54 Field,
55 Metadata,
56 PaginateDict,
57 Pagination,
58 Query,
59 Rows,
60 Validator,
61 _Types,
62)
64# use typing.cast(type, ...) to make mypy happy with unions
65T_annotation = typing.Type[Any] | types.UnionType
66T_Query = typing.Union["Table", Query, bool, None, "TypedTable", typing.Type["TypedTable"]]
67T_Value = typing.TypeVar("T_Value") # actual type of the Field (via Generic)
68T_MetaInstance = typing.TypeVar("T_MetaInstance", bound="TypedTable") # bound="TypedTable"; bound="TableMeta"
69T = typing.TypeVar("T")
71BASIC_MAPPINGS: dict[T_annotation, str] = {
72 str: "string",
73 int: "integer",
74 bool: "boolean",
75 bytes: "blob",
76 float: "double",
77 object: "json",
78 Decimal: "decimal(10,2)",
79 dt.date: "date",
80 dt.time: "time",
81 dt.datetime: "datetime",
82}
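# Illustrative sketch (hypothetical model, not part of the original file): annotations on a
# TypedTable subclass are mapped through BASIC_MAPPINGS to pydal field type strings, e.g.
#
#     class Person(TypedTable):
#         name: str           # -> Field("name", "string", notnull=True)
#         age: int | None     # -> Field("age", "integer", notnull=False)  (Optional -> nullable)
#         balance: Decimal    # -> Field("balance", "decimal(10,2)")
#
# See _annotation_to_pydal_fieldtype further down for the actual conversion logic.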
85def is_typed_field(cls: Any) -> typing.TypeGuard["TypedField[Any]"]:
86 """
87 Is `cls` an instance or subclass of TypedField?
89 Deprecated
90 """
91 return (
92 isinstance(cls, TypedField)
93 or isinstance(typing.get_origin(cls), type)
94 and issubclass(typing.get_origin(cls), TypedField)
95 )
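# Illustrative note (hypothetical values): both the instance form and the generic-alias form
# are treated as typed fields here, e.g.
#     is_typed_field(TypedField(int))   # instance form   -> True
#     is_typed_field(TypedField[int])   # annotation form -> True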
98JOIN_OPTIONS = typing.Literal["left", "inner", None]
99DEFAULT_JOIN_OPTION: JOIN_OPTIONS = "left"
101# table-ish parameter:
102P_Table = typing.Union[typing.Type["TypedTable"], pydal.objects.Table]
104Condition: typing.TypeAlias = typing.Optional[
105 typing.Callable[
106 # self, other -> Query
107 [P_Table, P_Table],
108 Query | bool,
109 ]
110]
112OnQuery: typing.TypeAlias = typing.Optional[
113 typing.Callable[
114 # self, other -> list of .on statements
115 [P_Table, P_Table],
116 list[Expression],
117 ]
118]
120To_Type = typing.TypeVar("To_Type", type[Any], typing.Type[Any], str)
123class Relationship(typing.Generic[To_Type]):
124 """
125 Define a relationship to another table.
126 """
128 _type: To_Type
129 table: typing.Type["TypedTable"] | type | str
130 condition: Condition
131 on: OnQuery
132 multiple: bool
133 join: JOIN_OPTIONS
135 def __init__(
136 self,
137 _type: To_Type,
138 condition: Condition = None,
139 join: JOIN_OPTIONS = None,
140 on: OnQuery = None,
141 ):
142 """
143 Should not be called directly, use relationship() instead!
144 """
145 if condition and on:
146 warnings.warn(f"Relation | Both specified! {condition=} {on=} {_type=}")
147 raise ValueError("Please specify either a condition or an 'on' statement for this relationship!")
149 self._type = _type
150 self.condition = condition
151 self.join = "left" if on else join # .on is always left join!
152 self.on = on
154 if args := typing.get_args(_type):
155 self.table = unwrap_type(args[0])
156 self.multiple = True
157 else:
158 self.table = _type
159 self.multiple = False
161 if isinstance(self.table, str):
162 self.table = TypeDAL.to_snake(self.table)
164 def clone(self, **update: Any) -> "Relationship[To_Type]":
165 """
166 Create a copy of the relationship, possibly updated.
167 """
168 return self.__class__(
169 update.get("_type") or self._type,
170 update.get("condition") or self.condition,
171 update.get("join") or self.join,
172 update.get("on") or self.on,
173 )
175 def __repr__(self) -> str:
176 """
177 Representation of the relationship.
178 """
179 if callback := self.condition or self.on:
180 src_code = inspect.getsource(callback).strip()
181 else:
182 cls_name = self._type if isinstance(self._type, str) else self._type.__name__ # type: ignore
183 src_code = f"to {cls_name} (missing condition)"
185 join = f":{self.join}" if self.join else ""
186 return f"<Relationship{join} {src_code}>"
188 def get_table(self, db: "TypeDAL") -> typing.Type["TypedTable"]:
189 """
190 Get the table this relationship is bound to.
191 """
192 table = self.table # can be a string because db wasn't available yet
193 if isinstance(table, str):
194 if mapped := db._class_map.get(table):
195 # yay
196 return mapped
198 # boo, fall back to untyped table but pretend it is typed:
199 return typing.cast(typing.Type["TypedTable"], db[table]) # eh close enough!
201 return table
203 def get_table_name(self) -> str:
204 """
205 Get the name of the table this relationship is bound to.
206 """
207 if isinstance(self.table, str):
208 return self.table
210 if isinstance(self.table, pydal.objects.Table):
211 return str(self.table)
213 # else: typed table
214 try:
215 table = self.table._ensure_table_defined() if issubclass(self.table, TypedTable) else self.table
216 except Exception: # pragma: no cover
217 table = self.table
219 return str(table)
221 def __get__(self, instance: Any, owner: Any) -> typing.Optional[list[Any]] | "Relationship[To_Type]":
222 """
223 Relationship is a descriptor class, which can be returned from a class but not an instance.
225 For an instance, using .join() will replace the Relationship with the actual data.
226 If you forgot to join, a warning will be shown and empty data will be returned.
227 """
228 if not instance:
229 # relationship queried on class, that's allowed
230 return self
232 warnings.warn(
233 "Trying to get data from a relationship object! Did you forget to join it?", category=RuntimeWarning
234 )
235 if self.multiple:
236 return []
237 else:
238 return None
241def relationship(
242 _type: To_Type, condition: Condition = None, join: JOIN_OPTIONS = None, on: OnQuery = None
243) -> Relationship[To_Type]:
244 """
245 Define a relationship to another table, when its id is not stored in the current table.
247 Example:
248 class User(TypedTable):
249 name: str
251 posts = relationship(list["Post"], condition=lambda self, post: self.id == post.author, join='left')
253 class Post(TypedTable):
254 title: str
255 author: User
257 User.join("posts").first() # User instance with list[Post] in .posts
259 Here, Post stores the User ID, but `relationship(list["Post"])` still allows you to get the user's posts.
260 In this case, the join strategy is set to LEFT so users without posts are also still selected.
262 For complex queries with a pivot table, an `on` can be set instead of `condition`:
263 class User(TypedTable):
264 ...
266 tags = relationship(list["Tag"], on=lambda self, tag: [
267 Tagged.on(Tagged.entity == self.gid),
268 Tag.on((Tagged.tag == tag.id)),
269 ])
271 If you tried to capture this in a single 'condition', pydal would create a cross join, which is much less efficient.
272 """
273 return Relationship(_type, condition, join, on)
276def _generate_relationship_condition(
277 _: typing.Type["TypedTable"], key: str, field: typing.Union["TypedField[Any]", "Table", typing.Type["TypedTable"]]
278) -> Condition:
279 origin = typing.get_origin(field)
280 # else: generic
282 if origin == list:
283 # field = typing.get_args(field)[0] # actual field
284 # return lambda _self, _other: cls[key].contains(field)
286 return lambda _self, _other: _self[key].contains(_other.id)
287 else:
288 # normal reference
289 # return lambda _self, _other: cls[key] == field.id
290 return lambda _self, _other: _self[key] == _other.id
293def to_relationship(
294 cls: typing.Type["TypedTable"] | type[Any],
295 key: str,
296 field: typing.Union["TypedField[Any]", "Table", typing.Type["TypedTable"]],
297) -> typing.Optional[Relationship[Any]]:
298 """
299 Used to automatically create a Relationship instance for reference fields.
301 Example:
302 class MyTable(TypedTable):
303 reference: OtherTable
305 `reference` contains the id of an OtherTable row.
306 MyTable.relationships should have 'reference' as a relationship, so `MyTable.join('reference')` should work.
308 This function will automatically perform this logic (called in db.define):
309 to_relationship(MyTable, 'reference', OtherTable) -> Relationship[OtherTable]
311 Also works for list:reference (list[OtherTable]) and TypedField[OtherTable].
312 """
313 if looks_like(field, TypedField):
314 if args := typing.get_args(field):
315 field = args[0]
316 else:
317 # weird
318 return None
320 field, optional = extract_type_optional(field)
322 try:
323 condition = _generate_relationship_condition(cls, key, field)
324 except Exception as e: # pragma: no cover
325 warnings.warn("Could not generate Relationship condition", source=e)
326 condition = None
328 if not condition: # pragma: no cover
329 # something went wrong, not a valid relationship
330 warnings.warn(f"Invalid relationship for {cls.__name__}.{key}: {field}")
331 return None
333 join = "left" if optional or typing.get_origin(field) == list else "inner"
335 return Relationship(typing.cast(type[TypedTable], field), condition, typing.cast(JOIN_OPTIONS, join))
338class TypeDAL(pydal.DAL): # type: ignore
339 """
340 Drop-in replacement for pyDAL with a layer that converts class-based table definitions to classic pydal define_table calls.
341 """
343 _config: TypeDALConfig
345 def __init__(
346 self,
347 uri: Optional[str] = None, # default from config or 'sqlite:memory'
348 pool_size: int = None, # default 1 if sqlite else 3
349 folder: Optional[str | Path] = None, # default 'databases' in config
350 db_codec: str = "UTF-8",
351 check_reserved: Optional[list[str]] = None,
352 migrate: Optional[bool] = None, # default True by config
353 fake_migrate: Optional[bool] = None, # default False by config
354 migrate_enabled: bool = True,
355 fake_migrate_all: bool = False,
356 decode_credentials: bool = False,
357 driver_args: Optional[AnyDict] = None,
358 adapter_args: Optional[AnyDict] = None,
359 attempts: int = 5,
360 auto_import: bool = False,
361 bigint_id: bool = False,
362 debug: bool = False,
363 lazy_tables: bool = False,
364 db_uid: Optional[str] = None,
365 after_connection: typing.Callable[..., Any] = None,
366 tables: Optional[list[str]] = None,
367 ignore_field_case: bool = True,
368 entity_quoting: bool = True,
369 table_hash: Optional[str] = None,
370 enable_typedal_caching: bool = None,
371 use_pyproject: bool | str = True,
372 use_env: bool | str = True,
373 connection: Optional[str] = None,
374 config: Optional[TypeDALConfig] = None,
375 ) -> None:
376 """
377 Adds some internal tables after calling pydal's default init.
379 Set enable_typedal_caching to False to disable this behavior.
380 """
381 config = config or load_config(connection, _use_pyproject=use_pyproject, _use_env=use_env)
382 config.update(
383 database=uri,
384 dialect=uri.split(":")[0] if uri and ":" in uri else None,
385 folder=str(folder) if folder is not None else None,
386 migrate=migrate,
387 fake_migrate=fake_migrate,
388 caching=enable_typedal_caching,
389 pool_size=pool_size,
390 )
392 self._config = config
394 if config.folder:
395 Path(config.folder).mkdir(exist_ok=True)
397 super().__init__(
398 config.database,
399 config.pool_size,
400 config.folder,
401 db_codec,
402 check_reserved,
403 config.migrate,
404 config.fake_migrate,
405 migrate_enabled,
406 fake_migrate_all,
407 decode_credentials,
408 driver_args,
409 adapter_args,
410 attempts,
411 auto_import,
412 bigint_id,
413 debug,
414 lazy_tables,
415 db_uid,
416 after_connection,
417 tables,
418 ignore_field_case,
419 entity_quoting,
420 table_hash,
421 )
423 if config.caching:
424 self.try_define(_TypedalCache)
425 self.try_define(_TypedalCacheDependency)
427 def try_define(self, model: typing.Type[T], verbose: bool = False) -> typing.Type[T]:
428 """
429 Try to define a model with migrate or fall back to fake migrate.
430 """
431 try:
432 return self.define(model, migrate=True)
433 except Exception as e:
434 # clean up:
435 self.rollback()
436 if (tablename := self.to_snake(model.__name__)) and tablename in dir(self):
437 delattr(self, tablename)
439 if verbose:
440 warnings.warn(f"{model} could not be migrated, try faking", source=e, category=RuntimeWarning)
442 # try again:
443 return self.define(model, migrate=True, fake_migrate=True, redefine=True)
445 default_kwargs: typing.ClassVar[AnyDict] = {
446 # fields are 'required' (notnull) by default:
447 "notnull": True,
448 }
450 # maps table name to typedal class, for resolving future references
451 _class_map: typing.ClassVar[dict[str, typing.Type["TypedTable"]]] = {}
453 def _define(self, cls: typing.Type[T], **kwargs: Any) -> typing.Type[T]:
454 # todo: adding a new relationship item should also invalidate (previously unrelated) cached results
456 # todo: option to enable/disable cache dependency behavior:
457 # - don't set _before_update and _before_delete
458 # - don't add TypedalCacheDependency entry
459 # - don't invalidate other item on new row of this type
461 # when __future__.annotations is implemented, cls.__annotations__ will not work anymore as below.
462 # proper way to handle this would be (but gives error right now due to Table implementing magic methods):
463 # typing.get_type_hints(cls, globalns=None, localns=None)
465 # dirty way (with evil eval):
466 # [eval(v) for k, v in cls.__annotations__.items()]
467 # this however also stops working when variables outside this scope or even references to other
468 # objects are used. So for now, this package will NOT work when from __future__ import annotations is used,
469 # and might break in the future, when this annotations behavior is enabled by default.
471 # non-annotated variables have to be passed to define_table as kwargs
472 full_dict = all_dict(cls) # includes properties from parents (e.g. useful for mixins)
474 tablename = self.to_snake(cls.__name__)
475 # grab annotations of cls and its parents:
476 annotations = all_annotations(cls)
477 # extend with `prop = TypedField()` 'annotations':
478 annotations |= {k: typing.cast(type, v) for k, v in full_dict.items() if is_typed_field(v)}
479 # remove internal stuff:
480 annotations = {k: v for k, v in annotations.items() if not k.startswith("_")}
482 typedfields: dict[str, TypedField[Any]] = {
483 k: instanciate(v, True) for k, v in annotations.items() if is_typed_field(v)
484 }
486 relationships: dict[str, type[Relationship[Any]]] = filter_out(annotations, Relationship)
488 fields = {fname: self._to_field(fname, ftype) for fname, ftype in annotations.items()}
490 # ! don't use full_dict here:
491 other_kwargs = kwargs | {
492 k: v for k, v in cls.__dict__.items() if k not in annotations and not k.startswith("_")
493 } # other_kwargs was previously used to pass kwargs to typedal, but use @define(**kwargs) for that.
494 # now it's only used to extract relationships from the object.
495 # other properties of the class (incl methods) should not be touched
497 for key in typedfields.keys() - full_dict.keys():
498 # typed fields that haven't been added to the object yet
499 setattr(cls, key, typedfields[key])
501 # start with base classes and overwrite with current class:
502 relationships = filter_out(full_dict, Relationship) | relationships | filter_out(other_kwargs, Relationship)
504 # DEPRECATED: Relationship as annotation is currently not supported!
505 # ensure they are all instances and
506 # not mix of instances (`= relationship()`) and classes (`: Relationship[...]`):
507 # relationships = {
508 # k: v if isinstance(v, Relationship) else to_relationship(cls, k, v) for k, v in relationships.items()
509 # }
511 # keys of implicit references (also relationships):
512 reference_field_keys = [k for k, v in fields.items() if v.type.split(" ")[0] in ("list:reference", "reference")]
514 # add implicit relationships:
515 # User; list[User]; TypedField[User]; TypedField[list[User]]
516 relationships |= {
517 k: new_relationship
518 for k in reference_field_keys
519 if k not in relationships and (new_relationship := to_relationship(cls, k, annotations[k]))
520 }
522 cache_dependency = kwargs.pop("cache_dependency", True)
524 table: Table = self.define_table(tablename, *fields.values(), **kwargs)
526 for name, typed_field in typedfields.items():
527 field = fields[name]
528 typed_field.bind(field, table)
530 if issubclass(cls, TypedTable):
531 cls.__set_internals__(
532 db=self,
533 table=table,
534 # by now, all relationships should be instances!
535 relationships=typing.cast(dict[str, Relationship[Any]], relationships),
536 )
537 self._class_map[str(table)] = cls
538 cls.__on_define__(self)
539 else:
540 warnings.warn("db.define used without inheriting TypedTable. This could lead to strange problems!")
542 if not tablename.startswith("typedal_") and cache_dependency:
543 table._before_update.append(lambda s, _: _remove_cache(s, tablename))
544 table._before_delete.append(lambda s: _remove_cache(s, tablename))
546 return cls
548 @typing.overload
549 def define(self, maybe_cls: None = None, **kwargs: Any) -> typing.Callable[[typing.Type[T]], typing.Type[T]]:
550 """
551 Typing Overload for define without a class.
553 @db.define()
554 class MyTable(TypedTable): ...
555 """
557 @typing.overload
558 def define(self, maybe_cls: typing.Type[T], **kwargs: Any) -> typing.Type[T]:
559 """
560 Typing Overload for define with a class.
562 @db.define
563 class MyTable(TypedTable): ...
564 """
566 def define(
567 self, maybe_cls: typing.Type[T] | None = None, **kwargs: Any
568 ) -> typing.Type[T] | typing.Callable[[typing.Type[T]], typing.Type[T]]:
569 """
570 Can be used as a decorator on a class that inherits `TypedTable`, \
571 or as a regular method if you need to define your classes before you have access to a 'db' instance.
573 You can also pass extra arguments to db.define_table.
574 See http://www.web2py.com/books/default/chapter/29/06/the-database-abstraction-layer#Table-constructor
576 Example:
577 @db.define
578 class Person(TypedTable):
579 ...
581 class Article(TypedTable):
582 ...
584 # at a later time:
585 db.define(Article)
587 Returns:
588 the result of pydal.define_table
589 """
591 def wrapper(cls: typing.Type[T]) -> typing.Type[T]:
592 return self._define(cls, **kwargs)
594 if maybe_cls:
595 return wrapper(maybe_cls)
597 return wrapper
599 # def drop(self, table_name: str) -> None:
600 # """
601 # Remove a table by name (both on the database level and the typedal level).
602 # """
603 # # drop calls TypedTable.drop() and removes it from the `_class_map`
604 # if cls := self._class_map.pop(table_name, None):
605 # cls.drop()
607 # def drop_all(self, max_retries: int = None) -> None:
608 # """
609 # Remove all tables and keep doing so until everything is gone!
610 # """
611 # retries = 0
612 # if max_retries is None:
613 # max_retries = len(self.tables)
614 #
615 # while self.tables:
616 # retries += 1
617 # for table in self.tables:
618 # self.drop(table)
619 #
620 # if retries > max_retries:
621 # raise RuntimeError("Could not delete all tables")
623 def __call__(self, *_args: T_Query, **kwargs: Any) -> "TypedSet":
624 """
625 A db instance can be called directly to perform a query.
627 Usually, only a query is passed.
629 Example:
630 db(query).select()
632 """
633 args = list(_args)
634 if args:
635 cls = args[0]
636 if isinstance(cls, bool):
637 raise ValueError("Don't actually pass a bool to db()! Use a query instead.")
639 if isinstance(cls, type) and issubclass(type(cls), type) and issubclass(cls, TypedTable):
640 # table defined without @db.define decorator!
641 _cls: typing.Type[TypedTable] = cls
642 args[0] = _cls.id != None
644 _set = super().__call__(*args, **kwargs)
645 return typing.cast(TypedSet, _set)
647 def __getitem__(self, key: str) -> "Table":
648 """
649 Allows dynamically accessing a table by its name as a string.
651 Example:
652 db['users'] -> user
653 """
654 return typing.cast(Table, super().__getitem__(str(key)))
656 @classmethod
657 def _build_field(cls, name: str, _type: str, **kw: Any) -> Field:
658 return Field(name, _type, **{**cls.default_kwargs, **kw})
660 @classmethod
661 def _annotation_to_pydal_fieldtype(
662 cls, _ftype: T_annotation, mut_kw: typing.MutableMapping[str, Any]
663 ) -> Optional[str]:
664 # ftype can be a union or type. typing.cast is sometimes used to tell mypy when it's not a union.
665 ftype = typing.cast(type, _ftype) # cast from typing.Type to type to make mypy happy
667 if isinstance(ftype, str):
668 # extract type from string
669 ftype = typing.get_args(typing.Type[ftype])[0]._evaluate(
670 localns=locals(), globalns=globals(), recursive_guard=frozenset()
671 )
673 if mapping := BASIC_MAPPINGS.get(ftype):
674 # basic types
675 return mapping
676 elif isinstance(ftype, _Table):
677 # db.table
678 return f"reference {ftype._tablename}"
679 elif issubclass(type(ftype), type) and issubclass(ftype, TypedTable):
680 # SomeTable
681 snakename = cls.to_snake(ftype.__name__)
682 return f"reference {snakename}"
683 elif isinstance(ftype, TypedField):
684 # FieldType(type, ...)
685 return ftype._to_field(mut_kw)
686 elif origin_is_subclass(ftype, TypedField):
687 # TypedField[int]
688 return cls._annotation_to_pydal_fieldtype(typing.get_args(ftype)[0], mut_kw)
689 elif isinstance(ftype, types.GenericAlias) and typing.get_origin(ftype) in (list, TypedField):
690 # list[str] -> str -> string -> list:string
691 _child_type = typing.get_args(ftype)[0]
692 _child_type = cls._annotation_to_pydal_fieldtype(_child_type, mut_kw)
693 return f"list:{_child_type}"
694 elif is_union(ftype):
695 # str | int -> UnionType
696 # typing.Union[str | int] -> typing._UnionGenericAlias
698 # Optional[type] == type | None
700 match typing.get_args(ftype):
701 case (_child_type, _Types.NONETYPE) | (_Types.NONETYPE, _child_type):
702 # good union of Nullable
704 # if a field is optional, it is nullable:
705 mut_kw["notnull"] = False
706 return cls._annotation_to_pydal_fieldtype(_child_type, mut_kw)
707 case _:
708 # a union of two actual types is not supported by the db!
709 return None
710 else:
711 return None
713 @classmethod
714 def _to_field(cls, fname: str, ftype: type, **kw: Any) -> Field:
715 """
716 Convert an annotation into a pydal Field.
718 Args:
719 fname: name of the property
720 ftype: annotation of the property
721 kw: when using TypedField or a function returning it (e.g. StringField),
722 keyword args can be used to pass any other settings you would normally pass to a pydal Field
724 -> pydal.Field(fname, ftype, **kw)
726 Example:
727 class MyTable:
728 fname: ftype
729 id: int
730 name: str
731 reference: Table
732 other: TypedField(str, default="John Doe") # default will be in kwargs
733 """
734 fname = cls.to_snake(fname)
736 if converted_type := cls._annotation_to_pydal_fieldtype(ftype, kw):
737 return cls._build_field(fname, converted_type, **kw)
738 else:
739 raise NotImplementedError(f"Unsupported type {ftype}/{type(ftype)}")
741 @staticmethod
742 def to_snake(camel: str) -> str:
743 """
744 Moved to helpers, kept as a static method for legacy reasons.
745 """
746 return to_snake(camel)
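# Illustrative example (assumed behavior of helpers.to_snake): CamelCase class names become
# snake_case table names, e.g.
#     TypeDAL.to_snake("MyTable")  # -> "my_table"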
749class TableProtocol(typing.Protocol): # pragma: no cover
750 """
751 Make mypy happy.
752 """
754 id: "TypedField[int]"
756 def __getitem__(self, item: str) -> Field:
757 """
758 Tell mypy a Table supports dictionary notation for columns.
759 """
762class Table(_Table, TableProtocol): # type: ignore
763 """
764 Make mypy happy.
765 """
768class TableMeta(type):
769 """
770 This metaclass contains functionality for table classes that doesn't exist on their instances.
772 Example:
773 class MyTable(TypedTable):
774 some_field: TypedField[int]
776 MyTable.update_or_insert(...) # should work
778 MyTable.some_field # -> Field, can be used to query etc.
780 row = MyTable.first() # returns instance of MyTable
782 # row.update_or_insert(...) # shouldn't work!
784 row.some_field # -> int, with actual data
786 """
788 # set up by db.define:
789 # _db: TypeDAL | None = None
790 # _table: Table | None = None
791 _db: TypeDAL | None = None
792 _table: Table | None = None
793 _relationships: dict[str, Relationship[Any]] | None = None
795 #########################
796 # TypeDAL custom logic: #
797 #########################
799 def __set_internals__(self, db: pydal.DAL, table: Table, relationships: dict[str, Relationship[Any]]) -> None:
800 """
801 Store the related database and pydal table for later usage.
802 """
803 self._db = db
804 self._table = table
805 self._relationships = relationships
807 def __getattr__(self, col: str) -> Optional[Field]:
808 """
809 Magic method used by TableMeta to get a database field with dot notation on a class.
811 Example:
812 SomeTypedTable.col -> db.table.col (via TableMeta.__getattr__)
814 """
815 if self._table:
816 return getattr(self._table, col, None)
818 return None
820 def _ensure_table_defined(self) -> Table:
821 if not self._table:
822 raise EnvironmentError("@define or db.define is not called on this class yet!")
823 return self._table
825 def __iter__(self) -> typing.Generator[Field, None, None]:
826 """
827 Loop through the columns of this model.
828 """
829 table = self._ensure_table_defined()
830 yield from iter(table)
832 def __getitem__(self, item: str) -> Field:
833 """
834 Allow dict notation to get a column of this table (-> Field instance).
835 """
836 table = self._ensure_table_defined()
837 return table[item]
839 def __str__(self) -> str:
840 """
841 Normally, just returns the underlying table name, but with a fallback if the model is unbound.
842 """
843 if self._table:
844 return str(self._table)
845 else:
846 return f"<unbound table {self.__name__}>"
848 def from_row(self: typing.Type[T_MetaInstance], row: pydal.objects.Row) -> T_MetaInstance:
849 """
850 Create a model instance from a pydal row.
851 """
852 return self(row)
854 def all(self: typing.Type[T_MetaInstance]) -> "TypedRows[T_MetaInstance]":
855 """
856 Return all rows for this model.
857 """
858 return self.collect()
860 def get_relationships(self) -> dict[str, Relationship[Any]]:
861 """
862 Return the registered relationships of the current model.
863 """
864 return self._relationships or {}
866 ##########################
867 # TypeDAL Modified Logic #
868 ##########################
870 def insert(self: typing.Type[T_MetaInstance], **fields: Any) -> T_MetaInstance:
871 """
872 This is only called when db.define is not used as a decorator.
874 cls.__table functions as 'self'
876 Args:
877 **fields: anything you want to insert in the database
879 Returns: an instance representing the newly inserted row.
881 """
882 table = self._ensure_table_defined()
884 result = table.insert(**fields)
885 # it already is an int but mypy doesn't understand that
886 return self(result)
888 def _insert(self, **fields: Any) -> str:
889 table = self._ensure_table_defined()
891 return str(table._insert(**fields))
893 def bulk_insert(self: typing.Type[T_MetaInstance], items: list[AnyDict]) -> "TypedRows[T_MetaInstance]":
894 """
895 Insert multiple rows, returns a TypedRows set of new instances.
896 """
897 table = self._ensure_table_defined()
898 result = table.bulk_insert(items)
899 return self.where(lambda row: row.id.belongs(result)).collect()
901 def update_or_insert(
902 self: typing.Type[T_MetaInstance], query: T_Query | AnyDict = DEFAULT, **values: Any
903 ) -> T_MetaInstance:
904 """
905 Update a row if query matches, else insert a new one.
907 Returns the created or updated instance.
908 """
909 table = self._ensure_table_defined()
911 if query is DEFAULT:
912 record = table(**values)
913 elif isinstance(query, dict):
914 record = table(**query)
915 else:
916 record = table(query)
918 if not record:
919 return self.insert(**values)
921 record.update_record(**values)
922 return self(record)
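# Minimal usage sketch (hypothetical model and fields):
#     Person.update_or_insert(Person.name == "Alice", name="Alice", age=33)
# updates the matching row (or inserts one if the query finds nothing) and returns a Person
# instance either way; without a query, the **values themselves are used to find the record.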
924 def validate_and_insert(
925 self: typing.Type[T_MetaInstance], **fields: Any
926 ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]:
927 """
928 Validate input data and then insert a row.
930 Returns a tuple of (the created instance, a dict of errors).
931 """
932 table = self._ensure_table_defined()
933 result = table.validate_and_insert(**fields)
934 if row_id := result.get("id"):
935 return self(row_id), None
936 else:
937 return None, result.get("errors")
939 def validate_and_update(
940 self: typing.Type[T_MetaInstance], query: Query, **fields: Any
941 ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]:
942 """
943 Validate input data and then update at most 1 row.
945 Returns a tuple of (the updated instance, a dict of errors).
946 """
947 table = self._ensure_table_defined()
949 try:
950 result = table.validate_and_update(query, **fields)
951 except Exception as e:
952 result = {"errors": {"exception": str(e)}}
954 if errors := result.get("errors"):
955 return None, errors
956 elif row_id := result.get("id"):
957 return self(row_id), None
958 else: # pragma: no cover
959 # update on query without result (shouldn't happen)
960 return None, None
962 def validate_and_update_or_insert(
963 self: typing.Type[T_MetaInstance], query: Query, **fields: Any
964 ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]:
965 """
966 Validate input data and then update or insert (on at most 1 row).
968 Returns a tuple of (the updated/created instance, a dict of errors).
969 """
970 table = self._ensure_table_defined()
971 result = table.validate_and_update_or_insert(query, **fields)
973 if errors := result.get("errors"):
974 return None, errors
975 elif row_id := result.get("id"):
976 return self(row_id), None
977 else: # pragma: no cover
978 # update on query without result (shouldn't happen)
979 return None, None
981 def select(self: typing.Type[T_MetaInstance], *a: Any, **kw: Any) -> "QueryBuilder[T_MetaInstance]":
982 """
983 See QueryBuilder.select!
984 """
985 return QueryBuilder(self).select(*a, **kw)
987 def paginate(self: typing.Type[T_MetaInstance], limit: int, page: int = 1) -> "PaginatedRows[T_MetaInstance]":
988 """
989 See QueryBuilder.paginate!
990 """
991 return QueryBuilder(self).paginate(limit=limit, page=page)
993 def chunk(
994 self: typing.Type[T_MetaInstance], chunk_size: int
995 ) -> typing.Generator["TypedRows[T_MetaInstance]", Any, None]:
996 """
997 See QueryBuilder.chunk!
998 """
999 return QueryBuilder(self).chunk(chunk_size)
1001 def where(self: typing.Type[T_MetaInstance], *a: Any, **kw: Any) -> "QueryBuilder[T_MetaInstance]":
1002 """
1003 See QueryBuilder.where!
1004 """
1005 return QueryBuilder(self).where(*a, **kw)
1007 def cache(self: typing.Type[T_MetaInstance], *deps: Any, **kwargs: Any) -> "QueryBuilder[T_MetaInstance]":
1008 """
1009 See QueryBuilder.cache!
1010 """
1011 return QueryBuilder(self).cache(*deps, **kwargs)
1013 def count(self: typing.Type[T_MetaInstance]) -> int:
1014 """
1015 See QueryBuilder.count!
1016 """
1017 return QueryBuilder(self).count()
1019 def first(self: typing.Type[T_MetaInstance]) -> T_MetaInstance | None:
1020 """
1021 See QueryBuilder.first!
1022 """
1023 return QueryBuilder(self).first()
1025 def join(
1026 self: typing.Type[T_MetaInstance],
1027 *fields: str | typing.Type["TypedTable"],
1028 method: JOIN_OPTIONS = None,
1029 on: OnQuery | list[Expression] | Expression = None,
1030 condition: Condition = None,
1031 ) -> "QueryBuilder[T_MetaInstance]":
1032 """
1033 See QueryBuilder.join!
1034 """
1035 return QueryBuilder(self).join(*fields, on=on, condition=condition, method=method)
1037 def collect(self: typing.Type[T_MetaInstance], verbose: bool = False) -> "TypedRows[T_MetaInstance]":
1038 """
1039 See QueryBuilder.collect!
1040 """
1041 return QueryBuilder(self).collect(verbose=verbose)
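# Sketch of how these classmethods chain via QueryBuilder (hypothetical model and fields):
#     adults = Person.where(Person.age >= 18).join("posts").collect()   # TypedRows[Person]
#     page_1 = Person.where(lambda p: p.age >= 18).paginate(limit=10, page=1)
#     total  = Person.count()
# where/join/cache keep returning a QueryBuilder; collect/first/count/paginate/chunk then
# execute the query.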
1043 @property
1044 def ALL(cls) -> pydal.objects.SQLALL:
1045 """
1046 Select all fields for this table.
1047 """
1048 table = cls._ensure_table_defined()
1050 return table.ALL
1052 ##########################
1053 # TypeDAL Shadowed Logic #
1054 ##########################
1055 fields: list[str]
1057 # other table methods:
1059 def truncate(self, mode: str = "") -> None:
1060 """
1061 Remove all data and reset index.
1062 """
1063 table = self._ensure_table_defined()
1064 table.truncate(mode)
1066 def drop(self, mode: str = "") -> None:
1067 """
1068 Remove the underlying table.
1069 """
1070 table = self._ensure_table_defined()
1071 table.drop(mode)
1073 def create_index(self, name: str, *fields: Field | str, **kwargs: Any) -> bool:
1074 """
1075 Add an index on some columns of this table.
1076 """
1077 table = self._ensure_table_defined()
1078 result = table.create_index(name, *fields, **kwargs)
1079 return typing.cast(bool, result)
1081 def drop_index(self, name: str, if_exists: bool = False) -> bool:
1082 """
1083 Remove an index from this table.
1084 """
1085 table = self._ensure_table_defined()
1086 result = table.drop_index(name, if_exists)
1087 return typing.cast(bool, result)
1089 def import_from_csv_file(
1090 self,
1091 csvfile: typing.TextIO,
1092 id_map: dict[str, str] = None,
1093 null: Any = "<NULL>",
1094 unique: str = "uuid",
1095 id_offset: dict[str, int] = None, # id_offset used only when id_map is None
1096 transform: typing.Callable[[dict[Any, Any]], dict[Any, Any]] = None,
1097 validate: bool = False,
1098 encoding: str = "utf-8",
1099 delimiter: str = ",",
1100 quotechar: str = '"',
1101 quoting: int = csv.QUOTE_MINIMAL,
1102 restore: bool = False,
1103 **kwargs: Any,
1104 ) -> None:
1105 """
1106 Load a csv file into the database.
1107 """
1108 table = self._ensure_table_defined()
1109 table.import_from_csv_file(
1110 csvfile,
1111 id_map=id_map,
1112 null=null,
1113 unique=unique,
1114 id_offset=id_offset,
1115 transform=transform,
1116 validate=validate,
1117 encoding=encoding,
1118 delimiter=delimiter,
1119 quotechar=quotechar,
1120 quoting=quoting,
1121 restore=restore,
1122 **kwargs,
1123 )
1125 def on(self, query: Query | bool) -> Expression:
1126 """
1127 Shadow Table.on.
1129 Used for joins.
1131 See Also:
1132 http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#One-to-many-relation
1133 """
1134 table = self._ensure_table_defined()
1135 return typing.cast(Expression, table.on(query))
1137 def with_alias(self, alias: str) -> _Table:
1138 """
1139 Shadow Table.with_alias.
1141 Useful for joins when joining the same table multiple times.
1143 See Also:
1144 http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#One-to-many-relation
1145 """
1146 table = self._ensure_table_defined()
1147 return table.with_alias(alias)
1149 # @typing.dataclass_transform()
1152class TypedField(typing.Generic[T_Value]): # pragma: no cover
1153 """
1154 Typed version of pydal.Field, which will be converted to a normal Field in the background.
1155 """
1157 # will be set by .bind on db.define
1158 name = ""
1159 _db: Optional[pydal.DAL] = None
1160 _rname: Optional[str] = None
1161 _table: Optional[Table] = None
1162 _field: Optional[Field] = None
1164 _type: T_annotation
1165 kwargs: Any
1167 requires: Validator | typing.Iterable[Validator]
1169 def __init__(self, _type: typing.Type[T_Value] | types.UnionType = str, /, **settings: Any) -> None: # type: ignore
1170 """
1171 A TypedFieldType should not be initialized manually, but TypedField (from `fields.py`) should be used!
1172 """
1173 self._type = _type
1174 self.kwargs = settings
1175 super().__init__()
1177 @typing.overload
1178 def __get__(self, instance: T_MetaInstance, owner: typing.Type[T_MetaInstance]) -> T_Value: # pragma: no cover
1179 """
1180 row.field -> (actual data).
1181 """
1183 @typing.overload
1184 def __get__(self, instance: None, owner: "typing.Type[TypedTable]") -> "TypedField[T_Value]": # pragma: no cover
1185 """
1186 Table.field -> Field.
1187 """
1189 def __get__(
1190 self, instance: T_MetaInstance | None, owner: typing.Type[T_MetaInstance]
1191 ) -> typing.Union[T_Value, "TypedField[T_Value]"]:
1192 """
1193 Since this class is a Descriptor field, \
1194 it returns something else depending on whether it's called on a class or an instance.
1196 (this is mostly for mypy/typing)
1197 """
1198 if instance:
1199 # this is only reached in a very specific case:
1200 # an instance of the object was created with a specific set of fields selected (excluding the current one)
1201 # in that case, no value was stored in the owner -> return None (since the field was not selected)
1202 return typing.cast(T_Value, None) # cast as T_Value so mypy understands it for selected fields
1203 else:
1204 # getting as class -> return actual field so pydal understands it when using in query etc.
1205 return typing.cast(TypedField[T_Value], self._field) # pretend it's still typed for IDE support
1207 def __str__(self) -> str:
1208 """
1209 String representation of a Typed Field.
1211 If `type` is set explicitly (e.g. TypedField(str, type="text")), that type is used: `TypedField.text`,
1212 otherwise the type annotation is used (e.g. TypedField(str) -> TypedField.str)
1213 """
1214 return str(self._field) if self._field else ""
1216 def __repr__(self) -> str:
1217 """
1218 More detailed string representation of a Typed Field.
1220 Uses __str__ and adds the provided extra options (kwargs) in the representation.
1221 """
1222 s = self.__str__()
1224 if "type" in self.kwargs:
1225 # manual type in kwargs supplied
1226 t = self.kwargs["type"]
1227 elif issubclass(type, type(self._type)):
1228 # normal type, str.__name__ = 'str'
1229 t = getattr(self._type, "__name__", str(self._type))
1230 elif t_args := typing.get_args(self._type):
1231 # list[str] -> 'str'
1232 t = t_args[0].__name__
1233 else: # pragma: no cover
1234 # fallback - something else, may not even happen, I'm not sure
1235 t = self._type
1237 s = f"TypedField[{t}].{s}" if s else f"TypedField[{t}]"
1239 kw = self.kwargs.copy()
1240 kw.pop("type", None)
1241 return f"<{s} with options {kw}>"
1243 def _to_field(self, extra_kwargs: typing.MutableMapping[str, Any]) -> Optional[str]:
1244 """
1245 Convert a Typed Field instance to a pydal.Field.
1246 """
1247 other_kwargs = self.kwargs.copy()
1248 extra_kwargs.update(other_kwargs)
1249 return extra_kwargs.pop("type", False) or TypeDAL._annotation_to_pydal_fieldtype(self._type, extra_kwargs)
1251 def bind(self, field: pydal.objects.Field, table: pydal.objects.Table) -> None:
1252 """
1253 Bind the right db/table/field info to this class, so queries can be made using `Class.field == ...`.
1254 """
1255 self._table = table
1256 self._field = field
1258 def __getattr__(self, key: str) -> Any:
1259 """
1260 If the regular getattribute does not work, try to get info from the related Field.
1261 """
1262 with contextlib.suppress(AttributeError):
1263 return super().__getattribute__(key)
1265 # try on actual field:
1266 return getattr(self._field, key)
1268 def __eq__(self, other: Any) -> Query:
1269 """
1270 Performing == on a Field will result in a Query.
1271 """
1272 return typing.cast(Query, self._field == other)
1274 def __ne__(self, other: Any) -> Query:
1275 """
1276 Performing != on a Field will result in a Query.
1277 """
1278 return typing.cast(Query, self._field != other)
1280 def __gt__(self, other: Any) -> Query:
1281 """
1282 Performing > on a Field will result in a Query.
1283 """
1284 return typing.cast(Query, self._field > other)
1286 def __lt__(self, other: Any) -> Query:
1287 """
1288 Performing < on a Field will result in a Query.
1289 """
1290 return typing.cast(Query, self._field < other)
1292 def __ge__(self, other: Any) -> Query:
1293 """
1294 Performing >= on a Field will result in a Query.
1295 """
1296 return typing.cast(Query, self._field >= other)
1298 def __le__(self, other: Any) -> Query:
1299 """
1300 Performing <= on a Field will result in a Query.
1301 """
1302 return typing.cast(Query, self._field <= other)
1304 def __hash__(self) -> int:
1305 """
1306 Shadow Field.__hash__.
1307 """
1308 return hash(self._field)
1310 def __invert__(self) -> Expression:
1311 """
1312 Performing ~ on a Field will result in an Expression.
1313 """
1314 if not self._field: # pragma: no cover
1315 raise ValueError("Unbound Field can not be inverted!")
1317 return typing.cast(Expression, ~self._field)
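# Sketch of typical TypedField usage in a model (hypothetical names; extra **settings are
# forwarded to the underlying pydal Field):
#     class Person(TypedTable):
#         name: TypedField[str]                          # annotation-only form
#         nickname = TypedField(str, default="unknown")  # instance form with Field kwargs
# On the class, Person.name behaves like a pydal Field (usable in queries); on an instance it
# yields the stored value.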
1320class TypedTable(metaclass=TableMeta):
1321 """
1322 Enhanced modeling system on top of pydal's Table that adds typing and additional functionality.
1323 """
1325 # set up by 'new':
1326 _row: Row | None = None
1328 _with: list[str]
1330 id: "TypedField[int]"
1332 _before_insert: list[BeforeInsertCallable]
1333 _after_insert: list[AfterInsertCallable]
1334 _before_update: list[BeforeUpdateCallable]
1335 _after_update: list[AfterUpdateCallable]
1336 _before_delete: list[BeforeDeleteCallable]
1337 _after_delete: list[AfterDeleteCallable]
1339 def _setup_instance_methods(self) -> None:
1340 self.as_dict = self._as_dict # type: ignore
1341 self.__json__ = self.as_json = self._as_json # type: ignore
1342 # self.as_yaml = self._as_yaml # type: ignore
1343 self.as_xml = self._as_xml # type: ignore
1345 self.update = self._update # type: ignore
1347 self.delete_record = self._delete_record # type: ignore
1348 self.update_record = self._update_record # type: ignore
1350 def __new__(
1351 cls, row_or_id: typing.Union[Row, Query, pydal.objects.Set, int, str, None, "TypedTable"] = None, **filters: Any
1352 ) -> "TypedTable":
1353 """
1354 Create a TypedTable model instance from an existing row, ID or query.
1356 Examples:
1357 MyTable(1)
1358 MyTable(id=1)
1359 MyTable(MyTable.id == 1)
1360 """
1361 table = cls._ensure_table_defined()
1362 inst = super().__new__(cls)
1364 if isinstance(row_or_id, TypedTable):
1365 # existing typed table instance!
1366 return row_or_id
1367 elif isinstance(row_or_id, pydal.objects.Row):
1368 row = row_or_id
1369 elif row_or_id is not None:
1370 row = table(row_or_id, **filters)
1371 elif filters:
1372 row = table(**filters)
1373 else:
1374 # dummy object
1375 return inst
1377 if not row:
1378 return None # type: ignore
1380 inst._row = row
1381 inst.__dict__.update(row)
1382 inst._setup_instance_methods()
1383 return inst
1385 @classmethod
1386 def __on_define__(cls, db: TypeDAL) -> None:
1387 """
1388 Method that can be implemented by tables to do an action after db.define is completed.
1390 This can be useful if you need to add something like requires=IS_NOT_IN_DB(db, "table.field"),
1391 where you need a reference to the current database, which may not exist yet when defining the model.
1392 """
1394 def __iter__(self) -> typing.Generator[Any, None, None]:
1395 """
1396 Allows looping through the columns.
1397 """
1398 row = self._ensure_matching_row()
1399 yield from iter(row)
1401 def __getitem__(self, item: str) -> Any:
1402 """
1403 Allows dictionary notation to get columns.
1404 """
1405 if item in self.__dict__:
1406 return self.__dict__.get(item)
1408 # fallback to lookup in row
1409 if self._row:
1410 return self._row[item]
1412 # nothing found!
1413 raise KeyError(item)
1415 def __getattr__(self, item: str) -> Any:
1416 """
1417 Allows dot notation to get columns.
1418 """
1419 if value := self.get(item):
1420 return value
1422 raise AttributeError(item)
1424 def get(self, item: str, default: Any = None) -> Any:
1425 """
1426 Try to get a column from this instance, else return default.
1427 """
1428 try:
1429 return self.__getitem__(item)
1430 except KeyError:
1431 return default
1433 def __setitem__(self, key: str, value: Any) -> None:
1434 """
1435 Data can be updated via both dot and dict notation.
1436 """
1437 return setattr(self, key, value)
1439 def __int__(self) -> int:
1440 """
1441 Calling int on a model instance will return its id.
1442 """
1443 return getattr(self, "id", 0)
1445 def __bool__(self) -> bool:
1446 """
1447 If the instance has an underlying row with data, it is truthy.
1448 """
1449 return bool(getattr(self, "_row", False))
1451 def _ensure_matching_row(self) -> Row:
1452 if not getattr(self, "_row", None):
1453 raise EnvironmentError("Trying to access non-existent row. Maybe it was deleted or not yet initialized?")
1454 return self._row
1456 def __repr__(self) -> str:
1457 """
1458 String representation of the model instance.
1459 """
1460 model_name = self.__class__.__name__
1461 model_data = {}
1463 if self._row:
1464 model_data = self._row.as_json()
1466 details = model_name
1467 details += f"({model_data})"
1469 if relationships := getattr(self, "_with", []):
1470 details += f" + {relationships}"
1472 return f"<{details}>"
1474 # serialization
1475 # underscore variants work for class instances (set up by _setup_instance_methods)
1477 @classmethod
1478 def as_dict(cls, flat: bool = False, sanitize: bool = True) -> AnyDict:
1479 """
1480 Dump the object to a plain dict.
1482 Can be used as both a class or instance method:
1483 - dumps the table info if it's a class
1484 - dumps the row info if it's an instance (see _as_dict)
1485 """
1486 table = cls._ensure_table_defined()
1487 result = table.as_dict(flat, sanitize)
1488 return typing.cast(AnyDict, result)
1490 @classmethod
1491 def as_json(cls, sanitize: bool = True, indent: Optional[int] = None, **kwargs: Any) -> str:
1492 """
1493 Dump the object to json.
1495 Can be used as both a class or instance method:
1496 - dumps the table info if it's a class
1497 - dumps the row info if it's an instance (see _as_json)
1498 """
1499 data = cls.as_dict(sanitize=sanitize)
1500 return as_json.encode(data, indent=indent, **kwargs)
1502 @classmethod
1503 def as_xml(cls, sanitize: bool = True) -> str: # pragma: no cover
1504 """
1505 Dump the object to xml.
1507 Can be used as both a class or instance method:
1508 - dumps the table info if it's a class
1509 - dumps the row info if it's an instance (see _as_xml)
1510 """
1511 table = cls._ensure_table_defined()
1512 return typing.cast(str, table.as_xml(sanitize))
1514 @classmethod
1515 def as_yaml(cls, sanitize: bool = True) -> str:
1516 """
1517 Dump the object to yaml.
1519 Can be used as both a class or instance method:
1520 - dumps the table info if it's a class
1521 - dumps the row info if it's an instance (see _as_yaml)
1522 """
1523 table = cls._ensure_table_defined()
1524 return typing.cast(str, table.as_yaml(sanitize))
1526 def _as_dict(
1527 self, datetime_to_str: bool = False, custom_types: typing.Iterable[type] | type | None = None
1528 ) -> AnyDict:
1529 row = self._ensure_matching_row()
1531 result = row.as_dict(datetime_to_str=datetime_to_str, custom_types=custom_types)
1533 def asdict_method(obj: Any) -> Any: # pragma: no cover
1534 if hasattr(obj, "_as_dict"): # typedal
1535 return obj._as_dict()
1536 elif hasattr(obj, "as_dict"): # pydal
1537 return obj.as_dict()
1538 else: # something else??
1539 return obj.__dict__
1541 if _with := getattr(self, "_with", None):
1542 for relationship in _with:
1543 data = self.get(relationship)
1545 if isinstance(data, list):
1546 data = [asdict_method(_) for _ in data]
1547 elif data:
1548 data = asdict_method(data)
1550 result[relationship] = data
1552 return typing.cast(AnyDict, result)
1554 def _as_json(
1555 self,
1556 default: typing.Callable[[Any], Any] = None,
1557 indent: Optional[int] = None,
1558 **kwargs: Any,
1559 ) -> str:
1560 data = self._as_dict()
1561 return as_json.encode(data, default=default, indent=indent, **kwargs)
1563 def _as_xml(self, sanitize: bool = True) -> str: # pragma: no cover
1564 row = self._ensure_matching_row()
1565 return typing.cast(str, row.as_xml(sanitize))
1567 # def _as_yaml(self, sanitize: bool = True) -> str:
1568 # row = self._ensure_matching_row()
1569 # return typing.cast(str, row.as_yaml(sanitize))
1571 def __setattr__(self, key: str, value: Any) -> None:
1572 """
1573 When setting a property on a Typed Table model instance, also update the underlying row.
1574 """
1575 if self._row and key in self._row.__dict__ and not callable(value):
1576 # enables `row.key = value; row.update_record()`
1577 self._row[key] = value
1579 super().__setattr__(key, value)
1581 @classmethod
1582 def update(cls: typing.Type[T_MetaInstance], query: Query, **fields: Any) -> T_MetaInstance | None:
1583 """
1584 Update one record.
1586 Example:
1587 MyTable.update(MyTable.id == 1, name="NewName") -> MyTable
1588 """
1589 # todo: update multiple?
1590 if record := cls(query):
1591 return record.update_record(**fields)
1592 else:
1593 return None
1595 def _update(self: T_MetaInstance, **fields: Any) -> T_MetaInstance:
1596 row = self._ensure_matching_row()
1597 row.update(**fields)
1598 self.__dict__.update(**fields)
1599 return self
1601 def _update_record(self: T_MetaInstance, **fields: Any) -> T_MetaInstance:
1602 row = self._ensure_matching_row()
1603 new_row = row.update_record(**fields)
1604 self.update(**new_row)
1605 return self
1607 def update_record(self: T_MetaInstance, **fields: Any) -> T_MetaInstance: # pragma: no cover
1608 """
1609 Here as a placeholder for _update_record.
1611 Will be replaced on instance creation!
1612 """
1613 return self._update_record(**fields)
1615 def _delete_record(self) -> int:
1616 """
1617 Actual logic in `pydal.helpers.classes.RecordDeleter`.
1618 """
1619 row = self._ensure_matching_row()
1620 result = row.delete_record()
1621 self.__dict__ = {} # empty self, since row is no more.
1622 self._row = None # just to be sure
1623 self._setup_instance_methods()
1624 # ^ instance methods might've been deleted by emptying dict,
1625 # but we still want .as_dict to show an error, not the table's as_dict.
1626 return typing.cast(int, result)
1628 def delete_record(self) -> int: # pragma: no cover
1629 """
1630 Here as a placeholder for _delete_record.
1632 Will be replaced on instance creation!
1633 """
1634 return self._delete_record()
1636 # __del__ is also called at the end of a scope, so don't remove records on every del!
1638 # pickling:
1640 def __getstate__(self) -> AnyDict:
1641 """
1642 State to save when pickling.
1644 Prevents db connection from being pickled.
1645 Similar to as_dict but without changing the data of the relationships (dill does that recursively)
1646 """
1647 row = self._ensure_matching_row()
1648 result: AnyDict = row.as_dict()
1650 if _with := getattr(self, "_with", None):
1651 result["_with"] = _with
1652 for relationship in _with:
1653 data = self.get(relationship)
1655 result[relationship] = data
1657 result["_row"] = self._row.as_json() if self._row else ""
1658 return result
1660 def __setstate__(self, state: AnyDict) -> None:
1661 """
1662 Used by dill when loading from a bytestring.
1663 """
1664 # as_dict also includes table info, so dump as json to only get the actual row data
1665 # then create a new (more empty) row object:
1666 state["_row"] = Row(json.loads(state["_row"]))
1667 self.__dict__ |= state
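# Because __getstate__/__setstate__ drop the db connection and serialize the row as JSON,
# instances can be (de)serialized, e.g. with dill as mentioned above (sketch; `person` is a
# hypothetical TypedTable instance with a matching row):
#     import dill
#     blob = dill.dumps(person)
#     restored = dill.loads(blob)   # restored._row is rebuilt from the stored JSON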
1670# backwards compat:
1671TypedRow = TypedTable
1674class TypedRows(typing.Collection[T_MetaInstance], Rows):
1675 """
1676 Slightly enhanced and typed functionality on top of pydal Rows (the result of a select).
1677 """
1679 records: dict[int, T_MetaInstance]
1680 # _rows: Rows
1681 model: typing.Type[T_MetaInstance]
1682 metadata: Metadata
1684 # pseudo-properties: actually stored in _rows
1685 db: TypeDAL
1686 colnames: list[str]
1687 fields: list[Field]
1688 colnames_fields: list[Field]
1689 response: list[tuple[Any, ...]]
1691 def __init__(
1692 self,
1693 rows: Rows,
1694 model: typing.Type[T_MetaInstance],
1695 records: dict[int, T_MetaInstance] = None,
1696 metadata: Metadata = None,
1697 ) -> None:
1698 """
1699 Should not be called manually!
1701 Normally, the `records` from an existing `Rows` object are used
1702 but these can be overwritten with a `records` dict.
1703 `metadata` can be any (un)structured data.
1704 `model` is a TypedTable class.
1705 """
1706 records = records or {row.id: model(row) for row in rows}
1707 super().__init__(rows.db, records, rows.colnames, rows.compact, rows.response, rows.fields)
1708 self.model = model
1709 self.metadata = metadata or {}
1710 self.colnames = rows.colnames
1712 def __len__(self) -> int:
1713 """
1714 Return the count of rows.
1715 """
1716 return len(self.records)
1718 def __iter__(self) -> typing.Iterator[T_MetaInstance]:
1719 """
1720 Loop through the rows.
1721 """
1722 yield from self.records.values()
1724 def __contains__(self, ind: Any) -> bool:
1725 """
1726 Check if an id exists in this result set.
1727 """
1728 return ind in self.records
1730 def first(self) -> T_MetaInstance | None:
1731 """
1732 Get the row with the lowest id.
1733 """
1734 if not self.records:
1735 return None
1737 return next(iter(self))
1739 def last(self) -> T_MetaInstance | None:
1740 """
1741 Get the row with the highest id.
1742 """
1743 if not self.records:
1744 return None
1746 max_id = max(self.records.keys())
1747 return self[max_id]
1749 def find(
1750 self, f: typing.Callable[[T_MetaInstance], Query], limitby: tuple[int, int] = None
1751 ) -> "TypedRows[T_MetaInstance]":
1752 """
1753 Returns a new Rows object, a subset of the original object, filtered by the function `f`.
1754 """
1755 if not self.records:
1756 return self.__class__(self, self.model, {})
1758 records = {}
1759 if limitby:
1760 _min, _max = limitby
1761 else:
1762 _min, _max = 0, len(self)
1763 count = 0
1764 for i, row in self.records.items():
1765 if f(row):
1766 if _min <= count:
1767 records[i] = row
1768 count += 1
1769 if count == _max:
1770 break
1772 return self.__class__(self, self.model, records)
1774 def exclude(self, f: typing.Callable[[T_MetaInstance], Query]) -> "TypedRows[T_MetaInstance]":
1775 """
1776 Removes elements from the calling Rows object, filtered by the function `f`, \
1777 and returns a new Rows object containing the removed elements.
1778 """
1779 if not self.records:
1780 return self.__class__(self, self.model, {})
1781 removed = {}
1782 to_remove = []
1783 for i in self.records:
1784 row = self[i]
1785 if f(row):
1786 removed[i] = self.records[i]
1787 to_remove.append(i)
1789 [self.records.pop(i) for i in to_remove]
1791 return self.__class__(
1792 self,
1793 self.model,
1794 removed,
1795 )
1797 def sort(self, f: typing.Callable[[T_MetaInstance], Any], reverse: bool = False) -> list[T_MetaInstance]:
1798 """
1799 Returns a list of sorted elements (not sorted in place).
1800 """
1801 return [r for (r, s) in sorted(zip(self.records.values(), self), key=lambda r: f(r[1]), reverse=reverse)]
1803 def __str__(self) -> str:
1804 """
1805 Simple string representation.
1806 """
1807 return f"<TypedRows with {len(self)} records>"
1809 def __repr__(self) -> str:
1810 """
1811 Print a table on repr().
1812 """
1813 data = self.as_dict()
1814 headers = list(next(iter(data.values())).keys())
1815 return mktable(data, headers)
1817 def group_by_value(
1818 self, *fields: "str | Field | TypedField[T]", one_result: bool = False, **kwargs: Any
1819 ) -> dict[T, list[T_MetaInstance]]:
1820 """
1821 Group the rows by a specific field (which will be the dict key).
1822 """
1823 kwargs["one_result"] = one_result
1824 result = super().group_by_value(*fields, **kwargs)
1825 return typing.cast(dict[T, list[T_MetaInstance]], result)
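# Illustrative use (hypothetical field): group the selected rows by a column value,
#     by_city = rows.group_by_value(Person.city)  # -> {"Utrecht": [<Person>, ...], ...}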
1827 def column(self, column: str = None) -> list[Any]:
1828 """
1829 Get a list of all values in a specific column.
1831 Example:
1832 rows.column('name') -> ['Name 1', 'Name 2', ...]
1833 """
1834 return typing.cast(list[Any], super().column(column))
1836 def as_csv(self) -> str:
1837 """
1838 Dump the data to csv.
1839 """
1840 return typing.cast(str, super().as_csv())
1842 def as_dict(
1843 self,
1844 key: str = None,
1845 compact: bool = False,
1846 storage_to_dict: bool = False,
1847 datetime_to_str: bool = False,
1848 custom_types: list[type] = None,
1849 ) -> dict[int, AnyDict]:
1850 """
1851 Get the data in a dict of dicts.
1852 """
1853 if any([key, compact, storage_to_dict, datetime_to_str, custom_types]):
1854 # functionality not guaranteed
1855 return typing.cast(
1856 dict[int, AnyDict],
1857 super().as_dict(
1858 key or "id",
1859 compact,
1860 storage_to_dict,
1861 datetime_to_str,
1862 custom_types,
1863 ),
1864 )
1866 return {k: v.as_dict() for k, v in self.records.items()}
1868 def as_json(self, default: typing.Callable[[Any], Any] = None, indent: Optional[int] = None, **kwargs: Any) -> str:
1869 """
1870 Turn the data into a dict and then dump to JSON.
1871 """
1872 data = self.as_list()
1874 return as_json.encode(data, default=default, indent=indent, **kwargs)
1876 def json(self, default: typing.Callable[[Any], Any] = None, indent: Optional[int] = None, **kwargs: Any) -> str:
1877 """
1878 Turn the data into a dict and then dump to JSON.
1879 """
1880 return self.as_json(default=default, indent=indent, **kwargs)
1882 def as_list(
1883 self,
1884 compact: bool = False,
1885 storage_to_dict: bool = False,
1886 datetime_to_str: bool = False,
1887 custom_types: list[type] = None,
1888 ) -> list[AnyDict]:
1889 """
1890 Get the data in a list of dicts.
1891 """
1892 if any([compact, storage_to_dict, datetime_to_str, custom_types]):
1893 return typing.cast(list[AnyDict], super().as_list(compact, storage_to_dict, datetime_to_str, custom_types))
1895 return [_.as_dict() for _ in self.records.values()]
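# Usage sketch for the serialization helpers:
#     rows.as_dict()          # {id: {...}, ...}
#     rows.as_list()          # [{...}, ...]
#     rows.as_json(indent=2)  # JSON string, built from as_list()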
1897 def __getitem__(self, item: int) -> T_MetaInstance:
1898 """
1899 You can get a specific row by ID from a TypedRows object by using rows[idx] notation.
1901 Since pydal's implementation differs (they expect a list instead of a dict with id keys),
1902 using rows[0] will return the first row, regardless of its id.
1903 """
1904 try:
1905 return self.records[item]
1906 except KeyError as e:
1907 if item == 0 and (row := self.first()):
1908 # special case: pydal internals think Rows.records is a list, not a dict
1909 return row
1911 raise e
1913 def get(self, item: int) -> typing.Optional[T_MetaInstance]:
1914 """
1915 Get a row by ID, or receive None if it isn't in this result set.
1916 """
1917 return self.records.get(item)
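# Usage sketch for item access:
#     row = rows[5]        # row with id 5 (KeyError if absent; rows[0] falls back to the first row)
#     row = rows.get(999)  # None when id 999 is not in this result set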
1919 def update(self, **new_values: Any) -> bool:
1920 """
1921 Update the current rows in the database with new_values.
1922 """
1923 # cast to make mypy understand .id is a TypedField and not an int!
1924 table = typing.cast(typing.Type[TypedTable], self.model._ensure_table_defined())
1926 ids = set(self.column("id"))
1927 query = table.id.belongs(ids)
1928 return bool(self.db(query).update(**new_values))
1930 def delete(self) -> bool:
1931 """
1932 Delete the currently selected rows from the database.
1933 """
1934 # cast to make mypy understand .id is a TypedField and not an int!
1935 table = typing.cast(typing.Type[TypedTable], self.model._ensure_table_defined())
1937 ids = set(self.column("id"))
1938 query = table.id.belongs(ids)
1939 return bool(self.db(query).delete())
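# Usage sketch for bulk operations on a result set (hypothetical `active` field):
#     rows.update(active=False)  # update every selected row in the database
#     rows.delete()              # delete every selected row from the database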
1941 def join(
1942 self,
1943 field: "Field | TypedField[Any]",
1944 name: str = None,
1945 constraint: Query = None,
1946 fields: list[str | Field] = None,
1947 orderby: Optional[str | Field] = None,
1948 ) -> T_MetaInstance:
1949 """
1950 This can be used to JOIN with some relationships after the initial select.
1952 Using the QueryBuilder's .join() method is preferred!
1953 """
1954 result = super().join(field, name, constraint, fields or [], orderby)
1955 return typing.cast(T_MetaInstance, result)
1957 def export_to_csv_file(
1958 self,
1959 ofile: typing.TextIO,
1960 null: Any = "<NULL>",
1961 delimiter: str = ",",
1962 quotechar: str = '"',
1963 quoting: int = csv.QUOTE_MINIMAL,
1964 represent: bool = False,
1965 colnames: list[str] = None,
1966 write_colnames: bool = True,
1967 *args: Any,
1968 **kwargs: Any,
1969 ) -> None:
1970 """
1971 Shadow export_to_csv_file from Rows, but with typing.
1973 See http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#Exporting-and-importing-data
1974 """
1975 super().export_to_csv_file(
1976 ofile,
1977 null,
1978 *args,
1979 delimiter=delimiter,
1980 quotechar=quotechar,
1981 quoting=quoting,
1982 represent=represent,
1983 colnames=colnames or self.colnames,
1984 write_colnames=write_colnames,
1985 **kwargs,
1986 )
1988 @classmethod
1989 def from_rows(
1990 cls, rows: Rows, model: typing.Type[T_MetaInstance], metadata: Metadata = None
1991 ) -> "TypedRows[T_MetaInstance]":
1992 """
1993 Internal method to convert a Rows object to a TypedRows.
1994 """
1995 return cls(rows, model, metadata=metadata)
1997 def __getstate__(self) -> AnyDict:
1998 """
1999 Used by dill to dump to bytes (exclude db connection etc).
2000 """
2001 return {
2002 "metadata": json.dumps(self.metadata, default=str),
2003 "records": self.records,
2004 "model": str(self.model._table),
2005 "colnames": self.colnames,
2006 }
2008 def __setstate__(self, state: AnyDict) -> None:
2009 """
2010 Used by dill when loading from a bytestring.
2011 """
2012 state["metadata"] = json.loads(state["metadata"])
2013 self.__dict__.update(state)
2014 # db etc. set after undill by caching.py
2017from .caching import ( # noqa: E402
2018 _remove_cache,
2019 _TypedalCache,
2020 _TypedalCacheDependency,
2021 create_and_hash_cache_key,
2022 get_expire,
2023 load_from_cache,
2024 save_to_cache,
2025)
2028class QueryBuilder(typing.Generic[T_MetaInstance]):
2029 """
2030 Abstraction on top of pydal's query system.
2031 """
2033 model: typing.Type[T_MetaInstance]
2034 query: Query
2035 select_args: list[Any]
2036 select_kwargs: AnyDict
2037 relationships: dict[str, Relationship[Any]]
2038 metadata: Metadata
2040 def __init__(
2041 self,
2042 model: typing.Type[T_MetaInstance],
2043 add_query: Optional[Query] = None,
2044 select_args: Optional[list[Any]] = None,
2045 select_kwargs: Optional[AnyDict] = None,
2046 relationships: dict[str, Relationship[Any]] = None,
2047 metadata: Metadata = None,
2048 ):
2049 """
2050 Normally, you wouldn't manually initialize a QueryBuilder but start using a method on a TypedTable.
2052 Example:
2053 MyTable.where(...) -> QueryBuilder[MyTable]
2054 """
2055 self.model = model
2056 table = model._ensure_table_defined()
2057 default_query = typing.cast(Query, table.id > 0)
2058 self.query = add_query or default_query
2059 self.select_args = select_args or []
2060 self.select_kwargs = select_kwargs or {}
2061 self.relationships = relationships or {}
2062 self.metadata = metadata or {}
2064 def __str__(self) -> str:
2065 """
2066 Simple string representation for the query builder.
2067 """
2068 return f"QueryBuilder for {self.model}"
2070 def __repr__(self) -> str:
2071 """
2072 Advanced string representation for the query builder.
2073 """
2074 return (
2075 f"<QueryBuilder for {self.model} with "
2076 f"{len(self.select_args)} select args; "
2077 f"{len(self.select_kwargs)} select kwargs; "
2078 f"{len(self.relationships)} relationships; "
2079 f"query: {bool(self.query)}; "
2080 f"metadata: {self.metadata}; "
2081 f">"
2082 )
2084 def __bool__(self) -> bool:
2085 """
2086 A QueryBuilder is truthy if it has any matching rows.
2087 """
2088 return self.count() > 0
2090 def _extend(
2091 self,
2092 add_query: Optional[Query] = None,
2093 overwrite_query: Optional[Query] = None,
2094 select_args: Optional[list[Any]] = None,
2095 select_kwargs: Optional[AnyDict] = None,
2096 relationships: dict[str, Relationship[Any]] = None,
2097 metadata: Metadata = None,
2098 ) -> "QueryBuilder[T_MetaInstance]":
2099 return QueryBuilder(
2100 self.model,
2101 (add_query & self.query) if add_query else overwrite_query or self.query,
2102 (self.select_args + select_args) if select_args else self.select_args,
2103 (self.select_kwargs | select_kwargs) if select_kwargs else self.select_kwargs,
2104 (self.relationships | relationships) if relationships else self.relationships,
2105 (self.metadata | (metadata or {})) if metadata else self.metadata,
2106 )
2108 def select(self, *fields: Any, **options: Any) -> "QueryBuilder[T_MetaInstance]":
2109 """
2110 Fields: database columns by name ('id'), by field reference (table.id) or other (e.g. table.ALL).
2112 Options:
2113 paraphrased from the web2py pydal docs,
2114 For more info, see http://www.web2py.com/books/default/chapter/29/06/the-database-abstraction-layer#orderby-groupby-limitby-distinct-having-orderby_on_limitby-join-left-cache
2116 orderby: field(s) to order by. Supported:
2117 table.name - sort by name, ascending
2118 ~table.name - sort by name, descending
2119 <random> - sort randomly
2120 table.name|table.id - sort by two fields (first name, then id)
2122 groupby, having: together with orderby:
2123 groupby can be a field (e.g. table.name) to group records by
2124 having can be a query, only those `having` the condition are grouped
2126 limitby: tuple of min and max. When using the query builder, .paginate(limit, page) is recommended.
2127 distinct: bool/field. Only select rows that differ
2128 orderby_on_limitby (bool, default: True): by default, an implicit orderby is added when doing limitby.
2129 join: othertable.on(query) - do an INNER JOIN. Using TypeDAL relationships with .join() is recommended!
2130 left: othertable.on(query) - do a LEFT JOIN. Using TypeDAL relationships with .join() is recommended!
2131 cache: cache the query result to speed up repeated queries; e.g. (cache=(cache.ram, 3600), cacheable=True)
2132 """
2133 return self._extend(select_args=list(fields), select_kwargs=options)
2135 def where(
2136 self,
2137 *queries_or_lambdas: Query | typing.Callable[[typing.Type[T_MetaInstance]], Query],
2138 **filters: Any,
2139 ) -> "QueryBuilder[T_MetaInstance]":
2140 """
2141 Extend the builder's query.
2143 Can be used in multiple ways:
2144 .where(Query) -> with a direct query such as `Table.id == 5`
2145 .where(lambda table: table.id == 5) -> with a query via a lambda
2146 .where(id=5) -> via keyword arguments
2148 When using multiple where's, they will be ANDed:
2149 .where(lambda table: table.id == 5).where(lambda table: table.id == 6) == (table.id == 5) & (table.id == 6)
2150 When passing multiple queries to a single .where, they will be ORed:
2151 .where(lambda table: table.id == 5, lambda table: table.id == 6) == (table.id == 5) | (table.id == 6)
2152 """
2153 new_query = self.query
2154 table = self.model._ensure_table_defined()
2156 for field, value in filters.items():
2157 new_query &= table[field] == value
2159 subquery: DummyQuery | Query = DummyQuery()
2160 for query_or_lambda in queries_or_lambdas:
2161 if isinstance(query_or_lambda, _Query):
2162 subquery |= typing.cast(Query, query_or_lambda)
2163 elif callable(query_or_lambda):
2164 if result := query_or_lambda(self.model):
2165 subquery |= result
2166 elif isinstance(query_or_lambda, (Field, _Field)) or is_typed_field(query_or_lambda):
2167 subquery |= typing.cast(Query, query_or_lambda != None)
2168 else:
2169 raise ValueError(f"Unexpected query type ({type(query_or_lambda)}).")
2171 if subquery:
2172 new_query &= subquery
2174 return self._extend(overwrite_query=new_query)
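# Usage sketch for .where() (hypothetical `Person` TypedTable with `name` and `age` fields):
#     Person.where(lambda p: p.age > 18).where(name="Alice")        # chained .where()'s are ANDed
#     Person.where(Person.name == "Alice", Person.name == "Bob")    # multiple args in one .where() are ORed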
2176 def join(
2177 self,
2178 *fields: str | typing.Type[TypedTable],
2179 method: JOIN_OPTIONS = None,
2180 on: OnQuery | list[Expression] | Expression = None,
2181 condition: Condition = None,
2182 ) -> "QueryBuilder[T_MetaInstance]":
2183 """
2184 Include relationship fields in the result.
2186 `fields` can be names of Relationships on the current model.
2187 If no fields are passed, all will be used.
2189 By default, the `method` defined in the relationship is used.
2190 This can be overwritten with the `method` keyword argument (left or inner)
2191 """
2192 # todo: allow limiting amount of related rows returned for join?
2194 relationships = self.model.get_relationships()
2196 if condition and on:
2197 raise ValueError("condition and on can not be used together!")
2198 elif condition:
2199 if len(fields) != 1:
2200 raise ValueError("join(field, condition=...) can only be used with exactly one field!")
2202 if isinstance(condition, pydal.objects.Query):
2203 condition = as_lambda(condition)
2205 relationships = {str(fields[0]): relationship(fields[0], condition=condition, join=method)}
2206 elif on:
2207 if len(fields) != 1:
2208 raise ValueError("join(field, on=...) can only be used with exactly one field!")
2210 if isinstance(on, pydal.objects.Expression):
2211 on = [on]
2213 if isinstance(on, list):
2214 on = as_lambda(on)
2215 relationships = {str(fields[0]): relationship(fields[0], on=on, join=method)}
2217 else:
2218 if fields:
2219 # only join on the given relationships (when none are given, all are used)
2220 relationships = {str(k): relationships[str(k)] for k in fields}
2222 if method:
2223 relationships = {str(k): r.clone(join=method) for k, r in relationships.items()}
2225 return self._extend(relationships=relationships)
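# Usage sketch for .join() (hypothetical `pets` relationship on `Person`):
#     Person.where(lambda p: p.age > 18).join()                        # join on all defined relationships
#     Person.where(lambda p: p.age > 18).join("pets", method="inner")  # only 'pets', as an INNER JOIN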
2227 def cache(
2228 self, *deps: Any, expires_at: Optional[dt.datetime] = None, ttl: Optional[int | dt.timedelta] = None
2229 ) -> "QueryBuilder[T_MetaInstance]":
2230 """
2231 Enable caching for this query to load repeated calls from a dill row \
2232 instead of executing the SQL and collecting matching rows again.
2233 """
2234 existing = self.metadata.get("cache", {})
2236 metadata: Metadata = {}
2238 cache_meta = typing.cast(
2239 CacheMetadata,
2240 self.metadata.get("cache", {})
2241 | {
2242 "enabled": True,
2243 "depends_on": existing.get("depends_on", []) + [str(_) for _ in deps],
2244 "expires_at": get_expire(expires_at=expires_at, ttl=ttl),
2245 },
2246 )
2248 metadata["cache"] = cache_meta
2249 return self._extend(metadata=metadata)
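# Usage sketch for .cache():
#     Person.where(lambda p: p.age > 18).cache(ttl=3600).collect()  # repeated calls within an hour hit the cache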
2251 def _get_db(self) -> TypeDAL:
2252 if db := self.model._db:
2253 return db
2254 else: # pragma: no cover
2255 raise EnvironmentError("@define or db.define is not called on this class yet!")
2257 def _select_arg_convert(self, arg: Any) -> Any:
2258 # typedfield are not really used at runtime anymore, but leave it in for safety:
2259 if isinstance(arg, TypedField): # pragma: no cover
2260 arg = arg._field
2262 return arg
2264 def delete(self) -> list[int]:
2265 """
2266 Based on the current query, delete rows and return a list of deleted IDs.
2267 """
2268 db = self._get_db()
2269 removed_ids = [_.id for _ in db(self.query).select("id")]
2270 if db(self.query).delete():
2271 # success!
2272 return removed_ids
2274 return []
2276 def _delete(self) -> str:
2277 db = self._get_db()
2278 return str(db(self.query)._delete())
2280 def update(self, **fields: Any) -> list[int]:
2281 """
2282 Based on the current query, update `fields` and return a list of updated IDs.
2283 """
2284 # todo: limit?
2285 db = self._get_db()
2286 updated_ids = db(self.query).select("id").column("id")
2287 if db(self.query).update(**fields):
2288 # success!
2289 return updated_ids
2291 return []
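# Usage sketch for bulk delete/update via the builder (hypothetical `active` field):
#     removed_ids = Person.where(active=False).delete()              # list of deleted IDs
#     updated_ids = Person.where(active=False).update(active=True)   # list of updated IDs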
2293 def _update(self, **fields: Any) -> str:
2294 db = self._get_db()
2295 return str(db(self.query)._update(**fields))
2297 def _before_query(self, mut_metadata: Metadata, add_id: bool = True) -> tuple[Query, list[Any], AnyDict]:
2298 select_args = [self._select_arg_convert(_) for _ in self.select_args] or [self.model.ALL]
2299 select_kwargs = self.select_kwargs.copy()
2300 query = self.query
2301 model = self.model
2302 mut_metadata["query"] = query
2303 # require at least id of main table:
2304 select_fields = ", ".join([str(_) for _ in select_args])
2305 tablename = str(model)
2307 if add_id and f"{tablename}.id" not in select_fields:
2308 # specific fields were selected, but the required ID of the main table is missing.
2309 select_args.append(model.id)
2311 if self.relationships:
2312 query, select_args = self._handle_relationships_pre_select(query, select_args, select_kwargs, mut_metadata)
2314 return query, select_args, select_kwargs
2316 def to_sql(self, add_id: bool = False) -> str:
2317 """
2318 Generate the SQL for the built query.
2319 """
2320 db = self._get_db()
2322 query, select_args, select_kwargs = self._before_query({}, add_id=add_id)
2324 return str(db(query)._select(*select_args, **select_kwargs))
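# Usage sketch for .to_sql():
#     sql = Person.where(lambda p: p.age > 18).to_sql()  # inspect the generated SELECT without executing it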
2326 def _collect(self) -> str:
2327 """
2328 Alias for to_sql, pydal-like syntax.
2329 """
2330 return self.to_sql()
2332 def _collect_cached(self, metadata: Metadata) -> "TypedRows[T_MetaInstance] | None":
2333 expires_at = metadata["cache"].get("expires_at")
2334 metadata["cache"] |= {
2335 # key is partly dependent on cache metadata, but not on these:
2336 "key": None,
2337 "status": None,
2338 "cached_at": None,
2339 "expires_at": None,
2340 }
2342 _, key = create_and_hash_cache_key(
2343 self.model,
2344 metadata,
2345 self.query,
2346 self.select_args,
2347 self.select_kwargs,
2348 self.relationships.keys(),
2349 )
2351 # re-set after creating key:
2352 metadata["cache"]["expires_at"] = expires_at
2353 metadata["cache"]["key"] = key
2355 return load_from_cache(key, self._get_db())
2357 def execute(self, add_id: bool = False) -> Rows:
2358 """
2359 Raw version of .collect which only executes the SQL, without performing any magic afterwards.
2360 """
2361 db = self._get_db()
2362 metadata = typing.cast(Metadata, self.metadata.copy())
2364 query, select_args, select_kwargs = self._before_query(metadata, add_id=add_id)
2366 return db(query).select(*select_args, **select_kwargs)
2368 def collect(
2369 self, verbose: bool = False, _to: typing.Type["TypedRows[Any]"] = None, add_id: bool = True
2370 ) -> "TypedRows[T_MetaInstance]":
2371 """
2372 Execute the built query and turn it into model instances, while handling relationships.
2373 """
2374 if _to is None:
2375 _to = TypedRows
2377 db = self._get_db()
2378 metadata = typing.cast(Metadata, self.metadata.copy())
2380 if metadata.get("cache", {}).get("enabled") and (result := self._collect_cached(metadata)):
2381 return result
2383 query, select_args, select_kwargs = self._before_query(metadata, add_id=add_id)
2385 metadata["sql"] = db(query)._select(*select_args, **select_kwargs)
2387 if verbose: # pragma: no cover
2388 print(metadata["sql"])
2390 rows: Rows = db(query).select(*select_args, **select_kwargs)
2392 metadata["final_query"] = str(query)
2393 metadata["final_args"] = [str(_) for _ in select_args]
2394 metadata["final_kwargs"] = select_kwargs
2396 if verbose: # pragma: no cover
2397 print(rows)
2399 if not self.relationships:
2400 # easy
2401 typed_rows = _to.from_rows(rows, self.model, metadata=metadata)
2403 else:
2404 # harder: try to match rows to the belonging objects
2405 # assume structure of {'table': <data>} per row.
2406 # if that's not the case, return default behavior again
2407 typed_rows = self._collect_with_relationships(rows, metadata=metadata, _to=_to)
2409 # only saves if requested in metadata:
2410 return save_to_cache(typed_rows, rows)
2412 def _handle_relationships_pre_select(
2413 self,
2414 query: Query,
2415 select_args: list[Any],
2416 select_kwargs: AnyDict,
2417 metadata: Metadata,
2418 ) -> tuple[Query, list[Any]]:
2419 db = self._get_db()
2420 model = self.model
2422 metadata["relationships"] = set(self.relationships.keys())
2424 # query = self._update_query_for_inner(db, model, query)
2425 join = []
2426 for key, relation in self.relationships.items():
2427 if not relation.condition or relation.join != "inner":
2428 continue
2430 other = relation.get_table(db)
2431 other = other.with_alias(f"{key}_{hash(relation)}")
2432 join.append(other.on(relation.condition(model, other)))
2434 if limitby := select_kwargs.pop("limitby", None):
2435 # if limitby + relationships:
2436 # 1. get IDs of main table entries that match 'query'
2437 # 2. change query to .belongs(id)
2438 # 3. add joins etc
2440 kwargs = {"limitby": limitby}
2442 if join:
2443 kwargs["join"] = join
2445 ids = db(query)._select(model.id, **kwargs)
2446 query = model.id.belongs(ids)
2447 metadata["ids"] = ids
2449 if join:
2450 select_kwargs["join"] = join
2452 left = []
2454 for key, relation in self.relationships.items():
2455 other = relation.get_table(db)
2456 method: JOIN_OPTIONS = relation.join or DEFAULT_JOIN_OPTION
2458 select_fields = ", ".join([str(_) for _ in select_args])
2459 pre_alias = str(other)
2461 if f"{other}." not in select_fields:
2462 # no fields of other selected. add .ALL:
2463 select_args.append(other.ALL)
2464 elif f"{other}.id" not in select_fields:
2465 # fields of other selected, but required ID is missing.
2466 select_args.append(other.id)
2468 if relation.on:
2469 # if it has a .on, it's always a left join!
2470 on = relation.on(model, other)
2471 if not isinstance(on, list): # pragma: no cover
2472 on = [on]
2474 left.extend(on)
2475 elif method == "left":
2476 # .on not given, generate it:
2477 other = other.with_alias(f"{key}_{hash(relation)}")
2478 condition = typing.cast(Query, relation.condition(model, other))
2479 left.append(other.on(condition))
2480 else:
2481 # else: inner join (handled earlier)
2482 other = other.with_alias(f"{key}_{hash(relation)}") # only for replace
2483 # other = other.with_alias(f"{key}_{hash(relation)}")
2484 # query &= relation.condition(model, other)
2486 # if no fields of 'other' are included, add other.ALL
2487 # else: only add other.id if missing
2488 select_fields = ", ".join([str(_) for _ in select_args])
2490 post_alias = str(other).split(" AS ")[-1]
2491 if pre_alias != post_alias:
2492 # replace .select's with aliased:
2493 select_fields = select_fields.replace(
2494 f"{pre_alias}.",
2495 f"{post_alias}.",
2496 )
2498 select_args = select_fields.split(", ")
2500 select_kwargs["left"] = left
2501 return query, select_args
2503 def _collect_with_relationships(
2504 self, rows: Rows, metadata: Metadata, _to: typing.Type["TypedRows[Any]"]
2505 ) -> "TypedRows[T_MetaInstance]":
2506 """
2507 Transform the raw rows into Typed Table model instances.
2508 """
2509 db = self._get_db()
2510 main_table = self.model._ensure_table_defined()
2512 records = {}
2513 seen_relations: dict[str, set[str]] = defaultdict(set) # main id -> set of col + id for relation
2515 for row in rows:
2516 main = row[main_table]
2517 main_id = main.id
2519 if main_id not in records:
2520 records[main_id] = self.model(main)
2521 records[main_id]._with = list(self.relationships.keys())
2523 # set up all relationship defaults (once)
2524 for col, relationship in self.relationships.items():
2525 records[main_id][col] = [] if relationship.multiple else None
2527 # now add other relationship data
2528 for column, relation in self.relationships.items():
2529 relationship_column = f"{column}_{hash(relation)}"
2531 # relationship_column works for aliases with the same target column.
2532 # if col + relationship not in the row, just use the regular name.
2534 relation_data = (
2535 row[relationship_column] if relationship_column in row else row[relation.get_table_name()]
2536 )
2538 if relation_data.id is None:
2539 # always skip None ids
2540 continue
2542 if f"{column}-{relation_data.id}" in seen_relations[main_id]:
2543 # speed up duplicates
2544 continue
2545 else:
2546 seen_relations[main_id].add(f"{column}-{relation_data.id}")
2548 relation_table = relation.get_table(db)
2549 # hopefully an instance of a typed table, otherwise a regular row:
2550 instance = relation_table(relation_data) if looks_like(relation_table, TypedTable) else relation_data
2552 if relation.multiple:
2553 # create list of T
2554 if not isinstance(records[main_id].get(column), list): # pragma: no cover
2555 # should already be set up before!
2556 setattr(records[main_id], column, [])
2558 records[main_id][column].append(instance)
2559 else:
2560 # create single T
2561 records[main_id][column] = instance
2563 return _to(rows, self.model, records, metadata=metadata)
2565 def collect_or_fail(self, exception: Exception = None) -> "TypedRows[T_MetaInstance]":
2566 """
2567 Call .collect() and raise an error if nothing found.
2569 Basically unwraps Optional type.
2570 """
2571 if result := self.collect():
2572 return result
2574 if not exception:
2575 exception = ValueError("Nothing found!")
2577 raise exception
2579 def __iter__(self) -> typing.Generator[T_MetaInstance, None, None]:
2580 """
2581 You can start iterating a Query Builder object before calling collect, for ease of use.
2582 """
2583 yield from self.collect()
2585 def count(self) -> int:
2586 """
2587 Return the amount of rows matching the current query.
2588 """
2589 db = self._get_db()
2590 model = self.model
2591 query = self.query
2593 for key, relation in self.relationships.items():
2594 if not relation.condition or relation.join != "inner":
2595 continue
2597 other = relation.get_table(db)
2598 other = other.with_alias(f"{key}_{hash(relation)}")
2599 query &= relation.condition(model, other)
2601 return db(query).count()
2603 def __paginate(
2604 self,
2605 limit: int,
2606 page: int = 1,
2607 ) -> "QueryBuilder[T_MetaInstance]":
2608 _from = limit * (page - 1)
2609 _to = limit * page
2611 available = self.count()
2613 metadata: Metadata = {}
2615 metadata["pagination"] = {
2616 "limit": limit,
2617 "current_page": page,
2618 "max_page": math.ceil(available / limit),
2619 "rows": available,
2620 "min_max": (_from, _to),
2621 }
2623 return self._extend(select_kwargs={"limitby": (_from, _to)}, metadata=metadata)
2625 def paginate(self, limit: int, page: int = 1, verbose: bool = False) -> "PaginatedRows[T_MetaInstance]":
2626 """
2627 Paginate transforms the more readable `page` and `limit` into pydal's internal limitby tuple.
2629 Note: when using relationships, this limit is only applied to the 'main' table and any number of extra rows \
2630 can be loaded with relationship data!
2631 """
2632 builder = self.__paginate(limit, page)
2634 rows = typing.cast(PaginatedRows[T_MetaInstance], builder.collect(verbose=verbose, _to=PaginatedRows))
2636 rows._query_builder = builder
2637 return rows
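# Usage sketch for .paginate() (hypothetical `Person` table):
#     page1 = Person.where(lambda p: p.age > 18).paginate(limit=20, page=1)
#     page1.pagination["total_pages"]  # page info dict
#     page2 = page1.next()             # raises StopIteration past the final page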
2639 def _paginate(
2640 self,
2641 limit: int,
2642 page: int = 1,
2643 ) -> str:
2644 builder = self.__paginate(limit, page)
2645 return builder._collect()
2647 def chunk(self, chunk_size: int) -> typing.Generator["TypedRows[T_MetaInstance]", Any, None]:
2648 """
2649 Generator that yields rows from a paginated source in chunks.
2651 This function retrieves rows from a paginated data source in chunks of the
2652 specified `chunk_size` and yields them as TypedRows.
2654 Example:
2655 ```
2656 for chunk_of_rows in Table.where(SomeTable.id > 5).chunk(100):
2657 for row in chunk_of_rows:
2658 # Process each row within the chunk.
2659 pass
2660 ```
2661 """
2662 page = 1
2664 while rows := self.__paginate(chunk_size, page).collect():
2665 yield rows
2666 page += 1
2668 def first(self, verbose: bool = False) -> T_MetaInstance | None:
2669 """
2670 Get the first row matching the currently built query.
2672 Also applies pagination, since it would be a waste to select more rows than needed.
2673 """
2674 if row := self.paginate(page=1, limit=1, verbose=verbose).first():
2675 return self.model.from_row(row)
2676 else:
2677 return None
2679 def _first(self) -> str:
2680 return self._paginate(page=1, limit=1)
2682 def first_or_fail(self, exception: Exception = None, verbose: bool = False) -> T_MetaInstance:
2683 """
2684 Call .first() and raise an error if nothing found.
2686 Basically unwraps Optional type.
2687 """
2688 if inst := self.first(verbose=verbose):
2689 return inst
2691 if not exception:
2692 exception = ValueError("Nothing found!")
2694 raise exception
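# Usage sketch for .first() / .first_or_fail():
#     person = Person.where(id=1).first()                                       # Person instance or None
#     person = Person.where(id=1).first_or_fail(ValueError("no such person"))   # raises when nothing matches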
2697S = typing.TypeVar("S")
2700class PaginatedRows(TypedRows[T_MetaInstance]):
2701 """
2702 Extension on top of rows that is used when calling .paginate() instead of .collect().
2703 """
2705 _query_builder: QueryBuilder[T_MetaInstance]
2707 @property
2708 def data(self) -> list[T_MetaInstance]:
2709 """
2710 Get the underlying data.
2711 """
2712 return list(self.records.values())
2714 @property
2715 def pagination(self) -> Pagination:
2716 """
2717 Get all page info.
2718 """
2719 pagination_data = self.metadata["pagination"]
2721 has_next_page = pagination_data["current_page"] < pagination_data["max_page"]
2722 has_prev_page = pagination_data["current_page"] > 1
2723 return {
2724 "total_items": pagination_data["rows"],
2725 "current_page": pagination_data["current_page"],
2726 "per_page": pagination_data["limit"],
2727 "total_pages": pagination_data["max_page"],
2728 "has_next_page": has_next_page,
2729 "has_prev_page": has_prev_page,
2730 "next_page": pagination_data["current_page"] + 1 if has_next_page else None,
2731 "prev_page": pagination_data["current_page"] - 1 if has_prev_page else None,
2732 }
2734 def next(self) -> Self:
2735 """
2736 Get the next page.
2737 """
2738 data = self.metadata["pagination"]
2739 if data["current_page"] >= data["max_page"]:
2740 raise StopIteration("Final Page")
2742 return self._query_builder.paginate(limit=data["limit"], page=data["current_page"] + 1)
2744 def previous(self) -> Self:
2745 """
2746 Get the previous page.
2747 """
2748 data = self.metadata["pagination"]
2749 if data["current_page"] <= 1:
2750 raise StopIteration("First Page")
2752 return self._query_builder.paginate(limit=data["limit"], page=data["current_page"] - 1)
2754 def as_dict(self, *_: Any, **__: Any) -> PaginateDict: # type: ignore
2755 """
2756 Convert to a dictionary with pagination info and original data.
2758 All arguments are ignored!
2759 """
2760 return {"data": super().as_dict(), "pagination": self.pagination}
2763class TypedSet(pydal.objects.Set): # type: ignore # pragma: no cover
2764 """
2765 Used to make pydal Set more typed.
2767 This class is not actually used, only 'cast' by TypeDAL.__call__
2768 """
2770 def count(self, distinct: bool = None, cache: AnyDict = None) -> int:
2771 """
2772 Count returns an int.
2773 """
2774 result = super().count(distinct, cache)
2775 return typing.cast(int, result)
2777 def select(self, *fields: Any, **attributes: Any) -> TypedRows[T_MetaInstance]:
2778 """
2779 Select returns a TypedRows of a user defined table.
2781 Example:
2782 result: TypedRows[MyTable] = db(MyTable.id > 0).select()
2784 for row in result:
2785 typing.reveal_type(row) # MyTable
2786 """
2787 rows = super().select(*fields, **attributes)
2788 return typing.cast(TypedRows[T_MetaInstance], rows)