Coverage for src/typedal/core.py: 100%
904 statements
coverage.py v7.4.1, created at 2024-04-16 21:21 +0200
1"""
2Core functionality of TypeDAL.
3"""
5import contextlib
6import csv
7import datetime as dt
8import inspect
9import json
10import math
11import types
12import typing
13import warnings
14from collections import defaultdict
15from decimal import Decimal
16from pathlib import Path
17from typing import Any, Optional
19import pydal
20from pydal._globals import DEFAULT
21from pydal.objects import Field as _Field
22from pydal.objects import Query as _Query
23from pydal.objects import Row
24from pydal.objects import Table as _Table
25from typing_extensions import Self
27from .config import TypeDALConfig, load_config
28from .helpers import (
29 DummyQuery,
30 all_annotations,
31 all_dict,
32 as_lambda,
33 extract_type_optional,
34 filter_out,
35 instanciate,
36 is_union,
37 looks_like,
38 mktable,
39 origin_is_subclass,
40 to_snake,
41 unwrap_type,
42)
43from .serializers import as_json
44from .types import (
45 AfterDeleteCallable,
46 AfterInsertCallable,
47 AfterUpdateCallable,
48 AnyDict,
49 BeforeDeleteCallable,
50 BeforeInsertCallable,
51 BeforeUpdateCallable,
52 CacheMetadata,
53 Expression,
54 Field,
55 Metadata,
56 PaginateDict,
57 Pagination,
58 Query,
59 Rows,
60 Validator,
61 _Types,
62)
64# use typing.cast(type, ...) to make mypy happy with unions
65T_annotation = typing.Type[Any] | types.UnionType
66T_Query = typing.Union["Table", Query, bool, None, "TypedTable", typing.Type["TypedTable"]]
67T_Value = typing.TypeVar("T_Value") # actual type of the Field (via Generic)
68T_MetaInstance = typing.TypeVar("T_MetaInstance", bound="TypedTable") # bound="TypedTable"; bound="TableMeta"
69T = typing.TypeVar("T")
71BASIC_MAPPINGS: dict[T_annotation, str] = {
72 str: "string",
73 int: "integer",
74 bool: "boolean",
75 bytes: "blob",
76 float: "double",
77 object: "json",
78 Decimal: "decimal(10,2)",
79 dt.date: "date",
80 dt.time: "time",
81 dt.datetime: "datetime",
82}
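# Illustrative sketch (commented out; not part of the measured source): how BASIC_MAPPINGS
# applies to model annotations. The `Article` model below is hypothetical.
#
#     class Article(TypedTable):
#         title: str              # -> "string"
#         views: int              # -> "integer"
#         price: Decimal          # -> "decimal(10,2)"
#         published: dt.datetime  # -> "datetime"
#         tags: list[str]         # -> "list:string" (lists wrap the child mapping)
#         summary: str | None     # -> "string", with notnull=False because it is optional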
85def is_typed_field(cls: Any) -> typing.TypeGuard["TypedField[Any]"]:
86 """
87 Is `cls` an instance or subclass of TypedField?
89 Deprecated
90 """
91 return (
92 isinstance(cls, TypedField)
93 or isinstance(typing.get_origin(cls), type)
94 and issubclass(typing.get_origin(cls), TypedField)
95 )
98JOIN_OPTIONS = typing.Literal["left", "inner", None]
99DEFAULT_JOIN_OPTION: JOIN_OPTIONS = "left"
101# table-ish parameter:
102P_Table = typing.Union[typing.Type["TypedTable"], pydal.objects.Table]
104Condition: typing.TypeAlias = typing.Optional[
105 typing.Callable[
106 # self, other -> Query
107 [P_Table, P_Table],
108 Query | bool,
109 ]
110]
112OnQuery: typing.TypeAlias = typing.Optional[
113 typing.Callable[
114 # self, other -> list of .on statements
115 [P_Table, P_Table],
116 list[Expression],
117 ]
118]
120To_Type = typing.TypeVar("To_Type", type[Any], typing.Type[Any], str)
123class Relationship(typing.Generic[To_Type]):
124 """
125 Define a relationship to another table.
126 """
128 _type: To_Type
129 table: typing.Type["TypedTable"] | type | str
130 condition: Condition
131 on: OnQuery
132 multiple: bool
133 join: JOIN_OPTIONS
135 def __init__(
136 self,
137 _type: To_Type,
138 condition: Condition = None,
139 join: JOIN_OPTIONS = None,
140 on: OnQuery = None,
141 ):
142 """
143 Should not be called directly, use relationship() instead!
144 """
145 if condition and on:
146 warnings.warn(f"Relation | Both specified! {condition=} {on=} {_type=}")
147 raise ValueError("Please specify either a condition or an 'on' statement for this relationship!")
149 self._type = _type
150 self.condition = condition
151 self.join = "left" if on else join # .on is always left join!
152 self.on = on
154 if args := typing.get_args(_type):
155 self.table = unwrap_type(args[0])
156 self.multiple = True
157 else:
158 self.table = _type
159 self.multiple = False
161 if isinstance(self.table, str):
162 self.table = TypeDAL.to_snake(self.table)
164 def clone(self, **update: Any) -> "Relationship[To_Type]":
165 """
166 Create a copy of the relationship, possibly updated.
167 """
168 return self.__class__(
169 update.get("_type") or self._type,
170 update.get("condition") or self.condition,
171 update.get("join") or self.join,
172 update.get("on") or self.on,
173 )
175 def __repr__(self) -> str:
176 """
177 Representation of the relationship.
178 """
179 if callback := self.condition or self.on:
180 src_code = inspect.getsource(callback).strip()
181 else:
182 cls_name = self._type if isinstance(self._type, str) else self._type.__name__ # type: ignore
183 src_code = f"to {cls_name} (missing condition)"
185 join = f":{self.join}" if self.join else ""
186 return f"<Relationship{join} {src_code}>"
188 def get_table(self, db: "TypeDAL") -> typing.Type["TypedTable"]:
189 """
190 Get the table this relationship is bound to.
191 """
192 table = self.table # can be a string because db wasn't available yet
193 if isinstance(table, str):
194 if mapped := db._class_map.get(table):
195 # yay
196 return mapped
198 # boo, fall back to untyped table but pretend it is typed:
199 return typing.cast(typing.Type["TypedTable"], db[table]) # eh close enough!
201 return table
203 def get_table_name(self) -> str:
204 """
205 Get the name of the table this relationship is bound to.
206 """
207 if isinstance(self.table, str):
208 return self.table
210 if isinstance(self.table, pydal.objects.Table):
211 return str(self.table)
213 # else: typed table
214 try:
215 table = self.table._ensure_table_defined() if issubclass(self.table, TypedTable) else self.table
216 except Exception: # pragma: no cover
217 table = self.table
219 return str(table)
221 def __get__(self, instance: Any, owner: Any) -> typing.Optional[list[Any]] | "Relationship[To_Type]":
222 """
223 Relationship is a descriptor class, which can be returned from a class but not an instance.
225 For an instance, using .join() will replace the Relationship with the actual data.
226 If you forgot to join, a warning will be shown and empty data will be returned.
227 """
228 if not instance:
229 # relationship queried on class, that's allowed
230 return self
232 warnings.warn(
233 "Trying to get data from a relationship object! Did you forget to join it?", category=RuntimeWarning
234 )
235 if self.multiple:
236 return []
237 else:
238 return None
241def relationship(
242 _type: To_Type, condition: Condition = None, join: JOIN_OPTIONS = None, on: OnQuery = None
243) -> Relationship[To_Type]:
244 """
245 Define a relationship to another table, when its id is not stored in the current table.
247 Example:
248 class User(TypedTable):
249 name: str
251 posts = relationship(list["Post"], condition=lambda self, post: self.id == post.author, join='left')
253 class Post(TypedTable):
254 title: str
255 author: User
257 User.join("posts").first() # User instance with list[Post] in .posts
259 Here, Post stores the User ID, but `relationship(list["Post"])` still allows you to get the user's posts.
260 In this case, the join strategy is set to LEFT so users without posts are also still selected.
262 For complex queries with a pivot table, an `on` can be set instead of `condition`:
263 class User(TypedTable):
264 ...
266 tags = relationship(list["Tag"], on=lambda self, tag: [
267 Tagged.on(Tagged.entity == self.gid),
268 Tag.on((Tagged.tag == tag.id)),
269 ])
271 If you tried to capture this in a single 'condition', pydal would create a cross join, which is much less efficient.
272 """
273 return Relationship(_type, condition, join, on)
276def _generate_relationship_condition(
277 _: typing.Type["TypedTable"], key: str, field: typing.Union["TypedField[Any]", "Table", typing.Type["TypedTable"]]
278) -> Condition:
279 origin = typing.get_origin(field)
280 # else: generic
282 if origin == list:
283 # field = typing.get_args(field)[0] # actual field
284 # return lambda _self, _other: cls[key].contains(field)
286 return lambda _self, _other: _self[key].contains(_other.id)
287 else:
288 # normal reference
289 # return lambda _self, _other: cls[key] == field.id
290 return lambda _self, _other: _self[key] == _other.id
293def to_relationship(
294 cls: typing.Type["TypedTable"] | type[Any],
295 key: str,
296 field: typing.Union["TypedField[Any]", "Table", typing.Type["TypedTable"]],
297) -> typing.Optional[Relationship[Any]]:
298 """
299 Used to automatically create a Relationship instance for reference fields.
301 Example:
302 class MyTable(TypedTable):
303 reference: OtherTable
305 `reference` contains the id of an OtherTable row.
306 MyTable.relationships should have 'reference' as a relationship, so `MyTable.join('reference')` should work.
308 This function will automatically perform this logic (called in db.define):
309 to_relationship(MyTable, 'reference', OtherTable) -> Relationship[OtherTable]
311 Also works for list:reference (list[OtherTable]) and TypedField[OtherTable].
312 """
313 if looks_like(field, TypedField):
314 if args := typing.get_args(field):
315 field = args[0]
316 else:
317 # weird
318 return None
320 field, optional = extract_type_optional(field)
322 try:
323 condition = _generate_relationship_condition(cls, key, field)
324 except Exception as e: # pragma: no cover
325 warnings.warn("Could not generate Relationship condition", source=e)
326 condition = None
328 if not condition: # pragma: no cover
329 # something went wrong, not a valid relationship
330 warnings.warn(f"Invalid relationship for {cls.__name__}.{key}: {field}")
331 return None
333 join = "left" if optional or typing.get_origin(field) == list else "inner"
335 return Relationship(typing.cast(type[TypedTable], field), condition, typing.cast(JOIN_OPTIONS, join))
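# Illustrative sketch (commented out): the implicit relationship db.define derives for a
# plain reference field. `Post.author: User` (hypothetical models) becomes a "reference user"
# column plus roughly:
#
#     to_relationship(Post, "author", User)
#     # ~ Relationship(User, condition=lambda _self, _other: _self["author"] == _other.id, join="inner")
#
# An Optional[User] or list[User] annotation gets join="left" instead, so rows without a
# match are still selected.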
338class TypeDAL(pydal.DAL): # type: ignore
339 """
340 Drop-in replacement for pyDAL with a layer that converts class-based table definitions to classical pydal define_table calls.
341 """
343 _config: TypeDALConfig
345 def __init__(
346 self,
347 uri: Optional[str] = None, # default from config or 'sqlite:memory'
348 pool_size: int = None, # default 1 if sqlite else 3
349 folder: Optional[str | Path] = None, # default 'databases' in config
350 db_codec: str = "UTF-8",
351 check_reserved: Optional[list[str]] = None,
352 migrate: Optional[bool] = None, # default True by config
353 fake_migrate: Optional[bool] = None, # default False by config
354 migrate_enabled: bool = True,
355 fake_migrate_all: bool = False,
356 decode_credentials: bool = False,
357 driver_args: Optional[AnyDict] = None,
358 adapter_args: Optional[AnyDict] = None,
359 attempts: int = 5,
360 auto_import: bool = False,
361 bigint_id: bool = False,
362 debug: bool = False,
363 lazy_tables: bool = False,
364 db_uid: Optional[str] = None,
365 after_connection: typing.Callable[..., Any] = None,
366 tables: Optional[list[str]] = None,
367 ignore_field_case: bool = True,
368 entity_quoting: bool = True,
369 table_hash: Optional[str] = None,
370 enable_typedal_caching: bool = None,
371 use_pyproject: bool | str = True,
372 use_env: bool | str = True,
373 connection: Optional[str] = None,
374 config: Optional[TypeDALConfig] = None,
375 ) -> None:
376 """
377 Adds some internal tables after calling pydal's default init.
379 Set enable_typedal_caching to False to disable this behavior.
380 """
381 config = config or load_config(connection, _use_pyproject=use_pyproject, _use_env=use_env)
382 config.update(
383 database=uri,
384 dialect=uri.split(":")[0] if uri and ":" in uri else None,
385 folder=str(folder) if folder is not None else None,
386 migrate=migrate,
387 fake_migrate=fake_migrate,
388 caching=enable_typedal_caching,
389 pool_size=pool_size,
390 )
392 self._config = config
394 if config.folder:
395 Path(config.folder).mkdir(exist_ok=True)
397 super().__init__(
398 config.database,
399 config.pool_size,
400 config.folder,
401 db_codec,
402 check_reserved,
403 config.migrate,
404 config.fake_migrate,
405 migrate_enabled,
406 fake_migrate_all,
407 decode_credentials,
408 driver_args,
409 adapter_args,
410 attempts,
411 auto_import,
412 bigint_id,
413 debug,
414 lazy_tables,
415 db_uid,
416 after_connection,
417 tables,
418 ignore_field_case,
419 entity_quoting,
420 table_hash,
421 )
423 if config.caching:
424 self.try_define(_TypedalCache)
425 self.try_define(_TypedalCacheDependency)
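# Illustrative sketch (commented out): typical construction. The URI and folder are example
# values, not project defaults; anything omitted is filled in from the loaded TypeDALConfig.
#
#     db = TypeDAL("sqlite://storage.db", folder="databases", migrate=True)
#     # with caching enabled (the default), the _TypedalCache and _TypedalCacheDependency
#     # tables are defined automatically via try_define().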
427 def try_define(self, model: typing.Type[T], verbose: bool = False) -> typing.Type[T]:
428 """
429 Try to define a model with migrate or fall back to fake migrate.
430 """
431 try:
432 return self.define(model, migrate=True)
433 except Exception as e:
434 # clean up:
435 self.rollback()
436 if (tablename := self.to_snake(model.__name__)) and tablename in dir(self):
437 delattr(self, tablename)
439 if verbose:
440 warnings.warn(f"{model} could not be migrated, try faking", source=e, category=RuntimeWarning)
442 # try again:
443 return self.define(model, migrate=True, fake_migrate=True, redefine=True)
445 default_kwargs: typing.ClassVar[AnyDict] = {
446 # fields are 'required' (notnull) by default:
447 "notnull": True,
448 }
450 # maps table name to typedal class, for resolving future references
451 _class_map: typing.ClassVar[dict[str, typing.Type["TypedTable"]]] = {}
453 def _define(self, cls: typing.Type[T], **kwargs: Any) -> typing.Type[T]:
454 # todo: new relationship item added should also invalidate (previously unrelated) cache result
456 # todo: option to enable/disable cache dependency behavior:
457 # - don't set _before_update and _before_delete
458 # - don't add TypedalCacheDependency entry
459 # - don't invalidate other item on new row of this type
461 # when __future__.annotations is enabled, cls.__annotations__ will not work anymore as below.
462 # proper way to handle this would be (but gives error right now due to Table implementing magic methods):
463 # typing.get_type_hints(cls, globalns=None, localns=None)
465 # dirty way (with evil eval):
466 # [eval(v) for k, v in cls.__annotations__.items()]
467 # this however also stops working when variables outside this scope or even references to other
468 # objects are used. So for now, this package will NOT work when from __future__ import annotations is used,
469 # and might break in the future, when this annotations behavior is enabled by default.
471 # non-annotated variables have to be passed to define_table as kwargs
472 full_dict = all_dict(cls) # includes properties from parents (e.g. useful for mixins)
474 tablename = self.to_snake(cls.__name__)
475 # grab annotations of cls and its parents:
476 annotations = all_annotations(cls)
477 # extend with `prop = TypedField()` 'annotations':
478 annotations |= {k: typing.cast(type, v) for k, v in full_dict.items() if is_typed_field(v)}
479 # remove internal stuff:
480 annotations = {k: v for k, v in annotations.items() if not k.startswith("_")}
482 typedfields: dict[str, TypedField[Any]] = {
483 k: instanciate(v, True) for k, v in annotations.items() if is_typed_field(v)
484 }
486 relationships: dict[str, type[Relationship[Any]]] = filter_out(annotations, Relationship)
488 fields = {fname: self._to_field(fname, ftype) for fname, ftype in annotations.items()}
490 # ! don't use full_dict here:
491 other_kwargs = kwargs | {
492 k: v for k, v in cls.__dict__.items() if k not in annotations and not k.startswith("_")
493 } # other_kwargs was previously used to pass kwargs to typedal, but use @define(**kwargs) for that.
494 # now it's only used to extract relationships from the object.
495 # other properties of the class (incl methods) should not be touched
497 for key in typedfields.keys() - full_dict.keys():
498 # typed fields that haven't been added to the object yet
499 setattr(cls, key, typedfields[key])
501 # start with base classes and overwrite with current class:
502 relationships = filter_out(full_dict, Relationship) | relationships | filter_out(other_kwargs, Relationship)
504 # DEPRECATED: Relationship as annotation is currently not supported!
505 # ensure they are all instances and
506 # not mix of instances (`= relationship()`) and classes (`: Relationship[...]`):
507 # relationships = {
508 # k: v if isinstance(v, Relationship) else to_relationship(cls, k, v) for k, v in relationships.items()
509 # }
511 # keys of implicit references (also relationships):
512 reference_field_keys = [k for k, v in fields.items() if v.type.split(" ")[0] in ("list:reference", "reference")]
514 # add implicit relationships:
515 # User; list[User]; TypedField[User]; TypedField[list[User]]
516 relationships |= {
517 k: new_relationship
518 for k in reference_field_keys
519 if k not in relationships and (new_relationship := to_relationship(cls, k, annotations[k]))
520 }
522 cache_dependency = self._config.caching and kwargs.pop("cache_dependency", True)
524 table: Table = self.define_table(tablename, *fields.values(), **kwargs)
526 for name, typed_field in typedfields.items():
527 field = fields[name]
528 typed_field.bind(field, table)
530 if issubclass(cls, TypedTable):
531 cls.__set_internals__(
532 db=self,
533 table=table,
534 # by now, all relationships should be instances!
535 relationships=typing.cast(dict[str, Relationship[Any]], relationships),
536 )
537 self._class_map[str(table)] = cls
538 cls.__on_define__(self)
539 else:
540 warnings.warn("db.define used without inheriting TypedTable. This could lead to strange problems!")
542 if not tablename.startswith("typedal_") and cache_dependency:
543 table._before_update.append(lambda s, _: _remove_cache(s, tablename))
544 table._before_delete.append(lambda s: _remove_cache(s, tablename))
546 return cls
548 @typing.overload
549 def define(self, maybe_cls: None = None, **kwargs: Any) -> typing.Callable[[typing.Type[T]], typing.Type[T]]:
550 """
551 Typing Overload for define without a class.
553 @db.define()
554 class MyTable(TypedTable): ...
555 """
557 @typing.overload
558 def define(self, maybe_cls: typing.Type[T], **kwargs: Any) -> typing.Type[T]:
559 """
560 Typing Overload for define with a class.
562 @db.define
563 class MyTable(TypedTable): ...
564 """
566 def define(
567 self, maybe_cls: typing.Type[T] | None = None, **kwargs: Any
568 ) -> typing.Type[T] | typing.Callable[[typing.Type[T]], typing.Type[T]]:
569 """
570 Can be used as a decorator on a class that inherits `TypedTable`, \
571 or as a regular method if you need to define your classes before you have access to a 'db' instance.
573 You can also pass extra arguments to db.define_table.
574 See http://www.web2py.com/books/default/chapter/29/06/the-database-abstraction-layer#Table-constructor
576 Example:
577 @db.define
578 class Person(TypedTable):
579 ...
581 class Article(TypedTable):
582 ...
584 # at a later time:
585 db.define(Article)
587 Returns:
588 the result of pydal.define_table
589 """
591 def wrapper(cls: typing.Type[T]) -> typing.Type[T]:
592 return self._define(cls, **kwargs)
594 if maybe_cls:
595 return wrapper(maybe_cls)
597 return wrapper
599 # def drop(self, table_name: str) -> None:
600 # """
601 # Remove a table by name (both on the database level and the typedal level).
602 # """
603 # # drop calls TypedTable.drop() and removes it from the `_class_map`
604 # if cls := self._class_map.pop(table_name, None):
605 # cls.drop()
607 # def drop_all(self, max_retries: int = None) -> None:
608 # """
609 # Remove all tables and keep doing so until everything is gone!
610 # """
611 # retries = 0
612 # if max_retries is None:
613 # max_retries = len(self.tables)
614 #
615 # while self.tables:
616 # retries += 1
617 # for table in self.tables:
618 # self.drop(table)
619 #
620 # if retries > max_retries:
621 # raise RuntimeError("Could not delete all tables")
623 def __call__(self, *_args: T_Query, **kwargs: Any) -> "TypedSet":
624 """
625 A db instance can be called directly to perform a query.
627 Usually, only a query is passed.
629 Example:
630 db(query).select()
632 """
633 args = list(_args)
634 if args:
635 cls = args[0]
636 if isinstance(cls, bool):
637 raise ValueError("Don't actually pass a bool to db()! Use a query instead.")
639 if isinstance(cls, type) and issubclass(type(cls), type) and issubclass(cls, TypedTable):
640 # table defined without @db.define decorator!
641 _cls: typing.Type[TypedTable] = cls
642 args[0] = _cls.id != None
644 _set = super().__call__(*args, **kwargs)
645 return typing.cast(TypedSet, _set)
647 def __getitem__(self, key: str) -> "Table":
648 """
649 Allows dynamically accessing a table by its name as a string.
651 Example:
652 db['users'] -> the users table
653 """
654 return typing.cast(Table, super().__getitem__(str(key)))
656 @classmethod
657 def _build_field(cls, name: str, _type: str, **kw: Any) -> Field:
658 return Field(name, _type, **{**cls.default_kwargs, **kw})
660 @classmethod
661 def _annotation_to_pydal_fieldtype(
662 cls, _ftype: T_annotation, mut_kw: typing.MutableMapping[str, Any]
663 ) -> Optional[str]:
664 # ftype can be a union or type. typing.cast is sometimes used to tell mypy when it's not a union.
665 ftype = typing.cast(type, _ftype) # cast from typing.Type to type to make mypy happy
667 if isinstance(ftype, str):
668 # extract type from string
669 ftype = typing.get_args(typing.Type[ftype])[0]._evaluate(
670 localns=locals(), globalns=globals(), recursive_guard=frozenset()
671 )
673 if mapping := BASIC_MAPPINGS.get(ftype):
674 # basic types
675 return mapping
676 elif isinstance(ftype, _Table):
677 # db.table
678 return f"reference {ftype._tablename}"
679 elif issubclass(type(ftype), type) and issubclass(ftype, TypedTable):
680 # SomeTable
681 snakename = cls.to_snake(ftype.__name__)
682 return f"reference {snakename}"
683 elif isinstance(ftype, TypedField):
684 # FieldType(type, ...)
685 return ftype._to_field(mut_kw)
686 elif origin_is_subclass(ftype, TypedField):
687 # TypedField[int]
688 return cls._annotation_to_pydal_fieldtype(typing.get_args(ftype)[0], mut_kw)
689 elif isinstance(ftype, types.GenericAlias) and typing.get_origin(ftype) in (list, TypedField):
690 # list[str] -> str -> string -> list:string
691 _child_type = typing.get_args(ftype)[0]
692 _child_type = cls._annotation_to_pydal_fieldtype(_child_type, mut_kw)
693 return f"list:{_child_type}"
694 elif is_union(ftype):
695 # str | int -> UnionType
696 # typing.Union[str | int] -> typing._UnionGenericAlias
698 # Optional[type] == type | None
700 match typing.get_args(ftype):
701 case (_child_type, _Types.NONETYPE) | (_Types.NONETYPE, _child_type):
702 # good union of Nullable
704 # if a field is optional, it is nullable:
705 mut_kw["notnull"] = False
706 return cls._annotation_to_pydal_fieldtype(_child_type, mut_kw)
707 case _:
708 # two types is not supported by the db!
709 return None
710 else:
711 return None
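# Illustrative examples (commented out) of the conversion above; the kwargs dict is
# mutated in place for modifiers such as nullability:
#
#     TypeDAL._annotation_to_pydal_fieldtype(str, {})        # -> "string"
#     TypeDAL._annotation_to_pydal_fieldtype(list[int], {})  # -> "list:integer"
#     kw: dict = {}
#     TypeDAL._annotation_to_pydal_fieldtype(Optional[str], kw)  # -> "string", kw["notnull"] is now False
#     TypeDAL._annotation_to_pydal_fieldtype(str | int, {})  # -> None (a two-type union is not supported)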
713 @classmethod
714 def _to_field(cls, fname: str, ftype: type, **kw: Any) -> Field:
715 """
716 Convert an annotation into a pydal Field.
718 Args:
719 fname: name of the property
720 ftype: annotation of the property
721 kw: when using TypedField or a function returning it (e.g. StringField),
722 keyword args can be used to pass any other settings you would normally to a pydal Field
724 -> pydal.Field(fname, ftype, **kw)
726 Example:
727 class MyTable:
728 fname: ftype
729 id: int
730 name: str
731 reference: Table
732 other: TypedField(str, default="John Doe") # default will be in kwargs
733 """
734 fname = cls.to_snake(fname)
736 if converted_type := cls._annotation_to_pydal_fieldtype(ftype, kw):
737 return cls._build_field(fname, converted_type, **kw)
738 else:
739 raise NotImplementedError(f"Unsupported type {ftype}/{type(ftype)}")
741 @staticmethod
742 def to_snake(camel: str) -> str:
743 """
744 Moved to helpers, kept as a static method for legacy reasons.
745 """
746 return to_snake(camel)
749class TableProtocol(typing.Protocol): # pragma: no cover
750 """
751 Make mypy happy.
752 """
754 id: "TypedField[int]"
756 def __getitem__(self, item: str) -> Field:
757 """
758 Tell mypy a Table supports dictionary notation for columns.
759 """
762class Table(_Table, TableProtocol): # type: ignore
763 """
764 Make mypy happy.
765 """
768class TableMeta(type):
769 """
770 This metaclass contains functionality for table classes that doesn't exist on their instances.
772 Example:
773 class MyTable(TypedTable):
774 some_field: TypedField[int]
776 MyTable.update_or_insert(...) # should work
778 MyTable.some_field # -> Field, can be used to query etc.
780 row = MyTable.first() # returns instance of MyTable
782 # row.update_or_insert(...) # shouldn't work!
784 row.some_field # -> int, with actual data
786 """
788 # set up by db.define:
789 # _db: TypeDAL | None = None
790 # _table: Table | None = None
791 _db: TypeDAL | None = None
792 _table: Table | None = None
793 _relationships: dict[str, Relationship[Any]] | None = None
795 #########################
796 # TypeDAL custom logic: #
797 #########################
799 def __set_internals__(self, db: pydal.DAL, table: Table, relationships: dict[str, Relationship[Any]]) -> None:
800 """
801 Store the related database and pydal table for later usage.
802 """
803 self._db = db
804 self._table = table
805 self._relationships = relationships
807 def __getattr__(self, col: str) -> Optional[Field]:
808 """
809 Magic method used by TableMeta to get a database field with dot notation on a class.
811 Example:
812 SomeTypedTable.col -> db.table.col (via TableMeta.__getattr__)
814 """
815 if self._table:
816 return getattr(self._table, col, None)
818 return None
820 def _ensure_table_defined(self) -> Table:
821 if not self._table:
822 raise EnvironmentError("@define or db.define is not called on this class yet!")
823 return self._table
825 def __iter__(self) -> typing.Generator[Field, None, None]:
826 """
827 Loop through the columns of this model.
828 """
829 table = self._ensure_table_defined()
830 yield from iter(table)
832 def __getitem__(self, item: str) -> Field:
833 """
834 Allow dict notation to get a column of this table (-> Field instance).
835 """
836 table = self._ensure_table_defined()
837 return table[item]
839 def __str__(self) -> str:
840 """
841 Normally, just returns the underlying table name, but with a fallback if the model is unbound.
842 """
843 if self._table:
844 return str(self._table)
845 else:
846 return f"<unbound table {self.__name__}>"
848 def from_row(self: typing.Type[T_MetaInstance], row: pydal.objects.Row) -> T_MetaInstance:
849 """
850 Create a model instance from a pydal row.
851 """
852 return self(row)
854 def all(self: typing.Type[T_MetaInstance]) -> "TypedRows[T_MetaInstance]":
855 """
856 Return all rows for this model.
857 """
858 return self.collect()
860 def get_relationships(self) -> dict[str, Relationship[Any]]:
861 """
862 Return the registered relationships of the current model.
863 """
864 return self._relationships or {}
866 ##########################
867 # TypeDAL Modified Logic #
868 ##########################
870 def insert(self: typing.Type[T_MetaInstance], **fields: Any) -> T_MetaInstance:
871 """
872 This is only called when db.define is not used as a decorator.
874 cls.__table functions as 'self'
876 Args:
877 **fields: anything you want to insert in the database
879 Returns: an instance of this model for the newly inserted row.
881 """
882 table = self._ensure_table_defined()
884 result = table.insert(**fields)
885 # it already is an int but mypy doesn't understand that
886 return self(result)
888 def _insert(self, **fields: Any) -> str:
889 table = self._ensure_table_defined()
891 return str(table._insert(**fields))
893 def bulk_insert(self: typing.Type[T_MetaInstance], items: list[AnyDict]) -> "TypedRows[T_MetaInstance]":
894 """
895 Insert multiple rows, returns a TypedRows set of new instances.
896 """
897 table = self._ensure_table_defined()
898 result = table.bulk_insert(items)
899 return self.where(lambda row: row.id.belongs(result)).collect()
901 def update_or_insert(
902 self: typing.Type[T_MetaInstance], query: T_Query | AnyDict = DEFAULT, **values: Any
903 ) -> T_MetaInstance:
904 """
905 Update a row if query matches, else insert a new one.
907 Returns the created or updated instance.
908 """
909 table = self._ensure_table_defined()
911 if query is DEFAULT:
912 record = table(**values)
913 elif isinstance(query, dict):
914 record = table(**query)
915 else:
916 record = table(query)
918 if not record:
919 return self.insert(**values)
921 record.update_record(**values)
922 return self(record)
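# Illustrative usage (commented out); `User` and its fields are hypothetical:
#
#     User.update_or_insert(User.email == "a@b.c", email="a@b.c", name="Alice")
#     User.update_or_insert({"email": "a@b.c"}, email="a@b.c", name="Alice")
#     # both return a User instance: updated if the query matched, freshly inserted otherwise.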
924 def validate_and_insert(
925 self: typing.Type[T_MetaInstance], **fields: Any
926 ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]:
927 """
928 Validate input data and then insert a row.
930 Returns a tuple of (the created instance, a dict of errors).
931 """
932 table = self._ensure_table_defined()
933 result = table.validate_and_insert(**fields)
934 if row_id := result.get("id"):
935 return self(row_id), None
936 else:
937 return None, result.get("errors")
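# Illustrative usage (commented out) of the (instance, errors) tuple; `User` is hypothetical
# and the exact error messages depend on the validators configured on the table:
#
#     row, errors = User.validate_and_insert(name="Alice", email="not-an-email")
#     if errors:
#         ...  # e.g. {"email": "..."}; nothing was inserted
#     else:
#         print(row.id)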
939 def validate_and_update(
940 self: typing.Type[T_MetaInstance], query: Query, **fields: Any
941 ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]:
942 """
943 Validate input data and then update at most 1 row.
945 Returns a tuple of (the updated instance, a dict of errors).
946 """
947 table = self._ensure_table_defined()
949 try:
950 result = table.validate_and_update(query, **fields)
951 except Exception as e:
952 result = {"errors": {"exception": str(e)}}
954 if errors := result.get("errors"):
955 return None, errors
956 elif row_id := result.get("id"):
957 return self(row_id), None
958 else: # pragma: no cover
959 # update on query without result (shouldn't happen)
960 return None, None
962 def validate_and_update_or_insert(
963 self: typing.Type[T_MetaInstance], query: Query, **fields: Any
964 ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]:
965 """
966 Validate input data and then update or insert (on at most 1 row).
968 Returns a tuple of (the updated/created instance, a dict of errors).
969 """
970 table = self._ensure_table_defined()
971 result = table.validate_and_update_or_insert(query, **fields)
973 if errors := result.get("errors"):
974 return None, errors
975 elif row_id := result.get("id"):
976 return self(row_id), None
977 else: # pragma: no cover
978 # update on query without result (shouldn't happen)
979 return None, None
981 def select(self: typing.Type[T_MetaInstance], *a: Any, **kw: Any) -> "QueryBuilder[T_MetaInstance]":
982 """
983 See QueryBuilder.select!
984 """
985 return QueryBuilder(self).select(*a, **kw)
987 def paginate(self: typing.Type[T_MetaInstance], limit: int, page: int = 1) -> "PaginatedRows[T_MetaInstance]":
988 """
989 See QueryBuilder.paginate!
990 """
991 return QueryBuilder(self).paginate(limit=limit, page=page)
993 def chunk(
994 self: typing.Type[T_MetaInstance], chunk_size: int
995 ) -> typing.Generator["TypedRows[T_MetaInstance]", Any, None]:
996 """
997 See QueryBuilder.chunk!
998 """
999 return QueryBuilder(self).chunk(chunk_size)
1001 def where(self: typing.Type[T_MetaInstance], *a: Any, **kw: Any) -> "QueryBuilder[T_MetaInstance]":
1002 """
1003 See QueryBuilder.where!
1004 """
1005 return QueryBuilder(self).where(*a, **kw)
1007 def cache(self: typing.Type[T_MetaInstance], *deps: Any, **kwargs: Any) -> "QueryBuilder[T_MetaInstance]":
1008 """
1009 See QueryBuilder.cache!
1010 """
1011 return QueryBuilder(self).cache(*deps, **kwargs)
1013 def count(self: typing.Type[T_MetaInstance]) -> int:
1014 """
1015 See QueryBuilder.count!
1016 """
1017 return QueryBuilder(self).count()
1019 def first(self: typing.Type[T_MetaInstance]) -> T_MetaInstance | None:
1020 """
1021 See QueryBuilder.first!
1022 """
1023 return QueryBuilder(self).first()
1025 def join(
1026 self: typing.Type[T_MetaInstance],
1027 *fields: str | typing.Type["TypedTable"],
1028 method: JOIN_OPTIONS = None,
1029 on: OnQuery | list[Expression] | Expression = None,
1030 condition: Condition = None,
1031 ) -> "QueryBuilder[T_MetaInstance]":
1032 """
1033 See QueryBuilder.join!
1034 """
1035 return QueryBuilder(self).join(*fields, on=on, condition=condition, method=method)
1037 def collect(self: typing.Type[T_MetaInstance], verbose: bool = False) -> "TypedRows[T_MetaInstance]":
1038 """
1039 See QueryBuilder.collect!
1040 """
1041 return QueryBuilder(self).collect(verbose=verbose)
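# Illustrative chaining (commented out): each passthrough above starts a QueryBuilder,
# so calls can be combined. `User`, its fields and the "posts" relationship are hypothetical.
#
#     adults = User.where(User.age >= 18).join("posts").collect()          # TypedRows[User]
#     page = User.where(User.active == True).paginate(limit=20, page=1)    # PaginatedRows[User]
#     first_match = User.where(lambda u: u.name.startswith("A")).first()   # User | None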
1043 @property
1044 def ALL(cls) -> pydal.objects.SQLALL:
1045 """
1046 Select all fields for this table.
1047 """
1048 table = cls._ensure_table_defined()
1050 return table.ALL
1052 ##########################
1053 # TypeDAL Shadowed Logic #
1054 ##########################
1055 fields: list[str]
1057 # other table methods:
1059 def truncate(self, mode: str = "") -> None:
1060 """
1061 Remove all data and reset index.
1062 """
1063 table = self._ensure_table_defined()
1064 table.truncate(mode)
1066 def drop(self, mode: str = "") -> None:
1067 """
1068 Remove the underlying table.
1069 """
1070 table = self._ensure_table_defined()
1071 table.drop(mode)
1073 def create_index(self, name: str, *fields: Field | str, **kwargs: Any) -> bool:
1074 """
1075 Add an index on some columns of this table.
1076 """
1077 table = self._ensure_table_defined()
1078 result = table.create_index(name, *fields, **kwargs)
1079 return typing.cast(bool, result)
1081 def drop_index(self, name: str, if_exists: bool = False) -> bool:
1082 """
1083 Remove an index from this table.
1084 """
1085 table = self._ensure_table_defined()
1086 result = table.drop_index(name, if_exists)
1087 return typing.cast(bool, result)
1089 def import_from_csv_file(
1090 self,
1091 csvfile: typing.TextIO,
1092 id_map: dict[str, str] = None,
1093 null: Any = "<NULL>",
1094 unique: str = "uuid",
1095 id_offset: dict[str, int] = None, # id_offset used only when id_map is None
1096 transform: typing.Callable[[dict[Any, Any]], dict[Any, Any]] = None,
1097 validate: bool = False,
1098 encoding: str = "utf-8",
1099 delimiter: str = ",",
1100 quotechar: str = '"',
1101 quoting: int = csv.QUOTE_MINIMAL,
1102 restore: bool = False,
1103 **kwargs: Any,
1104 ) -> None:
1105 """
1106 Load a csv file into the database.
1107 """
1108 table = self._ensure_table_defined()
1109 table.import_from_csv_file(
1110 csvfile,
1111 id_map=id_map,
1112 null=null,
1113 unique=unique,
1114 id_offset=id_offset,
1115 transform=transform,
1116 validate=validate,
1117 encoding=encoding,
1118 delimiter=delimiter,
1119 quotechar=quotechar,
1120 quoting=quoting,
1121 restore=restore,
1122 **kwargs,
1123 )
1125 def on(self, query: Query | bool) -> Expression:
1126 """
1127 Shadow Table.on.
1129 Used for joins.
1131 See Also:
1132 http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#One-to-many-relation
1133 """
1134 table = self._ensure_table_defined()
1135 return typing.cast(Expression, table.on(query))
1137 def with_alias(self, alias: str) -> _Table:
1138 """
1139 Shadow Table.with_alias.
1141 Useful for joins when joining the same table multiple times.
1143 See Also:
1144 http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#One-to-many-relation
1145 """
1146 table = self._ensure_table_defined()
1147 return table.with_alias(alias)
1149 # @typing.dataclass_transform()
1152class TypedField(typing.Generic[T_Value]): # pragma: no cover
1153 """
1154 Typed version of pydal.Field, which will be converted to a normal Field in the background.
1155 """
1157 # will be set by .bind on db.define
1158 name = ""
1159 _db: Optional[pydal.DAL] = None
1160 _rname: Optional[str] = None
1161 _table: Optional[Table] = None
1162 _field: Optional[Field] = None
1164 _type: T_annotation
1165 kwargs: Any
1167 requires: Validator | typing.Iterable[Validator]
1169 def __init__(self, _type: typing.Type[T_Value] | types.UnionType = str, /, **settings: Any) -> None: # type: ignore
1170 """
1171 A TypedField should not be instantiated manually; use the typed field helpers from `fields.py` instead!
1172 """
1173 self._type = _type
1174 self.kwargs = settings
1175 super().__init__()
1177 @typing.overload
1178 def __get__(self, instance: T_MetaInstance, owner: typing.Type[T_MetaInstance]) -> T_Value: # pragma: no cover
1179 """
1180 row.field -> (actual data).
1181 """
1183 @typing.overload
1184 def __get__(self, instance: None, owner: "typing.Type[TypedTable]") -> "TypedField[T_Value]": # pragma: no cover
1185 """
1186 Table.field -> Field.
1187 """
1189 def __get__(
1190 self, instance: T_MetaInstance | None, owner: typing.Type[T_MetaInstance]
1191 ) -> typing.Union[T_Value, "TypedField[T_Value]"]:
1192 """
1193 Since this class is a Descriptor field, \
1194 it returns something else depending on if it's called on a class or instance.
1196 (this is mostly for mypy/typing)
1197 """
1198 if instance:
1199 # this is only reached in a very specific case:
1200 # an instance of the object was created with a specific set of fields selected (excluding the current one)
1201 # in that case, no value was stored in the owner -> return None (since the field was not selected)
1202 return typing.cast(T_Value, None) # cast as T_Value so mypy understands it for selected fields
1203 else:
1204 # getting as class -> return actual field so pydal understands it when using in query etc.
1205 return typing.cast(TypedField[T_Value], self._field) # pretend it's still typed for IDE support
1207 def __str__(self) -> str:
1208 """
1209 String representation of a Typed Field.
1211 If `type` is set explicitly (e.g. TypedField(str, type="text")), that type is used: `TypedField.text`,
1212 otherwise the type annotation is used (e.g. TypedField(str) -> TypedField.str)
1213 """
1214 return str(self._field) if self._field else ""
1216 def __repr__(self) -> str:
1217 """
1218 More detailed string representation of a Typed Field.
1220 Uses __str__ and adds the provided extra options (kwargs) in the representation.
1221 """
1222 s = self.__str__()
1224 if "type" in self.kwargs:
1225 # manual type in kwargs supplied
1226 t = self.kwargs["type"]
1227 elif issubclass(type, type(self._type)):
1228 # normal type, str.__name__ = 'str'
1229 t = getattr(self._type, "__name__", str(self._type))
1230 elif t_args := typing.get_args(self._type):
1231 # list[str] -> 'str'
1232 t = t_args[0].__name__
1233 else: # pragma: no cover
1234 # fallback - something else, may not even happen, I'm not sure
1235 t = self._type
1237 s = f"TypedField[{t}].{s}" if s else f"TypedField[{t}]"
1239 kw = self.kwargs.copy()
1240 kw.pop("type", None)
1241 return f"<{s} with options {kw}>"
1243 def _to_field(self, extra_kwargs: typing.MutableMapping[str, Any]) -> Optional[str]:
1244 """
1245 Convert a Typed Field instance to a pydal.Field.
1246 """
1247 other_kwargs = self.kwargs.copy()
1248 extra_kwargs.update(other_kwargs)
1249 return extra_kwargs.pop("type", False) or TypeDAL._annotation_to_pydal_fieldtype(self._type, extra_kwargs)
1251 def bind(self, field: pydal.objects.Field, table: pydal.objects.Table) -> None:
1252 """
1253 Bind the right db/table/field info to this class, so queries can be made using `Class.field == ...`.
1254 """
1255 self._table = table
1256 self._field = field
1258 def __getattr__(self, key: str) -> Any:
1259 """
1260 If the regular getattribute does not work, try to get info from the related Field.
1261 """
1262 with contextlib.suppress(AttributeError):
1263 return super().__getattribute__(key)
1265 # try on actual field:
1266 return getattr(self._field, key)
1268 def __eq__(self, other: Any) -> Query:
1269 """
1270 Performing == on a Field will result in a Query.
1271 """
1272 return typing.cast(Query, self._field == other)
1274 def __ne__(self, other: Any) -> Query:
1275 """
1276 Performing != on a Field will result in a Query.
1277 """
1278 return typing.cast(Query, self._field != other)
1280 def __gt__(self, other: Any) -> Query:
1281 """
1282 Performing > on a Field will result in a Query.
1283 """
1284 return typing.cast(Query, self._field > other)
1286 def __lt__(self, other: Any) -> Query:
1287 """
1288 Performing < on a Field will result in a Query.
1289 """
1290 return typing.cast(Query, self._field < other)
1292 def __ge__(self, other: Any) -> Query:
1293 """
1294 Performing >= on a Field will result in a Query.
1295 """
1296 return typing.cast(Query, self._field >= other)
1298 def __le__(self, other: Any) -> Query:
1299 """
1300 Performing <= on a Field will result in a Query.
1301 """
1302 return typing.cast(Query, self._field <= other)
1304 def __hash__(self) -> int:
1305 """
1306 Shadow Field.__hash__.
1307 """
1308 return hash(self._field)
1310 def __invert__(self) -> Expression:
1311 """
1312 Performing ~ on a Field will result in an Expression.
1313 """
1314 if not self._field: # pragma: no cover
1315 raise ValueError("Unbound Field can not be inverted!")
1317 return typing.cast(Expression, ~self._field)
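# Illustrative sketch (commented out): once bound by db.define, a TypedField behaves like a
# pydal Field, so the operators above build queries/expressions. `User` and `db` are hypothetical.
#
#     query = (User.age >= 18) & (User.name != "admin")
#     rows = db(query).select(orderby=~User.id)   # ~field -> descending order expression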
1320class _TypedTable:
1321 """
1322 This class is a final shared parent between TypedTable and Mixins.
1324 This needs to exist because otherwise the __on_define__ of Mixins are not executed.
1325 Notably, this class exists at a level ABOVE the `metaclass=TableMeta`,
1326 because otherwise typing gets confused when Mixins are used and multiple types could satisfy
1327 generic 'T subclass of TypedTable'
1328 -> Setting 'TypedTable' as the parent for a Mixin does not work at runtime (and only partially works at type-check time)
1329 """
1331 id: "TypedField[int]"
1333 _before_insert: list[BeforeInsertCallable]
1334 _after_insert: list[AfterInsertCallable]
1335 _before_update: list[BeforeUpdateCallable]
1336 _after_update: list[AfterUpdateCallable]
1337 _before_delete: list[BeforeDeleteCallable]
1338 _after_delete: list[AfterDeleteCallable]
1340 @classmethod
1341 def __on_define__(cls, db: TypeDAL) -> None:
1342 """
1343 Method that can be implemented by tables to do an action after db.define is completed.
1345 This can be useful if you need to add something like requires=IS_NOT_IN_DB(db, "table.field"),
1346 where you need a reference to the current database, which may not exist yet when defining the model.
1347 """
1350class TypedTable(_TypedTable, metaclass=TableMeta):
1351 """
1352 Enhanced modeling system on top of pydal's Table that adds typing and additional functionality.
1353 """
1355 # set up by 'new':
1356 _row: Row | None = None
1358 _with: list[str]
1360 def _setup_instance_methods(self) -> None:
1361 self.as_dict = self._as_dict # type: ignore
1362 self.__json__ = self.as_json = self._as_json # type: ignore
1363 # self.as_yaml = self._as_yaml # type: ignore
1364 self.as_xml = self._as_xml # type: ignore
1366 self.update = self._update # type: ignore
1368 self.delete_record = self._delete_record # type: ignore
1369 self.update_record = self._update_record # type: ignore
1371 def __new__(
1372 cls, row_or_id: typing.Union[Row, Query, pydal.objects.Set, int, str, None, "TypedTable"] = None, **filters: Any
1373 ) -> "TypedTable":
1374 """
1375 Create a typed model instance from an existing row, ID or query.
1377 Examples:
1378 MyTable(1)
1379 MyTable(id=1)
1380 MyTable(MyTable.id == 1)
1381 """
1382 table = cls._ensure_table_defined()
1383 inst = super().__new__(cls)
1385 if isinstance(row_or_id, TypedTable):
1386 # existing typed table instance!
1387 return row_or_id
1388 elif isinstance(row_or_id, pydal.objects.Row):
1389 row = row_or_id
1390 elif row_or_id is not None:
1391 row = table(row_or_id, **filters)
1392 elif filters:
1393 row = table(**filters)
1394 else:
1395 # dummy object
1396 return inst
1398 if not row:
1399 return None # type: ignore
1401 inst._row = row
1402 inst.__dict__.update(row)
1403 inst._setup_instance_methods()
1404 return inst
1406 def __iter__(self) -> typing.Generator[Any, None, None]:
1407 """
1408 Allows looping through the columns.
1409 """
1410 row = self._ensure_matching_row()
1411 yield from iter(row)
1413 def __getitem__(self, item: str) -> Any:
1414 """
1415 Allows dictionary notation to get columns.
1416 """
1417 if item in self.__dict__:
1418 return self.__dict__.get(item)
1420 # fallback to lookup in row
1421 if self._row:
1422 return self._row[item]
1424 # nothing found!
1425 raise KeyError(item)
1427 def __getattr__(self, item: str) -> Any:
1428 """
1429 Allows dot notation to get columns.
1430 """
1431 if value := self.get(item):
1432 return value
1434 raise AttributeError(item)
1436 def get(self, item: str, default: Any = None) -> Any:
1437 """
1438 Try to get a column from this instance, else return default.
1439 """
1440 try:
1441 return self.__getitem__(item)
1442 except KeyError:
1443 return default
1445 def __setitem__(self, key: str, value: Any) -> None:
1446 """
1447 Data can both be updated via dot and dict notation.
1448 """
1449 return setattr(self, key, value)
1451 def __int__(self) -> int:
1452 """
1453 Calling int on a model instance will return its id.
1454 """
1455 return getattr(self, "id", 0)
1457 def __bool__(self) -> bool:
1458 """
1459 If the instance has an underlying row with data, it is truthy.
1460 """
1461 return bool(getattr(self, "_row", False))
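# Illustrative sketch (commented out): because a missing row yields a falsy result,
# instantiation doubles as an existence check. `User` is hypothetical.
#
#     if user := User(42):              # lookup by id
#         print(int(user))              # __int__ -> the row id (42)
#     if admin := User(name="admin"):   # lookup by keyword filter
#         ...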
1463 def _ensure_matching_row(self) -> Row:
1464 if not getattr(self, "_row", None):
1465 raise EnvironmentError("Trying to access non-existent row. Maybe it was deleted or not yet initialized?")
1466 return self._row
1468 def __repr__(self) -> str:
1469 """
1470 String representation of the model instance.
1471 """
1472 model_name = self.__class__.__name__
1473 model_data = {}
1475 if self._row:
1476 model_data = self._row.as_json()
1478 details = model_name
1479 details += f"({model_data})"
1481 if relationships := getattr(self, "_with", []):
1482 details += f" + {relationships}"
1484 return f"<{details}>"
1486 # serialization
1487 # underscore variants work for class instances (set up by _setup_instance_methods)
1489 @classmethod
1490 def as_dict(cls, flat: bool = False, sanitize: bool = True) -> AnyDict:
1491 """
1492 Dump the object to a plain dict.
1494 Can be used as both a class or instance method:
1495 - dumps the table info if it's a class
1496 - dumps the row info if it's an instance (see _as_dict)
1497 """
1498 table = cls._ensure_table_defined()
1499 result = table.as_dict(flat, sanitize)
1500 return typing.cast(AnyDict, result)
1502 @classmethod
1503 def as_json(cls, sanitize: bool = True, indent: Optional[int] = None, **kwargs: Any) -> str:
1504 """
1505 Dump the object to json.
1507 Can be used as both a class or instance method:
1508 - dumps the table info if it's a class
1509 - dumps the row info if it's an instance (see _as_json)
1510 """
1511 data = cls.as_dict(sanitize=sanitize)
1512 return as_json.encode(data, indent=indent, **kwargs)
1514 @classmethod
1515 def as_xml(cls, sanitize: bool = True) -> str: # pragma: no cover
1516 """
1517 Dump the object to xml.
1519 Can be used as both a class or instance method:
1520 - dumps the table info if it's a class
1521 - dumps the row info if it's an instance (see _as_xml)
1522 """
1523 table = cls._ensure_table_defined()
1524 return typing.cast(str, table.as_xml(sanitize))
1526 @classmethod
1527 def as_yaml(cls, sanitize: bool = True) -> str:
1528 """
1529 Dump the object to yaml.
1531 Can be used as both a class or instance method:
1532 - dumps the table info if it's a class
1533 - dumps the row info if it's an instance (see _as_yaml)
1534 """
1535 table = cls._ensure_table_defined()
1536 return typing.cast(str, table.as_yaml(sanitize))
1538 def _as_dict(
1539 self, datetime_to_str: bool = False, custom_types: typing.Iterable[type] | type | None = None
1540 ) -> AnyDict:
1541 row = self._ensure_matching_row()
1543 result = row.as_dict(datetime_to_str=datetime_to_str, custom_types=custom_types)
1545 def asdict_method(obj: Any) -> Any: # pragma: no cover
1546 if hasattr(obj, "_as_dict"): # typedal
1547 return obj._as_dict()
1548 elif hasattr(obj, "as_dict"): # pydal
1549 return obj.as_dict()
1550 else: # something else??
1551 return obj.__dict__
1553 if _with := getattr(self, "_with", None):
1554 for relationship in _with:
1555 data = self.get(relationship)
1557 if isinstance(data, list):
1558 data = [asdict_method(_) for _ in data]
1559 elif data:
1560 data = asdict_method(data)
1562 result[relationship] = data
1564 return typing.cast(AnyDict, result)
1566 def _as_json(
1567 self,
1568 default: typing.Callable[[Any], Any] = None,
1569 indent: Optional[int] = None,
1570 **kwargs: Any,
1571 ) -> str:
1572 data = self._as_dict()
1573 return as_json.encode(data, default=default, indent=indent, **kwargs)
1575 def _as_xml(self, sanitize: bool = True) -> str: # pragma: no cover
1576 row = self._ensure_matching_row()
1577 return typing.cast(str, row.as_xml(sanitize))
1579 # def _as_yaml(self, sanitize: bool = True) -> str:
1580 # row = self._ensure_matching_row()
1581 # return typing.cast(str, row.as_yaml(sanitize))
1583 def __setattr__(self, key: str, value: Any) -> None:
1584 """
1585 When setting a property on a Typed Table model instance, also update the underlying row.
1586 """
1587 if self._row and key in self._row.__dict__ and not callable(value):
1588 # enables `row.key = value; row.update_record()`
1589 self._row[key] = value
1591 super().__setattr__(key, value)
1593 @classmethod
1594 def update(cls: typing.Type[T_MetaInstance], query: Query, **fields: Any) -> T_MetaInstance | None:
1595 """
1596 Update one record.
1598 Example:
1599 MyTable.update(MyTable.id == 1, name="NewName") -> MyTable
1600 """
1601 # todo: update multiple?
1602 if record := cls(query):
1603 return record.update_record(**fields)
1604 else:
1605 return None
1607 def _update(self: T_MetaInstance, **fields: Any) -> T_MetaInstance:
1608 row = self._ensure_matching_row()
1609 row.update(**fields)
1610 self.__dict__.update(**fields)
1611 return self
1613 def _update_record(self: T_MetaInstance, **fields: Any) -> T_MetaInstance:
1614 row = self._ensure_matching_row()
1615 new_row = row.update_record(**fields)
1616 self.update(**new_row)
1617 return self
1619 def update_record(self: T_MetaInstance, **fields: Any) -> T_MetaInstance: # pragma: no cover
1620 """
1621 Here as a placeholder for _update_record.
1623 Will be replaced on instance creation!
1624 """
1625 return self._update_record(**fields)
1627 def _delete_record(self) -> int:
1628 """
1629 Actual logic in `pydal.helpers.classes.RecordDeleter`.
1630 """
1631 row = self._ensure_matching_row()
1632 result = row.delete_record()
1633 self.__dict__ = {} # empty self, since row is no more.
1634 self._row = None # just to be sure
1635 self._setup_instance_methods()
1636 # ^ instance methods might've been deleted by emptying dict,
1637 # but we still want .as_dict to show an error, not the table's as_dict.
1638 return typing.cast(int, result)
1640 def delete_record(self) -> int: # pragma: no cover
1641 """
1642 Here as a placeholder for _delete_record.
1644 Will be replaced on instance creation!
1645 """
1646 return self._delete_record()
1648 # __del__ is also called on the end of a scope so don't remove records on every del!!
1650 # pickling:
1652 def __getstate__(self) -> AnyDict:
1653 """
1654 State to save when pickling.
1656 Prevents db connection from being pickled.
1657 Similar to as_dict but without changing the data of the relationships (dill does that recursively)
1658 """
1659 row = self._ensure_matching_row()
1660 result: AnyDict = row.as_dict()
1662 if _with := getattr(self, "_with", None):
1663 result["_with"] = _with
1664 for relationship in _with:
1665 data = self.get(relationship)
1667 result[relationship] = data
1669 result["_row"] = self._row.as_json() if self._row else ""
1670 return result
1672 def __setstate__(self, state: AnyDict) -> None:
1673 """
1674 Used by dill when loading from a bytestring.
1675 """
1676 # as_dict also includes table info, so dump as json to only get the actual row data
1677 # then create a new (more empty) row object:
1678 state["_row"] = Row(json.loads(state["_row"]))
1679 self.__dict__ |= state
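# Illustrative sketch (commented out): with __getstate__/__setstate__ in place, instances
# (including already-joined relationship data) can be round-tripped through dill/pickle
# without dragging the db connection along. Assumes `dill` is installed; `user` is hypothetical.
#
#     import dill
#     restored = dill.loads(dill.dumps(user))
#     restored.name   # still readable; restored._row was rebuilt from its JSON dump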
1682# backwards compat:
1683TypedRow = TypedTable
1686class TypedRows(typing.Collection[T_MetaInstance], Rows):
1687 """
1688 Slightly enhanced and typed functionality on top of pydal Rows (the result of a select).
1689 """
1691 records: dict[int, T_MetaInstance]
1692 # _rows: Rows
1693 model: typing.Type[T_MetaInstance]
1694 metadata: Metadata
1696 # pseudo-properties: actually stored in _rows
1697 db: TypeDAL
1698 colnames: list[str]
1699 fields: list[Field]
1700 colnames_fields: list[Field]
1701 response: list[tuple[Any, ...]]
1703 def __init__(
1704 self,
1705 rows: Rows,
1706 model: typing.Type[T_MetaInstance],
1707 records: dict[int, T_MetaInstance] = None,
1708 metadata: Metadata = None,
1709 ) -> None:
1710 """
1711 Should not be called manually!
1713 Normally, the `records` from an existing `Rows` object are used
1714 but these can be overwritten with a `records` dict.
1715 `metadata` can be any (un)structured data
1716 `model` is a Typed Table class
1717 """
1718 records = records or {row.id: model(row) for row in rows}
1719 super().__init__(rows.db, records, rows.colnames, rows.compact, rows.response, rows.fields)
1720 self.model = model
1721 self.metadata = metadata or {}
1722 self.colnames = rows.colnames
1724 def __len__(self) -> int:
1725 """
1726 Return the count of rows.
1727 """
1728 return len(self.records)
1730 def __iter__(self) -> typing.Iterator[T_MetaInstance]:
1731 """
1732 Loop through the rows.
1733 """
1734 yield from self.records.values()
1736 def __contains__(self, ind: Any) -> bool:
1737 """
1738 Check if an id exists in this result set.
1739 """
1740 return ind in self.records
1742 def first(self) -> T_MetaInstance | None:
1743 """
1744 Get the row with the lowest id.
1745 """
1746 if not self.records:
1747 return None
1749 return next(iter(self))
1751 def last(self) -> T_MetaInstance | None:
1752 """
1753 Get the row with the highest id.
1754 """
1755 if not self.records:
1756 return None
1758 max_id = max(self.records.keys())
1759 return self[max_id]
1761 def find(
1762 self, f: typing.Callable[[T_MetaInstance], Query], limitby: tuple[int, int] = None
1763 ) -> "TypedRows[T_MetaInstance]":
1764 """
1765 Returns a new Rows object, a subset of the original object, filtered by the function `f`.
1766 """
1767 if not self.records:
1768 return self.__class__(self, self.model, {})
1770 records = {}
1771 if limitby:
1772 _min, _max = limitby
1773 else:
1774 _min, _max = 0, len(self)
1775 count = 0
1776 for i, row in self.records.items():
1777 if f(row):
1778 if _min <= count:
1779 records[i] = row
1780 count += 1
1781 if count == _max:
1782 break
1784 return self.__class__(self, self.model, records)
1786 def exclude(self, f: typing.Callable[[T_MetaInstance], Query]) -> "TypedRows[T_MetaInstance]":
1787 """
1788 Removes elements from the calling Rows object, filtered by the function `f`, \
1789 and returns a new Rows object containing the removed elements.
1790 """
1791 if not self.records:
1792 return self.__class__(self, self.model, {})
1793 removed = {}
1794 to_remove = []
1795 for i in self.records:
1796 row = self[i]
1797 if f(row):
1798 removed[i] = self.records[i]
1799 to_remove.append(i)
1801 [self.records.pop(i) for i in to_remove]
1803 return self.__class__(
1804 self,
1805 self.model,
1806 removed,
1807 )
1809 def sort(self, f: typing.Callable[[T_MetaInstance], Any], reverse: bool = False) -> list[T_MetaInstance]:
1810 """
1811 Returns a list of sorted elements (not sorted in place).
1812 """
1813 return [r for (r, s) in sorted(zip(self.records.values(), self), key=lambda r: f(r[1]), reverse=reverse)]
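# Illustrative usage (commented out) of the in-memory helpers above; `users` is a
# hypothetical TypedRows[User] result set:
#
#     admins = users.find(lambda u: u.role == "admin")   # new subset, `users` is untouched
#     removed = users.exclude(lambda u: not u.active)    # returns removed rows, `users` is mutated
#     by_name = users.sort(lambda u: u.name)             # plain sorted list of instances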
1815 def __str__(self) -> str:
1816 """
1817 Simple string representation.
1818 """
1819 return f"<TypedRows with {len(self)} records>"
1821 def __repr__(self) -> str:
1822 """
1823 Print a table on repr().
1824 """
1825 data = self.as_dict()
1826 headers = list(next(iter(data.values())).keys())
1827 return mktable(data, headers)
1829 def group_by_value(
1830 self, *fields: "str | Field | TypedField[T]", one_result: bool = False, **kwargs: Any
1831 ) -> dict[T, list[T_MetaInstance]]:
1832 """
1833 Group the rows by a specific field (which will be the dict key).
1834 """
1835 kwargs["one_result"] = one_result
1836 result = super().group_by_value(*fields, **kwargs)
1837 return typing.cast(dict[T, list[T_MetaInstance]], result)
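# Illustrative usage (commented out); `posts` is a hypothetical TypedRows[Post] result set:
#
#     grouped = posts.group_by_value("author")  # {author_id: [Post, ...], ...}
#     # with one_result=True, each key maps to a single row instead of a list:
#     latest = posts.group_by_value(Post.author, one_result=True)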
1839 def column(self, column: str = None) -> list[Any]:
1840 """
1841 Get a list of all values in a specific column.
1843 Example:
1844 rows.column('name') -> ['Name 1', 'Name 2', ...]
1845 """
1846 return typing.cast(list[Any], super().column(column))
1848 def as_csv(self) -> str:
1849 """
1850 Dump the data to csv.
1851 """
1852 return typing.cast(str, super().as_csv())
1854 def as_dict(
1855 self,
1856 key: str = None,
1857 compact: bool = False,
1858 storage_to_dict: bool = False,
1859 datetime_to_str: bool = False,
1860 custom_types: list[type] = None,
1861 ) -> dict[int, AnyDict]:
1862 """
1863 Get the data in a dict of dicts.
1864 """
1865 if any([key, compact, storage_to_dict, datetime_to_str, custom_types]):
1866 # functionality not guaranteed
1867 return typing.cast(
1868 dict[int, AnyDict],
1869 super().as_dict(
1870 key or "id",
1871 compact,
1872 storage_to_dict,
1873 datetime_to_str,
1874 custom_types,
1875 ),
1876 )
1878 return {k: v.as_dict() for k, v in self.records.items()}
1880 def as_json(self, default: typing.Callable[[Any], Any] = None, indent: Optional[int] = None, **kwargs: Any) -> str:
1881 """
1882 Turn the data into a list of dicts and then dump to JSON.
1883 """
1884 data = self.as_list()
1886 return as_json.encode(data, default=default, indent=indent, **kwargs)
1888 def json(self, default: typing.Callable[[Any], Any] = None, indent: Optional[int] = None, **kwargs: Any) -> str:
1889 """
1890 Turn the data into a list of dicts and then dump to JSON.
1891 """
1892 return self.as_json(default=default, indent=indent, **kwargs)
1894 def as_list(
1895 self,
1896 compact: bool = False,
1897 storage_to_dict: bool = False,
1898 datetime_to_str: bool = False,
1899 custom_types: list[type] = None,
1900 ) -> list[AnyDict]:
1901 """
1902 Get the data in a list of dicts.
1903 """
1904 if any([compact, storage_to_dict, datetime_to_str, custom_types]):
1905 return typing.cast(list[AnyDict], super().as_list(compact, storage_to_dict, datetime_to_str, custom_types))
1907 return [_.as_dict() for _ in self.records.values()]
1909 def __getitem__(self, item: int) -> T_MetaInstance:
1910 """
1911 You can get a specific row by ID from a TypedRows object by using rows[idx] notation.
1913 Since pydal's implementation differs (they expect a list instead of a dict with id keys),
1914 using rows[0] will return the first row, regardless of its id.
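Example:
    rows[5]  # the row with id 5 (raises KeyError if it is not in this result set)
    rows[0]  # special case: the first row, regardless of its id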
1915 """
1916 try:
1917 return self.records[item]
1918 except KeyError as e:
1919 if item == 0 and (row := self.first()):
1920 # special case: pydal internals think Rows.records is a list, not a dict
1921 return row
1923 raise e
1925 def get(self, item: int) -> typing.Optional[T_MetaInstance]:
1926 """
1927 Get a row by ID, returning None if it isn't in this result set.
1928 """
1929 return self.records.get(item)
1931 def update(self, **new_values: Any) -> bool:
1932 """
1933 Update the current rows in the database with new_values.
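Example (a usage sketch; the `status` field is hypothetical):
    rows.update(status='archived')  # updates every row in this result set in the database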
1934 """
1935 # cast to make mypy understand .id is a TypedField and not an int!
1936 table = typing.cast(typing.Type[TypedTable], self.model._ensure_table_defined())
1938 ids = set(self.column("id"))
1939 query = table.id.belongs(ids)
1940 return bool(self.db(query).update(**new_values))
1942 def delete(self) -> bool:
1943 """
1944 Delete the currently selected rows from the database.
1945 """
1946 # cast to make mypy understand .id is a TypedField and not an int!
1947 table = typing.cast(typing.Type[TypedTable], self.model._ensure_table_defined())
1949 ids = set(self.column("id"))
1950 query = table.id.belongs(ids)
1951 return bool(self.db(query).delete())
1953 def join(
1954 self,
1955 field: "Field | TypedField[Any]",
1956 name: str = None,
1957 constraint: Query = None,
1958 fields: list[str | Field] = None,
1959 orderby: Optional[str | Field] = None,
1960 ) -> T_MetaInstance:
1961 """
1962 This can be used to JOIN with some relationships after the initial select.
1964 Using the query builder's .join() method is preferred!
1965 """
1966 result = super().join(field, name, constraint, fields or [], orderby)
1967 return typing.cast(T_MetaInstance, result)
1969 def export_to_csv_file(
1970 self,
1971 ofile: typing.TextIO,
1972 null: Any = "<NULL>",
1973 delimiter: str = ",",
1974 quotechar: str = '"',
1975 quoting: int = csv.QUOTE_MINIMAL,
1976 represent: bool = False,
1977 colnames: list[str] = None,
1978 write_colnames: bool = True,
1979 *args: Any,
1980 **kwargs: Any,
1981 ) -> None:
1982 """
1983 Shadow export_to_csv_file from Rows, but with typing.
1985 See http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#Exporting-and-importing-data
1986 """
1987 super().export_to_csv_file(
1988 ofile,
1989 null,
1990 *args,
1991 delimiter=delimiter,
1992 quotechar=quotechar,
1993 quoting=quoting,
1994 represent=represent,
1995 colnames=colnames or self.colnames,
1996 write_colnames=write_colnames,
1997 **kwargs,
1998 )
2000 @classmethod
2001 def from_rows(
2002 cls, rows: Rows, model: typing.Type[T_MetaInstance], metadata: Metadata = None
2003 ) -> "TypedRows[T_MetaInstance]":
2004 """
2005 Internal method to convert a Rows object to a TypedRows.
2006 """
2007 return cls(rows, model, metadata=metadata)
2009 def __getstate__(self) -> AnyDict:
2010 """
2011 Used by dill to dump to bytes (exclude db connection etc).
2012 """
2013 return {
2014 "metadata": json.dumps(self.metadata, default=str),
2015 "records": self.records,
2016 "model": str(self.model._table),
2017 "colnames": self.colnames,
2018 }
2020 def __setstate__(self, state: AnyDict) -> None:
2021 """
2022 Used by dill when loading from a bytestring.
2023 """
2024 state["metadata"] = json.loads(state["metadata"])
2025 self.__dict__.update(state)
2026 # db etc. set after undill by caching.py
2029from .caching import ( # noqa: E402
2030 _remove_cache,
2031 _TypedalCache,
2032 _TypedalCacheDependency,
2033 create_and_hash_cache_key,
2034 get_expire,
2035 load_from_cache,
2036 save_to_cache,
2037)
2040class QueryBuilder(typing.Generic[T_MetaInstance]):
2041 """
2042 Abstraction on top of pydal's query system.
2043 """
2045 model: typing.Type[T_MetaInstance]
2046 query: Query
2047 select_args: list[Any]
2048 select_kwargs: AnyDict
2049 relationships: dict[str, Relationship[Any]]
2050 metadata: Metadata
2052 def __init__(
2053 self,
2054 model: typing.Type[T_MetaInstance],
2055 add_query: Optional[Query] = None,
2056 select_args: Optional[list[Any]] = None,
2057 select_kwargs: Optional[AnyDict] = None,
2058 relationships: dict[str, Relationship[Any]] = None,
2059 metadata: Metadata = None,
2060 ):
2061 """
2062 Normally, you wouldn't initialize a QueryBuilder manually, but start from a method on a TypedTable.
2064 Example:
2065 MyTable.where(...) -> QueryBuilder[MyTable]
2066 """
2067 self.model = model
2068 table = model._ensure_table_defined()
2069 default_query = typing.cast(Query, table.id > 0)
2070 self.query = add_query or default_query
2071 self.select_args = select_args or []
2072 self.select_kwargs = select_kwargs or {}
2073 self.relationships = relationships or {}
2074 self.metadata = metadata or {}
2076 def __str__(self) -> str:
2077 """
2078 Simple string representation for the query builder.
2079 """
2080 return f"QueryBuilder for {self.model}"
2082 def __repr__(self) -> str:
2083 """
2084 Advanced string representation for the query builder.
2085 """
2086 return (
2087 f"<QueryBuilder for {self.model} with "
2088 f"{len(self.select_args)} select args; "
2089 f"{len(self.select_kwargs)} select kwargs; "
2090 f"{len(self.relationships)} relationships; "
2091 f"query: {bool(self.query)}; "
2092 f"metadata: {self.metadata}; "
2093 f">"
2094 )
2096 def __bool__(self) -> bool:
2097 """
2098 Querybuilder is truthy if it has rows.
2099 """
2100 return self.count() > 0
2102 def _extend(
2103 self,
2104 add_query: Optional[Query] = None,
2105 overwrite_query: Optional[Query] = None,
2106 select_args: Optional[list[Any]] = None,
2107 select_kwargs: Optional[AnyDict] = None,
2108 relationships: dict[str, Relationship[Any]] = None,
2109 metadata: Metadata = None,
2110 ) -> "QueryBuilder[T_MetaInstance]":
2111 return QueryBuilder(
2112 self.model,
2113 (add_query & self.query) if add_query else overwrite_query or self.query,
2114 (self.select_args + select_args) if select_args else self.select_args,
2115 (self.select_kwargs | select_kwargs) if select_kwargs else self.select_kwargs,
2116 (self.relationships | relationships) if relationships else self.relationships,
2117 (self.metadata | (metadata or {})) if metadata else self.metadata,
2118 )
2120 def select(self, *fields: Any, **options: Any) -> "QueryBuilder[T_MetaInstance]":
2121 """
2122 Fields: database columns by name ('id'), by field reference (table.id) or other (e.g. table.ALL).
2124 Options:
2125 Paraphrased from the web2py pydal docs.
2126 For more info, see http://www.web2py.com/books/default/chapter/29/06/the-database-abstraction-layer#orderby-groupby-limitby-distinct-having-orderby_on_limitby-join-left-cache
2128 orderby: field(s) to order by. Supported:
2129 table.name - sort by name, ascending
2130 ~table.name - sort by name, descending
2131 <random> - sort randomly
2132 table.name|table.id - sort by two fields (first name, then id)
2134 groupby, having: together with orderby:
2135 groupby can be a field (e.g. table.name) to group records by
2136 having can be a query; only groups satisfying the `having` condition are returned
2138 limitby: tuple of min and max. When using the query builder, .paginate(limit, page) is recommended.
2139 distinct: bool/field. Only select rows that differ
2140 orderby_on_limitby (bool, default: True): by default, an implicit orderby is added when doing limitby.
2141 join: othertable.on(query) - do an INNER JOIN. Using TypeDAL relationships with .join() is recommended!
2142 left: othertable.on(query) - do a LEFT JOIN. Using TypeDAL relationships with .join() is recommended!
2143 cache: cache the query result to speed up repeated queries; e.g. (cache=(cache.ram, 3600), cacheable=True)
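Example (a usage sketch; `Person` and its `name` field are hypothetical):
    builder = Person.where(lambda table: table.id > 0).select(Person.id, Person.name, orderby=~Person.name)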
2144 """
2145 return self._extend(select_args=list(fields), select_kwargs=options)
2147 def where(
2148 self,
2149 *queries_or_lambdas: Query | typing.Callable[[typing.Type[T_MetaInstance]], Query],
2150 **filters: Any,
2151 ) -> "QueryBuilder[T_MetaInstance]":
2152 """
2153 Extend the builder's query.
2155 Can be used in multiple ways:
2156 .where(Query) -> with a direct query such as `Table.id == 5`
2157 .where(lambda table: table.id == 5) -> with a query via a lambda
2158 .where(id=5) -> via keyword arguments
2160 When chaining multiple .where calls, the queries will be ANDed:
2161 .where(lambda table: table.id == 5).where(lambda table: table.id == 6) == (table.id == 5) & (table.id == 6)
2162 When passing multiple queries to a single .where, they will be ORed:
2163 .where(lambda table: table.id == 5, lambda table: table.id == 6) == (table.id == 5) | (table.id == 6)
2164 """
2165 new_query = self.query
2166 table = self.model._ensure_table_defined()
2168 for field, value in filters.items():
2169 new_query &= table[field] == value
2171 subquery: DummyQuery | Query = DummyQuery()
2172 for query_or_lambda in queries_or_lambdas:
2173 if isinstance(query_or_lambda, _Query):
2174 subquery |= typing.cast(Query, query_or_lambda)
2175 elif callable(query_or_lambda):
2176 if result := query_or_lambda(self.model):
2177 subquery |= result
2178 elif isinstance(query_or_lambda, (Field, _Field)) or is_typed_field(query_or_lambda):
2179 subquery |= typing.cast(Query, query_or_lambda != None)
2180 else:
2181 raise ValueError(f"Unexpected query type ({type(query_or_lambda)}).")
2183 if subquery:
2184 new_query &= subquery
2186 return self._extend(overwrite_query=new_query)
2188 def join(
2189 self,
2190 *fields: str | typing.Type[TypedTable],
2191 method: JOIN_OPTIONS = None,
2192 on: OnQuery | list[Expression] | Expression = None,
2193 condition: Condition = None,
2194 ) -> "QueryBuilder[T_MetaInstance]":
2195 """
2196 Include relationship fields in the result.
2198 `fields` can be names of Relationships on the current model.
2199 If no fields are passed, all will be used.
2201 By default, the `method` defined in the relationship is used.
2202 This can be overridden with the `method` keyword argument ('left' or 'inner').
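Example (a usage sketch; `Person` and its `pets` relationship are hypothetical):
    people = Person.where(lambda table: table.id > 0).join('pets', method='left').collect()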
2203 """
2204 # todo: allow limiting the number of related rows returned for a join?
2206 relationships = self.model.get_relationships()
2208 if condition and on:
2209 raise ValueError("condition and on can not be used together!")
2210 elif condition:
2211 if len(fields) != 1:
2212 raise ValueError("join(field, condition=...) can only be used with exactly one field!")
2214 if isinstance(condition, pydal.objects.Query):
2215 condition = as_lambda(condition)
2217 relationships = {str(fields[0]): relationship(fields[0], condition=condition, join=method)}
2218 elif on:
2219 if len(fields) != 1:
2220 raise ValueError("join(field, on=...) can only be used with exactly one field!")
2222 if isinstance(on, pydal.objects.Expression):
2223 on = [on]
2225 if isinstance(on, list):
2226 on = as_lambda(on)
2227 relationships = {str(fields[0]): relationship(fields[0], on=on, join=method)}
2229 else:
2230 if fields:
2231 # limit the join to the specified relationships (without fields, every relationship is joined)
2232 relationships = {str(k): relationships[str(k)] for k in fields}
2234 if method:
2235 relationships = {str(k): r.clone(join=method) for k, r in relationships.items()}
2237 return self._extend(relationships=relationships)
2239 def cache(
2240 self, *deps: Any, expires_at: Optional[dt.datetime] = None, ttl: Optional[int | dt.timedelta] = None
2241 ) -> "QueryBuilder[T_MetaInstance]":
2242 """
2243 Enable caching for this query so repeated calls load the result from a dill-serialized row \
2244 instead of executing the SQL and collecting matching rows again.
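Example (a usage sketch; `Person` is a hypothetical model):
    rows = Person.where(lambda table: table.id > 0).cache(ttl=3600).collect()  # repeated calls within an hour load from cache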
2245 """
2246 existing = self.metadata.get("cache", {})
2248 metadata: Metadata = {}
2250 cache_meta = typing.cast(
2251 CacheMetadata,
2252 self.metadata.get("cache", {})
2253 | {
2254 "enabled": True,
2255 "depends_on": existing.get("depends_on", []) + [str(_) for _ in deps],
2256 "expires_at": get_expire(expires_at=expires_at, ttl=ttl),
2257 },
2258 )
2260 metadata["cache"] = cache_meta
2261 return self._extend(metadata=metadata)
2263 def _get_db(self) -> TypeDAL:
2264 if db := self.model._db:
2265 return db
2266 else: # pragma: no cover
2267 raise EnvironmentError("@define or db.define is not called on this class yet!")
2269 def _select_arg_convert(self, arg: Any) -> Any:
2270 # typedfield are not really used at runtime anymore, but leave it in for safety:
2271 if isinstance(arg, TypedField): # pragma: no cover
2272 arg = arg._field
2274 return arg
2276 def delete(self) -> list[int]:
2277 """
2278 Based on the current query, delete rows and return a list of deleted IDs.
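Example (a usage sketch; `Person` is a hypothetical model):
    removed_ids = Person.where(lambda table: table.id > 100).delete()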
2279 """
2280 db = self._get_db()
2281 removed_ids = [_.id for _ in db(self.query).select("id")]
2282 if db(self.query).delete():
2283 # success!
2284 return removed_ids
2286 return []
2288 def _delete(self) -> str:
2289 db = self._get_db()
2290 return str(db(self.query)._delete())
2292 def update(self, **fields: Any) -> list[int]:
2293 """
2294 Based on the current query, update `fields` and return a list of updated IDs.
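Example (a usage sketch; `Person` and its `active` field are hypothetical):
    updated_ids = Person.where(active=False).update(active=True)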
2295 """
2296 # todo: limit?
2297 db = self._get_db()
2298 updated_ids = db(self.query).select("id").column("id")
2299 if db(self.query).update(**fields):
2300 # success!
2301 return updated_ids
2303 return []
2305 def _update(self, **fields: Any) -> str:
2306 db = self._get_db()
2307 return str(db(self.query)._update(**fields))
2309 def _before_query(self, mut_metadata: Metadata, add_id: bool = True) -> tuple[Query, list[Any], AnyDict]:
2310 select_args = [self._select_arg_convert(_) for _ in self.select_args] or [self.model.ALL]
2311 select_kwargs = self.select_kwargs.copy()
2312 query = self.query
2313 model = self.model
2314 mut_metadata["query"] = query
2315 # require at least id of main table:
2316 select_fields = ", ".join([str(_) for _ in select_args])
2317 tablename = str(model)
2319 if add_id and f"{tablename}.id" not in select_fields:
2320 # other fields were selected, but the required id of the main table is missing.
2321 select_args.append(model.id)
2323 if self.relationships:
2324 query, select_args = self._handle_relationships_pre_select(query, select_args, select_kwargs, mut_metadata)
2326 return query, select_args, select_kwargs
2328 def to_sql(self, add_id: bool = False) -> str:
2329 """
2330 Generate the SQL for the built query.
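Example (a usage sketch; `Person` is a hypothetical model):
    sql = Person.where(lambda table: table.id > 0).to_sql()  # the SELECT statement as a string; nothing is executed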
2331 """
2332 db = self._get_db()
2334 query, select_args, select_kwargs = self._before_query({}, add_id=add_id)
2336 return str(db(query)._select(*select_args, **select_kwargs))
2338 def _collect(self) -> str:
2339 """
2340 Alias for to_sql, pydal-like syntax.
2341 """
2342 return self.to_sql()
2344 def _collect_cached(self, metadata: Metadata) -> "TypedRows[T_MetaInstance] | None":
2345 expires_at = metadata["cache"].get("expires_at")
2346 metadata["cache"] |= {
2347 # the key is partly dependent on cache metadata, but not on these fields:
2348 "key": None,
2349 "status": None,
2350 "cached_at": None,
2351 "expires_at": None,
2352 }
2354 _, key = create_and_hash_cache_key(
2355 self.model,
2356 metadata,
2357 self.query,
2358 self.select_args,
2359 self.select_kwargs,
2360 self.relationships.keys(),
2361 )
2363 # re-set after creating key:
2364 metadata["cache"]["expires_at"] = expires_at
2365 metadata["cache"]["key"] = key
2367 return load_from_cache(key, self._get_db())
2369 def execute(self, add_id: bool = False) -> Rows:
2370 """
2371 Raw version of .collect which only executes the SQL, without performing any magic afterwards.
2372 """
2373 db = self._get_db()
2374 metadata = typing.cast(Metadata, self.metadata.copy())
2376 query, select_args, select_kwargs = self._before_query(metadata, add_id=add_id)
2378 return db(query).select(*select_args, **select_kwargs)
2380 def collect(
2381 self, verbose: bool = False, _to: typing.Type["TypedRows[Any]"] = None, add_id: bool = True
2382 ) -> "TypedRows[T_MetaInstance]":
2383 """
2384 Execute the built query and turn it into model instances, while handling relationships.
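Example (a usage sketch; `Person` is a hypothetical model):
    rows = Person.where(lambda table: table.id > 0).collect()
    for person in rows:
        ...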
2385 """
2386 if _to is None:
2387 _to = TypedRows
2389 db = self._get_db()
2390 metadata = typing.cast(Metadata, self.metadata.copy())
2392 if metadata.get("cache", {}).get("enabled") and (result := self._collect_cached(metadata)):
2393 return result
2395 query, select_args, select_kwargs = self._before_query(metadata, add_id=add_id)
2397 metadata["sql"] = db(query)._select(*select_args, **select_kwargs)
2399 if verbose: # pragma: no cover
2400 print(metadata["sql"])
2402 rows: Rows = db(query).select(*select_args, **select_kwargs)
2404 metadata["final_query"] = str(query)
2405 metadata["final_args"] = [str(_) for _ in select_args]
2406 metadata["final_kwargs"] = select_kwargs
2408 if verbose: # pragma: no cover
2409 print(rows)
2411 if not self.relationships:
2412 # easy
2413 typed_rows = _to.from_rows(rows, self.model, metadata=metadata)
2415 else:
2416 # harder: try to match rows to the belonging objects
2417 # assume structure of {'table': <data>} per row.
2418 # if that's not the case, return default behavior again
2419 typed_rows = self._collect_with_relationships(rows, metadata=metadata, _to=_to)
2421 # only saves if requested in metadata:
2422 return save_to_cache(typed_rows, rows)
2424 def _handle_relationships_pre_select(
2425 self,
2426 query: Query,
2427 select_args: list[Any],
2428 select_kwargs: AnyDict,
2429 metadata: Metadata,
2430 ) -> tuple[Query, list[Any]]:
2431 db = self._get_db()
2432 model = self.model
2434 metadata["relationships"] = set(self.relationships.keys())
2436 # query = self._update_query_for_inner(db, model, query)
2437 join = []
2438 for key, relation in self.relationships.items():
2439 if not relation.condition or relation.join != "inner":
2440 continue
2442 other = relation.get_table(db)
2443 other = other.with_alias(f"{key}_{hash(relation)}")
2444 join.append(other.on(relation.condition(model, other)))
2446 if limitby := select_kwargs.pop("limitby", None):
2447 # if limitby + relationships:
2448 # 1. get IDs of main table entries that match 'query'
2449 # 2. change query to .belongs(id)
2450 # 3. add joins etc
2452 kwargs = {"limitby": limitby}
2454 if join:
2455 kwargs["join"] = join
2457 ids = db(query)._select(model.id, **kwargs)
2458 query = model.id.belongs(ids)
2459 metadata["ids"] = ids
2461 if join:
2462 select_kwargs["join"] = join
2464 left = []
2466 for key, relation in self.relationships.items():
2467 other = relation.get_table(db)
2468 method: JOIN_OPTIONS = relation.join or DEFAULT_JOIN_OPTION
2470 select_fields = ", ".join([str(_) for _ in select_args])
2471 pre_alias = str(other)
2473 if f"{other}." not in select_fields:
2474 # no fields of other selected. add .ALL:
2475 select_args.append(other.ALL)
2476 elif f"{other}.id" not in select_fields:
2477 # fields of other selected, but required ID is missing.
2478 select_args.append(other.id)
2480 if relation.on:
2481 # if it has a .on, it's always a left join!
2482 on = relation.on(model, other)
2483 if not isinstance(on, list): # pragma: no cover
2484 on = [on]
2486 left.extend(on)
2487 elif method == "left":
2488 # .on not given, generate it:
2489 other = other.with_alias(f"{key}_{hash(relation)}")
2490 condition = typing.cast(Query, relation.condition(model, other))
2491 left.append(other.on(condition))
2492 else:
2493 # else: inner join (handled earlier)
2494 other = other.with_alias(f"{key}_{hash(relation)}") # only for replace
2495 # other = other.with_alias(f"{key}_{hash(relation)}")
2496 # query &= relation.condition(model, other)
2498 # if no fields of 'other' are included, add other.ALL
2499 # else: only add other.id if missing
2500 select_fields = ", ".join([str(_) for _ in select_args])
2502 post_alias = str(other).split(" AS ")[-1]
2503 if pre_alias != post_alias:
2504 # replace .select's with aliased:
2505 select_fields = select_fields.replace(
2506 f"{pre_alias}.",
2507 f"{post_alias}.",
2508 )
2510 select_args = select_fields.split(", ")
2512 select_kwargs["left"] = left
2513 return query, select_args
2515 def _collect_with_relationships(
2516 self, rows: Rows, metadata: Metadata, _to: typing.Type["TypedRows[Any]"]
2517 ) -> "TypedRows[T_MetaInstance]":
2518 """
2519 Transform the raw rows into TypedTable model instances.
2520 """
2521 db = self._get_db()
2522 main_table = self.model._ensure_table_defined()
2524 records = {}
2525 seen_relations: dict[str, set[str]] = defaultdict(set) # main id -> set of col + id for relation
2527 for row in rows:
2528 main = row[main_table]
2529 main_id = main.id
2531 if main_id not in records:
2532 records[main_id] = self.model(main)
2533 records[main_id]._with = list(self.relationships.keys())
2535 # set up all relationship defaults (once)
2536 for col, relationship in self.relationships.items():
2537 records[main_id][col] = [] if relationship.multiple else None
2539 # now add other relationship data
2540 for column, relation in self.relationships.items():
2541 relationship_column = f"{column}_{hash(relation)}"
2543 # relationship_column works for aliases with the same target column.
2544 # if col + relationship not in the row, just use the regular name.
2546 relation_data = (
2547 row[relationship_column] if relationship_column in row else row[relation.get_table_name()]
2548 )
2550 if relation_data.id is None:
2551 # always skip None ids
2552 continue
2554 if f"{column}-{relation_data.id}" in seen_relations[main_id]:
2555 # speed up duplicates
2556 continue
2557 else:
2558 seen_relations[main_id].add(f"{column}-{relation_data.id}")
2560 relation_table = relation.get_table(db)
2561 # hopefully an instance of a typed table and a regular row otherwise:
2562 instance = relation_table(relation_data) if looks_like(relation_table, TypedTable) else relation_data
2564 if relation.multiple:
2565 # create list of T
2566 if not isinstance(records[main_id].get(column), list): # pragma: no cover
2567 # should already be set up before!
2568 setattr(records[main_id], column, [])
2570 records[main_id][column].append(instance)
2571 else:
2572 # create single T
2573 records[main_id][column] = instance
2575 return _to(rows, self.model, records, metadata=metadata)
2577 def collect_or_fail(self, exception: Exception = None) -> "TypedRows[T_MetaInstance]":
2578 """
2579 Call .collect() and raise an error if nothing found.
2581 Basically unwraps Optional type.
2582 """
2583 if result := self.collect():
2584 return result
2586 if not exception:
2587 exception = ValueError("Nothing found!")
2589 raise exception
2591 def __iter__(self) -> typing.Generator[T_MetaInstance, None, None]:
2592 """
2593 You can start iterating a Query Builder object before calling collect, for ease of use.
2594 """
2595 yield from self.collect()
2597 def count(self) -> int:
2598 """
2599 Return the number of rows matching the current query.
2600 """
2601 db = self._get_db()
2602 model = self.model
2603 query = self.query
2605 for key, relation in self.relationships.items():
2606 if not relation.condition or relation.join != "inner":
2607 continue
2609 other = relation.get_table(db)
2610 other = other.with_alias(f"{key}_{hash(relation)}")
2611 query &= relation.condition(model, other)
2613 return db(query).count()
2615 def __paginate(
2616 self,
2617 limit: int,
2618 page: int = 1,
2619 ) -> "QueryBuilder[T_MetaInstance]":
2620 _from = limit * (page - 1)
2621 _to = limit * page
2623 available = self.count()
2625 metadata: Metadata = {}
2627 metadata["pagination"] = {
2628 "limit": limit,
2629 "current_page": page,
2630 "max_page": math.ceil(available / limit),
2631 "rows": available,
2632 "min_max": (_from, _to),
2633 }
2635 return self._extend(select_kwargs={"limitby": (_from, _to)}, metadata=metadata)
2637 def paginate(self, limit: int, page: int = 1, verbose: bool = False) -> "PaginatedRows[T_MetaInstance]":
2638 """
2639 Paginate transforms the more readable `page` and `limit` into pydal's internal limitby range.
2641 Note: when using relationships, this limit is only applied to the 'main' table and any number of extra rows \
2642 can be loaded with relationship data!
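Example (a usage sketch; `Person` is a hypothetical model):
    page = Person.where(lambda table: table.id > 0).paginate(limit=20, page=3)  # limitby (40, 60): records 41-60
    page.pagination  # {'current_page': 3, 'per_page': 20, 'total_pages': ..., ...}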
2643 """
2644 builder = self.__paginate(limit, page)
2646 rows = typing.cast(PaginatedRows[T_MetaInstance], builder.collect(verbose=verbose, _to=PaginatedRows))
2648 rows._query_builder = builder
2649 return rows
2651 def _paginate(
2652 self,
2653 limit: int,
2654 page: int = 1,
2655 ) -> str:
2656 builder = self.__paginate(limit, page)
2657 return builder._collect()
2659 def chunk(self, chunk_size: int) -> typing.Generator["TypedRows[T_MetaInstance]", Any, None]:
2660 """
2661 Generator that yields rows from a paginated source in chunks.
2663 This function retrieves rows from a paginated data source in chunks of the
2664 specified `chunk_size` and yields them as TypedRows.
2666 Example:
2667 ```
2668 for chunk_of_rows in Table.where(SomeTable.id > 5).chunk(100):
2669 for row in chunk_of_rows:
2670 # Process each row within the chunk.
2671 pass
2672 ```
2673 """
2674 page = 1
2676 while rows := self.__paginate(chunk_size, page).collect():
2677 yield rows
2678 page += 1
2680 def first(self, verbose: bool = False) -> T_MetaInstance | None:
2681 """
2682 Get the first row matching the currently built query.
2684 Also applies pagination (limit 1), since it would be a waste to select more rows than needed.
2685 """
2686 if row := self.paginate(page=1, limit=1, verbose=verbose).first():
2687 return self.model.from_row(row)
2688 else:
2689 return None
2691 def _first(self) -> str:
2692 return self._paginate(page=1, limit=1)
2694 def first_or_fail(self, exception: Exception = None, verbose: bool = False) -> T_MetaInstance:
2695 """
2696 Call .first() and raise an error if nothing found.
2698 Basically unwraps Optional type.
2699 """
2700 if inst := self.first(verbose=verbose):
2701 return inst
2703 if not exception:
2704 exception = ValueError("Nothing found!")
2706 raise exception
2709S = typing.TypeVar("S")
2712class PaginatedRows(TypedRows[T_MetaInstance]):
2713 """
2714 Extension on top of rows that is used when calling .paginate() instead of .collect().
2715 """
2717 _query_builder: QueryBuilder[T_MetaInstance]
2719 @property
2720 def data(self) -> list[T_MetaInstance]:
2721 """
2722 Get the underlying data.
2723 """
2724 return list(self.records.values())
2726 @property
2727 def pagination(self) -> Pagination:
2728 """
2729 Get all page info.
2730 """
2731 pagination_data = self.metadata["pagination"]
2733 has_next_page = pagination_data["current_page"] < pagination_data["max_page"]
2734 has_prev_page = pagination_data["current_page"] > 1
2735 return {
2736 "total_items": pagination_data["rows"],
2737 "current_page": pagination_data["current_page"],
2738 "per_page": pagination_data["limit"],
2739 "total_pages": pagination_data["max_page"],
2740 "has_next_page": has_next_page,
2741 "has_prev_page": has_prev_page,
2742 "next_page": pagination_data["current_page"] + 1 if has_next_page else None,
2743 "prev_page": pagination_data["current_page"] - 1 if has_prev_page else None,
2744 }
2746 def next(self) -> Self:
2747 """
2748 Get the next page.
2749 """
2750 data = self.metadata["pagination"]
2751 if data["current_page"] >= data["max_page"]:
2752 raise StopIteration("Final Page")
2754 return self._query_builder.paginate(limit=data["limit"], page=data["current_page"] + 1)
2756 def previous(self) -> Self:
2757 """
2758 Get the previous page.
2759 """
2760 data = self.metadata["pagination"]
2761 if data["current_page"] <= 1:
2762 raise StopIteration("First Page")
2764 return self._query_builder.paginate(limit=data["limit"], page=data["current_page"] - 1)
2766 def as_dict(self, *_: Any, **__: Any) -> PaginateDict: # type: ignore
2767 """
2768 Convert to a dictionary with pagination info and original data.
2770 All arguments are ignored!
2771 """
2772 return {"data": super().as_dict(), "pagination": self.pagination}
2775class TypedSet(pydal.objects.Set): # type: ignore # pragma: no cover
2776 """
2777 Used to make pydal Set more typed.
2779 This class is not actually used, only 'cast' by TypeDAL.__call__
2780 """
2782 def count(self, distinct: bool = None, cache: AnyDict = None) -> int:
2783 """
2784 Count returns an int.
2785 """
2786 result = super().count(distinct, cache)
2787 return typing.cast(int, result)
2789 def select(self, *fields: Any, **attributes: Any) -> TypedRows[T_MetaInstance]:
2790 """
2791 Select returns a TypedRows of a user defined table.
2793 Example:
2794 result: TypedRows[MyTable] = db(MyTable.id > 0).select()
2796 for row in result:
2797 typing.reveal_type(row) # MyTable
2798 """
2799 rows = super().select(*fields, **attributes)
2800 return typing.cast(TypedRows[T_MetaInstance], rows)