Coverage for src/typedal/core.py: 100%
898 statements
coverage.py v7.3.2, created at 2023-12-19 14:40 +0100
1"""
2Core functionality of TypeDAL.
3"""
4import contextlib
5import csv
6import datetime as dt
7import inspect
8import json
9import math
10import types
11import typing
12import warnings
13from collections import defaultdict
14from decimal import Decimal
15from pathlib import Path
16from typing import Any, Optional
18import pydal
19from pydal._globals import DEFAULT
20from pydal.objects import Field as _Field
21from pydal.objects import Query as _Query
22from pydal.objects import Row
23from pydal.objects import Table as _Table
24from typing_extensions import Self
26from .config import TypeDALConfig, load_config
27from .helpers import (
28 DummyQuery,
29 all_annotations,
30 all_dict,
31 as_lambda,
32 extract_type_optional,
33 filter_out,
34 instanciate,
35 is_union,
36 looks_like,
37 mktable,
38 origin_is_subclass,
39 to_snake,
40 unwrap_type,
41)
42from .serializers import as_json
43from .types import (
44 AfterDeleteCallable,
45 AfterInsertCallable,
46 AfterUpdateCallable,
47 BeforeDeleteCallable,
48 BeforeInsertCallable,
49 BeforeUpdateCallable,
50 CacheMetadata,
51 Expression,
52 Field,
53 Metadata,
54 PaginateDict,
55 Pagination,
56 Query,
57 Rows,
58 Validator,
59 _Types,
60)
62# use typing.cast(type, ...) to make mypy happy with unions
63T_annotation = typing.Type[Any] | types.UnionType
64T_Query = typing.Union["Table", Query, bool, None, "TypedTable", typing.Type["TypedTable"]]
65T_Value = typing.TypeVar("T_Value") # actual type of the Field (via Generic)
66T_MetaInstance = typing.TypeVar("T_MetaInstance", bound="TypedTable") # bound="TypedTable"; bound="TableMeta"
67T = typing.TypeVar("T")
69BASIC_MAPPINGS: dict[T_annotation, str] = {
70 str: "string",
71 int: "integer",
72 bool: "boolean",
73 bytes: "blob",
74 float: "double",
75 object: "json",
76 Decimal: "decimal(10,2)",
77 dt.date: "date",
78 dt.time: "time",
79 dt.datetime: "datetime",
80}
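# Illustrative sketch (hypothetical Ticket model, not part of this module) of how these basic
# mappings translate plain annotations into pydal field types when the class goes through db.define:
#
#   class Ticket(TypedTable):
#       title: str            # -> Field("title", "string", notnull=True)
#       amount: float         # -> Field("amount", "double", notnull=True)
#       created: dt.datetime  # -> Field("created", "datetime", notnull=True)
#       paid: bool | None     # -> Field("paid", "boolean", notnull=False)  (Optional makes it nullable)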
83def is_typed_field(cls: Any) -> typing.TypeGuard["TypedField[Any]"]:
84 """
85 Is `cls` an instance or subclass of TypedField?
87 Deprecated
88 """
89 return (
90 isinstance(cls, TypedField)
91 or isinstance(typing.get_origin(cls), type)
92 and issubclass(typing.get_origin(cls), TypedField)
93 )
96JOIN_OPTIONS = typing.Literal["left", "inner", None]
97DEFAULT_JOIN_OPTION: JOIN_OPTIONS = "left"
99 # table-ish parameter:
100P_Table = typing.Union[typing.Type["TypedTable"], pydal.objects.Table]
102Condition: typing.TypeAlias = typing.Optional[
103 typing.Callable[
104 # self, other -> Query
105 [P_Table, P_Table],
106 Query | bool,
107 ]
108]
110OnQuery: typing.TypeAlias = typing.Optional[
111 typing.Callable[
112 # self, other -> list of .on statements
113 [P_Table, P_Table],
114 list[Expression],
115 ]
116]
118To_Type = typing.TypeVar("To_Type", type[Any], typing.Type[Any], str)
121class Relationship(typing.Generic[To_Type]):
122 """
123 Define a relationship to another table.
124 """
126 _type: To_Type
127 table: typing.Type["TypedTable"] | type | str
128 condition: Condition
129 on: OnQuery
130 multiple: bool
131 join: JOIN_OPTIONS
133 def __init__(
134 self,
135 _type: To_Type,
136 condition: Condition = None,
137 join: JOIN_OPTIONS = None,
138 on: OnQuery = None,
139 ):
140 """
141 Should not be called directly, use relationship() instead!
142 """
143 if condition and on:
144 warnings.warn(f"Relation | Both specified! {condition=} {on=} {_type=}")
145 raise ValueError("Please specify either a condition or an 'on' statement for this relationship!")
147 self._type = _type
148 self.condition = condition
149 self.join = "left" if on else join # .on is always left join!
150 self.on = on
152 if args := typing.get_args(_type):
153 self.table = unwrap_type(args[0])
154 self.multiple = True
155 else:
156 self.table = _type
157 self.multiple = False
159 if isinstance(self.table, str):
160 self.table = TypeDAL.to_snake(self.table)
162 def clone(self, **update: Any) -> "Relationship[To_Type]":
163 """
164 Create a copy of the relationship, possibly updated.
165 """
166 return self.__class__(
167 update.get("_type") or self._type,
168 update.get("condition") or self.condition,
169 update.get("join") or self.join,
170 update.get("on") or self.on,
171 )
173 def __repr__(self) -> str:
174 """
175 Representation of the relationship.
176 """
177 if callback := self.condition or self.on:
178 src_code = inspect.getsource(callback).strip()
179 else:
180 cls_name = self._type if isinstance(self._type, str) else self._type.__name__ # type: ignore
181 src_code = f"to {cls_name} (missing condition)"
183 join = f":{self.join}" if self.join else ""
184 return f"<Relationship{join} {src_code}>"
186 def get_table(self, db: "TypeDAL") -> typing.Type["TypedTable"]:
187 """
188 Get the table this relationship is bound to.
189 """
190 table = self.table # can be a string because db wasn't available yet
191 if isinstance(table, str):
192 if mapped := db._class_map.get(table):
193 # yay
194 return mapped
196 # boo, fall back to untyped table but pretend it is typed:
197 return typing.cast(typing.Type["TypedTable"], db[table]) # eh close enough!
199 return table
201 def get_table_name(self) -> str:
202 """
203 Get the name of the table this relationship is bound to.
204 """
205 if isinstance(self.table, str):
206 return self.table
208 if isinstance(self.table, pydal.objects.Table):
209 return str(self.table)
211 # else: typed table
212 try:
213 table = self.table._ensure_table_defined() if issubclass(self.table, TypedTable) else self.table
214 except Exception: # pragma: no cover
215 table = self.table
217 return str(table)
219 def __get__(self, instance: Any, owner: Any) -> typing.Optional[list[Any]] | "Relationship[To_Type]":
220 """
221 Relationship is a descriptor class, which can be returned from a class but not an instance.
223 For an instance, using .join() will replace the Relationship with the actual data.
224 If you forgot to join, a warning will be shown and empty data will be returned.
225 """
226 if not instance:
227 # relationship queried on class, that's allowed
228 return self
230 warnings.warn(
231 "Trying to get data from a relationship object! Did you forget to join it?", category=RuntimeWarning
232 )
233 if self.multiple:
234 return []
235 else:
236 return None
239def relationship(
240 _type: To_Type, condition: Condition = None, join: JOIN_OPTIONS = None, on: OnQuery = None
241) -> Relationship[To_Type]:
242 """
243 Define a relationship to another table, when its id is not stored in the current table.
245 Example:
246 class User(TypedTable):
247 name: str
249 posts = relationship(list["Post"], condition=lambda self, post: self.id == post.author, join='left')
251 class Post(TypedTable):
252 title: str
253 author: User
255 User.join("posts").first() # User instance with list[Post] in .posts
257 Here, Post stores the User ID, but `relationship(list["Post"])` still allows you to get the user's posts.
258 In this case, the join strategy is set to LEFT so users without posts are also still selected.
260 For complex queries with a pivot table, an `on` can be set instead of `condition`:
261 class User(TypedTable):
262 ...
264 tags = relationship(list["Tag"], on=lambda self, tag: [
265 Tagged.on(Tagged.entity == self.gid),
266 Tag.on((Tagged.tag == tag.id)),
267 ])
269 If you tried to capture this in a single 'condition', pydal would create a cross join, which is much less efficient.
270 """
271 return Relationship(_type, condition, join, on)
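# Hedged usage sketch with hypothetical Author/Book models (names are illustrative only),
# mirroring the docstring above: Book stores the Author id, and relationship() makes the
# reverse direction joinable.
#
#   @db.define
#   class Author(TypedTable):
#       name: str
#       books = relationship(list["Book"], condition=lambda self, book: self.id == book.author, join="left")
#
#   @db.define
#   class Book(TypedTable):
#       title: str
#       author: Author
#
#   author = Author.join("books").first()  # author.books is a (possibly empty) list of Book instances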
274def _generate_relationship_condition(
275 _: typing.Type["TypedTable"], key: str, field: typing.Union["TypedField[Any]", "Table", typing.Type["TypedTable"]]
276) -> Condition:
277 origin = typing.get_origin(field)
278 # else: generic
280 if origin == list:
281 # field = typing.get_args(field)[0] # actual field
282 # return lambda _self, _other: cls[key].contains(field)
284 return lambda _self, _other: _self[key].contains(_other.id)
285 else:
286 # normal reference
287 # return lambda _self, _other: cls[key] == field.id
288 return lambda _self, _other: _self[key] == _other.id
291def to_relationship(
292 cls: typing.Type["TypedTable"] | type[Any],
293 key: str,
294 field: typing.Union["TypedField[Any]", "Table", typing.Type["TypedTable"]],
295) -> typing.Optional[Relationship[Any]]:
296 """
297 Used to automatically create relationship instance for reference fields.
299 Example:
300 class MyTable(TypedTable):
301 reference: OtherTable
303 `reference` contains the id of an OtherTable row.
304 MyTable.relationships should have 'reference' as a relationship, so `MyTable.join('reference')` should work.
306 This function will automatically perform this logic (called in db.define):
307 to_relationship(MyTable, 'reference', OtherTable) -> Relationship[OtherTable]
309 Also works for list:reference (list[OtherTable]) and TypedField[OtherTable].
310 """
311 if looks_like(field, TypedField):
312 if args := typing.get_args(field):
313 field = args[0]
314 else:
315 # weird
316 return None
318 field, optional = extract_type_optional(field)
320 try:
321 condition = _generate_relationship_condition(cls, key, field)
322 except Exception as e: # pragma: no cover
323 warnings.warn("Could not generate Relationship condition", source=e)
324 condition = None
326 if not condition: # pragma: no cover
327 # something went wrong, not a valid relationship
328 warnings.warn(f"Invalid relationship for {cls.__name__}.{key}: {field}")
329 return None
331 join = "left" if optional or typing.get_origin(field) == list else "inner"
333 return Relationship(typing.cast(type[TypedTable], field), condition, typing.cast(JOIN_OPTIONS, join))
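# Rough illustration (hypothetical Person/Pet models) of the implicit relationships this
# generates during db.define for plain reference annotations:
#
#   @db.define
#   class Person(TypedTable):
#       name: str
#
#   @db.define
#   class Pet(TypedTable):
#       owner: Person           # reference person   -> implicit relationship, inner join
#       sitter: Person | None   # nullable reference -> implicit relationship, left join
#
#   pet = Pet.join("owner").first()  # after the join, pet.owner holds the related Person data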
336class TypeDAL(pydal.DAL): # type: ignore
337 """
338 Drop-in replacement for pyDAL with a layer that converts class-based table definitions to classic pydal define_table calls.
339 """
341 _config: TypeDALConfig
343 def __init__(
344 self,
345 uri: Optional[str] = None, # default from config or 'sqlite:memory'
346 pool_size: int = None, # default 1 if sqlite else 3
347 folder: Optional[str | Path] = None, # default 'databases' in config
348 db_codec: str = "UTF-8",
349 check_reserved: Optional[list[str]] = None,
350 migrate: Optional[bool] = None, # default True by config
351 fake_migrate: Optional[bool] = None, # default False by config
352 migrate_enabled: bool = True,
353 fake_migrate_all: bool = False,
354 decode_credentials: bool = False,
355 driver_args: Optional[dict[str, Any]] = None,
356 adapter_args: Optional[dict[str, Any]] = None,
357 attempts: int = 5,
358 auto_import: bool = False,
359 bigint_id: bool = False,
360 debug: bool = False,
361 lazy_tables: bool = False,
362 db_uid: Optional[str] = None,
363 after_connection: typing.Callable[..., Any] = None,
364 tables: Optional[list[str]] = None,
365 ignore_field_case: bool = True,
366 entity_quoting: bool = True,
367 table_hash: Optional[str] = None,
368 enable_typedal_caching: bool = None,
369 use_pyproject: bool | str = True,
370 use_env: bool | str = True,
371 ) -> None:
372 """
373 Adds some internal tables after calling pydal's default init.
375 Set enable_typedal_caching to False to disable this behavior.
376 """
377 config = load_config(_use_pyproject=use_pyproject, _use_env=use_env)
378 config.update(
379 database=uri,
380 dialect=uri.split(":")[0] if uri and ":" in uri else None,
381 folder=folder,
382 migrate=migrate,
383 fake_migrate=fake_migrate,
384 caching=enable_typedal_caching,
385 pool_size=pool_size,
386 )
388 self._config = config
390 if config.folder:
391 Path(config.folder).mkdir(exist_ok=True)
393 super().__init__(
394 config.database,
395 config.pool_size,
396 config.folder,
397 db_codec,
398 check_reserved,
399 config.migrate,
400 config.fake_migrate,
401 migrate_enabled,
402 fake_migrate_all,
403 decode_credentials,
404 driver_args,
405 adapter_args,
406 attempts,
407 auto_import,
408 bigint_id,
409 debug,
410 lazy_tables,
411 db_uid,
412 after_connection,
413 tables,
414 ignore_field_case,
415 entity_quoting,
416 table_hash,
417 )
419 if config.caching:
420 self.try_define(_TypedalCache)
421 self.try_define(_TypedalCacheDependency)
423 def try_define(self, model: typing.Type[T], verbose: bool = False) -> typing.Type[T]:
424 """
425 Try to define a model with migrate or fall back to fake migrate.
426 """
427 try:
428 return self.define(model, migrate=True)
429 except Exception as e:
430 # clean up:
431 self.rollback()
432 if (tablename := self.to_snake(model.__name__)) and tablename in dir(self):
433 delattr(self, tablename)
435 if verbose:
436 warnings.warn(f"{model} could not be migrated, try faking", source=e, category=RuntimeWarning)
438 # try again:
439 return self.define(model, migrate=True, fake_migrate=True, redefine=True)
441 default_kwargs: typing.ClassVar[typing.Dict[str, Any]] = {
442 # fields are 'required' (notnull) by default:
443 "notnull": True,
444 }
446 # maps table name to typedal class, for resolving future references
447 _class_map: typing.ClassVar[dict[str, typing.Type["TypedTable"]]] = {}
449 def _define(self, cls: typing.Type[T], **kwargs: Any) -> typing.Type[T]:
450 # todo: new relationship item added should also invalidate (previously unrelated) cache result
452 # todo: option to enable/disable cache dependency behavior:
453 # - don't set _before_update and _before_delete
454 # - don't add TypedalCacheDependency entry
455 # - don't invalidate other item on new row of this type
457 # when `from __future__ import annotations` is enabled, cls.__annotations__ will no longer work as it does below.
458 # proper way to handle this would be (but gives error right now due to Table implementing magic methods):
459 # typing.get_type_hints(cls, globalns=None, localns=None)
461 # dirty way (with evil eval):
462 # [eval(v) for k, v in cls.__annotations__.items()]
463 # this however also stops working when variables outside this scope or even references to other
464 # objects are used. So for now, this package will NOT work when from __future__ import annotations is used,
465 # and might break in the future, when this annotations behavior is enabled by default.
467 # non-annotated variables have to be passed to define_table as kwargs
468 full_dict = all_dict(cls) # includes properties from parents (e.g. useful for mixins)
470 tablename = self.to_snake(cls.__name__)
471 # grab annotations of cls and its parents:
472 annotations = all_annotations(cls)
473 # extend with `prop = TypedField()` 'annotations':
474 annotations |= {k: typing.cast(type, v) for k, v in full_dict.items() if is_typed_field(v)}
475 # remove internal stuff:
476 annotations = {k: v for k, v in annotations.items() if not k.startswith("_")}
478 typedfields: dict[str, TypedField[Any]] = {
479 k: instanciate(v, True) for k, v in annotations.items() if is_typed_field(v)
480 }
482 relationships: dict[str, type[Relationship[Any]]] = filter_out(annotations, Relationship)
484 fields = {fname: self._to_field(fname, ftype) for fname, ftype in annotations.items()}
486 # ! don't use full_dict here:
487 other_kwargs = kwargs | {
488 k: v for k, v in cls.__dict__.items() if k not in annotations and not k.startswith("_")
489 } # other_kwargs was previously used to pass kwargs to typedal, but use @define(**kwargs) for that.
490 # now it's only used to extract relationships from the object.
491 # other properties of the class (incl methods) should not be touched
493 for key in typedfields.keys() - full_dict.keys():
494 # typed fields that haven't been added to the object yet
495 setattr(cls, key, typedfields[key])
497 # start with base classes and overwrite with current class:
498 relationships = filter_out(full_dict, Relationship) | relationships | filter_out(other_kwargs, Relationship)
500 # DEPRECATED: Relationship as annotation is currently not supported!
501 # ensure they are all instances and
502 # not mix of instances (`= relationship()`) and classes (`: Relationship[...]`):
503 # relationships = {
504 # k: v if isinstance(v, Relationship) else to_relationship(cls, k, v) for k, v in relationships.items()
505 # }
507 # keys of implicit references (also relationships):
508 reference_field_keys = [k for k, v in fields.items() if v.type.split(" ")[0] in ("list:reference", "reference")]
510 # add implicit relationships:
511 # User; list[User]; TypedField[User]; TypedField[list[User]]
512 relationships |= {
513 k: new_relationship
514 for k in reference_field_keys
515 if k not in relationships and (new_relationship := to_relationship(cls, k, annotations[k]))
516 }
518 cache_dependency = kwargs.pop("cache_dependency", True)
520 table: Table = self.define_table(tablename, *fields.values(), **kwargs)
522 for name, typed_field in typedfields.items():
523 field = fields[name]
524 typed_field.bind(field, table)
526 if issubclass(cls, TypedTable):
527 cls.__set_internals__(
528 db=self,
529 table=table,
530 # by now, all relationships should be instances!
531 relationships=typing.cast(dict[str, Relationship[Any]], relationships),
532 )
533 self._class_map[str(table)] = cls
534 cls.__on_define__(self)
535 else:
536 warnings.warn("db.define used without inheriting TypedTable. This could lead to strange problems!")
538 if not tablename.startswith("typedal_") and cache_dependency:
539 table._before_update.append(lambda s, _: _remove_cache(s, tablename))
540 table._before_delete.append(lambda s: _remove_cache(s, tablename))
542 return cls
544 @typing.overload
545 def define(self, maybe_cls: None = None, **kwargs: Any) -> typing.Callable[[typing.Type[T]], typing.Type[T]]:
546 """
547 Typing Overload for define without a class.
549 @db.define()
550 class MyTable(TypedTable): ...
551 """
553 @typing.overload
554 def define(self, maybe_cls: typing.Type[T], **kwargs: Any) -> typing.Type[T]:
555 """
556 Typing Overload for define with a class.
558 @db.define
559 class MyTable(TypedTable): ...
560 """
562 def define(
563 self, maybe_cls: typing.Type[T] | None = None, **kwargs: Any
564 ) -> typing.Type[T] | typing.Callable[[typing.Type[T]], typing.Type[T]]:
565 """
566 Can be used as a decorator on a class that inherits `TypedTable`, \
567 or as a regular method if you need to define your classes before you have access to a 'db' instance.
569 You can also pass extra arguments to db.define_table.
570 See http://www.web2py.com/books/default/chapter/29/06/the-database-abstraction-layer#Table-constructor
572 Example:
573 @db.define
574 class Person(TypedTable):
575 ...
577 class Article(TypedTable):
578 ...
580 # at a later time:
581 db.define(Article)
583 Returns:
584 the result of pydal.define_table
585 """
587 def wrapper(cls: typing.Type[T]) -> typing.Type[T]:
588 return self._define(cls, **kwargs)
590 if maybe_cls:
591 return wrapper(maybe_cls)
593 return wrapper
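# Hedged sketch of both ways to call define (hypothetical Person/Article models):
#
#   db = TypeDAL("sqlite:memory")
#
#   @db.define                    # decorator form
#   class Person(TypedTable):
#       name: str
#       age: int | None           # Optional annotation -> nullable column
#
#   class Article(TypedTable):    # declared now, registered with the db later
#       title: str
#       author: Person
#
#   db.define(Article)            # regular-call form, same effect as the decorator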
595 # def drop(self, table_name: str) -> None:
596 # """
597 # Remove a table by name (both on the database level and the typedal level).
598 # """
599 # # drop calls TypedTable.drop() and removes it from the `_class_map`
600 # if cls := self._class_map.pop(table_name, None):
601 # cls.drop()
603 # def drop_all(self, max_retries: int = None) -> None:
604 # """
605 # Remove all tables and keep doing so until everything is gone!
606 # """
607 # retries = 0
608 # if max_retries is None:
609 # max_retries = len(self.tables)
610 #
611 # while self.tables:
612 # retries += 1
613 # for table in self.tables:
614 # self.drop(table)
615 #
616 # if retries > max_retries:
617 # raise RuntimeError("Could not delete all tables")
619 def __call__(self, *_args: T_Query, **kwargs: Any) -> "TypedSet":
620 """
621 A db instance can be called directly to perform a query.
623 Usually, only a query is passed.
625 Example:
626 db(query).select()
628 """
629 args = list(_args)
630 if args:
631 cls = args[0]
632 if isinstance(cls, bool):
633 raise ValueError("Don't actually pass a bool to db()! Use a query instead.")
635 if isinstance(cls, type) and issubclass(type(cls), type) and issubclass(cls, TypedTable):
636 # table defined without @db.define decorator!
637 _cls: typing.Type[TypedTable] = cls
638 args[0] = _cls.id != None
640 _set = super().__call__(*args, **kwargs)
641 return typing.cast(TypedSet, _set)
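# Hedged sketch (hypothetical Person model): the db instance stays callable exactly like in
# pydal, and passing a model class is shorthand for "all rows of that table":
#
#   teens = db((Person.age >= 13) & (Person.age < 20)).select()
#   everyone = db(Person).select()   # converted internally to Person.id != None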
643 def __getitem__(self, key: str) -> "Table":
644 """
645 Allows dynamically accessing a table by its name as a string.
647 Example:
648 db['users'] -> user
649 """
650 return typing.cast(Table, super().__getitem__(str(key)))
652 @classmethod
653 def _build_field(cls, name: str, _type: str, **kw: Any) -> Field:
654 return Field(name, _type, **{**cls.default_kwargs, **kw})
656 @classmethod
657 def _annotation_to_pydal_fieldtype(
658 cls, _ftype: T_annotation, mut_kw: typing.MutableMapping[str, Any]
659 ) -> Optional[str]:
660 # ftype can be a union or type. typing.cast is sometimes used to tell mypy when it's not a union.
661 ftype = typing.cast(type, _ftype) # cast from typing.Type to type to make mypy happy
663 if isinstance(ftype, str):
664 # extract type from string
665 ftype = typing.get_args(typing.Type[ftype])[0]._evaluate(
666 localns=locals(), globalns=globals(), recursive_guard=frozenset()
667 )
669 if mapping := BASIC_MAPPINGS.get(ftype):
670 # basic types
671 return mapping
672 elif isinstance(ftype, _Table):
673 # db.table
674 return f"reference {ftype._tablename}"
675 elif issubclass(type(ftype), type) and issubclass(ftype, TypedTable):
676 # SomeTable
677 snakename = cls.to_snake(ftype.__name__)
678 return f"reference {snakename}"
679 elif isinstance(ftype, TypedField):
680 # FieldType(type, ...)
681 return ftype._to_field(mut_kw)
682 elif origin_is_subclass(ftype, TypedField):
683 # TypedField[int]
684 return cls._annotation_to_pydal_fieldtype(typing.get_args(ftype)[0], mut_kw)
685 elif isinstance(ftype, types.GenericAlias) and typing.get_origin(ftype) in (list, TypedField):
686 # list[str] -> str -> string -> list:string
687 _child_type = typing.get_args(ftype)[0]
688 _child_type = cls._annotation_to_pydal_fieldtype(_child_type, mut_kw)
689 return f"list:{_child_type}"
690 elif is_union(ftype):
691 # str | int -> UnionType
692 # typing.Union[str | int] -> typing._UnionGenericAlias
694 # Optional[type] == type | None
696 match typing.get_args(ftype):
697 case (_child_type, _Types.NONETYPE) | (_Types.NONETYPE, _child_type):
698 # good union of Nullable
700 # if a field is optional, it is nullable:
701 mut_kw["notnull"] = False
702 return cls._annotation_to_pydal_fieldtype(_child_type, mut_kw)
703 case _:
704 # a union of two real types is not supported by the db!
705 return None
706 else:
707 return None
709 @classmethod
710 def _to_field(cls, fname: str, ftype: type, **kw: Any) -> Field:
711 """
712 Convert an annotation into a pydal Field.
714 Args:
715 fname: name of the property
716 ftype: annotation of the property
717 kw: when using TypedField or a function returning it (e.g. StringField),
718 keyword args can be used to pass any other settings you would normally to a pydal Field
720 -> pydal.Field(fname, ftype, **kw)
722 Example:
723 class MyTable:
724 fname: ftype
725 id: int
726 name: str
727 reference: Table
728 other: TypedField(str, default="John Doe") # default will be in kwargs
729 """
730 fname = cls.to_snake(fname)
732 if converted_type := cls._annotation_to_pydal_fieldtype(ftype, kw):
733 return cls._build_field(fname, converted_type, **kw)
734 else:
735 raise NotImplementedError(f"Unsupported type {ftype}/{type(ftype)}")
737 @staticmethod
738 def to_snake(camel: str) -> str:
739 """
740 Moved to helpers, kept as a static method for legacy reasons.
741 """
742 return to_snake(camel)
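# Presumed behavior (the actual logic lives in helpers.to_snake): CamelCase class names become
# snake_case table names, which is how db.define derives `tablename`, e.g.:
#
#   TypeDAL.to_snake("MyLongTableName")  # -> "my_long_table_name" (assumed output)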
745class TableProtocol(typing.Protocol): # pragma: no cover
746 """
747 Make mypy happy.
748 """
750 id: "TypedField[int]" # noqa: A003
752 def __getitem__(self, item: str) -> Field:
753 """
754 Tell mypy a Table supports dictionary notation for columns.
755 """
758class Table(_Table, TableProtocol): # type: ignore
759 """
760 Make mypy happy.
761 """
764class TableMeta(type):
765 """
766 This metaclass contains functionality for table classes that doesn't exist on their instances.
768 Example:
769 class MyTable(TypedTable):
770 some_field: TypedField[int]
772 MyTable.update_or_insert(...) # should work
774 MyTable.some_field # -> Field, can be used to query etc.
776 row = MyTable.first() # returns instance of MyTable
778 # row.update_or_insert(...) # shouldn't work!
780 row.some_field # -> int, with actual data
782 """
784 # set up by db.define:
785 # _db: TypeDAL | None = None
786 # _table: Table | None = None
787 _db: TypeDAL | None = None
788 _table: Table | None = None
789 _relationships: dict[str, Relationship[Any]] | None = None
791 #########################
792 # TypeDAL custom logic: #
793 #########################
795 def __set_internals__(self, db: pydal.DAL, table: Table, relationships: dict[str, Relationship[Any]]) -> None:
796 """
797 Store the related database and pydal table for later usage.
798 """
799 self._db = db
800 self._table = table
801 self._relationships = relationships
803 def __getattr__(self, col: str) -> Optional[Field]:
804 """
805 Magic method used by TableMeta to get a database field with dot notation on a class.
807 Example:
808 SomeTypedTable.col -> db.table.col (via TableMeta.__getattr__)
810 """
811 if self._table:
812 return getattr(self._table, col, None)
814 return None
816 def _ensure_table_defined(self) -> Table:
817 if not self._table:
818 raise EnvironmentError("@define or db.define is not called on this class yet!")
819 return self._table
821 def __iter__(self) -> typing.Generator[Field, None, None]:
822 """
823 Loop through the columns of this model.
824 """
825 table = self._ensure_table_defined()
826 yield from iter(table)
828 def __getitem__(self, item: str) -> Field:
829 """
830 Allow dict notation to get a column of this table (-> Field instance).
831 """
832 table = self._ensure_table_defined()
833 return table[item]
835 def __str__(self) -> str:
836 """
837 Normally, just returns the underlying table name, but with a fallback if the model is unbound.
838 """
839 if self._table:
840 return str(self._table)
841 else:
842 return f"<unbound table {self.__name__}>"
844 def from_row(self: typing.Type[T_MetaInstance], row: pydal.objects.Row) -> T_MetaInstance:
845 """
846 Create a model instance from a pydal row.
847 """
848 return self(row)
850 def all(self: typing.Type[T_MetaInstance]) -> "TypedRows[T_MetaInstance]": # noqa: A003
851 """
852 Return all rows for this model.
853 """
854 return self.collect()
856 def get_relationships(self) -> dict[str, Relationship[Any]]:
857 """
858 Return the registered relationships of the current model.
859 """
860 return self._relationships or {}
862 ##########################
863 # TypeDAL Modified Logic #
864 ##########################
866 def insert(self: typing.Type[T_MetaInstance], **fields: Any) -> T_MetaInstance:
867 """
868 This is only called when db.define is not used as a decorator.
870 cls.__table functions as 'self'
872 Args:
873 **fields: anything you want to insert in the database
875 Returns: a model instance for the newly inserted row.
877 """
878 table = self._ensure_table_defined()
880 result = table.insert(**fields)
881 # it already is an int but mypy doesn't understand that
882 return self(result)
884 def _insert(self, **fields: Any) -> str:
885 table = self._ensure_table_defined()
887 return str(table._insert(**fields))
889 def bulk_insert(self: typing.Type[T_MetaInstance], items: list[dict[str, Any]]) -> "TypedRows[T_MetaInstance]":
890 """
891 Insert multiple rows, returns a TypedRows set of new instances.
892 """
893 table = self._ensure_table_defined()
894 result = table.bulk_insert(items)
895 return self.where(lambda row: row.id.belongs(result)).collect()
897 def update_or_insert(
898 self: typing.Type[T_MetaInstance], query: T_Query | dict[str, Any] = DEFAULT, **values: Any
899 ) -> T_MetaInstance:
900 """
901 Update a row if query matches, else insert a new one.
903 Returns the created or updated instance.
904 """
905 table = self._ensure_table_defined()
907 if query is DEFAULT:
908 record = table(**values)
909 elif isinstance(query, dict):
910 record = table(**query)
911 else:
912 record = table(query)
914 if not record:
915 return self.insert(**values)
917 record.update_record(**values)
918 return self(record)
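# Hedged usage sketch (hypothetical Person model) of the three accepted query forms:
#
#   Person.update_or_insert(name="Ada", age=36)             # no query: match on the values themselves
#   Person.update_or_insert({"name": "Ada"}, age=37)        # dict of filters
#   Person.update_or_insert(Person.name == "Ada", age=38)   # explicit Query
#   # each call returns a Person instance, freshly inserted or updated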
920 def validate_and_insert(
921 self: typing.Type[T_MetaInstance], **fields: Any
922 ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]:
923 """
924 Validate input data and then insert a row.
926 Returns a tuple of (the created instance, a dict of errors).
927 """
928 table = self._ensure_table_defined()
929 result = table.validate_and_insert(**fields)
930 if row_id := result.get("id"):
931 return self(row_id), None
932 else:
933 return None, result.get("errors")
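# Hedged sketch (hypothetical Person model with a required `name` field):
#
#   person, errors = Person.validate_and_insert(name="Bob")
#   if errors:
#       print(errors)     # validation failed, e.g. {"name": "..."}; person is None
#   else:
#       print(person.id)  # validation passed and a Person instance was created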
935 def validate_and_update(
936 self: typing.Type[T_MetaInstance], query: Query, **fields: Any
937 ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]:
938 """
939 Validate input data and then update at most 1 row.
941 Returns a tuple of (the updated instance, a dict of errors).
942 """
943 table = self._ensure_table_defined()
945 try:
946 result = table.validate_and_update(query, **fields)
947 except Exception as e:
948 result = {"errors": {"exception": str(e)}}
950 if errors := result.get("errors"):
951 return None, errors
952 elif row_id := result.get("id"):
953 return self(row_id), None
954 else: # pragma: no cover
955 # update on query without result (shouldn't happen)
956 return None, None
958 def validate_and_update_or_insert(
959 self: typing.Type[T_MetaInstance], query: Query, **fields: Any
960 ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]:
961 """
962 Validate input data and then update_or_insert (on at most 1 row).
964 Returns a tuple of (the updated/created instance, a dict of errors).
965 """
966 table = self._ensure_table_defined()
967 result = table.validate_and_update_or_insert(query, **fields)
969 if errors := result.get("errors"):
970 return None, errors
971 elif row_id := result.get("id"):
972 return self(row_id), None
973 else: # pragma: no cover
974 # update on query without result (shouldn't happen)
975 return None, None
977 def select(self: typing.Type[T_MetaInstance], *a: Any, **kw: Any) -> "QueryBuilder[T_MetaInstance]":
978 """
979 See QueryBuilder.select!
980 """
981 return QueryBuilder(self).select(*a, **kw)
983 def paginate(self: typing.Type[T_MetaInstance], limit: int, page: int = 1) -> "PaginatedRows[T_MetaInstance]":
984 """
985 See QueryBuilder.paginate!
986 """
987 return QueryBuilder(self).paginate(limit=limit, page=page)
989 def chunk(
990 self: typing.Type[T_MetaInstance], chunk_size: int
991 ) -> typing.Generator["TypedRows[T_MetaInstance]", Any, None]:
992 """
993 See QueryBuilder.chunk!
994 """
995 return QueryBuilder(self).chunk(chunk_size)
997 def where(self: typing.Type[T_MetaInstance], *a: Any, **kw: Any) -> "QueryBuilder[T_MetaInstance]":
998 """
999 See QueryBuilder.where!
1000 """
1001 return QueryBuilder(self).where(*a, **kw)
1003 def cache(self: typing.Type[T_MetaInstance], *deps: Any, **kwargs: Any) -> "QueryBuilder[T_MetaInstance]":
1004 """
1005 See QueryBuilder.cache!
1006 """
1007 return QueryBuilder(self).cache(*deps, **kwargs)
1009 def count(self: typing.Type[T_MetaInstance]) -> int:
1010 """
1011 See QueryBuilder.count!
1012 """
1013 return QueryBuilder(self).count()
1015 def first(self: typing.Type[T_MetaInstance]) -> T_MetaInstance | None:
1016 """
1017 See QueryBuilder.first!
1018 """
1019 return QueryBuilder(self).first()
1021 def join(
1022 self: typing.Type[T_MetaInstance],
1023 *fields: str | typing.Type["TypedTable"],
1024 method: JOIN_OPTIONS = None,
1025 on: OnQuery | list[Expression] | Expression = None,
1026 condition: Condition = None,
1027 ) -> "QueryBuilder[T_MetaInstance]":
1028 """
1029 See QueryBuilder.join!
1030 """
1031 return QueryBuilder(self).join(*fields, on=on, condition=condition, method=method)
1033 def collect(self: typing.Type[T_MetaInstance], verbose: bool = False) -> "TypedRows[T_MetaInstance]":
1034 """
1035 See QueryBuilder.collect!
1036 """
1037 return QueryBuilder(self).collect(verbose=verbose)
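# Hedged sketch of the QueryBuilder entry points above (hypothetical Person model with a
# "posts" relationship); all of these delegate to QueryBuilder and can be chained:
#
#   adults = Person.where(lambda p: p.age >= 18).collect()     # -> TypedRows[Person]
#   first_adult = Person.where(lambda p: p.age >= 18).first()  # -> Person | None
#   total = Person.count()
#   page_one = Person.where(lambda p: p.age >= 18).paginate(limit=20, page=1)
#   with_posts = Person.join("posts").collect()                # joins the "posts" relationship
#   for batch in Person.chunk(100):                            # iterate the table in batches of 100
#       ...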
1039 @property
1040 def ALL(cls) -> pydal.objects.SQLALL:
1041 """
1042 Select all fields for this table.
1043 """
1044 table = cls._ensure_table_defined()
1046 return table.ALL
1048 ##########################
1049 # TypeDAL Shadowed Logic #
1050 ##########################
1051 fields: list[str]
1053 # other table methods:
1055 def truncate(self, mode: str = "") -> None:
1056 """
1057 Remove all data and reset index.
1058 """
1059 table = self._ensure_table_defined()
1060 table.truncate(mode)
1062 def drop(self, mode: str = "") -> None:
1063 """
1064 Remove the underlying table.
1065 """
1066 table = self._ensure_table_defined()
1067 table.drop(mode)
1069 def create_index(self, name: str, *fields: Field | str, **kwargs: Any) -> bool:
1070 """
1071 Add an index on some columns of this table.
1072 """
1073 table = self._ensure_table_defined()
1074 result = table.create_index(name, *fields, **kwargs)
1075 return typing.cast(bool, result)
1077 def drop_index(self, name: str, if_exists: bool = False) -> bool:
1078 """
1079 Remove an index from this table.
1080 """
1081 table = self._ensure_table_defined()
1082 result = table.drop_index(name, if_exists)
1083 return typing.cast(bool, result)
1085 def import_from_csv_file(
1086 self,
1087 csvfile: typing.TextIO,
1088 id_map: dict[str, str] = None,
1089 null: Any = "<NULL>",
1090 unique: str = "uuid",
1091 id_offset: dict[str, int] = None, # id_offset used only when id_map is None
1092 transform: typing.Callable[[dict[Any, Any]], dict[Any, Any]] = None,
1093 validate: bool = False,
1094 encoding: str = "utf-8",
1095 delimiter: str = ",",
1096 quotechar: str = '"',
1097 quoting: int = csv.QUOTE_MINIMAL,
1098 restore: bool = False,
1099 **kwargs: Any,
1100 ) -> None:
1101 """
1102 Load a csv file into the database.
1103 """
1104 table = self._ensure_table_defined()
1105 table.import_from_csv_file(
1106 csvfile,
1107 id_map=id_map,
1108 null=null,
1109 unique=unique,
1110 id_offset=id_offset,
1111 transform=transform,
1112 validate=validate,
1113 encoding=encoding,
1114 delimiter=delimiter,
1115 quotechar=quotechar,
1116 quoting=quoting,
1117 restore=restore,
1118 **kwargs,
1119 )
1121 def on(self, query: Query | bool) -> Expression:
1122 """
1123 Shadow Table.on.
1125 Used for joins.
1127 See Also:
1128 http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#One-to-many-relation
1129 """
1130 table = self._ensure_table_defined()
1131 return typing.cast(Expression, table.on(query))
1133 def with_alias(self, alias: str) -> _Table:
1134 """
1135 Shadow Table.with_alias.
1137 Useful for joins when joining the same table multiple times.
1139 See Also:
1140 http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#One-to-many-relation
1141 """
1142 table = self._ensure_table_defined()
1143 return table.with_alias(alias)
1145 # @typing.dataclass_transform()
1148class TypedField(typing.Generic[T_Value]): # pragma: no cover
1149 """
1150 Typed version of pydal.Field, which will be converted to a normal Field in the background.
1151 """
1153 # will be set by .bind on db.define
1154 name = ""
1155 _db: Optional[pydal.DAL] = None
1156 _rname: Optional[str] = None
1157 _table: Optional[Table] = None
1158 _field: Optional[Field] = None
1160 _type: T_annotation
1161 kwargs: Any
1163 requires: Validator | typing.Iterable[Validator]
1165 def __init__(self, _type: typing.Type[T_Value] | types.UnionType = str, /, **settings: Any) -> None: # type: ignore
1166 """
1167 A TypedFieldType should not be initialized manually; use TypedField (from `fields.py`) instead!
1168 """
1169 self._type = _type
1170 self.kwargs = settings
1171 super().__init__()
1173 @typing.overload
1174 def __get__(self, instance: T_MetaInstance, owner: typing.Type[T_MetaInstance]) -> T_Value: # pragma: no cover
1175 """
1176 row.field -> (actual data).
1177 """
1179 @typing.overload
1180 def __get__(self, instance: None, owner: "typing.Type[TypedTable]") -> "TypedField[T_Value]": # pragma: no cover
1181 """
1182 Table.field -> Field.
1183 """
1185 def __get__(
1186 self, instance: T_MetaInstance | None, owner: typing.Type[T_MetaInstance]
1187 ) -> typing.Union[T_Value, "TypedField[T_Value]"]:
1188 """
1189 Since this class is a Descriptor field, \
1190 it returns something else depending on if it's called on a class or instance.
1192 (this is mostly for mypy/typing)
1193 """
1194 if instance:
1195 # this is only reached in a very specific case:
1196 # an instance of the object was created with a specific set of fields selected (excluding the current one)
1197 # in that case, no value was stored in the owner -> return None (since the field was not selected)
1198 return typing.cast(T_Value, None) # cast as T_Value so mypy understands it for selected fields
1199 else:
1200 # getting as class -> return actual field so pydal understands it when using in query etc.
1201 return typing.cast(TypedField[T_Value], self._field) # pretend it's still typed for IDE support
1203 def __str__(self) -> str:
1204 """
1205 String representation of a Typed Field.
1207 If `type` is set explicitly (e.g. TypedField(str, type="text")), that type is used: `TypedField.text`,
1208 otherwise the type annotation is used (e.g. TypedField(str) -> TypedField.str)
1209 """
1210 return str(self._field) if self._field else ""
1212 def __repr__(self) -> str:
1213 """
1214 More detailed string representation of a Typed Field.
1216 Uses __str__ and adds the provided extra options (kwargs) in the representation.
1217 """
1218 s = self.__str__()
1220 if "type" in self.kwargs:
1221 # manual type in kwargs supplied
1222 t = self.kwargs["type"]
1223 elif issubclass(type, type(self._type)):
1224 # normal type, str.__name__ = 'str'
1225 t = getattr(self._type, "__name__", str(self._type))
1226 elif t_args := typing.get_args(self._type):
1227 # list[str] -> 'str'
1228 t = t_args[0].__name__
1229 else: # pragma: no cover
1230 # fallback - something else, may not even happen, I'm not sure
1231 t = self._type
1233 s = f"TypedField[{t}].{s}" if s else f"TypedField[{t}]"
1235 kw = self.kwargs.copy()
1236 kw.pop("type", None)
1237 return f"<{s} with options {kw}>"
1239 def _to_field(self, extra_kwargs: typing.MutableMapping[str, Any]) -> Optional[str]:
1240 """
1241 Convert a Typed Field instance to a pydal.Field.
1242 """
1243 other_kwargs = self.kwargs.copy()
1244 extra_kwargs.update(other_kwargs)
1245 return extra_kwargs.pop("type", False) or TypeDAL._annotation_to_pydal_fieldtype(self._type, extra_kwargs)
1247 def bind(self, field: pydal.objects.Field, table: pydal.objects.Table) -> None:
1248 """
1249 Bind the right db/table/field info to this class, so queries can be made using `Class.field == ...`.
1250 """
1251 self._table = table
1252 self._field = field
1254 def __getattr__(self, key: str) -> Any:
1255 """
1256 If the regular getattribute does not work, try to get info from the related Field.
1257 """
1258 with contextlib.suppress(AttributeError):
1259 return super().__getattribute__(key)
1261 # try on actual field:
1262 return getattr(self._field, key)
1264 def __eq__(self, other: Any) -> Query:
1265 """
1266 Performing == on a Field will result in a Query.
1267 """
1268 return typing.cast(Query, self._field == other)
1270 def __ne__(self, other: Any) -> Query:
1271 """
1272 Performing != on a Field will result in a Query.
1273 """
1274 return typing.cast(Query, self._field != other)
1276 def __gt__(self, other: Any) -> Query:
1277 """
1278 Performing > on a Field will result in a Query.
1279 """
1280 return typing.cast(Query, self._field > other)
1282 def __lt__(self, other: Any) -> Query:
1283 """
1284 Performing < on a Field will result in a Query.
1285 """
1286 return typing.cast(Query, self._field < other)
1288 def __ge__(self, other: Any) -> Query:
1289 """
1290 Performing >= on a Field will result in a Query.
1291 """
1292 return typing.cast(Query, self._field >= other)
1294 def __le__(self, other: Any) -> Query:
1295 """
1296 Performing <= on a Field will result in a Query.
1297 """
1298 return typing.cast(Query, self._field <= other)
1300 def __hash__(self) -> int:
1301 """
1302 Shadow Field.__hash__.
1303 """
1304 return hash(self._field)
1306 def __invert__(self) -> Expression:
1307 """
1308 Performing ~ on a Field will result in an Expression.
1309 """
1310 if not self._field: # pragma: no cover
1311 raise ValueError("Unbound Field can not be inverted!")
1313 return typing.cast(Expression, ~self._field)
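# Hedged sketch: on a class, a TypedField behaves like a pydal Field, so the comparison
# operators above build Query objects that can be combined with & / | and passed to db()
# or .where() (hypothetical Person model):
#
#   query = (Person.age >= 18) & (Person.name != "Bob")
#   rows = db(query).select()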
1316class TypedTable(metaclass=TableMeta):
1317 """
1318 Enhanced modeling system on top of pydal's Table that adds typing and additional functionality.
1319 """
1321 # set up by 'new':
1322 _row: Row | None = None
1324 _with: list[str]
1326 id: "TypedField[int]" # noqa: A003
1328 _before_insert: list[BeforeInsertCallable]
1329 _after_insert: list[AfterInsertCallable]
1330 _before_update: list[BeforeUpdateCallable]
1331 _after_update: list[AfterUpdateCallable]
1332 _before_delete: list[BeforeDeleteCallable]
1333 _after_delete: list[AfterDeleteCallable]
1335 def _setup_instance_methods(self) -> None:
1336 self.as_dict = self._as_dict # type: ignore
1337 self.__json__ = self.as_json = self._as_json # type: ignore
1338 # self.as_yaml = self._as_yaml # type: ignore
1339 self.as_xml = self._as_xml # type: ignore
1341 self.update = self._update # type: ignore
1343 self.delete_record = self._delete_record # type: ignore
1344 self.update_record = self._update_record # type: ignore
1346 def __new__(
1347 cls, row_or_id: typing.Union[Row, Query, pydal.objects.Set, int, str, None, "TypedTable"] = None, **filters: Any
1348 ) -> "TypedTable":
1349 """
1350 Create a TypedTable model instance from an existing row, ID or query.
1352 Examples:
1353 MyTable(1)
1354 MyTable(id=1)
1355 MyTable(MyTable.id == 1)
1356 """
1357 table = cls._ensure_table_defined()
1358 inst = super().__new__(cls)
1360 if isinstance(row_or_id, TypedTable):
1361 # existing typed table instance!
1362 return row_or_id
1363 elif isinstance(row_or_id, pydal.objects.Row):
1364 row = row_or_id
1365 elif row_or_id is not None:
1366 row = table(row_or_id, **filters)
1367 elif filters:
1368 row = table(**filters)
1369 else:
1370 # dummy object
1371 return inst
1373 if not row:
1374 return None # type: ignore
1376 inst._row = row
1377 inst.__dict__.update(row)
1378 inst._setup_instance_methods()
1379 return inst
1381 @classmethod
1382 def __on_define__(cls, db: TypeDAL) -> None:
1383 """
1384 Method that can be implemented by tables to do an action after db.define is completed.
1386 This can be useful if you need to add something like requires=IS_NOT_IN_DB(db, "table.field"),
1387 where you need a reference to the current database, which may not exist yet when defining the model.
1388 """
1390 def __iter__(self) -> typing.Generator[Any, None, None]:
1391 """
1392 Allows looping through the columns.
1393 """
1394 row = self._ensure_matching_row()
1395 yield from iter(row)
1397 def __getitem__(self, item: str) -> Any:
1398 """
1399 Allows dictionary notation to get columns.
1400 """
1401 if item in self.__dict__:
1402 return self.__dict__.get(item)
1404 # fallback to lookup in row
1405 if self._row:
1406 return self._row[item]
1408 # nothing found!
1409 raise KeyError(item)
1411 def __getattr__(self, item: str) -> Any:
1412 """
1413 Allows dot notation to get columns.
1414 """
1415 if value := self.get(item):
1416 return value
1418 raise AttributeError(item)
1420 def get(self, item: str, default: Any = None) -> Any:
1421 """
1422 Try to get a column from this instance, else return default.
1423 """
1424 try:
1425 return self.__getitem__(item)
1426 except KeyError:
1427 return default
1429 def __setitem__(self, key: str, value: Any) -> None:
1430 """
1431 Data can both be updated via dot and dict notation.
1432 """
1433 return setattr(self, key, value)
1435 def __int__(self) -> int:
1436 """
1437 Calling int on a model instance will return its id.
1438 """
1439 return getattr(self, "id", 0)
1441 def __bool__(self) -> bool:
1442 """
1443 If the instance has an underlying row with data, it is truthy.
1444 """
1445 return bool(getattr(self, "_row", False))
1447 def _ensure_matching_row(self) -> Row:
1448 if not getattr(self, "_row", None):
1449 raise EnvironmentError("Trying to access non-existent row. Maybe it was deleted or not yet initialized?")
1450 return self._row
1452 def __repr__(self) -> str:
1453 """
1454 String representation of the model instance.
1455 """
1456 model_name = self.__class__.__name__
1457 model_data = {}
1459 if self._row:
1460 model_data = self._row.as_json()
1462 details = model_name
1463 details += f"({model_data})"
1465 if relationships := getattr(self, "_with", []):
1466 details += f" + {relationships}"
1468 return f"<{details}>"
1470 # serialization
1471 # underscore variants work for class instances (set up by _setup_instance_methods)
1473 @classmethod
1474 def as_dict(cls, flat: bool = False, sanitize: bool = True) -> dict[str, Any]:
1475 """
1476 Dump the object to a plain dict.
1478 Can be used as both a class or instance method:
1479 - dumps the table info if it's a class
1480 - dumps the row info if it's an instance (see _as_dict)
1481 """
1482 table = cls._ensure_table_defined()
1483 result = table.as_dict(flat, sanitize)
1484 return typing.cast(dict[str, Any], result)
1486 @classmethod
1487 def as_json(cls, sanitize: bool = True, indent: Optional[int] = None, **kwargs: Any) -> str:
1488 """
1489 Dump the object to json.
1491 Can be used as both a class or instance method:
1492 - dumps the table info if it's a class
1493 - dumps the row info if it's an instance (see _as_json)
1494 """
1495 data = cls.as_dict(sanitize=sanitize)
1496 return as_json.encode(data, indent=indent, **kwargs)
1498 @classmethod
1499 def as_xml(cls, sanitize: bool = True) -> str: # pragma: no cover
1500 """
1501 Dump the object to xml.
1503 Can be used as both a class or instance method:
1504 - dumps the table info if it's a class
1505 - dumps the row info if it's an instance (see _as_xml)
1506 """
1507 table = cls._ensure_table_defined()
1508 return typing.cast(str, table.as_xml(sanitize))
1510 @classmethod
1511 def as_yaml(cls, sanitize: bool = True) -> str:
1512 """
1513 Dump the object to yaml.
1515 Can be used as both a class or instance method:
1516 - dumps the table info if it's a class
1517 - dumps the row info if it's an instance (see _as_yaml)
1518 """
1519 table = cls._ensure_table_defined()
1520 return typing.cast(str, table.as_yaml(sanitize))
1522 def _as_dict(
1523 self, datetime_to_str: bool = False, custom_types: typing.Iterable[type] | type | None = None
1524 ) -> dict[str, Any]:
1525 row = self._ensure_matching_row()
1527 result = row.as_dict(datetime_to_str=datetime_to_str, custom_types=custom_types)
1529 def asdict_method(obj: Any) -> Any: # pragma: no cover
1530 if hasattr(obj, "_as_dict"): # typedal
1531 return obj._as_dict()
1532 elif hasattr(obj, "as_dict"): # pydal
1533 return obj.as_dict()
1534 else: # something else??
1535 return obj.__dict__
1537 if _with := getattr(self, "_with", None):
1538 for relationship in _with:
1539 data = self.get(relationship)
1541 if isinstance(data, list):
1542 data = [asdict_method(_) for _ in data]
1543 elif data:
1544 data = asdict_method(data)
1546 result[relationship] = data
1548 return typing.cast(dict[str, Any], result)
1550 def _as_json(
1551 self,
1552 default: typing.Callable[[Any], Any] = None,
1553 indent: Optional[int] = None,
1554 **kwargs: Any,
1555 ) -> str:
1556 data = self._as_dict()
1557 return as_json.encode(data, default=default, indent=indent, **kwargs)
1559 def _as_xml(self, sanitize: bool = True) -> str: # pragma: no cover
1560 row = self._ensure_matching_row()
1561 return typing.cast(str, row.as_xml(sanitize))
1563 # def _as_yaml(self, sanitize: bool = True) -> str:
1564 # row = self._ensure_matching_row()
1565 # return typing.cast(str, row.as_yaml(sanitize))
1567 def __setattr__(self, key: str, value: Any) -> None:
1568 """
1569 When setting a property on a Typed Table model instance, also update the underlying row.
1570 """
1571 if self._row and key in self._row.__dict__ and not callable(value):
1572 # enables `row.key = value; row.update_record()`
1573 self._row[key] = value
1575 super().__setattr__(key, value)
1577 @classmethod
1578 def update(cls: typing.Type[T_MetaInstance], query: Query, **fields: Any) -> T_MetaInstance | None:
1579 """
1580 Update one record.
1582 Example:
1583 MyTable.update(MyTable.id == 1, name="NewName") -> MyTable
1584 """
1585 # todo: update multiple?
1586 if record := cls(query):
1587 return record.update_record(**fields)
1588 else:
1589 return None
1591 def _update(self: T_MetaInstance, **fields: Any) -> T_MetaInstance:
1592 row = self._ensure_matching_row()
1593 row.update(**fields)
1594 self.__dict__.update(**fields)
1595 return self
1597 def _update_record(self: T_MetaInstance, **fields: Any) -> T_MetaInstance:
1598 row = self._ensure_matching_row()
1599 new_row = row.update_record(**fields)
1600 self.update(**new_row)
1601 return self
1603 def update_record(self: T_MetaInstance, **fields: Any) -> T_MetaInstance: # pragma: no cover
1604 """
1605 Here as a placeholder for _update_record.
1607 Will be replaced on instance creation!
1608 """
1609 return self._update_record(**fields)
1611 def _delete_record(self) -> int:
1612 """
1613 Actual logic in `pydal.helpers.classes.RecordDeleter`.
1614 """
1615 row = self._ensure_matching_row()
1616 result = row.delete_record()
1617 self.__dict__ = {} # empty self, since row is no more.
1618 self._row = None # just to be sure
1619 self._setup_instance_methods()
1620 # ^ instance methods might've been deleted by emptying dict,
1621 # but we still want .as_dict to show an error, not the table's as_dict.
1622 return typing.cast(int, result)
1624 def delete_record(self) -> int: # pragma: no cover
1625 """
1626 Here as a placeholder for _delete_record.
1628 Will be replaced on instance creation!
1629 """
1630 return self._delete_record()
1632 # __del__ is also called at the end of a scope, so don't remove records on every del!!
1634 # pickling:
1636 def __getstate__(self) -> dict[str, Any]:
1637 """
1638 State to save when pickling.
1640 Prevents db connection from being pickled.
1641 Similar to as_dict but without changing the data of the relationships (dill does that recursively)
1642 """
1643 row = self._ensure_matching_row()
1644 result: dict[str, Any] = row.as_dict()
1646 if _with := getattr(self, "_with", None):
1647 result["_with"] = _with
1648 for relationship in _with:
1649 data = self.get(relationship)
1651 result[relationship] = data
1653 result["_row"] = self._row.as_json() if self._row else ""
1654 return result
1656 def __setstate__(self, state: dict[str, Any]) -> None:
1657 """
1658 Used by dill when loading from a bytestring.
1659 """
1660 # as_dict also includes table info, so dump as json to only get the actual row data
1661 # then create a new (more empty) row object:
1662 state["_row"] = Row(json.loads(state["_row"]))
1663 self.__dict__ |= state
1666# backwards compat:
1667TypedRow = TypedTable
1670class TypedRows(typing.Collection[T_MetaInstance], Rows):
1671 """
1672 Slightly enhanced and typed functionality on top of pydal Rows (the result of a select).
1673 """
1675 records: dict[int, T_MetaInstance]
1676 # _rows: Rows
1677 model: typing.Type[T_MetaInstance]
1678 metadata: Metadata
1680 # pseudo-properties: actually stored in _rows
1681 db: TypeDAL
1682 colnames: list[str]
1683 fields: list[Field]
1684 colnames_fields: list[Field]
1685 response: list[tuple[Any, ...]]
1687 def __init__(
1688 self,
1689 rows: Rows,
1690 model: typing.Type[T_MetaInstance],
1691 records: dict[int, T_MetaInstance] = None,
1692 metadata: Metadata = None,
1693 ) -> None:
1694 """
1695 Should not be called manually!
1697 Normally, the `records` from an existing `Rows` object are used
1698 but these can be overwritten with a `records` dict.
1699 `metadata` can be any (un)structured data
1700 `model` is a Typed Table class
1701 """
1702 records = records or {row.id: model(row) for row in rows}
1703 super().__init__(rows.db, records, rows.colnames, rows.compact, rows.response, rows.fields)
1704 self.model = model
1705 self.metadata = metadata or {}
1706 self.colnames = rows.colnames
1708 def __len__(self) -> int:
1709 """
1710 Return the count of rows.
1711 """
1712 return len(self.records)
1714 def __iter__(self) -> typing.Iterator[T_MetaInstance]:
1715 """
1716 Loop through the rows.
1717 """
1718 yield from self.records.values()
1720 def __contains__(self, ind: Any) -> bool:
1721 """
1722 Check if an id exists in this result set.
1723 """
1724 return ind in self.records
1726 def first(self) -> T_MetaInstance | None:
1727 """
1728 Get the row with the lowest id.
1729 """
1730 if not self.records:
1731 return None
1733 return next(iter(self))
1735 def last(self) -> T_MetaInstance | None:
1736 """
1737 Get the row with the highest id.
1738 """
1739 if not self.records:
1740 return None
1742 max_id = max(self.records.keys())
1743 return self[max_id]
1745 def find(
1746 self, f: typing.Callable[[T_MetaInstance], Query], limitby: tuple[int, int] = None
1747 ) -> "TypedRows[T_MetaInstance]":
1748 """
1749 Returns a new Rows object, a subset of the original object, filtered by the function `f`.
1750 """
1751 if not self.records:
1752 return self.__class__(self, self.model, {})
1754 records = {}
1755 if limitby:
1756 _min, _max = limitby
1757 else:
1758 _min, _max = 0, len(self)
1759 count = 0
1760 for i, row in self.records.items():
1761 if f(row):
1762 if _min <= count:
1763 records[i] = row
1764 count += 1
1765 if count == _max:
1766 break
1768 return self.__class__(self, self.model, records)
1770 def exclude(self, f: typing.Callable[[T_MetaInstance], Query]) -> "TypedRows[T_MetaInstance]":
1771 """
1772 Removes elements from the calling Rows object, filtered by the function `f`, \
1773 and returns a new Rows object containing the removed elements.
1774 """
1775 if not self.records:
1776 return self.__class__(self, self.model, {})
1777 removed = {}
1778 to_remove = []
1779 for i in self.records:
1780 row = self[i]
1781 if f(row):
1782 removed[i] = self.records[i]
1783 to_remove.append(i)
1785 [self.records.pop(i) for i in to_remove]
1787 return self.__class__(
1788 self,
1789 self.model,
1790 removed,
1791 )
1793 def sort(self, f: typing.Callable[[T_MetaInstance], Any], reverse: bool = False) -> list[T_MetaInstance]:
1794 """
1795 Returns a list of sorted elements (not sorted in place).
1796 """
1797 return [r for (r, s) in sorted(zip(self.records.values(), self), key=lambda r: f(r[1]), reverse=reverse)]
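# Hedged sketch of the in-memory helpers above (hypothetical Person model); these operate on
# the already-selected records without extra database round trips:
#
#   rows = Person.all()
#   by_age = rows.sort(lambda p: p.age)          # sorted list, original order untouched
#   adults = rows.find(lambda p: p.age >= 18)    # new TypedRows subset
#   minors = rows.exclude(lambda p: p.age < 18)  # removed from `rows`, returned separately
#   oldest = rows.last()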
1799 def __str__(self) -> str:
1800 """
1801 Simple string representation.
1802 """
1803 return f"<TypedRows with {len(self)} records>"
1805 def __repr__(self) -> str:
1806 """
1807 Print a table on repr().
1808 """
1809 data = self.as_dict()
1810 headers = list(next(iter(data.values())).keys())
1811 return mktable(data, headers)
1813 def group_by_value(
1814 self, *fields: "str | Field | TypedField[T]", one_result: bool = False, **kwargs: Any
1815 ) -> dict[T, list[T_MetaInstance]]:
1816 """
1817 Group the rows by a specific field (which will be the dict key).
1818 """
1819 kwargs["one_result"] = one_result
1820 result = super().group_by_value(*fields, **kwargs)
1821 return typing.cast(dict[T, list[T_MetaInstance]], result)
1823 def column(self, column: str = None) -> list[Any]:
1824 """
1825 Get a list of all values in a specific column.
1827 Example:
1828 rows.column('name') -> ['Name 1', 'Name 2', ...]
1829 """
1830 return typing.cast(list[Any], super().column(column))
1832 def as_csv(self) -> str:
1833 """
1834 Dump the data to csv.
1835 """
1836 return typing.cast(str, super().as_csv())
1838 def as_dict(
1839 self,
1840 key: str = None,
1841 compact: bool = False,
1842 storage_to_dict: bool = False,
1843 datetime_to_str: bool = False,
1844 custom_types: list[type] = None,
1845 ) -> dict[int, dict[str, Any]]:
1846 """
1847 Get the data in a dict of dicts.
1848 """
1849 if any([key, compact, storage_to_dict, datetime_to_str, custom_types]):
1850 # functionality not guaranteed
1851 return typing.cast(
1852 dict[int, dict[str, Any]],
1853 super().as_dict(
1854 key or "id",
1855 compact,
1856 storage_to_dict,
1857 datetime_to_str,
1858 custom_types,
1859 ),
1860 )
1862 return {k: v.as_dict() for k, v in self.records.items()}
1864 def as_json(self, default: typing.Callable[[Any], Any] = None, indent: Optional[int] = None, **kwargs: Any) -> str:
1865 """
1866 Turn the data into a dict and then dump to JSON.
1867 """
1868 data = self.as_list()
1870 return as_json.encode(data, default=default, indent=indent, **kwargs)
1872 def json(self, default: typing.Callable[[Any], Any] = None, indent: Optional[int] = None, **kwargs: Any) -> str:
1873 """
1874 Turn the data into a list of dicts and then dump to JSON.
1875 """
1876 return self.as_json(default=default, indent=indent, **kwargs)
1878 def as_list(
1879 self,
1880 compact: bool = False,
1881 storage_to_dict: bool = False,
1882 datetime_to_str: bool = False,
1883 custom_types: list[type] = None,
1884 ) -> list[dict[str, Any]]:
1885 """
1886 Get the data in a list of dicts.
1887 """
1888 if any([compact, storage_to_dict, datetime_to_str, custom_types]):
1889 return typing.cast(
1890 list[dict[str, Any]], super().as_list(compact, storage_to_dict, datetime_to_str, custom_types)
1891 )
1893 return [_.as_dict() for _ in self.records.values()]
1895 def __getitem__(self, item: int) -> T_MetaInstance:
1896 """
1897 You can get a specific row by ID from a TypedRows object by using rows[idx] notation.
1899 Since pydal's implementation differs (they expect a list instead of a dict with id keys),
1900 using rows[0] will return the first row, regardless of its id.
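Example (illustrative sketch; the ids are made up):
    row = rows[42]  # the row whose id is 42; raises KeyError if it is not in this result set
    row = rows[0]   # falls back to the first row when no row with id 0 exists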
1901 """
1902 try:
1903 return self.records[item]
1904 except KeyError as e:
1905 if item == 0 and (row := self.first()):
1906 # special case: pydal internals think Rows.records is a list, not a dict
1907 return row
1909 raise e
1911 def get(self, item: int) -> typing.Optional[T_MetaInstance]:
1912 """
1913 Get a row by ID, or receive None if it isn't in this result set.
1914 """
1915 return self.records.get(item)
1917 def update(self, **new_values: Any) -> bool:
1918 """
1919 Update the current rows in the database with new_values.
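Example (illustrative sketch; a `Person` table with `age` and `active` fields is assumed):
    rows = Person.where(lambda p: p.age >= 18).collect()
    rows.update(active=True)  # every row in this result set is updated by id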
1920 """
1921 # cast to make mypy understand .id is a TypedField and not an int!
1922 table = typing.cast(typing.Type[TypedTable], self.model._ensure_table_defined())
1924 ids = set(self.column("id"))
1925 query = table.id.belongs(ids)
1926 return bool(self.db(query).update(**new_values))
1928 def delete(self) -> bool:
1929 """
1930 Delete the currently selected rows from the database.
1931 """
1932 # cast to make mypy understand .id is a TypedField and not an int!
1933 table = typing.cast(typing.Type[TypedTable], self.model._ensure_table_defined())
1935 ids = set(self.column("id"))
1936 query = table.id.belongs(ids)
1937 return bool(self.db(query).delete())
1939 def join(
1940 self,
1941 field: "Field | TypedField[Any]",
1942 name: str = None,
1943 constraint: Query = None,
1944 fields: list[str | Field] = None,
1945 orderby: Optional[str | Field] = None,
1946 ) -> T_MetaInstance:
1947 """
1948 This can be used to JOIN with some relationships after the initial select.
1950 Using the query builder's .join() method is preferred!
1951 """
1952 result = super().join(field, name, constraint, fields or [], orderby)
1953 return typing.cast(T_MetaInstance, result)
1955 def export_to_csv_file(
1956 self,
1957 ofile: typing.TextIO,
1958 null: Any = "<NULL>",
1959 delimiter: str = ",",
1960 quotechar: str = '"',
1961 quoting: int = csv.QUOTE_MINIMAL,
1962 represent: bool = False,
1963 colnames: list[str] = None,
1964 write_colnames: bool = True,
1965 *args: Any,
1966 **kwargs: Any,
1967 ) -> None:
1968 """
1969 Shadow export_to_csv_file from Rows, but with typing.
1971 See http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#Exporting-and-importing-data
1972 """
1973 super().export_to_csv_file(
1974 ofile,
1975 null,
1976 *args,
1977 delimiter=delimiter,
1978 quotechar=quotechar,
1979 quoting=quoting,
1980 represent=represent,
1981 colnames=colnames or self.colnames,
1982 write_colnames=write_colnames,
1983 **kwargs,
1984 )
1986 @classmethod
1987 def from_rows(
1988 cls, rows: Rows, model: typing.Type[T_MetaInstance], metadata: Metadata = None
1989 ) -> "TypedRows[T_MetaInstance]":
1990 """
1991 Internal method to convert a Rows object to a TypedRows.
1992 """
1993 return cls(rows, model, metadata=metadata)
1996 def __getstate__(self) -> dict[str, Any]:
1997 """
1998 Used by dill to dump to bytes (excludes the db connection etc.).
1999 """
2000 return {
2001 "metadata": json.dumps(self.metadata, default=str),
2002 "records": self.records,
2003 "model": str(self.model._table),
2004 "colnames": self.colnames,
2005 }
2007 def __setstate__(self, state: dict[str, Any]) -> None:
2008 """
2009 Used by dill when loading from a bytestring.
2010 """
2011 state["metadata"] = json.loads(state["metadata"])
2012 self.__dict__.update(state)
2013 # db etc. set after undill by caching.py
2016from .caching import ( # noqa: E402
2017 _remove_cache,
2018 _TypedalCache,
2019 _TypedalCacheDependency,
2020 create_and_hash_cache_key,
2021 get_expire,
2022 load_from_cache,
2023 save_to_cache,
2024)
2027class QueryBuilder(typing.Generic[T_MetaInstance]):
2028 """
2029 Abstraction on top of pydal's query system.
2030 """
2032 model: typing.Type[T_MetaInstance]
2033 query: Query
2034 select_args: list[Any]
2035 select_kwargs: dict[str, Any]
2036 relationships: dict[str, Relationship[Any]]
2037 metadata: Metadata
2039 def __init__(
2040 self,
2041 model: typing.Type[T_MetaInstance],
2042 add_query: Optional[Query] = None,
2043 select_args: Optional[list[Any]] = None,
2044 select_kwargs: Optional[dict[str, Any]] = None,
2045 relationships: dict[str, Relationship[Any]] = None,
2046 metadata: Metadata = None,
2047 ):
2048 """
2049 Normally, you wouldn't initialize a QueryBuilder manually; instead, start from a method on a TypedTable.
2051 Example:
2052 MyTable.where(...) -> QueryBuilder[MyTable]
2053 """
2054 self.model = model
2055 table = model._ensure_table_defined()
2056 default_query = typing.cast(Query, table.id > 0)
2057 self.query = add_query or default_query
2058 self.select_args = select_args or []
2059 self.select_kwargs = select_kwargs or {}
2060 self.relationships = relationships or {}
2061 self.metadata = metadata or {}
2063 def __str__(self) -> str:
2064 """
2065 Simple string representation for the query builder.
2066 """
2067 return f"QueryBuilder for {self.model}"
2069 def __repr__(self) -> str:
2070 """
2071 Advanced string representation for the query builder.
2072 """
2073 return (
2074 f"<QueryBuilder for {self.model} with "
2075 f"{len(self.select_args)} select args; "
2076 f"{len(self.select_kwargs)} select kwargs; "
2077 f"{len(self.relationships)} relationships; "
2078 f"query: {bool(self.query)}; "
2079 f"metadata: {self.metadata}; "
2080 f">"
2081 )
2083 def __bool__(self) -> bool:
2084 """
2085 The query builder is truthy if its query matches at least one row.
2086 """
2087 return self.count() > 0
2089 def _extend(
2090 self,
2091 add_query: Optional[Query] = None,
2092 overwrite_query: Optional[Query] = None,
2093 select_args: Optional[list[Any]] = None,
2094 select_kwargs: Optional[dict[str, Any]] = None,
2095 relationships: dict[str, Relationship[Any]] = None,
2096 metadata: Metadata = None,
2097 ) -> "QueryBuilder[T_MetaInstance]":
2098 return QueryBuilder(
2099 self.model,
2100 (add_query & self.query) if add_query else overwrite_query or self.query,
2101 (self.select_args + select_args) if select_args else self.select_args,
2102 (self.select_kwargs | select_kwargs) if select_kwargs else self.select_kwargs,
2103 (self.relationships | relationships) if relationships else self.relationships,
2104 (self.metadata | (metadata or {})) if metadata else self.metadata,
2105 )
2107 def select(self, *fields: Any, **options: Any) -> "QueryBuilder[T_MetaInstance]":
2108 """
2109 Fields: database columns by name ('id'), by field reference (table.id) or other (e.g. table.ALL).
2111 Options:
2112 paraphrased from the web2py pydal docs,
2113 For more info, see http://www.web2py.com/books/default/chapter/29/06/the-database-abstraction-layer#orderby-groupby-limitby-distinct-having-orderby_on_limitby-join-left-cache
2115 orderby: field(s) to order by. Supported:
2116 table.name - sort by name, ascending
2117 ~table.name - sort by name, descending
2118 <random> - sort randomly
2119 table.name|table.id - sort by two fields (first name, then id)
2121 groupby, having: together with orderby:
2122 groupby can be a field (e.g. table.name) to group records by
2123 having can be a query, only those `having` the condition are grouped
2125 limitby: tuple of min and max. When using the query builder, .paginate(limit, page) is recommended.
2126 distinct: bool/field. Only select rows that differ
2127 orderby_on_limitby (bool, default: True): by default, an implicit orderby is added when doing limitby.
2128 join: othertable.on(query) - do an INNER JOIN. Using TypeDAL relationships with .join() is recommended!
2129 left: othertable.on(query) - do a LEFT JOIN. Using TypeDAL relationships with .join() is recommended!
2130 cache: cache the query result to speed up repeated queries; e.g. (cache=(cache.ram, 3600), cacheable=True)
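Example (illustrative sketch; a `Post` table with `published`, `title` and `created_at` fields is assumed):
    builder = Post.where(lambda p: p.published)
    rows = builder.select(Post.title, orderby=~Post.created_at, limitby=(0, 10)).collect()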
2131 """
2132 return self._extend(select_args=list(fields), select_kwargs=options)
2134 def where(
2135 self,
2136 *queries_or_lambdas: Query | typing.Callable[[typing.Type[T_MetaInstance]], Query],
2137 **filters: Any,
2138 ) -> "QueryBuilder[T_MetaInstance]":
2139 """
2140 Extend the builder's query.
2142 Can be used in multiple ways:
2143 .where(Query) -> with a direct query such as `Table.id == 5`
2144 .where(lambda table: table.id == 5) -> with a query via a lambda
2145 .where(id=5) -> via keyword arguments
2147 When chaining multiple .where() calls, they will be ANDed:
2148 .where(lambda table: table.id == 5).where(lambda table: table.id == 6) == (table.id == 5) & (table.id == 6)
2149 When passing multiple queries to a single .where(), they will be ORed:
2150 .where(lambda table: table.id == 5, lambda table: table.id == 6) == (table.id == 5) | (table.id == 6)
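Example (illustrative sketch; a `Person` table with `age` and `active` fields is assumed):
    adults = Person.where(lambda p: p.age >= 18).where(active=True).collect()
    # roughly equivalent to db((person.age >= 18) & (person.active == True)).select()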
2151 """
2152 new_query = self.query
2153 table = self.model._ensure_table_defined()
2155 for field, value in filters.items():
2156 new_query &= table[field] == value
2158 subquery: DummyQuery | Query = DummyQuery()
2159 for query_or_lambda in queries_or_lambdas:
2160 if isinstance(query_or_lambda, _Query):
2161 subquery |= typing.cast(Query, query_or_lambda)
2162 elif callable(query_or_lambda):
2163 if result := query_or_lambda(self.model):
2164 subquery |= result
2165 elif isinstance(query_or_lambda, (Field, _Field)) or is_typed_field(query_or_lambda):
2166 subquery |= typing.cast(Query, query_or_lambda != None)
2167 else:
2168 raise ValueError(f"Unexpected query type ({type(query_or_lambda)}).")
2170 if subquery:
2171 new_query &= subquery
2173 return self._extend(overwrite_query=new_query)
2175 def join(
2176 self,
2177 *fields: str | typing.Type[TypedTable],
2178 method: JOIN_OPTIONS = None,
2179 on: OnQuery | list[Expression] | Expression = None,
2180 condition: Condition = None,
2181 ) -> "QueryBuilder[T_MetaInstance]":
2182 """
2183 Include relationship fields in the result.
2185 `fields` can be names of Relationships on the current model.
2186 If no fields are passed, all will be used.
2188 By default, the `method` defined in the relationship is used.
2189 This can be overwritten with the `method` keyword argument (left or inner)
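Example (illustrative sketch; an `Author` table with a `posts` relationship is assumed):
    authors = Author.where(lambda a: a.active).join("posts", method="left").collect()
    for author in authors:
        print(author.name, len(author.posts))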
2190 """
2191 # todo: allow limiting amount of related rows returned for join?
2193 relationships = self.model.get_relationships()
2195 if condition and on:
2196 raise ValueError("condition and on can not be used together!")
2197 elif condition:
2198 if len(fields) != 1:
2199 raise ValueError("join(field, condition=...) can only be used with exactly one field!")
2201 if isinstance(condition, pydal.objects.Query):
2202 condition = as_lambda(condition)
2204 relationships = {str(fields[0]): relationship(fields[0], condition=condition, join=method)}
2205 elif on:
2206 if len(fields) != 1:
2207 raise ValueError("join(field, on=...) can only be used with exactly one field!")
2209 if isinstance(on, pydal.objects.Expression):
2210 on = [on]
2212 if isinstance(on, list):
2213 on = as_lambda(on)
2214 relationships = {str(fields[0]): relationship(fields[0], on=on, join=method)}
2216 else:
2217 if fields:
2218 # join on every relationship
2219 relationships = {str(k): relationships[str(k)] for k in fields}
2221 if method:
2222 relationships = {str(k): r.clone(join=method) for k, r in relationships.items()}
2224 return self._extend(relationships=relationships)
2226 def cache(
2227 self, *deps: Any, expires_at: Optional[dt.datetime] = None, ttl: Optional[int | dt.timedelta] = None
2228 ) -> "QueryBuilder[T_MetaInstance]":
2229 """
2230 Enable caching for this query, so repeated calls load a dill-serialized cache row \
2231 instead of executing the SQL and collecting matching rows again.
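Example (illustrative sketch; a `Post` table is assumed):
    rows = Post.where(lambda p: p.published).cache(ttl=3600).collect()
    # an identical query within the next hour is served from the cache table instead of the database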
2232 """
2233 existing = self.metadata.get("cache", {})
2235 metadata: Metadata = {}
2237 cache_meta = typing.cast(
2238 CacheMetadata,
2239 self.metadata.get("cache", {})
2240 | {
2241 "enabled": True,
2242 "depends_on": existing.get("depends_on", []) + [str(_) for _ in deps],
2243 "expires_at": get_expire(expires_at=expires_at, ttl=ttl),
2244 },
2245 )
2247 metadata["cache"] = cache_meta
2248 return self._extend(metadata=metadata)
2250 def _get_db(self) -> TypeDAL:
2251 if db := self.model._db:
2252 return db
2253 else: # pragma: no cover
2254 raise EnvironmentError("@define or db.define is not called on this class yet!")
2256 def _select_arg_convert(self, arg: Any) -> Any:
2257 # typedfield are not really used at runtime anymore, but leave it in for safety:
2258 if isinstance(arg, TypedField): # pragma: no cover
2259 arg = arg._field
2261 return arg
2263 def delete(self) -> list[int]:
2264 """
2265 Based on the current query, delete rows and return a list of deleted IDs.
2266 """
2267 db = self._get_db()
2268 removed_ids = [_.id for _ in db(self.query).select("id")]
2269 if db(self.query).delete():
2270 # success!
2271 return removed_ids
2273 return []
2275 def _delete(self) -> str:
2276 db = self._get_db()
2277 return str(db(self.query)._delete())
2279 def update(self, **fields: Any) -> list[int]:
2280 """
2281 Based on the current query, update `fields` and return a list of updated IDs.
2282 """
2283 # todo: limit?
2284 db = self._get_db()
2285 updated_ids = db(self.query).select("id").column("id")
2286 if db(self.query).update(**fields):
2287 # success!
2288 return updated_ids
2290 return []
2292 def _update(self, **fields: Any) -> str:
2293 db = self._get_db()
2294 return str(db(self.query)._update(**fields))
2296 def _before_query(self, mut_metadata: Metadata, add_id: bool = True) -> tuple[Query, list[Any], dict[str, Any]]:
2297 select_args = [self._select_arg_convert(_) for _ in self.select_args] or [self.model.ALL]
2298 select_kwargs = self.select_kwargs.copy()
2299 query = self.query
2300 model = self.model
2301 mut_metadata["query"] = query
2302 # require at least id of main table:
2303 select_fields = ", ".join([str(_) for _ in select_args])
2304 tablename = str(model)
2306 if add_id and f"{tablename}.id" not in select_fields:
2307 # fields of other selected, but required ID is missing.
2308 select_args.append(model.id)
2310 if self.relationships:
2311 query, select_args = self._handle_relationships_pre_select(query, select_args, select_kwargs, mut_metadata)
2313 return query, select_args, select_kwargs
2315 def to_sql(self, add_id: bool = False) -> str:
2316 """
2317 Generate the SQL for the built query.
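Example (illustrative sketch; a `Post` table is assumed, and the shown output is only approximate):
    print(Post.where(lambda p: p.id > 3).to_sql())
    # roughly: SELECT post.* FROM post WHERE (post.id > 3);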
2318 """
2319 db = self._get_db()
2321 query, select_args, select_kwargs = self._before_query({}, add_id=add_id)
2323 return str(db(query)._select(*select_args, **select_kwargs))
2325 def _collect(self) -> str:
2326 """
2327 Alias for to_sql, pydal-like syntax.
2328 """
2329 return self.to_sql()
2331 def _collect_cached(self, metadata: Metadata) -> "TypedRows[T_MetaInstance] | None":
2332 expires_at = metadata["cache"].get("expires_at")
2333 metadata["cache"] |= {
2334 # key is partly dependent on cache metadata, but not on these:
2335 "key": None,
2336 "status": None,
2337 "cached_at": None,
2338 "expires_at": None,
2339 }
2341 _, key = create_and_hash_cache_key(
2342 self.model,
2343 metadata,
2344 self.query,
2345 self.select_args,
2346 self.select_kwargs,
2347 self.relationships.keys(),
2348 )
2350 # re-set after creating key:
2351 metadata["cache"]["expires_at"] = expires_at
2352 metadata["cache"]["key"] = key
2354 return load_from_cache(key, self._get_db())
2356 def collect(
2357 self, verbose: bool = False, _to: typing.Type["TypedRows[Any]"] = None, add_id: bool = True
2358 ) -> "TypedRows[T_MetaInstance]":
2359 """
2360 Execute the built query and turn it into model instances, while handling relationships.
2361 """
2362 if _to is None:
2363 _to = TypedRows
2365 db = self._get_db()
2366 metadata = typing.cast(Metadata, self.metadata.copy())
2368 if metadata.get("cache", {}).get("enabled") and (result := self._collect_cached(metadata)):
2369 return result
2371 query, select_args, select_kwargs = self._before_query(metadata, add_id=add_id)
2373 metadata["sql"] = db(query)._select(*select_args, **select_kwargs)
2375 if verbose: # pragma: no cover
2376 print(metadata["sql"])
2378 rows: Rows = db(query).select(*select_args, **select_kwargs)
2380 metadata["final_query"] = str(query)
2381 metadata["final_args"] = [str(_) for _ in select_args]
2382 metadata["final_kwargs"] = select_kwargs
2384 if verbose: # pragma: no cover
2385 print(rows)
2387 if not self.relationships:
2388 # easy
2389 typed_rows = _to.from_rows(rows, self.model, metadata=metadata)
2391 else:
2392 # harder: try to match rows to the belonging objects
2393 # assume structure of {'table': <data>} per row.
2394 # if that's not the case, return default behavior again
2395 typed_rows = self._collect_with_relationships(rows, metadata=metadata, _to=_to)
2397 # only saves if requested in metadata:
2398 return save_to_cache(typed_rows, rows)
2400 def _handle_relationships_pre_select(
2401 self,
2402 query: Query,
2403 select_args: list[Any],
2404 select_kwargs: dict[str, Any],
2405 metadata: Metadata,
2406 ) -> tuple[Query, list[Any]]:
2407 db = self._get_db()
2408 model = self.model
2410 metadata["relationships"] = set(self.relationships.keys())
2412 # query = self._update_query_for_inner(db, model, query)
2413 join = []
2414 for key, relation in self.relationships.items():
2415 if not relation.condition or relation.join != "inner":
2416 continue
2418 other = relation.get_table(db)
2419 other = other.with_alias(f"{key}_{hash(relation)}")
2420 join.append(other.on(relation.condition(model, other)))
2422 if limitby := select_kwargs.pop("limitby", None):
2423 # if limitby + relationships:
2424 # 1. get IDs of main table entries that match 'query'
2425 # 2. change query to .belongs(id)
2426 # 3. add joins etc
2428 kwargs = {"limitby": limitby}
2430 if join:
2431 kwargs["join"] = join
2433 ids = db(query)._select(model.id, **kwargs)
2434 query = model.id.belongs(ids)
2435 metadata["ids"] = ids
2437 if join:
2438 select_kwargs["join"] = join
2440 left = []
2442 for key, relation in self.relationships.items():
2443 other = relation.get_table(db)
2444 method: JOIN_OPTIONS = relation.join or DEFAULT_JOIN_OPTION
2446 select_fields = ", ".join([str(_) for _ in select_args])
2447 pre_alias = str(other)
2449 if f"{other}." not in select_fields:
2450 # no fields of other selected. add .ALL:
2451 select_args.append(other.ALL)
2452 elif f"{other}.id" not in select_fields:
2453 # fields of other selected, but required ID is missing.
2454 select_args.append(other.id)
2456 if relation.on:
2457 # if it has a .on, it's always a left join!
2458 on = relation.on(model, other)
2459 if not isinstance(on, list): # pragma: no cover
2460 on = [on]
2462 left.extend(on)
2463 elif method == "left":
2464 # .on not given, generate it:
2465 other = other.with_alias(f"{key}_{hash(relation)}")
2466 condition = typing.cast(Query, relation.condition(model, other))
2467 left.append(other.on(condition))
2468 else:
2469 # else: inner join (handled earlier)
2470 other = other.with_alias(f"{key}_{hash(relation)}") # only for replace
2471 # other = other.with_alias(f"{key}_{hash(relation)}")
2472 # query &= relation.condition(model, other)
2474 # if no fields of 'other' are included, add other.ALL
2475 # else: only add other.id if missing
2476 select_fields = ", ".join([str(_) for _ in select_args])
2478 post_alias = str(other).split(" AS ")[-1]
2479 if pre_alias != post_alias:
2480 # replace .select's with aliased:
2481 select_fields = select_fields.replace(
2482 f"{pre_alias}.",
2483 f"{post_alias}.",
2484 )
2486 select_args = select_fields.split(", ")
2488 select_kwargs["left"] = left
2489 return query, select_args
2491 def _collect_with_relationships(
2492 self, rows: Rows, metadata: Metadata, _to: typing.Type["TypedRows[Any]"]
2493 ) -> "TypedRows[T_MetaInstance]":
2494 """
2495 Transform the raw rows into TypedTable model instances.
2496 """
2497 db = self._get_db()
2498 main_table = self.model._ensure_table_defined()
2500 records = {}
2501 seen_relations: dict[str, set[str]] = defaultdict(set) # main id -> set of col + id for relation
2503 for row in rows:
2504 main = row[main_table]
2505 main_id = main.id
2507 if main_id not in records:
2508 records[main_id] = self.model(main)
2509 records[main_id]._with = list(self.relationships.keys())
2511 # set up all relationship defaults (once)
2512 for col, relationship in self.relationships.items():
2513 records[main_id][col] = [] if relationship.multiple else None
2515 # now add other relationship data
2516 for column, relation in self.relationships.items():
2517 relationship_column = f"{column}_{hash(relation)}"
2519 # relationship_column works for aliases with the same target column.
2520 # if col + relationship not in the row, just use the regular name.
2522 relation_data = (
2523 row[relationship_column] if relationship_column in row else row[relation.get_table_name()]
2524 )
2526 if relation_data.id is None:
2527 # always skip None ids
2528 continue
2530 if f"{column}-{relation_data.id}" in seen_relations[main_id]:
2531 # speed up duplicates
2532 continue
2533 else:
2534 seen_relations[main_id].add(f"{column}-{relation_data.id}")
2536 relation_table = relation.get_table(db)
2537 # hopefully an instance of a typed table and a regular row otherwise:
2538 instance = relation_table(relation_data) if looks_like(relation_table, TypedTable) else relation_data
2540 if relation.multiple:
2541 # create list of T
2542 if not isinstance(records[main_id].get(column), list): # pragma: no cover
2543 # should already be set up before!
2544 setattr(records[main_id], column, [])
2546 records[main_id][column].append(instance)
2547 else:
2548 # create single T
2549 records[main_id][column] = instance
2551 return _to(rows, self.model, records, metadata=metadata)
2553 def collect_or_fail(self, exception: Exception = None) -> "TypedRows[T_MetaInstance]":
2554 """
2555 Call .collect() and raise an error if nothing found.
2557 Basically unwraps Optional type.
2558 """
2559 if result := self.collect():
2560 return result
2562 if not exception:
2563 exception = ValueError("Nothing found!")
2565 raise exception
2567 def __iter__(self) -> typing.Generator[T_MetaInstance, None, None]:
2568 """
2569 You can iterate a QueryBuilder object directly, without calling .collect() first, for ease of use.
2570 """
2571 yield from self.collect()
2573 def count(self) -> int:
2574 """
2575 Return the number of rows matching the current query.
2576 """
2577 db = self._get_db()
2578 model = self.model
2579 query = self.query
2581 for key, relation in self.relationships.items():
2582 if not relation.condition or relation.join != "inner":
2583 continue
2585 other = relation.get_table(db)
2586 other = other.with_alias(f"{key}_{hash(relation)}")
2587 query &= relation.condition(model, other)
2589 return db(query).count()
2591 def __paginate(
2592 self,
2593 limit: int,
2594 page: int = 1,
2595 ) -> "QueryBuilder[T_MetaInstance]":
2596 _from = limit * (page - 1)
2597 _to = limit * page
2599 available = self.count()
2601 metadata: Metadata = {}
2603 metadata["pagination"] = {
2604 "limit": limit,
2605 "current_page": page,
2606 "max_page": math.ceil(available / limit),
2607 "rows": available,
2608 "min_max": (_from, _to),
2609 }
2611 return self._extend(select_kwargs={"limitby": (_from, _to)}, metadata=metadata)
2613 def paginate(self, limit: int, page: int = 1, verbose: bool = False) -> "PaginatedRows[T_MetaInstance]":
2614 """
2615 Paginate transforms the more readable `page` and `limit` into pydal's internal limit and offset.
2617 Note: when using relationships, this limit is only applied to the 'main' table and any number of extra rows \
2618 can be loaded with relationship data!
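Example (illustrative sketch; a `Post` table is assumed):
    page = Post.where(lambda p: p.published).paginate(limit=20, page=2)
    print(page.pagination["total_pages"], len(page.data))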
2619 """
2620 builder = self.__paginate(limit, page)
2622 rows = typing.cast(PaginatedRows[T_MetaInstance], builder.collect(verbose=verbose, _to=PaginatedRows))
2624 rows._query_builder = builder
2625 return rows
2627 def _paginate(
2628 self,
2629 limit: int,
2630 page: int = 1,
2631 ) -> str:
2632 builder = self.__paginate(limit, page)
2633 return builder._collect()
2635 def chunk(self, chunk_size: int) -> typing.Generator["TypedRows[T_MetaInstance]", Any, None]:
2636 """
2637 Generator that yields rows from a paginated source in chunks.
2639 This function retrieves rows from a paginated data source in chunks of the
2640 specified `chunk_size` and yields them as TypedRows.
2642 Example:
2643 ```
2644 for chunk_of_rows in Table.where(SomeTable.id > 5).chunk(100):
2645 for row in chunk_of_rows:
2646 # Process each row within the chunk.
2647 pass
2648 ```
2649 """
2650 page = 1
2652 while rows := self.__paginate(chunk_size, page).collect():
2653 yield rows
2654 page += 1
2656 def first(self, verbose: bool = False) -> T_MetaInstance | None:
2657 """
2658 Get the first row matching the currently built query.
2660 Also applies pagination (limit 1), since it would be wasteful to select more rows than needed.
2661 """
2662 if row := self.paginate(page=1, limit=1, verbose=verbose).first():
2663 return self.model.from_row(row)
2664 else:
2665 return None
2667 def _first(self) -> str:
2668 return self._paginate(page=1, limit=1)
2670 def first_or_fail(self, exception: Exception = None, verbose: bool = False) -> T_MetaInstance:
2671 """
2672 Call .first() and raise an error if nothing found.
2674 Basically unwraps Optional type.
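Example (illustrative sketch; a `Post` table is assumed):
    post = Post.where(id=5).first_or_fail(ValueError("post 5 does not exist"))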
2675 """
2676 if inst := self.first(verbose=verbose):
2677 return inst
2679 if not exception:
2680 exception = ValueError("Nothing found!")
2682 raise exception
2685S = typing.TypeVar("S")
2688class PaginatedRows(TypedRows[T_MetaInstance]):
2689 """
2690 Extension on top of rows that is used when calling .paginate() instead of .collect().
2691 """
2693 _query_builder: QueryBuilder[T_MetaInstance]
2695 @property
2696 def data(self) -> list[T_MetaInstance]:
2697 """
2698 Get the underlying data.
2699 """
2700 return list(self.records.values())
2702 @property
2703 def pagination(self) -> Pagination:
2704 """
2705 Get all page info.
2706 """
2707 pagination_data = self.metadata["pagination"]
2709 has_next_page = pagination_data["current_page"] < pagination_data["max_page"]
2710 has_prev_page = pagination_data["current_page"] > 1
2711 return {
2712 "total_items": pagination_data["rows"],
2713 "current_page": pagination_data["current_page"],
2714 "per_page": pagination_data["limit"],
2715 "total_pages": pagination_data["max_page"],
2716 "has_next_page": has_next_page,
2717 "has_prev_page": has_prev_page,
2718 "next_page": pagination_data["current_page"] + 1 if has_next_page else None,
2719 "prev_page": pagination_data["current_page"] - 1 if has_prev_page else None,
2720 }
2722 def next(self) -> Self: # noqa: A003
2723 """
2724 Get the next page.
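Example (illustrative sketch; a `Post` table and a `process()` callback are assumed):
    page = Post.where(lambda p: p.published).paginate(limit=20)
    while True:
        process(page.data)
        try:
            page = page.next()
        except StopIteration:
            break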
2725 """
2726 data = self.metadata["pagination"]
2727 if data["current_page"] >= data["max_page"]:
2728 raise StopIteration("Final Page")
2730 return self._query_builder.paginate(limit=data["limit"], page=data["current_page"] + 1)
2732 def previous(self) -> Self:
2733 """
2734 Get the previous page.
2735 """
2736 data = self.metadata["pagination"]
2737 if data["current_page"] <= 1:
2738 raise StopIteration("First Page")
2740 return self._query_builder.paginate(limit=data["limit"], page=data["current_page"] - 1)
2742 def as_dict(self, *_: Any, **__: Any) -> PaginateDict: # type: ignore
2743 """
2744 Convert to a dictionary with pagination info and original data.
2746 All arguments are ignored!
2747 """
2748 return {"data": super().as_dict(), "pagination": self.pagination}
2751class TypedSet(pydal.objects.Set): # type: ignore # pragma: no cover
2752 """
2753 Used to make pydal Set more typed.
2755 This class is not actually used, only 'cast' by TypeDAL.__call__
2756 """
2758 def count(self, distinct: bool = None, cache: dict[str, Any] = None) -> int:
2759 """
2760 Count returns an int.
2761 """
2762 result = super().count(distinct, cache)
2763 return typing.cast(int, result)
2765 def select(self, *fields: Any, **attributes: Any) -> TypedRows[T_MetaInstance]:
2766 """
2767 Select returns a TypedRows of a user defined table.
2769 Example:
2770 result: TypedRows[MyTable] = db(MyTable.id > 0).select()
2772 for row in result:
2773 typing.reveal_type(row) # MyTable
2774 """
2775 rows = super().select(*fields, **attributes)
2776 return typing.cast(TypedRows[T_MetaInstance], rows)