Coverage for src/typedal/core.py: 100%

927 statements  

coverage.py v7.5.1, created at 2024-08-05 19:10 +0200

1""" 

2Core functionality of TypeDAL. 

3""" 

4 

5import contextlib 

6import csv 

7import datetime as dt 

8import inspect 

9import json 

10import math 

11import types 

12import typing 

13import warnings 

14from collections import defaultdict 

15from copy import copy 

16from decimal import Decimal 

17from pathlib import Path 

18from typing import Any, Optional, Type 

19 

20import pydal 

21from pydal._globals import DEFAULT 

22from pydal.objects import Field as _Field 

23from pydal.objects import Query as _Query 

24from pydal.objects import Row 

25from pydal.objects import Table as _Table 

26from typing_extensions import Self, Unpack 

27 

28from .config import TypeDALConfig, load_config 

29from .helpers import ( 

30 DummyQuery, 

31 all_annotations, 

32 all_dict, 

33 as_lambda, 

34 extract_type_optional, 

35 filter_out, 

36 instanciate, 

37 is_union, 

38 looks_like, 

39 mktable, 

40 origin_is_subclass, 

41 to_snake, 

42 unwrap_type, 

43) 

44from .serializers import as_json 

45from .types import ( 

46 AnyDict, 

47 CacheMetadata, 

48 Expression, 

49 Field, 

50 Metadata, 

51 OpRow, 

52 PaginateDict, 

53 Pagination, 

54 Query, 

55 Reference, 

56 Rows, 

57 SelectKwargs, 

58 Set, 

59 Table, 

60 Validator, 

61 _Types, 

62) 

63 

64# use typing.cast(type, ...) to make mypy happy with unions 

65T_annotation = Type[Any] | types.UnionType 

66T_Query = typing.Union["Table", Query, bool, None, "TypedTable", Type["TypedTable"]] 

67T_Value = typing.TypeVar("T_Value") # actual type of the Field (via Generic) 

68T_MetaInstance = typing.TypeVar("T_MetaInstance", bound="TypedTable") # bound="TypedTable"; bound="TableMeta" 

69T = typing.TypeVar("T") 

70 

71BASIC_MAPPINGS: dict[T_annotation, str] = { 

72 str: "string", 

73 int: "integer", 

74 bool: "boolean", 

75 bytes: "blob", 

76 float: "double", 

77 object: "json", 

78 Decimal: "decimal(10,2)", 

79 dt.date: "date", 

80 dt.time: "time", 

81 dt.datetime: "datetime", 

82} 

83 

84 

85def is_typed_field(cls: Any) -> typing.TypeGuard["TypedField[Any]"]: 

86 """ 

87 Is `cls` an instance or subclass of TypedField? 

88 

89 Deprecated 

90 """ 

91 return ( 

92 isinstance(cls, TypedField) 

93 or isinstance(typing.get_origin(cls), type) 

94 and issubclass(typing.get_origin(cls), TypedField) 

95 ) 

96 

97 

98JOIN_OPTIONS = typing.Literal["left", "inner", None] 

99DEFAULT_JOIN_OPTION: JOIN_OPTIONS = "left" 

100 

101# table-ish parameter: 

102P_Table = typing.Union[Type["TypedTable"], pydal.objects.Table] 

103 

104Condition: typing.TypeAlias = typing.Optional[ 

105 typing.Callable[ 

106 # self, other -> Query 

107 [P_Table, P_Table], 

108 Query | bool, 

109 ] 

110] 

111 

112OnQuery: typing.TypeAlias = typing.Optional[ 

113 typing.Callable[ 

114 # self, other -> list of .on statements 

115 [P_Table, P_Table], 

116 list[Expression], 

117 ] 

118] 

119 

120To_Type = typing.TypeVar("To_Type", type[Any], Type[Any], str) 

121 

122 

123class Relationship(typing.Generic[To_Type]): 

124 """ 

125 Define a relationship to another table. 

126 """ 

127 

128 _type: To_Type 

129 table: Type["TypedTable"] | type | str 

130 condition: Condition 

131 on: OnQuery 

132 multiple: bool 

133 join: JOIN_OPTIONS 

134 

135 def __init__( 

136 self, 

137 _type: To_Type, 

138 condition: Condition = None, 

139 join: JOIN_OPTIONS = None, 

140 on: OnQuery = None, 

141 ): 

142 """ 

143 Should not be called directly, use relationship() instead! 

144 """ 

145 if condition and on: 

146 warnings.warn(f"Relation | Both specified! {condition=} {on=} {_type=}") 

147 raise ValueError("Please specify either a condition or an 'on' statement for this relationship!") 

148 

149 self._type = _type 

150 self.condition = condition 

151 self.join = "left" if on else join # .on is always left join! 

152 self.on = on 

153 

154 if args := typing.get_args(_type): 

155 self.table = unwrap_type(args[0]) 

156 self.multiple = True 

157 else: 

158 self.table = _type 

159 self.multiple = False 

160 

161 if isinstance(self.table, str): 

162 self.table = TypeDAL.to_snake(self.table) 

163 

164 def clone(self, **update: Any) -> "Relationship[To_Type]": 

165 """ 

166 Create a copy of the relationship, possibly updated. 

167 """ 

168 return self.__class__( 

169 update.get("_type") or self._type, 

170 update.get("condition") or self.condition, 

171 update.get("join") or self.join, 

172 update.get("on") or self.on, 

173 ) 

174 

175 def __repr__(self) -> str: 

176 """ 

177 Representation of the relationship. 

178 """ 

179 if callback := self.condition or self.on: 

180 src_code = inspect.getsource(callback).strip() 

181 else: 

182 cls_name = self._type if isinstance(self._type, str) else self._type.__name__ # type: ignore 

183 src_code = f"to {cls_name} (missing condition)" 

184 

185 join = f":{self.join}" if self.join else "" 

186 return f"<Relationship{join} {src_code}>" 

187 

188 def get_table(self, db: "TypeDAL") -> Type["TypedTable"]: 

189 """ 

190 Get the table this relationship is bound to. 

191 """ 

192 table = self.table # can be a string because db wasn't available yet 

193 if isinstance(table, str): 

194 if mapped := db._class_map.get(table): 

195 # yay 

196 return mapped 

197 

198 # boo, fall back to untyped table but pretend it is typed: 

199 return typing.cast(Type["TypedTable"], db[table]) # eh close enough! 

200 

201 return table 

202 

203 def get_table_name(self) -> str: 

204 """ 

205 Get the name of the table this relationship is bound to. 

206 """ 

207 if isinstance(self.table, str): 

208 return self.table 

209 

210 if isinstance(self.table, pydal.objects.Table): 

211 return str(self.table) 

212 

213 # else: typed table 

214 try: 

215 table = self.table._ensure_table_defined() if issubclass(self.table, TypedTable) else self.table 

216 except Exception: # pragma: no cover 

217 table = self.table 

218 

219 return str(table) 

220 

221 def __get__(self, instance: Any, owner: Any) -> typing.Optional[list[Any]] | "Relationship[To_Type]": 

222 """ 

223 Relationship is a descriptor class, which can be returned from a class but not an instance. 

224 

225 For an instance, using .join() will replace the Relationship with the actual data. 

226 If you forgot to join, a warning will be shown and empty data will be returned. 

227 """ 

228 if not instance: 

229 # relationship queried on class, that's allowed 

230 return self 

231 

232 warnings.warn( 

233 "Trying to get data from a relationship object! Did you forget to join it?", category=RuntimeWarning 

234 ) 

235 if self.multiple: 

236 return [] 

237 else: 

238 return None 

239 

240 

241def relationship(_type: To_Type, condition: Condition = None, join: JOIN_OPTIONS = None, on: OnQuery = None) -> To_Type: 

242 """ 

243 Define a relationship to another table, when its id is not stored in the current table. 

244 

245 Example: 

246 class User(TypedTable): 

247 name: str 

248 

249 posts = relationship(list["Post"], condition=lambda self, post: self.id == post.author, join='left') 

250 

251 class Post(TypedTable): 

252 title: str 

253 author: User 

254 

255 User.join("posts").first() # User instance with list[Post] in .posts 

256 

257 Here, Post stores the User ID, but `relationship(list["Post"])` still allows you to get the user's posts. 

258 In this case, the join strategy is set to LEFT so users without posts are also still selected. 

259 

260 For complex queries with a pivot table, an `on` can be set instead of `condition`: 

261 class User(TypedTable): 

262 ... 

263 

264 tags = relationship(list["Tag"], on=lambda self, tag: [ 

265 Tagged.on(Tagged.entity == self.gid), 

266 Tag.on((Tagged.tag == tag.id)), 

267 ]) 

268 

269 If you tried to capture this in a single 'condition', pydal would create a cross join, which is much less efficient. 

270 """ 

271 return typing.cast( 

272 # note: The descriptor `Relationship[To_Type]` is more correct, but pycharm doesn't really get that. 

273 # so for ease of use, just cast to the referred type for now! 

274 # e.g. x = relationship(Author) -> x: Author 

275 To_Type, 

276 Relationship(_type, condition, join, on), 

277 ) 

278 

279 

280def _generate_relationship_condition( 

281 _: Type["TypedTable"], key: str, field: typing.Union["TypedField[Any]", "Table", Type["TypedTable"]] 

282) -> Condition: 

283 origin = typing.get_origin(field) 

284 # else: generic 

285 

286 if origin is list: 

287 # field = typing.get_args(field)[0] # actual field 

288 # return lambda _self, _other: cls[key].contains(field) 

289 

290 return lambda _self, _other: _self[key].contains(_other.id) 

291 else: 

292 # normal reference 

293 # return lambda _self, _other: cls[key] == field.id 

294 return lambda _self, _other: _self[key] == _other.id 

295 

296 

297def to_relationship( 

298 cls: Type["TypedTable"] | type[Any], 

299 key: str, 

300 field: typing.Union["TypedField[Any]", "Table", Type["TypedTable"]], 

301) -> typing.Optional[Relationship[Any]]: 

302 """ 

303 Used to automatically create relationship instance for reference fields. 

304 

305 Example: 

306 class MyTable(TypedTable): 

307 reference: OtherTable 

308 

309 `reference` contains the id of an OtherTable row. 

310 MyTable.relationships should have 'reference' as a relationship, so `MyTable.join('reference')` should work. 

311 

312 This function will automatically perform this logic (called in db.define): 

313 to_relationship(MyTable, 'reference', OtherTable) -> Relationship[OtherTable] 

314 

315 Also works for list:reference (list[OtherTable]) and TypedField[OtherTable]. 

316 """ 

317 if looks_like(field, TypedField): 

318 if args := typing.get_args(field): 

319 field = args[0] 

320 else: 

321 # weird 

322 return None 

323 

324 field, optional = extract_type_optional(field) 

325 

326 try: 

327 condition = _generate_relationship_condition(cls, key, field) 

328 except Exception as e: # pragma: no cover 

329 warnings.warn("Could not generate Relationship condition", source=e) 

330 condition = None 

331 

332 if not condition: # pragma: no cover 

333 # something went wrong, not a valid relationship 

334 warnings.warn(f"Invalid relationship for {cls.__name__}.{key}: {field}") 

335 return None 

336 

337 join = "left" if optional or typing.get_origin(field) is list else "inner" 

338 

339 return Relationship(typing.cast(type[TypedTable], field), condition, typing.cast(JOIN_OPTIONS, join)) 

340 

341 

342class TypeDAL(pydal.DAL): # type: ignore 

343 """ 

344 Drop-in replacement for pyDAL with layer to convert class-based table definitions to classical pydal define_tables. 

345 """ 

346 

347 _config: TypeDALConfig 

348 

349 def __init__( 

350 self, 

351 uri: Optional[str] = None, # default from config or 'sqlite:memory' 

352 pool_size: int = None, # default 1 if sqlite else 3 

353 folder: Optional[str | Path] = None, # default 'databases' in config 

354 db_codec: str = "UTF-8", 

355 check_reserved: Optional[list[str]] = None, 

356 migrate: Optional[bool] = None, # default True by config 

357 fake_migrate: Optional[bool] = None, # default False by config 

358 migrate_enabled: bool = True, 

359 fake_migrate_all: bool = False, 

360 decode_credentials: bool = False, 

361 driver_args: Optional[AnyDict] = None, 

362 adapter_args: Optional[AnyDict] = None, 

363 attempts: int = 5, 

364 auto_import: bool = False, 

365 bigint_id: bool = False, 

366 debug: bool = False, 

367 lazy_tables: bool = False, 

368 db_uid: Optional[str] = None, 

369 after_connection: typing.Callable[..., Any] = None, 

370 tables: Optional[list[str]] = None, 

371 ignore_field_case: bool = True, 

372 entity_quoting: bool = True, 

373 table_hash: Optional[str] = None, 

374 enable_typedal_caching: bool = None, 

375 use_pyproject: bool | str = True, 

376 use_env: bool | str = True, 

377 connection: Optional[str] = None, 

378 config: Optional[TypeDALConfig] = None, 

379 ) -> None: 

380 """ 

381 Adds some internal tables after calling pydal's default init. 

382 

383 Set enable_typedal_caching to False to disable this behavior. 

384 """ 

385 config = config or load_config(connection, _use_pyproject=use_pyproject, _use_env=use_env) 

386 config.update( 

387 database=uri, 

388 dialect=uri.split(":")[0] if uri and ":" in uri else None, 

389 folder=str(folder) if folder is not None else None, 

390 migrate=migrate, 

391 fake_migrate=fake_migrate, 

392 caching=enable_typedal_caching, 

393 pool_size=pool_size, 

394 ) 

395 

396 self._config = config 

397 

398 if config.folder: 

399 Path(config.folder).mkdir(exist_ok=True) 

400 

401 super().__init__( 

402 config.database, 

403 config.pool_size, 

404 config.folder, 

405 db_codec, 

406 check_reserved, 

407 config.migrate, 

408 config.fake_migrate, 

409 migrate_enabled, 

410 fake_migrate_all, 

411 decode_credentials, 

412 driver_args, 

413 adapter_args, 

414 attempts, 

415 auto_import, 

416 bigint_id, 

417 debug, 

418 lazy_tables, 

419 db_uid, 

420 after_connection, 

421 tables, 

422 ignore_field_case, 

423 entity_quoting, 

424 table_hash, 

425 ) 

426 

427 if config.caching: 

428 self.try_define(_TypedalCache) 

429 self.try_define(_TypedalCacheDependency) 

430 

431 def try_define(self, model: Type[T], verbose: bool = False) -> Type[T]: 

432 """ 

433 Try to define a model with migrate or fall back to fake migrate. 

434 """ 

435 try: 

436 return self.define(model, migrate=True) 

437 except Exception as e: 

438 # clean up: 

439 self.rollback() 

440 if (tablename := self.to_snake(model.__name__)) and tablename in dir(self): 

441 delattr(self, tablename) 

442 

443 if verbose: 

444 warnings.warn(f"{model} could not be migrated, try faking", source=e, category=RuntimeWarning) 

445 

446 # try again: 

447 return self.define(model, migrate=True, fake_migrate=True, redefine=True) 

448 

449 default_kwargs: typing.ClassVar[AnyDict] = { 

450 # fields are 'required' (notnull) by default: 

451 "notnull": True, 

452 } 

453 

454 # maps table name to typedal class, for resolving future references 

455 _class_map: typing.ClassVar[dict[str, Type["TypedTable"]]] = {} 

456 

457 def _define(self, cls: Type[T], **kwargs: Any) -> Type[T]: 

458 # todo: new relationship item added should also invalidate (previously unrelated) cache result 

459 

460 # todo: option to enable/disable cache dependency behavior: 

461 # - don't set _before_update and _before_delete 

462 # - don't add TypedalCacheDependency entry 

463 # - don't invalidate other item on new row of this type 

464 

465 # when __future__.annotations is implemented, cls.__annotations__ will not work anymore as below. 

466 # proper way to handle this would be (but gives error right now due to Table implementing magic methods): 

467 # typing.get_type_hints(cls, globalns=None, localns=None) 

468 

469 # dirty way (with evil eval): 

470 # [eval(v) for k, v in cls.__annotations__.items()] 

471 # this however also stops working when variables outside this scope or even references to other 

472 # objects are used. So for now, this package will NOT work when from __future__ import annotations is used, 

473 # and might break in the future, when this annotations behavior is enabled by default. 

474 

475 # non-annotated variables have to be passed to define_table as kwargs 

476 full_dict = all_dict(cls) # includes properties from parents (e.g. useful for mixins) 

477 

478 tablename = self.to_snake(cls.__name__) 

479 # grab annotations of cls and its parents: 

480 annotations = all_annotations(cls) 

481 # extend with `prop = TypedField()` 'annotations': 

482 annotations |= {k: typing.cast(type, v) for k, v in full_dict.items() if is_typed_field(v)} 

483 # remove internal stuff: 

484 annotations = {k: v for k, v in annotations.items() if not k.startswith("_")} 

485 

486 typedfields: dict[str, TypedField[Any]] = { 

487 k: instanciate(v, True) for k, v in annotations.items() if is_typed_field(v) 

488 } 

489 

490 relationships: dict[str, type[Relationship[Any]]] = filter_out(annotations, Relationship) 

491 

492 fields = {fname: self._to_field(fname, ftype) for fname, ftype in annotations.items()} 

493 

494 # ! don't use full_dict here: 

495 other_kwargs = kwargs | { 

496 k: v for k, v in cls.__dict__.items() if k not in annotations and not k.startswith("_") 

497 } # other_kwargs was previously used to pass kwargs to typedal, but use @define(**kwargs) for that. 

498 # now it's only used to extract relationships from the object. 

499 # other properties of the class (incl methods) should not be touched 

500 

501 # for key in typedfields.keys() - full_dict.keys(): 

502 # # typed fields that haven't been added to the object yet 

503 # setattr(cls, key, typedfields[key]) 

504 

505 for key, field in typedfields.items(): 

506 # clone every property so it can be re-used across mixins: 

507 clone = copy(field) 

508 setattr(cls, key, clone) 

509 typedfields[key] = clone 

510 

511 # start with base classes and overwrite with current class: 

512 relationships = filter_out(full_dict, Relationship) | relationships | filter_out(other_kwargs, Relationship) 

513 

514 # DEPRECATED: Relationship as annotation is currently not supported! 

515 # ensure they are all instances and 

516 # not mix of instances (`= relationship()`) and classes (`: Relationship[...]`): 

517 # relationships = { 

518 # k: v if isinstance(v, Relationship) else to_relationship(cls, k, v) for k, v in relationships.items() 

519 # } 

520 

521 # keys of implicit references (also relationships): 

522 reference_field_keys = [k for k, v in fields.items() if v.type.split(" ")[0] in ("list:reference", "reference")] 

523 

524 # add implicit relationships: 

525 # User; list[User]; TypedField[User]; TypedField[list[User]] 

526 relationships |= { 

527 k: new_relationship 

528 for k in reference_field_keys 

529 if k not in relationships and (new_relationship := to_relationship(cls, k, annotations[k])) 

530 } 

531 

532 cache_dependency = self._config.caching and kwargs.pop("cache_dependency", True) 

533 

534 table: Table = self.define_table(tablename, *fields.values(), **kwargs) 

535 

536 for name, typed_field in typedfields.items(): 

537 field = fields[name] 

538 typed_field.bind(field, table) 

539 

540 if issubclass(cls, TypedTable): 

541 cls.__set_internals__( 

542 db=self, 

543 table=table, 

544 # by now, all relationships should be instances! 

545 relationships=typing.cast(dict[str, Relationship[Any]], relationships), 

546 ) 

547 self._class_map[str(table)] = cls 

548 cls.__on_define__(self) 

549 else: 

550 warnings.warn("db.define used without inheriting TypedTable. This could lead to strange problems!") 

551 

552 if not tablename.startswith("typedal_") and cache_dependency: 

553 table._before_update.append(lambda s, _: _remove_cache(s, tablename)) 

554 table._before_delete.append(lambda s: _remove_cache(s, tablename)) 

555 

556 return cls 

557 

558 @typing.overload 

559 def define(self, maybe_cls: None = None, **kwargs: Any) -> typing.Callable[[Type[T]], Type[T]]: 

560 """ 

561 Typing Overload for define without a class. 

562 

563 @db.define() 

564 class MyTable(TypedTable): ... 

565 """ 

566 

567 @typing.overload 

568 def define(self, maybe_cls: Type[T], **kwargs: Any) -> Type[T]: 

569 """ 

570 Typing Overload for define with a class. 

571 

572 @db.define 

573 class MyTable(TypedTable): ... 

574 """ 

575 

576 def define(self, maybe_cls: Type[T] | None = None, **kwargs: Any) -> Type[T] | typing.Callable[[Type[T]], Type[T]]: 

577 """ 

578 Can be used as a decorator on a class that inherits `TypedTable`, \ 

579 or as a regular method if you need to define your classes before you have access to a 'db' instance. 

580 

581 You can also pass extra arguments to db.define_table. 

582 See http://www.web2py.com/books/default/chapter/29/06/the-database-abstraction-layer#Table-constructor 

583 

584 Example: 

585 @db.define 

586 class Person(TypedTable): 

587 ... 

588 

589 class Article(TypedTable): 

590 ... 

591 

592 # at a later time: 

593 db.define(Article) 

594 

595 Returns: 

596 the result of pydal.define_table 

597 """ 

598 

599 def wrapper(cls: Type[T]) -> Type[T]: 

600 return self._define(cls, **kwargs) 

601 

602 if maybe_cls: 

603 return wrapper(maybe_cls) 

604 

605 return wrapper 

606 

607 # def drop(self, table_name: str) -> None: 

608 # """ 

609 # Remove a table by name (both on the database level and the typedal level). 

610 # """ 

611 # # drop calls TypedTable.drop() and removes it from the `_class_map` 

612 # if cls := self._class_map.pop(table_name, None): 

613 # cls.drop() 

614 

615 # def drop_all(self, max_retries: int = None) -> None: 

616 # """ 

617 # Remove all tables and keep doing so until everything is gone! 

618 # """ 

619 # retries = 0 

620 # if max_retries is None: 

621 # max_retries = len(self.tables) 

622 # 

623 # while self.tables: 

624 # retries += 1 

625 # for table in self.tables: 

626 # self.drop(table) 

627 # 

628 # if retries > max_retries: 

629 # raise RuntimeError("Could not delete all tables") 

630 

631 def __call__(self, *_args: T_Query, **kwargs: Any) -> "TypedSet": 

632 """ 

633 A db instance can be called directly to perform a query. 

634 

635 Usually, only a query is passed. 

636 

637 Example: 

638 db(query).select() 

639 

640 """ 

641 args = list(_args) 

642 if args: 

643 cls = args[0] 

644 if isinstance(cls, bool): 

645 raise ValueError("Don't actually pass a bool to db()! Use a query instead.") 

646 

647 if isinstance(cls, type) and issubclass(type(cls), type) and issubclass(cls, TypedTable): 

648 # table defined without @db.define decorator! 

649 _cls: Type[TypedTable] = cls 

650 args[0] = _cls.id != None 

651 

652 _set = super().__call__(*args, **kwargs) 

653 return typing.cast(TypedSet, _set) 

654 

655 def __getitem__(self, key: str) -> "Table": 

656 """ 

657 Allows dynamically accessing a table by its name as a string. 

658 

659 Example: 

660 db['users'] -> user 

661 """ 

662 return typing.cast(Table, super().__getitem__(str(key))) 

663 

664 @classmethod 

665 def _build_field(cls, name: str, _type: str, **kw: Any) -> Field: 

666 return Field(name, _type, **{**cls.default_kwargs, **kw}) 

667 

668 @classmethod 

669 def _annotation_to_pydal_fieldtype( 

670 cls, _ftype: T_annotation, mut_kw: typing.MutableMapping[str, Any] 

671 ) -> Optional[str]: 

672 # ftype can be a union or type. typing.cast is sometimes used to tell mypy when it's not a union. 

673 ftype = typing.cast(type, _ftype) # cast from Type to type to make mypy happy) 

674 

675 if isinstance(ftype, str): 

676 # extract type from string 

677 ftype = typing.get_args(Type[ftype])[0]._evaluate( 

678 localns=locals(), globalns=globals(), recursive_guard=frozenset() 

679 ) 

680 

681 if mapping := BASIC_MAPPINGS.get(ftype): 

682 # basic types 

683 return mapping 

684 elif isinstance(ftype, _Table): 

685 # db.table 

686 return f"reference {ftype._tablename}" 

687 elif issubclass(type(ftype), type) and issubclass(ftype, TypedTable): 

688 # SomeTable 

689 snakename = cls.to_snake(ftype.__name__) 

690 return f"reference {snakename}" 

691 elif isinstance(ftype, TypedField): 

692 # FieldType(type, ...) 

693 return ftype._to_field(mut_kw) 

694 elif origin_is_subclass(ftype, TypedField): 

695 # TypedField[int] 

696 return cls._annotation_to_pydal_fieldtype(typing.get_args(ftype)[0], mut_kw) 

697 elif isinstance(ftype, types.GenericAlias) and typing.get_origin(ftype) in (list, TypedField): 

698 # list[str] -> str -> string -> list:string 

699 _child_type = typing.get_args(ftype)[0] 

700 _child_type = cls._annotation_to_pydal_fieldtype(_child_type, mut_kw) 

701 return f"list:{_child_type}" 

702 elif is_union(ftype): 

703 # str | int -> UnionType 

704 # typing.Union[str | int] -> typing._UnionGenericAlias 

705 

706 # Optional[type] == type | None 

707 

708 match typing.get_args(ftype): 

709 case (_child_type, _Types.NONETYPE) | (_Types.NONETYPE, _child_type): 

710 # good union of Nullable 

711 

712 # if a field is optional, it is nullable: 

713 mut_kw["notnull"] = False 

714 return cls._annotation_to_pydal_fieldtype(_child_type, mut_kw) 

715 case _: 

716 # two types is not supported by the db! 

717 return None 

718 else: 

719 return None 

720 

721 @classmethod 

722 def _to_field(cls, fname: str, ftype: type, **kw: Any) -> Field: 

723 """ 

724 Convert an annotation into a pydal Field. 

725 

726 Args: 

727 fname: name of the property 

728 ftype: annotation of the property 

729 kw: when using TypedField or a function returning it (e.g. StringField), 

730 keyword args can be used to pass any other settings you would normally to a pydal Field 

731 

732 -> pydal.Field(fname, ftype, **kw) 

733 

734 Example: 

735 class MyTable: 

736 fname: ftype 

737 id: int 

738 name: str 

739 reference: Table 

740 other: TypedField(str, default="John Doe") # default will be in kwargs 

741 """ 

742 fname = cls.to_snake(fname) 

743 

744 if converted_type := cls._annotation_to_pydal_fieldtype(ftype, kw): 

745 return cls._build_field(fname, converted_type, **kw) 

746 else: 

747 raise NotImplementedError(f"Unsupported type {ftype}/{type(ftype)}") 

748 

749 @staticmethod 

750 def to_snake(camel: str) -> str: 

751 """ 

752 Moved to helpers, kept as a static method for legacy reasons. 

753 """ 

754 return to_snake(camel) 

755 

756 

757class TableMeta(type): 

758 """ 

759 This metaclass contains functionality for table classes that doesn't exist on their instances. 

760 

761 Example: 

762 class MyTable(TypedTable): 

763 some_field: TypedField[int] 

764 

765 MyTable.update_or_insert(...) # should work 

766 

767 MyTable.some_field # -> Field, can be used to query etc. 

768 

769 row = MyTable.first() # returns instance of MyTable 

770 

771 # row.update_or_insert(...) # shouldn't work! 

772 

773 row.some_field # -> int, with actual data 

774 

775 """ 

776 

777 # set up by db.define: 

778 # _db: TypeDAL | None = None 

779 # _table: Table | None = None 

780 _db: TypeDAL | None = None 

781 _table: Table | None = None 

782 _relationships: dict[str, Relationship[Any]] | None = None 

783 

784 ######################### 

785 # TypeDAL custom logic: # 

786 ######################### 

787 

788 def __set_internals__(self, db: pydal.DAL, table: Table, relationships: dict[str, Relationship[Any]]) -> None: 

789 """ 

790 Store the related database and pydal table for later usage. 

791 """ 

792 self._db = db 

793 self._table = table 

794 self._relationships = relationships 

795 

796 def __getattr__(self, col: str) -> Optional[Field]: 

797 """ 

798 Magic method used by TypedTableMeta to get a database field with dot notation on a class. 

799 

800 Example: 

801 SomeTypedTable.col -> db.table.col (via TypedTableMeta.__getattr__) 

802 

803 """ 

804 if self._table: 

805 return getattr(self._table, col, None) 

806 

807 return None 

808 

809 def _ensure_table_defined(self) -> Table: 

810 if not self._table: 

811 raise EnvironmentError("@define or db.define is not called on this class yet!") 

812 return self._table 

813 

814 def __iter__(self) -> typing.Generator[Field, None, None]: 

815 """ 

816 Loop through the columns of this model. 

817 """ 

818 table = self._ensure_table_defined() 

819 yield from iter(table) 

820 

821 def __getitem__(self, item: str) -> Field: 

822 """ 

823 Allow dict notation to get a column of this table (-> Field instance). 

824 """ 

825 table = self._ensure_table_defined() 

826 return table[item] 

827 

828 def __str__(self) -> str: 

829 """ 

830 Normally, just returns the underlying table name, but with a fallback if the model is unbound. 

831 """ 

832 if self._table: 

833 return str(self._table) 

834 else: 

835 return f"<unbound table {self.__name__}>" 

836 

837 def from_row(self: Type[T_MetaInstance], row: pydal.objects.Row) -> T_MetaInstance: 

838 """ 

839 Create a model instance from a pydal row. 

840 """ 

841 return self(row) 

842 

843 def all(self: Type[T_MetaInstance]) -> "TypedRows[T_MetaInstance]": 

844 """ 

845 Return all rows for this model. 

846 """ 

847 return self.collect() 

848 

849 def get_relationships(self) -> dict[str, Relationship[Any]]: 

850 """ 

851 Return the registered relationships of the current model. 

852 """ 

853 return self._relationships or {} 

854 

855 ########################## 

856 # TypeDAL Modified Logic # 

857 ########################## 

858 

859 def insert(self: Type[T_MetaInstance], **fields: Any) -> T_MetaInstance: 

860 """ 

861 This is only called when db.define is not used as a decorator. 

862 

863 cls.__table functions as 'self' 

864 

865 Args: 

866 **fields: anything you want to insert in the database 

867 

868 Returns: an instance of the newly inserted row. 

869 

870 """ 

871 table = self._ensure_table_defined() 

872 

873 result = table.insert(**fields) 

874 # pydal returns the new row's id (an int-like Reference); wrap it in a model instance 

875 return self(result) 

876 

877 def _insert(self, **fields: Any) -> str: 

878 table = self._ensure_table_defined() 

879 

880 return str(table._insert(**fields)) 

881 

882 def bulk_insert(self: Type[T_MetaInstance], items: list[AnyDict]) -> "TypedRows[T_MetaInstance]": 

883 """ 

884 Insert multiple rows, returns a TypedRows set of new instances. 
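
Example (illustrative sketch; `Person` and its `name` field are placeholder names):
    people = Person.bulk_insert([{"name": "Alice"}, {"name": "Bob"}])
    # `people` is a TypedRows[Person] containing the freshly inserted rows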

885 """ 

886 table = self._ensure_table_defined() 

887 result = table.bulk_insert(items) 

888 return self.where(lambda row: row.id.belongs(result)).collect() 

889 

890 def update_or_insert( 

891 self: Type[T_MetaInstance], query: T_Query | AnyDict = DEFAULT, **values: Any 

892 ) -> T_MetaInstance: 

893 """ 

894 Update a row if query matches, else insert a new one. 

895 

896 Returns the created or updated instance. 
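
Example (illustrative sketch; `Person` and `name` are placeholder names):
    # updates the matching row, or inserts a new one when no row matches the query:
    person = Person.update_or_insert(Person.name == "Bob", name="Bob")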

897 """ 

898 table = self._ensure_table_defined() 

899 

900 if query is DEFAULT: 

901 record = table(**values) 

902 elif isinstance(query, dict): 

903 record = table(**query) 

904 else: 

905 record = table(query) 

906 

907 if not record: 

908 return self.insert(**values) 

909 

910 record.update_record(**values) 

911 return self(record) 

912 

913 def validate_and_insert( 

914 self: Type[T_MetaInstance], **fields: Any 

915 ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]: 

916 """ 

917 Validate input data and then insert a row. 

918 

919 Returns a tuple of (the created instance, a dict of errors). 
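
Example (illustrative sketch; `Person` is a placeholder model):
    person, errors = Person.validate_and_insert(name="Bob")
    if errors:
        ...  # validation failed, `person` is None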

920 """ 

921 table = self._ensure_table_defined() 

922 result = table.validate_and_insert(**fields) 

923 if row_id := result.get("id"): 

924 return self(row_id), None 

925 else: 

926 return None, result.get("errors") 

927 

928 def validate_and_update( 

929 self: Type[T_MetaInstance], query: Query, **fields: Any 

930 ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]: 

931 """ 

932 Validate input data and then update at most 1 row. 

933 

934 Returns a tuple of (the updated instance, a dict of errors). 

935 """ 

936 table = self._ensure_table_defined() 

937 

938 result = table.validate_and_update(query, **fields) 

939 

940 if errors := result.get("errors"): 

941 return None, errors 

942 elif row_id := result.get("id"): 

943 return self(row_id), None 

944 else: # pragma: no cover 

945 # update on query without result (shouldn't happen) 

946 return None, None 

947 

948 def validate_and_update_or_insert( 

949 self: Type[T_MetaInstance], query: Query, **fields: Any 

950 ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]: 

951 """ 

952 Validate input data and then update or insert (on at most 1 row). 

953 

954 Returns a tuple of (the updated/created instance, a dict of errors). 

955 """ 

956 table = self._ensure_table_defined() 

957 result = table.validate_and_update_or_insert(query, **fields) 

958 

959 if errors := result.get("errors"): 

960 return None, errors 

961 elif row_id := result.get("id"): 

962 return self(row_id), None 

963 else: # pragma: no cover 

964 # update on query without result (shouldn't happen) 

965 return None, None 

966 

967 def select(self: Type[T_MetaInstance], *a: Any, **kw: Any) -> "QueryBuilder[T_MetaInstance]": 

968 """ 

969 See QueryBuilder.select! 

970 """ 

971 return QueryBuilder(self).select(*a, **kw) 

972 

973 def paginate(self: Type[T_MetaInstance], limit: int, page: int = 1) -> "PaginatedRows[T_MetaInstance]": 

974 """ 

975 See QueryBuilder.paginate! 
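
Example (illustrative sketch; `Person` is a placeholder model):
    page_two = Person.paginate(limit=20, page=2)  # second page of 20 rows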

976 """ 

977 return QueryBuilder(self).paginate(limit=limit, page=page) 

978 

979 def chunk(self: Type[T_MetaInstance], chunk_size: int) -> typing.Generator["TypedRows[T_MetaInstance]", Any, None]: 

980 """ 

981 See QueryBuilder.chunk! 
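
Example (illustrative sketch; `Person` is a placeholder model):
    for batch in Person.chunk(100):  # yields batches of rows as TypedRows
        for person in batch:
            ...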

982 """ 

983 return QueryBuilder(self).chunk(chunk_size) 

984 

985 def where(self: Type[T_MetaInstance], *a: Any, **kw: Any) -> "QueryBuilder[T_MetaInstance]": 

986 """ 

987 See QueryBuilder.where! 

988 """ 

989 return QueryBuilder(self).where(*a, **kw) 

990 

991 def cache(self: Type[T_MetaInstance], *deps: Any, **kwargs: Any) -> "QueryBuilder[T_MetaInstance]": 

992 """ 

993 See QueryBuilder.cache! 

994 """ 

995 return QueryBuilder(self).cache(*deps, **kwargs) 

996 

997 def count(self: Type[T_MetaInstance]) -> int: 

998 """ 

999 See QueryBuilder.count! 

1000 """ 

1001 return QueryBuilder(self).count() 

1002 

1003 def first(self: Type[T_MetaInstance]) -> T_MetaInstance | None: 

1004 """ 

1005 See QueryBuilder.first! 

1006 """ 

1007 return QueryBuilder(self).first() 

1008 

1009 def first_or_fail(self: Type[T_MetaInstance]) -> T_MetaInstance: 

1010 """ 

1011 See QueryBuilder.first_or_fail! 

1012 """ 

1013 return QueryBuilder(self).first_or_fail() 

1014 

1015 def join( 

1016 self: Type[T_MetaInstance], 

1017 *fields: str | Type["TypedTable"], 

1018 method: JOIN_OPTIONS = None, 

1019 on: OnQuery | list[Expression] | Expression = None, 

1020 condition: Condition = None, 

1021 ) -> "QueryBuilder[T_MetaInstance]": 

1022 """ 

1023 See QueryBuilder.join! 

1024 """ 

1025 return QueryBuilder(self).join(*fields, on=on, condition=condition, method=method) 

1026 

1027 def collect(self: Type[T_MetaInstance], verbose: bool = False) -> "TypedRows[T_MetaInstance]": 

1028 """ 

1029 See QueryBuilder.collect! 

1030 """ 

1031 return QueryBuilder(self).collect(verbose=verbose) 

1032 

1033 @property 

1034 def ALL(cls) -> pydal.objects.SQLALL: 

1035 """ 

1036 Select all fields for this table. 

1037 """ 

1038 table = cls._ensure_table_defined() 

1039 

1040 return table.ALL 

1041 

1042 ########################## 

1043 # TypeDAL Shadowed Logic # 

1044 ########################## 

1045 fields: list[str] 

1046 

1047 # other table methods: 

1048 

1049 def truncate(self, mode: str = "") -> None: 

1050 """ 

1051 Remove all data and reset index. 

1052 """ 

1053 table = self._ensure_table_defined() 

1054 table.truncate(mode) 

1055 

1056 def drop(self, mode: str = "") -> None: 

1057 """ 

1058 Remove the underlying table. 

1059 """ 

1060 table = self._ensure_table_defined() 

1061 table.drop(mode) 

1062 

1063 def create_index(self, name: str, *fields: Field | str, **kwargs: Any) -> bool: 

1064 """ 

1065 Add an index on some columns of this table. 
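
Example (illustrative sketch; `Person` and `name` are placeholder names):
    Person.create_index("person_name_idx", Person.name)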

1066 """ 

1067 table = self._ensure_table_defined() 

1068 result = table.create_index(name, *fields, **kwargs) 

1069 return typing.cast(bool, result) 

1070 

1071 def drop_index(self, name: str, if_exists: bool = False) -> bool: 

1072 """ 

1073 Remove an index from this table. 

1074 """ 

1075 table = self._ensure_table_defined() 

1076 result = table.drop_index(name, if_exists) 

1077 return typing.cast(bool, result) 

1078 

1079 def import_from_csv_file( 

1080 self, 

1081 csvfile: typing.TextIO, 

1082 id_map: dict[str, str] = None, 

1083 null: Any = "<NULL>", 

1084 unique: str = "uuid", 

1085 id_offset: dict[str, int] = None, # id_offset used only when id_map is None 

1086 transform: typing.Callable[[dict[Any, Any]], dict[Any, Any]] = None, 

1087 validate: bool = False, 

1088 encoding: str = "utf-8", 

1089 delimiter: str = ",", 

1090 quotechar: str = '"', 

1091 quoting: int = csv.QUOTE_MINIMAL, 

1092 restore: bool = False, 

1093 **kwargs: Any, 

1094 ) -> None: 

1095 """ 

1096 Load a csv file into the database. 

1097 """ 

1098 table = self._ensure_table_defined() 

1099 table.import_from_csv_file( 

1100 csvfile, 

1101 id_map=id_map, 

1102 null=null, 

1103 unique=unique, 

1104 id_offset=id_offset, 

1105 transform=transform, 

1106 validate=validate, 

1107 encoding=encoding, 

1108 delimiter=delimiter, 

1109 quotechar=quotechar, 

1110 quoting=quoting, 

1111 restore=restore, 

1112 **kwargs, 

1113 ) 

1114 

1115 def on(self, query: Query | bool) -> Expression: 

1116 """ 

1117 Shadow Table.on. 

1118 

1119 Used for joins. 

1120 

1121 See Also: 

1122 http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#One-to-many-relation 

1123 """ 

1124 table = self._ensure_table_defined() 

1125 return typing.cast(Expression, table.on(query)) 

1126 

1127 def with_alias(self: Type[T_MetaInstance], alias: str) -> Type[T_MetaInstance]: 

1128 """ 

1129 Shadow Table.with_alias. 

1130 

1131 Useful for joins when joining the same table multiple times. 

1132 

1133 See Also: 

1134 http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#One-to-many-relation 

1135 """ 

1136 table = self._ensure_table_defined() 

1137 return typing.cast(Type[T_MetaInstance], table.with_alias(alias)) 

1138 

1139 # hooks: 

1140 def before_insert( 

1141 cls: Type[T_MetaInstance], 

1142 fn: typing.Callable[[T_MetaInstance], Optional[bool]] | typing.Callable[[OpRow], Optional[bool]], 

1143 ) -> Type[T_MetaInstance]: 

1144 """ 

1145 Add a before insert hook. 
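
Example (illustrative sketch; per pydal's hook convention, returning a truthy value aborts the insert):
    MyTable.before_insert(lambda row: print("about to insert", row))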

1146 """ 

1147 cls._before_insert.append(fn) # type: ignore 

1148 return cls 

1149 

1150 def after_insert( 

1151 cls: Type[T_MetaInstance], 

1152 fn: ( 

1153 typing.Callable[[T_MetaInstance, Reference], Optional[bool]] 

1154 | typing.Callable[[OpRow, Reference], Optional[bool]] 

1155 ), 

1156 ) -> Type[T_MetaInstance]: 

1157 """ 

1158 Add an after insert hook. 

1159 """ 

1160 cls._after_insert.append(fn) # type: ignore 

1161 return cls 

1162 

1163 def before_update( 

1164 cls: Type[T_MetaInstance], 

1165 fn: typing.Callable[[Set, T_MetaInstance], Optional[bool]] | typing.Callable[[Set, OpRow], Optional[bool]], 

1166 ) -> Type[T_MetaInstance]: 

1167 """ 

1168 Add a before update hook. 

1169 """ 

1170 cls._before_update.append(fn) # type: ignore 

1171 return cls 

1172 

1173 def after_update( 

1174 cls: Type[T_MetaInstance], 

1175 fn: typing.Callable[[Set, T_MetaInstance], Optional[bool]] | typing.Callable[[Set, OpRow], Optional[bool]], 

1176 ) -> Type[T_MetaInstance]: 

1177 """ 

1178 Add an after update hook. 

1179 """ 

1180 cls._after_update.append(fn) # type: ignore 

1181 return cls 

1182 

1183 def before_delete(cls: Type[T_MetaInstance], fn: typing.Callable[[Set], Optional[bool]]) -> Type[T_MetaInstance]: 

1184 """ 

1185 Add a before delete hook. 

1186 """ 

1187 cls._before_delete.append(fn) 

1188 return cls 

1189 

1190 def after_delete(cls: Type[T_MetaInstance], fn: typing.Callable[[Set], Optional[bool]]) -> Type[T_MetaInstance]: 

1191 """ 

1192 Add an after delete hook. 

1193 """ 

1194 cls._after_delete.append(fn) 

1195 return cls 

1196 

1197 

1198class TypedField(Expression, typing.Generic[T_Value]): # pragma: no cover 

1199 """ 

1200 Typed version of pydal.Field, which will be converted to a normal Field in the background. 
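
Example (illustrative sketch; `Person`, `name` and `nickname` are placeholder names):
    class Person(TypedTable):
        name: TypedField[str]  # annotation style
        nickname = TypedField(str, default="unknown", notnull=False)  # instance style with extra Field kwargs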

1201 """ 

1202 

1203 # will be set by .bind on db.define 

1204 name = "" 

1205 _db: Optional[pydal.DAL] = None 

1206 _rname: Optional[str] = None 

1207 _table: Optional[Table] = None 

1208 _field: Optional[Field] = None 

1209 

1210 _type: T_annotation 

1211 kwargs: Any 

1212 

1213 requires: Validator | typing.Iterable[Validator] 

1214 

1215 def __init__(self, _type: Type[T_Value] | types.UnionType = str, /, **settings: Any) -> None: # type: ignore 

1216 """ 

1217 A TypedField should not be initialized manually; the typed field helpers from `fields.py` should be used instead! 

1218 """ 

1219 self._type = _type 

1220 self.kwargs = settings 

1221 # super().__init__() 

1222 

1223 @typing.overload 

1224 def __get__(self, instance: T_MetaInstance, owner: Type[T_MetaInstance]) -> T_Value: # pragma: no cover 

1225 """ 

1226 row.field -> (actual data). 

1227 """ 

1228 

1229 @typing.overload 

1230 def __get__(self, instance: None, owner: "Type[TypedTable]") -> "TypedField[T_Value]": # pragma: no cover 

1231 """ 

1232 Table.field -> Field. 

1233 """ 

1234 

1235 def __get__( 

1236 self, instance: T_MetaInstance | None, owner: Type[T_MetaInstance] 

1237 ) -> typing.Union[T_Value, "TypedField[T_Value]"]: 

1238 """ 

1239 Since this class is a Descriptor field, \ 

1240 it returns something else depending on if it's called on a class or instance. 

1241 

1242 (this is mostly for mypy/typing) 

1243 """ 

1244 if instance: 

1245 # this is only reached in a very specific case: 

1246 # an instance of the object was created with a specific set of fields selected (excluding the current one) 

1247 # in that case, no value was stored in the owner -> return None (since the field was not selected) 

1248 return typing.cast(T_Value, None) # cast as T_Value so mypy understands it for selected fields 

1249 else: 

1250 # getting as class -> return actual field so pydal understands it when using in query etc. 

1251 return typing.cast(TypedField[T_Value], self._field) # pretend it's still typed for IDE support 

1252 

1253 def __str__(self) -> str: 

1254 """ 

1255 String representation of a Typed Field. 

1256 

1257 If `type` is set explicitly (e.g. TypedField(str, type="text")), that type is used: `TypedField.text`, 

1258 otherwise the type annotation is used (e.g. TypedField(str) -> TypedField.str) 

1259 """ 

1260 return str(self._field) if self._field else "" 

1261 

1262 def __repr__(self) -> str: 

1263 """ 

1264 More detailed string representation of a Typed Field. 

1265 

1266 Uses __str__ and adds the provided extra options (kwargs) in the representation. 

1267 """ 

1268 s = self.__str__() 

1269 

1270 if "type" in self.kwargs: 

1271 # manual type in kwargs supplied 

1272 t = self.kwargs["type"] 

1273 elif issubclass(type, type(self._type)): 

1274 # normal type, str.__name__ = 'str' 

1275 t = getattr(self._type, "__name__", str(self._type)) 

1276 elif t_args := typing.get_args(self._type): 

1277 # list[str] -> 'str' 

1278 t = t_args[0].__name__ 

1279 else: # pragma: no cover 

1280 # fallback - something else, may not even happen, I'm not sure 

1281 t = self._type 

1282 

1283 s = f"TypedField[{t}].{s}" if s else f"TypedField[{t}]" 

1284 

1285 kw = self.kwargs.copy() 

1286 kw.pop("type", None) 

1287 return f"<{s} with options {kw}>" 

1288 

1289 def _to_field(self, extra_kwargs: typing.MutableMapping[str, Any]) -> Optional[str]: 

1290 """ 

1291 Convert a Typed Field instance to a pydal.Field. 

1292 """ 

1293 other_kwargs = self.kwargs.copy() 

1294 extra_kwargs.update(other_kwargs) 

1295 return extra_kwargs.pop("type", False) or TypeDAL._annotation_to_pydal_fieldtype(self._type, extra_kwargs) 

1296 

1297 def bind(self, field: pydal.objects.Field, table: pydal.objects.Table) -> None: 

1298 """ 

1299 Bind the right db/table/field info to this class, so queries can be made using `Class.field == ...`. 

1300 """ 

1301 self._table = table 

1302 self._field = field 

1303 

1304 def __getattr__(self, key: str) -> Any: 

1305 """ 

1306 If the regular getattribute does not work, try to get info from the related Field. 

1307 """ 

1308 with contextlib.suppress(AttributeError): 

1309 return super().__getattribute__(key) 

1310 

1311 # try on actual field: 

1312 return getattr(self._field, key) 

1313 

1314 def __eq__(self, other: Any) -> Query: 

1315 """ 

1316 Performing == on a Field will result in a Query. 

1317 """ 

1318 return typing.cast(Query, self._field == other) 

1319 

1320 def __ne__(self, other: Any) -> Query: 

1321 """ 

1322 Performing != on a Field will result in a Query. 

1323 """ 

1324 return typing.cast(Query, self._field != other) 

1325 

1326 def __gt__(self, other: Any) -> Query: 

1327 """ 

1328 Performing > on a Field will result in a Query. 

1329 """ 

1330 return typing.cast(Query, self._field > other) 

1331 

1332 def __lt__(self, other: Any) -> Query: 

1333 """ 

1334 Performing < on a Field will result in a Query. 

1335 """ 

1336 return typing.cast(Query, self._field < other) 

1337 

1338 def __ge__(self, other: Any) -> Query: 

1339 """ 

1340 Performing >= on a Field will result in a Query. 

1341 """ 

1342 return typing.cast(Query, self._field >= other) 

1343 

1344 def __le__(self, other: Any) -> Query: 

1345 """ 

1346 Performing <= on a Field will result in a Query. 

1347 """ 

1348 return typing.cast(Query, self._field <= other) 

1349 

1350 def __hash__(self) -> int: 

1351 """ 

1352 Shadow Field.__hash__. 

1353 """ 

1354 return hash(self._field) 

1355 

1356 def __invert__(self) -> Expression: 

1357 """ 

1358 Performing ~ on a Field will result in an Expression. 

1359 """ 

1360 if not self._field: # pragma: no cover 

1361 raise ValueError("Unbound Field can not be inverted!") 

1362 

1363 return typing.cast(Expression, ~self._field) 

1364 

1365 def lower(self) -> Expression: 

1366 """ 

1367 For string-fields: compare lowercased values. 

1368 """ 

1369 if not self._field: # pragma: no cover 

1370 raise ValueError("Unbound Field can not be lowered!") 

1371 

1372 return typing.cast(Expression, self._field.lower()) 

1373 

1374 # ... etc 

1375 

1376 

1377class _TypedTable: 

1378 """ 

1379 This class is a final shared parent between TypedTable and Mixins. 

1380 

1381 This needs to exist because otherwise the __on_define__ of Mixins are not executed. 

1382 Notably, this class exists at a level ABOVE the `metaclass=TableMeta`, 

1383 because otherwise typing gets confused when Mixins are used and multiple types could satisfy 

1384 generic 'T subclass of TypedTable' 

1385 -> Setting 'TypedTable' as the parent for Mixin does not work at runtime (and works semi at type check time) 

1386 """ 

1387 

1388 id: "TypedField[int]" 

1389 

1390 _before_insert: list[typing.Callable[[Self], Optional[bool]] | typing.Callable[[OpRow], Optional[bool]]] 

1391 _after_insert: list[ 

1392 typing.Callable[[Self, Reference], Optional[bool]] | typing.Callable[[OpRow, Reference], Optional[bool]] 

1393 ] 

1394 _before_update: list[typing.Callable[[Set, Self], Optional[bool]] | typing.Callable[[Set, OpRow], Optional[bool]]] 

1395 _after_update: list[typing.Callable[[Set, Self], Optional[bool]] | typing.Callable[[Set, OpRow], Optional[bool]]] 

1396 _before_delete: list[typing.Callable[[Set], Optional[bool]]] 

1397 _after_delete: list[typing.Callable[[Set], Optional[bool]]] 

1398 

1399 @classmethod 

1400 def __on_define__(cls, db: TypeDAL) -> None: 

1401 """ 

1402 Method that can be implemented by tables to do an action after db.define is completed. 

1403 

1404 This can be useful if you need to add something like requires=IS_NOT_IN_DB(db, "table.field"), 

1405 where you need a reference to the current database, which may not exist yet when defining the model. 
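
Example (minimal sketch; `user`/`email` are placeholder names, IS_NOT_IN_DB comes from pydal.validators):
    @classmethod
    def __on_define__(cls, db: TypeDAL) -> None:
        db.user.email.requires = IS_NOT_IN_DB(db, "user.email")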

1406 """ 

1407 

1408 

1409class TypedTable(_TypedTable, metaclass=TableMeta): 

1410 """ 

1411 Enhanced modeling system on top of pydal's Table that adds typing and additional functionality. 

1412 """ 

1413 

1414 # set up by 'new': 

1415 _row: Row | None = None 

1416 

1417 _with: list[str] 

1418 

1419 def _setup_instance_methods(self) -> None: 

1420 self.as_dict = self._as_dict # type: ignore 

1421 self.__json__ = self.as_json = self._as_json # type: ignore 

1422 # self.as_yaml = self._as_yaml # type: ignore 

1423 self.as_xml = self._as_xml # type: ignore 

1424 

1425 self.update = self._update # type: ignore 

1426 

1427 self.delete_record = self._delete_record # type: ignore 

1428 self.update_record = self._update_record # type: ignore 

1429 

1430 def __new__( 

1431 cls, row_or_id: typing.Union[Row, Query, pydal.objects.Set, int, str, None, "TypedTable"] = None, **filters: Any 

1432 ) -> Self: 

1433 """ 

1434 Create a typed model instance from an existing row, ID or query. 

1435 

1436 Examples: 

1437 MyTable(1) 

1438 MyTable(id=1) 

1439 MyTable(MyTable.id == 1) 

1440 """ 

1441 table = cls._ensure_table_defined() 

1442 inst = super().__new__(cls) 

1443 

1444 if isinstance(row_or_id, TypedTable): 

1445 # existing typed table instance! 

1446 return typing.cast(Self, row_or_id) 

1447 

1448 elif isinstance(row_or_id, pydal.objects.Row): 

1449 row = row_or_id 

1450 elif row_or_id is not None: 

1451 row = table(row_or_id, **filters) 

1452 elif filters: 

1453 row = table(**filters) 

1454 else: 

1455 # dummy object 

1456 return inst 

1457 

1458 if not row: 

1459 return None # type: ignore 

1460 

1461 inst._row = row 

1462 inst.__dict__.update(row) 

1463 inst._setup_instance_methods() 

1464 return inst 

1465 

1466 def __iter__(self) -> typing.Generator[Any, None, None]: 

1467 """ 

1468 Allows looping through the columns. 

1469 """ 

1470 row = self._ensure_matching_row() 

1471 yield from iter(row) 

1472 

1473 def __getitem__(self, item: str) -> Any: 

1474 """ 

1475 Allows dictionary notation to get columns. 

1476 """ 

1477 if item in self.__dict__: 

1478 return self.__dict__.get(item) 

1479 

1480 # fallback to lookup in row 

1481 if self._row: 

1482 return self._row[item] 

1483 

1484 # nothing found! 

1485 raise KeyError(item) 

1486 

1487 def __getattr__(self, item: str) -> Any: 

1488 """ 

1489 Allows dot notation to get columns. 

1490 """ 

1491 if value := self.get(item): 

1492 return value 

1493 

1494 raise AttributeError(item) 

1495 

1496 def get(self, item: str, default: Any = None) -> Any: 

1497 """ 

1498 Try to get a column from this instance, else return default. 

1499 """ 

1500 try: 

1501 return self.__getitem__(item) 

1502 except KeyError: 

1503 return default 

1504 

1505 def __setitem__(self, key: str, value: Any) -> None: 

1506 """ 

1507 Data can both be updated via dot and dict notation. 

1508 """ 

1509 return setattr(self, key, value) 

1510 

1511 def __int__(self) -> int: 

1512 """ 

1513 Calling int on a model instance will return its id. 

1514 """ 

1515 return getattr(self, "id", 0) 

1516 

1517 def __bool__(self) -> bool: 

1518 """ 

1519 If the instance has an underlying row with data, it is truthy. 

1520 """ 

1521 return bool(getattr(self, "_row", False)) 

1522 

1523 def _ensure_matching_row(self) -> Row: 

1524 if not getattr(self, "_row", None): 

1525 raise EnvironmentError("Trying to access non-existant row. Maybe it was deleted or not yet initialized?") 

1526 return self._row 

1527 

1528 def __repr__(self) -> str: 

1529 """ 

1530 String representation of the model instance. 

1531 """ 

1532 model_name = self.__class__.__name__ 

1533 model_data = {} 

1534 

1535 if self._row: 

1536 model_data = self._row.as_json() 

1537 

1538 details = model_name 

1539 details += f"({model_data})" 

1540 

1541 if relationships := getattr(self, "_with", []): 

1542 details += f" + {relationships}" 

1543 

1544 return f"<{details}>" 

1545 

1546 # serialization 

1547 # underscore variants work for class instances (set up by _setup_instance_methods) 

1548 

1549 @classmethod 

1550 def as_dict(cls, flat: bool = False, sanitize: bool = True) -> AnyDict: 

1551 """ 

1552 Dump the object to a plain dict. 

1553 

1554 Can be used as both a class or instance method: 

1555 - dumps the table info if it's a class 

1556 - dumps the row info if it's an instance (see _as_dict) 
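
Example (illustrative sketch; `Person` is a placeholder model):
    Person.as_dict()  # structure of the table as a dict
    if person := Person.first():
        person.as_dict()  # data of this row as a dict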

1557 """ 

1558 table = cls._ensure_table_defined() 

1559 result = table.as_dict(flat, sanitize) 

1560 return typing.cast(AnyDict, result) 

1561 

1562 @classmethod 

1563 def as_json(cls, sanitize: bool = True, indent: Optional[int] = None, **kwargs: Any) -> str: 

1564 """ 

1565 Dump the object to json. 

1566 

1567 Can be used as both a class or instance method: 

1568 - dumps the table info if it's a class 

1569 - dumps the row info if it's an instance (see _as_json) 

1570 """ 

1571 data = cls.as_dict(sanitize=sanitize) 

1572 return as_json.encode(data, indent=indent, **kwargs) 

1573 

1574 @classmethod 

1575 def as_xml(cls, sanitize: bool = True) -> str: # pragma: no cover 

1576 """ 

1577 Dump the object to xml. 

1578 

1579 Can be used as both a class or instance method: 

1580 - dumps the table info if it's a class 

1581 - dumps the row info if it's an instance (see _as_xml) 

1582 """ 

1583 table = cls._ensure_table_defined() 

1584 return typing.cast(str, table.as_xml(sanitize)) 

1585 

1586 @classmethod 

1587 def as_yaml(cls, sanitize: bool = True) -> str: 

1588 """ 

1589 Dump the object to yaml. 

1590 

1591 Can be used as both a class or instance method: 

1592 - dumps the table info if it's a class 

1593 - dumps the row info if it's an instance (see _as_yaml) 

1594 """ 

1595 table = cls._ensure_table_defined() 

1596 return typing.cast(str, table.as_yaml(sanitize)) 

1597 

1598 def _as_dict( 

1599 self, datetime_to_str: bool = False, custom_types: typing.Iterable[type] | type | None = None 

1600 ) -> AnyDict: 

1601 row = self._ensure_matching_row() 

1602 

1603 result = row.as_dict(datetime_to_str=datetime_to_str, custom_types=custom_types) 

1604 

1605 def asdict_method(obj: Any) -> Any: # pragma: no cover 

1606 if hasattr(obj, "_as_dict"): # typedal 

1607 return obj._as_dict() 

1608 elif hasattr(obj, "as_dict"): # pydal 

1609 return obj.as_dict() 

1610 else: # something else?? 

1611 return obj.__dict__ 

1612 

1613 if _with := getattr(self, "_with", None): 

1614 for relationship in _with: 

1615 data = self.get(relationship) 

1616 

1617 if isinstance(data, list): 

1618 data = [asdict_method(_) for _ in data] 

1619 elif data: 

1620 data = asdict_method(data) 

1621 

1622 result[relationship] = data 

1623 

1624 return typing.cast(AnyDict, result) 

1625 

1626 def _as_json( 

1627 self, 

1628 default: typing.Callable[[Any], Any] = None, 

1629 indent: Optional[int] = None, 

1630 **kwargs: Any, 

1631 ) -> str: 

1632 data = self._as_dict() 

1633 return as_json.encode(data, default=default, indent=indent, **kwargs) 

1634 

1635 def _as_xml(self, sanitize: bool = True) -> str: # pragma: no cover 

1636 row = self._ensure_matching_row() 

1637 return typing.cast(str, row.as_xml(sanitize)) 

1638 

1639 # def _as_yaml(self, sanitize: bool = True) -> str: 

1640 # row = self._ensure_matching_row() 

1641 # return typing.cast(str, row.as_yaml(sanitize)) 

1642 

1643 def __setattr__(self, key: str, value: Any) -> None: 

1644 """ 

1645 When setting a property on a Typed Table model instance, also update the underlying row. 

1646 """ 

1647 if self._row and key in self._row.__dict__ and not callable(value): 

1648 # enables `row.key = value; row.update_record()` 

1649 self._row[key] = value 

1650 

1651 super().__setattr__(key, value) 

1652 

1653 @classmethod 

1654 def update(cls: Type[T_MetaInstance], query: Query, **fields: Any) -> T_MetaInstance | None: 

1655 """ 

1656 Update one record. 

1657 

1658 Example: 

1659 MyTable.update(MyTable.id == 1, name="NewName") -> MyTable 

1660 """ 

1661 # todo: update multiple? 

1662 if record := cls(query): 

1663 return record.update_record(**fields) 

1664 else: 

1665 return None 

1666 

1667 def _update(self: T_MetaInstance, **fields: Any) -> T_MetaInstance: 

1668 row = self._ensure_matching_row() 

1669 row.update(**fields) 

1670 self.__dict__.update(**fields) 

1671 return self 

1672 

1673 def _update_record(self: T_MetaInstance, **fields: Any) -> T_MetaInstance: 

1674 row = self._ensure_matching_row() 

1675 new_row = row.update_record(**fields) 

1676 self.update(**new_row) 

1677 return self 

1678 

1679 def update_record(self: T_MetaInstance, **fields: Any) -> T_MetaInstance: # pragma: no cover 

1680 """ 

1681 Here as a placeholder for _update_record. 

1682 

1683 Will be replaced on instance creation! 

1684 """ 

1685 return self._update_record(**fields) 

1686 

1687 def _delete_record(self) -> int: 

1688 """ 

1689 Actual logic in `pydal.helpers.classes.RecordDeleter`. 

1690 """ 

1691 row = self._ensure_matching_row() 

1692 result = row.delete_record() 

1693 self.__dict__ = {} # empty self, since row is no more. 

1694 self._row = None # just to be sure 

1695 self._setup_instance_methods() 

1696 # ^ instance methods might've been deleted by emptying dict, 

1697 # but we still want .as_dict to show an error, not the table's as_dict. 

1698 return typing.cast(int, result) 

1699 

1700 def delete_record(self) -> int: # pragma: no cover 

1701 """ 

1702 Here as a placeholder for _delete_record. 

1703 

1704 Will be replaced on instance creation! 

1705 """ 

1706 return self._delete_record() 

1707 

1708 # __del__ is also called at the end of a scope, so don't remove records on every del!


1709 

1710 # pickling: 

1711 

1712 def __getstate__(self) -> AnyDict: 

1713 """ 

1714 State to save when pickling. 

1715 

1716 Prevents db connection from being pickled. 

1717 Similar to as_dict but without changing the data of the relationships (dill does that recursively) 

1718 """ 

1719 row = self._ensure_matching_row() 

1720 result: AnyDict = row.as_dict() 

1721 

1722 if _with := getattr(self, "_with", None): 

1723 result["_with"] = _with 

1724 for relationship in _with: 

1725 data = self.get(relationship) 

1726 

1727 result[relationship] = data 

1728 

1729 result["_row"] = self._row.as_json() if self._row else "" 

1730 return result 

1731 

1732 def __setstate__(self, state: AnyDict) -> None: 

1733 """ 

1734 Used by dill when loading from a bytestring. 

1735 """ 

1736 # as_dict also includes table info, so dump as json to only get the actual row data 

1737 # then create a new (more empty) row object: 

1738 state["_row"] = Row(json.loads(state["_row"])) 

1739 self.__dict__ |= state 

1740 
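A sketch of the pickling round-trip these two methods enable; dill is what the caching layer uses, and the `Person` instance is hypothetical:

```
import dill

# Person is a hypothetical TypedTable model, loaded from the database:
person = Person.where(id=1).first_or_fail()

blob = dill.dumps(person)     # __getstate__ drops the db connection and serializes the row as JSON
restored = dill.loads(blob)   # __setstate__ rebuilds a Row object from that JSON
print(restored.name)
```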

1741 

1742# backwards compat: 

1743TypedRow = TypedTable 

1744 

1745 

1746class TypedRows(typing.Collection[T_MetaInstance], Rows): 

1747 """ 

1748 Slightly enhanced and typed functionality on top of pydal Rows (the result of a select). 

1749 """ 

1750 

1751 records: dict[int, T_MetaInstance] 

1752 # _rows: Rows 

1753 model: Type[T_MetaInstance] 

1754 metadata: Metadata 

1755 

1756 # pseudo-properties: actually stored in _rows 

1757 db: TypeDAL 

1758 colnames: list[str] 

1759 fields: list[Field] 

1760 colnames_fields: list[Field] 

1761 response: list[tuple[Any, ...]] 

1762 

1763 def __init__( 

1764 self, 

1765 rows: Rows, 

1766 model: Type[T_MetaInstance], 

1767 records: dict[int, T_MetaInstance] = None, 

1768 metadata: Metadata = None, 

1769 ) -> None: 

1770 """ 

1771 Should not be called manually! 

1772 

1773 Normally, the `records` from an existing `Rows` object are used 

1774 but these can be overwritten with a `records` dict. 

1775 `metadata` can be any (un)structured data. 

1776 `model` is a Typed Table class. 

1777 """ 

1778 records = records or {row.id: model(row) for row in rows} 

1779 super().__init__(rows.db, records, rows.colnames, rows.compact, rows.response, rows.fields) 

1780 self.model = model 

1781 self.metadata = metadata or {} 

1782 self.colnames = rows.colnames 

1783 

1784 def __len__(self) -> int: 

1785 """ 

1786 Return the count of rows. 

1787 """ 

1788 return len(self.records) 

1789 

1790 def __iter__(self) -> typing.Iterator[T_MetaInstance]: 

1791 """ 

1792 Loop through the rows. 

1793 """ 

1794 yield from self.records.values() 

1795 

1796 def __contains__(self, ind: Any) -> bool: 

1797 """ 

1798 Check if an id exists in this result set. 

1799 """ 

1800 return ind in self.records 

1801 

1802 def first(self) -> T_MetaInstance | None: 

1803 """ 

1804 Get the row with the lowest id. 

1805 """ 

1806 if not self.records: 

1807 return None 

1808 

1809 return next(iter(self)) 

1810 

1811 def last(self) -> T_MetaInstance | None: 

1812 """ 

1813 Get the row with the highest id. 

1814 """ 

1815 if not self.records: 

1816 return None 

1817 

1818 max_id = max(self.records.keys()) 

1819 return self[max_id] 

1820 
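A small sketch of working with a `TypedRows` result; the `Person` model and its `age` field are hypothetical:

```
# Person is a hypothetical TypedTable model:
rows = Person.where(lambda p: p.age >= 18).collect()   # TypedRows[Person]

first = rows.first()   # first record, or None when the result is empty
last = rows.last()     # record with the highest id, or None when empty

if 42 in rows:         # __contains__ checks membership by id
    print(rows[42])
```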

1821 def find( 

1822 self, f: typing.Callable[[T_MetaInstance], Query], limitby: tuple[int, int] = None 

1823 ) -> "TypedRows[T_MetaInstance]": 

1824 """ 

1825 Returns a new Rows object, a subset of the original object, filtered by the function `f`. 

1826 """ 

1827 if not self.records: 

1828 return self.__class__(self, self.model, {}) 

1829 

1830 records = {} 

1831 if limitby: 

1832 _min, _max = limitby 

1833 else: 

1834 _min, _max = 0, len(self) 

1835 count = 0 

1836 for i, row in self.records.items(): 

1837 if f(row): 

1838 if _min <= count: 

1839 records[i] = row 

1840 count += 1 

1841 if count == _max: 

1842 break 

1843 

1844 return self.__class__(self, self.model, records) 

1845 

1846 def exclude(self, f: typing.Callable[[T_MetaInstance], Query]) -> "TypedRows[T_MetaInstance]": 

1847 """ 

1848 Removes elements from the calling Rows object, filtered by the function `f`, \ 

1849 and returns a new Rows object containing the removed elements. 

1850 """ 

1851 if not self.records: 

1852 return self.__class__(self, self.model, {}) 

1853 removed = {} 

1854 to_remove = [] 

1855 for i in self.records: 

1856 row = self[i] 

1857 if f(row): 

1858 removed[i] = self.records[i] 

1859 to_remove.append(i) 

1860 

1861 [self.records.pop(i) for i in to_remove] 

1862 

1863 return self.__class__( 

1864 self, 

1865 self.model, 

1866 removed, 

1867 ) 

1868 

1869 def sort(self, f: typing.Callable[[T_MetaInstance], Any], reverse: bool = False) -> list[T_MetaInstance]: 

1870 """ 

1871 Returns a list of sorted elements (not sorted in place). 

1872 """ 

1873 return [r for (r, s) in sorted(zip(self.records.values(), self), key=lambda r: f(r[1]), reverse=reverse)] 

1874 
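A sketch of the three filtering helpers above, on the hypothetical `Person` result set (`rows`) from the earlier sketch:

```
# `rows` is an assumed TypedRows[Person]; `age` and `name` are hypothetical fields:
adults = rows.find(lambda row: row.age >= 18, limitby=(0, 10))  # new subset; `rows` is unchanged
minors = rows.exclude(lambda row: row.age < 18)                 # removed from `rows` and returned here
by_name = rows.sort(lambda row: row.name)                       # plain sorted list of instances
```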

1875 def __str__(self) -> str: 

1876 """ 

1877 Simple string representation. 

1878 """ 

1879 return f"<TypedRows with {len(self)} records>" 

1880 

1881 def __repr__(self) -> str: 

1882 """ 

1883 Print a table on repr(). 

1884 """ 

1885 data = self.as_dict() 

1886 headers = list(next(iter(data.values())).keys()) 

1887 return mktable(data, headers) 

1888 

1889 def group_by_value( 

1890 self, *fields: "str | Field | TypedField[T]", one_result: bool = False, **kwargs: Any 

1891 ) -> dict[T, list[T_MetaInstance]]: 

1892 """ 

1893 Group the rows by a specific field (which will be the dict key). 

1894 """ 

1895 kwargs["one_result"] = one_result 

1896 result = super().group_by_value(*fields, **kwargs) 

1897 return typing.cast(dict[T, list[T_MetaInstance]], result) 

1898 
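A sketch of `group_by_value`, assuming the hypothetical `Person` model has a `city` field:

```
# `rows` is an assumed TypedRows[Person]:
per_city = rows.group_by_value(Person.city)   # dict keyed by city value, e.g. {"Amsterdam": [<Person>, ...]}
for city, people in per_city.items():
    print(city, len(people))
```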

1899 def as_csv(self) -> str: 

1900 """ 

1901 Dump the data to csv. 

1902 """ 

1903 return typing.cast(str, super().as_csv()) 

1904 

1905 def as_dict( 

1906 self, 

1907 key: str = None, 

1908 compact: bool = False, 

1909 storage_to_dict: bool = False, 

1910 datetime_to_str: bool = False, 

1911 custom_types: list[type] = None, 

1912 ) -> dict[int, AnyDict]: 

1913 """ 

1914 Get the data in a dict of dicts. 

1915 """ 

1916 if any([key, compact, storage_to_dict, datetime_to_str, custom_types]): 

1917 # functionality not guaranteed 

1918 return typing.cast( 

1919 dict[int, AnyDict], 

1920 super().as_dict( 

1921 key or "id", 

1922 compact, 

1923 storage_to_dict, 

1924 datetime_to_str, 

1925 custom_types, 

1926 ), 

1927 ) 

1928 

1929 return {k: v.as_dict() for k, v in self.records.items()} 

1930 

1931 def as_json(self, default: typing.Callable[[Any], Any] = None, indent: Optional[int] = None, **kwargs: Any) -> str: 

1932 """ 

1933 Turn the data into a list of dicts and then dump to JSON. 

1934 """ 

1935 data = self.as_list() 

1936 

1937 return as_json.encode(data, default=default, indent=indent, **kwargs) 

1938 

1939 def json(self, default: typing.Callable[[Any], Any] = None, indent: Optional[int] = None, **kwargs: Any) -> str: 

1940 """ 

1941 Turn the data into a list of dicts and then dump to JSON. 

1942 """ 

1943 return self.as_json(default=default, indent=indent, **kwargs) 

1944 

1945 def as_list( 

1946 self, 

1947 compact: bool = False, 

1948 storage_to_dict: bool = False, 

1949 datetime_to_str: bool = False, 

1950 custom_types: list[type] = None, 

1951 ) -> list[AnyDict]: 

1952 """ 

1953 Get the data in a list of dicts. 

1954 """ 

1955 if any([compact, storage_to_dict, datetime_to_str, custom_types]): 

1956 return typing.cast(list[AnyDict], super().as_list(compact, storage_to_dict, datetime_to_str, custom_types)) 

1957 

1958 return [_.as_dict() for _ in self.records.values()] 

1959 

1960 def __getitem__(self, item: int) -> T_MetaInstance: 

1961 """ 

1962 You can get a specific row by ID from a TypedRows by using rows[id] notation. 

1963 

1964 Since pydal's implementation differs (they expect a list instead of a dict with id keys), 

1965 using rows[0] will return the first row, regardless of its id. 

1966 """ 

1967 try: 

1968 return self.records[item] 

1969 except KeyError as e: 

1970 if item == 0 and (row := self.first()): 

1971 # special case: pydal internals think Rows.records is a list, not a dict 

1972 return row 

1973 

1974 raise e 

1975 

1976 def get(self, item: int) -> typing.Optional[T_MetaInstance]: 

1977 """ 

1978 Get a row by ID, or receive None if it isn't in this result set. 

1979 """ 

1980 return self.records.get(item) 

1981 
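A sketch of the two lookup styles; the ids are illustrative and `rows` is the assumed result set from the earlier sketches:

```
row = rows[5]       # by id; raises KeyError when id 5 is not in this result set
row = rows.get(5)   # by id; returns None instead of raising
row = rows[0]       # pydal-compatible special case: the first row, regardless of its id
```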

1982 def update(self, **new_values: Any) -> bool: 

1983 """ 

1984 Update the current rows in the database with new_values. 

1985 """ 

1986 # cast to make mypy understand .id is a TypedField and not an int! 

1987 table = typing.cast(Type[TypedTable], self.model._ensure_table_defined()) 

1988 

1989 ids = set(self.column("id")) 

1990 query = table.id.belongs(ids) 

1991 return bool(self.db(query).update(**new_values)) 

1992 

1993 def delete(self) -> bool: 

1994 """ 

1995 Delete the currently selected rows from the database. 

1996 """ 

1997 # cast to make mypy understand .id is a TypedField and not an int! 

1998 table = typing.cast(Type[TypedTable], self.model._ensure_table_defined()) 

1999 

2000 ids = set(self.column("id")) 

2001 query = table.id.belongs(ids) 

2002 return bool(self.db(query).delete()) 

2003 
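A sketch of the bulk helpers above; the `active` field on the hypothetical `Person` model is an assumption:

```
# Person is a hypothetical TypedTable model:
rows = Person.where(lambda p: p.age > 120).collect()

rows.update(active=False)   # one UPDATE ... WHERE id IN (...) for all selected rows
rows.delete()               # one DELETE for the same ids; both return True on success
```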

2004 def join( 

2005 self, 

2006 field: "Field | TypedField[Any]", 

2007 name: str = None, 

2008 constraint: Query = None, 

2009 fields: list[str | Field] = None, 

2010 orderby: Optional[str | Field] = None, 

2011 ) -> T_MetaInstance: 

2012 """ 

2013 This can be used to JOIN with some relationships after the initial select. 

2014 

2015 Using the querybuilder's .join() method is prefered! 

2016 """ 

2017 result = super().join(field, name, constraint, fields or [], orderby) 

2018 return typing.cast(T_MetaInstance, result) 

2019 

2020 def export_to_csv_file( 

2021 self, 

2022 ofile: typing.TextIO, 

2023 null: Any = "<NULL>", 

2024 delimiter: str = ",", 

2025 quotechar: str = '"', 

2026 quoting: int = csv.QUOTE_MINIMAL, 

2027 represent: bool = False, 

2028 colnames: list[str] = None, 

2029 write_colnames: bool = True, 

2030 *args: Any, 

2031 **kwargs: Any, 

2032 ) -> None: 

2033 """ 

2034 Shadow export_to_csv_file from Rows, but with typing. 

2035 

2036 See http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#Exporting-and-importing-data 

2037 """ 

2038 super().export_to_csv_file( 

2039 ofile, 

2040 null, 

2041 *args, 

2042 delimiter=delimiter, 

2043 quotechar=quotechar, 

2044 quoting=quoting, 

2045 represent=represent, 

2046 colnames=colnames or self.colnames, 

2047 write_colnames=write_colnames, 

2048 **kwargs, 

2049 ) 

2050 

2051 @classmethod 

2052 def from_rows( 

2053 cls, rows: Rows, model: Type[T_MetaInstance], metadata: Metadata = None 

2054 ) -> "TypedRows[T_MetaInstance]": 

2055 """ 

2056 Internal method to convert a Rows object to a TypedRows. 

2057 """ 

2058 return cls(rows, model, metadata=metadata) 

2059 

2060 def __getstate__(self) -> AnyDict: 

2061 """ 

2062 Used by dill to dump to bytes (exclude db connection etc). 

2063 """ 

2064 return { 

2065 "metadata": json.dumps(self.metadata, default=str), 

2066 "records": self.records, 

2067 "model": str(self.model._table), 

2068 "colnames": self.colnames, 

2069 } 

2070 

2071 def __setstate__(self, state: AnyDict) -> None: 

2072 """ 

2073 Used by dill when loading from a bytestring. 

2074 """ 

2075 state["metadata"] = json.loads(state["metadata"]) 

2076 self.__dict__.update(state) 

2077 # db etc. set after undill by caching.py 

2078 

2079 

2080from .caching import ( # noqa: E402 

2081 _remove_cache, 

2082 _TypedalCache, 

2083 _TypedalCacheDependency, 

2084 create_and_hash_cache_key, 

2085 get_expire, 

2086 load_from_cache, 

2087 save_to_cache, 

2088) 

2089 

2090 

2091class QueryBuilder(typing.Generic[T_MetaInstance]): 

2092 """ 

2093 Abstraction on top of pydal's query system. 

2094 """ 

2095 

2096 model: Type[T_MetaInstance] 

2097 query: Query 

2098 select_args: list[Any] 

2099 select_kwargs: SelectKwargs 

2100 relationships: dict[str, Relationship[Any]] 

2101 metadata: Metadata 

2102 

2103 def __init__( 

2104 self, 

2105 model: Type[T_MetaInstance], 

2106 add_query: Optional[Query] = None, 

2107 select_args: Optional[list[Any]] = None, 

2108 select_kwargs: Optional[SelectKwargs] = None, 

2109 relationships: dict[str, Relationship[Any]] = None, 

2110 metadata: Metadata = None, 

2111 ): 

2112 """ 

2113 Normally, you wouldn't manually initialize a QueryBuilder but start using a method on a TypedTable. 

2114 

2115 Example: 

2116 MyTable.where(...) -> QueryBuilder[MyTable] 

2117 """ 

2118 self.model = model 

2119 table = model._ensure_table_defined() 

2120 default_query = typing.cast(Query, table.id > 0) 

2121 self.query = add_query or default_query 

2122 self.select_args = select_args or [] 

2123 self.select_kwargs = select_kwargs or {} 

2124 self.relationships = relationships or {} 

2125 self.metadata = metadata or {} 

2126 

2127 def __str__(self) -> str: 

2128 """ 

2129 Simple string representation for the query builder. 

2130 """ 

2131 return f"QueryBuilder for {self.model}" 

2132 

2133 def __repr__(self) -> str: 

2134 """ 

2135 Advanced string representation for the query builder. 

2136 """ 

2137 return ( 

2138 f"<QueryBuilder for {self.model} with " 

2139 f"{len(self.select_args)} select args; " 

2140 f"{len(self.select_kwargs)} select kwargs; " 

2141 f"{len(self.relationships)} relationships; " 

2142 f"query: {bool(self.query)}; " 

2143 f"metadata: {self.metadata}; " 

2144 f">" 

2145 ) 

2146 

2147 def __bool__(self) -> bool: 

2148 """ 

2149 Querybuilder is truthy if it has rows. 

2150 """ 

2151 return self.count() > 0 

2152 

2153 def _extend( 

2154 self, 

2155 add_query: Optional[Query] = None, 

2156 overwrite_query: Optional[Query] = None, 

2157 select_args: Optional[list[Any]] = None, 

2158 select_kwargs: Optional[SelectKwargs] = None, 

2159 relationships: dict[str, Relationship[Any]] = None, 

2160 metadata: Metadata = None, 

2161 ) -> "QueryBuilder[T_MetaInstance]": 

2162 return QueryBuilder( 

2163 self.model, 

2164 (add_query & self.query) if add_query else overwrite_query or self.query, 

2165 (self.select_args + select_args) if select_args else self.select_args, 

2166 (self.select_kwargs | select_kwargs) if select_kwargs else self.select_kwargs, 

2167 (self.relationships | relationships) if relationships else self.relationships, 

2168 (self.metadata | (metadata or {})) if metadata else self.metadata, 

2169 ) 

2170 

2171 def select(self, *fields: Any, **options: Unpack[SelectKwargs]) -> "QueryBuilder[T_MetaInstance]": 

2172 """ 

2173 Fields: database columns by name ('id'), by field reference (table.id) or other (e.g. table.ALL). 

2174 

2175 Options: 

2176 paraphrased from the web2py pydal docs, 

2177 For more info, see http://www.web2py.com/books/default/chapter/29/06/the-database-abstraction-layer#orderby-groupby-limitby-distinct-having-orderby_on_limitby-join-left-cache 

2178 

2179 orderby: field(s) to order by. Supported: 

2180 table.name - sort by name, ascending 

2181 ~table.name - sort by name, descending 

2182 <random> - sort randomly 

2183 table.name|table.id - sort by two fields (first name, then id) 

2184 

2185 groupby, having: together with orderby: 

2186 groupby can be a field (e.g. table.name) to group records by 

2187 having can be a query, only those `having` the condition are grouped 

2188 

2189 limitby: tuple of min and max. When using the query builder, .paginate(limit, page) is recommended. 

2190 distinct: bool/field. Only select rows that differ 

2191 orderby_on_limitby (bool, default: True): by default, an implicit orderby is added when doing limitby. 

2192 join: othertable.on(query) - do an INNER JOIN. Using TypeDAL relationships with .join() is recommended! 

2193 left: othertable.on(query) - do a LEFT JOIN. Using TypeDAL relationships with .join() is recommended! 

2194 cache: cache the query result to speed up repeated queries; e.g. (cache=(cache.ram, 3600), cacheable=True) 

2195 """ 

2196 return self._extend(select_args=list(fields), select_kwargs=options) 

2197 

2198 def where( 

2199 self, 

2200 *queries_or_lambdas: Query | typing.Callable[[Type[T_MetaInstance]], Query], 

2201 **filters: Any, 

2202 ) -> "QueryBuilder[T_MetaInstance]": 

2203 """ 

2204 Extend the builder's query. 

2205 

2206 Can be used in multiple ways: 

2207 .where(Query) -> with a direct query such as `Table.id == 5` 

2208 .where(lambda table: table.id == 5) -> with a query via a lambda 

2209 .where(id=5) -> via keyword arguments 

2210 

2211 When using multiple where's, they will be ANDed: 

2212 .where(lambda table: table.id == 5).where(lambda table: table.id == 6) == (table.id == 5) & (table.id == 6) 

2213 When passing multiple queries to a single .where, they will be ORed: 

2214 .where(lambda table: table.id == 5, lambda table: table.id == 6) == (table.id == 5) | (table.id == 6) 

2215 """ 

2216 new_query = self.query 

2217 table = self.model._ensure_table_defined() 

2218 

2219 for field, value in filters.items(): 

2220 new_query &= table[field] == value 

2221 

2222 subquery: DummyQuery | Query = DummyQuery() 

2223 for query_or_lambda in queries_or_lambdas: 

2224 if isinstance(query_or_lambda, _Query): 

2225 subquery |= typing.cast(Query, query_or_lambda) 

2226 elif callable(query_or_lambda): 

2227 if result := query_or_lambda(self.model): 

2228 subquery |= result 

2229 elif isinstance(query_or_lambda, (Field, _Field)) or is_typed_field(query_or_lambda): 

2230 subquery |= typing.cast(Query, query_or_lambda != None) 

2231 else: 

2232 raise ValueError(f"Unexpected query type ({type(query_or_lambda)}).") 

2233 

2234 if subquery: 

2235 new_query &= subquery 

2236 

2237 return self._extend(overwrite_query=new_query) 

2238 
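A sketch of the `.where()` forms described in the docstring, on a hypothetical `Person` model with an `age` field:

```
# Person is a hypothetical TypedTable model:
Person.where(Person.age > 18)         # direct query
Person.where(lambda p: p.age > 18)    # lambda
Person.where(age=18)                  # keyword filter

# chained .where() calls are ANDed, multiple arguments to one .where() are ORed:
working_age = Person.where(lambda p: p.age > 18).where(lambda p: p.age < 65)
outliers = Person.where(lambda p: p.age < 18, lambda p: p.age > 65)
```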

2239 def join( 

2240 self, 

2241 *fields: str | Type[TypedTable], 

2242 method: JOIN_OPTIONS = None, 

2243 on: OnQuery | list[Expression] | Expression = None, 

2244 condition: Condition = None, 

2245 ) -> "QueryBuilder[T_MetaInstance]": 

2246 """ 

2247 Include relationship fields in the result. 

2248 

2249 `fields` can be names of Relationships on the current model. 

2250 If no fields are passed, all will be used. 

2251 

2252 By default, the `method` defined in the relationship is used. 

2253 This can be overwritten with the `method` keyword argument (left or inner) 

2254 """ 

2255 # todo: allow limiting amount of related rows returned for join? 

2256 

2257 relationships = self.model.get_relationships() 

2258 

2259 if condition and on: 

2260 raise ValueError("condition and on can not be used together!") 

2261 elif condition: 

2262 if len(fields) != 1: 

2263 raise ValueError("join(field, condition=...) can only be used with exactly one field!") 

2264 

2265 if isinstance(condition, pydal.objects.Query): 

2266 condition = as_lambda(condition) 

2267 

2268 relationships = {str(fields[0]): Relationship(fields[0], condition=condition, join=method)} 

2269 elif on: 

2270 if len(fields) != 1: 

2271 raise ValueError("join(field, on=...) can only be used with exactly one field!") 

2272 

2273 if isinstance(on, pydal.objects.Expression): 

2274 on = [on] 

2275 

2276 if isinstance(on, list): 

2277 on = as_lambda(on) 

2278 relationships = {str(fields[0]): Relationship(fields[0], on=on, join=method)} 

2279 

2280 else: 

2281 if fields: 

2282 # limit the join to the given relationship fields (all relationships are joined otherwise) 

2283 relationships = {str(k): relationships[str(k)] for k in fields} 

2284 

2285 if method: 

2286 relationships = {str(k): r.clone(join=method) for k, r in relationships.items()} 

2287 

2288 return self._extend(relationships=relationships) 

2289 
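A sketch of `.join()`, assuming a hypothetical `Post` model with an `author` relationship:

```
# Post is a hypothetical TypedTable model:
Post.where(lambda p: p.id > 0).join().collect()                           # join on every defined relationship
Post.where(lambda p: p.id > 0).join("author").collect()                   # join using the relationship's own settings
Post.where(lambda p: p.id > 0).join("author", method="inner").collect()   # force an INNER JOIN
```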

2290 def cache( 

2291 self, *deps: Any, expires_at: Optional[dt.datetime] = None, ttl: Optional[int | dt.timedelta] = None 

2292 ) -> "QueryBuilder[T_MetaInstance]": 

2293 """ 

2294 Enable caching for this query to load repeated calls from a dill row \ 

2295 instead of executing the SQL and collecting matching rows again. 

2296 """ 

2297 existing = self.metadata.get("cache", {}) 

2298 

2299 metadata: Metadata = {} 

2300 

2301 cache_meta = typing.cast( 

2302 CacheMetadata, 

2303 self.metadata.get("cache", {}) 

2304 | { 

2305 "enabled": True, 

2306 "depends_on": existing.get("depends_on", []) + [str(_) for _ in deps], 

2307 "expires_at": get_expire(expires_at=expires_at, ttl=ttl), 

2308 }, 

2309 ) 

2310 

2311 metadata["cache"] = cache_meta 

2312 return self._extend(metadata=metadata) 

2313 
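A sketch of query caching; the `Person` dependency and the one-hour TTL are illustrative:

```
# Person is a hypothetical TypedTable model:
rows = (
    Person.where(lambda p: p.age > 18)
    .cache(Person, ttl=3600)   # Person is stored as a cache dependency; the entry expires after an hour
    .collect()                 # repeated identical calls can now be served from the cache
)
```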

2314 def _get_db(self) -> TypeDAL: 

2315 if db := self.model._db: 

2316 return db 

2317 else: # pragma: no cover 

2318 raise EnvironmentError("@define or db.define is not called on this class yet!") 

2319 

2320 def _select_arg_convert(self, arg: Any) -> Any: 

2322 # TypedFields are not really used at runtime anymore, but leave this in for safety: 

2322 if isinstance(arg, TypedField): # pragma: no cover 

2323 arg = arg._field 

2324 

2325 return arg 

2326 

2327 def delete(self) -> list[int]: 

2328 """ 

2329 Based on the current query, delete rows and return a list of deleted IDs. 

2330 """ 

2331 db = self._get_db() 

2332 removed_ids = [_.id for _ in db(self.query).select("id")] 

2333 if db(self.query).delete(): 

2334 # success! 

2335 return removed_ids 

2336 

2337 return [] 

2338 

2339 def _delete(self) -> str: 

2340 db = self._get_db() 

2341 return str(db(self.query)._delete()) 

2342 

2343 def update(self, **fields: Any) -> list[int]: 

2344 """ 

2345 Based on the current query, update `fields` and return a list of updated IDs. 

2346 """ 

2347 # todo: limit? 

2348 db = self._get_db() 

2349 updated_ids = db(self.query).select("id").column("id") 

2350 if db(self.query).update(**fields): 

2351 # success! 

2352 return updated_ids 

2353 

2354 return [] 

2355 

2356 def _update(self, **fields: Any) -> str: 

2357 db = self._get_db() 

2358 return str(db(self.query)._update(**fields)) 

2359 

2360 def _before_query(self, mut_metadata: Metadata, add_id: bool = True) -> tuple[Query, list[Any], SelectKwargs]: 

2361 select_args = [self._select_arg_convert(_) for _ in self.select_args] or [self.model.ALL] 

2362 select_kwargs = self.select_kwargs.copy() 

2363 query = self.query 

2364 model = self.model 

2365 mut_metadata["query"] = query 

2366 # require at least id of main table: 

2367 select_fields = ", ".join([str(_) for _ in select_args]) 

2368 tablename = str(model) 

2369 

2370 if add_id and f"{tablename}.id" not in select_fields: 

2371 # other fields were selected, but the required id of the main table is missing. 

2372 select_args.append(model.id) 

2373 

2374 if self.relationships: 

2375 query, select_args = self._handle_relationships_pre_select(query, select_args, select_kwargs, mut_metadata) 

2376 

2377 return query, select_args, select_kwargs 

2378 

2379 def to_sql(self, add_id: bool = False) -> str: 

2380 """ 

2381 Generate the SQL for the built query. 

2382 """ 

2383 db = self._get_db() 

2384 

2385 query, select_args, select_kwargs = self._before_query({}, add_id=add_id) 

2386 

2387 return str(db(query)._select(*select_args, **select_kwargs)) 

2388 

2389 def _collect(self) -> str: 

2390 """ 

2391 Alias for to_sql, pydal-like syntax. 

2392 """ 

2393 return self.to_sql() 

2394 

2395 def _collect_cached(self, metadata: Metadata) -> "TypedRows[T_MetaInstance] | None": 

2396 expires_at = metadata["cache"].get("expires_at") 

2397 metadata["cache"] |= { 

2398 # key is partly dependent on cache metadata, but not on these: 

2399 "key": None, 

2400 "status": None, 

2401 "cached_at": None, 

2402 "expires_at": None, 

2403 } 

2404 

2405 _, key = create_and_hash_cache_key( 

2406 self.model, 

2407 metadata, 

2408 self.query, 

2409 self.select_args, 

2410 self.select_kwargs, 

2411 self.relationships.keys(), 

2412 ) 

2413 

2414 # re-set after creating key: 

2415 metadata["cache"]["expires_at"] = expires_at 

2416 metadata["cache"]["key"] = key 

2417 

2418 return load_from_cache(key, self._get_db()) 

2419 

2420 def execute(self, add_id: bool = False) -> Rows: 

2421 """ 

2422 Raw version of .collect which only executes the SQL, without performing any magic afterwards. 

2423 """ 

2424 db = self._get_db() 

2425 metadata = typing.cast(Metadata, self.metadata.copy()) 

2426 

2427 query, select_args, select_kwargs = self._before_query(metadata, add_id=add_id) 

2428 

2429 return db(query).select(*select_args, **select_kwargs) 

2430 

2431 def collect( 

2432 self, verbose: bool = False, _to: Type["TypedRows[Any]"] = None, add_id: bool = True 

2433 ) -> "TypedRows[T_MetaInstance]": 

2434 """ 

2435 Execute the built query and turn it into model instances, while handling relationships. 

2436 """ 

2437 if _to is None: 

2438 _to = TypedRows 

2439 

2440 db = self._get_db() 

2441 metadata = typing.cast(Metadata, self.metadata.copy()) 

2442 

2443 if metadata.get("cache", {}).get("enabled") and (result := self._collect_cached(metadata)): 

2444 return result 

2445 

2446 query, select_args, select_kwargs = self._before_query(metadata, add_id=add_id) 

2447 

2448 metadata["sql"] = db(query)._select(*select_args, **select_kwargs) 

2449 

2450 if verbose: # pragma: no cover 

2451 print(metadata["sql"]) 

2452 

2453 rows: Rows = db(query).select(*select_args, **select_kwargs) 

2454 

2455 metadata["final_query"] = str(query) 

2456 metadata["final_args"] = [str(_) for _ in select_args] 

2457 metadata["final_kwargs"] = select_kwargs 

2458 

2459 if verbose: # pragma: no cover 

2460 print(rows) 

2461 

2462 if not self.relationships: 

2463 # easy 

2464 typed_rows = _to.from_rows(rows, self.model, metadata=metadata) 

2465 

2466 else: 

2467 # harder: try to match rows to their corresponding objects 

2468 # assume structure of {'table': <data>} per row. 

2469 # if that's not the case, return default behavior again 

2470 typed_rows = self._collect_with_relationships(rows, metadata=metadata, _to=_to) 

2471 

2472 # only saves if requested in metadata: 

2473 return save_to_cache(typed_rows, rows) 

2474 

2475 @typing.overload 

2476 def column(self, field: TypedField[T]) -> list[T]: 

2477 """ 

2478 If a TypedField is passed, the output type can be safely determined. 

2479 """ 

2480 

2481 @typing.overload 

2482 def column(self, field: T) -> list[T]: 

2483 """ 

2484 Otherwise, the output type is loosely determined (assumes `field: type` or Any). 

2485 """ 

2486 

2487 def column(self, field: TypedField[T] | T) -> list[T]: 

2488 """ 

2489 Get all values in a specific column. 

2490 

2491 Shortcut for `.select(field).execute().column(field)`. 

2492 """ 

2493 return self.select(field).execute().column(field) 

2494 
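A sketch of `.column()`; the `name` and `age` fields on the hypothetical `Person` model are assumptions:

```
# Person is a hypothetical TypedTable model; the TypedField overload makes the result list[str]:
names: list[str] = Person.where(lambda p: p.age > 18).column(Person.name)
```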

2495 def _handle_relationships_pre_select( 

2496 self, 

2497 query: Query, 

2498 select_args: list[Any], 

2499 select_kwargs: SelectKwargs, 

2500 metadata: Metadata, 

2501 ) -> tuple[Query, list[Any]]: 

2502 db = self._get_db() 

2503 model = self.model 

2504 

2505 metadata["relationships"] = set(self.relationships.keys()) 

2506 

2507 # query = self._update_query_for_inner(db, model, query) 

2508 join = [] 

2509 for key, relation in self.relationships.items(): 

2510 if not relation.condition or relation.join != "inner": 

2511 continue 

2512 

2513 other = relation.get_table(db) 

2514 other = other.with_alias(f"{key}_{hash(relation)}") 

2515 join.append(other.on(relation.condition(model, other))) 

2516 

2517 if limitby := select_kwargs.pop("limitby", ()): 

2518 

2519 # if limitby + relationships: 

2520 # 1. get IDs of main table entries that match 'query' 

2521 # 2. change query to .belongs(id) 

2522 # 3. add joins etc 

2523 

2524 kwargs: SelectKwargs = select_kwargs | {"limitby": limitby} 

2525 # if orderby := select_kwargs.get("orderby"): 

2526 # kwargs["orderby"] = orderby 

2527 

2528 if join: 

2529 kwargs["join"] = join 

2530 

2531 ids = db(query)._select(model.id, **kwargs) 

2532 query = model.id.belongs(ids) 

2533 metadata["ids"] = ids 

2534 

2535 if join: 

2536 select_kwargs["join"] = join 

2537 

2538 left = [] 

2539 

2540 for key, relation in self.relationships.items(): 

2541 other = relation.get_table(db) 

2542 method: JOIN_OPTIONS = relation.join or DEFAULT_JOIN_OPTION 

2543 

2544 select_fields = ", ".join([str(_) for _ in select_args]) 

2545 pre_alias = str(other) 

2546 

2547 if f"{other}." not in select_fields: 

2548 # no fields of other selected. add .ALL: 

2549 select_args.append(other.ALL) 

2550 elif f"{other}.id" not in select_fields: 

2551 # fields of other selected, but required ID is missing. 

2552 select_args.append(other.id) 

2553 

2554 if relation.on: 

2555 # if it has a .on, it's always a left join! 

2556 on = relation.on(model, other) 

2557 if not isinstance(on, list): # pragma: no cover 

2558 on = [on] 

2559 

2560 left.extend(on) 

2561 elif method == "left": 

2562 # .on not given, generate it: 

2563 other = other.with_alias(f"{key}_{hash(relation)}") 

2564 condition = typing.cast(Query, relation.condition(model, other)) 

2565 left.append(other.on(condition)) 

2566 else: 

2567 # else: inner join (handled earlier) 

2568 other = other.with_alias(f"{key}_{hash(relation)}") # only for replace 

2569 # other = other.with_alias(f"{key}_{hash(relation)}") 

2570 # query &= relation.condition(model, other) 

2571 

2572 # if no fields of 'other' are included, add other.ALL 

2573 # else: only add other.id if missing 

2574 select_fields = ", ".join([str(_) for _ in select_args]) 

2575 

2576 post_alias = str(other).split(" AS ")[-1] 

2577 if pre_alias != post_alias: 

2578 # replace .select's with aliased: 

2579 select_fields = select_fields.replace( 

2580 f"{pre_alias}.", 

2581 f"{post_alias}.", 

2582 ) 

2583 

2584 select_args = select_fields.split(", ") 

2585 

2586 select_kwargs["left"] = left 

2587 return query, select_args 

2588 

2589 def _collect_with_relationships( 

2590 self, rows: Rows, metadata: Metadata, _to: Type["TypedRows[Any]"] 

2591 ) -> "TypedRows[T_MetaInstance]": 

2592 """ 

2593 Transform the raw rows into Typed Table model instances. 

2594 """ 

2595 db = self._get_db() 

2596 main_table = self.model._ensure_table_defined() 

2597 

2598 records = {} 

2599 seen_relations: dict[str, set[str]] = defaultdict(set) # main id -> set of col + id for relation 

2600 

2601 for row in rows: 

2602 main = row[main_table] 

2603 main_id = main.id 

2604 

2605 if main_id not in records: 

2606 records[main_id] = self.model(main) 

2607 records[main_id]._with = list(self.relationships.keys()) 

2608 

2609 # set up all relationship defaults (once) 

2610 for col, relationship in self.relationships.items(): 

2611 records[main_id][col] = [] if relationship.multiple else None 

2612 

2613 # now add other relationship data 

2614 for column, relation in self.relationships.items(): 

2615 relationship_column = f"{column}_{hash(relation)}" 

2616 

2617 # relationship_column works for aliases with the same target column. 

2618 # if col + relationship not in the row, just use the regular name. 

2619 

2620 relation_data = ( 

2621 row[relationship_column] if relationship_column in row else row[relation.get_table_name()] 

2622 ) 

2623 

2624 if relation_data.id is None: 

2625 # always skip None ids 

2626 continue 

2627 

2628 if f"{column}-{relation_data.id}" in seen_relations[main_id]: 

2629 # speed up duplicates 

2630 continue 

2631 else: 

2632 seen_relations[main_id].add(f"{column}-{relation_data.id}") 

2633 

2634 relation_table = relation.get_table(db) 

2635 # hopefully an instance of a typed table, or a regular row otherwise: 

2636 instance = relation_table(relation_data) if looks_like(relation_table, TypedTable) else relation_data 

2637 

2638 if relation.multiple: 

2639 # create list of T 

2640 if not isinstance(records[main_id].get(column), list): # pragma: no cover 

2641 # should already be set up before! 

2642 setattr(records[main_id], column, []) 

2643 

2644 records[main_id][column].append(instance) 

2645 else: 

2646 # create single T 

2647 records[main_id][column] = instance 

2648 

2649 return _to(rows, self.model, records, metadata=metadata) 

2650 

2651 def collect_or_fail(self, exception: Exception = None) -> "TypedRows[T_MetaInstance]": 

2652 """ 

2653 Call .collect() and raise an error if nothing found. 

2654 

2655 Basically unwraps Optional type. 

2656 """ 

2657 if result := self.collect(): 

2658 return result 

2659 

2660 if not exception: 

2661 exception = ValueError("Nothing found!") 

2662 

2663 raise exception 

2664 

2665 def __iter__(self) -> typing.Generator[T_MetaInstance, None, None]: 

2666 """ 

2667 You can start iterating a Query Builder object before calling collect, for ease of use. 

2668 """ 

2669 yield from self.collect() 

2670 

2671 def count(self) -> int: 

2672 """ 

2673 Return the number of rows matching the current query. 

2674 """ 

2675 db = self._get_db() 

2676 model = self.model 

2677 query = self.query 

2678 

2679 for key, relation in self.relationships.items(): 

2680 if not relation.condition or relation.join != "inner": 

2681 continue 

2682 

2683 other = relation.get_table(db) 

2684 other = other.with_alias(f"{key}_{hash(relation)}") 

2685 query &= relation.condition(model, other) 

2686 

2687 return db(query).count() 

2688 

2689 def __paginate( 

2690 self, 

2691 limit: int, 

2692 page: int = 1, 

2693 ) -> "QueryBuilder[T_MetaInstance]": 

2694 _from = limit * (page - 1) 

2695 _to = limit * page 

2696 

2697 available = self.count() 

2698 

2699 metadata: Metadata = {} 

2700 

2701 metadata["pagination"] = { 

2702 "limit": limit, 

2703 "current_page": page, 

2704 "max_page": math.ceil(available / limit), 

2705 "rows": available, 

2706 "min_max": (_from, _to), 

2707 } 

2708 

2709 return self._extend(select_kwargs={"limitby": (_from, _to)}, metadata=metadata) 

2710 

2711 def paginate(self, limit: int, page: int = 1, verbose: bool = False) -> "PaginatedRows[T_MetaInstance]": 

2712 """ 

2713 Paginate transforms the more readable `page` and `limit` into pydal's internal limit and offset. 

2714 

2715 Note: when using relationships, this limit is only applied to the 'main' table and any number of extra rows \ 

2716 can be loaded with relationship data! 

2717 """ 

2718 builder = self.__paginate(limit, page) 

2719 

2720 rows = typing.cast(PaginatedRows[T_MetaInstance], builder.collect(verbose=verbose, _to=PaginatedRows)) 

2721 

2722 rows._query_builder = builder 

2723 return rows 

2724 
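A sketch of pagination on the hypothetical `Person` model:

```
# Person is a hypothetical TypedTable model:
page = Person.where(lambda p: p.age > 18).paginate(limit=20, page=1)

print(page.pagination["total_pages"], page.pagination["has_next_page"])
for person in page:
    print(person.id)

next_page = page.next()   # raises StopIteration("Final Page") when already on the last page
```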

2725 def _paginate( 

2726 self, 

2727 limit: int, 

2728 page: int = 1, 

2729 ) -> str: 

2730 builder = self.__paginate(limit, page) 

2731 return builder._collect() 

2732 

2733 def chunk(self, chunk_size: int) -> typing.Generator["TypedRows[T_MetaInstance]", Any, None]: 

2734 """ 

2735 Generator that yields rows from a paginated source in chunks. 

2736 

2737 This function retrieves rows from a paginated data source in chunks of the 

2738 specified `chunk_size` and yields them as TypedRows. 

2739 

2740 Example: 

2741 ``` 

2742 for chunk_of_rows in Table.where(SomeTable.id > 5).chunk(100): 

2743 for row in chunk_of_rows: 

2744 # Process each row within the chunk. 

2745 pass 

2746 ``` 

2747 """ 

2748 page = 1 

2749 

2750 while rows := self.__paginate(chunk_size, page).collect(): 

2751 yield rows 

2752 page += 1 

2753 

2754 def first(self, verbose: bool = False) -> T_MetaInstance | None: 

2755 """ 

2756 Get the first row matching the currently built query. 

2757 

2758 Also applies pagination (limit 1), since it would be a waste to select more rows than needed. 

2759 """ 

2760 if row := self.paginate(page=1, limit=1, verbose=verbose).first(): 

2761 return self.model.from_row(row) 

2762 else: 

2763 return None 

2764 

2765 def _first(self) -> str: 

2766 return self._paginate(page=1, limit=1) 

2767 

2768 def first_or_fail(self, exception: Exception = None, verbose: bool = False) -> T_MetaInstance: 

2769 """ 

2770 Call .first() and raise an error if nothing found. 

2771 

2772 Basically unwraps Optional type. 

2773 """ 

2774 if inst := self.first(verbose=verbose): 

2775 return inst 

2776 

2777 if not exception: 

2778 exception = ValueError("Nothing found!") 

2779 

2780 raise exception 

2781 

2782 

2783S = typing.TypeVar("S") 

2784 

2785 

2786class PaginatedRows(TypedRows[T_MetaInstance]): 

2787 """ 

2788 Extension on top of rows that is used when calling .paginate() instead of .collect(). 

2789 """ 

2790 

2791 _query_builder: QueryBuilder[T_MetaInstance] 

2792 

2793 @property 

2794 def data(self) -> list[T_MetaInstance]: 

2795 """ 

2796 Get the underlying data. 

2797 """ 

2798 return list(self.records.values()) 

2799 

2800 @property 

2801 def pagination(self) -> Pagination: 

2802 """ 

2803 Get all page info. 

2804 """ 

2805 pagination_data = self.metadata["pagination"] 

2806 

2807 has_next_page = pagination_data["current_page"] < pagination_data["max_page"] 

2808 has_prev_page = pagination_data["current_page"] > 1 

2809 return { 

2810 "total_items": pagination_data["rows"], 

2811 "current_page": pagination_data["current_page"], 

2812 "per_page": pagination_data["limit"], 

2813 "total_pages": pagination_data["max_page"], 

2814 "has_next_page": has_next_page, 

2815 "has_prev_page": has_prev_page, 

2816 "next_page": pagination_data["current_page"] + 1 if has_next_page else None, 

2817 "prev_page": pagination_data["current_page"] - 1 if has_prev_page else None, 

2818 } 

2819 

2820 def next(self) -> Self: 

2821 """ 

2822 Get the next page. 

2823 """ 

2824 data = self.metadata["pagination"] 

2825 if data["current_page"] >= data["max_page"]: 

2826 raise StopIteration("Final Page") 

2827 

2828 return self._query_builder.paginate(limit=data["limit"], page=data["current_page"] + 1) 

2829 

2830 def previous(self) -> Self: 

2831 """ 

2832 Get the previous page. 

2833 """ 

2834 data = self.metadata["pagination"] 

2835 if data["current_page"] <= 1: 

2836 raise StopIteration("First Page") 

2837 

2838 return self._query_builder.paginate(limit=data["limit"], page=data["current_page"] - 1) 

2839 

2840 def as_dict(self, *_: Any, **__: Any) -> PaginateDict: # type: ignore 

2841 """ 

2842 Convert to a dictionary with pagination info and original data. 

2843 

2844 All arguments are ignored! 

2845 """ 

2846 return {"data": super().as_dict(), "pagination": self.pagination} 

2847 

2848 

2849class TypedSet(pydal.objects.Set): # type: ignore # pragma: no cover 

2850 """ 

2851 Used to make pydal Set more typed. 

2852 

2853 This class is not actually used, only 'cast' by TypeDAL.__call__ 

2854 """ 

2855 

2856 def count(self, distinct: bool = None, cache: AnyDict = None) -> int: 

2857 """ 

2858 Count returns an int. 

2859 """ 

2860 result = super().count(distinct, cache) 

2861 return typing.cast(int, result) 

2862 

2863 def select(self, *fields: Any, **attributes: Any) -> TypedRows[T_MetaInstance]: 

2864 """ 

2865 Select returns a TypedRows of a user defined table. 

2866 

2867 Example: 

2868 result: TypedRows[MyTable] = db(MyTable.id > 0).select() 

2869 

2870 for row in result: 

2871 reveal_type(row) # MyTable 

2872 """ 

2873 rows = super().select(*fields, **attributes) 

2874 return typing.cast(TypedRows[T_MetaInstance], rows)