Coverage for src/typedal/core.py: 100%

898 statements  

coverage.py v7.3.2, created at 2023-12-19 16:50 +0100

1""" 

2Core functionality of TypeDAL. 

3""" 

4import contextlib 

5import csv 

6import datetime as dt 

7import inspect 

8import json 

9import math 

10import types 

11import typing 

12import warnings 

13from collections import defaultdict 

14from decimal import Decimal 

15from pathlib import Path 

16from typing import Any, Optional 

17 

18import pydal 

19from pydal._globals import DEFAULT 

20from pydal.objects import Field as _Field 

21from pydal.objects import Query as _Query 

22from pydal.objects import Row 

23from pydal.objects import Table as _Table 

24from typing_extensions import Self 

25 

26from .config import TypeDALConfig, load_config 

27from .helpers import ( 

28 DummyQuery, 

29 all_annotations, 

30 all_dict, 

31 as_lambda, 

32 extract_type_optional, 

33 filter_out, 

34 instanciate, 

35 is_union, 

36 looks_like, 

37 mktable, 

38 origin_is_subclass, 

39 to_snake, 

40 unwrap_type, 

41) 

42from .serializers import as_json 

43from .types import ( 

44 AfterDeleteCallable, 

45 AfterInsertCallable, 

46 AfterUpdateCallable, 

47 BeforeDeleteCallable, 

48 BeforeInsertCallable, 

49 BeforeUpdateCallable, 

50 CacheMetadata, 

51 Expression, 

52 Field, 

53 Metadata, 

54 PaginateDict, 

55 Pagination, 

56 Query, 

57 Rows, 

58 Validator, 

59 _Types, 

60) 

61 

62# use typing.cast(type, ...) to make mypy happy with unions 

63T_annotation = typing.Type[Any] | types.UnionType 

64T_Query = typing.Union["Table", Query, bool, None, "TypedTable", typing.Type["TypedTable"]] 

65T_Value = typing.TypeVar("T_Value") # actual type of the Field (via Generic) 

66T_MetaInstance = typing.TypeVar("T_MetaInstance", bound="TypedTable") # bound="TypedTable"; bound="TableMeta" 

67T = typing.TypeVar("T") 

68 

69BASIC_MAPPINGS: dict[T_annotation, str] = { 

70 str: "string", 

71 int: "integer", 

72 bool: "boolean", 

73 bytes: "blob", 

74 float: "double", 

75 object: "json", 

76 Decimal: "decimal(10,2)", 

77 dt.date: "date", 

78 dt.time: "time", 

79 dt.datetime: "datetime", 

80} 
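# Illustrative sketch (added commentary, not part of the original module): how these basic
# mappings surface when a model is defined. The class below is hypothetical; types not in
# this dict (references, list[...], Optional[...]) are handled later by
# TypeDAL._annotation_to_pydal_fieldtype.
#
#   @db.define
#   class Invoice(TypedTable):
#       amount: Decimal          # -> pydal field type "decimal(10,2)"
#       paid: bool               # -> "boolean"
#       created_at: dt.datetime  # -> "datetime"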

81 

82 

83def is_typed_field(cls: Any) -> typing.TypeGuard["TypedField[Any]"]: 

84 """ 

85 Is `cls` an instance or subclass of TypedField? 

86 

87 Deprecated 

88 """ 

89 return ( 

90 isinstance(cls, TypedField) 

91 or isinstance(typing.get_origin(cls), type) 

92 and issubclass(typing.get_origin(cls), TypedField) 

93 ) 

94 

95 

96JOIN_OPTIONS = typing.Literal["left", "inner", None] 

97DEFAULT_JOIN_OPTION: JOIN_OPTIONS = "left" 

98 

99# table-ish parameter: 

100P_Table = typing.Union[typing.Type["TypedTable"], pydal.objects.Table] 

101 

102Condition: typing.TypeAlias = typing.Optional[ 

103 typing.Callable[ 

104 # self, other -> Query 

105 [P_Table, P_Table], 

106 Query | bool, 

107 ] 

108] 

109 

110OnQuery: typing.TypeAlias = typing.Optional[ 

111 typing.Callable[ 

112 # self, other -> list of .on statements 

113 [P_Table, P_Table], 

114 list[Expression], 

115 ] 

116] 

117 

118To_Type = typing.TypeVar("To_Type", type[Any], typing.Type[Any], str) 

119 

120 

121class Relationship(typing.Generic[To_Type]): 

122 """ 

123 Define a relationship to another table. 

124 """ 

125 

126 _type: To_Type 

127 table: typing.Type["TypedTable"] | type | str 

128 condition: Condition 

129 on: OnQuery 

130 multiple: bool 

131 join: JOIN_OPTIONS 

132 

133 def __init__( 

134 self, 

135 _type: To_Type, 

136 condition: Condition = None, 

137 join: JOIN_OPTIONS = None, 

138 on: OnQuery = None, 

139 ): 

140 """ 

141 Should not be called directly, use relationship() instead! 

142 """ 

143 if condition and on: 

144 warnings.warn(f"Relation | Both specified! {condition=} {on=} {_type=}") 

145 raise ValueError("Please specify either a condition or an 'on' statement for this relationship!") 

146 

147 self._type = _type 

148 self.condition = condition 

149 self.join = "left" if on else join # .on is always left join! 

150 self.on = on 

151 

152 if args := typing.get_args(_type): 

153 self.table = unwrap_type(args[0]) 

154 self.multiple = True 

155 else: 

156 self.table = _type 

157 self.multiple = False 

158 

159 if isinstance(self.table, str): 

160 self.table = TypeDAL.to_snake(self.table) 

161 

162 def clone(self, **update: Any) -> "Relationship[To_Type]": 

163 """ 

164 Create a copy of the relationship, possibly updated. 

165 """ 

166 return self.__class__( 

167 update.get("_type") or self._type, 

168 update.get("condition") or self.condition, 

169 update.get("join") or self.join, 

170 update.get("on") or self.on, 

171 ) 

172 

173 def __repr__(self) -> str: 

174 """ 

175 Representation of the relationship. 

176 """ 

177 if callback := self.condition or self.on: 

178 src_code = inspect.getsource(callback).strip() 

179 else: 

180 cls_name = self._type if isinstance(self._type, str) else self._type.__name__ # type: ignore 

181 src_code = f"to {cls_name} (missing condition)" 

182 

183 join = f":{self.join}" if self.join else "" 

184 return f"<Relationship{join} {src_code}>" 

185 

186 def get_table(self, db: "TypeDAL") -> typing.Type["TypedTable"]: 

187 """ 

188 Get the table this relationship is bound to. 

189 """ 

190 table = self.table # can be a string because db wasn't available yet 

191 if isinstance(table, str): 

192 if mapped := db._class_map.get(table): 

193 # yay 

194 return mapped 

195 

196 # boo, fall back to untyped table but pretend it is typed: 

197 return typing.cast(typing.Type["TypedTable"], db[table]) # eh close enough! 

198 

199 return table 

200 

201 def get_table_name(self) -> str: 

202 """ 

203 Get the name of the table this relationship is bound to. 

204 """ 

205 if isinstance(self.table, str): 

206 return self.table 

207 

208 if isinstance(self.table, pydal.objects.Table): 

209 return str(self.table) 

210 

211 # else: typed table 

212 try: 

213 table = self.table._ensure_table_defined() if issubclass(self.table, TypedTable) else self.table 

214 except Exception: # pragma: no cover 

215 table = self.table 

216 

217 return str(table) 

218 

219 def __get__(self, instance: Any, owner: Any) -> typing.Optional[list[Any]] | "Relationship[To_Type]": 

220 """ 

221 Relationship is a descriptor class, which can be returned from a class but not an instance. 

222 

223 For an instance, using .join() will replace the Relationship with the actual data. 

224 If you forgot to join, a warning will be shown and empty data will be returned. 

225 """ 

226 if not instance: 

227 # relationship queried on class, that's allowed 

228 return self 

229 

230 warnings.warn( 

231 "Trying to get data from a relationship object! Did you forget to join it?", category=RuntimeWarning 

232 ) 

233 if self.multiple: 

234 return [] 

235 else: 

236 return None 

237 

238 

239def relationship( 

240 _type: To_Type, condition: Condition = None, join: JOIN_OPTIONS = None, on: OnQuery = None 

241) -> Relationship[To_Type]: 

242 """ 

243 Define a relationship to another table, when its id is not stored in the current table. 

244 

245 Example: 

246 class User(TypedTable): 

247 name: str 

248 

249 posts = relationship(list["Post"], condition=lambda self, post: self.id == post.author, join='left') 

250 

251 class Post(TypedTable): 

252 title: str 

253 author: User 

254 

255 User.join("posts").first() # User instance with list[Post] in .posts 

256 

257 Here, Post stores the User ID, but `relationship(list["Post"])` still allows you to get the user's posts. 

258 In this case, the join strategy is set to LEFT so users without posts are also still selected. 

259 

260 For complex queries with a pivot table, an `on` can be set instead of `condition`: 

261 class User(TypedTable): 

262 ... 

263 

264 tags = relationship(list["Tag"], on=lambda self, tag: [ 

265 Tagged.on(Tagged.entity == self.gid), 

266 Tag.on((Tagged.tag == tag.id)), 

267 ]) 

268 

269 If you tried to capture this in a single 'condition', pydal would create a cross join, which is much less efficient. 

270 """ 

271 return Relationship(_type, condition, join, on) 

272 

273 

274def _generate_relationship_condition( 

275 _: typing.Type["TypedTable"], key: str, field: typing.Union["TypedField[Any]", "Table", typing.Type["TypedTable"]] 

276) -> Condition: 

277 origin = typing.get_origin(field) 

278 # else: generic 

279 

280 if origin == list: 

281 # field = typing.get_args(field)[0] # actual field 

282 # return lambda _self, _other: cls[key].contains(field) 

283 

284 return lambda _self, _other: _self[key].contains(_other.id) 

285 else: 

286 # normal reference 

287 # return lambda _self, _other: cls[key] == field.id 

288 return lambda _self, _other: _self[key] == _other.id 

289 

290 

291def to_relationship( 

292 cls: typing.Type["TypedTable"] | type[Any], 

293 key: str, 

294 field: typing.Union["TypedField[Any]", "Table", typing.Type["TypedTable"]], 

295) -> typing.Optional[Relationship[Any]]: 

296 """ 

297 Used to automatically create relationship instance for reference fields. 

298 

299 Example: 

300 class MyTable(TypedTable): 

301 reference: OtherTable 

302 

303 `reference` contains the id of an OtherTable row. 

304 MyTable.relationships should have 'reference' as a relationship, so `MyTable.join('reference')` should work. 

305 

306 This function will automatically perform this logic (called in db.define): 

307 to_relationship(MyTable, 'reference', OtherTable) -> Relationship[OtherTable] 

308 

309 Also works for list:reference (list[OtherTable]) and TypedField[OtherTable]. 

310 """ 

311 if looks_like(field, TypedField): 

312 if args := typing.get_args(field): 

313 field = args[0] 

314 else: 

315 # weird 

316 return None 

317 

318 field, optional = extract_type_optional(field) 

319 

320 try: 

321 condition = _generate_relationship_condition(cls, key, field) 

322 except Exception as e: # pragma: no cover 

323 warnings.warn("Could not generate Relationship condition", source=e) 

324 condition = None 

325 

326 if not condition: # pragma: no cover 

327 # something went wrong, not a valid relationship 

328 warnings.warn(f"Invalid relationship for {cls.__name__}.{key}: {field}") 

329 return None 

330 

331 join = "left" if optional or typing.get_origin(field) == list else "inner" 

332 

333 return Relationship(typing.cast(type[TypedTable], field), condition, typing.cast(JOIN_OPTIONS, join)) 
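# Illustrative sketch (not executed): what to_relationship produces for the common annotation
# shapes named above. Author/Book/Tag are hypothetical models.
#
#   class Author(TypedTable): ...
#   class Book(TypedTable):
#       author: Author                    # reference       -> inner-join Relationship
#       tags: list["Tag"]                 # list:reference  -> left-join Relationship
#       editor: typing.Optional[Author]   # nullable ref    -> left-join Relationship
#
#   rel = to_relationship(Book, "author", Author)
#   # rel.join == "inner"; rel.condition(Book, Author) yields `Book.author == Author.id`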

334 

335 

336class TypeDAL(pydal.DAL): # type: ignore 

337 """ 

338 Drop-in replacement for pyDAL with a layer that converts class-based table definitions into classical pydal define_table calls. 

339 """ 

340 

341 _config: TypeDALConfig 

342 

343 def __init__( 

344 self, 

345 uri: Optional[str] = None, # default from config or 'sqlite:memory' 

346 pool_size: int = None, # default 1 if sqlite else 3 

347 folder: Optional[str | Path] = None, # default 'databases' in config 

348 db_codec: str = "UTF-8", 

349 check_reserved: Optional[list[str]] = None, 

350 migrate: Optional[bool] = None, # default True by config 

351 fake_migrate: Optional[bool] = None, # default False by config 

352 migrate_enabled: bool = True, 

353 fake_migrate_all: bool = False, 

354 decode_credentials: bool = False, 

355 driver_args: Optional[dict[str, Any]] = None, 

356 adapter_args: Optional[dict[str, Any]] = None, 

357 attempts: int = 5, 

358 auto_import: bool = False, 

359 bigint_id: bool = False, 

360 debug: bool = False, 

361 lazy_tables: bool = False, 

362 db_uid: Optional[str] = None, 

363 after_connection: typing.Callable[..., Any] = None, 

364 tables: Optional[list[str]] = None, 

365 ignore_field_case: bool = True, 

366 entity_quoting: bool = True, 

367 table_hash: Optional[str] = None, 

368 enable_typedal_caching: bool = None, 

369 use_pyproject: bool | str = True, 

370 use_env: bool | str = True, 

371 ) -> None: 

372 """ 

373 Adds some internal tables after calling pydal's default init. 

374 

375 Set enable_typedal_caching to False to disable this behavior. 

376 """ 

377 config = load_config(_use_pyproject=use_pyproject, _use_env=use_env) 

378 config.update( 

379 database=uri, 

380 dialect=uri.split(":")[0] if uri and ":" in uri else None, 

381 folder=folder, 

382 migrate=migrate, 

383 fake_migrate=fake_migrate, 

384 caching=enable_typedal_caching, 

385 pool_size=pool_size, 

386 ) 

387 

388 self._config = config 

389 

390 if config.folder: 

391 Path(config.folder).mkdir(exist_ok=True) 

392 

393 super().__init__( 

394 config.database, 

395 config.pool_size, 

396 config.folder, 

397 db_codec, 

398 check_reserved, 

399 config.migrate, 

400 config.fake_migrate, 

401 migrate_enabled, 

402 fake_migrate_all, 

403 decode_credentials, 

404 driver_args, 

405 adapter_args, 

406 attempts, 

407 auto_import, 

408 bigint_id, 

409 debug, 

410 lazy_tables, 

411 db_uid, 

412 after_connection, 

413 tables, 

414 ignore_field_case, 

415 entity_quoting, 

416 table_hash, 

417 ) 

418 

419 if config.caching: 

420 self.try_define(_TypedalCache) 

421 self.try_define(_TypedalCacheDependency) 

422 
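# Minimal usage sketch (assumed URI and folder, not executed here): unset arguments fall
# back to the TypeDALConfig loaded from pyproject/env.
#
#   db = TypeDAL("sqlite:memory")
#   db = TypeDAL("sqlite://storage.db", folder="databases", migrate=True)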

423 def try_define(self, model: typing.Type[T], verbose: bool = False) -> typing.Type[T]: 

424 """ 

425 Try to define a model with migrate or fall back to fake migrate. 

426 """ 

427 try: 

428 return self.define(model, migrate=True) 

429 except Exception as e: 

430 # clean up: 

431 self.rollback() 

432 if (tablename := self.to_snake(model.__name__)) and tablename in dir(self): 

433 delattr(self, tablename) 

434 

435 if verbose: 

436 warnings.warn(f"{model} could not be migrated, try faking", source=e, category=RuntimeWarning) 

437 

438 # try again: 

439 return self.define(model, migrate=True, fake_migrate=True, redefine=True) 

440 

441 default_kwargs: typing.ClassVar[typing.Dict[str, Any]] = { 

442 # fields are 'required' (notnull) by default: 

443 "notnull": True, 

444 } 

445 

446 # maps table name to typedal class, for resolving future references 

447 _class_map: typing.ClassVar[dict[str, typing.Type["TypedTable"]]] = {} 

448 

449 def _define(self, cls: typing.Type[T], **kwargs: Any) -> typing.Type[T]: 

450 # todo: new relationship item added should also invalidate (previously unrelated) cache result 

451 

452 # todo: option to enable/disable cache dependency behavior: 

453 # - don't set _before_update and _before_delete 

454 # - don't add TypedalCacheDependency entry 

455 # - don't invalidate other item on new row of this type 

456 

457 # when `from __future__ import annotations` is used, cls.__annotations__ will not work anymore as below. 

458 # proper way to handle this would be (but gives error right now due to Table implementing magic methods): 

459 # typing.get_type_hints(cls, globalns=None, localns=None) 

460 

461 # dirty way (with evil eval): 

462 # [eval(v) for k, v in cls.__annotations__.items()] 

463 # this however also stops working when variables outside this scope or even references to other 

464 # objects are used. So for now, this package will NOT work when from __future__ import annotations is used, 

465 # and might break in the future, when this annotations behavior is enabled by default. 

466 

467 # non-annotated variables have to be passed to define_table as kwargs 

468 full_dict = all_dict(cls) # includes properties from parents (e.g. useful for mixins) 

469 

470 tablename = self.to_snake(cls.__name__) 

471 # grab annotations of cls and its parents: 

472 annotations = all_annotations(cls) 

473 # extend with `prop = TypedField()` 'annotations': 

474 annotations |= {k: typing.cast(type, v) for k, v in full_dict.items() if is_typed_field(v)} 

475 # remove internal stuff: 

476 annotations = {k: v for k, v in annotations.items() if not k.startswith("_")} 

477 

478 typedfields: dict[str, TypedField[Any]] = { 

479 k: instanciate(v, True) for k, v in annotations.items() if is_typed_field(v) 

480 } 

481 

482 relationships: dict[str, type[Relationship[Any]]] = filter_out(annotations, Relationship) 

483 

484 fields = {fname: self._to_field(fname, ftype) for fname, ftype in annotations.items()} 

485 

486 # ! don't use full_dict here: 

487 other_kwargs = kwargs | { 

488 k: v for k, v in cls.__dict__.items() if k not in annotations and not k.startswith("_") 

489 } # other_kwargs was previously used to pass kwargs to typedal, but use @define(**kwargs) for that. 

490 # now it's only used to extract relationships from the object. 

491 # other properties of the class (incl methods) should not be touched 

492 

493 for key in typedfields.keys() - full_dict.keys(): 

494 # typed fields that haven't been added to the object yet 

495 setattr(cls, key, typedfields[key]) 

496 

497 # start with base classes and overwrite with current class: 

498 relationships = filter_out(full_dict, Relationship) | relationships | filter_out(other_kwargs, Relationship) 

499 

500 # DEPRECATED: Relationship as annotation is currently not supported! 

501 # ensure they are all instances and 

502 # not mix of instances (`= relationship()`) and classes (`: Relationship[...]`): 

503 # relationships = { 

504 # k: v if isinstance(v, Relationship) else to_relationship(cls, k, v) for k, v in relationships.items() 

505 # } 

506 

507 # keys of implicit references (also relationships): 

508 reference_field_keys = [k for k, v in fields.items() if v.type.split(" ")[0] in ("list:reference", "reference")] 

509 

510 # add implicit relationships: 

511 # User; list[User]; TypedField[User]; TypedField[list[User]] 

512 relationships |= { 

513 k: new_relationship 

514 for k in reference_field_keys 

515 if k not in relationships and (new_relationship := to_relationship(cls, k, annotations[k])) 

516 } 

517 

518 cache_dependency = kwargs.pop("cache_dependency", True) 

519 

520 table: Table = self.define_table(tablename, *fields.values(), **kwargs) 

521 

522 for name, typed_field in typedfields.items(): 

523 field = fields[name] 

524 typed_field.bind(field, table) 

525 

526 if issubclass(cls, TypedTable): 

527 cls.__set_internals__( 

528 db=self, 

529 table=table, 

530 # by now, all relationships should be instances! 

531 relationships=typing.cast(dict[str, Relationship[Any]], relationships), 

532 ) 

533 self._class_map[str(table)] = cls 

534 cls.__on_define__(self) 

535 else: 

536 warnings.warn("db.define used without inheriting TypedTable. This could lead to strange problems!") 

537 

538 if not tablename.startswith("typedal_") and cache_dependency: 

539 table._before_update.append(lambda s, _: _remove_cache(s, tablename)) 

540 table._before_delete.append(lambda s: _remove_cache(s, tablename)) 

541 

542 return cls 
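# Rough sketch of what _define does for a typical model (hypothetical classes, not executed):
#
#   @db.define
#   class Person(TypedTable):
#       name: str              # -> Field("name", "string", notnull=True)
#       age: int | None        # -> Field("age", "integer", notnull=False)
#       employer: "Company"    # -> Field("employer", "reference company", notnull=True)
#                              #    plus an implicit inner-join Relationship
#
#   # which roughly amounts to:
#   # db.define_table("person", Field("name", "string", notnull=True), ...)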

543 

544 @typing.overload 

545 def define(self, maybe_cls: None = None, **kwargs: Any) -> typing.Callable[[typing.Type[T]], typing.Type[T]]: 

546 """ 

547 Typing Overload for define without a class. 

548 

549 @db.define() 

550 class MyTable(TypedTable): ... 

551 """ 

552 

553 @typing.overload 

554 def define(self, maybe_cls: typing.Type[T], **kwargs: Any) -> typing.Type[T]: 

555 """ 

556 Typing Overload for define with a class. 

557 

558 @db.define 

559 class MyTable(TypedTable): ... 

560 """ 

561 

562 def define( 

563 self, maybe_cls: typing.Type[T] | None = None, **kwargs: Any 

564 ) -> typing.Type[T] | typing.Callable[[typing.Type[T]], typing.Type[T]]: 

565 """ 

566 Can be used as a decorator on a class that inherits `TypedTable`, \ 

567 or as a regular method if you need to define your classes before you have access to a 'db' instance. 

568 

569 You can also pass extra arguments to db.define_table. 

570 See http://www.web2py.com/books/default/chapter/29/06/the-database-abstraction-layer#Table-constructor 

571 

572 Example: 

573 @db.define 

574 class Person(TypedTable): 

575 ... 

576 

577 class Article(TypedTable): 

578 ... 

579 

580 # at a later time: 

581 db.define(Article) 

582 

583 Returns: 

584 the result of pydal.define_table 

585 """ 

586 

587 def wrapper(cls: typing.Type[T]) -> typing.Type[T]: 

588 return self._define(cls, **kwargs) 

589 

590 if maybe_cls: 

591 return wrapper(maybe_cls) 

592 

593 return wrapper 

594 

595 # def drop(self, table_name: str) -> None: 

596 # """ 

597 # Remove a table by name (both on the database level and the typedal level). 

598 # """ 

599 # # drop calls TypedTable.drop() and removes it from the `_class_map` 

600 # if cls := self._class_map.pop(table_name, None): 

601 # cls.drop() 

602 

603 # def drop_all(self, max_retries: int = None) -> None: 

604 # """ 

605 # Remove all tables and keep doing so until everything is gone! 

606 # """ 

607 # retries = 0 

608 # if max_retries is None: 

609 # max_retries = len(self.tables) 

610 # 

611 # while self.tables: 

612 # retries += 1 

613 # for table in self.tables: 

614 # self.drop(table) 

615 # 

616 # if retries > max_retries: 

617 # raise RuntimeError("Could not delete all tables") 

618 

619 def __call__(self, *_args: T_Query, **kwargs: Any) -> "TypedSet": 

620 """ 

621 A db instance can be called directly to perform a query. 

622 

623 Usually, only a query is passed. 

624 

625 Example: 

626 db(query).select() 

627 

628 """ 

629 args = list(_args) 

630 if args: 

631 cls = args[0] 

632 if isinstance(cls, bool): 

633 raise ValueError("Don't actually pass a bool to db()! Use a query instead.") 

634 

635 if isinstance(cls, type) and issubclass(type(cls), type) and issubclass(cls, TypedTable): 

636 # table defined without @db.define decorator! 

637 _cls: typing.Type[TypedTable] = cls 

638 args[0] = _cls.id != None 

639 

640 _set = super().__call__(*args, **kwargs) 

641 return typing.cast(TypedSet, _set) 

642 

643 def __getitem__(self, key: str) -> "Table": 

644 """ 

645 Allows dynamically accessing a table by its name as a string. 

646 

647 Example: 

648 db['users'] -> user 

649 """ 

650 return typing.cast(Table, super().__getitem__(str(key))) 

651 

652 @classmethod 

653 def _build_field(cls, name: str, _type: str, **kw: Any) -> Field: 

654 return Field(name, _type, **{**cls.default_kwargs, **kw}) 

655 

656 @classmethod 

657 def _annotation_to_pydal_fieldtype( 

658 cls, _ftype: T_annotation, mut_kw: typing.MutableMapping[str, Any] 

659 ) -> Optional[str]: 

660 # ftype can be a union or type. typing.cast is sometimes used to tell mypy when it's not a union. 

661 ftype = typing.cast(type, _ftype) # cast from typing.Type to type to make mypy happy 

662 

663 if isinstance(ftype, str): 

664 # extract type from string 

665 ftype = typing.get_args(typing.Type[ftype])[0]._evaluate( 

666 localns=locals(), globalns=globals(), recursive_guard=frozenset() 

667 ) 

668 

669 if mapping := BASIC_MAPPINGS.get(ftype): 

670 # basic types 

671 return mapping 

672 elif isinstance(ftype, _Table): 

673 # db.table 

674 return f"reference {ftype._tablename}" 

675 elif issubclass(type(ftype), type) and issubclass(ftype, TypedTable): 

676 # SomeTable 

677 snakename = cls.to_snake(ftype.__name__) 

678 return f"reference {snakename}" 

679 elif isinstance(ftype, TypedField): 

680 # FieldType(type, ...) 

681 return ftype._to_field(mut_kw) 

682 elif origin_is_subclass(ftype, TypedField): 

683 # TypedField[int] 

684 return cls._annotation_to_pydal_fieldtype(typing.get_args(ftype)[0], mut_kw) 

685 elif isinstance(ftype, types.GenericAlias) and typing.get_origin(ftype) in (list, TypedField): 

686 # list[str] -> str -> string -> list:string 

687 _child_type = typing.get_args(ftype)[0] 

688 _child_type = cls._annotation_to_pydal_fieldtype(_child_type, mut_kw) 

689 return f"list:{_child_type}" 

690 elif is_union(ftype): 

691 # str | int -> UnionType 

692 # typing.Union[str | int] -> typing._UnionGenericAlias 

693 

694 # Optional[type] == type | None 

695 

696 match typing.get_args(ftype): 

697 case (_child_type, _Types.NONETYPE) | (_Types.NONETYPE, _child_type): 

698 # good union of Nullable 

699 

700 # if a field is optional, it is nullable: 

701 mut_kw["notnull"] = False 

702 return cls._annotation_to_pydal_fieldtype(_child_type, mut_kw) 

703 case _: 

704 # two types is not supported by the db! 

705 return None 

706 else: 

707 return None 
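# The branches above, summarised as annotation -> pydal field type (illustrative only):
#
#   str                  -> "string"
#   SomeTable            -> "reference some_table"
#   TypedField[int]      -> "integer"
#   list[str]            -> "list:string"
#   list[SomeTable]      -> "list:reference some_table"
#   typing.Optional[int] -> "integer"   (and mut_kw["notnull"] = False)
#   str | int            -> None        (unsupported union)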

708 

709 @classmethod 

710 def _to_field(cls, fname: str, ftype: type, **kw: Any) -> Field: 

711 """ 

712 Convert an annotation into a pydal Field. 

713 

714 Args: 

715 fname: name of the property 

716 ftype: annotation of the property 

717 kw: when using TypedField or a function returning it (e.g. StringField), 

718 keyword args can be used to pass any other settings you would normally to a pydal Field 

719 

720 -> pydal.Field(fname, ftype, **kw) 

721 

722 Example: 

723 class MyTable: 

724 fname: ftype 

725 id: int 

726 name: str 

727 reference: Table 

728 other: TypedField(str, default="John Doe") # default will be in kwargs 

729 """ 

730 fname = cls.to_snake(fname) 

731 

732 if converted_type := cls._annotation_to_pydal_fieldtype(ftype, kw): 

733 return cls._build_field(fname, converted_type, **kw) 

734 else: 

735 raise NotImplementedError(f"Unsupported type {ftype}/{type(ftype)}") 

736 

737 @staticmethod 

738 def to_snake(camel: str) -> str: 

739 """ 

740 Moved to helpers, kept as a static method for legacy reasons. 

741 """ 

742 return to_snake(camel) 

743 

744 

745class TableProtocol(typing.Protocol): # pragma: no cover 

746 """ 

747 Make mypy happy. 

748 """ 

749 

750 id: "TypedField[int]" # noqa: A003 

751 

752 def __getitem__(self, item: str) -> Field: 

753 """ 

754 Tell mypy a Table supports dictionary notation for columns. 

755 """ 

756 

757 

758class Table(_Table, TableProtocol): # type: ignore 

759 """ 

760 Make mypy happy. 

761 """ 

762 

763 

764class TableMeta(type): 

765 """ 

766 This metaclass contains functionality for table classes that doesn't exist on their instances. 

767 

768 Example: 

769 class MyTable(TypedTable): 

770 some_field: TypedField[int] 

771 

772 MyTable.update_or_insert(...) # should work 

773 

774 MyTable.some_field # -> Field, can be used to query etc. 

775 

776 row = MyTable.first() # returns instance of MyTable 

777 

778 # row.update_or_insert(...) # shouldn't work! 

779 

780 row.some_field # -> int, with actual data 

781 

782 """ 

783 

784 # set up by db.define: 

785 # _db: TypeDAL | None = None 

786 # _table: Table | None = None 

787 _db: TypeDAL | None = None 

788 _table: Table | None = None 

789 _relationships: dict[str, Relationship[Any]] | None = None 

790 

791 ######################### 

792 # TypeDAL custom logic: # 

793 ######################### 

794 

795 def __set_internals__(self, db: pydal.DAL, table: Table, relationships: dict[str, Relationship[Any]]) -> None: 

796 """ 

797 Store the related database and pydal table for later usage. 

798 """ 

799 self._db = db 

800 self._table = table 

801 self._relationships = relationships 

802 

803 def __getattr__(self, col: str) -> Optional[Field]: 

804 """ 

805 Magic method used by TableMeta to get a database field with dot notation on a class. 

806 

807 Example: 

808 SomeTypedTable.col -> db.table.col (via TableMeta.__getattr__) 

809 

810 """ 

811 if self._table: 

812 return getattr(self._table, col, None) 

813 

814 return None 

815 

816 def _ensure_table_defined(self) -> Table: 

817 if not self._table: 

818 raise EnvironmentError("@define or db.define is not called on this class yet!") 

819 return self._table 

820 

821 def __iter__(self) -> typing.Generator[Field, None, None]: 

822 """ 

823 Loop through the columns of this model. 

824 """ 

825 table = self._ensure_table_defined() 

826 yield from iter(table) 

827 

828 def __getitem__(self, item: str) -> Field: 

829 """ 

830 Allow dict notation to get a column of this table (-> Field instance). 

831 """ 

832 table = self._ensure_table_defined() 

833 return table[item] 

834 

835 def __str__(self) -> str: 

836 """ 

837 Normally, just returns the underlying table name, but with a fallback if the model is unbound. 

838 """ 

839 if self._table: 

840 return str(self._table) 

841 else: 

842 return f"<unbound table {self.__name__}>" 

843 

844 def from_row(self: typing.Type[T_MetaInstance], row: pydal.objects.Row) -> T_MetaInstance: 

845 """ 

846 Create a model instance from a pydal row. 

847 """ 

848 return self(row) 

849 

850 def all(self: typing.Type[T_MetaInstance]) -> "TypedRows[T_MetaInstance]": # noqa: A003 

851 """ 

852 Return all rows for this model. 

853 """ 

854 return self.collect() 

855 

856 def get_relationships(self) -> dict[str, Relationship[Any]]: 

857 """ 

858 Return the registered relationships of the current model. 

859 """ 

860 return self._relationships or {} 

861 

862 ########################## 

863 # TypeDAL Modified Logic # 

864 ########################## 

865 

866 def insert(self: typing.Type[T_MetaInstance], **fields: Any) -> T_MetaInstance: 

867 """ 

868 This is only called when db.define is not used as a decorator. 

869 

870 cls.__table functions as 'self' 

871 

872 Args: 

873 **fields: anything you want to insert in the database 

874 

875 Returns: an instance of this model bound to the newly inserted row. 

876 

877 """ 

878 table = self._ensure_table_defined() 

879 

880 result = table.insert(**fields) 

881 # it already is an int but mypy doesn't understand that 

882 return self(result) 

883 

884 def _insert(self, **fields: Any) -> str: 

885 table = self._ensure_table_defined() 

886 

887 return str(table._insert(**fields)) 

888 

889 def bulk_insert(self: typing.Type[T_MetaInstance], items: list[dict[str, Any]]) -> "TypedRows[T_MetaInstance]": 

890 """ 

891 Insert multiple rows, returns a TypedRows set of new instances. 

892 """ 

893 table = self._ensure_table_defined() 

894 result = table.bulk_insert(items) 

895 return self.where(lambda row: row.id.belongs(result)).collect() 

896 

897 def update_or_insert( 

898 self: typing.Type[T_MetaInstance], query: T_Query | dict[str, Any] = DEFAULT, **values: Any 

899 ) -> T_MetaInstance: 

900 """ 

901 Update a row if query matches, else insert a new one. 

902 

903 Returns the created or updated instance. 

904 """ 

905 table = self._ensure_table_defined() 

906 

907 if query is DEFAULT: 

908 record = table(**values) 

909 elif isinstance(query, dict): 

910 record = table(**query) 

911 else: 

912 record = table(query) 

913 

914 if not record: 

915 return self.insert(**values) 

916 

917 record.update_record(**values) 

918 return self(record) 
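# Usage sketch (hypothetical model and values, not executed):
#
#   # update the row matching the dict, or insert it when no match exists:
#   person = Person.update_or_insert({"name": "Alice"}, name="Alice", age=31)
#   # a Query also works:
#   person = Person.update_or_insert(Person.name == "Alice", age=32)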

919 

920 def validate_and_insert( 

921 self: typing.Type[T_MetaInstance], **fields: Any 

922 ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]: 

923 """ 

924 Validate input data and then insert a row. 

925 

926 Returns a tuple of (the created instance, a dict of errors). 

927 """ 

928 table = self._ensure_table_defined() 

929 result = table.validate_and_insert(**fields) 

930 if row_id := result.get("id"): 

931 return self(row_id), None 

932 else: 

933 return None, result.get("errors") 
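# Usage sketch of the (instance, errors) tuple returned above (hypothetical model):
#
#   row, errors = Person.validate_and_insert(name="Bob")
#   if errors:
#       ...  # dict mapping field names to validation messages
#   else:
#       ...  # row is a Person instance bound to the newly inserted record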

934 

935 def validate_and_update( 

936 self: typing.Type[T_MetaInstance], query: Query, **fields: Any 

937 ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]: 

938 """ 

939 Validate input data and then update max 1 row. 

940 

941 Returns a tuple of (the updated instance, a dict of errors). 

942 """ 

943 table = self._ensure_table_defined() 

944 

945 try: 

946 result = table.validate_and_update(query, **fields) 

947 except Exception as e: 

948 result = {"errors": {"exception": str(e)}} 

949 

950 if errors := result.get("errors"): 

951 return None, errors 

952 elif row_id := result.get("id"): 

953 return self(row_id), None 

954 else: # pragma: no cover 

955 # update on query without result (shouldn't happen) 

956 return None, None 

957 

958 def validate_and_update_or_insert( 

959 self: typing.Type[T_MetaInstance], query: Query, **fields: Any 

960 ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]: 

961 """ 

962 Validate input data and then update or insert (on max 1 row). 

963 

964 Returns a tuple of (the updated/created instance, a dict of errors). 

965 """ 

966 table = self._ensure_table_defined() 

967 result = table.validate_and_update_or_insert(query, **fields) 

968 

969 if errors := result.get("errors"): 

970 return None, errors 

971 elif row_id := result.get("id"): 

972 return self(row_id), None 

973 else: # pragma: no cover 

974 # update on query without result (shouldn't happen) 

975 return None, None 

976 

977 def select(self: typing.Type[T_MetaInstance], *a: Any, **kw: Any) -> "QueryBuilder[T_MetaInstance]": 

978 """ 

979 See QueryBuilder.select! 

980 """ 

981 return QueryBuilder(self).select(*a, **kw) 

982 

983 def paginate(self: typing.Type[T_MetaInstance], limit: int, page: int = 1) -> "PaginatedRows[T_MetaInstance]": 

984 """ 

985 See QueryBuilder.paginate! 

986 """ 

987 return QueryBuilder(self).paginate(limit=limit, page=page) 

988 

989 def chunk( 

990 self: typing.Type[T_MetaInstance], chunk_size: int 

991 ) -> typing.Generator["TypedRows[T_MetaInstance]", Any, None]: 

992 """ 

993 See QueryBuilder.chunk! 

994 """ 

995 return QueryBuilder(self).chunk(chunk_size) 

996 

997 def where(self: typing.Type[T_MetaInstance], *a: Any, **kw: Any) -> "QueryBuilder[T_MetaInstance]": 

998 """ 

999 See QueryBuilder.where! 

1000 """ 

1001 return QueryBuilder(self).where(*a, **kw) 

1002 

1003 def cache(self: typing.Type[T_MetaInstance], *deps: Any, **kwargs: Any) -> "QueryBuilder[T_MetaInstance]": 

1004 """ 

1005 See QueryBuilder.cache! 

1006 """ 

1007 return QueryBuilder(self).cache(*deps, **kwargs) 

1008 

1009 def count(self: typing.Type[T_MetaInstance]) -> int: 

1010 """ 

1011 See QueryBuilder.count! 

1012 """ 

1013 return QueryBuilder(self).count() 

1014 

1015 def first(self: typing.Type[T_MetaInstance]) -> T_MetaInstance | None: 

1016 """ 

1017 See QueryBuilder.first! 

1018 """ 

1019 return QueryBuilder(self).first() 

1020 

1021 def join( 

1022 self: typing.Type[T_MetaInstance], 

1023 *fields: str | typing.Type["TypedTable"], 

1024 method: JOIN_OPTIONS = None, 

1025 on: OnQuery | list[Expression] | Expression = None, 

1026 condition: Condition = None, 

1027 ) -> "QueryBuilder[T_MetaInstance]": 

1028 """ 

1029 See QueryBuilder.join! 

1030 """ 

1031 return QueryBuilder(self).join(*fields, on=on, condition=condition, method=method) 
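# The class-level helpers above each start a QueryBuilder, so they can be chained.
# Illustrative sketch (hypothetical models; the QueryBuilder itself is defined elsewhere):
#
#   page = User.where(User.id > 0).join("posts").paginate(limit=10, page=1)
#   first_user = User.where(User.name == "Alice").first()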

1032 

1033 def collect(self: typing.Type[T_MetaInstance], verbose: bool = False) -> "TypedRows[T_MetaInstance]": 

1034 """ 

1035 See QueryBuilder.collect! 

1036 """ 

1037 return QueryBuilder(self).collect(verbose=verbose) 

1038 

1039 @property 

1040 def ALL(cls) -> pydal.objects.SQLALL: 

1041 """ 

1042 Select all fields for this table. 

1043 """ 

1044 table = cls._ensure_table_defined() 

1045 

1046 return table.ALL 

1047 

1048 ########################## 

1049 # TypeDAL Shadowed Logic # 

1050 ########################## 

1051 fields: list[str] 

1052 

1053 # other table methods: 

1054 

1055 def truncate(self, mode: str = "") -> None: 

1056 """ 

1057 Remove all data and reset index. 

1058 """ 

1059 table = self._ensure_table_defined() 

1060 table.truncate(mode) 

1061 

1062 def drop(self, mode: str = "") -> None: 

1063 """ 

1064 Remove the underlying table. 

1065 """ 

1066 table = self._ensure_table_defined() 

1067 table.drop(mode) 

1068 

1069 def create_index(self, name: str, *fields: Field | str, **kwargs: Any) -> bool: 

1070 """ 

1071 Add an index on some columns of this table. 

1072 """ 

1073 table = self._ensure_table_defined() 

1074 result = table.create_index(name, *fields, **kwargs) 

1075 return typing.cast(bool, result) 

1076 

1077 def drop_index(self, name: str, if_exists: bool = False) -> bool: 

1078 """ 

1079 Remove an index from this table. 

1080 """ 

1081 table = self._ensure_table_defined() 

1082 result = table.drop_index(name, if_exists) 

1083 return typing.cast(bool, result) 

1084 

1085 def import_from_csv_file( 

1086 self, 

1087 csvfile: typing.TextIO, 

1088 id_map: dict[str, str] = None, 

1089 null: Any = "<NULL>", 

1090 unique: str = "uuid", 

1091 id_offset: dict[str, int] = None, # id_offset used only when id_map is None 

1092 transform: typing.Callable[[dict[Any, Any]], dict[Any, Any]] = None, 

1093 validate: bool = False, 

1094 encoding: str = "utf-8", 

1095 delimiter: str = ",", 

1096 quotechar: str = '"', 

1097 quoting: int = csv.QUOTE_MINIMAL, 

1098 restore: bool = False, 

1099 **kwargs: Any, 

1100 ) -> None: 

1101 """ 

1102 Load a csv file into the database. 

1103 """ 

1104 table = self._ensure_table_defined() 

1105 table.import_from_csv_file( 

1106 csvfile, 

1107 id_map=id_map, 

1108 null=null, 

1109 unique=unique, 

1110 id_offset=id_offset, 

1111 transform=transform, 

1112 validate=validate, 

1113 encoding=encoding, 

1114 delimiter=delimiter, 

1115 quotechar=quotechar, 

1116 quoting=quoting, 

1117 restore=restore, 

1118 **kwargs, 

1119 ) 

1120 

1121 def on(self, query: Query | bool) -> Expression: 

1122 """ 

1123 Shadow Table.on. 

1124 

1125 Used for joins. 

1126 

1127 See Also: 

1128 http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#One-to-many-relation 

1129 """ 

1130 table = self._ensure_table_defined() 

1131 return typing.cast(Expression, table.on(query)) 

1132 

1133 def with_alias(self, alias: str) -> _Table: 

1134 """ 

1135 Shadow Table.with_alias. 

1136 

1137 Useful for joins when joining the same table multiple times. 

1138 

1139 See Also: 

1140 http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#One-to-many-relation 

1141 """ 

1142 table = self._ensure_table_defined() 

1143 return table.with_alias(alias) 

1144 

1145 # @typing.dataclass_transform() 

1146 

1147 

1148class TypedField(typing.Generic[T_Value]): # pragma: no cover 

1149 """ 

1150 Typed version of pydal.Field, which will be converted to a normal Field in the background. 

1151 """ 

1152 

1153 # will be set by .bind on db.define 

1154 name = "" 

1155 _db: Optional[pydal.DAL] = None 

1156 _rname: Optional[str] = None 

1157 _table: Optional[Table] = None 

1158 _field: Optional[Field] = None 

1159 

1160 _type: T_annotation 

1161 kwargs: Any 

1162 

1163 requires: Validator | typing.Iterable[Validator] 

1164 

1165 def __init__(self, _type: typing.Type[T_Value] | types.UnionType = str, /, **settings: Any) -> None: # type: ignore 

1166 """ 

1167 A TypedFieldType should not be initialized manually; use TypedField (from `fields.py`) instead! 

1168 """ 

1169 self._type = _type 

1170 self.kwargs = settings 

1171 super().__init__() 

1172 

1173 @typing.overload 

1174 def __get__(self, instance: T_MetaInstance, owner: typing.Type[T_MetaInstance]) -> T_Value: # pragma: no cover 

1175 """ 

1176 row.field -> (actual data). 

1177 """ 

1178 

1179 @typing.overload 

1180 def __get__(self, instance: None, owner: "typing.Type[TypedTable]") -> "TypedField[T_Value]": # pragma: no cover 

1181 """ 

1182 Table.field -> Field. 

1183 """ 

1184 

1185 def __get__( 

1186 self, instance: T_MetaInstance | None, owner: typing.Type[T_MetaInstance] 

1187 ) -> typing.Union[T_Value, "TypedField[T_Value]"]: 

1188 """ 

1189 Since this class is a Descriptor field, \ 

1190 it returns something else depending on if it's called on a class or instance. 

1191 

1192 (this is mostly for mypy/typing) 

1193 """ 

1194 if instance: 

1195 # this is only reached in a very specific case: 

1196 # an instance of the object was created with a specific set of fields selected (excluding the current one) 

1197 # in that case, no value was stored in the owner -> return None (since the field was not selected) 

1198 return typing.cast(T_Value, None) # cast as T_Value so mypy understands it for selected fields 

1199 else: 

1200 # getting as class -> return actual field so pydal understands it when using in query etc. 

1201 return typing.cast(TypedField[T_Value], self._field) # pretend it's still typed for IDE support 

1202 

1203 def __str__(self) -> str: 

1204 """ 

1205 String representation of a Typed Field. 

1206 

1207 If `type` is set explicitly (e.g. TypedField(str, type="text")), that type is used: `TypedField.text`, 

1208 otherwise the type annotation is used (e.g. TypedField(str) -> TypedField.str) 

1209 """ 

1210 return str(self._field) if self._field else "" 

1211 

1212 def __repr__(self) -> str: 

1213 """ 

1214 More detailed string representation of a Typed Field. 

1215 

1216 Uses __str__ and adds the provided extra options (kwargs) in the representation. 

1217 """ 

1218 s = self.__str__() 

1219 

1220 if "type" in self.kwargs: 

1221 # manual type in kwargs supplied 

1222 t = self.kwargs["type"] 

1223 elif issubclass(type, type(self._type)): 

1224 # normal type, str.__name__ = 'str' 

1225 t = getattr(self._type, "__name__", str(self._type)) 

1226 elif t_args := typing.get_args(self._type): 

1227 # list[str] -> 'str' 

1228 t = t_args[0].__name__ 

1229 else: # pragma: no cover 

1230 # fallback - something else, may not even happen, I'm not sure 

1231 t = self._type 

1232 

1233 s = f"TypedField[{t}].{s}" if s else f"TypedField[{t}]" 

1234 

1235 kw = self.kwargs.copy() 

1236 kw.pop("type", None) 

1237 return f"<{s} with options {kw}>" 

1238 

1239 def _to_field(self, extra_kwargs: typing.MutableMapping[str, Any]) -> Optional[str]: 

1240 """ 

1241 Convert a Typed Field instance to a pydal.Field. 

1242 """ 

1243 other_kwargs = self.kwargs.copy() 

1244 extra_kwargs.update(other_kwargs) 

1245 return extra_kwargs.pop("type", False) or TypeDAL._annotation_to_pydal_fieldtype(self._type, extra_kwargs) 

1246 

1247 def bind(self, field: pydal.objects.Field, table: pydal.objects.Table) -> None: 

1248 """ 

1249 Bind the right db/table/field info to this class, so queries can be made using `Class.field == ...`. 

1250 """ 

1251 self._table = table 

1252 self._field = field 

1253 

1254 def __getattr__(self, key: str) -> Any: 

1255 """ 

1256 If the regular getattribute does not work, try to get info from the related Field. 

1257 """ 

1258 with contextlib.suppress(AttributeError): 

1259 return super().__getattribute__(key) 

1260 

1261 # try on actual field: 

1262 return getattr(self._field, key) 

1263 

1264 def __eq__(self, other: Any) -> Query: 

1265 """ 

1266 Performing == on a Field will result in a Query. 

1267 """ 

1268 return typing.cast(Query, self._field == other) 

1269 

1270 def __ne__(self, other: Any) -> Query: 

1271 """ 

1272 Performing != on a Field will result in a Query. 

1273 """ 

1274 return typing.cast(Query, self._field != other) 

1275 

1276 def __gt__(self, other: Any) -> Query: 

1277 """ 

1278 Performing > on a Field will result in a Query. 

1279 """ 

1280 return typing.cast(Query, self._field > other) 

1281 

1282 def __lt__(self, other: Any) -> Query: 

1283 """ 

1284 Performing < on a Field will result in a Query. 

1285 """ 

1286 return typing.cast(Query, self._field < other) 

1287 

1288 def __ge__(self, other: Any) -> Query: 

1289 """ 

1290 Performing >= on a Field will result in a Query. 

1291 """ 

1292 return typing.cast(Query, self._field >= other) 

1293 

1294 def __le__(self, other: Any) -> Query: 

1295 """ 

1296 Performing <= on a Field will result in a Query. 

1297 """ 

1298 return typing.cast(Query, self._field <= other) 

1299 

1300 def __hash__(self) -> int: 

1301 """ 

1302 Shadow Field.__hash__. 

1303 """ 

1304 return hash(self._field) 

1305 

1306 def __invert__(self) -> Expression: 

1307 """ 

1308 Performing ~ on a Field will result in an Expression. 

1309 """ 

1310 if not self._field: # pragma: no cover 

1311 raise ValueError("Unbound Field can not be inverted!") 

1312 

1313 return typing.cast(Expression, ~self._field) 
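# Illustrative sketch of TypedField in a model definition (hypothetical class, not executed):
#
#   @db.define
#   class Product(TypedTable):
#       name: TypedField[str]                   # plain typed column
#       price = TypedField(float, default=0.0)  # extra pydal Field settings via kwargs
#
#   db(Product.price > 10).select()   # class access yields the bound Field, so queries work
#   # instance access (e.g. product.price) yields the stored value instead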

1314 

1315 

1316class TypedTable(metaclass=TableMeta): 

1317 """ 

1318 Enhanced modeling system on top of pydal's Table that adds typing and additional functionality. 

1319 """ 

1320 

1321 # set up by 'new': 

1322 _row: Row | None = None 

1323 

1324 _with: list[str] 

1325 

1326 id: "TypedField[int]" # noqa: A003 

1327 

1328 _before_insert: list[BeforeInsertCallable] 

1329 _after_insert: list[AfterInsertCallable] 

1330 _before_update: list[BeforeUpdateCallable] 

1331 _after_update: list[AfterUpdateCallable] 

1332 _before_delete: list[BeforeDeleteCallable] 

1333 _after_delete: list[AfterDeleteCallable] 

1334 

1335 def _setup_instance_methods(self) -> None: 

1336 self.as_dict = self._as_dict # type: ignore 

1337 self.__json__ = self.as_json = self._as_json # type: ignore 

1338 # self.as_yaml = self._as_yaml # type: ignore 

1339 self.as_xml = self._as_xml # type: ignore 

1340 

1341 self.update = self._update # type: ignore 

1342 

1343 self.delete_record = self._delete_record # type: ignore 

1344 self.update_record = self._update_record # type: ignore 

1345 

1346 def __new__( 

1347 cls, row_or_id: typing.Union[Row, Query, pydal.objects.Set, int, str, None, "TypedTable"] = None, **filters: Any 

1348 ) -> "TypedTable": 

1349 """ 

1350 Create a Typed Rows model instance from an existing row, ID or query. 

1351 

1352 Examples: 

1353 MyTable(1) 

1354 MyTable(id=1) 

1355 MyTable(MyTable.id == 1) 

1356 """ 

1357 table = cls._ensure_table_defined() 

1358 inst = super().__new__(cls) 

1359 

1360 if isinstance(row_or_id, TypedTable): 

1361 # existing typed table instance! 

1362 return row_or_id 

1363 elif isinstance(row_or_id, pydal.objects.Row): 

1364 row = row_or_id 

1365 elif row_or_id is not None: 

1366 row = table(row_or_id, **filters) 

1367 elif filters: 

1368 row = table(**filters) 

1369 else: 

1370 # dummy object 

1371 return inst 

1372 

1373 if not row: 

1374 return None # type: ignore 

1375 

1376 inst._row = row 

1377 inst.__dict__.update(row) 

1378 inst._setup_instance_methods() 

1379 return inst 

1380 

1381 @classmethod 

1382 def __on_define__(cls, db: TypeDAL) -> None: 

1383 """ 

1384 Method that can be implemented by tables to do an action after db.define is completed. 

1385 

1386 This can be useful if you need to add something like requires=IS_NOT_IN_DB(db, "table.field"), 

1387 where you need a reference to the current database, which may not exist yet when defining the model. 

1388 """ 

1389 

1390 def __iter__(self) -> typing.Generator[Any, None, None]: 

1391 """ 

1392 Allows looping through the columns. 

1393 """ 

1394 row = self._ensure_matching_row() 

1395 yield from iter(row) 

1396 

1397 def __getitem__(self, item: str) -> Any: 

1398 """ 

1399 Allows dictionary notation to get columns. 

1400 """ 

1401 if item in self.__dict__: 

1402 return self.__dict__.get(item) 

1403 

1404 # fallback to lookup in row 

1405 if self._row: 

1406 return self._row[item] 

1407 

1408 # nothing found! 

1409 raise KeyError(item) 

1410 

1411 def __getattr__(self, item: str) -> Any: 

1412 """ 

1413 Allows dot notation to get columns. 

1414 """ 

1415 if value := self.get(item): 

1416 return value 

1417 

1418 raise AttributeError(item) 

1419 

1420 def get(self, item: str, default: Any = None) -> Any: 

1421 """ 

1422 Try to get a column from this instance, else return default. 

1423 """ 

1424 try: 

1425 return self.__getitem__(item) 

1426 except KeyError: 

1427 return default 

1428 

1429 def __setitem__(self, key: str, value: Any) -> None: 

1430 """ 

1431 Data can both be updated via dot and dict notation. 

1432 """ 

1433 return setattr(self, key, value) 

1434 

1435 def __int__(self) -> int: 

1436 """ 

1437 Calling int on a model instance will return its id. 

1438 """ 

1439 return getattr(self, "id", 0) 

1440 

1441 def __bool__(self) -> bool: 

1442 """ 

1443 If the instance has an underlying row with data, it is truthy. 

1444 """ 

1445 return bool(getattr(self, "_row", False)) 

1446 

1447 def _ensure_matching_row(self) -> Row: 

1448 if not getattr(self, "_row", None): 

1449 raise EnvironmentError("Trying to access a non-existent row. Maybe it was deleted or not yet initialized?") 

1450 return self._row 

1451 

1452 def __repr__(self) -> str: 

1453 """ 

1454 String representation of the model instance. 

1455 """ 

1456 model_name = self.__class__.__name__ 

1457 model_data = {} 

1458 

1459 if self._row: 

1460 model_data = self._row.as_json() 

1461 

1462 details = model_name 

1463 details += f"({model_data})" 

1464 

1465 if relationships := getattr(self, "_with", []): 

1466 details += f" + {relationships}" 

1467 

1468 return f"<{details}>" 

1469 

1470 # serialization 

1471 # underscore variants work for class instances (set up by _setup_instance_methods) 

1472 

1473 @classmethod 

1474 def as_dict(cls, flat: bool = False, sanitize: bool = True) -> dict[str, Any]: 

1475 """ 

1476 Dump the object to a plain dict. 

1477 

1478 Can be used as both a class or instance method: 

1479 - dumps the table info if it's a class 

1480 - dumps the row info if it's an instance (see _as_dict) 

1481 """ 

1482 table = cls._ensure_table_defined() 

1483 result = table.as_dict(flat, sanitize) 

1484 return typing.cast(dict[str, Any], result) 

1485 

1486 @classmethod 

1487 def as_json(cls, sanitize: bool = True, indent: Optional[int] = None, **kwargs: Any) -> str: 

1488 """ 

1489 Dump the object to json. 

1490 

1491 Can be used as both a class or instance method: 

1492 - dumps the table info if it's a class 

1493 - dumps the row info if it's an instance (see _as_json) 

1494 """ 

1495 data = cls.as_dict(sanitize=sanitize) 

1496 return as_json.encode(data, indent=indent, **kwargs) 

1497 

1498 @classmethod 

1499 def as_xml(cls, sanitize: bool = True) -> str: # pragma: no cover 

1500 """ 

1501 Dump the object to xml. 

1502 

1503 Can be used as both a class or instance method: 

1504 - dumps the table info if it's a class 

1505 - dumps the row info if it's an instance (see _as_xml) 

1506 """ 

1507 table = cls._ensure_table_defined() 

1508 return typing.cast(str, table.as_xml(sanitize)) 

1509 

1510 @classmethod 

1511 def as_yaml(cls, sanitize: bool = True) -> str: 

1512 """ 

1513 Dump the object to yaml. 

1514 

1515 Can be used as both a class or instance method: 

1516 - dumps the table info if it's a class 

1517 - dumps the row info if it's an instance (see _as_yaml) 

1518 """ 

1519 table = cls._ensure_table_defined() 

1520 return typing.cast(str, table.as_yaml(sanitize)) 

1521 

1522 def _as_dict( 

1523 self, datetime_to_str: bool = False, custom_types: typing.Iterable[type] | type | None = None 

1524 ) -> dict[str, Any]: 

1525 row = self._ensure_matching_row() 

1526 

1527 result = row.as_dict(datetime_to_str=datetime_to_str, custom_types=custom_types) 

1528 

1529 def asdict_method(obj: Any) -> Any: # pragma: no cover 

1530 if hasattr(obj, "_as_dict"): # typedal 

1531 return obj._as_dict() 

1532 elif hasattr(obj, "as_dict"): # pydal 

1533 return obj.as_dict() 

1534 else: # something else?? 

1535 return obj.__dict__ 

1536 

1537 if _with := getattr(self, "_with", None): 

1538 for relationship in _with: 

1539 data = self.get(relationship) 

1540 

1541 if isinstance(data, list): 

1542 data = [asdict_method(_) for _ in data] 

1543 elif data: 

1544 data = asdict_method(data) 

1545 

1546 result[relationship] = data 

1547 

1548 return typing.cast(dict[str, Any], result) 

1549 

1550 def _as_json( 

1551 self, 

1552 default: typing.Callable[[Any], Any] = None, 

1553 indent: Optional[int] = None, 

1554 **kwargs: Any, 

1555 ) -> str: 

1556 data = self._as_dict() 

1557 return as_json.encode(data, default=default, indent=indent, **kwargs) 

1558 

1559 def _as_xml(self, sanitize: bool = True) -> str: # pragma: no cover 

1560 row = self._ensure_matching_row() 

1561 return typing.cast(str, row.as_xml(sanitize)) 

1562 

1563 # def _as_yaml(self, sanitize: bool = True) -> str: 

1564 # row = self._ensure_matching_row() 

1565 # return typing.cast(str, row.as_yaml(sanitize)) 

1566 

1567 def __setattr__(self, key: str, value: Any) -> None: 

1568 """ 

1569 When setting a property on a Typed Table model instance, also update the underlying row. 

1570 """ 

1571 if self._row and key in self._row.__dict__ and not callable(value): 

1572 # enables `row.key = value; row.update_record()` 

1573 self._row[key] = value 

1574 

1575 super().__setattr__(key, value) 

1576 

1577 @classmethod 

1578 def update(cls: typing.Type[T_MetaInstance], query: Query, **fields: Any) -> T_MetaInstance | None: 

1579 """ 

1580 Update one record. 

1581 

1582 Example: 

1583 MyTable.update(MyTable.id == 1, name="NewName") -> MyTable 

1584 """ 

1585 # todo: update multiple? 

1586 if record := cls(query): 

1587 return record.update_record(**fields) 

1588 else: 

1589 return None 

1590 

1591 def _update(self: T_MetaInstance, **fields: Any) -> T_MetaInstance: 

1592 row = self._ensure_matching_row() 

1593 row.update(**fields) 

1594 self.__dict__.update(**fields) 

1595 return self 

1596 

1597 def _update_record(self: T_MetaInstance, **fields: Any) -> T_MetaInstance: 

1598 row = self._ensure_matching_row() 

1599 new_row = row.update_record(**fields) 

1600 self.update(**new_row) 

1601 return self 

1602 

1603 def update_record(self: T_MetaInstance, **fields: Any) -> T_MetaInstance: # pragma: no cover 

1604 """ 

1605 Here as a placeholder for _update_record. 

1606 

1607 Will be replaced on instance creation! 

1608 """ 

1609 return self._update_record(**fields) 

1610 

1611 def _delete_record(self) -> int: 

1612 """ 

1613 Actual logic in `pydal.helpers.classes.RecordDeleter`. 

1614 """ 

1615 row = self._ensure_matching_row() 

1616 result = row.delete_record() 

1617 self.__dict__ = {} # empty self, since row is no more. 

1618 self._row = None # just to be sure 

1619 self._setup_instance_methods() 

1620 # ^ instance methods might've been deleted by emptying dict, 

1621 # but we still want .as_dict to show an error, not the table's as_dict. 

1622 return typing.cast(int, result) 

1623 

1624 def delete_record(self) -> int: # pragma: no cover 

1625 """ 

1626 Here as a placeholder for _delete_record. 

1627 

1628 Will be replaced on instance creation! 

1629 """ 

1630 return self._delete_record() 

1631 

1632 # __del__ is also called on the end of a scope so don't remove records on every del!! 

1633 

1634 # pickling: 

1635 

1636 def __getstate__(self) -> dict[str, Any]: 

1637 """ 

1638 State to save when pickling. 

1639 

1640 Prevents db connection from being pickled. 

1641 Similar to as_dict but without changing the data of the relationships (dill does that recursively) 

1642 """ 

1643 row = self._ensure_matching_row() 

1644 result: dict[str, Any] = row.as_dict() 

1645 

1646 if _with := getattr(self, "_with", None): 

1647 result["_with"] = _with 

1648 for relationship in _with: 

1649 data = self.get(relationship) 

1650 

1651 result[relationship] = data 

1652 

1653 result["_row"] = self._row.as_json() if self._row else "" 

1654 return result 

1655 

1656 def __setstate__(self, state: dict[str, Any]) -> None: 

1657 """ 

1658 Used by dill when loading from a bytestring. 

1659 """ 

1660 # as_dict also includes table info, so dump as json to only get the actual row data 

1661 # then create a new (more empty) row object: 

1662 state["_row"] = Row(json.loads(state["_row"])) 

1663 self.__dict__ |= state 
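# Pickling sketch, assuming `dill` is installed (as the docstrings above suggest) and a
# hypothetical `person` row instance:
#
#   import dill
#   blob = dill.dumps(person)     # __getstate__: row data plus joined relationships
#   restored = dill.loads(blob)   # __setstate__: rebuilds the Row from its json dump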

1664 

1665 

1666# backwards compat: 

1667TypedRow = TypedTable 

1668 

1669 

1670class TypedRows(typing.Collection[T_MetaInstance], Rows): 

1671 """ 

1672 Slightly enhanced and typed functionality on top of pydal Rows (the result of a select). 

1673 """ 

1674 

1675 records: dict[int, T_MetaInstance] 

1676 # _rows: Rows 

1677 model: typing.Type[T_MetaInstance] 

1678 metadata: Metadata 

1679 

1680 # pseudo-properties: actually stored in _rows 

1681 db: TypeDAL 

1682 colnames: list[str] 

1683 fields: list[Field] 

1684 colnames_fields: list[Field] 

1685 response: list[tuple[Any, ...]] 

1686 

1687 def __init__( 

1688 self, 

1689 rows: Rows, 

1690 model: typing.Type[T_MetaInstance], 

1691 records: dict[int, T_MetaInstance] = None, 

1692 metadata: Metadata = None, 

1693 ) -> None: 

1694 """ 

1695 Should not be called manually! 

1696 

1697 Normally, the `records` from an existing `Rows` object are used 

1698 but these can be overwritten with a `records` dict. 

1699 `metadata` can be any (un)structured data 

1700 `model` is a Typed Table class 

1701 """ 

1702 records = records or {row.id: model(row) for row in rows} 

1703 super().__init__(rows.db, records, rows.colnames, rows.compact, rows.response, rows.fields) 

1704 self.model = model 

1705 self.metadata = metadata or {} 

1706 self.colnames = rows.colnames 

1707 

1708 def __len__(self) -> int: 

1709 """ 

1710 Return the count of rows. 

1711 """ 

1712 return len(self.records) 

1713 

1714 def __iter__(self) -> typing.Iterator[T_MetaInstance]: 

1715 """ 

1716 Loop through the rows. 

1717 """ 

1718 yield from self.records.values() 

1719 

1720 def __contains__(self, ind: Any) -> bool: 

1721 """ 

1722 Check if an id exists in this result set. 

1723 """ 

1724 return ind in self.records 

1725 

1726 def first(self) -> T_MetaInstance | None: 

1727 """ 

1728 Get the row with the lowest id. 

1729 """ 

1730 if not self.records: 

1731 return None 

1732 

1733 return next(iter(self)) 

1734 

1735 def last(self) -> T_MetaInstance | None: 

1736 """ 

1737 Get the row with the highest id. 

1738 """ 

1739 if not self.records: 

1740 return None 

1741 

1742 max_id = max(self.records.keys()) 

1743 return self[max_id] 

1744 

1745 def find( 

1746 self, f: typing.Callable[[T_MetaInstance], Query], limitby: tuple[int, int] = None 

1747 ) -> "TypedRows[T_MetaInstance]": 

1748 """ 

1749 Returns a new Rows object, a subset of the original object, filtered by the function `f`. 

1750 """ 

1751 if not self.records: 

1752 return self.__class__(self, self.model, {}) 

1753 

1754 records = {} 

1755 if limitby: 

1756 _min, _max = limitby 

1757 else: 

1758 _min, _max = 0, len(self) 

1759 count = 0 

1760 for i, row in self.records.items(): 

1761 if f(row): 

1762 if _min <= count: 

1763 records[i] = row 

1764 count += 1 

1765 if count == _max: 

1766 break 

1767 

1768 return self.__class__(self, self.model, records) 

1769 

1770 def exclude(self, f: typing.Callable[[T_MetaInstance], Query]) -> "TypedRows[T_MetaInstance]": 

1771 """ 

1772 Removes elements from the calling Rows object, filtered by the function `f`, \ 

1773 and returns a new Rows object containing the removed elements. 

1774 """ 

1775 if not self.records: 

1776 return self.__class__(self, self.model, {}) 

1777 removed = {} 

1778 to_remove = [] 

1779 for i in self.records: 

1780 row = self[i] 

1781 if f(row): 

1782 removed[i] = self.records[i] 

1783 to_remove.append(i) 

1784 

1785 for i in to_remove: self.records.pop(i) 

1786 

1787 return self.__class__( 

1788 self, 

1789 self.model, 

1790 removed, 

1791 ) 

1792 

1793 def sort(self, f: typing.Callable[[T_MetaInstance], Any], reverse: bool = False) -> list[T_MetaInstance]: 

1794 """ 

1795 Returns a list of sorted elements (not sorted in place). 

1796 """ 

1797 return sorted(self, key=f, reverse=reverse) 
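
# Illustrative sketch of the in-memory helpers above (hypothetical `Article`
# table with a `title` field): find() keeps matching rows, exclude() removes
# them from this result set and returns the removed ones, sort() returns a list.
#
#     rows = Article.where(lambda t: t.id > 0).collect()
#     drafts = rows.find(lambda row: row.title.startswith("draft"))
#     removed = rows.exclude(lambda row: row.title == "")   # mutates `rows`
#     newest_first = rows.sort(lambda row: row.id, reverse=True)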

1798 

1799 def __str__(self) -> str: 

1800 """ 

1801 Simple string representation. 

1802 """ 

1803 return f"<TypedRows with {len(self)} records>" 

1804 

1805 def __repr__(self) -> str: 

1806 """ 

1807 Print a table on repr(). 

1808 """ 

1809 data = self.as_dict() 

1810 headers = list(next(iter(data.values())).keys()) if data else [] 

1811 return mktable(data, headers) 

1812 

1813 def group_by_value( 

1814 self, *fields: "str | Field | TypedField[T]", one_result: bool = False, **kwargs: Any 

1815 ) -> dict[T, list[T_MetaInstance]]: 

1816 """ 

1817 Group the rows by a specific field (which will be the dict key). 

1818 """ 

1819 kwargs["one_result"] = one_result 

1820 result = super().group_by_value(*fields, **kwargs) 

1821 return typing.cast(dict[T, list[T_MetaInstance]], result) 

1822 

1823 def column(self, column: str = None) -> list[Any]: 

1824 """ 

1825 Get a list of all values in a specific column. 

1826 

1827 Example: 

1828 rows.column('name') -> ['Name 1', 'Name 2', ...] 

1829 """ 

1830 return typing.cast(list[Any], super().column(column)) 
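
# Illustrative sketch (hypothetical `Article` table with `author_id` and `title` fields):
#
#     rows = Article.where(lambda t: t.id > 0).collect()
#     by_author = rows.group_by_value("author_id")   # {author_id: [Article, ...]}
#     titles = rows.column("title")                  # ['Title 1', 'Title 2', ...]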

1831 

1832 def as_csv(self) -> str: 

1833 """ 

1834 Dump the data to csv. 

1835 """ 

1836 return typing.cast(str, super().as_csv()) 

1837 

1838 def as_dict( 

1839 self, 

1840 key: str = None, 

1841 compact: bool = False, 

1842 storage_to_dict: bool = False, 

1843 datetime_to_str: bool = False, 

1844 custom_types: list[type] = None, 

1845 ) -> dict[int, dict[str, Any]]: 

1846 """ 

1847 Get the data in a dict of dicts. 

1848 """ 

1849 if any([key, compact, storage_to_dict, datetime_to_str, custom_types]): 

1850 # functionality not guaranteed 

1851 return typing.cast( 

1852 dict[int, dict[str, Any]], 

1853 super().as_dict( 

1854 key or "id", 

1855 compact, 

1856 storage_to_dict, 

1857 datetime_to_str, 

1858 custom_types, 

1859 ), 

1860 ) 

1861 

1862 return {k: v.as_dict() for k, v in self.records.items()} 

1863 

1864 def as_json(self, default: typing.Callable[[Any], Any] = None, indent: Optional[int] = None, **kwargs: Any) -> str: 

1865 """ 

1866 Turn the data into a dict and then dump to JSON. 

1867 """ 

1868 data = self.as_list() 

1869 

1870 return as_json.encode(data, default=default, indent=indent, **kwargs) 

1871 

1872 def json(self, default: typing.Callable[[Any], Any] = None, indent: Optional[int] = None, **kwargs: Any) -> str: 

1873 """ 

1874 Turn the data into a dict and then dump to JSON. 

1875 """ 

1876 return self.as_json(default=default, indent=indent, **kwargs) 

1877 

1878 def as_list( 

1879 self, 

1880 compact: bool = False, 

1881 storage_to_dict: bool = False, 

1882 datetime_to_str: bool = False, 

1883 custom_types: list[type] = None, 

1884 ) -> list[dict[str, Any]]: 

1885 """ 

1886 Get the data in a list of dicts. 

1887 """ 

1888 if any([compact, storage_to_dict, datetime_to_str, custom_types]): 

1889 return typing.cast( 

1890 list[dict[str, Any]], super().as_list(compact, storage_to_dict, datetime_to_str, custom_types) 

1891 ) 

1892 

1893 return [_.as_dict() for _ in self.records.values()] 

1894 

1895 def __getitem__(self, item: int) -> T_MetaInstance: 

1896 """ 

1897 You can get a specific row by ID from a TypedRows by using rows[id] notation. 

1898 

1899 Since pydal's implementation differs (they expect a list instead of a dict with id keys), 

1900 using rows[0] will return the first row, regardless of its id. 

1901 """ 

1902 try: 

1903 return self.records[item] 

1904 except KeyError as e: 

1905 if item == 0 and (row := self.first()): 

1906 # special case: pydal internals think Rows.records is a list, not a dict 

1907 return row 

1908 

1909 raise e 

1910 

1911 def get(self, item: int) -> typing.Optional[T_MetaInstance]: 

1912 """ 

1913 Get a row by ID, or receive None if it isn't in this result set. 

1914 """ 

1915 return self.records.get(item) 
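
# Illustrative sketch of item access: TypedRows is keyed by record id, with a
# pydal-compatible fallback for index 0 (hypothetical `Article` table):
#
#     rows = Article.where(lambda t: t.id > 0).collect()
#     rows[7]        # the record with id == 7; KeyError if not in this result set
#     rows[0]        # falls back to the first row, mirroring pydal's list behaviour
#     rows.get(999)  # returns None instead of raising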

1916 

1917 def update(self, **new_values: Any) -> bool: 

1918 """ 

1919 Update the current rows in the database with new_values. 

1920 """ 

1921 # cast to make mypy understand .id is a TypedField and not an int! 

1922 table = typing.cast(typing.Type[TypedTable], self.model._ensure_table_defined()) 

1923 

1924 ids = set(self.column("id")) 

1925 query = table.id.belongs(ids) 

1926 return bool(self.db(query).update(**new_values)) 

1927 

1928 def delete(self) -> bool: 

1929 """ 

1930 Delete the currently selected rows from the database. 

1931 """ 

1932 # cast to make mypy understand .id is a TypedField and not an int! 

1933 table = typing.cast(typing.Type[TypedTable], self.model._ensure_table_defined()) 

1934 

1935 ids = set(self.column("id")) 

1936 query = table.id.belongs(ids) 

1937 return bool(self.db(query).delete()) 
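
# Illustrative sketch: update() and delete() above act on every id in this result
# set via a single `id.belongs(...)` query (hypothetical `Article` table):
#
#     rows = Article.where(lambda t: t.title.contains("draft")).collect()
#     rows.update(title="published")   # one UPDATE for all selected ids
#     rows.delete()                    # one DELETE for the same selection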

1938 

1939 def join( 

1940 self, 

1941 field: "Field | TypedField[Any]", 

1942 name: str = None, 

1943 constraint: Query = None, 

1944 fields: list[str | Field] = None, 

1945 orderby: Optional[str | Field] = None, 

1946 ) -> T_MetaInstance: 

1947 """ 

1948 This can be used to JOIN with some relationships after the initial select. 

1949 

1950 Using the query builder's .join() method is preferred! 

1951 """ 

1952 result = super().join(field, name, constraint, fields or [], orderby) 

1953 return typing.cast(T_MetaInstance, result) 

1954 

1955 def export_to_csv_file( 

1956 self, 

1957 ofile: typing.TextIO, 

1958 null: Any = "<NULL>", 

1959 delimiter: str = ",", 

1960 quotechar: str = '"', 

1961 quoting: int = csv.QUOTE_MINIMAL, 

1962 represent: bool = False, 

1963 colnames: list[str] = None, 

1964 write_colnames: bool = True, 

1965 *args: Any, 

1966 **kwargs: Any, 

1967 ) -> None: 

1968 """ 

1969 Shadow export_to_csv_file from Rows, but with typing. 

1970 

1971 See http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#Exporting-and-importing-data 

1972 """ 

1973 super().export_to_csv_file( 

1974 ofile, 

1975 null, 

1976 *args, 

1977 delimiter=delimiter, 

1978 quotechar=quotechar, 

1979 quoting=quoting, 

1980 represent=represent, 

1981 colnames=colnames or self.colnames, 

1982 write_colnames=write_colnames, 

1983 **kwargs, 

1984 ) 

1985 

1986 @classmethod 

1987 def from_rows( 

1988 cls, rows: Rows, model: typing.Type[T_MetaInstance], metadata: Metadata = None 

1989 ) -> "TypedRows[T_MetaInstance]": 

1990 """ 

1991 Internal method to convert a Rows object to a TypedRows. 

1992 """ 

1993 return cls(rows, model, metadata=metadata) 

1994 

1995 def __getstate__(self) -> dict[str, Any]: 

1996 """ 

1997 Used by dill to dump to bytes (exclude db connection etc). 

1998 """ 

1999 return { 

2000 "metadata": json.dumps(self.metadata, default=str), 

2001 "records": self.records, 

2002 "model": str(self.model._table), 

2003 "colnames": self.colnames, 

2004 } 

2005 

2006 def __setstate__(self, state: dict[str, Any]) -> None: 

2007 """ 

2008 Used by dill when loading from a bytestring. 

2009 """ 

2010 state["metadata"] = json.loads(state["metadata"]) 

2011 self.__dict__.update(state) 

2012 # db etc. set after undill by caching.py 

2013 

2014 

2015from .caching import ( # noqa: E402 

2016 _remove_cache, 

2017 _TypedalCache, 

2018 _TypedalCacheDependency, 

2019 create_and_hash_cache_key, 

2020 get_expire, 

2021 load_from_cache, 

2022 save_to_cache, 

2023) 

2024 

2025 

2026class QueryBuilder(typing.Generic[T_MetaInstance]): 

2027 """ 

2028 Abstraction on top of pydal's query system. 

2029 """ 

2030 

2031 model: typing.Type[T_MetaInstance] 

2032 query: Query 

2033 select_args: list[Any] 

2034 select_kwargs: dict[str, Any] 

2035 relationships: dict[str, Relationship[Any]] 

2036 metadata: Metadata 

2037 

2038 def __init__( 

2039 self, 

2040 model: typing.Type[T_MetaInstance], 

2041 add_query: Optional[Query] = None, 

2042 select_args: Optional[list[Any]] = None, 

2043 select_kwargs: Optional[dict[str, Any]] = None, 

2044 relationships: dict[str, Relationship[Any]] = None, 

2045 metadata: Metadata = None, 

2046 ): 

2047 """ 

2048 Normally, you wouldn't manually initialize a QueryBuilder but start using a method on a TypedTable. 

2049 

2050 Example: 

2051 MyTable.where(...) -> QueryBuilder[MyTable] 

2052 """ 

2053 self.model = model 

2054 table = model._ensure_table_defined() 

2055 default_query = typing.cast(Query, table.id > 0) 

2056 self.query = add_query or default_query 

2057 self.select_args = select_args or [] 

2058 self.select_kwargs = select_kwargs or {} 

2059 self.relationships = relationships or {} 

2060 self.metadata = metadata or {} 

2061 

2062 def __str__(self) -> str: 

2063 """ 

2064 Simple string representation for the query builder. 

2065 """ 

2066 return f"QueryBuilder for {self.model}" 

2067 

2068 def __repr__(self) -> str: 

2069 """ 

2070 Advanced string representation for the query builder. 

2071 """ 

2072 return ( 

2073 f"<QueryBuilder for {self.model} with " 

2074 f"{len(self.select_args)} select args; " 

2075 f"{len(self.select_kwargs)} select kwargs; " 

2076 f"{len(self.relationships)} relationships; " 

2077 f"query: {bool(self.query)}; " 

2078 f"metadata: {self.metadata}; " 

2079 f">" 

2080 ) 

2081 

2082 def __bool__(self) -> bool: 

2083 """ 

2084 The QueryBuilder is truthy if its query matches at least one row. 

2085 """ 

2086 return self.count() > 0 

2087 

2088 def _extend( 

2089 self, 

2090 add_query: Optional[Query] = None, 

2091 overwrite_query: Optional[Query] = None, 

2092 select_args: Optional[list[Any]] = None, 

2093 select_kwargs: Optional[dict[str, Any]] = None, 

2094 relationships: dict[str, Relationship[Any]] = None, 

2095 metadata: Metadata = None, 

2096 ) -> "QueryBuilder[T_MetaInstance]": 

2097 return QueryBuilder( 

2098 self.model, 

2099 (add_query & self.query) if add_query else overwrite_query or self.query, 

2100 (self.select_args + select_args) if select_args else self.select_args, 

2101 (self.select_kwargs | select_kwargs) if select_kwargs else self.select_kwargs, 

2102 (self.relationships | relationships) if relationships else self.relationships, 

2103 (self.metadata | (metadata or {})) if metadata else self.metadata, 

2104 ) 

2105 

2106 def select(self, *fields: Any, **options: Any) -> "QueryBuilder[T_MetaInstance]": 

2107 """ 

2108 Fields: database columns by name ('id'), by field reference (table.id) or other (e.g. table.ALL). 

2109 

2110 Options: 

2111 paraphrased from the web2py pydal docs, 

2112 For more info, see http://www.web2py.com/books/default/chapter/29/06/the-database-abstraction-layer#orderby-groupby-limitby-distinct-having-orderby_on_limitby-join-left-cache 

2113 

2114 orderby: field(s) to order by. Supported: 

2115 table.name - sort by name, ascending 

2116 ~table.name - sort by name, descending 

2117 <random> - sort randomly 

2118 table.name|table.id - sort by two fields (first name, then id) 

2119 

2120 groupby, having: together with orderby: 

2121 groupby can be a field (e.g. table.name) to group records by 

2122 having can be a query, only those `having` the condition are grouped 

2123 

2124 limitby: tuple of min and max. When using the query builder, .paginate(limit, page) is recommended. 

2125 distinct: bool/field. Only select rows that differ 

2126 orderby_on_limitby (bool, default: True): by default, an implicit orderby is added when doing limitby. 

2127 join: othertable.on(query) - do an INNER JOIN. Using TypeDAL relationships with .join() is recommended! 

2128 left: othertable.on(query) - do a LEFT JOIN. Using TypeDAL relationships with .join() is recommended! 

2129 cache: cache the query result to speed up repeated queries; e.g. (cache=(cache.ram, 3600), cacheable=True) 

2130 """ 

2131 return self._extend(select_args=list(fields), select_kwargs=options) 
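
# Illustrative sketch of select() with a few of the options documented above
# (hypothetical `Article` table; field names are made up):
#
#     rows = (
#         Article.where(lambda t: t.id > 0)
#         .select(Article.id, Article.title, orderby=~Article.id, distinct=True)
#         .collect()
#     )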

2132 

2133 def where( 

2134 self, 

2135 *queries_or_lambdas: Query | typing.Callable[[typing.Type[T_MetaInstance]], Query], 

2136 **filters: Any, 

2137 ) -> "QueryBuilder[T_MetaInstance]": 

2138 """ 

2139 Extend the builder's query. 

2140 

2141 Can be used in multiple ways: 

2142 .where(Query) -> with a direct query such as `Table.id == 5` 

2143 .where(lambda table: table.id == 5) -> with a query via a lambda 

2144 .where(id=5) -> via keyword arguments 

2145 

2146 When using multiple .where() calls, they will be ANDed: 

2147 .where(lambda table: table.id == 5).where(lambda table: table.id == 6) == (table.id == 5) & (table.id == 6) 

2148 When passing multiple queries to a single .where, they will be ORed: 

2149 .where(lambda table: table.id == 5, lambda table: table.id == 6) == (table.id == 5) | (table.id == 6) 

2150 """ 

2151 new_query = self.query 

2152 table = self.model._ensure_table_defined() 

2153 

2154 for field, value in filters.items(): 

2155 new_query &= table[field] == value 

2156 

2157 subquery: DummyQuery | Query = DummyQuery() 

2158 for query_or_lambda in queries_or_lambdas: 

2159 if isinstance(query_or_lambda, _Query): 

2160 subquery |= typing.cast(Query, query_or_lambda) 

2161 elif callable(query_or_lambda): 

2162 if result := query_or_lambda(self.model): 

2163 subquery |= result 

2164 elif isinstance(query_or_lambda, (Field, _Field)) or is_typed_field(query_or_lambda): 

2165 subquery |= typing.cast(Query, query_or_lambda != None) 

2166 else: 

2167 raise ValueError(f"Unexpected query type ({type(query_or_lambda)}).") 

2168 

2169 if subquery: 

2170 new_query &= subquery 

2171 

2172 return self._extend(overwrite_query=new_query) 
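
# Illustrative sketch of the AND/OR semantics documented above (hypothetical
# `Article` table):
#
#     # chained .where() calls are ANDed:
#     qb = Article.where(lambda t: t.id > 5).where(lambda t: t.id < 50)
#     # multiple arguments to one .where() are ORed; keyword args filter on equality:
#     qb = Article.where(lambda t: t.id == 5, lambda t: t.id == 6).where(title="x")
#     rows = qb.collect()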

2173 

2174 def join( 

2175 self, 

2176 *fields: str | typing.Type[TypedTable], 

2177 method: JOIN_OPTIONS = None, 

2178 on: OnQuery | list[Expression] | Expression = None, 

2179 condition: Condition = None, 

2180 ) -> "QueryBuilder[T_MetaInstance]": 

2181 """ 

2182 Include relationship fields in the result. 

2183 

2184 `fields` can be names of Relationships on the current model. 

2185 If no fields are passed, all will be used. 

2186 

2187 By default, the `method` defined in the relationship is used. 

2188 This can be overwritten with the `method` keyword argument (left or inner) 

2189 """ 

2190 # todo: allow limiting amount of related rows returned for join? 

2191 

2192 relationships = self.model.get_relationships() 

2193 

2194 if condition and on: 

2195 raise ValueError("condition and on can not be used together!") 

2196 elif condition: 

2197 if len(fields) != 1: 

2198 raise ValueError("join(field, condition=...) can only be used with exactly one field!") 

2199 

2200 if isinstance(condition, pydal.objects.Query): 

2201 condition = as_lambda(condition) 

2202 

2203 relationships = {str(fields[0]): relationship(fields[0], condition=condition, join=method)} 

2204 elif on: 

2205 if len(fields) != 1: 

2206 raise ValueError("join(field, on=...) can only be used with exactly one field!") 

2207 

2208 if isinstance(on, pydal.objects.Expression): 

2209 on = [on] 

2210 

2211 if isinstance(on, list): 

2212 on = as_lambda(on) 

2213 relationships = {str(fields[0]): relationship(fields[0], on=on, join=method)} 

2214 

2215 else: 

2216 if fields: 

2217 # join on every relationship 

2218 relationships = {str(k): relationships[str(k)] for k in fields} 

2219 

2220 if method: 

2221 relationships = {str(k): r.clone(join=method) for k, r in relationships.items()} 

2222 

2223 return self._extend(relationships=relationships) 
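
# Illustrative sketch of join(): assuming the hypothetical `Article` model defines
# a relationship named "author", these all return extended builders:
#
#     rows = Article.where(lambda t: t.id > 0).join().collect()           # all relationships
#     rows = Article.where(lambda t: t.id > 0).join("author").collect()   # only "author"
#     rows = Article.where(lambda t: t.id > 0).join("author", method="inner").collect()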

2224 

2225 def cache( 

2226 self, *deps: Any, expires_at: Optional[dt.datetime] = None, ttl: Optional[int | dt.timedelta] = None 

2227 ) -> "QueryBuilder[T_MetaInstance]": 

2228 """ 

2229 Enable caching for this query to load repeated calls from a dill row \ 

2230 instead of executing the SQL and collecting matching rows again. 

2231 """ 

2232 existing = self.metadata.get("cache", {}) 

2233 

2234 metadata: Metadata = {} 

2235 

2236 cache_meta = typing.cast( 

2237 CacheMetadata, 

2238 self.metadata.get("cache", {}) 

2239 | { 

2240 "enabled": True, 

2241 "depends_on": existing.get("depends_on", []) + [str(_) for _ in deps], 

2242 "expires_at": get_expire(expires_at=expires_at, ttl=ttl), 

2243 }, 

2244 ) 

2245 

2246 metadata["cache"] = cache_meta 

2247 return self._extend(metadata=metadata) 
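
# Illustrative sketch of cache(): repeated identical queries can then be served
# from the dill-backed cache until the TTL expires or a dependency is invalidated
# (hypothetical `Article` table; `dt` is this module's datetime alias):
#
#     rows = (
#         Article.where(lambda t: t.id > 0)
#         .cache(ttl=dt.timedelta(minutes=5))
#         .collect()
#     )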

2248 

2249 def _get_db(self) -> TypeDAL: 

2250 if db := self.model._db: 

2251 return db 

2252 else: # pragma: no cover 

2253 raise EnvironmentError("@define or db.define is not called on this class yet!") 

2254 

2255 def _select_arg_convert(self, arg: Any) -> Any: 

2256 # TypedField instances are not really used at runtime anymore, but leave this in for safety: 

2257 if isinstance(arg, TypedField): # pragma: no cover 

2258 arg = arg._field 

2259 

2260 return arg 

2261 

2262 def delete(self) -> list[int]: 

2263 """ 

2264 Based on the current query, delete rows and return a list of deleted IDs. 

2265 """ 

2266 db = self._get_db() 

2267 removed_ids = [_.id for _ in db(self.query).select("id")] 

2268 if db(self.query).delete(): 

2269 # success! 

2270 return removed_ids 

2271 

2272 return [] 

2273 

2274 def _delete(self) -> str: 

2275 db = self._get_db() 

2276 return str(db(self.query)._delete()) 

2277 

2278 def update(self, **fields: Any) -> list[int]: 

2279 """ 

2280 Based on the current query, update `fields` and return a list of updated IDs. 

2281 """ 

2282 # todo: limit? 

2283 db = self._get_db() 

2284 updated_ids = db(self.query).select("id").column("id") 

2285 if db(self.query).update(**fields): 

2286 # success! 

2287 return updated_ids 

2288 

2289 return [] 

2290 

2291 def _update(self, **fields: Any) -> str: 

2292 db = self._get_db() 

2293 return str(db(self.query)._update(**fields)) 

2294 

2295 def _before_query(self, mut_metadata: Metadata, add_id: bool = True) -> tuple[Query, list[Any], dict[str, Any]]: 

2296 select_args = [self._select_arg_convert(_) for _ in self.select_args] or [self.model.ALL] 

2297 select_kwargs = self.select_kwargs.copy() 

2298 query = self.query 

2299 model = self.model 

2300 mut_metadata["query"] = query 

2301 # require at least id of main table: 

2302 select_fields = ", ".join([str(_) for _ in select_args]) 

2303 tablename = str(model) 

2304 

2305 if add_id and f"{tablename}.id" not in select_fields: 

2306 # fields of other selected, but required ID is missing. 

2307 select_args.append(model.id) 

2308 

2309 if self.relationships: 

2310 query, select_args = self._handle_relationships_pre_select(query, select_args, select_kwargs, mut_metadata) 

2311 

2312 return query, select_args, select_kwargs 

2313 

2314 def to_sql(self, add_id: bool = False) -> str: 

2315 """ 

2316 Generate the SQL for the built query. 

2317 """ 

2318 db = self._get_db() 

2319 

2320 query, select_args, select_kwargs = self._before_query({}, add_id=add_id) 

2321 

2322 return str(db(query)._select(*select_args, **select_kwargs)) 
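
# Illustrative sketch: to_sql() renders the SELECT statement without executing it,
# which is useful for debugging (hypothetical `Article` table):
#
#     print(Article.where(lambda t: t.id > 0).to_sql())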

2323 

2324 def _collect(self) -> str: 

2325 """ 

2326 Alias for to_sql, pydal-like syntax. 

2327 """ 

2328 return self.to_sql() 

2329 

2330 def _collect_cached(self, metadata: Metadata) -> "TypedRows[T_MetaInstance] | None": 

2331 expires_at = metadata["cache"].get("expires_at") 

2332 metadata["cache"] |= { 

2333 # key is partly dependant on cache metadata but not these: 

2334 "key": None, 

2335 "status": None, 

2336 "cached_at": None, 

2337 "expires_at": None, 

2338 } 

2339 

2340 _, key = create_and_hash_cache_key( 

2341 self.model, 

2342 metadata, 

2343 self.query, 

2344 self.select_args, 

2345 self.select_kwargs, 

2346 self.relationships.keys(), 

2347 ) 

2348 

2349 # re-set after creating key: 

2350 metadata["cache"]["expires_at"] = expires_at 

2351 metadata["cache"]["key"] = key 

2352 

2353 return load_from_cache(key, self._get_db()) 

2354 

2355 def collect( 

2356 self, verbose: bool = False, _to: typing.Type["TypedRows[Any]"] = None, add_id: bool = True 

2357 ) -> "TypedRows[T_MetaInstance]": 

2358 """ 

2359 Execute the built query and turn it into model instances, while handling relationships. 

2360 """ 

2361 if _to is None: 

2362 _to = TypedRows 

2363 

2364 db = self._get_db() 

2365 metadata = typing.cast(Metadata, self.metadata.copy()) 

2366 

2367 if metadata.get("cache", {}).get("enabled") and (result := self._collect_cached(metadata)): 

2368 return result 

2369 

2370 query, select_args, select_kwargs = self._before_query(metadata, add_id=add_id) 

2371 

2372 metadata["sql"] = db(query)._select(*select_args, **select_kwargs) 

2373 

2374 if verbose: # pragma: no cover 

2375 print(metadata["sql"]) 

2376 

2377 rows: Rows = db(query).select(*select_args, **select_kwargs) 

2378 

2379 metadata["final_query"] = str(query) 

2380 metadata["final_args"] = [str(_) for _ in select_args] 

2381 metadata["final_kwargs"] = select_kwargs 

2382 

2383 if verbose: # pragma: no cover 

2384 print(rows) 

2385 

2386 if not self.relationships: 

2387 # easy 

2388 typed_rows = _to.from_rows(rows, self.model, metadata=metadata) 

2389 

2390 else: 

2391 # harder: try to match rows to the objects they belong to 

2392 # assume structure of {'table': <data>} per row. 

2393 # if that's not the case, return default behavior again 

2394 typed_rows = self._collect_with_relationships(rows, metadata=metadata, _to=_to) 

2395 

2396 # only saves if requested in metadata: 

2397 return save_to_cache(typed_rows, rows) 

2398 

2399 def _handle_relationships_pre_select( 

2400 self, 

2401 query: Query, 

2402 select_args: list[Any], 

2403 select_kwargs: dict[str, Any], 

2404 metadata: Metadata, 

2405 ) -> tuple[Query, list[Any]]: 

2406 db = self._get_db() 

2407 model = self.model 

2408 

2409 metadata["relationships"] = set(self.relationships.keys()) 

2410 

2411 # query = self._update_query_for_inner(db, model, query) 

2412 join = [] 

2413 for key, relation in self.relationships.items(): 

2414 if not relation.condition or relation.join != "inner": 

2415 continue 

2416 

2417 other = relation.get_table(db) 

2418 other = other.with_alias(f"{key}_{hash(relation)}") 

2419 join.append(other.on(relation.condition(model, other))) 

2420 

2421 if limitby := select_kwargs.pop("limitby", None): 

2422 # if limitby + relationships: 

2423 # 1. get IDs of main table entries that match 'query' 

2424 # 2. change query to .belongs(id) 

2425 # 3. add joins etc 

2426 

2427 kwargs = {"limitby": limitby} 

2428 

2429 if join: 

2430 kwargs["join"] = join 

2431 

2432 ids = db(query)._select(model.id, **kwargs) 

2433 query = model.id.belongs(ids) 

2434 metadata["ids"] = ids 

2435 

2436 if join: 

2437 select_kwargs["join"] = join 

2438 

2439 left = [] 

2440 

2441 for key, relation in self.relationships.items(): 

2442 other = relation.get_table(db) 

2443 method: JOIN_OPTIONS = relation.join or DEFAULT_JOIN_OPTION 

2444 

2445 select_fields = ", ".join([str(_) for _ in select_args]) 

2446 pre_alias = str(other) 

2447 

2448 if f"{other}." not in select_fields: 

2449 # no fields of other selected. add .ALL: 

2450 select_args.append(other.ALL) 

2451 elif f"{other}.id" not in select_fields: 

2452 # fields of other selected, but required ID is missing. 

2453 select_args.append(other.id) 

2454 

2455 if relation.on: 

2456 # if it has a .on, it's always a left join! 

2457 on = relation.on(model, other) 

2458 if not isinstance(on, list): # pragma: no cover 

2459 on = [on] 

2460 

2461 left.extend(on) 

2462 elif method == "left": 

2463 # .on not given, generate it: 

2464 other = other.with_alias(f"{key}_{hash(relation)}") 

2465 condition = typing.cast(Query, relation.condition(model, other)) 

2466 left.append(other.on(condition)) 

2467 else: 

2468 # else: inner join (handled earlier) 

2469 other = other.with_alias(f"{key}_{hash(relation)}") # only for replace 

2470 # other = other.with_alias(f"{key}_{hash(relation)}") 

2471 # query &= relation.condition(model, other) 

2472 

2473 # if no fields of 'other' are included, add other.ALL 

2474 # else: only add other.id if missing 

2475 select_fields = ", ".join([str(_) for _ in select_args]) 

2476 

2477 post_alias = str(other).split(" AS ")[-1] 

2478 if pre_alias != post_alias: 

2479 # replace .select's with aliased: 

2480 select_fields = select_fields.replace( 

2481 f"{pre_alias}.", 

2482 f"{post_alias}.", 

2483 ) 

2484 

2485 select_args = select_fields.split(", ") 

2486 

2487 select_kwargs["left"] = left 

2488 return query, select_args 

2489 

2490 def _collect_with_relationships( 

2491 self, rows: Rows, metadata: Metadata, _to: typing.Type["TypedRows[Any]"] 

2492 ) -> "TypedRows[T_MetaInstance]": 

2493 """ 

2494 Transform the raw rows into Typed Table model instances. 

2495 """ 

2496 db = self._get_db() 

2497 main_table = self.model._ensure_table_defined() 

2498 

2499 records = {} 

2500 seen_relations: dict[str, set[str]] = defaultdict(set) # main id -> set of col + id for relation 

2501 

2502 for row in rows: 

2503 main = row[main_table] 

2504 main_id = main.id 

2505 

2506 if main_id not in records: 

2507 records[main_id] = self.model(main) 

2508 records[main_id]._with = list(self.relationships.keys()) 

2509 

2510 # set up all relationship defaults (once) 

2511 for col, relationship in self.relationships.items(): 

2512 records[main_id][col] = [] if relationship.multiple else None 

2513 

2514 # now add other relationship data 

2515 for column, relation in self.relationships.items(): 

2516 relationship_column = f"{column}_{hash(relation)}" 

2517 

2518 # relationship_column works for aliases with the same target column. 

2519 # if col + relationship not in the row, just use the regular name. 

2520 

2521 relation_data = ( 

2522 row[relationship_column] if relationship_column in row else row[relation.get_table_name()] 

2523 ) 

2524 

2525 if relation_data.id is None: 

2526 # always skip None ids 

2527 continue 

2528 

2529 if f"{column}-{relation_data.id}" in seen_relations[main_id]: 

2530 # speed up duplicates 

2531 continue 

2532 else: 

2533 seen_relations[main_id].add(f"{column}-{relation_data.id}") 

2534 

2535 relation_table = relation.get_table(db) 

2536 # hopefully an instance of a typed table and a regular row otherwise: 

2537 instance = relation_table(relation_data) if looks_like(relation_table, TypedTable) else relation_data 

2538 

2539 if relation.multiple: 

2540 # create list of T 

2541 if not isinstance(records[main_id].get(column), list): # pragma: no cover 

2542 # should already be set up before! 

2543 setattr(records[main_id], column, []) 

2544 

2545 records[main_id][column].append(instance) 

2546 else: 

2547 # create single T 

2548 records[main_id][column] = instance 

2549 

2550 return _to(rows, self.model, records, metadata=metadata) 

2551 

2552 def collect_or_fail(self, exception: Exception = None) -> "TypedRows[T_MetaInstance]": 

2553 """ 

2554 Call .collect() and raise an error if nothing found. 

2555 

2556 Basically unwraps Optional type. 

2557 """ 

2558 if result := self.collect(): 

2559 return result 

2560 

2561 if not exception: 

2562 exception = ValueError("Nothing found!") 

2563 

2564 raise exception 

2565 

2566 def __iter__(self) -> typing.Generator[T_MetaInstance, None, None]: 

2567 """ 

2568 You can start iterating a Query Builder object before calling collect, for ease of use. 

2569 """ 

2570 yield from self.collect() 

2571 

2572 def count(self) -> int: 

2573 """ 

2574 Return the amount of rows matching the current query. 

2575 """ 

2576 db = self._get_db() 

2577 model = self.model 

2578 query = self.query 

2579 

2580 for key, relation in self.relationships.items(): 

2581 if not relation.condition or relation.join != "inner": 

2582 continue 

2583 

2584 other = relation.get_table(db) 

2585 other = other.with_alias(f"{key}_{hash(relation)}") 

2586 query &= relation.condition(model, other) 

2587 

2588 return db(query).count() 

2589 

2590 def __paginate( 

2591 self, 

2592 limit: int, 

2593 page: int = 1, 

2594 ) -> "QueryBuilder[T_MetaInstance]": 

2595 _from = limit * (page - 1) 

2596 _to = limit * page 

2597 

2598 available = self.count() 

2599 

2600 metadata: Metadata = {} 

2601 

2602 metadata["pagination"] = { 

2603 "limit": limit, 

2604 "current_page": page, 

2605 "max_page": math.ceil(available / limit), 

2606 "rows": available, 

2607 "min_max": (_from, _to), 

2608 } 

2609 

2610 return self._extend(select_kwargs={"limitby": (_from, _to)}, metadata=metadata) 

2611 

2612 def paginate(self, limit: int, page: int = 1, verbose: bool = False) -> "PaginatedRows[T_MetaInstance]": 

2613 """ 

2614 Paginate transforms the more readable `page` and `limit` into pydal's internal limit and offset. 

2615 

2616 Note: when using relationships, this limit is only applied to the 'main' table and any number of extra rows \ 

2617 can be loaded with relationship data! 

2618 """ 

2619 builder = self.__paginate(limit, page) 

2620 

2621 rows = typing.cast(PaginatedRows[T_MetaInstance], builder.collect(verbose=verbose, _to=PaginatedRows)) 

2622 

2623 rows._query_builder = builder 

2624 return rows 
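
# Illustrative sketch of paginate(): `page` and `limit` are translated to pydal's
# (offset, limit) tuple by __paginate, and the result can fetch its neighbours
# (hypothetical `Article` table):
#
#     page_one = Article.where(lambda t: t.id > 0).paginate(limit=20, page=1)
#     total = page_one.pagination["total_pages"]
#     page_two = page_one.next()   # raises StopIteration past the final page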

2625 

2626 def _paginate( 

2627 self, 

2628 limit: int, 

2629 page: int = 1, 

2630 ) -> str: 

2631 builder = self.__paginate(limit, page) 

2632 return builder._collect() 

2633 

2634 def chunk(self, chunk_size: int) -> typing.Generator["TypedRows[T_MetaInstance]", Any, None]: 

2635 """ 

2636 Generator that yields rows from a paginated source in chunks. 

2637 

2638 This function retrieves rows from a paginated data source in chunks of the 

2639 specified `chunk_size` and yields them as TypedRows. 

2640 

2641 Example: 

2642 ``` 

2643 for chunk_of_rows in SomeTable.where(SomeTable.id > 5).chunk(100): 

2644 for row in chunk_of_rows: 

2645 # Process each row within the chunk. 

2646 pass 

2647 ``` 

2648 """ 

2649 page = 1 

2650 

2651 while rows := self.__paginate(chunk_size, page).collect(): 

2652 yield rows 

2653 page += 1 

2654 

2655 def first(self, verbose: bool = False) -> T_MetaInstance | None: 

2656 """ 

2657 Get the first row matching the currently built query. 

2658 

2659 Also applies pagination (limit 1), since it would be a waste to select more rows than needed. 

2660 """ 

2661 if row := self.paginate(page=1, limit=1, verbose=verbose).first(): 

2662 return self.model.from_row(row) 

2663 else: 

2664 return None 

2665 

2666 def _first(self) -> str: 

2667 return self._paginate(page=1, limit=1) 

2668 

2669 def first_or_fail(self, exception: Exception = None, verbose: bool = False) -> T_MetaInstance: 

2670 """ 

2671 Call .first() and raise an error if nothing found. 

2672 

2673 Basically unwraps Optional type. 

2674 """ 

2675 if inst := self.first(verbose=verbose): 

2676 return inst 

2677 

2678 if not exception: 

2679 exception = ValueError("Nothing found!") 

2680 

2681 raise exception 
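
# Illustrative sketch of the *_or_fail helpers (hypothetical `Article` table and
# hypothetical `NotFound` exception class):
#
#     article = Article.where(id=42).first_or_fail()            # ValueError if nothing matches
#     article = Article.where(id=42).first_or_fail(NotFound())  # or raise a custom exception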

2682 

2683 

2684S = typing.TypeVar("S") 

2685 

2686 

2687class PaginatedRows(TypedRows[T_MetaInstance]): 

2688 """ 

2689 Extension on top of rows that is used when calling .paginate() instead of .collect(). 

2690 """ 

2691 

2692 _query_builder: QueryBuilder[T_MetaInstance] 

2693 

2694 @property 

2695 def data(self) -> list[T_MetaInstance]: 

2696 """ 

2697 Get the underlying data. 

2698 """ 

2699 return list(self.records.values()) 

2700 

2701 @property 

2702 def pagination(self) -> Pagination: 

2703 """ 

2704 Get all page info. 

2705 """ 

2706 pagination_data = self.metadata["pagination"] 

2707 

2708 has_next_page = pagination_data["current_page"] < pagination_data["max_page"] 

2709 has_prev_page = pagination_data["current_page"] > 1 

2710 return { 

2711 "total_items": pagination_data["rows"], 

2712 "current_page": pagination_data["current_page"], 

2713 "per_page": pagination_data["limit"], 

2714 "total_pages": pagination_data["max_page"], 

2715 "has_next_page": has_next_page, 

2716 "has_prev_page": has_prev_page, 

2717 "next_page": pagination_data["current_page"] + 1 if has_next_page else None, 

2718 "prev_page": pagination_data["current_page"] - 1 if has_prev_page else None, 

2719 } 

2720 

2721 def next(self) -> Self: # noqa: A003 

2722 """ 

2723 Get the next page. 

2724 """ 

2725 data = self.metadata["pagination"] 

2726 if data["current_page"] >= data["max_page"]: 

2727 raise StopIteration("Final Page") 

2728 

2729 return self._query_builder.paginate(limit=data["limit"], page=data["current_page"] + 1) 

2730 

2731 def previous(self) -> Self: 

2732 """ 

2733 Get the previous page. 

2734 """ 

2735 data = self.metadata["pagination"] 

2736 if data["current_page"] <= 1: 

2737 raise StopIteration("First Page") 

2738 

2739 return self._query_builder.paginate(limit=data["limit"], page=data["current_page"] - 1) 

2740 

2741 def as_dict(self, *_: Any, **__: Any) -> PaginateDict: # type: ignore 

2742 """ 

2743 Convert to a dictionary with pagination info and original data. 

2744 

2745 All arguments are ignored! 

2746 """ 

2747 return {"data": super().as_dict(), "pagination": self.pagination} 

2748 

2749 

2750class TypedSet(pydal.objects.Set): # type: ignore # pragma: no cover 

2751 """ 

2752 Used to make pydal Set more typed. 

2753 

2754 This class is not actually used, only 'cast' by TypeDAL.__call__ 

2755 """ 

2756 

2757 def count(self, distinct: bool = None, cache: dict[str, Any] = None) -> int: 

2758 """ 

2759 Count returns an int. 

2760 """ 

2761 result = super().count(distinct, cache) 

2762 return typing.cast(int, result) 

2763 

2764 def select(self, *fields: Any, **attributes: Any) -> TypedRows[T_MetaInstance]: 

2765 """ 

2766 Select returns a TypedRows of a user defined table. 

2767 

2768 Example: 

2769 result: TypedRows[MyTable] = db(MyTable.id > 0).select() 

2770 

2771 for row in result: 

2772 typing.reveal_type(row) # MyTable 

2773 """ 

2774 rows = super().select(*fields, **attributes) 

2775 return typing.cast(TypedRows[T_MetaInstance], rows)