Coverage for src/typedal/core.py: 100%

888 statements  

coverage.py v7.3.2, created at 2023-12-18 13:46 +0100

1""" 

2Core functionality of TypeDAL. 

3""" 

4import contextlib 

5import csv 

6import datetime as dt 

7import inspect 

8import json 

9import math 

10import types 

11import typing 

12import warnings 

13from collections import defaultdict 

14from decimal import Decimal 

15from pathlib import Path 

16from typing import Any, Optional 

17 

18import pydal 

19from pydal._globals import DEFAULT 

20from pydal.objects import Field as _Field 

21from pydal.objects import Query as _Query 

22from pydal.objects import Row 

23from pydal.objects import Table as _Table 

24from typing_extensions import Self 

25 

26from .config import load_config 

27from .helpers import ( 

28 DummyQuery, 

29 all_annotations, 

30 all_dict, 

31 as_lambda, 

32 extract_type_optional, 

33 filter_out, 

34 instanciate, 

35 is_union, 

36 looks_like, 

37 mktable, 

38 origin_is_subclass, 

39 to_snake, 

40 unwrap_type, 

41) 

42from .types import ( 

43 AfterDeleteCallable, 

44 AfterInsertCallable, 

45 AfterUpdateCallable, 

46 BeforeDeleteCallable, 

47 BeforeInsertCallable, 

48 BeforeUpdateCallable, 

49 CacheMetadata, 

50 Expression, 

51 Field, 

52 Metadata, 

53 PaginateDict, 

54 Pagination, 

55 Query, 

56 Rows, 

57 Validator, 

58 _Types, 

59) 

60 

61# use typing.cast(type, ...) to make mypy happy with unions 

62T_annotation = typing.Type[Any] | types.UnionType 

63T_Query = typing.Union["Table", Query, bool, None, "TypedTable", typing.Type["TypedTable"]] 

64T_Value = typing.TypeVar("T_Value") # actual type of the Field (via Generic) 

65T_MetaInstance = typing.TypeVar("T_MetaInstance", bound="TypedTable") # bound="TypedTable"; bound="TableMeta" 

66T = typing.TypeVar("T") 

67 

68BASIC_MAPPINGS: dict[T_annotation, str] = { 

69 str: "string", 

70 int: "integer", 

71 bool: "boolean", 

72 bytes: "blob", 

73 float: "double", 

74 object: "json", 

75 Decimal: "decimal(10,2)", 

76 dt.date: "date", 

77 dt.time: "time", 

78 dt.datetime: "datetime", 

79} 
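
# Illustrative sketch (not part of the original source): these mappings are what
# _annotation_to_pydal_fieldtype consults for plain annotations, so a hypothetical model
#
#     @db.define
#     class Person(TypedTable):
#         name: str              # -> "string"
#         age: int               # -> "integer"
#         balance: Decimal       # -> "decimal(10,2)"
#         birthday: dt.date      # -> "date"
#         nickname: str | None   # -> "string", with notnull=False (Optional lifts the default)
#
# ends up with the pydal field types shown in the comments; `db` and `Person` are assumptions.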

80 

81 

82def is_typed_field(cls: Any) -> typing.TypeGuard["TypedField[Any]"]: 

83 """ 

84 Is `cls` an instance or subclass of TypedField? 

85 

86 Deprecated 

87 """ 

88 return ( 

89 isinstance(cls, TypedField) 

90 or isinstance(typing.get_origin(cls), type) 

91 and issubclass(typing.get_origin(cls), TypedField) 

92 ) 

93 

94 

95JOIN_OPTIONS = typing.Literal["left", "inner", None] 

96DEFAULT_JOIN_OPTION: JOIN_OPTIONS = "left" 

97 

98# table-ish parameter: 

99P_Table = typing.Union[typing.Type["TypedTable"], pydal.objects.Table] 

100 

101Condition: typing.TypeAlias = typing.Optional[ 

102 typing.Callable[ 

103 # self, other -> Query 

104 [P_Table, P_Table], 

105 Query | bool, 

106 ] 

107] 

108 

109OnQuery: typing.TypeAlias = typing.Optional[ 

110 typing.Callable[ 

111 # self, other -> list of .on statements 

112 [P_Table, P_Table], 

113 list[Expression], 

114 ] 

115] 

116 

117To_Type = typing.TypeVar("To_Type", type[Any], typing.Type[Any], str) 

118 

119 

120class Relationship(typing.Generic[To_Type]): 

121 """ 

122 Define a relationship to another table. 

123 """ 

124 

125 _type: To_Type 

126 table: typing.Type["TypedTable"] | type | str 

127 condition: Condition 

128 on: OnQuery 

129 multiple: bool 

130 join: JOIN_OPTIONS 

131 

132 def __init__( 

133 self, 

134 _type: To_Type, 

135 condition: Condition = None, 

136 join: JOIN_OPTIONS = None, 

137 on: OnQuery = None, 

138 ): 

139 """ 

140 Should not be called directly, use relationship() instead! 

141 """ 

142 if condition and on: 

143 warnings.warn(f"Relation | Both specified! {condition=} {on=} {_type=}") 

144 raise ValueError("Please specify either a condition or an 'on' statement for this relationship!") 

145 

146 self._type = _type 

147 self.condition = condition 

148 self.join = "left" if on else join # .on is always left join! 

149 self.on = on 

150 

151 if args := typing.get_args(_type): 

152 self.table = unwrap_type(args[0]) 

153 self.multiple = True 

154 else: 

155 self.table = _type 

156 self.multiple = False 

157 

158 if isinstance(self.table, str): 

159 self.table = TypeDAL.to_snake(self.table) 

160 

161 def clone(self, **update: Any) -> "Relationship[To_Type]": 

162 """ 

163 Create a copy of the relationship, possibly updated. 

164 """ 

165 return self.__class__( 

166 update.get("_type") or self._type, 

167 update.get("condition") or self.condition, 

168 update.get("join") or self.join, 

169 update.get("on") or self.on, 

170 ) 

171 

172 def __repr__(self) -> str: 

173 """ 

174 Representation of the relationship. 

175 """ 

176 if callback := self.condition or self.on: 

177 src_code = inspect.getsource(callback).strip() 

178 else: 

179 cls_name = self._type if isinstance(self._type, str) else self._type.__name__ # type: ignore 

180 src_code = f"to {cls_name} (missing condition)" 

181 

182 join = f":{self.join}" if self.join else "" 

183 return f"<Relationship{join} {src_code}>" 

184 

185 def get_table(self, db: "TypeDAL") -> typing.Type["TypedTable"]: 

186 """ 

187 Get the table this relationship is bound to. 

188 """ 

189 table = self.table # can be a string because db wasn't available yet 

190 if isinstance(table, str): 

191 if mapped := db._class_map.get(table): 

192 # yay 

193 return mapped 

194 

195 # boo, fall back to untyped table but pretend it is typed: 

196 return typing.cast(typing.Type["TypedTable"], db[table]) # eh close enough! 

197 

198 return table 

199 

200 def get_table_name(self) -> str: 

201 """ 

202 Get the name of the table this relationship is bound to. 

203 """ 

204 if isinstance(self.table, str): 

205 return self.table 

206 

207 if isinstance(self.table, pydal.objects.Table): 

208 return str(self.table) 

209 

210 # else: typed table 

211 try: 

212 table = self.table._ensure_table_defined() if issubclass(self.table, TypedTable) else self.table 

213 except Exception: # pragma: no cover 

214 table = self.table 

215 

216 return str(table) 

217 

218 def __get__(self, instance: Any, owner: Any) -> typing.Optional[list[Any]] | "Relationship[To_Type]": 

219 """ 

220 Relationship is a descriptor class, which can be returned from a class but not an instance. 

221 

222 For an instance, using .join() will replace the Relationship with the actual data. 

223 If you forgot to join, a warning will be shown and empty data will be returned. 

224 """ 

225 if not instance: 

226 # relationship queried on class, that's allowed 

227 return self 

228 

229 warnings.warn( 

230 "Trying to get data from a relationship object! Did you forget to join it?", category=RuntimeWarning 

231 ) 

232 if self.multiple: 

233 return [] 

234 else: 

235 return None 

236 

237 

238def relationship( 

239 _type: To_Type, condition: Condition = None, join: JOIN_OPTIONS = None, on: OnQuery = None 

240) -> Relationship[To_Type]: 

241 """ 

242 Define a relationship to another table, when its id is not stored in the current table. 

243 

244 Example: 

245 class User(TypedTable): 

246 name: str 

247 

248 posts = relationship(list["Post"], condition=lambda self, post: self.id == post.author, join='left') 

249 

250 class Post(TypedTable): 

251 title: str 

252 author: User 

253 

254 User.join("posts").first() # User instance with list[Post] in .posts 

255 

256 Here, Post stores the User ID, but `relationship(list["Post"])` still allows you to get the user's posts. 

257 In this case, the join strategy is set to LEFT so users without posts are also still selected. 

258 

259 For complex queries with a pivot table, an `on` can be set instead of `condition`: 

260 class User(TypedTable): 

261 ... 

262 

263 tags = relationship(list["Tag"], on=lambda self, tag: [ 

264 Tagged.on(Tagged.entity == self.gid), 

265 Tag.on((Tagged.tag == tag.id)), 

266 ]) 

267 

268 If you tried to capture this in a single 'condition', pydal would create a cross join, which is much less efficient. 

269 """ 

270 return Relationship(_type, condition, join, on) 

271 

272 

273def _generate_relationship_condition( 

274 _: typing.Type["TypedTable"], key: str, field: typing.Union["TypedField[Any]", "Table", typing.Type["TypedTable"]] 

275) -> Condition: 

276 origin = typing.get_origin(field) 

277 # else: generic 

278 

279 if origin == list: 

280 # field = typing.get_args(field)[0] # actual field 

281 # return lambda _self, _other: cls[key].contains(field) 

282 

283 return lambda _self, _other: _self[key].contains(_other.id) 

284 else: 

285 # normal reference 

286 # return lambda _self, _other: cls[key] == field.id 

287 return lambda _self, _other: _self[key] == _other.id 

288 

289 

290def to_relationship( 

291 cls: typing.Type["TypedTable"] | type[Any], 

292 key: str, 

293 field: typing.Union["TypedField[Any]", "Table", typing.Type["TypedTable"]], 

294) -> typing.Optional[Relationship[Any]]: 

295 """ 

296 Used to automatically create relationship instance for reference fields. 

297 

298 Example: 

299 class MyTable(TypedTable): 

300 reference: OtherTable 

301 

302 `reference` contains the id of an OtherTable row. 

303 MyTable.relationships should have 'reference' as a relationship, so `MyTable.join('reference')` should work. 

304 

305 This function will automatically perform this logic (called in db.define): 

306 to_relationship(MyTable, 'reference', OtherTable) -> Relationship[OtherTable] 

307 

308 Also works for list:reference (list[OtherTable]) and TypedField[OtherTable]. 

309 """ 

310 if looks_like(field, TypedField): 

311 if args := typing.get_args(field): 

312 field = args[0] 

313 else: 

314 # weird 

315 return None 

316 

317 field, optional = extract_type_optional(field) 

318 

319 try: 

320 condition = _generate_relationship_condition(cls, key, field) 

321 except Exception as e: # pragma: no cover 

322 warnings.warn("Could not generate Relationship condition", source=e) 

323 condition = None 

324 

325 if not condition: # pragma: no cover 

326 # something went wrong, not a valid relationship 

327 warnings.warn(f"Invalid relationship for {cls.__name__}.{key}: {field}") 

328 return None 

329 

330 join = "left" if optional or typing.get_origin(field) == list else "inner" 

331 

332 return Relationship(typing.cast(type[TypedTable], field), condition, typing.cast(JOIN_OPTIONS, join)) 
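
# Sketch of the resulting join strategies (not in the original source; model and field
# names are hypothetical):
#
#     class Post(TypedTable):
#         author: User            # plain reference    -> Relationship with join="inner"
#         editor: User | None     # optional reference -> join="left"
#         tags: list[Tag]         # list:reference     -> join="left"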

333 

334 

335class TypeDAL(pydal.DAL): # type: ignore 

336 """ 

337 Drop-in replacement for pyDAL with layer to convert class-based table definitions to classical pydal define_tables. 

338 """ 

339 

340 def __init__( 

341 self, 

342 uri: Optional[str] = None, # default from config or 'sqlite:memory' 

343 pool_size: int = None, # default 1 if sqlite else 3 

344 folder: Optional[str | Path] = None, # default 'databases' in config 

345 db_codec: str = "UTF-8", 

346 check_reserved: Optional[list[str]] = None, 

347 migrate: Optional[bool] = None, # default True by config 

348 fake_migrate: Optional[bool] = None, # default False by config 

349 migrate_enabled: bool = True, 

350 fake_migrate_all: bool = False, 

351 decode_credentials: bool = False, 

352 driver_args: Optional[dict[str, Any]] = None, 

353 adapter_args: Optional[dict[str, Any]] = None, 

354 attempts: int = 5, 

355 auto_import: bool = False, 

356 bigint_id: bool = False, 

357 debug: bool = False, 

358 lazy_tables: bool = False, 

359 db_uid: Optional[str] = None, 

360 after_connection: typing.Callable[..., Any] = None, 

361 tables: Optional[list[str]] = None, 

362 ignore_field_case: bool = True, 

363 entity_quoting: bool = True, 

364 table_hash: Optional[str] = None, 

365 enable_typedal_caching: bool = None, 

366 use_pyproject: bool | str = True, 

367 use_env: bool | str = True, 

368 ) -> None: 

369 """ 

370 Adds some internal tables after calling pydal's default init. 

371 

372 Set enable_typedal_caching to False to disable this behavior. 

373 """ 

374 config = load_config(_use_pyproject=use_pyproject, _use_env=use_env) 

375 config.update( 

376 database=uri, 

377 dialect=uri.split(":")[0] if uri and ":" in uri else None, 

378 folder=folder, 

379 migrate=migrate, 

380 fake_migrate=fake_migrate, 

381 caching=enable_typedal_caching, 

382 pool_size=pool_size, 

383 ) 

384 

385 if config.folder: 

386 Path(config.folder).mkdir(exist_ok=True) 

387 

388 super().__init__( 

389 config.database, 

390 config.pool_size, 

391 config.folder, 

392 db_codec, 

393 check_reserved, 

394 config.migrate, 

395 config.fake_migrate, 

396 migrate_enabled, 

397 fake_migrate_all, 

398 decode_credentials, 

399 driver_args, 

400 adapter_args, 

401 attempts, 

402 auto_import, 

403 bigint_id, 

404 debug, 

405 lazy_tables, 

406 db_uid, 

407 after_connection, 

408 tables, 

409 ignore_field_case, 

410 entity_quoting, 

411 table_hash, 

412 ) 

413 

414 if config.caching: 

415 self.try_define(_TypedalCache) 

416 self.try_define(_TypedalCacheDependency) 
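
# Illustrative usage sketch (not part of the original source); the URI and folder are
# assumptions. Arguments that are left unset fall back to the values loaded by load_config():
#
#     db = TypeDAL("sqlite://storage.sqlite", folder="databases", migrate=True)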

417 

418 def try_define(self, model: typing.Type[T], verbose: bool = False) -> typing.Type[T]: 

419 """ 

420 Try to define a model with migrate or fall back to fake migrate. 

421 """ 

422 try: 

423 return self.define(model, migrate=True) 

424 except Exception as e: 

425 # clean up: 

426 self.rollback() 

427 if (tablename := self.to_snake(model.__name__)) and tablename in dir(self): 

428 delattr(self, tablename) 

429 

430 if verbose: 

431 warnings.warn(f"{model} could not be migrated; falling back to fake migrate", source=e, category=RuntimeWarning) 

432 

433 # try again: 

434 return self.define(model, migrate=True, fake_migrate=True, redefine=True) 

435 

436 default_kwargs: typing.ClassVar[typing.Dict[str, Any]] = { 

437 # fields are 'required' (notnull) by default: 

438 "notnull": True, 

439 } 

440 

441 # maps table name to typedal class, for resolving future references 

442 _class_map: typing.ClassVar[dict[str, typing.Type["TypedTable"]]] = {} 

443 

444 def _define(self, cls: typing.Type[T], **kwargs: Any) -> typing.Type[T]: 

445 # todo: new relationship item added should also invalidate (previously unrelated) cache result 

446 

447 # todo: option to enable/disable cache dependency behavior: 

448 # - don't set _before_update and _before_delete 

449 # - don't add TypedalCacheDependency entry 

450 # - don't invalidate other item on new row of this type 

451 

452 # when __future__.annotations is implemented, cls.__annotations__ will not work anymore as below. 

453 # proper way to handle this would be (but gives error right now due to Table implementing magic methods): 

454 # typing.get_type_hints(cls, globalns=None, localns=None) 

455 

456 # dirty way (with evil eval): 

457 # [eval(v) for k, v in cls.__annotations__.items()] 

458 # this however also stops working when variables outside this scope or even references to other 

459 # objects are used. So for now, this package will NOT work when from __future__ import annotations is used, 

460 # and might break in the future, when this annotations behavior is enabled by default. 

461 

462 # non-annotated variables have to be passed to define_table as kwargs 

463 full_dict = all_dict(cls) # includes properties from parents (e.g. useful for mixins) 

464 

465 tablename = self.to_snake(cls.__name__) 

466 # grab annotations of cls and its parents: 

467 annotations = all_annotations(cls) 

468 # extend with `prop = TypedField()` 'annotations': 

469 annotations |= {k: typing.cast(type, v) for k, v in full_dict.items() if is_typed_field(v)} 

470 # remove internal stuff: 

471 annotations = {k: v for k, v in annotations.items() if not k.startswith("_")} 

472 

473 typedfields: dict[str, TypedField[Any]] = { 

474 k: instanciate(v, True) for k, v in annotations.items() if is_typed_field(v) 

475 } 

476 

477 relationships: dict[str, type[Relationship[Any]]] = filter_out(annotations, Relationship) 

478 

479 fields = {fname: self._to_field(fname, ftype) for fname, ftype in annotations.items()} 

480 

481 # ! don't use full_dict here: 

482 other_kwargs = kwargs | { 

483 k: v for k, v in cls.__dict__.items() if k not in annotations and not k.startswith("_") 

484 } 

485 

486 for key in typedfields.keys() - full_dict.keys(): 

487 # typed fields that haven't been added to the object yet 

488 setattr(cls, key, typedfields[key]) 

489 

490 # start with base classes and overwrite with current class: 

491 relationships = filter_out(full_dict, Relationship) | relationships | filter_out(other_kwargs, Relationship) 

492 

493 # DEPRECATED: Relationship as annotation is currently not supported! 

494 # ensure they are all instances and 

495 # not mix of instances (`= relationship()`) and classes (`: Relationship[...]`): 

496 # relationships = { 

497 # k: v if isinstance(v, Relationship) else to_relationship(cls, k, v) for k, v in relationships.items() 

498 # } 

499 

500 # keys of implicit references (also relationships): 

501 reference_field_keys = [k for k, v in fields.items() if v.type.split(" ")[0] in ("list:reference", "reference")] 

502 

503 # add implicit relationships: 

504 # User; list[User]; TypedField[User]; TypedField[list[User]] 

505 relationships |= { 

506 k: new_relationship 

507 for k in reference_field_keys 

508 if k not in relationships and (new_relationship := to_relationship(cls, k, annotations[k])) 

509 } 

510 

511 cache_dependency = other_kwargs.pop("cache_dependency", True) 

512 

513 table: Table = self.define_table(tablename, *fields.values(), **other_kwargs) 

514 

515 for name, typed_field in typedfields.items(): 

516 field = fields[name] 

517 typed_field.bind(field, table) 

518 

519 if issubclass(cls, TypedTable): 

520 cls.__set_internals__( 

521 db=self, 

522 table=table, 

523 # by now, all relationships should be instances! 

524 relationships=typing.cast(dict[str, Relationship[Any]], relationships), 

525 ) 

526 self._class_map[str(table)] = cls 

527 cls.__on_define__(self) 

528 else: 

529 warnings.warn("db.define used without inheriting TypedTable. This could lead to strange problems!") 

530 

531 if not tablename.startswith("typedal_") and cache_dependency: 

532 table._before_update.append(lambda s, _: _remove_cache(s, tablename)) 

533 table._before_delete.append(lambda s: _remove_cache(s, tablename)) 

534 

535 return cls 

536 

537 @typing.overload 

538 def define(self, maybe_cls: None = None, **kwargs: Any) -> typing.Callable[[typing.Type[T]], typing.Type[T]]: 

539 """ 

540 Typing Overload for define without a class. 

541 

542 @db.define() 

543 class MyTable(TypedTable): ... 

544 """ 

545 

546 @typing.overload 

547 def define(self, maybe_cls: typing.Type[T], **kwargs: Any) -> typing.Type[T]: 

548 """ 

549 Typing Overload for define with a class. 

550 

551 @db.define 

552 class MyTable(TypedTable): ... 

553 """ 

554 

555 def define( 

556 self, maybe_cls: typing.Type[T] | None = None, **kwargs: Any 

557 ) -> typing.Type[T] | typing.Callable[[typing.Type[T]], typing.Type[T]]: 

558 """ 

559 Can be used as a decorator on a class that inherits `TypedTable`, \ 

560 or as a regular method if you need to define your classes before you have access to a 'db' instance. 

561 

562 You can also pass extra arguments to db.define_table. 

563 See http://www.web2py.com/books/default/chapter/29/06/the-database-abstraction-layer#Table-constructor 

564 

565 Example: 

566 @db.define 

567 class Person(TypedTable): 

568 ... 

569 

570 class Article(TypedTable): 

571 ... 

572 

573 # at a later time: 

574 db.define(Article) 

575 

576 Returns: 

577 the result of pydal.define_table 

578 """ 

579 

580 def wrapper(cls: typing.Type[T]) -> typing.Type[T]: 

581 return self._define(cls, **kwargs) 

582 

583 if maybe_cls: 

584 return wrapper(maybe_cls) 

585 

586 return wrapper 

587 

588 # def drop(self, table_name: str) -> None: 

589 # """ 

590 # Remove a table by name (both on the database level and the typedal level). 

591 # """ 

592 # # drop calls TypedTable.drop() and removes it from the `_class_map` 

593 # if cls := self._class_map.pop(table_name, None): 

594 # cls.drop() 

595 

596 # def drop_all(self, max_retries: int = None) -> None: 

597 # """ 

598 # Remove all tables and keep doing so until everything is gone! 

599 # """ 

600 # retries = 0 

601 # if max_retries is None: 

602 # max_retries = len(self.tables) 

603 # 

604 # while self.tables: 

605 # retries += 1 

606 # for table in self.tables: 

607 # self.drop(table) 

608 # 

609 # if retries > max_retries: 

610 # raise RuntimeError("Could not delete all tables") 

611 

612 def __call__(self, *_args: T_Query, **kwargs: Any) -> "TypedSet": 

613 """ 

614 A db instance can be called directly to perform a query. 

615 

616 Usually, only a query is passed. 

617 

618 Example: 

619 db(query).select() 

620 

621 """ 

622 args = list(_args) 

623 if args: 

624 cls = args[0] 

625 if isinstance(cls, bool): 

626 raise ValueError("Don't actually pass a bool to db()! Use a query instead.") 

627 

628 if isinstance(cls, type) and issubclass(type(cls), type) and issubclass(cls, TypedTable): 

629 # table defined without @db.define decorator! 

630 _cls: typing.Type[TypedTable] = cls 

631 args[0] = _cls.id != None 

632 

633 _set = super().__call__(*args, **kwargs) 

634 return typing.cast(TypedSet, _set) 
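
# Sketch (not in the original source); `Person` is an assumed model defined via db.define.
# A regular pydal-style query works as usual, and passing the model class itself is
# rewritten to `Person.id != None`, i.e. it matches every row:
#
#     db(Person.name == "Alice").select()
#     db(Person).count()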

635 

636 def __getitem__(self, key: str) -> "Table": 

637 """ 

638 Allows dynamically accessing a table by its name as a string. 

639 

640 Example: 

641 db['users'] -> user 

642 """ 

643 return typing.cast(Table, super().__getitem__(str(key))) 

644 

645 @classmethod 

646 def _build_field(cls, name: str, _type: str, **kw: Any) -> Field: 

647 return Field(name, _type, **{**cls.default_kwargs, **kw}) 

648 

649 @classmethod 

650 def _annotation_to_pydal_fieldtype( 

651 cls, _ftype: T_annotation, mut_kw: typing.MutableMapping[str, Any] 

652 ) -> Optional[str]: 

653 # ftype can be a union or type. typing.cast is sometimes used to tell mypy when it's not a union. 

654 ftype = typing.cast(type, _ftype) # cast from typing.Type to type to make mypy happy 

655 

656 if isinstance(ftype, str): 

657 # extract type from string 

658 ftype = typing.get_args(typing.Type[ftype])[0]._evaluate( 

659 localns=locals(), globalns=globals(), recursive_guard=frozenset() 

660 ) 

661 

662 if mapping := BASIC_MAPPINGS.get(ftype): 

663 # basic types 

664 return mapping 

665 elif isinstance(ftype, _Table): 

666 # db.table 

667 return f"reference {ftype._tablename}" 

668 elif issubclass(type(ftype), type) and issubclass(ftype, TypedTable): 

669 # SomeTable 

670 snakename = cls.to_snake(ftype.__name__) 

671 return f"reference {snakename}" 

672 elif isinstance(ftype, TypedField): 

673 # FieldType(type, ...) 

674 return ftype._to_field(mut_kw) 

675 elif origin_is_subclass(ftype, TypedField): 

676 # TypedField[int] 

677 return cls._annotation_to_pydal_fieldtype(typing.get_args(ftype)[0], mut_kw) 

678 elif isinstance(ftype, types.GenericAlias) and typing.get_origin(ftype) in (list, TypedField): 

679 # list[str] -> str -> string -> list:string 

680 _child_type = typing.get_args(ftype)[0] 

681 _child_type = cls._annotation_to_pydal_fieldtype(_child_type, mut_kw) 

682 return f"list:{_child_type}" 

683 elif is_union(ftype): 

684 # str | int -> UnionType 

685 # typing.Union[str | int] -> typing._UnionGenericAlias 

686 

687 # Optional[type] == type | None 

688 

689 match typing.get_args(ftype): 

690 case (_child_type, _Types.NONETYPE) | (_Types.NONETYPE, _child_type): 

691 # good union of Nullable 

692 

693 # if a field is optional, it is nullable: 

694 mut_kw["notnull"] = False 

695 return cls._annotation_to_pydal_fieldtype(_child_type, mut_kw) 

696 case _: 

697 # two types is not supported by the db! 

698 return None 

699 else: 

700 return None 
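
# Sketch of how the branches above resolve a few annotations (not in the original source;
# `Author` is a hypothetical TypedTable subclass):
#
#     str              -> "string"
#     list[str]        -> "list:string"
#     Author           -> "reference author"   (table name via to_snake)
#     Author | None    -> "reference author"   (and notnull is set to False)
#     TypedField[int]  -> "integer"
#     str | int        -> None                 (unsupported union; _to_field raises NotImplementedError)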

701 

702 @classmethod 

703 def _to_field(cls, fname: str, ftype: type, **kw: Any) -> Field: 

704 """ 

705 Convert an annotation into a pydal Field. 

706 

707 Args: 

708 fname: name of the property 

709 ftype: annotation of the property 

710 kw: when using TypedField or a function returning it (e.g. StringField), 

711 keyword args can be used to pass any other settings you would normally pass to a pydal Field 

712 

713 -> pydal.Field(fname, ftype, **kw) 

714 

715 Example: 

716 class MyTable: 

717 fname: ftype 

718 id: int 

719 name: str 

720 reference: Table 

721 other: TypedField(str, default="John Doe") # default will be in kwargs 

722 """ 

723 fname = cls.to_snake(fname) 

724 

725 if converted_type := cls._annotation_to_pydal_fieldtype(ftype, kw): 

726 return cls._build_field(fname, converted_type, **kw) 

727 else: 

728 raise NotImplementedError(f"Unsupported type {ftype}/{type(ftype)}") 

729 

730 @staticmethod 

731 def to_snake(camel: str) -> str: 

732 """ 

733 Moved to helpers, kept as a static method for legacy reasons. 

734 """ 

735 return to_snake(camel) 

736 

737 

738class TableProtocol(typing.Protocol): # pragma: no cover 

739 """ 

740 Make mypy happy. 

741 """ 

742 

743 id: int # noqa: A003 

744 

745 def __getitem__(self, item: str) -> Field: 

746 """ 

747 Tell mypy a Table supports dictionary notation for columns. 

748 """ 

749 

750 

751class Table(_Table, TableProtocol): # type: ignore 

752 """ 

753 Make mypy happy. 

754 """ 

755 

756 

757class TableMeta(type): 

758 """ 

759 This metaclass contains functionality for table classes that doesn't exist on their instances. 

760 

761 Example: 

762 class MyTable(TypedTable): 

763 some_field: TypedField[int] 

764 

765 MyTable.update_or_insert(...) # should work 

766 

767 MyTable.some_field # -> Field, can be used to query etc. 

768 

769 row = MyTable.first() # returns instance of MyTable 

770 

771 # row.update_or_insert(...) # shouldn't work! 

772 

773 row.some_field # -> int, with actual data 

774 

775 """ 

776 

777 # set up by db.define: 

778 # _db: TypeDAL | None = None 

779 # _table: Table | None = None 

780 _db: TypeDAL | None = None 

781 _table: Table | None = None 

782 _relationships: dict[str, Relationship[Any]] | None = None 

783 

784 ######################### 

785 # TypeDAL custom logic: # 

786 ######################### 

787 

788 def __set_internals__(self, db: pydal.DAL, table: Table, relationships: dict[str, Relationship[Any]]) -> None: 

789 """ 

790 Store the related database and pydal table for later usage. 

791 """ 

792 self._db = db 

793 self._table = table 

794 self._relationships = relationships 

795 

796 def __getattr__(self, col: str) -> Optional[Field]: 

797 """ 

798 Magic method used by TypedTableMeta to get a database field with dot notation on a class. 

799 

800 Example: 

801 SomeTypedTable.col -> db.table.col (via TypedTableMeta.__getattr__) 

802 

803 """ 

804 if self._table: 

805 return getattr(self._table, col, None) 

806 

807 return None 

808 

809 def _ensure_table_defined(self) -> Table: 

810 if not self._table: 

811 raise EnvironmentError("@db.define or db.define() has not been called on this class yet!") 

812 return self._table 

813 

814 def __iter__(self) -> typing.Generator[Field, None, None]: 

815 """ 

816 Loop through the columns of this model. 

817 """ 

818 table = self._ensure_table_defined() 

819 yield from iter(table) 

820 

821 def __getitem__(self, item: str) -> Field: 

822 """ 

823 Allow dict notation to get a column of this table (-> Field instance). 

824 """ 

825 table = self._ensure_table_defined() 

826 return table[item] 

827 

828 def __str__(self) -> str: 

829 """ 

830 Normally, just returns the underlying table name, but with a fallback if the model is unbound. 

831 """ 

832 if self._table: 

833 return str(self._table) 

834 else: 

835 return f"<unbound table {self.__name__}>" 

836 

837 def from_row(self: typing.Type[T_MetaInstance], row: pydal.objects.Row) -> T_MetaInstance: 

838 """ 

839 Create a model instance from a pydal row. 

840 """ 

841 return self(row) 

842 

843 def all(self: typing.Type[T_MetaInstance]) -> "TypedRows[T_MetaInstance]": # noqa: A003 

844 """ 

845 Return all rows for this model. 

846 """ 

847 return self.collect() 

848 

849 def __json__(self: typing.Type[T_MetaInstance], instance: T_MetaInstance | None = None) -> dict[str, Any]: 

850 """ 

851 Convert to a json-dumpable dict. 

852 

853 as_dict is not fully json-dumpable, so use as_json and json.loads to ensure it is dumpable (and loadable). 

854 todo: can this be optimized? 

855 

856 See Also: 

857 https://github.com/jeff-hykin/json_fix 

858 """ 

859 string = instance.as_json() if instance else self.as_json() 

860 

861 return typing.cast(dict[str, Any], json.loads(string)) 

862 

863 def get_relationships(self) -> dict[str, Relationship[Any]]: 

864 """ 

865 Return the registered relationships of the current model. 

866 """ 

867 return self._relationships or {} 

868 

869 ########################## 

870 # TypeDAL Modified Logic # 

871 ########################## 

872 

873 def insert(self: typing.Type[T_MetaInstance], **fields: Any) -> T_MetaInstance: 

874 """ 

875 This is only called when db.define is not used as a decorator. 

876 

877 cls.__table functions as 'self' 

878 

879 Args: 

880 **fields: anything you want to insert in the database 

881 

882 Returns: a model instance wrapping the newly inserted row. 

883 

884 """ 

885 table = self._ensure_table_defined() 

886 

887 result = table.insert(**fields) 

888 # table.insert returns the new row id; wrap it in a model instance 

889 return self(result) 

890 

891 def _insert(self, **fields: Any) -> str: 

892 table = self._ensure_table_defined() 

893 

894 return str(table._insert(**fields)) 

895 

896 def bulk_insert(self: typing.Type[T_MetaInstance], items: list[dict[str, Any]]) -> "TypedRows[T_MetaInstance]": 

897 """ 

898 Insert multiple rows, returns a TypedRows set of new instances. 

899 """ 

900 table = self._ensure_table_defined() 

901 result = table.bulk_insert(items) 

902 return self.where(lambda row: row.id.belongs(result)).collect() 

903 

904 def update_or_insert( 

905 self: typing.Type[T_MetaInstance], query: T_Query | dict[str, Any] = DEFAULT, **values: Any 

906 ) -> T_MetaInstance: 

907 """ 

908 Update a row if query matches, else insert a new one. 

909 

910 Returns the created or updated instance. 

911 """ 

912 table = self._ensure_table_defined() 

913 

914 if query is DEFAULT: 

915 record = table(**values) 

916 elif isinstance(query, dict): 

917 record = table(**query) 

918 else: 

919 record = table(query) 

920 

921 if not record: 

922 return self.insert(**values) 

923 

924 record.update_record(**values) 

925 return self(record) 
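
# Usage sketch (not part of the original source); `Person` and its fields are assumptions:
#
#     Person.update_or_insert(Person.email == "a@b.c", name="Alice", email="a@b.c")
#     Person.update_or_insert({"email": "a@b.c"}, name="Alice")  # dict-style query
#     Person.update_or_insert(name="Bob")                        # no query: match on the values themselves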

926 

927 def validate_and_insert( 

928 self: typing.Type[T_MetaInstance], **fields: Any 

929 ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]: 

930 """ 

931 Validate input data and then insert a row. 

932 

933 Returns a tuple of (the created instance, a dict of errors). 

934 """ 

935 table = self._ensure_table_defined() 

936 result = table.validate_and_insert(**fields) 

937 if row_id := result.get("id"): 

938 return self(row_id), None 

939 else: 

940 return None, result.get("errors") 

941 

942 def validate_and_update( 

943 self: typing.Type[T_MetaInstance], query: Query, **fields: Any 

944 ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]: 

945 """ 

946 Validate input data and then update max 1 row. 

947 

948 Returns a tuple of (the updated instance, a dict of errors). 

949 """ 

950 table = self._ensure_table_defined() 

951 

952 try: 

953 result = table.validate_and_update(query, **fields) 

954 except Exception as e: 

955 result = {"errors": {"exception": str(e)}} 

956 

957 if errors := result.get("errors"): 

958 return None, errors 

959 elif row_id := result.get("id"): 

960 return self(row_id), None 

961 else: # pragma: no cover 

962 # update on query without result (shouldn't happen) 

963 return None, None 

964 

965 def validate_and_update_or_insert( 

966 self: typing.Type[T_MetaInstance], query: Query, **fields: Any 

967 ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]: 

968 """ 

969 Validate input data and then update_and_insert (on max 1 row). 

970 

971 Returns a tuple of (the updated/created instance, a dict of errors). 

972 """ 

973 table = self._ensure_table_defined() 

974 result = table.validate_and_update_or_insert(query, **fields) 

975 

976 if errors := result.get("errors"): 

977 return None, errors 

978 elif row_id := result.get("id"): 

979 return self(row_id), None 

980 else: # pragma: no cover 

981 # update on query without result (shouldn't happen) 

982 return None, None 

983 

984 def select(self: typing.Type[T_MetaInstance], *a: Any, **kw: Any) -> "QueryBuilder[T_MetaInstance]": 

985 """ 

986 See QueryBuilder.select! 

987 """ 

988 return QueryBuilder(self).select(*a, **kw) 

989 

990 def paginate(self: typing.Type[T_MetaInstance], limit: int, page: int = 1) -> "PaginatedRows[T_MetaInstance]": 

991 """ 

992 See QueryBuilder.paginate! 

993 """ 

994 return QueryBuilder(self).paginate(limit=limit, page=page) 

995 

996 def chunk( 

997 self: typing.Type[T_MetaInstance], chunk_size: int 

998 ) -> typing.Generator["TypedRows[T_MetaInstance]", Any, None]: 

999 """ 

1000 See QueryBuilder.chunk! 

1001 """ 

1002 return QueryBuilder(self).chunk(chunk_size) 

1003 

1004 def where(self: typing.Type[T_MetaInstance], *a: Any, **kw: Any) -> "QueryBuilder[T_MetaInstance]": 

1005 """ 

1006 See QueryBuilder.where! 

1007 """ 

1008 return QueryBuilder(self).where(*a, **kw) 

1009 

1010 def cache(self: typing.Type[T_MetaInstance], *deps: Any, **kwargs: Any) -> "QueryBuilder[T_MetaInstance]": 

1011 """ 

1012 See QueryBuilder.cache! 

1013 """ 

1014 return QueryBuilder(self).cache(*deps, **kwargs) 

1015 

1016 def count(self: typing.Type[T_MetaInstance]) -> int: 

1017 """ 

1018 See QueryBuilder.count! 

1019 """ 

1020 return QueryBuilder(self).count() 

1021 

1022 def first(self: typing.Type[T_MetaInstance]) -> T_MetaInstance | None: 

1023 """ 

1024 See QueryBuilder.first! 

1025 """ 

1026 return QueryBuilder(self).first() 

1027 

1028 def join( 

1029 self: typing.Type[T_MetaInstance], 

1030 *fields: str | typing.Type["TypedTable"], 

1031 method: JOIN_OPTIONS = None, 

1032 on: OnQuery | list[Expression] | Expression = None, 

1033 condition: Condition = None, 

1034 ) -> "QueryBuilder[T_MetaInstance]": 

1035 """ 

1036 See QueryBuilder.join! 

1037 """ 

1038 return QueryBuilder(self).join(*fields, on=on, condition=condition, method=method) 

1039 

1040 def collect(self: typing.Type[T_MetaInstance], verbose: bool = False) -> "TypedRows[T_MetaInstance]": 

1041 """ 

1042 See QueryBuilder.collect! 

1043 """ 

1044 return QueryBuilder(self).collect(verbose=verbose) 

1045 

1046 @property 

1047 def ALL(cls) -> pydal.objects.SQLALL: 

1048 """ 

1049 Select all fields for this table. 

1050 """ 

1051 table = cls._ensure_table_defined() 

1052 

1053 return table.ALL 
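
# Chaining sketch (not part of the original source); model and field names are assumptions.
# Each of the classmethods above just starts a QueryBuilder, so calls can be chained:
#
#     page = Person.where(Person.age >= 18).join("posts").paginate(limit=20, page=1)
#     adults = Person.where(lambda row: row.age >= 18).collect()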

1054 

1055 ########################## 

1056 # TypeDAL Shadowed Logic # 

1057 ########################## 

1058 fields: list[str] 

1059 

1060 # other table methods: 

1061 

1062 def truncate(self, mode: str = "") -> None: 

1063 """ 

1064 Remove all data and reset index. 

1065 """ 

1066 table = self._ensure_table_defined() 

1067 table.truncate(mode) 

1068 

1069 def drop(self, mode: str = "") -> None: 

1070 """ 

1071 Remove the underlying table. 

1072 """ 

1073 table = self._ensure_table_defined() 

1074 table.drop(mode) 

1075 

1076 def create_index(self, name: str, *fields: Field | str, **kwargs: Any) -> bool: 

1077 """ 

1078 Add an index on some columns of this table. 

1079 """ 

1080 table = self._ensure_table_defined() 

1081 result = table.create_index(name, *fields, **kwargs) 

1082 return typing.cast(bool, result) 

1083 

1084 def drop_index(self, name: str, if_exists: bool = False) -> bool: 

1085 """ 

1086 Remove an index from this table. 

1087 """ 

1088 table = self._ensure_table_defined() 

1089 result = table.drop_index(name, if_exists) 

1090 return typing.cast(bool, result) 

1091 

1092 def import_from_csv_file( 

1093 self, 

1094 csvfile: typing.TextIO, 

1095 id_map: dict[str, str] = None, 

1096 null: Any = "<NULL>", 

1097 unique: str = "uuid", 

1098 id_offset: dict[str, int] = None, # id_offset used only when id_map is None 

1099 transform: typing.Callable[[dict[Any, Any]], dict[Any, Any]] = None, 

1100 validate: bool = False, 

1101 encoding: str = "utf-8", 

1102 delimiter: str = ",", 

1103 quotechar: str = '"', 

1104 quoting: int = csv.QUOTE_MINIMAL, 

1105 restore: bool = False, 

1106 **kwargs: Any, 

1107 ) -> None: 

1108 """ 

1109 Load a csv file into the database. 

1110 """ 

1111 table = self._ensure_table_defined() 

1112 table.import_from_csv_file( 

1113 csvfile, 

1114 id_map=id_map, 

1115 null=null, 

1116 unique=unique, 

1117 id_offset=id_offset, 

1118 transform=transform, 

1119 validate=validate, 

1120 encoding=encoding, 

1121 delimiter=delimiter, 

1122 quotechar=quotechar, 

1123 quoting=quoting, 

1124 restore=restore, 

1125 **kwargs, 

1126 ) 

1127 

1128 def on(self, query: Query | bool) -> Expression: 

1129 """ 

1130 Shadow Table.on. 

1131 

1132 Used for joins. 

1133 

1134 See Also: 

1135 http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#One-to-many-relation 

1136 """ 

1137 table = self._ensure_table_defined() 

1138 return typing.cast(Expression, table.on(query)) 

1139 

1140 def with_alias(self, alias: str) -> _Table: 

1141 """ 

1142 Shadow Table.with_alias. 

1143 

1144 Useful for joins when joining the same table multiple times. 

1145 

1146 See Also: 

1147 http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#One-to-many-relation 

1148 """ 

1149 table = self._ensure_table_defined() 

1150 return table.with_alias(alias) 

1151 

1152 # @typing.dataclass_transform() 

1153 

1154 

1155class TypedField(typing.Generic[T_Value]): # pragma: no cover 

1156 """ 

1157 Typed version of pydal.Field, which will be converted to a normal Field in the background. 

1158 """ 

1159 

1160 # will be set by .bind on db.define 

1161 name = "" 

1162 _db: Optional[pydal.DAL] = None 

1163 _rname: Optional[str] = None 

1164 _table: Optional[Table] = None 

1165 _field: Optional[Field] = None 

1166 

1167 _type: T_annotation 

1168 kwargs: Any 

1169 

1170 requires: Validator | typing.Iterable[Validator] 

1171 

1172 def __init__(self, _type: typing.Type[T_Value] | types.UnionType = str, /, **settings: Any) -> None: # type: ignore 

1173 """ 

1174 A TypedField should not be initialized manually; the field helpers from `fields.py` should be used instead! 

1175 """ 

1176 self._type = _type 

1177 self.kwargs = settings 

1178 super().__init__() 

1179 

1180 @typing.overload 

1181 def __get__(self, instance: T_MetaInstance, owner: typing.Type[T_MetaInstance]) -> T_Value: # pragma: no cover 

1182 """ 

1183 row.field -> (actual data). 

1184 """ 

1185 

1186 @typing.overload 

1187 def __get__(self, instance: None, owner: "typing.Type[TypedTable]") -> "TypedField[T_Value]": # pragma: no cover 

1188 """ 

1189 Table.field -> Field. 

1190 """ 

1191 

1192 def __get__( 

1193 self, instance: T_MetaInstance | None, owner: typing.Type[T_MetaInstance] 

1194 ) -> typing.Union[T_Value, "TypedField[T_Value]"]: 

1195 """ 

1196 Since this class is a Descriptor field, \ 

1197 it returns something else depending on whether it's called on a class or an instance. 

1198 

1199 (this is mostly for mypy/typing) 

1200 """ 

1201 if instance: 

1202 # this is only reached in a very specific case: 

1203 # an instance of the object was created with a specific set of fields selected (excluding the current one) 

1204 # in that case, no value was stored in the owner -> return None (since the field was not selected) 

1205 return typing.cast(T_Value, None) # cast as T_Value so mypy understands it for selected fields 

1206 else: 

1207 # getting as class -> return actual field so pydal understands it when using in query etc. 

1208 return typing.cast(TypedField[T_Value], self._field) # pretend it's still typed for IDE support 
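
# Descriptor behavior sketch (not part of the original source); `Person` is an assumed model:
#
#     Person.name                            # class access -> bound pydal Field, usable in queries
#     Person.where(Person.name == "Alice")
#     row = Person.first()
#     row.name                               # instance access -> the actual value stored in the row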

1209 

1210 def __str__(self) -> str: 

1211 """ 

1212 String representation of a Typed Field. 

1213 

1214 If `type` is set explicitly (e.g. TypedField(str, type="text")), that type is used: `TypedField.text`, 

1215 otherwise the type annotation is used (e.g. TypedField(str) -> TypedField.str) 

1216 """ 

1217 return str(self._field) if self._field else "" 

1218 

1219 def __repr__(self) -> str: 

1220 """ 

1221 More detailed string representation of a Typed Field. 

1222 

1223 Uses __str__ and adds the provided extra options (kwargs) in the representation. 

1224 """ 

1225 s = self.__str__() 

1226 

1227 if "type" in self.kwargs: 

1228 # manual type in kwargs supplied 

1229 t = self.kwargs["type"] 

1230 elif issubclass(type, type(self._type)): 

1231 # normal type, str.__name__ = 'str' 

1232 t = getattr(self._type, "__name__", str(self._type)) 

1233 elif t_args := typing.get_args(self._type): 

1234 # list[str] -> 'str' 

1235 t = t_args[0].__name__ 

1236 else: # pragma: no cover 

1237 # fallback - something else, may not even happen, I'm not sure 

1238 t = self._type 

1239 

1240 s = f"TypedField[{t}].{s}" if s else f"TypedField[{t}]" 

1241 

1242 kw = self.kwargs.copy() 

1243 kw.pop("type", None) 

1244 return f"<{s} with options {kw}>" 

1245 

1246 def _to_field(self, extra_kwargs: typing.MutableMapping[str, Any]) -> Optional[str]: 

1247 """ 

1248 Convert a Typed Field instance to a pydal.Field. 

1249 """ 

1250 other_kwargs = self.kwargs.copy() 

1251 extra_kwargs.update(other_kwargs) 

1252 return extra_kwargs.pop("type", False) or TypeDAL._annotation_to_pydal_fieldtype(self._type, extra_kwargs) 

1253 

1254 def bind(self, field: pydal.objects.Field, table: pydal.objects.Table) -> None: 

1255 """ 

1256 Bind the right db/table/field info to this class, so queries can be made using `Class.field == ...`. 

1257 """ 

1258 self._table = table 

1259 self._field = field 

1260 

1261 def __getattr__(self, key: str) -> Any: 

1262 """ 

1263 If the regular getattribute does not work, try to get info from the related Field. 

1264 """ 

1265 with contextlib.suppress(AttributeError): 

1266 return super().__getattribute__(key) 

1267 

1268 # try on actual field: 

1269 return getattr(self._field, key) 

1270 

1271 def __eq__(self, other: Any) -> Query: 

1272 """ 

1273 Performing == on a Field will result in a Query. 

1274 """ 

1275 return typing.cast(Query, self._field == other) 

1276 

1277 def __ne__(self, other: Any) -> Query: 

1278 """ 

1279 Performing != on a Field will result in a Query. 

1280 """ 

1281 return typing.cast(Query, self._field != other) 

1282 

1283 def __gt__(self, other: Any) -> Query: 

1284 """ 

1285 Performing > on a Field will result in a Query. 

1286 """ 

1287 return typing.cast(Query, self._field > other) 

1288 

1289 def __lt__(self, other: Any) -> Query: 

1290 """ 

1291 Performing < on a Field will result in a Query. 

1292 """ 

1293 return typing.cast(Query, self._field < other) 

1294 

1295 def __ge__(self, other: Any) -> Query: 

1296 """ 

1297 Performing >= on a Field will result in a Query. 

1298 """ 

1299 return typing.cast(Query, self._field >= other) 

1300 

1301 def __le__(self, other: Any) -> Query: 

1302 """ 

1303 Performing <= on a Field will result in a Query. 

1304 """ 

1305 return typing.cast(Query, self._field <= other) 

1306 

1307 def __hash__(self) -> int: 

1308 """ 

1309 Shadow Field.__hash__. 

1310 """ 

1311 return hash(self._field) 

1312 

1313 def __invert__(self) -> Expression: 

1314 """ 

1315 Performing ~ on a Field will result in an Expression. 

1316 """ 

1317 if not self._field: # pragma: no cover 

1318 raise ValueError("Unbound Field can not be inverted!") 

1319 

1320 return typing.cast(Expression, ~self._field) 

1321 

1322 

1323class TypedTable(metaclass=TableMeta): 

1324 """ 

1325 Enhanced modeling system on top of pydal's Table that adds typing and additional functionality. 

1326 """ 

1327 

1328 # set up by 'new': 

1329 _row: Row | None = None 

1330 

1331 _with: list[str] 

1332 

1333 id: "TypedField[int]" # noqa: A003 

1334 

1335 _before_insert: list[BeforeInsertCallable] 

1336 _after_insert: list[AfterInsertCallable] 

1337 _before_update: list[BeforeUpdateCallable] 

1338 _after_update: list[AfterUpdateCallable] 

1339 _before_delete: list[BeforeDeleteCallable] 

1340 _after_delete: list[AfterDeleteCallable] 

1341 

1342 def _setup_instance_methods(self) -> None: 

1343 self.as_dict = self._as_dict # type: ignore 

1344 self.__json__ = self.as_json = self._as_json # type: ignore 

1345 # self.as_yaml = self._as_yaml # type: ignore 

1346 self.as_xml = self._as_xml # type: ignore 

1347 

1348 self.update = self._update # type: ignore 

1349 

1350 self.delete_record = self._delete_record # type: ignore 

1351 self.update_record = self._update_record # type: ignore 

1352 

1353 def __new__( 

1354 cls, row_or_id: typing.Union[Row, Query, pydal.objects.Set, int, str, None, "TypedTable"] = None, **filters: Any 

1355 ) -> "TypedTable": 

1356 """ 

1357 Create a TypedTable model instance from an existing row, ID, or query. 

1358 

1359 Examples: 

1360 MyTable(1) 

1361 MyTable(id=1) 

1362 MyTable(MyTable.id == 1) 

1363 """ 

1364 table = cls._ensure_table_defined() 

1365 inst = super().__new__(cls) 

1366 

1367 if isinstance(row_or_id, TypedTable): 

1368 # existing typed table instance! 

1369 return row_or_id 

1370 elif isinstance(row_or_id, pydal.objects.Row): 

1371 row = row_or_id 

1372 elif row_or_id is not None: 

1373 row = table(row_or_id, **filters) 

1374 elif filters: 

1375 row = table(**filters) 

1376 else: 

1377 # dummy object 

1378 return inst 

1379 

1380 if not row: 

1381 return None # type: ignore 

1382 

1383 inst._row = row 

1384 inst.__dict__.update(row) 

1385 inst._setup_instance_methods() 

1386 return inst 

1387 

1388 @classmethod 

1389 def __on_define__(cls, db: TypeDAL) -> None: 

1390 """ 

1391 Method that can be implemented by tables to do an action after db.define is completed. 

1392 

1393 This can be useful if you need to add something like requires=IS_NOT_IN_DB(db, "table.field"), 

1394 where you need a reference to the current database, which may not exist yet when defining the model. 

1395 """ 

1396 

1397 def __iter__(self) -> typing.Generator[Any, None, None]: 

1398 """ 

1399 Allows looping through the columns. 

1400 """ 

1401 row = self._ensure_matching_row() 

1402 yield from iter(row) 

1403 

1404 def __getitem__(self, item: str) -> Any: 

1405 """ 

1406 Allows dictionary notation to get columns. 

1407 """ 

1408 if item in self.__dict__: 

1409 return self.__dict__.get(item) 

1410 

1411 # fallback to lookup in row 

1412 if self._row: 

1413 return self._row[item] 

1414 

1415 # nothing found! 

1416 raise KeyError(item) 

1417 

1418 def __getattr__(self, item: str) -> Any: 

1419 """ 

1420 Allows dot notation to get columns. 

1421 """ 

1422 if value := self.get(item): 

1423 return value 

1424 

1425 raise AttributeError(item) 

1426 

1427 def get(self, item: str, default: Any = None) -> Any: 

1428 """ 

1429 Try to get a column from this instance, else return default. 

1430 """ 

1431 try: 

1432 return self.__getitem__(item) 

1433 except KeyError: 

1434 return default 

1435 

1436 def __setitem__(self, key: str, value: Any) -> None: 

1437 """ 

1438 Data can both be updated via dot and dict notation. 

1439 """ 

1440 return setattr(self, key, value) 

1441 

1442 def __int__(self) -> int: 

1443 """ 

1444 Calling int on a model instance will return its id. 

1445 """ 

1446 return getattr(self, "id", 0) 

1447 

1448 def __bool__(self) -> bool: 

1449 """ 

1450 If the instance has an underlying row with data, it is truthy. 

1451 """ 

1452 return bool(getattr(self, "_row", False)) 

1453 

1454 def _ensure_matching_row(self) -> Row: 

1455 if not getattr(self, "_row", None): 

1456 raise EnvironmentError("Trying to access non-existent row. Maybe it was deleted or not yet initialized?") 

1457 return self._row 

1458 

1459 def __repr__(self) -> str: 

1460 """ 

1461 String representation of the model instance. 

1462 """ 

1463 model_name = self.__class__.__name__ 

1464 model_data = {} 

1465 

1466 if self._row: 

1467 model_data = self._row.as_json() 

1468 

1469 details = model_name 

1470 details += f"({model_data})" 

1471 

1472 if relationships := getattr(self, "_with", []): 

1473 details += f" + {relationships}" 

1474 

1475 return f"<{details}>" 

1476 

1477 # serialization 

1478 # underscore variants work for class instances (set up by _setup_instance_methods) 

1479 

1480 @classmethod 

1481 def as_dict(cls, flat: bool = False, sanitize: bool = True) -> dict[str, Any]: 

1482 """ 

1483 Dump the object to a plain dict. 

1484 

1485 Can be used as both a class or instance method: 

1486 - dumps the table info if it's a class 

1487 - dumps the row info if it's an instance (see _as_dict) 

1488 """ 

1489 table = cls._ensure_table_defined() 

1490 result = table.as_dict(flat, sanitize) 

1491 return typing.cast(dict[str, Any], result) 

1492 

1493 @classmethod 

1494 def as_json(cls, sanitize: bool = True) -> str: 

1495 """ 

1496 Dump the object to json. 

1497 

1498 Can be used as both a class or instance method: 

1499 - dumps the table info if it's a class 

1500 - dumps the row info if it's an instance (see _as_json) 

1501 """ 

1502 table = cls._ensure_table_defined() 

1503 return typing.cast(str, table.as_json(sanitize)) 

1504 

1505 @classmethod 

1506 def as_xml(cls, sanitize: bool = True) -> str: # pragma: no cover 

1507 """ 

1508 Dump the object to xml. 

1509 

1510 Can be used as both a class or instance method: 

1511 - dumps the table info if it's a class 

1512 - dumps the row info if it's an instance (see _as_xml) 

1513 """ 

1514 table = cls._ensure_table_defined() 

1515 return typing.cast(str, table.as_xml(sanitize)) 

1516 

1517 @classmethod 

1518 def as_yaml(cls, sanitize: bool = True) -> str: 

1519 """ 

1520 Dump the object to yaml. 

1521 

1522 Can be used as both a class or instance method: 

1523 - dumps the table info if it's a class 

1524 - dumps the row info if it's an instance (see _as_yaml) 

1525 """ 

1526 table = cls._ensure_table_defined() 

1527 return typing.cast(str, table.as_yaml(sanitize)) 

1528 

1529 def _as_dict( 

1530 self, datetime_to_str: bool = False, custom_types: typing.Iterable[type] | type | None = None 

1531 ) -> dict[str, Any]: 

1532 row = self._ensure_matching_row() 

1533 result = row.as_dict(datetime_to_str=datetime_to_str, custom_types=custom_types) 

1534 

1535 if _with := getattr(self, "_with", None): 

1536 for relationship in _with: 

1537 data = self.get(relationship) 

1538 if isinstance(data, list): 

1539 data = [_.as_dict() if getattr(_, "as_dict", None) else _ for _ in data] 

1540 elif data: 

1541 data = data.as_dict() 

1542 

1543 result[relationship] = data 

1544 

1545 return typing.cast(dict[str, Any], result) 

1546 

1547 def _as_json( 

1548 self, 

1549 mode: str = "object", 

1550 default: typing.Callable[[Any], Any] = None, 

1551 colnames: list[str] = None, 

1552 serialize: bool = True, 

1553 **kwargs: Any, 

1554 ) -> str: 

1555 row = self._ensure_matching_row() 

1556 return typing.cast(str, row.as_json(mode, default, colnames, serialize, **kwargs)) 

1557 

1558 def _as_xml(self, sanitize: bool = True) -> str: # pragma: no cover 

1559 row = self._ensure_matching_row() 

1560 return typing.cast(str, row.as_xml(sanitize)) 

1561 

1562 # def _as_yaml(self, sanitize: bool = True) -> str: 

1563 # row = self._ensure_matching_row() 

1564 # return typing.cast(str, row.as_yaml(sanitize)) 

1565 

1566 def __setattr__(self, key: str, value: Any) -> None: 

1567 """ 

1568 When setting a property on a Typed Table model instance, also update the underlying row. 

1569 """ 

1570 if self._row and key in self._row.__dict__ and not callable(value): 

1571 # enables `row.key = value; row.update_record()` 

1572 self._row[key] = value 

1573 

1574 super().__setattr__(key, value) 

1575 

1576 @classmethod 

1577 def update(cls: typing.Type[T_MetaInstance], query: Query, **fields: Any) -> T_MetaInstance | None: 

1578 """ 

1579 Update one record. 

1580 

1581 Example: 

1582 MyTable.update(MyTable.id == 1, name="NewName") -> MyTable 

1583 """ 

1584 # todo: update multiple? 

1585 if record := cls(query): 

1586 return record.update_record(**fields) 

1587 else: 

1588 return None 

1589 

1590 def _update(self: T_MetaInstance, **fields: Any) -> T_MetaInstance: 

1591 row = self._ensure_matching_row() 

1592 row.update(**fields) 

1593 self.__dict__.update(**fields) 

1594 return self 

1595 

1596 def _update_record(self: T_MetaInstance, **fields: Any) -> T_MetaInstance: 

1597 row = self._ensure_matching_row() 

1598 new_row = row.update_record(**fields) 

1599 self.update(**new_row) 

1600 return self 

1601 

1602 def update_record(self: T_MetaInstance, **fields: Any) -> T_MetaInstance: # pragma: no cover 

1603 """ 

1604 Here as a placeholder for _update_record. 

1605 

1606 Will be replaced on instance creation! 

1607 """ 

1608 return self._update_record(**fields) 

1609 

1610 def _delete_record(self) -> int: 

1611 """ 

1612 Actual logic in `pydal.helpers.classes.RecordDeleter`. 

1613 """ 

1614 row = self._ensure_matching_row() 

1615 result = row.delete_record() 

1616 self.__dict__ = {} # empty self, since row is no more. 

1617 self._row = None # just to be sure 

1618 self._setup_instance_methods() 

1619 # ^ instance methods might've been deleted by emptying dict, 

1620 # but we still want .as_dict to show an error, not the table's as_dict. 

1621 return typing.cast(int, result) 

1622 

1623 def delete_record(self) -> int: # pragma: no cover 

1624 """ 

1625 Here as a placeholder for _delete_record. 

1626 

1627 Will be replaced on instance creation! 

1628 """ 

1629 return self._delete_record() 

1630 

1631 # __del__ is also called on the end of a scope so don't remove records on every del!! 

1632 

1633 # pickling: 

1634 def __setstate__(self, state: dict[str, Any]) -> None: 

1635 """ 

1636 Used by dill when loading from a bytestring. 

1637 """ 

1638 # as_dict also includes table info, so dump as json to only get the actual row data 

1639 # then create a new (more empty) row object: 

1640 state["_row"] = Row(json.loads(state["_row"])) 

1641 self.__dict__ |= state 

1642 

1643 def __getstate__(self) -> dict[str, Any]: 

1644 """ 

1645 State to save when pickling. 

1646 

1647 Prevents db connection from being pickled. 

1648 Similar to as_dict but without changing the data of the relationships (dill does that recursively) 

1649 """ 

1650 row = self._ensure_matching_row() 

1651 result: dict[str, Any] = row.as_dict() 

1652 

1653 if _with := getattr(self, "_with", None): 

1654 for relationship in _with: 

1655 data = self.get(relationship) 

1656 

1657 result[relationship] = data 

1658 

1659 result["_row"] = self._row.as_json() if self._row else "" 

1660 return result 

1661 

1662 

1663# backwards compat: 

1664TypedRow = TypedTable 

1665 

1666 

1667class TypedRows(typing.Collection[T_MetaInstance], Rows): 

1668 """ 

1669 Slightly enhanced and typed functionality on top of pydal Rows (the result of a select). 

1670 """ 

1671 

1672 records: dict[int, T_MetaInstance] 

1673 # _rows: Rows 

1674 model: typing.Type[T_MetaInstance] 

1675 metadata: Metadata 

1676 

1677 # pseudo-properties: actually stored in _rows 

1678 db: TypeDAL 

1679 colnames: list[str] 

1680 fields: list[Field] 

1681 colnames_fields: list[Field] 

1682 response: list[tuple[Any, ...]] 

1683 

1684 def __init__( 

1685 self, 

1686 rows: Rows, 

1687 model: typing.Type[T_MetaInstance], 

1688 records: dict[int, T_MetaInstance] = None, 

1689 metadata: Metadata = None, 

1690 ) -> None: 

1691 """ 

1692 Should not be called manually! 

1693 

1694 Normally, the `records` from an existing `Rows` object are used 

1695 but these can be overwritten with a `records` dict. 

1696 `metadata` can be any (un)structured data 

1697 `model` is a Typed Table class 

1698 """ 

1699 records = records or {row.id: model(row) for row in rows} 

1700 super().__init__(rows.db, records, rows.colnames, rows.compact, rows.response, rows.fields) 

1701 self.model = model 

1702 self.metadata = metadata or {} 

1703 self.colnames = rows.colnames 

1704 

1705 def __len__(self) -> int: 

1706 """ 

1707 Return the count of rows. 

1708 """ 

1709 return len(self.records) 

1710 

1711 def __iter__(self) -> typing.Iterator[T_MetaInstance]: 

1712 """ 

1713 Loop through the rows. 

1714 """ 

1715 yield from self.records.values() 

1716 

1717 def __contains__(self, ind: Any) -> bool: 

1718 """ 

1719 Check if an id exists in this result set. 

1720 """ 

1721 return ind in self.records 

1722 

1723 def first(self) -> T_MetaInstance | None: 

1724 """ 

1725 Get the row with the lowest id. 

1726 """ 

1727 if not self.records: 

1728 return None 

1729 

1730 return next(iter(self)) 

1731 

1732 def last(self) -> T_MetaInstance | None: 

1733 """ 

1734 Get the row with the highest id. 

1735 """ 

1736 if not self.records: 

1737 return None 

1738 

1739 max_id = max(self.records.keys()) 

1740 return self[max_id] 

1741 

1742 def find( 

1743 self, f: typing.Callable[[T_MetaInstance], Query], limitby: tuple[int, int] = None 

1744 ) -> "TypedRows[T_MetaInstance]": 

1745 """ 

1746 Returns a new Rows object, a subset of the original object, filtered by the function `f`. 

1747 """ 

1748 if not self.records: 

1749 return self.__class__(self, self.model, {}) 

1750 

1751 records = {} 

1752 if limitby: 

1753 _min, _max = limitby 

1754 else: 

1755 _min, _max = 0, len(self) 

1756 count = 0 

1757 for i, row in self.records.items(): 

1758 if f(row): 

1759 if _min <= count: 

1760 records[i] = row 

1761 count += 1 

1762 if count == _max: 

1763 break 

1764 

1765 return self.__class__(self, self.model, records) 

1766 

1767 def exclude(self, f: typing.Callable[[T_MetaInstance], Query]) -> "TypedRows[T_MetaInstance]": 

1768 """ 

1769 Removes elements from the calling Rows object, filtered by the function `f`, \ 

1770 and returns a new Rows object containing the removed elements. 

1771 """ 

1772 if not self.records: 

1773 return self.__class__(self, self.model, {}) 

1774 removed = {} 

1775 to_remove = [] 

1776 for i in self.records: 

1777 row = self[i] 

1778 if f(row): 

1779 removed[i] = self.records[i] 

1780 to_remove.append(i) 

1781 

1782 for i in to_remove: self.records.pop(i)

1783 

1784 return self.__class__( 

1785 self, 

1786 self.model, 

1787 removed, 

1788 ) 

1789 

1790 def sort(self, f: typing.Callable[[T_MetaInstance], Any], reverse: bool = False) -> list[T_MetaInstance]: 

1791 """ 

1792 Returns a list of sorted elements (not sorted in place). 

1793 """ 

1794 return sorted(self.records.values(), key=f, reverse=reverse)

1795 
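# Illustrative usage sketch for find/exclude/sort on an already-collected TypedRows,
# assuming a hypothetical `Person` TypedTable with `name: str` and `age: int` fields:
#
#     rows: TypedRows[Person] = Person.where(lambda p: p.age > 0).collect()
#     adults = rows.find(lambda p: p.age >= 18, limitby=(0, 10))  # new TypedRows, still keyed by id
#     removed = rows.exclude(lambda p: p.age < 18)                # mutates `rows`, returns the removed subset
#     by_name = rows.sort(lambda p: p.name, reverse=True)         # plain sorted list, not a TypedRows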

1796 def __str__(self) -> str: 

1797 """ 

1798 Simple string representation. 

1799 """ 

1800 return f"<TypedRows with {len(self)} records>" 

1801 

1802 def __repr__(self) -> str: 

1803 """ 

1804 Print a table on repr(). 

1805 """ 

1806 data = self.as_dict() 

1807 headers = list(next(iter(data.values())).keys()) 

1808 return mktable(data, headers) 

1809 

1810 def group_by_value( 

1811 self, *fields: "str | Field | TypedField[T]", one_result: bool = False, **kwargs: Any 

1812 ) -> dict[T, list[T_MetaInstance]]: 

1813 """ 

1814 Group the rows by a specific field (which will be the dict key). 

1815 """ 

1816 kwargs["one_result"] = one_result 

1817 result = super().group_by_value(*fields, **kwargs) 

1818 return typing.cast(dict[T, list[T_MetaInstance]], result) 

1819 
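# Illustrative sketch of group_by_value, which buckets collected rows by the value of a field.
# Assumes a hypothetical `Person` table with a `city` field:
#
#     rows = Person.where(lambda p: p.id > 0).collect()
#     by_city: dict[str, list[Person]] = rows.group_by_value(Person.city)
#     # with one_result=True, each key maps to a single row instead of a list:
#     one_per_city = rows.group_by_value(Person.city, one_result=True)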

1820 def column(self, column: str = None) -> list[Any]: 

1821 """ 

1822 Get a list of all values in a specific column. 

1823 

1824 Example: 

1825 rows.column('name') -> ['Name 1', 'Name 2', ...] 

1826 """ 

1827 return typing.cast(list[Any], super().column(column)) 

1828 

1829 def as_csv(self) -> str: 

1830 """ 

1831 Dump the data to csv. 

1832 """ 

1833 return typing.cast(str, super().as_csv()) 

1834 

1835 def as_dict( 

1836 self, 

1837 key: str = None, 

1838 compact: bool = False, 

1839 storage_to_dict: bool = False, 

1840 datetime_to_str: bool = False, 

1841 custom_types: list[type] = None, 

1842 ) -> dict[int, dict[str, Any]]: 

1843 """ 

1844 Get the data in a dict of dicts. 

1845 """ 

1846 if any([key, compact, storage_to_dict, datetime_to_str, custom_types]): 

1847 # functionality not guaranteed 

1848 return typing.cast( 

1849 dict[int, dict[str, Any]], 

1850 super().as_dict( 

1851 key or "id", 

1852 compact, 

1853 storage_to_dict, 

1854 datetime_to_str, 

1855 custom_types, 

1856 ), 

1857 ) 

1858 

1859 return {k: v.as_dict() for k, v in self.records.items()} 

1860 

1861 def as_json(self, mode: str = "object", default: typing.Callable[[Any], Any] = None) -> str: 

1862 """ 

1863 Turn the data into a dict and then dump to JSON. 

1864 """ 

1865 return typing.cast(str, super().as_json(mode=mode, default=default)) 

1866 

1867 def json(self, mode: str = "object", default: typing.Callable[[Any], Any] = None) -> str: 

1868 """ 

1869 Turn the data into a dict and then dump to JSON. 

1870 """ 

1871 return typing.cast(str, super().as_json(mode=mode, default=default)) 

1872 

1873 def as_list( 

1874 self, 

1875 compact: bool = False, 

1876 storage_to_dict: bool = False, 

1877 datetime_to_str: bool = False, 

1878 custom_types: list[type] = None, 

1879 ) -> list[dict[str, Any]]: 

1880 """ 

1881 Get the data in a list of dicts. 

1882 """ 

1883 if any([compact, storage_to_dict, datetime_to_str, custom_types]): 

1884 return typing.cast( 

1885 list[dict[str, Any]], super().as_list(compact, storage_to_dict, datetime_to_str, custom_types) 

1886 ) 

1887 return [_.as_dict() for _ in self.records.values()] 

1888 
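# Illustrative sketch of the difference between the two serializers above,
# assuming a hypothetical `Person` table:
#
#     rows = Person.where(lambda p: p.id > 0).collect()
#     rows.as_dict()  # {1: {"id": 1, "name": ...}, 2: {...}}  - keyed by row id
#     rows.as_list()  # [{"id": 1, "name": ...}, {...}]        - plain list of dicts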

1889 def __getitem__(self, item: int) -> T_MetaInstance: 

1890 """ 

1891 You can get a specific row by ID from a TypedRows using the rows[id] notation.

1892 

1893 Since pydal's implementation differs (they expect a list instead of a dict with id keys), 

1894 using rows[0] will return the first row, regardless of its id. 

1895 """ 

1896 try: 

1897 return self.records[item] 

1898 except KeyError as e: 

1899 if item == 0 and (row := self.first()): 

1900 # special case: pydal internals think Rows.records is a list, not a dict 

1901 return row 

1902 

1903 raise e 

1904 

1905 def get(self, item: int) -> typing.Optional[T_MetaInstance]: 

1906 """ 

1907 Get a row by ID, or receive None if it isn't in this result set. 

1908 """ 

1909 return self.records.get(item) 

1910 
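# Illustrative sketch: indexing is by row id (not by position), with rows[0] as the
# documented exception. Assumes a hypothetical `Person` table:
#
#     rows = Person.where(lambda p: p.id > 0).collect()
#     row = rows[42]      # row with id 42; raises KeyError if not in this result set
#     row = rows[0]       # special case: first row, regardless of its id
#     row = rows.get(42)  # like rows[42], but returns None instead of raising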

1911 def join( 

1912 self, 

1913 field: "Field | TypedField[Any]", 

1914 name: str = None, 

1915 constraint: Query = None, 

1916 fields: list[str | Field] = None, 

1917 orderby: Optional[str | Field] = None, 

1918 ) -> T_MetaInstance: 

1919 """ 

1920 This can be used to JOIN with some relationships after the initial select. 

1921 

1922 Using the query builder's .join() method is preferred!

1923 """ 

1924 result = super().join(field, name, constraint, fields or [], orderby) 

1925 return typing.cast(T_MetaInstance, result) 

1926 

1927 def export_to_csv_file( 

1928 self, 

1929 ofile: typing.TextIO, 

1930 null: Any = "<NULL>", 

1931 delimiter: str = ",", 

1932 quotechar: str = '"', 

1933 quoting: int = csv.QUOTE_MINIMAL, 

1934 represent: bool = False, 

1935 colnames: list[str] = None, 

1936 write_colnames: bool = True, 

1937 *args: Any, 

1938 **kwargs: Any, 

1939 ) -> None: 

1940 """ 

1941 Shadow export_to_csv_file from Rows, but with typing. 

1942 

1943 See http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#Exporting-and-importing-data 

1944 """ 

1945 super().export_to_csv_file( 

1946 ofile, 

1947 null, 

1948 *args, 

1949 delimiter=delimiter, 

1950 quotechar=quotechar, 

1951 quoting=quoting, 

1952 represent=represent, 

1953 colnames=colnames or self.colnames, 

1954 write_colnames=write_colnames, 

1955 **kwargs, 

1956 ) 

1957 
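# Illustrative sketch of exporting a result set to CSV; any text file object works,
# StringIO shown here (hypothetical `Person` table):
#
#     import io
#     buffer = io.StringIO()
#     Person.where(lambda p: p.id > 0).collect().export_to_csv_file(buffer, delimiter=";")
#     csv_text = buffer.getvalue()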

1958 @classmethod 

1959 def from_rows( 

1960 cls, rows: Rows, model: typing.Type[T_MetaInstance], metadata: Metadata = None 

1961 ) -> "TypedRows[T_MetaInstance]": 

1962 """ 

1963 Internal method to convert a Rows object to a TypedRows. 

1964 """ 

1965 return cls(rows, model, metadata=metadata) 

1966 

1967 def __json__(self) -> dict[str, Any]: 

1968 """ 

1969 For json-fix. 

1970 """ 

1971 return typing.cast(dict[str, Any], self.as_dict()) 

1972 

1973 def __getstate__(self) -> dict[str, Any]: 

1974 """ 

1975 Used by dill to dump to bytes (exclude db connection etc). 

1976 """ 

1977 return { 

1978 "metadata": json.dumps(self.metadata, default=str), 

1979 "records": self.records, 

1980 "model": str(self.model._table), 

1981 "colnames": self.colnames, 

1982 } 

1983 

1984 def __setstate__(self, state: dict[str, Any]) -> None: 

1985 """ 

1986 Used by dill when loading from a bytestring. 

1987 """ 

1988 state["metadata"] = json.loads(state["metadata"]) 

1989 self.__dict__.update(state) 

1990 # db etc. set after undill by caching.py 

1991 

1992 

1993from .caching import ( # noqa: E402 

1994 _remove_cache, 

1995 _TypedalCache, 

1996 _TypedalCacheDependency, 

1997 create_and_hash_cache_key, 

1998 get_expire, 

1999 load_from_cache, 

2000 save_to_cache, 

2001) 

2002 

2003 

2004class QueryBuilder(typing.Generic[T_MetaInstance]): 

2005 """ 

2006 Abstraction on top of pydal's query system.

2007 """ 

2008 

2009 model: typing.Type[T_MetaInstance] 

2010 query: Query 

2011 select_args: list[Any] 

2012 select_kwargs: dict[str, Any] 

2013 relationships: dict[str, Relationship[Any]] 

2014 metadata: Metadata 

2015 

2016 def __init__( 

2017 self, 

2018 model: typing.Type[T_MetaInstance], 

2019 add_query: Optional[Query] = None, 

2020 select_args: Optional[list[Any]] = None, 

2021 select_kwargs: Optional[dict[str, Any]] = None, 

2022 relationships: dict[str, Relationship[Any]] = None, 

2023 metadata: Metadata = None, 

2024 ): 

2025 """ 

2026 Normally, you wouldn't manually initialize a QueryBuilder but start using a method on a TypedTable. 

2027 

2028 Example: 

2029 MyTable.where(...) -> QueryBuilder[MyTable] 

2030 """ 

2031 self.model = model 

2032 table = model._ensure_table_defined() 

2033 default_query = typing.cast(Query, table.id > 0) 

2034 self.query = add_query or default_query 

2035 self.select_args = select_args or [] 

2036 self.select_kwargs = select_kwargs or {} 

2037 self.relationships = relationships or {} 

2038 self.metadata = metadata or {} 

2039 

2040 def __str__(self) -> str: 

2041 """ 

2042 Simple string representation for the query builder. 

2043 """ 

2044 return f"QueryBuilder for {self.model}" 

2045 

2046 def __repr__(self) -> str: 

2047 """ 

2048 Advanced string representation for the query builder. 

2049 """ 

2050 return ( 

2051 f"<QueryBuilder for {self.model} with " 

2052 f"{len(self.select_args)} select args; " 

2053 f"{len(self.select_kwargs)} select kwargs; " 

2054 f"{len(self.relationships)} relationships; " 

2055 f"query: {bool(self.query)}; " 

2056 f"metadata: {self.metadata}; " 

2057 f">" 

2058 ) 

2059 

2060 def __bool__(self) -> bool: 

2061 """ 

2062 The QueryBuilder is truthy if its query matches at least one row.

2063 """ 

2064 return self.count() > 0 

2065 

2066 def _extend( 

2067 self, 

2068 add_query: Optional[Query] = None, 

2069 overwrite_query: Optional[Query] = None, 

2070 select_args: Optional[list[Any]] = None, 

2071 select_kwargs: Optional[dict[str, Any]] = None, 

2072 relationships: dict[str, Relationship[Any]] = None, 

2073 metadata: Metadata = None, 

2074 ) -> "QueryBuilder[T_MetaInstance]": 

2075 return QueryBuilder( 

2076 self.model, 

2077 (add_query & self.query) if add_query else overwrite_query or self.query, 

2078 (self.select_args + select_args) if select_args else self.select_args, 

2079 (self.select_kwargs | select_kwargs) if select_kwargs else self.select_kwargs, 

2080 (self.relationships | relationships) if relationships else self.relationships, 

2081 (self.metadata | (metadata or {})) if metadata else self.metadata, 

2082 ) 

2083 

2084 def select(self, *fields: Any, **options: Any) -> "QueryBuilder[T_MetaInstance]": 

2085 """ 

2086 Fields: database columns by name ('id'), by field reference (table.id) or other (e.g. table.ALL). 

2087 

2088 Options: 

2089 paraphrased from the web2py pydal docs, 

2090 For more info, see http://www.web2py.com/books/default/chapter/29/06/the-database-abstraction-layer#orderby-groupby-limitby-distinct-having-orderby_on_limitby-join-left-cache 

2091 

2092 orderby: field(s) to order by. Supported: 

2093 table.name - sort by name, ascending 

2094 ~table.name - sort by name, descending 

2095 <random> - sort randomly 

2096 table.name|table.id - sort by two fields (first name, then id) 

2097 

2098 groupby, having: together with orderby: 

2099 groupby can be a field (e.g. table.name) to group records by 

2100 having can be a query, only those `having` the condition are grouped 

2101 

2102 limitby: tuple of min and max. When using the query builder, .paginate(limit, page) is recommended. 

2103 distinct: bool/field. Only select rows that differ 

2104 orderby_on_limitby (bool, default: True): by default, an implicit orderby is added when doing limitby. 

2105 join: othertable.on(query) - do an INNER JOIN. Using TypeDAL relationships with .join() is recommended! 

2106 left: othertable.on(query) - do a LEFT JOIN. Using TypeDAL relationships with .join() is recommended! 

2107 cache: cache the query result to speed up repeated queries; e.g. (cache=(cache.ram, 3600), cacheable=True) 

2108 """ 

2109 return self._extend(select_args=list(fields), select_kwargs=options) 

2110 
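# Illustrative sketch of select() options, assuming a hypothetical `Person` TypedTable
# with `name` and `age` fields:
#
#     builder = Person.where(lambda p: p.age >= 18).select(
#         Person.id,
#         Person.name,
#         orderby=~Person.name,  # descending by name
#         distinct=True,
#     )
#     rows = builder.collect()   # nothing touches the database until collect()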

2111 def where( 

2112 self, 

2113 *queries_or_lambdas: Query | typing.Callable[[typing.Type[T_MetaInstance]], Query], 

2114 **filters: Any, 

2115 ) -> "QueryBuilder[T_MetaInstance]": 

2116 """ 

2117 Extend the builder's query. 

2118 

2119 Can be used in multiple ways: 

2120 .where(Query) -> with a direct query such as `Table.id == 5` 

2121 .where(lambda table: table.id == 5) -> with a query via a lambda 

2122 .where(id=5) -> via keyword arguments 

2123 

2124 When chaining multiple .where() calls, they will be ANDed:

2125 .where(lambda table: table.id == 5).where(lambda table: table.id == 6) == (table.id == 5) & (table.id == 6)

2126 When passing multiple queries to a single .where, they will be ORed: 

2127 .where(lambda table: table.id == 5, lambda table: table.id == 6) == (table.id == 5) | (table.id == 6)

2128 """ 

2129 new_query = self.query 

2130 table = self.model._ensure_table_defined() 

2131 

2132 for field, value in filters.items(): 

2133 new_query &= table[field] == value 

2134 

2135 subquery: DummyQuery | Query = DummyQuery() 

2136 for query_or_lambda in queries_or_lambdas: 

2137 if isinstance(query_or_lambda, _Query): 

2138 subquery |= typing.cast(Query, query_or_lambda) 

2139 elif callable(query_or_lambda): 

2140 if result := query_or_lambda(self.model): 

2141 subquery |= result 

2142 elif isinstance(query_or_lambda, (Field, _Field)) or is_typed_field(query_or_lambda): 

2143 subquery |= typing.cast(Query, query_or_lambda != None) 

2144 else: 

2145 raise ValueError(f"Unexpected query type ({type(query_or_lambda)}).") 

2146 

2147 if subquery: 

2148 new_query &= subquery 

2149 

2150 return self._extend(overwrite_query=new_query) 

2151 
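# Illustrative sketch of the filter forms described above, on a hypothetical `Person` table:
#
#     Person.where(Person.name == "Alice")        # direct query
#     Person.where(lambda p: p.name == "Alice")   # lambda receives the model/table
#     Person.where(name="Alice")                  # keyword filter
#     # chained .where() calls are ANDed; multiple arguments to one .where() are ORed:
#     Person.where(lambda p: p.age > 18).where(lambda p: p.age < 65)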

2152 def join( 

2153 self, 

2154 *fields: str | typing.Type[TypedTable], 

2155 method: JOIN_OPTIONS = None, 

2156 on: OnQuery | list[Expression] | Expression = None, 

2157 condition: Condition = None, 

2158 ) -> "QueryBuilder[T_MetaInstance]": 

2159 """ 

2160 Include relationship fields in the result. 

2161 

2162 `fields` can be names of Relationships on the current model. 

2163 If no fields are passed, all will be used. 

2164 

2165 By default, the `method` defined in the relationship is used. 

2166 This can be overwritten with the `method` keyword argument (left or inner) 

2167 """ 

2168 # todo: allow limiting amount of related rows returned for join? 

2169 

2170 relationships = self.model.get_relationships() 

2171 

2172 if condition and on: 

2173 raise ValueError("condition and on can not be used together!") 

2174 elif condition: 

2175 if len(fields) != 1: 

2176 raise ValueError("join(field, condition=...) can only be used with exactly one field!") 

2177 

2178 if isinstance(condition, pydal.objects.Query): 

2179 condition = as_lambda(condition) 

2180 

2181 relationships = {str(fields[0]): relationship(fields[0], condition=condition, join=method)} 

2182 elif on: 

2183 if len(fields) != 1: 

2184 raise ValueError("join(field, on=...) can only be used with exactly one field!") 

2185 

2186 if isinstance(on, pydal.objects.Expression): 

2187 on = [on] 

2188 

2189 if isinstance(on, list): 

2190 on = as_lambda(on) 

2191 relationships = {str(fields[0]): relationship(fields[0], on=on, join=method)} 

2192 

2193 else: 

2194 if fields: 

2195 # join on every relationship 

2196 relationships = {str(k): relationships[str(k)] for k in fields} 

2197 

2198 if method: 

2199 relationships = {str(k): r.clone(join=method) for k, r in relationships.items()} 

2200 

2201 return self._extend(relationships=relationships) 

2202 
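# Illustrative sketch of join(), assuming a hypothetical `Person` table that defines a
# `pets` Relationship (the relationship name is what join() expects):
#
#     qb = Person.where(lambda p: p.id > 0)
#     qb.join().collect()                        # join on every defined relationship
#     qb.join("pets").collect()                  # only the `pets` relationship
#     qb.join("pets", method="inner").collect()  # override the relationship's join method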

2203 def cache( 

2204 self, *deps: Any, expires_at: Optional[dt.datetime] = None, ttl: Optional[int | dt.timedelta] = None 

2205 ) -> "QueryBuilder[T_MetaInstance]": 

2206 """ 

2207 Enable caching for this query to load repeated calls from a dill-serialized result \

2208 instead of executing the SQL and collecting matching rows again.

2209 """ 

2210 existing = self.metadata.get("cache", {}) 

2211 

2212 metadata: Metadata = {} 

2213 

2214 cache_meta = typing.cast( 

2215 CacheMetadata, 

2216 self.metadata.get("cache", {}) 

2217 | { 

2218 "enabled": True, 

2219 "depends_on": existing.get("depends_on", []) + [str(_) for _ in deps], 

2220 "expires_at": get_expire(expires_at=expires_at, ttl=ttl), 

2221 }, 

2222 ) 

2223 

2224 metadata["cache"] = cache_meta 

2225 return self._extend(metadata=metadata) 

2226 
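# Illustrative sketch of query caching with a time-to-live; repeated collects may be
# served from the cache instead of re-running the SQL (hypothetical `Person` table;
# `dt` is this module's `datetime` alias):
#
#     query = Person.where(lambda p: p.age >= 18).cache(ttl=dt.timedelta(minutes=5))
#     first_rows = query.collect()   # executes the SQL and stores the result
#     second_rows = query.collect()  # may be loaded from the cache until it expires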

2227 def _get_db(self) -> TypeDAL: 

2228 if db := self.model._db: 

2229 return db 

2230 else: # pragma: no cover 

2231 raise EnvironmentError("@define or db.define is not called on this class yet!") 

2232 

2233 def _select_arg_convert(self, arg: Any) -> Any: 

2234 # typedfield are not really used at runtime anymore, but leave it in for safety: 

2235 if isinstance(arg, TypedField): # pragma: no cover 

2236 arg = arg._field 

2237 

2238 return arg 

2239 

2240 def delete(self) -> list[int]: 

2241 """ 

2242 Based on the current query, delete rows and return a list of deleted IDs. 

2243 """ 

2244 db = self._get_db() 

2245 removed_ids = [_.id for _ in db(self.query).select("id")] 

2246 if db(self.query).delete(): 

2247 # success! 

2248 return removed_ids 

2249 

2250 return [] 

2251 

2252 def _delete(self) -> str: 

2253 db = self._get_db() 

2254 return str(db(self.query)._delete()) 

2255 

2256 def update(self, **fields: Any) -> list[int]: 

2257 """ 

2258 Based on the current query, update `fields` and return a list of updated IDs. 

2259 """ 

2260 # todo: limit? 

2261 db = self._get_db() 

2262 updated_ids = db(self.query).select("id").column("id") 

2263 if db(self.query).update(**fields): 

2264 # success! 

2265 return updated_ids 

2266 

2267 return [] 

2268 

2269 def _update(self, **fields: Any) -> str: 

2270 db = self._get_db() 

2271 return str(db(self.query)._update(**fields)) 

2272 

2273 def _before_query(self, mut_metadata: Metadata, add_id: bool = True) -> tuple[Query, list[Any], dict[str, Any]]: 

2274 select_args = [self._select_arg_convert(_) for _ in self.select_args] or [self.model.ALL] 

2275 select_kwargs = self.select_kwargs.copy() 

2276 query = self.query 

2277 model = self.model 

2278 mut_metadata["query"] = query 

2279 # require at least id of main table: 

2280 select_fields = ", ".join([str(_) for _ in select_args]) 

2281 tablename = str(model) 

2282 

2283 if add_id and f"{tablename}.id" not in select_fields: 

2284 # some fields were selected, but the required main-table ID is missing.

2285 select_args.append(model.id) 

2286 

2287 if self.relationships: 

2288 query, select_args = self._handle_relationships_pre_select(query, select_args, select_kwargs, mut_metadata) 

2289 

2290 return query, select_args, select_kwargs 

2291 

2292 def to_sql(self, add_id: bool = False) -> str: 

2293 """ 

2294 Generate the SQL for the built query. 

2295 """ 

2296 db = self._get_db() 

2297 

2298 query, select_args, select_kwargs = self._before_query({}, add_id=add_id) 

2299 

2300 return str(db(query)._select(*select_args, **select_kwargs)) 

2301 

2302 def _collect(self) -> str: 

2303 """ 

2304 Alias for to_sql, pydal-like syntax. 

2305 """ 

2306 return self.to_sql() 

2307 

2308 def _collect_cached(self, metadata: Metadata) -> "TypedRows[T_MetaInstance] | None": 

2309 expires_at = metadata["cache"].get("expires_at") 

2310 metadata["cache"] |= { 

2311 # key is partly dependent on cache metadata, but not on these:

2312 "key": None, 

2313 "status": None, 

2314 "cached_at": None, 

2315 "expires_at": None, 

2316 } 

2317 

2318 _, key = create_and_hash_cache_key( 

2319 self.model, 

2320 metadata, 

2321 self.query, 

2322 self.select_args, 

2323 self.select_kwargs, 

2324 self.relationships.keys(), 

2325 ) 

2326 

2327 # re-set after creating key: 

2328 metadata["cache"]["expires_at"] = expires_at 

2329 metadata["cache"]["key"] = key 

2330 

2331 return load_from_cache(key, self._get_db()) 

2332 

2333 def collect( 

2334 self, verbose: bool = False, _to: typing.Type["TypedRows[Any]"] = None, add_id: bool = True 

2335 ) -> "TypedRows[T_MetaInstance]": 

2336 """ 

2337 Execute the built query and turn it into model instances, while handling relationships. 

2338 """ 

2339 if _to is None: 

2340 _to = TypedRows 

2341 

2342 db = self._get_db() 

2343 metadata = typing.cast(Metadata, self.metadata.copy()) 

2344 

2345 if metadata.get("cache", {}).get("enabled") and (result := self._collect_cached(metadata)): 

2346 return result 

2347 

2348 query, select_args, select_kwargs = self._before_query(metadata, add_id=add_id) 

2349 

2350 metadata["sql"] = db(query)._select(*select_args, **select_kwargs) 

2351 

2352 if verbose: # pragma: no cover 

2353 print(metadata["sql"]) 

2354 

2355 rows: Rows = db(query).select(*select_args, **select_kwargs) 

2356 

2357 metadata["final_query"] = str(query) 

2358 metadata["final_args"] = [str(_) for _ in select_args] 

2359 metadata["final_kwargs"] = select_kwargs 

2360 

2361 if verbose: # pragma: no cover 

2362 print(rows) 

2363 

2364 if not self.relationships: 

2365 # easy 

2366 typed_rows = _to.from_rows(rows, self.model, metadata=metadata) 

2367 

2368 else: 

2369 # harder: try to match rows to the belonging objects 

2370 # assume structure of {'table': <data>} per row. 

2371 # if that's not the case, return default behavior again 

2372 typed_rows = self._collect_with_relationships(rows, metadata=metadata, _to=_to) 

2373 

2374 # only saves if requested in metadata: 

2375 return save_to_cache(typed_rows, rows) 

2376 

2377 def _handle_relationships_pre_select( 

2378 self, 

2379 query: Query, 

2380 select_args: list[Any], 

2381 select_kwargs: dict[str, Any], 

2382 metadata: Metadata, 

2383 ) -> tuple[Query, list[Any]]: 

2384 db = self._get_db() 

2385 model = self.model 

2386 

2387 metadata["relationships"] = set(self.relationships.keys()) 

2388 

2389 # query = self._update_query_for_inner(db, model, query) 

2390 join = [] 

2391 for key, relation in self.relationships.items(): 

2392 if not relation.condition or relation.join != "inner": 

2393 continue 

2394 

2395 other = relation.get_table(db) 

2396 other = other.with_alias(f"{key}_{hash(relation)}") 

2397 join.append(other.on(relation.condition(model, other))) 

2398 

2399 if limitby := select_kwargs.pop("limitby", None): 

2400 # if limitby + relationships: 

2401 # 1. get IDs of main table entries that match 'query' 

2402 # 2. change query to .belongs(id) 

2403 # 3. add joins etc 

2404 

2405 kwargs = {"limitby": limitby} 

2406 

2407 if join: 

2408 kwargs["join"] = join 

2409 

2410 ids = db(query)._select(model.id, **kwargs) 

2411 query = model.id.belongs(ids) 

2412 metadata["ids"] = ids 

2413 

2414 if join: 

2415 select_kwargs["join"] = join 

2416 

2417 left = [] 

2418 

2419 for key, relation in self.relationships.items(): 

2420 other = relation.get_table(db) 

2421 method: JOIN_OPTIONS = relation.join or DEFAULT_JOIN_OPTION 

2422 

2423 select_fields = ", ".join([str(_) for _ in select_args]) 

2424 pre_alias = str(other) 

2425 

2426 if f"{other}." not in select_fields: 

2427 # no fields of other selected. add .ALL: 

2428 select_args.append(other.ALL) 

2429 elif f"{other}.id" not in select_fields: 

2430 # fields of other selected, but required ID is missing. 

2431 select_args.append(other.id) 

2432 

2433 if relation.on: 

2434 # if it has a .on, it's always a left join! 

2435 on = relation.on(model, other) 

2436 if not isinstance(on, list): # pragma: no cover 

2437 on = [on] 

2438 

2439 left.extend(on) 

2440 elif method == "left": 

2441 # .on not given, generate it: 

2442 other = other.with_alias(f"{key}_{hash(relation)}") 

2443 condition = typing.cast(Query, relation.condition(model, other)) 

2444 left.append(other.on(condition)) 

2445 else: 

2446 # else: inner join (handled earlier) 

2447 other = other.with_alias(f"{key}_{hash(relation)}") # only for replace 

2448 # other = other.with_alias(f"{key}_{hash(relation)}") 

2449 # query &= relation.condition(model, other) 

2450 

2451 # if no fields of 'other' are included, add other.ALL 

2452 # else: only add other.id if missing 

2453 select_fields = ", ".join([str(_) for _ in select_args]) 

2454 

2455 post_alias = str(other).split(" AS ")[-1] 

2456 if pre_alias != post_alias: 

2457 # replace .select's with aliased: 

2458 select_fields = select_fields.replace( 

2459 f"{pre_alias}.", 

2460 f"{post_alias}.", 

2461 ) 

2462 

2463 select_args = select_fields.split(", ") 

2464 

2465 select_kwargs["left"] = left 

2466 return query, select_args 

2467 

2468 def _collect_with_relationships( 

2469 self, rows: Rows, metadata: Metadata, _to: typing.Type["TypedRows[Any]"] = None 

2470 ) -> "TypedRows[T_MetaInstance]": 

2471 """ 

2472 Transform the raw rows into Typed Table model instances. 

2473 """ 

2474 db = self._get_db() 

2475 main_table = self.model._ensure_table_defined() 

2476 

2477 records = {} 

2478 seen_relations: dict[str, set[str]] = defaultdict(set) # main id -> set of col + id for relation 

2479 

2480 for row in rows: 

2481 main = row[main_table] 

2482 main_id = main.id 

2483 

2484 if main_id not in records: 

2485 records[main_id] = self.model(main) 

2486 records[main_id]._with = list(self.relationships.keys()) 

2487 

2488 # set up all relationship defaults (once)

2489 for col, relationship in self.relationships.items(): 

2490 records[main_id][col] = [] if relationship.multiple else None 

2491 

2492 # now add other relationship data 

2493 for column, relation in self.relationships.items(): 

2494 relationship_column = f"{column}_{hash(relation)}" 

2495 

2496 # relationship_column works for aliases with the same target column. 

2497 # if col + relationship not in the row, just use the regular name. 

2498 

2499 relation_data = ( 

2500 row[relationship_column] if relationship_column in row else row[relation.get_table_name()] 

2501 ) 

2502 

2503 if relation_data.id is None: 

2504 # always skip None ids 

2505 continue 

2506 

2507 if f"{column}-{relation_data.id}" in seen_relations[main_id]: 

2508 # speed up duplicates 

2509 continue 

2510 else: 

2511 seen_relations[main_id].add(f"{column}-{relation_data.id}") 

2512 

2513 relation_table = relation.get_table(db) 

2514 # hopefully an instance of a typed table, or a regular pydal row otherwise:

2515 instance = relation_table(relation_data) if looks_like(relation_table, TypedTable) else relation_data 

2516 

2517 if relation.multiple: 

2518 # create list of T 

2519 if not isinstance(records[main_id].get(column), list): # pragma: no cover 

2520 # should already be set up before! 

2521 setattr(records[main_id], column, []) 

2522 

2523 records[main_id][column].append(instance) 

2524 else: 

2525 # create single T 

2526 records[main_id][column] = instance 

2527 

2528 return _to(rows, self.model, records, metadata=metadata) 

2529 

2530 def collect_or_fail(self, exception: Exception = None) -> "TypedRows[T_MetaInstance]": 

2531 """ 

2532 Call .collect() and raise an error if nothing found. 

2533 

2534 Basically unwraps Optional type. 

2535 """ 

2536 if result := self.collect(): 

2537 return result 

2538 

2539 if not exception: 

2540 exception = ValueError("Nothing found!") 

2541 

2542 raise exception 

2543 

2544 def __iter__(self) -> typing.Generator[T_MetaInstance, None, None]: 

2545 """ 

2546 You can start iterating a Query Builder object before calling collect, for ease of use. 

2547 """ 

2548 yield from self.collect() 

2549 

2550 def count(self) -> int: 

2551 """ 

2552 Return the number of rows matching the current query.

2553 """ 

2554 db = self._get_db() 

2555 model = self.model 

2556 query = self.query 

2557 

2558 for key, relation in self.relationships.items(): 

2559 if not relation.condition or relation.join != "inner": 

2560 continue 

2561 

2562 other = relation.get_table(db) 

2563 other = other.with_alias(f"{key}_{hash(relation)}") 

2564 query &= relation.condition(model, other) 

2565 

2566 return db(query).count() 

2567 

2568 def __paginate( 

2569 self, 

2570 limit: int, 

2571 page: int = 1, 

2572 ) -> "QueryBuilder[T_MetaInstance]": 

2573 _from = limit * (page - 1) 

2574 _to = limit * page 

2575 

2576 available = self.count() 

2577 

2578 metadata: Metadata = {} 

2579 

2580 metadata["pagination"] = { 

2581 "limit": limit, 

2582 "current_page": page, 

2583 "max_page": math.ceil(available / limit), 

2584 "rows": available, 

2585 "min_max": (_from, _to), 

2586 } 

2587 

2588 return self._extend(select_kwargs={"limitby": (_from, _to)}, metadata=metadata) 

2589 

2590 def paginate(self, limit: int, page: int = 1, verbose: bool = False) -> "PaginatedRows[T_MetaInstance]": 

2591 """ 

2592 Paginate transforms the more readable `page` and `limit` into pydal's internal limit and offset.

2593 

2594 Note: when using relationships, this limit is only applied to the 'main' table and any number of extra rows \ 

2595 can be loaded with relationship data! 

2596 """ 

2597 builder = self.__paginate(limit, page) 

2598 

2599 rows = typing.cast(PaginatedRows[T_MetaInstance], builder.collect(verbose=verbose, _to=PaginatedRows)) 

2600 

2601 rows._query_builder = builder 

2602 return rows 

2603 
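# Illustrative pagination sketch on a hypothetical `Person` table: page numbers are
# 1-based and the result carries its own pagination metadata:
#
#     page = Person.where(lambda p: p.id > 0).paginate(limit=20, page=1)
#     page.data                        # list of Person instances on this page
#     page.pagination["total_pages"]   # how many pages exist in total
#     next_page = page.next()          # raises StopIteration on the final page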

2604 def _paginate( 

2605 self, 

2606 limit: int, 

2607 page: int = 1, 

2608 ) -> str: 

2609 builder = self.__paginate(limit, page) 

2610 return builder._collect() 

2611 

2612 def chunk(self, chunk_size: int) -> typing.Generator["TypedRows[T_MetaInstance]", Any, None]: 

2613 """ 

2614 Generator that yields rows from a paginated source in chunks. 

2615 

2616 This function retrieves rows from a paginated data source in chunks of the 

2617 specified `chunk_size` and yields them as TypedRows. 

2618 

2619 Example: 

2620 ``` 

2621 for chunk_of_rows in Table.where(SomeTable.id > 5).chunk(100): 

2622 for row in chunk_of_rows: 

2623 # Process each row within the chunk. 

2624 pass 

2625 ``` 

2626 """ 

2627 page = 1 

2628 

2629 while rows := self.__paginate(chunk_size, page).collect(): 

2630 yield rows 

2631 page += 1 

2632 

2633 def first(self, verbose: bool = False) -> T_MetaInstance | None: 

2634 """ 

2635 Get the first row matching the currently built query. 

2636 

2637 Also applies pagination (limit 1), since it would be wasteful to select more rows than needed.

2638 """ 

2639 if row := self.paginate(page=1, limit=1, verbose=verbose).first(): 

2640 return self.model.from_row(row) 

2641 else: 

2642 return None 

2643 

2644 def _first(self) -> str: 

2645 return self._paginate(page=1, limit=1) 

2646 

2647 def first_or_fail(self, exception: Exception = None, verbose: bool = False) -> T_MetaInstance: 

2648 """ 

2649 Call .first() and raise an error if nothing found. 

2650 

2651 Basically unwraps Optional type. 

2652 """ 

2653 if inst := self.first(verbose=verbose): 

2654 return inst 

2655 

2656 if not exception: 

2657 exception = ValueError("Nothing found!") 

2658 

2659 raise exception 

2660 
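# Illustrative sketch of the *_or_fail variants, which replace an empty/Optional result
# with an exception of your choosing (hypothetical `Person` table):
#
#     person = Person.where(name="Alice").first_or_fail()                        # ValueError("Nothing found!")
#     person = Person.where(name="Alice").first_or_fail(KeyError("no such person"))
#     rows = Person.where(lambda p: p.age > 18).collect_or_fail()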

2661 

2662S = typing.TypeVar("S") 

2663 

2664 

2665class PaginatedRows(TypedRows[T_MetaInstance]): 

2666 """ 

2667 Extension on top of rows that is used when calling .paginate() instead of .collect(). 

2668 """ 

2669 

2670 _query_builder: QueryBuilder[T_MetaInstance] 

2671 

2672 @property 

2673 def data(self) -> list[T_MetaInstance]: 

2674 """ 

2675 Get the underlying data. 

2676 """ 

2677 return list(self.records.values()) 

2678 

2679 @property 

2680 def pagination(self) -> Pagination: 

2681 """ 

2682 Get all page info. 

2683 """ 

2684 pagination_data = self.metadata["pagination"] 

2685 

2686 has_next_page = pagination_data["current_page"] < pagination_data["max_page"] 

2687 has_prev_page = pagination_data["current_page"] > 1 

2688 return { 

2689 "total_items": pagination_data["rows"], 

2690 "current_page": pagination_data["current_page"], 

2691 "per_page": pagination_data["limit"], 

2692 "total_pages": pagination_data["max_page"], 

2693 "has_next_page": has_next_page, 

2694 "has_prev_page": has_prev_page, 

2695 "next_page": pagination_data["current_page"] + 1 if has_next_page else None, 

2696 "prev_page": pagination_data["current_page"] - 1 if has_prev_page else None, 

2697 } 

2698 

2699 def next(self) -> Self: # noqa: A003 

2700 """ 

2701 Get the next page. 

2702 """ 

2703 data = self.metadata["pagination"] 

2704 if data["current_page"] >= data["max_page"]: 

2705 raise StopIteration("Final Page") 

2706 

2707 return self._query_builder.paginate(limit=data["limit"], page=data["current_page"] + 1) 

2708 

2709 def previous(self) -> Self: 

2710 """ 

2711 Get the previous page. 

2712 """ 

2713 data = self.metadata["pagination"] 

2714 if data["current_page"] <= 1: 

2715 raise StopIteration("First Page") 

2716 

2717 return self._query_builder.paginate(limit=data["limit"], page=data["current_page"] - 1) 

2718 

2719 def as_dict(self, *_: Any, **__: Any) -> PaginateDict: # type: ignore 

2720 """ 

2721 Convert to a dictionary with pagination info and original data. 

2722 

2723 All arguments are ignored! 

2724 """ 

2725 return {"data": super().as_dict(), "pagination": self.pagination} 

2726 
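# Illustrative sketch of the shape of PaginatedRows.as_dict(), e.g. for a JSON API
# response (hypothetical `Person` table):
#
#     page = Person.where(lambda p: p.id > 0).paginate(limit=10, page=2)
#     payload = page.as_dict()
#     # {
#     #   "data": {<id>: {...row data...}, ...},
#     #   "pagination": {"current_page": 2, "per_page": 10, "total_pages": ..., ...},
#     # }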

2727 

2728class TypedSet(pydal.objects.Set): # type: ignore # pragma: no cover 

2729 """ 

2730 Used to make pydal Set more typed. 

2731 

2732 This class is not actually used, only 'cast' by TypeDAL.__call__ 

2733 """ 

2734 

2735 def count(self, distinct: bool = None, cache: dict[str, Any] = None) -> int: 

2736 """ 

2737 Count returns an int. 

2738 """ 

2739 result = super().count(distinct, cache) 

2740 return typing.cast(int, result) 

2741 

2742 def select(self, *fields: Any, **attributes: Any) -> TypedRows[T_MetaInstance]: 

2743 """ 

2744 Select returns a TypedRows of a user defined table. 

2745 

2746 Example: 

2747 result: TypedRows[MyTable] = db(MyTable.id > 0).select() 

2748 

2749 for row in result: 

2750 typing.reveal_type(row) # MyTable 

2751 """ 

2752 rows = super().select(*fields, **attributes) 

2753 return typing.cast(TypedRows[T_MetaInstance], rows)