Coverage for src/typedal/core.py: 100%

884 statements  

« prev     ^ index     » next       coverage.py v7.3.2, created at 2023-12-04 18:50 +0100

1""" 

2Core functionality of TypeDAL. 

3""" 

4import contextlib 

5import csv 

6import datetime as dt 

7import inspect 

8import json 

9import math 

10import types 

11import typing 

12import warnings 

13from collections import defaultdict 

14from decimal import Decimal 

15from pathlib import Path 

16from typing import Any, Optional 

17 

18import pydal 

19from pydal._globals import DEFAULT 

20from pydal.objects import Field as _Field 

21from pydal.objects import Query as _Query 

22from pydal.objects import Row 

23from pydal.objects import Table as _Table 

24from typing_extensions import Self 

25 

26from .config import load_config 

27from .helpers import ( 

28 DummyQuery, 

29 all_annotations, 

30 all_dict, 

31 as_lambda, 

32 extract_type_optional, 

33 filter_out, 

34 instanciate, 

35 is_union, 

36 looks_like, 

37 mktable, 

38 origin_is_subclass, 

39 to_snake, 

40 unwrap_type, 

41) 

42from .types import ( 

43 AfterDeleteCallable, 

44 AfterInsertCallable, 

45 AfterUpdateCallable, 

46 BeforeDeleteCallable, 

47 BeforeInsertCallable, 

48 BeforeUpdateCallable, 

49 CacheMetadata, 

50 Expression, 

51 Field, 

52 Metadata, 

53 PaginateDict, 

54 Pagination, 

55 Query, 

56 Rows, 

57 _Types, 

58) 

59 

# use typing.cast(type, ...) to make mypy happy with unions
T_annotation = typing.Type[Any] | types.UnionType
# anything that may be passed to db(...) as a query-ish argument:
T_Query = typing.Union["Table", Query, bool, None, "TypedTable", typing.Type["TypedTable"]]
T_Value = typing.TypeVar("T_Value")  # actual type of the Field (via Generic)
T_MetaInstance = typing.TypeVar("T_MetaInstance", bound="TypedTable")  # bound="TypedTable"; bound="TableMeta"
T = typing.TypeVar("T")

# maps plain Python types to the pydal field-type strings they translate to:
BASIC_MAPPINGS: dict[T_annotation, str] = {
    str: "string",
    int: "integer",
    bool: "boolean",
    bytes: "blob",
    float: "double",
    object: "json",
    Decimal: "decimal(10,2)",
    dt.date: "date",
    dt.time: "time",
    dt.datetime: "datetime",
}

79 

80 

def is_typed_field(cls: Any) -> typing.TypeGuard["TypedField[Any]"]:
    """
    Is `cls` an instance or subclass of TypedField?

    Deprecated
    """
    if isinstance(cls, TypedField):
        # a concrete TypedField() instance
        return True

    # a generic alias such as TypedField[int]: inspect its origin class instead
    origin = typing.get_origin(cls)
    return isinstance(origin, type) and issubclass(origin, TypedField)

92 

93 

# allowed join strategies; None means "use the default":
JOIN_OPTIONS = typing.Literal["left", "inner", None]
DEFAULT_JOIN_OPTION: JOIN_OPTIONS = "left"

# table-ish parameter:
P_Table = typing.Union[typing.Type["TypedTable"], pydal.objects.Table]

Condition: typing.TypeAlias = typing.Optional[
    typing.Callable[
        # self, other -> Query
        [P_Table, P_Table],
        Query | bool,
    ]
]

OnQuery: typing.TypeAlias = typing.Optional[
    typing.Callable[
        # self, other -> list of .on statements
        [P_Table, P_Table],
        list[Expression],
    ]
]

# type variable constrained to the shapes a relationship target may take
# (a class, a typing alias, or a forward-reference string):
To_Type = typing.TypeVar("To_Type", type[Any], typing.Type[Any], str)

117 

118 

class Relationship(typing.Generic[To_Type]):
    """
    Define a relationship to another table.
    """

    # filled in by __init__:
    _type: To_Type  # the raw annotation this relationship was created with (e.g. list["Post"])
    table: typing.Type["TypedTable"] | type | str  # target table: class, pydal table, or snake_case name
    condition: Condition  # optional (self, other) -> Query callback
    on: OnQuery  # optional (self, other) -> list of .on() expressions (for pivot tables)
    multiple: bool  # True when the relationship yields a list of rows
    join: JOIN_OPTIONS  # "left", "inner" or None (= default)

    def __init__(
        self,
        _type: To_Type,
        condition: Condition = None,
        join: JOIN_OPTIONS = None,
        on: OnQuery = None,
    ):
        """
        Should not be called directly, use relationship() instead!

        Raises:
            ValueError: when both `condition` and `on` are supplied (mutually exclusive).
        """
        if condition and on:
            warnings.warn(f"Relation | Both specified! {condition=} {on=} {_type=}")
            raise ValueError("Please specify either a condition or an 'on' statement for this relationship!")

        self._type = _type
        self.condition = condition
        self.join = "left" if on else join  # .on is always left join!
        self.on = on

        # annotations with type args (e.g. list["Post"]) mean a 'multiple' relationship:
        if args := typing.get_args(_type):
            self.table = unwrap_type(args[0])
            self.multiple = True
        else:
            self.table = _type
            self.multiple = False

        if isinstance(self.table, str):
            # forward reference by name; normalize to the table's snake_case name:
            self.table = TypeDAL.to_snake(self.table)

    def clone(self, **update: Any) -> "Relationship[To_Type]":
        """
        Create a copy of the relationship, possibly updated.

        NOTE: falsy update values fall back to the current value (via `or`),
        so this can replace properties but not clear them.
        """
        return self.__class__(
            update.get("_type") or self._type,
            update.get("condition") or self.condition,
            update.get("join") or self.join,
            update.get("on") or self.on,
        )

    def __repr__(self) -> str:
        """
        Representation of the relationship.

        Shows the source code of the condition/on callback when one is set.
        """
        if callback := self.condition or self.on:
            src_code = inspect.getsource(callback).strip()
        else:
            cls_name = self._type if isinstance(self._type, str) else self._type.__name__  # type: ignore
            src_code = f"to {cls_name} (missing condition)"

        join = f":{self.join}" if self.join else ""
        return f"<Relationship{join} {src_code}>"

    def get_table(self, db: "TypeDAL") -> typing.Type["TypedTable"]:
        """
        Get the table this relationship is bound to.

        A string target (forward reference) is resolved via the db's class map,
        falling back to the untyped pydal table of that name.
        """
        table = self.table  # can be a string because db wasn't available yet
        if isinstance(table, str):
            if mapped := db._class_map.get(table):
                # yay
                return mapped

            # boo, fall back to untyped table but pretend it is typed:
            return typing.cast(typing.Type["TypedTable"], db[table])  # eh close enough!

        return table

    def get_table_name(self) -> str:
        """
        Get the name of the table this relationship is bound to.
        """
        if isinstance(self.table, str):
            return self.table

        if isinstance(self.table, pydal.objects.Table):
            return str(self.table)

        # else: typed table
        try:
            table = self.table._ensure_table_defined() if issubclass(self.table, TypedTable) else self.table
        except Exception:  # pragma: no cover
            table = self.table

        return str(table)

    def __get__(self, instance: Any, owner: Any) -> typing.Optional[list[Any]] | "Relationship[To_Type]":
        """
        Relationship is a descriptor class, which can be returned from a class but not an instance.

        For an instance, using .join() will replace the Relationship with the actual data.
        If you forgot to join, a warning will be shown and empty data will be returned.
        """
        if not instance:
            # relationship queried on class, that's allowed
            return self

        warnings.warn(
            "Trying to get data from a relationship object! Did you forget to join it?", category=RuntimeWarning
        )
        # empty placeholder data: a list for 'multiple' relationships, None for single ones
        if self.multiple:
            return []
        else:
            return None

235 

236 

def relationship(
    _type: To_Type, condition: Condition = None, join: JOIN_OPTIONS = None, on: OnQuery = None
) -> Relationship[To_Type]:
    """
    Define a relationship to another table, when its id is not stored in the current table.

    Example:
        class User(TypedTable):
            name: str

            posts = relationship(list["Post"], condition=lambda self, post: self.id == post.author, join='left')

        class Post(TypedTable):
            title: str
            author: User

        User.join("posts").first()  # User instance with list[Post] in .posts

    Here, Post stores the User ID, but `relationship(list["Post"])` still allows you to get the user's posts.
    In this case, the join strategy is set to LEFT so users without posts are also still selected.

    For complex queries with a pivot table, an `on` can be set instead of `condition`:
        class User(TypedTable):
            ...

            tags = relationship(list["Tag"], on=lambda self, tag: [
                Tagged.on(Tagged.entity == entity.gid),
                Tag.on((Tagged.tag == tag.id)),
            ])

    If you'd try to capture this in a single 'condition', pydal would create a cross join which is much less efficient.
    """
    return Relationship(_type, condition=condition, join=join, on=on)

270 

271 

def _generate_relationship_condition(
    _: typing.Type["TypedTable"], key: str, field: typing.Union["TypedField[Any]", "Table", typing.Type["TypedTable"]]
) -> "Condition":
    """
    Build the default join condition for an implicit (reference-based) relationship.

    Args:
        _: the owning model class (currently unused, kept for signature stability)
        key: the column on the owning table that stores the reference(s)
        field: the annotation of that column (a table class, or list thereof)

    Returns:
        A `(self, other) -> Query` callable:
        - `list[Other]` annotations produce a `.contains(other.id)` query (list:reference);
        - plain `Other` annotations produce an `== other.id` equality query.
    """
    origin = typing.get_origin(field)
    # `is list` rather than `== list`: identity check against the builtin type is the idiomatic form

    if origin is list:
        # list:reference -> the column holds multiple ids
        return lambda _self, _other: _self[key].contains(_other.id)
    else:
        # normal reference -> the column holds a single id
        return lambda _self, _other: _self[key] == _other.id

287 

288 

def to_relationship(
    cls: typing.Type["TypedTable"] | type[Any],
    key: str,
    field: typing.Union["TypedField[Any]", "Table", typing.Type["TypedTable"]],
) -> typing.Optional[Relationship[Any]]:
    """
    Used to automatically create relationship instance for reference fields.

    Example:
        class MyTable(TypedTable):
            reference: OtherTable

        `reference` contains the id of an Other Table row.
        MyTable.relationships should have 'reference' as a relationship, so `MyTable.join('reference')` should work.

    This function will automatically perform this logic (called in db.define):
        to_relationship(MyTable, 'reference', OtherTable) -> Relationship[OtherTable]

    Also works for list:reference (list[OtherTable]) and TypedField[OtherTable].

    Returns:
        A Relationship instance, or None when no valid relationship could be derived.
    """
    if looks_like(field, TypedField):
        # TypedField[Other] -> unwrap to Other; a bare TypedField has no target table
        if args := typing.get_args(field):
            field = args[0]
        else:
            # weird
            return None

    field, optional = extract_type_optional(field)

    try:
        condition = _generate_relationship_condition(cls, key, field)
    except Exception as e:  # pragma: no cover
        warnings.warn("Could not generate Relationship condition", source=e)
        condition = None

    if not condition:  # pragma: no cover
        # something went wrong, not a valid relationship
        warnings.warn(f"Invalid relationship for {cls.__name__}.{key}: {field}")
        return None

    # optional single references and list references must not exclude rows without a match,
    # so they join LEFT; required single references can join INNER:
    join = "left" if optional or typing.get_origin(field) is list else "inner"

    return Relationship(typing.cast(type[TypedTable], field), condition, typing.cast(JOIN_OPTIONS, join))

332 

333 

334class TypeDAL(pydal.DAL): # type: ignore 

335 """ 

336 Drop-in replacement for pyDAL with layer to convert class-based table definitions to classical pydal define_tables. 

337 """ 

338 

    def __init__(
        self,
        uri: Optional[str] = None,  # default from config or 'sqlite:memory'
        pool_size: Optional[int] = None,  # default 1 if sqlite else 3
        folder: Optional[str | Path] = None,  # default 'databases' in config
        db_codec: str = "UTF-8",
        check_reserved: Optional[list[str]] = None,
        migrate: Optional[bool] = None,  # default True by config
        fake_migrate: Optional[bool] = None,  # default False by config
        migrate_enabled: bool = True,
        fake_migrate_all: bool = False,
        decode_credentials: bool = False,
        driver_args: Optional[dict[str, Any]] = None,
        adapter_args: Optional[dict[str, Any]] = None,
        attempts: int = 5,
        auto_import: bool = False,
        bigint_id: bool = False,
        debug: bool = False,
        lazy_tables: bool = False,
        db_uid: Optional[str] = None,
        after_connection: Optional[typing.Callable[..., Any]] = None,
        tables: Optional[list[str]] = None,
        ignore_field_case: bool = True,
        entity_quoting: bool = True,
        table_hash: Optional[str] = None,
        enable_typedal_caching: Optional[bool] = None,
        use_pyproject: bool | str = True,
        use_env: bool | str = True,
    ) -> None:
        """
        Adds some internal tables after calling pydal's default init.

        Set enable_typedal_caching to False to disable this behavior.
        """
        config = load_config(_use_pyproject=use_pyproject, _use_env=use_env)
        # merge explicit arguments into the loaded config
        # (presumably None values leave the configured defaults intact — see load_config/update):
        config.update(
            database=uri,
            dialect=uri.split(":")[0] if uri and ":" in uri else None,
            folder=folder,
            migrate=migrate,
            fake_migrate=fake_migrate,
            caching=enable_typedal_caching,
            pool_size=pool_size,
        )

        if config.folder:
            # ensure the databases folder exists before pydal tries to use it
            Path(config.folder).mkdir(exist_ok=True)

        # positional arguments below mirror pydal.DAL.__init__'s signature order:
        super().__init__(
            config.database,
            config.pool_size,
            config.folder,
            db_codec,
            check_reserved,
            config.migrate,
            config.fake_migrate,
            migrate_enabled,
            fake_migrate_all,
            decode_credentials,
            driver_args,
            adapter_args,
            attempts,
            auto_import,
            bigint_id,
            debug,
            lazy_tables,
            db_uid,
            after_connection,
            tables,
            ignore_field_case,
            entity_quoting,
            table_hash,
        )

        # internal bookkeeping tables used by the query-result cache:
        if config.caching:
            self.try_define(_TypedalCache)
            self.try_define(_TypedalCacheDependency)

416 

417 def try_define(self, model: typing.Type[T], verbose: bool = False) -> typing.Type[T]: 

418 """ 

419 Try to define a model with migrate or fall back to fake migrate. 

420 """ 

421 try: 

422 return self.define(model, migrate=True) 

423 except Exception as e: 

424 # clean up: 

425 self.rollback() 

426 if (tablename := self.to_snake(model.__name__)) and tablename in dir(self): 

427 delattr(self, tablename) 

428 

429 if verbose: 

430 warnings.warn(f"{model} could not be migrated, try faking", source=e, category=RuntimeWarning) 

431 

432 # try again: 

433 return self.define(model, migrate=True, fake_migrate=True, redefine=True) 

434 

    # keyword arguments applied to every generated Field unless explicitly overridden:
    default_kwargs: typing.ClassVar[typing.Dict[str, Any]] = {
        # fields are 'required' (notnull) by default:
        "notnull": True,
    }

    # maps table name to typedal class, for resolving future references
    _class_map: typing.ClassVar[dict[str, typing.Type["TypedTable"]]] = {}

442 

    def _define(self, cls: typing.Type[T], **kwargs: Any) -> typing.Type[T]:
        """
        Translate a TypedTable subclass into a pydal `define_table` call.

        Collects annotations (including inherited ones), converts them into pydal
        Fields, registers explicit and implicit relationships, and binds the
        resulting pydal table back onto the class.
        """
        # todo: new relationship item added should also invalidate (previously unrelated) cache result

        # todo: option to enable/disable cache dependency behavior:
        # - don't set _before_update and _before_delete
        # - don't add TypedalCacheDependency entry
        # - don't invalidate other item on new row of this type

        # when __future__.annotations is implemented, cls.__annotations__ will not work anymore as below.
        # proper way to handle this would be (but gives error right now due to Table implementing magic methods):
        # typing.get_type_hints(cls, globalns=None, localns=None)

        # dirty way (with evil eval):
        # [eval(v) for k, v in cls.__annotations__.items()]
        # this however also stops working when variables outside this scope or even references to other
        # objects are used. So for now, this package will NOT work when from __future__ import annotations is used,
        # and might break in the future, when this annotations behavior is enabled by default.

        # non-annotated variables have to be passed to define_table as kwargs
        full_dict = all_dict(cls)  # includes properties from parents (e.g. useful for mixins)

        tablename = self.to_snake(cls.__name__)
        # grab annotations of cls and it's parents:
        annotations = all_annotations(cls)
        # extend with `prop = TypedField()` 'annotations':
        annotations |= {k: typing.cast(type, v) for k, v in full_dict.items() if is_typed_field(v)}
        # remove internal stuff:
        annotations = {k: v for k, v in annotations.items() if not k.startswith("_")}

        typedfields: dict[str, TypedField[Any]] = {
            k: instanciate(v, True) for k, v in annotations.items() if is_typed_field(v)
        }

        # relationships declared via annotations (deprecated style):
        relationships: dict[str, type[Relationship[Any]]] = filter_out(annotations, Relationship)

        fields = {fname: self._to_field(fname, ftype) for fname, ftype in annotations.items()}

        # ! dont' use full_dict here:
        other_kwargs = kwargs | {
            k: v for k, v in cls.__dict__.items() if k not in annotations and not k.startswith("_")
        }

        for key in typedfields.keys() - full_dict.keys():
            # typed fields that don't haven't been added to the object yet
            setattr(cls, key, typedfields[key])

        # start with base classes and overwrite with current class:
        relationships = filter_out(full_dict, Relationship) | relationships | filter_out(other_kwargs, Relationship)

        # DEPRECATED: Relationship as annotation is currently not supported!
        # ensure they are all instances and
        # not mix of instances (`= relationship()`) and classes (`: Relationship[...]`):
        # relationships = {
        #     k: v if isinstance(v, Relationship) else to_relationship(cls, k, v) for k, v in relationships.items()
        # }

        # keys of implicit references (also relationships):
        reference_field_keys = [k for k, v in fields.items() if v.type.split(" ")[0] in ("list:reference", "reference")]

        # add implicit relationships:
        # User; list[User]; TypedField[User]; TypedField[list[User]]
        relationships |= {
            k: new_relationship
            for k in reference_field_keys
            if k not in relationships and (new_relationship := to_relationship(cls, k, annotations[k]))
        }

        cache_dependency = other_kwargs.pop("cache_dependency", True)

        table: Table = self.define_table(tablename, *fields.values(), **other_kwargs)

        # give each bound TypedField access to its pydal Field and table:
        for name, typed_field in typedfields.items():
            field = fields[name]
            typed_field.bind(field, table)

        if issubclass(cls, TypedTable):
            cls.__set_internals__(
                db=self,
                table=table,
                # by now, all relationships should be instances!
                relationships=typing.cast(dict[str, Relationship[Any]], relationships),
            )
            # register for future forward-reference resolution:
            self._class_map[str(table)] = cls
        else:
            warnings.warn("db.define used without inheriting TypedTable. This could lead to strange problems!")

        # hook cache invalidation into updates/deletes of this table (skips internal typedal_* tables):
        if not tablename.startswith("typedal_") and cache_dependency:
            table._before_update.append(lambda s, _: _remove_cache(s, tablename))
            table._before_delete.append(lambda s: _remove_cache(s, tablename))

        return cls

534 

535 @typing.overload 

536 def define(self, maybe_cls: None = None, **kwargs: Any) -> typing.Callable[[typing.Type[T]], typing.Type[T]]: 

537 """ 

538 Typing Overload for define without a class. 

539 

540 @db.define() 

541 class MyTable(TypedTable): ... 

542 """ 

543 

544 @typing.overload 

545 def define(self, maybe_cls: typing.Type[T], **kwargs: Any) -> typing.Type[T]: 

546 """ 

547 Typing Overload for define with a class. 

548 

549 @db.define 

550 class MyTable(TypedTable): ... 

551 """ 

552 

553 def define( 

554 self, maybe_cls: typing.Type[T] | None = None, **kwargs: Any 

555 ) -> typing.Type[T] | typing.Callable[[typing.Type[T]], typing.Type[T]]: 

556 """ 

557 Can be used as a decorator on a class that inherits `TypedTable`, \ 

558 or as a regular method if you need to define your classes before you have access to a 'db' instance. 

559 

560 You can also pass extra arguments to db.define_table. 

561 See http://www.web2py.com/books/default/chapter/29/06/the-database-abstraction-layer#Table-constructor 

562 

563 Example: 

564 @db.define 

565 class Person(TypedTable): 

566 ... 

567 

568 class Article(TypedTable): 

569 ... 

570 

571 # at a later time: 

572 db.define(Article) 

573 

574 Returns: 

575 the result of pydal.define_table 

576 """ 

577 

578 def wrapper(cls: typing.Type[T]) -> typing.Type[T]: 

579 return self._define(cls, **kwargs) 

580 

581 if maybe_cls: 

582 return wrapper(maybe_cls) 

583 

584 return wrapper 

585 

586 # def drop(self, table_name: str) -> None: 

587 # """ 

588 # Remove a table by name (both on the database level and the typedal level). 

589 # """ 

590 # # drop calls TypedTable.drop() and removes it from the `_class_map` 

591 # if cls := self._class_map.pop(table_name, None): 

592 # cls.drop() 

593 

594 # def drop_all(self, max_retries: int = None) -> None: 

595 # """ 

596 # Remove all tables and keep doing so until everything is gone! 

597 # """ 

598 # retries = 0 

599 # if max_retries is None: 

600 # max_retries = len(self.tables) 

601 # 

602 # while self.tables: 

603 # retries += 1 

604 # for table in self.tables: 

605 # self.drop(table) 

606 # 

607 # if retries > max_retries: 

608 # raise RuntimeError("Could not delete all tables") 

609 

    def __call__(self, *_args: T_Query, **kwargs: Any) -> "TypedSet":
        """
        A db instance can be called directly to perform a query.

        Usually, only a query is passed.

        Example:
            db(query).select()

        """
        args = list(_args)
        if args:
            cls = args[0]
            if isinstance(cls, bool):
                raise ValueError("Don't actually pass a bool to db()! Use a query instead.")

            if isinstance(cls, type) and issubclass(type(cls), type) and issubclass(cls, TypedTable):
                # table defined without @db.define decorator!
                # replace the class itself with an 'all rows of this table' query:
                _cls: typing.Type[TypedTable] = cls
                # `!= None` (not `is not None`) on purpose: builds a pydal Query object, not a bool
                args[0] = _cls.id != None

        _set = super().__call__(*args, **kwargs)
        return typing.cast(TypedSet, _set)

633 

634 def __getitem__(self, key: str) -> "Table": 

635 """ 

636 Allows dynamically accessing a table by its name as a string. 

637 

638 Example: 

639 db['users'] -> user 

640 """ 

641 return typing.cast(Table, super().__getitem__(str(key))) 

642 

643 @classmethod 

644 def _build_field(cls, name: str, _type: str, **kw: Any) -> Field: 

645 return Field(name, _type, **{**cls.default_kwargs, **kw}) 

646 

    @classmethod
    def _annotation_to_pydal_fieldtype(
        cls, _ftype: T_annotation, mut_kw: typing.MutableMapping[str, Any]
    ) -> Optional[str]:
        """
        Convert a Python annotation into a pydal field-type string (e.g. str -> "string").

        Returns None for unsupported annotations.
        May mutate `mut_kw` as a side effect (e.g. sets notnull=False for Optional types).
        """
        # ftype can be a union or type. typing.cast is sometimes used to tell mypy when it's not a union.
        ftype = typing.cast(type, _ftype)  # cast from typing.Type to type to make mypy happy)

        if isinstance(ftype, str):
            # extract type from string
            ftype = typing.get_args(typing.Type[ftype])[0]._evaluate(
                localns=locals(), globalns=globals(), recursive_guard=frozenset()
            )

        if mapping := BASIC_MAPPINGS.get(ftype):
            # basic types
            return mapping
        elif isinstance(ftype, _Table):
            # db.table
            return f"reference {ftype._tablename}"
        elif issubclass(type(ftype), type) and issubclass(ftype, TypedTable):
            # SomeTable
            snakename = cls.to_snake(ftype.__name__)
            return f"reference {snakename}"
        elif isinstance(ftype, TypedField):
            # FieldType(type, ...)
            return ftype._to_field(mut_kw)
        elif origin_is_subclass(ftype, TypedField):
            # TypedField[int]
            return cls._annotation_to_pydal_fieldtype(typing.get_args(ftype)[0], mut_kw)
        elif isinstance(ftype, types.GenericAlias) and typing.get_origin(ftype) in (list, TypedField):
            # list[str] -> str -> string -> list:string
            _child_type = typing.get_args(ftype)[0]
            _child_type = cls._annotation_to_pydal_fieldtype(_child_type, mut_kw)
            return f"list:{_child_type}"
        elif is_union(ftype):
            # str | int -> UnionType
            # typing.Union[str | int] -> typing._UnionGenericAlias

            # Optional[type] == type | None

            match typing.get_args(ftype):
                case (_child_type, _Types.NONETYPE) | (_Types.NONETYPE, _child_type):
                    # good union of Nullable

                    # if a field is optional, it is nullable:
                    mut_kw["notnull"] = False
                    return cls._annotation_to_pydal_fieldtype(_child_type, mut_kw)
                case _:
                    # two types is not supported by the db!
                    return None
        else:
            return None

699 

    @classmethod
    def _to_field(cls, fname: str, ftype: type, **kw: Any) -> Field:
        """
        Convert an annotation into a pydal Field.

        Args:
            fname: name of the property
            ftype: annotation of the property
            kw: when using TypedField or a function returning it (e.g. StringField),
                keyword args can be used to pass any other settings you would normally to a pydal Field

        -> pydal.Field(fname, ftype, **kw)

        Example:
            class MyTable:
                fname: ftype
                id: int
                name: str
                reference: Table
                other: TypedField(str, default="John Doe")  # default will be in kwargs

        Raises:
            NotImplementedError: when the annotation cannot be mapped to a pydal field type.
        """
        fname = cls.to_snake(fname)

        if converted_type := cls._annotation_to_pydal_fieldtype(ftype, kw):
            return cls._build_field(fname, converted_type, **kw)
        else:
            raise NotImplementedError(f"Unsupported type {ftype}/{type(ftype)}")

727 

    @staticmethod
    def to_snake(camel: str) -> str:
        """
        Convert a (Camel-cased) name to its snake_case table-name form.

        Moved to helpers, kept as a static method for legacy reasons.
        """
        return to_snake(camel)

734 

735 

class TableProtocol(typing.Protocol):  # pragma: no cover
    """
    Structural type for pydal tables: has an `id` column and supports `table["col"]`.

    Make mypy happy.
    """

    id: int  # noqa: A003

    def __getitem__(self, item: str) -> Field:
        """
        Tell mypy a Table supports dictionary notation for columns.
        """

748 

class Table(_Table, TableProtocol):  # type: ignore
    """
    Typed alias of pydal's Table so annotations can be more precise.

    Make mypy happy.
    """

753 

754 

class TableMeta(type):
    """
    This metaclass contains functionality on table classes, that doesn't exist on its instances.

    Example:
        class MyTable(TypedTable):
            some_field: TypedField[int]

        MyTable.update_or_insert(...)  # should work

        MyTable.some_field  # -> Field, can be used to query etc.

        row = MyTable.first()  # returns instance of MyTable

        # row.update_or_insert(...)  # shouldn't work!

        row.some_field  # -> int, with actual data

    """

    # set up by db.define (all None until the model is bound to a database):
    _db: TypeDAL | None = None
    _table: Table | None = None
    _relationships: dict[str, Relationship[Any]] | None = None

781 

782 ######################### 

783 # TypeDAL custom logic: # 

784 ######################### 

785 

    def __set_internals__(self, db: pydal.DAL, table: Table, relationships: dict[str, Relationship[Any]]) -> None:
        """
        Store the related database and pydal table for later usage.

        Called by TypeDAL._define once the underlying pydal table exists.
        """
        self._db = db
        self._table = table
        self._relationships = relationships

793 

794 def __getattr__(self, col: str) -> Optional[Field]: 

795 """ 

796 Magic method used by TypedTableMeta to get a database field with dot notation on a class. 

797 

798 Example: 

799 SomeTypedTable.col -> db.table.col (via TypedTableMeta.__getattr__) 

800 

801 """ 

802 if self._table: 

803 return getattr(self._table, col, None) 

804 

805 return None 

806 

807 def _ensure_table_defined(self) -> Table: 

808 if not self._table: 

809 raise EnvironmentError("@define or db.define is not called on this class yet!") 

810 return self._table 

811 

812 def __iter__(self) -> typing.Generator[Field, None, None]: 

813 """ 

814 Loop through the columns of this model. 

815 """ 

816 table = self._ensure_table_defined() 

817 yield from iter(table) 

818 

819 def __getitem__(self, item: str) -> Field: 

820 """ 

821 Allow dict notation to get a column of this table (-> Field instance). 

822 """ 

823 table = self._ensure_table_defined() 

824 return table[item] 

825 

826 def __str__(self) -> str: 

827 """ 

828 Normally, just returns the underlying table name, but with a fallback if the model is unbound. 

829 """ 

830 if self._table: 

831 return str(self._table) 

832 else: 

833 return f"<unbound table {self.__name__}>" 

834 

    def from_row(self: typing.Type[T_MetaInstance], row: pydal.objects.Row) -> T_MetaInstance:
        """
        Create a model instance from a pydal row.
        """
        return self(row)

    def all(self: typing.Type[T_MetaInstance]) -> "TypedRows[T_MetaInstance]":  # noqa: A003
        """
        Return all rows for this model.

        Shorthand for an unfiltered collect().
        """
        return self.collect()

846 

847 def __json__(self: typing.Type[T_MetaInstance], instance: T_MetaInstance | None = None) -> dict[str, Any]: 

848 """ 

849 Convert to a json-dumpable dict. 

850 

851 as_dict is not fully json-dumpable, so use as_json and json.loads to ensure it is dumpable (and loadable). 

852 todo: can this be optimized? 

853 

854 See Also: 

855 https://github.com/jeff-hykin/json_fix 

856 """ 

857 string = instance.as_json() if instance else self.as_json() 

858 

859 return typing.cast(dict[str, Any], json.loads(string)) 

860 

    def get_relationships(self) -> dict[str, Relationship[Any]]:
        """
        Return the registered relationships of the current model.

        Falls back to an empty dict for unbound models.
        """
        return self._relationships or {}

866 

867 ########################## 

868 # TypeDAL Modified Logic # 

869 ########################## 

870 

    def insert(self: typing.Type[T_MetaInstance], **fields: Any) -> T_MetaInstance:
        """
        Insert a new row and return it as a typed model instance.

        This is only called when db.define is not used as a decorator.

        cls.__table functions as 'self'

        Args:
            **fields: anything you want to insert in the database

        Returns:
            a model instance wrapping the newly created row
            (NOT just the raw id, although the id is what the underlying insert produces).

        """
        table = self._ensure_table_defined()

        result = table.insert(**fields)
        # result is the new row's id; calling self(...) looks the row up and wraps it
        return self(result)

888 

889 def _insert(self, **fields: Any) -> str: 

890 table = self._ensure_table_defined() 

891 

892 return str(table._insert(**fields)) 

893 

894 def bulk_insert(self: typing.Type[T_MetaInstance], items: list[dict[str, Any]]) -> "TypedRows[T_MetaInstance]": 

895 """ 

896 Insert multiple rows, returns a TypedRows set of new instances. 

897 """ 

898 table = self._ensure_table_defined() 

899 result = table.bulk_insert(items) 

900 return self.where(lambda row: row.id.belongs(result)).collect() 

901 

902 def update_or_insert( 

903 self: typing.Type[T_MetaInstance], query: T_Query | dict[str, Any] = DEFAULT, **values: Any 

904 ) -> T_MetaInstance: 

905 """ 

906 Update a row if query matches, else insert a new one. 

907 

908 Returns the created or updated instance. 

909 """ 

910 table = self._ensure_table_defined() 

911 

912 if query is DEFAULT: 

913 record = table(**values) 

914 elif isinstance(query, dict): 

915 record = table(**query) 

916 else: 

917 record = table(query) 

918 

919 if not record: 

920 return self.insert(**values) 

921 

922 record.update_record(**values) 

923 return self(record) 

924 

925 def validate_and_insert( 

926 self: typing.Type[T_MetaInstance], **fields: Any 

927 ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]: 

928 """ 

929 Validate input data and then insert a row. 

930 

931 Returns a tuple of (the created instance, a dict of errors). 

932 """ 

933 table = self._ensure_table_defined() 

934 result = table.validate_and_insert(**fields) 

935 if row_id := result.get("id"): 

936 return self(row_id), None 

937 else: 

938 return None, result.get("errors") 

939 

940 def validate_and_update( 

941 self: typing.Type[T_MetaInstance], query: Query, **fields: Any 

942 ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]: 

943 """ 

944 Validate input data and then update max 1 row. 

945 

946 Returns a tuple of (the updated instance, a dict of errors). 

947 """ 

948 table = self._ensure_table_defined() 

949 

950 try: 

951 result = table.validate_and_update(query, **fields) 

952 except Exception as e: 

953 result = {"errors": {"exception": str(e)}} 

954 

955 if errors := result.get("errors"): 

956 return None, errors 

957 elif row_id := result.get("id"): 

958 return self(row_id), None 

959 else: # pragma: no cover 

960 # update on query without result (shouldnt happen) 

961 return None, None 

962 

963 def validate_and_update_or_insert( 

964 self: typing.Type[T_MetaInstance], query: Query, **fields: Any 

965 ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]: 

966 """ 

967 Validate input data and then update_and_insert (on max 1 row). 

968 

969 Returns a tuple of (the updated/created instance, a dict of errors). 

970 """ 

971 table = self._ensure_table_defined() 

972 result = table.validate_and_update_or_insert(query, **fields) 

973 

974 if errors := result.get("errors"): 

975 return None, errors 

976 elif row_id := result.get("id"): 

977 return self(row_id), None 

978 else: # pragma: no cover 

979 # update on query without result (shouldnt happen) 

980 return None, None 

981 

982 def select(self: typing.Type[T_MetaInstance], *a: Any, **kw: Any) -> "QueryBuilder[T_MetaInstance]": 

983 """ 

984 See QueryBuilder.select! 

985 """ 

986 return QueryBuilder(self).select(*a, **kw) 

987 

988 def paginate(self: typing.Type[T_MetaInstance], limit: int, page: int = 1) -> "PaginatedRows[T_MetaInstance]": 

989 """ 

990 See QueryBuilder.paginate! 

991 """ 

992 return QueryBuilder(self).paginate(limit=limit, page=page) 

993 

994 def chunk( 

995 self: typing.Type[T_MetaInstance], chunk_size: int 

996 ) -> typing.Generator["TypedRows[T_MetaInstance]", Any, None]: 

997 """ 

998 See QueryBuilder.chunk! 

999 """ 

1000 return QueryBuilder(self).chunk(chunk_size) 

1001 

1002 def where(self: typing.Type[T_MetaInstance], *a: Any, **kw: Any) -> "QueryBuilder[T_MetaInstance]": 

1003 """ 

1004 See QueryBuilder.where! 

1005 """ 

1006 return QueryBuilder(self).where(*a, **kw) 

1007 

1008 def cache(self: typing.Type[T_MetaInstance], *deps: Any, **kwargs: Any) -> "QueryBuilder[T_MetaInstance]": 

1009 """ 

1010 See QueryBuilder.cache! 

1011 """ 

1012 return QueryBuilder(self).cache(*deps, **kwargs) 

1013 

1014 def count(self: typing.Type[T_MetaInstance]) -> int: 

1015 """ 

1016 See QueryBuilder.count! 

1017 """ 

1018 return QueryBuilder(self).count() 

1019 

1020 def first(self: typing.Type[T_MetaInstance]) -> T_MetaInstance | None: 

1021 """ 

1022 See QueryBuilder.first! 

1023 """ 

1024 return QueryBuilder(self).first() 

1025 

1026 def join( 

1027 self: typing.Type[T_MetaInstance], 

1028 *fields: str | typing.Type["TypedTable"], 

1029 method: JOIN_OPTIONS = None, 

1030 on: OnQuery | list[Expression] | Expression = None, 

1031 condition: Condition = None, 

1032 ) -> "QueryBuilder[T_MetaInstance]": 

1033 """ 

1034 See QueryBuilder.join! 

1035 """ 

1036 return QueryBuilder(self).join(*fields, on=on, condition=condition, method=method) 

1037 

1038 def collect(self: typing.Type[T_MetaInstance], verbose: bool = False) -> "TypedRows[T_MetaInstance]": 

1039 """ 

1040 See QueryBuilder.collect! 

1041 """ 

1042 return QueryBuilder(self).collect(verbose=verbose) 

1043 

1044 @property 

1045 def ALL(cls) -> pydal.objects.SQLALL: 

1046 """ 

1047 Select all fields for this table. 

1048 """ 

1049 table = cls._ensure_table_defined() 

1050 

1051 return table.ALL 

1052 

    ##########################
    # TypeDAL Shadowed Logic #
    ##########################
    # column names of the underlying pydal table
    # (presumably filled in when the table is defined — confirm against TypeDAL.define)
    fields: list[str]

    # other table methods (shadowing pydal.objects.Table):

1059 

1060 def truncate(self, mode: str = "") -> None: 

1061 """ 

1062 Remove all data and reset index. 

1063 """ 

1064 table = self._ensure_table_defined() 

1065 table.truncate(mode) 

1066 

1067 def drop(self, mode: str = "") -> None: 

1068 """ 

1069 Remove the underlying table. 

1070 """ 

1071 table = self._ensure_table_defined() 

1072 table.drop(mode) 

1073 

1074 def create_index(self, name: str, *fields: Field | str, **kwargs: Any) -> bool: 

1075 """ 

1076 Add an index on some columns of this table. 

1077 """ 

1078 table = self._ensure_table_defined() 

1079 result = table.create_index(name, *fields, **kwargs) 

1080 return typing.cast(bool, result) 

1081 

1082 def drop_index(self, name: str, if_exists: bool = False) -> bool: 

1083 """ 

1084 Remove an index from this table. 

1085 """ 

1086 table = self._ensure_table_defined() 

1087 result = table.drop_index(name, if_exists) 

1088 return typing.cast(bool, result) 

1089 

1090 def import_from_csv_file( 

1091 self, 

1092 csvfile: typing.TextIO, 

1093 id_map: dict[str, str] = None, 

1094 null: Any = "<NULL>", 

1095 unique: str = "uuid", 

1096 id_offset: dict[str, int] = None, # id_offset used only when id_map is None 

1097 transform: typing.Callable[[dict[Any, Any]], dict[Any, Any]] = None, 

1098 validate: bool = False, 

1099 encoding: str = "utf-8", 

1100 delimiter: str = ",", 

1101 quotechar: str = '"', 

1102 quoting: int = csv.QUOTE_MINIMAL, 

1103 restore: bool = False, 

1104 **kwargs: Any, 

1105 ) -> None: 

1106 """ 

1107 Load a csv file into the database. 

1108 """ 

1109 table = self._ensure_table_defined() 

1110 table.import_from_csv_file( 

1111 csvfile, 

1112 id_map=id_map, 

1113 null=null, 

1114 unique=unique, 

1115 id_offset=id_offset, 

1116 transform=transform, 

1117 validate=validate, 

1118 encoding=encoding, 

1119 delimiter=delimiter, 

1120 quotechar=quotechar, 

1121 quoting=quoting, 

1122 restore=restore, 

1123 **kwargs, 

1124 ) 

1125 

1126 def on(self, query: Query | bool) -> Expression: 

1127 """ 

1128 Shadow Table.on. 

1129 

1130 Used for joins. 

1131 

1132 See Also: 

1133 http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#One-to-many-relation 

1134 """ 

1135 table = self._ensure_table_defined() 

1136 return typing.cast(Expression, table.on(query)) 

1137 

1138 def with_alias(self, alias: str) -> _Table: 

1139 """ 

1140 Shadow Table.with_alias. 

1141 

1142 Useful for joins when joining the same table multiple times. 

1143 

1144 See Also: 

1145 http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#One-to-many-relation 

1146 """ 

1147 table = self._ensure_table_defined() 

1148 return table.with_alias(alias) 

1149 

1150 # @typing.dataclass_transform() 

1151 

1152 

class TypedField(typing.Generic[T_Value]):  # pragma: no cover
    """
    Typed version of pydal.Field, which will be converted to a normal Field in the background.

    Acts as a descriptor: accessed on the class it yields the bound pydal Field
    (usable in queries), accessed on an instance it yields the column's value.
    Comparison operators are forwarded to the underlying Field so expressions
    like `Model.field == x` produce pydal Queries.
    """

    # will be set by .bind on db.define
    name = ""
    _db: Optional[pydal.DAL] = None
    _rname: Optional[str] = None
    _table: Optional[Table] = None
    _field: Optional[Field] = None  # the real pydal Field, None until bind() is called

    _type: T_annotation  # the Python annotation this field was declared with
    kwargs: Any  # extra Field options passed at declaration time

    def __init__(self, _type: typing.Type[T_Value] | types.UnionType = str, /, **settings: Any) -> None:  # type: ignore
        """
        A TypedFieldType should not be inited manually, but TypedField (from `fields.py`) should be used!

        Args:
            _type: the Python type (or union) this field holds; defaults to str.
            **settings: forwarded to pydal.Field when the table is defined.
        """
        self._type = _type
        self.kwargs = settings
        super().__init__()

    @typing.overload
    def __get__(self, instance: T_MetaInstance, owner: typing.Type[T_MetaInstance]) -> T_Value:  # pragma: no cover
        """
        row.field -> (actual data).
        """

    @typing.overload
    def __get__(self, instance: None, owner: "typing.Type[TypedTable]") -> "TypedField[T_Value]":  # pragma: no cover
        """
        Table.field -> Field.
        """

    def __get__(
        self, instance: T_MetaInstance | None, owner: typing.Type[T_MetaInstance]
    ) -> typing.Union[T_Value, "TypedField[T_Value]"]:
        """
        Since this class is a Descriptor field, \
        it returns something else depending on if it's called on a class or instance.

        (this is mostly for mypy/typing)
        """
        if instance:
            # this is only reached in a very specific case:
            # an instance of the object was created with a specific set of fields selected (excluding the current one)
            # in that case, no value was stored in the owner -> return None (since the field was not selected)
            return typing.cast(T_Value, None)  # cast as T_Value so mypy understands it for selected fields
        else:
            # getting as class -> return actual field so pydal understands it when using in query etc.
            return typing.cast(TypedField[T_Value], self._field)  # pretend it's still typed for IDE support

    def __str__(self) -> str:
        """
        String representation of a Typed Field.

        If `type` is set explicitly (e.g. TypedField(str, type="text")), that type is used: `TypedField.text`,
        otherwise the type annotation is used (e.g. TypedField(str) -> TypedField.str)

        Returns the empty string while the field is still unbound.
        """
        return str(self._field) if self._field else ""

    def __repr__(self) -> str:
        """
        More detailed string representation of a Typed Field.

        Uses __str__ and adds the provided extra options (kwargs) in the representation.
        """
        s = self.__str__()

        if "type" in self.kwargs:
            # manual type in kwargs supplied
            t = self.kwargs["type"]
        # NOTE(review): `issubclass(type, type(self._type))` is True when self._type is a
        # plain class (its metaclass is `type`); `isinstance(self._type, type)` would express
        # the intent more directly and also cover custom metaclasses — confirm before changing.
        elif issubclass(type, type(self._type)):
            # normal type, str.__name__ = 'str'
            t = getattr(self._type, "__name__", str(self._type))
        elif t_args := typing.get_args(self._type):
            # list[str] -> 'str'
            t = t_args[0].__name__
        else:  # pragma: no cover
            # fallback - something else, may not even happen, I'm not sure
            t = self._type

        s = f"TypedField[{t}].{s}" if s else f"TypedField[{t}]"

        kw = self.kwargs.copy()
        kw.pop("type", None)
        return f"<{s} with options {kw}>"

    def _to_field(self, extra_kwargs: typing.MutableMapping[str, Any]) -> Optional[str]:
        """
        Convert a Typed Field instance to a pydal.Field.

        Merges this field's declared kwargs into `extra_kwargs` (mutating it) and
        returns the pydal field-type string: an explicit `type` option wins,
        otherwise the type is derived from the Python annotation.
        """
        other_kwargs = self.kwargs.copy()
        extra_kwargs.update(other_kwargs)
        return extra_kwargs.pop("type", False) or TypeDAL._annotation_to_pydal_fieldtype(self._type, extra_kwargs)

    def bind(self, field: pydal.objects.Field, table: pydal.objects.Table) -> None:
        """
        Bind the right db/table/field info to this class, so queries can be made using `Class.field == ...`.
        """
        self._table = table
        self._field = field

    def __getattr__(self, key: str) -> Any:
        """
        If the regular getattribute does not work, try to get info from the related Field.

        Raises AttributeError (from the inner getattr) if neither this object
        nor the bound Field has the attribute.
        """
        with contextlib.suppress(AttributeError):
            return super().__getattribute__(key)

        # try on actual field:
        return getattr(self._field, key)

    def __eq__(self, other: Any) -> Query:
        """
        Performing == on a Field will result in a Query.
        """
        return typing.cast(Query, self._field == other)

    def __ne__(self, other: Any) -> Query:
        """
        Performing != on a Field will result in a Query.
        """
        return typing.cast(Query, self._field != other)

    def __gt__(self, other: Any) -> Query:
        """
        Performing > on a Field will result in a Query.
        """
        return typing.cast(Query, self._field > other)

    def __lt__(self, other: Any) -> Query:
        """
        Performing < on a Field will result in a Query.
        """
        return typing.cast(Query, self._field < other)

    def __ge__(self, other: Any) -> Query:
        """
        Performing >= on a Field will result in a Query.
        """
        return typing.cast(Query, self._field >= other)

    def __le__(self, other: Any) -> Query:
        """
        Performing <= on a Field will result in a Query.
        """
        return typing.cast(Query, self._field <= other)

    def __hash__(self) -> int:
        """
        Shadow Field.__hash__.

        Defined explicitly because overriding __eq__ would otherwise make
        this class unhashable.
        """
        return hash(self._field)

    def __invert__(self) -> Expression:
        """
        Performing ~ on a Field will result in an Expression.
        """
        if not self._field:  # pragma: no cover
            raise ValueError("Unbound Field can not be inverted!")

        return typing.cast(Expression, ~self._field)

1317 

1318 

class TypedTable(metaclass=TableMeta):
    """
    Enhanced modeling system on top of pydal's Table that adds typing and additional functionality.
    """

    # set up by '__new__':
    _row: Row | None = None

    # names of relationships joined onto this instance (set by the query builder)
    _with: list[str]

    id: "TypedField[int]"  # noqa: A003

    # table hooks, forwarded to pydal:
    _before_insert: list[BeforeInsertCallable]
    _after_insert: list[AfterInsertCallable]
    _before_update: list[BeforeUpdateCallable]
    _after_update: list[AfterUpdateCallable]
    _before_delete: list[BeforeDeleteCallable]
    _after_delete: list[AfterDeleteCallable]

    def _setup_instance_methods(self) -> None:
        """
        Replace the classmethod variants with row-level implementations.

        After this, `instance.as_dict()` dumps the row (not the table definition), etc.
        """
        self.as_dict = self._as_dict  # type: ignore
        self.__json__ = self.as_json = self._as_json  # type: ignore
        # self.as_yaml = self._as_yaml  # type: ignore
        self.as_xml = self._as_xml  # type: ignore

        self.update = self._update  # type: ignore

        self.delete_record = self._delete_record  # type: ignore
        self.update_record = self._update_record  # type: ignore

    def __new__(
        cls, row_or_id: typing.Union[Row, Query, pydal.objects.Set, int, str, None, "TypedTable"] = None, **filters: Any
    ) -> "TypedTable":
        """
        Create a Typed Rows model instance from an existing row, ID or query.

        Returns None (!) when the lookup matches no row, and a bare "dummy"
        instance when called without arguments.

        Examples:
            MyTable(1)
            MyTable(id=1)
            MyTable(MyTable.id == 1)
        """
        table = cls._ensure_table_defined()
        inst = super().__new__(cls)

        if isinstance(row_or_id, TypedTable):
            # existing typed table instance!
            return row_or_id
        elif isinstance(row_or_id, pydal.objects.Row):
            row = row_or_id
        elif row_or_id is not None:
            row = table(row_or_id, **filters)
        elif filters:
            row = table(**filters)
        else:
            # dummy object
            return inst

        if not row:
            return None  # type: ignore

        inst._row = row
        inst.__dict__.update(row)
        inst._setup_instance_methods()
        return inst

    def __iter__(self) -> typing.Generator[Any, None, None]:
        """
        Allows looping through the columns.
        """
        row = self._ensure_matching_row()
        yield from iter(row)

    def __getitem__(self, item: str) -> Any:
        """
        Allows dictionary notation to get columns.

        Raises:
            KeyError: if the column exists neither on the instance nor the row.
        """
        if item in self.__dict__:
            return self.__dict__.get(item)

        # fallback to lookup in row
        if self._row:
            return self._row[item]

        # nothing found!
        raise KeyError(item)

    def __getattr__(self, item: str) -> Any:
        """
        Allows dot notation to get columns.

        Raises:
            AttributeError: if the column does not exist.
        """
        # use a sentinel instead of truthiness, so existing falsy column values
        # (0, "", False, None) are returned instead of raising AttributeError:
        sentinel = object()
        value = self.get(item, sentinel)
        if value is sentinel:
            raise AttributeError(item)

        return value

    def get(self, item: str, default: Any = None) -> Any:
        """
        Try to get a column from this instance, else return default.
        """
        try:
            return self.__getitem__(item)
        except KeyError:
            return default

    def __setitem__(self, key: str, value: Any) -> None:
        """
        Data can both be updated via dot and dict notation.
        """
        return setattr(self, key, value)

    def __int__(self) -> int:
        """
        Calling int on a model instance will return its id (0 for a dummy instance).
        """
        return getattr(self, "id", 0)

    def __bool__(self) -> bool:
        """
        If the instance has an underlying row with data, it is truthy.
        """
        return bool(getattr(self, "_row", False))

    def _ensure_matching_row(self) -> Row:
        # guard for instance methods that require a live row
        if not getattr(self, "_row", None):
            raise EnvironmentError("Trying to access non-existant row. Maybe it was deleted or not yet initialized?")
        return self._row

    def __repr__(self) -> str:
        """
        String representation of the model instance.
        """
        model_name = self.__class__.__name__
        model_data = {}

        if self._row:
            model_data = self._row.as_json()

        details = model_name
        details += f"({model_data})"

        if relationships := getattr(self, "_with", []):
            details += f" + {relationships}"

        return f"<{details}>"

    # serialization
    # underscore variants work for class instances (set up by _setup_instance_methods)

    @classmethod
    def as_dict(cls, flat: bool = False, sanitize: bool = True) -> dict[str, Any]:
        """
        Dump the object to a plain dict.

        Can be used as both a class or instance method:
        - dumps the table info if it's a class
        - dumps the row info if it's an instance (see _as_dict)
        """
        table = cls._ensure_table_defined()
        result = table.as_dict(flat, sanitize)
        return typing.cast(dict[str, Any], result)

    @classmethod
    def as_json(cls, sanitize: bool = True) -> str:
        """
        Dump the object to json.

        Can be used as both a class or instance method:
        - dumps the table info if it's a class
        - dumps the row info if it's an instance (see _as_json)
        """
        table = cls._ensure_table_defined()
        return typing.cast(str, table.as_json(sanitize))

    @classmethod
    def as_xml(cls, sanitize: bool = True) -> str:  # pragma: no cover
        """
        Dump the object to xml.

        Can be used as both a class or instance method:
        - dumps the table info if it's a class
        - dumps the row info if it's an instance (see _as_xml)
        """
        table = cls._ensure_table_defined()
        return typing.cast(str, table.as_xml(sanitize))

    @classmethod
    def as_yaml(cls, sanitize: bool = True) -> str:
        """
        Dump the object to yaml.

        Can be used as both a class or instance method:
        - dumps the table info if it's a class
        - dumps the row info if it's an instance (see _as_yaml)
        """
        table = cls._ensure_table_defined()
        return typing.cast(str, table.as_yaml(sanitize))

    def _as_dict(
        self, datetime_to_str: bool = False, custom_types: typing.Iterable[type] | type | None = None
    ) -> dict[str, Any]:
        """
        Dump the row (plus any joined relationships) to a plain dict.
        """
        row = self._ensure_matching_row()
        result = row.as_dict(datetime_to_str=datetime_to_str, custom_types=custom_types)

        if _with := getattr(self, "_with", None):
            # recursively dump joined relationship data as well:
            for relationship in _with:
                data = self.get(relationship)
                if isinstance(data, list):
                    data = [_.as_dict() if getattr(_, "as_dict", None) else _ for _ in data]
                elif data:
                    data = data.as_dict()

                result[relationship] = data

        return typing.cast(dict[str, Any], result)

    def _as_json(
        self,
        mode: str = "object",
        default: typing.Callable[[Any], Any] = None,
        colnames: list[str] = None,
        serialize: bool = True,
        **kwargs: Any,
    ) -> str:
        """
        Dump the row to json via pydal's Row.as_json.
        """
        row = self._ensure_matching_row()
        # bugfix: extra options must be passed as keyword arguments (**kwargs);
        # `*kwargs` would unpack only the dict KEYS as positional arguments.
        return typing.cast(str, row.as_json(mode, default, colnames, serialize, **kwargs))

    def _as_xml(self, sanitize: bool = True) -> str:  # pragma: no cover
        """
        Dump the row to xml via pydal's Row.as_xml.
        """
        row = self._ensure_matching_row()
        return typing.cast(str, row.as_xml(sanitize))

    # def _as_yaml(self, sanitize: bool = True) -> str:
    #     row = self._ensure_matching_row()
    #     return typing.cast(str, row.as_yaml(sanitize))

    def __setattr__(self, key: str, value: Any) -> None:
        """
        When setting a property on a Typed Table model instance, also update the underlying row.
        """
        if self._row and key in self._row.__dict__ and not callable(value):
            # enables `row.key = value; row.update_record()`
            self._row[key] = value

        super().__setattr__(key, value)

    @classmethod
    def update(cls: typing.Type[T_MetaInstance], query: Query, **fields: Any) -> T_MetaInstance | None:
        """
        Update one record; returns None when the query matches nothing.

        Example:
            MyTable.update(MyTable.id == 1, name="NewName") -> MyTable
        """
        # todo: update multiple?
        if record := cls(query):
            return record.update_record(**fields)
        else:
            return None

    def _update(self: T_MetaInstance, **fields: Any) -> T_MetaInstance:
        """
        Update the in-memory row and instance data (does NOT hit the database).
        """
        row = self._ensure_matching_row()
        row.update(**fields)
        self.__dict__.update(**fields)
        return self

    def _update_record(self: T_MetaInstance, **fields: Any) -> T_MetaInstance:
        """
        Persist field changes to the database and sync the instance.
        """
        row = self._ensure_matching_row()
        new_row = row.update_record(**fields)
        self.update(**new_row)
        return self

    def update_record(self: T_MetaInstance, **fields: Any) -> T_MetaInstance:  # pragma: no cover
        """
        Here as a placeholder for _update_record.

        Will be replaced on instance creation!
        """
        return self._update_record(**fields)

    def _delete_record(self) -> int:
        """
        Delete the underlying row; actual logic in `pydal.helpers.classes.RecordDeleter`.
        """
        row = self._ensure_matching_row()
        result = row.delete_record()
        self.__dict__ = {}  # empty self, since row is no more.
        self._row = None  # just to be sure
        self._setup_instance_methods()
        # ^ instance methods might've been deleted by emptying dict,
        # but we still want .as_dict to show an error, not the table's as_dict.
        return typing.cast(int, result)

    def delete_record(self) -> int:  # pragma: no cover
        """
        Here as a placeholder for _delete_record.

        Will be replaced on instance creation!
        """
        return self._delete_record()

    # __del__ is also called on the end of a scope so don't remove records on every del!!

    # pickling:
    def __setstate__(self, state: dict[str, Any]) -> None:
        """
        Used by dill when loading from a bytestring.
        """
        # as_dict also includes table info, so dump as json to only get the actual row data
        # then create a new (more empty) row object:
        state["_row"] = Row(json.loads(state["_row"]))
        self.__dict__ |= state

    def __getstate__(self) -> dict[str, Any]:
        """
        State to save when pickling.

        Prevents db connection from being pickled.
        Similar to as_dict but without changing the data of the relationships (dill does that recursively)
        """
        row = self._ensure_matching_row()
        result: dict[str, Any] = row.as_dict()

        if _with := getattr(self, "_with", None):
            for relationship in _with:
                data = self.get(relationship)

                result[relationship] = data

        result["_row"] = self._row.as_json() if self._row else ""
        return result

1648 

1649 

# backwards compat: TypedRow is the old name of TypedTable, kept so existing imports keep working.
TypedRow = TypedTable

1652 

1653 

class TypedRows(typing.Collection[T_MetaInstance], Rows):
    """
    Slightly enhanced and typed functionality on top of pydal Rows (the result of a select).
    """

    records: dict[int, T_MetaInstance]  # keyed by row id (pydal keeps a list instead)
    # _rows: Rows
    model: typing.Type[T_MetaInstance]
    metadata: Metadata

    # pseudo-properties: actually stored in _rows
    db: TypeDAL
    colnames: list[str]
    fields: list[Field]
    colnames_fields: list[Field]
    response: list[tuple[Any, ...]]

    def __init__(
        self,
        rows: Rows,
        model: typing.Type[T_MetaInstance],
        records: dict[int, T_MetaInstance] = None,
        metadata: Metadata = None,
    ) -> None:
        """
        Should not be called manually!

        Normally, the `records` from an existing `Rows` object are used
        but these can be overwritten with a `records` dict.
        `metadata` can be any (un)structured data
        `model` is a Typed Table class
        """
        # bugfix: check `is None` instead of truthiness, so an explicitly-passed
        # EMPTY records dict (e.g. from find()/exclude() with no matches) stays
        # empty instead of being silently replaced by ALL rows:
        if records is None:
            records = {row.id: model(row) for row in rows}
        super().__init__(rows.db, records, rows.colnames, rows.compact, rows.response, rows.fields)
        self.model = model
        self.metadata = metadata or {}

    def __len__(self) -> int:
        """
        Return the count of rows.
        """
        return len(self.records)

    def __iter__(self) -> typing.Iterator[T_MetaInstance]:
        """
        Loop through the rows.
        """
        yield from self.records.values()

    def __contains__(self, ind: Any) -> bool:
        """
        Check if an id exists in this result set.
        """
        return ind in self.records

    def first(self) -> T_MetaInstance | None:
        """
        Get the row with the lowest id, or None if the result set is empty.
        """
        if not self.records:
            return None

        return next(iter(self))

    def last(self) -> T_MetaInstance | None:
        """
        Get the row with the highest id, or None if the result set is empty.
        """
        if not self.records:
            return None

        max_id = max(self.records.keys())
        return self[max_id]

    def find(
        self, f: typing.Callable[[T_MetaInstance], Query], limitby: tuple[int, int] = None
    ) -> "TypedRows[T_MetaInstance]":
        """
        Returns a new Rows object, a subset of the original object, filtered by the function `f`.
        """
        if not self.records:
            return self.__class__(self, self.model, {})

        records = {}
        if limitby:
            _min, _max = limitby
        else:
            _min, _max = 0, len(self)
        count = 0
        for i, row in self.records.items():
            if f(row):
                if _min <= count:
                    records[i] = row
                count += 1
                if count == _max:
                    break

        return self.__class__(self, self.model, records)

    def exclude(self, f: typing.Callable[[T_MetaInstance], Query]) -> "TypedRows[T_MetaInstance]":
        """
        Removes elements from the calling Rows object, filtered by the function `f`, \
        and returns a new Rows object containing the removed elements.
        """
        if not self.records:
            return self.__class__(self, self.model, {})

        removed = {}
        # iterate over a copy of the keys, since we pop from self.records while looping:
        for i in list(self.records):
            row = self[i]
            if f(row):
                removed[i] = self.records.pop(i)

        return self.__class__(
            self,
            self.model,
            removed,
        )

    def sort(self, f: typing.Callable[[T_MetaInstance], Any], reverse: bool = False) -> list[T_MetaInstance]:
        """
        Returns a list of sorted elements (not sorted in place).
        """
        return sorted(self.records.values(), key=f, reverse=reverse)

    def __str__(self) -> str:
        """
        Simple string representation.
        """
        return f"<TypedRows with {len(self)} records>"

    def __repr__(self) -> str:
        """
        Print a table on repr().
        """
        data = self.as_dict()
        if not data:
            # bugfix: an empty result set has no first value to take headers from
            # (next() on an empty iterator would raise StopIteration):
            return str(self)
        headers = list(next(iter(data.values())).keys())
        return mktable(data, headers)

    def group_by_value(
        self, *fields: "str | Field | TypedField[T]", one_result: bool = False, **kwargs: Any
    ) -> dict[T, list[T_MetaInstance]]:
        """
        Group the rows by a specific field (which will be the dict key).
        """
        kwargs["one_result"] = one_result
        result = super().group_by_value(*fields, **kwargs)
        return typing.cast(dict[T, list[T_MetaInstance]], result)

    def column(self, column: str = None) -> list[Any]:
        """
        Get a list of all values in a specific column.

        Example:
            rows.column('name') -> ['Name 1', 'Name 2', ...]
        """
        return typing.cast(list[Any], super().column(column))

    def as_csv(self) -> str:
        """
        Dump the data to csv.
        """
        return typing.cast(str, super().as_csv())

    def as_dict(
        self,
        key: str = None,
        compact: bool = False,
        storage_to_dict: bool = False,
        datetime_to_str: bool = False,
        custom_types: list[type] = None,
    ) -> dict[int, dict[str, Any]]:
        """
        Get the data in a dict of dicts (keyed by row id).
        """
        if any([key, compact, storage_to_dict, datetime_to_str, custom_types]):
            # functionality not guaranteed
            return typing.cast(
                dict[int, dict[str, Any]],
                super().as_dict(
                    key or "id",
                    compact,
                    storage_to_dict,
                    datetime_to_str,
                    custom_types,
                ),
            )

        return {k: v.as_dict() for k, v in self.records.items()}

    def as_json(self, mode: str = "object", default: typing.Callable[[Any], Any] = None) -> str:
        """
        Turn the data into a dict and then dump to JSON.
        """
        return typing.cast(str, super().as_json(mode=mode, default=default))

    def json(self, mode: str = "object", default: typing.Callable[[Any], Any] = None) -> str:
        """
        Turn the data into a dict and then dump to JSON (alias of as_json).
        """
        return typing.cast(str, super().as_json(mode=mode, default=default))

    def as_list(
        self,
        compact: bool = False,
        storage_to_dict: bool = False,
        datetime_to_str: bool = False,
        custom_types: list[type] = None,
    ) -> list[dict[str, Any]]:
        """
        Get the data in a list of dicts.
        """
        if any([compact, storage_to_dict, datetime_to_str, custom_types]):
            return typing.cast(
                list[dict[str, Any]], super().as_list(compact, storage_to_dict, datetime_to_str, custom_types)
            )
        return [_.as_dict() for _ in self.records.values()]

    def __getitem__(self, item: int) -> T_MetaInstance:
        """
        You can get a specific row by ID from a typedrows by using rows[idx] notation.

        Since pydal's implementation differs (they expect a list instead of a dict with id keys),
        using rows[0] will return the first row, regardless of its id.
        """
        try:
            return self.records[item]
        except KeyError as e:
            if item == 0 and (row := self.first()):
                # special case: pydal internals think Rows.records is a list, not a dict
                return row

            raise e

    def get(self, item: int) -> typing.Optional[T_MetaInstance]:
        """
        Get a row by ID, or receive None if it isn't in this result set.
        """
        return self.records.get(item)

    def join(
        self,
        field: "Field | TypedField[Any]",
        name: str = None,
        constraint: Query = None,
        fields: list[str | Field] = None,
        orderby: Optional[str | Field] = None,
    ) -> T_MetaInstance:
        """
        This can be used to JOIN with some relationships after the initial select.

        Using the querybuilder's .join() method is prefered!
        """
        result = super().join(field, name, constraint, fields or [], orderby)
        return typing.cast(T_MetaInstance, result)

    def export_to_csv_file(
        self,
        ofile: typing.TextIO,
        null: Any = "<NULL>",
        delimiter: str = ",",
        quotechar: str = '"',
        quoting: int = csv.QUOTE_MINIMAL,
        represent: bool = False,
        colnames: list[str] = None,
        write_colnames: bool = True,
        *args: Any,
        **kwargs: Any,
    ) -> None:
        """
        Shadow export_to_csv_file from Rows, but with typing.

        See http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#Exporting-and-importing-data
        """
        super().export_to_csv_file(
            ofile,
            null,
            *args,
            delimiter=delimiter,
            quotechar=quotechar,
            quoting=quoting,
            represent=represent,
            colnames=colnames or self.colnames,
            write_colnames=write_colnames,
            **kwargs,
        )

    @classmethod
    def from_rows(
        cls, rows: Rows, model: typing.Type[T_MetaInstance], metadata: Metadata = None
    ) -> "TypedRows[T_MetaInstance]":
        """
        Internal method to convert a Rows object to a TypedRows.
        """
        return cls(rows, model, metadata=metadata)

    def __json__(self) -> dict[str, Any]:
        """
        For json-fix.
        """
        return typing.cast(dict[str, Any], self.as_dict())

    def __getstate__(self) -> dict[str, Any]:
        """
        Used by dill to dump to bytes (exclude db connection etc).
        """
        return {
            "metadata": json.dumps(self.metadata, default=str),
            "records": self.records,
        }

    def __setstate__(self, state: dict[str, Any]) -> None:
        """
        Used by dill when loading from a bytestring.
        """
        state["metadata"] = json.loads(state["metadata"])
        self.__dict__.update(state)

1974 

1975 

1976from .caching import ( # noqa: E402 

1977 _remove_cache, 

1978 _TypedalCache, 

1979 _TypedalCacheDependency, 

1980 create_and_hash_cache_key, 

1981 get_expire, 

1982 load_from_cache, 

1983 save_to_cache, 

1984) 

1985 

1986 

class QueryBuilder(typing.Generic[T_MetaInstance]):
    """
    Abstraction on top of pydal's query system.

    A QueryBuilder is immutable in practice: every chainable method (.where, .select,
    .join, .cache, ...) returns a NEW builder via _extend, so partial queries can be
    stored and reused safely.
    """

    model: typing.Type[T_MetaInstance]
    query: Query
    select_args: list[Any]
    select_kwargs: dict[str, Any]
    relationships: dict[str, Relationship[Any]]
    metadata: Metadata

    def __init__(
        self,
        model: typing.Type[T_MetaInstance],
        add_query: Optional[Query] = None,
        select_args: Optional[list[Any]] = None,
        select_kwargs: Optional[dict[str, Any]] = None,
        relationships: dict[str, Relationship[Any]] = None,
        metadata: Metadata = None,
    ):
        """
        Normally, you wouldn't manually initialize a QueryBuilder but start using a method on a TypedTable.

        Example:
            MyTable.where(...) -> QueryBuilder[MyTable]
        """
        self.model = model
        table = model._ensure_table_defined()
        # without an explicit query, match every row (id > 0):
        default_query = typing.cast(Query, table.id > 0)
        self.query = add_query or default_query
        self.select_args = select_args or []
        self.select_kwargs = select_kwargs or {}
        self.relationships = relationships or {}
        self.metadata = metadata or {}

    def __str__(self) -> str:
        """
        Simple string representation for the query builder.
        """
        return f"QueryBuilder for {self.model}"

    def __repr__(self) -> str:
        """
        Advanced string representation for the query builder.
        """
        return (
            f"<QueryBuilder for {self.model} with "
            f"{len(self.select_args)} select args; "
            f"{len(self.select_kwargs)} select kwargs; "
            f"{len(self.relationships)} relationships; "
            f"query: {bool(self.query)}; "
            f"metadata: {self.metadata}; "
            f">"
        )

    def __bool__(self) -> bool:
        """
        Querybuilder is truthy if it has rows.

        NOTE: this executes a COUNT query against the database.
        """
        return self.count() > 0

    def _extend(
        self,
        add_query: Optional[Query] = None,
        overwrite_query: Optional[Query] = None,
        select_args: Optional[list[Any]] = None,
        select_kwargs: Optional[dict[str, Any]] = None,
        relationships: dict[str, Relationship[Any]] = None,
        metadata: Metadata = None,
    ) -> "QueryBuilder[T_MetaInstance]":
        # Build a NEW builder combining the current state with the given extras:
        # - add_query is AND-ed with the existing query; overwrite_query replaces it;
        # - list/dict state is concatenated/merged, never mutated in place.
        return QueryBuilder(
            self.model,
            (add_query & self.query) if add_query else overwrite_query or self.query,
            (self.select_args + select_args) if select_args else self.select_args,
            (self.select_kwargs | select_kwargs) if select_kwargs else self.select_kwargs,
            (self.relationships | relationships) if relationships else self.relationships,
            (self.metadata | (metadata or {})) if metadata else self.metadata,
        )

    def select(self, *fields: Any, **options: Any) -> "QueryBuilder[T_MetaInstance]":
        """
        Fields: database columns by name ('id'), by field reference (table.id) or other (e.g. table.ALL).

        Options:
            paraphrased from the web2py pydal docs,
            For more info, see http://www.web2py.com/books/default/chapter/29/06/the-database-abstraction-layer#orderby-groupby-limitby-distinct-having-orderby_on_limitby-join-left-cache

            orderby: field(s) to order by. Supported:
                table.name - sort by name, ascending
                ~table.name - sort by name, descending
                <random> - sort randomly
                table.name|table.id - sort by two fields (first name, then id)

            groupby, having: together with orderby:
                groupby can be a field (e.g. table.name) to group records by
                having can be a query, only those `having` the condition are grouped

            limitby: tuple of min and max. When using the query builder, .paginate(limit, page) is recommended.
            distinct: bool/field. Only select rows that differ
            orderby_on_limitby (bool, default: True): by default, an implicit orderby is added when doing limitby.
            join: othertable.on(query) - do an INNER JOIN. Using TypeDAL relationships with .join() is recommended!
            left: othertable.on(query) - do a LEFT JOIN. Using TypeDAL relationships with .join() is recommended!
            cache: cache the query result to speed up repeated queries; e.g. (cache=(cache.ram, 3600), cacheable=True)
        """
        return self._extend(select_args=list(fields), select_kwargs=options)

    def where(
        self,
        *queries_or_lambdas: Query | typing.Callable[[typing.Type[T_MetaInstance]], Query],
        **filters: Any,
    ) -> "QueryBuilder[T_MetaInstance]":
        """
        Extend the builder's query.

        Can be used in multiple ways:
        .where(Query) -> with a direct query such as `Table.id == 5`
        .where(lambda table: table.id == 5) -> with a query via a lambda
        .where(id=5) -> via keyword arguments

        When using multiple where's, they will be ANDed:
        .where(lambda table: table.id == 5).where(lambda table: table.id == 6) == (table.id == 5) & (table.id=6)
        When passing multiple queries to a single .where, they will be ORed:
        .where(lambda table: table.id == 5, lambda table: table.id == 6) == (table.id == 5) | (table.id=6)
        """
        new_query = self.query
        table = self.model._ensure_table_defined()

        # keyword filters are always AND-ed onto the query:
        for field, value in filters.items():
            new_query &= table[field] == value

        # positional args are OR-ed together into one subquery first:
        subquery: DummyQuery | Query = DummyQuery()
        for query_or_lambda in queries_or_lambdas:
            if isinstance(query_or_lambda, _Query):
                subquery |= typing.cast(Query, query_or_lambda)
            elif callable(query_or_lambda):
                if result := query_or_lambda(self.model):
                    subquery |= result
            elif isinstance(query_or_lambda, (Field, _Field)) or is_typed_field(query_or_lambda):
                # a bare field means "this column IS NOT NULL"; in pydal, `!= None`
                # builds an SQL comparison, not a Python boolean.
                subquery |= typing.cast(Query, query_or_lambda != None)
            else:
                raise ValueError(f"Unexpected query type ({type(query_or_lambda)}).")

        if subquery:
            new_query &= subquery

        return self._extend(overwrite_query=new_query)

    def join(
        self,
        *fields: str | typing.Type[TypedTable],
        method: JOIN_OPTIONS = None,
        on: OnQuery | list[Expression] | Expression = None,
        condition: Condition = None,
    ) -> "QueryBuilder[T_MetaInstance]":
        """
        Include relationship fields in the result.

        `fields` can be names of Relationships on the current model.
        If no fields are passed, all will be used.

        By default, the `method` defined in the relationship is used.
        This can be overwritten with the `method` keyword argument (left or inner)

        `on` and `condition` are mutually exclusive and each requires exactly one field.
        """
        # todo: allow limiting amount of related rows returned for join?

        relationships = self.model.get_relationships()

        if condition and on:
            raise ValueError("condition and on can not be used together!")
        elif condition:
            if len(fields) != 1:
                raise ValueError("join(field, condition=...) can only be used with exactly one field!")

            # a plain Query condition is wrapped so it matches the lambda-based API:
            if isinstance(condition, pydal.objects.Query):
                condition = as_lambda(condition)

            relationships = {str(fields[0]): relationship(fields[0], condition=condition, join=method)}
        elif on:
            if len(fields) != 1:
                raise ValueError("join(field, on=...) can only be used with exactly one field!")

            # normalize: single Expression -> list -> lambda
            if isinstance(on, pydal.objects.Expression):
                on = [on]

            if isinstance(on, list):
                on = as_lambda(on)
            relationships = {str(fields[0]): relationship(fields[0], on=on, join=method)}

        else:
            if fields:
                # restrict to the requested relationships only (KeyError on unknown names):
                relationships = {str(k): relationships[str(k)] for k in fields}

        # an explicit join method overrides whatever each relationship declared:
        if method:
            relationships = {str(k): r.clone(join=method) for k, r in relationships.items()}

        return self._extend(relationships=relationships)

    def cache(
        self, *deps: Any, expires_at: Optional[dt.datetime] = None, ttl: Optional[int | dt.timedelta] = None
    ) -> "QueryBuilder[T_MetaInstance]":
        """
        Enable caching for this query to load repeated calls from a dill row \
        instead of executing the sql and collecting matching rows again.
        """
        existing = self.metadata.get("cache", {})

        metadata: Metadata = {}

        # merge with any cache settings from an earlier .cache() call; new deps are appended:
        cache_meta = typing.cast(
            CacheMetadata,
            self.metadata.get("cache", {})
            | {
                "enabled": True,
                "depends_on": existing.get("depends_on", []) + [str(_) for _ in deps],
                "expires_at": get_expire(expires_at=expires_at, ttl=ttl),
            },
        )

        metadata["cache"] = cache_meta
        return self._extend(metadata=metadata)

    def _get_db(self) -> TypeDAL:
        # the model only has a db after db.define / @define was applied to it:
        if db := self.model._db:
            return db
        else:  # pragma: no cover
            raise EnvironmentError("@define or db.define is not called on this class yet!")

    def _select_arg_convert(self, arg: Any) -> Any:
        # typedfield are not really used at runtime anymore, but leave it in for safety:
        if isinstance(arg, TypedField):  # pragma: no cover
            arg = arg._field

        return arg

    def delete(self) -> list[int]:
        """
        Based on the current query, delete rows and return a list of deleted IDs.
        """
        db = self._get_db()
        # capture the ids BEFORE deleting, since they are gone afterwards:
        removed_ids = [_.id for _ in db(self.query).select("id")]
        if db(self.query).delete():
            # success!
            return removed_ids

        return []

    def _delete(self) -> str:
        # return the DELETE statement's SQL without executing it
        db = self._get_db()
        return str(db(self.query)._delete())

    def update(self, **fields: Any) -> list[int]:
        """
        Based on the current query, update `fields` and return a list of updated IDs.
        """
        # todo: limit?
        db = self._get_db()
        updated_ids = db(self.query).select("id").column("id")
        if db(self.query).update(**fields):
            # success!
            return updated_ids

        return []

    def _update(self, **fields: Any) -> str:
        # return the UPDATE statement's SQL without executing it
        db = self._get_db()
        return str(db(self.query)._update(**fields))

    def _before_query(self, mut_metadata: Metadata, add_id: bool = True) -> tuple[Query, list[Any], dict[str, Any]]:
        # Prepare (query, select args, select kwargs) for execution.
        # NOTE: mut_metadata is mutated in place (hence the name).
        select_args = [self._select_arg_convert(_) for _ in self.select_args] or [self.model.ALL]
        select_kwargs = self.select_kwargs.copy()
        query = self.query
        model = self.model
        mut_metadata["query"] = query
        # require at least id of main table:
        select_fields = ", ".join([str(_) for _ in select_args])
        tablename = str(model)

        if add_id and f"{tablename}.id" not in select_fields:
            # fields of other selected, but required ID is missing.
            select_args.append(model.id)

        if self.relationships:
            query, select_args = self._handle_relationships_pre_select(query, select_args, select_kwargs, mut_metadata)

        return query, select_args, select_kwargs

    def to_sql(self, add_id: bool = False) -> str:
        """
        Generate the SQL for the built query.
        """
        db = self._get_db()

        query, select_args, select_kwargs = self._before_query({}, add_id=add_id)

        return str(db(query)._select(*select_args, **select_kwargs))

    def _collect(self) -> str:
        """
        Alias for to_sql, pydal-like syntax.
        """
        return self.to_sql()

    def _collect_cached(self, metadata: Metadata) -> "TypedRows[T_MetaInstance] | None":
        # Try to load a previously cached result for this exact query.
        expires_at = metadata["cache"].get("expires_at")
        metadata["cache"] |= {
            # key is partly dependent on cache metadata but not these:
            "key": None,
            "status": None,
            "cached_at": None,
            "expires_at": None,
        }

        _, key = create_and_hash_cache_key(
            self.model,
            metadata,
            self.query,
            self.select_args,
            self.select_kwargs,
            self.relationships.keys(),
        )

        # re-set after creating key:
        metadata["cache"]["expires_at"] = expires_at
        metadata["cache"]["key"] = key

        return load_from_cache(key)

    def collect(
        self, verbose: bool = False, _to: typing.Type["TypedRows[Any]"] = None, add_id: bool = True
    ) -> "TypedRows[T_MetaInstance]":
        """
        Execute the built query and turn it into model instances, while handling relationships.
        """
        if _to is None:
            _to = TypedRows

        db = self._get_db()
        metadata = typing.cast(Metadata, self.metadata.copy())

        # serve from cache when enabled and a (non-expired) entry exists:
        if metadata.get("cache", {}).get("enabled") and (result := self._collect_cached(metadata)):
            return result

        query, select_args, select_kwargs = self._before_query(metadata, add_id=add_id)

        metadata["sql"] = db(query)._select(*select_args, **select_kwargs)

        if verbose:  # pragma: no cover
            print(metadata["sql"])

        rows: Rows = db(query).select(*select_args, **select_kwargs)

        metadata["final_query"] = str(query)
        metadata["final_args"] = [str(_) for _ in select_args]
        metadata["final_kwargs"] = select_kwargs

        if verbose:  # pragma: no cover
            print(rows)

        if not self.relationships:
            # easy
            typed_rows = _to.from_rows(rows, self.model, metadata=metadata)

        else:
            # harder: try to match rows to the belonging objects
            # assume structure of {'table': <data>} per row.
            # if that's not the case, return default behavior again
            typed_rows = self._collect_with_relationships(rows, metadata=metadata, _to=_to)

        # only saves if requested in metadata:
        return save_to_cache(typed_rows, rows)

    def _handle_relationships_pre_select(
        self,
        query: Query,
        select_args: list[Any],
        select_kwargs: dict[str, Any],
        metadata: Metadata,
    ) -> tuple[Query, list[Any]]:
        # Rewrite query/select args so related tables are joined in.
        # Tables are aliased as "<key>_<hash(relation)>" so the same target table
        # can be joined multiple times under different relationship names.
        db = self._get_db()
        model = self.model

        metadata["relationships"] = set(self.relationships.keys())

        # query = self._update_query_for_inner(db, model, query)
        join = []
        for key, relation in self.relationships.items():
            if not relation.condition or relation.join != "inner":
                continue

            other = relation.get_table(db)
            other = other.with_alias(f"{key}_{hash(relation)}")
            join.append(other.on(relation.condition(model, other)))

        if limitby := select_kwargs.pop("limitby", None):
            # if limitby + relationships:
            # 1. get IDs of main table entries that match 'query'
            # 2. change query to .belongs(id)
            # 3. add joins etc

            kwargs = {"limitby": limitby}

            if join:
                kwargs["join"] = join

            ids = db(query)._select(model.id, **kwargs)
            query = model.id.belongs(ids)
            metadata["ids"] = ids

        if join:
            select_kwargs["join"] = join

        left = []

        for key, relation in self.relationships.items():
            other = relation.get_table(db)
            method: JOIN_OPTIONS = relation.join or DEFAULT_JOIN_OPTION

            select_fields = ", ".join([str(_) for _ in select_args])
            pre_alias = str(other)

            if f"{other}." not in select_fields:
                # no fields of other selected. add .ALL:
                select_args.append(other.ALL)
            elif f"{other}.id" not in select_fields:
                # fields of other selected, but required ID is missing.
                select_args.append(other.id)

            if relation.on:
                # if it has a .on, it's always a left join!
                on = relation.on(model, other)
                if not isinstance(on, list):  # pragma: no cover
                    on = [on]

                left.extend(on)
            elif method == "left":
                # .on not given, generate it:
                other = other.with_alias(f"{key}_{hash(relation)}")
                condition = typing.cast(Query, relation.condition(model, other))
                left.append(other.on(condition))
            else:
                # else: inner join (handled earlier)
                other = other.with_alias(f"{key}_{hash(relation)}")  # only for replace
                # other = other.with_alias(f"{key}_{hash(relation)}")
                # query &= relation.condition(model, other)

            # if no fields of 'other' are included, add other.ALL
            # else: only add other.id if missing
            select_fields = ", ".join([str(_) for _ in select_args])

            post_alias = str(other).split(" AS ")[-1]
            if pre_alias != post_alias:
                # replace .select's with aliased:
                select_fields = select_fields.replace(
                    f"{pre_alias}.",
                    f"{post_alias}.",
                )

            # NOTE(review): select args are round-tripped through a comma-joined string
            # here, which assumes no field expression itself contains ", " — verify.
            select_args = select_fields.split(", ")

        select_kwargs["left"] = left
        return query, select_args

    def _collect_with_relationships(
        self, rows: Rows, metadata: Metadata, _to: typing.Type["TypedRows[Any]"] = None
    ) -> "TypedRows[T_MetaInstance]":
        """
        Transform the raw rows into Typed Table model instances.
        """
        db = self._get_db()
        main_table = self.model._ensure_table_defined()

        records = {}
        seen_relations: dict[str, set[str]] = defaultdict(set)  # main id -> set of col + id for relation

        for row in rows:
            main = row[main_table]
            main_id = main.id

            if main_id not in records:
                records[main_id] = self.model(main)
                records[main_id]._with = list(self.relationships.keys())

                # setup up all relationship defaults (once)
                for col, relationship in self.relationships.items():
                    records[main_id][col] = [] if relationship.multiple else None

            # now add other relationship data
            for column, relation in self.relationships.items():
                relationship_column = f"{column}_{hash(relation)}"

                # relationship_column works for aliases with the same target column.
                # if col + relationship not in the row, just use the regular name.

                relation_data = (
                    row[relationship_column] if relationship_column in row else row[relation.get_table_name()]
                )

                if relation_data.id is None:
                    # always skip None ids
                    continue

                if f"{column}-{relation_data.id}" in seen_relations[main_id]:
                    # speed up duplicates
                    continue
                else:
                    seen_relations[main_id].add(f"{column}-{relation_data.id}")

                relation_table = relation.get_table(db)
                # hopefully an instance of a typed table and a regular row otherwise:
                instance = relation_table(relation_data) if looks_like(relation_table, TypedTable) else relation_data

                if relation.multiple:
                    # create list of T
                    if not isinstance(records[main_id].get(column), list):  # pragma: no cover
                        # should already be set up before!
                        setattr(records[main_id], column, [])

                    records[main_id][column].append(instance)
                else:
                    # create single T
                    records[main_id][column] = instance

        return _to(rows, self.model, records, metadata=metadata)

    def collect_or_fail(self, exception: Exception = None) -> "TypedRows[T_MetaInstance]":
        """
        Call .collect() and raise an error if nothing found.

        Basically unwraps Optional type.
        """
        if result := self.collect():
            return result

        if not exception:
            exception = ValueError("Nothing found!")

        raise exception

    def __iter__(self) -> typing.Generator[T_MetaInstance, None, None]:
        """
        You can start iterating a Query Builder object before calling collect, for ease of use.
        """
        yield from self.collect()

    def count(self) -> int:
        """
        Return the amount of rows matching the current query.

        Inner-join relationship conditions are AND-ed in so the count matches
        what .collect() would return.
        """
        db = self._get_db()
        model = self.model
        query = self.query

        for key, relation in self.relationships.items():
            if not relation.condition or relation.join != "inner":
                continue

            other = relation.get_table(db)
            other = other.with_alias(f"{key}_{hash(relation)}")
            query &= relation.condition(model, other)

        return db(query).count()

    def __paginate(
        self,
        limit: int,
        page: int = 1,
    ) -> "QueryBuilder[T_MetaInstance]":
        # Translate 1-based (limit, page) into pydal's (offset, offset+limit) limitby
        # and record pagination info in the metadata.
        _from = limit * (page - 1)
        _to = limit * page

        available = self.count()

        metadata: Metadata = {}

        metadata["pagination"] = {
            "limit": limit,
            "current_page": page,
            "max_page": math.ceil(available / limit),
            "rows": available,
            "min_max": (_from, _to),
        }

        return self._extend(select_kwargs={"limitby": (_from, _to)}, metadata=metadata)

    def paginate(self, limit: int, page: int = 1, verbose: bool = False) -> "PaginatedRows[T_MetaInstance]":
        """
        Paginate transforms the more readable `page` and `limit` to pydals internal limit and offset.

        Note: when using relationships, this limit is only applied to the 'main' table and any number of extra rows \
        can be loaded with relationship data!
        """
        builder = self.__paginate(limit, page)

        rows = typing.cast(PaginatedRows[T_MetaInstance], builder.collect(verbose=verbose, _to=PaginatedRows))

        # the rows keep a reference to the builder so .next()/.previous() can re-query:
        rows._query_builder = builder
        return rows

    def _paginate(
        self,
        limit: int,
        page: int = 1,
    ) -> str:
        # SQL-only variant of paginate (returns the statement, does not execute it):
        builder = self.__paginate(limit, page)
        return builder._collect()

    def chunk(self, chunk_size: int) -> typing.Generator["TypedRows[T_MetaInstance]", Any, None]:
        """
        Generator that yields rows from a paginated source in chunks.

        This function retrieves rows from a paginated data source in chunks of the
        specified `chunk_size` and yields them as TypedRows.

        Example:
            ```
            for chunk_of_rows in Table.where(SomeTable.id > 5).chunk(100):
                for row in chunk_of_rows:
                    # Process each row within the chunk.
                    pass
            ```
        """
        page = 1

        # stops as soon as a page comes back empty (falsy):
        while rows := self.__paginate(chunk_size, page).collect():
            yield rows
            page += 1

    def first(self, verbose: bool = False) -> T_MetaInstance | None:
        """
        Get the first row matching the currently built query.

        Also adds paginate, since it would be a waste to select more rows than needed.
        """
        if row := self.paginate(page=1, limit=1, verbose=verbose).first():
            return self.model.from_row(row)
        else:
            return None

    def _first(self) -> str:
        # SQL-only variant of first (returns the statement, does not execute it):
        return self._paginate(page=1, limit=1)

    def first_or_fail(self, exception: Exception = None, verbose: bool = False) -> T_MetaInstance:
        """
        Call .first() and raise an error if nothing found.

        Basically unwraps Optional type.
        """
        if inst := self.first(verbose=verbose):
            return inst

        if not exception:
            exception = ValueError("Nothing found!")

        raise exception

2643 

2644 

# Spare generic type variable (distinct from T/T_Value/T_MetaInstance above);
# not referenced within this section of the file.
S = typing.TypeVar("S")

2646 

2647 

class PaginatedRows(TypedRows[T_MetaInstance]):
    """
    Result type used when calling .paginate() instead of .collect().

    On top of a regular TypedRows result it exposes pagination info and
    can navigate to adjacent pages via the query builder it was created from.
    """

    # the builder that produced this page; used by next()/previous() to re-query.
    _query_builder: QueryBuilder[T_MetaInstance]

    @property
    def data(self) -> list[T_MetaInstance]:
        """
        The rows of this page as a plain list.
        """
        return list(self.records.values())

    @property
    def pagination(self) -> Pagination:
        """
        Summarize the paging state (totals plus next/previous page numbers).
        """
        info = self.metadata["pagination"]
        current = info["current_page"]

        more_after = current < info["max_page"]
        more_before = current > 1
        return {
            "total_items": info["rows"],
            "current_page": current,
            "per_page": info["limit"],
            "total_pages": info["max_page"],
            "has_next_page": more_after,
            "has_prev_page": more_before,
            "next_page": current + 1 if more_after else None,
            "prev_page": current - 1 if more_before else None,
        }

    def next(self) -> Self:  # noqa: A003
        """
        Load the following page; raises StopIteration on the final page.
        """
        info = self.metadata["pagination"]
        current = info["current_page"]
        if current >= info["max_page"]:
            raise StopIteration("Final Page")

        return self._query_builder.paginate(limit=info["limit"], page=current + 1)

    def previous(self) -> Self:
        """
        Load the preceding page; raises StopIteration on the first page.
        """
        info = self.metadata["pagination"]
        current = info["current_page"]
        if current <= 1:
            raise StopIteration("First Page")

        return self._query_builder.paginate(limit=info["limit"], page=current - 1)

    def as_dict(self, *_: Any, **__: Any) -> PaginateDict:  # type: ignore
        """
        Convert to a dictionary with pagination info and original data.

        All arguments are ignored!
        """
        return {"data": super().as_dict(), "pagination": self.pagination}

2709 

2710 

2711class TypedSet(pydal.objects.Set): # type: ignore # pragma: no cover 

2712 """ 

2713 Used to make pydal Set more typed. 

2714 

2715 This class is not actually used, only 'cast' by TypeDAL.__call__ 

2716 """ 

2717 

2718 def count(self, distinct: bool = None, cache: dict[str, Any] = None) -> int: 

2719 """ 

2720 Count returns an int. 

2721 """ 

2722 result = super().count(distinct, cache) 

2723 return typing.cast(int, result) 

2724 

2725 def select(self, *fields: Any, **attributes: Any) -> TypedRows[T_MetaInstance]: 

2726 """ 

2727 Select returns a TypedRows of a user defined table. 

2728 

2729 Example: 

2730 result: TypedRows[MyTable] = db(MyTable.id > 0).select() 

2731 

2732 for row in result: 

2733 typing.reveal_type(row) # MyTable 

2734 """ 

2735 rows = super().select(*fields, **attributes) 

2736 return typing.cast(TypedRows[T_MetaInstance], rows)