#!/usr/bin/env python

"""
camcops_server/cc_modules/cc_db.py

===============================================================================

    Copyright (C) 2012-2020 Rudolf Cardinal (rudolf@pobox.com).

    This file is part of CamCOPS.

    CamCOPS is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    CamCOPS is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with CamCOPS. If not, see <https://www.gnu.org/licenses/>.

===============================================================================

**Common database code, e.g. mixins for tables that are uploaded from the
client.**

"""

from collections import OrderedDict
import logging
from typing import (Any, Callable, Dict, Generator, Iterable, List, NoReturn,
                    Optional, Set, Tuple, Type, TYPE_CHECKING, TypeVar, Union)

from cardinal_pythonlib.logs import BraceStyleAdapter
from cardinal_pythonlib.sqlalchemy.orm_inspect import gen_columns
from pendulum import DateTime as Pendulum
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import relationship
from sqlalchemy.orm.relationships import RelationshipProperty
from sqlalchemy.orm import Session as SqlASession
from sqlalchemy.sql.functions import func
from sqlalchemy.sql.schema import Column, ForeignKey
from sqlalchemy.sql.sqltypes import Boolean, DateTime, Integer

from camcops_server.cc_modules.cc_constants import (
    ERA_NOW,
    EXTRA_COMMENT_PREFIX,
    EXTRA_TASK_TABLENAME_FIELD,
    EXTRA_TASK_SERVER_PK_FIELD,
)
from camcops_server.cc_modules.cc_sqla_coltypes import (
    CamcopsColumn,
    EraColType,
    gen_ancillary_relationships,
    gen_camcops_blob_columns,
    PendulumDateTimeAsIsoTextColType,
    PermittedValueChecker,
    RelationshipInfo,
    SemanticVersionColType,
    TableNameColType,
)
from camcops_server.cc_modules.cc_simpleobjects import TaskExportOptions
from camcops_server.cc_modules.cc_tsv import TsvPage
from camcops_server.cc_modules.cc_version import CAMCOPS_SERVER_VERSION
from camcops_server.cc_modules.cc_xml import (
    make_xml_branches_from_blobs,
    make_xml_branches_from_columns,
    make_xml_branches_from_summaries,
    XML_COMMENT_STORED,
    XML_COMMENT_CALCULATED,
    XmlElement,
)

if TYPE_CHECKING:
    from camcops_server.cc_modules.cc_blob import Blob  # noqa: F401
    from camcops_server.cc_modules.cc_patient import Patient  # noqa: F401
    from camcops_server.cc_modules.cc_request import CamcopsRequest  # noqa: E501,F401
    from camcops_server.cc_modules.cc_summaryelement import SummaryElement  # noqa: E501,F401
    from camcops_server.cc_modules.cc_task import Task  # noqa: F401

log = BraceStyleAdapter(logging.getLogger(__name__))


# =============================================================================
# Hacks for specific database drivers
# =============================================================================

CRASH_ON_BAD_CONVERSIONS = False  # for debugging only!

if CRASH_ON_BAD_CONVERSIONS:
    log.error("DANGER: CRASH_ON_BAD_CONVERSIONS set in cc_db.py")

try:
    import MySQLdb
    import MySQLdb.converters
except ImportError:
    MySQLdb = None

try:
    import pymysql
    import pymysql.converters
except ImportError:
    pymysql = None

_SQL_LITERAL_TYPE = Union[int, float, str]

_MYSQL_CONVERSION_DICT_TYPE = Dict[Any, Callable]
_MYSQLDB_PYTHON_TO_DB_TYPE = Callable[[Any, _MYSQL_CONVERSION_DICT_TYPE],
                                      _SQL_LITERAL_TYPE]  # f(o, d) -> s
_MYSQLDB_DB_TO_PYTHON_TYPE = Callable[[_SQL_LITERAL_TYPE], Any]  # f(s) -> o

_PYMYSQL_ENCODER_DICT_TYPE = Dict[Type, Callable]
_PYMYSQL_PYTHON_TO_DB_TYPE = Callable[[Any, Optional[_PYMYSQL_ENCODER_DICT_TYPE]],  # noqa
                                      _SQL_LITERAL_TYPE]  # f(o, mapping) -> s
_PYMYSQL_DB_TO_PYTHON_TYPE = Callable[[_SQL_LITERAL_TYPE], Any]

def mysqldb_crash_on_bad_conversion(o: Any,
                                    d: _MYSQL_CONVERSION_DICT_TYPE) -> \
        NoReturn:
    """
    Reports a bad conversion and crashes. For debugging only (obviously)!

    **Conversions by mysqlclient (MySQLdb)**

    As per the help docstring for ``MySQLdb/converters.py``:

    - The Python-to-database conversion function has the signature ``f(o, d)``,
      where ``o`` is the thing to be converted (such as a datetime.datetime)
      and ``d`` is the conversion dictionary; it returns an SQL literal value.

    - The database-to-Python conversion function has the signature ``f(s)``,
      where ``s`` is a string; it returns a Python object.

    Both types of functions are stored in ``MySQLdb.converters``, which is a
    ``dict``. The keys named ``FIELD_TYPE.*`` are the database-to-Python
    converters; the others are the Python-to-database converters.

    **Conversions by pymysql**

    Similar (for backwards compatibility), but not the same.

    - ``pymysql.converters.conversions`` is ``pymysql.converters.decoders`` and
      contains database-to-Python converters.

    - ``pymysql.converters.encoders`` contains Python-to-database converters.

    Args:
        o: Python object
        d: MySQLdb conversion dictionary

    Returns:
        SQL literal
    """
    failmsg = (
        f"mysqldb_crash_on_bad_conversion: attempting to convert bad Python "
        f"object to database: {o!r}. Conversion dict is {d!r}."
    )
    log.critical(failmsg)
    raise RuntimeError(failmsg)


def pymysql_crash_on_bad_conversion(obj: Any,
                                    mapping: _PYMYSQL_ENCODER_DICT_TYPE) -> \
        NoReturn:
    """
    See :func:`mysqldb_crash_on_bad_conversion`.
    """
    failmsg = (
        f"pymysql_crash_on_bad_conversion: attempting to convert bad Python "
        f"object to database: {obj!r}. Mapping dict is {mapping!r}."
    )
    log.critical(failmsg)
    raise RuntimeError(failmsg)

# -----------------------------------------------------------------------------
# Pendulum; see https://pypi.org/project/pendulum/ -- but note that it says
# "pymysql.converters.conversions" but should say
# "pymysql.converters.encoders".
# -----------------------------------------------------------------------------

if MySQLdb:
    log.debug("Hacking MySQLdb to support pendulum.DateTime")
    if CRASH_ON_BAD_CONVERSIONS:
        MySQLdb.converters.conversions[Pendulum] = mysqldb_crash_on_bad_conversion  # noqa
    else:
        MySQLdb.converters.conversions[Pendulum] = MySQLdb.converters.DateTime2literal  # noqa

if pymysql:
    log.debug("Hacking pymysql to support pendulum.DateTime")
    if CRASH_ON_BAD_CONVERSIONS:
        pymysql.converters.encoders[Pendulum] = pymysql_crash_on_bad_conversion
    else:
        pymysql.converters.encoders[Pendulum] = pymysql.converters.escape_datetime  # noqa

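# A note on extending the hack above: the same registration pattern could, in
# principle, be used for other Python types, following the converter
# signatures documented in mysqldb_crash_on_bad_conversion() above. The sketch
# below is illustrative only and deliberately commented out; "Money" is a
# hypothetical Python class whose str() representation is suitable for quoting:
#
#     def money_to_sql_literal(o: "Money",
#                              d: _MYSQL_CONVERSION_DICT_TYPE) -> str:
#         # Python-to-database converter for MySQLdb: f(o, d) -> SQL literal.
#         return "'{}'".format(str(o).replace("'", "''"))
#
#     if MySQLdb:
#         MySQLdb.converters.conversions[Money] = money_to_sql_literal
#     if pymysql:
#         # pymysql's Python-to-database converters live in
#         # pymysql.converters.encoders and take f(obj, mapping):
#         pymysql.converters.encoders[Money] = (
#             lambda obj, mapping=None: money_to_sql_literal(obj, mapping)
#         )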

# =============================================================================
# Constants
# =============================================================================

T = TypeVar('T')

# Database fieldname constants. Do not change. Used here and in client_api.py
FN_PK = "_pk"
FN_DEVICE_ID = "_device_id"
FN_ERA = "_era"
FN_CURRENT = "_current"
FN_WHEN_ADDED_EXACT = "_when_added_exact"
FN_WHEN_ADDED_BATCH_UTC = "_when_added_batch_utc"
FN_ADDING_USER_ID = "_adding_user_id"
FN_WHEN_REMOVED_EXACT = "_when_removed_exact"
FN_WHEN_REMOVED_BATCH_UTC = "_when_removed_batch_utc"
FN_REMOVING_USER_ID = "_removing_user_id"
FN_PRESERVING_USER_ID = "_preserving_user_id"
FN_FORCIBLY_PRESERVED = "_forcibly_preserved"
FN_PREDECESSOR_PK = "_predecessor_pk"
FN_SUCCESSOR_PK = "_successor_pk"
FN_MANUALLY_ERASED = "_manually_erased"
FN_MANUALLY_ERASED_AT = "_manually_erased_at"
FN_MANUALLY_ERASING_USER_ID = "_manually_erasing_user_id"
FN_CAMCOPS_VERSION = "_camcops_version"
FN_ADDITION_PENDING = "_addition_pending"
FN_REMOVAL_PENDING = "_removal_pending"
FN_GROUP_ID = "_group_id"

# Common fieldnames used by all tasks. Do not change.
TFN_WHEN_CREATED = "when_created"
TFN_WHEN_FIRSTEXIT = "when_firstexit"
TFN_FIRSTEXIT_IS_FINISH = "firstexit_is_finish"
TFN_FIRSTEXIT_IS_ABORT = "firstexit_is_abort"
TFN_EDITING_TIME_S = "editing_time_s"

# =============================================================================
# GenericTabletRecordMixin
# =============================================================================

# noinspection PyAttributeOutsideInit
class GenericTabletRecordMixin(object):
    """
    Mixin for all tables that are uploaded from the client, representing the
    fields that the server adds at the point of upload.

    From the server's perspective, ``_pk`` is the unique primary key.

    However, records are defined also in their tablet context, for which an
    individual tablet (defined by the combination of ``_device_id`` and
    ``_era``) sees its own PK, ``id``.
    """
    __tablename__ = None  # type: str  # sorts out some mixin type checking

    # -------------------------------------------------------------------------
    # On the server side:
    # -------------------------------------------------------------------------

    # Plain columns

    # noinspection PyMethodParameters
    @declared_attr
    def _pk(cls) -> Column:
        return Column(
            FN_PK, Integer,
            primary_key=True, autoincrement=True, index=True,
            comment="(SERVER) Primary key (on the server)"
        )

    # noinspection PyMethodParameters
    @declared_attr
    def _device_id(cls) -> Column:
        return Column(
            FN_DEVICE_ID, Integer, ForeignKey("_security_devices.id",
                                              use_alter=True),
            nullable=False, index=True,
            comment="(SERVER) ID of the source tablet device"
        )

    # noinspection PyMethodParameters
    @declared_attr
    def _era(cls) -> Column:
        return Column(
            FN_ERA, EraColType,
            nullable=False, index=True,
            comment="(SERVER) 'NOW', or when this row was preserved and "
                    "removed from the source device (UTC ISO 8601)",
        )
    # ... note that _era is textual so that plain comparison
    # with "=" always works, i.e. no NULLs -- for USER comparison too, not
    # just in CamCOPS code

    # noinspection PyMethodParameters
    @declared_attr
    def _current(cls) -> Column:
        return Column(
            FN_CURRENT, Boolean,
            nullable=False, index=True,
            comment="(SERVER) Is the row current (1) or not (0)?"
        )

    # noinspection PyMethodParameters
    @declared_attr
    def _when_added_exact(cls) -> Column:
        return Column(
            FN_WHEN_ADDED_EXACT, PendulumDateTimeAsIsoTextColType,
            comment="(SERVER) Date/time this row was added (ISO 8601)"
        )

    # noinspection PyMethodParameters
    @declared_attr
    def _when_added_batch_utc(cls) -> Column:
        return Column(
            FN_WHEN_ADDED_BATCH_UTC, DateTime,
            comment="(SERVER) Date/time of the upload batch that added this "
                    "row (DATETIME in UTC)"
        )

    # noinspection PyMethodParameters
    @declared_attr
    def _adding_user_id(cls) -> Column:
        return Column(
            FN_ADDING_USER_ID, Integer,
            ForeignKey("_security_users.id"),
            comment="(SERVER) ID of user that added this row",
        )

    # noinspection PyMethodParameters
    @declared_attr
    def _when_removed_exact(cls) -> Column:
        return Column(
            FN_WHEN_REMOVED_EXACT, PendulumDateTimeAsIsoTextColType,
            comment="(SERVER) Date/time this row was removed, i.e. made "
                    "not current (ISO 8601)"
        )

    # noinspection PyMethodParameters
    @declared_attr
    def _when_removed_batch_utc(cls) -> Column:
        return Column(
            FN_WHEN_REMOVED_BATCH_UTC, DateTime,
            comment="(SERVER) Date/time of the upload batch that removed "
                    "this row (DATETIME in UTC)"
        )

    # noinspection PyMethodParameters
    @declared_attr
    def _removing_user_id(cls) -> Column:
        return Column(
            FN_REMOVING_USER_ID, Integer,
            ForeignKey("_security_users.id"),
            comment="(SERVER) ID of user that removed this row"
        )

    # noinspection PyMethodParameters
    @declared_attr
    def _preserving_user_id(cls) -> Column:
        return Column(
            FN_PRESERVING_USER_ID, Integer,
            ForeignKey("_security_users.id"),
            comment="(SERVER) ID of user that preserved this row"
        )

    # noinspection PyMethodParameters
    @declared_attr
    def _forcibly_preserved(cls) -> Column:
        return Column(
            FN_FORCIBLY_PRESERVED, Boolean, default=False,
            comment="(SERVER) Forcibly preserved by superuser (rather than "
                    "normally preserved by tablet)?"
        )

    # noinspection PyMethodParameters
    @declared_attr
    def _predecessor_pk(cls) -> Column:
        return Column(
            FN_PREDECESSOR_PK, Integer,
            comment="(SERVER) PK of predecessor record, prior to modification"
        )

    # noinspection PyMethodParameters
    @declared_attr
    def _successor_pk(cls) -> Column:
        return Column(
            FN_SUCCESSOR_PK, Integer,
            comment="(SERVER) PK of successor record (after modification) "
                    "or NULL (whilst live, or after deletion)"
        )

    # noinspection PyMethodParameters
    @declared_attr
    def _manually_erased(cls) -> Column:
        return Column(
            FN_MANUALLY_ERASED, Boolean, default=False,
            comment="(SERVER) Record manually erased (content destroyed)?"
        )

    # noinspection PyMethodParameters
    @declared_attr
    def _manually_erased_at(cls) -> Column:
        return Column(
            FN_MANUALLY_ERASED_AT, PendulumDateTimeAsIsoTextColType,
            comment="(SERVER) Date/time of manual erasure (ISO 8601)"
        )

    # noinspection PyMethodParameters
    @declared_attr
    def _manually_erasing_user_id(cls) -> Column:
        return Column(
            FN_MANUALLY_ERASING_USER_ID, Integer,
            ForeignKey("_security_users.id"),
            comment="(SERVER) ID of user that erased this row manually"
        )

    # noinspection PyMethodParameters
    @declared_attr
    def _camcops_version(cls) -> Column:
        return Column(
            FN_CAMCOPS_VERSION, SemanticVersionColType,
            default=CAMCOPS_SERVER_VERSION,
            comment="(SERVER) CamCOPS version number of the uploading device"
        )

    # noinspection PyMethodParameters
    @declared_attr
    def _addition_pending(cls) -> Column:
        return Column(
            FN_ADDITION_PENDING, Boolean, nullable=False, default=False,
            comment="(SERVER) Addition pending?"
        )

    # noinspection PyMethodParameters
    @declared_attr
    def _removal_pending(cls) -> Column:
        return Column(
            FN_REMOVAL_PENDING, Boolean, default=False,
            comment="(SERVER) Removal pending?"
        )

    # noinspection PyMethodParameters
    @declared_attr
    def _group_id(cls) -> Column:
        return Column(
            FN_GROUP_ID, Integer, ForeignKey("_security_groups.id"),
            nullable=False, index=True,
            comment="(SERVER) ID of group to which this record belongs"
        )

    RESERVED_FIELDS = [  # fields that tablets can't upload
        FN_PK,
        FN_DEVICE_ID,
        FN_ERA,
        FN_CURRENT,
        FN_WHEN_ADDED_EXACT,
        FN_WHEN_ADDED_BATCH_UTC,
        FN_ADDING_USER_ID,
        FN_WHEN_REMOVED_EXACT,
        FN_WHEN_REMOVED_BATCH_UTC,
        FN_REMOVING_USER_ID,
        FN_PRESERVING_USER_ID,
        FN_FORCIBLY_PRESERVED,
        FN_PREDECESSOR_PK,
        FN_SUCCESSOR_PK,
        FN_MANUALLY_ERASED,
        FN_MANUALLY_ERASED_AT,
        FN_MANUALLY_ERASING_USER_ID,
        FN_CAMCOPS_VERSION,
        FN_ADDITION_PENDING,
        FN_REMOVAL_PENDING,
        FN_GROUP_ID,
    ]  # but more generally: they start with "_"...
    assert(all(x.startswith("_") for x in RESERVED_FIELDS))

    # -------------------------------------------------------------------------
    # Fields that *all* client tables have:
    # -------------------------------------------------------------------------

    # noinspection PyMethodParameters
    @declared_attr
    def id(cls) -> Column:
        return Column(
            "id", Integer,
            nullable=False, index=True,
            comment="(TASK) Primary key (task ID) on the tablet device"
        )

    # noinspection PyMethodParameters
    @declared_attr
    def when_last_modified(cls) -> Column:
        return Column(
            "when_last_modified", PendulumDateTimeAsIsoTextColType,
            index=True,  # ... as used by database upload script
            comment="(STANDARD) Date/time this row was last modified on the "
                    "source tablet device (ISO 8601)"
        )

    # noinspection PyMethodParameters
    @declared_attr
    def _move_off_tablet(cls) -> Column:
        return Column(
            "_move_off_tablet", Boolean, default=False,
            comment="(SERVER/TABLET) Record-specific preservation pending?"
        )

    # -------------------------------------------------------------------------
    # Relationships
    # -------------------------------------------------------------------------

    # noinspection PyMethodParameters
    @declared_attr
    def _device(cls) -> RelationshipProperty:
        return relationship("Device")

    # noinspection PyMethodParameters
    @declared_attr
    def _adding_user(cls) -> RelationshipProperty:
        return relationship("User", foreign_keys=[cls._adding_user_id])

    # noinspection PyMethodParameters
    @declared_attr
    def _removing_user(cls) -> RelationshipProperty:
        return relationship("User", foreign_keys=[cls._removing_user_id])

    # noinspection PyMethodParameters
    @declared_attr
    def _preserving_user(cls) -> RelationshipProperty:
        return relationship("User", foreign_keys=[cls._preserving_user_id])

    # noinspection PyMethodParameters
    @declared_attr
    def _manually_erasing_user(cls) -> RelationshipProperty:
        return relationship("User",
                            foreign_keys=[cls._manually_erasing_user_id])

    # noinspection PyMethodParameters
    @declared_attr
    def _group(cls) -> RelationshipProperty:
        return relationship("Group",
                            foreign_keys=[cls._group_id])

    # -------------------------------------------------------------------------
    # Fetching attributes
    # -------------------------------------------------------------------------

    @property
    def pk(self) -> Optional[int]:
        """
        Returns the (server) primary key of this record.
        """
        return self._pk

    @property
    def era(self) -> Optional[str]:
        """
        Returns the era of this record (a text representation of the date/time
        of the point of record finalization, or ``NOW`` if the record is still
        present on the client device).
        """
        return self._era

    @property
    def device_id(self) -> Optional[int]:
        """
        Returns the client device ID of this record.
        """
        return self._device_id

    @property
    def group_id(self) -> Optional[int]:
        """
        Returns the group ID of this record.
        """
        return self._group_id

    # -------------------------------------------------------------------------
    # Autoscanning objects and their relationships
    # -------------------------------------------------------------------------

    def _get_xml_root(self,
                      req: "CamcopsRequest",
                      options: TaskExportOptions) -> XmlElement:
        """
        Called to create an XML root object for records ancillary to Task
        objects. Tasks themselves use a more complex mechanism.

        Args:
            req: a :class:`camcops_server.cc_modules.cc_request.CamcopsRequest`
            options: a :class:`camcops_server.cc_modules.cc_simpleobjects.TaskExportOptions`
        """  # noqa
        # "__tablename__" will make the type checker complain, as we're
        # defining a function for a mixin that assumes it's mixed in to a
        # SQLAlchemy Base-derived class
        # noinspection PyUnresolvedReferences
        return XmlElement(
            name=self.__tablename__,
            value=self._get_xml_branches(req=req, options=options)
        )

    def _get_xml_branches(self,
                          req: "CamcopsRequest",
                          options: TaskExportOptions) -> List[XmlElement]:
        """
        Gets the values of SQLAlchemy columns as XmlElement objects.
        Optionally, finds any SQLAlchemy relationships that are relationships
        to Blob objects, and includes them too.

        Used by :func:`_get_xml_root` above, but also by Tasks themselves.

        Args:
            req: a :class:`camcops_server.cc_modules.cc_request.CamcopsRequest`
            options: a :class:`camcops_server.cc_modules.cc_simpleobjects.TaskExportOptions`
        """  # noqa
        # log.debug("_get_xml_branches for {!r}", self)
        options = options or TaskExportOptions(xml_include_plain_columns=True,
                                               xml_include_calculated=True,
                                               xml_sort_by_name=True)
        branches = []  # type: List[XmlElement]
        if options.xml_with_header_comments:
            branches.append(XML_COMMENT_STORED)
        if options.xml_include_plain_columns:
            new_branches = make_xml_branches_from_columns(
                self, skip_fields=options.xml_skip_fields)
            if options.xml_sort_by_name:
                new_branches.sort(key=lambda el: el.name)
            branches += new_branches
        if options.include_blobs:
            new_branches = make_xml_branches_from_blobs(
                req, self, skip_fields=options.xml_skip_fields)
            if options.xml_sort_by_name:
                new_branches.sort(key=lambda el: el.name)
            branches += new_branches
        # Calculated
        if options.xml_include_calculated:
            if options.xml_with_header_comments:
                branches.append(XML_COMMENT_CALCULATED)
            branches.extend(make_xml_branches_from_summaries(
                self.get_summaries(req),
                skip_fields=options.xml_skip_fields,
                sort_by_name=options.xml_sort_by_name
            ))
        # log.debug("... branches for {!r}: {!r}", self, branches)
        return branches

    def _get_core_tsv_page(self, req: "CamcopsRequest",
                           heading_prefix: str = "") -> TsvPage:
        """
        Returns a single-row :class:`camcops_server.cc_modules.cc_tsv.TsvPage`,
        like an Excel "sheet", representing this record. (It may be combined
        with others later to produce a multi-row spreadsheet.)
        """
        row = OrderedDict()
        for attrname, column in gen_columns(self):
            row[heading_prefix + attrname] = getattr(self, attrname)
        for s in self.get_summaries(req):
            row[heading_prefix + s.name] = s.value
        return TsvPage(name=self.__tablename__, rows=[row])

    # -------------------------------------------------------------------------
    # Erasing (overwriting data, not deleting the database records)
    # -------------------------------------------------------------------------

    def manually_erase_with_dependants(self, req: "CamcopsRequest") -> None:
        """
        Manually erases a standard record and marks it so erased. Iterates
        through any dependants and does likewise to them.

        The object remains ``_current`` (if it was), as a placeholder, but its
        contents are wiped.

        WRITES TO THE DATABASE.
        """
        if self._manually_erased or self._pk is None or self._era == ERA_NOW:
            # ... _manually_erased: don't do it twice
            # ... _pk: basic sanity check
            # ... _era: don't erase things that are current on the tablet
            return
        # 1. "Erase my dependants"
        for ancillary in self.gen_ancillary_instances_even_noncurrent():
            ancillary.manually_erase_with_dependants(req)
        for blob in self.gen_blobs_even_noncurrent():
            blob.manually_erase_with_dependants(req)
        # 2. "Erase me"
        erasure_attrs = []  # type: List[str]
        for attrname, column in gen_columns(self):
            if attrname.startswith("_"):  # system field
                continue
            if not column.nullable:  # this should cover FKs
                continue
            if column.foreign_keys:  # ... but to be sure...
                continue
            erasure_attrs.append(attrname)
        for attrname in erasure_attrs:
            setattr(self, attrname, None)
        self._current = False
        self._manually_erased = True
        self._manually_erased_at = req.now
        self._manually_erasing_user_id = req.user_id

    def delete_with_dependants(self, req: "CamcopsRequest") -> None:
        """
        Deletes (completely from the database) this record and any
        dependant records.
        """
        if self._pk is None:
            return
        # 1. "Delete my dependants"
        for ancillary in self.gen_ancillary_instances_even_noncurrent():
            ancillary.delete_with_dependants(req)
        for blob in self.gen_blobs_even_noncurrent():
            blob.delete_with_dependants(req)
        # 2. "Delete me"
        dbsession = SqlASession.object_session(self)
        dbsession.delete(self)

    def gen_attrname_ancillary_pairs(self) \
            -> Generator[Tuple[str, "GenericTabletRecordMixin"], None, None]:
        """
        Iterates through and yields all ``_current`` "ancillary" objects
        (typically: records of subtables).

        Yields tuples of ``(attrname, related_record)``.
        """
        for attrname, rel_prop, rel_cls in gen_ancillary_relationships(self):
            if rel_prop.uselist:
                ancillaries = getattr(self, attrname)  # type: List[GenericTabletRecordMixin]  # noqa
            else:
                ancillaries = [getattr(self, attrname)]  # type: List[GenericTabletRecordMixin]  # noqa
            for ancillary in ancillaries:
                if ancillary is None:
                    continue
                yield attrname, ancillary

    def gen_ancillary_instances(self) -> Generator["GenericTabletRecordMixin",
                                                   None, None]:
        """
        Generates all ``_current`` ancillary objects of this object.
        """
        for attrname, ancillary in self.gen_attrname_ancillary_pairs():
            yield ancillary

    def gen_ancillary_instances_even_noncurrent(self) \
            -> Generator["GenericTabletRecordMixin", None, None]:
        """
        Generates all ancillary objects of this object, even non-current
        ones.
        """
        for lineage_member in self._gen_unique_lineage_objects(
                self.gen_ancillary_instances()):
            yield lineage_member

    def gen_blobs(self) -> Generator["Blob", None, None]:
        """
        Generates all ``_current`` BLOBs owned by this object.
        """
        for id_attrname, column in gen_camcops_blob_columns(self):
            relationship_attr = column.blob_relationship_attr_name
            blob = getattr(self, relationship_attr)
            if blob is None:
                continue
            yield blob

    def gen_blobs_even_noncurrent(self) -> Generator["Blob", None, None]:
        """
        Generates all BLOBs owned by this object, even non-current ones.
        """
        for lineage_member in self._gen_unique_lineage_objects(
                self.gen_blobs()):  # type: "Blob"
            yield lineage_member

    def get_lineage(self) -> List["GenericTabletRecordMixin"]:
        """
        Returns all records that are part of the same "lineage", that is:

        - of the same class;
        - matching on id/device_id/era;
        - including both current and any historical non-current versions.

        Will include the "self" object.
        """
        dbsession = SqlASession.object_session(self)
        cls = self.__class__
        q = (
            dbsession.query(cls)
            .filter(cls.id == self.id)
            .filter(cls._device_id == self._device_id)
            .filter(cls._era == self._era)
        )
        return list(q)

    @staticmethod
    def _gen_unique_lineage_objects(
            collection: Iterable["GenericTabletRecordMixin"]) \
            -> Generator["GenericTabletRecordMixin", None, None]:
        """
        Given an iterable of database records, generate all related lineage
        objects for each of them (via :meth:`get_lineage`) that are unique by
        PK.
        """
        seen_pks = set()  # type: Set[int]
        for item in collection:
            if item is None:
                continue
            for lineage_member in item.get_lineage():
                pk = lineage_member.pk
                if pk in seen_pks:
                    continue
                seen_pks.add(pk)
                yield lineage_member

    # -------------------------------------------------------------------------
    # Retrieving a linked record by client ID
    # -------------------------------------------------------------------------

    @classmethod
    def get_linked(cls, client_id: Optional[int],
                   other: "GenericTabletRecordMixin") \
            -> Optional["GenericTabletRecordMixin"]:
        """
        Returns a specific linked record of this class whose client-side ID
        is ``client_id``, and which matches ``other`` in terms of device/era.
        """
        if client_id is None:
            return None
        dbsession = SqlASession.object_session(other)
        # noinspection PyPep8
        q = (
            dbsession.query(cls)
            .filter(cls.id == client_id)
            .filter(cls._device_id == other._device_id)
            .filter(cls._era == other._era)
            .filter(cls._current == True)  # noqa: E712
        )
        return q.first()

    # -------------------------------------------------------------------------
    # History functions for server-side editing
    # -------------------------------------------------------------------------

    def set_predecessor(self, req: "CamcopsRequest",
                        predecessor: "GenericTabletRecordMixin") -> None:
        """
        Used for some unusual server-side manipulations (e.g. editing patient
        details).

        Amends this object so the "self" object replaces the predecessor, so:

        - "self" becomes current and refers back to "predecessor";
        - "predecessor" becomes non-current and refers forward to "self".
        """
        assert predecessor._current
        # We become new and current, and refer to our predecessor
        self._device_id = predecessor._device_id
        self._era = predecessor._era
        self._current = True
        self._when_added_exact = req.now
        self._when_added_batch_utc = req.now_utc
        self._adding_user_id = req.user_id
        if self._era != ERA_NOW:
            self._preserving_user_id = req.user_id
            self._forcibly_preserved = True
        self._predecessor_pk = predecessor._pk
        self._camcops_version = predecessor._camcops_version
        self._group_id = predecessor._group_id
        # Make our predecessor refer to us
        if self._pk is None:
            req.dbsession.add(self)  # ensure we have a PK, part 1
            req.dbsession.flush()  # ensure we have a PK, part 2
        predecessor._set_successor(req, self)

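    # Illustrative sketch only (not part of this class): a typical server-side
    # edit replaces a current row with an amended copy via set_predecessor().
    # Assuming "old" is the current row and "new" is a freshly built object of
    # the same class carrying the amended client fields and the same client id:
    #
    #     new.id = old.id
    #     new.set_predecessor(req, old)
    #
    # Afterwards, "new" is _current and points back via _predecessor_pk, and
    # "old" is no longer _current and points forward via _successor_pk (see
    # _set_successor() below).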

    def _set_successor(self, req: "CamcopsRequest",
                       successor: "GenericTabletRecordMixin") -> None:
        """
        See :func:`set_predecessor` above.
        """
        assert successor._pk is not None
        self._current = False
        self._when_removed_exact = req.now
        self._when_removed_batch_utc = req.now_utc
        self._removing_user_id = req.user_id
        self._successor_pk = successor._pk

    def mark_as_deleted(self, req: "CamcopsRequest") -> None:
        """
        Ends the history chain and marks this record as non-current.
        """
        if self._current:
            self._when_removed_exact = req.now
            self._when_removed_batch_utc = req.now_utc
            self._removing_user_id = req.user_id
            self._current = False

    def create_fresh(self, req: "CamcopsRequest", device_id: int,
                     era: str, group_id: int) -> None:
        """
        Used to create a record from scratch.
        """
        self._device_id = device_id
        self._era = era
        self._group_id = group_id
        self._current = True
        self._when_added_exact = req.now
        self._when_added_batch_utc = req.now_utc
        self._adding_user_id = req.user_id

    def save_with_next_available_id(self, req: "CamcopsRequest",
                                    device_id: int,
                                    era: str = ERA_NOW) -> None:
        """
        Saves a record with the next available client PK in sequence.
        This is of use when creating patients and ID numbers on the server
        to ensure uniqueness, or when fixing up a missing ID number for
        a patient created on a device.
        """
        cls = self.__class__

        saved_ok = False

        # MySQL doesn't support "select for update", so we have to keep
        # trying the next available ID and checking for an integrity
        # error in case another user has grabbed it by the time we have
        # committed.
        # noinspection PyProtectedMember
        last_id = (
            req.dbsession
            # func.max(cls.id) + 1 here will do the right thing for
            # backends that support select for update (maybe not for no rows)
            .query(func.max(cls.id))
            .filter(cls._device_id == device_id)
            .filter(cls._era == era)
            .scalar()
        ) or 0

        next_id = last_id + 1

        while not saved_ok:
            self.id = next_id

            req.dbsession.add(self)

            try:
                req.dbsession.flush()
                saved_ok = True
            except IntegrityError:
                req.dbsession.rollback()
                next_id += 1

    # -------------------------------------------------------------------------
    # Override this if you provide summaries
    # -------------------------------------------------------------------------

    # noinspection PyMethodMayBeStatic
    def get_summaries(self, req: "CamcopsRequest") -> List["SummaryElement"]:
        """
        Return a list of :class:`SummaryElement` objects, for this database
        object (not any dependent classes/tables).

        Note that this is implemented on :class:`GenericTabletRecordMixin`,
        not :class:`camcops_server.cc_modules.cc_task.Task`, so that ancillary
        objects can also provide summaries.
        """
        return []

    def get_summary_names(self, req: "CamcopsRequest") -> List[str]:
        """
        Returns a list of summary field names.
        """
        return [x.name for x in self.get_summaries(req)]

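# Illustrative sketch only (hypothetical table/column names; not part of this
# module): a client-uploaded table is defined by mixing GenericTabletRecordMixin
# into a declarative class. The @declared_attr functions above then supply the
# server-side bookkeeping columns (_pk, _device_id, _era, _current, ...) plus
# the standard client columns (id, when_last_modified, _move_off_tablet):
#
#     class ExampleAncillary(GenericTabletRecordMixin, Base):
#         __tablename__ = "example_ancillary"
#
#         example_task_id = Column("example_task_id", Integer)  # client-side FK to parent
#         seqnum = Column("seqnum", Integer)
#         note = Column("note", UnicodeText)
#
# ... where "Base" is the application's SQLAlchemy declarative base and
# UnicodeText would need importing from sqlalchemy.sql.sqltypes.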

# =============================================================================
# Relationships
# =============================================================================

def ancillary_relationship(
        parent_class_name: str,
        ancillary_class_name: str,
        ancillary_fk_to_parent_attr_name: str,
        ancillary_order_by_attr_name: str = None,
        read_only: bool = True) -> RelationshipProperty:
    """
    Implements a one-to-many relationship, i.e. one parent to many ancillaries.
    """
    parent_pk_attr_name = "id"  # always
    return relationship(
        ancillary_class_name,
        primaryjoin=(
            "and_("
            " remote({a}.{fk}) == foreign({p}.{pk}), "
            " remote({a}._device_id) == foreign({p}._device_id), "
            " remote({a}._era) == foreign({p}._era), "
            " remote({a}._current) == True "
            ")".format(
                a=ancillary_class_name,
                fk=ancillary_fk_to_parent_attr_name,
                p=parent_class_name,
                pk=parent_pk_attr_name,
            )
        ),
        uselist=True,
        order_by="{a}.{f}".format(a=ancillary_class_name,
                                  f=ancillary_order_by_attr_name),
        viewonly=read_only,
        info={
            RelationshipInfo.IS_ANCILLARY: True,
        },
        # ... "info" is a user-defined dictionary; see
        # http://docs.sqlalchemy.org/en/latest/orm/relationship_api.html#sqlalchemy.orm.relationship.params.info  # noqa
        # http://docs.sqlalchemy.org/en/latest/orm/internals.html#MapperProperty.info  # noqa
    )

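# Illustrative sketch only (hypothetical class/attribute names; not part of
# this module): on a parent task class, an ancillary collection would be
# declared like this; the primaryjoin built above then matches rows on the
# client id plus _device_id/_era, restricted to _current ancillary rows:
#
#     items = ancillary_relationship(
#         parent_class_name="ExampleTask",
#         ancillary_class_name="ExampleAncillary",
#         ancillary_fk_to_parent_attr_name="example_task_id",
#         ancillary_order_by_attr_name="seqnum",
#     )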


# =============================================================================
# Field creation assistance
# =============================================================================

# TypeEngineBase = TypeVar('TypeEngineBase', bound=TypeEngine)

def add_multiple_columns(
        cls: Type,
        prefix: str,
        start: int,
        end: int,
        coltype=Integer,
        # this type fails: Union[Type[TypeEngineBase], TypeEngine]
        # ... https://stackoverflow.com/questions/38106227
        # ... https://github.com/python/typing/issues/266
        colkwargs: Dict[str, Any] = None,
        comment_fmt: str = None,
        comment_strings: List[str] = None,
        minimum: Union[int, float] = None,
        maximum: Union[int, float] = None,
        pv: List[Any] = None,
        suffix: str = "") -> None:
    """
    Adds a sequence of SQLAlchemy columns to a class.

    Called from a metaclass.
    Used to make task creation a bit easier.

    Args:
        cls:
            class to which to add columns
        prefix:
            Fieldname will be ``prefix + str(n) + suffix``, where ``n`` is
            defined as below.
        suffix:
            Optional. See ``prefix``.
        start:
            Start of range.
        end:
            End of range. Thus: ``i`` will range from ``0`` to ``(end -
            start)`` inclusive; ``n`` will range from ``start`` to ``end``
            inclusive.
        coltype:
            SQLAlchemy column type, in either of these formats: (a)
            ``Integer`` (of general type ``Type[TypeEngine]``?); (b)
            ``Integer()`` (of general type ``TypeEngine``).
        colkwargs:
            SQLAlchemy column arguments, as in
            ``Column(name, coltype, **colkwargs)``
        comment_fmt:
            Format string defining field comments. Substitutable
            values are:

            - ``{n}``: field number (from range).
            - ``{s}``: ``comment_strings[i]``, where ``i`` is a zero-based
              index as defined above, or "" if out of range.

        comment_strings:
            see ``comment_fmt``
        minimum:
            minimum permitted value, or ``None``
        maximum:
            maximum permitted value, or ``None``
        pv:
            list of permitted values, or ``None``
    """
    colkwargs = {} if colkwargs is None else colkwargs  # type: Dict[str, Any]
    comment_strings = comment_strings or []
    for n in range(start, end + 1):
        nstr = str(n)
        i = n - start
        colname = prefix + nstr + suffix
        if comment_fmt:
            s = ""
            if 0 <= i < len(comment_strings):
                s = comment_strings[i] or ""
            colkwargs["comment"] = comment_fmt.format(n=n, s=s)
        if minimum is not None or maximum is not None or pv is not None:
            colkwargs["permitted_value_checker"] = PermittedValueChecker(
                minimum=minimum,
                maximum=maximum,
                permitted_values=pv
            )
            setattr(cls, colname, CamcopsColumn(colname, coltype, **colkwargs))
        else:
            setattr(cls, colname, Column(colname, coltype, **colkwargs))

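# Illustrative sketch only (hypothetical task/field names; not part of this
# module): a task metaclass might create fields q1..q10, each an integer
# scored 0-4 with a per-question comment, like this:
#
#     add_multiple_columns(
#         cls=ExampleTask,
#         prefix="q",
#         start=1,
#         end=10,
#         coltype=Integer,
#         minimum=0,
#         maximum=4,
#         comment_fmt="Q{n} ({s}) (0-4)",
#         comment_strings=["mood", "sleep", "appetite"],  # "" used beyond this list
#     )
#
# Because minimum/maximum are given, each field is created as a CamcopsColumn
# with a PermittedValueChecker; otherwise plain Column objects would be used.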


# =============================================================================
# TaskDescendant
# =============================================================================

class TaskDescendant(object):
    """
    Information mixin for sub-tables that can be traced back to a task. Used
    to denormalize the database for export in some circumstances.

    Not used for the Blob class, which has no reasonable way of tracing itself
    back to a given task if it is used by a task's ancillary tables rather than
    a primary task row.
    """

    @classmethod
    def task_ancestor_class(cls) -> Optional[Type["Task"]]:
        """
        Returns the class of the ancestral task.

        If the descendant can descend from lots of types of task (rare; only
        applies to :class:`camcops_server.cc_modules.cc_blob.Blob` and
        :class:`camcops_server.cc_modules.cc_summaryelement.ExtraSummaryTable`),
        returns ``None``.
        """  # noqa
        raise NotImplementedError

    @classmethod
    def task_ancestor_might_have_patient(cls) -> bool:
        """
        Does this object have a single task ancestor, that is not anonymous?
        """
        taskcls = cls.task_ancestor_class()
        if not taskcls:
            return True  # e.g. Blob, ExtraSummaryTable
        return not taskcls.is_anonymous

    def task_ancestor_server_pk(self) -> Optional[int]:
        """
        Returns the server PK of the ancestral task.

        Note that this is an export-time calculation; the client may update its
        task rows without updating its descendant rows (so server PKs change
        whilst client IDs don't).
        """
        task = self.task_ancestor()
        if not task:
            return None
        return task.pk

    def task_ancestor(self) -> Optional["Task"]:
        """
        Returns the specific ancestor task of this object.
        """
        raise NotImplementedError

    def task_ancestor_patient(self) -> Optional["Patient"]:
        """
        Returns the associated patient, if there is one.
        """
        task = self.task_ancestor()
        return task.patient if task else None

    @classmethod
    def extra_task_xref_columns(cls) -> List[Column]:
        """
        Returns extra columns used to cross-reference this
        :class:`TaskDescendant` to its ancestor task, in certain export
        formats (``DB_PATIENT_ID_PER_ROW``).
        """
        return [
            Column(
                EXTRA_TASK_TABLENAME_FIELD, TableNameColType,
                comment=EXTRA_COMMENT_PREFIX + "Table name of ancestor task"
            ),
            Column(
                EXTRA_TASK_SERVER_PK_FIELD, Integer,
                comment=EXTRA_COMMENT_PREFIX + "Server PK of ancestor task"
            ),
        ]

    def add_extra_task_xref_info_to_row(self, row: Dict[str, Any]) -> None:
        """
        For the ``DB_PATIENT_ID_PER_ROW`` export option. Adds additional
        cross-referencing info to a row.

        Args:
            row: future database row, as a dictionary
        """
        ancestor = self.task_ancestor()
        if ancestor:
            row[EXTRA_TASK_TABLENAME_FIELD] = ancestor.tablename
            row[EXTRA_TASK_SERVER_PK_FIELD] = ancestor.pk
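

# Illustrative sketch only (hypothetical class names; not part of this module):
# an ancillary table that knows its parent task type would implement the two
# abstract methods of TaskDescendant roughly as follows:
#
#     class ExampleAncillary(GenericTabletRecordMixin, TaskDescendant, Base):
#         __tablename__ = "example_ancillary"
#         # ... columns as sketched earlier ...
#
#         @classmethod
#         def task_ancestor_class(cls) -> Optional[Type["Task"]]:
#             return ExampleTask
#
#         def task_ancestor(self) -> Optional["Task"]:
#             return ExampleTask.get_linked(self.example_task_id, self)
#
# ... after which task_ancestor_server_pk(), task_ancestor_patient() and
# add_extra_task_xref_info_to_row() work via the implementations above.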