Coverage for cc_modules/cc_db.py: 56%
443 statements
1#!/usr/bin/env python
3"""
4camcops_server/cc_modules/cc_db.py
6===============================================================================
8 Copyright (C) 2012, University of Cambridge, Department of Psychiatry.
9 Created by Rudolf Cardinal (rnc1001@cam.ac.uk).
11 This file is part of CamCOPS.
13 CamCOPS is free software: you can redistribute it and/or modify
14 it under the terms of the GNU General Public License as published by
15 the Free Software Foundation, either version 3 of the License, or
16 (at your option) any later version.
18 CamCOPS is distributed in the hope that it will be useful,
19 but WITHOUT ANY WARRANTY; without even the implied warranty of
20 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 GNU General Public License for more details.
23 You should have received a copy of the GNU General Public License
24 along with CamCOPS. If not, see <https://www.gnu.org/licenses/>.
26===============================================================================
28**Common database code, e.g. mixins for tables that are uploaded from the
29client.**
31"""
33from collections import OrderedDict
34import logging
35from typing import (
36 Any,
37 Callable,
38 Dict,
39 Generator,
40 Iterable,
41 List,
42 NoReturn,
43 Optional,
44 Set,
45 Tuple,
46 Type,
47 TYPE_CHECKING,
48 TypeVar,
49 Union,
50)
52from cardinal_pythonlib.logs import BraceStyleAdapter
53from cardinal_pythonlib.sqlalchemy.orm_inspect import gen_columns
54from pendulum import DateTime as Pendulum
55from sqlalchemy.exc import IntegrityError
56from sqlalchemy.ext.declarative import declared_attr
57from sqlalchemy.orm import relationship
58from sqlalchemy.orm.relationships import RelationshipProperty
59from sqlalchemy.orm import Session as SqlASession
60from sqlalchemy.sql.functions import func
61from sqlalchemy.sql.schema import Column, ForeignKey
62from sqlalchemy.sql.sqltypes import Boolean, DateTime, Integer
64from camcops_server.cc_modules.cc_constants import (
65 CLIENT_DATE_FIELD,
66 ERA_NOW,
67 EXTRA_COMMENT_PREFIX,
68 EXTRA_TASK_SERVER_PK_FIELD,
69 EXTRA_TASK_TABLENAME_FIELD,
70 MOVE_OFF_TABLET_FIELD,
71 SPREADSHEET_PATIENT_FIELD_PREFIX,
72 TABLET_ID_FIELD,
73)
74from camcops_server.cc_modules.cc_dataclasses import SummarySchemaInfo
75from camcops_server.cc_modules.cc_sqla_coltypes import (
76 CamcopsColumn,
77 COLATTR_PERMITTED_VALUE_CHECKER,
78 EraColType,
79 gen_ancillary_relationships,
80 gen_camcops_blob_columns,
81 PendulumDateTimeAsIsoTextColType,
82 PermittedValueChecker,
83 RelationshipInfo,
84 SemanticVersionColType,
85 TableNameColType,
86)
87from camcops_server.cc_modules.cc_simpleobjects import TaskExportOptions
88from camcops_server.cc_modules.cc_spreadsheet import SpreadsheetPage
89from camcops_server.cc_modules.cc_version import CAMCOPS_SERVER_VERSION
90from camcops_server.cc_modules.cc_xml import (
91 make_xml_branches_from_blobs,
92 make_xml_branches_from_columns,
93 make_xml_branches_from_summaries,
94 XML_COMMENT_STORED,
95 XML_COMMENT_CALCULATED,
96 XmlElement,
97)
99if TYPE_CHECKING:
100 from camcops_server.cc_modules.cc_blob import Blob # noqa: F401
101 from camcops_server.cc_modules.cc_patient import Patient # noqa: F401
102 from camcops_server.cc_modules.cc_request import (
103 CamcopsRequest, # noqa: F401
104 )
105 from camcops_server.cc_modules.cc_summaryelement import (
106 SummaryElement, # noqa: F401
107 )
108 from camcops_server.cc_modules.cc_task import Task # noqa: F401
110log = BraceStyleAdapter(logging.getLogger(__name__))
113# =============================================================================
114# Hacks for specific database drivers
115# =============================================================================
117CRASH_ON_BAD_CONVERSIONS = False # for debugging only!
119if CRASH_ON_BAD_CONVERSIONS:
120 log.error("DANGER: CRASH_ON_BAD_CONVERSIONS set in cc_db.py")
122try:
123 import MySQLdb
124 import MySQLdb.converters
125except ImportError:
126 MySQLdb = None
128try:
129 import pymysql
130 import pymysql.converters
131except ImportError:
132 pymysql = None
134_SQL_LITERAL_TYPE = Union[int, float, str]
136_MYSQL_CONVERSION_DICT_TYPE = Dict[Any, Callable]
137_MYSQLDB_PYTHON_TO_DB_TYPE = Callable[
138 [Any, _MYSQL_CONVERSION_DICT_TYPE], _SQL_LITERAL_TYPE
139] # f(o, d) -> s
140_MYSQLDB_DB_TO_PYTHON_TYPE = Callable[[_SQL_LITERAL_TYPE], Any] # f(s) -> o
142_PYMYSQL_ENCODER_DICT_TYPE = Dict[Type, Callable]
143_PYMYSQL_PYTHON_TO_DB_TYPE = Callable[
144 [Any, Optional[_PYMYSQL_ENCODER_DICT_TYPE]], _SQL_LITERAL_TYPE # noqa
145] # f(o, mapping) -> s
146_PYMYSQL_DB_TO_PYTHON_TYPE = Callable[[_SQL_LITERAL_TYPE], Any]
149def mysqldb_crash_on_bad_conversion(
150 o: Any, d: _MYSQL_CONVERSION_DICT_TYPE
151) -> NoReturn:
152 """
153 Reports a bad conversion and crashes. For debugging only (obviously)!
155 **Conversions by mysqlclient (MySQLdb)**
157 As per the help docstring for ``MySQLdb/converters.py``,
159 - the Python-to-database conversion function has the signature ``f(o, d)``
160 where ``o`` is the thing to be converted (such as a datetime.datetime)
161 and ``d`` is the conversion dictionary; it returns an SQL literal value.
163 - the database-to-Python conversion function has the signature ``f(s)``
164 where ``s`` is a string; it returns a Python object.
166 Both types of functions are stored in ``MySQLdb.converters``, which is a
167 ``dict``. The keys named ``FIELD_TYPE.*`` are the database-to-Python
168 converters; the others are the Python-to-database converters.
170 **Conversions by pymysql**
172 Similar (for backwards compatibility), but not the same.
174 - ``pymysql.converters.conversions`` is ``pymysql.converters.decoders`` and
175 contains database-to-Python converters.
177 - ``pymysql.converters.encoders`` contains Python-to-database converters.
179 Args:
180 o: Python object
181 d: MySQLdb conversion dictionary
183 Returns:
184 an SQL literal, in principle; in practice this debugging hook always raises ``RuntimeError``
185 """
186 failmsg = (
187 f"mysqldb_crash_on_bad_conversion: attempting to convert bad Python "
188 f"object to database: {o!r}. Conversion dict is {d!r}."
189 )
190 log.critical(failmsg)
191 raise RuntimeError(failmsg)
194def pymysql_crash_on_bad_conversion(
195 obj: Any, mapping: _PYMYSQL_ENCODER_DICT_TYPE
196) -> NoReturn:
197 """
198 See :func:`mysqldb_crash_on_bad_conversion`.
199 """
200 failmsg = (
201 f"pymysql_crash_on_bad_conversion: attempting to convert bad Python "
202 f"object to database: {obj!r}. Mapping dict is {mapping!r}."
203 )
204 log.critical(failmsg)
205 raise RuntimeError(failmsg)
208# -----------------------------------------------------------------------------
209# Pendulum; see https://pypi.org/project/pendulum/ -- but note that it says
210# "pymysql.converters.conversions" but should say
211# "pymysql.converters.encoders".
212# -----------------------------------------------------------------------------
214if MySQLdb:
215 log.debug("Hacking MySQLdb to support pendulum.DateTime")
216 if CRASH_ON_BAD_CONVERSIONS:
217 MySQLdb.converters.conversions[
218 Pendulum
219 ] = mysqldb_crash_on_bad_conversion # noqa
220 else:
221 MySQLdb.converters.conversions[
222 Pendulum
223 ] = MySQLdb.converters.DateTime2literal # noqa
225if pymysql:
226 log.debug("Hacking pymysql to support pendulum.DateTime")
227 if CRASH_ON_BAD_CONVERSIONS:
228 pymysql.converters.encoders[Pendulum] = pymysql_crash_on_bad_conversion
229 else:
230 pymysql.converters.encoders[
231 Pendulum
232 ] = pymysql.converters.escape_datetime # noqa
233 # And also, as per the source code and
234 # https://stackoverflow.com/questions/59871904/convert-pymysql-query-result-with-mysql-decimal-type-to-python-float # noqa
235 pymysql.converters.conversions = pymysql.converters.encoders.copy()
236 pymysql.converters.conversions.update(pymysql.converters.decoders)
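# Illustrative sketch of what the registration above achieves (assumes
# MySQLdb is installed; "dt" and the exact literal shown are hypothetical):
#
#   from pendulum import DateTime as Pendulum
#   import MySQLdb.converters
#   dt = Pendulum(2022, 11, 8, 23, 14, 0)
#   literal = MySQLdb.converters.conversions[Pendulum](
#       dt, MySQLdb.converters.conversions
#   )
#   # ... "literal" is now a quoted SQL datetime string (e.g.
#   # "'2022-11-08 23:14:00'"), so the driver can convert pendulum.DateTime
#   # parameters instead of raising a conversion error.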
239# =============================================================================
240# Constants
241# =============================================================================
243T = TypeVar("T")
245# Database fieldname constants. Do not change. Used here and in client_api.py
246FN_PK = "_pk"
247FN_DEVICE_ID = "_device_id"
248FN_ERA = "_era"
249FN_CURRENT = "_current"
250FN_WHEN_ADDED_EXACT = "_when_added_exact"
251FN_WHEN_ADDED_BATCH_UTC = "_when_added_batch_utc"
252FN_ADDING_USER_ID = "_adding_user_id"
253FN_WHEN_REMOVED_EXACT = "_when_removed_exact"
254FN_WHEN_REMOVED_BATCH_UTC = "_when_removed_batch_utc"
255FN_REMOVING_USER_ID = "_removing_user_id"
256FN_PRESERVING_USER_ID = "_preserving_user_id"
257FN_FORCIBLY_PRESERVED = "_forcibly_preserved"
258FN_PREDECESSOR_PK = "_predecessor_pk"
259FN_SUCCESSOR_PK = "_successor_pk"
260FN_MANUALLY_ERASED = "_manually_erased"
261FN_MANUALLY_ERASED_AT = "_manually_erased_at"
262FN_MANUALLY_ERASING_USER_ID = "_manually_erasing_user_id"
263FN_CAMCOPS_VERSION = "_camcops_version"
264FN_ADDITION_PENDING = "_addition_pending"
265FN_REMOVAL_PENDING = "_removal_pending"
266FN_GROUP_ID = "_group_id"
268# Common fieldnames used by all tasks. Do not change.
269TFN_WHEN_CREATED = "when_created"
270TFN_WHEN_FIRSTEXIT = "when_firstexit"
271TFN_FIRSTEXIT_IS_FINISH = "firstexit_is_finish"
272TFN_FIRSTEXIT_IS_ABORT = "firstexit_is_abort"
273TFN_EDITING_TIME_S = "editing_time_s"
275# Fieldnames for the task patient mixin. Do not change.
276TFN_PATIENT_ID = "patient_id"
278# Fieldnames for the task clinician mixin. Do not change.
279TFN_CLINICIAN_SPECIALTY = "clinician_specialty"
280TFN_CLINICIAN_NAME = "clinician_name"
281TFN_CLINICIAN_PROFESSIONAL_REGISTRATION = "clinician_professional_registration"
282TFN_CLINICIAN_POST = "clinician_post"
283TFN_CLINICIAN_SERVICE = "clinician_service"
284TFN_CLINICIAN_CONTACT_DETAILS = "clinician_contact_details"
286# Fieldnames for the task respondent mixin. Do not change.
287TFN_RESPONDENT_NAME = "respondent_name"
288TFN_RESPONDENT_RELATIONSHIP = "respondent_relationship"
290# Selected field/column names for patients. Do not change.
291PFN_UUID = "uuid"
293# Column names for task summaries.
294SFN_IS_COMPLETE = "is_complete"
295SFN_SECONDS_CREATION_TO_FIRST_FINISH = "seconds_from_creation_to_first_finish"
296SFN_CAMCOPS_SERVER_VERSION = "camcops_server_version"
298RESERVED_FIELDS = ( # fields that tablets can't upload
299 FN_PK,
300 FN_DEVICE_ID,
301 FN_ERA,
302 FN_CURRENT,
303 FN_WHEN_ADDED_EXACT,
304 FN_WHEN_ADDED_BATCH_UTC,
305 FN_ADDING_USER_ID,
306 FN_WHEN_REMOVED_EXACT,
307 FN_WHEN_REMOVED_BATCH_UTC,
308 FN_REMOVING_USER_ID,
309 FN_PRESERVING_USER_ID,
310 FN_FORCIBLY_PRESERVED,
311 FN_PREDECESSOR_PK,
312 FN_SUCCESSOR_PK,
313 FN_MANUALLY_ERASED,
314 FN_MANUALLY_ERASED_AT,
315 FN_MANUALLY_ERASING_USER_ID,
316 FN_CAMCOPS_VERSION,
317 FN_ADDITION_PENDING,
318 FN_REMOVAL_PENDING,
319 FN_GROUP_ID,
320) # but more generally: they start with "_"...
321assert all(x.startswith("_") for x in RESERVED_FIELDS)
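# A minimal sketch of how RESERVED_FIELDS is typically used when validating
# an upload (hypothetical helper, not part of this module):
#
#   def is_client_settable(fieldname: str) -> bool:
#       """May a client device supply a value for this field?"""
#       return fieldname not in RESERVED_FIELDS
#
#   assert not is_client_settable("_pk")
#   assert is_client_settable("when_created")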
323TABLET_STANDARD_FIELDS = RESERVED_FIELDS + (
324 TABLET_ID_FIELD,
325 CLIENT_DATE_FIELD, # when_last_modified
326 MOVE_OFF_TABLET_FIELD,
327)
328TASK_STANDARD_FIELDS = TABLET_STANDARD_FIELDS + (
329 # All tasks:
330 TFN_WHEN_CREATED,
331 TFN_WHEN_FIRSTEXIT,
332 TFN_FIRSTEXIT_IS_FINISH,
333 TFN_FIRSTEXIT_IS_ABORT,
334 TFN_EDITING_TIME_S,
335)
336TASK_FREQUENT_AND_FK_FIELDS = TASK_STANDARD_FIELDS + (
337 # Tasks with a patient:
338 TFN_PATIENT_ID,
339)
340TASK_FREQUENT_FIELDS = TASK_FREQUENT_AND_FK_FIELDS + (
341 # Tasks with a clinician:
342 TFN_CLINICIAN_SPECIALTY,
343 TFN_CLINICIAN_NAME,
344 TFN_CLINICIAN_PROFESSIONAL_REGISTRATION,
345 TFN_CLINICIAN_POST,
346 TFN_CLINICIAN_SERVICE,
347 TFN_CLINICIAN_CONTACT_DETAILS,
348 # Tasks with a respondent:
349 TFN_RESPONDENT_NAME,
350 TFN_RESPONDENT_RELATIONSHIP,
351)
353REMOVE_COLUMNS_FOR_SIMPLIFIED_SPREADSHEETS = {
354 # keep this: CLIENT_DATE_FIELD = when_last_modified
355 # keep this: FN_PK = task PK
356 # keep this: SFN_IS_COMPLETE = is the task complete
357 # keep this: SPREADSHEET_PATIENT_FIELD_PREFIX + FN_PK = patient PK
358 # keep this: TFN_WHEN_CREATED = main creation time
359 FN_ADDING_USER_ID,
360 FN_ADDITION_PENDING,
361 FN_CAMCOPS_VERSION, # debatable; version that captured the original data
362 FN_CURRENT,
363 FN_DEVICE_ID,
364 FN_ERA,
365 FN_FORCIBLY_PRESERVED,
366 FN_GROUP_ID,
367 FN_MANUALLY_ERASED,
368 FN_MANUALLY_ERASED_AT,
369 FN_MANUALLY_ERASING_USER_ID,
370 FN_PREDECESSOR_PK,
371 FN_PRESERVING_USER_ID,
372 FN_REMOVAL_PENDING,
373 FN_REMOVING_USER_ID,
374 FN_SUCCESSOR_PK,
375 FN_WHEN_ADDED_BATCH_UTC,
376 FN_WHEN_ADDED_EXACT,
377 FN_WHEN_REMOVED_BATCH_UTC,
378 FN_WHEN_REMOVED_EXACT,
379 MOVE_OFF_TABLET_FIELD,
380 SFN_CAMCOPS_SERVER_VERSION, # debatable; version that generated summary information # noqa
381 SFN_SECONDS_CREATION_TO_FIRST_FINISH,
382 SPREADSHEET_PATIENT_FIELD_PREFIX + CLIENT_DATE_FIELD,
383 SPREADSHEET_PATIENT_FIELD_PREFIX + FN_ADDING_USER_ID,
384 SPREADSHEET_PATIENT_FIELD_PREFIX + FN_ADDITION_PENDING,
385 SPREADSHEET_PATIENT_FIELD_PREFIX + FN_CAMCOPS_VERSION,
386 SPREADSHEET_PATIENT_FIELD_PREFIX + FN_CURRENT,
387 SPREADSHEET_PATIENT_FIELD_PREFIX + FN_DEVICE_ID,
388 SPREADSHEET_PATIENT_FIELD_PREFIX + FN_ERA,
389 SPREADSHEET_PATIENT_FIELD_PREFIX + FN_FORCIBLY_PRESERVED,
390 SPREADSHEET_PATIENT_FIELD_PREFIX + FN_GROUP_ID,
391 SPREADSHEET_PATIENT_FIELD_PREFIX + FN_MANUALLY_ERASED,
392 SPREADSHEET_PATIENT_FIELD_PREFIX + FN_MANUALLY_ERASED_AT,
393 SPREADSHEET_PATIENT_FIELD_PREFIX + FN_MANUALLY_ERASING_USER_ID,
394 SPREADSHEET_PATIENT_FIELD_PREFIX + FN_PREDECESSOR_PK,
395 SPREADSHEET_PATIENT_FIELD_PREFIX + FN_PRESERVING_USER_ID,
396 SPREADSHEET_PATIENT_FIELD_PREFIX + FN_REMOVAL_PENDING,
397 SPREADSHEET_PATIENT_FIELD_PREFIX + FN_REMOVING_USER_ID,
398 SPREADSHEET_PATIENT_FIELD_PREFIX + FN_SUCCESSOR_PK,
399 SPREADSHEET_PATIENT_FIELD_PREFIX + FN_WHEN_ADDED_BATCH_UTC,
400 SPREADSHEET_PATIENT_FIELD_PREFIX + FN_WHEN_ADDED_EXACT,
401 SPREADSHEET_PATIENT_FIELD_PREFIX + FN_WHEN_REMOVED_BATCH_UTC,
402 SPREADSHEET_PATIENT_FIELD_PREFIX + FN_WHEN_REMOVED_EXACT,
403 SPREADSHEET_PATIENT_FIELD_PREFIX + MOVE_OFF_TABLET_FIELD,
404 SPREADSHEET_PATIENT_FIELD_PREFIX + PFN_UUID,
405 SPREADSHEET_PATIENT_FIELD_PREFIX + TABLET_ID_FIELD,
406 TABLET_ID_FIELD,
407 TFN_EDITING_TIME_S,
408 TFN_FIRSTEXIT_IS_ABORT,
409 TFN_FIRSTEXIT_IS_FINISH,
410 TFN_PATIENT_ID,
411 TFN_WHEN_FIRSTEXIT,
412}
415# =============================================================================
416# GenericTabletRecordMixin
417# =============================================================================
419# noinspection PyAttributeOutsideInit
420class GenericTabletRecordMixin(object):
421 """
422 Mixin for all tables that are uploaded from the client, representing the
423 fields that the server adds at the point of upload.
425 From the server's perspective, ``_pk`` is the unique primary key.
427 However, records are also defined in their tablet context, for which an
428 individual tablet (defined by the combination of ``_device_id`` and
429 ``_era``) sees its own PK, ``id``.
430 """
432 __tablename__ = None # type: str # sorts out some mixin type checking
434 # -------------------------------------------------------------------------
435 # On the server side:
436 # -------------------------------------------------------------------------
438 # Plain columns
440 # noinspection PyMethodParameters
441 @declared_attr
442 def _pk(cls) -> Column:
443 return Column(
444 FN_PK,
445 Integer,
446 primary_key=True,
447 autoincrement=True,
448 index=True,
449 comment="(SERVER) Primary key (on the server)",
450 )
452 # noinspection PyMethodParameters
453 @declared_attr
454 def _device_id(cls) -> Column:
455 return Column(
456 FN_DEVICE_ID,
457 Integer,
458 ForeignKey("_security_devices.id", use_alter=True),
459 nullable=False,
460 index=True,
461 comment="(SERVER) ID of the source tablet device",
462 )
464 # noinspection PyMethodParameters
465 @declared_attr
466 def _era(cls) -> Column:
467 return Column(
468 FN_ERA,
469 EraColType,
470 nullable=False,
471 index=True,
472 comment="(SERVER) 'NOW', or when this row was preserved and "
473 "removed from the source device (UTC ISO 8601)",
474 )
475 # ... note that _era is textual so that plain comparison
476 # with "=" always works, i.e. no NULLs -- for USER comparison too, not
477 # just in CamCOPS code
479 # noinspection PyMethodParameters
480 @declared_attr
481 def _current(cls) -> Column:
482 return Column(
483 FN_CURRENT,
484 Boolean,
485 nullable=False,
486 index=True,
487 comment="(SERVER) Is the row current (1) or not (0)?",
488 )
490 # noinspection PyMethodParameters
491 @declared_attr
492 def _when_added_exact(cls) -> Column:
493 return Column(
494 FN_WHEN_ADDED_EXACT,
495 PendulumDateTimeAsIsoTextColType,
496 comment="(SERVER) Date/time this row was added (ISO 8601)",
497 )
499 # noinspection PyMethodParameters
500 @declared_attr
501 def _when_added_batch_utc(cls) -> Column:
502 return Column(
503 FN_WHEN_ADDED_BATCH_UTC,
504 DateTime,
505 comment="(SERVER) Date/time of the upload batch that added this "
506 "row (DATETIME in UTC)",
507 )
509 # noinspection PyMethodParameters
510 @declared_attr
511 def _adding_user_id(cls) -> Column:
512 return Column(
513 FN_ADDING_USER_ID,
514 Integer,
515 ForeignKey("_security_users.id"),
516 comment="(SERVER) ID of user that added this row",
517 )
519 # noinspection PyMethodParameters
520 @declared_attr
521 def _when_removed_exact(cls) -> Column:
522 return Column(
523 FN_WHEN_REMOVED_EXACT,
524 PendulumDateTimeAsIsoTextColType,
525 comment="(SERVER) Date/time this row was removed, i.e. made "
526 "not current (ISO 8601)",
527 )
529 # noinspection PyMethodParameters
530 @declared_attr
531 def _when_removed_batch_utc(cls) -> Column:
532 return Column(
533 FN_WHEN_REMOVED_BATCH_UTC,
534 DateTime,
535 comment="(SERVER) Date/time of the upload batch that removed "
536 "this row (DATETIME in UTC)",
537 )
539 # noinspection PyMethodParameters
540 @declared_attr
541 def _removing_user_id(cls) -> Column:
542 return Column(
543 FN_REMOVING_USER_ID,
544 Integer,
545 ForeignKey("_security_users.id"),
546 comment="(SERVER) ID of user that removed this row",
547 )
549 # noinspection PyMethodParameters
550 @declared_attr
551 def _preserving_user_id(cls) -> Column:
552 return Column(
553 FN_PRESERVING_USER_ID,
554 Integer,
555 ForeignKey("_security_users.id"),
556 comment="(SERVER) ID of user that preserved this row",
557 )
559 # noinspection PyMethodParameters
560 @declared_attr
561 def _forcibly_preserved(cls) -> Column:
562 return Column(
563 FN_FORCIBLY_PRESERVED,
564 Boolean,
565 default=False,
566 comment="(SERVER) Forcibly preserved by superuser (rather than "
567 "normally preserved by tablet)?",
568 )
570 # noinspection PyMethodParameters
571 @declared_attr
572 def _predecessor_pk(cls) -> Column:
573 return Column(
574 FN_PREDECESSOR_PK,
575 Integer,
576 comment="(SERVER) PK of predecessor record, prior to modification",
577 )
579 # noinspection PyMethodParameters
580 @declared_attr
581 def _successor_pk(cls) -> Column:
582 return Column(
583 FN_SUCCESSOR_PK,
584 Integer,
585 comment="(SERVER) PK of successor record (after modification) "
586 "or NULL (whilst live, or after deletion)",
587 )
589 # noinspection PyMethodParameters
590 @declared_attr
591 def _manually_erased(cls) -> Column:
592 return Column(
593 FN_MANUALLY_ERASED,
594 Boolean,
595 default=False,
596 comment="(SERVER) Record manually erased (content destroyed)?",
597 )
599 # noinspection PyMethodParameters
600 @declared_attr
601 def _manually_erased_at(cls) -> Column:
602 return Column(
603 FN_MANUALLY_ERASED_AT,
604 PendulumDateTimeAsIsoTextColType,
605 comment="(SERVER) Date/time of manual erasure (ISO 8601)",
606 )
608 # noinspection PyMethodParameters
609 @declared_attr
610 def _manually_erasing_user_id(cls) -> Column:
611 return Column(
612 FN_MANUALLY_ERASING_USER_ID,
613 Integer,
614 ForeignKey("_security_users.id"),
615 comment="(SERVER) ID of user that erased this row manually",
616 )
618 # noinspection PyMethodParameters
619 @declared_attr
620 def _camcops_version(cls) -> Column:
621 return Column(
622 FN_CAMCOPS_VERSION,
623 SemanticVersionColType,
624 default=CAMCOPS_SERVER_VERSION,
625 comment="(SERVER) CamCOPS version number of the uploading device",
626 )
628 # noinspection PyMethodParameters
629 @declared_attr
630 def _addition_pending(cls) -> Column:
631 return Column(
632 FN_ADDITION_PENDING,
633 Boolean,
634 nullable=False,
635 default=False,
636 comment="(SERVER) Addition pending?",
637 )
639 # noinspection PyMethodParameters
640 @declared_attr
641 def _removal_pending(cls) -> Column:
642 return Column(
643 FN_REMOVAL_PENDING,
644 Boolean,
645 default=False,
646 comment="(SERVER) Removal pending?",
647 )
649 # noinspection PyMethodParameters
650 @declared_attr
651 def _group_id(cls) -> Column:
652 return Column(
653 FN_GROUP_ID,
654 Integer,
655 ForeignKey("_security_groups.id"),
656 nullable=False,
657 index=True,
658 comment="(SERVER) ID of group to which this record belongs",
659 )
661 # -------------------------------------------------------------------------
662 # Fields that *all* client tables have:
663 # -------------------------------------------------------------------------
665 # noinspection PyMethodParameters
666 @declared_attr
667 def id(cls) -> Column:
668 return Column(
669 TABLET_ID_FIELD,
670 Integer,
671 nullable=False,
672 index=True,
673 comment="(TASK) Primary key (task ID) on the tablet device",
674 )
676 # noinspection PyMethodParameters
677 @declared_attr
678 def when_last_modified(cls) -> Column:
679 return Column(
680 CLIENT_DATE_FIELD,
681 PendulumDateTimeAsIsoTextColType,
682 index=True, # ... as used by database upload script
683 comment="(STANDARD) Date/time this row was last modified on the "
684 "source tablet device (ISO 8601)",
685 )
687 # noinspection PyMethodParameters
688 @declared_attr
689 def _move_off_tablet(cls) -> Column:
690 return Column(
691 MOVE_OFF_TABLET_FIELD,
692 Boolean,
693 default=False,
694 comment="(SERVER/TABLET) Record-specific preservation pending?",
695 )
697 # -------------------------------------------------------------------------
698 # Relationships
699 # -------------------------------------------------------------------------
701 # noinspection PyMethodParameters
702 @declared_attr
703 def _device(cls) -> RelationshipProperty:
704 return relationship("Device")
706 # noinspection PyMethodParameters
707 @declared_attr
708 def _adding_user(cls) -> RelationshipProperty:
709 return relationship("User", foreign_keys=[cls._adding_user_id])
711 # noinspection PyMethodParameters
712 @declared_attr
713 def _removing_user(cls) -> RelationshipProperty:
714 return relationship("User", foreign_keys=[cls._removing_user_id])
716 # noinspection PyMethodParameters
717 @declared_attr
718 def _preserving_user(cls) -> RelationshipProperty:
719 return relationship("User", foreign_keys=[cls._preserving_user_id])
721 # noinspection PyMethodParameters
722 @declared_attr
723 def _manually_erasing_user(cls) -> RelationshipProperty:
724 return relationship(
725 "User", foreign_keys=[cls._manually_erasing_user_id]
726 )
728 # noinspection PyMethodParameters
729 @declared_attr
730 def _group(cls) -> RelationshipProperty:
731 return relationship("Group", foreign_keys=[cls._group_id])
733 # -------------------------------------------------------------------------
734 # Fetching attributes
735 # -------------------------------------------------------------------------
737 @property
738 def pk(self) -> Optional[int]:
739 """
740 Returns the (server) primary key of this record.
741 """
742 return self._pk
744 @property
745 def era(self) -> Optional[str]:
746 """
747 Returns the era of this record (a text representation of the date/time
748 of the point of record finalization, or ``NOW`` if the record is still
749 present on the client device).
750 """
751 return self._era
753 @property
754 def device_id(self) -> Optional[int]:
755 """
756 Returns the client device ID of this record.
757 """
758 return self._device_id
760 @property
761 def group_id(self) -> Optional[int]:
762 """
763 Returns the group ID of this record.
764 """
765 return self._group_id
767 # -------------------------------------------------------------------------
768 # Other universal properties
769 # -------------------------------------------------------------------------
771 def is_live_on_tablet(self) -> bool:
772 """
773 Is the record live on a tablet (not finalized)?
774 """
775 return self._era == ERA_NOW
777 def is_finalized(self) -> bool:
778 """
779 Is the record finalized (no longer available to be edited on the
780 client device), and therefore (if required) editable on the server?
781 """
782 return not self.is_live_on_tablet()
784 def created_on_server(self, req: "CamcopsRequest") -> bool:
785 """
786 Was this record created on the server?
787 """
788 from camcops_server.cc_modules.cc_device import (
789 Device,
790 ) # delayed import
792 server_device = Device.get_server_device(req.dbsession)
793 return self._era == ERA_NOW and self._device_id == server_device.id
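    # A quick orientation to the three state checks above (hypothetical
    # record "rec" and request "req"):
    #
    #   rec.is_live_on_tablet()     # _era == ERA_NOW: still editable on the
    #                               # client device
    #   rec.is_finalized()          # _era is a fixed timestamp: editable (if
    #                               # at all) only on the server
    #   rec.created_on_server(req)  # era is NOW *and* the device is the
    #                               # server's own virtual device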
795 # -------------------------------------------------------------------------
796 # Autoscanning objects and their relationships
797 # -------------------------------------------------------------------------
799 def _get_xml_root(
800 self, req: "CamcopsRequest", options: TaskExportOptions
801 ) -> XmlElement:
802 """
803 Called to create an XML root object for records ancillary to Task
804 objects. Tasks themselves use a more complex mechanism.
806 Args:
807 req: a :class:`camcops_server.cc_modules.cc_request.CamcopsRequest`
808 options: a :class:`camcops_server.cc_modules.cc_simpleobjects.TaskExportOptions`
809 """ # noqa
810 # "__tablename__" will make the type checker complain, as we're
811 # defining a function for a mixin that assumes it's mixed in to a
812 # SQLAlchemy Base-derived class
813 # noinspection PyUnresolvedReferences
814 return XmlElement(
815 name=self.__tablename__,
816 value=self._get_xml_branches(req=req, options=options),
817 )
819 def _get_xml_branches(
820 self, req: "CamcopsRequest", options: TaskExportOptions
821 ) -> List[XmlElement]:
822 """
823 Gets the values of SQLAlchemy columns as XmlElement objects.
824 Optionally, find any SQLAlchemy relationships that are relationships
825 to Blob objects, and include them too.
827 Used by :func:`_get_xml_root` above, but also by Tasks themselves.
829 Args:
830 req: a :class:`camcops_server.cc_modules.cc_request.CamcopsRequest`
831 options: a :class:`camcops_server.cc_modules.cc_simpleobjects.TaskExportOptions`
832 """ # noqa
833 # log.debug("_get_xml_branches for {!r}", self)
834 options = options or TaskExportOptions(
835 xml_include_plain_columns=True,
836 xml_include_calculated=True,
837 xml_sort_by_name=True,
838 )
839 branches = [] # type: List[XmlElement]
840 if options.xml_with_header_comments:
841 branches.append(XML_COMMENT_STORED)
842 if options.xml_include_plain_columns:
843 new_branches = make_xml_branches_from_columns(
844 self, skip_fields=options.xml_skip_fields
845 )
846 if options.xml_sort_by_name:
847 new_branches.sort(key=lambda el: el.name)
848 branches += new_branches
849 if options.include_blobs:
850 new_branches = make_xml_branches_from_blobs(
851 req, self, skip_fields=options.xml_skip_fields
852 )
853 if options.xml_sort_by_name:
854 new_branches.sort(key=lambda el: el.name)
855 branches += new_branches
856 # Calculated
857 if options.xml_include_calculated:
858 if options.xml_with_header_comments:
859 branches.append(XML_COMMENT_CALCULATED)
860 branches.extend(
861 make_xml_branches_from_summaries(
862 self.get_summaries(req),
863 skip_fields=options.xml_skip_fields,
864 sort_by_name=options.xml_sort_by_name,
865 )
866 )
867 # log.debug("... branches for {!r}: {!r}", self, branches)
868 return branches
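    # Illustrative sketch of how an ancillary record becomes XML
    # (hypothetical ancillary instance "item"; the TaskExportOptions
    # arguments are the same defaults used above):
    #
    #   options = TaskExportOptions(
    #       xml_include_plain_columns=True,
    #       xml_include_calculated=True,
    #       xml_sort_by_name=True,
    #   )
    #   root = item._get_xml_root(req, options)
    #   # ... root.name is the ancillary table's name; root.value is the
    #   # list of per-column (and calculated-summary) XmlElement branches.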
870 def _get_core_spreadsheet_page(
871 self, req: "CamcopsRequest", heading_prefix: str = ""
872 ) -> SpreadsheetPage:
873 """
874 Returns a single-row
875 :class:`camcops_server.cc_modules.cc_spreadsheet.SpreadsheetPage`, like
876 an Excel "sheet", representing this record. (It may be combined with
877 others later to produce a multi-row spreadsheet.)
878 """
879 row = OrderedDict()
880 for attrname, column in gen_columns(self):
881 row[heading_prefix + attrname] = getattr(self, attrname)
882 for s in self.get_summaries(req):
883 row[heading_prefix + s.name] = s.value
884 return SpreadsheetPage(name=self.__tablename__, rows=[row])
886 def _get_core_spreadsheet_schema(
887 self, table_name: str = "", column_name_prefix: str = ""
888 ) -> Set[SummarySchemaInfo]:
889 """
890 Returns schema information compatible with
891 :func:`_get_core_spreadsheet_page`.
892 """
893 return set(
894 SummarySchemaInfo.from_column(
895 column,
896 table_name=table_name,
897 column_name_prefix=column_name_prefix,
898 )
899 for _, column in gen_columns(self)
900 )
902 # -------------------------------------------------------------------------
903 # Erasing (overwriting data, not deleting the database records)
904 # -------------------------------------------------------------------------
906 def manually_erase_with_dependants(self, req: "CamcopsRequest") -> None:
907 """
908 Manually erases a standard record and marks it so erased. Iterates
909 through any dependants and does likewise to them.
911 The object remains ``_current`` (if it was), as a placeholder, but its
912 contents are wiped.
914 WRITES TO THE DATABASE.
915 """
916 if self._manually_erased or self._pk is None or self._era == ERA_NOW:
917 # ... _manually_erased: don't do it twice
918 # ... _pk: basic sanity check
919 # ... _era: don't erase things that are current on the tablet
920 return
921 # 1. "Erase my dependants"
922 for ancillary in self.gen_ancillary_instances_even_noncurrent():
923 ancillary.manually_erase_with_dependants(req)
924 for blob in self.gen_blobs_even_noncurrent():
925 blob.manually_erase_with_dependants(req)
926 # 2. "Erase me"
927 erasure_attrs = [] # type: List[str]
928 for attrname, column in gen_columns(self):
929 if attrname.startswith("_"): # system field
930 continue
931 if not column.nullable: # this should cover FKs
932 continue
933 if column.foreign_keys: # ... but to be sure...
934 continue
935 erasure_attrs.append(attrname)
936 for attrname in erasure_attrs:
937 setattr(self, attrname, None)
938 self._current = False
939 self._manually_erased = True
940 self._manually_erased_at = req.now
941 self._manually_erasing_user_id = req.user_id
943 def delete_with_dependants(self, req: "CamcopsRequest") -> None:
944 """
945 Deletes (completely from the database) this record and any
946 dependant records.
947 """
948 if self._pk is None:
949 return
950 # 1. "Delete my dependants"
951 for ancillary in self.gen_ancillary_instances_even_noncurrent():
952 ancillary.delete_with_dependants(req)
953 for blob in self.gen_blobs_even_noncurrent():
954 blob.delete_with_dependants(req)
955 # 2. "Delete me"
956 dbsession = SqlASession.object_session(self)
957 dbsession.delete(self)
959 def gen_attrname_ancillary_pairs(
960 self,
961 ) -> Generator[Tuple[str, "GenericTabletRecordMixin"], None, None]:
962 """
963 Iterates through and yields all ``_current`` "ancillary" objects
964 (typically: records of subtables).
966 Yields tuples of ``(attrname, related_record)``.
967 """
968 for attrname, rel_prop, rel_cls in gen_ancillary_relationships(self):
969 if rel_prop.uselist:
970 ancillaries = getattr(
971 self, attrname
972 ) # type: List[GenericTabletRecordMixin]
973 else:
974 ancillaries = [
975 getattr(self, attrname)
976 ] # type: List[GenericTabletRecordMixin]
977 for ancillary in ancillaries:
978 if ancillary is None:
979 continue
980 yield attrname, ancillary
982 def gen_ancillary_instances(
983 self,
984 ) -> Generator["GenericTabletRecordMixin", None, None]:
985 """
986 Generates all ``_current`` ancillary objects of this object.
987 """
988 for attrname, ancillary in self.gen_attrname_ancillary_pairs():
989 yield ancillary
991 def gen_ancillary_instances_even_noncurrent(
992 self,
993 ) -> Generator["GenericTabletRecordMixin", None, None]:
994 """
995 Generates all ancillary objects of this object, even non-current
996 ones.
997 """
998 for lineage_member in self._gen_unique_lineage_objects(
999 self.gen_ancillary_instances()
1000 ):
1001 yield lineage_member
1003 def gen_blobs(self) -> Generator["Blob", None, None]:
1004 """
1005 Generate all ``_current`` BLOBs owned by this object.
1006 """
1007 for id_attrname, column in gen_camcops_blob_columns(self):
1008 relationship_attr = column.blob_relationship_attr_name
1009 blob = getattr(self, relationship_attr)
1010 if blob is None:
1011 continue
1012 yield blob
1014 def gen_blobs_even_noncurrent(self) -> Generator["Blob", None, None]:
1015 """
1016 Generates all BLOBs owned by this object, even non-current ones.
1017 """
1018 for lineage_member in self._gen_unique_lineage_objects(
1019 self.gen_blobs()
1020 ): # type: "Blob"
1021 yield lineage_member
1023 def get_lineage(self) -> List["GenericTabletRecordMixin"]:
1024 """
1025 Returns all records that are part of the same "lineage", that is:
1027 - of the same class;
1028 - matching on id/device_id/era;
1029 - including both current and any historical non-current versions.
1031 Will include the "self" object.
1033 """
1034 dbsession = SqlASession.object_session(self)
1035 cls = self.__class__
1036 q = (
1037 dbsession.query(cls)
1038 .filter(cls.id == self.id)
1039 .filter(cls._device_id == self._device_id)
1040 .filter(cls._era == self._era)
1041 )
1042 return list(q)
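    # For orientation: a "lineage" is every server-side version of a single
    # client record. Sketch (hypothetical record "rec"):
    #
    #   versions = rec.get_lineage()
    #   current_versions = [v for v in versions if v._current]
    #   # ... all versions share id/_device_id/_era but have distinct _pk
    #   # values, chained together via _predecessor_pk/_successor_pk.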
1044 @staticmethod
1045 def _gen_unique_lineage_objects(
1046 collection: Iterable["GenericTabletRecordMixin"],
1047 ) -> Generator["GenericTabletRecordMixin", None, None]:
1048 """
1049 Given an iterable of database records, generate all related lineage
1050 objects for each of them (via :meth:`get_lineage`) that are unique by
1051 PK.
1052 """
1053 seen_pks = set() # type: Set[int]
1054 for item in collection:
1055 if item is None:
1056 continue
1057 for lineage_member in item.get_lineage():
1058 pk = lineage_member.pk
1059 if pk in seen_pks:
1060 continue
1061 seen_pks.add(pk)
1062 yield lineage_member
1064 # -------------------------------------------------------------------------
1065 # Retrieving a linked record by client ID
1066 # -------------------------------------------------------------------------
1068 @classmethod
1069 def get_linked(
1070 cls, client_id: Optional[int], other: "GenericTabletRecordMixin"
1071 ) -> Optional["GenericTabletRecordMixin"]:
1072 """
1073 Returns a specific linked record, of this class (``cls``), whose
1074 client-side ID is ``client_id``, and which matches ``other`` in terms
1075 of device/era.
1076 """
1077 if client_id is None:
1078 return None
1079 dbsession = SqlASession.object_session(other)
1080 # noinspection PyPep8
1081 q = (
1082 dbsession.query(cls)
1083 .filter(cls.id == client_id)
1084 .filter(cls._device_id == other._device_id)
1085 .filter(cls._era == other._era)
1086 .filter(cls._current == True) # noqa: E712
1087 )
1088 return q.first()
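    # Illustrative usage (hypothetical task instance "task" carrying a
    # client-side patient_id; Patient is the class imported above for type
    # checking only):
    #
    #   patient = Patient.get_linked(task.patient_id, task)
    #   # ... returns the _current Patient row whose client-side id equals
    #   # task.patient_id and whose _device_id/_era match the task, or None.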
1090 # -------------------------------------------------------------------------
1091 # History functions for server-side editing
1092 # -------------------------------------------------------------------------
1094 def set_predecessor(
1095 self, req: "CamcopsRequest", predecessor: "GenericTabletRecordMixin"
1096 ) -> None:
1097 """
1098 Used for some unusual server-side manipulations (e.g. editing patient
1099 details).
1101 Amends this object so the "self" object replaces the predecessor, so:
1103 - "self" becomes current and refers back to "predecessor";
1104 - "predecessor" becomes non-current and refers forward to "self".
1106 """
1107 assert predecessor._current
1108 # We become new and current, and refer to our predecessor
1109 self._device_id = predecessor._device_id
1110 self._era = predecessor._era
1111 self._current = True
1112 self._when_added_exact = req.now
1113 self._when_added_batch_utc = req.now_utc
1114 self._adding_user_id = req.user_id
1115 if self._era != ERA_NOW:
1116 self._preserving_user_id = req.user_id
1117 self._forcibly_preserved = True
1118 self._predecessor_pk = predecessor._pk
1119 self._camcops_version = predecessor._camcops_version
1120 self._group_id = predecessor._group_id
1121 # Make our predecessor refer to us
1122 if self._pk is None:
1123 req.dbsession.add(self) # ensure we have a PK, part 1
1124 req.dbsession.flush() # ensure we have a PK, part 2
1125 predecessor._set_successor(req, self)
1127 def _set_successor(
1128 self, req: "CamcopsRequest", successor: "GenericTabletRecordMixin"
1129 ) -> None:
1130 """
1131 See :func:`set_predecessor` above.
1132 """
1133 assert successor._pk is not None
1134 self._current = False
1135 self._when_removed_exact = req.now
1136 self._when_removed_batch_utc = req.now_utc
1137 self._removing_user_id = req.user_id
1138 self._successor_pk = successor._pk
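    # Sketch of the server-side edit pattern implemented by the two methods
    # above (hypothetical rows "old" and "new" of the same class; "req" is a
    # CamcopsRequest):
    #
    #   new.id = old.id                 # same client-side ID
    #   new.set_predecessor(req, old)
    #   # ... afterwards: new._current is True and new._predecessor_pk is
    #   # old._pk; old._current is False and old._successor_pk is new._pk;
    #   # so the full edit history remains queryable via get_lineage().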
1140 def mark_as_deleted(self, req: "CamcopsRequest") -> None:
1141 """
1142 Ends the history chain and marks this record as non-current.
1143 """
1144 if self._current:
1145 self._when_removed_exact = req.now
1146 self._when_removed_batch_utc = req.now_utc
1147 self._removing_user_id = req.user_id
1148 self._current = False
1150 def create_fresh(
1151 self, req: "CamcopsRequest", device_id: int, era: str, group_id: int
1152 ) -> None:
1153 """
1154 Used to create a record from scratch.
1155 """
1156 self._device_id = device_id
1157 self._era = era
1158 self._group_id = group_id
1159 self._current = True
1160 self._when_added_exact = req.now
1161 self._when_added_batch_utc = req.now_utc
1162 self._adding_user_id = req.user_id
1164 def save_with_next_available_id(
1165 self, req: "CamcopsRequest", device_id: int, era: str = ERA_NOW
1166 ) -> None:
1167 """
1168 Save a record with the next available client pk in sequence.
1169 This is of use when creating patients and ID numbers on the server
1170 to ensure uniqueness, or when fixing up a missing ID number for
1171 a patient created on a device.
1172 """
1173 cls = self.__class__
1175 saved_ok = False
1177 # MySQL's "select for update" doesn't reliably help here (we can't lock
1178 # a row that doesn't exist yet), so we keep trying the next available ID
1179 # and checking for an integrity error in case another user has grabbed
1180 # it by the time we have committed.
1181 # noinspection PyProtectedMember
1182 last_id = (
1183 req.dbsession
1184 # func.max(cls.id) + 1 here will do the right thing for
1185 # backends that support select for update (maybe not for no rows)
1186 .query(func.max(cls.id))
1187 .filter(cls._device_id == device_id)
1188 .filter(cls._era == era)
1189 .scalar()
1190 ) or 0
1192 next_id = last_id + 1
1194 while not saved_ok:
1195 self.id = next_id
1197 req.dbsession.add(self)
1199 try:
1200 req.dbsession.flush()
1201 saved_ok = True
1202 except IntegrityError:
1203 req.dbsession.rollback()
1204 next_id += 1
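    # Illustrative usage (hypothetical: allocating a server-created record;
    # "ServerCreatedThing", "server_device" and "group_id" are assumed names):
    #
    #   rec = ServerCreatedThing()
    #   rec.create_fresh(req, device_id=server_device.id, era=ERA_NOW,
    #                    group_id=group_id)
    #   rec.save_with_next_available_id(req, device_id=server_device.id)
    #   # ... the loop above retries on IntegrityError until an unused
    #   # client-side "id" is found for that device/era combination.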
1206 # -------------------------------------------------------------------------
1207 # Override this if you provide summaries
1208 # -------------------------------------------------------------------------
1210 # noinspection PyMethodMayBeStatic
1211 def get_summaries(self, req: "CamcopsRequest") -> List["SummaryElement"]:
1212 """
1213 Return a list of :class:`SummaryElement` objects, for this database
1214 object (not any dependent classes/tables).
1216 Note that this is implemented on :class:`GenericTabletRecordMixin`,
1217 not :class:`camcops_server.cc_modules.cc_task.Task`, so that ancillary
1218 objects can also provide summaries.
1219 """
1220 return []
1222 def get_summary_names(self, req: "CamcopsRequest") -> List[str]:
1223 """
1224 Returns a list of summary field names.
1225 """
1226 return [x.name for x in self.get_summaries(req)]
1229# =============================================================================
1230# Relationships
1231# =============================================================================
1234def ancillary_relationship(
1235 parent_class_name: str,
1236 ancillary_class_name: str,
1237 ancillary_fk_to_parent_attr_name: str,
1238 ancillary_order_by_attr_name: str = None,
1239 read_only: bool = True,
1240) -> RelationshipProperty:
1241 """
1242 Implements a one-to-many relationship, i.e. one parent to many ancillaries.
1243 """
1244 parent_pk_attr_name = "id" # always
1245 return relationship(
1246 ancillary_class_name,
1247 primaryjoin=(
1248 "and_("
1249 " remote({a}.{fk}) == foreign({p}.{pk}), "
1250 " remote({a}._device_id) == foreign({p}._device_id), "
1251 " remote({a}._era) == foreign({p}._era), "
1252 " remote({a}._current) == True "
1253 ")".format(
1254 a=ancillary_class_name,
1255 fk=ancillary_fk_to_parent_attr_name,
1256 p=parent_class_name,
1257 pk=parent_pk_attr_name,
1258 )
1259 ),
1260 uselist=True,
1261 order_by="{a}.{f}".format(
1262 a=ancillary_class_name, f=ancillary_order_by_attr_name
1263 ),
1264 viewonly=read_only,
1265 info={RelationshipInfo.IS_ANCILLARY: True},
1266 # ... "info" is a user-defined dictionary; see
1267 # https://docs.sqlalchemy.org/en/latest/orm/relationship_api.html#sqlalchemy.orm.relationship.params.info # noqa
1268 # https://docs.sqlalchemy.org/en/latest/orm/internals.html#MapperProperty.info # noqa
1269 )
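# Illustrative usage (hypothetical parent task "Parent" with child rows
# "ParentItem" carrying a "parent_id" FK and a "seqnum" ordering column;
# the base classes are assumed):
#
#   class Parent(Task):
#       items = ancillary_relationship(
#           parent_class_name="Parent",
#           ancillary_class_name="ParentItem",
#           ancillary_fk_to_parent_attr_name="parent_id",
#           ancillary_order_by_attr_name="seqnum",
#       )
#   # ... Parent.items then yields only the _current ParentItem rows that
#   # match the parent on id/_device_id/_era, ordered by seqnum.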
1272# =============================================================================
1273# Field creation assistance
1274# =============================================================================
1276# TypeEngineBase = TypeVar('TypeEngineBase', bound=TypeEngine)
1279def add_multiple_columns(
1280 cls: Type,
1281 prefix: str,
1282 start: int,
1283 end: int,
1284 coltype=Integer,
1285 # this type fails: Union[Type[TypeEngineBase], TypeEngine]
1286 # ... https://stackoverflow.com/questions/38106227
1287 # ... https://github.com/python/typing/issues/266
1288 colkwargs: Dict[str, Any] = None,
1289 comment_fmt: str = None,
1290 comment_strings: List[str] = None,
1291 minimum: Union[int, float] = None,
1292 maximum: Union[int, float] = None,
1293 pv: List[Any] = None,
1294 suffix: str = "",
1295) -> None:
1296 """
1297 Add a sequence of SQLAlchemy columns to a class.
1299 Called from a metaclass.
1300 Used to make task creation a bit easier.
1302 Args:
1303 cls:
1304 class to which to add columns
1305 prefix:
1306 Fieldname will be ``prefix + str(n) + suffix``, where ``n`` is
1307 defined below.
1308 suffix:
1309 Optional. See ``prefix``.
1310 start:
1311 Start of range.
1312 end:
1313 End of range. Thus: ``i`` will range from ``0`` to ``(end -
1314 start)`` inclusive; ``n`` will range from ``start`` to ``end``
1315 inclusive.
1316 coltype:
1317 SQLAlchemy column type, in either of these formats: (a)
1318 ``Integer`` (of general type ``Type[TypeEngine]``?); (b)
1319 ``Integer()`` (of general type ``TypeEngine``).
1320 colkwargs:
1321 SQLAlchemy column arguments, as in
1322 ``Column(name, coltype, **colkwargs)``
1323 comment_fmt:
1324 Format string defining field comments. Substitutable
1325 values are:
1327 - ``{n}``: field number (from range).
1328 - ``{s}``: ``comment_strings[i]``, where ``i`` is a zero-based
1329 index as defined above, or "" if out of range.
1331 comment_strings:
1332 see ``comment_fmt``
1333 minimum:
1334 minimum permitted value, or ``None``
1335 maximum:
1336 maximum permitted value, or ``None``
1337 pv:
1338 list of permitted values, or ``None``
1339 """
1340 colkwargs = {} if colkwargs is None else colkwargs # type: Dict[str, Any]
1341 comment_strings = comment_strings or []
1342 for n in range(start, end + 1):
1343 nstr = str(n)
1344 i = n - start
1345 colname = prefix + nstr + suffix
1346 if comment_fmt:
1347 s = ""
1348 if 0 <= i < len(comment_strings):
1349 s = comment_strings[i] or ""
1350 colkwargs["comment"] = comment_fmt.format(n=n, s=s)
1351 if minimum is not None or maximum is not None or pv is not None:
1352 colkwargs[COLATTR_PERMITTED_VALUE_CHECKER] = PermittedValueChecker(
1353 minimum=minimum, maximum=maximum, permitted_values=pv
1354 )
1355 setattr(cls, colname, CamcopsColumn(colname, coltype, **colkwargs))
1356 else:
1357 setattr(cls, colname, Column(colname, coltype, **colkwargs))
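# Illustrative usage (hypothetical task class with questions q1-q10, each
# scored 0-4; typically called from the task's metaclass):
#
#   add_multiple_columns(
#       cls,
#       prefix="q",
#       start=1,
#       end=10,
#       minimum=0,
#       maximum=4,
#       comment_fmt="Q{n} ({s}) (0-4)",
#       comment_strings=["interest", "mood"],  # etc.; one entry per question
#   )
#   # ... this attaches cls.q1 ... cls.q10 as CamcopsColumn objects, each
#   # with a PermittedValueChecker limiting values to the range 0-4.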
1360# =============================================================================
1361# TaskDescendant
1362# =============================================================================
1365class TaskDescendant(object):
1366 """
1367 Information mixin for sub-tables that can be traced back to a task. Used
1368 to denormalize the database for export in some circumstances.
1370 Not used for the Blob class, which has no reasonable way of tracing itself
1371 back to a given task if it is used by a task's ancillary tables rather than
1372 a primary task row.
1373 """
1375 @classmethod
1376 def task_ancestor_class(cls) -> Optional[Type["Task"]]:
1377 """
1378 Returns the class of the ancestral task.
1380 If the descendant can descend from lots of types of task (rare; only
1381 applies to :class:`camcops_server.cc_modules.cc_blob.Blob` and
1382 :class:`camcops_server.cc_modules.cc_summaryelement.ExtraSummaryTable`),
1383 returns ``None``.
1384 """ # noqa
1385 raise NotImplementedError
1387 @classmethod
1388 def task_ancestor_might_have_patient(cls) -> bool:
1389 """
1390 Might this object's ancestor task have a patient (i.e. not be anonymous)?
1391 """
1392 taskcls = cls.task_ancestor_class()
1393 if not taskcls:
1394 return True # e.g. Blob, ExtraSummaryTable
1395 return not taskcls.is_anonymous
1397 def task_ancestor_server_pk(self) -> Optional[int]:
1398 """
1399 Returns the server PK of the ancestral task.
1401 Note that this is an export-time calculation; the client may update its
1402 task rows without updating its descendant rows (so server PKs change
1403 whilst client IDs don't).
1404 """
1405 task = self.task_ancestor()
1406 if not task:
1407 return None
1408 return task.pk
1410 def task_ancestor(self) -> Optional["Task"]:
1411 """
1412 Returns the specific ancestor task of this object.
1413 """
1414 raise NotImplementedError
1416 def task_ancestor_patient(self) -> Optional["Patient"]:
1417 """
1418 Returns the associated patient, if there is one.
1419 """
1420 task = self.task_ancestor()
1421 return task.patient if task else None
1423 @classmethod
1424 def extra_task_xref_columns(cls) -> List[Column]:
1425 """
1426 Returns extra columns used to cross-reference this
1427 :class:`TaskDescendant` to its ancestor task, in certain export
1428 formats (``DB_PATIENT_ID_PER_ROW``).
1429 """
1430 return [
1431 Column(
1432 EXTRA_TASK_TABLENAME_FIELD,
1433 TableNameColType,
1434 comment=EXTRA_COMMENT_PREFIX + "Table name of ancestor task",
1435 ),
1436 Column(
1437 EXTRA_TASK_SERVER_PK_FIELD,
1438 Integer,
1439 comment=EXTRA_COMMENT_PREFIX + "Server PK of ancestor task",
1440 ),
1441 ]
1443 def add_extra_task_xref_info_to_row(self, row: Dict[str, Any]) -> None:
1444 """
1445 For the ``DB_PATIENT_ID_PER_ROW`` export option. Adds additional
1446 cross-referencing info to a row.
1448 Args:
1449 row: future database row, as a dictionary
1450 """
1451 ancestor = self.task_ancestor()
1452 if ancestor:
1453 row[EXTRA_TASK_TABLENAME_FIELD] = ancestor.tablename
1454 row[EXTRA_TASK_SERVER_PK_FIELD] = ancestor.pk
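# A minimal sketch of a concrete TaskDescendant (hypothetical child table
# "sometask_trials" belonging to a hypothetical task class "SomeTask";
# "Base" is the assumed declarative base):
#
#   class SomeTaskTrial(GenericTabletRecordMixin, TaskDescendant, Base):
#       __tablename__ = "sometask_trials"
#       sometask_id = Column("sometask_id", Integer, nullable=False)
#
#       @classmethod
#       def task_ancestor_class(cls) -> Optional[Type["Task"]]:
#           return SomeTask
#
#       def task_ancestor(self) -> Optional["Task"]:
#           return SomeTask.get_linked(self.sometask_id, self)
#
#   # ... export code can then call task_ancestor_server_pk() and
#   # add_extra_task_xref_info_to_row() on each SomeTaskTrial row when the
#   # DB_PATIENT_ID_PER_ROW option is in force.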