#!/usr/bin/env python3
"""Export USDM StudyArm JSON objects for a SOA stored in the local database."""
# Prefer absolute import; fallback to adding src/ to sys.path when run directly
from typing import Any, Dict, List, Optional, Tuple

try:
    from soa_builder.web.app import _connect  # reuse existing DB connector
except ImportError:
    import sys
    from pathlib import Path

    here = Path(__file__).resolve()
    src_dir = here.parents[2] / "src"
    if src_dir.exists() and str(src_dir) not in sys.path:
        sys.path.insert(0, str(src_dir))
    from soa_builder.web.app import _connect  # type: ignore


def _nz(s: Optional[str]) -> Optional[str]:
    """Return *s* stripped, or None when s is None/empty/whitespace-only."""
    s = (s or "").strip()
    return s or None


def _fetch_code_columns(
    sql: str, soa_id: int, code_uid: str
) -> Tuple[List[str], List[str], List[str], List[str]]:
    """Run a code-lookup query and split the result set into parallel columns.

    Expects each row as (codelist_table, code, cdisc_submission_value,
    dataset_date). Returns (codes, decodes, code_systems,
    code_system_versions); all four lists are empty when nothing matches.
    """
    conn = _connect()
    try:
        cur = conn.cursor()
        cur.execute(sql, (soa_id, code_uid))
        rows = cur.fetchall()
    finally:
        # Close even if the query raises, so a bad lookup can't leak a handle.
        conn.close()
    codes = [r[1] for r in rows]
    decodes = [r[2] for r in rows]
    code_systems = [r[0] for r in rows]
    versions = [r[3] for r in rows]
    return codes, decodes, code_systems, versions


def _get_type_code_tuple(
    soa_id: int, code_uid: str
) -> Tuple[List[str], List[str], List[str], List[str]]:
    """Resolve the arm *type* code against the protocol terminology tables.

    NOTE: annotation fixed — four parallel lists are returned, not four strings.
    """
    return _fetch_code_columns(
        "SELECT DISTINCT c.codelist_table, p.code,p.cdisc_submission_value,p.dataset_date "
        "FROM code c INNER JOIN protocol_terminology p ON c.codelist_code = p.codelist_code "
        "AND c.code = p.code WHERE c.soa_id=? AND c.code_uid=?",
        soa_id,
        code_uid,
    )


def _get_data_origin_type_tuple(
    soa_id: int, code_uid: str
) -> Tuple[List[str], List[str], List[str], List[str]]:
    """Resolve the *data origin type* code against the DDF terminology tables."""
    return _fetch_code_columns(
        "SELECT DISTINCT c.codelist_table,d.code,d.cdisc_submission_value,d.dataset_date "
        "FROM code c INNER JOIN ddf_terminology d ON c.codelist_code = d.codelist_code "
        "AND c.code = d.code WHERE c.soa_id=? AND c.code_uid=?",
        soa_id,
        code_uid,
    )


def _first(values: List[str], what: str, code_uid: str) -> str:
    """Return the first lookup value, raising a clear error instead of a bare
    IndexError when the terminology lookup returned no rows."""
    if not values:
        raise LookupError(f"No terminology match for {what} (code_uid={code_uid!r})")
    return values[0]


def build_usdm_arms(soa_id: int) -> List[Dict[str, Any]]:
    """
    Build USDM StudyArm output objects for the given SOA.

    USDM StudyArm-Output (subset):
    - id: string
    - extensionAttributes?: string[]|[]
    - name: string
    - label?: string | null
    - description?: string | null
    - type: {
    -   id: string
    -   extensionAttributes?: string[]|[]
    -   code: string
    -   codeSystem: string
    -   codeSystemVersion: string
    -   decode: string
    -   instanceType: "Code"
    - }
    - dataOriginDescription?: string|null
    - dataOriginType?: {
    -   id: string
    -   extensionAttributes?: string[]|[]
    -   code: string
    -   codeSystem: string
    -   codeSystemVersion: string
    -   decode: string
    -   instanceType: "Code"
    - }
    - populationIds?: int[]
    - notes?: string[]|[]
    - instanceType: "StudyArm"

    Raises LookupError when an arm's type / data-origin-type code has no
    terminology row for this SOA.
    """
    conn = _connect()
    cur = conn.cursor()
    cur.execute(
        "SELECT id,arm_uid,name,label,description,type,data_origin_type "
        "FROM arm WHERE soa_id=? ORDER BY arm_uid",
        (soa_id,),
    )
    rows = cur.fetchall()
    conn.close()

    out: List[Dict[str, Any]] = []
    for row in rows:
        # Avoid shadowing the builtins `id` and `type` with local names.
        _, arm_uid, name, label, description, type_uid, origin_uid = row

        t_code, t_decode, t_system, t_version = _get_type_code_tuple(soa_id, type_uid)
        o_code, o_decode, o_system, o_version = _get_data_origin_type_tuple(
            soa_id, origin_uid
        )

        out.append(
            {
                "id": arm_uid,
                "extensionAttributes": [],
                "name": name,
                "label": _nz(label),
                "description": _nz(description),
                "type": {
                    "id": type_uid,
                    "extensionAttributes": [],
                    "code": _first(t_code, "arm type", type_uid),
                    "codeSystem": "db://" + _first(t_system, "arm type", type_uid),
                    "codeSystemVersion": _first(t_version, "arm type", type_uid),
                    "decode": _first(t_decode, "arm type", type_uid),
                    "instanceType": "Code",
                },
                "dataOriginDescription": "Data collected from subjects",
                "dataOriginType": {
                    "id": origin_uid,
                    "extensionAttributes": [],
                    "code": _first(o_code, "data origin type", origin_uid),
                    "codeSystem": "db://" + _first(o_system, "data origin type", origin_uid),
                    "codeSystemVersion": _first(o_version, "data origin type", origin_uid),
                    "decode": _first(o_decode, "data origin type", origin_uid),
                    "instanceType": "Code",
                },
                "populationIds": [],
                "notes": [],
                "instanceType": "StudyArm",
            }
        )

    return out


if __name__ == "__main__":
    import argparse
    import json
    import logging
    import sys

    logger = logging.getLogger("usdm.generate_arms")

    parser = argparse.ArgumentParser(description="Export USDM arms for a SOA.")
    parser.add_argument("soa_id", type=int, help="SOA id to export arms for")
    parser.add_argument(
        "-o", "--output", default="-", help="Output file path or '-' for stdout"
    )
    parser.add_argument("--indent", type=int, default=2, help="JSON indent")
    args = parser.parse_args()

    try:
        arms = build_usdm_arms(args.soa_id)
    except Exception:
        logger.exception("Failed to build arms for soa_id=%s", args.soa_id)
        sys.exit(1)

    payload = json.dumps(arms, indent=args.indent)
    if args.output in ("-", "/dev/stdout"):
        sys.stdout.write(payload + "\n")
    else:
        with open(args.output, "w", encoding="utf-8") as f:
            f.write(payload + "\n")
#!/usr/bin/env python3
"""Export USDM StudyEpoch JSON objects for a SOA stored in the local database."""
# Prefer absolute import; fallback to adding src/ to sys.path when run directly
import logging  # bug fix: was only imported under __main__, so the helper
import os       # below raised NameError on `logging` when called as a library
from typing import Any, Dict, List, Optional, Tuple
from urllib.parse import urlparse

import requests

try:
    from soa_builder.web.app import _connect  # reuse existing DB connector
except ImportError:
    import sys
    from pathlib import Path

    here = Path(__file__).resolve()
    src_dir = here.parents[2] / "src"
    if src_dir.exists() and str(src_dir) not in sys.path:
        sys.path.insert(0, str(src_dir))
    from soa_builder.web.app import _connect  # type: ignore

logger = logging.getLogger("usdm.generate_epochs")


def _nz(s: Optional[str]) -> Optional[str]:
    """Return *s* stripped, or None when s is None/empty/whitespace-only."""
    s = (s or "").strip()
    return s or None


def _get_epoch_code_values(
    soa_id: int, type_uid: str, code: str
) -> Tuple[Optional[str], Optional[str], Optional[str]]:
    """Resolve codeSystem / version / decode for an epoch type concept via the
    CDISC Library codelist C99079 (Epoch).

    Returns (code_system, code_system_version, decode); any element may be
    None when the API call fails or *code* is not in the codelist — callers
    get an explicit None instead of the UnboundLocalError the old version
    raised on those paths. `soa_id` is kept for signature compatibility.
    """
    url = "https://library.cdisc.org/api/mdr/ct/packages/sdtmct-2025-09-26/codelists/C99079"
    headers: dict[str, str] = {"Accept": "application/json"}
    subscription_key = os.environ.get("CDISC_SUBSCRIPTION_KEY")
    api_key = os.environ.get("CDISC_API_KEY") or os.environ.get(
        "CDISC_SUBSCRIPTION_KEY"
    )
    unified_key = subscription_key or api_key
    if unified_key:
        headers["Ocp-Apim-Subscription-Key"] = unified_key
    if api_key:
        headers["Authorization"] = f"Bearer {api_key}"
        headers["api-key"] = api_key

    code_system: Optional[str] = None
    code_system_version: Optional[str] = None
    decode: Optional[str] = None

    try:
        resp = requests.get(url, headers=headers, timeout=10)
    except requests.RequestException:
        logger.exception("Request to %s failed for code %s", url, type_uid)
        return code_system, code_system_version, decode

    if resp.status_code != 200:
        # logger.error, not logger.exception: there is no active exception here.
        logger.error(
            "Unexpected HTTP %s from %s for code %s", resp.status_code, url, type_uid
        )
        return code_system, code_system_version, decode

    content = resp.json()
    parsed_url = urlparse(url)
    # bug fix: was scheme + "//:" + netloc, producing "https//:library…"
    code_system = f"{parsed_url.scheme}://{parsed_url.netloc}"
    # Path segment 5 is the package name, e.g. "sdtmct-2025-09-26".
    code_system_version = parsed_url.path.split("/")[5]

    for term in content.get("terms") or []:
        if term.get("conceptId") == code:
            decode = term.get("submissionValue")
            break

    return code_system, code_system_version, decode


def build_usdm_activities(soa_id: int) -> List[Dict[str, Any]]:
    """
    Build USDM StudyEpoch output objects for the given SOA.

    (Function name kept for backward compatibility with existing callers.)

    USDM StudyEpoch-Output (subset):
    - id: string
    - extensionAttributes?: string[]
    - name: string
    - label?: string | null
    - description?: string | null
    - type: {
    -   id: string
    -   extensionAttributes?: string[]
    -   code: string
    -   codeSystem: string
    -   codeSystemVersion: string
    -   decode: string
    -   instanceType: "Code"
    - }
    - previousId?: string | null
    - nextId?: string | null
    - notes?: string[]
    - instanceType: "StudyEpoch"
    """
    conn = _connect()
    cur = conn.cursor()
    # Order by order_index if present, else by id for deterministic output.
    cur.execute("PRAGMA table_info(epoch)")
    cols = {r[1] for r in cur.fetchall()}
    order_clause = "e.order_index, e.id" if "order_index" in cols else "e.id"
    cur.execute(
        "SELECT e.id, e.epoch_uid, e.name, e.epoch_label, e.epoch_description, e.type, c.code "
        "FROM epoch e INNER JOIN code c ON e.soa_id = c.soa_id AND e.type = c.code_uid "
        f"WHERE e.soa_id=? ORDER BY {order_clause}",
        (soa_id,),
    )
    rows = cur.fetchall()
    conn.close()

    # previous/next chain is derived from positional neighbours in the result.
    uids = [r[1] for r in rows]
    id_by_index = dict(enumerate(uids))

    out: List[Dict[str, Any]] = []
    for i, row in enumerate(rows):
        # Avoid shadowing the builtins `id` and `type` with local names.
        _, epoch_uid, name, label, description, type_uid, code = row
        code_system, code_system_version, decode = _get_epoch_code_values(
            soa_id, type_uid, code
        )

        out.append(
            {
                "id": epoch_uid,
                "extensionAttributes": [],
                "name": name,
                "label": _nz(label),
                "description": _nz(description),
                "type": {
                    "id": type_uid,
                    "extensionAttributes": [],
                    "code": code,
                    "codeSystem": code_system,
                    "codeSystemVersion": code_system_version,
                    "decode": decode,
                    "instanceType": "Code",
                },
                "previousId": id_by_index.get(i - 1),
                "nextId": id_by_index.get(i + 1),
                "notes": [],
                "instanceType": "StudyEpoch",
            }
        )
    return out


if __name__ == "__main__":
    import argparse
    import json
    import sys

    parser = argparse.ArgumentParser(description="Export USDM epochs for a SOA.")
    parser.add_argument("soa_id", type=int, help="SOA id to export epochs for")
    parser.add_argument(
        "-o", "--output", default="-", help="Output file path or '-' for stdout"
    )
    parser.add_argument("--indent", type=int, default=2, help="JSON indent")
    args = parser.parse_args()

    try:
        epochs = build_usdm_activities(args.soa_id)
    except Exception:
        logger.exception("Failed to build epochs for soa_id=%s", args.soa_id)
        sys.exit(1)

    payload = json.dumps(epochs, indent=args.indent)
    if args.output in ("-", "/dev/stdout"):
        sys.stdout.write(payload + "\n")
    else:
        with open(args.output, "w", encoding="utf-8") as f:
            f.write(payload + "\n")
def _record_timing_audit(
    soa_id: int,
    action: str,
    timing_id: int | None,
    before: Optional[Dict[str, Any]] = None,
    after: Optional[Dict[str, Any]] = None,
):
    """Best-effort insert of a timing audit row; failures are logged, never raised.

    action is one of create|update|delete; before/after are JSON-serializable
    snapshots of the timing row (or None).
    """
    import json  # local import keeps this fix self-contained within the function

    try:
        conn = _connect()
        cur = conn.cursor()
        cur.execute(
            # bug fix: VALUES was inside the column list
            # ("… performed_at VALUES (?,?,?,?,?,?))"), which made every
            # insert fail with a SQL syntax error that was silently logged.
            "INSERT INTO timing_audit (soa_id, timing_id, action, before_json, after_json, performed_at) "
            "VALUES (?,?,?,?,?,?)",
            (
                soa_id,
                timing_id,
                action,
                # bug fix: dicts are not a supported sqlite3 parameter type;
                # serialize them into the *_json columns as the schema intends.
                json.dumps(before) if before is not None else None,
                json.dumps(after) if after is not None else None,
                datetime.now(timezone.utc).isoformat(),
            ),
        )
        conn.commit()
        conn.close()
    except Exception as e:
        logger.warning("Failed recording timing audit: %s", e)
router = APIRouter(prefix="/soa/{soa_id}")
logger = logging.getLogger("soa_builder.web.routers.timings")

# Column order shared by every timing SELECT below; _row_dict relies on it.
_TIMING_FIELDS = (
    "id",
    "timing_uid",
    "name",
    "label",
    "description",
    "type",
    "value",
    "value_label",
    "relative_to_from",
    "relative_from_schedule_instance",
    "relative_to_schedule_instance",
    "window_label",
    "window_upper",
    "window_lower",
    "order_index",
)
_TIMING_SELECT = "SELECT " + ",".join(_TIMING_FIELDS) + " FROM timing"
# Fields a PATCH may modify (everything except id, timing_uid, order_index).
_MUTABLE_FIELDS = _TIMING_FIELDS[2:-1]


def _nz(s: Optional[str]) -> Optional[str]:
    """Return *s* stripped, or None when s is None/empty/whitespace-only."""
    s = (s or "").strip()
    return s or None


def _row_dict(row) -> Dict[str, Any]:
    """Map a timing SELECT row (in _TIMING_FIELDS order) to a response dict."""
    return dict(zip(_TIMING_FIELDS, row))


@router.get("/timings", response_class=JSONResponse, response_model=None)
def list_timings(soa_id: int):
    """Return all timings for the SOA, ordered by order_index then id."""
    if not soa_exists(soa_id):
        raise HTTPException(404, "SOA not found")

    conn = _connect()
    cur = conn.cursor()
    cur.execute(
        _TIMING_SELECT + " WHERE soa_id=? ORDER BY order_index, id",
        (soa_id,),
    )
    rows = [_row_dict(r) for r in cur.fetchall()]
    conn.close()
    return rows


@router.post(
    "/timings", response_class=JSONResponse, status_code=201, response_model=None
)
def create_timing(soa_id: int, payload: TimingCreate):
    """Create a timing, allocating the next order_index and the smallest
    unused immutable Timing_N uid for this SOA."""
    if not soa_exists(soa_id):
        raise HTTPException(404, "SOA not found")

    name = (payload.name or "").strip()
    if not name:
        raise HTTPException(400, "Timing name required")

    conn = _connect()
    cur = conn.cursor()
    cur.execute(
        "SELECT COALESCE(MAX(order_index),0) FROM timing WHERE soa_id=?",
        (soa_id,),
    )
    next_ord = (cur.fetchone() or [0])[0] + 1

    # Collect the numeric suffixes already in use so deleted numbers can be
    # reused without ever renumbering existing uids.
    cur.execute(
        "SELECT timing_uid FROM timing WHERE soa_id=? AND timing_uid LIKE 'Timing_%'",
        (soa_id,),
    )
    used_nums = set()
    for (uid,) in cur.fetchall():
        if not uid or not uid.startswith("Timing_"):
            continue
        tail = uid[len("Timing_") :]
        if tail.isdigit():
            used_nums.add(int(tail))
        else:
            logger.warning(
                "Invalid timing_uid format encountered (ignored): %s",
                uid,
            )
    next_n = 1
    while next_n in used_nums:
        next_n += 1
    new_uid = f"Timing_{next_n}"

    cur.execute(
        "INSERT INTO timing (soa_id,timing_uid,"
        + ",".join(_MUTABLE_FIELDS)
        + ",order_index) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)",
        (soa_id, new_uid, name)
        + tuple(_nz(getattr(payload, f)) for f in _MUTABLE_FIELDS[1:])
        + (next_ord,),
    )
    timing_id = cur.lastrowid
    conn.commit()
    conn.close()

    created = {
        "id": timing_id,
        "timing_uid": new_uid,
        "name": name,
        "label": _nz(payload.label),
        "description": _nz(payload.description),
        "order_index": next_ord,
    }
    _record_timing_audit(soa_id, "create", timing_id, before=None, after=created)
    return created


@router.patch("/timings/{timing_id}", response_class=JSONResponse, response_model=None)
def update_timing(soa_id: int, timing_id: int, payload: TimingUpdate):
    """Partially update a timing: a None payload field keeps the stored value.

    Returns the stored row plus `updated_fields`, listing fields whose
    normalized (empty-string == None) value actually changed.
    """
    if not soa_exists(soa_id):
        raise HTTPException(404, "SOA not found")

    conn = _connect()
    cur = conn.cursor()
    cur.execute(_TIMING_SELECT + " WHERE soa_id=? AND id=?", (soa_id, timing_id))
    row = cur.fetchone()
    if not row:
        conn.close()
        raise HTTPException(404, f"Timing id={timing_id} not found")
    before = _row_dict(row)

    # Merge payload over the stored row (PATCH semantics: None = unchanged).
    merged = {
        f: getattr(payload, f) if getattr(payload, f) is not None else before[f]
        for f in _MUTABLE_FIELDS
    }

    cur.execute(
        "UPDATE timing SET "
        + ", ".join(f"{f}=?" for f in _MUTABLE_FIELDS)
        + " WHERE id=? AND soa_id=?",
        tuple(_nz(merged[f]) for f in _MUTABLE_FIELDS) + (timing_id, soa_id),
    )
    conn.commit()
    cur.execute(_TIMING_SELECT + " WHERE soa_id=? AND id=?", (soa_id, timing_id))
    after = _row_dict(cur.fetchone())
    conn.close()

    updated_fields = [
        f
        for f in _MUTABLE_FIELDS
        if (before.get(f) or None) != (after.get(f) or None)
    ]
    _record_timing_audit(
        soa_id,
        "update",
        timing_id,
        before=before,
        after={**after, "updated_fields": updated_fields},
    )
    return {**after, "updated_fields": updated_fields}


@router.delete("/timings/{timing_id}", response_class=JSONResponse, response_model=None)
def delete_timing(soa_id: int, timing_id: int):
    """Delete a timing (404 when missing); audits the pre-delete snapshot."""
    if not soa_exists(soa_id):
        raise HTTPException(404, "SOA not found")

    conn = _connect()
    cur = conn.cursor()
    cur.execute(
        "SELECT id,timing_uid,name,label,description FROM timing WHERE soa_id=? AND id=?",
        (soa_id, timing_id),
    )
    row = cur.fetchone()
    if not row:
        conn.close()
        raise HTTPException(404, f"Timing id={timing_id} not found")
    before = dict(zip(("id", "timing_uid", "name", "label", "description"), row))

    cur.execute("DELETE FROM timing WHERE id=? AND soa_id=?", (timing_id, soa_id))
    conn.commit()
    conn.close()

    _record_timing_audit(soa_id, "delete", timing_id, before=before, after=None)
    return {"deleted": True, "id": timing_id}


# --- schemas.py additions -------------------------------------------------


class TimingCreate(BaseModel):
    """POST body for creating a timing; only `name` is required."""

    name: str
    label: Optional[str] = None
    description: Optional[str] = None
    type: Optional[str] = None
    value: Optional[str] = None
    value_label: Optional[str] = None
    relative_to_from: Optional[str] = None
    relative_from_schedule_instance: Optional[str] = None
    relative_to_schedule_instance: Optional[str] = None
    window_label: Optional[str] = None
    window_upper: Optional[str] = None
    window_lower: Optional[str] = None


class TimingUpdate(BaseModel):
    """PATCH body; every field is optional and None means "leave unchanged".

    Bug fix: `name` was previously declared required, forcing clients to
    resend it on every partial update even though update_timing already
    treats a missing/None name as "keep the current value". Making it
    Optional is backward-compatible for all existing callers.
    """

    name: Optional[str] = None
    label: Optional[str] = None
    description: Optional[str] = None
    type: Optional[str] = None
    value: Optional[str] = None
    value_label: Optional[str] = None
    relative_to_from: Optional[str] = None
    relative_from_schedule_instance: Optional[str] = None
    relative_to_schedule_instance: Optional[str] = None
    window_label: Optional[str] = None
    window_upper: Optional[str] = None
    window_lower: Optional[str] = None
from typing import Any, Dict, Tuple

from fastapi.testclient import TestClient

from soa_builder.web.app import app
from soa_builder.web.db import _connect

client = TestClient(app)


def _ensure_soa(soa_id: int = 999) -> int:
    """Guarantee a SOA row exists and starts with no timing rows."""
    db = _connect()
    cursor = db.cursor()
    cursor.execute(
        "INSERT OR IGNORE INTO soa (id, name) VALUES (?, ?)",
        (soa_id, f"Test SOA {soa_id}"),
    )
    cursor.execute("DELETE FROM timing WHERE soa_id=?", (soa_id,))
    db.commit()
    db.close()
    return soa_id


def _create_timing(
    soa_id: int, name: str = "Baseline", **kwargs
) -> Tuple[int, Dict[str, Any]]:
    """POST a timing and return (id, response json); asserts a 201."""
    resp = client.post(f"/soa/{soa_id}/timings", json={"name": name, **kwargs})
    assert resp.status_code == 201, resp.text
    body = resp.json()
    return body["id"], body


def test_list_timings_404_for_missing_soa():
    resp = client.get("/soa/123456/timings")
    assert resp.status_code == 404


def test_create_timing_requires_name():
    soa_id = _ensure_soa(1001)
    resp = client.post(f"/soa/{soa_id}/timings", json={"name": " "})
    assert resp.status_code == 400
    assert "Timing name required" in resp.text


def test_create_timing_trims_and_sets_uid_order_index():
    soa_id = _ensure_soa(1002)
    tid1, first = _create_timing(
        soa_id, name=" Visit Day 1 ", label=" L1 ", description=" Desc "
    )
    # Whitespace is stripped on the way in.
    assert first["name"] == "Visit Day 1"
    assert first["label"] == "L1"
    assert first["description"] == "Desc"
    assert first["timing_uid"].startswith("Timing_")
    assert first["order_index"] == 1

    tid2, second = _create_timing(soa_id, name="Follow-up")
    assert second["order_index"] == 2
    assert second["timing_uid"].startswith("Timing_")
    assert first["timing_uid"] != second["timing_uid"]

    # List ordered by order_index then id
    resp = client.get(f"/soa/{soa_id}/timings")
    assert resp.status_code == 200
    listed = resp.json()
    assert [entry["id"] for entry in listed] == [tid1, tid2]


def test_update_timing_mutable_fields_and_updated_fields():
    soa_id = _ensure_soa(1003)
    tid, before = _create_timing(soa_id, name="Baseline", label=None, description=None)

    raw = {
        "name": " Baseline Updated ",
        "label": " Label X ",
        "description": " Desc Y ",
        "type": " relative ",
        "value": " 5 ",
        "value_label": " days ",
        "relative_to_from": " from ",
        "relative_from_schedule_instance": " Arm A ",
        "relative_to_schedule_instance": " Epoch 1 ",
        "window_label": " Window ",
        "window_upper": " +2 ",
        "window_lower": " -1 ",
    }
    resp = client.patch(f"/soa/{soa_id}/timings/{tid}", json=raw)
    assert resp.status_code == 200, resp.text
    data = resp.json()

    # Trimmed values stored (empty -> None): every field echoes back stripped.
    for field, sent in raw.items():
        assert data[field] == sent.strip()

    # updated_fields must include every changed key.
    assert set(raw).issubset(set(data["updated_fields"]))


def test_update_timing_404_for_missing_id():
    soa_id = _ensure_soa(1004)
    resp = client.patch(f"/soa/{soa_id}/timings/999999", json={"name": "x"})
    assert resp.status_code == 404


def test_delete_timing_happy_path_and_404():
    soa_id = _ensure_soa(1005)
    tid, _ = _create_timing(soa_id, name="Delete Me")
    resp = client.delete(f"/soa/{soa_id}/timings/{tid}")
    assert resp.status_code == 200
    body = resp.json()
    assert body["deleted"] is True
    assert body["id"] == tid

    # A second delete finds nothing.
    resp_again = client.delete(f"/soa/{soa_id}/timings/{tid}")
    assert resp_again.status_code == 404
+class ConceptsUpdate(BaseModel): + concept_codes: List[str] + + +class FreezeCreate(BaseModel): + version_label: Optional[str] = None + + class CellCreate(BaseModel): visit_id: int activity_id: int @@ -1080,12 +1081,6 @@ class MatrixImport(BaseModel): reset: bool = True -# --------------------- Helpers --------------------- - - -# Use shared utils.soa_exists instead of local helper - - def _fetch_matrix(soa_id: int): conn = _connect() cur = conn.cursor() @@ -2153,23 +2148,11 @@ def update_soa_metadata(soa_id: int, payload: SOAMetadataUpdate): """Visit creation handled in routers/visits.py""" - - """Visit update handled in routers/visits.py""" - - """Visit detail handled in routers/visits.py""" - - """Activity creation handled in routers/activities.py""" - - """Activity update handled in routers/activities.py""" - - """Activity detail handled in routers/activities.py""" - - """Epoch CRUD and reorder endpoints refactored into epochs_router.""" @@ -5904,7 +5887,6 @@ def _epoch_types_snapshot(soa_id_int: int) -> list[dict]: return HTMLResponse("OK") -# --------------------- DDF Terminology Load --------------------- def _sanitize_column(name: str) -> str: """Sanitize Excel column header to safe SQLite identifier: lowercase, replace spaces & non-alnum with underscore, collapse repeats.""" import re @@ -5917,6 +5899,7 @@ def _sanitize_column(name: str) -> str: return s +# ------------------------- DDF Terminology ----------------------# def load_ddf_terminology( file_path: str, sheet_name: str = "DDF Terminology 2025-09-26", @@ -6509,7 +6492,7 @@ def ui_ddf_audit( ) -# Protocol Terminology functions +# ------------------------ Protocol Terminology ----------------------# def load_protocol_terminology( file_path: str, sheet_name: str = "Protocol Terminology 2025-09-26", diff --git a/src/soa_builder/web/audit.py b/src/soa_builder/web/audit.py index c6b0066..da3b265 100644 --- a/src/soa_builder/web/audit.py +++ b/src/soa_builder/web/audit.py @@ -202,14 +202,26 @@ def 
_record_timing_audit( try: conn = _connect() cur = conn.cursor() + # Ensure table exists (defensive for migrated databases) + cur.execute( + """CREATE TABLE IF NOT EXISTS timing_audit ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + soa_id INTEGER NOT NULL, + timing_id INTEGER, + action TEXT NOT NULL, + before_json TEXT, + after_json TEXT, + performed_at TEXT NOT NULL + )""" + ) cur.execute( - "INSERT INTO timing_audit (soa_id, timing_id, action, before_json, after_json, performed_at VALUES (?,?,?,?,?,?))", + "INSERT INTO timing_audit (soa_id, timing_id, action, before_json, after_json, performed_at) VALUES (?,?,?,?,?,?)", ( soa_id, timing_id, action, - before, - after, + json.dumps(before) if before else None, + json.dumps(after) if after else None, datetime.now(timezone.utc).isoformat(), ), ) diff --git a/src/soa_builder/web/routers/timings.py b/src/soa_builder/web/routers/timings.py index 40d7bfd..f03064b 100644 --- a/src/soa_builder/web/routers/timings.py +++ b/src/soa_builder/web/routers/timings.py @@ -1,16 +1,22 @@ import logging +import json +import os from typing import Optional -from fastapi import APIRouter, HTTPException -from fastapi.responses import JSONResponse +from fastapi import APIRouter, HTTPException, Request, Form +from fastapi.responses import JSONResponse, HTMLResponse, RedirectResponse +from fastapi.templating import Jinja2Templates from ..audit import _record_timing_audit from ..db import _connect from ..schemas import TimingCreate, TimingUpdate from ..utils import soa_exists -router = APIRouter(prefix="/soa/{soa_id}") +router = APIRouter() logger = logging.getLogger("soa_builder.web.routers.timings") +templates = Jinja2Templates( + directory=os.path.join(os.path.dirname(__file__), "..", "templates") +) def _nz(s: Optional[str]) -> Optional[str]: @@ -18,7 +24,57 @@ def _nz(s: Optional[str]) -> Optional[str]: return s or None -@router.get("/timings", response_class=JSONResponse, response_model=None) +# UI code to list timings in an SOA 
+@router.get("/ui/soa/{soa_id}/timings", response_class=HTMLResponse) +def ui_list_timings(request: Request, soa_id: int): + if not soa_exists(soa_id): + raise HTTPException(404, "SOA not found") + timings = list_timings(soa_id) + return templates.TemplateResponse( + request, + "timings.html", + {"request": request, "soa_id": soa_id, "timings": timings}, + ) + + +# UI code to create a timing for an SOA +@router.post("/ui/soa/{soa_id}/timings/create") +def ui_create_timing( + request: Request, + soa_id: int, + name: str = Form(...), + label: Optional[str] = Form(None), + description: Optional[str] = Form(None), + type: Optional[str] = Form(None), + value: Optional[str] = Form(None), + value_label: Optional[str] = Form(None), + relative_to_from: Optional[str] = Form(None), + relative_from_schedule_instance: Optional[str] = Form(None), + relative_to_schedule_instance: Optional[str] = Form(None), + window_label: Optional[str] = Form(None), + window_upper: Optional[str] = Form(None), + window_lower: Optional[str] = Form(None), +): + payload = TimingCreate( + name=name, + label=label, + description=description, + type=type, + value=value, + value_label=value_label, + relative_to_from=relative_to_from, + relative_from_schedule_instance=relative_from_schedule_instance, + relative_to_schedule_instance=relative_to_schedule_instance, + window_label=window_label, + window_upper=window_upper, + window_lower=window_lower, + ) + create_timing(soa_id, payload) + return RedirectResponse(url=f"/ui/soa/{soa_id}/timings", status_code=303) + + +# API endpoint to list timings for SOA +@router.get("/soa/{soa_id}/timings", response_class=JSONResponse, response_model=None) def list_timings(soa_id: int): if not soa_exists(soa_id): raise HTTPException(404, "SOA not found") @@ -56,8 +112,51 @@ def list_timings(soa_id: int): return rows +@router.get("/soa/{soa_id}/timing_audit", response_class=JSONResponse) +def list_timing_audit(soa_id: int): + if not soa_exists(soa_id): + raise 
HTTPException(404, "SOA not found") + conn = _connect() + cur = conn.cursor() + try: + cur.execute( + "SELECT id, timing_id, action, before_json, after_json, performed_at FROM timing_audit WHERE soa_id=? ORDER BY id DESC", + (soa_id,), + ) + except Exception: + # If table does not exist yet, return empty list for backward compatibility + conn.close() + return JSONResponse([]) + rows = [] + for r in cur.fetchall(): + try: + before = json.loads(r[3]) if r[3] else None + except Exception: + before = None + try: + after = json.loads(r[4]) if r[4] else None + except Exception: + after = None + rows.append( + { + "id": r[0], + "timing_id": r[1], + "action": r[2], + "before": before, + "after": after, + "performed_at": r[5], + } + ) + conn.close() + return JSONResponse(rows) + + +# API endpoint for creating a timing in an SOA @router.post( - "/timings", response_class=JSONResponse, status_code=201, response_model=None + "/soa/{soa_id}/timings", + response_class=JSONResponse, + status_code=201, + response_model=None, ) def create_timing(soa_id: int, payload: TimingCreate): if not soa_exists(soa_id): @@ -131,7 +230,12 @@ def create_timing(soa_id: int, payload: TimingCreate): return row -@router.patch("/timings/{timing_id}", response_class=JSONResponse, response_model=None) +# API endpoint to update a timing in an SOA +@router.patch( + "/soa/{soa_id}/timings/{timing_id}", + response_class=JSONResponse, + response_model=None, +) def update_timing(soa_id: int, timing_id: int, payload: TimingUpdate): if not soa_exists(soa_id): raise HTTPException(404, "SOA not found") @@ -288,7 +392,49 @@ def update_timing(soa_id: int, timing_id: int, payload: TimingUpdate): return {**after, "updated_fields": update_fields} -@router.delete("/timings/{timing_id}", response_class=JSONResponse, response_model=None) +# UI code to update a timing in an SOA +@router.post("/ui/soa/{soa_id}/timings/{timing_id}/update") +def ui_update_timing( + request: Request, + soa_id: int, + timing_id: int, + name: 
Optional[str] = Form(None), + label: Optional[str] = Form(None), + description: Optional[str] = Form(None), + type: Optional[str] = Form(None), + value: Optional[str] = Form(None), + value_label: Optional[str] = Form(None), + relative_to_from: Optional[str] = Form(None), + relative_from_schedule_instance: Optional[str] = Form(None), + relative_to_schedule_instance: Optional[str] = Form(None), + window_label: Optional[str] = Form(None), + window_upper: Optional[str] = Form(None), + window_lower: Optional[str] = Form(None), +): + payload = TimingUpdate( + name=name, + label=label, + description=description, + type=type, + value=value, + value_label=value_label, + relative_to_from=relative_to_from, + relative_from_schedule_instance=relative_from_schedule_instance, + relative_to_schedule_instance=relative_to_schedule_instance, + window_label=window_label, + window_upper=window_upper, + window_lower=window_lower, + ) + update_timing(soa_id, timing_id, payload) + return RedirectResponse(url=f"/ui/soa/{soa_id}/timings", status_code=303) + + +# API endpoint to delete a timing +@router.delete( + "/soa/{soa_id}/timings/{timing_id}", + response_class=JSONResponse, + response_model=None, +) def delete_timing(soa_id: int, timing_id: int): if not soa_exists(soa_id): raise HTTPException(404, "SOA not found") @@ -325,3 +471,10 @@ def delete_timing(soa_id: int, timing_id: int): _record_timing_audit(soa_id, "delete", timing_id, before=before, after=None) return {"deleted": True, "id": timing_id} + + +# UI Code to delete timing +@router.post("/ui/soa/{soa_id}/timings/{timing_id}/delete") +def ui_delete_timing(request: Request, soa_id: int, timing_id: int): + delete_timing(soa_id, timing_id) + return RedirectResponse(url=f"/ui/soa/{soa_id}/timings", status_code=303) diff --git a/src/soa_builder/web/templates/base.html b/src/soa_builder/web/templates/base.html index 797beb8..e86d3a5 100644 --- a/src/soa_builder/web/templates/base.html +++ b/src/soa_builder/web/templates/base.html @@ 
-11,6 +11,7 @@

SoA Workbench