diff --git a/.gitignore b/.gitignore
index e580969..72f6bf2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -100,5 +100,7 @@ SOA Workbench Wishlist.docx
NCT01750580_limited.json
CLAUDE.md
edit-column-collapse.html
+.claude
+api_test.py
# End of file
diff --git a/docs/Create TDD.docx b/docs/Create TDD.docx
new file mode 100644
index 0000000..9c959b7
Binary files /dev/null and b/docs/Create TDD.docx differ
diff --git a/files/SDTMIGv3.4.pdf b/files/SDTMIGv3.4.pdf
new file mode 100644
index 0000000..867b947
Binary files /dev/null and b/files/SDTMIGv3.4.pdf differ
diff --git a/src/sdtm/generate_ta.py b/src/sdtm/generate_ta.py
new file mode 100644
index 0000000..caa9fe7
--- /dev/null
+++ b/src/sdtm/generate_ta.py
@@ -0,0 +1,78 @@
+"""Generate the SDTM Trial Arms (TA) domain from the SOA workbench DB."""
+
+from soa_builder.web.db import _connect
+
+
+def build_sdtm_ta(soa_id: int) -> list[dict]:
+ """One record per planned element per arm (TA domain).
+
+ Mapping follows SDTM IG v3.4 Section 7 / docs/Create TDD.docx:
+ ARMCD = StudyArm/@name (≤20 chars)
+ ARM = StudyArm/@description
+ TAETORD = sequential order within arm (by epoch.order_index then sc.order_index)
+ ETCD = StudyElement/@name
+ ELEMENT = StudyElement/@description
+ EPOCH = StudyEpoch/@name
+ TABRANCH / TATRANS = blank (require ScheduledDecisionInstance, not in DB)
+ """
+ conn = _connect()
+ cur = conn.cursor()
+ cur.execute("SELECT study_id, name FROM soa WHERE id=?", (soa_id,))
+ row = cur.fetchone()
+ study_id = (row[0] or row[1]) if row else ""
+
+ cur.execute(
+ """
+ SELECT sc.arm_uid,
+ a.name AS arm_name,
+ a.description AS arm_desc,
+ a.label AS arm_label,
+ e.name AS epoch_name,
+ e.order_index AS epoch_ord,
+ el.name AS el_name,
+ el.description AS el_desc,
+ el.label AS el_label,
+ sc.order_index
+ FROM study_cell sc
+ JOIN arm a ON a.arm_uid = sc.arm_uid AND a.soa_id = sc.soa_id
+ JOIN epoch e ON e.epoch_uid = sc.epoch_uid AND e.soa_id = sc.soa_id
+ JOIN element el ON el.element_id = sc.element_uid AND el.soa_id = sc.soa_id
+ WHERE sc.soa_id = ?
+ ORDER BY a.order_index, e.order_index, sc.order_index
+ """,
+ (soa_id,),
+ )
+ rows = cur.fetchall()
+ conn.close()
+
+ records = []
+ arm_seq: dict[str, int] = {}
+ for (
+ arm_uid,
+ arm_name,
+ arm_desc,
+ arm_label,
+ epoch_name,
+ _epoch_ord,
+ el_name,
+ el_desc,
+ el_label,
+ _sc_ord,
+ ) in rows:
+ arm_seq.setdefault(arm_uid, 0)
+ arm_seq[arm_uid] += 1
+ records.append(
+ {
+ "STUDYID": study_id,
+ "DOMAIN": "TA",
+ "ARMCD": (arm_name or "")[:20],
+ "ARM": arm_desc or arm_label or arm_name or "",
+ "TAETORD": arm_seq[arm_uid],
+ "ETCD": el_name or "",
+ "ELEMENT": el_desc or el_label or el_name or "",
+ "TABRANCH": "",
+ "TATRANS": "",
+ "EPOCH": epoch_name or "",
+ }
+ )
+ return records
diff --git a/src/sdtm/generate_te.py b/src/sdtm/generate_te.py
new file mode 100644
index 0000000..cd7b9ba
--- /dev/null
+++ b/src/sdtm/generate_te.py
@@ -0,0 +1,54 @@
+"""Generate the SDTM Trial Elements (TE) domain from the SOA workbench DB."""
+
+from soa_builder.web.db import _connect
+
+
+def build_sdtm_te(soa_id: int) -> list[dict]:
+ """One record per unique study element (TE domain).
+
+ Mapping follows SDTM IG v3.4 Section 7 / docs/Create TDD.docx:
+ STUDYID = StudyIdentifier (sponsor org) → soa.study_id or soa.name
+ ETCD = StudyElement/@name
+ ELEMENT = StudyElement/@description
+ TESTRL = StudyElement/@transitionStartRule/TransitionRule/@text
+ TEENRL = StudyElement/@transitionEndRule/TransitionRule/@text
+ TEDUR = blank (requires Timing value derivation, not directly in DB)
+ """
+ conn = _connect()
+ cur = conn.cursor()
+ cur.execute("SELECT study_id, name FROM soa WHERE id=?", (soa_id,))
+ row = cur.fetchone()
+ study_id = (row[0] or row[1]) if row else ""
+
+ cur.execute(
+ """
+ SELECT el.name,
+ el.description,
+ el.label,
+ tr_start.text AS testrl_text,
+ tr_end.text AS teenrl_text
+ FROM element el
+ LEFT JOIN transition_rule tr_start ON tr_start.transition_rule_uid = el.testrl
+ LEFT JOIN transition_rule tr_end ON tr_end.transition_rule_uid = el.teenrl
+ WHERE el.soa_id = ?
+ ORDER BY el.order_index
+ """,
+ (soa_id,),
+ )
+ rows = cur.fetchall()
+ conn.close()
+
+ records = []
+ for el_name, el_desc, el_label, testrl, teenrl in rows:
+ records.append(
+ {
+ "STUDYID": study_id,
+ "DOMAIN": "TE",
+ "ETCD": el_name or "",
+ "ELEMENT": el_desc or el_label or el_name or "",
+ "TESTRL": testrl or "",
+ "TEENRL": teenrl or "",
+ "TEDUR": "",
+ }
+ )
+ return records
diff --git a/src/sdtm/generate_tv.py b/src/sdtm/generate_tv.py
new file mode 100644
index 0000000..663a12d
--- /dev/null
+++ b/src/sdtm/generate_tv.py
@@ -0,0 +1,169 @@
+"""Generate the SDTM Trial Visits (TV) domain from the SOA workbench DB."""
+
+import re
+
+from soa_builder.web.db import _connect
+
+
+def _iso_duration_to_days(value: str) -> str:
+ """Convert an ISO 8601 duration string to an integer number of days.
+
+ Handles the patterns used in clinical trial timing values:
+ P{n}D → n days
+ P{n}W → n * 7 days
+ P{n}Y{n}M{n}D → years * 365 + months * 30 + days (approximate)
+ -P... → negative day count
+ PT{n}H / time-only → "" (cannot express as integer days)
+
+ Returns the day count as a string, or "" if the value is absent or
+ cannot be converted to an integer number of days.
+ """
+ if not value:
+ return ""
+ s = value.strip()
+ negative = s.startswith("-")
+ if negative:
+ s = s[1:]
+ if not s.startswith("P"):
+ return ""
+ s = s[1:] # strip leading 'P'
+
+ # Weeks-only shorthand: {n}W
+ m = re.fullmatch(r"(\d+(?:\.\d+)?)W", s)
+ if m:
+ days = round(float(m.group(1)) * 7)
+ return str(-days if negative else days)
+
+ # General form: split date/time at 'T'
+ date_part = s.partition("T")[0]
+
+ y = re.search(r"(\d+(?:\.\d+)?)Y", date_part)
+ mo = re.search(r"(\d+(?:\.\d+)?)M", date_part)
+ d = re.search(r"(\d+(?:\.\d+)?)D", date_part)
+
+ if not y and not mo and not d:
+ return "" # time-only duration (e.g. PT8H) — not a day count
+
+ days = 0
+ if y:
+ days += round(float(y.group(1)) * 365)
+ if mo:
+ days += round(float(mo.group(1)) * 30)
+ if d:
+ days += round(float(d.group(1)))
+
+ return str(-days if negative else days)
+
+
+def build_sdtm_tv(soa_id: int) -> list[dict]:
+ """One record per planned (visit, arm) combination (TV domain).
+
+ Mapping follows SDTM IG v3.4 Section 7 / docs/Create TDD.docx:
+ VISITNUM = Encounter ordering (visit.order_index)
+ VISIT = Encounter/@name (visit.name)
+ VISITDY = Encounter/@timing/Timing/@timingValue
+ (timing.value via visit.scheduledAtId → timing.id)
+ ARMCD = StudyArm/@name via ScheduledActivityInstance → StudyCell → arm
+ (one row per arm when encounter is linked; blank otherwise)
+ ARM = StudyArm/@description via same path
+ TVSTRL = Encounter/@transitionStartRule/TransitionRule/@text
+ TVENRL = Encounter/@transitionEndRule/TransitionRule/@text
+
+ Row cardinality:
+ - If a visit's encounter_uid appears in instances that link to arm(s)
+ via epoch→study_cell, one TV row is emitted per (visit, arm).
+ - If there is no instance linkage, one TV row is emitted with ARMCD/ARM blank.
+ """
+ conn = _connect()
+ cur = conn.cursor()
+
+ cur.execute("SELECT study_id, name FROM soa WHERE id=?", (soa_id,))
+ row = cur.fetchone()
+ study_id = (row[0] or row[1]) if row else ""
+
+ # Query 1: all visits with timing and transition rule text
+ cur.execute(
+ """
+ SELECT v.encounter_uid,
+ v.name,
+ v.order_index,
+ t.value AS timing_value,
+ tr_s.text AS tvstrl_text,
+ tr_e.text AS tvenrl_text
+ FROM visit v
+ LEFT JOIN timing t
+ ON t.soa_id = v.soa_id
+ AND v.scheduledAtId IS NOT NULL
+ AND v.scheduledAtId != ''
+ AND t.id = CAST(v.scheduledAtId AS INTEGER)
+ LEFT JOIN transition_rule tr_s
+ ON tr_s.transition_rule_uid = v.transitionStartRule
+ LEFT JOIN transition_rule tr_e
+ ON tr_e.transition_rule_uid = v.transitionEndRule
+ WHERE v.soa_id = ?
+ ORDER BY v.order_index
+ """,
+ (soa_id,),
+ )
+ visits = cur.fetchall()
+
+ # Query 2: arm linkage per encounter via instances → epoch → study_cell → arm
+ cur.execute(
+ """
+ SELECT DISTINCT inst.encounter_uid,
+ a.name AS arm_name,
+ a.description AS arm_desc,
+ a.label AS arm_label,
+ a.order_index AS arm_ord
+ FROM instances inst
+ JOIN study_cell sc ON sc.soa_id = inst.soa_id
+ AND sc.epoch_uid = inst.epoch_uid
+ JOIN arm a ON a.soa_id = sc.soa_id
+ AND a.arm_uid = sc.arm_uid
+ WHERE inst.soa_id = ?
+ AND inst.encounter_uid IS NOT NULL
+ AND inst.encounter_uid != ''
+ ORDER BY inst.encounter_uid, a.order_index
+ """,
+ (soa_id,),
+ )
+ arm_map: dict[str, list[tuple[str, str, str]]] = {}
+ for enc_uid, arm_name, arm_desc, arm_label, _ in cur.fetchall():
+ arm_map.setdefault(enc_uid, []).append((arm_name, arm_desc, arm_label))
+
+ conn.close()
+
+ records = []
+ for enc_uid, visit_name, order_index, timing_val, tvstrl, tvenrl in visits:
+ arms = arm_map.get(enc_uid or "", [])
+ if arms:
+ for arm_name, arm_desc, arm_label in arms:
+ records.append(
+ {
+ "STUDYID": study_id,
+ "DOMAIN": "TV",
+ "VISITNUM": order_index,
+ "VISIT": visit_name or "",
+ "VISITDY": _iso_duration_to_days(timing_val or ""),
+ "ARMCD": (arm_name or "")[:20],
+ "ARM": arm_desc or arm_label or arm_name or "",
+ "TVSTRL": tvstrl or "",
+ "TVENRL": tvenrl or "",
+ }
+ )
+ else:
+ records.append(
+ {
+ "STUDYID": study_id,
+ "DOMAIN": "TV",
+ "VISITNUM": order_index,
+ "VISIT": visit_name or "",
+                    "VISITDY": _iso_duration_to_days(timing_val or ""),
+ "ARMCD": "",
+ "ARM": "",
+ "TVSTRL": tvstrl or "",
+ "TVENRL": tvenrl or "",
+ }
+ )
+    records.sort(key=lambda r: (r["ARMCD"], r["VISITNUM"] if r["VISITNUM"] is not None else 0))
+ return records
diff --git a/src/soa_builder/web/app.py b/src/soa_builder/web/app.py
index dc3685f..8d41ae8 100644
--- a/src/soa_builder/web/app.py
+++ b/src/soa_builder/web/app.py
@@ -23,7 +23,16 @@
import pandas as pd
import requests
from dotenv import load_dotenv
-from fastapi import FastAPI, File, Form, HTTPException, Request, Response, UploadFile
+from fastapi import (
+ BackgroundTasks,
+ FastAPI,
+ File,
+ Form,
+ HTTPException,
+ Request,
+ Response,
+ UploadFile,
+)
from fastapi.responses import HTMLResponse, JSONResponse, StreamingResponse
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
@@ -74,6 +83,10 @@
from .routers import schedule_timelines as schedule_timelines_router
from .routers import cells as cells_router
from .routers import instances as instances_router
+from .routers import usdm_json as usdm_json_router
+from .routers import tdd as tdd_router
+from .routers import decision_instances as decision_instances_router
+from .routers import condition_assignments as condition_assignments_router
# Avoid binding visit helpers directly to allow fresh reloads in tests
@@ -187,6 +200,7 @@ def _configure_logging():
_backfill_dataset_date("ddf_terminology", "ddf_terminology_audit")
_backfill_dataset_date("protocol_terminology", "protocol_terminology_audit")
+
# Include routers
app.include_router(arms_router.router)
app.include_router(elements_router.router)
@@ -202,6 +216,10 @@ def _configure_logging():
app.include_router(schedule_timelines_router.router)
app.include_router(rules_router.router)
app.include_router(cells_router.router)
+app.include_router(usdm_json_router.router)
+app.include_router(tdd_router.router)
+app.include_router(decision_instances_router.router)
+app.include_router(condition_assignments_router.router)
def _record_visit_audit(
@@ -2326,12 +2344,80 @@ def _get_activity_concepts(activity_id: int):
return rows
+def _lookup_and_save_dss(soa_id: int, activity_id: int, concept_code: str) -> None:
+ """Background task: auto-lookup DSS for a concept via CDISC API and persist."""
+ import os
+ import requests as _requests
+
+ api_key = os.environ.get("CDISC_API_KEY") or os.environ.get(
+ "CDISC_SUBSCRIPTION_KEY"
+ )
+ subscription_key = os.environ.get("CDISC_SUBSCRIPTION_KEY") or api_key
+ headers: dict = {"Accept": "application/json"}
+ if subscription_key:
+ headers["Ocp-Apim-Subscription-Key"] = subscription_key
+ if api_key:
+ headers["Authorization"] = f"Bearer {api_key}"
+ headers["api-key"] = api_key
+
+ try:
+ # Step 1: discover DSS href for this concept
+ list_url = (
+ "https://api.library.cdisc.org/api/cosmos/v2/mdr/specializations"
+ "/datasetspecializations?biomedicalconcept=" + concept_code
+ )
+ r1 = _requests.get(list_url, headers=headers, timeout=15)
+ if r1.status_code != 200:
+ return
+ data1 = r1.json()
+ sdtm_links = data1["_links"]["datasetSpecializations"]["sdtm"]
+ if not sdtm_links:
+ return
+ dss_href = sdtm_links[0]["href"]
+ if dss_href.startswith("/"):
+ dss_href = "https://api.library.cdisc.org/api/cosmos/v2" + dss_href
+
+ # Step 2: fetch DSS detail to get datasetSpecializationId
+ r2 = _requests.get(dss_href, headers=headers, timeout=15)
+ if r2.status_code != 200:
+ return
+ data2 = r2.json()
+ dss_id = data2.get("datasetSpecializationId")
+ dss_domain = data2.get("domain")
+ if not dss_id:
+ return
+
+ # Step 3: persist to activity_concept
+ conn = _connect()
+ cur = conn.cursor()
+ if _table_has_columns(cur, "activity_concept", ("soa_id",)):
+ cur.execute(
+ "UPDATE activity_concept SET dss_title=?, dss_href=?, dss_domain=?"
+ " WHERE activity_id=? AND concept_code=? AND soa_id=?",
+ (dss_id, dss_href, dss_domain, activity_id, concept_code, soa_id),
+ )
+ else:
+ cur.execute(
+ "UPDATE activity_concept SET dss_title=?, dss_href=?, dss_domain=?"
+ " WHERE activity_id=? AND concept_code=?",
+ (dss_id, dss_href, dss_domain, activity_id, concept_code),
+ )
+ conn.commit()
+ conn.close()
+ except Exception:
+ pass # silent failure — DSS column remains unset; user can assign manually
+
+
# API endpoint for adding a BC to an activity
@app.post(
"/ui/soa/{soa_id}/activity/{activity_id}/concepts/add", response_class=HTMLResponse
)
def ui_add_activity_concept(
- request: Request, soa_id: int, activity_id: int, concept_code: str = Form(...)
+ request: Request,
+ soa_id: int,
+ activity_id: int,
+ background_tasks: BackgroundTasks,
+ concept_code: str = Form(...),
):
"""Add Biomedical Concept to an Activity."""
if not activity_id:
@@ -2415,6 +2501,7 @@ def ui_add_activity_concept(
(activity_id, code, title),
)
conn.commit()
+ background_tasks.add_task(_lookup_and_save_dss, soa_id, activity_id, code)
conn.close()
selected = _get_activity_concepts(activity_id)
html = templates.get_template("concepts_cell.html").render(
@@ -4696,11 +4783,30 @@ def ui_set_activity_concepts(
request: Request,
soa_id: int,
activity_id: int,
+ background_tasks: BackgroundTasks,
concept_codes: List[str] = Form([]),
):
"""Form handler to set Biomedical Concepts related to an Activity."""
payload = ConceptsUpdate(concept_codes=list(dict.fromkeys(concept_codes)))
set_activity_concepts(soa_id, activity_id, payload)
+ # Queue background DSS lookup for any concept without a DSS assigned
+ conn = _connect()
+ cur = conn.cursor()
+ if _table_has_columns(cur, "activity_concept", ("soa_id",)):
+ cur.execute(
+ "SELECT concept_code FROM activity_concept"
+ " WHERE activity_id=? AND soa_id=? AND (dss_title IS NULL OR dss_title='')",
+ (activity_id, soa_id),
+ )
+ else:
+ cur.execute(
+ "SELECT concept_code FROM activity_concept"
+ " WHERE activity_id=? AND (dss_title IS NULL OR dss_title='')",
+ (activity_id,),
+ )
+ for (code,) in cur.fetchall():
+ background_tasks.add_task(_lookup_and_save_dss, soa_id, activity_id, code)
+ conn.close()
# HTMX inline update support
if request.headers.get("HX-Request") == "true":
concepts = fetch_biomedical_concepts()
diff --git a/src/soa_builder/web/audit.py b/src/soa_builder/web/audit.py
index 2a60aba..4dfc260 100644
--- a/src/soa_builder/web/audit.py
+++ b/src/soa_builder/web/audit.py
@@ -285,6 +285,82 @@ def _record_instance_audit(
logger.warning("Failed recording instance audit: %s", e)
+def _record_decision_instance_audit(
+ soa_id: int,
+ action: str,
+ decision_instance_id: int | None,
+ before: Optional[Dict[str, Any]] = None,
+ after: Optional[Dict[str, Any]] = None,
+):
+ try:
+ conn = _connect()
+ cur = conn.cursor()
+ cur.execute(
+ """CREATE TABLE IF NOT EXISTS decision_instance_audit (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ soa_id INTEGER NOT NULL,
+ decision_instance_id INTEGER,
+ action TEXT NOT NULL,
+ before_json TEXT,
+ after_json TEXT,
+ performed_at TEXT NOT NULL
+ )"""
+ )
+ cur.execute(
+ "INSERT INTO decision_instance_audit (soa_id, decision_instance_id, action, before_json, after_json, performed_at) VALUES (?,?,?,?,?,?)",
+ (
+ soa_id,
+ decision_instance_id,
+ action,
+ json.dumps(before) if before else None,
+ json.dumps(after) if after else None,
+ datetime.now(timezone.utc).isoformat(),
+ ),
+ )
+ conn.commit()
+ conn.close()
+ except Exception as e:
+ logger.warning("Failed recording decision_instance audit: %s", e)
+
+
+def _record_condition_assignment_audit(
+ soa_id: int,
+ action: str,
+ condition_assignment_id: int | None,
+ before: Optional[Dict[str, Any]] = None,
+ after: Optional[Dict[str, Any]] = None,
+):
+ try:
+ conn = _connect()
+ cur = conn.cursor()
+ cur.execute(
+ """CREATE TABLE IF NOT EXISTS condition_assignment_audit (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ soa_id INTEGER NOT NULL,
+ condition_assignment_id INTEGER,
+ action TEXT NOT NULL,
+ before_json TEXT,
+ after_json TEXT,
+ performed_at TEXT NOT NULL
+ )"""
+ )
+ cur.execute(
+ "INSERT INTO condition_assignment_audit (soa_id, condition_assignment_id, action, before_json, after_json, performed_at) VALUES (?,?,?,?,?,?)",
+ (
+ soa_id,
+ condition_assignment_id,
+ action,
+ json.dumps(before) if before else None,
+ json.dumps(after) if after else None,
+ datetime.now(timezone.utc).isoformat(),
+ ),
+ )
+ conn.commit()
+ conn.close()
+ except Exception as e:
+ logger.warning("Failed recording condition_assignment audit: %s", e)
+
+
# Transition Rule Audit
def _record_transition_rule_audit(
soa_id: int,
diff --git a/src/soa_builder/web/initialize_database.py b/src/soa_builder/web/initialize_database.py
index 50c2a97..49160c8 100644
--- a/src/soa_builder/web/initialize_database.py
+++ b/src/soa_builder/web/initialize_database.py
@@ -136,6 +136,40 @@ def _init_db():
)"""
)
+ # decision_instances
+ cur.execute(
+ """CREATE TABLE IF NOT EXISTS decision_instances (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ soa_id INTEGER NOT NULL,
+ instance_uid TEXT NOT NULL, -- ScheduledDecisionInstance_N
+ name TEXT NOT NULL,
+ label TEXT,
+ description TEXT,
+ default_condition_uid TEXT, -- UID of default-path ScheduledInstance
+ epoch_uid TEXT,
+ member_of_timeline TEXT, -- schedule_timeline_uid FK
+ order_index INTEGER,
+ UNIQUE(soa_id, instance_uid)
+ )"""
+ )
+
+ # condition_assignment
+ cur.execute(
+ """CREATE TABLE IF NOT EXISTS condition_assignment (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ soa_id INTEGER NOT NULL,
+ condition_assignment_uid TEXT NOT NULL,
+ name TEXT NOT NULL,
+ label TEXT,
+ description TEXT,
+ condition TEXT,
+ decision_instance_uid TEXT,
+ condition_target_uid TEXT,
+ order_index INTEGER,
+ UNIQUE(soa_id, condition_assignment_uid)
+ )"""
+ )
+
# protocol_terminology: this table is created dynamically when uploading a new Protocol Terminology
# (app.py:5781-6119)
diff --git a/src/soa_builder/web/migrate_database.py b/src/soa_builder/web/migrate_database.py
index 547bbeb..61ff5e8 100644
--- a/src/soa_builder/web/migrate_database.py
+++ b/src/soa_builder/web/migrate_database.py
@@ -1012,6 +1012,10 @@ def _migrate_activity_concept_add_dss():
cur.execute("ALTER TABLE activity_concept ADD COLUMN dss_href TEXT")
conn.commit()
logger.info("Added dss_href column to activity_concept table")
+ if "dss_domain" not in cols:
+ cur.execute("ALTER TABLE activity_concept ADD COLUMN dss_domain TEXT")
+ conn.commit()
+ logger.info("Added dss_domain column to activity_concept table")
conn.close()
except Exception as e:
logger.warning("activity_concept dss migration failed: %s", e)
diff --git a/src/soa_builder/web/routers/activities.py b/src/soa_builder/web/routers/activities.py
index 4d1beaf..88e2573 100644
--- a/src/soa_builder/web/routers/activities.py
+++ b/src/soa_builder/web/routers/activities.py
@@ -6,7 +6,7 @@
import time
from typing import List
-from fastapi import APIRouter, HTTPException, Request, Form
+from fastapi import APIRouter, BackgroundTasks, HTTPException, Request, Form
from fastapi.responses import JSONResponse, HTMLResponse, RedirectResponse
from fastapi.templating import Jinja2Templates
@@ -650,6 +650,41 @@ def ui_refresh_concepts_activities(request: Request, soa_id: int):
return RedirectResponse(url=redirect_url, status_code=303)
+@ui_router.post("/ui/soa/{soa_id}/activities/dss_auto_assign")
+def ui_dss_auto_assign(
+ request: Request,
+ soa_id: int,
+ background_tasks: BackgroundTasks,
+):
+ """Queue background DSS auto-assignment for all concepts in the SOA."""
+ from ..app import _lookup_and_save_dss as _auto_dss
+
+ if not soa_exists(soa_id):
+ raise HTTPException(404, "SOA not found")
+ conn = _connect()
+ cur = conn.cursor()
+ has_soa_col = _table_has_columns(cur, "activity_concept", ("soa_id",))
+ if has_soa_col:
+ cur.execute(
+ "SELECT activity_id, concept_code FROM activity_concept WHERE soa_id=?",
+ (soa_id,),
+ )
+ else:
+ cur.execute(
+ "SELECT ac.activity_id, ac.concept_code FROM activity_concept ac"
+ " JOIN activity a ON a.id=ac.activity_id WHERE a.soa_id=?",
+ (soa_id,),
+ )
+ rows = cur.fetchall()
+ conn.close()
+ for activity_id, concept_code in rows:
+ background_tasks.add_task(_auto_dss, soa_id, activity_id, concept_code)
+ redirect_url = f"/ui/soa/{int(soa_id)}/activities"
+ if request.headers.get("HX-Request") == "true":
+ return HTMLResponse("", headers={"HX-Redirect": redirect_url})
+ return RedirectResponse(url=redirect_url, status_code=303)
+
+
@ui_router.post("/ui/soa/{soa_id}/activities/create")
def ui_create_activity(
request: Request,
diff --git a/src/soa_builder/web/routers/condition_assignments.py b/src/soa_builder/web/routers/condition_assignments.py
new file mode 100644
index 0000000..68e6123
--- /dev/null
+++ b/src/soa_builder/web/routers/condition_assignments.py
@@ -0,0 +1,419 @@
+import logging
+import os
+from typing import Optional
+
+from fastapi import APIRouter, HTTPException, Request, Form
+from fastapi.responses import JSONResponse, HTMLResponse, RedirectResponse
+from fastapi.templating import Jinja2Templates
+
+from ..audit import _record_condition_assignment_audit
+from ..db import _connect
+from ..schemas import ConditionAssignmentCreate, ConditionAssignmentUpdate
+from ..utils import (
+ soa_exists,
+ _nz as _nz,
+ get_scheduled_activity_instance,
+ redirect_url_from_referer as _redirect_url,
+)
+
+router = APIRouter()
+logger = logging.getLogger("soa_builder.web.routers.condition_assignments")
+templates = Jinja2Templates(
+ directory=os.path.join(os.path.dirname(__file__), "..", "templates")
+)
+
+
+# API endpoint to list conditions
+@router.get(
+ "/soa/{soa_id}/condition_assignments",
+ response_class=JSONResponse,
+ response_model=None,
+)
+def list_condition_assignments(soa_id: int):
+ if not soa_exists(soa_id):
+ raise HTTPException(404, "SOA not found")
+
+ conn = _connect()
+ cur = conn.cursor()
+ cur.execute(
+ """
+ SELECT id, condition_assignment_uid, name, label, description, condition,
+ decision_instance_uid, condition_target_uid, order_index FROM condition_assignment
+ WHERE soa_id=? ORDER BY name, id
+ """,
+ (soa_id,),
+ )
+ rows = [
+ {
+ "id": r[0],
+ "condition_assignment_uid": r[1],
+ "name": r[2],
+ "label": r[3],
+ "description": r[4],
+ "condition": r[5],
+ "decision_instance_uid": r[6],
+ "condition_target_uid": r[7],
+ "order_index": r[8],
+ }
+ for r in cur.fetchall()
+ ]
+ conn.close()
+ return rows
+
+
+# UI code to list conditions
+@router.get("/ui/soa/{soa_id}/condition_assignments", response_class=HTMLResponse)
+def ui_list_condition_assignments(request: Request, soa_id: int):
+ if not soa_exists(soa_id):
+ raise HTTPException(404, "SOA not found")
+
+ conditions = list_condition_assignments(soa_id)
+ instance_options = get_scheduled_activity_instance(soa_id)
+
+ # Study metadata
+ conn = _connect()
+ cur = conn.cursor()
+ cur.execute(
+ "SELECT study_id, study_label, study_description, name, created_at FROM soa WHERE id=?",
+ (soa_id,),
+ )
+ meta_row = cur.fetchone()
+ conn.close()
+ study_id, study_label, study_description, study_name, study_created_at = meta_row
+ study_meta = {
+ "study_id": study_id,
+ "study_label": study_label,
+ "study_description": study_description,
+ "study_name": study_name,
+ "study_created_at": study_created_at,
+ }
+
+ return templates.TemplateResponse(
+ request,
+ "condition_assignments.html",
+ {
+ "request": request,
+ "soa_id": soa_id,
+ "conditions": conditions,
+ "instance_options": instance_options,
+ **study_meta,
+ },
+ )
+
+
+# API endpoint to create a new condition
+@router.post(
+ "/soa/{soa_id}/condition_assignments",
+ response_class=JSONResponse,
+ status_code=201,
+ response_model=None,
+)
+def create_condition_assignment(soa_id: int, payload: ConditionAssignmentCreate):
+ if not soa_exists(soa_id):
+ raise HTTPException(404, "SOA not found")
+
+ name = (payload.name or "").strip()
+ if not name:
+ raise HTTPException(400, "Condition name required")
+
+ # Calculate next order_index and condition_assignment_uid
+ conn = _connect()
+ cur = conn.cursor()
+ cur.execute(
+ "SELECT COALESCE(MAX(order_index),0) FROM condition_assignment WHERE soa_id=?",
+ (soa_id,),
+ )
+ next_ord = (cur.fetchone() or [0])[0] + 1
+ cur.execute(
+ "SELECT condition_assignment_uid FROM condition_assignment WHERE soa_id=? and condition_assignment_uid LIKE 'Condition_%'",
+ (soa_id,),
+ )
+ existing_uids = [r[0] for r in cur.fetchall() if r[0]]
+ used_nums = set()
+ for uid in existing_uids:
+ if uid.startswith("Condition_"):
+ tail = uid[len("Condition_") :]
+ if tail.isdigit():
+ used_nums.add(int(tail))
+ else:
+ logger.warning(
+ "Invalid condition_assignment_uid format encountered (ignored): %s",
+ uid,
+ )
+ # Always pick max(existing) + 1, do not fill gaps
+ next_n = (max(used_nums) if used_nums else 0) + 1
+ new_uid = f"Condition_{next_n}"
+ # Insert values for new condition into the condition_assignment table
+ cur.execute(
+ """
+ INSERT INTO condition_assignment (soa_id,condition_assignment_uid,name,label,description,condition,
+ decision_instance_uid,condition_target_uid,order_index) VALUES (?,?,?,?,?,?,?,?,?)
+ """,
+ (
+ soa_id,
+ new_uid,
+ name,
+ _nz(payload.label),
+ _nz(payload.description),
+ _nz(payload.condition),
+ _nz(payload.decision_instance_uid),
+ _nz(payload.condition_target_uid),
+ next_ord,
+ ),
+ )
+ condition_id = cur.lastrowid
+ conn.commit()
+ conn.close()
+ after = {
+ "id": condition_id,
+ "condition_assignment_uid": new_uid,
+ "name": name,
+ "label": (payload.label or "").strip() or None,
+ "description": (payload.description or "").strip() or None,
+ "condition": (payload.condition or "").strip() or None,
+ "decision_instance_uid": (payload.decision_instance_uid or "").strip() or None,
+ "condition_target_uid": (payload.condition_target_uid or "").strip() or None,
+ }
+ _record_condition_assignment_audit(
+ soa_id, "create", condition_id, before=None, after=after
+ )
+ return after
+
+
+# UI endpoint for creating new condition
+@router.post("/ui/soa/{soa_id}/condition_assignments/create")
+def ui_create_condition_assignment(
+ request: Request,
+ soa_id: int,
+ name: str = Form(...),
+ label: Optional[str] = Form(None),
+ description: Optional[str] = Form(None),
+ condition: Optional[str] = Form(None),
+ decision_instance_uid: Optional[str] = Form(None),
+ condition_target_uid: Optional[str] = Form(None),
+):
+ payload = ConditionAssignmentCreate(
+ name=name,
+ label=label,
+ description=description,
+ condition=condition,
+ decision_instance_uid=decision_instance_uid,
+ condition_target_uid=condition_target_uid,
+ )
+ create_condition_assignment(soa_id, payload)
+ return RedirectResponse(
+ url=_redirect_url(request, f"/ui/soa/{int(soa_id)}/condition_assignments"),
+ status_code=303,
+ )
+
+
+# API endpoint to update a condition
+@router.patch(
+ "/soa/{soa_id}/condition_assignments/{condition_id}",
+ response_class=JSONResponse,
+ response_model=None,
+)
+def update_condition_assignment(
+ soa_id: int, condition_id: int, payload: ConditionAssignmentUpdate
+):
+ if not soa_exists(soa_id):
+ raise HTTPException(404, "SOA not found")
+
+ conn = _connect()
+ cur = conn.cursor()
+ cur.execute(
+ """
+ SELECT id,condition_assignment_uid,name,label,description,condition,decision_instance_uid,
+ condition_target_uid FROM condition_assignment WHERE soa_id=? AND id=?
+ """,
+ (
+ soa_id,
+ condition_id,
+ ),
+ )
+ row = cur.fetchone()
+ if not row:
+ raise HTTPException(404, f"Condition id={int(condition_id)} not found")
+
+ before = {
+ "id": row[0],
+ "condition_assignment_uid": row[1],
+ "name": row[2],
+ "label": row[3],
+ "description": row[4],
+ "condition": row[5],
+ "decision_instance_uid": row[6],
+ "condition_target_uid": row[7],
+ }
+
+ new_name = (payload.name if payload.name is not None else before["name"]) or ""
+ new_label = payload.label if payload.label is not None else before["label"]
+ new_description = (
+ payload.description
+ if payload.description is not None
+ else before["description"]
+ )
+ new_condition = (
+ payload.condition if payload.condition is not None else before["condition"]
+ )
+ new_decision_instance_uid = (
+ payload.decision_instance_uid
+ if payload.decision_instance_uid is not None
+ else before["decision_instance_uid"]
+ )
+ new_condition_target_uid = (
+ payload.condition_target_uid
+ if payload.condition_target_uid is not None
+ else before["condition_target_uid"]
+ )
+
+ cur.execute(
+ """
+ UPDATE condition_assignment SET name=?,label=?,description=?,condition=?,decision_instance_uid=?,condition_target_uid=?
+ WHERE id=? AND soa_id=?
+ """,
+ (
+ _nz(new_name),
+ _nz(new_label),
+ _nz(new_description),
+ _nz(new_condition),
+ _nz(new_decision_instance_uid),
+ _nz(new_condition_target_uid),
+ condition_id,
+ soa_id,
+ ),
+ )
+ conn.commit()
+ cur.execute(
+ """
+ SELECT id,condition_assignment_uid,name,label,description,condition,decision_instance_uid,
+ condition_target_uid FROM condition_assignment WHERE soa_id=? AND id=?
+ """,
+ (
+ soa_id,
+ condition_id,
+ ),
+ )
+ r = cur.fetchone()
+ conn.close()
+ after = {
+ "id": r[0],
+ "condition_assignment_uid": r[1],
+ "name": r[2],
+ "label": r[3],
+ "description": r[4],
+ "condition": r[5],
+ "decision_instance_uid": r[6],
+ "condition_target_uid": r[7],
+ }
+ mutable = [
+ "name",
+ "label",
+ "description",
+ "condition",
+ "decision_instance_uid",
+ "condition_target_uid",
+ ]
+ update_fields = [
+ f for f in mutable if (before.get(f) or None) != (after.get(f) or None)
+ ]
+ _record_condition_assignment_audit(
+ soa_id,
+ "update",
+ condition_id,
+ before=before,
+ after={**after, "updated_fields": update_fields},
+ )
+ return {**after, "updated_fields": update_fields}
+
+
+# UI endpoint for updating a condition
+@router.post("/ui/soa/{soa_id}/condition_assignments/{condition_id}/update")
+def ui_update_condition_assignment(
+ request: Request,
+ soa_id: int,
+ condition_id: int,
+ name: Optional[str] = Form(None),
+ label: Optional[str] = Form(None),
+ description: Optional[str] = Form(None),
+ condition: Optional[str] = Form(None),
+ decision_instance_uid: Optional[str] = Form(None),
+ condition_target_uid: Optional[str] = Form(None),
+):
+ payload = ConditionAssignmentUpdate(
+ name=name,
+ label=label,
+ description=description,
+ condition=condition,
+ decision_instance_uid=decision_instance_uid,
+ condition_target_uid=condition_target_uid,
+ )
+ update_condition_assignment(soa_id, condition_id, payload)
+ return RedirectResponse(
+ url=_redirect_url(request, f"/ui/soa/{int(soa_id)}/condition_assignments"),
+ status_code=303,
+ )
+
+
+# API endpoint for deleting a condition
+@router.delete(
+ "/soa/{soa_id}/condition_assignments/{condition_id}",
+ response_class=JSONResponse,
+ response_model=None,
+)
+def delete_condition_assignment(soa_id: int, condition_id: int):
+ if not soa_exists(soa_id):
+ raise HTTPException(404, "SOA not found")
+
+ conn = _connect()
+ cur = conn.cursor()
+ cur.execute(
+ """
+ SELECT id,condition_assignment_uid,name,label,description,condition,decision_instance_uid,condition_target_uid
+ FROM condition_assignment WHERE soa_id=? and id=?
+ """,
+ (
+ soa_id,
+ condition_id,
+ ),
+ )
+ row = cur.fetchone()
+ if not row:
+ raise HTTPException(404, f"Condition id={int(condition_id)} not found")
+
+ before = {
+ "id": row[0],
+ "condition_assignment_uid": row[1],
+ "name": row[2],
+ "label": row[3],
+ "description": row[4],
+ "condition": row[5],
+        "decision_instance_uid": row[6],
+ "condition_target_uid": row[7],
+ }
+ cur.execute(
+ "DELETE FROM condition_assignment WHERE soa_id=? AND id=?",
+ (
+ soa_id,
+ condition_id,
+ ),
+ )
+ conn.commit()
+ conn.close()
+ _record_condition_assignment_audit(
+ soa_id, "delete", condition_id, before, after=None
+ )
+ return {"deleted": True, "id": condition_id}
+
+
+# UI endpoint to delete a condition
+@router.post("/ui/soa/{soa_id}/condition_assignments/{condition_id}/delete")
+def ui_delete_condition_assignment(request: Request, soa_id: int, condition_id: int):
+ delete_condition_assignment(soa_id, condition_id)
+ return RedirectResponse(
+ url=_redirect_url(
+ request,
+ f"/ui/soa/{int(soa_id)}/condition_assignments",
+ ),
+ status_code=303,
+ )
diff --git a/src/soa_builder/web/routers/decision_instances.py b/src/soa_builder/web/routers/decision_instances.py
new file mode 100644
index 0000000..f853589
--- /dev/null
+++ b/src/soa_builder/web/routers/decision_instances.py
@@ -0,0 +1,416 @@
+import logging
+import os
+from typing import Optional, List
+
+from fastapi import APIRouter, HTTPException, Request, Form, Body
+from fastapi.responses import JSONResponse, HTMLResponse, RedirectResponse
+from fastapi.templating import Jinja2Templates
+
+from ..audit import _record_decision_instance_audit
+from ..db import _connect
+from ..schemas import DecisionInstanceCreate, DecisionInstanceUpdate
+from ..utils import (
+ soa_exists,
+ get_epoch_uid,
+ get_schedule_timeline,
+ redirect_url_from_referer as _redirect_url,
+ _nz as _nz,
+)
+
+router = APIRouter()
+logger = logging.getLogger("soa_builder.web.routers.decision_instances")
+templates = Jinja2Templates(
+ directory=os.path.join(os.path.dirname(__file__), "..", "templates")
+)
+
+
@router.get(
    "/soa/{soa_id}/decision_instances",
    response_class=JSONResponse,
    response_model=None,
)
def list_decision_instances(soa_id: int):
    """Return every decision instance of the SOA, ordered by order_index then id."""
    if not soa_exists(soa_id):
        raise HTTPException(404, "SOA not found")

    columns = (
        "id",
        "instance_uid",
        "name",
        "label",
        "description",
        "default_condition_uid",
        "epoch_uid",
        "member_of_timeline",
        "order_index",
    )
    conn = _connect()
    cur = conn.cursor()
    cur.execute(
        f"SELECT {', '.join(columns)} FROM decision_instances "
        "WHERE soa_id=? ORDER BY order_index, id",
        (soa_id,),
    )
    records = [dict(zip(columns, fetched)) for fetched in cur.fetchall()]
    conn.close()
    return records
+
+
@router.get("/ui/soa/{soa_id}/decision_instances", response_class=HTMLResponse)
def ui_list_decision_instances(request: Request, soa_id: int):
    """Render the decision-instances management page for one SOA."""
    if not soa_exists(soa_id):
        raise HTTPException(404, "SOA not found")

    # Study header metadata shown at the top of the page.
    conn = _connect()
    cur = conn.cursor()
    cur.execute(
        "SELECT study_id, study_label, study_description, name, created_at FROM soa WHERE id=?",
        (soa_id,),
    )
    meta_row = cur.fetchone()
    conn.close()
    study_id, study_label, study_description, study_name, study_created_at = meta_row

    context = {
        "request": request,
        "soa_id": soa_id,
        "decision_instances": list_decision_instances(soa_id),
        "schedule_timelines_options": get_schedule_timeline(soa_id),
        "epoch_options": get_epoch_uid(soa_id),
        "study_id": study_id,
        "study_label": study_label,
        "study_description": study_description,
        "study_name": study_name,
        "study_created_at": study_created_at,
    }
    return templates.TemplateResponse(request, "decision_instances.html", context)
+
+
@router.post(
    "/soa/{soa_id}/decision_instances",
    response_class=JSONResponse,
    status_code=201,
    response_model=None,
)
def create_decision_instance(soa_id: int, payload: DecisionInstanceCreate):
    """Create a decision instance with the next order_index and a fresh UID.

    Raises 404 for an unknown SOA and 400 for a blank name.  Returns the
    created record (also written to the audit log).
    """
    if not soa_exists(soa_id):
        raise HTTPException(404, "SOA not found")

    name = (payload.name or "").strip()
    if not name:
        raise HTTPException(400, "Decision instance name required")

    conn = _connect()
    cur = conn.cursor()

    # Next position in the display order.
    cur.execute(
        "SELECT COALESCE(MAX(order_index), 0) FROM decision_instances WHERE soa_id=?",
        (soa_id,),
    )
    next_ord = (cur.fetchone() or [0])[0] + 1

    # Mint ScheduledDecisionInstance_<n> where <n> is one past the highest
    # numeric suffix already used for this SOA.
    prefix = "ScheduledDecisionInstance_"
    cur.execute(
        "SELECT instance_uid FROM decision_instances WHERE soa_id=? "
        "AND instance_uid LIKE 'ScheduledDecisionInstance_%'",
        (soa_id,),
    )
    used_nums = set()
    for (uid,) in cur.fetchall():
        if not uid:
            continue
        if uid.startswith(prefix):
            suffix = uid[len(prefix):]
            if suffix.isdigit():
                used_nums.add(int(suffix))
        else:
            # SQLite LIKE is case-insensitive for ASCII, so a differently
            # cased UID can match the query and land here.
            logger.warning("Invalid decision instance uid format (ignored): %s", uid)
    new_uid = f"{prefix}{max(used_nums, default=0) + 1}"

    # Column values in INSERT order (dicts preserve insertion order).
    values = {
        "instance_uid": new_uid,
        "name": name,
        "label": _nz(payload.label),
        "description": _nz(payload.description),
        "default_condition_uid": _nz(payload.default_condition_uid),
        "epoch_uid": _nz(payload.epoch_uid),
        "member_of_timeline": _nz(payload.member_of_timeline),
        "order_index": next_ord,
    }
    cur.execute(
        "INSERT INTO decision_instances (soa_id, instance_uid, name, label, description, "
        "default_condition_uid, epoch_uid, member_of_timeline, order_index) "
        "VALUES (?,?,?,?,?,?,?,?,?)",
        (soa_id, *values.values()),
    )
    decision_instance_id = cur.lastrowid
    conn.commit()
    conn.close()

    after = {"id": decision_instance_id, **values}
    _record_decision_instance_audit(
        soa_id, "create", decision_instance_id, before=None, after=after
    )
    return after
+
+
@router.post("/ui/soa/{soa_id}/decision_instances/create")
def ui_create_decision_instance(
    request: Request,
    soa_id: int,
    name: str = Form(...),
    label: Optional[str] = Form(None),
    description: Optional[str] = Form(None),
    default_condition_uid: Optional[str] = Form(None),
    epoch_uid: Optional[str] = Form(None),
    member_of_timeline: Optional[str] = Form(None),
):
    """Form-based wrapper around the JSON create endpoint; redirects on success."""
    create_decision_instance(
        soa_id,
        DecisionInstanceCreate(
            name=name,
            label=label,
            description=description,
            default_condition_uid=default_condition_uid,
            epoch_uid=epoch_uid,
            member_of_timeline=member_of_timeline,
        ),
    )
    target = _redirect_url(request, f"/ui/soa/{int(soa_id)}/decision_instances")
    return RedirectResponse(url=target, status_code=303)
+
+
@router.patch(
    "/soa/{soa_id}/decision_instances/{decision_instance_id}",
    response_class=JSONResponse,
    response_model=None,
)
def update_decision_instance(
    soa_id: int, decision_instance_id: int, payload: DecisionInstanceUpdate
):
    """Partially update a decision instance.

    Payload fields left as None keep their stored values.  Returns the
    refreshed row plus ``updated_fields`` (the columns that changed), and
    records a before/after audit entry.

    Raises 404 when the SOA or the row is missing, and 400 when an explicit
    blank name is supplied — mirroring create_decision_instance, which
    requires a non-blank name (previously a blank name was silently stored
    as NULL).
    """
    if not soa_exists(soa_id):
        raise HTTPException(404, "SOA not found")
    if payload.name is not None and not payload.name.strip():
        raise HTTPException(400, "Decision instance name required")

    columns = (
        "id",
        "instance_uid",
        "name",
        "label",
        "description",
        "default_condition_uid",
        "epoch_uid",
        "member_of_timeline",
        "order_index",
    )
    select_sql = (
        "SELECT id, instance_uid, name, label, description, default_condition_uid, "
        "epoch_uid, member_of_timeline, order_index FROM decision_instances "
        "WHERE soa_id=? AND id=?"
    )

    conn = _connect()
    cur = conn.cursor()
    cur.execute(select_sql, (soa_id, decision_instance_id))
    row = cur.fetchone()
    if not row:
        conn.close()
        raise HTTPException(
            404, f"Decision instance id={int(decision_instance_id)} not found"
        )
    before = dict(zip(columns, row))

    def _merged(field: str):
        # Payload value wins when provided; otherwise keep the stored value.
        supplied = getattr(payload, field)
        return supplied if supplied is not None else before[field]

    mutable = [
        "name",
        "label",
        "description",
        "default_condition_uid",
        "epoch_uid",
        "member_of_timeline",
    ]
    new_values = [_nz(_merged(f)) for f in mutable]

    cur.execute(
        "UPDATE decision_instances SET name=?, label=?, description=?, "
        "default_condition_uid=?, epoch_uid=?, member_of_timeline=? "
        "WHERE id=? AND soa_id=?",
        (*new_values, decision_instance_id, soa_id),
    )
    conn.commit()

    # Re-read so the response reflects exactly what was persisted.
    cur.execute(select_sql, (soa_id, decision_instance_id))
    after = dict(zip(columns, cur.fetchone()))
    conn.close()

    updated_fields = [
        f for f in mutable if (before.get(f) or None) != (after.get(f) or None)
    ]
    _record_decision_instance_audit(
        soa_id,
        "update",
        decision_instance_id,
        before=before,
        after={**after, "updated_fields": updated_fields},
    )
    return {**after, "updated_fields": updated_fields}
+
+
@router.post("/ui/soa/{soa_id}/decision_instances/{decision_instance_id}/update")
def ui_update_decision_instance(
    request: Request,
    soa_id: int,
    decision_instance_id: int,
    name: Optional[str] = Form(None),
    label: Optional[str] = Form(None),
    description: Optional[str] = Form(None),
    default_condition_uid: Optional[str] = Form(None),
    epoch_uid: Optional[str] = Form(None),
    member_of_timeline: Optional[str] = Form(None),
):
    """Form-based wrapper around the JSON update endpoint; redirects on success."""
    update_decision_instance(
        soa_id,
        decision_instance_id,
        DecisionInstanceUpdate(
            name=name,
            label=label,
            description=description,
            default_condition_uid=default_condition_uid,
            epoch_uid=epoch_uid,
            member_of_timeline=member_of_timeline,
        ),
    )
    target = _redirect_url(request, f"/ui/soa/{int(soa_id)}/decision_instances")
    return RedirectResponse(url=target, status_code=303)
+
+
@router.delete(
    "/soa/{soa_id}/decision_instances/{decision_instance_id}",
    response_class=JSONResponse,
    response_model=None,
)
def delete_decision_instance(soa_id: int, decision_instance_id: int):
    """Delete one decision instance, auditing a minimal before-image."""
    if not soa_exists(soa_id):
        raise HTTPException(404, "SOA not found")

    conn = _connect()
    cur = conn.cursor()
    cur.execute(
        "SELECT id, instance_uid, name FROM decision_instances WHERE soa_id=? AND id=?",
        (soa_id, decision_instance_id),
    )
    found = cur.fetchone()
    if found is None:
        conn.close()
        raise HTTPException(
            404, f"Decision instance id={int(decision_instance_id)} not found"
        )

    snapshot = dict(zip(("id", "instance_uid", "name"), found))
    cur.execute(
        "DELETE FROM decision_instances WHERE id=? AND soa_id=?",
        (decision_instance_id, soa_id),
    )
    conn.commit()
    conn.close()
    _record_decision_instance_audit(
        soa_id, "delete", decision_instance_id, before=snapshot, after=None
    )
    return {"deleted": True, "id": decision_instance_id}
+
+
@router.post("/ui/soa/{soa_id}/decision_instances/{decision_instance_id}/delete")
def ui_delete_decision_instance(
    request: Request, soa_id: int, decision_instance_id: int
):
    """Delete via the API handler, then redirect back to the list page."""
    delete_decision_instance(soa_id, decision_instance_id)
    target = _redirect_url(request, f"/ui/soa/{int(soa_id)}/decision_instances")
    return RedirectResponse(url=target, status_code=303)
+
+
@router.post("/soa/{soa_id}/decision_instances/reorder", response_class=JSONResponse)
def reorder_decision_instances(
    soa_id: int,
    order: List[int] = Body(..., embed=True),
):
    """Re-sequence decision instances so order[i] gets order_index i+1.

    Rejects empty lists, ids that do not belong to this SOA, and duplicate
    ids (a duplicate would silently produce a gapped sequence).
    """
    if not soa_exists(soa_id):
        raise HTTPException(404, "SOA not found")
    if not order:
        raise HTTPException(400, "Order list required")
    if len(set(order)) != len(order):
        raise HTTPException(400, "Order contains duplicate decision instance id")

    conn = _connect()
    cur = conn.cursor()
    cur.execute("SELECT id FROM decision_instances WHERE soa_id=?", (soa_id,))
    existing = {r[0] for r in cur.fetchall()}
    if set(order) - existing:
        conn.close()
        raise HTTPException(400, "Order contains invalid decision instance id")

    for idx, did in enumerate(order, start=1):
        # Scope by soa_id too, so a stray id can never touch another SOA's row.
        cur.execute(
            "UPDATE decision_instances SET order_index=? WHERE id=? AND soa_id=?",
            (idx, did, soa_id),
        )
    conn.commit()
    conn.close()
    return JSONResponse({"ok": True, "new_order": order})
diff --git a/src/soa_builder/web/routers/instances.py b/src/soa_builder/web/routers/instances.py
index 1cef88b..1a7a807 100644
--- a/src/soa_builder/web/routers/instances.py
+++ b/src/soa_builder/web/routers/instances.py
@@ -16,6 +16,7 @@
get_schedule_timeline,
get_scheduled_activity_instance,
redirect_url_from_referer as _redirect_url,
+ _nz as _nz,
)
router = APIRouter()
@@ -25,11 +26,6 @@
)
-def _nz(s: Optional[str]) -> Optional[str]:
- s = (s or "").strip()
- return s or None
-
-
# API endpoint to list timeline instances for SOA
@router.get("/soa/{soa_id}/instances", response_class=JSONResponse, response_model=None)
def list_instances(soa_id: int):
diff --git a/src/soa_builder/web/routers/schedule_timelines.py b/src/soa_builder/web/routers/schedule_timelines.py
index 47d2e9c..21b1911 100644
--- a/src/soa_builder/web/routers/schedule_timelines.py
+++ b/src/soa_builder/web/routers/schedule_timelines.py
@@ -18,6 +18,8 @@
get_study_timing_type,
redirect_url_from_referer as _redirect_url,
)
+from .condition_assignments import list_condition_assignments
+from .decision_instances import list_decision_instances
from .instances import list_instances
from .timings import list_timings
@@ -143,9 +145,11 @@ def ui_study_timing(request: Request, soa_id: int):
# Instances data
instances = list_instances(soa_id)
+ decision_instances = list_decision_instances(soa_id)
encounter_options = get_encounter_id(soa_id)
epoch_options = get_epoch_uid(soa_id)
schedule_timelines_options = get_schedule_timeline(soa_id)
+ conditions = list_condition_assignments(soa_id)
# Timings data (with code_uid -> submission_value decoding)
timings = list_timings(soa_id)
@@ -200,6 +204,8 @@ def ui_study_timing(request: Request, soa_id: int):
"soa_id": soa_id,
"schedule_timelines": schedule_timelines,
"instances": instances,
+ "conditions": conditions,
+ "decision_instances": decision_instances,
"timings": timings,
"instance_options": instance_options,
"encounter_options": encounter_options,
diff --git a/src/soa_builder/web/routers/tdd.py b/src/soa_builder/web/routers/tdd.py
new file mode 100644
index 0000000..c96312f
--- /dev/null
+++ b/src/soa_builder/web/routers/tdd.py
@@ -0,0 +1,128 @@
+"""Routes for generating SDTM Trial Design Domains (TA, TE)."""
+
+import csv
+import io
+import json
+import logging
+import os
+
+from fastapi import APIRouter, HTTPException, Request
+from fastapi.responses import HTMLResponse, StreamingResponse
+from fastapi.templating import Jinja2Templates
+
+from ..utils import soa_exists
+
+router = APIRouter()
+logger = logging.getLogger("soa_builder.web.routers.tdd")
+templates = Jinja2Templates(
+ directory=os.path.join(os.path.dirname(__file__), "..", "templates")
+)
+
# (key, display title, JSON download filename, CSV download filename)
_DOMAINS = [
    ("ta", "Trial Arms", "ta.json", "ta.csv"),
    ("te", "Trial Elements", "te.json", "te.csv"),
    ("tv", "Trial Visits", "tv.json", "tv.csv"),
]

# CSV column order per domain key.
# NOTE(review): presumably mirrors the SDTM IG v3.4 trial-design variable
# order -- confirm against the IG before relying on it downstream.
_FIELDNAMES: dict[str, list[str]] = {
    "ta": [
        "STUDYID",
        "DOMAIN",
        "ARMCD",
        "ARM",
        "TAETORD",
        "ETCD",
        "ELEMENT",
        "TABRANCH",
        "TATRANS",
        "EPOCH",
    ],
    "te": ["STUDYID", "DOMAIN", "ETCD", "ELEMENT", "TESTRL", "TEENRL", "TEDUR"],
    "tv": [
        "STUDYID",
        "DOMAIN",
        "VISITNUM",
        "VISIT",
        "VISITDY",
        "ARMCD",
        "ARM",
        "TVSTRL",
        "TVENRL",
    ],
}
+
+
+def _build(domain: str, soa_id: int) -> list[dict]:
+ """Delegate to the appropriate SDTM TDD generator."""
+ if domain == "ta":
+ from sdtm.generate_ta import build_sdtm_ta
+
+ return build_sdtm_ta(soa_id)
+ if domain == "te":
+ from sdtm.generate_te import build_sdtm_te
+
+ return build_sdtm_te(soa_id)
+ if domain == "tv":
+ from sdtm.generate_tv import build_sdtm_tv
+
+ return build_sdtm_tv(soa_id)
+ raise ValueError(f"Unknown domain: {domain}")
+
+
@router.get("/ui/soa/{soa_id}/tdd", response_class=HTMLResponse)
def ui_tdd(request: Request, soa_id: int):
    """Render the TDD download page listing the available SDTM domains."""
    if not soa_exists(soa_id):
        raise HTTPException(404, "SOA not found")
    context = {"soa_id": soa_id, "domains": _DOMAINS}
    return templates.TemplateResponse(request, "tdd.html", context)
+
+
@router.get("/soa/{soa_id}/tdd/{domain}/json")
def download_tdd_json(soa_id: int, domain: str):
    """Stream the generated SDTM domain records as a JSON file attachment.

    Raises 404 for an unknown SOA, 400 for an unknown domain key, and 500
    (with the cause chained) when the generator fails.
    """
    if not soa_exists(soa_id):
        raise HTTPException(404, "SOA not found")
    valid_keys = {d[0] for d in _DOMAINS}
    if domain not in valid_keys:
        raise HTTPException(400, f"Unknown domain '{domain}'")
    try:
        data = _build(domain, soa_id)
    except Exception as exc:
        logger.exception("Failed to build TDD domain %s for soa_id=%s", domain, soa_id)
        raise HTTPException(500, f"Failed to generate {domain}: {exc}") from exc
    filename = next(d[2] for d in _DOMAINS if d[0] == domain)
    payload = json.dumps(data, indent=2) + "\n"
    buf = io.BytesIO(payload.encode("utf-8"))
    return StreamingResponse(
        buf,
        media_type="application/json",
        # Bug fix: the computed filename was never interpolated into the
        # header (the f-string had no placeholder), so every download was
        # served with a literal placeholder name.
        headers={"Content-Disposition": f'attachment; filename="{filename}"'},
    )
+
+
@router.get("/soa/{soa_id}/tdd/{domain}/csv")
def download_tdd_csv(soa_id: int, domain: str):
    """Stream the generated SDTM domain records as a CSV file attachment.

    Column order comes from _FIELDNAMES[domain].  Raises 404 for an unknown
    SOA, 400 for an unknown domain key, and 500 when the generator fails.
    """
    if not soa_exists(soa_id):
        raise HTTPException(404, "SOA not found")
    valid_keys = {d[0] for d in _DOMAINS}
    if domain not in valid_keys:
        raise HTTPException(400, f"Unknown domain '{domain}'")
    try:
        data = _build(domain, soa_id)
    except Exception as exc:
        logger.exception("Failed to build TDD domain %s for soa_id=%s", domain, soa_id)
        raise HTTPException(500, f"Failed to generate {domain}: {exc}") from exc
    filename = next(d[3] for d in _DOMAINS if d[0] == domain)
    fieldnames = _FIELDNAMES[domain]
    out = io.StringIO()
    writer = csv.DictWriter(out, fieldnames=fieldnames)
    writer.writeheader()
    writer.writerows(data)
    buf = io.BytesIO(out.getvalue().encode("utf-8"))
    return StreamingResponse(
        buf,
        media_type="text/csv",
        # Bug fix: the computed filename was never interpolated into the
        # header (the f-string had no placeholder).
        headers={"Content-Disposition": f'attachment; filename="{filename}"'},
    )
diff --git a/src/soa_builder/web/routers/usdm_json.py b/src/soa_builder/web/routers/usdm_json.py
new file mode 100644
index 0000000..5215c70
--- /dev/null
+++ b/src/soa_builder/web/routers/usdm_json.py
@@ -0,0 +1,128 @@
+import io
+import json
+import logging
+import os
+
+from fastapi import APIRouter, HTTPException, Request
+from fastapi.responses import HTMLResponse, StreamingResponse
+from fastapi.templating import Jinja2Templates
+
+from ..db import _connect
+from ..utils import soa_exists
+
+router = APIRouter()
+logger = logging.getLogger("soa_builder.web.routers.usdm_json")
+templates = Jinja2Templates(
+ directory=os.path.join(os.path.dirname(__file__), "..", "templates")
+)
+
# (key, display title, download filename) for each exportable USDM component.
_COMPONENTS = [
    ("full", "Full USDM Document", "usdm_full.json"),
    ("arms", "Arms", "usdm_arms.json"),
    ("activities", "Activities", "usdm_activities.json"),
    ("biomedical_concepts", "Biomedical Concepts", "usdm_biomedical_concepts.json"),
    ("elements", "Study Elements", "usdm_elements.json"),
    ("encounters", "Encounters", "usdm_encounters.json"),
    ("epochs", "Study Epochs", "usdm_epochs.json"),
    ("schedule_timelines", "Schedule Timelines", "usdm_schedule_timelines.json"),
    ("timings", "Timings", "usdm_timings.json"),
    ("instances", "Scheduled Activity Instances", "usdm_instances.json"),
    ("study_cells", "Study Cells", "usdm_study_cells.json"),
]
+
+
def _build(component: str, soa_id: int):
    """Delegate to the appropriate usdm generator.

    Imports are deferred to the matching branch so only the generator that is
    actually needed gets loaded per request.  Raises ValueError for an
    unrecognized component key.
    """
    if component == "full":
        from usdm.generate_usdm import build_usdm

        return build_usdm(soa_id)
    if component == "arms":
        from usdm.generate_arms import build_usdm_arms

        return build_usdm_arms(soa_id)
    if component == "activities":
        from usdm.generate_activities import build_usdm_activities

        return build_usdm_activities(soa_id)

    if component == "biomedical_concepts":
        from usdm.generate_biomedical_concepts import build_usdm_biomedical_concepts

        return build_usdm_biomedical_concepts(soa_id)

    if component == "elements":
        from usdm.generate_elements import build_usdm_elements

        return build_usdm_elements(soa_id)
    if component == "encounters":
        from usdm.generate_encounters import build_usdm_encounters

        return build_usdm_encounters(soa_id)
    if component == "epochs":
        from usdm.generate_study_epochs import build_usdm_epochs

        return build_usdm_epochs(soa_id)
    if component == "schedule_timelines":
        from usdm.generate_schedule_timelines import build_usdm_schedule_timelines

        return build_usdm_schedule_timelines(soa_id)
    if component == "timings":
        from usdm.generate_study_timings import build_usdm_timings

        # Second argument is a filter; None presumably means "all timelines"
        # -- TODO confirm against the generator's signature.
        return build_usdm_timings(soa_id, None)
    if component == "instances":
        from usdm.generate_scheduled_activity_instances import build_usdm_instances

        # Same None-means-all convention as timings -- verify.
        return build_usdm_instances(soa_id, None)
    if component == "study_cells":
        from usdm.generate_study_cells import build_usdm_study_cells

        return build_usdm_study_cells(soa_id)
    raise ValueError(f"Unknown component: {component}")
+
+
@router.get("/ui/soa/{soa_id}/usdm_json", response_class=HTMLResponse)
def ui_usdm_json(request: Request, soa_id: int):
    """Render the USDM JSON download page for one SOA."""
    if not soa_exists(soa_id):
        raise HTTPException(404, "SOA not found")

    conn = _connect()
    cur = conn.cursor()
    cur.execute("SELECT name, study_id, study_label FROM soa WHERE id=?", (soa_id,))
    study_name, study_id_value, study_label = cur.fetchone()
    conn.close()

    context = {
        "soa_id": soa_id,
        "study_name": study_name,
        "study_id_value": study_id_value,
        "study_label": study_label,
        "components": _COMPONENTS,
    }
    return templates.TemplateResponse(request, "usdm_json.html", context)
+
+
@router.get("/soa/{soa_id}/usdm_json/{component}")
def download_usdm_component(soa_id: int, component: str):
    """Stream one generated USDM component as a JSON file attachment.

    Raises 404 for an unknown SOA, 400 for an unknown component key, and 500
    (with the cause chained) when the generator fails.
    """
    if not soa_exists(soa_id):
        raise HTTPException(404, "SOA not found")
    valid_keys = {c[0] for c in _COMPONENTS}
    if component not in valid_keys:
        raise HTTPException(400, f"Unknown component '{component}'")
    try:
        data = _build(component, soa_id)
    except Exception as exc:
        logger.exception(
            "Failed to build USDM component %s for soa_id=%s", component, soa_id
        )
        raise HTTPException(500, f"Failed to generate {component}: {exc}") from exc
    filename = next(c[2] for c in _COMPONENTS if c[0] == component)
    payload = json.dumps(data, indent=2) + "\n"
    buf = io.BytesIO(payload.encode("utf-8"))
    return StreamingResponse(
        buf,
        media_type="application/json",
        # Bug fix: the computed filename was never interpolated into the
        # header (the f-string had no placeholder).
        headers={"Content-Disposition": f'attachment; filename="{filename}"'},
    )
diff --git a/src/soa_builder/web/schemas.py b/src/soa_builder/web/schemas.py
index e50c535..1e0095c 100644
--- a/src/soa_builder/web/schemas.py
+++ b/src/soa_builder/web/schemas.py
@@ -303,3 +303,39 @@ class StudyCellUpdate(BaseModel):
arm_uid: Optional[str] = None
epoch_uid: Optional[str] = None
element_uid: Optional[str] = None
+
+
class DecisionInstanceCreate(BaseModel):
    """Request payload for creating a ScheduledDecisionInstance.

    Only ``name`` is required; all other fields default to None and are
    normalized (blank -> NULL) by the route handler.
    """

    name: str
    label: Optional[str] = None
    description: Optional[str] = None
    default_condition_uid: Optional[str] = None
    epoch_uid: Optional[str] = None
    member_of_timeline: Optional[str] = None
+
+
class DecisionInstanceUpdate(BaseModel):
    """Partial-update payload for a ScheduledDecisionInstance.

    Every field is optional; None means "leave the stored value unchanged".
    """

    name: Optional[str] = None
    label: Optional[str] = None
    description: Optional[str] = None
    default_condition_uid: Optional[str] = None
    epoch_uid: Optional[str] = None
    member_of_timeline: Optional[str] = None
+
+
class ConditionAssignmentCreate(BaseModel):
    """Request payload for creating a condition assignment.

    Only ``name`` is required; ``decision_instance_uid`` and
    ``condition_target_uid`` link the condition to its decision instance and
    target instance when provided.
    """

    name: str
    label: Optional[str] = None
    description: Optional[str] = None
    condition: Optional[str] = None
    decision_instance_uid: Optional[str] = None
    condition_target_uid: Optional[str] = None
+
+
class ConditionAssignmentUpdate(BaseModel):
    """Partial-update payload for a condition assignment.

    Every field is optional; None means "leave the stored value unchanged".
    """

    name: Optional[str] = None
    label: Optional[str] = None
    description: Optional[str] = None
    condition: Optional[str] = None
    decision_instance_uid: Optional[str] = None
    condition_target_uid: Optional[str] = None
diff --git a/src/soa_builder/web/templates/_condition_assignments_section.html b/src/soa_builder/web/templates/_condition_assignments_section.html
new file mode 100644
index 0000000..187e3f5
--- /dev/null
+++ b/src/soa_builder/web/templates/_condition_assignments_section.html
@@ -0,0 +1,164 @@
+
Condition Assignments for Study: {% if study_label %}{{ study_label }}{% else %}{{ study_name }}{% endif %}
+
+{% if not hide_return_link %}
+
+{% endif %}
+
+
+
+
+
+ | ID |
+ Name |
+ Label |
+ Description |
+ Condition |
+ Decision Instance |
+ Target Instance |
+ Save |
+ Delete |
+
+ {% for c in conditions or [] %}
+
+
+ |
+
+ |
+
+ {% else %}
+
+ | No Conditions yet. |
+
+ {% endfor %}
+
+
+
+
diff --git a/src/soa_builder/web/templates/_decision_instances_section.html b/src/soa_builder/web/templates/_decision_instances_section.html
new file mode 100644
index 0000000..699487b
--- /dev/null
+++ b/src/soa_builder/web/templates/_decision_instances_section.html
@@ -0,0 +1,173 @@
+Scheduled Decision Instances for Study: {% if study_label %}{{ study_label }}{% else %}{{ study_name }}{% endif %}
+
+{% if not hide_return_link %}
+
+{% endif %}
+
+
+
+
+
+ | ID |
+ Name |
+ Label |
+ Description |
+ Default Condition |
+ Epoch |
+ Member of Timeline |
+ Save |
+ Delete |
+
+ {% for di in decision_instances or [] %}
+
+
+ |
+
+ |
+
+ {% else %}
+ | No Decision Instances yet. |
+ {% endfor %}
+
+
+
diff --git a/src/soa_builder/web/templates/_instances_section.html b/src/soa_builder/web/templates/_instances_section.html
index 38d7c29..474353c 100644
--- a/src/soa_builder/web/templates/_instances_section.html
+++ b/src/soa_builder/web/templates/_instances_section.html
@@ -1,4 +1,4 @@
-Scheduled Activity Instances for Study: {% if study_label %}{{ study_label }}{% else %}{{ study_name }}{% endif %}
+Scheduled Instances for Study: {% if study_label %}{{ study_label }}{% else %}{{ study_name }}{% endif %}
{% if not hide_return_link %}
@@ -8,7 +8,7 @@
Scheduled Activity Instances for Study: {% if study_label %}{{ study_label }
{% endif %}
-