diff --git a/.gitignore b/.gitignore
index 19300fd..093fae6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -95,5 +95,6 @@ old-tests/
*.db-corrupt
docs/~*
files/~*
+output/*
# End of file
diff --git a/src/soa_builder/web/app.py b/src/soa_builder/web/app.py
index 9573e16..8adc83a 100644
--- a/src/soa_builder/web/app.py
+++ b/src/soa_builder/web/app.py
@@ -48,6 +48,7 @@
_migrate_add_epoch_label_desc,
_migrate_add_epoch_seq,
_migrate_add_study_fields,
+ _migrate_add_epoch_uid,
_migrate_arm_add_type_fields,
_migrate_element_audit_columns,
_migrate_copy_cell_data,
@@ -72,8 +73,10 @@
from .schemas import ArmCreate, SOACreate, SOAMetadataUpdate
from .utils import (
get_next_code_uid as _get_next_code_uid,
+ get_next_concept_uid as _get_next_concept_uid,
load_epoch_type_options,
soa_exists,
+ table_has_columns as _table_has_columns,
)
# Audit functions
@@ -136,6 +139,7 @@ def _configure_logging():
_migrate_add_epoch_id_to_visit()
_migrate_add_epoch_seq()
_migrate_add_epoch_label_desc()
+_migrate_add_epoch_uid()
_migrate_create_code_junction()
_migrate_add_study_fields()
_drop_unused_override_table()
@@ -520,12 +524,34 @@ def _create_freeze(soa_id: int, version_label: Optional[str]):
concepts_map = {}
if activity_ids:
placeholders = ",".join("?" for _ in activity_ids)
- cur.execute(
- f"SELECT activity_id, concept_code, concept_title FROM activity_concept WHERE activity_id IN ({placeholders})",
- activity_ids,
- )
- for aid, code, title in cur.fetchall():
- concepts_map.setdefault(aid, []).append({"code": code, "title": title})
+ has_uid = _table_has_columns(cur, "activity_concept", ("concept_uid",))
+ if _table_has_columns(cur, "activity_concept", ("soa_id",)):
+ if has_uid:
+ cur.execute(
+ f"SELECT activity_id, concept_code, concept_title, concept_uid FROM activity_concept WHERE soa_id=? AND activity_id IN ({placeholders})",
+ [soa_id] + activity_ids,
+ )
+ else:
+ cur.execute(
+ f"SELECT activity_id, concept_code, concept_title, NULL as concept_uid FROM activity_concept WHERE soa_id=? AND activity_id IN ({placeholders})",
+ [soa_id] + activity_ids,
+ )
+ else:
+ if has_uid:
+ cur.execute(
+ f"SELECT activity_id, concept_code, concept_title, concept_uid FROM activity_concept WHERE activity_id IN ({placeholders})",
+ activity_ids,
+ )
+ else:
+ cur.execute(
+ f"SELECT activity_id, concept_code, concept_title, NULL as concept_uid FROM activity_concept WHERE activity_id IN ({placeholders})",
+ activity_ids,
+ )
+ for aid, code, title, cuid in cur.fetchall():
+ entry = {"code": code, "title": title}
+ if cuid:
+ entry["uid"] = cuid
+ concepts_map.setdefault(aid, []).append(entry)
snapshot = {
"soa_id": soa_id,
"soa_name": soa_name,
@@ -806,15 +832,68 @@ def _rollback_freeze(soa_id: int, freeze_id: int) -> dict:
new_aid = activity_id_map.get(int(old_aid))
if not new_aid:
continue
+ # Fetch activity_uid for the new activity id
+ cur.execute("SELECT activity_uid FROM activity WHERE id=?", (new_aid,))
+ row_uid = cur.fetchone()
+ new_activity_uid = row_uid[0] if row_uid else None
+ ac_has_soa = _table_has_columns(cur, "activity_concept", ("soa_id",))
+ ac_has_actuid = _table_has_columns(cur, "activity_concept", ("activity_uid",))
+ ac_has_conceptuid = _table_has_columns(
+ cur, "activity_concept", ("concept_uid",)
+ )
for c in concept_list:
code = c.get("code")
title = c.get("title") or code
if not code:
continue
- cur.execute(
- "INSERT INTO activity_concept (activity_id, concept_code, concept_title) VALUES (?,?,?)",
- (new_aid, code, title),
+ # Insert concept mapping; include soa_id if column exists
+ concept_uid = (
+ _get_next_concept_uid(cur, soa_id) if ac_has_conceptuid else None
)
+ if ac_has_soa and ac_has_actuid:
+ if ac_has_conceptuid:
+ cur.execute(
+ "INSERT INTO activity_concept (soa_id, activity_id, activity_uid, concept_uid, concept_code, concept_title) VALUES (?,?,?,?,?,?)",
+ (soa_id, new_aid, new_activity_uid, concept_uid, code, title),
+ )
+ else:
+ cur.execute(
+ "INSERT INTO activity_concept (soa_id, activity_id, activity_uid, concept_code, concept_title) VALUES (?,?,?,?,?)",
+ (soa_id, new_aid, new_activity_uid, code, title),
+ )
+ elif ac_has_actuid:
+ if ac_has_conceptuid:
+ cur.execute(
+ "INSERT INTO activity_concept (activity_id, activity_uid, concept_uid, concept_code, concept_title) VALUES (?,?,?,?,?)",
+ (new_aid, new_activity_uid, concept_uid, code, title),
+ )
+ else:
+ cur.execute(
+ "INSERT INTO activity_concept (activity_id, activity_uid, concept_code, concept_title) VALUES (?,?,?,?)",
+ (new_aid, new_activity_uid, code, title),
+ )
+ elif ac_has_soa:
+ if ac_has_conceptuid:
+ cur.execute(
+ "INSERT INTO activity_concept (soa_id, activity_id, concept_uid, concept_code, concept_title) VALUES (?,?,?,?,?)",
+ (soa_id, new_aid, concept_uid, code, title),
+ )
+ else:
+ cur.execute(
+ "INSERT INTO activity_concept (soa_id, activity_id, concept_code, concept_title) VALUES (?,?,?,?)",
+ (soa_id, new_aid, code, title),
+ )
+ else:
+ if ac_has_conceptuid:
+ cur.execute(
+ "INSERT INTO activity_concept (activity_id, concept_uid, concept_code, concept_title) VALUES (?,?,?,?)",
+ (new_aid, concept_uid, code, title),
+ )
+ else:
+ cur.execute(
+ "INSERT INTO activity_concept (activity_id, concept_code, concept_title) VALUES (?,?,?)",
+ (new_aid, code, title),
+ )
inserted_concepts += 1
conn.commit()
conn.close()
@@ -1017,11 +1096,41 @@ def _fetch_matrix(soa_id: int):
dict(id=r[0], name=r[1], raw_header=r[2], order_index=r[3], epoch_id=r[4])
for r in cur.fetchall()
]
- cur.execute(
- "SELECT id,name,order_index FROM activity WHERE soa_id=? ORDER BY order_index",
- (soa_id,),
- )
- activities = [dict(id=r[0], name=r[1], order_index=r[2]) for r in cur.fetchall()]
+ # Activities: include optional label/description if schema supports them
+ cur.execute("PRAGMA table_info(activity)")
+ act_cols = {r[1] for r in cur.fetchall()}
+ if "label" in act_cols and "description" in act_cols:
+ cur.execute(
+ "SELECT id,name,order_index,activity_uid,label,description FROM activity WHERE soa_id=? ORDER BY order_index",
+ (soa_id,),
+ )
+ activities = [
+ dict(
+ id=r[0],
+ name=r[1],
+ order_index=r[2],
+ activity_uid=r[3],
+ label=r[4],
+ description=r[5],
+ )
+ for r in cur.fetchall()
+ ]
+ else:
+ cur.execute(
+ "SELECT id,name,order_index,activity_uid FROM activity WHERE soa_id=? ORDER BY order_index",
+ (soa_id,),
+ )
+ activities = [
+ dict(
+ id=r[0],
+ name=r[1],
+ order_index=r[2],
+ activity_uid=r[3],
+ label=None,
+ description=None,
+ )
+ for r in cur.fetchall()
+ ]
cur.execute(
"SELECT visit_id, activity_id, status FROM matrix_cells WHERE soa_id=?",
(soa_id,),
@@ -2073,20 +2182,75 @@ def set_activity_concepts(soa_id: int, activity_id: int, payload: ConceptsUpdate
if not cur.fetchone():
conn.close()
raise HTTPException(404, "Activity not found")
- # Clear existing mappings
- cur.execute("DELETE FROM activity_concept WHERE activity_id=?", (activity_id,))
+ # Clear existing mappings; include soa_id if column exists
+ ac_has_soa = _table_has_columns(cur, "activity_concept", ("soa_id",))
+ ac_has_actuid = _table_has_columns(cur, "activity_concept", ("activity_uid",))
+ if ac_has_soa:
+ cur.execute(
+ "DELETE FROM activity_concept WHERE activity_id=? AND soa_id=?",
+ (activity_id, soa_id),
+ )
+ else:
+ cur.execute("DELETE FROM activity_concept WHERE activity_id=?", (activity_id,))
concepts = fetch_biomedical_concepts()
lookup = {c["code"]: c["title"] for c in concepts}
+ # Fetch activity_uid once
+ cur.execute("SELECT activity_uid FROM activity WHERE id=?", (activity_id,))
+ r = cur.fetchone()
+ activity_uid = r[0] if r else None
+ # Prepare concept_uid generation when column exists
+ ac_has_conceptuid = _table_has_columns(cur, "activity_concept", ("concept_uid",))
inserted = 0
for code in payload.concept_codes:
ccode = code.strip()
if not ccode:
continue
title = lookup.get(ccode, ccode)
- cur.execute(
- "INSERT INTO activity_concept (activity_id, concept_code, concept_title) VALUES (?,?,?)",
- (activity_id, ccode, title),
- )
+ concept_uid = _get_next_concept_uid(cur, soa_id) if ac_has_conceptuid else None
+ if ac_has_soa and ac_has_actuid:
+ if ac_has_conceptuid:
+ cur.execute(
+ "INSERT INTO activity_concept (soa_id, activity_id, activity_uid, concept_uid, concept_code, concept_title) VALUES (?,?,?,?,?,?)",
+ (soa_id, activity_id, activity_uid, concept_uid, ccode, title),
+ )
+ else:
+ cur.execute(
+ "INSERT INTO activity_concept (soa_id, activity_id, activity_uid, concept_code, concept_title) VALUES (?,?,?,?,?)",
+ (soa_id, activity_id, activity_uid, ccode, title),
+ )
+ elif ac_has_actuid:
+ if ac_has_conceptuid:
+ cur.execute(
+ "INSERT INTO activity_concept (activity_id, activity_uid, concept_uid, concept_code, concept_title) VALUES (?,?,?,?,?)",
+ (activity_id, activity_uid, concept_uid, ccode, title),
+ )
+ else:
+ cur.execute(
+ "INSERT INTO activity_concept (activity_id, activity_uid, concept_code, concept_title) VALUES (?,?,?,?)",
+ (activity_id, activity_uid, ccode, title),
+ )
+ elif ac_has_soa:
+ if ac_has_conceptuid:
+ cur.execute(
+ "INSERT INTO activity_concept (soa_id, activity_id, concept_uid, concept_code, concept_title) VALUES (?,?,?,?,?)",
+ (soa_id, activity_id, concept_uid, ccode, title),
+ )
+ else:
+ cur.execute(
+ "INSERT INTO activity_concept (soa_id, activity_id, concept_code, concept_title) VALUES (?,?,?,?)",
+ (soa_id, activity_id, ccode, title),
+ )
+ else:
+ if ac_has_conceptuid:
+ cur.execute(
+ "INSERT INTO activity_concept (activity_id, concept_uid, concept_code, concept_title) VALUES (?,?,?,?)",
+ (activity_id, concept_uid, ccode, title),
+ )
+ else:
+ cur.execute(
+ "INSERT INTO activity_concept (activity_id, concept_code, concept_title) VALUES (?,?,?)",
+ (activity_id, ccode, title),
+ )
inserted += 1
conn.commit()
conn.close()
@@ -2097,10 +2261,16 @@ def _get_activity_concepts(activity_id: int):
"""Return list of concepts (immutable: stored snapshot)."""
conn = _connect()
cur = conn.cursor()
- cur.execute(
- "SELECT concept_code, concept_title FROM activity_concept WHERE activity_id=?",
- (activity_id,),
- )
+ if _table_has_columns(cur, "activity_concept", ("soa_id",)):
+ cur.execute(
+ "SELECT concept_code, concept_title FROM activity_concept WHERE activity_id=? AND soa_id=(SELECT soa_id FROM activity WHERE id=?)",
+ (activity_id, activity_id),
+ )
+ else:
+ cur.execute(
+ "SELECT concept_code, concept_title FROM activity_concept WHERE activity_id=?",
+ (activity_id,),
+ )
rows = [{"code": c, "title": t} for c, t in cur.fetchall()]
conn.close()
return rows
@@ -2129,15 +2299,70 @@ def ui_add_activity_concept(
if not cur.fetchone():
conn.close()
raise HTTPException(404, "Activity not found")
- cur.execute(
- "SELECT 1 FROM activity_concept WHERE activity_id=? AND concept_code=?",
- (activity_id, code),
- )
- if not cur.fetchone():
+ # Check existence; include soa_id if column exists
+ ac_has_soa = _table_has_columns(cur, "activity_concept", ("soa_id",))
+ ac_has_actuid = _table_has_columns(cur, "activity_concept", ("activity_uid",))
+ ac_has_conceptuid = _table_has_columns(cur, "activity_concept", ("concept_uid",))
+ if ac_has_soa:
cur.execute(
- "INSERT INTO activity_concept (activity_id, concept_code, concept_title) VALUES (?,?,?)",
- (activity_id, code, title),
+ "SELECT 1 FROM activity_concept WHERE activity_id=? AND concept_code=? AND soa_id=?",
+ (activity_id, code, soa_id),
)
+ else:
+ cur.execute(
+ "SELECT 1 FROM activity_concept WHERE activity_id=? AND concept_code=?",
+ (activity_id, code),
+ )
+ if not cur.fetchone():
+ # Fetch activity_uid once
+ cur.execute("SELECT activity_uid FROM activity WHERE id=?", (activity_id,))
+ rr = cur.fetchone()
+ activity_uid = rr[0] if rr else None
+ concept_uid = _get_next_concept_uid(cur, soa_id) if ac_has_conceptuid else None
+ if ac_has_soa and ac_has_actuid:
+ if ac_has_conceptuid:
+ cur.execute(
+ "INSERT INTO activity_concept (soa_id, activity_id, activity_uid, concept_uid, concept_code, concept_title) VALUES (?,?,?,?,?,?)",
+ (soa_id, activity_id, activity_uid, concept_uid, code, title),
+ )
+ else:
+ cur.execute(
+ "INSERT INTO activity_concept (soa_id, activity_id, activity_uid, concept_code, concept_title) VALUES (?,?,?,?,?)",
+ (soa_id, activity_id, activity_uid, code, title),
+ )
+ elif ac_has_actuid:
+ if ac_has_conceptuid:
+ cur.execute(
+ "INSERT INTO activity_concept (activity_id, activity_uid, concept_uid, concept_code, concept_title) VALUES (?,?,?,?,?)",
+ (activity_id, activity_uid, concept_uid, code, title),
+ )
+ else:
+ cur.execute(
+ "INSERT INTO activity_concept (activity_id, activity_uid, concept_code, concept_title) VALUES (?,?,?,?)",
+ (activity_id, activity_uid, code, title),
+ )
+ elif ac_has_soa:
+ if ac_has_conceptuid:
+ cur.execute(
+ "INSERT INTO activity_concept (soa_id, activity_id, concept_uid, concept_code, concept_title) VALUES (?,?,?,?,?)",
+ (soa_id, activity_id, concept_uid, code, title),
+ )
+ else:
+ cur.execute(
+ "INSERT INTO activity_concept (soa_id, activity_id, concept_code, concept_title) VALUES (?,?,?,?)",
+ (soa_id, activity_id, code, title),
+ )
+ else:
+ if ac_has_conceptuid:
+ cur.execute(
+ "INSERT INTO activity_concept (activity_id, concept_uid, concept_code, concept_title) VALUES (?,?,?,?)",
+ (activity_id, concept_uid, code, title),
+ )
+ else:
+ cur.execute(
+ "INSERT INTO activity_concept (activity_id, concept_code, concept_title) VALUES (?,?,?)",
+ (activity_id, code, title),
+ )
conn.commit()
conn.close()
selected = _get_activity_concepts(activity_id)
@@ -2170,10 +2395,19 @@ def ui_remove_activity_concept(
raise HTTPException(400, "Empty concept_code")
conn = _connect()
cur = conn.cursor()
- cur.execute(
- "DELETE FROM activity_concept WHERE activity_id=? AND concept_code=?",
- (activity_id, code),
- )
+ # Delete mapping; include soa_id if column exists
+ cur.execute("PRAGMA table_info(activity_concept)")
+ ac_cols = {r[1] for r in cur.fetchall()}
+ if "soa_id" in ac_cols:
+ cur.execute(
+ "DELETE FROM activity_concept WHERE activity_id=? AND concept_code=? AND soa_id=?",
+ (activity_id, code, soa_id),
+ )
+ else:
+ cur.execute(
+ "DELETE FROM activity_concept WHERE activity_id=? AND concept_code=?",
+ (activity_id, code),
+ )
conn.commit()
conn.close()
concepts = fetch_biomedical_concepts()
@@ -2252,39 +2486,98 @@ def export_xlsx(soa_id: int, left: Optional[int] = None, right: Optional[int] =
headers, rows = _matrix_arrays(soa_id)
# Build DataFrame, then inject Concepts column (second position)
df = pd.DataFrame(rows, columns=["Activity"] + headers)
- # Fetch concepts only (immutable snapshot titles)
+ # Fetch concepts and optional concept_uids (immutable snapshot titles)
conn = _connect()
cur = conn.cursor()
- cur.execute("SELECT activity_id, concept_code, concept_title FROM activity_concept")
+ has_uid = _table_has_columns(cur, "activity_concept", ("concept_uid",))
+ if _table_has_columns(cur, "activity_concept", ("soa_id",)):
+ if has_uid:
+ cur.execute(
+ "SELECT activity_id, concept_code, concept_title, concept_uid FROM activity_concept WHERE soa_id=?",
+ (soa_id,),
+ )
+ else:
+ cur.execute(
+ "SELECT activity_id, concept_code, concept_title, NULL as concept_uid FROM activity_concept WHERE soa_id=?",
+ (soa_id,),
+ )
+ else:
+ if has_uid:
+ cur.execute(
+ "SELECT ac.activity_id, ac.concept_code, ac.concept_title, ac.concept_uid FROM activity_concept ac JOIN activity a ON ac.activity_id = a.id WHERE a.soa_id=?",
+ (soa_id,),
+ )
+ else:
+ cur.execute(
+ "SELECT ac.activity_id, ac.concept_code, ac.concept_title, NULL as concept_uid FROM activity_concept ac JOIN activity a ON ac.activity_id = a.id WHERE a.soa_id=?",
+ (soa_id,),
+ )
concepts_map = {}
- for aid, code, title in cur.fetchall():
+ concepts_uids_map = {}
+ for aid, code, title, cuid in cur.fetchall():
concepts_map.setdefault(aid, {})[code] = title
+ if cuid:
+ concepts_uids_map.setdefault(aid, set()).add(cuid)
conn.close()
visits, activities, _cells = _fetch_matrix(soa_id)
activity_ids_in_order = [a["id"] for a in activities]
# Build display strings using EffectiveTitle (override if present) and show code in parentheses
concepts_strings = []
+ concept_uids_strings = []
for aid in activity_ids_in_order:
cmap = concepts_map.get(aid, {})
+ cuids = concepts_uids_map.get(aid, set())
if not cmap:
concepts_strings.append("")
+ concept_uids_strings.append("")
continue
items = sorted(cmap.items(), key=lambda kv: kv[1].lower())
concepts_strings.append(
"; ".join([f"{title} ({code})" for code, title in items])
)
+ concept_uids_strings.append(", ".join(sorted(list(cuids))) if cuids else "")
if len(concepts_strings) == len(df):
df.insert(1, "Concepts", concepts_strings)
+ df["Concept UIDs"] = concept_uids_strings
# Build concept mappings sheet data
mapping_rows = []
for a in activities:
aid = a["id"]
cmap = concepts_map.get(aid, {})
+ cuids = concepts_uids_map.get(aid, set())
+ # Map code -> uid (if any) for this activity
+ code_to_uid = {}
+ if cuids:
+            # concepts_uids_map only stores a set of UIDs per activity, not a per-code mapping.
+            # Re-query this activity's rows to capture the concept_uid for each code when available.
+ if has_uid:
+ conn2 = _connect()
+ cur2 = conn2.cursor()
+ if _table_has_columns(cur2, "activity_concept", ("soa_id",)):
+ cur2.execute(
+ "SELECT concept_code, concept_uid FROM activity_concept WHERE soa_id=? AND activity_id=?",
+ (soa_id, aid),
+ )
+ else:
+ cur2.execute(
+ "SELECT concept_code, concept_uid FROM activity_concept WHERE activity_id=?",
+ (aid,),
+ )
+ for ccode, cuid in cur2.fetchall():
+ if cuid:
+ code_to_uid[ccode] = cuid
+ conn2.close()
for code, title in cmap.items():
- mapping_rows.append([aid, a["name"], code, title])
+ mapping_rows.append([aid, a["name"], code, title, code_to_uid.get(code)])
mapping_df = pd.DataFrame(
mapping_rows,
- columns=["ActivityID", "ActivityName", "ConceptCode", "ConceptTitle"],
+ columns=[
+ "ActivityID",
+ "ActivityName",
+ "ConceptCode",
+ "ConceptTitle",
+ "ConceptUID",
+ ],
)
# Build rollback audit sheet data (optional)
audit_rows = (
@@ -3109,10 +3402,16 @@ def ui_edit(request: Request, soa_id: int):
conn = _connect()
cur = conn.cursor()
placeholders = ",".join("?" for _ in activity_ids)
- cur.execute(
- f"SELECT activity_id, concept_code, concept_title FROM activity_concept WHERE activity_id IN ({placeholders})",
- activity_ids,
- )
+ if _table_has_columns(cur, "activity_concept", ("soa_id",)):
+ cur.execute(
+ f"SELECT activity_id, concept_code, concept_title FROM activity_concept WHERE soa_id=? AND activity_id IN ({placeholders})",
+ [soa_id] + activity_ids,
+ )
+ else:
+ cur.execute(
+ f"SELECT activity_id, concept_code, concept_title FROM activity_concept WHERE activity_id IN ({placeholders})",
+ activity_ids,
+ )
for aid, code, title in cur.fetchall():
activity_concepts.setdefault(aid, []).append({"code": code, "title": title})
conn.close()
@@ -4543,6 +4842,21 @@ def ui_add_study_cell(
f"",
status_code=404,
)
+ # Allocate a single StudyCell UID for this Arm×Epoch submission,
+ # but reuse an existing UID if one already exists for (soa_id, arm_uid, epoch_uid)
+ sc_uid_global = None
+ try:
+ cur.execute(
+ "SELECT study_cell_uid FROM study_cell WHERE soa_id=? AND arm_uid=? AND epoch_uid=? LIMIT 1",
+ (soa_id, arm_uid, epoch_uid),
+ )
+ row_existing = cur.fetchone()
+ if row_existing and row_existing[0]:
+ sc_uid_global = row_existing[0]
+ except Exception:
+ sc_uid_global = None
+ if not sc_uid_global:
+ sc_uid_global = _next_study_cell_uid(cur, soa_id)
inserted = 0
for el_uid in element_ids:
# ensure element exists if element_id column present
@@ -4563,10 +4877,9 @@ def ui_add_study_cell(
)
if cur.fetchone():
continue
- sc_uid = _next_study_cell_uid(cur, soa_id)
cur.execute(
"INSERT INTO study_cell (soa_id, study_cell_uid, arm_uid, epoch_uid, element_uid) VALUES (?,?,?,?,?)",
- (soa_id, sc_uid, arm_uid, epoch_uid, el_uid),
+ (soa_id, sc_uid_global, arm_uid, epoch_uid, el_uid),
)
sc_id = cur.lastrowid
# Inline audit write for reliability
@@ -4579,7 +4892,7 @@ def ui_add_study_cell(
None,
json.dumps(
{
- "study_cell_uid": sc_uid,
+ "study_cell_uid": sc_uid_global,
"arm_uid": arm_uid,
"epoch_uid": epoch_uid,
"element_uid": el_uid,
@@ -5269,10 +5582,16 @@ def ui_set_activity_concepts(
concepts = fetch_biomedical_concepts()
conn = _connect()
cur = conn.cursor()
- cur.execute(
- "SELECT concept_code, concept_title FROM activity_concept WHERE activity_id=?",
- (activity_id,),
- )
+ if _table_has_columns(cur, "activity_concept", ("soa_id",)):
+ cur.execute(
+ "SELECT concept_code, concept_title FROM activity_concept WHERE activity_id=? AND soa_id=?",
+ (activity_id, soa_id),
+ )
+ else:
+ cur.execute(
+ "SELECT concept_code, concept_title FROM activity_concept WHERE activity_id=?",
+ (activity_id,),
+ )
selected = [{"code": c, "title": t} for c, t in cur.fetchall()]
conn.close()
html = templates.get_template("concepts_cell.html").render(
@@ -5306,10 +5625,16 @@ def ui_activity_concepts_cell(
concepts = fetch_biomedical_concepts()
conn = _connect()
cur = conn.cursor()
- cur.execute(
- "SELECT concept_code, concept_title FROM activity_concept WHERE activity_id=?",
- (activity_id,),
- )
+ if _table_has_columns(cur, "activity_concept", ("soa_id",)):
+ cur.execute(
+ "SELECT concept_code, concept_title FROM activity_concept WHERE activity_id=? AND soa_id=?",
+ (activity_id, soa_id),
+ )
+ else:
+ cur.execute(
+ "SELECT concept_code, concept_title FROM activity_concept WHERE activity_id=?",
+ (activity_id,),
+ )
selected = [{"code": c, "title": t} for c, t in cur.fetchall()]
conn.close()
return HTMLResponse(
diff --git a/src/soa_builder/web/initialize_database.py b/src/soa_builder/web/initialize_database.py
index c4c06d4..b591e34 100644
--- a/src/soa_builder/web/initialize_database.py
+++ b/src/soa_builder/web/initialize_database.py
@@ -5,13 +5,31 @@ def _init_db():
conn = _connect()
cur = conn.cursor()
cur.execute(
- """CREATE TABLE IF NOT EXISTS soa (id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT, created_at TEXT)"""
+ """CREATE TABLE IF NOT EXISTS soa (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ name TEXT,
+ created_at TEXT
+ )"""
)
cur.execute(
- """CREATE TABLE IF NOT EXISTS visit (id INTEGER PRIMARY KEY AUTOINCREMENT, soa_id INTEGER, name TEXT, raw_header TEXT, order_index INTEGER)"""
+ """CREATE TABLE IF NOT EXISTS visit (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ soa_id INTEGER,
+ name TEXT,
+ raw_header TEXT,
+ order_index INTEGER
+ )"""
)
cur.execute(
- """CREATE TABLE IF NOT EXISTS activity (id INTEGER PRIMARY KEY AUTOINCREMENT, soa_id INTEGER, name TEXT, order_index INTEGER, activity_uid TEXT)"""
+ """CREATE TABLE IF NOT EXISTS activity (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ soa_id INTEGER,
+ name TEXT,
+ order_index INTEGER,
+ activity_uid TEXT, -- immutable Activity_N identifier unique within an SOA
+ label TEXT,
+ description TEXT
+ )"""
)
# Arms: groupings similar to Visits. (Legacy element linkage removed; schema now only stores intrinsic fields.)
cur.execute(
@@ -127,19 +145,44 @@ def _init_db():
)
# Epochs: high-level study phase grouping (optional). Behaves like visits/activities list ordering.
cur.execute(
- """CREATE TABLE IF NOT EXISTS epoch (id INTEGER PRIMARY KEY AUTOINCREMENT, soa_id INTEGER, name TEXT, order_index INTEGER)"""
+ """CREATE TABLE IF NOT EXISTS epoch (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ soa_id INTEGER,
+ name TEXT,
+ order_index INTEGER
+ )"""
)
# Matrix cells table (renamed from legacy 'cell')
cur.execute(
- """CREATE TABLE IF NOT EXISTS matrix_cells (id INTEGER PRIMARY KEY AUTOINCREMENT, soa_id INTEGER, visit_id INTEGER, activity_id INTEGER, status TEXT)"""
+ """CREATE TABLE IF NOT EXISTS matrix_cells (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ soa_id INTEGER,
+ visit_id INTEGER,
+ activity_id INTEGER,
+ status TEXT
+ )"""
)
# Mapping table linking activities to biomedical concepts (concept_code + title stored for snapshot purposes)
cur.execute(
- """CREATE TABLE IF NOT EXISTS activity_concept (id INTEGER PRIMARY KEY AUTOINCREMENT, activity_id INTEGER, concept_code TEXT, concept_title TEXT)"""
+ """CREATE TABLE IF NOT EXISTS activity_concept (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ activity_id INTEGER,
+ concept_code TEXT,
+ concept_title TEXT,
+ concept_uid TEXT, -- immutable BiomedicalConcept_N identifier unique within an SOA
+    activity_uid TEXT, -- joins to activity.activity_uid; unique within an SOA
+ soa_id INT
+ )"""
)
# Frozen versions (snapshot JSON of current matrix & concepts)
cur.execute(
- """CREATE TABLE IF NOT EXISTS soa_freeze (id INTEGER PRIMARY KEY AUTOINCREMENT, soa_id INTEGER, version_label TEXT, created_at TEXT, snapshot_json TEXT)"""
+ """CREATE TABLE IF NOT EXISTS soa_freeze (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ soa_id INTEGER,
+ version_label TEXT,
+ created_at TEXT,
+ snapshot_json TEXT
+ )"""
)
# Unique index to enforce one label per SoA
cur.execute(
diff --git a/src/soa_builder/web/migrate_database.py b/src/soa_builder/web/migrate_database.py
index 03b8ef0..16ac802 100644
--- a/src/soa_builder/web/migrate_database.py
+++ b/src/soa_builder/web/migrate_database.py
@@ -279,6 +279,88 @@ def _migrate_add_epoch_type():
logger.warning("Epoch type migration failed: %s", e)
+# Migration: add epoch_uid to epoch
+def _migrate_add_epoch_uid():
+ """Ensure epoch_uid column exists and is populated as StudyEpoch_ unique per SoA.
+ Uses epoch_seq when available to keep numbering stable; otherwise falls back to id order.
+ Creates unique index (soa_id, epoch_uid).
+ """
+ try:
+ conn = _connect()
+ cur = conn.cursor()
+ cur.execute("PRAGMA table_info(epoch)")
+ cols = {r[1] for r in cur.fetchall()}
+ if "epoch_uid" not in cols:
+ cur.execute("ALTER TABLE epoch ADD COLUMN epoch_uid TEXT")
+ conn.commit()
+ logger.info("Added epoch_uid column to epoch table")
+ # Backfill any NULL epoch_uid values
+ cur.execute("SELECT DISTINCT soa_id FROM epoch")
+ soa_ids = [r[0] for r in cur.fetchall()]
+ for sid in soa_ids:
+ # Prefer ordering by epoch_seq if present to make UIDs deterministic
+ order_col = "epoch_seq" if "epoch_seq" in cols else "id"
+ cur.execute(
+ f"SELECT id, COALESCE(epoch_seq, 0) FROM epoch WHERE soa_id=? AND epoch_uid IS NULL ORDER BY {order_col}",
+ (sid,),
+ )
+ rows = cur.fetchall()
+ if not rows:
+ continue
+ # Determine used numbers to avoid collisions when partially populated
+ cur.execute(
+ "SELECT epoch_uid FROM epoch WHERE soa_id=? AND epoch_uid IS NOT NULL",
+ (sid,),
+ )
+ used_nums = set()
+ for (uid,) in cur.fetchall():
+ if isinstance(uid, str) and uid.startswith("StudyEpoch_"):
+ try:
+ used_nums.add(int(uid.split("StudyEpoch_")[-1]))
+ except Exception:
+ pass
+ for eid, seq in rows:
+ n = int(seq) if int(seq) > 0 and int(seq) not in used_nums else None
+ if n is None:
+ # pick next available number
+ n = 1
+ while n in used_nums:
+ n += 1
+ uid = f"StudyEpoch_{n}"
+ used_nums.add(n)
+ cur.execute("UPDATE epoch SET epoch_uid=? WHERE id=?", (uid, eid))
+ # Create unique index
+ try:
+ cur.execute(
+ "CREATE UNIQUE INDEX IF NOT EXISTS idx_epoch_soaid_uid ON epoch(soa_id, epoch_uid)"
+ )
+ conn.commit()
+ except Exception:
+ pass
+ # Create trigger to auto-fill epoch_uid on insert when NULL
+ try:
+ cur.execute(
+ """
+ CREATE TRIGGER IF NOT EXISTS tr_epoch_uid_autofill
+ AFTER INSERT ON epoch
+ FOR EACH ROW
+ WHEN NEW.epoch_uid IS NULL
+ BEGIN
+ UPDATE epoch
+ SET epoch_uid = 'StudyEpoch_' || COALESCE(NEW.epoch_seq, NEW.id)
+ WHERE id = NEW.id;
+ END;
+ """
+ )
+ conn.commit()
+ except Exception:
+ pass
+ conn.commit()
+ conn.close()
+ except Exception as e:
+ logger.warning("epoch_uid migration failed: %s", e)
+
+
# Migration: create code_junction table
def _migrate_create_code_junction():
"""Create code_junction linking table if absent.
diff --git a/src/soa_builder/web/routers/activities.py b/src/soa_builder/web/routers/activities.py
index afd795f..e7c29d0 100644
--- a/src/soa_builder/web/routers/activities.py
+++ b/src/soa_builder/web/routers/activities.py
@@ -6,13 +6,17 @@
import time
from typing import List
-from fastapi import APIRouter, HTTPException
-from fastapi.responses import JSONResponse
+from fastapi import APIRouter, HTTPException, Request, Form
+from fastapi.responses import JSONResponse, HTMLResponse
from ..audit import _record_activity_audit, _record_reorder_audit
from ..db import _connect
from ..schemas import ActivityCreate, ActivityUpdate, BulkActivities
-from ..utils import soa_exists
+from ..utils import (
+ soa_exists,
+ table_has_columns as _table_has_columns,
+ get_next_concept_uid as _get_next_concept_uid,
+)
_ACT_CONCEPT_CACHE = {"data": None, "fetched_at": 0}
_ACT_CONCEPT_TTL = 60 * 60
@@ -84,11 +88,18 @@ def list_activities(soa_id: int):
conn = _connect()
cur = conn.cursor()
cur.execute(
- "SELECT id,name,order_index,activity_uid FROM activity WHERE soa_id=? ORDER BY order_index",
+ "SELECT id,name,order_index,activity_uid,label,description FROM activity WHERE soa_id=? ORDER BY order_index",
(soa_id,),
)
rows = [
- {"id": r[0], "name": r[1], "order_index": r[2], "activity_uid": r[3]}
+ {
+ "id": r[0],
+ "name": r[1],
+ "order_index": r[2],
+ "activity_uid": r[3],
+ "label": r[4],
+ "description": r[5],
+ }
for r in cur.fetchall()
]
conn.close()
@@ -102,7 +113,7 @@ def get_activity(soa_id: int, activity_id: int):
conn = _connect()
cur = conn.cursor()
cur.execute(
- "SELECT id,name,order_index,activity_uid FROM activity WHERE id=? AND soa_id=?",
+ "SELECT id,name,order_index,activity_uid,label,description FROM activity WHERE id=? AND soa_id=?",
(activity_id, soa_id),
)
row = cur.fetchone()
@@ -115,6 +126,8 @@ def get_activity(soa_id: int, activity_id: int):
"name": row[1],
"order_index": row[2],
"activity_uid": row[3],
+ "label": row[4],
+ "description": row[5],
}
@@ -124,29 +137,77 @@ def add_activity(soa_id: int, payload: ActivityCreate):
raise HTTPException(404, "SOA not found")
conn = _connect()
cur = conn.cursor()
- cur.execute("SELECT COUNT(*) FROM activity WHERE soa_id=?", (soa_id,))
- order_index = cur.fetchone()[0] + 1
+ # Determine next order_index
cur.execute(
- "INSERT INTO activity (soa_id,name,order_index,activity_uid) VALUES (?,?,?,?)",
- (soa_id, payload.name, order_index, f"Activity_{order_index}"),
+ "SELECT COALESCE(MAX(order_index),0) FROM activity WHERE soa_id=?", (soa_id,)
)
+ order_index = (cur.fetchone() or [0])[0] + 1
+    # Derive activity_uid from the next order_index — NOTE(review): MAX+1 can reuse a number (and UID) after the highest-ordered activity is deleted; confirm this is intended for an "immutable" identifier.
+ activity_uid = f"Activity_{order_index}"
+
+ name = (payload.name or "").strip()
+ label = (payload.label or "").strip() or None
+ description = (payload.description or "").strip() or None
+ if not name:
+ conn.close()
+ raise HTTPException(400, "Name required")
+
+ # Insert guarding for legacy schemas that may not have label/description
+ cur.execute("PRAGMA table_info(activity)")
+ cols = {r[1] for r in cur.fetchall()}
+ if "label" in cols and "description" in cols:
+ cur.execute(
+ "INSERT INTO activity (soa_id,name,order_index,activity_uid,label,description) VALUES (?,?,?,?,?,?)",
+ (soa_id, name, order_index, activity_uid, label, description),
+ )
+ else:
+ cur.execute(
+ "INSERT INTO activity (soa_id,name,order_index,activity_uid) VALUES (?,?,?,?)",
+ (soa_id, name, order_index, activity_uid),
+ )
aid = cur.lastrowid
conn.commit()
conn.close()
+
after = {
"id": aid,
- "name": payload.name,
+ "name": name,
"order_index": order_index,
- "activity_uid": f"Activity_{order_index}",
+ "activity_uid": activity_uid,
+ "label": label,
+ "description": description,
}
_record_activity_audit(soa_id, "create", aid, before=None, after=after)
return {
"activity_id": aid,
"order_index": order_index,
- "activity_uid": f"Activity_{order_index}",
+ "activity_uid": activity_uid,
}
+@router.post("/activities/add", response_class=HTMLResponse)
+def ui_add_activity(
+ request: Request,
+ soa_id: int,
+ name: str | None = Form(None),
+ label: str | None = Form(None),
+ description: str | None = Form(None),
+):
+ """UI form handler to add an Activity, then redirect to the edit page.
+
+ Accepts standard form fields and reuses the JSON create logic.
+ """
+ if not soa_exists(soa_id):
+ raise HTTPException(404, "SOA not found")
+ payload = ActivityCreate(name=name or "", label=label, description=description)
+ add_activity(soa_id, payload)
+ if request.headers.get("HX-Request") == "true":
+ return HTMLResponse("", headers={"HX-Redirect": f"/ui/soa/{soa_id}/edit"})
+ return HTMLResponse(
+ f""
+ )
+
+
@router.patch("/activities/{activity_id}", response_class=JSONResponse)
def update_activity(soa_id: int, activity_id: int, payload: ActivityUpdate):
if not soa_exists(soa_id):
@@ -154,33 +215,63 @@ def update_activity(soa_id: int, activity_id: int, payload: ActivityUpdate):
conn = _connect()
cur = conn.cursor()
cur.execute(
- "SELECT id,name,order_index,activity_uid FROM activity WHERE id=? AND soa_id=?",
+ "SELECT id,name,order_index,activity_uid,label,description FROM activity WHERE id=? AND soa_id=?",
(activity_id, soa_id),
)
row = cur.fetchone()
if not row:
conn.close()
raise HTTPException(404, "Activity not found")
+
before = {
"id": row[0],
"name": row[1],
"order_index": row[2],
"activity_uid": row[3],
+ "label": row[4],
+ "description": row[5],
}
+
+ # Apply payload with trimming; None means "unchanged"
new_name = (payload.name if payload.name is not None else before["name"]) or ""
+ new_label = payload.label if payload.label is not None else before["label"]
+ new_description = (
+ payload.description
+ if payload.description is not None
+ else before["description"]
+ )
+
new_name = new_name.strip()
+ new_label = (new_label or "").strip() or None
+ new_description = (new_description or "").strip() or None
+
cur.execute(
- "UPDATE activity SET name=? WHERE id=?", (new_name or None, activity_id)
+ "UPDATE activity SET name=?, label=?, description=? WHERE id=? AND soa_id=?",
+ (new_name or None, new_label, new_description, activity_id, soa_id),
)
conn.commit()
cur.execute(
- "SELECT id,name,order_index,activity_uid FROM activity WHERE id=?",
- (activity_id,),
+ "SELECT id,name,order_index,activity_uid,label,description FROM activity WHERE id=? AND soa_id=?",
+ (activity_id, soa_id),
)
r = cur.fetchone()
conn.close()
- after = {"id": r[0], "name": r[1], "order_index": r[2], "activity_uid": r[3]}
- updated_fields = ["name"] if before["name"] != after["name"] else []
+
+ after = {
+ "id": r[0],
+ "name": r[1],
+ "order_index": r[2],
+ "activity_uid": r[3],
+ "label": r[4],
+ "description": r[5],
+ }
+
+ # Correct updated_fields calculation
+ updated_fields = []
+ for fld in ("name", "label", "description"):
+ if (before.get(fld) or None) != (after.get(fld) or None):
+ updated_fields.append(fld)
+
_record_activity_audit(
soa_id,
"update",
@@ -191,38 +282,76 @@ def update_activity(soa_id: int, activity_id: int, payload: ActivityUpdate):
return JSONResponse({**after, "updated_fields": updated_fields})
+@router.post("/activities/{activity_id}/update", response_class=HTMLResponse)
+def ui_update_activity(
+ request: Request,
+ soa_id: int,
+ activity_id: int,
+ name: str | None = Form(None),
+ label: str | None = Form(None),
+ description: str | None = Form(None),
+):
+ """UI form handler to update an Activity and redirect back to edit page.
+
+ This wraps the JSON update endpoint and returns an HTML redirect suitable
+ for both full page and HTMX requests.
+ """
+ if not soa_exists(soa_id):
+ raise HTTPException(404, "SOA not found")
+ payload = ActivityUpdate(name=name, label=label, description=description)
+ # Reuse the JSON handler for business logic/audit
+ update_activity(soa_id, activity_id, payload)
+ if request.headers.get("HX-Request") == "true":
+ return HTMLResponse("", headers={"HX-Redirect": f"/ui/soa/{soa_id}/edit"})
+ return HTMLResponse(
+ f""
+ )
+
+
@router.post("/activities/reorder", response_class=JSONResponse)
def reorder_activities_api(soa_id: int, order: List[int]):
if not soa_exists(soa_id):
raise HTTPException(404, "SOA not found")
if not order:
raise HTTPException(400, "Order list required")
+
conn = _connect()
cur = conn.cursor()
- cur.execute(
- "SELECT id FROM activity WHERE soa_id=? ORDER BY order_index", (soa_id,)
- )
- old_order = [r[0] for r in cur.fetchall()]
+
+ # Validate IDs exist in this SOA
cur.execute("SELECT id FROM activity WHERE soa_id=?", (soa_id,))
existing = {r[0] for r in cur.fetchall()}
if set(order) - existing:
conn.close()
raise HTTPException(400, "Order contains invalid activity id")
+
+ cur.execute(
+ "SELECT id FROM activity WHERE soa_id=? ORDER BY order_index", (soa_id,)
+ )
+ old_order = [r[0] for r in cur.fetchall()]
+
before_rows = {
r[0]: r[1]
for r in cur.execute(
"SELECT id,order_index FROM activity WHERE soa_id=?", (soa_id,)
).fetchall()
}
+
+ # Apply new order_index
for idx, aid in enumerate(order, start=1):
- cur.execute("UPDATE activity SET order_index=? WHERE id=?", (idx, aid))
+ cur.execute(
+ "UPDATE activity SET order_index=? WHERE id=? AND soa_id=?",
+ (idx, aid, soa_id),
+ )
+
after_rows = {
r[0]: r[1]
for r in cur.execute(
"SELECT id,order_index FROM activity WHERE soa_id=?", (soa_id,)
).fetchall()
}
- # Two-phase UID reassignment
+
+ # Reassign activity_uid from order_index
cur.execute(
"UPDATE activity SET activity_uid='TMP_' || id WHERE soa_id=?", (soa_id,)
)
@@ -232,7 +361,9 @@ def reorder_activities_api(soa_id: int, order: List[int]):
)
conn.commit()
conn.close()
+
_record_reorder_audit(soa_id, "activity", old_order, order)
+
reorder_details = [
{
"id": aid,
@@ -241,6 +372,7 @@ def reorder_activities_api(soa_id: int, order: List[int]):
}
for aid in order
]
+
_record_activity_audit(
soa_id,
"reorder",
@@ -298,19 +430,74 @@ def set_activity_concepts(soa_id: int, activity_id: int, concept_codes: List[str
if not cur.fetchone():
conn.close()
raise HTTPException(404, "Activity not found")
- cur.execute("DELETE FROM activity_concept WHERE activity_id=?", (activity_id,))
+ # Clear existing mappings; include soa_id if column exists
+ ac_has_soa = _table_has_columns(cur, "activity_concept", ("soa_id",))
+ ac_has_actuid = _table_has_columns(cur, "activity_concept", ("activity_uid",))
+ if ac_has_soa:
+ cur.execute(
+ "DELETE FROM activity_concept WHERE activity_id=? AND soa_id=?",
+ (activity_id, soa_id),
+ )
+ else:
+ cur.execute("DELETE FROM activity_concept WHERE activity_id=?", (activity_id,))
concepts = fetch_biomedical_concepts()
lookup = {c["code"]: c["title"] for c in concepts}
+ # Fetch activity_uid once for inserts
+ cur.execute("SELECT activity_uid FROM activity WHERE id=?", (activity_id,))
+ row = cur.fetchone()
+ activity_uid = row[0] if row else None
+ ac_has_conceptuid = _table_has_columns(cur, "activity_concept", ("concept_uid",))
inserted = 0
for code in concept_codes:
ccode = code.strip()
if not ccode:
continue
title = lookup.get(ccode, ccode)
- cur.execute(
- "INSERT INTO activity_concept (activity_id, concept_code, concept_title) VALUES (?,?,?)",
- (activity_id, ccode, title),
- )
+ concept_uid = _get_next_concept_uid(cur, soa_id) if ac_has_conceptuid else None
+ if ac_has_soa and ac_has_actuid:
+ if ac_has_conceptuid:
+ cur.execute(
+ "INSERT INTO activity_concept (soa_id, activity_id, activity_uid, concept_uid, concept_code, concept_title) VALUES (?,?,?,?,?,?)",
+ (soa_id, activity_id, activity_uid, concept_uid, ccode, title),
+ )
+ else:
+ cur.execute(
+ "INSERT INTO activity_concept (soa_id, activity_id, activity_uid, concept_code, concept_title) VALUES (?,?,?,?,?)",
+ (soa_id, activity_id, activity_uid, ccode, title),
+ )
+ elif ac_has_actuid:
+ if ac_has_conceptuid:
+ cur.execute(
+ "INSERT INTO activity_concept (activity_id, activity_uid, concept_uid, concept_code, concept_title) VALUES (?,?,?,?,?)",
+ (activity_id, activity_uid, concept_uid, ccode, title),
+ )
+ else:
+ cur.execute(
+ "INSERT INTO activity_concept (activity_id, activity_uid, concept_code, concept_title) VALUES (?,?,?,?)",
+ (activity_id, activity_uid, ccode, title),
+ )
+ elif ac_has_soa:
+ if ac_has_conceptuid:
+ cur.execute(
+ "INSERT INTO activity_concept (soa_id, activity_id, concept_uid, concept_code, concept_title) VALUES (?,?,?,?,?)",
+ (soa_id, activity_id, concept_uid, ccode, title),
+ )
+ else:
+ cur.execute(
+ "INSERT INTO activity_concept (soa_id, activity_id, concept_code, concept_title) VALUES (?,?,?,?)",
+ (soa_id, activity_id, ccode, title),
+ )
+ else:
+ if ac_has_conceptuid:
+ cur.execute(
+ "INSERT INTO activity_concept (activity_id, concept_uid, concept_code, concept_title) VALUES (?,?,?,?)",
+ (activity_id, concept_uid, ccode, title),
+ )
+ else:
+ cur.execute(
+ "INSERT INTO activity_concept (activity_id, concept_code, concept_title) VALUES (?,?,?)",
+ (activity_id, ccode, title),
+ )
inserted += 1
conn.commit()
conn.close()
diff --git a/src/soa_builder/web/routers/epochs.py b/src/soa_builder/web/routers/epochs.py
index f77c562..501d3e8 100644
--- a/src/soa_builder/web/routers/epochs.py
+++ b/src/soa_builder/web/routers/epochs.py
@@ -9,7 +9,7 @@
from fastapi.responses import JSONResponse
from ..schemas import EpochCreate, EpochUpdate
-from ..utils import soa_exists
+from ..utils import soa_exists, table_has_columns as _table_has_columns
DB_PATH = os.environ.get("SOA_BUILDER_DB", "soa_builder_web.db")
@@ -65,17 +65,34 @@ def add_epoch(soa_id: int, payload: EpochCreate):
cur.execute("SELECT MAX(epoch_seq) FROM epoch WHERE soa_id=?", (soa_id,))
row = cur.fetchone()
next_seq = (row[0] or 0) + 1
- cur.execute(
- "INSERT INTO epoch (soa_id,name,order_index,epoch_seq,epoch_label,epoch_description) VALUES (?,?,?,?,?,?)",
- (
- soa_id,
- payload.name,
- order_index,
- next_seq,
- (payload.epoch_label or "").strip() or None,
- (payload.epoch_description or "").strip() or None,
- ),
- )
+ # Determine if epoch_uid column exists and prepare values
+ has_uid = _table_has_columns(cur, "epoch", ("epoch_uid",))
+ epoch_uid_val = f"StudyEpoch_{next_seq}"
+ if has_uid:
+ cur.execute(
+ "INSERT INTO epoch (soa_id,name,order_index,epoch_seq,epoch_label,epoch_description,epoch_uid) VALUES (?,?,?,?,?,?,?)",
+ (
+ soa_id,
+ payload.name,
+ order_index,
+ next_seq,
+ (payload.epoch_label or "").strip() or None,
+ (payload.epoch_description or "").strip() or None,
+ epoch_uid_val,
+ ),
+ )
+ else:
+ cur.execute(
+ "INSERT INTO epoch (soa_id,name,order_index,epoch_seq,epoch_label,epoch_description) VALUES (?,?,?,?,?,?)",
+ (
+ soa_id,
+ payload.name,
+ order_index,
+ next_seq,
+ (payload.epoch_label or "").strip() or None,
+ (payload.epoch_description or "").strip() or None,
+ ),
+ )
eid = cur.lastrowid
conn.commit()
conn.close()
@@ -93,6 +110,7 @@ def add_epoch(soa_id: int, payload: EpochCreate):
"epoch_seq": next_seq,
"epoch_label": (payload.epoch_label or "").strip() or None,
"epoch_description": (payload.epoch_description or "").strip() or None,
+ "epoch_uid": epoch_uid_val if has_uid else f"StudyEpoch_{next_seq}",
},
)
@@ -103,21 +121,43 @@ def list_epochs(soa_id: int):
raise HTTPException(404, "SOA not found")
conn = _connect()
cur = conn.cursor()
- cur.execute(
- "SELECT id,name,order_index,epoch_seq,epoch_label,epoch_description FROM epoch WHERE soa_id=? ORDER BY order_index",
- (soa_id,),
- )
- rows = [
- {
- "id": r[0],
- "name": r[1],
- "order_index": r[2],
- "epoch_seq": r[3],
- "epoch_label": r[4],
- "epoch_description": r[5],
- }
- for r in cur.fetchall()
- ]
+ has_uid = _table_has_columns(cur, "epoch", ("epoch_uid",))
+ if has_uid:
+ cur.execute(
+ "SELECT id,name,order_index,epoch_seq,epoch_label,epoch_description,epoch_uid FROM epoch WHERE soa_id=? ORDER BY order_index",
+ (soa_id,),
+ )
+ rows = [
+ {
+ "id": r[0],
+ "name": r[1],
+ "order_index": r[2],
+ "epoch_seq": r[3],
+ "epoch_label": r[4],
+ "epoch_description": r[5],
+ "epoch_uid": r[6],
+ }
+ for r in cur.fetchall()
+ ]
+ else:
+ cur.execute(
+ "SELECT id,name,order_index,epoch_seq,epoch_label,epoch_description FROM epoch WHERE soa_id=? ORDER BY order_index",
+ (soa_id,),
+ )
+ rows = []
+ for r in cur.fetchall():
+ eid, name, order_index, epoch_seq, epoch_label, epoch_description = r
+ rows.append(
+ {
+ "id": eid,
+ "name": name,
+ "order_index": order_index,
+ "epoch_seq": epoch_seq,
+ "epoch_label": epoch_label,
+ "epoch_description": epoch_description,
+ "epoch_uid": f"StudyEpoch_{epoch_seq or eid}",
+ }
+ )
conn.close()
return {"soa_id": soa_id, "epochs": rows}
@@ -128,23 +168,48 @@ def get_epoch(soa_id: int, epoch_id: int):
raise HTTPException(404, "SOA not found")
conn = _connect()
cur = conn.cursor()
- cur.execute(
- "SELECT id,name,order_index,epoch_seq,epoch_label,epoch_description FROM epoch WHERE id=? AND soa_id=?",
- (epoch_id, soa_id),
- )
- row = cur.fetchone()
+ has_uid = _table_has_columns(cur, "epoch", ("epoch_uid",))
+ if has_uid:
+ cur.execute(
+ "SELECT id,name,order_index,epoch_seq,epoch_label,epoch_description,epoch_uid FROM epoch WHERE id=? AND soa_id=?",
+ (epoch_id, soa_id),
+ )
+ row = cur.fetchone()
+ else:
+ cur.execute(
+ "SELECT id,name,order_index,epoch_seq,epoch_label,epoch_description FROM epoch WHERE id=? AND soa_id=?",
+ (epoch_id, soa_id),
+ )
+ row = cur.fetchone()
conn.close()
if not row:
raise HTTPException(404, "Epoch not found")
- return {
- "id": row[0],
- "soa_id": soa_id,
- "name": row[1],
- "order_index": row[2],
- "epoch_seq": row[3],
- "epoch_label": row[4],
- "epoch_description": row[5],
- }
+ if has_uid:
+ eid, name, order_index, epoch_seq, epoch_label, epoch_description, epoch_uid = (
+ row
+ )
+ return {
+ "id": eid,
+ "soa_id": soa_id,
+ "name": name,
+ "order_index": order_index,
+ "epoch_seq": epoch_seq,
+ "epoch_label": epoch_label,
+ "epoch_description": epoch_description,
+ "epoch_uid": epoch_uid,
+ }
+ else:
+ eid, name, order_index, epoch_seq, epoch_label, epoch_description = row
+ return {
+ "id": eid,
+ "soa_id": soa_id,
+ "name": name,
+ "order_index": order_index,
+ "epoch_seq": epoch_seq,
+ "epoch_label": epoch_label,
+ "epoch_description": epoch_description,
+ "epoch_uid": f"StudyEpoch_{epoch_seq or eid}",
+ }
@router.post("/soa/{soa_id}/epochs/{epoch_id}/metadata")
diff --git a/src/soa_builder/web/schemas.py b/src/soa_builder/web/schemas.py
index 7620550..22adba4 100644
--- a/src/soa_builder/web/schemas.py
+++ b/src/soa_builder/web/schemas.py
@@ -5,10 +5,14 @@
class ActivityCreate(BaseModel):
name: str
+ label: Optional[str] = None
+ description: Optional[str] = None
class ActivityUpdate(BaseModel):
name: Optional[str] = None
+ label: Optional[str] = None
+ description: Optional[str] = None
class BulkActivities(BaseModel):
diff --git a/src/soa_builder/web/templates/audits.html b/src/soa_builder/web/templates/audits.html
new file mode 100644
index 0000000..e69de29
diff --git a/src/soa_builder/web/templates/edit.html b/src/soa_builder/web/templates/edit.html
index 1c251fd..3736aa3 100644
--- a/src/soa_builder/web/templates/edit.html
+++ b/src/soa_builder/web/templates/edit.html
@@ -121,16 +121,27 @@ Editing SoA {{ soa_id }}
Activities ({{ activities|length }}) (drag to reorder)
{% for a in activities %}
- - {{ a.order_index }}. {{ a.name }}
+
-
+ {{ a.order_index }}. {{ a.name }}
+
+
{% endfor %}
-
@@ -292,7 +303,7 @@ Editing SoA {{ soa_id }}
diff --git a/src/soa_builder/web/utils.py b/src/soa_builder/web/utils.py
index f03c2f7..422f9df 100644
--- a/src/soa_builder/web/utils.py
+++ b/src/soa_builder/web/utils.py
@@ -230,6 +230,38 @@ def get_next_code_uid(cur: Any, soa_id: int) -> str:
return f"Code_{n}"
def get_next_concept_uid(cur: Any, soa_id: int) -> str:
    """Compute the next unique ``BiomedicalConcept_N`` UID for the given SOA.

    Assumes `cur` is a sqlite cursor within an open transaction.
    Uses the activity_concept table when available; falls back safely if the
    table or the ``concept_uid`` column is missing.

    Parameters:
        cur: sqlite3 cursor
        soa_id: SOA whose concept UIDs are scanned (only used when the table
            has a ``soa_id`` column; legacy schemas are scanned globally)

    Returns:
        "BiomedicalConcept_<max existing N + 1>", or "BiomedicalConcept_1"
        when no numeric UIDs exist or the schema does not support them.
    """
    try:
        cur.execute("PRAGMA table_info(activity_concept)")
        cols = {r[1] for r in cur.fetchall()}
        if "concept_uid" not in cols:
            return "BiomedicalConcept_1"
        if "soa_id" in cols:
            cur.execute(
                "SELECT concept_uid FROM activity_concept WHERE soa_id=? AND concept_uid LIKE 'BiomedicalConcept_%'",
                (soa_id,),
            )
        else:
            cur.execute(
                "SELECT concept_uid FROM activity_concept WHERE concept_uid LIKE 'BiomedicalConcept_%'"
            )
        # Parse each suffix individually: a single malformed UID (e.g.
        # "BiomedicalConcept_abc") must not abort the scan.  The previous
        # all-or-nothing max() fell back to len(existing)+1, which could
        # collide with an existing numbered UID.
        max_n = 0
        for (uid,) in cur.fetchall():
            if not uid:
                continue
            suffix = uid.rsplit("_", 1)[-1]
            if suffix.isdigit():
                max_n = max(max_n, int(suffix))
        return f"BiomedicalConcept_{max_n + 1}"
    except Exception:
        # Best-effort fallback (table missing / cursor unusable), matching
        # the original behavior of never raising from UID generation.
        return "BiomedicalConcept_1"
+
+
def soa_exists(soa_id: int) -> bool:
"""Return True if an SOA row exists with the given id."""
try:
@@ -241,3 +273,19 @@ def soa_exists(soa_id: int) -> bool:
return ok
except Exception:
return False
+
+
def table_has_columns(cur: Any, table: str, required: Iterable[str]) -> bool:
    """Return True if all required column names exist in the given table.

    Parameters:
        cur: sqlite3 cursor
        table: table name (constant in code; not user-provided, so the
            f-string below is not an injection vector)
        required: any iterable of column names to check (list, tuple, set,
            generator).  The old ``List[str] | tuple`` annotation was
            narrower than what the implementation already accepted.

    Returns:
        True when every required column is present; False for a missing
        table, a failing cursor, or any absent column.
    """
    try:
        cur.execute(f"PRAGMA table_info({table})")
        present = {row[1] for row in cur.fetchall()}
        # issuperset consumes any iterable and vacuously returns True for
        # an empty `required`, matching the original all(...) semantics.
        return present.issuperset(required)
    except Exception:
        # Treat schema-introspection failure as "columns absent" rather
        # than propagating, mirroring the original best-effort contract.
        return False
diff --git a/src/usdm/generate_activities.py b/src/usdm/generate_activities.py
new file mode 100755
index 0000000..7671386
--- /dev/null
+++ b/src/usdm/generate_activities.py
@@ -0,0 +1,140 @@
+#!/usr/bin/env python3
+# Prefer absolute import; fallback to adding src/ to sys.path when run directly
+from typing import Optional, List, Dict, Any
+
+try:
+ from soa_builder.web.app import _connect # reuse existing DB connector
+except ImportError:
+ import sys
+ from pathlib import Path
+
+ here = Path(__file__).resolve()
+ src_dir = here.parents[2] / "src"
+ if src_dir.exists() and str(src_dir) not in sys.path:
+ sys.path.insert(0, str(src_dir))
+ from soa_builder.web.app import _connect # type: ignore
+
+
+def _nz(s: Optional[str]) -> Optional[str]:
+    """Normalize a string: strip whitespace; map None/empty/blank to None."""
+    s = (s or "").strip()
+    return s or None
+
+
def _get_biomedical_concept_ids(soa_id: int, activity_uid: str) -> List[str]:
    """Return the concept_uid values linked to one activity in the given SOA.

    Parameters:
        soa_id: SOA row id.
        activity_uid: the activity's string UID (e.g. "Activity_3") — the
            previous ``int`` annotation was wrong; callers pass the text UID
            stored in activity.activity_uid.

    Returns:
        List of concept_uid strings (possibly empty).
    """
    conn = _connect()
    try:
        cur = conn.cursor()
        cur.execute(
            "SELECT concept_uid from activity_concept where soa_id=? and activity_uid=?",
            (soa_id, activity_uid),
        )
        # `or []` was redundant: a list comprehension is already [] when empty.
        return [row[0] for row in cur.fetchall()]
    finally:
        # Close the connection even if execute() raises (was leaked before).
        conn.close()
+
+
def build_usdm_activities(soa_id: int) -> List[Dict[str, Any]]:
    """
    Build USDM Activity-Output objects for the given SOA.

    USDM Activity-Output (subset emitted here):
    - id: string (the stored activity_uid)
    - name: string
    - label?: string | null (blank collapsed to null)
    - description?: string | null (blank collapsed to null)
    - previousId / nextId: linear links following display order
    - biomedicalConceptIds: concept UIDs linked via activity_concept
    - childIds / definedProcedures / bcCategoryIds / bcSurrogateIds / notes /
      extensionAttributes: emitted empty; timelineId: null
    - instanceType: "Activity"
    """
    conn = _connect()
    try:
        cur = conn.cursor()
        # Order by order_index when the column exists, else by id, so legacy
        # schemas still produce deterministic output.
        cur.execute("PRAGMA table_info(activity)")
        cols = {r[1] for r in cur.fetchall()}
        order_clause = (
            "ORDER BY order_index, id" if "order_index" in cols else "ORDER BY id"
        )
        # order_clause is built from constants above — not an injection vector.
        cur.execute(
            f"SELECT id, activity_uid, name, label, description FROM activity WHERE soa_id=? {order_clause}",
            (soa_id,),
        )
        rows = cur.fetchall()
    finally:
        # Close even on query failure (connection was leaked before).
        conn.close()

    # Linear previous/next links by list position; plain indexing replaces
    # the former index->uid dict, and avoids shadowing the builtin `id`.
    uids = [row[1] for row in rows]

    out: List[Dict[str, Any]] = []
    for i, (_row_id, activity_uid, name, label, description) in enumerate(rows):
        out.append(
            {
                "id": activity_uid,
                "extensionAttributes": [],
                "name": name,
                "label": _nz(label),
                "description": _nz(description),
                "previousId": uids[i - 1] if i > 0 else None,
                "nextId": uids[i + 1] if i + 1 < len(uids) else None,
                "childIds": [],
                "definedProcedures": [],
                # NOTE: populated from activity_concept (the old docstring
                # incorrectly claimed this was left empty).
                "biomedicalConceptIds": _get_biomedical_concept_ids(
                    soa_id, activity_uid
                ),
                "bcCategoryIds": [],
                "bcSurrogateIds": [],
                "timelineId": None,
                "notes": [],
                "instanceType": "Activity",
            }
        )

    return out
+
+
+if __name__ == "__main__":
+ import argparse
+ import json
+ import logging
+ import sys
+
+ logger = logging.getLogger("usdm.generate_activities")
+
+ parser = argparse.ArgumentParser(description="Export USDM activities for a SOA.")
+ parser.add_argument("soa_id", type=int, help="SOA id to export activities for")
+ parser.add_argument(
+ "-o", "--output", default="-", help="Output file path or '-' for stdout"
+ )
+ parser.add_argument("--indent", type=int, default=2, help="JSON indent")
+ args = parser.parse_args()
+
+ try:
+ activities = build_usdm_activities(args.soa_id)
+ except Exception:
+ logger.exception("Failed to build activities for soa_id=%s", args.soa_id)
+ sys.exit(1)
+
+ payload = json.dumps(activities, indent=args.indent)
+ if args.output in ("-", "/dev/stdout"):
+ sys.stdout.write(payload + "\n")
+ else:
+ with open(args.output, "w", encoding="utf-8") as f:
+ f.write(payload + "\n")
diff --git a/src/usdm/generate_study_cells.py b/src/usdm/generate_study_cells.py
new file mode 100644
index 0000000..48e1b20
--- /dev/null
+++ b/src/usdm/generate_study_cells.py
@@ -0,0 +1,117 @@
+#!/usr/bin/env python3
+# Prefer absolute import; fallback to adding src/ to sys.path when run directly
+from typing import Optional, List, Dict, Any
+
+try:
+ from soa_builder.web.app import _connect # reuse existing DB connector
+except ImportError:
+ import sys
+ from pathlib import Path
+
+ here = Path(__file__).resolve()
+ src_dir = here.parents[2] / "src"
+ if src_dir.exists() and str(src_dir) not in sys.path:
+ sys.path.insert(0, str(src_dir))
+ from soa_builder.web.app import _connect # type: ignore
+
+
+def _nz(s: Optional[str]) -> Optional[str]:
+    """Normalize a string: strip whitespace; map None/empty/blank to None."""
+    s = (s or "").strip()
+    return s or None
+
+
def _get_element_ids(soa_id: int, study_cell_uid: str) -> List[str]:
    """Return the element UIDs attached to one StudyCell.

    Results are de-duplicated (falsy/blank UIDs dropped) while keeping the
    order in which the query returned them.
    """
    conn = _connect()
    cur = conn.cursor()
    cur.execute(
        "SELECT element_uid from study_cell WHERE soa_id=? AND study_cell_uid=? ORDER BY element_uid",
        (soa_id, study_cell_uid),
    )
    fetched = cur.fetchall()
    conn.close()
    # dict keys preserve insertion order (3.7+), giving a stable de-dup pass.
    unique = {row[0]: None for row in fetched if row[0]}
    return list(unique)
+
+
def build_usdm_study_cells(soa_id: int) -> List[Dict[str, Any]]:
    """
    Build USDM StudyCell-Output objects for the given SOA.

    USDM StudyCell-Output:
    - id: string (study_cell_uid)
    - extensionAttributes: []
    - armId: string  (the old docstring's "armdId" was a typo)
    - epochId: string
    - elementIds: string[]
    - instanceType: "StudyCell"
    """
    conn = _connect()
    try:
        cur = conn.cursor()
        # One StudyCell per (study_cell_uid, arm_uid, epoch_uid) group,
        # ordered by first appearance.  GROUP BY + MIN(id) is used instead of
        # the previous "SELECT DISTINCT ... ORDER BY id, study_cell_uid":
        # SQLite rejects a DISTINCT query whose ORDER BY term (id) is not one
        # of the result columns, so that query raised OperationalError.
        cur.execute(
            "SELECT study_cell_uid, arm_uid, epoch_uid FROM study_cell "
            "WHERE soa_id=? GROUP BY study_cell_uid, arm_uid, epoch_uid "
            "ORDER BY MIN(id), study_cell_uid",
            (soa_id,),
        )
        rows = cur.fetchall()
    finally:
        # Close even on query failure (connection was leaked before).
        conn.close()

    out: List[Dict[str, Any]] = []
    for study_cell_uid, arm_uid, epoch_uid in rows:
        out.append(
            {
                "id": study_cell_uid,
                "extensionAttributes": [],
                "armId": arm_uid,
                "epochId": epoch_uid,
                "elementIds": _get_element_ids(soa_id, study_cell_uid),
                "instanceType": "StudyCell",
            }
        )

    return out
+
+
+if __name__ == "__main__":
+ import argparse
+ import json
+ import logging
+ import sys
+
+ logger = logging.getLogger("usdm.generate_study_cells")
+
+ parser = argparse.ArgumentParser(description="Export USDM StudyCells for an SOA.")
+ parser.add_argument("soa_id", type=int, help="SOA id to export activities for")
+ parser.add_argument(
+ "-o", "--output", default="-", help="Output file path or '-' for stdout"
+ )
+ parser.add_argument("--indent", type=int, default=2, help="JSON indent")
+ args = parser.parse_args()
+
+ try:
+ study_cells = build_usdm_study_cells(args.soa_id)
+ except Exception:
+ logger.exception("Failed to build StudyCells for soa_id=%s", args.soa_id)
+ sys.exit(1)
+
+ payload = json.dumps(study_cells, indent=args.indent)
+ if args.output in ("-", "/dev/stdout"):
+ sys.stdout.write(payload + "\n")
+ else:
+ with open(args.output, "w", encoding="utf-8") as f:
+ f.write(payload + "\n")
diff --git a/tests/test_study_cell_uid_reuse.py b/tests/test_study_cell_uid_reuse.py
new file mode 100644
index 0000000..3a894d4
--- /dev/null
+++ b/tests/test_study_cell_uid_reuse.py
@@ -0,0 +1,99 @@
+from fastapi.testclient import TestClient
+
+from soa_builder.web.app import app, _connect
+
+client = TestClient(app)
+
+
+def test_study_cell_uid_reuse_same_arm_epoch():
+    """StudyCell rows created for one (arm, epoch) pair must share a single
+    study_cell_uid, and re-submitting an element must not duplicate rows.
+    """
+    # Create study
+    r = client.post("/soa", json={"name": "UID Reuse Study"})
+    assert r.status_code == 200
+    soa_id = r.json()["id"]
+
+    # Create an arm (UI API is in routers.arms; use JSON route)
+    arm_resp = client.post(f"/soa/{soa_id}/arms", json={"name": "Arm A"})
+    assert arm_resp.status_code in (200, 201)
+    arm_id = arm_resp.json().get("arm_id") or arm_resp.json().get("id")
+
+    # Fetch arm_uid
+    conn = _connect()
+    cur = conn.cursor()
+    cur.execute("SELECT arm_uid FROM arm WHERE id=?", (arm_id,))
+    arm_uid = cur.fetchone()[0]
+
+    # Create two epochs via JSON API
+    e1 = client.post(f"/soa/{soa_id}/epochs", json={"name": "Screening"})
+    assert e1.status_code in (200, 201)
+    e2 = client.post(f"/soa/{soa_id}/epochs", json={"name": "Treatment"})
+    assert e2.status_code in (200, 201)
+
+    # Pick first epoch and get its epoch_uid
+    cur.execute(
+        "SELECT id, epoch_uid FROM epoch WHERE soa_id=? ORDER BY order_index", (soa_id,)
+    )
+    rows = cur.fetchall()
+    epoch_id = rows[0][0]
+    # Legacy DBs may lack epoch_uid; fall back to the synthesized form.
+    epoch_uid = rows[0][1] or ("StudyEpoch_" + str(epoch_id))
+
+    # Create two elements
+    el1 = client.post(
+        f"/soa/{soa_id}/elements",
+        json={"name": "Dose", "label": "D", "description": ""},
+    )
+    assert el1.status_code in (200, 201)
+    el2 = client.post(
+        f"/soa/{soa_id}/elements", json={"name": "Lab", "label": "L", "description": ""}
+    )
+    assert el2.status_code in (200, 201)
+
+    # Fetch element logical ids (element_id)
+    cur.execute("PRAGMA table_info(element)")
+    cols = {r[1] for r in cur.fetchall()}
+    assert "element_id" in cols, "element_id column expected in tests"
+    cur.execute("SELECT element_id FROM element WHERE soa_id=? ORDER BY id", (soa_id,))
+    element_ids = [r[0] for r in cur.fetchall()]
+    assert len(element_ids) >= 2
+    el_a, el_b = element_ids[0], element_ids[1]
+
+    conn.close()
+
+    # Call UI endpoint to add study cells with multiple elements; reuse safeguard should apply
+    form = {
+        "arm_uid": arm_uid,
+        "epoch_uid": epoch_uid,
+        "element_uids": [el_a, el_b],
+    }
+    resp = client.post(f"/ui/soa/{soa_id}/add_study_cell", data=form)
+    assert resp.status_code in (200, 201)
+
+    # Verify rows share the same study_cell_uid
+    conn = _connect()
+    cur = conn.cursor()
+    cur.execute(
+        "SELECT study_cell_uid, element_uid FROM study_cell WHERE soa_id=? AND arm_uid=? AND epoch_uid=? ORDER BY id",
+        (soa_id, arm_uid, epoch_uid),
+    )
+    sc_rows = cur.fetchall()
+    conn.close()
+    assert len(sc_rows) >= 2
+    uids = {r[0] for r in sc_rows}
+    assert (
+        len(uids) == 1
+    ), "Expected all StudyCell rows to reuse the same study_cell_uid"
+
+    # Idempotence check: submitting the same element again should not create a duplicate row
+    form_dup = {"arm_uid": arm_uid, "epoch_uid": epoch_uid, "element_uids": [el_b]}
+    resp_dup = client.post(f"/ui/soa/{soa_id}/add_study_cell", data=form_dup)
+    assert resp_dup.status_code in (200, 201)
+    conn = _connect()
+    cur = conn.cursor()
+    cur.execute(
+        "SELECT COUNT(*) FROM study_cell WHERE soa_id=? AND arm_uid=? AND epoch_uid=? AND element_uid=?",
+        (soa_id, arm_uid, epoch_uid, el_b),
+    )
+    cnt = cur.fetchone()[0]
+    conn.close()
+    assert (
+        cnt == 1
+    ), "Duplicate submission should not create a second row for the same element"
diff --git a/tests/test_study_cell_uid_reuse_later.py b/tests/test_study_cell_uid_reuse_later.py
new file mode 100644
index 0000000..817740b
--- /dev/null
+++ b/tests/test_study_cell_uid_reuse_later.py
@@ -0,0 +1,107 @@
+from fastapi.testclient import TestClient
+
+from soa_builder.web.app import app, _connect
+
+client = TestClient(app)
+
+
+def test_study_cell_uid_reuse_on_later_addition():
+    """A later add_study_cell submission for the same (arm, epoch) must reuse
+    the previously assigned study_cell_uid; duplicates must not create rows.
+    """
+    # Create study
+    r = client.post("/soa", json={"name": "UID Reuse Later Study"})
+    assert r.status_code == 200
+    soa_id = r.json()["id"]
+
+    # Create an arm
+    arm_resp = client.post(f"/soa/{soa_id}/arms", json={"name": "Arm B"})
+    assert arm_resp.status_code in (200, 201)
+    arm_id = arm_resp.json().get("arm_id") or arm_resp.json().get("id")
+
+    # Fetch arm_uid
+    conn = _connect()
+    cur = conn.cursor()
+    cur.execute("SELECT arm_uid FROM arm WHERE id=?", (arm_id,))
+    arm_uid = cur.fetchone()[0]
+
+    # Create one epoch
+    e1 = client.post(f"/soa/{soa_id}/epochs", json={"name": "Baseline"})
+    assert e1.status_code in (200, 201)
+
+    # Get epoch_uid
+    cur.execute(
+        "SELECT id, epoch_uid FROM epoch WHERE soa_id=? ORDER BY order_index", (soa_id,)
+    )
+    rows = cur.fetchall()
+    # Legacy DBs may lack epoch_uid; fall back to the synthesized form.
+    epoch_uid = rows[0][1] or ("StudyEpoch_" + str(rows[0][0]))
+
+    # Create three elements
+    el1 = client.post(
+        f"/soa/{soa_id}/elements",
+        json={"name": "Vitals", "label": "V", "description": ""},
+    )
+    assert el1.status_code in (200, 201)
+    el2 = client.post(
+        f"/soa/{soa_id}/elements", json={"name": "ECG", "label": "E", "description": ""}
+    )
+    assert el2.status_code in (200, 201)
+    el3 = client.post(
+        f"/soa/{soa_id}/elements", json={"name": "PK", "label": "P", "description": ""}
+    )
+    assert el3.status_code in (200, 201)
+
+    # Get element_uids (element_id)
+    cur.execute("SELECT element_id FROM element WHERE soa_id=? ORDER BY id", (soa_id,))
+    element_ids = [r[0] for r in cur.fetchall()]
+    assert len(element_ids) >= 3
+    el_a, el_b, el_c = element_ids[:3]
+    conn.close()
+
+    # First submission: add Vitals+ECG
+    form1 = {"arm_uid": arm_uid, "epoch_uid": epoch_uid, "element_uids": [el_a, el_b]}
+    resp1 = client.post(f"/ui/soa/{soa_id}/add_study_cell", data=form1)
+    assert resp1.status_code in (200, 201)
+
+    # Capture the assigned study_cell_uid
+    conn = _connect()
+    cur = conn.cursor()
+    cur.execute(
+        "SELECT DISTINCT study_cell_uid FROM study_cell WHERE soa_id=? AND arm_uid=? AND epoch_uid=?",
+        (soa_id, arm_uid, epoch_uid),
+    )
+    first_uid = cur.fetchone()[0]
+    conn.close()
+    assert first_uid and first_uid.startswith("StudyCell_")
+
+    # Second submission later: add PK only, should reuse the same UID
+    form2 = {"arm_uid": arm_uid, "epoch_uid": epoch_uid, "element_uids": [el_c]}
+    resp2 = client.post(f"/ui/soa/{soa_id}/add_study_cell", data=form2)
+    assert resp2.status_code in (200, 201)
+
+    # Verify all three rows share the same study_cell_uid
+    conn = _connect()
+    cur = conn.cursor()
+    cur.execute(
+        "SELECT study_cell_uid, element_uid FROM study_cell WHERE soa_id=? AND arm_uid=? AND epoch_uid=? ORDER BY id",
+        (soa_id, arm_uid, epoch_uid),
+    )
+    rows = cur.fetchall()
+    conn.close()
+    assert len(rows) >= 3
+    uids = {r[0] for r in rows}
+    assert len(uids) == 1
+    assert first_uid in uids
+
+    # Idempotence check: submitting the same element again should not create a duplicate row
+    form_dup = {"arm_uid": arm_uid, "epoch_uid": epoch_uid, "element_uids": [el_c]}
+    resp_dup = client.post(f"/ui/soa/{soa_id}/add_study_cell", data=form_dup)
+    assert resp_dup.status_code in (200, 201)
+    conn = _connect()
+    cur = conn.cursor()
+    cur.execute(
+        "SELECT COUNT(*) FROM study_cell WHERE soa_id=? AND arm_uid=? AND epoch_uid=? AND element_uid=?",
+        (soa_id, arm_uid, epoch_uid, el_c),
+    )
+    cnt = cur.fetchone()[0]
+    conn.close()
+    assert (
+        cnt == 1
+    ), "Duplicate submission should not create a second row for the same element"