777 changes: 777 additions & 0 deletions COMPREHENSIVE_RUN_SUMMARY.md

Large diffs are not rendered by default.

157 changes: 157 additions & 0 deletions benchmarks/results/hbcm_20251125_145434.json
@@ -0,0 +1,157 @@
{
"timestamp": "2025-11-25T14:54:34.374584",
"benchmark_suite": "HBCM Performance",
"results": [
{
"test_name": "HBCM Single Step",
"duration_s": 0.01442199200002392,
"iterations": 10000,
"mean_latency_ms": 0.0012409382,
"median_latency_ms": 0.00122,
"p95_latency_ms": 0.001327,
"p99_latency_ms": 0.001873,
"min_latency_ms": 0.001036,
"max_latency_ms": 0.014447,
"throughput_ops_per_sec": 693385.4907133088,
"memory_mb": 0.0,
"metadata": {
"dt": 0.001,
"state_size": 4
}
},
{
"test_name": "HBCM Short Simulation (100 steps)",
"duration_s": 0.017287748000000006,
"iterations": 100,
"mean_latency_ms": 0.17287748,
"median_latency_ms": 0.164737,
"p95_latency_ms": 0.197652,
"p99_latency_ms": 0.345699,
"min_latency_ms": 0.161277,
"max_latency_ms": 0.345699,
"throughput_ops_per_sec": 5784.4434104430475,
"memory_mb": 0.0,
"metadata": {
"steps": 100,
"simulated_time_s": 0.1
}
},
{
"test_name": "HBCM Long Simulation (10,000 steps)",
"duration_s": 0.21427981399999999,
"iterations": 10,
"mean_latency_ms": 21.4279814,
"median_latency_ms": 20.903914,
"p95_latency_ms": 26.940432,
"p99_latency_ms": 26.940432,
"min_latency_ms": 18.785698,
"max_latency_ms": 26.940432,
"throughput_ops_per_sec": 46.667951653159456,
"memory_mb": 0.0,
"metadata": {
"steps": 10000,
"simulated_time_s": 10.0
}
},
{
"test_name": "Real-Time Capability (1000 Hz)",
"duration_s": 0.015861716999978626,
"iterations": 10000,
"mean_latency_ms": 0.0013879299,
"median_latency_ms": 0.001358,
"p95_latency_ms": 0.001571,
"p99_latency_ms": 0.001757,
"min_latency_ms": 0.001097,
"max_latency_ms": 0.011467,
"throughput_ops_per_sec": 630448.7717195733,
"memory_mb": 0.0,
"metadata": {
"target_hz": 1000,
"target_dt_ms": 1.0,
"simulated_time_s": 10.0,
"realtime_factor": 630.4487717195733,
"can_run_realtime": true
}
},
{
"test_name": "Parameter Scaling (dt=0.0001s)",
"duration_s": 0.0012714360000000001,
"iterations": 1000,
"mean_latency_ms": 0.001271436,
"median_latency_ms": 0.001222,
"p95_latency_ms": 0.001351,
"p99_latency_ms": 0.001549,
"min_latency_ms": 0.001071,
"max_latency_ms": 0.021118,
"throughput_ops_per_sec": 786512.2585800622,
"memory_mb": 0.0,
"metadata": {
"dt": 0.0001
}
},
{
"test_name": "Parameter Scaling (dt=0.0005s)",
"duration_s": 0.0012715989999999985,
"iterations": 1000,
"mean_latency_ms": 0.001271599,
"median_latency_ms": 0.00124,
"p95_latency_ms": 0.001336,
"p99_latency_ms": 0.001403,
"min_latency_ms": 0.00114,
"max_latency_ms": 0.011554,
"throughput_ops_per_sec": 786411.4394553638,
"memory_mb": 0.0,
"metadata": {
"dt": 0.0005
}
},
{
"test_name": "Parameter Scaling (dt=0.001s)",
"duration_s": 0.0012477389999999994,
"iterations": 1000,
"mean_latency_ms": 0.001247739,
"median_latency_ms": 0.001241,
"p95_latency_ms": 0.001334,
"p99_latency_ms": 0.001412,
"min_latency_ms": 0.001076,
"max_latency_ms": 0.004644,
"throughput_ops_per_sec": 801449.6621488953,
"memory_mb": 0.0,
"metadata": {
"dt": 0.001
}
},
{
"test_name": "Parameter Scaling (dt=0.005s)",
"duration_s": 0.001226551999999999,
"iterations": 1000,
"mean_latency_ms": 0.001226552,
"median_latency_ms": 0.001217,
"p95_latency_ms": 0.001326,
"p99_latency_ms": 0.00145,
"min_latency_ms": 0.001063,
"max_latency_ms": 0.003936,
"throughput_ops_per_sec": 815293.6035325048,
"memory_mb": 0.0,
"metadata": {
"dt": 0.005
}
},
{
"test_name": "Parameter Scaling (dt=0.01s)",
"duration_s": 0.00127227,
"iterations": 1000,
"mean_latency_ms": 0.00127227,
"median_latency_ms": 0.001262,
"p95_latency_ms": 0.00137,
"p99_latency_ms": 0.001494,
"min_latency_ms": 0.001082,
"max_latency_ms": 0.008907,
"throughput_ops_per_sec": 785996.6830939973,
"memory_mb": 0.0,
"metadata": {
"dt": 0.01
}
}
]
}
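
Each entry above follows the same schema. Below is a minimal sketch of how such aggregates could be derived from raw per-iteration timings; the field names mirror the JSON, but the derivations are assumptions rather than the benchmark suite's actual code. (Two of them are verifiable against the data: throughput_ops_per_sec equals iterations / duration_s in every entry, and realtime_factor equals throughput divided by target_hz, e.g. 630448.77 / 1000 ≈ 630.45.)

import statistics

def summarize(latencies_ms, duration_s, target_hz=None):
    """Aggregate per-iteration latencies into the fields used above (sketch)."""
    n = len(latencies_ms)
    ordered = sorted(latencies_ms)
    result = {
        "iterations": n,
        "duration_s": duration_s,
        "mean_latency_ms": statistics.fmean(latencies_ms),
        "median_latency_ms": statistics.median(latencies_ms),
        # Nearest-rank percentiles; the real suite may interpolate differently.
        "p95_latency_ms": ordered[min(n - 1, int(0.95 * n))],
        "p99_latency_ms": ordered[min(n - 1, int(0.99 * n))],
        "min_latency_ms": ordered[0],
        "max_latency_ms": ordered[-1],
        # Matches iterations / duration_s in the recorded results.
        "throughput_ops_per_sec": n / duration_s,
    }
    if target_hz is not None:
        # realtime_factor = throughput / target rate; >= 1.0 means each step
        # completes faster than the control period it has to fit inside.
        factor = result["throughput_ops_per_sec"] / target_hz
        result["realtime_factor"] = factor
        result["can_run_realtime"] = factor >= 1.0
    return result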
4 changes: 2 additions & 2 deletions benchmarks/results/plp_vs_pid_validation.json
@@ -50022,7 +50022,7 @@
"disturbance_rejection_time": 0.0,
"noise_amplification": 0.24693005316107844,
"control_effort": 1.9646503563061755,
"computation_time_us": 6.768041399999999,
"computation_time_us": 6.8092844,
"stability_margin": 0.20406070310891564,
"max_control_value": 0.21381508940429902
},
@@ -50034,7 +50034,7 @@
"disturbance_rejection_time": 0.0,
"noise_amplification": 0.37800948291826,
"control_effort": 8.311934466214723,
"computation_time_us": 4.0539564,
"computation_time_us": 4.0034568,
"stability_margin": 0.37160875122898035,
"max_control_value": 1.0
}
83 changes: 83 additions & 0 deletions pytest_output.log
@@ -0,0 +1,83 @@
============================= test session starts ==============================
platform linux -- Python 3.11.14, pytest-9.0.1, pluggy-1.6.0 -- /root/.local/share/uv/tools/pytest/bin/python
cachedir: .pytest_cache
rootdir: /home/user/Multi-Heart-Model
collecting ... collected 8 items / 6 errors

==================================== ERRORS ====================================
_____ ERROR collecting tests/integration/test_microprocessor_motorhand.py ______
ImportError while importing test module '/home/user/Multi-Heart-Model/tests/integration/test_microprocessor_motorhand.py'.
Hint: make sure your test modules/packages have valid Python names.
Traceback:
/usr/lib/python3.11/importlib/__init__.py:126: in import_module
return _bootstrap._gcd_import(name[level:], package, level)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/integration/test_microprocessor_motorhand.py:10: in <module>
import numpy as np
E ModuleNotFoundError: No module named 'numpy'
___________ ERROR collecting tests/organ_chip/test_drug_toxicity.py ____________
ImportError while importing test module '/home/user/Multi-Heart-Model/tests/organ_chip/test_drug_toxicity.py'.
Hint: make sure your test modules/packages have valid Python names.
Traceback:
/usr/lib/python3.11/importlib/__init__.py:126: in import_module
return _bootstrap._gcd_import(name[level:], package, level)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/organ_chip/test_drug_toxicity.py:18: in <module>
import numpy as np
E ModuleNotFoundError: No module named 'numpy'
____________ ERROR collecting tests/organchip/test_drug_toxicity.py ____________
ImportError while importing test module '/home/user/Multi-Heart-Model/tests/organchip/test_drug_toxicity.py'.
Hint: make sure your test modules/packages have valid Python names.
Traceback:
/usr/lib/python3.11/importlib/__init__.py:126: in import_module
return _bootstrap._gcd_import(name[level:], package, level)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/organchip/test_drug_toxicity.py:17: in <module>
from organchip.orchestrator import OrganChipSuite, create_default_organ_chip_suite
E ModuleNotFoundError: No module named 'organchip.orchestrator'
_______ ERROR collecting tests/surgical_robotics/test_dvrk_interface.py ________
ImportError while importing test module '/home/user/Multi-Heart-Model/tests/surgical_robotics/test_dvrk_interface.py'.
Hint: make sure your test modules/packages have valid Python names.
Traceback:
/usr/lib/python3.11/importlib/__init__.py:126: in import_module
return _bootstrap._gcd_import(name[level:], package, level)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/surgical_robotics/test_dvrk_interface.py:6: in <module>
import numpy as np
E ModuleNotFoundError: No module named 'numpy'
______ ERROR collecting tests/surgical_robotics/test_physio_controller.py ______
ImportError while importing test module '/home/user/Multi-Heart-Model/tests/surgical_robotics/test_physio_controller.py'.
Hint: make sure your test modules/packages have valid Python names.
Traceback:
/usr/lib/python3.11/importlib/__init__.py:126: in import_module
return _bootstrap._gcd_import(name[level:], package, level)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/surgical_robotics/test_physio_controller.py:6: in <module>
import numpy as np
E ModuleNotFoundError: No module named 'numpy'
____________________ ERROR collecting tests/test_models.py _____________________
ImportError while importing test module '/home/user/Multi-Heart-Model/tests/test_models.py'.
Hint: make sure your test modules/packages have valid Python names.
Traceback:
/usr/lib/python3.11/importlib/__init__.py:126: in import_module
return _bootstrap._gcd_import(name[level:], package, level)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
tests/test_models.py:3: in <module>
from src.cardiac import VanDerPolOscillator
src/__init__.py:3: in <module>
from . import neural, cardiac, coupling # re-export packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
src/cardiac/__init__.py:26: in <module>
from .luo_rudy import LuoRudyModel, LuoRudyParameters
src/cardiac/luo_rudy.py:29: in <module>
import numpy as np
E ModuleNotFoundError: No module named 'numpy'
=========================== short test summary info ============================
ERROR tests/integration/test_microprocessor_motorhand.py
ERROR tests/organ_chip/test_drug_toxicity.py
ERROR tests/organchip/test_drug_toxicity.py
ERROR tests/surgical_robotics/test_dvrk_interface.py
ERROR tests/surgical_robotics/test_physio_controller.py
ERROR tests/test_models.py
!!!!!!!!!!!!!!!!!!! Interrupted: 6 errors during collection !!!!!!!!!!!!!!!!!!!!
============================== 6 errors in 0.65s ===============================
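
All six collection errors reduce to missing modules in the interpreter pytest ran under (/root/.local/share/uv/tools/pytest/bin/python, which appears to be an isolated uv tool environment rather than the project's own environment). A small diagnostic sketch, assuming the goal is to confirm which interpreter and modules are visible before re-running the suite:

# Diagnostic sketch (assumption: the collection errors come from pytest's
# isolated interpreter lacking the project's dependencies, not from the tests).
import importlib.util
import sys

print("interpreter:", sys.executable)
for name in ("numpy", "organchip"):
    spec = importlib.util.find_spec(name)
    print(f"{name}: {'found' if spec is not None else 'MISSING'}")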
129 changes: 129 additions & 0 deletions run_tests_simple.py
@@ -0,0 +1,129 @@
#!/usr/bin/env python3
"""
Simple test runner that doesn't require pytest
Runs basic functionality tests for all models
"""

import sys
from pathlib import Path

# Make the repo root importable so the `src.*` packages used below resolve
sys.path.insert(0, str(Path(__file__).parent))

def test_van_der_pol():
"""Test Van der Pol oscillator"""
from src.cardiac import VanDerPolOscillator

model = VanDerPolOscillator(mu=1.5, omega=1.0)
state = (1.0, 0.0)

# Test step
new_state = model.step(0.0, state, 0.001)
assert len(new_state) == 2
assert not any(v != v for v in new_state) # Check for NaN

return True

def test_fitzhugh_nagumo():
"""Test FitzHugh-Nagumo model"""
from src.neural import FitzHughNagumo

model = FitzHughNagumo(stimulus_amplitude=0.5)
state = (0.0, 0.0)

# Test step
new_state = model.step(0.0, state, 0.001)
assert len(new_state) == 2
assert not any(v != v for v in new_state) # Check for NaN

return True

def test_hbcm():
"""Test Heart-Brain Coupling Model"""
from src.cardiac import VanDerPolOscillator
from src.neural import FitzHughNagumo
from src.coupling import HeartBrainCouplingModel

neural = FitzHughNagumo()
cardiac = VanDerPolOscillator()
hbcm = HeartBrainCouplingModel(neural_model=neural, cardiac_model=cardiac)

# Test simulation
trajectory = hbcm.simulate((0.0, 0.0, 1.0, 0.0), (0.0, 1.0), 0.001)
assert len(trajectory) > 0

return True

def test_primal_processor():
"""Test Primal Logic Processor"""
from src.microprocessor import PrimalLogicProcessor

processor = PrimalLogicProcessor()
control, state = processor.compute_control(
current_value=30.0,
target_value=0.0,
timestamp=0.0
)

assert -10.0 <= control <= 10.0
assert state.error == 30.0

return True

def test_organchip():
"""Test Organ Chip Suite"""
from src.organchip.orchestrator import create_default_organ_chip_suite

suite = create_default_organ_chip_suite()
suite.verbose = False

# Test initialization
state = suite.initialize_state(drug_amount_mg=100.0)
assert 'circulation' in state

return True

def main():
"""Run all tests"""
print("=" * 70)
print("SIMPLE TEST RUNNER - Multi-Heart-Model")
print("=" * 70)

tests = [
("Van der Pol Oscillator", test_van_der_pol),
("FitzHugh-Nagumo Model", test_fitzhugh_nagumo),
("Heart-Brain Coupling Model", test_hbcm),
("Primal Logic Processor", test_primal_processor),
("Organ Chip Suite", test_organchip),
]

results = []

for name, test_func in tests:
try:
print(f"\nTesting {name}...")
result = test_func()
print(f" ✓ PASSED")
results.append((name, True))
except Exception as e:
print(f" ✗ FAILED: {e}")
results.append((name, False))

print("\n" + "=" * 70)
print("TEST SUMMARY")
print("=" * 70)

passed = sum(1 for _, result in results if result)
total = len(results)

for name, result in results:
status = "✓ PASS" if result else "✗ FAIL"
print(f" {status}: {name}")

print("=" * 70)
print(f" Results: {passed}/{total} tests passed ({100*passed/total:.1f}%)")
print("=" * 70)

return 0 if passed == total else 1

if __name__ == "__main__":
sys.exit(main())
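
Since main() returns 0 only when every test passes, the script can stand in for pytest in automation. A minimal usage sketch (this wrapper is hypothetical, not part of the PR):

# Hypothetical CI gate: run the simple suite and propagate its exit status.
import subprocess
import sys

result = subprocess.run([sys.executable, "run_tests_simple.py"])
sys.exit(result.returncode)  # nonzero if any model test failed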