diff --git a/.github/workflows/test_performance.yml b/.github/workflows/test_performance.yml
index 492d287e82..7819127b0c 100644
--- a/.github/workflows/test_performance.yml
+++ b/.github/workflows/test_performance.yml
@@ -52,7 +52,7 @@ jobs:
       # import test model
       - name: Import test model
         run: |
-          AMICI_IMPORT_NPROCS=2 check_time.sh petab_import python tests/performance/test.py import
+          AMICI_IMPORT_NPROCS=2 tests/performance/performance_test.py petab_import

       - name: "Upload artifact: CS_Signalling_ERBB_RAS_AKT_petab"
         uses: actions/upload-artifact@v7
@@ -63,59 +63,59 @@ jobs:
       # install model package
       - name: Install test model
         run: >
-          check_time.sh install_model tests/performance/test.py compile;
+          tests/performance/performance_test.py install_model;
           for opt in O0 O1 O2; do
-            check_time.sh install_model_${opt} tests/performance/test.py compile_${opt};
+            tests/performance/performance_test.py install_model_${opt};
           done

       # run simulations
       - name: forward_simulation
         run: >
-          check_time.sh forward_simulation tests/performance/test.py forward_simulation;
+          tests/performance/performance_test.py forward_simulation;
           for opt in O0 O1 O2; do
-            check_time.sh forward_simulation tests/performance/test.py forward_simulation_${opt};
+            tests/performance/performance_test.py forward_simulation_${opt};
           done

       - name: forward_sensitivities
         run: >
-          check_time.sh forward_sensitivities tests/performance/test.py forward_sensitivities;
+          tests/performance/performance_test.py forward_sensitivities;
           for opt in O0 O1 O2; do
-            check_time.sh forward_sensitivities tests/performance/test.py forward_sensitivities_${opt};
+            tests/performance/performance_test.py forward_sensitivities_${opt};
           done

       - name: adjoint_sensitivities
         run: >
-          check_time.sh adjoint_sensitivities tests/performance/test.py adjoint_sensitivities;
+          tests/performance/performance_test.py adjoint_sensitivities;
           for opt in O0 O1 O2; do
-            check_time.sh adjoint_sensitivities tests/performance/test.py adjoint_sensitivities_${opt};
+            tests/performance/performance_test.py adjoint_sensitivities_${opt};
           done

       - name: forward_simulation_non_optimal_parameters
         run: |
-          check_time.sh forward_simulation_non_optimal_parameters tests/performance/test.py forward_simulation_non_optimal_parameters;
+          tests/performance/performance_test.py forward_simulation_non_optimal_parameters;
           for opt in O0 O1 O2; do
-            check_time.sh forward_simulation_non_optimal_parameters tests/performance/test.py forward_simulation_non_optimal_parameters_${opt};
+            tests/performance/performance_test.py forward_simulation_non_optimal_parameters_${opt};
           done

       - name: adjoint_sensitivities_non_optimal_parameters
         run: |
-          check_time.sh adjoint_sensitivities_non_optimal_parameters tests/performance/test.py adjoint_sensitivities_non_optimal_parameters;
+          tests/performance/performance_test.py adjoint_sensitivities_non_optimal_parameters;
           for opt in O0 O1 O2; do
-            check_time.sh adjoint_sensitivities_non_optimal_parameters tests/performance/test.py adjoint_sensitivities_non_optimal_parameters_${opt};
+            tests/performance/performance_test.py adjoint_sensitivities_non_optimal_parameters_${opt};
           done

       - name: forward_steadystate_sensitivities_non_optimal_parameters
         run: |
-          check_time.sh forward_steadystate_sensitivities_non_optimal_parameters tests/performance/test.py forward_steadystate_sensitivities_non_optimal_parameters;
+          tests/performance/performance_test.py forward_steadystate_sensitivities_non_optimal_parameters;
           for opt in O0 O1 O2; do
-            check_time.sh forward_steadystate_sensitivities_non_optimal_parameters tests/performance/test.py forward_steadystate_sensitivities_non_optimal_parameters_${opt};
+            tests/performance/performance_test.py forward_steadystate_sensitivities_non_optimal_parameters_${opt};
           done

       - name: adjoint_steadystate_sensitivities_non_optimal_parameters
         run: |
-          check_time.sh adjoint_steadystate_sensitivities_non_optimal_parameters tests/performance/test.py adjoint_steadystate_sensitivities_non_optimal_parameters;
+          tests/performance/performance_test.py adjoint_steadystate_sensitivities_non_optimal_parameters;
           for opt in O0 O1 O2; do
-            check_time.sh adjoint_steadystate_sensitivities_non_optimal_parameters tests/performance/test.py adjoint_steadystate_sensitivities_non_optimal_parameters_${opt};
+            tests/performance/performance_test.py adjoint_steadystate_sensitivities_non_optimal_parameters_${opt};
           done
diff --git a/tests/performance/test.py b/tests/performance/performance_test.py
similarity index 86%
rename from tests/performance/test.py
rename to tests/performance/performance_test.py
index a0b61c80b2..619bcc38d4 100755
--- a/tests/performance/test.py
+++ b/tests/performance/performance_test.py
@@ -5,12 +5,14 @@
 import shutil
 import subprocess
 import sys
+import time
 from pathlib import Path

 import amici
 import amici.exporters.sundials.de_export
 import amici.importers.sbml
 import petab.v1 as petab
+import yaml
 from amici.importers.petab.v1._petab_import import import_model_sbml
 from amici.sim.sundials import (
     AMICI_SUCCESS,
@@ -21,6 +23,10 @@
     run_simulation,
 )

+reference_times_file = Path(__file__).parent / "reference.yml"
+with open(reference_times_file) as f:
+    reference_times: dict[str, float] = yaml.safe_load(f)
+

 def parse_args():
     arg = sys.argv[1]
@@ -153,11 +159,15 @@ def main():
     model_dir_compiled = Path(f"model_performance_test_{suffix}")
     model_name = "model_performance_test"

-    if arg == "import":
+    start_time = time.time()
+
+    if arg == "petab_import":
         run_import(model_name, model_dir_source)
+        check_time(arg, time.time() - start_time)
         return
-    elif arg == "compile":
+    elif arg == "install_model":
         compile_model(model_dir_source, model_dir_compiled)
+        check_time(f"{arg}{suffix}", time.time() - start_time)
         return
     else:
         model_module = amici.import_model_module(
@@ -176,6 +186,21 @@ def main():
     rdata = run_simulation(model, solver, edata)
     check_results(rdata)
+    check_time(arg, time.time() - start_time)
+
+
+def check_time(task: str, time: float):
+    reference_time = reference_times[task]
+    if time <= reference_time:
+        print(
+            f"Time for task {task} is {time:.2f}s, "
+            f"which is within the reference time of {reference_time:.2f}s."
+        )
+    else:
+        raise AssertionError(
+            f"Time for task {task} is {time:.2f}s, "
+            f"which exceeds the reference time of {reference_time:.2f}s."
+        )


 if __name__ == "__main__":
     main()
diff --git a/tests/performance/reference.yml b/tests/performance/reference.yml
index 0f2a090615..c353c59fd6 100644
--- a/tests/performance/reference.yml
+++ b/tests/performance/reference.yml
@@ -1,15 +1,15 @@
 # Reference wall times (seconds) with some buffer
-create_sdist: 16
-install_sdist: 150
+create_sdist: 10
+install_sdist: 110
 petab_import: 720
 install_model: 60
 install_model_O0: 40
 install_model_O1: 45
 install_model_O2: 60
-forward_simulation: 2
-forward_sensitivities: 2
-adjoint_sensitivities: 2.5
-forward_simulation_non_optimal_parameters: 2
-adjoint_sensitivities_non_optimal_parameters: 5
-forward_steadystate_sensitivities_non_optimal_parameters: 5
-adjoint_steadystate_sensitivities_non_optimal_parameters: 4
+forward_simulation: 1
+forward_sensitivities: 1
+adjoint_sensitivities: 1
+forward_simulation_non_optimal_parameters: 1
+adjoint_sensitivities_non_optimal_parameters: 2
+forward_steadystate_sensitivities_non_optimal_parameters: 3
+adjoint_steadystate_sensitivities_non_optimal_parameters: 2