From 43570a4c694cf79d9046c6de47708787a52ce39b Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 13 Aug 2025 14:53:45 +0200 Subject: [PATCH 01/80] use consistent server name --- tests/provision/dummy_project_for_testing.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/provision/dummy_project_for_testing.yml b/tests/provision/dummy_project_for_testing.yml index 7e259592..d4984d77 100644 --- a/tests/provision/dummy_project_for_testing.yml +++ b/tests/provision/dummy_project_for_testing.yml @@ -34,7 +34,7 @@ builders: path: nvflare.ha.dummy_overseer_agent.DummyOverseerAgent overseer_exists: false args: - sp_end_point: odeliatempvm.local:8002:8003 + sp_end_point: server.local:8002:8003 - path: nvflare.lighter.impl.cert.CertBuilder - path: nvflare.lighter.impl.signature.SignatureBuilder From 4c2ab1adaa8dc2d88b3d9127285e84d54226d3ec Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 13 Aug 2025 16:04:46 +0200 Subject: [PATCH 02/80] scripts to start a dummy training from the startup kits --- _testsOutsideDocker_submitDummyTraining.exp | 15 +++++ runTestsOutsideDocker.sh | 73 +++++++++++++++++++++ 2 files changed, 88 insertions(+) create mode 100755 _testsOutsideDocker_submitDummyTraining.exp create mode 100755 runTestsOutsideDocker.sh diff --git a/_testsOutsideDocker_submitDummyTraining.exp b/_testsOutsideDocker_submitDummyTraining.exp new file mode 100755 index 00000000..7d69997c --- /dev/null +++ b/_testsOutsideDocker_submitDummyTraining.exp @@ -0,0 +1,15 @@ +#!/usr/bin/env expect + +spawn ./docker.sh --no_pull +expect "User Name: " +send "admin@test.odelia\r" +expect "> " +send "submit_job MediSwarm/application/jobs/minimal_training_pytorch_cnn\r" +expect "> " +send "sys_info client\r" +expect "> " +send "sys_info server\r" +expect "> " +send "list_jobs\r" +expect "> " +send "list_jobs\r" diff --git a/runTestsOutsideDocker.sh b/runTestsOutsideDocker.sh new file mode 100755 index 00000000..986168f1 --- /dev/null +++ b/runTestsOutsideDocker.sh @@ -0,0 +1,73 @@ +#!/usr/bin/env bash + +set -e + +if ! grep -q "127.0.0.1 server.local" /etc/hosts; then + echo "/etc/hosts needs to contain the following line, please add it." + echo "127.0.0.1 server.local localhost" + exit 1 +fi + +if [ -z "$GPU_FOR_TESTING" ]; then + export GPU_FOR_TESTING="all" +fi + +VERSION=$(./getVersionNumber.sh) +DOCKER_IMAGE=jefftud/odelia:$VERSION +PROJECT_DIR="workspace/odelia_${VERSION}_dummy_project_for_testing" +SYNTHETIC_DATA_DIR=$(mktemp -d) +CWD=$(pwd) + +create_synthetic_data () { + # create synthetic data + docker run --rm \ + -u $(id -u):$(id -g) \ + -v "$SYNTHETIC_DATA_DIR":/synthetic_data \ + -w /MediSwarm \ + jefftud/odelia:$VERSION \ + /bin/bash -c "python3 application/jobs/ODELIA_ternary_classification/app/scripts/create_synthetic_dataset/create_synthetic_dataset.py /synthetic_data" +} + +cleanup () { + rm -rf "$SYNTHETIC_DATA_DIR" + docker kill odelia_swarm_server_flserver odelia_swarm_client_client_A odelia_swarm_client_client_B +} + +start_server_and_clients () { + cd $PROJECT_DIR/prod_00 + cd server.local/startup + ./docker.sh --no_pull --start_server + cd ../.. + sleep 10 + + cd client_A/startup + ./docker.sh --no_pull --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir /data/MEVISTwoNodeSwarm/scratch --GPU device=$GPU_FOR_TESTING --start_client + cd ../.. + cd client_B/startup + ./docker.sh --no_pull --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir /data/MEVISTwoNodeSwarm/scratch --GPU device=$GPU_FOR_TESTING --start_client + sleep 5 + + cd "$CWD" +} + +run_dummy_training () { + cd $PROJECT_DIR/prod_00 + cd admin@test.odelia/startup + ../../../../../_testsOutsideDocker_submitDummyTraining.exp + docker kill fladmin + sleep 60 +} + +check_output_of_dummy_training () { + echo "TODO check output of dummy training" +} + +run_tests () { + create_synthetic_data + start_server_and_clients + run_dummy_training + check_output_of_dummy_training + cleanup +} + +run_tests From 92ccb25feffb3bdbeb528f3220c367afe5b70f6a Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Thu, 14 Aug 2025 15:59:12 +0200 Subject: [PATCH 03/80] skeleton for further tests --- runTestsOutsideDocker.sh | 46 +++++++++++++++++++++++++++++++++++----- 1 file changed, 41 insertions(+), 5 deletions(-) diff --git a/runTestsOutsideDocker.sh b/runTestsOutsideDocker.sh index 986168f1..f82a4df7 100755 --- a/runTestsOutsideDocker.sh +++ b/runTestsOutsideDocker.sh @@ -18,6 +18,14 @@ PROJECT_DIR="workspace/odelia_${VERSION}_dummy_project_for_testing" SYNTHETIC_DATA_DIR=$(mktemp -d) CWD=$(pwd) +check_files_on_github () { + echo "TODO check files/documentation on github" +} + +check_startup_kits () { + echo "TODO check startup kits" +} + create_synthetic_data () { # create synthetic data docker run --rm \ @@ -28,9 +36,8 @@ create_synthetic_data () { /bin/bash -c "python3 application/jobs/ODELIA_ternary_classification/app/scripts/create_synthetic_dataset/create_synthetic_dataset.py /synthetic_data" } -cleanup () { +cleanup_synthetic_data () { rm -rf "$SYNTHETIC_DATA_DIR" - docker kill odelia_swarm_server_flserver odelia_swarm_client_client_A odelia_swarm_client_client_B } start_server_and_clients () { @@ -50,7 +57,23 @@ start_server_and_clients () { cd "$CWD" } -run_dummy_training () { +kill_server_and_clients () { + docker kill odelia_swarm_server_flserver odelia_swarm_client_client_A odelia_swarm_client_client_B +} + +run_docker_gpu_preflight_check () { + echo "TODO run dummy training locally" +} + +run_data_access_preflight_check () { + echo "TODO run data access preflight check locally" +} + +check_output_of_preflight_checks () { + echo "TODO check output of preflight checks" +} + +run_dummy_training_in_swarm () { cd $PROJECT_DIR/prod_00 cd admin@test.odelia/startup ../../../../../_testsOutsideDocker_submitDummyTraining.exp @@ -63,11 +86,24 @@ check_output_of_dummy_training () { } run_tests () { + check_files_on_github + + check_startup_kits + create_synthetic_data + + run_docker_gpu_preflight_check + run_data_access_preflight_check + check_output_of_preflight_checks + start_server_and_clients - run_dummy_training + + run_dummy_training_in_swarm check_output_of_dummy_training - cleanup + + kill_server_and_clients + + cleanup_synthetic_data } run_tests From cc2f8e54a2ad41489189971a8d4d4535b2272bef Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Fri, 15 Aug 2025 14:48:10 +0200 Subject: [PATCH 04/80] added preflight checks (without checking their output so far) --- runTestsOutsideDocker.sh | 35 +++++++++++++++++++---------------- 1 file changed, 19 insertions(+), 16 deletions(-) diff --git a/runTestsOutsideDocker.sh b/runTestsOutsideDocker.sh index f82a4df7..bbf450e6 100755 --- a/runTestsOutsideDocker.sh +++ b/runTestsOutsideDocker.sh @@ -16,6 +16,7 @@ VERSION=$(./getVersionNumber.sh) DOCKER_IMAGE=jefftud/odelia:$VERSION PROJECT_DIR="workspace/odelia_${VERSION}_dummy_project_for_testing" SYNTHETIC_DATA_DIR=$(mktemp -d) +SCRATCH_DIR=$(mktemp -d) CWD=$(pwd) check_files_on_github () { @@ -36,22 +37,23 @@ create_synthetic_data () { /bin/bash -c "python3 application/jobs/ODELIA_ternary_classification/app/scripts/create_synthetic_dataset/create_synthetic_dataset.py /synthetic_data" } -cleanup_synthetic_data () { +cleanup_temporary_data () { rm -rf "$SYNTHETIC_DATA_DIR" + rm -rf "$SCRATCH_DIR" } start_server_and_clients () { - cd $PROJECT_DIR/prod_00 + cd "$PROJECT_DIR"/prod_00 cd server.local/startup ./docker.sh --no_pull --start_server cd ../.. sleep 10 cd client_A/startup - ./docker.sh --no_pull --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir /data/MEVISTwoNodeSwarm/scratch --GPU device=$GPU_FOR_TESTING --start_client + ./docker.sh --no_pull --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir "$SCRATCH_DIR"/client_A --GPU device=$GPU_FOR_TESTING --start_client cd ../.. cd client_B/startup - ./docker.sh --no_pull --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir /data/MEVISTwoNodeSwarm/scratch --GPU device=$GPU_FOR_TESTING --start_client + ./docker.sh --no_pull --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir "$SCRATCH_DIR"/client_B --GPU device=$GPU_FOR_TESTING --start_client sleep 5 cd "$CWD" @@ -62,26 +64,29 @@ kill_server_and_clients () { } run_docker_gpu_preflight_check () { - echo "TODO run dummy training locally" + cd "$PROJECT_DIR"/prod_00 + cd client_A/startup + ./docker.sh --scratch_dir "$SCRATCH_DIR"/client_A --GPU device=$GPU_FOR_TESTING --dummy_training --no_pull 2>&1 | tee dummy_training_console_output.txt + echo "TODO check output in dummy_training_console_output.txt" + cd "$CWD" } run_data_access_preflight_check () { - echo "TODO run data access preflight check locally" -} - -check_output_of_preflight_checks () { - echo "TODO check output of preflight checks" + cd "$PROJECT_DIR"/prod_00 + cd client_A/startup + ./docker.sh --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir "$SCRATCH_DIR"/client_A --GPU device=$GPU_FOR_TESTING --preflight_check --no_pull 2>&1 | tee preflight_check_console_output.txt + echo "TODO check output in preflight_check_console_output.txt" + cd ../.. + cd ../.. } run_dummy_training_in_swarm () { - cd $PROJECT_DIR/prod_00 + cd "$PROJECT_DIR"/prod_00 cd admin@test.odelia/startup ../../../../../_testsOutsideDocker_submitDummyTraining.exp docker kill fladmin sleep 60 -} -check_output_of_dummy_training () { echo "TODO check output of dummy training" } @@ -94,16 +99,14 @@ run_tests () { run_docker_gpu_preflight_check run_data_access_preflight_check - check_output_of_preflight_checks start_server_and_clients run_dummy_training_in_swarm - check_output_of_dummy_training kill_server_and_clients - cleanup_synthetic_data + cleanup_temporary_data } run_tests From 8f12780d5a66b7d364a8b9ab03cd29b1dedfaae3 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Mon, 18 Aug 2025 11:41:52 +0200 Subject: [PATCH 05/80] check if (source code for) license is available on github and if README contains certain keywords --- runTestsOutsideDocker.sh | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/runTestsOutsideDocker.sh b/runTestsOutsideDocker.sh index bbf450e6..32c7c3fa 100755 --- a/runTestsOutsideDocker.sh +++ b/runTestsOutsideDocker.sh @@ -20,7 +20,24 @@ SCRATCH_DIR=$(mktemp -d) CWD=$(pwd) check_files_on_github () { - echo "TODO check files/documentation on github" + CONTENT=$(curl -L https://github.com/KatherLab/MediSwarm/raw/refs/heads/main/LICENSE) + if echo "$CONTENT" | grep -q "MIT License" ; then + echo "Downloaded and verified license from github" + else + echo "Could not download and verify license" + exit 1 + fi + + CONTENT=$(curl -L https://github.com/KatherLab/MediSwarm/raw/refs/heads/main/README.md) + for ROLE in 'Swarm Participant' 'Developer' 'Swarm Operator'; + do + if echo "$CONTENT" | grep -q "$ROLE" ; then + echo "Instructions for $ROLE found" + else + echo "Instructions for role $ROLE missing" + exit 1 + fi + done } check_startup_kits () { @@ -28,7 +45,6 @@ check_startup_kits () { } create_synthetic_data () { - # create synthetic data docker run --rm \ -u $(id -u):$(id -g) \ -v "$SYNTHETIC_DATA_DIR":/synthetic_data \ From 76ebb4c42524a1d871b4b69ffd7ef83860410b31 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Mon, 18 Aug 2025 11:42:51 +0200 Subject: [PATCH 06/80] check if second startup kit can be built and contains expected files --- runTestsOutsideDocker.sh | 36 +++++++++++++++++++++++++++++++++--- 1 file changed, 33 insertions(+), 3 deletions(-) diff --git a/runTestsOutsideDocker.sh b/runTestsOutsideDocker.sh index 32c7c3fa..3142df9c 100755 --- a/runTestsOutsideDocker.sh +++ b/runTestsOutsideDocker.sh @@ -18,6 +18,7 @@ PROJECT_DIR="workspace/odelia_${VERSION}_dummy_project_for_testing" SYNTHETIC_DATA_DIR=$(mktemp -d) SCRATCH_DIR=$(mktemp -d) CWD=$(pwd) +PROJECT_FILE="tests/provision/dummy_project_for_testing.yml" check_files_on_github () { CONTENT=$(curl -L https://github.com/KatherLab/MediSwarm/raw/refs/heads/main/LICENSE) @@ -40,8 +41,37 @@ check_files_on_github () { done } -check_startup_kits () { - echo "TODO check startup kits" +create_second_startup_kit () { + if [ ! -d "$PROJECT_DIR"/prod_00 ]; then + echo '"$PROJECT_DIR"/prod_00 does not exist, please generate the startup kit first' + exit 1 + fi + if [ -d "$PROJECT_DIR"/prod_01 ]; then + echo '"$PROJECT_DIR"/prod_01 exists, please remove it' + exit 1 + fi + ./_buildStartupKits.sh $PROJECT_FILE $VERSION + + for FILE in 'client.crt' 'client.key' 'docker.sh' 'rootCA.pem'; + do + if [ -f "$PROJECT_DIR/prod_01/client_A/startup/$FILE" ] ; then + echo "$FILE found" + else + echo "$FILE missing" + exit 1 + fi + done + + ZIP_CONTENT=$(unzip -tv "$PROJECT_DIR/prod_01/client_B_${VERSION}.zip") + for FILE in 'client.crt' 'client.key' 'docker.sh' 'rootCA.pem'; + do + if echo "$ZIP_CONTENT" | grep -q "$FILE" ; then + echo "$FILE found in zip" + else + echo "$FILE missing in zip" + exit 1 + fi + done } create_synthetic_data () { @@ -109,7 +139,7 @@ run_dummy_training_in_swarm () { run_tests () { check_files_on_github - check_startup_kits + create_second_startup_kit create_synthetic_data From e4864b532bdad2f9ccd9f455f803020ca239e329 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Mon, 18 Aug 2025 14:47:53 +0200 Subject: [PATCH 07/80] check output of preflight checks --- runTestsOutsideDocker.sh | 27 +++++++++++++++++++++------ 1 file changed, 21 insertions(+), 6 deletions(-) diff --git a/runTestsOutsideDocker.sh b/runTestsOutsideDocker.sh index 3142df9c..b27372d0 100755 --- a/runTestsOutsideDocker.sh +++ b/runTestsOutsideDocker.sh @@ -112,18 +112,33 @@ kill_server_and_clients () { run_docker_gpu_preflight_check () { cd "$PROJECT_DIR"/prod_00 cd client_A/startup - ./docker.sh --scratch_dir "$SCRATCH_DIR"/client_A --GPU device=$GPU_FOR_TESTING --dummy_training --no_pull 2>&1 | tee dummy_training_console_output.txt - echo "TODO check output in dummy_training_console_output.txt" + CONSOLE_OUTPUT=docker_gpu_preflight_check_console_output.txt + ./docker.sh --scratch_dir "$SCRATCH_DIR"/client_A --GPU device=$GPU_FOR_TESTING --dummy_training --no_pull 2>&1 | tee "$CONSOLE_OUTPUT" + + if grep -q "Epoch 1: 100%" "$CONSOLE_OUTPUT" && grep -q "Training completed successfully" "$CONSOLE_OUTPUT"; then + echo "Expected output of Docker/GPU preflight check found" + else + echo "Missing expected output of Docker/GPU preflight check" + exit 1 + fi + cd "$CWD" } run_data_access_preflight_check () { cd "$PROJECT_DIR"/prod_00 cd client_A/startup - ./docker.sh --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir "$SCRATCH_DIR"/client_A --GPU device=$GPU_FOR_TESTING --preflight_check --no_pull 2>&1 | tee preflight_check_console_output.txt - echo "TODO check output in preflight_check_console_output.txt" - cd ../.. - cd ../.. + CONSOLE_OUTPUT=data_access_preflight_check_console_output.txt + ./docker.sh --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir "$SCRATCH_DIR"/client_A --GPU device=$GPU_FOR_TESTING --preflight_check --no_pull 2>&1 | tee $CONSOLE_OUTPUT + + if grep -q "Train set: 18, Val set: 6" "$CONSOLE_OUTPUT" && grep -q "Epoch 0: 100%" "$CONSOLE_OUTPUT"; then + echo "Expected output of Docker/GPU preflight check found" + else + echo "Missing expected output of Docker/GPU preflight check" + exit 1 + fi + + cd "$CWD" } run_dummy_training_in_swarm () { From 58cfd9119186efed1ccd7e193a8826898f857ab7 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Mon, 18 Aug 2025 17:32:14 +0200 Subject: [PATCH 08/80] check captured console output of swarm training and files created --- runTestsOutsideDocker.sh | 42 +++++++++++++++++++++++++++++++++++++--- 1 file changed, 39 insertions(+), 3 deletions(-) diff --git a/runTestsOutsideDocker.sh b/runTestsOutsideDocker.sh index b27372d0..d0b65636 100755 --- a/runTestsOutsideDocker.sh +++ b/runTestsOutsideDocker.sh @@ -147,8 +147,46 @@ run_dummy_training_in_swarm () { ../../../../../_testsOutsideDocker_submitDummyTraining.exp docker kill fladmin sleep 60 + cd "$CWD" + + cd "$PROJECT_DIR"/prod_00/server.local/startup + CONSOLE_OUTPUT=nohup.out + for EXPECTED_OUTPUT in 'Total clients: 2' 'updated status of client client_A on round 4' 'updated status of client client_B on round 4' 'all_done=True' 'Server runner finished.'; + do + if grep -q "$EXPECTED_OUTPUT" "$CONSOLE_OUTPUT"; then + echo "Expected output $EXPECTED_OUTPUT found" + else + echo "Expected output $EXPECTED_OUTPUT missing" + exit 1 + fi + done + cd "$CWD" - echo "TODO check output of dummy training" + cd "$PROJECT_DIR"/prod_00/client_A/startup + CONSOLE_OUTPUT=nohup.out + for EXPECTED_OUTPUT in 'Sending training result to aggregation client' 'Epoch 9: 100%' ; + do + if grep -q "$EXPECTED_OUTPUT" "$CONSOLE_OUTPUT"; then + echo "Expected output $EXPECTED_OUTPUT found" + else + echo "Expected output $EXPECTED_OUTPUT missing" + exit 1 + fi + done + cd "$CWD" + + cd "$PROJECT_DIR"/prod_00/client_A/ + FILES_PRESENT=$(find . -type f -name "*.*") + for EXPECTED_FILE in 'custom/minimal_training.py' 'best_FL_global_model.pt' 'FL_global_model.pt' ; + do + if echo "$FILES_PRESENT" | grep -q "$EXPECTED_FILE" ; then + echo "Expected file $EXPECTED_FILE found" + else + echo "Expected file $EXPECTED_FILE missing" + exit 1 + fi + done + cd "$CWD" } run_tests () { @@ -162,9 +200,7 @@ run_tests () { run_data_access_preflight_check start_server_and_clients - run_dummy_training_in_swarm - kill_server_and_clients cleanup_temporary_data From a67405e117a61c3c1c28f4c50d43a05a5983e9a5 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 20 Aug 2025 15:02:39 +0200 Subject: [PATCH 09/80] test pushing and pulling image to/from local docker registry --- _testsOutsideDocker_submitDummyTraining.exp | 2 +- runTestsOutsideDocker.sh | 31 ++++++++++++++----- tests/provision/dummy_project_for_testing.yml | 2 +- 3 files changed, 26 insertions(+), 9 deletions(-) diff --git a/_testsOutsideDocker_submitDummyTraining.exp b/_testsOutsideDocker_submitDummyTraining.exp index 7d69997c..0a79ec00 100755 --- a/_testsOutsideDocker_submitDummyTraining.exp +++ b/_testsOutsideDocker_submitDummyTraining.exp @@ -1,6 +1,6 @@ #!/usr/bin/env expect -spawn ./docker.sh --no_pull +spawn ./docker.sh expect "User Name: " send "admin@test.odelia\r" expect "> " diff --git a/runTestsOutsideDocker.sh b/runTestsOutsideDocker.sh index d0b65636..7a14090f 100755 --- a/runTestsOutsideDocker.sh +++ b/runTestsOutsideDocker.sh @@ -13,7 +13,8 @@ if [ -z "$GPU_FOR_TESTING" ]; then fi VERSION=$(./getVersionNumber.sh) -DOCKER_IMAGE=jefftud/odelia:$VERSION +GENERATED_DOCKER_IMAGE=jefftud/odelia:$VERSION +EXPECTED_DOCKER_IMAGE=localhost:5000/$GENERATED_DOCKER_IMAGE # must match what is specified in the project.yml PROJECT_DIR="workspace/odelia_${VERSION}_dummy_project_for_testing" SYNTHETIC_DATA_DIR=$(mktemp -d) SCRATCH_DIR=$(mktemp -d) @@ -74,12 +75,20 @@ create_second_startup_kit () { done } +push_image_to_local_docker_registry () { + docker run -d -p 5000:5000 --rm --name registry registry:3 + docker tag $GENERATED_DOCKER_IMAGE $EXPECTED_DOCKER_IMAGE + docker push $EXPECTED_DOCKER_IMAGE + docker rmi $EXPECTED_DOCKER_IMAGE # so that pulling later has an effect + docker pull $EXPECTED_DOCKER_IMAGE +} + create_synthetic_data () { docker run --rm \ -u $(id -u):$(id -g) \ -v "$SYNTHETIC_DATA_DIR":/synthetic_data \ -w /MediSwarm \ - jefftud/odelia:$VERSION \ + $GENERATED_DOCKER_IMAGE \ /bin/bash -c "python3 application/jobs/ODELIA_ternary_classification/app/scripts/create_synthetic_dataset/create_synthetic_dataset.py /synthetic_data" } @@ -88,18 +97,23 @@ cleanup_temporary_data () { rm -rf "$SCRATCH_DIR" } +cleanup_local_docker_registry () { + docker rmi $EXPECTED_DOCKER_IMAGE + docker kill registry +} + start_server_and_clients () { cd "$PROJECT_DIR"/prod_00 cd server.local/startup - ./docker.sh --no_pull --start_server + ./docker.sh --start_server cd ../.. sleep 10 cd client_A/startup - ./docker.sh --no_pull --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir "$SCRATCH_DIR"/client_A --GPU device=$GPU_FOR_TESTING --start_client + ./docker.sh --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir "$SCRATCH_DIR"/client_A --GPU device=$GPU_FOR_TESTING --start_client cd ../.. cd client_B/startup - ./docker.sh --no_pull --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir "$SCRATCH_DIR"/client_B --GPU device=$GPU_FOR_TESTING --start_client + ./docker.sh --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir "$SCRATCH_DIR"/client_B --GPU device=$GPU_FOR_TESTING --start_client sleep 5 cd "$CWD" @@ -113,7 +127,7 @@ run_docker_gpu_preflight_check () { cd "$PROJECT_DIR"/prod_00 cd client_A/startup CONSOLE_OUTPUT=docker_gpu_preflight_check_console_output.txt - ./docker.sh --scratch_dir "$SCRATCH_DIR"/client_A --GPU device=$GPU_FOR_TESTING --dummy_training --no_pull 2>&1 | tee "$CONSOLE_OUTPUT" + ./docker.sh --scratch_dir "$SCRATCH_DIR"/client_A --GPU device=$GPU_FOR_TESTING --dummy_training 2>&1 | tee "$CONSOLE_OUTPUT" if grep -q "Epoch 1: 100%" "$CONSOLE_OUTPUT" && grep -q "Training completed successfully" "$CONSOLE_OUTPUT"; then echo "Expected output of Docker/GPU preflight check found" @@ -129,7 +143,7 @@ run_data_access_preflight_check () { cd "$PROJECT_DIR"/prod_00 cd client_A/startup CONSOLE_OUTPUT=data_access_preflight_check_console_output.txt - ./docker.sh --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir "$SCRATCH_DIR"/client_A --GPU device=$GPU_FOR_TESTING --preflight_check --no_pull 2>&1 | tee $CONSOLE_OUTPUT + ./docker.sh --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir "$SCRATCH_DIR"/client_A --GPU device=$GPU_FOR_TESTING --preflight_check 2>&1 | tee $CONSOLE_OUTPUT if grep -q "Train set: 18, Val set: 6" "$CONSOLE_OUTPUT" && grep -q "Epoch 0: 100%" "$CONSOLE_OUTPUT"; then echo "Expected output of Docker/GPU preflight check found" @@ -194,6 +208,8 @@ run_tests () { create_second_startup_kit + push_image_to_local_docker_registry + create_synthetic_data run_docker_gpu_preflight_check @@ -204,6 +220,7 @@ run_tests () { kill_server_and_clients cleanup_temporary_data + cleanup_local_docker_registry } run_tests diff --git a/tests/provision/dummy_project_for_testing.yml b/tests/provision/dummy_project_for_testing.yml index d4984d77..39a83bd0 100644 --- a/tests/provision/dummy_project_for_testing.yml +++ b/tests/provision/dummy_project_for_testing.yml @@ -29,7 +29,7 @@ builders: args: config_folder: config scheme: http - docker_image: jefftud/odelia:__REPLACED_BY_CURRENT_VERSION_NUMBER_WHEN_BUILDING_STARTUP_KITS__ + docker_image: "localhost:5000/jefftud/odelia:__REPLACED_BY_CURRENT_VERSION_NUMBER_WHEN_BUILDING_STARTUP_KITS__" overseer_agent: path: nvflare.ha.dummy_overseer_agent.DummyOverseerAgent overseer_exists: false From 1e9cdcf5d97194d388205e8f65dfbb3469d90a00 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 20 Aug 2025 15:46:54 +0200 Subject: [PATCH 10/80] Revert "test pushing and pulling image to/from local docker registry", this is very slow This reverts commit a67405e117a61c3c1c28f4c50d43a05a5983e9a5. --- _testsOutsideDocker_submitDummyTraining.exp | 2 +- runTestsOutsideDocker.sh | 31 +++++-------------- tests/provision/dummy_project_for_testing.yml | 2 +- 3 files changed, 9 insertions(+), 26 deletions(-) diff --git a/_testsOutsideDocker_submitDummyTraining.exp b/_testsOutsideDocker_submitDummyTraining.exp index 0a79ec00..7d69997c 100755 --- a/_testsOutsideDocker_submitDummyTraining.exp +++ b/_testsOutsideDocker_submitDummyTraining.exp @@ -1,6 +1,6 @@ #!/usr/bin/env expect -spawn ./docker.sh +spawn ./docker.sh --no_pull expect "User Name: " send "admin@test.odelia\r" expect "> " diff --git a/runTestsOutsideDocker.sh b/runTestsOutsideDocker.sh index 7a14090f..d0b65636 100755 --- a/runTestsOutsideDocker.sh +++ b/runTestsOutsideDocker.sh @@ -13,8 +13,7 @@ if [ -z "$GPU_FOR_TESTING" ]; then fi VERSION=$(./getVersionNumber.sh) -GENERATED_DOCKER_IMAGE=jefftud/odelia:$VERSION -EXPECTED_DOCKER_IMAGE=localhost:5000/$GENERATED_DOCKER_IMAGE # must match what is specified in the project.yml +DOCKER_IMAGE=jefftud/odelia:$VERSION PROJECT_DIR="workspace/odelia_${VERSION}_dummy_project_for_testing" SYNTHETIC_DATA_DIR=$(mktemp -d) SCRATCH_DIR=$(mktemp -d) @@ -75,20 +74,12 @@ create_second_startup_kit () { done } -push_image_to_local_docker_registry () { - docker run -d -p 5000:5000 --rm --name registry registry:3 - docker tag $GENERATED_DOCKER_IMAGE $EXPECTED_DOCKER_IMAGE - docker push $EXPECTED_DOCKER_IMAGE - docker rmi $EXPECTED_DOCKER_IMAGE # so that pulling later has an effect - docker pull $EXPECTED_DOCKER_IMAGE -} - create_synthetic_data () { docker run --rm \ -u $(id -u):$(id -g) \ -v "$SYNTHETIC_DATA_DIR":/synthetic_data \ -w /MediSwarm \ - $GENERATED_DOCKER_IMAGE \ + jefftud/odelia:$VERSION \ /bin/bash -c "python3 application/jobs/ODELIA_ternary_classification/app/scripts/create_synthetic_dataset/create_synthetic_dataset.py /synthetic_data" } @@ -97,23 +88,18 @@ cleanup_temporary_data () { rm -rf "$SCRATCH_DIR" } -cleanup_local_docker_registry () { - docker rmi $EXPECTED_DOCKER_IMAGE - docker kill registry -} - start_server_and_clients () { cd "$PROJECT_DIR"/prod_00 cd server.local/startup - ./docker.sh --start_server + ./docker.sh --no_pull --start_server cd ../.. sleep 10 cd client_A/startup - ./docker.sh --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir "$SCRATCH_DIR"/client_A --GPU device=$GPU_FOR_TESTING --start_client + ./docker.sh --no_pull --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir "$SCRATCH_DIR"/client_A --GPU device=$GPU_FOR_TESTING --start_client cd ../.. cd client_B/startup - ./docker.sh --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir "$SCRATCH_DIR"/client_B --GPU device=$GPU_FOR_TESTING --start_client + ./docker.sh --no_pull --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir "$SCRATCH_DIR"/client_B --GPU device=$GPU_FOR_TESTING --start_client sleep 5 cd "$CWD" @@ -127,7 +113,7 @@ run_docker_gpu_preflight_check () { cd "$PROJECT_DIR"/prod_00 cd client_A/startup CONSOLE_OUTPUT=docker_gpu_preflight_check_console_output.txt - ./docker.sh --scratch_dir "$SCRATCH_DIR"/client_A --GPU device=$GPU_FOR_TESTING --dummy_training 2>&1 | tee "$CONSOLE_OUTPUT" + ./docker.sh --scratch_dir "$SCRATCH_DIR"/client_A --GPU device=$GPU_FOR_TESTING --dummy_training --no_pull 2>&1 | tee "$CONSOLE_OUTPUT" if grep -q "Epoch 1: 100%" "$CONSOLE_OUTPUT" && grep -q "Training completed successfully" "$CONSOLE_OUTPUT"; then echo "Expected output of Docker/GPU preflight check found" @@ -143,7 +129,7 @@ run_data_access_preflight_check () { cd "$PROJECT_DIR"/prod_00 cd client_A/startup CONSOLE_OUTPUT=data_access_preflight_check_console_output.txt - ./docker.sh --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir "$SCRATCH_DIR"/client_A --GPU device=$GPU_FOR_TESTING --preflight_check 2>&1 | tee $CONSOLE_OUTPUT + ./docker.sh --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir "$SCRATCH_DIR"/client_A --GPU device=$GPU_FOR_TESTING --preflight_check --no_pull 2>&1 | tee $CONSOLE_OUTPUT if grep -q "Train set: 18, Val set: 6" "$CONSOLE_OUTPUT" && grep -q "Epoch 0: 100%" "$CONSOLE_OUTPUT"; then echo "Expected output of Docker/GPU preflight check found" @@ -208,8 +194,6 @@ run_tests () { create_second_startup_kit - push_image_to_local_docker_registry - create_synthetic_data run_docker_gpu_preflight_check @@ -220,7 +204,6 @@ run_tests () { kill_server_and_clients cleanup_temporary_data - cleanup_local_docker_registry } run_tests diff --git a/tests/provision/dummy_project_for_testing.yml b/tests/provision/dummy_project_for_testing.yml index 39a83bd0..d4984d77 100644 --- a/tests/provision/dummy_project_for_testing.yml +++ b/tests/provision/dummy_project_for_testing.yml @@ -29,7 +29,7 @@ builders: args: config_folder: config scheme: http - docker_image: "localhost:5000/jefftud/odelia:__REPLACED_BY_CURRENT_VERSION_NUMBER_WHEN_BUILDING_STARTUP_KITS__" + docker_image: jefftud/odelia:__REPLACED_BY_CURRENT_VERSION_NUMBER_WHEN_BUILDING_STARTUP_KITS__ overseer_agent: path: nvflare.ha.dummy_overseer_agent.DummyOverseerAgent overseer_exists: false From c039c10e14093267a168f1dd03e1cac4515bbe07 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 20 Aug 2025 15:48:05 +0200 Subject: [PATCH 11/80] use defined variable rather than hard-coded name --- runTestsOutsideDocker.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/runTestsOutsideDocker.sh b/runTestsOutsideDocker.sh index d0b65636..4758e056 100755 --- a/runTestsOutsideDocker.sh +++ b/runTestsOutsideDocker.sh @@ -79,7 +79,7 @@ create_synthetic_data () { -u $(id -u):$(id -g) \ -v "$SYNTHETIC_DATA_DIR":/synthetic_data \ -w /MediSwarm \ - jefftud/odelia:$VERSION \ + $DOCKER_IMAGE \ /bin/bash -c "python3 application/jobs/ODELIA_ternary_classification/app/scripts/create_synthetic_dataset/create_synthetic_dataset.py /synthetic_data" } From 1cb5ad055faa03398e67b1814929645bd3695b6a Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 27 Aug 2025 09:48:46 +0200 Subject: [PATCH 12/80] renamed file for running integration tests --- runTestsInDocker.sh => runIntegrationTests.sh | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename runTestsInDocker.sh => runIntegrationTests.sh (100%) diff --git a/runTestsInDocker.sh b/runIntegrationTests.sh similarity index 100% rename from runTestsInDocker.sh rename to runIntegrationTests.sh From 2d3bdac317836f0b7d336079865469e124f3e40a Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 27 Aug 2025 09:54:16 +0200 Subject: [PATCH 13/80] refactored tests to be run in Docker: moved to separate scripts --- _runTestsInsideDocker.sh | 54 ------------------- runIntegrationTests.sh | 18 +++++-- ...run_controller_unit_tests_with_coverage.sh | 12 +++++ ...n_minimal_example_proof_of_concept_mode.sh | 19 +++++++ .../_run_minimal_example_simulation_mode.sh | 10 ++++ .../_run_minimal_example_standalone.sh | 10 ++++ .../_run_nvflare_unit_tests.sh | 10 ++++ 7 files changed, 75 insertions(+), 58 deletions(-) delete mode 100755 _runTestsInsideDocker.sh create mode 100755 tests/integration_tests/_run_controller_unit_tests_with_coverage.sh create mode 100755 tests/integration_tests/_run_minimal_example_proof_of_concept_mode.sh create mode 100755 tests/integration_tests/_run_minimal_example_simulation_mode.sh create mode 100755 tests/integration_tests/_run_minimal_example_standalone.sh create mode 100755 tests/integration_tests/_run_nvflare_unit_tests.sh diff --git a/_runTestsInsideDocker.sh b/_runTestsInsideDocker.sh deleted file mode 100755 index d3d07c18..00000000 --- a/_runTestsInsideDocker.sh +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env bash - -run_controller_unit_tests_with_coverage () { - # run unit tests of ODELIA swarm learning and report coverage - export MPLCONFIGDIR=/tmp - cd /MediSwarm/tests/unit_tests/controller - PYTHONPATH=/MediSwarm/controller/controller python3 -m coverage run --source=/MediSwarm/controller/controller -m unittest discover - coverage report -m - rm .coverage -} - -run_nvflare_unit_tests () { - cd /MediSwarm/docker_config/NVFlare - ./runtest.sh -c -r - coverage report -m - cd .. -} - -run_minimal_example_standalone () { - # run standalone version of minimal example - cd /MediSwarm/application/jobs/minimal_training_pytorch_cnn/app/custom/ - export TRAINING_MODE="local_training" - ./main.py -} - -run_minimal_example_simulation_mode () { - # run simulation mode for minimal example - cd /MediSwarm - export TRAINING_MODE="swarm" - nvflare simulator -w /tmp/minimal_training_pytorch_cnn -n 2 -t 2 application/jobs/minimal_training_pytorch_cnn -c simulated_node_0,simulated_node_1 -} - -run_minimal_example_proof_of_concept_mode () { - # run proof-of-concept mode for minimal example - cd /MediSwarm - export TRAINING_MODE="swarm" - nvflare poc prepare -c poc_client_0 poc_client_1 - nvflare poc prepare-jobs-dir -j application/jobs/ - nvflare poc start -ex admin@nvidia.com - sleep 15 - echo "Will submit job now after sleeping 15 seconds to allow the background process to complete" - nvflare job submit -j application/jobs/minimal_training_pytorch_cnn - sleep 60 - echo "Will shut down now after sleeping 60 seconds to allow the background process to complete" - sleep 2 - nvflare poc stop -} - -run_controller_unit_tests_with_coverage -# uncomment the following line to run NVFlare's unit tests (takes about 2 minutes and will install python packages in the container) -# run_nvflare_unit_tests -run_minimal_example_standalone -run_minimal_example_simulation_mode -run_minimal_example_proof_of_concept_mode diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 329ee6cf..92f7f75b 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -10,9 +10,8 @@ if [ -z "$GPU_FOR_TESTING" ]; then export GPU_FOR_TESTING="all" fi - -run_tests () { - echo "[Run] Unit tests inside Docker..." +_run_test_in_docker() { + echo "[Run] " $1 " inside Docker ..." docker run --rm \ --shm-size=16g \ --ipc=host \ @@ -20,10 +19,21 @@ run_tests () { --ulimit stack=67108864 \ -v /tmp:/scratch \ --gpus="$GPU_FOR_TESTING" \ - --entrypoint=/MediSwarm/_runTestsInsideDocker.sh \ + --entrypoint=/MediSwarm/$1 \ "$DOCKER_IMAGE" } + +run_tests () { + _run_test_in_docker tests/integration_tests/_run_controller_unit_tests_with_coverage.sh + _run_test_in_docker tests/integration_tests/_run_minimal_example_standalone.sh + _run_test_in_docker tests/integration_tests/_run_minimal_example_simulation_mode.sh + _run_test_in_docker tests/integration_tests/_run_minimal_example_proof_of_concept_mode.sh + + # uncomment the following line to also run NVFlare's unit tests (takes about 2 minutes and will install python packages in the container) + # run_test_in_docker tests/integration_tests/_run_nvflare_unit_tests.sh +} + prepare_dummy_trainings () { echo "[Prepare] Startup kits for dummy project..." rm -rf "$PROJECT_DIR" diff --git a/tests/integration_tests/_run_controller_unit_tests_with_coverage.sh b/tests/integration_tests/_run_controller_unit_tests_with_coverage.sh new file mode 100755 index 00000000..87ef36d1 --- /dev/null +++ b/tests/integration_tests/_run_controller_unit_tests_with_coverage.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash + +run_controller_unit_tests_with_coverage () { + # run unit tests of ODELIA swarm learning and report coverage + export MPLCONFIGDIR=/tmp + cd /MediSwarm/tests/unit_tests/controller + PYTHONPATH=/MediSwarm/controller/controller python3 -m coverage run --source=/MediSwarm/controller/controller -m unittest discover + coverage report -m + rm .coverage +} + +run_controller_unit_tests_with_coverage diff --git a/tests/integration_tests/_run_minimal_example_proof_of_concept_mode.sh b/tests/integration_tests/_run_minimal_example_proof_of_concept_mode.sh new file mode 100755 index 00000000..9331ea7b --- /dev/null +++ b/tests/integration_tests/_run_minimal_example_proof_of_concept_mode.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +run_minimal_example_proof_of_concept_mode () { + # run proof-of-concept mode for minimal example + cd /MediSwarm + export TRAINING_MODE="swarm" + nvflare poc prepare -c poc_client_0 poc_client_1 + nvflare poc prepare-jobs-dir -j application/jobs/ + nvflare poc start -ex admin@nvidia.com + sleep 15 + echo "Will submit job now after sleeping 15 seconds to allow the background process to complete" + nvflare job submit -j application/jobs/minimal_training_pytorch_cnn + sleep 60 + echo "Will shut down now after sleeping 60 seconds to allow the background process to complete" + sleep 2 + nvflare poc stop +} + +run_minimal_example_proof_of_concept_mode diff --git a/tests/integration_tests/_run_minimal_example_simulation_mode.sh b/tests/integration_tests/_run_minimal_example_simulation_mode.sh new file mode 100755 index 00000000..4f87934d --- /dev/null +++ b/tests/integration_tests/_run_minimal_example_simulation_mode.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +run_minimal_example_simulation_mode () { + # run simulation mode for minimal example + cd /MediSwarm + export TRAINING_MODE="swarm" + nvflare simulator -w /tmp/minimal_training_pytorch_cnn -n 2 -t 2 application/jobs/minimal_training_pytorch_cnn -c simulated_node_0,simulated_node_1 +} + +run_minimal_example_simulation_mode diff --git a/tests/integration_tests/_run_minimal_example_standalone.sh b/tests/integration_tests/_run_minimal_example_standalone.sh new file mode 100755 index 00000000..79e10ddb --- /dev/null +++ b/tests/integration_tests/_run_minimal_example_standalone.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +run_minimal_example_standalone () { + # run standalone version of minimal example + cd /MediSwarm/application/jobs/minimal_training_pytorch_cnn/app/custom/ + export TRAINING_MODE="local_training" + ./main.py +} + +run_minimal_example_standalone diff --git a/tests/integration_tests/_run_nvflare_unit_tests.sh b/tests/integration_tests/_run_nvflare_unit_tests.sh new file mode 100755 index 00000000..efd3b502 --- /dev/null +++ b/tests/integration_tests/_run_nvflare_unit_tests.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +run_nvflare_unit_tests () { + cd /MediSwarm/docker_config/NVFlare + ./runtest.sh -c -r + coverage report -m + cd .. +} + +run_nvflare_unit_tests From 79cf7d6fa26919f10aba1745caf40131f8e8b34e Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 27 Aug 2025 15:26:10 +0200 Subject: [PATCH 14/80] moved method to script for running integration tests --- runIntegrationTests.sh | 28 ++++++++++++++++++++++++++-- runTestsOutsideDocker.sh | 22 ---------------------- 2 files changed, 26 insertions(+), 24 deletions(-) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 92f7f75b..49a68bb4 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -10,8 +10,31 @@ if [ -z "$GPU_FOR_TESTING" ]; then export GPU_FOR_TESTING="all" fi +check_files_on_github () { + echo "[Run] Test whether expected content is available on github" + + CONTENT=$(curl -L https://github.com/KatherLab/MediSwarm/raw/refs/heads/main/LICENSE) + if echo "$CONTENT" | grep -q "MIT License" ; then + echo "Downloaded and verified license from github" + else + echo "Could not download and verify license" + exit 1 + fi + + CONTENT=$(curl -L https://github.com/KatherLab/MediSwarm/raw/refs/heads/main/README.md) + for ROLE in 'Swarm Participant' 'Developer' 'Swarm Operator'; + do + if echo "$CONTENT" | grep -q "$ROLE" ; then + echo "Instructions for $ROLE found" + else + echo "Instructions for role $ROLE missing" + exit 1 + fi + done +} + _run_test_in_docker() { - echo "[Run] " $1 " inside Docker ..." + echo "[Run]" $1 "inside Docker ..." docker run --rm \ --shm-size=16g \ --ipc=host \ @@ -23,7 +46,6 @@ _run_test_in_docker() { "$DOCKER_IMAGE" } - run_tests () { _run_test_in_docker tests/integration_tests/_run_controller_unit_tests_with_coverage.sh _run_test_in_docker tests/integration_tests/_run_minimal_example_standalone.sh @@ -78,12 +100,14 @@ cleanup_dummy_trainings () { } case "$1" in + check_files_on_github) check_files_on_github ;; run_tests) run_tests ;; prepare_dummy_trainings) prepare_dummy_trainings ;; run_dummy_training) run_dummy_training ;; run_3dcnn_tests) run_3dcnn_tests ;; cleanup) cleanup_dummy_trainings ;; all | "") + check_files_on_github run_tests prepare_dummy_trainings run_dummy_training diff --git a/runTestsOutsideDocker.sh b/runTestsOutsideDocker.sh index 4758e056..3e3f79a9 100755 --- a/runTestsOutsideDocker.sh +++ b/runTestsOutsideDocker.sh @@ -20,26 +20,6 @@ SCRATCH_DIR=$(mktemp -d) CWD=$(pwd) PROJECT_FILE="tests/provision/dummy_project_for_testing.yml" -check_files_on_github () { - CONTENT=$(curl -L https://github.com/KatherLab/MediSwarm/raw/refs/heads/main/LICENSE) - if echo "$CONTENT" | grep -q "MIT License" ; then - echo "Downloaded and verified license from github" - else - echo "Could not download and verify license" - exit 1 - fi - - CONTENT=$(curl -L https://github.com/KatherLab/MediSwarm/raw/refs/heads/main/README.md) - for ROLE in 'Swarm Participant' 'Developer' 'Swarm Operator'; - do - if echo "$CONTENT" | grep -q "$ROLE" ; then - echo "Instructions for $ROLE found" - else - echo "Instructions for role $ROLE missing" - exit 1 - fi - done -} create_second_startup_kit () { if [ ! -d "$PROJECT_DIR"/prod_00 ]; then @@ -190,8 +170,6 @@ run_dummy_training_in_swarm () { } run_tests () { - check_files_on_github - create_second_startup_kit create_synthetic_data From 89b6d597ebf4e1ea79dc0cbea82f9266dad536cb Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 27 Aug 2025 15:32:03 +0200 Subject: [PATCH 15/80] moved generating two sets of startup kits to script for integration tests --- runIntegrationTests.sh | 50 +++++++++++++++++++++++++++++++++++----- runTestsOutsideDocker.sh | 50 ---------------------------------------- 2 files changed, 44 insertions(+), 56 deletions(-) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 49a68bb4..f2f037ac 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -5,11 +5,15 @@ set -e VERSION=$(./getVersionNumber.sh) DOCKER_IMAGE=jefftud/odelia:$VERSION PROJECT_DIR="workspace/odelia_${VERSION}_dummy_project_for_testing" +SYNTHETIC_DATA_DIR=$(mktemp -d) +SCRATCH_DIR=$(mktemp -d) CWD=$(pwd) +PROJECT_FILE="tests/provision/dummy_project_for_testing.yml" if [ -z "$GPU_FOR_TESTING" ]; then export GPU_FOR_TESTING="all" fi + check_files_on_github () { echo "[Run] Test whether expected content is available on github" @@ -56,10 +60,44 @@ run_tests () { # run_test_in_docker tests/integration_tests/_run_nvflare_unit_tests.sh } -prepare_dummy_trainings () { - echo "[Prepare] Startup kits for dummy project..." - rm -rf "$PROJECT_DIR" - ./_buildStartupKits.sh tests/provision/dummy_project_for_testing.yml "$VERSION" +create_startup_kits_and_check_contained_files () { + echo "[Prepare] Startup kits for dummy project ..." + + if ! grep -q "127.0.0.1 server.local" /etc/hosts; then + echo "/etc/hosts needs to contain the following line, please add it." + echo "127.0.0.1 server.local localhost" + exit 1 + fi + + if [ ! -d "$PROJECT_DIR"/prod_00 ]; then + ./_buildStartupKits.sh $PROJECT_FILE $VERSION + fi + if [ -d "$PROJECT_DIR"/prod_01 ]; then + echo '"$PROJECT_DIR"/prod_01 exists, please remove/rename it' + exit 1 + fi + ./_buildStartupKits.sh $PROJECT_FILE $VERSION + + for FILE in 'client.crt' 'client.key' 'docker.sh' 'rootCA.pem'; + do + if [ -f "$PROJECT_DIR/prod_01/client_A/startup/$FILE" ] ; then + echo "$FILE found" + else + echo "$FILE missing" + exit 1 + fi + done + + ZIP_CONTENT=$(unzip -tv "$PROJECT_DIR/prod_01/client_B_${VERSION}.zip") + for FILE in 'client.crt' 'client.key' 'docker.sh' 'rootCA.pem'; + do + if echo "$ZIP_CONTENT" | grep -q "$FILE" ; then + echo "$FILE found in zip" + else + echo "$FILE missing in zip" + exit 1 + fi + done } run_dummy_training () { @@ -102,14 +140,14 @@ cleanup_dummy_trainings () { case "$1" in check_files_on_github) check_files_on_github ;; run_tests) run_tests ;; - prepare_dummy_trainings) prepare_dummy_trainings ;; + create_startup_kits) create_startup_kits_and_check_contained_files ;; run_dummy_training) run_dummy_training ;; run_3dcnn_tests) run_3dcnn_tests ;; cleanup) cleanup_dummy_trainings ;; all | "") check_files_on_github run_tests - prepare_dummy_trainings + create_startup_kits_and_check_contained_files run_dummy_training run_3dcnn_tests cleanup_dummy_trainings diff --git a/runTestsOutsideDocker.sh b/runTestsOutsideDocker.sh index 3e3f79a9..f990d11d 100755 --- a/runTestsOutsideDocker.sh +++ b/runTestsOutsideDocker.sh @@ -2,57 +2,7 @@ set -e -if ! grep -q "127.0.0.1 server.local" /etc/hosts; then - echo "/etc/hosts needs to contain the following line, please add it." - echo "127.0.0.1 server.local localhost" - exit 1 -fi - -if [ -z "$GPU_FOR_TESTING" ]; then - export GPU_FOR_TESTING="all" -fi - -VERSION=$(./getVersionNumber.sh) -DOCKER_IMAGE=jefftud/odelia:$VERSION -PROJECT_DIR="workspace/odelia_${VERSION}_dummy_project_for_testing" -SYNTHETIC_DATA_DIR=$(mktemp -d) -SCRATCH_DIR=$(mktemp -d) -CWD=$(pwd) -PROJECT_FILE="tests/provision/dummy_project_for_testing.yml" - - -create_second_startup_kit () { - if [ ! -d "$PROJECT_DIR"/prod_00 ]; then - echo '"$PROJECT_DIR"/prod_00 does not exist, please generate the startup kit first' - exit 1 - fi - if [ -d "$PROJECT_DIR"/prod_01 ]; then - echo '"$PROJECT_DIR"/prod_01 exists, please remove it' - exit 1 - fi - ./_buildStartupKits.sh $PROJECT_FILE $VERSION - - for FILE in 'client.crt' 'client.key' 'docker.sh' 'rootCA.pem'; - do - if [ -f "$PROJECT_DIR/prod_01/client_A/startup/$FILE" ] ; then - echo "$FILE found" - else - echo "$FILE missing" - exit 1 - fi - done - ZIP_CONTENT=$(unzip -tv "$PROJECT_DIR/prod_01/client_B_${VERSION}.zip") - for FILE in 'client.crt' 'client.key' 'docker.sh' 'rootCA.pem'; - do - if echo "$ZIP_CONTENT" | grep -q "$FILE" ; then - echo "$FILE found in zip" - else - echo "$FILE missing in zip" - exit 1 - fi - done -} create_synthetic_data () { docker run --rm \ From 7590b264318ff2804b6d0ab6fdd4c9a90538620c Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 27 Aug 2025 15:38:19 +0200 Subject: [PATCH 16/80] dedicated function to generate synthetic data --- runIntegrationTests.sh | 24 ++++++++++++++---------- runTestsOutsideDocker.sh | 12 ------------ 2 files changed, 14 insertions(+), 22 deletions(-) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index f2f037ac..5e97ad9f 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -100,6 +100,17 @@ create_startup_kits_and_check_contained_files () { done } +create_synthetic_data () { + echo "[Prepare] Synthetic data ..." + docker run --rm \ + -u $(id -u):$(id -g) \ + -v "$SYNTHETIC_DATA_DIR":/synthetic_data \ + -w /MediSwarm \ + $DOCKER_IMAGE \ + /bin/bash -c "python3 application/jobs/ODELIA_ternary_classification/app/scripts/create_synthetic_dataset/create_synthetic_dataset.py /synthetic_data" +} + + run_dummy_training () { echo "[Run] Dummy training session..." cd "$PROJECT_DIR/prod_00/client_A/startup/" @@ -108,16 +119,7 @@ run_dummy_training () { } run_3dcnn_tests () { - echo "[Run] Synthetic data + 3D CNN preflight check..." - SYNTHETIC_DATA_DIR=$(mktemp -d) - - # create synthetic data - docker run --rm \ - -u $(id -u):$(id -g) \ - -v "$SYNTHETIC_DATA_DIR":/synthetic_data \ - -w /MediSwarm \ - jefftud/odelia:$VERSION \ - /bin/bash -c "python3 application/jobs/ODELIA_ternary_classification/app/scripts/create_synthetic_dataset/create_synthetic_dataset.py /synthetic_data" + echo "[Run] 3D CNN preflight check..." # run tests using synthetic data cd "$PROJECT_DIR/prod_00/client_A/startup/" @@ -141,6 +143,7 @@ case "$1" in check_files_on_github) check_files_on_github ;; run_tests) run_tests ;; create_startup_kits) create_startup_kits_and_check_contained_files ;; + create_synthetic_data) create_synthetic_data ;; run_dummy_training) run_dummy_training ;; run_3dcnn_tests) run_3dcnn_tests ;; cleanup) cleanup_dummy_trainings ;; @@ -148,6 +151,7 @@ case "$1" in check_files_on_github run_tests create_startup_kits_and_check_contained_files + create_synthetic_data run_dummy_training run_3dcnn_tests cleanup_dummy_trainings diff --git a/runTestsOutsideDocker.sh b/runTestsOutsideDocker.sh index f990d11d..171dfde7 100755 --- a/runTestsOutsideDocker.sh +++ b/runTestsOutsideDocker.sh @@ -4,14 +4,6 @@ set -e -create_synthetic_data () { - docker run --rm \ - -u $(id -u):$(id -g) \ - -v "$SYNTHETIC_DATA_DIR":/synthetic_data \ - -w /MediSwarm \ - $DOCKER_IMAGE \ - /bin/bash -c "python3 application/jobs/ODELIA_ternary_classification/app/scripts/create_synthetic_dataset/create_synthetic_dataset.py /synthetic_data" -} cleanup_temporary_data () { rm -rf "$SYNTHETIC_DATA_DIR" @@ -120,10 +112,6 @@ run_dummy_training_in_swarm () { } run_tests () { - create_second_startup_kit - - create_synthetic_data - run_docker_gpu_preflight_check run_data_access_preflight_check From 4a7c07827849b92259bda74687ab60b12ad4e4c5 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 27 Aug 2025 15:56:28 +0200 Subject: [PATCH 17/80] more meaningful name for method --- runIntegrationTests.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 5e97ad9f..f7914481 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -50,7 +50,7 @@ _run_test_in_docker() { "$DOCKER_IMAGE" } -run_tests () { +run_local_tests () { _run_test_in_docker tests/integration_tests/_run_controller_unit_tests_with_coverage.sh _run_test_in_docker tests/integration_tests/_run_minimal_example_standalone.sh _run_test_in_docker tests/integration_tests/_run_minimal_example_simulation_mode.sh @@ -141,7 +141,7 @@ cleanup_dummy_trainings () { case "$1" in check_files_on_github) check_files_on_github ;; - run_tests) run_tests ;; + run_local_tests) run_local_tests ;; create_startup_kits) create_startup_kits_and_check_contained_files ;; create_synthetic_data) create_synthetic_data ;; run_dummy_training) run_dummy_training ;; @@ -149,7 +149,7 @@ case "$1" in cleanup) cleanup_dummy_trainings ;; all | "") check_files_on_github - run_tests + run_local_tests create_startup_kits_and_check_contained_files create_synthetic_data run_dummy_training From e02911cb6c63c116d1445698ac1b073d0727f030 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 27 Aug 2025 15:58:52 +0200 Subject: [PATCH 18/80] moved/merged cleanup to script for integration tests --- runIntegrationTests.sh | 18 ++++++++---------- runTestsOutsideDocker.sh | 7 ------- 2 files changed, 8 insertions(+), 17 deletions(-) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index f7914481..340c74b5 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -128,14 +128,12 @@ run_3dcnn_tests () { ./docker.sh --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir /tmp/scratch --GPU "$GPU_FOR_TESTING" --no_pull --run_script /MediSwarm/_run3DdcnnptlTestsInDocker.sh cd "$CWD" - - # clean up synthetic data - rm -rf "$SYNTHETIC_DATA_DIR" || echo "Warning: cleanup failed" } - -cleanup_dummy_trainings () { - echo "[Cleanup] Removing dummy workspace..." +cleanup_temporary_data () { + echo "[Cleanup] Removing synthetic data, scratch directory, dummy workspace ..." + rm -rf "$SYNTHETIC_DATA_DIR" + rm -rf "$SCRATCH_DIR" rm -rf "$PROJECT_DIR" } @@ -146,15 +144,15 @@ case "$1" in create_synthetic_data) create_synthetic_data ;; run_dummy_training) run_dummy_training ;; run_3dcnn_tests) run_3dcnn_tests ;; - cleanup) cleanup_dummy_trainings ;; + cleanup) cleanup_temporary_data ;; all | "") check_files_on_github run_local_tests create_startup_kits_and_check_contained_files create_synthetic_data - run_dummy_training - run_3dcnn_tests - cleanup_dummy_trainings + # run_dummy_training + # run_3dcnn_tests + cleanup_temporary_data ;; *) echo "Unknown argument: $1"; exit 1 ;; esac diff --git a/runTestsOutsideDocker.sh b/runTestsOutsideDocker.sh index 171dfde7..49dfb8fa 100755 --- a/runTestsOutsideDocker.sh +++ b/runTestsOutsideDocker.sh @@ -3,13 +3,6 @@ set -e - - -cleanup_temporary_data () { - rm -rf "$SYNTHETIC_DATA_DIR" - rm -rf "$SCRATCH_DIR" -} - start_server_and_clients () { cd "$PROJECT_DIR"/prod_00 cd server.local/startup From d67fa92f116410c6e3b03976e240e2c070abdfb0 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Thu, 28 Aug 2025 10:42:47 +0200 Subject: [PATCH 19/80] moved test for running Docker/GPU preflight check, i.e., extended existing test by checking output --- runIntegrationTests.sh | 23 ++++++++++++++++++----- runTestsOutsideDocker.sh | 16 ---------------- 2 files changed, 18 insertions(+), 21 deletions(-) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 340c74b5..2ab7b059 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -111,10 +111,20 @@ create_synthetic_data () { } -run_dummy_training () { - echo "[Run] Dummy training session..." +run_docker_gpu_preflight_check () { + # requires having built a startup kit + echo "[Run] Docker/GPU preflight check (local dummy training via startup kit) ..." cd "$PROJECT_DIR/prod_00/client_A/startup/" - ./docker.sh --data_dir /tmp/ --scratch_dir /tmp/scratch --GPU "$GPU_FOR_TESTING" --no_pull --dummy_training + CONSOLE_OUTPUT=docker_gpu_preflight_check_console_output.txt + ./docker.sh --scratch_dir "$SCRATCH_DIR"/client_A --GPU device=$GPU_FOR_TESTING --dummy_training --no_pull 2>&1 | tee "$CONSOLE_OUTPUT" + + if grep -q "Epoch 1: 100%" "$CONSOLE_OUTPUT" && grep -q "Training completed successfully" "$CONSOLE_OUTPUT"; then + echo "Expected output of Docker/GPU preflight check found" + else + echo "Missing expected output of Docker/GPU preflight check" + exit 1 + fi + cd "$CWD" } @@ -142,7 +152,7 @@ case "$1" in run_local_tests) run_local_tests ;; create_startup_kits) create_startup_kits_and_check_contained_files ;; create_synthetic_data) create_synthetic_data ;; - run_dummy_training) run_dummy_training ;; + run_docker_gpu_preflight_check) run_docker_gpu_preflight_check ;; run_3dcnn_tests) run_3dcnn_tests ;; cleanup) cleanup_temporary_data ;; all | "") @@ -150,9 +160,12 @@ case "$1" in run_local_tests create_startup_kits_and_check_contained_files create_synthetic_data - # run_dummy_training + run_docker_gpu_preflight_check # run_3dcnn_tests cleanup_temporary_data ;; *) echo "Unknown argument: $1"; exit 1 ;; esac + +# TODO adapt ./assets/readme/README.developer.md +# TODO adapt .github/workflows/pr-test.yaml diff --git a/runTestsOutsideDocker.sh b/runTestsOutsideDocker.sh index 49dfb8fa..b2461ad3 100755 --- a/runTestsOutsideDocker.sh +++ b/runTestsOutsideDocker.sh @@ -24,21 +24,6 @@ kill_server_and_clients () { docker kill odelia_swarm_server_flserver odelia_swarm_client_client_A odelia_swarm_client_client_B } -run_docker_gpu_preflight_check () { - cd "$PROJECT_DIR"/prod_00 - cd client_A/startup - CONSOLE_OUTPUT=docker_gpu_preflight_check_console_output.txt - ./docker.sh --scratch_dir "$SCRATCH_DIR"/client_A --GPU device=$GPU_FOR_TESTING --dummy_training --no_pull 2>&1 | tee "$CONSOLE_OUTPUT" - - if grep -q "Epoch 1: 100%" "$CONSOLE_OUTPUT" && grep -q "Training completed successfully" "$CONSOLE_OUTPUT"; then - echo "Expected output of Docker/GPU preflight check found" - else - echo "Missing expected output of Docker/GPU preflight check" - exit 1 - fi - - cd "$CWD" -} run_data_access_preflight_check () { cd "$PROJECT_DIR"/prod_00 @@ -105,7 +90,6 @@ run_dummy_training_in_swarm () { } run_tests () { - run_docker_gpu_preflight_check run_data_access_preflight_check start_server_and_clients From c9ba1418c77a500cfb91ac9b2d72e8f2ba66b57c Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Thu, 28 Aug 2025 10:50:42 +0200 Subject: [PATCH 20/80] moved method for running data access preflight check --- runIntegrationTests.sh | 26 +++++++++++++++++--------- runTestsOutsideDocker.sh | 19 ------------------- 2 files changed, 17 insertions(+), 28 deletions(-) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 2ab7b059..bc77b52b 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -128,18 +128,26 @@ run_docker_gpu_preflight_check () { cd "$CWD" } -run_3dcnn_tests () { - echo "[Run] 3D CNN preflight check..." - - # run tests using synthetic data - cd "$PROJECT_DIR/prod_00/client_A/startup/" - # preflight check (standalone) and swarm simulation mode - ./docker.sh --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir /tmp/scratch --GPU "$GPU_FOR_TESTING" --no_pull --preflight_check - ./docker.sh --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir /tmp/scratch --GPU "$GPU_FOR_TESTING" --no_pull --run_script /MediSwarm/_run3DdcnnptlTestsInDocker.sh +run_data_access_preflight_check () { + # requires having built a startup kit and synthetic dataset + echo "[Run] Data access preflight check..." + cd "$PROJECT_DIR"/prod_00 + cd client_A/startup + CONSOLE_OUTPUT=data_access_preflight_check_console_output.txt + ./docker.sh --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir "$SCRATCH_DIR"/client_A --GPU device=$GPU_FOR_TESTING --preflight_check --no_pull 2>&1 | tee $CONSOLE_OUTPUT + + if grep -q "Train set: 18, Val set: 6" "$CONSOLE_OUTPUT" && grep -q "Epoch 0: 100%" "$CONSOLE_OUTPUT"; then + echo "Expected output of Docker/GPU preflight check found" + else + echo "Missing expected output of Docker/GPU preflight check" + exit 1 + fi cd "$CWD" } +# TODO ./docker.sh --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir /tmp/scratch --GPU "$GPU_FOR_TESTING" --no_pull --run_script /MediSwarm/_run3DdcnnptlTestsInDocker.sh + cleanup_temporary_data () { echo "[Cleanup] Removing synthetic data, scratch directory, dummy workspace ..." rm -rf "$SYNTHETIC_DATA_DIR" @@ -161,7 +169,7 @@ case "$1" in create_startup_kits_and_check_contained_files create_synthetic_data run_docker_gpu_preflight_check - # run_3dcnn_tests + run_data_access_preflight_check cleanup_temporary_data ;; *) echo "Unknown argument: $1"; exit 1 ;; diff --git a/runTestsOutsideDocker.sh b/runTestsOutsideDocker.sh index b2461ad3..a72018ba 100755 --- a/runTestsOutsideDocker.sh +++ b/runTestsOutsideDocker.sh @@ -24,23 +24,6 @@ kill_server_and_clients () { docker kill odelia_swarm_server_flserver odelia_swarm_client_client_A odelia_swarm_client_client_B } - -run_data_access_preflight_check () { - cd "$PROJECT_DIR"/prod_00 - cd client_A/startup - CONSOLE_OUTPUT=data_access_preflight_check_console_output.txt - ./docker.sh --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir "$SCRATCH_DIR"/client_A --GPU device=$GPU_FOR_TESTING --preflight_check --no_pull 2>&1 | tee $CONSOLE_OUTPUT - - if grep -q "Train set: 18, Val set: 6" "$CONSOLE_OUTPUT" && grep -q "Epoch 0: 100%" "$CONSOLE_OUTPUT"; then - echo "Expected output of Docker/GPU preflight check found" - else - echo "Missing expected output of Docker/GPU preflight check" - exit 1 - fi - - cd "$CWD" -} - run_dummy_training_in_swarm () { cd "$PROJECT_DIR"/prod_00 cd admin@test.odelia/startup @@ -90,8 +73,6 @@ run_dummy_training_in_swarm () { } run_tests () { - run_data_access_preflight_check - start_server_and_clients run_dummy_training_in_swarm kill_server_and_clients From b1c72f51b8519edc63fd6d8bce76c94087bd14e1 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Thu, 28 Aug 2025 10:51:28 +0200 Subject: [PATCH 21/80] refactored so that individual steps (including cleanup) can be run separately --- runIntegrationTests.sh | 44 +++++++++++++++++++++++++++++++++++------- 1 file changed, 37 insertions(+), 7 deletions(-) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index bc77b52b..6627ceb0 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -51,9 +51,13 @@ _run_test_in_docker() { } run_local_tests () { + echo "[Run] Controller unit tests" _run_test_in_docker tests/integration_tests/_run_controller_unit_tests_with_coverage.sh + echo "[Run] Minimal example, standalone" _run_test_in_docker tests/integration_tests/_run_minimal_example_standalone.sh + echo "[Run] Minimal example, simulation mode" _run_test_in_docker tests/integration_tests/_run_minimal_example_simulation_mode.sh + echo "[Run] Minimal example, proof-of-concept mode" _run_test_in_docker tests/integration_tests/_run_minimal_example_proof_of_concept_mode.sh # uncomment the following line to also run NVFlare's unit tests (takes about 2 minutes and will install python packages in the container) @@ -156,13 +160,39 @@ cleanup_temporary_data () { } case "$1" in - check_files_on_github) check_files_on_github ;; - run_local_tests) run_local_tests ;; - create_startup_kits) create_startup_kits_and_check_contained_files ;; - create_synthetic_data) create_synthetic_data ;; - run_docker_gpu_preflight_check) run_docker_gpu_preflight_check ;; - run_3dcnn_tests) run_3dcnn_tests ;; - cleanup) cleanup_temporary_data ;; + check_files_on_github) + check_files_on_github + cleanup_temporary_data + ;; + + run_local_tests) + run_local_tests + cleanup_temporary_data + ;; + + create_startup_kits) + create_startup_kits_and_check_contained_files + cleanup_temporary_data + ;; + + create_synthetic_data) + create_synthetic_data + cleanup_temporary_data + ;; + + run_docker_gpu_preflight_check) + create_startup_kits_and_check_contained_files + run_docker_gpu_preflight_check + cleanup_temporary_data + ;; + + run_data_access_preflight_check) + create_startup_kits_and_check_contained_files + create_synthetic_data + run_data_access_preflight_check + cleanup_temporary_data + ;; + all | "") check_files_on_github run_local_tests From 580b0c0bac876edca4dfcda32e51ca9155f6b43b Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Thu, 28 Aug 2025 11:44:32 +0200 Subject: [PATCH 22/80] integrated 3dcnn training in simulation mode in Docker in test --- runIntegrationTests.sh | 13 ++++++++++++- .../integration_tests/_run_3dcnn_simulation_mode.sh | 0 2 files changed, 12 insertions(+), 1 deletion(-) rename _run3DdcnnptlTestsInDocker.sh => tests/integration_tests/_run_3dcnn_simulation_mode.sh (100%) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 6627ceb0..8443061f 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -150,7 +150,11 @@ run_data_access_preflight_check () { cd "$CWD" } -# TODO ./docker.sh --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir /tmp/scratch --GPU "$GPU_FOR_TESTING" --no_pull --run_script /MediSwarm/_run3DdcnnptlTestsInDocker.sh +run_simulation_mode_in_docker () { + # requires having built a startup kit and synthetic dataset + echo "[Run] Simulation mode of 3DCNN training in Docker" + _run_test_in_docker tests/integration_tests/_run_3dcnn_simulation_mode.sh +} cleanup_temporary_data () { echo "[Cleanup] Removing synthetic data, scratch directory, dummy workspace ..." @@ -193,6 +197,13 @@ case "$1" in cleanup_temporary_data ;; + run_simulation_mode_in_docker) + create_startup_kits_and_check_contained_files + create_synthetic_data + run_simulation_mode_in_docker + cleanup_temporary_data + ;; + all | "") check_files_on_github run_local_tests diff --git a/_run3DdcnnptlTestsInDocker.sh b/tests/integration_tests/_run_3dcnn_simulation_mode.sh similarity index 100% rename from _run3DdcnnptlTestsInDocker.sh rename to tests/integration_tests/_run_3dcnn_simulation_mode.sh From 0d85a88d357e805b8445c1da02c94025be05fd90 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Thu, 28 Aug 2025 11:52:58 +0200 Subject: [PATCH 23/80] let scripts fail on error --- .../_run_controller_unit_tests_with_coverage.sh | 2 ++ .../_run_minimal_example_proof_of_concept_mode.sh | 2 ++ tests/integration_tests/_run_minimal_example_simulation_mode.sh | 2 ++ tests/integration_tests/_run_minimal_example_standalone.sh | 2 ++ tests/integration_tests/_run_nvflare_unit_tests.sh | 2 ++ 5 files changed, 10 insertions(+) diff --git a/tests/integration_tests/_run_controller_unit_tests_with_coverage.sh b/tests/integration_tests/_run_controller_unit_tests_with_coverage.sh index 87ef36d1..3d3b87dd 100755 --- a/tests/integration_tests/_run_controller_unit_tests_with_coverage.sh +++ b/tests/integration_tests/_run_controller_unit_tests_with_coverage.sh @@ -1,5 +1,7 @@ #!/usr/bin/env bash +set -e + run_controller_unit_tests_with_coverage () { # run unit tests of ODELIA swarm learning and report coverage export MPLCONFIGDIR=/tmp diff --git a/tests/integration_tests/_run_minimal_example_proof_of_concept_mode.sh b/tests/integration_tests/_run_minimal_example_proof_of_concept_mode.sh index 9331ea7b..9e60b7fc 100755 --- a/tests/integration_tests/_run_minimal_example_proof_of_concept_mode.sh +++ b/tests/integration_tests/_run_minimal_example_proof_of_concept_mode.sh @@ -1,5 +1,7 @@ #!/usr/bin/env bash +set -e + run_minimal_example_proof_of_concept_mode () { # run proof-of-concept mode for minimal example cd /MediSwarm diff --git a/tests/integration_tests/_run_minimal_example_simulation_mode.sh b/tests/integration_tests/_run_minimal_example_simulation_mode.sh index 4f87934d..e1fd931f 100755 --- a/tests/integration_tests/_run_minimal_example_simulation_mode.sh +++ b/tests/integration_tests/_run_minimal_example_simulation_mode.sh @@ -1,5 +1,7 @@ #!/usr/bin/env bash +set -e + run_minimal_example_simulation_mode () { # run simulation mode for minimal example cd /MediSwarm diff --git a/tests/integration_tests/_run_minimal_example_standalone.sh b/tests/integration_tests/_run_minimal_example_standalone.sh index 79e10ddb..f0106342 100755 --- a/tests/integration_tests/_run_minimal_example_standalone.sh +++ b/tests/integration_tests/_run_minimal_example_standalone.sh @@ -1,5 +1,7 @@ #!/usr/bin/env bash +set -e + run_minimal_example_standalone () { # run standalone version of minimal example cd /MediSwarm/application/jobs/minimal_training_pytorch_cnn/app/custom/ diff --git a/tests/integration_tests/_run_nvflare_unit_tests.sh b/tests/integration_tests/_run_nvflare_unit_tests.sh index efd3b502..890406c2 100755 --- a/tests/integration_tests/_run_nvflare_unit_tests.sh +++ b/tests/integration_tests/_run_nvflare_unit_tests.sh @@ -1,5 +1,7 @@ #!/usr/bin/env bash +set -e + run_nvflare_unit_tests () { cd /MediSwarm/docker_config/NVFlare ./runtest.sh -c -r From f015f8bf952d97951419867d20833bd0994b86fb Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Mon, 8 Sep 2025 15:17:44 +0200 Subject: [PATCH 24/80] moved (last remaining) test of server and clients to script for integration tests --- runIntegrationTests.sh | 91 +++++++++++++++++++++++++++++++++++++++- runTestsOutsideDocker.sh | 83 ------------------------------------ 2 files changed, 90 insertions(+), 84 deletions(-) delete mode 100755 runTestsOutsideDocker.sh diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 8443061f..c4a19622 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -37,6 +37,7 @@ check_files_on_github () { done } + _run_test_in_docker() { echo "[Run]" $1 "inside Docker ..." docker run --rm \ @@ -50,6 +51,7 @@ _run_test_in_docker() { "$DOCKER_IMAGE" } + run_local_tests () { echo "[Run] Controller unit tests" _run_test_in_docker tests/integration_tests/_run_controller_unit_tests_with_coverage.sh @@ -64,8 +66,9 @@ run_local_tests () { # run_test_in_docker tests/integration_tests/_run_nvflare_unit_tests.sh } + create_startup_kits_and_check_contained_files () { - echo "[Prepare] Startup kits for dummy project ..." + echo "[Prepare] Startup kits for test project ..." if ! grep -q "127.0.0.1 server.local" /etc/hosts; then echo "/etc/hosts needs to contain the following line, please add it." @@ -104,6 +107,7 @@ create_startup_kits_and_check_contained_files () { done } + create_synthetic_data () { echo "[Prepare] Synthetic data ..." docker run --rm \ @@ -132,6 +136,7 @@ run_docker_gpu_preflight_check () { cd "$CWD" } + run_data_access_preflight_check () { # requires having built a startup kit and synthetic dataset echo "[Run] Data access preflight check..." @@ -150,12 +155,86 @@ run_data_access_preflight_check () { cd "$CWD" } + run_simulation_mode_in_docker () { # requires having built a startup kit and synthetic dataset echo "[Run] Simulation mode of 3DCNN training in Docker" _run_test_in_docker tests/integration_tests/_run_3dcnn_simulation_mode.sh } + +start_server_and_clients () { + cd "$PROJECT_DIR"/prod_00 + cd server.local/startup + ./docker.sh --no_pull --start_server + cd ../.. + sleep 10 + + cd client_A/startup + ./docker.sh --no_pull --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir "$SCRATCH_DIR"/client_A --GPU device=$GPU_FOR_TESTING --start_client + cd ../.. + cd client_B/startup + ./docker.sh --no_pull --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir "$SCRATCH_DIR"/client_B --GPU device=$GPU_FOR_TESTING --start_client + sleep 5 + + cd "$CWD" +} + + +run_dummy_training_in_swarm () { + cd "$PROJECT_DIR"/prod_00 + cd admin@test.odelia/startup + "$CWD"/_testsOutsideDocker_submitDummyTraining.exp + docker kill fladmin + sleep 60 + cd "$CWD" + + cd "$PROJECT_DIR"/prod_00/server.local/startup + CONSOLE_OUTPUT=nohup.out + for EXPECTED_OUTPUT in 'Total clients: 2' 'updated status of client client_A on round 4' 'updated status of client client_B on round 4' 'all_done=True' 'Server runner finished.'; + do + if grep -q "$EXPECTED_OUTPUT" "$CONSOLE_OUTPUT"; then + echo "Expected output $EXPECTED_OUTPUT found" + else + echo "Expected output $EXPECTED_OUTPUT missing" + exit 1 + fi + done + cd "$CWD" + + cd "$PROJECT_DIR"/prod_00/client_A/startup + CONSOLE_OUTPUT=nohup.out + for EXPECTED_OUTPUT in 'Sending training result to aggregation client' 'Epoch 9: 100%' ; + do + if grep -q "$EXPECTED_OUTPUT" "$CONSOLE_OUTPUT"; then + echo "Expected output $EXPECTED_OUTPUT found" + else + echo "Expected output $EXPECTED_OUTPUT missing" + exit 1 + fi + done + cd "$CWD" + + cd "$PROJECT_DIR"/prod_00/client_A/ + FILES_PRESENT=$(find . -type f -name "*.*") + for EXPECTED_FILE in 'custom/minimal_training.py' 'best_FL_global_model.pt' 'FL_global_model.pt' ; + do + if echo "$FILES_PRESENT" | grep -q "$EXPECTED_FILE" ; then + echo "Expected file $EXPECTED_FILE found" + else + echo "Expected file $EXPECTED_FILE missing" + exit 1 + fi + done + cd "$CWD" +} + + +kill_server_and_clients () { + docker kill odelia_swarm_server_flserver odelia_swarm_client_client_A odelia_swarm_client_client_B +} + + cleanup_temporary_data () { echo "[Cleanup] Removing synthetic data, scratch directory, dummy workspace ..." rm -rf "$SYNTHETIC_DATA_DIR" @@ -163,6 +242,7 @@ cleanup_temporary_data () { rm -rf "$PROJECT_DIR" } + case "$1" in check_files_on_github) check_files_on_github @@ -204,6 +284,15 @@ case "$1" in cleanup_temporary_data ;; + run_dummy_training_in_swarm) + create_startup_kits_and_check_contained_files + create_synthetic_data + start_server_and_clients + run_dummy_training_in_swarm + kill_server_and_clients + cleanup_temporary_data + ;; + all | "") check_files_on_github run_local_tests diff --git a/runTestsOutsideDocker.sh b/runTestsOutsideDocker.sh deleted file mode 100755 index a72018ba..00000000 --- a/runTestsOutsideDocker.sh +++ /dev/null @@ -1,83 +0,0 @@ -#!/usr/bin/env bash - -set -e - - -start_server_and_clients () { - cd "$PROJECT_DIR"/prod_00 - cd server.local/startup - ./docker.sh --no_pull --start_server - cd ../.. - sleep 10 - - cd client_A/startup - ./docker.sh --no_pull --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir "$SCRATCH_DIR"/client_A --GPU device=$GPU_FOR_TESTING --start_client - cd ../.. - cd client_B/startup - ./docker.sh --no_pull --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir "$SCRATCH_DIR"/client_B --GPU device=$GPU_FOR_TESTING --start_client - sleep 5 - - cd "$CWD" -} - -kill_server_and_clients () { - docker kill odelia_swarm_server_flserver odelia_swarm_client_client_A odelia_swarm_client_client_B -} - -run_dummy_training_in_swarm () { - cd "$PROJECT_DIR"/prod_00 - cd admin@test.odelia/startup - ../../../../../_testsOutsideDocker_submitDummyTraining.exp - docker kill fladmin - sleep 60 - cd "$CWD" - - cd "$PROJECT_DIR"/prod_00/server.local/startup - CONSOLE_OUTPUT=nohup.out - for EXPECTED_OUTPUT in 'Total clients: 2' 'updated status of client client_A on round 4' 'updated status of client client_B on round 4' 'all_done=True' 'Server runner finished.'; - do - if grep -q "$EXPECTED_OUTPUT" "$CONSOLE_OUTPUT"; then - echo "Expected output $EXPECTED_OUTPUT found" - else - echo "Expected output $EXPECTED_OUTPUT missing" - exit 1 - fi - done - cd "$CWD" - - cd "$PROJECT_DIR"/prod_00/client_A/startup - CONSOLE_OUTPUT=nohup.out - for EXPECTED_OUTPUT in 'Sending training result to aggregation client' 'Epoch 9: 100%' ; - do - if grep -q "$EXPECTED_OUTPUT" "$CONSOLE_OUTPUT"; then - echo "Expected output $EXPECTED_OUTPUT found" - else - echo "Expected output $EXPECTED_OUTPUT missing" - exit 1 - fi - done - cd "$CWD" - - cd "$PROJECT_DIR"/prod_00/client_A/ - FILES_PRESENT=$(find . -type f -name "*.*") - for EXPECTED_FILE in 'custom/minimal_training.py' 'best_FL_global_model.pt' 'FL_global_model.pt' ; - do - if echo "$FILES_PRESENT" | grep -q "$EXPECTED_FILE" ; then - echo "Expected file $EXPECTED_FILE found" - else - echo "Expected file $EXPECTED_FILE missing" - exit 1 - fi - done - cd "$CWD" -} - -run_tests () { - start_server_and_clients - run_dummy_training_in_swarm - kill_server_and_clients - - cleanup_temporary_data -} - -run_tests From c6b9ec65955b9d2d407684a342eb5fb53b52d619 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Mon, 8 Sep 2025 15:31:53 +0200 Subject: [PATCH 25/80] removed unnecessary block --- runIntegrationTests.sh | 5 ----- 1 file changed, 5 deletions(-) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index c4a19622..f51999ce 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -259,11 +259,6 @@ case "$1" in cleanup_temporary_data ;; - create_synthetic_data) - create_synthetic_data - cleanup_temporary_data - ;; - run_docker_gpu_preflight_check) create_startup_kits_and_check_contained_files run_docker_gpu_preflight_check From da14feb74dd0e986ea6909d19a92e68b10eb06a8 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Mon, 8 Sep 2025 15:32:08 +0200 Subject: [PATCH 26/80] completed "all" section --- runIntegrationTests.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index f51999ce..8fc1a26d 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -295,6 +295,9 @@ case "$1" in create_synthetic_data run_docker_gpu_preflight_check run_data_access_preflight_check + start_server_and_clients + run_dummy_training_in_swarm + kill_server_and_clients cleanup_temporary_data ;; *) echo "Unknown argument: $1"; exit 1 ;; From c30a39f8a76b40931c7eb6250ed0d2d3f720c194 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Mon, 8 Sep 2025 15:32:22 +0200 Subject: [PATCH 27/80] consistently output what is being run --- runIntegrationTests.sh | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 8fc1a26d..46dfb634 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -63,6 +63,7 @@ run_local_tests () { _run_test_in_docker tests/integration_tests/_run_minimal_example_proof_of_concept_mode.sh # uncomment the following line to also run NVFlare's unit tests (takes about 2 minutes and will install python packages in the container) + # echo "[Run] NVFlare unit tests" # run_test_in_docker tests/integration_tests/_run_nvflare_unit_tests.sh } @@ -164,6 +165,8 @@ run_simulation_mode_in_docker () { start_server_and_clients () { + echo "[Run] Start server and client Docker containers ..." + cd "$PROJECT_DIR"/prod_00 cd server.local/startup ./docker.sh --no_pull --start_server @@ -182,6 +185,8 @@ start_server_and_clients () { run_dummy_training_in_swarm () { + echo "[Run] Dummy training in swarm ..." + cd "$PROJECT_DIR"/prod_00 cd admin@test.odelia/startup "$CWD"/_testsOutsideDocker_submitDummyTraining.exp @@ -231,6 +236,7 @@ run_dummy_training_in_swarm () { kill_server_and_clients () { + echo "[Cleanup] Kill server and client Docker containers ..." docker kill odelia_swarm_server_flserver odelia_swarm_client_client_A odelia_swarm_client_client_B } From 452621d94ef15136fe32ed9141bad5acc8c2df1a Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 9 Sep 2025 10:46:06 +0200 Subject: [PATCH 28/80] running simulation mode of 3D CNN training does not work yet, commented out --- runIntegrationTests.sh | 29 +++++++++++++++-------------- 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 46dfb634..1709ac96 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -157,13 +157,6 @@ run_data_access_preflight_check () { } -run_simulation_mode_in_docker () { - # requires having built a startup kit and synthetic dataset - echo "[Run] Simulation mode of 3DCNN training in Docker" - _run_test_in_docker tests/integration_tests/_run_3dcnn_simulation_mode.sh -} - - start_server_and_clients () { echo "[Run] Start server and client Docker containers ..." @@ -278,13 +271,6 @@ case "$1" in cleanup_temporary_data ;; - run_simulation_mode_in_docker) - create_startup_kits_and_check_contained_files - create_synthetic_data - run_simulation_mode_in_docker - cleanup_temporary_data - ;; - run_dummy_training_in_swarm) create_startup_kits_and_check_contained_files create_synthetic_data @@ -311,3 +297,18 @@ esac # TODO adapt ./assets/readme/README.developer.md # TODO adapt .github/workflows/pr-test.yaml + +# The following does not work yet. It should be included in "all" and in .github/workflows/pr-test.yaml once it works. +# +# run_simulation_mode_in_docker () { +# # requires having built a startup kit and synthetic dataset +# echo "[Run] Simulation mode of 3DCNN training in Docker" +# _run_test_in_docker tests/integration_tests/_run_3dcnn_simulation_mode.sh +# } +# +# run_simulation_mode_in_docker) +# create_startup_kits_and_check_contained_files +# create_synthetic_data +# run_simulation_mode_in_docker +# cleanup_temporary_data +# ;; From 92a246014b02d0bc28d3521ac7baf2e130802be5 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 9 Sep 2025 11:05:38 +0200 Subject: [PATCH 29/80] expanded run_local_tests and moved unit test script to more suitable folder --- runIntegrationTests.sh | 30 +++++++++++++++---- .../_run_nvflare_unit_tests.sh | 0 2 files changed, 24 insertions(+), 6 deletions(-) rename tests/{integration_tests => unit_tests}/_run_nvflare_unit_tests.sh (100%) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 1709ac96..392b9c3e 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -52,19 +52,29 @@ _run_test_in_docker() { } -run_local_tests () { +run_unit_tests_controller(){ echo "[Run] Controller unit tests" _run_test_in_docker tests/integration_tests/_run_controller_unit_tests_with_coverage.sh +} + +run_dummy_training_standalone(){ echo "[Run] Minimal example, standalone" _run_test_in_docker tests/integration_tests/_run_minimal_example_standalone.sh +} + +run_dummy_training_simulation_mode(){ echo "[Run] Minimal example, simulation mode" _run_test_in_docker tests/integration_tests/_run_minimal_example_simulation_mode.sh +} + +run_dummy_training_poc_mode(){ echo "[Run] Minimal example, proof-of-concept mode" _run_test_in_docker tests/integration_tests/_run_minimal_example_proof_of_concept_mode.sh +} - # uncomment the following line to also run NVFlare's unit tests (takes about 2 minutes and will install python packages in the container) - # echo "[Run] NVFlare unit tests" - # run_test_in_docker tests/integration_tests/_run_nvflare_unit_tests.sh +run_nvflare_unit_tests(){ + echo "[Run] NVFlare unit tests" + _run_test_in_docker tests/unit_tests/_run_nvflare_unit_tests.sh } @@ -249,7 +259,11 @@ case "$1" in ;; run_local_tests) - run_local_tests + run_unit_tests_controller + run_dummy_training_standalone + run_dummy_training_simulation_mode + run_dummy_training_poc_mode + run_nvflare_unit_tests cleanup_temporary_data ;; @@ -282,7 +296,11 @@ case "$1" in all | "") check_files_on_github - run_local_tests + run_unit_tests_controller + run_dummy_training_standalone + run_dummy_training_simulation_mode + run_dummy_training_poc_mode + run_nvflare_unit_tests create_startup_kits_and_check_contained_files create_synthetic_data run_docker_gpu_preflight_check diff --git a/tests/integration_tests/_run_nvflare_unit_tests.sh b/tests/unit_tests/_run_nvflare_unit_tests.sh similarity index 100% rename from tests/integration_tests/_run_nvflare_unit_tests.sh rename to tests/unit_tests/_run_nvflare_unit_tests.sh From ba7363d7531984ceecdcdae8219e459756619964 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 9 Sep 2025 13:33:23 +0200 Subject: [PATCH 30/80] disabled NVFlare unit tests as before --- runIntegrationTests.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 392b9c3e..ff6b5a6a 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -263,7 +263,7 @@ case "$1" in run_dummy_training_standalone run_dummy_training_simulation_mode run_dummy_training_poc_mode - run_nvflare_unit_tests + # run_nvflare_unit_tests # uncomment to enable NVFlare unit tests cleanup_temporary_data ;; @@ -300,7 +300,7 @@ case "$1" in run_dummy_training_standalone run_dummy_training_simulation_mode run_dummy_training_poc_mode - run_nvflare_unit_tests + # run_nvflare_unit_tests # uncomment to enable NVFlare unit tests create_startup_kits_and_check_contained_files create_synthetic_data run_docker_gpu_preflight_check From ddc3e7e40aa7b47b020b0dac7586ff515cfdb618 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 9 Sep 2025 13:51:58 +0200 Subject: [PATCH 31/80] updated developer readme --- assets/readme/README.developer.md | 15 ++++++++++----- runIntegrationTests.sh | 3 +-- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/assets/readme/README.developer.md b/assets/readme/README.developer.md index fb6aafc3..0f789529 100644 --- a/assets/readme/README.developer.md +++ b/assets/readme/README.developer.md @@ -24,10 +24,10 @@ The project description specifies the swarm nodes etc. to be used for a swarm tr kits, running local trainings in the startup kit), you can manually push the image to DockerHub, provided you have the necessary rights. Make sure you are not re-using a version number for this purpose. -## Running Local Tests +## Running Tests ```bash - ./runTestsInDocker.sh + ./runIntegrationTests.sh ``` You should see @@ -36,10 +36,11 @@ You should see 2. output of a successful simulation run with two nodes 3. output of a successful proof-of-concept run run with two nodes 4. output of a set of startup kits being generated -5. output of a dummy training run using one of the startup kits -6. TODO update this to what the tests output now +5. output of a Docker/GPU preflight check using one of the startup kits +6. output of a data access preflight check using one of the startup kits +7. output of a dummy training run in a swarm consisting of one server and two client nodes -Optionally, uncomment running NVFlare unit tests in `_runTestsInsideDocker.sh`. +Optionally, uncomment running NVFlare unit tests. ## Distributing Startup Kits @@ -93,3 +94,7 @@ export CONFIG=original run in the swarm 3. Use the local tests to check if the code is swarm-ready 4. TODO more detailed instructions + +## Continuous Integration + +Tests to be executed after pushing to github are defined in `.github/workflows/pr-test.yaml`. diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index ff6b5a6a..86429103 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -313,10 +313,9 @@ case "$1" in *) echo "Unknown argument: $1"; exit 1 ;; esac -# TODO adapt ./assets/readme/README.developer.md # TODO adapt .github/workflows/pr-test.yaml -# The following does not work yet. It should be included in "all" and in .github/workflows/pr-test.yaml once it works. +# The following does not work yet. It should be included in "all", in ./assets/readme/README.developer.md and in .github/workflows/pr-test.yaml once it works. # # run_simulation_mode_in_docker () { # # requires having built a startup kit and synthetic dataset From 6ae32c29f139d134d8d63bd8a6cf622f8f459f4b Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 9 Sep 2025 14:02:46 +0200 Subject: [PATCH 32/80] run integration tests in CI in one go --- .github/workflows/pr-test.yaml | 27 ++------------------------- runIntegrationTests.sh | 3 +-- 2 files changed, 3 insertions(+), 27 deletions(-) diff --git a/.github/workflows/pr-test.yaml b/.github/workflows/pr-test.yaml index a75c14a5..64c18cd1 100644 --- a/.github/workflows/pr-test.yaml +++ b/.github/workflows/pr-test.yaml @@ -50,27 +50,10 @@ jobs: - name: Build Docker image and dummy startup kits run: ./buildDockerImageAndStartupKits.sh -p tests/provision/dummy_project_for_testing.yml --use-docker-cache - - name: Prepare dummy trainings - continue-on-error: true - run: | - ./runTestsInDocker.sh prepare_dummy_trainings - echo "Dummy training project prepared" - - - name: Run dummy training + - name: Run integration tests continue-on-error: false run: | - ./runTestsInDocker.sh run_dummy_training - echo "Dummy training finished" - echo "=== Checking log output ===" - ls -lh workspace/*/prod_00/client_A/logs || echo "No logs found for dummy training" - - - name: Run 3D CNN tests - continue-on-error: false - run: | - ./runTestsInDocker.sh run_3dcnn_tests - echo "3D CNN tests check finished" - echo "=== Checking synthetic log output ===" - ls -lh workspace/*/prod_00/client_A/logs || echo "No logs found for 3D CNN tests" + ./runIntegrationTests.sh - name: Run Unit Tests inside Docker continue-on-error: true @@ -78,9 +61,3 @@ jobs: ./runTestsInDocker.sh run_tests echo "=== [LOG CHECK] ===" docker logs $(docker ps -a -q --latest) | grep -i "error" && echo "Error found in logs" || echo "No error found" - - - name: Cleanup training artifacts - continue-on-error: true - run: | - ./runTestsInDocker.sh cleanup - echo "Cleanup finished" diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 86429103..7335860a 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -313,8 +313,7 @@ case "$1" in *) echo "Unknown argument: $1"; exit 1 ;; esac -# TODO adapt .github/workflows/pr-test.yaml - +# TODO # The following does not work yet. It should be included in "all", in ./assets/readme/README.developer.md and in .github/workflows/pr-test.yaml once it works. # # run_simulation_mode_in_docker () { From 58093875b3533e22d0134265555df00e760a86cf Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 9 Sep 2025 14:07:16 +0200 Subject: [PATCH 33/80] renamed expect script and moved it to more suitable location --- runIntegrationTests.sh | 2 +- .../integration_tests/_submitDummyTraining.exp | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename _testsOutsideDocker_submitDummyTraining.exp => tests/integration_tests/_submitDummyTraining.exp (100%) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 7335860a..f747f3e6 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -192,7 +192,7 @@ run_dummy_training_in_swarm () { cd "$PROJECT_DIR"/prod_00 cd admin@test.odelia/startup - "$CWD"/_testsOutsideDocker_submitDummyTraining.exp + "$CWD"/tests/integration_tests/_submitDummyTraining.exp docker kill fladmin sleep 60 cd "$CWD" diff --git a/_testsOutsideDocker_submitDummyTraining.exp b/tests/integration_tests/_submitDummyTraining.exp similarity index 100% rename from _testsOutsideDocker_submitDummyTraining.exp rename to tests/integration_tests/_submitDummyTraining.exp From 6a4f5e95c2f28fb168f250b0044b1636fa3d36aa Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 9 Sep 2025 14:11:02 +0200 Subject: [PATCH 34/80] removed step using script that no longer exists --- .github/workflows/pr-test.yaml | 7 ------- 1 file changed, 7 deletions(-) diff --git a/.github/workflows/pr-test.yaml b/.github/workflows/pr-test.yaml index 64c18cd1..8a93ba94 100644 --- a/.github/workflows/pr-test.yaml +++ b/.github/workflows/pr-test.yaml @@ -54,10 +54,3 @@ jobs: continue-on-error: false run: | ./runIntegrationTests.sh - - - name: Run Unit Tests inside Docker - continue-on-error: true - run: | - ./runTestsInDocker.sh run_tests - echo "=== [LOG CHECK] ===" - docker logs $(docker ps -a -q --latest) | grep -i "error" && echo "Error found in logs" || echo "No error found" From 1d5c43fd6fc88f8917a97502aabb95be880040a7 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 9 Sep 2025 14:34:56 +0200 Subject: [PATCH 35/80] trying to enable test of 3D CNN in simulation mode --- runIntegrationTests.sh | 37 ++++++++++--------- .../_run_3dcnn_simulation_mode.sh | 9 +++-- 2 files changed, 25 insertions(+), 21 deletions(-) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index f747f3e6..ad9bb283 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -45,7 +45,8 @@ _run_test_in_docker() { --ipc=host \ --ulimit memlock=-1 \ --ulimit stack=67108864 \ - -v /tmp:/scratch \ + -v "$SYNTHETIC_DATA_DIR":/data \ + -v "$SCRATCH_DIR":/scratch \ --gpus="$GPU_FOR_TESTING" \ --entrypoint=/MediSwarm/$1 \ "$DOCKER_IMAGE" @@ -83,7 +84,7 @@ create_startup_kits_and_check_contained_files () { if ! grep -q "127.0.0.1 server.local" /etc/hosts; then echo "/etc/hosts needs to contain the following line, please add it." - echo "127.0.0.1 server.local localhost" + echo "127.0.0.1 server.local" exit 1 fi @@ -167,6 +168,13 @@ run_data_access_preflight_check () { } +run_3dcnn_simulation_mode () { + # requires having built a startup kit and synthetic dataset + echo "[Run] Simulation mode of 3DCNN training in Docker" + _run_test_in_docker tests/integration_tests/_run_3dcnn_simulation_mode.sh +} + + start_server_and_clients () { echo "[Run] Start server and client Docker containers ..." @@ -272,6 +280,13 @@ case "$1" in cleanup_temporary_data ;; + run_3dcnn_simulation_mode) + create_startup_kits_and_check_contained_files + create_synthetic_data + run_3dcnn_simulation_mode + cleanup_temporary_data + ;; + run_docker_gpu_preflight_check) create_startup_kits_and_check_contained_files run_docker_gpu_preflight_check @@ -301,8 +316,9 @@ case "$1" in run_dummy_training_simulation_mode run_dummy_training_poc_mode # run_nvflare_unit_tests # uncomment to enable NVFlare unit tests - create_startup_kits_and_check_contained_files create_synthetic_data + run_3dcnn_simulation_mode + create_startup_kits_and_check_contained_files run_docker_gpu_preflight_check run_data_access_preflight_check start_server_and_clients @@ -314,17 +330,4 @@ case "$1" in esac # TODO -# The following does not work yet. It should be included in "all", in ./assets/readme/README.developer.md and in .github/workflows/pr-test.yaml once it works. -# -# run_simulation_mode_in_docker () { -# # requires having built a startup kit and synthetic dataset -# echo "[Run] Simulation mode of 3DCNN training in Docker" -# _run_test_in_docker tests/integration_tests/_run_3dcnn_simulation_mode.sh -# } -# -# run_simulation_mode_in_docker) -# create_startup_kits_and_check_contained_files -# create_synthetic_data -# run_simulation_mode_in_docker -# cleanup_temporary_data -# ;; +# Once the 3D CNN simulation mode works, it should be mentioned in ./assets/readme/README.developer.md. diff --git a/tests/integration_tests/_run_3dcnn_simulation_mode.sh b/tests/integration_tests/_run_3dcnn_simulation_mode.sh index 7fb7a877..030e855e 100755 --- a/tests/integration_tests/_run_3dcnn_simulation_mode.sh +++ b/tests/integration_tests/_run_3dcnn_simulation_mode.sh @@ -11,11 +11,12 @@ run_3dcnn_simulation_mode () { sed -i 's/num_rounds = .*/num_rounds = 2/' ${TMPDIR}/ODELIA_ternary_classification/app/config/config_fed_server.conf export TRAINING_MODE="swarm" export SITE_NAME="client_A" + export DATA_DIR=/data + export SCRATCH_DIR=/scratch + export TORCH_HOME=/torch_home + export MODEL_NAME=MST + export CONFIG=unilateral nvflare simulator -w /tmp/ODELIA_ternary_classification -n 2 -t 2 ${TMPDIR}/ODELIA_ternary_classification -c client_A,client_B - unset TRAINING_MODE - unset SITE_NAME - rm -rf ${TMPDIR} - unset TMPDIR } run_3dcnn_simulation_mode From efc53b3f90d7c4f6239efa7ddd3696ce10c61d0a Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 9 Sep 2025 15:21:43 +0200 Subject: [PATCH 36/80] moved check of name resolution to where it is needed --- runIntegrationTests.sh | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index ad9bb283..69cf1d1b 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -82,12 +82,6 @@ run_nvflare_unit_tests(){ create_startup_kits_and_check_contained_files () { echo "[Prepare] Startup kits for test project ..." - if ! grep -q "127.0.0.1 server.local" /etc/hosts; then - echo "/etc/hosts needs to contain the following line, please add it." - echo "127.0.0.1 server.local" - exit 1 - fi - if [ ! -d "$PROJECT_DIR"/prod_00 ]; then ./_buildStartupKits.sh $PROJECT_FILE $VERSION fi @@ -178,6 +172,12 @@ run_3dcnn_simulation_mode () { start_server_and_clients () { echo "[Run] Start server and client Docker containers ..." + if ! grep -q "127.0.0.1 server.local" /etc/hosts; then + echo "/etc/hosts needs to contain the following line, please add it." + echo "127.0.0.1 server.local" + exit 1 + fi + cd "$PROJECT_DIR"/prod_00 cd server.local/startup ./docker.sh --no_pull --start_server From ef9150e7cd95955d931980a37f170fbbc353b9b3 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 9 Sep 2025 15:22:48 +0200 Subject: [PATCH 37/80] removed unnecessary step --- runIntegrationTests.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 69cf1d1b..5b2e914b 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -281,7 +281,6 @@ case "$1" in ;; run_3dcnn_simulation_mode) - create_startup_kits_and_check_contained_files create_synthetic_data run_3dcnn_simulation_mode cleanup_temporary_data From e9117fa8cff4ce65a20df442be412272c76b0d09 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 9 Sep 2025 15:23:08 +0200 Subject: [PATCH 38/80] made tests that do not use the startup kits callable individually --- runIntegrationTests.sh | 21 ++++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 5b2e914b..900009b8 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -266,17 +266,23 @@ case "$1" in cleanup_temporary_data ;; - run_local_tests) + run_unit_tests_controller) run_unit_tests_controller + cleanup_temporary_data + ;; + + run_dummy_training_standalone) run_dummy_training_standalone + cleanup_temporary_data + ;; + + run_dummy_training_simulation_mode) run_dummy_training_simulation_mode - run_dummy_training_poc_mode - # run_nvflare_unit_tests # uncomment to enable NVFlare unit tests cleanup_temporary_data ;; - create_startup_kits) - create_startup_kits_and_check_contained_files + run_dummy_training_poc_mode) + run_dummy_training_poc_mode cleanup_temporary_data ;; @@ -286,6 +292,11 @@ case "$1" in cleanup_temporary_data ;; + create_startup_kits) + create_startup_kits_and_check_contained_files + cleanup_temporary_data + ;; + run_docker_gpu_preflight_check) create_startup_kits_and_check_contained_files run_docker_gpu_preflight_check From ebaba99452ebc4c184251951cf0f2879af4bb9bb Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 9 Sep 2025 15:23:31 +0200 Subject: [PATCH 39/80] call tests as separate steps in workflow --- .github/workflows/pr-test.yaml | 49 ++++++++++++++++++++++++++++++++-- 1 file changed, 47 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pr-test.yaml b/.github/workflows/pr-test.yaml index 8a93ba94..abe73027 100644 --- a/.github/workflows/pr-test.yaml +++ b/.github/workflows/pr-test.yaml @@ -50,7 +50,52 @@ jobs: - name: Build Docker image and dummy startup kits run: ./buildDockerImageAndStartupKits.sh -p tests/provision/dummy_project_for_testing.yml --use-docker-cache - - name: Run integration tests + - name: Run integration test (check_files_on_github) continue-on-error: false run: | - ./runIntegrationTests.sh + ./runIntegrationTests.sh check_files_on_github + + - name: Run integration test (run_unit_tests_controller) + continue-on-error: false + run: | + ./runIntegrationTests.sh run_unit_tests_controller + + - name: Run integration test (run_dummy_training_standalone) + continue-on-error: false + run: | + ./runIntegrationTests.sh run_dummy_training_standalone + + - name: Run integration test (run_dummy_training_simulation_mode) + continue-on-error: false + run: | + ./runIntegrationTests.sh run_dummy_training_simulation_mode + + - name: Run integration test (run_dummy_training_poc_mode) + continue-on-error: false + run: | + ./runIntegrationTests.sh run_dummy_training_poc_mode + + - name: Run integration test (run_3dcnn_simulation_mode) + continue-on-error: false + run: | + ./runIntegrationTests.sh run_3dcnn_simulation_mode + + - name: Run integration test (create_startup_kits) + continue-on-error: false + run: | + ./runIntegrationTests.sh create_startup_kits + + - name: Run integration test (run_docker_gpu_preflight_check) + continue-on-error: false + run: | + ./runIntegrationTests.sh run_docker_gpu_preflight_check + + - name: Run integration test (run_data_access_preflight_check) + continue-on-error: false + run: | + ./runIntegrationTests.sh run_data_access_preflight_check + + - name: Run integration test (run_dummy_training_in_swarm) + continue-on-error: false + run: | + ./runIntegrationTests.sh run_dummy_training_in_swarm From 1faeefbeefa5b7a1832942476d88e3fdc3e0a84d Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 9 Sep 2025 15:33:30 +0200 Subject: [PATCH 40/80] arguments for docker run like in docker.sh from startup scripts to create files with permission for local user --- runIntegrationTests.sh | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 900009b8..95a4eda6 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -45,6 +45,8 @@ _run_test_in_docker() { --ipc=host \ --ulimit memlock=-1 \ --ulimit stack=67108864 \ + -u $(id -u):$(id -g) \ + -v /etc/passwd:/etc/passwd -v /etc/group:/etc/group \ -v "$SYNTHETIC_DATA_DIR":/data \ -v "$SCRATCH_DIR":/scratch \ --gpus="$GPU_FOR_TESTING" \ @@ -117,11 +119,12 @@ create_startup_kits_and_check_contained_files () { create_synthetic_data () { echo "[Prepare] Synthetic data ..." docker run --rm \ - -u $(id -u):$(id -g) \ - -v "$SYNTHETIC_DATA_DIR":/synthetic_data \ - -w /MediSwarm \ - $DOCKER_IMAGE \ - /bin/bash -c "python3 application/jobs/ODELIA_ternary_classification/app/scripts/create_synthetic_dataset/create_synthetic_dataset.py /synthetic_data" + -u $(id -u):$(id -g) \ + -v /etc/passwd:/etc/passwd -v /etc/group:/etc/group \ + -v "$SYNTHETIC_DATA_DIR":/synthetic_data \ + -w /MediSwarm \ + $DOCKER_IMAGE \ + /bin/bash -c "python3 application/jobs/ODELIA_ternary_classification/app/scripts/create_synthetic_dataset/create_synthetic_dataset.py /synthetic_data" } From 2b89a8300ccf6c33c7f90d229c6f8c5ea7a46e65 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 9 Sep 2025 15:44:52 +0200 Subject: [PATCH 41/80] write coverage file to location outside code directory --- .../_run_controller_unit_tests_with_coverage.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/integration_tests/_run_controller_unit_tests_with_coverage.sh b/tests/integration_tests/_run_controller_unit_tests_with_coverage.sh index 3d3b87dd..46e6e11c 100755 --- a/tests/integration_tests/_run_controller_unit_tests_with_coverage.sh +++ b/tests/integration_tests/_run_controller_unit_tests_with_coverage.sh @@ -5,10 +5,11 @@ set -e run_controller_unit_tests_with_coverage () { # run unit tests of ODELIA swarm learning and report coverage export MPLCONFIGDIR=/tmp + export COVERAGE_FILE=/tmp/.MediSwarm_coverage cd /MediSwarm/tests/unit_tests/controller PYTHONPATH=/MediSwarm/controller/controller python3 -m coverage run --source=/MediSwarm/controller/controller -m unittest discover coverage report -m - rm .coverage + rm "$COVERAGE_FILE" } run_controller_unit_tests_with_coverage From b1d31029f0b22fadc9a23ab556b29183ab758bdf Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 9 Sep 2025 15:56:56 +0200 Subject: [PATCH 42/80] ensure directory exists --- .../_run_minimal_example_proof_of_concept_mode.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/integration_tests/_run_minimal_example_proof_of_concept_mode.sh b/tests/integration_tests/_run_minimal_example_proof_of_concept_mode.sh index 9e60b7fc..ee26a4d0 100755 --- a/tests/integration_tests/_run_minimal_example_proof_of_concept_mode.sh +++ b/tests/integration_tests/_run_minimal_example_proof_of_concept_mode.sh @@ -4,6 +4,7 @@ set -e run_minimal_example_proof_of_concept_mode () { # run proof-of-concept mode for minimal example + mkdir -p ~/.nvflare cd /MediSwarm export TRAINING_MODE="swarm" nvflare poc prepare -c poc_client_0 poc_client_1 From e0b69270e6414452d57881d4b50f19082aace3f3 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 9 Sep 2025 15:58:36 +0200 Subject: [PATCH 43/80] renamed server "localhost" so that it does not need mapping to an IP address --- runIntegrationTests.sh | 10 ++-------- tests/provision/dummy_project_for_testing.yml | 4 ++-- 2 files changed, 4 insertions(+), 10 deletions(-) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 95a4eda6..eec1d22c 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -175,14 +175,8 @@ run_3dcnn_simulation_mode () { start_server_and_clients () { echo "[Run] Start server and client Docker containers ..." - if ! grep -q "127.0.0.1 server.local" /etc/hosts; then - echo "/etc/hosts needs to contain the following line, please add it." - echo "127.0.0.1 server.local" - exit 1 - fi - cd "$PROJECT_DIR"/prod_00 - cd server.local/startup + cd localhost/startup ./docker.sh --no_pull --start_server cd ../.. sleep 10 @@ -208,7 +202,7 @@ run_dummy_training_in_swarm () { sleep 60 cd "$CWD" - cd "$PROJECT_DIR"/prod_00/server.local/startup + cd "$PROJECT_DIR"/prod_00/localhost/startup CONSOLE_OUTPUT=nohup.out for EXPECTED_OUTPUT in 'Total clients: 2' 'updated status of client client_A on round 4' 'updated status of client client_B on round 4' 'all_done=True' 'Server runner finished.'; do diff --git a/tests/provision/dummy_project_for_testing.yml b/tests/provision/dummy_project_for_testing.yml index d4984d77..613f81ce 100644 --- a/tests/provision/dummy_project_for_testing.yml +++ b/tests/provision/dummy_project_for_testing.yml @@ -4,7 +4,7 @@ description: > Test setup. participants: - - name: server.local + - name: localhost type: server org: Test_Org fed_learn_port: 8002 @@ -34,7 +34,7 @@ builders: path: nvflare.ha.dummy_overseer_agent.DummyOverseerAgent overseer_exists: false args: - sp_end_point: server.local:8002:8003 + sp_end_point: localhost:8002:8003 - path: nvflare.lighter.impl.cert.CertBuilder - path: nvflare.lighter.impl.signature.SignatureBuilder From fb60c8ec5de9c1029f0bb28f7d757e88f23455e0 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 9 Sep 2025 16:10:50 +0200 Subject: [PATCH 44/80] allow local user to create home directory --- docker_config/Dockerfile_ODELIA | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docker_config/Dockerfile_ODELIA b/docker_config/Dockerfile_ODELIA index a2e58d48..0eada436 100644 --- a/docker_config/Dockerfile_ODELIA +++ b/docker_config/Dockerfile_ODELIA @@ -343,3 +343,6 @@ RUN ln -s /MediSwarm /fl_admin/transfer/MediSwarm # Copy pre-trained model weights to image COPY ./torch_home_cache /torch_home + +# allow creating home directory for local user inside container if needed +RUN chmod a+rwx /home From ce492e16424002ea9b643dbba319f010d868a2b5 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 9 Sep 2025 16:34:05 +0200 Subject: [PATCH 45/80] avoid name clashes of Docker containers --- buildDockerImageAndStartupKits.sh | 4 +++- docker_config/master_template.yml | 8 ++++---- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/buildDockerImageAndStartupKits.sh b/buildDockerImageAndStartupKits.sh index 1767cfef..5c894a4f 100755 --- a/buildDockerImageAndStartupKits.sh +++ b/buildDockerImageAndStartupKits.sh @@ -26,7 +26,7 @@ fi VERSION=`./getVersionNumber.sh` DOCKER_IMAGE=jefftud/odelia:$VERSION - +CONTAINER_VERSION_ID=`git rev-parse --short HEAD` # prepare clean version of source code repository clone for building Docker image @@ -42,6 +42,8 @@ cd ../.. rm .git -rf chmod a+rX . -R sed -i 's#__REPLACED_BY_CURRENT_VERSION_NUMBER_WHEN_BUILDING_DOCKER_IMAGE__#'$VERSION'#' docker_config/master_template.yml +sed -i 's#__REPLACED_BY_CONTAINER_VERSION_IDENTIFIER_WHEN_BUILDING_DOCKER_IMAGE__#'$CONTAINER_VERSION_ID'#' docker_config/master_template.yml + cd $CWD diff --git a/docker_config/master_template.yml b/docker_config/master_template.yml index 0a2306db..0423403b 100644 --- a/docker_config/master_template.yml +++ b/docker_config/master_template.yml @@ -680,7 +680,7 @@ docker_cln_sh: | docker pull "$DOCKER_IMAGE" fi - CONTAINER_NAME=odelia_swarm_client_{~~client_name~~} + CONTAINER_NAME=odelia_swarm_client_{~~client_name~~}___REPLACED_BY_CURRENT_VERSION_NUMBER_WHEN_BUILDING_DOCKER_IMAGE__ DOCKER_OPTIONS_A="--name=$CONTAINER_NAME --gpus=$GPU2USE -u $(id -u):$(id -g)" DOCKER_MOUNTS="-v /etc/passwd:/etc/passwd -v /etc/group:/etc/group -v $DIR/..:/startupkit/ -v $MY_SCRATCH_DIR:/scratch/" if [[ ! -z "$MY_DATA_DIR" ]]; then @@ -697,7 +697,7 @@ docker_cln_sh: | --env GPU_DEVICE=$GPU2USE \ --env MODEL_NAME=MST \ --env CONFIG=unilateral \ - --env MEDISWARM_VERSION=__REPLACED_BY_CURRENT_VERSION_NUMBER_WHEN_BUILDING_DOCKER_IMAGE__" + --env MEDISWARM_VERSION=__REPLACED_BY_CONTAINER_VERSION_IDENTIFIER_WHEN_BUILDING_DOCKER_IMAGE__ # Execution modes if [[ ! -z "$DUMMY_TRAINING" ]]; then @@ -764,7 +764,7 @@ docker_svr_sh: | docker pull $DOCKER_IMAGE fi svr_name="${SVR_NAME:-flserver}" - CONTAINER_NAME=odelia_swarm_server_$svr_name + CONTAINER_NAME=odelia_swarm_server_${svr_name}___REPLACED_BY_CONTAINER_VERSION_IDENTIFIER_WHEN_BUILDING_DOCKER_IMAGE__ rm -rf ../pid.fl ../daemon_pid.fl # clean up potential leftovers from previous run @@ -811,7 +811,7 @@ docker_adm_sh: | echo "Updating docker image" docker pull $DOCKER_IMAGE fi - CONTAINER_NAME=odelia_swarm_admin + CONTAINER_NAME=odelia_swarm_admin___REPLACED_BY_CONTAINER_VERSION_IDENTIFIER_WHEN_BUILDING_DOCKER_IMAGE__ echo "Starting docker with $DOCKER_IMAGE as $CONTAINER_NAME" docker run --rm -it --name=fladmin -v $DIR/../local/:/fl_admin/local/ -v $DIR/../startup/:/fl_admin/startup/ -w /fl_admin/startup/ $NETARG $DOCKER_IMAGE /bin/bash -c "./fl_admin.sh" From 80c142826212904ca076761016e447efac23a7c0 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 9 Sep 2025 16:54:48 +0200 Subject: [PATCH 46/80] fixed replacement of version identifiers --- docker_config/master_template.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker_config/master_template.yml b/docker_config/master_template.yml index 0423403b..180eacf4 100644 --- a/docker_config/master_template.yml +++ b/docker_config/master_template.yml @@ -680,7 +680,7 @@ docker_cln_sh: | docker pull "$DOCKER_IMAGE" fi - CONTAINER_NAME=odelia_swarm_client_{~~client_name~~}___REPLACED_BY_CURRENT_VERSION_NUMBER_WHEN_BUILDING_DOCKER_IMAGE__ + CONTAINER_NAME=odelia_swarm_client_{~~client_name~~}___REPLACED_BY_CONTAINER_VERSION_IDENTIFIER_WHEN_BUILDING_DOCKER_IMAGE__ DOCKER_OPTIONS_A="--name=$CONTAINER_NAME --gpus=$GPU2USE -u $(id -u):$(id -g)" DOCKER_MOUNTS="-v /etc/passwd:/etc/passwd -v /etc/group:/etc/group -v $DIR/..:/startupkit/ -v $MY_SCRATCH_DIR:/scratch/" if [[ ! -z "$MY_DATA_DIR" ]]; then @@ -697,7 +697,7 @@ docker_cln_sh: | --env GPU_DEVICE=$GPU2USE \ --env MODEL_NAME=MST \ --env CONFIG=unilateral \ - --env MEDISWARM_VERSION=__REPLACED_BY_CONTAINER_VERSION_IDENTIFIER_WHEN_BUILDING_DOCKER_IMAGE__ + --env MEDISWARM_VERSION=__REPLACED_BY_CURRENT_VERSION_NUMBER_WHEN_BUILDING_DOCKER_IMAGE__ # Execution modes if [[ ! -z "$DUMMY_TRAINING" ]]; then From 89de533418ae1c3c7843ce89d57c7acf62b25bfd Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 10 Sep 2025 11:16:25 +0200 Subject: [PATCH 47/80] fixed missing closing " --- docker_config/master_template.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker_config/master_template.yml b/docker_config/master_template.yml index 180eacf4..440f0633 100644 --- a/docker_config/master_template.yml +++ b/docker_config/master_template.yml @@ -697,7 +697,7 @@ docker_cln_sh: | --env GPU_DEVICE=$GPU2USE \ --env MODEL_NAME=MST \ --env CONFIG=unilateral \ - --env MEDISWARM_VERSION=__REPLACED_BY_CURRENT_VERSION_NUMBER_WHEN_BUILDING_DOCKER_IMAGE__ + --env MEDISWARM_VERSION=__REPLACED_BY_CURRENT_VERSION_NUMBER_WHEN_BUILDING_DOCKER_IMAGE__" # Execution modes if [[ ! -z "$DUMMY_TRAINING" ]]; then From f1df3499ddf438da2060be823bf78a53c64f9c10 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 10 Sep 2025 13:55:14 +0200 Subject: [PATCH 48/80] wait longer so that sys_info sees both clients --- runIntegrationTests.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index eec1d22c..d21a32d1 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -186,7 +186,7 @@ start_server_and_clients () { cd ../.. cd client_B/startup ./docker.sh --no_pull --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir "$SCRATCH_DIR"/client_B --GPU device=$GPU_FOR_TESTING --start_client - sleep 5 + sleep 8 cd "$CWD" } From 450603e761410bbcb18960935cbee3ae9031ad88 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 10 Sep 2025 13:58:49 +0200 Subject: [PATCH 49/80] check that models for dummy training are small --- runIntegrationTests.sh | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index d21a32d1..d8f560ac 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -239,6 +239,15 @@ run_dummy_training_in_swarm () { exit 1 fi done + + actualsize=$(wc -c <*/app_client_A/best_FL_global_model.pt) + if [ $actualsize -le 1048576 ]; then + echo "Checkpoint file size OK" + else + echo "Checkpoint too large: " $actualsize + exit 1 + fi + cd "$CWD" } From 5af06b804a75369a8378b578bf87f6fb7091dcd7 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 10 Sep 2025 14:16:22 +0200 Subject: [PATCH 50/80] added check whether job ID is logged by server --- runIntegrationTests.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index d8f560ac..164e66b1 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -204,9 +204,10 @@ run_dummy_training_in_swarm () { cd "$PROJECT_DIR"/prod_00/localhost/startup CONSOLE_OUTPUT=nohup.out - for EXPECTED_OUTPUT in 'Total clients: 2' 'updated status of client client_A on round 4' 'updated status of client client_B on round 4' 'all_done=True' 'Server runner finished.'; + for EXPECTED_OUTPUT in 'Total clients: 2' 'updated status of client client_A on round 4' 'updated status of client client_B on round 4' 'all_done=True' 'Server runner finished.' \ + 'Start to the run Job: [0-9a-f]\{8\}-[0-9a-f]\{4\}-[0-9a-f]\{4\}-[0-9a-f]\{4\}-[0-9a-f]\{12\}'; do - if grep -q "$EXPECTED_OUTPUT" "$CONSOLE_OUTPUT"; then + if grep -q --regexp="$EXPECTED_OUTPUT" "$CONSOLE_OUTPUT"; then echo "Expected output $EXPECTED_OUTPUT found" else echo "Expected output $EXPECTED_OUTPUT missing" From 17d8d77dfca4b5897ecd988b34aa2642cb0f3c38 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 10 Sep 2025 14:30:30 +0200 Subject: [PATCH 51/80] use defined container name for container running admin console --- docker_config/master_template.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker_config/master_template.yml b/docker_config/master_template.yml index 440f0633..ec989a7b 100644 --- a/docker_config/master_template.yml +++ b/docker_config/master_template.yml @@ -814,7 +814,7 @@ docker_adm_sh: | CONTAINER_NAME=odelia_swarm_admin___REPLACED_BY_CONTAINER_VERSION_IDENTIFIER_WHEN_BUILDING_DOCKER_IMAGE__ echo "Starting docker with $DOCKER_IMAGE as $CONTAINER_NAME" - docker run --rm -it --name=fladmin -v $DIR/../local/:/fl_admin/local/ -v $DIR/../startup/:/fl_admin/startup/ -w /fl_admin/startup/ $NETARG $DOCKER_IMAGE /bin/bash -c "./fl_admin.sh" + docker run --rm -it --name=$CONTAINER_NAME -v $DIR/../local/:/fl_admin/local/ -v $DIR/../startup/:/fl_admin/startup/ -w /fl_admin/startup/ $NETARG $DOCKER_IMAGE /bin/bash -c "./fl_admin.sh" compose_yaml: | services: From 8d527d059dc9da39855b857386b9faf2a90a9014 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 10 Sep 2025 14:31:37 +0200 Subject: [PATCH 52/80] use correct container names in `docker kill` --- runIntegrationTests.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 164e66b1..30167298 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -3,6 +3,7 @@ set -e VERSION=$(./getVersionNumber.sh) +CONTAINER_VERSION_SUFFIX=$(git rev-parse --short HEAD) DOCKER_IMAGE=jefftud/odelia:$VERSION PROJECT_DIR="workspace/odelia_${VERSION}_dummy_project_for_testing" SYNTHETIC_DATA_DIR=$(mktemp -d) @@ -198,7 +199,7 @@ run_dummy_training_in_swarm () { cd "$PROJECT_DIR"/prod_00 cd admin@test.odelia/startup "$CWD"/tests/integration_tests/_submitDummyTraining.exp - docker kill fladmin + docker kill odelia_swarm_admin_$CONTAINER_VERSION_SUFFIX sleep 60 cd "$CWD" @@ -255,7 +256,7 @@ run_dummy_training_in_swarm () { kill_server_and_clients () { echo "[Cleanup] Kill server and client Docker containers ..." - docker kill odelia_swarm_server_flserver odelia_swarm_client_client_A odelia_swarm_client_client_B + docker kill odelia_swarm_server_flserver_$CONTAINER_VERSION_SUFFIX odelia_swarm_client_client_A_$CONTAINER_VERSION_SUFFIX odelia_swarm_client_client_B_$CONTAINER_VERSION_SUFFIX } From 40abbe6e0c7ba268d0b72e579e9f3cf340656a56 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 10 Sep 2025 14:56:08 +0200 Subject: [PATCH 53/80] updated instructions on building startup kits --- assets/readme/README.operator.md | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/assets/readme/README.operator.md b/assets/readme/README.operator.md index 101d5266..130629b7 100644 --- a/assets/readme/README.operator.md +++ b/assets/readme/README.operator.md @@ -24,12 +24,15 @@ For example, add the following line (replace `` with the server's actual IP ### Via Script (recommended) 1. Use, e.g., the file `application/provision/project_MEVIS_test.yml`, adapt as needed (network protocol etc.) -2. Call `buildStartupKits.sh /path/to/project_configuration.yml` to build the startup kits +2. Call `buildDockerImageAndStartupKits.sh -p /path/to/project_configuration.yml` to build the Docker image and the startup kits 3. Startup kits are generated to `workspace//prod_00/` -4. Deploy startup kits to the respective server/clients +4. Deploy startup kits to the respective server/client operators +5. Push the Docker image to the registry ### Via the Dashboard (not recommended) +Build the Docker image as described above. + ```bash docker run -d --rm \ --ipc=host -p 8443:8443 \ @@ -69,7 +72,7 @@ Access the dashboard at `https://localhost:8443` log in with the admin credentia 2. Client Sites > approve client sites 3. Project Home > freeze project -## Download startup kits +#### Download startup kits After setting up the project admin configuration, server and clients can download their startup kits. Store the passwords somewhere, they are only displayed once (or you can download them again). From 1c9bbc91c411c48f7c94e0bea0dc545454c0f38e Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 10 Sep 2025 14:56:17 +0200 Subject: [PATCH 54/80] check for keywords in documentation --- runIntegrationTests.sh | 44 +++++++++++++++++++++++++++++++++++++----- 1 file changed, 39 insertions(+), 5 deletions(-) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 30167298..23339c58 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -18,24 +18,59 @@ fi check_files_on_github () { echo "[Run] Test whether expected content is available on github" - CONTENT=$(curl -L https://github.com/KatherLab/MediSwarm/raw/refs/heads/main/LICENSE) - if echo "$CONTENT" | grep -q "MIT License" ; then + LICENSE_ON_GITHUB=$(curl -L https://github.com/KatherLab/MediSwarm/raw/refs/heads/main/LICENSE) + if echo "$LICENSE_ON_GITHUB" | grep -q "MIT License" ; then echo "Downloaded and verified license from github" else echo "Could not download and verify license" exit 1 fi - CONTENT=$(curl -L https://github.com/KatherLab/MediSwarm/raw/refs/heads/main/README.md) + MAIN_README=$(curl -L https://github.com/KatherLab/MediSwarm/raw/refs/heads/main/README.md) for ROLE in 'Swarm Participant' 'Developer' 'Swarm Operator'; do - if echo "$CONTENT" | grep -q "$ROLE" ; then + if echo "$MAIN_README" | grep -qie "$ROLE" ; then echo "Instructions for $ROLE found" else echo "Instructions for role $ROLE missing" exit 1 fi done + + PARTICIPANT_README=$(curl -L https://github.com/KatherLab/MediSwarm/raw/refs/heads/main/assets/readme/README.participant.md) + for EXPECTED_KEYWORDS in 'Prerequisites' 'RAM' 'Ubuntu' 'VPN' 'Prepare Dataset' './docker.sh' 'Local Training' 'Start Swarm Node'; + do + if echo "$PARTICIPANT_README" | grep -qie "$EXPECTED_KEYWORDS" ; then + echo "Instructions on $EXPECTED_KEYWORDS found" + else + echo "Instructions on $EXPECTED_KEYWORDS missing" + exit 1 + fi + done + + SWARM_OPERATOR_README=$(curl -L https://github.com/KatherLab/MediSwarm/raw/refs/heads/main/assets/readme/README.operator.md) + for EXPECTED_KEYWORDS in 'Create Startup Kits' 'Starting a Swarm Training'; + do + if echo "$SWARM_OPERATOR_README" | grep -qie "$EXPECTED_KEYWORDS" ; then + echo "Instructions on $EXPECTED_KEYWORDS found" + else + echo "Instructions on $EXPECTED_KEYWORDS missing" + exit 1 + fi + done + + APC_DEVELOPER_README=$(curl -L https://github.com/KatherLab/MediSwarm/raw/refs/heads/main/assets/readme/README.developer.md) + for EXPECTED_KEYWORDS in 'Contributing Application Code'; + do + if echo "$APC_DEVELOPER_README" | grep -qie "$EXPECTED_KEYWORDS" ; then + echo "Instructions on $EXPECTED_KEYWORDS found" + else + echo "Instructions on $EXPECTED_KEYWORDS missing" + exit 1 + fi + done + + } @@ -271,7 +306,6 @@ cleanup_temporary_data () { case "$1" in check_files_on_github) check_files_on_github - cleanup_temporary_data ;; run_unit_tests_controller) From 2d06b682bc48838c7363e9ede7716ba5aa1da141 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 10 Sep 2025 15:17:07 +0200 Subject: [PATCH 55/80] clean up temp dir in case more than this test is run in a container --- tests/integration_tests/_run_3dcnn_simulation_mode.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/integration_tests/_run_3dcnn_simulation_mode.sh b/tests/integration_tests/_run_3dcnn_simulation_mode.sh index 030e855e..a39da49d 100755 --- a/tests/integration_tests/_run_3dcnn_simulation_mode.sh +++ b/tests/integration_tests/_run_3dcnn_simulation_mode.sh @@ -17,6 +17,7 @@ run_3dcnn_simulation_mode () { export MODEL_NAME=MST export CONFIG=unilateral nvflare simulator -w /tmp/ODELIA_ternary_classification -n 2 -t 2 ${TMPDIR}/ODELIA_ternary_classification -c client_A,client_B + rm -rf ${TMPDIR} } run_3dcnn_simulation_mode From b38863c3b1a5a695b486b181a4a9cb7ace427311 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 10 Sep 2025 15:17:23 +0200 Subject: [PATCH 56/80] updated documentation of test output --- assets/readme/README.developer.md | 13 +++++++------ runIntegrationTests.sh | 5 ----- 2 files changed, 7 insertions(+), 11 deletions(-) diff --git a/assets/readme/README.developer.md b/assets/readme/README.developer.md index 0f789529..1215e574 100644 --- a/assets/readme/README.developer.md +++ b/assets/readme/README.developer.md @@ -33,12 +33,13 @@ The project description specifies the swarm nodes etc. to be used for a swarm tr You should see 1. several expected errors and warnings printed from unit tests that should succeed overall, and a coverage report -2. output of a successful simulation run with two nodes -3. output of a successful proof-of-concept run run with two nodes -4. output of a set of startup kits being generated -5. output of a Docker/GPU preflight check using one of the startup kits -6. output of a data access preflight check using one of the startup kits -7. output of a dummy training run in a swarm consisting of one server and two client nodes +2. output of a successful simulation run of a dummy training with two nodes +3. output of a successful proof-of-concept run of a dummy training with two nodes +4. output of a successful simulation run of a 3D CNN training using synthetic data with two nodes +5. output of a set of startup kits being generated +6. output of a Docker/GPU preflight check using one of the startup kits +7. output of a data access preflight check using one of the startup kits +8. output of a dummy training run in a swarm consisting of one server and two client nodes Optionally, uncomment running NVFlare unit tests. diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 23339c58..dc0add80 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -69,8 +69,6 @@ check_files_on_github () { exit 1 fi done - - } @@ -380,6 +378,3 @@ case "$1" in ;; *) echo "Unknown argument: $1"; exit 1 ;; esac - -# TODO -# Once the 3D CNN simulation mode works, it should be mentioned in ./assets/readme/README.developer.md. From 9ca51d3ce54a784f6feb850c516139bfbdd1821c Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Mon, 15 Sep 2025 14:21:42 +0200 Subject: [PATCH 57/80] check that aggregation and metrics are communicated --- runIntegrationTests.sh | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index dc0add80..8fac9cad 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -252,7 +252,7 @@ run_dummy_training_in_swarm () { cd "$PROJECT_DIR"/prod_00/client_A/startup CONSOLE_OUTPUT=nohup.out - for EXPECTED_OUTPUT in 'Sending training result to aggregation client' 'Epoch 9: 100%' ; + for EXPECTED_OUTPUT in 'Sending training result to aggregation client' 'Epoch 9: 100%' 'val/AUC_ROC'; do if grep -q "$EXPECTED_OUTPUT" "$CONSOLE_OUTPUT"; then echo "Expected output $EXPECTED_OUTPUT found" @@ -263,6 +263,16 @@ run_dummy_training_in_swarm () { done cd "$CWD" + for EXPECTED_OUTPUT in 'validation metric .* from client' 'aggregating [0-9]* update(s) at round [0-9]*'; + do + if grep -q --regexp="$EXPECTED_OUTPUT" "$PROJECT_DIR"/prod_00/client_?/startup/nohup.out; then + echo "Expected output $EXPECTED_OUTPUT found" + else + echo "Expected output $EXPECTED_OUTPUT missing" + exit 1 + fi + done + cd "$PROJECT_DIR"/prod_00/client_A/ FILES_PRESENT=$(find . -type f -name "*.*") for EXPECTED_FILE in 'custom/minimal_training.py' 'best_FL_global_model.pt' 'FL_global_model.pt' ; From b87f53489184da517c859bd9a0e7465836b7ea03 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Mon, 15 Sep 2025 14:40:55 +0200 Subject: [PATCH 58/80] check number of rounds --- runIntegrationTests.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 8fac9cad..ce0db11a 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -239,7 +239,7 @@ run_dummy_training_in_swarm () { cd "$PROJECT_DIR"/prod_00/localhost/startup CONSOLE_OUTPUT=nohup.out for EXPECTED_OUTPUT in 'Total clients: 2' 'updated status of client client_A on round 4' 'updated status of client client_B on round 4' 'all_done=True' 'Server runner finished.' \ - 'Start to the run Job: [0-9a-f]\{8\}-[0-9a-f]\{4\}-[0-9a-f]\{4\}-[0-9a-f]\{4\}-[0-9a-f]\{12\}'; + 'Start to the run Job: [0-9a-f]\{8\}-[0-9a-f]\{4\}-[0-9a-f]\{4\}-[0-9a-f]\{4\}-[0-9a-f]\{12\}' 'updated status of client client_B on round 4'; do if grep -q --regexp="$EXPECTED_OUTPUT" "$CONSOLE_OUTPUT"; then echo "Expected output $EXPECTED_OUTPUT found" From 1030d6cf92e54d451a0c103d16ce6d7293400379 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Mon, 15 Sep 2025 14:46:10 +0200 Subject: [PATCH 59/80] check that dummy training ApC is available --- runIntegrationTests.sh | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index ce0db11a..402dd4d8 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -69,6 +69,17 @@ check_files_on_github () { exit 1 fi done + + DUMMY_TRAINING_APC=$(curl -L https://raw.githubusercontent.com/KatherLab/MediSwarm/refs/heads/main/application/jobs/minimal_training_pytorch_cnn/app/custom/main.py) + for EXPECTED_KEYWORDS in 'python3'; + do + if echo "$DUMMY_TRAINING_APC" | grep -qie "$EXPECTED_KEYWORDS" ; then + echo "Dummy Training ApC: $EXPECTED_KEYWORDS found" + else + echo "Dummy Training ApC: $EXPECTED_KEYWORDS missing" + exit 1 + fi + done } From ce1207ee276a0fd16fffd6e0c0cc531e0d3b1662 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 16 Sep 2025 16:50:33 +0200 Subject: [PATCH 60/80] temporarily removed failing test from CI workflow --- .github/workflows/pr-test.yaml | 5 ----- runIntegrationTests.sh | 1 + 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/.github/workflows/pr-test.yaml b/.github/workflows/pr-test.yaml index abe73027..c97f16aa 100644 --- a/.github/workflows/pr-test.yaml +++ b/.github/workflows/pr-test.yaml @@ -94,8 +94,3 @@ jobs: continue-on-error: false run: | ./runIntegrationTests.sh run_data_access_preflight_check - - - name: Run integration test (run_dummy_training_in_swarm) - continue-on-error: false - run: | - ./runIntegrationTests.sh run_dummy_training_in_swarm diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 402dd4d8..49f07caf 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -378,6 +378,7 @@ case "$1" in run_dummy_training_in_swarm kill_server_and_clients cleanup_temporary_data + # TODO add to CI if we want this (currently not working) ;; all | "") From e0866197590378ae8502be4dcf2d5c664709baec Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 16 Sep 2025 16:59:30 +0200 Subject: [PATCH 61/80] test listing licenses --- .github/workflows/pr-test.yaml | 5 +++++ runIntegrationTests.sh | 25 +++++++++++++++++++++++++ 2 files changed, 30 insertions(+) diff --git a/.github/workflows/pr-test.yaml b/.github/workflows/pr-test.yaml index c97f16aa..0ffa7007 100644 --- a/.github/workflows/pr-test.yaml +++ b/.github/workflows/pr-test.yaml @@ -85,6 +85,11 @@ jobs: run: | ./runIntegrationTests.sh create_startup_kits + - name: Run license listing test (run_list_licenses) + continue-on-error: false + run: | + ./runIntegrationTests.sh run_list_licenses + - name: Run integration test (run_docker_gpu_preflight_check) continue-on-error: false run: | diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 49f07caf..25a1f05c 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -173,6 +173,25 @@ create_synthetic_data () { } +run_list_licenses () { + cd "$PROJECT_DIR"/prod_00 + cd localhost/startup + LICENSES_LISTED=$(./docker.sh --list_licenses --no_pull) + + for EXPECTED_KEYWORDS in 'scikit-learn' 'torch' 'nvflare_mediswarm' 'BSD License' 'MIT License'; + do + if echo "$LICENSES_LISTED" | grep -qie "$EXPECTED_KEYWORDS" ; then + echo "Instructions on $EXPECTED_KEYWORDS found" + else + echo "Instructions on $EXPECTED_KEYWORDS missing" + exit 1 + fi + done + + cd "$CWD" +} + + run_docker_gpu_preflight_check () { # requires having built a startup kit echo "[Run] Docker/GPU preflight check (local dummy training via startup kit) ..." @@ -358,6 +377,12 @@ case "$1" in cleanup_temporary_data ;; + run_list_licenses) + create_startup_kits_and_check_contained_files + run_list_licenses + cleanup_temporary_data + ;; + run_docker_gpu_preflight_check) create_startup_kits_and_check_contained_files run_docker_gpu_preflight_check From 79b1b0b7ae0bd283e7e34cdb97418f4120fde2e8 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 17 Sep 2025 11:33:32 +0200 Subject: [PATCH 62/80] Added test of pushing image to local registry (in separate Docker container) and pulling it from there. MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This required additional changes: * changed name of Docker image for testing to localhost:5000/…, which should also prevent accidental push * parse name of Docker image from swarm project description yml rather than use hard-coded name * extended "delete old image versions" script accordingly --- .github/workflows/pr-test.yaml | 5 ++ _buildStartupKits.sh | 8 ++-- buildDockerImageAndStartupKits.sh | 25 +++++----- runIntegrationTests.sh | 48 +++++++++++++++++-- .../remove_old_odelia_docker_images.sh | 5 +- tests/provision/dummy_project_for_testing.yml | 2 +- 6 files changed, 71 insertions(+), 22 deletions(-) diff --git a/.github/workflows/pr-test.yaml b/.github/workflows/pr-test.yaml index 0ffa7007..2f1f733c 100644 --- a/.github/workflows/pr-test.yaml +++ b/.github/workflows/pr-test.yaml @@ -99,3 +99,8 @@ jobs: continue-on-error: false run: | ./runIntegrationTests.sh run_data_access_preflight_check + + - name: Run integration test (push_pull_image) + continue-on-error: false + run: | + ./runIntegrationTests.sh push_pull_image \ No newline at end of file diff --git a/_buildStartupKits.sh b/_buildStartupKits.sh index 29755d27..94950376 100755 --- a/_buildStartupKits.sh +++ b/_buildStartupKits.sh @@ -2,15 +2,17 @@ set -euo pipefail -if [ "$#" -ne 2 ]; then - echo "Usage: _buildStartupKits.sh SWARM_PROJECT.yml VERSION_STRING" +if [ "$#" -ne 3 ]; then + echo "Usage: _buildStartupKits.sh SWARM_PROJECT.yml VERSION_STRING CONTAINER_NAME" exit 1 fi PROJECT_YML=$1 VERSION=$2 +CONTAINER_NAME=$3 sed -i 's#__REPLACED_BY_CURRENT_VERSION_NUMBER_WHEN_BUILDING_STARTUP_KITS__#'$VERSION'#' $PROJECT_YML + echo "Building startup kits for project $PROJECT_YML with version $VERSION" docker run --rm \ -u $(id -u):$(id -g) \ @@ -20,7 +22,7 @@ docker run --rm \ -w /workspace/ \ -e PROJECT_YML=$PROJECT_YML \ -e VERSION=$VERSION \ - jefftud/odelia:$VERSION \ + $CONTAINER_NAME \ /bin/bash -c "nvflare provision -p \$PROJECT_YML && ./_generateStartupKitArchives.sh \$PROJECT_YML \$VERSION"|| { echo "Docker run failed"; exit 1; } sed -i 's#'$VERSION'#__REPLACED_BY_CURRENT_VERSION_NUMBER_WHEN_BUILDING_STARTUP_KITS__#' $PROJECT_YML diff --git a/buildDockerImageAndStartupKits.sh b/buildDockerImageAndStartupKits.sh index 5c894a4f..654e63cd 100755 --- a/buildDockerImageAndStartupKits.sh +++ b/buildDockerImageAndStartupKits.sh @@ -25,7 +25,6 @@ if [ -z "$PROJECT_FILE" ]; then fi VERSION=`./getVersionNumber.sh` -DOCKER_IMAGE=jefftud/odelia:$VERSION CONTAINER_VERSION_ID=`git rev-parse --short HEAD` # prepare clean version of source code repository clone for building Docker image @@ -41,16 +40,15 @@ git clean -x -q -f . cd ../.. rm .git -rf chmod a+rX . -R + +# replacements in copy of source code sed -i 's#__REPLACED_BY_CURRENT_VERSION_NUMBER_WHEN_BUILDING_DOCKER_IMAGE__#'$VERSION'#' docker_config/master_template.yml sed -i 's#__REPLACED_BY_CONTAINER_VERSION_IDENTIFIER_WHEN_BUILDING_DOCKER_IMAGE__#'$CONTAINER_VERSION_ID'#' docker_config/master_template.yml -cd $CWD - - # prepare pre-trained model weights for being included in Docker image -MODEL_WEIGHTS_FILE='docker_config/torch_home_cache/hub/checkpoints/dinov2_vits14_pretrain.pth' -MODEL_LICENSE_FILE='docker_config/torch_home_cache/hub/facebookresearch_dinov2_main/LICENSE' +MODEL_WEIGHTS_FILE=$CWD'/docker_config/torch_home_cache/hub/checkpoints/dinov2_vits14_pretrain.pth' +MODEL_LICENSE_FILE=$CWD'/docker_config/torch_home_cache/hub/facebookresearch_dinov2_main/LICENSE' if [[ ! -f $MODEL_WEIGHTS_FILE || ! -f $MODEL_LICENSE_FILE ]]; then echo "Pre-trained model not available. Attempting download" HUBDIR=$(dirname $(dirname $MODEL_LICENSE_FILE)) @@ -63,22 +61,25 @@ if [[ ! -f $MODEL_WEIGHTS_FILE || ! -f $MODEL_LICENSE_FILE ]]; then fi if echo 2e405cee1bad14912278296d4f42e993 $MODEL_WEIGHTS_FILE | md5sum --check - && echo 153d2db1c329326a2d9f881317ea942e $MODEL_LICENSE_FILE | md5sum --check -; then - cp -r ./docker_config/torch_home_cache $CLEAN_SOURCE_DIR/torch_home_cache + cp -r $CWD/docker_config/torch_home_cache $CLEAN_SOURCE_DIR/torch_home_cache else exit 1 fi chmod a+rX $CLEAN_SOURCE_DIR/torch_home_cache -R +cd $CWD # build and print follow-up steps +CONTAINER_NAME=`grep " docker_image: " $PROJECT_FILE | sed 's/ docker_image: //' | sed 's#__REPLACED_BY_CURRENT_VERSION_NUMBER_WHEN_BUILDING_STARTUP_KITS__#'$VERSION'#'` +echo $CONTAINER_NAME -docker build $DOCKER_BUILD_ARGS -t $DOCKER_IMAGE $CLEAN_SOURCE_DIR -f docker_config/Dockerfile_ODELIA +docker build $DOCKER_BUILD_ARGS -t $CONTAINER_NAME $CLEAN_SOURCE_DIR -f docker_config/Dockerfile_ODELIA -echo "Docker image $DOCKER_IMAGE built successfully" -echo "./_buildStartupKits.sh $PROJECT_FILE $VERSION" -./_buildStartupKits.sh $PROJECT_FILE $VERSION +echo "Docker image $CONTAINER_NAME built successfully" +echo "./_buildStartupKits.sh $PROJECT_FILE $VERSION $CONTAINER_NAME" +./_buildStartupKits.sh $PROJECT_FILE $VERSION $CONTAINER_NAME echo "Startup kits built successfully" rm -rf $CLEAN_SOURCE_DIR -echo "If you wish, manually push $DOCKER_IMAGE now" +echo "If you wish, manually push $CONTAINER_NAME now" diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 25a1f05c..f8e68533 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -4,7 +4,7 @@ set -e VERSION=$(./getVersionNumber.sh) CONTAINER_VERSION_SUFFIX=$(git rev-parse --short HEAD) -DOCKER_IMAGE=jefftud/odelia:$VERSION +DOCKER_IMAGE=localhost:5000/odelia:$VERSION PROJECT_DIR="workspace/odelia_${VERSION}_dummy_project_for_testing" SYNTHETIC_DATA_DIR=$(mktemp -d) SCRATCH_DIR=$(mktemp -d) @@ -130,13 +130,13 @@ create_startup_kits_and_check_contained_files () { echo "[Prepare] Startup kits for test project ..." if [ ! -d "$PROJECT_DIR"/prod_00 ]; then - ./_buildStartupKits.sh $PROJECT_FILE $VERSION + ./_buildStartupKits.sh $PROJECT_FILE $VERSION $DOCKER_IMAGE fi if [ -d "$PROJECT_DIR"/prod_01 ]; then - echo '"$PROJECT_DIR"/prod_01 exists, please remove/rename it' + echo '$PROJECT_DIR/prod_01 exists, please remove/rename it' exit 1 fi - ./_buildStartupKits.sh $PROJECT_FILE $VERSION + ./_buildStartupKits.sh $PROJECT_FILE $VERSION $DOCKER_IMAGE for FILE in 'client.crt' 'client.key' 'docker.sh' 'rootCA.pem'; do @@ -256,6 +256,35 @@ start_server_and_clients () { } +start_registry_docker_and_push () { + docker run -d --rm -p 5000:5000 --name local_test_registry_$CONTAINER_VERSION_SUFFIX registry:3 + sleep 3 + docker push localhost:5000/odelia:$VERSION +} + + +run_container_with_pulling () { + docker rmi localhost:5000/odelia:$VERSION + cd "$PROJECT_DIR"/prod_00 + cd localhost/startup + OUTPUT=$(./docker.sh --list_licenses) + + if echo "$OUTPUT" | grep -qie "Status: Downloaded newer image for localhost:5000/odelia:$VERSION" ; then + echo "Image pulled successfully" + else + echo "Instructions on $EXPECTED_KEYWORDS missing" + exit 1 + fi + + cd "$CWD" +} + + +kill_registry_docker () { + docker kill local_test_registry_$CONTAINER_VERSION_SUFFIX +} + + run_dummy_training_in_swarm () { echo "[Run] Dummy training in swarm ..." @@ -396,6 +425,13 @@ case "$1" in cleanup_temporary_data ;; + push_pull_image) + create_startup_kits_and_check_contained_files + start_registry_docker_and_push + run_container_with_pulling + kill_registry_docker + ;; + run_dummy_training_in_swarm) create_startup_kits_and_check_contained_files create_synthetic_data @@ -416,6 +452,9 @@ case "$1" in create_synthetic_data run_3dcnn_simulation_mode create_startup_kits_and_check_contained_files + start_registry_docker_and_push + run_container_with_pulling + kill_registry_docker run_docker_gpu_preflight_check run_data_access_preflight_check start_server_and_clients @@ -423,5 +462,6 @@ case "$1" in kill_server_and_clients cleanup_temporary_data ;; + *) echo "Unknown argument: $1"; exit 1 ;; esac diff --git a/scripts/dev_utils/remove_old_odelia_docker_images.sh b/scripts/dev_utils/remove_old_odelia_docker_images.sh index 7da4ee25..5f25f6d3 100755 --- a/scripts/dev_utils/remove_old_odelia_docker_images.sh +++ b/scripts/dev_utils/remove_old_odelia_docker_images.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash export OLD_ODELIA_DOCKER_IMAGES=$(docker image list | grep jefftud/odelia | sed 's|jefftud/odelia *[0-9a-z.-]* *||' | sed 's| *.*||' | tail -n +2) +export OLD_ODELIA_DOCKER_IMAGES_LOCAL=$(docker image list | grep localhost:5000/odelia | sed 's|localhost:5000/odelia *[0-9a-z.-]* *||' | sed 's| *.*||' | tail -n +2) echo "All docker images:" @@ -8,12 +9,12 @@ docker image list echo "The following Docker images are old ODELIA docker images:" -echo "$OLD_ODELIA_DOCKER_IMAGES" +echo "$OLD_ODELIA_DOCKER_IMAGES" "$OLD_ODELIA_DOCKER_IMAGES_LOCAL" read -p "Delete these Docker images, unless they have additional tags? (y/n): " answer if [[ "$answer" == "y" ]]; then - for image in $OLD_ODELIA_DOCKER_IMAGES; do + for image in $OLD_ODELIA_DOCKER_IMAGES $OLD_ODELIA_DOCKER_IMAGES_LOCAL; do docker rmi $image done fi diff --git a/tests/provision/dummy_project_for_testing.yml b/tests/provision/dummy_project_for_testing.yml index 613f81ce..5e658c78 100644 --- a/tests/provision/dummy_project_for_testing.yml +++ b/tests/provision/dummy_project_for_testing.yml @@ -29,7 +29,7 @@ builders: args: config_folder: config scheme: http - docker_image: jefftud/odelia:__REPLACED_BY_CURRENT_VERSION_NUMBER_WHEN_BUILDING_STARTUP_KITS__ + docker_image: localhost:5000/odelia:__REPLACED_BY_CURRENT_VERSION_NUMBER_WHEN_BUILDING_STARTUP_KITS__ overseer_agent: path: nvflare.ha.dummy_overseer_agent.DummyOverseerAgent overseer_exists: false From 90794d8cef100154e292749337566424b7f996d5 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 17 Sep 2025 14:29:40 +0200 Subject: [PATCH 63/80] removed lengthy test step that does not provide much value from CI pipeline --- .github/workflows/pr-test.yaml | 5 ----- runIntegrationTests.sh | 1 + 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/.github/workflows/pr-test.yaml b/.github/workflows/pr-test.yaml index 2f1f733c..0ffa7007 100644 --- a/.github/workflows/pr-test.yaml +++ b/.github/workflows/pr-test.yaml @@ -99,8 +99,3 @@ jobs: continue-on-error: false run: | ./runIntegrationTests.sh run_data_access_preflight_check - - - name: Run integration test (push_pull_image) - continue-on-error: false - run: | - ./runIntegrationTests.sh push_pull_image \ No newline at end of file diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index f8e68533..e63bf993 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -430,6 +430,7 @@ case "$1" in start_registry_docker_and_push run_container_with_pulling kill_registry_docker + # TODO add to CI if we want this (takes several minutes) ;; run_dummy_training_in_swarm) From 827872055eff0f237eda7854231fb88eef24107f Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 17 Sep 2025 14:29:08 +0200 Subject: [PATCH 64/80] more speaking names of the CI test steps --- .github/workflows/pr-test.yaml | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/pr-test.yaml b/.github/workflows/pr-test.yaml index 0ffa7007..c54b5ce9 100644 --- a/.github/workflows/pr-test.yaml +++ b/.github/workflows/pr-test.yaml @@ -50,52 +50,52 @@ jobs: - name: Build Docker image and dummy startup kits run: ./buildDockerImageAndStartupKits.sh -p tests/provision/dummy_project_for_testing.yml --use-docker-cache - - name: Run integration test (check_files_on_github) + - name: Run integration test: check documentation on github continue-on-error: false run: | ./runIntegrationTests.sh check_files_on_github - - name: Run integration test (run_unit_tests_controller) + - name: Run controller unit tests continue-on-error: false run: | ./runIntegrationTests.sh run_unit_tests_controller - - name: Run integration test (run_dummy_training_standalone) + - name: Run dummy training standalone continue-on-error: false run: | ./runIntegrationTests.sh run_dummy_training_standalone - - name: Run integration test (run_dummy_training_simulation_mode) + - name: Run dummy training in simulation mode continue-on-error: false run: | ./runIntegrationTests.sh run_dummy_training_simulation_mode - - name: Run integration test (run_dummy_training_poc_mode) + - name: Run dummy training in proof-of-concept mode continue-on-error: false run: | ./runIntegrationTests.sh run_dummy_training_poc_mode - - name: Run integration test (run_3dcnn_simulation_mode) + - name: Run 3DCNN training in simulation mode continue-on-error: false run: | ./runIntegrationTests.sh run_3dcnn_simulation_mode - - name: Run integration test (create_startup_kits) + - name: Run integration test: creating startup kits continue-on-error: false run: | ./runIntegrationTests.sh create_startup_kits - - name: Run license listing test (run_list_licenses) + - name: Run intergration test: listing licenses continue-on-error: false run: | ./runIntegrationTests.sh run_list_licenses - - name: Run integration test (run_docker_gpu_preflight_check) + - name: Run integration test: Docker/GPU preflight check continue-on-error: false run: | ./runIntegrationTests.sh run_docker_gpu_preflight_check - - name: Run integration test (run_data_access_preflight_check) + - name: Run integration test: Data access preflight check continue-on-error: false run: | ./runIntegrationTests.sh run_data_access_preflight_check From f4470b9f94d019fb9f283d48aa71a7fbf406d88b Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 17 Sep 2025 14:35:55 +0200 Subject: [PATCH 65/80] fixed syntax of workflow --- .github/workflows/pr-test.yaml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/pr-test.yaml b/.github/workflows/pr-test.yaml index c54b5ce9..950064f2 100644 --- a/.github/workflows/pr-test.yaml +++ b/.github/workflows/pr-test.yaml @@ -50,7 +50,7 @@ jobs: - name: Build Docker image and dummy startup kits run: ./buildDockerImageAndStartupKits.sh -p tests/provision/dummy_project_for_testing.yml --use-docker-cache - - name: Run integration test: check documentation on github + - name: Run integration test checking documentation on github continue-on-error: false run: | ./runIntegrationTests.sh check_files_on_github @@ -80,22 +80,22 @@ jobs: run: | ./runIntegrationTests.sh run_3dcnn_simulation_mode - - name: Run integration test: creating startup kits + - name: Run integration test creating startup kits continue-on-error: false run: | ./runIntegrationTests.sh create_startup_kits - - name: Run intergration test: listing licenses + - name: Run intergration test listing licenses continue-on-error: false run: | ./runIntegrationTests.sh run_list_licenses - - name: Run integration test: Docker/GPU preflight check + - name: Run integration test Docker GPU preflight check continue-on-error: false run: | ./runIntegrationTests.sh run_docker_gpu_preflight_check - - name: Run integration test: Data access preflight check + - name: Run integration test Data access preflight check continue-on-error: false run: | ./runIntegrationTests.sh run_data_access_preflight_check From 095c1b763769b6841c5807e6d9a9577521d6adf0 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 17 Sep 2025 15:01:43 +0200 Subject: [PATCH 66/80] do not need -it for listing licenses --- docker_config/master_template.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker_config/master_template.yml b/docker_config/master_template.yml index ec989a7b..42e81588 100644 --- a/docker_config/master_template.yml +++ b/docker_config/master_template.yml @@ -776,7 +776,7 @@ docker_svr_sh: | --ipc=host $NETARG $DOCKER_IMAGE \ /bin/bash -c "nohup ./start.sh >> nohup.out 2>&1 && chmod a+r nohup.out && /bin/bash" elif [ ! -z "$LIST_LICENSES" ]; then - docker run -it --rm --name=$CONTAINER_NAME \ + docker run --rm --name=$CONTAINER_NAME \ $DOCKER_IMAGE \ /bin/bash -c "pip-licenses -s -u --order=license" elif [ ! -z "$INTERACTIVE" ]; then From 02ff2848adb34e11e1ae10caeb68579a5c7bb965 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Fri, 19 Sep 2025 13:33:38 +0200 Subject: [PATCH 67/80] implemented test that client with incorrect startup kit cannot connect --- runIntegrationTests.sh | 54 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 54 insertions(+) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index e63bf993..1dc9dc3a 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -285,6 +285,51 @@ kill_registry_docker () { } +verify_wrong_client_does_not_connect () { + echo "[Run] Verify that client with outdated startup kit does not connect ..." + + cp -r "$PROJECT_DIR"/prod_01 "$PROJECT_DIR"/prod_wrong_client + cd "$PROJECT_DIR"/prod_wrong_client + cd localhost/startup + ./docker.sh --no_pull --start_server + cd ../.. + sleep 10 + + rm client_A -rf + tar xvf "$CWD"/tests/integration_tests/outdated_startup_kit.tar.gz + sed -i 's#DOCKER_IMAGE=localhost:5000/odelia:1.0.1-dev.250919.095c1b7#DOCKER_IMAGE='$DOCKER_IMAGE'#' client_A/startup/docker.sh + sed -i 's#CONTAINER_NAME=odelia_swarm_client_client_A_095c1b7#CONTAINER_NAME=odelia_swarm_client_client_A_'$CONTAINER_VERSION_SUFFIX'#' client_A/startup/docker.sh + + cd client_A/startup + ./docker.sh --no_pull --data_dir "$SYNTHETIC_DATA_DIR" --scratch_dir "$SCRATCH_DIR"/client_A --GPU device=$GPU_FOR_TESTING --start_client + cd ../.. + + sleep 20 + + CONSOLE_OUTPUT_SERVER=localhost/startup/nohup.out + CONSOLE_OUTPUT_CLIENT=client_A/startup/nohup.out + + if grep -q "Total clients: 1" $CONSOLE_OUTPUT_SERVER; then + echo "Connection with non-authorized client" + exit 1 + else + echo "Connection rejected successfully by server" + fi + + if grep -q "SSLCertVerificationError" $CONSOLE_OUTPUT_CLIENT; then + echo "Connection rejected successfully by client" + else + echo "Could not verify that connection was rejected" + exit 1 + fi + + docker kill odelia_swarm_server_flserver_$CONTAINER_VERSION_SUFFIX odelia_swarm_client_client_A_$CONTAINER_VERSION_SUFFIX + rm -rf "$PROJECT_DIR"/prod_wrong_client + + cd "$CWD" +} + + run_dummy_training_in_swarm () { echo "[Run] Dummy training in swarm ..." @@ -433,6 +478,14 @@ case "$1" in # TODO add to CI if we want this (takes several minutes) ;; + check_wrong_startup_kit) + create_startup_kits_and_check_contained_files + create_synthetic_data + verify_wrong_client_does_not_connect + cleanup_temporary_data + # TODO add to CI if we want this + ;; + run_dummy_training_in_swarm) create_startup_kits_and_check_contained_files create_synthetic_data @@ -459,6 +512,7 @@ case "$1" in run_docker_gpu_preflight_check run_data_access_preflight_check start_server_and_clients + verify_wrong_client_does_not_connect run_dummy_training_in_swarm kill_server_and_clients cleanup_temporary_data From 71cc56702ad0d069d17a9ec6eca68fc0c0cc1c8a Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 23 Sep 2025 14:51:13 +0200 Subject: [PATCH 68/80] added file forgotten in 02ff2848adb34e11e1ae10caeb68579a5c7bb965 --- .../outdated_startup_kit.tar.gz | Bin 0 -> 9917 bytes 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 tests/integration_tests/outdated_startup_kit.tar.gz diff --git a/tests/integration_tests/outdated_startup_kit.tar.gz b/tests/integration_tests/outdated_startup_kit.tar.gz new file mode 100644 index 0000000000000000000000000000000000000000..ba3a984ec77afe5d8cfe1dd3747b3d0b57306e39 GIT binary patch literal 9917 zcma)fMN}LLpeqz8?(W6i-QBIo;10#LxVsm38>F}tEACL-eSi*5ad#M)cmLnNb6!@- z$uf&1lnF>M{~vJYhJMML^36}%uXum`$#7NtH9hEnH01;wV} zggiMrux}~>Q=|6fu8jv`ttkO3{WX`reo=^ENE&XVFA#>1TcqgcT>=GF_9-POPY~3t z(3H&KWzsk}3ySqm5u~9}!eW}*E4p!CIGQIPjZ!`=x;a*$Y7@b~Lre_q55sRc+#)4h zLs0sH+oA2n3~$*3-*McGicTBYfAVn3Xogsa6wn{fU1&7jqr$(P=0PytCEB49{j$Bt zZj$yZ15&i+nf59Ef6>m5iOI7|wf0(b7 z#s!vJ*qS~ap03PW#$b~+4#wpS1Khi-#;Jsq#u;?QH6PLt3kQvOr?kzzdiyW?ZBRc- z&vH#ZjH2F2yzc$qFMjvqH?P$8=G0#oLppYWMB8 zatztDnF;Zxme|SmP z{+-T6Q%kmgDd2@qaZv4GzUN)2aXph~MsVTD3kw3}O^E|sm<$)?cfXmPm%(E3xDyjk zS_+wwH8xEOvd~fBl@{mk+Ogjl8fJ(}B{d$i)?0ZCr7=w8n4~4Dzrp*vlvmRiBHroz zQMthh=<;?L+#_3wmxAGZh)0n0Ax=`_1x9V~{?MR3WLHd7oJCa10ipIuy*XxP*esb3 zv7#X#m+RWKc&6%2=2a@I;#OJ)7$#>Xb`7mHg|d65A3muH#&4*OoPDLt1LP#T)d

kY*LoZpd8U&ZboWufHzEiX3n*9?3%7-(^Ri)67m|NEo_(^&_}VJUJ4M8yQAKF z-yMi^)N}DY@eZZ5DJ?6}H3Ux3T;{L$R6`aQ&wl7aWoCSy*9H==`xJsPoE zVq;ndcu+ba%Na?wr|)D;*Pt&IuV4lFPOPZ?&GDLGfiuo`2$5mSXl*1^eD+YYdXu?j zODL~ayGM*JGTvjPYQ$$O$D`>lJjOY8cIpn;>YePtOK~nLYUVE5yi~zTIlpTMgCuPp zMJ}U#YB2I4=}3z1yv|OYBkl%h`T6x=+4bW1*kryh1wmm(pZ{D`>Be5M3Pp+`EbrR6 zzM2M@$>SkgN1j~gCzW$tnd@<`as3ClNwh{X%(jo2>~YvEknN>fZQdsqduLVG=?BaG zyLI6DZ*;(3qh?y1-n4C=2Z0~6;oC=3+`l0I#-hBp2u+pQGD;$&x6u8?D;L0 z8yhD$tQBX-{!;H&5+bc2U)Ro2!40%G6`4uCT743$U{U;yar5ZgJT#gDQdK5g+OiqA zOroyta;>=|l}!mtZgG5IHw(@8$H7{O+hZ^+ z3wZ#W@^6u!sNKsCLFAV;`}67=L6y7HO@iAe*%Xg^yFMCbm2_h74B9Jg4-733a~==!}D#rGDq4WBn>KLTemV2N)XQ0i z_+})+_5}~Tyojz+^)iS~+{+s4#I5Xz(n=$=X8Np${smx>xhtugwQh_fz6bFIx8nIplUvl4P=n)5A+ zS%=nv_XNd4%0G9SH!18=?@P5!zc%@I_%bmtuXn{}Z?WfISOxQbP1q1pe@qM}aw1+j zqS5A~Zss~Qp+8r1?AIE_RegN$kJ-CfEy~@TvCkwp3(nx~v2!f6K5&e50mj zqw)yze>WwgPzErSsQa)0?sjcZ8Q#PeH5jfij)@q9f;)2my=%MZ5b~LajsIp;%=@sU zDmYl_5}IBL>e!(+KtZWj%^?vUsR_xATaNmR);2>Sz(5y+;Czt=0Cj+V*^G5`{pCoq zs9}=NNyG#sxF|uIyHX-Pq=u8sD5_j z%aF}Ox|wY@tONNGE^2u#e#2gm4ECg;Cr=eA{yY~ zV*bHaqls%g$DcpaAq1Rl-l-$Cl3}%;BjLT7$F4-{B;39HVu9_xrH+XJvd^tDznhsS zu1^l7;3^^HjTCD}GZ0&6m;ISRn4z8lXz+iRMgb<#hGOZnZHbj^+L zFOk46E%3qyEXbWjL?i_&K18Axf%6NioI_VZ^hSP?H!tBv#Tw2_r;xLaGv=&vn8@5#Vv;5ogB^uTR% z!o4`8-Rf0Bre6#yMvmB*_k*JV>K^_9fy`kMeY``S*p5R!!uSDk6ybOXC)h&zncKLs z5AQQmda)N@z{ALsj%}C!4mluS^TGCKiNYk%$al!@XTP8b2&&frwpzZRd~C>jD2)Up z^q?Nj(SP8P=Rm$sqmb{~m6|Fm9o{}0QrAr6x!zY?LlUYJtSw@YFJ6Io!K&1CZ;_3` z+Cr9bLwq87Crc4G)wJ*$TGZ0@Q~VSTS6x9og!621t6p%eQC z+eoUNCHFETry1}*pAfP5>DdYf)mHH885;k zE$Z~uq-7wvB0|}F`n0Yo=z#8?>i&@+sK#^O@giAbNa6MDbS>oL7e;MeFv;YM72;Qe z$8Dza*Y?uwI%p9cQ(yECjsqws0Tl9)JhFcE((fwWKVaYuSLtoJ;^U@vdTDO%rr2{5 zf0bR05)Ab$Oz1Zbv5;#g_oTA^7b@6zd2Mt~5Q6FxX4H}HA?TCf{yUKpBvPrAb_X6d znIX^kt~VS-r`Ve>kX$EB90qm|IM~A95bwy|S@R0M`+8II^!#=Q=ndcqI`!+N4(Leh z42X4?i1*zV73eZ8>3yTMztOz7W83W3P!DSyiK+ct2Zs=PJ9{ghT4eO_fRs)F1~CuT zyMnjiuI*!tN?42~GSC}DDgGFh>jg#rQOn#8Tkh=Mf-}Wb0Nq&O8`7L?>kF%zl5t zpU&>8$<%n|YFmrw^Kf--^7gtx`fB6e$5f(OZ{KX+fL!n^vfs-o{_SlE&n}2&Ls0Acw?BQqKd7 zbO`eYRiw!90PhLR@#WA|ozMVLH|o#)W#PXun{!a$S0liEa?)jX3b)i&mqyk&9AdOn zKjElYrr7bld#Y6!t;*G<$o82R%IVmk_l`tT%hM?!_}4otU1~e}Ex!~6;Y|71Lzre}Ur6M#OW>)gSQudyzYX%@mNl^@x zi_VIa-`e5A2(v8!z3xwJlmy#}DTjtJQ2j0*v)>>96&deQVmvl=ToEr;qa0sO738AjEzxg3n7r6yR`FQt?36xgvWYjx-6cB{70f4CCINCCkP z@7#l?1*PPIrpO1^WQs$yGVHO)QJrxOLf%78F1#W}4q^^%q-hh9^VquV_OIQu&Q8;- zVriEjSG)XNmQ45iwhq*9_^R&VQ#cFnEDvvqDpUft^pR6}D9J>DxQZqE6hbcyOTc&c zPuXTow82NxzSW+DH37p@aisJ*e8cc?WG?^{)piFvgj|1RT zt?@#lz6PV72;o03YV7G@z@hNO7;|6eQUM8XHfah@!5A=ID}yM~DX#sg@9W|llrKHk z&?2b?0n&Zc=YGTLHJ5j&a?49L6ec$#3u^lPv;R%3Pkes@Znu z+<+(g?yq(RdY;n^`Y|BUv~Li>dV51B@)ioB%=GP;iorrK91kx&(pc2ps&=oCKG_1W7tz^Go@~Kxp!{LV~^O$ z%Tk_p+|ibF3PMF|O81yUWnRT|0r_ex^CCxpvt|8$^b&T~-9mGx@$a?AK3o9Uv~gG- zq??ygJ4|*(0%1P@__bEICXeY6L9Ryu zkO_!9`r5AQA&NWc<^Y0BsiuKM%ZaeMn{WgNVk)~<1=rfn+nH|2>Hv*0`}iTmtX4y~ zsPEZ8yQK2X-1z2P>7Dy&G0Op0y1W~DW7~4wrB;73vO*<~p_6(f*V7FLVq#+@U_gB< z9>=#PF&DN^mXm5Rv5i?n8%}V{jo^U`koy>Mj&m^{?oZ}ix1xk+S z0H-ZSp5CMYR(I`i1uZ}gQF*RvLOJN3AajKm>%rUp;O@eLCN{5LEya9~npw|5n(>b9 zhsu7UGbQPCy!sj{N=ZO16U}&Oa7NXa^(!I|IYu~p+uh`3MH_BApufHRhZFA8+TG}# zGUO>A_&#`~^0KXV?hSS2H3}(&24x>5i$b+co|Mq*t>JiCKZ$+3x)xoArmJ@amrCR@ zi;@N)N_4Qm744)ZH!84SG;E4F#y`V&Psr-aW^ z8dk6w9IP?U0FSCn%#M58+DI!RriWAP+I-9Pko$oQDf~^95!U6Sp8D-Rc?mCn6FV#|7?Xp^rv-dlHn|NL6 ze%9Rz#o76{cOnge>g70Fqi{;kN&H&psr=nlBg9Vv={6xzTF4`L&#n>vrW?(KgZmrL$SMMW||E=Ej^he+_v48=Y zyO<31zA(6Tbm_2kbt~=hSl>$*ziEWyWy5>TGp{ZAB6Z8O#2VWOH?sBW^*KM2Nwk&) z``I)3%Dy!4kKX~GF7HR|-J(Ndt) zzu7ad!bHTS(G5d~{3P71?!uZ5-7nvJy)F{P+wr`Lbnt==2~Ule9M;|z;qM9?TdcA2 zHYNky8yzRY`mm4H0hSEmdkr~~bxfNwdO|rKg=ww2R;~3(P|ATk!Z)Z8>Dy-FO&in< zmK2%*(Lk$(wn9UakNBWa@Cm8cb8bLtI#LU?Ygk*@>d`q6h#NO#Bf|H)1&ODsO-}~= z$U2V-{39-jsre|MhDT&@8`^@_?|=zlYxDW6+}sE!a^L&+eXig#hN!+cn+sN2IB@aU_|Ej)0{MT=4T80@L-K`@9*pq~KFv_DI84$xpOo zyq8MM;>X&ZgUb&vm>38lXS-IyP-317g_p)LNnBwFE!irH7d(s`qJuES7_4;nA64x~ z%H8Tpo;%dl14+Ymakc)?H9vXF*zk%aXNVOS3m488x$eGbz)HH#)%#lTl4DR_G5f6H z-#JyVmSIF9ooZ`iq|n6)PPK^MH4nbTM^>K4M?M`ZmUYWyohWHoC(|Qx7+D)oWXqrp z(hCGy#Zq%nYft@%h>J`pexp*kTbw#2BuTrQy2WAJEBiBfKeZtVE&Be>5ea7#O0J}l zc1^4PGN*KJwnZ2(^gxD5zx*i{^}o6HfWFMgeUB-rASpwnhv5G~0FQ7=^qOz^QrI15 z{><9V3Cr3-o#q{x`n&d|BSKM>rT=S3KFAOVAS?Fy?K5c?ZQ#T?n`JT$<3+d^(ijT4 z4*e)2t;p-RLHylwDFOp+RE?&qhPWp-S?F<2yaz|(YP>+e(;Q4rR zsoh55;qjHzy#(rW>}v5-U$w!b8CHe>A862_ADnG${PlWzT+U=*O3sHPmvMEEup03p z6nh~NuQ?^ERpwq{#Ve%ei?M+D-T}>{>Ti5nTg!x0U^P64glNG=0b@W+y!=7;fH8s1 z2WyF|H`0B<;e#axwsY$Z>>2fjL#wGD`uKjL?^O)O?{$gdGRG=YoG+s2tAdRP1D?b; zG}AFXQ>A?+apDSrX7uDeGSz`bEdtjRdFS3~OLVO3L9|G8DR72?mO=$3ltt-mqys8U zN1eYY#k0@+xh6VadVu>wU*IL%sW@H{s9wZ%G9+ct^lty7qp~P4SguRVtCtb7F?6EVi zQasehH%HIo`xf`X)oU#$6RavPGRL;my{Q*wZf1wE2KDDab#TQPyr3R$gyMK~>dhYbfnMhYhXA8^b8+`lmCLO^?4 zI&?ds?x#W^-)l4`=vRVYueiro_5VO|Lhdi1Nje@&w$pt?Reccd#6IHZc`)W7M>a4jL=;d*>YA9*B>jHxzS zz!W!b2%|WSXrc#rc;=JzokNu&Z}?|$uZue}Y>U`102kBAd2S59IOtbKpxWt)fb zxf3GMDE|M3P%Lz%CUZ3b(%-)a-9GA!g~s8Mk;m2jP~jCW&h$V_!EnnOxS2cZ=*`@V z4pd9pG-7fYUpx5UB-04`Q&^)t*rne4K=zOn?t72yKV=~RO$ZmcWvfEx^# za*X>M*0SKYRR3XTr@a~{bh7VLE1PBC$r4hjmGE1*q{DZC;`b^TC(EoC$CNr* z=}j(7|FS(AHk3v-KuVleJD1G%DwOm7`ZULYwbjqRVwc^r)i=F1xVWU56mVVtdu1a% zW=W4qWXqUJSLR4*d>m7Qpc-tEJ4r61mhI|!?L{74k%MAIo(7|n*+B1wWsKD2dlKcj zCXDY|`tj=}3p2p#i_L*lt|A}2!`!q!J>aSw# z?NwcorqCTWGd<&7-;CI~JNNqF&pOS2_)Y92R2b3CY#XPd8X z=KFpFtEoBX?(uRydXyojQJkv~yasmucb_JKzc$qy0w0yVwHKN?F+xnJQ zeOGG#B93ES1(RZby3oVJwqkrOtFq&UzAuKyAJ0zr&GZVi&oMY@OxcT7ij^N)4B#=# zwXO58u9;rR|9th-%C6aKLtro$;9F$HVUa77JVyE{F40#gu*$NQ{#i?D+;(|2Ldd*q zdXXu~pq#gQntw^YoPWnV%@qZ_VAEmG2l-G3X!*(Nd#LZa(=+S zNHb{93@TH2X8x-yQ3s#e2)UdDqW{+wDK#|@ACedB@r#R9Iw}YPn*4rOtLF?IQR9ki z9=uOMeSFo~bYTq)B#_2syf>{S)h~Kp&A2|f`ox9R>jtrhF?a&T021S)ux(bhfI8=# zE$(4w{~GGtt|OyW!37hB4vYCI`OZDvnjtsdjpJgFQ#lIY$zGrbJd2>PZH?Z6AY(I& z>c%y(zTYfh>RL2F#w%<;>~iWH^!4_6ap~gPe!=idf|2!0Yj*z`d!?2G;{$^hP2~aV z4_9etv3>sf7=zVMK8Il~pqVZ{M7-rKb8(o8@ozjDi2;3U^$jU-<<5jZx~qiD@cK~< zzfJ!2@jo-c$p7AX>Ef~=n(wOe>CLNc3n{i?E3WgKNPi!pmZ^&-uzSRP)BDnauNSa9 z4e`I^s#%`}`%ydITuS41tLIzQBmQ}mtG7$ov)cq!n+Nwd3maFM;jM#(X$B~Eve?wL z92qflb=86z|IMhaYF{b->nukVjEpk=^mwo zmH8q5!jSDtL=fuAZ&nQHzX}IIlkMGglloOrefx)JqRo27Y9W+SpqFj5 zk55R6&}}%5n~*=q{A6ISwq{gS@?UP>`N?l}tLEbaP+qqA(j z^V7a3Lw3%4mz`i&%>{A1+dGh_YGDDE7Zz zOYO0fXjvEwJf?IDdT!vKBkK4k1*Z>z9adtj9Y_U;a=-OiBINPIEOmzP1Olaa{s-L` z^bfWkPht#yOR(sh!B^o)s-`k=6jOY-i$I77$QUT2YYi&pz*<)BxThfIjs@>%cTM;0 z-7yc7z3}d*QoU^ol%5^OcV=TS*KK{rnIqGQI|)_j$k(t-NE=^)JWM_uRc+nIU0dl- z&5UWFXx)4|;(5J393Io~B|_(z5Tof=3+dhGU=aTaeIz2d3;A|9TLR~Oz&Uv3|M@x| z+7jGBX#{OK;eCMm3?4o}@$*&FLt$fbcPrPEZiZHX3+X9Bxa@vsl8wlP>s0TG)^QTo z|0Yodsm@I7 Date: Wed, 24 Sep 2025 15:31:29 +0200 Subject: [PATCH 69/80] Implemented test setup for swarm nodes to connect to locally hosted VPN from within the container. Currently requires manual steps (at least building the VPN container) for the test to succeed and will need to be adapted for productive VPN use. * install packages for OpenVPN and debugging in ODELIA container * changed docker run arguments for swarm nodes to be able to open VPN connection from within (currently as root, to be reconsidered) * setup for building and VPN container, creating OpenVPN certificates (one fixed set committed), running VPN container * swarm server now named testserver.local, but this name only needs to be reachable in the containers * VPN container is assumed to be reachable on host at 172.17.0.1 from other containers * changed ports for nvflare server to avoid interference with productive servers * noted TODOs * added integration test checking that and documenting how this works --- _generateStartupKitArchives.sh | 3 + assets/readme/README.operator.md | 2 + docker_config/Dockerfile_ODELIA | 16 + docker_config/master_template.yml | 27 +- runIntegrationTests.sh | 35 +- tests/local_vpn/Dockerfile_openvpnserver | 11 + tests/local_vpn/README.txt | 17 + tests/local_vpn/_build_docker.sh | 6 + .../_openvpn_certificate_creation.sh | 67 ++++ tests/local_vpn/_openvpn_start.sh | 31 ++ tests/local_vpn/client_configs/.gitignore | 1 + .../admin@test.odelia_client.ovpn | 299 +++++++++++++++++ tests/local_vpn/client_configs/client.conf | 138 ++++++++ .../client_configs/client_A_client.ovpn | 299 +++++++++++++++++ .../client_configs/client_B_client.ovpn | 299 +++++++++++++++++ tests/local_vpn/client_configs/make_ovpn.sh | 18 ++ .../testserver.local_client.ovpn | 299 +++++++++++++++++ .../local_vpn/create_openvpn_certificates.sh | 5 + tests/local_vpn/run_docker_openvpnserver.sh | 3 + tests/local_vpn/server_config/.gitignore | 2 + tests/local_vpn/server_config/ca.crt | 20 ++ .../server_config/ccd/admin@test.odelia | 1 + tests/local_vpn/server_config/ccd/client_A | 1 + tests/local_vpn/server_config/ccd/client_B | 1 + .../server_config/ccd/testserver.local | 1 + tests/local_vpn/server_config/server.conf | 304 ++++++++++++++++++ tests/local_vpn/server_config/server.crt | 87 +++++ tests/local_vpn/server_config/server.key | 28 ++ tests/local_vpn/server_config/ta.key | 21 ++ tests/provision/dummy_project_for_testing.yml | 8 +- 30 files changed, 2036 insertions(+), 14 deletions(-) create mode 100644 tests/local_vpn/Dockerfile_openvpnserver create mode 100644 tests/local_vpn/README.txt create mode 100755 tests/local_vpn/_build_docker.sh create mode 100644 tests/local_vpn/_openvpn_certificate_creation.sh create mode 100644 tests/local_vpn/_openvpn_start.sh create mode 100644 tests/local_vpn/client_configs/.gitignore create mode 100644 tests/local_vpn/client_configs/admin@test.odelia_client.ovpn create mode 100755 tests/local_vpn/client_configs/client.conf create mode 100644 tests/local_vpn/client_configs/client_A_client.ovpn create mode 100644 tests/local_vpn/client_configs/client_B_client.ovpn create mode 100755 tests/local_vpn/client_configs/make_ovpn.sh create mode 100644 tests/local_vpn/client_configs/testserver.local_client.ovpn create mode 100755 tests/local_vpn/create_openvpn_certificates.sh create mode 100755 tests/local_vpn/run_docker_openvpnserver.sh create mode 100644 tests/local_vpn/server_config/.gitignore create mode 100644 tests/local_vpn/server_config/ca.crt create mode 100644 tests/local_vpn/server_config/ccd/admin@test.odelia create mode 100644 tests/local_vpn/server_config/ccd/client_A create mode 100644 tests/local_vpn/server_config/ccd/client_B create mode 100644 tests/local_vpn/server_config/ccd/testserver.local create mode 100755 tests/local_vpn/server_config/server.conf create mode 100644 tests/local_vpn/server_config/server.crt create mode 100644 tests/local_vpn/server_config/server.key create mode 100644 tests/local_vpn/server_config/ta.key diff --git a/_generateStartupKitArchives.sh b/_generateStartupKitArchives.sh index ea842b41..23ea02c9 100755 --- a/_generateStartupKitArchives.sh +++ b/_generateStartupKitArchives.sh @@ -6,8 +6,11 @@ OUTPUT_FOLDER=workspace/`grep "^name: " $1 | sed 's/name: //'` TARGET_FOLDER=`ls -d $OUTPUT_FOLDER/prod_* | tail -n 1` LONG_VERSION=$2 +# TODO copy from different location + cd $TARGET_FOLDER for startupkit in `ls .`; do + cp ../../../tests/local_vpn/client_configs/${startupkit}_client.ovpn ${startupkit}/startup/vpn_client.ovpn zip -rq ${startupkit}_$LONG_VERSION.zip $startupkit echo "Generated startup kit $TARGET_FOLDER/${startupkit}_$LONG_VERSION.zip" done diff --git a/assets/readme/README.operator.md b/assets/readme/README.operator.md index 130629b7..d67b03f1 100644 --- a/assets/readme/README.operator.md +++ b/assets/readme/README.operator.md @@ -29,6 +29,8 @@ For example, add the following line (replace `` with the server's actual IP 4. Deploy startup kits to the respective server/client operators 5. Push the Docker image to the registry +TODO describe what needs to be done for productive VPN credentials (that must remain a local resource and not be committed) + ### Via the Dashboard (not recommended) Build the Docker image as described above. diff --git a/docker_config/Dockerfile_ODELIA b/docker_config/Dockerfile_ODELIA index 765c5b67..e6a98165 100644 --- a/docker_config/Dockerfile_ODELIA +++ b/docker_config/Dockerfile_ODELIA @@ -189,6 +189,22 @@ RUN apt install -y \ xdg-user-dirs=0.17-2ubuntu4 \ xz-utils=5.2.5-2ubuntu1 +# openvpn iputils-ping net-tools sudo and dependencies at fixed versions +# TODO remove tools only needed for debugging +RUN apt install -y \ + libelf1=0.186-1ubuntu0.1 \ + libbpf0=1:0.5.0-1ubuntu22.04.1 \ + libcap2-bin=1:2.44-1ubuntu0.22.04.2 \ + iproute2=5.15.0-1ubuntu2 \ + iputils-ping=3:20211215-1ubuntu0.1 \ + libatm1=1:2.5.1-4build2 \ + libpam-cap=1:2.44-1ubuntu0.22.04.2 \ + sudo=1.9.9-1ubuntu2.5 \ + liblzo2-2=2.10-2build3 \ + libpkcs11-helper1=1.28-1ubuntu0.22.04.1 \ + net-tools=1.60+git20181103.0eebece-1ubuntu5.4 \ + openvpn=2.5.11-0ubuntu0.22.04.1 + # Clean up apt cache RUN rm -rf /var/lib/apt/lists/* diff --git a/docker_config/master_template.yml b/docker_config/master_template.yml index 42e81588..63774b50 100644 --- a/docker_config/master_template.yml +++ b/docker_config/master_template.yml @@ -334,6 +334,11 @@ authz_def: | fl_admin_sh: | #!/usr/bin/env bash + + # TODO add name and IP address for productive server + echo "10.8.0.4 testserver.local" >> /etc/hosts + openvpn ./vpn_client.ovpn >> nohup_vpn.out 2>&1 & + DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" python3 -m nvflare.fuel.hci.tools.admin -m $DIR/.. -s fed_admin.json @@ -367,6 +372,11 @@ start_ovsr_sh: | start_cln_sh: | #!/usr/bin/env bash + + # TODO add name and IP address for productive server + echo "10.8.0.4 testserver.local" >> /etc/hosts + openvpn ./vpn_client.ovpn >> nohup_vpn.out 2>&1 & + DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" all_arguments="${@}" doCloud=false @@ -392,6 +402,9 @@ start_cln_sh: | start_svr_sh: | #!/usr/bin/env bash + + openvpn ./vpn_client.ovpn >> nohup_vpn.out 2>&1 & + DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" all_arguments="${@}" doCloud=false @@ -670,7 +683,7 @@ docker_cln_sh: | chmod -R 777 "$MY_SCRATCH_DIR" # Networking & Cleanup - NETARG="--net=host" + NETARG="--cap-add=NET_ADMIN --device /dev/net/tun" rm -rf ../pid.fl ../daemon_pid.fl # Docker image and container name @@ -680,8 +693,10 @@ docker_cln_sh: | docker pull "$DOCKER_IMAGE" fi + # TODO check if admin rights are needed and make sure output files are readable and deletable by non-root users on the host + CONTAINER_NAME=odelia_swarm_client_{~~client_name~~}___REPLACED_BY_CONTAINER_VERSION_IDENTIFIER_WHEN_BUILDING_DOCKER_IMAGE__ - DOCKER_OPTIONS_A="--name=$CONTAINER_NAME --gpus=$GPU2USE -u $(id -u):$(id -g)" + DOCKER_OPTIONS_A="--name=$CONTAINER_NAME --gpus=$GPU2USE" DOCKER_MOUNTS="-v /etc/passwd:/etc/passwd -v /etc/group:/etc/group -v $DIR/..:/startupkit/ -v $MY_SCRATCH_DIR:/scratch/" if [[ ! -z "$MY_DATA_DIR" ]]; then DOCKER_MOUNTS+=" -v $MY_DATA_DIR:/data/:ro" @@ -754,10 +769,12 @@ docker_svr_sh: | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" # to use host network, use line below - NETARG="--net=host" + NETARG="--cap-add=NET_ADMIN --device /dev/net/tun" # or to expose specific ports, use line below #NETARG="-p {~~admin_port~~}:{~~admin_port~~} -p {~~fed_learn_port~~}:{~~fed_learn_port~~}" + # TODO check if admin rights are needed and make sure output files are readable and deletable by non-root users on the host + DOCKER_IMAGE={~~docker_image~~} if [ -z "$NOPULL" ]; then echo "Updating docker image" @@ -804,7 +821,9 @@ docker_adm_sh: | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" # To use host network - NETARG="--net=host" + NETARG="--cap-add=NET_ADMIN --device /dev/net/tun" + + # TODO check if admin rights are needed and make sure output files are readable and deletable by non-root users on the host DOCKER_IMAGE={~~docker_image~~} if [ -z "$NOPULL" ]; then diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 1dc9dc3a..99f39187 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -175,7 +175,7 @@ create_synthetic_data () { run_list_licenses () { cd "$PROJECT_DIR"/prod_00 - cd localhost/startup + cd testserver.local/startup LICENSES_LISTED=$(./docker.sh --list_licenses --no_pull) for EXPECTED_KEYWORDS in 'scikit-learn' 'torch' 'nvflare_mediswarm' 'BSD License' 'MIT License'; @@ -236,11 +236,30 @@ run_3dcnn_simulation_mode () { } +start_testing_vpn () { + echo "[Prepare] Start local VPN server for testing ..." + + # TODO make sure (at suitable locatin in scripts) that VPN container is built and that VPN certificates exist + + cp -r tests/local_vpn "$PROJECT_DIR"/prod_00/ + chmod a+rX "$PROJECT_DIR"/prod_00/local_vpn -R + cd "$PROJECT_DIR"/prod_00/local_vpn + ./run_docker_openvpnserver.sh + cd "$CWD" +} + + +kill_testing_vpn () { + echo "[Cleanup] Kill local VPN server Docker container ..." + docker kill odelia_testing_openvpnserver +} + + start_server_and_clients () { echo "[Run] Start server and client Docker containers ..." cd "$PROJECT_DIR"/prod_00 - cd localhost/startup + cd testserver.local/startup ./docker.sh --no_pull --start_server cd ../.. sleep 10 @@ -266,7 +285,7 @@ start_registry_docker_and_push () { run_container_with_pulling () { docker rmi localhost:5000/odelia:$VERSION cd "$PROJECT_DIR"/prod_00 - cd localhost/startup + cd testserver.local/startup OUTPUT=$(./docker.sh --list_licenses) if echo "$OUTPUT" | grep -qie "Status: Downloaded newer image for localhost:5000/odelia:$VERSION" ; then @@ -290,7 +309,7 @@ verify_wrong_client_does_not_connect () { cp -r "$PROJECT_DIR"/prod_01 "$PROJECT_DIR"/prod_wrong_client cd "$PROJECT_DIR"/prod_wrong_client - cd localhost/startup + cd testserver.local/startup ./docker.sh --no_pull --start_server cd ../.. sleep 10 @@ -306,7 +325,7 @@ verify_wrong_client_does_not_connect () { sleep 20 - CONSOLE_OUTPUT_SERVER=localhost/startup/nohup.out + CONSOLE_OUTPUT_SERVER=testserver.local/startup/nohup.out CONSOLE_OUTPUT_CLIENT=client_A/startup/nohup.out if grep -q "Total clients: 1" $CONSOLE_OUTPUT_SERVER; then @@ -340,7 +359,7 @@ run_dummy_training_in_swarm () { sleep 60 cd "$CWD" - cd "$PROJECT_DIR"/prod_00/localhost/startup + cd "$PROJECT_DIR"/prod_00/testserver.local/startup CONSOLE_OUTPUT=nohup.out for EXPECTED_OUTPUT in 'Total clients: 2' 'updated status of client client_A on round 4' 'updated status of client client_B on round 4' 'all_done=True' 'Server runner finished.' \ 'Start to the run Job: [0-9a-f]\{8\}-[0-9a-f]\{4\}-[0-9a-f]\{4\}-[0-9a-f]\{4\}-[0-9a-f]\{12\}' 'updated status of client client_B on round 4'; @@ -489,9 +508,11 @@ case "$1" in run_dummy_training_in_swarm) create_startup_kits_and_check_contained_files create_synthetic_data + start_testing_vpn start_server_and_clients run_dummy_training_in_swarm kill_server_and_clients + kill_testing_vpn cleanup_temporary_data # TODO add to CI if we want this (currently not working) ;; @@ -511,7 +532,9 @@ case "$1" in kill_registry_docker run_docker_gpu_preflight_check run_data_access_preflight_check + start_testing_vpn start_server_and_clients + kill_testing_vpn verify_wrong_client_does_not_connect run_dummy_training_in_swarm kill_server_and_clients diff --git a/tests/local_vpn/Dockerfile_openvpnserver b/tests/local_vpn/Dockerfile_openvpnserver new file mode 100644 index 00000000..8270f8fa --- /dev/null +++ b/tests/local_vpn/Dockerfile_openvpnserver @@ -0,0 +1,11 @@ +FROM ubuntu:22.04 + +RUN apt update +RUN apt install -y easy-rsa openvpn openssl ufw joe patch +RUN apt install -y openssh-server net-tools + +RUN useradd ca_user + +COPY _openvpn_certificate_creation.sh / +COPY _openvpn_start.sh / +RUN chmod u+x /*.sh diff --git a/tests/local_vpn/README.txt b/tests/local_vpn/README.txt new file mode 100644 index 00000000..5cc3e826 --- /dev/null +++ b/tests/local_vpn/README.txt @@ -0,0 +1,17 @@ +# Following https://www.digitalocean.com/community/tutorials/how-to-set-up-and-configure-an-openvpn-server-on-ubuntu-20-04 +# but on 22.04 + +Setup +----- +./create_openvpn_certificates.sh builds a docker image and creates certificates and .ovpn config files for the clients specified in _openvpn_certificate_creation.sh +Modify server_config/server.conf and client_configs/client.conf to modify network configuration. +Files to use on the server and client are created in server_config/ and client_configs/ + +Usage +----- +./openvpn_start.sh builds a docker image and starts OpenVPN server in the docker container. +Modify _openvpn_start.sh for further firewall etc. configuration. + +Disclaimer +---------- +This configuration is not necessarily secure and should not be re-used unless you know what you are doing. diff --git a/tests/local_vpn/_build_docker.sh b/tests/local_vpn/_build_docker.sh new file mode 100755 index 00000000..0df1ce0f --- /dev/null +++ b/tests/local_vpn/_build_docker.sh @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +# TODO should this be named "latest"? Do we need to pin versions? +# TODO think about splitting building certificates from running the VPN container + +docker build -t odelia_testing_openvpnserver:latest . -f Dockerfile_openvpnserver diff --git a/tests/local_vpn/_openvpn_certificate_creation.sh b/tests/local_vpn/_openvpn_certificate_creation.sh new file mode 100644 index 00000000..a815f001 --- /dev/null +++ b/tests/local_vpn/_openvpn_certificate_creation.sh @@ -0,0 +1,67 @@ +#!/usr/bin/env bash + +# Roughly following https://www.digitalocean.com/community/tutorials/how-to-set-up-and-configure-an-openvpn-server-on-ubuntu-20-04 +# but on 22.04 + +chown ca_user:ca_user /home/ca_user/ -R +chmod a+rwX /home/ca_user/ -R +/bin/su - -c '/home/ca_user/ca_setup.sh' ca_user + +mkdir ~/easy-rsa +ln -s /usr/share/easy-rsa/* ~/easy-rsa/ +cd ~/easy-rsa + +echo 'set_var EASYRSA_ALGO "ec"' > vars +echo 'set_var EASYRSA_DIGEST "sha512"' >> vars + +./easyrsa init-pki + +rm /server_config/ca.crt \ + /server_config/server.crt \ + /server_config/server.key \ + /server_config/ta.key -f + +rm -rf /client_configs/keys +mkdir -p /client_configs/keys/ + +export EASYRSA_BATCH=1 +./easyrsa gen-req server nopass + +cp ~/easy-rsa/pki/reqs/server.req /tmp/ +chmod a+r /tmp/server.req +/bin/su - -c "export EASYRSA_BATCH=1 && cd ~/easy-rsa/ && ./easyrsa import-req /tmp/server.req server && ./easyrsa sign-req server server" ca_user + +cd ~/easy-rsa +openvpn --genkey secret ta.key +cp ta.key /client_configs/keys/ +cp /home/ca_user/easy-rsa/pki/ca.crt /client_configs/keys/ + +# copy/create files to where they are needed +cp /home/ca_user/easy-rsa/pki/ca.crt /server_config/ +cp /home/ca_user/easy-rsa/pki/issued/server.crt /server_config/ +cp ~/easy-rsa/pki/private/server.key /server_config/ +cp ~/easy-rsa/ta.key /server_config/ + +mkdir /server_config/ccd + +i=4 +for client in testserver.local admin@test.odelia client_A client_B; do + cd ~/easy-rsa + EASYRSA_BATCH=1 EASYRSA_REQ_CN=$client ./easyrsa gen-req $client nopass + cp pki/private/$client.key /client_configs/keys/ + + cp ~/easy-rsa/pki/reqs/$client.req /tmp/ + chmod a+r /tmp/$client.req + /bin/su - -c "export EASYRSA_BATCH=1 && cd ~/easy-rsa/ && ./easyrsa import-req /tmp/$client.req $client && ./easyrsa sign-req client $client" ca_user + cp /home/ca_user/easy-rsa/pki/issued/$client.crt /client_configs/keys/ + + cd /client_configs + ./make_ovpn.sh $client + + echo "ifconfig-push 10.8.0."$i" 255.0.0.0" > /server_config/ccd/$client + i=$((i+1)) +done + +chmod a+rwX /client_configs -R +chmod a+rwX /server_config -R +chmod a+rwX /home/ca_user -R diff --git a/tests/local_vpn/_openvpn_start.sh b/tests/local_vpn/_openvpn_start.sh new file mode 100644 index 00000000..62d1a864 --- /dev/null +++ b/tests/local_vpn/_openvpn_start.sh @@ -0,0 +1,31 @@ +#!/usr/bin/env bash + +echo "net.ipv4.ip_forward = 1" >> /etc/sysctl.conf +sysctl -p + +echo "MTBhMTEsMTkKPiAjIFNUQVJUIE9QRU5WUE4gUlVMRVMKPiAjIE5BVCB0YWJsZSBydWxlcwo+ICpuYXQKPiA6UE9TVFJPVVRJTkcgQUNDRVBUIFswOjBdCj4gIyBBbGxvdyB0cmFmZmljIGZyb20gT3BlblZQTiBjbGllbnQgdG8gZXRoMCAoY2hhbmdlIHRvIHRoZSBpbnRlcmZhY2UgeW91IGRpc2NvdmVyZWQhKQo+IC1BIFBPU1RST1VUSU5HIC1zIDEwLjguMC4wLzggLW8gZXRoMCAtaiBNQVNRVUVSQURFCj4gQ09NTUlUCj4gIyBFTkQgT1BFTlZQTiBSVUxFUwo+IAo=" | base64 -d > before.rules.patch +patch /etc/ufw/before.rules before.rules.patch +rm before.rules.patch + +echo "MTljMTkKPCBERUZBVUxUX0ZPUldBUkRfUE9MSUNZPSJEUk9QIgotLS0KPiBERUZBVUxUX0ZPUldBUkRfUE9MSUNZPSJBQ0NFUFQiCg==" | base64 -d > ufw.patch +patch /etc/default/ufw ufw.patch +rm ufw.patch + +ufw allow 9194/udp +ufw allow OpenSSH +ufw disable +ufw enable + +cp /server_config/ca.crt /etc/openvpn/server/ +cp /server_config/server.conf /etc/openvpn/server/ +cp /server_config/server.crt /etc/openvpn/server/ +cp /server_config/server.key /etc/openvpn/server/ +cp /server_config/ta.key /etc/openvpn/server/ +cp /server_config/ccd /etc/openvpn/ccd -r + +# write log to folder on host +cd server_config + +nohup openvpn --duplicate-cn --client-to-client --config /etc/openvpn/server/server.conf & +sleep 2 +chmod a+r /server_config/nohup.out diff --git a/tests/local_vpn/client_configs/.gitignore b/tests/local_vpn/client_configs/.gitignore new file mode 100644 index 00000000..38156aad --- /dev/null +++ b/tests/local_vpn/client_configs/.gitignore @@ -0,0 +1 @@ +keys \ No newline at end of file diff --git a/tests/local_vpn/client_configs/admin@test.odelia_client.ovpn b/tests/local_vpn/client_configs/admin@test.odelia_client.ovpn new file mode 100644 index 00000000..8b9a87ee --- /dev/null +++ b/tests/local_vpn/client_configs/admin@test.odelia_client.ovpn @@ -0,0 +1,299 @@ +############################################## +# Sample client-side OpenVPN 2.0 config file # +# for connecting to multi-client server. # +# # +# This configuration can be used by multiple # +# clients, however each client should have # +# its own cert and key files. # +# # +# On Windows, you might want to rename this # +# file so it has a .ovpn extension # +############################################## + +# Specify that we are a client and that we +# will be pulling certain config file directives +# from the server. +client + +# Use the same setting as you are using on +# the server. +# On most systems, the VPN will not function +# unless you partially or fully disable +# the firewall for the TUN/TAP interface. +;dev tap +dev tun + +# Windows needs the TAP-Win32 adapter name +# from the Network Connections panel +# if you have more than one. On XP SP2, +# you may need to disable the firewall +# for the TAP adapter. +;dev-node MyTap + +# Are we connecting to a TCP or +# UDP server? Use the same setting as +# on the server. +;proto tcp +proto udp + +# The hostname/IP and port of the server. +# You can have multiple remote entries +# to load balance between the servers. +remote 172.17.0.1 9194 + +# Choose a random host from the remote +# list for load-balancing. Otherwise +# try hosts in the order specified. +;remote-random + +# Keep trying indefinitely to resolve the +# host name of the OpenVPN server. Very useful +# on machines which are not permanently connected +# to the internet such as laptops. +resolv-retry infinite + +# Most clients don't need to bind to +# a specific local port number. +nobind + +# Downgrade privileges after initialization (non-Windows only) +user nobody +group nogroup + +# Try to preserve some state across restarts. +persist-key +persist-tun + +# If you are connecting through an +# HTTP proxy to reach the actual OpenVPN +# server, put the proxy server/IP and +# port number here. See the man page +# if your proxy server requires +# authentication. +;http-proxy-retry # retry on connection failures +;http-proxy [proxy server] [proxy port #] + +# Wireless networks often produce a lot +# of duplicate packets. Set this flag +# to silence duplicate packet warnings. +;mute-replay-warnings + +# SSL/TLS parms. +# See the server config file for more +# description. It's best to use +# a separate .crt/.key file pair +# for each client. A single ca +# file can be used for all clients. + +# Verify server certificate by checking that the +# certificate has the correct key usage set. +# This is an important precaution to protect against +# a potential attack discussed here: +# http://openvpn.net/howto.html#mitm +# +# To use this feature, you will need to generate +# your server certificates with the keyUsage set to +# digitalSignature, keyEncipherment +# and the extendedKeyUsage to +# serverAuth +# EasyRSA can do this for you. +remote-cert-tls server + +# If a tls-auth key is used on the server +# then every client must also have the key. +;tls-auth ta.key 1 + +# Select a cryptographic cipher. +# If the cipher option is used on the server +# then you must also specify it here. +# Note that v2.4 client/server will automatically +# negotiate AES-256-GCM in TLS mode. +# See also the data-ciphers option in the manpage +;cipher AES-256-CBC +cipher AES-256-GCM + +auth SHA256 + +# Enable compression on the VPN link. +# Don't enable this unless it is also +# enabled in the server config file. +#comp-lzo + +# Set log file verbosity. +verb 3 + +# Silence repeating messages +;mute 20 + +key-direction 1 + +; script-security 2 +; up /etc/openvpn/update-resolv-conf +; down /etc/openvpn/update-resolv-conf + +; script-security 2 +; up /etc/openvpn/update-systemd-resolved +; down /etc/openvpn/update-systemd-resolved +; down-pre +; dhcp-option DOMAIN-ROUTE . + +-----BEGIN CERTIFICATE----- +MIIDQjCCAiqgAwIBAgIUBwqUYD1oxBKeImaMZfm44TsTAF0wDQYJKoZIhvcNAQEL +BQAwEzERMA8GA1UEAwwIQ2hhbmdlTWUwHhcNMjUwOTIzMTI0NjQyWhcNMzUwOTIx +MTI0NjQyWjATMREwDwYDVQQDDAhDaGFuZ2VNZTCCASIwDQYJKoZIhvcNAQEBBQAD +ggEPADCCAQoCggEBAKGt+8oRY7cWPg1SahfIV3XAeeH1SQEFq4f2q+E9ZbWVnCg9 +b59hMzwYr84/j4V73Hlv2udLrkguvnT9KqqJY/0wo3Bd1swH2WLej1fo0+rVo24w +hzeLfeH1e4erZbzQk8XG68U7yNDHKYo+LIz9syBzZA4Bq12bHxDsZbJF7HUANzFR +j9Xg3dR7utPtG8ktmD83rV9/E97whblMpLmjmf2sbCqdLOKTkZnwp5mI47TTkhMj +9K0q7irHmbtZcPZQH5Z59GtqaCaRt8DKfeYniyoPnGVfzFberHHQ4C11pcRrdvgY +n14/W5myh6HESQD6umyCYooyXG7wfqIKujROQCMCAwEAAaOBjTCBijAdBgNVHQ4E +FgQUtMsHbl94qRV7OW5UNNjk2mJ+/U8wTgYDVR0jBEcwRYAUtMsHbl94qRV7OW5U +NNjk2mJ+/U+hF6QVMBMxETAPBgNVBAMMCENoYW5nZU1lghQHCpRgPWjEEp4iZoxl ++bjhOxMAXTAMBgNVHRMEBTADAQH/MAsGA1UdDwQEAwIBBjANBgkqhkiG9w0BAQsF +AAOCAQEAGeryP/2JuOp7tzi7Ww9lFUx2DRcgq/FwnU4biotUfuLejHQt/IeIwRYs +dW6AToUYJak8Uy/AFffMootwLcC8z8FATBnxtokWNpxtscpbTSHbeS0HvXnXFaU8 +xxlzp9l5k+46MrrvdzFsjoRfVxs0FUHzWifBnObBziTLfHt+J71509uqRWX6JuTa +PDAT8CMcLKxxS4BcorWtAmc51lW/dQQ41HDJ8a6acltDAprmlnhd8ksWzpTjUDNR +/cfSMcVTpPxPSW/WchR5NlJKQEAf9B/xC+LQgDRSDLaZ8CvzRDgosllzJ+aIS7GK +GPec69LiKqpirZ7enwDM67R4DwIHKA== +-----END CERTIFICATE----- + + +Certificate: + Data: + Version: 3 (0x2) + Serial Number: + 45:fc:0b:2c:a3:b7:9c:b6:f1:56:fd:47:cb:b2:12:12 + Signature Algorithm: sha256WithRSAEncryption + Issuer: CN=ChangeMe + Validity + Not Before: Sep 23 12:46:43 2025 GMT + Not After : Dec 27 12:46:43 2027 GMT + Subject: CN=admin@test.odelia + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + Public-Key: (2048 bit) + Modulus: + 00:ca:33:a9:8e:be:5d:00:9e:ff:72:43:e9:e4:8b: + 8f:09:6e:56:38:7e:f8:57:e1:5f:e7:df:af:e1:22: + 69:1e:7a:9c:a3:43:84:8f:f8:cc:61:4e:61:dc:3a: + 56:02:77:13:65:09:4e:25:02:94:a9:94:3f:76:4f: + b8:6c:98:36:0c:52:cc:22:e7:16:97:2b:c2:c1:7c: + 14:db:f8:45:7a:b7:c8:b0:5c:a9:a1:d8:0c:ca:b0: + 4f:b3:a6:f3:05:f2:e7:43:ac:90:2c:32:4b:ae:b8: + d8:67:c0:0f:46:e2:e1:a7:d9:a4:cd:c7:5b:29:4e: + c4:38:aa:6b:43:c5:31:8e:a4:be:68:73:82:72:ca: + a4:df:81:80:c7:13:df:b7:e1:53:07:04:c0:d6:78: + 66:22:9a:fe:ba:95:0e:e5:cc:93:47:1f:f1:e9:86: + 77:3d:c4:54:cd:b8:c9:8a:2b:02:eb:84:0b:68:22: + 50:8f:16:7a:e5:d7:ec:3f:3f:25:f0:79:74:42:3a: + bb:2e:a3:dc:c0:d4:d3:05:8b:4e:01:a7:e8:ff:6d: + 94:1e:4d:de:f7:76:10:cc:62:66:d9:b4:1e:58:0c: + 52:de:46:1c:26:bc:71:ef:82:bb:25:f6:d7:14:19: + e6:3d:a1:e4:cc:0b:94:1f:c6:bb:37:81:4d:5c:76: + 6b:2b + Exponent: 65537 (0x10001) + X509v3 extensions: + X509v3 Basic Constraints: + CA:FALSE + X509v3 Subject Key Identifier: + E5:EC:27:21:94:02:06:AC:C6:C7:DB:B6:13:25:9C:C3:60:1E:47:FE + X509v3 Authority Key Identifier: + keyid:B4:CB:07:6E:5F:78:A9:15:7B:39:6E:54:34:D8:E4:DA:62:7E:FD:4F + DirName:/CN=ChangeMe + serial:07:0A:94:60:3D:68:C4:12:9E:22:66:8C:65:F9:B8:E1:3B:13:00:5D + X509v3 Extended Key Usage: + TLS Web Client Authentication + X509v3 Key Usage: + Digital Signature + Signature Algorithm: sha256WithRSAEncryption + Signature Value: + 73:cb:0e:63:bf:1d:f5:04:37:d3:cc:9c:c8:d2:21:60:f0:ae: + 23:08:38:0b:77:31:9b:6f:b3:89:5f:5c:69:86:e8:69:47:b8: + da:04:56:8b:a2:f4:25:2b:48:c6:4f:1d:a2:8a:b3:b8:7c:a8: + d2:9e:89:9a:20:71:69:fb:9f:4d:39:8d:cb:9c:f2:58:bc:58: + 19:10:cd:be:1f:bd:6e:e4:af:fd:c6:eb:2f:83:39:e7:4b:2c: + bf:23:e1:9d:9e:81:80:86:41:df:9f:fc:3b:d3:29:7f:dc:fb: + a6:45:5c:38:0b:80:de:27:ef:23:f8:53:80:48:69:37:c9:9b: + aa:24:cc:ff:54:80:77:2b:ab:51:c7:02:4d:e7:49:01:af:f4: + d3:d1:89:09:4a:96:99:44:e2:0d:13:b1:9d:4b:47:73:70:22: + fc:a7:4f:20:90:00:a3:5b:96:c9:59:e7:0e:e1:25:e0:00:3c: + 66:a8:32:62:f1:42:bc:84:32:32:46:b7:ac:b9:ed:e7:45:47: + 3b:26:b7:2b:f2:ce:04:e9:64:9c:52:5d:e4:08:11:32:ff:e0: + ff:a9:d8:e5:1a:e7:f0:cc:21:25:f8:04:40:6a:e3:ed:5f:fc: + b2:15:0a:b7:cf:85:db:82:29:e2:27:ed:e8:94:f4:c3:01:77: + 04:d0:bf:7d +-----BEGIN CERTIFICATE----- +MIIDWTCCAkGgAwIBAgIQRfwLLKO3nLbxVv1Hy7ISEjANBgkqhkiG9w0BAQsFADAT +MREwDwYDVQQDDAhDaGFuZ2VNZTAeFw0yNTA5MjMxMjQ2NDNaFw0yNzEyMjcxMjQ2 +NDNaMBwxGjAYBgNVBAMMEWFkbWluQHRlc3Qub2RlbGlhMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEAyjOpjr5dAJ7/ckPp5IuPCW5WOH74V+Ff59+v4SJp +Hnqco0OEj/jMYU5h3DpWAncTZQlOJQKUqZQ/dk+4bJg2DFLMIucWlyvCwXwU2/hF +erfIsFypodgMyrBPs6bzBfLnQ6yQLDJLrrjYZ8APRuLhp9mkzcdbKU7EOKprQ8Ux +jqS+aHOCcsqk34GAxxPft+FTBwTA1nhmIpr+upUO5cyTRx/x6YZ3PcRUzbjJiisC +64QLaCJQjxZ65dfsPz8l8Hl0Qjq7LqPcwNTTBYtOAafo/22UHk3e93YQzGJm2bQe +WAxS3kYcJrxx74K7JfbXFBnmPaHkzAuUH8a7N4FNXHZrKwIDAQABo4GfMIGcMAkG +A1UdEwQCMAAwHQYDVR0OBBYEFOXsJyGUAgasxsfbthMlnMNgHkf+ME4GA1UdIwRH +MEWAFLTLB25feKkVezluVDTY5Npifv1PoRekFTATMREwDwYDVQQDDAhDaGFuZ2VN +ZYIUBwqUYD1oxBKeImaMZfm44TsTAF0wEwYDVR0lBAwwCgYIKwYBBQUHAwIwCwYD +VR0PBAQDAgeAMA0GCSqGSIb3DQEBCwUAA4IBAQBzyw5jvx31BDfTzJzI0iFg8K4j +CDgLdzGbb7OJX1xphuhpR7jaBFaLovQlK0jGTx2iirO4fKjSnomaIHFp+59NOY3L +nPJYvFgZEM2+H71u5K/9xusvgznnSyy/I+GdnoGAhkHfn/w70yl/3PumRVw4C4De +J+8j+FOASGk3yZuqJMz/VIB3K6tRxwJN50kBr/TT0YkJSpaZROINE7GdS0dzcCL8 +p08gkACjW5bJWecO4SXgADxmqDJi8UK8hDIyRresue3nRUc7Jrcr8s4E6WScUl3k +CBEy/+D/qdjlGufwzCEl+ARAauPtX/yyFQq3z4XbginiJ+3olPTDAXcE0L99 +-----END CERTIFICATE----- + + +-----BEGIN PRIVATE KEY----- +MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQDKM6mOvl0Anv9y +Q+nki48JblY4fvhX4V/n36/hImkeepyjQ4SP+MxhTmHcOlYCdxNlCU4lApSplD92 +T7hsmDYMUswi5xaXK8LBfBTb+EV6t8iwXKmh2AzKsE+zpvMF8udDrJAsMkuuuNhn +wA9G4uGn2aTNx1spTsQ4qmtDxTGOpL5oc4JyyqTfgYDHE9+34VMHBMDWeGYimv66 +lQ7lzJNHH/Hphnc9xFTNuMmKKwLrhAtoIlCPFnrl1+w/PyXweXRCOrsuo9zA1NMF +i04Bp+j/bZQeTd73dhDMYmbZtB5YDFLeRhwmvHHvgrsl9tcUGeY9oeTMC5Qfxrs3 +gU1cdmsrAgMBAAECggEARJO+9vWjLzm9ozBbXDLk4Sr1vRV6/rsmPssDqJR2GGs3 +Qrv8cqPMFVhzIjD6yL0/6617PlwgtV7dIzAoeVQqSIWwTEoZxE2IMPz3Sh9q2RMJ +0p6qvYQ72mZvsZt2otbeDnqxLvwj5O82HgHqbH04USgkl9H8Hgdjz2AlHwf7Jcgo +pwD48OtE8YFMof3/SFzKUJDPdsCsjGlWnDDJMjXrIR6BZdE7xxEX7L2VRcmVBQXR +lhAeNwYJNQ1qWGuXaSPx2BNa9BuTd66PwypsyPKwI63CJ6WkUh1bWsAviTBzr5Oz +u27eym4uK6mfXS6Pgv4VcM0kNUjnPd8p/XSaGQCfUQKBgQDLLuV9bhUyHPRbJbHC +WcXxNMiwUOpoyQY+KTj+p8mgXc9tB1TvL+dNi+vhc1mgHNactFnmyzx1S8eM2Wn9 +Aw1fxw42APUTw4rJh+l3UsuTBMZwQ3s6CeNFgX+PNHqHK/47xyXdmgipMB4Uz0JI +EPEe1avbLTymDCmbfJ3mFqLMuQKBgQD+w3VZhhI5OJSEQUkJs/sURQdBoD05rX/B +afz4ZqfRfLJscI3oG1oV8ZwMkoeA1ou5ovtxr/4XlGPhsopDa7sUMWbhEkm5Rssw +gPVmGE9HnM8tSG/8So7fbIXHCGcKRDD5JKnjPyqpP78wTzJeOrCfhbFP+dpiRJrJ +mEhn3V2tAwKBgQDDXHYgIkaTBrAVK6s9eeAPSndkwIiC9DbicfRxNpdxcIHPDWun +B+JY9554Cdc1UkUwK2D9vpCFH7XhQfLc6aBkZRrO5iC/PhcmK15Z8uv2knLS4q+L +YJJ79EXYRddCPRSYGaXY6xBEzRU/YQEUFeYhhcVWWqqj5bHj5PBVmZIzUQKBgQC3 +AOa6ETHkAr3EpzT1EGFqtQ86WAXC+duMrzr1oKAqPl3YwZ1ePs+edbk32sYViYhD +KE1g5CAtBf4dsWfaeHehUL9rK/zjZ3Qr+mbNGOdSNNUp3R/8Zf5thgIu7908pbFc +NrcGs2hMvarz49/1ikk3vgyZu4vhDRD3gTl5yq0wywKBgQCddStrC6gtOOaZofSL +bU6Le9TXyDbBfGiVpDxaD1rxdWylN3jQY9JSznmq7RGTR2TUVlqeFoCunaLc4VJi +N+np08niR1T/Mm+8HqLzYRROmLIozETrPomdgj1Ewa83lmI4/JSiNZbkFs+Jh6J1 +sGSrPFifkIAVP/C6PbVqj1Nn7A== +-----END PRIVATE KEY----- + + +# +# 2048 bit OpenVPN static key +# +-----BEGIN OpenVPN Static key V1----- +488b61084812969fe8ad0f9dd40f56a2 +6cdadddfe345daef6b5c6d3c3e779fc5 +1f7d236966953482d2af085e3f8581b7 +d216f2d891972a463bbb22ca6c104b9d +f99dcb19d7d575a1d46e7918bb2556c6 +db9f51cd792c5e89e011586214692b95 +2a32a7fe85e4538c40e1d0aa2a9f8e15 +fcc0ce5d31974e3c2041b127776f7658 +878cb8245ed235ec996c2370c0fc0023 +699bc028b3412bc40209cba8233bc111 +fa1438095f99052d799fa718f3b04499 +472254d0286b4b2ce99db49e98a4cc25 +fd948bddcdcf08006a6d7bff40354e7b +5e93ea753a8ecc05de41ae34d280e7eb +99220e436bf8b7693a00667485631e28 +edba3e33b6f558dfa50b92eec6ac8b44 +-----END OpenVPN Static key V1----- + diff --git a/tests/local_vpn/client_configs/client.conf b/tests/local_vpn/client_configs/client.conf new file mode 100755 index 00000000..49669b71 --- /dev/null +++ b/tests/local_vpn/client_configs/client.conf @@ -0,0 +1,138 @@ +############################################## +# Sample client-side OpenVPN 2.0 config file # +# for connecting to multi-client server. # +# # +# This configuration can be used by multiple # +# clients, however each client should have # +# its own cert and key files. # +# # +# On Windows, you might want to rename this # +# file so it has a .ovpn extension # +############################################## + +# Specify that we are a client and that we +# will be pulling certain config file directives +# from the server. +client + +# Use the same setting as you are using on +# the server. +# On most systems, the VPN will not function +# unless you partially or fully disable +# the firewall for the TUN/TAP interface. +;dev tap +dev tun + +# Windows needs the TAP-Win32 adapter name +# from the Network Connections panel +# if you have more than one. On XP SP2, +# you may need to disable the firewall +# for the TAP adapter. +;dev-node MyTap + +# Are we connecting to a TCP or +# UDP server? Use the same setting as +# on the server. +;proto tcp +proto udp + +# The hostname/IP and port of the server. +# You can have multiple remote entries +# to load balance between the servers. +remote 172.17.0.1 9194 + +# Choose a random host from the remote +# list for load-balancing. Otherwise +# try hosts in the order specified. +;remote-random + +# Keep trying indefinitely to resolve the +# host name of the OpenVPN server. Very useful +# on machines which are not permanently connected +# to the internet such as laptops. +resolv-retry infinite + +# Most clients don't need to bind to +# a specific local port number. +nobind + +# Downgrade privileges after initialization (non-Windows only) +user nobody +group nogroup + +# Try to preserve some state across restarts. +persist-key +persist-tun + +# If you are connecting through an +# HTTP proxy to reach the actual OpenVPN +# server, put the proxy server/IP and +# port number here. See the man page +# if your proxy server requires +# authentication. +;http-proxy-retry # retry on connection failures +;http-proxy [proxy server] [proxy port #] + +# Wireless networks often produce a lot +# of duplicate packets. Set this flag +# to silence duplicate packet warnings. +;mute-replay-warnings + +# SSL/TLS parms. +# See the server config file for more +# description. It's best to use +# a separate .crt/.key file pair +# for each client. A single ca +# file can be used for all clients. + +# Verify server certificate by checking that the +# certificate has the correct key usage set. +# This is an important precaution to protect against +# a potential attack discussed here: +# http://openvpn.net/howto.html#mitm +# +# To use this feature, you will need to generate +# your server certificates with the keyUsage set to +# digitalSignature, keyEncipherment +# and the extendedKeyUsage to +# serverAuth +# EasyRSA can do this for you. +remote-cert-tls server + +# If a tls-auth key is used on the server +# then every client must also have the key. +;tls-auth ta.key 1 + +# Select a cryptographic cipher. +# If the cipher option is used on the server +# then you must also specify it here. +# Note that v2.4 client/server will automatically +# negotiate AES-256-GCM in TLS mode. +# See also the data-ciphers option in the manpage +;cipher AES-256-CBC +cipher AES-256-GCM + +auth SHA256 + +# Enable compression on the VPN link. +# Don't enable this unless it is also +# enabled in the server config file. +#comp-lzo + +# Set log file verbosity. +verb 3 + +# Silence repeating messages +;mute 20 + +key-direction 1 + +; script-security 2 +; up /etc/openvpn/update-resolv-conf +; down /etc/openvpn/update-resolv-conf + +; script-security 2 +; up /etc/openvpn/update-systemd-resolved +; down /etc/openvpn/update-systemd-resolved +; down-pre +; dhcp-option DOMAIN-ROUTE . diff --git a/tests/local_vpn/client_configs/client_A_client.ovpn b/tests/local_vpn/client_configs/client_A_client.ovpn new file mode 100644 index 00000000..1506b75d --- /dev/null +++ b/tests/local_vpn/client_configs/client_A_client.ovpn @@ -0,0 +1,299 @@ +############################################## +# Sample client-side OpenVPN 2.0 config file # +# for connecting to multi-client server. # +# # +# This configuration can be used by multiple # +# clients, however each client should have # +# its own cert and key files. # +# # +# On Windows, you might want to rename this # +# file so it has a .ovpn extension # +############################################## + +# Specify that we are a client and that we +# will be pulling certain config file directives +# from the server. +client + +# Use the same setting as you are using on +# the server. +# On most systems, the VPN will not function +# unless you partially or fully disable +# the firewall for the TUN/TAP interface. +;dev tap +dev tun + +# Windows needs the TAP-Win32 adapter name +# from the Network Connections panel +# if you have more than one. On XP SP2, +# you may need to disable the firewall +# for the TAP adapter. +;dev-node MyTap + +# Are we connecting to a TCP or +# UDP server? Use the same setting as +# on the server. +;proto tcp +proto udp + +# The hostname/IP and port of the server. +# You can have multiple remote entries +# to load balance between the servers. +remote 172.17.0.1 9194 + +# Choose a random host from the remote +# list for load-balancing. Otherwise +# try hosts in the order specified. +;remote-random + +# Keep trying indefinitely to resolve the +# host name of the OpenVPN server. Very useful +# on machines which are not permanently connected +# to the internet such as laptops. +resolv-retry infinite + +# Most clients don't need to bind to +# a specific local port number. +nobind + +# Downgrade privileges after initialization (non-Windows only) +user nobody +group nogroup + +# Try to preserve some state across restarts. +persist-key +persist-tun + +# If you are connecting through an +# HTTP proxy to reach the actual OpenVPN +# server, put the proxy server/IP and +# port number here. See the man page +# if your proxy server requires +# authentication. +;http-proxy-retry # retry on connection failures +;http-proxy [proxy server] [proxy port #] + +# Wireless networks often produce a lot +# of duplicate packets. Set this flag +# to silence duplicate packet warnings. +;mute-replay-warnings + +# SSL/TLS parms. +# See the server config file for more +# description. It's best to use +# a separate .crt/.key file pair +# for each client. A single ca +# file can be used for all clients. + +# Verify server certificate by checking that the +# certificate has the correct key usage set. +# This is an important precaution to protect against +# a potential attack discussed here: +# http://openvpn.net/howto.html#mitm +# +# To use this feature, you will need to generate +# your server certificates with the keyUsage set to +# digitalSignature, keyEncipherment +# and the extendedKeyUsage to +# serverAuth +# EasyRSA can do this for you. +remote-cert-tls server + +# If a tls-auth key is used on the server +# then every client must also have the key. +;tls-auth ta.key 1 + +# Select a cryptographic cipher. +# If the cipher option is used on the server +# then you must also specify it here. +# Note that v2.4 client/server will automatically +# negotiate AES-256-GCM in TLS mode. +# See also the data-ciphers option in the manpage +;cipher AES-256-CBC +cipher AES-256-GCM + +auth SHA256 + +# Enable compression on the VPN link. +# Don't enable this unless it is also +# enabled in the server config file. +#comp-lzo + +# Set log file verbosity. +verb 3 + +# Silence repeating messages +;mute 20 + +key-direction 1 + +; script-security 2 +; up /etc/openvpn/update-resolv-conf +; down /etc/openvpn/update-resolv-conf + +; script-security 2 +; up /etc/openvpn/update-systemd-resolved +; down /etc/openvpn/update-systemd-resolved +; down-pre +; dhcp-option DOMAIN-ROUTE . + +-----BEGIN CERTIFICATE----- +MIIDQjCCAiqgAwIBAgIUBwqUYD1oxBKeImaMZfm44TsTAF0wDQYJKoZIhvcNAQEL +BQAwEzERMA8GA1UEAwwIQ2hhbmdlTWUwHhcNMjUwOTIzMTI0NjQyWhcNMzUwOTIx +MTI0NjQyWjATMREwDwYDVQQDDAhDaGFuZ2VNZTCCASIwDQYJKoZIhvcNAQEBBQAD +ggEPADCCAQoCggEBAKGt+8oRY7cWPg1SahfIV3XAeeH1SQEFq4f2q+E9ZbWVnCg9 +b59hMzwYr84/j4V73Hlv2udLrkguvnT9KqqJY/0wo3Bd1swH2WLej1fo0+rVo24w +hzeLfeH1e4erZbzQk8XG68U7yNDHKYo+LIz9syBzZA4Bq12bHxDsZbJF7HUANzFR +j9Xg3dR7utPtG8ktmD83rV9/E97whblMpLmjmf2sbCqdLOKTkZnwp5mI47TTkhMj +9K0q7irHmbtZcPZQH5Z59GtqaCaRt8DKfeYniyoPnGVfzFberHHQ4C11pcRrdvgY +n14/W5myh6HESQD6umyCYooyXG7wfqIKujROQCMCAwEAAaOBjTCBijAdBgNVHQ4E +FgQUtMsHbl94qRV7OW5UNNjk2mJ+/U8wTgYDVR0jBEcwRYAUtMsHbl94qRV7OW5U +NNjk2mJ+/U+hF6QVMBMxETAPBgNVBAMMCENoYW5nZU1lghQHCpRgPWjEEp4iZoxl ++bjhOxMAXTAMBgNVHRMEBTADAQH/MAsGA1UdDwQEAwIBBjANBgkqhkiG9w0BAQsF +AAOCAQEAGeryP/2JuOp7tzi7Ww9lFUx2DRcgq/FwnU4biotUfuLejHQt/IeIwRYs +dW6AToUYJak8Uy/AFffMootwLcC8z8FATBnxtokWNpxtscpbTSHbeS0HvXnXFaU8 +xxlzp9l5k+46MrrvdzFsjoRfVxs0FUHzWifBnObBziTLfHt+J71509uqRWX6JuTa +PDAT8CMcLKxxS4BcorWtAmc51lW/dQQ41HDJ8a6acltDAprmlnhd8ksWzpTjUDNR +/cfSMcVTpPxPSW/WchR5NlJKQEAf9B/xC+LQgDRSDLaZ8CvzRDgosllzJ+aIS7GK +GPec69LiKqpirZ7enwDM67R4DwIHKA== +-----END CERTIFICATE----- + + +Certificate: + Data: + Version: 3 (0x2) + Serial Number: + 54:bc:c2:64:c6:73:20:54:74:58:b8:6a:6e:10:38:76 + Signature Algorithm: sha256WithRSAEncryption + Issuer: CN=ChangeMe + Validity + Not Before: Sep 23 12:46:43 2025 GMT + Not After : Dec 27 12:46:43 2027 GMT + Subject: CN=client_A + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + Public-Key: (2048 bit) + Modulus: + 00:c8:62:8c:53:02:a0:a3:8b:17:bc:80:97:f8:0f: + 63:35:7d:75:1d:b4:36:bd:75:17:ac:36:35:0b:6a: + ec:38:b3:7f:d6:1f:ef:c2:90:dc:b3:d5:1e:11:65: + 36:5c:63:b8:ef:7c:d2:eb:05:4c:61:54:02:93:8b: + 84:6b:8b:1c:ca:3e:6e:d5:b4:b0:2c:6f:a4:36:db: + fc:d4:a3:8c:23:da:f0:be:cf:d3:16:dd:44:4d:77: + ce:53:1d:5e:14:e2:c3:67:b1:9a:25:44:f9:b3:b1: + f6:13:a6:0d:5e:16:49:cc:cd:52:b8:8c:2c:8e:ac: + 87:17:ff:ff:c1:8a:e3:f5:3c:71:69:9f:14:a2:85: + 37:0e:4b:16:24:83:08:4e:58:b7:60:36:98:c7:2e: + 4b:bb:d7:b2:e0:aa:95:bb:22:7d:a6:bf:da:71:95: + c0:fe:d6:bb:93:06:27:2f:b9:4c:47:85:f5:80:2b: + f1:1b:c8:03:bb:5a:8d:13:e9:0e:1a:23:c1:92:7a: + 7a:41:43:93:f3:3a:ca:36:0b:a2:dc:b8:fc:61:7d: + 7b:af:3e:7a:fc:ad:ac:d4:04:f4:ec:57:18:ae:c8: + 4d:c3:ec:5c:bd:72:c0:b0:8e:24:fe:13:44:93:b0: + c3:78:3c:99:23:74:dd:44:8f:e3:ac:1b:12:8d:d8: + 74:e9 + Exponent: 65537 (0x10001) + X509v3 extensions: + X509v3 Basic Constraints: + CA:FALSE + X509v3 Subject Key Identifier: + DF:E6:D3:15:9C:F9:C3:F9:4E:C9:60:28:FA:6B:38:CA:1C:72:F7:B2 + X509v3 Authority Key Identifier: + keyid:B4:CB:07:6E:5F:78:A9:15:7B:39:6E:54:34:D8:E4:DA:62:7E:FD:4F + DirName:/CN=ChangeMe + serial:07:0A:94:60:3D:68:C4:12:9E:22:66:8C:65:F9:B8:E1:3B:13:00:5D + X509v3 Extended Key Usage: + TLS Web Client Authentication + X509v3 Key Usage: + Digital Signature + Signature Algorithm: sha256WithRSAEncryption + Signature Value: + 6c:de:92:45:ed:e7:01:63:d9:63:29:65:b7:75:e6:ed:31:44: + 8b:a7:7c:06:0c:02:87:15:bd:f2:e3:3e:e0:8b:74:87:44:d3: + 8a:f6:86:6d:3e:2f:1c:e7:b9:1d:b5:42:4d:60:76:1c:4f:8d: + a7:9c:81:a6:57:8b:62:85:76:15:f8:f8:0d:ef:2c:85:27:f5: + 2a:1d:36:84:88:77:72:f7:52:85:93:b8:0f:0b:97:54:e9:23: + 76:d6:1d:44:09:57:3e:ee:33:72:87:02:91:2e:50:fc:a2:88: + 42:88:6d:de:26:21:cc:79:96:61:9f:d9:1e:12:54:7c:96:f7: + 49:4a:08:f9:72:26:d7:40:59:fc:ab:8b:01:3d:b6:e2:4d:19: + fc:ff:1a:39:78:65:e0:13:9a:33:be:99:d6:fb:30:ea:a4:0b: + 41:32:eb:0e:f8:1c:95:e7:16:a0:3f:8e:2c:43:17:10:3c:f7: + b3:98:71:59:2d:17:94:32:a1:9b:85:39:2f:fa:2e:f9:45:dc: + 6e:c9:11:de:94:e1:10:52:87:04:43:e1:9b:4e:39:7b:c6:1e: + 55:a8:82:7c:77:d1:4a:cb:4c:8f:cb:ee:3f:b6:c7:6f:8a:3d: + 1a:a9:9e:9a:16:a4:3e:10:c0:49:95:5a:7c:c0:13:35:15:e8: + 1f:1f:f8:1a +-----BEGIN CERTIFICATE----- +MIIDUDCCAjigAwIBAgIQVLzCZMZzIFR0WLhqbhA4djANBgkqhkiG9w0BAQsFADAT +MREwDwYDVQQDDAhDaGFuZ2VNZTAeFw0yNTA5MjMxMjQ2NDNaFw0yNzEyMjcxMjQ2 +NDNaMBMxETAPBgNVBAMMCGNsaWVudF9BMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAyGKMUwKgo4sXvICX+A9jNX11HbQ2vXUXrDY1C2rsOLN/1h/vwpDc +s9UeEWU2XGO473zS6wVMYVQCk4uEa4scyj5u1bSwLG+kNtv81KOMI9rwvs/TFt1E +TXfOUx1eFOLDZ7GaJUT5s7H2E6YNXhZJzM1SuIwsjqyHF///wYrj9TxxaZ8UooU3 +DksWJIMITli3YDaYxy5Lu9ey4KqVuyJ9pr/acZXA/ta7kwYnL7lMR4X1gCvxG8gD +u1qNE+kOGiPBknp6QUOT8zrKNgui3Lj8YX17rz56/K2s1AT07FcYrshNw+xcvXLA +sI4k/hNEk7DDeDyZI3TdRI/jrBsSjdh06QIDAQABo4GfMIGcMAkGA1UdEwQCMAAw +HQYDVR0OBBYEFN/m0xWc+cP5TslgKPprOMoccveyME4GA1UdIwRHMEWAFLTLB25f +eKkVezluVDTY5Npifv1PoRekFTATMREwDwYDVQQDDAhDaGFuZ2VNZYIUBwqUYD1o +xBKeImaMZfm44TsTAF0wEwYDVR0lBAwwCgYIKwYBBQUHAwIwCwYDVR0PBAQDAgeA +MA0GCSqGSIb3DQEBCwUAA4IBAQBs3pJF7ecBY9ljKWW3debtMUSLp3wGDAKHFb3y +4z7gi3SHRNOK9oZtPi8c57kdtUJNYHYcT42nnIGmV4tihXYV+PgN7yyFJ/UqHTaE +iHdy91KFk7gPC5dU6SN21h1ECVc+7jNyhwKRLlD8oohCiG3eJiHMeZZhn9keElR8 +lvdJSgj5cibXQFn8q4sBPbbiTRn8/xo5eGXgE5ozvpnW+zDqpAtBMusO+ByV5xag +P44sQxcQPPezmHFZLReUMqGbhTkv+i75RdxuyRHelOEQUocEQ+GbTjl7xh5VqIJ8 +d9FKy0yPy+4/tsdvij0aqZ6aFqQ+EMBJlVp8wBM1FegfH/ga +-----END CERTIFICATE----- + + +-----BEGIN PRIVATE KEY----- +MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDIYoxTAqCjixe8 +gJf4D2M1fXUdtDa9dResNjULauw4s3/WH+/CkNyz1R4RZTZcY7jvfNLrBUxhVAKT +i4RrixzKPm7VtLAsb6Q22/zUo4wj2vC+z9MW3URNd85THV4U4sNnsZolRPmzsfYT +pg1eFknMzVK4jCyOrIcX///BiuP1PHFpnxSihTcOSxYkgwhOWLdgNpjHLku717Lg +qpW7In2mv9pxlcD+1ruTBicvuUxHhfWAK/EbyAO7Wo0T6Q4aI8GSenpBQ5PzOso2 +C6LcuPxhfXuvPnr8razUBPTsVxiuyE3D7Fy9csCwjiT+E0STsMN4PJkjdN1Ej+Os +GxKN2HTpAgMBAAECggEAWxr3KryclY6lTZZ3wZgZZpXyO/2WD8BfcXQ53MWRvdva +iNt/Ukozle2U3JrUQuAyEmyBpsoDZpLgEv4RSCX5AnitQquCl8lwc2LEilcLXbfq +0g5CLniOV9xbKc3F2yAYcJo+d6hrEQid1WQfWsIubpeBfxd4IKwPRdmmCfRgXTv+ +a7TVI9pRmFNg7J9Cs2VEqf7SdMX8U+7bPJfvHZ+aWYO5d9ZWhMSW5EB43QlgcVg2 +Eof1AjvkBY4NOOsb2uWkw7HiKloT95L8PR6I9bSCesJU58oGDPJyQKG58ANk5alh +9qPgzK5RnkMxzO+aEEzZ5x8NYacx51JwcScI6r5/ewKBgQDgMQSg+h+JZmGY1nuY +5OM6OiGoyHq8PAogPzWEO4N5I26kmkiTiLzyr4dzvPNx+1uOCuSQpt/qBB9rli1w +y1PQkrXMtfrHv83AWep1bFgripgwsGTKRTq0t9Obl5zzkV2OaBlGJP+gaBnfEbM4 +htchBFEyTMfoobFz9+Xv8mvHBwKBgQDk0NcZ7xoqx1PY4f8bbpAIOj8VhJopsZBm +Jv/jzJq8JREMXU54y8VkaT3ihY5tq/7DhPvpeVy87UI6urEaxoK5D3xAdoMVsBy0 +SVkfAMTjqU5PpZahPTL1vyvrH9EvJSfW1/qhtdyxZzw5p0T2Ro1ZEEC/GTRAZZuV +LgUHt594jwKBgQCtzJJgEUedhucmSzAqCVc2bpZleHXds1XORfJA/rofkR5XMNwO +s7R3FyiUyuiXdls1tLAYi6WOj3+kMhosFRR23yVc+77cV480DQC74zA/IQR2ymh4 +fk7ShqffORwNnqW+nmjpfglF2y4jRl9/9NiV2fjwW6GmcKNW2dlBuNdgxQKBgEKu +pfEV4D9VRZcwDWNWLj1nlBjWQwMhjx5mAS7G4tUvzC8ZRhQn9keT8AgCugY2GJGs +QKnCx4b7cdChtZlC/relzqUOpJb+cu8LbSB+3eIm5f6KGEK3DhHV+5uS8yhVIK4Y +1R6pXD6LAl8e4xcOaoTpGqVWWAboVZX9ClQ8bAn7AoGADM9OTA+hc/LicOO/oqp+ +lJ3XKBbQMvWZY0fhGvm0DSYLZi7cBOBCBwJXvfq278Cq1u+i2QHW9hV64Dcbt0TQ +l74cqQpoXZ7ZYFUUmYsEh3smL8K1u176Yig9LbVjUBD2eF02J+OXGWJtDQwyI696 +04gCGQhFI98vaM11YlS+skk= +-----END PRIVATE KEY----- + + +# +# 2048 bit OpenVPN static key +# +-----BEGIN OpenVPN Static key V1----- +488b61084812969fe8ad0f9dd40f56a2 +6cdadddfe345daef6b5c6d3c3e779fc5 +1f7d236966953482d2af085e3f8581b7 +d216f2d891972a463bbb22ca6c104b9d +f99dcb19d7d575a1d46e7918bb2556c6 +db9f51cd792c5e89e011586214692b95 +2a32a7fe85e4538c40e1d0aa2a9f8e15 +fcc0ce5d31974e3c2041b127776f7658 +878cb8245ed235ec996c2370c0fc0023 +699bc028b3412bc40209cba8233bc111 +fa1438095f99052d799fa718f3b04499 +472254d0286b4b2ce99db49e98a4cc25 +fd948bddcdcf08006a6d7bff40354e7b +5e93ea753a8ecc05de41ae34d280e7eb +99220e436bf8b7693a00667485631e28 +edba3e33b6f558dfa50b92eec6ac8b44 +-----END OpenVPN Static key V1----- + diff --git a/tests/local_vpn/client_configs/client_B_client.ovpn b/tests/local_vpn/client_configs/client_B_client.ovpn new file mode 100644 index 00000000..6229c033 --- /dev/null +++ b/tests/local_vpn/client_configs/client_B_client.ovpn @@ -0,0 +1,299 @@ +############################################## +# Sample client-side OpenVPN 2.0 config file # +# for connecting to multi-client server. # +# # +# This configuration can be used by multiple # +# clients, however each client should have # +# its own cert and key files. # +# # +# On Windows, you might want to rename this # +# file so it has a .ovpn extension # +############################################## + +# Specify that we are a client and that we +# will be pulling certain config file directives +# from the server. +client + +# Use the same setting as you are using on +# the server. +# On most systems, the VPN will not function +# unless you partially or fully disable +# the firewall for the TUN/TAP interface. +;dev tap +dev tun + +# Windows needs the TAP-Win32 adapter name +# from the Network Connections panel +# if you have more than one. On XP SP2, +# you may need to disable the firewall +# for the TAP adapter. +;dev-node MyTap + +# Are we connecting to a TCP or +# UDP server? Use the same setting as +# on the server. +;proto tcp +proto udp + +# The hostname/IP and port of the server. +# You can have multiple remote entries +# to load balance between the servers. +remote 172.17.0.1 9194 + +# Choose a random host from the remote +# list for load-balancing. Otherwise +# try hosts in the order specified. +;remote-random + +# Keep trying indefinitely to resolve the +# host name of the OpenVPN server. Very useful +# on machines which are not permanently connected +# to the internet such as laptops. +resolv-retry infinite + +# Most clients don't need to bind to +# a specific local port number. +nobind + +# Downgrade privileges after initialization (non-Windows only) +user nobody +group nogroup + +# Try to preserve some state across restarts. +persist-key +persist-tun + +# If you are connecting through an +# HTTP proxy to reach the actual OpenVPN +# server, put the proxy server/IP and +# port number here. See the man page +# if your proxy server requires +# authentication. +;http-proxy-retry # retry on connection failures +;http-proxy [proxy server] [proxy port #] + +# Wireless networks often produce a lot +# of duplicate packets. Set this flag +# to silence duplicate packet warnings. +;mute-replay-warnings + +# SSL/TLS parms. +# See the server config file for more +# description. It's best to use +# a separate .crt/.key file pair +# for each client. A single ca +# file can be used for all clients. + +# Verify server certificate by checking that the +# certificate has the correct key usage set. +# This is an important precaution to protect against +# a potential attack discussed here: +# http://openvpn.net/howto.html#mitm +# +# To use this feature, you will need to generate +# your server certificates with the keyUsage set to +# digitalSignature, keyEncipherment +# and the extendedKeyUsage to +# serverAuth +# EasyRSA can do this for you. +remote-cert-tls server + +# If a tls-auth key is used on the server +# then every client must also have the key. +;tls-auth ta.key 1 + +# Select a cryptographic cipher. +# If the cipher option is used on the server +# then you must also specify it here. +# Note that v2.4 client/server will automatically +# negotiate AES-256-GCM in TLS mode. +# See also the data-ciphers option in the manpage +;cipher AES-256-CBC +cipher AES-256-GCM + +auth SHA256 + +# Enable compression on the VPN link. +# Don't enable this unless it is also +# enabled in the server config file. +#comp-lzo + +# Set log file verbosity. +verb 3 + +# Silence repeating messages +;mute 20 + +key-direction 1 + +; script-security 2 +; up /etc/openvpn/update-resolv-conf +; down /etc/openvpn/update-resolv-conf + +; script-security 2 +; up /etc/openvpn/update-systemd-resolved +; down /etc/openvpn/update-systemd-resolved +; down-pre +; dhcp-option DOMAIN-ROUTE . + +-----BEGIN CERTIFICATE----- +MIIDQjCCAiqgAwIBAgIUBwqUYD1oxBKeImaMZfm44TsTAF0wDQYJKoZIhvcNAQEL +BQAwEzERMA8GA1UEAwwIQ2hhbmdlTWUwHhcNMjUwOTIzMTI0NjQyWhcNMzUwOTIx +MTI0NjQyWjATMREwDwYDVQQDDAhDaGFuZ2VNZTCCASIwDQYJKoZIhvcNAQEBBQAD +ggEPADCCAQoCggEBAKGt+8oRY7cWPg1SahfIV3XAeeH1SQEFq4f2q+E9ZbWVnCg9 +b59hMzwYr84/j4V73Hlv2udLrkguvnT9KqqJY/0wo3Bd1swH2WLej1fo0+rVo24w +hzeLfeH1e4erZbzQk8XG68U7yNDHKYo+LIz9syBzZA4Bq12bHxDsZbJF7HUANzFR +j9Xg3dR7utPtG8ktmD83rV9/E97whblMpLmjmf2sbCqdLOKTkZnwp5mI47TTkhMj +9K0q7irHmbtZcPZQH5Z59GtqaCaRt8DKfeYniyoPnGVfzFberHHQ4C11pcRrdvgY +n14/W5myh6HESQD6umyCYooyXG7wfqIKujROQCMCAwEAAaOBjTCBijAdBgNVHQ4E +FgQUtMsHbl94qRV7OW5UNNjk2mJ+/U8wTgYDVR0jBEcwRYAUtMsHbl94qRV7OW5U +NNjk2mJ+/U+hF6QVMBMxETAPBgNVBAMMCENoYW5nZU1lghQHCpRgPWjEEp4iZoxl ++bjhOxMAXTAMBgNVHRMEBTADAQH/MAsGA1UdDwQEAwIBBjANBgkqhkiG9w0BAQsF +AAOCAQEAGeryP/2JuOp7tzi7Ww9lFUx2DRcgq/FwnU4biotUfuLejHQt/IeIwRYs +dW6AToUYJak8Uy/AFffMootwLcC8z8FATBnxtokWNpxtscpbTSHbeS0HvXnXFaU8 +xxlzp9l5k+46MrrvdzFsjoRfVxs0FUHzWifBnObBziTLfHt+J71509uqRWX6JuTa +PDAT8CMcLKxxS4BcorWtAmc51lW/dQQ41HDJ8a6acltDAprmlnhd8ksWzpTjUDNR +/cfSMcVTpPxPSW/WchR5NlJKQEAf9B/xC+LQgDRSDLaZ8CvzRDgosllzJ+aIS7GK +GPec69LiKqpirZ7enwDM67R4DwIHKA== +-----END CERTIFICATE----- + + +Certificate: + Data: + Version: 3 (0x2) + Serial Number: + e0:1a:9b:9d:b6:2e:8a:b3:15:ba:a5:92:33:3d:75:01 + Signature Algorithm: sha256WithRSAEncryption + Issuer: CN=ChangeMe + Validity + Not Before: Sep 23 12:46:43 2025 GMT + Not After : Dec 27 12:46:43 2027 GMT + Subject: CN=client_B + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + Public-Key: (2048 bit) + Modulus: + 00:bf:f1:4b:16:3d:95:5e:bd:9f:34:53:d6:a0:80: + 7c:0c:3b:36:65:32:0c:b5:a2:98:12:92:81:66:73: + 68:dd:ec:e3:b4:86:f8:7c:32:c1:1b:01:3b:47:07: + 61:fb:e4:d4:40:cf:9e:b6:1f:b8:10:8d:ac:39:f6: + 76:5d:84:5c:fb:38:f6:5d:cd:fe:60:dd:58:b9:fa: + ee:6b:61:62:53:e1:aa:31:b0:b8:36:8e:6b:b1:7c: + 08:8a:5f:1c:f3:03:29:3b:4f:bc:12:74:60:af:97: + 39:63:c2:77:f1:73:8d:b1:f5:80:f2:a2:e9:6b:4d: + 83:bf:7a:95:ee:30:6b:e1:e0:a4:6c:b4:e6:75:f9: + 92:3c:17:a0:17:1d:37:4b:5f:b3:2d:7a:ab:20:5e: + 27:22:82:31:5d:67:bb:58:3e:53:06:02:d9:17:84: + fa:2a:56:48:10:12:d8:5f:c2:00:f0:8c:d8:29:09: + ed:bf:d1:c2:30:74:2f:33:3f:7e:38:88:3a:fc:13: + f1:ed:5b:90:30:8e:7a:c5:b2:89:0f:21:e6:ad:8d: + a4:ca:30:e3:f8:5f:52:8e:cb:eb:13:6d:ce:cb:7c: + 21:ae:ab:b5:58:cd:85:1f:93:98:7f:ad:3f:1f:b0: + 95:14:74:20:ed:82:be:28:47:77:80:a8:8b:a7:33: + 41:7f + Exponent: 65537 (0x10001) + X509v3 extensions: + X509v3 Basic Constraints: + CA:FALSE + X509v3 Subject Key Identifier: + 6B:AB:8A:5C:11:80:8B:38:1F:B9:4B:7E:DC:AF:5A:B4:CF:41:74:4F + X509v3 Authority Key Identifier: + keyid:B4:CB:07:6E:5F:78:A9:15:7B:39:6E:54:34:D8:E4:DA:62:7E:FD:4F + DirName:/CN=ChangeMe + serial:07:0A:94:60:3D:68:C4:12:9E:22:66:8C:65:F9:B8:E1:3B:13:00:5D + X509v3 Extended Key Usage: + TLS Web Client Authentication + X509v3 Key Usage: + Digital Signature + Signature Algorithm: sha256WithRSAEncryption + Signature Value: + 49:2f:45:4f:07:f9:cf:26:0a:0c:a6:45:a9:cc:ca:e6:be:1f: + 24:47:b5:a7:5b:f0:00:e3:6d:15:b7:cd:1f:98:33:7a:dd:b4: + 2d:1a:c0:fe:34:84:ec:53:f8:b0:88:7c:30:9f:f3:43:5b:19: + 5b:dc:57:e4:18:fe:d7:cf:eb:50:03:8a:bf:03:d5:9c:79:92: + ad:5f:fe:12:a5:39:74:4e:e1:e0:48:af:31:62:a7:e8:e6:9a: + e9:e2:d7:40:52:d5:ab:22:e3:0b:9c:78:18:83:76:ba:5e:fe: + 6f:aa:96:f4:76:0f:88:ac:56:18:bc:e6:da:b7:55:ab:42:b7: + 74:2b:94:00:c8:e5:a1:66:63:41:b5:a9:48:7d:15:ce:d1:eb: + 14:50:3e:d0:a7:78:f4:92:0f:e3:ee:0d:df:5d:2c:ce:85:bf: + 73:39:32:dc:17:39:d4:39:11:11:f4:0b:ad:4d:af:88:1a:d4: + c4:bf:b9:1c:ed:e8:21:d4:b7:48:01:55:ff:a7:2b:86:b4:dd: + b4:54:fb:1f:0d:96:2b:da:15:c7:13:d2:1d:34:d5:13:dd:f4: + 6a:20:5a:e8:00:b8:60:88:5c:76:7e:77:82:6f:1b:a7:4c:41: + fb:4f:0f:1a:df:46:1f:09:79:a0:1c:16:c1:cd:7a:48:1c:91: + 1f:db:06:92 +-----BEGIN CERTIFICATE----- +MIIDUTCCAjmgAwIBAgIRAOAam522LoqzFbqlkjM9dQEwDQYJKoZIhvcNAQELBQAw +EzERMA8GA1UEAwwIQ2hhbmdlTWUwHhcNMjUwOTIzMTI0NjQzWhcNMjcxMjI3MTI0 +NjQzWjATMREwDwYDVQQDDAhjbGllbnRfQjCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBAL/xSxY9lV69nzRT1qCAfAw7NmUyDLWimBKSgWZzaN3s47SG+Hwy +wRsBO0cHYfvk1EDPnrYfuBCNrDn2dl2EXPs49l3N/mDdWLn67mthYlPhqjGwuDaO +a7F8CIpfHPMDKTtPvBJ0YK+XOWPCd/FzjbH1gPKi6WtNg796le4wa+HgpGy05nX5 +kjwXoBcdN0tfsy16qyBeJyKCMV1nu1g+UwYC2ReE+ipWSBAS2F/CAPCM2CkJ7b/R +wjB0LzM/fjiIOvwT8e1bkDCOesWyiQ8h5q2NpMow4/hfUo7L6xNtzst8Ia6rtVjN +hR+TmH+tPx+wlRR0IO2CvihHd4Coi6czQX8CAwEAAaOBnzCBnDAJBgNVHRMEAjAA +MB0GA1UdDgQWBBRrq4pcEYCLOB+5S37cr1q0z0F0TzBOBgNVHSMERzBFgBS0ywdu +X3ipFXs5blQ02OTaYn79T6EXpBUwEzERMA8GA1UEAwwIQ2hhbmdlTWWCFAcKlGA9 +aMQSniJmjGX5uOE7EwBdMBMGA1UdJQQMMAoGCCsGAQUFBwMCMAsGA1UdDwQEAwIH +gDANBgkqhkiG9w0BAQsFAAOCAQEASS9FTwf5zyYKDKZFqczK5r4fJEe1p1vwAONt +FbfNH5gzet20LRrA/jSE7FP4sIh8MJ/zQ1sZW9xX5Bj+18/rUAOKvwPVnHmSrV/+ +EqU5dE7h4EivMWKn6Oaa6eLXQFLVqyLjC5x4GIN2ul7+b6qW9HYPiKxWGLzm2rdV +q0K3dCuUAMjloWZjQbWpSH0VztHrFFA+0Kd49JIP4+4N310szoW/czky3Bc51DkR +EfQLrU2viBrUxL+5HO3oIdS3SAFV/6crhrTdtFT7Hw2WK9oVxxPSHTTVE930aiBa +6AC4YIhcdn53gm8bp0xB+08PGt9GHwl5oBwWwc16SByRH9sGkg== +-----END CERTIFICATE----- + + +-----BEGIN PRIVATE KEY----- +MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQC/8UsWPZVevZ80 +U9aggHwMOzZlMgy1opgSkoFmc2jd7OO0hvh8MsEbATtHB2H75NRAz562H7gQjaw5 +9nZdhFz7OPZdzf5g3Vi5+u5rYWJT4aoxsLg2jmuxfAiKXxzzAyk7T7wSdGCvlzlj +wnfxc42x9YDyoulrTYO/epXuMGvh4KRstOZ1+ZI8F6AXHTdLX7MteqsgXicigjFd +Z7tYPlMGAtkXhPoqVkgQEthfwgDwjNgpCe2/0cIwdC8zP344iDr8E/HtW5AwjnrF +sokPIeatjaTKMOP4X1KOy+sTbc7LfCGuq7VYzYUfk5h/rT8fsJUUdCDtgr4oR3eA +qIunM0F/AgMBAAECggEAKNI2d+ptBBMr8sMJ2GS6/RbywJ7eWRrVYM3Lu3A8E0a4 +PsKdwjxBGW8vnjGRwzKteYMua+lfChY3VLR4A/eMltlMfDK9MPiiUBtv7WJuuQw7 +WAoPg3rSqJKKdnM4Au7fLAAPLZWWooF08SSAwdcjgX+HBxNitTFtHaIClP+zUfxI +av/bwDbUj928Lo/WZ/UtS0v+Bq8C+B4c/udYN7k4VDTuKvVv0KqJTn0deQ1fGBxt +a61HcLPOjBO9wnakcZMtmcz9bi9ziKIsOvoontTPTNP9M2p11mJMdndZvBW6sue0 +zb31Kd1QLlk6LkLEbp32SwA265QofOvc2Xf8Gr0G8QKBgQDjIhdhvkZqP3cl4jCw +IlPR7Y7TCWECXh9v76MLKIvmLXo5mO8b/DeQBTMQ5N+PW7/6eB5GDFi+B+foqEbk +NbpawtvDSglhGjyj0X6XqYHMpBSBLQuEvw03BiOgJddkE1BA2HVBvfr5JS7eRoZZ +sjOX+OBmpb1ie6hH7QIFbHbrzQKBgQDYVkFD6tKyUI8QhGqLf6xPvLha9gTwCG8m +uQe+fjVFZ2f/Cru5/sNl/xMiW2y1Sq37L8mLmY1hdxSGfkDzfcKdF6I1//woZmfK +cXWFTpqEBYTbVQGktZb37KasNdp4hREavWc3xKdiJOfbxVk+9cO4zSPyfMIpN/Km +YxwCApXOewKBgQDe8W+R+XqUf4csIEE6Ife0b0Fp1CLseAbTkJyxLzNi0/DM6FiL +V54SN4hQZNcrmBtwdscAas4QeSIhNEuhZTtuKyYbImjibyZmhhOEOlW10Lhvsw9D +VWRbRiNh5sLs8Cgt/knaJeha9Sxz8TWehVQvL5LULoseR9J+Bx2cxUJVYQKBgQCh +nqb5l3g7ESYgf9ydRQ+1LldIVV3Q+WwYsMkRTnZ72EoAZsNiq+rMy2g/FbA8LIOY +EdZvfZL7CpyB8daSUhTPibV8xDZc9Ex8GJFkuxmCoiDkPziQFb2okNrf8we5XCgw +Iun25urpzoqNTH1lJPRInrFJWl0vsAWOuqJU+hty+wKBgBDc7Ym9zMUaB1rLatwd +ECejBcvAPpD7rwEqmdzj9DTfzCOaUwsHwsQAEwg1tFrhuK5W44FtAP8y4eWn3Krt +ExPgrA5JxWnmI297Pa9YDuB6eczSdxKH2AxE0vz552ZPnO5eTZIQZAgIuVGZZxmR +KcXzTlbubo5w1jJpvbczHhA5 +-----END PRIVATE KEY----- + + +# +# 2048 bit OpenVPN static key +# +-----BEGIN OpenVPN Static key V1----- +488b61084812969fe8ad0f9dd40f56a2 +6cdadddfe345daef6b5c6d3c3e779fc5 +1f7d236966953482d2af085e3f8581b7 +d216f2d891972a463bbb22ca6c104b9d +f99dcb19d7d575a1d46e7918bb2556c6 +db9f51cd792c5e89e011586214692b95 +2a32a7fe85e4538c40e1d0aa2a9f8e15 +fcc0ce5d31974e3c2041b127776f7658 +878cb8245ed235ec996c2370c0fc0023 +699bc028b3412bc40209cba8233bc111 +fa1438095f99052d799fa718f3b04499 +472254d0286b4b2ce99db49e98a4cc25 +fd948bddcdcf08006a6d7bff40354e7b +5e93ea753a8ecc05de41ae34d280e7eb +99220e436bf8b7693a00667485631e28 +edba3e33b6f558dfa50b92eec6ac8b44 +-----END OpenVPN Static key V1----- + diff --git a/tests/local_vpn/client_configs/make_ovpn.sh b/tests/local_vpn/client_configs/make_ovpn.sh new file mode 100755 index 00000000..6a73d7f7 --- /dev/null +++ b/tests/local_vpn/client_configs/make_ovpn.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +# First argument: Client identifier + +KEY_DIR=./keys +BASE_CONFIG=./client.conf + +cat ${BASE_CONFIG} \ + <(echo -e '') \ + ${KEY_DIR}/ca.crt \ + <(echo -e '\n') \ + ${KEY_DIR}/${1}.crt \ + <(echo -e '\n') \ + ${KEY_DIR}/${1}.key \ + <(echo -e '\n') \ + ${KEY_DIR}/ta.key \ + <(echo -e '') \ + > ${1}_client.ovpn diff --git a/tests/local_vpn/client_configs/testserver.local_client.ovpn b/tests/local_vpn/client_configs/testserver.local_client.ovpn new file mode 100644 index 00000000..4d11e13f --- /dev/null +++ b/tests/local_vpn/client_configs/testserver.local_client.ovpn @@ -0,0 +1,299 @@ +############################################## +# Sample client-side OpenVPN 2.0 config file # +# for connecting to multi-client server. # +# # +# This configuration can be used by multiple # +# clients, however each client should have # +# its own cert and key files. # +# # +# On Windows, you might want to rename this # +# file so it has a .ovpn extension # +############################################## + +# Specify that we are a client and that we +# will be pulling certain config file directives +# from the server. +client + +# Use the same setting as you are using on +# the server. +# On most systems, the VPN will not function +# unless you partially or fully disable +# the firewall for the TUN/TAP interface. +;dev tap +dev tun + +# Windows needs the TAP-Win32 adapter name +# from the Network Connections panel +# if you have more than one. On XP SP2, +# you may need to disable the firewall +# for the TAP adapter. +;dev-node MyTap + +# Are we connecting to a TCP or +# UDP server? Use the same setting as +# on the server. +;proto tcp +proto udp + +# The hostname/IP and port of the server. +# You can have multiple remote entries +# to load balance between the servers. +remote 172.17.0.1 9194 + +# Choose a random host from the remote +# list for load-balancing. Otherwise +# try hosts in the order specified. +;remote-random + +# Keep trying indefinitely to resolve the +# host name of the OpenVPN server. Very useful +# on machines which are not permanently connected +# to the internet such as laptops. +resolv-retry infinite + +# Most clients don't need to bind to +# a specific local port number. +nobind + +# Downgrade privileges after initialization (non-Windows only) +user nobody +group nogroup + +# Try to preserve some state across restarts. +persist-key +persist-tun + +# If you are connecting through an +# HTTP proxy to reach the actual OpenVPN +# server, put the proxy server/IP and +# port number here. See the man page +# if your proxy server requires +# authentication. +;http-proxy-retry # retry on connection failures +;http-proxy [proxy server] [proxy port #] + +# Wireless networks often produce a lot +# of duplicate packets. Set this flag +# to silence duplicate packet warnings. +;mute-replay-warnings + +# SSL/TLS parms. +# See the server config file for more +# description. It's best to use +# a separate .crt/.key file pair +# for each client. A single ca +# file can be used for all clients. + +# Verify server certificate by checking that the +# certificate has the correct key usage set. +# This is an important precaution to protect against +# a potential attack discussed here: +# http://openvpn.net/howto.html#mitm +# +# To use this feature, you will need to generate +# your server certificates with the keyUsage set to +# digitalSignature, keyEncipherment +# and the extendedKeyUsage to +# serverAuth +# EasyRSA can do this for you. +remote-cert-tls server + +# If a tls-auth key is used on the server +# then every client must also have the key. +;tls-auth ta.key 1 + +# Select a cryptographic cipher. +# If the cipher option is used on the server +# then you must also specify it here. +# Note that v2.4 client/server will automatically +# negotiate AES-256-GCM in TLS mode. +# See also the data-ciphers option in the manpage +;cipher AES-256-CBC +cipher AES-256-GCM + +auth SHA256 + +# Enable compression on the VPN link. +# Don't enable this unless it is also +# enabled in the server config file. +#comp-lzo + +# Set log file verbosity. +verb 3 + +# Silence repeating messages +;mute 20 + +key-direction 1 + +; script-security 2 +; up /etc/openvpn/update-resolv-conf +; down /etc/openvpn/update-resolv-conf + +; script-security 2 +; up /etc/openvpn/update-systemd-resolved +; down /etc/openvpn/update-systemd-resolved +; down-pre +; dhcp-option DOMAIN-ROUTE . + +-----BEGIN CERTIFICATE----- +MIIDQjCCAiqgAwIBAgIUBwqUYD1oxBKeImaMZfm44TsTAF0wDQYJKoZIhvcNAQEL +BQAwEzERMA8GA1UEAwwIQ2hhbmdlTWUwHhcNMjUwOTIzMTI0NjQyWhcNMzUwOTIx +MTI0NjQyWjATMREwDwYDVQQDDAhDaGFuZ2VNZTCCASIwDQYJKoZIhvcNAQEBBQAD +ggEPADCCAQoCggEBAKGt+8oRY7cWPg1SahfIV3XAeeH1SQEFq4f2q+E9ZbWVnCg9 +b59hMzwYr84/j4V73Hlv2udLrkguvnT9KqqJY/0wo3Bd1swH2WLej1fo0+rVo24w +hzeLfeH1e4erZbzQk8XG68U7yNDHKYo+LIz9syBzZA4Bq12bHxDsZbJF7HUANzFR +j9Xg3dR7utPtG8ktmD83rV9/E97whblMpLmjmf2sbCqdLOKTkZnwp5mI47TTkhMj +9K0q7irHmbtZcPZQH5Z59GtqaCaRt8DKfeYniyoPnGVfzFberHHQ4C11pcRrdvgY +n14/W5myh6HESQD6umyCYooyXG7wfqIKujROQCMCAwEAAaOBjTCBijAdBgNVHQ4E +FgQUtMsHbl94qRV7OW5UNNjk2mJ+/U8wTgYDVR0jBEcwRYAUtMsHbl94qRV7OW5U +NNjk2mJ+/U+hF6QVMBMxETAPBgNVBAMMCENoYW5nZU1lghQHCpRgPWjEEp4iZoxl ++bjhOxMAXTAMBgNVHRMEBTADAQH/MAsGA1UdDwQEAwIBBjANBgkqhkiG9w0BAQsF +AAOCAQEAGeryP/2JuOp7tzi7Ww9lFUx2DRcgq/FwnU4biotUfuLejHQt/IeIwRYs +dW6AToUYJak8Uy/AFffMootwLcC8z8FATBnxtokWNpxtscpbTSHbeS0HvXnXFaU8 +xxlzp9l5k+46MrrvdzFsjoRfVxs0FUHzWifBnObBziTLfHt+J71509uqRWX6JuTa +PDAT8CMcLKxxS4BcorWtAmc51lW/dQQ41HDJ8a6acltDAprmlnhd8ksWzpTjUDNR +/cfSMcVTpPxPSW/WchR5NlJKQEAf9B/xC+LQgDRSDLaZ8CvzRDgosllzJ+aIS7GK +GPec69LiKqpirZ7enwDM67R4DwIHKA== +-----END CERTIFICATE----- + + +Certificate: + Data: + Version: 3 (0x2) + Serial Number: + 0b:44:23:c7:c0:5f:a4:2c:ee:c7:77:80:f9:48:36:04 + Signature Algorithm: sha256WithRSAEncryption + Issuer: CN=ChangeMe + Validity + Not Before: Sep 23 12:46:42 2025 GMT + Not After : Dec 27 12:46:42 2027 GMT + Subject: CN=testserver.local + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + Public-Key: (2048 bit) + Modulus: + 00:b4:ef:06:2e:ca:f4:e5:1f:b4:1e:d0:ca:d4:a1: + ef:03:4d:14:b6:e8:4e:e9:26:e0:c5:96:d7:0a:36: + a5:4c:6d:92:5b:05:e8:0e:57:14:64:c1:84:1f:7c: + f4:99:3a:c7:4a:41:92:5a:c1:99:c1:0c:33:d6:81: + f2:49:e3:7a:10:d1:2e:24:b8:3e:d1:00:a6:c0:a4: + 56:a5:17:7d:70:df:74:e5:0c:97:5e:67:2f:05:0a: + 81:8b:24:5b:22:b5:87:62:12:4a:92:b2:e2:b7:3b: + d6:39:20:dc:22:76:58:61:5c:a4:6d:d5:33:4b:a6: + 54:00:7f:43:69:ce:0a:d6:3a:21:d2:8c:59:1e:e7: + 66:ad:77:6b:fe:56:d3:12:ca:bd:18:55:c9:71:e4: + 8b:da:67:28:b3:63:6b:6f:31:e2:b5:89:15:af:ea: + 1a:9a:7f:31:b3:f1:ba:32:21:59:96:81:71:9f:69: + 13:86:d2:db:c5:aa:0c:a7:95:3b:68:a3:9d:46:a9: + 61:c9:04:13:53:44:3e:60:81:5e:da:54:43:b2:90: + 75:33:dc:4a:9a:ed:2e:f0:82:ef:1f:e6:72:7f:6b: + 20:64:67:9b:d3:66:e4:99:64:6a:62:7f:47:83:c3: + 50:f3:bc:fe:e2:7a:c8:65:99:82:2c:89:3b:2c:78: + 32:e3 + Exponent: 65537 (0x10001) + X509v3 extensions: + X509v3 Basic Constraints: + CA:FALSE + X509v3 Subject Key Identifier: + D0:06:2F:3A:D8:9B:F3:9D:7D:B5:8A:F6:5D:CE:8A:83:89:5D:AB:B0 + X509v3 Authority Key Identifier: + keyid:B4:CB:07:6E:5F:78:A9:15:7B:39:6E:54:34:D8:E4:DA:62:7E:FD:4F + DirName:/CN=ChangeMe + serial:07:0A:94:60:3D:68:C4:12:9E:22:66:8C:65:F9:B8:E1:3B:13:00:5D + X509v3 Extended Key Usage: + TLS Web Client Authentication + X509v3 Key Usage: + Digital Signature + Signature Algorithm: sha256WithRSAEncryption + Signature Value: + 01:81:34:a1:ad:9d:9f:e7:cf:a1:ae:e5:8b:6f:d8:b3:eb:ac: + f7:8f:09:8c:f5:ad:64:96:a5:45:58:c6:92:6e:f8:e2:21:06: + 2d:2a:89:fb:61:d5:eb:6b:56:78:d7:28:31:f7:58:2c:52:bf: + b2:ed:48:92:0c:49:b1:70:30:78:14:41:76:d4:c4:be:3c:15: + b8:4f:27:6d:a9:87:3b:45:b9:a4:76:3d:23:51:6a:9d:ca:24: + 63:ba:50:ed:4c:b9:ad:8f:c8:57:54:44:16:53:35:0a:c6:c8: + 25:2e:57:7c:32:28:57:bd:e4:6d:98:a8:96:31:d9:42:bb:65: + 25:0e:2a:d9:a5:94:17:2c:6c:bb:f7:c6:d6:e9:b2:df:a2:66: + f6:cb:73:43:97:dc:5c:b5:34:a3:0a:8b:84:ba:71:4e:81:83: + 8d:5e:2c:99:7f:12:89:b3:90:27:1a:0c:e8:c6:d5:51:8f:9f: + ea:49:b9:24:64:68:64:40:98:21:82:eb:52:7c:8b:10:48:61: + b5:01:d4:42:6c:2e:13:f1:07:52:0d:cf:05:cd:06:70:0c:63: + aa:e1:dc:93:2b:bb:8e:eb:11:3e:59:6f:12:90:37:29:d8:45: + fc:d3:52:87:b4:a2:55:54:f2:17:d8:f4:32:52:39:3a:cf:0d: + 2c:a0:d4:e3 +-----BEGIN CERTIFICATE----- +MIIDWDCCAkCgAwIBAgIQC0Qjx8BfpCzux3eA+Ug2BDANBgkqhkiG9w0BAQsFADAT +MREwDwYDVQQDDAhDaGFuZ2VNZTAeFw0yNTA5MjMxMjQ2NDJaFw0yNzEyMjcxMjQ2 +NDJaMBsxGTAXBgNVBAMMEHRlc3RzZXJ2ZXIubG9jYWwwggEiMA0GCSqGSIb3DQEB +AQUAA4IBDwAwggEKAoIBAQC07wYuyvTlH7Qe0MrUoe8DTRS26E7pJuDFltcKNqVM +bZJbBegOVxRkwYQffPSZOsdKQZJawZnBDDPWgfJJ43oQ0S4kuD7RAKbApFalF31w +33TlDJdeZy8FCoGLJFsitYdiEkqSsuK3O9Y5INwidlhhXKRt1TNLplQAf0NpzgrW +OiHSjFke52atd2v+VtMSyr0YVclx5IvaZyizY2tvMeK1iRWv6hqafzGz8boyIVmW +gXGfaROG0tvFqgynlTtoo51GqWHJBBNTRD5ggV7aVEOykHUz3Eqa7S7wgu8f5nJ/ +ayBkZ5vTZuSZZGpif0eDw1DzvP7ieshlmYIsiTsseDLjAgMBAAGjgZ8wgZwwCQYD +VR0TBAIwADAdBgNVHQ4EFgQU0AYvOtib8519tYr2Xc6Kg4ldq7AwTgYDVR0jBEcw +RYAUtMsHbl94qRV7OW5UNNjk2mJ+/U+hF6QVMBMxETAPBgNVBAMMCENoYW5nZU1l +ghQHCpRgPWjEEp4iZoxl+bjhOxMAXTATBgNVHSUEDDAKBggrBgEFBQcDAjALBgNV +HQ8EBAMCB4AwDQYJKoZIhvcNAQELBQADggEBAAGBNKGtnZ/nz6Gu5Ytv2LPrrPeP +CYz1rWSWpUVYxpJu+OIhBi0qifth1etrVnjXKDH3WCxSv7LtSJIMSbFwMHgUQXbU +xL48FbhPJ22phztFuaR2PSNRap3KJGO6UO1Mua2PyFdURBZTNQrGyCUuV3wyKFe9 +5G2YqJYx2UK7ZSUOKtmllBcsbLv3xtbpst+iZvbLc0OX3Fy1NKMKi4S6cU6Bg41e +LJl/EomzkCcaDOjG1VGPn+pJuSRkaGRAmCGC61J8ixBIYbUB1EJsLhPxB1INzwXN +BnAMY6rh3JMru47rET5ZbxKQNynYRfzTUoe0olVU8hfY9DJSOTrPDSyg1OM= +-----END CERTIFICATE----- + + +-----BEGIN PRIVATE KEY----- +MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQC07wYuyvTlH7Qe +0MrUoe8DTRS26E7pJuDFltcKNqVMbZJbBegOVxRkwYQffPSZOsdKQZJawZnBDDPW +gfJJ43oQ0S4kuD7RAKbApFalF31w33TlDJdeZy8FCoGLJFsitYdiEkqSsuK3O9Y5 +INwidlhhXKRt1TNLplQAf0NpzgrWOiHSjFke52atd2v+VtMSyr0YVclx5IvaZyiz +Y2tvMeK1iRWv6hqafzGz8boyIVmWgXGfaROG0tvFqgynlTtoo51GqWHJBBNTRD5g +gV7aVEOykHUz3Eqa7S7wgu8f5nJ/ayBkZ5vTZuSZZGpif0eDw1DzvP7ieshlmYIs +iTsseDLjAgMBAAECggEADhFrsfsL3D4r0Mg74m6OLqaGpCDJ9JeWRdFlpPX8wkWJ +af8P56reULs53QI9OSYjKriQevKvVB2bxjj9BWu6KPvqvOYqftuwcNgWTeiBU2O8 +gLa1lPHW68BrtCLpMc4FhHBBph23QmH/qm94o6FUsTVKf6kNFPu4xP/K1mYz3NYv +ejQGXFtmi1bJFo+wf5KhUOg1devz4gWYodGPZlJ2M3tbFLv3Xaaj9k776rSkXmD8 +DQvP5yND0j1x9N6hT/tE2f0pSZmO1iu2782ER2LN2C12FEQKtEReGi9Pm+DkPl/u +KqgxUeIAQazmppP8cfIJH6SK7RXNvHZjCnXKigaJmQKBgQDVR6Gh+mArDXFfalqg +Me2V13On4exe3zwIqHOYxHLcEHqWyLsSKa+xa+CUCfJpc0Nux51SnDaxBOwYBNqT +rYRLxXyN5ocJWpdguiBP8nXdTFVC8XwZtLC2QH+2UK322AUTBmFV85xIVofeLgY/ +H/GOqdi7wIGfg/vdyJUxMnhFdQKBgQDZLMcVo62FEgyPB90ZE3KnGdJJlFHHKkj8 +AC0R20Rd6Y3oDFuoHmKaV1vo5ePthjHhyMgJ2VHIPih3+jt5mQf/zaveHKfrwg4F +rlPbqsY08tWM51qQ1wKgyKi4ASZKWzYQUZBhZrd2YXLyN0EQzMtTLTjaPHIpDVnP +r+w37/+T9wKBgQCRJ0Y3Ekr/IhAF60Ewg6p5739UQ+t2CiI2lkbOMu0lHsX/9y9y +RhLAAnZ+6mIkKIE9VPeacJy8T2hLVIpaNZ6zXv3NKZa/4/rgpuw03QQgj8H7ZJSc +fiBCeZUxxKkRNaYGc7ItKDY1+UZRDSvNLHVfLfNGnNbbdJ0nLUt0hy/ZvQKBgHsj +0J6MeE8DtOtE4jDdvhzRn1LpLpVnfIqm7uc5FMLLMxNoLnBdCjvJXOvprht4A8Cq +QAKVnrGTzQ56bE6+XrLEw7blOLGNDrZZ6mKbqldLeZqzc768q1jPbhsnS7bNkRIf +rWYM/+m3x51fhx0nggJfmeTkcTalw07nyWDOTHRxAoGAQ7poaI25mTTwyX4J4adK +n1BMrrFns7ztHdbWD+P2T1MnJ/ibURPwXCdFuxKCEBtyoEiFTBZRLuB4N4UUwGe3 +pFmgWL4d+qPrCfOyksn0YyTjBtoBPrxSccWrNeKBqePewmnQk0SRLN8w8hoiJFgV +zGLzeXsIRbDfvPT3ZUT3zgI= +-----END PRIVATE KEY----- + + +# +# 2048 bit OpenVPN static key +# +-----BEGIN OpenVPN Static key V1----- +488b61084812969fe8ad0f9dd40f56a2 +6cdadddfe345daef6b5c6d3c3e779fc5 +1f7d236966953482d2af085e3f8581b7 +d216f2d891972a463bbb22ca6c104b9d +f99dcb19d7d575a1d46e7918bb2556c6 +db9f51cd792c5e89e011586214692b95 +2a32a7fe85e4538c40e1d0aa2a9f8e15 +fcc0ce5d31974e3c2041b127776f7658 +878cb8245ed235ec996c2370c0fc0023 +699bc028b3412bc40209cba8233bc111 +fa1438095f99052d799fa718f3b04499 +472254d0286b4b2ce99db49e98a4cc25 +fd948bddcdcf08006a6d7bff40354e7b +5e93ea753a8ecc05de41ae34d280e7eb +99220e436bf8b7693a00667485631e28 +edba3e33b6f558dfa50b92eec6ac8b44 +-----END OpenVPN Static key V1----- + diff --git a/tests/local_vpn/create_openvpn_certificates.sh b/tests/local_vpn/create_openvpn_certificates.sh new file mode 100755 index 00000000..91aef0dc --- /dev/null +++ b/tests/local_vpn/create_openvpn_certificates.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +./_build_docker.sh + +docker run --rm -v ./ca_user:/home/ca_user -v ./client_configs:/client_configs -v ./server_config:/server_config -p 9194:9194/udp --cap-add=NET_ADMIN --privileged --name odelia_testing_openvpnserver odelia_testing_openvpnserver:latest /bin/bash -c "./_openvpn_certificate_creation.sh" diff --git a/tests/local_vpn/run_docker_openvpnserver.sh b/tests/local_vpn/run_docker_openvpnserver.sh new file mode 100755 index 00000000..4bf703a2 --- /dev/null +++ b/tests/local_vpn/run_docker_openvpnserver.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +docker run -d -t --rm -v ./ca_user:/home/ca_user -v ./server_config:/server_config -p 9194:9194/udp --cap-add=NET_ADMIN --privileged --name odelia_testing_openvpnserver odelia_testing_openvpnserver:latest /bin/bash -c "./_openvpn_start.sh && /bin/bash" diff --git a/tests/local_vpn/server_config/.gitignore b/tests/local_vpn/server_config/.gitignore new file mode 100644 index 00000000..23de1ea2 --- /dev/null +++ b/tests/local_vpn/server_config/.gitignore @@ -0,0 +1,2 @@ +nohup.out +ipp.txt \ No newline at end of file diff --git a/tests/local_vpn/server_config/ca.crt b/tests/local_vpn/server_config/ca.crt new file mode 100644 index 00000000..02ee2179 --- /dev/null +++ b/tests/local_vpn/server_config/ca.crt @@ -0,0 +1,20 @@ +-----BEGIN CERTIFICATE----- +MIIDQjCCAiqgAwIBAgIUBwqUYD1oxBKeImaMZfm44TsTAF0wDQYJKoZIhvcNAQEL +BQAwEzERMA8GA1UEAwwIQ2hhbmdlTWUwHhcNMjUwOTIzMTI0NjQyWhcNMzUwOTIx +MTI0NjQyWjATMREwDwYDVQQDDAhDaGFuZ2VNZTCCASIwDQYJKoZIhvcNAQEBBQAD +ggEPADCCAQoCggEBAKGt+8oRY7cWPg1SahfIV3XAeeH1SQEFq4f2q+E9ZbWVnCg9 +b59hMzwYr84/j4V73Hlv2udLrkguvnT9KqqJY/0wo3Bd1swH2WLej1fo0+rVo24w +hzeLfeH1e4erZbzQk8XG68U7yNDHKYo+LIz9syBzZA4Bq12bHxDsZbJF7HUANzFR +j9Xg3dR7utPtG8ktmD83rV9/E97whblMpLmjmf2sbCqdLOKTkZnwp5mI47TTkhMj +9K0q7irHmbtZcPZQH5Z59GtqaCaRt8DKfeYniyoPnGVfzFberHHQ4C11pcRrdvgY +n14/W5myh6HESQD6umyCYooyXG7wfqIKujROQCMCAwEAAaOBjTCBijAdBgNVHQ4E +FgQUtMsHbl94qRV7OW5UNNjk2mJ+/U8wTgYDVR0jBEcwRYAUtMsHbl94qRV7OW5U +NNjk2mJ+/U+hF6QVMBMxETAPBgNVBAMMCENoYW5nZU1lghQHCpRgPWjEEp4iZoxl ++bjhOxMAXTAMBgNVHRMEBTADAQH/MAsGA1UdDwQEAwIBBjANBgkqhkiG9w0BAQsF +AAOCAQEAGeryP/2JuOp7tzi7Ww9lFUx2DRcgq/FwnU4biotUfuLejHQt/IeIwRYs +dW6AToUYJak8Uy/AFffMootwLcC8z8FATBnxtokWNpxtscpbTSHbeS0HvXnXFaU8 +xxlzp9l5k+46MrrvdzFsjoRfVxs0FUHzWifBnObBziTLfHt+J71509uqRWX6JuTa +PDAT8CMcLKxxS4BcorWtAmc51lW/dQQ41HDJ8a6acltDAprmlnhd8ksWzpTjUDNR +/cfSMcVTpPxPSW/WchR5NlJKQEAf9B/xC+LQgDRSDLaZ8CvzRDgosllzJ+aIS7GK +GPec69LiKqpirZ7enwDM67R4DwIHKA== +-----END CERTIFICATE----- diff --git a/tests/local_vpn/server_config/ccd/admin@test.odelia b/tests/local_vpn/server_config/ccd/admin@test.odelia new file mode 100644 index 00000000..3e8f368e --- /dev/null +++ b/tests/local_vpn/server_config/ccd/admin@test.odelia @@ -0,0 +1 @@ +ifconfig-push 10.8.0.5 255.0.0.0 diff --git a/tests/local_vpn/server_config/ccd/client_A b/tests/local_vpn/server_config/ccd/client_A new file mode 100644 index 00000000..2009193e --- /dev/null +++ b/tests/local_vpn/server_config/ccd/client_A @@ -0,0 +1 @@ +ifconfig-push 10.8.0.6 255.0.0.0 diff --git a/tests/local_vpn/server_config/ccd/client_B b/tests/local_vpn/server_config/ccd/client_B new file mode 100644 index 00000000..da607617 --- /dev/null +++ b/tests/local_vpn/server_config/ccd/client_B @@ -0,0 +1 @@ +ifconfig-push 10.8.0.7 255.0.0.0 diff --git a/tests/local_vpn/server_config/ccd/testserver.local b/tests/local_vpn/server_config/ccd/testserver.local new file mode 100644 index 00000000..75bd4873 --- /dev/null +++ b/tests/local_vpn/server_config/ccd/testserver.local @@ -0,0 +1 @@ +ifconfig-push 10.8.0.4 255.0.0.0 diff --git a/tests/local_vpn/server_config/server.conf b/tests/local_vpn/server_config/server.conf new file mode 100755 index 00000000..8d90cd74 --- /dev/null +++ b/tests/local_vpn/server_config/server.conf @@ -0,0 +1,304 @@ +################################################# +# Sample OpenVPN 2.0 config file for # +# multi-client server. # +# # +# This file is for the server side # +# of a many-clients <-> one-server # +# OpenVPN configuration. # +# # +# OpenVPN also supports # +# single-machine <-> single-machine # +# configurations (See the Examples page # +# on the web site for more info). # +# # +# This config should work on Windows # +# or Linux/BSD systems. Remember on # +# Windows to quote pathnames and use # +# double backslashes, e.g.: # +# "C:\\Program Files\\OpenVPN\\config\\foo.key" # +# # +# Comments are preceded with '#' or ';' # +################################################# + +# Which local IP address should OpenVPN +# listen on? (optional) +;local a.b.c.d + +# Which TCP/UDP port should OpenVPN listen on? +# If you want to run multiple OpenVPN instances +# on the same machine, use a different port +# number for each one. You will need to +# open up this port on your firewall. +port 9194 + +# TCP or UDP server? +;proto tcp +proto udp + +# "dev tun" will create a routed IP tunnel, +# "dev tap" will create an ethernet tunnel. +# Use "dev tap0" if you are ethernet bridging +# and have precreated a tap0 virtual interface +# and bridged it with your ethernet interface. +# If you want to control access policies +# over the VPN, you must create firewall +# rules for the the TUN/TAP interface. +# On non-Windows systems, you can give +# an explicit unit number, such as tun0. +# On Windows, use "dev-node" for this. +# On most systems, the VPN will not function +# unless you partially or fully disable +# the firewall for the TUN/TAP interface. +;dev tap +dev tun + +# Windows needs the TAP-Win32 adapter name +# from the Network Connections panel if you +# have more than one. On XP SP2 or higher, +# you may need to selectively disable the +# Windows firewall for the TAP adapter. +# Non-Windows systems usually don't need this. +;dev-node MyTap + +# SSL/TLS root certificate (ca), certificate +# (cert), and private key (key). Each client +# and the server must have their own cert and +# key file. The server and all clients will +# use the same ca file. +# +# See the "easy-rsa" directory for a series +# of scripts for generating RSA certificates +# and private keys. Remember to use +# a unique Common Name for the server +# and each of the client certificates. +# +# Any X509 key management system can be used. +# OpenVPN can also use a PKCS #12 formatted key file +# (see "pkcs12" directive in man page). +ca /etc/openvpn/server/ca.crt +cert /etc/openvpn/server/server.crt +key /etc/openvpn/server/server.key # This file should be kept secret + +# Diffie hellman parameters. +# Generate your own with: +# openssl dhparam -out dh1024.pem 1024 +# Substitute 2048 for 1024 if you are using +# 2048 bit keys. +;dh dh1024.pem +dh none + +# Configure server mode and supply a VPN subnet +# for OpenVPN to draw client addresses from. +# The server will take 10.8.0.1 for itself, +# the rest will be made available to clients. +# Each client will be able to reach the server +# on 10.8.0.1. Comment this line out if you are +# ethernet bridging. See the man page for more info. +server 10.8.0.0 255.255.255.0 + +# Maintain a record of client <-> virtual IP address +# associations in this file. If OpenVPN goes down or +# is restarted, reconnecting clients can be assigned +# the same virtual IP address from the pool that was +# previously assigned. +ifconfig-pool-persist ipp.txt + +# Configure server mode for ethernet bridging. +# You must first use your OS's bridging capability +# to bridge the TAP interface with the ethernet +# NIC interface. Then you must manually set the +# IP/netmask on the bridge interface, here we +# assume 10.8.0.4/255.255.255.0. Finally we +# must set aside an IP range in this subnet +# (start=10.8.0.50 end=10.8.0.100) to allocate +# to connecting clients. Leave this line commented +# out unless you are ethernet bridging. +;server-bridge 10.8.0.4 255.255.255.0 10.8.0.50 10.8.0.100 + +# Configure server mode for ethernet bridging +# using a DHCP-proxy, where clients talk +# to the OpenVPN server-side DHCP server +# to receive their IP address allocation +# and DNS server addresses. You must first use +# your OS's bridging capability to bridge the TAP +# interface with the ethernet NIC interface. +# Note: this mode only works on clients (such as +# Windows), where the client-side TAP adapter is +# bound to a DHCP client. +;server-bridge + +# Push routes to the client to allow it +# to reach other private subnets behind +# the server. Remember that these +# private subnets will also need +# to know to route the OpenVPN client +# address pool (10.8.0.0/255.255.255.0) +# back to the OpenVPN server. +;push "route 192.168.10.0 255.255.255.0" +;push "route 192.168.20.0 255.255.255.0" + +# To assign specific IP addresses to specific +# clients or if a connecting client has a private +# subnet behind it that should also have VPN access, +# use the subdirectory "ccd" for client-specific +# configuration files (see man page for more info). + +# EXAMPLE: Suppose the client +# having the certificate common name "Thelonious" +# also has a small subnet behind his connecting +# machine, such as 192.168.40.128/255.255.255.248. +# First, uncomment out these lines: +;client-config-dir ccd +;route 192.168.40.128 255.255.255.248 +# Then create a file ccd/Thelonious with this line: +# iroute 192.168.40.128 255.255.255.248 +# This will allow Thelonious' private subnet to +# access the VPN. This example will only work +# if you are routing, not bridging, i.e. you are +# using "dev tun" and "server" directives. + +# EXAMPLE: Suppose you want to give +# Thelonious a fixed VPN IP address of 10.9.0.1. +# First uncomment out these lines: +client-config-dir /server_config/ccd +;route 10.9.0.0 255.255.255.252 +# Then add this line to ccd/Thelonious: +# ifconfig-push 10.9.0.1 10.9.0.2 + +# Suppose that you want to enable different +# firewall access policies for different groups +# of clients. There are two methods: +# (1) Run multiple OpenVPN daemons, one for each +# group, and firewall the TUN/TAP interface +# for each group/daemon appropriately. +# (2) (Advanced) Create a script to dynamically +# modify the firewall in response to access +# from different clients. See man +# page for more info on learn-address script. +;learn-address ./script + +# If enabled, this directive will configure +# all clients to redirect their default +# network gateway through the VPN, causing +# all IP traffic such as web browsing and +# and DNS lookups to go through the VPN +# (The OpenVPN server machine may need to NAT +# or bridge the TUN/TAP interface to the internet +# in order for this to work properly). +;push "redirect-gateway def1 bypass-dhcp" + +# Certain Windows-specific network settings +# can be pushed to clients, such as DNS +# or WINS server addresses. CAVEAT: +# http://openvpn.net/faq.html#dhcpcaveats +# The addresses below refer to the public +# DNS servers provided by opendns.com. +;push "dhcp-option DNS 208.67.222.222" +;push "dhcp-option DNS 208.67.220.220" + +# Uncomment this directive to allow different +# clients to be able to "see" each other. +# By default, clients will only see the server. +# To force clients to only see the server, you +# will also need to appropriately firewall the +# server's TUN/TAP interface. +;client-to-client + +# Uncomment this directive if multiple clients +# might connect with the same certificate/key +# files or common names. This is recommended +# only for testing purposes. For production use, +# each client should have its own certificate/key +# pair. +# +# IF YOU HAVE NOT GENERATED INDIVIDUAL +# CERTIFICATE/KEY PAIRS FOR EACH CLIENT, +# EACH HAVING ITS OWN UNIQUE "COMMON NAME", +# UNCOMMENT THIS LINE OUT. +;duplicate-cn + +# The keepalive directive causes ping-like +# messages to be sent back and forth over +# the link so that each side knows when +# the other side has gone down. +# Ping every 10 seconds, assume that remote +# peer is down if no ping received during +# a 120 second time period. +keepalive 2 10 + +# For extra security beyond that provided +# by SSL/TLS, create an "HMAC firewall" +# to help block DoS attacks and UDP port flooding. +# +# Generate with: +# openvpn --genkey --secret ta.key +# +# The server and each client must have +# a copy of this key. +# The second parameter should be '0' +# on the server and '1' on the clients. +;tls-auth ta.key 0 # This file is secret +tls-crypt /etc/openvpn/server/ta.key + +# Select a cryptographic cipher. +# This config item must be copied to +# the client config file as well. +;cipher BF-CBC # Blowfish (default) +;cipher AES-128-CBC # AES +;cipher DES-EDE3-CBC # Triple-DES +cipher AES-256-GCM + +auth SHA256 + +# Enable compression on the VPN link. +# If you enable it here, you must also +# enable it in the client config file. +;comp-lzo + +# The maximum number of concurrently connected +# clients we want to allow. +;max-clients 100 + +# It's a good idea to reduce the OpenVPN +# daemon's privileges after initialization. +# +# You can uncomment this out on +# non-Windows systems. +user nobody +group nogroup + +# The persist options will try to avoid +# accessing certain resources on restart +# that may no longer be accessible because +# of the privilege downgrade. +persist-key +persist-tun + +# Output a short status file showing +# current connections, truncated +# and rewritten every minute. +status openvpn-status.log + +# By default, log messages will go to the syslog (or +# on Windows, if running as a service, they will go to +# the "\Program Files\OpenVPN\log" directory). +# Use log or log-append to override this default. +# "log" will truncate the log file on OpenVPN startup, +# while "log-append" will append to it. Use one +# or the other (but not both). +;log openvpn.log +;log-append openvpn.log + +# Set the appropriate level of log +# file verbosity. +# +# 0 is silent, except for fatal errors +# 4 is reasonable for general usage +# 5 and 6 can help to debug connection problems +# 9 is extremely verbose +verb 3 + +# Silence repeating messages. At most 20 +# sequential messages of the same message +# category will be output to the log. +;mute 20 diff --git a/tests/local_vpn/server_config/server.crt b/tests/local_vpn/server_config/server.crt new file mode 100644 index 00000000..8a6bcc20 --- /dev/null +++ b/tests/local_vpn/server_config/server.crt @@ -0,0 +1,87 @@ +Certificate: + Data: + Version: 3 (0x2) + Serial Number: + 61:74:e5:68:11:63:be:bb:fa:fa:4d:63:12:ad:fa:6a + Signature Algorithm: sha256WithRSAEncryption + Issuer: CN=ChangeMe + Validity + Not Before: Sep 23 12:46:42 2025 GMT + Not After : Dec 27 12:46:42 2027 GMT + Subject: CN=ChangeMe + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + Public-Key: (2048 bit) + Modulus: + 00:ae:66:65:3b:39:6e:aa:39:39:7f:f1:be:18:c4: + 52:60:c3:3c:63:77:2a:fd:d0:79:22:6a:5f:b7:ab: + 9d:94:27:89:9a:5c:2d:b7:ea:66:91:f7:06:57:24: + 38:bd:55:71:2d:ff:9a:dd:b3:ed:0c:bf:1b:8c:93: + 27:63:d4:a1:a7:00:55:68:c5:a0:c4:9e:d3:51:d7: + ec:f8:9d:7e:b1:a4:84:80:78:9b:76:58:61:b9:89: + c9:94:e5:ad:ca:61:33:e0:f7:f3:35:0a:fc:6c:28: + b5:53:57:52:01:0a:e1:60:f1:42:f0:a4:d3:e1:4e: + 25:12:83:01:ba:f5:1a:96:44:33:17:b8:69:bc:a4: + b1:2e:b1:e0:e3:50:c6:6f:dc:f7:12:16:40:21:63: + db:14:b1:b1:fe:6f:76:84:f7:ef:a0:bb:0b:dc:03: + 44:b6:2a:f0:61:7b:7c:4a:7a:51:9b:ab:01:8f:10: + a8:db:10:62:c3:72:3b:2c:fc:b5:03:e2:73:e6:1d: + d0:3e:a5:83:f5:ae:30:4c:d8:79:28:d1:d1:5c:61: + 84:2d:8c:0d:8d:39:ce:a6:15:21:0b:4b:cd:29:28: + 72:ed:9e:63:7d:73:bd:70:f3:29:4f:c5:c4:95:ef: + dc:a7:28:27:af:36:91:e0:53:ef:4e:7d:ba:50:34: + 83:51 + Exponent: 65537 (0x10001) + X509v3 extensions: + X509v3 Basic Constraints: + CA:FALSE + X509v3 Subject Key Identifier: + 5C:FC:F9:A1:E6:BE:75:F7:02:23:72:3B:F2:09:B5:A9:D2:8C:A1:3B + X509v3 Authority Key Identifier: + keyid:B4:CB:07:6E:5F:78:A9:15:7B:39:6E:54:34:D8:E4:DA:62:7E:FD:4F + DirName:/CN=ChangeMe + serial:07:0A:94:60:3D:68:C4:12:9E:22:66:8C:65:F9:B8:E1:3B:13:00:5D + X509v3 Extended Key Usage: + TLS Web Server Authentication + X509v3 Key Usage: + Digital Signature, Key Encipherment + X509v3 Subject Alternative Name: + DNS:ChangeMe + Signature Algorithm: sha256WithRSAEncryption + Signature Value: + 6c:df:63:30:de:ae:e7:4a:07:be:c3:c6:78:fe:91:f4:89:c1: + 41:fc:58:d3:52:e8:bd:ab:6b:a1:68:d5:8a:36:4f:6f:21:68: + 2a:07:c6:cd:56:7f:8b:f9:0d:00:f7:9f:ba:2f:84:79:08:a2: + 53:8b:4b:76:6b:49:59:bb:9a:51:45:63:c3:25:ce:d2:46:61: + fe:2c:86:d4:ae:f7:bb:de:c2:f1:4f:8d:46:6e:a6:f3:cb:25: + 72:75:e7:eb:c6:a2:10:34:8a:a9:ca:9c:b4:ba:9c:e0:50:6d: + cd:91:a9:97:37:be:d7:40:e1:21:ba:a8:fe:8f:0d:96:2d:19: + a0:10:41:8b:cf:16:4a:a3:83:24:96:62:11:0f:e1:76:5d:46: + 1e:60:1d:2f:9d:1c:87:de:b0:1b:f7:26:61:13:af:41:44:01: + b6:dd:40:de:94:20:04:5e:68:42:79:7b:13:03:b0:6c:5f:d2: + ff:3c:15:6b:ca:21:57:69:61:de:05:68:b1:9e:e5:f8:be:c2: + 38:c7:1f:53:2e:da:7b:fd:26:fa:83:8e:5d:06:70:d9:7d:9e: + c1:75:99:70:f7:3e:66:e4:95:8e:43:ec:4a:9d:bd:0f:d7:08: + 64:f1:5f:f8:94:46:6e:46:20:44:5f:71:0b:2e:e2:0d:87:eb: + 69:cb:86:af +-----BEGIN CERTIFICATE----- +MIIDZTCCAk2gAwIBAgIQYXTlaBFjvrv6+k1jEq36ajANBgkqhkiG9w0BAQsFADAT +MREwDwYDVQQDDAhDaGFuZ2VNZTAeFw0yNTA5MjMxMjQ2NDJaFw0yNzEyMjcxMjQ2 +NDJaMBMxETAPBgNVBAMMCENoYW5nZU1lMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEArmZlOzluqjk5f/G+GMRSYMM8Y3cq/dB5Impft6udlCeJmlwtt+pm +kfcGVyQ4vVVxLf+a3bPtDL8bjJMnY9ShpwBVaMWgxJ7TUdfs+J1+saSEgHibdlhh +uYnJlOWtymEz4PfzNQr8bCi1U1dSAQrhYPFC8KTT4U4lEoMBuvUalkQzF7hpvKSx +LrHg41DGb9z3EhZAIWPbFLGx/m92hPfvoLsL3ANEtirwYXt8SnpRm6sBjxCo2xBi +w3I7LPy1A+Jz5h3QPqWD9a4wTNh5KNHRXGGELYwNjTnOphUhC0vNKShy7Z5jfXO9 +cPMpT8XEle/cpygnrzaR4FPvTn26UDSDUQIDAQABo4G0MIGxMAkGA1UdEwQCMAAw +HQYDVR0OBBYEFFz8+aHmvnX3AiNyO/IJtanSjKE7ME4GA1UdIwRHMEWAFLTLB25f +eKkVezluVDTY5Npifv1PoRekFTATMREwDwYDVQQDDAhDaGFuZ2VNZYIUBwqUYD1o +xBKeImaMZfm44TsTAF0wEwYDVR0lBAwwCgYIKwYBBQUHAwEwCwYDVR0PBAQDAgWg +MBMGA1UdEQQMMAqCCENoYW5nZU1lMA0GCSqGSIb3DQEBCwUAA4IBAQBs32Mw3q7n +Sge+w8Z4/pH0icFB/FjTUui9q2uhaNWKNk9vIWgqB8bNVn+L+Q0A95+6L4R5CKJT +i0t2a0lZu5pRRWPDJc7SRmH+LIbUrve73sLxT41GbqbzyyVydefrxqIQNIqpypy0 +upzgUG3NkamXN77XQOEhuqj+jw2WLRmgEEGLzxZKo4MklmIRD+F2XUYeYB0vnRyH +3rAb9yZhE69BRAG23UDelCAEXmhCeXsTA7BsX9L/PBVryiFXaWHeBWixnuX4vsI4 +xx9TLtp7/Sb6g45dBnDZfZ7BdZlw9z5m5JWOQ+xKnb0P1whk8V/4lEZuRiBEX3EL +LuINh+tpy4av +-----END CERTIFICATE----- diff --git a/tests/local_vpn/server_config/server.key b/tests/local_vpn/server_config/server.key new file mode 100644 index 00000000..b73e742e --- /dev/null +++ b/tests/local_vpn/server_config/server.key @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCuZmU7OW6qOTl/ +8b4YxFJgwzxjdyr90Hkial+3q52UJ4maXC236maR9wZXJDi9VXEt/5rds+0MvxuM +kydj1KGnAFVoxaDEntNR1+z4nX6xpISAeJt2WGG5icmU5a3KYTPg9/M1CvxsKLVT +V1IBCuFg8ULwpNPhTiUSgwG69RqWRDMXuGm8pLEuseDjUMZv3PcSFkAhY9sUsbH+ +b3aE9++guwvcA0S2KvBhe3xKelGbqwGPEKjbEGLDcjss/LUD4nPmHdA+pYP1rjBM +2Hko0dFcYYQtjA2NOc6mFSELS80pKHLtnmN9c71w8ylPxcSV79ynKCevNpHgU+9O +fbpQNINRAgMBAAECggEAQBXzfBxiLJ4joX7hRnOZ++GyZrCLNUKuyLVDIBipqqAO +whC+Yhd6Aog+JbZzPSvRD8CeFXsBEE6HnpQShO5FSrtmJz58EdR1Pd11QHSLclbM +s/Ld2dKncokN8K+nubcXW8NxdRvo3wvkedAcG7L2V+vAF/LRwzi2icNnVt6rmuyw +Z/W5/HERlt4IAikKDQhBZrtGx5Cbjun5ekjN2sWFVB7TT2u6o/BsYQ7ljGUZt9uQ +DStfOURAv5BE8eYyWQIxd7fPCfY3UNxpJUPxvuDxpeCwITzD5v8qoVSBBH1lvQ7s +i61/Cr7dfwNsAtlMzrERxRmMR5WQzsfxPvfqhb3IuwKBgQDZo8zBAEXiTnNSl3W5 +1bs1ab8AFTfTzeY2Th2SxDZLdcy5I3dirwfusyQkv5eoRWi5Vx/fNXIh5OJos2Fu +M0CxuuJVP2dkXzBJrazAkzlDhEsG/MGMaIE/p3aFQeyID1EZcN3u3Z9StEbW4B8y +I/8dTgJCnBzfHs2HH80VbQHsZwKBgQDNI44tqevSW6XwwXz1sYJy5NBPrksRLEcU +rhm6rsLMhKXHUJa0KDeOeM9sjiBBrCL/pOkqwcnLUsqZ8pIQIEhwaIBfHznblgxZ +jCho3ZjYm4/Is9XD/lcS2yU5ialRI9kFz6qTkOlO0XonIwJs4NiITbuzfopr2BGh +IlzXcrC/hwKBgQCUl6IvT4lnJprUE/bbx1JG+IjgfJweLyDziMfmMbLEOIxrBwz2 +wnwO/B48PNdFmwYSLKrlEa939raiN37Y54NPFUJ8Y4qq29azJzGgVaQuNb+n6KAY +xi0gkax49PaSOqrrTMUp1gR2SgFnqaOC71K55k3ivoVzzKsUi6DQ9RjwFwKBgEiD +/BuaSJmo+iT8UO8NW96/kf/IzhJ5A3uE++VpJ8ViUrP9gfiXiuQbQr/OEgsFDa4v +HpmVvX7ZenMnM4jt0I2j81Us1agRB7aT/CjtxL01aIN7RuKswx0QSL1pM2hScsJC +Ibtea4sIM9Un5BCW/xRX3jVaUxZCYCEE46rpiR97AoGBAJdQQnQAxYS2Ua7Jj0go +0SgG99w7ONZjSupTTr4VpMaXmh6CBke44RulMUA+PwB1XtfVpxx8xhuPq2d6y79T +o5OLbEjdLPq8A8S0n5eXMD7FXXG8TYPpcqoO2Hqhgu9q1vRgqPopIcRuhhp5wdCp +iIGJHhwsI9sYN6wnGydeOH9U +-----END PRIVATE KEY----- diff --git a/tests/local_vpn/server_config/ta.key b/tests/local_vpn/server_config/ta.key new file mode 100644 index 00000000..2bf036ac --- /dev/null +++ b/tests/local_vpn/server_config/ta.key @@ -0,0 +1,21 @@ +# +# 2048 bit OpenVPN static key +# +-----BEGIN OpenVPN Static key V1----- +488b61084812969fe8ad0f9dd40f56a2 +6cdadddfe345daef6b5c6d3c3e779fc5 +1f7d236966953482d2af085e3f8581b7 +d216f2d891972a463bbb22ca6c104b9d +f99dcb19d7d575a1d46e7918bb2556c6 +db9f51cd792c5e89e011586214692b95 +2a32a7fe85e4538c40e1d0aa2a9f8e15 +fcc0ce5d31974e3c2041b127776f7658 +878cb8245ed235ec996c2370c0fc0023 +699bc028b3412bc40209cba8233bc111 +fa1438095f99052d799fa718f3b04499 +472254d0286b4b2ce99db49e98a4cc25 +fd948bddcdcf08006a6d7bff40354e7b +5e93ea753a8ecc05de41ae34d280e7eb +99220e436bf8b7693a00667485631e28 +edba3e33b6f558dfa50b92eec6ac8b44 +-----END OpenVPN Static key V1----- diff --git a/tests/provision/dummy_project_for_testing.yml b/tests/provision/dummy_project_for_testing.yml index 5e658c78..ea544be7 100644 --- a/tests/provision/dummy_project_for_testing.yml +++ b/tests/provision/dummy_project_for_testing.yml @@ -4,11 +4,11 @@ description: > Test setup. participants: - - name: localhost + - name: testserver.local type: server org: Test_Org - fed_learn_port: 8002 - admin_port: 8003 + fed_learn_port: 8012 + admin_port: 8013 - name: client_A type: client org: Test_Org @@ -34,7 +34,7 @@ builders: path: nvflare.ha.dummy_overseer_agent.DummyOverseerAgent overseer_exists: false args: - sp_end_point: localhost:8002:8003 + sp_end_point: testserver.local:8012:8013 - path: nvflare.lighter.impl.cert.CertBuilder - path: nvflare.lighter.impl.signature.SignatureBuilder From cc7f7d8eb30eb45a583234f103c118b514d40bd2 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Wed, 24 Sep 2025 16:07:08 +0200 Subject: [PATCH 70/80] ensure that VPN docker image exists before starting server --- runIntegrationTests.sh | 2 -- tests/local_vpn/run_docker_openvpnserver.sh | 2 ++ 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 99f39187..799084b1 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -239,8 +239,6 @@ run_3dcnn_simulation_mode () { start_testing_vpn () { echo "[Prepare] Start local VPN server for testing ..." - # TODO make sure (at suitable locatin in scripts) that VPN container is built and that VPN certificates exist - cp -r tests/local_vpn "$PROJECT_DIR"/prod_00/ chmod a+rX "$PROJECT_DIR"/prod_00/local_vpn -R cd "$PROJECT_DIR"/prod_00/local_vpn diff --git a/tests/local_vpn/run_docker_openvpnserver.sh b/tests/local_vpn/run_docker_openvpnserver.sh index 4bf703a2..f501f811 100755 --- a/tests/local_vpn/run_docker_openvpnserver.sh +++ b/tests/local_vpn/run_docker_openvpnserver.sh @@ -1,3 +1,5 @@ #!/usr/bin/env bash +./_build_docker.sh + docker run -d -t --rm -v ./ca_user:/home/ca_user -v ./server_config:/server_config -p 9194:9194/udp --cap-add=NET_ADMIN --privileged --name odelia_testing_openvpnserver odelia_testing_openvpnserver:latest /bin/bash -c "./_openvpn_start.sh && /bin/bash" From f802a0978ec718da5c2c5395b0f9ecccc6fecd4e Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 30 Sep 2025 14:43:35 +0200 Subject: [PATCH 71/80] avoid need for admin rights for client nodes: allow local user to start openvpn in Docker, added name resolution to docker run call --- docker_config/Dockerfile_ODELIA | 3 +++ docker_config/master_template.yml | 5 ++++- runIntegrationTests.sh | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/docker_config/Dockerfile_ODELIA b/docker_config/Dockerfile_ODELIA index e6a98165..ca39165a 100644 --- a/docker_config/Dockerfile_ODELIA +++ b/docker_config/Dockerfile_ODELIA @@ -362,3 +362,6 @@ COPY ./torch_home_cache /torch_home # allow creating home directory for local user inside container if needed RUN chmod a+rwx /home + +# allow starting VPN connection by non-root users +RUN chmod gu+s /usr/sbin/openvpn diff --git a/docker_config/master_template.yml b/docker_config/master_template.yml index 63774b50..cd48e4cd 100644 --- a/docker_config/master_template.yml +++ b/docker_config/master_template.yml @@ -696,7 +696,10 @@ docker_cln_sh: | # TODO check if admin rights are needed and make sure output files are readable and deletable by non-root users on the host CONTAINER_NAME=odelia_swarm_client_{~~client_name~~}___REPLACED_BY_CONTAINER_VERSION_IDENTIFIER_WHEN_BUILDING_DOCKER_IMAGE__ - DOCKER_OPTIONS_A="--name=$CONTAINER_NAME --gpus=$GPU2USE" + DOCKER_OPTIONS_A="--name=$CONTAINER_NAME --gpus=$GPU2USE -u $(id -u):$(id -g)" + if [[ ! -z "$ODELIA_ADDITIONAL_DOCKER_OPTIONS" ]]; then + DOCKER_OPTIONS_A+=" ${ODELIA_ADDITIONAL_DOCKER_OPTIONS}" + fi DOCKER_MOUNTS="-v /etc/passwd:/etc/passwd -v /etc/group:/etc/group -v $DIR/..:/startupkit/ -v $MY_SCRATCH_DIR:/scratch/" if [[ ! -z "$MY_DATA_DIR" ]]; then DOCKER_MOUNTS+=" -v $MY_DATA_DIR:/data/:ro" diff --git a/runIntegrationTests.sh b/runIntegrationTests.sh index 799084b1..3433195b 100755 --- a/runIntegrationTests.sh +++ b/runIntegrationTests.sh @@ -255,7 +255,7 @@ kill_testing_vpn () { start_server_and_clients () { echo "[Run] Start server and client Docker containers ..." - + export ODELIA_ADDITIONAL_DOCKER_OPTIONS="--add-host testserver.local:10.8.0.4" cd "$PROJECT_DIR"/prod_00 cd testserver.local/startup ./docker.sh --no_pull --start_server From f59465e9a66b0641971f1bb20e1ada27a4d28e75 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 30 Sep 2025 14:43:54 +0200 Subject: [PATCH 72/80] updated pinned versions --- docker_config/Dockerfile_ODELIA | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/docker_config/Dockerfile_ODELIA b/docker_config/Dockerfile_ODELIA index ca39165a..b4f1e9ae 100644 --- a/docker_config/Dockerfile_ODELIA +++ b/docker_config/Dockerfile_ODELIA @@ -24,7 +24,7 @@ RUN apt install -y \ bsdutils=1:2.37.2-4ubuntu3.4 \ ca-certificates=20240203~22.04.1 \ coreutils=8.32-4.1ubuntu1.2 \ - dpkg=1.21.1ubuntu2.3 \ + dpkg=1.21.1ubuntu2.6 \ e2fsprogs=1.46.5-2ubuntu1.2 \ gpgv=2.2.27-3ubuntu2.4 \ libblkid1=2.37.2-4ubuntu3.4 \ @@ -192,15 +192,14 @@ RUN apt install -y \ # openvpn iputils-ping net-tools sudo and dependencies at fixed versions # TODO remove tools only needed for debugging RUN apt install -y \ - libelf1=0.186-1ubuntu0.1 \ - libbpf0=1:0.5.0-1ubuntu22.04.1 \ - libcap2-bin=1:2.44-1ubuntu0.22.04.2 \ iproute2=5.15.0-1ubuntu2 \ iputils-ping=3:20211215-1ubuntu0.1 \ libatm1=1:2.5.1-4build2 \ + libbpf0=1:0.5.0-1ubuntu22.04.1 \ + libcap2-bin=1:2.44-1ubuntu0.22.04.2 \ + libelf1=0.186-1ubuntu0.1 \ + liblzo2-2=2.10-2build3 \ libpam-cap=1:2.44-1ubuntu0.22.04.2 \ - sudo=1.9.9-1ubuntu2.5 \ - liblzo2-2=2.10-2build3 \ libpkcs11-helper1=1.28-1ubuntu0.22.04.1 \ net-tools=1.60+git20181103.0eebece-1ubuntu5.4 \ openvpn=2.5.11-0ubuntu0.22.04.1 From 04516dcf6f044719c03ab36ee73e421f3f53afe5 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 30 Sep 2025 14:44:06 +0200 Subject: [PATCH 73/80] added VPN IPs for production server, use same "--add-host" mechanism for all nodes --- docker_config/master_template.yml | 25 ++++++++++++++++++------- 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/docker_config/master_template.yml b/docker_config/master_template.yml index cd48e4cd..c3f3d9d2 100644 --- a/docker_config/master_template.yml +++ b/docker_config/master_template.yml @@ -335,8 +335,6 @@ authz_def: | fl_admin_sh: | #!/usr/bin/env bash - # TODO add name and IP address for productive server - echo "10.8.0.4 testserver.local" >> /etc/hosts openvpn ./vpn_client.ovpn >> nohup_vpn.out 2>&1 & DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" @@ -373,8 +371,6 @@ start_ovsr_sh: | start_cln_sh: | #!/usr/bin/env bash - # TODO add name and IP address for productive server - echo "10.8.0.4 testserver.local" >> /etc/hosts openvpn ./vpn_client.ovpn >> nohup_vpn.out 2>&1 & DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" @@ -693,10 +689,9 @@ docker_cln_sh: | docker pull "$DOCKER_IMAGE" fi - # TODO check if admin rights are needed and make sure output files are readable and deletable by non-root users on the host - CONTAINER_NAME=odelia_swarm_client_{~~client_name~~}___REPLACED_BY_CONTAINER_VERSION_IDENTIFIER_WHEN_BUILDING_DOCKER_IMAGE__ DOCKER_OPTIONS_A="--name=$CONTAINER_NAME --gpus=$GPU2USE -u $(id -u):$(id -g)" + DOCKER_OPTIONS_A+=" --add-host dl3.tud.de:72.24.4.65 --add-host dl3:72.24.4.65" if [[ ! -z "$ODELIA_ADDITIONAL_DOCKER_OPTIONS" ]]; then DOCKER_OPTIONS_A+=" ${ODELIA_ADDITIONAL_DOCKER_OPTIONS}" fi @@ -788,10 +783,16 @@ docker_svr_sh: | rm -rf ../pid.fl ../daemon_pid.fl # clean up potential leftovers from previous run + ADDITIONAL_DOCKER_OPTIONS=" --add-host dl3.tud.de:72.24.4.65 --add-host dl3:72.24.4.65" + if [[ ! -z "$ODELIA_ADDITIONAL_DOCKER_OPTIONS" ]]; then + ADDITIONAL_DOCKER_OPTIONS+=" ${ODELIA_ADDITIONAL_DOCKER_OPTIONS}" + fi + echo "Starting docker with $DOCKER_IMAGE as $CONTAINER_NAME" # Run docker with appropriate parameters if [ ! -z "$START_SERVER" ]; then docker run -d -t --rm --name=$CONTAINER_NAME \ + ${ADDITIONAL_DOCKER_OPTIONS} \ -v $DIR/..:/startupkit/ -w /startupkit/startup/ \ --ipc=host $NETARG $DOCKER_IMAGE \ /bin/bash -c "nohup ./start.sh >> nohup.out 2>&1 && chmod a+r nohup.out && /bin/bash" @@ -801,6 +802,7 @@ docker_svr_sh: | /bin/bash -c "pip-licenses -s -u --order=license" elif [ ! -z "$INTERACTIVE" ]; then docker run --rm -it --detach-keys="ctrl-x" --name=$CONTAINER_NAME \ + ${ADDITIONAL_DOCKER_OPTIONS} \ -v $DIR/..:/startupkit/ -w /startupkit/startup/ \ --ipc=host $NETARG $DOCKER_IMAGE \ /bin/bash -c "/bin/bash" @@ -835,8 +837,17 @@ docker_adm_sh: | fi CONTAINER_NAME=odelia_swarm_admin___REPLACED_BY_CONTAINER_VERSION_IDENTIFIER_WHEN_BUILDING_DOCKER_IMAGE__ + ADDITIONAL_DOCKER_OPTIONS=" --add-host dl3.tud.de:72.24.4.65 --add-host dl3:72.24.4.65" + if [[ ! -z "$ODELIA_ADDITIONAL_DOCKER_OPTIONS" ]]; then + ADDITIONAL_DOCKER_OPTIONS+=" ${ODELIA_ADDITIONAL_DOCKER_OPTIONS}" + fi + echo "Starting docker with $DOCKER_IMAGE as $CONTAINER_NAME" - docker run --rm -it --name=$CONTAINER_NAME -v $DIR/../local/:/fl_admin/local/ -v $DIR/../startup/:/fl_admin/startup/ -w /fl_admin/startup/ $NETARG $DOCKER_IMAGE /bin/bash -c "./fl_admin.sh" + docker run --rm -it --name=$CONTAINER_NAME \ + ${ADDITIONAL_DOCKER_OPTIONS} \ + -v $DIR/../local/:/fl_admin/local/ -v $DIR/../startup/:/fl_admin/startup/ \ + -w /fl_admin/startup/ $NETARG $DOCKER_IMAGE \ + /bin/bash -c "./fl_admin.sh" compose_yaml: | services: From 9780daf1246ee470a08f2c169a8d68ff9e47a8f6 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 30 Sep 2025 14:44:16 +0200 Subject: [PATCH 74/80] made path from where VPN credentials are copied more configurable --- _buildStartupKits.sh | 16 ++++++++++------ _generateStartupKitArchives.sh | 6 +++--- buildDockerImageAndStartupKits.sh | 3 ++- 3 files changed, 15 insertions(+), 10 deletions(-) diff --git a/_buildStartupKits.sh b/_buildStartupKits.sh index 94950376..c61d3cb3 100755 --- a/_buildStartupKits.sh +++ b/_buildStartupKits.sh @@ -2,27 +2,31 @@ set -euo pipefail -if [ "$#" -ne 3 ]; then - echo "Usage: _buildStartupKits.sh SWARM_PROJECT.yml VERSION_STRING CONTAINER_NAME" +if [ "$#" -lt 3 ]; then + echo "Usage: _buildStartupKits.sh SWARM_PROJECT.yml VERSION_STRING CONTAINER_NAME [PATH_FOR_VPN_CREDENTIALS]" exit 1 fi PROJECT_YML=$1 VERSION=$2 CONTAINER_NAME=$3 +PATH_FOR_VPN_CREDENTIALS="" +if [ "$#" -eq 4 ]; then + PATH_FOR_VPN_CREDENTIALS=$4 +fi sed -i 's#__REPLACED_BY_CURRENT_VERSION_NUMBER_WHEN_BUILDING_STARTUP_KITS__#'$VERSION'#' $PROJECT_YML -echo "Building startup kits for project $PROJECT_YML with version $VERSION" +ARGUMENTS="$PROJECT_YML $VERSION $PATH_FOR_VPN_CREDENTIALS" + +echo "Building startup kits: $ARGUMENTS" docker run --rm \ -u $(id -u):$(id -g) \ -v /etc/passwd:/etc/passwd \ -v /etc/group:/etc/group \ -v ./:/workspace/ \ -w /workspace/ \ - -e PROJECT_YML=$PROJECT_YML \ - -e VERSION=$VERSION \ $CONTAINER_NAME \ - /bin/bash -c "nvflare provision -p \$PROJECT_YML && ./_generateStartupKitArchives.sh \$PROJECT_YML \$VERSION"|| { echo "Docker run failed"; exit 1; } + /bin/bash -c "nvflare provision -p $PROJECT_YML && ./_generateStartupKitArchives.sh $ARGUMENTS"|| { echo "Docker run failed"; exit 1; } sed -i 's#'$VERSION'#__REPLACED_BY_CURRENT_VERSION_NUMBER_WHEN_BUILDING_STARTUP_KITS__#' $PROJECT_YML diff --git a/_generateStartupKitArchives.sh b/_generateStartupKitArchives.sh index 23ea02c9..c7510348 100755 --- a/_generateStartupKitArchives.sh +++ b/_generateStartupKitArchives.sh @@ -5,12 +5,12 @@ set -e OUTPUT_FOLDER=workspace/`grep "^name: " $1 | sed 's/name: //'` TARGET_FOLDER=`ls -d $OUTPUT_FOLDER/prod_* | tail -n 1` LONG_VERSION=$2 - -# TODO copy from different location +PATH_FOR_VPN_CREDENTIALS=$3 cd $TARGET_FOLDER + for startupkit in `ls .`; do - cp ../../../tests/local_vpn/client_configs/${startupkit}_client.ovpn ${startupkit}/startup/vpn_client.ovpn + cp $PATH_FOR_VPN_CREDENTIALS/${startupkit}_client.ovpn ${startupkit}/startup/vpn_client.ovpn zip -rq ${startupkit}_$LONG_VERSION.zip $startupkit echo "Generated startup kit $TARGET_FOLDER/${startupkit}_$LONG_VERSION.zip" done diff --git a/buildDockerImageAndStartupKits.sh b/buildDockerImageAndStartupKits.sh index 654e63cd..1a78fe54 100755 --- a/buildDockerImageAndStartupKits.sh +++ b/buildDockerImageAndStartupKits.sh @@ -77,7 +77,8 @@ docker build $DOCKER_BUILD_ARGS -t $CONTAINER_NAME $CLEAN_SOURCE_DIR -f docker_c echo "Docker image $CONTAINER_NAME built successfully" echo "./_buildStartupKits.sh $PROJECT_FILE $VERSION $CONTAINER_NAME" -./_buildStartupKits.sh $PROJECT_FILE $VERSION $CONTAINER_NAME +PATH_FOR_VPN_CREDENTIALS="../../../tests/local_vpn/client_configs" # TODO make configurable +./_buildStartupKits.sh $PROJECT_FILE $VERSION $CONTAINER_NAME $PATH_FOR_VPN_CREDENTIALS echo "Startup kits built successfully" rm -rf $CLEAN_SOURCE_DIR From 2b84145dcfe4038af36a824bd0006b996369edbf Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 30 Sep 2025 14:44:28 +0200 Subject: [PATCH 75/80] pass directory with VPN credentials as command-line argument --- _buildStartupKits.sh | 9 +++++---- _generateStartupKitArchives.sh | 8 ++++++-- buildDockerImageAndStartupKits.sh | 9 +++++---- 3 files changed, 16 insertions(+), 10 deletions(-) diff --git a/_buildStartupKits.sh b/_buildStartupKits.sh index c61d3cb3..47f064c8 100755 --- a/_buildStartupKits.sh +++ b/_buildStartupKits.sh @@ -3,21 +3,21 @@ set -euo pipefail if [ "$#" -lt 3 ]; then - echo "Usage: _buildStartupKits.sh SWARM_PROJECT.yml VERSION_STRING CONTAINER_NAME [PATH_FOR_VPN_CREDENTIALS]" + echo "Usage: _buildStartupKits.sh SWARM_PROJECT.yml VERSION_STRING CONTAINER_NAME [VPN_CREDENTIALS_DIR]" exit 1 fi PROJECT_YML=$1 VERSION=$2 CONTAINER_NAME=$3 -PATH_FOR_VPN_CREDENTIALS="" +MOUNT_VPN_CREDENTIALS_DIR="" if [ "$#" -eq 4 ]; then - PATH_FOR_VPN_CREDENTIALS=$4 + MOUNT_VPN_CREDENTIALS_DIR="-v $4:/vpn_credentials/" fi sed -i 's#__REPLACED_BY_CURRENT_VERSION_NUMBER_WHEN_BUILDING_STARTUP_KITS__#'$VERSION'#' $PROJECT_YML -ARGUMENTS="$PROJECT_YML $VERSION $PATH_FOR_VPN_CREDENTIALS" +ARGUMENTS="$PROJECT_YML $VERSION" echo "Building startup kits: $ARGUMENTS" docker run --rm \ @@ -25,6 +25,7 @@ docker run --rm \ -v /etc/passwd:/etc/passwd \ -v /etc/group:/etc/group \ -v ./:/workspace/ \ + $MOUNT_VPN_CREDENTIALS_DIR \ -w /workspace/ \ $CONTAINER_NAME \ /bin/bash -c "nvflare provision -p $PROJECT_YML && ./_generateStartupKitArchives.sh $ARGUMENTS"|| { echo "Docker run failed"; exit 1; } diff --git a/_generateStartupKitArchives.sh b/_generateStartupKitArchives.sh index c7510348..e76161fc 100755 --- a/_generateStartupKitArchives.sh +++ b/_generateStartupKitArchives.sh @@ -5,12 +5,16 @@ set -e OUTPUT_FOLDER=workspace/`grep "^name: " $1 | sed 's/name: //'` TARGET_FOLDER=`ls -d $OUTPUT_FOLDER/prod_* | tail -n 1` LONG_VERSION=$2 -PATH_FOR_VPN_CREDENTIALS=$3 cd $TARGET_FOLDER for startupkit in `ls .`; do - cp $PATH_FOR_VPN_CREDENTIALS/${startupkit}_client.ovpn ${startupkit}/startup/vpn_client.ovpn + VPN_CREDENTIALS_FILE=/vpn_credentials/${startupkit}_client.ovpn + if [[ -f $VPN_CREDENTIALS_FILE ]]; then + cp $VPN_CREDENTIALS_FILE ${startupkit}/startup/vpn_client.ovpn + else + echo "$VPN_CREDENTIALS_FILE does not exist, omitting VPN credentials for ${startupkit} in startup kit" + fi zip -rq ${startupkit}_$LONG_VERSION.zip $startupkit echo "Generated startup kit $TARGET_FOLDER/${startupkit}_$LONG_VERSION.zip" done diff --git a/buildDockerImageAndStartupKits.sh b/buildDockerImageAndStartupKits.sh index 1a78fe54..3786b95b 100755 --- a/buildDockerImageAndStartupKits.sh +++ b/buildDockerImageAndStartupKits.sh @@ -13,14 +13,15 @@ DOCKER_BUILD_ARGS="--no-cache --progress=plain"; while [[ "$#" -gt 0 ]]; do case $1 in -p) PROJECT_FILE="$2"; shift ;; + -c) VPN_CREDENTIALS_DIR="$2"; shift ;; --use-docker-cache) DOCKER_BUILD_ARGS="";; *) echo "Unknown parameter passed: $1"; exit 1 ;; esac shift done -if [ -z "$PROJECT_FILE" ]; then - echo "Usage: buildDockerImageAndStartupKits.sh -p [--use-docker-cache]" +if [[ -z "$PROJECT_FILE" || -z "$VPN_CREDENTIALS_DIR" ]]; then + echo "Usage: buildDockerImageAndStartupKits.sh -p -c [--use-docker-cache]" exit 1 fi @@ -77,8 +78,8 @@ docker build $DOCKER_BUILD_ARGS -t $CONTAINER_NAME $CLEAN_SOURCE_DIR -f docker_c echo "Docker image $CONTAINER_NAME built successfully" echo "./_buildStartupKits.sh $PROJECT_FILE $VERSION $CONTAINER_NAME" -PATH_FOR_VPN_CREDENTIALS="../../../tests/local_vpn/client_configs" # TODO make configurable -./_buildStartupKits.sh $PROJECT_FILE $VERSION $CONTAINER_NAME $PATH_FOR_VPN_CREDENTIALS +VPN_CREDENTIALS_DIR=$(realpath $VPN_CREDENTIALS_DIR) +./_buildStartupKits.sh $PROJECT_FILE $VERSION $CONTAINER_NAME $VPN_CREDENTIALS_DIR echo "Startup kits built successfully" rm -rf $CLEAN_SOURCE_DIR From 0d1f098ccea06b96033e70f833963e72d36d1f1b Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 30 Sep 2025 14:44:39 +0200 Subject: [PATCH 76/80] extended documentation how to build startup kits with VPN credentials --- assets/readme/README.operator.md | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/assets/readme/README.operator.md b/assets/readme/README.operator.md index d67b03f1..3c88b6a1 100644 --- a/assets/readme/README.operator.md +++ b/assets/readme/README.operator.md @@ -19,18 +19,21 @@ For example, add the following line (replace `` with the server's actual IP dl3.tud.de dl3 ``` +TODO describe this in participant REAME if needed + ## Create Startup Kits ### Via Script (recommended) 1. Use, e.g., the file `application/provision/project_MEVIS_test.yml`, adapt as needed (network protocol etc.) -2. Call `buildDockerImageAndStartupKits.sh -p /path/to/project_configuration.yml` to build the Docker image and the startup kits +2. Call `buildDockerImageAndStartupKits.sh -p /path/to/project_configuration.yml -c /path/to/directory/with/VPN/credentials` to build the Docker image and the startup kits + - swarm nodes (admin, server, clients) are configured in `project_configuration.yml` + - the directory with VPN credentials should contain one `.ovpn` file per node + - use `-c tests/local_vpn/client_configs/` to build startup kits for the integration tests 3. Startup kits are generated to `workspace//prod_00/` 4. Deploy startup kits to the respective server/client operators 5. Push the Docker image to the registry -TODO describe what needs to be done for productive VPN credentials (that must remain a local resource and not be committed) - ### Via the Dashboard (not recommended) Build the Docker image as described above. @@ -81,7 +84,7 @@ passwords somewhere, they are only displayed once (or you can download them agai ## Starting a Swarm Training -1. Connect the *server* host to the VPN as described above. +1. Connect the *server* host to the VPN as described above. (TODO update documentation, this step is not needed if the Docker container handles the VPN connection.) 2. Start the *server* startup kit using the respective `startup/docker.sh` script with the option to start the server 3. Provide the *client* startup kits to the swarm participants (be aware that email providers or other channels may prevent encrypted archives) From 4ebad3aa67f3deb4599f2c1f5250ac4b4b6bf384 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 30 Sep 2025 14:55:30 +0200 Subject: [PATCH 77/80] CI script now needs additional argument for building image --- scripts/ci/update_apt_versions.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/ci/update_apt_versions.sh b/scripts/ci/update_apt_versions.sh index 75d6732a..0d1d00ec 100755 --- a/scripts/ci/update_apt_versions.sh +++ b/scripts/ci/update_apt_versions.sh @@ -4,6 +4,7 @@ set -e DOCKERFILE_PATH="docker_config/Dockerfile_ODELIA" LOG_PATH=$(mktemp) PROJECT_YML="tests/provision/dummy_project_for_testing.yml" +VPN_TEST_CREDENTIALS="tests/local_vpn/client_configs/" echo "[INFO] Removing APT version pins from Dockerfile..." scripts/dev_utils/dockerfile_update_removeVersionApt.py "$DOCKERFILE_PATH" @@ -14,7 +15,7 @@ git config user.name "GitHub CI" git commit "$DOCKERFILE_PATH" -m "WIP: remove apt versions for rebuild" || echo "[INFO] No version pin removal change to commit." echo "[INFO] Rebuilding Docker image and capturing logs..." -if ! ./buildDockerImageAndStartupKits.sh -p "$PROJECT_YML" > "$LOG_PATH" 2>&1; then +if ! ./buildDockerImageAndStartupKits.sh -p "$PROJECT_YML" -c "$VPN_TEST_CREDENTIALS" > "$LOG_PATH" 2>&1; then echo "Build failed. Output:" cat "$LOG_PATH" exit 1 From cf0dec9202ec1fe9e06dbde4d88511f991421b59 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Tue, 30 Sep 2025 14:56:41 +0200 Subject: [PATCH 78/80] updated apt package versions --- docker_config/Dockerfile_ODELIA | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docker_config/Dockerfile_ODELIA b/docker_config/Dockerfile_ODELIA index b4f1e9ae..6c7c8d6d 100644 --- a/docker_config/Dockerfile_ODELIA +++ b/docker_config/Dockerfile_ODELIA @@ -62,7 +62,7 @@ RUN apt install -y \ # Install apt-transport-https curl gnupg lsb-release zip and dependencies at defined versions RUN apt install -y \ apt-transport-https=2.4.14 \ - curl=7.81.0-1ubuntu1.20 \ + curl=7.81.0-1ubuntu1.21 \ dirmngr=2.2.27-3ubuntu2.4 \ distro-info-data=0.52ubuntu0.9 \ gnupg-l10n=2.2.27-3ubuntu2.4 \ @@ -76,7 +76,7 @@ RUN apt install -y \ gpgsm=2.2.27-3ubuntu2.4 \ libassuan0=2.5.5-1build1 \ libbrotli1=1.0.9-2build6 \ - libcurl4=7.81.0-1ubuntu1.20 \ + libcurl4=7.81.0-1ubuntu1.21 \ libexpat1=2.4.7-1ubuntu0.6 \ libksba8=1.6.0-2ubuntu0.2 \ libldap-2.5-0=2.5.19+dfsg-0ubuntu0.22.04.1 \ @@ -116,7 +116,7 @@ RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings # Install docker-ce docker-ce-cli containerd.io and dependencies at fixed versions RUN apt install -y \ apparmor=3.0.4-2ubuntu2.4 \ - containerd.io=1.7.27-1 \ + containerd.io=1.7.28-0~ubuntu.22.04~jammy \ dbus-user-session=1.12.20-2ubuntu4.1 \ dbus=1.12.20-2ubuntu4.1 \ dmsetup=2:1.02.175-2.1ubuntu5 \ @@ -135,7 +135,7 @@ RUN apt install -y \ libbsd0=0.11.5-1 \ libcbor0.8=0.8.0-2ubuntu1 \ libcryptsetup12=2:2.4.3-1ubuntu1.3 \ - libcurl3-gnutls=7.81.0-1ubuntu1.20 \ + libcurl3-gnutls=7.81.0-1ubuntu1.21 \ libdbus-1-3=1.12.20-2ubuntu4.1 \ libdevmapper1.02.1=2:1.02.175-2.1ubuntu5 \ libedit2=3.1-20210910-1build1 \ From 29561e19d7ce309be8231d4af4a8075cc1b9279d Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Thu, 2 Oct 2025 15:13:29 +0200 Subject: [PATCH 79/80] build only one test image (without Docker cache), added argument (folder with VPN credentials) needed now --- .github/workflows/pr-test.yaml | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/.github/workflows/pr-test.yaml b/.github/workflows/pr-test.yaml index 962df74d..3a027852 100644 --- a/.github/workflows/pr-test.yaml +++ b/.github/workflows/pr-test.yaml @@ -22,7 +22,6 @@ jobs: SITE_NAME: UKA PYTHONUNBUFFERED: 1 - steps: - name: Checkout repository (with submodules) uses: actions/checkout@v3 @@ -37,18 +36,13 @@ jobs: echo "VERSION=$VERSION" echo "version=$VERSION" >> $GITHUB_OUTPUT - - name: Build Docker image for real project (MEVIS) - run: | - chmod +x buildDockerImageAndStartupKits.sh - ./buildDockerImageAndStartupKits.sh -p application/provision/project_MEVIS_test.yml + - name: Build Docker image and startup kits for test project + run: ./buildDockerImageAndStartupKits.sh -p tests/provision/dummy_project_for_testing.yml -c tests/local_vpn/client_configs - name: Show workspace path for MEVIS project run: | echo "WORKSPACE_PATH: ${{ env.WORKSPACE_PATH }}" - find workspace -maxdepth 1 -type d -name "odelia_*_MEVIS_test" || echo "No workspace found" - - - name: Build Docker image and dummy startup kits - run: ./buildDockerImageAndStartupKits.sh -p tests/provision/dummy_project_for_testing.yml --use-docker-cache + find workspace -maxdepth 1 -type d -name "odelia_*_dummy_project_for_testing" || echo "No workspace found" - name: Run integration test checking documentation on github continue-on-error: false From 837c42d0b4445b1e58f36a6a674c5b1de5921784 Mon Sep 17 00:00:00 2001 From: Ole Schwen Date: Thu, 2 Oct 2025 15:27:39 +0200 Subject: [PATCH 80/80] corrected comment --- .github/workflows/pr-test.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pr-test.yaml b/.github/workflows/pr-test.yaml index 3a027852..3106cd9b 100644 --- a/.github/workflows/pr-test.yaml +++ b/.github/workflows/pr-test.yaml @@ -39,7 +39,7 @@ jobs: - name: Build Docker image and startup kits for test project run: ./buildDockerImageAndStartupKits.sh -p tests/provision/dummy_project_for_testing.yml -c tests/local_vpn/client_configs - - name: Show workspace path for MEVIS project + - name: Show workspace path for test project run: | echo "WORKSPACE_PATH: ${{ env.WORKSPACE_PATH }}" find workspace -maxdepth 1 -type d -name "odelia_*_dummy_project_for_testing" || echo "No workspace found"