From 1007a639ee62a2d06b3a63c1e2ec88bbc185ceaa Mon Sep 17 00:00:00 2001 From: Guillaume Lagrange Date: Thu, 20 Nov 2025 12:18:07 +0100 Subject: [PATCH] feat: add --allow-empty run option Mostly aimed at action-runner CI, which just tests the CLI flags it passes and does not run benches --- src/app.rs | 4 +- src/run/config.rs | 7 +++ src/run/mod.rs | 5 ++ src/run/runner/wall_time/executor.rs | 2 +- src/run/runner/wall_time/helpers.rs | 73 ++++++++++++++++++++++------ 5 files changed, 72 insertions(+), 19 deletions(-) diff --git a/src/app.rs b/src/app.rs index fa289ab1..314217d4 100644 --- a/src/app.rs +++ b/src/app.rs @@ -60,7 +60,7 @@ pub struct Cli { #[derive(Subcommand, Debug)] enum Commands { /// Run the bench command and upload the results to CodSpeed - Run(run::RunArgs), + Run(Box<run::RunArgs>), /// Manage the CLI authentication state Auth(auth::AuthArgs), /// Pre-install the codspeed executors @@ -88,7 +88,7 @@ pub async fn run() -> Result<()> { match cli.command { Commands::Run(args) => { - run::run(args, &api_client, &codspeed_config, setup_cache_dir).await? + run::run(*args, &api_client, &codspeed_config, setup_cache_dir).await? 
} Commands::Auth(args) => auth::run(args, &api_client, cli.config_name.as_deref()).await?, Commands::Setup => setup::setup(setup_cache_dir).await?, diff --git a/src/run/config.rs b/src/run/config.rs index 79a10e25..f74c4013 100644 --- a/src/run/config.rs +++ b/src/run/config.rs @@ -25,6 +25,7 @@ pub struct Config { pub skip_upload: bool, pub skip_run: bool, pub skip_setup: bool, + pub allow_empty: bool, } #[derive(Debug, PartialEq, Clone)] @@ -58,6 +59,7 @@ impl Config { skip_upload: false, skip_run: false, skip_setup: false, + allow_empty: false, } } } @@ -97,6 +99,7 @@ impl TryFrom<RunArgs> for Config { skip_upload: args.skip_upload, skip_run: args.skip_run, skip_setup: args.skip_setup, + allow_empty: args.allow_empty, }) } } @@ -131,6 +134,7 @@ mod tests { skip_upload: false, skip_run: false, skip_setup: false, + allow_empty: false, perf_run_args: PerfRunArgs { enable_perf: false, perf_unwinding_mode: None, @@ -146,6 +150,7 @@ mod tests { assert!(!config.skip_upload); assert!(!config.skip_run); assert!(!config.skip_setup); + assert!(!config.allow_empty); assert_eq!(config.command, "cargo codspeed bench"); } @@ -165,6 +170,7 @@ mod tests { skip_upload: true, skip_run: true, skip_setup: true, + allow_empty: true, perf_run_args: PerfRunArgs { enable_perf: false, perf_unwinding_mode: Some(UnwindingMode::FramePointer), @@ -199,6 +205,7 @@ mod tests { assert!(config.skip_upload); assert!(config.skip_run); assert!(config.skip_setup); + assert!(config.allow_empty); assert_eq!(config.command, "cargo codspeed bench"); } diff --git a/src/run/mod.rs b/src/run/mod.rs index 1704ec5c..37a5770c 100644 --- a/src/run/mod.rs +++ b/src/run/mod.rs @@ -130,6 +130,10 @@ pub struct RunArgs { #[arg(long, default_value = "false", hide = true)] pub skip_setup: bool, + /// Allow runs without any benchmarks to succeed instead of failing + #[arg(long, default_value = "false", hide = true)] + pub allow_empty: bool, + #[command(flatten)] pub perf_run_args: PerfRunArgs, @@ -169,6 +173,7 @@ impl RunArgs 
{ skip_upload: false, skip_run: false, skip_setup: false, + allow_empty: false, perf_run_args: PerfRunArgs { enable_perf: false, perf_unwinding_mode: None, diff --git a/src/run/runner/wall_time/executor.rs b/src/run/runner/wall_time/executor.rs index fd49ed29..749aba01 100644 --- a/src/run/runner/wall_time/executor.rs +++ b/src/run/runner/wall_time/executor.rs @@ -225,7 +225,7 @@ impl Executor for WallTimeExecutor { perf.save_files_to(&run_data.profile_folder).await?; } - validate_walltime_results(&run_data.profile_folder)?; + validate_walltime_results(&run_data.profile_folder, config.allow_empty)?; Ok(()) } diff --git a/src/run/runner/wall_time/helpers.rs b/src/run/runner/wall_time/helpers.rs index f10c4b6f..23c15acc 100644 --- a/src/run/runner/wall_time/helpers.rs +++ b/src/run/runner/wall_time/helpers.rs @@ -11,10 +11,15 @@ fn add_empty_result_error_explanation(error_details: &str) -> String { } /// Validates that walltime results exist and contain at least one benchmark. -pub fn validate_walltime_results(profile_folder: &Path) -> Result<()> { +/// When `allow_empty` is true, empty benchmark results are allowed. +pub fn validate_walltime_results(profile_folder: &Path, allow_empty: bool) -> Result<()> { let results_dir = profile_folder.join("results"); if !results_dir.exists() { + if allow_empty { + warn!("No walltime results found in profile folder: {results_dir:?}."); + return Ok(()); + } bail!(add_empty_result_error_explanation(&format!( "No walltime results found in profile folder: {results_dir:?}." ))); @@ -22,7 +27,7 @@ pub fn validate_walltime_results(profile_folder: &Path) -> Result<()> { debug!("Validating walltime results in {results_dir:?}"); - let mut found_valid_results = false; + let mut found_benchmark_results = false; for entry in std::fs::read_dir(&results_dir)? 
{ let entry = entry?; @@ -41,19 +46,26 @@ pub fn validate_walltime_results(profile_folder: &Path) -> Result<()> { .with_context(|| format!("Failed to parse walltime results from: {path:?}"))?; if results.benchmarks.is_empty() { - bail!(add_empty_result_error_explanation(&format!( - "No benchmarks found in walltime results file: {path:?}." - ))); + if !allow_empty { + bail!(add_empty_result_error_explanation(&format!( + "No benchmarks found in walltime results file: {path:?}." + ))); + } + debug!("No benchmarks found in {path:?} (allowed)"); } - found_valid_results = true; + found_benchmark_results = true; debug!( "Found {} benchmark(s) in {path:?}", results.benchmarks.len() ); } - if !found_valid_results { + if !found_benchmark_results { + if allow_empty { + warn!("No JSON result files found in: {results_dir:?}."); + return Ok(()); + } bail!(add_empty_result_error_explanation(&format!( "No JSON result files found in: {results_dir:?}." ))); @@ -174,7 +186,7 @@ mod tests { let profile = TestProfileFolder::new(); profile.write_json_file("results.json", &valid_walltime_results_json(1)); - let result = validate_walltime_results(profile.path()); + let result = validate_walltime_results(profile.path(), false); assert!(result.is_ok()); } @@ -184,7 +196,7 @@ mod tests { profile.write_json_file("results1.json", &valid_walltime_results_json(2)); profile.write_json_file("results2.json", &valid_walltime_results_json(3)); - let result = validate_walltime_results(profile.path()); + let result = validate_walltime_results(profile.path(), false); assert!(result.is_ok()); } @@ -195,7 +207,7 @@ mod tests { profile.write_text_file("readme.txt", "This is a text file"); profile.write_text_file("data.csv", "col1,col2"); - let result = validate_walltime_results(profile.path()); + let result = validate_walltime_results(profile.path(), false); assert!(result.is_ok()); } @@ -206,7 +218,7 @@ mod tests { let profile = TestProfileFolder::new(); // Don't create results directory - let result = 
validate_walltime_results(profile.path()); + let result = validate_walltime_results(profile.path(), false); assert!(result.is_err()); let error = result.unwrap_err().to_string(); assert!(error.contains("No walltime results found in profile folder")); @@ -217,7 +229,7 @@ mod tests { let profile = TestProfileFolder::new(); profile.create_results_dir(); - let result = validate_walltime_results(profile.path()); + let result = validate_walltime_results(profile.path(), false); assert!(result.is_err()); let error = result.unwrap_err().to_string(); assert!(error.contains("No JSON result files found in")); @@ -229,7 +241,7 @@ mod tests { profile.write_text_file("readme.txt", "some text"); profile.write_text_file("data.csv", "col1,col2"); - let result = validate_walltime_results(profile.path()); + let result = validate_walltime_results(profile.path(), false); assert!(result.is_err()); let error = result.unwrap_err().to_string(); assert!(error.contains("No JSON result files found in")); @@ -240,7 +252,7 @@ mod tests { let profile = TestProfileFolder::new(); profile.write_json_file("results.json", &empty_benchmarks_json()); - let result = validate_walltime_results(profile.path()); + let result = validate_walltime_results(profile.path(), false); assert!(result.is_err()); let error = result.unwrap_err().to_string(); assert!(error.contains("No benchmarks found in walltime results file")); @@ -251,7 +263,7 @@ mod tests { let profile = TestProfileFolder::new(); profile.write_json_file("results.json", "{ invalid json }"); - let result = validate_walltime_results(profile.path()); + let result = validate_walltime_results(profile.path(), false); assert!(result.is_err()); let error = result.unwrap_err().to_string(); assert!(error.contains("Failed to parse walltime results from")); @@ -263,9 +275,38 @@ mod tests { profile.write_json_file("results1.json", &valid_walltime_results_json(2)); profile.write_json_file("results2.json", &empty_benchmarks_json()); - let result = 
validate_walltime_results(profile.path()); + let result = validate_walltime_results(profile.path(), false); assert!(result.is_err()); let error = result.unwrap_err().to_string(); assert!(error.contains("No benchmarks found in walltime results file")); } + + // Allow empty cases + + #[test] + fn test_allow_empty_with_empty_benchmarks() { + let profile = TestProfileFolder::new(); + profile.write_json_file("results.json", &empty_benchmarks_json()); + + let result = validate_walltime_results(profile.path(), true); + assert!(result.is_ok()); + } + + #[test] + fn test_allow_empty_with_missing_results_directory() { + let profile = TestProfileFolder::new(); + // Don't create results directory + + let result = validate_walltime_results(profile.path(), true); + assert!(result.is_ok()); + } + + #[test] + fn test_allow_empty_with_no_json_files() { + let profile = TestProfileFolder::new(); + profile.create_results_dir(); + + let result = validate_walltime_results(profile.path(), true); + assert!(result.is_ok()); + } }