From 94cb59c5512e7321fb7ccdf4032ede08eb3f899e Mon Sep 17 00:00:00 2001 From: js Date: Tue, 7 Oct 2025 16:39:58 -0300 Subject: [PATCH 1/5] Initial creation db and validation + store runs on mutation cmd Added Path to flag Added get_commit_hash function refactored into functions implemented error handling for rusqlite errors mod for sqlite --- Cargo.toml | 1 + src/error.rs | 3 + src/git_changes.rs | 11 ++ src/lib.rs | 1 + src/main.rs | 30 +++++ src/sqlite.rs | 265 +++++++++++++++++++++++++++++++++++++++++++++ 6 files changed, 311 insertions(+) create mode 100644 src/sqlite.rs diff --git a/Cargo.toml b/Cargo.toml index 5244fe0..dd28fb9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -26,6 +26,7 @@ futures = "0.3" walkdir = "2.4" rayon = "1.8" tempfile = "3.8" +rusqlite = "0.37.0" [dev-dependencies] tempfile = "3.8" diff --git a/src/error.rs b/src/error.rs index 36a61bd..dc38f13 100644 --- a/src/error.rs +++ b/src/error.rs @@ -26,6 +26,9 @@ pub enum MutationError { #[error("Walkdir error: {0}")] Walkdir(#[from] walkdir::Error), + #[error("SQLite error: {0}")] + Sqlite(#[from] rusqlite::Error), + #[error("Other error: {0}")] Other(#[from] anyhow::Error), } diff --git a/src/git_changes.rs b/src/git_changes.rs index 813123a..972f76a 100644 --- a/src/git_changes.rs +++ b/src/git_changes.rs @@ -82,6 +82,17 @@ pub async fn get_lines_touched(file_path: &str) -> Result> { Ok(lines) } +pub fn get_commit_hash() -> Result { + + let commit_hash = Command::new("git") + .args(["rev-parse", "HEAD"]) + .output() + .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_string()) + .unwrap_or_else(|_| "unknown".to_string()); + + Ok(commit_hash) +} + #[cfg(test)] mod tests { use super::*; diff --git a/src/lib.rs b/src/lib.rs index 295d75b..5814d18 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -34,6 +34,7 @@ //! } //! 
``` +pub mod sqlite; pub mod analyze; pub mod ast_analysis; pub mod coverage; diff --git a/src/main.rs b/src/main.rs index f5ffc36..3a791a2 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,3 +1,4 @@ +use anyhow::Error; use clap::{Parser, Subcommand}; use std::collections::HashMap; use std::path::PathBuf; @@ -10,6 +11,7 @@ mod git_changes; mod mutation; mod operators; mod report; +mod sqlite; use error::{MutationError, Result}; @@ -64,6 +66,10 @@ enum Commands { /// Add custom expert rule for arid node detection #[arg(long, value_name = "PATTERN")] add_expert_rule: Option, + + /// Optional path to SQLite database file (default: mutation.db) + #[arg(long, value_name = "PATH")] + sqlite: Option>, }, /// Analyze mutants Analyze { @@ -105,7 +111,10 @@ async fn main() -> Result<()> { only_security_mutations, disable_ast_filtering, add_expert_rule, + sqlite, } => { + let mut run_id: i64 = 0; + let skip_lines_map = if let Some(path) = skip_lines { read_skip_lines(&path)? } else { @@ -126,6 +135,16 @@ async fn main() -> Result<()> { } else { None }; + + let db_path = match sqlite { + Some(Some(path)) => { + let mut full_path = PathBuf::from("db"); + full_path.push(path); + Some(full_path) + } + Some(None) => Some(PathBuf::from("db/mutation.db")), + None => None, + }; if pr != 0 && file.is_some() { return Err(MutationError::InvalidInput( @@ -144,6 +163,11 @@ async fn main() -> Result<()> { println!("Custom expert rule will be applied: {}", expert_rule); } + if let Some(ref path) = db_path { + sqlite::check_db(path).map_err(Error::from)?; + run_id = sqlite::store_run(path, if pr == 0 { None } else { Some(pr) }).map_err(Error::from)?; + } + mutation::run_mutation( if pr == 0 { None } else { Some(pr) }, file, @@ -157,6 +181,12 @@ async fn main() -> Result<()> { add_expert_rule, ) .await?; + + if let Some(ref path) = db_path { + sqlite::store_mutants(path, run_id).map_err(Error::from)?; + + } + } Commands::Analyze { folder, diff --git a/src/sqlite.rs b/src/sqlite.rs new file 
mode 100644 index 0000000..fc4985c --- /dev/null +++ b/src/sqlite.rs @@ -0,0 +1,265 @@ +use rusqlite::params; +use std::fs; +use std::path::Path; +use std::path::PathBuf; +use rusqlite::{Connection, Result}; + +use crate::git_changes::{get_commit_hash}; + + +pub fn store_mutants(db_path: &PathBuf, run_id: i64) -> Result<()> { + + println!("SQLite option: Storing mutants on {}", db_path.display()); + let connection = Connection::open(db_path)?; + + + + + //run_id + println!("run_id: {}", run_id.to_string()); + //diff + //patch_hash + //command_to_test + //file_path + //operator + + /* + connection.execute(" + + INSERT INTO mutants (run_id , diff, patch_hash, command_to_test, file_path, operator) + VALUES (?1, ?2, ?3, ?4); + ", params![run_id, commit_hash, pr_number, tool_version],)?; + */ + //Filling mutants table + + + + // Fazer preenchimento da ultima tabela (mutants) + // TODO fill tables with run + // TODO test functionality + // TODO script test + + Ok(()) +} + + + +pub fn store_run(db_path: &PathBuf, pr_number: Option) -> Result { + + println!("SQLite option: Storing current run on {}", db_path.display()); + let connection = Connection::open(db_path)?; + + let proj_query_row: (i32, String) = connection.query_row( + "SELECT id, name FROM projects;", + [], + |row| Ok((row.get(0)?, row.get(1)?)) + )?; + + let project_id = proj_query_row.0; + println!("id: {}", project_id); + + /* + let commit_hash = Command::new("git") + .args(["rev-parse", "HEAD"]) + .output() + .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_string()) + .unwrap_or_else(|_| "unknown".to_string()); +*/ + + let commit_hash = match get_commit_hash() { + Ok(hash) => hash, + Err(_) => "unknown".to_string(), + }; + + println!("commit hash: {}", commit_hash); + + let tool_version = format!("{} {}", env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION")); + + connection.execute(" + + INSERT INTO runs (project_id , commit_hash, pr_number, tool_version) + VALUES (?1, ?2, ?3, ?4); + ", 
params![project_id, commit_hash, pr_number, tool_version],)?; + + let run_id = connection.last_insert_rowid(); + + Ok(run_id) +} + +fn _check_initial_row(connection: &Connection) -> Result<()> { + println!("SQLite option: Checking first row of projects..."); + + let result = connection.query_row( + "SELECT id, name, repository_url FROM projects WHERE id = 1;", + [], + |row| Ok((row.get::<_, i32>(0)?, row.get::<_, String>(1)?, row.get::<_, String>(2)?)) + ); + + match result { + Ok((id, name, repo)) => { + if id == 1 && name == "Bitcoin Core" && repo == "https://github.com/bitcoin/bitcoin" { + println!("SQLite option: Project table corrected filled!"); + } + }, + Err(rusqlite::Error::QueryReturnedNoRows) => { + println!("SQLite option: No matches found for projects table, filling initial row..."); + _fill_projects_table(&connection)?; + }, + Err(e) => { + eprintln!("SQLite option: FAILED to verify initial project: {}", e); + return Err(e); + } + } + + Ok(()) +} + +fn _fill_projects_table(connection: &Connection) -> Result<()> { + connection.execute(" + ---First time initialization + INSERT OR IGNORE INTO projects (id, name, repository_url) + VALUES (1, 'Bitcoin Core', 'https://github.com/bitcoin/bitcoin'); + ", [])?; + + Ok(()) +} + +fn _check_schema(connection: &Connection) -> Result<()> { + println!("SQLite option: Checking schema integrity..."); + + let expected_tables = vec!["projects", "runs", "mutants"]; + for table in expected_tables { + let exists: bool = connection.query_row( + "SELECT count(*) FROM sqlite_master WHERE type='table' AND name=?1;", + params![table], + |row| row.get(0), + )?; + + if !exists { + return Err(rusqlite::Error::SqliteFailure( + rusqlite::ffi::Error::new(1), + Some(format!("Missing table: {}", table)), + )); + } + } + + // Verificação de colunas essenciais por tabela (incluindo colunas virtuais) + let table_columns: Vec<(&str, Vec<&str>)> = vec![ + ("projects", vec!["id", "name", "repository_url"]), + ("runs", vec!["id", 
"project_id", "commit_hash", "pr_number", "created_at", "tool_version"]), + ("mutants", vec![ + "id", "run_id", "diff", "patch_hash", "status", "killed", + "command_to_test", "file_path", "operator" + ]), + ]; + + for (table, columns) in table_columns { + let mut stmt = connection.prepare(&format!("PRAGMA table_xinfo({});", table))?; + let column_names: Vec = stmt + .query_map([], |row| row.get::<_, String>(1))? + .filter_map(Result::ok) + .collect(); + + for col in columns { + if !column_names.contains(&col.to_string()) { + return Err(rusqlite::Error::SqliteFailure( + rusqlite::ffi::Error::new(1), + Some(format!("Missing column '{}' in table '{}'", col, table)), + )); + } + } + } + + println!("SQLite option: Schema verified successfully."); + Ok(()) +} + +fn _createdb(connection: &Connection) -> Result<()> { + println!("SQLite option:: New db detected initializing first fillment..."); + + // DB tables creation + connection.execute_batch(" + PRAGMA foreign_keys = ON; + + -- Projects + CREATE TABLE IF NOT EXISTS projects ( + id INTEGER PRIMARY KEY, + name TEXT NOT NULL, + repository_url TEXT, + UNIQUE(name), + UNIQUE(repository_url) + ); + + --Runs + CREATE TABLE IF NOT EXISTS runs ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + project_id INTEGER NOT NULL REFERENCES projects(id) ON DELETE CASCADE, + commit_hash TEXT NOT NULL, + pr_number INTEGER, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + tool_version TEXT, + FOREIGN KEY(project_id) REFERENCES projects(id) + ); + + CREATE INDEX IF NOT EXISTS idx_runs_project_created ON runs(project_id, created_at DESC); + CREATE INDEX IF NOT EXISTS idx_runs_commit ON runs(commit_hash); + + CREATE TABLE IF NOT EXISTS mutants ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + run_id INTEGER NOT NULL REFERENCES runs(id) ON DELETE CASCADE, + diff TEXT NOT NULL, + patch_hash TEXT NOT NULL, + status TEXT NOT NULL DEFAULT 'pending' + CHECK (status IN 
('pending','running','killed','survived','timeout','error','skipped','equivalent','unproductive')), + killed INTEGER GENERATED ALWAYS AS (CASE WHEN status='killed' THEN 1 ELSE 0 END) VIRTUAL, + command_to_test TEXT, + file_path TEXT NOT NULL, + operator TEXT NOT NULL, + FOREIGN KEY(run_id) REFERENCES runs(id) + ); + + CREATE INDEX IF NOT EXISTS idx_mutants_run_status ON mutants(run_id, status); + CREATE INDEX IF NOT EXISTS idx_mutants_file ON mutants(file_path); + CREATE INDEX IF NOT EXISTS idx_mutants_operator ON mutants(operator); + CREATE INDEX IF NOT EXISTS idx_mutants_killed ON mutants(killed); + + ")?; + + println!("SQLite option: Ok batch"); + + //Filling projects table + _fill_projects_table(&connection)?; + + Ok(()) +} + +pub fn check_db(db_path: &PathBuf) -> Result<()> { + + println!("SQLite option: Checking if db exist..."); + let is_new_db = !db_path.exists(); + + //Verify path integrity + let exist_path = Path::new("db"); + if !exist_path.exists() { + match fs::create_dir_all(exist_path) { + Ok(_) => {}, + Err(e) => { + eprintln!("FAIL creating new folder db: {}", e); + std::process::exit(1); + } + } + } + + let connection = Connection::open(db_path)?; + + if is_new_db { + _createdb(&connection)?; + + } else { + println!("SQLite option: Current db exists!"); + _check_schema(&connection)?; + _check_initial_row(&connection)?; + } + + Ok(()) +} \ No newline at end of file From dda14254124f9d021ab099645c948083b46b0757 Mon Sep 17 00:00:00 2001 From: JGsouzaa Date: Wed, 29 Oct 2025 09:27:05 -0300 Subject: [PATCH 2/5] Storing mutants on mutation cmd added hash for diffs sha2 dependency filepath functionality sqlite storing mutants into db bug fix in folder_path for sqlite --- Cargo.toml | 1 + src/analyze.rs | 2 +- src/main.rs | 9 ++- src/sqlite.rs | 160 +++++++++++++++++++++++++++++++++++++++---------- 4 files changed, 137 insertions(+), 35 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index dd28fb9..82507fd 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ 
-27,6 +27,7 @@ walkdir = "2.4" rayon = "1.8" tempfile = "3.8" rusqlite = "0.37.0" +sha2 = "0.10.9" [dev-dependencies] tempfile = "3.8" diff --git a/src/analyze.rs b/src/analyze.rs index 1ec478a..01545ff 100644 --- a/src/analyze.rs +++ b/src/analyze.rs @@ -35,7 +35,7 @@ pub async fn run_analysis( Ok(()) } -fn find_mutation_folders() -> Result> { +pub fn find_mutation_folders() -> Result> { let mut folders = Vec::new(); for entry in WalkDir::new(".").max_depth(1) { diff --git a/src/main.rs b/src/main.rs index 3a791a2..895a1a8 100644 --- a/src/main.rs +++ b/src/main.rs @@ -170,7 +170,7 @@ async fn main() -> Result<()> { mutation::run_mutation( if pr == 0 { None } else { Some(pr) }, - file, + file.clone(), one_mutant, only_security_mutations, range_lines, @@ -183,8 +183,11 @@ async fn main() -> Result<()> { .await?; if let Some(ref path) = db_path { - sqlite::store_mutants(path, run_id).map_err(Error::from)?; - + sqlite::store_mutants( + path, + run_id, + if pr == 0 { None } else { Some(pr) }, + file).map_err(Error::from)?; } } diff --git a/src/sqlite.rs b/src/sqlite.rs index fc4985c..698bf4b 100644 --- a/src/sqlite.rs +++ b/src/sqlite.rs @@ -1,3 +1,6 @@ +use sha2::{Sha256, Digest}; +use std::error::Error; +use std::process::Command; use rusqlite::params; use std::fs; use std::path::Path; @@ -5,45 +8,149 @@ use std::path::PathBuf; use rusqlite::{Connection, Result}; use crate::git_changes::{get_commit_hash}; +use crate::error::{MutationError}; +use crate::analyze::{find_mutation_folders}; +pub fn update_status_mutant() { +//TODO after running analyze change status to killed or survived +} -pub fn store_mutants(db_path: &PathBuf, run_id: i64) -> Result<()> { - +fn get_hash_from_diff(diff: &str) -> Result> { + let mut hasher = Sha256::new(); + hasher.update(diff.as_bytes()); + let result = hasher.finalize(); + let hash_hex = format!("{:x}", result); + Ok(hash_hex) +} + +fn get_file_diff(mainfile: Option, comparefile: PathBuf) -> Result> { + let mainfile = 
mainfile.ok_or("Missing source file to compare with mutant in get_file_diff proccess")?; + + let output = Command::new("diff") + .arg(&mainfile) + .arg(&comparefile) + .output()?; + + println!("Executing diff from files {:?} and {:?}", mainfile, comparefile); + + if output.status.success() { + Ok(String::from("Compare files are equal!")) + } else { + let diff_result = str::from_utf8(&output.stdout)?; + Ok(diff_result.to_string()) + } +} + +fn get_files_from_folder(filepath: &Path) -> Result, Box> { + println!("filepath get_files_from_folder: {:?}", filepath); + + if !filepath.is_dir() { + return Err(format!("Current path is not a folder: {:?}", filepath).into()); + } + + let entries = fs::read_dir(filepath)? + .filter_map(|entry| { + match entry { + Ok(e) => { + let path = e.path(); + if path.is_file() { + // Remove "original_file.txt" from vec + if let Some(name) = path.file_name() { + if name != "original_file.txt" { + return Some(path); + } + } + } + None + } + Err(_) => None, + } + }) + .collect(); + + Ok(entries) +} + + +fn check_mutation_folder( + file_to_mutate: &str, + pr_number: Option, + range_lines: Option<(usize, usize)>, +) -> Result { + let file_extension = if file_to_mutate.ends_with(".h") { + ".h" + } else if file_to_mutate.ends_with(".py") { + ".py" + } else { + ".cpp" + }; + + let file_name = Path::new(file_to_mutate) + .file_stem() + .and_then(|s| s.to_str()) + .ok_or_else(|| MutationError::InvalidInput("Invalid file path".to_string())) + .map_err(|e| rusqlite::Error::InvalidParameterName(e.to_string()))?; + + let ext = file_extension.trim_start_matches('.'); + let folder = if let Some(pr) = pr_number { + format!("muts-pr-{}-{}-{}", pr, file_name, ext) + } else if let Some(range) = range_lines { + format!("muts-pr-{}-{}-{}", file_name, range.0, range.1) + } else { + format!("muts-{}-{}", file_name, ext) + }; + + Ok(PathBuf::from(folder)) +} + +pub fn store_mutants(db_path: &PathBuf, run_id: i64, pr_number: Option, originFile: Option, range_lines: 
Option<(usize, usize)>) -> Result<()> { println!("SQLite option: Storing mutants on {}", db_path.display()); let connection = Connection::open(db_path)?; + let operator: String = "None".to_string(); + + //get mutants folder + //TODO continuar aqui, verificando o check_mutation + if let Some(file_path) = originFile.clone() { + let file_str = file_path.to_string_lossy().to_string(); + let mutation_folder = check_mutation_folder(&file_str, pr_number, range_lines); + let files = get_files_from_folder(&mutation_folder.unwrap()).unwrap_or_default(); + + for file in &files{ + let diff = get_file_diff(originFile.clone(), file.into()).unwrap_or_default(); + let patch_hash = get_hash_from_diff(&diff).unwrap_or_default(); + let mut file_path = String::new(); + file_path = originFile.clone().unwrap_or_default().to_string_lossy().into_owned(); - //run_id - println!("run_id: {}", run_id.to_string()); - //diff - //patch_hash - //command_to_test - //file_path - //operator + //run_id + println!("run_id: {}", run_id.to_string()); - /* - connection.execute(" + //diff + //println!("diff: {}", diff); - INSERT INTO mutants (run_id , diff, patch_hash, command_to_test, file_path, operator) - VALUES (?1, ?2, ?3, ?4); - ", params![run_id, commit_hash, pr_number, tool_version],)?; - */ - //Filling mutants table + //patch_hash + println!("patch_hash: {}", patch_hash); + //file_path + println!("file path: {:?}", file_path); + //operator + println!("operator: {}", operator); + let dummydiff = ""; + connection.execute(" - // Fazer preenchimento da ultima tabela (mutants) - // TODO fill tables with run - // TODO test functionality - // TODO script test + INSERT INTO mutants (run_id , diff, patch_hash, file_path, operator) + VALUES (?1, ?2, ?3, ?4, ?5); + ", params![run_id, dummydiff, patch_hash, file_path, operator],)?; + } + + }; Ok(()) } - - pub fn store_run(db_path: &PathBuf, pr_number: Option) -> Result { println!("SQLite option: Storing current run on {}", db_path.display()); @@ -57,14 
+164,6 @@ pub fn store_run(db_path: &PathBuf, pr_number: Option) -> Result { let project_id = proj_query_row.0; println!("id: {}", project_id); - - /* - let commit_hash = Command::new("git") - .args(["rev-parse", "HEAD"]) - .output() - .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_string()) - .unwrap_or_else(|_| "unknown".to_string()); -*/ let commit_hash = match get_commit_hash() { Ok(hash) => hash, @@ -143,7 +242,6 @@ fn _check_schema(connection: &Connection) -> Result<()> { } } - // Verificação de colunas essenciais por tabela (incluindo colunas virtuais) let table_columns: Vec<(&str, Vec<&str>)> = vec![ ("projects", vec!["id", "name", "repository_url"]), ("runs", vec!["id", "project_id", "commit_hash", "pr_number", "created_at", "tool_version"]), From 655422a8d8c9a17125ab2e14d7de791e9d8cb1e1 Mon Sep 17 00:00:00 2001 From: JGsouzaa Date: Fri, 19 Dec 2025 13:12:55 -0300 Subject: [PATCH 3/5] Update db on analyze cmd implemented update_status_mutant, change to snake case in variable, adition of a file_name colunm on mutants table implemented update command_to_test_mutant fix fullpath bug without -f flag on analysis --run_id necessary for --sqlite flag --- src/analyze.rs | 26 ++++++++++++++ src/main.rs | 38 ++++++++++++++++++-- src/sqlite.rs | 98 ++++++++++++++++++++++++++++++++++++++++++-------- 3 files changed, 146 insertions(+), 16 deletions(-) diff --git a/src/analyze.rs b/src/analyze.rs index 01545ff..a20eeca 100644 --- a/src/analyze.rs +++ b/src/analyze.rs @@ -1,3 +1,4 @@ +use crate::sqlite::{update_status_mutant, update_command_to_test_mutant}; use crate::error::{MutationError, Result}; use crate::report::generate_report; use std::fs; @@ -13,6 +14,8 @@ pub async fn run_analysis( jobs: u32, timeout_secs: u64, survival_threshold: f64, + db_path: Option, + run_id: Option, ) -> Result<()> { let folders = if let Some(folder_path) = folder { vec![folder_path] @@ -28,6 +31,8 @@ pub async fn run_analysis( jobs, timeout_secs, survival_threshold, + 
db_path.clone(), + run_id, ) .await?; } @@ -58,6 +63,8 @@ pub async fn analyze_folder( jobs: u32, timeout_secs: u64, survival_threshold: f64, + db_path: Option, + run_id: Option, ) -> Result<()> { let mut num_killed: u64 = 0; let mut not_killed = Vec::new(); @@ -125,11 +132,30 @@ pub async fn analyze_folder( if result { println!("NOT KILLED ❌"); + + if let (Some(_), Some(run_id)) = (&db_path, run_id) { + update_status_mutant( + false, + &file_path, + db_path.clone(), + run_id, + )?; + } not_killed.push(file_name.clone()); } else { println!("KILLED ✅"); + + if let (Some(_), Some(run_id)) = (&db_path, run_id) { + update_status_mutant( + true, + &file_path, + db_path.clone(), + run_id, + )?; + } num_killed += 1 } + update_command_to_test_mutant(&test_command, &file_path, db_path.clone(), run_id.clone().unwrap())?; } // Generate report diff --git a/src/main.rs b/src/main.rs index 895a1a8..b22019c 100644 --- a/src/main.rs +++ b/src/main.rs @@ -92,6 +92,14 @@ enum Commands { /// Maximum acceptable survival rate (0.3 = 30%) #[arg(long, default_value = "0.75")] survival_threshold: f64, + + /// Optional path to SQLite database file (default: mutation.db) + #[arg(long, value_name = "PATH")] + sqlite: Option>, + + /// Run ID stored in SQLite + #[arg(long)] + runid: Option, }, } @@ -187,7 +195,8 @@ async fn main() -> Result<()> { path, run_id, if pr == 0 { None } else { Some(pr) }, - file).map_err(Error::from)?; + file, + range_lines).map_err(Error::from)?; } } @@ -197,8 +206,33 @@ async fn main() -> Result<()> { jobs, command, survival_threshold, + sqlite, + runid, } => { - analyze::run_analysis(folder, command, jobs, timeout, survival_threshold).await?; + + let db_path = match sqlite { + Some(Some(path)) => { + let mut full_path = PathBuf::from("db"); + full_path.push(path); + Some(full_path) + } + Some(None) => Some(PathBuf::from("db/mutation.db")), + None => None, + }; + + if runid.is_none() { + return Err(MutationError::InvalidInput( + "--sqlite requires 
--runid".to_string(), + )); + } + + if runid.is_some() && db_path.is_none() { + return Err(MutationError::InvalidInput( + "--runid requires --sqlite".to_string(), + )); + } + + analyze::run_analysis(folder, command, jobs, timeout, survival_threshold, db_path, runid).await?; } } diff --git a/src/sqlite.rs b/src/sqlite.rs index 698bf4b..3311867 100644 --- a/src/sqlite.rs +++ b/src/sqlite.rs @@ -5,14 +5,81 @@ use rusqlite::params; use std::fs; use std::path::Path; use std::path::PathBuf; -use rusqlite::{Connection, Result}; +use rusqlite::{Connection, Result, Params}; use crate::git_changes::{get_commit_hash}; use crate::error::{MutationError}; -use crate::analyze::{find_mutation_folders}; -pub fn update_status_mutant() { -//TODO after running analyze change status to killed or survived +fn update_mutants_table

(connection: &Connection, sql: &str, params: P) -> Result<(), MutationError> +where + P: Params, +{ + connection.execute(sql, params)?; + + Ok(()) +} + +pub fn update_command_to_test_mutant( + command: &str, + fullpath: &PathBuf, + db_path: Option, + run_id: i64, + ) -> Result<(), MutationError>{ + + let db_path = db_path.unwrap(); + let connection = Connection::open(db_path.clone())?; + let fullpath = fullpath.strip_prefix("./").unwrap_or(fullpath); + + let sql_command = "UPDATE mutants + SET command_to_test = ? + WHERE run_id = ? AND + file_name = ?"; + + let params = params![command, run_id, fullpath.to_str()]; + update_mutants_table(&connection, sql_command, params)?; + Ok(()) +} + +pub fn update_status_mutant(killed: bool, + fullpath: &PathBuf, + db_path: Option, + run_id: i64, +) -> Result<(), MutationError>{ + + let db_path = db_path.unwrap(); + let connection = Connection::open(db_path.clone())?; + let fullpath = fullpath.strip_prefix("./").unwrap_or(fullpath); + + let sql_command = + "UPDATE mutants + SET status = ? + WHERE run_id = ? 
AND + file_name = ?"; + + //status killed + if killed { + println!("killed "); + + println!("SQLite option: Updating mutant {} on {} status changed to killed", + fullpath.display(), + db_path.clone().display()); + + let params = params!["killed", run_id, fullpath.to_str()]; + update_mutants_table(&connection, sql_command, params)?; + + //status survived + } else if !killed { + println!("survived "); + + println!("SQLite option: Updating mutant {} on {} status changed to killed", + fullpath.display(), + db_path.clone().display()); + + let params = params!["survived", run_id, fullpath.to_str()]; + update_mutants_table(&connection, sql_command, params)?; + + }; + Ok(()) } fn get_hash_from_diff(diff: &str) -> Result> { @@ -103,7 +170,7 @@ fn check_mutation_folder( Ok(PathBuf::from(folder)) } -pub fn store_mutants(db_path: &PathBuf, run_id: i64, pr_number: Option, originFile: Option, range_lines: Option<(usize, usize)>) -> Result<()> { +pub fn store_mutants(db_path: &PathBuf, run_id: i64, pr_number: Option, origin_file: Option, range_lines: Option<(usize, usize)>) -> Result<()> { println!("SQLite option: Storing mutants on {}", db_path.display()); let connection = Connection::open(db_path)?; let operator: String = "None".to_string(); @@ -111,18 +178,17 @@ pub fn store_mutants(db_path: &PathBuf, run_id: i64, pr_number: Option, ori //get mutants folder //TODO continuar aqui, verificando o check_mutation - if let Some(file_path) = originFile.clone() { + if let Some(file_path) = origin_file.clone() { let file_str = file_path.to_string_lossy().to_string(); let mutation_folder = check_mutation_folder(&file_str, pr_number, range_lines); let files = get_files_from_folder(&mutation_folder.unwrap()).unwrap_or_default(); for file in &files{ - let diff = get_file_diff(originFile.clone(), file.into()).unwrap_or_default(); + let diff = get_file_diff(origin_file.clone(), file.into()).unwrap_or_default(); let patch_hash = get_hash_from_diff(&diff).unwrap_or_default(); - let mut 
file_path = String::new(); - - file_path = originFile.clone().unwrap_or_default().to_string_lossy().into_owned(); + + let file_path = origin_file.clone().unwrap_or_default().to_string_lossy().into_owned(); //run_id println!("run_id: {}", run_id.to_string()); @@ -138,12 +204,15 @@ pub fn store_mutants(db_path: &PathBuf, run_id: i64, pr_number: Option, ori //operator println!("operator: {}", operator); + + //filename + let filename = file.to_str(); let dummydiff = ""; connection.execute(" - INSERT INTO mutants (run_id , diff, patch_hash, file_path, operator) - VALUES (?1, ?2, ?3, ?4, ?5); - ", params![run_id, dummydiff, patch_hash, file_path, operator],)?; + INSERT INTO mutants (run_id , diff, patch_hash, file_path, operator, file_name) + VALUES (?1, ?2, ?3, ?4, ?5, ?6); + ", params![run_id, dummydiff, patch_hash, file_path, operator, filename],)?; } @@ -247,7 +316,7 @@ fn _check_schema(connection: &Connection) -> Result<()> { ("runs", vec!["id", "project_id", "commit_hash", "pr_number", "created_at", "tool_version"]), ("mutants", vec![ "id", "run_id", "diff", "patch_hash", "status", "killed", - "command_to_test", "file_path", "operator" + "command_to_test", "file_path", "operator", "file_name" ]), ]; @@ -313,6 +382,7 @@ fn _createdb(connection: &Connection) -> Result<()> { command_to_test TEXT, file_path TEXT NOT NULL, operator TEXT NOT NULL, + file_name TEXT NOT NULL, FOREIGN KEY(run_id) REFERENCES runs(id) ); From cbc4e05b679b0bd55e195b6633f04deee380b13b Mon Sep 17 00:00:00 2001 From: JGsouzaa Date: Tue, 23 Dec 2025 15:32:42 -0300 Subject: [PATCH 4/5] Code organization Error handling for unwrap() parts MissingDbPath handler for MutationError enum --- src/error.rs | 3 +++ src/sqlite.rs | 56 +++++++++------------------------------------------ 2 files changed, 12 insertions(+), 47 deletions(-) diff --git a/src/error.rs b/src/error.rs index dc38f13..0f807ec 100644 --- a/src/error.rs +++ b/src/error.rs @@ -29,6 +29,9 @@ pub enum MutationError { #[error("SQLite error: 
{0}")] Sqlite(#[from] rusqlite::Error), + #[error("Db path error")] + MissingDbPath, + #[error("Other error: {0}")] Other(#[from] anyhow::Error), } diff --git a/src/sqlite.rs b/src/sqlite.rs index 3311867..0097a96 100644 --- a/src/sqlite.rs +++ b/src/sqlite.rs @@ -26,7 +26,7 @@ pub fn update_command_to_test_mutant( run_id: i64, ) -> Result<(), MutationError>{ - let db_path = db_path.unwrap(); + let db_path = db_path.ok_or(MutationError::MissingDbPath)?; let connection = Connection::open(db_path.clone())?; let fullpath = fullpath.strip_prefix("./").unwrap_or(fullpath); @@ -46,7 +46,7 @@ pub fn update_status_mutant(killed: bool, run_id: i64, ) -> Result<(), MutationError>{ - let db_path = db_path.unwrap(); + let db_path = db_path.ok_or(MutationError::MissingDbPath)?; let connection = Connection::open(db_path.clone())?; let fullpath = fullpath.strip_prefix("./").unwrap_or(fullpath); @@ -56,10 +56,8 @@ pub fn update_status_mutant(killed: bool, WHERE run_id = ? AND file_name = ?"; - //status killed + //status if killed { - println!("killed "); - println!("SQLite option: Updating mutant {} on {} status changed to killed", fullpath.display(), db_path.clone().display()); @@ -67,11 +65,8 @@ pub fn update_status_mutant(killed: bool, let params = params!["killed", run_id, fullpath.to_str()]; update_mutants_table(&connection, sql_command, params)?; - //status survived } else if !killed { - println!("survived "); - - println!("SQLite option: Updating mutant {} on {} status changed to killed", + println!("SQLite option: Updating mutant {} on {} status changed to survived", fullpath.display(), db_path.clone().display()); @@ -98,7 +93,7 @@ fn get_file_diff(mainfile: Option, comparefile: PathBuf) -> Result Result, Box Ok(entries) } - fn check_mutation_folder( file_to_mutate: &str, pr_number: Option, @@ -175,47 +169,24 @@ pub fn store_mutants(db_path: &PathBuf, run_id: i64, pr_number: Option, ori let connection = Connection::open(db_path)?; let operator: String = "None".to_string(); 
- //get mutants folder - - //TODO continuar aqui, verificando o check_mutation if let Some(file_path) = origin_file.clone() { let file_str = file_path.to_string_lossy().to_string(); - let mutation_folder = check_mutation_folder(&file_str, pr_number, range_lines); + let mutation_folder = check_mutation_folder(&file_str, pr_number, range_lines)?; - let files = get_files_from_folder(&mutation_folder.unwrap()).unwrap_or_default(); + let files = get_files_from_folder(&mutation_folder).unwrap_or_default(); for file in &files{ let diff = get_file_diff(origin_file.clone(), file.into()).unwrap_or_default(); let patch_hash = get_hash_from_diff(&diff).unwrap_or_default(); let file_path = origin_file.clone().unwrap_or_default().to_string_lossy().into_owned(); - - //run_id - println!("run_id: {}", run_id.to_string()); - - //diff - //println!("diff: {}", diff); - - //patch_hash - println!("patch_hash: {}", patch_hash); - - //file_path - println!("file path: {:?}", file_path); - - //operator - println!("operator: {}", operator); - - //filename let filename = file.to_str(); - let dummydiff = ""; - connection.execute(" + connection.execute(" INSERT INTO mutants (run_id , diff, patch_hash, file_path, operator, file_name) VALUES (?1, ?2, ?3, ?4, ?5, ?6); - ", params![run_id, dummydiff, patch_hash, file_path, operator, filename],)?; - + ", params![run_id, diff, patch_hash, file_path, operator, filename],)?; } - }; Ok(()) } @@ -232,19 +203,15 @@ pub fn store_run(db_path: &PathBuf, pr_number: Option) -> Result { )?; let project_id = proj_query_row.0; - println!("id: {}", project_id); let commit_hash = match get_commit_hash() { Ok(hash) => hash, Err(_) => "unknown".to_string(), }; - println!("commit hash: {}", commit_hash); - let tool_version = format!("{} {}", env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION")); connection.execute(" - INSERT INTO runs (project_id , commit_hash, pr_number, tool_version) VALUES (?1, ?2, ?3, ?4); ", params![project_id, commit_hash, pr_number, 
tool_version],)?; @@ -288,7 +255,6 @@ fn _fill_projects_table(connection: &Connection) -> Result<()> { INSERT OR IGNORE INTO projects (id, name, repository_url) VALUES (1, 'Bitcoin Core', 'https://github.com/bitcoin/bitcoin'); ", [])?; - Ok(()) } @@ -336,7 +302,6 @@ fn _check_schema(connection: &Connection) -> Result<()> { } } } - println!("SQLite option: Schema verified successfully."); Ok(()) } @@ -344,7 +309,6 @@ fn _check_schema(connection: &Connection) -> Result<()> { fn _createdb(connection: &Connection) -> Result<()> { println!("SQLite option:: New db detected initializing first fillment..."); - // DB tables creation connection.execute_batch(" PRAGMA foreign_keys = ON; @@ -395,14 +359,12 @@ fn _createdb(connection: &Connection) -> Result<()> { println!("SQLite option: Ok batch"); - //Filling projects table _fill_projects_table(&connection)?; Ok(()) } pub fn check_db(db_path: &PathBuf) -> Result<()> { - println!("SQLite option: Checking if db exist..."); let is_new_db = !db_path.exists(); From c02ed02f5886db2fb48c2b4c8b1072a367c98c78 Mon Sep 17 00:00:00 2001 From: JGsouzaa Date: Wed, 24 Dec 2025 13:02:57 -0300 Subject: [PATCH 5/5] Tests updated readme with Storage and Update storage functions bug fix on analyze without --sqlite and --runid flags --- README.md | 66 ++++++++++++++++++ src/analyze.rs | 4 +- src/main.rs | 22 +++--- src/sqlite.rs | 185 ++++++++++++++++++++++++++++++++++++++++++++++++- 4 files changed, 264 insertions(+), 13 deletions(-) diff --git a/README.md b/README.md index e18fba0..439445b 100644 --- a/README.md +++ b/README.md @@ -119,6 +119,72 @@ Set timeout and parallel jobs: bcore-mutation analyze -j 8 -t 300 --survival-threshold 0.3 ``` +### Storage + +Performed during the mutants generation (`mutation` command) + +Store generated mutants in the `db` folder (created if it does not exist). 
+Default file: `mutation.db`: + +```bash +bcore-mutation mutate --sqlite +``` + +### Examples: + +For a specific file, using the default database (`mutation.db`): + +```bash +bcore-mutation mutate -f src/wallet/wallet.cpp --sqlite +``` + +For a specific PR with a custom database (`results.db`): + +```bash +bcore-mutation mutate -p 12345 --sqlite results.db +``` + +### Update Storage + +Performed during the mutant analysis (`analyze` command) + +Perform full analysis for a specific run id (required): + +```bash +bcore-mutation analyze --sqlite --runid <run_id> +``` + +Perform analysis for a specific file: + +```bash +bcore-mutation analyze -f <folder> --sqlite --runid <run_id> +``` + +Perform analysis for a specific file with a custom command to test: + +```bash +bcore-mutation analyze -f <folder> --sqlite --runid <run_id> -c <command> +``` + +### Examples: + +For general analysis, on run id 10: + +```bash +bcore-mutation analyze --sqlite --runid 10 +``` + +Analysis on the muts-pr-wallet-1-150 folder generated on run id 1: + +```bash +bcore-mutation analyze -f muts-pr-wallet-1-150 --sqlite --runid 1 +``` + +Perform analysis for the muts-pr-wallet-1-150 folder of run id 2 with the custom command `cmake --build build`: + +```bash +bcore-mutation analyze -f muts-pr-wallet-1-150 --sqlite --runid 2 -c "cmake --build build" +``` ## Library Usage The tool can also be used as a Rust library: diff --git a/src/analyze.rs b/src/analyze.rs index a20eeca..e2621a5 100644 --- a/src/analyze.rs +++ b/src/analyze.rs @@ -155,7 +155,9 @@ pub async fn analyze_folder( } num_killed += 1 } - update_command_to_test_mutant(&test_command, &file_path, db_path.clone(), run_id.clone().unwrap())?; + if let Some(db_path) = db_path.clone() { + update_command_to_test_mutant(&test_command, &file_path, db_path, run_id.clone().unwrap_or_default())?; + } } // Generate report diff --git a/src/main.rs b/src/main.rs index b22019c..4766c86 100644 --- a/src/main.rs +++ b/src/main.rs @@ -210,7 +210,7 @@ async fn main() -> Result<()> { runid, } => { - let db_path = match
sqlite { + let db_path = match sqlite.clone() { Some(Some(path)) => { let mut full_path = PathBuf::from("db"); full_path.push(path); @@ -220,16 +220,18 @@ async fn main() -> Result<()> { None => None, }; - if runid.is_none() { - return Err(MutationError::InvalidInput( - "--sqlite requires --runid".to_string(), - )); - } + if sqlite.is_some() { + if runid.is_none() { + return Err(MutationError::InvalidInput( + "--sqlite requires --runid".to_string(), + )); + } - if runid.is_some() && db_path.is_none() { - return Err(MutationError::InvalidInput( - "--runid requires --sqlite".to_string(), - )); + if runid.is_some() && db_path.is_none() { + return Err(MutationError::InvalidInput( + "--runid requires --sqlite".to_string(), + )); + } } analyze::run_analysis(folder, command, jobs, timeout, survival_threshold, db_path, runid).await?; diff --git a/src/sqlite.rs b/src/sqlite.rs index 0097a96..30e0686 100644 --- a/src/sqlite.rs +++ b/src/sqlite.rs @@ -22,11 +22,10 @@ where pub fn update_command_to_test_mutant( command: &str, fullpath: &PathBuf, - db_path: Option, + db_path: PathBuf, run_id: i64, ) -> Result<(), MutationError>{ - let db_path = db_path.ok_or(MutationError::MissingDbPath)?; let connection = Connection::open(db_path.clone())?; let fullpath = fullpath.strip_prefix("./").unwrap_or(fullpath); @@ -392,4 +391,186 @@ pub fn check_db(db_path: &PathBuf) -> Result<()> { } Ok(()) +} +#[cfg(test)] +mod tests { + use super::*; + use rusqlite::Connection; + use tempfile::tempdir; + use std::fs::{self, File}; + use tempfile::TempPath; + use tempfile::NamedTempFile; + + fn setup_db() -> (Connection,TempPath) { + let temp_db = NamedTempFile::new().unwrap(); + let db_path = temp_db.into_temp_path(); + let connection = Connection::open(&db_path).unwrap(); + + (connection, db_path) + } + + #[test] + #[allow(unused)] + fn test_db_creation_and_seed() { + + let (connection, db_path) = setup_db(); + + println!("connection: {:?} \n path: {:?}", connection, db_path); + let 
db_creation_verify = _createdb(&connection); + assert!(db_creation_verify.is_ok()); + + let schema_verify = _check_schema(&connection); + assert!(schema_verify.is_ok()); + + let initial_row_verify = _check_initial_row(&connection); + assert!(initial_row_verify.is_ok()); + } + + #[test] + #[allow(unused)] + fn test_store_run_creates_row() { + let (connection, db_path) = setup_db(); + _createdb(&connection).unwrap(); + + let run_id = store_run(&db_path.to_path_buf(), None).unwrap(); + assert!(run_id > 0, "store_run must return a valid run_id"); + + let count: i64 = connection.query_row( + "SELECT count(*) FROM runs WHERE id=?1", + [run_id], + |row| row.get(0) + ).unwrap(); + assert_eq!(count, 1, "Must exist exactly 1 run"); + } + + #[test] + #[allow(unused)] + fn test_store_mutants_inserts_rows() { + let (connection, db_path) = setup_db(); + + let dir = tempdir().unwrap(); + let origin_file = dir.path().join("origin.rs"); + File::create(&origin_file).unwrap(); + + let mutation_folder = dir.path().join("muts-origin-rs"); + fs::create_dir_all(&mutation_folder).unwrap(); + + let mutant_file = mutation_folder.join("mutant1.rs"); + File::create(&mutant_file).unwrap(); + + let run_id = 1; + + let result = store_mutants(&db_path.to_path_buf(), run_id, None, Some(origin_file.clone()), None); + assert!(result.is_ok()); + } + + #[test] + #[allow(unused)] + fn test_update_status_mutant() { + let (connection, db_path) = setup_db(); + _createdb(&connection).unwrap(); + + let dir = tempdir().unwrap(); + let origin_file = dir.path().join("origin.rs"); + let file_path = &origin_file; + + let operator: String = "None".to_string(); + let run_id = 1; + + let origin_file = origin_file.to_str(); + + //Seed tables + connection.execute(" + INSERT INTO runs (id, project_id, commit_hash) + VALUES (?1, ?2, ?3); + ", params![1, 1, "hash"]).unwrap(); + + connection.execute(" + INSERT INTO mutants (run_id , diff, patch_hash, file_path, operator, file_name) + VALUES (?1, ?2, ?3, ?4, ?5, ?6); + ", 
params![run_id, "killed diff", "", origin_file, operator, origin_file],).unwrap(); + + connection.execute(" + INSERT INTO mutants (run_id , diff, patch_hash, file_path, operator, file_name) + VALUES (?1, ?2, ?3, ?4, ?5, ?6); + ", params![run_id, "survived diff", "", origin_file, operator, origin_file],).unwrap(); + + let count: i64 = connection.query_row( + "SELECT count(*) FROM mutants;", + [], + |row| row.get(0) + ).unwrap(); + println!("count: {:?}", count); + assert_eq!(count, 2, "Must exist exactly 2 mutants"); + + //Test for status killed + let result = update_status_mutant(true, &file_path, Some(db_path.to_path_buf()), 1); + assert!(result.is_ok()); + + let proj_query_row: (i32, String, String) = connection.query_row( + "SELECT id, status, diff FROM mutants WHERE run_id=?1 AND id=?2;", + [1, 1], + |row| Ok((row.get(0)?, row.get(1)?,row.get(2)?)) + ).unwrap(); + + assert!(proj_query_row.0 == 1 && proj_query_row.1 == "killed" && proj_query_row.2 == "killed diff", "Status should've been updated to killed"); + + //Test for status survived + let result = update_status_mutant(false, &file_path, Some(db_path.to_path_buf()), 1); + assert!(result.is_ok()); + + let proj_query_row: (i32, String, String) = connection.query_row( + "SELECT id, status, diff FROM mutants WHERE run_id=?1 AND id=?2;", + [1, 2], + |row| Ok((row.get(0)?, row.get(1)?,row.get(2)?)) + ).unwrap(); + + assert!(proj_query_row.0 == 2 && proj_query_row.1 == "survived" && proj_query_row.2 == "survived diff", "Status should've been updated to survived"); + } + + #[test] + #[allow(unused)] + fn test_update_command_mutant() { + let (connection, db_path) = setup_db(); + _createdb(&connection).unwrap(); + + let dir = tempdir().unwrap(); + let origin_file = dir.path().join("origin.rs"); + let file_path = &origin_file; + + let operator: String = "None".to_string(); + let run_id = 1; + + let origin_file = origin_file.to_str(); + + //Seed tables + connection.execute(" + INSERT INTO runs (id, project_id, 
commit_hash) + VALUES (?1, ?2, ?3); + ", params![1, 1, "hash"]).unwrap(); + + connection.execute(" + INSERT INTO mutants (run_id , diff, patch_hash, file_path, operator, file_name) + VALUES (?1, ?2, ?3, ?4, ?5, ?6); + ", params![run_id, "command diff", "", origin_file, operator, origin_file],).unwrap(); + + let count: i64 = connection.query_row( + "SELECT count(*) FROM mutants;", + [], + |row| row.get(0) + ).unwrap(); + println!("count: {:?}", count); + assert_eq!(count, 1, "Must exist exactly 1 mutant"); + + let result = update_command_to_test_mutant("command", file_path, db_path.to_path_buf(), run_id); + assert!(result.is_ok()); + + let proj_query_row: (i32, String, String) = connection.query_row( + "SELECT id, diff, command_to_test FROM mutants WHERE run_id=?1 AND id=?2;", + [1, 1], + |row| Ok((row.get(0)?, row.get(1)?,row.get(2)?)) + ).unwrap(); + + assert!(proj_query_row.0 == 1 && proj_query_row.1 == "command diff" && proj_query_row.2 == "command", "Command should've been updated to command"); + } } \ No newline at end of file