diff --git a/Cargo.toml b/Cargo.toml index 5244fe0..82507fd 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -26,6 +26,8 @@ futures = "0.3" walkdir = "2.4" rayon = "1.8" tempfile = "3.8" +rusqlite = "0.37.0" +sha2 = "0.10.9" [dev-dependencies] tempfile = "3.8" diff --git a/README.md b/README.md index e18fba0..439445b 100644 --- a/README.md +++ b/README.md @@ -119,6 +119,73 @@ Set timeout and parallel jobs: bcore-mutation analyze -j 8 -t 300 --survival-threshold 0.3 ``` +### Storage + +Performed during the mutants generation (`mutation` command). + +Store generated mutants in the `db` folder (created if it does not exist). +Default database file: `mutation.db`: + +```bash +bcore-mutation mutate --sqlite +``` + +### Examples: + +For a specific file, using the default database (`mutation.db`): + +```bash +bcore-mutation mutate -f src/wallet/wallet.cpp --sqlite +``` + +For a specific PR with a custom database (`results.db`): + +```bash +bcore-mutation mutate -p 12345 --sqlite results.db +``` + +### Update Storage + +Performed during the mutant analysis (`analyze` command). + +Perform full analysis for a specific run id (required): + +```bash +bcore-mutation analyze --sqlite --runid <RUN_ID> +``` + +Perform analysis for a specific file: + +```bash +bcore-mutation analyze -f <FOLDER> --sqlite --runid <RUN_ID> +``` + +Perform analysis for a specific file with a custom command to test: + +```bash +bcore-mutation analyze -f <FOLDER> --sqlite --runid <RUN_ID> -c <COMMAND> +``` + +### Examples: + +For general analysis, on run id 10: + +```bash +bcore-mutation analyze --sqlite --runid 10 +``` + +Analysis on the muts-pr-wallet-1-150 folder generated on run id 1: + +```bash +bcore-mutation analyze -f muts-pr-wallet-1-150 --sqlite --runid 1 +``` + +Perform analysis for muts-pr-wallet-1-150 folder of run id 2 with custom command `cmake --build build`: + +```bash +bcore-mutation analyze -f muts-pr-wallet-1-150 --sqlite --runid 2 -c "cmake --build build" +``` + ## Library Usage The tool can also be used as a Rust library: diff --git a/src/analyze.rs
b/src/analyze.rs index 1ec478a..e2621a5 100644 --- a/src/analyze.rs +++ b/src/analyze.rs @@ -1,3 +1,4 @@ +use crate::sqlite::{update_status_mutant, update_command_to_test_mutant}; use crate::error::{MutationError, Result}; use crate::report::generate_report; use std::fs; @@ -13,6 +14,8 @@ pub async fn run_analysis( jobs: u32, timeout_secs: u64, survival_threshold: f64, + db_path: Option, + run_id: Option, ) -> Result<()> { let folders = if let Some(folder_path) = folder { vec![folder_path] @@ -28,6 +31,8 @@ pub async fn run_analysis( jobs, timeout_secs, survival_threshold, + db_path.clone(), + run_id, ) .await?; } @@ -35,7 +40,7 @@ pub async fn run_analysis( Ok(()) } -fn find_mutation_folders() -> Result> { +pub fn find_mutation_folders() -> Result> { let mut folders = Vec::new(); for entry in WalkDir::new(".").max_depth(1) { @@ -58,6 +63,8 @@ pub async fn analyze_folder( jobs: u32, timeout_secs: u64, survival_threshold: f64, + db_path: Option, + run_id: Option, ) -> Result<()> { let mut num_killed: u64 = 0; let mut not_killed = Vec::new(); @@ -125,11 +132,32 @@ pub async fn analyze_folder( if result { println!("NOT KILLED ❌"); + + if let (Some(_), Some(run_id)) = (&db_path, run_id) { + update_status_mutant( + false, + &file_path, + db_path.clone(), + run_id, + )?; + } not_killed.push(file_name.clone()); } else { println!("KILLED ✅"); + + if let (Some(_), Some(run_id)) = (&db_path, run_id) { + update_status_mutant( + true, + &file_path, + db_path.clone(), + run_id, + )?; + } num_killed += 1 } + if let Some(db_path) = db_path.clone() { + update_command_to_test_mutant(&test_command, &file_path, db_path, run_id.clone().unwrap_or_default())?; + } } // Generate report diff --git a/src/error.rs b/src/error.rs index 36a61bd..0f807ec 100644 --- a/src/error.rs +++ b/src/error.rs @@ -26,6 +26,12 @@ pub enum MutationError { #[error("Walkdir error: {0}")] Walkdir(#[from] walkdir::Error), + #[error("SQLite error: {0}")] + Sqlite(#[from] rusqlite::Error), + + #[error("Db path 
error")] + MissingDbPath, + #[error("Other error: {0}")] Other(#[from] anyhow::Error), } diff --git a/src/git_changes.rs b/src/git_changes.rs index 813123a..972f76a 100644 --- a/src/git_changes.rs +++ b/src/git_changes.rs @@ -82,6 +82,17 @@ pub async fn get_lines_touched(file_path: &str) -> Result> { Ok(lines) } +pub fn get_commit_hash() -> Result { + + let commit_hash = Command::new("git") + .args(["rev-parse", "HEAD"]) + .output() + .map(|output| String::from_utf8_lossy(&output.stdout).trim().to_string()) + .unwrap_or_else(|_| "unknown".to_string()); + + Ok(commit_hash) +} + #[cfg(test)] mod tests { use super::*; diff --git a/src/lib.rs b/src/lib.rs index 295d75b..5814d18 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -34,6 +34,7 @@ //! } //! ``` +pub mod sqlite; pub mod analyze; pub mod ast_analysis; pub mod coverage; diff --git a/src/main.rs b/src/main.rs index f5ffc36..4766c86 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,3 +1,4 @@ +use anyhow::Error; use clap::{Parser, Subcommand}; use std::collections::HashMap; use std::path::PathBuf; @@ -10,6 +11,7 @@ mod git_changes; mod mutation; mod operators; mod report; +mod sqlite; use error::{MutationError, Result}; @@ -64,6 +66,10 @@ enum Commands { /// Add custom expert rule for arid node detection #[arg(long, value_name = "PATTERN")] add_expert_rule: Option, + + /// Optional path to SQLite database file (default: mutation.db) + #[arg(long, value_name = "PATH")] + sqlite: Option>, }, /// Analyze mutants Analyze { @@ -86,6 +92,14 @@ enum Commands { /// Maximum acceptable survival rate (0.3 = 30%) #[arg(long, default_value = "0.75")] survival_threshold: f64, + + /// Optional path to SQLite database file (default: mutation.db) + #[arg(long, value_name = "PATH")] + sqlite: Option>, + + /// Run ID stored in SQLite + #[arg(long)] + runid: Option, }, } @@ -105,7 +119,10 @@ async fn main() -> Result<()> { only_security_mutations, disable_ast_filtering, add_expert_rule, + sqlite, } => { + let mut run_id: i64 = 0; + let 
skip_lines_map = if let Some(path) = skip_lines { read_skip_lines(&path)? } else { @@ -126,6 +143,16 @@ async fn main() -> Result<()> { } else { None }; + + let db_path = match sqlite { + Some(Some(path)) => { + let mut full_path = PathBuf::from("db"); + full_path.push(path); + Some(full_path) + } + Some(None) => Some(PathBuf::from("db/mutation.db")), + None => None, + }; if pr != 0 && file.is_some() { return Err(MutationError::InvalidInput( @@ -144,9 +171,14 @@ async fn main() -> Result<()> { println!("Custom expert rule will be applied: {}", expert_rule); } + if let Some(ref path) = db_path { + sqlite::check_db(path).map_err(Error::from)?; + run_id = sqlite::store_run(path, if pr == 0 { None } else { Some(pr) }).map_err(Error::from)?; + } + mutation::run_mutation( if pr == 0 { None } else { Some(pr) }, - file, + file.clone(), one_mutant, only_security_mutations, range_lines, @@ -157,6 +189,16 @@ async fn main() -> Result<()> { add_expert_rule, ) .await?; + + if let Some(ref path) = db_path { + sqlite::store_mutants( + path, + run_id, + if pr == 0 { None } else { Some(pr) }, + file, + range_lines).map_err(Error::from)?; + } + } Commands::Analyze { folder, @@ -164,8 +206,35 @@ async fn main() -> Result<()> { jobs, command, survival_threshold, + sqlite, + runid, } => { - analyze::run_analysis(folder, command, jobs, timeout, survival_threshold).await?; + + let db_path = match sqlite.clone() { + Some(Some(path)) => { + let mut full_path = PathBuf::from("db"); + full_path.push(path); + Some(full_path) + } + Some(None) => Some(PathBuf::from("db/mutation.db")), + None => None, + }; + + if sqlite.is_some() { + if runid.is_none() { + return Err(MutationError::InvalidInput( + "--sqlite requires --runid".to_string(), + )); + } + + if runid.is_some() && db_path.is_none() { + return Err(MutationError::InvalidInput( + "--runid requires --sqlite".to_string(), + )); + } + } + + analyze::run_analysis(folder, command, jobs, timeout, survival_threshold, db_path, runid).await?; } } 
diff --git a/src/sqlite.rs b/src/sqlite.rs new file mode 100644 index 0000000..30e0686 --- /dev/null +++ b/src/sqlite.rs @@ -0,0 +1,576 @@ +use sha2::{Sha256, Digest}; +use std::error::Error; +use std::process::Command; +use rusqlite::params; +use std::fs; +use std::path::Path; +use std::path::PathBuf; +use rusqlite::{Connection, Result, Params}; + +use crate::git_changes::{get_commit_hash}; +use crate::error::{MutationError}; + +fn update_mutants_table

(connection: &Connection, sql: &str, params: P) -> Result<(), MutationError> +where + P: Params, +{ + connection.execute(sql, params)?; + + Ok(()) +} + +pub fn update_command_to_test_mutant( + command: &str, + fullpath: &PathBuf, + db_path: PathBuf, + run_id: i64, + ) -> Result<(), MutationError>{ + + let connection = Connection::open(db_path.clone())?; + let fullpath = fullpath.strip_prefix("./").unwrap_or(fullpath); + + let sql_command = "UPDATE mutants + SET command_to_test = ? + WHERE run_id = ? AND + file_name = ?"; + + let params = params![command, run_id, fullpath.to_str()]; + update_mutants_table(&connection, sql_command, params)?; + Ok(()) +} + +pub fn update_status_mutant(killed: bool, + fullpath: &PathBuf, + db_path: Option, + run_id: i64, +) -> Result<(), MutationError>{ + + let db_path = db_path.ok_or(MutationError::MissingDbPath)?; + let connection = Connection::open(db_path.clone())?; + let fullpath = fullpath.strip_prefix("./").unwrap_or(fullpath); + + let sql_command = + "UPDATE mutants + SET status = ? + WHERE run_id = ? 
AND + file_name = ?"; + + //status + if killed { + println!("SQLite option: Updating mutant {} on {} status changed to killed", + fullpath.display(), + db_path.clone().display()); + + let params = params!["killed", run_id, fullpath.to_str()]; + update_mutants_table(&connection, sql_command, params)?; + + } else if !killed { + println!("SQLite option: Updating mutant {} on {} status changed to survived", + fullpath.display(), + db_path.clone().display()); + + let params = params!["survived", run_id, fullpath.to_str()]; + update_mutants_table(&connection, sql_command, params)?; + + }; + Ok(()) +} + +fn get_hash_from_diff(diff: &str) -> Result> { + let mut hasher = Sha256::new(); + hasher.update(diff.as_bytes()); + let result = hasher.finalize(); + let hash_hex = format!("{:x}", result); + Ok(hash_hex) +} + +fn get_file_diff(mainfile: Option, comparefile: PathBuf) -> Result> { + let mainfile = mainfile.ok_or("Missing source file to compare with mutant in get_file_diff proccess")?; + + let output = Command::new("diff") + .arg(&mainfile) + .arg(&comparefile) + .output()?; + + println!("Executing diff from files {:?} and {:?} for storage", mainfile, comparefile); + + if output.status.success() { + Ok(String::from("Compare files are equal!")) + } else { + let diff_result = str::from_utf8(&output.stdout)?; + Ok(diff_result.to_string()) + } +} + +fn get_files_from_folder(filepath: &Path) -> Result, Box> { + println!("filepath get_files_from_folder: {:?}", filepath); + + if !filepath.is_dir() { + return Err(format!("Current path is not a folder: {:?}", filepath).into()); + } + + let entries = fs::read_dir(filepath)? 
+ .filter_map(|entry| { + match entry { + Ok(e) => { + let path = e.path(); + if path.is_file() { + // Remove "original_file.txt" from vec + if let Some(name) = path.file_name() { + if name != "original_file.txt" { + return Some(path); + } + } + } + None + } + Err(_) => None, + } + }) + .collect(); + + Ok(entries) +} + +fn check_mutation_folder( + file_to_mutate: &str, + pr_number: Option, + range_lines: Option<(usize, usize)>, +) -> Result { + let file_extension = if file_to_mutate.ends_with(".h") { + ".h" + } else if file_to_mutate.ends_with(".py") { + ".py" + } else { + ".cpp" + }; + + let file_name = Path::new(file_to_mutate) + .file_stem() + .and_then(|s| s.to_str()) + .ok_or_else(|| MutationError::InvalidInput("Invalid file path".to_string())) + .map_err(|e| rusqlite::Error::InvalidParameterName(e.to_string()))?; + + let ext = file_extension.trim_start_matches('.'); + let folder = if let Some(pr) = pr_number { + format!("muts-pr-{}-{}-{}", pr, file_name, ext) + } else if let Some(range) = range_lines { + format!("muts-pr-{}-{}-{}", file_name, range.0, range.1) + } else { + format!("muts-{}-{}", file_name, ext) + }; + + Ok(PathBuf::from(folder)) +} + +pub fn store_mutants(db_path: &PathBuf, run_id: i64, pr_number: Option, origin_file: Option, range_lines: Option<(usize, usize)>) -> Result<()> { + println!("SQLite option: Storing mutants on {}", db_path.display()); + let connection = Connection::open(db_path)?; + let operator: String = "None".to_string(); + + if let Some(file_path) = origin_file.clone() { + let file_str = file_path.to_string_lossy().to_string(); + let mutation_folder = check_mutation_folder(&file_str, pr_number, range_lines)?; + + let files = get_files_from_folder(&mutation_folder).unwrap_or_default(); + + for file in &files{ + let diff = get_file_diff(origin_file.clone(), file.into()).unwrap_or_default(); + let patch_hash = get_hash_from_diff(&diff).unwrap_or_default(); + + let file_path = 
origin_file.clone().unwrap_or_default().to_string_lossy().into_owned(); + let filename = file.to_str(); + + connection.execute(" + INSERT INTO mutants (run_id , diff, patch_hash, file_path, operator, file_name) + VALUES (?1, ?2, ?3, ?4, ?5, ?6); + ", params![run_id, diff, patch_hash, file_path, operator, filename],)?; + } + }; + Ok(()) +} + +pub fn store_run(db_path: &PathBuf, pr_number: Option) -> Result { + + println!("SQLite option: Storing current run on {}", db_path.display()); + let connection = Connection::open(db_path)?; + + let proj_query_row: (i32, String) = connection.query_row( + "SELECT id, name FROM projects;", + [], + |row| Ok((row.get(0)?, row.get(1)?)) + )?; + + let project_id = proj_query_row.0; + + let commit_hash = match get_commit_hash() { + Ok(hash) => hash, + Err(_) => "unknown".to_string(), + }; + + let tool_version = format!("{} {}", env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION")); + + connection.execute(" + INSERT INTO runs (project_id , commit_hash, pr_number, tool_version) + VALUES (?1, ?2, ?3, ?4); + ", params![project_id, commit_hash, pr_number, tool_version],)?; + + let run_id = connection.last_insert_rowid(); + + Ok(run_id) +} + +fn _check_initial_row(connection: &Connection) -> Result<()> { + println!("SQLite option: Checking first row of projects..."); + + let result = connection.query_row( + "SELECT id, name, repository_url FROM projects WHERE id = 1;", + [], + |row| Ok((row.get::<_, i32>(0)?, row.get::<_, String>(1)?, row.get::<_, String>(2)?)) + ); + + match result { + Ok((id, name, repo)) => { + if id == 1 && name == "Bitcoin Core" && repo == "https://github.com/bitcoin/bitcoin" { + println!("SQLite option: Project table corrected filled!"); + } + }, + Err(rusqlite::Error::QueryReturnedNoRows) => { + println!("SQLite option: No matches found for projects table, filling initial row..."); + _fill_projects_table(&connection)?; + }, + Err(e) => { + eprintln!("SQLite option: FAILED to verify initial project: {}", e); + return 
Err(e); + } + } + + Ok(()) +} + +fn _fill_projects_table(connection: &Connection) -> Result<()> { + connection.execute(" + ---First time initialization + INSERT OR IGNORE INTO projects (id, name, repository_url) + VALUES (1, 'Bitcoin Core', 'https://github.com/bitcoin/bitcoin'); + ", [])?; + Ok(()) +} + +fn _check_schema(connection: &Connection) -> Result<()> { + println!("SQLite option: Checking schema integrity..."); + + let expected_tables = vec!["projects", "runs", "mutants"]; + for table in expected_tables { + let exists: bool = connection.query_row( + "SELECT count(*) FROM sqlite_master WHERE type='table' AND name=?1;", + params![table], + |row| row.get(0), + )?; + + if !exists { + return Err(rusqlite::Error::SqliteFailure( + rusqlite::ffi::Error::new(1), + Some(format!("Missing table: {}", table)), + )); + } + } + + let table_columns: Vec<(&str, Vec<&str>)> = vec![ + ("projects", vec!["id", "name", "repository_url"]), + ("runs", vec!["id", "project_id", "commit_hash", "pr_number", "created_at", "tool_version"]), + ("mutants", vec![ + "id", "run_id", "diff", "patch_hash", "status", "killed", + "command_to_test", "file_path", "operator", "file_name" + ]), + ]; + + for (table, columns) in table_columns { + let mut stmt = connection.prepare(&format!("PRAGMA table_xinfo({});", table))?; + let column_names: Vec = stmt + .query_map([], |row| row.get::<_, String>(1))? 
+ .filter_map(Result::ok) + .collect(); + + for col in columns { + if !column_names.contains(&col.to_string()) { + return Err(rusqlite::Error::SqliteFailure( + rusqlite::ffi::Error::new(1), + Some(format!("Missing column '{}' in table '{}'", col, table)), + )); + } + } + } + println!("SQLite option: Schema verified successfully."); + Ok(()) +} + +fn _createdb(connection: &Connection) -> Result<()> { + println!("SQLite option:: New db detected initializing first fillment..."); + + connection.execute_batch(" + PRAGMA foreign_keys = ON; + + -- Projects + CREATE TABLE IF NOT EXISTS projects ( + id INTEGER PRIMARY KEY, + name TEXT NOT NULL, + repository_url TEXT, + UNIQUE(name), + UNIQUE(repository_url) + ); + + --Runs + CREATE TABLE IF NOT EXISTS runs ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + project_id INTEGER NOT NULL REFERENCES projects(id) ON DELETE CASCADE, + commit_hash TEXT NOT NULL, + pr_number INTEGER, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + tool_version TEXT, + FOREIGN KEY(project_id) REFERENCES projects(id) + ); + + CREATE INDEX IF NOT EXISTS idx_runs_project_created ON runs(project_id, created_at DESC); + CREATE INDEX IF NOT EXISTS idx_runs_commit ON runs(commit_hash); + + CREATE TABLE IF NOT EXISTS mutants ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + run_id INTEGER NOT NULL REFERENCES runs(id) ON DELETE CASCADE, + diff TEXT NOT NULL, + patch_hash TEXT NOT NULL, + status TEXT NOT NULL DEFAULT 'pending' + CHECK (status IN ('pending','running','killed','survived','timeout','error','skipped','equivalent','unproductive')), + killed INTEGER GENERATED ALWAYS AS (CASE WHEN status='killed' THEN 1 ELSE 0 END) VIRTUAL, + command_to_test TEXT, + file_path TEXT NOT NULL, + operator TEXT NOT NULL, + file_name TEXT NOT NULL, + FOREIGN KEY(run_id) REFERENCES runs(id) + ); + + CREATE INDEX IF NOT EXISTS idx_mutants_run_status ON mutants(run_id, status); + CREATE INDEX IF NOT EXISTS idx_mutants_file ON mutants(file_path); + CREATE INDEX IF NOT EXISTS 
idx_mutants_operator ON mutants(operator); + CREATE INDEX IF NOT EXISTS idx_mutants_killed ON mutants(killed); + + ")?; + + println!("SQLite option: Ok batch"); + + _fill_projects_table(&connection)?; + + Ok(()) +} + +pub fn check_db(db_path: &PathBuf) -> Result<()> { + println!("SQLite option: Checking if db exist..."); + let is_new_db = !db_path.exists(); + + //Verify path integrity + let exist_path = Path::new("db"); + if !exist_path.exists() { + match fs::create_dir_all(exist_path) { + Ok(_) => {}, + Err(e) => { + eprintln!("FAIL creating new folder db: {}", e); + std::process::exit(1); + } + } + } + + let connection = Connection::open(db_path)?; + + if is_new_db { + _createdb(&connection)?; + + } else { + println!("SQLite option: Current db exists!"); + _check_schema(&connection)?; + _check_initial_row(&connection)?; + } + + Ok(()) +} +#[cfg(test)] +mod tests { + use super::*; + use rusqlite::Connection; + use tempfile::tempdir; + use std::fs::{self, File}; + use tempfile::TempPath; + use tempfile::NamedTempFile; + + fn setup_db() -> (Connection,TempPath) { + let temp_db = NamedTempFile::new().unwrap(); + let db_path = temp_db.into_temp_path(); + let connection = Connection::open(&db_path).unwrap(); + + (connection, db_path) + } + + #[test] + #[allow(unused)] + fn test_db_creation_and_seed() { + + let (connection, db_path) = setup_db(); + + println!("connection: {:?} \n path: {:?}", connection, db_path); + let db_creation_verify = _createdb(&connection); + assert!(db_creation_verify.is_ok()); + + let schema_verify = _check_schema(&connection); + assert!(schema_verify.is_ok()); + + let initial_row_verify = _check_initial_row(&connection); + assert!(initial_row_verify.is_ok()); + } + + #[test] + #[allow(unused)] + fn test_store_run_creates_row() { + let (connection, db_path) = setup_db(); + _createdb(&connection).unwrap(); + + let run_id = store_run(&db_path.to_path_buf(), None).unwrap(); + assert!(run_id > 0, "store_run must return a valid run_id"); + + let 
count: i64 = connection.query_row( + "SELECT count(*) FROM runs WHERE id=?1", + [run_id], + |row| row.get(0) + ).unwrap(); + assert_eq!(count, 1, "Must exist exactly 1 run"); + } + + #[test] + #[allow(unused)] + fn test_store_mutants_inserts_rows() { + let (connection, db_path) = setup_db(); + + let dir = tempdir().unwrap(); + let origin_file = dir.path().join("origin.rs"); + File::create(&origin_file).unwrap(); + + let mutation_folder = dir.path().join("muts-origin-rs"); + fs::create_dir_all(&mutation_folder).unwrap(); + + let mutant_file = mutation_folder.join("mutant1.rs"); + File::create(&mutant_file).unwrap(); + + let run_id = 1; + + let result = store_mutants(&db_path.to_path_buf(), run_id, None, Some(origin_file.clone()), None); + assert!(result.is_ok()); + } + + #[test] + #[allow(unused)] + fn test_update_status_mutant() { + let (connection, db_path) = setup_db(); + _createdb(&connection).unwrap(); + + let dir = tempdir().unwrap(); + let origin_file = dir.path().join("origin.rs"); + let file_path = &origin_file; + + let operator: String = "None".to_string(); + let run_id = 1; + + let origin_file = origin_file.to_str(); + + //Seed tables + connection.execute(" + INSERT INTO runs (id, project_id, commit_hash) + VALUES (?1, ?2, ?3); + ", params![1, 1, "hash"]).unwrap(); + + connection.execute(" + INSERT INTO mutants (run_id , diff, patch_hash, file_path, operator, file_name) + VALUES (?1, ?2, ?3, ?4, ?5, ?6); + ", params![run_id, "killed diff", "", origin_file, operator, origin_file],).unwrap(); + + connection.execute(" + INSERT INTO mutants (run_id , diff, patch_hash, file_path, operator, file_name) + VALUES (?1, ?2, ?3, ?4, ?5, ?6); + ", params![run_id, "survived diff", "", origin_file, operator, origin_file],).unwrap(); + + let count: i64 = connection.query_row( + "SELECT count(*) FROM mutants;", + [], + |row| row.get(0) + ).unwrap(); + println!("count: {:?}", count); + assert_eq!(count, 2, "Must exist exactly 2 mutants"); + + //Test for status killed + let 
result = update_status_mutant(true, &file_path, Some(db_path.to_path_buf()), 1); + assert!(result.is_ok()); + + let proj_query_row: (i32, String, String) = connection.query_row( + "SELECT id, status, diff FROM mutants WHERE run_id=?1 AND id=?2;", + [1, 1], + |row| Ok((row.get(0)?, row.get(1)?,row.get(2)?)) + ).unwrap(); + + assert!(proj_query_row.0 == 1 && proj_query_row.1 == "killed" && proj_query_row.2 == "killed diff", "Status should've been updated to killed"); + + //Test for status survived + let result = update_status_mutant(false, &file_path, Some(db_path.to_path_buf()), 1); + assert!(result.is_ok()); + + let proj_query_row: (i32, String, String) = connection.query_row( + "SELECT id, status, diff FROM mutants WHERE run_id=?1 AND id=?2;", + [1, 2], + |row| Ok((row.get(0)?, row.get(1)?,row.get(2)?)) + ).unwrap(); + + assert!(proj_query_row.0 == 2 && proj_query_row.1 == "survived" && proj_query_row.2 == "survived diff", "Status should've been updated to survived"); + } + + #[test] + #[allow(unused)] + fn test_update_command_mutant() { + let (connection, db_path) = setup_db(); + _createdb(&connection).unwrap(); + + let dir = tempdir().unwrap(); + let origin_file = dir.path().join("origin.rs"); + let file_path = &origin_file; + + let operator: String = "None".to_string(); + let run_id = 1; + + let origin_file = origin_file.to_str(); + + //Seed tables + connection.execute(" + INSERT INTO runs (id, project_id, commit_hash) + VALUES (?1, ?2, ?3); + ", params![1, 1, "hash"]).unwrap(); + + connection.execute(" + INSERT INTO mutants (run_id , diff, patch_hash, file_path, operator, file_name) + VALUES (?1, ?2, ?3, ?4, ?5, ?6); + ", params![run_id, "command diff", "", origin_file, operator, origin_file],).unwrap(); + + let count: i64 = connection.query_row( + "SELECT count(*) FROM mutants;", + [], + |row| row.get(0) + ).unwrap(); + println!("count: {:?}", count); + assert_eq!(count, 1, "Must exist exactly 1 mutant"); + + let result = 
update_command_to_test_mutant("command", file_path, db_path.to_path_buf(), run_id); + assert!(result.is_ok()); + + let proj_query_row: (i32, String, String) = connection.query_row( + "SELECT id, diff, command_to_test FROM mutants WHERE run_id=?1 AND id=?2;", + [1, 1], + |row| Ok((row.get(0)?, row.get(1)?,row.get(2)?)) + ).unwrap(); + + assert!(proj_query_row.0 == 1 && proj_query_row.1 == "command diff" && proj_query_row.2 == "command", "Command should've been updated to command"); + } +} \ No newline at end of file