diff --git a/README.md b/README.md index 99dfbb5..b0c8141 100644 --- a/README.md +++ b/README.md @@ -39,6 +39,7 @@ Once a DVF is published, any user can choose to trust the signer of that DVF and - [Etherscan Verified Contracts](#etherscan-verified-contracts) - [Initialization by event topics](#initialization-by-event-topics) - [Public libraries](#public-libraries) + - [Transaction inspection](#transaction-inspection) 7. [Common Problems](#common-problems) 8. [Getting Help](#getting-help) @@ -574,6 +575,20 @@ dv init --project --address
--contractname --lib `` is the path to the library contract file, relative to your project root. `` is the name of the library contract. `
` is the public address of the already deployed library. +### Transaction inspection + +For some use cases, it is not necessary to inspect all transactions of a single contract, but rather all contracts in a single transaction (e.g., execution of a governance proposal). This can be achieved with the command `inspect-tx`: + +``` +dv inspect-tx --txhash <tx-hash> --chainid <chain-id> +``` + +`<tx-hash>` is the hash of the transaction that should be inspected, `<chain-id>` the ID of the chain the transaction has been executed on. + +The command will fetch all contracts that are touched during the transaction. For each of the contracts, raw state changes and emitted events are then fetched and displayed. + +To see the decoded state changes and events (and to generate a DVF file), each contract can be configured in your `.dv_config.json` prior to (re-)running the command. For details, please refer to the [configuration specification](docs/config.md). + +## Common Problems Sometimes, it is possible that the `init` command cannot find a deployment transaction. 
In this case, you have the following options: diff --git a/docs/config.md b/docs/config.md index 30944a7..7a3fcfe 100644 --- a/docs/config.md +++ b/docs/config.md @@ -7,7 +7,7 @@ When running the `dvf` command, the default configuration file is expected at `$ | --- | --- | | `rpc_urls` | Mapping from chain ID to RPC URL | | `dvf_storage` | Folder where DVFs are stored | -| `trusted_signers`: | List of addresses of trusted signers | +| `trusted_signers` | List of addresses of trusted signers | | `etherscan_global` | Etherscan configuration for all chains, optional | | - `api_url` | Etherscan API URL | | - `api_key` | Etherscan API Key | @@ -23,13 +23,28 @@ When running the `dvf` command, the default configuration file is expected at `$ | - - `api_url` | Chain-specific Blockscout API URL | | - - `api_key` | Chain-specific Blockscout API Key | | `max_blocks_per_event_query` | Number of blocks that can be queried at once in `getLogs`, optional, defaults to 9999 | -| `web3_timeout` | Timeout in seconds for web3 RPC queries, optional, defaults to 700 | +| `web3_timeout` | Timeout in seconds for web3 RPC queries, optional, defaults to 5000 | | `signer` | Configuration on how to sign, optional | | - `wallet_address` | Address which is used to sign | | - `wallet_type` | Can have different structure | | - - `secret_key` | If secret key is used, hex string | | - - `ledger_type` | If ledger is used, "LedgerLive" or "Legacy" | | - - `ledger_index` | If ledger is used, Ledger Index to use | - - - +| `projects` | List of project configurations for the `inspect-tx` command, optional | +| - `project_path` | Path to the root folder of the source code project | +| - `output_path` | Path to the output DVF file | +| - `environment` | Project's development environment, defaults to "Foundry" | +| - `artifacts_path` | Folder containing the project artifacts, defaults to "artifacts" | +| - `build_cache_path` | Folder containing build-info files, optional | +| - `libraries` | Library 
specifiers in the form Path:Name:Address, optional | +| - `address` | Address of the contract | +| - `chain_id` | Chain ID where the contract is deployed, optional | +| - `contract_name` | Name of the contract | +| - `deployment_tx` | Deployment transaction hash, optional | +| - `factory` | Treat this contract as a factory, which changes bytecode verification, defaults to false | +| - `implementation_config` | Optional implementation project configuration for proxy contracts | +| - - `project_path` | Path to the root folder of the implementation project | +| - - `environment` | Implementation project's development environment, defaults to "Foundry" | +| - - `artifacts_path` | Folder containing the implementation project artifacts, defaults to "artifacts" | +| - - `build_cache_path` | Folder containing the implementation contract's build-info files, optional | +| - - `contract_name` | Name of the implementation contract | diff --git a/lib/bytecode_verification/parse_json.rs b/lib/bytecode_verification/parse_json.rs index 689c639..e992a60 100644 --- a/lib/bytecode_verification/parse_json.rs +++ b/lib/bytecode_verification/parse_json.rs @@ -7,6 +7,7 @@ use alloy::json_abi::Constructor; use alloy::primitives::U256; use clap::ValueEnum; use semver::Version; +use serde::{Deserialize, Serialize}; use serde_json; use serde_json::Value; use std::path::Path; @@ -322,21 +323,47 @@ impl ProjectInfo { }, ); // add base type - types.insert( - base_identifier.clone(), - TypeDescription { - encoding: String::from("inplace"), - label: type_name["baseType"]["typeDescriptions"]["typeString"] - .as_str() - .unwrap() - .to_string(), - number_of_bytes: type_defs.get_number_of_bytes(&base_identifier), - base: None, - key: None, - value: None, - members: None, - }, - ); + if base_identifier.starts_with("t_struct") { + let struct_slots: Vec<(u64, U256, Option)> = vec![( + type_name["baseType"]["referencedDeclaration"] + .as_u64() + .unwrap(), + U256::from(0), + None, + )]; + // we only 
need the types, so we use a dummy storage vector + let mut storage: Vec = vec![]; + for source in sources.values() { + if let Some(ast) = source.ast.clone() { + for node in &ast.nodes { + Self::find_storage_struct_data( + sources, + node, + type_defs, + &struct_slots, + types, + &mut storage, + ); + } + } + } + } else { + types.insert( + base_identifier.clone(), + TypeDescription { + encoding: String::from("inplace"), + label: type_name["baseType"]["typeDescriptions"]["typeString"] + .as_str() + .unwrap() + .to_string(), + number_of_bytes: type_defs.get_number_of_bytes(&base_identifier), + base: None, + key: None, + value: None, + members: None, + }, + ); + } } else if type_name["nodeType"] == "UserDefinedTypeName" { // go deeper to extract inner structs let identifier = type_name["typeDescriptions"]["typeIdentifier"] @@ -1713,7 +1740,7 @@ impl ProjectInfo { } } -#[derive(ValueEnum, Copy, Clone, Eq, PartialEq)] +#[derive(ValueEnum, Copy, Clone, Eq, PartialEq, Debug, Serialize, Deserialize)] pub enum Environment { Foundry, Hardhat, diff --git a/lib/dvf/config.rs b/lib/dvf/config.rs index fb689da..2c0ffed 100644 --- a/lib/dvf/config.rs +++ b/lib/dvf/config.rs @@ -23,6 +23,7 @@ use serde_json::Value; use tempfile::{tempdir, NamedTempFile}; use tracing::debug; +use crate::bytecode_verification::parse_json::Environment; use crate::dvf::abstract_wallet::AbstractWallet; use crate::dvf::parse::ValidationError; use crate::web3; @@ -118,12 +119,62 @@ pub struct DVFConfig { #[serde(default = "default_web3_timeout")] pub web3_timeout: u64, pub signer: Option, + #[serde(default)] + pub projects: Vec, #[serde(skip_serializing)] pub active_chain_id: Option, #[serde(default, skip_serializing)] active_chain: Option, } +#[derive(Debug, Serialize, Deserialize)] +pub struct DVFProjectConfig { + /// Path to the root folder of the source code project (required) + pub project_path: PathBuf, + /// Path to the output DVF file + pub output_path: PathBuf, + /// Project's development 
environment + #[serde(default = "default_environment")] + pub environment: Environment, + /// Folder containing the project artifacts + #[serde(default = "default_artifacts_path")] + pub artifacts_path: String, + /// Folder containing build-info files + pub build_cache_path: Option, + /// Library specifiers in the form Path:Name:Address + #[serde(default)] + pub libraries: Option>, + /// Address of the contract + pub address: Option
, + /// Chain ID where the contract is deployed + pub chain_id: Option, + /// Name of the contract + pub contract_name: String, + /// Deployment transaction hash + pub deployment_tx: Option, + /// Treat this contract as a factory, which changes bytecode verification + #[serde(default)] + pub factory: bool, + /// Optional implementation project configuration for proxy contracts + pub implementation_config: Option>, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct DVFImplementationConfig { + /// Path to the root folder of the implementation project + pub project_path: PathBuf, + /// Implementation project's development environment + #[serde(default = "default_environment")] + pub environment: Environment, + /// Folder containing the implementation project artifacts + #[serde(default = "default_artifacts_path")] + pub artifacts_path: String, + /// Folder containing the implementation contract's build-info files + pub build_cache_path: Option, + /// Name of the implementation contract + pub contract_name: String, +} + fn default_max_blocks() -> u64 { 9999 } @@ -132,6 +183,14 @@ fn default_web3_timeout() -> u64 { 5000 } +fn default_environment() -> Environment { + Environment::Foundry +} + +fn default_artifacts_path() -> String { + "artifacts".to_string() +} + impl DVFConfig { const DEFAULT_ETHERSCAN_API_URL: &str = "https://api.etherscan.io/v2/api"; const DEFAULT_BLOCKSCOUT_API_URL: &str = "https://eth.blockscout.com/api"; @@ -207,6 +266,7 @@ impl DVFConfig { secret_key: env::var("SIGNER_SECRET_KEY")?, }), }), + projects: Vec::new(), active_chain_id: None, active_chain: None, }) @@ -921,6 +981,207 @@ impl DVFConfig { println!("{}", "The provided address could not be parsed.".yellow()); } } + /* + // Project configurations + let mut projects: BTreeMap = BTreeMap::new(); + println!(); + println!("{}", "STEP 8".green()); + println!("You can now configure projects for your DVF development."); + println!("Projects allow you to pre-configure development 
environments, artifact paths,"); + println!("and other settings used by the 'init' command for specific contracts."); + println!(); + + loop { + println!( + "Would you like to add a project configuration? Hit {} to continue without adding projects.", + "".green() + ); + print!("> "); + + let mut input = String::new(); + let _ = std::io::Write::flush(&mut std::io::stdout()); + io::stdin().read_line(&mut input).unwrap(); + + if input.trim().is_empty() { + break; + } + + if input.trim().eq_ignore_ascii_case("y") { + // Project name + let mut project_name = String::new(); + loop { + println!("Please enter a name for this project:"); + print!("> "); + + let mut input = String::new(); + let _ = std::io::Write::flush(&mut std::io::stdout()); + io::stdin().read_line(&mut input).unwrap(); + + if sscanf!(&input, "{}", project_name).is_ok() && !project_name.is_empty() { + break; + } else { + println!("{}", "Project name cannot be empty.".yellow()); + } + } + + // Project path + let mut project_path = PathBuf::new(); + loop { + println!("Please enter the path to the root folder of the source code project:"); + print!("> "); + + let mut input = String::new(); + let _ = std::io::Write::flush(&mut std::io::stdout()); + io::stdin().read_line(&mut input).unwrap(); + + let mut path_str = String::new(); + if sscanf!(&input, "{}", path_str).is_ok() { + if let Ok(path) = replace_tilde(path_str.trim()) { + if path.exists() { + project_path = path; + break; + } else { + println!("{}", "The provided path does not exist.".yellow()); + } + } else { + println!("{}", "The provided path could not be parsed.".yellow()); + } + } else { + println!("{}", "The provided path could not be parsed.".yellow()); + } + } + + // Development environment + let mut environment = Environment::Foundry; + loop { + println!("Please select the development environment:"); + println!("1. Foundry"); + println!("2. 
Hardhat"); + println!("Hit {} to use default (Foundry)", "".green()); + print!("> "); + + let mut input = String::new(); + let _ = std::io::Write::flush(&mut std::io::stdout()); + io::stdin().read_line(&mut input).unwrap(); + + if input.trim().is_empty() { + break; + } + + let mut choice: u64 = 0; + if sscanf!(&input, "{}", choice).is_ok() { + environment = match choice { + 1 => Environment::Foundry, + 2 => Environment::Hardhat, + _ => { + println!("{}", "Please enter a valid choice.".yellow()); + continue; + } + }; + break; + } else { + println!("{}", "The provided number could not be parsed.".yellow()); + } + } + + // Artifacts path + let mut artifacts_path = String::from("artifacts"); + loop { + println!("Please enter the folder containing the project artifacts:"); + println!( + "Hit {} to use default value: {}", + "".green(), + artifacts_path.green() + ); + print!("> "); + + let mut input = String::new(); + let _ = std::io::Write::flush(&mut std::io::stdout()); + io::stdin().read_line(&mut input).unwrap(); + + if input.trim().is_empty() { + break; + } + + let mut path_str = String::new(); + if sscanf!(&input, "{}", path_str).is_ok() && !path_str.is_empty() { + artifacts_path = path_str; + break; + } else { + println!("{}", "The provided path could not be parsed.".yellow()); + } + } + + // Build cache path (optional) + let mut build_cache_path: Option = None; + loop { + println!("Please enter the folder containing build-info files:"); + println!("Hit {} to skip this configuration", "".green()); + print!("> "); + + let mut input = String::new(); + let _ = std::io::Write::flush(&mut std::io::stdout()); + io::stdin().read_line(&mut input).unwrap(); + + if input.trim().is_empty() { + break; + } + + let mut path_str = String::new(); + if sscanf!(&input, "{}", path_str).is_ok() && !path_str.is_empty() { + build_cache_path = Some(path_str); + break; + } else { + println!("{}", "The provided path could not be parsed.".yellow()); + } + } + + // Libraries (optional) + let 
mut libraries: Vec = vec![]; + loop { + println!( + "Please enter library specifiers in the form Path:Name:Address, or hit {} to finish:", + "".green() + ); + print!("> "); + + let mut input = String::new(); + let _ = std::io::Write::flush(&mut std::io::stdout()); + io::stdin().read_line(&mut input).unwrap(); + + if input.trim().is_empty() { + break; + } + + let mut library = String::new(); + if sscanf!(&input, "{}", library).is_ok() && !library.is_empty() { + libraries.push(library); + } else { + println!("{}", "The provided library specifier could not be parsed.".yellow()); + } + } + + // Create project config + let project_config = DVFProjectConfig { + project_path, + output_path, + environment, + artifacts_path, + build_cache_path, + libraries, + address: None, + chain_id: None, + event_topics: None, + contract_name: None, + factory: false, + implementation_config: None, // For now, we'll add implementation config in a future update + }; + + projects.insert(project_name, project_config); + println!("{}", "Project configuration added successfully!".green()); + } + } + */ Ok(DVFConfig { rpc_urls, @@ -933,6 +1194,8 @@ impl DVFConfig { max_blocks_per_event_query, web3_timeout, signer, + //projects, + projects: Vec::new(), active_chain_id: None, active_chain: None, }) @@ -1181,6 +1444,18 @@ impl DVFConfig { chain_id ))) } + + /// Get a project configuration by address and chain_id + pub fn get_project_config( + &self, + address: &Address, + chain_id: u64, + ) -> Option<&DVFProjectConfig> { + self.projects.iter().find(|project| { + project.address.as_ref() == Some(address) + && project.chain_id.as_ref() == Some(&chain_id) + }) + } } fn replace_tilde_from_path(path: &Path) -> Result { diff --git a/lib/dvf/discovery.rs b/lib/dvf/discovery.rs index 0b2107e..f1c5932 100644 --- a/lib/dvf/discovery.rs +++ b/lib/dvf/discovery.rs @@ -21,6 +21,8 @@ use crate::utils::progress::{print_progress, ProgressMode}; use crate::utils::read_write_file::get_project_paths; use 
crate::web3; use crate::web3::stop_anvil_instance; +use crate::web3::TraceWithAddress; +use alloy_node_bindings::AnvilInstance; pub struct DiscoveryParams<'a> { pub config: &'a DVFConfig, @@ -35,20 +37,25 @@ pub struct DiscoveryParams<'a> { pub libraries: Option>, pub implementation_name: Option<&'a str>, pub implementation_project: Option<&'a PathBuf>, - pub implementation_env: Environment, - pub implementation_artifacts: &'a str, + pub implementation_env: Option<&'a Environment>, + pub implementation_artifacts: Option<&'a str>, pub implementation_build_cache: Option<&'a String>, pub zerovalue: bool, pub event_topics: Option>, pub pc: &'a mut u64, pub progress_mode: &'a ProgressMode, pub use_storage_range: bool, + pub tx_hashes: Option>, + // Optional cache (used by inspect-tx): reuse an already computed trace and config + pub cached_traces: Option>, + pub cached_anvil_config: Option<&'a DVFConfig>, } pub struct DiscoveryResult { pub critical_storage_variables: Vec, pub critical_events: Vec, pub storage_var_table: Table, + pub unused_storage_var_table: Table, pub event_table: Table, pub all_events: Vec, pub proxy_warning: bool, @@ -121,7 +128,8 @@ pub fn discover_storage_and_events( let imp_path: PathBuf; let imp_artifacts_path: PathBuf; if let Some(imp_project) = params.implementation_project { - imp_artifacts_path = get_project_paths(imp_project, params.implementation_artifacts); + imp_artifacts_path = + get_project_paths(imp_project, params.implementation_artifacts.unwrap()); imp_path = imp_project.clone(); } else if let Some(project_path) = params.project { imp_path = project_path.clone(); @@ -135,7 +143,7 @@ pub fn discover_storage_and_events( let tmp_project_info = ProjectInfo::new( &implementation_name.to_string(), &imp_path, - params.implementation_env, + *params.implementation_env.unwrap(), &imp_artifacts_path, params.implementation_build_cache, params.libraries.clone(), @@ -149,7 +157,7 @@ pub fn discover_storage_and_events( let fi_impl_layout = 
forge_inspect::ForgeInspectLayoutStorage::generate_and_parse_layout( &imp_path, implementation_name, - if params.implementation_env == Environment::Hardhat { + if params.implementation_env == Some(&Environment::Hardhat) { tmp_project_info.absolute_path.clone() } else { None @@ -158,7 +166,7 @@ pub fn discover_storage_and_events( let fi_impl_ir = forge_inspect::ForgeInspectIrOptimized::generate_and_parse_ir_optimized( &imp_path, implementation_name, - if params.implementation_env == Environment::Hardhat { + if params.implementation_env == Some(&Environment::Hardhat) { tmp_project_info.absolute_path.clone() } else { None @@ -170,38 +178,19 @@ pub fn discover_storage_and_events( types.extend(tmp_project_info.types.clone()); imp_project_info = Some(tmp_project_info); } - - // Get transaction hashes based on event topics - let mut seen_events: Vec = vec![]; - let tx_hashes: Vec = if let Some(event_topics) = ¶ms.event_topics { - print_progress( - "Obtaining past events and transactions.", - params.pc, - params.progress_mode, - ); - seen_events = web3::get_eth_events( + let (tx_hashes, mut seen_events) = if params.tx_hashes.is_none() { + // Get transaction hashes based on event topics + get_tx_hashes( params.config, params.address, params.start_block_num, params.end_block_num, - event_topics, - )?; - seen_events - .iter() - .filter_map(|e| e.transaction_hash.map(|h| format!("{h:#x}"))) - .collect() - } else { - print_progress( - "Obtaining past transactions.", + params.event_topics.clone(), params.pc, params.progress_mode, - ); - web3::get_all_txs_for_contract( - params.config, - params.address, - params.start_block_num, - params.end_block_num, )? 
+ } else { + (params.tx_hashes.unwrap(), vec![]) }; print_progress("Getting storage snapshot.", params.pc, params.progress_mode); @@ -217,25 +206,39 @@ pub fn discover_storage_and_events( let mut seen_transactions = HashSet::new(); let mut missing_traces = false; - for tx_hash in &tx_hashes { + for (index, tx_hash) in tx_hashes.iter().enumerate() { if seen_transactions.contains(tx_hash) { continue; } seen_transactions.insert(tx_hash); info!("Getting trace for {}", tx_hash); - match web3::get_eth_debug_trace_sim(params.config, tx_hash) { + // Use cached trace if provided (inspect-tx), otherwise fetch + let fetched = if let Some(ref cached) = params.cached_traces { + debug!("Using cached trace at index {} of {}", index, cached.len()); + Ok(( + cached[index].clone(), + None::, + None::, + )) + } else { + web3::get_eth_debug_trace_sim(params.config, tx_hash) + }; + match fetched { Ok((trace, anvil_config, anvil_instance)) => { - let record_traces_config = match &anvil_config { - Some(c) => c, - None => params.config, + let record_traces_config: &DVFConfig = if params.cached_traces.is_some() { + params.cached_anvil_config.unwrap_or(params.config) + } else { + anvil_config.as_ref().unwrap_or(params.config) }; if let Err(err) = contract_state.record_traces(record_traces_config, vec![trace]) { missing_traces = true; info!("Warning. The trace for {tx_hash} cannot be obtained. Some mapping slots might not be decodable. You can try to increase the timeout in the config. 
Error: {}", err); } - if let Some(anvil_instance) = anvil_instance { - stop_anvil_instance(anvil_instance); + if params.cached_traces.is_none() { + if let Some(anvil_instance) = anvil_instance { + stop_anvil_instance(anvil_instance); + } } } Err(err) => { @@ -258,8 +261,23 @@ pub fn discover_storage_and_events( &storage_layout, &types, params.zerovalue, + params.config, + params.end_block_num, )?; + let mut unused_storage_var_table = Table::new(); + let unused = snapshot.get_unused_nonzero_storage_slots(); + unused_storage_var_table.set_titles(row!["Slot", "Offset", "Value"]); + for unused_part in unused { + let slot_hex = format!("0x{:x}", unused_part.slot); + let value_hex = format!("0x{}", hex::encode(&unused_part.value)); + unused_storage_var_table.add_row(row![ + slot_hex, + unused_part.offset, + wrap_by_length(&value_hex, 66, "\n"), + ]); + } + let proxy_warning = critical_storage_variables .iter() .any(|var| var.var_name == "unknown") @@ -422,12 +440,60 @@ pub fn discover_storage_and_events( critical_storage_variables, critical_events, storage_var_table, + unused_storage_var_table, event_table, all_events, proxy_warning, }) } +pub fn wrap_by_length(s: &str, max_len: usize, delimiter: &str) -> String { + if max_len == 0 { + return s.to_string(); + } + let mut out = String::with_capacity(s.len() + s.len() / max_len); + let mut count = 0; + for ch in s.chars() { + if count == max_len { + out.push_str(delimiter); + count = 0; + } + out.push(ch); + count += 1; + } + out +} + +pub fn get_tx_hashes( + config: &DVFConfig, + address: &Address, + start_block_num: u64, + end_block_num: u64, + event_topics: Option>, + pc: &mut u64, + progress_mode: &ProgressMode, +) -> Result<(Vec, Vec), ValidationError> { + let mut seen_events: Vec = vec![]; + let tx_hashes: Vec = if let Some(event_topics) = &event_topics { + print_progress("Obtaining past events and transactions.", pc, progress_mode); + seen_events = web3::get_eth_events( + config, + address, + start_block_num, + 
end_block_num, + event_topics, + )?; + seen_events + .iter() + .filter_map(|e| e.transaction_hash.map(|h| format!("{h:#x}"))) + .collect() + } else { + print_progress("Obtaining past transactions.", pc, progress_mode); + web3::get_all_txs_for_contract(config, address, start_block_num, end_block_num)? + }; + Ok((tx_hashes, seen_events)) +} + #[allow(clippy::too_many_arguments)] pub fn create_discovery_params_for_init<'a>( config: &'a DVFConfig, @@ -436,7 +502,7 @@ pub fn create_discovery_params_for_init<'a>( init_block_num: u64, project: &'a PathBuf, artifacts: &'a str, - env: Environment, + env: &'a Environment, build_cache: Option<&'a String>, libraries: Option>, zerovalue: bool, @@ -453,21 +519,27 @@ pub fn create_discovery_params_for_init<'a>( end_block_num: init_block_num, project: Some(project), artifacts, - env, + env: *env, build_cache, libraries, implementation_name: sub_m .get_one::("implementation") .map(|s| s.as_str()), implementation_project: sub_m.get_one::("implementationproject"), - implementation_env: env, - implementation_artifacts: sub_m.get_one::("implementationartifacts").unwrap(), + implementation_env: Some(env), + implementation_artifacts: sub_m + .get_one::("implementationartifacts") + .as_ref() + .map(|s| s.as_str()), implementation_build_cache: sub_m.get_one::("implementationbuildcache"), zerovalue, event_topics, pc, progress_mode, use_storage_range: true, + tx_hashes: None, + cached_traces: None, + cached_anvil_config: None, } } @@ -501,13 +573,19 @@ pub fn create_discovery_params_for_update<'a>( .get_one::("implementation") .map(|s| s.as_str()), implementation_project: sub_m.get_one::("implementationproject"), - implementation_env: *sub_m.get_one::("implementationenv").unwrap(), - implementation_artifacts: sub_m.get_one::("implementationartifacts").unwrap(), + implementation_env: sub_m.get_one::("implementationenv"), + implementation_artifacts: sub_m + .get_one::("implementationartifacts") + .as_ref() + .map(|s| s.as_str()), 
implementation_build_cache: sub_m.get_one::("implementationbuildcache"), zerovalue, event_topics: None, // Update mode doesn't filter by event topics pc, progress_mode, use_storage_range: false, // cannot use storage range here as we are only trying to get a subset of the state + tx_hashes: None, + cached_traces: None, + cached_anvil_config: None, } } diff --git a/lib/dvf/parse.rs b/lib/dvf/parse.rs index 5643f40..d498ca2 100644 --- a/lib/dvf/parse.rs +++ b/lib/dvf/parse.rs @@ -404,6 +404,44 @@ impl CompleteDVF { Ok(filled) } + pub fn new( + contract_name: &str, + address: &Address, + chain_id: u64, + ) -> Result { + let dumped = CompleteDVF { + version: CURRENT_VERSION, + id: None, + contract_name: contract_name.to_string(), + address: *address, + chain_id, + codehash: String::new(), + deployment_tx: String::new(), + deployment_block_num: 0, + init_block_num: 0, + insecure: Some(false), + immutables: vec![], + constructor_args: vec![], + critical_storage_variables: vec![], + critical_events: vec![], + expiry_in_epoch_seconds: None, + references: None, + unvalidated_metadata: Some(Unvalidated { + author_name: Some(String::from("Author")), + description: Some(String::from("System Description")), + hardfork: Some(vec![String::from("paris"), String::from("shanghai")]), + audit_report: Some(String::from("https://example.org/report.pdf")), + source_url: Some(String::from("https://github.com/source/code")), + security_contact: Some(String::from("security@example.org")), + implementation_name: None, + implementation_address: None, // currently no source for this + }), + signature: None, + }; + dumped.check_version()?; + Ok(dumped) + } + pub fn from_cli(matches: &ArgMatches) -> Result { let immutables: Vec = vec![]; let critical_storage_variables: Vec = vec![]; diff --git a/lib/state/contract_state.rs b/lib/state/contract_state.rs index 16d57f2..7ccb0ef 100644 --- a/lib/state/contract_state.rs +++ b/lib/state/contract_state.rs @@ -396,6 +396,7 @@ impl<'a> ContractState<'a> 
{ ); } + #[allow(clippy::too_many_arguments)] pub fn get_critical_storage_variables( &mut self, snapshot: &mut StorageSnapshot, @@ -403,6 +404,8 @@ impl<'a> ContractState<'a> { pi_storage: &Vec, pi_types: &HashMap, zerovalue: bool, + config: &DVFConfig, + block_num: u64, ) -> Result, ValidationError> { let default_values = &ForgeInspectLayoutStorage::default_values(); // Add default types as we might need them @@ -421,6 +424,8 @@ impl<'a> ContractState<'a> { snapshot, table, zerovalue, + config, + block_num, )?); } @@ -435,8 +440,14 @@ impl<'a> ContractState<'a> { // continue; // } - let new_critical_storage_variables = - self.get_critical_variable(state_variable, snapshot, table, zerovalue)?; + let new_critical_storage_variables = self.get_critical_variable( + state_variable, + snapshot, + table, + zerovalue, + config, + block_num, + )?; let mut has_nonzero = false; for crit_var in &new_critical_storage_variables { if !crit_var.is_zero() { @@ -499,13 +510,20 @@ impl<'a> ContractState<'a> { &self, var: &StateVariable, snapshot: &StorageSnapshot, + config: &DVFConfig, + block_num: u64, ) -> Result { assert!(Self::is_any_array(&var.var_type)); assert!(var.offset == 0); let var_type = &self.types[&var.var_type]; if var_type.encoding == "dynamic_array" { - // Get slot - let slot_val: [u8; 32] = snapshot.get_full_slot(&var.slot); + // On partial storage reads, array lengths might not be retrieved. In that case, they have to be fetched from the RPC. + let slot_val: [u8; 32] = match snapshot.get(&var.slot) { + Some(val) => *val, + None => { + crate::web3::get_eth_storage_at(config, &self.address, &var.slot, block_num)? + } + }; // Assume that array size is in last 8 bytes let arr_size: [u8; 8] = slot_val[24..] 
.try_into() @@ -535,6 +553,8 @@ impl<'a> ContractState<'a> { snapshot: &mut StorageSnapshot, table: &mut Table, zerovalue: bool, + config: &DVFConfig, + block_num: u64, ) -> Result, ValidationError> { if Self::is_basic_type(&state_variable.var_type) || Self::is_user_defined_type(&state_variable.var_type) @@ -592,6 +612,8 @@ impl<'a> ContractState<'a> { snapshot, table, zerovalue, + config, + block_num, )?); } return Ok(critical_storage_variables); @@ -599,7 +621,7 @@ impl<'a> ContractState<'a> { if Self::is_any_array(&state_variable.var_type) { let mut critical_storage_variables = Vec::::new(); - let num: usize = self.get_array_length(state_variable, snapshot)?; + let num: usize = self.get_array_length(state_variable, snapshot, config, block_num)?; let base_num_bytes: usize = self.get_base_num_bytes(&state_variable.var_type); let mut current_offset = state_variable.offset; // Add length field @@ -616,6 +638,8 @@ impl<'a> ContractState<'a> { snapshot, table, zerovalue, + config, + block_num, )?); } let mut current_slot = match self.is_dynamic_array(&state_variable.var_type) { @@ -630,8 +654,11 @@ impl<'a> ContractState<'a> { slot: current_slot, var_type: self.get_base_type(&state_variable.var_type), }; - critical_storage_variables - .extend(self.get_critical_variable(&base, snapshot, table, zerovalue)?); + critical_storage_variables.extend( + self.get_critical_variable( + &base, snapshot, table, zerovalue, config, block_num, + )?, + ); // Check if we need to skip multiple slots if base_num_bytes > 32 { current_slot = current_slot @@ -699,8 +726,11 @@ impl<'a> ContractState<'a> { slot: *target_slot, var_type: self.get_value_type(&state_variable.var_type), }; - critical_storage_variables - .extend(self.get_critical_variable(&base, snapshot, table, zerovalue)?); + critical_storage_variables.extend( + self.get_critical_variable( + &base, snapshot, table, zerovalue, config, block_num, + )?, + ); } return Ok(critical_storage_variables); } @@ -765,6 +795,8 @@ impl<'a> 
ContractState<'a> { snapshot, table, zerovalue, + config, + block_num, )?); let mut string_length = U256::from_be_slice(&snapshot.get_slot( &length_var.slot, diff --git a/lib/state/forge_inspect.rs b/lib/state/forge_inspect.rs index 5945b5a..dec8076 100644 --- a/lib/state/forge_inspect.rs +++ b/lib/state/forge_inspect.rs @@ -218,6 +218,8 @@ fn forge_inspect_helper( if let Some(path) = contract_path { contract = format!("{}:{}", path, contract_name); } + + // First attempt w/o --via-ir let forge_inspect = Command::new("forge") .env("RUST_LOG", "error") // prevents `forge inspect` from contaminating the JSON with logs .current_dir(project_path) @@ -230,10 +232,32 @@ fn forge_inspect_helper( .arg(temp_path.as_os_str()) .arg("--cache-path") .arg(temp_cache_path.as_os_str()) - .arg(contract) - .arg(field) + .arg(&contract) + .arg(&field) .output()?; + let forge_inspect = if !forge_inspect.status.success() { + debug!("forge inspect without --via-ir failed, retrying with --via-ir"); + Command::new("forge") + .env("RUST_LOG", "error") + .current_dir(project_path) + .arg("inspect") + .arg("--force") + .arg("--json") + .arg("--via-ir") + .arg("--root") + .arg(".") + .arg("--out") + .arg(temp_path.as_os_str()) + .arg("--cache-path") + .arg(temp_cache_path.as_os_str()) + .arg(&contract) + .arg(&field) + .output()? 
+ } else { + forge_inspect + }; + if !forge_inspect.status.success() { return Err(Error::other(format!( "Failed to run forge inspect: {}", diff --git a/lib/utils/progress.rs b/lib/utils/progress.rs index 1d1b32a..d8f81b9 100644 --- a/lib/utils/progress.rs +++ b/lib/utils/progress.rs @@ -7,6 +7,9 @@ pub enum ProgressMode { BytecodeCheck, GenerateBuildCache, ListEvents, + InspectTx, + InspectTxSub, + InspectTxSubNoconf, } pub fn print_progress(s: &str, i: &mut u64, pm: &ProgressMode) { @@ -20,6 +23,9 @@ pub fn print_progress(s: &str, i: &mut u64, pm: &ProgressMode) { ProgressMode::BytecodeCheck => 3, ProgressMode::GenerateBuildCache => 1, ProgressMode::ListEvents => 1, + ProgressMode::InspectTx => 10, + ProgressMode::InspectTxSub => 10, + ProgressMode::InspectTxSubNoconf => 4, }; println!("{} {}", style(format!("[{i:2}/{total:2}]")).bold().dim(), s); *i += 1; diff --git a/lib/web3.rs b/lib/web3.rs index a548f69..79dd7d3 100644 --- a/lib/web3.rs +++ b/lib/web3.rs @@ -5,6 +5,7 @@ use std::io::Read; use std::str::FromStr; use std::time::Duration; +use alloy_rpc_types_trace::parity::CallType; use colored::Colorize; use indicatif::ProgressBar; use reqwest::blocking::get; @@ -386,6 +387,16 @@ pub fn get_eth_debug_trace_sim( // ignore error as this does not return any result let _ = send_blocking_web3_post(&anvil_config, &impersonate_body); + // add 100 eth to account (required in case deployment happened on a virtual testnet using admin rpc) + let balance_body = json!({ + "jsonrpc": "2.0", + "method": "anvil_setBalance", + "params": [from, "0x56bc75e2d63100000"], + "id": 1 + }); + + let _ = send_blocking_web3_post(&anvil_config, &balance_body); + // submit transaction to anvil let send_tx_body = json!({ "jsonrpc": "2.0", @@ -405,10 +416,29 @@ pub fn get_eth_debug_trace_sim( let tx_result = match send_blocking_web3_post(&anvil_config, &send_tx_body) { Ok(result) => result, - Err(e) => { - // Stop the anvil instance before returning the error - 
stop_anvil_instance(anvil_instance); - return Err(e); + Err(_) => { + // gas can be set to 0 on virtual testnets so try one more time without these params + let send_tx_body_wo_gas = json!({ + "jsonrpc": "2.0", + "method": "eth_sendTransaction", + "params": [ + { + "from": from, + "to": if to == "null" { serde_json::Value::Null } else { json!(to) }, + "value": value, + "input": data, + } + ], + "id": 1 + }); + match send_blocking_web3_post(&anvil_config, &send_tx_body_wo_gas) { + Ok(result) => result, + Err(e) => { + // Stop the anvil instance before returning the error + stop_anvil_instance(anvil_instance); + return Err(e); + } + } } }; @@ -1946,6 +1976,10 @@ impl StorageSnapshot { } } + pub fn get(&self, slot: &U256) -> Option<&[u8; 32]> { + self.snapshot.get(slot) + } + // Get Storage entry pub fn get_u8_from_slot(&self, slot: &U256, offset: usize) -> u8 { match self.snapshot.get(slot) { @@ -2592,3 +2626,88 @@ pub fn stop_anvil_instance(anvil_instance: AnvilInstance) { // when it goes out of scope, but we can also explicitly drop it drop(anvil_instance); } + +pub fn get_all_addresses( + config: &DVFConfig, + tx_id: &str, +) -> Result<(Vec
<Address>, Vec<Address>
), ValidationError> { + let mut call_addresses: Vec<Address>
= vec![]; + let mut create_addresses: Vec
<Address> = vec![]; + + // Try to get transaction trace using get_tx_trace first + match get_tx_trace(config, tx_id) { + Ok(traces) => { + debug!("Using parity trace for {}", tx_id); + for trace in &traces { + match &trace.action { + // we only care for contracts whose state can change during the call, so no staticcall, delegatecall etc. + Action::Call(call) if call.call_type == CallType::Call => { + call_addresses.push(call.to); + } + Action::Create(_create) => { + // For create actions, we need to check the result + if let Some(TraceOutput::Create(create_res)) = &trace.result { + create_addresses.push(create_res.address); + } + } + Action::Selfdestruct(selfdestruct) => { + call_addresses.push(selfdestruct.address); + } + _ => { + // do nothing + } + } + } + } + Err(e) => { + debug!("Parity trace failed with {:?}, trying geth debug trace.", e); + // Fallback to geth debug call trace + let call_frame = get_eth_debug_call_trace(config, tx_id)?; + extract_all_addresses_from_call_frame( + &call_frame, + &mut call_addresses, + &mut create_addresses, + )?; + } + } + + call_addresses.sort(); + call_addresses.dedup(); + create_addresses.sort(); + create_addresses.dedup(); + + debug!("All call addresses for {} are: {:?}", tx_id, call_addresses); + debug!( + "All create addresses for {} are: {:?}", + tx_id, create_addresses + ); + Ok((call_addresses, create_addresses)) +} + +// Extract all addresses from a CallFrame (geth debug trace) +fn extract_all_addresses_from_call_frame( + call_frame: &CallFrame, + call_addresses: &mut Vec<Address>
, + create_addresses: &mut Vec<Address>
, +) -> Result<(), ValidationError> { + // Add the 'to' address if it exists and the call didn't fail + // we only care for contracts whose state can change during the call, so no staticcall, delegatecall etc. + if call_frame.error.is_none() { + if let Some(addr) = call_frame.to { + if call_frame.typ == "CALL" { + call_addresses.push(addr); + } else if call_frame.typ == "CREATE" || call_frame.typ == "CREATE2" { + create_addresses.push(addr); + } else if call_frame.typ == "SELFDESTRUCT" { + call_addresses.push(addr); + } + } + } + + // Recursively process all sub-calls + for call in &call_frame.calls { + extract_all_addresses_from_call_frame(call, call_addresses, create_addresses)?; + } + + Ok(()) +} diff --git a/src/dvf.rs b/src/dvf.rs index 9422bd3..1fd743b 100644 --- a/src/dvf.rs +++ b/src/dvf.rs @@ -13,14 +13,15 @@ use dvf_libs::bytecode_verification::verify_bytecode; use dvf_libs::dvf::config::{replace_tilde, DVFConfig}; use dvf_libs::dvf::discovery::{ create_discovery_params_for_init, create_discovery_params_for_update, - discover_storage_and_events, DiscoveryResult, + discover_storage_and_events, wrap_by_length, DiscoveryParams, DiscoveryResult, }; use dvf_libs::dvf::parse::{self, DVFStorageEntry, ValidationError, CURRENT_VERSION_STRING}; use dvf_libs::dvf::registry::{self, Registry}; +// use dvf_libs::state::forge_inspect::{StateVariable, TypeDescription}; use dvf_libs::utils::pretty::PrettyPrinter; use dvf_libs::utils::progress::{print_progress, ProgressMode}; use dvf_libs::utils::read_write_file::get_project_paths; -use dvf_libs::web3; +use dvf_libs::web3::{self, get_transaction_details}; use indicatif::ProgressBar; use prettytable::{row, Table}; use scanf::sscanf; @@ -734,6 +735,22 @@ fn main() { ) .arg(arg!(--buildcache ).help("Folder containing build-info files")), ) + .subcommand( + Command::new("inspect-tx") + .about("Inspect a transaction and load project configuration from config file") + .arg( + arg!(--txhash ) + .help("Transaction hash to inspect 
(32 byte hex)") + .required(true) + .value_parser(is_valid_32_byte_hex), + ) + .arg( + arg!(--chainid ) + .help("Chain ID where the transaction is located") + .value_parser(value_parser!(u64)) + .default_value("1"), + ), + ) .get_matches(); match matches.get_count("verbose") { @@ -971,6 +988,7 @@ fn process(matches: ArgMatches) -> Result<(), ValidationError> { critical_storage_variables, critical_events, storage_var_table, + unused_storage_var_table: _, event_table, all_events, proxy_warning, @@ -981,7 +999,7 @@ fn process(matches: ArgMatches) -> Result<(), ValidationError> { init_block_num, project, artifacts, - env, + &env, build_cache, libraries.clone(), zerovalue, @@ -994,77 +1012,24 @@ fn process(matches: ArgMatches) -> Result<(), ValidationError> { dumped.critical_storage_variables = critical_storage_variables; dumped.critical_events = critical_events; - let mut pc = 1; - println!(); - println!("DVF Initialization complete. Please follow these steps:"); + let unused_storage_var_table = Table::new(); - if project_info.compiler_version < FIRST_STORAGE_LAYOUT { - println!( - "{}. Warning. You are using an old compiler without storage layout. There will be no storage decoding.", pc - ); - pc += 1; - } else if proxy_warning && sub_m.get_one::("implementation").is_none() { - println!( - "{}. Warning. Some storage slots could not be decoded. This might happen because this is a proxy contract. In that case, use --implementation to decode more.", pc - ); - pc += 1; - } - - println!("{pc}. 
Validate that the results in the table below are as expected."); - pc += 1; - verify_bytecode::print_generation_summary( - &project.to_string_lossy().to_string(), - &dumped.contract_name, - &dumped.address, - compare_status, + finalize_init_and_print( + project.to_string_lossy().to_string(), + &mut dumped, + output_path, &project_info, + proxy_warning, + &storage_var_table, + &unused_storage_var_table, + &all_events, + &event_table, + init_block_num, + compare_status, &rpc_code, &pretty_printer, - ); - if !dumped.critical_storage_variables.is_empty() { - println!( - "{}. Select critical storage variables by deleting the others from {}.", - pc, - output_path.display() - ); - pc += 1; - - if storage_var_table.is_empty() { - println!(" No values were decoded, this could be because it is a proxy contract or because of an old compiler version."); - } else { - println!(" Below you see decoded values for non-zero storage variables:"); - storage_var_table.printstd(); - } - } - - if !all_events.is_empty() { - println!( - "{}. Select critical events by deleting the others from {}", - pc, - output_path.display() - ); - pc += 1; - - if event_table.is_empty() { - println!(" No events occurred up until block {}.", init_block_num); - } else { - println!(" Event occurrences up to block {}:", init_block_num); - event_table.printstd(); - } - } - - println!( - "{}. Decide whether you want to signal that the contract is insecure, if so set the insecure flag to true.", pc - ); - pc += 1; - - println!( - "{}. Decide if this validation should have an expiry date. 
Also you can fill in additional, unvalidated metadata.", pc - ); - - dumped.generate_id()?; - dumped.write_to_file(output_path)?; - println!("Wrote DVF to {}!", output_path.display()); + sub_m.get_one::("implementation").is_some(), + )?; exit(0); } Some(("id", sub_m)) => { @@ -1641,8 +1606,419 @@ fn process(matches: ArgMatches) -> Result<(), ValidationError> { println!("Bytecode check succeeded!"); exit(0); } + Some(("inspect-tx", sub_m)) => { + let tx_hash = sub_m.get_one::("txhash").unwrap(); + let chain_id = *sub_m.get_one::("chainid").unwrap(); + + config.set_chain_id(chain_id)?; + + let mut pc = 1_u64; + let progress_mode = ProgressMode::InspectTx; + + print_progress("Inspecting transaction.", &mut pc, &progress_mode); + println!("Chain ID: {}", chain_id); + + let (call_addresses, create_addresses) = web3::get_all_addresses(&config, tx_hash)?; + let (block_num, _, _) = get_transaction_details(&config, tx_hash)?; + println!("The transaction called the following contracts:"); + for address in &call_addresses { + println!("- {}", address); + } + println!("The transaction created the following contracts:"); + for address in &create_addresses { + println!("- {}", address); + } + + let registry = registry::Registry::from_config(&config)?; + let pretty_printer = PrettyPrinter::new(&config, Some(®istry)); + + // Fetch and cache debug trace once; keep anvil alive until end + let (cached_trace, cached_anvil_config, cached_anvil_instance) = + web3::get_eth_debug_trace_sim(&config, tx_hash)?; + + print_progress("Checking called contracts.", &mut pc, &progress_mode); + for address in &call_addresses { + println!("Checking contract: {}", address); + inspect_called_contract( + &config, + chain_id, + address, + block_num, + tx_hash, + &pretty_printer, + &mut pc, + &progress_mode, + Some(vec![cached_trace.clone()]), + cached_anvil_config.as_ref(), + )?; + } + + print_progress("Checking created contracts.", &mut pc, &progress_mode); + for address in &create_addresses { + 
println!("Checking contract: {}", address); + inspect_called_contract( + &config, + chain_id, + address, + block_num, + tx_hash, + &pretty_printer, + &mut pc, + &progress_mode, + Some(vec![cached_trace.clone()]), + cached_anvil_config.as_ref(), + )?; + } + + // After all inspections, stop cached anvil instance if present + if let Some(anvil_instance) = cached_anvil_instance { + web3::stop_anvil_instance(anvil_instance); + } + + exit(0); + } _ => Err(ValidationError::Error( "Please specify a command.".to_string(), )), } } + +#[allow(clippy::too_many_arguments)] +fn finalize_init_and_print( + project_dir: String, + dumped: &mut parse::CompleteDVF, + output_path: &Path, + project_info: &ProjectInfo, + proxy_warning: bool, + storage_var_table: &Table, + unused_storage_var_table: &Table, + all_events: &[alloy::json_abi::Event], + event_table: &Table, + init_block_num: u64, + compare_status: CompareBytecode, + rpc_code: &str, + pretty_printer: &PrettyPrinter, + implementation_used: bool, +) -> Result<(), ValidationError> { + let mut pc = 1; + println!(); + println!("DVF Initialization complete. Please follow these steps:"); + + if project_info.compiler_version < FIRST_STORAGE_LAYOUT { + println!( + "{}. Warning. You are using an old compiler without storage layout. There will be no storage decoding.", pc + ); + pc += 1; + } else if proxy_warning && !implementation_used { + println!( + "{}. Warning. Some storage slots could not be decoded. This might happen because this is a proxy contract. In that case, use --implementation to decode more.", pc + ); + pc += 1; + } + + println!("{pc}. Validate that the results in the table below are as expected."); + pc += 1; + verify_bytecode::print_generation_summary( + &project_dir, + &dumped.contract_name, + &dumped.address, + compare_status, + project_info, + rpc_code, + pretty_printer, + ); + if !dumped.critical_storage_variables.is_empty() { + println!( + "{}. 
Select critical storage variables by deleting the others from {}.", + pc, + output_path.display() + ); + pc += 1; + + if storage_var_table.is_empty() { + println!(" No values were decoded, this could be because it is a proxy contract or because of an old compiler version."); + } else { + println!(" Below you see decoded values for non-zero storage variables:"); + storage_var_table.printstd(); + } + } + + if !unused_storage_var_table.is_empty() { + println!("Unknown storage changes:"); + unused_storage_var_table.printstd(); + println!(); + } + + if !all_events.is_empty() { + println!( + "{}. Select critical events by deleting the others from {}", + pc, + output_path.display() + ); + pc += 1; + + if event_table.is_empty() { + println!(" No events occurred up until block {}.", init_block_num); + } else { + println!(" Event occurrences up to block {}:", init_block_num); + event_table.printstd(); + } + } + + println!( + "{}. Decide whether you want to signal that the contract is insecure, if so set the insecure flag to true.", pc + ); + pc += 1; + + println!( + "{}. Decide if this validation should have an expiry date. 
Also you can fill in additional, unvalidated metadata.", pc + ); + + dumped.generate_id()?; + dumped.write_to_file(output_path)?; + println!("Wrote DVF to {}!", output_path.display()); + Ok(()) +} + +#[allow(clippy::too_many_arguments)] +fn inspect_called_contract( + config: &DVFConfig, + chain_id: u64, + address: &Address, + block_num: u64, + tx_hash: &String, + pretty_printer: &PrettyPrinter, + pc: &mut u64, + progress_mode: &ProgressMode, + cached_traces: Option>, + cached_anvil_config: Option<&DVFConfig>, +) -> Result<(), ValidationError> { + let project_config = config.get_project_config(address, chain_id); + if let Some(project_config) = project_config { + println!( + "Project configuration found for contract: {} ({})", + address, project_config.contract_name + ); + let mut pc_sub = 1_u64; + let progress_mode_sub = ProgressMode::InspectTxSub; + let mut dumped = + parse::CompleteDVF::new(project_config.contract_name.as_str(), address, chain_id)?; + + let (deployment_block_num, deployment_tx) = + if let Some(depl_tx) = project_config.deployment_tx.as_ref() { + let (block_num, _, _) = get_transaction_details(config, depl_tx.as_str())?; + (block_num, depl_tx.clone()) + } else { + web3::get_deployment(config, &dumped.address)? 
+ }; + dumped.deployment_block_num = deployment_block_num; + dumped.deployment_tx = deployment_tx; + dumped.init_block_num = block_num; + + print_progress("Getting code hash.", &mut pc_sub, &progress_mode_sub); + let rpc_code_hash = web3::get_eth_codehash(config, &dumped.address, deployment_block_num)?; + dumped.codehash = rpc_code_hash; + + print_progress( + "Fetching on-chain bytecode.", + &mut pc_sub, + &progress_mode_sub, + ); + let rpc_code = web3::get_eth_code(config, &dumped.address, deployment_block_num)?; + + print_progress("Fetching init code.", &mut pc_sub, &progress_mode_sub); + let init_code = web3::get_init_code(config, &dumped.deployment_tx, &dumped.address)?; + + debug!("Fetching forge output"); + let compile_output = match project_config.build_cache_path { + None => "Compiling local code.", + Some(_) => "Loading build cache.", + }; + print_progress(compile_output, pc, progress_mode); + let mut project_info = ProjectInfo::new( + &project_config.contract_name, + Path::new(&project_config.project_path), + project_config.environment, + Path::new(&project_config.artifacts_path), + project_config.build_cache_path.as_ref(), + project_config.libraries.clone(), + )?; + + print_progress("Comparing bytecode.", &mut pc_sub, &progress_mode_sub); + let compare_status = + CompareBytecode::compare(&mut project_info, project_config.factory, &rpc_code); + + if !compare_status.matched { + return Err(ValidationError::from( + "Generation Failed. Bytecode mismatch.", + )); + } + + print_progress("Comparing initcode.", &mut pc_sub, &progress_mode_sub); + let compare_init = + CompareInitCode::compare(&mut project_info, &init_code, project_config.factory); + if !compare_init.matched { + return Err(ValidationError::from( + "Initcode mismatch. 
Consider setting the factory flag in the project configuration if this is a factory contract.", + )); + } + // immutable values are set in CompareBytecode::compare so this has to be after the call + dumped.copy_immutables(&project_info, pretty_printer); + + debug!("Copying parsed constructor arguments to dvf file"); + dumped.copy_constructor_args(&project_info, pretty_printer); + + let DiscoveryResult { + critical_storage_variables, + critical_events, + storage_var_table, + unused_storage_var_table, + event_table, + all_events, + proxy_warning, + } = discover_storage_and_events(DiscoveryParams { + config, + contract_name: &project_config.contract_name, + address, + start_block_num: block_num, + end_block_num: block_num, + project: Some(PathBuf::from(&project_config.project_path)).as_ref(), + artifacts: &project_config.artifacts_path, + env: project_config.environment, + build_cache: project_config.build_cache_path.as_ref(), + libraries: project_config.libraries.clone(), + implementation_name: project_config + .implementation_config + .as_ref() + .map(|impl_config| impl_config.contract_name.as_str()), + implementation_project: project_config + .implementation_config + .as_ref() + .map(|impl_config| PathBuf::from(&impl_config.project_path)) + .as_ref(), + implementation_env: project_config + .implementation_config + .as_ref() + .map(|impl_config| impl_config.environment) + .as_ref(), + implementation_artifacts: project_config + .implementation_config + .as_ref() + .map(|impl_config| impl_config.artifacts_path.as_str()), + implementation_build_cache: project_config + .implementation_config + .as_ref() + .and_then(|impl_config| impl_config.build_cache_path.as_ref()), + zerovalue: false, + event_topics: None, + pc: &mut pc_sub, + progress_mode: &progress_mode_sub, + use_storage_range: false, + tx_hashes: Some(vec![tx_hash.clone()]), + cached_traces, + cached_anvil_config, + })?; + + dumped.critical_storage_variables = critical_storage_variables; + dumped.critical_events 
= critical_events; + + finalize_init_and_print( + project_config.project_path.to_string_lossy().to_string(), + &mut dumped, + &project_config.output_path, + &project_info, + proxy_warning, + &storage_var_table, + &unused_storage_var_table, + &all_events, + &event_table, + block_num, + compare_status, + &rpc_code, + pretty_printer, + project_config.implementation_config.is_some(), + )?; + } else { + println!( + "No project configuration found for contract {}. Skipping bytecode check and storage decoding.", + address + ); + + let mut pc_sub = 1_u64; + let progress_mode_sub = ProgressMode::InspectTxSubNoconf; + + let tx_hashes = vec![tx_hash.clone()]; + + print_progress("Getting storage snapshot.", &mut pc_sub, &progress_mode_sub); + let snapshot = + web3::StorageSnapshot::from_api(config, address, block_num, &tx_hashes, false)?; + + print_progress("Parsing storage snapshot.", &mut pc_sub, &progress_mode_sub); + let mut storage_var_table = Table::new(); + storage_var_table.set_titles(row!["Slot", "Offset", "Value"]); + let unused = snapshot.get_unused_nonzero_storage_slots(); + for unused_part in unused { + let slot_hex = format!("0x{:x}", unused_part.slot); + let value_hex = format!("0x{}", hex::encode(&unused_part.value)); + storage_var_table.add_row(row![ + slot_hex, + unused_part.offset, + wrap_by_length(&value_hex, 66, "\n"), + ]); + } + + print_progress("Obtaining past events.", &mut pc_sub, &progress_mode_sub); + let seen_events = web3::get_eth_events(config, address, block_num, block_num, &vec![])?; + + let mut event_table = Table::new(); + let mut critical_events: Vec = vec![]; + let all_topics_0: HashSet = + seen_events.iter().map(|e| *e.topic0().unwrap()).collect(); + for unused_topic in all_topics_0 { + let mut table_head = false; + // Collect Occurrences + let mut occurrences: Vec = vec![]; + for seen_event in &seen_events { + let log_inner = &seen_event.inner; + if seen_event.topic0() == Some(&unused_topic) { + // Add Event Name to table + if !table_head 
{ + event_table.add_row(row![unused_topic]); + table_head = true; + } + // Add Event Occurrence to table + let msg = format!("{}", seen_event.inner.data.data); + event_table.add_row(row![format!("- {}", wrap_by_length(&msg, 64, "\n "))]); + let occurrence = parse::DVFEventOccurrence { + topics: log_inner.data.topics().to_vec(), + data: log_inner.data.data.clone(), + }; + occurrences.push(occurrence); + } + } + let event_entry = parse::DVFEventEntry { + sig: String::from("Unknown Signature"), + topic0: unused_topic, + occurrences, + }; + critical_events.push(event_entry); + } + + if !storage_var_table.is_empty() { + println!("Storage changes of {} in transaction {}:", address, tx_hash); + storage_var_table.printstd(); + if !event_table.is_empty() { + println!(); + } + } + if !event_table.is_empty() { + println!( + "Event occurrences of {} in transaction {}:", + address, tx_hash + ); + event_table.printstd(); + } + } + + Ok(()) +} diff --git a/src/gentest.rs b/src/gentest.rs index 2e656ec..18aa4c9 100644 --- a/src/gentest.rs +++ b/src/gentest.rs @@ -115,6 +115,8 @@ fn gen_test(matches: &ArgMatches) -> Result<(), ValidationError> { &vec![], &HashMap::new(), true, + &config, + web3::get_eth_block_number(&config)? 
- 1, )?; let serialized_res = serde_json::to_string_pretty(&critical_vars)?; diff --git a/tests/Contracts/src/CrazyHiddenStruct.sol b/tests/Contracts/src/CrazyHiddenStruct.sol index 8856a53..e14f9bd 100644 --- a/tests/Contracts/src/CrazyHiddenStruct.sol +++ b/tests/Contracts/src/CrazyHiddenStruct.sol @@ -39,8 +39,10 @@ contract CrazyHiddenStruct { bytes32 private constant StorageLocation2 = 0x852cbd6b186221cbf354c68826ab57cef1512cf2f5d959ca4501e155cbea7ae8; bytes32 private constant StorageLocation3 = 0x9482765040f1c978ae595e69b3ad0e4697ca0d1e0581a09be85cfb4a8462e752; bytes32 private constant StorageLocation4 = 0xe82aa111a62567be9a414850f7168d2e6c9f9d61a82b90598df0a59035cd53a6; - bytes32 private constant DirectStorageLocation1 = 0xbfbceebbfa6e5996c6a04ac6db0e347528756a4f073935304cc6139dcc2fb653; - bytes32 private constant DirectStorageLocation2 = 0x42d0407cb447148fd182bf527909ab1ba2fbaefe3f25cbe9851153586910b294; + bytes32 private constant DirectStorageLocation1 = + 0xbfbceebbfa6e5996c6a04ac6db0e347528756a4f073935304cc6139dcc2fb653; + bytes32 private constant DirectStorageLocation2 = + 0x42d0407cb447148fd182bf527909ab1ba2fbaefe3f25cbe9851153586910b294; bytes32 private constant KeccakStorageLocation1 = keccak256("keccak1"); bytes32 private constant KeccakStorageLocation2 = bytes32(uint256(keccak256("keccak2")) - 1); diff --git a/tests/Contracts/src/MyToken.sol b/tests/Contracts/src/MyToken.sol index 0c28cbf..b50741b 100644 --- a/tests/Contracts/src/MyToken.sol +++ b/tests/Contracts/src/MyToken.sol @@ -78,6 +78,7 @@ interface IERC20Upgradeable { */ function transferFrom(address from, address to, uint256 amount) external returns (bool); } + // OpenZeppelin Contracts v4.4.1 (token/ERC20/extensions/IERC20Metadata.sol) /** @@ -101,6 +102,7 @@ interface IERC20MetadataUpgradeable is IERC20Upgradeable { */ function decimals() external view returns (uint8); } + // OpenZeppelin Contracts v4.4.1 (utils/Context.sol) // OpenZeppelin Contracts (last updated v4.9.0) 
(proxy/utils/Initializable.sol) @@ -422,7 +424,8 @@ abstract contract Initializable { modifier initializer() { bool isTopLevelCall = !_initializing; require( - (isTopLevelCall && _initialized < 1) || (!AddressUpgradeable.isContract(address(this)) && _initialized == 1), + (isTopLevelCall && _initialized < 1) + || (!AddressUpgradeable.isContract(address(this)) && _initialized == 1), "Initializable: contract is already initialized" ); _initialized = 1;