diff --git a/Cargo.lock b/Cargo.lock index 846624478eeac..6426450d80888 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3841,6 +3841,7 @@ dependencies = [ "similar", "similar-asserts", "solar-parse", + "solar-sema", "soldeer-commands", "strum 0.27.1", "svm-rs", @@ -4125,6 +4126,7 @@ dependencies = [ "clap", "color-eyre", "dotenvy", + "dunce", "eyre", "forge-fmt", "foundry-block-explorers", @@ -4143,6 +4145,7 @@ dependencies = [ "rustls", "serde", "serde_json", + "solar-sema", "strsim", "strum 0.27.1", "tempfile", diff --git a/crates/anvil/core/src/eth/mod.rs b/crates/anvil/core/src/eth/mod.rs index ac7db74892003..5b77e4d0ce380 100644 --- a/crates/anvil/core/src/eth/mod.rs +++ b/crates/anvil/core/src/eth/mod.rs @@ -664,7 +664,7 @@ pub enum EthRequest { /// Add an address to the [`DelegationCapability`] of the wallet /// - /// [`DelegationCapability`]: wallet::DelegationCapability + /// [`DelegationCapability`]: wallet::DelegationCapability #[serde(rename = "anvil_addCapability", with = "sequence")] AnvilAddCapability(Address), diff --git a/crates/cheatcodes/assets/cheatcodes.json b/crates/cheatcodes/assets/cheatcodes.json index eb541e269a888..f1d2b617e3a5f 100644 --- a/crates/cheatcodes/assets/cheatcodes.json +++ b/crates/cheatcodes/assets/cheatcodes.json @@ -4294,6 +4294,106 @@ "status": "stable", "safety": "unsafe" }, + { + "func": { + "id": "eip712HashStruct_0", + "description": "Generates the struct hash of the canonical EIP-712 type representation and its abi-encoded data.\nSupports 2 different inputs:\n 1. Name of the type (i.e. \"PermitSingle\"):\n * requires previous binding generation with `forge bind-json`.\n * bindings will be retrieved from the path configured in `foundry.toml`.\n 2. String representation of the type (i.e. 
\"Foo(Bar bar) Bar(uint256 baz)\").\n * Note: the cheatcode will use the canonical type even if the input is malformated\n with the wrong order of elements or with extra whitespaces.", + "declaration": "function eip712HashStruct(string calldata typeNameOrDefinition, bytes calldata abiEncodedData) external pure returns (bytes32 typeHash);", + "visibility": "external", + "mutability": "pure", + "signature": "eip712HashStruct(string,bytes)", + "selector": "0xaedeaebc", + "selectorBytes": [ + 174, + 222, + 174, + 188 + ] + }, + "group": "utilities", + "status": "stable", + "safety": "safe" + }, + { + "func": { + "id": "eip712HashStruct_1", + "description": "Generates the struct hash of the canonical EIP-712 type representation and its abi-encoded data.\nRequires previous binding generation with `forge bind-json`.\nParams:\n * `bindingsPath`: path where the output of `forge bind-json` is stored.\n * `typeName`: Name of the type (i.e. \"PermitSingle\").\n * `abiEncodedData`: ABI-encoded data for the struct that is being hashed.", + "declaration": "function eip712HashStruct(string calldata bindingsPath, string calldata typeName, bytes calldata abiEncodedData) external pure returns (bytes32 typeHash);", + "visibility": "external", + "mutability": "pure", + "signature": "eip712HashStruct(string,string,bytes)", + "selector": "0x6d06c57c", + "selectorBytes": [ + 109, + 6, + 197, + 124 + ] + }, + "group": "utilities", + "status": "stable", + "safety": "safe" + }, + { + "func": { + "id": "eip712HashType_0", + "description": "Generates the hash of the canonical EIP-712 type representation.\nSupports 2 different inputs:\n 1. Name of the type (i.e. \"Transaction\"):\n * requires previous binding generation with `forge bind-json`.\n * bindings will be retrieved from the path configured in `foundry.toml`.\n 2. String representation of the type (i.e. 
\"Foo(Bar bar) Bar(uint256 baz)\").\n * Note: the cheatcode will output the canonical type even if the input is malformated\n with the wrong order of elements or with extra whitespaces.", + "declaration": "function eip712HashType(string calldata typeNameOrDefinition) external pure returns (bytes32 typeHash);", + "visibility": "external", + "mutability": "pure", + "signature": "eip712HashType(string)", + "selector": "0x6792e9e2", + "selectorBytes": [ + 103, + 146, + 233, + 226 + ] + }, + "group": "utilities", + "status": "stable", + "safety": "safe" + }, + { + "func": { + "id": "eip712HashType_1", + "description": "Generates the hash of the canonical EIP-712 type representation.\nRequires previous binding generation with `forge bind-json`.\nParams:\n * `bindingsPath`: path where the output of `forge bind-json` is stored.\n * `typeName`: Name of the type (i.e. \"Transaction\").", + "declaration": "function eip712HashType(string calldata bindingsPath, string calldata typeName) external pure returns (bytes32 typeHash);", + "visibility": "external", + "mutability": "pure", + "signature": "eip712HashType(string,string)", + "selector": "0x18fb6406", + "selectorBytes": [ + 24, + 251, + 100, + 6 + ] + }, + "group": "utilities", + "status": "stable", + "safety": "safe" + }, + { + "func": { + "id": "eip712HashTypedData", + "description": "Generates a ready-to-sign digest of human-readable typed data following the EIP-712 standard.", + "declaration": "function eip712HashTypedData(string calldata jsonData) external pure returns (bytes32 digest);", + "visibility": "external", + "mutability": "pure", + "signature": "eip712HashTypedData(string)", + "selector": "0xea25e615", + "selectorBytes": [ + 234, + 37, + 230, + 21 + ] + }, + "group": "utilities", + "status": "stable", + "safety": "safe" + }, { "func": { "id": "ensNamehash", diff --git a/crates/cheatcodes/spec/src/vm.rs b/crates/cheatcodes/spec/src/vm.rs index 1dac51fd0ca5a..a734c7a033ef1 100644 --- 
a/crates/cheatcodes/spec/src/vm.rs +++ b/crates/cheatcodes/spec/src/vm.rs @@ -2888,6 +2888,55 @@ interface Vm { /// catch (bytes memory interceptedInitcode) { initcode = interceptedInitcode; } #[cheatcode(group = Utilities, safety = Unsafe)] function interceptInitcode() external; + + /// Generates the hash of the canonical EIP-712 type representation. + /// + /// Supports 2 different inputs: + /// 1. Name of the type (i.e. "Transaction"): + /// * requires previous binding generation with `forge bind-json`. + /// * bindings will be retrieved from the path configured in `foundry.toml`. + /// + /// 2. String representation of the type (i.e. "Foo(Bar bar) Bar(uint256 baz)"). + /// * Note: the cheatcode will output the canonical type even if the input is malformated + /// with the wrong order of elements or with extra whitespaces. + #[cheatcode(group = Utilities)] + function eip712HashType(string calldata typeNameOrDefinition) external pure returns (bytes32 typeHash); + + /// Generates the hash of the canonical EIP-712 type representation. + /// Requires previous binding generation with `forge bind-json`. + /// + /// Params: + /// * `bindingsPath`: path where the output of `forge bind-json` is stored. + /// * `typeName`: Name of the type (i.e. "Transaction"). + #[cheatcode(group = Utilities)] + function eip712HashType(string calldata bindingsPath, string calldata typeName) external pure returns (bytes32 typeHash); + + /// Generates the struct hash of the canonical EIP-712 type representation and its abi-encoded data. + /// + /// Supports 2 different inputs: + /// 1. Name of the type (i.e. "PermitSingle"): + /// * requires previous binding generation with `forge bind-json`. + /// * bindings will be retrieved from the path configured in `foundry.toml`. + /// + /// 2. String representation of the type (i.e. "Foo(Bar bar) Bar(uint256 baz)"). 
+ /// * Note: the cheatcode will use the canonical type even if the input is malformated + /// with the wrong order of elements or with extra whitespaces. + #[cheatcode(group = Utilities)] + function eip712HashStruct(string calldata typeNameOrDefinition, bytes calldata abiEncodedData) external pure returns (bytes32 typeHash); + + /// Generates the struct hash of the canonical EIP-712 type representation and its abi-encoded data. + /// Requires previous binding generation with `forge bind-json`. + /// + /// Params: + /// * `bindingsPath`: path where the output of `forge bind-json` is stored. + /// * `typeName`: Name of the type (i.e. "PermitSingle"). + /// * `abiEncodedData`: ABI-encoded data for the struct that is being hashed. + #[cheatcode(group = Utilities)] + function eip712HashStruct(string calldata bindingsPath, string calldata typeName, bytes calldata abiEncodedData) external pure returns (bytes32 typeHash); + + /// Generates a ready-to-sign digest of human-readable typed data following the EIP-712 standard. + #[cheatcode(group = Utilities)] + function eip712HashTypedData(string calldata jsonData) external pure returns (bytes32 digest); } } diff --git a/crates/cheatcodes/src/config.rs b/crates/cheatcodes/src/config.rs index 210c76553cba7..1ad98cd93e92b 100644 --- a/crates/cheatcodes/src/config.rs +++ b/crates/cheatcodes/src/config.rs @@ -33,6 +33,8 @@ pub struct CheatsConfig { pub rpc_endpoints: ResolvedRpcEndpoints, /// Project's paths as configured pub paths: ProjectPathsConfig, + /// Path to the directory that contains the bindings generated by `forge bind-json`. 
+ pub bind_json_path: PathBuf, /// Filesystem permissions for cheatcodes like `writeFile`, `readFile` pub fs_permissions: FsPermissions, /// Project root @@ -98,6 +100,7 @@ impl CheatsConfig { no_storage_caching: config.no_storage_caching, rpc_endpoints, paths: config.project_paths(), + bind_json_path: config.bind_json.out.clone(), fs_permissions: config.fs_permissions.clone().joined(config.root.as_ref()), root: config.root.clone(), broadcast: config.root.clone().join(&config.broadcast), @@ -303,6 +306,7 @@ impl Default for CheatsConfig { paths: ProjectPathsConfig::builder().build_with_root("./"), fs_permissions: Default::default(), root: Default::default(), + bind_json_path: PathBuf::default().join("utils").join("jsonBindings.sol"), broadcast: Default::default(), allowed_paths: vec![], evm_opts: Default::default(), diff --git a/crates/cheatcodes/src/utils.rs b/crates/cheatcodes/src/utils.rs index 4bdc239036d53..3eed1b1bad230 100644 --- a/crates/cheatcodes/src/utils.rs +++ b/crates/cheatcodes/src/utils.rs @@ -1,14 +1,17 @@ //! Implementations of [`Utilities`](spec::Group::Utilities) cheatcodes. use crate::{Cheatcode, Cheatcodes, CheatcodesExecutor, CheatsCtxt, Result, Vm::*}; -use alloy_dyn_abi::{DynSolType, DynSolValue}; +use alloy_dyn_abi::{eip712_parser::EncodeType, DynSolType, DynSolValue, Resolver, TypedData}; use alloy_ens::namehash; -use alloy_primitives::{aliases::B32, map::HashMap, B64, U256}; +use alloy_primitives::{aliases::B32, keccak256, map::HashMap, Bytes, B64, U256}; use alloy_sol_types::SolValue; +use foundry_common::{fs, TYPE_BINDING_PREFIX}; +use foundry_config::fs_permissions::FsAccessKind; use foundry_evm_core::constants::DEFAULT_CREATE2_DEPLOYER; use proptest::prelude::Strategy; use rand::{seq::SliceRandom, Rng, RngCore}; use revm::context::JournalTr; +use std::path::PathBuf; /// Contains locations of traces ignored via cheatcodes. 
/// @@ -314,3 +317,148 @@ fn random_int(state: &mut Cheatcodes, bits: Option) -> Result { .current() .abi_encode()) } + +impl Cheatcode for eip712HashType_0Call { + fn apply(&self, state: &mut Cheatcodes) -> Result { + let Self { typeNameOrDefinition } = self; + + let type_def = get_canonical_type_def(typeNameOrDefinition, state, None)?; + + Ok(keccak256(type_def.as_bytes()).to_vec()) + } +} + +impl Cheatcode for eip712HashType_1Call { + fn apply(&self, state: &mut Cheatcodes) -> Result { + let Self { bindingsPath, typeName } = self; + + let path = state.config.ensure_path_allowed(bindingsPath, FsAccessKind::Read)?; + let type_def = get_type_def_from_bindings(typeName, path, &state.config.root)?; + + Ok(keccak256(type_def.as_bytes()).to_vec()) + } +} + +impl Cheatcode for eip712HashStruct_0Call { + fn apply(&self, state: &mut Cheatcodes) -> Result { + let Self { typeNameOrDefinition, abiEncodedData } = self; + + let type_def = get_canonical_type_def(typeNameOrDefinition, state, None)?; + let primary = &type_def[..type_def.find('(').unwrap_or(type_def.len())]; + + get_struct_hash(primary, &type_def, abiEncodedData) + } +} + +impl Cheatcode for eip712HashStruct_1Call { + fn apply(&self, state: &mut Cheatcodes) -> Result { + let Self { bindingsPath, typeName, abiEncodedData } = self; + + let path = state.config.ensure_path_allowed(bindingsPath, FsAccessKind::Read)?; + let type_def = get_type_def_from_bindings(typeName, path, &state.config.root)?; + + get_struct_hash(typeName, &type_def, abiEncodedData) + } +} + +impl Cheatcode for eip712HashTypedDataCall { + fn apply(&self, _state: &mut Cheatcodes) -> Result { + let Self { jsonData } = self; + let typed_data: TypedData = serde_json::from_str(jsonData)?; + let digest = typed_data.eip712_signing_hash()?; + + Ok(digest.to_vec()) + } +} + +/// Returns EIP-712 canonical type definition from the provided string type representation or type +/// name. 
If type name provided, then it looks up bindings from file generated by `forge bind-json`. +fn get_canonical_type_def( + name_or_def: &String, + state: &mut Cheatcodes, + path: Option, +) -> Result { + let type_def = if name_or_def.contains('(') { + // If the input contains '(', it must be the type definition. + EncodeType::parse(name_or_def).and_then(|parsed| parsed.canonicalize())? + } else { + // Otherwise, it must be the type name. + let path = path.as_ref().unwrap_or(&state.config.bind_json_path); + let path = state.config.ensure_path_allowed(path, FsAccessKind::Read)?; + get_type_def_from_bindings(name_or_def, path, &state.config.root)? + }; + + Ok(type_def) +} + +/// Returns the EIP-712 type definition from the bindings in the provided path. +/// Assumes that read validation for the path has already been checked. +fn get_type_def_from_bindings(name: &String, path: PathBuf, root: &PathBuf) -> Result { + let content = fs::read_to_string(&path)?; + + let type_defs: HashMap<&str, &str> = content + .lines() + .filter_map(|line| { + let relevant = line.trim().strip_prefix(TYPE_BINDING_PREFIX)?; + let (name, def) = relevant.split_once('=')?; + Some((name.trim(), def.trim().strip_prefix('"')?.strip_suffix("\";")?)) + }) + .collect(); + + match type_defs.get(name.as_str()) { + Some(value) => Ok(value.to_string()), + None => { + let bindings = + type_defs.keys().map(|k| format!(" - {k}")).collect::>().join("\n"); + + bail!( + "'{}' not found in '{}'.{}", + name, + path.strip_prefix(root).unwrap_or(&path).to_string_lossy(), + if bindings.is_empty() { + String::new() + } else { + format!("\nAvailable bindings:\n{bindings}\n") + } + ); + } + } +} + +/// Returns the EIP-712 struct hash for provided name, definition and ABI encoded data. 
+fn get_struct_hash(primary: &str, type_def: &String, abi_encoded_data: &Bytes) -> Result { + let mut resolver = Resolver::default(); + + // Populate the resolver by ingesting the canonical type definition, and then get the + // corresponding `DynSolType` of the primary type. + resolver + .ingest_string(type_def) + .map_err(|e| fmt_err!("Resolver failed to ingest type definition: {e}"))?; + + let resolved_sol_type = resolver + .resolve(primary) + .map_err(|e| fmt_err!("Failed to resolve EIP-712 primary type '{primary}': {e}"))?; + + // ABI-decode the bytes into `DynSolValue::CustomStruct`. + let sol_value = resolved_sol_type.abi_decode(abi_encoded_data.as_ref()).map_err(|e| { + fmt_err!("Failed to ABI decode using resolved_sol_type directly for '{primary}': {e}.") + })?; + + // Use the resolver to properly encode the data. + let encoded_data: Vec = resolver + .encode_data(&sol_value) + .map_err(|e| fmt_err!("Failed to EIP-712 encode data for struct '{primary}': {e}"))? + .ok_or_else(|| fmt_err!("EIP-712 data encoding returned 'None' for struct '{primary}'"))?; + + // Compute the type hash of the primary type. + let type_hash = resolver + .type_hash(primary) + .map_err(|e| fmt_err!("Failed to compute typeHash for EIP712 type '{primary}': {e}"))?; + + // Compute the struct hash of the concatenated type hash and encoded data. 
+ let mut bytes_to_hash = Vec::with_capacity(32 + encoded_data.len()); + bytes_to_hash.extend_from_slice(type_hash.as_slice()); + bytes_to_hash.extend_from_slice(&encoded_data); + + Ok(keccak256(&bytes_to_hash).to_vec()) +} diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml index 2eade5bdd2ccd..0bff1f1f0461c 100644 --- a/crates/cli/Cargo.toml +++ b/crates/cli/Cargo.toml @@ -19,9 +19,10 @@ foundry-config.workspace = true foundry-debugger.workspace = true foundry-evm.workspace = true foundry-wallets.workspace = true +foundry-block-explorers.workspace = true foundry-compilers = { workspace = true, features = ["full"] } -foundry-block-explorers.workspace = true +solar-sema.workspace = true alloy-eips.workspace = true alloy-dyn-abi.workspace = true @@ -53,6 +54,7 @@ tracing.workspace = true tracy-client = { workspace = true, optional = true, features = ["demangle"] } yansi.workspace = true rustls = { workspace = true, features = ["ring"] } +dunce.workspace = true tracing-tracy = { version = "0.11", optional = true } diff --git a/crates/cli/src/opts/build/mod.rs b/crates/cli/src/opts/build/mod.rs index 55c61dcbbedd7..4deffb2a4c37d 100644 --- a/crates/cli/src/opts/build/mod.rs +++ b/crates/cli/src/opts/build/mod.rs @@ -8,6 +8,9 @@ pub use self::core::BuildOpts; mod paths; pub use self::paths::ProjectPathOpts; +mod utils; +pub use self::utils::{solar_pcx_from_build_opts, solar_pcx_from_solc_project}; + // A set of solc compiler settings that can be set via command line arguments, which are intended // to be merged into an existing `foundry_config::Config`. 
// diff --git a/crates/cli/src/opts/build/utils.rs b/crates/cli/src/opts/build/utils.rs new file mode 100644 index 0000000000000..004416c588439 --- /dev/null +++ b/crates/cli/src/opts/build/utils.rs @@ -0,0 +1,105 @@ +use crate::{opts::BuildOpts, utils::LoadConfig}; + +use eyre::Result; +use foundry_compilers::{ + artifacts::{Source, Sources}, + multi::{MultiCompilerLanguage, MultiCompilerParsedSource}, + solc::{SolcLanguage, SolcVersionedInput}, + CompilerInput, Graph, Project, +}; +use solar_sema::{interface::Session, ParsingContext}; +use std::path::PathBuf; + +/// Builds a Solar [`solar_sema::ParsingContext`] from [`BuildOpts`]. +/// +/// * Configures include paths, remappings and registers all in-memory sources so that solar can +/// operate without touching disk. +/// * If no `target_paths` are provided, all project files are processed. +/// * Only processes the subset of sources with the most up-to-date Solidity version. +pub fn solar_pcx_from_build_opts<'sess>( + sess: &'sess Session, + build: BuildOpts, + target_paths: Option>, +) -> Result> { + // Process build options + let config = build.load_config()?; + let project = config.ephemeral_project()?; + + let sources = match target_paths { + // If target files are provided, only process those sources + Some(targets) => { + let mut sources = Sources::new(); + for t in targets.into_iter() { + let path = dunce::canonicalize(t)?; + let source = Source::read(&path)?; + sources.insert(path, source); + } + sources + } + // Otherwise, process all project files + None => project.paths.read_input_files()?, + }; + + // Only process sources with latest Solidity version to avoid conflicts. + let graph = Graph::::resolve_sources(&project.paths, sources)?; + let (version, sources, _) = graph + // resolve graph into mapping language -> version -> sources + .into_sources_by_version(&project)? 
+ .sources + .into_iter() + // only interested in Solidity sources + .find(|(lang, _)| *lang == MultiCompilerLanguage::Solc(SolcLanguage::Solidity)) + .ok_or_else(|| eyre::eyre!("no Solidity sources"))? + .1 + .into_iter() + // always pick the latest version + .max_by(|(v1, _, _), (v2, _, _)| v1.cmp(v2)) + .unwrap(); + + let solc = SolcVersionedInput::build( + sources, + config.solc_settings()?, + SolcLanguage::Solidity, + version, + ); + + Ok(solar_pcx_from_solc_project(sess, &project, &solc, true)) +} + +/// Builds a Solar [`solar_sema::ParsingContext`] from a [`foundry_compilers::Project`] and a +/// [`SolcVersionedInput`]. +/// +/// * Configures include paths, remappings. +/// * Source files can be manually added if the param `add_source_file` is set to `false`. +pub fn solar_pcx_from_solc_project<'sess>( + sess: &'sess Session, + project: &Project, + solc: &SolcVersionedInput, + add_source_files: bool, +) -> ParsingContext<'sess> { + // Configure the parsing context with the paths, remappings and sources + let mut pcx = ParsingContext::new(sess); + + pcx.file_resolver + .set_current_dir(solc.cli_settings.base_path.as_ref().unwrap_or(&project.paths.root)); + for remapping in &project.paths.remappings { + pcx.file_resolver.add_import_remapping(solar_sema::interface::config::ImportRemapping { + context: remapping.context.clone().unwrap_or_default(), + prefix: remapping.name.clone(), + path: remapping.path.clone(), + }); + } + pcx.file_resolver.add_include_paths(solc.cli_settings.include_paths.iter().cloned()); + + if add_source_files { + for (path, source) in &solc.input.sources { + if let Ok(src_file) = + sess.source_map().new_source_file(path.clone(), source.content.as_str()) + { + pcx.add_file(src_file); + } + } + } + + pcx +} diff --git a/crates/common/src/constants.rs b/crates/common/src/constants.rs index 31c0a2345a9d7..161fb2a1110a1 100644 --- a/crates/common/src/constants.rs +++ b/crates/common/src/constants.rs @@ -45,7 +45,10 @@ pub const 
SYSTEM_TRANSACTION_TYPE: u8 = 126; /// Default user agent set as the header for requests that don't specify one. pub const DEFAULT_USER_AGENT: &str = concat!("foundry/", env!("CARGO_PKG_VERSION")); -/// Returns whether the sender is a known system sender that is the first tx in every block. +/// Prefix for auto-generated type bindings using `forge bind-json`. +pub const TYPE_BINDING_PREFIX: &str = "string constant schema_"; + +/// Returns whether the sender is a known L2 system sender that is the first tx in every block. /// /// Transactions from these senders usually don't have a any fee information OR set absurdly high fees that exceed the gas limit (See: ) /// diff --git a/crates/forge/Cargo.toml b/crates/forge/Cargo.toml index f9845b8a80180..2d5d77bcb532b 100644 --- a/crates/forge/Cargo.toml +++ b/crates/forge/Cargo.toml @@ -78,6 +78,7 @@ serde_json.workspace = true similar = { version = "2", features = ["inline"] } solang-parser.workspace = true solar-parse.workspace = true +solar-sema.workspace = true strum = { workspace = true, features = ["derive"] } thiserror.workspace = true tokio = { workspace = true, features = ["time"] } @@ -87,6 +88,7 @@ watchexec-events = "6.0" watchexec-signals = "5.0" clearscreen = "4.0" evm-disassembler.workspace = true +path-slash.workspace = true # doc server axum = { workspace = true, features = ["ws"] } @@ -109,7 +111,6 @@ reqwest = { workspace = true, features = ["json"] } mockall = "0.13" globset = "0.4" paste = "1.0" -path-slash = "0.2" similar-asserts.workspace = true svm = { package = "svm-rs", version = "0.5", default-features = false, features = [ "rustls", diff --git a/crates/forge/src/cmd/bind_json.rs b/crates/forge/src/cmd/bind_json.rs index 7c6bfaa52ad74..ecb76515b16a3 100644 --- a/crates/forge/src/cmd/bind_json.rs +++ b/crates/forge/src/cmd/bind_json.rs @@ -1,28 +1,30 @@ use super::eip712::Resolver; use clap::{Parser, ValueHint}; use eyre::Result; -use foundry_cli::{opts::BuildOpts, utils::LoadConfig}; -use 
foundry_common::{compile::with_compilation_reporter, fs}; +use foundry_cli::{ + opts::{solar_pcx_from_solc_project, BuildOpts}, + utils::LoadConfig, +}; +use foundry_common::{fs, TYPE_BINDING_PREFIX}; use foundry_compilers::{ - artifacts::{ - output_selection::OutputSelection, ContractDefinitionPart, Source, SourceUnit, - SourceUnitPart, Sources, - }, + artifacts::{Source, Sources}, multi::{MultiCompilerLanguage, MultiCompilerParsedSource}, - project::ProjectCompiler, - solc::SolcLanguage, - Graph, Project, + solc::{SolcLanguage, SolcVersionedInput}, + CompilerInput, Graph, Project, }; use foundry_config::Config; use itertools::Itertools; +use path_slash::PathExt; use rayon::prelude::*; +use semver::Version; use solar_parse::{ ast::{self, interface::source_map::FileName, visit::Visit, Arena, FunctionKind, Span, VarMut}, interface::Session, Parser as SolarParser, }; +use solar_sema::thread_local::ThreadLocal; use std::{ - collections::{BTreeMap, BTreeSet}, + collections::{BTreeMap, BTreeSet, HashSet}, fmt::{self, Write}, ops::ControlFlow, path::PathBuf, @@ -31,6 +33,8 @@ use std::{ foundry_config::impl_figment_convert!(BindJsonArgs, build); +const JSON_BINDINGS_PLACEHOLDER: &str = "library JsonBindings {}"; + /// CLI arguments for `forge bind-json`. #[derive(Clone, Debug, Parser)] pub struct BindJsonArgs { @@ -44,7 +48,7 @@ pub struct BindJsonArgs { impl BindJsonArgs { pub fn run(self) -> Result<()> { - self.preprocess()?.compile()?.find_structs()?.resolve_imports_and_aliases().write()?; + self.preprocess()?.find_structs()?.resolve_imports_and_aliases().write()?; Ok(()) } @@ -74,7 +78,7 @@ impl BindJsonArgs { let graph = Graph::::resolve_sources(&project.paths, sources)?; // We only generate bindings for a single Solidity version to avoid conflicts. - let mut sources = graph + let (version, mut sources, _) = graph // resolve graph into mapping language -> version -> sources .into_sources_by_version(&project)? 
.sources @@ -86,8 +90,7 @@ impl BindJsonArgs { .into_iter() // For now, we are always picking the latest version. .max_by(|(v1, _, _), (v2, _, _)| v1.cmp(v2)) - .unwrap() - .1; + .unwrap(); let sess = Session::builder().with_stderr_emitter().build(); let result = sess.enter_parallel(|| -> solar_parse::interface::Result<()> { @@ -114,9 +117,9 @@ impl BindJsonArgs { eyre::ensure!(result.is_ok(), "failed parsing"); // Insert empty bindings file. - sources.insert(target_path.clone(), Source::new("library JsonBindings {}")); + sources.insert(target_path.clone(), Source::new(JSON_BINDINGS_PLACEHOLDER)); - Ok(PreprocessedState { sources, target_path, project, config }) + Ok(PreprocessedState { version, sources, target_path, project, config }) } } @@ -237,8 +240,8 @@ impl StructToWrite { } } -#[derive(Debug)] struct PreprocessedState { + version: Version, sources: Sources, target_path: PathBuf, project: Project, @@ -246,117 +249,87 @@ struct PreprocessedState { } impl PreprocessedState { - fn compile(self) -> Result { - let Self { sources, target_path, mut project, config } = self; - - project.update_output_selection(|selection| { - *selection = OutputSelection::ast_output_selection(); - }); - - let output = with_compilation_reporter(false, || { - ProjectCompiler::with_sources(&project, sources)?.compile() - })?; + fn find_structs(self) -> Result { + let mut structs_to_write = Vec::new(); + let Self { version, sources, target_path, config, project } = self; - if output.has_compiler_errors() { - eyre::bail!("{output}"); - } + let settings = config.solc_settings()?; + let include = config.bind_json.include; + let exclude = config.bind_json.exclude; + let root = config.root; - // Collect ASTs by getting them from sources and converting into strongly typed - // `SourceUnit`s. Also strips root from paths. 
- let asts = output - .into_output() - .sources - .into_iter() - .filter_map(|(path, mut sources)| Some((path, sources.swap_remove(0).source_file.ast?))) - .map(|(path, ast)| { - Ok(( - path.strip_prefix(project.root()).unwrap_or(&path).to_path_buf(), - serde_json::from_str::(&serde_json::to_string(&ast)?)?, - )) - }) - .collect::>>()?; + let input = SolcVersionedInput::build(sources, settings, SolcLanguage::Solidity, version); - Ok(CompiledState { asts, target_path, config, project }) - } -} + let mut sess = Session::builder().with_stderr_emitter().build(); + sess.dcx = sess.dcx.set_flags(|flags| flags.track_diagnostics = false); -#[derive(Debug, Clone)] -struct CompiledState { - asts: BTreeMap, - target_path: PathBuf, - config: Config, - project: Project, -} + let result = sess.enter_parallel(|| -> Result<()> { + // Set up the parsing context with the project paths, without adding the source files + let mut parsing_context = solar_pcx_from_solc_project(&sess, &project, &input, false); -impl CompiledState { - fn find_structs(self) -> Result { - let Self { asts, target_path, config, project } = self; - - // construct mapping (file, id) -> (struct definition, optional parent contract name) - let structs = asts - .iter() - .flat_map(|(path, ast)| { - let mut structs = Vec::new(); - // we walk AST directly instead of using visitors because we need to distinguish - // between file-level and contract-level struct definitions - for node in &ast.nodes { - match node { - SourceUnitPart::StructDefinition(def) => { - structs.push((def, None)); - } - SourceUnitPart::ContractDefinition(contract) => { - for node in &contract.nodes { - if let ContractDefinitionPart::StructDefinition(def) = node { - structs.push((def, Some(contract.name.clone()))); - } - } - } - _ => {} + let mut target_files = HashSet::new(); + for (path, source) in &input.input.sources { + if !include.is_empty() { + if !include.iter().any(|matcher| matcher.is_match(path)) { + continue; + } + } else { + // 
Exclude library files by default + if project.paths.has_library_ancestor(path) { + continue; } } - structs.into_iter().map(|(def, parent)| ((path.as_path(), def.id), (def, parent))) - }) - .collect::>(); - - // Resolver for EIP712 schemas - let resolver = Resolver::new(&asts); - - let mut structs_to_write = Vec::new(); - - let include = config.bind_json.include; - let exclude = config.bind_json.exclude; - - for ((path, id), (def, contract_name)) in structs { - // For some structs there's no schema (e.g. if they contain a mapping), so we just skip - // those. - let Some(schema) = resolver.resolve_struct_eip712(id)? else { continue }; - if !include.is_empty() { - if !include.iter().any(|matcher| matcher.is_match(path)) { + if exclude.iter().any(|matcher| matcher.is_match(path)) { continue; } - } else { - // Exclude library files by default - if project.paths.has_library_ancestor(path) { - continue; + + if let Ok(src_file) = + sess.source_map().new_source_file(path.clone(), source.content.as_str()) + { + target_files.insert(src_file.stable_id); + parsing_context.add_file(src_file); } } - if exclude.iter().any(|matcher| matcher.is_match(path)) { - continue; - } + // Parse and resolve + let hir_arena = ThreadLocal::new(); + if let Ok(Some(gcx)) = parsing_context.parse_and_lower(&hir_arena) { + let hir = &gcx.get().hir; + let resolver = Resolver::new(gcx); + for id in &resolver.struct_ids() { + if let Some(schema) = resolver.resolve_struct_eip712(*id) { + let def = hir.strukt(*id); + let source = hir.source(def.source); + + if !target_files.contains(&source.file.stable_id) { + continue; + } - structs_to_write.push(StructToWrite { - name: def.name.clone(), - contract_name, - path: path.to_path_buf(), - schema, + if let FileName::Real(ref path) = source.file.name { + structs_to_write.push(StructToWrite { + name: def.name.as_str().into(), + contract_name: def + .contract + .map(|id| hir.contract(id).name.as_str().into()), + path: path + .strip_prefix(&root) + 
.unwrap_or_else(|_| path) + .to_path_buf(), + schema, + + // will be filled later + import_alias: None, + name_in_fns: String::new(), + }); + } + } + } + } + Ok(()) + }); - // will be filled later - import_alias: None, - name_in_fns: String::new(), - }) - } + eyre::ensure!(result.is_ok() && sess.dcx.has_errors().is_ok(), "failed parsing"); Ok(StructsState { structs_to_write, target_path }) } @@ -482,7 +455,7 @@ impl ResolvedState { result, "import {{{}}} from \"{}\";", names.iter().join(", "), - path.display() + path.to_slash_lossy() )?; } @@ -514,8 +487,8 @@ library JsonBindings { for struct_to_write in &self.structs_to_write { writeln!( result, - " string constant schema_{} = \"{}\";", - struct_to_write.name_in_fns, struct_to_write.schema + " {}{} = \"{}\";", + TYPE_BINDING_PREFIX, struct_to_write.name_in_fns, struct_to_write.schema )?; } diff --git a/crates/forge/src/cmd/eip712.rs b/crates/forge/src/cmd/eip712.rs index 72fbea0a4874a..2df5087592f00 100644 --- a/crates/forge/src/cmd/eip712.rs +++ b/crates/forge/src/cmd/eip712.rs @@ -1,11 +1,12 @@ use clap::{Parser, ValueHint}; -use eyre::{Ok, OptionExt, Result}; -use foundry_cli::{opts::BuildOpts, utils::LoadConfig}; -use foundry_common::compile::ProjectCompiler; -use foundry_compilers::artifacts::{ - output_selection::OutputSelection, - visitor::{Visitor, Walk}, - ContractDefinition, EnumDefinition, SourceUnit, StructDefinition, TypeDescriptions, TypeName, +use eyre::Result; +use foundry_cli::opts::{solar_pcx_from_build_opts, BuildOpts}; +use solar_parse::interface::Session; +use solar_sema::{ + hir::StructId, + thread_local::ThreadLocal, + ty::{Ty, TyKind}, + GcxWrapper, Hir, }; use std::{collections::BTreeMap, fmt::Write, path::PathBuf}; @@ -24,232 +25,154 @@ pub struct Eip712Args { impl Eip712Args { pub fn run(self) -> Result<()> { - let config = self.load_config()?; - let mut project = config.ephemeral_project()?; - let target_path = dunce::canonicalize(self.target_path)?; - 
project.update_output_selection(|selection| { - *selection = OutputSelection::ast_output_selection(); - }); - - let output = ProjectCompiler::new().files([target_path.clone()]).compile(&project)?; - - // Collect ASTs by getting them from sources and converting into strongly typed - // `SourceUnit`s. - let asts = output - .into_output() - .sources - .into_iter() - .filter_map(|(path, mut sources)| Some((path, sources.swap_remove(0).source_file.ast?))) - .map(|(path, ast)| { - Ok((path, serde_json::from_str::(&serde_json::to_string(&ast)?)?)) - }) - .collect::>>()?; - - let resolver = Resolver::new(&asts); - - let target_ast = asts - .get(&target_path) - .ok_or_else(|| eyre::eyre!("Could not find AST for target file {target_path:?}"))?; - - let structs_in_target = { - let mut collector = StructCollector::default(); - target_ast.walk(&mut collector); - collector.0 - }; - - for id in structs_in_target.keys() { - if let Some(resolved) = resolver.resolve_struct_eip712(*id)? { - sh_println!("{resolved}\n")?; + let mut sess = Session::builder().with_stderr_emitter().build(); + sess.dcx = sess.dcx.set_flags(|flags| flags.track_diagnostics = false); + + let result = sess.enter(|| -> Result<()> { + // Set up the parsing context with the project paths and sources. + let parsing_context = + solar_pcx_from_build_opts(&sess, self.build, Some(vec![self.target_path]))?; + + // Parse and resolve + let hir_arena = ThreadLocal::new(); + if let Ok(Some(gcx)) = parsing_context.parse_and_lower(&hir_arena) { + let resolver = Resolver::new(gcx); + for id in &resolver.struct_ids() { + if let Some(resolved) = resolver.resolve_struct_eip712(*id) { + _ = sh_println!("{resolved}\n"); + } + } } - } - Ok(()) - } -} + Ok(()) + }); -/// AST [Visitor] used for collecting struct definitions. 
-#[derive(Debug, Clone, Default)] -pub struct StructCollector(pub BTreeMap); + eyre::ensure!(result.is_ok() && sess.dcx.has_errors().is_ok(), "failed parsing"); -impl Visitor for StructCollector { - fn visit_struct_definition(&mut self, def: &StructDefinition) { - self.0.insert(def.id, def.clone()); + Ok(()) } } -/// Collects mapping from AST id of type definition to representation of this type for EIP-712 -/// encoding. +/// Generates the EIP-712 `encodeType` string for a given struct. /// -/// For now, maps contract definitions to `address` and enums to `uint8`. -#[derive(Debug, Clone, Default)] -struct SimpleCustomTypesCollector(BTreeMap); - -impl Visitor for SimpleCustomTypesCollector { - fn visit_contract_definition(&mut self, def: &ContractDefinition) { - self.0.insert(def.id, "address".to_string()); - } - - fn visit_enum_definition(&mut self, def: &EnumDefinition) { - self.0.insert(def.id, "uint8".to_string()); - } -} - -pub struct Resolver { - simple_types: BTreeMap, - structs: BTreeMap, +/// Requires a reference to the source HIR. +pub struct Resolver<'hir> { + hir: &'hir Hir<'hir>, + gcx: GcxWrapper<'hir>, } -impl Resolver { - pub fn new(asts: &BTreeMap) -> Self { - let simple_types = { - let mut collector = SimpleCustomTypesCollector::default(); - asts.values().for_each(|ast| ast.walk(&mut collector)); - - collector.0 - }; - - let structs = { - let mut collector = StructCollector::default(); - asts.values().for_each(|ast| ast.walk(&mut collector)); - collector.0 - }; +impl<'hir> Resolver<'hir> { + /// Constructs a new [`Resolver`] for the supplied [`Hir`] instance. + pub fn new(gcx: GcxWrapper<'hir>) -> Self { + Self { hir: &gcx.get().hir, gcx } + } - Self { simple_types, structs } + /// Returns the [`StructId`]s of every user-defined struct in source order. + pub fn struct_ids(&self) -> Vec { + self.hir.strukt_ids().collect() } - /// Converts a given struct definition into EIP-712 `encodeType` representation. 
+ /// Converts a given struct into its EIP-712 `encodeType` representation. /// - /// Returns `None` if struct contains any fields that are not supported by EIP-712 (e.g. - /// mappings or function pointers). - pub fn resolve_struct_eip712(&self, id: usize) -> Result> { + /// Returns `None` if the struct, or any of its fields, contains constructs + /// not supported by EIP-712 (mappings, function types, errors, etc). + pub fn resolve_struct_eip712(&self, id: StructId) -> Option { let mut subtypes = BTreeMap::new(); - subtypes.insert(self.structs[&id].name.clone(), id); + subtypes.insert(self.hir.strukt(id).name.as_str().into(), id); self.resolve_eip712_inner(id, &mut subtypes, true, None) } fn resolve_eip712_inner( &self, - id: usize, - subtypes: &mut BTreeMap, + id: StructId, + subtypes: &mut BTreeMap, append_subtypes: bool, rename: Option<&str>, - ) -> Result> { - let def = &self.structs[&id]; - let mut result = format!("{}(", rename.unwrap_or(&def.name)); - - for (idx, member) in def.members.iter().enumerate() { - let Some(ty) = self.resolve_type( - member.type_name.as_ref().ok_or_eyre("missing type name")?, - subtypes, - )? 
- else { - return Ok(None) - }; - - write!(result, "{ty} {name}", name = member.name)?; - - if idx < def.members.len() - 1 { + ) -> Option { + let def = self.hir.strukt(id); + let mut result = format!("{}(", rename.unwrap_or(def.name.as_str())); + + for (idx, field_id) in def.fields.iter().enumerate() { + let field = self.hir.variable(*field_id); + let ty = self.resolve_type(self.gcx.get().type_of_hir_ty(&field.ty), subtypes)?; + + write!(result, "{ty} {name}", name = field.name?.as_str()).ok()?; + + if idx < def.fields.len() - 1 { result.push(','); } } result.push(')'); - if !append_subtypes { - return Ok(Some(result)) - } + if append_subtypes { + for (subtype_name, subtype_id) in + subtypes.iter().map(|(name, id)| (name.clone(), *id)).collect::>() + { + if subtype_id == id { + continue + } + let encoded_subtype = + self.resolve_eip712_inner(subtype_id, subtypes, false, Some(&subtype_name))?; - for (subtype_name, subtype_id) in - subtypes.iter().map(|(name, id)| (name.clone(), *id)).collect::>() - { - if subtype_id == id { - continue + result.push_str(&encoded_subtype); } - let Some(encoded_subtype) = - self.resolve_eip712_inner(subtype_id, subtypes, false, Some(&subtype_name))? - else { - return Ok(None) - }; - result.push_str(&encoded_subtype); } - Ok(Some(result)) + Some(result) } - /// Converts given [TypeName] into a type which can be converted to - /// [`alloy_dyn_abi::DynSolType`]. - /// - /// Returns `None` if the type is not supported for EIP712 encoding. - pub fn resolve_type( + fn resolve_type( &self, - type_name: &TypeName, - subtypes: &mut BTreeMap, - ) -> Result> { - match type_name { - TypeName::FunctionTypeName(_) | TypeName::Mapping(_) => Ok(None), - TypeName::ElementaryTypeName(ty) => Ok(Some(ty.name.clone())), - TypeName::ArrayTypeName(ty) => { - let Some(inner) = self.resolve_type(&ty.base_type, subtypes)? 
else { - return Ok(None) + ty: Ty<'hir>, + subtypes: &mut BTreeMap, + ) -> Option { + let ty = ty.peel_refs(); + match ty.kind { + TyKind::Elementary(elem_ty) => Some(elem_ty.to_abi_str().to_string()), + TyKind::Array(element_ty, size) => { + let inner_type = self.resolve_type(element_ty, subtypes)?; + let size = size.to_string(); + Some(format!("{inner_type}[{size}]")) + } + TyKind::DynArray(element_ty) => { + let inner_type = self.resolve_type(element_ty, subtypes)?; + Some(format!("{inner_type}[]")) + } + TyKind::Udvt(ty, _) => self.resolve_type(ty, subtypes), + TyKind::Struct(id) => { + let def = self.hir.strukt(id); + let name = match subtypes.iter().find(|(_, cached_id)| id == **cached_id) { + Some((name, _)) => name.to_string(), + None => { + // Otherwise, assign new name + let mut i = 0; + let mut name = def.name.as_str().into(); + while subtypes.contains_key(&name) { + i += 1; + name = format!("{}_{i}", def.name.as_str()); + } + + subtypes.insert(name.clone(), id); + + // Recursively resolve fields to populate subtypes + for field_id in def.fields { + let field_ty = + self.gcx.get().type_of_hir_ty(&self.hir.variable(*field_id).ty); + self.resolve_type(field_ty, subtypes)?; + } + name + } }; - let len = parse_array_length(&ty.type_descriptions)?; - Ok(Some(format!("{inner}[{}]", len.unwrap_or("")))) - } - TypeName::UserDefinedTypeName(ty) => { - if let Some(name) = self.simple_types.get(&(ty.referenced_declaration as usize)) { - Ok(Some(name.clone())) - } else if let Some(def) = self.structs.get(&(ty.referenced_declaration as usize)) { - let name = - // If we've already seen struct with this ID, just use assigned name. - if let Some((name, _)) = subtypes.iter().find(|(_, id)| **id == def.id) { - name.clone() - } else { - // Otherwise, assign new name. 
- let mut i = 0; - let mut name = def.name.clone(); - while subtypes.contains_key(&name) { - i += 1; - name = format!("{}_{i}", def.name); - } - - subtypes.insert(name.clone(), def.id); - - // iterate over members to check if they are resolvable and to populate subtypes - for member in &def.members { - if self.resolve_type( - member.type_name.as_ref().ok_or_eyre("missing type name")?, - subtypes, - )? - .is_none() - { - return Ok(None) - } - } - name - }; - - Ok(Some(name)) - } else { - Ok(None) - } + Some(name) } + // For now, map enums to `uint8` + TyKind::Enum(_) => Some("uint8".to_string()), + // For now, map contracts to `address` + TyKind::Contract(_) => Some("address".to_string()), + // EIP-712 doesn't support tuples (should use structs), functions, mappings, nor errors + _ => None, } } } - -fn parse_array_length(type_description: &TypeDescriptions) -> Result> { - let type_string = - type_description.type_string.as_ref().ok_or_eyre("missing typeString for array type")?; - let Some(inside_brackets) = - type_string.rsplit_once("[").and_then(|(_, right)| right.split("]").next()) - else { - eyre::bail!("failed to parse array type string: {type_string}") - }; - - if inside_brackets.is_empty() { - Ok(None) - } else { - Ok(Some(inside_brackets)) - } -} diff --git a/crates/forge/tests/cli/eip712.rs b/crates/forge/tests/cli/eip712.rs index 9ec944631d9db..165942563d157 100644 --- a/crates/forge/tests/cli/eip712.rs +++ b/crates/forge/tests/cli/eip712.rs @@ -1,3 +1,5 @@ +use foundry_config::fs_permissions::PathPermission; + forgetest!(test_eip712, |prj, cmd| { let path = prj .add_source( @@ -55,9 +57,6 @@ library Structs2 { cmd.forge_fuse().args(["eip712", path.to_string_lossy().as_ref()]).assert_success().stdout_eq( str![[r#" -[COMPILING_FILES] with [SOLC_VERSION] -[SOLC_VERSION] [ELAPSED] -No files changed, compilation skipped Foo(Bar bar)Art(uint256 id)Bar(Art art) Bar(Art art)Art(uint256 id) @@ -80,3 +79,573 @@ FooBar(Foo[] foos,Bar[] bars,Foo_1 foo,Bar_1 bar,Rec[] 
recs,Rec_1 rec)Art(uint25 "#]], ); }); + +forgetest!(test_eip712_cheatcode_simple, |prj, cmd| { + prj.insert_ds_test(); + prj.insert_vm(); + prj.insert_console(); + + prj.add_source( + "Eip712", + r#" +contract Eip712Structs { + struct EIP712Domain { + string name; + string version; + uint256 chainId; + address verifyingContract; + } +} + "#, + ) + .unwrap(); + + prj.add_source("Eip712Cheat.sol", r#" +import "./test.sol"; +import "./Vm.sol"; +import "./console.sol"; + +string constant CANONICAL = "EIP712Domain(string name,string version,uint256 chainId,address verifyingContract)"; + +contract Eip712Test is DSTest { + Vm constant vm = Vm(HEVM_ADDRESS); + + function testEip712HashType() public { + bytes32 canonicalHash = keccak256(bytes(CANONICAL)); + console.logBytes32(canonicalHash); + + // Can figure out the canonical type from a messy string representation of the type, + // with an invalid order and extra whitespaces + bytes32 fromTypeDef = vm.eip712HashType( + "EIP712Domain(string name, string version, uint256 chainId, address verifyingContract)" + ); + assertEq(fromTypeDef, canonicalHash); + + // Can figure out the canonical type from the previously generated bindings + bytes32 fromTypeName = vm.eip712HashType("EIP712Domain"); + assertEq(fromTypeName, canonicalHash); + } +} +"#, + ) + .unwrap(); + + cmd.forge_fuse().args(["bind-json"]).assert_success(); + + let bindings = prj.root().join("utils").join("JsonBindings.sol"); + assert!(bindings.exists(), "'JsonBindings.sol' was not generated at {bindings:?}"); + + prj.update_config(|config| config.fs_permissions.add(PathPermission::read(bindings))); + cmd.forge_fuse().args(["test", "--mc", "Eip712Test", "-vv"]).assert_success().stdout_eq(str![ + [r#" +[COMPILING_FILES] with [SOLC_VERSION] +[SOLC_VERSION] [ELAPSED] +Compiler run successful! 
+ +Ran 1 test for src/Eip712Cheat.sol:Eip712Test +[PASS] testEip712HashType() ([GAS]) +Logs: + 0x8b73c3c69bb8fe3d512ecc4cf759cc79239f7b179b0ffacaa9a75d522b39400f + +Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED] + +Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests) + +"#] + ]); +}); + +forgetest!(test_eip712_cheatcode_nested, |prj, cmd| { + prj.insert_ds_test(); + prj.insert_vm(); + prj.insert_console(); + + prj.add_source( + "Eip712", + r#" +contract Eip712Structs { + struct Transaction { + Person from; + Person to; + Asset tx; + } + struct Person { + address wallet; + string name; + } + struct Asset { + address token; + uint256 amount; + } +} + "#, + ) + .unwrap(); + + prj.add_source("Eip712Cheat.sol", r#" +import "./test.sol"; +import "./Vm.sol"; + +string constant CANONICAL = "Transaction(Person from,Person to,Asset tx)Asset(address token,uint256 amount)Person(address wallet,string name)"; + +contract Eip712Test is DSTest { + Vm constant vm = Vm(HEVM_ADDRESS); + + function testEip712HashType_byDefinition() public { + bytes32 canonicalHash = keccak256(bytes(CANONICAL)); + + // Can figure out the canonical type from a messy string representation of the type, + // with an invalid order and extra whitespaces + bytes32 fromTypeDef = vm.eip712HashType( + "Person(address wallet, string name) Asset(address token, uint256 amount) Transaction(Person from, Person to, Asset tx)" + ); + assertEq(fromTypeDef, canonicalHash); + } + + function testEip712HashType_byTypeName() public { + bytes32 canonicalHash = keccak256(bytes(CANONICAL)); + + // Can figure out the canonical type from the previously generated bindings + bytes32 fromTypeName = vm.eip712HashType("Transaction"); + assertEq(fromTypeName, canonicalHash); + } + + function testReverts_Eip712HashType_invalidName() public { + // Reverts if the input type is not found in the bindings + vm._expectCheatcodeRevert(); + bytes32 fromTypeName = vm.eip712HashType("InvalidTypeName"); + } + 
+ function testEip712HashType_byCustomPathAndTypeName() public { + bytes32 canonicalHash = keccak256(bytes(CANONICAL)); + + // Can figure out the canonical type from the previously generated bindings + bytes32 fromTypeName = vm.eip712HashType("utils/CustomJsonBindings.sol", "Transaction"); + assertEq(fromTypeName, canonicalHash); + } +} +"#, + ) + .unwrap(); + + // cheatcode by type definition can run without bindings + cmd.forge_fuse() + .args(["test", "--mc", "Eip712Test", "--match-test", "testEip712HashType_byDefinition"]) + .assert_success(); + + let bindings = prj.root().join("utils").join("JsonBindings.sol"); + prj.update_config(|config| config.fs_permissions.add(PathPermission::read(&bindings))); + + // cheatcode by type name fails if bindings haven't been generated + cmd.forge_fuse() + .args(["test", "--mc", "Eip712Test", "--match-test", "testEip712HashType_byTypeName"]) + .assert_failure() + .stdout_eq(str![[r#" +... +Ran 1 test for src/Eip712Cheat.sol:Eip712Test +[FAIL: vm.eip712HashType: failed to read from [..] testEip712HashType_byTypeName() ([GAS]) +Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED] + +Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests) + +Failing tests: +Encountered 1 failing test in src/Eip712Cheat.sol:Eip712Test +[FAIL: vm.eip712HashType: failed to read from [..] 
testEip712HashType_byTypeName() ([GAS]) + +Encountered a total of 1 failing tests, 0 tests succeeded + +"#]]); + + cmd.forge_fuse().args(["bind-json"]).assert_success(); + assert!(bindings.exists(), "'JsonBindings.sol' was not generated at {bindings:?}"); + + // with generated bindings, cheatcode by type name works + cmd.forge_fuse() + .args(["test", "--mc", "Eip712Test", "--match-test", "testEip712HashType_byTypeName"]) + .assert_success(); + + // even with generated bindings, cheatcode by type name fails if name is not present + cmd.forge_fuse() + .args([ + "test", + "--mc", + "Eip712Test", + "--match-test", + "testReverts_Eip712HashType_invalidName", + ]) + .assert_success(); + + let bindings_2 = prj.root().join("utils").join("CustomJsonBindings.sol"); + prj.update_config(|config| { + config.fs_permissions.add(PathPermission::read(&bindings_2)); + }); + + // cheatcode by custom path and type name fails if bindings haven't been generated for that path + cmd.forge_fuse() + .args(["test", "--mc", "Eip712Test", "--match-test", "testEip712HashType_byCustomPathAndTypeName"]) + .assert_failure() + .stdout_eq(str![[r#" +... +Ran 1 test for src/Eip712Cheat.sol:Eip712Test +[FAIL: vm.eip712HashType: failed to read from [..] testEip712HashType_byCustomPathAndTypeName() ([GAS]) +Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED] + +Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests) + +Failing tests: +Encountered 1 failing test in src/Eip712Cheat.sol:Eip712Test +[FAIL: vm.eip712HashType: failed to read from [..] 
testEip712HashType_byCustomPathAndTypeName() ([GAS]) + +Encountered a total of 1 failing tests, 0 tests succeeded + +"#]]); + + cmd.forge_fuse().args(["bind-json", "utils/CustomJsonBindings.sol"]).assert_success(); + assert!(bindings_2.exists(), "'CustomJsonBindings.sol' was not generated at {bindings_2:?}"); + + // with generated bindings, cheatcode by custom path and type name works + cmd.forge_fuse() + .args([ + "test", + "--mc", + "Eip712Test", + "--match-test", + "testEip712HashType_byCustomPathAndTypeName", + ]) + .assert_success(); +}); + +forgetest!(test_eip712_hash_struct_simple, |prj, cmd| { + prj.insert_ds_test(); + prj.insert_vm(); + prj.insert_console(); + + prj.add_source( + "Eip712HashStructDomainTest.sol", + r#" +import "./Vm.sol"; +import "./test.sol"; +import "./console.sol"; + +struct EIP712Domain { + string name; + string version; + uint256 chainId; + address verifyingContract; +} + +string constant _EIP712_DOMAIN_TYPE_DEF = "EIP712Domain(string name,string version,uint256 chainId,address verifyingContract)"; +bytes32 constant _EIP712_DOMAIN_TYPE_HASH = keccak256(bytes(_EIP712_DOMAIN_TYPE_DEF)); + +contract Eip712HashStructDomainTest is DSTest { + Vm constant vm = Vm(address(uint160(uint256(keccak256("hevm cheat code"))))); + + function testHashEIP712Domain() public { + EIP712Domain memory domain = EIP712Domain({ + name: "Foo", + version: "Bar", + chainId: 1, + verifyingContract: 0xdEADBEeF00000000000000000000000000000000 + }); + + // simulate user-computed domain hash + bytes memory encodedData = abi.encode( + keccak256(bytes(domain.name)), + keccak256(bytes(domain.version)), + bytes32(domain.chainId), + bytes32(uint256(uint160(domain.verifyingContract))) + ); + bytes32 userStructHash = keccak256(abi.encodePacked(_EIP712_DOMAIN_TYPE_HASH, encodedData)); + + // cheatcode-computed domain hash + bytes32 cheatStructHash = vm.eip712HashStruct(_EIP712_DOMAIN_TYPE_DEF, abi.encode(domain)); + console.log("EIP712Domain struct hash from cheatcode:"); + 
console.logBytes32(cheatStructHash); + + assertEq(cheatStructHash, userStructHash, "EIP712Domain struct hash mismatch"); + } +} +"#, + ) + .unwrap(); + + cmd.forge_fuse().args(["test", "--mc", "Eip712HashStructDomainTest", "-vvvv"]).assert_success(); +}); + +forgetest!(test_eip712_hash_struct_complex, |prj, cmd| { + prj.insert_ds_test(); + prj.insert_vm(); + prj.insert_console(); + + prj.add_source( + "Eip712Permit.sol", + r#" +struct PermitDetails { + address token; + uint160 amount; + uint48 expiration; + uint48 nonce; +} + +bytes32 constant _PERMIT_DETAILS_TYPEHASH = keccak256( + "PermitDetails(address token,uint160 amount,uint48 expiration,uint48 nonce)" +); + +struct PermitSingle { + PermitDetails details; + address spender; + uint256 sigDeadline; +} + +bytes32 constant _PERMIT_SINGLE_TYPEHASH = keccak256( + "PermitSingle(PermitDetails details,address spender,uint256 sigDeadline)PermitDetails(address token,uint160 amount,uint48 expiration,uint48 nonce)" +); + +// borrowed from https://github.com/Uniswap/permit2/blob/main/src/libraries/PermitHash.sol +library PermitHash { + function hash(PermitSingle memory permitSingle) internal pure returns (bytes32) { + bytes32 permitHash = _hashDetails(permitSingle.details); + return + keccak256(abi.encode(_PERMIT_SINGLE_TYPEHASH, permitHash, permitSingle.spender, permitSingle.sigDeadline)); + } + + function _hashDetails(PermitDetails memory details) internal pure returns (bytes32) { + return keccak256(abi.encode(_PERMIT_DETAILS_TYPEHASH, details)); + } +} +"#, + ) + .unwrap(); + + prj.add_source( + "Eip712Transaction.sol", + r#" +struct Asset { + address token; + uint256 amount; +} + +bytes32 constant _ASSET_TYPEHASH = keccak256( + "Asset(address token,uint256 amount)" +); + +struct Person { + address wallet; + string name; +} + +bytes32 constant _PERSON_TYPEHASH = keccak256( + "Person(address wallet,string name)" +); + +struct Transaction { + Person from; + Person to; + Asset tx; +} + +bytes32 constant 
_TRANSACTION_TYPEHASH = keccak256( + "Transaction(Person from,Person to,Asset tx)Asset(address token,uint256 amount)Person(address wallet,string name)" +); + + +library TransactionHash { + function hash(Transaction memory t) internal pure returns (bytes32) { + bytes32 fromHash = _hashPerson(t.from); + bytes32 toHash = _hashPerson(t.to); + bytes32 assetHash = _hashAsset(t.tx); + return + keccak256(abi.encode(_TRANSACTION_TYPEHASH, fromHash, toHash, assetHash)); + } + + function _hashPerson(Person memory person) internal pure returns (bytes32) { + return keccak256( + abi.encode(_PERSON_TYPEHASH, person.wallet, keccak256(bytes(person.name))) + ); + + } + + function _hashAsset(Asset memory asset) internal pure returns (bytes32) { + return keccak256(abi.encode(_ASSET_TYPEHASH, asset)); + } +} + "#, + ) + .unwrap(); + + let bindings = prj.root().join("utils").join("JsonBindings.sol"); + prj.update_config(|config| config.fs_permissions.add(PathPermission::read(&bindings))); + cmd.forge_fuse().args(["bind-json"]).assert_success(); + + prj.add_source( + "Eip712HashStructTest.sol", + r#" +import "./Vm.sol"; +import "./test.sol"; +import "./console.sol"; +import "./Eip712Permit.sol"; +import "./Eip712Transaction.sol"; + +contract Eip712HashStructTest is DSTest { + Vm constant vm = Vm(HEVM_ADDRESS); + + function testHashPermitSingle_withTypeName() public { + PermitDetails memory details = PermitDetails({ + token: 0x1111111111111111111111111111111111111111, + amount: 1000 ether, + expiration: 12345, + nonce: 1 + }); + + // user-computed permit (using uniswap hash library) + bytes32 userStructHash = PermitHash._hashDetails(details); + + // cheatcode-computed permit + bytes32 cheatStructHash = vm.eip712HashStruct("PermitDetails", abi.encode(details)); + + assertEq(cheatStructHash, userStructHash, "details struct hash mismatch"); + + PermitSingle memory permit = PermitSingle({ + details: details, + spender: 0x2222222222222222222222222222222222222222, + sigDeadline: 12345 + }); + + 
// user-computed permit (using uniswap hash library) + userStructHash = PermitHash.hash(permit); + + // cheatcode-computed permit + cheatStructHash = vm.eip712HashStruct("PermitSingle", abi.encode(permit)); + console.log("PermitSingle struct hash from cheatcode:"); + console.logBytes32(cheatStructHash); + + assertEq(cheatStructHash, userStructHash, "permit struct hash mismatch"); + } + + function testHashPermitSingle_withTypeDefinion() public { + PermitDetails memory details = PermitDetails({ + token: 0x1111111111111111111111111111111111111111, + amount: 1000 ether, + expiration: 12345, + nonce: 1 + }); + + // user-computed permit (using uniswap hash library) + bytes32 userStructHash = PermitHash._hashDetails(details); + + // cheatcode-computed permit + bytes32 cheatStructHash = vm.eip712HashStruct("PermitDetails(address token, uint160 amount, uint48 expiration, uint48 nonce)", abi.encode(details)); + + assertEq(cheatStructHash, userStructHash, "details struct hash mismatch"); + + PermitSingle memory permit = PermitSingle({ + details: details, + spender: 0x2222222222222222222222222222222222222222, + sigDeadline: 12345 + }); + + // user-computed permit (using uniswap hash library) + userStructHash = PermitHash.hash(permit); + + // cheatcode-computed permit (previously encoding) + cheatStructHash = vm.eip712HashStruct("PermitDetails(address token, uint160 amount, uint48 expiration, uint48 nonce) PermitSingle(PermitDetails details,address spender,uint256 sigDeadline)", abi.encode(permit)); + console.log("PermitSingle struct hash from cheatcode:"); + console.logBytes32(cheatStructHash); + + assertEq(cheatStructHash, userStructHash, "permit struct hash mismatch"); + } + + function testHashTransaction_withTypeName() public { + Asset memory asset = Asset ({ token: 0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2, amount: 100 ether }); + + bytes32 user = TransactionHash._hashAsset(asset); + bytes32 cheat = vm.eip712HashStruct("Asset", abi.encode(asset)); + assertEq(user, cheat, 
"asset struct hash mismatch"); + + Person memory from = Person ({ wallet: 0x0000000000000000000000000000000000000001, name: "alice" }); + Person memory to = Person ({ wallet: 0x0000000000000000000000000000000000000002, name: "bob" }); + + user = TransactionHash._hashPerson(from); + cheat = vm.eip712HashStruct("Person", abi.encode(from)); + assertEq(user, cheat, "person struct hash mismatch"); + + Transaction memory t = Transaction ({ from: from, to: to, tx: asset }); + + user = TransactionHash.hash(t); + cheat = vm.eip712HashStruct("Transaction", abi.encode(t)); + assertEq(user, cheat, "transaction struct hash mismatch"); + } + + function testHashTransaction_withTypeDefinition() public { + Asset memory asset = Asset ({ token: 0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2, amount: 100 ether }); + + bytes32 user = TransactionHash._hashAsset(asset); + bytes32 cheat = vm.eip712HashStruct("Asset(address token, uint256 amount)", abi.encode(asset)); + assertEq(user, cheat, "asset struct hash mismatch"); + + Person memory from = Person ({ wallet: 0x0000000000000000000000000000000000000001, name: "alice" }); + Person memory to = Person ({ wallet: 0x0000000000000000000000000000000000000002, name: "bob" }); + + user = TransactionHash._hashPerson(from); + cheat = vm.eip712HashStruct("Person(address wallet, string name)", abi.encode(from)); + assertEq(user, cheat, "person struct hash mismatch"); + + Transaction memory t = Transaction ({ from: from, to: to, tx: asset }); + + user = TransactionHash.hash(t); + cheat = vm.eip712HashStruct("Person(address wallet, string name) Asset(address token, uint256 amount) Transaction(Person from, Person to, Asset tx)", abi.encode(t)); + assertEq(user, cheat, "transaction struct hash mismatch"); + } +} +"#, + ) + .unwrap(); + + cmd.forge_fuse() + .args(["test", "--mc", "Eip712HashStructTest", "-vv"]) + .assert_success() + .stdout_eq(str![[r#" +... 
+[PASS] testHashPermitSingle_withTypeDefinion() ([GAS]) +Logs: + PermitSingle struct hash from cheatcode: + 0x3ed744fdcea02b6b9ad45a9db6e648bf6f18c221909f9ee425191f2a02f9e4a8 + +[PASS] testHashPermitSingle_withTypeName() ([GAS]) +Logs: + PermitSingle struct hash from cheatcode: + 0x3ed744fdcea02b6b9ad45a9db6e648bf6f18c221909f9ee425191f2a02f9e4a8 +... +"#]]); +}); + +forgetest!(test_eip712_hash_typed_data, |prj, cmd| { + prj.insert_ds_test(); + prj.insert_vm(); + prj.insert_console(); + + prj.add_source( + "Eip712HashTypedData.sol", + r#" +import "./Vm.sol"; +import "./test.sol"; +import "./console.sol"; +contract Eip712HashTypedDataTest is DSTest { + Vm constant vm = Vm(address(uint160(uint256(keccak256("hevm cheat code"))))); + + function testHashEIP712Message() public { + string memory jsonData = + '{"types":{"EIP712Domain":[{"name":"name","type":"string"},{"name":"version","type":"string"},{"name":"chainId","type":"uint256"},{"name":"verifyingContract","type":"address"},{"name":"salt","type":"bytes32"}]},"primaryType":"EIP712Domain","domain":{"name":"example.metamask.io","version":"1","chainId":1,"verifyingContract":"0x0000000000000000000000000000000000000000"},"message":{}}'; + + // since this cheatcode simply exposes an alloy fn, the test has been borrowed from: + // + bytes32 expectedHash = hex"122d1c8ef94b76dad44dcb03fa772361e20855c63311a15d5afe02d1b38f6077"; + assertEq(vm.eip712HashTypedData(jsonData), expectedHash, "EIP712Domain struct hash mismatch"); + } +} +"#, + ) + .unwrap(); + + cmd.forge_fuse().args(["test", "--mc", "Eip712HashTypedDataTest"]).assert_success(); +}); diff --git a/docs/dev/cheatcodes.md b/docs/dev/cheatcodes.md index 0815ca66bef50..0c96c4ba7c7f5 100644 --- a/docs/dev/cheatcodes.md +++ b/docs/dev/cheatcodes.md @@ -155,7 +155,7 @@ update of the files. 2. Implement the cheatcode in [`cheatcodes`] in its category's respective module. Follow the existing implementations as a guide. 3. 
If a struct, enum, error, or event was added to `Vm`, update [`spec::Cheatcodes::new`] 4. Update the JSON interface by running `cargo cheats` twice. This is expected to fail the first time that this is run after adding a new cheatcode; see [JSON interface](#json-interface) -5. Write an integration test for the cheatcode in [`testdata/cheats/`] +5. Write an integration test for the cheatcode in [`testdata/default/cheats/`] [`sol!`]: https://docs.rs/alloy-sol-macro/latest/alloy_sol_macro/macro.sol.html [`cheatcodes/spec/src/vm.rs`]: ../../crates/cheatcodes/spec/src/vm.rs diff --git a/testdata/cheats/Vm.sol b/testdata/cheats/Vm.sol index 6d054abbfc6ec..b439cf9b6f883 100644 --- a/testdata/cheats/Vm.sol +++ b/testdata/cheats/Vm.sol @@ -208,6 +208,11 @@ interface Vm { function deriveKey(string calldata mnemonic, string calldata derivationPath, uint32 index, string calldata language) external pure returns (uint256 privateKey); function difficulty(uint256 newDifficulty) external; function dumpState(string calldata pathToStateJson) external; + function eip712HashStruct(string calldata typeNameOrDefinition, bytes calldata abiEncodedData) external pure returns (bytes32 typeHash); + function eip712HashStruct(string calldata bindingsPath, string calldata typeName, bytes calldata abiEncodedData) external pure returns (bytes32 typeHash); + function eip712HashType(string calldata typeNameOrDefinition) external pure returns (bytes32 typeHash); + function eip712HashType(string calldata bindingsPath, string calldata typeName) external pure returns (bytes32 typeHash); + function eip712HashTypedData(string calldata jsonData) external pure returns (bytes32 digest); function ensNamehash(string calldata name) external pure returns (bytes32); function envAddress(string calldata name) external view returns (address value); function envAddress(string calldata name, string calldata delim) external view returns (address[] memory value);