diff --git a/Cargo.lock b/Cargo.lock index e80a5ad52f97b..a4e53866074f7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3232,7 +3232,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "976dd42dc7e85965fe702eb8164f21f450704bdde31faefd6471dba214cb594e" dependencies = [ "libc", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -3505,7 +3505,9 @@ dependencies = [ "similar", "similar-asserts", "solang-parser", + "solar-interface", "solar-parse", + "solar-sema", "soldeer-commands", "strum 0.27.1", "svm-rs", @@ -3771,6 +3773,7 @@ dependencies = [ "clap", "color-eyre", "dotenvy", + "dunce", "eyre", "forge-fmt", "foundry-block-explorers", @@ -3788,6 +3791,7 @@ dependencies = [ "rustls", "serde", "serde_json", + "solar-sema", "strsim", "strum 0.27.1", "tempfile", @@ -5341,7 +5345,7 @@ checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9" dependencies = [ "hermit-abi 0.5.0", "libc", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -5404,7 +5408,7 @@ dependencies = [ "portable-atomic", "portable-atomic-util", "serde", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -7012,7 +7016,7 @@ dependencies = [ "once_cell", "socket2", "tracing", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -7552,7 +7556,7 @@ dependencies = [ "errno", "libc", "linux-raw-sys 0.4.15", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -7565,7 +7569,7 @@ dependencies = [ "errno", "libc", "linux-raw-sys 0.9.4", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -8304,7 +8308,7 @@ dependencies = [ "solar-config", "solar-data-structures", "solar-macros", - "thiserror 2.0.12", + "thiserror 1.0.69", "tracing", "unicode-width 0.2.0", ] @@ -8650,7 +8654,7 @@ dependencies = [ "serde_json", "sha2", "tempfile", - "thiserror 2.0.12", + "thiserror 1.0.69", "url", "zip", ] @@ -9975,7 +9979,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 976d34bd265ed..d84e3345fa096 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -194,6 +194,7 @@ foundry-fork-db = "0.12" solang-parser = "=0.3.3" solar-parse = { version = "=0.1.2", default-features = false } solar-sema = { version = "=0.1.2", default-features = false } +solar-interface = { version = "=0.1.2", default-features = false } ## revm revm = { version = "19.4.0", default-features = false } diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml index 120a114c82a0f..67dd0e2bac876 100644 --- a/crates/cli/Cargo.toml +++ b/crates/cli/Cargo.toml @@ -19,9 +19,10 @@ foundry-config.workspace = true foundry-debugger.workspace = true foundry-evm.workspace = true foundry-wallets.workspace = true +foundry-block-explorers.workspace = true foundry-compilers = { workspace = true, features = ["full"] } -foundry-block-explorers.workspace = true +solar-sema.workspace = true alloy-eips.workspace = true alloy-dyn-abi.workspace = true @@ -50,6 +51,7 @@ tracing-subscriber = { workspace = true, features = ["registry", "env-filter"] } tracing.workspace = true yansi.workspace = true rustls = { workspace = true, features = ["ring"] } +dunce.workspace = true tracing-tracy = { version = "0.11", optional = true } diff --git a/crates/cli/src/opts/build/mod.rs b/crates/cli/src/opts/build/mod.rs index 55c61dcbbedd7..4deffb2a4c37d 100644 --- a/crates/cli/src/opts/build/mod.rs +++ b/crates/cli/src/opts/build/mod.rs @@ -8,6 +8,9 @@ pub use self::core::BuildOpts; mod paths; pub use self::paths::ProjectPathOpts; +mod utils; +pub use self::utils::{solar_pcx_from_build_opts, solar_pcx_from_solc_project}; + // A set of solc compiler settings that can be set via command line arguments, which are intended // to be merged into an existing `foundry_config::Config`. 
// diff --git a/crates/cli/src/opts/build/utils.rs b/crates/cli/src/opts/build/utils.rs new file mode 100644 index 0000000000000..d0ccd233ee49f --- /dev/null +++ b/crates/cli/src/opts/build/utils.rs @@ -0,0 +1,105 @@ +use crate::{opts::BuildOpts, utils::LoadConfig}; + +use eyre::Result; +use foundry_compilers::{ + artifacts::{Source, Sources}, + multi::{MultiCompilerLanguage, MultiCompilerParsedSource}, + solc::{SolcLanguage, SolcVersionedInput}, + CompilerInput, Graph, Project, +}; +use solar_sema::{interface::Session, ParsingContext}; +use std::path::PathBuf; + +/// Builds a Solar [`solar_sema::ParsingContext`] from [`BuildOpts`]. +/// +/// * Configures include paths, remappings and registers all in-memory sources so that solar can +/// operate without touching disk. +/// * If no `target_paths` are provided, all project files are processed. +/// * Only processes the subset of sources with the most up-to-date Solidity version. +pub fn solar_pcx_from_build_opts<'sess>( + sess: &'sess Session, + build: BuildOpts, + target_paths: Option>, +) -> Result> { + // Process build options + let config = build.load_config()?; + let project = config.ephemeral_project()?; + + let sources = match target_paths { + // If target files are provided, only process those sources + Some(targets) => { + let mut sources = Sources::new(); + for t in targets.into_iter() { + let path = dunce::canonicalize(t)?; + let source = Source::read(&path)?; + sources.insert(path, source); + } + sources + } + // Otherwise, process all project files + None => project.paths.read_input_files()?, + }; + + // Only process sources with latest Solidity version to avoid conflicts. + let graph = Graph::::resolve_sources(&project.paths, sources)?; + let (version, sources, _) = graph + // resolve graph into mapping language -> version -> sources + .into_sources_by_version(&project)?
+ .sources + .into_iter() + // only interested in Solidity sources + .find(|(lang, _)| *lang == MultiCompilerLanguage::Solc(SolcLanguage::Solidity)) + .ok_or_else(|| eyre::eyre!("no Solidity sources"))? + .1 + .into_iter() + // always pick the latest version + .max_by(|(v1, _, _), (v2, _, _)| v1.cmp(v2)) + .unwrap(); + + let solc = SolcVersionedInput::build( + sources, + config.solc_settings()?, + SolcLanguage::Solidity, + version, + ); + + Ok(solar_pcx_from_solc_project(sess, &project, &solc, true)) +} + +/// Builds a Solar [`solar_sema::ParsingContext`] from a [`foundry_compilers::Project`] and a +/// [`SolcVersionedInput`]. +/// +/// * Configures include paths, remappings. +/// * Source files can be manually added if the param `add_source_files` is set to `false`. +pub fn solar_pcx_from_solc_project<'sess>( + sess: &'sess Session, + project: &Project, + solc: &SolcVersionedInput, + add_source_files: bool, +) -> ParsingContext<'sess> { + // Configure the parsing context with the paths, remappings and sources + let mut pcx = ParsingContext::new(sess); + + pcx.file_resolver + .set_current_dir(solc.cli_settings.base_path.as_ref().unwrap_or(&project.paths.root)); + for remapping in &project.paths.remappings { + pcx.file_resolver.add_import_remapping(solar_sema::interface::config::ImportRemapping { + context: remapping.context.clone().unwrap_or_default(), + prefix: remapping.name.clone(), + path: remapping.path.clone(), + }); + } + pcx.file_resolver.add_include_paths(solc.cli_settings.include_paths.iter().cloned()); + + if add_source_files { + for (path, source) in &solc.input.sources { + if let Ok(src_file) = + sess.source_map().new_source_file(path.clone(), source.content.as_str()) + { + pcx.add_file(src_file); + } + } + } + + pcx +} diff --git a/crates/forge/Cargo.toml b/crates/forge/Cargo.toml index c03a708a1f38f..748785371f696 100644 --- a/crates/forge/Cargo.toml +++ b/crates/forge/Cargo.toml @@ -73,6 +73,8 @@ serde_json.workspace = true similar = { version = "2",
features = ["inline"] } solang-parser.workspace = true solar-parse.workspace = true +solar-sema.workspace = true +solar-interface.workspace = true strum = { workspace = true, features = ["derive"] } thiserror.workspace = true tokio = { workspace = true, features = ["time"] } diff --git a/crates/forge/src/cmd/bind_json.rs b/crates/forge/src/cmd/bind_json.rs index 7c6bfaa52ad74..f494794f04788 100644 --- a/crates/forge/src/cmd/bind_json.rs +++ b/crates/forge/src/cmd/bind_json.rs @@ -1,28 +1,29 @@ use super::eip712::Resolver; use clap::{Parser, ValueHint}; -use eyre::Result; -use foundry_cli::{opts::BuildOpts, utils::LoadConfig}; -use foundry_common::{compile::with_compilation_reporter, fs}; +use eyre::{eyre, Result}; +use foundry_cli::{ + opts::{solar_pcx_from_solc_project, BuildOpts}, + utils::LoadConfig, +}; +use foundry_common::fs; use foundry_compilers::{ - artifacts::{ - output_selection::OutputSelection, ContractDefinitionPart, Source, SourceUnit, - SourceUnitPart, Sources, - }, + artifacts::{Source, Sources}, multi::{MultiCompilerLanguage, MultiCompilerParsedSource}, - project::ProjectCompiler, - solc::SolcLanguage, - Graph, Project, + solc::{SolcLanguage, SolcVersionedInput}, + CompilerInput, Graph, Project, }; use foundry_config::Config; use itertools::Itertools; use rayon::prelude::*; +use semver::Version; use solar_parse::{ ast::{self, interface::source_map::FileName, visit::Visit, Arena, FunctionKind, Span, VarMut}, interface::Session, Parser as SolarParser, }; +use solar_sema::thread_local::ThreadLocal; use std::{ - collections::{BTreeMap, BTreeSet}, + collections::{BTreeMap, BTreeSet, HashSet}, fmt::{self, Write}, ops::ControlFlow, path::PathBuf, @@ -31,6 +32,8 @@ use std::{ foundry_config::impl_figment_convert!(BindJsonArgs, build); +const JSON_BINDINGS_PLACEHOLDER: &str = "library JsonBindings {}"; + /// CLI arguments for `forge bind-json`. 
#[derive(Clone, Debug, Parser)] pub struct BindJsonArgs { @@ -44,7 +47,7 @@ pub struct BindJsonArgs { impl BindJsonArgs { pub fn run(self) -> Result<()> { - self.preprocess()?.compile()?.find_structs()?.resolve_imports_and_aliases().write()?; + self.preprocess()?.find_structs()?.resolve_imports_and_aliases().write()?; Ok(()) } @@ -74,7 +77,7 @@ impl BindJsonArgs { let graph = Graph::::resolve_sources(&project.paths, sources)?; // We only generate bindings for a single Solidity version to avoid conflicts. - let mut sources = graph + let (version, mut sources, _) = graph // resolve graph into mapping language -> version -> sources .into_sources_by_version(&project)? .sources @@ -86,8 +89,7 @@ impl BindJsonArgs { .into_iter() // For now, we are always picking the latest version. .max_by(|(v1, _, _), (v2, _, _)| v1.cmp(v2)) - .unwrap() - .1; + .unwrap(); let sess = Session::builder().with_stderr_emitter().build(); let result = sess.enter_parallel(|| -> solar_parse::interface::Result<()> { @@ -114,9 +116,13 @@ impl BindJsonArgs { eyre::ensure!(result.is_ok(), "failed parsing"); // Insert empty bindings file. 
- sources.insert(target_path.clone(), Source::new("library JsonBindings {}")); + if let Some(parent) = target_path.parent() { + fs::create_dir_all(parent)?; + } + fs::write(&target_path, JSON_BINDINGS_PLACEHOLDER)?; + sources.insert(target_path.clone(), Source::new(JSON_BINDINGS_PLACEHOLDER)); - Ok(PreprocessedState { sources, target_path, project, config }) + Ok(PreprocessedState { version, sources, target_path, project, config }) } } @@ -237,8 +243,8 @@ impl StructToWrite { } } -#[derive(Debug)] struct PreprocessedState { + version: Version, sources: Sources, target_path: PathBuf, project: Project, @@ -246,117 +252,88 @@ struct PreprocessedState { } impl PreprocessedState { - fn compile(self) -> Result { - let Self { sources, target_path, mut project, config } = self; - - project.update_output_selection(|selection| { - *selection = OutputSelection::ast_output_selection(); - }); - - let output = with_compilation_reporter(false, || { - ProjectCompiler::with_sources(&project, sources)?.compile() - })?; - - if output.has_compiler_errors() { - eyre::bail!("{output}"); - } - - // Collect ASTs by getting them from sources and converting into strongly typed - // `SourceUnit`s. Also strips root from paths. 
- let asts = output - .into_output() - .sources - .into_iter() - .filter_map(|(path, mut sources)| Some((path, sources.swap_remove(0).source_file.ast?))) - .map(|(path, ast)| { - Ok(( - path.strip_prefix(project.root()).unwrap_or(&path).to_path_buf(), - serde_json::from_str::(&serde_json::to_string(&ast)?)?, - )) - }) - .collect::>>()?; - - Ok(CompiledState { asts, target_path, config, project }) - } -} - -#[derive(Debug, Clone)] -struct CompiledState { - asts: BTreeMap, - target_path: PathBuf, - config: Config, - project: Project, -} - -impl CompiledState { fn find_structs(self) -> Result { - let Self { asts, target_path, config, project } = self; - - // construct mapping (file, id) -> (struct definition, optional parent contract name) - let structs = asts - .iter() - .flat_map(|(path, ast)| { - let mut structs = Vec::new(); - // we walk AST directly instead of using visitors because we need to distinguish - // between file-level and contract-level struct definitions - for node in &ast.nodes { - match node { - SourceUnitPart::StructDefinition(def) => { - structs.push((def, None)); - } - SourceUnitPart::ContractDefinition(contract) => { - for node in &contract.nodes { - if let ContractDefinitionPart::StructDefinition(def) = node { - structs.push((def, Some(contract.name.clone()))); - } - } - } - _ => {} - } - } - structs.into_iter().map(|(def, parent)| ((path.as_path(), def.id), (def, parent))) - }) - .collect::>(); - - // Resolver for EIP712 schemas - let resolver = Resolver::new(&asts); - let mut structs_to_write = Vec::new(); + let Self { version, sources, target_path, config, project } = self; + let settings = config.solc_settings()?; let include = config.bind_json.include; let exclude = config.bind_json.exclude; + let root = config.root; - for ((path, id), (def, contract_name)) in structs { - // For some structs there's no schema (e.g. if they contain a mapping), so we just skip - // those. - let Some(schema) = resolver.resolve_struct_eip712(id)? 
else { continue }; + let input = SolcVersionedInput::build(sources, settings, SolcLanguage::Solidity, version); - if !include.is_empty() { - if !include.iter().any(|matcher| matcher.is_match(path)) { - continue; + let sess = Session::builder().with_stderr_emitter().build(); + let _ = sess.enter_parallel(|| -> Result<()> { + // Set up the parsing context with the project paths, without adding the source files + let mut parsing_context = solar_pcx_from_solc_project(&sess, &project, &input, false); + + let mut target_files = HashSet::new(); + for (path, source) in &input.input.sources { + if !include.is_empty() { + if !include.iter().any(|matcher| matcher.is_match(path)) { + continue; + } + } else { + // Exclude library files by default + if project.paths.has_library_ancestor(path) { + continue; + } } - } else { - // Exclude library files by default - if project.paths.has_library_ancestor(path) { + + if exclude.iter().any(|matcher| matcher.is_match(path)) { continue; } - } - if exclude.iter().any(|matcher| matcher.is_match(path)) { - continue; + if let Ok(src_file) = + sess.source_map().new_source_file(path.clone(), source.content.as_str()) + { + target_files.insert(src_file.stable_id); + parsing_context.add_file(src_file); + } } - structs_to_write.push(StructToWrite { - name: def.name.clone(), - contract_name, - path: path.to_path_buf(), - schema, + // Parse and resolve + let hir_arena = ThreadLocal::new(); + if let Some(gcx) = parsing_context.parse_and_lower(&hir_arena).map_err(|_| { + eyre!( + "error + parsing" + ) + })? 
{ + let hir = &gcx.get().hir; + let resolver = Resolver::new(hir); + for id in &resolver.struct_ids() { + if let Some(schema) = resolver.resolve_struct_eip712(*id) { + let def = hir.strukt(*id); + let source = hir.source(def.source); + + if !target_files.contains(&source.file.stable_id) { + continue; + } - // will be filled later - import_alias: None, - name_in_fns: String::new(), - }) - } + if let FileName::Real(ref path) = source.file.name { + structs_to_write.push(StructToWrite { + name: def.name.as_str().into(), + contract_name: def + .contract + .map(|id| hir.contract(id).name.as_str().into()), + path: path + .strip_prefix(&root) + .unwrap_or_else(|_| path) + .to_path_buf(), + schema, + + // will be filled later + import_alias: None, + name_in_fns: String::new(), + }); + } + } + } + } + Ok(()) + }); Ok(StructsState { structs_to_write, target_path }) } diff --git a/crates/forge/src/cmd/eip712.rs b/crates/forge/src/cmd/eip712.rs index 3c85840673d6a..6c84f102aaa3a 100644 --- a/crates/forge/src/cmd/eip712.rs +++ b/crates/forge/src/cmd/eip712.rs @@ -1,11 +1,12 @@ use clap::{Parser, ValueHint}; -use eyre::{Ok, OptionExt, Result}; -use foundry_cli::{opts::BuildOpts, utils::LoadConfig}; -use foundry_common::compile::ProjectCompiler; -use foundry_compilers::artifacts::{ - output_selection::OutputSelection, - visitor::{Visitor, Walk}, - ContractDefinition, EnumDefinition, SourceUnit, StructDefinition, TypeDescriptions, TypeName, +use eyre::{eyre, Result}; +use foundry_cli::opts::{solar_pcx_from_build_opts, BuildOpts}; +use solar_parse::interface::Session; +use solar_sema::{ + ast::LitKind, + hir::{Expr, ExprKind, ItemId, StructId, Type, TypeKind}, + thread_local::ThreadLocal, + Hir, }; use std::{collections::BTreeMap, fmt::Write, path::PathBuf}; @@ -24,232 +25,169 @@ pub struct Eip712Args { impl Eip712Args { pub fn run(self) -> Result<()> { - let config = self.load_config()?; - let mut project = config.ephemeral_project()?; - let target_path = 
dunce::canonicalize(self.target_path)?; - project.update_output_selection(|selection| { - *selection = OutputSelection::ast_output_selection(); - }); - - let output = ProjectCompiler::new().files([target_path.clone()]).compile(&project)?; - - // Collect ASTs by getting them from sources and converting into strongly typed - // `SourceUnit`s. - let asts = output - .into_output() - .sources - .into_iter() - .filter_map(|(path, mut sources)| Some((path, sources.swap_remove(0).source_file.ast?))) - .map(|(path, ast)| { - Ok((path, serde_json::from_str::(&serde_json::to_string(&ast)?)?)) - }) - .collect::>>()?; - - let resolver = Resolver::new(&asts); - - let target_ast = asts - .get(&target_path) - .ok_or_else(|| eyre::eyre!("Could not find AST for target file {target_path:?}"))?; - - let structs_in_target = { - let mut collector = StructCollector::default(); - target_ast.walk(&mut collector); - collector.0 - }; - - for id in structs_in_target.keys() { - if let Some(resolved) = resolver.resolve_struct_eip712(*id)? { - sh_println!("{resolved}\n")?; + let mut sess = Session::builder().with_stderr_emitter().build(); + sess.dcx = sess.dcx.set_flags(|flags| flags.track_diagnostics = false); + + let _ = sess.enter(|| -> Result<()> { + // Set up the parsing context with the project paths and sources. + let parsing_context = + solar_pcx_from_build_opts(&sess, self.build, Some(vec![self.target_path]))?; + + // Parse and resolve + let hir_arena = ThreadLocal::new(); + if let Some(gcx) = + parsing_context.parse_and_lower(&hir_arena).map_err(|_| eyre!("error parsing"))? + { + let hir = &gcx.get().hir; + + let resolver = Resolver::new(hir); + for id in &resolver.struct_ids() { + if let Some(resolved) = resolver.resolve_struct_eip712(*id) { + _ = sh_println!("{resolved}\n"); + } + } } - } - - Ok(()) - } -} -/// AST [Visitor] used for collecting struct definitions. 
-#[derive(Debug, Clone, Default)] -pub struct StructCollector(pub BTreeMap); + Ok(()) + }); -impl Visitor for StructCollector { - fn visit_struct_definition(&mut self, def: &StructDefinition) { - self.0.insert(def.id, def.clone()); + Ok(()) } } -/// Collects mapping from AST id of type definition to representation of this type for EIP-712 -/// encoding. +/// Generates the EIP-712 `encodeType` string for a given struct. /// -/// For now, maps contract definitions to `address` and enums to `uint8`. -#[derive(Debug, Clone, Default)] -struct SimpleCustomTypesCollector(BTreeMap); - -impl Visitor for SimpleCustomTypesCollector { - fn visit_contract_definition(&mut self, def: &ContractDefinition) { - self.0.insert(def.id, "address".to_string()); - } - - fn visit_enum_definition(&mut self, def: &EnumDefinition) { - self.0.insert(def.id, "uint8".to_string()); - } -} - -pub struct Resolver { - simple_types: BTreeMap, - structs: BTreeMap, +/// Requires a reference to the source HIR. +#[derive(Debug)] +pub struct Resolver<'hir> { + hir: &'hir Hir<'hir>, } -impl Resolver { - pub fn new(asts: &BTreeMap) -> Self { - let simple_types = { - let mut collector = SimpleCustomTypesCollector::default(); - asts.values().for_each(|ast| ast.walk(&mut collector)); - - collector.0 - }; - - let structs = { - let mut collector = StructCollector::default(); - asts.values().for_each(|ast| ast.walk(&mut collector)); - collector.0 - }; +impl<'hir> Resolver<'hir> { + /// Constructs a new [`Resolver`] for the supplied [`Hir`] instance. + pub fn new(hir: &'hir Hir<'hir>) -> Self { + Self { hir } + } - Self { simple_types, structs } + /// Returns the [`StructId`]s of every user-defined struct in source order. + pub fn struct_ids(&self) -> Vec { + self.hir.strukt_ids().collect() } - /// Converts a given struct definition into EIP-712 `encodeType` representation. + /// Converts a given struct into its EIP-712 `encodeType` representation. 
/// - /// Returns `None` if struct contains any fields that are not supported by EIP-712 (e.g. - /// mappings or function pointers). - pub fn resolve_struct_eip712(&self, id: usize) -> Result> { + /// Returns `None` if the struct, or any of its fields, contains constructs + /// not supported by EIP-712 (mappings, function types, errors, etc). + pub fn resolve_struct_eip712(&self, id: StructId) -> Option { let mut subtypes = BTreeMap::new(); - subtypes.insert(self.structs[&id].name.clone(), id); + subtypes.insert(self.hir.strukt(id).name.as_str().into(), id); self.resolve_eip712_inner(id, &mut subtypes, true, None) } fn resolve_eip712_inner( &self, - id: usize, - subtypes: &mut BTreeMap, + id: StructId, + subtypes: &mut BTreeMap, append_subtypes: bool, rename: Option<&str>, - ) -> Result> { - let def = &self.structs[&id]; - let mut result = format!("{}(", rename.unwrap_or(&def.name)); - - for (idx, member) in def.members.iter().enumerate() { - let Some(ty) = self.resolve_type( - member.type_name.as_ref().ok_or_eyre("missing type name")?, - subtypes, - )? 
- else { - return Ok(None) - }; - - write!(result, "{ty} {name}", name = member.name)?; - - if idx < def.members.len() - 1 { + ) -> Option { + let def = self.hir.strukt(id); + let mut result = format!("{}(", rename.unwrap_or(def.name.as_str())); + + for (idx, field_id) in def.fields.iter().enumerate() { + let field = self.hir.variable(*field_id); + let ty = self.resolve_type(&field.ty, subtypes)?; + + write!(result, "{ty} {name}", name = field.name?.as_str()).ok()?; + + if idx < def.fields.len() - 1 { result.push(','); } } result.push(')'); - if !append_subtypes { - return Ok(Some(result)) - } + if append_subtypes { + for (subtype_name, subtype_id) in + subtypes.iter().map(|(name, id)| (name.clone(), *id)).collect::>() + { + if subtype_id == id { + continue + } + let encoded_subtype = + self.resolve_eip712_inner(subtype_id, subtypes, false, Some(&subtype_name))?; - for (subtype_name, subtype_id) in - subtypes.iter().map(|(name, id)| (name.clone(), *id)).collect::>() - { - if subtype_id == id { - continue + result.push_str(&encoded_subtype); } - let Some(encoded_subtype) = - self.resolve_eip712_inner(subtype_id, subtypes, false, Some(&subtype_name))? - else { - return Ok(None) - }; - result.push_str(&encoded_subtype); } - Ok(Some(result)) + Some(result) } - /// Converts given [TypeName] into a type which can be converted to - /// [`alloy_dyn_abi::DynSolType`]. - /// - /// Returns `None` if the type is not supported for EIP712 encoding. - pub fn resolve_type( + fn resolve_type( &self, - type_name: &TypeName, - subtypes: &mut BTreeMap, - ) -> Result> { - match type_name { - TypeName::FunctionTypeName(_) | TypeName::Mapping(_) => Ok(None), - TypeName::ElementaryTypeName(ty) => Ok(Some(ty.name.clone())), - TypeName::ArrayTypeName(ty) => { - let Some(inner) = self.resolve_type(&ty.base_type, subtypes)? 
else { - return Ok(None) - }; - let len = parse_array_length(&ty.type_descriptions)?; - - Ok(Some(format!("{inner}[{}]", len.unwrap_or("")))) + ty: &'hir Type<'hir>, + subtypes: &mut BTreeMap, + ) -> Option { + match ty.kind { + TypeKind::Elementary(ty) => Some(ty.to_abi_str().to_string()), + TypeKind::Array(arr) => { + let inner_type = self.resolve_type(&arr.element, subtypes)?; + let size_str = arr.size.and_then(|expr| parse_array_size(expr)).unwrap_or_default(); + Some(format!("{inner_type}[{size_str}]")) } - TypeName::UserDefinedTypeName(ty) => { - if let Some(name) = self.simple_types.get(&(ty.referenced_declaration as usize)) { - Ok(Some(name.clone())) - } else if let Some(def) = self.structs.get(&(ty.referenced_declaration as usize)) { - let name = - // If we've already seen struct with this ID, just use assigned name. - if let Some((name, _)) = subtypes.iter().find(|(_, id)| **id == def.id) { - name.clone() - } else { - // Otherwise, assign new name. - let mut i = 0; - let mut name = def.name.clone(); - while subtypes.contains_key(&name) { - i += 1; - name = format!("{}_{i}", def.name); - } - - subtypes.insert(name.clone(), def.id); - - // iterate over members to check if they are resolvable and to populate subtypes - for member in &def.members { - if self.resolve_type( - member.type_name.as_ref().ok_or_eyre("missing type name")?, - subtypes, - )? 
- .is_none() - { - return Ok(None) + TypeKind::Custom(item_id) => { + match item_id { + // For now, map enums to `uint8` + ItemId::Enum(_) => Some("uint8".into()), + // For now, map contracts to `address` + ItemId::Contract(_) => Some("address".into()), + // Resolve user-defined type alias to the original type + ItemId::Udvt(id) => self.resolve_type(&self.hir.udvt(id).ty, subtypes), + // Recursively resolve structs + ItemId::Struct(id) => { + let def = self.hir.strukt(id); + let name = + // If the struct was already resolved, use its previously assigned name + if let Some((name, _)) = subtypes.iter().find(|(_, cached_id)| id == **cached_id) { + name.to_string() + } else { + // Otherwise, assign new name + let mut i = 0; + let mut name = def.name.as_str().into(); + while subtypes.contains_key(&name) { + i += 1; + name = format!("{}_{i}", def.name.as_str()); } - } - name - }; - return Ok(Some(name)) - } else { - return Ok(None) + subtypes.insert(name.clone(), id); + + // Recursively resolve fields to populate subtypes + for field_id in def.fields { + let field_ty = &self.hir.variable(*field_id).ty; + self.resolve_type(field_ty, subtypes)?; + } + name + }; + + Some(name) + } + // Rest of `ItemId` are not supported by EIP-712 + _ => None, } } + // EIP-712 doesn't support functions, mappings, nor errors + TypeKind::Mapping(_) | TypeKind::Function { .. 
} | TypeKind::Err(_) => None, } } } -fn parse_array_length(type_description: &TypeDescriptions) -> Result> { - let type_string = - type_description.type_string.as_ref().ok_or_eyre("missing typeString for array type")?; - let Some(inside_brackets) = - type_string.rsplit_once("[").and_then(|(_, right)| right.split("]").next()) - else { - eyre::bail!("failed to parse array type string: {type_string}") - }; - - if inside_brackets.is_empty() { - Ok(None) - } else { - Ok(Some(inside_brackets)) +fn parse_array_size<'hir>(expr: &Expr<'hir>) -> Option { + if let ExprKind::Lit(lit) = &expr.kind { + if let LitKind::Number(int) = &lit.kind { + return Some(int.to_string()); + } } + + None } diff --git a/crates/forge/tests/cli/eip712.rs b/crates/forge/tests/cli/eip712.rs index 9ec944631d9db..62aa63079eb4e 100644 --- a/crates/forge/tests/cli/eip712.rs +++ b/crates/forge/tests/cli/eip712.rs @@ -55,9 +55,6 @@ library Structs2 { cmd.forge_fuse().args(["eip712", path.to_string_lossy().as_ref()]).assert_success().stdout_eq( str![[r#" -[COMPILING_FILES] with [SOLC_VERSION] -[SOLC_VERSION] [ELAPSED] -No files changed, compilation skipped Foo(Bar bar)Art(uint256 id)Bar(Art art) Bar(Art art)Art(uint256 id) diff --git a/docs/dev/cheatcodes.md b/docs/dev/cheatcodes.md index 0815ca66bef50..0c96c4ba7c7f5 100644 --- a/docs/dev/cheatcodes.md +++ b/docs/dev/cheatcodes.md @@ -155,7 +155,7 @@ update of the files. 2. Implement the cheatcode in [`cheatcodes`] in its category's respective module. Follow the existing implementations as a guide. 3. If a struct, enum, error, or event was added to `Vm`, update [`spec::Cheatcodes::new`] 4. Update the JSON interface by running `cargo cheats` twice. This is expected to fail the first time that this is run after adding a new cheatcode; see [JSON interface](#json-interface) -5. Write an integration test for the cheatcode in [`testdata/cheats/`] +5. 
Write an integration test for the cheatcode in [`testdata/default/cheats/`] [`sol!`]: https://docs.rs/alloy-sol-macro/latest/alloy_sol_macro/macro.sol.html [`cheatcodes/spec/src/vm.rs`]: ../../crates/cheatcodes/spec/src/vm.rs