diff --git a/Cargo.lock b/Cargo.lock index d1af35aae..4fab62262 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5365,8 +5365,11 @@ version = "0.0.2" dependencies = [ "alloy-primitives", "foundry-compilers", + "foundry-zksync-compilers", + "foundry-zksync-core", "semver 1.0.23", "thiserror 2.0.6", + "tracing", ] [[package]] @@ -8452,7 +8455,7 @@ version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af1844ef2428cc3e1cb900be36181049ef3d3193c63e43026cfe202983b27a56" dependencies = [ - "proc-macro-crate 3.2.0", + "proc-macro-crate 1.3.1", "proc-macro2 1.0.92", "quote 1.0.37", "syn 2.0.90", @@ -9569,7 +9572,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "157c5a9d7ea5c2ed2d9fb8f495b64759f7816c7eaea54ba3978f0d63000162e3" dependencies = [ "anyhow", - "itertools 0.13.0", + "itertools 0.11.0", "proc-macro2 1.0.92", "quote 1.0.37", "syn 2.0.90", @@ -11756,7 +11759,7 @@ dependencies = [ "const-hex", "derive_builder", "dunce", - "itertools 0.13.0", + "itertools 0.11.0", "itoa", "lasso", "match_cfg", @@ -11792,7 +11795,7 @@ dependencies = [ "alloy-primitives", "bitflags 2.6.0", "bumpalo", - "itertools 0.13.0", + "itertools 0.11.0", "memchr", "num-bigint 0.4.6", "num-rational", diff --git a/crates/common/src/compile.rs b/crates/common/src/compile.rs index 9a408cdfc..c9d54835a 100644 --- a/crates/common/src/compile.rs +++ b/crates/common/src/compile.rs @@ -19,17 +19,14 @@ use foundry_compilers::{ solc::SolcSettings, Artifact, Project, ProjectBuilder, ProjectCompileOutput, ProjectPathsConfig, SolcConfig, }; -use foundry_zksync_compilers::{ - compilers::{ - artifact_output::zk::ZkArtifactOutput, - zksolc::{ZkSolc, ZkSolcCompiler}, - }, - libraries::{self, ZkMissingLibrary}, +use foundry_zksync_compilers::compilers::{ + artifact_output::zk::ZkArtifactOutput, + zksolc::{ZkSolc, ZkSolcCompiler}, }; use num_format::{Locale, ToFormattedString}; use std::{ - collections::{BTreeMap, HashSet}, + collections::BTreeMap, fmt::Display, io::IsTerminal, path::{Path, PathBuf}, @@ -336,7 +333,7 @@ impl ProjectCompiler { let zksolc_version = ZkSolc::get_version_for_path(&project.compiler.zksolc)?; Report::new(SpinnerReporter::spawn_with(format!("Using zksolc-{zksolc_version}"))); } - self.zksync_compile_with(&project.paths.root, || { + self.zksync_compile_with(|| { let files_to_compile = if !files.is_empty() { files } else { project.paths.input_files() }; let sources = Source::read_all(files_to_compile)?; @@ -349,7 +346,6 @@ impl ProjectCompiler { #[instrument(target = "forge::compile", skip_all)] fn zksync_compile_with( self, - root_path: impl AsRef, f: F, ) -> Result> where @@ -394,7 +390,7 @@ impl ProjectCompiler { sh_println!("{output}")?; } - self.zksync_handle_output(root_path, &output)?; + self.zksync_handle_output(&output)?; } Ok(output) @@ -403,71 +399,11 @@ impl ProjectCompiler { /// If configured, this will print sizes or names fn zksync_handle_output( &self, - root_path: impl AsRef, output: &ProjectCompileOutput, ) -> Result<()> { let print_names = self.print_names.unwrap_or(false); let print_sizes = self.print_sizes.unwrap_or(false); - // Process missing libraries - // TODO: skip this if project was not compiled using --detect-missing-libraries - let mut missing_libs_unique: HashSet = HashSet::new(); - for (artifact_id, artifact) in output.artifact_ids() { - // TODO: when compiling specific files, the output might still add cached artifacts - // that are not part of the file list to the output, which may cause missing libraries - // error to 
trigger for files that were not intended to be compiled. - // This behaviour needs to be investigated better on the foundry-compilers side. - // For now we filter, checking only the files passed to compile. - let is_target_file = - self.files.is_empty() || self.files.iter().any(|f| artifact_id.path == *f); - if is_target_file { - if let Some(mls) = artifact.missing_libraries() { - missing_libs_unique.extend(mls.clone()); - } - } - } - - let missing_libs: Vec = missing_libs_unique - .into_iter() - .map(|ml| { - let mut split = ml.split(':'); - let contract_path = - split.next().expect("Failed to extract contract path for missing library"); - let contract_name = - split.next().expect("Failed to extract contract name for missing library"); - - let mut abs_path_buf = PathBuf::new(); - abs_path_buf.push(root_path.as_ref()); - abs_path_buf.push(contract_path); - - let art = output.find(abs_path_buf.as_path(), contract_name).unwrap_or_else(|| { - panic!( - "Could not find contract {contract_name} at path {contract_path} for compilation output" - ) - }); - - ZkMissingLibrary { - contract_path: contract_path.to_string(), - contract_name: contract_name.to_string(), - missing_libraries: art.missing_libraries().cloned().unwrap_or_default(), - } - }) - .collect(); - - if !missing_libs.is_empty() { - libraries::add_dependencies_to_missing_libraries_cache( - root_path, - missing_libs.as_slice(), - ) - .expect("Error while adding missing libraries"); - let missing_libs_list = missing_libs - .iter() - .map(|ml| format!("{}:{}", ml.contract_path, ml.contract_name)) - .collect::>() - .join(", "); - eyre::bail!("Missing libraries detected: {missing_libs_list}\n\nRun the following command in order to deploy each missing library:\n\nforge create --private-key --rpc-url --chain --zksync\n\nThen pass the library addresses using the --libraries option"); - } - // print any sizes or names if print_names { let mut artifacts: BTreeMap<_, Vec<_>> = BTreeMap::new(); diff --git a/crates/evm/evm/src/executors/mod.rs b/crates/evm/evm/src/executors/mod.rs index 158fdac8d..2839dfc83 100644 --- a/crates/evm/evm/src/executors/mod.rs +++ b/crates/evm/evm/src/executors/mod.rs @@ -303,6 +303,20 @@ impl Executor { self.deploy_with_env(env, rd) } + /// Deploys a library contract and commits the new state to the underlying database. + /// + /// Executes a CREATE transaction with the contract `code` and persistent database state + /// modifications. + pub fn deploy_library( + &mut self, + from: Address, + code: Bytes, + value: U256, + rd: Option<&RevertDecoder>, + ) -> Result { + self.strategy.runner.deploy_library(self, from, code, value, rd) + } + /// Deploys a contract using the given `env` and commits the new state to the underlying /// database. /// @@ -672,7 +686,7 @@ impl Executor { /// /// If using a backend with cheatcodes, `tx.gas_price` and `block.number` will be overwritten by /// the cheatcode state in between calls. 
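    /// NOTE(zk): usage sketch. With `build_test_env` now `pub`, a strategy runner can
    /// assemble a CREATE env itself and hand it to `deploy_with_env`, mirroring the zksync
    /// strategy's `deploy_library` later in this diff (`from`, `create_params`, `value` and
    /// `rd` here are stand-ins):
    ///
    /// ```ignore
    /// let env = executor.build_test_env(from, TxKind::Create, create_params.into(), value);
    /// let result = executor.deploy_with_env(env, rd)?;
    /// ```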
- fn build_test_env( + pub fn build_test_env( &self, caller: Address, transact_to: TxKind, diff --git a/crates/evm/evm/src/executors/strategy.rs b/crates/evm/evm/src/executors/strategy.rs index a719e841a..0bd2b9334 100644 --- a/crates/evm/evm/src/executors/strategy.rs +++ b/crates/evm/evm/src/executors/strategy.rs @@ -1,12 +1,15 @@ use std::{any::Any, fmt::Debug}; -use alloy_primitives::{Address, U256}; +use alloy_primitives::{Address, Bytes, U256}; use alloy_serde::OtherFields; use eyre::Result; use foundry_cheatcodes::strategy::{ CheatcodeInspectorStrategy, EvmCheatcodeInspectorStrategyRunner, }; -use foundry_evm_core::backend::{strategy::BackendStrategy, Backend, BackendResult, CowBackend}; +use foundry_evm_core::{ + backend::{strategy::BackendStrategy, Backend, BackendResult, CowBackend}, + decode::RevertDecoder, +}; use foundry_zksync_compilers::dual_compiled_contracts::DualCompiledContracts; use revm::{ primitives::{Env, EnvWithHandlerCfg, ResultAndState}, @@ -15,7 +18,7 @@ use revm::{ use crate::inspectors::InspectorStack; -use super::Executor; +use super::{DeployResult, EvmError, Executor}; pub trait ExecutorStrategyContext: Debug + Send + Sync + Any { /// Clone the strategy context. @@ -68,9 +71,23 @@ pub trait ExecutorStrategyRunner: Debug + Send + Sync + ExecutorStrategyExt { amount: U256, ) -> BackendResult<()>; + fn get_balance(&self, executor: &mut Executor, address: Address) -> BackendResult; + fn set_nonce(&self, executor: &mut Executor, address: Address, nonce: u64) -> BackendResult<()>; + fn get_nonce(&self, executor: &mut Executor, address: Address) -> BackendResult; + + /// Deploys a library, applying state changes + fn deploy_library( + &self, + executor: &mut Executor, + from: Address, + code: Bytes, + value: U256, + rd: Option<&RevertDecoder>, + ) -> Result; + /// Execute a transaction and *WITHOUT* applying state changes. fn call( &self, @@ -110,6 +127,13 @@ pub trait ExecutorStrategyExt { ) { } + fn zksync_get_mut_dual_compiled_contracts<'a>( + &self, + _ctx: &'a mut dyn ExecutorStrategyContext, + ) -> Option<&'a mut DualCompiledContracts> { + None + } + /// Set the fork environment on the context. fn zksync_set_fork_env( &self, @@ -153,6 +177,10 @@ impl ExecutorStrategyRunner for EvmExecutorStrategyRunner { Ok(()) } + fn get_balance(&self, executor: &mut Executor, address: Address) -> BackendResult { + executor.get_balance(address) + } + fn set_nonce( &self, executor: &mut Executor, @@ -166,6 +194,22 @@ impl ExecutorStrategyRunner for EvmExecutorStrategyRunner { Ok(()) } + fn get_nonce(&self, executor: &mut Executor, address: Address) -> BackendResult { + executor.get_nonce(address) + } + + /// Deploys a library, applying state changes + fn deploy_library( + &self, + executor: &mut Executor, + from: Address, + code: Bytes, + value: U256, + rd: Option<&RevertDecoder>, + ) -> Result { + executor.deploy(from, code, value, rd) + } + fn call( &self, _ctx: &dyn ExecutorStrategyContext, diff --git a/crates/forge/bin/cmd/create.rs b/crates/forge/bin/cmd/create.rs index 45694252e..eea67040d 100644 --- a/crates/forge/bin/cmd/create.rs +++ b/crates/forge/bin/cmd/create.rs @@ -170,10 +170,10 @@ impl CreateArgs { let (artifact, id) = remove_zk_contract(&mut zk_output, &target_path, &self.contract.name)?; - let ZkContractArtifact { bytecode, factory_dependencies, abi, .. } = artifact; + let ZkContractArtifact { bytecode, abi, factory_dependencies, .. 
} = &artifact; - let abi = abi.expect("Abi not found"); - let bin = bytecode.expect("Bytecode not found"); + let abi = abi.clone().expect("Abi not found"); + let bin = bytecode.as_ref().expect("Bytecode not found"); let bytecode = match bin.object() { BytecodeObject::Bytecode(bytes) => bytes.to_vec(), @@ -220,7 +220,7 @@ impl CreateArgs { let factory_deps: Vec> = { let factory_dependencies_map = - factory_dependencies.expect("factory deps not found"); + factory_dependencies.as_ref().expect("factory deps not found"); let mut visited_paths = HashSet::new(); let mut visited_bytecodes = HashSet::new(); let mut queue = VecDeque::new(); @@ -248,12 +248,12 @@ impl CreateArgs { ) }); let fdep_fdeps_map = - fdep_art.factory_dependencies.clone().expect("factory deps not found"); + fdep_art.factory_dependencies.as_ref().expect("factory deps not found"); for dep in fdep_fdeps_map.values() { queue.push_back(dep.clone()) } - // TODO(zk): ensure factory deps are also linked + // NOTE(zk): unlinked factory deps don't show up in `factory_dependencies` let fdep_bytecode = fdep_art .bytecode .clone() diff --git a/crates/forge/src/multi_runner.rs b/crates/forge/src/multi_runner.rs index 1e2ca9854..fe381944f 100644 --- a/crates/forge/src/multi_runner.rs +++ b/crates/forge/src/multi_runner.rs @@ -5,13 +5,11 @@ use crate::{ TestFilter, }; use alloy_json_abi::{Function, JsonAbi}; -use alloy_primitives::{Address, Bytes, U256}; +use alloy_primitives::{keccak256, Address, Bytes, B256, U256}; use eyre::Result; use foundry_common::{get_contract_name, shell::verbosity, ContractsByArtifact, TestFunctionExt}; use foundry_compilers::{ - artifacts::{ - CompactBytecode, CompactContractBytecode, CompactDeployedBytecode, Contract, Libraries, - }, + artifacts::{Contract, Libraries}, compilers::Compiler, Artifact, ArtifactId, ProjectCompileOutput, }; @@ -27,8 +25,10 @@ use foundry_evm::{ traces::{InternalTraceMode, TraceMode}, }; use foundry_linking::{LinkOutput, Linker}; +use foundry_zksync_core::hash_bytecode; use rayon::prelude::*; use revm::primitives::SpecId; +use zksync_types::H256; use std::{ borrow::Borrow, @@ -39,8 +39,9 @@ use std::{ time::Instant, }; -use foundry_zksync_compilers::compilers::{ - artifact_output::zk::ZkArtifactOutput, zksolc::ZkSolcCompiler, +use foundry_zksync_compilers::{ + compilers::{artifact_output::zk::ZkArtifactOutput, zksolc::ZkSolcCompiler}, + dual_compiled_contracts::DualCompiledContract, }; #[derive(Debug, Clone)] @@ -486,14 +487,20 @@ impl MultiContractRunnerBuilder { zk_output: Option>, env: revm::primitives::Env, evm_opts: EvmOpts, - strategy: ExecutorStrategy, + mut strategy: ExecutorStrategy, ) -> Result { + // TODO(zk): move linking to executor strategy + let contracts = output .artifact_ids() .map(|(id, v)| (id.with_stripped_file_prefixes(root), v)) .collect(); let linker = Linker::new(root, contracts); + let zk_output = zk_output.map(|zk| zk.with_stripped_file_prefixes(&root)); + let zk_linker = + zk_output.as_ref().map(|output| Linker::new(root, output.artifact_ids().collect())); + // Build revert decoder from ABIs of all artifacts. 
let abis = linker .contracts @@ -508,11 +515,88 @@ impl MultiContractRunnerBuilder { linker.contracts.keys(), )?; + let zk_libs = zk_linker + .as_ref() + .map(|zk| { + zk.zk_link_with_nonce_or_address( + Default::default(), + LIBRARY_DEPLOYER, + // NOTE(zk): match with EVM nonces as we will be doing a duplex deployment for + // the libs + 0, + zk.contracts.keys(), + ) + .map(|output| + + // NOTE(zk): zk_linked_contracts later will also contain + // `libs_to_deploy` bytecodes, so those will + // get registered in DualCompiledContracts + + output.libraries) + }) + .transpose()?; + let linked_contracts = linker.get_linked_artifacts(&libraries)?; + let zk_linked_contracts = zk_linker + .as_ref() + .and_then(|linker| zk_libs.as_ref().map(|libs| (linker, libs))) + .map(|(zk, libs)| zk.zk_get_linked_artifacts(zk.contracts.keys(), libs)) + .transpose()?; + + if let Some(dual_compiled_contracts) = + strategy.runner.zksync_get_mut_dual_compiled_contracts(strategy.context.as_mut()) + { + let newly_linked_dual_compiled_contracts = zk_linked_contracts + .iter() + .flat_map(|arts| arts.iter()) + .flat_map(|(needle, zk)| { + linked_contracts + .iter() + .find(|(id, _)| id.source == needle.source && id.name == needle.name) + .map(|(_, evm)| (needle, zk, evm)) + }) + .filter(|(_, zk, evm)| zk.bytecode.is_some() && evm.bytecode.is_some()) + .map(|(id, linked_zk, evm)| { + let (_, unlinked_zk_artifact) = zk_output + .as_ref() + .unwrap() + .artifact_ids() + .find(|(zk_id, _)| zk_id == id) + .unwrap(); + let zk_bytecode = linked_zk.get_bytecode_bytes().unwrap(); + let zk_hash = hash_bytecode(&zk_bytecode); + let evm = evm.get_bytecode_bytes().unwrap(); + let contract = DualCompiledContract { + name: id.name.clone(), + zk_bytecode_hash: zk_hash, + zk_deployed_bytecode: zk_bytecode.to_vec(), + // FIXME: retrieve unlinked factory deps (1.5.9) + zk_factory_deps: vec![zk_bytecode.to_vec()], + evm_bytecode_hash: B256::from_slice(&keccak256(evm.as_ref())[..]), + evm_deployed_bytecode: evm.to_vec(), // FIXME: is this ok? not really used + evm_bytecode: evm.to_vec(), + }; + + // populate factory deps that were already linked + dual_compiled_contracts.extend_factory_deps_by_hash( + contract, + unlinked_zk_artifact.factory_dependencies.iter().flatten().map( + |(_, hash)| { + H256::from_slice( + alloy_primitives::hex::decode(hash).unwrap().as_slice(), + ) + }, + ), + ) + }); + + dual_compiled_contracts + .extend(newly_linked_dual_compiled_contracts.collect::<Vec<_>>()); + } + // FIXME: is this comment outdated?
I don't see the library deployment code anywhere // Create a mapping of name => (abi, deployment code, Vec) let mut deployable_contracts = DeployableContracts::default(); - for (id, contract) in linked_contracts.iter() { let Some(abi) = &contract.abi else { continue }; @@ -534,36 +618,7 @@ impl MultiContractRunnerBuilder { let mut known_contracts = ContractsByArtifact::default(); if zk_output.is_none() { known_contracts = ContractsByArtifact::new(linked_contracts); - } else if let Some(zk_output) = zk_output { - let zk_contracts = zk_output.with_stripped_file_prefixes(root).into_artifacts(); - let mut zk_contracts_map = BTreeMap::new(); - - for (id, contract) in zk_contracts { - if let Some(abi) = contract.abi { - let bytecode = contract.bytecode.as_ref(); - - // TODO(zk): retrieve link_references - if let Some(bytecode_object) = bytecode.map(|b| b.object()) { - let compact_bytecode = CompactBytecode { - object: bytecode_object.clone(), - source_map: None, - link_references: BTreeMap::new(), - }; - let compact_contract = CompactContractBytecode { - abi: Some(abi), - bytecode: Some(compact_bytecode.clone()), - deployed_bytecode: Some(CompactDeployedBytecode { - bytecode: Some(compact_bytecode), - immutable_references: BTreeMap::new(), - }), - }; - zk_contracts_map.insert(id.clone(), compact_contract); - } - } else { - warn!("Abi not found for contract {}", id.identifier()); - } - } - + } else if let Some(mut zk_contracts_map) = zk_linked_contracts { // Extend zk contracts with solc contracts as well. This is required for traces to // accurately detect contract names deployed in EVM mode, and when using // `vm.zkVmSkip()` cheatcode. diff --git a/crates/forge/src/runner.rs b/crates/forge/src/runner.rs index 496cda0ca..0ccb08d7f 100644 --- a/crates/forge/src/runner.rs +++ b/crates/forge/src/runner.rs @@ -124,7 +124,7 @@ impl<'a> ContractRunner<'a> { let mut result = TestSetup::default(); for code in self.mcr.libs_to_deploy.iter() { - let deploy_result = self.executor.deploy( + let deploy_result = self.executor.deploy_library( LIBRARY_DEPLOYER, code.clone(), U256::ZERO, diff --git a/crates/linking/Cargo.toml b/crates/linking/Cargo.toml index 15d0d113b..f369c62d6 100644 --- a/crates/linking/Cargo.toml +++ b/crates/linking/Cargo.toml @@ -18,3 +18,8 @@ foundry-compilers = { workspace = true, features = ["full"] } semver.workspace = true alloy-primitives = { workspace = true, features = ["rlp"] } thiserror.workspace = true +tracing.workspace = true + +# zk linking utils +foundry-zksync-core.workspace = true +foundry-zksync-compilers.workspace = true diff --git a/crates/linking/src/lib.rs b/crates/linking/src/lib.rs index e44ee7748..0b2e7d3d5 100644 --- a/crates/linking/src/lib.rs +++ b/crates/linking/src/lib.rs @@ -5,15 +5,26 @@ #![cfg_attr(not(test), warn(unused_crate_dependencies))] #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] -use alloy_primitives::{Address, Bytes, B256}; +use alloy_primitives::{ + hex::FromHex, + map::{HashMap, HashSet}, + Address, Bytes, B256, +}; use foundry_compilers::{ - artifacts::{CompactContractBytecodeCow, Libraries}, + artifacts::{ + BytecodeObject, CompactBytecode, CompactContractBytecode, CompactContractBytecodeCow, + CompactDeployedBytecode, Libraries, + }, contracts::ArtifactContracts, Artifact, ArtifactId, }; +use foundry_zksync_compilers::{ + compilers::zksolc::{ZkSolc, ZkSolcCompiler}, + link::{self as zk_link, MissingLibrary}, +}; use semver::Version; use std::{ - collections::{BTreeMap, BTreeSet}, + collections::{BTreeMap, BTreeSet, VecDeque}, 
path::{Path, PathBuf}, str::FromStr, }; @@ -31,6 +42,18 @@ pub enum LinkerError { CyclicDependency, } +/// Errors that can occur during linking. +#[derive(Debug, thiserror::Error)] +pub enum ZkLinkerError { + #[error(transparent)] + Inner(#[from] LinkerError), + #[error("unable to fully link due to missing libraries")] + MissingLibraries(BTreeSet), + #[error("unable to fully link due to unlinked factory dependencies")] + MissingFactoryDeps(BTreeSet), +} + +#[derive(Debug)] pub struct Linker<'a> { /// Root of the project, used to determine whether artifact/library path can be stripped. pub root: PathBuf, @@ -181,6 +204,60 @@ impl<'a> Linker<'a> { Ok(LinkOutput { libraries, libs_to_deploy }) } + /// Links given artifact with either given library addresses or address computed from sender and + /// nonce. + /// + /// Each key in `libraries` should either be a global path or relative to project root. All + /// remappings should be resolved. + /// + /// When calling for `target` being an external library itself, you should check that `target` + /// does not appear in `libs_to_deploy` to avoid deploying it twice. It may happen in cases + /// when there is a dependency cycle including `target`. + pub fn zk_link_with_nonce_or_address( + &'a self, + libraries: Libraries, + sender: Address, + mut nonce: u64, + targets: impl IntoIterator, + ) -> Result { + // Library paths in `link_references` keys are always stripped, so we have to strip + // user-provided paths to be able to match them correctly. + let mut libraries = libraries.with_stripped_file_prefixes(self.root.as_path()); + + let mut needed_libraries = BTreeSet::new(); + for target in targets { + self.collect_dependencies(target, &mut needed_libraries)?; + } + + let mut libs_to_deploy = Vec::new(); + + // If `libraries` does not contain needed dependency, compute its address and add to + // `libs_to_deploy`. + for id in needed_libraries { + let (lib_path, lib_name) = self.convert_artifact_id_to_lib_path(id); + + libraries.libs.entry(lib_path).or_default().entry(lib_name).or_insert_with(|| { + let address = foundry_zksync_core::compute_create_address(sender, nonce); + libs_to_deploy.push((id, address)); + nonce += 1; + + address.to_checksum(None) + }); + } + + // Link and collect bytecodes for `libs_to_deploy`. + let libs_to_deploy = self + .zk_get_linked_artifacts(libs_to_deploy.into_iter().map(|(id, _)| id), &libraries)? + .into_iter() + .map(|(_, linked)| linked.get_bytecode_bytes().unwrap().into_owned()) + .collect(); + + Ok(LinkOutput { libraries, libs_to_deploy }) + } + + // TODO(zk): zk_link_with_create2 + // a bit more difficult due to the lack of bytecode + // until the contract is fully linked pub fn link_with_create2( &'a self, libraries: Libraries, @@ -265,6 +342,100 @@ impl<'a> Linker<'a> { Ok(contract) } + /// Links given artifact with given libraries. 
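    /// Sketch of the expected call shape (this mirrors how `zk_get_linked_artifacts` below
    /// drives it; `contracts`, `target_id` and `libraries` are placeholders for values
    /// already stripped to the project root):
    ///
    /// ```ignore
    /// let linked = Linker::zk_link(&contracts, &target_id, &libraries)?;
    /// let bytecode = linked.get_bytecode_bytes();
    /// ```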
+ // TODO(zk): improve interface to reflect batching operation (all bytecodes in all bytecodes + // out) + pub fn zk_link( + contracts: &ArtifactContracts>, + target: &ArtifactId, + libraries: &Libraries, + ) -> Result, ZkLinkerError> { + let artifact_to_link_id = |id: &ArtifactId| format!("{}:{}", id.source.display(), id.name); + + // collect bytecodes & libraries for input to zksolc_link + let bytecodes = contracts + .iter() + .filter_map(|(id, bytecode)| { + let link_id = artifact_to_link_id(id); + let object = bytecode.bytecode.as_ref().map(|bc| bc.object.clone())?; + + let bytes = match object { + BytecodeObject::Bytecode(bytes) => bytes, + BytecodeObject::Unlinked(unlinked) => { + alloy_primitives::hex::decode(unlinked).unwrap().into() + } + }; + + Some((link_id, bytes)) + }) + .collect::>(); + + let libraries = libraries + .libs + .iter() + .flat_map(|(file, libs)| { + libs.iter() + .map(|(name, address)| (file.to_string_lossy(), name.clone(), address.clone())) + }) + .map(|(filename, name, address)| zk_link::Library { + filename: filename.into_owned(), + name, + address: Address::from_hex(address).unwrap(), + }) + .collect::>(); + + let zksolc = ZkSolcCompiler { + // NOTE(zk): zksolc --link --standard-json requires >1.5.8 + // FIXME(zk): compiler from config + zksolc: ZkSolc::get_path_for_version(&Version::new(1, 5, 8)).unwrap(), + solc: Default::default(), + }; + let mut link_output = + zk_link::zksolc_link(&zksolc, zk_link::LinkJsonInput { bytecodes, libraries }) + .expect("able to call zksolc --link"); // FIXME: proper error check + + let link_id = &artifact_to_link_id(target); + + let mut contract = contracts.get(target).ok_or(LinkerError::MissingTargetArtifact)?.clone(); + + if let Some(unlinked) = link_output.unlinked.remove(link_id) { + tracing::error!(factory_dependencies = ?unlinked.factory_dependencies, libraries = ?unlinked.linker_symbols, "unmet linking dependencies"); + + if !unlinked.linker_symbols.is_empty() { + return Err(ZkLinkerError::MissingLibraries( + unlinked.linker_symbols.into_iter().collect(), + )); + } + return Err(ZkLinkerError::MissingFactoryDeps( + unlinked.factory_dependencies.into_iter().collect(), + )); + } + + let linked_output = + link_output.linked.remove(link_id).or_else(|| link_output.ignored.remove(link_id)); + + // NOTE(zk): covers intermittent issue where fully linked bytecode was + // not being returned in `ignored` (or `linked`). + // The check above should catch if the bytecode remains unlinked + let Some(linked) = linked_output else { + return Ok(contract); + }; + + let mut compact_bytecode = CompactBytecode::empty(); + compact_bytecode.object = BytecodeObject::Bytecode( + alloy_primitives::hex::decode(&linked.bytecode).unwrap().into(), + ); + + let mut compact_deployed_bytecode = CompactDeployedBytecode::empty(); + compact_deployed_bytecode.bytecode.replace(compact_bytecode.clone()); + + // TODO(zk): maybe return bytecode hash? 
+ contract.bytecode.replace(std::borrow::Cow::Owned(compact_bytecode)); + contract.deployed_bytecode.replace(std::borrow::Cow::Owned(compact_deployed_bytecode)); + + Ok(contract) + } + pub fn get_linked_artifacts( &self, libraries: &Libraries, @@ -272,6 +443,59 @@ impl<'a> Linker<'a> { self.contracts.keys().map(|id| Ok((id.clone(), self.link(id, libraries)?))).collect() } + pub fn zk_get_linked_artifacts<'b>( + &self, + targets: impl IntoIterator, + libraries: &Libraries, + ) -> Result { + let mut targets = targets.into_iter().cloned().collect::>(); + let mut contracts = self.contracts.clone(); + let mut linked_artifacts = vec![]; + + // FIXME(zk): determine if this loop is still needed like this + while let Some(id) = targets.pop_front() { + match Self::zk_link(&contracts, &id, &libraries) { + Ok(linked) => { + // persist linked contract for successive iterations + *contracts.entry(id.clone()).or_default() = linked.clone(); + + linked_artifacts.push((id.clone(), CompactContractBytecode::from(linked))); + } + Err(ZkLinkerError::MissingFactoryDeps(fdeps)) => { + // attempt linking again if some factory dep remains unlinked + // this is just in the case where a previously unlinked factory dep + // is linked with the same run as `id` would be linked + // and instead `id` remains unlinked + // FIXME(zk): might be unnecessary, observed when paths were wrong + let mut ids = fdeps + .into_iter() + .inspect(|fdep| { + dbg!(&fdep); + }) + .flat_map(|fdep| { + contracts.iter().find(|(id, _)| { + id.source.as_path() == Path::new(fdep.filename.as_str()) && + &id.name == &fdep.library + }) + }) + .map(|(id, _)| id.clone()) + .peekable(); + + // if we have no dep ids then we avoid + // queueing our own id to avoid infinite loop + // TODO(zk): find a better way to avoid issues later + if let Some(_) = ids.peek() { + targets.extend(ids); // queue factory deps for linking + targets.push_back(id); // reque original target + } + } + Err(err) => return Err(err), + } + } + + Ok(linked_artifacts.into_iter().collect()) + } + pub fn get_linked_artifacts_cow( &self, libraries: &Libraries, diff --git a/crates/strategy/zksync/src/executor.rs b/crates/strategy/zksync/src/executor.rs index 3679631fd..69f1279f1 100644 --- a/crates/strategy/zksync/src/executor.rs +++ b/crates/strategy/zksync/src/executor.rs @@ -1,23 +1,29 @@ -use alloy_primitives::{Address, U256}; +use alloy_primitives::{Address, Bytes, TxKind, U256}; use alloy_rpc_types::serde_helpers::OtherFields; -use alloy_zksync::provider::{zksync_provider, ZksyncProvider}; +use alloy_zksync::{ + contracts::l2::contract_deployer::CONTRACT_DEPLOYER_ADDRESS, + provider::{zksync_provider, ZksyncProvider}, +}; use eyre::Result; use foundry_evm::{ - backend::{Backend, BackendResult, CowBackend}, + backend::{Backend, BackendResult, CowBackend, DatabaseExt}, + decode::RevertDecoder, executors::{ strategy::{ EvmExecutorStrategyRunner, ExecutorStrategy, ExecutorStrategyContext, ExecutorStrategyExt, ExecutorStrategyRunner, }, - Executor, + DeployResult, EvmError, Executor, }, inspectors::InspectorStack, }; use foundry_zksync_compilers::dual_compiled_contracts::DualCompiledContracts; -use foundry_zksync_core::{vm::ZkEnv, ZkTransactionMetadata, ZKSYNC_TRANSACTION_OTHER_FIELDS_KEY}; +use foundry_zksync_core::{ + encode_create_params, vm::ZkEnv, ZkTransactionMetadata, ZKSYNC_TRANSACTION_OTHER_FIELDS_KEY, +}; use revm::{ - primitives::{Env, EnvWithHandlerCfg, ResultAndState}, + primitives::{CreateScheme, Env, EnvWithHandlerCfg, Output, ResultAndState}, Database, }; @@ -52,6 
+58,23 @@ impl ExecutorStrategyContext for ZksyncExecutorStrategyContext { #[derive(Debug, Default, Clone)] pub struct ZksyncExecutorStrategyRunner; +impl ZksyncExecutorStrategyRunner { + fn set_deployment_nonce( + executor: &mut Executor, + address: Address, + nonce: u64, + ) -> BackendResult<()> { + let (address, slot) = foundry_zksync_core::state::get_nonce_storage(address); + // fetch the full nonce to preserve account's tx nonce + let full_nonce = executor.backend.storage(address, slot)?; + let full_nonce = foundry_zksync_core::state::parse_full_nonce(full_nonce); + let new_full_nonce = foundry_zksync_core::state::new_full_nonce(full_nonce.tx_nonce, nonce); + executor.backend.insert_account_storage(address, slot, new_full_nonce)?; + + Ok(()) + } +} + fn get_context_ref(ctx: &dyn ExecutorStrategyContext) -> &ZksyncExecutorStrategyContext { ctx.as_any_ref().downcast_ref().expect("expected ZksyncExecutorStrategyContext") } @@ -75,6 +98,13 @@ impl ExecutorStrategyRunner for ZksyncExecutorStrategyRunner { Ok(()) } + fn get_balance(&self, executor: &mut Executor, address: Address) -> BackendResult { + let (address, slot) = foundry_zksync_core::state::get_balance_storage(address); + let balance = executor.backend.storage(address, slot)?; + + Ok(balance) + } + fn set_nonce( &self, executor: &mut Executor, @@ -94,6 +124,76 @@ impl ExecutorStrategyRunner for ZksyncExecutorStrategyRunner { Ok(()) } + fn get_nonce(&self, executor: &mut Executor, address: Address) -> BackendResult { + let (address, slot) = foundry_zksync_core::state::get_nonce_storage(address); + let full_nonce = executor.backend.storage(address, slot)?; + let full_nonce = foundry_zksync_core::state::parse_full_nonce(full_nonce); + + Ok(full_nonce.tx_nonce) + } + + fn deploy_library( + &self, + executor: &mut Executor, + from: Address, + code: Bytes, + value: U256, + rd: Option<&RevertDecoder>, + ) -> Result { + // sync deployer account info + let nonce = EvmExecutorStrategyRunner.get_nonce(executor, from).expect("deployer to exist"); + let balance = + EvmExecutorStrategyRunner.get_balance(executor, from).expect("deployer to exist"); + + Self::set_deployment_nonce(executor, from, nonce).map_err(|err| eyre::eyre!(err))?; + self.set_balance(executor, from, balance).map_err(|err| eyre::eyre!(err))?; + tracing::debug!(?nonce, ?balance, sender = ?from, "deploying lib in EraVM"); + + // TODO(zk): determine how to return also relevant information for the EVM deployment + let evm_deployment = EvmExecutorStrategyRunner.deploy_library( + executor, + from, + code.clone(), + value, + rd.clone(), + )?; + + let ctx = get_context(executor.strategy.context.as_mut()); + + // lookup dual compiled contract based on EVM bytecode + let Some(dual_contract) = ctx.dual_compiled_contracts.find_by_evm_bytecode(code.as_ref()) + else { + // we don't know what the equivalent zk contract would be + return Ok(evm_deployment); + }; + + // no need for constructor args as it's a lib + let create_params = + encode_create_params(&CreateScheme::Create, dual_contract.zk_bytecode_hash, vec![]); + + // populate ctx.transaction_context with factory deps + // we also populate the ctx so the deployment is executed + // entirely in EraVM + let factory_deps = ctx.dual_compiled_contracts.fetch_all_factory_deps(dual_contract); + + // persist existing paymaster data (needed?) 
+ let paymaster_data = + ctx.transaction_context.take().and_then(|metadata| metadata.paymaster_data); + ctx.transaction_context = Some(ZkTransactionMetadata { factory_deps, paymaster_data }); + + // eravm_env: call to ContractDeployer w/ properly encoded calldata + let env = executor.build_test_env( + from, + // foundry_zksync_core::vm::runner::transact takes care of using the ContractDeployer + // address + TxKind::Create, + create_params.into(), + value, + ); + + executor.deploy_with_env(env, rd) + } + fn new_backend_strategy(&self) -> foundry_evm_core::backend::strategy::BackendStrategy { foundry_evm_core::backend::strategy::BackendStrategy::new_zksync() } @@ -175,6 +275,14 @@ impl ExecutorStrategyExt for ZksyncExecutorStrategyRunner { ctx.dual_compiled_contracts = dual_compiled_contracts; } + fn zksync_get_mut_dual_compiled_contracts<'a>( + &self, + ctx: &'a mut dyn ExecutorStrategyContext, + ) -> Option<&'a mut DualCompiledContracts> { + let ctx = get_context(ctx); + Some(&mut ctx.dual_compiled_contracts) + } + fn zksync_set_fork_env( &self, ctx: &mut dyn ExecutorStrategyContext, diff --git a/crates/zksync/compiler/src/lib.rs b/crates/zksync/compiler/src/lib.rs new file mode 100644 index 000000000..a9448b609 --- /dev/null +++ b/crates/zksync/compiler/src/lib.rs @@ -0,0 +1,230 @@ +//! # foundry-zksync +//! +//! Main Foundry ZKSync implementation. +#![warn(missing_docs, unused_crate_dependencies)] + +/// ZKSolc specific logic. +mod zksolc; +pub use zksolc::*; + +use std::path::PathBuf; + +use foundry_config::{Config, SkipBuildFilters, SolcReq}; +use semver::Version; + +pub mod libraries; + +pub mod link; + +use foundry_compilers::{ + artifacts::Severity, + error::SolcError, + solc::{Solc, SolcCompiler, SolcLanguage}, + zksolc::{get_solc_version_info, ZkSolc, ZkSolcCompiler, ZkSolcSettings}, + zksync::artifact_output::zk::ZkArtifactOutput, + Project, ProjectBuilder, ProjectPathsConfig, +}; + +/// Filename for zksync cache +pub const ZKSYNC_SOLIDITY_FILES_CACHE_FILENAME: &str = "zksync-solidity-files-cache.json"; + +/// Directory for zksync artifacts +pub const ZKSYNC_ARTIFACTS_DIR: &str = "zkout"; + +// Config overrides to create zksync specific foundry-compilers data structures + +/// Returns the configured `zksolc` `Settings` that includes: +/// - all libraries +/// - the optimizer (including details, if configured) +/// - evm version +pub fn config_zksolc_settings(config: &Config) -> Result { + let libraries = match config.parsed_libraries() { + Ok(libs) => config.project_paths::().apply_lib_remappings(libs), + Err(e) => return Err(SolcError::msg(format!("Failed to parse libraries: {e}"))), + }; + + Ok(config.zksync.settings(libraries, config.evm_version, config.via_ir)) +} + +/// Create a new zkSync project +pub fn config_create_project( + config: &Config, + cached: bool, + no_artifacts: bool, +) -> Result, SolcError> { + let mut builder = ProjectBuilder::::default() + .artifacts(ZkArtifactOutput {}) + .paths(config_project_paths(config)) + .settings(config_zksolc_settings(config)?) 
+ .ignore_error_codes(config.ignored_error_codes.iter().copied().map(Into::into)) + .ignore_paths(config.ignored_file_paths.clone()) + .set_compiler_severity_filter(if config.deny_warnings { + Severity::Warning + } else { + Severity::Error + }) + .set_offline(config.offline) + .set_cached(cached) + .set_build_info(!no_artifacts && config.build_info) + .set_no_artifacts(no_artifacts); + + if !config.skip.is_empty() { + let filter = SkipBuildFilters::new(config.skip.clone(), config.root.0.clone()); + builder = builder.sparse_output(filter); + } + + let zksolc = if let Some(zksolc) = + config_ensure_zksolc(config.zksync.zksolc.as_ref(), config.offline)? + { + zksolc + } else if !config.offline { + let default_version = semver::Version::new(1, 5, 7); + let mut zksolc = ZkSolc::find_installed_version(&default_version)?; + if zksolc.is_none() { + ZkSolc::blocking_install(&default_version)?; + zksolc = ZkSolc::find_installed_version(&default_version)?; + } + zksolc.unwrap_or_else(|| panic!("Could not install zksolc v{}", default_version)) + } else { + "zksolc".into() + }; + + let zksolc_compiler = ZkSolcCompiler { zksolc, solc: config_solc_compiler(config)? }; + + let project = builder.build(zksolc_compiler)?; + + if config.force { + config.cleanup(&project)?; + } + + Ok(project) +} + +/// Returns solc compiler to use along zksolc using the following rules: +/// 1. If `solc_path` in zksync config options is set, use it. +/// 2. If `solc_path` is not set, check the `solc` requirements: a. If a version is specified, use +/// zkVm solc matching that version. b. If a path is specified, use it. +/// 3. If none of the above, use autodetect which will match source files to a compiler version and +/// use zkVm solc matching that version. +fn config_solc_compiler(config: &Config) -> Result { + if let Some(path) = &config.zksync.solc_path { + if !path.is_file() { + return Err(SolcError::msg(format!("`solc` {} does not exist", path.display()))) + } + let version = get_solc_version_info(path)?.version; + let solc = + Solc::new_with_version(path, Version::new(version.major, version.minor, version.patch)); + return Ok(SolcCompiler::Specific(solc)) + } + + if let Some(ref solc) = config.solc { + let solc = match solc { + SolcReq::Version(version) => { + let solc_version_without_metadata = + format!("{}.{}.{}", version.major, version.minor, version.patch); + let maybe_solc = + ZkSolc::find_solc_installed_version(&solc_version_without_metadata)?; + let path = if let Some(solc) = maybe_solc { + solc + } else { + ZkSolc::solc_blocking_install(&solc_version_without_metadata)? + }; + Solc::new_with_version( + path, + Version::new(version.major, version.minor, version.patch), + ) + } + SolcReq::Local(path) => { + if !path.is_file() { + return Err(SolcError::msg(format!("`solc` {} does not exist", path.display()))) + } + let version = get_solc_version_info(path)?.version; + Solc::new_with_version( + path, + Version::new(version.major, version.minor, version.patch), + ) + } + }; + Ok(SolcCompiler::Specific(solc)) + } else { + Ok(SolcCompiler::AutoDetect) + } +} + +/// Returns the `ProjectPathsConfig` sub set of the config. 
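/// Rough usage sketch (assumes a default `Config`): the returned paths place zksync
/// artifacts under `zkout`, which is what the `ProjectBuilder` chain above consumes via
/// `.paths(..)`:
///
/// ```ignore
/// let config = Config::default();
/// let paths = config_project_paths(&config);
/// assert!(paths.artifacts.ends_with(ZKSYNC_ARTIFACTS_DIR)); // i.e. "<root>/zkout"
/// ```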
+pub fn config_project_paths(config: &Config) -> ProjectPathsConfig { + let builder = ProjectPathsConfig::builder() + .cache(config.cache_path.join(ZKSYNC_SOLIDITY_FILES_CACHE_FILENAME)) + .sources(&config.src) + .tests(&config.test) + .scripts(&config.script) + .artifacts(config.root.0.join(ZKSYNC_ARTIFACTS_DIR)) + .libs(config.libs.iter()) + .remappings(config.get_all_remappings()) + .allowed_path(&config.root.0) + .allowed_paths(&config.libs) + .allowed_paths(&config.allow_paths) + .include_paths(&config.include_paths); + + builder.build_with_root(&config.root.0) +} + +/// Ensures that the configured version is installed if explicitly set +/// +/// If `zksolc` is [`SolcReq::Version`] then this will download and install the solc version if +/// it's missing, unless the `offline` flag is enabled, in which case an error is thrown. +/// +/// If `zksolc` is [`SolcReq::Local`] then this will ensure that the path exists. +pub fn config_ensure_zksolc( + zksolc: Option<&SolcReq>, + offline: bool, +) -> Result, SolcError> { + if let Some(ref zksolc) = zksolc { + let zksolc = match zksolc { + SolcReq::Version(version) => { + let mut zksolc = ZkSolc::find_installed_version(version)?; + if zksolc.is_none() { + if offline { + return Err(SolcError::msg(format!( + "can't install missing zksolc {version} in offline mode" + ))) + } + ZkSolc::blocking_install(version)?; + zksolc = ZkSolc::find_installed_version(version)?; + } + zksolc + } + SolcReq::Local(zksolc) => { + if !zksolc.is_file() { + return Err(SolcError::msg(format!( + "`zksolc` {} does not exist", + zksolc.display() + ))) + } + Some(zksolc.clone()) + } + }; + return Ok(zksolc) + } + + Ok(None) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn zksync_project_has_zksync_solc_when_solc_req_is_a_version() { + let config = + Config { solc: Some(SolcReq::Version(Version::new(0, 8, 26))), ..Default::default() }; + let project = config_create_project(&config, false, true).unwrap(); + let solc_compiler = project.compiler.solc; + if let SolcCompiler::Specific(path) = solc_compiler { + let version = get_solc_version_info(&path.solc).unwrap(); + assert!(version.zksync_version.is_some()); + } else { + panic!("Expected SolcCompiler::Specific"); + } + } +} diff --git a/crates/zksync/compilers/src/dual_compiled_contracts.rs b/crates/zksync/compilers/src/dual_compiled_contracts.rs index 6830f1de9..4caaab7dc 100644 --- a/crates/zksync/compilers/src/dual_compiled_contracts.rs +++ b/crates/zksync/compilers/src/dual_compiled_contracts.rs @@ -133,10 +133,8 @@ impl DualCompiledContracts { let mut zksolc_all_bytecodes: HashMap> = Default::default(); for (_, zk_artifact) in zk_output.artifacts() { if let (Some(hash), Some(bytecode)) = (&zk_artifact.hash, &zk_artifact.bytecode) { - // TODO: we can do this because no bytecode object could be unlinked - // at this stage for zksolc, and BytecodeObject as ref will get the bytecode bytes. 
- // We should be careful however and check/handle errors in - // case an Unlinked BytecodeObject gets here somehow + // NOTE(zk): unlinked objects are _still_ encoded as valid hex + // but the hash wouldn't be present let bytes = bytecode.object().into_bytes().unwrap(); zksolc_all_bytecodes.insert(hash.clone(), bytes.to_vec()); } @@ -163,11 +161,8 @@ impl DualCompiledContracts { if let Some((solc_bytecode, solc_deployed_bytecode)) = solc_bytecodes.get(&contract_file) { - // TODO: we can do this because no bytecode object could be unlinked - // at this stage for zksolc, and BytecodeObject as ref will get the bytecode - // bytes. However, we should check and - // handle errors in case an Unlinked BytecodeObject gets - // here somehow + // NOTE(zk): unlinked objects are _still_ encoded as valid hex + // but the hash wouldn't be present in the artifact let bytecode_vec = bytecode.object().into_bytes().unwrap().to_vec(); let mut factory_deps_vec: Vec> = factory_deps_map .keys() @@ -310,4 +305,26 @@ impl DualCompiledContracts { pub fn is_empty(&self) -> bool { self.contracts.is_empty() } + + /// Extend the inner set of contracts with the given iterator + pub fn extend(&mut self, iter: impl IntoIterator) { + self.contracts.extend(iter.into_iter()); + self.contracts.sort_by(|a, b| a.name.cmp(&b.name)); + self.contracts.dedup_by(|a, b| a.name == b.name); + } + + /// Populate the target's factory deps based on the new list + pub fn extend_factory_deps_by_hash( + &self, + mut target: DualCompiledContract, + factory_deps: impl IntoIterator, + ) -> DualCompiledContract { + let deps_bytecodes = factory_deps + .into_iter() + .flat_map(|hash| self.find_by_zk_bytecode_hash(hash)) + .map(|contract| contract.zk_deployed_bytecode.clone()); + + target.zk_factory_deps.extend(deps_bytecodes); + target + } } diff --git a/crates/zksync/compilers/src/lib.rs b/crates/zksync/compilers/src/lib.rs index 4316c45df..952bd49f2 100644 --- a/crates/zksync/compilers/src/lib.rs +++ b/crates/zksync/compilers/src/lib.rs @@ -7,6 +7,7 @@ pub mod artifacts; pub mod compilers; pub mod dual_compiled_contracts; pub mod libraries; +pub mod link; // TODO: Used in integration tests. // find out why cargo complains about unused dev_dependency for these cases diff --git a/crates/zksync/compilers/src/link.rs b/crates/zksync/compilers/src/link.rs new file mode 100644 index 000000000..591e6d98a --- /dev/null +++ b/crates/zksync/compilers/src/link.rs @@ -0,0 +1,137 @@ +//! 
Contains items and functions to link via zksolc + +use std::{ + path::Path, + process::{Command, Stdio}, +}; + +use alloy_primitives::{ + map::{HashMap, HashSet}, + Address, Bytes, +}; +use foundry_compilers::error::SolcError; +use serde::{Deserialize, Serialize}; + +use crate::compilers::zksolc::ZkSolcCompiler; + +type LinkId = String; + +#[derive(Debug, Clone, Serialize, PartialEq, Eq, Hash)] +#[serde(into = "String")] +/// A library that zksolc will link against +pub struct Library { + /// Path to the library source + pub filename: String, + /// Name of the library + pub name: String, + /// Address of the library + pub address: Address, +} + +impl Into<String> for Library { + fn into(self) -> String { + format!("{}:{}={}", self.filename, self.name, self.address) + } +} + +#[derive(Debug, Clone, Serialize)] +/// JSON Input for `zksolc link` +pub struct LinkJsonInput { + /// List of input bytecodes (linked or unlinked) + pub bytecodes: HashMap<LinkId, Bytes>, + /// List of libraries to link against + pub libraries: HashSet<Library>, +} + +#[derive(Debug, Clone, Deserialize)] +/// Representation of a linked object given by zksolc +pub struct LinkedObject { + // FIXME: obtain factoryDeps from output + // might come in handy to have the libraries used as well + /// Fully linked bytecode + pub bytecode: String, + /// Bytecode hash of the fully linked object + pub hash: String, +} + +#[derive(Debug, Clone, Deserialize)] +/// Representation of an unlinked object given by zksolc +pub struct UnlinkedObject { + /// List of unlinked libraries + pub linker_symbols: HashSet<MissingLibrary>, + /// List of factory dependencies missing from input + pub factory_dependencies: HashSet<MissingLibrary>, +} + +/// Represents a missing library returned by the compiler +/// +/// Deserialized from: "<path>:<name>" +#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Deserialize)] +#[serde(try_from = "String")] +pub struct MissingLibrary { + /// Source path of the contract + pub filename: String, + /// Name of the contract + pub library: String, +} + +impl TryFrom<String> for MissingLibrary { + type Error = &'static str; + + fn try_from(value: String) -> Result<Self, Self::Error> { + let mut split = value.split(':'); + let path = split.next().ok_or("failed to parse unlinked library filename")?.to_string(); + let name = split.next().ok_or("failed to parse unlinked library name")?.to_string(); + + Ok(Self { filename: path, library: name }) + } +} + +#[derive(Debug, Clone, Deserialize)] +/// JSON Output for `zksolc link` +pub struct LinkJsonOutput { + /// Fully linked bytecodes resulting from given input + #[serde(default)] + pub linked: HashMap<LinkId, LinkedObject>, + /// Not fully linked bytecodes + #[serde(default)] + pub unlinked: HashMap<LinkId, UnlinkedObject>, + /// List of fully linked bytecodes in input + #[serde(default)] + pub ignored: HashMap<LinkId, LinkedObject>, +} + +// taken from compilers +fn map_io_err(zksolc_path: &Path) -> impl FnOnce(std::io::Error) -> SolcError + '_ { + move |err| SolcError::io(err, zksolc_path) +} + +/// Invoke `zksolc link` given the `zksolc` binary and json input to use +#[tracing::instrument(level = tracing::Level::TRACE, ret)] +pub fn zksolc_link( + zksolc: &ZkSolcCompiler, + input: LinkJsonInput, +) -> Result<LinkJsonOutput, SolcError> { + let zksolc = &zksolc.zksolc; + let mut cmd = Command::new(&zksolc); + + cmd.arg("--standard-json") + .arg("--link") + .stdin(Stdio::piped()) + .stderr(Stdio::piped()) + .stdout(Stdio::piped()); + + let mut child = cmd.spawn().map_err(map_io_err(&zksolc))?; + + let stdin = child.stdin.as_mut().unwrap(); + let _ = serde_json::to_writer(stdin, &input); + + let output = child.wait_with_output().map_err(map_io_err(&zksolc))?; +
tracing::trace!(?output); + + if output.status.success() { + serde_json::from_slice(&output.stdout).map_err(Into::into) + } else { + Err(SolcError::solc_output(&output)) + } +} diff --git a/crates/zksync/core/src/lib.rs b/crates/zksync/core/src/lib.rs index 3a1bbf881..5ce2409a6 100644 --- a/crates/zksync/core/src/lib.rs +++ b/crates/zksync/core/src/lib.rs @@ -181,6 +181,20 @@ pub fn try_decode_create2(data: &[u8]) -> Result<(H256, H256, Vec<u8>)> { Ok((H256(salt.0), H256(bytecode_hash.0), constructor_args.to_vec())) } +/// Compute a CREATE address according to zksync +pub fn compute_create_address(sender: Address, nonce: u64) -> Address { + const CREATE_PREFIX: &'static [u8] = b"zksyncCreate"; + let sender = sender.to_h256(); + let nonce = H256::from_low_u64_be(nonce); + let prefix = keccak256(CREATE_PREFIX); + + let payload = [prefix.as_slice(), sender.0.as_slice(), nonce.0.as_slice()].concat(); + let hash = keccak256(payload); + let address = &hash[..20]; + + Address::from_slice(address) +} + /// Try decoding the provided transaction data into create parameters. pub fn try_decode_create(data: &[u8]) -> Result<(H256, Vec<u8>)> { let decoded_calldata =