From d6489ae1ca0da9f54d723fb87bb77c4a893dd42b Mon Sep 17 00:00:00 2001 From: Francesco Dainese Date: Fri, 20 Dec 2024 21:53:04 +0100 Subject: [PATCH] feat(zk): zksolc linking --- Cargo.lock | 27 +-- crates/common/Cargo.toml | 1 - crates/common/src/compile.rs | 78 +------- crates/forge/bin/cmd/create.rs | 12 +- crates/forge/src/multi_runner.rs | 123 +++++++----- crates/linking/Cargo.toml | 5 + crates/linking/src/lib.rs | 228 ++++++++++++++++++++++- crates/zksync/compiler/src/lib.rs | 4 +- crates/zksync/compiler/src/link.rs | 135 ++++++++++++++ crates/zksync/compiler/src/zksolc/mod.rs | 35 +++- crates/zksync/core/src/lib.rs | 14 ++ 11 files changed, 510 insertions(+), 152 deletions(-) create mode 100644 crates/zksync/compiler/src/link.rs diff --git a/Cargo.lock b/Cargo.lock index 872764abb..f99c0f476 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1974,7 +1974,7 @@ dependencies = [ "bitflags 2.6.0", "cexpr", "clang-sys", - "itertools 0.12.1", + "itertools 0.11.0", "lazy_static", "lazycell", "log", @@ -4921,7 +4921,6 @@ dependencies = [ "foundry-common-fmt", "foundry-compilers", "foundry-config", - "foundry-zksync-compiler", "itertools 0.13.0", "num-format", "reqwest 0.12.9", @@ -5321,8 +5320,11 @@ version = "0.0.2" dependencies = [ "alloy-primitives", "foundry-compilers", + "foundry-zksync-compiler", + "foundry-zksync-core", "semver 1.0.23", "thiserror 1.0.69", + "tracing", ] [[package]] @@ -7043,15 +7045,6 @@ dependencies = [ "either", ] -[[package]] -name = "itertools" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" -dependencies = [ - "either", -] - [[package]] name = "itertools" version = "0.13.0" @@ -8372,7 +8365,7 @@ version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af1844ef2428cc3e1cb900be36181049ef3d3193c63e43026cfe202983b27a56" dependencies = [ - "proc-macro-crate 3.2.0", + "proc-macro-crate 1.3.1", "proc-macro2 1.0.92", "quote 1.0.37", "syn 2.0.89", @@ -9456,7 +9449,7 @@ checksum = "22505a5c94da8e3b7c2996394d1c933236c4d743e81a410bcca4e6989fc066a4" dependencies = [ "bytes", "heck", - "itertools 0.12.1", + "itertools 0.11.0", "log", "multimap", "once_cell", @@ -9476,7 +9469,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "81bddcdb20abf9501610992b6759a4c888aef7d1a7247ef75e2404275ac24af1" dependencies = [ "anyhow", - "itertools 0.12.1", + "itertools 0.11.0", "proc-macro2 1.0.92", "quote 1.0.37", "syn 2.0.89", @@ -9489,7 +9482,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e9552f850d5f0964a4e4d0bf306459ac29323ddfbae05e35a7c0d35cb0803cc5" dependencies = [ "anyhow", - "itertools 0.13.0", + "itertools 0.11.0", "proc-macro2 1.0.92", "quote 1.0.37", "syn 2.0.89", @@ -11685,7 +11678,7 @@ dependencies = [ "const-hex", "derive_builder", "dunce", - "itertools 0.13.0", + "itertools 0.11.0", "itoa", "lasso", "match_cfg", @@ -11721,7 +11714,7 @@ dependencies = [ "alloy-primitives", "bitflags 2.6.0", "bumpalo", - "itertools 0.13.0", + "itertools 0.11.0", "memchr", "num-bigint 0.4.6", "num-rational", diff --git a/crates/common/Cargo.toml b/crates/common/Cargo.toml index 12b952fda..efb141dfb 100644 --- a/crates/common/Cargo.toml +++ b/crates/common/Cargo.toml @@ -14,7 +14,6 @@ workspace = true [dependencies] foundry-block-explorers = { workspace = true, features = ["foundry-compilers"] } -foundry-zksync-compiler.workspace = true foundry-common-fmt.workspace = true 
foundry-compilers.workspace = true foundry-config.workspace = true diff --git a/crates/common/src/compile.rs b/crates/common/src/compile.rs index 81e45f67f..dbde04c56 100644 --- a/crates/common/src/compile.rs +++ b/crates/common/src/compile.rs @@ -24,10 +24,9 @@ use foundry_compilers::{ }, Artifact, Project, ProjectBuilder, ProjectCompileOutput, ProjectPathsConfig, SolcConfig, }; -use foundry_zksync_compiler::libraries::{self, ZkMissingLibrary}; use num_format::{Locale, ToFormattedString}; use std::{ - collections::{BTreeMap, HashSet}, + collections::BTreeMap, fmt::Display, io::IsTerminal, path::{Path, PathBuf}, @@ -325,7 +324,7 @@ impl ProjectCompiler { let zksolc_version = ZkSolc::get_version_for_path(&project.compiler.zksolc)?; Report::new(SpinnerReporter::spawn_with(format!("Using zksolc-{zksolc_version}"))); } - self.zksync_compile_with(&project.paths.root, || { + self.zksync_compile_with(|| { let files_to_compile = if !files.is_empty() { files } else { project.paths.input_files() }; let sources = Source::read_all(files_to_compile)?; @@ -338,11 +337,7 @@ impl ProjectCompiler { } #[instrument(target = "forge::compile", skip_all)] - fn zksync_compile_with( - self, - root_path: impl AsRef, - f: F, - ) -> Result + fn zksync_compile_with(self, f: F) -> Result where F: FnOnce() -> Result, { @@ -385,80 +380,17 @@ impl ProjectCompiler { sh_println!("{output}")?; } - self.zksync_handle_output(root_path, &output)?; + self.zksync_handle_output(&output)?; } Ok(output) } /// If configured, this will print sizes or names - fn zksync_handle_output( - &self, - root_path: impl AsRef, - output: &ZkProjectCompileOutput, - ) -> Result<()> { + fn zksync_handle_output(&self, output: &ZkProjectCompileOutput) -> Result<()> { let print_names = self.print_names.unwrap_or(false); let print_sizes = self.print_sizes.unwrap_or(false); - // Process missing libraries - // TODO: skip this if project was not compiled using --detect-missing-libraries - let mut missing_libs_unique: HashSet = HashSet::new(); - for (artifact_id, artifact) in output.artifact_ids() { - // TODO: when compiling specific files, the output might still add cached artifacts - // that are not part of the file list to the output, which may cause missing libraries - // error to trigger for files that were not intended to be compiled. - // This behaviour needs to be investigated better on the foundry-compilers side. - // For now we filter, checking only the files passed to compile. 
- let is_target_file = - self.files.is_empty() || self.files.iter().any(|f| artifact_id.path == *f); - if is_target_file { - if let Some(mls) = artifact.missing_libraries() { - missing_libs_unique.extend(mls.clone()); - } - } - } - - let missing_libs: Vec = missing_libs_unique - .into_iter() - .map(|ml| { - let mut split = ml.split(':'); - let contract_path = - split.next().expect("Failed to extract contract path for missing library"); - let contract_name = - split.next().expect("Failed to extract contract name for missing library"); - - let mut abs_path_buf = PathBuf::new(); - abs_path_buf.push(root_path.as_ref()); - abs_path_buf.push(contract_path); - - let art = output.find(abs_path_buf.as_path(), contract_name).unwrap_or_else(|| { - panic!( - "Could not find contract {contract_name} at path {contract_path} for compilation output" - ) - }); - - ZkMissingLibrary { - contract_path: contract_path.to_string(), - contract_name: contract_name.to_string(), - missing_libraries: art.missing_libraries().cloned().unwrap_or_default(), - } - }) - .collect(); - - if !missing_libs.is_empty() { - libraries::add_dependencies_to_missing_libraries_cache( - root_path, - missing_libs.as_slice(), - ) - .expect("Error while adding missing libraries"); - let missing_libs_list = missing_libs - .iter() - .map(|ml| format!("{}:{}", ml.contract_path, ml.contract_name)) - .collect::>() - .join(", "); - eyre::bail!("Missing libraries detected: {missing_libs_list}\n\nRun the following command in order to deploy each missing library:\n\nforge create --private-key --rpc-url --chain --zksync\n\nThen pass the library addresses using the --libraries option"); - } - // print any sizes or names if print_names { let mut artifacts: BTreeMap<_, Vec<_>> = BTreeMap::new(); diff --git a/crates/forge/bin/cmd/create.rs b/crates/forge/bin/cmd/create.rs index 15ebbb87a..e676420aa 100644 --- a/crates/forge/bin/cmd/create.rs +++ b/crates/forge/bin/cmd/create.rs @@ -156,10 +156,10 @@ impl CreateArgs { let (artifact, id) = remove_zk_contract(&mut zk_output, &target_path, &self.contract.name)?; - let ZkContractArtifact { bytecode, factory_dependencies, abi, .. } = artifact; + let ZkContractArtifact { bytecode, abi, factory_dependencies, .. 
} = &artifact; - let abi = abi.expect("Abi not found"); - let bin = bytecode.expect("Bytecode not found"); + let abi = abi.clone().expect("Abi not found"); + let bin = bytecode.as_ref().expect("Bytecode not found"); let bytecode = match bin.object() { BytecodeObject::Bytecode(bytes) => bytes.to_vec(), @@ -206,7 +206,7 @@ impl CreateArgs { let factory_deps: Vec> = { let factory_dependencies_map = - factory_dependencies.expect("factory deps not found"); + factory_dependencies.as_ref().expect("factory deps not found"); let mut visited_paths = HashSet::new(); let mut visited_bytecodes = HashSet::new(); let mut queue = VecDeque::new(); @@ -234,12 +234,12 @@ impl CreateArgs { ) }); let fdep_fdeps_map = - fdep_art.factory_dependencies.clone().expect("factory deps not found"); + fdep_art.factory_dependencies.as_ref().expect("factory deps not found"); for dep in fdep_fdeps_map.values() { queue.push_back(dep.clone()) } - // TODO(zk): ensure factory deps are also linked + // NOTE(zk): unlinked factory deps don't show up in `factory_dependencies` let fdep_bytecode = fdep_art .bytecode .clone() diff --git a/crates/forge/src/multi_runner.rs b/crates/forge/src/multi_runner.rs index c6a43c1c5..76af31b5e 100644 --- a/crates/forge/src/multi_runner.rs +++ b/crates/forge/src/multi_runner.rs @@ -5,14 +5,13 @@ use crate::{ TestFilter, TestOptions, }; use alloy_json_abi::{Function, JsonAbi}; -use alloy_primitives::{Address, Bytes, U256}; +use alloy_primitives::{keccak256, Address, Bytes, B256, U256}; use eyre::Result; use foundry_common::{get_contract_name, ContractsByArtifact, TestFunctionExt}; use foundry_compilers::{ - artifacts::{CompactBytecode, CompactContractBytecode, CompactDeployedBytecode, Libraries}, - compilers::Compiler, - zksync::compile::output::ProjectCompileOutput as ZkProjectCompileOutput, - Artifact, ArtifactId, ProjectCompileOutput, + artifacts::Libraries, compilers::Compiler, + zksync::compile::output::ProjectCompileOutput as ZkProjectCompileOutput, Artifact, ArtifactId, + ProjectCompileOutput, }; use foundry_config::Config; use foundry_evm::{ @@ -26,9 +25,11 @@ use foundry_evm::{ traces::{InternalTraceMode, TraceMode}, }; use foundry_linking::{LinkOutput, Linker}; -use foundry_zksync_compiler::DualCompiledContracts; +use foundry_zksync_compiler::{DualCompiledContract, DualCompiledContracts}; +use foundry_zksync_core::hash_bytecode; use rayon::prelude::*; use revm::primitives::SpecId; +use zksync_types::H256; use std::{ borrow::Borrow, @@ -414,13 +415,18 @@ impl MultiContractRunnerBuilder { zk_output: Option, env: revm::primitives::Env, evm_opts: EvmOpts, - dual_compiled_contracts: DualCompiledContracts, + mut dual_compiled_contracts: DualCompiledContracts, ) -> Result { let use_zk = zk_output.is_some(); let mut known_contracts = ContractsByArtifact::default(); + let output = output.with_stripped_file_prefixes(root); let linker = Linker::new(root, output.artifact_ids().collect()); + let zk_output = zk_output.map(|zk| zk.with_stripped_file_prefixes(&root)); + let zk_linker = + zk_output.as_ref().map(|output| Linker::new(root, output.artifact_ids().collect())); + // Build revert decoder from ABIs of all artifacts. let abis = linker .contracts @@ -435,11 +441,74 @@ impl MultiContractRunnerBuilder { linker.contracts.keys(), )?; + let zk_libs = zk_linker + .as_ref() + .map(|zk| { + zk.zk_link_with_nonce_or_address( + Default::default(), + LIBRARY_DEPLOYER, + // NOTE(zk): normally 0, this way we avoid overlaps with EVM libs + // FIXME: needed or 0 is ok? 
+ libs_to_deploy.len() as u64, + zk.contracts.keys(), + ) + .map(|output| + + // NOTE(zk): zk_linked_contracts later will also contain + // `libs_to_deploy` bytecodes, so those will + // get registered in DualCompiledContracts + + output.libraries) + }) + .transpose()?; + let linked_contracts = linker.get_linked_artifacts(&libraries)?; + let zk_linked_contracts = zk_linker + .as_ref() + .and_then(|linker| zk_libs.as_ref().map(|libs| (linker, libs))) + .map(|(zk, libs)| zk.zk_get_linked_artifacts(zk.contracts.keys(), libs)) + .transpose()?; - // Create a mapping of name => (abi, deployment code, Vec) - let mut deployable_contracts = DeployableContracts::default(); + let newly_linked_dual_compiled_contracts = zk_linked_contracts + .iter() + .flat_map(|arts| arts.iter()) + .flat_map(|(needle, zk)| { + linked_contracts + .iter() + .find(|(id, _)| id.source == needle.source && id.name == needle.name) + .map(|(_, evm)| (needle, zk, evm)) + }) + .filter(|(_, zk, evm)| zk.bytecode.is_some() && evm.bytecode.is_some()) + .map(|(id, linked_zk, evm)| { + let (_, unlinked_zk_artifact) = + zk_output.as_ref().unwrap().artifact_ids().find(|(id, _)| id == id).unwrap(); + let zk_bytecode = linked_zk.get_bytecode_bytes().unwrap(); + let zk_hash = hash_bytecode(&zk_bytecode); + let evm = evm.get_bytecode_bytes().unwrap(); + let contract = DualCompiledContract { + name: id.name.clone(), + zk_bytecode_hash: zk_hash, + zk_deployed_bytecode: zk_bytecode.to_vec(), + // FIXME: retrieve unlinked factory deps (1.5.9) + zk_factory_deps: vec![zk_bytecode.to_vec()], + evm_bytecode_hash: B256::from_slice(&keccak256(evm.as_ref())[..]), + evm_deployed_bytecode: evm.to_vec(), // FIXME: is this ok? not really used + evm_bytecode: evm.to_vec(), + }; + + // populate factory deps that were already linked + dual_compiled_contracts.extend_factory_deps_by_hash( + contract, + unlinked_zk_artifact.factory_dependencies.iter().flatten().map(|(_, hash)| { + H256::from_slice(alloy_primitives::hex::decode(hash).unwrap().as_slice()) + }), + ) + }); + dual_compiled_contracts.extend(newly_linked_dual_compiled_contracts.collect::>()); + // FIXME: is this comment outdated? 
I don't see the library deployment code anywhere + // Create a mapping of name => (abi, deployment code, Vec) + let mut contracts = DeployableContracts::default(); for (id, contract) in linked_contracts.iter() { let Some(abi) = &contract.abi else { continue }; @@ -453,43 +522,13 @@ impl MultiContractRunnerBuilder { continue; }; - deployable_contracts - .insert(id.clone(), TestContract { abi: abi.clone(), bytecode }); + contracts.insert(id.clone(), TestContract { abi: abi.clone(), bytecode }); } } if !use_zk { known_contracts = ContractsByArtifact::new(linked_contracts); - } else if let Some(zk_output) = zk_output { - let zk_contracts = zk_output.with_stripped_file_prefixes(root).into_artifacts(); - let mut zk_contracts_map = BTreeMap::new(); - - for (id, contract) in zk_contracts { - if let Some(abi) = contract.abi { - let bytecode = contract.bytecode.as_ref(); - - // TODO(zk): retrieve link_references - if let Some(bytecode_object) = bytecode.map(|b| b.object()) { - let compact_bytecode = CompactBytecode { - object: bytecode_object.clone(), - source_map: None, - link_references: BTreeMap::new(), - }; - let compact_contract = CompactContractBytecode { - abi: Some(abi), - bytecode: Some(compact_bytecode.clone()), - deployed_bytecode: Some(CompactDeployedBytecode { - bytecode: Some(compact_bytecode), - immutable_references: BTreeMap::new(), - }), - }; - zk_contracts_map.insert(id.clone(), compact_contract); - } - } else { - warn!("Abi not found for contract {}", id.identifier()); - } - } - + } else if let Some(mut zk_contracts_map) = zk_linked_contracts { // Extend zk contracts with solc contracts as well. This is required for traces to // accurately detect contract names deployed in EVM mode, and when using // `vm.zkVmSkip()` cheatcode. @@ -499,7 +538,7 @@ impl MultiContractRunnerBuilder { } Ok(MultiContractRunner { - contracts: deployable_contracts, + contracts, evm_opts, env, evm_spec: self.evm_spec.unwrap_or(SpecId::CANCUN), diff --git a/crates/linking/Cargo.toml b/crates/linking/Cargo.toml index 15d0d113b..eeadd30fa 100644 --- a/crates/linking/Cargo.toml +++ b/crates/linking/Cargo.toml @@ -18,3 +18,8 @@ foundry-compilers = { workspace = true, features = ["full"] } semver.workspace = true alloy-primitives = { workspace = true, features = ["rlp"] } thiserror.workspace = true +tracing.workspace = true + +# zk linking utils +foundry-zksync-core.workspace = true +foundry-zksync-compiler.workspace = true diff --git a/crates/linking/src/lib.rs b/crates/linking/src/lib.rs index e44ee7748..daea95c4f 100644 --- a/crates/linking/src/lib.rs +++ b/crates/linking/src/lib.rs @@ -5,15 +5,24 @@ #![cfg_attr(not(test), warn(unused_crate_dependencies))] #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] -use alloy_primitives::{Address, Bytes, B256}; +use alloy_primitives::{ + hex::FromHex, + map::{HashMap, HashSet}, + Address, Bytes, B256, +}; use foundry_compilers::{ - artifacts::{CompactContractBytecodeCow, Libraries}, + artifacts::{ + BytecodeObject, CompactBytecode, CompactContractBytecode, CompactContractBytecodeCow, + CompactDeployedBytecode, Libraries, + }, contracts::ArtifactContracts, + zksolc::{ZkSolc, ZkSolcCompiler}, Artifact, ArtifactId, }; +use foundry_zksync_compiler::link::{self as zk_link, MissingLibrary}; use semver::Version; use std::{ - collections::{BTreeMap, BTreeSet}, + collections::{BTreeMap, BTreeSet, VecDeque}, path::{Path, PathBuf}, str::FromStr, }; @@ -31,6 +40,18 @@ pub enum LinkerError { CyclicDependency, } +/// Errors that can occur during linking. 
+#[derive(Debug, thiserror::Error)] +pub enum ZkLinkerError { + #[error(transparent)] + Inner(#[from] LinkerError), + #[error("unable to fully link due to missing libraries")] + MissingLibraries(BTreeSet), + #[error("unable to fully link due to unlinked factory dependencies")] + MissingFactoryDeps(BTreeSet), +} + +#[derive(Debug)] pub struct Linker<'a> { /// Root of the project, used to determine whether artifact/library path can be stripped. pub root: PathBuf, @@ -181,6 +202,60 @@ impl<'a> Linker<'a> { Ok(LinkOutput { libraries, libs_to_deploy }) } + /// Links given artifact with either given library addresses or address computed from sender and + /// nonce. + /// + /// Each key in `libraries` should either be a global path or relative to project root. All + /// remappings should be resolved. + /// + /// When calling for `target` being an external library itself, you should check that `target` + /// does not appear in `libs_to_deploy` to avoid deploying it twice. It may happen in cases + /// when there is a dependency cycle including `target`. + pub fn zk_link_with_nonce_or_address( + &'a self, + libraries: Libraries, + sender: Address, + mut nonce: u64, + targets: impl IntoIterator, + ) -> Result { + // Library paths in `link_references` keys are always stripped, so we have to strip + // user-provided paths to be able to match them correctly. + let mut libraries = libraries.with_stripped_file_prefixes(self.root.as_path()); + + let mut needed_libraries = BTreeSet::new(); + for target in targets { + self.collect_dependencies(target, &mut needed_libraries)?; + } + + let mut libs_to_deploy = Vec::new(); + + // If `libraries` does not contain needed dependency, compute its address and add to + // `libs_to_deploy`. + for id in needed_libraries { + let (lib_path, lib_name) = self.convert_artifact_id_to_lib_path(id); + + libraries.libs.entry(lib_path).or_default().entry(lib_name).or_insert_with(|| { + let address = foundry_zksync_core::compute_create_address(sender, nonce); + libs_to_deploy.push((id, address)); + nonce += 1; + + address.to_checksum(None) + }); + } + + // Link and collect bytecodes for `libs_to_deploy`. + let libs_to_deploy = self + .zk_get_linked_artifacts(libs_to_deploy.into_iter().map(|(id, _)| id), &libraries)? + .into_iter() + .map(|(_, linked)| linked.get_bytecode_bytes().unwrap().into_owned()) + .collect(); + + Ok(LinkOutput { libraries, libs_to_deploy }) + } + + // TODO(zk): zk_link_with_create2 + // a bit more difficult due to the lack of bytecode + // until the contract is fully linked pub fn link_with_create2( &'a self, libraries: Libraries, @@ -265,6 +340,100 @@ impl<'a> Linker<'a> { Ok(contract) } + /// Links given artifact with given libraries. 
+ // TODO(zk): improve interface to reflect batching operation (all bytecodes in all bytecodes + // out) + pub fn zk_link( + contracts: &ArtifactContracts>, + target: &ArtifactId, + libraries: &Libraries, + ) -> Result, ZkLinkerError> { + let artifact_to_link_id = |id: &ArtifactId| format!("{}:{}", id.source.display(), id.name); + + // collect bytecodes & libraries for input to zksolc_link + let bytecodes = contracts + .iter() + .filter_map(|(id, bytecode)| { + let link_id = artifact_to_link_id(id); + let object = bytecode.bytecode.as_ref().map(|bc| bc.object.clone())?; + + let bytes = match object { + BytecodeObject::Bytecode(bytes) => bytes, + BytecodeObject::Unlinked(unlinked) => { + alloy_primitives::hex::decode(unlinked).unwrap().into() + } + }; + + Some((link_id, bytes)) + }) + .collect::>(); + + let libraries = libraries + .libs + .iter() + .flat_map(|(file, libs)| { + libs.iter() + .map(|(name, address)| (file.to_string_lossy(), name.clone(), address.clone())) + }) + .map(|(filename, name, address)| zk_link::Library { + filename: filename.into_owned(), + name, + address: Address::from_hex(address).unwrap(), + }) + .collect::>(); + + let zksolc = ZkSolcCompiler { + // NOTE(zk): zksolc --link --standard-json requires >1.5.8 + // FIXME(zk): compiler from config + zksolc: ZkSolc::get_path_for_version(&Version::new(1, 5, 8)).unwrap(), + solc: Default::default(), + }; + let mut link_output = + zk_link::zksolc_link(&zksolc, zk_link::LinkJsonInput { bytecodes, libraries }) + .expect("able to call zksolc --link"); // FIXME: proper error check + + let link_id = &artifact_to_link_id(target); + + let mut contract = contracts.get(target).ok_or(LinkerError::MissingTargetArtifact)?.clone(); + + if let Some(unlinked) = link_output.unlinked.remove(link_id) { + tracing::error!(factory_dependencies = ?unlinked.factory_dependencies, libraries = ?unlinked.linker_symbols, "unmet linking dependencies"); + + if !unlinked.linker_symbols.is_empty() { + return Err(ZkLinkerError::MissingLibraries( + unlinked.linker_symbols.into_iter().collect(), + )); + } + return Err(ZkLinkerError::MissingFactoryDeps( + unlinked.factory_dependencies.into_iter().collect(), + )); + } + + let linked_output = + link_output.linked.remove(link_id).or_else(|| link_output.ignored.remove(link_id)); + + // NOTE(zk): covers intermittent issue where fully linked bytecode was + // not being returned in `ignored` (or `linked`). + // The check above should catch if the bytecode remains unlinked + let Some(linked) = linked_output else { + return Ok(contract); + }; + + let mut compact_bytecode = CompactBytecode::empty(); + compact_bytecode.object = BytecodeObject::Bytecode( + alloy_primitives::hex::decode(&linked.bytecode).unwrap().into(), + ); + + let mut compact_deployed_bytecode = CompactDeployedBytecode::empty(); + compact_deployed_bytecode.bytecode.replace(compact_bytecode.clone()); + + // TODO(zk): maybe return bytecode hash? 
+ contract.bytecode.replace(std::borrow::Cow::Owned(compact_bytecode)); + contract.deployed_bytecode.replace(std::borrow::Cow::Owned(compact_deployed_bytecode)); + + Ok(contract) + } + pub fn get_linked_artifacts( &self, libraries: &Libraries, @@ -272,6 +441,59 @@ impl<'a> Linker<'a> { self.contracts.keys().map(|id| Ok((id.clone(), self.link(id, libraries)?))).collect() } + pub fn zk_get_linked_artifacts<'b>( + &self, + targets: impl IntoIterator, + libraries: &Libraries, + ) -> Result { + let mut targets = targets.into_iter().cloned().collect::>(); + let mut contracts = self.contracts.clone(); + let mut linked_artifacts = vec![]; + + // FIXME(zk): determine if this loop is still needed like this + while let Some(id) = targets.pop_front() { + match Self::zk_link(&contracts, &id, &libraries) { + Ok(linked) => { + // persist linked contract for successive iterations + *contracts.entry(id.clone()).or_default() = linked.clone(); + + linked_artifacts.push((id.clone(), CompactContractBytecode::from(linked))); + } + Err(ZkLinkerError::MissingFactoryDeps(fdeps)) => { + // attempt linking again if some factory dep remains unlinked + // this is just in the case where a previously unlinked factory dep + // is linked with the same run as `id` would be linked + // and instead `id` remains unlinked + // FIXME(zk): might be unnecessary, observed when paths were wrong + let mut ids = fdeps + .into_iter() + .inspect(|fdep| { + dbg!(&fdep); + }) + .flat_map(|fdep| { + contracts.iter().find(|(id, _)| { + id.source.as_path() == Path::new(fdep.filename.as_str()) && + &id.name == &fdep.library + }) + }) + .map(|(id, _)| id.clone()) + .peekable(); + + // if we have no dep ids then we avoid + // queueing our own id to avoid infinite loop + // TODO(zk): find a better way to avoid issues later + if let Some(_) = ids.peek() { + targets.extend(ids); // queue factory deps for linking + targets.push_back(id); // reque original target + } + } + Err(err) => return Err(err), + } + } + + Ok(linked_artifacts.into_iter().collect()) + } + pub fn get_linked_artifacts_cow( &self, libraries: &Libraries, diff --git a/crates/zksync/compiler/src/lib.rs b/crates/zksync/compiler/src/lib.rs index 69b086b04..a9448b609 100644 --- a/crates/zksync/compiler/src/lib.rs +++ b/crates/zksync/compiler/src/lib.rs @@ -5,15 +5,17 @@ /// ZKSolc specific logic. mod zksolc; +pub use zksolc::*; use std::path::PathBuf; use foundry_config::{Config, SkipBuildFilters, SolcReq}; use semver::Version; -pub use zksolc::*; pub mod libraries; +pub mod link; + use foundry_compilers::{ artifacts::Severity, error::SolcError, diff --git a/crates/zksync/compiler/src/link.rs b/crates/zksync/compiler/src/link.rs new file mode 100644 index 000000000..080e0eb0a --- /dev/null +++ b/crates/zksync/compiler/src/link.rs @@ -0,0 +1,135 @@ +//! 
Contains items and functions to link via zksolc + +use std::{ + path::Path, + process::{Command, Stdio}, +}; + +use alloy_primitives::{ + map::{HashMap, HashSet}, + Address, Bytes, +}; +use foundry_compilers::{error::SolcError, zksolc::ZkSolcCompiler}; +use serde::{Deserialize, Serialize}; + +type LinkId = String; + +#[derive(Debug, Clone, Serialize, PartialEq, Eq, Hash)] +#[serde(into = "String")] +/// A library that zksolc will link against +pub struct Library { + /// Path to the library source + pub filename: String, + /// Name of the library + pub name: String, + /// Address of the library + pub address: Address, +} + +impl Into for Library { + fn into(self) -> String { + format!("{}:{}={}", self.filename, self.name, self.address) + } +} + +#[derive(Debug, Clone, Serialize)] +/// JSON Input for `zksolc link` +pub struct LinkJsonInput { + /// List of input bytecodes (linked or unlinked) + pub bytecodes: HashMap, + /// List of libraries to link against + pub libraries: HashSet, +} + +#[derive(Debug, Clone, Deserialize)] +/// Representation of a linked object given by zksolc +pub struct LinkedObject { + // FIXME: obtain factoryDeps from output + // might come in handy to have the libraries used as well + /// Fully linked bytecode + pub bytecode: String, + /// Bytecode hash of the fully linked object + pub hash: String, +} + +#[derive(Debug, Clone, Deserialize)] +/// Representation of a linked object given by zksolc +pub struct UnlinkedObject { + /// List of unlinked libraries + pub linker_symbols: HashSet, + /// List of factory dependencies missing from input + pub factory_dependencies: HashSet, +} + +/// Represent a missing library returned by the compiler +/// +/// Deserialized from: ":" +#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Deserialize)] +#[serde(try_from = "String")] +pub struct MissingLibrary { + /// Source path of the contract + pub filename: String, + /// Name of the contract + pub library: String, +} + +impl TryFrom for MissingLibrary { + type Error = &'static str; + + fn try_from(value: String) -> Result { + let mut split = value.split(':'); + let path = split.next().ok_or("failed to parse unlinked library filename")?.to_string(); + let name = split.next().ok_or("failed to parse unlinked library name")?.to_string(); + + Ok(Self { filename: path, library: name }) + } +} + +#[derive(Debug, Clone, Deserialize)] +/// JSON Output for `zksolc link` +pub struct LinkJsonOutput { + /// Fully linked bytecodes resulting from given input + #[serde(default)] + pub linked: HashMap, + /// Not fully linked bytecodes + #[serde(default)] + pub unlinked: HashMap, + /// List of fully linked bytecodes in input + #[serde(default)] + pub ignored: HashMap, +} + +// taken fom compilers +fn map_io_err(zksolc_path: &Path) -> impl FnOnce(std::io::Error) -> SolcError + '_ { + move |err| SolcError::io(err, zksolc_path) +} + +/// Invoke `zksolc link` given the `zksolc` binary and json input to use +#[tracing::instrument(level = tracing::Level::TRACE, ret)] +pub fn zksolc_link( + zksolc: &ZkSolcCompiler, + input: LinkJsonInput, +) -> Result { + let zksolc = &zksolc.zksolc; + let mut cmd = Command::new(&zksolc); + + cmd.arg("--standard-json") + .arg("--link") + .stdin(Stdio::piped()) + .stderr(Stdio::piped()) + .stdout(Stdio::piped()); + + let mut child = cmd.spawn().map_err(map_io_err(&zksolc))?; + + let stdin = child.stdin.as_mut().unwrap(); + let _ = serde_json::to_writer(stdin, &input); + + let output = child.wait_with_output().map_err(map_io_err(&zksolc))?; + 
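+    // Assumption (grounded in `LinkJsonOutput` above): zksolc prints the link result as JSON on
+    // stdout, with `linked`, `unlinked` and `ignored` maps keyed by "source_path:contract_name".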
tracing::trace!(?output); + + if output.status.success() { + serde_json::from_slice(&output.stdout).map_err(Into::into) + } else { + Err(SolcError::solc_output(&output)) + } +} diff --git a/crates/zksync/compiler/src/zksolc/mod.rs b/crates/zksync/compiler/src/zksolc/mod.rs index f4e3b0699..757c1deed 100644 --- a/crates/zksync/compiler/src/zksolc/mod.rs +++ b/crates/zksync/compiler/src/zksolc/mod.rs @@ -132,10 +132,8 @@ impl DualCompiledContracts { let mut zksolc_all_bytecodes: HashMap> = Default::default(); for (_, zk_artifact) in zk_output.artifacts() { if let (Some(hash), Some(bytecode)) = (&zk_artifact.hash, &zk_artifact.bytecode) { - // TODO: we can do this because no bytecode object could be unlinked - // at this stage for zksolc, and BytecodeObject as ref will get the bytecode bytes. - // We should be careful however and check/handle errors in - // case an Unlinked BytecodeObject gets here somehow + // NOTE(zk): unlinked objects are _still_ encoded as valid hex + // but the hash wouldn't be present let bytes = bytecode.object().into_bytes().unwrap(); zksolc_all_bytecodes.insert(hash.clone(), bytes.to_vec()); } @@ -162,11 +160,8 @@ impl DualCompiledContracts { if let Some((solc_bytecode, solc_deployed_bytecode)) = solc_bytecodes.get(&contract_file) { - // TODO: we can do this because no bytecode object could be unlinked - // at this stage for zksolc, and BytecodeObject as ref will get the bytecode - // bytes. However, we should check and - // handle errors in case an Unlinked BytecodeObject gets - // here somehow + // NOTE(zk): unlinked objects are _still_ encoded as valid hex + // but the hash wouldn't be present in the artifact let bytecode_vec = bytecode.object().into_bytes().unwrap().to_vec(); let mut factory_deps_vec: Vec> = factory_deps_map .keys() @@ -299,4 +294,26 @@ impl DualCompiledContracts { pub fn push(&mut self, contract: DualCompiledContract) { self.contracts.push(contract); } + + /// Extend the inner set of contracts with the given iterator + pub fn extend(&mut self, iter: impl IntoIterator) { + self.contracts.extend(iter.into_iter()); + self.contracts.sort_by(|a, b| a.name.cmp(&b.name)); + self.contracts.dedup_by(|a, b| a.name == b.name); + } + + /// Populate the target's factory deps based on the new list + pub fn extend_factory_deps_by_hash( + &self, + mut target: DualCompiledContract, + factory_deps: impl IntoIterator, + ) -> DualCompiledContract { + let deps_bytecodes = factory_deps + .into_iter() + .flat_map(|hash| self.find_by_zk_bytecode_hash(hash)) + .map(|contract| contract.zk_deployed_bytecode.clone()); + + target.zk_factory_deps.extend(deps_bytecodes); + target + } } diff --git a/crates/zksync/core/src/lib.rs b/crates/zksync/core/src/lib.rs index 7218ff60e..ef4114fe0 100644 --- a/crates/zksync/core/src/lib.rs +++ b/crates/zksync/core/src/lib.rs @@ -176,6 +176,20 @@ pub fn try_decode_create2(data: &[u8]) -> Result<(H256, H256, Vec)> { Ok((H256(salt.0), H256(bytecode_hash.0), constructor_args.to_vec())) } +/// Compute a CREATE address according to zksync +pub fn compute_create_address(sender: Address, nonce: u64) -> Address { + const CREATE_PREFIX: &'static [u8] = b"zksyncCreate"; + let sender = sender.to_h256(); + let nonce = H256::from_low_u64_be(nonce); + let prefix = keccak256(CREATE_PREFIX); + + let payload = [prefix.as_slice(), sender.0.as_slice(), nonce.0.as_slice()].concat(); + let hash = keccak256(payload); + let address = &hash[..20]; + + Address::from_slice(address) +} + /// Try decoding the provided transaction data into create parameters. 
pub fn try_decode_create(data: &[u8]) -> Result<(H256, Vec<u8>)> { let decoded_calldata =