From 4785f16aa0268827056a805fc168dab3978da73e Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Thu, 17 Aug 2023 21:42:28 +0800 Subject: [PATCH 01/35] wip: refactor according to new CircuitBuilder --- Cargo.lock | 27 +-- prover/Cargo.toml | 12 +- prover/src/utils.rs | 2 +- prover/src/zkevm/capacity_checker.rs | 12 +- prover/src/zkevm/circuit.rs | 2 +- prover/src/zkevm/circuit/builder.rs | 330 +++++---------------------- types/Cargo.toml | 2 +- types/src/lib.rs | 9 +- 8 files changed, 85 insertions(+), 311 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ed1a2b144..42c63e5ae 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -23,7 +23,7 @@ dependencies = [ [[package]] name = "aggregator" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.16#f1341e5bf2dc59ea10c19012257c7e386cfc195f" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#61cb695d1066cdfe63dd65d4368ba48d42386dd9" dependencies = [ "ark-std", "env_logger 0.10.0", @@ -419,7 +419,7 @@ checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" [[package]] name = "bus-mapping" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.16#f1341e5bf2dc59ea10c19012257c7e386cfc195f" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#61cb695d1066cdfe63dd65d4368ba48d42386dd9" dependencies = [ "eth-types", "ethers-core 0.17.0", @@ -433,6 +433,7 @@ dependencies = [ "lazy_static", "log", "mock", + "mpt-zktrie", "once_cell", "poseidon-circuit", "rand", @@ -982,9 +983,9 @@ dependencies = [ [[package]] name = "either" -version = "1.8.1" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" +checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" [[package]] name = "elliptic-curve" @@ -1122,7 +1123,7 @@ dependencies = [ [[package]] name = "eth-types" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.16#f1341e5bf2dc59ea10c19012257c7e386cfc195f" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#61cb695d1066cdfe63dd65d4368ba48d42386dd9" dependencies = [ "ethers-core 0.17.0", "ethers-signers", @@ -1387,7 +1388,7 @@ dependencies = [ [[package]] name = "external-tracer" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.16#f1341e5bf2dc59ea10c19012257c7e386cfc195f" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#61cb695d1066cdfe63dd65d4368ba48d42386dd9" dependencies = [ "eth-types", "geth-utils", @@ -1600,7 +1601,7 @@ dependencies = [ [[package]] name = "gadgets" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.16#f1341e5bf2dc59ea10c19012257c7e386cfc195f" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#61cb695d1066cdfe63dd65d4368ba48d42386dd9" dependencies = [ "digest 0.7.6", "eth-types", @@ -1640,7 +1641,7 @@ dependencies = [ [[package]] name = "geth-utils" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.16#f1341e5bf2dc59ea10c19012257c7e386cfc195f" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#61cb695d1066cdfe63dd65d4368ba48d42386dd9" 
dependencies = [ "env_logger 0.9.3", "gobuild 0.1.0-alpha.2 (git+https://github.com/scroll-tech/gobuild.git)", @@ -2246,7 +2247,7 @@ dependencies = [ [[package]] name = "keccak256" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.16#f1341e5bf2dc59ea10c19012257c7e386cfc195f" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#61cb695d1066cdfe63dd65d4368ba48d42386dd9" dependencies = [ "env_logger 0.9.3", "eth-types", @@ -2446,7 +2447,7 @@ dependencies = [ [[package]] name = "mock" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.16#f1341e5bf2dc59ea10c19012257c7e386cfc195f" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#61cb695d1066cdfe63dd65d4368ba48d42386dd9" dependencies = [ "eth-types", "ethers-core 0.17.0", @@ -2461,9 +2462,8 @@ dependencies = [ [[package]] name = "mpt-zktrie" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.16#f1341e5bf2dc59ea10c19012257c7e386cfc195f" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#61cb695d1066cdfe63dd65d4368ba48d42386dd9" dependencies = [ - "bus-mapping", "eth-types", "halo2-mpt-circuits", "halo2_proofs", @@ -4712,10 +4712,11 @@ checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" [[package]] name = "zkevm-circuits" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.16#f1341e5bf2dc59ea10c19012257c7e386cfc195f" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#61cb695d1066cdfe63dd65d4368ba48d42386dd9" dependencies = [ "array-init", "bus-mapping", + "either", "env_logger 0.9.3", "eth-types", "ethers-core 0.17.0", diff --git a/prover/Cargo.toml b/prover/Cargo.toml index 07e71a9ba..5740289ca 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -8,12 +8,12 @@ edition = "2021" [dependencies] halo2_proofs = { git = "https://github.com/privacy-scaling-explorations/halo2.git", tag = "v2023_02_02" } -aggregator = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.5.16" } -bus-mapping = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.5.16" } -eth-types = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.5.16" } -zkevm-circuits = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.5.16", default-features = false, features = ["test","scroll","scroll-trace","shanghai"] } -mpt-zktrie = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.5.16" } -mock = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.5.16" } +aggregator = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "feat/supercircuit_test_by_l2trace" } +bus-mapping = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "feat/supercircuit_test_by_l2trace" } +eth-types = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "feat/supercircuit_test_by_l2trace" } +zkevm-circuits = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "feat/supercircuit_test_by_l2trace", default-features = false, features = ["test","scroll","scroll-trace","shanghai"] } +mpt-zktrie = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "feat/supercircuit_test_by_l2trace" } +mock = { git = 
"https://github.com/scroll-tech/zkevm-circuits.git", branch = "feat/supercircuit_test_by_l2trace" } snark-verifier = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop" } snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop" } diff --git a/prover/src/utils.rs b/prover/src/utils.rs index dcd9e1df6..1d3a911bd 100644 --- a/prover/src/utils.rs +++ b/prover/src/utils.rs @@ -24,7 +24,7 @@ use std::{ str::FromStr, sync::Once, }; -use types::eth::{BlockTrace, BlockTraceJsonRpcResult}; +use types::{BlockTraceJsonRpcResult, eth::BlockTrace}; use zkevm_circuits::evm_circuit::witness::Block; pub const DEFAULT_SERDE_FORMAT: SerdeFormat = SerdeFormat::RawBytesUnchecked; diff --git a/prover/src/zkevm/capacity_checker.rs b/prover/src/zkevm/capacity_checker.rs index a45fccb3a..01b83b75b 100644 --- a/prover/src/zkevm/capacity_checker.rs +++ b/prover/src/zkevm/capacity_checker.rs @@ -1,9 +1,7 @@ use super::circuit::{ block_traces_to_witness_block_with_updated_state, calculate_row_usage_of_witness_block, - fill_zktrie_state_from_proofs, }; use itertools::Itertools; -use mpt_zktrie::state::ZktrieState; use serde_derive::{Deserialize, Serialize}; use types::eth::BlockTrace; @@ -125,7 +123,6 @@ pub struct CircuitCapacityChecker { pub light_mode: bool, pub acc_row_usage: RowUsage, pub row_usages: Vec, - pub state: Option, } // Currently TxTrace is same as BlockTrace, with "transactions" and "executionResults" should be of @@ -144,12 +141,10 @@ impl CircuitCapacityChecker { Self { acc_row_usage: RowUsage::new(), row_usages: Vec::new(), - state: None, light_mode: true, } } pub fn reset(&mut self) { - self.state = None; self.acc_row_usage = RowUsage::new(); self.row_usages = Vec::new(); } @@ -158,14 +153,9 @@ impl CircuitCapacityChecker { txs: &[TxTrace], ) -> Result<(RowUsage, RowUsage), anyhow::Error> { assert!(!txs.is_empty()); - if self.state.is_none() { - self.state = Some(ZktrieState::construct(txs[0].storage_trace.root_before)); - } let traces = txs; - let state = self.state.as_mut().unwrap(); - fill_zktrie_state_from_proofs(state, traces, self.light_mode)?; let witness_block = - block_traces_to_witness_block_with_updated_state(traces, state, self.light_mode)?; + block_traces_to_witness_block_with_updated_state(traces, self.light_mode)?; let rows = calculate_row_usage_of_witness_block(&witness_block)?; let row_usage_details: Vec = rows .into_iter() diff --git a/prover/src/zkevm/circuit.rs b/prover/src/zkevm/circuit.rs index 519a559d9..2c1eca67f 100644 --- a/prover/src/zkevm/circuit.rs +++ b/prover/src/zkevm/circuit.rs @@ -13,7 +13,7 @@ use crate::utils::read_env_var; pub use self::builder::{ block_traces_to_padding_witness_block, block_traces_to_witness_block, block_traces_to_witness_block_with_updated_state, calculate_row_usage_of_trace, - calculate_row_usage_of_witness_block, check_batch_capacity, fill_zktrie_state_from_proofs, + calculate_row_usage_of_witness_block, check_batch_capacity, normalize_withdraw_proof, storage_trace_to_padding_witness_block, SUB_CIRCUIT_NAMES, }; diff --git a/prover/src/zkevm/circuit/builder.rs b/prover/src/zkevm/circuit/builder.rs index 2dd33cf56..fc8bada05 100644 --- a/prover/src/zkevm/circuit/builder.rs +++ b/prover/src/zkevm/circuit/builder.rs @@ -6,22 +6,17 @@ use super::{ use crate::config::INNER_DEGREE; use anyhow::{bail, Result}; use bus_mapping::{ - circuit_input_builder::{ - self, BlockHead, CircuitInputBuilder, CircuitsParams, PrecompileEcParams, - }, - state_db::{Account, CodeDB, StateDB}, + 
circuit_input_builder::{CircuitInputBuilder, CircuitsParams, PrecompileEcParams,}, }; -use eth_types::{evm_types::opcode_ids::OpcodeId, ToAddress, ToBigEndian, H256}; -use ethers_core::types::{Bytes, U256}; +use eth_types::{ToBigEndian, H256}; use halo2_proofs::halo2curves::bn256::Fr; use is_even::IsEven; use itertools::Itertools; -use mpt_zktrie::state::ZktrieState; use std::{ collections::{hash_map::Entry, HashMap}, time::Instant, }; -use types::eth::{BlockTrace, EthBlock, ExecStep, StorageTrace}; +use types::eth::{BlockTrace, StorageTrace}; use zkevm_circuits::{ evm_circuit::witness::{block_apply_mpt_state, block_convert_with_l1_queue_index, Block}, util::SubCircuit, @@ -147,60 +142,6 @@ pub fn check_batch_capacity(block_traces: &mut Vec) -> Result<()> { Ok(()) } -pub fn fill_zktrie_state_from_proofs( - zktrie_state: &mut ZktrieState, - block_traces: &[BlockTrace], - light_mode: bool, -) -> Result<()> { - log::debug!( - "building partial statedb, old root {}, light_mode {}", - hex::encode(zktrie_state.root()), - light_mode - ); - let account_proofs = block_traces.iter().flat_map(|block| { - log::trace!("account proof for block {:?}:", block.header.number); - block.storage_trace.proofs.iter().flat_map(|kv_map| { - kv_map - .iter() - .map(|(k, bts)| (k, bts.iter().map(Bytes::as_ref))) - }) - }); - let storage_proofs = block_traces.iter().flat_map(|block| { - log::trace!("storage proof for block {:?}:", block.header.number); - block - .storage_trace - .storage_proofs - .iter() - .flat_map(|(k, kv_map)| { - kv_map - .iter() - .map(move |(sk, bts)| (k, sk, bts.iter().map(Bytes::as_ref))) - }) - }); - let additional_proofs = block_traces.iter().flat_map(|block| { - log::trace!("storage proof for block {:?}:", block.header.number); - log::trace!("additional proof for block {:?}:", block.header.number); - block - .storage_trace - .deletion_proofs - .iter() - .map(Bytes::as_ref) - }); - zktrie_state.update_statedb_from_proofs( - account_proofs.clone(), - storage_proofs.clone(), - additional_proofs.clone(), - )?; - if !light_mode { - zktrie_state.update_nodes_from_proofs(account_proofs, storage_proofs, additional_proofs)?; - } - log::debug!( - "building partial statedb done, root {}", - hex::encode(zktrie_state.root()) - ); - Ok(()) -} - pub fn block_traces_to_witness_block(block_traces: &[BlockTrace]) -> Result> { let block_num = block_traces.len(); let total_tx_num = block_traces @@ -228,9 +169,7 @@ pub fn block_traces_to_witness_block(block_traces: &[BlockTrace]) -> Result Result> { @@ -254,12 +193,10 @@ pub fn block_traces_to_padding_witness_block(block_traces: &[BlockTrace]) -> Res } else { block_traces[0].storage_trace.root_before }; - let mut state = ZktrieState::construct(old_root); - fill_zktrie_state_from_proofs(&mut state, block_traces, false)?; // the only purpose here it to get the updated zktrie state let prev_witness_block = - block_traces_to_witness_block_with_updated_state(block_traces, &mut state, false)?; + block_traces_to_witness_block_with_updated_state(block_traces, false)?; // TODO: when prev_witness_block.tx.is_empty(), the `withdraw_proof` here should be a subset of // storage proofs of prev block @@ -273,19 +210,17 @@ pub fn storage_trace_to_padding_witness_block(storage_trace: StorageTrace) -> Re serde_json::to_string_pretty(&storage_trace)? 
); - let mut state = ZktrieState::construct(storage_trace.root_before); let dummy_chunk_traces = vec![BlockTrace { chain_id: *CHAIN_ID, storage_trace, ..Default::default() }]; - fill_zktrie_state_from_proofs(&mut state, &dummy_chunk_traces, false)?; - block_traces_to_witness_block_with_updated_state(&[], &mut state, false) + + block_traces_to_witness_block_with_updated_state(&[], false) } pub fn block_traces_to_witness_block_with_updated_state( block_traces: &[BlockTrace], - zktrie_state: &mut ZktrieState, light_mode: bool, // light_mode used in row estimation ) -> Result> { let chain_id = block_traces @@ -306,14 +241,6 @@ pub fn block_traces_to_witness_block_with_updated_state( ); } - let mut state_db: StateDB = zktrie_state.state().clone(); - - let (zero_coinbase_exist, _) = state_db.get_account(&Default::default()); - if !zero_coinbase_exist { - state_db.set_account(&Default::default(), Account::zero()); - } - - let code_db = build_codedb(&state_db, block_traces)?; let circuit_params = CircuitsParams { max_evm_rows: MAX_RWS, max_rws: MAX_RWS, @@ -332,62 +259,56 @@ pub fn block_traces_to_witness_block_with_updated_state( ec_pairing: MAX_PRECOMPILE_EC_PAIRING, }, }; - let mut builder_block = circuit_input_builder::Block::from_headers(&[], circuit_params); - builder_block.chain_id = chain_id; - builder_block.prev_state_root = U256::from(zktrie_state.root()); - let mut builder = CircuitInputBuilder::new(state_db.clone(), code_db, &builder_block); - for (idx, block_trace) in block_traces.iter().enumerate() { - let is_last = idx == block_traces.len() - 1; - let eth_block: EthBlock = block_trace.clone().into(); - let mut geth_trace = Vec::new(); - for result in &block_trace.execution_results { - geth_trace.push(result.into()); - } - // TODO: Get the history_hashes. - let mut header = BlockHead::new_with_l1_queue_index( - chain_id, - block_trace.start_l1_queue_index, - Vec::new(), - ð_block, + let first_trace = &block_traces[0]; + let more_traces = &block_traces[1..]; + + let metric = |builder: &CircuitInputBuilder, idx: usize| -> Result<(), bus_mapping::Error>{ + let t = Instant::now(); + let block = block_convert_with_l1_queue_index::( + &builder.block, + &builder.code_db, + builder.block.start_l1_queue_index, )?; - // override zeroed minder field with additional "coinbase" field in blocktrace - if let Some(address) = block_trace.coinbase.address { - header.coinbase = address; - } - let block_num = header.number.as_u64(); - builder.block.start_l1_queue_index = start_l1_queue_index; // the chunk's start_l1_queue_index - builder.block.headers.insert(block_num, header); - builder.handle_block_inner(ð_block, geth_trace.as_slice(), false, is_last)?; - log::debug!("handle_block_inner done for block {:?}", block_num); + log::debug!("block convert time {:?}", t.elapsed()); + let rows = ::Inner::min_num_rows_block(&block); + log::debug!( + "after block {}, tx num {:?}, tx len sum {}, rows needed {:?}. 
estimate time: {:?}", + idx, + builder.block.txs().len(), + builder + .block + .txs() + .iter() + .map(|t| t.input.len()) + .sum::(), + rows, + t.elapsed() + ); + Ok(()) + }; + + let mut builder = CircuitInputBuilder::new_from_l2_trace( + circuit_params, + first_trace, + more_traces.len() != 0, + )?; + + let per_block_metric = false; + if per_block_metric { + metric(&builder, 0)?; + } + + for (idx, block_trace) in block_traces.iter().enumerate() { + let is_last = idx == block_traces.len() - 1; + builder.add_more_l2_trace(block_trace, !is_last)?; let per_block_metric = false; if per_block_metric { - let t = Instant::now(); - let block = block_convert_with_l1_queue_index::( - &builder.block, - &builder.code_db, - start_l1_queue_index, - )?; - log::debug!("block convert time {:?}", t.elapsed()); - let rows = ::Inner::min_num_rows_block(&block); - log::debug!( - "after block {}, tx num {:?}, tx len sum {}, rows needed {:?}. estimate time: {:?}", - idx, - builder.block.txs().len(), - builder - .block - .txs() - .iter() - .map(|t| t.input.len()) - .sum::(), - rows, - t.elapsed() - ); + metric(&builder, idx+1)?; } } - builder.set_value_ops_call_context_rwc_eor(); - builder.set_end_block()?; + builder.finalize_building()?; log::debug!("converting builder.block to witness block"); let mut witness_block = @@ -397,15 +318,14 @@ pub fn block_traces_to_witness_block_with_updated_state( witness_block.circuits_params ); - if !light_mode && zktrie_state.root() != &[0u8; 32] { + if !light_mode && builder.mpt_state.root() != &[0u8; 32] { log::debug!("block_apply_mpt_state"); - block_apply_mpt_state(&mut witness_block, zktrie_state); + block_apply_mpt_state(&mut witness_block, &builder.mpt_state); log::debug!("block_apply_mpt_state done"); } - zktrie_state.set_state(builder.sdb.clone()); log::debug!( "finish replay trie updates, root {}", - hex::encode(zktrie_state.root()) + hex::encode(builder.mpt_state.root()) ); Ok(witness_block) } @@ -425,150 +345,6 @@ pub fn decode_bytecode(bytecode: &str) -> Result> { hex::decode(stripped).map_err(|e| e.into()) } -fn trace_code( - cdb: &mut CodeDB, - code_hash: Option, - code: Bytes, - step: &ExecStep, - sdb: &StateDB, - stack_pos: usize, -) { - // first, try to read from sdb - let stack = step - .stack - .as_ref() - .expect("should have stack in call context"); - let addr = stack[stack.len() - stack_pos - 1].to_address(); //stack N-stack_pos - - let code_hash = code_hash.or_else(|| { - let (_existed, acc_data) = sdb.get_account(&addr); - if acc_data.code_hash != CodeDB::empty_code_hash() && !code.is_empty() { - // they must be same - Some(acc_data.code_hash) - } else { - // let us re-calculate it - None - } - }); - let code_hash = match code_hash { - Some(code_hash) => { - if code_hash.is_zero() { - CodeDB::hash(&code) - } else { - if log::log_enabled!(log::Level::Trace) { - assert_eq!( - code_hash, - CodeDB::hash(&code), - "bytecode len {:?}, step {:?}", - code.len(), - step - ); - } - code_hash - } - } - None => { - let hash = CodeDB::hash(&code); - log::debug!( - "hash_code done: addr {addr:?}, size {}, hash {hash:?}", - &code.len() - ); - hash - } - }; - - cdb.0.entry(code_hash).or_insert_with(|| { - log::trace!( - "trace code addr {:?}, size {} hash {:?}", - addr, - &code.len(), - code_hash - ); - code.to_vec() - }); -} - -pub fn build_codedb(sdb: &StateDB, blocks: &[BlockTrace]) -> Result { - let mut cdb = CodeDB::new(); - log::debug!("building codedb"); - - cdb.insert(Vec::new()); - - for block in blocks.iter().rev() { - log::debug!("build_codedb for block 
{:?}", block.header.number); - for (er_idx, execution_result) in block.execution_results.iter().enumerate() { - if let Some(bytecode) = &execution_result.byte_code { - let bytecode = decode_bytecode(bytecode)?.to_vec(); - - let code_hash = execution_result - .to - .as_ref() - .and_then(|t| t.poseidon_code_hash) - .unwrap_or_else(|| CodeDB::hash(&bytecode)); - let code_hash = if code_hash.is_zero() { - CodeDB::hash(&bytecode) - } else { - code_hash - }; - if let Entry::Vacant(e) = cdb.0.entry(code_hash) { - e.insert(bytecode); - //log::debug!("inserted tx bytecode {:?} {:?}", code_hash, hash); - } - if execution_result.account_created.is_none() { - //assert_eq!(Some(hash), execution_result.code_hash); - } - } - - for step in execution_result.exec_steps.iter().rev() { - if let Some(data) = &step.extra_data { - match step.op { - OpcodeId::CALL - | OpcodeId::CALLCODE - | OpcodeId::DELEGATECALL - | OpcodeId::STATICCALL => { - let code_idx = if block.transactions[er_idx].to.is_none() { - 0 - } else { - 1 - }; - let callee_code = data.get_code_at(code_idx); - if callee_code.is_none() { - bail!("invalid trace: cannot get code of call: {:?}", step); - } - let code_hash = match step.op { - OpcodeId::CALL | OpcodeId::CALLCODE => data.get_code_hash_at(1), - OpcodeId::STATICCALL => data.get_code_hash_at(0), - _ => None, - }; - trace_code(&mut cdb, code_hash, callee_code.unwrap(), step, sdb, 1); - } - OpcodeId::CREATE | OpcodeId::CREATE2 => { - // notice we do not need to insert code for CREATE, - // bustmapping do this job - } - OpcodeId::EXTCODESIZE | OpcodeId::EXTCODECOPY => { - let code = data.get_code_at(0); - if code.is_none() { - bail!("invalid trace: cannot get code of ext: {:?}", step); - } - trace_code(&mut cdb, None, code.unwrap(), step, sdb, 0); - } - - _ => {} - } - } - } - } - } - - log::debug!("building codedb done"); - for (k, v) in &cdb.0 { - assert!(!k.is_zero()); - log::trace!("codedb codehash {:?}, len {}", k, v.len()); - } - Ok(cdb) -} - pub fn normalize_withdraw_proof(proof: &WithdrawProof) -> StorageTrace { let address = *bus_mapping::l2_predeployed::message_queue::ADDRESS; let key = *bus_mapping::l2_predeployed::message_queue::WITHDRAW_TRIE_ROOT_SLOT; diff --git a/types/Cargo.toml b/types/Cargo.toml index 6febf7603..d226f5649 100644 --- a/types/Cargo.toml +++ b/types/Cargo.toml @@ -4,7 +4,7 @@ version = "0.4.0" edition = "2021" [dependencies] -eth-types = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.5.16" } +eth-types = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "feat/supercircuit_test_by_l2trace" } base64 = "0.13.0" blake2 = "0.10.3" ethers-core = "0.17.0" diff --git a/types/src/lib.rs b/types/src/lib.rs index 8c63fd288..516ea9e28 100644 --- a/types/src/lib.rs +++ b/types/src/lib.rs @@ -1,4 +1,11 @@ -pub mod eth; +use serde::{Deserialize, Serialize}; +pub use eth_types::l2_types as eth; + +#[derive(Deserialize, Serialize, Default, Debug, Clone)] +pub struct BlockTraceJsonRpcResult { + pub result: eth::BlockTrace, +} + pub mod base64 { use base64::{decode, encode}; From 49abf731f586c211f166d1225924599a846de684 Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Fri, 18 Aug 2023 08:46:07 +0800 Subject: [PATCH 02/35] dump zkevm-circuits' version and pass compile --- Cargo.lock | 20 ++++++++++---------- prover/src/zkevm/circuit/builder.rs | 6 +++--- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 42c63e5ae..eeca2f504 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -23,7 +23,7 @@ dependencies = [ 
[[package]] name = "aggregator" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#61cb695d1066cdfe63dd65d4368ba48d42386dd9" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#d3bf2036dce76d813bfaf29729c775a7af375e5c" dependencies = [ "ark-std", "env_logger 0.10.0", @@ -419,7 +419,7 @@ checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" [[package]] name = "bus-mapping" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#61cb695d1066cdfe63dd65d4368ba48d42386dd9" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#d3bf2036dce76d813bfaf29729c775a7af375e5c" dependencies = [ "eth-types", "ethers-core 0.17.0", @@ -1123,7 +1123,7 @@ dependencies = [ [[package]] name = "eth-types" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#61cb695d1066cdfe63dd65d4368ba48d42386dd9" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#d3bf2036dce76d813bfaf29729c775a7af375e5c" dependencies = [ "ethers-core 0.17.0", "ethers-signers", @@ -1388,7 +1388,7 @@ dependencies = [ [[package]] name = "external-tracer" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#61cb695d1066cdfe63dd65d4368ba48d42386dd9" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#d3bf2036dce76d813bfaf29729c775a7af375e5c" dependencies = [ "eth-types", "geth-utils", @@ -1601,7 +1601,7 @@ dependencies = [ [[package]] name = "gadgets" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#61cb695d1066cdfe63dd65d4368ba48d42386dd9" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#d3bf2036dce76d813bfaf29729c775a7af375e5c" dependencies = [ "digest 0.7.6", "eth-types", @@ -1641,7 +1641,7 @@ dependencies = [ [[package]] name = "geth-utils" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#61cb695d1066cdfe63dd65d4368ba48d42386dd9" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#d3bf2036dce76d813bfaf29729c775a7af375e5c" dependencies = [ "env_logger 0.9.3", "gobuild 0.1.0-alpha.2 (git+https://github.com/scroll-tech/gobuild.git)", @@ -2247,7 +2247,7 @@ dependencies = [ [[package]] name = "keccak256" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#61cb695d1066cdfe63dd65d4368ba48d42386dd9" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#d3bf2036dce76d813bfaf29729c775a7af375e5c" dependencies = [ "env_logger 0.9.3", "eth-types", @@ -2447,7 +2447,7 @@ dependencies = [ [[package]] name = "mock" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#61cb695d1066cdfe63dd65d4368ba48d42386dd9" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#d3bf2036dce76d813bfaf29729c775a7af375e5c" dependencies = [ "eth-types", "ethers-core 
0.17.0", @@ -2462,7 +2462,7 @@ dependencies = [ [[package]] name = "mpt-zktrie" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#61cb695d1066cdfe63dd65d4368ba48d42386dd9" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#d3bf2036dce76d813bfaf29729c775a7af375e5c" dependencies = [ "eth-types", "halo2-mpt-circuits", @@ -4712,7 +4712,7 @@ checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" [[package]] name = "zkevm-circuits" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#61cb695d1066cdfe63dd65d4368ba48d42386dd9" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#d3bf2036dce76d813bfaf29729c775a7af375e5c" dependencies = [ "array-init", "bus-mapping", diff --git a/prover/src/zkevm/circuit/builder.rs b/prover/src/zkevm/circuit/builder.rs index fc8bada05..c632308ba 100644 --- a/prover/src/zkevm/circuit/builder.rs +++ b/prover/src/zkevm/circuit/builder.rs @@ -318,14 +318,14 @@ pub fn block_traces_to_witness_block_with_updated_state( witness_block.circuits_params ); - if !light_mode && builder.mpt_state.root() != &[0u8; 32] { + if !light_mode && builder.mpt_init_state.root() != &[0u8; 32] { log::debug!("block_apply_mpt_state"); - block_apply_mpt_state(&mut witness_block, &builder.mpt_state); + block_apply_mpt_state(&mut witness_block, &builder.mpt_init_state); log::debug!("block_apply_mpt_state done"); } log::debug!( "finish replay trie updates, root {}", - hex::encode(builder.mpt_state.root()) + hex::encode(builder.mpt_init_state.root()) ); Ok(witness_block) } From 1d6417b63e359543e8a1a881249a0be609c2a4c2 Mon Sep 17 00:00:00 2001 From: Zhuo Zhang Date: Fri, 18 Aug 2023 09:53:16 +0000 Subject: [PATCH 03/35] refactor mock prove testnet --- bin/src/mock_testnet.rs | 34 +++++++++++++++++++++-------- prover/src/inner/prover/mock.rs | 19 +++++++--------- prover/src/zkevm/circuit.rs | 3 ++- prover/src/zkevm/circuit/builder.rs | 2 ++ 4 files changed, 37 insertions(+), 21 deletions(-) diff --git a/bin/src/mock_testnet.rs b/bin/src/mock_testnet.rs index 265175740..10d71c5ff 100644 --- a/bin/src/mock_testnet.rs +++ b/bin/src/mock_testnet.rs @@ -6,6 +6,7 @@ use prover::{ utils::init_env_and_log, zkevm::circuit::{ block_traces_to_witness_block, calculate_row_usage_of_witness_block, SuperCircuit, + WitnessBlock, }, }; use reqwest::Url; @@ -33,8 +34,8 @@ async fn main() { let chunks = get_traces_by_block_api(&setting, batch_id).await; - let chunks = chunks.unwrap_or_else(|_| { - panic!("mock-testnet: failed to request API with batch-{batch_id}") + let chunks = chunks.unwrap_or_else(|e| { + panic!("mock-testnet: failed to request API with batch-{batch_id}, err {e:?}") }); match chunks { @@ -62,9 +63,14 @@ async fn main() { block_traces.push(trace); } - //let result = estimate_rows(&block_traces, i, chunk_id); - let result = - Prover::::mock_prove_target_circuit_batch(&block_traces); + let witness_block = match build_block(&block_traces, batch_id, chunk_id) { + Ok(block) => block, + Err(e) => { + log::error!("mock-testnet: building block failed {e:?}"); + continue; + } + }; + let result = Prover::::mock_prove_witness_block(&witness_block); match result { Ok(_) => { @@ -86,7 +92,11 @@ async fn main() { log::info!("mock-testnet: end"); } -fn estimate_rows(block_traces: &[BlockTrace], batch_id: i64, chunk_id: i64) -> anyhow::Result<()> 
{ +fn build_block( + block_traces: &[BlockTrace], + batch_id: i64, + chunk_id: i64, +) -> anyhow::Result { let gas_total: u64 = block_traces .iter() .map(|b| b.header.gas_used.as_u64()) @@ -108,7 +118,7 @@ fn estimate_rows(block_traces: &[BlockTrace], batch_id: i64, chunk_id: i64) -> a gas_total, gas_total as f64 / row_num as f64 ); - Ok(()) + Ok(witness_block) } /// Request block traces by first using rollup API to get chunk info, then fetching blocks from @@ -122,8 +132,14 @@ async fn get_traces_by_block_api( &[("batch_index", batch_index.to_string())], )?; - let resp: RollupscanResponse = reqwest::get(url).await?.json().await?; - log::info!("handling batch {}", resp.batch_index); + let resp: String = reqwest::get(url).await?.text().await?; + log::debug!("resp is {resp}"); + let resp: RollupscanResponse = serde_json::from_str(&resp)?; + log::info!( + "handling batch {}, chunk size {}", + resp.batch_index, + resp.chunks.as_ref().unwrap().len() + ); Ok(resp.chunks) } diff --git a/prover/src/inner/prover/mock.rs b/prover/src/inner/prover/mock.rs index 55d08eb35..16f1b75ff 100644 --- a/prover/src/inner/prover/mock.rs +++ b/prover/src/inner/prover/mock.rs @@ -2,11 +2,12 @@ use super::Prover; use crate::{ config::INNER_DEGREE, utils::metric_of_witness_block, - zkevm::circuit::{block_traces_to_witness_block, check_batch_capacity, TargetCircuit}, + zkevm::circuit::{block_traces_to_witness_block, TargetCircuit}, }; use anyhow::bail; use halo2_proofs::{dev::MockProver, halo2curves::bn256::Fr}; use types::eth::BlockTrace; +use zkevm_circuits::witness::Block; impl Prover { pub fn mock_prove_target_circuit(block_trace: &BlockTrace) -> anyhow::Result<()> { @@ -14,14 +15,13 @@ impl Prover { } pub fn mock_prove_target_circuit_batch(block_traces: &[BlockTrace]) -> anyhow::Result<()> { - log::info!("start mock prove {}", C::name()); - let original_block_len = block_traces.len(); - let mut block_traces = block_traces.to_vec(); - check_batch_capacity(&mut block_traces)?; let witness_block = block_traces_to_witness_block(&block_traces)?; + Self::mock_prove_witness_block(&witness_block) + } + + pub fn mock_prove_witness_block(witness_block: &Block) -> anyhow::Result<()> { log::info!( - "mock proving batch of len {}, batch metric {:?}", - original_block_len, + "mock proving batch, batch metric {:?}", metric_of_witness_block(&witness_block) ); let (circuit, instance) = C::from_witness_block(&witness_block)?; @@ -34,10 +34,7 @@ impl Prover { bail!("{:#?}", errs); } log::info!( - "mock prove {} done. block proved {}/{}, batch metric: {:?}", - C::name(), - block_traces.len(), - original_block_len, + "mock prove done. 
batch metric: {:?}", metric_of_witness_block(&witness_block), ); Ok(()) diff --git a/prover/src/zkevm/circuit.rs b/prover/src/zkevm/circuit.rs index 519a559d9..f28060027 100644 --- a/prover/src/zkevm/circuit.rs +++ b/prover/src/zkevm/circuit.rs @@ -14,7 +14,8 @@ pub use self::builder::{ block_traces_to_padding_witness_block, block_traces_to_witness_block, block_traces_to_witness_block_with_updated_state, calculate_row_usage_of_trace, calculate_row_usage_of_witness_block, check_batch_capacity, fill_zktrie_state_from_proofs, - normalize_withdraw_proof, storage_trace_to_padding_witness_block, SUB_CIRCUIT_NAMES, + normalize_withdraw_proof, storage_trace_to_padding_witness_block, WitnessBlock, + SUB_CIRCUIT_NAMES, }; // TODO: more smart row capacity checking rather than max_of(row_usage_details) > 1<<20 - 256 diff --git a/prover/src/zkevm/circuit/builder.rs b/prover/src/zkevm/circuit/builder.rs index f35c335e7..6a4690b76 100644 --- a/prover/src/zkevm/circuit/builder.rs +++ b/prover/src/zkevm/circuit/builder.rs @@ -28,6 +28,8 @@ use zkevm_circuits::{ witness::WithdrawProof, }; +pub type WitnessBlock = Block; + pub const SUB_CIRCUIT_NAMES: [&str; 14] = [ "evm", "state", "bytecode", "copy", "keccak", "tx", "rlp", "exp", "modexp", "pi", "poseidon", "sig", "ecc", "mpt", From c51f4110aea5b810caf9db9f3e90b09d8859fe6b Mon Sep 17 00:00:00 2001 From: Zhuo Zhang Date: Fri, 18 Aug 2023 09:56:23 +0000 Subject: [PATCH 04/35] re enable debug assert --- Cargo.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 022ffa133..093ccd884 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -23,8 +23,8 @@ snark-verifier-sdk = { git = "https://github.com/scroll-tech//snark-verifier", t [profile.test] opt-level = 3 -#debug-assertions = true +debug-assertions = true [profile.release] opt-level = 3 -#debug-assertions = true +debug-assertions = true From d3516ea5b96dd4b80e5647fa1927380870de1402 Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Fri, 18 Aug 2023 20:14:23 +0800 Subject: [PATCH 05/35] update lock --- Cargo.lock | 48 +++++++++++++++++++++++++++++++++++++++++------- 1 file changed, 41 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 88a4efc2b..d79b8664b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1751,6 +1751,21 @@ dependencies = [ "rustc-hash", ] +[[package]] +name = "halo2-base" +version = "0.2.2" +source = "git+https://github.com/scroll-tech/halo2-lib?branch=develop#2c225864227e74b207d9f4b9e08c4d5f1afc69a1" +dependencies = [ + "ff", + "halo2_proofs", + "itertools", + "num-bigint", + "num-integer", + "num-traits", + "rand_chacha", + "rustc-hash", +] + [[package]] name = "halo2-ecc" version = "0.2.2" @@ -1758,7 +1773,26 @@ source = "git+https://github.com/scroll-tech/halo2-lib?tag=v0.1.0#2c225864227e74 dependencies = [ "ff", "group", - "halo2-base", + "halo2-base 0.2.2 (git+https://github.com/scroll-tech/halo2-lib?tag=v0.1.0)", + "itertools", + "num-bigint", + "num-integer", + "num-traits", + "rand", + "rand_chacha", + "rand_core", + "serde", + "serde_json", +] + +[[package]] +name = "halo2-ecc" +version = "0.2.2" +source = "git+https://github.com/scroll-tech/halo2-lib?branch=develop#2c225864227e74b207d9f4b9e08c4d5f1afc69a1" +dependencies = [ + "ff", + "group", + "halo2-base 0.2.2 (git+https://github.com/scroll-tech/halo2-lib?branch=develop)", "itertools", "num-bigint", "num-integer", @@ -1789,7 +1823,7 @@ dependencies = [ [[package]] name = "halo2-mpt-circuits" version = "0.1.0" -source = 
"git+https://github.com/scroll-tech/mpt-circuit.git?tag=v0.5.1#2163a9c436ed85363c954ecf7e6e1044a1b991dc" +source = "git+https://github.com/scroll-tech/mpt-circuit.git?branch=v0.5#2163a9c436ed85363c954ecf7e6e1044a1b991dc" dependencies = [ "ethers-core 0.17.0", "halo2_proofs", @@ -3824,8 +3858,8 @@ source = "git+https://github.com/scroll-tech//snark-verifier?tag=v0.1.1#11a09d4a dependencies = [ "bytes", "ethereum-types 0.14.1", - "halo2-base", - "halo2-ecc", + "halo2-base 0.2.2 (git+https://github.com/scroll-tech/halo2-lib?tag=v0.1.0)", + "halo2-ecc 0.2.2 (git+https://github.com/scroll-tech/halo2-lib?tag=v0.1.0)", "hex", "itertools", "lazy_static", @@ -3849,7 +3883,7 @@ dependencies = [ "bincode", "env_logger 0.10.0", "ethereum-types 0.14.1", - "halo2-base", + "halo2-base 0.2.2 (git+https://github.com/scroll-tech/halo2-lib?tag=v0.1.0)", "hex", "itertools", "lazy_static", @@ -4722,8 +4756,8 @@ dependencies = [ "ethers-core 0.17.0", "ethers-signers", "gadgets", - "halo2-base", - "halo2-ecc", + "halo2-base 0.2.2 (git+https://github.com/scroll-tech/halo2-lib?branch=develop)", + "halo2-ecc 0.2.2 (git+https://github.com/scroll-tech/halo2-lib?branch=develop)", "halo2_proofs", "hex", "itertools", From 46bda0c5bc9a29a5270eab5ed332378bae8d27fd Mon Sep 17 00:00:00 2001 From: Zhang Zhuo Date: Fri, 18 Aug 2023 21:24:35 +0800 Subject: [PATCH 06/35] Update mock.rs --- prover/src/inner/prover/mock.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/prover/src/inner/prover/mock.rs b/prover/src/inner/prover/mock.rs index 16f1b75ff..046f1f306 100644 --- a/prover/src/inner/prover/mock.rs +++ b/prover/src/inner/prover/mock.rs @@ -15,16 +15,16 @@ impl Prover { } pub fn mock_prove_target_circuit_batch(block_traces: &[BlockTrace]) -> anyhow::Result<()> { - let witness_block = block_traces_to_witness_block(&block_traces)?; + let witness_block = block_traces_to_witness_block(block_traces)?; Self::mock_prove_witness_block(&witness_block) } pub fn mock_prove_witness_block(witness_block: &Block) -> anyhow::Result<()> { log::info!( "mock proving batch, batch metric {:?}", - metric_of_witness_block(&witness_block) + metric_of_witness_block(witness_block) ); - let (circuit, instance) = C::from_witness_block(&witness_block)?; + let (circuit, instance) = C::from_witness_block(witness_block)?; let prover = MockProver::::run(*INNER_DEGREE, &circuit, instance)?; if let Err(errs) = prover.verify_par() { log::error!("err num: {}", errs.len()); @@ -35,7 +35,7 @@ impl Prover { } log::info!( "mock prove done. 
batch metric: {:?}", - metric_of_witness_block(&witness_block), + metric_of_witness_block(witness_block), ); Ok(()) } From df7a3763eee8035f56a932c5ac6299bc10c75c18 Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Mon, 21 Aug 2023 08:57:37 +0800 Subject: [PATCH 07/35] add replay-testnet runner --- bin/Cargo.toml | 4 + bin/src/run_testnet.rs | 248 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 252 insertions(+) create mode 100644 bin/src/run_testnet.rs diff --git a/bin/Cargo.toml b/bin/Cargo.toml index a8f2f1826..3afab13fa 100644 --- a/bin/Cargo.toml +++ b/bin/Cargo.toml @@ -34,3 +34,7 @@ path = "src/zkevm_verify.rs" [[bin]] name = "mock_testnet" path = "src/mock_testnet.rs" + +[[bin]] +name = "run_testnet" +path = "src/run_testnet.rs" diff --git a/bin/src/run_testnet.rs b/bin/src/run_testnet.rs new file mode 100644 index 000000000..ce8d92372 --- /dev/null +++ b/bin/src/run_testnet.rs @@ -0,0 +1,248 @@ +#![allow(dead_code)] +use anyhow::Result; +use ethers_providers::{Http, Provider}; +use prover::{ + inner::Prover, + utils::{read_env_var, init_env_and_log, GIT_VERSION, short_git_version}, + zkevm::circuit::{ + block_traces_to_witness_block, calculate_row_usage_of_witness_block, SuperCircuit, + WitnessBlock, + }, +}; +use log4rs::{ + append::{ + console::{ConsoleAppender, Target}, + file::FileAppender, + }, + config::{Appender, Config, Root}, +}; +use reqwest::Url; +use serde::Deserialize; +use std::{env, str::FromStr}; +use types::eth::BlockTrace; + +const DEFAULT_BEGIN_BATCH: i64 = 1; +const DEFAULT_END_BATCH: i64 = i64::MAX; + + +// build common config from enviroment +fn common_log() -> Config { + dotenv::dotenv().ok(); + // TODO: cannot support complicated `RUST_LOG` for now. + let log_level = read_env_var("RUST_LOG", "INFO".to_string()); + let log_level = log::LevelFilter::from_str(&log_level).unwrap_or(log::LevelFilter::Info); + + let stderr = ConsoleAppender::builder().target(Target::Stderr).build(); + + Config::builder() + .appenders([ + Appender::builder().build("stderr", Box::new(stderr)), + ]) + .build( + Root::builder() + .appender("stderr") + .build(log_level), + ) + .unwrap() + +} + +// build config for failure-debug +fn debug_log() -> Config { + Config::builder() + .appenders([ + // Appender::builder().build("log-file", Box::new(log_file)), + ]) + .build( + Root::builder() + //.appender("log-file") + //.appender("stderr") + .build(log::LevelFilter::Debug), + ) + .unwrap() +} + +fn task_runner() { + log::info!("run as task runner"); +} + +#[tokio::main] +async fn main() { + let common_log_cfg = common_log(); + let log_handle = log4rs::init_config(common_log_cfg).unwrap(); + log::info!("git version {}", GIT_VERSION); + log::info!("short git version {}", short_git_version()); + + log::info!("relay-alpha testnet: begin"); + + let setting = Setting::new(); + log::info!("mock-testnet: {setting:?}"); + + let provider = Provider::::try_from(&setting.l2geth_api_url) + .expect("mock-testnet: failed to initialize ethers Provider"); + + for batch_id in setting.begin_batch..=setting.end_batch { + log::info!("mock-testnet: requesting block traces of batch {batch_id}"); + + let chunks = get_traces_by_block_api(&setting, batch_id).await; + + let chunks = chunks.unwrap_or_else(|e| { + panic!("mock-testnet: failed to request API with batch-{batch_id}, err {e:?}") + }); + + match chunks { + None => { + log::info!("mock-testnet: finished to prove at batch-{batch_id}"); + break; + } + Some(chunks) => { + for chunk in chunks { + let chunk_id = chunk.index; + log::info!("chunk {:?}", chunk); 
+ + // fetch traces + let mut block_traces: Vec = vec![]; + for block_id in chunk.start_block_number..=chunk.end_block_number { + log::info!("mock-testnet: requesting trace of block {block_id}"); + + let trace = provider + .request( + "scroll_getBlockTraceByNumberOrHash", + [format!("{block_id:#x}")], + ) + .await + .unwrap(); + block_traces.push(trace); + } + + let witness_block = match build_block(&block_traces, batch_id, chunk_id) { + Ok(block) => block, + Err(e) => { + log::error!("mock-testnet: building block failed {e:?}"); + continue; + } + }; + let result = Prover::::mock_prove_witness_block(&witness_block); + + match result { + Ok(_) => { + log::info!( + "mock-testnet: succeeded to prove chunk {chunk_id} inside batch {batch_id}" + ) + } + Err(err) => { + log::error!( + "mock-testnet: failed to prove chunk {chunk_id} inside batch {batch_id}:\n{err:?}" + ); + } + } + } + } + } + } + + log::info!("mock-testnet: end"); +} + +fn build_block( + block_traces: &[BlockTrace], + batch_id: i64, + chunk_id: i64, +) -> anyhow::Result { + let gas_total: u64 = block_traces + .iter() + .map(|b| b.header.gas_used.as_u64()) + .sum(); + let witness_block = block_traces_to_witness_block(block_traces)?; + let rows = calculate_row_usage_of_witness_block(&witness_block)?; + log::info!( + "rows of batch {batch_id}(block range {:?} to {:?}):", + block_traces.first().and_then(|b| b.header.number), + block_traces.last().and_then(|b| b.header.number), + ); + for r in &rows { + log::info!("rows of {}: {}", r.name, r.row_num_real); + } + let row_num = rows.iter().map(|x| x.row_num_real).max().unwrap(); + log::info!( + "final rows of chunk {chunk_id}: row {}, gas {}, gas/row {:.2}", + row_num, + gas_total, + gas_total as f64 / row_num as f64 + ); + Ok(witness_block) +} + +/// Request block traces by first using rollup API to get chunk info, then fetching blocks from +/// l2geth. Return None if no more batches. 
+async fn get_traces_by_block_api( + setting: &Setting, + batch_index: i64, +) -> Result>> { + let url = Url::parse_with_params( + &setting.rollupscan_api_url, + &[("batch_index", batch_index.to_string())], + )?; + + let resp: String = reqwest::get(url).await?.text().await?; + log::debug!("resp is {resp}"); + let resp: RollupscanResponse = serde_json::from_str(&resp)?; + log::info!( + "handling batch {}, chunk size {}", + resp.batch_index, + resp.chunks.as_ref().unwrap().len() + ); + Ok(resp.chunks) +} + +#[derive(Deserialize, Debug)] +struct RollupscanResponse { + batch_index: usize, + chunks: Option>, +} + +#[derive(Deserialize, Debug)] +struct ChunkInfo { + index: i64, + created_at: String, + total_tx_num: i64, + hash: String, + start_block_number: i64, + end_block_number: i64, +} + +#[derive(Debug, Default)] +struct Setting { + begin_batch: i64, + end_batch: i64, + task_runers: u32, + coordinator_url: String, + l2geth_api_url: String, + rollupscan_api_url: String, +} + +impl Setting { + pub fn new() -> Self { + let l2geth_api_url = + env::var("L2GETH_API_URL").expect("mock-testnet: Must set env L2GETH_API_URL"); + let rollupscan_api_url = env::var("ROLLUPSCAN_API_URL"); + let rollupscan_api_url = + rollupscan_api_url.unwrap_or_else(|_| "http://10.0.3.119:8560/api/chunks".to_string()); + let begin_batch = env::var("PROVE_BEGIN_BATCH") + .ok() + .and_then(|n| n.parse().ok()) + .unwrap_or(DEFAULT_BEGIN_BATCH); + let end_batch = env::var("PROVE_END_BATCH") + .ok() + .and_then(|n| n.parse().ok()) + .unwrap_or(DEFAULT_END_BATCH); + + Self { + begin_batch, + end_batch, + l2geth_api_url, + rollupscan_api_url, + ..Default::default() + } + } +} From 6f517900e0180e3690008867bc342ed620f371bc Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Mon, 21 Aug 2023 09:55:15 +0800 Subject: [PATCH 08/35] wip: refactor builder --- prover/src/zkevm/circuit/builder.rs | 111 ++++++++----- types/src/eth.rs | 241 ---------------------------- 2 files changed, 69 insertions(+), 283 deletions(-) delete mode 100644 types/src/eth.rs diff --git a/prover/src/zkevm/circuit/builder.rs b/prover/src/zkevm/circuit/builder.rs index c632308ba..3525136d0 100644 --- a/prover/src/zkevm/circuit/builder.rs +++ b/prover/src/zkevm/circuit/builder.rs @@ -6,9 +6,12 @@ use super::{ use crate::config::INNER_DEGREE; use anyhow::{bail, Result}; use bus_mapping::{ - circuit_input_builder::{CircuitInputBuilder, CircuitsParams, PrecompileEcParams,}, + circuit_input_builder::{ + self, CircuitInputBuilder, CircuitsParams, PrecompileEcParams, + }, + state_db::{CodeDB, StateDB}, }; -use eth_types::{ToBigEndian, H256}; +use eth_types::{ToBigEndian, H256, U256, ToWord}; use halo2_proofs::halo2curves::bn256::Fr; use is_even::IsEven; use itertools::Itertools; @@ -39,10 +42,15 @@ pub fn calculate_row_usage_of_trace( pub fn calculate_row_usage_of_witness_block( witness_block: &Block, ) -> Result> { - let rows = ::Inner::min_num_rows_block_subcircuits( + let mut rows = ::Inner::min_num_rows_block_subcircuits( witness_block, ); + assert_eq!(SUB_CIRCUIT_NAMES[10], "poseidon"); + assert_eq!(SUB_CIRCUIT_NAMES[13], "mpt"); + // empirical estimation is each row in mpt cost 1.2 hash (aka 11 rows) + rows[10].row_num_real += rows[13].row_num_real*11; + log::debug!( "row usage of block {:?}, tx num {:?}, tx calldata len sum {}, rows needed {:?}", witness_block @@ -194,14 +202,19 @@ pub fn block_traces_to_padding_witness_block(block_traces: &[BlockTrace]) -> Res block_traces[0].storage_trace.root_before }; - // the only purpose here it to get the updated zktrie state - 
let prev_witness_block = - block_traces_to_witness_block_with_updated_state(block_traces, false)?; + if block_traces.is_empty() { + padding_witness_block(old_root.to_word()) + } else { + // the only purpose here it to get the final state root + let prev_witness_block = + block_traces_to_witness_block_with_updated_state(block_traces, false)?; + + // TODO: when prev_witness_block.tx.is_empty(), the `withdraw_proof` here should be a subset of + // storage proofs of prev block + let storage_trace = normalize_withdraw_proof(&prev_witness_block.mpt_updates.withdraw_proof); + storage_trace_to_padding_witness_block(storage_trace) + } - // TODO: when prev_witness_block.tx.is_empty(), the `withdraw_proof` here should be a subset of - // storage proofs of prev block - let storage_trace = normalize_withdraw_proof(&prev_witness_block.mpt_updates.withdraw_proof); - storage_trace_to_padding_witness_block(storage_trace) } pub fn storage_trace_to_padding_witness_block(storage_trace: StorageTrace) -> Result> { @@ -219,6 +232,27 @@ pub fn storage_trace_to_padding_witness_block(storage_trace: StorageTrace) -> Re block_traces_to_witness_block_with_updated_state(&[], false) } +fn global_circuit_params() -> CircuitsParams { + CircuitsParams { + max_evm_rows: MAX_RWS, + max_rws: MAX_RWS, + max_copy_rows: MAX_RWS, + max_txs: MAX_TXS, + max_calldata: MAX_CALLDATA, + max_bytecode: MAX_BYTECODE, + max_inner_blocks: MAX_INNER_BLOCKS, + max_keccak_rows: MAX_KECCAK_ROWS, + max_exp_steps: MAX_EXP_STEPS, + max_mpt_rows: MAX_MPT_ROWS, + max_rlp_rows: MAX_CALLDATA, + max_ec_ops: PrecompileEcParams { + ec_add: MAX_PRECOMPILE_EC_ADD, + ec_mul: MAX_PRECOMPILE_EC_MUL, + ec_pairing: MAX_PRECOMPILE_EC_PAIRING, + }, + } +} + pub fn block_traces_to_witness_block_with_updated_state( block_traces: &[BlockTrace], light_mode: bool, // light_mode used in row estimation @@ -241,25 +275,6 @@ pub fn block_traces_to_witness_block_with_updated_state( ); } - let circuit_params = CircuitsParams { - max_evm_rows: MAX_RWS, - max_rws: MAX_RWS, - max_copy_rows: MAX_RWS, - max_txs: MAX_TXS, - max_calldata: MAX_CALLDATA, - max_bytecode: MAX_BYTECODE, - max_inner_blocks: MAX_INNER_BLOCKS, - max_keccak_rows: MAX_KECCAK_ROWS, - max_exp_steps: MAX_EXP_STEPS, - max_mpt_rows: MAX_MPT_ROWS, - max_rlp_rows: MAX_CALLDATA, - max_ec_ops: PrecompileEcParams { - ec_add: MAX_PRECOMPILE_EC_ADD, - ec_mul: MAX_PRECOMPILE_EC_MUL, - ec_pairing: MAX_PRECOMPILE_EC_PAIRING, - }, - }; - let first_trace = &block_traces[0]; let more_traces = &block_traces[1..]; @@ -289,7 +304,7 @@ pub fn block_traces_to_witness_block_with_updated_state( }; let mut builder = CircuitInputBuilder::new_from_l2_trace( - circuit_params, + global_circuit_params(), first_trace, more_traces.len() != 0, )?; @@ -330,19 +345,31 @@ pub fn block_traces_to_witness_block_with_updated_state( Ok(witness_block) } -pub fn decode_bytecode(bytecode: &str) -> Result> { - let mut stripped = if let Some(stripped) = bytecode.strip_prefix("0x") { - stripped.to_string() - } else { - bytecode.to_string() - }; - - let bytecode_len = stripped.len() as u64; - if !bytecode_len.is_even() { - stripped = format!("0{stripped}"); - } +/// This entry simulate the progress which use block_traces_to_witness_block_with_updated_state +/// to generate a padding block with null trace array: +/// + Everything use default values +/// + no trace, no tx, so no mpt table, zktrie state light mode is useless, +/// and what only needed is the previous state root +pub fn padding_witness_block( + old_root: U256, +) -> Result> { + let mut 
builder_block = circuit_input_builder::Block::from_headers(&[], global_circuit_params()); + builder_block.chain_id = *CHAIN_ID; + builder_block.prev_state_root = old_root; + let mut builder = CircuitInputBuilder::new( + StateDB::new(), + CodeDB::new(), + &builder_block + ); + builder.finalize_building()?; - hex::decode(stripped).map_err(|e| e.into()) + let witness_block = + block_convert_with_l1_queue_index(&builder.block, &builder.code_db, 0)?; + log::debug!( + "padding witness_block built with circuits_params {:?}", + witness_block.circuits_params + ); + Ok(witness_block) } pub fn normalize_withdraw_proof(proof: &WithdrawProof) -> StorageTrace { diff --git a/types/src/eth.rs b/types/src/eth.rs deleted file mode 100644 index 382f453c8..000000000 --- a/types/src/eth.rs +++ /dev/null @@ -1,241 +0,0 @@ -use eth_types::{ - evm_types::{Gas, GasCost, OpcodeId, ProgramCounter, Stack, Storage}, - Block, GethExecStep, GethExecTrace, Hash, Transaction, Word, H256, -}; -use ethers_core::types::{Address, Bytes, U256, U64}; -use serde::{Deserialize, Serialize}; -use std::collections::HashMap; - -#[derive(Deserialize, Serialize, Default, Debug, Clone)] -pub struct BlockTrace { - #[serde(rename = "chainID", default)] - pub chain_id: u64, - pub coinbase: AccountProofWrapper, - pub header: EthBlock, - pub transactions: Vec, - #[serde(rename = "executionResults")] - pub execution_results: Vec, - #[serde(rename = "storageTrace")] - pub storage_trace: StorageTrace, - #[serde(rename = "txStorageTraces", default)] - pub tx_storage_trace: Vec, - #[serde(rename = "startL1QueueIndex", default)] - pub start_l1_queue_index: u64, - // #[serde(rename = "mptwitness", default)] - // pub mpt_witness: Vec, -} - -#[derive(Deserialize, Serialize, Default, Debug, Clone)] -pub struct BlockTraceJsonRpcResult { - pub result: BlockTrace, -} - -impl From for EthBlock { - fn from(mut b: BlockTrace) -> Self { - let mut txs = Vec::new(); - for (idx, tx_data) in b.transactions.iter_mut().enumerate() { - let tx_idx = Some(U64::from(idx)); - let tx = tx_data.to_eth_tx(b.header.hash, b.header.number, tx_idx); - txs.push(tx) - } - EthBlock { - transactions: txs, - difficulty: 0.into(), - ..b.header - } - } -} - -#[derive(Deserialize, Serialize, Debug, Clone)] -pub struct TransactionTrace { - // FIXME after traces upgraded - #[serde(default, rename = "txHash")] - pub tx_hash: H256, - #[serde(rename = "type")] - pub type_: u8, - pub nonce: u64, - pub gas: u64, - #[serde(rename = "gasPrice")] - pub gas_price: U256, - pub from: Address, - pub to: Option
, - #[serde(rename = "chainId")] - pub chain_id: U256, - pub value: U256, - pub data: Bytes, - #[serde(rename = "isCreate")] - pub is_create: bool, - pub v: U64, - pub r: U256, - pub s: U256, -} - -impl TransactionTrace { - pub fn to_eth_tx( - &self, - block_hash: Option, - block_number: Option, - transaction_index: Option, - ) -> Transaction { - Transaction { - hash: self.tx_hash, - nonce: U256::from(self.nonce), - block_hash, - block_number, - transaction_index, - from: self.from, - to: self.to, - value: self.value, - gas_price: Some(self.gas_price), - gas: U256::from(self.gas), - input: self.data.clone(), - v: self.v, - r: self.r, - s: self.s, - transaction_type: Some(U64::from(self.type_ as u64)), - access_list: None, - max_priority_fee_per_gas: None, - max_fee_per_gas: None, - chain_id: Some(self.chain_id), - other: Default::default(), - } - } -} - -pub type AccountTrieProofs = HashMap>; -pub type StorageTrieProofs = HashMap>>; - -#[derive(Deserialize, Serialize, Default, Debug, Clone)] -pub struct StorageTrace { - #[serde(rename = "rootBefore")] - pub root_before: Hash, - #[serde(rename = "rootAfter")] - pub root_after: Hash, - pub proofs: Option, - #[serde(rename = "storageProofs", default)] - pub storage_proofs: StorageTrieProofs, - #[serde(rename = "deletionProofs", default)] - pub deletion_proofs: Vec, -} - -pub type EthBlock = Block; - -#[derive(Deserialize, Serialize, Debug, Clone)] -pub struct ExecutionResult { - #[serde(rename = "l1DataFee", default)] - pub l1_fee: U256, - pub gas: u64, - pub failed: bool, - #[serde(rename = "returnValue", default)] - pub return_value: String, - pub from: Option, - pub to: Option, - #[serde(rename = "accountAfter", default)] - pub account_after: Vec, - #[serde(rename = "accountCreated")] - pub account_created: Option, - #[serde(rename = "poseidonCodeHash")] - pub code_hash: Option, - #[serde(rename = "byteCode")] - pub byte_code: Option, - #[serde(rename = "structLogs")] - pub exec_steps: Vec, -} - -impl From<&ExecutionResult> for GethExecTrace { - fn from(e: &ExecutionResult) -> Self { - let mut struct_logs = Vec::new(); - for exec_step in &e.exec_steps { - let step = exec_step.into(); - struct_logs.push(step) - } - GethExecTrace { - l1_fee: e.l1_fee.as_u64(), - gas: Gas(e.gas), - failed: e.failed, - return_value: e.return_value.clone(), - struct_logs, - } - } -} - -#[derive(Deserialize, Serialize, Debug, Clone)] -pub struct ExecStep { - pub pc: u64, - pub op: OpcodeId, - pub gas: u64, - #[serde(rename = "gasCost")] - pub gas_cost: u64, - #[serde(default)] - pub refund: u64, - pub depth: isize, - pub error: Option, - pub stack: Option>, - pub memory: Option>, - pub storage: Option>, - #[serde(rename = "extraData")] - pub extra_data: Option, -} - -impl From<&ExecStep> for GethExecStep { - fn from(e: &ExecStep) -> Self { - let stack = e.stack.clone().map_or_else(Stack::new, Stack::from); - let storage = e.storage.clone().map_or_else(Storage::empty, Storage::from); - - GethExecStep { - pc: ProgramCounter(e.pc as usize), - // FIXME - op: e.op, - gas: Gas(e.gas), - gas_cost: GasCost(e.gas_cost), - refund: Gas(e.refund), - depth: e.depth as u16, - error: e.error.clone(), - stack, - memory: Default::default(), - storage, - } - } -} - -#[derive(Serialize, Deserialize, Debug, Clone)] -pub struct ExtraData { - #[serde(rename = "codeList")] - pub code_list: Option>, - #[serde(rename = "proofList")] - pub proof_list: Option>, -} - -impl ExtraData { - pub fn get_code_at(&self, i: usize) -> Option { - self.code_list.as_ref().and_then(|c| 
c.get(i)).cloned() - } - - pub fn get_code_hash_at(&self, i: usize) -> Option { - self.get_proof_at(i).and_then(|a| a.poseidon_code_hash) - } - - pub fn get_proof_at(&self, i: usize) -> Option { - self.proof_list.as_ref().and_then(|p| p.get(i)).cloned() - } -} - -#[derive(Serialize, Deserialize, Clone, Default, Debug)] -pub struct AccountProofWrapper { - pub address: Option
, - pub nonce: Option, - pub balance: Option, - #[serde(rename = "keccakCodeHash")] - pub keccak_code_hash: Option, - #[serde(rename = "poseidonCodeHash")] - pub poseidon_code_hash: Option, - pub proof: Option>, - pub storage: Option, -} - -#[derive(Serialize, Deserialize, Clone, Debug)] -pub struct StorageProofWrapper { - pub key: Option, - pub value: Option, - pub proof: Option>, -} From 3b1b39504e252eea35a3f01fb8c033b3b19422a2 Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Mon, 21 Aug 2023 16:10:22 +0800 Subject: [PATCH 09/35] add run-testnet crate --- Cargo.lock | 79 ++++- Cargo.toml | 1 + bin/Cargo.toml | 4 - bin/src/run_testnet.rs | 248 -------------- run-testnet/Cargo.toml | 24 ++ run-testnet/README.md | 6 + run-testnet/src/main.rs | 316 ++++++++++++++++++ run-testnet/testnet_coordinator/config.go | 54 +++ .../testnet_coordinator/config.yaml.example | 7 + run-testnet/testnet_coordinator/go.mod | 5 + run-testnet/testnet_coordinator/go.sum | 4 + run-testnet/testnet_coordinator/main.go | 80 +++++ .../testnet_coordinator/task_assign.go | 84 +++++ 13 files changed, 656 insertions(+), 256 deletions(-) delete mode 100644 bin/src/run_testnet.rs create mode 100644 run-testnet/Cargo.toml create mode 100644 run-testnet/README.md create mode 100644 run-testnet/src/main.rs create mode 100644 run-testnet/testnet_coordinator/config.go create mode 100644 run-testnet/testnet_coordinator/config.yaml.example create mode 100644 run-testnet/testnet_coordinator/go.mod create mode 100644 run-testnet/testnet_coordinator/go.sum create mode 100644 run-testnet/testnet_coordinator/main.go create mode 100644 run-testnet/testnet_coordinator/task_assign.go diff --git a/Cargo.lock b/Cargo.lock index d79b8664b..17b5164c1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1439,6 +1439,18 @@ dependencies = [ "subtle", ] +[[package]] +name = "filetime" +version = "0.2.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "redox_syscall 0.3.5", + "windows-sys 0.48.0", +] + [[package]] name = "fixed-hash" version = "0.7.0" @@ -2722,7 +2734,7 @@ dependencies = [ "cfg-if 1.0.0", "instant", "libc", - "redox_syscall", + "redox_syscall 0.2.16", "smallvec", "winapi", ] @@ -2735,7 +2747,7 @@ checksum = "9069cbb9f99e3a5083476ccb29ceb1de18b9118cafa53e90c9551235de2b9521" dependencies = [ "cfg-if 1.0.0", "libc", - "redox_syscall", + "redox_syscall 0.2.16", "smallvec", "windows-sys 0.45.0", ] @@ -3152,6 +3164,15 @@ dependencies = [ "bitflags", ] +[[package]] +name = "redox_syscall" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" +dependencies = [ + "bitflags", +] + [[package]] name = "regex" version = "1.7.3" @@ -3395,7 +3416,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d470e29e933dac4101180fd6574971892315c414cf2961a192729089687cc9b" dependencies = [ "derive_more", - "primitive-types 0.11.1", + "primitive-types 0.12.1", "rlp", "ruint-macro", "rustc_version", @@ -4018,6 +4039,17 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" +[[package]] +name = "tar" +version = "0.4.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b16afcea1f22891c49a00c751c7b63b2233284064f11a200fc624137c51e2ddb" +dependencies = [ + 
"filetime", + "libc", + "xattr", +] + [[package]] name = "termcolor" version = "1.2.0" @@ -4027,6 +4059,27 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "testnet-runner" +version = "0.6.4" +dependencies = [ + "anyhow", + "dotenv", + "ethers-providers 1.0.2", + "flate2", + "itertools", + "log", + "log4rs", + "prover", + "reqwest", + "serde", + "serde_derive", + "serde_json", + "tar", + "tokio", + "types", +] + [[package]] name = "textwrap" version = "0.16.0" @@ -4060,7 +4113,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3ee93aa2b8331c0fec9091548843f2c90019571814057da3b783f9de09349d73" dependencies = [ "libc", - "redox_syscall", + "redox_syscall 0.2.16", "winapi", ] @@ -4577,6 +4630,15 @@ dependencies = [ "windows-targets 0.42.2", ] +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.0", +] + [[package]] name = "windows-targets" version = "0.42.2" @@ -4737,6 +4799,15 @@ dependencies = [ "tap", ] +[[package]] +name = "xattr" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea263437ca03c1522846a4ddafbca2542d0ad5ed9b784909d4b27b76f62bc34a" +dependencies = [ + "libc", +] + [[package]] name = "zeroize" version = "1.6.0" diff --git a/Cargo.toml b/Cargo.toml index 093ccd884..c288e0a4d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,6 +3,7 @@ members = [ "bin", "prover", "types", + "run-testnet", ] [patch.crates-io] diff --git a/bin/Cargo.toml b/bin/Cargo.toml index 3afab13fa..a8f2f1826 100644 --- a/bin/Cargo.toml +++ b/bin/Cargo.toml @@ -34,7 +34,3 @@ path = "src/zkevm_verify.rs" [[bin]] name = "mock_testnet" path = "src/mock_testnet.rs" - -[[bin]] -name = "run_testnet" -path = "src/run_testnet.rs" diff --git a/bin/src/run_testnet.rs b/bin/src/run_testnet.rs deleted file mode 100644 index ce8d92372..000000000 --- a/bin/src/run_testnet.rs +++ /dev/null @@ -1,248 +0,0 @@ -#![allow(dead_code)] -use anyhow::Result; -use ethers_providers::{Http, Provider}; -use prover::{ - inner::Prover, - utils::{read_env_var, init_env_and_log, GIT_VERSION, short_git_version}, - zkevm::circuit::{ - block_traces_to_witness_block, calculate_row_usage_of_witness_block, SuperCircuit, - WitnessBlock, - }, -}; -use log4rs::{ - append::{ - console::{ConsoleAppender, Target}, - file::FileAppender, - }, - config::{Appender, Config, Root}, -}; -use reqwest::Url; -use serde::Deserialize; -use std::{env, str::FromStr}; -use types::eth::BlockTrace; - -const DEFAULT_BEGIN_BATCH: i64 = 1; -const DEFAULT_END_BATCH: i64 = i64::MAX; - - -// build common config from enviroment -fn common_log() -> Config { - dotenv::dotenv().ok(); - // TODO: cannot support complicated `RUST_LOG` for now. 
- let log_level = read_env_var("RUST_LOG", "INFO".to_string()); - let log_level = log::LevelFilter::from_str(&log_level).unwrap_or(log::LevelFilter::Info); - - let stderr = ConsoleAppender::builder().target(Target::Stderr).build(); - - Config::builder() - .appenders([ - Appender::builder().build("stderr", Box::new(stderr)), - ]) - .build( - Root::builder() - .appender("stderr") - .build(log_level), - ) - .unwrap() - -} - -// build config for failure-debug -fn debug_log() -> Config { - Config::builder() - .appenders([ - // Appender::builder().build("log-file", Box::new(log_file)), - ]) - .build( - Root::builder() - //.appender("log-file") - //.appender("stderr") - .build(log::LevelFilter::Debug), - ) - .unwrap() -} - -fn task_runner() { - log::info!("run as task runner"); -} - -#[tokio::main] -async fn main() { - let common_log_cfg = common_log(); - let log_handle = log4rs::init_config(common_log_cfg).unwrap(); - log::info!("git version {}", GIT_VERSION); - log::info!("short git version {}", short_git_version()); - - log::info!("relay-alpha testnet: begin"); - - let setting = Setting::new(); - log::info!("mock-testnet: {setting:?}"); - - let provider = Provider::::try_from(&setting.l2geth_api_url) - .expect("mock-testnet: failed to initialize ethers Provider"); - - for batch_id in setting.begin_batch..=setting.end_batch { - log::info!("mock-testnet: requesting block traces of batch {batch_id}"); - - let chunks = get_traces_by_block_api(&setting, batch_id).await; - - let chunks = chunks.unwrap_or_else(|e| { - panic!("mock-testnet: failed to request API with batch-{batch_id}, err {e:?}") - }); - - match chunks { - None => { - log::info!("mock-testnet: finished to prove at batch-{batch_id}"); - break; - } - Some(chunks) => { - for chunk in chunks { - let chunk_id = chunk.index; - log::info!("chunk {:?}", chunk); - - // fetch traces - let mut block_traces: Vec = vec![]; - for block_id in chunk.start_block_number..=chunk.end_block_number { - log::info!("mock-testnet: requesting trace of block {block_id}"); - - let trace = provider - .request( - "scroll_getBlockTraceByNumberOrHash", - [format!("{block_id:#x}")], - ) - .await - .unwrap(); - block_traces.push(trace); - } - - let witness_block = match build_block(&block_traces, batch_id, chunk_id) { - Ok(block) => block, - Err(e) => { - log::error!("mock-testnet: building block failed {e:?}"); - continue; - } - }; - let result = Prover::::mock_prove_witness_block(&witness_block); - - match result { - Ok(_) => { - log::info!( - "mock-testnet: succeeded to prove chunk {chunk_id} inside batch {batch_id}" - ) - } - Err(err) => { - log::error!( - "mock-testnet: failed to prove chunk {chunk_id} inside batch {batch_id}:\n{err:?}" - ); - } - } - } - } - } - } - - log::info!("mock-testnet: end"); -} - -fn build_block( - block_traces: &[BlockTrace], - batch_id: i64, - chunk_id: i64, -) -> anyhow::Result { - let gas_total: u64 = block_traces - .iter() - .map(|b| b.header.gas_used.as_u64()) - .sum(); - let witness_block = block_traces_to_witness_block(block_traces)?; - let rows = calculate_row_usage_of_witness_block(&witness_block)?; - log::info!( - "rows of batch {batch_id}(block range {:?} to {:?}):", - block_traces.first().and_then(|b| b.header.number), - block_traces.last().and_then(|b| b.header.number), - ); - for r in &rows { - log::info!("rows of {}: {}", r.name, r.row_num_real); - } - let row_num = rows.iter().map(|x| x.row_num_real).max().unwrap(); - log::info!( - "final rows of chunk {chunk_id}: row {}, gas {}, gas/row {:.2}", - row_num, - gas_total, 
- gas_total as f64 / row_num as f64 - ); - Ok(witness_block) -} - -/// Request block traces by first using rollup API to get chunk info, then fetching blocks from -/// l2geth. Return None if no more batches. -async fn get_traces_by_block_api( - setting: &Setting, - batch_index: i64, -) -> Result>> { - let url = Url::parse_with_params( - &setting.rollupscan_api_url, - &[("batch_index", batch_index.to_string())], - )?; - - let resp: String = reqwest::get(url).await?.text().await?; - log::debug!("resp is {resp}"); - let resp: RollupscanResponse = serde_json::from_str(&resp)?; - log::info!( - "handling batch {}, chunk size {}", - resp.batch_index, - resp.chunks.as_ref().unwrap().len() - ); - Ok(resp.chunks) -} - -#[derive(Deserialize, Debug)] -struct RollupscanResponse { - batch_index: usize, - chunks: Option>, -} - -#[derive(Deserialize, Debug)] -struct ChunkInfo { - index: i64, - created_at: String, - total_tx_num: i64, - hash: String, - start_block_number: i64, - end_block_number: i64, -} - -#[derive(Debug, Default)] -struct Setting { - begin_batch: i64, - end_batch: i64, - task_runers: u32, - coordinator_url: String, - l2geth_api_url: String, - rollupscan_api_url: String, -} - -impl Setting { - pub fn new() -> Self { - let l2geth_api_url = - env::var("L2GETH_API_URL").expect("mock-testnet: Must set env L2GETH_API_URL"); - let rollupscan_api_url = env::var("ROLLUPSCAN_API_URL"); - let rollupscan_api_url = - rollupscan_api_url.unwrap_or_else(|_| "http://10.0.3.119:8560/api/chunks".to_string()); - let begin_batch = env::var("PROVE_BEGIN_BATCH") - .ok() - .and_then(|n| n.parse().ok()) - .unwrap_or(DEFAULT_BEGIN_BATCH); - let end_batch = env::var("PROVE_END_BATCH") - .ok() - .and_then(|n| n.parse().ok()) - .unwrap_or(DEFAULT_END_BATCH); - - Self { - begin_batch, - end_batch, - l2geth_api_url, - rollupscan_api_url, - ..Default::default() - } - } -} diff --git a/run-testnet/Cargo.toml b/run-testnet/Cargo.toml new file mode 100644 index 000000000..c8503607a --- /dev/null +++ b/run-testnet/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = "testnet-runner" +version = "0.6.4" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +anyhow = "1.0" +dotenv = "0.15.0" +ethers-providers = "1.0" +itertools = "0.10.5" +log = "0.4" +log4rs = { version = "1.2.0", default_features = false, features = ["console_appender", "file_appender"] } +reqwest = { version = "0.11", default-features = false, features = [ "json", "rustls-tls" ] } +serde = "1.0" +serde_derive = "1.0" +serde_json = "1.0.66" +tar = "0.4" +flate2 = "1" +tokio = { version = "1", features = ["full"] } +types = { path = "../types" } +prover = { path = "../prover" } + diff --git a/run-testnet/README.md b/run-testnet/README.md new file mode 100644 index 000000000..166c1f0b6 --- /dev/null +++ b/run-testnet/README.md @@ -0,0 +1,6 @@ + +App exits: + ++ 9: no more batch avaliable ++ 13: unexpected error in run-time, can not continue executing runner until the issue has been resolved. 
++ 17: same as `13` but a batch task may hold without dropping from coordinator, we should reset the task manually \ No newline at end of file diff --git a/run-testnet/src/main.rs b/run-testnet/src/main.rs new file mode 100644 index 000000000..600cf481c --- /dev/null +++ b/run-testnet/src/main.rs @@ -0,0 +1,316 @@ +#![allow(dead_code)] +use anyhow::Result; +use ethers_providers::{Http, Provider}; +use prover::{ + inner::Prover, + utils::{read_env_var, GIT_VERSION, short_git_version}, + zkevm::circuit::{ + block_traces_to_witness_block, calculate_row_usage_of_witness_block, SuperCircuit, + WitnessBlock, + }, +}; +use log4rs::{ + append::{ + console::{ConsoleAppender, Target}, + file::FileAppender, + }, + config::{Appender, Logger, Config, Root}, +}; +use reqwest::Url; +use serde::Deserialize; +use std::{env, str::FromStr, process::ExitCode}; +use types::eth::BlockTrace; + +const DEFAULT_BEGIN_BATCH: i64 = 1; +const DEFAULT_END_BATCH: i64 = i64::MAX; + + +// build common config from enviroment +fn common_log() -> Result { + dotenv::dotenv().ok(); + // TODO: cannot support complicated `RUST_LOG` for now. + let log_level = read_env_var("RUST_LOG", "INFO".to_string()); + let log_level = log::LevelFilter::from_str(&log_level).unwrap_or(log::LevelFilter::Info); + + let stdoutput = ConsoleAppender::builder().target(Target::Stdout).build(); + + let config = Config::builder() + .appenders([ + Appender::builder().build("std", Box::new(stdoutput)), + ]).build( + Root::builder() + .appender("std") + .build(log_level), + )?; + + Ok(config) +} + +// build config for circuit-debug +fn debug_log(output_dir: &str) -> Result { + use std::path::Path; + let err_output = ConsoleAppender::builder().target(Target::Stderr).build(); + let log_file_path = Path::new(output_dir).join("runner.log"); + let log_file = FileAppender::builder().build(log_file_path).unwrap(); + let config = Config::builder() + .appenders([ + Appender::builder().build("log-file", Box::new(log_file)), + Appender::builder().build("stderr", Box::new(err_output)), + ]) + .logger( + Logger::builder() + .appender("log-file") + .additive(true) + .build("", log::LevelFilter::Debug) + ) + .build( + Root::builder() + .appender("stderr") + .build(log::LevelFilter::Warn), + )?; + + Ok(config) +} + +fn prepare_chunk_dir(output_dir: &str, chunk_id: u64) -> Result { + use std::{path::Path, fs}; + let chunk_path = Path::new(output_dir).join(format!("{}", chunk_id)); + fs::create_dir(chunk_path.as_path())?; + Ok(chunk_path.to_str().ok_or_else(||anyhow::anyhow!("invalid chunk path"))?.into()) +} + +fn record_chunk_traces(chunk_dir: &str, traces: &[BlockTrace]) -> Result<()>{ + + use flate2::Compression; + use flate2::write::GzEncoder; + use std::fs::File; + use tar::{Header, Builder}; + use std::path::Path; + + let trace_file_path = Path::new(chunk_dir).join("traces.tar.gz"); + let tarfile = File::create(trace_file_path)?; + let enc = GzEncoder::new(tarfile, Compression::default()); + let mut tar = Builder::new(enc); + + for (i, trace) in traces.iter().enumerate() { + let trace_str = serde_json::to_string(&trace)?; + + let mut header = Header::new_gnu(); + header.set_path(trace.header.number.map_or_else( + ||format!("unknown_block_{}.json", i), + |blkn|format!("{}.json", blkn), + ))?; + header.set_size(trace_str.len() as u64); + header.set_cksum(); + tar.append(&header, trace_str.as_bytes())?; + } + + Ok(()) +} + +fn chunk_handling(batch_id: i64, chunk_id: i64, block_traces: &[BlockTrace]) -> Result<()>{ + + let witness_block = build_block(&block_traces, 
batch_id, chunk_id) + .map_err(|e|anyhow::anyhow!("testnet: building block failed {e:?}"))?; + + Prover::::mock_prove_witness_block(&witness_block) + .map_err(|e|anyhow::anyhow!("testnet: failed to prove chunk {chunk_id} inside batch {batch_id}:\n{e:?}"))?; + + Ok(()) +} + +const EXIT_NO_MORE_TASK : u8 = 9; +const EXIT_FAILED_ENV : u8 = 13; +const EXIT_FAILED_ENV_WITH_TASK : u8 = 17; + +#[tokio::main] +async fn main() -> ExitCode{ + let log_handle = log4rs::init_config(common_log().unwrap()).unwrap(); + log::info!("git version {}", GIT_VERSION); + log::info!("short git version {}", short_git_version()); + + log::info!("relay-alpha testnet runner: begin"); + + let setting = Setting::new(); + log::info!("settings: {setting:?}"); + + let provider = Provider::::try_from(&setting.l2geth_api_url) + .expect("mock-testnet: failed to initialize ethers Provider"); + + let (batch_id, chunks) = get_chunks_info(&setting) + .await + .unwrap_or_else(|e| { + panic!("mock-testnet: failed to request API err {e:?}") + }); + match chunks { + None => { + log::info!("mock-testnet: finished to prove at batch-{batch_id}"); + return ExitCode::from(EXIT_NO_MORE_TASK); + } + Some(chunks) => { + // TODO: restart from last chunk? + for chunk in chunks { + let chunk_id = chunk.index; + log::info!("chunk {:?}", chunk); + + // fetch traces + let mut block_traces: Vec = vec![]; + for block_id in chunk.start_block_number..=chunk.end_block_number { + log::info!("mock-testnet: requesting trace of block {block_id}"); + + let trace = provider + .request( + "scroll_getBlockTraceByNumberOrHash", + [format!("{block_id:#x}")], + ) + .await + .unwrap(); + block_traces.push(trace); + } + + // start chunk-level testing + //let chunk_dir = prepare_chunk_dir(&setting.data_output_dir, chunk_id as u64).unwrap(); + if let Err(_) = prepare_chunk_dir(&setting.data_output_dir, chunk_id as u64) + .and_then(|chunk_dir|{ + record_chunk_traces(&chunk_dir, &block_traces)?; + Ok(chunk_dir) + }) + .and_then(|chunk_dir|{ + log::info!("chunk {} has been recorded to {}", chunk_id, chunk_dir); + log_handle.set_config(debug_log(&chunk_dir)?); + Ok(()) + }) + { + if notify_chunks_complete(&setting, batch_id as i64, false).await.is_ok() { + return ExitCode::from(EXIT_FAILED_ENV); + } else { + return ExitCode::from(EXIT_FAILED_ENV_WITH_TASK); + } + } + + let handling_ret = chunk_handling(batch_id as i64, chunk_id, &block_traces); + log_handle.set_config(common_log().unwrap()); + + if handling_ret.is_err() { + // TODO: move data to output dir + } + + log::info!("chunk {} has been handled", chunk_id); + } + } + } + + if let Err(e) = notify_chunks_complete(&setting, batch_id as i64, true).await { + log::error!("can not deliver complete notify to coordinator: {e:?}"); + return ExitCode::from(EXIT_FAILED_ENV_WITH_TASK); + } + log::info!("relay-alpha testnet runner: complete"); + ExitCode::from(0) +} + +fn build_block( + block_traces: &[BlockTrace], + batch_id: i64, + chunk_id: i64, +) -> anyhow::Result { + let gas_total: u64 = block_traces + .iter() + .map(|b| b.header.gas_used.as_u64()) + .sum(); + let witness_block = block_traces_to_witness_block(block_traces)?; + let rows = calculate_row_usage_of_witness_block(&witness_block)?; + log::info!( + "rows of batch {batch_id}(block range {:?} to {:?}):", + block_traces.first().and_then(|b| b.header.number), + block_traces.last().and_then(|b| b.header.number), + ); + for r in &rows { + log::info!("rows of {}: {}", r.name, r.row_num_real); + } + let row_num = rows.iter().map(|x| x.row_num_real).max().unwrap(); + 
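// An aside on the two figures logged below (the row budget and the example numbers
// here are illustrative assumptions, not values taken from this codebase):
// `row_num` is the largest `row_num_real` over all sub-circuits, so the busiest
// sub-circuit alone decides whether a chunk fits a given row budget, while gas/row
// is only a throughput measure for comparing chunks. A minimal sketch of that test:
fn fits_row_budget(max_sub_circuit_rows: usize, row_budget: usize) -> bool {
    // a chunk fits iff its worst sub-circuit stays within the budget
    max_sub_circuit_rows <= row_budget
}
// e.g. assuming a budget of 1_000_000 rows, a chunk whose largest sub-circuit needs
// 750_000 rows fits, and if it consumed 3_000_000 gas the log line below reports
// 3_000_000 / 750_000 = 4.00 gas per row.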
log::info!( + "final rows of chunk {chunk_id}: row {}, gas {}, gas/row {:.2}", + row_num, + gas_total, + gas_total as f64 / row_num as f64 + ); + Ok(witness_block) +} + +/// Request chunk info from cordinator +async fn get_chunks_info( + setting: &Setting, +) -> Result<(usize, Option>)> { + let url = Url::parse(&setting.coordinator_url)?; + + let resp: String = reqwest::get(url).await?.text().await?; + log::debug!("resp is {resp}"); + let resp: RollupscanResponse = serde_json::from_str(&resp)?; + log::info!( + "handling batch {}, chunk size {}", + resp.batch_index, + resp.chunks.as_ref().unwrap().len() + ); + Ok((resp.batch_index, resp.chunks)) +} + +async fn notify_chunks_complete( + setting: &Setting, + batch_index: i64, + completed: bool, +) -> Result<()> { + let url = Url::parse_with_params( + &setting.task_url, + &[(if completed {"done"} else {"drop"}, + batch_index.to_string())], + )?; + + let resp = reqwest::get(url).await?.text().await?; + log::info!( + "notify batch {} {}, resp {}", + batch_index, + if completed {"done"} else {"drop"}, + resp, + ); + Ok(()) +} + +#[derive(Deserialize, Debug)] +struct RollupscanResponse { + batch_index: usize, + chunks: Option>, +} + +#[derive(Deserialize, Debug)] +struct ChunkInfo { + index: i64, + created_at: String, + total_tx_num: i64, + hash: String, + start_block_number: i64, + end_block_number: i64, +} + +#[derive(Debug, Default)] +struct Setting { + coordinator_url: String, + task_url: String, + l2geth_api_url: String, + data_output_dir: String, +} + +impl Setting { + pub fn new() -> Self { + let l2geth_api_url = + env::var("L2GETH_API_URL").expect("mock-testnet: Must set env L2GETH_API_URL"); + let coordinator_url = env::var("ROLLUPSCAN_API_URL"); + let coordinator_url = + coordinator_url.unwrap_or_else(|_| "http://10.0.3.119:8560/api/chunks".to_string()); + + Self { + l2geth_api_url, + coordinator_url, + ..Default::default() + } + } +} diff --git a/run-testnet/testnet_coordinator/config.go b/run-testnet/testnet_coordinator/config.go new file mode 100644 index 000000000..597686426 --- /dev/null +++ b/run-testnet/testnet_coordinator/config.go @@ -0,0 +1,54 @@ +package main + +import ( + "io/ioutil" + "log" + + "gopkg.in/yaml.v3" +) + +type ServerConfig struct { + ServerHost string `yaml:"host,omitempty"` + ServerURL string `yaml:"url,omitempty"` +} + +type Config struct { + StartBatch uint64 `yaml:"start,omitempty"` + ChunkURLTemplate string `yaml:"chunkURL"` + Server *ServerConfig `yaml:"server,omitempty"` +} + +func NewConfig() *Config { + return &Config{ + Server: &ServerConfig{ + ServerHost: "localhost:8560", + ServerURL: "/", + }, + } +} + +func (cfg *Config) LoadEnv(path string) error { + return nil +} + +func (cfg *Config) Load(path string) error { + + data, err := ioutil.ReadFile(path) + if err != nil { + return err + } + + err = yaml.Unmarshal(data, cfg) + if err != nil { + return err + } + + cfgYAML, err := yaml.Marshal(cfg) + if err != nil { + log.Fatal("re-marshal config file fail", err) + } else { + log.Printf("load config:\n%s", cfgYAML) + } + return nil + +} diff --git a/run-testnet/testnet_coordinator/config.yaml.example b/run-testnet/testnet_coordinator/config.yaml.example new file mode 100644 index 000000000..f3c1ee481 --- /dev/null +++ b/run-testnet/testnet_coordinator/config.yaml.example @@ -0,0 +1,7 @@ + + +# start: 0 #batch start from +chunkURL: http:///api/chunks?batch_index=%d +server: + #host: 0.0.0.0:8560 + #url: #the base url server used for \ No newline at end of file diff --git 
a/run-testnet/testnet_coordinator/go.mod b/run-testnet/testnet_coordinator/go.mod new file mode 100644 index 000000000..860fe17ab --- /dev/null +++ b/run-testnet/testnet_coordinator/go.mod @@ -0,0 +1,5 @@ +module testnet_coordinator + +go 1.20 + +require gopkg.in/yaml.v3 v3.0.1 diff --git a/run-testnet/testnet_coordinator/go.sum b/run-testnet/testnet_coordinator/go.sum new file mode 100644 index 000000000..a62c313c5 --- /dev/null +++ b/run-testnet/testnet_coordinator/go.sum @@ -0,0 +1,4 @@ +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/run-testnet/testnet_coordinator/main.go b/run-testnet/testnet_coordinator/main.go new file mode 100644 index 000000000..ff9307d5d --- /dev/null +++ b/run-testnet/testnet_coordinator/main.go @@ -0,0 +1,80 @@ +package main + +import ( + "fmt" + "io/ioutil" + "log" + "net/http" +) + +type UpstreamError int + +func (sc UpstreamError) Error() string { + return fmt.Sprintf("Upstream status %d", sc) +} + +func readSrcUrl(url string) ([]byte, error) { + resp, err := http.Get(url) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, UpstreamError(resp.StatusCode) + } + + return ioutil.ReadAll(resp.Body) +} + +func main() { + + serverConfig := NewConfig() + if err := serverConfig.Load("config.yaml"); err != nil { + log.Fatalf("Error reading config file: %v", err) + } + + taskAssigner := &TaskAssigner{} + + http.HandleFunc( + serverConfig.Server.ServerURL+"/chunks", + chunksHandler(taskAssigner, serverConfig.ChunkURLTemplate), + ) + http.Handle("/", http.NotFoundHandler()) + + log.Printf("Starting server on %s...", serverConfig.Server.ServerHost) + err := http.ListenAndServe(serverConfig.Server.ServerHost, nil) + if err != nil { + log.Print("ListenAndServe: ", err) + } +} + +func chunksHandler(assigner *TaskAssigner, url_template string) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + assigned_done := false + assigned := assigner.assign_new() + defer func() { + if !assigned_done { + assigner.drop(assigned) + } + }() + url := fmt.Sprintf(url_template, assigned) + resp, err := readSrcUrl(url) + if statusErr, ok := err.(UpstreamError); ok { + http.Error(w, statusErr.Error(), int(statusErr)) + return + } else if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + _, err = w.Write(resp) + if err != nil { + log.Printf("Error writing response: %v\n", err) + return + } + log.Println("send new batch out", assigned) + assigned_done = true + + } +} diff --git a/run-testnet/testnet_coordinator/task_assign.go b/run-testnet/testnet_coordinator/task_assign.go new file mode 100644 index 000000000..6090dbe2c --- /dev/null +++ b/run-testnet/testnet_coordinator/task_assign.go @@ -0,0 +1,84 @@ +package main + +import ( + "log" + "sync" +) + +type TaskStatus int + +const TaskAssigned TaskStatus = 0 +const TaskCompleted TaskStatus = 1 +const TaskReAssign TaskStatus = 2 + +// task managers cache all task it has assigned +// since the cost is trivial (batch number is limited) +type TaskAssigner struct { + sync.Mutex + begin_with uint64 + runingTasks map[uint64]TaskStatus +} + +func construct(start uint64) *TaskAssigner { + return 
&TaskAssigner{ + begin_with: start, + runingTasks: make(map[uint64]TaskStatus), + } +} + +func (t *TaskAssigner) assign_new() uint64 { + + t.Lock() + defer t.Unlock() + + used := t.begin_with + for tid, status := range t.runingTasks { + if status == TaskReAssign { + t.runingTasks[tid] = TaskAssigned + return tid + } else if tid >= used { + used = tid + 1 + } + } + t.runingTasks[used] = TaskAssigned + return used +} + +func (t *TaskAssigner) drop(id uint64) { + + t.Lock() + defer t.Unlock() + + for tid, status := range t.runingTasks { + if tid == id { + if status == TaskAssigned { + t.runingTasks[tid] = TaskReAssign + } else { + log.Printf("unexpected dropping of completed task (%d)\n", id) + } + return + } + } + log.Printf("unexpected dropping non-existed task (%d)\n", id) +} + +func (t *TaskAssigner) complete(id uint64) { + t.Lock() + defer t.Unlock() + t.runingTasks[id] = TaskCompleted + +} + +func (t *TaskAssigner) status() (result []uint64) { + + t.Lock() + defer t.Unlock() + + for id, status := range t.runingTasks { + if status != TaskCompleted { + result = append(result, id) + } + } + + return +} From d23226e6b40c9d8d90de01e3611175d403c7e0af Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Mon, 21 Aug 2023 22:19:25 +0800 Subject: [PATCH 10/35] bump version of zkevm-circuits --- Cargo.lock | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 17b5164c1..bdc0d4f58 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -23,7 +23,7 @@ dependencies = [ [[package]] name = "aggregator" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#d3bf2036dce76d813bfaf29729c775a7af375e5c" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#4896caa3662d427daf522314e4ea5ff943fb0c15" dependencies = [ "ark-std", "env_logger 0.10.0", @@ -419,7 +419,7 @@ checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" [[package]] name = "bus-mapping" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#d3bf2036dce76d813bfaf29729c775a7af375e5c" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#4896caa3662d427daf522314e4ea5ff943fb0c15" dependencies = [ "eth-types", "ethers-core 0.17.0", @@ -1123,7 +1123,7 @@ dependencies = [ [[package]] name = "eth-types" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#d3bf2036dce76d813bfaf29729c775a7af375e5c" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#4896caa3662d427daf522314e4ea5ff943fb0c15" dependencies = [ "ethers-core 0.17.0", "ethers-signers", @@ -1388,7 +1388,7 @@ dependencies = [ [[package]] name = "external-tracer" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#d3bf2036dce76d813bfaf29729c775a7af375e5c" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#4896caa3662d427daf522314e4ea5ff943fb0c15" dependencies = [ "eth-types", "geth-utils", @@ -1613,7 +1613,7 @@ dependencies = [ [[package]] name = "gadgets" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#d3bf2036dce76d813bfaf29729c775a7af375e5c" +source = 
"git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#4896caa3662d427daf522314e4ea5ff943fb0c15" dependencies = [ "digest 0.7.6", "eth-types", @@ -1653,7 +1653,7 @@ dependencies = [ [[package]] name = "geth-utils" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#d3bf2036dce76d813bfaf29729c775a7af375e5c" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#4896caa3662d427daf522314e4ea5ff943fb0c15" dependencies = [ "env_logger 0.9.3", "gobuild 0.1.0-alpha.2 (git+https://github.com/scroll-tech/gobuild.git)", @@ -2293,7 +2293,7 @@ dependencies = [ [[package]] name = "keccak256" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#d3bf2036dce76d813bfaf29729c775a7af375e5c" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#4896caa3662d427daf522314e4ea5ff943fb0c15" dependencies = [ "env_logger 0.9.3", "eth-types", @@ -2493,7 +2493,7 @@ dependencies = [ [[package]] name = "mock" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#d3bf2036dce76d813bfaf29729c775a7af375e5c" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#4896caa3662d427daf522314e4ea5ff943fb0c15" dependencies = [ "eth-types", "ethers-core 0.17.0", @@ -2508,7 +2508,7 @@ dependencies = [ [[package]] name = "mpt-zktrie" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#d3bf2036dce76d813bfaf29729c775a7af375e5c" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#4896caa3662d427daf522314e4ea5ff943fb0c15" dependencies = [ "eth-types", "halo2-mpt-circuits", @@ -4817,7 +4817,7 @@ checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" [[package]] name = "zkevm-circuits" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#d3bf2036dce76d813bfaf29729c775a7af375e5c" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#4896caa3662d427daf522314e4ea5ff943fb0c15" dependencies = [ "array-init", "bus-mapping", From d728f1eab043e011ba0aafbdd244d70a8478f319 Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Tue, 22 Aug 2023 09:20:11 +0800 Subject: [PATCH 11/35] wip: some tuning on run testnet --- run-testnet/src/main.rs | 79 ++++++++++++++++++++++++++++------------- 1 file changed, 54 insertions(+), 25 deletions(-) diff --git a/run-testnet/src/main.rs b/run-testnet/src/main.rs index 600cf481c..ac1a2e8a8 100644 --- a/run-testnet/src/main.rs +++ b/run-testnet/src/main.rs @@ -21,10 +21,6 @@ use serde::Deserialize; use std::{env, str::FromStr, process::ExitCode}; use types::eth::BlockTrace; -const DEFAULT_BEGIN_BATCH: i64 = 1; -const DEFAULT_END_BATCH: i64 = i64::MAX; - - // build common config from enviroment fn common_log() -> Result { dotenv::dotenv().ok(); @@ -142,6 +138,7 @@ async fn main() -> ExitCode{ .unwrap_or_else(|e| { panic!("mock-testnet: failed to request API err {e:?}") }); + let mut chunks_task_complete = true; match chunks { None => { log::info!("mock-testnet: finished to prove at batch-{batch_id}"); @@ -157,15 +154,27 @@ async fn main() -> ExitCode{ let mut 
block_traces: Vec = vec![]; for block_id in chunk.start_block_number..=chunk.end_block_number { log::info!("mock-testnet: requesting trace of block {block_id}"); - - let trace = provider + + match provider .request( "scroll_getBlockTraceByNumberOrHash", [format!("{block_id:#x}")], ) - .await - .unwrap(); - block_traces.push(trace); + .await + { + Ok(trace) => { + block_traces.push(trace); + } + Err(e) => { + log::error!("obtain trace from block provider fail: {e:?}"); + break; + } + } + } + + if block_traces.len() < (chunk.end_block_number - chunk.start_block_number + 1) as usize { + chunks_task_complete = false; + break; } // start chunk-level testing @@ -181,11 +190,8 @@ async fn main() -> ExitCode{ Ok(()) }) { - if notify_chunks_complete(&setting, batch_id as i64, false).await.is_ok() { - return ExitCode::from(EXIT_FAILED_ENV); - } else { - return ExitCode::from(EXIT_FAILED_ENV_WITH_TASK); - } + chunks_task_complete = false; + break; } let handling_ret = chunk_handling(batch_id as i64, chunk_id, &block_traces); @@ -200,12 +206,17 @@ async fn main() -> ExitCode{ } } - if let Err(e) = notify_chunks_complete(&setting, batch_id as i64, true).await { + if let Err(e) = notify_chunks_complete(&setting, batch_id as i64, chunks_task_complete).await { log::error!("can not deliver complete notify to coordinator: {e:?}"); return ExitCode::from(EXIT_FAILED_ENV_WITH_TASK); } - log::info!("relay-alpha testnet runner: complete"); - ExitCode::from(0) + + if chunks_task_complete { + log::info!("relay-alpha testnet runner: complete"); + ExitCode::from(0) + }else { + ExitCode::from(EXIT_FAILED_ENV) + } } fn build_block( @@ -241,7 +252,7 @@ fn build_block( async fn get_chunks_info( setting: &Setting, ) -> Result<(usize, Option>)> { - let url = Url::parse(&setting.coordinator_url)?; + let url = Url::parse(&setting.chunks_url)?; let resp: String = reqwest::get(url).await?.text().await?; log::debug!("resp is {resp}"); @@ -293,7 +304,7 @@ struct ChunkInfo { #[derive(Debug, Default)] struct Setting { - coordinator_url: String, + chunks_url: String, task_url: String, l2geth_api_url: String, data_output_dir: String, @@ -302,15 +313,33 @@ struct Setting { impl Setting { pub fn new() -> Self { let l2geth_api_url = - env::var("L2GETH_API_URL").expect("mock-testnet: Must set env L2GETH_API_URL"); - let coordinator_url = env::var("ROLLUPSCAN_API_URL"); - let coordinator_url = - coordinator_url.unwrap_or_else(|_| "http://10.0.3.119:8560/api/chunks".to_string()); + env::var("L2GETH_API_URL").expect("run-testnet: Must set env L2GETH_API_URL"); + let coordinator_url = env::var("COORDINATOR_API_URL"); + let (chunks_url, task_url) = if let Ok(url_prefix) = coordinator_url { + ( + Url::parse(&url_prefix).and_then(|url|url.join("chunks")).expect("run-testnet: Must be valid url for coordinator api"), + Url::parse(&url_prefix).and_then(|url|url.join("tasks")).expect("run-testnet: Must be valid url for coordinator api"), + ) + } else { + ( + Url::parse( + &env::var("CHUNKS_API_URL") + .expect("run-test: CHUNKS_API_URL must be set if COORDINATOR_API_URL is not set"), + ).expect("run-testnet: Must be valid url for chunks api"), + Url::parse( + &env::var("TASKS_API_URL") + .expect("run-test: TASKS_API_URL must be set if COORDINATOR_API_URL is not set"), + ).expect("run-testnet: Must be valid url for tasks api"), + ) + }; + + let data_output_dir = env::var("OUTPUT_DIR").unwrap_or("output".to_string()); Self { l2geth_api_url, - coordinator_url, - ..Default::default() + data_output_dir, + chunks_url: chunks_url.as_str().into(), + 
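// NOTE: `Url::join` resolves relative to the last '/' of the base, so
// COORDINATOR_API_URL should carry a trailing slash (e.g. "http://host:8560/api/")
// for the derived endpoints above to become ".../api/chunks" and ".../api/tasks";
// a base of "http://host:8560/api" would resolve to ".../chunks" and ".../tasks".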
task_url: task_url.as_str().into(), } } } From 50ca28741b078a006d02955b0a00e5dc40ddee27 Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Tue, 22 Aug 2023 16:03:21 +0800 Subject: [PATCH 12/35] refactor circuit builder and ccc --- prover/src/zkevm/capacity_checker.rs | 43 ++++- prover/src/zkevm/circuit.rs | 2 +- prover/src/zkevm/circuit/builder.rs | 233 +++++++++++++++------------ 3 files changed, 168 insertions(+), 110 deletions(-) diff --git a/prover/src/zkevm/capacity_checker.rs b/prover/src/zkevm/capacity_checker.rs index 1755398f2..316f9c605 100644 --- a/prover/src/zkevm/capacity_checker.rs +++ b/prover/src/zkevm/capacity_checker.rs @@ -1,9 +1,16 @@ use super::circuit::{ block_traces_to_witness_block_with_updated_state, calculate_row_usage_of_witness_block, + global_circuit_params }; +use bus_mapping::{ + circuit_input_builder::{self, CircuitInputBuilder}, + state_db::{CodeDB, StateDB}, +}; +use mpt_zktrie::state::ZktrieState; use itertools::Itertools; use serde_derive::{Deserialize, Serialize}; use types::eth::BlockTrace; +use eth_types::{H256, ToWord}; #[derive(Debug, Clone, Deserialize, Serialize)] pub struct SubCircuitRowUsage { @@ -128,6 +135,7 @@ pub struct CircuitCapacityChecker { pub light_mode: bool, pub acc_row_usage: RowUsage, pub row_usages: Vec, + pub builder_ctx: Option<(CodeDB, StateDB, ZktrieState)>, } // Currently TxTrace is same as BlockTrace, with "transactions" and "executionResults" should be of @@ -147,9 +155,11 @@ impl CircuitCapacityChecker { acc_row_usage: RowUsage::new(), row_usages: Vec::new(), light_mode: true, + builder_ctx: None, } } pub fn reset(&mut self) { + self.builder_ctx = None; self.acc_row_usage = RowUsage::new(); self.row_usages = Vec::new(); } @@ -158,9 +168,37 @@ impl CircuitCapacityChecker { txs: &[TxTrace], ) -> Result<(RowUsage, RowUsage), anyhow::Error> { assert!(!txs.is_empty()); - let traces = txs; + let (mut estimate_builder, traces) = if let Some((code_db, sdb, mpt_state)) = self.builder_ctx.take() { + // here we create a new builder for another (sealed) witness block + // this builder inherit the current execution state (sdb/cdb) of + // the previous one and do not use zktrie state, + // notice the prev_root in current builder may be not invalid (since the state has changed + // but we may not update it in light mode) + let mut builder_block = circuit_input_builder::Block::from_headers( + &[], + global_circuit_params() + ); + builder_block.chain_id = txs[0].chain_id; + builder_block.start_l1_queue_index = txs[0].start_l1_queue_index; + builder_block.prev_state_root = H256(*mpt_state.root()).to_word(); + let mut builder = CircuitInputBuilder::new( + sdb, + code_db, + &builder_block + ); + builder.mpt_init_state = mpt_state; + ( + builder, + &txs[1..], + ) + } else { + ( + CircuitInputBuilder::new_from_l2_trace(global_circuit_params(), &txs[0], txs.len() > 1)?, + txs, + ) + }; let witness_block = - block_traces_to_witness_block_with_updated_state(traces, self.light_mode)?; + block_traces_to_witness_block_with_updated_state(traces, &mut estimate_builder, self.light_mode)?; let rows = calculate_row_usage_of_witness_block(&witness_block)?; let row_usage_details: Vec = rows .into_iter() @@ -172,6 +210,7 @@ impl CircuitCapacityChecker { let tx_row_usage = RowUsage::from_row_usage_details(row_usage_details); self.row_usages.push(tx_row_usage.clone()); self.acc_row_usage.add(&tx_row_usage); + self.builder_ctx.replace((estimate_builder.code_db, estimate_builder.sdb, estimate_builder.mpt_init_state)); Ok((self.acc_row_usage.normalize(), 
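// the returned tuple is (row usage accumulated across calls since `new()`/`reset()`,
// row usage contributed by just this call), both normalized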
tx_row_usage.normalize())) } } diff --git a/prover/src/zkevm/circuit.rs b/prover/src/zkevm/circuit.rs index 429e0460b..17f56c782 100644 --- a/prover/src/zkevm/circuit.rs +++ b/prover/src/zkevm/circuit.rs @@ -14,7 +14,7 @@ pub use self::builder::{ block_traces_to_padding_witness_block, block_traces_to_witness_block, block_traces_to_witness_block_with_updated_state, calculate_row_usage_of_trace, calculate_row_usage_of_witness_block, check_batch_capacity, - normalize_withdraw_proof, storage_trace_to_padding_witness_block, WitnessBlock, + normalize_withdraw_proof, WitnessBlock, global_circuit_params, SUB_CIRCUIT_NAMES, }; diff --git a/prover/src/zkevm/circuit/builder.rs b/prover/src/zkevm/circuit/builder.rs index 8623f3bc9..6044599a9 100644 --- a/prover/src/zkevm/circuit/builder.rs +++ b/prover/src/zkevm/circuit/builder.rs @@ -11,17 +11,17 @@ use bus_mapping::{ }, state_db::{CodeDB, StateDB}, }; -use eth_types::{ToBigEndian, H256, U256, ToWord}; +use eth_types::{ToBigEndian, H256, ToWord}; use halo2_proofs::halo2curves::bn256::Fr; -use is_even::IsEven; use itertools::Itertools; +use mpt_zktrie::state::ZktrieState; use std::{ - collections::{hash_map::Entry, HashMap}, + collections::HashMap, time::Instant, }; use types::eth::{BlockTrace, StorageTrace}; use zkevm_circuits::{ - evm_circuit::witness::{block_apply_mpt_state, block_convert_with_l1_queue_index, Block}, + evm_circuit::witness::{block_apply_mpt_state, block_convert, block_convert_with_l1_queue_index, Block}, util::SubCircuit, witness::WithdrawProof, }; @@ -152,6 +152,60 @@ pub fn check_batch_capacity(block_traces: &mut Vec) -> Result<()> { Ok(()) } + +// prepare an empty builder which can updated by more trace +// from the default settings +// only require the prev state root being provided +// any initial zktrie state can be also set +fn prepare_default_builder( + old_root: H256, + initial_mpt_state: Option, +) -> CircuitInputBuilder { + + let mut builder_block = circuit_input_builder::Block::from_headers( + &[], + global_circuit_params() + ); + builder_block.chain_id = *CHAIN_ID; + builder_block.prev_state_root = old_root.to_word(); + let code_db = CodeDB::new(); + + if let Some(mpt_state) = initial_mpt_state { + assert_eq!(H256::from_slice(mpt_state.root()), old_root, "the provided zktrie state must be the prev state"); + let state_db = StateDB::from(&mpt_state); + let mut builder = CircuitInputBuilder::new( + state_db, + code_db, + &builder_block + ); + builder.mpt_init_state = mpt_state; + builder + } else { + CircuitInputBuilder::new( + StateDB::new(), + code_db, + &builder_block + ) + } + +} + +// check if block traces match preset parameters +fn validite_block_traces(block_traces: &[BlockTrace]) -> Result<()>{ + let chain_id = block_traces + .iter() + .map(|block_trace| block_trace.chain_id) + .next() + .unwrap_or(*CHAIN_ID); + if *CHAIN_ID != chain_id { + bail!( + "CHAIN_ID env var is wrong. 
chain id in trace {chain_id}, CHAIN_ID {}", + *CHAIN_ID + ); + } + Ok(()) +} + pub fn block_traces_to_witness_block(block_traces: &[BlockTrace]) -> Result> { let block_num = block_traces.len(); let total_tx_num = block_traces @@ -174,12 +228,16 @@ pub fn block_traces_to_witness_block(block_traces: &[BlockTrace]) -> Result 1)?; + block_traces_to_witness_block_with_updated_state(&block_traces[1..], &mut builder, false) + } } pub fn block_traces_to_padding_witness_block(block_traces: &[BlockTrace]) -> Result> { @@ -187,54 +245,55 @@ pub fn block_traces_to_padding_witness_block(block_traces: &[BlockTrace]) -> Res "block_traces_to_padding_witness_block, input len {:?}", block_traces.len() ); - let chain_id = block_traces - .iter() - .map(|block_trace| block_trace.chain_id) - .next() - .unwrap_or(*CHAIN_ID); - if *CHAIN_ID != chain_id { - bail!( - "CHAIN_ID env var is wrong. chain id in trace {chain_id}, CHAIN_ID {}", - *CHAIN_ID - ); - } - let old_root = if block_traces.is_empty() { - eth_types::Hash::zero() - } else { - block_traces[0].storage_trace.root_before - }; + validite_block_traces(block_traces)?; - if block_traces.is_empty() { - padding_witness_block(old_root.to_word()) + // the only purpose here it to get the final zktrie state and + // proof for withdraw root + let mut padding_builder = if block_traces.is_empty() { + prepare_default_builder(H256::zero(), None) } else { - // the only purpose here it to get the final state root - let prev_witness_block = - block_traces_to_witness_block_with_updated_state(block_traces, false)?; - - // TODO: when prev_witness_block.tx.is_empty(), the `withdraw_proof` here should be a subset of - // storage proofs of prev block - let storage_trace = normalize_withdraw_proof(&prev_witness_block.mpt_updates.withdraw_proof); - storage_trace_to_padding_witness_block(storage_trace) - } + let start_l1_queue_index = block_traces[0].start_l1_queue_index; + let mut builder = CircuitInputBuilder::new_from_l2_trace( + global_circuit_params(), + &block_traces[0], + block_traces.len() > 1 + )?; + for (idx, block_trace) in block_traces[1..].iter().enumerate() { + builder.add_more_l2_trace( + block_trace, + idx + 2 == block_traces.len(),//not typo, we use 1..end of the traces only + )?; + } + builder.finalize_building()?; + let mut witness_block = + block_convert_with_l1_queue_index::(&builder.block, &builder.code_db, start_l1_queue_index)?; + log::debug!( + "witness_block built with circuits_params {:?} for padding", + witness_block.circuits_params + ); + // so we have the finalized state which contain withdraw proof + block_apply_mpt_state(&mut witness_block, &builder.mpt_init_state); + let old_root = H256(*builder.mpt_init_state.root()); + prepare_default_builder( + old_root, + Some(builder.mpt_init_state), + ) + }; -} + // TODO: when prev_witness_block.tx.is_empty(), the `withdraw_proof` here should be a subset of + // storage proofs of prev block + padding_builder.finalize_building()?; -pub fn storage_trace_to_padding_witness_block(storage_trace: StorageTrace) -> Result> { - log::debug!( - "withdraw proof {}", - serde_json::to_string_pretty(&storage_trace)? 
- ); + let mut padding_block = block_convert(&padding_builder.block, &padding_builder.code_db)?; + // drag the withdraw proof from zktrie state + block_apply_mpt_state(&mut padding_block, &padding_builder.mpt_init_state); - let dummy_chunk_traces = vec![BlockTrace { - chain_id: *CHAIN_ID, - storage_trace, - ..Default::default() - }]; + Ok(padding_block) - block_traces_to_witness_block_with_updated_state(&[], false) } -fn global_circuit_params() -> CircuitsParams { +/// default params for super circuit +pub fn global_circuit_params() -> CircuitsParams { CircuitsParams { max_evm_rows: MAX_RWS, max_rws: MAX_RWS, @@ -255,30 +314,15 @@ fn global_circuit_params() -> CircuitsParams { } } +/// update the builder with another batch of trace and then *FINALIZE* it +/// (so the buidler CAN NOT be update any more) +/// light_mode skip the time consuming calculation on mpt root for each +/// tx, currently used in row estimation pub fn block_traces_to_witness_block_with_updated_state( block_traces: &[BlockTrace], - light_mode: bool, // light_mode used in row estimation + builder: &mut CircuitInputBuilder, + light_mode: bool, ) -> Result> { - let chain_id = block_traces - .iter() - .map(|block_trace| block_trace.chain_id) - .next() - .unwrap_or(*CHAIN_ID); - // total l1 msgs popped before this chunk - let start_l1_queue_index = block_traces - .iter() - .map(|block_trace| block_trace.start_l1_queue_index) - .next() - .unwrap_or(0); - if *CHAIN_ID != chain_id { - bail!( - "CHAIN_ID env var is wrong. chain id in trace {chain_id}, CHAIN_ID {}", - *CHAIN_ID - ); - } - - let first_trace = &block_traces[0]; - let more_traces = &block_traces[1..]; let metric = |builder: &CircuitInputBuilder, idx: usize| -> Result<(), bus_mapping::Error>{ let t = Instant::now(); @@ -305,27 +349,28 @@ pub fn block_traces_to_witness_block_with_updated_state( Ok(()) }; - let mut builder = CircuitInputBuilder::new_from_l2_trace( - global_circuit_params(), - first_trace, - more_traces.len() != 0, - )?; - + // TODO: enable this switch let per_block_metric = false; - if per_block_metric { - metric(&builder, 0)?; - } + + let initial_blk_index = if builder.block.txs.is_empty() { + 0 + } else { + if per_block_metric { + metric(&builder, 0)?; + } + 1 + }; for (idx, block_trace) in block_traces.iter().enumerate() { let is_last = idx == block_traces.len() - 1; builder.add_more_l2_trace(block_trace, !is_last)?; - let per_block_metric = false; if per_block_metric { - metric(&builder, idx+1)?; + metric(&builder, idx + initial_blk_index)?; } } builder.finalize_building()?; + let start_l1_queue_index = builder.block.start_l1_queue_index; log::debug!("converting builder.block to witness block"); let mut witness_block = @@ -347,32 +392,6 @@ pub fn block_traces_to_witness_block_with_updated_state( Ok(witness_block) } -/// This entry simulate the progress which use block_traces_to_witness_block_with_updated_state -/// to generate a padding block with null trace array: -/// + Everything use default values -/// + no trace, no tx, so no mpt table, zktrie state light mode is useless, -/// and what only needed is the previous state root -pub fn padding_witness_block( - old_root: U256, -) -> Result> { - let mut builder_block = circuit_input_builder::Block::from_headers(&[], global_circuit_params()); - builder_block.chain_id = *CHAIN_ID; - builder_block.prev_state_root = old_root; - let mut builder = CircuitInputBuilder::new( - StateDB::new(), - CodeDB::new(), - &builder_block - ); - builder.finalize_building()?; - - let witness_block = - 
block_convert_with_l1_queue_index(&builder.block, &builder.code_db, 0)?; - log::debug!( - "padding witness_block built with circuits_params {:?}", - witness_block.circuits_params - ); - Ok(witness_block) -} pub fn normalize_withdraw_proof(proof: &WithdrawProof) -> StorageTrace { let address = *bus_mapping::l2_predeployed::message_queue::ADDRESS; From 91f33d42a9944c385a92925877b8f71986b4c503 Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Tue, 22 Aug 2023 20:27:00 +0800 Subject: [PATCH 13/35] post merging fixes --- prover/src/zkevm/circuit/builder.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/prover/src/zkevm/circuit/builder.rs b/prover/src/zkevm/circuit/builder.rs index 97a4bf44c..647be9e3b 100644 --- a/prover/src/zkevm/circuit/builder.rs +++ b/prover/src/zkevm/circuit/builder.rs @@ -194,7 +194,7 @@ fn prepare_default_builder( let mut builder_block = circuit_input_builder::Block::from_headers( &[], - global_circuit_params() + get_super_circuit_params() ); builder_block.chain_id = *CHAIN_ID; builder_block.prev_state_root = old_root.to_word(); @@ -265,7 +265,7 @@ pub fn block_traces_to_witness_block(block_traces: &[BlockTrace]) -> Result 1)?; + let mut builder = CircuitInputBuilder::new_from_l2_trace(get_super_circuit_params(), &block_traces[0], block_traces.len() > 1)?; block_traces_to_witness_block_with_updated_state(&block_traces[1..], &mut builder, false) } } @@ -284,7 +284,7 @@ pub fn block_traces_to_padding_witness_block(block_traces: &[BlockTrace]) -> Res } else { let start_l1_queue_index = block_traces[0].start_l1_queue_index; let mut builder = CircuitInputBuilder::new_from_l2_trace( - global_circuit_params(), + get_super_circuit_params(), &block_traces[0], block_traces.len() > 1 )?; @@ -397,7 +397,7 @@ pub fn block_traces_to_witness_block_with_updated_state( "finish replay trie updates, root {}", hex::encode(builder.mpt_init_state.root()) ); - Ok((witness_block, code_db)) + Ok(witness_block) } From 657e2995c91d862d3d3a7c76111d5f74834e57d0 Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Tue, 22 Aug 2023 20:33:14 +0800 Subject: [PATCH 14/35] bump zkevm-circuits temporary disable missed circuit params --- Cargo.lock | 129 ++++++---------------------- prover/src/zkevm/circuit/builder.rs | 4 +- 2 files changed, 26 insertions(+), 107 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 567d9b2fa..e6498227c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -23,7 +23,7 @@ dependencies = [ [[package]] name = "aggregator" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#4896caa3662d427daf522314e4ea5ff943fb0c15" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#0ae06dd463652082be2c8ed35a430252d3c0265c" dependencies = [ "ark-std", "env_logger 0.10.0", @@ -36,8 +36,8 @@ dependencies = [ "rand", "serde", "serde_json", - "snark-verifier 0.1.0 (git+https://github.com/scroll-tech/snark-verifier?branch=develop)", - "snark-verifier-sdk 0.0.1 (git+https://github.com/scroll-tech/snark-verifier?branch=develop)", + "snark-verifier", + "snark-verifier-sdk", "zkevm-circuits", ] @@ -419,7 +419,7 @@ checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" [[package]] name = "bus-mapping" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#4896caa3662d427daf522314e4ea5ff943fb0c15" +source = 
"git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#0ae06dd463652082be2c8ed35a430252d3c0265c" dependencies = [ "eth-types", "ethers-core 0.17.0", @@ -1123,7 +1123,7 @@ dependencies = [ [[package]] name = "eth-types" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#4896caa3662d427daf522314e4ea5ff943fb0c15" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#0ae06dd463652082be2c8ed35a430252d3c0265c" dependencies = [ "ethers-core 0.17.0", "ethers-signers", @@ -1388,7 +1388,7 @@ dependencies = [ [[package]] name = "external-tracer" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#4896caa3662d427daf522314e4ea5ff943fb0c15" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#0ae06dd463652082be2c8ed35a430252d3c0265c" dependencies = [ "eth-types", "geth-utils", @@ -1613,7 +1613,7 @@ dependencies = [ [[package]] name = "gadgets" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#4896caa3662d427daf522314e4ea5ff943fb0c15" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#0ae06dd463652082be2c8ed35a430252d3c0265c" dependencies = [ "digest 0.7.6", "eth-types", @@ -1653,7 +1653,7 @@ dependencies = [ [[package]] name = "geth-utils" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#4896caa3662d427daf522314e4ea5ff943fb0c15" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#0ae06dd463652082be2c8ed35a430252d3c0265c" dependencies = [ "env_logger 0.9.3", "gobuild 0.1.0-alpha.2 (git+https://github.com/scroll-tech/gobuild.git)", @@ -1763,21 +1763,6 @@ dependencies = [ "rustc-hash", ] -[[package]] -name = "halo2-base" -version = "0.2.2" -source = "git+https://github.com/scroll-tech/halo2-lib?branch=develop#2c225864227e74b207d9f4b9e08c4d5f1afc69a1" -dependencies = [ - "ff", - "halo2_proofs", - "itertools", - "num-bigint", - "num-integer", - "num-traits", - "rand_chacha", - "rustc-hash", -] - [[package]] name = "halo2-ecc" version = "0.2.2" @@ -1785,26 +1770,7 @@ source = "git+https://github.com/scroll-tech/halo2-lib?tag=v0.1.0#2c225864227e74 dependencies = [ "ff", "group", - "halo2-base 0.2.2 (git+https://github.com/scroll-tech/halo2-lib?tag=v0.1.0)", - "itertools", - "num-bigint", - "num-integer", - "num-traits", - "rand", - "rand_chacha", - "rand_core", - "serde", - "serde_json", -] - -[[package]] -name = "halo2-ecc" -version = "0.2.2" -source = "git+https://github.com/scroll-tech/halo2-lib?branch=develop#2c225864227e74b207d9f4b9e08c4d5f1afc69a1" -dependencies = [ - "ff", - "group", - "halo2-base 0.2.2 (git+https://github.com/scroll-tech/halo2-lib?branch=develop)", + "halo2-base", "itertools", "num-bigint", "num-integer", @@ -1835,7 +1801,7 @@ dependencies = [ [[package]] name = "halo2-mpt-circuits" version = "0.1.0" -source = "git+https://github.com/scroll-tech/mpt-circuit.git?branch=v0.5#2163a9c436ed85363c954ecf7e6e1044a1b991dc" +source = "git+https://github.com/scroll-tech/mpt-circuit.git?tag=v0.5.1#2163a9c436ed85363c954ecf7e6e1044a1b991dc" dependencies = [ "ethers-core 0.17.0", "halo2_proofs", @@ -2293,7 +2259,7 @@ dependencies = [ [[package]] name = 
"keccak256" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#4896caa3662d427daf522314e4ea5ff943fb0c15" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#0ae06dd463652082be2c8ed35a430252d3c0265c" dependencies = [ "env_logger 0.9.3", "eth-types", @@ -2493,7 +2459,7 @@ dependencies = [ [[package]] name = "mock" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#4896caa3662d427daf522314e4ea5ff943fb0c15" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#0ae06dd463652082be2c8ed35a430252d3c0265c" dependencies = [ "eth-types", "ethers-core 0.17.0", @@ -2508,7 +2474,7 @@ dependencies = [ [[package]] name = "mpt-zktrie" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#4896caa3662d427daf522314e4ea5ff943fb0c15" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#0ae06dd463652082be2c8ed35a430252d3c0265c" dependencies = [ "eth-types", "halo2-mpt-circuits", @@ -3036,8 +3002,8 @@ dependencies = [ "serde_json", "serde_stacker", "sha2 0.10.6", - "snark-verifier 0.1.0 (git+https://github.com/scroll-tech/snark-verifier?tag=v0.1.2)", - "snark-verifier-sdk 0.0.1 (git+https://github.com/scroll-tech/snark-verifier?tag=v0.1.2)", + "snark-verifier", + "snark-verifier-sdk", "strum", "strum_macros", "types", @@ -3879,32 +3845,8 @@ source = "git+https://github.com/scroll-tech/snark-verifier?tag=v0.1.2#4466059ce dependencies = [ "bytes", "ethereum-types 0.14.1", - "halo2-base 0.2.2 (git+https://github.com/scroll-tech/halo2-lib?tag=v0.1.0)", - "halo2-ecc 0.2.2 (git+https://github.com/scroll-tech/halo2-lib?tag=v0.1.0)", - "hex", - "itertools", - "lazy_static", - "num-bigint", - "num-integer", - "num-traits", - "poseidon", - "rand", - "revm", - "rlp", - "rustc-hash", - "serde", - "sha3 0.10.6", -] - -[[package]] -name = "snark-verifier" -version = "0.1.0" -source = "git+https://github.com/scroll-tech/snark-verifier?branch=develop#4466059ce9a6dfaf26455e4ffb61d72af775cf52" -dependencies = [ - "bytes", - "ethereum-types 0.14.1", - "halo2-base 0.2.2 (git+https://github.com/scroll-tech/halo2-lib?tag=v0.1.0)", - "halo2-ecc 0.2.2 (git+https://github.com/scroll-tech/halo2-lib?tag=v0.1.0)", + "halo2-base", + "halo2-ecc", "hex", "itertools", "lazy_static", @@ -3928,30 +3870,7 @@ dependencies = [ "bincode", "env_logger 0.10.0", "ethereum-types 0.14.1", - "halo2-base 0.2.2 (git+https://github.com/scroll-tech/halo2-lib?tag=v0.1.0)", - "hex", - "itertools", - "lazy_static", - "log", - "num-bigint", - "num-integer", - "num-traits", - "rand", - "rand_chacha", - "serde", - "serde_json", - "snark-verifier 0.1.0 (git+https://github.com/scroll-tech/snark-verifier?tag=v0.1.2)", -] - -[[package]] -name = "snark-verifier-sdk" -version = "0.0.1" -source = "git+https://github.com/scroll-tech/snark-verifier?branch=develop#4466059ce9a6dfaf26455e4ffb61d72af775cf52" -dependencies = [ - "bincode", - "env_logger 0.10.0", - "ethereum-types 0.14.1", - "halo2-base 0.2.2 (git+https://github.com/scroll-tech/halo2-lib?tag=v0.1.0)", + "halo2-base", "hex", "itertools", "lazy_static", @@ -3963,7 +3882,7 @@ dependencies = [ "rand_chacha", "serde", "serde_json", - "snark-verifier 0.1.0 (git+https://github.com/scroll-tech/snark-verifier?branch=develop)", + "snark-verifier", ] 
[[package]] @@ -4864,7 +4783,7 @@ checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" [[package]] name = "zkevm-circuits" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#4896caa3662d427daf522314e4ea5ff943fb0c15" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#0ae06dd463652082be2c8ed35a430252d3c0265c" dependencies = [ "array-init", "bus-mapping", @@ -4874,8 +4793,8 @@ dependencies = [ "ethers-core 0.17.0", "ethers-signers", "gadgets", - "halo2-base 0.2.2 (git+https://github.com/scroll-tech/halo2-lib?branch=develop)", - "halo2-ecc 0.2.2 (git+https://github.com/scroll-tech/halo2-lib?branch=develop)", + "halo2-base", + "halo2-ecc", "halo2_proofs", "hex", "itertools", @@ -4898,8 +4817,8 @@ dependencies = [ "serde", "serde_json", "sha3 0.10.6", - "snark-verifier 0.1.0 (git+https://github.com/scroll-tech/snark-verifier?branch=develop)", - "snark-verifier-sdk 0.0.1 (git+https://github.com/scroll-tech/snark-verifier?branch=develop)", + "snark-verifier", + "snark-verifier-sdk", "strum", "strum_macros", "subtle", diff --git a/prover/src/zkevm/circuit/builder.rs b/prover/src/zkevm/circuit/builder.rs index 647be9e3b..25b0b8698 100644 --- a/prover/src/zkevm/circuit/builder.rs +++ b/prover/src/zkevm/circuit/builder.rs @@ -50,8 +50,8 @@ pub fn get_super_circuit_params() -> CircuitsParams { max_bytecode: MAX_BYTECODE, max_inner_blocks: MAX_INNER_BLOCKS, max_keccak_rows: MAX_KECCAK_ROWS, - max_poseidon_rows: MAX_POSEIDON_ROWS, - max_vertical_circuit_rows: MAX_VERTICLE_ROWS, +// max_poseidon_rows: MAX_POSEIDON_ROWS, +// max_vertical_circuit_rows: MAX_VERTICLE_ROWS, max_exp_steps: MAX_EXP_STEPS, max_mpt_rows: MAX_MPT_ROWS, max_rlp_rows: MAX_CALLDATA, From 8d6a03325fc968b664579ef290d73b2cf5b63cbd Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Wed, 23 Aug 2023 08:33:28 +0800 Subject: [PATCH 15/35] run-testnet: trivial fixes for coordinator --- run-testnet/testnet_coordinator/.gitignore | 2 + run-testnet/testnet_coordinator/config.go | 2 +- run-testnet/testnet_coordinator/main.go | 57 ++++++++++++++++++++-- 3 files changed, 56 insertions(+), 5 deletions(-) create mode 100644 run-testnet/testnet_coordinator/.gitignore diff --git a/run-testnet/testnet_coordinator/.gitignore b/run-testnet/testnet_coordinator/.gitignore new file mode 100644 index 000000000..bfca54017 --- /dev/null +++ b/run-testnet/testnet_coordinator/.gitignore @@ -0,0 +1,2 @@ +testnet_coordinator +*.yaml \ No newline at end of file diff --git a/run-testnet/testnet_coordinator/config.go b/run-testnet/testnet_coordinator/config.go index 597686426..a0a9cb34c 100644 --- a/run-testnet/testnet_coordinator/config.go +++ b/run-testnet/testnet_coordinator/config.go @@ -22,7 +22,7 @@ func NewConfig() *Config { return &Config{ Server: &ServerConfig{ ServerHost: "localhost:8560", - ServerURL: "/", + ServerURL: "/api", }, } } diff --git a/run-testnet/testnet_coordinator/main.go b/run-testnet/testnet_coordinator/main.go index ff9307d5d..1886d2a1a 100644 --- a/run-testnet/testnet_coordinator/main.go +++ b/run-testnet/testnet_coordinator/main.go @@ -34,18 +34,26 @@ func main() { log.Fatalf("Error reading config file: %v", err) } - taskAssigner := &TaskAssigner{} + taskAssigner := construct(serverConfig.StartBatch) http.HandleFunc( serverConfig.Server.ServerURL+"/chunks", chunksHandler(taskAssigner, serverConfig.ChunkURLTemplate), ) + http.HandleFunc( + serverConfig.Server.ServerURL+"/tasks", + 
taskHandler(taskAssigner, serverConfig.ChunkURLTemplate), + ) + http.HandleFunc( + serverConfig.Server.ServerURL+"/status", + statusHandler(taskAssigner, serverConfig.ChunkURLTemplate), + ) http.Handle("/", http.NotFoundHandler()) - log.Printf("Starting server on %s...", serverConfig.Server.ServerHost) + log.Printf("Starting server on %s...\n", serverConfig.Server.ServerHost) err := http.ListenAndServe(serverConfig.Server.ServerHost, nil) if err != nil { - log.Print("ListenAndServe: ", err) + log.Println("ListenAndServe: ", err) } } @@ -54,6 +62,7 @@ func chunksHandler(assigner *TaskAssigner, url_template string) http.HandlerFunc assigned_done := false assigned := assigner.assign_new() defer func() { + log.Println("send new batch out", assigned, assigned_done) if !assigned_done { assigner.drop(assigned) } @@ -73,8 +82,48 @@ func chunksHandler(assigner *TaskAssigner, url_template string) http.HandlerFunc log.Printf("Error writing response: %v\n", err) return } - log.Println("send new batch out", assigned) assigned_done = true } } + +func taskHandler(assigner *TaskAssigner, url_template string) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + + done_index := r.URL.Query().Get("done") + drop_index := r.URL.Query().Get("drop") + + if done_index != "" { + log.Println("receive done notify for batch:", done_index) + var ind uint64 + if _, err := fmt.Sscanf(done_index, "%d", &ind); err != nil { + http.Error(w, "invalid done index, need integer", http.StatusBadRequest) + return + } + assigner.complete(ind) + } else if drop_index != "" { + log.Println("receive drop notify for batch:", drop_index) + var ind uint64 + if _, err := fmt.Sscanf(drop_index, "%d", &ind); err != nil { + http.Error(w, "invalid drop index, need integer", http.StatusBadRequest) + return + } + assigner.drop(ind) + } else { + http.Error(w, "must query with drop or done", http.StatusBadRequest) + return + } + + w.WriteHeader(http.StatusOK) + } +} + +func statusHandler(assigner *TaskAssigner, url_template string) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + + ret := fmt.Sprintf("%v", assigner.status()) + if _, err := w.Write([]byte(ret)); err != nil { + log.Println("unexpected output of status", err) + } + } +} From 9ba7a0f5a2945a4c7e8d149dba8000baa79d0744 Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Wed, 23 Aug 2023 08:52:11 +0800 Subject: [PATCH 16/35] bump circuit version refine ccc with new circuibuilder constructor --- Cargo.lock | 20 ++++++++++---------- prover/src/zkevm/capacity_checker.rs | 20 ++++++++------------ 2 files changed, 18 insertions(+), 22 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e6498227c..9873af07f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -23,7 +23,7 @@ dependencies = [ [[package]] name = "aggregator" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#0ae06dd463652082be2c8ed35a430252d3c0265c" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#e5d465b93e46b5bf2261351d03f9913cf8232986" dependencies = [ "ark-std", "env_logger 0.10.0", @@ -419,7 +419,7 @@ checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" [[package]] name = "bus-mapping" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#0ae06dd463652082be2c8ed35a430252d3c0265c" +source = 
"git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#e5d465b93e46b5bf2261351d03f9913cf8232986" dependencies = [ "eth-types", "ethers-core 0.17.0", @@ -1123,7 +1123,7 @@ dependencies = [ [[package]] name = "eth-types" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#0ae06dd463652082be2c8ed35a430252d3c0265c" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#e5d465b93e46b5bf2261351d03f9913cf8232986" dependencies = [ "ethers-core 0.17.0", "ethers-signers", @@ -1388,7 +1388,7 @@ dependencies = [ [[package]] name = "external-tracer" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#0ae06dd463652082be2c8ed35a430252d3c0265c" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#e5d465b93e46b5bf2261351d03f9913cf8232986" dependencies = [ "eth-types", "geth-utils", @@ -1613,7 +1613,7 @@ dependencies = [ [[package]] name = "gadgets" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#0ae06dd463652082be2c8ed35a430252d3c0265c" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#e5d465b93e46b5bf2261351d03f9913cf8232986" dependencies = [ "digest 0.7.6", "eth-types", @@ -1653,7 +1653,7 @@ dependencies = [ [[package]] name = "geth-utils" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#0ae06dd463652082be2c8ed35a430252d3c0265c" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#e5d465b93e46b5bf2261351d03f9913cf8232986" dependencies = [ "env_logger 0.9.3", "gobuild 0.1.0-alpha.2 (git+https://github.com/scroll-tech/gobuild.git)", @@ -2259,7 +2259,7 @@ dependencies = [ [[package]] name = "keccak256" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#0ae06dd463652082be2c8ed35a430252d3c0265c" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#e5d465b93e46b5bf2261351d03f9913cf8232986" dependencies = [ "env_logger 0.9.3", "eth-types", @@ -2459,7 +2459,7 @@ dependencies = [ [[package]] name = "mock" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#0ae06dd463652082be2c8ed35a430252d3c0265c" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#e5d465b93e46b5bf2261351d03f9913cf8232986" dependencies = [ "eth-types", "ethers-core 0.17.0", @@ -2474,7 +2474,7 @@ dependencies = [ [[package]] name = "mpt-zktrie" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#0ae06dd463652082be2c8ed35a430252d3c0265c" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#e5d465b93e46b5bf2261351d03f9913cf8232986" dependencies = [ "eth-types", "halo2-mpt-circuits", @@ -4783,7 +4783,7 @@ checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" [[package]] name = "zkevm-circuits" version = "0.1.0" -source = 
"git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#0ae06dd463652082be2c8ed35a430252d3c0265c" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#e5d465b93e46b5bf2261351d03f9913cf8232986" dependencies = [ "array-init", "bus-mapping", diff --git a/prover/src/zkevm/capacity_checker.rs b/prover/src/zkevm/capacity_checker.rs index 6235a91c8..babebe7ef 100644 --- a/prover/src/zkevm/capacity_checker.rs +++ b/prover/src/zkevm/capacity_checker.rs @@ -154,7 +154,7 @@ impl CircuitCapacityChecker { txs: &[TxTrace], ) -> Result { assert!(!txs.is_empty()); - let (mut estimate_builder, traces) = if let Some((code_db, sdb, mpt_state)) = self.builder_ctx.take() { + let mut estimate_builder = if let Some((code_db, sdb, mpt_state)) = self.builder_ctx.take() { // here we create a new builder for another (sealed) witness block // this builder inherit the current execution state (sdb/cdb) of // the previous one and do not use zktrie state, @@ -167,22 +167,18 @@ impl CircuitCapacityChecker { builder_block.chain_id = txs[0].chain_id; builder_block.start_l1_queue_index = txs[0].start_l1_queue_index; builder_block.prev_state_root = H256(*mpt_state.root()).to_word(); - let mut builder = CircuitInputBuilder::new( + let mut builder = CircuitInputBuilder::new_with_trie_state( sdb, code_db, - &builder_block + mpt_state, + &builder_block, ); - builder.mpt_init_state = mpt_state; - ( - builder, - &txs[1..], - ) + builder.add_more_l2_trace(&txs[0], txs.len() > 1)?; + builder } else { - ( - CircuitInputBuilder::new_from_l2_trace(get_super_circuit_params(), &txs[0], txs.len() > 1)?, - txs, - ) + CircuitInputBuilder::new_from_l2_trace(get_super_circuit_params(), &txs[0], txs.len() > 1)? }; + let traces = &txs[1..]; let witness_block = block_traces_to_witness_block_with_updated_state(traces, &mut estimate_builder, self.light_mode)?; let rows = calculate_row_usage_of_witness_block(&witness_block)?; From 9970977c515e91f3c4d6335b7666284c76a95ec1 Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Wed, 23 Aug 2023 10:10:20 +0800 Subject: [PATCH 17/35] clippy and fmt --- prover/src/utils.rs | 2 +- prover/src/zkevm/capacity_checker.rs | 47 +++---- prover/src/zkevm/circuit.rs | 4 +- prover/src/zkevm/circuit/builder.rs | 97 +++++++-------- run-testnet/src/main.rs | 177 ++++++++++++++------------- types/src/lib.rs | 3 +- 6 files changed, 164 insertions(+), 166 deletions(-) diff --git a/prover/src/utils.rs b/prover/src/utils.rs index 0f7f36676..637dae66f 100644 --- a/prover/src/utils.rs +++ b/prover/src/utils.rs @@ -24,7 +24,7 @@ use std::{ str::FromStr, sync::Once, }; -use types::{BlockTraceJsonRpcResult, eth::BlockTrace}; +use types::{eth::BlockTrace, BlockTraceJsonRpcResult}; use zkevm_circuits::evm_circuit::witness::Block; pub static LOGGER: Once = Once::new(); diff --git a/prover/src/zkevm/capacity_checker.rs b/prover/src/zkevm/capacity_checker.rs index babebe7ef..b54ac0e72 100644 --- a/prover/src/zkevm/capacity_checker.rs +++ b/prover/src/zkevm/capacity_checker.rs @@ -1,4 +1,3 @@ - use super::circuit::{ MAX_BYTECODE, MAX_CALLDATA, MAX_EXP_STEPS, MAX_KECCAK_ROWS, MAX_MPT_ROWS, MAX_POSEIDON_ROWS, MAX_RWS, MAX_VERTICLE_ROWS, @@ -6,13 +5,13 @@ use super::circuit::{ use super::circuit::{ block_traces_to_witness_block_with_updated_state, calculate_row_usage_of_witness_block, - get_super_circuit_params + get_super_circuit_params, }; use bus_mapping::{ circuit_input_builder::{self, CircuitInputBuilder}, state_db::{CodeDB, StateDB}, }; -use 
eth_types::{H256, ToWord}; +use eth_types::{ToWord, H256}; use itertools::Itertools; use mpt_zktrie::state::ZktrieState; use serde_derive::{Deserialize, Serialize}; @@ -154,33 +153,35 @@ impl CircuitCapacityChecker { txs: &[TxTrace], ) -> Result { assert!(!txs.is_empty()); - let mut estimate_builder = if let Some((code_db, sdb, mpt_state)) = self.builder_ctx.take() { + let mut estimate_builder = if let Some((code_db, sdb, mpt_state)) = self.builder_ctx.take() + { // here we create a new builder for another (sealed) witness block // this builder inherit the current execution state (sdb/cdb) of - // the previous one and do not use zktrie state, - // notice the prev_root in current builder may be not invalid (since the state has changed - // but we may not update it in light mode) - let mut builder_block = circuit_input_builder::Block::from_headers( - &[], - get_super_circuit_params() - ); + // the previous one and do not use zktrie state, + // notice the prev_root in current builder may be not invalid (since the state has + // changed but we may not update it in light mode) + let mut builder_block = + circuit_input_builder::Block::from_headers(&[], get_super_circuit_params()); builder_block.chain_id = txs[0].chain_id; builder_block.start_l1_queue_index = txs[0].start_l1_queue_index; builder_block.prev_state_root = H256(*mpt_state.root()).to_word(); - let mut builder = CircuitInputBuilder::new_with_trie_state( - sdb, - code_db, - mpt_state, - &builder_block, - ); + let mut builder = + CircuitInputBuilder::new_with_trie_state(sdb, code_db, mpt_state, &builder_block); builder.add_more_l2_trace(&txs[0], txs.len() > 1)?; builder } else { - CircuitInputBuilder::new_from_l2_trace(get_super_circuit_params(), &txs[0], txs.len() > 1)? + CircuitInputBuilder::new_from_l2_trace( + get_super_circuit_params(), + &txs[0], + txs.len() > 1, + )? 
}; let traces = &txs[1..]; - let witness_block = - block_traces_to_witness_block_with_updated_state(traces, &mut estimate_builder, self.light_mode)?; + let witness_block = block_traces_to_witness_block_with_updated_state( + traces, + &mut estimate_builder, + self.light_mode, + )?; let rows = calculate_row_usage_of_witness_block(&witness_block)?; let row_usage_details: Vec = rows .into_iter() @@ -192,7 +193,11 @@ impl CircuitCapacityChecker { let tx_row_usage = RowUsage::from_row_usage_details(row_usage_details); self.row_usages.push(tx_row_usage.clone()); self.acc_row_usage.add(&tx_row_usage); - self.builder_ctx.replace((estimate_builder.code_db, estimate_builder.sdb, estimate_builder.mpt_init_state)); + self.builder_ctx.replace(( + estimate_builder.code_db, + estimate_builder.sdb, + estimate_builder.mpt_init_state, + )); Ok(self.acc_row_usage.normalize()) } } diff --git a/prover/src/zkevm/circuit.rs b/prover/src/zkevm/circuit.rs index ab269763d..186355def 100644 --- a/prover/src/zkevm/circuit.rs +++ b/prover/src/zkevm/circuit.rs @@ -13,8 +13,8 @@ use crate::utils::read_env_var; pub use self::builder::{ block_traces_to_padding_witness_block, block_traces_to_witness_block, block_traces_to_witness_block_with_updated_state, calculate_row_usage_of_trace, - calculate_row_usage_of_witness_block, check_batch_capacity, - normalize_withdraw_proof, get_super_circuit_params, WitnessBlock, + calculate_row_usage_of_witness_block, check_batch_capacity, get_super_circuit_params, + normalize_withdraw_proof, WitnessBlock, }; pub use builder::{ MAX_BYTECODE, MAX_CALLDATA, MAX_EXP_STEPS, MAX_INNER_BLOCKS, MAX_KECCAK_ROWS, MAX_MPT_ROWS, diff --git a/prover/src/zkevm/circuit/builder.rs b/prover/src/zkevm/circuit/builder.rs index 25b0b8698..6406a5c08 100644 --- a/prover/src/zkevm/circuit/builder.rs +++ b/prover/src/zkevm/circuit/builder.rs @@ -2,22 +2,19 @@ use super::{TargetCircuit, AUTO_TRUNCATE, CHAIN_ID}; use crate::config::INNER_DEGREE; use anyhow::{bail, Result}; use bus_mapping::{ - circuit_input_builder::{ - self, CircuitInputBuilder, CircuitsParams, PrecompileEcParams, - }, + circuit_input_builder::{self, CircuitInputBuilder, CircuitsParams, PrecompileEcParams}, state_db::{CodeDB, StateDB}, }; -use eth_types::{ToBigEndian, H256, ToWord}; +use eth_types::{ToBigEndian, ToWord, H256}; use halo2_proofs::halo2curves::bn256::Fr; use itertools::Itertools; use mpt_zktrie::state::ZktrieState; -use std::{ - collections::HashMap, - time::Instant, -}; +use std::{collections::HashMap, time::Instant}; use types::eth::{BlockTrace, StorageTrace}; use zkevm_circuits::{ - evm_circuit::witness::{block_apply_mpt_state, block_convert, block_convert_with_l1_queue_index, Block}, + evm_circuit::witness::{ + block_apply_mpt_state, block_convert, block_convert_with_l1_queue_index, Block, + }, util::SubCircuit, witness::WithdrawProof, }; @@ -50,8 +47,8 @@ pub fn get_super_circuit_params() -> CircuitsParams { max_bytecode: MAX_BYTECODE, max_inner_blocks: MAX_INNER_BLOCKS, max_keccak_rows: MAX_KECCAK_ROWS, -// max_poseidon_rows: MAX_POSEIDON_ROWS, -// max_vertical_circuit_rows: MAX_VERTICLE_ROWS, + // max_poseidon_rows: MAX_POSEIDON_ROWS, + // max_vertical_circuit_rows: MAX_VERTICLE_ROWS, max_exp_steps: MAX_EXP_STEPS, max_mpt_rows: MAX_MPT_ROWS, max_rlp_rows: MAX_CALLDATA, @@ -182,7 +179,6 @@ pub fn check_batch_capacity(block_traces: &mut Vec) -> Result<()> { Ok(()) } - // prepare an empty builder which can updated by more trace // from the default settings // only require the prev state root being provided @@ -191,46 +187,38 @@ 
fn prepare_default_builder( old_root: H256, initial_mpt_state: Option, ) -> CircuitInputBuilder { - - let mut builder_block = circuit_input_builder::Block::from_headers( - &[], - get_super_circuit_params() - ); + let mut builder_block = + circuit_input_builder::Block::from_headers(&[], get_super_circuit_params()); builder_block.chain_id = *CHAIN_ID; builder_block.prev_state_root = old_root.to_word(); let code_db = CodeDB::new(); if let Some(mpt_state) = initial_mpt_state { - assert_eq!(H256::from_slice(mpt_state.root()), old_root, "the provided zktrie state must be the prev state"); - let state_db = StateDB::from(&mpt_state); - let mut builder = CircuitInputBuilder::new( - state_db, - code_db, - &builder_block + assert_eq!( + H256::from_slice(mpt_state.root()), + old_root, + "the provided zktrie state must be the prev state" ); + let state_db = StateDB::from(&mpt_state); + let mut builder = CircuitInputBuilder::new(state_db, code_db, &builder_block); builder.mpt_init_state = mpt_state; builder } else { - CircuitInputBuilder::new( - StateDB::new(), - code_db, - &builder_block - ) + CircuitInputBuilder::new(StateDB::new(), code_db, &builder_block) } - } // check if block traces match preset parameters -fn validite_block_traces(block_traces: &[BlockTrace]) -> Result<()>{ +fn validite_block_traces(block_traces: &[BlockTrace]) -> Result<()> { let chain_id = block_traces - .iter() - .map(|block_trace| block_trace.chain_id) - .next() - .unwrap_or(*CHAIN_ID); + .iter() + .map(|block_trace| block_trace.chain_id) + .next() + .unwrap_or(*CHAIN_ID); if *CHAIN_ID != chain_id { bail!( - "CHAIN_ID env var is wrong. chain id in trace {chain_id}, CHAIN_ID {}", - *CHAIN_ID + "CHAIN_ID env var is wrong. chain id in trace {chain_id}, CHAIN_ID {}", + *CHAIN_ID ); } Ok(()) @@ -263,9 +251,13 @@ pub fn block_traces_to_witness_block(block_traces: &[BlockTrace]) -> Result 1)?; + let mut builder = CircuitInputBuilder::new_from_l2_trace( + get_super_circuit_params(), + &block_traces[0], + block_traces.len() > 1, + )?; block_traces_to_witness_block_with_updated_state(&block_traces[1..], &mut builder, false) } } @@ -284,19 +276,22 @@ pub fn block_traces_to_padding_witness_block(block_traces: &[BlockTrace]) -> Res } else { let start_l1_queue_index = block_traces[0].start_l1_queue_index; let mut builder = CircuitInputBuilder::new_from_l2_trace( - get_super_circuit_params(), - &block_traces[0], - block_traces.len() > 1 + get_super_circuit_params(), + &block_traces[0], + block_traces.len() > 1, )?; for (idx, block_trace) in block_traces[1..].iter().enumerate() { builder.add_more_l2_trace( block_trace, - idx + 2 == block_traces.len(),//not typo, we use 1..end of the traces only + idx + 2 == block_traces.len(), //not typo, we use 1..end of the traces only )?; } builder.finalize_building()?; - let mut witness_block = - block_convert_with_l1_queue_index::(&builder.block, &builder.code_db, start_l1_queue_index)?; + let mut witness_block = block_convert_with_l1_queue_index::( + &builder.block, + &builder.code_db, + start_l1_queue_index, + )?; log::debug!( "witness_block built with circuits_params {:?} for padding", witness_block.circuits_params @@ -304,10 +299,7 @@ pub fn block_traces_to_padding_witness_block(block_traces: &[BlockTrace]) -> Res // so we have the finalized state which contain withdraw proof block_apply_mpt_state(&mut witness_block, &builder.mpt_init_state); let old_root = H256(*builder.mpt_init_state.root()); - prepare_default_builder( - old_root, - Some(builder.mpt_init_state), - ) + 
prepare_default_builder(old_root, Some(builder.mpt_init_state)) }; // TODO: when prev_witness_block.tx.is_empty(), the `withdraw_proof` here should be a subset of @@ -319,7 +311,6 @@ pub fn block_traces_to_padding_witness_block(block_traces: &[BlockTrace]) -> Res block_apply_mpt_state(&mut padding_block, &padding_builder.mpt_init_state); Ok(padding_block) - } /// update the builder with another batch of trace and then *FINALIZE* it @@ -331,8 +322,7 @@ pub fn block_traces_to_witness_block_with_updated_state( builder: &mut CircuitInputBuilder, light_mode: bool, ) -> Result> { - - let metric = |builder: &CircuitInputBuilder, idx: usize| -> Result<(), bus_mapping::Error>{ + let metric = |builder: &CircuitInputBuilder, idx: usize| -> Result<(), bus_mapping::Error> { let t = Instant::now(); let block = block_convert_with_l1_queue_index::( &builder.block, @@ -364,7 +354,7 @@ pub fn block_traces_to_witness_block_with_updated_state( 0 } else { if per_block_metric { - metric(&builder, 0)?; + metric(builder, 0)?; } 1 }; @@ -373,7 +363,7 @@ pub fn block_traces_to_witness_block_with_updated_state( let is_last = idx == block_traces.len() - 1; builder.add_more_l2_trace(block_trace, !is_last)?; if per_block_metric { - metric(&builder, idx + initial_blk_index)?; + metric(builder, idx + initial_blk_index)?; } } @@ -400,7 +390,6 @@ pub fn block_traces_to_witness_block_with_updated_state( Ok(witness_block) } - pub fn normalize_withdraw_proof(proof: &WithdrawProof) -> StorageTrace { let address = *bus_mapping::l2_predeployed::message_queue::ADDRESS; let key = *bus_mapping::l2_predeployed::message_queue::WITHDRAW_TRIE_ROOT_SLOT; diff --git a/run-testnet/src/main.rs b/run-testnet/src/main.rs index ac1a2e8a8..598d3c004 100644 --- a/run-testnet/src/main.rs +++ b/run-testnet/src/main.rs @@ -1,24 +1,24 @@ #![allow(dead_code)] use anyhow::Result; use ethers_providers::{Http, Provider}; +use log4rs::{ + append::{ + console::{ConsoleAppender, Target}, + file::FileAppender, + }, + config::{Appender, Config, Logger, Root}, +}; use prover::{ inner::Prover, - utils::{read_env_var, GIT_VERSION, short_git_version}, + utils::{read_env_var, short_git_version, GIT_VERSION}, zkevm::circuit::{ block_traces_to_witness_block, calculate_row_usage_of_witness_block, SuperCircuit, WitnessBlock, }, }; -use log4rs::{ - append::{ - console::{ConsoleAppender, Target}, - file::FileAppender, - }, - config::{Appender, Logger, Config, Root}, -}; use reqwest::Url; use serde::Deserialize; -use std::{env, str::FromStr, process::ExitCode}; +use std::{env, process::ExitCode, str::FromStr}; use types::eth::BlockTrace; // build common config from enviroment @@ -31,13 +31,8 @@ fn common_log() -> Result { let stdoutput = ConsoleAppender::builder().target(Target::Stdout).build(); let config = Config::builder() - .appenders([ - Appender::builder().build("std", Box::new(stdoutput)), - ]).build( - Root::builder() - .appender("std") - .build(log_level), - )?; + .appenders([Appender::builder().build("std", Box::new(stdoutput))]) + .build(Root::builder().appender("std").build(log_level))?; Ok(config) } @@ -49,39 +44,39 @@ fn debug_log(output_dir: &str) -> Result { let log_file_path = Path::new(output_dir).join("runner.log"); let log_file = FileAppender::builder().build(log_file_path).unwrap(); let config = Config::builder() - .appenders([ - Appender::builder().build("log-file", Box::new(log_file)), - Appender::builder().build("stderr", Box::new(err_output)), - ]) - .logger( - Logger::builder() - .appender("log-file") - .additive(true) - .build("", 
log::LevelFilter::Debug) - ) - .build( - Root::builder() - .appender("stderr") - .build(log::LevelFilter::Warn), - )?; - + .appenders([ + Appender::builder().build("log-file", Box::new(log_file)), + Appender::builder().build("stderr", Box::new(err_output)), + ]) + .logger( + Logger::builder() + .appender("log-file") + .additive(true) + .build("", log::LevelFilter::Debug), + ) + .build( + Root::builder() + .appender("stderr") + .build(log::LevelFilter::Warn), + )?; + Ok(config) } fn prepare_chunk_dir(output_dir: &str, chunk_id: u64) -> Result { - use std::{path::Path, fs}; - let chunk_path = Path::new(output_dir).join(format!("{}", chunk_id)); + use std::{fs, path::Path}; + let chunk_path = Path::new(output_dir).join(format!("{chunk_id}")); fs::create_dir(chunk_path.as_path())?; - Ok(chunk_path.to_str().ok_or_else(||anyhow::anyhow!("invalid chunk path"))?.into()) + Ok(chunk_path + .to_str() + .ok_or_else(|| anyhow::anyhow!("invalid chunk path"))? + .into()) } -fn record_chunk_traces(chunk_dir: &str, traces: &[BlockTrace]) -> Result<()>{ - - use flate2::Compression; - use flate2::write::GzEncoder; - use std::fs::File; - use tar::{Header, Builder}; - use std::path::Path; +fn record_chunk_traces(chunk_dir: &str, traces: &[BlockTrace]) -> Result<()> { + use flate2::{write::GzEncoder, Compression}; + use std::{fs::File, path::Path}; + use tar::{Builder, Header}; let trace_file_path = Path::new(chunk_dir).join("traces.tar.gz"); let tarfile = File::create(trace_file_path)?; @@ -93,8 +88,8 @@ fn record_chunk_traces(chunk_dir: &str, traces: &[BlockTrace]) -> Result<()>{ let mut header = Header::new_gnu(); header.set_path(trace.header.number.map_or_else( - ||format!("unknown_block_{}.json", i), - |blkn|format!("{}.json", blkn), + || format!("unknown_block_{i}.json"), + |blkn| format!("{blkn}.json"), ))?; header.set_size(trace_str.len() as u64); header.set_cksum(); @@ -104,23 +99,23 @@ fn record_chunk_traces(chunk_dir: &str, traces: &[BlockTrace]) -> Result<()>{ Ok(()) } -fn chunk_handling(batch_id: i64, chunk_id: i64, block_traces: &[BlockTrace]) -> Result<()>{ +fn chunk_handling(batch_id: i64, chunk_id: i64, block_traces: &[BlockTrace]) -> Result<()> { + let witness_block = build_block(block_traces, batch_id, chunk_id) + .map_err(|e| anyhow::anyhow!("testnet: building block failed {e:?}"))?; - let witness_block = build_block(&block_traces, batch_id, chunk_id) - .map_err(|e|anyhow::anyhow!("testnet: building block failed {e:?}"))?; - - Prover::::mock_prove_witness_block(&witness_block) - .map_err(|e|anyhow::anyhow!("testnet: failed to prove chunk {chunk_id} inside batch {batch_id}:\n{e:?}"))?; + Prover::::mock_prove_witness_block(&witness_block).map_err(|e| { + anyhow::anyhow!("testnet: failed to prove chunk {chunk_id} inside batch {batch_id}:\n{e:?}") + })?; Ok(()) } -const EXIT_NO_MORE_TASK : u8 = 9; -const EXIT_FAILED_ENV : u8 = 13; -const EXIT_FAILED_ENV_WITH_TASK : u8 = 17; +const EXIT_NO_MORE_TASK: u8 = 9; +const EXIT_FAILED_ENV: u8 = 13; +const EXIT_FAILED_ENV_WITH_TASK: u8 = 17; #[tokio::main] -async fn main() -> ExitCode{ +async fn main() -> ExitCode { let log_handle = log4rs::init_config(common_log().unwrap()).unwrap(); log::info!("git version {}", GIT_VERSION); log::info!("short git version {}", short_git_version()); @@ -135,9 +130,7 @@ async fn main() -> ExitCode{ let (batch_id, chunks) = get_chunks_info(&setting) .await - .unwrap_or_else(|e| { - panic!("mock-testnet: failed to request API err {e:?}") - }); + .unwrap_or_else(|e| panic!("mock-testnet: failed to request API err {e:?}")); 
let mut chunks_task_complete = true; match chunks { None => { @@ -154,13 +147,13 @@ async fn main() -> ExitCode{ let mut block_traces: Vec = vec![]; for block_id in chunk.start_block_number..=chunk.end_block_number { log::info!("mock-testnet: requesting trace of block {block_id}"); - + match provider .request( "scroll_getBlockTraceByNumberOrHash", [format!("{block_id:#x}")], ) - .await + .await { Ok(trace) => { block_traces.push(trace); @@ -169,29 +162,37 @@ async fn main() -> ExitCode{ log::error!("obtain trace from block provider fail: {e:?}"); break; } - } + } } - if block_traces.len() < (chunk.end_block_number - chunk.start_block_number + 1) as usize { + if block_traces.len() + < (chunk.end_block_number - chunk.start_block_number + 1) as usize + { chunks_task_complete = false; break; } // start chunk-level testing - //let chunk_dir = prepare_chunk_dir(&setting.data_output_dir, chunk_id as u64).unwrap(); - if let Err(_) = prepare_chunk_dir(&setting.data_output_dir, chunk_id as u64) - .and_then(|chunk_dir|{ + //let chunk_dir = prepare_chunk_dir(&setting.data_output_dir, chunk_id as + // u64).unwrap(); + if let Err(e) = prepare_chunk_dir(&setting.data_output_dir, chunk_id as u64) + .and_then(|chunk_dir| { record_chunk_traces(&chunk_dir, &block_traces)?; Ok(chunk_dir) }) - .and_then(|chunk_dir|{ + .and_then(|chunk_dir| { log::info!("chunk {} has been recorded to {}", chunk_id, chunk_dir); log_handle.set_config(debug_log(&chunk_dir)?); Ok(()) }) { + log::error!( + "can not prepare output enviroment for chunk {}: {:?}", + chunk_id, + e + ); chunks_task_complete = false; - break; + break; } let handling_ret = chunk_handling(batch_id as i64, chunk_id, &block_traces); @@ -201,7 +202,7 @@ async fn main() -> ExitCode{ // TODO: move data to output dir } - log::info!("chunk {} has been handled", chunk_id); + log::info!("chunk {} has been handled", chunk_id); } } } @@ -213,8 +214,8 @@ async fn main() -> ExitCode{ if chunks_task_complete { log::info!("relay-alpha testnet runner: complete"); - ExitCode::from(0) - }else { + ExitCode::from(0) + } else { ExitCode::from(EXIT_FAILED_ENV) } } @@ -248,10 +249,8 @@ fn build_block( Ok(witness_block) } -/// Request chunk info from cordinator -async fn get_chunks_info( - setting: &Setting, -) -> Result<(usize, Option>)> { +/// Request chunk info from cordinator +async fn get_chunks_info(setting: &Setting) -> Result<(usize, Option>)> { let url = Url::parse(&setting.chunks_url)?; let resp: String = reqwest::get(url).await?.text().await?; @@ -272,15 +271,17 @@ async fn notify_chunks_complete( ) -> Result<()> { let url = Url::parse_with_params( &setting.task_url, - &[(if completed {"done"} else {"drop"}, - batch_index.to_string())], + &[( + if completed { "done" } else { "drop" }, + batch_index.to_string(), + )], )?; let resp = reqwest::get(url).await?.text().await?; log::info!( "notify batch {} {}, resp {}", batch_index, - if completed {"done"} else {"drop"}, + if completed { "done" } else { "drop" }, resp, ); Ok(()) @@ -317,19 +318,23 @@ impl Setting { let coordinator_url = env::var("COORDINATOR_API_URL"); let (chunks_url, task_url) = if let Ok(url_prefix) = coordinator_url { ( - Url::parse(&url_prefix).and_then(|url|url.join("chunks")).expect("run-testnet: Must be valid url for coordinator api"), - Url::parse(&url_prefix).and_then(|url|url.join("tasks")).expect("run-testnet: Must be valid url for coordinator api"), + Url::parse(&url_prefix) + .and_then(|url| url.join("chunks")) + .expect("run-testnet: Must be valid url for coordinator api"), + 
Url::parse(&url_prefix) + .and_then(|url| url.join("tasks")) + .expect("run-testnet: Must be valid url for coordinator api"), ) } else { ( - Url::parse( - &env::var("CHUNKS_API_URL") - .expect("run-test: CHUNKS_API_URL must be set if COORDINATOR_API_URL is not set"), - ).expect("run-testnet: Must be valid url for chunks api"), - Url::parse( - &env::var("TASKS_API_URL") - .expect("run-test: TASKS_API_URL must be set if COORDINATOR_API_URL is not set"), - ).expect("run-testnet: Must be valid url for tasks api"), + Url::parse(&env::var("CHUNKS_API_URL").expect( + "run-test: CHUNKS_API_URL must be set if COORDINATOR_API_URL is not set", + )) + .expect("run-testnet: Must be valid url for chunks api"), + Url::parse(&env::var("TASKS_API_URL").expect( + "run-test: TASKS_API_URL must be set if COORDINATOR_API_URL is not set", + )) + .expect("run-testnet: Must be valid url for tasks api"), ) }; diff --git a/types/src/lib.rs b/types/src/lib.rs index 516ea9e28..8635d740a 100644 --- a/types/src/lib.rs +++ b/types/src/lib.rs @@ -1,12 +1,11 @@ -use serde::{Deserialize, Serialize}; pub use eth_types::l2_types as eth; +use serde::{Deserialize, Serialize}; #[derive(Deserialize, Serialize, Default, Debug, Clone)] pub struct BlockTraceJsonRpcResult { pub result: eth::BlockTrace, } - pub mod base64 { use base64::{decode, encode}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; From 527956ad57e52fbe24ec07a43ea1549ed6b69378 Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Wed, 23 Aug 2023 14:06:19 +0800 Subject: [PATCH 18/35] refinding testnet runner --- run-testnet/.gitignore | 1 + run-testnet/README.md | 5 ++ run-testnet/src/main.rs | 108 +++++++++++++++++++++++++++++++++------- 3 files changed, 97 insertions(+), 17 deletions(-) create mode 100644 run-testnet/.gitignore diff --git a/run-testnet/.gitignore b/run-testnet/.gitignore new file mode 100644 index 000000000..6caf68aff --- /dev/null +++ b/run-testnet/.gitignore @@ -0,0 +1 @@ +output \ No newline at end of file diff --git a/run-testnet/README.md b/run-testnet/README.md index 166c1f0b6..c4a4d24b8 100644 --- a/run-testnet/README.md +++ b/run-testnet/README.md @@ -1,4 +1,9 @@ +Env: ++ TESTNET_TASKS: specify which task should be run: `mock`, `prove`, `agg` ++ OUTPUT_DIR: the output dir, default is `output` ++ COORDINATOR_API_URL: `http:///api/` (notice the ending slash '/' is important) + App exits: + 9: no more batch avaliable diff --git a/run-testnet/src/main.rs b/run-testnet/src/main.rs index 598d3c004..103c593a7 100644 --- a/run-testnet/src/main.rs +++ b/run-testnet/src/main.rs @@ -18,7 +18,7 @@ use prover::{ }; use reqwest::Url; use serde::Deserialize; -use std::{env, process::ExitCode, str::FromStr}; +use std::{backtrace, env, panic, process::ExitCode, str::FromStr}; use types::eth::BlockTrace; // build common config from enviroment @@ -51,8 +51,8 @@ fn debug_log(output_dir: &str) -> Result { .logger( Logger::builder() .appender("log-file") - .additive(true) - .build("", log::LevelFilter::Debug), + .additive(false) + .build("prover", log::LevelFilter::Debug), ) .build( Root::builder() @@ -91,6 +91,7 @@ fn record_chunk_traces(chunk_dir: &str, traces: &[BlockTrace]) -> Result<()> { || format!("unknown_block_{i}.json"), |blkn| format!("{blkn}.json"), ))?; + header.set_mode(0o644); header.set_size(trace_str.len() as u64); header.set_cksum(); tar.append(&header, trace_str.as_bytes())?; @@ -99,6 +100,12 @@ fn record_chunk_traces(chunk_dir: &str, traces: &[BlockTrace]) -> Result<()> { Ok(()) } +fn mark_chunk_failure(chunk_dir: &str, data: &str) -> 
Result<()> { + use std::{fs, path::Path}; + fs::write(Path::new(chunk_dir).join("failure"), data)?; + Ok(()) +} + fn chunk_handling(batch_id: i64, chunk_id: i64, block_traces: &[BlockTrace]) -> Result<()> { let witness_block = build_block(block_traces, batch_id, chunk_id) .map_err(|e| anyhow::anyhow!("testnet: building block failed {e:?}"))?; @@ -126,15 +133,15 @@ async fn main() -> ExitCode { log::info!("settings: {setting:?}"); let provider = Provider::::try_from(&setting.l2geth_api_url) - .expect("mock-testnet: failed to initialize ethers Provider"); + .expect("run-testnet: failed to initialize ethers Provider"); let (batch_id, chunks) = get_chunks_info(&setting) .await - .unwrap_or_else(|e| panic!("mock-testnet: failed to request API err {e:?}")); + .unwrap_or_else(|e| panic!("run-testnet: failed to request API err {e:?}")); let mut chunks_task_complete = true; match chunks { None => { - log::info!("mock-testnet: finished to prove at batch-{batch_id}"); + log::info!("run-testnet: finished to prove at batch-{batch_id}"); return ExitCode::from(EXIT_NO_MORE_TASK); } Some(chunks) => { @@ -146,7 +153,7 @@ async fn main() -> ExitCode { // fetch traces let mut block_traces: Vec = vec![]; for block_id in chunk.start_block_number..=chunk.end_block_number { - log::info!("mock-testnet: requesting trace of block {block_id}"); + log::info!("run-testnet: requesting trace of block {block_id}"); match provider .request( @@ -173,9 +180,7 @@ async fn main() -> ExitCode { } // start chunk-level testing - //let chunk_dir = prepare_chunk_dir(&setting.data_output_dir, chunk_id as - // u64).unwrap(); - if let Err(e) = prepare_chunk_dir(&setting.data_output_dir, chunk_id as u64) + let chunk_dir = prepare_chunk_dir(&setting.data_output_dir, chunk_id as u64) .and_then(|chunk_dir| { record_chunk_traces(&chunk_dir, &block_traces)?; Ok(chunk_dir) @@ -183,9 +188,10 @@ async fn main() -> ExitCode { .and_then(|chunk_dir| { log::info!("chunk {} has been recorded to {}", chunk_id, chunk_dir); log_handle.set_config(debug_log(&chunk_dir)?); - Ok(()) - }) - { + Ok(chunk_dir) + }); + // u64).unwrap(); + if let Err(e) = chunk_dir { log::error!( "can not prepare output enviroment for chunk {}: {:?}", chunk_id, @@ -194,12 +200,73 @@ async fn main() -> ExitCode { chunks_task_complete = false; break; } + let chunk_dir = chunk_dir.expect("ok ensured"); - let handling_ret = chunk_handling(batch_id as i64, chunk_id, &block_traces); - log_handle.set_config(common_log().unwrap()); + let handling_error = std::sync::Arc::new(std::sync::RwLock::new(String::from( + "unknown error, message not recorded", + ))); - if handling_ret.is_err() { - // TODO: move data to output dir + let write_error = |handling_error: &std::sync::Arc>, + err_msg: String| { + match handling_error.write() { + Ok(mut error_str) => { + *error_str = err_msg; + } + Err(e) => { + log::error!( + "fail to write error message: {:?}\n backup {}", + e, + err_msg + ); + } + } + }; + + let out_err = handling_error.clone(); + // prepare for running test phase + panic::set_hook(Box::new(move |panic_info| { + write_error( + &out_err, + format!( + "catch test panic: {} \nbacktrace: {}", + panic_info, + backtrace::Backtrace::capture(), + ), + ); + })); + + let spec_tasks = setting.spec_tasks.clone(); + + let out_err = handling_error.clone(); + let handling_ret = panic::catch_unwind(move || { + // mock + if spec_tasks.iter().any(|str| str.as_str() == "mock") { + if let Err(e) = chunk_handling(batch_id as i64, chunk_id, &block_traces) { + write_error(&out_err, format!("chunk 
handling fail: {e:?}")); + return false; + } + } + // TODO: prove + true + }); + + let _ = panic::take_hook(); + + log_handle.set_config(common_log().unwrap()); + if handling_ret.unwrap_or(false) { + log::debug!("encounter some error in batch {}", batch_id); + if let Err(e) = mark_chunk_failure( + &chunk_dir, + handling_error + .read() + .map(|reader| reader.clone()) + .unwrap_or(String::from("default")) + .as_str(), + ) { + log::error!("can not output error data for chunk {}: {:?}", chunk_id, e); + chunks_task_complete = false; + break; + } } log::info!("chunk {} has been handled", chunk_id); @@ -212,6 +279,8 @@ async fn main() -> ExitCode { return ExitCode::from(EXIT_FAILED_ENV_WITH_TASK); } + //TODO: batch level ops + if chunks_task_complete { log::info!("relay-alpha testnet runner: complete"); ExitCode::from(0) @@ -309,6 +378,7 @@ struct Setting { task_url: String, l2geth_api_url: String, data_output_dir: String, + spec_tasks: Vec, } impl Setting { @@ -340,11 +410,15 @@ impl Setting { let data_output_dir = env::var("OUTPUT_DIR").unwrap_or("output".to_string()); + let spec_tasks_str = env::var("TESTNET_TASKS").unwrap_or_default(); + let spec_tasks = spec_tasks_str.split(',').map(String::from).collect(); + Self { l2geth_api_url, data_output_dir, chunks_url: chunks_url.as_str().into(), task_url: task_url.as_str().into(), + spec_tasks, } } } From 8b658d8cd5ee5595bbf3af217042578e83c096f4 Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Wed, 23 Aug 2023 14:36:03 +0800 Subject: [PATCH 19/35] swap log output in debug --- run-testnet/src/main.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/run-testnet/src/main.rs b/run-testnet/src/main.rs index 103c593a7..e390cef60 100644 --- a/run-testnet/src/main.rs +++ b/run-testnet/src/main.rs @@ -40,24 +40,24 @@ fn common_log() -> Result { // build config for circuit-debug fn debug_log(output_dir: &str) -> Result { use std::path::Path; - let err_output = ConsoleAppender::builder().target(Target::Stderr).build(); + let app_output = ConsoleAppender::builder().target(Target::Stdout).build(); let log_file_path = Path::new(output_dir).join("runner.log"); let log_file = FileAppender::builder().build(log_file_path).unwrap(); let config = Config::builder() .appenders([ Appender::builder().build("log-file", Box::new(log_file)), - Appender::builder().build("stderr", Box::new(err_output)), + Appender::builder().build("std", Box::new(app_output)), ]) .logger( Logger::builder() - .appender("log-file") + .appender("std") .additive(false) - .build("prover", log::LevelFilter::Debug), + .build("testnet_runner", log::LevelFilter::Info), ) .build( Root::builder() - .appender("stderr") - .build(log::LevelFilter::Warn), + .appender("log-file") + .build(log::LevelFilter::Debug), )?; Ok(config) From 1197c646adeff67c0119510253ed6e900e31ba71 Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Wed, 23 Aug 2023 14:57:44 +0800 Subject: [PATCH 20/35] more grace runner --- run-testnet/src/main.rs | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/run-testnet/src/main.rs b/run-testnet/src/main.rs index e390cef60..a69d25543 100644 --- a/run-testnet/src/main.rs +++ b/run-testnet/src/main.rs @@ -124,20 +124,26 @@ const EXIT_FAILED_ENV_WITH_TASK: u8 = 17; #[tokio::main] async fn main() -> ExitCode { let log_handle = log4rs::init_config(common_log().unwrap()).unwrap(); - log::info!("git version {}", GIT_VERSION); - log::info!("short git version {}", short_git_version()); - - log::info!("relay-alpha testnet runner: begin"); let 
setting = Setting::new(); - log::info!("settings: {setting:?}"); let provider = Provider::::try_from(&setting.l2geth_api_url) .expect("run-testnet: failed to initialize ethers Provider"); - let (batch_id, chunks) = get_chunks_info(&setting) - .await - .unwrap_or_else(|e| panic!("run-testnet: failed to request API err {e:?}")); + log::info!("git version {}", GIT_VERSION); + log::info!("short git version {}", short_git_version()); + log::info!("settings: {setting:?}"); + + log::info!("relay-alpha testnet runner: begin"); + + let (batch_id, chunks) = match get_chunks_info(&setting).await { + Ok(r) => r, + Err(e) => { + log::error!("run-testnet: failed to request API err {e:?}"); + return ExitCode::from(EXIT_FAILED_ENV); + } + }; + let mut chunks_task_complete = true; match chunks { None => { From e4da0b86017dd3ed6ea77857941de823e4f92ae3 Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Wed, 23 Aug 2023 16:01:08 +0800 Subject: [PATCH 21/35] wip: slack notification --- run-testnet/run.sh | 0 .../testnet_coordinator/config.yaml.example | 1 + run-testnet/testnet_coordinator/notifier.go | 111 ++++++++++++++++++ .../testnet_coordinator/task_assign.go | 1 + 4 files changed, 113 insertions(+) create mode 100644 run-testnet/run.sh create mode 100644 run-testnet/testnet_coordinator/notifier.go diff --git a/run-testnet/run.sh b/run-testnet/run.sh new file mode 100644 index 000000000..e69de29bb diff --git a/run-testnet/testnet_coordinator/config.yaml.example b/run-testnet/testnet_coordinator/config.yaml.example index f3c1ee481..35a140fe7 100644 --- a/run-testnet/testnet_coordinator/config.yaml.example +++ b/run-testnet/testnet_coordinator/config.yaml.example @@ -2,6 +2,7 @@ # start: 0 #batch start from chunkURL: http:///api/chunks?batch_index=%d +# notifierURL: server: #host: 0.0.0.0:8560 #url: #the base url server used for \ No newline at end of file diff --git a/run-testnet/testnet_coordinator/notifier.go b/run-testnet/testnet_coordinator/notifier.go new file mode 100644 index 000000000..1823cfdb8 --- /dev/null +++ b/run-testnet/testnet_coordinator/notifier.go @@ -0,0 +1,111 @@ +package main + +import ( + "bytes" + "encoding/json" + "fmt" + "log" + "net/http" +) + +type Notifier interface { +} + +type notifier string + +type slackData struct { + Text string `json:"text"` + Agent string `json:"username,omitempty"` + Icon string `json:"icon_emoji,omitempty"` +} + +func getSlackData(text string, agent string) *slackData { + return &slackData{ + Text: text, + Agent: fmt.Sprintf("Testnet", agent), + Icon: "eyes", + } +} + +func notify_slack_channel(url string, slack *slackData) (string, error) { + + data, err := json.Marshal(slack) + if err != nil { + log.Println("marshal slack notify fail", slack.Text, err) + return "", err + } + + resp, err := http.Post(url, "application/json", bytes.NewBuffer(data)) + if err != nil { + log.Println("send slack notify fail", slack.Text, err) + return "", err + } + defer resp.Body.Close() + + return resp.Status, nil +} + +func (n notifier) notify_task_assign(id uint64, agent string) error { + msg := getSlackData( + agent, + fmt.Sprintf("I have receive batch %d", id), + ) + msg.Icon = "check_mark" + resp, err := notify_slack_channel(string(n), msg) + if err == nil { + log.Println("have send task assigned to slack and remote resp", resp) + } + return err +} + +func (n notifier) notify_progress(id uint64) error { + msg := getSlackData( + "", + fmt.Sprintf("We have progress to batch %d", id), + ) + msg.Icon = "party_popper" + resp, err := notify_slack_channel(string(n), msg) + if err 
== nil { + log.Println("have send progress to slack and remote resp", resp) + } + return err +} + +func (n notifier) notify_chunk_issue(chunk_id uint64) error { + msg := getSlackData( + "", + fmt.Sprintf("Chunk %d has issued, check it in", chunk_id), + ) + msg.Icon = "face_screaming_in_fear" + resp, err := notify_slack_channel(string(n), msg) + if err == nil { + log.Println("have send chunk issue to slack and remote resp", resp) + } + return err +} + +func (n notifier) notify_task_complete(id uint64) error { + msg := getSlackData( + "", + fmt.Sprintf("Batch %d has completed", id), + ) + msg.Icon = "grinning_face" + resp, err := notify_slack_channel(string(n), msg) + if err == nil { + log.Println("have send task assigned to slack and remote resp", resp) + } + return err +} + +func (n notifier) notify_task_drop(id uint64) error { + msg := getSlackData( + "", + fmt.Sprintf("Batch %d can not be completed and has been dropped", id), + ) + msg.Icon = "tired_face" + resp, err := notify_slack_channel(string(n), msg) + if err == nil { + log.Println("have send task assigned to slack and remote resp", resp) + } + return err +} diff --git a/run-testnet/testnet_coordinator/task_assign.go b/run-testnet/testnet_coordinator/task_assign.go index 6090dbe2c..aa6a69535 100644 --- a/run-testnet/testnet_coordinator/task_assign.go +++ b/run-testnet/testnet_coordinator/task_assign.go @@ -17,6 +17,7 @@ type TaskAssigner struct { sync.Mutex begin_with uint64 runingTasks map[uint64]TaskStatus + message *notifier } func construct(start uint64) *TaskAssigner { From 94d546f1c9abe2ebbd995ad8c1e9e0714bf0dc70 Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Thu, 24 Aug 2023 09:23:06 +0800 Subject: [PATCH 22/35] fix an typo --- run-testnet/src/main.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/run-testnet/src/main.rs b/run-testnet/src/main.rs index a69d25543..cdacd489f 100644 --- a/run-testnet/src/main.rs +++ b/run-testnet/src/main.rs @@ -259,7 +259,7 @@ async fn main() -> ExitCode { let _ = panic::take_hook(); log_handle.set_config(common_log().unwrap()); - if handling_ret.unwrap_or(false) { + if !handling_ret.unwrap_or(false) { log::debug!("encounter some error in batch {}", batch_id); if let Err(e) = mark_chunk_failure( &chunk_dir, From c2dbe43c65ff6f6ac9ab9556cc271301709a02d2 Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Thu, 24 Aug 2023 09:23:25 +0800 Subject: [PATCH 23/35] add notifier for coordinator --- run-testnet/testnet_coordinator/config.go | 1 + run-testnet/testnet_coordinator/main.go | 50 +++++++++-- run-testnet/testnet_coordinator/notifier.go | 85 +++++++------------ .../testnet_coordinator/task_assign.go | 58 ++++++++++++- 4 files changed, 130 insertions(+), 64 deletions(-) diff --git a/run-testnet/testnet_coordinator/config.go b/run-testnet/testnet_coordinator/config.go index a0a9cb34c..5feb3f10a 100644 --- a/run-testnet/testnet_coordinator/config.go +++ b/run-testnet/testnet_coordinator/config.go @@ -15,6 +15,7 @@ type ServerConfig struct { type Config struct { StartBatch uint64 `yaml:"start,omitempty"` ChunkURLTemplate string `yaml:"chunkURL"` + NotifierURL string `yaml:"notifierURL"` Server *ServerConfig `yaml:"server,omitempty"` } diff --git a/run-testnet/testnet_coordinator/main.go b/run-testnet/testnet_coordinator/main.go index 1886d2a1a..b8f46d17e 100644 --- a/run-testnet/testnet_coordinator/main.go +++ b/run-testnet/testnet_coordinator/main.go @@ -34,7 +34,7 @@ func main() { log.Fatalf("Error reading config file: %v", err) } - taskAssigner := construct(serverConfig.StartBatch) + 
taskAssigner := construct(serverConfig.StartBatch).setMessenger(serverConfig.NotifierURL) http.HandleFunc( serverConfig.Server.ServerURL+"/chunks", @@ -42,11 +42,15 @@ func main() { ) http.HandleFunc( serverConfig.Server.ServerURL+"/tasks", - taskHandler(taskAssigner, serverConfig.ChunkURLTemplate), + taskHandler(taskAssigner), ) http.HandleFunc( serverConfig.Server.ServerURL+"/status", - statusHandler(taskAssigner, serverConfig.ChunkURLTemplate), + statusHandler(taskAssigner), + ) + http.HandleFunc( + serverConfig.Server.ServerURL+"/nodewarning", + nodeProxyHandler(taskAssigner), ) http.Handle("/", http.NotFoundHandler()) @@ -59,14 +63,17 @@ func main() { func chunksHandler(assigner *TaskAssigner, url_template string) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { + assigned_done := false assigned := assigner.assign_new() - defer func() { + defer func(agent string) { log.Println("send new batch out", assigned, assigned_done) if !assigned_done { assigner.drop(assigned) + } else { + assigner.coordinatorNotify(fmt.Sprintf("We have assigned a new batch {%d} to agent %s", assigned, agent), "") } - }() + }(r.RemoteAddr) url := fmt.Sprintf(url_template, assigned) resp, err := readSrcUrl(url) if statusErr, ok := err.(UpstreamError); ok { @@ -87,7 +94,7 @@ func chunksHandler(assigner *TaskAssigner, url_template string) http.HandlerFunc } } -func taskHandler(assigner *TaskAssigner, url_template string) http.HandlerFunc { +func taskHandler(assigner *TaskAssigner) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { done_index := r.URL.Query().Get("done") @@ -100,7 +107,9 @@ func taskHandler(assigner *TaskAssigner, url_template string) http.HandlerFunc { http.Error(w, "invalid done index, need integer", http.StatusBadRequest) return } - assigner.complete(ind) + if prog, now := assigner.complete(ind); prog { + assigner.coordinatorNotify(fmt.Sprintf("we have progress to batch %d", now), COORDINATOR_GOODJOB) + } } else if drop_index != "" { log.Println("receive drop notify for batch:", drop_index) var ind uint64 @@ -109,6 +118,31 @@ func taskHandler(assigner *TaskAssigner, url_template string) http.HandlerFunc { return } assigner.drop(ind) + assigner.coordinatorNotify(fmt.Sprintf("Batch %d is once dropped", ind), COORDINATOR_BADNEWS) + } else { + http.Error(w, "must query with drop or done", http.StatusBadRequest) + return + } + + w.WriteHeader(http.StatusOK) + } +} + +func nodeProxyHandler(assigner *TaskAssigner) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + + chunk_issue_index := r.URL.Query().Get("chunk_issue") + node_panic := r.URL.Query().Get("panic") + + if chunk_issue_index != "" { + var ind uint64 + if _, err := fmt.Sscanf(chunk_issue_index, "%d", &ind); err != nil { + http.Error(w, "invalid index, need integer", http.StatusBadRequest) + return + } + assigner.nodeProxyNotify(r.RemoteAddr, fmt.Sprintf("Prover has issue in chunk %d, check it", ind)) + } else if node_panic != "" { + assigner.nodeProxyNotify(r.RemoteAddr, fmt.Sprintf("Node status bad, check it")) } else { http.Error(w, "must query with drop or done", http.StatusBadRequest) return @@ -118,7 +152,7 @@ func taskHandler(assigner *TaskAssigner, url_template string) http.HandlerFunc { } } -func statusHandler(assigner *TaskAssigner, url_template string) http.HandlerFunc { +func statusHandler(assigner *TaskAssigner) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { ret := fmt.Sprintf("%v", assigner.status()) diff --git 
a/run-testnet/testnet_coordinator/notifier.go b/run-testnet/testnet_coordinator/notifier.go index 1823cfdb8..c2b789224 100644 --- a/run-testnet/testnet_coordinator/notifier.go +++ b/run-testnet/testnet_coordinator/notifier.go @@ -8,9 +8,6 @@ import ( "net/http" ) -type Notifier interface { -} - type notifier string type slackData struct { @@ -22,12 +19,12 @@ type slackData struct { func getSlackData(text string, agent string) *slackData { return &slackData{ Text: text, - Agent: fmt.Sprintf("Testnet", agent), + Agent: fmt.Sprintf("Testnet-%s", agent), Icon: "eyes", } } -func notify_slack_channel(url string, slack *slackData) (string, error) { +func notifySlackChannel(url string, slack *slackData) (string, error) { data, err := json.Marshal(slack) if err != nil { @@ -45,67 +42,47 @@ func notify_slack_channel(url string, slack *slackData) (string, error) { return resp.Status, nil } -func (n notifier) notify_task_assign(id uint64, agent string) error { - msg := getSlackData( - agent, - fmt.Sprintf("I have receive batch %d", id), - ) - msg.Icon = "check_mark" - resp, err := notify_slack_channel(string(n), msg) - if err == nil { - log.Println("have send task assigned to slack and remote resp", resp) - } - return err +const COORDINATOR_COMMON = ":white_check_mark:" +const COORDINATOR_GOODJOB = ":tada:" +const COORDINATOR_BADNEWS = ":tired_face:" + +var agentData = map[string]string{ + COORDINATOR_COMMON: "Testnet-coordinator", + COORDINATOR_BADNEWS: "Oh no ...", + COORDINATOR_GOODJOB: "Congraduations!", } -func (n notifier) notify_progress(id uint64) error { - msg := getSlackData( - "", - fmt.Sprintf("We have progress to batch %d", id), - ) - msg.Icon = "party_popper" - resp, err := notify_slack_channel(string(n), msg) - if err == nil { - log.Println("have send progress to slack and remote resp", resp) +func (n notifier) coordinatorNotify(txt string, icon string) error { + if n == "" { + return nil } - return err -} -func (n notifier) notify_chunk_issue(chunk_id uint64) error { - msg := getSlackData( - "", - fmt.Sprintf("Chunk %d has issued, check it in", chunk_id), - ) - msg.Icon = "face_screaming_in_fear" - resp, err := notify_slack_channel(string(n), msg) + if icon == "" { + icon = COORDINATOR_COMMON + } + resp, err := notifySlackChannel(string(n), &slackData{ + Text: txt, + Agent: agentData[icon], + Icon: icon, + }) if err == nil { - log.Println("have send chunk issue to slack and remote resp", resp) + log.Println("have send coordinate notify and remote resp:", resp) } return err } -func (n notifier) notify_task_complete(id uint64) error { - msg := getSlackData( - "", - fmt.Sprintf("Batch %d has completed", id), - ) - msg.Icon = "grinning_face" - resp, err := notify_slack_channel(string(n), msg) - if err == nil { - log.Println("have send task assigned to slack and remote resp", resp) +func (n notifier) nodeProxyNotify(node string, txt string) error { + if n == "" { + return nil } - return err -} -func (n notifier) notify_task_drop(id uint64) error { - msg := getSlackData( - "", - fmt.Sprintf("Batch %d can not be completed and has been dropped", id), - ) - msg.Icon = "tired_face" - resp, err := notify_slack_channel(string(n), msg) + resp, err := notifySlackChannel(string(n), &slackData{ + Text: txt, + Agent: fmt.Sprintf("Testnet-%s", node), + Icon: ":scream:", + }) if err == nil { - log.Println("have send task assigned to slack and remote resp", resp) + log.Println("have send coordinate notify and remote resp:", resp) } return err } diff --git a/run-testnet/testnet_coordinator/task_assign.go 
b/run-testnet/testnet_coordinator/task_assign.go index aa6a69535..c14b75f4b 100644 --- a/run-testnet/testnet_coordinator/task_assign.go +++ b/run-testnet/testnet_coordinator/task_assign.go @@ -2,6 +2,7 @@ package main import ( "log" + "sort" "sync" ) @@ -15,18 +16,27 @@ const TaskReAssign TaskStatus = 2 // since the cost is trivial (batch number is limited) type TaskAssigner struct { sync.Mutex + notifier begin_with uint64 + progress uint64 runingTasks map[uint64]TaskStatus - message *notifier } func construct(start uint64) *TaskAssigner { return &TaskAssigner{ begin_with: start, + progress: start, runingTasks: make(map[uint64]TaskStatus), } } +func (t *TaskAssigner) setMessenger(url string) *TaskAssigner { + t.Lock() + defer t.Unlock() + t.notifier = notifier(url) + return t +} + func (t *TaskAssigner) assign_new() uint64 { t.Lock() @@ -63,11 +73,55 @@ func (t *TaskAssigner) drop(id uint64) { log.Printf("unexpected dropping non-existed task (%d)\n", id) } -func (t *TaskAssigner) complete(id uint64) { +func (t *TaskAssigner) reset(id uint64) { + t.Lock() defer t.Unlock() + t.runingTasks[id] = TaskReAssign + log.Printf("enforce reset a task (%d)\n", id) +} + +func (t *TaskAssigner) complete(id uint64) (bool, uint64) { + t.Lock() + defer t.Unlock() + if _, existed := t.runingTasks[id]; !existed { + log.Printf("unexpected completed task (%d)\n", id) + return false, t.progress + } t.runingTasks[id] = TaskCompleted + // scan all tasks and make progress + completed := []uint64{} + nowProg := t.progress + + for id, status := range t.runingTasks { + if status == TaskCompleted { + completed = append(completed, id) + } + } + + sort.Slice(completed, func(i, j int) bool { + return completed[i] < completed[j] + }) + + log.Printf("collect completed (%v), now %d\n", completed, t.progress) + + for _, id := range completed { + if id == nowProg { + delete(t.runingTasks, id) + nowProg += 1 + } else if id > nowProg { + break + } else { + panic("unexpected prog") + } + } + + defer func(newProg uint64) { + t.progress = newProg + }(nowProg) + + return nowProg > t.progress, nowProg } func (t *TaskAssigner) status() (result []uint64) { From 4f11ce7be8d7ebd10a0c07b100c7c043962ea9ff Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Thu, 24 Aug 2023 09:26:26 +0800 Subject: [PATCH 24/35] bump zkevm --- Cargo.lock | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9de5f8b67..65e5987fd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -23,7 +23,7 @@ dependencies = [ [[package]] name = "aggregator" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#e5d465b93e46b5bf2261351d03f9913cf8232986" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#9c273c5028db44702094ad3203d9ec9b311d6d8f" dependencies = [ "ark-std", "env_logger 0.10.0", @@ -419,7 +419,7 @@ checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" [[package]] name = "bus-mapping" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#e5d465b93e46b5bf2261351d03f9913cf8232986" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#9c273c5028db44702094ad3203d9ec9b311d6d8f" dependencies = [ "eth-types", "ethers-core 0.17.0", @@ -1123,7 +1123,7 @@ dependencies = [ [[package]] name = "eth-types" version = "0.1.0" -source = 
"git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#e5d465b93e46b5bf2261351d03f9913cf8232986" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#9c273c5028db44702094ad3203d9ec9b311d6d8f" dependencies = [ "ethers-core 0.17.0", "ethers-signers", @@ -1388,7 +1388,7 @@ dependencies = [ [[package]] name = "external-tracer" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#e5d465b93e46b5bf2261351d03f9913cf8232986" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#9c273c5028db44702094ad3203d9ec9b311d6d8f" dependencies = [ "eth-types", "geth-utils", @@ -1613,7 +1613,7 @@ dependencies = [ [[package]] name = "gadgets" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#e5d465b93e46b5bf2261351d03f9913cf8232986" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#9c273c5028db44702094ad3203d9ec9b311d6d8f" dependencies = [ "digest 0.7.6", "eth-types", @@ -1653,7 +1653,7 @@ dependencies = [ [[package]] name = "geth-utils" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#e5d465b93e46b5bf2261351d03f9913cf8232986" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#9c273c5028db44702094ad3203d9ec9b311d6d8f" dependencies = [ "env_logger 0.9.3", "gobuild 0.1.0-alpha.2 (git+https://github.com/scroll-tech/gobuild.git)", @@ -2259,7 +2259,7 @@ dependencies = [ [[package]] name = "keccak256" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#e5d465b93e46b5bf2261351d03f9913cf8232986" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#9c273c5028db44702094ad3203d9ec9b311d6d8f" dependencies = [ "env_logger 0.9.3", "eth-types", @@ -2459,7 +2459,7 @@ dependencies = [ [[package]] name = "mock" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#e5d465b93e46b5bf2261351d03f9913cf8232986" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#9c273c5028db44702094ad3203d9ec9b311d6d8f" dependencies = [ "eth-types", "ethers-core 0.17.0", @@ -2474,7 +2474,7 @@ dependencies = [ [[package]] name = "mpt-zktrie" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#e5d465b93e46b5bf2261351d03f9913cf8232986" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#9c273c5028db44702094ad3203d9ec9b311d6d8f" dependencies = [ "eth-types", "halo2-mpt-circuits", @@ -4783,7 +4783,7 @@ checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" [[package]] name = "zkevm-circuits" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#e5d465b93e46b5bf2261351d03f9913cf8232986" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#9c273c5028db44702094ad3203d9ec9b311d6d8f" dependencies = [ "array-init", "bus-mapping", From ae199851795887a1413c121e21badd9258e261f7 Mon Sep 
17 00:00:00 2001 From: Ho Vei Date: Thu, 24 Aug 2023 11:14:49 +0800 Subject: [PATCH 25/35] refine coordinator, add running script --- run-testnet/run.sh | 82 +++++++++++++++++++++++++ run-testnet/testnet_coordinator/main.go | 8 +-- 2 files changed, 86 insertions(+), 4 deletions(-) mode change 100644 => 100755 run-testnet/run.sh diff --git a/run-testnet/run.sh b/run-testnet/run.sh old mode 100644 new mode 100755 index e69de29bb..3256a362f --- a/run-testnet/run.sh +++ b/run-testnet/run.sh @@ -0,0 +1,82 @@ +#!/bin/bash + +set -ue +command -v curl &> /dev/null + +# Check if the environment variable COORDINATOR_API_URL is set +if [ -z "${COORDINATOR_API_URL:-}" ]; then + echo "COORDINATOR_API_URL is not set!" + exit 1 +fi + +function exit_trap { + if [ $1 -ne 0 ]; then + curl ${COORDINATOR_API_URL}nodewarning?panic=runtime_error + fi +} + +trap "curl ${COORDINATOR_API_URL}nodewarning?panic=script_error" ERR +trap 'exit_trap $?' EXIT +trap "curl ${COORDINATOR_API_URL}nodewarning?panic=user_interrupt" SIGINT + +if [ -z "${TESTNET_TASKS:-}" ]; then + echo "should specify at least one tasks from mock, prove and agg, or combine them with commas" + exit 1 +fi + +output_dir="${OUTPUT_DIR:-output}" + +if [ ! -d "$output_dir" ]; then + mkdir -p "$output_dir" + echo "Directory $output_dir created." +fi + +# A function representing your command 'a' +function debug_run { + cargo run --bin testnet-runner --release + exit_code=$? +} + +function check_output { + find "$output_dir" -type d | while read -r chunk_dir; do + fail_file="${chunk_dir}/failure" + + if [ -e "$fail_file" ]; then + #TODO copy $chunk_dir + chunk_name=`echo "$chunk_dir" | grep -oE '[^/]+$'` + echo "${chunk_name} fail (${chunk_dir})" + curl "${COORDINATOR_API_URL}nodewarning?chunk_issue=${chunk_name}" + fi + done +} + +while true; do +# clean output dir before each running + rm -rf ${output_dir}/* + set +e + if [ -z "${DEBUG_RUN:-}"]; then + echo "no implement!" 
+ exit 1 + else + debug_run + fi + set -e + if [ $exit_code -eq 0 ]; then + # normal run, still sleep a while for avoiding unexpected crazy loop + check_output + sleep 10 + elif [ $exit_code -eq 9 ]; then + # there maybe more batchs, wait 10 min + sleep 600 + elif [ $exit_code -eq 13 ]; then + # wrong runtime + exit 1 + # Perform action B + elif [ $exit_code -eq 17 ]; then + curl ${COORDINATOR_API_URL}nodewarning?panic=runtime_error_with_batch_stuck + exit 1 + else + echo "exit with unknown reason" + exit 1 + fi +done \ No newline at end of file diff --git a/run-testnet/testnet_coordinator/main.go b/run-testnet/testnet_coordinator/main.go index b8f46d17e..477966b05 100644 --- a/run-testnet/testnet_coordinator/main.go +++ b/run-testnet/testnet_coordinator/main.go @@ -132,7 +132,7 @@ func nodeProxyHandler(assigner *TaskAssigner) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { chunk_issue_index := r.URL.Query().Get("chunk_issue") - node_panic := r.URL.Query().Get("panic") + node_panic_reason := r.URL.Query().Get("panic") if chunk_issue_index != "" { var ind uint64 @@ -141,10 +141,10 @@ func nodeProxyHandler(assigner *TaskAssigner) http.HandlerFunc { return } assigner.nodeProxyNotify(r.RemoteAddr, fmt.Sprintf("Prover has issue in chunk %d, check it", ind)) - } else if node_panic != "" { - assigner.nodeProxyNotify(r.RemoteAddr, fmt.Sprintf("Node status bad, check it")) + } else if node_panic_reason != "" { + assigner.nodeProxyNotify(r.RemoteAddr, fmt.Sprintf("Node status bad because <%s>, check it", node_panic_reason)) } else { - http.Error(w, "must query with drop or done", http.StatusBadRequest) + http.Error(w, "must query with panic or chunk_issue", http.StatusBadRequest) return } From a4e1e8ad3e3e2791181727129d9bac26df49a30f Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Thu, 24 Aug 2023 11:42:00 +0800 Subject: [PATCH 26/35] more refins on running script --- run-testnet/run.sh | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/run-testnet/run.sh b/run-testnet/run.sh index 3256a362f..783979428 100755 --- a/run-testnet/run.sh +++ b/run-testnet/run.sh @@ -10,14 +10,16 @@ if [ -z "${COORDINATOR_API_URL:-}" ]; then fi function exit_trap { - if [ $1 -ne 0 ]; then - curl ${COORDINATOR_API_URL}nodewarning?panic=runtime_error + if [ $exit_code -eq 17 ]; then + curl -s ${COORDINATOR_API_URL}nodewarning?panic=runtime_error_with_batch_stuck + elif [ $1 -ne 0 ]; then + curl -s ${COORDINATOR_API_URL}nodewarning?panic=runtime_error fi } -trap "curl ${COORDINATOR_API_URL}nodewarning?panic=script_error" ERR +trap "curl -s ${COORDINATOR_API_URL}nodewarning?panic=script_error" ERR trap 'exit_trap $?' 
EXIT -trap "curl ${COORDINATOR_API_URL}nodewarning?panic=user_interrupt" SIGINT +trap "curl -s ${COORDINATOR_API_URL}nodewarning?panic=user_interrupt" SIGINT if [ -z "${TESTNET_TASKS:-}" ]; then echo "should specify at least one tasks from mock, prove and agg, or combine them with commas" @@ -45,7 +47,7 @@ function check_output { #TODO copy $chunk_dir chunk_name=`echo "$chunk_dir" | grep -oE '[^/]+$'` echo "${chunk_name} fail (${chunk_dir})" - curl "${COORDINATOR_API_URL}nodewarning?chunk_issue=${chunk_name}" + curl -s "${COORDINATOR_API_URL}nodewarning?chunk_issue=${chunk_name}" fi done } @@ -64,6 +66,7 @@ while true; do if [ $exit_code -eq 0 ]; then # normal run, still sleep a while for avoiding unexpected crazy loop check_output + echo "checking output done" sleep 10 elif [ $exit_code -eq 9 ]; then # there maybe more batchs, wait 10 min @@ -73,8 +76,7 @@ while true; do exit 1 # Perform action B elif [ $exit_code -eq 17 ]; then - curl ${COORDINATOR_API_URL}nodewarning?panic=runtime_error_with_batch_stuck - exit 1 + exit $exit_code else echo "exit with unknown reason" exit 1 From d6c1aa779c3e2cc44bd8edde1ae0bad57c4756b5 Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Thu, 24 Aug 2023 11:52:04 +0800 Subject: [PATCH 27/35] refines ... --- run-testnet/run.sh | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/run-testnet/run.sh b/run-testnet/run.sh index 783979428..7b3da504e 100755 --- a/run-testnet/run.sh +++ b/run-testnet/run.sh @@ -12,7 +12,7 @@ fi function exit_trap { if [ $exit_code -eq 17 ]; then curl -s ${COORDINATOR_API_URL}nodewarning?panic=runtime_error_with_batch_stuck - elif [ $1 -ne 0 ]; then + elif [ $1 -eq 1 ]; then curl -s ${COORDINATOR_API_URL}nodewarning?panic=runtime_error fi } @@ -40,6 +40,7 @@ function debug_run { } function check_output { + set -e find "$output_dir" -type d | while read -r chunk_dir; do fail_file="${chunk_dir}/failure" @@ -50,19 +51,19 @@ function check_output { curl -s "${COORDINATOR_API_URL}nodewarning?chunk_issue=${chunk_name}" fi done + set +e } +set +e while true; do # clean output dir before each running rm -rf ${output_dir}/* - set +e if [ -z "${DEBUG_RUN:-}"]; then echo "no implement!" exit 1 else debug_run fi - set -e if [ $exit_code -eq 0 ]; then # normal run, still sleep a while for avoiding unexpected crazy loop check_output From c93eadc68b2d7f942dff333c5d739ed572bb230b Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Thu, 24 Aug 2023 19:36:32 +0800 Subject: [PATCH 28/35] add docker file refine script --- Dockerfile.run_testnet | 15 +++++++++++++++ run-testnet/run.sh | 26 ++++++++++++-------------- 2 files changed, 27 insertions(+), 14 deletions(-) create mode 100644 Dockerfile.run_testnet diff --git a/Dockerfile.run_testnet b/Dockerfile.run_testnet new file mode 100644 index 000000000..26259e671 --- /dev/null +++ b/Dockerfile.run_testnet @@ -0,0 +1,15 @@ +FROM scrolltech/go-rust-builder as builder + +RUN mkdir -p /root/src +ADD . 
/root/src +RUN cd /root/src/run-testnet && cargo build --release --bin testnet-runner +RUN cd /root/src/target/release && find -name libzktrie.so | xargs -I {} cp {} /root/src/target/release + +FROM builder + +COPY --from=builder /root/src/target/release/testnet-runner /bin/ +COPY --from=builder /root/src/run-testnet/run.sh /bin/testnet-runner.sh +COPY --from=builder /root/src/target/release/libzktrie.so /usr/local/lib +ENV LD_LIBRARY_PATH /usr/local/lib + +CMD testnet-runner.sh diff --git a/run-testnet/run.sh b/run-testnet/run.sh index 7b3da504e..25672bbc4 100755 --- a/run-testnet/run.sh +++ b/run-testnet/run.sh @@ -10,11 +10,16 @@ if [ -z "${COORDINATOR_API_URL:-}" ]; then fi function exit_trap { - if [ $exit_code -eq 17 ]; then - curl -s ${COORDINATOR_API_URL}nodewarning?panic=runtime_error_with_batch_stuck - elif [ $1 -eq 1 ]; then - curl -s ${COORDINATOR_API_URL}nodewarning?panic=runtime_error + reason="unknown_error" + if [ $1 -eq 17 ]; then + reason=runtime_error_with_batch_stuck + elif [ $1 -eq 13 ]; then + # wrong runtime + reason=runtime_error + elif [ $1 -eq 0 ]; then + return fi + curl -s ${COORDINATOR_API_URL}nodewarning?panic=${reason} } trap "curl -s ${COORDINATOR_API_URL}nodewarning?panic=script_error" ERR @@ -59,8 +64,8 @@ while true; do # clean output dir before each running rm -rf ${output_dir}/* if [ -z "${DEBUG_RUN:-}"]; then - echo "no implement!" - exit 1 + testnet-runner + exit_code=$? else debug_run fi @@ -72,14 +77,7 @@ while true; do elif [ $exit_code -eq 9 ]; then # there maybe more batchs, wait 10 min sleep 600 - elif [ $exit_code -eq 13 ]; then - # wrong runtime - exit 1 - # Perform action B - elif [ $exit_code -eq 17 ]; then - exit $exit_code else - echo "exit with unknown reason" - exit 1 + exit $exit_code fi done \ No newline at end of file From 87a4b22472aceb28a70afd3402850c69241daad4 Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Thu, 24 Aug 2023 20:11:38 +0800 Subject: [PATCH 29/35] update readme --- run-testnet/README.md | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/run-testnet/README.md b/run-testnet/README.md index c4a4d24b8..de96574c8 100644 --- a/run-testnet/README.md +++ b/run-testnet/README.md @@ -8,4 +8,10 @@ App exits: + 9: no more batch avaliable + 13: unexpected error in run-time, can not continue executing runner until the issue has been resolved. 
-+ 17: same as `13` but a batch task may hold without dropping from coordinator, we should reset the task manually \ No newline at end of file ++ 17: same as `13` but a batch task may hold without dropping from coordinator, we should reset the task manually + +Docker + ++ build `Dockerfile.run_testnet` in parent directory ++ docker run -e L2GETH_API_URL=\ -e COORDINATOR_API_URL=\ -e TESTNET_TASKS=mock,prove,agg \ ++ The entrypoint for `L2GETH_API_URL` is the URL which we passed to `geth connect` \ No newline at end of file From e2feee6b9116d77e2014aff4b72300e6ede02245 Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Fri, 25 Aug 2023 11:35:44 +0800 Subject: [PATCH 30/35] add readme and refine test runner --- run-testnet/README.md | 46 +++++++++++++++++++++++++++++++++++++++-- run-testnet/src/main.rs | 37 +++++++++++++++++++++------------ 2 files changed, 68 insertions(+), 15 deletions(-) diff --git a/run-testnet/README.md b/run-testnet/README.md index de96574c8..8e6ee8fce 100644 --- a/run-testnet/README.md +++ b/run-testnet/README.md @@ -1,16 +1,58 @@ +## How it works ## + +### The runner ### + +The runner (built by `cargo build --bin testnet-runner`) acquires a batch from the coordinator and handles each chunk in it. + +The actions on chunks can be specified by `TESTNET_TASKS`: `mock`, `prove`; +`agg` can also be specified for batch-level operations. + +For each chunk, it creates a directory named by chunk id under `OUTPUT_DIR`; the traces for the chunk are stored in a package `trace.tar.gz` +and the prover's actions are traced with verbose logging (at `DEBUG` level). + +If an action fails for some reason, the runner tries to catch it and records a `failure` file under the chunk dir. +This includes panics raised in the prover. In general, the runner does not exit suddenly; it exits with special exit codes when encountering +errors it cannot handle. Env: + TESTNET_TASKS: specify which task should be run: `mock`, `prove`, `agg` + OUTPUT_DIR: the output dir, default is `output` + COORDINATOR_API_URL: `http:///api/` (notice the ending slash '/' is important) -App exits: +Runner exits: + 9: no more batch avaliable + 13: unexpected error in run-time, can not continue executing runner until the issue has been resolved. + 17: same as `13` but a batch task may hold without dropping from coordinator, we should reset the task manually -Docker +### The script ### + +The running script `run.sh` keeps launching another runner whenever one has completed without error. It only exits when some serious error +is raised and nothing can be done to resume by itself. The operator should inspect the problem manually and re-run the script after that. +When the script quits, it is not expected that simply restarting it will make the issue disappear automatically. + +### The coordinator ### + +The coordinator is a singleton service. It assigns and proxies the chunk data from the proposer for multiple runners and records the +completion. The progress is sent to a Slack channel. It also relays messages from runner nodes to Slack. + +## Known issues ## + ++ If the runner cannot access the coordinator temporarily, a batch may be 'stuck' by this runner and cannot be handled until the operator +resets it (i.e. sends a `drop` request to the coordinator manually). This status is notified to the Slack channel. + ++ There is no data persistence available for the coordinator, and restarting it loses all tasks it has currently assigned. The operator +can only adjust the starting batch index in the configuration and start assigning new tasks from it.
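For reference, the interaction between a runner and the coordinator endpoints described above (`chunks`, `tasks?done=` / `tasks?drop=`, `nodewarning?chunk_issue=`) can be sketched with a minimal client. This is an illustrative sketch only, not part of the patch series: only the endpoint paths, query parameters, and the trailing-slash convention of `COORDINATOR_API_URL` come from the patches; the helper names, ids, and error handling below are hypothetical.

```go
// Hypothetical runner-side client for the coordinator HTTP API sketched in
// main.go above. Only the endpoint paths and query parameters are taken from
// the patches; everything else is illustrative.
package main

import (
	"fmt"
	"io"
	"log"
	"net/http"
	"os"
)

// get issues a GET request and returns the body, treating any non-200 status
// as an error (mirroring the coordinator's plain-HTTP conventions).
func get(url string) ([]byte, error) {
	resp, err := http.Get(url)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("coordinator returned %s", resp.Status)
	}
	return io.ReadAll(resp.Body)
}

func main() {
	// e.g. "http://host/api/" -- the trailing slash matters, as the README notes.
	base := os.Getenv("COORDINATOR_API_URL")

	// Ask the coordinator to assign a batch and proxy its chunk data.
	chunks, err := get(base + "chunks")
	if err != nil {
		log.Fatalf("cannot fetch chunks: %v", err)
	}
	log.Printf("received %d bytes of chunk data", len(chunks))

	batchID := 42 // illustrative; the real index comes from the chunk payload
	chunkID := 7  // illustrative chunk index

	// Report completion, or use "tasks?drop=" to hand the batch back instead.
	if _, err := get(fmt.Sprintf("%stasks?done=%d", base, batchID)); err != nil {
		log.Printf("failed to report completion: %v", err)
	}

	// Report a failing chunk so the coordinator can relay it to Slack.
	if _, err := get(fmt.Sprintf("%snodewarning?chunk_issue=%d", base, chunkID)); err != nil {
		log.Printf("failed to report a chunk issue: %v", err)
	}
}
```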
+ ++ Currently the messages from runners are proxied by the coordinator, so a single point of failure may cause loss of node messages. We can +launch multiple coordinator groups dedicated to node message relaying (but we still need a single instance for assigning and maintaining +tasks) + ++ Any network outage that prevents the runner from accessing l2geth causes the runner to stop handling the current batch and lose considerable +effort (for mock proving, the work is expected to take about 1 hour) + + +## Docker ## + build `Dockerfile.run_testnet` in parent directory + docker run -e L2GETH_API_URL=\ -e COORDINATOR_API_URL=\ -e TESTNET_TASKS=mock,prove,agg \ diff --git a/run-testnet/src/main.rs b/run-testnet/src/main.rs index cdacd489f..03c8dad01 100644 --- a/run-testnet/src/main.rs +++ b/run-testnet/src/main.rs @@ -106,17 +106,6 @@ fn mark_chunk_failure(chunk_dir: &str, data: &str) -> Result<()> { Ok(()) } -fn chunk_handling(batch_id: i64, chunk_id: i64, block_traces: &[BlockTrace]) -> Result<()> { - let witness_block = build_block(block_traces, batch_id, chunk_id) - .map_err(|e| anyhow::anyhow!("testnet: building block failed {e:?}"))?; - - Prover::::mock_prove_witness_block(&witness_block).map_err(|e| { - anyhow::anyhow!("testnet: failed to prove chunk {chunk_id} inside batch {batch_id}:\n{e:?}") - })?; - - Ok(()) -} - const EXIT_NO_MORE_TASK: u8 = 9; const EXIT_FAILED_ENV: u8 = 13; const EXIT_FAILED_ENV_WITH_TASK: u8 = 17; @@ -245,14 +234,36 @@ async fn main() -> ExitCode { let out_err = handling_error.clone(); let handling_ret = panic::catch_unwind(move || { + let witness_block = build_block(&block_traces, batch_id as i64, chunk_id) + .map_err(|e| anyhow::anyhow!("testnet: building block failed {e:?}")); + + if let Err(e) = witness_block { + write_error(&out_err, format!("building block fail: {e:?}")); + return false; + } + let witness_block = witness_block.expect("has handled error"); + // mock if spec_tasks.iter().any(|str| str.as_str() == "mock") { - if let Err(e) = chunk_handling(batch_id as i64, chunk_id, &block_traces) { + if let Err(e) = Prover::::mock_prove_witness_block(&witness_block) + .map_err(|e| { + anyhow::anyhow!("testnet: failed to prove chunk {chunk_id} inside batch {batch_id}:\n{e:?}") + }) + { + write_error(&out_err, format!("chunk handling fail: {e:?}")); + return false; + } + } + + // prove + if spec_tasks.iter().any(|str| str.as_str() == "prove") { + // TODO: add prove code here + let prove_ret: Result<()> = Ok(()); + if let Err(e) = prove_ret { write_error(&out_err, format!("chunk handling fail: {e:?}")); return false; } } - // TODO: prove true }); From cf8da8b4554867cc21578c5cd93938551b631dc5 Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Fri, 25 Aug 2023 12:18:36 +0800 Subject: [PATCH 31/35] fix; batch index can be -1 --- run-testnet/src/main.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/run-testnet/src/main.rs b/run-testnet/src/main.rs index 03c8dad01..aa193beec 100644 --- a/run-testnet/src/main.rs +++ b/run-testnet/src/main.rs @@ -234,7 +234,7 @@ async fn main() -> ExitCode { let out_err = handling_error.clone(); let handling_ret = panic::catch_unwind(move || { - let witness_block = build_block(&block_traces, batch_id as i64, chunk_id) + let witness_block = build_block(&block_traces, batch_id, chunk_id) .map_err(|e| anyhow::anyhow!("testnet: building block failed {e:?}")); if let Err(e) = witness_block { @@ -291,7 +291,7 @@ } } - if let Err(e) = notify_chunks_complete(&setting, batch_id as i64, 
chunks_task_complete).await { + if let Err(e) = notify_chunks_complete(&setting, batch_id, chunks_task_complete).await { log::error!("can not deliver complete notify to coordinator: {e:?}"); return ExitCode::from(EXIT_FAILED_ENV_WITH_TASK); } @@ -336,7 +336,7 @@ fn build_block( } /// Request chunk info from cordinator -async fn get_chunks_info(setting: &Setting) -> Result<(usize, Option>)> { +async fn get_chunks_info(setting: &Setting) -> Result<(i64, Option>)> { let url = Url::parse(&setting.chunks_url)?; let resp: String = reqwest::get(url).await?.text().await?; @@ -375,7 +375,7 @@ async fn notify_chunks_complete( #[derive(Deserialize, Debug)] struct RollupscanResponse { - batch_index: usize, + batch_index: i64, chunks: Option>, } From 0b22c7e45904cb87bfb48422a11e5d4d211df349 Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Fri, 25 Aug 2023 16:49:09 +0800 Subject: [PATCH 32/35] run-testnet: fix issue in coordinator --- run-testnet/testnet_coordinator/task_assign.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/run-testnet/testnet_coordinator/task_assign.go b/run-testnet/testnet_coordinator/task_assign.go index c14b75f4b..19b2cfd55 100644 --- a/run-testnet/testnet_coordinator/task_assign.go +++ b/run-testnet/testnet_coordinator/task_assign.go @@ -42,7 +42,7 @@ func (t *TaskAssigner) assign_new() uint64 { t.Lock() defer t.Unlock() - used := t.begin_with + used := t.progress for tid, status := range t.runingTasks { if status == TaskReAssign { t.runingTasks[tid] = TaskAssigned From dd5164a8de44dd5e5fbeb6d0e4ae5c9e6a79ed41 Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Fri, 25 Aug 2023 17:54:13 +0800 Subject: [PATCH 33/35] add issue handliing --- run-testnet/run.sh | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/run-testnet/run.sh b/run-testnet/run.sh index 25672bbc4..0d4983a4e 100755 --- a/run-testnet/run.sh +++ b/run-testnet/run.sh @@ -38,6 +38,14 @@ if [ ! -d "$output_dir" ]; then echo "Directory $output_dir created." fi + +issue_dir="${ISSUE_DIR:-issues}" + +if [ ! -d "$issue_dir" ]; then + echo "issue dir must be created before running" + exit 1 +fi + # A function representing your command 'a' function debug_run { cargo run --bin testnet-runner --release @@ -54,6 +62,7 @@ function check_output { chunk_name=`echo "$chunk_dir" | grep -oE '[^/]+$'` echo "${chunk_name} fail (${chunk_dir})" curl -s "${COORDINATOR_API_URL}nodewarning?chunk_issue=${chunk_name}" + mv ${chunk_dir} ${issue_dir} fi done set +e @@ -63,7 +72,7 @@ set +e while true; do # clean output dir before each running rm -rf ${output_dir}/* - if [ -z "${DEBUG_RUN:-}"]; then + if [ -z "${DEBUG_RUN:-}" ]; then testnet-runner exit_code=$? 
else From 73612f566349ca495e3bba836e127c88300a7660 Mon Sep 17 00:00:00 2001 From: Zhang Zhuo Date: Mon, 28 Aug 2023 11:00:02 +0800 Subject: [PATCH 34/35] develop branch uses zkevm-circuits/decelop (#248) * run * upgrade zkevm-circuits * fix tx circuit ccc --- Cargo.lock | 20 ++++++++++---------- prover/Cargo.toml | 12 ++++++------ prover/src/zkevm/capacity_checker.rs | 5 +++-- prover/src/zkevm/circuit/builder.rs | 23 ++++++++++++++++++++--- types/Cargo.toml | 2 +- 5 files changed, 40 insertions(+), 22 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index cd40d7b59..267f46e5e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -23,7 +23,7 @@ dependencies = [ [[package]] name = "aggregator" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#9c273c5028db44702094ad3203d9ec9b311d6d8f" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=develop#a74ba4211e6178d623279abc5a8e61231189f173" dependencies = [ "ark-std", "env_logger 0.10.0", @@ -419,7 +419,7 @@ checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" [[package]] name = "bus-mapping" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#9c273c5028db44702094ad3203d9ec9b311d6d8f" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=develop#a74ba4211e6178d623279abc5a8e61231189f173" dependencies = [ "eth-types", "ethers-core 0.17.0", @@ -1123,7 +1123,7 @@ dependencies = [ [[package]] name = "eth-types" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#9c273c5028db44702094ad3203d9ec9b311d6d8f" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=develop#a74ba4211e6178d623279abc5a8e61231189f173" dependencies = [ "ethers-core 0.17.0", "ethers-signers", @@ -1388,7 +1388,7 @@ dependencies = [ [[package]] name = "external-tracer" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#9c273c5028db44702094ad3203d9ec9b311d6d8f" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=develop#a74ba4211e6178d623279abc5a8e61231189f173" dependencies = [ "eth-types", "geth-utils", @@ -1613,7 +1613,7 @@ dependencies = [ [[package]] name = "gadgets" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#9c273c5028db44702094ad3203d9ec9b311d6d8f" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=develop#a74ba4211e6178d623279abc5a8e61231189f173" dependencies = [ "digest 0.7.6", "eth-types", @@ -1653,7 +1653,7 @@ dependencies = [ [[package]] name = "geth-utils" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#9c273c5028db44702094ad3203d9ec9b311d6d8f" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=develop#a74ba4211e6178d623279abc5a8e61231189f173" dependencies = [ "env_logger 0.9.3", "gobuild 0.1.0-alpha.2 (git+https://github.com/scroll-tech/gobuild.git)", @@ -2259,7 +2259,7 @@ dependencies = [ [[package]] name = "keccak256" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#9c273c5028db44702094ad3203d9ec9b311d6d8f" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=develop#a74ba4211e6178d623279abc5a8e61231189f173" 
dependencies = [ "env_logger 0.9.3", "eth-types", @@ -2459,7 +2459,7 @@ dependencies = [ [[package]] name = "mock" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#9c273c5028db44702094ad3203d9ec9b311d6d8f" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=develop#a74ba4211e6178d623279abc5a8e61231189f173" dependencies = [ "eth-types", "ethers-core 0.17.0", @@ -2474,7 +2474,7 @@ dependencies = [ [[package]] name = "mpt-zktrie" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#9c273c5028db44702094ad3203d9ec9b311d6d8f" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=develop#a74ba4211e6178d623279abc5a8e61231189f173" dependencies = [ "eth-types", "halo2-mpt-circuits", @@ -4783,7 +4783,7 @@ checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" [[package]] name = "zkevm-circuits" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=feat/supercircuit_test_by_l2trace#9c273c5028db44702094ad3203d9ec9b311d6d8f" +source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=develop#a74ba4211e6178d623279abc5a8e61231189f173" dependencies = [ "array-init", "bus-mapping", diff --git a/prover/Cargo.toml b/prover/Cargo.toml index ea0579d8a..425741bf5 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -8,12 +8,12 @@ edition = "2021" [dependencies] halo2_proofs = { git = "https://github.com/privacy-scaling-explorations/halo2.git", tag = "v2023_02_02" } -aggregator = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "feat/supercircuit_test_by_l2trace" } -bus-mapping = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "feat/supercircuit_test_by_l2trace" } -eth-types = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "feat/supercircuit_test_by_l2trace" } -zkevm-circuits = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "feat/supercircuit_test_by_l2trace", default-features = false, features = ["test","scroll","scroll-trace","shanghai"] } -mpt-zktrie = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "feat/supercircuit_test_by_l2trace" } -mock = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "feat/supercircuit_test_by_l2trace" } +aggregator = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "develop" } +bus-mapping = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "develop" } +eth-types = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "develop" } +zkevm-circuits = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "develop", default-features = false, features = ["test","scroll","scroll-trace","shanghai"] } +mpt-zktrie = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "develop" } +mock = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "develop" } snark-verifier = { git = "https://github.com/scroll-tech/snark-verifier", tag = "v0.1.2" } snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", tag = "v0.1.2" } diff --git a/prover/src/zkevm/capacity_checker.rs b/prover/src/zkevm/capacity_checker.rs index b54ac0e72..b257b41cd 100644 --- a/prover/src/zkevm/capacity_checker.rs +++ b/prover/src/zkevm/capacity_checker.rs @@ -54,7 +54,7 @@ impl RowUsage { MAX_BYTECODE, // bytecode 
MAX_RWS, // copy MAX_KECCAK_ROWS, // keccak - MAX_CALLDATA, // tx + MAX_VERTICLE_ROWS, // tx MAX_CALLDATA, // rlp 7 * MAX_EXP_STEPS, // exp MAX_KECCAK_ROWS, // modexp @@ -167,13 +167,14 @@ impl CircuitCapacityChecker { builder_block.prev_state_root = H256(*mpt_state.root()).to_word(); let mut builder = CircuitInputBuilder::new_with_trie_state(sdb, code_db, mpt_state, &builder_block); - builder.add_more_l2_trace(&txs[0], txs.len() > 1)?; + builder.add_more_l2_trace(&txs[0], txs.len() > 1, true)?; builder } else { CircuitInputBuilder::new_from_l2_trace( get_super_circuit_params(), &txs[0], txs.len() > 1, + true, )? }; let traces = &txs[1..]; diff --git a/prover/src/zkevm/circuit/builder.rs b/prover/src/zkevm/circuit/builder.rs index 6406a5c08..7f56ac472 100644 --- a/prover/src/zkevm/circuit/builder.rs +++ b/prover/src/zkevm/circuit/builder.rs @@ -47,8 +47,8 @@ pub fn get_super_circuit_params() -> CircuitsParams { max_bytecode: MAX_BYTECODE, max_inner_blocks: MAX_INNER_BLOCKS, max_keccak_rows: MAX_KECCAK_ROWS, - // max_poseidon_rows: MAX_POSEIDON_ROWS, - // max_vertical_circuit_rows: MAX_VERTICLE_ROWS, + max_poseidon_rows: MAX_POSEIDON_ROWS, + max_vertical_circuit_rows: MAX_VERTICLE_ROWS, max_exp_steps: MAX_EXP_STEPS, max_mpt_rows: MAX_MPT_ROWS, max_rlp_rows: MAX_CALLDATA, @@ -257,6 +257,7 @@ pub fn block_traces_to_witness_block(block_traces: &[BlockTrace]) -> Result 1, + false, )?; block_traces_to_witness_block_with_updated_state(&block_traces[1..], &mut builder, false) } @@ -272,18 +273,30 @@ pub fn block_traces_to_padding_witness_block(block_traces: &[BlockTrace]) -> Res // the only purpose here it to get the final zktrie state and // proof for withdraw root let mut padding_builder = if block_traces.is_empty() { + log::debug!("preparing default builder"); prepare_default_builder(H256::zero(), None) } else { let start_l1_queue_index = block_traces[0].start_l1_queue_index; + log::debug!( + "new from l2 trace, block num {:?}", + block_traces[0].header.number + ); let mut builder = CircuitInputBuilder::new_from_l2_trace( get_super_circuit_params(), &block_traces[0], block_traces.len() > 1, + false, )?; for (idx, block_trace) in block_traces[1..].iter().enumerate() { + log::debug!( + "adding more l2 trace block_trace idx {}, block num {:?}", + idx + 1, + block_trace.header.number + ); builder.add_more_l2_trace( block_trace, idx + 2 == block_traces.len(), //not typo, we use 1..end of the traces only + false, )?; } builder.finalize_building()?; @@ -361,7 +374,11 @@ pub fn block_traces_to_witness_block_with_updated_state( for (idx, block_trace) in block_traces.iter().enumerate() { let is_last = idx == block_traces.len() - 1; - builder.add_more_l2_trace(block_trace, !is_last)?; + log::debug!( + "add_more_l2_trace idx {idx}, block num {:?}", + block_trace.header.number + ); + builder.add_more_l2_trace(block_trace, !is_last, false)?; if per_block_metric { metric(builder, idx + initial_blk_index)?; } diff --git a/types/Cargo.toml b/types/Cargo.toml index 625918906..f5a7164ff 100644 --- a/types/Cargo.toml +++ b/types/Cargo.toml @@ -4,7 +4,7 @@ version = "0.7.5" edition = "2021" [dependencies] -eth-types = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "feat/supercircuit_test_by_l2trace" } +eth-types = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "develop" } base64 = "0.13.0" blake2 = "0.10.3" ethers-core = "0.17.0" From 9a6392fc1edfe04012f69238056a6f6edee24193 Mon Sep 17 00:00:00 2001 From: Ho Vei Date: Mon, 28 Aug 2023 14:23:33 +0800 Subject: [PATCH 
35/35] refine coordinator --- run-testnet/run.sh | 17 +++++-- run-testnet/testnet_coordinator/config.go | 6 ++- .../testnet_coordinator/config.yaml.example | 2 + run-testnet/testnet_coordinator/main.go | 51 +++++++++++++++++-- run-testnet/testnet_coordinator/notifier.go | 38 +++++++------- .../testnet_coordinator/task_assign.go | 29 +++++++++-- 6 files changed, 110 insertions(+), 33 deletions(-) diff --git a/run-testnet/run.sh b/run-testnet/run.sh index 0d4983a4e..530c854e7 100755 --- a/run-testnet/run.sh +++ b/run-testnet/run.sh @@ -12,17 +12,24 @@ fi function exit_trap { reason="unknown_error" if [ $1 -eq 17 ]; then - reason=runtime_error_with_batch_stuck + reason="runtime_error, batch_stuck" elif [ $1 -eq 13 ]; then # wrong runtime reason=runtime_error + elif [ $1 -eq 1 ]; then + # unexpected quit + reason="unexpected_error, batch_stuck" elif [ $1 -eq 0 ]; then return fi + + if [ -z "${SCRIPT_ERROR:-}" ]; then + reason="${reason}, script error" + fi curl -s ${COORDINATOR_API_URL}nodewarning?panic=${reason} } -trap "curl -s ${COORDINATOR_API_URL}nodewarning?panic=script_error" ERR +trap "SCRIPT_ERROR=1" ERR trap 'exit_trap $?' EXIT trap "curl -s ${COORDINATOR_API_URL}nodewarning?panic=user_interrupt" SIGINT @@ -83,10 +90,14 @@ while true; do check_output echo "checking output done" sleep 10 + exit_code=$? elif [ $exit_code -eq 9 ]; then # there maybe more batchs, wait 10 min sleep 600 - else + exit_code=$? + fi + + if [ $exit_code -ne 0 ]; then exit $exit_code fi done \ No newline at end of file diff --git a/run-testnet/testnet_coordinator/config.go b/run-testnet/testnet_coordinator/config.go index 5feb3f10a..703c9e0a3 100644 --- a/run-testnet/testnet_coordinator/config.go +++ b/run-testnet/testnet_coordinator/config.go @@ -1,8 +1,8 @@ package main import ( - "io/ioutil" "log" + "os" "gopkg.in/yaml.v3" ) @@ -14,6 +14,8 @@ type ServerConfig struct { type Config struct { StartBatch uint64 `yaml:"start,omitempty"` + GroupID int `yaml:"group_id,omitempty"` + ProxyOnly bool `yaml:"proxy_only,omitempty"` ChunkURLTemplate string `yaml:"chunkURL"` NotifierURL string `yaml:"notifierURL"` Server *ServerConfig `yaml:"server,omitempty"` @@ -34,7 +36,7 @@ func (cfg *Config) LoadEnv(path string) error { func (cfg *Config) Load(path string) error { - data, err := ioutil.ReadFile(path) + data, err := os.ReadFile(path) if err != nil { return err } diff --git a/run-testnet/testnet_coordinator/config.yaml.example b/run-testnet/testnet_coordinator/config.yaml.example index 35a140fe7..7776c63fe 100644 --- a/run-testnet/testnet_coordinator/config.yaml.example +++ b/run-testnet/testnet_coordinator/config.yaml.example @@ -1,6 +1,8 @@ # start: 0 #batch start from +# proxy_only: true #only proxy node message +# group_id: 0 #would be send in channel notification chunkURL: http:///api/chunks?batch_index=%d # notifierURL: server: diff --git a/run-testnet/testnet_coordinator/main.go b/run-testnet/testnet_coordinator/main.go index 477966b05..b01fc525a 100644 --- a/run-testnet/testnet_coordinator/main.go +++ b/run-testnet/testnet_coordinator/main.go @@ -1,8 +1,9 @@ package main import ( + "encoding/json" "fmt" - "io/ioutil" + "io" "log" "net/http" ) @@ -24,7 +25,7 @@ func readSrcUrl(url string) ([]byte, error) { return nil, UpstreamError(resp.StatusCode) } - return ioutil.ReadAll(resp.Body) + return io.ReadAll(resp.Body) } func main() { @@ -34,7 +35,12 @@ func main() { log.Fatalf("Error reading config file: %v", err) } - taskAssigner := construct(serverConfig.StartBatch).setMessenger(serverConfig.NotifierURL) + 
taskAssigner := construct(serverConfig.StartBatch).setMessenger(serverConfig.NotifierURL, serverConfig.GroupID) + + if serverConfig.ProxyOnly { + log.Println("stop assignment for proxy-only service") + taskAssigner.stopAssignment(true) + } http.HandleFunc( serverConfig.Server.ServerURL+"/chunks", @@ -61,9 +67,33 @@ func main() { } } +type apiDataHead struct { + BatchIndex int64 `json:"batch_index,omitempty"` + ChunkIndex int64 `json:"chunk_index,omitempty"` +} + func chunksHandler(assigner *TaskAssigner, url_template string) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { + disable_spec := r.URL.Query().Get("stop") + if disable_spec != "" { + switch disable_spec { + case "yes": + assigner.stopAssignment(true) + case "no": + assigner.stopAssignment(false) + default: + http.Error(w, "should be yes or no", http.StatusBadRequest) + return + } + w.WriteHeader(http.StatusOK) + return + } else if assigner.isStopped() { + log.Println("stop assignment") + http.Error(w, "assignment stopped", http.StatusForbidden) + return + } + assigned_done := false assigned := assigner.assign_new() defer func(agent string) { @@ -84,12 +114,21 @@ func chunksHandler(assigner *TaskAssigner, url_template string) http.HandlerFunc return } + testHead := new(apiDataHead) + if err := json.Unmarshal(resp, testHead); err != nil { + log.Println("Testing resp head fail, must given up", err) + http.Error(w, "Resp is invalid", http.StatusInternalServerError) + return + } + _, err = w.Write(resp) if err != nil { log.Printf("Error writing response: %v\n", err) return } - assigned_done = true + + //assignment is not counted if resp contains unexpected index (often -1 for out of range) + assigned_done = testHead.BatchIndex == int64(assigned) || testHead.ChunkIndex == int64(assigned) } } @@ -155,7 +194,9 @@ func nodeProxyHandler(assigner *TaskAssigner) http.HandlerFunc { func statusHandler(assigner *TaskAssigner) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { - ret := fmt.Sprintf("%v", assigner.status()) + status, rng := assigner.status() + + ret := fmt.Sprintf("{%d-%d}, activing: %v", rng[0], rng[1], status) if _, err := w.Write([]byte(ret)); err != nil { log.Println("unexpected output of status", err) } diff --git a/run-testnet/testnet_coordinator/notifier.go b/run-testnet/testnet_coordinator/notifier.go index c2b789224..051fa6744 100644 --- a/run-testnet/testnet_coordinator/notifier.go +++ b/run-testnet/testnet_coordinator/notifier.go @@ -6,9 +6,13 @@ import ( "fmt" "log" "net/http" + "strings" ) -type notifier string +type notifier struct { + api string + coordinator_id int +} type slackData struct { Text string `json:"text"` @@ -16,14 +20,6 @@ type slackData struct { Icon string `json:"icon_emoji,omitempty"` } -func getSlackData(text string, agent string) *slackData { - return &slackData{ - Text: text, - Agent: fmt.Sprintf("Testnet-%s", agent), - Icon: "eyes", - } -} - func notifySlackChannel(url string, slack *slackData) (string, error) { data, err := json.Marshal(slack) @@ -46,23 +42,23 @@ const COORDINATOR_COMMON = ":white_check_mark:" const COORDINATOR_GOODJOB = ":tada:" const COORDINATOR_BADNEWS = ":tired_face:" -var agentData = map[string]string{ - COORDINATOR_COMMON: "Testnet-coordinator", - COORDINATOR_BADNEWS: "Oh no ...", - COORDINATOR_GOODJOB: "Congraduations!", +var agentDataTemplate = map[string]string{ + COORDINATOR_COMMON: "Testnet-coordinator %d", + COORDINATOR_BADNEWS: "Oh no ... 
(from coordinator %d)", + COORDINATOR_GOODJOB: "Coordinator %d: Congraduations!", } -func (n notifier) coordinatorNotify(txt string, icon string) error { - if n == "" { +func (n *notifier) coordinatorNotify(txt string, icon string) error { + if n.api == "" { return nil } if icon == "" { icon = COORDINATOR_COMMON } - resp, err := notifySlackChannel(string(n), &slackData{ + resp, err := notifySlackChannel(n.api, &slackData{ Text: txt, - Agent: agentData[icon], + Agent: fmt.Sprintf(agentDataTemplate[icon], n.coordinator_id), Icon: icon, }) if err == nil { @@ -72,13 +68,15 @@ func (n notifier) coordinatorNotify(txt string, icon string) error { } func (n notifier) nodeProxyNotify(node string, txt string) error { - if n == "" { + if n.api == "" { return nil } - resp, err := notifySlackChannel(string(n), &slackData{ + node_names := strings.Split(node, ":") + + resp, err := notifySlackChannel(n.api, &slackData{ Text: txt, - Agent: fmt.Sprintf("Testnet-%s", node), + Agent: fmt.Sprintf("Testnet-%s", node_names[0]), Icon: ":scream:", }) if err == nil { diff --git a/run-testnet/testnet_coordinator/task_assign.go b/run-testnet/testnet_coordinator/task_assign.go index 19b2cfd55..833c24036 100644 --- a/run-testnet/testnet_coordinator/task_assign.go +++ b/run-testnet/testnet_coordinator/task_assign.go @@ -17,6 +17,7 @@ const TaskReAssign TaskStatus = 2 type TaskAssigner struct { sync.Mutex notifier + stop_assign bool begin_with uint64 progress uint64 runingTasks map[uint64]TaskStatus @@ -30,13 +31,29 @@ func construct(start uint64) *TaskAssigner { } } -func (t *TaskAssigner) setMessenger(url string) *TaskAssigner { +func (t *TaskAssigner) setMessenger(url string, id int) *TaskAssigner { t.Lock() defer t.Unlock() - t.notifier = notifier(url) + t.notifier = notifier{ + api: url, + coordinator_id: id, + } return t } +func (t *TaskAssigner) stopAssignment(stop bool) { + t.Lock() + defer t.Unlock() + t.stop_assign = stop +} + +func (t *TaskAssigner) isStopped() bool { + + t.Lock() + defer t.Unlock() + return t.stop_assign +} + func (t *TaskAssigner) assign_new() uint64 { t.Lock() @@ -124,15 +141,21 @@ func (t *TaskAssigner) complete(id uint64) (bool, uint64) { return nowProg > t.progress, nowProg } -func (t *TaskAssigner) status() (result []uint64) { +func (t *TaskAssigner) status() (result []uint64, workRange [2]uint64) { t.Lock() defer t.Unlock() + workRange[0] = t.progress + workRange[1] = t.progress + for id, status := range t.runingTasks { if status != TaskCompleted { result = append(result, id) } + if id >= workRange[1] { + workRange[1] = id + } } return
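As a closing aside, the contiguous-progress rule used by `TaskAssigner.complete` above (out-of-order completions are recorded, but the progress watermark only advances once every earlier batch is done) can be illustrated with a small self-contained sketch. The `tracker` type below is a simplified, hypothetical stand-in written for illustration; it is not the coordinator's actual code.

```go
// Simplified illustration of the contiguous-progress rule in task_assign.go:
// completed ids are collected, sorted, and the watermark only moves forward
// while the next expected id has been completed.
package main

import (
	"fmt"
	"sort"
)

type tracker struct {
	progress uint64          // next batch the coordinator is waiting for
	done     map[uint64]bool // completed batches not yet folded into progress
}

func (t *tracker) complete(id uint64) uint64 {
	t.done[id] = true

	ids := make([]uint64, 0, len(t.done))
	for k := range t.done {
		ids = append(ids, k)
	}
	sort.Slice(ids, func(i, j int) bool { return ids[i] < ids[j] })

	// Advance the watermark only over a contiguous run of completed batches.
	for _, k := range ids {
		if k == t.progress {
			delete(t.done, k)
			t.progress++
		}
	}
	return t.progress
}

func main() {
	t := &tracker{progress: 5, done: map[uint64]bool{}}
	fmt.Println(t.complete(6)) // 5: batch 5 is still outstanding
	fmt.Println(t.complete(7)) // 5: still waiting for 5
	fmt.Println(t.complete(5)) // 8: 5, 6 and 7 are now contiguous
}
```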