From e7eee631237fdd2ff2f235a43033c4010aa991d2 Mon Sep 17 00:00:00 2001
From: Nisheeth Barthwal
Date: Mon, 11 Sep 2023 12:19:15 +0200
Subject: [PATCH 01/12] implement caching for well known methods

---
 Cargo.lock              |   1 +
 Cargo.toml              |   1 +
 src/cache.rs            |  20 ++++++
 src/http_fork_source.rs | 132 ++++++++++++++++++++++++++++++++++------
 src/lib.rs              |   1 +
 5 files changed, 138 insertions(+), 17 deletions(-)
 create mode 100644 src/cache.rs

diff --git a/Cargo.lock b/Cargo.lock
index 44d9e2eb..e5bdae41 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1795,6 +1795,7 @@ dependencies = [
  "once_cell",
  "openssl-sys",
  "reqwest",
+ "rustc-hash",
  "serde",
  "serde_json",
  "simplelog",
diff --git a/Cargo.toml b/Cargo.toml
index 2c02e1da..8b6df54c 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -47,6 +47,7 @@ ethabi = "16.0.0"
 itertools = "0.10.5"
 log = "0.4.20"
 simplelog = "0.12.1"
+rustc-hash = "1.1.0"
 
 [dev-dependencies]
 httptest = "0.15.4"
diff --git a/src/cache.rs b/src/cache.rs
new file mode 100644
index 00000000..1797504d
--- /dev/null
+++ b/src/cache.rs
@@ -0,0 +1,20 @@
+use zksync_basic_types::H256;
+use zksync_types::api::{Block, Transaction, TransactionVariant};
+use zksync_types::Transaction as RawTransaction;
+use rustc_hash::FxHashMap;
+
+
+#[derive(Default, Debug, Clone)]
+pub(crate) struct Cache {
+    pub(crate) block_hashes: FxHashMap<u64, H256>,
+    pub(crate) blocks_full: FxHashMap<H256, Block<TransactionVariant>>,
+    pub(crate) blocks_min: FxHashMap<H256, Block<TransactionVariant>>,
+    pub(crate) raw_block_transactions: FxHashMap<H256, Vec<RawTransaction>>,
+    pub(crate) transactions: FxHashMap<H256, Transaction>,
+}
+
+impl Cache {
+    pub (crate) fn new() -> Self {
+        Self::default()
+    }
+}
\ No newline at end of file
diff --git a/src/http_fork_source.rs b/src/http_fork_source.rs
index 33a00b55..69504353 100644
--- a/src/http_fork_source.rs
+++ b/src/http_fork_source.rs
@@ -4,13 +4,17 @@ use zksync_web3_decl::{
     namespaces::{EthNamespaceClient, ZksNamespaceClient},
 };
 
-use crate::fork::{block_on, ForkSource};
+use crate::{
+    cache::Cache,
+    fork::{block_on, ForkSource},
+};
 
 #[derive(Debug)]
 /// Fork source that gets the data via HTTP requests.
 pub struct HttpForkSource {
     /// URL for the network to fork.
pub fork_url: String, + cache: Cache, } impl HttpForkSource { @@ -46,18 +50,58 @@ impl ForkSource for HttpForkSource { &self, hash: zksync_basic_types::H256, ) -> eyre::Result> { - let client = self.create_client(); - block_on(async move { client.get_transaction_by_hash(hash).await }) - .wrap_err("fork http client failed") + self.cache + .transactions + .get(&hash) + .cloned() + .map(|value| Ok(Some(value))) + .unwrap_or_else(|| { + let client = self.create_client(); + block_on(async move { client.get_transaction_by_hash(hash).await }) + .wrap_err("fork http client failed") + .and_then(|result| { + if let Some(transaction) = &result { + self.cache.transactions.insert(hash, transaction.clone()); + } + Ok(result) + }) + }) } fn get_raw_block_transactions( &self, block_number: zksync_basic_types::MiniblockNumber, ) -> eyre::Result> { - let client = self.create_client(); - block_on(async move { client.get_raw_block_transactions(block_number).await }) - .wrap_err("fork http client failed") + let mut block_hash = zksync_basic_types::H256::zero(); + let mut block_number_mapped = false; + + self.cache + .block_hashes + .get(&(block_number.0 as u64)) + .and_then(|hash| { + block_number_mapped = true; + block_hash = *hash; + self.cache.raw_block_transactions.get(hash) + }) + .cloned() + .map(|value| Ok(value)) + .unwrap_or_else(|| { + let client = self.create_client(); + block_on(async move { client.get_raw_block_transactions(block_number).await }) + .wrap_err("fork http client failed") + .and_then(|result| { + if !block_number_mapped { + self.cache + .block_hashes + .insert(block_number.0 as u64, block_hash); + } + + self.cache + .raw_block_transactions + .insert(block_hash, result.clone()); + Ok(result) + }) + }) } fn get_block_by_hash( @@ -65,9 +109,27 @@ impl ForkSource for HttpForkSource { hash: zksync_basic_types::H256, full_transactions: bool, ) -> eyre::Result>> { - let client = self.create_client(); - block_on(async move { client.get_block_by_hash(hash, full_transactions).await }) - .wrap_err("fork http client failed") + let mut cache = if full_transactions { + self.cache.blocks_full + } else { + self.cache.blocks_min + }; + + cache + .get(&hash) + .cloned() + .map(|value| Ok(Some(value))) + .unwrap_or_else(|| { + let client = self.create_client(); + block_on(async move { client.get_block_by_hash(hash, full_transactions).await }) + .wrap_err("fork http client failed") + .and_then(|result| { + if let Some(transaction) = &result { + cache.insert(hash, transaction.clone()); + } + Ok(result) + }) + }) } fn get_block_by_number( @@ -75,12 +137,48 @@ impl ForkSource for HttpForkSource { block_number: zksync_types::api::BlockNumber, full_transactions: bool, ) -> eyre::Result>> { - let client = self.create_client(); - block_on(async move { - client - .get_block_by_number(block_number, full_transactions) - .await - }) - .wrap_err("fork http client failed") + let number = match block_number { + zksync_types::api::BlockNumber::Number(block_number) => Some(block_number), + _ => None, + }; + let mut cache = if full_transactions { + self.cache.blocks_full + } else { + self.cache.blocks_min + }; + + let mut block_hash = zksync_basic_types::H256::zero(); + let mut block_number_mapped = false; + number + .and_then(|number| { + self.cache.block_hashes.get(&number.as_u64()).map(|hash| { + block_number_mapped = true; + block_hash = *hash; + *hash + }) + }) + .and_then(|hash| cache.get(&hash)) + .cloned() + .map(|value| Ok(Some(value))) + .unwrap_or_else(|| { + let client = self.create_client(); + block_on(async 
move { + client + .get_block_by_number(block_number, full_transactions) + .await + }) + .wrap_err("fork http client failed") + .and_then(|result| { + if !block_number_mapped { + if let Some(number) = number { + self.cache.block_hashes.insert(number.as_u64(), block_hash); + } + } + if let Some(block) = &result { + cache.insert(block_hash, block.clone()); + } + Ok(result) + }) + }) } } diff --git a/src/lib.rs b/src/lib.rs index f629b341..937d2e07 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -55,3 +55,4 @@ pub mod utils; pub mod zks; mod testing; +mod cache; From 1132e22f88747725c513514501093aa221812728 Mon Sep 17 00:00:00 2001 From: Nisheeth Barthwal Date: Mon, 11 Sep 2023 14:30:28 +0200 Subject: [PATCH 02/12] add fs support --- src/cache.rs | 60 +++++++++++++++++++++++++++++++++++++---- src/http_fork_source.rs | 4 +-- 2 files changed, 57 insertions(+), 7 deletions(-) diff --git a/src/cache.rs b/src/cache.rs index 1797504d..5d65c6fa 100644 --- a/src/cache.rs +++ b/src/cache.rs @@ -1,20 +1,70 @@ +use eyre::Result; +use rustc_hash::FxHashMap; +use serde::Serialize; use zksync_basic_types::H256; use zksync_types::api::{Block, Transaction, TransactionVariant}; use zksync_types::Transaction as RawTransaction; -use rustc_hash::FxHashMap; +const CACHE_DIR: &'static str = ".cache"; #[derive(Default, Debug, Clone)] pub(crate) struct Cache { - pub(crate) block_hashes: FxHashMap, + pub(crate) block_hashes: FxHashMap, pub(crate) blocks_full: FxHashMap>, pub(crate) blocks_min: FxHashMap>, - pub(crate) raw_block_transactions: FxHashMap>, + pub(crate) block_raw_transactions: FxHashMap>, pub(crate) transactions: FxHashMap, } impl Cache { - pub (crate) fn new() -> Self { + pub(crate) fn new() -> Self { Self::default() } -} \ No newline at end of file + + pub(crate) fn get_block( + &self, + hash: &H256, + full_transactions: bool, + ) -> Option<&Block> { + if full_transactions { + self.blocks_full.get(hash) + } else { + self.blocks_min.get(hash) + } + } + + pub(crate) fn insert_block( + &self, + hash: H256, + full_transactions: bool, + block: Block, + ) { + if full_transactions { + self.blocks_full.insert(hash, block); + } else { + self.blocks_min.insert(hash, block); + } + } + + pub(crate) fn get_block_hash(&self, number: u64) -> Option<&H256> { + self.block_hashes.get(&number) + } + + pub(crate) fn insert_block_hash(&self, number: u64, hash: H256) { + self.block_hashes.insert(number, hash); + Self::write(format!("block-hashes/{number}"), serde_json::to_string(&hash).expect("failed encoding value").as_bytes()); + } + + pub(crate) fn get_block_raw_transactions(&self, hash: &H256) -> Option<&Vec> { + self.block_raw_transactions.get(&hash) + } + + pub(crate) fn insert_block_raw_transactions(&self, hash: H256, transactions: Vec) { + self.block_raw_transactions.insert(hash, transactions); + Self::write(format!("block-raw-transactions/{hash}"), transactions.as_bytes()); + } + + fn write(key: String, data: &[u8]) { + + } +} diff --git a/src/http_fork_source.rs b/src/http_fork_source.rs index 69504353..0ac817ac 100644 --- a/src/http_fork_source.rs +++ b/src/http_fork_source.rs @@ -81,7 +81,7 @@ impl ForkSource for HttpForkSource { .and_then(|hash| { block_number_mapped = true; block_hash = *hash; - self.cache.raw_block_transactions.get(hash) + self.cache.block_raw_transactions.get(hash) }) .cloned() .map(|value| Ok(value)) @@ -97,7 +97,7 @@ impl ForkSource for HttpForkSource { } self.cache - .raw_block_transactions + .block_raw_transactions .insert(block_hash, result.clone()); Ok(result) }) From 
4760db1aab218c294a66fc10c53243fda084ebdc Mon Sep 17 00:00:00 2001 From: Nisheeth Barthwal Date: Wed, 13 Sep 2023 12:33:59 +0200 Subject: [PATCH 03/12] cache network responses --- Cargo.lock | 20 ++ Cargo.toml | 1 + src/cache.rs | 477 ++++++++++++++++++++++++++++++++++++++-- src/fork.rs | 25 ++- src/http_fork_source.rs | 469 ++++++++++++++++++++++++++++++--------- src/lib.rs | 2 +- src/main.rs | 36 ++- src/node.rs | 61 +++-- src/testing.rs | 109 ++++++++- 9 files changed, 1048 insertions(+), 152 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e5bdae41..9d858ba3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1799,6 +1799,7 @@ dependencies = [ "serde", "serde_json", "simplelog", + "tempdir", "tokio", "tracing", "tracing-subscriber", @@ -5049,6 +5050,15 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2" +[[package]] +name = "remove_dir_all" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" +dependencies = [ + "winapi 0.3.9", +] + [[package]] name = "reqwest" version = "0.11.19" @@ -6143,6 +6153,16 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" +[[package]] +name = "tempdir" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15f2b5fb00ccdf689e0149d1b1b3c03fead81c2b37735d812fa8bddbbf41b6d8" +dependencies = [ + "rand 0.4.6", + "remove_dir_all", +] + [[package]] name = "tempfile" version = "3.8.0" diff --git a/Cargo.toml b/Cargo.toml index 8b6df54c..fe4d552d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -51,3 +51,4 @@ rustc-hash = "1.1.0" [dev-dependencies] httptest = "0.15.4" +tempdir = "0.3.7" diff --git a/src/cache.rs b/src/cache.rs index 5d65c6fa..16a1768d 100644 --- a/src/cache.rs +++ b/src/cache.rs @@ -1,31 +1,91 @@ -use eyre::Result; use rustc_hash::FxHashMap; use serde::Serialize; +use std::fs; +use std::fs::File; +use std::io::{BufReader, BufWriter}; +use std::path::Path; +use std::result::Result; +use std::str::FromStr; use zksync_basic_types::H256; use zksync_types::api::{Block, Transaction, TransactionVariant}; use zksync_types::Transaction as RawTransaction; -const CACHE_DIR: &'static str = ".cache"; +const CACHE_TYPE_BLOCKS_FULL: &'static str = "blocks_full"; +const CACHE_TYPE_BLOCKS_MIN: &'static str = "blocks_min"; +const CACHE_TYPE_BLOCK_RAW_TRANSACTIONS: &'static str = "block_raw_transactions"; +const CACHE_TYPE_TRANSACTIONS: &'static str = "transactions"; +/// Cache configuration. Can be one of: +/// +/// None : Caching is disabled +/// Memory : Caching is provided in-memory and not persisted across runs +/// Disk : Caching is persisted on disk in the provided directory and can be reset +#[derive(Debug, Clone)] +pub enum CacheConfig { + None, + Memory, + Disk { dir: String, reset: bool }, +} + +impl std::default::Default for CacheConfig { + fn default() -> Self { + CacheConfig::None + } +} + +/// A general purpose cache. 
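A rough sketch of how a caller would pick between the three cache modes described above, assuming only the `CacheConfig` enum introduced in this patch; the `mode`, `dir`, and `reset` parameters and the `select_cache_config` name are illustrative placeholders rather than code from the patch. The `main.rs` hunk later in this same patch performs the equivalent mapping from the `--cache`, `--cache-dir`, and `--reset-cache` CLI flags.

    use crate::cache::CacheConfig;

    fn select_cache_config(mode: &str, dir: &str, reset: bool) -> CacheConfig {
        match mode {
            // disable caching entirely
            "none" => CacheConfig::None,
            // keep entries only for the lifetime of the process
            "memory" => CacheConfig::Memory,
            // persist entries as JSON files under `dir`, optionally wiping it first
            _ => CacheConfig::Disk {
                dir: dir.to_string(),
                reset,
            },
        }
    }
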
#[derive(Default, Debug, Clone)] pub(crate) struct Cache { - pub(crate) block_hashes: FxHashMap, - pub(crate) blocks_full: FxHashMap>, - pub(crate) blocks_min: FxHashMap>, - pub(crate) block_raw_transactions: FxHashMap>, - pub(crate) transactions: FxHashMap, + config: CacheConfig, + block_hashes: FxHashMap, + blocks_full: FxHashMap>, + blocks_min: FxHashMap>, + block_raw_transactions: FxHashMap>, + transactions: FxHashMap, } impl Cache { - pub(crate) fn new() -> Self { - Self::default() + /// Creates a new cache with the provided config. + pub(crate) fn new(config: CacheConfig) -> Self { + let mut cache = Cache { + config: config.clone(), + ..Default::default() + }; + + if let CacheConfig::Disk { dir, reset } = &config { + if *reset { + fs::remove_dir_all(Path::new(dir)) + .unwrap_or_else(|err| eprintln!("failed removing cache from disk: {:?}", err)); + } + + for cache_type in [ + CACHE_TYPE_BLOCKS_FULL, + CACHE_TYPE_BLOCKS_MIN, + CACHE_TYPE_BLOCK_RAW_TRANSACTIONS, + CACHE_TYPE_TRANSACTIONS, + ] { + fs::create_dir_all(Path::new(dir).join(cache_type)).unwrap_or_else(|err| { + panic!("failed creating directory {}: {:?}", cache_type, err) + }); + } + cache + .read_all_from_disk(&dir) + .unwrap_or_else(|err| eprintln!("failed reading cache from disk: {:?}", err)); + } + + cache } + /// Returns the cached full/minimal block for the provided hash. pub(crate) fn get_block( &self, hash: &H256, full_transactions: bool, ) -> Option<&Block> { + if matches!(self.config, CacheConfig::None) { + return None; + } + if full_transactions { self.blocks_full.get(hash) } else { @@ -33,38 +93,415 @@ impl Cache { } } + /// Cache a full/minimal block for the provided hash. pub(crate) fn insert_block( - &self, + &mut self, hash: H256, full_transactions: bool, block: Block, ) { + if matches!(self.config, CacheConfig::None) { + return; + } + + self.block_hashes.insert(block.number.as_u64(), block.hash); if full_transactions { + self.write_to_disk(CACHE_TYPE_BLOCKS_FULL, format!("{:#x}", hash), &block); self.blocks_full.insert(hash, block); } else { + self.write_to_disk(CACHE_TYPE_BLOCKS_MIN, format!("{:#x}", hash), &block); self.blocks_min.insert(hash, block); } } - pub(crate) fn get_block_hash(&self, number: u64) -> Option<&H256> { + /// Returns the cached full/minimal block for the provided hash. + pub(crate) fn get_block_hash(&self, number: &u64) -> Option<&H256> { + if matches!(self.config, CacheConfig::None) { + return None; + } + self.block_hashes.get(&number) } - pub(crate) fn insert_block_hash(&self, number: u64, hash: H256) { - self.block_hashes.insert(number, hash); - Self::write(format!("block-hashes/{number}"), serde_json::to_string(&hash).expect("failed encoding value").as_bytes()); + /// Returns the cached raw transactions for the provided block number. + pub(crate) fn get_block_raw_transactions(&self, number: &u64) -> Option<&Vec> { + if matches!(self.config, CacheConfig::None) { + return None; + } + + self.block_raw_transactions.get(number) + } + + /// Cache the raw transactions for the provided block number. + pub(crate) fn insert_block_raw_transactions( + &mut self, + number: u64, + transactions: Vec, + ) { + if matches!(self.config, CacheConfig::None) { + return; + } + + self.write_to_disk( + CACHE_TYPE_BLOCK_RAW_TRANSACTIONS, + format!("{}", number), + &transactions, + ); + self.block_raw_transactions.insert(number, transactions); + } + + /// Returns the cached transaction for the provided hash. 
+ pub(crate) fn get_transaction(&self, hash: &H256) -> Option<&Transaction> { + if matches!(self.config, CacheConfig::None) { + return None; + } + + self.transactions.get(&hash) } - pub(crate) fn get_block_raw_transactions(&self, hash: &H256) -> Option<&Vec> { - self.block_raw_transactions.get(&hash) + /// Cache a transaction for the provided hash. + pub(crate) fn insert_transaction(&mut self, hash: H256, transaction: Transaction) { + if matches!(self.config, CacheConfig::None) { + return; + } + + self.write_to_disk( + CACHE_TYPE_TRANSACTIONS, + format!("{:#x}", hash), + &transaction, + ); + self.transactions.insert(hash, transaction); } - pub(crate) fn insert_block_raw_transactions(&self, hash: H256, transactions: Vec) { - self.block_raw_transactions.insert(hash, transactions); - Self::write(format!("block-raw-transactions/{hash}"), transactions.as_bytes()); + /// Reads the cache contents from the disk, if available. + fn read_all_from_disk(&mut self, dir: &str) -> Result<(), String> { + for cache_type in [ + CACHE_TYPE_BLOCKS_FULL, + CACHE_TYPE_BLOCKS_MIN, + CACHE_TYPE_BLOCK_RAW_TRANSACTIONS, + CACHE_TYPE_TRANSACTIONS, + ] { + let cache_dir = Path::new(dir).join(cache_type); + let dir_listing = fs::read_dir(cache_dir.clone()) + .map_err(|err| format!("failed reading dir '{:?}': {:?}", cache_dir, err))?; + for file in dir_listing { + if let Ok(file) = file { + let key = file + .file_name() + .to_str() + .ok_or_else(|| String::from("failed converting filename to string"))? + .to_string(); + + let cache_file = File::open(file.path()).map_err(|err| { + format!("failed reading file: '{:?}': {:?}", file.path(), err) + })?; + let reader = BufReader::new(cache_file); + match cache_type { + CACHE_TYPE_BLOCKS_FULL => { + let key = H256::from_str(&key).map_err(|err| { + format!("invalid key for cache file '{:?}': {:?}", key, err) + })?; + let block: Block = serde_json::from_reader(reader) + .map_err(|err| { + format!( + "failed parsing json for cache file '{:?}': {:?}", + key, err + ) + })?; + self.block_hashes.insert(block.number.as_u64(), block.hash); + self.blocks_full.insert(key, block); + } + CACHE_TYPE_BLOCKS_MIN => { + let key = H256::from_str(&key).map_err(|err| { + format!("invalid key for cache file '{:?}': {:?}", key, err) + })?; + let block: Block = serde_json::from_reader(reader) + .map_err(|err| { + format!( + "failed parsing json for cache file '{:?}': {:?}", + key, err + ) + })?; + self.block_hashes.insert(block.number.as_u64(), block.hash); + self.blocks_min.insert(key, block); + } + CACHE_TYPE_BLOCK_RAW_TRANSACTIONS => { + let key = key.parse::().map_err(|err| { + format!("invalid key for cache file '{:?}': {:?}", key, err) + })?; + let transactions: Vec = serde_json::from_reader(reader) + .map_err(|err| { + format!( + "failed parsing json for cache file '{:?}': {:?}", + key, err + ) + })?; + self.block_raw_transactions.insert(key, transactions); + } + CACHE_TYPE_TRANSACTIONS => { + let key = H256::from_str(&key).map_err(|err| { + format!("invalid key for cache file '{:?}': {:?}", key, err) + })?; + let transaction: Transaction = serde_json::from_reader(reader) + .map_err(|err| { + format!( + "failed parsing json for cache file '{:?}': {:?}", + key, err + ) + })?; + self.transactions.insert(key, transaction); + } + _ => return Err(format!("invalid cache_type {}", cache_type)), + } + } + } + } + + Ok(()) } - fn write(key: String, data: &[u8]) { + /// Writes the cache contents to disk, if supported. 
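With `CacheConfig::Disk`, each entry written by the methods in this file becomes a standalone JSON file at `<dir>/<cache_type>/<key>`, where block and transaction keys are `{:#x}`-formatted hashes and raw-transaction keys are decimal block numbers. A minimal sketch of reading one cached full block back by hand, assuming the hypothetical helper name `load_cached_block` and the same serde types this module already uses:

    use std::fs::File;
    use std::io::BufReader;
    use std::path::Path;

    use zksync_types::api::{Block, TransactionVariant};

    fn load_cached_block(
        cache_dir: &Path,
        block_hash: &str,
    ) -> eyre::Result<Block<TransactionVariant>> {
        // e.g. ".cache/blocks_full/0x51f8..." with the default cache directory
        let path = cache_dir.join("blocks_full").join(block_hash);
        let file = File::open(&path)?;
        Ok(serde_json::from_reader(BufReader::new(file))?)
    }
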
+ fn write_to_disk(&self, cache_type: &'static str, key: String, data: &T) { + if let CacheConfig::Disk { dir, .. } = &self.config { + let file = Path::new(&dir).join(cache_type).join(key); + + println!("writing cache {:?}", file); + match File::create(file.clone()) { + Ok(cache_file) => { + let writer = BufWriter::new(cache_file); + if let Err(err) = serde_json::to_writer(writer, data) { + eprintln!("failed writing to cache '{:?}': {:?}", file, err); + } + } + Err(err) => eprintln!("failed creating file: '{:?}': {:?}", file, err), + } + } + } +} + +#[cfg(test)] +mod tests { + use tempdir::TempDir; + use zksync_basic_types::U64; + use zksync_types::{Execute, ExecuteTransactionCommon}; + + use super::*; + + #[test] + fn test_cache_config_none_disables_cache() { + let mut cache = Cache::new(CacheConfig::None); + + cache.insert_block(H256::zero(), true, Default::default()); + assert_eq!(None, cache.get_block(&H256::zero(), true)); + assert_eq!(None, cache.get_block_hash(&0)); + + cache.insert_block(H256::zero(), false, Default::default()); + assert_eq!(None, cache.get_block(&H256::zero(), false)); + assert_eq!(None, cache.get_block_hash(&0)); + + cache.insert_block_raw_transactions(0, Default::default()); + assert_eq!(None, cache.get_block_raw_transactions(&0)); + + cache.insert_transaction(H256::zero(), Default::default()); + assert_eq!(None, cache.get_transaction(&H256::zero())); + } + + #[test] + fn test_cache_config_memory_enables_cache() { + let block_full = Block:: { + hash: H256::repeat_byte(0x1), + number: U64::from(1), + ..Default::default() + }; + let block_min = Block:: { + hash: H256::repeat_byte(0x2), + number: U64::from(2), + ..Default::default() + }; + let transaction = Transaction::default(); + let raw_transactions = vec![RawTransaction { + common_data: ExecuteTransactionCommon::L1(Default::default()), + execute: Execute { + calldata: Default::default(), + contract_address: Default::default(), + factory_deps: None, + value: Default::default(), + }, + received_timestamp_ms: 0, + }]; + + let mut cache = Cache::new(CacheConfig::Memory); + + cache.insert_block(block_full.hash, true, block_full.clone()); + assert_eq!( + Some(&block_full), + cache.get_block(&H256::repeat_byte(0x1), true) + ); + assert_eq!(Some(&H256::repeat_byte(0x1)), cache.get_block_hash(&1)); + + cache.insert_block(block_min.hash, false, block_min.clone()); + assert_eq!( + Some(&block_min), + cache.get_block(&H256::repeat_byte(0x2), false) + ); + assert_eq!(Some(&H256::repeat_byte(0x2)), cache.get_block_hash(&2)); + + cache.insert_block_raw_transactions(0, raw_transactions.clone()); + assert_eq!( + Some(&raw_transactions), + cache.get_block_raw_transactions(&0) + ); + + cache.insert_transaction(H256::zero(), transaction.clone()); + assert_eq!(Some(&transaction), cache.get_transaction(&H256::zero())); + } + + #[test] + fn test_cache_config_disk_enables_cache_and_preserves_it_to_disk() { + let block_full = Block:: { + hash: H256::repeat_byte(0x1), + number: U64::from(1), + ..Default::default() + }; + let block_min = Block:: { + hash: H256::repeat_byte(0x2), + number: U64::from(2), + ..Default::default() + }; + let transaction = Transaction::default(); + let raw_transactions = vec![RawTransaction { + common_data: ExecuteTransactionCommon::L1(Default::default()), + execute: Execute { + calldata: Default::default(), + contract_address: Default::default(), + factory_deps: None, + value: Default::default(), + }, + received_timestamp_ms: 0, + }]; + + let cache_dir = TempDir::new("cache-test").expect("failed creating 
temporary dir"); + let cache_dir_path = cache_dir + .path() + .to_str() + .expect("invalid dir name") + .to_string(); + let mut cache = Cache::new(CacheConfig::Disk { + dir: cache_dir_path.clone(), + reset: true, + }); + + cache.insert_block(block_full.hash, true, block_full.clone()); + assert_eq!( + Some(&block_full), + cache.get_block(&H256::repeat_byte(0x1), true) + ); + assert_eq!(Some(&H256::repeat_byte(0x1)), cache.get_block_hash(&1)); + + cache.insert_block(block_min.hash, false, block_min.clone()); + assert_eq!( + Some(&block_min), + cache.get_block(&H256::repeat_byte(0x2), false) + ); + assert_eq!(Some(&H256::repeat_byte(0x2)), cache.get_block_hash(&2)); + + cache.insert_block_raw_transactions(0, raw_transactions.clone()); + assert_eq!( + Some(&raw_transactions), + cache.get_block_raw_transactions(&0) + ); + + cache.insert_transaction(H256::zero(), transaction.clone()); + assert_eq!(Some(&transaction), cache.get_transaction(&H256::zero())); + + let new_cache = Cache::new(CacheConfig::Disk { + dir: cache_dir_path, + reset: false, + }); + assert_eq!( + Some(&block_full), + new_cache.get_block(&H256::repeat_byte(0x1), true) + ); + assert_eq!(Some(&H256::repeat_byte(0x1)), new_cache.get_block_hash(&1)); + assert_eq!( + Some(&block_min), + new_cache.get_block(&H256::repeat_byte(0x2), false) + ); + assert_eq!(Some(&H256::repeat_byte(0x2)), new_cache.get_block_hash(&2)); + assert_eq!( + Some(&raw_transactions), + new_cache.get_block_raw_transactions(&0) + ); + assert_eq!(Some(&transaction), new_cache.get_transaction(&H256::zero())); + } + + #[test] + fn test_cache_config_disk_enables_cache_and_can_reset_data_on_disk() { + let block_full = Block:: { + hash: H256::repeat_byte(0x1), + number: U64::from(1), + ..Default::default() + }; + let block_min = Block:: { + hash: H256::repeat_byte(0x2), + number: U64::from(2), + ..Default::default() + }; + let transaction = Transaction::default(); + let raw_transactions = vec![RawTransaction { + common_data: ExecuteTransactionCommon::L1(Default::default()), + execute: Execute { + calldata: Default::default(), + contract_address: Default::default(), + factory_deps: None, + value: Default::default(), + }, + received_timestamp_ms: 0, + }]; + + let cache_dir = TempDir::new("cache-test").expect("failed creating temporary dir"); + let cache_dir_path = cache_dir + .path() + .to_str() + .expect("invalid dir name") + .to_string(); + let mut cache = Cache::new(CacheConfig::Disk { + dir: cache_dir_path.clone(), + reset: true, + }); + + cache.insert_block(block_full.hash, true, block_full.clone()); + assert_eq!( + Some(&block_full), + cache.get_block(&H256::repeat_byte(0x1), true) + ); + assert_eq!(Some(&H256::repeat_byte(0x1)), cache.get_block_hash(&1)); + + cache.insert_block(block_min.hash, false, block_min.clone()); + assert_eq!( + Some(&block_min), + cache.get_block(&H256::repeat_byte(0x2), false) + ); + assert_eq!(Some(&H256::repeat_byte(0x2)), cache.get_block_hash(&2)); + + cache.insert_block_raw_transactions(0, raw_transactions.clone()); + assert_eq!( + Some(&raw_transactions), + cache.get_block_raw_transactions(&0) + ); + + cache.insert_transaction(H256::zero(), transaction.clone()); + assert_eq!(Some(&transaction), cache.get_transaction(&H256::zero())); + let new_cache = Cache::new(CacheConfig::Disk { + dir: cache_dir_path, + reset: true, + }); + assert_eq!(None, new_cache.get_block(&H256::zero(), true)); + assert_eq!(None, new_cache.get_block_hash(&1)); + assert_eq!(None, new_cache.get_block(&H256::zero(), false)); + assert_eq!(None, 
new_cache.get_block_hash(&2)); + assert_eq!(None, new_cache.get_block_raw_transactions(&0)); + assert_eq!(None, new_cache.get_transaction(&H256::zero())); } } diff --git a/src/fork.rs b/src/fork.rs index 387e74da..17ec946f 100644 --- a/src/fork.rs +++ b/src/fork.rs @@ -25,7 +25,10 @@ use zksync_utils::{bytecode::hash_bytecode, h256_to_u256}; use zksync_web3_decl::{jsonrpsee::http_client::HttpClient, namespaces::EthNamespaceClient}; use zksync_web3_decl::{jsonrpsee::http_client::HttpClientBuilder, namespaces::ZksNamespaceClient}; -use crate::node::TEST_NODE_NETWORK_ID; +use crate::{ + cache::CacheConfig, + node::TEST_NODE_NETWORK_ID, +}; use crate::{deps::InMemoryStorage, http_fork_source::HttpForkSource}; use crate::{deps::ReadStorage as RS, system_contracts}; @@ -247,6 +250,7 @@ impl ForkDetails { client: HttpClient, miniblock: u64, chain_id: Option, + cache_config: CacheConfig, ) -> Self { let block_details = client .get_block_details(MiniblockNumber(miniblock as u32)) @@ -277,9 +281,7 @@ impl ForkDetails { ); ForkDetails { - fork_source: HttpForkSource { - fork_url: url.to_owned(), - }, + fork_source: HttpForkSource::new(url.to_owned(), cache_config), l1_block: l1_batch_number, l2_block: block, block_timestamp: block_details.base.timestamp, @@ -290,19 +292,19 @@ impl ForkDetails { } } /// Create a fork from a given network at a given height. - pub async fn from_network(fork: &str, fork_at: Option) -> Self { + pub async fn from_network(fork: &str, fork_at: Option, cache_config: CacheConfig) -> Self { let (url, client) = Self::fork_to_url_and_client(fork); let l2_miniblock = if let Some(fork_at) = fork_at { fork_at } else { client.get_block_number().await.unwrap().as_u64() }; - Self::from_url_and_miniblock_and_chain(url, client, l2_miniblock, None).await + Self::from_url_and_miniblock_and_chain(url, client, l2_miniblock, None, cache_config).await } /// Create a fork from a given network, at a height BEFORE a transaction. /// This will allow us to apply this transaction locally on top of this fork. - pub async fn from_network_tx(fork: &str, tx: H256) -> Self { + pub async fn from_network_tx(fork: &str, tx: H256, cache_config: CacheConfig) -> Self { let (url, client) = Self::fork_to_url_and_client(fork); let tx_details = client.get_transaction_by_hash(tx).await.unwrap().unwrap(); let overwrite_chain_id = Some(L2ChainId(tx_details.chain_id.as_u32() as u16)); @@ -310,7 +312,14 @@ impl ForkDetails { // We have to sync to the one-miniblock before the one where transaction is. let l2_miniblock = miniblock_number.saturating_sub(1) as u64; - Self::from_url_and_miniblock_and_chain(url, client, l2_miniblock, overwrite_chain_id).await + Self::from_url_and_miniblock_and_chain( + url, + client, + l2_miniblock, + overwrite_chain_id, + cache_config, + ) + .await } } diff --git a/src/http_fork_source.rs b/src/http_fork_source.rs index 0ac817ac..0dc729d3 100644 --- a/src/http_fork_source.rs +++ b/src/http_fork_source.rs @@ -1,3 +1,5 @@ +use std::sync::RwLock; + use eyre::Context; use zksync_web3_decl::{ jsonrpsee::http_client::{HttpClient, HttpClientBuilder}, @@ -5,7 +7,7 @@ use zksync_web3_decl::{ }; use crate::{ - cache::Cache, + cache::{Cache, CacheConfig}, fork::{block_on, ForkSource}, }; @@ -14,10 +16,18 @@ use crate::{ pub struct HttpForkSource { /// URL for the network to fork. pub fork_url: String, - cache: Cache, + /// Cache for network data. 
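Every `ForkSource` method below follows the same read-through pattern around this cache field: check under a read lock, fall back to the RPC client on a miss, then take a write lock to store the result. A condensed, standalone sketch of that pattern; the `cached_fetch` name and the `fetch` closure are illustrative stand-ins, not part of the patch.

    use std::sync::RwLock;

    use rustc_hash::FxHashMap;
    use zksync_basic_types::H256;

    fn cached_fetch<V: Clone>(
        cache: &RwLock<FxHashMap<H256, V>>,
        key: H256,
        fetch: impl FnOnce() -> V,
    ) -> V {
        // Fast path: shared read lock; a poisoned lock is simply treated as a miss.
        if let Some(hit) = cache.read().ok().and_then(|map| map.get(&key).cloned()) {
            return hit;
        }
        // Slow path: go to the network, then take the write lock to remember the result.
        let value = fetch();
        if let Ok(mut map) = cache.write() {
            map.insert(key, value.clone());
        }
        value
    }
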
+ pub(crate) cache: RwLock, } impl HttpForkSource { + pub fn new(fork_url: String, cache_config: CacheConfig) -> Self { + Self { + fork_url, + cache: RwLock::new(Cache::new(cache_config)), + } + } + pub fn create_client(&self) -> HttpClient { HttpClientBuilder::default() .build(self.fork_url.clone()) @@ -50,57 +60,66 @@ impl ForkSource for HttpForkSource { &self, hash: zksync_basic_types::H256, ) -> eyre::Result> { - self.cache - .transactions - .get(&hash) - .cloned() - .map(|value| Ok(Some(value))) - .unwrap_or_else(|| { - let client = self.create_client(); - block_on(async move { client.get_transaction_by_hash(hash).await }) - .wrap_err("fork http client failed") - .and_then(|result| { - if let Some(transaction) = &result { - self.cache.transactions.insert(hash, transaction.clone()); - } - Ok(result) - }) + if let Ok(Some(transaction)) = self + .cache + .read() + .and_then(|guard| Ok(guard.get_transaction(&hash).cloned())) + { + return Ok(Some(transaction)); + } + + let client = self.create_client(); + block_on(async move { client.get_transaction_by_hash(hash).await }) + .and_then(|maybe_transaction| { + if let Some(transaction) = &maybe_transaction { + self.cache + .write() + .and_then(|mut guard| { + Ok(guard.insert_transaction(hash, transaction.clone())) + }) + .unwrap_or_else(|err| { + println!( + "failed writing to cache for 'get_transaction_by_hash': {:?}", + err + ) + }); + } + Ok(maybe_transaction) }) + .wrap_err("fork http client failed") } fn get_raw_block_transactions( &self, block_number: zksync_basic_types::MiniblockNumber, ) -> eyre::Result> { - let mut block_hash = zksync_basic_types::H256::zero(); - let mut block_number_mapped = false; - - self.cache - .block_hashes - .get(&(block_number.0 as u64)) - .and_then(|hash| { - block_number_mapped = true; - block_hash = *hash; - self.cache.block_raw_transactions.get(hash) - }) - .cloned() - .map(|value| Ok(value)) - .unwrap_or_else(|| { - let client = self.create_client(); - block_on(async move { client.get_raw_block_transactions(block_number).await }) - .wrap_err("fork http client failed") - .and_then(|result| { - if !block_number_mapped { - self.cache - .block_hashes - .insert(block_number.0 as u64, block_hash); - } - - self.cache - .block_raw_transactions - .insert(block_hash, result.clone()); - Ok(result) - }) + let number = block_number.0 as u64; + if let Ok(Some(transaction)) = self + .cache + .read() + .and_then(|guard| Ok(guard.get_block_raw_transactions(&number).cloned())) + { + return Ok(transaction); + } + + let client = self.create_client(); + block_on(async move { client.get_raw_block_transactions(block_number).await }) + .wrap_err("fork http client failed") + .and_then(|transactions| { + if !transactions.is_empty() { + self.cache + .write() + .and_then(|mut guard| { + Ok(guard.insert_block_raw_transactions(number, transactions.clone())) + }) + .unwrap_or_else(|err| { + println!( + "failed writing to cache for 'get_raw_block_transactions': {:?}", + err + ) + }); + } + Ok(transactions) }) } @@ -109,27 +128,30 @@ impl ForkSource for HttpForkSource { hash: zksync_basic_types::H256, full_transactions: bool, ) -> eyre::Result>> { - let mut cache = if full_transactions { - self.cache.blocks_full - } else { - self.cache.blocks_min - }; + if let Ok(Some(block)) = self + .cache + .read() + .and_then(|guard| Ok(guard.get_block(&hash, full_transactions).cloned())) + { + return Ok(Some(block)); + } - cache - .get(&hash) - .cloned() - .map(|value| Ok(Some(value))) - .unwrap_or_else(|| { - let client = self.create_client(); - 
block_on(async move { client.get_block_by_hash(hash, full_transactions).await }) - .wrap_err("fork http client failed") - .and_then(|result| { - if let Some(transaction) = &result { - cache.insert(hash, transaction.clone()); - } - Ok(result) - }) + let client = self.create_client(); + block_on(async move { client.get_block_by_hash(hash, full_transactions).await }) + .and_then(|block| { + if let Some(block) = &block { + self.cache + .write() + .and_then(|mut guard| { + Ok(guard.insert_block(hash, full_transactions, block.clone())) + }) + .unwrap_or_else(|err| { + println!("failed writing to cache for 'get_block_by_hash': {:?}", err) + }); + } + Ok(block) }) + .wrap_err("fork http client failed") } fn get_block_by_number( @@ -137,48 +159,289 @@ impl ForkSource for HttpForkSource { block_number: zksync_types::api::BlockNumber, full_transactions: bool, ) -> eyre::Result>> { - let number = match block_number { + let maybe_number = match block_number { zksync_types::api::BlockNumber::Number(block_number) => Some(block_number), _ => None, }; - let mut cache = if full_transactions { - self.cache.blocks_full - } else { - self.cache.blocks_min - }; - let mut block_hash = zksync_basic_types::H256::zero(); - let mut block_number_mapped = false; - number - .and_then(|number| { - self.cache.block_hashes.get(&number.as_u64()).map(|hash| { - block_number_mapped = true; - block_hash = *hash; - *hash - }) - }) - .and_then(|hash| cache.get(&hash)) - .cloned() - .map(|value| Ok(Some(value))) - .unwrap_or_else(|| { - let client = self.create_client(); - block_on(async move { - client - .get_block_by_number(block_number, full_transactions) - .await - }) - .wrap_err("fork http client failed") - .and_then(|result| { - if !block_number_mapped { - if let Some(number) = number { - self.cache.block_hashes.insert(number.as_u64(), block_hash); - } - } - if let Some(block) = &result { - cache.insert(block_hash, block.clone()); - } - Ok(result) - }) + if let Some(block) = maybe_number.and_then(|number| { + self.cache.read().ok().and_then(|guard| { + guard + .get_block_hash(&number.as_u64()) + .and_then(|hash| guard.get_block(hash, full_transactions).cloned()) }) + }) { + return Ok(Some(block)); + } + + let client = self.create_client(); + block_on(async move { + client + .get_block_by_number(block_number, full_transactions) + .await + }) + .and_then(|block| { + if let Some(block) = &block { + self.cache + .write() + .and_then(|mut guard| { + Ok(guard.insert_block(block.hash, full_transactions, block.clone())) + }) + .unwrap_or_else(|err| { + println!( + "failed writing to cache for 'get_block_by_number': {:?}", + err + ) + }); + } + Ok(block) + }) + .wrap_err("fork http client failed") + } +} + +#[cfg(test)] +mod tests { + use zksync_basic_types::{MiniblockNumber, H256, U64}; + use zksync_types::api::BlockNumber; + + use crate::testing; + + use super::*; + + #[test] + fn test_get_block_by_hash_full_is_cached() { + let input_block_hash = H256::repeat_byte(0x01); + let input_block_number = 8; + + let mock_server = testing::MockServer::run(); + mock_server.expect( + serde_json::json!({ + "jsonrpc": "2.0", + "id": 0, + "method": "eth_getBlockByHash", + "params": [ + format!("{input_block_hash:#x}"), + true + ], + }), + testing::BlockResponseBuilder::new() + .set_hash(input_block_hash) + .set_number(input_block_number) + .build(), + ); + + let fork_source = HttpForkSource::new(mock_server.url(), CacheConfig::Memory); + + let actual_block = fork_source + .get_block_by_hash(input_block_hash, true) + .expect("failed fetching 
block by hash") + .expect("no block"); + + assert_eq!(input_block_hash, actual_block.hash); + assert_eq!(U64::from(input_block_number), actual_block.number); + + let actual_block = fork_source + .get_block_by_hash(input_block_hash, true) + .expect("failed fetching cached block by hash") + .expect("no block"); + + assert_eq!(input_block_hash, actual_block.hash); + assert_eq!(U64::from(input_block_number), actual_block.number); + } + + #[test] + fn test_get_block_by_hash_minimal_is_cached() { + let input_block_hash = H256::repeat_byte(0x01); + let input_block_number = 8; + + let mock_server = testing::MockServer::run(); + mock_server.expect( + serde_json::json!({ + "jsonrpc": "2.0", + "id": 0, + "method": "eth_getBlockByHash", + "params": [ + format!("{input_block_hash:#x}"), + false + ], + }), + testing::BlockResponseBuilder::new() + .set_hash(input_block_hash) + .set_number(input_block_number) + .build(), + ); + + let fork_source = HttpForkSource::new(mock_server.url(), CacheConfig::Memory); + + let actual_block = fork_source + .get_block_by_hash(input_block_hash, false) + .expect("failed fetching block by hash") + .expect("no block"); + + assert_eq!(input_block_hash, actual_block.hash); + assert_eq!(U64::from(input_block_number), actual_block.number); + + let actual_block = fork_source + .get_block_by_hash(input_block_hash, false) + .expect("failed fetching cached block by hash") + .expect("no block"); + + assert_eq!(input_block_hash, actual_block.hash); + assert_eq!(U64::from(input_block_number), actual_block.number); + } + + #[test] + fn test_get_block_by_number_full_is_cached() { + let input_block_hash = H256::repeat_byte(0x01); + let input_block_number = 8; + + let mock_server = testing::MockServer::run(); + mock_server.expect( + serde_json::json!({ + "jsonrpc": "2.0", + "id": 0, + "method": "eth_getBlockByNumber", + "params": [ + format!("{input_block_number:#x}"), + true + ], + }), + testing::BlockResponseBuilder::new() + .set_hash(input_block_hash) + .set_number(input_block_number) + .build(), + ); + + let fork_source = HttpForkSource::new(mock_server.url(), CacheConfig::Memory); + + let actual_block = fork_source + .get_block_by_number( + zksync_types::api::BlockNumber::Number(U64::from(input_block_number)), + true, + ) + .expect("failed fetching block by number") + .expect("no block"); + + assert_eq!(input_block_hash, actual_block.hash); + assert_eq!(U64::from(input_block_number), actual_block.number); + + let actual_block = fork_source + .get_block_by_number( + zksync_types::api::BlockNumber::Number(U64::from(input_block_number)), + true, + ) + .expect("failed fetching cached block by number") + .expect("no block"); + + assert_eq!(input_block_hash, actual_block.hash); + assert_eq!(U64::from(input_block_number), actual_block.number); + } + + #[test] + fn test_get_block_by_number_minimal_is_cached() { + let input_block_hash = H256::repeat_byte(0x01); + let input_block_number = 8; + + let mock_server = testing::MockServer::run(); + mock_server.expect( + serde_json::json!({ + "jsonrpc": "2.0", + "id": 0, + "method": "eth_getBlockByNumber", + "params": [ + format!("{input_block_number:#x}"), + false + ], + }), + testing::BlockResponseBuilder::new() + .set_hash(input_block_hash) + .set_number(input_block_number) + .build(), + ); + + let fork_source = HttpForkSource::new(mock_server.url(), CacheConfig::Memory); + + let actual_block = fork_source + .get_block_by_number(BlockNumber::Number(U64::from(input_block_number)), false) + .expect("failed fetching block by number") + .expect("no 
block"); + + assert_eq!(input_block_hash, actual_block.hash); + assert_eq!(U64::from(input_block_number), actual_block.number); + + let actual_block = fork_source + .get_block_by_number(BlockNumber::Number(U64::from(input_block_number)), false) + .expect("failed fetching cached block by number") + .expect("no block"); + + assert_eq!(input_block_hash, actual_block.hash); + assert_eq!(U64::from(input_block_number), actual_block.number); + } + + #[test] + fn test_get_raw_block_transactions_is_cached() { + let input_block_number = 8u32; + + let mock_server = testing::MockServer::run(); + mock_server.expect( + serde_json::json!({ + "jsonrpc": "2.0", + "id": 0, + "method": "zks_getRawBlockTransactions", + "params": [ + input_block_number, + ], + }), + testing::RawTransactionsResponseBuilder::new() + .add(1) + .build(), + ); + + let fork_source = HttpForkSource::new(mock_server.url(), CacheConfig::Memory); + + let actual_raw_transactions = fork_source + .get_raw_block_transactions(MiniblockNumber(input_block_number)) + .expect("failed fetching block raw transactions"); + assert_eq!(1, actual_raw_transactions.len()); + + let actual_raw_transactions = fork_source + .get_raw_block_transactions(MiniblockNumber(input_block_number)) + .expect("failed fetching cached block raw transactions"); + assert_eq!(1, actual_raw_transactions.len()); + } + + #[test] + fn test_get_transactions_is_cached() { + let input_tx_hash = H256::repeat_byte(0x01); + + let mock_server = testing::MockServer::run(); + mock_server.expect( + serde_json::json!({ + "jsonrpc": "2.0", + "id": 0, + "method": "eth_getTransactionByHash", + "params": [ + input_tx_hash, + ], + }), + testing::TransactionResponseBuilder::new() + .set_hash(input_tx_hash) + .build(), + ); + + let fork_source = HttpForkSource::new(mock_server.url(), CacheConfig::Memory); + + let actual_transaction = fork_source + .get_transaction_by_hash(input_tx_hash) + .expect("failed fetching transaction") + .expect("no transaction"); + assert_eq!(input_tx_hash, actual_transaction.hash); + + let actual_transaction = fork_source + .get_transaction_by_hash(input_tx_hash) + .expect("failed fetching cached transaction") + .expect("no transaction"); + assert_eq!(input_tx_hash, actual_transaction.hash); } } diff --git a/src/lib.rs b/src/lib.rs index 937d2e07..a298f03e 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -54,5 +54,5 @@ pub mod system_contracts; pub mod utils; pub mod zks; -mod testing; mod cache; +mod testing; diff --git a/src/main.rs b/src/main.rs index cb1b5f26..c1bbae99 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,3 +1,4 @@ +use crate::cache::CacheConfig; use crate::hardhat::{HardhatNamespaceImpl, HardhatNamespaceT}; use crate::node::{ShowGasDetails, ShowStorageLogs, ShowVMDetails}; use clap::{Parser, Subcommand, ValueEnum}; @@ -11,6 +12,7 @@ use simplelog::{ use zks::ZkMockNamespaceImpl; mod bootloader_debug; +mod cache; mod configuration_api; mod console_log; mod deps; @@ -160,6 +162,14 @@ impl From for LevelFilter { } } +/// Cache type config for the node. 
+#[derive(ValueEnum, Debug, Clone)] +enum CacheType { + None, + Memory, + Disk, +} + #[derive(Debug, Parser)] #[command(author = "Matter Labs", version, about = "Test Node", long_about = None)] struct Cli { @@ -198,6 +208,18 @@ struct Cli { /// Log file path - default: era_test_node.log #[arg(long, default_value = "era_test_node.log")] log_file_path: String, + + /// Cache type, can be one of `none`, `memory`, or `disk` - default: "disk" + #[arg(long, default_value = "disk")] + cache: CacheType, + + /// If true, will reset the local `disk` cache. + #[arg(long)] + reset_cache: bool, + + /// Cache directory location for `disk` cache - default: ".cache" + #[arg(long, default_value = ".cache")] + cache_dir: String, } #[derive(Debug, Subcommand)] @@ -268,6 +290,14 @@ async fn main() -> anyhow::Result<()> { log::info!("+++++ Reading local contracts from {:?} +++++", path); } } + let cache_config = match opt.cache { + CacheType::None => CacheConfig::None, + CacheType::Memory => CacheConfig::Memory, + CacheType::Disk => CacheConfig::Disk { + dir: opt.cache_dir, + reset: opt.reset_cache, + }, + }; let filter = EnvFilter::from_default_env(); let subscriber = FmtSubscriber::builder() @@ -280,9 +310,11 @@ async fn main() -> anyhow::Result<()> { let fork_details = match &opt.command { Command::Run => None, - Command::Fork(fork) => Some(ForkDetails::from_network(&fork.network, fork.fork_at).await), + Command::Fork(fork) => { + Some(ForkDetails::from_network(&fork.network, fork.fork_at, cache_config).await) + } Command::ReplayTx(replay_tx) => { - Some(ForkDetails::from_network_tx(&replay_tx.network, replay_tx.tx).await) + Some(ForkDetails::from_network_tx(&replay_tx.network, replay_tx.tx, cache_config).await) } }; diff --git a/src/node.rs b/src/node.rs index 187a61a7..b0923519 100644 --- a/src/node.rs +++ b/src/node.rs @@ -1930,7 +1930,9 @@ impl EthNamespaceT for #[cfg(test)] mod tests { - use crate::{http_fork_source::HttpForkSource, node::InMemoryNode, testing}; + use crate::{ + cache::CacheConfig, http_fork_source::HttpForkSource, node::InMemoryNode, testing, + }; use zksync_types::{api::BlockNumber, Address, L2ChainId, Nonce, PackedEthSignature}; use zksync_web3_decl::types::SyncState; @@ -2042,7 +2044,7 @@ mod tests { async fn test_get_block_by_hash_uses_fork_source() { let input_block_hash = H256::repeat_byte(0x01); - let mock_server = testing::MockServer::run(); + let mock_server = testing::MockServer::run_with_config(10, H256::repeat_byte(0xab)); let mock_block_number = 8; let block_response = testing::BlockResponseBuilder::new() .set_hash(input_block_hash) @@ -2061,7 +2063,7 @@ mod tests { block_response, ); let node = InMemoryNode::::new( - Some(ForkDetails::from_network(&mock_server.url(), None).await), + Some(ForkDetails::from_network(&mock_server.url(), None, CacheConfig::None).await), crate::node::ShowCalls::None, ShowStorageLogs::None, ShowVMDetails::None, @@ -2135,7 +2137,7 @@ mod tests { #[tokio::test] async fn test_get_block_by_number_uses_fork_source_if_missing_number() { - let mock_server = testing::MockServer::run(); + let mock_server = testing::MockServer::run_with_config(10, H256::repeat_byte(0xab)); let mock_block_number = 8; let block_response = testing::BlockResponseBuilder::new() .set_number(mock_block_number) @@ -2153,7 +2155,7 @@ mod tests { block_response, ); let node = InMemoryNode::::new( - Some(ForkDetails::from_network(&mock_server.url(), None).await), + Some(ForkDetails::from_network(&mock_server.url(), None, CacheConfig::None).await), crate::node::ShowCalls::None, 
ShowStorageLogs::None, ShowVMDetails::None, @@ -2212,12 +2214,25 @@ mod tests { #[tokio::test] async fn test_get_block_by_number_uses_fork_source_for_latest_block_if_locally_unavailable() { - let latest_block_number = 10; - let mock_server = - testing::MockServer::run_with_config(latest_block_number, H256::repeat_byte(0x01)); - + let mock_server = testing::MockServer::run_with_config(10, H256::repeat_byte(0xab)); + let mock_block_number = 1; + let block_response = testing::BlockResponseBuilder::new() + .set_number(mock_block_number) + .build(); + mock_server.expect( + serde_json::json!({ + "jsonrpc": "2.0", + "id": 0, + "method": "eth_getBlockByNumber", + "params": [ + "latest", + true + ], + }), + block_response, + ); let node = InMemoryNode::::new( - Some(ForkDetails::from_network(&mock_server.url(), None).await), + Some(ForkDetails::from_network(&mock_server.url(), None, CacheConfig::None).await), crate::node::ShowCalls::None, ShowStorageLogs::None, ShowVMDetails::None, @@ -2236,7 +2251,7 @@ mod tests { #[tokio::test] async fn test_get_block_by_number_uses_fork_source_for_earliest_block() { - let mock_server = testing::MockServer::run(); + let mock_server = testing::MockServer::run_with_config(10, H256::repeat_byte(0xab)); let mock_block_number = 1; let block_response = testing::BlockResponseBuilder::new() .set_number(mock_block_number) @@ -2254,7 +2269,7 @@ mod tests { block_response, ); let node = InMemoryNode::::new( - Some(ForkDetails::from_network(&mock_server.url(), None).await), + Some(ForkDetails::from_network(&mock_server.url(), None, CacheConfig::None).await), crate::node::ShowCalls::None, ShowStorageLogs::None, ShowVMDetails::None, @@ -2278,11 +2293,25 @@ mod tests { BlockNumber::Committed, BlockNumber::Finalized, ] { - let latest_block_number = 10; - let mock_server = - testing::MockServer::run_with_config(latest_block_number, H256::repeat_byte(0x01)); + let mock_server = testing::MockServer::run_with_config(10, H256::repeat_byte(0xab)); + let mock_block_number = 1; + let block_response = testing::BlockResponseBuilder::new() + .set_number(mock_block_number) + .build(); + mock_server.expect( + serde_json::json!({ + "jsonrpc": "2.0", + "id": 0, + "method": "eth_getBlockByNumber", + "params": [ + block_number, + true + ], + }), + block_response, + ); let node = InMemoryNode::::new( - Some(ForkDetails::from_network(&mock_server.url(), None).await), + Some(ForkDetails::from_network(&mock_server.url(), None, CacheConfig::None).await), crate::node::ShowCalls::None, ShowStorageLogs::None, ShowVMDetails::None, diff --git a/src/testing.rs b/src/testing.rs index 99067b44..c061d633 100644 --- a/src/testing.rs +++ b/src/testing.rs @@ -10,6 +10,7 @@ use httptest::{ responders::json_encoded, Expectation, Server, }; +use itertools::Itertools; use zksync_basic_types::H256; /// A HTTP server that can be used to mock a fork source. @@ -19,9 +20,11 @@ pub struct MockServer { } impl MockServer { - /// Start the mock server with pre-defined calls used to fetch the fork's state. + /// Start the mock server. pub fn run() -> Self { - Self::run_with_config(10, H256::repeat_byte(0xab)) + MockServer { + inner: Server::run(), + } } /// Start the mock server with pre-defined calls used to fetch the fork's state. 
@@ -206,6 +209,108 @@ impl BlockResponseBuilder { } } +/// A mock response builder for a transaction +#[derive(Default, Debug, Clone)] +pub struct TransactionResponseBuilder { + hash: H256, +} + +impl TransactionResponseBuilder { + /// Create a new instance of [TransactionResponseBuilder] + pub fn new() -> Self { + Self::default() + } + + /// Sets the block hash + pub fn set_hash(&mut self, hash: H256) -> &mut Self { + self.hash = hash; + self + } + + /// Builds the json response + pub fn build(&mut self) -> serde_json::Value { + serde_json::json!({ + "jsonrpc": "2.0", + "id": 0, + "result": { + "hash": format!("{:#x}", self.hash), + "nonce": "0x0", + "blockHash": "0x51f81bcdfc324a0dff2b5bec9d92e21cbebc4d5e29d3a3d30de3e03fbeab8d7f", + "blockNumber": "0x1", + "transactionIndex": "0x0", + "from": "0x29df43f75149d0552475a6f9b2ac96e28796ed0b", + "to": "0x0000000000000000000000000000000000008006", + "value": "0x0", + "gasPrice": "0x0", + "gas": "0x44aa200", + "input": "0x3cda33510000000000000000000000000000000000000000000000000000000000000000010000553109a66f1432eb2286c54694784d1b6993bc24a168be0a49b4d0fd4500000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000000", + "type": "0xff", + "maxFeePerGas": "0x0", + "maxPriorityFeePerGas": "0x0", + "chainId": "0x144", + "l1BatchNumber": "0x1", + "l1BatchTxIndex": "0x0", + }, + }) + } +} + +/// A mock response builder for a transaction +#[derive(Default, Debug, Clone)] +pub struct RawTransactionsResponseBuilder { + serial_ids: Vec, +} + +impl RawTransactionsResponseBuilder { + /// Create a new instance of [RawTransactionsResponseBuilder] + pub fn new() -> Self { + Self::default() + } + + /// Inserts a new raw transaction with a serial id + pub fn add(&mut self, serial_id: u64) -> &mut Self { + self.serial_ids.push(serial_id); + self + } + + /// Builds the json response + pub fn build(&mut self) -> serde_json::Value { + serde_json::json!({ + "jsonrpc": "2.0", + "id": 0, + "result": self.serial_ids.iter().map(|serial_id| serde_json::json!({ + "common_data": { + "L1": { + "sender": "0xcca8009f5e09f8c5db63cb0031052f9cb635af62", + "serialId": serial_id, + "deadlineBlock": 0, + "layer2TipFee": "0x0", + "fullFee": "0x0", + "maxFeePerGas": "0x0", + "gasLimit": "0x989680", + "gasPerPubdataLimit": "0x320", + "opProcessingType": "Common", + "priorityQueueType": "Deque", + "ethHash": "0x0000000000000000000000000000000000000000000000000000000000000000", + "ethBlock": 16631249u64, + "canonicalTxHash": "0xaaf9514a005ba59e29b53e1dc84d234d909c5202b44c5179f9c67d8e3cad0636", + "toMint": "0x470de4df820000", + "refundRecipient": "0xcca8009f5e09f8c5db63cb0031052f9cb635af62" + } + }, + "execute": { + "contractAddress": "0xcca8009f5e09f8c5db63cb0031052f9cb635af62", + "calldata": "0x", + "value": "0x470de4df820000", + "factoryDeps": [] + }, + "received_timestamp_ms": 1676429272816u64, + "raw_bytes": null + })).collect_vec(), + }) + } +} + mod test { use super::*; From 0da9fec577218fcdf7c40f72e0245caf49478371 Mon Sep 17 00:00:00 2001 From: Nisheeth Barthwal Date: Thu, 14 Sep 2023 11:07:00 +0200 Subject: [PATCH 04/12] fix tests --- src/cache.rs | 2 +- src/fork.rs | 5 +---- src/node.rs | 61 ++++++++++++++-------------------------------------- 3 files changed, 18 insertions(+), 50 deletions(-) diff --git a/src/cache.rs b/src/cache.rs index 16a1768d..db76c4e2 100644 --- a/src/cache.rs +++ b/src/cache.rs @@ -16,7 +16,7 @@ const CACHE_TYPE_BLOCK_RAW_TRANSACTIONS: &'static str = "block_raw_transactions" const 
CACHE_TYPE_TRANSACTIONS: &'static str = "transactions"; /// Cache configuration. Can be one of: -/// +/// /// None : Caching is disabled /// Memory : Caching is provided in-memory and not persisted across runs /// Disk : Caching is persisted on disk in the provided directory and can be reset diff --git a/src/fork.rs b/src/fork.rs index 17ec946f..752448bd 100644 --- a/src/fork.rs +++ b/src/fork.rs @@ -25,10 +25,7 @@ use zksync_utils::{bytecode::hash_bytecode, h256_to_u256}; use zksync_web3_decl::{jsonrpsee::http_client::HttpClient, namespaces::EthNamespaceClient}; use zksync_web3_decl::{jsonrpsee::http_client::HttpClientBuilder, namespaces::ZksNamespaceClient}; -use crate::{ - cache::CacheConfig, - node::TEST_NODE_NETWORK_ID, -}; +use crate::{cache::CacheConfig, node::TEST_NODE_NETWORK_ID}; use crate::{deps::InMemoryStorage, http_fork_source::HttpForkSource}; use crate::{deps::ReadStorage as RS, system_contracts}; diff --git a/src/node.rs b/src/node.rs index b0923519..2b622301 100644 --- a/src/node.rs +++ b/src/node.rs @@ -2020,7 +2020,7 @@ mod tests { testing::MockServer::run_with_config(input_block_number, input_block_hash); let node = InMemoryNode::::new( - Some(ForkDetails::from_network(&mock_server.url(), None).await), + Some(ForkDetails::from_network(&mock_server.url(), None, CacheConfig::None).await), crate::node::ShowCalls::None, ShowStorageLogs::None, ShowVMDetails::None, @@ -2213,24 +2213,10 @@ mod tests { } #[tokio::test] - async fn test_get_block_by_number_uses_fork_source_for_latest_block_if_locally_unavailable() { - let mock_server = testing::MockServer::run_with_config(10, H256::repeat_byte(0xab)); - let mock_block_number = 1; - let block_response = testing::BlockResponseBuilder::new() - .set_number(mock_block_number) - .build(); - mock_server.expect( - serde_json::json!({ - "jsonrpc": "2.0", - "id": 0, - "method": "eth_getBlockByNumber", - "params": [ - "latest", - true - ], - }), - block_response, - ); + async fn test_get_block_by_number_uses_locally_available_block_for_latest_block() { + let input_block_number = 10; + let mock_server = + testing::MockServer::run_with_config(input_block_number, H256::repeat_byte(0xab)); let node = InMemoryNode::::new( Some(ForkDetails::from_network(&mock_server.url(), None, CacheConfig::None).await), crate::node::ShowCalls::None, @@ -2246,16 +2232,13 @@ mod tests { .await .expect("failed fetching block by hash") .expect("no block"); - assert_eq!(U64::from(latest_block_number), actual_block.number); + assert_eq!(U64::from(input_block_number), actual_block.number); } #[tokio::test] async fn test_get_block_by_number_uses_fork_source_for_earliest_block() { let mock_server = testing::MockServer::run_with_config(10, H256::repeat_byte(0xab)); - let mock_block_number = 1; - let block_response = testing::BlockResponseBuilder::new() - .set_number(mock_block_number) - .build(); + let input_block_number = 1; mock_server.expect( serde_json::json!({ "jsonrpc": "2.0", @@ -2266,7 +2249,9 @@ mod tests { true ], }), - block_response, + testing::BlockResponseBuilder::new() + .set_number(input_block_number) + .build(), ); let node = InMemoryNode::::new( Some(ForkDetails::from_network(&mock_server.url(), None, CacheConfig::None).await), @@ -2283,33 +2268,19 @@ mod tests { .await .expect("failed fetching block by hash") .expect("no block"); - assert_eq!(U64::from(mock_block_number), actual_block.number); + assert_eq!(U64::from(input_block_number), actual_block.number); } #[tokio::test] - async fn 
test_get_block_by_number_uses_fork_source_for_latest_alike_blocks() { + async fn test_get_block_by_number_uses_locally_available_for_latest_alike_blocks() { for block_number in [ BlockNumber::Pending, BlockNumber::Committed, BlockNumber::Finalized, ] { - let mock_server = testing::MockServer::run_with_config(10, H256::repeat_byte(0xab)); - let mock_block_number = 1; - let block_response = testing::BlockResponseBuilder::new() - .set_number(mock_block_number) - .build(); - mock_server.expect( - serde_json::json!({ - "jsonrpc": "2.0", - "id": 0, - "method": "eth_getBlockByNumber", - "params": [ - block_number, - true - ], - }), - block_response, - ); + let input_block_number = 10; + let mock_server = + testing::MockServer::run_with_config(input_block_number, H256::repeat_byte(0xab)); let node = InMemoryNode::::new( Some(ForkDetails::from_network(&mock_server.url(), None, CacheConfig::None).await), crate::node::ShowCalls::None, @@ -2326,7 +2297,7 @@ mod tests { .expect("failed fetching block by hash") .expect("no block"); assert_eq!( - U64::from(latest_block_number), + U64::from(input_block_number), actual_block.number, "case {}", block_number, From 40ac65446c1d6ed5b5b7b83729242d9c10c5d307 Mon Sep 17 00:00:00 2001 From: Nisheeth Barthwal Date: Thu, 14 Sep 2023 11:11:07 +0200 Subject: [PATCH 05/12] update README --- README.md | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/README.md b/README.md index 00ae513e..6d79e853 100644 --- a/README.md +++ b/README.md @@ -67,6 +67,25 @@ Additionally, the file path can be provided via the `--log-file-path` option (de era_test_node --log=error --log-file-path=run.log run ``` +## 📃 Caching + +The node will cache certain network request by default to disk in the `.cache` directory. Alternatively the caching can be disabled tor set to in-memory only +via the `--cache=none|memory|disk` parameter. + +```bash +era_test_node --cache=none run +``` + +```bash +era_test_node --cache=memory run +``` + +Additionally when using `--cache=disk`, the cache directory may be specified via `--cache-dir` and the cache may +be reset on startup via `--reset-cache` parameters. +```bash +era_test_node --cache=disk --cache-dir=/tmp/foo --reset-cache run +``` + ## 🌐 Network Details - L2 RPC: http://localhost:8011 From 630a0d74dd4b141527242155c535017b0ae3942f Mon Sep 17 00:00:00 2001 From: Nisheeth Barthwal Date: Thu, 14 Sep 2023 11:14:48 +0200 Subject: [PATCH 06/12] update println! to logs --- src/cache.rs | 13 +++++++------ src/hardhat.rs | 4 ++-- src/http_fork_source.rs | 8 ++++---- 3 files changed, 13 insertions(+), 12 deletions(-) diff --git a/src/cache.rs b/src/cache.rs index db76c4e2..62ba35aa 100644 --- a/src/cache.rs +++ b/src/cache.rs @@ -54,8 +54,9 @@ impl Cache { if let CacheConfig::Disk { dir, reset } = &config { if *reset { - fs::remove_dir_all(Path::new(dir)) - .unwrap_or_else(|err| eprintln!("failed removing cache from disk: {:?}", err)); + fs::remove_dir_all(Path::new(dir)).unwrap_or_else(|err| { + log::error!("failed removing cache from disk: {:?}", err) + }); } for cache_type in [ @@ -70,7 +71,7 @@ impl Cache { } cache .read_all_from_disk(&dir) - .unwrap_or_else(|err| eprintln!("failed reading cache from disk: {:?}", err)); + .unwrap_or_else(|err| log::error!("failed reading cache from disk: {:?}", err)); } cache @@ -265,15 +266,15 @@ impl Cache { if let CacheConfig::Disk { dir, .. 
} = &self.config { let file = Path::new(&dir).join(cache_type).join(key); - println!("writing cache {:?}", file); + log::debug!("writing cache {:?}", file); match File::create(file.clone()) { Ok(cache_file) => { let writer = BufWriter::new(cache_file); if let Err(err) = serde_json::to_writer(writer, data) { - eprintln!("failed writing to cache '{:?}': {:?}", file, err); + log::error!("failed writing to cache '{:?}': {:?}", file, err); } } - Err(err) => eprintln!("failed creating file: '{:?}': {:?}", file, err), + Err(err) => log::error!("failed creating file: '{:?}': {:?}", file, err), } } } diff --git a/src/hardhat.rs b/src/hardhat.rs index 83b9aecf..4e91f1ef 100644 --- a/src/hardhat.rs +++ b/src/hardhat.rs @@ -71,7 +71,7 @@ impl HardhatNamespaceT inner_guard .fork_storage .set_value(balance_key, u256_to_h256(balance)); - println!( + log::debug!( "👷 Balance for address {:?} has been manually set to {} Wei", address, balance ); @@ -113,7 +113,7 @@ impl HardhatNamespaceT } deployment_nonce = nonce; let enforced_full_nonce = nonces_to_full_nonce(account_nonce, deployment_nonce); - println!( + log::debug!( "👷 Nonces for address {:?} have been set to {}", address, nonce ); diff --git a/src/http_fork_source.rs b/src/http_fork_source.rs index 0dc729d3..f68ebf09 100644 --- a/src/http_fork_source.rs +++ b/src/http_fork_source.rs @@ -78,7 +78,7 @@ impl ForkSource for HttpForkSource { Ok(guard.insert_transaction(hash, transaction.clone())) }) .unwrap_or_else(|err| { - println!( + log::warn!( "failed writing to cache for 'get_transaction_by_hash': {:?}", err ) @@ -113,7 +113,7 @@ impl ForkSource for HttpForkSource { Ok(guard.insert_block_raw_transactions(number, transactions.clone())) }) .unwrap_or_else(|err| { - println!( + log::warn!( "failed writing to cache for 'get_raw_block_transactions': {:?}", err ) @@ -146,7 +146,7 @@ impl ForkSource for HttpForkSource { Ok(guard.insert_block(hash, full_transactions, block.clone())) }) .unwrap_or_else(|err| { - println!("failed writing to cache for 'get_block_by_hash': {:?}", err) + log::warn!("failed writing to cache for 'get_block_by_hash': {:?}", err) }); } Ok(block) @@ -188,7 +188,7 @@ impl ForkSource for HttpForkSource { Ok(guard.insert_block(block.hash, full_transactions, block.clone())) }) .unwrap_or_else(|err| { - println!( + log::warn!( "failed writing to cache for 'get_block_by_number': {:?}", err ) From 5cf77c14933959b56ed7ce7cd6d67816c85fd7b6 Mon Sep 17 00:00:00 2001 From: Nisheeth Barthwal Date: Thu, 14 Sep 2023 13:27:52 +0200 Subject: [PATCH 07/12] fmt --- src/hardhat.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/hardhat.rs b/src/hardhat.rs index 4e91f1ef..7982f3a3 100644 --- a/src/hardhat.rs +++ b/src/hardhat.rs @@ -73,7 +73,8 @@ impl HardhatNamespaceT .set_value(balance_key, u256_to_h256(balance)); log::debug!( "👷 Balance for address {:?} has been manually set to {} Wei", - address, balance + address, + balance ); Ok(true) } @@ -115,7 +116,8 @@ impl HardhatNamespaceT let enforced_full_nonce = nonces_to_full_nonce(account_nonce, deployment_nonce); log::debug!( "👷 Nonces for address {:?} have been set to {}", - address, nonce + address, + nonce ); inner_guard .fork_storage From 5a73df8e1ca5d91e578bae21ab1c3d54559a624c Mon Sep 17 00:00:00 2001 From: Nisheeth Barthwal Date: Thu, 14 Sep 2023 18:34:37 +0200 Subject: [PATCH 08/12] fmt --- src/cache.rs | 129 ++++++++++++++++++---------------------- src/formatter.rs | 4 +- src/http_fork_source.rs | 38 ++++++------ src/node.rs | 2 +- 4 files changed, 78 insertions(+), 
95 deletions(-) diff --git a/src/cache.rs b/src/cache.rs index 62ba35aa..389e9496 100644 --- a/src/cache.rs +++ b/src/cache.rs @@ -10,10 +10,10 @@ use zksync_basic_types::H256; use zksync_types::api::{Block, Transaction, TransactionVariant}; use zksync_types::Transaction as RawTransaction; -const CACHE_TYPE_BLOCKS_FULL: &'static str = "blocks_full"; -const CACHE_TYPE_BLOCKS_MIN: &'static str = "blocks_min"; -const CACHE_TYPE_BLOCK_RAW_TRANSACTIONS: &'static str = "block_raw_transactions"; -const CACHE_TYPE_TRANSACTIONS: &'static str = "transactions"; +const CACHE_TYPE_BLOCKS_FULL: &str = "blocks_full"; +const CACHE_TYPE_BLOCKS_MIN: &str = "blocks_min"; +const CACHE_TYPE_BLOCK_RAW_TRANSACTIONS: &str = "block_raw_transactions"; +const CACHE_TYPE_TRANSACTIONS: &str = "transactions"; /// Cache configuration. Can be one of: /// @@ -70,7 +70,7 @@ impl Cache { }); } cache - .read_all_from_disk(&dir) + .read_all_from_disk(dir) .unwrap_or_else(|err| log::error!("failed reading cache from disk: {:?}", err)); } @@ -121,7 +121,7 @@ impl Cache { return None; } - self.block_hashes.get(&number) + self.block_hashes.get(number) } /// Returns the cached raw transactions for the provided block number. @@ -157,7 +157,7 @@ impl Cache { return None; } - self.transactions.get(&hash) + self.transactions.get(hash) } /// Cache a transaction for the provided hash. @@ -184,76 +184,63 @@ impl Cache { ] { let cache_dir = Path::new(dir).join(cache_type); let dir_listing = fs::read_dir(cache_dir.clone()) - .map_err(|err| format!("failed reading dir '{:?}': {:?}", cache_dir, err))?; + .map_err(|err| format!("failed reading dir '{:?}': {:?}", cache_dir, err))? + .flatten(); for file in dir_listing { - if let Ok(file) = file { - let key = file - .file_name() - .to_str() - .ok_or_else(|| String::from("failed converting filename to string"))? - .to_string(); - - let cache_file = File::open(file.path()).map_err(|err| { - format!("failed reading file: '{:?}': {:?}", file.path(), err) - })?; - let reader = BufReader::new(cache_file); - match cache_type { - CACHE_TYPE_BLOCKS_FULL => { - let key = H256::from_str(&key).map_err(|err| { - format!("invalid key for cache file '{:?}': {:?}", key, err) + let key = file + .file_name() + .to_str() + .ok_or_else(|| String::from("failed converting filename to string"))? 
+ .to_string(); + + let cache_file = File::open(file.path()).map_err(|err| { + format!("failed reading file: '{:?}': {:?}", file.path(), err) + })?; + let reader = BufReader::new(cache_file); + match cache_type { + CACHE_TYPE_BLOCKS_FULL => { + let key = H256::from_str(&key).map_err(|err| { + format!("invalid key for cache file '{:?}': {:?}", key, err) + })?; + let block: Block = serde_json::from_reader(reader) + .map_err(|err| { + format!("failed parsing json for cache file '{:?}': {:?}", key, err) })?; - let block: Block = serde_json::from_reader(reader) - .map_err(|err| { - format!( - "failed parsing json for cache file '{:?}': {:?}", - key, err - ) - })?; - self.block_hashes.insert(block.number.as_u64(), block.hash); - self.blocks_full.insert(key, block); - } - CACHE_TYPE_BLOCKS_MIN => { - let key = H256::from_str(&key).map_err(|err| { - format!("invalid key for cache file '{:?}': {:?}", key, err) + self.block_hashes.insert(block.number.as_u64(), block.hash); + self.blocks_full.insert(key, block); + } + CACHE_TYPE_BLOCKS_MIN => { + let key = H256::from_str(&key).map_err(|err| { + format!("invalid key for cache file '{:?}': {:?}", key, err) + })?; + let block: Block = serde_json::from_reader(reader) + .map_err(|err| { + format!("failed parsing json for cache file '{:?}': {:?}", key, err) })?; - let block: Block = serde_json::from_reader(reader) - .map_err(|err| { - format!( - "failed parsing json for cache file '{:?}': {:?}", - key, err - ) - })?; - self.block_hashes.insert(block.number.as_u64(), block.hash); - self.blocks_min.insert(key, block); - } - CACHE_TYPE_BLOCK_RAW_TRANSACTIONS => { - let key = key.parse::().map_err(|err| { - format!("invalid key for cache file '{:?}': {:?}", key, err) + self.block_hashes.insert(block.number.as_u64(), block.hash); + self.blocks_min.insert(key, block); + } + CACHE_TYPE_BLOCK_RAW_TRANSACTIONS => { + let key = key.parse::().map_err(|err| { + format!("invalid key for cache file '{:?}': {:?}", key, err) + })?; + let transactions: Vec = serde_json::from_reader(reader) + .map_err(|err| { + format!("failed parsing json for cache file '{:?}': {:?}", key, err) })?; - let transactions: Vec = serde_json::from_reader(reader) - .map_err(|err| { - format!( - "failed parsing json for cache file '{:?}': {:?}", - key, err - ) - })?; - self.block_raw_transactions.insert(key, transactions); - } - CACHE_TYPE_TRANSACTIONS => { - let key = H256::from_str(&key).map_err(|err| { - format!("invalid key for cache file '{:?}': {:?}", key, err) + self.block_raw_transactions.insert(key, transactions); + } + CACHE_TYPE_TRANSACTIONS => { + let key = H256::from_str(&key).map_err(|err| { + format!("invalid key for cache file '{:?}': {:?}", key, err) + })?; + let transaction: Transaction = + serde_json::from_reader(reader).map_err(|err| { + format!("failed parsing json for cache file '{:?}': {:?}", key, err) })?; - let transaction: Transaction = serde_json::from_reader(reader) - .map_err(|err| { - format!( - "failed parsing json for cache file '{:?}': {:?}", - key, err - ) - })?; - self.transactions.insert(key, transaction); - } - _ => return Err(format!("invalid cache_type {}", cache_type)), + self.transactions.insert(key, transaction); } + _ => return Err(format!("invalid cache_type {}", cache_type)), } } } diff --git a/src/formatter.rs b/src/formatter.rs index 7060ab6e..6df67792 100644 --- a/src/formatter.rs +++ b/src/formatter.rs @@ -141,11 +141,11 @@ pub fn print_call(call: &Call, padding: usize, show_calls: &ShowCalls, resolve_h call.revert_reason .as_ref() .map(|s| 
format!("Revert: {}", s)) - .unwrap_or("".to_string()), + .unwrap_or_else(|| "".to_string()), call.error .as_ref() .map(|s| format!("Error: {}", s)) - .unwrap_or("".to_string()), + .unwrap_or_else(|| "".to_string()), call.gas ); diff --git a/src/http_fork_source.rs b/src/http_fork_source.rs index f68ebf09..70c703af 100644 --- a/src/http_fork_source.rs +++ b/src/http_fork_source.rs @@ -63,20 +63,18 @@ impl ForkSource for HttpForkSource { if let Ok(Some(transaction)) = self .cache .read() - .and_then(|guard| Ok(guard.get_transaction(&hash).cloned())) + .map(|guard| guard.get_transaction(&hash).cloned()) { return Ok(Some(transaction)); } let client = self.create_client(); block_on(async move { client.get_transaction_by_hash(hash).await }) - .and_then(|maybe_transaction| { + .map(|maybe_transaction| { if let Some(transaction) = &maybe_transaction { self.cache .write() - .and_then(|mut guard| { - Ok(guard.insert_transaction(hash, transaction.clone())) - }) + .map(|mut guard| guard.insert_transaction(hash, transaction.clone())) .unwrap_or_else(|err| { log::warn!( "failed writing to cache for 'get_transaction_by_hash': {:?}", @@ -84,7 +82,7 @@ impl ForkSource for HttpForkSource { ) }); } - Ok(maybe_transaction) + maybe_transaction }) .wrap_err("fork http client failed") } @@ -97,7 +95,7 @@ impl ForkSource for HttpForkSource { if let Ok(Some(transaction)) = self .cache .read() - .and_then(|guard| Ok(guard.get_block_raw_transactions(&number).cloned())) + .map(|guard| guard.get_block_raw_transactions(&number).cloned()) { return Ok(transaction); } @@ -105,12 +103,12 @@ impl ForkSource for HttpForkSource { let client = self.create_client(); block_on(async move { client.get_raw_block_transactions(block_number).await }) .wrap_err("fork http client failed") - .and_then(|transactions| { + .map(|transactions| { if !transactions.is_empty() { self.cache .write() - .and_then(|mut guard| { - Ok(guard.insert_block_raw_transactions(number, transactions.clone())) + .map(|mut guard| { + guard.insert_block_raw_transactions(number, transactions.clone()) }) .unwrap_or_else(|err| { log::warn!( @@ -119,7 +117,7 @@ impl ForkSource for HttpForkSource { ) }); } - Ok(transactions) + transactions }) } @@ -131,25 +129,23 @@ impl ForkSource for HttpForkSource { if let Ok(Some(block)) = self .cache .read() - .and_then(|guard| Ok(guard.get_block(&hash, full_transactions).cloned())) + .map(|guard| guard.get_block(&hash, full_transactions).cloned()) { return Ok(Some(block)); } let client = self.create_client(); block_on(async move { client.get_block_by_hash(hash, full_transactions).await }) - .and_then(|block| { + .map(|block| { if let Some(block) = &block { self.cache .write() - .and_then(|mut guard| { - Ok(guard.insert_block(hash, full_transactions, block.clone())) - }) + .map(|mut guard| guard.insert_block(hash, full_transactions, block.clone())) .unwrap_or_else(|err| { log::warn!("failed writing to cache for 'get_block_by_hash': {:?}", err) }); } - Ok(block) + block }) .wrap_err("fork http client failed") } @@ -180,12 +176,12 @@ impl ForkSource for HttpForkSource { .get_block_by_number(block_number, full_transactions) .await }) - .and_then(|block| { + .map(|block| { if let Some(block) = &block { self.cache .write() - .and_then(|mut guard| { - Ok(guard.insert_block(block.hash, full_transactions, block.clone())) + .map(|mut guard| { + guard.insert_block(block.hash, full_transactions, block.clone()) }) .unwrap_or_else(|err| { log::warn!( @@ -194,7 +190,7 @@ impl ForkSource for HttpForkSource { ) }); } - Ok(block) + block }) 
.wrap_err("fork http client failed") } diff --git a/src/node.rs b/src/node.rs index 2b622301..824a7bad 100644 --- a/src/node.rs +++ b/src/node.rs @@ -1078,7 +1078,7 @@ impl InMemoryNode { timestamp: U256::from(block_context.block_timestamp), l1_batch_number: Some(U64::from(block_context.block_number)), transactions: vec![TransactionVariant::Full( - zksync_types::api::Transaction::from(l2_tx.clone()), + zksync_types::api::Transaction::from(l2_tx), )], gas_used, gas_limit, From 3419520c4e832bc9b2a3c069269a0becaa736d02 Mon Sep 17 00:00:00 2001 From: Nisheeth Barthwal Date: Thu, 14 Sep 2023 19:00:30 +0200 Subject: [PATCH 09/12] fmt --- src/formatter.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/formatter.rs b/src/formatter.rs index 6df67792..5e86effe 100644 --- a/src/formatter.rs +++ b/src/formatter.rs @@ -141,11 +141,11 @@ pub fn print_call(call: &Call, padding: usize, show_calls: &ShowCalls, resolve_h call.revert_reason .as_ref() .map(|s| format!("Revert: {}", s)) - .unwrap_or_else(|| "".to_string()), + .unwrap_or_default(), call.error .as_ref() .map(|s| format!("Error: {}", s)) - .unwrap_or_else(|| "".to_string()), + .unwrap_or_default(), call.gas ); From 2afe4eb9933c4a45d907bcc9ecd81efb5b01adc7 Mon Sep 17 00:00:00 2001 From: Nisheeth Barthwal Date: Fri, 15 Sep 2023 17:38:16 +0200 Subject: [PATCH 10/12] apply code suggestions --- .gitignore | 3 ++- README.md | 2 +- src/cache.rs | 54 ++++++++++++++++++++++++++++++++++++++--- src/hardhat.rs | 4 +-- src/http_fork_source.rs | 4 +++ 5 files changed, 60 insertions(+), 7 deletions(-) diff --git a/.gitignore b/.gitignore index d11348c1..106a7b11 100644 --- a/.gitignore +++ b/.gitignore @@ -13,4 +13,5 @@ etc/**/*.zbin !.vscode/launch.json !.vscode/tasks.json -*.log \ No newline at end of file +*.log +.cache \ No newline at end of file diff --git a/README.md b/README.md index 6d79e853..d39de19c 100644 --- a/README.md +++ b/README.md @@ -69,7 +69,7 @@ era_test_node --log=error --log-file-path=run.log run ## 📃 Caching -The node will cache certain network request by default to disk in the `.cache` directory. Alternatively the caching can be disabled tor set to in-memory only +The node will cache certain network request by default to disk in the `.cache` directory. Alternatively the caching can be disabled or set to in-memory only via the `--cache=none|memory|disk` parameter. 
```bash diff --git a/src/cache.rs b/src/cache.rs index 389e9496..b5c98c31 100644 --- a/src/cache.rs +++ b/src/cache.rs @@ -54,9 +54,19 @@ impl Cache { if let CacheConfig::Disk { dir, reset } = &config { if *reset { - fs::remove_dir_all(Path::new(dir)).unwrap_or_else(|err| { - log::error!("failed removing cache from disk: {:?}", err) - }); + for cache_type in [ + CACHE_TYPE_BLOCKS_FULL, + CACHE_TYPE_BLOCKS_MIN, + CACHE_TYPE_BLOCK_RAW_TRANSACTIONS, + CACHE_TYPE_TRANSACTIONS, + ] { + fs::remove_dir_all(Path::new(dir).join(cache_type)).unwrap_or_else(|err| { + log::warn!("failed removing directory {:?}: {:?}", Path::new(dir).join(cache_type), err) + }); + } + + fs::remove_dir(Path::new(dir)) + .unwrap_or_else(|err| log::warn!("failed removing cache directory: {:?}", err)); } for cache_type in [ @@ -492,4 +502,42 @@ mod tests { assert_eq!(None, new_cache.get_block_raw_transactions(&0)); assert_eq!(None, new_cache.get_transaction(&H256::zero())); } + + #[test] + fn test_cache_config_disk_only_resets_created_data_on_disk() { + let cache_dir = TempDir::new("cache-test").expect("failed creating temporary dir"); + let cache_dir_path = cache_dir + .path() + .to_str() + .expect("invalid dir name") + .to_string(); + let mut cache = Cache::new(CacheConfig::Disk { + dir: cache_dir_path.clone(), + reset: true, + }); + + cache.insert_transaction(H256::zero(), Default::default()); + let cached_tx_file = cache_dir + .path() + .join(CACHE_TYPE_TRANSACTIONS) + .join(format!("{:#x}", H256::zero())); + assert!( + cached_tx_file.exists(), + "cached transaction did not exist on disk" + ); + + let random_file_path = cache_dir.path().join("foobar.txt"); + _ = File::create(&random_file_path).expect("failed creating random file"); + + Cache::new(CacheConfig::Disk { + dir: cache_dir_path, + reset: true, + }); + + assert!( + !cached_tx_file.exists(), + "cached transaction was not reset on disk" + ); + assert!(random_file_path.exists(), "random file was reset from disk"); + } } diff --git a/src/hardhat.rs b/src/hardhat.rs index 7982f3a3..06859da1 100644 --- a/src/hardhat.rs +++ b/src/hardhat.rs @@ -71,7 +71,7 @@ impl HardhatNamespaceT inner_guard .fork_storage .set_value(balance_key, u256_to_h256(balance)); - log::debug!( + log::info!( "👷 Balance for address {:?} has been manually set to {} Wei", address, balance @@ -114,7 +114,7 @@ impl HardhatNamespaceT } deployment_nonce = nonce; let enforced_full_nonce = nonces_to_full_nonce(account_nonce, deployment_nonce); - log::debug!( + log::info!( "👷 Nonces for address {:?} have been set to {}", address, nonce diff --git a/src/http_fork_source.rs b/src/http_fork_source.rs index 70c703af..cc38c9e6 100644 --- a/src/http_fork_source.rs +++ b/src/http_fork_source.rs @@ -65,6 +65,7 @@ impl ForkSource for HttpForkSource { .read() .map(|guard| guard.get_transaction(&hash).cloned()) { + log::debug!("using cached transaction for {hash}"); return Ok(Some(transaction)); } @@ -97,6 +98,7 @@ impl ForkSource for HttpForkSource { .read() .map(|guard| guard.get_block_raw_transactions(&number).cloned()) { + log::debug!("using cached raw transactions for block {block_number}"); return Ok(transaction); } @@ -131,6 +133,7 @@ impl ForkSource for HttpForkSource { .read() .map(|guard| guard.get_block(&hash, full_transactions).cloned()) { + log::debug!("using cached block for {hash}"); return Ok(Some(block)); } @@ -167,6 +170,7 @@ impl ForkSource for HttpForkSource { .and_then(|hash| guard.get_block(hash, full_transactions).cloned()) }) }) { + log::debug!("using cached block for {block_number}"); 
return Ok(Some(block)); } From 93d1b86c58f88af8488d0986b20a4b8aa6e7bf3f Mon Sep 17 00:00:00 2001 From: Nisheeth Barthwal Date: Fri, 15 Sep 2023 18:08:05 +0200 Subject: [PATCH 11/12] fmt --- src/cache.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/cache.rs b/src/cache.rs index b5c98c31..b4081f90 100644 --- a/src/cache.rs +++ b/src/cache.rs @@ -61,7 +61,11 @@ impl Cache { CACHE_TYPE_TRANSACTIONS, ] { fs::remove_dir_all(Path::new(dir).join(cache_type)).unwrap_or_else(|err| { - log::warn!("failed removing directory {:?}: {:?}", Path::new(dir).join(cache_type), err) + log::warn!( + "failed removing directory {:?}: {:?}", + Path::new(dir).join(cache_type), + err + ) }); } From 3ece8e019931f5446473145351c78b2d43028922 Mon Sep 17 00:00:00 2001 From: Nisheeth Barthwal Date: Sat, 16 Sep 2023 14:13:12 +0200 Subject: [PATCH 12/12] use derived default --- src/cache.rs | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/src/cache.rs b/src/cache.rs index b4081f90..3cf0e91d 100644 --- a/src/cache.rs +++ b/src/cache.rs @@ -20,17 +20,15 @@ const CACHE_TYPE_TRANSACTIONS: &str = "transactions"; /// None : Caching is disabled /// Memory : Caching is provided in-memory and not persisted across runs /// Disk : Caching is persisted on disk in the provided directory and can be reset -#[derive(Debug, Clone)] +#[derive(Default, Debug, Clone)] pub enum CacheConfig { + #[default] None, Memory, - Disk { dir: String, reset: bool }, -} - -impl std::default::Default for CacheConfig { - fn default() -> Self { - CacheConfig::None - } + Disk { + dir: String, + reset: bool, + }, } /// A general purpose cache.