diff --git a/Cargo.lock b/Cargo.lock
index b71f607243..90eb307374 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -7288,6 +7288,7 @@ dependencies = [
  "starknet-gateway-types",
  "starknet_api",
  "tempfile",
+ "test-log",
  "thiserror",
  "time",
  "tokio",
diff --git a/crates/common/src/state_update.rs b/crates/common/src/state_update.rs
index 78cc5f681b..e44b64adf7 100644
--- a/crates/common/src/state_update.rs
+++ b/crates/common/src/state_update.rs
@@ -398,6 +398,12 @@ impl<'a> From<&'a StateUpdate> for StateUpdateRef<'a> {
     }
 }
 
+impl<'a> From<&'a mut StateUpdate> for StateUpdateRef<'a> {
+    fn from(state_update: &'a mut StateUpdate) -> Self {
+        Self::from(state_update as &'a StateUpdate)
+    }
+}
+
 impl<'a> From<&'a StateUpdateData> for StateUpdateRef<'a> {
     fn from(state_update: &'a StateUpdateData) -> Self {
         Self {
@@ -432,6 +438,12 @@ impl<'a> From<&'a StateUpdateData> for StateUpdateRef<'a> {
     }
 }
 
+impl<'a> From<&'a mut StateUpdateData> for StateUpdateRef<'a> {
+    fn from(state_update: &'a mut StateUpdateData) -> Self {
+        Self::from(state_update as &'a StateUpdateData)
+    }
+}
+
 impl StorageRef<'_> {
     pub fn iter(&self) -> StorageRefIter<'_> {
         match self {
diff --git a/crates/p2p/src/client/peer_agnostic/traits.rs b/crates/p2p/src/client/peer_agnostic/traits.rs
index c96448e732..312b78acdd 100644
--- a/crates/p2p/src/client/peer_agnostic/traits.rs
+++ b/crates/p2p/src/client/peer_agnostic/traits.rs
@@ -33,7 +33,7 @@ pub trait TransactionStream {
         start: BlockNumber,
         stop: BlockNumber,
         transaction_count_stream: impl Stream> + Send + 'static,
-    ) -> impl Stream>;
+    ) -> impl Stream> + Send;
 }
 
 pub trait StateDiffStream {
@@ -47,7 +47,7 @@
         start: BlockNumber,
         stop: BlockNumber,
         state_diff_length_stream: impl Stream> + Send + 'static,
-    ) -> impl Stream>;
+    ) -> impl Stream> + Send;
 }
 
 pub trait ClassStream {
@@ -56,7 +56,7 @@
         start: BlockNumber,
         stop: BlockNumber,
         declared_class_count_stream: impl Stream> + Send + 'static,
-    ) -> impl Stream>;
+    ) -> impl Stream> + Send;
 }
 
 pub trait EventStream {
diff --git a/crates/pathfinder/Cargo.toml b/crates/pathfinder/Cargo.toml
index 86ee8836a8..b8978205a6 100644
--- a/crates/pathfinder/Cargo.toml
+++ b/crates/pathfinder/Cargo.toml
@@ -92,5 +92,6 @@ rstest = { workspace = true }
 serde_with = { workspace = true }
 starknet-gateway-test-fixtures = { path = "../gateway-test-fixtures" }
 starknet_api = { workspace = true }
+test-log = { workspace = true, features = ["trace"] }
 tokio = { workspace = true, features = ["test-util"] }
 warp = { workspace = true }
diff --git a/crates/pathfinder/proptest-regressions/p2p_network/sync_handlers/tests.txt b/crates/pathfinder/proptest-regressions/p2p_network/sync_handlers/tests.txt
index 826e2389a2..31daaf08ff 100644
--- a/crates/pathfinder/proptest-regressions/p2p_network/sync_handlers/tests.txt
+++ b/crates/pathfinder/proptest-regressions/p2p_network/sync_handlers/tests.txt
@@ -11,3 +11,6 @@ cc 362172e92f8c3bb8b57add0452a53575bef5640a22e0d9cfcabe821c5150086f # shrinks to
 cc 3c0631f4271587b05d7638c8f95a767a85062d1ffb771167a3b24028376315df # shrinks to (num_blocks, seed, start_block, limit, step, direction) = (7, 9090751796217969733, 1, 4, Step(1), Backward)
 cc e61a757eb84e98a3e8429942c16b6937603d36bd6272a92db52a392df2370a84 # shrinks to (num_blocks, seed, start_block, limit, step, direction) = (9, 12221019298661150784, 5, 3, Step(1), Backward)
 cc 86c701dc281422d164cfcdd813470d0908f8da74089472c547085c89fd4fc74b # shrinks to (num_blocks, seed, start_block, limit, step, direction) = (11, 16005500644522549812, 0, 5, Step(1), Forward)
+cc 88947174b63dc40a8ecadc8258db12c16449fe512c4729e350ded4c7b4a34baf # shrinks to (num_blocks, seed, start_block, limit, step, direction) = (0, 0, 0, 1, Step(1), Forward)
+cc 48a4cce9020765acde8c0046cc73e72ef238865b8712045d0a95c23fb4062070 # shrinks to (num_blocks, seed, start_block, limit, step, direction) = (0, 0, 0, 1, Step(1), Forward)
+cc bb0bb73a6e6719184832c149727d3e166cda4c891355f25ba8f8b4ed839ea3c2 # shrinks to (num_blocks, seed, start_block, limit, step, direction) = (0, 0, 0, 1, Step(1), Forward)
diff --git a/crates/pathfinder/src/bin/pathfinder/main.rs b/crates/pathfinder/src/bin/pathfinder/main.rs
index 8e69014761..641b71a07c 100644
--- a/crates/pathfinder/src/bin/pathfinder/main.rs
+++ b/crates/pathfinder/src/bin/pathfinder/main.rs
@@ -630,6 +630,8 @@ fn start_p2p_sync(
     l1_checkpoint_override: Option,
     verify_tree_hashes: bool,
 ) -> tokio::task::JoinHandle> {
+    use pathfinder_block_hashes::BlockHashDb;
+
     let sync = pathfinder_lib::sync::Sync {
         storage,
         p2p: p2p_client,
@@ -637,10 +639,10 @@
         eth_address: pathfinder_context.l1_core_address,
         fgw_client: pathfinder_context.gateway,
         chain_id: pathfinder_context.network_id,
-        chain: pathfinder_context.network,
         public_key: gateway_public_key,
         l1_checkpoint_override,
         verify_tree_hashes,
+        block_hash_db: Some(BlockHashDb::new(pathfinder_context.network)),
     };
     tokio::spawn(sync.run())
 }
diff --git a/crates/pathfinder/src/p2p_network/sync_handlers/tests.rs b/crates/pathfinder/src/p2p_network/sync_handlers/tests.rs
index 6f3e6375d7..ee8a12260e 100644
--- a/crates/pathfinder/src/p2p_network/sync_handlers/tests.rs
+++ b/crates/pathfinder/src/p2p_network/sync_handlers/tests.rs
@@ -247,14 +247,15 @@ mod prop {
         // These are the items that we expect to be read from the db
         // Grouped by block number
         let expected = overlapping::get(in_db, start_block, limit, step, num_blocks, direction).into_iter()
-            .map(|Block { header, state_update, ..
}| {
+                let state_update = state_update.unwrap();
                 (
                     header.header.number, // Block number
                     state_update.contract_updates.into_iter().map(|(k, v)| (k, v.into())).collect::>(),
                     state_update.system_contract_updates,
                     state_update.declared_sierra_classes,
                     state_update.declared_cairo_classes,
-                )
+                )}
             ).collect::>();
         // Run the handler
         let request = StateDiffsRequest { iteration: Iteration { start: BlockNumberOrHash::Number(start_block), limit, step, direction, } };
@@ -330,6 +331,7 @@ mod prop {
     fn get_classes((num_blocks, seed, start_block, limit, step, direction) in strategy::composite()) {
         // Fake storage with a given number of blocks
         let (storage, in_db) = fixtures::storage_with_seed(seed, num_blocks);
+
         // Compute the overlapping set between the db and the request
         // These are the items that we expect to be read from the db
         // Grouped by block number
@@ -344,6 +346,7 @@ mod prop {
                     sierra_defs.into_iter().map(|(_, sierra_def, _)| sierra_def).collect::>()
                 )
             ).collect::>();
+
         // Run the handler
         let request = ClassesRequest { iteration: Iteration { start: BlockNumberOrHash::Number(start_block), limit, step, direction, } };
         let mut responses = Runtime::new().unwrap().block_on(async {
@@ -372,11 +375,14 @@ mod prop {
         });
 
         for expected_for_block in expected {
-            let actual_cairo_for_block = actual_cairo.drain(..expected_for_block.1.len()).collect::>();
-            let actual_sierra_for_block = actual_sierra.drain(..expected_for_block.2.len()).collect::>();
+            let actual_cairo_for_block = actual_cairo.drain(..expected_for_block.1.len()).collect::>();
+            let actual_sierra_for_block = actual_sierra.drain(..expected_for_block.2.len()).collect::>();
 
-            prop_assert_eq!(expected_for_block.1, actual_cairo_for_block, "block number: {}", expected_for_block.0);
-            prop_assert_eq!(expected_for_block.2, actual_sierra_for_block, "block number: {}", expected_for_block.0);
+            let expected_cairo_for_block = expected_for_block.1.into_iter().collect::>();
+            let expected_sierra_for_block = expected_for_block.2.into_iter().collect::>();
+
+            prop_assert_eq!(expected_cairo_for_block, actual_cairo_for_block, "block number: {}", expected_for_block.0);
+            prop_assert_eq!(expected_sierra_for_block, actual_sierra_for_block, "block number: {}", expected_for_block.0);
         }
     }
 }
@@ -507,8 +513,7 @@ mod prop {
 
 /// Fixtures for prop tests
 mod fixtures {
-    use pathfinder_storage::fake::init::Config;
-    use pathfinder_storage::fake::{with_n_blocks_rng_and_config, Block};
+    use pathfinder_storage::fake::{fill, generate, Block, Config};
     use pathfinder_storage::{Storage, StorageBuilder};
 
     use crate::p2p_network::sync_handlers::MAX_COUNT_IN_TESTS;
@@ -521,8 +526,7 @@ mod prop {
         let storage = StorageBuilder::in_memory().unwrap();
         // Explicitly choose RNG to make sure seeded storage is always reproducible
         let mut rng = rand_chacha::ChaCha12Rng::seed_from_u64(seed);
-        let initializer = with_n_blocks_rng_and_config(
-            &storage,
+        let blocks = generate::with_rng_and_config(
            num_blocks.try_into().unwrap(),
            &mut rng,
            Config {
@@ -530,7 +534,9 @@ mod prop {
                 ..Default::default()
             },
         );
-        (storage, initializer)
+        fill(&storage, &blocks, None);
+
+        (storage, blocks)
     }
 }
diff --git a/crates/pathfinder/src/sync.rs b/crates/pathfinder/src/sync.rs
index ca554cbfd2..156937d91a 100644
--- a/crates/pathfinder/src/sync.rs
+++ b/crates/pathfinder/src/sync.rs
@@ -1,14 +1,21 @@
 #![allow(dead_code, unused)]
 
-use core::panic;
-use std::sync::Arc;
 use std::time::Duration;
 
 use anyhow::Context;
 use error::SyncError;
 use futures::{pin_mut, Stream, StreamExt};
-use
p2p::client::peer_agnostic::Client as P2PClient;
+use p2p::client::peer_agnostic::traits::{
+    BlockClient,
+    ClassStream,
+    EventStream,
+    HeaderStream,
+    StateDiffStream,
+    StreamItem,
+    TransactionStream,
+};
 use p2p::PeerData;
+use pathfinder_block_hashes::BlockHashDb;
 use pathfinder_common::error::AnyhowExt;
 use pathfinder_common::{
     block_hash,
@@ -42,24 +49,35 @@ mod transactions;
 
 const CHECKPOINT_MARGIN: u64 = 10;
 
-pub struct Sync {
+pub struct Sync<P, G> {
     pub storage: pathfinder_storage::Storage,
-    pub p2p: P2PClient,
+    pub p2p: P,
     pub eth_client: pathfinder_ethereum::EthereumClient,
     pub eth_address: H160,
-    pub fgw_client: GatewayClient,
-    pub chain: Chain,
+    pub fgw_client: G,
     pub chain_id: ChainId,
     pub public_key: PublicKey,
     pub l1_checkpoint_override: Option,
     pub verify_tree_hashes: bool,
+    pub block_hash_db: Option<BlockHashDb>,
 }
 
-impl Sync {
+impl<P, G> Sync<P, G>
+where
+    P: BlockClient
+        + ClassStream
+        + EventStream
+        + HeaderStream
+        + StateDiffStream
+        + TransactionStream
+        + Clone
+        + Send
+        + 'static,
+    G: GatewayApi + Clone + Send + 'static,
+{
     pub async fn run(self) -> anyhow::Result<()> {
         let (next, parent_hash) = self.checkpoint_sync().await?;
 
-        // TODO: depending on how this is implemented, we might want to loop around it.
         self.track_sync(next, parent_hash).await
     }
 
@@ -114,11 +132,10 @@ impl Sync {
             eth_client: self.eth_client.clone(),
             eth_address: self.eth_address,
             fgw_client: self.fgw_client.clone(),
-            chain: self.chain,
             chain_id: self.chain_id,
             public_key: self.public_key,
             verify_tree_hashes: self.verify_tree_hashes,
-            block_hash_db: Some(pathfinder_block_hashes::BlockHashDb::new(self.chain)),
+            block_hash_db: self.block_hash_db.clone(),
         }
         .run(checkpoint)
         .await;
@@ -173,16 +190,15 @@ impl Sync {
         loop {
             let mut result = track::Sync {
-                latest: LatestStream::spawn(self.fgw_client.clone(), Duration::from_secs(2)),
+                latest: LatestStream::spawn(self.fgw_client.clone(), Duration::from_secs(2), false),
                 p2p: self.p2p.clone(),
                 storage: self.storage.clone(),
-                chain: self.chain,
                 chain_id: self.chain_id,
                 public_key: self.public_key,
-                block_hash_db: Some(pathfinder_block_hashes::BlockHashDb::new(self.chain)),
                 verify_tree_hashes: self.verify_tree_hashes,
+                block_hash_db: self.block_hash_db.clone(),
             }
-            .run(next, parent_hash, self.fgw_client.clone())
+            .run(&mut next, &mut parent_hash, self.fgw_client.clone())
             .await;
 
             match result {
@@ -231,7 +247,10 @@ impl Stream for LatestStream {
 }
 
 impl LatestStream {
-    fn spawn(fgw: GatewayClient, head_poll_interval: Duration) -> Self {
+    fn spawn<G>(fgw: G, head_poll_interval: Duration, single_shot: bool) -> Self
+    where
+        G: GatewayApi + Clone + Send + 'static,
+    {
         // No buffer, for backpressure
         let (tx, rx) = watch::channel((BlockNumber::GENESIS, BlockHash::ZERO));
 
@@ -258,7 +277,6 @@ impl LatestStream {
                 }
 
                 tx.send_if_modified(|current| {
-                    // TODO: handle reorgs correctly
                     if *current != latest {
                         tracing::info!(?latest, "LatestStream");
                         *current = latest;
@@ -267,6 +285,10 @@ impl LatestStream {
                         false
                     }
                 });
+
+                if single_shot {
+                    return;
+                }
             }
         });
 
@@ -276,3 +298,725 @@ impl LatestStream {
         }
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use std::collections::{HashMap, VecDeque};
+    use std::ops::{Range, RangeInclusive};
+    use std::sync::atomic::{AtomicU64, Ordering};
+    use std::sync::{Arc, Mutex};
+
+    use fake::{Fake, Faker};
+    use futures::stream;
+    use http::header;
+    use p2p::client::types::{
+        ClassDefinition,
+        ClassDefinitionsError,
+        EventsForBlockByTransaction,
+        EventsResponseStreamFailure,
+        Receipt as P2PReceipt,
+        StateDiffsError,
+    };
+    use p2p::libp2p::PeerId;
+
use pathfinder_common::event::Event; + use pathfinder_common::receipt::Receipt; + use pathfinder_common::state_update::StateUpdateData; + use pathfinder_common::transaction::Transaction; + use pathfinder_common::{ + BlockHeader, + BlockId, + ClassHash, + SierraHash, + SignedBlockHeader, + TransactionHash, + }; + use pathfinder_crypto::signature::ecdsa_sign; + use pathfinder_crypto::Felt; + use pathfinder_ethereum::EthereumClient; + use pathfinder_storage::fake::{generate, Block, Config}; + use pathfinder_storage::{Storage, StorageBuilder}; + use rand::Rng; + use rayon::iter::Rev; + use rstest::rstest; + use sha3::digest::consts::U6; + use starknet_gateway_types::error::SequencerError; + + use super::*; + use crate::state::block_hash::{ + calculate_event_commitment, + calculate_receipt_commitment, + calculate_transaction_commitment, + compute_final_hash, + BlockHeaderData, + }; + use crate::state::update_starknet_state; + + /// Generate a fake chain of blocks as in + /// [`pathfinder_storage::fake::generate`] but with additional + /// guarantees: + /// - all commitments computed correctly + /// - all block hashes computed correctly + /// - all blocks signed with the same private key + /// + /// Returns: public key, generated blocks. + pub fn generate_fake_blocks(num_blocks: usize) -> (PublicKey, Vec) { + let private_key = Faker.fake(); + let public_key = PublicKey(pathfinder_crypto::signature::get_pk(private_key).unwrap()); + let blocks = generate::with_config( + num_blocks, + Config { + calculate_block_hash: Box::new(|header: &BlockHeader| { + compute_final_hash(&BlockHeaderData::from_header(header)) + }), + sign_block_hash: Box::new(move |block_hash| ecdsa_sign(private_key, block_hash.0)), + calculate_transaction_commitment: Box::new(calculate_transaction_commitment), + calculate_receipt_commitment: Box::new(calculate_receipt_commitment), + calculate_event_commitment: Box::new(calculate_event_commitment), + update_tries: Box::new(update_starknet_state), + }, + ); + (public_key, blocks) + } + + async fn sync_done_watch(storage: Storage, expected_last: BlockNumber) { + let mut start = std::time::Instant::now(); + tokio::task::spawn_blocking(move || loop { + std::thread::sleep(Duration::from_millis(200)); + let mut db = storage.connection().unwrap(); + let db = db.transaction().unwrap(); + let header = db.block_header(expected_last.into()).unwrap(); + if let Some(header) = header { + if header.number == expected_last { + let after = start.elapsed(); + tracing::info!(?after, "Sync done"); + break; + } + } + }) + .await + .unwrap(); + } + + #[derive(Copy, Clone, Debug)] + struct ErrorSetup { + fatal_at: Option, + expected_last_synced: LastSynced, + } + + #[derive(Copy, Clone, Debug)] + enum LastSynced { + Full(BlockNumber), + HeadersOnly(BlockNumber), + } + + impl LastSynced { + fn block_number(&self) -> BlockNumber { + match self { + LastSynced::Full(b) | LastSynced::HeadersOnly(b) => *b, + } + } + + fn is_full(&self) -> bool { + matches!(self, LastSynced::Full(_)) + } + } + + const ERROR_CONSUMED: u64 = u64::MAX; + const CHECKPOINT_BLOCKS: u64 = 10; + const TRACK_BLOCKS: u64 = CHECKPOINT_MARGIN - 1; + const ALL_BLOCKS: u64 = CHECKPOINT_BLOCKS + TRACK_BLOCKS; + const LAST_IN_CHECKPOINT: BlockNumber = BlockNumber::new_or_panic(CHECKPOINT_BLOCKS - 1); + const LAST_IN_TRACK: BlockNumber = BlockNumber::new_or_panic(ALL_BLOCKS - 1); + + #[rstest] + #[case::sync_restarts_after_recoverable_errors_and_succeeds(ErrorSetup { + // Each sync stage will experience a recoverable error at random blocks + 
fatal_at: None, + // All blocks will be stored successfully + expected_last_synced: LastSynced::Full(LAST_IN_TRACK), + })] + #[case::checkpoint_bails_after_fatal_error(ErrorSetup { + fatal_at: Some(LAST_IN_CHECKPOINT), + // All headers are stored but transactions fail + expected_last_synced: LastSynced::HeadersOnly(LAST_IN_CHECKPOINT), + })] + #[case::track_bails_after_fatal_error(ErrorSetup { + fatal_at: Some(LAST_IN_TRACK), + // The last block is not stored + expected_last_synced: LastSynced::Full(LAST_IN_TRACK - 1), + })] + #[test_log::test(tokio::test)] + async fn sync(#[case] error_setup: ErrorSetup) { + let (public_key, blocks) = generate_fake_blocks(ALL_BLOCKS as usize); + let last_header = &blocks.last().unwrap().header.header; + let last_checkpoint_header = &blocks[LAST_IN_CHECKPOINT.get() as usize].header.header; + let storage = StorageBuilder::in_tempdir().unwrap(); + + let expected_last_synced_block = error_setup.expected_last_synced.block_number(); + let expect_fully_synced_blocks = error_setup.expected_last_synced.is_full(); + + let error_trigger = ErrorTrigger::new(error_setup.fatal_at); + + let sync = Sync { + storage: storage.clone(), + p2p: FakeP2PClient { + blocks: blocks.clone(), + error_trigger: error_trigger.clone(), + storage: storage.clone(), + }, + // We use `l1_checkpoint_override` instead + eth_client: EthereumClient::new("https://unused.com").unwrap(), + eth_address: H160::zero(), // Unused + fgw_client: FakeFgw { + head: (last_header.number, last_header.hash), + }, + chain_id: ChainId::SEPOLIA_TESTNET, + public_key, + l1_checkpoint_override: Some(EthereumStateUpdate { + state_root: last_checkpoint_header.state_commitment, + block_number: last_checkpoint_header.number, + block_hash: last_checkpoint_header.hash, + }), + verify_tree_hashes: true, + block_hash_db: None, + }; + + tokio::select! 
{ + _ = tokio::time::timeout(Duration::from_secs(10), sync.run()) => (), + _ = sync_done_watch(storage.clone(), expected_last_synced_block) => (), + } + + assert!(error_trigger.all_errors_triggered()); + + let mut db = storage.connection().unwrap(); + let db = db.transaction().unwrap(); + for mut expected in blocks + .into_iter() + .take_while(|block| block.header.header.number <= expected_last_synced_block) + { + let block_number = expected.header.header.number; + let block_id = block_number.into(); + let header = db.block_header(block_id).unwrap().unwrap(); + let signature = db.signature(block_id).unwrap().unwrap(); + + pretty_assertions_sorted::assert_eq!( + header, + expected.header.header, + "block {}", + block_number + ); + pretty_assertions_sorted::assert_eq!( + signature, + expected.header.signature, + "block {}", + block_number + ); + + if expect_fully_synced_blocks { + let transaction_data = db.transaction_data_for_block(block_id).unwrap().unwrap(); + let state_update_data: StateUpdateData = + db.state_update(block_id).unwrap().unwrap().into(); + let declared = db.declared_classes_at(block_id).unwrap().unwrap(); + + let mut cairo_defs = HashMap::new(); + let mut sierra_defs = HashMap::new(); + + for class_hash in declared { + let class = db.class_definition(class_hash).unwrap().unwrap(); + match db.casm_hash(class_hash).unwrap() { + Some(casm_hash) => { + let casm = db.casm_definition(class_hash).unwrap().unwrap(); + sierra_defs.insert(SierraHash(class_hash.0), (class, casm)); + } + None => { + cairo_defs.insert(class_hash, class); + } + } + } + + pretty_assertions_sorted::assert_eq!( + header.state_diff_commitment, + expected.header.header.state_diff_commitment, + "block {}", + block_number + ); + pretty_assertions_sorted::assert_eq!( + header.state_diff_length, + expected.header.header.state_diff_length, + "block {}", + block_number + ); + pretty_assertions_sorted::assert_eq!( + transaction_data, + expected.transaction_data, + "block {}", + block_number + ); + pretty_assertions_sorted::assert_eq!( + state_update_data, + expected.state_update.unwrap().into(), + "block {}", + block_number + ); + pretty_assertions_sorted::assert_eq!( + cairo_defs, + expected.cairo_defs.into_iter().collect::>(), + "block {}", + block_number + ); + pretty_assertions_sorted::assert_eq!( + sierra_defs, + expected + .sierra_defs + .into_iter() + // All sierra fixtures are not compile-able + .map(|(h, s, _)| (h, (s, b"I'm from the fgw!".to_vec()))) + .collect::>(), + "block {}", + block_number + ); + } + } + } + + #[derive(Clone)] + struct FakeP2PClient { + pub blocks: Vec, + pub error_trigger: ErrorTrigger, + pub storage: Storage, + } + + #[derive(Clone)] + enum ErrorTrigger { + Recoverable(Arc>), + Fatal(Arc), + } + + impl ErrorTrigger { + fn new(fatal_at: Option) -> Self { + match fatal_at { + Some(fatal_at) => Self::Fatal(Arc::new(AtomicU64::new(fatal_at.get()))), + None => Self::Recoverable(Arc::new( + (0..=4) + .map(|_| AtomicU64::new((0..CHECKPOINT_BLOCKS).fake())) + .chain( + (5..=9).map(|_| AtomicU64::new((CHECKPOINT_BLOCKS..ALL_BLOCKS).fake())), + ) + .collect(), + )), + } + } + + fn fatal(&self, block: BlockNumber) -> bool { + match self { + Self::Fatal(at) => at + .compare_exchange( + block.get(), + ERROR_CONSUMED, + Ordering::Relaxed, + Ordering::Relaxed, + ) + .is_ok(), + Self::Recoverable { .. } => false, + } + } + + // Sync stages: + // - 0: checkpoint, header + // ... + // - 4: checkpoint, event + // - 5: track, header + // ... 
+ // - 9: track, event + fn recoverable(&self, block: BlockNumber, sync_stage: usize) -> bool { + match self { + Self::Fatal(_) => false, + Self::Recoverable(triggers) => { + let at = &triggers[sync_stage]; + at.compare_exchange( + block.get(), + ERROR_CONSUMED, + Ordering::Relaxed, + Ordering::Relaxed, + ) + .is_ok() + } + } + } + + fn all_errors_triggered(&self) -> bool { + match self { + Self::Fatal(at) => at.load(Ordering::Relaxed) == ERROR_CONSUMED, + Self::Recoverable(triggers) => triggers + .iter() + .all(|at| at.load(Ordering::Relaxed) == ERROR_CONSUMED), + } + } + } + + impl FakeP2PClient { + fn blocks( + mut self, + start: BlockNumber, + stop: BlockNumber, + reverse: bool, + map_fn: F, + ) -> Vec + where + F: FnMut(Block) -> T, + { + let mut blocks = self + .blocks + .into_iter() + .filter_map(move |b| { + let n = b.header.header.number; + (n >= start && n <= stop).then_some(b) + }) + .collect::>(); + + if reverse { + blocks.reverse(); + } + + blocks.into_iter().map(map_fn).collect() + } + } + + impl HeaderStream for FakeP2PClient { + fn header_stream( + self, + start: BlockNumber, + stop: BlockNumber, + reverse: bool, + ) -> impl Stream> + Send { + let error_trigger = self.error_trigger.clone(); + + stream::iter(self.blocks(start, stop, reverse, |mut b| { + let block = b.header.header.number; + + if error_trigger.recoverable(block, 0) || error_trigger.recoverable(block, 5) { + tracing::debug!(%block, + "FakeP2PClient::header_stream triggering recoverable error at", + ); + // This will cause discontinuity + b.header.header = Faker.fake(); + } + + PeerData::for_tests(b.header) + })) + } + } + + impl TransactionStream for FakeP2PClient { + fn transaction_stream( + self, + start: BlockNumber, + stop: BlockNumber, + _: impl Stream> + Send + 'static, + ) -> impl Stream> + Send + { + let error_trigger = self.error_trigger.clone(); + + stream::iter(self.blocks(start, stop, false, |mut b| { + let block = b.header.header.number; + + if error_trigger.recoverable(block, 1) { + tracing::debug!(%block, + "FakeP2PClient::transaction_stream triggering recoverable error at", + ); + // This will cause transaction commitment mismatch + b.transaction_data.pop(); + } + + if error_trigger.fatal(block) { + tracing::debug!(%block, + "FakeP2PClient::transaction_stream triggering fatal error at", + ); + return Err(anyhow::anyhow!("Fatal error at block {block}",)); + } + + Ok(PeerData::for_tests(( + b.transaction_data + .into_iter() + .map(|(t, r, _)| (t, r.into())) + .collect(), + b.header.header.number, + ))) + })) + } + } + + impl StateDiffStream for FakeP2PClient { + fn state_diff_stream( + self, + start: BlockNumber, + stop: BlockNumber, + _: impl Stream> + Send + 'static, + ) -> impl Stream> + Send { + let error_trigger = self.error_trigger.clone(); + + stream::iter(self.blocks(start, stop, false, |mut b| { + let block = b.header.header.number; + + if error_trigger.recoverable(block, 2) { + tracing::debug!(%block, + "FakeP2PClient::state_diff_stream triggering recoverable error at", + ); + // This will cause commitment mismatch + b.state_update + .as_mut() + .unwrap() + .contract_updates + .insert(Faker.fake(), Faker.fake()); + } + + Ok(PeerData::for_tests(( + b.state_update.unwrap().into(), + b.header.header.number, + ))) + })) + } + } + + impl ClassStream for FakeP2PClient { + fn class_stream( + self, + start: BlockNumber, + stop: BlockNumber, + _: impl Stream> + Send + 'static, + ) -> impl Stream> + Send { + let error_trigger = self.error_trigger.clone(); + + stream::iter( + self.blocks(start, 
stop, false, |mut b| { + let block = b.header.header.number; + + if error_trigger.recoverable(block, 3) { + tracing::debug!(%block, + "FakeP2PClient::class_stream triggering recoverable error at", + ); + // This will trigger unexpected class + b.cairo_defs.push((Faker.fake(), Faker.fake())); + } + + let block_number = b.header.header.number; + b.cairo_defs + .into_iter() + .map(move |(hash, definition)| { + Ok(PeerData::for_tests(ClassDefinition::Cairo { + block_number, + definition, + hash, + })) + }) + .chain(b.sierra_defs.into_iter().map( + move |(hash, sierra_definition, _)| { + Ok(PeerData::for_tests(ClassDefinition::Sierra { + block_number, + sierra_definition, + hash, + })) + }, + )) + }) + .into_iter() + .flatten(), + ) + } + } + + impl EventStream for FakeP2PClient { + fn event_stream( + self, + start: BlockNumber, + stop: BlockNumber, + _: impl Stream> + Send + 'static, + ) -> impl Stream> { + let error_trigger = self.error_trigger.clone(); + + stream::iter(self.blocks(start, stop, false, |mut b| { + let block = b.header.header.number; + + if error_trigger.recoverable(block, 4) { + tracing::debug!(%block, + "FakeP2PClient::event_stream triggering recoverable error at", + ); + // This will trigger event commitment mismatch + b.transaction_data.last_mut().unwrap().2.push(Faker.fake()); + } + + Ok(PeerData::for_tests(( + b.header.header.number, + b.transaction_data + .into_iter() + .map(|(t, _, e)| (t.hash, e)) + .collect(), + ))) + })) + } + } + + impl BlockClient for FakeP2PClient { + async fn transactions_for_block( + self, + block: BlockNumber, + ) -> Option<( + PeerId, + impl Stream> + Send, + )> { + let mut tr = self + .blocks + .iter() + .find(|b| b.header.header.number == block) + .unwrap() + .transaction_data + .iter() + .map(|(t, r, e)| Ok((t.clone(), P2PReceipt::from(r.clone())))) + .collect::>>(); + + if self.error_trigger.recoverable(block, 6) { + tracing::debug!(%block, + "FakeP2PClient::transactions_for_block triggering recoverable error at", + ); + // This will cause transaction hash mismatch + tr.last_mut().unwrap().as_mut().unwrap().0.variant = Faker.fake(); + } + + if self.error_trigger.fatal(block) { + tracing::debug!(%block, + "FakeP2PClient::transactions_for_block triggering fatal error at", + ); + // Returning an error from the "for_block" apis does not trigger a fatal error + // so instead we insert a fake header for this very block to trigger an + // insertion conflict when track is about to store the entire block + let mut db = self.storage.connection().unwrap(); + let db = db.transaction().unwrap(); + let header = BlockHeader { + number: block, + ..Default::default() + }; + db.insert_block_header(&header).unwrap(); + db.commit().unwrap(); + } + + Some((PeerId::random(), stream::iter(tr))) + } + + async fn state_diff_for_block( + self, + block: BlockNumber, + state_diff_length: u64, + ) -> Result, StateDiffsError> { + let mut sd: StateUpdateData = self + .blocks + .iter() + .find(|b| b.header.header.number == block) + .unwrap() + .state_update + .clone() + .unwrap() + .into(); + + assert_eq!(sd.state_diff_length() as u64, state_diff_length); + + if self.error_trigger.recoverable(block, 7) { + tracing::debug!(%block, + "FakeP2PClient::state_diff_for_block triggering recoverable error at", + ); + // This will cause commitment mismatch + sd.contract_updates.insert(Faker.fake(), Faker.fake()); + } + + Ok(Some((PeerId::random(), sd))) + } + + async fn class_definitions_for_block( + self, + block: BlockNumber, + declared_classes_count: u64, + ) -> Result)>, 
ClassDefinitionsError> { + let b = self + .blocks + .iter() + .find(|b| b.header.header.number == block) + .unwrap(); + let mut defs = b + .cairo_defs + .iter() + .map(|(h, x)| ClassDefinition::Cairo { + block_number: block, + definition: x.clone(), + hash: *h, + }) + .chain( + b.sierra_defs + .iter() + .map(|(h, x, _)| ClassDefinition::Sierra { + block_number: block, + sierra_definition: x.clone(), + hash: *h, + }), + ) + .collect::>(); + + if self.error_trigger.recoverable(block, 8) { + tracing::debug!(%block, + "FakeP2PClient::class_definitions_for_block triggering recoverable error at", + ); + // This will cause unexpected class + defs.push(Faker.fake()); + } + + Ok(Some((PeerId::random(), defs))) + } + + async fn events_for_block( + self, + block: BlockNumber, + ) -> Option<( + PeerId, + impl Stream> + Send, + )> { + let mut e = self + .blocks + .iter() + .find(|b| b.header.header.number == block) + .unwrap() + .transaction_data + .iter() + .flat_map(|(t, _, e)| e.iter().map(move |e| (t.hash, e.clone()))) + .map(Ok) + .collect::>(); + + if self.error_trigger.recoverable(block, 9) { + tracing::debug!(%block, + "FakeP2PClient::events_for_block triggering recoverable error at", + ); + // This will trigger commitment mismatch + e.push(Ok(Faker.fake())); + } + + Some((PeerId::random(), stream::iter(e))) + } + } + + #[derive(Clone)] + struct FakeFgw { + head: (BlockNumber, BlockHash), + } + + #[async_trait::async_trait] + impl GatewayApi for FakeFgw { + async fn pending_casm_by_hash(&self, _: ClassHash) -> Result { + Ok(bytes::Bytes::from_static(b"I'm from the fgw!")) + } + + async fn block_header( + &self, + block: BlockId, + ) -> Result<(BlockNumber, BlockHash), SequencerError> { + assert_eq!(block, BlockId::Latest); + Ok(self.head) + } + } +} diff --git a/crates/pathfinder/src/sync/checkpoint.rs b/crates/pathfinder/src/sync/checkpoint.rs index d95f0bd942..e42702836d 100644 --- a/crates/pathfinder/src/sync/checkpoint.rs +++ b/crates/pathfinder/src/sync/checkpoint.rs @@ -7,6 +7,7 @@ use anyhow::Context; use futures::{pin_mut, Stream, StreamExt, TryStreamExt}; use p2p::client::conv::TryFromDto; use p2p::client::peer_agnostic::traits::{ + BlockClient, ClassStream, EventStream, HeaderStream, @@ -14,7 +15,6 @@ use p2p::client::peer_agnostic::traits::{ StreamItem, TransactionStream, }; -use p2p::client::peer_agnostic::Client as P2PClient; use p2p::client::types::{ClassDefinition, EventsForBlockByTransaction, TransactionData}; use p2p::PeerData; use p2p_proto::common::{BlockNumberOrHash, Direction, Iteration}; @@ -47,37 +47,44 @@ use crate::sync::error::SyncError; use crate::sync::stream::{InfallibleSource, Source, SyncReceiver, SyncResult}; use crate::sync::{class_definitions, events, headers, state_updates, transactions}; -#[cfg(test)] -mod fixture; - /// Provides P2P sync capability for blocks secured by L1. #[derive(Clone)] -pub struct Sync { +pub struct Sync { pub storage: Storage, - pub p2p: P2PClient, + pub p2p: P, // TODO: merge these two inside the client. 
pub eth_client: pathfinder_ethereum::EthereumClient, pub eth_address: H160, - pub fgw_client: Client, - pub chain: Chain, + pub fgw_client: G, pub chain_id: ChainId, pub public_key: PublicKey, pub verify_tree_hashes: bool, pub block_hash_db: Option, } -impl Sync { +impl Sync +where + P: ClassStream + + EventStream + + HeaderStream + + StateDiffStream + + TransactionStream + + Clone + + Send + + 'static, + G: GatewayApi + Clone + Send + 'static, +{ #[allow(clippy::too_many_arguments)] pub fn new( storage: Storage, - p2p: P2PClient, + p2p: P, ethereum: (pathfinder_ethereum::EthereumClient, H160), - fgw_client: Client, - chain: Chain, + fgw_client: G, chain_id: ChainId, public_key: PublicKey, l1_anchor_override: Option, verify_tree_hashes: bool, + block_hash_db: Option, ) -> Self { Self { storage, @@ -85,11 +92,10 @@ impl Sync { eth_client: ethereum.0, eth_address: ethereum.1, fgw_client, - chain, chain_id, public_key, verify_tree_hashes, - block_hash_db: Some(pathfinder_block_hashes::BlockHashDb::new(chain)), + block_hash_db, } } @@ -167,7 +173,6 @@ impl Sync { handle_header_stream( self.p2p.clone().header_stream(gap.tail, gap.head, true), gap.head(), - self.chain, self.chain_id, self.public_key, self.block_hash_db.clone(), @@ -301,7 +306,6 @@ impl Sync { async fn handle_header_stream( stream: impl Stream> + Send + 'static, head: (BlockNumber, BlockHash), - chain: Chain, chain_id: ChainId, public_key: PublicKey, block_hash_db: Option, @@ -311,7 +315,7 @@ async fn handle_header_stream( .spawn() .pipe(headers::BackwardContinuity::new(head.0, head.1), 10) .pipe( - headers::VerifyHashAndSignature::new(chain, chain_id, public_key, block_hash_db), + headers::VerifyHashAndSignature::new(chain_id, public_key, block_hash_db), 10, ) .try_chunks(1000, 10) @@ -684,7 +688,7 @@ async fn persist_anchor(storage: Storage, anchor: EthereumStateUpdate) -> anyhow let db = db.transaction().context("Creating database transaction")?; db.upsert_l1_state(&anchor).context("Inserting anchor")?; // TODO: this is a bit dodgy, but is used by the sync process. However it - // destroys some RPC assumptions which we should be aware of. + // destroys some RPC assumptions which we should be aware of. 
db.update_l1_l2_pointer(Some(anchor.block_number)) .context("Updating L1-L2 pointer")?; db.commit().context("Committing database transaction")?; @@ -700,10 +704,7 @@ mod tests { mod handle_header_stream { use assert_matches::assert_matches; - use fake::{Dummy, Fake, Faker}; use futures::stream; - use p2p::libp2p::PeerId; - use p2p_proto::header; use pathfinder_common::{ public_key, BlockCommitmentSignature, @@ -723,13 +724,13 @@ mod tests { StorageCommitment, TransactionCommitment, }; - use pathfinder_storage::fake::{self as fake_storage, Block}; use pathfinder_storage::StorageBuilder; + use rstest::rstest; use serde::Deserialize; use serde_with::{serde_as, DisplayFromStr}; - use super::super::handle_header_stream; use super::*; + use crate::sync::tests::generate_fake_blocks; struct Setup { pub streamed_headers: Vec>, @@ -737,6 +738,7 @@ mod tests { pub storage: Storage, pub head: (BlockNumber, BlockHash), pub public_key: PublicKey, + pub block_hash_db: Option, } #[serde_as] @@ -797,7 +799,7 @@ mod tests { } } - async fn setup() -> Setup { + fn setup_from_fixture() -> Setup { let expected_headers = serde_json::from_str::>(include_str!("fixtures/sepolia_headers.json")) .unwrap() @@ -824,28 +826,54 @@ mod tests { public_key: public_key!( "0x1252b6bce1351844c677869c6327e80eae1535755b611c66b8f46e595b40eea" ), + block_hash_db: Some(pathfinder_block_hashes::BlockHashDb::new( + Chain::SepoliaTestnet, + )), } } - #[tokio::test] - async fn happy_path() { + fn setup_from_fake(num_blocks: usize) -> Setup { + let (public_key, blocks) = generate_fake_blocks(num_blocks); + let expected_headers = blocks.into_iter().map(|b| b.header).collect::>(); + let hdr = &expected_headers.last().unwrap().header; + + Setup { + head: (hdr.number, hdr.hash), + streamed_headers: expected_headers + .iter() + .rev() + .cloned() + .map(PeerData::for_tests) + .collect::>(), + expected_headers, + storage: StorageBuilder::in_tempdir().unwrap(), + public_key, + block_hash_db: None, + } + } + + // These two cases are an implicit verification that [`storage::fake::generate`] + // is just good enough for tests. + #[rstest] + #[case::from_fixture(setup_from_fixture())] + #[case::from_fake(setup_from_fake(10))] + #[test_log::test(tokio::test)] + async fn happy_path(#[case] setup: Setup) { let Setup { streamed_headers, expected_headers, storage, head, public_key, - } = setup().await; + block_hash_db, + } = setup; handle_header_stream( stream::iter(streamed_headers), head, - Chain::SepoliaTestnet, ChainId::SEPOLIA_TESTNET, public_key, - Some(pathfinder_block_hashes::BlockHashDb::new( - Chain::SepoliaTestnet, - )), + block_hash_db, storage.clone(), ) .await @@ -870,6 +898,7 @@ mod tests { pretty_assertions_sorted::assert_eq!(expected_headers, actual_headers); } + #[tokio::test] async fn discontinuity() { let Setup { @@ -878,7 +907,7 @@ mod tests { head, public_key, .. - } = setup().await; + } = setup_from_fixture(); streamed_headers.last_mut().unwrap().data.header.number = BlockNumber::new_or_panic(3); @@ -886,7 +915,6 @@ mod tests { handle_header_stream( stream::iter(streamed_headers), head, - Chain::SepoliaTestnet, ChainId::SEPOLIA_TESTNET, public_key, Some(pathfinder_block_hashes::BlockHashDb::new( @@ -907,14 +935,13 @@ mod tests { head, public_key, .. 
- } = setup().await; + } = setup_from_fixture(); assert_matches!( handle_header_stream( stream::iter(streamed_headers), head, // Causes mismatches for all block hashes because setup assumes Sepolia - Chain::Mainnet, ChainId::MAINNET, public_key, None, @@ -925,29 +952,29 @@ mod tests { ); } - // TODO readd once the signature verification is enabled - // #[tokio::test] - // async fn bad_signature() { - // let Setup { - // streamed_headers, - // storage, - // head, - // .. - // } = setup().await; - - // assert_matches!( - // handle_header_stream( - // stream::iter(streamed_headers), - // head, - // Chain::SepoliaTestnet, - // ChainId::SEPOLIA_TESTNET, - // PublicKey::ZERO, // Invalid public key - // storage.clone(), - // ) - // .await, - // Err(SyncError::BadHeaderSignature(_)) - // ); - // } + #[tokio::test] + async fn bad_signature() { + let Setup { + streamed_headers, + storage, + head, + block_hash_db, + .. + } = setup_from_fixture(); + + assert_matches!( + handle_header_stream( + stream::iter(streamed_headers), + head, + ChainId::SEPOLIA_TESTNET, + PublicKey::ZERO, // Invalid public key + block_hash_db, + storage.clone(), + ) + .await, + Err(SyncError::BadHeaderSignature(_)) + ); + } #[tokio::test] async fn db_failure() { @@ -957,7 +984,7 @@ mod tests { head, public_key, .. - } = setup().await; + } = setup_from_fixture(); let mut db = storage.connection().unwrap(); let db = db.transaction().unwrap(); @@ -972,7 +999,6 @@ mod tests { handle_header_stream( stream::iter(streamed_headers), head, - Chain::SepoliaTestnet, ChainId::SEPOLIA_TESTNET, public_key, Some(pathfinder_block_hashes::BlockHashDb::new( @@ -987,8 +1013,10 @@ mod tests { } mod handle_transaction_stream { + use std::num::NonZeroU32; + use assert_matches::assert_matches; - use fake::{Dummy, Faker}; + use fake::{Dummy, Fake, Faker}; use futures::stream; use p2p::client::types::TransactionData; use p2p::libp2p::PeerId; @@ -996,8 +1024,8 @@ mod tests { use pathfinder_common::transaction::TransactionVariant; use pathfinder_common::{StarknetVersion, TransactionHash}; use pathfinder_crypto::Felt; - use pathfinder_storage::fake::{self as fake_storage, Block}; - use pathfinder_storage::StorageBuilder; + use pathfinder_storage::fake::{self as fake_storage, Block, Config}; + use pathfinder_storage::{StorageBuilder, TriePruneMode}; use super::super::handle_transaction_stream; use super::*; @@ -1008,107 +1036,65 @@ mod tests { pub storage: Storage, } - async fn setup(num_blocks: usize) -> Setup { - tokio::task::spawn_blocking(move || { - let mut blocks = fake_storage::init::with_n_blocks(num_blocks); - let streamed_transactions = blocks - .iter_mut() - .map(|block| { - let transaction_commitment = calculate_transaction_commitment( - block - .transaction_data - .iter() - .map(|(t, _, _)| t.clone()) - .collect::>() - .as_slice(), - block - .header - .header - .starknet_version - .max(StarknetVersion::V_0_13_2), - ) - .unwrap(); - block.header.header.transaction_commitment = transaction_commitment; - - anyhow::Result::Ok(PeerData::for_tests(( - block - .transaction_data - .iter() - .map(|x| (x.0.clone(), x.1.clone().into())) - .collect::>(), - block.header.header.number, - ))) - }) - .collect::>(); - let expected_transactions = blocks - .iter() - .map(|block| { - block - .transaction_data - .iter() - .map(|x| (x.0.clone(), x.1.clone())) - .collect::>() - }) - .collect::>(); - blocks.iter_mut().for_each(|b| { - // Purge transaction data. 
- b.transaction_data = Default::default(); - }); + fn setup(num_blocks: usize) -> Setup { + setup_inner( + num_blocks, + Config { + calculate_transaction_commitment: Box::new(calculate_transaction_commitment), + ..Default::default() + }, + ) + } - let storage = StorageBuilder::in_memory_with_trie_pruning_and_pool_size( - pathfinder_storage::TriePruneMode::Archive, - std::num::NonZeroU32::new(5).unwrap(), - ) - .unwrap(); - fake_storage::fill(&storage, &blocks); - Setup { - streamed_transactions, - expected_transactions, - storage, - } - }) - .await - .unwrap() + fn setup_commitment_mismatch(num_blocks: usize) -> Setup { + setup_inner(num_blocks, Default::default()) } - async fn setup_commitment_mismatch(num_blocks: usize) -> Setup { - use fake::{Fake, Faker}; - tokio::task::spawn_blocking(move || { - let mut blocks = fake_storage::init::with_n_blocks(num_blocks); - let streamed_transactions = blocks - .iter_mut() - .map(|block| { - block.header.header.transaction_commitment = Faker.fake(); - - anyhow::Result::Ok(PeerData::for_tests(( - block - .transaction_data - .iter() - .map(|x| (x.0.clone(), x.1.clone().into())) - .collect::>(), - block.header.header.number, - ))) - }) - .collect::>(); - blocks.iter_mut().for_each(|b| { - // Purge transaction data. - b.transaction_data = Default::default(); - }); + fn setup_inner(num_blocks: usize, config: Config) -> Setup { + let blocks = fake_storage::generate::with_config(num_blocks, config); + let only_headers = blocks + .iter() + .map(|block| Block { + header: block.header.clone(), + ..Default::default() + }) + .collect::>(); + let storage = StorageBuilder::in_memory_with_trie_pruning_and_pool_size( + TriePruneMode::Archive, + NonZeroU32::new(5).unwrap(), + ) + .unwrap(); + fake_storage::fill(&storage, &only_headers, None); - let storage = StorageBuilder::in_memory_with_trie_pruning_and_pool_size( - pathfinder_storage::TriePruneMode::Archive, - std::num::NonZeroU32::new(5).unwrap(), - ) - .unwrap(); - fake_storage::fill(&storage, &blocks); - Setup { - streamed_transactions, - expected_transactions: Vec::default(), - storage, - } - }) - .await - .unwrap() + let streamed_transactions = blocks + .iter() + .map(|block| { + anyhow::Result::Ok(PeerData::for_tests(( + block + .transaction_data + .iter() + .map(|x| (x.0.clone(), x.1.clone().into())) + .collect::>(), + block.header.header.number, + ))) + }) + .collect::>(); + let expected_transactions = blocks + .iter() + .map(|block| { + block + .transaction_data + .iter() + .map(|x| (x.0.clone(), x.1.clone())) + .collect::>() + }) + .collect::>(); + + Setup { + streamed_transactions, + expected_transactions, + storage, + } } #[tokio::test] @@ -1118,12 +1104,7 @@ mod tests { streamed_transactions, expected_transactions, storage, - } = setup(NUM_BLOCKS).await; - - let x = expected_transactions - .iter() - .map(|x| x.iter().map(|y| y.0.hash).collect::>()) - .collect::>(); + } = setup(NUM_BLOCKS); handle_transaction_stream( stream::iter(streamed_transactions), @@ -1159,7 +1140,7 @@ mod tests { streamed_transactions, storage, .. - } = setup(1).await; + } = setup(1); assert_matches!( handle_transaction_stream( stream::iter(streamed_transactions), @@ -1180,7 +1161,7 @@ mod tests { streamed_transactions, storage, .. - } = setup_commitment_mismatch(1).await; + } = setup_commitment_mismatch(1); assert_matches!( handle_transaction_stream( stream::iter(streamed_transactions), @@ -1212,7 +1193,7 @@ mod tests { let Setup { streamed_transactions, .. 
- } = setup(1).await; + } = setup(1); assert_matches!( handle_transaction_stream( stream::iter(streamed_transactions), @@ -1227,6 +1208,9 @@ mod tests { } mod handle_state_diff_stream { + use std::num::NonZeroU32; + use std::path::PathBuf; + use assert_matches::assert_matches; use fake::{Dummy, Fake, Faker}; use futures::stream; @@ -1235,11 +1219,12 @@ mod tests { use pathfinder_common::transaction::DeployTransactionV0; use pathfinder_common::TransactionHash; use pathfinder_crypto::Felt; - use pathfinder_storage::fake::{self as fake_storage, Block}; + use pathfinder_storage::fake::{self as fake_storage, Block, Config}; use pathfinder_storage::StorageBuilder; use super::super::handle_state_diff_stream; use super::*; + use crate::state::update_starknet_state; struct Setup { pub streamed_state_diffs: Vec>, @@ -1249,47 +1234,40 @@ mod tests { async fn setup(num_blocks: usize) -> Setup { tokio::task::spawn_blocking(move || { - let mut blocks = super::fixture::blocks()[..num_blocks].to_vec(); + let blocks = fake_storage::generate::with_config( + num_blocks, + Config { + update_tries: Box::new(update_starknet_state), + ..Default::default() + }, + ); + + let storage = pathfinder_storage::StorageBuilder::in_tempdir().unwrap(); + + let headers_and_txns = blocks + .iter() + .map(|block| Block { + header: block.header.clone(), + transaction_data: block.transaction_data.clone(), + ..Default::default() + }) + .collect::>(); + fake_storage::fill(&storage, &headers_and_txns, None); + let streamed_state_diffs = blocks .iter() .map(|block| { Result::, _>::Ok(PeerData::for_tests(( - block.state_update.clone().into(), + block.state_update.as_ref().unwrap().clone().into(), block.header.header.number, ))) }) .collect::>(); - let mut implicit_declarations = HashSet::new(); let expected_state_diffs = blocks .iter() - .map(|block| { - // Cairo0 Deploy should also count as implicit declaration the first time - // it happens - let mut state_diff: StateUpdateData = block.state_update.clone().into(); - block - .state_update - .contract_updates - .iter() - .for_each(|(_, v)| { - v.class.as_ref().inspect(|class_update| { - if let ContractClassUpdate::Deploy(class_hash) = class_update { - if !implicit_declarations.contains(class_hash) { - state_diff.declared_cairo_classes.insert(*class_hash); - implicit_declarations.insert(*class_hash); - } - } - }); - }); - state_diff - }) + .map(|block| block.state_update.as_ref().unwrap().clone().into()) .collect::>(); - let storage = StorageBuilder::in_memory_with_trie_pruning_and_pool_size( - pathfinder_storage::TriePruneMode::Archive, - std::num::NonZeroU32::new(5).unwrap(), - ) - .unwrap(); - fake_storage::fill(&storage, &blocks); Setup { streamed_state_diffs, expected_state_diffs, @@ -1302,7 +1280,7 @@ mod tests { #[tokio::test] async fn happy_path() { - const NUM_BLOCKS: usize = 2; + const NUM_BLOCKS: usize = 10; let Setup { streamed_state_diffs, expected_state_diffs, @@ -1313,7 +1291,7 @@ mod tests { stream::iter(streamed_state_diffs), storage.clone(), BlockNumber::GENESIS, - false, + true, ) .await .unwrap(); @@ -1494,6 +1472,7 @@ mod tests { let mut block = Block::default(); block.header.header.number = BlockNumber::GENESIS + n; block.header.header.hash = Faker.fake(); + block.state_update = Some(Default::default()); block }; let mut blocks = vec![fake_block(0), fake_block(1)]; @@ -1510,8 +1489,16 @@ mod tests { Default::default() }; - blocks[1].state_update.declared_cairo_classes = [cairo_hash].into(); - blocks[1].state_update.declared_sierra_classes = [ + blocks[1] + 
.state_update + .as_mut() + .unwrap() + .declared_cairo_classes = [cairo_hash].into(); + blocks[1] + .state_update + .as_mut() + .unwrap() + .declared_sierra_classes = [ (sierra0_hash, Default::default()), (sierra2_hash, Default::default()), ] @@ -1565,7 +1552,7 @@ mod tests { .into(); let storage = StorageBuilder::in_memory().unwrap(); - fake_storage::fill(&storage, &blocks); + fake_storage::fill(&storage, &blocks, None); Setup { streamed_classes, declared_classes, @@ -1709,6 +1696,7 @@ mod tests { use pathfinder_common::transaction::TransactionVariant; use pathfinder_common::{StarknetVersion, TransactionHash}; use pathfinder_crypto::Felt; + use pathfinder_storage::fake::{fill, Block, Config, EventCommitmentFn}; use pathfinder_storage::{fake as fake_storage, StorageBuilder}; use super::super::handle_event_stream; @@ -1721,66 +1709,64 @@ mod tests { pub storage: Storage, } - async fn setup(num_blocks: usize, compute_event_commitments: bool) -> Setup { - tokio::task::spawn_blocking(move || { - let mut blocks = fake_storage::init::with_n_blocks(num_blocks); - let streamed_events = blocks - .iter() - .map(|block| { - Result::Ok(PeerData::for_tests(( - block.header.header.number, - block - .transaction_data - .iter() - .map(|(tx, _, events)| (tx.hash, events.clone())) - .collect::>(), - ))) - }) - .collect::>(); - let expected_events = blocks - .iter() - .map(|block| { + fn setup(num_blocks: usize) -> Setup { + setup_inner( + num_blocks, + Config { + calculate_event_commitment: Box::new(calculate_event_commitment), + ..Default::default() + }, + ) + } + + fn setup_commitment_mismatch(num_blocks: usize) -> Setup { + setup_inner(num_blocks, Default::default()) + } + + fn setup_inner(num_blocks: usize, config: Config) -> Setup { + let blocks = fake_storage::generate::with_config(num_blocks, config); + let without_events = blocks + .iter() + .cloned() + .map(|mut block| { + block + .transaction_data + .iter_mut() + .for_each(|(_, _, e)| e.clear()); + block + }) + .collect::>(); + let storage = StorageBuilder::in_memory().unwrap(); + fill(&storage, &without_events, None); + + let streamed_events = blocks + .iter() + .map(|block| { + Result::Ok(PeerData::for_tests(( + block.header.header.number, block .transaction_data .iter() - .map(|x| (x.0.hash, x.2.clone())) - .collect::>() - }) - .collect::>(); - - let storage = StorageBuilder::in_memory().unwrap(); - blocks.iter_mut().for_each(|block| { - if compute_event_commitments { - block.header.header.event_commitment = calculate_event_commitment( - &block - .transaction_data - .iter() - .map(|(tx, _, events)| (tx.hash, events.as_slice())) - .collect::>(), - block - .header - .header - .starknet_version - .max(StarknetVersion::V_0_13_2), - ) - .unwrap(); - } - // Purge events + .map(|(tx, _, events)| (tx.hash, events.clone())) + .collect::>(), + ))) + }) + .collect::>(); + let expected_events = blocks + .iter() + .map(|block| { block .transaction_data - .iter_mut() - .for_each(|(_, _, events)| events.clear()); - block.cairo_defs.iter_mut().for_each(|(_, def)| def.clear()); - }); - fake_storage::fill(&storage, &blocks); - Setup { - streamed_events, - expected_events, - storage, - } - }) - .await - .unwrap() + .iter() + .map(|x| (x.0.hash, x.2.clone())) + .collect::>() + }) + .collect::>(); + Setup { + streamed_events, + expected_events, + storage, + } } #[tokio::test] @@ -1790,7 +1776,7 @@ mod tests { streamed_events, expected_events, storage, - } = setup(NUM_BLOCKS, true).await; + } = setup(NUM_BLOCKS); handle_event_stream(stream::iter(streamed_events), 
storage.clone()) .await @@ -1821,7 +1807,7 @@ mod tests { streamed_events, expected_events, storage, - } = setup(NUM_BLOCKS, false).await; + } = setup_commitment_mismatch(NUM_BLOCKS); let expected_peer_id = streamed_events[0].as_ref().unwrap().peer; assert_matches::assert_matches!( diff --git a/crates/pathfinder/src/sync/checkpoint/fixture.rs b/crates/pathfinder/src/sync/checkpoint/fixture.rs deleted file mode 100644 index 6f2371478c..0000000000 --- a/crates/pathfinder/src/sync/checkpoint/fixture.rs +++ /dev/null @@ -1,1740 +0,0 @@ -use std::collections::{HashMap, HashSet}; - -use pathfinder_common::receipt::{ - BuiltinCounters, - ExecutionResources, - ExecutionStatus, - L1Gas, - L2Gas, - L2ToL1Message, - Receipt, -}; -use pathfinder_common::state_update::{ContractClassUpdate, ContractUpdate}; -use pathfinder_common::transaction::{ - DeployTransactionV0, - InvokeTransactionV0, - Transaction, - TransactionVariant, -}; -use pathfinder_common::{ - block_commitment_signature_elem, - block_hash, - call_param, - class_commitment, - class_hash, - constructor_param, - contract_address, - contract_address_salt, - entry_point, - event_commitment, - felt, - l2_to_l1_message_payload_elem, - receipt_commitment, - state_commitment, - state_diff_commitment, - storage_address, - storage_commitment, - storage_value, - transaction_commitment, - transaction_hash, - BlockCommitmentSignature, - BlockHeader, - BlockNumber, - BlockTimestamp, - Fee, - GasPrice, - L1DataAvailabilityMode, - SignedBlockHeader, - StarknetVersion, - StateUpdate, - TransactionIndex, -}; -use pathfinder_storage::fake::Block; - -pub fn blocks() -> [Block; 2] { - use ContractClassUpdate::*; - use ExecutionStatus::*; - use TransactionVariant::*; - - [ - Block { - header: SignedBlockHeader { - header: BlockHeader { - hash: block_hash!( - "0x047C3637B57C2B079B93C61539950C17E868A28F46CDEF28F88521067F21E943" - ), - parent_hash: block_hash!("0x0"), - number: BlockNumber::new_or_panic(0), - timestamp: BlockTimestamp::new_or_panic(1637069048), - eth_l1_gas_price: GasPrice(0), - strk_l1_gas_price: GasPrice(0), - eth_l1_data_gas_price: GasPrice(1), - strk_l1_data_gas_price: GasPrice(1), - eth_l2_gas_price: GasPrice(3), - strk_l2_gas_price: GasPrice(3), - sequencer_address: Default::default(), - starknet_version: StarknetVersion::new(0, 0, 0, 0), - class_commitment: class_commitment!("0x0"), - event_commitment: event_commitment!("0x0"), - state_commitment: state_commitment!( - "0x021870BA80540E7831FB21C591EE93481F5AE1BB71FF85A86DDD465BE4EDDEE6" - ), - storage_commitment: storage_commitment!( - "0x021870BA80540E7831FB21C591EE93481F5AE1BB71FF85A86DDD465BE4EDDEE6" - ), - transaction_commitment: transaction_commitment!("0x0"), - transaction_count: 18, - event_count: 0, - l1_da_mode: L1DataAvailabilityMode::Calldata, - receipt_commitment: receipt_commitment!("0x0200A173F6AECAB11A7166EFB0BF8F4362A8403CA32292695A37B322793F1302"), - state_diff_commitment: state_diff_commitment!("0x00A8AC20EF93DBE185D09AE31DE4EA3372ECF753F14EBAAE97ADAB22B1AB72F2"), - state_diff_length: 25, - }, - signature: BlockCommitmentSignature { - r: block_commitment_signature_elem!( - "0x0484A36B518E33198BFE7A29D82842AE092F9181120E1D49B926C651ADF315ED" - ), - s: block_commitment_signature_elem!( - "0x07E21090AB0C6C70FA2B73E17E6288DB298DBE191B04BFD1721436530E84861E" - ), - }, - }, - transaction_data: vec![ - ( - Transaction { - hash: transaction_hash!("0x00E0A2E45A80BB827967E096BCF58874F6C01C191E0A0530624CBA66A508AE75"), - variant: DeployV0(DeployTransactionV0 { - class_hash: 
class_hash!("0x010455C752B86932CE552F2B0FE81A880746649B9AEE7E0D842BF3F52378F9F8"), - contract_address: contract_address!("0x020CFA74EE3564B4CD5435CDACE0F9C4D43B939620E4A0BB5076105DF0A626C6"), - contract_address_salt: contract_address_salt!("0x0546C86DC6E40A5E5492B782D8964E9A4274FF6ECB16D31EB09CEE45A3564015"), - constructor_calldata: vec![ - constructor_param!("0x06CF6C2F36D36B08E591E4489E92CA882BB67B9C39A3AFCCF011972A8DE467F0"), - constructor_param!("0x07AB344D88124307C07B56F6C59C12F4543E9C96398727854A322DEA82C73240"), - ], - }), - }, - Receipt { - actual_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - execution_resources: ExecutionResources { - builtins: BuiltinCounters { - output: 0, - pedersen: 0, - range_check: 0, - ecdsa: 0, - bitwise: 0, - ec_op: 0, - keccak: 0, - poseidon: 0, - segment_arena: 0, - add_mod: 0, - mul_mod: 0, - range_check96: 0, - }, - n_steps: 29, - n_memory_holes: 0, - data_availability: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - total_gas_consumed: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - l2_gas: L2Gas(0), - }, - l2_to_l1_messages: vec![], - execution_status: Succeeded, - transaction_hash: transaction_hash!("0x00E0A2E45A80BB827967E096BCF58874F6C01C191E0A0530624CBA66A508AE75"), - transaction_index: TransactionIndex::new_or_panic( - 0, - ), - }, - vec![], - ), - ( - Transaction { - hash: transaction_hash!("0x012C96AE3C050771689EB261C9BF78FAC2580708C7F1F3D69A9647D8BE59F1E1"), - variant: DeployV0( - DeployTransactionV0 { - class_hash: class_hash!("0x010455C752B86932CE552F2B0FE81A880746649B9AEE7E0D842BF3F52378F9F8"), - contract_address: contract_address!("0x031C9CDB9B00CB35CF31C05855C0EC3ECF6F7952A1CE6E3C53C3455FCD75A280"), - contract_address_salt: contract_address_salt!("0x0012AFA0F342ECE0468CA9810F0EA59F9C7204AF32D1B8B0D318C4F2FE1F384E"), - constructor_calldata: vec![ - constructor_param!("0x00CFC2E2866FD08BFB4AC73B70E0C136E326AE18FC797A2C090C8811C695577E"), - constructor_param!("0x05F1DD5A5AEF88E0498EECA4E7B2EA0FA7110608C11531278742F0B5499AF4B3"), - ], - }, - ), - }, - Receipt { - actual_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - execution_resources: ExecutionResources { - builtins: BuiltinCounters { - output: 0, - pedersen: 0, - range_check: 0, - ecdsa: 0, - bitwise: 0, - ec_op: 0, - keccak: 0, - poseidon: 0, - segment_arena: 0, - add_mod: 0, - mul_mod: 0, - range_check96: 0, - }, - n_steps: 29, - n_memory_holes: 0, - data_availability: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - total_gas_consumed: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - l2_gas: L2Gas(0), - }, - l2_to_l1_messages: vec![], - execution_status: Succeeded, - transaction_hash: transaction_hash!("0x012C96AE3C050771689EB261C9BF78FAC2580708C7F1F3D69A9647D8BE59F1E1"), - transaction_index: TransactionIndex::new_or_panic( - 1, - ), - }, - vec![], - ), - ( - Transaction { - hash: transaction_hash!("0x000CE54BBC5647E1C1EA4276C01A708523F740DB0FF5474C77734F73BEEC2624"), - variant: InvokeV0( - InvokeTransactionV0 { - calldata: vec![ - call_param!("0x000000000000000000000000C84DD7FD43A7DEFB5B7A15C4FBBE11CBBA6DB1BA"), - ], - sender_address: contract_address!("0x020CFA74EE3564B4CD5435CDACE0F9C4D43B939620E4A0BB5076105DF0A626C6"), - entry_point_selector: entry_point!("0x012EAD94AE9D3F9D2BDB6B847CF255F1F398193A1F88884A0AE8E18F24A037B6"), - entry_point_type: None, - max_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - signature: vec![], - }, - ), - }, - Receipt { - actual_fee: 
Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - execution_resources: ExecutionResources { - builtins: BuiltinCounters { - output: 0, - pedersen: 0, - range_check: 0, - ecdsa: 0, - bitwise: 0, - ec_op: 0, - keccak: 0, - poseidon: 0, - segment_arena: 0, - add_mod: 0, - mul_mod: 0, - range_check96: 0, - }, - n_steps: 31, - n_memory_holes: 0, - data_availability: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - total_gas_consumed: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - l2_gas: L2Gas(0), - }, - l2_to_l1_messages: vec![ - L2ToL1Message { - from_address: contract_address!("0x020CFA74EE3564B4CD5435CDACE0F9C4D43B939620E4A0BB5076105DF0A626C6"), - payload: vec![ - l2_to_l1_message_payload_elem!("0x000000000000000000000000000000000000000000000000000000000000000C"), - l2_to_l1_message_payload_elem!("0x0000000000000000000000000000000000000000000000000000000000000022"), - ], - to_address: contract_address!("0x000000000000000000000000C84DD7FD43A7DEFB5B7A15C4FBBE11CBBA6DB1BA"), - }, - ], - execution_status: Succeeded, - transaction_hash: transaction_hash!("0x000CE54BBC5647E1C1EA4276C01A708523F740DB0FF5474C77734F73BEEC2624"), - transaction_index: TransactionIndex::new_or_panic( - 2, - ), - }, - vec![], - ), - ( - Transaction { - hash: transaction_hash!("0x01C924916A84EF42A3D25D29C5D1085FE212DE04FEADC6E88D4C7A6E5B9039BF"), - variant: InvokeV0( - InvokeTransactionV0 { - calldata: vec![ - call_param!("0x020CFA74EE3564B4CD5435CDACE0F9C4D43B939620E4A0BB5076105DF0A626C6"), - call_param!("0x0000000000000000000000000000000000000000000000000000000000000000"), - ], - sender_address: contract_address!("0x031C9CDB9B00CB35CF31C05855C0EC3ECF6F7952A1CE6E3C53C3455FCD75A280"), - entry_point_selector: entry_point!("0x0218F305395474A84A39307FA5297BE118FE17BF65E27AC5E2DE6617BAA44C64"), - entry_point_type: None, - max_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - signature: vec![], - }, - ), - }, - Receipt { - actual_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - execution_resources: ExecutionResources { - builtins: BuiltinCounters { - output: 0, - pedersen: 0, - range_check: 0, - ecdsa: 0, - bitwise: 0, - ec_op: 0, - keccak: 0, - poseidon: 0, - segment_arena: 0, - add_mod: 0, - mul_mod: 0, - range_check96: 0, - }, - n_steps: 238, - n_memory_holes: 0, - data_availability: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - total_gas_consumed: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - l2_gas: L2Gas(0), - }, - l2_to_l1_messages: vec![], - execution_status: Succeeded, - transaction_hash: transaction_hash!("0x01C924916A84EF42A3D25D29C5D1085FE212DE04FEADC6E88D4C7A6E5B9039BF"), - transaction_index: TransactionIndex::new_or_panic( - 3, - ), - }, - vec![], - ), - ( - Transaction { - hash: transaction_hash!("0x00A66C346E273CC49510EF2E1620A1A7922135CB86AB227B86E0AFD12243BD90"), - variant: InvokeV0( - InvokeTransactionV0 { - calldata: vec![ - call_param!("0x0007DBFEC95C10BBC2FD3F37A89AE6E027826134F955251D11C784A6B34FDF50"), - call_param!("0x0000000000000000000000000000000000000000000000000000000000000002"), - call_param!("0x04E7E989D58A17CD279ECA440C5EAA829EFB6F9967AAAD89022ACBE644C39B36"), - call_param!("0x0453AE0C9610197B18B13645C44D3D0A407083D96562E8752AAB3FAB616CECB0"), - ], - sender_address: contract_address!("0x020CFA74EE3564B4CD5435CDACE0F9C4D43B939620E4A0BB5076105DF0A626C6"), - entry_point_selector: entry_point!("0x0317EB442B72A9FAE758D4FB26830ED0D9F31C8E7DA4DBFF4E8C59EA6A158E7F"), - entry_point_type: None, - 
max_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - signature: vec![], - }, - ), - }, - Receipt { - actual_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - execution_resources: ExecutionResources { - builtins: BuiltinCounters { - output: 0, - pedersen: 2, - range_check: 7, - ecdsa: 0, - bitwise: 0, - ec_op: 0, - keccak: 0, - poseidon: 0, - segment_arena: 0, - add_mod: 0, - mul_mod: 0, - range_check96: 0, - }, - n_steps: 165, - n_memory_holes: 0, - data_availability: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - total_gas_consumed: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - l2_gas: L2Gas(0), - }, - l2_to_l1_messages: vec![], - execution_status: Succeeded, - transaction_hash: transaction_hash!("0x00A66C346E273CC49510EF2E1620A1A7922135CB86AB227B86E0AFD12243BD90"), - transaction_index: TransactionIndex::new_or_panic( - 4, - ), - }, - vec![], - ), - ( - Transaction { - hash: transaction_hash!("0x05C71675616B49FB9D16CAC8BEAAA65F62DC5A532E92785055C15C825166DBBF"), - variant: InvokeV0( - InvokeTransactionV0 { - calldata: vec![ - call_param!("0x031C9CDB9B00CB35CF31C05855C0EC3ECF6F7952A1CE6E3C53C3455FCD75A280"), - call_param!("0x0317EB442B72A9FAE758D4FB26830ED0D9F31C8E7DA4DBFF4E8C59EA6A158E7F"), - call_param!("0x0000000000000000000000000000000000000000000000000000000000000004"), - call_param!("0x04BE52041FEE36AB5199771ACF4B5D260D223297E588654E5C9477DF2EFA542A"), - call_param!("0x0000000000000000000000000000000000000000000000000000000000000002"), - call_param!("0x00299E2F4B5A873E95E65EB03D31E532EA2CDE43B498B50CD3161145DB5542A5"), - call_param!("0x03D6897CF23DA3BF4FD35CC7A43CCAF7C5EAF8F7C5B9031AC9B09A929204175F"), - ], - sender_address: contract_address!("0x031C9CDB9B00CB35CF31C05855C0EC3ECF6F7952A1CE6E3C53C3455FCD75A280"), - entry_point_selector: entry_point!("0x027C3334165536F239CFD400ED956EABFF55FC60DE4FB56728B6A4F6B87DB01C"), - entry_point_type: None, - max_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - signature: vec![], - }, - ), - }, - Receipt { - actual_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - execution_resources: ExecutionResources { - builtins: BuiltinCounters { - output: 0, - pedersen: 2, - range_check: 8, - ecdsa: 0, - bitwise: 0, - ec_op: 0, - keccak: 0, - poseidon: 0, - segment_arena: 0, - add_mod: 0, - mul_mod: 0, - range_check96: 0, - }, - n_steps: 209, - n_memory_holes: 0, - data_availability: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - total_gas_consumed: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - l2_gas: L2Gas(0), - }, - l2_to_l1_messages: vec![], - execution_status: Succeeded, - transaction_hash: transaction_hash!("0x05C71675616B49FB9D16CAC8BEAAA65F62DC5A532E92785055C15C825166DBBF"), - transaction_index: TransactionIndex::new_or_panic( - 5, - ), - }, - vec![], - ), - ( - Transaction { - hash: transaction_hash!("0x060E05C41A6622592A2E2EFF90A9F2E495296A3BE9596E7BC4DFBAFCE00D7A6A"), - variant: InvokeV0( - InvokeTransactionV0 { - calldata: vec![ - call_param!("0x020CFA74EE3564B4CD5435CDACE0F9C4D43B939620E4A0BB5076105DF0A626C6"), - call_param!("0x0000000000000000000000000000000000000000000000000000000000000001"), - ], - sender_address: contract_address!("0x031C9CDB9B00CB35CF31C05855C0EC3ECF6F7952A1CE6E3C53C3455FCD75A280"), - entry_point_selector: entry_point!("0x0218F305395474A84A39307FA5297BE118FE17BF65E27AC5E2DE6617BAA44C64"), - entry_point_type: None, - max_fee: 
Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - signature: vec![], - }, - ), - }, - Receipt { - actual_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - execution_resources: ExecutionResources { - builtins: BuiltinCounters { - output: 0, - pedersen: 0, - range_check: 0, - ecdsa: 0, - bitwise: 0, - ec_op: 0, - keccak: 0, - poseidon: 0, - segment_arena: 0, - add_mod: 0, - mul_mod: 0, - range_check96: 0, - }, - n_steps: 332, - n_memory_holes: 0, - data_availability: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - total_gas_consumed: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - l2_gas: L2Gas(0), - }, - l2_to_l1_messages: vec![ - L2ToL1Message { - from_address: contract_address!("0x031C9CDB9B00CB35CF31C05855C0EC3ECF6F7952A1CE6E3C53C3455FCD75A280"), - payload: vec![ - l2_to_l1_message_payload_elem!("0x000000000000000000000000000000000000000000000000000000000000000C"), - l2_to_l1_message_payload_elem!("0x0000000000000000000000000000000000000000000000000000000000000022"), - ], - to_address: contract_address!("0x0000000000000000000000000000000000000000000000000000000000000001"), - }, - ], - execution_status: Succeeded, - transaction_hash: transaction_hash!("0x060E05C41A6622592A2E2EFF90A9F2E495296A3BE9596E7BC4DFBAFCE00D7A6A"), - transaction_index: TransactionIndex::new_or_panic( - 6, - ), - }, - vec![], - ), - ( - Transaction { - hash: transaction_hash!("0x05634F2847140263BA59480AD4781DACC9991D0365145489B27A198EBED2F969"), - variant: InvokeV0( - InvokeTransactionV0 { - calldata: vec![ - call_param!("0x020CFA74EE3564B4CD5435CDACE0F9C4D43B939620E4A0BB5076105DF0A626C6"), - call_param!("0x05AEE31408163292105D875070F98CB48275B8C87E80380B78D30647E05854D5"), - ], - sender_address: contract_address!("0x031C9CDB9B00CB35CF31C05855C0EC3ECF6F7952A1CE6E3C53C3455FCD75A280"), - entry_point_selector: entry_point!("0x019A35A6E95CB7A3318DBB244F20975A1CD8587CC6B5259F15F61D7BEB7EE43B"), - entry_point_type: None, - max_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - signature: vec![], - }, - ), - }, - Receipt { - actual_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - execution_resources: ExecutionResources { - builtins: BuiltinCounters { - output: 0, - pedersen: 0, - range_check: 0, - ecdsa: 0, - bitwise: 0, - ec_op: 0, - keccak: 0, - poseidon: 0, - segment_arena: 0, - add_mod: 0, - mul_mod: 0, - range_check96: 0, - }, - n_steps: 178, - n_memory_holes: 0, - data_availability: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - total_gas_consumed: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - l2_gas: L2Gas(0), - }, - l2_to_l1_messages: vec![], - execution_status: Succeeded, - transaction_hash: transaction_hash!("0x05634F2847140263BA59480AD4781DACC9991D0365145489B27A198EBED2F969"), - transaction_index: TransactionIndex::new_or_panic( - 7, - ), - }, - vec![], - ), - ( - Transaction { - hash: transaction_hash!("0x00B049C384CF75174150A2540835CC2ABDCCA1D3A3750298A1741A621983E35A"), - variant: DeployV0( - DeployTransactionV0 { - class_hash: class_hash!("0x010455C752B86932CE552F2B0FE81A880746649B9AEE7E0D842BF3F52378F9F8"), - contract_address: contract_address!("0x06EE3440B08A9C805305449EC7F7003F27E9F7E287B83610952EC36BDC5A6BAE"), - contract_address_salt: contract_address_salt!("0x05098705E4D57A8620E5B387855EF4DC82F0CCD84B7299DC1179B87517249127"), - constructor_calldata: vec![ - constructor_param!("0x048CBA68D4E86764105ADCDCF641AB67B581A55A4F367203647549C8BF1FEEA2"), - 
constructor_param!("0x0362D24A3B030998AC75E838955DFEE19EC5B6ECEB235B9BFBECCF51B6304D0B"), - ], - }, - ), - }, - Receipt { - actual_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - execution_resources: ExecutionResources { - builtins: BuiltinCounters { - output: 0, - pedersen: 0, - range_check: 0, - ecdsa: 0, - bitwise: 0, - ec_op: 0, - keccak: 0, - poseidon: 0, - segment_arena: 0, - add_mod: 0, - mul_mod: 0, - range_check96: 0, - }, - n_steps: 29, - n_memory_holes: 0, - data_availability: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - total_gas_consumed: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - l2_gas: L2Gas(0), - }, - l2_to_l1_messages: vec![], - execution_status: Succeeded, - transaction_hash: transaction_hash!("0x00B049C384CF75174150A2540835CC2ABDCCA1D3A3750298A1741A621983E35A"), - transaction_index: TransactionIndex::new_or_panic( - 8, - ), - }, - vec![], - ), - ( - Transaction { - hash: transaction_hash!("0x0227F3D9D5CE6680BDF2991576C1A90ACA8184CA26055BAE92D16C58E3E13340"), - variant: DeployV0( - DeployTransactionV0 { - class_hash: class_hash!("0x010455C752B86932CE552F2B0FE81A880746649B9AEE7E0D842BF3F52378F9F8"), - contract_address: contract_address!("0x0735596016A37EE972C42ADEF6A3CF628C19BB3794369C65D2C82BA034AECF2C"), - contract_address_salt: contract_address_salt!("0x0060BC7461113E4AF46FD52E5ECBC5C3F4BE92ED7F1329D53721F9BFBC0370CC"), - constructor_calldata: vec![ - constructor_param!("0x002F50710449A06A9FA789B3C029A63BD0B1F722F46505828A9F815CF91B31D8"), - constructor_param!("0x02A222E62EABE91ABDB6838FA8B267FFE81A6EB575F61E96EC9AA4460C0925A2"), - ], - }, - ), - }, - Receipt { - actual_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - execution_resources: ExecutionResources { - builtins: BuiltinCounters { - output: 0, - pedersen: 0, - range_check: 0, - ecdsa: 0, - bitwise: 0, - ec_op: 0, - keccak: 0, - poseidon: 0, - segment_arena: 0, - add_mod: 0, - mul_mod: 0, - range_check96: 0, - }, - n_steps: 29, - n_memory_holes: 0, - data_availability: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - total_gas_consumed: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - l2_gas: L2Gas(0), - }, - l2_to_l1_messages: vec![], - execution_status: Succeeded, - transaction_hash: transaction_hash!("0x0227F3D9D5CE6680BDF2991576C1A90ACA8184CA26055BAE92D16C58E3E13340"), - transaction_index: TransactionIndex::new_or_panic( - 9, - ), - }, - vec![], - ), - ( - Transaction { - hash: transaction_hash!("0x0376FF82431B52CA1FBC4942DE80BC1B01D8E5CD1EEAB5A277B601B510F2CAB2"), - variant: InvokeV0( - InvokeTransactionV0 { - calldata: vec![ - call_param!("0x01E2CD4B3588E8F6F9C4E89FB0E293BF92018C96D7A93EE367D29A284223B6FF"), - call_param!("0x071D1E9D188C784A0BDE95C1D508877A0D93E9102B37213D1E13F3EBC54A7751"), - ], - sender_address: contract_address!("0x06EE3440B08A9C805305449EC7F7003F27E9F7E287B83610952EC36BDC5A6BAE"), - entry_point_selector: entry_point!("0x03D7905601C217734671143D457F0DB37F7F8883112ABD34B92C4ABFEAFDE0C3"), - entry_point_type: None, - max_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - signature: vec![], - }, - ), - }, - Receipt { - actual_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - execution_resources: ExecutionResources { - builtins: BuiltinCounters { - output: 0, - pedersen: 0, - range_check: 0, - ecdsa: 0, - bitwise: 0, - ec_op: 0, - keccak: 0, - poseidon: 0, - segment_arena: 0, - add_mod: 0, - mul_mod: 0, - range_check96: 0, - }, - 
n_steps: 25, - n_memory_holes: 0, - data_availability: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - total_gas_consumed: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - l2_gas: L2Gas(0), - }, - l2_to_l1_messages: vec![], - execution_status: Succeeded, - transaction_hash: transaction_hash!("0x0376FF82431B52CA1FBC4942DE80BC1B01D8E5CD1EEAB5A277B601B510F2CAB2"), - transaction_index: TransactionIndex::new_or_panic( - 10, - ), - }, - vec![], - ), - ( - Transaction { - hash: transaction_hash!("0x025F20C74821D84F62989A71FCEEF08C967837B63BAE31B279A11343F10D874A"), - variant: DeployV0( - DeployTransactionV0 { - class_hash: class_hash!("0x010455C752B86932CE552F2B0FE81A880746649B9AEE7E0D842BF3F52378F9F8"), - contract_address: contract_address!("0x031C887D82502CEB218C06EBB46198DA3F7B92864A8223746BC836DDA3E34B52"), - contract_address_salt: contract_address_salt!("0x063D1A6F8130958509E2E695C25B147F43F66F56BBA49FDDB7EE363D8F57A774"), - constructor_calldata: vec![ - constructor_param!("0x00DF28E613C065616A2E79CA72F9C1908E17B8C913972A9993DA77588DC9CAE9"), - constructor_param!("0x01432126AC23C7028200E443169C2286F99CDB5A7BF22E607BCD724EFA059040"), - ], - }, - ), - }, - Receipt { - actual_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - execution_resources: ExecutionResources { - builtins: BuiltinCounters { - output: 0, - pedersen: 0, - range_check: 0, - ecdsa: 0, - bitwise: 0, - ec_op: 0, - keccak: 0, - poseidon: 0, - segment_arena: 0, - add_mod: 0, - mul_mod: 0, - range_check96: 0, - }, - n_steps: 29, - n_memory_holes: 0, - data_availability: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - total_gas_consumed: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - l2_gas: L2Gas(0), - }, - l2_to_l1_messages: vec![], - execution_status: Succeeded, - transaction_hash: transaction_hash!("0x025F20C74821D84F62989A71FCEEF08C967837B63BAE31B279A11343F10D874A"), - transaction_index: TransactionIndex::new_or_panic( - 11, - ), - }, - vec![], - ), - ( - Transaction { - hash: transaction_hash!("0x02D10272A8BA726793FD15AA23A1E3C42447D7483EBB0B49DF8B987590FE0055"), - variant: InvokeV0( - InvokeTransactionV0 { - calldata: vec![ - call_param!("0x0735596016A37EE972C42ADEF6A3CF628C19BB3794369C65D2C82BA034AECF2C"), - call_param!("0x0000000000000000000000000000000000000000000000000000000000000001"), - ], - sender_address: contract_address!("0x031C887D82502CEB218C06EBB46198DA3F7B92864A8223746BC836DDA3E34B52"), - entry_point_selector: entry_point!("0x0218F305395474A84A39307FA5297BE118FE17BF65E27AC5E2DE6617BAA44C64"), - entry_point_type: None, - max_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - signature: vec![], - }, - ), - }, - Receipt { - actual_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - execution_resources: ExecutionResources { - builtins: BuiltinCounters { - output: 0, - pedersen: 0, - range_check: 0, - ecdsa: 0, - bitwise: 0, - ec_op: 0, - keccak: 0, - poseidon: 0, - segment_arena: 0, - add_mod: 0, - mul_mod: 0, - range_check96: 0, - }, - n_steps: 332, - n_memory_holes: 0, - data_availability: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - total_gas_consumed: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - l2_gas: L2Gas(0), - }, - l2_to_l1_messages: vec![ - L2ToL1Message { - from_address: contract_address!("0x031C887D82502CEB218C06EBB46198DA3F7B92864A8223746BC836DDA3E34B52"), - payload: vec![ - l2_to_l1_message_payload_elem!("0x000000000000000000000000000000000000000000000000000000000000000C"), - 
l2_to_l1_message_payload_elem!("0x0000000000000000000000000000000000000000000000000000000000000022"), - ], - to_address: contract_address!("0x0000000000000000000000000000000000000000000000000000000000000001"), - }, - ], - execution_status: Succeeded, - transaction_hash: transaction_hash!("0x02D10272A8BA726793FD15AA23A1E3C42447D7483EBB0B49DF8B987590FE0055"), - transaction_index: TransactionIndex::new_or_panic( - 12, - ), - }, - vec![], - ), - ( - Transaction { - hash: transaction_hash!("0x00B05BA5CD0B9E0464D2C1790AD93A159C6EF0594513758BCA9111E74E4099D4"), - variant: InvokeV0( - InvokeTransactionV0 { - calldata: vec![ - call_param!("0x031C887D82502CEB218C06EBB46198DA3F7B92864A8223746BC836DDA3E34B52"), - call_param!("0x0000000000000000000000000000000000000000000000000000000000000000"), - ], - sender_address: contract_address!("0x0735596016A37EE972C42ADEF6A3CF628C19BB3794369C65D2C82BA034AECF2C"), - entry_point_selector: entry_point!("0x0218F305395474A84A39307FA5297BE118FE17BF65E27AC5E2DE6617BAA44C64"), - entry_point_type: None, - max_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - signature: vec![], - }, - ), - }, - Receipt { - actual_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - execution_resources: ExecutionResources { - builtins: BuiltinCounters { - output: 0, - pedersen: 0, - range_check: 0, - ecdsa: 0, - bitwise: 0, - ec_op: 0, - keccak: 0, - poseidon: 0, - segment_arena: 0, - add_mod: 0, - mul_mod: 0, - range_check96: 0, - }, - n_steps: 238, - n_memory_holes: 0, - data_availability: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - total_gas_consumed: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - l2_gas: L2Gas(0), - }, - l2_to_l1_messages: vec![], - execution_status: Succeeded, - transaction_hash: transaction_hash!("0x00B05BA5CD0B9E0464D2C1790AD93A159C6EF0594513758BCA9111E74E4099D4"), - transaction_index: TransactionIndex::new_or_panic( - 13, - ), - }, - vec![], - ), - ( - Transaction { - hash: transaction_hash!("0x04D16393D940FB4A97F20B9034E2A5E954201FEE827B2B5C6DAA38EC272E7C9C"), - variant: InvokeV0( - InvokeTransactionV0 { - calldata: vec![ - call_param!("0x01A7CF8B8027EC2D8FD04F1277F3F8AE6379CA957C5FEC9EE7F59D56D86A26E4"), - call_param!("0x0000000000000000000000000000000000000000000000000000000000000002"), - call_param!("0x028DFF6722AA73281B2CF84CAC09950B71FA90512DB294D2042119ABDD9F4B87"), - call_param!("0x057A8F8A019CCAB5BFC6FF86C96B1392257ABB8D5D110C01D326B94247AF161C"), - ], - sender_address: contract_address!("0x06EE3440B08A9C805305449EC7F7003F27E9F7E287B83610952EC36BDC5A6BAE"), - entry_point_selector: entry_point!("0x0317EB442B72A9FAE758D4FB26830ED0D9F31C8E7DA4DBFF4E8C59EA6A158E7F"), - entry_point_type: None, - max_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - signature: vec![], - }, - ), - }, - Receipt { - actual_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - execution_resources: ExecutionResources { - builtins: BuiltinCounters { - output: 0, - pedersen: 2, - range_check: 7, - ecdsa: 0, - bitwise: 0, - ec_op: 0, - keccak: 0, - poseidon: 0, - segment_arena: 0, - add_mod: 0, - mul_mod: 0, - range_check96: 0, - }, - n_steps: 169, - n_memory_holes: 0, - data_availability: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - total_gas_consumed: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - l2_gas: L2Gas(0), - }, - l2_to_l1_messages: vec![], - execution_status: Succeeded, - transaction_hash: 
transaction_hash!("0x04D16393D940FB4A97F20B9034E2A5E954201FEE827B2B5C6DAA38EC272E7C9C"), - transaction_index: TransactionIndex::new_or_panic( - 14, - ), - }, - vec![], - ), - ( - Transaction { - hash: transaction_hash!("0x009E80672EDD4927A79F5384E656416B066F8EF58238227AC0FCEA01952B70B5"), - variant: InvokeV0( - InvokeTransactionV0 { - calldata: vec![ - call_param!("0x06EE3440B08A9C805305449EC7F7003F27E9F7E287B83610952EC36BDC5A6BAE"), - call_param!("0x05F750DC13ED239FA6FC43FF6E10AE9125A33BD05EC034FC3BB4DD168DF3505F"), - ], - sender_address: contract_address!("0x031C887D82502CEB218C06EBB46198DA3F7B92864A8223746BC836DDA3E34B52"), - entry_point_selector: entry_point!("0x019A35A6E95CB7A3318DBB244F20975A1CD8587CC6B5259F15F61D7BEB7EE43B"), - entry_point_type: None, - max_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - signature: vec![], - }, - ), - }, - Receipt { - actual_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - execution_resources: ExecutionResources { - builtins: BuiltinCounters { - output: 0, - pedersen: 0, - range_check: 0, - ecdsa: 0, - bitwise: 0, - ec_op: 0, - keccak: 0, - poseidon: 0, - segment_arena: 0, - add_mod: 0, - mul_mod: 0, - range_check96: 0, - }, - n_steps: 178, - n_memory_holes: 0, - data_availability: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - total_gas_consumed: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - l2_gas: L2Gas(0), - }, - l2_to_l1_messages: vec![], - execution_status: Succeeded, - transaction_hash: transaction_hash!("0x009E80672EDD4927A79F5384E656416B066F8EF58238227AC0FCEA01952B70B5"), - transaction_index: TransactionIndex::new_or_panic( - 15, - ), - }, - vec![], - ), - ( - Transaction { - hash: transaction_hash!("0x0387B5B63E40D4426754895FE52ADF668CF8FDE2A02AA9B6D761873F31AF3462"), - variant: InvokeV0( - InvokeTransactionV0 { - calldata: vec![ - call_param!("0x0449908C349E90F81AB13042B1E49DC251EB6E3E51092D9A40F86859F7F415B0"), - call_param!("0x02670B3A8266D5046696A4B79F7433D117D3A19166F15BBD8585822C4E9B7491"), - ], - sender_address: contract_address!("0x06EE3440B08A9C805305449EC7F7003F27E9F7E287B83610952EC36BDC5A6BAE"), - entry_point_selector: entry_point!("0x03D7905601C217734671143D457F0DB37F7F8883112ABD34B92C4ABFEAFDE0C3"), - entry_point_type: None, - max_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - signature: vec![], - }, - ), - }, - Receipt { - actual_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - execution_resources: ExecutionResources { - builtins: BuiltinCounters { - output: 0, - pedersen: 0, - range_check: 0, - ecdsa: 0, - bitwise: 0, - ec_op: 0, - keccak: 0, - poseidon: 0, - segment_arena: 0, - add_mod: 0, - mul_mod: 0, - range_check96: 0, - }, - n_steps: 25, - n_memory_holes: 0, - data_availability: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - total_gas_consumed: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - l2_gas: L2Gas(0), - }, - l2_to_l1_messages: vec![], - execution_status: Succeeded, - transaction_hash: transaction_hash!("0x0387B5B63E40D4426754895FE52ADF668CF8FDE2A02AA9B6D761873F31AF3462"), - transaction_index: TransactionIndex::new_or_panic( - 16, - ), - }, - vec![], - ), - ( - Transaction { - hash: transaction_hash!("0x04F0CDFF0D72FC758413A16DB2BC7580DFEC7889A8B921F0FE08641FA265E997"), - variant: InvokeV0( - InvokeTransactionV0 { - calldata: vec![ - call_param!("0x0449908C349E90F81AB13042B1E49DC251EB6E3E51092D9A40F86859F7F415B0"), - 
call_param!("0x06CB6104279E754967A721B52BCF5BE525FDC11FA6DB6EF5C3A4DB832ACF7804"), - ], - sender_address: contract_address!("0x06EE3440B08A9C805305449EC7F7003F27E9F7E287B83610952EC36BDC5A6BAE"), - entry_point_selector: entry_point!("0x03D7905601C217734671143D457F0DB37F7F8883112ABD34B92C4ABFEAFDE0C3"), - entry_point_type: None, - max_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - signature: vec![], - }, - ), - }, - Receipt { - actual_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - execution_resources: ExecutionResources { - builtins: BuiltinCounters { - output: 0, - pedersen: 0, - range_check: 0, - ecdsa: 0, - bitwise: 0, - ec_op: 0, - keccak: 0, - poseidon: 0, - segment_arena: 0, - add_mod: 0, - mul_mod: 0, - range_check96: 0, - }, - n_steps: 25, - n_memory_holes: 0, - data_availability: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - total_gas_consumed: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - l2_gas: L2Gas(0), - }, - l2_to_l1_messages: vec![], - execution_status: Succeeded, - transaction_hash: transaction_hash!("0x04F0CDFF0D72FC758413A16DB2BC7580DFEC7889A8B921F0FE08641FA265E997"), - transaction_index: TransactionIndex::new_or_panic( - 17, - ), - }, - vec![], - ), - ], - state_update: StateUpdate { - block_hash: block_hash!("0x047C3637B57C2B079B93C61539950C17E868A28F46CDEF28F88521067F21E943"), - parent_state_commitment: state_commitment!("0x0000000000000000000000000000000000000000000000000000000000000000"), - state_commitment: state_commitment!("0x021870BA80540E7831FB21C591EE93481F5AE1BB71FF85A86DDD465BE4EDDEE6"), - contract_updates: HashMap::from_iter([ - (contract_address!("0x031C9CDB9B00CB35CF31C05855C0EC3ECF6F7952A1CE6E3C53C3455FCD75A280"), ContractUpdate { - storage: HashMap::from_iter([ - (storage_address!("0x05FAC6815FDDF6AF1CA5E592359862EDE14F171E1544FD9E792288164097C35D"), storage_value!("0x00299E2F4B5A873E95E65EB03D31E532EA2CDE43B498B50CD3161145DB5542A5")), - (storage_address!("0x05FAC6815FDDF6AF1CA5E592359862EDE14F171E1544FD9E792288164097C35E"), storage_value!("0x03D6897CF23DA3BF4FD35CC7A43CCAF7C5EAF8F7C5B9031AC9B09A929204175F")), - (storage_address!("0x0000000000000000000000000000000000000000000000000000000000000005"), storage_value!("0x0000000000000000000000000000000000000000000000000000000000000065")), - (storage_address!("0x00CFC2E2866FD08BFB4AC73B70E0C136E326AE18FC797A2C090C8811C695577E"), storage_value!("0x05F1DD5A5AEF88E0498EECA4E7B2EA0FA7110608C11531278742F0B5499AF4B3")), - (storage_address!("0x05AEE31408163292105D875070F98CB48275B8C87E80380B78D30647E05854D5"), storage_value!("0x00000000000000000000000000000000000000000000000000000000000007C7")), - ]), - class: Some( - Deploy( - class_hash!("0x010455C752B86932CE552F2B0FE81A880746649B9AEE7E0D842BF3F52378F9F8"), - ), - ), - nonce: None, - }), - (contract_address!("0x031C887D82502CEB218C06EBB46198DA3F7B92864A8223746BC836DDA3E34B52"), ContractUpdate { - storage: HashMap::from_iter([ - (storage_address!("0x05F750DC13ED239FA6FC43FF6E10AE9125A33BD05EC034FC3BB4DD168DF3505F"), storage_value!("0x00000000000000000000000000000000000000000000000000000000000007C7")), - (storage_address!("0x00DF28E613C065616A2E79CA72F9C1908E17B8C913972A9993DA77588DC9CAE9"), storage_value!("0x01432126AC23C7028200E443169C2286F99CDB5A7BF22E607BCD724EFA059040")), - ]), - class: Some( - Deploy( - class_hash!("0x010455C752B86932CE552F2B0FE81A880746649B9AEE7E0D842BF3F52378F9F8"), - ), - ), - nonce: None, - }), - 
(contract_address!("0x020CFA74EE3564B4CD5435CDACE0F9C4D43B939620E4A0BB5076105DF0A626C6"), ContractUpdate { - storage: HashMap::from_iter([ - (storage_address!("0x0313AD57FDF765ADDC71329ABF8D74AC2BCE6D46DA8C2B9B82255A5076620300"), storage_value!("0x04E7E989D58A17CD279ECA440C5EAA829EFB6F9967AAAD89022ACBE644C39B36")), - (storage_address!("0x05AEE31408163292105D875070F98CB48275B8C87E80380B78D30647E05854D5"), storage_value!("0x00000000000000000000000000000000000000000000000000000000000007E5")), - (storage_address!("0x0313AD57FDF765ADDC71329ABF8D74AC2BCE6D46DA8C2B9B82255A5076620301"), storage_value!("0x0453AE0C9610197B18B13645C44D3D0A407083D96562E8752AAB3FAB616CECB0")), - (storage_address!("0x06CF6C2F36D36B08E591E4489E92CA882BB67B9C39A3AFCCF011972A8DE467F0"), storage_value!("0x07AB344D88124307C07B56F6C59C12F4543E9C96398727854A322DEA82C73240")), - (storage_address!("0x0000000000000000000000000000000000000000000000000000000000000005"), storage_value!("0x000000000000000000000000000000000000000000000000000000000000022B")), - ]), - class: Some( - Deploy( - class_hash!("0x010455C752B86932CE552F2B0FE81A880746649B9AEE7E0D842BF3F52378F9F8"), - ), - ), - nonce: None, - }), - (contract_address!("0x06EE3440B08A9C805305449EC7F7003F27E9F7E287B83610952EC36BDC5A6BAE"), ContractUpdate { - storage: HashMap::from_iter([ - (storage_address!("0x05BDAF1D47B176BFCD1114809AF85A46B9C4376E87E361D86536F0288A284B66"), storage_value!("0x057A8F8A019CCAB5BFC6FF86C96B1392257ABB8D5D110C01D326B94247AF161C")), - (storage_address!("0x05BDAF1D47B176BFCD1114809AF85A46B9C4376E87E361D86536F0288A284B65"), storage_value!("0x028DFF6722AA73281B2CF84CAC09950B71FA90512DB294D2042119ABDD9F4B87")), - (storage_address!("0x048CBA68D4E86764105ADCDCF641AB67B581A55A4F367203647549C8BF1FEEA2"), storage_value!("0x0362D24A3B030998AC75E838955DFEE19EC5B6ECEB235B9BFBECCF51B6304D0B")), - (storage_address!("0x05F750DC13ED239FA6FC43FF6E10AE9125A33BD05EC034FC3BB4DD168DF3505F"), storage_value!("0x00000000000000000000000000000000000000000000000000000000000007E5")), - (storage_address!("0x01E2CD4B3588E8F6F9C4E89FB0E293BF92018C96D7A93EE367D29A284223B6FF"), storage_value!("0x071D1E9D188C784A0BDE95C1D508877A0D93E9102B37213D1E13F3EBC54A7751")), - (storage_address!("0x0449908C349E90F81AB13042B1E49DC251EB6E3E51092D9A40F86859F7F415B0"), storage_value!("0x06CB6104279E754967A721B52BCF5BE525FDC11FA6DB6EF5C3A4DB832ACF7804")), - ]), - class: Some( - Deploy( - class_hash!("0x010455C752B86932CE552F2B0FE81A880746649B9AEE7E0D842BF3F52378F9F8"), - ), - ), - nonce: None, - }), - (contract_address!("0x0735596016A37EE972C42ADEF6A3CF628C19BB3794369C65D2C82BA034AECF2C"), ContractUpdate { - storage: HashMap::from_iter([ - (storage_address!("0x002F50710449A06A9FA789B3C029A63BD0B1F722F46505828A9F815CF91B31D8"), storage_value!("0x02A222E62EABE91ABDB6838FA8B267FFE81A6EB575F61E96EC9AA4460C0925A2")), - (storage_address!("0x0000000000000000000000000000000000000000000000000000000000000005"), storage_value!("0x0000000000000000000000000000000000000000000000000000000000000064")), - ]), - class: Some( - Deploy( - class_hash!("0x010455C752B86932CE552F2B0FE81A880746649B9AEE7E0D842BF3F52378F9F8"), - ), - ), - nonce: None, - }), - ]), - system_contract_updates: Default::default(), - declared_cairo_classes: Default::default(), - declared_sierra_classes: Default::default(), - }, - cairo_defs: Default::default(), - sierra_defs: Default::default(), - }, - Block { - header: SignedBlockHeader { - header: BlockHeader { - hash: 
block_hash!("0x02A70FB03FE363A2D6BE843343A1D81CE6ABEDA1E9BD5CC6AD8FA9F45E30FDEB"), - parent_hash: block_hash!("0x047C3637B57C2B079B93C61539950C17E868A28F46CDEF28F88521067F21E943"), - number: BlockNumber::new_or_panic(1), - timestamp: BlockTimestamp::new_or_panic( - 1637072695, - ), - eth_l1_gas_price: GasPrice(0), - strk_l1_gas_price: GasPrice(0), - eth_l1_data_gas_price: GasPrice(1), - strk_l1_data_gas_price: GasPrice(1), - eth_l2_gas_price: GasPrice(3), - strk_l2_gas_price: GasPrice(3), - sequencer_address: Default::default(), - starknet_version: StarknetVersion::default(), - class_commitment: class_commitment!("0x0"), - event_commitment: event_commitment!("0x0"), - state_commitment: state_commitment!("0x0525AED4DA9CC6CCE2DE31BA79059546B0828903279E4EAA38768DE33E2CAC32"), - storage_commitment: storage_commitment!("0x0525AED4DA9CC6CCE2DE31BA79059546B0828903279E4EAA38768DE33E2CAC32"), - transaction_commitment: transaction_commitment!("0x040BA52F90B741CD059DBDBACAD788D327E7C8C89DD258881043FD969CDAD86E"), - transaction_count: 8, - event_count: 0, - l1_da_mode: L1DataAvailabilityMode::Calldata, - receipt_commitment: receipt_commitment!("0x00FB6833B56FCA428975B0DF7875F35B7EADBD26B517DAF1B9702E1D85665065"), - state_diff_commitment: state_diff_commitment!("0x05D83BBEEDF35B7D310A43B11F3623DD5D705FF09A7FBC8B634222E083433CAE"), - state_diff_length: 12, - }, - signature: BlockCommitmentSignature { - r: block_commitment_signature_elem!("0x05C328D673C07E530A45D6F12E569DF0D059D97BF920D978E44DAA54FB3DB655"), - s: block_commitment_signature_elem!("0x037FEBA468C96099A9A610C34BAB6230AF8C7E9D78C1DDB7436488961FC5161D") - }, - }, - transaction_data: vec![ - ( - Transaction { - hash: transaction_hash!("0x02F07A65F9F7A6445B2A0B1FB90EF12F5FD3B94128D06A67712EFD3B2F163533"), - variant: DeployV0( - DeployTransactionV0 { - class_hash: class_hash!("0x010455C752B86932CE552F2B0FE81A880746649B9AEE7E0D842BF3F52378F9F8"), - contract_address: contract_address!("0x0327D34747122D7A40F4670265B098757270A449EC80C4871450FFFDAB7C2FA8"), - contract_address_salt: contract_address_salt!("0x03A6B18FC3415B7D749F18483393B0D6A1AEF168435016C0F5F5D8902A84A36F"), - constructor_calldata: vec![ - constructor_param!("0x04184FA5A6D40F47A127B046ED6FACFA3E6BC3437B393DA65CC74AFE47CA6C6E"), - constructor_param!("0x001EF78E458502CD457745885204A4AE89F3880EC24DB2D8CA97979DCE15FEDC"), - ], - }, - ), - }, - Receipt { - actual_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - execution_resources: ExecutionResources { - builtins: BuiltinCounters { - output: 0, - pedersen: 0, - range_check: 0, - ecdsa: 0, - bitwise: 0, - ec_op: 0, - keccak: 0, - poseidon: 0, - segment_arena: 0, - add_mod: 0, - mul_mod: 0, - range_check96: 0, - }, - n_steps: 29, - n_memory_holes: 0, - data_availability: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - total_gas_consumed: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - l2_gas: L2Gas(0), - }, - l2_to_l1_messages: vec![], - execution_status: Succeeded, - transaction_hash: transaction_hash!("0x02F07A65F9F7A6445B2A0B1FB90EF12F5FD3B94128D06A67712EFD3B2F163533"), - transaction_index: TransactionIndex::new_or_panic( - 0, - ), - }, - vec![], - ), - ( - Transaction { - hash: transaction_hash!("0x0214C14F39B8AA2DCECFDCA68E540957624E8DB6C3A9012939FF1399975910A0"), - variant: DeployV0( - DeployTransactionV0 { - class_hash: class_hash!("0x010455C752B86932CE552F2B0FE81A880746649B9AEE7E0D842BF3F52378F9F8"), - contract_address: 
contract_address!("0x06538FDD3AA353AF8A87F5FE77D1F533EA82815076E30A86D65B72D3EB4F0B80"), - contract_address_salt: contract_address_salt!("0x0090677B5114F8DF8BB7DD5E57A90CCEABE385540CB0CA857ED68E22BD76E20A"), - constructor_calldata: vec![ - constructor_param!("0x010212FA2BE788E5D943714D6A9EAC5E07D8B4B48EAD96B8D0A0CBE7A6DC3832"), - constructor_param!("0x008A81230A7E3FFA40ABE541786A9B69FBB601434CEC9536D5D5B2EE4DF90383"), - ], - }, - ), - }, - Receipt { - actual_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - execution_resources: ExecutionResources { - builtins: BuiltinCounters { - output: 0, - pedersen: 0, - range_check: 0, - ecdsa: 0, - bitwise: 0, - ec_op: 0, - keccak: 0, - poseidon: 0, - segment_arena: 0, - add_mod: 0, - mul_mod: 0, - range_check96: 0, - }, - n_steps: 29, - n_memory_holes: 0, - data_availability: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - total_gas_consumed: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - l2_gas: L2Gas(0), - }, - l2_to_l1_messages: vec![], - execution_status: Succeeded, - transaction_hash: transaction_hash!("0x0214C14F39B8AA2DCECFDCA68E540957624E8DB6C3A9012939FF1399975910A0"), - transaction_index: TransactionIndex::new_or_panic( - 1, - ), - }, - vec![], - ), - ( - Transaction { - hash: transaction_hash!("0x071EED7F033331C8D7BD1A4DCA8EEDF16951A904DE3E195005E49AAE9E502CA6"), - variant: InvokeV0( - InvokeTransactionV0 { - calldata: vec![ - call_param!("0x0327D34747122D7A40F4670265B098757270A449EC80C4871450FFFDAB7C2FA8"), - call_param!("0x0000000000000000000000000000000000000000000000000000000000000000"), - ], - sender_address: contract_address!("0x06538FDD3AA353AF8A87F5FE77D1F533EA82815076E30A86D65B72D3EB4F0B80"), - entry_point_selector: entry_point!("0x0218F305395474A84A39307FA5297BE118FE17BF65E27AC5E2DE6617BAA44C64"), - entry_point_type: None, - max_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - signature: vec![], - }, - ), - }, - Receipt { - actual_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - execution_resources: ExecutionResources { - builtins: BuiltinCounters { - output: 0, - pedersen: 0, - range_check: 0, - ecdsa: 0, - bitwise: 0, - ec_op: 0, - keccak: 0, - poseidon: 0, - segment_arena: 0, - add_mod: 0, - mul_mod: 0, - range_check96: 0, - }, - n_steps: 238, - n_memory_holes: 0, - data_availability: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - total_gas_consumed: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - l2_gas: L2Gas(0), - }, - l2_to_l1_messages: vec![], - execution_status: Succeeded, - transaction_hash: transaction_hash!("0x071EED7F033331C8D7BD1A4DCA8EEDF16951A904DE3E195005E49AAE9E502CA6"), - transaction_index: TransactionIndex::new_or_panic( - 2, - ), - }, - vec![], - ), - ( - Transaction { - hash: transaction_hash!("0x01059391B8C4FBA9743B531BA371908195CCB5DCF2A9532FAC247256FB48912F"), - variant: InvokeV0( - InvokeTransactionV0 { - calldata: vec![ - call_param!("0x0327D34747122D7A40F4670265B098757270A449EC80C4871450FFFDAB7C2FA8"), - call_param!("0x0317EB442B72A9FAE758D4FB26830ED0D9F31C8E7DA4DBFF4E8C59EA6A158E7F"), - call_param!("0x0000000000000000000000000000000000000000000000000000000000000004"), - call_param!("0x05BD24B507FCC2FD77DC7847BABB8DF01363D58E9B0BBCD2D06D982E1F3E0C86"), - call_param!("0x0000000000000000000000000000000000000000000000000000000000000002"), - call_param!("0x026B5943D4A0C420607CEE8030A8CDD859BF2814A06633D165820960A42C6AED"), - 
call_param!("0x01518EEC76AFD5397CEFD14EDA48D01AD59981F9CE9E70C233CA67ACD8754008"), - ], - sender_address: contract_address!("0x0327D34747122D7A40F4670265B098757270A449EC80C4871450FFFDAB7C2FA8"), - entry_point_selector: entry_point!("0x027C3334165536F239CFD400ED956EABFF55FC60DE4FB56728B6A4F6B87DB01C"), - entry_point_type: None, - max_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - signature: vec![], - }, - ), - }, - Receipt { - actual_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - execution_resources: ExecutionResources { - builtins: BuiltinCounters { - output: 0, - pedersen: 2, - range_check: 8, - ecdsa: 0, - bitwise: 0, - ec_op: 0, - keccak: 0, - poseidon: 0, - segment_arena: 0, - add_mod: 0, - mul_mod: 0, - range_check96: 0, - }, - n_steps: 209, - n_memory_holes: 0, - data_availability: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - total_gas_consumed: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - l2_gas: L2Gas(0), - }, - l2_to_l1_messages: vec![], - execution_status: Succeeded, - transaction_hash: transaction_hash!("0x01059391B8C4FBA9743B531BA371908195CCB5DCF2A9532FAC247256FB48912F"), - transaction_index: TransactionIndex::new_or_panic( - 3, - ), - }, - vec![], - ), - ( - Transaction { - hash: transaction_hash!("0x073FE0B59AC28A2C3C28B4D8713F4F84D4463C48245539644838CF1E8526B536"), - variant: InvokeV0( - InvokeTransactionV0 { - calldata: vec![ - call_param!("0x06538FDD3AA353AF8A87F5FE77D1F533EA82815076E30A86D65B72D3EB4F0B80"), - call_param!("0x0000000000000000000000000000000000000000000000000000000000000001"), - ], - sender_address: contract_address!("0x0327D34747122D7A40F4670265B098757270A449EC80C4871450FFFDAB7C2FA8"), - entry_point_selector: entry_point!("0x0218F305395474A84A39307FA5297BE118FE17BF65E27AC5E2DE6617BAA44C64"), - entry_point_type: None, - max_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - signature: vec![], - }, - ), - }, - Receipt { - actual_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - execution_resources: ExecutionResources { - builtins: BuiltinCounters { - output: 0, - pedersen: 0, - range_check: 0, - ecdsa: 0, - bitwise: 0, - ec_op: 0, - keccak: 0, - poseidon: 0, - segment_arena: 0, - add_mod: 0, - mul_mod: 0, - range_check96: 0, - }, - n_steps: 332, - n_memory_holes: 0, - data_availability: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - total_gas_consumed: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - l2_gas: L2Gas(0), - }, - l2_to_l1_messages: vec![ - L2ToL1Message { - from_address: contract_address!("0x0327D34747122D7A40F4670265B098757270A449EC80C4871450FFFDAB7C2FA8"), - payload: vec![ - l2_to_l1_message_payload_elem!("0x000000000000000000000000000000000000000000000000000000000000000C"), - l2_to_l1_message_payload_elem!("0x0000000000000000000000000000000000000000000000000000000000000022"), - ], - to_address: contract_address!("0x0000000000000000000000000000000000000000000000000000000000000001"), - }, - ], - execution_status: Succeeded, - transaction_hash: transaction_hash!("0x073FE0B59AC28A2C3C28B4D8713F4F84D4463C48245539644838CF1E8526B536"), - transaction_index: TransactionIndex::new_or_panic( - 4, - ), - }, - vec![], - ), - ( - Transaction { - hash: transaction_hash!("0x0169D35E8210A26FD2439207D77EF2F0ABE77471ACBC2DA8D5EEAB5127D8D57B"), - variant: InvokeV0( - InvokeTransactionV0 { - calldata: vec![ - call_param!("0x0000000000000000000000009C47C96A115DAD3A7DBBDAFB2369FDAA2835D0D4"), - ], - 
sender_address: contract_address!("0x06538FDD3AA353AF8A87F5FE77D1F533EA82815076E30A86D65B72D3EB4F0B80"), - entry_point_selector: entry_point!("0x012EAD94AE9D3F9D2BDB6B847CF255F1F398193A1F88884A0AE8E18F24A037B6"), - entry_point_type: None, - max_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - signature: vec![], - }, - ), - }, - Receipt { - actual_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - execution_resources: ExecutionResources { - builtins: BuiltinCounters { - output: 0, - pedersen: 0, - range_check: 0, - ecdsa: 0, - bitwise: 0, - ec_op: 0, - keccak: 0, - poseidon: 0, - segment_arena: 0, - add_mod: 0, - mul_mod: 0, - range_check96: 0, - }, - n_steps: 31, - n_memory_holes: 0, - data_availability: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - total_gas_consumed: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - l2_gas: L2Gas(0), - }, - l2_to_l1_messages: vec![ - L2ToL1Message { - from_address: contract_address!("0x06538FDD3AA353AF8A87F5FE77D1F533EA82815076E30A86D65B72D3EB4F0B80"), - payload: vec![ - l2_to_l1_message_payload_elem!("0x000000000000000000000000000000000000000000000000000000000000000C"), - l2_to_l1_message_payload_elem!("0x0000000000000000000000000000000000000000000000000000000000000022"), - ], - to_address: contract_address!("0x0000000000000000000000009C47C96A115DAD3A7DBBDAFB2369FDAA2835D0D4"), - }, - ], - execution_status: Succeeded, - transaction_hash: transaction_hash!("0x0169D35E8210A26FD2439207D77EF2F0ABE77471ACBC2DA8D5EEAB5127D8D57B"), - transaction_index: TransactionIndex::new_or_panic( - 5, - ), - }, - vec![], - ), - ( - Transaction { - hash: transaction_hash!("0x068A8426D72BCAC7DC3C84C52D90F39F64FFDC10E50B86F8D6F047EE243E2BA1"), - variant: InvokeV0( - InvokeTransactionV0 { - calldata: vec![ - call_param!("0x02C4301154E2F60000CE44AF78B14619806DDA3B52ABE8BC224D49765A0924C1"), - call_param!("0x0000000000000000000000000000000000000000000000000000000000000002"), - call_param!("0x02B36318931915F71777F7E59246ECAB3189DB48408952CEFDA72F4B7977BE51"), - call_param!("0x07E928DCF189B05E4A3DAE0BC2CB98E447F1843F7DEBBBF574151EB67CDA8797"), - ], - sender_address: contract_address!("0x06538FDD3AA353AF8A87F5FE77D1F533EA82815076E30A86D65B72D3EB4F0B80"), - entry_point_selector: entry_point!("0x0317EB442B72A9FAE758D4FB26830ED0D9F31C8E7DA4DBFF4E8C59EA6A158E7F"), - entry_point_type: None, - max_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - signature: vec![], - }, - ), - }, - Receipt { - actual_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - execution_resources: ExecutionResources { - builtins: BuiltinCounters { - output: 0, - pedersen: 2, - range_check: 7, - ecdsa: 0, - bitwise: 0, - ec_op: 0, - keccak: 0, - poseidon: 0, - segment_arena: 0, - add_mod: 0, - mul_mod: 0, - range_check96: 0, - }, - n_steps: 165, - n_memory_holes: 0, - data_availability: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - total_gas_consumed: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - l2_gas: L2Gas(0), - }, - l2_to_l1_messages: vec![], - execution_status: Succeeded, - transaction_hash: transaction_hash!("0x068A8426D72BCAC7DC3C84C52D90F39F64FFDC10E50B86F8D6F047EE243E2BA1"), - transaction_index: TransactionIndex::new_or_panic( - 6, - ), - }, - vec![], - ), - ( - Transaction { - hash: transaction_hash!("0x07EFF4524AE42C2FFA72FF228CEE4729BF7F31C2A0AEFE3EE1C8ABE546442158"), - variant: InvokeV0( - InvokeTransactionV0 { - calldata: vec![ - 
call_param!("0x06538FDD3AA353AF8A87F5FE77D1F533EA82815076E30A86D65B72D3EB4F0B80"), - call_param!("0x01AED933FD362FAECD8EA54EE749092BD21F89901B7D1872312584AC5B636C6D"), - ], - sender_address: contract_address!("0x0327D34747122D7A40F4670265B098757270A449EC80C4871450FFFDAB7C2FA8"), - entry_point_selector: entry_point!("0x019A35A6E95CB7A3318DBB244F20975A1CD8587CC6B5259F15F61D7BEB7EE43B"), - entry_point_type: None, - max_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - signature: vec![], - }, - ), - }, - Receipt { - actual_fee: Fee(felt!("0x0000000000000000000000000000000000000000000000000000000000000000")), - execution_resources: ExecutionResources { - builtins: BuiltinCounters { - output: 0, - pedersen: 0, - range_check: 0, - ecdsa: 0, - bitwise: 0, - ec_op: 0, - keccak: 0, - poseidon: 0, - segment_arena: 0, - add_mod: 0, - mul_mod: 0, - range_check96: 0, - }, - n_steps: 178, - n_memory_holes: 0, - data_availability: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - total_gas_consumed: L1Gas { - l1_gas: 0, - l1_data_gas: 0, - }, - l2_gas: L2Gas(0), - }, - l2_to_l1_messages: vec![], - execution_status: Succeeded, - transaction_hash: transaction_hash!("0x07EFF4524AE42C2FFA72FF228CEE4729BF7F31C2A0AEFE3EE1C8ABE546442158"), - transaction_index: TransactionIndex::new_or_panic( - 7, - ), - }, - vec![], - ), - ], - state_update: StateUpdate { - block_hash: block_hash!("0x02A70FB03FE363A2D6BE843343A1D81CE6ABEDA1E9BD5CC6AD8FA9F45E30FDEB"), - parent_state_commitment: state_commitment!("0x021870BA80540E7831FB21C591EE93481F5AE1BB71FF85A86DDD465BE4EDDEE6"), - state_commitment: state_commitment!("0x0525AED4DA9CC6CCE2DE31BA79059546B0828903279E4EAA38768DE33E2CAC32"), - contract_updates: HashMap::from_iter([ - (contract_address!("0x06538FDD3AA353AF8A87F5FE77D1F533EA82815076E30A86D65B72D3EB4F0B80"), ContractUpdate { - storage: HashMap::from_iter([ - (storage_address!("0x00FFDA4B5CF0DCE9BC9B0D035210590C73375FDBB70CD94EC6949378BFFC410D"), storage_value!("0x07E928DCF189B05E4A3DAE0BC2CB98E447F1843F7DEBBBF574151EB67CDA8797")), - (storage_address!("0x00FFDA4B5CF0DCE9BC9B0D035210590C73375FDBB70CD94EC6949378BFFC410C"), storage_value!("0x02B36318931915F71777F7E59246ECAB3189DB48408952CEFDA72F4B7977BE51")), - (storage_address!("0x01AED933FD362FAECD8EA54EE749092BD21F89901B7D1872312584AC5B636C6D"), storage_value!("0x00000000000000000000000000000000000000000000000000000000000007E5")), - (storage_address!("0x010212FA2BE788E5D943714D6A9EAC5E07D8B4B48EAD96B8D0A0CBE7A6DC3832"), storage_value!("0x008A81230A7E3FFA40ABE541786A9B69FBB601434CEC9536D5D5B2EE4DF90383")), - (storage_address!("0x0000000000000000000000000000000000000000000000000000000000000005"), storage_value!("0x000000000000000000000000000000000000000000000000000000000000022B")), - ]), - class: Some( - Deploy( - class_hash!("0x010455C752B86932CE552F2B0FE81A880746649B9AEE7E0D842BF3F52378F9F8"), - ), - ), - nonce: None, - }), - (contract_address!("0x0327D34747122D7A40F4670265B098757270A449EC80C4871450FFFDAB7C2FA8"), ContractUpdate { - storage: HashMap::from_iter([ - (storage_address!("0x04184FA5A6D40F47A127B046ED6FACFA3E6BC3437B393DA65CC74AFE47CA6C6E"), storage_value!("0x001EF78E458502CD457745885204A4AE89F3880EC24DB2D8CA97979DCE15FEDC")), - (storage_address!("0x0000000000000000000000000000000000000000000000000000000000000005"), storage_value!("0x0000000000000000000000000000000000000000000000000000000000000065")), - (storage_address!("0x01AED933FD362FAECD8EA54EE749092BD21F89901B7D1872312584AC5B636C6D"), 
storage_value!("0x00000000000000000000000000000000000000000000000000000000000007C7")), - (storage_address!("0x05591C8C3C8D154A30869B463421CD5933770A0241E1A6E8EBCBD91BDD69BEC4"), storage_value!("0x026B5943D4A0C420607CEE8030A8CDD859BF2814A06633D165820960A42C6AED")), - (storage_address!("0x05591C8C3C8D154A30869B463421CD5933770A0241E1A6E8EBCBD91BDD69BEC5"), storage_value!("0x01518EEC76AFD5397CEFD14EDA48D01AD59981F9CE9E70C233CA67ACD8754008")), - ]), - class: Some( - Deploy( - class_hash!("0x010455C752B86932CE552F2B0FE81A880746649B9AEE7E0D842BF3F52378F9F8"), - ), - ), - nonce: None, - }), - ]), - system_contract_updates: Default::default(), - declared_cairo_classes: Default::default(), - declared_sierra_classes: Default::default(), - }, - cairo_defs: Default::default(), - sierra_defs: Default::default(), - }, - ] -} diff --git a/crates/pathfinder/src/sync/fixtures/sepolia_headers.json b/crates/pathfinder/src/sync/fixtures/sepolia_headers.json index 694e6b5f15..a296052d0a 100644 --- a/crates/pathfinder/src/sync/fixtures/sepolia_headers.json +++ b/crates/pathfinder/src/sync/fixtures/sepolia_headers.json @@ -11,8 +11,8 @@ "transaction_count": 7, "event_count": 4, "signature": [ - "0x30a199364f1cec47d386da1839015413a464456ca6d63d6f45156d0a1254356", - "0x784aba6f4948a6a1caa4f7247adcabdfe41a8ec21668c30621cd9d265516dc6" + "0x315b1d77f8b1fc85657725639e88d4e1bfe846b4a866ddeb2e74cd91ccff9ca", + "0x3cbd913e55ca0c9ab107a5988dd4c54d56dd3700948a2b96c19d4728a5864de" ], "state_diff_commitment": "0x047ED57C27FE5AF431AB0D85E4008FC68F04D5E2F7748CDA496C62ABB27944CC", "state_diff_length": 11, @@ -32,8 +32,8 @@ "transaction_count": 1, "event_count": 0, "signature": [ - "0x47fe7aea2435f8be68f0dce8f859f0f2f31db43eddc9269a200ef7a530e34a0", - "0x636260cd4a8a9991a9a2f7b09a4d8ebdfbfdb7d32d0a2b8e23d8ebc0ae014dc" + "0x6c17114df2b79fcbee030755c4bfbee5d988e900ec7b36e102e40bf5f275154", + "0x16d6e992350b67d9101b16612a73560d10753c6bb05e30bce84823ee6813ee" ], "state_diff_commitment": "0x01990D108859C231985BB27DD2E1C3B77A554C9BECB37419E1076CF6032EFBEC", "state_diff_length": 1, @@ -53,8 +53,8 @@ "transaction_count": 1, "event_count": 0, "signature": [ - "0x328251965bcf8dc8f870a027eebe73627b8dbd82d6981cfbd4231a197da53ae", - "0x75bb4eb017f274f1f6e2a785c921a51baa761dad2d85fe54fd8607d816b829f" + "0x7cc4bd63549a47ce8c4821476649c9a19e84cfa6be67c15d8534c856902dc6c", + "0x3e4911a5d981fb702e99844e91797740201e8197e5fe0bcd12bf512ada47653" ], "state_diff_commitment": "0x07320438AE12FF18334E8C0BED2A4382DF1FEA88F0AAA9357B99797D04ECD396", "state_diff_length": 1, diff --git a/crates/pathfinder/src/sync/headers.rs b/crates/pathfinder/src/sync/headers.rs index 5e78486b48..a0589230a5 100644 --- a/crates/pathfinder/src/sync/headers.rs +++ b/crates/pathfinder/src/sync/headers.rs @@ -159,7 +159,6 @@ pub struct BackwardContinuity { /// Ensures that the block hash and signature are correct. 
pub struct VerifyHashAndSignature { - chain: Chain, chain_id: ChainId, public_key: PublicKey, block_hash_db: Option, @@ -238,9 +237,7 @@ impl ProcessStage for VerifyHashAndSignature { } if !self.verify_signature(&input) { - // TODO: make this an error once state diff commitments and - // signatures are fixed on the feeder gateway return - // Err(SyncError2::BadHeaderSignature); + return Err(SyncError::BadHeaderSignature(*peer)); } Ok(input) @@ -249,13 +246,11 @@ impl ProcessStage for VerifyHashAndSignature { impl VerifyHashAndSignature { pub fn new( - chain: Chain, chain_id: ChainId, public_key: PublicKey, block_hash_db: Option, ) -> Self { Self { - chain, chain_id, public_key, block_hash_db, diff --git a/crates/pathfinder/src/sync/state_updates.rs b/crates/pathfinder/src/sync/state_updates.rs index 152c754a67..26ba3b2e8f 100644 --- a/crates/pathfinder/src/sync/state_updates.rs +++ b/crates/pathfinder/src/sync/state_updates.rs @@ -279,96 +279,36 @@ pub async fn batch_update_starknet_state( let PeerData { peer, data: merged } = merge_state_updates(state_updates); - let state_update_ref: StateUpdateRef<'_> = (&merged).into(); - - update_starknet_state_impl( - &peer, - db, - state_update_ref, + let (storage_commitment, class_commitment) = update_starknet_state( + &db, + (&merged).into(), verify_tree_hashes, tail, - storage, - )?; + storage.clone(), + ) + .context("Updating Starknet state")?; + let state_commitment = StateCommitment::calculate(storage_commitment, class_commitment); + let expected_state_commitment = db + .state_commitment(tail.into()) + .context("Querying state commitment")? + .context("State commitment not found")?; + if state_commitment != expected_state_commitment { + tracing::debug!( + %peer, + %tail, + actual_storage_commitment=%storage_commitment, + actual_class_commitment=%class_commitment, + actual_state_commitment=%state_commitment, + %expected_state_commitment, + "State root mismatch"); + return Err(SyncError::StateRootMismatch(peer)); + } + db.update_storage_and_class_commitments(tail, storage_commitment, class_commitment) + .context("Updating storage and class commitments")?; + db.commit().context("Committing db transaction")?; Ok(PeerData::new(peer, tail)) }) .await .context("Joining blocking task")? 
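Note on the state_updates.rs hunk above: it folds the state-root check directly into batch_update_starknet_state. After the tries are updated, the recomputed state commitment is compared against the commitment recorded for the block, and a mismatch is logged and reported as SyncError::StateRootMismatch for the offending peer (the separate UpdateStarknetState stage and update_starknet_state_impl helper are deleted further down in this file). The following is a minimal, self-contained sketch of that check only; Commitment, calculate_state_commitment and verify_state_root are hypothetical stand-ins for pathfinder's storage and commitment APIs, not the real ones.

    // Sketch only: mirrors the shape of the inlined state-root check.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct Commitment(u64);

    #[derive(Debug, PartialEq, Eq)]
    enum SyncError {
        StateRootMismatch { peer: String },
    }

    /// Hypothetical stand-in for combining the storage and class trie roots
    /// into a single state commitment.
    fn calculate_state_commitment(storage: Commitment, class: Commitment) -> Commitment {
        Commitment(storage.0 ^ class.0)
    }

    /// Shape of the check added in `batch_update_starknet_state`: recompute
    /// the commitment from the updated tries and compare it against the
    /// commitment stored for the block; report the peer on mismatch.
    fn verify_state_root(
        peer: &str,
        storage_commitment: Commitment,
        class_commitment: Commitment,
        expected_state_commitment: Commitment,
    ) -> Result<(), SyncError> {
        let state_commitment = calculate_state_commitment(storage_commitment, class_commitment);
        if state_commitment != expected_state_commitment {
            // In the diff this point also emits a `tracing::debug!` with the
            // actual and expected commitments before returning the error.
            return Err(SyncError::StateRootMismatch {
                peer: peer.to_owned(),
            });
        }
        Ok(())
    }

    fn main() {
        let storage = Commitment(0xAB);
        let class = Commitment(0x01);
        let expected = calculate_state_commitment(storage, class);
        assert!(verify_state_root("peer-1", storage, class, expected).is_ok());
        assert!(verify_state_root("peer-1", storage, class, Commitment(0)).is_err());
    }

Inlining the check keeps the commitment verification next to the trie update it validates, instead of splitting it between a pipeline stage and a free helper.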
} - -pub struct UpdateStarknetState { - pub storage: pathfinder_storage::Storage, - pub connection: pathfinder_storage::Connection, - pub current_block: BlockNumber, - pub verify_tree_hashes: bool, -} - -impl ProcessStage for UpdateStarknetState { - type Input = StateUpdateData; - type Output = BlockNumber; - - const NAME: &'static str = "StateDiff::UpdateStarknetState"; - - fn map(&mut self, peer: &PeerId, state_update: Self::Input) -> Result { - let mut db = self - .connection - .transaction() - .context("Creating database transaction")?; - - let tail = self.current_block; - - db.insert_state_update_data(self.current_block, &state_update) - .context("Inserting state update data")?; - - update_starknet_state_impl( - peer, - db, - (&state_update).into(), - self.verify_tree_hashes, - tail, - self.storage.clone(), - )?; - - self.current_block += 1; - - Ok(tail) - } -} - -fn update_starknet_state_impl( - peer: &PeerId, - db: pathfinder_storage::Transaction<'_>, - state_update_ref: StateUpdateRef<'_>, - verify_tree_hashes: bool, - tail: BlockNumber, - storage: Storage, -) -> Result<(), SyncError> { - let (storage_commitment, class_commitment) = update_starknet_state( - &db, - state_update_ref, - verify_tree_hashes, - tail, - storage.clone(), - ) - .context("Updating Starknet state")?; - let state_commitment = StateCommitment::calculate(storage_commitment, class_commitment); - let expected_state_commitment = db - .state_commitment(tail.into()) - .context("Querying state commitment")? - .context("State commitment not found")?; - if state_commitment != expected_state_commitment { - tracing::debug!( - %peer, - %tail, - actual_storage_commitment=%storage_commitment, - actual_class_commitment=%class_commitment, - actual_state_commitment=%state_commitment, - %expected_state_commitment, - "State root mismatch"); - return Err(SyncError::StateRootMismatch(*peer)); - } - db.update_storage_and_class_commitments(tail, storage_commitment, class_commitment) - .context("Updating storage and class commitments")?; - db.commit().context("Committing db transaction")?; - Ok(()) -} diff --git a/crates/pathfinder/src/sync/storage_adapters.rs b/crates/pathfinder/src/sync/storage_adapters.rs index c6878c45b4..2c2e495f19 100644 --- a/crates/pathfinder/src/sync/storage_adapters.rs +++ b/crates/pathfinder/src/sync/storage_adapters.rs @@ -113,15 +113,13 @@ mod tests { } fn expected_class_definition_counts(b: Block) -> usize { - let Block { - state_update: - StateUpdate { - declared_cairo_classes, - declared_sierra_classes, - .. - }, + let Block { state_update, .. } = b; + let StateUpdate { + declared_cairo_classes, + declared_sierra_classes, .. 
- } = b; + } = state_update.unwrap(); + declared_cairo_classes.len() + declared_sierra_classes.len() } @@ -165,10 +163,10 @@ mod tests { const DB_LEN: usize = 5; let ok_len = len.min(DB_LEN); let storage = pathfinder_storage::StorageBuilder::in_memory().unwrap(); - let expected = pathfinder_storage::fake::with_n_blocks(&storage, DB_LEN) - .into_iter() - .map(count_extractor) - .collect::>(); + let blocks = pathfinder_storage::fake::generate::n_blocks(DB_LEN); + pathfinder_storage::fake::fill(&storage, &blocks, None); + + let expected = blocks.into_iter().map(count_extractor).collect::>(); let stream = super::counts_stream( storage.clone(), BlockNumber::GENESIS, diff --git a/crates/pathfinder/src/sync/track.rs b/crates/pathfinder/src/sync/track.rs index 1bc84949f8..0b69f909ee 100644 --- a/crates/pathfinder/src/sync/track.rs +++ b/crates/pathfinder/src/sync/track.rs @@ -56,7 +56,6 @@ pub struct Sync { pub latest: L, pub p2p: P, pub storage: Storage, - pub chain: Chain, pub chain_id: ChainId, pub public_key: PublicKey, pub block_hash_db: Option, @@ -64,10 +63,11 @@ pub struct Sync { } impl Sync { + /// `next` and `parent_hash` will be advanced each time a block is stored. pub async fn run( self, - next: BlockNumber, - parent_hash: BlockHash, + next: &mut BlockNumber, + parent_hash: &mut BlockHash, fgw: SequencerClient, ) -> Result<(), SyncError> where @@ -82,13 +82,12 @@ impl Sync { let mut headers = HeaderSource { p2p: self.p2p.clone(), latest_onchain: self.latest.clone(), - start: next, + start: *next, } .spawn() - .pipe(headers::ForwardContinuity::new(next, parent_hash), 100) + .pipe(headers::ForwardContinuity::new(*next, *parent_hash), 100) .pipe( headers::VerifyHashAndSignature::new( - self.chain, self.chain_id, self.public_key, self.block_hash_db, @@ -140,7 +139,7 @@ impl Sync { let classes = ClassSource { p2p: self.p2p.clone(), declarations: declarations_1, - start: next, + start: *next, } .spawn() .pipe(class_definitions::VerifyLayout, 10) @@ -174,6 +173,15 @@ impl Sync { 10, ) .into_stream() + .inspect_ok( + |PeerData { + data: (stored_block_number, stored_block_hash), + .. + }| { + *next = *stored_block_number + 1; + *parent_hash = *stored_block_hash; + }, + ) .try_fold((), |_, _| std::future::ready(Ok(()))) .await } @@ -201,7 +209,6 @@ impl HeaderSource { tokio::spawn(async move { let mut latest_onchain = Box::pin(latest_onchain); while let Some(latest_onchain) = latest_onchain.next().await { - // TODO: handle reorgs correctly let mut headers = Box::pin(p2p.clone().header_stream(start, latest_onchain.0, false)); @@ -724,6 +731,7 @@ struct BlockData { pub classes: Vec, } +/// If successful, returns the stored block's number and hash. 
struct StoreBlock { connection: pathfinder_storage::Connection, // We need this so that we can create extra read-only transactions for parallel contract state @@ -750,7 +758,7 @@ impl StoreBlock { impl ProcessStage for StoreBlock { const NAME: &'static str = "Blocks::Persist"; type Input = BlockData; - type Output = (); + type Output = (BlockNumber, BlockHash); fn map(&mut self, peer: &PeerId, input: Self::Input) -> Result { let BlockData { @@ -809,6 +817,8 @@ impl ProcessStage for StoreBlock { db.insert_transaction_data(block_number, &transactions, Some(&ordered_events)) .context("Inserting transaction data")?; + db.insert_state_update_data(block_number, &state_diff) + .context("Inserting state update data")?; let (storage_commitment, class_commitment) = update_starknet_state( &db, @@ -833,8 +843,6 @@ impl ProcessStage for StoreBlock { db.update_storage_and_class_commitments(block_number, storage_commitment, class_commitment) .context("Updating storage and class commitments")?; - db.insert_state_update_data(block_number, &state_diff) - .context("Inserting state update data")?; classes.into_iter().try_for_each( |CompiledClass { @@ -870,270 +878,11 @@ impl ProcessStage for StoreBlock { let result = db .commit() .context("Committing transaction") - .map_err(Into::into); + .map_err(Into::into) + .map(|_| (block_number, header.hash)); tracing::debug!(number=%block_number, "Block stored"); result } } - -#[cfg(test)] -mod tests { - use futures::{stream, Stream, StreamExt}; - use p2p::client::types::{ - ClassDefinition, - ClassDefinitionsError, - EventsResponseStreamFailure, - Receipt as P2PReceipt, - StateDiffsError, - }; - use p2p::libp2p::PeerId; - use p2p::PeerData; - use p2p_proto::common::Hash; - use pathfinder_common::{BlockHeader, ReceiptCommitment, SignedBlockHeader}; - use pathfinder_storage::fake::init::Config; - use pathfinder_storage::fake::{self, Block}; - use pathfinder_storage::StorageBuilder; - use starknet_gateway_types::error::SequencerError; - - use super::*; - use crate::state::block_hash::{ - calculate_event_commitment, - calculate_receipt_commitment, - calculate_transaction_commitment, - compute_final_hash, - BlockHeaderData, - }; - - #[tokio::test] - async fn happy_path() { - const N: usize = 10; - let blocks = fake::init::with_n_blocks_and_config( - N, - Config { - calculate_block_hash: Box::new(|header: &BlockHeader| { - compute_final_hash(&BlockHeaderData::from_header(header)) - }), - calculate_transaction_commitment: Box::new(calculate_transaction_commitment), - calculate_receipt_commitment: Box::new(calculate_receipt_commitment), - calculate_event_commitment: Box::new(calculate_event_commitment), - }, - ); - - let BlockHeader { hash, number, .. 
} = blocks.last().unwrap().header.header; - let latest = (number, hash); - - let p2p: FakeP2PClient = FakeP2PClient { - blocks: blocks.clone(), - }; - - let storage = StorageBuilder::in_memory_with_trie_pruning_and_pool_size( - pathfinder_storage::TriePruneMode::Archive, - std::num::NonZeroU32::new(5).unwrap(), - ) - .unwrap(); - - let sync = Sync { - latest: futures::stream::iter(vec![latest]), - p2p, - storage: storage.clone(), - chain: Chain::SepoliaTestnet, - chain_id: ChainId::SEPOLIA_TESTNET, - public_key: PublicKey::default(), - block_hash_db: None, - verify_tree_hashes: false, - }; - - sync.run(BlockNumber::GENESIS, BlockHash::default(), FakeFgw) - .await - .unwrap(); - - let mut db = storage.connection().unwrap(); - let db = db.transaction().unwrap(); - for mut expected in blocks { - // TODO p2p sync does not update class and storage tries yet - expected.header.header.class_commitment = ClassCommitment::ZERO; - expected.header.header.storage_commitment = StorageCommitment::ZERO; - - let block_number = expected.header.header.number; - let block_id = block_number.into(); - let header = db.block_header(block_id).unwrap().unwrap(); - let signature = db.signature(block_id).unwrap().unwrap(); - let transaction_data = db.transaction_data_for_block(block_id).unwrap().unwrap(); - let state_update_data: StateUpdateData = - db.state_update(block_id).unwrap().unwrap().into(); - let declared = db.declared_classes_at(block_id).unwrap().unwrap(); - - let mut cairo_defs = HashMap::new(); - let mut sierra_defs = HashMap::new(); - - for class_hash in declared { - let class = db.class_definition(class_hash).unwrap().unwrap(); - match db.casm_hash(class_hash).unwrap() { - Some(casm_hash) => { - let casm = db.casm_definition(class_hash).unwrap().unwrap(); - sierra_defs.insert(SierraHash(class_hash.0), (class, casm)); - } - None => { - cairo_defs.insert(class_hash, class); - } - } - } - - pretty_assertions_sorted::assert_eq!(header, expected.header.header); - pretty_assertions_sorted::assert_eq!(signature, expected.header.signature); - pretty_assertions_sorted::assert_eq!( - header.state_diff_commitment, - expected.header.header.state_diff_commitment - ); - pretty_assertions_sorted::assert_eq!( - header.state_diff_length, - expected.header.header.state_diff_length - ); - pretty_assertions_sorted::assert_eq!(transaction_data, expected.transaction_data); - pretty_assertions_sorted::assert_eq!(state_update_data, expected.state_update.into()); - pretty_assertions_sorted::assert_eq!( - cairo_defs, - expected.cairo_defs.into_iter().collect::>() - ); - pretty_assertions_sorted::assert_eq!( - sierra_defs, - expected - .sierra_defs - .into_iter() - // All sierra fixtures are not compile-able - .map(|(h, s, _)| (h, (s, b"I'm from the fgw!".to_vec()))) - .collect::>() - ); - } - } - - #[derive(Clone)] - struct FakeP2PClient { - pub blocks: Vec, - } - - impl HeaderStream for FakeP2PClient { - fn header_stream( - self, - start: BlockNumber, - stop: BlockNumber, - reverse: bool, - ) -> impl Stream> + Send { - assert!(!reverse); - assert_eq!(start, self.blocks.first().unwrap().header.header.number); - assert_eq!(stop, self.blocks.last().unwrap().header.header.number); - - stream::iter( - self.blocks - .into_iter() - .map(|block| PeerData::for_tests(block.header)), - ) - } - } - - impl BlockClient for FakeP2PClient { - async fn transactions_for_block( - self, - block: BlockNumber, - ) -> Option<( - PeerId, - impl Stream> + Send, - )> { - let tr = self - .blocks - .iter() - .find(|b| b.header.header.number == block) - 
.unwrap() - .transaction_data - .iter() - .map(|(t, r, e)| Ok((t.clone(), P2PReceipt::from(r.clone())))) - .collect::>>(); - - Some((PeerId::random(), stream::iter(tr))) - } - - async fn state_diff_for_block( - self, - block: BlockNumber, - state_diff_length: u64, - ) -> Result, StateDiffsError> { - let sd: StateUpdateData = self - .blocks - .iter() - .find(|b| b.header.header.number == block) - .unwrap() - .state_update - .clone() - .into(); - - assert_eq!(sd.state_diff_length() as u64, state_diff_length); - - Ok(Some((PeerId::random(), sd))) - } - - async fn class_definitions_for_block( - self, - block: BlockNumber, - declared_classes_count: u64, - ) -> Result)>, ClassDefinitionsError> { - let b = self - .blocks - .iter() - .find(|b| b.header.header.number == block) - .unwrap(); - let defs = b - .cairo_defs - .iter() - .map(|(h, x)| ClassDefinition::Cairo { - block_number: block, - definition: x.clone(), - hash: *h, - }) - .chain( - b.sierra_defs - .iter() - .map(|(h, x, _)| ClassDefinition::Sierra { - block_number: block, - sierra_definition: x.clone(), - hash: *h, - }), - ) - .collect::>(); - - Ok(Some((PeerId::random(), defs))) - } - - async fn events_for_block( - self, - block: BlockNumber, - ) -> Option<( - PeerId, - impl Stream> + Send, - )> { - let e = self - .blocks - .iter() - .find(|b| b.header.header.number == block) - .unwrap() - .transaction_data - .iter() - .flat_map(|(t, _, e)| e.iter().map(move |e| (t.hash, e.clone()))) - .map(Ok) - .collect::>(); - - Some((PeerId::random(), stream::iter(e))) - } - } - - #[derive(Clone)] - struct FakeFgw; - - #[async_trait::async_trait] - impl GatewayApi for FakeFgw { - async fn pending_casm_by_hash(&self, _: ClassHash) -> Result { - Ok(bytes::Bytes::from_static(b"I'm from the fgw!")) - } - } -} diff --git a/crates/pathfinder/src/sync/transactions.rs b/crates/pathfinder/src/sync/transactions.rs index 1fb17159c7..91ad92314a 100644 --- a/crates/pathfinder/src/sync/transactions.rs +++ b/crates/pathfinder/src/sync/transactions.rs @@ -85,11 +85,11 @@ impl ProcessStage for CalculateHashes { ); type Output = UnverifiedTransactions; - fn map(&mut self, _: &PeerId, input: Self::Input) -> Result { + fn map(&mut self, peer: &PeerId, input: Self::Input) -> Result { use rayon::prelude::*; - // TODO remove the placeholder - let peer = &PeerId::random(); + let (transactions, block_number, version, expected_commitment) = input; + let transactions = transactions .into_par_iter() .map(|(tx, r)| { @@ -175,6 +175,7 @@ impl ProcessStage for VerifyCommitment { version, block_number, } = transactions; + let txs: Vec<_> = transactions.iter().map(|(t, _)| t.clone()).collect(); // This computation can only fail in case of internal trie error which is always // a fatal error diff --git a/crates/rpc/src/method/get_state_update.rs b/crates/rpc/src/method/get_state_update.rs index 6a660df49d..f5e78ed698 100644 --- a/crates/rpc/src/method/get_state_update.rs +++ b/crates/rpc/src/method/get_state_update.rs @@ -480,11 +480,13 @@ mod tests { /// Add some dummy state updates to the context for testing fn context_with_state_updates() -> (Vec, RpcContext) { + let blocks = pathfinder_storage::fake::generate::n_blocks(3); let storage = pathfinder_storage::StorageBuilder::in_memory().unwrap(); + pathfinder_storage::fake::fill(&storage, &blocks, None); - let state_updates = pathfinder_storage::fake::with_n_blocks(&storage, 3) + let state_updates = blocks .into_iter() - .map(|Block { state_update, .. }| state_update) + .map(|Block { state_update, .. 
}| state_update.unwrap()) .collect(); let context = RpcContext::for_tests().with_storage(storage); diff --git a/crates/storage/Cargo.toml b/crates/storage/Cargo.toml index fcd2b3190e..06244afd10 100644 --- a/crates/storage/Cargo.toml +++ b/crates/storage/Cargo.toml @@ -42,6 +42,7 @@ serde_json = { workspace = true, features = [ serde_with = { workspace = true } sha3 = { workspace = true } starknet-gateway-types = { path = "../gateway-types" } +tempfile = { workspace = true } thiserror = { workspace = true } tokio = { workspace = true } tracing = { workspace = true } @@ -51,6 +52,5 @@ zstd = { workspace = true, features = ["experimental"] } assert_matches = { workspace = true } pretty_assertions_sorted = { workspace = true } rstest = { workspace = true } -tempfile = { workspace = true } test-log = { workspace = true, features = ["trace"] } tracing-subscriber = { workspace = true } diff --git a/crates/storage/src/bloom.rs b/crates/storage/src/bloom.rs index 907a10894a..3886e311b8 100644 --- a/crates/storage/src/bloom.rs +++ b/crates/storage/src/bloom.rs @@ -416,6 +416,7 @@ mod tests { use super::*; const KEY: Felt = felt!("0x0218b538681900fad5a0b2ffe1d6781c0c3f14df5d32071ace0bdc9d46cb69ea"); + #[allow(dead_code)] const KEY1: Felt = felt!("0x0218b538681900fad5a0b2ffe1d6781c0c3f14df5d32071ace0bdc9d46cb69eb"); const KEY_NOT_IN_FILTER: Felt = felt!("0x0218b538681900fad5a0b2ffe1d6781c0c3f14df5d32071ace0bdc9d46cb69ec"); diff --git a/crates/storage/src/connection/block.rs b/crates/storage/src/connection/block.rs index 5a4f64763d..c2b6ddfb8f 100644 --- a/crates/storage/src/connection/block.rs +++ b/crates/storage/src/connection/block.rs @@ -1042,8 +1042,10 @@ mod tests { fn event_counts(#[case] sql: &str, #[case] num_of_missing_counts: usize) { use crate::fake; + let faked = fake::generate::n_blocks(10); let storage = StorageBuilder::in_memory().unwrap(); - let faked = fake::with_n_blocks(&storage, 10); + fake::fill(&storage, &faked, None); + let mut connection = storage.connection().unwrap(); let tx = connection.transaction().unwrap(); if !sql.is_empty() { diff --git a/crates/storage/src/fake.rs b/crates/storage/src/fake.rs index e3ecf7dd69..3c37f5b77b 100644 --- a/crates/storage/src/fake.rs +++ b/crates/storage/src/fake.rs @@ -1,33 +1,97 @@ -//! Create fake blockchain data for test purposes +//! 
Create fake blockchain storage for test purposes +use std::collections::{HashMap, HashSet}; + +use fake::{Fake, Faker}; use pathfinder_common::event::Event; use pathfinder_common::receipt::Receipt; +use pathfinder_common::state_update::{ + ContractClassUpdate, + ContractUpdate, + StateUpdateRef, + SystemContractUpdate, +}; +use pathfinder_common::test_utils::fake_non_empty_with_rng; use pathfinder_common::transaction::Transaction; -use pathfinder_common::{ClassHash, SierraHash, SignedBlockHeader, StateUpdate}; +use pathfinder_common::{ + class_definition, + BlockHash, + BlockHeader, + BlockNumber, + ChainId, + ClassCommitment, + ClassHash, + ContractAddress, + EventCommitment, + ReceiptCommitment, + SierraHash, + SignedBlockHeader, + StarknetVersion, + StateCommitment, + StateUpdate, + StorageCommitment, + TransactionCommitment, + TransactionHash, + TransactionIndex, +}; +use pathfinder_crypto::signature::SignatureError; +use pathfinder_crypto::Felt; +use rand::seq::IteratorRandom; use rand::Rng; +use starknet_gateway_types::class_hash::compute_class_hash; -use crate::Storage; +use crate::{Storage, StorageBuilder}; #[derive(Debug, Default, Clone, PartialEq)] pub struct Block { pub header: SignedBlockHeader, pub transaction_data: Vec<(Transaction, Receipt, Vec)>, - pub state_update: StateUpdate, + pub state_update: Option, pub cairo_defs: Vec<(ClassHash, Vec)>, // Cairo 0 definitions pub sierra_defs: Vec<(SierraHash, Vec, Vec)>, // Sierra + Casm definitions } -/// Initialize [`Storage`] with fake blocks and state updates -/// maintaining [**limited consistency -/// guarantees**](crate::fake::init::with_n_blocks) -pub fn with_n_blocks(storage: &Storage, n: usize) -> Vec { - let mut rng = rand::thread_rng(); - with_n_blocks_and_rng(storage, n, &mut rng) +pub type BlockHashFn = Box BlockHash>; +pub type SignBlockHashFn = Box Result<(Felt, Felt), SignatureError>>; +pub type TransactionCommitmentFn = + Box anyhow::Result>; +pub type ReceiptCommitmentFn = Box anyhow::Result>; +pub type EventCommitmentFn = + Box anyhow::Result>; +pub type UpdateTriesFn = Box< + dyn Fn( + &crate::Transaction<'_>, + StateUpdateRef<'_>, + bool, + BlockNumber, + Storage, + ) -> anyhow::Result<(StorageCommitment, ClassCommitment)>, +>; + +pub struct Config { + pub calculate_block_hash: BlockHashFn, + pub sign_block_hash: SignBlockHashFn, + pub calculate_transaction_commitment: TransactionCommitmentFn, + pub calculate_receipt_commitment: ReceiptCommitmentFn, + pub calculate_event_commitment: EventCommitmentFn, + pub update_tries: UpdateTriesFn, +} + +impl Default for Config { + fn default() -> Self { + Self { + calculate_block_hash: Box::new(|_| Faker.fake()), + sign_block_hash: Box::new(|_| Ok((Faker.fake(), Faker.fake()))), + calculate_transaction_commitment: Box::new(|_, _| Ok(Faker.fake())), + calculate_receipt_commitment: Box::new(|_| Ok(Faker.fake())), + calculate_event_commitment: Box::new(|_, _| Ok(Faker.fake())), + update_tries: Box::new(|_, _, _, _, _| Ok((Faker.fake(), Faker.fake()))), + } + } } -/// Initialize [`Storage`] with a slice of already generated blocks -pub fn fill(storage: &Storage, blocks: &[Block]) { - let mut connection = storage.connection().unwrap(); - let tx = connection.transaction().unwrap(); +pub fn fill(storage: &Storage, blocks: &[Block], update_tries: Option) { + let mut db = storage.connection().unwrap(); + let db = db.transaction().unwrap(); blocks.iter().for_each( |Block { @@ -38,8 +102,10 @@ pub fn fill(storage: &Storage, blocks: &[Block]) { sierra_defs, .. 
}| { - tx.insert_block_header(&header.header).unwrap(); - tx.insert_transaction_data( + db.insert_block_header(&header.header).unwrap(); + db.insert_signature(header.header.number, &header.signature) + .unwrap(); + db.insert_transaction_data( header.header.number, &transaction_data .iter() @@ -55,20 +121,36 @@ pub fn fill(storage: &Storage, blocks: &[Block]) { ), ) .unwrap(); - tx.insert_signature(header.header.number, &header.signature) - .unwrap(); + + if let Some(state_update) = state_update { + db.insert_state_update(header.header.number, state_update) + .unwrap(); + + if let Some(update_tries) = &update_tries { + update_tries( + &db, + state_update.into(), + false, + header.header.number, + storage.clone(), + ) + .unwrap(); + } + } cairo_defs.iter().for_each(|(cairo_hash, definition)| { - tx.insert_cairo_class(*cairo_hash, definition).unwrap() + db.update_cairo_class(*cairo_hash, definition).unwrap() }); sierra_defs .iter() .for_each(|(sierra_hash, sierra_definition, casm_definition)| { - tx.insert_sierra_class( + db.update_sierra_class( sierra_hash, sierra_definition, state_update + .as_ref() + .unwrap() .declared_sierra_classes .get(sierra_hash) .unwrap(), @@ -76,159 +158,100 @@ pub fn fill(storage: &Storage, blocks: &[Block]) { ) .unwrap() }); - - tx.insert_state_update(header.header.number, state_update) - .unwrap(); }, ); - tx.commit().unwrap(); -} -/// Same as [`with_n_blocks`] except caller can specify the rng used -pub fn with_n_blocks_and_rng(storage: &Storage, n: usize, rng: &mut R) -> Vec { - let blocks = init::with_n_blocks_and_rng(n, rng); - fill(storage, &blocks); - blocks + db.commit().unwrap(); } -/// Same as [`with_n_blocks`] except caller can specify the rng and additional -/// configuration -pub fn with_n_blocks_rng_and_config( - storage: &Storage, - n: usize, - rng: &mut R, - config: init::Config, -) -> Vec { - let blocks = init::with_n_blocks_rng_and_config(n, rng, config); - fill(storage, &blocks); - blocks -} - -/// Raw _fake state initializers_ -pub mod init { - - use fake::{Fake, Faker}; - use pathfinder_common::event::Event; - use pathfinder_common::receipt::Receipt; - use pathfinder_common::state_update::ContractClassUpdate; - use pathfinder_common::test_utils::fake_non_empty_with_rng; - use pathfinder_common::transaction::Transaction; - use pathfinder_common::{ - class_definition, - BlockHash, - BlockHeader, - BlockNumber, - ChainId, - EventCommitment, - ReceiptCommitment, - SignedBlockHeader, - StarknetVersion, - StateCommitment, - TransactionCommitment, - TransactionHash, - TransactionIndex, - }; - use rand::Rng; - use starknet_gateway_types::class_hash::compute_class_hash; - - use super::Block; - - pub type BlockHashFn = Box BlockHash>; - pub type TransactionCommitmentFn = - Box anyhow::Result>; - pub type ReceiptCommitmentFn = Box anyhow::Result>; - pub type EventCommitmentFn = Box< - dyn Fn(&[(TransactionHash, &[Event])], StarknetVersion) -> anyhow::Result, - >; - - pub struct Config { - pub calculate_block_hash: BlockHashFn, - pub calculate_transaction_commitment: TransactionCommitmentFn, - pub calculate_receipt_commitment: ReceiptCommitmentFn, - pub calculate_event_commitment: EventCommitmentFn, +/// Create fake blocks and state updates with __limited consistency +/// guarantees__: +/// - starknet version: 0.13.2 +/// - chain id: `SEPOLIA_TESTNET` +/// - block headers: +/// - consecutive numbering starting from genesis (`0`) up to `n-1` +/// - parent hash of block N points to hash of block N-1, parent hash of +/// genesis is 0 +/// - state commitment 
is a hash of storage and class commitments +/// - state diff length and commitment are correctly calculated from its +/// respective state update +/// - block bodies: +/// - transaction indices within a block +/// - transaction hashes in respective receipts +/// - at least 1 transaction with receipt per block +/// - state updates: +/// - block hashes +/// - parent state commitment of block N points to state commitment of block +/// N-1, parent state commitment of genesis is 0 +/// - no replaced classes +/// - each storage diff has its respective nonce update +/// - storage entries constrain at least 1 element +/// - no implicitly declared classes (ie. as in the old deploy transactions +/// that were not preceded by a declare transaction) +/// - declared cairo|sierra definitions +/// - class definition is a serialized to JSON representation of +/// `class_definition::Cairo|Sierra` respectively with random fields +/// - all those definitions are very short and fall far below the soft limit +/// in protobuf encoding +/// - casm definitions for sierra classes are purely random Strings +/// - cairo class hashes and sierra class hashes are correctly calculated +/// from the definitions, casm hashes are random +/// - transactions +/// - transaction hashes are calculated from their respective variant, with +/// ChainId set to `SEPOLIA_TESTNET` +pub mod generate { + use pathfinder_common::{BlockCommitmentSignature, BlockCommitmentSignatureElem}; + + use super::*; + + pub fn n_blocks(n: usize) -> Vec { + with_config(n, Default::default()) } - impl Default for Config { - fn default() -> Self { - Self { - calculate_block_hash: Box::new(|_| Faker.fake()), - calculate_transaction_commitment: Box::new(|_, _| Ok(Faker.fake())), - calculate_receipt_commitment: Box::new(|_| Ok(Faker.fake())), - calculate_event_commitment: Box::new(|_, _| Ok(Faker.fake())), - } - } - } - - /// Create fake blocks and state updates with __limited consistency - /// guarantees__: - /// - starknet version: 0.13.2 - /// - block headers: - /// - consecutive numbering starting from genesis (`0`) up to `n-1` - /// - parent hash wrt previous block, parent hash of the genesis block - /// is `0` - /// - state commitment is a hash of storage and class commitments - /// - block bodies: - /// - transaction indices within a block - /// - transaction hashes in respective receipts - /// - at least 1 transaction with receipt per block - /// - state updates: - /// - block hashes - /// - parent state commitment wrt previous state update, parent state - /// commitment of the genesis state update is `0` - /// - old roots wrt previous state update, old root of the genesis state - /// update is `0` - /// - replaced classes for block N point to some deployed contracts from - /// block N-1 - /// - each storage diff has its respective nonce update - /// - storage entries constrain at least 1 element - /// - deployed Cairo0 contracts are treated as implicit declarations and - /// are added to declared cairo classes` - /// - declared cairo|sierra definitions - /// - class definition is a serialized to JSON representation of - /// `class_definition::Cairo|Sierra` respectively with random fields - /// - all those definitions are **very short and fall far below the soft - /// limit in protobuf encoding - /// - casm definitions for sierra classes are purely random Strings - /// - cairo class hashes and sierra class hashes are correctly - /// calculated from the definitions, casm hashes are random - /// - transactions - /// - transaction hashes are calculated 
from their respective variant, - /// with ChainId set to `SEPOLIA_TESTNET` - pub fn with_n_blocks(n: usize) -> Vec { - let mut rng = rand::thread_rng(); - with_n_blocks_and_rng(n, &mut rng) - } - - /// Same as [`with_n_blocks`] except caller can specify additional - /// configuration - pub fn with_n_blocks_and_config(n: usize, config: Config) -> Vec { - let mut rng = rand::thread_rng(); - with_n_blocks_rng_and_config(n, &mut rng, config) + pub fn with_config(n: usize, config: Config) -> Vec { + with_rng_and_config(n, &mut rand::thread_rng(), config) } - /// Same as [`with_n_blocks`] except caller can specify the rng used - pub fn with_n_blocks_and_rng(n: usize, rng: &mut R) -> Vec { - with_n_blocks_rng_and_config(n, rng, Default::default()) + pub fn with_rng_and_config(n: usize, rng: &mut R, config: Config) -> Vec { + let Config { + calculate_block_hash, + sign_block_hash, + calculate_transaction_commitment, + calculate_receipt_commitment, + calculate_event_commitment, + update_tries, + } = config; + + let mut blocks = generate_inner( + n, + rng, + calculate_transaction_commitment, + calculate_receipt_commitment, + calculate_event_commitment, + ); + + update_commitments(&mut blocks, update_tries); + compute_block_hashes(&mut blocks, calculate_block_hash, sign_block_hash); + blocks } - /// Same as [`with_n_blocks`] except caller can specify the rng used and - /// additional configuration - pub fn with_n_blocks_rng_and_config( + fn generate_inner( n: usize, rng: &mut R, - config: Config, + calculate_transaction_commitment: TransactionCommitmentFn, + calculate_receipt_commitment: ReceiptCommitmentFn, + calculate_event_commitment: EventCommitmentFn, ) -> Vec { let mut init = Vec::with_capacity(n); + let mut declared_classes_accum = HashSet::new(); for i in 0..n { let mut header: BlockHeader = Faker.fake_with_rng(rng); header.starknet_version = StarknetVersion::V_0_13_2; header.number = BlockNumber::new_or_panic(i.try_into().expect("u64 is at least as wide as usize")); - header.storage_commitment = Default::default(); - header.class_commitment = Default::default(); - header.state_commitment = - StateCommitment::calculate(header.storage_commitment, header.class_commitment); + // Will be fixed after inserting tries + header.state_commitment = StateCommitment::ZERO; // There must be at least 1 transaction per block let transaction_data = fake_non_empty_with_rng::< @@ -255,7 +278,7 @@ pub mod init { }) .collect::>(); - header.transaction_commitment = (config.calculate_transaction_commitment)( + header.transaction_commitment = (calculate_transaction_commitment)( &transaction_data .iter() .map(|(t, ..)| t.clone()) @@ -264,7 +287,7 @@ pub mod init { ) .unwrap(); - header.event_commitment = (config.calculate_event_commitment)( + header.event_commitment = (calculate_event_commitment)( &transaction_data .iter() .map(|(t, _, e)| (t.hash, e.as_slice())) @@ -273,7 +296,7 @@ pub mod init { ) .unwrap(); - header.receipt_commitment = (config.calculate_receipt_commitment)( + header.receipt_commitment = (calculate_receipt_commitment)( &transaction_data .iter() .map(|(_, r, ..)| r.clone()) @@ -287,176 +310,218 @@ pub mod init { .map(|(_, _, events)| events.len()) .sum(); + let num_cairo_classes = rng.gen_range(0..=0); + let num_sierra_classes = rng.gen_range(0..=10); + + let cairo_defs = (0..num_cairo_classes) + .map(|_| { + let def = serde_json::to_vec( + &Faker.fake_with_rng::, _>(rng), + ) + .unwrap(); + (compute_class_hash(&def).unwrap().hash(), def) + }) + .collect::>(); + let sierra_defs = 
(0..num_sierra_classes) + .map(|_| { + let def = serde_json::to_vec( + &Faker.fake_with_rng::, _>(rng), + ) + .unwrap(); + ( + SierraHash(compute_class_hash(&def).unwrap().hash().0), + (def, Faker.fake_with_rng::(rng).into_bytes()), + ) + }) + .collect::>(); + + let declared_cairo_classes = cairo_defs.keys().copied().collect::>(); + let declared_sierra_classes = sierra_defs + .keys() + .map(|sierra_hash| (*sierra_hash, Faker.fake())) + .collect::>(); + + let all_declared_classes_in_this_block = declared_cairo_classes + .iter() + .copied() + .chain(declared_sierra_classes.keys().map(|x| ClassHash(x.0))) + .collect::>(); + init.push(Block { header: SignedBlockHeader { header, signature: Faker.fake_with_rng(rng), }, transaction_data, - state_update: Default::default(), - cairo_defs: Default::default(), - sierra_defs: Default::default(), + state_update: Some(StateUpdate { + // Will be fixed after block hash computation + block_hash: BlockHash::ZERO, + // Will be fixed after inserting tries + state_commitment: StateCommitment::ZERO, + // Will be fixed after inserting tries + parent_state_commitment: StateCommitment::ZERO, + declared_cairo_classes, + declared_sierra_classes, + system_contract_updates: HashMap::from([( + ContractAddress::ONE, + SystemContractUpdate { + storage: fake_non_empty_with_rng(rng), + }, + )]), + contract_updates: { + // We can only deploy what was declared so far in the chain + if declared_classes_accum.is_empty() { + Default::default() + } else { + Faker + .fake_with_rng::, _>(rng) + .into_iter() + .map(|contract_address| { + ( + contract_address, + ContractUpdate { + class: Some(ContractClassUpdate::Deploy( + *declared_classes_accum.iter().choose(rng).unwrap(), + )), + storage: fake_non_empty_with_rng(rng), + nonce: Faker.fake(), + }, + ) + }) + .collect() + } + }, + }), + cairo_defs: cairo_defs.into_iter().collect(), + sierra_defs: sierra_defs + .into_iter() + .map(|(h, (s, c))| (h, s, c)) + .collect(), }); + + // These new classes from this block can now be deployed in the next blocks + declared_classes_accum.extend(all_declared_classes_in_this_block); } - // Calculate state commitments and randomly choose which contract updates should - // be "replace" instead of "deploy" - if !init.is_empty() { - let Block { - header, - state_update, - .. - } = init.get_mut(0).unwrap(); - header.header.state_commitment = StateCommitment::calculate( - header.header.storage_commitment, - header.header.class_commitment, - ); - state_update.parent_state_commitment = StateCommitment::ZERO; - - for i in 1..n { - let (parent_state_commitment, deployed_in_parent) = init - .get(i - 1) - .map( - |Block { - header, - state_update, - .. - }| { - ( - header.header.state_commitment, - state_update - .contract_updates - .iter() - .filter_map(|(&address, update)| match update.class { - Some(ContractClassUpdate::Deploy(class_hash)) => { - Some((address, class_hash)) - } - Some(_) | None => None, - }) - .collect::>(), - ) + // FIXME Previous way of faking replaced classes made trie generation using + // `update_starknet_state` unstable, ie. state roots did not match + // between the generated block data and what was computed as a result of + // checkpoint or tracking sync test. + + // Compute state diff length and commitment + for Block { + header: + SignedBlockHeader { + header: + BlockHeader { + state_diff_length, + state_diff_commitment, + .. }, - ) - .unwrap(); - let Block { - header, - state_update, .. 
- } = init.get_mut(i).unwrap(); - - header.header.state_commitment = StateCommitment::calculate( - header.header.storage_commitment, - header.header.class_commitment, - ); - - // - // Fix state updates - // - state_update.parent_state_commitment = parent_state_commitment; - - let num_deployed_in_parent = deployed_in_parent.len(); - - if num_deployed_in_parent > 0 { - // Add some replaced classes - let num_replaced = rng.gen_range(1..=num_deployed_in_parent); - use rand::seq::SliceRandom; - - deployed_in_parent - .choose_multiple(rng, num_replaced) - .for_each(|(address, _)| { - state_update - .contract_updates - .entry(*address) - // It's unlikely rng has generated an update to the previously - // deployed class but it is still possible - .or_default() - .class = - Some(ContractClassUpdate::Replace(Faker.fake_with_rng(rng))); - }); - } - } + }, + state_update, + .. + } in init.iter_mut() + { + *state_diff_length = state_update.as_ref().unwrap().state_diff_length(); + *state_diff_commitment = state_update + .as_ref() + .unwrap() + .compute_state_diff_commitment(); + } - // Compute state diff length and commitment - // Generate definitions for the implicitly declared classes - for Block { - header: - SignedBlockHeader { - header: - BlockHeader { - state_diff_length, - state_diff_commitment, - .. - }, - .. - }, - state_update, - cairo_defs, - .. - } in init.iter_mut() - { - // All remaining Deploys in the current block should also be - // added to `declared_cairo_classes` because Cairo0 Deploys - // were not initially preceded by an explicit declare - // transaction - let implicitly_declared = state_update - .contract_updates - .iter_mut() - .filter_map(|(_, update)| match &mut update.class { - Some(ContractClassUpdate::Deploy(class_hash)) => { - let def = serde_json::to_vec( - &Faker.fake_with_rng::, _>(rng), - ) - .unwrap(); - let new_hash = compute_class_hash(&def).unwrap().hash(); - *class_hash = new_hash; - Some((new_hash, def)) - } - Some(ContractClassUpdate::Replace(_)) | None => None, - }) - .collect::>(); + init + } - state_update.declared_cairo_classes.extend( - implicitly_declared - .iter() - .map(|(class_hash, _)| *class_hash), - ); - cairo_defs.extend(implicitly_declared); + // Updates class, storage and state commitments + fn update_commitments(blocks: &mut [Block], update_tries: UpdateTriesFn) { + // This dummy db is only necessary to build the tries whose roots are the + // storage and class commitments + let dummy_storage = StorageBuilder::in_tempdir().unwrap(); + let mut db = dummy_storage.connection().unwrap(); + let db = db.transaction().unwrap(); + + for Block { + header, + state_update, + .. 
+ } in blocks.iter_mut() + { + let state_update = state_update.as_mut().unwrap(); + + // Required because of foreign key constraint + db.insert_block_header(&header.header).unwrap(); + // Required for the tries + db.insert_state_update(header.header.number, state_update) + .unwrap(); + let (storage_commitment, class_commitment) = update_tries( + &db, + state_update.into(), + false, + header.header.number, + dummy_storage.clone(), + ) + .unwrap(); + let state_commitment = StateCommitment::calculate(storage_commitment, class_commitment); + header.header.storage_commitment = storage_commitment; + header.header.class_commitment = class_commitment; + header.header.state_commitment = state_commitment; + state_update.state_commitment = state_commitment; + } - *state_diff_length = state_update.state_diff_length(); - *state_diff_commitment = state_update.compute_state_diff_commitment(); - } + for i in 1..blocks.len() { + let parent_state_commitment = blocks.get(i - 1).unwrap().header.header.state_commitment; + let Block { state_update, .. } = blocks.get_mut(i).unwrap(); + let state_update = state_update.as_mut().unwrap(); + state_update.parent_state_commitment = parent_state_commitment; + } + } - // Compute the block hash, update parent block hash with the correct value + /// Computes block hashes, updates parent block hashes with the correct + /// values, computes block hash signatures, updates those too + fn compute_block_hashes( + blocks: &mut [Block], + calculate_block_hash: BlockHashFn, + sign_block_hash: SignBlockHashFn, + ) { + if blocks.is_empty() { + return; + } + + let Block { + header, + state_update, + .. + } = blocks.get_mut(0).unwrap(); + header.header.parent_hash = BlockHash::ZERO; + header.header.hash = calculate_block_hash(&header.header); + let (r, s) = sign_block_hash(header.header.hash).unwrap(); + header.signature = BlockCommitmentSignature { + r: BlockCommitmentSignatureElem(r), + s: BlockCommitmentSignatureElem(s), + }; + state_update.as_mut().unwrap().block_hash = header.header.hash; + + for i in 1..blocks.len() { + let parent_hash = blocks + .get(i - 1) + .map(|Block { header, .. }| header.header.hash) + .unwrap(); let Block { header, state_update, .. - } = init.get_mut(0).unwrap(); - header.header.parent_hash = BlockHash::ZERO; - - header.header.hash = (config.calculate_block_hash)(&header.header); - - state_update.block_hash = header.header.hash; - - for i in 1..n { - let parent_hash = init - .get(i - 1) - .map(|Block { header, .. }| header.header.hash) - .unwrap(); - let Block { - header, - state_update, - .. 
- } = init.get_mut(i).unwrap(); - - header.header.parent_hash = parent_hash; - - header.header.hash = (config.calculate_block_hash)(&header.header); - - state_update.block_hash = header.header.hash; - } + } = blocks.get_mut(i).unwrap(); + + header.header.parent_hash = parent_hash; + header.header.hash = calculate_block_hash(&header.header); + let (r, s) = sign_block_hash(header.header.hash).unwrap(); + header.signature = BlockCommitmentSignature { + r: BlockCommitmentSignatureElem(r), + s: BlockCommitmentSignatureElem(s), + }; + state_update.as_mut().unwrap().block_hash = header.header.hash; } - - init } } diff --git a/crates/storage/src/lib.rs b/crates/storage/src/lib.rs index ec2209d50d..e4a5cf49ff 100644 --- a/crates/storage/src/lib.rs +++ b/crates/storage/src/lib.rs @@ -245,6 +245,19 @@ impl StorageBuilder { storage.create_pool(pool_size) } + /// A workaround for scenarios where a test requires multiple parallel + /// connections: the shared in-memory cache causes locking errors when the + /// connection pool is larger than 1, and timeouts otherwise. + pub fn in_tempdir() -> anyhow::Result<Storage> { + let db_dir = tempfile::TempDir::new()?; + let mut db_path = PathBuf::from(db_dir.path()); + db_path.push("db.sqlite"); + crate::StorageBuilder::file(db_path) + .migrate() + .unwrap() + .create_pool(NonZeroU32::new(32).unwrap()) + } + /// Performs the database schema migration and returns a [storage /// manager](StorageManager). ///
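
For illustration, a minimal sketch of how a test might use the `StorageBuilder::in_tempdir` helper added above; the test name and assertions are hypothetical rather than taken from the change.

    #[test]
    fn uses_parallel_connections() {
        // A file-backed database in a temporary directory avoids the locking
        // errors that the shared in-memory cache produces once the connection
        // pool grows beyond a single connection.
        let storage = pathfinder_storage::StorageBuilder::in_tempdir().unwrap();

        // Several connections can now be held open at the same time.
        let mut first = storage.connection().unwrap();
        let mut second = storage.connection().unwrap();
        let _read_tx = first.transaction().unwrap();
        let _other_tx = second.transaction().unwrap();
    }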
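
In the same spirit, a short sketch of the reworked fake-storage flow that the updated tests follow: `generate::n_blocks` builds the fake chain purely in memory, and `fill` persists it, with the final `None` meaning no trie updates are performed. The test body is illustrative only.

    #[test]
    fn fake_chain_round_trip() {
        // Three fake blocks with default (random) commitments and signatures.
        let blocks = pathfinder_storage::fake::generate::n_blocks(3);

        let storage = pathfinder_storage::StorageBuilder::in_memory().unwrap();
        // Persists headers, signatures, transaction data, state updates and
        // class definitions; `None` skips storage/class trie updates.
        pathfinder_storage::fake::fill(&storage, &blocks, None);

        let mut connection = storage.connection().unwrap();
        let db = connection.transaction().unwrap();
        let genesis = db
            .block_header(pathfinder_common::BlockNumber::GENESIS.into())
            .unwrap()
            .unwrap();
        assert_eq!(genesis.number, pathfinder_common::BlockNumber::GENESIS);
    }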
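
Where real commitments are needed instead of random ones, `generate::with_config` accepts the new `Config`, whose unspecified fields fall back to the random defaults. A hedged sketch, assuming it runs inside the pathfinder crate's tests and reusing the block-hash closure from the removed `happy_path` test:

    #[test]
    fn fake_chain_with_real_block_hashes() {
        use pathfinder_common::BlockHeader;
        use pathfinder_storage::fake::{generate, Config};

        // `compute_final_hash` and `BlockHeaderData` come from
        // crate::state::block_hash, as in the removed test module.
        use crate::state::block_hash::{compute_final_hash, BlockHeaderData};

        let blocks = generate::with_config(
            5,
            Config {
                calculate_block_hash: Box::new(|header: &BlockHeader| {
                    compute_final_hash(&BlockHeaderData::from_header(header))
                }),
                // All other commitment, signing and trie-update functions
                // keep the random defaults.
                ..Default::default()
            },
        );
        assert_eq!(blocks.len(), 5);
    }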