Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

p2p sync tests #2394

Open
wants to merge 25 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
25 commits
Select commit Hold shift + click to select a range
9be2532
feat(state_update): allow StateUpdateRef from mut ref
CHr15F0x Nov 20, 2024
7ca8280
fix(transactions): CalculateHashes reports placeholder peer id
CHr15F0x Nov 20, 2024
52bfa09
feat(storage): add in_tempdir for specific test cases
CHr15F0x Nov 20, 2024
2706d0f
chore(bloom): suppress dead code warning in bloom tests
CHr15F0x Nov 20, 2024
8a0c95c
test(fake): add trie generation to fake storage, refactor, update api
CHr15F0x Nov 20, 2024
3adeb25
test(sync_handlers): update proptests
CHr15F0x Nov 20, 2024
7b32c42
test(checkpoint): update tests, remove fixture
CHr15F0x Nov 20, 2024
a2524b9
chore(sync/state_updates): remove dead code
CHr15F0x Nov 20, 2024
7b79858
test(sync/storage_adapters): update tests
CHr15F0x Nov 20, 2024
41adfef
feat(sync/track): advance next and parent_hash, update tests
CHr15F0x Nov 20, 2024
83663a6
test(connection/block): update tests
CHr15F0x Nov 20, 2024
7ba11c4
test(rpc): update tests
CHr15F0x Nov 20, 2024
608adc6
test(checkpoint/transactions): fix test timeout
CHr15F0x Nov 20, 2024
7e93e7c
test(sync): enable verify_tree_hashes in checkpoint and track tests
CHr15F0x Nov 20, 2024
a096c31
refactor(checkpoint): make checkpoint::Sync generic over p2p client
CHr15F0x Nov 20, 2024
ca97d86
refactor(sync): make Sync generic over p2p client
CHr15F0x Nov 20, 2024
fdeaa48
test(sync): add mock
CHr15F0x Nov 21, 2024
360209b
refactor(sync): make a few items generic over feeder gateway client
CHr15F0x Nov 21, 2024
dbac2ad
test(checkpoint): update fixture with correct signatures
CHr15F0x Nov 21, 2024
509932f
test(storage/fake): add signature generation to fake block data generator
CHr15F0x Nov 21, 2024
ec960a3
feat(sync): reenable block header signature verification
CHr15F0x Nov 21, 2024
c48b773
test(sync): deduplicate fake blocks generation
CHr15F0x Nov 21, 2024
b202d30
test(sync): add test cases for sync with recoverable and fatal errors
CHr15F0x Nov 21, 2024
81dea8d
chore: remove debug logs
CHr15F0x Nov 22, 2024
e0af01a
test(sync/track): remove redundant test
CHr15F0x Nov 22, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

12 changes: 12 additions & 0 deletions crates/common/src/state_update.rs
Original file line number Diff line number Diff line change
Expand Up @@ -398,6 +398,12 @@ impl<'a> From<&'a StateUpdate> for StateUpdateRef<'a> {
}
}

impl<'a> From<&'a mut StateUpdate> for StateUpdateRef<'a> {
    /// Builds a [`StateUpdateRef`] from a mutable reference by reborrowing it
    /// as shared and delegating to the `From<&'a StateUpdate>` impl.
    fn from(state_update: &'a mut StateUpdate) -> Self {
        let shared: &'a StateUpdate = state_update;
        Self::from(shared)
    }
}

impl<'a> From<&'a StateUpdateData> for StateUpdateRef<'a> {
fn from(state_update: &'a StateUpdateData) -> Self {
Self {
Expand Down Expand Up @@ -432,6 +438,12 @@ impl<'a> From<&'a StateUpdateData> for StateUpdateRef<'a> {
}
}

impl<'a> From<&'a mut StateUpdateData> for StateUpdateRef<'a> {
    /// Builds a [`StateUpdateRef`] from a mutable reference by reborrowing it
    /// as shared and delegating to the `From<&'a StateUpdateData>` impl.
    fn from(state_update: &'a mut StateUpdateData) -> Self {
        let shared: &'a StateUpdateData = state_update;
        Self::from(shared)
    }
}

impl StorageRef<'_> {
pub fn iter(&self) -> StorageRefIter<'_> {
match self {
Expand Down
6 changes: 3 additions & 3 deletions crates/p2p/src/client/peer_agnostic/traits.rs
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ pub trait TransactionStream {
start: BlockNumber,
stop: BlockNumber,
transaction_count_stream: impl Stream<Item = anyhow::Result<usize>> + Send + 'static,
) -> impl Stream<Item = StreamItem<(TransactionData, BlockNumber)>>;
) -> impl Stream<Item = StreamItem<(TransactionData, BlockNumber)>> + Send;
}

pub trait StateDiffStream {
Expand All @@ -47,7 +47,7 @@ pub trait StateDiffStream {
start: BlockNumber,
stop: BlockNumber,
state_diff_length_stream: impl Stream<Item = anyhow::Result<usize>> + Send + 'static,
) -> impl Stream<Item = StreamItem<(StateUpdateData, BlockNumber)>>;
) -> impl Stream<Item = StreamItem<(StateUpdateData, BlockNumber)>> + Send;
}

pub trait ClassStream {
Expand All @@ -56,7 +56,7 @@ pub trait ClassStream {
start: BlockNumber,
stop: BlockNumber,
declared_class_count_stream: impl Stream<Item = anyhow::Result<usize>> + Send + 'static,
) -> impl Stream<Item = StreamItem<ClassDefinition>>;
) -> impl Stream<Item = StreamItem<ClassDefinition>> + Send;
}

pub trait EventStream {
Expand Down
1 change: 1 addition & 0 deletions crates/pathfinder/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -92,5 +92,6 @@ rstest = { workspace = true }
serde_with = { workspace = true }
starknet-gateway-test-fixtures = { path = "../gateway-test-fixtures" }
starknet_api = { workspace = true }
test-log = { workspace = true, features = ["trace"] }
tokio = { workspace = true, features = ["test-util"] }
warp = { workspace = true }
Original file line number Diff line number Diff line change
Expand Up @@ -11,3 +11,6 @@ cc 362172e92f8c3bb8b57add0452a53575bef5640a22e0d9cfcabe821c5150086f # shrinks to
cc 3c0631f4271587b05d7638c8f95a767a85062d1ffb771167a3b24028376315df # shrinks to (num_blocks, seed, start_block, limit, step, direction) = (7, 9090751796217969733, 1, 4, Step(1), Backward)
cc e61a757eb84e98a3e8429942c16b6937603d36bd6272a92db52a392df2370a84 # shrinks to (num_blocks, seed, start_block, limit, step, direction) = (9, 12221019298661150784, 5, 3, Step(1), Backward)
cc 86c701dc281422d164cfcdd813470d0908f8da74089472c547085c89fd4fc74b # shrinks to (num_blocks, seed, start_block, limit, step, direction) = (11, 16005500644522549812, 0, 5, Step(1), Forward)
cc 88947174b63dc40a8ecadc8258db12c16449fe512c4729e350ded4c7b4a34baf # shrinks to (num_blocks, seed, start_block, limit, step, direction) = (0, 0, 0, 1, Step(1), Forward)
cc 48a4cce9020765acde8c0046cc73e72ef238865b8712045d0a95c23fb4062070 # shrinks to (num_blocks, seed, start_block, limit, step, direction) = (0, 0, 0, 1, Step(1), Forward)
cc bb0bb73a6e6719184832c149727d3e166cda4c891355f25ba8f8b4ed839ea3c2 # shrinks to (num_blocks, seed, start_block, limit, step, direction) = (0, 0, 0, 1, Step(1), Forward)
4 changes: 3 additions & 1 deletion crates/pathfinder/src/bin/pathfinder/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -630,17 +630,19 @@ fn start_p2p_sync(
l1_checkpoint_override: Option<pathfinder_ethereum::EthereumStateUpdate>,
verify_tree_hashes: bool,
) -> tokio::task::JoinHandle<anyhow::Result<()>> {
use pathfinder_block_hashes::BlockHashDb;

let sync = pathfinder_lib::sync::Sync {
storage,
p2p: p2p_client,
eth_client: ethereum_client,
eth_address: pathfinder_context.l1_core_address,
fgw_client: pathfinder_context.gateway,
chain_id: pathfinder_context.network_id,
chain: pathfinder_context.network,
public_key: gateway_public_key,
l1_checkpoint_override,
verify_tree_hashes,
block_hash_db: Some(BlockHashDb::new(pathfinder_context.network)),
};
tokio::spawn(sync.run())
}
Expand Down
28 changes: 17 additions & 11 deletions crates/pathfinder/src/p2p_network/sync_handlers/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -247,14 +247,15 @@ mod prop {
// These are the items that we expect to be read from the db
// Grouped by block number
let expected = overlapping::get(in_db, start_block, limit, step, num_blocks, direction).into_iter()
.map(|Block { header, state_update, .. }|
.map(|Block { header, state_update, .. }| {
let state_update = state_update.unwrap();
(
header.header.number, // Block number
state_update.contract_updates.into_iter().map(|(k, v)| (k, v.into())).collect::<HashMap<_,_>>(),
state_update.system_contract_updates,
state_update.declared_sierra_classes,
state_update.declared_cairo_classes,
)
)}
).collect::<Vec<_>>();
// Run the handler
let request = StateDiffsRequest { iteration: Iteration { start: BlockNumberOrHash::Number(start_block), limit, step, direction, } };
Expand Down Expand Up @@ -330,6 +331,7 @@ mod prop {
fn get_classes((num_blocks, seed, start_block, limit, step, direction) in strategy::composite()) {
// Fake storage with a given number of blocks
let (storage, in_db) = fixtures::storage_with_seed(seed, num_blocks);

// Compute the overlapping set between the db and the request
// These are the items that we expect to be read from the db
// Grouped by block number
Expand All @@ -344,6 +346,7 @@ mod prop {
sierra_defs.into_iter().map(|(_, sierra_def, _)| sierra_def).collect::<Vec<_>>()
)
).collect::<Vec<_>>();

// Run the handler
let request = ClassesRequest { iteration: Iteration { start: BlockNumberOrHash::Number(start_block), limit, step, direction, } };
let mut responses = Runtime::new().unwrap().block_on(async {
Expand Down Expand Up @@ -372,11 +375,14 @@ mod prop {
});

for expected_for_block in expected {
let actual_cairo_for_block = actual_cairo.drain(..expected_for_block.1.len()).collect::<Vec<_>>();
let actual_sierra_for_block = actual_sierra.drain(..expected_for_block.2.len()).collect::<Vec<_>>();
let actual_cairo_for_block = actual_cairo.drain(..expected_for_block.1.len()).collect::<HashSet<_>>();
let actual_sierra_for_block = actual_sierra.drain(..expected_for_block.2.len()).collect::<HashSet<_>>();

prop_assert_eq!(expected_for_block.1, actual_cairo_for_block, "block number: {}", expected_for_block.0);
prop_assert_eq!(expected_for_block.2, actual_sierra_for_block, "block number: {}", expected_for_block.0);
let expected_cairo_for_block = expected_for_block.1.into_iter().collect::<HashSet<_>>();
let expected_sierra_for_block = expected_for_block.2.into_iter().collect::<HashSet<_>>();

prop_assert_eq!(expected_cairo_for_block, actual_cairo_for_block, "block number: {}", expected_for_block.0);
prop_assert_eq!(expected_sierra_for_block, actual_sierra_for_block, "block number: {}", expected_for_block.0);
}
}
}
Expand Down Expand Up @@ -507,8 +513,7 @@ mod prop {

/// Fixtures for prop tests
mod fixtures {
use pathfinder_storage::fake::init::Config;
use pathfinder_storage::fake::{with_n_blocks_rng_and_config, Block};
use pathfinder_storage::fake::{fill, generate, Block, Config};
use pathfinder_storage::{Storage, StorageBuilder};

use crate::p2p_network::sync_handlers::MAX_COUNT_IN_TESTS;
Expand All @@ -521,16 +526,17 @@ mod prop {
let storage = StorageBuilder::in_memory().unwrap();
// Explicitly choose RNG to make sure seeded storage is always reproducible
let mut rng = rand_chacha::ChaCha12Rng::seed_from_u64(seed);
let initializer = with_n_blocks_rng_and_config(
&storage,
let blocks = generate::with_rng_and_config(
num_blocks.try_into().unwrap(),
&mut rng,
Config {
calculate_receipt_commitment: Box::new(calculate_receipt_commitment),
..Default::default()
},
);
(storage, initializer)
fill(&storage, &blocks, None);

(storage, blocks)
}
}

Expand Down
Loading
Loading