fix: unpack bits to Nibbles (#81)
* add additional test coverage for state root

* fix: unpack and pack nibbles

* lint: lint propagation

* lint: private item in docs
frisitano authored Dec 13, 2024
1 parent f329794 commit 133d9c0
Showing 21 changed files with 320 additions and 172 deletions.
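The commit title is about key encoding: the standard Merkle Patricia Trie splits a hashed key into 4-bit nibbles, while the binary trie used for the Scroll state commitment consumes the key one bit at a time, and this change routes the Scroll paths through the bit-level unpack/pack. A minimal standalone sketch of the two encodings (illustration only: these helpers are not the reth API, and the bit ordering shown is an assumption):

```rust
/// Split each byte of a hashed key into two 4-bit nibbles (high nibble first),
/// as the standard MPT does.
fn split_into_nibbles(bytes: &[u8]) -> Vec<u8> {
    bytes.iter().flat_map(|b| [b >> 4, b & 0x0f]).collect()
}

/// Split each byte into eight single bits, as a binary trie requires.
/// Most-significant-bit-first ordering is assumed here purely for illustration.
fn split_into_bits(bytes: &[u8]) -> Vec<u8> {
    bytes.iter().flat_map(|b| (0..8).rev().map(move |i| (b >> i) & 1)).collect()
}

fn main() {
    let key = [0xAB_u8, 0xCD];
    // 0xAB -> nibbles 0xA, 0xB; 0xCD -> nibbles 0xC, 0xD.
    assert_eq!(split_into_nibbles(&key), vec![0x0A, 0x0B, 0x0C, 0x0D]);
    // 0xAB = 0b1010_1011 in MSB-first order.
    assert_eq!(split_into_bits(&key)[..8], [1u8, 0, 1, 0, 1, 0, 1, 1]);
}
```

For a 32-byte hash this yields 64 key elements in the nibble case and 256 in the bit case.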
3 changes: 2 additions & 1 deletion Cargo.lock


14 changes: 13 additions & 1 deletion crates/primitives-traits/src/account.rs
@@ -25,7 +25,7 @@ pub mod compact_ids {

/// An Ethereum account.
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[cfg_attr(any(test, feature = "arbitrary"), derive(arbitrary::Arbitrary))]
#[cfg_attr(any(test, feature = "reth-codec"), derive(reth_codecs::Compact))]
#[cfg_attr(any(test, feature = "reth-codec"), reth_codecs::add_arbitrary_tests(compact))]
@@ -47,6 +47,18 @@ pub struct Account {
pub account_extension: Option<reth_scroll_primitives::AccountExtension>,
}

impl Default for Account {
fn default() -> Self {
Self {
nonce: 0,
balance: U256::ZERO,
bytecode_hash: None,
#[cfg(feature = "scroll")]
account_extension: Some(Default::default()),
}
}
}

impl Account {
/// Whether the account has bytecode.
pub const fn has_bytecode(&self) -> bool {
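The derived `Default` removed above would have left `account_extension` as `None` when the `scroll` feature is enabled; the manual impl initializes it to `Some(Default::default())` instead. A small test-style sketch of that invariant, written as if it sat next to the impl in `account.rs` (assumptions: the `scroll` feature is active, and only `is_some()` is checked so no extra trait bounds on `AccountExtension` are presumed):

```rust
#[cfg(all(test, feature = "scroll"))]
mod default_account_sketch {
    use super::*;

    #[test]
    fn default_account_carries_an_extension() {
        let account = Account::default();
        assert_eq!(account.nonce, 0);
        assert_eq!(account.balance, U256::ZERO);
        assert!(account.bytecode_hash.is_none());
        // The derived impl would have produced `None` here; the manual impl
        // above initializes the Scroll account extension instead.
        assert!(account.account_extension.is_some());
    }
}
```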
1 change: 1 addition & 0 deletions crates/scroll/state-commitment/Cargo.toml
@@ -38,6 +38,7 @@ zktrie = { git = "https://github.com/scroll-tech/zktrie.git", rev = "309160464c1

# misc
tracing.workspace = true
itertools.workspace = true

[dev-dependencies]
reth-db-api.workspace = true
2 changes: 1 addition & 1 deletion crates/scroll/state-commitment/src/lib.rs
@@ -7,7 +7,7 @@ mod commitment;
pub use commitment::BinaryMerklePatriciaTrie;

mod root;
pub use root::{StateRoot, StorageRoot};
pub use root::*;

mod key;
pub use key::PoseidonKeyHasher;
@@ -6,21 +6,23 @@ use reth_scroll_primitives::poseidon::EMPTY_ROOT_HASH;
use reth_scroll_trie::HashBuilder;
use reth_trie::{
hashed_cursor::{HashedCursorFactory, HashedPostStateCursorFactory, HashedStorageCursor},
key::BitsCompatibility,
node_iter::{TrieElement, TrieNodeIter},
prefix_set::{PrefixSet, TriePrefixSets},
stats::TrieTracker,
trie_cursor::{InMemoryTrieCursorFactory, TrieCursorFactory},
updates::{StorageTrieUpdates, TrieUpdates},
walker::TrieWalker,
HashedPostState, HashedPostStateSorted, HashedStorage, IntermediateStateRootState, KeyHasher,
Nibbles, StateRootProgress, TrieInput,
BitsCompatibility, HashedPostState, HashedPostStateSorted, HashedStorage,
IntermediateStateRootState, KeyHasher, Nibbles, StateRootProgress, TrieInput,
};
use tracing::{debug, trace};

#[cfg(feature = "metrics")]
use reth_trie::metrics::{StateRootMetrics, TrieRootMetrics, TrieType};

mod utils;
pub use utils::*;

// TODO(scroll): Instead of introducing this new type we should make StateRoot generic over
// the [`HashBuilder`] and key traversal types

66 changes: 66 additions & 0 deletions crates/scroll/state-commitment/src/root/utils.rs
@@ -0,0 +1,66 @@
//! Utilities for computing state root and storage root from a plain provided state.
use crate::{PoseidonKeyHasher, PoseidonValueHasher, ScrollTrieAccount};
use alloy_primitives::{Address, B256, U256};
use itertools::Itertools;
use reth_scroll_trie::HashBuilder;
use reth_trie::{BitsCompatibility, KeyHasher, Nibbles};

/// Hashes and sorts the account keys, then calculates the root hash of the state
/// represented as a BMPT.
pub fn state_root_ref_unhashed<'a, A: Into<ScrollTrieAccount> + Clone + 'a>(
state: impl IntoIterator<Item = (&'a Address, &'a A)>,
) -> B256 {
state_root_unsorted(
state
.into_iter()
.map(|(address, account)| (PoseidonKeyHasher::hash_key(address), account.clone())),
)
}

/// Sorts the hashed account keys and calculates the root hash of the state represented as a BMPT.
pub fn state_root_unsorted<A: Into<ScrollTrieAccount>>(
state: impl IntoIterator<Item = (B256, A)>,
) -> B256 {
state_root(state.into_iter().sorted_unstable_by_key(|(key, _)| *key))
}

/// Calculates the root hash of the state represented as a BMPT.
///
/// # Panics
///
/// If the items are not in sorted order.
pub fn state_root<A: Into<ScrollTrieAccount>>(state: impl IntoIterator<Item = (B256, A)>) -> B256 {
let mut hb = HashBuilder::default();
for (hashed_key, account) in state {
let account_hash = PoseidonValueHasher::hash_account(account.into());
hb.add_leaf(Nibbles::unpack_bits(hashed_key), account_hash.as_slice());
}
hb.root()
}

/// Hashes the storage keys, sorts them and then calculates the root hash of the storage trie.
pub fn storage_root_unhashed(storage: impl IntoIterator<Item = (B256, U256)>) -> B256 {
storage_root_unsorted(
storage.into_iter().map(|(slot, value)| (PoseidonKeyHasher::hash_key(slot), value)),
)
}

/// Sorts and calculates the root hash of the account storage trie.
pub fn storage_root_unsorted(storage: impl IntoIterator<Item = (B256, U256)>) -> B256 {
storage_root(storage.into_iter().sorted_unstable_by_key(|(key, _)| *key))
}

/// Calculates the root hash of the account storage trie.
///
/// # Panics
///
/// If the items are not in sorted order.
pub fn storage_root(storage: impl IntoIterator<Item = (B256, U256)>) -> B256 {
let mut hb = HashBuilder::default();
for (hashed_slot, value) in storage {
let hashed_value = PoseidonValueHasher::hash_storage(value);
hb.add_leaf(Nibbles::unpack_bits(hashed_slot), hashed_value.as_ref());
}
hb.root()
}
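For orientation, a hypothetical caller of the new storage-root helpers; the paths and signatures are read off the hunk above and the test changes below, so treat this as a sketch rather than documented API:

```rust
use alloy_primitives::{B256, U256};
use reth_scroll_state_commitment::{storage_root_unhashed, storage_root_unsorted};

fn storage_root_example() -> (B256, B256) {
    let entries = [
        (B256::with_last_byte(1), U256::from(100)),
        (B256::with_last_byte(2), U256::from(200)),
    ];

    // Plain slots: keys are Poseidon-hashed and sorted before the trie is built.
    let from_plain = storage_root_unhashed(entries);

    // Already-hashed slots: only sorting happens before the trie is built.
    let from_hashed = storage_root_unsorted(entries);

    (from_plain, from_hashed)
}
```

`state_root_unsorted` and `state_root_ref_unhashed` follow the same pattern for accounts, hashing keys via `PoseidonKeyHasher` where needed.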
90 changes: 81 additions & 9 deletions crates/scroll/state-commitment/src/test.rs
@@ -9,23 +9,22 @@ use reth_db::{
transaction::DbTxMut,
};
use reth_primitives::{Account, StorageEntry};
use reth_provider::test_utils::create_test_provider_factory;
use reth_scroll_state_commitment::{
test_utils::{b256_clear_last_byte, b256_reverse_bits, u256_clear_msb},
PoseidonKeyHasher, StateRoot, StorageRoot,
};
use reth_provider::{test_utils::create_test_provider_factory, StorageTrieWriter, TrieWriter};
use reth_scroll_state_commitment::{test_utils::*, PoseidonKeyHasher, StateRoot, StorageRoot};

use reth_scroll_state_commitment::test_utils::b256_clear_first_byte;
use reth_scroll_state_commitment::state_root_unsorted;
use reth_trie::{
trie_cursor::InMemoryTrieCursorFactory, updates::TrieUpdates, HashedPostState, HashedStorage,
KeyHasher,
prefix_set::{PrefixSetMut, TriePrefixSets},
trie_cursor::InMemoryTrieCursorFactory,
updates::TrieUpdates,
BitsCompatibility, HashedPostState, HashedStorage, KeyHasher, Nibbles,
};
use reth_trie_db::{DatabaseStateRoot, DatabaseStorageRoot, DatabaseTrieCursorFactory};
use std::collections::BTreeMap;

proptest! {
#![proptest_config(ProptestConfig {
cases: 6, ..ProptestConfig::default()
cases: 12, ..ProptestConfig::default()
})]

#[test]
@@ -169,6 +168,79 @@ proptest! {
assert_eq!(expected_root, storage_root);
}
}

#[test]
fn fuzz_state_root_incremental_database(account_changes: [BTreeMap<B256, U256>; 5]) {
let factory = create_test_provider_factory();
let tx = factory.provider_rw().unwrap();
let mut hashed_account_cursor = tx.tx_ref().cursor_write::<tables::HashedAccounts>().unwrap();

let mut state = BTreeMap::default();
for accounts in account_changes {
let should_generate_changeset = !state.is_empty();
let mut changes = PrefixSetMut::default();
let accounts = accounts.into_iter().map(|(hashed_address, balance)| {
let balance = u256_clear_msb(balance);
let hashed_address = b256_clear_last_byte(hashed_address);
(hashed_address, balance)
}).collect::<BTreeMap<_, _>>();

for (hashed_address, balance) in accounts.clone() {
hashed_account_cursor.upsert(hashed_address, Account { balance, ..Default::default() }).unwrap();
if should_generate_changeset {
changes.insert(Nibbles::unpack_bits(hashed_address));
}
}

let (state_root, trie_updates) = StateRoot::from_tx(tx.tx_ref())
.with_prefix_sets(TriePrefixSets { account_prefix_set: changes.freeze(), ..Default::default() })
.root_with_updates()
.unwrap();

state.append(&mut accounts.clone());
let expected_root = state_root_unsorted(
state.iter().map(|(&key, &balance)| (key, (Account { balance, ..Default::default() }, B256::default())))
);
assert_eq!(expected_root, state_root);
tx.write_trie_updates(&trie_updates).unwrap();
}
}

#[test]
fn incremental_storage_root_database(address: Address, storage_changes: [BTreeMap<B256, U256>; 5]) {
let hashed_address = PoseidonKeyHasher::hash_key(address);
let factory = create_test_provider_factory();
let tx = factory.provider_rw().unwrap();
let mut hashed_storage_cursor = tx.tx_ref().cursor_write::<tables::HashedStorages>().unwrap();

let mut storage = BTreeMap::default();
for change_set in storage_changes {
let should_generate_changeset = !storage.is_empty();
let mut changes = PrefixSetMut::default();
let change_set = change_set.into_iter().map(|(slot, value)| {
let slot = b256_clear_last_byte(slot);
(slot, value)
}).collect::<BTreeMap<_, _>>();

for (hashed_slot, value) in change_set.clone() {
hashed_storage_cursor.upsert(hashed_address, StorageEntry { key: hashed_slot, value }).unwrap();
if should_generate_changeset {
changes.insert(Nibbles::unpack_bits(hashed_slot));
}
}

let (storage_root, _, trie_updates) = StorageRoot::from_tx_hashed(tx.tx_ref(), hashed_address)
.with_prefix_set(changes.freeze())
.root_with_updates()
.unwrap();

storage.append(&mut change_set.clone());
let expected_root = reth_scroll_state_commitment::storage_root_unsorted(storage.clone());
assert_eq!(expected_root, storage_root);

tx.write_individual_storage_trie_updates(hashed_address, &trie_updates).unwrap();
}
}
}

#[test]
2 changes: 1 addition & 1 deletion crates/scroll/trie/src/hash_builder.rs
@@ -464,7 +464,7 @@ mod test {
use alloc::collections::BTreeMap;
use hex_literal::hex;
use reth_scroll_primitives::poseidon::{hash_with_domain, Fr, PrimeField};
use reth_trie::key::BitsCompatibility;
use reth_trie::BitsCompatibility;

#[test]
fn test_convert_to_bit_representation() {
2 changes: 1 addition & 1 deletion crates/scroll/trie/src/leaf.rs
@@ -1,7 +1,7 @@
use super::LEAF_NODE_DOMAIN;
use alloy_primitives::B256;
use reth_scroll_primitives::poseidon::{hash_with_domain, Fr, PrimeField};
use reth_trie::{key::BitsCompatibility, LeafNodeRef};
use reth_trie::{BitsCompatibility, LeafNodeRef};

/// A trait used to hash the leaf node.
pub(crate) trait HashLeaf {
15 changes: 8 additions & 7 deletions crates/storage/provider/src/providers/database/provider.rs
@@ -66,8 +66,9 @@ use reth_storage_api::{
use reth_storage_errors::provider::{ProviderResult, RootMismatch};
use reth_trie::{
prefix_set::{PrefixSet, PrefixSetMut, TriePrefixSets},
unpack_nibbles,
updates::{StorageTrieUpdates, TrieUpdates},
HashedPostStateSorted, KeyHasher, Nibbles, StoredNibbles,
HashedPostStateSorted, KeyHasher, StoredNibbles,
};
use reth_trie_db::{DatabaseStorageTrieCursor, StateCommitment};
use revm::{
@@ -278,7 +279,7 @@ impl<TX: DbTx + DbTxMut + 'static, N: NodeTypesForProvider> DatabaseProvider<TX,
let mut account_prefix_set = PrefixSetMut::with_capacity(hashed_addresses.len());
let mut destroyed_accounts = HashSet::default();
for (hashed_address, account) in hashed_addresses {
account_prefix_set.insert(Nibbles::unpack(hashed_address));
account_prefix_set.insert(unpack_nibbles(hashed_address));
if account.is_none() {
destroyed_accounts.insert(hashed_address);
}
@@ -299,10 +300,10 @@ impl<TX: DbTx + DbTxMut + 'static, N: NodeTypesForProvider> DatabaseProvider<TX,
let mut storage_prefix_sets = HashMap::<B256, PrefixSet>::default();
let storage_entries = self.unwind_storage_hashing(changed_storages.iter().copied())?;
for (hashed_address, hashed_slots) in storage_entries {
account_prefix_set.insert(Nibbles::unpack(hashed_address));
account_prefix_set.insert(unpack_nibbles(hashed_address));
let mut storage_prefix_set = PrefixSetMut::with_capacity(hashed_slots.len());
for slot in hashed_slots {
storage_prefix_set.insert(Nibbles::unpack(slot));
storage_prefix_set.insert(unpack_nibbles(slot));
}
storage_prefix_sets.insert(hashed_address, storage_prefix_set.freeze());
}
@@ -2551,12 +2552,12 @@ impl<TX: DbTxMut + DbTx + 'static, N: NodeTypes> HashingWriter for DatabaseProvi
let storages = self.plain_state_storages(lists)?;
let storage_entries = self.insert_storage_for_hashing(storages)?;
for (hashed_address, hashed_slots) in storage_entries {
account_prefix_set.insert(Nibbles::unpack(hashed_address));
account_prefix_set.insert(unpack_nibbles(hashed_address));
for slot in hashed_slots {
storage_prefix_sets
.entry(hashed_address)
.or_default()
.insert(Nibbles::unpack(slot));
.insert(unpack_nibbles(slot));
}
}
}
@@ -2568,7 +2569,7 @@ impl<TX: DbTxMut + DbTx + 'static, N: NodeTypes> HashingWriter for DatabaseProvi
let accounts = self.basic_accounts(lists)?;
let hashed_addresses = self.insert_account_for_hashing(accounts)?;
for (hashed_address, account) in hashed_addresses {
account_prefix_set.insert(Nibbles::unpack(hashed_address));
account_prefix_set.insert(unpack_nibbles(hashed_address));
if account.is_none() {
destroyed_accounts.insert(hashed_address);
}
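The call sites above switch from `Nibbles::unpack` to a free `unpack_nibbles` helper imported from `reth_trie`, whose definition is not part of this diff. A rough sketch of the kind of feature-gated dispatch such a helper could perform (an assumption, written as standalone functions for illustration; the real helper may differ):

```rust
use alloy_primitives::B256;
use reth_trie::Nibbles;

/// Sketch: with the default trie, unpack the hashed key into 4-bit nibbles.
#[cfg(not(feature = "scroll"))]
pub fn unpack_nibbles(key: B256) -> Nibbles {
    Nibbles::unpack(key)
}

/// Sketch: with the Scroll binary trie, unpack the hashed key into single bits.
#[cfg(feature = "scroll")]
pub fn unpack_nibbles(key: B256) -> Nibbles {
    use reth_trie::BitsCompatibility;
    Nibbles::unpack_bits(key)
}
```

Keeping the choice behind one helper lets the provider code stay identical regardless of which trie key representation is active.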
6 changes: 5 additions & 1 deletion crates/trie/common/Cargo.toml
@@ -30,10 +30,12 @@ bytes.workspace = true
derive_more.workspace = true
itertools.workspace = true
nybbles = { workspace = true, features = ["rlp"] }
smallvec = { version = "1.0", default-features = false, features = [
"const_new",
] }

# `serde` feature
serde = { workspace = true, optional = true }

serde_with = { workspace = true, optional = true }

# `test-utils` feature
@@ -62,6 +64,7 @@ serde = [
"dep:serde",
"bytes/serde",
"nybbles/serde",
"smallvec/serde",
"alloy-primitives/serde",
"alloy-consensus/serde",
"alloy-trie/serde",
@@ -91,6 +94,7 @@ arbitrary = [
"alloy-consensus/arbitrary",
"alloy-primitives/arbitrary",
"nybbles/arbitrary",
"smallvec/arbitrary",
"revm-primitives/arbitrary",
"reth-codecs/arbitrary",
"reth-scroll-primitives?/arbitrary"