Commit

Merge branch 'matter-labs:main' into main
toddfil authored Dec 1, 2023
2 parents b24fe12 + 0cd2c6b commit 6eea396
Showing 66 changed files with 1,289 additions and 913 deletions.
10 changes: 8 additions & 2 deletions .githooks/pre-push
@@ -1,4 +1,4 @@
#!/bin/sh
#!/bin/bash
#
# Pre-push hook verifying that inappropriate code will not be pushed.

@@ -8,7 +8,13 @@ NC='\033[0m' # No Color

# Check that prettier formatting rules are not violated.
if ! zk fmt --check; then
echo -e "${RED}Commit error!${NC}"
echo -e "${RED}Push error!${NC}"
echo "Please format the code via 'zk fmt', cannot push unformatted code"
exit 1
fi

if ! zk db check-sqlx-data; then
echo -e "${RED}Push error!${NC}"
echo "Please update sqlx-data.json via 'zk db setup', cannot push invalid sqlx-data.json file"
exit 1
fi
2 changes: 1 addition & 1 deletion .github/release-please/manifest.json
@@ -1,5 +1,5 @@
{
"sdk/zksync-rs": "0.4.0",
"core": "18.3.1",
"core": "18.4.0",
"prover": "9.0.0"
}
2 changes: 1 addition & 1 deletion .github/workflows/build-core-template.yml
@@ -83,7 +83,7 @@ jobs:
COMPONENT: ${{ matrix.component }}
run: |
ci_run rustup default nightly-2023-08-21
ci_run zk docker $DOCKER_ACTION $COMPONENT -- --public
ci_run zk docker $DOCKER_ACTION $COMPONENT
- name: Show sccache stats
if: always()
run: |
5 changes: 3 additions & 2 deletions .github/workflows/check-spelling.yml
@@ -5,8 +5,9 @@ on:
branches:
- main
pull_request:
merge_group:

env:
env:
CARGO_TERM_COLOR: always

jobs:
@@ -17,7 +18,7 @@ jobs:
uses: taiki-e/install-action@v2
with:
tool: cargo-spellcheck

- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4

- name: Run cargo-spellcheck
17 changes: 17 additions & 0 deletions core/CHANGELOG.md
@@ -1,5 +1,22 @@
# Changelog

## [18.4.0](https://github.com/matter-labs/zksync-era/compare/core-v18.3.1...core-v18.4.0) (2023-12-01)


### Features

* adds spellchecker workflow, and corrects misspelled words ([#559](https://github.com/matter-labs/zksync-era/issues/559)) ([beac0a8](https://github.com/matter-labs/zksync-era/commit/beac0a85bb1535b05c395057171f197cd976bf82))
* **en:** Support arbitrary genesis block for external nodes ([#537](https://github.com/matter-labs/zksync-era/issues/537)) ([15d7eaf](https://github.com/matter-labs/zksync-era/commit/15d7eaf872e222338810243865cec9dff7f6e799))
* **merkle tree:** Remove enumeration index assignment from Merkle tree ([#551](https://github.com/matter-labs/zksync-era/issues/551)) ([e2c1b20](https://github.com/matter-labs/zksync-era/commit/e2c1b20e361e6ee2f5ac69cefe75d9c5575eb2f7))
* Restore commitment test in Boojum integration ([#539](https://github.com/matter-labs/zksync-era/issues/539)) ([06f510d](https://github.com/matter-labs/zksync-era/commit/06f510d00f855ddafaebb504f7ea799700221072))


### Bug Fixes

* Change no pending batches 404 error into a success response ([#279](https://github.com/matter-labs/zksync-era/issues/279)) ([e8fd805](https://github.com/matter-labs/zksync-era/commit/e8fd805c8be7980de7676bca87cfc2d445aab9e1))
* **vm:** Expose additional types and traits ([#563](https://github.com/matter-labs/zksync-era/issues/563)) ([bd268ac](https://github.com/matter-labs/zksync-era/commit/bd268ac02bc3530c1d3247cb9496c3e13c2e52d9))
* **witness_generator:** Disable BWIP dependency ([#573](https://github.com/matter-labs/zksync-era/issues/573)) ([e05d955](https://github.com/matter-labs/zksync-era/commit/e05d955036c76a29f9b6e900872c69e20278e045))

## [18.3.1](https://github.com/matter-labs/zksync-era/compare/core-v18.3.0...core-v18.3.1) (2023-11-28)


30 changes: 15 additions & 15 deletions core/lib/dal/sqlx-data.json
@@ -7470,21 +7470,6 @@
},
"query": "SELECT number, timestamp, is_finished, l1_tx_count, l2_tx_count, fee_account_address, bloom, priority_ops_onchain_data, hash, parent_hash, commitment, compressed_write_logs, compressed_contracts, eth_prove_tx_id, eth_commit_tx_id, eth_execute_tx_id, merkle_root_hash, l2_to_l1_logs, l2_to_l1_messages, used_contract_hashes, compressed_initial_writes, compressed_repeated_writes, l2_l1_compressed_messages, l2_l1_merkle_root, l1_gas_price, l2_fair_gas_price, rollup_last_leaf_index, zkporter_is_available, bootloader_code_hash, default_aa_code_hash, base_fee_per_gas, aux_data_hash, pass_through_data_hash, meta_parameters_hash, protocol_version, compressed_state_diffs, system_logs, events_queue_commitment, bootloader_initial_content_commitment FROM l1_batches LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number WHERE eth_prove_tx_id IS NOT NULL AND eth_execute_tx_id IS NULL ORDER BY number LIMIT $1"
},
"8ff9d76b4791af1177231661847b6c8879ad625fd11c15de51a16c81d8712129": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Int8",
"Bytea",
"Text",
"Int4"
]
}
},
"query": "INSERT INTO witness_inputs(l1_batch_number, merkle_tree_paths, merkel_tree_paths_blob_url, status, protocol_version, created_at, updated_at) VALUES ($1, $2, $3, 'waiting_for_artifacts', $4, now(), now()) ON CONFLICT (l1_batch_number) DO NOTHING"
},
"9051cc1a715e152afdd0c19739c76666b1a9b134e17601ef9fdf3dec5d2fc561": {
"describe": {
"columns": [
@@ -11182,6 +11167,21 @@
},
"query": "UPDATE l1_batches SET predicted_commit_gas_cost = $2, updated_at = now() WHERE number = $1"
},
"ec35fc5128cf59d19e6d65ed6d84fcc50fedce921405c4ce700dd2e08c990642": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Int8",
"Bytea",
"Text",
"Int4"
]
}
},
"query": "INSERT INTO witness_inputs(l1_batch_number, merkle_tree_paths, merkel_tree_paths_blob_url, status, protocol_version, created_at, updated_at) VALUES ($1, $2, $3, 'queued', $4, now(), now()) ON CONFLICT (l1_batch_number) DO NOTHING"
},
"ed50c609371b4588964e29f8757c41973706710090a80eb025ec263ce3d019b4": {
"describe": {
"columns": [],
2 changes: 1 addition & 1 deletion core/lib/dal/src/witness_generator_dal.rs
@@ -728,7 +728,7 @@ impl WitnessGeneratorDal<'_, '_> {
{
sqlx::query!(
"INSERT INTO witness_inputs(l1_batch_number, merkle_tree_paths, merkel_tree_paths_blob_url, status, protocol_version, created_at, updated_at) \
VALUES ($1, $2, $3, 'waiting_for_artifacts', $4, now(), now()) \
VALUES ($1, $2, $3, 'queued', $4, now(), now()) \
ON CONFLICT (l1_batch_number) DO NOTHING",
block_number.0 as i64,
// TODO(SMA-1476): remove the below column once blob is migrated to GCS.
16 changes: 3 additions & 13 deletions core/lib/eth_client/src/clients/http/query.rs
@@ -14,13 +14,12 @@ use zksync_types::web3::{
Contract, Options,
},
ethabi,
helpers::CallFuture,
transports::Http,
types::{
Address, Block, BlockId, BlockNumber, Bytes, Filter, Log, Transaction, TransactionId,
TransactionReceipt, H256, U256, U64,
},
Transport, Web3,
Web3,
};

/// An "anonymous" Ethereum client that can invoke read-only methods that aren't
@@ -286,23 +285,14 @@ impl EthInterface for QueryClient {
Ok(logs)
}

// TODO (PLA-333): at the moment the latest version of `web3` crate doesn't have `Finalized` variant in `BlockNumber`.
// However, it's already added in github repo and probably will be included in the next released version.
// Scope of PLA-333 includes forking/using crate directly from github, after that we will be able to change
// type of `block_id` from `String` to `BlockId` and use `self.web3.eth().block(block_id)`.
async fn block(
&self,
block_id: String,
block_id: BlockId,
component: &'static str,
) -> Result<Option<Block<H256>>, Error> {
COUNTERS.call[&(Method::Block, component)].inc();
let latency = LATENCIES.direct[&Method::Block].start();
let block = CallFuture::new(
self.web3
.transport()
.execute("eth_getBlockByNumber", vec![block_id.into(), false.into()]),
)
.await?;
let block = self.web3.eth().block(block_id).await?;
latency.observe();
Ok(block)
}
2 changes: 1 addition & 1 deletion core/lib/eth_client/src/clients/http/signing.rs
@@ -213,7 +213,7 @@ impl<S: EthereumSigner> EthInterface for SigningClient<S> {

async fn block(
&self,
block_id: String,
block_id: BlockId,
component: &'static str,
) -> Result<Option<Block<H256>>, Error> {
self.query_client.block(block_id, component).await
4 changes: 2 additions & 2 deletions core/lib/eth_client/src/clients/mock.rs
@@ -342,7 +342,7 @@ impl EthInterface for MockEthereum {

async fn block(
&self,
_block_id: String,
_block_id: BlockId,
_component: &'static str,
) -> Result<Option<Block<H256>>, Error> {
unimplemented!("Not needed right now")
@@ -524,7 +524,7 @@ impl<T: AsRef<MockEthereum> + Send + Sync> EthInterface for T {

async fn block(
&self,
block_id: String,
block_id: BlockId,
component: &'static str,
) -> Result<Option<Block<H256>>, Error> {
self.as_ref().block(block_id, component).await
2 changes: 1 addition & 1 deletion core/lib/eth_client/src/lib.rs
@@ -131,7 +131,7 @@ pub trait EthInterface: Sync + Send {
/// Returns the block header for the specified block number or hash.
async fn block(
&self,
block_id: String,
block_id: BlockId,
component: &'static str,
) -> Result<Option<Block<H256>>, Error>;
}
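
A minimal caller-side sketch of the updated interface (illustrative only, not part of this commit; the crate name `zksync_eth_client` and the exact re-export paths for `Error`, `BlockId` and `BlockNumber` are assumptions based on the imports above): the typed `BlockId` replaces the raw string that was previously forwarded to `eth_getBlockByNumber`.

use zksync_eth_client::{Error, EthInterface};
use zksync_types::web3::types::{BlockId, BlockNumber};

// Fetches the latest block header through any `EthInterface` implementor,
// e.g. `QueryClient` or `SigningClient`.
async fn print_latest_block<C: EthInterface>(client: &C) -> Result<(), Error> {
    let block = client
        .block(BlockId::Number(BlockNumber::Latest), "example_component")
        .await?;
    if let Some(block) = block {
        println!("latest block number: {:?}", block.number);
    }
    Ok(())
}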
24 changes: 14 additions & 10 deletions core/lib/merkle_tree/examples/loadtest/main.rs
@@ -15,7 +15,8 @@ use std::{

use zksync_crypto::hasher::blake2::Blake2Hasher;
use zksync_merkle_tree::{
Database, HashTree, MerkleTree, MerkleTreePruner, PatchSet, RocksDBWrapper, TreeInstruction,
Database, HashTree, MerkleTree, MerkleTreePruner, PatchSet, RocksDBWrapper, TreeEntry,
TreeInstruction,
};
use zksync_storage::{RocksDB, RocksDBOptions};
use zksync_types::{AccountTreeId, Address, StorageKey, H256, U256};
@@ -135,19 +136,22 @@ impl Cli {
next_key_idx += new_keys.len() as u64;

next_value_idx += (new_keys.len() + updated_indices.len()) as u64;
let values = (next_value_idx..).map(H256::from_low_u64_be);
let updated_keys = Self::generate_keys(updated_indices.into_iter());
let kvs = new_keys.into_iter().chain(updated_keys).zip(values);
let kvs = new_keys
.into_iter()
.chain(updated_keys)
.zip(next_value_idx..);
let kvs = kvs.map(|(key, idx)| {
// The assigned leaf indices here are not always correct, but it's OK for load test purposes.
TreeEntry::new(key, idx, H256::from_low_u64_be(idx))
});

tracing::info!("Processing block #{version}");
let start = Instant::now();
let root_hash = if self.proofs {
let reads = Self::generate_keys(read_indices.into_iter())
.map(|key| (key, TreeInstruction::Read));
let instructions = kvs
.map(|(key, hash)| (key, TreeInstruction::Write(hash)))
.chain(reads)
.collect();
let reads =
Self::generate_keys(read_indices.into_iter()).map(TreeInstruction::Read);
let instructions = kvs.map(TreeInstruction::Write).chain(reads).collect();
let output = tree.extend_with_proofs(instructions);
output.root_hash().unwrap()
} else {
@@ -160,7 +164,7 @@

tracing::info!("Verifying tree consistency...");
let start = Instant::now();
tree.verify_consistency(self.commit_count - 1)
tree.verify_consistency(self.commit_count - 1, false)
.expect("tree consistency check failed");
let elapsed = start.elapsed();
tracing::info!("Verified tree consistency in {elapsed:?}");
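
A standalone sketch of the reworked entry API used above (illustrative only; it mirrors the `TreeEntry` and `TreeInstruction` calls visible in this diff with made-up keys and values instead of the load-test generators, and assumes the crate-root re-exports shown in the imports above).

use zksync_merkle_tree::{MerkleTree, PatchSet, TreeEntry, TreeInstruction};
use zksync_types::{H256, U256};

fn demo_tree_entries() {
    let mut tree = MerkleTree::new(PatchSet::default());

    // Leaf indices are now supplied explicitly alongside the key and value hash.
    tree.extend(vec![
        TreeEntry::new(U256::from(1_u64), 1, H256::from_low_u64_be(1)),
        TreeEntry::new(U256::from(2_u64), 2, H256::from_low_u64_be(2)),
    ]);

    // Instructions wrap whole entries for writes and bare keys for reads.
    let instructions = vec![
        TreeInstruction::Write(TreeEntry::new(U256::from(3_u64), 3, H256::from_low_u64_be(3))),
        TreeInstruction::Read(U256::from(1_u64)),
    ];
    let output = tree.extend_with_proofs(instructions);
    println!("root hash after proofs: {:?}", output.root_hash());
}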
10 changes: 5 additions & 5 deletions core/lib/merkle_tree/examples/recovery.rs
@@ -9,8 +9,8 @@ use std::time::Instant;

use zksync_crypto::hasher::blake2::Blake2Hasher;
use zksync_merkle_tree::{
recovery::{MerkleTreeRecovery, RecoveryEntry},
HashTree, Key, PatchSet, PruneDatabase, RocksDBWrapper, ValueHash,
recovery::MerkleTreeRecovery, HashTree, Key, PatchSet, PruneDatabase, RocksDBWrapper,
TreeEntry, ValueHash,
};
use zksync_storage::{RocksDB, RocksDBOptions};

@@ -94,15 +94,15 @@ impl Cli {
.map(|_| {
last_leaf_index += 1;
if self.random {
RecoveryEntry {
TreeEntry {
key: Key::from(rng.gen::<[u8; 32]>()),
value: ValueHash::zero(),
leaf_index: last_leaf_index,
}
} else {
last_key += key_step - Key::from(rng.gen::<u64>());
// ^ Increases the key by a random increment close to `key` step with some randomness.
RecoveryEntry {
TreeEntry {
key: last_key,
value: ValueHash::zero(),
leaf_index: last_leaf_index,
@@ -127,7 +127,7 @@
recovery_started_at.elapsed()
);
let started_at = Instant::now();
tree.verify_consistency(recovered_version).unwrap();
tree.verify_consistency(recovered_version, true).unwrap();
tracing::info!("Verified consistency in {:?}", started_at.elapsed());
}
}
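
A compact sketch of building recovery entries with the unified type (illustrative only; the field names come from the struct literal in the diff above, while the helper function itself is made up).

use zksync_merkle_tree::{Key, TreeEntry, ValueHash};

// Produces `count` placeholder entries with sequential leaf indices starting from 1,
// mirroring how the recovery example fills the tree.
fn placeholder_entries(count: u64) -> Vec<TreeEntry> {
    (1..=count)
        .map(|leaf_index| TreeEntry {
            key: Key::from(leaf_index),
            value: ValueHash::zero(),
            leaf_index,
        })
        .collect()
}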
35 changes: 25 additions & 10 deletions core/lib/merkle_tree/src/consistency.rs
@@ -69,10 +69,17 @@ pub enum ConsistencyError {
impl<DB: Database, H: HashTree> MerkleTree<DB, H> {
/// Verifies the internal tree consistency as stored in the database.
///
/// If `validate_indices` flag is set, it will be checked that indices for all tree leaves are unique
/// and are sequentially assigned starting from 1.
///
/// # Errors
///
/// Returns an error (the first encountered one if there are multiple).
pub fn verify_consistency(&self, version: u64) -> Result<(), ConsistencyError> {
pub fn verify_consistency(
&self,
version: u64,
validate_indices: bool,
) -> Result<(), ConsistencyError> {
let manifest = self.db.try_manifest()?;
let manifest = manifest.ok_or(ConsistencyError::MissingVersion(version))?;
if version >= manifest.version_count {
@@ -91,16 +98,19 @@
// We want to perform a depth-first walk of the tree in order to not keep
// much in memory.
let root_key = Nibbles::EMPTY.with_version(version);
let leaf_data = LeafConsistencyData::new(leaf_count);
self.validate_node(&root_node, root_key, &leaf_data)?;
leaf_data.validate_count()
let leaf_data = validate_indices.then(|| LeafConsistencyData::new(leaf_count));
self.validate_node(&root_node, root_key, leaf_data.as_ref())?;
if let Some(leaf_data) = leaf_data {
leaf_data.validate_count()?;
}
Ok(())
}

fn validate_node(
&self,
node: &Node,
key: NodeKey,
leaf_data: &LeafConsistencyData,
leaf_data: Option<&LeafConsistencyData>,
) -> Result<ValueHash, ConsistencyError> {
match node {
Node::Leaf(leaf) => {
@@ -111,7 +121,9 @@
full_key: leaf.full_key,
});
}
leaf_data.insert_leaf(leaf)?;
if let Some(leaf_data) = leaf_data {
leaf_data.insert_leaf(leaf)?;
}
}

Node::Internal(node) => {
@@ -261,7 +273,10 @@ mod tests {
use std::num::NonZeroU64;

use super::*;
use crate::{types::InternalNode, PatchSet};
use crate::{
types::{InternalNode, TreeEntry},
PatchSet,
};
use zksync_types::{H256, U256};

const FIRST_KEY: Key = U256([0, 0, 0, 0x_dead_beef_0000_0000]);
@@ -270,8 +285,8 @@
fn prepare_database() -> PatchSet {
let mut tree = MerkleTree::new(PatchSet::default());
tree.extend(vec![
(FIRST_KEY, H256([1; 32])),
(SECOND_KEY, H256([2; 32])),
TreeEntry::new(FIRST_KEY, 1, H256([1; 32])),
TreeEntry::new(SECOND_KEY, 2, H256([2; 32])),
]);
tree.db
}
@@ -300,7 +315,7 @@
.num_threads(1)
.build()
.expect("failed initializing `rayon` thread pool");
thread_pool.install(|| MerkleTree::new(db).verify_consistency(0))
thread_pool.install(|| MerkleTree::new(db).verify_consistency(0, true))
}

#[test]
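
To summarize the new flag in one place (a hedged sketch, not code from this commit; it assumes a tree version 0 exists in the given database): passing `true` additionally validates that leaf indices are unique and sequentially assigned starting from 1, as in the recovery example, while `false` skips that check, as in the load test.

use zksync_merkle_tree::{MerkleTree, PatchSet};

fn consistency_checks(db: PatchSet) {
    let tree = MerkleTree::new(db);
    // Full check, including leaf index validation.
    tree.verify_consistency(0, true)
        .expect("full consistency check failed");
    // Lighter check that skips per-leaf index validation.
    tree.verify_consistency(0, false)
        .expect("basic consistency check failed");
}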