Skip to content

Commit

Permalink
Merge branch 'main' into feat-jrigada-prestate-tracer-implementation
Browse files Browse the repository at this point in the history
  • Loading branch information
Jrigada authored Mar 7, 2024
2 parents 6424eeb + 8add2d6 commit dfb22f1
Show file tree
Hide file tree
Showing 93 changed files with 2,338 additions and 838 deletions.
519 changes: 87 additions & 432 deletions Cargo.lock

Large diffs are not rendered by default.

9 changes: 8 additions & 1 deletion core/bin/external_node/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -548,6 +548,14 @@ async fn main() -> anyhow::Result<()> {
([0, 0, 0, 0], config.required.healthcheck_port).into(),
app_health.clone(),
);
// Start scraping Postgres metrics before store initialization as well.
let metrics_pool = connection_pool.clone();
let mut task_handles = vec![tokio::spawn(async move {
metrics_pool
.run_postgres_metrics_scraping(Duration::from_secs(60))
.await;
Ok(())
})];

// Make sure that the node storage is initialized either via genesis or snapshot recovery.
ensure_storage_initialized(
Expand All @@ -560,7 +568,6 @@ async fn main() -> anyhow::Result<()> {
.await?;

let (stop_sender, stop_receiver) = watch::channel(false);
let mut task_handles = vec![];
init_tasks(
&config,
connection_pool.clone(),
Expand Down
1 change: 0 additions & 1 deletion core/bin/system-constants-generator/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@ zksync_types = { path = "../../lib/types" }
zksync_utils = { path = "../../lib/utils" }
zksync_contracts = { path = "../../lib/contracts" }
multivm = { path = "../../lib/multivm" }
zkevm_test_harness_1_3_3 = { git = "https://github.com/matter-labs/era-zkevm_test_harness.git", branch = "v1.3.3", package = "zkevm_test_harness" }

codegen = "0.2.0"

Expand Down
3 changes: 3 additions & 0 deletions core/bin/system-constants-generator/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Tool used to regenerate the constants

We use this tool to regenerate the constants (mostly for gas) that are later included in multiple system contracts.
13 changes: 7 additions & 6 deletions core/bin/system-constants-generator/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,14 +4,15 @@ use codegen::{Block, Scope};
use multivm::{
utils::{get_bootloader_encoding_space, get_bootloader_max_txs_in_batch},
vm_latest::constants::MAX_VM_PUBDATA_PER_BATCH,
};
use serde::{Deserialize, Serialize};
use zkevm_test_harness_1_3_3::zk_evm::zkevm_opcode_defs::{
circuit_prices::{
ECRECOVER_CIRCUIT_COST_IN_ERGS, KECCAK256_CIRCUIT_COST_IN_ERGS, SHA256_CIRCUIT_COST_IN_ERGS,
zk_evm_latest::zkevm_opcode_defs::{
circuit_prices::{
ECRECOVER_CIRCUIT_COST_IN_ERGS, KECCAK256_CIRCUIT_COST_IN_ERGS,
SHA256_CIRCUIT_COST_IN_ERGS,
},
system_params::MAX_TX_ERGS_LIMIT,
},
system_params::MAX_TX_ERGS_LIMIT,
};
use serde::{Deserialize, Serialize};
use zksync_types::{
IntrinsicSystemGasConstants, ProtocolVersionId, GUARANTEED_PUBDATA_IN_TX,
L1_GAS_PER_PUBDATA_BYTE, MAX_NEW_FACTORY_DEPS, REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_BYTE,
Expand Down
3 changes: 2 additions & 1 deletion core/bin/system-constants-generator/src/utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -323,7 +323,8 @@ pub(super) fn execute_user_txs_in_test_gas_vm(
if !accept_failure {
assert!(
!tx_execution_result.result.is_failed(),
"A transaction has failed"
"A transaction has failed: {:?}",
tx_execution_result.result
);
}
}
Expand Down
5 changes: 3 additions & 2 deletions core/lib/commitment_utils/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,9 @@ categories = ["cryptography"]
[dependencies]
zksync_types = { path = "../../lib/types" }
zksync_utils = { path = "../../lib/utils" }
zkevm_test_harness_1_4_0 = { git = "https://github.com/matter-labs/era-zkevm_test_harness.git", branch = "v1.4.0", package = "zkevm_test_harness" }
zkevm_test_harness_1_4_1 = { git = "https://github.com/matter-labs/era-zkevm_test_harness.git", branch = "v1.4.1", package = "zkevm_test_harness" }
circuit_sequencer_api_1_4_0 = { package = "circuit_sequencer_api", git = "https://github.com/matter-labs/era-zkevm_test_harness.git", branch = "v1.4.0" }
circuit_sequencer_api_1_4_1 = { package = "circuit_sequencer_api", git = "https://github.com/matter-labs/era-zkevm_test_harness.git", branch = "v1.4.1" }

zk_evm_1_4_1 = { package = "zk_evm", git = "https://github.com/matter-labs/era-zk_evm.git", branch = "v1.4.1" }
zk_evm_1_3_3 = { package = "zk_evm", git = "https://github.com/matter-labs/era-zk_evm.git", tag = "v1.3.3-rc2" }
multivm = { path = "../../lib/multivm" }
8 changes: 4 additions & 4 deletions core/lib/commitment_utils/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,15 +17,15 @@ pub fn events_queue_commitment(
) -> Option<H256> {
match VmVersion::from(protocol_version) {
VmVersion::VmBoojumIntegration => Some(H256(
zkevm_test_harness_1_4_0::witness::utils::events_queue_commitment_fixed(
circuit_sequencer_api_1_4_0::commitments::events_queue_commitment_fixed(
&events_queue
.iter()
.map(|x| to_log_query_1_3_3(*x))
.collect(),
),
)),
VmVersion::Vm1_4_1 | VmVersion::Vm1_4_2 => Some(H256(
zkevm_test_harness_1_4_1::witness::utils::events_queue_commitment_fixed(
circuit_sequencer_api_1_4_1::commitments::events_queue_commitment_fixed(
&events_queue
.iter()
.map(|x| to_log_query_1_4_1(*x))
Expand All @@ -51,12 +51,12 @@ pub fn bootloader_initial_content_commitment(

match VmVersion::from(protocol_version) {
VmVersion::VmBoojumIntegration => Some(H256(
zkevm_test_harness_1_4_0::witness::utils::initial_heap_content_commitment_fixed(
circuit_sequencer_api_1_4_0::commitments::initial_heap_content_commitment_fixed(
&full_bootloader_memory,
),
)),
VmVersion::Vm1_4_1 | VmVersion::Vm1_4_2 => Some(H256(
zkevm_test_harness_1_4_1::witness::utils::initial_heap_content_commitment_fixed(
circuit_sequencer_api_1_4_1::commitments::initial_heap_content_commitment_fixed(
&full_bootloader_memory,
),
)),
Expand Down

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
-- Speeds up queries filtering events by the address that initiated the transaction.
CREATE INDEX IF NOT EXISTS events_tx_initiator_address_idx
ON events (tx_initiator_address);
-- Speeds up queries filtering transactions by the contract they call.
CREATE INDEX IF NOT EXISTS transactions_contract_address_idx
ON transactions (contract_address);
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
-- Reverts the corresponding up migration by dropping both indexes.
DROP INDEX IF EXISTS transactions_contract_address_idx;
-- Fixed index name: the up migration creates `events_tx_initiator_address_idx`
-- (with the `_idx` suffix); dropping `events_tx_initiator_address` was a no-op
-- and would have left the index behind after a rollback.
DROP INDEX IF EXISTS events_tx_initiator_address_idx;
8 changes: 7 additions & 1 deletion core/lib/dal/src/connection/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ use sqlx::{
pub use self::processor::StorageProcessor;
pub(crate) use self::processor::StorageProcessorTags;
use self::processor::TracedConnections;
use crate::metrics::CONNECTION_METRICS;
use crate::metrics::{PostgresMetrics, CONNECTION_METRICS};

mod processor;

Expand Down Expand Up @@ -307,6 +307,12 @@ impl ConnectionPool {
self.max_size
}

/// Uses this pool to report Postgres-wide metrics (e.g., table sizes). Should be called sparingly to not spam
/// identical metrics from multiple places. The returned future runs indefinitely and should be spawned as a Tokio task.
///
/// Consumes the pool by value; a connection is acquired from it on each scrape
/// (every `scrape_interval`) and released once the scrape completes.
pub async fn run_postgres_metrics_scraping(self, scrape_interval: Duration) {
    // Thin delegation so that `PostgresMetrics` can stay private to the `metrics` module.
    PostgresMetrics::run_scraping(self, scrape_interval).await;
}

/// Creates a `StorageProcessor` entity over a recoverable connection.
/// Upon a database outage connection will block the thread until
/// it will be able to recover the connection (or, if connection cannot
Expand Down
62 changes: 61 additions & 1 deletion core/lib/dal/src/metrics.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,14 @@
use std::{thread, time::Duration};

use anyhow::Context as _;
use vise::{
Buckets, Counter, EncodeLabelSet, EncodeLabelValue, Family, Histogram, LabeledFamily,
Buckets, Counter, EncodeLabelSet, EncodeLabelValue, Family, Gauge, Histogram, LabeledFamily,
LatencyObserver, Metrics, Unit,
};

use crate::ConnectionPool;

/// Request-related DB metrics.
#[derive(Debug, Metrics)]
#[metrics(prefix = "sql")]
Expand Down Expand Up @@ -96,3 +99,60 @@ pub(crate) struct ConnectionMetrics {

#[vise::register]
pub(crate) static CONNECTION_METRICS: vise::Global<ConnectionMetrics> = vise::Global::new();

/// Postgres-wide storage metrics (table / index sizes), periodically scraped by
/// `PostgresMetrics::run_scraping`. All gauges are labeled by table name.
#[derive(Debug, Metrics)]
#[metrics(prefix = "postgres")]
pub(crate) struct PostgresMetrics {
    /// Size of the data in a certain table as returned by `pg_table_size` function.
    #[metrics(unit = Unit::Bytes, labels = ["table"])]
    table_data_size: LabeledFamily<String, Gauge<u64>>,
    /// Size of the indexes attached to a certain table as returned by `pg_indexes_size` function.
    #[metrics(unit = Unit::Bytes, labels = ["table"])]
    table_indexes_size: LabeledFamily<String, Gauge<u64>>,
    /// Size of the main fork of a certain table as returned by `pg_relation_size` function.
    #[metrics(unit = Unit::Bytes, labels = ["table"])]
    table_relation_size: LabeledFamily<String, Gauge<u64>>,
    /// Total size of a certain table as returned by `pg_total_relation_size` function.
    #[metrics(unit = Unit::Bytes, labels = ["table"])]
    table_total_size: LabeledFamily<String, Gauge<u64>>,
}

#[vise::register]
static POSTGRES_METRICS: vise::Global<PostgresMetrics> = vise::Global::new();

impl PostgresMetrics {
    /// Scrapes table-size metrics from Postgres every `scrape_interval`, forever.
    /// Intended to be spawned as a background Tokio task; never returns.
    pub(crate) async fn run_scraping(pool: ConnectionPool, scrape_interval: Duration) {
        // Cap a single scrape at 1 second (or half the interval, whichever is
        // smaller) so a slow query cannot stall the loop.
        let scrape_timeout = Duration::from_secs(1).min(scrape_interval / 2);
        loop {
            let outcome = tokio::time::timeout(scrape_timeout, Self::scrape(&pool)).await;
            match outcome {
                Ok(Ok(())) => { /* everything went fine */ }
                Ok(Err(err)) => {
                    tracing::warn!("Error scraping Postgres metrics: {err:?}");
                }
                Err(_) => {
                    tracing::info!("Timed out scraping Postgres metrics after {scrape_timeout:?}");
                }
            }
            tokio::time::sleep(scrape_interval).await;
        }
    }

    /// Performs one scrape: fetches per-table sizes via `SystemDal` and records
    /// them into the `POSTGRES_METRICS` gauges.
    async fn scrape(pool: &ConnectionPool) -> anyhow::Result<()> {
        let mut storage = pool
            .access_storage_tagged("postgres_metrics")
            .await
            .context("cannot acquire Postgres connection")?;
        let table_sizes = storage
            .system_dal()
            .get_table_sizes()
            .await
            .context("failed getting table sizes")?;
        for (name, sizes) in table_sizes {
            POSTGRES_METRICS.table_data_size[&name].set(sizes.table_size);
            POSTGRES_METRICS.table_indexes_size[&name].set(sizes.indexes_size);
            POSTGRES_METRICS.table_relation_size[&name].set(sizes.relation_size);
            POSTGRES_METRICS.table_total_size[&name].set(sizes.total_size);
        }
        Ok(())
    }
}
46 changes: 45 additions & 1 deletion core/lib/dal/src/system_dal.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,16 @@
use std::collections::HashMap;

use sqlx::Row;

use crate::StorageProcessor;
use crate::{instrument::InstrumentExt, StorageProcessor};

/// Byte sizes of a single Postgres table, as reported by the `pg_*_size`
/// system functions; populated by `SystemDal::get_table_sizes()`.
#[derive(Debug)]
pub(crate) struct TableSize {
    /// Value of `pg_table_size` for the table.
    pub table_size: u64,
    /// Value of `pg_indexes_size` for the table.
    pub indexes_size: u64,
    /// Value of `pg_relation_size` for the table.
    pub relation_size: u64,
    /// Value of `pg_total_relation_size` for the table.
    pub total_size: u64,
}

pub struct SystemDal<'a, 'c> {
pub storage: &'a mut StorageProcessor<'c>,
Expand All @@ -27,4 +37,38 @@ impl SystemDal<'_, '_> {
_ => 0,
}
}

/// Returns per-table size statistics (data, index, relation and total sizes, in
/// bytes) for every table in the `public` schema, keyed by table name.
///
/// NULL size columns are uniformly treated as 0. Previously, a NULL
/// `table_size` or `total_size` silently dropped the whole row (via `?` inside
/// `filter_map`) while `indexes_size` / `relation_size` defaulted to 0 — an
/// inconsistency that could make tables vanish from metrics entirely.
pub(crate) async fn get_table_sizes(&mut self) -> sqlx::Result<HashMap<String, TableSize>> {
    let rows = sqlx::query!(
        r#"
        SELECT
            table_name,
            PG_TABLE_SIZE(('public.' || QUOTE_IDENT(table_name))::regclass) AS table_size,
            PG_INDEXES_SIZE(('public.' || QUOTE_IDENT(table_name))::regclass) AS indexes_size,
            PG_RELATION_SIZE(('public.' || QUOTE_IDENT(table_name))::regclass) AS relation_size,
            PG_TOTAL_RELATION_SIZE(('public.' || QUOTE_IDENT(table_name))::regclass) AS total_size
        FROM
            information_schema.tables
        WHERE
            table_schema = 'public'
        "#
    )
    .instrument("get_table_sizes")
    .report_latency()
    .fetch_all(self.storage)
    .await?;

    let table_sizes = rows.into_iter().filter_map(|row| {
        // A row without a table name cannot be keyed and is skipped; all size
        // columns consistently default to 0 when NULL.
        Some((
            row.table_name?,
            TableSize {
                table_size: row.table_size.unwrap_or(0) as u64,
                indexes_size: row.indexes_size.unwrap_or(0) as u64,
                relation_size: row.relation_size.unwrap_or(0) as u64,
                total_size: row.total_size.unwrap_or(0) as u64,
            },
        ))
    });
    Ok(table_sizes.collect())
}
}
5 changes: 3 additions & 2 deletions core/lib/l1_contract_interface/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -17,10 +17,11 @@ zksync_prover_interface = { path = "../prover_interface" }
# Used to serialize proof data
codegen = { git = "https://github.com/matter-labs/solidity_plonk_verifier.git", branch = "dev" }
# Used to calculate commitment for vk from the old L1 verifier contract (backward compatibility needs)
zkevm_test_harness_1_3_3 = { package = "zkevm_test_harness", git = "https://github.com/matter-labs/era-zkevm_test_harness.git", branch = "v1.3.3" }

circuit_sequencer_api_1_3_3 = { package = "circuit_sequencer_api", git = "https://github.com/matter-labs/era-zkevm_test_harness.git", branch = "v1.3.3" }
# Used to calculate the kzg commitment and proofs
zkevm_test_harness_1_4_2 = { package = "zkevm_test_harness", git = "https://github.com/matter-labs/era-zkevm_test_harness.git", branch = "v1.4.2" }
kzg = { package = "kzg", git = "https://github.com/matter-labs/era-zkevm_test_harness.git", branch = "v1.4.2" }

sha2 = "0.10.8"
sha3 = "0.10.8"
hex = "0.4"
Expand Down
10 changes: 5 additions & 5 deletions core/lib/l1_contract_interface/src/i_executor/commit/kzg/mod.rs
Original file line number Diff line number Diff line change
@@ -1,10 +1,8 @@
use std::convert::TryInto;

use sha2::Sha256;
use sha3::{Digest, Keccak256};
pub use zkevm_test_harness_1_4_2::kzg::KzgSettings;
use zkevm_test_harness_1_4_2::{
kzg::{compute_commitment, compute_proof, compute_proof_poly},
pub use kzg::KzgSettings;
use kzg::{
compute_commitment, compute_proof, compute_proof_poly,
zkevm_circuits::{
boojum::pairing::{
bls12_381::{Fr, FrRepr, G1Affine},
Expand All @@ -18,6 +16,8 @@ use zkevm_test_harness_1_4_2::{
},
},
};
use sha2::Sha256;
use sha3::{Digest, Keccak256};
use zksync_types::H256;

use self::trusted_setup::KZG_SETTINGS;
Expand Down
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
//! Tests for KZG commitments.
use serde::{Deserialize, Serialize};
use zkevm_test_harness_1_4_2::{
use kzg::{
boojum::pairing::{bls12_381::G1Compressed, EncodedPoint},
kzg::{verify_kzg_proof, verify_proof_poly},
verify_kzg_proof, verify_proof_poly,
zkevm_circuits::eip_4844::ethereum_4844_data_into_zksync_pubdata,
};
use serde::{Deserialize, Serialize};

use super::*;

Expand Down
Original file line number Diff line number Diff line change
@@ -1,9 +1,7 @@
use std::{convert::TryInto, iter};

use once_cell::sync::Lazy;
use zkevm_test_harness_1_4_2::{
use kzg::{
boojum::pairing::{bls12_381::G2Compressed, EncodedPoint},
kzg::KzgSettings,
zkevm_circuits::{
boojum::pairing::{
bls12_381::{Fr, FrRepr, G1Compressed},
Expand All @@ -12,7 +10,9 @@ use zkevm_test_harness_1_4_2::{
},
eip_4844::input::ELEMENTS_PER_4844_BLOCK,
},
KzgSettings,
};
use once_cell::sync::Lazy;

const FIRST_ROOT_OF_UNITY: FrRepr = FrRepr([
0xe206da11a5d36306,
Expand Down
Loading

0 comments on commit dfb22f1

Please sign in to comment.