diff --git a/crates/header-accumulator/examples/inclusion_proof.rs b/crates/header-accumulator/examples/inclusion_proof.rs
index f27c8c51..0728f28e 100644
--- a/crates/header-accumulator/examples/inclusion_proof.rs
+++ b/crates/header-accumulator/examples/inclusion_proof.rs
@@ -1,9 +1,8 @@
 use std::{fs::File, io::BufReader};
 
-use firehose_protos::EthBlock as Block;
 use flat_files_decoder::{read_blocks_from_reader, Compression};
 use header_accumulator::{
-    generate_inclusion_proof, verify_inclusion_proof, EraValidateError, ExtHeaderRecord,
+    generate_inclusion_proofs, verify_inclusion_proofs, Epoch, EraValidateError, ExtHeaderRecord,
 };
 
 fn create_test_reader(path: &str) -> BufReader<File> {
@@ -12,7 +11,6 @@
 
 fn main() -> Result<(), EraValidateError> {
     let mut headers: Vec<ExtHeaderRecord> = Vec::new();
-    let mut all_blocks: Vec<Block> = Vec::new();
 
     for flat_file_number in (0..=8200).step_by(100) {
         let file = format!(
@@ -27,7 +25,6 @@ fn main() -> Result<(), EraValidateError> {
                     .map(|block| ExtHeaderRecord::try_from(block).unwrap())
                     .collect::<Vec<ExtHeaderRecord>>(),
                 );
-                all_blocks.extend(blocks);
             }
             Err(e) => {
                 eprintln!("error: {:?}", e);
@@ -38,23 +35,27 @@ fn main() -> Result<(), EraValidateError> {
     let start_block = 301;
     let end_block = 402;
-    let inclusion_proof =
-        generate_inclusion_proof(headers, start_block, end_block).unwrap_or_else(|e| {
+    let headers_to_prove: Vec<_> = headers[start_block..end_block]
+        .iter()
+        .map(|ext| ext.full_header.as_ref().unwrap().clone())
+        .collect();
+    let epoch: Epoch = headers.try_into().unwrap();
+
+    let inclusion_proof = generate_inclusion_proofs(vec![epoch], headers_to_prove.clone())
+        .unwrap_or_else(|e| {
             println!("Error occurred: {}", e);
             std::process::exit(1);
         });
-    assert_eq!(
-        inclusion_proof.len() as usize,
-        (end_block - start_block + 1) as usize
-    );
+    assert_eq!(inclusion_proof.len(), headers_to_prove.len());
 
-    // Verify inclusion proof
-    let proof_blocks: Vec<Block> = all_blocks[start_block as usize..=end_block as usize].to_vec();
-    assert!(verify_inclusion_proof(proof_blocks, None, inclusion_proof.clone()).is_ok());
+    let proof_headers = headers_to_prove
+        .into_iter()
+        .zip(inclusion_proof)
+        .map(|(header, proof)| proof.with_header(header))
+        .collect::<Result<Vec<_>, _>>()?;
 
-    // Verify if inclusion proof fails on not proven blocks
-    let proof_blocks: Vec<Block> = all_blocks[302..=403].to_vec();
-    assert!(verify_inclusion_proof(proof_blocks, None, inclusion_proof.clone()).is_err());
+    // Verify inclusion proof
+    assert!(verify_inclusion_proofs(None, proof_headers).is_ok());
 
     println!("Inclusion proof verified successfully!");
 
     Ok(())
diff --git a/crates/header-accumulator/src/epoch.rs b/crates/header-accumulator/src/epoch.rs
index 3a50efbb..1f117333 100644
--- a/crates/header-accumulator/src/epoch.rs
+++ b/crates/header-accumulator/src/epoch.rs
@@ -1,7 +1,7 @@
 use std::array::IntoIter;
 
 use alloy_primitives::map::HashSet;
-use ethportal_api::types::execution::accumulator::HeaderRecord;
+use ethportal_api::types::execution::accumulator::{EpochAccumulator, HeaderRecord};
 
 use crate::{errors::EraValidateError, types::ExtHeaderRecord};
 
@@ -30,6 +30,7 @@ pub const MERGE_BLOCK: u64 = 15537394;
 /// 0 must start from block 0 to block 8191.
 ///
 /// All blocks must be at the same epoch
+#[derive(Clone)]
 pub struct Epoch {
     number: usize,
     data: Box<[HeaderRecord; MAX_EPOCH_SIZE]>,
@@ -81,6 +82,13 @@ impl TryFrom<Vec<ExtHeaderRecord>> for Epoch {
     }
 }
 
+impl From<Epoch> for EpochAccumulator {
+    fn from(value: Epoch) -> Self {
+        let vec: Vec<HeaderRecord> = value.data.to_vec();
+        EpochAccumulator::from(vec)
+    }
+}
+
 impl Epoch {
     pub fn number(&self) -> usize {
         self.number
diff --git a/crates/header-accumulator/src/errors.rs b/crates/header-accumulator/src/errors.rs
index 39e513cc..fb7377df 100644
--- a/crates/header-accumulator/src/errors.rs
+++ b/crates/header-accumulator/src/errors.rs
@@ -9,6 +9,19 @@ pub enum EraValidateError {
     #[error("Era accumulator mismatch")]
     EraAccumulatorMismatch,
 
+    #[error("Block epoch {block_epoch} (block number {block_number}) could not be proven with provided epoch {epoch_number}.")]
+    EpochNotMatchForHeader {
+        epoch_number: usize,
+        block_number: u64,
+        block_epoch: usize,
+    },
+
+    #[error("Expected epoch {block_epoch} was not found in the provided epoch list. Epochs provided: {epoch_list:?}.")]
+    EpochNotFoundInProvidedList {
+        block_epoch: usize,
+        epoch_list: Vec<usize>,
+    },
+
     #[error("Error generating inclusion proof")]
     ProofGenerationFailure,
     #[error("Error validating inclusion proof")]
@@ -28,6 +41,12 @@ pub enum EraValidateError {
     InvalidBlockRange(u64, u64),
     #[error("Epoch is in post merge: {0}")]
     EpochPostMerge(usize),
+
+    #[error("Header block number ({block_number}) is different than expected ({expected_number})")]
+    HeaderMismatch {
+        expected_number: u64,
+        block_number: u64,
+    },
 }
 
 impl From<ProtosError> for EraValidateError {
diff --git a/crates/header-accumulator/src/inclusion_proof.rs b/crates/header-accumulator/src/inclusion_proof.rs
index adc82a7e..275e3621 100644
--- a/crates/header-accumulator/src/inclusion_proof.rs
+++ b/crates/header-accumulator/src/inclusion_proof.rs
@@ -1,106 +1,169 @@
-use crate::{epoch::MAX_EPOCH_SIZE, errors::EraValidateError, types::ExtHeaderRecord};
+use crate::{epoch::MAX_EPOCH_SIZE, errors::EraValidateError, Epoch};
 
-use alloy_primitives::FixedBytes;
 use ethportal_api::{
     types::execution::{
-        accumulator::{EpochAccumulator, HeaderRecord},
-        header_with_proof::{BlockHeaderProof, HeaderWithProof, PreMergeAccumulatorProof},
+        accumulator::EpochAccumulator,
+        header_with_proof::{
+            BlockHeaderProof, HeaderWithProof as PortalHeaderWithProof, PreMergeAccumulatorProof,
+        },
     },
     Header,
 };
-use firehose_protos::EthBlock as Block;
 use tree_hash::Hash256;
 use trin_validation::{
     accumulator::PreMergeAccumulator, header_validator::HeaderValidator,
     historical_roots_acc::HistoricalRootsAccumulator,
 };
 
-/// generates an inclusion proof over headers, given blocks between `start_block` and `end_block`
+const PROOF_SIZE: usize = 15;
+
+/// An inclusion proof that also records the number of the block it proves
+#[derive(Clone)]
+pub struct InclusionProof {
+    block_number: u64,
+    proof: [Hash256; PROOF_SIZE],
+}
+
+impl InclusionProof {
+    /// Takes a header and turns the proof into a provable header
+    pub fn with_header(self, header: Header) -> Result<HeaderWithProof, EraValidateError> {
+        if self.block_number != header.number {
+            Err(EraValidateError::HeaderMismatch {
+                expected_number: self.block_number,
+                block_number: header.number,
+            })
+        } else {
+            Ok(HeaderWithProof {
+                proof: self,
+                header,
+            })
+        }
+    }
+}
+
+impl From<InclusionProof> for PreMergeAccumulatorProof {
+    fn from(value: InclusionProof) -> Self {
+        Self { proof: value.proof }
+    }
+}
+
+/// Generates inclusion proofs for headers, given a list of epochs that contains
+/// the headers to be proven
 ///
 /// # Arguments
 ///
-/// * `ext_headers`- A mutable [`Vec<ExtHeaderRecord>`]. The Vector can be any size, however, it must be in chunks of 8192 blocks to work properly
-///   to function without error
-/// * `start_block` - The starting point of blocks that are to be included in the proofs. This interval is inclusive.
-/// * `end_epoch` - The ending point of blocks that are to be included in the proofs. This interval is inclusive.
-pub fn generate_inclusion_proof(
-    mut ext_headers: Vec<ExtHeaderRecord>,
-    start_block: u64,
-    end_block: u64,
-) -> Result<Vec<[Hash256; 15]>, EraValidateError> {
-    if start_block > end_block {
-        return Err(EraValidateError::InvalidBlockRange(start_block, end_block));
-    }
+/// * `epochs` - A list of epochs [`Vec<Epoch>`].
+/// * `headers_to_prove` - A list of headers [`Vec<Header>`].
+pub fn generate_inclusion_proofs(
+    epochs: Vec<Epoch>,
+    headers_to_prove: Vec<Header>,
+) -> Result<Vec<InclusionProof>, EraValidateError> {
+    let mut inclusion_proof_vec: Vec<InclusionProof> = Vec::with_capacity(headers_to_prove.len());
+    let epoch_list: Vec<_> = epochs.iter().map(|epoch| epoch.number()).collect();
+    let accumulators: Vec<_> = epochs
+        .into_iter()
+        .map(|epoch| (epoch.number(), EpochAccumulator::from(epoch)))
+        .collect();
 
-    // Compute the epoch accumulator for the blocks
-    // The epochs start on a multiple of 8192 blocks, so we need to round down to the nearest 8192
-    let epoch_start = start_block / MAX_EPOCH_SIZE as u64;
-
-    // The epochs end on a multiple of 8192 blocks, so we need to round up to the nearest 8192
-    let epoch_end = ((end_block as f32) / MAX_EPOCH_SIZE as f32).ceil() as u64;
-
-    // We need to load blocks from an entire epoch to be able to generate inclusion proofs
-    // First compute epoch accumulators and the Merkle tree for all the epochs of interest
-    let mut epoch_accumulators = Vec::new();
-    let mut inclusion_proof_vec: Vec<[FixedBytes<32>; 15]> = Vec::new();
-    let mut headers: Vec<Header> = Vec::new();
-
-    for _ in epoch_start..epoch_end {
-        let epoch_headers: Vec<ExtHeaderRecord> = ext_headers.drain(0..MAX_EPOCH_SIZE).collect();
-        let header_records: Vec<HeaderRecord> = epoch_headers.iter().map(Into::into).collect();
-        let tmp_headers: Vec<Header> = epoch_headers
-            .into_iter()
-            .map(ExtHeaderRecord::try_into)
-            .collect::<Result<_, _>>()?;
-        headers.extend(tmp_headers);
-        epoch_accumulators.push(EpochAccumulator::from(header_records));
-    }
+    for header in headers_to_prove {
+        let block_epoch = (header.number / MAX_EPOCH_SIZE as u64) as usize;
+
+        let accumulator = accumulators
+            .iter()
+            .find(|epoch| epoch.0 == block_epoch)
+            .map(|epoch| &epoch.1)
+            .ok_or(EraValidateError::EpochNotFoundInProvidedList {
+                block_epoch,
+                epoch_list: epoch_list.clone(),
+            })?;
 
-    for block_idx in start_block..=end_block {
-        let epoch = block_idx / MAX_EPOCH_SIZE as u64;
-        let epoch_acc = epoch_accumulators[epoch as usize].clone();
-        let header = headers[block_idx as usize].clone();
-        inclusion_proof_vec.push(
-            PreMergeAccumulator::construct_proof(&header, &epoch_acc)
-                .map_err(|_| EraValidateError::ProofGenerationFailure)?,
-        );
+        inclusion_proof_vec.push(do_generate_inclusion_proof(&header, accumulator)?);
     }
 
     Ok(inclusion_proof_vec)
 }
 
-/// verifies an inclusion proof generate by [`generate_inclusion_proof`]
+/// Generates an inclusion proof for the header, given the epoch that contains
+/// the header to be proven
 ///
-/// * `blocks`- A [`Vec<Block>`]. The blocks included in the inclusion proof interval, set in `start_block` and `end_block` of [`generate_inclusion_proof`]
-/// * `pre_merge_accumulator_file`- An instance of [`PreMergeAccumulator`] which is a file that maintains a record of historical epoch
-/// it is used to verify canonical-ness of headers accumulated from the `blocks`
-/// * `inclusion_proof` - The inclusion proof generated from [`generate_inclusion_proof`].
-pub fn verify_inclusion_proof(
-    blocks: Vec<Block>,
+/// Returns an error if the header is not inside the epoch.
+///
+/// # Arguments
+///
+/// * `header` - The header to be proven
+/// * `epoch` - The epoch in which the header is located
+pub fn generate_inclusion_proof(
+    header: Header,
+    epoch: Epoch,
+) -> Result<InclusionProof, EraValidateError> {
+    let block_number = header.number;
+    let block_epoch = (block_number / MAX_EPOCH_SIZE as u64) as usize;
+    if block_epoch != epoch.number() {
+        return Err(EraValidateError::EpochNotMatchForHeader {
+            epoch_number: epoch.number(),
+            block_number,
+            block_epoch,
+        });
+    }
+
+    let epoch_accumulator = EpochAccumulator::from(epoch);
+    do_generate_inclusion_proof(&header, &epoch_accumulator)
+}
+
+fn do_generate_inclusion_proof(
+    header: &Header,
+    epoch_accumulator: &EpochAccumulator,
+) -> Result<InclusionProof, EraValidateError> {
+    PreMergeAccumulator::construct_proof(header, epoch_accumulator)
+        .map(|proof| InclusionProof {
+            proof,
+            block_number: header.number,
+        })
+        .map_err(|_| EraValidateError::ProofGenerationFailure)
+}
+
+/// Verifies a list of provable headers
+///
+/// * `pre_merge_accumulator_file` - An optional instance of [`PreMergeAccumulator`],
+///   a file that maintains a record of historical epochs; it is used to verify
+///   that the provided headers are canonical.
+/// * `header_proofs` - A [`Vec<HeaderWithProof>`] of headers with their proofs.
+pub fn verify_inclusion_proofs(
     pre_merge_accumulator_file: Option<PreMergeAccumulator>,
-    inclusion_proof: Vec<[Hash256; 15]>,
+    header_proofs: Vec<HeaderWithProof>,
 ) -> Result<(), EraValidateError> {
     let pre_merge_acc = pre_merge_accumulator_file.unwrap_or_default();
-
     let header_validator = HeaderValidator {
         pre_merge_acc,
         historical_roots_acc: HistoricalRootsAccumulator::default(),
     };
 
-    for (block_idx, _) in blocks.iter().enumerate() {
-        let bhp = BlockHeaderProof::PreMergeAccumulatorProof(PreMergeAccumulatorProof {
-            proof: inclusion_proof[block_idx],
-        });
-
-        let hwp = HeaderWithProof {
-            header: Header::try_from(&blocks[block_idx])?,
-            proof: bhp,
-        };
-
-        header_validator
-            .validate_header_with_proof(&hwp)
-            .map_err(|_| EraValidateError::ProofValidationFailure)?;
+    for provable_header in header_proofs {
+        verify_inclusion_proof(&header_validator, provable_header)?;
     }
 
     Ok(())
 }
+
+/// A header with an inclusion proof attached
+pub struct HeaderWithProof {
+    header: Header,
+    proof: InclusionProof,
+}
+
+/// Verifies a single provable header against the given header validator
+pub fn verify_inclusion_proof(
+    header_validator: &HeaderValidator,
+    provable_header: HeaderWithProof,
+) -> Result<(), EraValidateError> {
+    let proof = BlockHeaderProof::PreMergeAccumulatorProof(provable_header.proof.into());
+
+    let hwp = PortalHeaderWithProof {
+        header: provable_header.header,
+        proof,
+    };
+
+    header_validator
+        .validate_header_with_proof(&hwp)
+        .map_err(|_| EraValidateError::ProofValidationFailure)
+}
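
For review context, a minimal sketch of how the single-header path added in this patch might be called, complementing the batch flow in the updated example. This is illustrative only: it assumes `generate_inclusion_proof` (singular) is re-exported from the crate root alongside the plural functions, and that the caller has already decoded at least one epoch's worth of `ExtHeaderRecord`s (as the example does with `flat_files_decoder`); block index 301 is an arbitrary choice.

```rust
use header_accumulator::{
    generate_inclusion_proof, verify_inclusion_proofs, Epoch, EraValidateError, ExtHeaderRecord,
};

fn prove_single_header(headers: Vec<ExtHeaderRecord>) -> Result<(), EraValidateError> {
    // Keep a copy of the full header to prove before the records are consumed below.
    let header = headers[301].full_header.as_ref().unwrap().clone();

    // Build the epoch (8192 consecutive header records) containing that header.
    let epoch: Epoch = headers.try_into().unwrap();

    // Generate a proof for the single header, then bind the header to the proof;
    // `with_header` fails with `HeaderMismatch` if the block numbers differ.
    let proof = generate_inclusion_proof(header.clone(), epoch)?;
    let provable_header = proof.with_header(header)?;

    // `None` falls back to the default `PreMergeAccumulator`, as in the example.
    verify_inclusion_proofs(None, vec![provable_header])
}
```

Because `InclusionProof` carries the block number it was generated for, pairing a proof with the wrong header is rejected up front with `HeaderMismatch` rather than surfacing later as `ProofValidationFailure`.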