diff --git a/Cargo.toml b/Cargo.toml
index 4fd1b35..3bd495c 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,7 @@
 [workspace]
 members = ["verkle-db", "verkle-trie", "verkle-spec", "ipa-multipoint"]
+resolver = "2"
 
 [profile.bench]
 debug = true
diff --git a/ipa-multipoint/Cargo.toml b/ipa-multipoint/Cargo.toml
index bedee71..c77b90d 100644
--- a/ipa-multipoint/Cargo.toml
+++ b/ipa-multipoint/Cargo.toml
@@ -8,7 +8,7 @@ edition = "2018"
 
 [dependencies]
 rand = "0.6"
-criterion = "0.3.4"
+criterion = "0.5.1"
 bandersnatch = "0.1.1"
 ark-ff = { version = "^0.3.0", default-features = false }
 ark-ec = { version = "^0.3.0", default-features = false }
diff --git a/ipa-multipoint/src/crs.rs b/ipa-multipoint/src/crs.rs
index 9486ab0..134528f 100644
--- a/ipa-multipoint/src/crs.rs
+++ b/ipa-multipoint/src/crs.rs
@@ -47,20 +47,19 @@ impl std::ops::Index<usize> for CRS {
 }
 
 fn generate_random_elements(num_required_points: usize, seed: &'static [u8]) -> Vec<Element> {
-    use ark_ec::group::Group;
+    use ark_ff::PrimeField;
     use bandersnatch::Fq;
     use sha2::{Digest, Sha256};
 
-    let choose_largest = false;
+    let _choose_largest = false;
 
     (0u64..)
-        .into_iter()
         // Hash the seed + i to get a possible x value
         .map(|i| {
             let mut hasher = Sha256::new();
             hasher.update(seed);
-            hasher.update(&i.to_be_bytes());
+            hasher.update(i.to_be_bytes());
             let bytes: Vec<u8> = hasher.finalize().to_vec();
             bytes
         })
@@ -76,8 +75,7 @@ fn generate_random_elements(num_required_points: usize, seed: &'static [u8]) -> Vec<Element> {
             bytes
         })
         // Deserialise the x-cordinate to get a valid banderwagon element
-        .map(|bytes| Element::from_bytes(&bytes))
-        .filter_map(|point| point)
+        .filter_map(|bytes| Element::from_bytes(&bytes))
         .take(num_required_points)
         .collect()
 }
@@ -88,7 +86,7 @@ fn crs_consistency() {
     // TODO is a bit different
     // See: https://hackmd.io/1RcGSMQgT4uREaq1CCx_cg#Methodology
     use ark_serialize::CanonicalSerialize;
-    use bandersnatch::Fq;
+    use sha2::{Digest, Sha256};
 
     let points = generate_random_elements(256, b"eth_verkle_oct_2021");
@@ -96,14 +94,14 @@ fn crs_consistency() {
     let mut bytes = [0u8; 32];
     points[0].serialize(&mut bytes[..]).unwrap();
     assert_eq!(
-        hex::encode(&bytes),
+        hex::encode(bytes),
         "01587ad1336675eb912550ec2a28eb8923b824b490dd2ba82e48f14590a298a0",
         "the first point is incorrect"
     );
     let mut bytes = [0u8; 32];
     points[255].serialize(&mut bytes[..]).unwrap();
     assert_eq!(
-        hex::encode(&bytes),
+        hex::encode(bytes),
         "3de2be346b539395b0c0de56a5ccca54a317f1b5c80107b0802af9a62276a4d8",
         "the 256th (last) point is incorrect"
     );
@@ -112,11 +110,11 @@ fn crs_consistency() {
     for point in &points {
         let mut bytes = [0u8; 32];
         point.serialize(&mut bytes[..]).unwrap();
-        hasher.update(&bytes);
+        hasher.update(bytes);
     }
     let bytes = hasher.finalize().to_vec();
     assert_eq!(
-        hex::encode(&bytes),
+        hex::encode(bytes),
         "1fcaea10bf24f750200e06fa473c76ff0468007291fa548e2d99f09ba9256fdb",
         "unexpected point encountered"
     );
diff --git a/ipa-multipoint/src/ipa.rs b/ipa-multipoint/src/ipa.rs
index a7cf168..caf1d36 100644
--- a/ipa-multipoint/src/ipa.rs
+++ b/ipa-multipoint/src/ipa.rs
@@ -114,7 +114,7 @@ pub fn create(
     let mut L_vec: Vec<Element> = Vec::with_capacity(num_rounds as usize);
     let mut R_vec: Vec<Element> = Vec::with_capacity(num_rounds as usize);
 
-    for k in 0..num_rounds {
+    for _k in 0..num_rounds {
         let (a_L, a_R) = halve(a);
         let (b_L, b_R) = halve(b);
         let (G_L, G_R) = halve(G);
@@ -140,9 +140,9 @@ pub fn create(
         let x = transcript.challenge_scalar(b"x");
         let x_inv = x.inverse().unwrap();
         for i in 0..a_L.len() {
-            a_L[i] = a_L[i] + x * a_R[i];
-            b_L[i] = b_L[i] + x_inv * b_R[i];
-            G_L[i] = G_L[i] + G_R[i] * x_inv;
+            a_L[i] += x * a_R[i];
+            b_L[i] += x_inv * b_R[i];
+            G_L[i] += G_R[i] * x_inv;
         }
 
         a = a_L;
@@ -221,8 +221,8 @@ impl IPAProof {
             let (b_L, b_R) = halve(b);
 
             for i in 0..G_L.len() {
-                G_L[i] = G_L[i] + G_R[i] * *x_inv;
-                b_L[i] = b_L[i] + b_R[i] * x_inv;
+                G_L[i] += G_R[i] * *x_inv;
+                b_L[i] += b_R[i] * x_inv;
             }
             G = G_L;
             b = b_L;
@@ -413,11 +413,11 @@ mod tests {
     use super::*;
     use crate::crs::CRS;
     use crate::math_utils::{inner_product, powers_of};
-    use ark_std::rand;
+    use ark_std::rand::SeedableRng;
    use ark_std::UniformRand;
     use rand_chacha::ChaCha20Rng;
-    use std::iter;
+
 
     #[test]
     fn test_create_IPAProof_proof() {
         let n = 8;
diff --git a/ipa-multipoint/src/lagrange_basis.rs b/ipa-multipoint/src/lagrange_basis.rs
index d431f06..5aca7f7 100644
--- a/ipa-multipoint/src/lagrange_basis.rs
+++ b/ipa-multipoint/src/lagrange_basis.rs
@@ -1,5 +1,5 @@
 use ark_ff::{batch_inversion, batch_inversion_and_mul, Field, One, Zero};
-use ark_poly::{univariate::DensePolynomial, Polynomial, UVPolynomial};
+use ark_poly::{univariate::DensePolynomial, UVPolynomial};
 use bandersnatch::Fr;
 use std::{
     convert::TryFrom,
@@ -25,8 +25,8 @@ impl Add<LagrangeBasis> for LagrangeBasis {
         }
         self.values
             .iter_mut()
-            .zip(rhs.values.into_iter())
-            .for_each(|(lhs, rhs)| *lhs = *lhs + rhs);
+            .zip(rhs.values)
+            .for_each(|(lhs, rhs)| *lhs += rhs);
         self
     }
 }
@@ -36,7 +36,7 @@ impl Mul<Fr> for LagrangeBasis {
     fn mul(mut self, rhs: Fr) -> Self::Output {
         self.values
             .iter_mut()
-            .for_each(|values| *values = *values * rhs);
+            .for_each(|values| *values *= rhs);
         self
     }
 }
@@ -46,7 +46,7 @@ impl Sub<&Fr> for LagrangeBasis {
     fn sub(mut self, rhs: &Fr) -> Self::Output {
         self.values
             .iter_mut()
-            .for_each(|values| *values = *values - rhs);
+            .for_each(|values| *values -= rhs);
         self
     }
 }
@@ -261,6 +261,8 @@ impl LagrangeBasis {
 
 #[test]
 fn basic_interpolation() {
+    use ark_poly::Polynomial;
+
     let p1 = Fr::from(8u128);
     let p2 = Fr::from(2u128);
     let lag_poly = LagrangeBasis::new(vec![p1, p2]);
@@ -276,6 +278,8 @@ fn basic_interpolation() {
 
 #[test]
 fn simple_eval_outside_domain() {
+    use ark_poly::Polynomial;
+
     let numerator_lag =
         LagrangeBasis::new(vec![-Fr::from(2), Fr::from(0), Fr::from(12), Fr::from(40)]);
     let numerator_coeff = numerator_lag.interpolate();
diff --git a/ipa-multipoint/src/lib.rs b/ipa-multipoint/src/lib.rs
index 30621d2..fb55271 100644
--- a/ipa-multipoint/src/lib.rs
+++ b/ipa-multipoint/src/lib.rs
@@ -4,7 +4,7 @@ pub mod math_utils;
 pub mod multiproof;
 pub mod transcript;
 
-pub(crate) use ipa::slow_vartime_multiscalar_mul;
+
 pub mod lagrange_basis;
diff --git a/ipa-multipoint/src/math_utils.rs b/ipa-multipoint/src/math_utils.rs
index e04f659..f64edba 100644
--- a/ipa-multipoint/src/math_utils.rs
+++ b/ipa-multipoint/src/math_utils.rs
@@ -1,4 +1,4 @@
-use ark_ff::{Field, One};
+use ark_ff::{One};
 use bandersnatch::Fr;
 /// Computes the inner product between two scalar vectors
 pub fn inner_product(a: &[Fr], b: &[Fr]) -> Fr {
@@ -19,14 +19,16 @@ fn simple_vandemonde() {
     use ark_std::test_rng;
     use ark_std::UniformRand;
+    use ark_ff::Field;
+
     let rand_fr = Fr::rand(&mut test_rng());
     let n = 100;
     let powers = powers_of(rand_fr, n);
 
     assert_eq!(powers[0], Fr::one());
-    assert_eq!(powers[n - 1], rand_fr.pow(&[(n - 1) as u64]));
+    assert_eq!(powers[n - 1], rand_fr.pow([(n - 1) as u64]));
 
     for (i, power) in powers.into_iter().enumerate() {
-        assert_eq!(power, rand_fr.pow(&[i as u64]))
+        assert_eq!(power, rand_fr.pow([i as u64]))
     }
 }
diff --git a/ipa-multipoint/src/multiproof.rs b/ipa-multipoint/src/multiproof.rs
index 3c180d4..30695c1 100644
--- a/ipa-multipoint/src/multiproof.rs
+++ b/ipa-multipoint/src/multiproof.rs
@@ -2,20 +2,20 @@
 #![allow(non_snake_case)]
 
 use crate::crs::CRS;
-use crate::ipa::{self, slow_vartime_multiscalar_mul, IPAProof};
+use crate::ipa::{slow_vartime_multiscalar_mul, IPAProof};
 use crate::lagrange_basis::{LagrangeBasis, PrecomputedWeights};
-use crate::math_utils::inner_product;
+
 use crate::math_utils::powers_of;
 use crate::transcript::Transcript;
 use crate::transcript::TranscriptProtocol;
-use ark_ec::{AffineCurve, ProjectiveCurve};
-use ark_ff::PrimeField;
-use ark_ff::{batch_inversion, Field};
-use ark_ff::{One, Zero};
-use ark_poly::{Polynomial, UVPolynomial};
+
+
+use ark_ff::{batch_inversion};
+use ark_ff::{Zero};
+
 use std::collections::HashMap;
-use banderwagon::{multi_scalar_mul, Element, Fr};
+use banderwagon::{Element, Fr};
 
 pub struct MultiPoint;
 
 #[derive(Clone, Debug)]
@@ -132,7 +132,7 @@ impl MultiPoint {
 
         let g1_x = aggregated_queries
             .into_iter()
-            .zip(g1_den.into_iter())
+            .zip(g1_den)
             .map(|((_, agg_f_x), den_inv)| {
                 let term: Vec<_> = agg_f_x
                     .values()
@@ -160,7 +160,7 @@ impl MultiPoint {
 
         MultiPointProof {
             open_proof: g_3_ipa,
-            g_x_comm: g_x_comm,
+            g_x_comm,
         }
     }
 }
@@ -235,7 +235,7 @@ impl MultiPointProof {
 
         let helper_scalars: Vec<_> = powers_of_r
             .iter()
-            .zip(g2_den.into_iter())
+            .zip(g2_den)
             .map(|(r_i, den_inv)| den_inv * r_i)
             .collect();
@@ -246,7 +246,7 @@ impl MultiPointProof {
             .sum();
 
         //4. Compute [g_1(X)] = E
-        let comms: Vec<_> = queries.into_iter().map(|query| query.commitment).collect();
+        let comms: Vec<_> = queries.iter().map(|query| query.commitment).collect();
         let g1_comm = slow_vartime_multiscalar_mul(helper_scalars.iter(), comms.iter());
 
         transcript.append_point(b"E", &g1_comm);
@@ -255,7 +255,7 @@ impl MultiPointProof {
         let g3_comm = g1_comm - self.g_x_comm;
 
         // Check IPA
-        let b = LagrangeBasis::evaluate_lagrange_coefficients(&precomp, crs.n, t); // TODO: we could put this as a method on PrecomputedWeights
+        let b = LagrangeBasis::evaluate_lagrange_coefficients(precomp, crs.n, t); // TODO: we could put this as a method on PrecomputedWeights
 
         self.open_proof
             .verify_multiexp(transcript, crs, b, g3_comm, t, g2_t)
@@ -280,6 +280,8 @@ pub(crate) fn open_point_outside_of_domain(
 
 #[test]
 fn open_multiproof_lagrange() {
+    use ark_std::One;
+
     let poly = LagrangeBasis::new(vec![
         Fr::one(),
         Fr::from(10u128),
@@ -318,6 +320,8 @@ fn open_multiproof_lagrange() {
 
 #[test]
 fn open_multiproof_lagrange_2_polys() {
+    use ark_std::One;
+
     let poly = LagrangeBasis::new(vec![
         Fr::one(),
         Fr::from(10u128),
@@ -342,7 +346,7 @@ fn open_multiproof_lagrange_2_polys() {
     };
     let prover_query_j = ProverQuery {
         commitment: poly_comm,
-        poly: poly,
+        poly,
         point: x_j,
         result: y_j,
     };
@@ -369,11 +373,12 @@ fn open_multiproof_lagrange_2_polys() {
 }
 
 #[test]
 fn test_ipa_consistency() {
+    use crate::math_utils::inner_product;
     use ark_serialize::CanonicalSerialize;
     let n = 256;
     let crs = CRS::new(n, b"eth_verkle_oct_2021");
     let precomp = PrecomputedWeights::new(n);
-    let input_point = Fr::from(2101 as u128);
+    let input_point = Fr::from(2101_u128);
     let poly: Vec<Fr> = (0..n).map(|i| Fr::from(((i % 32) + 1) as u128)).collect();
     let polynomial = LagrangeBasis::new(poly.clone());
@@ -398,7 +403,7 @@ fn test_ipa_consistency() {
 
     let mut bytes = [0u8; 32];
     p_challenge.serialize(&mut bytes[..]).unwrap();
     assert_eq!(
-        hex::encode(&bytes),
+        hex::encode(bytes),
"0a81881cbfd7d7197a54ebd67ed6a68b5867f3c783706675b34ece43e85e7306" ); @@ -437,6 +442,8 @@ fn test_ipa_consistency() { #[test] fn multiproof_consistency() { + use ark_std::One; + use ark_serialize::CanonicalSerialize; let n = 256; let crs = CRS::new(n, b"eth_verkle_oct_2021"); @@ -456,7 +463,7 @@ fn multiproof_consistency() { let y_a = Fr::one(); let point_b = 0; - let y_b = Fr::from(32 as u128); + let y_b = Fr::from(32_u128); let poly_comm_a = crs.commit_lagrange_poly(&polynomial_a); let poly_comm_b = crs.commit_lagrange_poly(&polynomial_b); @@ -486,7 +493,7 @@ fn multiproof_consistency() { let mut bytes = [0u8; 32]; p_challenge.serialize(&mut bytes[..]).unwrap(); assert_eq!( - hex::encode(&bytes), + hex::encode(bytes), "eee8a80357ff74b766eba39db90797d022e8d6dee426ded71234241be504d519" ); diff --git a/ipa-multipoint/src/transcript.rs b/ipa-multipoint/src/transcript.rs index 19f8c0e..f0da236 100644 --- a/ipa-multipoint/src/transcript.rs +++ b/ipa-multipoint/src/transcript.rs @@ -87,7 +87,7 @@ mod tests { #[test] fn test_vector_2() { let mut tr = Transcript::new(b"simple_protocol"); - let five = Fr::from(5 as u128); + let five = Fr::from(5_u128); tr.append_scalar(b"five", &five); tr.append_scalar(b"five again", &five); @@ -102,7 +102,7 @@ mod tests { #[test] fn test_vector_3() { let mut tr = Transcript::new(b"simple_protocol"); - let one = Fr::from(1 as u128); + let one = Fr::from(1_u128); let minus_one = -one; tr.append_scalar(b"-1", &minus_one); diff --git a/verkle-spec/src/lib.rs b/verkle-spec/src/lib.rs index ed94ac7..9d0bcad 100644 --- a/verkle-spec/src/lib.rs +++ b/verkle-spec/src/lib.rs @@ -38,7 +38,7 @@ pub fn hash64(bytes64: [u8; 64]) -> H256 { Element, }; - let committer = TestCommitter::default(); + let committer = TestCommitter; let mut result = Element::zero(); let inputs = crate::util::chunk64(bytes64); diff --git a/verkle-trie/Cargo.toml b/verkle-trie/Cargo.toml index bc81942..f57a159 100644 --- a/verkle-trie/Cargo.toml +++ b/verkle-trie/Cargo.toml @@ -25,7 +25,7 @@ sha2 = "0.9.3" itertools = "0.10.1" [dev-dependencies] -criterion = "0.3.4" +criterion = "0.5.1" tempfile = "3.2.0" [[bench]] diff --git a/verkle-trie/src/database/default.rs b/verkle-trie/src/database/default.rs index 328cd6b..5307064 100644 --- a/verkle-trie/src/database/default.rs +++ b/verkle-trie/src/database/default.rs @@ -144,7 +144,6 @@ impl ReadOnlyHigherDb for VerkleDb { .storage .get_branch_children(branch_id) .into_iter() - .map(|(index, val)| (index, val)) .collect(); // // Then get the children from the batch @@ -157,7 +156,6 @@ impl ReadOnlyHigherDb for VerkleDb { } children .into_iter() - .map(|(index, val)| (index, val)) .collect() } @@ -176,7 +174,6 @@ impl ReadOnlyHigherDb for VerkleDb { .storage .get_stem_children(stem_key) .into_iter() - .map(|(index, val)| (index, val)) .collect(); // // Then get the children from the batch @@ -189,7 +186,6 @@ impl ReadOnlyHigherDb for VerkleDb { } children .into_iter() - .map(|(index, val)| (index, val)) .collect() } } diff --git a/verkle-trie/src/proof/prover.rs b/verkle-trie/src/proof/prover.rs index eab9b84..29efc4f 100644 --- a/verkle-trie/src/proof/prover.rs +++ b/verkle-trie/src/proof/prover.rs @@ -86,10 +86,9 @@ pub(super) fn create_prover_queries( } // Values to help the verifier reconstruct the trie and verify the proof - let depths: Vec<_> = depths_by_stem.into_values().into_iter().collect(); + let depths: Vec<_> = depths_by_stem.into_values().collect(); let extension_present: Vec<_> = extension_present_by_stem .into_values() - .into_iter() 
         .collect();
 
     (
diff --git a/verkle-trie/src/proof/stateless_updater.rs b/verkle-trie/src/proof/stateless_updater.rs
index 7c8e47f..0fcf185 100644
--- a/verkle-trie/src/proof/stateless_updater.rs
+++ b/verkle-trie/src/proof/stateless_updater.rs
@@ -104,7 +104,7 @@ pub(crate) fn update_root(
         let prefix = stem[0..depth as usize].to_vec();
         updated_stems_by_prefix
             .entry(prefix.clone())
-            .or_insert_with(HashSet::new)
+            .or_default()
             .insert(stem);
 
         if ext_pres == ExtPresent::Present {
@@ -189,7 +189,7 @@ pub(crate) fn update_root(
             let other_stem = hint.other_stems_by_prefix[&prefix];
             updated_stems_by_prefix
                 .entry(prefix)
-                .or_insert_with(HashSet::new)
+                .or_default()
                 .insert(other_stem);
 
             // Since this stem was not present in the trie, we need to make its initial stem commitment
@@ -485,8 +485,8 @@ fn build_subtree(
         }
     };
 
-    while !path.is_empty() {
-        let child_index = path.pop().unwrap();
+    while let Some(child_index) = path.pop() {
+
         let parent_old_comm = tree.get(&path).unwrap().inner().commitment;
@@ -533,12 +533,12 @@ impl SparseVerkleTree {
 
         // Now lets fetch the parent node's commitment and recursively update each parent
-        while !prefix.is_empty() {
+        while let Some(child_index) = prefix.pop() {
             // Safety: Fine unwrap because we've checked prefix isn't empty
             // If we have never updated the parent node before,
             // then it will be the old commitment
             // If we have then it will be in updated commitments
-            let child_index = prefix.pop().unwrap();
+
             let parent_comm = self.updated_commitments_by_path.get(&prefix);
             let old_parent_comm = match parent_comm {
                 Some(comm) => *comm,
@@ -611,7 +611,7 @@ mod test {
             values,
             vec![Some([0u8; 32]), None],
             meta.commitment,
-            TestCommitter::default(),
+            TestCommitter,
         );
 
         let mut got_bytes = [0u8; 32];
@@ -658,7 +658,7 @@ mod test {
             values,
             updated_values,
             meta.commitment,
-            TestCommitter::default(),
+            TestCommitter,
         );
 
         let mut got_bytes = [0u8; 32];
@@ -710,7 +710,7 @@ mod test {
             values,
             updated_values,
             meta.commitment,
-            TestCommitter::default(),
+            TestCommitter,
         );
 
         let mut got_bytes = [0u8; 32];
diff --git a/verkle-trie/src/proof/verifier.rs b/verkle-trie/src/proof/verifier.rs
index 64ced28..6ff19b9 100644
--- a/verkle-trie/src/proof/verifier.rs
+++ b/verkle-trie/src/proof/verifier.rs
@@ -188,7 +188,6 @@ pub fn create_verifier_queries(
     let commitments_by_path: BTreeMap<Vec<u8>, Element> = all_paths
         .into_iter()
         .zip(commitments_sorted_by_path)
-        .map(|(path, comm)| (path, comm))
         .collect();
     let commitment_by_path_and_z: BTreeMap<_, _> = all_paths_and_zs
         .iter()
diff --git a/verkle-trie/tests/trie_fuzzer.rs b/verkle-trie/tests/trie_fuzzer.rs
index 9b7b9d2..5222ab2 100644
--- a/verkle-trie/tests/trie_fuzzer.rs
+++ b/verkle-trie/tests/trie_fuzzer.rs
@@ -1,23 +1,26 @@
+use std::sync::Mutex;
+
 use once_cell::sync::Lazy;
 use verkle_trie::{
     committer::precompute::PrecomputeLagrange, database::memory_db::MemoryDb, Trie, TrieTrait,
     VerkleConfig,
 };
-pub static CONFIG: Lazy<VerkleConfig<MemoryDb>> = Lazy::new(|| {
-    match VerkleConfig::new(MemoryDb::new()) {
+
+pub static CONFIG: Lazy<Mutex<VerkleConfig<MemoryDb>>> = Lazy::new(|| {
+    Mutex::new(match VerkleConfig::new(MemoryDb::new()) {
         Ok(config) => config,
         Err(_) => {
             // An error means that the file was already created
             // Lets call open instead
             VerkleConfig::open(MemoryDb::new()).expect("should be infallible")
         }
-    }
+    })
 });
 
 #[test]
 fn test_vector_insert_100_step() {
     let mut prng = BasicPRNG::default();
-    let mut trie = Trie::new(CONFIG.clone());
+    let mut trie = Trie::new(CONFIG.lock().unwrap().clone());
     let batch_size = 100;
     // N = 100
     step_test_helper(
@@ -60,7 +63,7 @@ fn test_vector_insert_100_step() {
 
 #[test]
 fn test_vector_insert_1000_step() {
     let mut prng = BasicPRNG::default();
-    let mut trie = Trie::new(CONFIG.clone());
+    let mut trie = Trie::new(CONFIG.lock().unwrap().clone());
     let batch_size = 1_000;
     // N = 1_000