From 7774040264bc2b3859dae002546332524dbbd85f Mon Sep 17 00:00:00 2001
From: Daniel Bloom <7810950-Daniel.Aaron.Bloom@users.noreply.gitlab.com>
Date: Tue, 9 Apr 2024 19:37:18 -0700
Subject: [PATCH 01/11] feat: add iter support to hashing

---
 plonky2/Cargo.toml                |  2 +-
 plonky2/src/gates/lookup.rs       |  3 +-
 plonky2/src/gates/lookup_table.rs |  3 +-
 plonky2/src/hash/hash_types.rs    | 11 +++++--
 plonky2/src/hash/hashing.rs       | 33 +++++++++++++++++++++
 plonky2/src/hash/keccak.rs        | 48 ++++++++++++++++++++++++-------
 plonky2/src/hash/poseidon.rs      | 12 ++++++++
 plonky2/src/hash/poseidon2.rs     | 15 +++++++++-
 plonky2/src/plonk/config.rs       | 19 ++++++++++--
 9 files changed, 125 insertions(+), 21 deletions(-)

diff --git a/plonky2/Cargo.toml b/plonky2/Cargo.toml
index 845a3bffbb..d3fe0df02c 100644
--- a/plonky2/Cargo.toml
+++ b/plonky2/Cargo.toml
@@ -23,7 +23,7 @@ ahash = { workspace = true }
 anyhow = { workspace = true }
 hashbrown = { workspace = true }
 itertools = { workspace = true }
-keccak-hash = { version = "0.10.0", default-features = false }
+tiny-keccak = { version = "2.0.0", features = ["keccak"] }
 log = { workspace = true }
 num = { workspace = true }
 rand = { workspace = true }
diff --git a/plonky2/src/gates/lookup.rs b/plonky2/src/gates/lookup.rs
index 23a0fd8742..d552da305e 100644
--- a/plonky2/src/gates/lookup.rs
+++ b/plonky2/src/gates/lookup.rs
@@ -8,10 +8,9 @@ use alloc::{
 use core::usize;
 
 use itertools::Itertools;
-use keccak_hash::keccak;
 
 use super::lookup_table::LookupTable;
-use crate::field::extension::Extendable;
+use crate::{field::extension::Extendable, hash::keccak::keccak};
 use crate::field::packed::PackedField;
 use crate::gates::gate::Gate;
 use crate::gates::packed_util::PackedEvaluableBase;
diff --git a/plonky2/src/gates/lookup_table.rs b/plonky2/src/gates/lookup_table.rs
index 9a4d08c83b..080ac228e7 100644
--- a/plonky2/src/gates/lookup_table.rs
+++ b/plonky2/src/gates/lookup_table.rs
@@ -11,10 +11,9 @@ use core::usize;
 use std::sync::Arc;
 
 use itertools::Itertools;
-use keccak_hash::keccak;
 use plonky2_util::ceil_div_usize;
 
-use crate::field::extension::Extendable;
+use crate::{field::extension::Extendable, hash::keccak::keccak};
 use crate::field::packed::PackedField;
 use crate::gates::gate::Gate;
 use crate::gates::packed_util::PackedEvaluableBase;
diff --git a/plonky2/src/hash/hash_types.rs b/plonky2/src/hash/hash_types.rs
index 5540116122..23835b4706 100644
--- a/plonky2/src/hash/hash_types.rs
+++ b/plonky2/src/hash/hash_types.rs
@@ -102,6 +102,10 @@ impl<F: RichField> GenericHashOut<F> for HashOut<F> {
         }
     }
 
+    fn into_iter(&self) -> impl Iterator<Item = F> {
+        self.elements.into_iter()
+    }
+
     fn to_vec(&self) -> Vec<F> {
         self.elements.to_vec()
     }
@@ -180,7 +184,7 @@ impl<F: RichField, const N: usize> GenericHashOut<F> for BytesHash<N> {
         Self(bytes.try_into().unwrap())
     }
 
-    fn to_vec(&self) -> Vec<F> {
+    fn into_iter(&self) -> impl Iterator<Item = F> {
         self.0
             // Chunks of 7 bytes since 8 bytes would allow collisions.
             .chunks(7)
@@ -189,7 +193,10 @@ impl<F: RichField, const N: usize> GenericHashOut<F> for BytesHash<N> {
                 arr[..bytes.len()].copy_from_slice(bytes);
                 F::from_canonical_u64(u64::from_le_bytes(arr))
             })
-            .collect()
+    }
+
+    fn to_vec(&self) -> Vec<F> {
+        self.into_iter().collect()
     }
 }
 
diff --git a/plonky2/src/hash/hashing.rs b/plonky2/src/hash/hashing.rs
index 75b35a93d6..263a9344d8 100644
--- a/plonky2/src/hash/hashing.rs
+++ b/plonky2/src/hash/hashing.rs
@@ -91,6 +91,9 @@ pub trait PlonkyPermutation<T: Copy + Debug + Default + Eq + Send + Sync>:
 
     /// Return a slice of `RATE` elements
     fn squeeze(&self) -> &[T];
+
+    /// Return an array of `RATE` elements
+    fn squeeze_iter(self) -> impl IntoIterator<Item = T>+Copy;
 }
 
 /// A one-way compression function which takes two ~256 bit inputs and returns a ~256 bit output.
@@ -140,6 +143,36 @@ pub fn hash_n_to_m_no_pad<F: RichField, P: PlonkyPermutation<F>>(
     }
 }
 
+/// Hash a message without any padding step. Note that this can enable length-extension attacks.
+/// However, it is still collision-resistant in cases where the input has a fixed length.
+pub fn hash_n_to_m_no_pad_iter<F: RichField, P: PlonkyPermutation<F>, I: IntoIterator<Item = F>>(
+    inputs: I,
+) -> impl Iterator<Item = F> {
+    let mut perm = P::new(core::iter::repeat(F::ZERO));
+
+    // Absorb all input chunks.
+    let mut inputs = inputs.into_iter().peekable();
+    while inputs.peek().is_some() {
+        let input_chunk = inputs.by_ref().take(P::RATE);
+        perm.set_from_iter(input_chunk, 0);
+        perm.permute();
+    }
+
+    let mut first = true;
+    core::iter::repeat_with(move || {
+        if !first {
+            perm.permute()
+        }
+        first = false;
+        perm.squeeze_iter()
+    }).flatten()
+}
+
 pub fn hash_n_to_hash_no_pad<F: RichField, P: PlonkyPermutation<F>>(inputs: &[F]) -> HashOut<F> {
     HashOut::from_vec(hash_n_to_m_no_pad::<F, P>(inputs, NUM_HASH_OUT_ELTS))
 }
+
+pub fn hash_n_to_hash_no_pad_iter<F: RichField, P: PlonkyPermutation<F>, I: IntoIterator<Item = F>>(inputs: I) -> HashOut<F> {
+    let mut elements = hash_n_to_m_no_pad_iter::<F, P, I>(inputs);
+    HashOut{ elements: std::array::from_fn(|_| elements.next().unwrap()) }
+}
diff --git a/plonky2/src/hash/keccak.rs b/plonky2/src/hash/keccak.rs
index d3fa8c4b29..0eead8d712 100644
--- a/plonky2/src/hash/keccak.rs
+++ b/plonky2/src/hash/keccak.rs
@@ -1,14 +1,14 @@
 #[cfg(not(feature = "std"))]
 use alloc::{vec, vec::Vec};
+use core::borrow::Borrow;
 use core::mem::size_of;
 
 use itertools::Itertools;
-use keccak_hash::keccak;
+use tiny_keccak::{Hasher as KeccakHasher, Keccak};
 
 use crate::hash::hash_types::{BytesHash, RichField};
 use crate::hash::hashing::PlonkyPermutation;
 use crate::plonk::config::Hasher;
-use crate::util::serialization::Write;
 
 pub const SPONGE_RATE: usize = 8;
 pub const SPONGE_CAPACITY: usize = 4;
@@ -68,7 +68,7 @@ impl<F: RichField> PlonkyPermutation<F> for KeccakPermutation<F> {
         }
 
         let hash_onion = core::iter::repeat_with(|| {
-            let output = keccak(state_bytes.clone()).to_fixed_bytes();
+            let output = keccak(state_bytes.clone()).0;
             state_bytes = output.to_vec();
             output
         });
@@ -96,6 +96,12 @@ impl<F: RichField> PlonkyPermutation<F> for KeccakPermutation<F> {
     fn squeeze(&self) -> &[F] {
         &self.state[..Self::RATE]
     }
+
+    fn squeeze_iter(self) -> impl IntoIterator<Item = F>+Copy {
+        let mut vals = [F::default(); SPONGE_RATE];
+        vals.copy_from_slice(self.squeeze());
+        vals
+    }
 }
 
 /// Keccak-256 hash function.
@@ -106,21 +112,41 @@ impl<F: RichField, const N: usize> Hasher<F> for KeccakHash<N> {
     type Hash = BytesHash<N>;
     type Permutation = KeccakPermutation<F>;
 
-    fn hash_no_pad(input: &[F]) -> Self::Hash {
-        let mut buffer = Vec::with_capacity(input.len());
-        buffer.write_field_vec(input).unwrap();
+    fn hash_no_pad_iter<I: IntoIterator<Item = F>>(input: I) -> Self::Hash {
+        let mut keccak256 = Keccak::v256();
+        for x in input.into_iter() {
+            let b = x.borrow().to_canonical_u64().to_le_bytes();
+            keccak256.update(&b);
+        }
+
+        let mut hash_bytes = [0u8; 32];
+        keccak256.finalize(&mut hash_bytes);
+
         let mut arr = [0; N];
-        let hash_bytes = keccak(buffer).0;
         arr.copy_from_slice(&hash_bytes[..N]);
         BytesHash(arr)
     }
 
     fn two_to_one(left: Self::Hash, right: Self::Hash) -> Self::Hash {
-        let mut v = vec![0; N * 2];
-        v[0..N].copy_from_slice(&left.0);
-        v[N..].copy_from_slice(&right.0);
+        let mut keccak256 = Keccak::v256();
+        keccak256.update(&left.0);
+        keccak256.update(&right.0);
+
+        let mut hash_bytes = [0u8; 32];
+        keccak256.finalize(&mut hash_bytes);
+
         let mut arr = [0; N];
-        arr.copy_from_slice(&keccak(v).0[..N]);
+        arr.copy_from_slice(&hash_bytes[..N]);
         BytesHash(arr)
     }
 }
+
+pub fn keccak<T: AsRef<[u8]>>(s: T) -> BytesHash<32> {
+    let mut keccak256 = Keccak::v256();
+    keccak256.update(s.as_ref());
+
+    let mut hash_bytes = [0u8; 32];
+    keccak256.finalize(&mut hash_bytes);
+
+    BytesHash(hash_bytes)
+}
diff --git a/plonky2/src/hash/poseidon.rs b/plonky2/src/hash/poseidon.rs
index a7c763252e..2f2b932c0b 100644
--- a/plonky2/src/hash/poseidon.rs
+++ b/plonky2/src/hash/poseidon.rs
@@ -20,6 +20,8 @@ use crate::iop::target::{BoolTarget, Target};
 use crate::plonk::circuit_builder::CircuitBuilder;
 use crate::plonk::config::{AlgebraicHasher, Hasher};
 
+use super::hashing::hash_n_to_hash_no_pad_iter;
+
 pub const SPONGE_RATE: usize = 8;
 pub const SPONGE_CAPACITY: usize = 4;
 pub const SPONGE_WIDTH: usize = SPONGE_RATE + SPONGE_CAPACITY;
@@ -867,6 +869,12 @@ impl<T: Copy + Debug + Default + Eq + Permuter + Send + Sync> PlonkyPermutation<T>
     fn squeeze(&self) -> &[T] {
         &self.state[..Self::RATE]
     }
+
+    fn squeeze_iter(self) -> impl IntoIterator<Item = T>+Copy {
+        let mut vals = [T::default(); SPONGE_RATE];
+        vals.copy_from_slice(self.squeeze());
+        vals
+    }
 }
 
 /// Poseidon hash function.
@@ -881,6 +889,10 @@ impl<F: RichField> Hasher<F> for PoseidonHash {
         hash_n_to_hash_no_pad::<F, Self::Permutation>(input)
     }
 
+    fn hash_no_pad_iter<I: IntoIterator<Item = F>>(input: I) -> Self::Hash {
+        hash_n_to_hash_no_pad_iter::<F, Self::Permutation, I>(input)
+    }
+
     fn two_to_one(left: Self::Hash, right: Self::Hash) -> Self::Hash {
         compress::<F, Self::Permutation>(left, right)
     }
diff --git a/plonky2/src/hash/poseidon2.rs b/plonky2/src/hash/poseidon2.rs
index 5336c714e3..b4cba31054 100644
--- a/plonky2/src/hash/poseidon2.rs
+++ b/plonky2/src/hash/poseidon2.rs
@@ -20,6 +20,8 @@ use crate::iop::target::{BoolTarget, Target};
 use crate::plonk::circuit_builder::CircuitBuilder;
 use crate::plonk::config::{AlgebraicHasher, Hasher};
 
+use super::hashing::hash_n_to_hash_no_pad_iter;
+
 // The number of full rounds and partial rounds is given by the
 // calc_round_numbers.py script. They happen to be the same for both
 // width 8 and width 12 with s-box x^7.
@@ -31,6 +33,7 @@ pub const ROUND_F_BEGIN: usize = 4;
 pub const ROUND_F_END: usize = 2 * ROUND_F_BEGIN;
 pub const ROUND_P: usize = 22;
 pub const ROUNDS: usize = ROUND_F_END + ROUND_P;
+pub const RATE: usize = 8;
 pub const WIDTH: usize = 12; // we only have width 8 and 12, and 12 is bigger. :)
 
 pub trait Poseidon2: PrimeField64 {
@@ -522,7 +525,7 @@ impl<T> AsRef<[T]> for Poseidon2Permutation<T> {
 
 impl<T: Copy + Debug + Default + Eq + Permuter + Send + Sync> PlonkyPermutation<T>
     for Poseidon2Permutation<T>
 {
-    const RATE: usize = 8;
+    const RATE: usize = RATE;
     const WIDTH: usize = WIDTH;
 
     fn new<I: IntoIterator<Item = T>>(elts: I) -> Self {
@@ -556,6 +559,12 @@ impl<T: Copy + Debug + Default + Eq + Permuter + Send + Sync> PlonkyPermutation<T>
     fn squeeze(&self) -> &[T] {
         &self.state[..Self::RATE]
     }
+
+    fn squeeze_iter(self) -> impl IntoIterator<Item = T>+Copy {
+        let mut vals = [T::default(); RATE];
+        vals.copy_from_slice(self.squeeze());
+        vals
+    }
 }
 
 /// Poseidon2 hash function.
@@ -570,6 +579,10 @@ impl<F: RichField> Hasher<F> for Poseidon2Hash {
         hash_n_to_hash_no_pad::<F, Self::Permutation>(input)
     }
 
+    fn hash_no_pad_iter<I: IntoIterator<Item = F>>(input: I) -> Self::Hash {
+        hash_n_to_hash_no_pad_iter::<F, Self::Permutation, I>(input)
+    }
+
     fn two_to_one(left: Self::Hash, right: Self::Hash) -> Self::Hash {
         compress::<F, Self::Permutation>(left, right)
     }
diff --git a/plonky2/src/plonk/config.rs b/plonky2/src/plonk/config.rs
index e75ed2b904..35d61eb984 100644
--- a/plonky2/src/plonk/config.rs
+++ b/plonky2/src/plonk/config.rs
@@ -8,6 +8,7 @@
 #[cfg(not(feature = "std"))]
 use alloc::{vec, vec::Vec};
+use itertools::chain;
 use core::fmt::Debug;
 
 use serde::de::DeserializeOwned;
 use serde::{Deserialize, Serialize};
@@ -30,6 +31,7 @@ pub trait GenericHashOut<F: RichField>:
     fn to_bytes(&self) -> Vec<u8>;
     fn from_bytes(bytes: &[u8]) -> Self;
 
+    fn into_iter(&self) -> impl Iterator<Item = F>;
     fn to_vec(&self) -> Vec<F>;
 }
 
@@ -46,10 +48,21 @@ pub trait Hasher<F: RichField>: Sized + Copy + Debug + Eq + PartialEq {
 
     /// Hash a message without any padding step. Note that this can enable length-extension attacks.
     /// However, it is still collision-resistant in cases where the input has a fixed length.
-    fn hash_no_pad(input: &[F]) -> Self::Hash;
+    fn hash_no_pad(input: &[F]) -> Self::Hash {
+        Self::hash_no_pad_iter(input.into_iter().cloned())
+    }
+
+    /// Hash a message without any padding step. Note that this can enable length-extension attacks.
+    /// However, it is still collision-resistant in cases where the input has a fixed length.
+    fn hash_no_pad_iter<I: IntoIterator<Item = F>>(input: I) -> Self::Hash;
 
     /// Pad the message using the `pad10*1` rule, then hash it.
     fn hash_pad(input: &[F]) -> Self::Hash {
+        let len
+        chain!(input.into_iter().cloned(),
+            [F::One],
+            0..((padded_input.len() + 1) % Self::Permutation::RATE)
+        )
         let mut padded_input = input.to_vec();
         padded_input.push(F::ONE);
         while (padded_input.len() + 1) % Self::Permutation::RATE != 0 {
             padded_input.push(F::ZERO);
         }
@@ -74,7 +87,9 @@ pub trait Hasher<F: RichField>: Sized + Copy + Debug + Eq + PartialEq {
         }
     }
 
-    fn two_to_one(left: Self::Hash, right: Self::Hash) -> Self::Hash;
+    fn two_to_one(left: Self::Hash, right: Self::Hash) -> Self::Hash {
+        Self::hash_no_pad_iter(chain(left.into_iter(), right.into_iter()))
+    }
 }
 
 /// Trait for algebraic hash functions, built from a permutation using the sponge construction.
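The streaming API this first patch introduces can be exercised without collecting inputs into a
`Vec` first. A minimal usage sketch, not part of the series, assuming a Goldilocks field with the
`PoseidonHash` configuration and the trait methods exactly as declared above; since the slice-based
`hash_no_pad` and the iterator-based `hash_no_pad_iter` absorb the same element stream into the same
sponge, they should agree:

    use plonky2::field::goldilocks_field::GoldilocksField as F;
    use plonky2::field::types::Field;
    use plonky2::hash::poseidon::PoseidonHash;
    use plonky2::plonk::config::Hasher;

    fn main() {
        // Existing slice-based entry point: materializes the input first.
        let elems: Vec<F> = (0..100u64).map(F::from_canonical_u64).collect();
        let h1 = PoseidonHash::hash_no_pad(&elems);

        // New iterator-based entry point: streams field elements directly,
        // with no intermediate allocation.
        let h2 = PoseidonHash::hash_no_pad_iter((0..100u64).map(F::from_canonical_u64));

        assert_eq!(h1, h2);
    }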
From 68e7fa983043c5465528dada6ba3ce1ab8ba0dbc Mon Sep 17 00:00:00 2001
From: Daniel Bloom <7810950-Daniel.Aaron.Bloom@users.noreply.gitlab.com>
Date: Fri, 12 Apr 2024 17:42:25 -0700
Subject: [PATCH 02/11] replace vec with iter

---
 plonky2/src/gates/lookup.rs           |  3 +-
 plonky2/src/gates/lookup_table.rs     |  3 +-
 plonky2/src/hash/hash_types.rs        | 71 ++++++++++++++++-----------
 plonky2/src/hash/hashing.rs           | 17 +++++--
 plonky2/src/hash/keccak.rs            | 31 +++++++-----
 plonky2/src/hash/merkle_tree.rs       |  4 +-
 plonky2/src/hash/poseidon.rs          | 22 ++++-----
 plonky2/src/hash/poseidon2.rs         |  5 +-
 plonky2/src/iop/challenger.rs         |  8 ++-
 plonky2/src/plonk/circuit_builder.rs  | 10 ++--
 plonky2/src/plonk/config.rs           | 47 +++++++++---------
 plonky2/src/util/serialization/mod.rs |  2 +-
 12 files changed, 126 insertions(+), 97 deletions(-)

diff --git a/plonky2/src/gates/lookup.rs b/plonky2/src/gates/lookup.rs
index d552da305e..2007292b1a 100644
--- a/plonky2/src/gates/lookup.rs
+++ b/plonky2/src/gates/lookup.rs
@@ -10,12 +10,13 @@ use core::usize;
 use itertools::Itertools;
 
 use super::lookup_table::LookupTable;
-use crate::{field::extension::Extendable, hash::keccak::keccak};
+use crate::field::extension::Extendable;
 use crate::field::packed::PackedField;
 use crate::gates::gate::Gate;
 use crate::gates::packed_util::PackedEvaluableBase;
 use crate::gates::util::StridedConstraintConsumer;
 use crate::hash::hash_types::RichField;
+use crate::hash::keccak::keccak;
 use crate::iop::ext_target::ExtensionTarget;
 use crate::iop::generator::{GeneratedValues, SimpleGenerator, WitnessGeneratorRef};
 use crate::iop::target::Target;
diff --git a/plonky2/src/gates/lookup_table.rs b/plonky2/src/gates/lookup_table.rs
index 080ac228e7..ff01e2bc57 100644
--- a/plonky2/src/gates/lookup_table.rs
+++ b/plonky2/src/gates/lookup_table.rs
@@ -13,12 +13,13 @@ use std::sync::Arc;
 use itertools::Itertools;
 use plonky2_util::ceil_div_usize;
 
-use crate::{field::extension::Extendable, hash::keccak::keccak};
+use crate::field::extension::Extendable;
 use crate::field::packed::PackedField;
 use crate::gates::gate::Gate;
 use crate::gates::packed_util::PackedEvaluableBase;
 use crate::gates::util::StridedConstraintConsumer;
 use crate::hash::hash_types::RichField;
+use crate::hash::keccak::keccak;
 use crate::iop::ext_target::ExtensionTarget;
 use crate::iop::generator::{GeneratedValues, SimpleGenerator, WitnessGeneratorRef};
 use crate::iop::target::Target;
diff --git a/plonky2/src/hash/hash_types.rs b/plonky2/src/hash/hash_types.rs
index 23835b4706..a63b04b672 100644
--- a/plonky2/src/hash/hash_types.rs
+++ b/plonky2/src/hash/hash_types.rs
@@ -1,3 +1,5 @@
+use core::borrow::{Borrow, BorrowMut};
+
 #[cfg(not(feature = "std"))]
 use alloc::vec::Vec;
 
@@ -83,31 +85,42 @@ where
 }
 
 impl<F: RichField> GenericHashOut<F> for HashOut<F> {
-    fn to_bytes(&self) -> Vec<u8> {
-        self.elements
-            .into_iter()
-            .flat_map(|x| x.to_canonical_u64().to_le_bytes())
-            .collect()
+    fn to_bytes(self) -> impl AsRef<[u8]>+AsMut<[u8]>+Borrow<[u8]>+BorrowMut<[u8]>+Copy {
+        let mut bytes = [0u8; NUM_HASH_OUT_ELTS*8];
+        for (i, x) in self.elements.into_iter().enumerate() {
+            let i = i*8;
+            bytes[i..i+8].copy_from_slice(&x.to_canonical_u64().to_le_bytes())
+        }
+        bytes
     }
 
     fn from_bytes(bytes: &[u8]) -> Self {
+        let mut bytes = bytes
+            .chunks(8)
+            .take(NUM_HASH_OUT_ELTS)
+            .map(|x| F::from_canonical_u64(u64::from_le_bytes(x.try_into().unwrap())));
+        HashOut {
+            elements: [(); NUM_HASH_OUT_ELTS].map(|()| bytes.next().unwrap()),
+        }
+    }
+
+    fn from_byte_iter(mut bytes: impl Iterator<Item = u8>) -> Self {
+        let bytes = [[(); 8]; NUM_HASH_OUT_ELTS].map(|b| b.map(|()| bytes.next().unwrap()));
+
         HashOut {
             elements: bytes
-                .chunks(8)
-                .take(NUM_HASH_OUT_ELTS)
-                .map(|x| F::from_canonical_u64(u64::from_le_bytes(x.try_into().unwrap())))
-                .collect::<Vec<_>>()
-                .try_into()
-                .unwrap(),
+                .map(|x| F::from_canonical_u64(u64::from_le_bytes(x.try_into().unwrap()))),
         }
     }
 
-    fn into_iter(&self) -> impl Iterator<Item = F> {
-        self.elements.into_iter()
+    fn from_iter(mut inputs: impl Iterator<Item = F>) -> Self {
+        HashOut {
+            elements: [(); NUM_HASH_OUT_ELTS].map(|()| inputs.next().unwrap()),
+        }
     }
 
-    fn to_vec(&self) -> Vec<F> {
-        self.elements.to_vec()
+    fn into_iter(self) -> impl Iterator<Item = F> {
+        self.elements.into_iter()
     }
 }
 
@@ -176,27 +189,29 @@ impl<const N: usize> Sample for BytesHash<N> {
 }
 
 impl<F: RichField, const N: usize> GenericHashOut<F> for BytesHash<N> {
-    fn to_bytes(&self) -> Vec<u8> {
-        self.0.to_vec()
+    fn to_bytes(self) -> impl AsRef<[u8]>+AsMut<[u8]>+Borrow<[u8]>+BorrowMut<[u8]>+Copy {
+        self.0
     }
 
     fn from_bytes(bytes: &[u8]) -> Self {
         Self(bytes.try_into().unwrap())
     }
 
-    fn into_iter(&self) -> impl Iterator<Item = F> {
-        self.0
-            // Chunks of 7 bytes since 8 bytes would allow collisions.
-            .chunks(7)
-            .map(|bytes| {
-                let mut arr = [0; 8];
-                arr[..bytes.len()].copy_from_slice(bytes);
-                F::from_canonical_u64(u64::from_le_bytes(arr))
-            })
+    fn from_byte_iter(mut bytes: impl Iterator<Item = u8>) -> Self {
+        Self([(); N].map(|()| bytes.next().unwrap()))
     }
 
-    fn to_vec(&self) -> Vec<F> {
-        self.into_iter().collect()
+    fn into_iter(self) -> impl Iterator<Item = F> {
+        // Chunks of 7 bytes since 8 bytes would allow collisions.
+        const STRIDE: usize = 7;
+
+        (0..((N+STRIDE-1)/STRIDE)).map(move |i| {
+            let mut arr = [0; 8];
+            let i = i*STRIDE;
+            let bytes = &self.0[i..std::cmp::min(i+STRIDE, N)];
+            arr[..bytes.len()].copy_from_slice(bytes);
+            F::from_canonical_u64(u64::from_le_bytes(arr))
+        })
     }
 }
 
diff --git a/plonky2/src/hash/hashing.rs b/plonky2/src/hash/hashing.rs
index 263a9344d8..234e8c82e0 100644
--- a/plonky2/src/hash/hashing.rs
+++ b/plonky2/src/hash/hashing.rs
@@ -93,7 +93,7 @@ pub trait PlonkyPermutation<T: Copy + Debug + Default + Eq + Send + Sync>:
     fn squeeze(&self) -> &[T];
 
     /// Return an array of `RATE` elements
-    fn squeeze_iter(self) -> impl IntoIterator<Item = T>+Copy;
+    fn squeeze_iter(self) -> impl IntoIterator<Item = T> + Copy;
 }
 
 /// A one-way compression function which takes two ~256 bit inputs and returns a ~256 bit output.
@@ -165,14 +165,23 @@ pub fn hash_n_to_m_no_pad_iter<F: RichField, P: PlonkyPermutation<F>, I: IntoIterator<Item = F>>(
         }
         first = false;
         perm.squeeze_iter()
-    }).flatten()
+    })
+    .flatten()
 }
 
 pub fn hash_n_to_hash_no_pad<F: RichField, P: PlonkyPermutation<F>>(inputs: &[F]) -> HashOut<F> {
     HashOut::from_vec(hash_n_to_m_no_pad::<F, P>(inputs, NUM_HASH_OUT_ELTS))
 }
 
-pub fn hash_n_to_hash_no_pad_iter<F: RichField, P: PlonkyPermutation<F>, I: IntoIterator<Item = F>>(inputs: I) -> HashOut<F> {
+pub fn hash_n_to_hash_no_pad_iter<
+    F: RichField,
+    P: PlonkyPermutation<F>,
+    I: IntoIterator<Item = F>,
+>(
+    inputs: I,
+) -> HashOut<F> {
     let mut elements = hash_n_to_m_no_pad_iter::<F, P, I>(inputs);
-    HashOut{ elements: std::array::from_fn(|_| elements.next().unwrap()) }
+    HashOut {
+        elements: std::array::from_fn(|_| elements.next().unwrap()),
+    }
 }
diff --git a/plonky2/src/hash/keccak.rs b/plonky2/src/hash/keccak.rs
index 0eead8d712..3a0552ea8c 100644
--- a/plonky2/src/hash/keccak.rs
+++ b/plonky2/src/hash/keccak.rs
@@ -61,23 +61,28 @@ impl<F: RichField> PlonkyPermutation<F> for KeccakPermutation<F> {
     }
 
     fn permute(&mut self) {
-        let mut state_bytes = vec![0u8; SPONGE_WIDTH * size_of::<u64>()];
-        for i in 0..SPONGE_WIDTH {
+        let mut state_bytes = [0u8; SPONGE_WIDTH * size_of::<u64>()];
+        for (i, x) in self.state.iter().enumerate() {
             state_bytes[i * size_of::<u64>()..(i + 1) * size_of::<u64>()]
-                .copy_from_slice(&self.state[i].to_canonical_u64().to_le_bytes());
+                .copy_from_slice(&x.to_canonical_u64().to_le_bytes());
         }
 
-        let hash_onion = core::iter::repeat_with(|| {
-            let output = keccak(state_bytes.clone()).0;
-            state_bytes = output.to_vec();
-            output
+        let hash_onion = (0..).scan(keccak(&state_bytes), |state, _| {
+            let output = state.0;
+            *state = keccak(&output);
+            Some(output)
         });
 
         let hash_onion_u64s = hash_onion.flat_map(|output| {
-            output
-                .chunks_exact(size_of::<u64>())
-                .map(|word| u64::from_le_bytes(word.try_into().unwrap()))
-                .collect_vec()
+            const STRIDE: usize = size_of::<u64>();
+
+            (0..(32/STRIDE)).map(move |i| {
+                let mut arr = [0; 8];
+                let i = i*STRIDE;
+                let bytes = &output[i..(i+STRIDE)];
+                arr[..bytes.len()].copy_from_slice(bytes);
+                u64::from_le_bytes(arr)
+            })
         });
 
         // Parse field elements from u64 stream, using rejection sampling such that words that don't
@@ -97,7 +102,7 @@ impl<F: RichField> PlonkyPermutation<F> for KeccakPermutation<F> {
         &self.state[..Self::RATE]
     }
 
-    fn squeeze_iter(self) -> impl IntoIterator<Item = F>+Copy {
+    fn squeeze_iter(self) -> impl IntoIterator<Item = F> + Copy {
         let mut vals = [F::default(); SPONGE_RATE];
         vals.copy_from_slice(self.squeeze());
         vals
@@ -134,7 +139,7 @@ impl<F: RichField, const N: usize> Hasher<F> for KeccakHash<N> {
 
         let mut hash_bytes = [0u8; 32];
         keccak256.finalize(&mut hash_bytes);
-        
+
         let mut arr = [0; N];
         arr.copy_from_slice(&hash_bytes[..N]);
         BytesHash(arr)
diff --git a/plonky2/src/hash/merkle_tree.rs b/plonky2/src/hash/merkle_tree.rs
index 10962727c6..10f05001b2 100644
--- a/plonky2/src/hash/merkle_tree.rs
+++ b/plonky2/src/hash/merkle_tree.rs
@@ -37,8 +37,8 @@ impl<F: RichField, H: Hasher<F>> MerkleCap<F, H> {
         log2_strict(self.len())
     }
 
-    pub fn flatten(&self) -> Vec<F> {
-        self.0.iter().flat_map(|&h| h.to_vec()).collect()
+    pub fn flatten(&self) -> impl Iterator<Item = F> + '_ {
+        self.0.iter().flat_map(|h| h.into_iter())
     }
 }
 
diff --git a/plonky2/src/hash/poseidon.rs b/plonky2/src/hash/poseidon.rs
index 2f2b932c0b..c8e1bf6f86 100644
--- a/plonky2/src/hash/poseidon.rs
+++ b/plonky2/src/hash/poseidon.rs
@@ -8,6 +8,7 @@ use core::fmt::Debug;
 use plonky2_field::packed::PackedField;
 use unroll::unroll_for_loops;
 
+use super::hashing::hash_n_to_hash_no_pad_iter;
 use crate::field::extension::{Extendable, FieldExtension};
 use crate::field::types::{Field, PrimeField64};
 use crate::gates::gate::Gate;
@@ -20,8 +21,6 @@ use crate::iop::target::{BoolTarget, Target};
 use crate::plonk::circuit_builder::CircuitBuilder;
 use crate::plonk::config::{AlgebraicHasher, Hasher};
 
-use super::hashing::hash_n_to_hash_no_pad_iter;
-
 pub const SPONGE_RATE: usize = 8;
 pub const SPONGE_CAPACITY: usize = 4;
 pub const SPONGE_WIDTH: usize = SPONGE_RATE + SPONGE_CAPACITY;
@@ -339,18 +338,17 @@ pub trait Poseidon: PrimeField64 {
         let mds_gate = PoseidonMdsGate::<F, D>::new();
         if builder.config.num_routed_wires >= mds_gate.num_wires() {
             let index = builder.add_gate(mds_gate, vec![]);
-            for i in 0..SPONGE_WIDTH {
+            let mut result = [0; SPONGE_WIDTH];
+            for (i, r) in result.iter_mut().enumerate() {
+                *r = i;
                 let input_wire = PoseidonMdsGate::<F, D>::wires_input(i);
                 builder.connect_extension(state[i], ExtensionTarget::from_range(index, input_wire));
             }
-            (0..SPONGE_WIDTH)
-                .map(|i| {
-                    let output_wire = PoseidonMdsGate::<F, D>::wires_output(i);
-                    ExtensionTarget::from_range(index, output_wire)
-                })
-                .collect::<Vec<_>>()
-                .try_into()
-                .unwrap()
+
+            result.map(|i| {
+                let output_wire = PoseidonMdsGate::<F, D>::wires_output(i);
+                ExtensionTarget::from_range(index, output_wire)
+            })
         } else {
             let mut result = [builder.zero_extension(); SPONGE_WIDTH];
 
@@ -870,7 +868,7 @@ impl<T: Copy + Debug + Default + Eq + Permuter + Send + Sync> PlonkyPermutation<T>
         &self.state[..Self::RATE]
     }
 
-    fn squeeze_iter(self) -> impl IntoIterator<Item = T>+Copy {
+    fn squeeze_iter(self) -> impl IntoIterator<Item = T> + Copy {
         let mut vals = [T::default(); SPONGE_RATE];
         vals.copy_from_slice(self.squeeze());
         vals
diff --git a/plonky2/src/hash/poseidon2.rs b/plonky2/src/hash/poseidon2.rs
index b4cba31054..0e24d834a5 100644
--- a/plonky2/src/hash/poseidon2.rs
+++ b/plonky2/src/hash/poseidon2.rs
@@ -12,6 +12,7 @@ use plonky2_field::extension::{Extendable, FieldExtension};
 use plonky2_field::types::{Field, PrimeField64};
 use unroll::unroll_for_loops;
 
+use super::hashing::hash_n_to_hash_no_pad_iter;
 use crate::gates::poseidon2::Poseidon2Gate;
 use crate::hash::hash_types::{HashOut, RichField};
 use crate::hash::hashing::{compress, hash_n_to_hash_no_pad, PlonkyPermutation};
@@ -20,8 +21,6 @@ use crate::iop::target::{BoolTarget, Target};
 use crate::plonk::circuit_builder::CircuitBuilder;
 use crate::plonk::config::{AlgebraicHasher, Hasher};
 
-use super::hashing::hash_n_to_hash_no_pad_iter;
-
 // The number of full rounds and partial rounds is given by the
 // calc_round_numbers.py script. They happen to be the same for both
 // width 8 and width 12 with s-box x^7.
@@ -560,7 +559,7 @@ impl<T: Copy + Debug + Default + Eq + Permuter + Send + Sync> PlonkyPermutation<T>
         &self.state[..Self::RATE]
     }
 
-    fn squeeze_iter(self) -> impl IntoIterator<Item = T>+Copy {
+    fn squeeze_iter(self) -> impl IntoIterator<Item = T> + Copy {
         let mut vals = [T::default(); RATE];
         vals.copy_from_slice(self.squeeze());
         vals
diff --git a/plonky2/src/iop/challenger.rs b/plonky2/src/iop/challenger.rs
index 57660fd487..384f847060 100644
--- a/plonky2/src/iop/challenger.rs
+++ b/plonky2/src/iop/challenger.rs
@@ -55,7 +55,11 @@ impl<F: RichField, H: Hasher<F>> Challenger<F, H> {
     }
 
     pub fn observe_elements(&mut self, elements: &[F]) {
-        for &element in elements {
+        self.observe_elements_iter(elements.into_iter().copied())
+    }
+
+    pub fn observe_elements_iter(&mut self, elements: impl IntoIterator<Item = F>) {
+        for element in elements {
             self.observe_element(element);
         }
     }
@@ -70,7 +74,7 @@ impl<F: RichField, H: Hasher<F>> Challenger<F, H> {
     }
 
     pub fn observe_hash<OH: Hasher<F>>(&mut self, hash: OH::Hash) {
-        self.observe_elements(&hash.to_vec())
+        self.observe_elements_iter(hash.into_iter())
     }
 
     pub fn observe_cap<OH: Hasher<F>>(&mut self, cap: &MerkleCap<F, OH>) {
diff --git a/plonky2/src/plonk/circuit_builder.rs b/plonky2/src/plonk/circuit_builder.rs
index de3ab16fdf..fdaf272b02 100644
--- a/plonky2/src/plonk/circuit_builder.rs
+++ b/plonky2/src/plonk/circuit_builder.rs
@@ -7,7 +7,7 @@ use core::cmp::max;
 use std::{collections::BTreeMap, sync::Arc, time::Instant};
 
 use hashbrown::{HashMap, HashSet};
-use itertools::Itertools;
+use itertools::{chain, Itertools};
 use log::{debug, info, warn, Level};
 use plonky2_util::ceil_div_usize;
 
@@ -1225,15 +1225,15 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
         let domain_separator = self.domain_separator.unwrap_or_default();
         let domain_separator_digest = C::Hasher::hash_pad(&domain_separator);
         // TODO: This should also include an encoding of gate constraints.
-        let circuit_digest_parts = [
+        let circuit_digest_parts = chain![
             constants_sigmas_cap.flatten(),
-            domain_separator_digest.to_vec(),
-            vec![
+            domain_separator_digest.into_iter(),
+            [
                 F::from_canonical_usize(degree_bits),
                 /* Add other circuit data here */
             ],
         ];
-        let circuit_digest = C::Hasher::hash_no_pad(&circuit_digest_parts.concat());
+        let circuit_digest = C::Hasher::hash_no_pad_iter(circuit_digest_parts);
 
         let common = CommonCircuitData {
             config: self.config,
diff --git a/plonky2/src/plonk/config.rs b/plonky2/src/plonk/config.rs
index 35d61eb984..89078479af 100644
--- a/plonky2/src/plonk/config.rs
+++ b/plonky2/src/plonk/config.rs
@@ -6,11 +6,11 @@
 //! the Poseidon hash function both internally and natively, and one
 //! mixing Poseidon internally and truncated Keccak externally.
 
-#[cfg(not(feature = "std"))]
-use alloc::{vec, vec::Vec};
-use itertools::chain;
+use core::borrow::{Borrow, BorrowMut};
 use core::fmt::Debug;
+use core::iter::repeat;
 
+use itertools::chain;
 use serde::de::DeserializeOwned;
 use serde::{Deserialize, Serialize};
 
@@ -28,11 +28,17 @@ pub trait GenericHashOut<F: RichField>:
     Copy + Clone + Debug + Eq + PartialEq + Send + Sync + Serialize + DeserializeOwned
 {
-    fn to_bytes(&self) -> Vec<u8>;
+    fn to_bytes(self) -> impl AsRef<[u8]>+AsMut<[u8]>+Borrow<[u8]>+BorrowMut<[u8]>+Copy;
     fn from_bytes(bytes: &[u8]) -> Self;
+    fn from_byte_iter(bytes: impl Iterator<Item = u8>) -> Self;
+    fn from_vals(inputs: &[F]) -> Self {
+        Self::from_iter(inputs.into_iter().copied())
+    }
+    fn from_iter(inputs: impl Iterator<Item = F>) -> Self {
+        Self::from_byte_iter(inputs.flat_map(|x| x.to_canonical_u64().to_le_bytes()))
+    }
 
-    fn into_iter(&self) -> impl Iterator<Item = F>;
-    fn to_vec(&self) -> Vec<F>;
+    fn into_iter(self) -> impl Iterator<Item = F>;
 }
 
 /// Trait for hash functions.
@@ -49,7 +55,7 @@ pub trait Hasher<F: RichField>: Sized + Copy + Debug + Eq + PartialEq {
     /// Hash a message without any padding step. Note that this can enable length-extension attacks.
     /// However, it is still collision-resistant in cases where the input has a fixed length.
     fn hash_no_pad(input: &[F]) -> Self::Hash {
-        Self::hash_no_pad_iter(input.into_iter().cloned())
+        Self::hash_no_pad_iter(input.into_iter().copied())
     }
 
     /// Hash a message without any padding step. Note that this can enable length-extension attacks.
@@ -58,30 +64,21 @@ pub trait Hasher<F: RichField>: Sized + Copy + Debug + Eq + PartialEq {
 
     /// Pad the message using the `pad10*1` rule, then hash it.
     fn hash_pad(input: &[F]) -> Self::Hash {
-        let len
-        chain!(input.into_iter().cloned(),
-            [F::One],
-            0..((padded_input.len() + 1) % Self::Permutation::RATE)
-        )
-        let mut padded_input = input.to_vec();
-        padded_input.push(F::ONE);
-        while (padded_input.len() + 1) % Self::Permutation::RATE != 0 {
-            padded_input.push(F::ZERO);
-        }
-        padded_input.push(F::ONE);
-        Self::hash_no_pad(&padded_input)
+        let zero_padding = (Self::Permutation::RATE
+            - (input.len() + 2) % Self::Permutation::RATE)
+            % Self::Permutation::RATE;
+        let padded_input = chain!(
+            input.into_iter().copied(),
+            [F::ONE],
+            (0..zero_padding).map(|_| F::ZERO),
+            [F::ONE],
+        );
+        Self::hash_no_pad_iter(padded_input)
     }
 
     /// Hash the slice if necessary to reduce its length to ~256 bits. If it already fits, this is a
     /// no-op.
     fn hash_or_noop(inputs: &[F]) -> Self::Hash {
         if inputs.len() * 8 <= Self::HASH_SIZE {
-            let mut inputs_bytes = vec![0u8; Self::HASH_SIZE];
-            for i in 0..inputs.len() {
-                inputs_bytes[i * 8..(i + 1) * 8]
-                    .copy_from_slice(&inputs[i].to_canonical_u64().to_le_bytes());
-            }
-            Self::Hash::from_bytes(&inputs_bytes)
+            Self::Hash::from_iter(inputs.iter().copied().chain(repeat(F::ZERO)))
         } else {
             Self::hash_no_pad(inputs)
         }
diff --git a/plonky2/src/util/serialization/mod.rs b/plonky2/src/util/serialization/mod.rs
index 393db6c699..ad1c8c60ba 100644
--- a/plonky2/src/util/serialization/mod.rs
+++ b/plonky2/src/util/serialization/mod.rs
@@ -1361,7 +1361,7 @@ pub trait Write {
         F: RichField,
         H: Hasher<F>,
     {
-        self.write_all(&h.to_bytes())
+        self.write_all(h.to_bytes().as_ref())
     }
 
     /// Writes a HashOutTarget `h` to `self`.
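The `pad10*1` arithmetic in the iterator-based `hash_pad` can be checked in isolation: the number
of interior zeros must make `input.len() + 2 + zero_padding` a multiple of the rate, exactly as the
old push-loop did. A standalone sketch, with a plain `rate` parameter standing in for
`Self::Permutation::RATE` and the loop mirroring the original slice-based implementation:

    // Closed-form zero count used by the iterator-based hash_pad.
    fn zero_padding(input_len: usize, rate: usize) -> usize {
        (rate - (input_len + 2) % rate) % rate
    }

    // Mirrors the original implementation: push a 1, pad 0s while
    // (len + 1) % rate != 0, then push the closing 1.
    fn zero_padding_loop(input_len: usize, rate: usize) -> usize {
        let mut len = input_len + 1;
        let mut zeros = 0;
        while (len + 1) % rate != 0 {
            len += 1;
            zeros += 1;
        }
        zeros
    }

    fn main() {
        let rate = 8;
        for n in 0..64 {
            assert_eq!(zero_padding(n, rate), zero_padding_loop(n, rate));
            // The fully padded message always fills whole rate-sized blocks.
            assert_eq!((n + 2 + zero_padding(n, rate)) % rate, 0);
        }
    }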
From 46ef6ce040e19c55d4228063ee6f861f929867cd Mon Sep 17 00:00:00 2001
From: Daniel Bloom <7810950-Daniel.Aaron.Bloom@users.noreply.gitlab.com>
Date: Sat, 13 Apr 2024 20:11:49 -0700
Subject: [PATCH 03/11] field-merkle-tree

---
 plonky2/Cargo.toml                    |  4 +++
 plonky2/benches/field_merkle_tree.rs  | 43 +++++++++++++++++++++++++++
 plonky2/src/hash/field_merkle_tree.rs | 28 ++++++++---------
 plonky2/src/hash/merkle_tree.rs       | 37 +++++++++++++++++------
 4 files changed, 87 insertions(+), 25 deletions(-)
 create mode 100644 plonky2/benches/field_merkle_tree.rs

diff --git a/plonky2/Cargo.toml b/plonky2/Cargo.toml
index d3fe0df02c..d893398ce3 100644
--- a/plonky2/Cargo.toml
+++ b/plonky2/Cargo.toml
@@ -64,6 +64,10 @@ required-features = ["rand_chacha"]
 name = "field_arithmetic"
 harness = false
 
+[[bench]]
+name = "field_merkle_tree"
+harness = false
+
 [[bench]]
 name = "ffts"
 harness = false
diff --git a/plonky2/benches/field_merkle_tree.rs b/plonky2/benches/field_merkle_tree.rs
new file mode 100644
index 0000000000..157518d2b2
--- /dev/null
+++ b/plonky2/benches/field_merkle_tree.rs
@@ -0,0 +1,43 @@
+mod allocator;
+
+use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion};
+use plonky2::field::goldilocks_field::GoldilocksField;
+use plonky2::hash::hash_types::RichField;
+use plonky2::hash::keccak::KeccakHash;
+use plonky2::hash::field_merkle_tree::FieldMerkleTree;
+use plonky2::hash::poseidon::PoseidonHash;
+use plonky2::plonk::config::Hasher;
+use tynm::type_name;
+
+const ELEMS_PER_LEAF_1: usize = 70;
+const ELEMS_PER_LEAF_2: usize = 5;
+const ELEMS_PER_LEAF_3: usize = 100;
+
+pub(crate) fn bench_field_merkle_tree<F: RichField, H: Hasher<F>>(c: &mut Criterion) {
+    let mut group = c.benchmark_group(&format!(
+        "field-merkle-tree<{}, {}>",
+        type_name::<F>(),
+        type_name::<H>()
+    ));
+    group.sample_size(10);
+
+    for size_log in [13, 14, 15] {
+        let size = 1 << size_log;
+        group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, _| {
+            let leaves = vec![
+                vec![F::rand_vec(ELEMS_PER_LEAF_1); size],
+                vec![F::rand_vec(ELEMS_PER_LEAF_2); size>>1],
+                vec![F::rand_vec(ELEMS_PER_LEAF_3); size>>2],
+            ];
+            b.iter(|| FieldMerkleTree::<F, H>::new(black_box(leaves.clone()), black_box(5)));
+        });
+    }
+}
+
+fn criterion_benchmark(c: &mut Criterion) {
+    bench_field_merkle_tree::<GoldilocksField, PoseidonHash>(c);
+    bench_field_merkle_tree::<GoldilocksField, KeccakHash<25>>(c);
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
diff --git a/plonky2/src/hash/field_merkle_tree.rs b/plonky2/src/hash/field_merkle_tree.rs
index 3af1f4c856..b5ed1c9d61 100644
--- a/plonky2/src/hash/field_merkle_tree.rs
+++ b/plonky2/src/hash/field_merkle_tree.rs
@@ -3,12 +3,12 @@ use alloc::vec;
 #[cfg(not(feature = "std"))]
 use alloc::vec::Vec;
 
-use itertools::Itertools;
+use itertools::{chain, Itertools};
 
-use crate::hash::hash_types::{RichField, NUM_HASH_OUT_ELTS};
+use crate::hash::hash_types::RichField;
 use crate::hash::merkle_proofs::MerkleProof;
 use crate::hash::merkle_tree::{
-    capacity_up_to_mut, fill_digests_buf, merkle_tree_prove, MerkleCap,
+    capacity_up_to_mut, fill_digests_buf, fill_digests_buf_custom, merkle_tree_prove, MerkleCap
 };
 use crate::plonk::config::{GenericHashOut, Hasher};
 use crate::util::log2_strict;
@@ -83,24 +83,20 @@ impl<F: RichField, H: Hasher<F>> FieldMerkleTree<F, H> {
             );
         } else {
             // The rest leaf layers
-            let new_leaves: Vec<Vec<F>> = cap
-                .iter()
-                .enumerate()
-                .map(|(i, cap_hash)| {
-                    let mut new_hash = Vec::with_capacity(NUM_HASH_OUT_ELTS + cur[i].len());
-                    new_hash.extend(cap_hash.into_iter());
-                    new_hash.extend(&cur[i]);
-                    new_hash
-                })
-                .collect();
-            cap.clear();
-            cap.reserve_exact(next_cap_len);
+            let new_leaves = cap;
+            cap = Vec::with_capacity(next_cap_len);
             let tmp_cap_buf = capacity_up_to_mut(&mut cap, next_cap_len);
-            fill_digests_buf::<F, H>(
+            fill_digests_buf_custom::<F, H, _, _>(
                 &mut digests_buf[digests_buf_pos..(digests_buf_pos + num_tmp_digests)],
                 tmp_cap_buf,
                 &new_leaves[..],
                 next_cap_height,
+                |i, cap_hash| {
+                    H::hash_or_noop_iter(chain!(
+                        cap_hash.into_iter(),
+                        cur[i].iter().copied(),
+                    ))
+                }
             );
         }
 
diff --git a/plonky2/src/hash/merkle_tree.rs b/plonky2/src/hash/merkle_tree.rs
index 10f05001b2..0dc2b16809 100644
--- a/plonky2/src/hash/merkle_tree.rs
+++ b/plonky2/src/hash/merkle_tree.rs
@@ -83,13 +83,16 @@ pub(crate) fn capacity_up_to_mut<T>(v: &mut Vec<T>, len: usize) -> &mut [MaybeUninit<T>] {
     }
 }
 
-pub(crate) fn fill_subtree<F: RichField, H: Hasher<F>>(
+pub(crate) fn fill_subtree<F, H, L, HF>(
+    index: usize,
     digests_buf: &mut [MaybeUninit<H::Hash>],
-    leaves: &[Vec<F>],
-) -> H::Hash {
+    leaves: &[L],
+    hash_fn: HF,
+) -> H::Hash
+where F: RichField, H: Hasher<F>, L: Send+Sync, HF: Send+Clone+Fn(usize, &L) -> H::Hash {
     assert_eq!(leaves.len(), digests_buf.len() / 2 + 1);
     if digests_buf.is_empty() {
-        H::hash_or_noop(&leaves[0])
+        hash_fn(index, &leaves[0])
     } else {
         // Layout is: left recursive output || left child digest
         // || right child digest || right recursive output.
@@ -101,9 +104,13 @@ pub(crate) fn fill_subtree<F, H, L, HF>(
         // Split `leaves` between both children.
         let (left_leaves, right_leaves) = leaves.split_at(leaves.len() / 2);
 
+        let left_index = index;
+        let right_index = index + leaves.len() / 2;
+
+        let left_fn = hash_fn.clone();
         let (left_digest, right_digest) = plonky2_maybe_rayon::join(
-            || fill_subtree::<F, H>(left_digests_buf, left_leaves),
-            || fill_subtree::<F, H>(right_digests_buf, right_leaves),
+            move || fill_subtree::<F, H, L, HF>(left_index, left_digests_buf, left_leaves, left_fn),
+            move || fill_subtree::<F, H, L, HF>(right_index, right_digests_buf, right_leaves, hash_fn),
         );
 
         left_digest_mem.write(left_digest);
@@ -118,6 +125,17 @@ pub(crate) fn fill_digests_buf<F: RichField, H: Hasher<F>>(
     leaves: &[Vec<F>],
     cap_height: usize,
 ) {
+    fill_digests_buf_custom::<F, H, _, _>(digests_buf, cap_buf, leaves, cap_height, |_, x| H::hash_or_noop(x))
+}
+
+pub(crate) fn fill_digests_buf_custom<F, H, L, HF>(
+    digests_buf: &mut [MaybeUninit<H::Hash>],
+    cap_buf: &mut [MaybeUninit<H::Hash>],
+    leaves: &[L],
+    cap_height: usize,
+    hash_fn: HF,
+)
+where F: RichField, H: Hasher<F>, L: Send+Sync, HF: Send+Sync+Clone+Fn(usize, &L) -> H::Hash {
     // Special case of a tree that's all cap. The usual case will panic because we'll try to split
     // an empty slice into chunks of `0`. (We would not need this if there was a way to split into
     // `blah` chunks as opposed to chunks _of_ `blah`.)
@@ -126,8 +144,9 @@ pub(crate) fn fill_digests_buf_custom<F, H, L, HF>(
         cap_buf
             .par_iter_mut()
             .zip(leaves)
-            .for_each(|(cap_buf, leaf)| {
-                cap_buf.write(H::hash_or_noop(leaf));
+            .enumerate()
+            .for_each(|(i, (cap_buf, leaf))| {
+                cap_buf.write(hash_fn(i, leaf));
             });
         return;
     }
@@ -143,7 +162,7 @@ pub(crate) fn fill_digests_buf_custom<F, H, L, HF>(
         // We have `1 << cap_height` sub-trees, one for each entry in `cap`. They are totally
         // independent, so we schedule one task for each. `digests_buf` and `leaves` are split
         // into `1 << cap_height` slices, one for each sub-tree.
-        subtree_cap.write(fill_subtree::<F, H>(subtree_digests, subtree_leaves));
+        subtree_cap.write(fill_subtree::<F, H, L, HF>(0, subtree_digests, subtree_leaves, hash_fn.clone()));
         },
     );
 }

From 4db1abc96318acd6f5acb16eb6fa17c1969364fc Mon Sep 17 00:00:00 2001
From: Daniel Bloom <7810950-Daniel.Aaron.Bloom@users.noreply.gitlab.com>
Date: Sat, 13 Apr 2024 22:27:16 -0700
Subject: [PATCH 04/11] fmt+clippy

---
 plonky2/benches/field_merkle_tree.rs  |  6 +++---
 plonky2/src/hash/field_merkle_tree.rs | 27 +++++++++++++++---------
 plonky2/src/hash/hash_types.rs        | 24 ++++++++++-----------
 plonky2/src/hash/keccak.rs            | 12 +++++------
 plonky2/src/hash/merkle_proofs.rs     |  2 +-
 plonky2/src/hash/merkle_tree.rs       | 30 +++++++++++++++++++++------
 plonky2/src/iop/challenger.rs         |  4 ++--
 plonky2/src/plonk/config.rs           | 12 +++++------
 8 files changed, 70 insertions(+), 47 deletions(-)

diff --git a/plonky2/benches/field_merkle_tree.rs b/plonky2/benches/field_merkle_tree.rs
index 157518d2b2..dfaa301321 100644
--- a/plonky2/benches/field_merkle_tree.rs
+++ b/plonky2/benches/field_merkle_tree.rs
@@ -2,9 +2,9 @@ mod allocator;
 
 use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion};
 use plonky2::field::goldilocks_field::GoldilocksField;
+use plonky2::hash::field_merkle_tree::FieldMerkleTree;
 use plonky2::hash::hash_types::RichField;
 use plonky2::hash::keccak::KeccakHash;
-use plonky2::hash::field_merkle_tree::FieldMerkleTree;
 use plonky2::hash::poseidon::PoseidonHash;
 use plonky2::plonk::config::Hasher;
 use tynm::type_name;
@@ -26,8 +26,8 @@ pub(crate) fn bench_field_merkle_tree<F: RichField, H: Hasher<F>>(c: &mut Criterion) {
         group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, _| {
             let leaves = vec![
                 vec![F::rand_vec(ELEMS_PER_LEAF_1); size],
-                vec![F::rand_vec(ELEMS_PER_LEAF_2); size>>1],
-                vec![F::rand_vec(ELEMS_PER_LEAF_3); size>>2],
+                vec![F::rand_vec(ELEMS_PER_LEAF_2); size >> 1],
+                vec![F::rand_vec(ELEMS_PER_LEAF_3); size >> 2],
             ];
             b.iter(|| FieldMerkleTree::<F, H>::new(black_box(leaves.clone()), black_box(5)));
         });
diff --git a/plonky2/src/hash/field_merkle_tree.rs b/plonky2/src/hash/field_merkle_tree.rs
index b5ed1c9d61..98257c5d70 100644
--- a/plonky2/src/hash/field_merkle_tree.rs
+++ b/plonky2/src/hash/field_merkle_tree.rs
@@ -8,7 +8,7 @@ use itertools::{chain, Itertools};
 use crate::hash::hash_types::RichField;
 use crate::hash::merkle_proofs::MerkleProof;
 use crate::hash::merkle_tree::{
-    capacity_up_to_mut, fill_digests_buf, fill_digests_buf_custom, merkle_tree_prove, MerkleCap
+    capacity_up_to_mut, fill_digests_buf, fill_digests_buf_custom, merkle_tree_prove, MerkleCap,
 };
 use crate::plonk::config::{GenericHashOut, Hasher};
 use crate::util::log2_strict;
@@ -92,11 +92,8 @@ impl<F: RichField, H: Hasher<F>> FieldMerkleTree<F, H> {
                 &new_leaves[..],
                 next_cap_height,
                 |i, cap_hash| {
-                    H::hash_or_noop_iter(chain!(
-                        cap_hash.into_iter(),
-                        cur[i].iter().copied(),
-                    ))
-                }
+                    H::hash_or_noop_iter(chain!(cap_hash.into_iter(), cur[i].iter().copied(),))
+                },
             );
         }
 
@@ -210,7 +207,10 @@ mod tests {
         assert_eq!(layer_1, fmt.digests[2..4]);
 
         let root = H::two_to_one(layer_1[0], layer_1[1]);
-        assert_eq!(fmt.cap.flatten().collect_vec(), root.into_iter().collect_vec());
+        assert_eq!(
+            fmt.cap.flatten().collect_vec(),
+            root.into_iter().collect_vec()
+        );
 
         let proof = fmt.open_batch(2);
         assert_eq!(proof.siblings, [mat_1_leaf_hashes[3], layer_1[0]]);
@@ -255,8 +255,12 @@ mod tests {
         assert_eq!(mat_1_leaf_hashes, fmt.digests[0..4]);
 
         let hidden_layer = [
-            H::two_to_one(mat_1_leaf_hashes[0], mat_1_leaf_hashes[1]).into_iter().collect_vec(),
-            H::two_to_one(mat_1_leaf_hashes[2], mat_1_leaf_hashes[3]).into_iter().collect_vec(),
+            H::two_to_one(mat_1_leaf_hashes[0], mat_1_leaf_hashes[1])
+                .into_iter()
+                .collect_vec(),
+            H::two_to_one(mat_1_leaf_hashes[2], mat_1_leaf_hashes[3])
+                .into_iter()
+                .collect_vec(),
         ];
         let new_leaves = hidden_layer
             .iter()
@@ -274,7 +278,10 @@ mod tests {
         assert_eq!(layer_1, fmt.digests[4..]);
 
         let root = H::two_to_one(layer_1[0], layer_1[1]);
-        assert_eq!(fmt.cap.flatten().collect_vec(), root.into_iter().collect_vec());
+        assert_eq!(
+            fmt.cap.flatten().collect_vec(),
+            root.into_iter().collect_vec()
+        );
 
         let proof = fmt.open_batch(1);
         assert_eq!(proof.siblings, [mat_1_leaf_hashes[0], layer_1[1]]);
diff --git a/plonky2/src/hash/hash_types.rs b/plonky2/src/hash/hash_types.rs
index a63b04b672..246976e51e 100644
--- a/plonky2/src/hash/hash_types.rs
+++ b/plonky2/src/hash/hash_types.rs
@@ -1,7 +1,6 @@
-use core::borrow::{Borrow, BorrowMut};
-
 #[cfg(not(feature = "std"))]
 use alloc::vec::Vec;
+use core::borrow::BorrowMut;
 
 use anyhow::ensure;
 use serde::{Deserialize, Deserializer, Serialize, Serializer};
@@ -84,11 +84,11 @@ where
 }
 
 impl<F: RichField> GenericHashOut<F> for HashOut<F> {
-    fn to_bytes(self) -> impl AsRef<[u8]>+AsMut<[u8]>+Borrow<[u8]>+BorrowMut<[u8]>+Copy {
-        let mut bytes = [0u8; NUM_HASH_OUT_ELTS*8];
+    fn to_bytes(self) -> impl AsRef<[u8]> + AsMut<[u8]> + BorrowMut<[u8]> + Copy {
+        let mut bytes = [0u8; NUM_HASH_OUT_ELTS * 8];
         for (i, x) in self.elements.into_iter().enumerate() {
-            let i = i*8;
-            bytes[i..i+8].copy_from_slice(&x.to_canonical_u64().to_le_bytes())
+            let i = i * 8;
+            bytes[i..i + 8].copy_from_slice(&x.to_canonical_u64().to_le_bytes())
         }
         bytes
     }
@@ -108,8 +107,7 @@ impl<F: RichField> GenericHashOut<F> for HashOut<F> {
         let bytes = [[(); 8]; NUM_HASH_OUT_ELTS].map(|b| b.map(|()| bytes.next().unwrap()));
 
         HashOut {
-            elements: bytes
-                .map(|x| F::from_canonical_u64(u64::from_le_bytes(x.try_into().unwrap()))),
+            elements: bytes.map(|x| F::from_canonical_u64(u64::from_le_bytes(x))),
         }
     }
 
@@ -189,7 +187,7 @@ impl<const N: usize> Sample for BytesHash<N> {
 }
 
 impl<F: RichField, const N: usize> GenericHashOut<F> for BytesHash<N> {
-    fn to_bytes(self) -> impl AsRef<[u8]>+AsMut<[u8]>+Borrow<[u8]>+BorrowMut<[u8]>+Copy {
+    fn to_bytes(self) -> impl AsRef<[u8]> + AsMut<[u8]> + BorrowMut<[u8]> + Copy {
         self.0
     }
 
@@ -204,11 +202,11 @@ impl<F: RichField, const N: usize> GenericHashOut<F> for BytesHash<N> {
     fn into_iter(self) -> impl Iterator<Item = F> {
         // Chunks of 7 bytes since 8 bytes would allow collisions.
         const STRIDE: usize = 7;
-        
-        (0..((N+STRIDE-1)/STRIDE)).map(move |i| {
+
+        (0..((N + STRIDE - 1) / STRIDE)).map(move |i| {
             let mut arr = [0; 8];
-            let i = i*STRIDE;
-            let bytes = &self.0[i..std::cmp::min(i+STRIDE, N)];
+            let i = i * STRIDE;
+            let bytes = &self.0[i..std::cmp::min(i + STRIDE, N)];
             arr[..bytes.len()].copy_from_slice(bytes);
             F::from_canonical_u64(u64::from_le_bytes(arr))
         })
diff --git a/plonky2/src/hash/keccak.rs b/plonky2/src/hash/keccak.rs
index 3a0552ea8c..dadbb29684 100644
--- a/plonky2/src/hash/keccak.rs
+++ b/plonky2/src/hash/keccak.rs
@@ -67,19 +67,19 @@ impl<F: RichField> PlonkyPermutation<F> for KeccakPermutation<F> {
                 .copy_from_slice(&x.to_canonical_u64().to_le_bytes());
         }
 
-        let hash_onion = (0..).scan(keccak(&state_bytes), |state, _| {
+        let hash_onion = (0..).scan(keccak(state_bytes), |state, _| {
             let output = state.0;
-            *state = keccak(&output);
+            *state = keccak(output);
             Some(output)
         });
 
         let hash_onion_u64s = hash_onion.flat_map(|output| {
             const STRIDE: usize = size_of::<u64>();
-            
-            (0..(32/STRIDE)).map(move |i| {
+
+            (0..(32 / STRIDE)).map(move |i| {
                 let mut arr = [0; 8];
-                let i = i*STRIDE;
-                let bytes = &output[i..(i+STRIDE)];
+                let i = i * STRIDE;
+                let bytes = &output[i..(i + STRIDE)];
                 arr[..bytes.len()].copy_from_slice(bytes);
                 u64::from_le_bytes(arr)
             })
diff --git a/plonky2/src/hash/merkle_proofs.rs b/plonky2/src/hash/merkle_proofs.rs
index 2fe602b29b..6259ab3ac7 100644
--- a/plonky2/src/hash/merkle_proofs.rs
+++ b/plonky2/src/hash/merkle_proofs.rs
@@ -91,7 +91,7 @@ pub fn verify_field_merkle_proof_to_cap<F: RichField, H: Hasher<F>>(
     let mut leaf_data_index = 1;
     for &sibling_digest in proof.siblings.iter() {
         if leaf_data_index < leaf_heights.len() && current_height == leaf_heights[leaf_data_index] {
-            let new_leaves = chain!( 
+            let new_leaves = chain!(
                 current_digest.into_iter(),
                 leaf_data[leaf_data_index].iter().copied(),
             );
diff --git a/plonky2/src/hash/merkle_tree.rs b/plonky2/src/hash/merkle_tree.rs
index 0dc2b16809..9ec48c77f4 100644
--- a/plonky2/src/hash/merkle_tree.rs
+++ b/plonky2/src/hash/merkle_tree.rs
@@ -89,7 +89,12 @@ pub(crate) fn fill_subtree<F, H, L, HF>(
     leaves: &[L],
     hash_fn: HF,
 ) -> H::Hash
-where F: RichField, H: Hasher<F>, L: Send+Sync, HF: Send+Clone+Fn(usize, &L) -> H::Hash {
+where
+    F: RichField,
+    H: Hasher<F>,
+    L: Send + Sync,
+    HF: Send + Clone + Fn(usize, &L) -> H::Hash,
+{
     assert_eq!(leaves.len(), digests_buf.len() / 2 + 1);
     if digests_buf.is_empty() {
         hash_fn(index, &leaves[0])
@@ -110,7 +115,9 @@ pub(crate) fn fill_subtree<F, H, L, HF>(
         let left_fn = hash_fn.clone();
         let (left_digest, right_digest) = plonky2_maybe_rayon::join(
             move || fill_subtree::<F, H, L, HF>(left_index, left_digests_buf, left_leaves, left_fn),
-            move || fill_subtree::<F, H, L, HF>(right_index, right_digests_buf, right_leaves, hash_fn),
+            move || {
+                fill_subtree::<F, H, L, HF>(right_index, right_digests_buf, right_leaves, hash_fn)
+            },
         );
 
         left_digest_mem.write(left_digest);
@@ -125,7 +132,9 @@ pub(crate) fn fill_digests_buf<F: RichField, H: Hasher<F>>(
     leaves: &[Vec<F>],
     cap_height: usize,
 ) {
-    fill_digests_buf_custom::<F, H, _, _>(digests_buf, cap_buf, leaves, cap_height, |_, x| H::hash_or_noop(x))
+    fill_digests_buf_custom::<F, H, _, _>(digests_buf, cap_buf, leaves, cap_height, |_, x| {
+        H::hash_or_noop(x)
+    })
 }
 
 pub(crate) fn fill_digests_buf_custom<F, H, L, HF>(
@@ -134,8 +143,12 @@ pub(crate) fn fill_digests_buf_custom<F, H, L, HF>(
     leaves: &[L],
     cap_height: usize,
     hash_fn: HF,
-)
-where F: RichField, H: Hasher<F>, L: Send+Sync, HF: Send+Sync+Clone+Fn(usize, &L) -> H::Hash {
+) where
+    F: RichField,
+    H: Hasher<F>,
+    L: Send + Sync,
+    HF: Send + Sync + Clone + Fn(usize, &L) -> H::Hash,
+{
     // Special case of a tree that's all cap. The usual case will panic because we'll try to split
     // an empty slice into chunks of `0`. (We would not need this if there was a way to split into
     // `blah` chunks as opposed to chunks _of_ `blah`.)
@@ -162,7 +175,12 @@ pub(crate) fn fill_digests_buf_custom<F, H, L, HF>(
         // We have `1 << cap_height` sub-trees, one for each entry in `cap`. They are totally
         // independent, so we schedule one task for each. `digests_buf` and `leaves` are split
         // into `1 << cap_height` slices, one for each sub-tree.
-        subtree_cap.write(fill_subtree::<F, H, L, HF>(0, subtree_digests, subtree_leaves, hash_fn.clone()));
+        subtree_cap.write(fill_subtree::<F, H, L, HF>(
+            0,
+            subtree_digests,
+            subtree_leaves,
+            hash_fn.clone(),
+        ));
         },
     );
 }
diff --git a/plonky2/src/iop/challenger.rs b/plonky2/src/iop/challenger.rs
index 384f847060..bc0564d2f1 100644
--- a/plonky2/src/iop/challenger.rs
+++ b/plonky2/src/iop/challenger.rs
@@ -55,10 +55,10 @@ impl<F: RichField, H: Hasher<F>> Challenger<F, H> {
     }
 
     pub fn observe_elements(&mut self, elements: &[F]) {
-        self.observe_elements_iter(elements.into_iter().copied())
+        self.observe_elements_iter(elements.iter().copied())
     }
 
-    pub fn observe_elements_iter(&mut self, elements: impl IntoIterator<Item=F>) {
+    pub fn observe_elements_iter(&mut self, elements: impl IntoIterator<Item = F>) {
         for element in elements {
             self.observe_element(element);
         }
    }
diff --git a/plonky2/src/plonk/config.rs b/plonky2/src/plonk/config.rs
index c9af60b718..82a7e53ea2 100644
--- a/plonky2/src/plonk/config.rs
+++ b/plonky2/src/plonk/config.rs
@@ -6,7 +6,7 @@
 //! the Poseidon hash function both internally and natively, and one
 //! mixing Poseidon internally and truncated Keccak externally.
 
-use core::borrow::{Borrow, BorrowMut};
+use core::borrow::BorrowMut;
 use core::fmt::Debug;
 use core::iter::repeat;
 
@@ -28,11 +28,11 @@ pub trait GenericHashOut<F: RichField>:
     Copy + Clone + Debug + Eq + PartialEq + Send + Sync + Serialize + DeserializeOwned
 {
-    fn to_bytes(self) -> impl AsRef<[u8]>+AsMut<[u8]>+Borrow<[u8]>+BorrowMut<[u8]>+Copy;
+    fn to_bytes(self) -> impl AsRef<[u8]> + AsMut<[u8]> + BorrowMut<[u8]> + Copy;
     fn from_bytes(bytes: &[u8]) -> Self;
     fn from_byte_iter(bytes: impl Iterator<Item = u8>) -> Self;
     fn from_vals(inputs: &[F]) -> Self {
-        Self::from_iter(inputs.into_iter().copied())
+        Self::from_iter(inputs.iter().copied())
     }
     fn from_iter(inputs: impl Iterator<Item = F>) -> Self {
         Self::from_byte_iter(inputs.flat_map(|x| x.to_canonical_u64().to_le_bytes()))
@@ -55,7 +55,7 @@ pub trait Hasher<F: RichField>: Sized + Copy + Debug + Eq + PartialEq {
     /// Hash a message without any padding step. Note that this can enable length-extension attacks.
     /// However, it is still collision-resistant in cases where the input has a fixed length.
     fn hash_no_pad(input: &[F]) -> Self::Hash {
-        Self::hash_no_pad_iter(input.into_iter().copied())
+        Self::hash_no_pad_iter(input.iter().copied())
     }
 
     /// Hash a message without any padding step. Note that this can enable length-extension attacks.
@@ -66,7 +66,7 @@ pub trait Hasher<F: RichField>: Sized + Copy + Debug + Eq + PartialEq {
     fn hash_pad(input: &[F]) -> Self::Hash {
         let zero_padding = (Self::Permutation::RATE
             - (input.len() + 2) % Self::Permutation::RATE)
             % Self::Permutation::RATE;
         let padded_input = chain!(
-            input.into_iter().copied(),
+            input.iter().copied(),
             [F::ONE],
             (0..zero_padding).map(|_| F::ZERO),
             [F::ONE],
         );
         Self::hash_no_pad_iter(padded_input)
@@ -83,7 +83,7 @@ pub trait Hasher<F: RichField>: Sized + Copy + Debug + Eq + PartialEq {
             Self::hash_no_pad(inputs)
         }
     }
-    
+
     /// Hash the slice if necessary to reduce its length to ~256 bits. If it already fits, this is a
     /// no-op.
     fn hash_or_noop_iter<I: IntoIterator<Item = F>>(inputs: I) -> Self::Hash {

From c9399dc9e18d5a065fb7e2ec3d41e165b464a7e1 Mon Sep 17 00:00:00 2001
From: Daniel Bloom <7810950-Daniel.Aaron.Bloom@users.noreply.gitlab.com>
Date: Sat, 13 Apr 2024 22:41:25 -0700
Subject: [PATCH 05/11] no-std

---
 plonky2/src/hash/hash_types.rs | 3 ++-
 plonky2/src/hash/hashing.rs    | 2 +-
 plonky2/src/hash/keccak.rs     | 2 --
 plonky2/src/hash/poseidon.rs   | 4 +++-
 4 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/plonky2/src/hash/hash_types.rs b/plonky2/src/hash/hash_types.rs
index 246976e51e..4afcb3ea83 100644
--- a/plonky2/src/hash/hash_types.rs
+++ b/plonky2/src/hash/hash_types.rs
@@ -1,6 +1,7 @@
 #[cfg(not(feature = "std"))]
 use alloc::vec::Vec;
 use core::borrow::BorrowMut;
+use core::cmp;
 
 use anyhow::ensure;
 use serde::{Deserialize, Deserializer, Serialize, Serializer};
@@ -206,7 +207,7 @@ impl<F: RichField, const N: usize> GenericHashOut<F> for BytesHash<N> {
         (0..((N + STRIDE - 1) / STRIDE)).map(move |i| {
             let mut arr = [0; 8];
             let i = i * STRIDE;
-            let bytes = &self.0[i..std::cmp::min(i + STRIDE, N)];
+            let bytes = &self.0[i..cmp::min(i + STRIDE, N)];
             arr[..bytes.len()].copy_from_slice(bytes);
             F::from_canonical_u64(u64::from_le_bytes(arr))
         })
diff --git a/plonky2/src/hash/hashing.rs b/plonky2/src/hash/hashing.rs
index 234e8c82e0..489edd679f 100644
--- a/plonky2/src/hash/hashing.rs
+++ b/plonky2/src/hash/hashing.rs
@@ -182,6 +182,6 @@ pub fn hash_n_to_hash_no_pad_iter<
 ) -> HashOut<F> {
     let mut elements = hash_n_to_m_no_pad_iter::<F, P, I>(inputs);
     HashOut {
-        elements: std::array::from_fn(|_| elements.next().unwrap()),
+        elements: core::array::from_fn(|_| elements.next().unwrap()),
     }
 }
diff --git a/plonky2/src/hash/keccak.rs b/plonky2/src/hash/keccak.rs
index dadbb29684..f1da830b79 100644
--- a/plonky2/src/hash/keccak.rs
+++ b/plonky2/src/hash/keccak.rs
@@ -1,5 +1,3 @@
-#[cfg(not(feature = "std"))]
-use alloc::{vec, vec::Vec};
 use core::borrow::Borrow;
 use core::mem::size_of;
 
diff --git a/plonky2/src/hash/poseidon.rs b/plonky2/src/hash/poseidon.rs
index c8e1bf6f86..e5b94c3178 100644
--- a/plonky2/src/hash/poseidon.rs
+++ b/plonky2/src/hash/poseidon.rs
@@ -2,7 +2,7 @@
 //!
 
 #[cfg(not(feature = "std"))]
-use alloc::{vec, vec::Vec};
+use alloc::vec;
 use core::fmt::Debug;
 
 use plonky2_field::packed::PackedField;
@@ -931,6 +931,8 @@ impl<F: RichField> AlgebraicHasher<F> for PoseidonHash {
 
 #[cfg(test)]
 pub(crate) mod test_helpers {
+    #[cfg(not(feature = "std"))]
+    use alloc::vec::Vec;
     use super::*;
 
     pub(crate) fn check_test_vectors(

From 96fc0229940d829f6c920cc47a661eda67115b8b Mon Sep 17 00:00:00 2001
From: Daniel Bloom <7810950-Daniel.Aaron.Bloom@users.noreply.gitlab.com>
Date: Sat, 13 Apr 2024 22:43:39 -0700
Subject: [PATCH 06/11] fmt

---
 plonky2/src/hash/poseidon.rs | 1 +
 1 file changed, 1 insertion(+)

diff --git a/plonky2/src/hash/poseidon.rs b/plonky2/src/hash/poseidon.rs
index e5b94c3178..e9ded96f0d 100644
--- a/plonky2/src/hash/poseidon.rs
+++ b/plonky2/src/hash/poseidon.rs
@@ -933,6 +933,7 @@ pub(crate) mod test_helpers {
     #[cfg(not(feature = "std"))]
     use alloc::vec::Vec;
+
     use super::*;
 
     pub(crate) fn check_test_vectors(

From 9b455d6b8c2042f3f1604923e56651bf9851a9d5 Mon Sep 17 00:00:00 2001
From: Daniel Bloom <7810950-Daniel.Aaron.Bloom@users.noreply.gitlab.com>
Date: Sat, 13 Apr 2024 23:53:33 -0700
Subject: [PATCH 07/11] fix offset

---
 plonky2/src/hash/field_merkle_tree.rs |  2 +-
 plonky2/src/hash/merkle_tree.rs       | 12 +++++++-----
 2 files changed, 8 insertions(+), 6 deletions(-)

diff --git a/plonky2/src/hash/field_merkle_tree.rs b/plonky2/src/hash/field_merkle_tree.rs
index 98257c5d70..e802904aef 100644
--- a/plonky2/src/hash/field_merkle_tree.rs
+++ b/plonky2/src/hash/field_merkle_tree.rs
@@ -92,7 +92,7 @@ impl<F: RichField, H: Hasher<F>> FieldMerkleTree<F, H> {
                 &new_leaves[..],
                 next_cap_height,
                 |i, cap_hash| {
-                    H::hash_or_noop_iter(chain!(cap_hash.into_iter(), cur[i].iter().copied(),))
+                    H::hash_or_noop_iter(chain!(cap_hash.into_iter(), cur[i].iter().copied()))
                 },
             );
         }
diff --git a/plonky2/src/hash/merkle_tree.rs b/plonky2/src/hash/merkle_tree.rs
index 9ec48c77f4..c00db4a26d 100644
--- a/plonky2/src/hash/merkle_tree.rs
+++ b/plonky2/src/hash/merkle_tree.rs
@@ -170,19 +170,21 @@ pub(crate) fn fill_digests_buf_custom<F, H, L, HF>(
     let leaves_chunks = leaves.par_chunks_exact(subtree_leaves_len);
     assert_eq!(digests_chunks.len(), cap_buf.len());
     assert_eq!(digests_chunks.len(), leaves_chunks.len());
-    digests_chunks.zip(cap_buf).zip(leaves_chunks).for_each(
-        |((subtree_digests, subtree_cap), subtree_leaves)| {
+    digests_chunks
+        .zip(cap_buf)
+        .zip(leaves_chunks)
+        .enumerate()
+        .for_each(|(i, ((subtree_digests, subtree_cap), subtree_leaves))| {
             // We have `1 << cap_height` sub-trees, one for each entry in `cap`. They are totally
             // independent, so we schedule one task for each. `digests_buf` and `leaves` are split
             // into `1 << cap_height` slices, one for each sub-tree.
             subtree_cap.write(fill_subtree::<F, H, L, HF>(
-                0,
+                i * subtree_leaves_len,
                 subtree_digests,
                 subtree_leaves,
                 hash_fn.clone(),
             ));
-        },
-    );
+        });
 }

From ad8a828a1086f76818f6b7bd6fe148e7250abfa2 Mon Sep 17 00:00:00 2001
From: Daniel-Aaron-Bloom <76709210+Daniel-Aaron-Bloom@users.noreply.github.com>
Date: Mon, 15 Apr 2024 20:39:17 -0700
Subject: [PATCH 08/11] Update plonky2/Cargo.toml
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Co-authored-by: Matthias Görgens
---
 plonky2/Cargo.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/plonky2/Cargo.toml b/plonky2/Cargo.toml
index d893398ce3..f6fb751bbe 100644
--- a/plonky2/Cargo.toml
+++ b/plonky2/Cargo.toml
@@ -23,7 +23,7 @@ ahash = { workspace = true }
 anyhow = { workspace = true }
 hashbrown = { workspace = true }
 itertools = { workspace = true }
-tiny-keccak = { version = "2.0.0", features = ["keccak"] }
+tiny-keccak = { version = "2.0", features = ["keccak"] }
 log = { workspace = true }
 num = { workspace = true }
 rand = { workspace = true }

From c090dc80aa739b38673a7620dd683cd3a5b7c38f Mon Sep 17 00:00:00 2001
From: Daniel Bloom <7810950-Daniel.Aaron.Bloom@users.noreply.gitlab.com>
Date: Tue, 16 Apr 2024 16:21:21 -0700
Subject: [PATCH 09/11] merge

---
 plonky2/src/hash/keccak.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/plonky2/src/hash/keccak.rs b/plonky2/src/hash/keccak.rs
index f1da830b79..a48e5eed87 100644
--- a/plonky2/src/hash/keccak.rs
+++ b/plonky2/src/hash/keccak.rs
@@ -123,7 +123,7 @@ impl<F: RichField, const N: usize> Hasher<F> for KeccakHash<N> {
         }
 
         let mut hash_bytes = [0u8; 32];
-        keccak256.finalize(&mut hash_bytes); 
+        keccak256.finalize(&mut hash_bytes);
 
         let mut arr = [0; N];
         arr.copy_from_slice(&hash_bytes[..N]);

From 785d06f5937815b24701a0dc67b96294085961fa Mon Sep 17 00:00:00 2001
From: Daniel Bloom <7810950-Daniel.Aaron.Bloom@users.noreply.gitlab.com>
Date: Tue, 16 Apr 2024 16:22:13 -0700
Subject: [PATCH 10/11] fmt

---
 plonky2/src/hash/keccak.rs | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/plonky2/src/hash/keccak.rs b/plonky2/src/hash/keccak.rs
index fd111d6f8b..f1da830b79 100644
--- a/plonky2/src/hash/keccak.rs
+++ b/plonky2/src/hash/keccak.rs
@@ -68,7 +68,7 @@ impl<F: RichField> PlonkyPermutation<F> for KeccakPermutation<F> {
         let hash_onion = (0..).scan(keccak(state_bytes), |state, _| {
             let output = state.0;
             *state = keccak(output);
-            Some(output) 
+            Some(output)
         });
 
         let hash_onion_u64s = hash_onion.flat_map(|output| {
@@ -123,7 +123,7 @@ impl<F: RichField, const N: usize> Hasher<F> for KeccakHash<N> {
         }
 
         let mut hash_bytes = [0u8; 32];
-        keccak256.finalize(&mut hash_bytes); 
+        keccak256.finalize(&mut hash_bytes);
 
         let mut arr = [0; N];
         arr.copy_from_slice(&hash_bytes[..N]);

From e3da14670dff440038d9398bcec79a77e7001116 Mon Sep 17 00:00:00 2001
From: Daniel Bloom <7810950-Daniel.Aaron.Bloom@users.noreply.gitlab.com>
Date: Tue, 16 Apr 2024 19:50:13 -0700
Subject: [PATCH 11/11] feedback

---
 plonky2/src/hash/hash_types.rs | 11 ++++++-----
 plonky2/src/hash/hashing.rs    | 18 ++++++++++--------
 plonky2/src/hash/keccak.rs     |  9 +++------
 3 files changed, 19 insertions(+), 19 deletions(-)

diff --git a/plonky2/src/hash/hash_types.rs b/plonky2/src/hash/hash_types.rs
index 4afcb3ea83..d25bf735ae 100644
--- a/plonky2/src/hash/hash_types.rs
+++ b/plonky2/src/hash/hash_types.rs
@@ -1,7 +1,6 @@
 #[cfg(not(feature = "std"))]
 use alloc::vec::Vec;
 use core::borrow::BorrowMut;
-use core::cmp;
 
 use anyhow::ensure;
 use serde::{Deserialize, Deserializer, Serialize, Serializer};
@@ -197,17 +196,19 @@ impl<F: RichField, const N: usize> GenericHashOut<F> for BytesHash<N> {
     }
 
     fn from_byte_iter(mut bytes: impl Iterator<Item = u8>) -> Self {
-        Self([(); N].map(|()| bytes.next().unwrap()))
+        Self(core::array::from_fn(|_| bytes.next().unwrap()))
     }
 
     fn into_iter(self) -> impl Iterator<Item = F> {
         // Chunks of 7 bytes since 8 bytes would allow collisions.
         const STRIDE: usize = 7;
 
-        (0..((N + STRIDE - 1) / STRIDE)).map(move |i| {
+        (0..N).step_by(STRIDE).map(move |i| {
+            let mut bytes = &self.0[i..];
+            if bytes.len() > STRIDE {
+                bytes = &bytes[..STRIDE];
+            }
             let mut arr = [0; 8];
-            let i = i * STRIDE;
-            let bytes = &self.0[i..cmp::min(i + STRIDE, N)];
             arr[..bytes.len()].copy_from_slice(bytes);
             F::from_canonical_u64(u64::from_le_bytes(arr))
         })
     }
diff --git a/plonky2/src/hash/hashing.rs b/plonky2/src/hash/hashing.rs
index 489edd679f..44b77a2bfd 100644
--- a/plonky2/src/hash/hashing.rs
+++ b/plonky2/src/hash/hashing.rs
@@ -2,6 +2,9 @@
 #[cfg(not(feature = "std"))]
 use alloc::vec::Vec;
 use core::fmt::Debug;
+use core::iter::repeat_with;
+
+use itertools::chain;
 
 use crate::field::extension::Extendable;
 use crate::field::types::Field;
@@ -158,14 +161,13 @@ pub fn hash_n_to_m_no_pad_iter<F: RichField, P: PlonkyPermutation<F>, I: IntoIterator<Item = F>>(
         perm.permute();
     }
 
-    let mut first = true;
-    core::iter::repeat_with(move || {
-        if !first {
-            perm.permute()
-        }
-        first = false;
-        perm.squeeze_iter()
-    })
+    chain!(
+        [perm.squeeze_iter()],
+        repeat_with(move || {
+            perm.permute();
+            perm.squeeze_iter()
+        })
+    )
     .flatten()
 }
 
diff --git a/plonky2/src/hash/keccak.rs b/plonky2/src/hash/keccak.rs
index f1da830b79..eb31aa672e 100644
--- a/plonky2/src/hash/keccak.rs
+++ b/plonky2/src/hash/keccak.rs
@@ -74,12 +74,9 @@ impl<F: RichField> PlonkyPermutation<F> for KeccakPermutation<F> {
         let hash_onion_u64s = hash_onion.flat_map(|output| {
             const STRIDE: usize = size_of::<u64>();
 
-            (0..(32 / STRIDE)).map(move |i| {
-                let mut arr = [0; 8];
-                let i = i * STRIDE;
-                let bytes = &output[i..(i + STRIDE)];
-                arr[..bytes.len()].copy_from_slice(bytes);
-                u64::from_le_bytes(arr)
+            (0..32).step_by(STRIDE).map(move |i| {
+                let bytes = output[i..].first_chunk::<STRIDE>().unwrap();
+                u64::from_le_bytes(*bytes)
             })
         });
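The 7-byte chunking that the final patch streamlines in `BytesHash::into_iter` can also be checked
on its own: every chunk is zero-extended to 8 bytes before the little-endian conversion, so each
value stays below 2^56 and therefore has a unique canonical representative in a 64-bit field such
as Goldilocks, which is the collision concern the comment raises about full 8-byte chunks. A
standalone sketch (the names `N` and `STRIDE` mirror the patch; the function itself is
illustrative, not part of the series):

    // Decompose N bytes into u64 values, 7 bytes at a time, zero-extending the
    // final short chunk, exactly as the step_by-based iterator above does.
    fn bytes_to_u64s<const N: usize>(bytes: [u8; N]) -> Vec<u64> {
        const STRIDE: usize = 7;
        (0..N)
            .step_by(STRIDE)
            .map(|i| {
                let mut chunk = &bytes[i..];
                if chunk.len() > STRIDE {
                    chunk = &chunk[..STRIDE];
                }
                let mut arr = [0u8; 8];
                arr[..chunk.len()].copy_from_slice(chunk);
                u64::from_le_bytes(arr)
            })
            .collect()
    }

    fn main() {
        // 25 bytes -> chunks of 7, 7, 7, 4 bytes.
        let vals = bytes_to_u64s([0xff; 25]);
        assert_eq!(vals.len(), 4);
        // Every chunk value fits strictly below 2^56, so no two distinct byte
        // strings can map to the same sequence of field elements.
        assert!(vals.iter().all(|&v| v < 1 << 56));
    }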