diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 6cc570d1..3e82826e 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -3,7 +3,7 @@ on:
   pull_request:
   push:
     branches:
-      - master
+      - main
 env:
   RUST_BACKTRACE: 1
diff --git a/CHANGELOG.md b/CHANGELOG.md
index fd35cbc8..ee3df55b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -31,6 +31,7 @@

 ### Features

+- [\#75](https://github.com/arkworks-rs/crypto-primitives/pull/75) Add Cryptographic Hash and Proof of Work.
 - [\#59](https://github.com/arkworks-rs/crypto-primitives/pull/59) Implement `TwoToOneCRHScheme` for Bowe-Hopwood CRH.
 - [\#60](https://github.com/arkworks-rs/crypto-primitives/pull/60) Merkle tree no longer requires CRH to input and output bytes. Leaf can be any raw input of CRH, such as field elements.
 - [\#67](https://github.com/arkworks-rs/crypto-primitives/pull/67) User can access or replace leaf index variable in `PathVar`.
diff --git a/src/cryptographic_hash/constraints/mod.rs b/src/cryptographic_hash/constraints/mod.rs
new file mode 100644
index 00000000..eb59af13
--- /dev/null
+++ b/src/cryptographic_hash/constraints/mod.rs
@@ -0,0 +1,22 @@
+use ark_std::borrow::Borrow;
+
+use ark_ff::PrimeField;
+use ark_relations::r1cs::{ConstraintSystemRef, SynthesisError};
+
+pub mod poseidon;
+
+/// R1CS gadget for a cryptographic hash.
+pub trait CryptoHashGadget<CF: PrimeField> {
+    type Parameters;
+    /// Input of the hash.
+    type InputVar: ?Sized;
+    /// Output of the hash.
+    type OutputVar;
+
+    /// Given the input var and parameters, compute the output var.
+    fn digest<T: Borrow<Self::InputVar>>(
+        cs: ConstraintSystemRef<CF>,
+        param: &Self::Parameters,
+        input: T,
+    ) -> Result<Self::OutputVar, SynthesisError>;
+}
diff --git a/src/cryptographic_hash/constraints/poseidon.rs b/src/cryptographic_hash/constraints/poseidon.rs
new file mode 100644
index 00000000..c5add9a1
--- /dev/null
+++ b/src/cryptographic_hash/constraints/poseidon.rs
@@ -0,0 +1,73 @@
+use ark_std::{borrow::Borrow, marker::PhantomData};
+
+use ark_ff::PrimeField;
+use ark_r1cs_std::fields::fp::FpVar;
+use ark_relations::r1cs::{ConstraintSystemRef, SynthesisError};
+use ark_sponge::{
+    constraints::{AbsorbGadget, CryptographicSpongeVar},
+    poseidon::{constraints::PoseidonSpongeVar, PoseidonParameters},
+};
+
+use super::CryptoHashGadget;
+
+pub struct PoseidonHashGadget<F: PrimeField, I: AbsorbGadget<F>> {
+    _field: PhantomData<F>,
+    _input: PhantomData<I>,
+}
+
+impl<F: PrimeField, I: AbsorbGadget<F>> CryptoHashGadget<F> for PoseidonHashGadget<F, I> {
+    type Parameters = PoseidonParameters<F>;
+
+    type InputVar = I;
+
+    type OutputVar = FpVar<F>;
+
+    fn digest<T: Borrow<Self::InputVar>>(
+        cs: ConstraintSystemRef<F>,
+        params: &Self::Parameters,
+        input: T,
+    ) -> Result<Self::OutputVar, SynthesisError> {
+        let input = input.borrow();
+        let mut sponge = PoseidonSpongeVar::new(cs, params);
+        sponge.absorb(input)?;
+
+        let res = sponge.squeeze_field_elements(1)?;
+        Ok(res[0].clone())
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::{
+        ark_std::UniformRand,
+        cryptographic_hash::{constraints::CryptoHashGadget, poseidon::PoseidonHash, CryptoHash},
+        merkle_tree::tests::test_utils::poseidon_parameters,
+    };
+    use ark_ed_on_bls12_381::Fr;
+    use ark_r1cs_std::{alloc::AllocVar, fields::fp::FpVar, R1CSVar};
+    use ark_relations::r1cs::ConstraintSystem;
+    use ark_std::test_rng;
+
+    use super::PoseidonHashGadget;
+
+    #[test]
+    fn test_digest() {
+        let cs = ConstraintSystem::new_ref();
+        let mut rng = test_rng();
+        let input = (0..14).map(|_| Fr::rand(&mut rng)).collect::<Vec<_>>();
+        let input_var = input
+            .iter()
+            .map(|x| FpVar::new_witness(cs.clone(), || Ok(*x)).unwrap())
+            .collect::<Vec<_>>();
+
+        let param = poseidon_parameters();
+
+        let native_result = PoseidonHash::<_, &[Fr]>::digest(&param, input.as_slice());
+        let var_result =
+            PoseidonHashGadget::<_, &[FpVar<_>]>::digest(cs.clone(), &param, input_var.as_slice())
+                .unwrap();
+
+        assert_eq!(native_result, var_result.value().unwrap());
+        assert!(cs.is_satisfied().unwrap());
+    }
+}
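The `CryptoHashGadget` trait above mirrors the native `CryptoHash` trait (defined in `src/cryptographic_hash/mod.rs` below), so circuit code can stay generic over the concrete hash. As a minimal sketch of the intended call pattern (the `enforce_digest` helper is hypothetical, not part of this PR), a circuit can recompute the digest from a witnessed input and pin it to an expected output variable:

```rust
use ark_crypto_primitives::cryptographic_hash::constraints::CryptoHashGadget;
use ark_ff::PrimeField;
use ark_r1cs_std::eq::EqGadget;
use ark_relations::r1cs::{ConstraintSystemRef, SynthesisError};

// Hypothetical helper: recompute H(input) in-circuit and require it to equal
// `expected`, for any gadget whose output supports equality constraints.
fn enforce_digest<CF, H>(
    cs: ConstraintSystemRef<CF>,
    param: &H::Parameters,
    input: &H::InputVar,
    expected: &H::OutputVar,
) -> Result<(), SynthesisError>
where
    CF: PrimeField,
    H: CryptoHashGadget<CF>,
    H::OutputVar: EqGadget<CF>,
{
    let actual = H::digest(cs, param, input)?;
    actual.enforce_equal(expected)
}
```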
diff --git a/src/cryptographic_hash/mod.rs b/src/cryptographic_hash/mod.rs
new file mode 100644
index 00000000..931e9dda
--- /dev/null
+++ b/src/cryptographic_hash/mod.rs
@@ -0,0 +1,29 @@
+pub mod poseidon;
+
+#[cfg(feature = "r1cs")]
+pub mod constraints;
+
+use ark_std::borrow::Borrow;
+
+use ark_std::rand::Rng;
+
+/// Any cryptographic hash implementation satisfies the following two
+/// properties:
+/// - **Preimage Resistance**: For any adversary, given `y = H(x)` where `x`
+///   is random, the probability of finding `z` such that `H(z) = y` is
+///   negligible.
+/// - **Collision Resistance**: It is computationally infeasible to find two
+///   distinct inputs that lead to the same output. This property is also
+///   satisfied by `CRH` trait implementors.
+pub trait CryptoHash {
+    /// Parameters for the crypto hash.
+    type Parameters: Sync;
+    /// Input of the hash.
+    type Input: Sync + ?Sized;
+    /// Output of the hash.
+    type Output;
+    /// Generate the parameters for the crypto hash using `rng`.
+    fn setup<R: Rng>(rng: &mut R) -> &Self::Parameters;
+
+    /// Given the input and parameters, compute the output.
+    fn digest<T: Borrow<Self::Input>>(param: &Self::Parameters, input: T) -> Self::Output;
+}
diff --git a/src/cryptographic_hash/poseidon.rs b/src/cryptographic_hash/poseidon.rs
new file mode 100644
index 00000000..8273496f
--- /dev/null
+++ b/src/cryptographic_hash/poseidon.rs
@@ -0,0 +1,41 @@
+use ark_std::borrow::Borrow;
+
+use ark_std::marker::PhantomData;
+
+use ark_std::rand::Rng;
+
+use ark_ff::PrimeField;
+use ark_sponge::{
+    poseidon::{PoseidonParameters, PoseidonSponge},
+    Absorb, CryptographicSponge,
+};
+
+use super::CryptoHash;
+
+/// A wrapper around the Poseidon cryptographic sponge.
+pub struct PoseidonHash<F: PrimeField, I: Absorb + Sync> {
+    _field: PhantomData<F>,
+    _input: PhantomData<I>,
+}
+
+impl<F: PrimeField, I: Absorb + Sync> CryptoHash for PoseidonHash<F, I> {
+    type Parameters = PoseidonParameters<F>;
+
+    type Input = I;
+
+    type Output = F;
+
+    fn setup<R: Rng>(_rng: &mut R) -> &Self::Parameters {
+        // Automatic generation of parameters is not implemented yet;
+        // for now, developers must supply the parameters themselves.
+        unimplemented!()
+    }
+
+    fn digest<T: Borrow<Self::Input>>(param: &Self::Parameters, input: T) -> Self::Output {
+        let input = input.borrow();
+
+        let mut sponge = PoseidonSponge::new(param);
+        sponge.absorb(input);
+
+        let res = sponge.squeeze_field_elements::<F>(1);
+        res[0]
+    }
+}
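On the native side, hashing is a single associated-function call. A minimal usage sketch under the same assumptions as this PR's tests (Jubjub's scalar field `Fr`; `poseidon_parameters()` is a stand-in for any valid `PoseidonParameters<Fr>`, borrowed here from the merkle tree test utilities):

```rust
use ark_crypto_primitives::cryptographic_hash::{poseidon::PoseidonHash, CryptoHash};
use ark_ed_on_bls12_381::Fr;
use ark_std::{test_rng, UniformRand};

fn main() {
    // Assumed helper: any concrete PoseidonParameters<Fr> works here.
    let param = poseidon_parameters();
    let mut rng = test_rng();
    let input: Vec<Fr> = (0..4).map(|_| Fr::rand(&mut rng)).collect();

    // The hash is selected by the type parameters: F = Fr, Input = &[Fr].
    let digest: Fr = PoseidonHash::<_, &[Fr]>::digest(&param, input.as_slice());

    // Determinism: same parameters and input give the same digest.
    assert_eq!(
        digest,
        PoseidonHash::<_, &[Fr]>::digest(&param, input.as_slice())
    );
}
```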
diff --git a/src/lib.rs b/src/lib.rs
index c2083553..01a7442a 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -28,6 +28,12 @@ pub mod crh;
 #[cfg(feature = "merkle_tree")]
 pub mod merkle_tree;

+#[cfg(feature = "cryptographic_hash")]
+pub mod cryptographic_hash;
+
+#[cfg(feature = "proof_of_work")]
+pub mod pow;
+
 #[cfg(feature = "encryption")]
 pub mod encryption;
diff --git a/src/merkle_tree/mod.rs b/src/merkle_tree/mod.rs
index 795bbbfd..5f1c81c4 100644
--- a/src/merkle_tree/mod.rs
+++ b/src/merkle_tree/mod.rs
@@ -3,25 +3,24 @@
 /// Defines a trait to chain two types of CRHs.
 use crate::crh::TwoToOneCRHScheme;
 use crate::{crh::CRHScheme, Error};
-use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
-use ark_std::borrow::Borrow;
-use ark_std::hash::Hash;
-use ark_std::vec::Vec;
+use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Read, SerializationError, Write};
+use ark_std::{borrow::Borrow, hash::Hash, vec::Vec};

 #[cfg(test)]
-mod tests;
+pub(crate) mod tests;

 #[cfg(feature = "r1cs")]
 pub mod constraints;

-/// Convert the hash digest in different layers by converting previous layer's output to
-/// `TargetType`, which is a `Borrow` to next layer's input.
+/// Convert the hash digest in different layers by converting previous layer's
+/// output to `TargetType`, which is a `Borrow` to next layer's input.
 pub trait DigestConverter<From, To: ?Sized> {
     type TargetType: Borrow<To>;
     fn convert(item: From) -> Result<Self::TargetType, Error>;
 }

-/// A trivial converter where digest of previous layer's hash is the same as next layer's input.
+/// A trivial converter where digest of previous layer's hash is the same as
+/// next layer's input.
 pub struct IdentityDigestConverter<T> {
     _prev_layer_digest: T,
 }
@@ -50,8 +49,10 @@ impl<T: CanonicalSerialize> DigestConverter<T, [u8]> for ByteDigestConverter<T>
 /// Merkle tree have three types of hashes.
 /// * `LeafHash`: Convert leaf to leaf digest
-/// * `TwoLeavesToOneHash`: Convert two leaf digests to one inner digest. This one can be a wrapped
-/// version `TwoHashesToOneHash`, which first converts leaf digest to inner digest.
+/// * `TwoLeavesToOneHash`: Convert two leaf digests to one inner digest. This
+/// one can be a wrapped
+/// version `TwoHashesToOneHash`, which first converts leaf digest to inner
+/// digest.
 /// * `TwoHashesToOneHash`: Compress two inner digests to one inner digest
 pub trait Config {
     type Leaf: ?Sized; // merkle tree does not store the leaf
@@ -77,12 +78,14 @@ pub trait Config {
         + CanonicalSerialize
         + CanonicalDeserialize;

-    // Tom's Note: in the future, if we want different hash function, we can simply add more
-    // types of digest here and specify a digest converter. Same for constraints.
+    // Tom's Note: in the future, if we want different hash function, we can simply
+    // add more types of digest here and specify a digest converter. Same for
+    // constraints.

     /// leaf -> leaf digest
-    /// If leaf hash digest and inner hash digest are different, we can create a new
-    /// leaf hash which wraps the original leaf hash and convert its output to `Digest`.
+    /// If leaf hash digest and inner hash digest are different, we can create a
+    /// new leaf hash which wraps the original leaf hash and convert its
+    /// output to `Digest`.
     type LeafHash: CRHScheme;
     /// 2 inner digest -> inner digest
     type TwoToOneHash: TwoToOneCRHScheme;
@@ -102,7 +105,8 @@ pub type LeafParam<P> = <<P as Config>::LeafHash as CRHScheme>::Parameters;
 /// .. / \ ....
 /// [I] J
 /// ```
-/// Suppose we want to prove I, then `leaf_sibling_hash` is J, `auth_path` is `[C,D]`
+/// Suppose we want to prove I, then `leaf_sibling_hash` is J, `auth_path` is
+/// `[C,D]`
 #[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)]
 #[derivative(
     Clone(bound = "P: Config"),
@@ -111,17 +115,20 @@ pub type LeafParam<P> = <<P as Config>::LeafHash as CRHScheme>::Parameters;
 )]
 pub struct Path<P: Config> {
     pub leaf_sibling_hash: P::LeafDigest,
-    /// The sibling of path node ordered from higher layer to lower layer (does not include root node).
+    /// The sibling of path node ordered from higher layer to lower layer (does
+    /// not include root node).
     pub auth_path: Vec<P::InnerDigest>,
     /// stores the leaf index of the node
     pub leaf_index: usize,
 }

 impl<P: Config> Path<P> {
-    /// The position of on_path node in `leaf_and_sibling_hash` and `non_leaf_and_sibling_hash_path`.
-    /// `position[i]` is 0 (false) iff `i`th on-path node from top to bottom is on the left.
+    /// The position of on_path node in `leaf_and_sibling_hash` and
+    /// `non_leaf_and_sibling_hash_path`. `position[i]` is 0 (false) iff
+    /// `i`th on-path node from top to bottom is on the left.
     ///
-    /// This function simply converts `self.leaf_index` to boolean array in big endian form.
+    /// This function simply converts `self.leaf_index` to boolean array in big
+    /// endian form.
     #[allow(unused)] // this function is actually used when r1cs feature is on
     fn position_list(&'_ self) -> impl '_ + Iterator<Item = bool> {
         (0..self.auth_path.len() + 1)
@@ -134,7 +141,8 @@ impl<P: Config> Path<P> {
     /// Verify that a leaf is at `self.index` of the merkle tree.
     /// * `leaf_size`: leaf size in number of bytes
     ///
-    /// `verify` infers the tree height by setting `tree_height = self.auth_path.len() + 2`
+    /// `verify` infers the tree height by setting `tree_height =
+    /// self.auth_path.len() + 2`
     pub fn verify<L: Borrow<P::Leaf>>(
         &self,
         leaf_hash_params: &LeafParam<P>,
@@ -181,8 +189,9 @@ impl<P: Config> Path<P> {
 /// `index` is the first `path.len()` bits of
 /// the position of tree.
 ///
-/// If the least significant bit of `index` is 0, then `sibling` will be left and `computed` will be right.
-/// Otherwise, `sibling` will be right and `computed` will be left.
+/// If the least significant bit of `index` is 0, then `sibling` will be left
+/// and `computed` will be right. Otherwise, `sibling` will be right and
+/// `computed` will be left.
 ///
 /// Returns: (left, right)
 fn select_left_right_child<L: Clone>(
@@ -200,17 +209,21 @@ fn select_left_right_child<L: Clone>(
 }

 /// Defines a merkle tree data structure.
-/// This merkle tree has runtime fixed height, and assumes number of leaves is 2^height.
+/// This merkle tree has runtime fixed height, and assumes number of leaves is
+/// 2^height.
 ///
 /// TODO: add RFC-6962 compatible merkle tree in the future.
-/// For this release, padding will not be supported because of security concerns: if the leaf hash and two to one hash uses same underlying
-/// CRH, a malicious prover can prove a leaf while the actual node is an inner node. In the future, we can prefix leaf hashes in different layers to
+/// For this release, padding will not be supported because of security
+/// concerns: if the leaf hash and two to one hash uses same underlying
+/// CRH, a malicious prover can prove a leaf while the actual node is an inner
+/// node. In the future, we can prefix leaf hashes in different layers to
 /// solve the problem.
 #[derive(Derivative)]
 #[derivative(Clone(bound = "P: Config"))]
 pub struct MerkleTree<P: Config> {
-    /// stores the non-leaf nodes in level order. The first element is the root node.
-    /// The ith nodes (starting at 1st) children are at indices `2*i`, `2*i+1`
+    /// stores the non-leaf nodes in level order. The first element is the root
+    /// node. The ith nodes (starting at 1st) children are at indices `2*i`,
+    /// `2*i+1`
     non_leaf_nodes: Vec<P::InnerDigest>,
     /// store the hash of leaf nodes from left to right
     leaf_nodes: Vec<P::LeafDigest>,
@@ -224,7 +237,8 @@ pub struct MerkleTree<P: Config> {

 impl<P: Config> MerkleTree<P> {
     /// Create an empty merkle tree such that all leaves are zero-filled.
-    /// Consider using a sparse merkle tree if you need the tree to be low memory
+    /// Consider using a sparse merkle tree if you need the tree to be low
+    /// memory
     pub fn blank(
         leaf_hash_param: &LeafParam<P>,
         two_to_one_hash_param: &TwoToOneParam<P>,
@@ -285,9 +299,10 @@ impl<P: Config> MerkleTree<P> {
         let start_index = level_indices.pop().unwrap();
         let upper_bound = left_child(start_index);
         for current_index in start_index..upper_bound {
-            // `left_child(current_index)` and `right_child(current_index) returns the position of
-            // leaf in the whole tree (represented as a list in level order). We need to shift it
-            // by `-upper_bound` to get the index in `leaf_nodes` list.
+            // `left_child(current_index)` and `right_child(current_index)` return the
+            // position of leaf in the whole tree (represented as a list in
+            // level order). We need to shift it by `-upper_bound` to get
+            // the index in `leaf_nodes` list.
             let left_leaf_index = left_child(current_index) - upper_bound;
             let right_leaf_index = right_child(current_index) - upper_bound;
             // compute hash
@@ -349,9 +364,11 @@ impl<P: Config> MerkleTree<P> {
             self.leaf_nodes[index - 1].clone()
         };

-        // path.len() = `tree height - 2`, the two missing elements being the leaf sibling hash and the root
+        // path.len() = `tree height - 2`, the two missing elements being the leaf
+        // sibling hash and the root
        let mut path = Vec::with_capacity(tree_height - 2);
-        // Iterate from the bottom layer after the leaves, to the top, storing all sibling node's hash values.
+        // Iterate from the bottom layer after the leaves, to the top, storing all
+        // sibling node's hash values.
         let mut current_node = parent(leaf_index_in_tree).unwrap();
         while !is_root(current_node) {
             let sibling_node = sibling(current_node).unwrap();
@@ -371,8 +388,9 @@ impl<P: Config> MerkleTree<P> {
         })
     }

-    /// Given the index and new leaf, return the hash of leaf and an updated path in order from root to bottom non-leaf level.
-    /// This does not mutate the underlying tree.
+    /// Given the index and new leaf, return the hash of leaf and an updated
+    /// path in order from root to bottom non-leaf level. This does not
+    /// mutate the underlying tree.
     fn updated_path<T: Borrow<P::Leaf>>(
         &self,
         index: usize,
@@ -435,7 +453,8 @@ impl<P: Config> MerkleTree<P> {
     /// .. / \ ....
     /// [I] J
     /// ```
-    /// update(3, {new leaf}) would swap the leaf value at `[I]` and cause a recomputation of `[A]`, `[B]`, and `[E]`.
+    /// update(3, {new leaf}) would swap the leaf value at `[I]` and cause a
+    /// recomputation of `[A]`, `[B]`, and `[E]`.
     pub fn update(&mut self, index: usize, new_leaf: &P::Leaf) -> Result<(), crate::Error> {
         assert!(index < self.leaf_nodes.len(), "index out of range");
         let (updated_leaf_hash, mut updated_path) = self.updated_path(index, new_leaf)?;
@@ -448,7 +467,8 @@ impl<P: Config> MerkleTree<P> {
         Ok(())
     }

-    /// Update the leaf and check if the updated root is equal to `asserted_new_root`.
+    /// Update the leaf and check if the updated root is equal to
+    /// `asserted_new_root`.
     ///
     /// Tree will not be modified if the check fails.
     pub fn check_update<T: Borrow<P::Leaf>>(
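As the `position_list` doc above notes, the on-path positions are just `leaf_index` written out as big-endian bits. A standalone sketch of that conversion (hypothetical helper, not crate code), using the crate's convention that a path has `auth_path.len() + 1` on-path nodes below the root:

```rust
fn main() {
    // Big-endian bits of `leaf_index`, one per on-path node from top to
    // bottom; `false` means the node is a left child.
    fn position_list(leaf_index: usize, auth_path_len: usize) -> Vec<bool> {
        (0..auth_path_len + 1)
            .map(|i| (leaf_index >> (auth_path_len - i)) & 1 != 0)
            .collect()
    }

    // 8 leaves => auth_path_len = 2. Leaf index 3 = 0b011:
    // go left at the top, then right, then right.
    assert_eq!(position_list(3, 2), vec![false, true, true]);
}
```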

diff --git a/src/merkle_tree/tests/mod.rs b/src/merkle_tree/tests/mod.rs
index d328b352..2bbc2d6b 100644
--- a/src/merkle_tree/tests/mod.rs
+++ b/src/merkle_tree/tests/mod.rs
@@ -1,6 +1,6 @@
 #[cfg(feature = "r1cs")]
 mod constraints;
-mod test_utils;
+pub(crate) mod test_utils;

 mod bytes_mt_tests {

@@ -119,11 +119,13 @@ mod bytes_mt_tests {
 }

 mod field_mt_tests {
-    use crate::crh::poseidon;
-    use crate::merkle_tree::tests::test_utils::poseidon_parameters;
-    use crate::merkle_tree::{Config, IdentityDigestConverter, MerkleTree};
+    use crate::{
+        crh::poseidon,
+        merkle_tree::{
+            tests::test_utils::poseidon_parameters, Config, IdentityDigestConverter, MerkleTree,
+        },
+    };
     use ark_std::{test_rng, vec::Vec, One, UniformRand};

     type F = ark_ed_on_bls12_381::Fr;
     type H = poseidon::CRH<F>;
     type TwoToOneH = poseidon::TwoToOneCRH<F>;
diff --git a/src/pow/constraints/mod.rs b/src/pow/constraints/mod.rs
new file mode 100644
index 00000000..ef22a65a
--- /dev/null
+++ b/src/pow/constraints/mod.rs
@@ -0,0 +1,41 @@
+pub mod poseidon;
+
+use ark_std::borrow::Borrow;
+
+use ark_ff::PrimeField;
+use ark_r1cs_std::{alloc::AllocVar, boolean::Boolean};
+use ark_relations::r1cs::{ConstraintSystemRef, SynthesisError};
+
+use crate::cryptographic_hash::{constraints::CryptoHashGadget, CryptoHash};
+
+use super::PoW;
+
+/// R1CS gadget for proof of work.
+pub trait PoWGadget<CF: PrimeField>: CryptoHashGadget<CF> {
+    type NonceVar;
+    /// Given the input var and nonce var, check whether `H(input||nonce)` is
+    /// a valid proof of work under the given difficulty.
+    fn verify_pow<T: Borrow<Self::InputVar>>(
+        cs: ConstraintSystemRef<CF>,
+        param: &Self::Parameters,
+        input: T,
+        nonce: &Self::NonceVar,
+        difficulty: usize,
+    ) -> Result<Boolean<CF>, SynthesisError>;
+}
+
+/// Extension trait for a crypto hash to get the gadget.
+pub trait CryptoHashWithGadget<CF: PrimeField>: CryptoHash
+where
+    <Self::Gadget as CryptoHashGadget<CF>>::OutputVar: AllocVar<Self::Output, CF>,
+{
+    type Gadget: CryptoHashGadget<CF>;
+}
+
+/// Extension trait for PoW to get the gadget.
+pub trait PoWWithGadget<CF: PrimeField>: PoW
+where
+    <Self::Gadget as CryptoHashGadget<CF>>::OutputVar: AllocVar<Self::Output, CF>,
+{
+    type Gadget: PoWGadget<CF>;
+}
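`PoWGadget::verify_pow` returns a `Boolean` rather than enforcing anything itself, so the circuit designer chooses what to do with the check. A minimal sketch (hypothetical helper, not part of this PR) that turns the advisory bit into a hard constraint:

```rust
use ark_ff::PrimeField;
use ark_r1cs_std::{boolean::Boolean, eq::EqGadget};
use ark_relations::r1cs::SynthesisError;

// Pin the Boolean returned by `verify_pow` to TRUE, so that an invalid
// nonce makes the constraint system unsatisfiable.
fn require_valid_pow<CF: PrimeField>(ok: &Boolean<CF>) -> Result<(), SynthesisError> {
    ok.enforce_equal(&Boolean::TRUE)
}
```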
diff --git a/src/pow/constraints/poseidon.rs b/src/pow/constraints/poseidon.rs
new file mode 100644
index 00000000..52060c24
--- /dev/null
+++ b/src/pow/constraints/poseidon.rs
@@ -0,0 +1,94 @@
+use ark_std::borrow::Borrow;
+
+use ark_relations::r1cs::{ConstraintSystemRef, SynthesisError};
+use ark_sponge::{
+    constraints::{AbsorbGadget, CryptographicSpongeVar},
+    poseidon::constraints::PoseidonSpongeVar,
+};
+
+use ark_ff::PrimeField;
+use ark_r1cs_std::{boolean::Boolean, fields::fp::FpVar, ToBitsGadget};
+
+use crate::cryptographic_hash::constraints::poseidon::PoseidonHashGadget;
+
+use super::PoWGadget;
+
+impl<F: PrimeField, I: AbsorbGadget<F>> PoWGadget<F> for PoseidonHashGadget<F, I> {
+    type NonceVar = FpVar<F>;
+
+    fn verify_pow<T: Borrow<Self::InputVar>>(
+        cs: ConstraintSystemRef<F>,
+        params: &Self::Parameters,
+        input: T,
+        nonce: &Self::NonceVar,
+        difficulty: usize,
+    ) -> Result<Boolean<F>, SynthesisError> {
+        assert!(F::size_in_bits() >= difficulty, "difficulty is too large");
+
+        let mut sponge = PoseidonSpongeVar::new(cs, params);
+        sponge.absorb(input.borrow())?;
+        sponge.absorb(nonce)?;
+
+        let res = sponge.squeeze_field_elements(1)?[0].clone();
+        // we require the least significant `difficulty` bits to be zero
+        let mut result = Boolean::TRUE;
+        res.to_bits_le()?
+            .into_iter()
+            .take(difficulty)
+            .try_for_each(|b| -> Result<(), SynthesisError> {
+                result = result.and(&b.not())?;
+                Ok(())
+            })?;
+        Ok(result)
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::ark_std::UniformRand;
+    use crate::pow::constraints::PoWGadget;
+    use crate::{
+        cryptographic_hash::{constraints::poseidon::PoseidonHashGadget, poseidon::PoseidonHash},
+        merkle_tree::tests::test_utils::poseidon_parameters,
+        pow::PoW,
+    };
+    use ark_ed_on_bls12_381::Fr;
+    use ark_r1cs_std::R1CSVar;
+    use ark_r1cs_std::{alloc::AllocVar, fields::fp::FpVar};
+    use ark_relations::r1cs::ConstraintSystem;
+    use ark_std::test_rng;
+
+    #[test]
+    fn test_proof_of_work() {
+        const BATCH_SIZE: usize = 64;
+        const DIFFICULTY: usize = 14;
+        let cs = ConstraintSystem::new_ref();
+        let param = poseidon_parameters();
+        let mut rng = test_rng();
+        let message = (0..5).map(|_| Fr::rand(&mut rng)).collect::<Vec<_>>();
+        let message_var = message
+            .iter()
+            .map(|x| FpVar::new_witness(cs.clone(), || Ok(x.clone())).unwrap())
+            .collect::<Vec<_>>();
+        let (proof, _) = PoseidonHash::<_, &[_]>::generate_pow(
+            &param,
+            &mut rng,
+            message.as_slice(),
+            DIFFICULTY,
+            BATCH_SIZE,
+        );
+
+        let proof_var = FpVar::new_witness(cs.clone(), || Ok(proof)).unwrap();
+        let result = PoseidonHashGadget::<_, &[_]>::verify_pow(
+            cs.clone(),
+            &param,
+            message_var.as_slice(),
+            &proof_var,
+            DIFFICULTY,
+        )
+        .unwrap();
+        assert!(result.value().unwrap());
+        assert!(cs.is_satisfied().unwrap());
+    }
+}
diff --git a/src/pow/mod.rs b/src/pow/mod.rs
new file mode 100644
index 00000000..48b01dc4
--- /dev/null
+++ b/src/pow/mod.rs
@@ -0,0 +1,109 @@
+#[cfg(feature = "r1cs")]
+pub mod constraints;
+pub mod poseidon;
+
+use ark_std::borrow::Borrow;
+
+use ark_std::rand::Rng;
+
+use ark_std::vec::Vec;
+
+#[cfg(feature = "parallel")]
+use rayon::prelude::*;
+
+use crate::cryptographic_hash::CryptoHash;
+
+/// An extension trait for `CryptoHash`. Any implementation can be used for
+/// proof of work.
+///
+/// A valid proof of work for input `M` under difficulty `k` is a nonce such
+/// that `verify(M, Nonce, k)` outputs true. In most cases, `verify` outputs
+/// true when the bit composition of `H(M||Nonce)` has `k` trailing zeroes,
+/// but this trait allows implementations to define their own `verify` logic.
+pub trait PoW: CryptoHash {
+    /// Nonce used with the input, such that `H(input||nonce)` makes
+    /// `verify(input, nonce, difficulty)` output true for a valid proof of
+    /// work.
+    type Nonce: Clone + Sync;
+
+    /// Given the input and nonce, check whether `H(input||nonce)` is a valid
+    /// proof of work under the given difficulty.
+    fn verify_pow<T: Borrow<Self::Input>>(
+        param: &Self::Parameters,
+        input: T,
+        nonce: &Self::Nonce,
+        difficulty: usize,
+    ) -> bool;
+
+    /// Given the input and a list of nonces, batch-verify the correctness of
+    /// the nonces under the given difficulty.
+    fn batch_verify<T: Borrow<Self::Input>>(
+        param: &Self::Parameters,
+        input: T,
+        nonces: &[Self::Nonce],
+        difficulty: usize,
+    ) -> Vec<bool> {
+        let input = input.borrow();
+        cfg_iter!(nonces)
+            .map(|nonce| Self::verify_pow(param, input, nonce, difficulty))
+            .collect()
+    }
+
+    /// Return the initial nonce that can be used for PoW generation.
+    fn initial_nonce<R: Rng>(param: &Self::Parameters, rng: &mut R) -> Self::Nonce;
+
+    /// Return the next nonce for PoW generation.
+    fn next_nonce(param: &Self::Parameters, nonce: &Self::Nonce) -> Self::Nonce;
+
+    /// Generate the initial batch of nonces.
+    fn batch_nonce(
+        param: &Self::Parameters,
+        initial_nonce: Self::Nonce,
+        batch_size: usize,
+    ) -> Vec<Self::Nonce> {
+        let mut result = Vec::with_capacity(batch_size);
+        result.push(initial_nonce);
+        for _ in 0..batch_size - 1 {
+            result.push(Self::next_nonce(param, result.last().unwrap()));
+        }
+
+        result
+    }
+
+    /// Generate a nonce as proof of work such that `H(input||nonce)` is valid
+    /// under the given difficulty.
+    /// This function repeatedly runs `verify` on a batch of nonces, and
+    /// returns the first nonce for which `verify` returns true.
+    ///
+    /// This function returns the first valid nonce and the number of batches
+    /// it has iterated over.
+    ///
+    /// When the `parallel` feature is on, all nonces in a batch are checked
+    /// in parallel.
+    fn generate_pow<R: Rng, T: Borrow<Self::Input>>(
+        param: &Self::Parameters,
+        rng: &mut R,
+        input: T,
+        difficulty: usize,
+        batch_size: usize,
+    ) -> (Self::Nonce, usize) {
+        let input = input.borrow();
+        let mut nonces = Self::batch_nonce(param, Self::initial_nonce(param, rng), batch_size);
+        let mut counter = 0;
+        loop {
+            if let Some((i, _)) = Self::batch_verify(param, input, &nonces, difficulty)
+                .into_iter()
+                .enumerate()
+                .filter(|(_, v)| *v)
+                .next()
+            {
+                return (nonces[i].clone(), counter);
+            };
+            let last_nonce = nonces.last().unwrap().clone();
+            nonces = Self::batch_nonce(param, Self::next_nonce(param, &last_nonce), batch_size);
+            counter += 1;
+        }
+    }
+}
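Each trailing output bit is zero with probability about 1/2, so difficulty `k` costs roughly `2^k` hash evaluations, and `generate_pow` should report about `2^k / batch_size` batches on average. A back-of-the-envelope sketch in plain Rust:

```rust
fn main() {
    let difficulty = 14u32; // matches DIFFICULTY in the tests below
    let batch_size = 64u64; // matches BATCH_SIZE in the tests below

    // A random nonce passes with probability 2^-difficulty, so the
    // expected number of attempts is 2^difficulty.
    let expected_attempts = 1u64 << difficulty;
    let expected_batches = expected_attempts / batch_size;

    println!("~{} hashes, ~{} batches", expected_attempts, expected_batches);
    assert_eq!(expected_attempts, 16_384);
    assert_eq!(expected_batches, 256);
}
```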
diff --git a/src/pow/poseidon.rs b/src/pow/poseidon.rs
new file mode 100644
index 00000000..16b02599
--- /dev/null
+++ b/src/pow/poseidon.rs
@@ -0,0 +1,81 @@
+use ark_std::borrow::Borrow;
+
+use ark_std::rand::Rng;
+
+use ark_sponge::{poseidon::PoseidonSponge, Absorb, CryptographicSponge};
+
+use ark_ff::{BitIteratorLE, PrimeField};
+
+use ark_std::vec::Vec;
+
+use crate::cryptographic_hash::poseidon::PoseidonHash;
+
+use super::PoW;
+
+impl<F: PrimeField + Absorb, I: Absorb + Sync> PoW for PoseidonHash<F, I> {
+    type Nonce = F;
+
+    fn verify_pow<T: Borrow<Self::Input>>(
+        param: &Self::Parameters,
+        input: T,
+        nonce: &Self::Nonce,
+        difficulty: usize,
+    ) -> bool {
+        assert!(F::size_in_bits() >= difficulty, "difficulty is too large");
+
+        let input = input.borrow();
+
+        let mut sponge = PoseidonSponge::new(param);
+        sponge.absorb(input);
+        sponge.absorb(nonce);
+
+        let res = sponge.squeeze_field_elements::<F>(1)[0];
+        // we require the least significant `difficulty` bits to be zero
+        let res = BitIteratorLE::new(res.into_repr())
+            .take(difficulty)
+            .collect::<Vec<_>>();
+        res.into_iter().all(|x| !x)
+    }
+
+    fn initial_nonce<R: Rng>(_param: &Self::Parameters, rng: &mut R) -> Self::Nonce {
+        // Start from a random nonce.
+        F::rand(rng)
+    }
+
+    fn next_nonce(_param: &Self::Parameters, nonce: &Self::Nonce) -> Self::Nonce {
+        *nonce + F::one()
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use ark_std::test_rng;
+
+    use crate::{merkle_tree::tests::test_utils::poseidon_parameters, pow::PoW};
+
+    use super::PoseidonHash;
+
+    #[test]
+    fn test_pow() {
+        const BATCH_SIZE: usize = 64;
+        const DIFFICULTY: usize = 14;
+        let param = poseidon_parameters();
+        let message = vec![0x11, 0x12, 0x13, 0x14, 0x15];
+        let mut rng = test_rng();
+        #[allow(unused)]
+        let (proof, num_batches_iterated) = PoseidonHash::<_, &[u32]>::generate_pow(
+            &param,
+            &mut rng,
+            message.as_slice(),
+            DIFFICULTY,
+            BATCH_SIZE,
+        );
+        #[cfg(feature = "std")]
+        println!(
+            "total number of iterations: {}x{} = {}",
+            num_batches_iterated,
+            BATCH_SIZE,
+            num_batches_iterated * BATCH_SIZE
+        );
+        let result =
+            PoseidonHash::<_, &[u32]>::verify_pow(&param, message.as_slice(), &proof, DIFFICULTY);
+        assert!(result);
+    }
+}
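The native `verify_pow` above reads the digest's bits in little-endian order (`BitIteratorLE`) and accepts when the first `difficulty` of them are all zero. For intuition, the same predicate on a plain integer is just a trailing-zeros check (standalone sketch, not crate code):

```rust
fn main() {
    // Accept iff the `difficulty` least significant bits are all zero.
    fn is_valid(digest: u64, difficulty: u32) -> bool {
        digest.trailing_zeros() >= difficulty
    }

    assert!(is_valid(0b1010_0000, 5)); // 5 trailing zeros
    assert!(!is_valid(0b1010_0100, 5)); // only 2 trailing zeros
}
```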