Skip to content

Commit

Permalink
Chore/fix clippy (#407)
Browse files Browse the repository at this point in the history
This PR addresses warnings produced by `cargo clippy`. It also adds an
example pre-commit hook that people can install if they wish, so that
`cargo fmt --all -- --check` and `cargo clippy` run every time you
commit locally, as well as a guide to installing the hook.
  • Loading branch information
Zyouell authored Nov 25, 2024
1 parent cc50651 commit efcb3d6
Show file tree
Hide file tree
Showing 80 changed files with 562 additions and 673 deletions.
4 changes: 4 additions & 0 deletions custom-hooks/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
## Hook Installation

To install the hooks in this folder, navigate from the root directory to `./.git/hooks`, then replace the corresponding file with the one in this folder. If you haven't installed any hooks before, the file will be called `<file-name>.sample` instead of just `<file-name>`. You may also have to make the files inside `./.git/hooks` executable by running `chmod +x ./.git/hooks/*` from the project root.
22 changes: 22 additions & 0 deletions custom-hooks/pre-commit
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
#!/bin/sh

set -eu

if cargo fmt --all -- --check
then
echo "cargo fmt OK"
else
echo "There are some code style issues."
echo "Run cargo fmt first."
exit 1
fi

if cargo clippy --all-targets -- -D warnings
then
echo "cargo clippy OK"
else
echo "There are some clippy issues."
exit 1
fi

exit 0
2 changes: 1 addition & 1 deletion groth16-framework/tests/common/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,4 +11,4 @@ pub(crate) use context::TestContext;
pub(crate) use io::{TestQueryInput, TestQueryOutput};

pub(crate) const NUM_PREPROCESSING_IO: usize = verifiable_db::ivc::NUM_IO;
pub(crate) const NUM_QUERY_IO: usize = verifiable_db::query::PI_LEN::<MAX_NUM_ITEMS_PER_OUTPUT>;
pub(crate) const NUM_QUERY_IO: usize = verifiable_db::query::pi_len::<MAX_NUM_ITEMS_PER_OUTPUT>();
4 changes: 1 addition & 3 deletions groth16-framework/tests/common/query.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,9 @@ use mp2_common::{
};
use plonky2::field::types::PrimeField64;
use verifiable_db::{
query::api::CircuitInput as QueryInput,
revelation::{api::CircuitInput, PublicInputs as RevelationPI},
test_utils::{
TestRevelationData, MAX_NUM_COLUMNS, MAX_NUM_ITEMS_PER_OUTPUT, MAX_NUM_OUTPUTS,
MAX_NUM_PLACEHOLDERS, MAX_NUM_PREDICATE_OPS, MAX_NUM_RESULT_OPS,
TestRevelationData, MAX_NUM_ITEMS_PER_OUTPUT, MAX_NUM_OUTPUTS, MAX_NUM_PLACEHOLDERS,
},
};

Expand Down
2 changes: 1 addition & 1 deletion groth16-framework/tests/query.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
//! Test the Groth16 proving process for the query circuits.
#![allow(incomplete_features)]
#![feature(generic_const_exprs)]

mod common;
Expand Down
2 changes: 1 addition & 1 deletion mp2-common/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
//! Utility functions and gadgets
#![allow(incomplete_features)]
#![feature(generic_const_exprs)]
#![feature(generic_arg_infer)]
#![feature(const_for)]
Expand Down
1 change: 1 addition & 0 deletions mp2-common/src/rlp.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ const MAX_LEN_BYTES: usize = 2;
/// Maximum size a key can have inside a MPT node.
/// 33 bytes because key is compacted encoded, so it can add up to 1 byte more.
const MAX_ENC_KEY_LEN: usize = 33;

/// Simply the maximum number of nibbles a key can have.
pub const MAX_KEY_NIBBLE_LEN: usize = 64;

Expand Down
2 changes: 1 addition & 1 deletion mp2-common/src/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ pub type PackedMappingKeyTarget = Array<U32Target, PACKED_MAPPING_KEY_LEN>;

/// Regular hash output function - it can be generated from field elements using
/// poseidon with the output serialized or via regular hash functions.
#[derive(Clone, Hash, Default, Debug, Serialize, Deserialize, Deref, PartialEq, Eq)]
#[derive(Clone, Hash, Default, Debug, Serialize, Deserialize, Deref, PartialEq, Eq, Copy)]
pub struct HashOutput(pub [u8; 32]);
impl AsRef<[u8]> for &HashOutput {
fn as_ref(&self) -> &[u8] {
Expand Down
7 changes: 4 additions & 3 deletions mp2-v1/src/final_extraction/api.rs
Original file line number Diff line number Diff line change
Expand Up @@ -245,16 +245,17 @@ mod tests {

use crate::{
final_extraction::{
base_circuit::{
test::ProofsPi, BLOCK_SET_NUM_IO, CONTRACT_SET_NUM_IO, VALUE_SET_NUM_IO,
},
base_circuit::{test::ProofsPi, CONTRACT_SET_NUM_IO, VALUE_SET_NUM_IO},
lengthed_circuit::LENGTH_SET_NUM_IO,
},
length_extraction,
};

use super::{CircuitInput, PublicParameters};

pub(crate) const BLOCK_SET_NUM_IO: usize =
crate::block_extraction::public_inputs::PublicInputs::<F>::TOTAL_LEN;

#[test]
fn test_final_extraction_api() {
let block_circuit = TestDummyCircuit::<BLOCK_SET_NUM_IO>::build();
Expand Down
5 changes: 1 addition & 4 deletions mp2-v1/src/final_extraction/base_circuit.rs
Original file line number Diff line number Diff line change
Expand Up @@ -113,9 +113,6 @@ pub(crate) struct BaseCircuitProofWires {

pub(crate) const CONTRACT_SET_NUM_IO: usize = contract_extraction::PublicInputs::<F>::TOTAL_LEN;
pub(crate) const VALUE_SET_NUM_IO: usize = values_extraction::PublicInputs::<F>::TOTAL_LEN;
// WARN: clippy is wrong on this one, it is used somewhere else.
pub(crate) const BLOCK_SET_NUM_IO: usize =
block_extraction::public_inputs::PublicInputs::<F>::TOTAL_LEN;

#[derive(Clone, Debug)]
pub struct BaseCircuitInput {
Expand Down Expand Up @@ -430,7 +427,7 @@ pub(crate) mod test {
pub(crate) fn random() -> Self {
let value_h = HashOut::<F>::rand().to_bytes().pack(Endianness::Little);
let key = random_vector(64);
let ptr = usize::max_value();
let ptr = usize::MAX;
let value_dv = Point::rand();
let value_dm = Point::rand();
let n = 10;
Expand Down
22 changes: 3 additions & 19 deletions mp2-v1/src/final_extraction/merge_circuit.rs
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ pub struct MergeTableWires {
}

impl MergeTable {
pub fn build<'a>(
pub fn build(
b: &mut CBuilder,
block_pi: &[Target],
contract_pi: &[Target],
Expand Down Expand Up @@ -125,12 +125,6 @@ pub(crate) struct MergeCircuitInput {
pub(crate) merge: MergeTable,
}

impl MergeCircuitInput {
pub(crate) fn new(base: BaseCircuitProofInputs, merge: MergeTable) -> Self {
Self { base, merge }
}
}

impl CircuitLogicWires<F, D, 0> for MergeTableRecursiveWires {
type CircuitBuilderParams = FinalExtractionBuilderParams;

Expand Down Expand Up @@ -170,16 +164,10 @@ mod test {
use super::*;
use base_circuit::test::{ProofsPi, ProofsPiTarget};
use mp2_common::{
digest::SplitDigestPoint,
group_hashing::{field_hashed_scalar_mul, weierstrass_to_point as wp},
utils::ToFields,
C, D, F,
digest::SplitDigestPoint, group_hashing::weierstrass_to_point as wp, C, D, F,
};
use mp2_test::circuit::{run_circuit, UserCircuit};
use plonky2::{
field::types::Sample,
iop::witness::{PartialWitness, WitnessWrite},
};
use plonky2::iop::witness::WitnessWrite;

use super::MergeTableWires;

Expand Down Expand Up @@ -221,10 +209,6 @@ mod test {
}
}

fn random_field_vector(n: usize) -> Vec<F> {
(0..n).map(|_| F::rand()).collect()
}

#[test]
fn test_final_merge_circuit() {
let pis_a = ProofsPi::random();
Expand Down
8 changes: 4 additions & 4 deletions mp2-v1/src/indexing/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ impl<T: Eq + Default + std::fmt::Debug + Clone> LagrangeNode for RowPayload<T> {
}

fn hash(&self) -> HashOutput {
self.hash.clone()
self.hash
}

fn min(&self) -> U256 {
Expand All @@ -37,7 +37,7 @@ impl<T: Eq + Default + std::fmt::Debug + Clone> LagrangeNode for RowPayload<T> {
}

fn embedded_hash(&self) -> HashOutput {
self.cell_root_hash.clone().unwrap()
self.cell_root_hash.unwrap()
}
}

Expand All @@ -47,7 +47,7 @@ impl<T> LagrangeNode for IndexNode<T> {
}

fn hash(&self) -> HashOutput {
self.node_hash.clone()
self.node_hash
}

fn min(&self) -> U256 {
Expand All @@ -59,6 +59,6 @@ impl<T> LagrangeNode for IndexNode<T> {
}

fn embedded_hash(&self) -> HashOutput {
self.row_tree_hash.clone()
self.row_tree_hash
}
}
1 change: 1 addition & 0 deletions mp2-v1/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
//! Circuits for v1 of Lagrange Proof Network (LPN)
#![allow(incomplete_features)]
#![allow(clippy::large_enum_variant)]
// Add this to allow generic const expressions, e.g. `PAD_LEN(NODE_LEN)`.
#![feature(generic_const_exprs)]
// Add this so we don't need to always specify const generic in generic
Expand Down
24 changes: 11 additions & 13 deletions mp2-v1/src/query/planner.rs
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ where
C: ContextProvider,
{
let (query_for_min, query_for_max) =
bracket_secondary_index(&table_name, settings, primary as Epoch, &bounds);
bracket_secondary_index(&table_name, settings, primary as Epoch, bounds);

// try first with lower node than secondary min query bound
let to_be_proven_node =
Expand Down Expand Up @@ -108,13 +108,12 @@ async fn get_successor_node_with_same_value(
successor_ctx = row_tree
.node_context_at(successor_ctx.left.as_ref().unwrap(), primary as Epoch)
.await
.expect(
format!(
.unwrap_or_else(|| {
panic!(
"Node context not found for left child of node {:?}",
successor_ctx.node_id
)
.as_str(),
);
});
}
Some(successor_ctx)
} else {
Expand Down Expand Up @@ -192,13 +191,12 @@ async fn get_predecessor_node_with_same_value(
predecessor_ctx = row_tree
.node_context_at(predecessor_ctx.right.as_ref().unwrap(), primary as Epoch)
.await
.expect(
format!(
.unwrap_or_else(|| {
panic!(
"Node context not found for right child of node {:?}",
predecessor_ctx.node_id
)
.as_str(),
);
});
}
Some(predecessor_ctx)
} else {
Expand Down Expand Up @@ -299,23 +297,23 @@ async fn find_node_for_proof(
// from the value `value` stored in the node with key `row_key`; the node found is the one to be
// employed to generate the non-existence proof
let mut successor_ctx =
get_successor_node_with_same_value(&row_tree, &node_ctx, value, primary).await;
get_successor_node_with_same_value(row_tree, &node_ctx, value, primary).await;
while successor_ctx.is_some() {
node_ctx = successor_ctx.unwrap();
successor_ctx =
get_successor_node_with_same_value(&row_tree, &node_ctx, value, primary).await;
get_successor_node_with_same_value(row_tree, &node_ctx, value, primary).await;
}
} else {
// starting from the node with key `row_key`, we iterate over its predecessor nodes in the tree,
// until we found a node that either has no predecessor or whose predecessor stores a value different
// from the value `value` stored in the node with key `row_key`; the node found is the one to be
// employed to generate the non-existence proof
let mut predecessor_ctx =
get_predecessor_node_with_same_value(&row_tree, &node_ctx, value, primary).await;
get_predecessor_node_with_same_value(row_tree, &node_ctx, value, primary).await;
while predecessor_ctx.is_some() {
node_ctx = predecessor_ctx.unwrap();
predecessor_ctx =
get_predecessor_node_with_same_value(&row_tree, &node_ctx, value, primary).await;
get_predecessor_node_with_same_value(row_tree, &node_ctx, value, primary).await;
}
}

Expand Down
32 changes: 20 additions & 12 deletions mp2-v1/src/values_extraction/api.rs
Original file line number Diff line number Diff line change
Expand Up @@ -133,6 +133,20 @@ pub fn generate_proof(
circuit_params.generate_proof(circuit_type)?.serialize()
}

/// Common interface implemented by the branch-circuit structs that the
/// `impl_branch_circuits!` macro generates (see the macro below, which
/// provides `impl BranchMacroTrait for $struct_name`).
pub trait BranchMacroTrait {
/// Builds all the branch circuits from the universal-verifier circuit builder.
fn new(builder: &CircuitWithUniversalVerifierBuilder<F, D, NUM_IO>) -> Self;

/// Returns the circuit digests of the branch circuits, to be fed to the
/// recursive framework's circuit set.
fn circuit_set(&self) -> Vec<HashOut<F>>;

/// Generates a proof for `branch_node` from its children's proofs.
/// NOTE(review): semantics of `is_simple_aggregation` are defined by the
/// macro-generated implementation, not visible here — confirm there.
fn generate_proof(
&self,
set: &RecursiveCircuits<F, C, D>,
branch_node: InputNode,
child_proofs: Vec<ProofWithVK>,
is_simple_aggregation: bool,
) -> Result<ProofWithVK>;
}

/// generate a macro filling the BranchCircuit structs manually
macro_rules! impl_branch_circuits {
($struct_name:ty, $($i:expr),*) => {
Expand All @@ -152,7 +166,7 @@ macro_rules! impl_branch_circuits {
in combination with the node input length."]
pub type $struct_name = [< $struct_name GenericNodeLen>]<MAX_BRANCH_NODE_LEN>;

impl $struct_name {
impl BranchMacroTrait for $struct_name {
fn new(builder: &CircuitWithUniversalVerifierBuilder<F, D, NUM_IO>) -> Self {
$struct_name {
$(
Expand All @@ -163,11 +177,11 @@ macro_rules! impl_branch_circuits {
}
/// Returns the set of circuits to be fed to the recursive framework
fn circuit_set(&self) -> Vec<HashOut<F>> {
let mut arr = Vec::new();
$(
arr.push(self.[< b $i >].circuit_data().verifier_only.circuit_digest);
)+
arr

vec![$(
self.[< b $i >].circuit_data().verifier_only.circuit_digest,
)+]

}
/// generates a proof from the inputs stored in `branch`. Depending on the size of the node,
/// and the number of children proofs, it selects the right specialized circuit to generate the proof.
Expand Down Expand Up @@ -413,12 +427,6 @@ mod tests {
mapping_key: Option<Vec<u8>>,
}

impl TestData {
fn is_simple_slot(&self) -> bool {
self.mapping_key.is_none()
}
}

#[test]
fn test_values_extraction_single_variable_apis() {
test_apis(true);
Expand Down
2 changes: 1 addition & 1 deletion mp2-v1/tests/common/bindings/mod.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
#![allow(unused_imports, clippy::all, rustdoc::all)]
#![allow(unused_imports, clippy::all, rustdoc::all, warnings)]
//! This module contains the sol! generated bindings for solidity contracts.
//! This is autogenerated code.
//! Do not manually edit these files.
Expand Down
11 changes: 5 additions & 6 deletions mp2-v1/tests/common/cases/indexing.rs
Original file line number Diff line number Diff line change
Expand Up @@ -119,17 +119,17 @@ impl TableIndexing {
let single_columns = SINGLE_SLOTS
.iter()
.enumerate()
.filter_map(|(i, slot)| {
.map(|(i, slot)| {
let identifier =
identifier_single_var_column(*slot, contract_address, chain_id, vec![]);
Some(TableColumn {
TableColumn {
name: format!("column_{}", i),
identifier,
index: IndexType::None,
// ALL single columns are "multiplier" since we do tableA * D(tableB), i.e. all
// entries of table A are repeated for each entry of table B.
multiplier: true,
})
}
})
.collect::<Vec<_>>();
let mapping_column = vec![TableColumn {
Expand Down Expand Up @@ -591,10 +591,9 @@ impl TableIndexing {
debug!(
" CONTRACT storage root pis.storage_root() {:?}",
hex::encode(
&pis.root_hash_field()
pis.root_hash_field()
.into_iter()
.map(|u| u.to_be_bytes())
.flatten()
.flat_map(|u| u.to_be_bytes())
.collect::<Vec<_>>()
)
);
Expand Down
Loading

0 comments on commit efcb3d6

Please sign in to comment.