From 47e422aaba8156205c922823fc355805ade50534 Mon Sep 17 00:00:00 2001
From: kevaundray
Date: Wed, 31 Jan 2024 22:03:26 +0000
Subject: [PATCH] chore: refactor create proof method (#77)

* draft alt design for create-proof

* fix clippy

Signed-off-by: Kevaundray Wedderburn

---------

Signed-off-by: Kevaundray Wedderburn
---
 ffi_interface/src/lib.rs | 94 ++++++++++++++++++++++++++++++++++++++++
 1 file changed, 94 insertions(+)

diff --git a/ffi_interface/src/lib.rs b/ffi_interface/src/lib.rs
index aaf47ae..7499eb4 100644
--- a/ffi_interface/src/lib.rs
+++ b/ffi_interface/src/lib.rs
@@ -315,6 +315,100 @@ pub fn create_proof(input: Vec<u8>) -> Vec<u8> {
     proof.to_bytes().unwrap()
 }
 
+// This is an alternative implementation of create_proof
+pub fn create_proof_alt(input: Vec<u8>) -> Vec<u8> {
+    // - Checks for the serialized proof queries
+    //
+    // Define the chunk size (8257 bytes)
+    // C_i, f_i(X), z_i, y_i
+    // 32, 8192, 1, 32
+    // = 8257
+    const CHUNK_SIZE: usize = 8257; // TODO: get this from ipa-multipoint
+
+    if input.len() % CHUNK_SIZE != 0 {
+        // TODO: change this to an error
+        panic!("Input length must be a multiple of {}", CHUNK_SIZE);
+    }
+    let num_proofs = input.len() / CHUNK_SIZE;
+
+    let proofs_bytes = input.chunks_exact(CHUNK_SIZE);
+    assert!(
+        proofs_bytes.remainder().is_empty(),
+        "There should be no left over bytes when chunking the proof"
+    );
+
+    // - Deserialize proof queries
+    //
+    let mut prover_queries: Vec<ProverQuery> = Vec::with_capacity(num_proofs);
+
+    for proof_bytes in proofs_bytes {
+        let prover_query = deserialize_proof_query(proof_bytes);
+        prover_queries.push(prover_query);
+    }
+
+    // - Create proofs
+    //
+    // TODO: This should be passed in as a pointer
+    let precomp = PrecomputedWeights::new(256);
+
+    let crs = CRS::default();
+    let mut transcript = Transcript::new(b"verkle");
+    // TODO: This should not need to clone the CRS, but instead take a reference
+
+    let proof = MultiPoint::open(crs.clone(), &precomp, &mut transcript, prover_queries);
+
+    proof.to_bytes().expect("cannot serialize proof")
+}
+
+#[must_use]
+fn deserialize_proof_query(bytes: &[u8]) -> ProverQuery {
+    // Commitment
+    let (commitment, mut bytes) = take_group_element(bytes);
+
+    // f_x is a polynomial of degree 255, so we have 256 Fr elements
+    const NUMBER_OF_EVALUATIONS: usize = 256;
+    let mut collect_lagrange_basis: Vec<Fr> = Vec::with_capacity(NUMBER_OF_EVALUATIONS);
+    for _ in 0..NUMBER_OF_EVALUATIONS {
+        let (scalar, offsetted_bytes) = take_scalar(bytes);
+        collect_lagrange_basis.push(scalar);
+        bytes = offsetted_bytes;
+    }
+
+    // The input point is a single byte
+    let (z_i, bytes) = take_byte(bytes);
+
+    // The evaluation is a single scalar
+    let (y_i, bytes) = take_scalar(bytes);
+
+    assert!(bytes.is_empty(), "we should have consumed all the bytes");
+
+    ProverQuery {
+        commitment,
+        poly: LagrangeBasis::new(collect_lagrange_basis),
+        point: z_i,
+        result: y_i,
+    }
+}
+
+#[must_use]
+fn take_group_element(bytes: &[u8]) -> (Element, &[u8]) {
+    let element = Element::from_bytes(&bytes[0..32]).expect("could not deserialize element");
+    // Increment the slice by 32 bytes
+    (element, &bytes[32..])
+}
+
+#[must_use]
+fn take_byte(bytes: &[u8]) -> (usize, &[u8]) {
+    let z_i = bytes[0] as usize;
+    // Increment the slice by 1 byte
+    (z_i, &bytes[1..])
+}
+#[must_use]
+fn take_scalar(bytes: &[u8]) -> (Fr, &[u8]) {
+    let y_i = fr_from_le_bytes(&bytes[0..32]).expect("could not deserialize y_i");
+    // Increment the slice by 32 bytes
+    (y_i, &bytes[32..])
+}
+
 #[cfg(test)]
 mod tests {
     use banderwagon::Fr;