Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

add bindings to generate proof from uncompressed banderwagon values #96

Merged
merged 22 commits into from
Sep 5, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
22 commits
Select commit Hold shift + click to select a range
218d91c
add bindings for c
tanishqjasoria May 26, 2024
5c9ef6b
make functions public
tanishqjasoria May 27, 2024
5ba4518
Added verify_proof function to be called from C# bandersnatch repository
jjeangal May 28, 2024
0c60651
Merge pull request #1 from jjeangal/bindings
tanishqjasoria May 30, 2024
8561333
- Created take_uncompressed_group_element fn to transform uncompress…
jjeangal May 30, 2024
5d61218
add deserialize_proof_query_uncompressed fct to interface
jjeangal May 30, 2024
dce6542
Created new create_proof_uncompressed to work with uncompressed commit…
jjeangal May 30, 2024
00e22e0
renaming
jjeangal May 31, 2024
7033f77
reverse commitment byte array and order of X and Y
jjeangal May 31, 2024
e40d459
deserialize_verifier_query_uncompressed fct
jjeangal May 31, 2024
410f41d
forgot #[no mangle]
jjeangal May 31, 2024
152d435
Merge remote-tracking branch 'upstream/bindings' into bindings
jjeangal Jun 3, 2024
26661b3
remove unused imports
jjeangal Jun 5, 2024
2107289
removed array reversal for uncompressing, using littleEndian now
jjeangal Jun 6, 2024
6e71b02
changed proof sizes to 1120
jjeangal Jun 6, 2024
8da987e
uncompress and decompress proofs in creation and verification
jjeangal Jun 7, 2024
4960905
Merge pull request #2 from jjeangal/bindings
tanishqjasoria Aug 13, 2024
ee3121b
fix clippy issues
tanishqjasoria Aug 13, 2024
b43e617
add build.rs file
tanishqjasoria Aug 15, 2024
8de9306
add csharp dummy code and cbindgen to automatically generate the bi…
tanishqjasoria Aug 20, 2024
6bbb269
cargo format and allow deprecated
tanishqjasoria Aug 20, 2024
0867b35
improve csharp lib
tanishqjasoria Aug 20, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 9 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
@@ -1,3 +1,11 @@
/target
Cargo.lock
.DS_Store
.DS_Store

# jetbrains ide files
.idea

# c# build files
bin
obj

5 changes: 4 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,10 @@ members = [
"verkle-trie",
"verkle-spec",
"ipa-multipoint",
"banderwagon", "ffi_interface",
"banderwagon",
"ffi_interface",
"bindings/c",
"bindings/csharp/rust_code",
kevaundray marked this conversation as resolved.
Show resolved Hide resolved
]
resolver = "2"

Expand Down
1 change: 1 addition & 0 deletions bindings/c/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
build
20 changes: 20 additions & 0 deletions bindings/c/Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
[package]
name = "c_verkle"
version = "0.1.0"
edition = "2021"


[lib]
crate-type = ["staticlib", "cdylib", "rlib"]

[dependencies]
libc = "0.2.2"
hex = "*"
banderwagon = { path = "../../banderwagon" }
ipa-multipoint = { path = "../../ipa-multipoint" }
verkle-spec = { path = "../../verkle-spec" }
verkle-trie = { path = "../../verkle-trie" }
ffi_interface = { path = "../../ffi_interface" }

[build-dependencies]
cbindgen = "0.26.0"
31 changes: 31 additions & 0 deletions bindings/c/build.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
use std::env;
use std::path::PathBuf;

/// The directory where the generated header file will be written.
const DIR_FOR_HEADER: &str = "build";

fn main() {
    // Linker flags.
    // Link libm on Unix-like systems (needed due to use of the num_cpus crate).
    #[cfg(not(target_os = "windows"))]
    println!("cargo:rustc-link-lib=m");

    // Regenerate the header whenever the Rust sources change.
    println!("cargo:rerun-if-changed=src/");

    let crate_dir = env::var("CARGO_MANIFEST_DIR").unwrap();
    let package_name = env::var("CARGO_PKG_NAME").unwrap();

    // Header is written to `<crate dir>/build/<package name>.h`.
    // NOTE: `crate_dir` is already a valid path string; wrapping it once in a
    // PathBuf is enough (the original converted it to a PathBuf twice).
    let output_file = PathBuf::from(&crate_dir)
        .join(DIR_FOR_HEADER)
        .join(format!("{}.h", package_name))
        .display()
        .to_string();

    cbindgen::Builder::new()
        .with_crate(crate_dir)
        .with_language(cbindgen::Language::C)
        .generate()
        .unwrap()
        .write_to_file(output_file);
}
309 changes: 309 additions & 0 deletions bindings/c/src/lib.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,309 @@
use ffi_interface::{
deserialize_proof_query, deserialize_proof_query_uncompressed, deserialize_verifier_query,
deserialize_verifier_query_uncompressed, fr_from_le_bytes, Context,
};
use ipa_multipoint::committer::Committer;
use ipa_multipoint::multiproof::{MultiPoint, MultiPointProof, ProverQuery, VerifierQuery};
use ipa_multipoint::transcript::Transcript;

#[allow(deprecated)]
use ffi_interface::get_tree_key_hash;

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn context_new() -> *mut Context {
    // Heap-allocate a default context and transfer ownership to the caller.
    // The caller must release it with `context_free`.
    Box::into_raw(Box::new(Context::default()))
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn context_free(ctx: *mut Context) {
    // Freeing a null pointer is a no-op, mirroring C's `free`.
    if !ctx.is_null() {
        // Reclaim ownership; dropping the Box releases the context.
        unsafe {
            drop(Box::from_raw(ctx));
        }
    }
}

#[allow(deprecated)]
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn pedersen_hash(
    ctx: *mut Context,
    address: *const u8,
    tree_index_le: *const u8,
    out: *mut u8,
) {
    // TODO: We have omitted the error handling for null pointers at the moment;
    // TODO: a null input is currently silently ignored.
    if ctx.is_null() || address.is_null() || tree_index_le.is_null() || out.is_null() {
        return;
    }

    // Rebuild safe views over the raw FFI pointers; both inputs are expected
    // to point at 32 bytes.
    let (context, address_bytes, tree_index_bytes) = unsafe {
        (
            &*ctx,
            std::slice::from_raw_parts(address, 32),
            std::slice::from_raw_parts(tree_index_le, 32),
        )
    };

    let hash = get_tree_key_hash(
        context,
        <[u8; 32]>::try_from(address_bytes).unwrap(),
        <[u8; 32]>::try_from(tree_index_bytes).unwrap(),
    );

    // Copy the 32-byte hash into the caller-provided output buffer.
    unsafe {
        std::slice::from_raw_parts_mut(out, 32).copy_from_slice(&hash);
    }
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn multi_scalar_mul(
    ctx: *mut Context,
    scalars: *const u8,
    len: usize,
    out: *mut u8,
) {
    /// Byte width of one serialized scalar (little-endian field element).
    const SCALAR_SIZE: usize = 32;

    // Guard against null pointers, mirroring `pedersen_hash`.
    // TODO: error handling for null pointers is still omitted; a null input is
    // TODO: silently ignored rather than reported.
    if ctx.is_null() || scalars.is_null() || out.is_null() {
        return;
    }

    // Rebuild safe views over the raw FFI pointers; `len` is the total byte
    // count of the scalar buffer.
    let (scalar_slice, context) = unsafe {
        let scalar = std::slice::from_raw_parts(scalars, len);
        let ctx_ref = &*ctx;

        (scalar, ctx_ref)
    };

    // One field element per 32-byte chunk, so reserve len / 32 slots
    // (the original reserved `len` slots, over-allocating 32x).
    let mut inputs = Vec::with_capacity(len / SCALAR_SIZE);
    for chunk in scalar_slice.chunks_exact(SCALAR_SIZE) {
        inputs.push(fr_from_le_bytes(chunk).unwrap());
    }

    // Commit to the scalars and serialize the commitment (32 bytes).
    let data = context.committer.commit_lagrange(&inputs);
    let hash = data.to_bytes();

    unsafe {
        let commitment_data_slice = std::slice::from_raw_parts_mut(out, 32);
        commitment_data_slice.copy_from_slice(&hash);
    }
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn create_proof(ctx: *mut Context, input: *const u8, len: usize, out: *mut u8) {
const CHUNK_SIZE: usize = 8257; // TODO: get this from ipa-multipoint
const PROOF_SIZE: usize = 576; // TODO: get this from ipa-multipoint

let (scalar_slice, context) = unsafe {
let scalar = std::slice::from_raw_parts(input, len);
let ctx_ref = &*ctx;

(scalar, ctx_ref)
};

let num_openings = len / CHUNK_SIZE;

let proofs_bytes = scalar_slice.chunks_exact(CHUNK_SIZE);
assert!(
proofs_bytes.remainder().is_empty(),
"There should be no left over bytes when chunking the proof"
);

// - Deserialize proof queries
//
let mut prover_queries: Vec<ProverQuery> = Vec::with_capacity(num_openings);

for proof_bytes in proofs_bytes {
let prover_query = deserialize_proof_query(proof_bytes);
prover_queries.push(prover_query);
}

// - Create proofs
//

let mut transcript = Transcript::new(b"verkle");

let proof = MultiPoint::open(
// TODO: This should not need to clone the CRS, but instead take a reference
context.crs.clone(),
&context.precomputed_weights,
&mut transcript,
prover_queries,
);

let hash = proof.to_bytes().expect("cannot serialize proof");
unsafe {
let commitment_data_slice = std::slice::from_raw_parts_mut(out, PROOF_SIZE);
commitment_data_slice.copy_from_slice(&hash);
}
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn create_proof_uncompressed(
ctx: *mut Context,
input: *const u8,
len: usize,
out: *mut u8,
) {
// 8257 + 32 because first commitment is uncompressed as 64 bytes
const CHUNK_SIZE: usize = 8289; // TODO: get this from ipa-multipoint
const PROOF_SIZE: usize = 1120; // TODO: get this from ipa-multipoint

let (scalar_slice, context) = unsafe {
let scalar = std::slice::from_raw_parts(input, len);
let ctx_ref = &*ctx;

(scalar, ctx_ref)
};

let num_openings = len / CHUNK_SIZE;

let proofs_bytes = scalar_slice.chunks_exact(CHUNK_SIZE);
assert!(
proofs_bytes.remainder().is_empty(),
"There should be no left over bytes when chunking the proof"
);

// - Deserialize proof queries
//
let mut prover_queries: Vec<ProverQuery> = Vec::with_capacity(num_openings);

for proof_bytes in proofs_bytes {
let prover_query = deserialize_proof_query_uncompressed(proof_bytes);
prover_queries.push(prover_query);
}

// - Create proofs
//

let mut transcript = Transcript::new(b"verkle");

let proof = MultiPoint::open(
// TODO: This should not need to clone the CRS, but instead take a reference
context.crs.clone(),
&context.precomputed_weights,
&mut transcript,
prover_queries,
);

let hash = proof
.to_bytes_uncompressed()
.expect("cannot serialize proof");
unsafe {
let commitment_data_slice = std::slice::from_raw_parts_mut(out, PROOF_SIZE);
commitment_data_slice.copy_from_slice(&hash);
}
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn verify_proof(ctx: *mut Context, input: *const u8, len: usize) -> bool {
    const CHUNK_SIZE: usize = 65; // TODO: get this from ipa-multipoint
    const PROOF_SIZE: usize = 576; // TODO: get this from ipa-multipoint

    // Input layout: a PROOF_SIZE-byte proof followed by CHUNK_SIZE-byte
    // verifier queries. NOTE(review): `split_at` panics if len < PROOF_SIZE.
    let (proof_slice, verifier_queries_slices, context) = unsafe {
        let input_slice = std::slice::from_raw_parts(input, len);

        let (proof_slice, verifier_queries_slices) = input_slice.split_at(PROOF_SIZE);

        let ctx_ref = &*ctx;

        (proof_slice, verifier_queries_slices, ctx_ref)
    };

    let verifier_queries_bytes = verifier_queries_slices.chunks_exact(CHUNK_SIZE);
    assert!(
        verifier_queries_bytes.remainder().is_empty(),
        "There should be no left over bytes when chunking the verifier queries"
    );

    // `chunks_exact` already yields one item per query, so its length IS the
    // number of openings. (The original divided by CHUNK_SIZE a second time,
    // under-reserving the vector.)
    let num_openings = verifier_queries_bytes.len();

    // - Deserialize verifier queries
    //
    let mut verifier_queries: Vec<VerifierQuery> = Vec::with_capacity(num_openings);

    for verifier_query_bytes in verifier_queries_bytes {
        let verifier_query = deserialize_verifier_query(verifier_query_bytes);
        verifier_queries.push(verifier_query);
    }

    // - Check proof
    //

    let proof = MultiPointProof::from_bytes(proof_slice, 256).unwrap();

    let mut transcript = Transcript::new(b"verkle");

    MultiPointProof::check(
        &proof,
        // Only a reference is needed here, so the clone the original made was
        // redundant (clippy::redundant_clone).
        &context.crs,
        &context.precomputed_weights,
        &verifier_queries,
        &mut transcript,
    )
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn verify_proof_uncompressed(
    ctx: *mut Context,
    input: *const u8,
    len: usize,
) -> bool {
    // Chunk is now 65 + 32 = 97 because first commitment is uncompressed as 64 bytes
    const CHUNK_SIZE: usize = 97; // TODO: get this from ipa-multipoint
    const PROOF_SIZE: usize = 1120; // TODO: get this from ipa-multipoint

    // Input layout: a PROOF_SIZE-byte uncompressed proof followed by
    // CHUNK_SIZE-byte verifier queries. NOTE(review): `split_at` panics if
    // len < PROOF_SIZE.
    let (proof_slice, verifier_queries_slices, context) = unsafe {
        let input_slice = std::slice::from_raw_parts(input, len);

        let (proof_slice, verifier_queries_slices) = input_slice.split_at(PROOF_SIZE);

        let ctx_ref = &*ctx;

        (proof_slice, verifier_queries_slices, ctx_ref)
    };

    let verifier_queries_bytes = verifier_queries_slices.chunks_exact(CHUNK_SIZE);
    assert!(
        verifier_queries_bytes.remainder().is_empty(),
        "There should be no left over bytes when chunking the verifier queries"
    );

    // `chunks_exact` already yields one item per query, so its length IS the
    // number of openings. (The original divided by CHUNK_SIZE a second time,
    // under-reserving the vector.)
    let num_openings = verifier_queries_bytes.len();

    // - Deserialize verifier queries
    //
    let mut verifier_queries: Vec<VerifierQuery> = Vec::with_capacity(num_openings);

    for verifier_query_bytes in verifier_queries_bytes {
        let verifier_query = deserialize_verifier_query_uncompressed(verifier_query_bytes);
        verifier_queries.push(verifier_query);
    }

    // - Check proof
    //

    let proof = MultiPointProof::from_bytes_unchecked_uncompressed(proof_slice, 256).unwrap();

    let mut transcript = Transcript::new(b"verkle");

    MultiPointProof::check(
        &proof,
        // Only a reference is needed here, so the clone the original made was
        // redundant (clippy::redundant_clone).
        &context.crs,
        &context.precomputed_weights,
        &verifier_queries,
        &mut transcript,
    )
}
Loading
Loading