chore(ffi_interface): Switch from Big Endian to Little Endian (#76)
* Change from Big Endian to Little Endian

Co-authored-by: Dragan Pilipovic <[email protected]>

* Modify interop tests to show how to change the Java tests

* Remove allocation in fr_from_le_bytes

---------

Co-authored-by: Dragan Pilipovic <[email protected]>
kevaundray and dragan2234 authored Jan 28, 2024
1 parent dbc3167 commit ea7d2bd
Showing 2 changed files with 51 additions and 33 deletions.
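In practice this means scalar inputs to the FFI, and the 32-byte hashes it returns, are now exchanged in little-endian rather than big-endian byte order. Below is a minimal caller-side sketch of the adaptation, in plain Rust using only the standard library; the helper name is illustrative and is not part of the crate's API.

    /// Hypothetical helper: convert a flat buffer of 32-byte big-endian scalars
    /// into the little-endian layout the FFI now expects. Each 32-byte chunk is
    /// reversed independently; the order of the chunks themselves is unchanged.
    fn be_scalars_to_le(scalars_be: &[u8]) -> Vec<u8> {
        assert!(
            scalars_be.len() % 32 == 0,
            "input must be a whole number of 32-byte scalars"
        );
        scalars_be
            .chunks_exact(32)
            .flat_map(|chunk| chunk.iter().rev().copied())
            .collect()
    }

    fn main() {
        // The scalar 1: big-endian puts the low byte last ...
        let mut one_be = [0u8; 32];
        one_be[31] = 1;
        // ... little-endian puts it first.
        let one_le = be_scalars_to_le(&one_be);
        assert_eq!(one_le[0], 1);
    }

The updated interop tests below perform the same per-chunk reversal in the opposite direction, so that the Java reference functions (which still expect big-endian input) can be compared against the new little-endian Rust path.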
ffi_interface/src/interop.rs (48 changes: 37 additions & 11 deletions)
@@ -123,7 +123,7 @@ pub(crate) fn group_to_field(point: &Element) -> Fr {
 mod test {
     use super::Java_org_hyperledger_besu_nativelib_ipamultipoint_LibIpaMultipoint_pedersenHash;
     use crate::{
-        commit_to_scalars, deprecated_serialize_commitment, fr_to_be_bytes, get_tree_key_hash,
+        commit_to_scalars, deprecated_serialize_commitment, fr_to_le_bytes, get_tree_key_hash,
         hash_commitment,
         interop::{
             Java_org_hyperledger_besu_nativelib_ipamultipoint_LibIpaMultipoint_commit,
@@ -152,42 +152,68 @@ mod test {

     #[test]
     fn interop_commit() {
-        let scalars: Vec<_> = (0..256)
+        let scalars_le: Vec<_> = (0..256)
             .map(|i| {
                 let val = Fr::from((i + 1) as u128);
-                fr_to_be_bytes(-val)
+                fr_to_le_bytes(-val)
             })
             .flatten()
             .collect();

-        let expected_hash =
-            Java_org_hyperledger_besu_nativelib_ipamultipoint_LibIpaMultipoint_commit(&scalars);
+        let scalars_be: Vec<_> = (0..256)
+            .map(|i| {
+                let val = Fr::from((i + 1) as u128);
+                let mut arr = fr_to_le_bytes(-val);
+                arr.reverse();
+                arr
+            })
+            .flatten()
+            .collect();
+
+        // The previous implementation will return the hash in big endian format
+        // The new implementation will return the hash in little endian format
+        // however.
+        let expected_hash_be =
+            Java_org_hyperledger_besu_nativelib_ipamultipoint_LibIpaMultipoint_commit(&scalars_be);
+        let expected_hash_le: Vec<_> = expected_hash_be.iter().rev().cloned().collect();

         let crs = CRS::default();
         let committer = DefaultCommitter::new(&crs.G);

-        let got_commitment = commit_to_scalars(&committer, &scalars).unwrap();
+        let got_commitment = commit_to_scalars(&committer, &scalars_le).unwrap();
         let got_hash = hash_commitment(got_commitment);
-        assert_eq!(got_hash.to_vec(), expected_hash)
+        assert_eq!(got_hash.to_vec(), expected_hash_le)
     }

     #[test]
     fn interop_commit_root() {
-        let scalars: Vec<_> = (0..256)
+        let scalars_le: Vec<_> = (0..256)
+            .map(|i| {
+                let val = Fr::from((i + 1) as u128);
+                fr_to_le_bytes(-val)
+            })
+            .flatten()
+            .collect();
+
+        let scalars_be: Vec<_> = (0..256)
             .map(|i| {
                 let val = Fr::from((i + 1) as u128);
-                fr_to_be_bytes(-val)
+                let mut arr = fr_to_le_bytes(-val);
+                arr.reverse();
+                arr
             })
             .flatten()
            .collect();

         let expected_hash =
-            Java_org_hyperledger_besu_nativelib_ipamultipoint_LibIpaMultipoint_commitRoot(&scalars);
+            Java_org_hyperledger_besu_nativelib_ipamultipoint_LibIpaMultipoint_commitRoot(
+                &scalars_be,
+            );

         let crs = CRS::default();
         let committer = DefaultCommitter::new(&crs.G);

-        let got_commitment = commit_to_scalars(&committer, &scalars).unwrap();
+        let got_commitment = commit_to_scalars(&committer, &scalars_le).unwrap();
         let got_hash = deprecated_serialize_commitment(got_commitment);
         assert_eq!(got_hash.to_vec(), expected_hash)
     }
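The `interop_commit` test above also covers the return direction: the Java implementation hands back its hash in big-endian, so the test reverses those 32 bytes before comparing them with the new little-endian result. A standalone sketch of that step, again with an illustrative (non-crate) function name:

    /// Hypothetical helper: convert a 32-byte hash from the legacy big-endian
    /// layout into the little-endian layout the new implementation returns.
    fn be_hash_to_le(mut hash: [u8; 32]) -> [u8; 32] {
        hash.reverse();
        hash
    }

    fn main() {
        let mut legacy_be = [0u8; 32];
        legacy_be[31] = 0xaa; // low byte last in the legacy format
        let new_le = be_hash_to_le(legacy_be);
        assert_eq!(new_le[0], 0xaa); // low byte first in the new format
    }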
ffi_interface/src/lib.rs (36 changes: 14 additions & 22 deletions)
@@ -95,7 +95,7 @@ fn _commit_to_scalars(committer: &DefaultCommitter, scalars: &[u8]) -> Result<El
     // big endian bytes
     let mut inputs = Vec::with_capacity(num_scalars);
     for chunk in scalars.chunks_exact(32) {
-        inputs.push(fr_from_be_bytes(chunk)?);
+        inputs.push(fr_from_le_bytes(chunk)?);
     }

     Ok(committer.commit_lagrange(&inputs))
@@ -129,8 +129,8 @@ pub fn update_commitment(
     new_scalar_bytes: ScalarBytes,
 ) -> Result<CommitmentBytes, Error> {
     let old_commitment = Element::from_bytes_unchecked_uncompressed(old_commitment_bytes);
-    let old_scalar = fr_from_be_bytes(&old_scalar_bytes)?;
-    let new_scalar = fr_from_be_bytes(&new_scalar_bytes)?;
+    let old_scalar = fr_from_le_bytes(&old_scalar_bytes)?;
+    let new_scalar = fr_from_le_bytes(&new_scalar_bytes)?;

     // w-v
     let delta = new_scalar - old_scalar;
@@ -151,7 +151,7 @@ pub fn hash_commitment(commitment: CommitmentBytes) -> ScalarBytes {
     // TODO: We could introduce a method named `hash_commit_to_scalars`
     // TODO: which would save this serialization roundtrip. We should profile/check that
     // TODO: this is actually a bottleneck for the average workflow before doing this.
-    fr_to_be_bytes(Element::from_bytes_unchecked_uncompressed(commitment).map_to_scalar_field())
+    fr_to_le_bytes(Element::from_bytes_unchecked_uncompressed(commitment).map_to_scalar_field())
 }
 /// Hashes a vector of commitments.
 ///
@@ -166,7 +166,7 @@ pub fn hash_commitments(commitments: &[CommitmentBytes]) -> Vec<ScalarBytes> {

     Element::batch_map_to_scalar_field(&elements)
         .into_iter()
-        .map(fr_to_be_bytes)
+        .map(fr_to_le_bytes)
         .collect()
 }

@@ -176,23 +176,15 @@ pub fn deprecated_serialize_commitment(commitment: CommitmentBytes) -> [u8; 32]
     Element::from_bytes_unchecked_uncompressed(commitment).to_bytes()
 }

-// TODO: We use big endian bytes here to be interopable with the java implementation
-// TODO: we should stick to one endianness everywhere to avoid confusion
-fn fr_to_be_bytes(fr: banderwagon::Fr) -> [u8; 32] {
+fn fr_to_le_bytes(fr: banderwagon::Fr) -> [u8; 32] {
     let mut bytes = [0u8; 32];
     fr.serialize_compressed(&mut bytes[..])
         .expect("Failed to serialize scalar to bytes");
-    // serialized compressed outputs bytes in LE order, so we reverse to get BE order
-    bytes.reverse();
     bytes
 }
-fn fr_from_be_bytes(bytes: &[u8]) -> Result<banderwagon::Fr, Error> {
-    let mut bytes = bytes.to_vec();
-    bytes.reverse(); // deserialize expects the bytes to be in little endian order
-    banderwagon::Fr::deserialize_uncompressed(&bytes[..]).map_err(|_| {
-        Error::FailedToDeserializeScalar {
-            bytes: bytes.to_vec(),
-        }
+fn fr_from_le_bytes(bytes: &[u8]) -> Result<banderwagon::Fr, Error> {
+    banderwagon::Fr::deserialize_uncompressed(bytes).map_err(|_| Error::FailedToDeserializeScalar {
+        bytes: bytes.to_vec(),
     })
 }

@@ -267,7 +259,7 @@ mod tests {
         crs::CRS,
     };

-    use crate::{fr_from_be_bytes, fr_to_be_bytes};
+    use crate::{fr_from_le_bytes, fr_to_le_bytes};
     #[test]
     fn commitment_update() {
         let crs = CRS::default();
@@ -295,8 +287,8 @@ mod tests {
             &committer,
             commitment.to_bytes_uncompressed(),
             0,
-            fr_to_be_bytes(a_0),
-            fr_to_be_bytes(a_2),
+            fr_to_le_bytes(a_0),
+            fr_to_le_bytes(a_2),
         )
         .unwrap();

@@ -306,8 +298,8 @@
     #[test]
     fn from_be_to_be_bytes() {
         let value = banderwagon::Fr::from(123456u128);
-        let bytes = fr_to_be_bytes(value);
-        let got_value = fr_from_be_bytes(&bytes).unwrap();
+        let bytes = fr_to_le_bytes(value);
+        let got_value = fr_from_le_bytes(&bytes).unwrap();
         assert_eq!(got_value, value)
     }
 }
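With the byte reversals gone from both helpers, the round trip is a direct arkworks serialization call in each direction, which is also why the intermediate `to_vec` allocation could be dropped. A sketch of what `fr_to_le_bytes`/`fr_from_le_bytes` now boil down to, assuming the `banderwagon` crate and the arkworks `ark_serialize` traits are available as direct dependencies (inside the crate the two helpers are private):

    use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
    use banderwagon::Fr;

    fn main() {
        let value = Fr::from(123456u128);

        // serialize_compressed already writes the scalar little-endian,
        // so no reversal is needed on either side any more.
        let mut bytes = [0u8; 32];
        value
            .serialize_compressed(&mut bytes[..])
            .expect("failed to serialize scalar");

        let round_trip =
            Fr::deserialize_uncompressed(&bytes[..]).expect("failed to deserialize scalar");
        assert_eq!(round_trip, value);
    }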
