Skip to content

Commit

Permalink
Use gnark prover (helius-labs#97)
Browse files Browse the repository at this point in the history
* Use gnark prover

* Intermediate commit

* Intermediate commit

* Use gnark prover

* Fix

* Fix merge issues
  • Loading branch information
pmantica11 authored May 28, 2024
1 parent ba5b50b commit 0699d30
Show file tree
Hide file tree
Showing 22 changed files with 814 additions and 63 deletions.
322 changes: 295 additions & 27 deletions Cargo.lock

Large diffs are not rendered by default.

4 changes: 4 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,11 @@ sqlx = {version = "0.6.2", features = [
thiserror = "1.0.31"
# time pinned because of https://github.com/launchbadge/sqlx/issues/3189
ark-bn254 = "0.4.0"
hex = "0.4.3"
num-bigint = "0.4.4"
num-traits = "0.2.18"
num_enum = "0.7.2"
reqwest = "0.12.4"
time = "=0.3.34"
tokio = {version = "1.23.0", features = ["full"]}
tower = {version = "0.4.13", features = ["full"]}
Expand Down
13 changes: 13 additions & 0 deletions src/api/api.rs
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ use super::method::get_latest_non_voting_signatures::get_latest_non_voting_signa
use super::method::get_transaction_with_compression_info::{
get_transaction_with_compression_info, GetTransactionRequest, GetTransactionResponse,
};
use super::method::get_validity_proof::{get_validity_proof, CompressedProofWithContext};
use super::method::utils::GetLatestSignaturesRequest;
use super::method::utils::{AccountBalanceResponse, GetPaginatedSignaturesResponse, HashRequest};
use super::{
Expand Down Expand Up @@ -251,6 +252,13 @@ impl PhotonApi {
.await
}

pub async fn get_validity_proof(
&self,
request: HashList,
) -> Result<CompressedProofWithContext, PhotonApiError> {
get_validity_proof(self.db_conn.as_ref(), request).await
}

pub async fn get_latest_compression_signatures(
&self,
request: GetLatestSignaturesRequest,
Expand Down Expand Up @@ -282,6 +290,11 @@ impl PhotonApi {
request: Some(HashList::schema().1),
response: GetMultipleCompressedAccountProofsResponse::schema().1,
},
OpenApiSpec {
name: "getValidityProof".to_string(),
request: Some(HashList::schema().1),
response: CompressedProofWithContext::schema().1,
},
OpenApiSpec {
name: "getCompressedTokenAccountsByOwner".to_string(),
request: Some(GetCompressedTokenAccountsByOwner::schema().1),
Expand Down
10 changes: 8 additions & 2 deletions src/api/method/get_multiple_compressed_account_proofs.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ use crate::common::typedefs::hash::Hash;
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub struct MerkleProofWithContext {
pub proof: Vec<Hash>,
pub root: Hash,
pub leaf_index: u32,
pub hash: Hash,
pub merkle_tree: SerializablePubkey,
Expand Down Expand Up @@ -111,7 +112,7 @@ pub async fn get_multiple_compressed_account_proofs_helper(
hash
)))?;

let proofs = required_node_indices
let mut proof = required_node_indices
.iter()
.enumerate()
.map(|(level, idx)| {
Expand Down Expand Up @@ -147,8 +148,13 @@ pub async fn get_multiple_compressed_account_proofs_helper(
hash
)))?;

let root = proof.pop().ok_or(PhotonApiError::UnexpectedError(
"Root node not found in proof".to_string(),
))?;

Ok(MerkleProofWithContext {
proof: proofs,
proof,
root,
leaf_index: leaf_model.leaf_idx.ok_or(PhotonApiError::RecordNotFound(
"Leaf index not found".to_string(),
))? as u32,
Expand Down
258 changes: 258 additions & 0 deletions src/api/method/get_validity_proof.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,258 @@
use crate::{api::error::PhotonApiError, common::typedefs::hash::Hash};
use lazy_static::lazy_static;
use num_bigint::BigUint;
use reqwest::Client;
use sea_orm::DatabaseConnection;
use serde::{Deserialize, Serialize};
use std::str::FromStr;
use utoipa::ToSchema;

use super::get_multiple_compressed_account_proofs::{
get_multiple_compressed_account_proofs_helper, HashList,
};

lazy_static! {
    // Field-size constant used for the point-sign decisions when compressing
    // Groth16 proof points (see y_element_is_positive_g1/_g2 below).
    // NOTE(review): this value is one LESS than the BN254 scalar field modulus
    // (which ends in ...617) — confirm this off-by-one is the intended
    // convention for the prover/verifier pair.
    static ref FIELD_SIZE: BigUint = BigUint::from_str(
        "21888242871839275222246405745257275088548364400416034343698204186575808495616"
    )
    .unwrap();
}

/// Inclusion-proof input for a single compressed account, hex-encoded
/// ("0x"-prefixed strings) in the shape the external gnark prover expects.
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct HexInputsForProver {
    // Merkle root the proof is checked against.
    root: String,
    // Leaf index of the account hash in the merkle tree.
    path_index: u32,
    // Sibling hashes along the path from leaf to root.
    path_elements: Vec<String>,
    // The account hash being proven.
    leaf: String,
}

/// Batch request body sent to the prover's `/prove` endpoint.
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct HexBatchInputsForProver {
    // The explicit kebab-case rename overrides the camelCase container rule —
    // the prover expects exactly "input-compressed-accounts".
    #[serde(rename = "input-compressed-accounts")]
    input_compressed_accounts: Vec<HexInputsForProver>,
}

/// Response payload for `getValidityProof`: the compressed Groth16 proof plus
/// the per-account context a verifier needs. All vectors are parallel — entry
/// i of each corresponds to the i-th input hash.
#[derive(Serialize, Deserialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct CompressedProofWithContext {
    pub compressed_proof: CompressedProof,
    // Merkle root per input hash (Hash rendered via to_string).
    roots: Vec<String>,
    // Root sequence number (root_seq) per input hash.
    root_indices: Vec<u64>,
    // Leaf index per input hash.
    leaf_indices: Vec<u32>,
    // The account hashes themselves (Hash rendered via to_string).
    leaves: Vec<String>,
    // Merkle tree pubkey per input hash.
    merkle_trees: Vec<String>,
}

/// Renders a hash as a "0x"-prefixed lowercase hex string for the prover payload.
fn hash_to_hex(hash: &Hash) -> String {
    format!("0x{}", hex::encode(hash.to_vec()))
}

/// Raw JSON proof as returned by the gnark prover: the Groth16 points
/// A (`ar`), B (`bs`), and C (`krs`), each coordinate a hex string.
#[derive(Serialize, Deserialize, Debug)]
struct GnarkProofJson {
    // G1 point A: [x, y].
    ar: [String; 2],
    // G2 point B: [[x0, x1], [y0, y1]].
    bs: [[String; 2]; 2],
    // G1 point C: [x, y].
    krs: [String; 2],
}

/// Uncompressed big-endian proof bytes, as consumed by
/// `negate_and_compress_proof`: a is 64 bytes (x‖y), b is 128 bytes
/// (x0‖x1‖y0‖y1), c is 64 bytes (x‖y).
#[derive(Debug)]
struct ProofABC {
    a: Vec<u8>,
    b: Vec<u8>,
    c: Vec<u8>,
}

/// Compressed Groth16 proof: only the x-coordinates of A, B and C, with each
/// point's sign folded into the top bit of the first byte
/// (see `negate_and_compress_proof`).
#[derive(Serialize, Deserialize, ToSchema, Default)]
pub struct CompressedProof {
    a: Vec<u8>,
    b: Vec<u8>,
    c: Vec<u8>,
}

/// Decodes a hex string, with or without a leading "0x", into big-endian
/// bytes, left-padding with zeros to 64 hex characters (32 bytes) so short
/// field elements come out fixed-width.
///
/// # Panics
/// Panics if the (padded) string is not valid hex. The inputs come from the
/// prover's own JSON response, so invalid hex indicates a prover bug rather
/// than bad user input.
fn deserialize_hex_string_to_bytes(hex_str: &str) -> Vec<u8> {
    // Accept both "0xabc..." and bare "abc..." forms.
    let hex_str = hex_str.strip_prefix("0x").unwrap_or(hex_str);

    // Left pad with 0s if the length is not 64
    let hex_str = format!("{:0>64}", hex_str);

    hex::decode(hex_str).expect("Failed to decode hex string")
}

/// Converts the prover's JSON proof into raw big-endian byte vectors,
/// concatenating each point's coordinates in gnark's own ordering.
fn proof_from_json_struct(json: GnarkProofJson) -> ProofABC {
    // G1 point A: x followed by y, 32 bytes each.
    let mut a = deserialize_hex_string_to_bytes(&json.ar[0]);
    a.extend(deserialize_hex_string_to_bytes(&json.ar[1]));

    // G2 point B: four 32-byte elements in (x0, x1, y0, y1) order.
    let mut b = Vec::with_capacity(128);
    for pair in &json.bs {
        for element in pair {
            b.extend(deserialize_hex_string_to_bytes(element));
        }
    }

    // G1 point C: x followed by y.
    let mut c = deserialize_hex_string_to_bytes(&json.krs[0]);
    c.extend(deserialize_hex_string_to_bytes(&json.krs[1]));

    ProofABC { a, b, c }
}

// Returns true when a G1 y-coordinate is in the "positive" half of the field,
// i.e. y <= FIELD_SIZE - y. Drives the sign bit written during compression.
fn y_element_is_positive_g1(y_element: &BigUint) -> bool {
    y_element <= &(FIELD_SIZE.clone() - y_element)
}

// Sign decision for a G2 y-coordinate (two field elements): compare the first
// element against the field midpoint, falling back to the second element only
// on an exact tie.
fn y_element_is_positive_g2(y_element1: &BigUint, y_element2: &BigUint) -> bool {
    let field_midpoint = FIELD_SIZE.clone() / 2u32;

    match y_element1.cmp(&field_midpoint) {
        std::cmp::Ordering::Less => true,
        std::cmp::Ordering::Greater => false,
        std::cmp::Ordering::Equal => y_element2 < &field_midpoint,
    }
}

// Writes the sign flag into the top bit of a compressed point's first byte:
// bit 7 is set when `is_positive`, cleared otherwise; bits 0-6 pass through.
fn add_bitmask_to_byte(byte: u8, is_positive: bool) -> u8 {
    (byte & 0x7F) | ((is_positive as u8) << 7)
}

/// Compresses a Groth16 proof down to the x-coordinates of A, B and C,
/// encoding each point's sign in the top bit of the x-coordinate's first byte.
///
/// NOTE(review): for point A the G1 predicate is applied NEGATED
/// (`!y_element_is_positive_g1`) while C uses it directly — presumably this
/// realizes the "negate proof A" step the verifier convention requires;
/// confirm against the on-chain verifier before changing.
fn negate_and_compress_proof(proof: ProofABC) -> CompressedProof {
    let proof_a = &proof.a;
    let proof_b = &proof.b;
    let proof_c = &proof.c;

    // A = (x: bytes 0..32, y: bytes 32..64), big-endian.
    let a_x_element = &mut proof_a[0..32].to_vec();
    let a_y_element = BigUint::from_bytes_be(&proof_a[32..64]);

    // Inverted sign on purpose — see NOTE above.
    let proof_a_is_positive = !y_element_is_positive_g1(&a_y_element);
    a_x_element[0] = add_bitmask_to_byte(a_x_element[0], proof_a_is_positive);

    // B = (x: bytes 0..64, y: bytes 64..128); y splits into two 32-byte
    // field elements for the G2 sign test.
    let b_x_element = &mut proof_b[0..64].to_vec();
    let b_y_element = &proof_b[64..128];
    let b_y1_element = BigUint::from_bytes_be(&b_y_element[0..32]);
    let b_y2_element = BigUint::from_bytes_be(&b_y_element[32..64]);

    let proof_b_is_positive = y_element_is_positive_g2(&b_y1_element, &b_y2_element);
    b_x_element[0] = add_bitmask_to_byte(b_x_element[0], proof_b_is_positive);

    // C = (x: bytes 0..32, y: bytes 32..64), big-endian.
    let c_x_element = &mut proof_c[0..32].to_vec();
    let c_y_element = BigUint::from_bytes_be(&proof_c[32..64]);

    let proof_c_is_positive = y_element_is_positive_g1(&c_y_element);
    c_x_element[0] = add_bitmask_to_byte(c_x_element[0], proof_c_is_positive);

    CompressedProof {
        a: a_x_element.clone(),
        b: b_x_element.clone(),
        c: c_x_element.clone(),
    }
}

pub async fn get_validity_proof(
conn: &DatabaseConnection,
hashes: HashList,
) -> Result<CompressedProofWithContext, PhotonApiError> {
if hashes.0.is_empty() {
return Err(PhotonApiError::UnexpectedError(
"No hashes provided for proof generation".to_string(),
));
}
let client = Client::new();
let prover_endpoint = "http://localhost:3001"; // Change this as necessary

// Get merkle proofs
let merkle_proofs_with_context =
get_multiple_compressed_account_proofs_helper(conn, hashes.0).await?;

let mut inputs: Vec<HexInputsForProver> = Vec::new();
for proof in merkle_proofs_with_context.clone() {
let input = HexInputsForProver {
root: hash_to_hex(&proof.root),
path_index: proof.leaf_index,
path_elements: proof.proof.iter().map(|x| hash_to_hex(x)).collect(),
leaf: hash_to_hex(&proof.hash),
};
inputs.push(input);
}

let batch_inputs = HexBatchInputsForProver {
input_compressed_accounts: inputs,
};

let inclusion_proof_url = format!("{}/prove", prover_endpoint);
let json_body = serde_json::to_string(&batch_inputs).map_err(|e| {
PhotonApiError::UnexpectedError(format!(
"Got an error while serializing the request {}",
e.to_string()
))
})?;
let res = client
.post(&inclusion_proof_url)
.body(json_body.clone())
.header("Content-Type", "application/json")
.send()
.await
.map_err(|e| {
PhotonApiError::UnexpectedError(format!("Error fetching proof {}", e.to_string()))
})?;

if !res.status().is_success() {
return Err(PhotonApiError::UnexpectedError(format!(
"Error fetching proof {}",
res.status().to_string(),
)));
}

let text = res.text().await.map_err(|e| {
PhotonApiError::UnexpectedError(format!("Error fetching proof {}", e.to_string()))
})?;

let proof: GnarkProofJson = serde_json::from_str(&text).map_err(|e| {
PhotonApiError::UnexpectedError(format!(
"Got an error while deserializing the response {}",
e.to_string()
))
})?;

let proof = proof_from_json_struct(proof);
let compressed_proof = negate_and_compress_proof(proof);

Ok(CompressedProofWithContext {
compressed_proof,
roots: merkle_proofs_with_context
.iter()
.map(|x| x.root.clone().to_string())
.collect(),
root_indices: merkle_proofs_with_context
.iter()
.map(|x| x.root_seq)
.collect(),
leaf_indices: merkle_proofs_with_context
.iter()
.map(|x| x.leaf_index)
.collect(),
leaves: merkle_proofs_with_context
.iter()
.map(|x| x.hash.clone().to_string())
.collect(),
merkle_trees: merkle_proofs_with_context
.iter()
.map(|x| x.merkle_tree.clone().to_string())
.collect(),
})
}
1 change: 1 addition & 0 deletions src/api/method/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,4 +18,5 @@ pub mod get_latest_non_voting_signatures;
pub mod get_multiple_compressed_account_proofs;
pub mod get_multiple_compressed_accounts;
pub mod get_transaction_with_compression_info;
pub mod get_validity_proof;
pub mod utils;
6 changes: 6 additions & 0 deletions src/api/rpc_server.rs
Original file line number Diff line number Diff line change
Expand Up @@ -268,6 +268,12 @@ fn build_rpc_module(
.map_err(Into::into)
},
)?;
module.register_async_method("getValidityProof", |rpc_params, rpc_context| async move {
let ApiAndIndexer { api, indexer } = rpc_context.as_ref();
conditionally_index_latest_blocks(indexer).await;
let payload = rpc_params.parse()?;
api.get_validity_proof(payload).await.map_err(Into::into)
})?;

module.register_async_method(
"getLatestCompressionSignatures",
Expand Down
6 changes: 5 additions & 1 deletion src/openapi/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@ use crate::api::method::get_multiple_compressed_account_proofs::MerkleProofWithC
use crate::api::method::get_multiple_compressed_accounts::AccountList;

use crate::api::method::get_transaction_with_compression_info::AccountWithOptionalTokenData;
use crate::api::method::get_validity_proof::CompressedProof;
use crate::api::method::get_validity_proof::CompressedProofWithContext;
use crate::api::method::utils::Context;
use crate::api::method::utils::Limit;
use crate::api::method::utils::PaginatedSignatureInfoList;
Expand Down Expand Up @@ -78,7 +80,9 @@ const JSON_CONTENT_TYPE: &str = "application/json";
AccountState,
AccountWithOptionalTokenData,
UnixTimestamp,
UnsignedInteger
UnsignedInteger,
CompressedProof,
CompressedProofWithContext
)))]
struct ApiDoc;

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{"slot":33,"transaction":["AaGnQ+CcN7AjnPuLQ/TeNX1A2GfpzahQvhyFBn+XOejRUlrYLE9T7V0r+RfugNjXmF4nBe9aR/UYfM6nsFs/5wqAAQAHCuYKUBfZlcrgaJr6J8AIBPN6lyYp1QkSvf0x13vIb5QLsW59Lug0r1wz/S3TSnQmOY2O4L36K6YLXevV+QUv5zlETX0gTIA9tAHPRSx5dpkRs7dzIqN/ZtYBV6+xXzFBRQMGRm/lIRcy/+ytunLDm+e8jOW7xfcSayxDmzpAAAAAUWwytQBJW8Xd12pFBWuS/CX8e68+yKjmbwbZR2y6uFMOk0MhnnnW7qJcTxu17COx851pv2IOd4Bl3jlDcCIw6wu8D8C7R8ovdMQRLpSrE8+jxjTl3BfqywPNGiPNfnh81/rcnOkzRlVEOWGXY+St8XrqvZsYR5GQUT0IMZiUTD9BbD2wNHXqhcavQ6sML4++KFWFi/g/4Gcx30CKXBmgHQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJqhR+k9yJxLSrhu3HSeNiECqzyNLyc2jtSnRhzvrp4wCAwAFAkBCDwAECgAABQYHCAEECQJXGhCpBxXK8hlLAAAAAAAAAAAAAAAAAQAAAOYKUBfZlcrgaJr6J8AIBPN6lyYp1QkSvf0x13vIb5QLAMqaOwAAAAAAAAEAAAAAAAAAAAABAMqaOwAAAAABAA==","base64"],"meta":{"err":null,"status":{"Ok":null},"fee":5000,"preBalances":[2000000000,0,9496836480,1,1141440,1169280,1141440,0,1141440,1],"postBalances":[999994850,1000000000,9496836630,1,1141440,1169280,1141440,0,1141440,1],"innerInstructions":[{"index":1,"instructions":[{"programIdIndex":9,"accounts":[0,1],"data":"3Bxs3zzLZLuLQEYX","stackHeight":2},{"programIdIndex":8,"accounts":[0,7,5,6,9,2],"data":"5wPdbtbk7M4b3JSHrvzsqaE87aDpZLBDivFc4ZEXBey7DMEGE8kVrdxkszbZXm","stackHeight":2},{"programIdIndex":6,"accounts":[],"data":"LcYsUQdeQKxGUiMMFfD4pZgkFNr3k3cRuoFyrnCmq4CHUtoZLo1JaquAUtFpm39gnKaDf6AKymVRbKiPjBG8pGK4WwYHgA6FEbFpokSfDAxGUH7FFumz84ZeBu92gqqJNXyJaeDmuCMXGPbdNihdYvmaL22jQqq14i6tQcimbuHKPHqKupSSHTR775GPTysVXLUp4Dp7zxJv2UwDH5uHgWbaXCZoFRQd9k1DeGvoZU7HenFJW6kEA9LUqmWrfTEmfb6JuVGAshXRikrbBXnbCvZ8FHXgb26d1urUvYAZSZGaA1X1x4UVLmRhtsSNLhEhKVZGWvULSJkYkbpXFziW78S5Znuk7qyWBHQBGcoTQKQmVzeVfq1196TMy73a5jgiA8Pk4QQwxkSBfp8AqmSKsBMjxiwThw4Uhf1fU7X8esJ8Mqh6Ksno48g9Ucpe961BeM4VLQVtp16SkvKHWARq3aQRASZgPnkBJmWHdfCjdE8Ehu4mQwTxrx2WZFFJTSPZFD4wPa6EiNvV6F9oHjcjE2ieQthX7kJdtyWefkaJXKdqeCnfETcowtU7zqxaqpGjbWcvGJyyb7ostkY7PqRUqwL7PB735dFy2RRwEtNerYgYizdb7pobsK8oVWc6wqQ5pxw7KQJYjJjq6rwU8ujs65avTYWhJFMjaXtDgoBBhgtX8h8zpLiyivqRiBscZhFQtJnYu2xPcPYhBYc6CKZVGokv6
QSWqcHvWgmBb5m5kfnnc26TkGEhvr2ZAK6QNtRkn3PFWGeR5AV1WAvnpFWFUQkKLo7m61YYsE5bSV1jXS6ZhiRyatBvgqn7NV9aX9DrzpGMGAvDSWRLATTqKeJmF8grfVg79XLFVNjB4x3Up9eEEwzHrYb3ofj5SjZFyC6euNu4jGaKHs62bqKZE7drouZrxeJGZWq2FzWKXXgjQcbFatXj5NhARTpztUQtLfTABEi9FqDB1aH3uvzYsnxTJ5or9vNf4y3cNN48shAStTb2Zf8so8wJpYUEjtxBqQf6qZ85jwKbd8MKJQKRPgAvnC819Re61RVzZjWE7wiBGH1aw3cEGR4KxbbfpPECLDsfx26cwLtpBGfSi5pFU9ShA7UXHCiDYdmZ6e2WTbcNWBr3TZHwHwjeK4urDyNVj3METcNwcV1213xZ22bQowH2TdehDY8k4JLRBoEJvjm4zSctUuwUYPNpdBxk7GeNR8WPBhx3sMTBcgZGNi9pMeSpJUeZnzSSLb8fwyhVPELsudwXkK7E3ZXNR7TwSt4kLjsCpDEW67FQi5MTUr8ARhZcaU8zZvr6oz8NSD4ZXnArtXfStNbLdcGEXVjNhqoA5HLYwYY9KZafV7u","stackHeight":3},{"programIdIndex":9,"accounts":[0,2],"data":"3Bxs4Rwr9ZeRDby9","stackHeight":3},{"programIdIndex":6,"accounts":[],"data":"1111zaZ9wsp8kxfRBdZ7K1xifEhCTswwgtp5LidkHWPTZKxFCt1VwbDHM6quq6jpVFaJMT4QiAyRRQh8VBnxprBgeogGWjUACyzsfriqW7F6eVJd1eByqj9XsJbnwFi8cPApGfqxf98xjfPvQYdEWnwazdbXFJjGp1UHnHJJ8upQnVYy2dE9momwSVu","stackHeight":2}]}],"logMessages":["Program ComputeBudget111111111111111111111111111111 invoke [1]","Program ComputeBudget111111111111111111111111111111 success","Program 6UqiSPd2mRCTTwkzhcs1M6DGYsqHWd5jiPueX3LwDMXQ invoke [1]","Program log: Instruction: Invoke","Program 11111111111111111111111111111111 invoke [2]","Program 11111111111111111111111111111111 success","Program 5QPEJ5zDsVou9FQS3KCauKswM3VwBEBu4dpL9xTqkWwN invoke [2]","Program log: Instruction: AppendLeavesToMerkleTrees","Program noopb9bkMVfRPU8AsbpTUg8AQkHtKwMYZiFUjNRtMmV invoke [3]","Program noopb9bkMVfRPU8AsbpTUg8AQkHtKwMYZiFUjNRtMmV consumed 38 of 925444 compute units","Program noopb9bkMVfRPU8AsbpTUg8AQkHtKwMYZiFUjNRtMmV success","Program 11111111111111111111111111111111 invoke [3]","Program 11111111111111111111111111111111 success","Program 5QPEJ5zDsVou9FQS3KCauKswM3VwBEBu4dpL9xTqkWwN consumed 55231 of 972893 compute units","Program 5QPEJ5zDsVou9FQS3KCauKswM3VwBEBu4dpL9xTqkWwN success","Program noopb9bkMVfRPU8AsbpTUg8AQkHtKwMYZiFUjNRtMmV invoke [2]","Program 
noopb9bkMVfRPU8AsbpTUg8AQkHtKwMYZiFUjNRtMmV consumed 38 of 915617 compute units","Program noopb9bkMVfRPU8AsbpTUg8AQkHtKwMYZiFUjNRtMmV success","Program 6UqiSPd2mRCTTwkzhcs1M6DGYsqHWd5jiPueX3LwDMXQ consumed 84740 of 999850 compute units","Program 6UqiSPd2mRCTTwkzhcs1M6DGYsqHWd5jiPueX3LwDMXQ success"],"preTokenBalances":[],"postTokenBalances":[],"rewards":[],"loadedAddresses":{"writable":[],"readonly":[]},"computeUnitsConsumed":84890},"version":0,"blockTime":1715633585}
Loading

0 comments on commit 0699d30

Please sign in to comment.