Remove debug prints
sergeytimoshin committed Dec 4, 2024
1 parent 2461bc1 commit d47f327
Showing 6 changed files with 118 additions and 247 deletions.
@@ -42,7 +42,6 @@ pub fn get_batch_append_with_proofs_inputs<const HEIGHT: usize>(
merkle_proofs: Vec<Vec<[u8; 32]>>,
batch_size: u32,
) -> BatchAppendWithProofsCircuitInputs {
println!("=== get_batch_append_with_proofs_inputs ===");
let mut new_root = [0u8; 32];
let mut changelog: Vec<ChangelogEntry<HEIGHT>> = Vec::new();
let mut circuit_merkle_proofs = Vec::with_capacity(batch_size as usize);
@@ -92,12 +91,6 @@ pub fn get_batch_append_with_proofs_inputs<const HEIGHT: usize>(
// Calculate the public input hash chain with old root, new root, and leaves hash chain
let public_input_hash =
calculate_hash_chain(&[current_root, new_root, leaves_hashchain, start_index_bytes]);
println!("public_input_hash: {:?}", public_input_hash);
println!("current root {:?}", current_root);
println!("new root {:?}", new_root);
println!("leaves hashchain {:?}", leaves_hashchain);
println!("start index {:?}", start_index_bytes);
println!("leaves circuit {:?}", leaves);
BatchAppendWithProofsCircuitInputs {
public_input_hash: BigInt::from_bytes_be(Sign::Plus, &public_input_hash),
old_root: BigInt::from_bytes_be(Sign::Plus, &current_root),
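
For reference, the public_input_hash computed above is a sequential hash chain over four 32-byte values (old root, new root, leaves hash chain, start index). A minimal sketch of such a fold in Rust, assuming the Poseidon::hashv API from light_hasher; this is illustrative and not the crate's exact calculate_hash_chain implementation:

use light_hasher::{Hasher, Poseidon};

// Illustrative fold: chain = H(chain, next) for each subsequent 32-byte value.
// Assumed to approximate calculate_hash_chain; the real function may treat
// empty input and errors differently.
fn hash_chain_sketch(inputs: &[[u8; 32]]) -> [u8; 32] {
    let mut chain = inputs[0];
    for value in inputs.iter().skip(1) {
        chain = Poseidon::hashv(&[chain.as_slice(), value.as_slice()])
            .expect("poseidon hash failed");
    }
    chain
}

// Usage mirroring the call above:
// let public_input_hash =
//     hash_chain_sketch(&[current_root, new_root, leaves_hashchain, start_index_bytes]);
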
1 change: 0 additions & 1 deletion client/src/rpc/rpc_connection.rs
@@ -11,7 +11,6 @@ use solana_sdk::hash::Hash;
use solana_sdk::pubkey::Pubkey;
use solana_sdk::signature::{Keypair, Signature};
use solana_sdk::transaction::Transaction;
use solana_transaction_status::EncodedConfirmedTransactionWithStatusMeta;
use std::fmt::Debug;

#[async_trait]
2 changes: 1 addition & 1 deletion client/src/rpc/solana_rpc.rs
@@ -19,7 +19,7 @@ use solana_sdk::signature::{Keypair, Signature};
use solana_sdk::transaction::Transaction;
use solana_transaction_status::option_serializer::OptionSerializer;
use solana_transaction_status::{
EncodedConfirmedTransactionWithStatusMeta, UiInstruction, UiTransactionEncoding,
UiInstruction, UiTransactionEncoding,
};
use std::fmt::{Debug, Display, Formatter};
use std::time::Duration;
91 changes: 4 additions & 87 deletions forester/src/batched_ops.rs
@@ -12,7 +12,6 @@ use light_client::rpc::RpcConnection;
use light_client::rpc_pool::SolanaRpcPool;
use light_hasher::{Hasher, Poseidon};
use light_prover_client::batch_append_with_proofs::get_batch_append_with_proofs_inputs;
use light_prover_client::batch_append_with_subtrees::calculate_hash_chain;
use light_prover_client::batch_update::get_batch_update_inputs;
use light_prover_client::gnark::batch_append_with_proofs_json_formatter::BatchAppendWithProofsInputsJson;
use light_prover_client::gnark::batch_update_json_formatter::update_inputs_string;
@@ -31,7 +30,7 @@ use solana_sdk::signature::Keypair;
use solana_sdk::signer::Signer;
use std::sync::Arc;
use tokio::sync::Mutex;
use tracing::{error, info};
use tracing::error;

pub struct BatchedOperations<R: RpcConnection, I: Indexer<R>> {
pub rpc_pool: Arc<SolanaRpcPool<R>>,
@@ -72,39 +71,25 @@ impl<R: RpcConnection, I: Indexer<R>> BatchedOperations<R, I> {
}

pub async fn perform_batch_append(&self) -> Result<usize> {
info!("=== perform_batch_append begin ===");
let mut rpc = self.rpc_pool.get_connection().await?;

let (num_inserted_zkps, batch_size) = {
let mut output_queue_account =
rpc.get_account(self.output_queue).await.unwrap().unwrap();
let mut output_queue = ZeroCopyBatchedQueueAccount::from_bytes_mut(
let output_queue = ZeroCopyBatchedQueueAccount::from_bytes_mut(
output_queue_account.data.as_mut_slice(),
)
.unwrap();
let queue_account = output_queue.get_account();
let batch_index = queue_account.queue.next_full_batch_index;

println!("queue: {:?}", queue_account.queue);

let num_inserted_zkps =
output_queue.batches[batch_index as usize].get_num_inserted_zkps();
let zkp_batch_size = queue_account.queue.zkp_batch_size;

let batches = &mut output_queue.batches;
let full_batch = batches.get_mut(batch_index as usize).unwrap();
println!("full batch: {:?}", full_batch);

(num_inserted_zkps, zkp_batch_size)
};

println!(
"num_inserted_zkps: {}, batch_size: {}",
num_inserted_zkps, batch_size
);

let instruction_data = self.create_append_batch_ix_data().await;

let instruction = create_batch_append_instruction(
self.authority.pubkey(),
self.derivation,
@@ -114,15 +99,14 @@ impl<R: RpcConnection, I: Indexer<R>> BatchedOperations<R, I> {
instruction_data?.try_to_vec()?,
);

let result = rpc
rpc
.create_and_send_transaction_with_event::<BatchAppendEvent>(
&[instruction],
&self.authority.pubkey(),
&[&self.authority],
None,
)
.await?;
println!("batch append result: {:?}", result);

self.indexer
.lock()
@@ -134,13 +118,10 @@ impl<R: RpcConnection, I: Indexer<R>> BatchedOperations<R, I> {
num_inserted_zkps,
)
.await;

info!("=== perform_batch_append end ===");
Ok(batch_size as usize)
}

pub async fn perform_batch_nullify(&self) -> Result<usize> {
info!("=== perform_batch_nullify begin ===");
let mut rpc = self.rpc_pool.get_connection().await?;

let instruction_data = self.get_batched_nullify_ix_data().await?;
@@ -153,7 +134,7 @@ impl<R: RpcConnection, I: Indexer<R>> BatchedOperations<R, I> {
instruction_data.try_to_vec()?,
);

let result = rpc
rpc
.create_and_send_transaction_with_event::<BatchNullifyEvent>(
&[instruction],
&self.authority.pubkey(),
@@ -162,8 +143,6 @@ impl<R: RpcConnection, I: Indexer<R>> BatchedOperations<R, I> {
)
.await?;

println!("batch nullify result: {:?}", result);

let (batch_index, batch_size) = {
let mut account = rpc.get_account(self.merkle_tree).await.unwrap().unwrap();
let merkle_tree =
@@ -184,13 +163,10 @@ impl<R: RpcConnection, I: Indexer<R>> BatchedOperations<R, I> {
batch_index as usize,
)
.await;

info!("=== perform_batch_nullify end ===");
Ok(batch_size as usize)
}

async fn create_append_batch_ix_data(&self) -> Result<InstructionDataBatchAppendInputs> {
info!("=== create_append_batch_ix_data begin ===");
let mut rpc = self.rpc_pool.get_connection().await.unwrap();

let (merkle_tree_next_index, current_root) = {
@@ -205,9 +181,6 @@ impl<R: RpcConnection, I: Indexer<R>> BatchedOperations<R, I> {
)
};

info!("Merkle tree next index: {}", merkle_tree_next_index);
info!("Current root: {:?}", current_root);

let (zkp_batch_size, _full_batch_index, num_inserted_zkps, leaves_hashchain) = {
let mut output_queue_account =
rpc.get_account(self.output_queue).await.unwrap().unwrap();
@@ -233,9 +206,6 @@ impl<R: RpcConnection, I: Indexer<R>> BatchedOperations<R, I> {
leaves_hashchain,
)
};
info!("ZKP batch size: {}", zkp_batch_size);
info!("Number of inserted zkps: {}", num_inserted_zkps);

let start = num_inserted_zkps as usize * zkp_batch_size as usize;
let end = start + zkp_batch_size as usize;

@@ -247,14 +217,6 @@ impl<R: RpcConnection, I: Indexer<R>> BatchedOperations<R, I> {
.await
.unwrap();

let local_leaves_hashchain = calculate_hash_chain(&leaves);
info!("start index: {}", start);
info!("end index: {}", end);
info!("num inserted zkps: {}", num_inserted_zkps);
info!("zkp batch size: {}", zkp_batch_size);
assert_eq!(local_leaves_hashchain, leaves_hashchain);
// info!("In hash chain Batch update leaves: {:?}", leaves);

let (old_leaves, merkle_proofs) = {
let mut old_leaves = vec![];
let mut merkle_proofs = vec![];
@@ -273,18 +235,6 @@ impl<R: RpcConnection, I: Indexer<R>> BatchedOperations<R, I> {
(old_leaves, merkle_proofs)
};

let leaf_strings = leaves
.iter()
.map(|l| Pubkey::from(*l).to_string())
.collect::<Vec<_>>();
println!("leaves: {:?}", leaf_strings);

let old_leaf_strings = old_leaves
.iter()
.map(|l| Pubkey::from(*l).to_string())
.collect::<Vec<_>>();
println!("old_leaves: {:?}", old_leaf_strings);

let (proof, new_root) = {
let circuit_inputs = get_batch_append_with_proofs_inputs::<26>(
current_root,
@@ -333,16 +283,13 @@ impl<R: RpcConnection, I: Indexer<R>> BatchedOperations<R, I> {
}
};

info!("=== create_append_batch_ix_data end ===");

Ok(InstructionDataBatchAppendInputs {
public_inputs: AppendBatchProofInputsIx { new_root },
compressed_proof: proof,
})
}

async fn get_batched_nullify_ix_data(&self) -> Result<InstructionDataBatchNullifyInputs> {
info!("=== get_batched_nullify_ix_data begin ===");
let mut rpc = self.rpc_pool.get_connection().await.unwrap();

let (zkp_batch_size, old_root, old_root_index, leaves_hashchain) = {
@@ -355,24 +302,9 @@ impl<R: RpcConnection, I: Indexer<R>> BatchedOperations<R, I> {
let zkp_size = account.queue.zkp_batch_size;
let batch = &merkle_tree.batches[batch_idx];
let zkp_idx = batch.get_num_inserted_zkps();

let hashchains = merkle_tree
.hashchain_store
.clone()
.iter()
.map(|x| {
let x = x.clone();
x.as_slice().to_vec()
})
.collect::<Vec<_>>();
for (i, x) in hashchains.iter().enumerate() {
println!("hashchain {}: {:?}", i, x);
}

let hashchain = merkle_tree.hashchain_store[batch_idx][zkp_idx as usize];
let root_idx = merkle_tree.root_history.last_index();
let root = *merkle_tree.root_history.last().unwrap();

(zkp_size, root, root_idx, hashchain)
};

@@ -408,19 +340,6 @@ impl<R: RpcConnection, I: Indexer<R>> BatchedOperations<R, I> {
nullifiers.push(nullifier);
}

let leaf_strings = leaves
.iter()
.map(|l| Pubkey::from(*l).to_string())
.collect::<Vec<_>>();

let old_leaf_strings = old_leaves
.iter()
.map(|l| Pubkey::from(*l).to_string())
.collect::<Vec<_>>();

let local_nullifier_hashchain = calculate_hash_chain(&nullifiers);
assert_eq!(leaves_hashchain, local_nullifier_hashchain);

let inputs = get_batch_update_inputs::<26>(
old_root,
tx_hashes,
@@ -463,8 +382,6 @@ impl<R: RpcConnection, I: Indexer<R>> BatchedOperations<R, I> {
));
};

info!("=== get_batched_nullify_ix_data end ===");

Ok(InstructionDataBatchNullifyInputs {
public_inputs: BatchProofInputsIx {
new_root,
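
The removals above also drop the local hash-chain consistency checks that compared calculate_hash_chain(&leaves) (and calculate_hash_chain(&nullifiers)) against the on-chain leaves_hashchain. If that sanity check is still wanted without the debug output, one option is a debug-only assertion; a sketch, assuming calculate_hash_chain remains importable from light_prover_client::batch_append_with_subtrees and that leaves and leaves_hashchain are the values already in scope above:

use light_prover_client::batch_append_with_subtrees::calculate_hash_chain;

// Compiled out in release builds; verifies the locally fetched leaves against
// the hash chain stored on chain before building the circuit inputs.
debug_assert_eq!(
    calculate_hash_chain(&leaves),
    leaves_hashchain,
    "local leaves hash chain does not match the on-chain hash chain"
);

Alternatively, the removed info!/println! progress lines could have been downgraded to tracing::debug! so they stay available when verbose logging is enabled, rather than being deleted outright.
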
8 changes: 0 additions & 8 deletions programs/registry/src/utils.rs
@@ -11,19 +11,11 @@ pub fn get_cpi_authority_pda() -> (Pubkey, u8) {
}

pub fn get_forester_epoch_pda_from_authority(authority: &Pubkey, epoch: u64) -> (Pubkey, u8) {
println!(
"get_forester_epoch_pda_from_authority: authority: {}, epoch: {}",
authority, epoch
);
let forester_pda = get_forester_pda(authority);
get_forester_epoch_pda(&forester_pda.0, epoch)
}

pub fn get_forester_epoch_pda_from_derivation(derivation: &Pubkey, epoch: u64) -> (Pubkey, u8) {
println!(
"get_forester_epoch_pda_from_derivation: derivation: {}, epoch: {}",
derivation, epoch
);
let forester_pda = get_forester_pda(derivation);
get_forester_epoch_pda(&forester_pda.0, epoch)
}
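
Both helpers above derive the epoch PDA in two steps: first the forester PDA from the authority (or derivation) key, then the epoch PDA from that address and the epoch number. A hypothetical caller-side sketch; the authority key and epoch value are placeholders, and the actual seeds are defined inside get_forester_pda and get_forester_epoch_pda:

use solana_sdk::pubkey::Pubkey;

// Placeholder inputs for illustration only.
fn example_epoch_pda() -> (Pubkey, u8) {
    let authority = Pubkey::new_unique();
    let epoch: u64 = 42;
    // Returns the derived PDA and its bump seed.
    get_forester_epoch_pda_from_authority(&authority, epoch)
}
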