From 935b45aa6b6e32b7712f43c13ff66628605b3719 Mon Sep 17 00:00:00 2001
From: Justin Starry
Date: Sat, 10 Aug 2024 01:12:01 +0800
Subject: [PATCH] Rename types and metrics from executed to processed (#2435)

---
 core/src/banking_stage/committer.rs           |  22 +-
 core/src/banking_stage/consume_worker.rs      |  36 +--
 core/src/banking_stage/consumer.rs            | 124 +++++-----
 core/src/banking_stage/leader_slot_metrics.rs |  60 ++---
 .../leader_slot_timing_metrics.rs             |   4 +-
 ledger/src/blockstore_processor.rs            |   4 +-
 poh/src/poh_recorder.rs                       |   6 +-
 runtime/src/bank.rs                           | 225 +++++++++---------
 runtime/src/bank/tests.rs                     |  31 +--
 svm/doc/spec.md                               |   6 +-
 svm/examples/paytube/src/settler.rs           |   9 +-
 svm/src/account_saver.rs                      |  74 +++---
 svm/src/lib.rs                                |   1 +
 svm/src/transaction_commit_result.rs          |   4 +-
 svm/src/transaction_execution_result.rs       |  38 ---
 svm/src/transaction_processing_result.rs      |  48 ++++
 svm/src/transaction_processor.rs              |  25 +-
 svm/tests/conformance.rs                      |  18 +-
 svm/tests/integration_test.rs                 |  24 +-
 19 files changed, 394 insertions(+), 365 deletions(-)
 create mode 100644 svm/src/transaction_processing_result.rs

diff --git a/core/src/banking_stage/committer.rs b/core/src/banking_stage/committer.rs
index d91900299107c8..c020c92866dec0 100644
--- a/core/src/banking_stage/committer.rs
+++ b/core/src/banking_stage/committer.rs
@@ -6,7 +6,7 @@ use {
     },
     solana_measure::measure_us,
     solana_runtime::{
-        bank::{Bank, ExecutedTransactionCounts, TransactionBalancesSet},
+        bank::{Bank, ProcessedTransactionCounts, TransactionBalancesSet},
         bank_utils,
         prioritization_fee_cache::PrioritizationFeeCache,
         transaction_batch::TransactionBatch,
@@ -15,7 +15,9 @@ use {
     solana_sdk::{hash::Hash, pubkey::Pubkey, saturating_add_assign},
     solana_svm::{
         transaction_commit_result::{TransactionCommitResult, TransactionCommitResultExtensions},
-        transaction_execution_result::TransactionExecutionResult,
+        transaction_processing_result::{
+            TransactionProcessingResult, TransactionProcessingResultExtensions,
+        },
     },
     solana_transaction_status::{
         token_balances::TransactionTokenBalancesSet, TransactionTokenBalance,
@@ -67,27 +69,27 @@ impl Committer {
     pub(super) fn commit_transactions(
         &self,
         batch: &TransactionBatch,
-        execution_results: Vec<TransactionExecutionResult>,
+        processing_results: Vec<TransactionProcessingResult>,
         last_blockhash: Hash,
         lamports_per_signature: u64,
         starting_transaction_index: Option<usize>,
         bank: &Arc<Bank>,
         pre_balance_info: &mut PreBalanceInfo,
         execute_and_commit_timings: &mut LeaderExecuteAndCommitTimings,
-        execution_counts: &ExecutedTransactionCounts,
+        processed_counts: &ProcessedTransactionCounts,
     ) -> (u64, Vec<CommitTransactionDetails>) {
-        let executed_transactions = execution_results
+        let processed_transactions = processing_results
             .iter()
             .zip(batch.sanitized_transactions())
-            .filter_map(|(execution_result, tx)| execution_result.was_executed().then_some(tx))
+            .filter_map(|(processing_result, tx)| processing_result.was_processed().then_some(tx))
            .collect_vec();

         let (commit_results, commit_time_us) = measure_us!(bank.commit_transactions(
             batch.sanitized_transactions(),
-            execution_results,
+            processing_results,
             last_blockhash,
             lamports_per_signature,
-            execution_counts,
+            processed_counts,
             &mut execute_and_commit_timings.execute_timings,
         ));
         execute_and_commit_timings.commit_us = commit_time_us;
@@ -122,7 +124,7 @@ impl Committer {
                 starting_transaction_index,
             );
             self.prioritization_fee_cache
-                .update(bank, executed_transactions.into_iter());
+                .update(bank, processed_transactions.into_iter());
         });
         execute_and_commit_timings.find_and_send_votes_us = find_and_send_votes_us;
         (commit_time_us,
commit_transaction_statuses) @@ -145,7 +147,7 @@ impl Committer { let batch_transaction_indexes: Vec<_> = commit_results .iter() .map(|commit_result| { - if commit_result.was_executed() { + if commit_result.was_committed() { let this_transaction_index = transaction_index; saturating_add_assign!(transaction_index, 1); this_transaction_index diff --git a/core/src/banking_stage/consume_worker.rs b/core/src/banking_stage/consume_worker.rs index 449ea9ab963a39..3902ce8829f163 100644 --- a/core/src/banking_stage/consume_worker.rs +++ b/core/src/banking_stage/consume_worker.rs @@ -234,18 +234,18 @@ impl ConsumeWorkerMetrics { }: &ExecuteAndCommitTransactionsOutput, ) { self.count_metrics - .transactions_attempted_execution_count + .transactions_attempted_processing_count .fetch_add( - transaction_counts.attempted_execution_count, + transaction_counts.attempted_processing_count, Ordering::Relaxed, ); self.count_metrics - .executed_transactions_count - .fetch_add(transaction_counts.executed_count, Ordering::Relaxed); + .processed_transactions_count + .fetch_add(transaction_counts.processed_count, Ordering::Relaxed); self.count_metrics - .executed_with_successful_result_count + .processed_with_successful_result_count .fetch_add( - transaction_counts.executed_with_successful_result_count, + transaction_counts.processed_with_successful_result_count, Ordering::Relaxed, ); self.count_metrics @@ -410,9 +410,9 @@ impl ConsumeWorkerMetrics { } struct ConsumeWorkerCountMetrics { - transactions_attempted_execution_count: AtomicU64, - executed_transactions_count: AtomicU64, - executed_with_successful_result_count: AtomicU64, + transactions_attempted_processing_count: AtomicU64, + processed_transactions_count: AtomicU64, + processed_with_successful_result_count: AtomicU64, retryable_transaction_count: AtomicUsize, retryable_expired_bank_count: AtomicUsize, cost_model_throttled_transactions_count: AtomicU64, @@ -423,9 +423,9 @@ struct ConsumeWorkerCountMetrics { impl Default for ConsumeWorkerCountMetrics { fn default() -> Self { Self { - transactions_attempted_execution_count: AtomicU64::default(), - executed_transactions_count: AtomicU64::default(), - executed_with_successful_result_count: AtomicU64::default(), + transactions_attempted_processing_count: AtomicU64::default(), + processed_transactions_count: AtomicU64::default(), + processed_with_successful_result_count: AtomicU64::default(), retryable_transaction_count: AtomicUsize::default(), retryable_expired_bank_count: AtomicUsize::default(), cost_model_throttled_transactions_count: AtomicU64::default(), @@ -441,19 +441,19 @@ impl ConsumeWorkerCountMetrics { "banking_stage_worker_counts", "id" => id, ( - "transactions_attempted_execution_count", - self.transactions_attempted_execution_count + "transactions_attempted_processing_count", + self.transactions_attempted_processing_count .swap(0, Ordering::Relaxed), i64 ), ( - "executed_transactions_count", - self.executed_transactions_count.swap(0, Ordering::Relaxed), + "processed_transactions_count", + self.processed_transactions_count.swap(0, Ordering::Relaxed), i64 ), ( - "executed_with_successful_result_count", - self.executed_with_successful_result_count + "processed_with_successful_result_count", + self.processed_with_successful_result_count .swap(0, Ordering::Relaxed), i64 ), diff --git a/core/src/banking_stage/consumer.rs b/core/src/banking_stage/consumer.rs index 9965b1c3214c3d..5d1d7c1637c40c 100644 --- a/core/src/banking_stage/consumer.rs +++ b/core/src/banking_stage/consumer.rs @@ -3,7 +3,7 @@ use { 
committer::{CommitTransactionDetails, Committer, PreBalanceInfo}, immutable_deserialized_packet::ImmutableDeserializedPacket, leader_slot_metrics::{ - LeaderSlotMetricsTracker, ProcessTransactionsCounts, ProcessTransactionsSummary, + CommittedTransactionsCounts, LeaderSlotMetricsTracker, ProcessTransactionsSummary, }, leader_slot_timing_metrics::LeaderExecuteAndCommitTimings, qos_service::QosService, @@ -34,6 +34,7 @@ use { solana_svm::{ account_loader::{validate_fee_payer, TransactionCheckResult}, transaction_error_metrics::TransactionErrorMetrics, + transaction_processing_result::TransactionProcessingResultExtensions, transaction_processor::{ExecutionRecordingConfig, TransactionProcessingConfig}, }, solana_timings::ExecuteTimings, @@ -57,7 +58,7 @@ pub struct ProcessTransactionBatchOutput { pub struct ExecuteAndCommitTransactionsOutput { // Transactions counts reported to `ConsumeWorkerMetrics` and then // accumulated later for `LeaderSlotMetrics` - pub(crate) transaction_counts: ExecuteAndCommitTransactionsCounts, + pub(crate) transaction_counts: LeaderProcessedTransactionCounts, // Transactions that either were not executed, or were executed and failed to be committed due // to the block ending. pub(crate) retryable_transaction_indexes: Vec, @@ -71,15 +72,15 @@ pub struct ExecuteAndCommitTransactionsOutput { } #[derive(Debug, Default, PartialEq)] -pub struct ExecuteAndCommitTransactionsCounts { - // Total number of transactions that were passed as candidates for execution - pub(crate) attempted_execution_count: u64, - // The number of transactions of that were executed. See description of in `ProcessTransactionsSummary` +pub struct LeaderProcessedTransactionCounts { + // Total number of transactions that were passed as candidates for processing + pub(crate) attempted_processing_count: u64, + // The number of transactions of that were processed. See description of in `ProcessTransactionsSummary` // for possible outcomes of execution. - pub(crate) executed_count: u64, - // Total number of the executed transactions that returned success/not + pub(crate) processed_count: u64, + // Total number of the processed transactions that returned success/not // an error. 
- pub(crate) executed_with_successful_result_count: u64, + pub(crate) processed_with_successful_result_count: u64, } pub struct Consumer { @@ -284,7 +285,7 @@ impl Consumer { ) -> ProcessTransactionsSummary { let mut chunk_start = 0; let mut all_retryable_tx_indexes = vec![]; - let mut total_transaction_counts = ProcessTransactionsCounts::default(); + let mut total_transaction_counts = CommittedTransactionsCounts::default(); let mut total_cost_model_throttled_transactions_count: u64 = 0; let mut total_cost_model_us: u64 = 0; let mut total_execute_and_commit_timings = LeaderExecuteAndCommitTimings::default(); @@ -622,22 +623,23 @@ impl Consumer { execute_and_commit_timings.load_execute_us = load_execute_us; let LoadAndExecuteTransactionsOutput { - execution_results, - execution_counts, + processing_results, + processed_counts, } = load_and_execute_transactions_output; - let transaction_counts = ExecuteAndCommitTransactionsCounts { - executed_count: execution_counts.executed_transactions_count, - executed_with_successful_result_count: execution_counts.executed_successfully_count, - attempted_execution_count: execution_results.len() as u64, + let transaction_counts = LeaderProcessedTransactionCounts { + processed_count: processed_counts.processed_transactions_count, + processed_with_successful_result_count: processed_counts + .processed_with_successful_result_count, + attempted_processing_count: processing_results.len() as u64, }; - let (executed_transactions, execution_results_to_transactions_us) = - measure_us!(execution_results + let (processed_transactions, processing_results_to_transactions_us) = + measure_us!(processing_results .iter() .zip(batch.sanitized_transactions()) - .filter_map(|(execution_result, tx)| { - if execution_result.was_executed() { + .filter_map(|(processing_result, tx)| { + if processing_result.was_processed() { Some(tx.to_versioned_transaction()) } else { None @@ -661,7 +663,7 @@ impl Consumer { let (record_transactions_summary, record_us) = measure_us!(self .transaction_recorder - .record_transactions(bank.slot(), executed_transactions)); + .record_transactions(bank.slot(), processed_transactions)); execute_and_commit_timings.record_us = record_us; let RecordTransactionsSummary { @@ -670,13 +672,13 @@ impl Consumer { starting_transaction_index, } = record_transactions_summary; execute_and_commit_timings.record_transactions_timings = RecordTransactionsTimings { - execution_results_to_transactions_us, + processing_results_to_transactions_us, ..record_transactions_timings }; if let Err(recorder_err) = record_transactions_result { - retryable_transaction_indexes.extend(execution_results.iter().enumerate().filter_map( - |(index, execution_result)| execution_result.was_executed().then_some(index), + retryable_transaction_indexes.extend(processing_results.iter().enumerate().filter_map( + |(index, processing_result)| processing_result.was_processed().then_some(index), )); return ExecuteAndCommitTransactionsOutput { @@ -691,22 +693,22 @@ impl Consumer { } let (commit_time_us, commit_transaction_statuses) = - if execution_counts.executed_transactions_count != 0 { + if processed_counts.processed_transactions_count != 0 { self.committer.commit_transactions( batch, - execution_results, + processing_results, last_blockhash, lamports_per_signature, starting_transaction_index, bank, &mut pre_balance_info, &mut execute_and_commit_timings, - &execution_counts, + &processed_counts, ) } else { ( 0, - vec![CommitTransactionDetails::NotCommitted; execution_results.len()], + 
vec![CommitTransactionDetails::NotCommitted; processing_results.len()], ) }; @@ -727,7 +729,7 @@ impl Consumer { ); debug_assert_eq!( - transaction_counts.attempted_execution_count, + transaction_counts.attempted_processing_count, commit_transaction_statuses.len() as u64, ); @@ -1120,10 +1122,10 @@ mod tests { assert_eq!( transaction_counts, - ExecuteAndCommitTransactionsCounts { - attempted_execution_count: 1, - executed_count: 1, - executed_with_successful_result_count: 1, + LeaderProcessedTransactionCounts { + attempted_processing_count: 1, + processed_count: 1, + processed_with_successful_result_count: 1, } ); assert!(commit_transactions_result.is_ok()); @@ -1168,11 +1170,11 @@ mod tests { } = process_transactions_batch_output.execute_and_commit_transactions_output; assert_eq!( transaction_counts, - ExecuteAndCommitTransactionsCounts { - attempted_execution_count: 1, - // Transactions was still executed, just wasn't committed, so should be counted here. - executed_count: 1, - executed_with_successful_result_count: 1, + LeaderProcessedTransactionCounts { + attempted_processing_count: 1, + // Transaction was still processed, just wasn't committed, so should be counted here. + processed_count: 1, + processed_with_successful_result_count: 1, } ); assert_eq!(retryable_transaction_indexes, vec![0]); @@ -1309,10 +1311,10 @@ mod tests { assert_eq!( transaction_counts, - ExecuteAndCommitTransactionsCounts { - attempted_execution_count: 1, - executed_count: 1, - executed_with_successful_result_count: 0, + LeaderProcessedTransactionCounts { + attempted_processing_count: 1, + processed_count: 1, + processed_with_successful_result_count: 0, } ); assert!(commit_transactions_result.is_ok()); @@ -1415,10 +1417,10 @@ mod tests { assert_eq!( transaction_counts, - ExecuteAndCommitTransactionsCounts { - attempted_execution_count: 1, - executed_count: 0, - executed_with_successful_result_count: 0, + LeaderProcessedTransactionCounts { + attempted_processing_count: 1, + processed_count: 0, + processed_with_successful_result_count: 0, } ); assert!(retryable_transaction_indexes.is_empty()); @@ -1506,7 +1508,7 @@ mod tests { commit_transactions_result, .. } = process_transactions_batch_output.execute_and_commit_transactions_output; - assert_eq!(transaction_counts.executed_with_successful_result_count, 1); + assert_eq!(transaction_counts.processed_with_successful_result_count, 1); assert!(commit_transactions_result.is_ok()); let block_cost = get_block_cost(); @@ -1537,7 +1539,7 @@ mod tests { retryable_transaction_indexes, .. 
} = process_transactions_batch_output.execute_and_commit_transactions_output; - assert_eq!(transaction_counts.executed_with_successful_result_count, 1); + assert_eq!(transaction_counts.processed_with_successful_result_count, 1); assert!(commit_transactions_result.is_ok()); // first one should have been committed, second one not committed due to AccountInUse error during @@ -1664,10 +1666,10 @@ mod tests { assert_eq!( transaction_counts, - ExecuteAndCommitTransactionsCounts { - attempted_execution_count: 2, - executed_count: 1, - executed_with_successful_result_count: 1, + LeaderProcessedTransactionCounts { + attempted_processing_count: 2, + processed_count: 1, + processed_with_successful_result_count: 1, } ); assert_eq!(retryable_transaction_indexes, vec![1]); @@ -1725,13 +1727,13 @@ mod tests { assert!(!reached_max_poh_height); assert_eq!( transaction_counts, - ProcessTransactionsCounts { - attempted_execution_count: transactions_len as u64, + CommittedTransactionsCounts { + attempted_processing_count: transactions_len as u64, // Both transactions should have been committed, even though one was an error, // because InstructionErrors are committed committed_transactions_count: 2, committed_transactions_with_successful_result_count: 1, - executed_but_failed_commit: 0, + processed_but_failed_commit: 0, } ); assert_eq!( @@ -1786,11 +1788,11 @@ mod tests { assert!(!reached_max_poh_height); assert_eq!( transaction_counts, - ProcessTransactionsCounts { - attempted_execution_count: transactions_len as u64, + CommittedTransactionsCounts { + attempted_processing_count: transactions_len as u64, committed_transactions_count: 2, committed_transactions_with_successful_result_count: 2, - executed_but_failed_commit: 0, + processed_but_failed_commit: 0, } ); @@ -1862,12 +1864,12 @@ mod tests { assert!(reached_max_poh_height); assert_eq!( transaction_counts, - ProcessTransactionsCounts { - attempted_execution_count: 1, + CommittedTransactionsCounts { + attempted_processing_count: 1, // MaxHeightReached error does not commit, should be zero here committed_transactions_count: 0, committed_transactions_with_successful_result_count: 0, - executed_but_failed_commit: 1, + processed_but_failed_commit: 1, } ); diff --git a/core/src/banking_stage/leader_slot_metrics.rs b/core/src/banking_stage/leader_slot_metrics.rs index 4e290600a4de3c..98cf4d72f92c91 100644 --- a/core/src/banking_stage/leader_slot_metrics.rs +++ b/core/src/banking_stage/leader_slot_metrics.rs @@ -1,6 +1,6 @@ use { super::{ - consumer::ExecuteAndCommitTransactionsCounts, + consumer::LeaderProcessedTransactionCounts, leader_slot_timing_metrics::{LeaderExecuteAndCommitTimings, LeaderSlotTimingMetrics}, packet_deserializer::PacketReceiverStats, unprocessed_transaction_storage::{ @@ -13,15 +13,15 @@ use { std::time::Instant, }; -/// A summary of what happened to transactions passed to the execution pipeline. +/// A summary of what happened to transactions passed to the processing pipeline. /// Transactions can -/// 1) Did not even make it to execution due to being filtered out by things like AccountInUse +/// 1) Did not even make it to processing due to being filtered out by things like AccountInUse /// lock conflicts or CostModel compute limits. These types of errors are retryable and /// counted in `Self::retryable_transaction_indexes`. -/// 2) Did not execute due to some fatal error like too old, or duplicate signature. These +/// 2) Did not process due to some fatal error like too old, or duplicate signature. 
These /// will be dropped from the transactions queue and not counted in `Self::retryable_transaction_indexes` -/// 3) Were executed and committed, captured by `transaction_counts` below. -/// 4) Were executed and failed commit, captured by `transaction_counts` below. +/// 3) Were processed and committed, captured by `transaction_counts` below. +/// 4) Were processed and failed commit, captured by `transaction_counts` below. pub(crate) struct ProcessTransactionsSummary { /// Returns true if we hit the end of the block/max PoH height for the block /// before processing all the transactions in the batch. @@ -29,7 +29,7 @@ pub(crate) struct ProcessTransactionsSummary { /// Total transaction counts tracked for reporting `LeaderSlotMetrics`. See /// description of struct above for possible outcomes for these transactions - pub transaction_counts: ProcessTransactionsCounts, + pub transaction_counts: CommittedTransactionsCounts, /// Indexes of transactions in the transactions slice that were not /// committed but are retryable @@ -53,42 +53,42 @@ pub(crate) struct ProcessTransactionsSummary { } #[derive(Debug, Default, PartialEq)] -pub struct ProcessTransactionsCounts { - /// Total number of transactions that were passed as candidates for execution - pub attempted_execution_count: u64, +pub struct CommittedTransactionsCounts { + /// Total number of transactions that were passed as candidates for processing + pub attempted_processing_count: u64, /// Total number of transactions that made it into the block pub committed_transactions_count: u64, - /// Total number of transactions that made it into the block where the - /// transactions output from execution was success/no error. + /// Total number of transactions that made it into the block where the transactions + /// output from processing was success/no error. pub committed_transactions_with_successful_result_count: u64, - /// All transactions that were executed but then failed record because the + /// All transactions that were processed but then failed record because the /// slot ended - pub executed_but_failed_commit: u64, + pub processed_but_failed_commit: u64, } -impl ProcessTransactionsCounts { +impl CommittedTransactionsCounts { pub fn accumulate( &mut self, - transaction_counts: &ExecuteAndCommitTransactionsCounts, + transaction_counts: &LeaderProcessedTransactionCounts, committed: bool, ) { saturating_add_assign!( - self.attempted_execution_count, - transaction_counts.attempted_execution_count + self.attempted_processing_count, + transaction_counts.attempted_processing_count ); if committed { saturating_add_assign!( self.committed_transactions_count, - transaction_counts.executed_count + transaction_counts.processed_count ); saturating_add_assign!( self.committed_transactions_with_successful_result_count, - transaction_counts.executed_with_successful_result_count + transaction_counts.processed_with_successful_result_count ); } else { saturating_add_assign!( - self.executed_but_failed_commit, - transaction_counts.executed_count + self.processed_but_failed_commit, + transaction_counts.processed_count ); } } @@ -173,10 +173,10 @@ struct LeaderSlotPacketCountMetrics { // duplicate signature checks retryable_packets_filtered_count: u64, - // total number of transactions that attempted execution in this slot. Should equal the sum + // total number of transactions that attempted processing in this slot. 
Should equal the sum // of `committed_transactions_count`, `retryable_errored_transaction_count`, and // `nonretryable_errored_transactions_count`. - transactions_attempted_execution_count: u64, + transactions_attempted_processing_count: u64, // total number of transactions that were executed and committed into the block // on this thread @@ -305,8 +305,8 @@ impl LeaderSlotPacketCountMetrics { i64 ), ( - "transactions_attempted_execution_count", - self.transactions_attempted_execution_count, + "transactions_attempted_processing_count", + self.transactions_attempted_processing_count, i64 ), ( @@ -607,8 +607,8 @@ impl LeaderSlotMetricsTracker { saturating_add_assign!( leader_slot_metrics .packet_count_metrics - .transactions_attempted_execution_count, - transaction_counts.attempted_execution_count + .transactions_attempted_processing_count, + transaction_counts.attempted_processing_count ); saturating_add_assign!( @@ -629,7 +629,7 @@ impl LeaderSlotMetricsTracker { leader_slot_metrics .packet_count_metrics .executed_transactions_failed_commit_count, - transaction_counts.executed_but_failed_commit + transaction_counts.processed_but_failed_commit ); saturating_add_assign!( @@ -644,7 +644,7 @@ impl LeaderSlotMetricsTracker { .packet_count_metrics .nonretryable_errored_transactions_count, transaction_counts - .attempted_execution_count + .attempted_processing_count .saturating_sub(transaction_counts.committed_transactions_count) .saturating_sub(retryable_transaction_indexes.len() as u64) ); diff --git a/core/src/banking_stage/leader_slot_timing_metrics.rs b/core/src/banking_stage/leader_slot_timing_metrics.rs index 6dbd697a956010..0de9296ce91aac 100644 --- a/core/src/banking_stage/leader_slot_timing_metrics.rs +++ b/core/src/banking_stage/leader_slot_timing_metrics.rs @@ -55,9 +55,9 @@ impl LeaderExecuteAndCommitTimings { "id" => id, ("slot", slot as i64, i64), ( - "execution_results_to_transactions_us", + "processing_results_to_transactions_us", self.record_transactions_timings - .execution_results_to_transactions_us as i64, + .processing_results_to_transactions_us as i64, i64 ), ( diff --git a/ledger/src/blockstore_processor.rs b/ledger/src/blockstore_processor.rs index 24ff5ce8fb7baa..8d8211e02a07c1 100644 --- a/ledger/src/blockstore_processor.rs +++ b/ledger/src/blockstore_processor.rs @@ -57,7 +57,7 @@ use { }, }, solana_svm::{ - transaction_commit_result::TransactionCommitResult, + transaction_commit_result::{TransactionCommitResult, TransactionCommitResultExtensions}, transaction_processor::ExecutionRecordingConfig, }, solana_timings::{report_execute_timings, ExecuteTimingType, ExecuteTimings}, @@ -190,7 +190,7 @@ pub fn execute_batch( let committed_transactions = commit_results .iter() .zip(batch.sanitized_transactions()) - .filter_map(|(commit_result, tx)| commit_result.is_ok().then_some(tx)) + .filter_map(|(commit_result, tx)| commit_result.was_committed().then_some(tx)) .collect_vec(); let first_err = get_first_error(batch, &commit_results); diff --git a/poh/src/poh_recorder.rs b/poh/src/poh_recorder.rs index 4bbd0ad3eb8214..5c6f5d26c7f4f5 100644 --- a/poh/src/poh_recorder.rs +++ b/poh/src/poh_recorder.rs @@ -113,7 +113,7 @@ impl Record { #[derive(Default, Debug)] pub struct RecordTransactionsTimings { - pub execution_results_to_transactions_us: u64, + pub processing_results_to_transactions_us: u64, pub hash_us: u64, pub poh_record_us: u64, } @@ -121,8 +121,8 @@ pub struct RecordTransactionsTimings { impl RecordTransactionsTimings { pub fn accumulate(&mut self, other: 
&RecordTransactionsTimings) { saturating_add_assign!( - self.execution_results_to_transactions_us, - other.execution_results_to_transactions_us + self.processing_results_to_transactions_us, + other.processing_results_to_transactions_us ); saturating_add_assign!(self.hash_us, other.hash_us); saturating_add_assign!(self.poh_record_us, other.poh_record_us); diff --git a/runtime/src/bank.rs b/runtime/src/bank.rs index 70fe81e9326063..6b93ec643ee031 100644 --- a/runtime/src/bank.rs +++ b/runtime/src/bank.rs @@ -155,9 +155,12 @@ use { transaction_commit_result::{CommittedTransaction, TransactionCommitResult}, transaction_error_metrics::TransactionErrorMetrics, transaction_execution_result::{ - TransactionExecutionDetails, TransactionExecutionResult, TransactionLoadedAccountsStats, + TransactionExecutionDetails, TransactionLoadedAccountsStats, }, transaction_processing_callback::TransactionProcessingCallback, + transaction_processing_result::{ + TransactionProcessingResult, TransactionProcessingResultExtensions, + }, transaction_processor::{ ExecutionRecordingConfig, TransactionBatchProcessor, TransactionLogMessages, TransactionProcessingConfig, TransactionProcessingEnvironment, @@ -315,12 +318,12 @@ impl BankRc { } pub struct LoadAndExecuteTransactionsOutput { - // Vector of results indicating whether a transaction was executed or could not - // be executed. Note executed transactions can still have failed! - pub execution_results: Vec, - // Executed transaction counts used to update bank transaction counts and + // Vector of results indicating whether a transaction was processed or could not + // be processed. Note processed transactions can still have failed! + pub processing_results: Vec, + // Processed transaction counts used to update bank transaction counts and // for metrics reporting. - pub execution_counts: ExecutedTransactionCounts, + pub processed_counts: ProcessedTransactionCounts, } pub struct TransactionSimulationResult { @@ -883,10 +886,10 @@ struct PrevEpochInflationRewards { } #[derive(Debug, Default, PartialEq)] -pub struct ExecutedTransactionCounts { - pub executed_transactions_count: u64, - pub executed_successfully_count: u64, - pub executed_non_vote_transactions_count: u64, +pub struct ProcessedTransactionCounts { + pub processed_transactions_count: u64, + pub processed_non_vote_transactions_count: u64, + pub processed_with_successful_result_count: u64, pub signature_count: u64, } @@ -3082,12 +3085,13 @@ impl Bank { fn update_transaction_statuses( &self, sanitized_txs: &[SanitizedTransaction], - execution_results: &[TransactionExecutionResult], + processing_results: &[TransactionProcessingResult], ) { let mut status_cache = self.status_cache.write().unwrap(); - assert_eq!(sanitized_txs.len(), execution_results.len()); - for (tx, execution_result) in sanitized_txs.iter().zip(execution_results) { - if let Some(details) = execution_result.details() { + assert_eq!(sanitized_txs.len(), processing_results.len()); + for (tx, processing_result) in sanitized_txs.iter().zip(processing_results) { + if let Ok(processed_tx) = &processing_result { + let details = &processed_tx.execution_details; // Add the message hash to the status cache to ensure that this message // won't be processed again with a different signature. status_cache.insert( @@ -3315,7 +3319,7 @@ impl Bank { let mut timings = ExecuteTimings::default(); let LoadAndExecuteTransactionsOutput { - mut execution_results, + mut processing_results, .. 
} = self.load_and_execute_transactions( &batch, @@ -3352,18 +3356,15 @@ impl Bank { debug!("simulate_transaction: {:?}", timings); - let execution_result = - execution_results - .pop() - .unwrap_or(TransactionExecutionResult::NotExecuted( - TransactionError::InvalidProgramForExecution, - )); - let flattened_result = execution_result.flattened_result(); + let processing_result = processing_results + .pop() + .unwrap_or(Err(TransactionError::InvalidProgramForExecution)); + let flattened_result = processing_result.flattened_result(); let (post_simulation_accounts, logs, return_data, inner_instructions) = - match execution_result { - TransactionExecutionResult::Executed(executed_tx) => { - let details = executed_tx.execution_details; - let post_simulation_accounts = executed_tx + match processing_result { + Ok(processed_tx) => { + let details = processed_tx.execution_details; + let post_simulation_accounts = processed_tx .loaded_transaction .accounts .into_iter() @@ -3376,7 +3377,7 @@ impl Bank { details.inner_instructions, ) } - TransactionExecutionResult::NotExecuted(_) => (vec![], None, None, None), + Err(_) => (vec![], None, None, None), }; let logs = logs.unwrap_or_default(); @@ -3491,39 +3492,43 @@ impl Bank { timings.accumulate(&sanitized_output.execute_timings); let ((), collect_logs_us) = - measure_us!(self.collect_logs(sanitized_txs, &sanitized_output.execution_results)); + measure_us!(self.collect_logs(sanitized_txs, &sanitized_output.processing_results)); timings.saturating_add_in_place(ExecuteTimingType::CollectLogsUs, collect_logs_us); - let mut execution_counts = ExecutedTransactionCounts::default(); + let mut processed_counts = ProcessedTransactionCounts::default(); let err_count = &mut error_counters.total; - for (execution_result, tx) in sanitized_output.execution_results.iter().zip(sanitized_txs) { + for (processing_result, tx) in sanitized_output + .processing_results + .iter() + .zip(sanitized_txs) + { if let Some(debug_keys) = &self.transaction_debug_keys { for key in tx.message().account_keys().iter() { if debug_keys.contains(key) { - let result = execution_result.flattened_result(); + let result = processing_result.flattened_result(); info!("slot: {} result: {:?} tx: {:?}", self.slot, result, tx); break; } } } - if execution_result.was_executed() { + if processing_result.was_processed() { // Signature count must be accumulated only if the transaction - // is executed, otherwise a mismatched count between banking and - // replay could occur - execution_counts.signature_count += + // is processed, otherwise a mismatched count between banking + // and replay could occur + processed_counts.signature_count += u64::from(tx.message().header().num_required_signatures); - execution_counts.executed_transactions_count += 1; + processed_counts.processed_transactions_count += 1; if !tx.is_simple_vote_transaction() { - execution_counts.executed_non_vote_transactions_count += 1; + processed_counts.processed_non_vote_transactions_count += 1; } } - match execution_result.flattened_result() { + match processing_result.flattened_result() { Ok(()) => { - execution_counts.executed_successfully_count += 1; + processed_counts.processed_with_successful_result_count += 1; } Err(err) => { if *err_count == 0 { @@ -3535,15 +3540,15 @@ impl Bank { } LoadAndExecuteTransactionsOutput { - execution_results: sanitized_output.execution_results, - execution_counts, + processing_results: sanitized_output.processing_results, + processed_counts, } } fn collect_logs( &self, transactions: 
&[SanitizedTransaction], - execution_results: &[TransactionExecutionResult], + processing_results: &[TransactionProcessingResult], ) { let transaction_log_collector_config = self.transaction_log_collector_config.read().unwrap(); @@ -3551,12 +3556,13 @@ impl Bank { return; } - let collected_logs: Vec<_> = execution_results + let collected_logs: Vec<_> = processing_results .iter() .zip(transactions) - .filter_map(|(execution_result, transaction)| { + .filter_map(|(processing_result, transaction)| { // Skip log collection for unprocessed transactions - let execution_details = execution_result.details()?; + let processed_tx = processing_result.processed_transaction()?; + let execution_details = &processed_tx.execution_details; Self::collect_transaction_logs( &transaction_log_collector_config, transaction, @@ -3698,17 +3704,17 @@ impl Bank { fn filter_program_errors_and_collect_fee( &self, - execution_results: &[TransactionExecutionResult], + processing_results: &[TransactionProcessingResult], ) { let mut fees = 0; - execution_results + processing_results .iter() - .for_each(|execution_result| match execution_result { - TransactionExecutionResult::Executed(executed_tx) => { - fees += executed_tx.loaded_transaction.fee_details.total_fee(); + .for_each(|processing_result| match processing_result { + Ok(processed_tx) => { + fees += processed_tx.loaded_transaction.fee_details.total_fee(); } - TransactionExecutionResult::NotExecuted(_) => {} + Err(_) => {} }); self.collector_fees.fetch_add(fees, Relaxed); @@ -3717,17 +3723,18 @@ impl Bank { // Note: this function is not yet used; next PR will call it behind a feature gate fn filter_program_errors_and_collect_fee_details( &self, - execution_results: &[TransactionExecutionResult], + processing_results: &[TransactionProcessingResult], ) { let mut accumulated_fee_details = FeeDetails::default(); - execution_results + processing_results .iter() - .for_each(|execution_result| match execution_result { - TransactionExecutionResult::Executed(executed_tx) => { - accumulated_fee_details.accumulate(&executed_tx.loaded_transaction.fee_details); + .for_each(|processing_result| match processing_result { + Ok(processed_tx) => { + accumulated_fee_details + .accumulate(&processed_tx.loaded_transaction.fee_details); } - TransactionExecutionResult::NotExecuted(_) => {} + Err(_) => {} }); self.collector_fee_details @@ -3739,10 +3746,10 @@ impl Bank { pub fn commit_transactions( &self, sanitized_txs: &[SanitizedTransaction], - mut execution_results: Vec, + mut processing_results: Vec, last_blockhash: Hash, lamports_per_signature: u64, - execution_counts: &ExecutedTransactionCounts, + processed_counts: &ProcessedTransactionCounts, timings: &mut ExecuteTimings, ) -> Vec { assert!( @@ -3750,39 +3757,36 @@ impl Bank { "commit_transactions() working on a bank that is already frozen or is undergoing freezing!" 
); - let ExecutedTransactionCounts { - executed_transactions_count, - executed_non_vote_transactions_count, - executed_successfully_count, + let ProcessedTransactionCounts { + processed_transactions_count, + processed_non_vote_transactions_count, + processed_with_successful_result_count, signature_count, - } = *execution_counts; + } = *processed_counts; - self.increment_transaction_count(executed_transactions_count); + self.increment_transaction_count(processed_transactions_count); self.increment_non_vote_transaction_count_since_restart( - executed_non_vote_transactions_count, + processed_non_vote_transactions_count, ); self.increment_signature_count(signature_count); - let executed_with_failure_result_count = - executed_transactions_count.saturating_sub(executed_successfully_count); - if executed_with_failure_result_count > 0 { - self.transaction_error_count - .fetch_add(executed_with_failure_result_count, Relaxed); - } + let processed_with_failure_result_count = + processed_transactions_count.saturating_sub(processed_with_successful_result_count); + self.transaction_error_count + .fetch_add(processed_with_failure_result_count, Relaxed); - // Should be equivalent to checking `executed_transactions_count > 0` - if execution_results.iter().any(|result| result.was_executed()) { + if processed_transactions_count > 0 { self.is_delta.store(true, Relaxed); self.transaction_entries_count.fetch_add(1, Relaxed); self.transactions_per_entry_max - .fetch_max(executed_transactions_count, Relaxed); + .fetch_max(processed_transactions_count, Relaxed); } let ((), store_accounts_us) = measure_us!({ let durable_nonce = DurableNonce::from_blockhash(&last_blockhash); let (accounts_to_store, transactions) = collect_accounts_to_store( sanitized_txs, - &mut execution_results, + &mut processing_results, &durable_nonce, lamports_per_signature, ); @@ -3791,17 +3795,17 @@ impl Bank { .store_cached((self.slot(), accounts_to_store.as_slice()), &transactions); }); - self.collect_rent(&execution_results); + self.collect_rent(&processing_results); // Cached vote and stake accounts are synchronized with accounts-db // after each transaction. 
let ((), update_stakes_cache_us) = - measure_us!(self.update_stakes_cache(sanitized_txs, &execution_results)); + measure_us!(self.update_stakes_cache(sanitized_txs, &processing_results)); let ((), update_executors_us) = measure_us!({ let mut cache = None; - for execution_result in &execution_results { - if let TransactionExecutionResult::Executed(executed_tx) = execution_result { + for processing_result in &processing_results { + if let Some(executed_tx) = processing_result.processed_transaction() { let programs_modified_by_tx = &executed_tx.programs_modified_by_tx; if executed_tx.was_successful() && !programs_modified_by_tx.is_empty() { cache @@ -3814,9 +3818,10 @@ impl Bank { } }); - let accounts_data_len_delta = execution_results + let accounts_data_len_delta = processing_results .iter() - .filter_map(TransactionExecutionResult::details) + .filter_map(|processing_result| processing_result.processed_transaction()) + .map(|processed_tx| &processed_tx.execution_details) .filter_map(|details| { details .status @@ -3827,12 +3832,12 @@ impl Bank { self.update_accounts_data_size_delta_on_chain(accounts_data_len_delta); let ((), update_transaction_statuses_us) = - measure_us!(self.update_transaction_statuses(sanitized_txs, &execution_results)); + measure_us!(self.update_transaction_statuses(sanitized_txs, &processing_results)); if self.feature_set.is_active(&reward_full_priority_fee::id()) { - self.filter_program_errors_and_collect_fee_details(&execution_results) + self.filter_program_errors_and_collect_fee_details(&processing_results) } else { - self.filter_program_errors_and_collect_fee(&execution_results) + self.filter_program_errors_and_collect_fee(&processing_results) }; timings.saturating_add_in_place(ExecuteTimingType::StoreUs, store_accounts_us); @@ -3846,45 +3851,45 @@ impl Bank { update_transaction_statuses_us, ); - Self::create_commit_results(execution_results) + Self::create_commit_results(processing_results) } fn create_commit_results( - execution_results: Vec, + processing_results: Vec, ) -> Vec { - execution_results + processing_results .into_iter() - .map(|execution_result| match execution_result { - TransactionExecutionResult::Executed(executed_tx) => { - let loaded_tx = &executed_tx.loaded_transaction; + .map(|processing_result| match processing_result { + Ok(processed_tx) => { + let loaded_tx = &processed_tx.loaded_transaction; let loaded_account_stats = TransactionLoadedAccountsStats { loaded_accounts_data_size: loaded_tx.loaded_accounts_data_size, loaded_accounts_count: loaded_tx.accounts.len(), }; // Rent is only collected for successfully executed transactions - let rent_debits = if executed_tx.was_successful() { - executed_tx.loaded_transaction.rent_debits + let rent_debits = if processed_tx.was_successful() { + processed_tx.loaded_transaction.rent_debits } else { RentDebits::default() }; Ok(CommittedTransaction { loaded_account_stats, - execution_details: executed_tx.execution_details, - fee_details: executed_tx.loaded_transaction.fee_details, + execution_details: processed_tx.execution_details, + fee_details: processed_tx.loaded_transaction.fee_details, rent_debits, }) } - TransactionExecutionResult::NotExecuted(err) => Err(err), + Err(err) => Err(err), }) .collect() } - fn collect_rent(&self, execution_results: &[TransactionExecutionResult]) { - let collected_rent = execution_results + fn collect_rent(&self, processing_results: &[TransactionProcessingResult]) { + let collected_rent = processing_results .iter() - .filter_map(|executed_result| 
executed_result.executed_transaction()) + .filter_map(|processing_result| processing_result.processed_transaction()) .filter(|executed_tx| executed_tx.was_successful()) .map(|executed_tx| executed_tx.loaded_transaction.rent) .sum(); @@ -4557,8 +4562,8 @@ impl Bank { }; let LoadAndExecuteTransactionsOutput { - execution_results, - execution_counts, + processing_results, + processed_counts, } = self.load_and_execute_transactions( batch, max_age, @@ -4579,10 +4584,10 @@ impl Bank { self.last_blockhash_and_lamports_per_signature(); let commit_results = self.commit_transactions( batch.sanitized_transactions(), - execution_results, + processing_results, last_blockhash, lamports_per_signature, - &execution_counts, + &processed_counts, timings, ); let post_balances = if collect_balances { @@ -5822,16 +5827,16 @@ impl Bank { fn update_stakes_cache( &self, txs: &[SanitizedTransaction], - execution_results: &[TransactionExecutionResult], + processing_results: &[TransactionProcessingResult], ) { - debug_assert_eq!(txs.len(), execution_results.len()); + debug_assert_eq!(txs.len(), processing_results.len()); let new_warmup_cooldown_rate_epoch = self.new_warmup_cooldown_rate_epoch(); txs.iter() - .zip(execution_results) - .filter_map(|(tx, execution_result)| { - execution_result - .executed_transaction() - .map(|executed_tx| (tx, executed_tx)) + .zip(processing_results) + .filter_map(|(tx, processing_result)| { + processing_result + .processed_transaction() + .map(|processed_tx| (tx, processed_tx)) }) .filter(|(_, executed_tx)| executed_tx.was_successful()) .flat_map(|(tx, executed_tx)| { diff --git a/runtime/src/bank/tests.rs b/runtime/src/bank/tests.rs index 288d0b8bb23a1b..a46ae1f8578342 100644 --- a/runtime/src/bank/tests.rs +++ b/runtime/src/bank/tests.rs @@ -232,8 +232,11 @@ fn test_race_register_tick_freeze() { } } -fn new_execution_result(status: Result<()>, fee_details: FeeDetails) -> TransactionExecutionResult { - TransactionExecutionResult::Executed(Box::new(ExecutedTransaction { +fn new_processing_result( + status: Result<()>, + fee_details: FeeDetails, +) -> TransactionProcessingResult { + Ok(ExecutedTransaction { loaded_transaction: LoadedTransaction { fee_details, ..LoadedTransaction::default() @@ -247,7 +250,7 @@ fn new_execution_result(status: Result<()>, fee_details: FeeDetails) -> Transact accounts_data_len_delta: 0, }, programs_modified_by_tx: HashMap::new(), - })) + }) } impl Bank { @@ -2874,9 +2877,9 @@ fn test_filter_program_errors_and_collect_fee() { let tx_fee = 42; let fee_details = FeeDetails::new(tx_fee, 0, false); - let execution_results = vec![ - new_execution_result(Ok(()), fee_details), - new_execution_result( + let processing_results = vec![ + new_processing_result(Ok(()), fee_details), + new_processing_result( Err(TransactionError::InstructionError( 1, SystemError::ResultWithNegativeLamports.into(), @@ -2886,7 +2889,7 @@ fn test_filter_program_errors_and_collect_fee() { ]; let initial_balance = bank.get_balance(&leader); - bank.filter_program_errors_and_collect_fee(&execution_results); + bank.filter_program_errors_and_collect_fee(&processing_results); bank.freeze(); assert_eq!( bank.get_balance(&leader), @@ -2905,9 +2908,9 @@ fn test_filter_program_errors_and_collect_priority_fee() { let priority_fee = 42; let fee_details: FeeDetails = FeeDetails::new(0, priority_fee, false); - let execution_results = vec![ - new_execution_result(Ok(()), fee_details), - new_execution_result( + let processing_results = vec![ + new_processing_result(Ok(()), fee_details), + 
new_processing_result( Err(TransactionError::InstructionError( 1, SystemError::ResultWithNegativeLamports.into(), @@ -2917,7 +2920,7 @@ fn test_filter_program_errors_and_collect_priority_fee() { ]; let initial_balance = bank.get_balance(&leader); - bank.filter_program_errors_and_collect_fee(&execution_results); + bank.filter_program_errors_and_collect_fee(&processing_results); bank.freeze(); assert_eq!( bank.get_balance(&leader), @@ -12800,9 +12803,9 @@ fn test_filter_program_errors_and_collect_fee_details() { let bank = Bank::new_for_tests(&genesis_config); let results = vec![ - TransactionExecutionResult::NotExecuted(TransactionError::AccountNotFound), - new_execution_result(Ok(()), tx_fee_details), - new_execution_result( + Err(TransactionError::AccountNotFound), + new_processing_result(Ok(()), tx_fee_details), + new_processing_result( Err(TransactionError::InstructionError( 0, SystemError::ResultWithNegativeLamports.into(), diff --git a/svm/doc/spec.md b/svm/doc/spec.md index a851b051bb909a..c40a928e4c4e4d 100644 --- a/svm/doc/spec.md +++ b/svm/doc/spec.md @@ -206,9 +206,9 @@ The output of the transaction batch processor's - `error_metrics`: Error metrics for transactions that were processed. - `execute_timings`: Timings for transaction batch execution. -- `execution_results`: Vector of results indicating whether a transaction was - executed or could not be executed. Note executed transactions can still have - failed! +- `processing_results`: Vector of results indicating whether a transaction was + processed or could not be processed for some reason. Note that processed + transactions can still have failed! # Functional Model diff --git a/svm/examples/paytube/src/settler.rs b/svm/examples/paytube/src/settler.rs index 5db63c4e675809..8923512ecd6915 100644 --- a/svm/examples/paytube/src/settler.rs +++ b/svm/examples/paytube/src/settler.rs @@ -18,7 +18,10 @@ use { pubkey::Pubkey, signature::Keypair, signer::Signer, system_instruction, transaction::Transaction as SolanaTransaction, }, - solana_svm::transaction_processor::LoadAndExecuteSanitizedTransactionsOutput, + solana_svm::{ + transaction_processing_result::TransactionProcessingResultExtensions, + transaction_processor::LoadAndExecuteSanitizedTransactionsOutput, + }, spl_associated_token_account::get_associated_token_address, std::collections::HashMap, }; @@ -61,11 +64,11 @@ impl Ledger { let mut ledger: HashMap = HashMap::new(); paytube_transactions .iter() - .zip(svm_output.execution_results) + .zip(svm_output.processing_results) .for_each(|(transaction, result)| { // Only append to the ledger if the PayTube transaction was // successful. - if result.was_executed_successfully() { + if result.was_processed_with_successful_result() { let mint = transaction.mint; let mut keys = [transaction.from, transaction.to]; keys.sort(); diff --git a/svm/src/account_saver.rs b/svm/src/account_saver.rs index 4ba2ec259fd87b..2657e7a7cb9717 100644 --- a/svm/src/account_saver.rs +++ b/svm/src/account_saver.rs @@ -1,7 +1,9 @@ use { crate::{ rollback_accounts::RollbackAccounts, - transaction_execution_result::TransactionExecutionResult, + transaction_processing_result::{ + TransactionProcessingResult, TransactionProcessingResultExtensions, + }, }, solana_sdk::{ account::AccountSharedData, nonce::state::DurableNonce, pubkey::Pubkey, @@ -14,20 +16,20 @@ use { // optimization edge cases where some write locked accounts have skip storage. 
fn max_number_of_accounts_to_collect( txs: &[SanitizedTransaction], - execution_results: &[TransactionExecutionResult], + processing_results: &[TransactionProcessingResult], ) -> usize { - execution_results + processing_results .iter() .zip(txs) - .filter_map(|(execution_result, tx)| { - execution_result - .executed_transaction() - .map(|executed_tx| (executed_tx, tx)) + .filter_map(|(processing_result, tx)| { + processing_result + .processed_transaction() + .map(|processed_tx| (processed_tx, tx)) }) .map( - |(executed_tx, tx)| match executed_tx.execution_details.status { + |(processed_tx, tx)| match processed_tx.execution_details.status { Ok(_) => tx.message().num_write_locks() as usize, - Err(_) => executed_tx.loaded_transaction.rollback_accounts.count(), + Err(_) => processed_tx.loaded_transaction.rollback_accounts.count(), }, ) .sum() @@ -35,35 +37,35 @@ fn max_number_of_accounts_to_collect( pub fn collect_accounts_to_store<'a>( txs: &'a [SanitizedTransaction], - execution_results: &'a mut [TransactionExecutionResult], + processing_results: &'a mut [TransactionProcessingResult], durable_nonce: &DurableNonce, lamports_per_signature: u64, ) -> ( Vec<(&'a Pubkey, &'a AccountSharedData)>, Vec>, ) { - let collect_capacity = max_number_of_accounts_to_collect(txs, execution_results); + let collect_capacity = max_number_of_accounts_to_collect(txs, processing_results); let mut accounts = Vec::with_capacity(collect_capacity); let mut transactions = Vec::with_capacity(collect_capacity); - for (execution_result, tx) in execution_results.iter_mut().zip(txs) { - let Some(executed_tx) = execution_result.executed_transaction_mut() else { + for (processing_result, transaction) in processing_results.iter_mut().zip(txs) { + let Some(processed_tx) = processing_result.processed_transaction_mut() else { // Don't store any accounts if tx wasn't executed continue; }; - if executed_tx.execution_details.status.is_ok() { + if processed_tx.execution_details.status.is_ok() { collect_accounts_for_successful_tx( &mut accounts, &mut transactions, - tx, - &executed_tx.loaded_transaction.accounts, + transaction, + &processed_tx.loaded_transaction.accounts, ); } else { collect_accounts_for_failed_tx( &mut accounts, &mut transactions, - tx, - &mut executed_tx.loaded_transaction.rollback_accounts, + transaction, + &mut processed_tx.loaded_transaction.rollback_accounts, durable_nonce, lamports_per_signature, ); @@ -180,11 +182,11 @@ mod tests { )) } - fn new_execution_result( + fn new_processing_result( status: Result<()>, loaded_transaction: LoadedTransaction, - ) -> TransactionExecutionResult { - TransactionExecutionResult::Executed(Box::new(ExecutedTransaction { + ) -> TransactionProcessingResult { + Ok(ExecutedTransaction { execution_details: TransactionExecutionDetails { status, log_messages: None, @@ -195,7 +197,7 @@ mod tests { }, loaded_transaction, programs_modified_by_tx: HashMap::new(), - })) + }) } #[test] @@ -260,14 +262,14 @@ mod tests { }; let txs = vec![tx0.clone(), tx1.clone()]; - let mut execution_results = vec![ - new_execution_result(Ok(()), loaded0), - new_execution_result(Ok(()), loaded1), + let mut processing_results = vec![ + new_processing_result(Ok(()), loaded0), + new_processing_result(Ok(()), loaded1), ]; - let max_collected_accounts = max_number_of_accounts_to_collect(&txs, &execution_results); + let max_collected_accounts = max_number_of_accounts_to_collect(&txs, &processing_results); assert_eq!(max_collected_accounts, 2); let (collected_accounts, transactions) = - 
collect_accounts_to_store(&txs, &mut execution_results, &DurableNonce::default(), 0); + collect_accounts_to_store(&txs, &mut processing_results, &DurableNonce::default(), 0); assert_eq!(collected_accounts.len(), 2); assert!(collected_accounts .iter() @@ -314,18 +316,18 @@ mod tests { }; let txs = vec![tx]; - let mut execution_results = vec![new_execution_result( + let mut processing_results = vec![new_processing_result( Err(TransactionError::InstructionError( 1, InstructionError::InvalidArgument, )), loaded, )]; - let max_collected_accounts = max_number_of_accounts_to_collect(&txs, &execution_results); + let max_collected_accounts = max_number_of_accounts_to_collect(&txs, &processing_results); assert_eq!(max_collected_accounts, 1); let durable_nonce = DurableNonce::from_blockhash(&Hash::new_unique()); let (collected_accounts, _) = - collect_accounts_to_store(&txs, &mut execution_results, &durable_nonce, 0); + collect_accounts_to_store(&txs, &mut processing_results, &durable_nonce, 0); assert_eq!(collected_accounts.len(), 1); assert_eq!( collected_accounts @@ -400,17 +402,17 @@ mod tests { let durable_nonce = DurableNonce::from_blockhash(&Hash::new_unique()); let txs = vec![tx]; - let mut execution_results = vec![new_execution_result( + let mut processing_results = vec![new_processing_result( Err(TransactionError::InstructionError( 1, InstructionError::InvalidArgument, )), loaded, )]; - let max_collected_accounts = max_number_of_accounts_to_collect(&txs, &execution_results); + let max_collected_accounts = max_number_of_accounts_to_collect(&txs, &processing_results); assert_eq!(max_collected_accounts, 2); let (collected_accounts, _) = - collect_accounts_to_store(&txs, &mut execution_results, &durable_nonce, 0); + collect_accounts_to_store(&txs, &mut processing_results, &durable_nonce, 0); assert_eq!(collected_accounts.len(), 2); assert_eq!( collected_accounts @@ -498,17 +500,17 @@ mod tests { let durable_nonce = DurableNonce::from_blockhash(&Hash::new_unique()); let txs = vec![tx]; - let mut execution_results = vec![new_execution_result( + let mut processing_results = vec![new_processing_result( Err(TransactionError::InstructionError( 1, InstructionError::InvalidArgument, )), loaded, )]; - let max_collected_accounts = max_number_of_accounts_to_collect(&txs, &execution_results); + let max_collected_accounts = max_number_of_accounts_to_collect(&txs, &processing_results); assert_eq!(max_collected_accounts, 1); let (collected_accounts, _) = - collect_accounts_to_store(&txs, &mut execution_results, &durable_nonce, 0); + collect_accounts_to_store(&txs, &mut processing_results, &durable_nonce, 0); assert_eq!(collected_accounts.len(), 1); let collected_nonce_account = collected_accounts .iter() diff --git a/svm/src/lib.rs b/svm/src/lib.rs index cbfef2305e41f9..b031ce7d6e1c53 100644 --- a/svm/src/lib.rs +++ b/svm/src/lib.rs @@ -15,6 +15,7 @@ pub mod transaction_commit_result; pub mod transaction_error_metrics; pub mod transaction_execution_result; pub mod transaction_processing_callback; +pub mod transaction_processing_result; pub mod transaction_processor; #[macro_use] diff --git a/svm/src/transaction_commit_result.rs b/svm/src/transaction_commit_result.rs index 5cc413d7b175f9..53590c0c5d2b50 100644 --- a/svm/src/transaction_commit_result.rs +++ b/svm/src/transaction_commit_result.rs @@ -18,13 +18,13 @@ pub struct CommittedTransaction { } pub trait TransactionCommitResultExtensions { - fn was_executed(&self) -> bool; + fn was_committed(&self) -> bool; fn was_executed_successfully(&self) -> bool; fn 
transaction_result(&self) -> TransactionResult<()>; } impl TransactionCommitResultExtensions for TransactionCommitResult { - fn was_executed(&self) -> bool { + fn was_committed(&self) -> bool { self.is_ok() } diff --git a/svm/src/transaction_execution_result.rs b/svm/src/transaction_execution_result.rs index a7c965fbed01bd..2ac684d4cc219c 100644 --- a/svm/src/transaction_execution_result.rs +++ b/svm/src/transaction_execution_result.rs @@ -51,44 +51,6 @@ impl ExecutedTransaction { } } -impl TransactionExecutionResult { - pub fn was_executed_successfully(&self) -> bool { - self.executed_transaction() - .map(|executed_tx| executed_tx.was_successful()) - .unwrap_or(false) - } - - pub fn was_executed(&self) -> bool { - self.executed_transaction().is_some() - } - - pub fn details(&self) -> Option<&TransactionExecutionDetails> { - self.executed_transaction() - .map(|executed_tx| &executed_tx.execution_details) - } - - pub fn flattened_result(&self) -> transaction::Result<()> { - match self { - Self::Executed(executed_tx) => executed_tx.execution_details.status.clone(), - Self::NotExecuted(err) => Err(err.clone()), - } - } - - pub fn executed_transaction(&self) -> Option<&ExecutedTransaction> { - match self { - Self::Executed(executed_tx) => Some(executed_tx.as_ref()), - Self::NotExecuted { .. } => None, - } - } - - pub fn executed_transaction_mut(&mut self) -> Option<&mut ExecutedTransaction> { - match self { - Self::Executed(executed_tx) => Some(executed_tx.as_mut()), - Self::NotExecuted { .. } => None, - } - } -} - #[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] pub struct TransactionExecutionDetails { pub status: transaction::Result<()>, diff --git a/svm/src/transaction_processing_result.rs b/svm/src/transaction_processing_result.rs new file mode 100644 index 00000000000000..0ad68e0d18a803 --- /dev/null +++ b/svm/src/transaction_processing_result.rs @@ -0,0 +1,48 @@ +use { + crate::transaction_execution_result::ExecutedTransaction, + solana_sdk::transaction::Result as TransactionResult, +}; + +pub type TransactionProcessingResult = TransactionResult; +pub type ProcessedTransaction = ExecutedTransaction; + +pub trait TransactionProcessingResultExtensions { + fn was_processed(&self) -> bool; + fn was_processed_with_successful_result(&self) -> bool; + fn processed_transaction(&self) -> Option<&ProcessedTransaction>; + fn processed_transaction_mut(&mut self) -> Option<&mut ProcessedTransaction>; + fn flattened_result(&self) -> TransactionResult<()>; +} + +impl TransactionProcessingResultExtensions for TransactionProcessingResult { + fn was_processed(&self) -> bool { + self.is_ok() + } + + fn was_processed_with_successful_result(&self) -> bool { + match self { + Ok(processed_tx) => processed_tx.was_successful(), + Err(_) => false, + } + } + + fn processed_transaction(&self) -> Option<&ProcessedTransaction> { + match self { + Ok(processed_tx) => Some(processed_tx), + Err(_) => None, + } + } + + fn processed_transaction_mut(&mut self) -> Option<&mut ProcessedTransaction> { + match self { + Ok(processed_tx) => Some(processed_tx), + Err(_) => None, + } + } + + fn flattened_result(&self) -> TransactionResult<()> { + self.as_ref() + .map_err(|err| err.clone()) + .and_then(|processed_tx| processed_tx.execution_details.status.clone()) + } +} diff --git a/svm/src/transaction_processor.rs b/svm/src/transaction_processor.rs index fa2bd505948a24..54b1abb6740661 100644 --- a/svm/src/transaction_processor.rs +++ b/svm/src/transaction_processor.rs @@ -13,10 +13,9 @@ use { 
diff --git a/svm/src/transaction_processor.rs b/svm/src/transaction_processor.rs
index fa2bd505948a24..54b1abb6740661 100644
--- a/svm/src/transaction_processor.rs
+++ b/svm/src/transaction_processor.rs
@@ -13,10 +13,9 @@ use {
         rollback_accounts::RollbackAccounts,
         transaction_account_state_info::TransactionAccountStateInfo,
         transaction_error_metrics::TransactionErrorMetrics,
-        transaction_execution_result::{
-            ExecutedTransaction, TransactionExecutionDetails, TransactionExecutionResult,
-        },
+        transaction_execution_result::{ExecutedTransaction, TransactionExecutionDetails},
         transaction_processing_callback::TransactionProcessingCallback,
+        transaction_processing_result::TransactionProcessingResult,
     },
     log::debug,
     percentage::Percentage,
@@ -69,9 +68,10 @@ pub struct LoadAndExecuteSanitizedTransactionsOutput {
     pub error_metrics: TransactionErrorMetrics,
     /// Timings for transaction batch execution.
     pub execute_timings: ExecuteTimings,
-    // Vector of results indicating whether a transaction was executed or could not
-    // be executed. Note executed transactions can still have failed!
-    pub execution_results: Vec<TransactionExecutionResult>,
+    /// Vector of results indicating whether a transaction was processed or
+    /// could not be processed. Note processed transactions can still have a
+    /// failure result meaning that the transaction will be rolled back.
+    pub processing_results: Vec<TransactionProcessingResult>,
 }
 
 /// Configuration of the recording capabilities for transaction execution
@@ -269,12 +269,11 @@ impl TransactionBatchProcessor {
 
         if program_cache_for_tx_batch.hit_max_limit {
             const ERROR: TransactionError = TransactionError::ProgramCacheHitMaxLimit;
-            let execution_results =
-                vec![TransactionExecutionResult::NotExecuted(ERROR); sanitized_txs.len()];
+            let processing_results = vec![Err(ERROR); sanitized_txs.len()];
             return LoadAndExecuteSanitizedTransactionsOutput {
                 error_metrics,
                 execute_timings,
-                execution_results,
+                processing_results,
             };
         }
 
@@ -294,12 +293,12 @@ impl TransactionBatchProcessor {
             &program_cache_for_tx_batch,
         ));
 
-        let (execution_results, execution_us): (Vec<TransactionExecutionResult>, u64) =
+        let (processing_results, execution_us): (Vec<TransactionProcessingResult>, u64) =
             measure_us!(loaded_transactions
                 .into_iter()
                 .zip(sanitized_txs.iter())
                 .map(|(load_result, tx)| match load_result {
-                    Err(e) => TransactionExecutionResult::NotExecuted(e.clone()),
+                    Err(e) => Err(e.clone()),
                     Ok(loaded_transaction) => {
                         let executed_tx = self.execute_loaded_transaction(
                             tx,
@@ -317,7 +316,7 @@ impl TransactionBatchProcessor {
                             program_cache_for_tx_batch.merge(&executed_tx.programs_modified_by_tx);
                         }
 
-                        TransactionExecutionResult::Executed(Box::new(executed_tx))
+                        Ok(executed_tx)
                     }
                 })
                 .collect());
@@ -354,7 +353,7 @@ impl TransactionBatchProcessor {
         LoadAndExecuteSanitizedTransactionsOutput {
             error_metrics,
             execute_timings,
-            execution_results,
+            processing_results,
         }
     }
 
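To illustrate how a caller might consume the renamed output field, here is a sketch (the helper name and the counting are illustrative assumptions; the struct, field, and trait methods are the ones introduced above):

    use solana_svm::{
        transaction_processing_result::TransactionProcessingResultExtensions,
        transaction_processor::LoadAndExecuteSanitizedTransactionsOutput,
    };

    // Returns (processed, processed-and-successful) counts for a batch output.
    fn count_processed(output: &LoadAndExecuteSanitizedTransactionsOutput) -> (usize, usize) {
        let processed = output
            .processing_results
            .iter()
            .filter(|result| result.was_processed())
            .count();
        let successful = output
            .processing_results
            .iter()
            .filter(|result| result.was_processed_with_successful_result())
            .count();
        (processed, successful)
    }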
diff --git a/svm/tests/conformance.rs b/svm/tests/conformance.rs
index 8e82cf98623837..5b32c2e164c2d2 100644
--- a/svm/tests/conformance.rs
+++ b/svm/tests/conformance.rs
@@ -36,6 +36,7 @@ use {
         account_loader::CheckedTransactionDetails,
         program_loader,
         transaction_processing_callback::TransactionProcessingCallback,
+        transaction_processing_result::TransactionProcessingResultExtensions,
         transaction_processor::{
            ExecutionRecordingConfig, TransactionBatchProcessor, TransactionProcessingConfig,
            TransactionProcessingEnvironment,
@@ -311,17 +312,8 @@ fn run_fixture(fixture: InstrFixture, filename: OsString, execute_as_instr: bool
     );
 
     // Assert that the transaction has worked without errors.
-    if !result.execution_results[0].was_executed()
-        || result.execution_results[0]
-            .details()
-            .unwrap()
-            .status
-            .is_err()
-    {
-        if matches!(
-            result.execution_results[0].flattened_result(),
-            Err(TransactionError::InsufficientFundsForRent { .. })
-        ) {
+    if let Err(err) = result.processing_results[0].flattened_result() {
+        if matches!(err, TransactionError::InsufficientFundsForRent { .. }) {
             // This is a transaction error not an instruction error, so execute the instruction
             // instead.
             execute_fixture_as_instr(
@@ -344,7 +336,9 @@ fn run_fixture(fixture: InstrFixture, filename: OsString, execute_as_instr: bool
         return;
     }
 
-    let executed_tx = result.execution_results[0].executed_transaction().unwrap();
+    let executed_tx = result.processing_results[0]
+        .processed_transaction()
+        .unwrap();
     let execution_details = &executed_tx.execution_details;
     let loaded_accounts = &executed_tx.loaded_transaction.accounts;
     verify_accounts_and_data(
diff --git a/svm/tests/integration_test.rs b/svm/tests/integration_test.rs
index 8d8c80b8e89422..5070bca08e0907 100644
--- a/svm/tests/integration_test.rs
+++ b/svm/tests/integration_test.rs
@@ -33,8 +33,8 @@ use {
     },
     solana_svm::{
        account_loader::{CheckedTransactionDetails, TransactionCheckResult},
-        transaction_execution_result::TransactionExecutionResult,
        transaction_processing_callback::TransactionProcessingCallback,
+        transaction_processing_result::TransactionProcessingResultExtensions,
        transaction_processor::{
            ExecutionRecordingConfig, TransactionBatchProcessor, TransactionProcessingConfig,
            TransactionProcessingEnvironment,
@@ -429,9 +429,11 @@ fn svm_integration() {
        &processing_config,
    );
 
-    assert_eq!(result.execution_results.len(), 5);
+    assert_eq!(result.processing_results.len(), 5);
 
-    let executed_tx_0 = result.execution_results[0].executed_transaction().unwrap();
+    let executed_tx_0 = result.processing_results[0]
+        .processed_transaction()
+        .unwrap();
     assert!(executed_tx_0.was_successful());
     let logs = executed_tx_0
         .execution_details
         .log_messages
         .as_ref()
         .unwrap();
     assert!(logs.contains(&"Program log: Hello, Solana!".to_string()));
 
-    let executed_tx_1 = result.execution_results[1].executed_transaction().unwrap();
+    let executed_tx_1 = result.processing_results[1]
+        .processed_transaction()
+        .unwrap();
     assert!(executed_tx_1.was_successful());
 
     // The SVM does not commit the account changes in MockBank
@@ -453,7 +457,9 @@ fn svm_integration() {
         .unwrap();
     assert_eq!(recipient_data.1.lamports(), 900010);
 
-    let executed_tx_2 = result.execution_results[2].executed_transaction().unwrap();
+    let executed_tx_2 = result.processing_results[2]
+        .processed_transaction()
+        .unwrap();
     let return_data = executed_tx_2
         .execution_details
         .return_data
         .as_ref()
         .unwrap();
     let clock_info: Clock = bincode::deserialize(clock_data.data()).unwrap();
     assert_eq!(clock_info.unix_timestamp, time);
 
-    let executed_tx_3 = result.execution_results[3].executed_transaction().unwrap();
+    let executed_tx_3 = result.processing_results[3]
+        .processed_transaction()
+        .unwrap();
     assert!(executed_tx_3.execution_details.status.is_err());
     assert!(executed_tx_3
         .execution_details
         .log_messages
         .as_ref()
         .unwrap()
         .contains(&"Transfer: insufficient lamports 900000, need 900050".to_string()));
 
     assert!(matches!(
-        result.execution_results[4],
-        TransactionExecutionResult::NotExecuted(TransactionError::BlockhashNotFound)
+        result.processing_results[4],
+        Err(TransactionError::BlockhashNotFound)
     ));
 }
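Taken together, the rename gives each stage of the pipeline its own verb: the SVM processes a transaction (which may still fail and be rolled back), and the bank commits it. A small sketch contrasting the two checks (the pairing of the two results and the helper name are illustrative assumptions; the trait methods are the ones renamed in this patch):

    use solana_svm::{
        transaction_commit_result::{TransactionCommitResult, TransactionCommitResultExtensions},
        transaction_processing_result::{
            TransactionProcessingResult, TransactionProcessingResultExtensions,
        },
    };

    // Summarizes where a transaction stopped in the processing/commit pipeline.
    fn describe(
        processing_result: &TransactionProcessingResult,
        commit_result: &TransactionCommitResult,
    ) -> &'static str {
        match (processing_result.was_processed(), commit_result.was_committed()) {
            (false, _) => "not processed (load or validation failure)",
            (true, false) => "processed but not committed",
            (true, true) => "processed and committed (its status may still be a failure)",
        }
    }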