# [Indexer] Add synthetic ingestion and benchmark framework (#19899)
## Description

This PR adds two things:

1. A synthetic workload generator for the indexer. It generates synthetic checkpoints and transactions and writes them to the ingestion directory. Currently only transfer transactions are generated, but this could be extended later.
2. A benchmark framework that connects the synthetic ingestion with any indexer implementation. The indexer implementation needs to implement a wrapper for a trait type (see the sketch after the release-notes checklist).

## Test plan

Added an e2e test.

---

## Release notes

Check each box that your changes affect. If none of the boxes relate to your changes, release notes aren't required. For each box you select, include information after the relevant heading that describes the impact of your changes that a user might notice and any actions they must take to implement updates.

- [ ] Protocol:
- [ ] Nodes (Validators and Full nodes):
- [ ] Indexer:
- [ ] JSON-RPC:
- [ ] GraphQL:
- [ ] CLI:
- [ ] Rust SDK:
- [ ] REST API:
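For context, the "wrapper for a trait type" mentioned above refers to the `BenchmarkableIndexer` trait from `sui_synthetic_ingestion::benchmark`. The trait definition itself is not part of this excerpt; the shape below is inferred from the `impl BenchmarkableIndexer for BenchmarkIndexer` block further down and may differ slightly from the actual definition in the `sui-synthetic-ingestion` crate.

```rust
// Inferred sketch of the trait an indexer wrapper implements; the exact
// definition lives in the sui-synthetic-ingestion crate and may differ.
use sui_synthetic_ingestion::IndexerProgress;
use tokio::sync::watch;

#[async_trait::async_trait]
pub trait BenchmarkableIndexer {
    /// Progress updates for checkpoints the indexer has committed.
    fn subscribe_to_committed_checkpoints(&self) -> watch::Receiver<Option<IndexerProgress>>;
    /// Start indexing from the synthetic ingestion directory.
    async fn start(&mut self);
    /// Shut down and wait for the indexer to finish.
    async fn stop(self);
}
```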
Showing 18 changed files with 571 additions and 14 deletions.
The core new file is the benchmark wrapper for the Postgres indexer (+116 lines):

```rust
// Copyright (c) Mysten Labs, Inc.
// SPDX-License-Identifier: Apache-2.0

use crate::config::{BenchmarkConfig, IngestionConfig, IngestionSources, UploadOptions};
use crate::database::ConnectionPool;
use crate::db::{reset_database, run_migrations};
use crate::errors::IndexerError;
use crate::indexer::Indexer;
use crate::metrics::IndexerMetrics;
use crate::store::PgIndexerStore;
use std::path::PathBuf;
use sui_synthetic_ingestion::benchmark::{run_benchmark, BenchmarkableIndexer};
use sui_synthetic_ingestion::{IndexerProgress, SyntheticIngestionConfig};
use tokio::sync::watch;
use tokio::task::JoinHandle;
use tokio_util::sync::CancellationToken;

/// Entry point: prepares the database, configures a synthetic ingestion workload
/// in a temporary directory, and runs the benchmark against the Postgres indexer store.
pub async fn run_indexer_benchmark(
    config: BenchmarkConfig,
    pool: ConnectionPool,
    metrics: IndexerMetrics,
) {
    if config.reset_db {
        reset_database(pool.dedicated_connection().await.unwrap())
            .await
            .unwrap();
    } else {
        run_migrations(pool.dedicated_connection().await.unwrap())
            .await
            .unwrap();
    }
    let store = PgIndexerStore::new(pool, UploadOptions::default(), metrics.clone());
    let ingestion_dir = tempfile::tempdir().unwrap().into_path();
    let synthetic_ingestion_config = SyntheticIngestionConfig {
        ingestion_dir: ingestion_dir.clone(),
        checkpoint_size: config.checkpoint_size,
        num_checkpoints: config.num_checkpoints,
        starting_checkpoint: config.starting_checkpoint,
    };
    let indexer = BenchmarkIndexer::new(store, metrics, ingestion_dir);
    run_benchmark(synthetic_ingestion_config, indexer).await;
}

/// Wraps the indexer writer so that it can be driven by the benchmark framework.
pub struct BenchmarkIndexer {
    inner: Option<BenchmarkIndexerInner>,
    cancel: CancellationToken,
    committed_checkpoints_rx: watch::Receiver<Option<IndexerProgress>>,
    handle: Option<JoinHandle<anyhow::Result<(), IndexerError>>>,
}

/// State handed off to the indexer writer when `start` is called.
struct BenchmarkIndexerInner {
    ingestion_dir: PathBuf,
    store: PgIndexerStore,
    metrics: IndexerMetrics,
    committed_checkpoints_tx: watch::Sender<Option<IndexerProgress>>,
}

impl BenchmarkIndexer {
    pub fn new(store: PgIndexerStore, metrics: IndexerMetrics, ingestion_dir: PathBuf) -> Self {
        let cancel = CancellationToken::new();
        let (committed_checkpoints_tx, committed_checkpoints_rx) = watch::channel(None);
        Self {
            inner: Some(BenchmarkIndexerInner {
                ingestion_dir,
                store,
                metrics,
                committed_checkpoints_tx,
            }),
            cancel,
            committed_checkpoints_rx,
            handle: None,
        }
    }
}

#[async_trait::async_trait]
impl BenchmarkableIndexer for BenchmarkIndexer {
    fn subscribe_to_committed_checkpoints(&self) -> watch::Receiver<Option<IndexerProgress>> {
        self.committed_checkpoints_rx.clone()
    }

    async fn start(&mut self) {
        let BenchmarkIndexerInner {
            ingestion_dir,
            store,
            metrics,
            committed_checkpoints_tx,
        } = self.inner.take().unwrap();
        let ingestion_config = IngestionConfig {
            sources: IngestionSources {
                data_ingestion_path: Some(ingestion_dir),
                ..Default::default()
            },
            ..Default::default()
        };
        let cancel = self.cancel.clone();
        // Spawn the indexer writer, reading checkpoints from the synthetic ingestion directory.
        let handle = tokio::task::spawn(async move {
            Indexer::start_writer(
                ingestion_config,
                store,
                metrics,
                Default::default(),
                None,
                cancel,
                Some(committed_checkpoints_tx),
            )
            .await
        });
        self.handle = Some(handle);
    }

    async fn stop(mut self) {
        // Cancel the writer task and wait for it to exit cleanly.
        self.cancel.cancel();
        self.handle.unwrap().await.unwrap().unwrap();
    }
}
```
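The driver side (`run_benchmark`) is not shown in this excerpt. As a rough illustration of how the trait's progress channel is meant to be consumed, a driver could look like the sketch below. This is a hedged sketch, not the actual implementation, and the `IndexerProgress::checkpoint` field name is an assumption.

```rust
use sui_synthetic_ingestion::benchmark::BenchmarkableIndexer;

// Hedged sketch of a benchmark driver loop; not the actual `run_benchmark`.
// Assumes `IndexerProgress` exposes a `checkpoint` sequence number.
async fn drive_benchmark<I: BenchmarkableIndexer>(mut indexer: I, last_checkpoint: u64) {
    let mut progress_rx = indexer.subscribe_to_committed_checkpoints();
    indexer.start().await;
    let started = std::time::Instant::now();
    // Block until the indexer reports that it has committed the final checkpoint.
    loop {
        progress_rx
            .changed()
            .await
            .expect("indexer dropped the progress channel");
        if let Some(progress) = progress_rx.borrow_and_update().as_ref() {
            if progress.checkpoint >= last_checkpoint {
                break;
            }
        }
    }
    println!(
        "indexed up to checkpoint {} in {:?}",
        last_checkpoint,
        started.elapsed()
    );
    indexer.stop().await;
}
```

The watch channel keeps only the latest committed progress, so the driver never lags behind the indexer; it simply waits for the value to cross the target checkpoint and then measures elapsed time.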