diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 1376a743ec..258046b44e 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -54,6 +54,25 @@ jobs:
           target/release/papyrus_node --base_layer.node_url ${{ secrets.CI_BASE_LAYER_NODE_URL }}
           & sleep 30 ; kill $!
 
+  executable-run-no-rpc:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2
+      - uses: Noelware/setup-protoc@1.1.0
+        with:
+          version: ${{env.PROTOC_VERSION}}
+      - run: mkdir data
+
+      - name: Build node
+        run: cargo build -r --no-default-features
+
+      - name: Run executable
+        run: >
+          target/release/papyrus_node --base_layer.node_url ${{ secrets.CI_BASE_LAYER_NODE_URL }}
+          & sleep 30 ; kill $!
+
   test:
     runs-on: ubuntu-latest
     steps:
@@ -88,6 +107,19 @@ jobs:
           cargo test -r --test '*' -- --include-ignored --skip test_gw_integration_testnet;
           cargo run -r -p papyrus_node --bin central_source_integration_test
 
+  test-no-rpc:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2
+
+      - run: |
+          cargo test -p papyrus_node --no-default-features
+        env:
+          SEED: 0
+
+
   rustfmt:
     runs-on: ubuntu-latest
     steps:
diff --git a/Cargo.lock b/Cargo.lock
index 1866b8c494..79f997c24d 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -5693,6 +5693,7 @@ dependencies = [
  "async-stream",
  "clap",
  "const_format",
+ "futures",
  "futures-util",
  "insta",
  "itertools 0.10.5",
diff --git a/commitlint.config.js b/commitlint.config.js
index 1d7f6cfb27..6b474f2606 100644
--- a/commitlint.config.js
+++ b/commitlint.config.js
@@ -28,6 +28,7 @@ const Configuration = {
         'load_test',
         'monitoring',
         'network',
+        'node',
         'release',
         'starknet_client',
         'storage',
diff --git a/crates/papyrus_node/Cargo.toml b/crates/papyrus_node/Cargo.toml
index 215a48d1db..7de5dd54ad 100644
--- a/crates/papyrus_node/Cargo.toml
+++ b/crates/papyrus_node/Cargo.toml
@@ -8,6 +8,10 @@ license-file.workspace = true
 [package.metadata.cargo-udeps.ignore]
 normal = ["papyrus_base_layer"]
 
+[features]
+default = ["rpc"]
+rpc = ["papyrus_rpc"]
+
 [dependencies]
 anyhow.workspace = true
 async-stream.workspace = true
@@ -22,9 +26,9 @@ papyrus_base_layer = { path = "../papyrus_base_layer" }
 papyrus_config = { path = "../papyrus_config", version = "0.3.0-rc.0" }
 papyrus_common = { path = "../papyrus_common", version = "0.3.0-rc.0" }
 papyrus_monitoring_gateway = { path = "../papyrus_monitoring_gateway" }
-papyrus_rpc = { path = "../papyrus_rpc" }
+papyrus_rpc = { path = "../papyrus_rpc", version = "0.3.0-rc.0", optional = true }
 papyrus_storage = { path = "../papyrus_storage", version = "0.3.0-rc.0" }
-papyrus_sync = { path = "../papyrus_sync" }
+papyrus_sync = { path = "../papyrus_sync", version = "0.3.0-rc.0" }
 reqwest = { workspace = true, features = ["json", "blocking"] }
 serde = { workspace = true, features = ["derive"] }
 serde_json = { workspace = true, features = ["arbitrary_precision"] }
@@ -38,6 +42,9 @@ tracing.workspace = true
 url.workspace = true
 validator = { workspace = true, features = ["derive"] }
 
+[target.'cfg(not(feature = "rpc"))'.dependencies]
+futures.workspace = true
+
 [dev-dependencies]
 metrics-exporter-prometheus.workspace = true
 pretty_assertions.workspace = true
diff --git a/crates/papyrus_node/src/bin/dump_config.rs b/crates/papyrus_node/src/bin/dump_config.rs
index ff0a6c7a3a..9aafca3a69 100644
--- a/crates/papyrus_node/src/bin/dump_config.rs
+++ b/crates/papyrus_node/src/bin/dump_config.rs
@@ -8,6 +8,7 @@
 use papyrus_config::{ParamPath, SerializedParam};
 use papyrus_node::config::{NodeConfig, DEFAULT_CONFIG_PATH};
 use starknet_api::core::ChainId;
+#[cfg(feature = "rpc")]
 lazy_static! {
     /// Returns vector of (pointer target name, pointer target serialized param, vec<pointer param path>)
     /// to be applied on the dumped node config.
@@ -39,6 +40,38 @@ lazy_static! {
     )];
 }
 
+#[cfg(not(feature = "rpc"))]
+lazy_static! {
+    /// Returns vector of (pointer target name, pointer target serialized param, vec<pointer param path>)
+    /// to be applied on the dumped node config.
+    /// The config updates will be performed on the shared pointer targets, and finally, the values
+    /// will be propagated to the pointer params.
+    static ref CONFIG_POINTERS: Vec<((ParamPath, SerializedParam), Vec<ParamPath>)> = vec![(
+        ser_pointer_target_param(
+            "chain_id",
+            &ChainId("SN_MAIN".to_string()),
+            "The chain to follow. For more details see https://docs.starknet.io/documentation/architecture_and_concepts/Blocks/transactions/#chain-id.",
+        ),
+        vec!["storage.db_config.chain_id".to_owned()],
+    ),
+    (
+        ser_pointer_target_param(
+            "starknet_url",
+            &"https://alpha-mainnet.starknet.io/".to_string(),
+            "The URL of a centralized Starknet gateway.",
+        ),
+        vec!["central.url".to_owned(), "monitoring_gateway.starknet_url".to_owned()],
+    ),
+    (
+        ser_pointer_target_param(
+            "collect_metrics",
+            &false,
+            "If true, collect metrics for the node.",
+        ),
+        vec!["monitoring_gateway.collect_metrics".to_owned()],
+    )];
+}
+
 /// Updates the default config file by:
 /// cargo run --bin dump_config -q
 #[cfg_attr(coverage_nightly, coverage_attribute)]
diff --git a/crates/papyrus_node/src/config/config_test.rs b/crates/papyrus_node/src/config/config_test.rs
index 6688985e2a..fd737ad05e 100644
--- a/crates/papyrus_node/src/config/config_test.rs
+++ b/crates/papyrus_node/src/config/config_test.rs
@@ -73,6 +73,9 @@ fn load_http_headers() {
     assert_eq!(config.central.http_headers.unwrap(), target_http_headers);
 }
 
+// insta doesn't work well with features, so if the output differs between two features we can
+// only test one of them. We chose to test rpc over testing not(rpc).
+#[cfg(feature = "rpc")]
 #[test]
 // Regression test which checks that the default config dumping hasn't changed.
 fn test_dump_default_config() {
@@ -95,11 +98,15 @@ fn test_default_config_process() {
 
 #[test]
 fn test_update_dumped_config_by_command() {
-    let args =
-        get_args(vec!["--rpc.max_events_keys", "1234", "--storage.db_config.path_prefix", "/abc"]);
+    let args = get_args(vec![
+        "--central.retry_config.retry_max_delay_millis",
+        "1234",
+        "--storage.db_config.path_prefix",
+        "/abc",
+    ]);
     env::set_current_dir(get_absolute_path("")).expect("Couldn't set working dir.");
     let config = NodeConfig::load_and_process(args).unwrap();
-    assert_eq!(config.rpc.max_events_keys, 1234);
+    assert_eq!(config.central.retry_config.retry_max_delay_millis, 1234);
     assert_eq!(config.storage.db_config.path_prefix.to_str(), Some("/abc"));
 }
 
diff --git a/crates/papyrus_node/src/config/mod.rs b/crates/papyrus_node/src/config/mod.rs
index 61813522a8..1730fde9c6 100644
--- a/crates/papyrus_node/src/config/mod.rs
+++ b/crates/papyrus_node/src/config/mod.rs
@@ -13,10 +13,15 @@ use std::{env, fs, io};
 use clap::{arg, value_parser, Arg, ArgMatches, Command};
 use itertools::{chain, Itertools};
 use papyrus_base_layer::ethereum_base_layer_contract::EthereumBaseLayerConfig;
+#[cfg(not(feature = "rpc"))]
+use papyrus_config::dumping::ser_param;
 use papyrus_config::dumping::{append_sub_config_name, ser_optional_sub_config, SerializeConfig};
 use papyrus_config::loading::load_and_process_config;
+#[cfg(not(feature = "rpc"))]
+use papyrus_config::ParamPrivacyInput;
 use papyrus_config::{ConfigError, ParamPath, SerializedParam};
 use papyrus_monitoring_gateway::MonitoringGatewayConfig;
+#[cfg(feature = "rpc")]
 use papyrus_rpc::RpcConfig;
 use papyrus_storage::db::DbConfig;
 use papyrus_storage::StorageConfig;
@@ -90,3 +95,24 @@ pub fn node_command() -> Command {
         .version(VERSION_FULL)
         .about("Papyrus is a StarkNet full node written in Rust.")
 }
+
+// TODO(shahak): Try to make this config empty.
+#[cfg(not(feature = "rpc"))]
+#[derive(Debug, Default, Deserialize, Serialize, Clone, PartialEq, Validate)]
+pub struct RpcConfig {
+    // We need to add some field because empty configs are not supported, and that field needs to
+    // be one that exists in the real RpcConfig.
+    pub collect_metrics: bool,
+}
+
+#[cfg(not(feature = "rpc"))]
+impl SerializeConfig for RpcConfig {
+    fn dump(&self) -> BTreeMap<ParamPath, SerializedParam> {
+        BTreeMap::from_iter([ser_param(
+            "collect_metrics",
+            &self.collect_metrics,
+            "If true, collect metrics for the rpc.",
+            ParamPrivacyInput::Public,
+        )])
+    }
+}
diff --git a/crates/papyrus_node/src/main.rs b/crates/papyrus_node/src/main.rs
index c7a8a5a16d..8ef3b765c2 100644
--- a/crates/papyrus_node/src/main.rs
+++ b/crates/papyrus_node/src/main.rs
@@ -3,6 +3,7 @@ mod main_test;
 
 use std::env::args;
 use std::future;
+use std::future::Future;
 use std::process::exit;
 use std::sync::Arc;
 use std::time::Duration;
@@ -15,6 +16,7 @@ use papyrus_config::ConfigError;
 use papyrus_monitoring_gateway::MonitoringServer;
 use papyrus_node::config::NodeConfig;
 use papyrus_node::version::VERSION_FULL;
+#[cfg(feature = "rpc")]
 use papyrus_rpc::run_server;
 use papyrus_storage::{open_storage, update_storage_metrics, StorageReader, StorageWriter};
 use papyrus_sync::sources::base_layer::{BaseLayerSourceError, EthereumBaseLayerSource};
@@ -22,12 +24,11 @@ use papyrus_sync::sources::central::{CentralError, CentralSource};
 use papyrus_sync::sources::pending::PendingSource;
 use papyrus_sync::{StateSync, StateSyncError};
 use starknet_api::block::BlockHash;
-use starknet_api::hash::{StarkFelt, GENESIS_HASH};
-use starknet_api::stark_felt;
+use starknet_api::hash::GENESIS_HASH;
 use starknet_client::reader::objects::pending_data::{PendingBlock, PendingBlockOrDeprecated};
 use starknet_client::reader::PendingData;
 use tokio::sync::RwLock;
-use tokio::task::JoinHandle;
+use tokio::task::{JoinError, JoinHandle};
 use tracing::metadata::LevelFilter;
 use tracing::{debug_span, error, info, warn, Instrument};
 use tracing_subscriber::prelude::*;
@@ -40,6 +41,37 @@ const DEFAULT_LEVEL: LevelFilter = LevelFilter::INFO;
 // Duration between updates to the storage metrics (those in the collect_storage_metrics function).
 const STORAGE_METRICS_UPDATE_INTERVAL: Duration = Duration::from_secs(10);
 
+#[cfg(feature = "rpc")]
+async fn create_rpc_server_future(
+    config: &NodeConfig,
+    shared_highest_block: Arc<RwLock<Option<BlockHashAndNumber>>>,
+    pending_data: Arc<RwLock<PendingData>>,
+    pending_classes: Arc<RwLock<PendingClasses>>,
+    storage_reader: StorageReader,
+) -> anyhow::Result<impl Future<Output = Result<(), JoinError>>> {
+    let (_, server_handle) = run_server(
+        &config.rpc,
+        shared_highest_block,
+        pending_data,
+        pending_classes,
+        storage_reader,
+        VERSION_FULL,
+    )
+    .await?;
+    Ok(tokio::spawn(server_handle.stopped()))
+}
+
+#[cfg(not(feature = "rpc"))]
+async fn create_rpc_server_future(
+    _config: &NodeConfig,
+    _shared_highest_block: Arc<RwLock<Option<BlockHashAndNumber>>>,
+    _pending_data: Arc<RwLock<PendingData>>,
+    _pending_classes: Arc<RwLock<PendingClasses>>,
+    _storage_reader: StorageReader,
+) -> anyhow::Result<impl Future<Output = Result<(), JoinError>>> {
+    Ok(futures::future::pending())
+}
+
 async fn run_threads(config: NodeConfig) -> anyhow::Result<()> {
     let (storage_reader, storage_writer) = open_storage(config.storage.clone())?;
 
@@ -65,7 +97,9 @@ async fn run_threads(config: NodeConfig) -> anyhow::Result<()> {
         // The pending data might change later to DeprecatedPendingBlock, depending on the response
        // from the feeder gateway.
         block: PendingBlockOrDeprecated::Current(PendingBlock {
-            parent_block_hash: BlockHash(stark_felt!(GENESIS_HASH)),
+            parent_block_hash: BlockHash(
+                GENESIS_HASH.try_into().expect("Failed converting genesis hash to StarkHash"),
+            ),
             ..Default::default()
         }),
         ..Default::default()
@@ -73,16 +107,14 @@ async fn run_threads(config: NodeConfig) -> anyhow::Result<()> {
     let pending_classes = Arc::new(RwLock::new(PendingClasses::default()));
 
     // JSON-RPC server.
-    let (_, server_handle) = run_server(
-        &config.rpc,
+    let server_handle_future = create_rpc_server_future(
+        &config,
         shared_highest_block.clone(),
         pending_data.clone(),
         pending_classes.clone(),
         storage_reader.clone(),
-        VERSION_FULL,
     )
     .await?;
-    let server_handle_future = tokio::spawn(server_handle.stopped());
 
     // Sync task.
     let sync_future = run_sync(
diff --git a/crates/papyrus_node/src/main_test.rs b/crates/papyrus_node/src/main_test.rs
index 809db535cf..2ea41b61a0 100644
--- a/crates/papyrus_node/src/main_test.rs
+++ b/crates/papyrus_node/src/main_test.rs
@@ -2,13 +2,26 @@ use std::time::Duration;
 
 use metrics_exporter_prometheus::PrometheusBuilder;
 use papyrus_node::config::NodeConfig;
+#[cfg(feature = "rpc")]
 use papyrus_rpc::RpcConfig;
 use papyrus_storage::{open_storage, StorageConfig};
 use tempfile::TempDir;
-use test_utils::{get_absolute_path, prometheus_is_contained};
+#[cfg(feature = "rpc")]
+use test_utils::get_absolute_path;
+use test_utils::prometheus_is_contained;
 
 use crate::{run_threads, spawn_storage_metrics_collector};
 
+#[cfg(feature = "rpc")]
+fn fix_execution_config_path(config: &mut NodeConfig) {
+    let default_execution_config_path = RpcConfig::default().execution_config;
+    config.rpc.execution_config =
+        get_absolute_path(default_execution_config_path.to_str().unwrap());
+}
+
+#[cfg(not(feature = "rpc"))]
+fn fix_execution_config_path(_config: &mut NodeConfig) {}
+
 // The mission of this test is to ensure that if an error is returned from one of the spawned tasks,
 // the node will stop, and this error will be returned. This is done by checking the case of an
 // illegal central URL, which will cause the sync task to return an error.
@@ -18,10 +31,7 @@ async fn run_threads_stop() {
     let temp_dir = TempDir::new().unwrap();
     config.storage.db_config.path_prefix = temp_dir.path().into();
 
-    // Fix the path to the execution config.
-    let default_execution_config_path = RpcConfig::default().execution_config;
-    config.rpc.execution_config =
-        get_absolute_path(default_execution_config_path.to_str().unwrap());
+    fix_execution_config_path(&mut config);
 
     // Error when not supplying legal central URL.
     config.central.url = "_not_legal_url".to_string();