From 478eef8fab50ca93eed52f6a649137b4a398a37c Mon Sep 17 00:00:00 2001
From: IAvecilla
Date: Mon, 11 Mar 2024 12:23:10 -0300
Subject: [PATCH] Fix clippy and update comments

---
 node/tests/src/main.rs  | 58 ++++++++++++++++-----------------------
 node/tests/src/utils.rs |  4 +++
 node/tools/src/k8s.rs   |  4 +--
 3 files changed, 29 insertions(+), 37 deletions(-)

diff --git a/node/tests/src/main.rs b/node/tests/src/main.rs
index ae6d3c60..e74c1e09 100644
--- a/node/tests/src/main.rs
+++ b/node/tests/src/main.rs
@@ -1,7 +1,4 @@
-//! This is a simple test for the RPC server. It checks if the server is running and can respond to.
-use std::{sync::Mutex, thread::sleep, time::Duration};
-
-use anyhow::Context;
+//! Binary containing the tests for the consensus nodes to run in the kubernetes cluster.
 use clap::{Args, Parser, Subcommand};
 use jsonrpsee::{
     core::{client::ClientT, RpcResult},
@@ -12,6 +9,7 @@ use jsonrpsee::{
 };
 use std::net::SocketAddr;
 use std::sync::Arc;
+use std::{sync::Mutex, thread::sleep, time::Duration};
 use tracing::info;
 use zksync_concurrency::{
     ctx::{self, Ctx},
@@ -24,7 +22,7 @@ use zksync_consensus_tools::{
 
 mod utils;
 
-/// Command line arguments.
+/// CLI for the tester binary.
 #[derive(Debug, Parser)]
 #[command(name = "tester")]
 struct TesterCLI {
@@ -33,36 +31,28 @@ struct TesterCLI {
     command: TesterCommands,
 }
 
-/// Subcommands.
+/// Subcommands for the `tester` binary.
 #[derive(Subcommand, Debug)]
 enum TesterCommands {
-    /// Set up the test pod.
+    /// Set up and deploy the test pod.
     StartPod,
-    /// Deploy the nodes.
+    /// Run the tests and the RPC server with the test results.
     Run(RunArgs),
 }
 
+/// Arguments for the `run` subcommand.
 #[derive(Args, Debug)]
 pub struct RunArgs {
+    /// Port for the RPC server to check the test results.
     #[clap(long, default_value = "3030")]
     pub rpc_port: u16,
 }
 
-#[derive(Debug, Default)]
-pub struct TestResult {
-    passed: u16,
-}
-
-impl TestResult {
-    fn add_passed(&mut self) {
-        self.passed += 1;
-    }
-}
-
+/// Run the RPC server to check the test results.
 async fn run_test_rpc_server(
     ctx: &Ctx,
     port: u16,
-    test_result: Arc<Mutex<TestResult>>,
+    test_result: Arc<Mutex<u16>>,
 ) -> anyhow::Result<()> {
     let ip_address = SocketAddr::from(([0, 0, 0, 0], port));
     // Custom tower service to handle the RPC requests
@@ -95,13 +85,14 @@ async fn run_test_rpc_server(
         .await
 }
 
-fn tests_status(counter: Arc<Mutex<TestResult>>) -> RpcResult<String> {
-    Ok(format!("Tests passed: {}", counter.lock().unwrap().passed))
+/// Method for the RPC server to check the test results.
+fn tests_status(counter: Arc<Mutex<u16>>) -> RpcResult<String> {
+    Ok(format!("Tests passed: {}", counter.lock().unwrap()))
 }
 
 /// Sanity test for the RPC server.
 /// We use unwraps here because this function is intended to be used like a test.
-pub async fn sanity_test(test_result: Arc<Mutex<TestResult>>) -> anyhow::Result<()> {
+pub async fn sanity_test(test_result: Arc<Mutex<u16>>) -> anyhow::Result<()> {
     let client = k8s::get_client().await.unwrap();
     let nodes_socket = k8s::get_consensus_nodes_rpc_address(&client).await.unwrap();
     for socket in nodes_socket {
@@ -113,13 +104,14 @@ pub async fn sanity_test(test_result: Arc<Mutex<TestResult>>) -> anyhow::Result<
             .unwrap();
         assert_eq!(response, health_check::callback().unwrap());
     }
-    test_result.lock().unwrap().add_passed();
+    *test_result.lock().unwrap() += 1;
     Ok(())
 }
 
-/// Sanity test for the RPC server.
+/// Delay test for the RPC server.
+/// This test introduces some delay in a specific node and checks if it recovers after some time.
 /// We use unwraps here because this function is intended to be used like a test.
-pub async fn delay_test(test_result: Arc<Mutex<TestResult>>) -> anyhow::Result<()> {
+pub async fn delay_test(test_result: Arc<Mutex<u16>>) -> anyhow::Result<()> {
     let client = k8s::get_client().await.unwrap();
     let target_node = "consensus-node-01";
     let ip = k8s::get_node_rpc_address_with_name(&client, target_node)
@@ -154,7 +146,7 @@ pub async fn delay_test(test_result: Arc<Mutex<TestResult>>) -> anyhow::Result<(
     let new_last_voted_view: u64 =
         serde_json::from_value(response.get("last_commited_block").unwrap().to_owned()).unwrap();
     assert!(new_last_voted_view > last_voted_view);
-    test_result.lock().unwrap().add_passed();
+    *test_result.lock().unwrap() += 1;
     Ok(())
 }
 
@@ -164,7 +156,7 @@ async fn main() -> anyhow::Result<()> {
     let ctx = &ctx::root();
     tracing_subscriber::fmt::init();
     let args = TesterCLI::parse();
-    let test_results = Arc::new(Mutex::new(TestResult::default()));
+    let test_passed = Arc::new(Mutex::new(0));
 
     match args.command {
         TesterCommands::StartPod => {
@@ -174,14 +166,10 @@ async fn main() -> anyhow::Result<()> {
         }
         TesterCommands::Run(args) => {
             scope::run!(ctx, |ctx, s| async {
-                s.spawn(run_test_rpc_server(
-                    ctx,
-                    args.rpc_port,
-                    test_results.clone(),
-                ));
+                s.spawn(run_test_rpc_server(ctx, args.rpc_port, test_passed.clone()));
                 s.spawn(async {
-                    sanity_test(test_results.clone()).await?;
-                    delay_test(test_results.clone()).await
+                    sanity_test(test_passed.clone()).await?;
+                    delay_test(test_passed.clone()).await
                 });
                 Ok(())
             })
diff --git a/node/tests/src/utils.rs b/node/tests/src/utils.rs
index d834f55c..614d31c9 100644
--- a/node/tests/src/utils.rs
+++ b/node/tests/src/utils.rs
@@ -1,3 +1,4 @@
+//! This module contains utility functions for the test binary.
 use anyhow::Context;
 use zksync_consensus_tools::k8s;
 
@@ -10,6 +11,8 @@ pub(crate) async fn start_tests_pod() -> anyhow::Result<()> {
     Ok(())
 }
 
+/// Deploy the necessary roles for the test pod.
+/// This includes the role for listing the consensus pods and the role to deploy network chaos.
 pub(crate) async fn deploy_role() -> anyhow::Result<()> {
     let client = k8s::get_client().await?;
     k8s::create_or_reuse_namespace(&client, k8s::DEFAULT_NAMESPACE).await?;
@@ -18,6 +21,7 @@ pub(crate) async fn deploy_role() -> anyhow::Result<()> {
     Ok(())
 }
 
+/// Deploy a service to expose the tester RPC server.
 pub(crate) async fn deploy_rpc_service() -> anyhow::Result<()> {
     let client = k8s::get_client().await?;
     k8s::expose_tester_rpc(&client).await?;
diff --git a/node/tools/src/k8s.rs b/node/tools/src/k8s.rs
index f2f99046..97b9a6fc 100644
--- a/node/tools/src/k8s.rs
+++ b/node/tools/src/k8s.rs
@@ -185,7 +185,7 @@ pub async fn create_or_reuse_pod_reader_role(client: &Client) -> anyhow::Result<
             ..Default::default()
         },
         rules: vec![PolicyRule {
-            api_groups: Some(vec!["".to_string()]),
+            api_groups: Some(vec![String::new()]),
             resources: Some(vec!["pods".to_string()]),
             verbs: vec![
                 "get".to_owned(),
@@ -224,7 +224,7 @@ pub async fn create_or_reuse_pod_reader_role(client: &Client) -> anyhow::Result<
         subjects: vec![Subject {
             kind: "ServiceAccount".to_string(),
             name: "default".to_string(),
-            api_group: "".to_owned().into(),
+            api_group: String::new().into(),
             ..Default::default()
         }]
         .into(),