Skip to content

Commit

Permalink
add support for the new decentralized nodes API
Browse files Browse the repository at this point in the history
  • Loading branch information
paulormart committed Nov 18, 2024
1 parent e682717 commit 2e3ed8b
Show file tree
Hide file tree
Showing 8 changed files with 151 additions and 82 deletions.
7 changes: 7 additions & 0 deletions src/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -219,6 +219,11 @@ fn default_redis_pool_expire_seconds() -> u64 {
60
}

/// Default public endpoint for the Decentralized Nodes API, used when the
/// ONET_DN_URL environment variable is not set.
fn default_dn_url() -> String {
    String::from("https://nodes.web3.foundation/api/cohort/1")
}

#[derive(Clone, Deserialize, Debug)]
pub struct Config {
// general
Expand Down Expand Up @@ -335,6 +340,8 @@ pub struct Config {
pub redis_pool_timeout_seconds: u64,
#[serde(default = "default_redis_pool_expire_seconds")]
pub redis_pool_expire_seconds: u64,
#[serde(default = "default_dn_url")]
pub dn_url: String,
}

/// Inject dotenv and env vars into the Config struct
Expand Down
126 changes: 126 additions & 0 deletions src/dn.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,126 @@
use crate::config::CONFIG;
use crate::errors::OnetError;
use log::{info, warn};
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
use std::fs;
use std::path::Path;
use std::str::FromStr;
use subxt::utils::AccountId32;
use url::Url;

const DN_VALIDATORS_FILENAME: &str = ".dn";

/// Participation status of a validator in the Decentralized Nodes programme,
/// as reported by the remote cohort API.
///
/// `Ord`/`Eq` are derived so the variants can be used as `BTreeMap` keys
/// (see the `statuses` field of the API response).
#[derive(Serialize, Deserialize, Ord, PartialOrd, Eq, PartialEq, Debug)]
pub enum Status {
    Active,
    Pending,
    Graduated,
    Removed,
}

/// A single validator entry from the Decentralized Nodes API response.
#[derive(Serialize, Deserialize, Debug)]
pub struct Validator {
    // On-chain identity display name; defaults to "" when absent in the JSON.
    #[serde(default)]
    identity: String,
    // Stash account address (parsed into `AccountId32` by the caller);
    // defaults to "" when absent in the JSON.
    #[serde(default)]
    stash: String,
    // Programme status; omitted from serialized (cached) output when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    status: Option<Status>,
}

/// Cohort term boundaries as reported by the API.
// NOTE(review): start/end are kept as raw strings — the date format is
// whatever the endpoint returns; confirm before parsing.
#[derive(Serialize, Deserialize, Debug)]
pub struct Term {
    // Term start; defaults to "" when absent in the JSON.
    #[serde(default)]
    start: String,
    // Term end; defaults to "" when absent in the JSON.
    #[serde(default)]
    end: String,
}

impl Validator {
    /// Returns `true` when this validator's programme status is `Active`.
    fn is_active(&self) -> bool {
        self.status == Some(Status::Active)
    }
}

/// Top-level payload returned by the Decentralized Nodes cohort endpoint.
#[derive(Serialize, Deserialize, Debug)]
struct Response {
    // Validators selected for the current cohort.
    selected: Vec<Validator>,
    // Backup validators not currently selected.
    backups: Vec<Validator>,
    // Nominator addresses as plain strings.
    nominators: Vec<String>,
    // Human-readable description per status; only needed on deserialization,
    // hence excluded when the response is re-serialized.
    #[serde(skip_serializing)]
    statuses: BTreeMap<Status, String>,
    term: Term,
}

/// Fetch the Decentralized Nodes cohort from the remote endpoint
/// (e.g. https://nodes.web3.foundation/api/cohort/1/polkadot/) and return
/// the stash accounts of all currently active validators.
///
/// When `is_loading` is true the network is skipped and only the locally
/// cached file is consulted. On any fetch or parse failure the cached file
/// is used as a fallback; an empty list is returned when no cache exists.
///
/// # Errors
/// Returns an `OnetError` when the URL is malformed, the cache file cannot
/// be read/written, or (de)serialization of the cache fails.
pub async fn try_fetch_stashes_from_remote_url(
    is_loading: bool,
) -> Result<Vec<AccountId32>, OnetError> {
    let config = CONFIG.clone();
    // "<dn_url>/<chain>/", e.g. ".../api/cohort/1/polkadot/"
    let url = format!(
        "{}/{}/",
        config.dn_url.to_lowercase(),
        config.chain_name.to_lowercase()
    );
    let url = Url::parse(&url)?;

    // Per-chain cache file, e.g. "<data_path>.dn_polkadot"
    let filename = format!(
        "{}{}_{}",
        config.data_path,
        DN_VALIDATORS_FILENAME,
        config.chain_name.to_lowercase()
    );

    let validators: Vec<Validator> = if is_loading {
        // While loading, avoid the network entirely and rely on the cache.
        read_cached_filename(&filename)?
    } else {
        match reqwest::get(url.to_string()).await {
            Ok(request) => match request.json::<Response>().await {
                Ok(response) => {
                    info!("response {:?}", response);
                    // Cache the selected set so later failures can fall back.
                    let serialized = serde_json::to_string(&response.selected)?;
                    fs::write(&filename, serialized)?;
                    response.selected
                }
                Err(e) => {
                    warn!("Parsing json from url {} failed with error: {:?}", url, e);
                    // Fall back to the cached file.
                    read_cached_filename(&filename)?
                }
            },
            Err(e) => {
                warn!("Fetching url {} failed with error: {:?}", url, e);
                // Fall back to the cached file.
                read_cached_filename(&filename)?
            }
        }
    };

    // Keep only active validators. Skip (and log) malformed stash addresses
    // instead of panicking: this data comes from a remote service.
    let v: Vec<AccountId32> = validators
        .iter()
        .filter(|v| v.is_active())
        .filter_map(|x| match AccountId32::from_str(&x.stash) {
            Ok(stash) => Some(stash),
            Err(e) => {
                warn!("Invalid stash address {}: {:?}", x.stash, e);
                None
            }
        })
        .collect();

    Ok(v)
}

/// Read the cached DN validators file, returning an empty list when the
/// cache file does not exist yet.
///
/// # Errors
/// Returns an `OnetError` when the file exists but cannot be read or does
/// not contain valid JSON (previously this panicked via `unwrap`).
pub fn read_cached_filename(filename: &str) -> Result<Vec<Validator>, OnetError> {
    if !Path::new(filename).exists() {
        // No cache yet — treat as "no validators known" rather than an error.
        return Ok(Vec::new());
    }
    let serialized = fs::read_to_string(filename)?;
    // Propagate parse failures instead of panicking on a corrupt cache file;
    // OnetError already converts from serde_json::Error (see the `?` on
    // serde_json::to_string in the fetch path).
    let validators: Vec<Validator> = serde_json::from_str(&serialized)?;

    info!("Read from cached file: {:?}", validators);
    Ok(validators)
}
1 change: 1 addition & 0 deletions src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
mod api;
mod cache;
mod config;
mod dn;
mod errors;
mod matrix;
mod mcda;
Expand Down
8 changes: 5 additions & 3 deletions src/matrix.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1607,11 +1607,13 @@ mod tests {
#[test]
fn it_defines_a_matrix_room() {
let config = &CONFIG;
assert_eq!(config.matrix_bot_user, "@some-bot-handle:matrix.org".to_string());
assert_eq!(
config.matrix_bot_user,
"@some-bot-handle:matrix.org".to_string()
);
let user_id = "@ematest:matrix.org";
let chain = SupportedRuntime::Polkadot;
let room: Room = Room::new_private(chain, user_id);
assert_eq!(room.room_alias, "#b25ldC9Qb2xrYWRvdC9AZW1hdGVzdDptYXRyaXgub3JnL0Bzb21lLWJvdC1oYW5kbGU6bWF0cml4Lm9yZw==:matrix.org".to_string());
}

}
}
75 changes: 3 additions & 72 deletions src/onet.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
// SOFTWARE.
use crate::cache::{create_or_await_pool, CacheKey, RedisPool};
use crate::config::{Config, CONFIG};
use crate::dn::try_fetch_stashes_from_remote_url;
use crate::errors::{CacheError, OnetError};
use crate::matrix::{Matrix, UserID, MATRIX_SUBSCRIBERS_FILENAME};
use crate::records::EpochIndex;
Expand All @@ -28,17 +29,15 @@ use crate::runtimes::{
kusama, paseo, polkadot,
support::{ChainPrefix, ChainTokenSymbol, SupportedRuntime},
};
use log::{debug, error, info, warn};
use log::{error, info, warn};
use redis::aio::Connection;
use reqwest::Url;
use serde::{Deserialize, Serialize};
use std::{
collections::BTreeMap,
convert::TryInto,
fs,
fs::File,
io::{BufRead, BufReader},
path::Path,
result::Result,
str::FromStr,
sync::mpsc,
Expand All @@ -65,7 +64,6 @@ use actix_cors::Cors;
use actix_web::{dev::ServerHandle, http, middleware, rt, web, App, HttpServer};
use std::env;

const TVP_VALIDATORS_FILENAME: &str = ".tvp";
pub const BLOCK_FILENAME: &str = ".block";
pub const EPOCH_FILENAME: &str = ".epoch";

Expand Down Expand Up @@ -392,6 +390,7 @@ impl Onet {
SupportedRuntime::Polkadot => polkadot::init_and_subscribe_on_chain_events(self).await,
SupportedRuntime::Kusama => kusama::init_and_subscribe_on_chain_events(self).await,
SupportedRuntime::Paseo => paseo::init_and_subscribe_on_chain_events(self).await,
// _ => todo!(),
}
}
// cache methods
Expand Down Expand Up @@ -532,74 +531,6 @@ struct Validity {
r#type: String,
}

/// Read the cached TVP validators file, returning an empty list when the
/// cache file does not exist.
fn read_tvp_cached_filename(filename: &str) -> Result<Vec<Validator>, OnetError> {
    // Try to read from cached file
    if Path::new(filename).exists() {
        let serialized = fs::read_to_string(filename)?;
        // NOTE(review): `.unwrap()` panics on a corrupt cache file — `?` would
        // propagate the error through the existing Result instead.
        let validators: Vec<Validator> = serde_json::from_str(&serialized).unwrap();
        Ok(validators)
    } else {
        Ok(Vec::new())
    }
}

/// Fetch stashes from 1kv endpoint https://polkadot.w3f.community/candidates
///
/// When `is_loading` is true the network is skipped and only the local cache
/// is read. On fetch or parse failure the cached file is used as a fallback;
/// an empty list is returned when no cache exists.
pub async fn try_fetch_stashes_from_remote_url(
    is_loading: bool,
) -> Result<Vec<AccountId32>, OnetError> {
    let config = CONFIG.clone();
    // Endpoint is derived from the configured chain name,
    // e.g. "https://polkadot.w3f.community/candidates".
    let url = format!(
        "https://{}.w3f.community/candidates",
        config.chain_name.to_lowercase()
    );
    let url = Url::parse(&*url)?;

    // Per-chain cache file, e.g. "<data_path>.tvp_polkadot"
    let tvp_validators_filename = format!(
        "{}{}_{}",
        config.data_path,
        TVP_VALIDATORS_FILENAME,
        config.chain_name.to_lowercase()
    );

    let validators: Vec<Validator> = if is_loading {
        // Try to read from cached file
        read_tvp_cached_filename(&tvp_validators_filename)?
    } else {
        match reqwest::get(url.to_string()).await {
            Ok(request) => {
                match request.json::<Vec<Validator>>().await {
                    Ok(validators) => {
                        debug!("validators {:?}", validators);
                        // Serialize and cache
                        let serialized = serde_json::to_string(&validators)?;
                        fs::write(&tvp_validators_filename, serialized)?;
                        validators
                    }
                    Err(e) => {
                        warn!("Parsing json from url {} failed with error: {:?}", url, e);
                        // Try to read from cached file
                        read_tvp_cached_filename(&tvp_validators_filename)?
                    }
                }
            }
            Err(e) => {
                warn!("Fetching url {} failed with error: {:?}", url, e);
                // Try to read from cached file
                read_tvp_cached_filename(&tvp_validators_filename)?
            }
        }
    };

    // Parse stashes: keep only candidates whose every validity check passed.
    // NOTE(review): `.unwrap()` panics on a malformed stash address coming
    // from the remote service — consider skipping with a warn instead.
    let v: Vec<AccountId32> = validators
        .iter()
        .filter(|v| v.validity.iter().all(|x| x.valid))
        .map(|x| AccountId32::from_str(&x.stash).unwrap())
        .collect();

    Ok(v)
}

pub fn get_account_id_from_storage_key(key: StorageKey) -> AccountId32 {
let s = &key[key.len() - 32..];
let v: [u8; 32] = s.try_into().expect("slice with incorrect length");
Expand Down
6 changes: 3 additions & 3 deletions src/report.rs
Original file line number Diff line number Diff line change
Expand Up @@ -926,7 +926,7 @@ impl From<RawData> for Report {
data.network.name, data.meta.active_era_index,
));
report.add_raw_text(format!(
"<i>Valid <a href=\"https://wiki.polkadot.network/docs/thousand-validators\">TVP validators</a> are shown in bold (100% Commission • Others • <b>TVP</b>).</i>",
"<i>Valid <a href=\"https://nodes.web3.foundation/\">Decentralized Nodes</a> are shown in bold (100% Commission • Others • <b>DN</b>).</i>",
));

// report.add_raw_text(format!(
Expand Down Expand Up @@ -1638,12 +1638,12 @@ fn top_performers_report<'a>(
if max > 0 {
if is_short {
report.add_raw_text(format!(
"Top {} Best TVP Validators performances of the last {} sessions:",
"Top {} Validators performances of the last {} sessions:",
max, data.records_total_full_epochs
));
} else {
report.add_raw_text(format!(
"🏆 <b>Top {} Best TVP Validators performances</b> of the last {} sessions:",
"🏆 <b>Top {} Decentralized Nodes performances</b> of the last {} sessions:",
max, data.records_total_full_epochs
));
}
Expand Down
5 changes: 3 additions & 2 deletions src/runtimes/kusama.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,13 +20,14 @@
// SOFTWARE.
use crate::cache::{CacheKey, Index, Trait, Verbosity};
use crate::config::CONFIG;
use crate::dn::try_fetch_stashes_from_remote_url;
use crate::errors::{CacheError, OnetError};
use crate::matrix::FileInfo;
use crate::mcda::criterias::build_limits_from_session;
use crate::onet::{
get_account_id_from_storage_key, get_latest_block_number_processed, get_signer_from_seed,
get_subscribers, get_subscribers_by_epoch, try_fetch_stashes_from_remote_url,
write_latest_block_number_processed, Onet, ReportType, EPOCH_FILENAME,
get_subscribers, get_subscribers_by_epoch, write_latest_block_number_processed, Onet,
ReportType, EPOCH_FILENAME,
};
use crate::records::{
AuthorityIndex, AuthorityRecord, BlockNumber, EpochIndex, EpochKey, EraIndex, Identity,
Expand Down
5 changes: 3 additions & 2 deletions src/runtimes/polkadot.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,13 +20,14 @@
// SOFTWARE.
use crate::cache::{CacheKey, Index, Trait, Verbosity};
use crate::config::CONFIG;
use crate::dn::try_fetch_stashes_from_remote_url;
use crate::errors::{CacheError, OnetError};
use crate::matrix::FileInfo;
use crate::mcda::criterias::build_limits_from_session;
use crate::onet::{
get_account_id_from_storage_key, get_latest_block_number_processed, get_signer_from_seed,
get_subscribers, get_subscribers_by_epoch, try_fetch_stashes_from_remote_url,
write_latest_block_number_processed, Onet, ReportType, EPOCH_FILENAME,
get_subscribers, get_subscribers_by_epoch, write_latest_block_number_processed, Onet,
ReportType, EPOCH_FILENAME,
};
use crate::records::{
AuthorityIndex, AuthorityRecord, BlockNumber, EpochIndex, EpochKey, EraIndex, Identity,
Expand Down

0 comments on commit 2e3ed8b

Please sign in to comment.