diff --git a/.github/workflows/sub-ci.yml b/.github/workflows/sub-ci.yml index 60db7847..70591ce8 100644 --- a/.github/workflows/sub-ci.yml +++ b/.github/workflows/sub-ci.yml @@ -63,7 +63,7 @@ jobs: - name: "Set some postgres settings" run: | docker exec ${{ job.services.postgres.id }} sh -c 'cat /var/lib/postgresql/data/postgresql.conf' - docker exec ${{ job.services.postgres.id }} sh -c 'echo "max_connections=500" >> /var/lib/postgresql/data/postgresql.conf' + docker exec ${{ job.services.postgres.id }} sh -c 'echo "max_connections=1000" >> /var/lib/postgresql/data/postgresql.conf' docker kill --signal=SIGHUP ${{ job.services.postgres.id }} - name: Checkout diff --git a/Cargo.lock b/Cargo.lock index 6fada42f..6db0ce28 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2935,7 +2935,6 @@ dependencies = [ "tower-http", "tracing", "tracing-subscriber", - "tungstenite", "url", "uuid", "validator", @@ -3728,7 +3727,7 @@ checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" [[package]] name = "relay_client" version = "0.1.0" -source = "git+https://github.com/WalletConnect/WalletConnectRust.git?tag=v0.26.0#7a205511a98011e4fc0cffb7a3652d435b191cd7" +source = "git+https://github.com/WalletConnect/WalletConnectRust.git?tag=v0.26.2#b031367a9dda46efc26fd7a7ad2e847d4cf1a2af" dependencies = [ "chrono", "futures-channel", @@ -3751,7 +3750,7 @@ dependencies = [ [[package]] name = "relay_rpc" version = "0.1.0" -source = "git+https://github.com/WalletConnect/WalletConnectRust.git?tag=v0.26.0#7a205511a98011e4fc0cffb7a3652d435b191cd7" +source = "git+https://github.com/WalletConnect/WalletConnectRust.git?tag=v0.26.2#b031367a9dda46efc26fd7a7ad2e847d4cf1a2af" dependencies = [ "alloy-json-abi", "alloy-json-rpc", diff --git a/Cargo.toml b/Cargo.toml index 67310da6..b00de10d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -59,7 +59,6 @@ thiserror = "1.0" async-trait = "0.1" tokio-stream = "0.1.11" regex = "1.7.1" -tungstenite = { version = "0.20", features = ["native-tls"] } url = "2.3.1" sha256 = "1.1.1" chacha20poly1305 = "0.10.1" @@ -75,8 +74,8 @@ futures = "0.3.26" futures-util = "0.3" dashmap = "5.4.0" -relay_rpc = { git = "https://github.com/WalletConnect/WalletConnectRust.git", tag = "v0.26.0", features = ["cacao"] } -relay_client = { git = "https://github.com/WalletConnect/WalletConnectRust.git", tag = "v0.26.0" } +relay_rpc = { git = "https://github.com/WalletConnect/WalletConnectRust.git", tag = "v0.26.2", features = ["cacao"] } +relay_client = { git = "https://github.com/WalletConnect/WalletConnectRust.git", tag = "v0.26.2" } x25519-dalek = { version = "2.0.0", features = ["static_secrets"] } hkdf = "0.12.3" sha2 = "0.10.6" diff --git a/justfile b/justfile index fc9139ea..3d08fbbb 100644 --- a/justfile +++ b/justfile @@ -55,10 +55,11 @@ devloop: unit fmt-imports popd just run-storage-docker test-integration + just run & while ! nc -z 127.0.0.1 3000; do sleep 1; done - just test-deployment + echo "✅ Success! ✅" # Run project linter diff --git a/rs-relay b/rs-relay index f7a10ce8..778c968e 160000 --- a/rs-relay +++ b/rs-relay @@ -1 +1 @@ -Subproject commit f7a10ce801395bbccabd277779e4d4ef2a59a92c +Subproject commit 778c968ec0163492d103fa27a4a817ab99c18d5b diff --git a/src/config/deployed/mod.rs b/src/config/deployed/mod.rs index ee066dae..871d36c9 100644 --- a/src/config/deployed/mod.rs +++ b/src/config/deployed/mod.rs @@ -25,8 +25,9 @@ pub struct DeployedConfiguration { pub postgres_max_connections: u32, pub keypair_seed: String, pub project_id: ProjectId, - /// Websocket URL e.g. 
wss://relay.walletconnect.com + /// Relay URL e.g. https://relay.walletconnect.com pub relay_url: Url, + pub relay_public_key: String, pub notify_url: Url, pub registry_url: Url, @@ -88,6 +89,7 @@ pub fn get_configuration() -> Result { keypair_seed: config.keypair_seed, project_id: config.project_id, relay_url: config.relay_url, + relay_public_key: config.relay_public_key, notify_url: config.notify_url, registry_url: config.registry_url, registry_auth_token: config.registry_auth_token, diff --git a/src/config/local.rs b/src/config/local.rs index 77f3edcb..fb56d868 100644 --- a/src/config/local.rs +++ b/src/config/local.rs @@ -60,21 +60,33 @@ pub fn default_postgres_max_connections() -> u32 { } fn default_keypair_seed() -> String { - hex::encode(rand::Rng::gen::<[u8; 10]>(&mut rand::thread_rng())) + // Use a fixed seed as opposed to a random one for each startup because the server runs on a fixed host and re-uses the database. + // Using a random one will result in: + // - Duplicate relay topic subscriptions for the same topics + // - Duplicate webhook registrations for the same webhook URL + "".to_owned() + // hex::encode(rand::Rng::gen::<[u8; 10]>(&mut rand::thread_rng())) } fn default_relay_url() -> Url { - "ws://127.0.0.1:8888".parse().unwrap() + "http://127.0.0.1:8888".parse().unwrap() } fn default_registry_url() -> Url { "https://registry.walletconnect.com".parse().unwrap() } -pub fn get_configuration() -> Result { +pub async fn get_configuration() -> Result { load_dot_env()?; let config = envy::from_env::()?; + let relay_public_key = reqwest::get(config.relay_url.join("/public-key").unwrap()) + .await + .unwrap() + .text() + .await + .unwrap(); + let socket_addr = SocketAddr::from((config.bind_ip, config.port)); let notify_url = format!("http://{socket_addr}").parse::().unwrap(); let config = Configuration { @@ -88,6 +100,7 @@ pub fn get_configuration() -> Result { keypair_seed: config.keypair_seed, project_id: config.project_id, relay_url: config.relay_url, + relay_public_key, registry_url: config.registry_url, registry_auth_token: config.registry_auth_token, auth_redis_addr_read: None, diff --git a/src/config/mod.rs b/src/config/mod.rs index 39cf72c6..fbddb390 100644 --- a/src/config/mod.rs +++ b/src/config/mod.rs @@ -20,8 +20,9 @@ pub struct Configuration { pub postgres_max_connections: u32, pub keypair_seed: String, pub project_id: ProjectId, - /// Websocket URL e.g. wss://relay.walletconnect.com + /// Relay URL e.g. 
https://relay.walletconnect.com pub relay_url: Url, + pub relay_public_key: String, pub notify_url: Url, pub registry_url: Url, @@ -59,10 +60,10 @@ impl Configuration { } } -pub fn get_configuration() -> Result { +pub async fn get_configuration() -> Result { if env::var("ENVIRONMENT") == Ok("DEPLOYED".to_owned()) { deployed::get_configuration() } else { - local::get_configuration() + local::get_configuration().await } } diff --git a/src/error.rs b/src/error.rs index 2e84e1e7..ccf67ef3 100644 --- a/src/error.rs +++ b/src/error.rs @@ -4,7 +4,6 @@ use { auth, model::types::AccountId, rate_limit::{InternalRateLimitError, RateLimitExceeded}, - services::websocket_server::handlers::notify_watch_subscriptions::CheckAppAuthorizationError, }, axum::{response::IntoResponse, Json}, chacha20poly1305::aead, @@ -16,10 +15,14 @@ use { }, serde_json::json, std::{array::TryFromSliceError, string::FromUtf8Error, sync::Arc}, + thiserror::Error, tracing::{error, info, warn}, }; -#[derive(Debug, thiserror::Error)] +// Import not part of group above because it breaks formatting: https://github.com/rust-lang/rustfmt/issues/4746 +use crate::services::public_http_server::handlers::relay_webhook::handlers::notify_watch_subscriptions::CheckAppAuthorizationError; + +#[derive(Debug, Error)] pub enum NotifyServerError { #[error("Failed to load .env {0}")] DotEnvy(#[from] dotenvy::Error), @@ -63,9 +66,6 @@ pub enum NotifyServerError { #[error(transparent)] SerdeJson(#[from] serde_json::error::Error), - #[error(transparent)] - WebSocket(#[from] tungstenite::Error), - #[error(transparent)] Broadcast(#[from] tokio::sync::broadcast::error::TryRecvError), @@ -125,9 +125,6 @@ pub enum NotifyServerError { #[error("Cryptography failure: {0}")] EncryptionError(aead::Error), - #[error("Failed to receive on websocket")] - RecvError, - #[error(transparent)] SystemTimeError(#[from] std::time::SystemTimeError), diff --git a/src/lib.rs b/src/lib.rs index 49e2185e..4e47ca70 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -4,9 +4,10 @@ use { metrics::Metrics, registry::storage::redis::Redis, relay_client_helpers::create_http_client, + rpc::decode_key, services::{ - private_http_server, public_http_server, publisher_service, watcher_expiration_job, - websocket_server::{self, decode_key}, + private_http_server, public_http_server, publisher_service, + relay_mailbox_clearing_service, relay_renewal_job, watcher_expiration_job, }, state::AppState, }, @@ -40,6 +41,7 @@ pub mod publish_relay_message; pub mod rate_limit; pub mod registry; pub mod relay_client_helpers; +pub mod rpc; pub mod services; pub mod spec; pub mod state; @@ -70,18 +72,7 @@ pub async fn bootstrap( .map_err(|_| NotifyServerError::InvalidKeypairSeed)?; // TODO don't ignore error let keypair = Keypair::generate(&mut StdRng::from_seed(keypair_seed)); - let (relay_ws_client, rx) = { - let (tx, rx) = tokio::sync::mpsc::unbounded_channel(); - let connection_handler = - services::websocket_server::relay_ws_client::RelayConnectionHandler::new( - "notify-client", - tx, - ); - let relay_ws_client = Arc::new(relay_client::websocket::Client::new(connection_handler)); - (relay_ws_client, rx) - }; - - let relay_http_client = Arc::new(create_http_client( + let relay_client = Arc::new(create_http_client( &keypair, config.relay_url.clone(), config.notify_url.clone(), @@ -106,21 +97,32 @@ pub async fn bootstrap( metrics.clone(), )?); + let (relay_mailbox_clearer_tx, relay_mailbox_clearer_rx) = tokio::sync::mpsc::channel(1000); + let state = Arc::new(AppState::new( analytics.clone(), 
config.clone(), postgres.clone(), - keypair, + Keypair::from(keypair.secret_key()), keypair_seed, - relay_ws_client.clone(), - relay_http_client.clone(), + relay_client.clone(), metrics.clone(), redis, registry, + relay_mailbox_clearer_tx, config.clock, BlockchainApiProvider::new(config.project_id), )?); + let relay_renewal_job = relay_renewal_job::start( + state.notify_keys.key_agreement_topic.clone(), + state.config.notify_url.clone(), + keypair, + relay_client.clone(), + postgres.clone(), + metrics.clone(), + ) + .await?; let private_http_server = private_http_server::start(config.bind_ip, config.telemetry_prometheus_port); let public_http_server = public_http_server::start( @@ -130,22 +132,24 @@ pub async fn bootstrap( state.clone(), geoip_resolver, ); - let websocket_server = websocket_server::start(state, relay_ws_client, rx); let publisher_service = publisher_service::start( postgres.clone(), - relay_http_client.clone(), + relay_client.clone(), metrics.clone(), analytics, ); let watcher_expiration_job = watcher_expiration_job::start(postgres, metrics); + let batch_receive_service = + relay_mailbox_clearing_service::start(relay_client.clone(), relay_mailbox_clearer_rx); select! { _ = shutdown.recv() => info!("Shutdown signal received, killing services"), e = private_http_server => error!("Private HTTP server terminating with error {e:?}"), e = public_http_server => error!("Public HTTP server terminating with error {e:?}"), - e = websocket_server => error!("Relay websocket server terminating with error {e:?}"), + e = relay_renewal_job => error!("Relay renewal job terminating with error {e:?}"), e = publisher_service => error!("Publisher service terminating with error {e:?}"), e = watcher_expiration_job => error!("Watcher expiration job terminating with error {e:?}"), + e = batch_receive_service => error!("Batch receive service terminating with error {e:?}"), } Ok(()) diff --git a/src/main.rs b/src/main.rs index 1393a7a1..0a2fc2b0 100644 --- a/src/main.rs +++ b/src/main.rs @@ -6,7 +6,7 @@ use { #[tokio::main] async fn main() -> Result<(), NotifyServerError> { - let config = get_configuration()?; + let config = get_configuration().await?; tracing_subscriber::fmt() .with_env_filter(&config.log_level) diff --git a/src/model/types/mod.rs b/src/model/types/mod.rs index b9dbe508..57ef9ec8 100644 --- a/src/model/types/mod.rs +++ b/src/model/types/mod.rs @@ -1,7 +1,5 @@ use { - crate::{ - error::NotifyServerError, services::websocket_server::decode_key, utils::get_client_id, - }, + crate::{error::NotifyServerError, rpc::decode_key, utils::get_client_id}, chrono::{DateTime, Utc}, relay_rpc::domain::{DecodedClientId, ProjectId, Topic}, sqlx::FromRow, diff --git a/src/publish_relay_message.rs b/src/publish_relay_message.rs index 8c26842e..f97c383a 100644 --- a/src/publish_relay_message.rs +++ b/src/publish_relay_message.rs @@ -26,7 +26,7 @@ fn calculate_retry_in(tries: i32) -> Duration { #[instrument(skip_all, fields(topic = %publish.topic, tag = %publish.tag, message_id = %get_message_id(&publish.message)))] pub async fn publish_relay_message( - relay_http_client: &Client, + relay_client: &Client, publish: &Publish, metrics: Option<&Metrics>, ) -> Result<(), Error> { @@ -35,7 +35,7 @@ pub async fn publish_relay_message( let client_publish_call = || async { let start = Instant::now(); - let result = relay_http_client + let result = relay_client .publish( publish.topic.clone(), publish.message.clone(), @@ -83,9 +83,9 @@ pub async fn publish_relay_message( Ok(()) } 
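For orientation, the call sites updated later in this diff (notify_delete, notify_update, the publisher service) all invoke this helper in the same shape; a representative sketch, with the key, payload, and tag/TTL names borrowed from those call sites rather than prescribed here:

    publish_relay_message(
        &state.relay_client,
        &Publish {
            topic: topic_from_key(&sym_key),
            message: base64_notification.into(),
            tag: NOTIFY_MESSAGE_TAG,
            ttl_secs: NOTIFY_MESSAGE_TTL.as_secs() as u32,
            prompt: true,
        },
        state.metrics.as_ref(),
    )
    .await?;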
-#[instrument(skip(relay_ws_client, metrics))] +#[instrument(skip(relay_client, metrics))] pub async fn subscribe_relay_topic( - relay_ws_client: &relay_client::websocket::Client, + relay_client: &Client, topic: &Topic, metrics: Option<&Metrics>, ) -> Result<(), Error> { @@ -94,7 +94,7 @@ pub async fn subscribe_relay_topic( let client_publish_call = || async { let start = Instant::now(); - let result = relay_ws_client.subscribe_blocking(topic.clone()).await; + let result = relay_client.subscribe_blocking(topic.clone()).await; if let Some(metrics) = metrics { metrics.relay_subscribe_request(start); } @@ -136,9 +136,9 @@ pub async fn subscribe_relay_topic( Ok(()) } -#[instrument(skip(relay_http_client, metrics))] +#[instrument(skip(relay_client, metrics))] pub async fn extend_subscription_ttl( - relay_http_client: &Client, + relay_client: &Client, topic: Topic, metrics: Option<&Metrics>, ) -> Result<(), Error> { @@ -155,7 +155,7 @@ pub async fn extend_subscription_ttl( ttl_secs: NOTIFY_NOOP_TTL.as_secs() as u32, prompt: false, }; - publish_relay_message(relay_http_client, &publish, metrics).await + publish_relay_message(relay_client, &publish, metrics).await } #[cfg(test)] diff --git a/src/rate_limit/token_bucket.rs b/src/rate_limit/token_bucket.rs index 59d85c54..5a571e62 100644 --- a/src/rate_limit/token_bucket.rs +++ b/src/rate_limit/token_bucket.rs @@ -2,7 +2,7 @@ use { crate::{ error::NotifyServerError, registry::storage::redis::Redis, - services::websocket_server::error::{ + services::public_http_server::handlers::relay_webhook::error::{ RelayMessageClientError, RelayMessageError, RelayMessageServerError, }, }, diff --git a/src/registry/mod.rs b/src/registry/mod.rs index adeb2750..5f452ea6 100644 --- a/src/registry/mod.rs +++ b/src/registry/mod.rs @@ -2,6 +2,7 @@ use { crate::{error::NotifyServerError, metrics::Metrics}, hyper::header, relay_rpc::domain::ProjectId, + reqwest::header::HeaderValue, serde::{Deserialize, Serialize}, sha2::{Digest, Sha256}, std::{ @@ -10,7 +11,6 @@ use { }, storage::{redis::Redis, KeyValueStorage}, tracing::{error, warn}, - tungstenite::http::HeaderValue, url::Url, }; diff --git a/src/relay_client_helpers.rs b/src/relay_client_helpers.rs index cb32e042..94cb6785 100644 --- a/src/relay_client_helpers.rs +++ b/src/relay_client_helpers.rs @@ -27,6 +27,7 @@ pub fn create_http_connect_options( notify_url: Url, project_id: ProjectId, ) -> Result { + // TODO remove once switched to https relay_url .set_scheme(&relay_url.scheme().replace("ws", "http")) .map_err(|_| NotifyServerError::UrlSetScheme)?; @@ -39,22 +40,6 @@ pub fn create_http_connect_options( ) } -pub fn create_ws_connect_options( - keypair: &Keypair, - relay_url: Url, - notify_url: Url, - project_id: ProjectId, -) -> Result { - Ok(create_connect_options( - keypair, - &relay_url, - notify_url, - project_id, - Some(Duration::from_secs(60 * 60)), - )? - .with_address(relay_url)) -} - fn create_connect_options( keypair: &Keypair, relay_url: &Url, diff --git a/src/rpc.rs b/src/rpc.rs new file mode 100644 index 00000000..3d452524 --- /dev/null +++ b/src/rpc.rs @@ -0,0 +1,117 @@ +use { + crate::error::NotifyServerError, + rand::Rng, + relay_rpc::{domain::MessageId, rpc::JSON_RPC_VERSION_STR}, + serde::{Deserialize, Serialize}, + sha2::Sha256, +}; + +pub fn decode_key(key: &str) -> Result<[u8; 32], NotifyServerError> { + Ok(hex::decode(key)? + .get(..32) + .ok_or(NotifyServerError::InputTooShortError)? + .try_into()?) 
+} + +pub fn derive_key( + public_key: &x25519_dalek::PublicKey, + private_key: &x25519_dalek::StaticSecret, +) -> Result<[u8; 32], NotifyServerError> { + let shared_key = private_key.diffie_hellman(public_key); + + let derived_key = hkdf::Hkdf::::new(None, shared_key.as_bytes()); + + let mut expanded_key = [0u8; 32]; + derived_key + .expand(b"", &mut expanded_key) + .map_err(NotifyServerError::HkdfInvalidLength)?; + Ok(expanded_key) +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct RequestBody { + pub id: MessageId, + pub jsonrpc: String, + pub params: String, +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct NotifyRequest { + pub id: u64, + pub jsonrpc: String, + pub method: String, + pub params: T, +} + +impl NotifyRequest { + pub fn new(method: &str, params: T) -> Self { + let id = chrono::Utc::now().timestamp_millis().unsigned_abs(); + let id = id * 1000 + rand::thread_rng().gen_range(100, 1000); + + NotifyRequest { + id, + jsonrpc: JSON_RPC_VERSION_STR.to_owned(), + method: method.to_owned(), + params, + } + } +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct NotifyResponse { + pub id: u64, + pub jsonrpc: String, + pub result: T, +} + +impl NotifyResponse { + pub fn new(id: u64, result: T) -> Self { + NotifyResponse { + id, + jsonrpc: JSON_RPC_VERSION_STR.to_owned(), + result, + } + } +} + +#[derive(Serialize, Deserialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct ResponseAuth { + pub response_auth: String, +} + +#[derive(Serialize, Deserialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct NotifyWatchSubscriptions { + pub watch_subscriptions_auth: String, +} + +#[derive(Serialize, Deserialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct NotifySubscriptionsChanged { + pub subscriptions_changed_auth: String, +} + +#[derive(Serialize, Deserialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct NotifySubscribe { + pub subscription_auth: String, +} + +#[derive(Serialize, Deserialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct NotifyUpdate { + pub update_auth: String, +} + +#[derive(Serialize, Deserialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct NotifyDelete { + pub delete_auth: String, +} + +#[derive(Serialize, Deserialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct AuthMessage { + pub auth: String, +} diff --git a/src/services/mod.rs b/src/services/mod.rs index 7976da8b..d08f7cd4 100644 --- a/src/services/mod.rs +++ b/src/services/mod.rs @@ -1,5 +1,6 @@ pub mod private_http_server; pub mod public_http_server; pub mod publisher_service; +pub mod relay_mailbox_clearing_service; +pub mod relay_renewal_job; pub mod watcher_expiration_job; -pub mod websocket_server; diff --git a/src/services/public_http_server/handlers/mod.rs b/src/services/public_http_server/handlers/mod.rs index a3282656..0d60e32e 100644 --- a/src/services/public_http_server/handlers/mod.rs +++ b/src/services/public_http_server/handlers/mod.rs @@ -6,5 +6,6 @@ pub mod health; pub mod notify_v0; pub mod notify_v1; pub mod post_welcome_notification; +pub mod relay_webhook; pub mod subscribe_topic; pub mod webhooks; diff --git a/src/services/websocket_server/error.rs b/src/services/public_http_server/handlers/relay_webhook/error.rs similarity index 100% rename from src/services/websocket_server/error.rs rename to src/services/public_http_server/handlers/relay_webhook/error.rs diff --git a/src/services/websocket_server/handlers/mod.rs b/src/services/public_http_server/handlers/relay_webhook/handlers/mod.rs 
similarity index 91% rename from src/services/websocket_server/handlers/mod.rs rename to src/services/public_http_server/handlers/relay_webhook/handlers/mod.rs index 6d2d69a1..315ef50e 100644 --- a/src/services/websocket_server/handlers/mod.rs +++ b/src/services/public_http_server/handlers/relay_webhook/handlers/mod.rs @@ -1,6 +1,5 @@ use { - super::NotifyRequest, - crate::{error::NotifyServerError, types::Envelope}, + crate::{error::NotifyServerError, rpc::NotifyRequest, types::Envelope}, chacha20poly1305::{aead::Aead, ChaCha20Poly1305, KeyInit}, serde::de::DeserializeOwned, sha2::digest::generic_array::GenericArray, diff --git a/src/services/websocket_server/handlers/notify_delete.rs b/src/services/public_http_server/handlers/relay_webhook/handlers/notify_delete.rs similarity index 89% rename from src/services/websocket_server/handlers/notify_delete.rs rename to src/services/public_http_server/handlers/relay_webhook/handlers/notify_delete.rs index 2f0b4aea..11860e95 100644 --- a/src/services/websocket_server/handlers/notify_delete.rs +++ b/src/services/public_http_server/handlers/relay_webhook/handlers/notify_delete.rs @@ -10,8 +10,8 @@ use { publish_relay_message::publish_relay_message, rate_limit::{self, Clock, RateLimitError}, registry::storage::redis::Redis, - services::websocket_server::{ - decode_key, + rpc::{decode_key, NotifyDelete, NotifyRequest, NotifyResponse, ResponseAuth}, + services::public_http_server::handlers::relay_webhook::{ error::{RelayMessageClientError, RelayMessageError, RelayMessageServerError}, handlers::{ decrypt_message, @@ -19,7 +19,7 @@ use { prepare_subscription_watchers, send_to_subscription_watchers, }, }, - NotifyDelete, NotifyRequest, NotifyResponse, ResponseAuth, + RelayIncomingMessage, }, spec::{ NOTIFY_DELETE_ACT, NOTIFY_DELETE_RESPONSE_ACT, NOTIFY_DELETE_RESPONSE_TAG, @@ -31,9 +31,8 @@ use { }, base64::Engine, chrono::Utc, - relay_client::websocket::{Client, PublishedMessage}, relay_rpc::{ - domain::{DecodedClientId, Topic}, + domain::{DecodedClientId, SubscriptionId, Topic}, rpc::Publish, }, std::{collections::HashSet, sync::Arc}, @@ -41,13 +40,8 @@ use { }; // TODO make and test idempotency -pub async fn handle( - msg: PublishedMessage, - state: &AppState, - client: &Client, -) -> Result<(), RelayMessageError> { +pub async fn handle(msg: RelayIncomingMessage, state: &AppState) -> Result<(), RelayMessageError> { let topic = msg.topic; - let subscription_id = msg.subscription_id; if let Some(redis) = state.redis.as_ref() { notify_delete_rate_limit(redis, &topic, &state.clock).await?; @@ -152,9 +146,18 @@ pub async fn handle( .await .map_err(RelayMessageServerError::NotifyServerError)?; // TODO change to client error? - if let Err(e) = client.unsubscribe(topic.clone(), subscription_id).await { - warn!("Error unsubscribing Notify from topic: {}", e); - }; + tokio::task::spawn({ + let relay_client = state.relay_client.clone(); + let topic = topic.clone(); + async move { + // Relay ignores subscription_id, generate a random one since we don't have it here. 
+ // https://walletconnect.slack.com/archives/C05ABTQSPFY/p1706337410799659?thread_ts=1706307603.828609&cid=C05ABTQSPFY + let subscription_id = SubscriptionId::generate(); + if let Err(e) = relay_client.unsubscribe(topic, subscription_id).await { + warn!("Error unsubscribing Notify from topic: {}", e); + } + } + }); state.analytics.client(SubscriberUpdateParams { project_pk: project.id, @@ -211,7 +214,7 @@ pub async fn handle( base64::engine::general_purpose::STANDARD.encode(envelope.to_bytes()); publish_relay_message( - &state.relay_http_client, + &state.relay_client, &Publish { topic: topic_from_key(&sym_key), message: base64_notification.into(), @@ -229,7 +232,7 @@ pub async fn handle( watchers_with_subscriptions, &state.notify_keys.authentication_secret, &state.notify_keys.authentication_client_id, - &state.relay_http_client.clone(), + &state.relay_client, state.metrics.as_ref(), ) .await diff --git a/src/services/websocket_server/handlers/notify_get_notifications.rs b/src/services/public_http_server/handlers/relay_webhook/handlers/notify_get_notifications.rs similarity index 96% rename from src/services/websocket_server/handlers/notify_get_notifications.rs rename to src/services/public_http_server/handlers/relay_webhook/handlers/notify_get_notifications.rs index e6034932..9476b82d 100644 --- a/src/services/websocket_server/handlers/notify_get_notifications.rs +++ b/src/services/public_http_server/handlers/relay_webhook/handlers/notify_get_notifications.rs @@ -12,11 +12,11 @@ use { publish_relay_message::publish_relay_message, rate_limit::{self, Clock, RateLimitError}, registry::storage::redis::Redis, - services::websocket_server::{ - decode_key, + rpc::{decode_key, AuthMessage, NotifyRequest, NotifyResponse}, + services::public_http_server::handlers::relay_webhook::{ error::{RelayMessageClientError, RelayMessageError, RelayMessageServerError}, handlers::decrypt_message, - AuthMessage, NotifyRequest, NotifyResponse, + RelayIncomingMessage, }, spec::{ NOTIFY_GET_NOTIFICATIONS_ACT, NOTIFY_GET_NOTIFICATIONS_RESPONSE_ACT, @@ -28,7 +28,6 @@ use { }, base64::Engine, chrono::Utc, - relay_client::websocket::PublishedMessage, relay_rpc::{ domain::{DecodedClientId, Topic}, rpc::Publish, @@ -38,7 +37,7 @@ use { }; // TODO test idempotency -pub async fn handle(msg: PublishedMessage, state: &AppState) -> Result<(), RelayMessageError> { +pub async fn handle(msg: RelayIncomingMessage, state: &AppState) -> Result<(), RelayMessageError> { let topic = msg.topic; if let Some(redis) = state.redis.as_ref() { @@ -175,7 +174,7 @@ pub async fn handle(msg: PublishedMessage, state: &AppState) -> Result<(), Relay let response = base64::engine::general_purpose::STANDARD.encode(envelope.to_bytes()); publish_relay_message( - &state.relay_http_client, + &state.relay_client, &Publish { topic, message: response.into(), diff --git a/src/services/websocket_server/handlers/notify_subscribe.rs b/src/services/public_http_server/handlers/relay_webhook/handlers/notify_subscribe.rs similarity index 94% rename from src/services/websocket_server/handlers/notify_subscribe.rs rename to src/services/public_http_server/handlers/relay_webhook/handlers/notify_subscribe.rs index 520aefaf..441ceefe 100644 --- a/src/services/websocket_server/handlers/notify_subscribe.rs +++ b/src/services/public_http_server/handlers/relay_webhook/handlers/notify_subscribe.rs @@ -12,10 +12,11 @@ use { }, rate_limit::{self, Clock, RateLimitError}, registry::storage::redis::Redis, + rpc::{ + decode_key, derive_key, NotifyRequest, NotifyResponse, 
NotifySubscribe, ResponseAuth, + }, services::{ - publisher_service::helpers::{upsert_notification, upsert_subscriber_notifications}, - websocket_server::{ - decode_key, derive_key, + public_http_server::handlers::relay_webhook::{ error::{RelayMessageClientError, RelayMessageError, RelayMessageServerError}, handlers::{ decrypt_message, @@ -23,8 +24,9 @@ use { prepare_subscription_watchers, send_to_subscription_watchers, }, }, - NotifyRequest, NotifyResponse, NotifySubscribe, ResponseAuth, + RelayIncomingMessage, }, + publisher_service::helpers::{upsert_notification, upsert_subscriber_notifications}, }, spec::{ NOTIFY_SUBSCRIBE_ACT, NOTIFY_SUBSCRIBE_RESPONSE_ACT, NOTIFY_SUBSCRIBE_RESPONSE_TAG, @@ -36,7 +38,6 @@ use { }, base64::Engine, chrono::Utc, - relay_client::websocket::PublishedMessage, relay_rpc::{ domain::{DecodedClientId, Topic}, rpc::Publish, @@ -52,7 +53,7 @@ use { // TODO test idempotency (create subscriber a second time for the same account) #[instrument(name = "wc_notifySubscribe", skip_all)] -pub async fn handle(msg: PublishedMessage, state: &AppState) -> Result<(), RelayMessageError> { +pub async fn handle(msg: RelayIncomingMessage, state: &AppState) -> Result<(), RelayMessageError> { let topic = msg.topic; if let Some(redis) = state.redis.as_ref() { @@ -195,13 +196,9 @@ pub async fn handle(msg: PublishedMessage, state: &AppState) -> Result<(), Relay .map_err(RelayMessageServerError::NotifyServerError)?; // TODO change to client error? info!("Timing: Subscribing to notify_topic: {notify_topic}"); - subscribe_relay_topic( - &state.relay_ws_client, - ¬ify_topic, - state.metrics.as_ref(), - ) - .await - .map_err(|e| RelayMessageServerError::NotifyServerError(e.into()))?; + subscribe_relay_topic(&state.relay_client, ¬ify_topic, state.metrics.as_ref()) + .await + .map_err(|e| RelayMessageServerError::NotifyServerError(e.into()))?; info!("Timing: Finished subscribing to topic"); info!("Timing: Recording SubscriberUpdateParams"); @@ -262,7 +259,7 @@ pub async fn handle(msg: PublishedMessage, state: &AppState) -> Result<(), Relay info!("Publishing subscribe response to topic: {response_topic}"); publish_relay_message( - &state.relay_http_client, + &state.relay_client, &Publish { topic: response_topic, message: base64_notification.into(), @@ -277,13 +274,9 @@ pub async fn handle(msg: PublishedMessage, state: &AppState) -> Result<(), Relay info!("Finished publishing subscribe response"); } - extend_subscription_ttl( - &state.relay_http_client, - notify_topic, - state.metrics.as_ref(), - ) - .await - .map_err(|e| RelayMessageServerError::NotifyServerError(e.into()))?; // TODO change to client error? + extend_subscription_ttl(&state.relay_client, notify_topic, state.metrics.as_ref()) + .await + .map_err(|e| RelayMessageServerError::NotifyServerError(e.into()))?; // TODO change to client error? 
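+    // Note: subscribing above only guarantees a 5m relay TTL (see the renewal job), so the noop
+    // publish performed by extend_subscription_ttl() is what keeps the topic alive long-term.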
// TODO do in same txn as upsert_subscriber() if subscriber.inserted { @@ -334,7 +327,7 @@ pub async fn handle(msg: PublishedMessage, state: &AppState) -> Result<(), Relay watchers_with_subscriptions, &state.notify_keys.authentication_secret, &state.notify_keys.authentication_client_id, - &state.relay_http_client.clone(), + &state.relay_client, state.metrics.as_ref(), ) .await diff --git a/src/services/websocket_server/handlers/notify_update.rs b/src/services/public_http_server/handlers/relay_webhook/handlers/notify_update.rs similarity index 96% rename from src/services/websocket_server/handlers/notify_update.rs rename to src/services/public_http_server/handlers/relay_webhook/handlers/notify_update.rs index 904d7164..13b38bab 100644 --- a/src/services/websocket_server/handlers/notify_update.rs +++ b/src/services/public_http_server/handlers/relay_webhook/handlers/notify_update.rs @@ -11,13 +11,13 @@ use { publish_relay_message::publish_relay_message, rate_limit::{self, Clock, RateLimitError}, registry::storage::redis::Redis, - services::websocket_server::{ - decode_key, + rpc::{decode_key, NotifyRequest, NotifyResponse, NotifyUpdate, ResponseAuth}, + services::public_http_server::handlers::relay_webhook::{ error::{RelayMessageClientError, RelayMessageError, RelayMessageServerError}, handlers::{ decrypt_message, notify_watch_subscriptions::send_to_subscription_watchers, }, - NotifyRequest, NotifyResponse, NotifyUpdate, ResponseAuth, + RelayIncomingMessage, }, spec::{ NOTIFY_UPDATE_ACT, NOTIFY_UPDATE_RESPONSE_ACT, NOTIFY_UPDATE_RESPONSE_TAG, @@ -29,7 +29,6 @@ use { }, base64::Engine, chrono::Utc, - relay_client::websocket::PublishedMessage, relay_rpc::{ domain::{DecodedClientId, Topic}, rpc::Publish, @@ -39,7 +38,7 @@ use { }; // TODO test idempotency -pub async fn handle(msg: PublishedMessage, state: &AppState) -> Result<(), RelayMessageError> { +pub async fn handle(msg: RelayIncomingMessage, state: &AppState) -> Result<(), RelayMessageError> { let topic = msg.topic; if let Some(redis) = state.redis.as_ref() { @@ -210,7 +209,7 @@ pub async fn handle(msg: PublishedMessage, state: &AppState) -> Result<(), Relay base64::engine::general_purpose::STANDARD.encode(envelope.to_bytes()); publish_relay_message( - &state.relay_http_client, + &state.relay_client, &Publish { topic: topic_from_key(&sym_key), message: base64_notification.into(), @@ -228,7 +227,7 @@ pub async fn handle(msg: PublishedMessage, state: &AppState) -> Result<(), Relay watchers_with_subscriptions, &state.notify_keys.authentication_secret, &state.notify_keys.authentication_client_id, - &state.relay_http_client.clone(), + &state.relay_client, state.metrics.as_ref(), ) .await diff --git a/src/services/websocket_server/handlers/notify_watch_subscriptions.rs b/src/services/public_http_server/handlers/relay_webhook/handlers/notify_watch_subscriptions.rs similarity index 97% rename from src/services/websocket_server/handlers/notify_watch_subscriptions.rs rename to src/services/public_http_server/handlers/relay_webhook/handlers/notify_watch_subscriptions.rs index ebfcb758..c129722d 100644 --- a/src/services/websocket_server/handlers/notify_watch_subscriptions.rs +++ b/src/services/public_http_server/handlers/relay_webhook/handlers/notify_watch_subscriptions.rs @@ -18,11 +18,14 @@ use { publish_relay_message::publish_relay_message, rate_limit::{self, Clock, RateLimitError}, registry::storage::redis::Redis, - services::websocket_server::{ - decode_key, derive_key, + rpc::{ + decode_key, derive_key, NotifyRequest, NotifyResponse, 
NotifySubscriptionsChanged, + NotifyWatchSubscriptions, + }, + services::public_http_server::handlers::relay_webhook::{ error::{RelayMessageClientError, RelayMessageError, RelayMessageServerError}, handlers::decrypt_message, - NotifyRequest, NotifyResponse, NotifySubscriptionsChanged, NotifyWatchSubscriptions, + RelayIncomingMessage, }, spec::{ NOTIFY_SUBSCRIPTIONS_CHANGED_ACT, NOTIFY_SUBSCRIPTIONS_CHANGED_METHOD, @@ -36,7 +39,6 @@ use { }, base64::Engine, chrono::{Duration, Utc}, - relay_client::websocket::PublishedMessage, relay_rpc::{ domain::DecodedClientId, rpc::{Publish, JSON_RPC_VERSION_STR}, @@ -49,7 +51,7 @@ use { }; #[instrument(name = "wc_notifyWatchSubscriptions", skip_all)] -pub async fn handle(msg: PublishedMessage, state: &AppState) -> Result<(), RelayMessageError> { +pub async fn handle(msg: RelayIncomingMessage, state: &AppState) -> Result<(), RelayMessageError> { if msg.topic != state.notify_keys.key_agreement_topic { return Err(RelayMessageClientError::WrongNotifyWatchSubscriptionsTopic( msg.topic, @@ -195,7 +197,7 @@ pub async fn handle(msg: PublishedMessage, state: &AppState) -> Result<(), Relay info!("Publishing response on topic {response_topic}"); publish_relay_message( - &state.relay_http_client, + &state.relay_client, &Publish { topic: response_topic, message: base64_notification.into(), diff --git a/src/services/public_http_server/handlers/relay_webhook/mod.rs b/src/services/public_http_server/handlers/relay_webhook/mod.rs new file mode 100644 index 00000000..e3d8c675 --- /dev/null +++ b/src/services/public_http_server/handlers/relay_webhook/mod.rs @@ -0,0 +1,210 @@ +use { + self::error::RelayMessageServerError, + crate::{ + metrics::RelayIncomingMessageStatus, + services::public_http_server::handlers::relay_webhook::{ + error::RelayMessageError, + handlers::{ + notify_delete, notify_get_notifications, notify_subscribe, notify_update, + notify_watch_subscriptions, + }, + }, + spec, + state::AppState, + }, + axum::{ + extract::State, + http::StatusCode, + response::{IntoResponse, Response}, + Json, + }, + relay_rpc::{ + domain::Topic, + jwt::{JwtError, VerifyableClaims}, + rpc::{ + msg_id::get_message_id, Receipt, WatchAction, WatchEventClaims, WatchStatus, WatchType, + WatchWebhookPayload, + }, + }, + serde_json::json, + std::{collections::HashSet, sync::Arc, time::Instant}, + thiserror::Error, + tracing::{error, info, instrument, warn}, +}; + +pub mod error; +pub mod handlers; + +#[derive(Debug, Error)] +pub enum ClientError { + #[error("Received more or less than 1 watch event. 
Got {0} events")] + NotSingleWatchEvent(usize), + + #[error("Could not parse watch event claims: {0}")] + ParseWatchEvent(JwtError), + + #[error("Could not verify watch event: {0}")] + VerifyWatchEvent(JwtError), + + #[error("JWT has wrong issuer")] + WrongIssuer, + + #[error("Expected WatchAction::WatchEvent, got {0:?}")] + WrongWatchAction(WatchAction), + + #[error("Expected WatchType::Subscriber, got {0:?}")] + WrongWatchType(WatchType), + + #[error("Expected WatchStatus::Queued, got {0:?}")] + WrongWatchStatus(WatchStatus), +} + +#[derive(Debug, Error)] +pub enum Error { + #[error("Client error: {0}")] + ClientError(ClientError), + + #[error("Server error: {0}")] + ServerError(RelayMessageServerError), +} + +impl IntoResponse for Error { + fn into_response(self) -> Response { + match self { + Error::ClientError(e) => { + warn!("Relay webhook client error: {e:?}"); + ( + StatusCode::UNPROCESSABLE_ENTITY, + Json(json!({ "error": e.to_string() })), + ) + .into_response() + } + Error::ServerError(e) => { + error!("Relay webhook server error: {e:?}"); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(json!({ "error": "Internal server error" })), + ) + .into_response() + } + } + } +} + +pub async fn handler( + State(state): State>, + Json(payload): Json, +) -> Result { + let event = if payload.event_auth.len() == 1 { + payload.event_auth.first().expect("Asserted 1 entry") + } else { + return Err(Error::ClientError(ClientError::NotSingleWatchEvent( + payload.event_auth.len(), + ))); + }; + + let claims = WatchEventClaims::try_from_str(event) + .map_err(|e| Error::ClientError(ClientError::ParseWatchEvent(e)))?; + + claims + .verify_basic(&HashSet::from([state.config.notify_url.to_string()]), None) + .map_err(|e| Error::ClientError(ClientError::VerifyWatchEvent(e)))?; + + if claims.basic.iss != state.relay_identity { + return Err(Error::ClientError(ClientError::WrongIssuer)); + } + + // TODO check sub + + let event = claims.evt; + + state + .relay_mailbox_clearer_tx + .send(Receipt { + topic: event.topic.clone(), + message_id: event.message_id, + }) + .await + .expect("Batch receive channel should not be closed"); + + let incoming_message = RelayIncomingMessage { + topic: event.topic, + message: event.message, + tag: event.tag, + }; + + if claims.act != WatchAction::WatchEvent { + return Err(Error::ClientError(ClientError::WrongWatchAction( + claims.act, + ))); + } + if claims.typ != WatchType::Subscriber { + return Err(Error::ClientError(ClientError::WrongWatchType(claims.typ))); + } + // TODO check whu + + if event.status != WatchStatus::Queued { + return Err(Error::ClientError(ClientError::WrongWatchStatus( + event.status, + ))); + } + + handle_msg(incoming_message, &state) + .await + .map_err(Error::ServerError)?; + + Ok(StatusCode::NO_CONTENT.into_response()) +} + +pub struct RelayIncomingMessage { + pub topic: Topic, + pub message: Arc, + pub tag: u32, +} + +#[instrument(skip_all, fields(topic = %msg.topic, tag = %msg.tag, message_id = %get_message_id(&msg.message)))] +async fn handle_msg( + msg: RelayIncomingMessage, + state: &AppState, +) -> Result<(), RelayMessageServerError> { + let start = Instant::now(); + let topic = msg.topic.clone(); + let tag = msg.tag; + info!("Received tag {tag} on topic {topic}"); + + let result = match tag { + spec::NOTIFY_SUBSCRIBE_TAG => notify_subscribe::handle(msg, state).await, + spec::NOTIFY_DELETE_TAG => notify_delete::handle(msg, state).await, + spec::NOTIFY_UPDATE_TAG => notify_update::handle(msg, state).await, + spec::NOTIFY_WATCH_SUBSCRIPTIONS_TAG 
=> { + notify_watch_subscriptions::handle(msg, state).await + } + spec::NOTIFY_GET_NOTIFICATIONS_TAG => notify_get_notifications::handle(msg, state).await, + _ => { + warn!("Ignored tag {tag} on topic {topic}"); + Ok(()) + } + }; + + let (status, result) = if let Err(e) = result { + match e { + RelayMessageError::Client(e) => { + warn!("Relay message client error handling {tag} on topic {topic}: {e}"); + (RelayIncomingMessageStatus::ClientError, Ok(())) + } + RelayMessageError::Server(e) => { + error!("Relay message server error handling {tag} on topic {topic}: {e}"); + (RelayIncomingMessageStatus::ServerError, Err(e)) + } + } + } else { + info!("Success processing {tag} on topic {topic}"); + (RelayIncomingMessageStatus::Success, Ok(())) + }; + + if let Some(metrics) = &state.metrics { + metrics.relay_incoming_message(tag, status, start); + } + + result +} diff --git a/src/services/public_http_server/handlers/subscribe_topic.rs b/src/services/public_http_server/handlers/subscribe_topic.rs index 6344034e..e2a24e7d 100644 --- a/src/services/public_http_server/handlers/subscribe_topic.rs +++ b/src/services/public_http_server/handlers/subscribe_topic.rs @@ -101,15 +101,10 @@ pub async fn handler( let topic = project.topic.into(); info!("Subscribing to project topic: {topic}"); - subscribe_relay_topic(&state.relay_ws_client, &topic, state.metrics.as_ref()).await?; + subscribe_relay_topic(&state.relay_client, &topic, state.metrics.as_ref()).await?; info!("Extending subscription TTL"); - extend_subscription_ttl( - &state.relay_http_client, - topic.clone(), - state.metrics.as_ref(), - ) - .await?; + extend_subscription_ttl(&state.relay_client, topic.clone(), state.metrics.as_ref()).await?; info!("Successfully subscribed to project topic: {topic}"); Ok(Json(SubscribeTopicResponseBody { diff --git a/src/services/public_http_server/mod.rs b/src/services/public_http_server/mod.rs index ea1279ba..3d4e2362 100644 --- a/src/services/public_http_server/mod.rs +++ b/src/services/public_http_server/mod.rs @@ -24,6 +24,7 @@ use { }; pub const DID_JSON_ENDPOINT: &str = "/.well-known/did.json"; +pub const RELAY_WEBHOOK_ENDPOINT: &str = "/v1/relay-webhook"; pub mod handlers; @@ -70,6 +71,7 @@ pub async fn start( let app = Router::new() .route("/health", get(handlers::health::handler)) .route(DID_JSON_ENDPOINT, get(handlers::did_json::handler)) + .route(RELAY_WEBHOOK_ENDPOINT, post(handlers::relay_webhook::handler)) .route("/:project_id/notify", post(handlers::notify_v0::handler)) .route("/v1/:project_id/notify", post(handlers::notify_v1::handler)) .route( diff --git a/src/services/publisher_service/helpers.rs b/src/services/publisher_service/helpers.rs index 1cdddae6..66830362 100644 --- a/src/services/publisher_service/helpers.rs +++ b/src/services/publisher_service/helpers.rs @@ -113,6 +113,9 @@ pub struct NotificationToProcess { pub project_authentication_private_key: String, } +/// Picks a notification to be processed, marking it as status=processing. +/// status=processing used over a lock on un-sent messages used as it avoids long-running transactions +/// which could become a problem at high throughputs. 
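+/// A rough sketch of the pick-and-mark step this describes (the table/column names and SQL here
+/// are illustrative only, not the exact statement used below):
+/// ```ignore
+/// sqlx::query_as::<_, NotificationToProcess>(
+///     "UPDATE subscriber_notification SET status = 'processing'
+///      WHERE status = 'queued' AND id = (
+///          SELECT id FROM subscriber_notification
+///          WHERE status = 'queued'
+///          ORDER BY created_at
+///          LIMIT 1
+///      )
+///      RETURNING *",
+/// )
+/// .fetch_optional(postgres)
+/// .await
+/// ```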
#[instrument(skip(postgres, metrics))] pub async fn pick_subscriber_notification_for_processing( postgres: &PgPool, diff --git a/src/services/publisher_service/mod.rs b/src/services/publisher_service/mod.rs index 94a084c6..42956fb4 100644 --- a/src/services/publisher_service/mod.rs +++ b/src/services/publisher_service/mod.rs @@ -7,7 +7,7 @@ use { metrics::Metrics, notify_message::{sign_message, JwtNotification, ProjectSigningDetails}, publish_relay_message::publish_relay_message, - services::websocket_server::decode_key, + rpc::decode_key, spec::{NOTIFY_MESSAGE_TAG, NOTIFY_MESSAGE_TTL}, types::{Envelope, EnvelopeType0}, utils::topic_from_key, @@ -55,7 +55,7 @@ const PUBLISHING_GIVE_UP_TIMEOUT: Duration = Duration::from_secs(60 * 60 * 24); #[instrument(skip_all)] pub async fn start( postgres: PgPool, - relay_http_client: Arc, + relay_client: Arc, metrics: Option, analytics: NotifyAnalytics, ) -> Result<(), sqlx::Error> { @@ -99,14 +99,14 @@ pub async fn start( // Spawning a new task to process the messages from the queue tokio::spawn({ let postgres = postgres.clone(); - let relay_http_client = relay_http_client.clone(); + let relay_client = relay_client.clone(); let spawned_tasks_counter = spawned_tasks_counter.clone(); let metrics = metrics.clone(); let analytics = analytics.clone(); async move { process_and_handle( &postgres, - relay_http_client, + relay_client, metrics.as_ref(), &analytics, spawned_tasks_counter, @@ -129,14 +129,14 @@ pub async fn start( // Spawning a new task to process the messages from the queue tokio::spawn({ let postgres = postgres.clone(); - let relay_http_client = relay_http_client.clone(); + let relay_client = relay_client.clone(); let spawned_tasks_counter = spawned_tasks_counter.clone(); let metrics = metrics.clone(); let analytics = analytics.clone(); async move { process_and_handle( &postgres, - relay_http_client, + relay_client, metrics.as_ref(), &analytics, spawned_tasks_counter, @@ -157,7 +157,7 @@ pub async fn start( /// the spawned tasks counter and metrics async fn process_and_handle( postgres: &PgPool, - relay_http_client: Arc, + relay_client: Arc, metrics: Option<&Metrics>, analytics: &NotifyAnalytics, spawned_tasks_counter: Arc, @@ -174,7 +174,7 @@ async fn process_and_handle( // TODO: Add worker execution time metric } - if let Err(e) = process_queued_messages(postgres, relay_http_client, metrics, analytics).await { + if let Err(e) = process_queued_messages(postgres, relay_client, metrics, analytics).await { if let Some(metrics) = metrics { metrics.publishing_workers_errors.add(&ctx, 1, &[]); } @@ -197,7 +197,7 @@ async fn process_and_handle( #[instrument(skip_all)] async fn process_queued_messages( postgres: &PgPool, - relay_http_client: Arc, + relay_client: Arc, metrics: Option<&Metrics>, analytics: &NotifyAnalytics, ) -> Result<(), NotifyServerError> { @@ -212,7 +212,7 @@ async fn process_queued_messages( let process_result = process_with_timeout( PUBLISHING_TIMEOUT, notification, - relay_http_client.clone(), + relay_client.clone(), metrics, analytics, ); @@ -248,17 +248,17 @@ async fn process_queued_messages( } /// Process publishing with the threshold timeout -#[instrument(skip(relay_http_client, metrics, analytics, notification))] +#[instrument(skip(relay_client, metrics, analytics, notification))] async fn process_with_timeout( execution_threshold: Duration, notification: NotificationToProcess, - relay_http_client: Arc, + relay_client: Arc, metrics: Option<&Metrics>, analytics: &NotifyAnalytics, ) -> Result<(), NotifyServerError> { match 
timeout(
         execution_threshold,
-        process_notification(notification, relay_http_client.clone(), metrics, analytics),
+        process_notification(notification, relay_client.clone(), metrics, analytics),
     )
     .await
     {
@@ -275,7 +275,7 @@
 #[instrument(skip_all, fields(notification = ?notification))]
 async fn process_notification(
     notification: NotificationToProcess,
-    relay_http_client: Arc<Client>,
+    relay_client: Arc<Client>,
     metrics: Option<&Metrics>,
     analytics: &NotifyAnalytics,
 ) -> Result<(), NotifyServerError> {
@@ -327,7 +327,7 @@
         prompt: true,
     };
     let message_id = publish.msg_id();
-    publish_relay_message(&relay_http_client, &publish, metrics).await?;
+    publish_relay_message(&relay_client, &publish, metrics).await?;
 
     analytics.message(SubscriberNotificationParams {
         project_pk: notification.project,
diff --git a/src/services/relay_mailbox_clearing_service.rs b/src/services/relay_mailbox_clearing_service.rs
new file mode 100644
index 00000000..74522aad
--- /dev/null
+++ b/src/services/relay_mailbox_clearing_service.rs
@@ -0,0 +1,233 @@
+use {
+    relay_client::http::Client,
+    relay_rpc::rpc::Receipt,
+    std::{convert::Infallible, sync::Arc, time::Duration},
+    tokio::{
+        select,
+        sync::mpsc::{Receiver, Sender},
+    },
+    tracing::{info, warn},
+};
+
+const BATCH_SIZE: usize = 500;
+pub const BATCH_TIMEOUT: Duration = Duration::from_secs(5);
+
+pub async fn start(
+    relay_client: Arc<Client>,
+    batch_receive_rx: Receiver<Receipt>,
+) -> Result<(), Infallible> {
+    let (output_tx, mut output_rx) = tokio::sync::mpsc::channel(1);
+    let relay_handler = async {
+        while let Some(receipts) = output_rx.recv().await {
+            if let Err(e) = relay_client.batch_receive(receipts).await {
+                // Failure is not a major issue, as messages will expire from the mailbox after their TTL anyway
+                warn!("Error while calling batch_receive: {e:?}");
+                // TODO retry
+                // TODO metrics
+            }
+        }
+        info!("output_rx closed");
+        Ok(())
+    };
+    select! {
+        e = batcher::<BATCH_SIZE, _>(BATCH_TIMEOUT, batch_receive_rx, output_tx) => e,
+        e = relay_handler => e,
+    }
+}
+
+/// Accepts items from `items_to_batch`, batches them up to `MAX_BATCH_SIZE`, and sends batches to `output_batches`.
+/// If an item is not received for `timeout` and a partial batch is ready, then the partial batch will be sent.
+/// Note: MAX_BATCH_SIZE must be at least 2 or the behavior is undefined.
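+/// A minimal usage sketch (the item type, channel capacities, and batch size of 10 are
+/// illustrative, mirroring the tests below):
+/// ```ignore
+/// let (in_tx, in_rx) = tokio::sync::mpsc::channel::<u32>(100);
+/// let (out_tx, mut out_rx) = tokio::sync::mpsc::channel::<Vec<u32>>(1);
+/// tokio::spawn(batcher::<10, _>(Duration::from_secs(5), in_rx, out_tx));
+/// while let Some(batch) = out_rx.recv().await {
+///     // Each batch has between 1 and 10 items; partial batches arrive after the timeout.
+/// }
+/// ```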
+async fn batcher( + timeout: Duration, + mut items_to_batch: Receiver, + output_batches: Sender>, +) -> Result<(), Infallible> { + let mut batch_buffer = Vec::with_capacity(MAX_BATCH_SIZE); + + async fn send_batch( + batch: Vec, + output_batches: &Sender>, + ) -> Option> { + assert!(!batch.is_empty()); + assert!(batch.len() <= MAX_BATCH_SIZE); + if let Err(e) = output_batches.send(batch).await { + info!("output_batches closed: {e:?}"); + return None; + } + Some(Vec::with_capacity(MAX_BATCH_SIZE)) + } + + loop { + if batch_buffer.is_empty() { + match items_to_batch.recv().await { + Some(item) => { + batch_buffer.push(item); + } + None => { + info!("items_to_batch closed"); + break; + } + } + } else { + match tokio::time::timeout(timeout, items_to_batch.recv()).await { + Ok(Some(item)) => { + batch_buffer.push(item); + if batch_buffer.len() >= MAX_BATCH_SIZE { + batch_buffer = if let Some(batch_buffer) = + send_batch::(batch_buffer, &output_batches).await + { + batch_buffer + } else { + break; + }; + } + } + Ok(None) => { + info!("items_to_batch closed"); + break; + } + Err(_) => { + batch_buffer = if let Some(batch_buffer) = + send_batch::(batch_buffer, &output_batches).await + { + batch_buffer + } else { + break; + }; + } + } + } + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + fn setup() -> (Sender, Receiver>) { + let (tx, rx) = tokio::sync::mpsc::channel(1); + let (output_tx, output_rx) = tokio::sync::mpsc::channel(1); + tokio::task::spawn(batcher::<10, _>(Duration::from_millis(100), rx, output_tx)); + (tx, output_rx) + } + + #[tokio::test] + async fn sends_one_after_timeout() { + let (tx, mut output_rx) = setup(); + tx.send(1).await.unwrap(); + assert_eq!( + tokio::time::timeout(Duration::from_millis(150), output_rx.recv()) + .await + .unwrap() + .unwrap(), + vec![1] + ); + } + + #[tokio::test] + async fn sends_batch_instantly() { + let (tx, mut output_rx) = setup(); + for i in 0..10 { + tx.send(i).await.unwrap(); + } + assert_eq!( + tokio::time::timeout(Duration::from_millis(10), output_rx.recv()) + .await + .unwrap() + .unwrap(), + (0..10).collect::>() + ); + } + + #[tokio::test] + async fn sends_two_batches_instantly() { + let (tx, mut output_rx) = setup(); + for i in 0..20 { + tx.send(i).await.unwrap(); + } + assert_eq!( + tokio::time::timeout(Duration::from_millis(10), output_rx.recv()) + .await + .unwrap() + .unwrap(), + (0..10).collect::>() + ); + assert_eq!( + tokio::time::timeout(Duration::from_millis(10), output_rx.recv()) + .await + .unwrap() + .unwrap(), + (10..20).collect::>() + ); + } + + #[tokio::test] + async fn sends_two_batches_second_after_timeout() { + let (tx, mut output_rx) = setup(); + for i in 0..19 { + tx.send(i).await.unwrap(); + } + assert_eq!( + tokio::time::timeout(Duration::from_millis(10), output_rx.recv()) + .await + .unwrap() + .unwrap(), + (0..10).collect::>() + ); + assert_eq!( + tokio::time::timeout(Duration::from_millis(110), output_rx.recv()) + .await + .unwrap() + .unwrap(), + (10..19).collect::>() + ); + } + + #[tokio::test] + async fn timeout_reset_after_each_item() { + let (tx, mut output_rx) = setup(); + for i in 0..10 { + tx.send(i).await.unwrap(); + tokio::time::sleep(Duration::from_millis(50)).await; + } + assert_eq!( + tokio::time::timeout(Duration::from_millis(110), output_rx.recv()) + .await + .unwrap() + .unwrap(), + (0..10).collect::>(), + ); + } + + #[tokio::test] + async fn does_not_send_empty_batches() { + let (tx, mut output_rx) = setup(); + + // Nothing received initially + assert!( + 
tokio::time::timeout(Duration::from_millis(150), output_rx.recv()) + .await + .is_err() + ); + + // Get a batch + tx.send(1).await.unwrap(); + assert_eq!( + tokio::time::timeout(Duration::from_millis(150), output_rx.recv()) + .await + .unwrap() + .unwrap(), + vec![1] + ); + + // Nothing received after + assert!( + tokio::time::timeout(Duration::from_millis(150), output_rx.recv()) + .await + .is_err() + ); + } +} diff --git a/src/services/relay_renewal_job/mod.rs b/src/services/relay_renewal_job/mod.rs new file mode 100644 index 00000000..45a7c388 --- /dev/null +++ b/src/services/relay_renewal_job/mod.rs @@ -0,0 +1,86 @@ +use { + crate::{error::NotifyServerError, metrics::Metrics}, + chrono::Duration, + relay_client::http::Client, + relay_rpc::{auth::ed25519_dalek::Keypair, domain::Topic}, + sqlx::PgPool, + std::{future::Future, sync::Arc}, + tokio::{sync::Mutex, time}, + tracing::{error, info, instrument}, + url::Url, +}; + +mod refresh_topic_subscriptions; +mod register_webhook; + +pub async fn start( + key_agreement_topic: Topic, + notify_url: Url, + keypair: Keypair, + relay_client: Arc, + postgres: PgPool, + metrics: Option, +) -> Result, NotifyServerError> { + let period = Duration::days(1); + + let mut interval = time::interval(period.to_std().expect("Should be able to convert to STD")); + + let renew_all_topics_lock = Arc::new(Mutex::new(false)); + + // We must be able to run the job once on startup or we are non-functional + // Call tick() now so that the first tick() inside the loop actually waits for the period + interval.tick().await; + job( + key_agreement_topic.clone(), + renew_all_topics_lock.clone(), + ¬ify_url, + &keypair, + &relay_client, + &postgres, + metrics.as_ref(), + ) + .await?; + + Ok(async move { + loop { + interval.tick().await; + info!("Running relay renewal job"); + if let Err(e) = job( + key_agreement_topic.clone(), + renew_all_topics_lock.clone(), + ¬ify_url, + &keypair, + &relay_client, + &postgres, + metrics.as_ref(), + ) + .await + { + error!("Error running relay renewal job: {e:?}"); + // TODO metrics + } + } + }) +} + +#[instrument(skip_all)] +async fn job( + key_agreement_topic: Topic, + renew_all_topics_lock: Arc>, + notify_url: &Url, + keypair: &Keypair, + relay_client: &Client, + postgres: &PgPool, + metrics: Option<&Metrics>, +) -> Result<(), NotifyServerError> { + register_webhook::run(notify_url, keypair, relay_client).await?; + refresh_topic_subscriptions::run( + key_agreement_topic, + renew_all_topics_lock, + relay_client, + postgres, + metrics, + ) + .await?; + Ok(()) +} diff --git a/src/services/relay_renewal_job/refresh_topic_subscriptions.rs b/src/services/relay_renewal_job/refresh_topic_subscriptions.rs new file mode 100644 index 00000000..1b288540 --- /dev/null +++ b/src/services/relay_renewal_job/refresh_topic_subscriptions.rs @@ -0,0 +1,133 @@ +use { + crate::{ + error::NotifyServerError, + metrics::Metrics, + model::helpers::{get_project_topics, get_subscriber_topics}, + publish_relay_message::{extend_subscription_ttl, subscribe_relay_topic}, + }, + futures_util::{StreamExt, TryFutureExt, TryStreamExt}, + relay_client::http::Client, + relay_rpc::{domain::Topic, rpc::MAX_SUBSCRIPTION_BATCH_SIZE}, + sqlx::PgPool, + std::{sync::Arc, time::Instant}, + tokio::sync::Mutex, + tracing::{error, info, instrument}, + wc::metrics::otel::Context, +}; + +// TODO change error type +#[instrument(skip_all)] +pub async fn run( + key_agreement_topic: Topic, + renew_all_topics_lock: Arc>, + client: &Client, + postgres: &PgPool, + metrics: Option<&Metrics>, 
+) -> Result<(), NotifyServerError> { + // TODO only renew when the subscription needs it + info!("Resubscribing to all topics"); + let start = Instant::now(); + + let subscriber_topics = get_subscriber_topics(postgres, metrics).await?; + let subscriber_topics_count = subscriber_topics.len(); + info!("subscriber_topics_count: {subscriber_topics_count}"); + + let project_topics = get_project_topics(postgres, metrics).await?; + let project_topics_count = project_topics.len(); + info!("project_topics_count: {project_topics_count}"); + + // TODO: These need to be paginated and streamed from the database directly + // instead of collecting them to a single giant vec. + let topics = [key_agreement_topic] + .into_iter() + .chain(subscriber_topics.into_iter()) + .chain(project_topics.into_iter()) + .collect::>(); + let topics_count = topics.len(); + info!("topics_count: {topics_count}"); + + // Collect each batch into its own vec, since `batch_subscribe` would convert + // them anyway. + let topic_batches = topics + .chunks(MAX_SUBSCRIPTION_BATCH_SIZE) + .map(|chunk| chunk.to_vec()) + .collect::>(); + + // Limit concurrency to avoid overwhelming the relay with requests. + const REQUEST_CONCURRENCY: usize = 200; + + futures_util::stream::iter(topic_batches) + .map(|topic_batch| { + // Map result to an unsized type to avoid allocation when collecting, + // as we don't care about subscription IDs. + client.batch_subscribe_blocking(topic_batch).map_ok(|_| ()) + }) + .buffer_unordered(REQUEST_CONCURRENCY) + .try_collect::>() + .await?; + + let elapsed: u64 = start + .elapsed() + .as_millis() + .try_into() + .expect("No error getting ms of elapsed time"); + info!("resubscribe took {elapsed}ms"); + + // If operation already running, don't start another one + let mut operation_running = renew_all_topics_lock.lock().await; + if !*operation_running { + *operation_running = true; + // Renew all subscription TTLs. + // This can take a long time (e.g. 2 hours), so cannot block server startup. + tokio::task::spawn({ + let client = client.clone(); + let metrics = metrics.cloned(); + let renew_all_topics_lock = renew_all_topics_lock.clone(); + async move { + let client = &client; + let metrics = metrics.as_ref(); + let start = Instant::now(); + let result = futures_util::stream::iter(topics) + .map(|topic| async move { + // Subscribe a second time as the initial subscription above may have expired + subscribe_relay_topic(client, &topic, metrics) + .map_ok(|_| ()) + .and_then(|_| { + // Subscribing only guarantees 5m TTL, so we always need to extend it. + extend_subscription_ttl(client, topic.clone(), metrics) + .map_ok(|_| ()) + }) + .await + }) + // Above we want to resubscribe as quickly as possible so use a high concurrency value + // But here we prefer stability and are OK with a lower value + .buffer_unordered(25) + .try_collect::>() + .await; + let elapsed: u64 = start.elapsed().as_millis().try_into().unwrap(); + if let Err(e) = result { + // An error here is bad, as topics will not have been renewed. + // However, this should be rare and many resubscribes will happen within 30 days so all topics should be renewed eventually. + // With we will be able to guarantee renewal much better. 
+ error!("Failed to renew all topic subscriptions in {elapsed}ms: {e}"); + } else { + info!("Success renewing all topic subscriptions in {elapsed}ms"); + } + *renew_all_topics_lock.lock().await = false; + } + }); + } + + if let Some(metrics) = metrics { + let ctx = Context::current(); + metrics + .subscribed_project_topics + .observe(&ctx, project_topics_count as u64, &[]); + metrics + .subscribed_subscriber_topics + .observe(&ctx, subscriber_topics_count as u64, &[]); + metrics.subscribe_latency.record(&ctx, elapsed, &[]); + } + + Ok(()) +} diff --git a/src/services/relay_renewal_job/register_webhook.rs b/src/services/relay_renewal_job/register_webhook.rs new file mode 100644 index 00000000..60b0d541 --- /dev/null +++ b/src/services/relay_renewal_job/register_webhook.rs @@ -0,0 +1,124 @@ +use { + crate::{services::public_http_server::RELAY_WEBHOOK_ENDPOINT, spec::INCOMING_TAGS}, + relay_client::{ + error::Error, + http::{Client, WatchRegisterRequest}, + }, + relay_rpc::{ + auth::ed25519_dalek::Keypair, + rpc::{WatchStatus, WatchType}, + }, + std::time::Duration, + tracing::instrument, + url::Url, +}; + +#[instrument(skip_all)] +pub async fn run(notify_url: &Url, keypair: &Keypair, client: &Client) -> Result<(), Error> { + client + .watch_register( + WatchRegisterRequest { + service_url: notify_url.to_string(), + webhook_url: notify_url + .join(RELAY_WEBHOOK_ENDPOINT) + .expect("Should be able to join static URLs") + .to_string(), + watch_type: WatchType::Subscriber, + tags: INCOMING_TAGS.to_vec(), + // Alternatively we could not care about the tag, as an incoming message is an incoming message + // tags: (4000..4100).collect(), + statuses: vec![WatchStatus::Queued], + ttl: Duration::from_secs(60 * 60 * 24 * 30), + }, + keypair, + ) + .await?; + Ok(()) +} + +#[cfg(test)] +mod tests { + use { + super::*, + crate::relay_client_helpers::create_http_client, + chrono::Utc, + hyper::StatusCode, + rand::rngs::StdRng, + rand_core::SeedableRng, + relay_rpc::{ + domain::{DecodedClientId, DidKey, ProjectId}, + jwt::VerifyableClaims, + rpc::{ + self, Params, Payload, Response, SuccessfulResponse, WatchAction, + WatchRegisterClaims, WatchRegisterResponse, + }, + }, + wiremock::{ + http::Method, + matchers::{method, path}, + Mock, MockServer, Request, ResponseTemplate, + }, + }; + + #[tokio::test] + async fn register_webhook_30_day_expiration() { + let relay = MockServer::start().await; + Mock::given(method(Method::Post)) + .and(path("/rpc")) + .respond_with(|req: &Request| { + let req = req.body_json::().unwrap(); + ResponseTemplate::new(StatusCode::OK).set_body_json(Payload::Response( + Response::Success(SuccessfulResponse { + id: req.id, + jsonrpc: req.jsonrpc, + result: serde_json::to_value(WatchRegisterResponse { + relay_id: DidKey::from(DecodedClientId::from_key( + &Keypair::generate(&mut StdRng::from_entropy()).public_key(), + )), + }) + .unwrap(), + }), + )) + }) + .mount(&relay) + .await; + let relay_url = relay.uri().parse::().unwrap(); + let notify_url = "https://example.com".parse::().unwrap(); + let keypair = Keypair::generate(&mut StdRng::from_entropy()); + let relay_client = create_http_client( + &keypair, + relay_url, + notify_url.clone(), + ProjectId::generate(), + ) + .unwrap(); + run(¬ify_url, &keypair, &relay_client).await.unwrap(); + let requests = relay.received_requests().await.unwrap(); + assert_eq!(requests.len(), 1); + let request = requests.first().unwrap(); + let request = request.body_json::().unwrap(); + match request.params { + Params::WatchRegister(p) => { + let claims = 
WatchRegisterClaims::try_from_str(&p.register_auth).unwrap(); + assert_eq!( + claims.whu, + notify_url + .join(RELAY_WEBHOOK_ENDPOINT) + .expect("Should be able to join static URLs") + .to_string() + ); + assert_eq!(claims.typ, WatchType::Subscriber); + assert_eq!(claims.act, WatchAction::Register); + assert_eq!(claims.sts, vec![WatchStatus::Queued]); + const LEEWAY: i64 = 2; + let expected_iat = Utc::now().timestamp(); + assert!(claims.basic.iat <= expected_iat); + assert!(claims.basic.iat >= expected_iat - LEEWAY); + let expected_exp = Utc::now().timestamp() + 30 * 24 * 60 * 60; + assert!(claims.basic.exp.unwrap() <= expected_exp); + assert!(claims.basic.exp.unwrap() > expected_exp - LEEWAY); + } + _ => panic!("Expected WatchRegister request, got {:?}", request.params), + } + } +} diff --git a/src/services/watcher_expiration_job.rs b/src/services/watcher_expiration_job.rs index a456d0b9..278f0a93 100644 --- a/src/services/watcher_expiration_job.rs +++ b/src/services/watcher_expiration_job.rs @@ -3,7 +3,7 @@ use { sqlx::PgPool, std::time::Duration, tokio::time, - tracing::{error, info}, + tracing::{error, info, instrument}, }; pub async fn start(postgres: PgPool, metrics: Option) { @@ -14,10 +14,12 @@ pub async fn start(postgres: PgPool, metrics: Option) { info!("Running watcher expiration job"); if let Err(e) = job(&postgres, metrics.as_ref()).await { error!("Error running watcher expiration job: {e:?}"); + // TODO metrics } } } +#[instrument(skip_all)] async fn job(postgres: &PgPool, metrics: Option<&Metrics>) -> sqlx::Result<()> { let count = delete_expired_subscription_watchers(postgres, metrics).await?; info!("Expired {count} watchers"); diff --git a/src/services/websocket_server/mod.rs b/src/services/websocket_server/mod.rs deleted file mode 100644 index b1431bab..00000000 --- a/src/services/websocket_server/mod.rs +++ /dev/null @@ -1,365 +0,0 @@ -use { - crate::{ - error::NotifyServerError, - metrics::{Metrics, RelayIncomingMessageStatus}, - model::helpers::{get_project_topics, get_subscriber_topics}, - publish_relay_message::{extend_subscription_ttl, subscribe_relay_topic}, - relay_client_helpers::create_ws_connect_options, - services::websocket_server::{ - error::RelayMessageError, - handlers::{ - notify_delete, notify_get_notifications, notify_subscribe, notify_update, - notify_watch_subscriptions, - }, - }, - spec::{ - NOTIFY_DELETE_TAG, NOTIFY_GET_NOTIFICATIONS_TAG, NOTIFY_SUBSCRIBE_TAG, - NOTIFY_UPDATE_TAG, NOTIFY_WATCH_SUBSCRIPTIONS_TAG, - }, - state::AppState, - }, - futures_util::{StreamExt, TryFutureExt, TryStreamExt}, - rand::Rng, - relay_client::websocket::{Client, PublishedMessage}, - relay_rpc::{ - domain::{MessageId, Topic}, - rpc::{msg_id::get_message_id, JSON_RPC_VERSION_STR, MAX_SUBSCRIPTION_BATCH_SIZE}, - }, - relay_ws_client::RelayClientEvent, - serde::{Deserialize, Serialize}, - sha2::Sha256, - sqlx::PgPool, - std::{convert::Infallible, sync::Arc, time::Instant}, - tokio::sync::{mpsc::UnboundedReceiver, Mutex}, - tracing::{error, info, instrument, warn}, - wc::metrics::otel::Context, -}; - -pub mod error; -pub mod handlers; -pub mod relay_ws_client; - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct RequestBody { - pub id: MessageId, - pub jsonrpc: String, - pub params: String, -} - -async fn connect(state: &AppState, client: &Client) -> Result<(), NotifyServerError> { - info!("Connecting to relay"); - client - .connect(&create_ws_connect_options( - &state.keypair, - state.config.relay_url.clone(), - state.config.notify_url.clone(), - 
state.config.project_id.clone(), - )?) - .await?; - - resubscribe( - state.notify_keys.key_agreement_topic.clone(), - &state.postgres, - client, - &state.relay_http_client, - state.metrics.as_ref(), - state.renew_all_topics_lock.clone(), - ) - .await?; - - Ok(()) -} - -pub async fn start( - state: Arc, - relay_ws_client: Arc, - mut rx: UnboundedReceiver, -) -> Result { - connect(&state, &relay_ws_client).await?; - loop { - let Some(msg) = rx.recv().await else { - return Err(crate::error::NotifyServerError::RelayClientStopped); - }; - match msg { - relay_ws_client::RelayClientEvent::Message(msg) => { - let state = state.clone(); - let relay_ws_client = relay_ws_client.clone(); - tokio::spawn(async move { handle_msg(msg, &state, &relay_ws_client).await }); - } - relay_ws_client::RelayClientEvent::Error(e) => { - warn!("Received error from relay: {e}"); - while let Err(e) = connect(&state, &relay_ws_client).await { - error!("Error reconnecting to relay: {}", e); - tokio::time::sleep(std::time::Duration::from_secs(1)).await; - } - } - relay_ws_client::RelayClientEvent::Disconnected(e) => { - info!("Received disconnect from relay: {e:?}"); - while let Err(e) = connect(&state, &relay_ws_client).await { - warn!("Error reconnecting to relay: {e}"); - tokio::time::sleep(std::time::Duration::from_secs(1)).await; - } - } - relay_ws_client::RelayClientEvent::Connected => { - info!("Connected to relay"); - } - } - } -} - -#[instrument(skip_all, fields(topic = %msg.topic, tag = %msg.tag, message_id = %get_message_id(&msg.message)))] -async fn handle_msg(msg: PublishedMessage, state: &AppState, client: &Client) { - let start = Instant::now(); - let topic = msg.topic.clone(); - let tag = msg.tag; - info!("Received tag {tag} on topic {topic}"); - - let result = match tag { - NOTIFY_DELETE_TAG => notify_delete::handle(msg, state, client).await, - NOTIFY_SUBSCRIBE_TAG => notify_subscribe::handle(msg, state).await, - NOTIFY_UPDATE_TAG => notify_update::handle(msg, state).await, - NOTIFY_WATCH_SUBSCRIPTIONS_TAG => notify_watch_subscriptions::handle(msg, state).await, - NOTIFY_GET_NOTIFICATIONS_TAG => notify_get_notifications::handle(msg, state).await, - _ => { - info!("Ignored tag {tag} on topic {topic}"); - Ok(()) - } - }; - - let status = if let Err(e) = result { - match e { - RelayMessageError::Client(e) => { - warn!("Relay message client error handling {tag} on topic {topic}: {e}"); - RelayIncomingMessageStatus::ClientError - } - RelayMessageError::Server(e) => { - error!("Relay message server error handling {tag} on topic {topic}: {e}"); - RelayIncomingMessageStatus::ServerError - } - } - } else { - info!("Success processing {tag} on topic {topic}"); - RelayIncomingMessageStatus::Success - }; - - if let Some(metrics) = &state.metrics { - metrics.relay_incoming_message(tag, status, start); - } -} - -#[instrument(skip_all)] -async fn resubscribe( - key_agreement_topic: Topic, - postgres: &PgPool, - client: &Client, - relay_http_client: &Arc, - metrics: Option<&Metrics>, - renew_all_topics_lock: Arc>, -) -> Result<(), NotifyServerError> { - info!("Resubscribing to all topics"); - let start = Instant::now(); - - let subscriber_topics = get_subscriber_topics(postgres, metrics).await?; - let subscriber_topics_count = subscriber_topics.len(); - info!("subscriber_topics_count: {subscriber_topics_count}"); - - let project_topics = get_project_topics(postgres, metrics).await?; - let project_topics_count = project_topics.len(); - info!("project_topics_count: {project_topics_count}"); - - // TODO: These need to be 
paginated and streamed from the database directly - // instead of collecting them to a single giant vec. - let topics = [key_agreement_topic] - .into_iter() - .chain(subscriber_topics.into_iter()) - .chain(project_topics.into_iter()) - .collect::>(); - let topics_count = topics.len(); - info!("topics_count: {topics_count}"); - - // Collect each batch into its own vec, since `batch_subscribe` would convert - // them anyway. - let topic_batches = topics - .chunks(MAX_SUBSCRIPTION_BATCH_SIZE) - .map(|chunk| chunk.to_vec()) - .collect::>(); - - // Limit concurrency to avoid overwhelming the relay with requests. - const REQUEST_CONCURRENCY: usize = 200; - - futures_util::stream::iter(topic_batches) - .map(|topic_batch| { - // Map result to an unsized type to avoid allocation when collecting, - // as we don't care about subscription IDs. - client.batch_subscribe_blocking(topic_batch).map_ok(|_| ()) - }) - .buffer_unordered(REQUEST_CONCURRENCY) - .try_collect::>() - .await?; - - let elapsed: u64 = start.elapsed().as_millis().try_into().unwrap(); - info!("resubscribe took {elapsed}ms"); - - // If operation already running, don't start another one - let mut operation_running = renew_all_topics_lock.lock().await; - if !*operation_running { - *operation_running = true; - // Renew all subscription TTLs. - // This can take a long time (e.g. 2 hours), so cannot block server startup. - tokio::task::spawn({ - let client = client.clone(); - let relay_http_client = relay_http_client.clone(); - let metrics = metrics.cloned(); - let renew_all_topics_lock = renew_all_topics_lock.clone(); - async move { - let client = &client; - let relay_http_client = &relay_http_client; - let metrics = metrics.as_ref(); - let start = Instant::now(); - let result = futures_util::stream::iter(topics) - .map(|topic| async move { - // Subscribe a second time as the initial subscription above may have expired - subscribe_relay_topic(client, &topic, metrics) - .map_ok(|_| ()) - .and_then(|_| { - // Subscribing only guarantees 5m TTL, so we always need to extend it. - extend_subscription_ttl(relay_http_client, topic.clone(), metrics) - .map_ok(|_| ()) - }) - .await - }) - // Above we want to resubscribe as quickly as possible so use a high concurrency value - // But here we prefer stability and are OK with a lower value - .buffer_unordered(25) - .try_collect::>() - .await; - let elapsed: u64 = start.elapsed().as_millis().try_into().unwrap(); - if let Err(e) = result { - // An error here is bad, as topics will not have been renewed. - // However, this should be rare and many resubscribes will happen within 30 days so all topics should be renewed eventually. - // With we will be able to guarantee renewal much better. - error!("Failed to renew all topic subscriptions in {elapsed}ms: {e}"); - } else { - info!("Success renewing all topic subscriptions in {elapsed}ms"); - } - *renew_all_topics_lock.lock().await = false; - } - }); - } - - if let Some(metrics) = metrics { - let ctx = Context::current(); - metrics - .subscribed_project_topics - .observe(&ctx, project_topics_count as u64, &[]); - metrics - .subscribed_subscriber_topics - .observe(&ctx, subscriber_topics_count as u64, &[]); - metrics.subscribe_latency.record(&ctx, elapsed, &[]); - } - - Ok(()) -} - -pub fn decode_key(key: &str) -> Result<[u8; 32], NotifyServerError> { - Ok(hex::decode(key)? - .get(..32) - .ok_or(NotifyServerError::InputTooShortError)? - .try_into()?) 
-} - -pub fn derive_key( - public_key: &x25519_dalek::PublicKey, - private_key: &x25519_dalek::StaticSecret, -) -> Result<[u8; 32], NotifyServerError> { - let shared_key = private_key.diffie_hellman(public_key); - - let derived_key = hkdf::Hkdf::::new(None, shared_key.as_bytes()); - - let mut expanded_key = [0u8; 32]; - derived_key - .expand(b"", &mut expanded_key) - .map_err(NotifyServerError::HkdfInvalidLength)?; - Ok(expanded_key) -} - -#[derive(Serialize, Deserialize, Debug)] -pub struct NotifyRequest { - pub id: u64, - pub jsonrpc: String, - pub method: String, - pub params: T, -} - -impl NotifyRequest { - pub fn new(method: &str, params: T) -> Self { - let id = chrono::Utc::now().timestamp_millis().unsigned_abs(); - let id = id * 1000 + rand::thread_rng().gen_range(100, 1000); - - NotifyRequest { - id, - jsonrpc: JSON_RPC_VERSION_STR.to_owned(), - method: method.to_owned(), - params, - } - } -} - -#[derive(Serialize, Deserialize, Debug)] -pub struct NotifyResponse { - pub id: u64, - pub jsonrpc: String, - pub result: T, -} - -impl NotifyResponse { - pub fn new(id: u64, result: T) -> Self { - NotifyResponse { - id, - jsonrpc: JSON_RPC_VERSION_STR.to_owned(), - result, - } - } -} - -#[derive(Serialize, Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct ResponseAuth { - pub response_auth: String, -} - -#[derive(Serialize, Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct NotifyWatchSubscriptions { - pub watch_subscriptions_auth: String, -} - -#[derive(Serialize, Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct NotifySubscriptionsChanged { - pub subscriptions_changed_auth: String, -} - -#[derive(Serialize, Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct NotifySubscribe { - pub subscription_auth: String, -} - -#[derive(Serialize, Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct NotifyUpdate { - pub update_auth: String, -} - -#[derive(Serialize, Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct NotifyDelete { - pub delete_auth: String, -} - -#[derive(Serialize, Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct AuthMessage { - pub auth: String, -} diff --git a/src/services/websocket_server/relay_ws_client.rs b/src/services/websocket_server/relay_ws_client.rs deleted file mode 100644 index 0266c587..00000000 --- a/src/services/websocket_server/relay_ws_client.rs +++ /dev/null @@ -1,75 +0,0 @@ -use { - relay_client::websocket::ConnectionHandler, - std::sync::Arc, - tokio::sync::mpsc, - tracing::{error, info}, - tungstenite::protocol::CloseFrame, -}; - -pub struct RelayConnectionHandler { - name: &'static str, - tx: mpsc::UnboundedSender, -} - -#[derive(Debug, Clone)] -pub enum RelayClientEvent { - Message(relay_client::websocket::PublishedMessage), - Error(Arc), - Disconnected(Option>), - Connected, -} - -impl RelayConnectionHandler { - pub fn new(name: &'static str, tx: mpsc::UnboundedSender) -> Self { - Self { name, tx } - } -} - -impl ConnectionHandler for RelayConnectionHandler { - fn connected(&mut self) { - info!("[{}] connection open", self.name); - if let Err(e) = self.tx.send(RelayClientEvent::Connected) { - error!("[{}] failed to emit the connection event: {}", self.name, e); - } - } - - fn disconnected(&mut self, frame: Option>) { - info!("[{}] connection closed: frame={frame:?}", self.name); - if let Err(e) = self.tx.send(RelayClientEvent::Disconnected(frame)) { - error!( - "[{}] failed to emit the disconnection event: {}", - self.name, e - ); - } 
- } - - fn message_received(&mut self, message: relay_client::websocket::PublishedMessage) { - info!( - "[{}] inbound message: topic={} message={}", - self.name, message.topic, message.message - ); - if let Err(e) = self.tx.send(RelayClientEvent::Message(message)) { - error!("[{}] failed to emit the message event: {}", self.name, e); - } - } - - fn inbound_error(&mut self, error: relay_client::error::Error) { - info!("[{}] inbound error: {error}", self.name); - if let Err(e) = self.tx.send(RelayClientEvent::Error(Arc::new(error))) { - error!( - "[{}] failed to emit the inbound error event: {}", - self.name, e - ); - } - } - - fn outbound_error(&mut self, error: relay_client::error::Error) { - info!("[{}] outbound error: {error}", self.name); - if let Err(e) = self.tx.send(RelayClientEvent::Error(Arc::new(error))) { - error!( - "[{}] failed to emit the outbound error event: {}", - self.name, e - ); - } - } -} diff --git a/src/spec.rs b/src/spec.rs index 99c4a286..10056b55 100644 --- a/src/spec.rs +++ b/src/spec.rs @@ -28,6 +28,16 @@ pub const NOTIFY_NOOP_TAG: u32 = 4050; pub const NOTIFY_GET_NOTIFICATIONS_TAG: u32 = 4014; pub const NOTIFY_GET_NOTIFICATIONS_RESPONSE_TAG: u32 = 4015; +pub const INCOMING_TAGS: [u32; 7] = [ + NOTIFY_SUBSCRIBE_TAG, + NOTIFY_MESSAGE_RESPONSE_TAG, + NOTIFY_DELETE_TAG, + NOTIFY_UPDATE_TAG, + NOTIFY_WATCH_SUBSCRIPTIONS_TAG, + NOTIFY_SUBSCRIPTIONS_CHANGED_RESPONSE_TAG, + NOTIFY_GET_NOTIFICATIONS_TAG, +]; + // TTLs // https://specs.walletconnect.com/2.0/specs/clients/notify/rpc-methods // https://specs.walletconnect.com/2.0/specs/clients/notify/notify-authentication diff --git a/src/state.rs b/src/state.rs index 9b0e64cb..7f81ce9f 100644 --- a/src/state.rs +++ b/src/state.rs @@ -9,13 +9,19 @@ use { Configuration, }, build_info::BuildInfo, - relay_rpc::auth::{ - cacao::signature::eip1271::blockchain_api::BlockchainApiProvider, ed25519_dalek::Keypair, + relay_client::http::Client, + relay_rpc::{ + auth::{ + cacao::signature::eip1271::blockchain_api::BlockchainApiProvider, + ed25519_dalek::{Keypair, PublicKey}, + }, + domain::{DecodedClientId, DidKey}, + rpc::Receipt, }, serde::{Deserialize, Serialize}, sqlx::PgPool, std::{fmt, sync::Arc}, - tokio::sync::Mutex, + tokio::sync::mpsc::Sender, tracing::info, }; @@ -26,14 +32,14 @@ pub struct AppState { pub metrics: Option, pub postgres: PgPool, pub keypair: Keypair, - pub relay_ws_client: Arc, - pub relay_http_client: Arc, + pub relay_client: Arc, + pub relay_identity: DidKey, pub redis: Option>, pub registry: Arc, pub notify_keys: NotifyKeys, + pub relay_mailbox_clearer_tx: Sender, pub clock: Clock, pub provider: BlockchainApiProvider, - pub renew_all_topics_lock: Arc>, } build_info::build_info!(fn build_info); @@ -46,16 +52,20 @@ impl AppState { postgres: PgPool, keypair: Keypair, keypair_seed: [u8; 32], - relay_ws_client: Arc, - relay_http_client: Arc, + relay_client: Arc, metrics: Option, redis: Option>, registry: Arc, + relay_mailbox_clearer_tx: Sender, clock: Clock, provider: BlockchainApiProvider, ) -> Result { let build_info: &BuildInfo = build_info(); + let relay_identity = DidKey::from(DecodedClientId::from_key( + &PublicKey::from_bytes(&hex::decode(&config.relay_public_key).unwrap()).unwrap(), + )); + let notify_keys = NotifyKeys::new(&config.notify_url, keypair_seed)?; Ok(Self { @@ -65,14 +75,14 @@ impl AppState { metrics, postgres, keypair, - relay_ws_client, - relay_http_client, + relay_client, + relay_identity, redis, registry, notify_keys, + relay_mailbox_clearer_tx, clock, provider, - renew_all_topics_lock: 
Arc::new(Mutex::new(false)), }) } diff --git a/terraform/README.md b/terraform/README.md index c95220ac..0afb5a26 100644 --- a/terraform/README.md +++ b/terraform/README.md @@ -57,6 +57,7 @@ To authenticate, run `terraform login` and follow the instructions. | [project\_id](#input\_project\_id) | The ID of the project to use for the app |
string | n/a | yes |
| [registry\_api\_auth\_token](#input\_registry\_api\_auth\_token) | The auth token for the registry API | string | n/a | yes |
| [registry\_api\_endpoint](#input\_registry\_api\_endpoint) | The endpoint of the registry API | string | n/a | yes |
+| [relay\_public\_key](#input\_relay\_public\_key) | The public key of the relay server obtained from relay.walletconnect.com/public-key | string | n/a | yes |
| [relay\_url](#input\_relay\_url) | The URL of the relay server | string | n/a | yes |
| [webhook\_cloudwatch\_p2](#input\_webhook\_cloudwatch\_p2) | The webhook to send CloudWatch P2 alerts to | string | "" | no |
| [webhook\_prometheus\_p2](#input\_webhook\_prometheus\_p2) | The webhook to send Prometheus P2 alerts to | string | "" | no |
diff --git a/terraform/ecs/README.md index c5e61a84..54830f25 100644 --- a/terraform/ecs/README.md +++ b/terraform/ecs/README.md @@ -63,6 +63,7 @@ This module creates an ECS cluster and an autoscaling group of EC2 instances to
| [redis\_pool\_size](#input\_redis\_pool\_size) | The size of the Redis connection pool | number | 128 | no |
| [registry\_api\_auth\_token](#input\_registry\_api\_auth\_token) | The auth token for the registry API | string | n/a | yes |
| [registry\_api\_endpoint](#input\_registry\_api\_endpoint) | The endpoint of the registry API | string | n/a | yes |
+| [relay\_public\_key](#input\_relay\_public\_key) | The public key of the relay server obtained from relay.walletconnect.com/public-key | string | n/a | yes |
| [relay\_url](#input\_relay\_url) | The URL of the relay server | string | n/a | yes |
| [route53\_zones](#input\_route53\_zones) | The FQDNs to use for the app | map(string) | n/a | yes |
| [route53\_zones\_certificates](#input\_route53\_zones\_certificates) | The ARNs of the ACM certificates to use for HTTPS | map(string) | n/a
| yes | diff --git a/terraform/ecs/cluster.tf b/terraform/ecs/cluster.tf index 0d9de5b4..9393f074 100644 --- a/terraform/ecs/cluster.tf +++ b/terraform/ecs/cluster.tf @@ -72,6 +72,7 @@ resource "aws_ecs_task_definition" "app_task" { { name = "KEYPAIR_SEED", value = var.keypair_seed }, { name = "PROJECT_ID", value = var.project_id }, { name = "RELAY_URL", value = var.relay_url }, + { name = "RELAY_PUBLIC_KEY", value = var.relay_public_key }, { name = "NOTIFY_URL", value = var.notify_url }, { name = "POSTGRES_URL", value = var.postgres_url }, diff --git a/terraform/ecs/variables.tf b/terraform/ecs/variables.tf index 9429e520..f40581d6 100644 --- a/terraform/ecs/variables.tf +++ b/terraform/ecs/variables.tf @@ -124,6 +124,11 @@ variable "relay_url" { type = string } +variable "relay_public_key" { + description = "The public key of the relay server obtained from relay.walletconnect.com/public-key" + type = string +} + variable "notify_url" { description = "The URL of the notify server" type = string diff --git a/terraform/res_application.tf b/terraform/res_application.tf index c3ffaa29..b09c2fbf 100644 --- a/terraform/res_application.tf +++ b/terraform/res_application.tf @@ -59,6 +59,7 @@ module "ecs" { keypair_seed = var.keypair_seed project_id = var.project_id relay_url = var.relay_url + relay_public_key = var.relay_public_key notify_url = var.notify_url ofac_blocked_countries = var.ofac_blocked_countries diff --git a/terraform/variables.tf b/terraform/variables.tf index 5731f829..e8a316d4 100644 --- a/terraform/variables.tf +++ b/terraform/variables.tf @@ -83,6 +83,11 @@ variable "relay_url" { type = string } +variable "relay_public_key" { + description = "The public key of the relay server obtained from relay.walletconnect.com/public-key" + type = string +} + variable "notify_url" { description = "The URL of the notify server" type = string diff --git a/tests/deployment.rs b/tests/deployment.rs index cd5df56e..5d49d245 100644 --- a/tests/deployment.rs +++ b/tests/deployment.rs @@ -1,69 +1,24 @@ use { crate::utils::{ - encode_auth, generate_account, verify_jwt, MockGetRpcUrl, UnregisterIdentityRequestAuth, - JWT_LEEWAY, RELAY_MESSAGE_DELIVERY_TIMEOUT, - }, - base64::Engine, - chacha20poly1305::{ - aead::{generic_array::GenericArray, Aead, OsRng}, - ChaCha20Poly1305, KeyInit, + assert_successful_response, generate_account, generate_identity_key, + http_api::subscribe_topic, + notify_relay_api::{ + accept_notify_message, accept_watch_subscriptions_changed, subscribe, + watch_subscriptions, + }, + sign_cacao, unregister_identity_key, IdentityKeyDetails, RelayClient, }, - chrono::Utc, - data_encoding::BASE64URL, - ed25519_dalek::{SigningKey, VerifyingKey}, - hyper::StatusCode, notify_server::{ - auth::{ - add_ttl, from_jwt, DidWeb, NotifyServerSubscription, SharedClaims, - SubscriptionDeleteRequestAuth, SubscriptionDeleteResponseAuth, SubscriptionRequestAuth, - SubscriptionResponseAuth, SubscriptionUpdateRequestAuth, - SubscriptionUpdateResponseAuth, WatchSubscriptionsChangedRequestAuth, - WatchSubscriptionsRequestAuth, WatchSubscriptionsResponseAuth, STATEMENT_THIS_DOMAIN, - }, - jsonrpc::NotifyPayload, - publish_relay_message::subscribe_relay_topic, - services::{ - public_http_server::handlers::{ - notify_v0::NotifyBody, - subscribe_topic::{SubscribeTopicRequestBody, SubscribeTopicResponseBody}, - }, - websocket_server::{ - decode_key, derive_key, relay_ws_client::RelayClientEvent, NotifyRequest, - NotifyResponse, NotifyWatchSubscriptions, - }, - }, - spec::{ - NOTIFY_DELETE_ACT, 
NOTIFY_DELETE_METHOD, NOTIFY_DELETE_RESPONSE_ACT, - NOTIFY_DELETE_RESPONSE_TAG, NOTIFY_DELETE_TAG, NOTIFY_DELETE_TTL, NOTIFY_MESSAGE_ACT, - NOTIFY_MESSAGE_TAG, NOTIFY_NOOP_TAG, NOTIFY_SUBSCRIBE_ACT, NOTIFY_SUBSCRIBE_METHOD, - NOTIFY_SUBSCRIBE_RESPONSE_ACT, NOTIFY_SUBSCRIBE_RESPONSE_TAG, NOTIFY_SUBSCRIBE_TAG, - NOTIFY_SUBSCRIBE_TTL, NOTIFY_SUBSCRIPTIONS_CHANGED_ACT, - NOTIFY_SUBSCRIPTIONS_CHANGED_TAG, NOTIFY_UPDATE_ACT, NOTIFY_UPDATE_METHOD, - NOTIFY_UPDATE_RESPONSE_ACT, NOTIFY_UPDATE_RESPONSE_TAG, NOTIFY_UPDATE_TAG, - NOTIFY_UPDATE_TTL, NOTIFY_WATCH_SUBSCRIPTIONS_ACT, NOTIFY_WATCH_SUBSCRIPTIONS_METHOD, - NOTIFY_WATCH_SUBSCRIPTIONS_RESPONSE_ACT, NOTIFY_WATCH_SUBSCRIPTIONS_RESPONSE_TAG, - NOTIFY_WATCH_SUBSCRIPTIONS_TAG, NOTIFY_WATCH_SUBSCRIPTIONS_TTL, - }, - types::{encode_scope, Envelope, EnvelopeType0, EnvelopeType1, Notification}, + auth::{CacaoValue, DidWeb, STATEMENT_THIS_DOMAIN}, + rpc::decode_key, + services::public_http_server::handlers::notify_v0::NotifyBody, + types::Notification, utils::topic_from_key, }, - rand::{rngs::StdRng, SeedableRng}, - relay_rpc::{ - auth::{ - cacao::{self, signature::eip191::eip191_bytes}, - ed25519_dalek::Keypair, - }, - domain::{DecodedClientId, ProjectId}, - rpc::msg_id::get_message_id, - }, - serde_json::json, - sha2::Digest, - sha3::Keccak256, - std::{collections::HashSet, env, sync::Arc}, - tokio::sync::broadcast::Receiver, + relay_rpc::domain::ProjectId, + std::{collections::HashSet, env}, url::Url, uuid::Uuid, - x25519_dalek::{PublicKey, StaticSecret}, }; mod utils; @@ -84,12 +39,13 @@ mod utils; // The staging keys server is always used, to avoid unnecessary load on prod server. fn get_vars() -> Vars { - let relay_project_id = env::var("PROJECT_ID").unwrap(); + let relay_project_id = env::var("PROJECT_ID").unwrap().into(); let keys_server_url = "https://staging.keys.walletconnect.com".parse().unwrap(); let notify_prod_project_id = || { env::var("NOTIFY_PROD_PROJECT_ID") .unwrap_or_else(|_| env::var("NOTIFY_PROJECT_ID").unwrap()) + .into() }; let notify_prod_project_secret = || { env::var("NOTIFY_PROD_PROJECT_SECRET") @@ -98,6 +54,7 @@ fn get_vars() -> Vars { let notify_staging_project_id = || { env::var("NOTIFY_STAGING_PROJECT_ID") .unwrap_or_else(|_| env::var("NOTIFY_PROJECT_ID").unwrap()) + .into() }; let notify_staging_project_secret = || { env::var("NOTIFY_STAGING_PROJECT_SECRET") @@ -107,32 +64,32 @@ fn get_vars() -> Vars { let env = std::env::var("ENVIRONMENT").unwrap_or_else(|_| "LOCAL".to_owned()); match env.as_str() { "PROD" => Vars { - notify_url: "https://notify.walletconnect.com".to_owned(), - relay_url: "wss://relay.walletconnect.com".to_owned(), + notify_url: "https://notify.walletconnect.com".parse().unwrap(), + relay_url: "https://relay.walletconnect.com".parse().unwrap(), relay_project_id, notify_project_id: notify_prod_project_id(), notify_project_secret: notify_prod_project_secret(), keys_server_url, }, "STAGING" => Vars { - notify_url: "https://staging.notify.walletconnect.com".to_owned(), - relay_url: "wss://staging.relay.walletconnect.com".to_owned(), + notify_url: "https://staging.notify.walletconnect.com".parse().unwrap(), + relay_url: "https://staging.relay.walletconnect.com".parse().unwrap(), relay_project_id, notify_project_id: notify_staging_project_id(), notify_project_secret: notify_staging_project_secret(), keys_server_url, }, "DEV" => Vars { - notify_url: "https://dev.notify.walletconnect.com".to_owned(), - relay_url: "wss://staging.relay.walletconnect.com".to_owned(), + notify_url: 
"https://dev.notify.walletconnect.com".parse().unwrap(), + relay_url: "https://staging.relay.walletconnect.com".parse().unwrap(), relay_project_id, notify_project_id: notify_prod_project_id(), notify_project_secret: notify_prod_project_secret(), keys_server_url, }, "LOCAL" => Vars { - notify_url: "http://127.0.0.1:3000".to_owned(), - relay_url: "ws://127.0.0.1:8888".to_owned(), + notify_url: "http://127.0.0.1:3000".parse().unwrap(), + relay_url: "http://127.0.0.1:8888".parse().unwrap(), relay_project_id, notify_project_id: notify_prod_project_id(), notify_project_secret: notify_prod_project_secret(), @@ -143,919 +100,161 @@ fn get_vars() -> Vars { } struct Vars { - notify_url: String, - relay_url: String, - relay_project_id: String, - notify_project_id: String, - notify_project_secret: String, - keys_server_url: Url, -} - -pub async fn create_client( + notify_url: Url, relay_url: Url, relay_project_id: ProjectId, - notify_url: Url, -) -> ( - Arc, - Receiver, -) { - let (tx, mut rx) = tokio::sync::broadcast::channel(8); - let (mpsc_tx, mut mpsc_rx) = tokio::sync::mpsc::unbounded_channel(); - tokio::task::spawn(async move { - while let Some(event) = mpsc_rx.recv().await { - let _ = tx.send(event); - } - }); - let connection_handler = - notify_server::services::websocket_server::relay_ws_client::RelayConnectionHandler::new( - "notify-client", - mpsc_tx, - ); - let relay_ws_client = Arc::new(relay_client::websocket::Client::new(connection_handler)); - - let keypair = Keypair::generate(&mut StdRng::from_entropy()); - let opts = notify_server::relay_client_helpers::create_ws_connect_options( - &keypair, - relay_url, - notify_url, - relay_project_id, - ) - .unwrap(); - relay_ws_client.connect(&opts).await.unwrap(); - - // Eat up the "connected" message - _ = rx.recv().await.unwrap(); - - (relay_ws_client, rx) -} - -#[allow(clippy::too_many_arguments)] -async fn watch_subscriptions( - vars: &Vars, - app_domain: Option<&str>, - identity_signing_key: &SigningKey, - identity_did_key: &str, - did_pkh: &str, - relay_ws_client: &relay_client::websocket::Client, - rx: &mut Receiver, -) -> (Vec, [u8; 32]) { - let (key_agreement_key, authentication_key) = { - let did_json_url = Url::parse(&vars.notify_url) - .unwrap() - .join("/.well-known/did.json") - .unwrap(); - let did_json = reqwest::get(did_json_url) - .await - .unwrap() - .json::() // TODO use struct - .await - .unwrap(); - let verification_method = did_json - .get("verificationMethod") - .unwrap() - .as_array() - .unwrap(); - let key_agreement = verification_method - .iter() - .find(|key| { - key.as_object() - .unwrap() - .get("id") - .unwrap() - .as_str() - .unwrap() - .ends_with("wc-notify-subscribe-key") - }) - .unwrap() - .as_object() - .unwrap() - .get("publicKeyJwk") - .unwrap() - .as_object() - .unwrap() - .get("x") - .unwrap() - .as_str() - .unwrap(); - let authentication = verification_method - .iter() - .find(|key| { - key.as_object() - .unwrap() - .get("id") - .unwrap() - .as_str() - .unwrap() - .ends_with("wc-notify-authentication-key") - }) - .unwrap() - .as_object() - .unwrap() - .get("publicKeyJwk") - .unwrap() - .as_object() - .unwrap() - .get("x") - .unwrap() - .as_str() - .unwrap(); - let key_agreement: [u8; 32] = BASE64URL - .decode(key_agreement.as_bytes()) - .unwrap() - .try_into() - .unwrap(); - let authentication: [u8; 32] = BASE64URL - .decode(authentication.as_bytes()) - .unwrap() - .try_into() - .unwrap(); - (key_agreement, authentication) - }; - - let now = Utc::now(); - let subscription_auth = 
WatchSubscriptionsRequestAuth { - shared_claims: SharedClaims { - iat: now.timestamp() as u64, - exp: add_ttl(now, NOTIFY_SUBSCRIBE_TTL).timestamp() as u64, - iss: identity_did_key.to_owned(), - act: NOTIFY_WATCH_SUBSCRIPTIONS_ACT.to_owned(), - aud: DecodedClientId(authentication_key).to_did_key(), - mjv: "0".to_owned(), - }, - ksu: vars.keys_server_url.to_string(), - sub: did_pkh.to_owned(), - app: app_domain.map(|domain| DidWeb::from_domain(domain.to_owned())), - }; - - let message = NotifyRequest::new( - NOTIFY_WATCH_SUBSCRIPTIONS_METHOD, - NotifyWatchSubscriptions { - watch_subscriptions_auth: encode_auth(&subscription_auth, identity_signing_key), - }, - ); - - let secret = StaticSecret::random_from_rng(OsRng); - let public = PublicKey::from(&secret); - - let response_topic_key = - derive_key(&x25519_dalek::PublicKey::from(key_agreement_key), &secret).unwrap(); - let response_topic = topic_from_key(&response_topic_key); - - let envelope = Envelope::::new( - &response_topic_key, - serde_json::to_value(message).unwrap(), - *public.as_bytes(), - ) - .unwrap(); - let message = base64::engine::general_purpose::STANDARD.encode(envelope.to_bytes()); - - let watch_subscriptions_topic = topic_from_key(&key_agreement_key); - relay_ws_client - .publish( - watch_subscriptions_topic, - message, - NOTIFY_WATCH_SUBSCRIPTIONS_TAG, - NOTIFY_WATCH_SUBSCRIPTIONS_TTL, - false, - ) - .await - .unwrap(); - - subscribe_relay_topic(relay_ws_client, &response_topic, None) - .await - .unwrap(); - - let resp = rx.recv().await.unwrap(); - - let RelayClientEvent::Message(msg) = resp else { - panic!("Expected message, got {:?}", resp); - }; - assert_eq!(msg.tag, NOTIFY_WATCH_SUBSCRIPTIONS_RESPONSE_TAG); - - let Envelope:: { sealbox, iv, .. } = Envelope::::from_bytes( - base64::engine::general_purpose::STANDARD - .decode(msg.message.as_bytes()) - .unwrap(), - ) - .unwrap(); - let decrypted_response = ChaCha20Poly1305::new(GenericArray::from_slice(&response_topic_key)) - .decrypt(&iv.into(), chacha20poly1305::aead::Payload::from(&*sealbox)) - .unwrap(); - let response: NotifyResponse = - serde_json::from_slice(&decrypted_response).unwrap(); - - println!( - "received watch_subscriptions_response with id msg.id {} and message_id {} and RPC ID {}", - msg.message_id, - get_message_id(&msg.message), - response.id, - ); - - let response_auth = response - .result - .get("responseAuth") // TODO use structure - .unwrap() - .as_str() - .unwrap(); - let auth = from_jwt::(response_auth).unwrap(); - assert_eq!( - auth.shared_claims.act, - NOTIFY_WATCH_SUBSCRIPTIONS_RESPONSE_ACT - ); - assert_eq!( - auth.shared_claims.iss, - DecodedClientId(authentication_key).to_did_key() - ); - - (auth.sbs, response_topic_key) + notify_project_id: ProjectId, + notify_project_secret: String, + keys_server_url: Url, } -async fn run_test(statement: String, watch_subscriptions_all_domains: bool) { +#[tokio::test] +async fn deployment_integration() { let vars = get_vars(); - let (identity_signing_key, identity_did_key) = { - let keypair = Keypair::generate(&mut StdRng::from_entropy()); - let signing_key = SigningKey::from_bytes(keypair.secret_key().as_bytes()); - let client_id = DecodedClientId::from_key(&keypair.public_key()); - let client_did_key = client_id.to_did_key(); - (signing_key, client_did_key) - }; - let (account_signing_key, account) = generate_account(); - let did_pkh = account.to_did_pkh(); - let app_domain = format!("{}.walletconnect.com", vars.notify_project_id); + let (identity_signing_key, identity_public_key) = 
generate_identity_key(); + let identity_key_details = IdentityKeyDetails { + keys_server_url: vars.keys_server_url, + signing_key: identity_signing_key, + client_id: identity_public_key.clone(), + }; - // Register identity key with keys server - { - let mut cacao = cacao::Cacao { - h: cacao::header::Header { - t: "eip4361".to_owned(), - }, - p: cacao::payload::Payload { - domain: app_domain.to_owned(), - iss: did_pkh.clone(), - statement: Some(statement), - aud: identity_did_key.clone(), - version: cacao::Version::V1, - nonce: "xxxx".to_owned(), // TODO - iat: Utc::now().to_rfc3339(), - exp: None, - nbf: None, - request_id: None, - resources: Some(vec![vars.keys_server_url.to_string()]), - }, - s: cacao::signature::Signature { - t: "".to_owned(), - s: "".to_owned(), - }, - }; - let (signature, recovery): (k256::ecdsa::Signature, _) = account_signing_key - .sign_digest_recoverable(Keccak256::new_with_prefix(eip191_bytes( - &cacao.siwe_message().unwrap(), - ))) - .unwrap(); - let cacao_signature = [&signature.to_bytes()[..], &[recovery.to_byte()]].concat(); - cacao.s.t = "eip191".to_owned(); - cacao.s.s = hex::encode(cacao_signature); - cacao.verify(&MockGetRpcUrl).await.unwrap(); + let project_id = vars.notify_project_id; + let app_domain = DidWeb::from_domain(format!("{project_id}.example.com")); + let (key_agreement, authentication_key, app_client_id) = subscribe_topic( + &project_id, + &vars.notify_project_secret, + app_domain.clone(), + &vars.notify_url, + ) + .await; - let response = reqwest::Client::builder() + assert_successful_response( + reqwest::Client::builder() .build() .unwrap() - .post(vars.keys_server_url.join("/identity").unwrap()) - .header("Content-Type", "application/json") - .body(serde_json::to_string(&json!({"cacao": cacao})).unwrap()) + .post( + identity_key_details + .keys_server_url + .join("/identity") + .unwrap(), + ) + .json(&CacaoValue { + cacao: sign_cacao( + &app_domain, + &account, + STATEMENT_THIS_DOMAIN.to_owned(), + identity_public_key.clone(), + identity_key_details.keys_server_url.to_string(), + &account_signing_key, + ) + .await, + }) .send() .await - .unwrap(); - let status = response.status(); - assert!(status.is_success()); - } - - // ==== watchSubscriptions ==== - // { - // let (relay_ws_client, mut rx) = create_client(&relay_url, &relay_project_id, ¬ify_url).await; - - // let (subs, _) = watch_subscriptions( - // app_domain, - // ¬ify_url, - // &identity_signing_key, - // &identity_did_key, - // &did_pkh, - // &relay_ws_client, - // &mut rx, - // ) - // .await; - - // assert!(subs.is_empty()); - // } - - let (relay_ws_client, mut rx) = create_client( - vars.relay_url.parse().unwrap(), - vars.relay_project_id.clone().into(), - vars.notify_url.parse().unwrap(), + .unwrap(), ) .await; - // ==== subscribe topic ==== - - // Register project - generating subscribe topic - let subscribe_topic_response = reqwest::Client::new() - .post(format!( - "{}/{}/subscribe-topic", - &vars.notify_url, &vars.notify_project_id - )) - .bearer_auth(&vars.notify_project_secret) - .json(&SubscribeTopicRequestBody { - app_domain: app_domain.clone().into(), - }) - .send() - .await - .unwrap(); - assert_eq!(subscribe_topic_response.status(), StatusCode::OK); - let subscribe_topic_response_body = subscribe_topic_response - .json::() - .await - .unwrap(); - - let watch_topic_key = { - let (subs, watch_topic_key) = watch_subscriptions( - &vars, - if watch_subscriptions_all_domains { - None - } else { - Some(&app_domain) - }, - &identity_signing_key, - &identity_did_key, - 
&did_pkh, - &relay_ws_client, - &mut rx, - ) - .await; - - assert!(subs.is_empty()); - - watch_topic_key - }; - - let app_subscribe_public_key = &subscribe_topic_response_body.subscribe_key; - let app_authentication_public_key = &subscribe_topic_response_body.authentication_key; - let dapp_did_key = DecodedClientId( - hex::decode(app_authentication_public_key) - .unwrap() - .as_slice() - .try_into() - .unwrap(), + let mut relay_client = RelayClient::new( + vars.relay_url, + vars.relay_project_id, + vars.notify_url.clone(), ) - .to_did_key(); - - // Get subscribe topic for dapp - let subscribe_topic = topic_from_key(hex::decode(app_subscribe_public_key).unwrap().as_slice()); - - // ---------------------------------------------------- - // SUBSCRIBE WALLET CLIENT TO DAPP THROUGHT NOTIFY - // ---------------------------------------------------- - - // Prepare subscription auth for *wallet* client - // https://github.com/WalletConnect/walletconnect-docs/blob/main/docs/specs/clients/notify/notify-authentication.md#notify-subscription - let notification_type = Uuid::new_v4(); - let notification_types = HashSet::from([notification_type, Uuid::new_v4()]); - let now = Utc::now(); - let subscription_auth = SubscriptionRequestAuth { - shared_claims: SharedClaims { - iat: now.timestamp() as u64, - exp: add_ttl(now, NOTIFY_SUBSCRIBE_TTL).timestamp() as u64, - iss: identity_did_key.clone(), - act: NOTIFY_SUBSCRIBE_ACT.to_owned(), - aud: dapp_did_key.clone(), - mjv: "0".to_owned(), - }, - ksu: vars.keys_server_url.to_string(), - sub: did_pkh.clone(), - scp: encode_scope(¬ification_types), - app: DidWeb::from_domain(app_domain.clone()), - }; - - // Encode the subscription auth - let subscription_auth = encode_auth(&subscription_auth, &identity_signing_key); - - let sub_auth = json!({ "subscriptionAuth": subscription_auth }); - let message = NotifyRequest::new(NOTIFY_SUBSCRIBE_METHOD, sub_auth); + .await; - let subscription_secret = StaticSecret::random_from_rng(OsRng); - let subscription_public = PublicKey::from(&subscription_secret); - let response_topic_key = derive_key( - &x25519_dalek::PublicKey::from(decode_key(app_subscribe_public_key).unwrap()), - &subscription_secret, + let (subs, watch_topic_key, notify_server_client_id) = watch_subscriptions( + &mut relay_client, + vars.notify_url.clone(), + &identity_key_details, + Some(app_domain.clone()), + &account, ) - .unwrap(); - - let cipher = ChaCha20Poly1305::new(GenericArray::from_slice(&response_topic_key)); + .await; + assert!(subs.is_empty()); - let envelope: Envelope = Envelope::::new( - &response_topic_key, - serde_json::to_value(message).unwrap(), - *subscription_public.as_bytes(), + let notification_type = Uuid::new_v4(); + let notification_types = HashSet::from([notification_type]); + let mut relay_client2 = relay_client.clone(); + subscribe( + &mut relay_client, + &account, + &identity_key_details, + key_agreement, + &app_client_id, + app_domain.clone(), + notification_types.clone(), ) - .unwrap(); - let message = base64::engine::general_purpose::STANDARD.encode(envelope.to_bytes()); - - // Get response topic for wallet client and notify communication - let response_topic = topic_from_key(&response_topic_key); - println!("subscription response_topic: {response_topic}"); - - subscribe_relay_topic(&relay_ws_client, &response_topic, None) - .await - .unwrap(); - - // Send subscription request to notify - relay_ws_client - .publish( - subscribe_topic, - message, - NOTIFY_SUBSCRIBE_TAG, - NOTIFY_SUBSCRIBE_TTL, - false, - ) - .await - .unwrap(); - 
- let resp = rx.recv().await.unwrap(); - let RelayClientEvent::Message(msg) = resp else { - panic!("Expected message, got {:?}", resp); - }; - let msg = if msg.tag == NOTIFY_SUBSCRIBE_RESPONSE_TAG { - assert_eq!(msg.tag, NOTIFY_SUBSCRIBE_RESPONSE_TAG); - msg - } else { - println!( - "got additional message with unexpected tag {} msg.id {} and message_id {}", - msg.tag, - msg.message_id, - get_message_id(&msg.message), - ); - let Envelope:: { sealbox, iv, .. } = Envelope::::from_bytes( - base64::engine::general_purpose::STANDARD - .decode(msg.message.as_bytes()) - .unwrap(), - ) - .unwrap(); - let decrypted_response = ChaCha20Poly1305::new(GenericArray::from_slice(&watch_topic_key)) - .decrypt(&iv.into(), chacha20poly1305::aead::Payload::from(&*sealbox)) - .unwrap(); - let response: NotifyResponse = - serde_json::from_slice(&decrypted_response).unwrap(); - println!( - "warn: got additional message with unexpected tag {} msg.id {} and message_id {} RPC ID {}", - msg.tag, - msg.message_id, - get_message_id(&msg.message), - response.id, - ); - - let resp = rx.recv().await.unwrap(); - let RelayClientEvent::Message(msg) = resp else { - panic!("Expected message, got {:?}", resp); - }; - assert_eq!(msg.tag, NOTIFY_SUBSCRIBE_RESPONSE_TAG); - msg - }; - - let Envelope:: { sealbox, iv, .. } = Envelope::::from_bytes( - base64::engine::general_purpose::STANDARD - .decode(msg.message.as_bytes()) - .unwrap(), + .await; + let subs = accept_watch_subscriptions_changed( + &mut relay_client2, + ¬ify_server_client_id, + &identity_key_details, + &account, + watch_topic_key, ) - .unwrap(); - - let decrypted_response = cipher - .decrypt(&iv.into(), chacha20poly1305::aead::Payload::from(&*sealbox)) - .unwrap(); - - let response: NotifyResponse = - serde_json::from_slice(&decrypted_response).unwrap(); - - let response_auth = response - .result - .get("responseAuth") // TODO use structure - .unwrap() - .as_str() - .unwrap(); - let subscribe_response_auth = from_jwt::(response_auth).unwrap(); - assert_eq!( - subscribe_response_auth.shared_claims.act, - NOTIFY_SUBSCRIBE_RESPONSE_ACT - ); - - let notify_key = { - let resp = rx.recv().await.unwrap(); - - let RelayClientEvent::Message(msg) = resp else { - panic!("Expected message, got {:?}", resp); - }; - assert_eq!(msg.tag, NOTIFY_SUBSCRIPTIONS_CHANGED_TAG); - - let Envelope:: { sealbox, iv, .. 
} = Envelope::::from_bytes( - base64::engine::general_purpose::STANDARD - .decode(msg.message.as_bytes()) - .unwrap(), - ) - .unwrap(); - - let decrypted_response = ChaCha20Poly1305::new(GenericArray::from_slice(&watch_topic_key)) - .decrypt(&iv.into(), chacha20poly1305::aead::Payload::from(&*sealbox)) - .unwrap(); - - let response: NotifyRequest = - serde_json::from_slice(&decrypted_response).unwrap(); - - let response_auth = response - .params - .get("subscriptionsChangedAuth") // TODO use structure - .unwrap() - .as_str() - .unwrap(); - let auth = from_jwt::(response_auth).unwrap(); - assert_eq!(auth.shared_claims.act, NOTIFY_SUBSCRIPTIONS_CHANGED_ACT); - assert_eq!(auth.sbs.len(), 1); - let sub = &auth.sbs[0]; - assert_eq!(sub.scope, notification_types); - assert_eq!(sub.account, account); - assert_eq!(sub.app_domain, app_domain); - assert_eq!(&sub.app_authentication_key, &dapp_did_key); - assert_eq!( - DecodedClientId::try_from_did_key(&sub.app_authentication_key) - .unwrap() - .0, - decode_key(app_authentication_public_key).unwrap() - ); - assert_eq!(sub.scope, notification_types); - decode_key(&sub.sym_key).unwrap() - }; - - let notify_topic = topic_from_key(¬ify_key); + .await; + assert_eq!(subs.len(), 1); + let sub = &subs[0]; + assert_eq!(sub.account, account); + assert_eq!(sub.app_domain, app_domain.domain()); - subscribe_relay_topic(&relay_ws_client, ¬ify_topic, None) - .await - .unwrap(); + let notify_key = decode_key(&sub.sym_key).unwrap(); - let msg_4050 = rx.recv().await.unwrap(); - let RelayClientEvent::Message(msg) = msg_4050 else { - panic!("Expected message, got {:?}", msg_4050); - }; - assert_eq!(msg.tag, NOTIFY_NOOP_TAG); + relay_client.subscribe(topic_from_key(¬ify_key)).await; let notification = Notification { r#type: notification_type, title: "title".to_owned(), body: "body".to_owned(), - icon: Some("icon".to_owned()), - url: Some("url".to_owned()), + icon: None, + url: None, }; let notify_body = NotifyBody { notification_id: None, notification: notification.clone(), - accounts: vec![account], - }; - - let _res = reqwest::Client::new() - .post(format!( - "{}/{}/notify", - &vars.notify_url, &vars.notify_project_id - )) - .bearer_auth(&vars.notify_project_secret) - .json(¬ify_body) - .send() - .await - .unwrap(); - - let resp = rx.recv().await.unwrap(); - let RelayClientEvent::Message(msg) = resp else { - panic!("Expected message, got {:?}", resp); + accounts: vec![account.clone()], }; - assert_eq!(msg.tag, NOTIFY_MESSAGE_TAG); - - let cipher = ChaCha20Poly1305::new(GenericArray::from_slice(¬ify_key)); - let Envelope:: { iv, sealbox, .. 
} = Envelope::::from_bytes( - base64::engine::general_purpose::STANDARD - .decode(msg.message.as_bytes()) - .unwrap(), - ) - .unwrap(); - - // TODO: add proper type for that val - let decrypted_notification: NotifyRequest = serde_json::from_slice( - &cipher - .decrypt(&iv.into(), chacha20poly1305::aead::Payload::from(&*sealbox)) + assert_successful_response( + reqwest::Client::new() + .post( + vars.notify_url + .join(&format!("{project_id}/notify")) + .unwrap(), + ) + .bearer_auth(vars.notify_project_secret) + .json(¬ify_body) + .send() + .await .unwrap(), ) - .unwrap(); - - // let received_notification = decrypted_notification.params; - let claims = verify_jwt( - &decrypted_notification.params.message_auth, - &VerifyingKey::from_bytes(&decode_key(app_authentication_public_key).unwrap()).unwrap(), - ) - .unwrap(); - - // https://github.com/WalletConnect/walletconnect-docs/blob/main/docs/specs/clients/notify/notify-authentication.md#notify-message - // TODO: verify issuer - assert_eq!(claims.msg.r#type, notification.r#type); - assert_eq!(claims.msg.title, notification.title); - assert_eq!(claims.msg.body, notification.body); - assert_eq!(claims.msg.icon, "icon"); - assert_eq!(claims.msg.url, "url"); - assert_eq!(claims.sub, did_pkh); - assert!(claims.iat < chrono::Utc::now().timestamp() + JWT_LEEWAY); // TODO remove leeway - assert!(claims.exp > chrono::Utc::now().timestamp() - JWT_LEEWAY); // TODO remove leeway - assert_eq!(claims.app.as_ref(), app_domain); - assert_eq!(claims.sub, did_pkh); - assert_eq!(claims.act, NOTIFY_MESSAGE_ACT); - - // TODO Notify receipt? - // https://github.com/WalletConnect/walletconnect-docs/blob/main/docs/specs/clients/notify/notify-authentication.md#notify-receipt - - // Update subscription - - // Prepare update auth for *wallet* client - // https://github.com/WalletConnect/walletconnect-docs/blob/main/docs/specs/clients/notify/notify-authentication.md#notify-updatelet notification_type = Uuid::new_v4(); - let notification_type = Uuid::new_v4(); - let notification_types = HashSet::from([notification_type, Uuid::new_v4(), Uuid::new_v4()]); - let now = Utc::now(); - let update_auth = SubscriptionUpdateRequestAuth { - shared_claims: SharedClaims { - iat: now.timestamp() as u64, - exp: add_ttl(now, NOTIFY_UPDATE_TTL).timestamp() as u64, - iss: identity_did_key.clone(), - act: NOTIFY_UPDATE_ACT.to_owned(), - aud: dapp_did_key.clone(), - mjv: "0".to_owned(), - }, - ksu: vars.keys_server_url.to_string(), - sub: did_pkh.clone(), - scp: encode_scope(¬ification_types), - app: DidWeb::from_domain(app_domain.clone()), - }; - - // Encode the subscription auth - let update_auth = encode_auth(&update_auth, &identity_signing_key); - - let sub_auth = json!({ "updateAuth": update_auth }); - - let delete_message = NotifyRequest::new(NOTIFY_UPDATE_METHOD, sub_auth); - - let envelope = Envelope::::new(¬ify_key, delete_message).unwrap(); - - let encoded_message = base64::engine::general_purpose::STANDARD.encode(envelope.to_bytes()); - - relay_ws_client - .publish( - notify_topic.clone(), - encoded_message, - NOTIFY_UPDATE_TAG, - NOTIFY_UPDATE_TTL, - false, - ) - .await - .unwrap(); - - // Check for update response - let resp = rx.recv().await.unwrap(); - - let RelayClientEvent::Message(msg) = resp else { - panic!("Expected message, got {:?}", resp); - }; - assert_eq!(msg.tag, NOTIFY_UPDATE_RESPONSE_TAG); + .await; - let Envelope:: { sealbox, iv, .. 
} = Envelope::::from_bytes( - base64::engine::general_purpose::STANDARD - .decode(msg.message.as_bytes()) - .unwrap(), + let (_, claims) = accept_notify_message( + &mut relay_client, + &account, + &authentication_key, + &app_client_id, + &app_domain, + ¬ify_key, ) - .unwrap(); - - let decrypted_response = cipher - .decrypt(&iv.into(), chacha20poly1305::aead::Payload::from(&*sealbox)) - .unwrap(); - - let response: NotifyResponse = - serde_json::from_slice(&decrypted_response).unwrap(); - - let response_auth = response - .result - .get("responseAuth") // TODO use structure - .unwrap() - .as_str() - .unwrap(); - let claims = from_jwt::(response_auth).unwrap(); - // https://github.com/WalletConnect/walletconnect-docs/blob/main/docs/specs/clients/notify/notify-authentication.md#notify-update-response - // TODO verify issuer - assert_eq!(claims.sub, did_pkh); - assert!((claims.shared_claims.iat as i64) < chrono::Utc::now().timestamp() + JWT_LEEWAY); // TODO remove leeway - assert!((claims.shared_claims.exp as i64) > chrono::Utc::now().timestamp() - JWT_LEEWAY); // TODO remove leeway - assert_eq!(claims.app, DidWeb::from_domain(app_domain.clone())); - assert_eq!(claims.shared_claims.aud, identity_did_key); - assert_eq!(claims.shared_claims.act, NOTIFY_UPDATE_RESPONSE_ACT); - - { - let resp = rx.recv().await.unwrap(); - - let RelayClientEvent::Message(msg) = resp else { - panic!("Expected message, got {:?}", resp); - }; - assert_eq!(msg.tag, NOTIFY_SUBSCRIPTIONS_CHANGED_TAG); - - let Envelope:: { sealbox, iv, .. } = Envelope::::from_bytes( - base64::engine::general_purpose::STANDARD - .decode(msg.message.as_bytes()) - .unwrap(), - ) - .unwrap(); - - let decrypted_response = ChaCha20Poly1305::new(GenericArray::from_slice(&watch_topic_key)) - .decrypt(&iv.into(), chacha20poly1305::aead::Payload::from(&*sealbox)) - .unwrap(); - - let response: NotifyRequest = - serde_json::from_slice(&decrypted_response).unwrap(); - - let response_auth = response - .params - .get("subscriptionsChangedAuth") // TODO use structure - .unwrap() - .as_str() - .unwrap(); - let auth = from_jwt::(response_auth).unwrap(); - assert_eq!(auth.shared_claims.act, NOTIFY_SUBSCRIPTIONS_CHANGED_ACT); - assert_eq!(auth.sbs.len(), 1); - let subs = &auth.sbs[0]; - assert_eq!(subs.scope, notification_types); - } - - // Prepare deletion auth for *wallet* client - // https://github.com/WalletConnect/walletconnect-docs/blob/main/docs/specs/clients/notify/notify-authentication.md#notify-delete - let now = Utc::now(); - let delete_auth = SubscriptionDeleteRequestAuth { - shared_claims: SharedClaims { - iat: now.timestamp() as u64, - exp: add_ttl(now, NOTIFY_DELETE_TTL).timestamp() as u64, - iss: identity_did_key.clone(), - aud: dapp_did_key.clone(), - act: NOTIFY_DELETE_ACT.to_owned(), - mjv: "0".to_owned(), - }, - ksu: vars.keys_server_url.to_string(), - sub: did_pkh.clone(), - app: DidWeb::from_domain(app_domain.clone()), - }; - - // Encode the subscription auth - let delete_auth = encode_auth(&delete_auth, &identity_signing_key); - - let sub_auth = json!({ "deleteAuth": delete_auth }); - - let delete_message = NotifyRequest::new(NOTIFY_DELETE_METHOD, sub_auth); - - let envelope = Envelope::::new(¬ify_key, delete_message).unwrap(); - - let encoded_message = base64::engine::general_purpose::STANDARD.encode(envelope.to_bytes()); - - relay_ws_client - .publish( - notify_topic, - encoded_message, - NOTIFY_DELETE_TAG, - NOTIFY_DELETE_TTL, - false, - ) - .await - .unwrap(); - - // Check for delete response - let resp = rx.recv().await.unwrap(); 
- - let RelayClientEvent::Message(msg) = resp else { - panic!("Expected message, got {:?}", resp); - }; - assert_eq!(msg.tag, NOTIFY_DELETE_RESPONSE_TAG); + .await; - let Envelope:: { sealbox, iv, .. } = Envelope::::from_bytes( - base64::engine::general_purpose::STANDARD - .decode(msg.message.as_bytes()) - .unwrap(), + assert_eq!(claims.msg.r#type, notification_type); + assert_eq!(claims.msg.title, "title"); + assert_eq!(claims.msg.body, "body"); + assert_eq!(claims.msg.icon, ""); + assert_eq!(claims.msg.url, ""); + + unregister_identity_key( + identity_key_details.keys_server_url, + &account, + &identity_key_details.signing_key, + &identity_public_key, ) - .unwrap(); - - let decrypted_response = cipher - .decrypt(&iv.into(), chacha20poly1305::aead::Payload::from(&*sealbox)) - .unwrap(); - - let response: NotifyResponse = - serde_json::from_slice(&decrypted_response).unwrap(); - - let response_auth = response - .result - .get("responseAuth") // TODO use structure - .unwrap() - .as_str() - .unwrap(); - let claims = from_jwt::(response_auth).unwrap(); - // https://github.com/WalletConnect/walletconnect-docs/blob/main/docs/specs/clients/notify/notify-authentication.md#notify-delete-response - // TODO verify issuer - assert_eq!(claims.sub, did_pkh); - assert!((claims.shared_claims.iat as i64) < chrono::Utc::now().timestamp() + JWT_LEEWAY); // TODO remove leeway - assert!((claims.shared_claims.exp as i64) > chrono::Utc::now().timestamp() - JWT_LEEWAY); // TODO remove leeway - assert_eq!(claims.app, DidWeb::from_domain(app_domain)); - assert_eq!(claims.shared_claims.aud, identity_did_key); - assert_eq!(claims.shared_claims.act, NOTIFY_DELETE_RESPONSE_ACT); - - { - let resp = rx.recv().await.unwrap(); - - let RelayClientEvent::Message(msg) = resp else { - panic!("Expected message, got {:?}", resp); - }; - assert_eq!(msg.tag, NOTIFY_SUBSCRIPTIONS_CHANGED_TAG); - - let Envelope:: { sealbox, iv, .. 
} = Envelope::::from_bytes( - base64::engine::general_purpose::STANDARD - .decode(msg.message.as_bytes()) - .unwrap(), - ) - .unwrap(); - - let decrypted_response = ChaCha20Poly1305::new(GenericArray::from_slice(&watch_topic_key)) - .decrypt(&iv.into(), chacha20poly1305::aead::Payload::from(&*sealbox)) - .unwrap(); - - let response: NotifyRequest = - serde_json::from_slice(&decrypted_response).unwrap(); - - let response_auth = response - .params - .get("subscriptionsChangedAuth") // TODO use structure - .unwrap() - .as_str() - .unwrap(); - let auth = from_jwt::(response_auth).unwrap(); - assert_eq!(auth.shared_claims.act, NOTIFY_SUBSCRIPTIONS_CHANGED_ACT); - assert!(auth.sbs.is_empty()); - } - - let resp = reqwest::Client::new() - .post(format!( - "{}/{}/notify", - &vars.notify_url, &vars.notify_project_id - )) - .bearer_auth(vars.notify_project_secret) - .json(¬ify_body) - .send() - .await - .unwrap(); - - let resp = resp - .json::() - .await - .unwrap(); - - assert_eq!(resp.not_found.len(), 1); - - let unregister_auth = UnregisterIdentityRequestAuth { - shared_claims: SharedClaims { - iat: Utc::now().timestamp() as u64, - exp: Utc::now().timestamp() as u64 + 3600, - iss: identity_did_key.clone(), - aud: vars.keys_server_url.to_string(), - act: "unregister_identity".to_owned(), - mjv: "0".to_owned(), - }, - pkh: did_pkh, - }; - let unregister_auth = encode_auth(&unregister_auth, &identity_signing_key); - reqwest::Client::new() - .delete(vars.keys_server_url.join("/identity").unwrap()) - .body(serde_json::to_string(&json!({"idAuth": unregister_auth})).unwrap()) - .send() - .await - .unwrap(); - - if let Ok(resp) = tokio::time::timeout(RELAY_MESSAGE_DELIVERY_TIMEOUT, rx.recv()).await { - let resp = resp.unwrap(); - let RelayClientEvent::Message(msg) = resp else { - panic!("Expected message, got {:?}", resp); - }; - println!( - "warn: received extra left-over message with tag {}", - msg.tag - ); - } -} - -#[tokio::test] -async fn notify_this_domain() { - run_test(STATEMENT_THIS_DOMAIN.to_owned(), false).await + .await; } diff --git a/tests/integration.rs b/tests/integration.rs index ff14be83..2335a8b1 100644 --- a/tests/integration.rs +++ b/tests/integration.rs @@ -1,34 +1,30 @@ use { crate::utils::{ - encode_auth, format_eip155_account, generate_eoa, verify_jwt, RelayClient, - UnregisterIdentityRequestAuth, JWT_LEEWAY, RELAY_MESSAGE_DELIVERY_TIMEOUT, + assert_successful_response, encode_auth, format_eip155_account, generate_eoa, + generate_identity_key, + notify_relay_api::{accept_watch_subscriptions_changed, subscribe, watch_subscriptions}, + relay_api::{decode_message, decode_response_message}, + sign_cacao, IdentityKeyDetails, RelayClient, RELAY_MESSAGE_DELIVERY_TIMEOUT, }, async_trait::async_trait, - base64::{engine::general_purpose::STANDARD as BASE64, Engine}, - chacha20poly1305::{aead::Aead, ChaCha20Poly1305, KeyInit}, chrono::{DateTime, Duration, TimeZone, Utc}, - data_encoding::BASE64URL, - ed25519_dalek::{SigningKey, VerifyingKey}, + ed25519_dalek::VerifyingKey, futures::future::BoxFuture, hyper::StatusCode, itertools::Itertools, notify_server::{ auth::{ - add_ttl, encode_authentication_private_key, encode_authentication_public_key, + encode_authentication_private_key, encode_authentication_public_key, encode_subscribe_private_key, encode_subscribe_public_key, from_jwt, CacaoValue, DidWeb, GetSharedClaims, KeyServerResponse, MessageResponseAuth, - NotifyServerSubscription, SharedClaims, SubscriptionDeleteRequestAuth, + NotifyServerSubscription, SubscriptionDeleteRequestAuth, 
SubscriptionDeleteResponseAuth, SubscriptionGetNotificationsRequestAuth, - SubscriptionGetNotificationsResponseAuth, SubscriptionRequestAuth, - SubscriptionResponseAuth, SubscriptionUpdateRequestAuth, - SubscriptionUpdateResponseAuth, WatchSubscriptionsChangedRequestAuth, - WatchSubscriptionsChangedResponseAuth, WatchSubscriptionsRequestAuth, - WatchSubscriptionsResponseAuth, KEYS_SERVER_IDENTITY_ENDPOINT, + SubscriptionGetNotificationsResponseAuth, SubscriptionUpdateRequestAuth, + SubscriptionUpdateResponseAuth, KEYS_SERVER_IDENTITY_ENDPOINT, KEYS_SERVER_IDENTITY_ENDPOINT_PUBLIC_KEY_QUERY, KEYS_SERVER_STATUS_SUCCESS, STATEMENT_ALL_DOMAINS, STATEMENT_THIS_DOMAIN, }, config::Configuration, - jsonrpc::NotifyPayload, model::{ helpers::{ get_notifications_for_subscriber, get_project_by_app_domain, @@ -46,12 +42,14 @@ use { notify_message::NotifyMessage, rate_limit::{self, ClockImpl}, registry::{storage::redis::Redis, RegistryAuthResponse}, + relay_client_helpers::create_http_client, + rpc::{ + decode_key, AuthMessage, NotifyDelete, NotifyRequest, NotifyResponse, NotifyUpdate, + ResponseAuth, + }, services::{ public_http_server::{ handlers::{ - did_json::{ - DidJson, WC_NOTIFY_AUTHENTICATION_KEY_ID, WC_NOTIFY_SUBSCRIBE_KEY_ID, - }, notify_v0::NotifyBody, notify_v1::{ self, notify_rate_limit, subscriber_rate_limit, subscriber_rate_limit_key, @@ -59,7 +57,7 @@ use { }, subscribe_topic::{SubscribeTopicRequestBody, SubscribeTopicResponseBody}, }, - DID_JSON_ENDPOINT, + RELAY_WEBHOOK_ENDPOINT, }, publisher_service::{ helpers::{ @@ -69,55 +67,38 @@ use { }, types::SubscriberNotificationStatus, }, - websocket_server::{ - decode_key, derive_key, AuthMessage, NotifyDelete, NotifyRequest, NotifyResponse, - NotifySubscribe, NotifySubscriptionsChanged, NotifyUpdate, - NotifyWatchSubscriptions, ResponseAuth, - }, + relay_mailbox_clearing_service::BATCH_TIMEOUT, }, spec::{ NOTIFY_DELETE_ACT, NOTIFY_DELETE_METHOD, NOTIFY_DELETE_RESPONSE_ACT, NOTIFY_DELETE_RESPONSE_TAG, NOTIFY_DELETE_TAG, NOTIFY_DELETE_TTL, NOTIFY_GET_NOTIFICATIONS_ACT, NOTIFY_GET_NOTIFICATIONS_RESPONSE_ACT, NOTIFY_GET_NOTIFICATIONS_RESPONSE_TAG, NOTIFY_GET_NOTIFICATIONS_TAG, - NOTIFY_GET_NOTIFICATIONS_TTL, NOTIFY_MESSAGE_ACT, NOTIFY_MESSAGE_METHOD, - NOTIFY_MESSAGE_RESPONSE_ACT, NOTIFY_MESSAGE_RESPONSE_TAG, NOTIFY_MESSAGE_RESPONSE_TTL, - NOTIFY_MESSAGE_TAG, NOTIFY_NOOP_TAG, NOTIFY_SUBSCRIBE_ACT, NOTIFY_SUBSCRIBE_METHOD, - NOTIFY_SUBSCRIBE_RESPONSE_ACT, NOTIFY_SUBSCRIBE_RESPONSE_TAG, NOTIFY_SUBSCRIBE_TAG, - NOTIFY_SUBSCRIBE_TTL, NOTIFY_SUBSCRIPTIONS_CHANGED_ACT, - NOTIFY_SUBSCRIPTIONS_CHANGED_METHOD, NOTIFY_SUBSCRIPTIONS_CHANGED_RESPONE_ACT, - NOTIFY_SUBSCRIPTIONS_CHANGED_RESPONSE_TAG, NOTIFY_SUBSCRIPTIONS_CHANGED_RESPONSE_TTL, - NOTIFY_SUBSCRIPTIONS_CHANGED_TAG, NOTIFY_UPDATE_ACT, NOTIFY_UPDATE_METHOD, + NOTIFY_GET_NOTIFICATIONS_TTL, NOTIFY_MESSAGE_RESPONSE_ACT, NOTIFY_MESSAGE_RESPONSE_TAG, + NOTIFY_MESSAGE_RESPONSE_TTL, NOTIFY_NOOP_TAG, NOTIFY_UPDATE_ACT, NOTIFY_UPDATE_METHOD, NOTIFY_UPDATE_RESPONSE_ACT, NOTIFY_UPDATE_RESPONSE_TAG, NOTIFY_UPDATE_TAG, - NOTIFY_UPDATE_TTL, NOTIFY_WATCH_SUBSCRIPTIONS_ACT, NOTIFY_WATCH_SUBSCRIPTIONS_METHOD, - NOTIFY_WATCH_SUBSCRIPTIONS_RESPONSE_ACT, NOTIFY_WATCH_SUBSCRIPTIONS_RESPONSE_TAG, - NOTIFY_WATCH_SUBSCRIPTIONS_TAG, NOTIFY_WATCH_SUBSCRIPTIONS_TTL, + NOTIFY_UPDATE_TTL, }, - types::{encode_scope, Envelope, EnvelopeType0, EnvelopeType1, Notification}, + types::{encode_scope, Notification}, utils::{get_client_id, is_same_address, topic_from_key}, }, rand::rngs::StdRng, rand_chacha::rand_core::OsRng, 
rand_core::SeedableRng, - // regex::Regex, relay_rpc::{ auth::{ - cacao::{ - self, - header::EIP4361, - signature::eip191::{eip191_bytes, EIP191}, - Cacao, - }, - ed25519_dalek::Keypair, + cacao::Cacao, + ed25519_dalek::{ed25519::signature::Signature, Keypair, Signer}, + }, + domain::{DecodedClientId, DidKey, ProjectId, Topic}, + jwt::{JwtBasicClaims, JwtHeader, VerifyableClaims}, + rpc::{ + SubscriptionData, WatchAction, WatchEventClaims, WatchEventPayload, WatchStatus, + WatchType, WatchWebhookPayload, }, - domain::{DecodedClientId, ProjectId, Topic}, - rpc::SubscriptionData, }, - reqwest::Response, serde::de::DeserializeOwned, serde_json::{json, Value}, - sha2::{digest::generic_array::GenericArray, Digest}, - sha3::Keccak256, sqlx::{ error::BoxDynError, migrate::{Migration, MigrationSource, Migrator}, @@ -139,14 +120,19 @@ use { }, tracing_subscriber::fmt::format::FmtSpan, url::Url, - utils::{generate_account, MockGetRpcUrl}, + utils::{ + generate_account, + notify_relay_api::{accept_notify_message, subscribe_with_mjv}, + relay_api::{publish_jwt_message, TopicEncrptionScheme}, + unregister_identity_key, + }, uuid::Uuid, wiremock::{ http::Method, matchers::{method, path, query_param}, Mock, MockServer, ResponseTemplate, }, - x25519_dalek::{PublicKey, StaticSecret}, + x25519_dalek::PublicKey, }; mod utils; @@ -165,7 +151,7 @@ fn get_vars() -> Vars { project_id: env::var("PROJECT_ID").unwrap(), // No use-case to modify these currently. - relay_url: "ws://127.0.0.1:8888".to_owned(), + relay_url: "http://127.0.0.1:8888".to_owned(), } } @@ -895,7 +881,7 @@ async fn find_free_port(bind_ip: IpAddr) -> u16 { async fn wait_for_socket_addr_to_be(socket_addr: SocketAddr, open: bool) -> Result<(), Elapsed> { use {std::time::Duration, tokio::time}; - time::timeout(Duration::from_secs(3), async { + time::timeout(Duration::from_secs(10), async { while is_socket_addr_available(socket_addr).await != open { time::sleep(Duration::from_millis(10)).await; } @@ -934,6 +920,7 @@ struct NotifyServerContext { redis: Arc, #[allow(dead_code)] // must hold onto MockServer reference or it will shut down registry_mock_server: MockServer, + keypair_seed: String, clock: Arc, } @@ -964,20 +951,29 @@ impl AsyncTestContext for NotifyServerContext { let telemetry_prometheus_port = find_free_port(bind_ip).await; let socket_addr = SocketAddr::from((bind_ip, bind_port)); let notify_url = format!("http://{socket_addr}").parse::().unwrap(); + let relay_url = vars.relay_url.parse::().unwrap(); + let relay_public_key = reqwest::get(relay_url.join("/public-key").unwrap()) + .await + .unwrap() + .text() + .await + .unwrap(); let (_, postgres_url) = get_postgres().await; + let keypair_seed = hex::encode(rand::Rng::gen::<[u8; 10]>(&mut rand::thread_rng())); let clock = Arc::new(MockClock::new(Utc::now())); // TODO reuse the local configuration defaults here let config = Configuration { postgres_url, - postgres_max_connections: 10, + postgres_max_connections: 2, log_level: "WARN,notify_server=DEBUG".to_string(), public_ip: bind_ip, bind_ip, port: bind_port, registry_url: registry_mock_server.uri().parse().unwrap(), - keypair_seed: hex::encode(rand::Rng::gen::<[u8; 10]>(&mut rand::thread_rng())), + keypair_seed: keypair_seed.clone(), project_id: vars.project_id.into(), - relay_url: vars.relay_url.parse().unwrap(), + relay_url, + relay_public_key, notify_url: notify_url.clone(), registry_auth_token: "".to_owned(), auth_redis_addr_read: Some("redis://localhost:6378/0".to_owned()), @@ -1012,6 +1008,7 @@ impl AsyncTestContext for 
NotifyServerContext { let postgres = PgPoolOptions::new() .acquire_timeout(std::time::Duration::from_secs(60)) + .max_connections(1) .connect(&config.postgres_url) .await .unwrap(); @@ -1031,6 +1028,7 @@ impl AsyncTestContext for NotifyServerContext { postgres, redis, registry_mock_server, + keypair_seed, clock, } } @@ -1043,17 +1041,6 @@ impl AsyncTestContext for NotifyServerContext { } } -async fn assert_successful_response(response: Response) -> Response { - let status = response.status(); - if !status.is_success() { - panic!( - "non-successful response {status}: {:?}", - response.text().await - ); - } - response -} - #[test_context(NotifyServerContext)] #[tokio::test] async fn test_get_subscribers_v0(notify_server: &NotifyServerContext) { @@ -2683,177 +2670,8 @@ async fn test_subscribe_topic_conflict(notify_server: &NotifyServerContext) { assert_eq!(subscribe_topic_response.status(), StatusCode::CONFLICT); } -async fn get_notify_did_json( - notify_server_url: &Url, -) -> (x25519_dalek::PublicKey, DecodedClientId) { - let did_json_url = notify_server_url.join(DID_JSON_ENDPOINT).unwrap(); - let did_json = reqwest::get(did_json_url) - .await - .unwrap() - .json::() - .await - .unwrap(); - let key_agreement = &did_json - .verification_method - .iter() - .find(|key| key.id.ends_with(WC_NOTIFY_SUBSCRIBE_KEY_ID)) - .unwrap() - .public_key_jwk - .x; - let authentication = &did_json - .verification_method - .iter() - .find(|key| key.id.ends_with(WC_NOTIFY_AUTHENTICATION_KEY_ID)) - .unwrap() - .public_key_jwk - .x; - let key_agreement: [u8; 32] = BASE64URL - .decode(key_agreement.as_bytes()) - .unwrap() - .try_into() - .unwrap(); - let authentication: [u8; 32] = BASE64URL - .decode(authentication.as_bytes()) - .unwrap() - .try_into() - .unwrap(); - ( - x25519_dalek::PublicKey::from(key_agreement), - // Better approach, but dependency versions conflict right now - // DecodedClientId::from_key( - // ed25519_dalek::VerifyingKey::from_bytes(&authentication).unwrap(), - // ), - DecodedClientId(authentication), - ) -} - -#[derive(Clone)] -struct IdentityKeyDetails { - keys_server_url: Url, - signing_key: SigningKey, - client_id: DecodedClientId, -} - -struct TopicEncryptionSchemeAsymetric { - client_private: x25519_dalek::StaticSecret, - client_public: x25519_dalek::PublicKey, - server_public: x25519_dalek::PublicKey, -} - -enum TopicEncrptionScheme { - Asymetric(TopicEncryptionSchemeAsymetric), - Symetric([u8; 32]), -} - -async fn publish_watch_subscriptions_request( - relay_client: &mut RelayClient, - account: &AccountId, - client_id: &DecodedClientId, - identity_key_details: &IdentityKeyDetails, - encryption_details: TopicEncryptionSchemeAsymetric, - app: Option, -) { - publish_jwt_message( - relay_client, - client_id, - identity_key_details, - &TopicEncrptionScheme::Asymetric(encryption_details), - NOTIFY_WATCH_SUBSCRIPTIONS_TAG, - NOTIFY_WATCH_SUBSCRIPTIONS_TTL, - NOTIFY_WATCH_SUBSCRIPTIONS_ACT, - None, - |shared_claims| { - serde_json::to_value(NotifyRequest::new( - NOTIFY_WATCH_SUBSCRIPTIONS_METHOD, - NotifyWatchSubscriptions { - watch_subscriptions_auth: encode_auth( - &WatchSubscriptionsRequestAuth { - shared_claims, - ksu: identity_key_details.keys_server_url.to_string(), - sub: account.to_did_pkh(), - app, - }, - &identity_key_details.signing_key, - ), - }, - )) - .unwrap() - }, - ) - .await -} - -#[allow(clippy::too_many_arguments)] -async fn publish_subscribe_request( - relay_client: &mut RelayClient, - did_pkh: String, - client_id: &DecodedClientId, - identity_key_details: 
&IdentityKeyDetails, - encryption_details: TopicEncryptionSchemeAsymetric, - app: DidWeb, - notification_types: HashSet, - mjv: String, -) { - publish_jwt_message( - relay_client, - client_id, - identity_key_details, - &TopicEncrptionScheme::Asymetric(encryption_details), - NOTIFY_SUBSCRIBE_TAG, - NOTIFY_SUBSCRIBE_TTL, - NOTIFY_SUBSCRIBE_ACT, - Some(mjv), - |shared_claims| { - serde_json::to_value(NotifyRequest::new( - NOTIFY_SUBSCRIBE_METHOD, - NotifySubscribe { - subscription_auth: encode_auth( - &SubscriptionRequestAuth { - shared_claims, - ksu: identity_key_details.keys_server_url.to_string(), - sub: did_pkh.clone(), - scp: notification_types - .iter() - .map(ToString::to_string) - .collect::>() - .join(" "), - app, - }, - &identity_key_details.signing_key, - ), - }, - )) - .unwrap() - }, - ) - .await -} - -#[allow(clippy::too_many_arguments)] -async fn subscribe( - relay_client: &mut RelayClient, - account: &AccountId, - identity_key_details: &IdentityKeyDetails, - app_key_agreement_key: x25519_dalek::PublicKey, - app_client_id: &DecodedClientId, - app: DidWeb, - notification_types: HashSet, -) { - let _subs = subscribe_with_mjv( - relay_client, - account, - identity_key_details, - app_key_agreement_key, - app_client_id, - app, - notification_types, - "0".to_owned(), - ) - .await; -} - #[allow(clippy::too_many_arguments)] -async fn subscribe_v1( +pub async fn subscribe_v1( relay_client: &mut RelayClient, account: &AccountId, identity_key_details: &IdentityKeyDetails, @@ -2875,156 +2693,7 @@ async fn subscribe_v1( .await } -#[allow(clippy::too_many_arguments)] -async fn subscribe_with_mjv( - relay_client: &mut RelayClient, - account: &AccountId, - identity_key_details: &IdentityKeyDetails, - app_key_agreement_key: x25519_dalek::PublicKey, - app_client_id: &DecodedClientId, - app: DidWeb, - notification_types: HashSet, - mjv: String, -) -> Vec { - let secret = StaticSecret::random_from_rng(OsRng); - let public = PublicKey::from(&secret); - let response_topic_key = derive_key(&app_key_agreement_key, &secret).unwrap(); - let response_topic = topic_from_key(&response_topic_key); - - publish_subscribe_request( - relay_client, - account.to_did_pkh(), - app_client_id, - identity_key_details, - TopicEncryptionSchemeAsymetric { - client_private: secret, - client_public: public, - server_public: app_key_agreement_key, - }, - app, - notification_types, - mjv, - ) - .await; - - // https://walletconnect.slack.com/archives/C03SMNKLPU0/p1704449850496039?thread_ts=1703984667.223199&cid=C03SMNKLPU0 - tokio::time::sleep(std::time::Duration::from_secs(1)).await; - - relay_client.subscribe(response_topic.clone()).await; - - let msg = relay_client - .accept_message(NOTIFY_SUBSCRIBE_RESPONSE_TAG, &response_topic) - .await; - - let (_id, auth) = decode_response_message::(msg, &response_topic_key); - assert_eq!(auth.shared_claims.act, NOTIFY_SUBSCRIBE_RESPONSE_ACT); - assert_eq!(auth.shared_claims.iss, app_client_id.to_did_key()); - assert_eq!( - auth.shared_claims.aud, - identity_key_details.client_id.to_did_key() - ); - assert_eq!(auth.sub, account.to_did_pkh()); - - auth.sbs -} - -#[allow(clippy::too_many_arguments)] -async fn publish_jwt_message( - relay_client: &mut RelayClient, - client_id: &DecodedClientId, - identity_key_details: &IdentityKeyDetails, - encryption_details: &TopicEncrptionScheme, - tag: u32, - ttl: std::time::Duration, - act: &str, - mjv: Option, - make_message: impl FnOnce(SharedClaims) -> serde_json::Value, -) { - fn make_shared_claims( - now: DateTime, - ttl: std::time::Duration, - 
act: &str, - client_id: &DecodedClientId, - mjv: Option, - identity_key_details: &IdentityKeyDetails, - ) -> SharedClaims { - SharedClaims { - iat: now.timestamp() as u64, - exp: add_ttl(now, ttl).timestamp() as u64, - iss: identity_key_details.client_id.to_did_key(), - act: act.to_owned(), - aud: client_id.to_did_key(), - mjv: mjv.unwrap_or_else(|| "0".to_owned()), - } - } - - let now = Utc::now(); - - let message = make_message(make_shared_claims( - now, - ttl, - act, - client_id, - mjv, - identity_key_details, - )); - - let (envelope, topic) = match encryption_details { - TopicEncrptionScheme::Asymetric(TopicEncryptionSchemeAsymetric { - client_private: client_secret, - client_public, - server_public, - }) => { - let response_topic_key = derive_key(server_public, client_secret).unwrap(); - ( - Envelope::::new( - &response_topic_key, - message, - *client_public.as_bytes(), - ) - .unwrap() - .to_bytes(), - topic_from_key(server_public.as_bytes()), - ) - } - TopicEncrptionScheme::Symetric(sym_key) => ( - Envelope::::new(sym_key, message) - .unwrap() - .to_bytes(), - topic_from_key(sym_key), - ), - }; - - let message = BASE64.encode(envelope); - - relay_client.publish(topic, message, tag, ttl).await; -} - -fn decode_message(msg: SubscriptionData, key: &[u8; 32]) -> T -where - T: DeserializeOwned, -{ - let Envelope:: { sealbox, iv, .. } = - Envelope::::from_bytes(BASE64.decode(msg.message.as_bytes()).unwrap()) - .unwrap(); - let decrypted_response = ChaCha20Poly1305::new(GenericArray::from_slice(key)) - .decrypt(&iv.into(), chacha20poly1305::aead::Payload::from(&*sealbox)) - .unwrap(); - serde_json::from_slice::(&decrypted_response).unwrap() -} - -fn decode_response_message(msg: SubscriptionData, key: &[u8; 32]) -> (u64, T) -where - T: GetSharedClaims + DeserializeOwned, -{ - let response = decode_message::>(msg, key); - ( - response.id, - from_jwt::(&response.result.response_auth).unwrap(), - ) -} - -fn decode_auth_message(msg: SubscriptionData, key: &[u8; 32]) -> (u64, T) +pub fn decode_auth_message(msg: SubscriptionData, key: &[u8; 32]) -> (u64, T) where T: GetSharedClaims + DeserializeOwned, { @@ -3032,139 +2701,6 @@ where (response.id, from_jwt::(&response.result.auth).unwrap()) } -#[allow(clippy::too_many_arguments)] -async fn watch_subscriptions( - relay_client: &mut RelayClient, - notify_server_url: Url, - identity_key_details: &IdentityKeyDetails, - app_domain: Option, - account: &AccountId, -) -> (Vec, [u8; 32], DecodedClientId) { - let (key_agreement_key, client_id) = get_notify_did_json(¬ify_server_url).await; - - let secret = StaticSecret::random_from_rng(OsRng); - let public = PublicKey::from(&secret); - - let response_topic_key = derive_key(&key_agreement_key, &secret).unwrap(); - let response_topic = topic_from_key(&response_topic_key); - - publish_watch_subscriptions_request( - relay_client, - account, - &client_id, - identity_key_details, - TopicEncryptionSchemeAsymetric { - client_private: secret, - client_public: public, - server_public: key_agreement_key, - }, - app_domain, - ) - .await; - - relay_client.subscribe(response_topic.clone()).await; - - let msg = relay_client - .accept_message(NOTIFY_WATCH_SUBSCRIPTIONS_RESPONSE_TAG, &response_topic) - .await; - - let (_id, auth) = - decode_response_message::(msg, &response_topic_key); - assert_eq!( - auth.shared_claims.act, - NOTIFY_WATCH_SUBSCRIPTIONS_RESPONSE_ACT - ); - assert_eq!(auth.shared_claims.iss, client_id.to_did_key()); - assert_eq!( - auth.shared_claims.aud, - identity_key_details.client_id.to_did_key() - ); - 
assert_eq!(auth.sub, account.to_did_pkh()); - - (auth.sbs, response_topic_key, client_id) -} - -async fn publish_subscriptions_changed_response( - relay_client: &mut RelayClient, - did_pkh: &AccountId, - client_id: &DecodedClientId, - identity_key_details: &IdentityKeyDetails, - sym_key: [u8; 32], - id: u64, -) { - publish_jwt_message( - relay_client, - client_id, - identity_key_details, - &TopicEncrptionScheme::Symetric(sym_key), - NOTIFY_SUBSCRIPTIONS_CHANGED_RESPONSE_TAG, - NOTIFY_SUBSCRIPTIONS_CHANGED_RESPONSE_TTL, - NOTIFY_SUBSCRIPTIONS_CHANGED_RESPONE_ACT, - None, - |shared_claims| { - serde_json::to_value(NotifyResponse::new( - id, - ResponseAuth { - response_auth: encode_auth( - &WatchSubscriptionsChangedResponseAuth { - shared_claims, - ksu: identity_key_details.keys_server_url.to_string(), - sub: did_pkh.to_did_pkh(), - }, - &identity_key_details.signing_key, - ), - }, - )) - .unwrap() - }, - ) - .await -} - -#[allow(clippy::too_many_arguments)] -async fn accept_watch_subscriptions_changed( - relay_client: &mut RelayClient, - notify_server_client_id: &DecodedClientId, - identity_key_details: &IdentityKeyDetails, - account: &AccountId, - watch_topic_key: [u8; 32], -) -> Vec { - let msg = relay_client - .accept_message( - NOTIFY_SUBSCRIPTIONS_CHANGED_TAG, - &topic_from_key(&watch_topic_key), - ) - .await; - - let request = - decode_message::>(msg, &watch_topic_key); - assert_eq!(request.method, NOTIFY_SUBSCRIPTIONS_CHANGED_METHOD); - let auth = from_jwt::( - &request.params.subscriptions_changed_auth, - ) - .unwrap(); - - assert_eq!(auth.shared_claims.act, NOTIFY_SUBSCRIPTIONS_CHANGED_ACT); - assert_eq!(auth.shared_claims.iss, notify_server_client_id.to_did_key()); - assert_eq!( - auth.shared_claims.aud, - identity_key_details.client_id.to_did_key() - ); - assert_eq!(auth.sub, account.to_did_pkh()); - - publish_subscriptions_changed_response( - relay_client, - account, - notify_server_client_id, - identity_key_details, - watch_topic_key, - request.id, - ) - .await; - - auth.sbs -} - async fn publish_notify_message_response( relay_client: &mut RelayClient, account: &AccountId, @@ -3204,38 +2740,6 @@ async fn publish_notify_message_response( .await } -#[allow(clippy::too_many_arguments)] -async fn accept_notify_message( - client: &mut RelayClient, - account: &AccountId, - app_authentication: &VerifyingKey, - app_client_id: &DecodedClientId, - app_domain: &DidWeb, - notify_key: &[u8; 32], -) -> (u64, NotifyMessage) { - let msg = client - .accept_message(NOTIFY_MESSAGE_TAG, &topic_from_key(notify_key)) - .await; - - let request = decode_message::>(msg, notify_key); - assert_eq!(request.method, NOTIFY_MESSAGE_METHOD); - - let claims = verify_jwt(&request.params.message_auth, app_authentication).unwrap(); - - assert_eq!(claims.iss, app_client_id.to_did_key()); - assert_eq!(claims.sub, account.to_did_pkh()); - assert!(claims.iat < chrono::Utc::now().timestamp() + JWT_LEEWAY); // TODO remove leeway - assert!(claims.exp > chrono::Utc::now().timestamp() - JWT_LEEWAY); // TODO remove leeway - assert_eq!(claims.app.as_ref(), app_domain.domain()); // bug: https://github.com/WalletConnect/notify-server/issues/251 - assert_eq!(claims.act, NOTIFY_MESSAGE_ACT); - assert!(is_same_address( - &AccountId::from_did_pkh(&claims.sub).unwrap(), - account - )); - - (request.id, claims) -} - #[allow(clippy::too_many_arguments)] async fn accept_and_respond_to_notify_message( relay_client: &mut RelayClient, @@ -3609,55 +3113,6 @@ async fn get_notifications( auth.result } -fn generate_identity_key() -> 
(SigningKey, DecodedClientId) { - let keypair = Keypair::generate(&mut StdRng::from_entropy()); - let signing_key = SigningKey::from_bytes(keypair.secret_key().as_bytes()); - let client_id = DecodedClientId::from_key(&keypair.public_key()); - (signing_key, client_id) -} - -async fn sign_cacao( - app_domain: &DidWeb, - account: &AccountId, - statement: String, - identity_public_key: DecodedClientId, - keys_server_url: String, - account_signing_key: &k256::ecdsa::SigningKey, -) -> cacao::Cacao { - let mut cacao = cacao::Cacao { - h: cacao::header::Header { - t: EIP4361.to_owned(), - }, - p: cacao::payload::Payload { - domain: app_domain.domain().to_owned(), - iss: account.to_did_pkh(), - statement: Some(statement), - aud: identity_public_key.to_did_key(), - version: cacao::Version::V1, - nonce: hex::encode(rand::Rng::gen::<[u8; 10]>(&mut rand::thread_rng())), - iat: Utc::now().to_rfc3339(), - exp: None, - nbf: None, - request_id: None, - resources: Some(vec![keys_server_url]), - }, - s: cacao::signature::Signature { - t: "".to_owned(), - s: "".to_owned(), - }, - }; - let (signature, recovery): (k256::ecdsa::Signature, _) = account_signing_key - .sign_digest_recoverable(Keccak256::new_with_prefix(eip191_bytes( - &cacao.siwe_message().unwrap(), - ))) - .unwrap(); - let cacao_signature = [&signature.to_bytes()[..], &[recovery.to_byte()]].concat(); - cacao.s.t = EIP191.to_owned(); - cacao.s.s = hex::encode(cacao_signature); - cacao.verify(&MockGetRpcUrl).await.unwrap(); - cacao -} - async fn subscribe_topic( project_id: &ProjectId, app_domain: DidWeb, @@ -3667,59 +3122,8 @@ async fn subscribe_topic( ed25519_dalek::VerifyingKey, DecodedClientId, ) { - let response = assert_successful_response( - reqwest::Client::new() - .post( - notify_server_url - .join(&format!("/{project_id}/subscribe-topic",)) - .unwrap(), - ) - .bearer_auth(Uuid::new_v4()) - .json(&SubscribeTopicRequestBody { - app_domain: app_domain.into_domain(), - }) - .send() - .await - .unwrap(), - ) - .await - .json::() - .await - .unwrap(); - - let authentication = decode_key(&response.authentication_key).unwrap(); - let key_agreement = decode_key(&response.subscribe_key).unwrap(); - - let key_agreement = x25519_dalek::PublicKey::from(key_agreement); - let authentication = ed25519_dalek::VerifyingKey::from_bytes(&authentication).unwrap(); - let client_id = get_client_id(&authentication); - (key_agreement, authentication, client_id) -} - -async fn unregister_identity_key( - keys_server_url: Url, - account: &AccountId, - identity_signing_key: &SigningKey, - identity_did_key: &DecodedClientId, -) { - let unregister_auth = UnregisterIdentityRequestAuth { - shared_claims: SharedClaims { - iat: Utc::now().timestamp() as u64, - exp: Utc::now().timestamp() as u64 + 3600, - iss: identity_did_key.to_did_key(), - aud: keys_server_url.to_string(), - act: "unregister_identity".to_owned(), - mjv: "0".to_owned(), - }, - pkh: account.to_did_pkh(), - }; - let unregister_auth = encode_auth(&unregister_auth, identity_signing_key); - reqwest::Client::new() - .delete(keys_server_url.join("/identity").unwrap()) - .body(serde_json::to_string(&json!({"idAuth": unregister_auth})).unwrap()) - .send() + utils::http_api::subscribe_topic(project_id, Uuid::new_v4(), app_domain, notify_server_url) .await - .unwrap(); } #[test_context(NotifyServerContext)] @@ -9454,3 +8858,284 @@ async fn test_duplicate_address_migration() { ) .await; } + +#[test_context(NotifyServerContext)] +#[tokio::test] +async fn relay_webhook_rejects_invalid_jwt(notify_server: 
&NotifyServerContext) { + let response = reqwest::Client::new() + .post(notify_server.url.join(RELAY_WEBHOOK_ENDPOINT).unwrap()) + .json(&WatchWebhookPayload { + event_auth: vec!["".to_string()], + }) + .send() + .await + .unwrap(); + let status = response.status(); + if status != StatusCode::UNPROCESSABLE_ENTITY { + panic!( + "expected unprocessable entity response, got {status}: {:?}", + response.text().await + ); + } + let body = response.json::().await.unwrap(); + assert_eq!( + body, + json!({ + "error": "Could not parse watch event claims: Invalid format", + }) + ); +} + +#[test_context(NotifyServerContext)] +#[tokio::test] +async fn relay_webhook_rejects_wrong_aud(notify_server: &NotifyServerContext) { + let webhook_url = notify_server.url.join(RELAY_WEBHOOK_ENDPOINT).unwrap(); + let keypair = Keypair::generate(&mut StdRng::from_entropy()); + let payload = WatchWebhookPayload { + event_auth: vec![WatchEventClaims { + basic: JwtBasicClaims { + iss: DidKey::from(DecodedClientId::from_key(&keypair.public_key())), + aud: "example.com".to_owned(), + // sub: DecodedClientId::from_key(¬ify_server.keypair.public_key()).to_did_key(), + sub: "".to_string(), + iat: chrono::Utc::now().timestamp(), + exp: Some(chrono::Utc::now().timestamp() + 60), + }, + act: WatchAction::WatchEvent, + typ: WatchType::Subscriber, + whu: webhook_url.to_string(), + evt: WatchEventPayload { + message_id: serde_json::from_str("0").unwrap(), + status: WatchStatus::Queued, + topic: Topic::generate(), + message: "message".to_owned().into(), + published_at: 0, + tag: 0, + }, + } + .encode(&keypair) + .unwrap()], + }; + let response = reqwest::Client::new() + .post(webhook_url) + .json(&payload) + .send() + .await + .unwrap(); + let status = response.status(); + if status != StatusCode::UNPROCESSABLE_ENTITY { + panic!( + "expected unprocessable entity response, got {status}: {:?}", + response.text().await + ); + } + let body = response.json::().await.unwrap(); + assert_eq!( + body, + json!({ + "error": "Could not verify watch event: Invalid audience", + }) + ); +} + +#[test_context(NotifyServerContext)] +#[tokio::test] +async fn relay_webhook_rejects_invalid_signature(notify_server: &NotifyServerContext) { + let webhook_url = notify_server.url.join(RELAY_WEBHOOK_ENDPOINT).unwrap(); + let keypair1 = Keypair::generate(&mut StdRng::from_entropy()); + let keypair2 = Keypair::generate(&mut StdRng::from_entropy()); + let claims = WatchEventClaims { + basic: JwtBasicClaims { + iss: DidKey::from(DecodedClientId::from_key(&keypair1.public_key())), + aud: notify_server.url.to_string(), + // sub: DecodedClientId::from_key(¬ify_server.keypair.public_key()).to_did_key(), + sub: "".to_string(), + iat: chrono::Utc::now().timestamp(), + exp: Some(chrono::Utc::now().timestamp() + 60), + }, + act: WatchAction::WatchEvent, + typ: WatchType::Subscriber, + whu: webhook_url.to_string(), + evt: WatchEventPayload { + message_id: serde_json::from_str("0").unwrap(), + status: WatchStatus::Queued, + topic: Topic::generate(), + message: "message".to_owned().into(), + published_at: 0, + tag: 0, + }, + }; + let event_auth = { + let encoder = &data_encoding::BASE64URL_NOPAD; + let header = encoder.encode( + serde_json::to_string(&JwtHeader::default()) + .unwrap() + .as_bytes(), + ); + let claims = encoder.encode(serde_json::to_string(&claims).unwrap().as_bytes()); + let message = format!("{header}.{claims}"); + let signature = encoder.encode(keypair2.sign(message.as_bytes()).as_bytes()); + format!("{message}.{signature}") + }; + let payload = 
WatchWebhookPayload { + event_auth: vec![event_auth], + }; + let response = reqwest::Client::new() + .post(webhook_url) + .json(&payload) + .send() + .await + .unwrap(); + let status = response.status(); + if status != StatusCode::UNPROCESSABLE_ENTITY { + panic!( + "expected unprocessable entity response, got {status}: {:?}", + response.text().await + ); + } + let body = response.json::().await.unwrap(); + assert_eq!( + body, + json!({ + "error": "Could not parse watch event claims: Invalid signature", + }) + ); +} + +#[test_context(NotifyServerContext)] +#[tokio::test] +async fn relay_webhook_rejects_wrong_iss(notify_server: &NotifyServerContext) { + let webhook_url = notify_server.url.join(RELAY_WEBHOOK_ENDPOINT).unwrap(); + let keypair = Keypair::generate(&mut StdRng::from_entropy()); + let payload = WatchWebhookPayload { + event_auth: vec![WatchEventClaims { + basic: JwtBasicClaims { + iss: DidKey::from(DecodedClientId::from_key(&keypair.public_key())), + aud: notify_server.url.to_string(), + // sub: DecodedClientId::from_key(¬ify_server.keypair.public_key()).to_did_key(), + sub: "".to_string(), + iat: chrono::Utc::now().timestamp(), + exp: Some(chrono::Utc::now().timestamp() + 60), + }, + act: WatchAction::WatchEvent, + typ: WatchType::Subscriber, + whu: webhook_url.to_string(), + evt: WatchEventPayload { + message_id: serde_json::from_str("0").unwrap(), + status: WatchStatus::Queued, + topic: Topic::generate(), + message: "message".to_owned().into(), + published_at: 0, + tag: 0, + }, + } + .encode(&keypair) + .unwrap()], + }; + let response = reqwest::Client::new() + .post(webhook_url) + .json(&payload) + .send() + .await + .unwrap(); + let status = response.status(); + if status != StatusCode::UNPROCESSABLE_ENTITY { + panic!( + "expected unprocessable entity response, got {status}: {:?}", + response.text().await + ); + } + let body = response.json::().await.unwrap(); + assert_eq!( + body, + json!({ + "error": "JWT has wrong issuer", + }) + ); +} + +// TODO test wrong sub gives error +// TODO test wrong act gives error +// TODO test wrong typ gives error +// TODO test wrong whu gives error +// TODO test wrong status gives error + +#[test_context(NotifyServerContext)] +#[tokio::test] +async fn batch_receive_called(notify_server: &NotifyServerContext) { + let (account_signing_key, account) = generate_account(); + + let keys_server = MockServer::start().await; + let keys_server_url = keys_server.uri().parse::().unwrap(); + + let (identity_signing_key, identity_public_key) = generate_identity_key(); + let identity_key_details = IdentityKeyDetails { + keys_server_url, + signing_key: identity_signing_key, + client_id: identity_public_key.clone(), + }; + + let project_id1 = ProjectId::generate(); + let app_domain = DidWeb::from_domain(format!("{project_id1}.example.com")); + let (key_agreement, _authentication1, app_client_id) = + subscribe_topic(&project_id1, app_domain.clone(), ¬ify_server.url).await; + + register_mocked_identity_key( + &keys_server, + identity_public_key.clone(), + sign_cacao( + &app_domain, + &account, + STATEMENT_THIS_DOMAIN.to_owned(), + identity_public_key.clone(), + identity_key_details.keys_server_url.to_string(), + &account_signing_key, + ) + .await, + ) + .await; + + let vars = get_vars(); + let mut relay_client = RelayClient::new( + vars.relay_url.parse().unwrap(), + vars.project_id.clone().into(), + notify_server.url.clone(), + ) + .await; + + subscribe( + &mut relay_client, + &account, + &identity_key_details, + key_agreement, + &app_client_id, + 
app_domain.clone(), + HashSet::from([Uuid::new_v4()]), + ) + .await; + + tokio::time::sleep(BATCH_TIMEOUT + std::time::Duration::from_secs(1)).await; + + // Cannot poll because .fetch() also removes the messages + + let notify_server_relay_client = { + let keypair_seed = + decode_key(&sha256::digest(notify_server.keypair_seed.as_bytes())).unwrap(); + let keypair = Keypair::generate(&mut StdRng::from_seed(keypair_seed)); + + create_http_client( + &keypair, + vars.relay_url.parse().unwrap(), + notify_server.url.clone(), + vars.project_id.into(), + ) + .unwrap() + }; + + let response = notify_server_relay_client + .fetch(topic_from_key(key_agreement.as_bytes())) + .await + .unwrap(); + println!("fetch response: {response:?}"); + assert_eq!(response.messages.len(), 0); +} diff --git a/tests/utils/http_api.rs b/tests/utils/http_api.rs new file mode 100644 index 00000000..e119d895 --- /dev/null +++ b/tests/utils/http_api.rs @@ -0,0 +1,105 @@ +use { + super::assert_successful_response, + data_encoding::BASE64URL, + notify_server::{ + auth::DidWeb, + rpc::decode_key, + services::public_http_server::{ + handlers::{ + did_json::{DidJson, WC_NOTIFY_AUTHENTICATION_KEY_ID, WC_NOTIFY_SUBSCRIBE_KEY_ID}, + subscribe_topic::{SubscribeTopicRequestBody, SubscribeTopicResponseBody}, + }, + DID_JSON_ENDPOINT, + }, + utils::get_client_id, + }, + relay_rpc::domain::{DecodedClientId, ProjectId}, + std::fmt::Display, + url::Url, +}; + +pub async fn subscribe_topic( + project_id: &ProjectId, + project_secret: T, + app_domain: DidWeb, + notify_server_url: &Url, +) -> ( + x25519_dalek::PublicKey, + ed25519_dalek::VerifyingKey, + DecodedClientId, +) +where + T: Display, +{ + let response = assert_successful_response( + reqwest::Client::new() + .post( + notify_server_url + .join(&format!("/{project_id}/subscribe-topic",)) + .unwrap(), + ) + .bearer_auth(project_secret) + .json(&SubscribeTopicRequestBody { + app_domain: app_domain.into_domain(), + }) + .send() + .await + .unwrap(), + ) + .await + .json::() + .await + .unwrap(); + + let authentication = decode_key(&response.authentication_key).unwrap(); + let key_agreement = decode_key(&response.subscribe_key).unwrap(); + + let key_agreement = x25519_dalek::PublicKey::from(key_agreement); + let authentication = ed25519_dalek::VerifyingKey::from_bytes(&authentication).unwrap(); + let client_id = get_client_id(&authentication); + (key_agreement, authentication, client_id) +} + +pub async fn get_notify_did_json( + notify_server_url: &Url, +) -> (x25519_dalek::PublicKey, DecodedClientId) { + let did_json_url = notify_server_url.join(DID_JSON_ENDPOINT).unwrap(); + let did_json = reqwest::get(did_json_url) + .await + .unwrap() + .json::() + .await + .unwrap(); + let key_agreement = &did_json + .verification_method + .iter() + .find(|key| key.id.ends_with(WC_NOTIFY_SUBSCRIBE_KEY_ID)) + .unwrap() + .public_key_jwk + .x; + let authentication = &did_json + .verification_method + .iter() + .find(|key| key.id.ends_with(WC_NOTIFY_AUTHENTICATION_KEY_ID)) + .unwrap() + .public_key_jwk + .x; + let key_agreement: [u8; 32] = BASE64URL + .decode(key_agreement.as_bytes()) + .unwrap() + .try_into() + .unwrap(); + let authentication: [u8; 32] = BASE64URL + .decode(authentication.as_bytes()) + .unwrap() + .try_into() + .unwrap(); + ( + x25519_dalek::PublicKey::from(key_agreement), + // Better approach, but dependency versions conflict right now + // DecodedClientId::from_key( + // ed25519_dalek::VerifyingKey::from_bytes(&authentication).unwrap(), + // ), + DecodedClientId(authentication), + ) 
+} diff --git a/tests/utils/mod.rs b/tests/utils/mod.rs index af8ec142..a36707e0 100644 --- a/tests/utils/mod.rs +++ b/tests/utils/mod.rs @@ -1,9 +1,10 @@ use { base64::Engine, + chrono::Utc, ed25519_dalek::{Signer, VerifyingKey}, k256::ecdsa::SigningKey, notify_server::{ - auth::{AuthError, GetSharedClaims, SharedClaims}, + auth::{AuthError, DidWeb, GetSharedClaims, SharedClaims}, error::NotifyServerError, model::types::AccountId, notify_message::NotifyMessage, @@ -14,20 +15,39 @@ use { rand_core::SeedableRng, relay_client::http::Client, relay_rpc::{ - auth::{cacao::signature::eip1271::get_rpc_url::GetRpcUrl, ed25519_dalek::Keypair}, - domain::{ProjectId, Topic}, + auth::{ + cacao::{ + self, + header::EIP4361, + signature::{ + eip1271::get_rpc_url::GetRpcUrl, + eip191::{eip191_bytes, EIP191}, + }, + }, + ed25519_dalek::Keypair, + }, + domain::{DecodedClientId, ProjectId, Topic}, jwt::{JwtHeader, JWT_HEADER_ALG, JWT_HEADER_TYP}, rpc::SubscriptionData, }, + reqwest::Response, serde::Serialize, + serde_json::json, sha2::Digest, sha3::Keccak256, std::{sync::Arc, time::Duration}, - tokio::sync::{broadcast::Receiver, RwLock}, + tokio::sync::{ + broadcast::{error::RecvError, Receiver}, + RwLock, + }, tracing::info, url::Url, }; +pub mod http_api; +pub mod notify_relay_api; +pub mod relay_api; + pub const RELAY_MESSAGE_DELIVERY_TIMEOUT: Duration = Duration::from_secs(30); pub const JWT_LEEWAY: i64 = 30; @@ -124,6 +144,7 @@ impl RelayClient { .client .publish(topic.clone(), message.clone(), tag, ttl, false) .await; + println!("publishing {tag}"); match result { Ok(_) => return, e if tries > RETRIES => e.unwrap(), @@ -135,7 +156,24 @@ impl RelayClient { pub async fn accept_message(&mut self, tag: u32, topic: &Topic) -> SubscriptionData { let result = tokio::time::timeout(RELAY_MESSAGE_DELIVERY_TIMEOUT, async { loop { - let msg = self.receiver.recv().await.unwrap(); + let msg = match self.receiver.recv().await { + Ok(msg) => msg, + Err(RecvError::Closed) => panic!("Receiver closed"), + Err(RecvError::Lagged(c)) => { + println!("Rceiver lagged by {c} messages; remaining messages:"); + loop { + let next_message_fut = + tokio::time::timeout(Duration::from_secs(1), self.receiver.recv()) + .await; + let remaining_message = match next_message_fut { + Ok(msg) => msg, + Err(_) => break, + }; + println!("- {remaining_message:?}") + } + panic!("Receiver lagged"); + } + }; if msg.tag == tag && &msg.topic == topic { return msg; } else { @@ -248,6 +286,99 @@ impl GetSharedClaims for UnregisterIdentityRequestAuth { } } +pub async fn unregister_identity_key( + keys_server_url: Url, + account: &AccountId, + identity_signing_key: &ed25519_dalek::SigningKey, + identity_did_key: &DecodedClientId, +) { + let unregister_auth = UnregisterIdentityRequestAuth { + shared_claims: SharedClaims { + iat: Utc::now().timestamp() as u64, + exp: Utc::now().timestamp() as u64 + 3600, + iss: identity_did_key.to_did_key(), + aud: keys_server_url.to_string(), + act: "unregister_identity".to_owned(), + mjv: "0".to_owned(), + }, + pkh: account.to_did_pkh(), + }; + let unregister_auth = encode_auth(&unregister_auth, identity_signing_key); + reqwest::Client::new() + .delete(keys_server_url.join("/identity").unwrap()) + .body(serde_json::to_string(&json!({"idAuth": unregister_auth})).unwrap()) + .send() + .await + .unwrap(); +} + +pub async fn assert_successful_response(response: Response) -> Response { + let status = response.status(); + if !status.is_success() { + panic!( + "non-successful response {status}: {:?}", + response.text().await 
+ ); + } + response +} + +#[derive(Clone)] +pub struct IdentityKeyDetails { + pub keys_server_url: Url, + pub signing_key: ed25519_dalek::SigningKey, + pub client_id: DecodedClientId, +} + +pub fn generate_identity_key() -> (ed25519_dalek::SigningKey, DecodedClientId) { + let keypair = Keypair::generate(&mut StdRng::from_entropy()); + let signing_key = ed25519_dalek::SigningKey::from_bytes(keypair.secret_key().as_bytes()); + let client_id = DecodedClientId::from_key(&keypair.public_key()); + (signing_key, client_id) +} + +pub async fn sign_cacao( + app_domain: &DidWeb, + account: &AccountId, + statement: String, + identity_public_key: DecodedClientId, + keys_server_url: String, + account_signing_key: &k256::ecdsa::SigningKey, +) -> cacao::Cacao { + let mut cacao = cacao::Cacao { + h: cacao::header::Header { + t: EIP4361.to_owned(), + }, + p: cacao::payload::Payload { + domain: app_domain.domain().to_owned(), + iss: account.to_did_pkh(), + statement: Some(statement), + aud: identity_public_key.to_did_key(), + version: cacao::Version::V1, + nonce: hex::encode(rand::Rng::gen::<[u8; 10]>(&mut rand::thread_rng())), + iat: Utc::now().to_rfc3339(), + exp: None, + nbf: None, + request_id: None, + resources: Some(vec![keys_server_url]), + }, + s: cacao::signature::Signature { + t: "".to_owned(), + s: "".to_owned(), + }, + }; + let (signature, recovery): (k256::ecdsa::Signature, _) = account_signing_key + .sign_digest_recoverable(Keccak256::new_with_prefix(eip191_bytes( + &cacao.siwe_message().unwrap(), + ))) + .unwrap(); + let cacao_signature = [&signature.to_bytes()[..], &[recovery.to_byte()]].concat(); + cacao.s.t = EIP191.to_owned(); + cacao.s.s = hex::encode(cacao_signature); + cacao.verify(&MockGetRpcUrl).await.unwrap(); + cacao +} + pub struct MockGetRpcUrl; impl GetRpcUrl for MockGetRpcUrl { fn get_rpc_url(&self, _: String) -> Option { diff --git a/tests/utils/notify_relay_api.rs b/tests/utils/notify_relay_api.rs new file mode 100644 index 00000000..ff032776 --- /dev/null +++ b/tests/utils/notify_relay_api.rs @@ -0,0 +1,371 @@ +use { + super::{ + encode_auth, + relay_api::{publish_jwt_message, TopicEncrptionScheme, TopicEncryptionSchemeAsymetric}, + IdentityKeyDetails, RelayClient, + }, + crate::utils::{ + http_api::get_notify_did_json, + relay_api::{decode_message, decode_response_message}, + verify_jwt, JWT_LEEWAY, + }, + ed25519_dalek::VerifyingKey, + notify_server::{ + auth::{ + from_jwt, DidWeb, NotifyServerSubscription, SubscriptionRequestAuth, + SubscriptionResponseAuth, WatchSubscriptionsChangedRequestAuth, + WatchSubscriptionsChangedResponseAuth, WatchSubscriptionsRequestAuth, + WatchSubscriptionsResponseAuth, + }, + jsonrpc::NotifyPayload, + model::types::AccountId, + notify_message::NotifyMessage, + rpc::{ + derive_key, NotifyRequest, NotifyResponse, NotifySubscribe, NotifySubscriptionsChanged, + NotifyWatchSubscriptions, ResponseAuth, + }, + spec::{ + NOTIFY_MESSAGE_ACT, NOTIFY_MESSAGE_METHOD, NOTIFY_MESSAGE_TAG, NOTIFY_SUBSCRIBE_ACT, + NOTIFY_SUBSCRIBE_METHOD, NOTIFY_SUBSCRIBE_RESPONSE_ACT, NOTIFY_SUBSCRIBE_RESPONSE_TAG, + NOTIFY_SUBSCRIBE_TAG, NOTIFY_SUBSCRIBE_TTL, NOTIFY_SUBSCRIPTIONS_CHANGED_ACT, + NOTIFY_SUBSCRIPTIONS_CHANGED_METHOD, NOTIFY_SUBSCRIPTIONS_CHANGED_RESPONE_ACT, + NOTIFY_SUBSCRIPTIONS_CHANGED_RESPONSE_TAG, NOTIFY_SUBSCRIPTIONS_CHANGED_RESPONSE_TTL, + NOTIFY_SUBSCRIPTIONS_CHANGED_TAG, NOTIFY_WATCH_SUBSCRIPTIONS_ACT, + NOTIFY_WATCH_SUBSCRIPTIONS_METHOD, NOTIFY_WATCH_SUBSCRIPTIONS_RESPONSE_ACT, + NOTIFY_WATCH_SUBSCRIPTIONS_RESPONSE_TAG, 
NOTIFY_WATCH_SUBSCRIPTIONS_TAG, + NOTIFY_WATCH_SUBSCRIPTIONS_TTL, + }, + utils::{is_same_address, topic_from_key}, + }, + rand_chacha::rand_core::OsRng, + relay_rpc::domain::DecodedClientId, + std::collections::HashSet, + url::Url, + uuid::Uuid, + x25519_dalek::{PublicKey, StaticSecret}, +}; + +async fn publish_watch_subscriptions_request( + relay_client: &mut RelayClient, + account: &AccountId, + client_id: &DecodedClientId, + identity_key_details: &IdentityKeyDetails, + encryption_details: TopicEncryptionSchemeAsymetric, + app: Option, +) { + publish_jwt_message( + relay_client, + client_id, + identity_key_details, + &TopicEncrptionScheme::Asymetric(encryption_details), + NOTIFY_WATCH_SUBSCRIPTIONS_TAG, + NOTIFY_WATCH_SUBSCRIPTIONS_TTL, + NOTIFY_WATCH_SUBSCRIPTIONS_ACT, + None, + |shared_claims| { + serde_json::to_value(NotifyRequest::new( + NOTIFY_WATCH_SUBSCRIPTIONS_METHOD, + NotifyWatchSubscriptions { + watch_subscriptions_auth: encode_auth( + &WatchSubscriptionsRequestAuth { + shared_claims, + ksu: identity_key_details.keys_server_url.to_string(), + sub: account.to_did_pkh(), + app, + }, + &identity_key_details.signing_key, + ), + }, + )) + .unwrap() + }, + ) + .await +} + +#[allow(clippy::too_many_arguments)] +pub async fn watch_subscriptions( + relay_client: &mut RelayClient, + notify_server_url: Url, + identity_key_details: &IdentityKeyDetails, + app_domain: Option, + account: &AccountId, +) -> (Vec, [u8; 32], DecodedClientId) { + let (key_agreement_key, client_id) = get_notify_did_json(¬ify_server_url).await; + + let secret = StaticSecret::random_from_rng(OsRng); + let public = PublicKey::from(&secret); + + let response_topic_key = derive_key(&key_agreement_key, &secret).unwrap(); + let response_topic = topic_from_key(&response_topic_key); + + publish_watch_subscriptions_request( + relay_client, + account, + &client_id, + identity_key_details, + TopicEncryptionSchemeAsymetric { + client_private: secret, + client_public: public, + server_public: key_agreement_key, + }, + app_domain, + ) + .await; + + relay_client.subscribe(response_topic.clone()).await; + + let msg = relay_client + .accept_message(NOTIFY_WATCH_SUBSCRIPTIONS_RESPONSE_TAG, &response_topic) + .await; + + let (_id, auth) = + decode_response_message::(msg, &response_topic_key); + assert_eq!( + auth.shared_claims.act, + NOTIFY_WATCH_SUBSCRIPTIONS_RESPONSE_ACT + ); + assert_eq!(auth.shared_claims.iss, client_id.to_did_key()); + assert_eq!( + auth.shared_claims.aud, + identity_key_details.client_id.to_did_key() + ); + assert_eq!(auth.sub, account.to_did_pkh()); + + (auth.sbs, response_topic_key, client_id) +} + +async fn publish_subscriptions_changed_response( + relay_client: &mut RelayClient, + did_pkh: &AccountId, + client_id: &DecodedClientId, + identity_key_details: &IdentityKeyDetails, + sym_key: [u8; 32], + id: u64, +) { + publish_jwt_message( + relay_client, + client_id, + identity_key_details, + &TopicEncrptionScheme::Symetric(sym_key), + NOTIFY_SUBSCRIPTIONS_CHANGED_RESPONSE_TAG, + NOTIFY_SUBSCRIPTIONS_CHANGED_RESPONSE_TTL, + NOTIFY_SUBSCRIPTIONS_CHANGED_RESPONE_ACT, + None, + |shared_claims| { + serde_json::to_value(NotifyResponse::new( + id, + ResponseAuth { + response_auth: encode_auth( + &WatchSubscriptionsChangedResponseAuth { + shared_claims, + ksu: identity_key_details.keys_server_url.to_string(), + sub: did_pkh.to_did_pkh(), + }, + &identity_key_details.signing_key, + ), + }, + )) + .unwrap() + }, + ) + .await +} + +#[allow(clippy::too_many_arguments)] +pub async fn accept_watch_subscriptions_changed( + 
relay_client: &mut RelayClient, + notify_server_client_id: &DecodedClientId, + identity_key_details: &IdentityKeyDetails, + account: &AccountId, + watch_topic_key: [u8; 32], +) -> Vec { + let msg = relay_client + .accept_message( + NOTIFY_SUBSCRIPTIONS_CHANGED_TAG, + &topic_from_key(&watch_topic_key), + ) + .await; + + let request = + decode_message::>(msg, &watch_topic_key); + assert_eq!(request.method, NOTIFY_SUBSCRIPTIONS_CHANGED_METHOD); + let auth = from_jwt::( + &request.params.subscriptions_changed_auth, + ) + .unwrap(); + + assert_eq!(auth.shared_claims.act, NOTIFY_SUBSCRIPTIONS_CHANGED_ACT); + assert_eq!(auth.shared_claims.iss, notify_server_client_id.to_did_key()); + assert_eq!( + auth.shared_claims.aud, + identity_key_details.client_id.to_did_key() + ); + assert_eq!(auth.sub, account.to_did_pkh()); + + publish_subscriptions_changed_response( + relay_client, + account, + notify_server_client_id, + identity_key_details, + watch_topic_key, + request.id, + ) + .await; + + auth.sbs +} + +#[allow(clippy::too_many_arguments)] +async fn publish_subscribe_request( + relay_client: &mut RelayClient, + did_pkh: String, + client_id: &DecodedClientId, + identity_key_details: &IdentityKeyDetails, + encryption_details: TopicEncryptionSchemeAsymetric, + app: DidWeb, + notification_types: HashSet, + mjv: String, +) { + publish_jwt_message( + relay_client, + client_id, + identity_key_details, + &TopicEncrptionScheme::Asymetric(encryption_details), + NOTIFY_SUBSCRIBE_TAG, + NOTIFY_SUBSCRIBE_TTL, + NOTIFY_SUBSCRIBE_ACT, + Some(mjv), + |shared_claims| { + serde_json::to_value(NotifyRequest::new( + NOTIFY_SUBSCRIBE_METHOD, + NotifySubscribe { + subscription_auth: encode_auth( + &SubscriptionRequestAuth { + shared_claims, + ksu: identity_key_details.keys_server_url.to_string(), + sub: did_pkh.clone(), + scp: notification_types + .iter() + .map(ToString::to_string) + .collect::>() + .join(" "), + app, + }, + &identity_key_details.signing_key, + ), + }, + )) + .unwrap() + }, + ) + .await +} + +#[allow(clippy::too_many_arguments)] +pub async fn subscribe( + relay_client: &mut RelayClient, + account: &AccountId, + identity_key_details: &IdentityKeyDetails, + app_key_agreement_key: x25519_dalek::PublicKey, + app_client_id: &DecodedClientId, + app: DidWeb, + notification_types: HashSet, +) { + let _subs = subscribe_with_mjv( + relay_client, + account, + identity_key_details, + app_key_agreement_key, + app_client_id, + app, + notification_types, + "0".to_owned(), + ) + .await; +} + +#[allow(clippy::too_many_arguments)] +pub async fn subscribe_with_mjv( + relay_client: &mut RelayClient, + account: &AccountId, + identity_key_details: &IdentityKeyDetails, + app_key_agreement_key: x25519_dalek::PublicKey, + app_client_id: &DecodedClientId, + app: DidWeb, + notification_types: HashSet, + mjv: String, +) -> Vec { + let secret = StaticSecret::random_from_rng(OsRng); + let public = PublicKey::from(&secret); + let response_topic_key = derive_key(&app_key_agreement_key, &secret).unwrap(); + let response_topic = topic_from_key(&response_topic_key); + + publish_subscribe_request( + relay_client, + account.to_did_pkh(), + app_client_id, + identity_key_details, + TopicEncryptionSchemeAsymetric { + client_private: secret, + client_public: public, + server_public: app_key_agreement_key, + }, + app, + notification_types, + mjv, + ) + .await; + + // https://walletconnect.slack.com/archives/C03SMNKLPU0/p1704449850496039?thread_ts=1703984667.223199&cid=C03SMNKLPU0 + tokio::time::sleep(std::time::Duration::from_secs(1)).await; + 
+ relay_client.subscribe(response_topic.clone()).await; + + let msg = relay_client + .accept_message(NOTIFY_SUBSCRIBE_RESPONSE_TAG, &response_topic) + .await; + + let (_id, auth) = decode_response_message::(msg, &response_topic_key); + assert_eq!(auth.shared_claims.act, NOTIFY_SUBSCRIBE_RESPONSE_ACT); + assert_eq!(auth.shared_claims.iss, app_client_id.to_did_key()); + assert_eq!( + auth.shared_claims.aud, + identity_key_details.client_id.to_did_key() + ); + assert_eq!(auth.sub, account.to_did_pkh()); + + auth.sbs +} + +#[allow(clippy::too_many_arguments)] +pub async fn accept_notify_message( + client: &mut RelayClient, + account: &AccountId, + app_authentication: &VerifyingKey, + app_client_id: &DecodedClientId, + app_domain: &DidWeb, + notify_key: &[u8; 32], +) -> (u64, NotifyMessage) { + let msg = client + .accept_message(NOTIFY_MESSAGE_TAG, &topic_from_key(notify_key)) + .await; + + let request = decode_message::>(msg, notify_key); + assert_eq!(request.method, NOTIFY_MESSAGE_METHOD); + + let claims = verify_jwt(&request.params.message_auth, app_authentication).unwrap(); + + assert_eq!(claims.iss, app_client_id.to_did_key()); + assert_eq!(claims.sub, account.to_did_pkh()); + assert!(claims.iat < chrono::Utc::now().timestamp() + JWT_LEEWAY); // TODO remove leeway + assert!(claims.exp > chrono::Utc::now().timestamp() - JWT_LEEWAY); // TODO remove leeway + assert_eq!(claims.app.as_ref(), app_domain.domain()); // bug: https://github.com/WalletConnect/notify-server/issues/251 + assert_eq!(claims.act, NOTIFY_MESSAGE_ACT); + assert!(is_same_address( + &AccountId::from_did_pkh(&claims.sub).unwrap(), + account + )); + + (request.id, claims) +} diff --git a/tests/utils/relay_api.rs b/tests/utils/relay_api.rs new file mode 100644 index 00000000..eac137db --- /dev/null +++ b/tests/utils/relay_api.rs @@ -0,0 +1,122 @@ +use { + super::{IdentityKeyDetails, RelayClient}, + chacha20poly1305::{aead::Aead, ChaCha20Poly1305, KeyInit}, + chrono::{DateTime, Utc}, + data_encoding::BASE64, + notify_server::{ + auth::{add_ttl, from_jwt, GetSharedClaims, SharedClaims}, + rpc::{derive_key, NotifyResponse, ResponseAuth}, + types::{Envelope, EnvelopeType0, EnvelopeType1}, + utils::topic_from_key, + }, + relay_rpc::{domain::DecodedClientId, rpc::SubscriptionData}, + serde::de::DeserializeOwned, + sha2::digest::generic_array::GenericArray, +}; + +pub fn decode_message(msg: SubscriptionData, key: &[u8; 32]) -> T +where + T: DeserializeOwned, +{ + let Envelope:: { sealbox, iv, .. 
} = + Envelope::::from_bytes(BASE64.decode(msg.message.as_bytes()).unwrap()) + .unwrap(); + let decrypted_response = ChaCha20Poly1305::new(GenericArray::from_slice(key)) + .decrypt(&iv.into(), chacha20poly1305::aead::Payload::from(&*sealbox)) + .unwrap(); + serde_json::from_slice::(&decrypted_response).unwrap() +} + +pub fn decode_response_message(msg: SubscriptionData, key: &[u8; 32]) -> (u64, T) +where + T: GetSharedClaims + DeserializeOwned, +{ + let response = decode_message::>(msg, key); + ( + response.id, + from_jwt::(&response.result.response_auth).unwrap(), + ) +} + +pub struct TopicEncryptionSchemeAsymetric { + pub client_private: x25519_dalek::StaticSecret, + pub client_public: x25519_dalek::PublicKey, + pub server_public: x25519_dalek::PublicKey, +} + +pub enum TopicEncrptionScheme { + Asymetric(TopicEncryptionSchemeAsymetric), + Symetric([u8; 32]), +} + +#[allow(clippy::too_many_arguments)] +pub async fn publish_jwt_message( + relay_client: &mut RelayClient, + client_id: &DecodedClientId, + identity_key_details: &IdentityKeyDetails, + encryption_details: &TopicEncrptionScheme, + tag: u32, + ttl: std::time::Duration, + act: &str, + mjv: Option, + make_message: impl FnOnce(SharedClaims) -> serde_json::Value, +) { + fn make_shared_claims( + now: DateTime, + ttl: std::time::Duration, + act: &str, + client_id: &DecodedClientId, + mjv: Option, + identity_key_details: &IdentityKeyDetails, + ) -> SharedClaims { + SharedClaims { + iat: now.timestamp() as u64, + exp: add_ttl(now, ttl).timestamp() as u64, + iss: identity_key_details.client_id.to_did_key(), + act: act.to_owned(), + aud: client_id.to_did_key(), + mjv: mjv.unwrap_or_else(|| "0".to_owned()), + } + } + + let now = Utc::now(); + + let message = make_message(make_shared_claims( + now, + ttl, + act, + client_id, + mjv, + identity_key_details, + )); + + let (envelope, topic) = match encryption_details { + TopicEncrptionScheme::Asymetric(TopicEncryptionSchemeAsymetric { + client_private: client_secret, + client_public, + server_public, + }) => { + let response_topic_key = derive_key(server_public, client_secret).unwrap(); + ( + Envelope::::new( + &response_topic_key, + message, + *client_public.as_bytes(), + ) + .unwrap() + .to_bytes(), + topic_from_key(server_public.as_bytes()), + ) + } + TopicEncrptionScheme::Symetric(sym_key) => ( + Envelope::::new(sym_key, message) + .unwrap() + .to_bytes(), + topic_from_key(sym_key), + ), + }; + + let message = BASE64.encode(&envelope); + + relay_client.publish(topic, message, tag, ttl).await; +}
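For context on what the new tests/utils/relay_api.rs helpers do on the wire: publish_jwt_message seals the JSON-RPC payload into an Envelope with ChaCha20-Poly1305 under the 32-byte topic key, and decode_message reverses that on the receiving side, as seen in its use of ChaCha20Poly1305 with the `iv` and `sealbox` fields. Below is a minimal, self-contained sketch of that symmetric round trip using the chacha20poly1305 crate's documented API; the generated key and the JSON payload are placeholders for illustration only, not values or method names taken from this patch.

use chacha20poly1305::{
    aead::{Aead, AeadCore, KeyInit, OsRng},
    ChaCha20Poly1305,
};

fn main() {
    // Stand-in for the 32-byte symmetric topic key; in the tests this is the output of
    // derive_key() (asymmetric case) or the watch-subscriptions sym key. The relay topic
    // used for publish/subscribe is derived from this same key via topic_from_key().
    let key = ChaCha20Poly1305::generate_key(&mut OsRng);
    let cipher = ChaCha20Poly1305::new(&key);

    // Random 96-bit nonce, carried in the Envelope as `iv` next to the `sealbox`.
    let nonce = ChaCha20Poly1305::generate_nonce(&mut OsRng);

    // Placeholder JSON-RPC payload (the real helpers serialize a NotifyRequest/NotifyResponse
    // whose params embed a signed JWT).
    let payload = br#"{"id":1,"jsonrpc":"2.0","method":"placeholder","params":{}}"#;
    let sealbox = cipher.encrypt(&nonce, payload.as_ref()).unwrap();

    // Receiving side (decode_message): the same key and nonce recover the JSON-RPC payload,
    // which is then deserialized and its embedded JWT validated.
    let decrypted = cipher.decrypt(&nonce, sealbox.as_ref()).unwrap();
    assert_eq!(decrypted, payload);
}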