diff --git a/server/main-api/src/calendar/connectum.rs b/server/main-api/src/calendar/connectum.rs
index 46110b955..8c40071b4 100644
--- a/server/main-api/src/calendar/connectum.rs
+++ b/server/main-api/src/calendar/connectum.rs
@@ -1,5 +1,5 @@
 use std::time::{Duration, Instant};
-use std::{env, io};
+use std::{env, fmt, io};
 
 use chrono::{DateTime, Utc};
 use oauth2::basic::{BasicClient, BasicTokenResponse};
@@ -16,6 +16,13 @@ pub(in crate::calendar) struct APIRequestor {
     pool: PgPool,
     oauth_token: Option<(Instant, BasicTokenResponse)>,
 }
+impl fmt::Debug for APIRequestor {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("APIRequestor")
+            .field("oauth_token", &self.oauth_token.clone().map(|(i, _)| i))
+            .finish()
+    }
+}
 
 impl From<&PgPool> for APIRequestor {
     fn from(pool: &PgPool) -> Self {
@@ -39,6 +46,7 @@ impl From<&PgPool> for APIRequestor {
 }
 
 impl APIRequestor {
+    #[tracing::instrument]
     pub(crate) async fn refresh(&self, id: String) -> Result<(), crate::BoxedError> {
         let sync_start = Utc::now();
         let start = Instant::now();
@@ -75,6 +83,7 @@ impl APIRequestor {
         }
         true
     }
+    #[tracing::instrument(ret(level = tracing::Level::TRACE))]
     pub(crate) async fn try_refresh_token(&mut self) -> Result<String, crate::BoxedError> {
         if self.should_refresh_token() {
             self.oauth_token = Some(Self::fetch_new_oauth_token().await?);
@@ -146,6 +155,7 @@ impl APIRequestor {
 
         Ok(())
     }
+    #[tracing::instrument(ret(level = tracing::Level::TRACE))]
     async fn fetch_new_oauth_token() -> Result<(Instant, BasicTokenResponse), crate::BoxedError> {
         let client_id = env::var("CONNECTUM_OAUTH_CLIENT_ID")
             .map_err(|e| {
@@ -176,6 +186,7 @@ impl APIRequestor {
             .await;
         Ok((Instant::now(), token?))
     }
+    #[tracing::instrument(skip(tx))]
     async fn delete_events(
         &self,
         tx: &mut sqlx::Transaction<'_, sqlx::Postgres>,
@@ -185,6 +196,7 @@ impl APIRequestor {
             .execute(&mut **tx)
             .await
     }
+    #[tracing::instrument(skip(tx))]
     async fn update_last_calendar_scrape_at(
         &self,
         tx: &mut sqlx::Transaction<'_, sqlx::Postgres>,
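Aside, since the files below repeat this pattern: `#[tracing::instrument]` records every argument of the annotated function via its `Debug` impl, so types that carry credentials get a hand-written `Debug` that leaves the secret out. A minimal sketch (names hypothetical, not part of this patch):

    use std::fmt;

    struct TokenHolder {
        issued_at: std::time::Instant,
        secret: String, // must never reach a log line
    }

    // mirrors the APIRequestor impl above: expose when the token was issued, not the token
    impl fmt::Debug for TokenHolder {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            f.debug_struct("TokenHolder")
                .field("issued_at", &self.issued_at)
                .finish()
        }
    }

    #[tracing::instrument] // safe: the Debug impl above reveals no secret
    fn authorize(token: &TokenHolder) {
        let _ = token;
    }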
diff --git a/server/main-api/src/calendar/mod.rs b/server/main-api/src/calendar/mod.rs
index ef2e209dd..84a4b7063 100644
--- a/server/main-api/src/calendar/mod.rs
+++ b/server/main-api/src/calendar/mod.rs
@@ -7,6 +7,8 @@ use sqlx::PgPool;
 use tracing::error;
 
 use crate::calendar::models::{CalendarLocation, Event, LocationEvents};
+use crate::limited::hash_map::LimitedHashMap;
+use crate::limited::vec::LimitedVec;
 
 mod connectum;
 mod models;
@@ -49,7 +51,7 @@ pub async fn calendar_handler(
         Err(e) => return e,
     };
     let locations = match get_locations(&data.db, &ids).await {
-        Ok(l) => l,
+        Ok(l) => l.0,
         Err(e) => return e,
     };
     if let Err(e) = validate_locations(&ids, &locations) {
@@ -91,25 +93,27 @@ fn validate_locations(ids: &[String], locations: &[CalendarLocation]) -> Result<(), HttpResponse> {
     Ok(())
 }
 
+#[tracing::instrument(skip(pool))]
 async fn get_locations(
     pool: &PgPool,
     ids: &[String],
-) -> Result<Vec<CalendarLocation>, HttpResponse> {
+) -> Result<LimitedVec<CalendarLocation>, HttpResponse> {
     match sqlx::query_as!(CalendarLocation, "SELECT key,name,last_calendar_scrape_at,calendar_url,type,type_common_name FROM de WHERE key = ANY($1::text[])", ids).fetch_all(pool).await {
         Err(e) => {
             error!("could not refetch due to {e:?}");
             Err(HttpResponse::InternalServerError().body("could not get calendar entries, please try again later"))
         }
-        Ok(locations) => Ok(locations),
+        Ok(locations) => Ok(LimitedVec(locations)),
     }
 }
+#[tracing::instrument(skip(pool),ret(level = tracing::Level::TRACE))]
 async fn get_from_db(
     pool: &PgPool,
     locations: &[CalendarLocation],
     start_after: &DateTime<Utc>,
     end_before: &DateTime<Utc>,
-) -> Result<HashMap<String, LocationEvents>, crate::BoxedError> {
+) -> Result<LimitedHashMap<String, LocationEvents>, crate::BoxedError> {
     let mut located_events: HashMap<String, LocationEvents> = HashMap::new();
     for location in locations {
         let events = sqlx::query_as!(Event, r#"SELECT id,room_code,start_at,end_at,stp_title_de,stp_title_en,stp_type,entry_type AS "entry_type!:crate::calendar::models::EventType",detailed_entry_type
@@ -124,7 +128,7 @@ async fn get_from_db(
             },
         );
     }
-    Ok(located_events)
+    Ok(LimitedHashMap(located_events))
 }
 
 #[cfg(test)]
diff --git a/server/main-api/src/calendar/models.rs b/server/main-api/src/calendar/models.rs
index 8627d505c..fc85c4c2b 100644
--- a/server/main-api/src/calendar/models.rs
+++ b/server/main-api/src/calendar/models.rs
@@ -1,13 +1,7 @@
-use crate::models::Location;
 use chrono::{DateTime, Utc};
 use serde::{Deserialize, Serialize};
-use std::collections::HashMap;
 
-#[derive(Serialize, Deserialize, Clone, Debug)]
-pub(super) struct EventsCollection {
-    pub(super) events: HashMap<String, LocationEvents>,
-    pub(super) max_last_sync: DateTime<Utc>,
-}
+use crate::models::Location;
 
 #[derive(Serialize, Deserialize, Clone, Debug)]
 pub(super) struct CalendarLocation {
diff --git a/server/main-api/src/calendar/refresh.rs b/server/main-api/src/calendar/refresh.rs
index 25dbc2099..b7bf9c36b 100644
--- a/server/main-api/src/calendar/refresh.rs
+++ b/server/main-api/src/calendar/refresh.rs
@@ -15,6 +15,8 @@ const NUMBER_OF_CONCURRENT_SCRAPES: usize = 3;
 struct LocationKey {
     key: String,
 }
+
+#[tracing::instrument(skip(pool))]
 pub async fn all_entries(pool: &PgPool) {
     if let Err(e) = std::env::var("CONNECTUM_OAUTH_CLIENT_ID") {
         error!("Please make sure that CONNECTUM_OAUTH_CLIENT_ID is valid to use calendar features: {e:?}");
diff --git a/server/main-api/src/details.rs b/server/main-api/src/details.rs
index 35efb2034..f1ebe63c1 100644
--- a/server/main-api/src/details.rs
+++ b/server/main-api/src/details.rs
@@ -50,7 +50,8 @@ pub async fn get_handler(
     }
 }
 
-async fn get_alias_and_redirect(conn: &PgPool, query: &str) -> Option<(String, String)> {
+#[tracing::instrument(skip(pool))]
+async fn get_alias_and_redirect(pool: &PgPool, query: &str) -> Option<(String, String)> {
     let result = sqlx::query_as!(
         LocationKeyAlias,
         r#"
@@ -59,7 +60,7 @@ async fn get_alias_and_redirect(conn: &PgPool, query: &str) -> Option<(String, String)> {
         WHERE alias = $1 OR key = $1 "#,
         query
     )
-    .fetch_all(conn)
+    .fetch_all(pool)
     .await;
     match result {
         Ok(d) => {
diff --git a/server/main-api/src/feedback/github.rs b/server/main-api/src/feedback/github.rs
index df7df4835..272f8b4c5 100644
--- a/server/main-api/src/feedback/github.rs
+++ b/server/main-api/src/feedback/github.rs
@@ -13,6 +13,7 @@ fn github_token() -> Result<String, crate::BoxedError> {
     }
 }
 
+#[tracing::instrument]
 pub async fn open_issue(title: &str, description: &str, labels: Vec<String>) -> HttpResponse {
     let title = clean_feedback_data(title, 512);
     let description = clean_feedback_data(description, 1024 * 1024);
@@ -56,6 +57,7 @@ pub async fn open_issue(title: &str, description: &str, labels: Vec<String>) -> HttpResponse {
     };
 }
 
+#[tracing::instrument]
 pub async fn open_pr(
     branch: String,
     title: &str,
diff --git a/server/main-api/src/feedback/proposed_edits/coordinate.rs b/server/main-api/src/feedback/proposed_edits/coordinate.rs
index 67515cbdd..56f39a364 100644
--- a/server/main-api/src/feedback/proposed_edits/coordinate.rs
+++ b/server/main-api/src/feedback/proposed_edits/coordinate.rs
@@ -38,7 +38,7 @@ impl CoordinateFile {
     }
 }
 
-#[derive(Deserialize, Clone, Default)]
+#[derive(Deserialize, Debug, Clone, Copy, Default, PartialEq)]
 pub struct Coordinate {
     lat: f64,
     lon: f64,
diff --git a/server/main-api/src/feedback/proposed_edits/mod.rs b/server/main-api/src/feedback/proposed_edits/mod.rs
index 3770a8e3e..61cc9a4b0 100644
--- a/server/main-api/src/feedback/proposed_edits/mod.rs
+++ b/server/main-api/src/feedback/proposed_edits/mod.rs
@@ -6,6 +6,8 @@ use actix_web::{post, HttpResponse};
 use serde::Deserialize;
 use tracing::error;
 
+use crate::limited::hash_map::LimitedHashMap;
+
 use super::github;
 use super::proposed_edits::coordinate::Coordinate;
 use super::proposed_edits::image::Image;
@@ -17,7 +19,7 @@ mod discription;
 mod image;
 mod tmp_repo;
 
-#[derive(Deserialize, Clone)]
+#[derive(Debug, Deserialize, Clone)]
 struct Edit {
     coordinate: Option<Coordinate>,
     image: Option<Image>,
@@ -26,16 +28,17 @@ pub trait AppliableEdit {
     fn apply(&self, key: &str, base_dir: &Path) -> String;
 }
 
-#[derive(Deserialize)]
+#[derive(Debug, Deserialize)]
 pub struct EditRequest {
     token: String,
-    edits: HashMap<String, Edit>,
+    edits: LimitedHashMap<String, Edit>,
     additional_context: String,
     privacy_checked: bool,
 }
 
 const GIT_URL: &str = "git@github.com:TUM-Dev/NavigaTUM.git";
 impl EditRequest {
+    #[tracing::instrument]
     async fn apply_changes_and_generate_description(
         &self,
         branch_name: &str,
@@ -48,6 +51,7 @@ impl EditRequest {
     }
     fn edits_for<T>(&self, extractor: fn(Edit) -> Option<T>) -> HashMap<String, T> {
         self.edits
+            .0
             .clone()
             .into_iter()
             .filter_map(|(k, edit)| extractor(edit).map(|coord| (k, coord)))
@@ -57,10 +61,15 @@ impl EditRequest {
 
     fn extract_labels(&self) -> Vec<String> {
         let mut labels = vec!["webform".to_string()];
-        if self.edits.iter().any(|(_, edit)| edit.coordinate.is_none()) {
+        if self
+            .edits
+            .0
+            .iter()
+            .any(|(_, edit)| edit.coordinate.is_none())
+        {
             labels.push("coordinate".to_string());
         }
-        if self.edits.iter().any(|(_, edit)| edit.image.is_none()) {
+        if self.edits.0.iter().any(|(_, edit)| edit.image.is_none()) {
             labels.push("image".to_string());
         }
         labels
@@ -100,12 +109,12 @@ pub async fn propose_edits(
             .content_type("text/plain")
             .body("Using this endpoint without accepting the privacy policy is not allowed");
     };
-    if req_data.edits.is_empty() {
+    if req_data.edits.0.is_empty() {
         return HttpResponse::UnprocessableEntity()
             .content_type("text/plain")
             .body("Not enough edits provided");
     };
-    if req_data.edits.len() > 500 {
+    if req_data.edits.0.len() > 500 {
         return HttpResponse::InsufficientStorage()
             .content_type("text/plain")
             .body("Too many edits provided");
diff --git a/server/main-api/src/feedback/proposed_edits/tmp_repo.rs b/server/main-api/src/feedback/proposed_edits/tmp_repo.rs
index 88e375af6..860635ba7 100644
--- a/server/main-api/src/feedback/proposed_edits/tmp_repo.rs
+++ b/server/main-api/src/feedback/proposed_edits/tmp_repo.rs
@@ -4,11 +4,13 @@ use tracing::{debug, info};
 use super::discription::Description;
 use super::EditRequest;
 
+#[derive(Debug)]
 pub struct TempRepo {
     dir: tempfile::TempDir,
     branch_name: String,
 }
 impl TempRepo {
+    #[tracing::instrument]
     pub async fn clone_and_checkout(
         url: &'static str,
         branch_name: &str,
    ) -> Result<Self, crate::BoxedError> {
@@ -48,6 +50,7 @@ impl TempRepo {
         }
     }
 
+    #[tracing::instrument]
     pub fn apply_and_gen_description(&self, edits: &EditRequest) -> Description {
         let mut description = Description::default();
         description.add_context(&edits.additional_context);
@@ -60,6 +63,7 @@ impl TempRepo {
         description
     }
 
+    #[tracing::instrument]
     pub async fn commit(&self, title: &str) -> Result<(), crate::BoxedError> {
         let out = Command::new("git")
             .current_dir(&self.dir)
@@ -82,6 +86,7 @@ impl TempRepo {
             _ => Err(format!("git commit failed with output: {out:?}").into()),
         }
     }
+    #[tracing::instrument]
     pub async fn push(&self) -> Result<(), crate::BoxedError> {
         let out = Command::new("git")
             .current_dir(&self.dir)
diff --git a/server/main-api/src/feedback/tokens.rs b/server/main-api/src/feedback/tokens.rs
index 78bdfa8d5..cfdade370 100644
--- a/server/main-api/src/feedback/tokens.rs
+++ b/server/main-api/src/feedback/tokens.rs
@@ -1,3 +1,5 @@
+use std::fmt;
+
 use actix_web::HttpResponse;
 use jsonwebtoken::{decode, encode, DecodingKey, EncodingKey, Header, Validation};
 use serde::{Deserialize, Serialize};
@@ -7,6 +9,13 @@ use tracing::error;
 
 #[derive(Default)]
 pub struct RecordedTokens(Mutex<Vec<TokenRecord>>);
 
+impl fmt::Debug for RecordedTokens {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        //fields purposely omitted
+        f.debug_struct("RecordedTokens").finish()
+    }
+}
+
 pub struct TokenRecord {
     kid: u64,
     next_reset: i64,
@@ -43,6 +52,7 @@ impl Claims {
 }
 
 impl RecordedTokens {
+    #[tracing::instrument(skip(token))]
     pub async fn validate(&self, token: &str) -> Option<HttpResponse> {
         if !able_to_process_feedback() {
             return Some(
diff --git a/server/main-api/src/limited/hash_map.rs b/server/main-api/src/limited/hash_map.rs
new file mode 100644
index 000000000..da4322032
--- /dev/null
+++ b/server/main-api/src/limited/hash_map.rs
@@ -0,0 +1,60 @@
+use std::collections::HashMap;
+use std::fmt;
+use std::hash::Hash;
+
+use serde::{Deserialize, Serialize};
+
+use crate::limited::OrMore;
+
+#[derive(Serialize, Deserialize, Clone, Default)]
+pub struct LimitedHashMap<K: Eq + Hash, V>(pub HashMap<K, V>);
+
+impl<K: Eq + Hash, V> From<HashMap<K, V>> for LimitedHashMap<K, V> {
+    fn from(value: HashMap<K, V>) -> Self {
+        LimitedHashMap(value)
+    }
+}
+
+const LIMIT: usize = 3;
+impl<K: fmt::Debug + Eq + Hash + Ord + Clone, V: fmt::Debug + Clone> fmt::Debug
+    for LimitedHashMap<K, V>
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let mut collection = self.0.clone().into_iter().collect::<Vec<(K, V)>>();
+        collection.sort_unstable_by(|(k1, _), (k2, _)| k1.cmp(k2));
+        if self.0.len() <= LIMIT {
+            f.debug_map().entries(collection).finish()
+        } else {
+            f.debug_map()
+                .entries(
+                    collection
+                        .into_iter()
+                        .take(LIMIT)
+                        .map(|(k, v)| (OrMore::Value(k), OrMore::Value(v)))
+                        .chain([(OrMore::More, OrMore::More)]),
+                )
+                .finish()
+        }
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    #[test]
+    fn test_limited_output() {
+        let w: LimitedHashMap<u32, u32> = LimitedHashMap(HashMap::new());
+        assert_eq!(format!("{w:?}"), "{}");
+        let w = LimitedHashMap(HashMap::from([(1, 1)]));
+        assert_eq!(format!("{w:?}"), "{1: 1}");
+        let w = LimitedHashMap(HashMap::from([(1, 1), (2, 2)]));
+        assert_eq!(format!("{w:?}"), "{1: 1, 2: 2}");
+        let w = LimitedHashMap(HashMap::from([(1, 1), (2, 2), (3, 3)]));
+        assert_eq!(format!("{w:?}"), "{1: 1, 2: 2, 3: 3}");
+        let w = LimitedHashMap(HashMap::from([(1, 1), (2, 2), (3, 3), (4, 4)]));
+        assert_eq!(format!("{w:?}"), "{1: 1, 2: 2, 3: 3, ...: ...}");
+        let w = LimitedHashMap(HashMap::from([(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)]));
+        assert_eq!(format!("{w:?}"), "{1: 1, 2: 2, 3: 3, ...: ...}");
+    }
+}
diff --git a/server/main-api/src/limited/mod.rs b/server/main-api/src/limited/mod.rs
new file mode 100644
index 000000000..688b90688
--- /dev/null
+++ b/server/main-api/src/limited/mod.rs
@@ -0,0 +1,19 @@
+use std::fmt;
+use std::fmt::Formatter;
+
+pub mod hash_map;
+pub mod vec;
+
+enum OrMore<T> {
+    Value(T),
+    More,
+}
+
+impl<T: fmt::Debug> fmt::Debug for OrMore<T> {
+    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {
+        match self {
+            OrMore::Value(t) => fmt::Debug::fmt(t, f),
+            OrMore::More => write!(f, "..."),
+        }
+    }
+}
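A usage sketch for the wrapper introduced above (illustrative, not part of the patch): keys are cloned and sorted before truncation, so the `Debug` output is deterministic even though `HashMap` iteration order is not.

    use std::collections::HashMap;

    use crate::limited::hash_map::LimitedHashMap;

    fn demo() {
        let m: LimitedHashMap<u32, &str> =
            HashMap::from([(4, "d"), (1, "a"), (3, "c"), (2, "b")]).into();
        // everything past LIMIT (3 entries) collapses into "...":
        // prints {1: "a", 2: "b", 3: "c", ...: ...}
        println!("{m:?}");
    }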
diff --git a/server/main-api/src/limited/vec.rs b/server/main-api/src/limited/vec.rs
new file mode 100644
index 000000000..f7acdb241
--- /dev/null
+++ b/server/main-api/src/limited/vec.rs
@@ -0,0 +1,89 @@
+use serde::{Deserialize, Serialize};
+use std::fmt;
+use std::vec::IntoIter;
+
+use crate::limited::OrMore;
+
+#[derive(Serialize, Deserialize, Clone, PartialEq, Eq, PartialOrd, Ord)]
+pub struct LimitedVec<T>(pub Vec<T>);
+
+impl<T> AsRef<[T]> for LimitedVec<T> {
+    fn as_ref(&self) -> &[T] {
+        &self.0
+    }
+}
+
+impl<T> IntoIterator for LimitedVec<T> {
+    type Item = T;
+    type IntoIter = IntoIter<T>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.0.into_iter()
+    }
+}
+
+impl<T> LimitedVec<T> {
+    pub fn is_empty(&self) -> bool {
+        self.0.is_empty()
+    }
+    pub fn len(&self) -> usize {
+        self.0.len()
+    }
+}
+
+impl<T> From<Vec<T>> for LimitedVec<T> {
+    fn from(value: Vec<T>) -> Self {
+        LimitedVec(value)
+    }
+}
+
+const LIMIT: usize = 3;
+impl<T: fmt::Debug> fmt::Debug for LimitedVec<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        if self.0.len() <= LIMIT {
+            f.debug_list().entries(self.0.iter().take(LIMIT)).finish()
+        } else {
+            f.debug_list()
+                .entries(
+                    self.0
+                        .iter()
+                        .take(LIMIT)
+                        .map(OrMore::Value)
+                        .chain([OrMore::More]),
+                )
+                .finish()
+        }
+    }
+}
+impl<T> FromIterator<T> for LimitedVec<T> {
+    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
+        let mut c = Vec::new();
+
+        for i in iter {
+            c.push(i);
+        }
+
+        LimitedVec(c)
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    #[test]
+    fn test_limited_output() {
+        let w: LimitedVec<u32> = LimitedVec(vec![]);
+        assert_eq!(format!("{w:?}"), "[]");
+        let w = LimitedVec(vec![1]);
+        assert_eq!(format!("{w:?}"), "[1]");
+        let w = LimitedVec(vec![1, 2]);
+        assert_eq!(format!("{w:?}"), "[1, 2]");
+        let w = LimitedVec(vec![1, 2, 3]);
+        assert_eq!(format!("{w:?}"), "[1, 2, 3]");
+        let w = LimitedVec(vec![1, 2, 3, 4]);
+        assert_eq!(format!("{w:?}"), "[1, 2, 3, ...]");
+        let w = LimitedVec(vec![1, 2, 3, 4, 5]);
+        assert_eq!(format!("{w:?}"), "[1, 2, 3, ...]");
+    }
+}
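Why the wrappers pay off is easiest to see together with `ret`: the return value is recorded into the span via `Debug`, and a plain `Vec` would dump every element. A sketch under that assumption (function name hypothetical):

    use crate::limited::vec::LimitedVec;

    // with LimitedVec the recorded value is "[0, 1, 2, ...]" instead of 1000 elements
    #[tracing::instrument(ret(level = tracing::Level::TRACE))]
    fn search_results() -> LimitedVec<u32> {
        (0..1000).collect()
    }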
"https://nav.tum.de/maps/styles/osm-liberty/{z}/{x}/{y}@2x.png", x = location.x, @@ -110,7 +113,7 @@ async fn download_map_image(location: TileLocation) -> Result, BoxedErro // wait with exponential backoff let size = bytes.len(); if size > 500 { - return Ok(bytes.into()); + return Ok(LimitedVec(bytes.into())); } let wait_time_ms = 1.5_f32.powi(i).round() as u64; let wait_time = Duration::from_millis(wait_time_ms); diff --git a/server/main-api/src/maps/mod.rs b/server/main-api/src/maps/mod.rs index ebf63d1d4..fdb198aa2 100644 --- a/server/main-api/src/maps/mod.rs +++ b/server/main-api/src/maps/mod.rs @@ -10,6 +10,7 @@ use tokio::time::Instant; use tracing::{debug, error, warn}; use unicode_truncate::UnicodeTruncateStr; +use crate::limited::vec::LimitedVec; use crate::maps::overlay_map::OverlayMapTask; use crate::maps::overlay_text::{OverlayText, CANTARELL_BOLD, CANTARELL_REGULAR}; use crate::models::Location; @@ -28,18 +29,19 @@ pub fn configure(cfg: &mut web::ServiceConfig) { } } +#[tracing::instrument(skip(pool))] async fn get_localised_data( - conn: &PgPool, + pool: &PgPool, id: &str, should_use_english: bool, ) -> Result { let result = if should_use_english { sqlx::query_as!(Location, "SELECT key,name,last_calendar_scrape_at,calendar_url,type,type_common_name,lat,lon FROM en WHERE key = $1", id) - .fetch_all(conn) + .fetch_all(pool) .await } else { sqlx::query_as!(Location, "SELECT key,name,last_calendar_scrape_at,calendar_url,type,type_common_name,lat,lon FROM de WHERE key = $1", id) - .fetch_all(conn) + .fetch_all(pool) .await }; @@ -59,7 +61,11 @@ async fn get_localised_data( } } -async fn construct_image_from_data(data: Location, format: PreviewFormat) -> Option> { +#[tracing::instrument] +async fn construct_image_from_data( + data: Location, + format: PreviewFormat, +) -> Option> { let start_time = Instant::now(); let mut img = match format { PreviewFormat::OpenGraph => image::RgbaImage::new(1200, 630), @@ -89,10 +95,10 @@ fn draw_pin(img: &mut ImageBuffer, Vec>) { ); } -fn wrap_image_in_response(img: &image::RgbaImage) -> Vec { +fn wrap_image_in_response(img: &image::RgbaImage) -> LimitedVec { let mut w = Cursor::new(Vec::new()); img.write_to(&mut w, image::ImageFormat::Png).unwrap(); - w.into_inner() + LimitedVec(w.into_inner()) } const WHITE_PIXEL: Rgba = Rgba([255, 255, 255, 255]); fn draw_bottom(data: &Location, img: &mut image::RgbaImage) { @@ -123,16 +129,17 @@ fn draw_bottom(data: &Location, img: &mut image::RgbaImage) { .draw_onto(img); } -fn load_default_image() -> Vec { +fn load_default_image() -> LimitedVec { warn!("Loading default preview image, as map rendering failed. 
     warn!("Loading default preview image, as map rendering failed. Check the connection to the tileserver");
     let img = image::load_from_memory(include_bytes!("static/logo-card.png")).unwrap();
     // encode the image as PNG
     let mut w = Cursor::new(Vec::new());
     img.write_to(&mut w, image::ImageFormat::Png).unwrap();
-    w.into_inner()
+    LimitedVec(w.into_inner())
 }
 
-async fn get_possible_redirect_url(conn: &PgPool, query: &str, args: &QueryArgs) -> Option<String> {
+#[tracing::instrument(skip(pool))]
+async fn get_possible_redirect_url(pool: &PgPool, query: &str, args: &QueryArgs) -> Option<String> {
     let result = sqlx::query_as!(
         LocationKeyAlias,
         r#"
@@ -142,7 +149,7 @@ async fn get_possible_redirect_url(conn: &PgPool, query: &str, args: &QueryArgs
         LIMIT 1"#,
         query
     )
-    .fetch_one(conn)
+    .fetch_one(pool)
     .await;
     match result {
         Ok(d) => Some(format!(
@@ -213,5 +220,5 @@ pub async fn maps_handler(
         "Preview Generation for {id} took {elapsed:?}",
         elapsed = start_time.elapsed()
     );
-    HttpResponse::Ok().content_type("image/png").body(img)
+    HttpResponse::Ok().content_type("image/png").body(img.0)
 }
diff --git a/server/main-api/src/maps/overlay_map.rs b/server/main-api/src/maps/overlay_map.rs
index beda245e8..bf9026184 100644
--- a/server/main-api/src/maps/overlay_map.rs
+++ b/server/main-api/src/maps/overlay_map.rs
@@ -1,5 +1,7 @@
-use futures::{stream::FuturesUnordered, StreamExt};
+use std::fmt;
 use std::ops::Range;
+
+use futures::{stream::FuturesUnordered, StreamExt};
 use tracing::warn;
 
 use crate::maps::fetch_tile::FetchTileTask;
@@ -11,6 +13,16 @@ pub struct OverlayMapTask {
     pub z: u32,
 }
 
+impl fmt::Debug for OverlayMapTask {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("OverlayMapTask")
+            .field(&self.x)
+            .field(&self.y)
+            .field(&self.z)
+            .finish()
+    }
+}
+
 const POSSIBLE_INDEX_RANGE: Range<u32> = 0..7;
 
 impl OverlayMapTask {
@@ -28,6 +40,7 @@ impl OverlayMapTask {
         let (x, y, z) = lat_lon_z_to_xyz(entry.lat, entry.lon, zoom);
         Self { x, y, z }
     }
+    #[tracing::instrument(skip(img))]
     pub async fn draw_onto(&self, img: &mut image::RgbaImage) -> bool {
         // coordinate system is centered around the center of the image
         // around this center there is a 5*5 grid of tiles
diff --git a/server/main-api/src/maps/overlay_text.rs b/server/main-api/src/maps/overlay_text.rs
index afcf3fd2d..15a6bf7ca 100644
--- a/server/main-api/src/maps/overlay_text.rs
+++ b/server/main-api/src/maps/overlay_text.rs
@@ -1,3 +1,6 @@
+use std::fmt;
+use std::fmt::Formatter;
+
 use ab_glyph::{FontArc, PxScale};
 use image::Rgba;
 use imageproc::definitions::HasBlack;
@@ -19,6 +22,16 @@ pub struct OverlayText {
     font: &'static FontArc,
 }
 
+impl fmt::Debug for OverlayText {
+    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+        f.debug_struct("OverlayText")
+            .field("x", &self.x)
+            .field("y", &self.y)
+            .field("text", &self.text)
+            .finish()
+    }
+}
+
 impl OverlayText {
     pub fn with(text: &str, font: &'static FontArc) -> Self {
         Self {
@@ -32,6 +45,8 @@ impl OverlayText {
     pub fn at(self, x: i32, y: i32) -> Self {
         Self { x, y, ..self }
     }
+
+    #[tracing::instrument(skip(img))]
     pub fn draw_onto(self, img: &mut image::RgbaImage) {
         let (w, _) = text_size(SCALE, self.font, &self.text);
         draw_text_mut(
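The drawing functions above all take the other route for their large argument: `skip` removes it from the span entirely, which also lifts the `Debug` requirement for it. A minimal sketch (function name hypothetical):

    // an RgbaImage is megabytes of pixels with no useful Debug output, so it is
    // skipped while the cheap coordinates are still recorded
    #[tracing::instrument(skip(img))]
    fn draw_marker(img: &mut image::RgbaImage, x: i32, y: i32) {
        let _ = (img, x, y); // actual drawing elided
    }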
diff --git a/server/main-api/src/search/mod.rs b/server/main-api/src/search/mod.rs
index a2ef13c5f..3502b4d75 100644
--- a/server/main-api/src/search/mod.rs
+++ b/server/main-api/src/search/mod.rs
@@ -109,7 +109,9 @@ async fn cached_geoentry_search(
     q: String,
     highlighting: Highlighting,
     limits: Limits,
 ) -> Vec<ResultsSection> {
-    search_executor::do_geoentry_search(q, highlighting, limits).await
+    search_executor::do_geoentry_search(q, highlighting, limits)
+        .await
+        .0
 }
 
 #[cfg(test)]
diff --git a/server/main-api/src/search/search_executor/formatter.rs b/server/main-api/src/search/search_executor/formatter.rs
index 8cfad2bf9..4210d6dd2 100644
--- a/server/main-api/src/search/search_executor/formatter.rs
+++ b/server/main-api/src/search/search_executor/formatter.rs
@@ -11,6 +11,7 @@ pub(super) struct RoomVisitor {
 }
 
 impl From<(ParsedQuery, Highlighting)> for RoomVisitor {
+    #[tracing::instrument]
     fn from((parsed_input, highlighting): (ParsedQuery, Highlighting)) -> Self {
         Self {
             parsed_input,
diff --git a/server/main-api/src/search/search_executor/merger.rs b/server/main-api/src/search/search_executor/merger.rs
index d48b887d5..af19df1b0 100644
--- a/server/main-api/src/search/search_executor/merger.rs
+++ b/server/main-api/src/search/search_executor/merger.rs
@@ -2,6 +2,7 @@ use meilisearch_sdk::search::{SearchResult, SearchResults};
 
 use crate::search::search_executor::query::MSHit;
 
+#[tracing::instrument(skip(merged_results, buildings_results, rooms_results))]
 pub(super) fn merge_search_results(
     limits: &super::Limits,
     merged_results: &SearchResults<MSHit>,
diff --git a/server/main-api/src/search/search_executor/mod.rs b/server/main-api/src/search/search_executor/mod.rs
index 5a4fce176..47f6762ea 100644
--- a/server/main-api/src/search/search_executor/mod.rs
+++ b/server/main-api/src/search/search_executor/mod.rs
@@ -1,6 +1,7 @@
 use serde::Serialize;
 use tracing::error;
 
+use crate::limited::vec::LimitedVec;
 use crate::search::search_executor::parser::ParsedQuery;
 use crate::search::search_executor::query::MSHit;
 
@@ -34,12 +35,12 @@ struct ResultEntry {
     #[serde(skip_serializing_if = "Option::is_none")]
     parsed_id: Option<String>,
 }
-
+#[tracing::instrument]
 pub async fn do_geoentry_search(
     q: String,
     highlighting: Highlighting,
     limits: Limits,
-) -> Vec<ResultsSection> {
+) -> LimitedVec<ResultsSection> {
     let parsed_input = ParsedQuery::from(q.as_str());
 
     match query::GeoEntryQuery::from((&parsed_input, &limits, &highlighting))
@@ -60,14 +61,14 @@ pub async fn do_geoentry_search(
                 .for_each(|r| visitor.visit(r));
 
             match section_buildings.n_visible {
-                0 => vec![section_rooms, section_buildings],
-                _ => vec![section_buildings, section_rooms],
+                0 => LimitedVec(vec![section_rooms, section_buildings]),
+                _ => LimitedVec(vec![section_buildings, section_rooms]),
             }
         }
         Err(e) => {
             // error should be serde_json::error
             error!("Error searching for results: {e:?}");
-            vec![]
+            LimitedVec(vec![])
         }
     }
 }
diff --git a/server/main-api/src/search/search_executor/query.rs b/server/main-api/src/search/search_executor/query.rs
index 20b5dfdd7..6c1b16c47 100644
--- a/server/main-api/src/search/search_executor/query.rs
+++ b/server/main-api/src/search/search_executor/query.rs
@@ -24,6 +24,7 @@ pub(super) struct MSHit {
     rank: i32,
 }
 
+#[derive(Debug)]
 struct GeoEntryFilters {
     default: String,
     rooms: String,
@@ -42,6 +43,7 @@ impl From<&Filter> for GeoEntryFilters {
     }
 }
 
+#[derive(Debug)]
 pub(super) struct GeoEntryQuery {
     parsed_input: ParsedQuery,
     limits: Limits,
@@ -63,6 +65,7 @@ impl From<(&ParsedQuery, &Limits, &Highlighting)> for GeoEntryQuery {
 }
 
 impl GeoEntryQuery {
+    #[tracing::instrument(ret(level = tracing::Level::TRACE))]
     pub async fn execute(self) -> Result<MultiSearchResponse<MSHit>, Error> {
         let q_default = self.prompt_for_querying();
         let ms_url =
diff --git a/server/main-api/src/setup/database/alias.rs b/server/main-api/src/setup/database/alias.rs
index f965a1189..02726788f 100644
--- a/server/main-api/src/setup/database/alias.rs
+++ b/server/main-api/src/setup/database/alias.rs
@@ -1,7 +1,6 @@
-use std::time::Instant;
-
 use serde::Deserialize;
-use tracing::debug;
+
+use crate::limited::vec::LimitedVec;
 
 #[derive(Debug)]
 pub(super) struct Alias {
@@ -102,29 +101,33 @@ impl Alias {
             .await
     }
 }
+#[tracing::instrument]
 pub async fn download_updates(
-    keys_which_need_updating: &[String],
-) -> Result<Vec<Alias>, crate::BoxedError> {
+    keys_which_need_updating: &LimitedVec<String>,
+) -> Result<LimitedVec<Alias>, crate::BoxedError> {
     let cdn_url = std::env::var("CDN_URL").unwrap_or_else(|_| "https://nav.tum.de/cdn".to_string());
-    Ok(reqwest::get(format!("{cdn_url}/api_data.json"))
+    let aliases = reqwest::get(format!("{cdn_url}/api_data.json"))
         .await?
         .json::<Vec<AliasData>>()
         .await?
         .into_iter()
-        .filter(|d| keys_which_need_updating.is_empty() || keys_which_need_updating.contains(&d.id))
-        .map(AliasIterator::from)
-        .flat_map(IntoIterator::into_iter)
-        .collect::<Vec<Alias>>())
+        .filter(|d| {
+            keys_which_need_updating.is_empty() || keys_which_need_updating.0.contains(&d.id)
+        })
+        .map(AliasIterator::from);
+    Ok(LimitedVec(
+        aliases
+            .flat_map(IntoIterator::into_iter)
+            .collect::<Vec<Alias>>(),
+    ))
 }
+#[tracing::instrument(skip(tx))]
 pub async fn load_all_to_db(
-    aliases: Vec<Alias>,
+    aliases: LimitedVec<Alias>,
     tx: &mut sqlx::Transaction<'_, sqlx::Postgres>,
 ) -> Result<(), crate::BoxedError> {
-    let start = Instant::now();
     for task in aliases {
         task.store(tx).await?;
     }
-    debug!("loaded aliases in {elapsed:?}", elapsed = start.elapsed());
-
     Ok(())
 }
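The hand-rolled `Instant`/`debug!` timing deleted above is subsumed by the spans: a `fmt` subscriber that emits close events reports `time.busy`/`time.idle` for every instrumented call. A sketch of such a setup, assuming the `tracing-subscriber` crate is available:

    use tracing_subscriber::fmt::format::FmtSpan;

    fn init_tracing() {
        tracing_subscriber::fmt()
            .with_span_events(FmtSpan::CLOSE) // logs a timing line whenever a span closes
            .init();
    }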
diff --git a/server/main-api/src/setup/database/data.rs b/server/main-api/src/setup/database/data.rs
index d29ea2622..2e07bbc65 100644
--- a/server/main-api/src/setup/database/data.rs
+++ b/server/main-api/src/setup/database/data.rs
@@ -1,15 +1,39 @@
 use std::collections::HashMap;
-use std::time::Instant;
+use std::fmt;
+use std::hash::{Hash, Hasher};
 
 use serde_json::Value;
-use tracing::debug;
 
+use crate::limited::vec::LimitedVec;
+
+#[derive(Clone)]
 pub(super) struct DelocalisedValues {
     key: String,
     hash: i64,
     de: Value,
     en: Value,
 }
+
+impl fmt::Debug for DelocalisedValues {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("DelocalisedValues")
+            .field("key", &self.key)
+            .field("hash", &self.hash)
+            .finish()
+    }
+}
+
+impl PartialEq for DelocalisedValues {
+    fn eq(&self, other: &Self) -> bool {
+        self.hash == other.hash
+    }
+}
+impl Eq for DelocalisedValues {}
+
+impl Hash for DelocalisedValues {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        state.write_i64(self.hash);
+    }
+}
 
 impl From<HashMap<String, Value>> for DelocalisedValues {
     fn from(value: HashMap<String, Value>) -> Self {
@@ -98,11 +122,10 @@ impl DelocalisedValues {
         Ok(())
     }
 }
-
+#[tracing::instrument]
 pub async fn download_updates(
-    keys_which_need_updating: &[String],
-) -> Result<Vec<DelocalisedValues>, crate::BoxedError> {
-    let start = Instant::now();
+    keys_which_need_updating: &LimitedVec<String>,
+) -> Result<LimitedVec<DelocalisedValues>, crate::BoxedError> {
     let cdn_url = std::env::var("CDN_URL").unwrap_or_else(|_| "https://nav.tum.de/cdn".to_string());
     let tasks = reqwest::get(format!("{cdn_url}/api_data.json"))
         .await?
         .json::<Vec<HashMap<String, Value>>>()
         .await?
         .into_iter()
         .map(DelocalisedValues::from)
-        .filter(|d| keys_which_need_updating.contains(&d.key))
-        .collect::<Vec<DelocalisedValues>>();
-    debug!("downloaded data in {elapsed:?}", elapsed = start.elapsed());
+        .filter(|d| keys_which_need_updating.0.contains(&d.key))
+        .collect::<LimitedVec<DelocalisedValues>>();
     Ok(tasks)
 }
-
+#[tracing::instrument(skip(tx))]
 pub(super) async fn load_all_to_db(
-    tasks: Vec<DelocalisedValues>,
+    tasks: LimitedVec<DelocalisedValues>,
     tx: &mut sqlx::Transaction<'_, sqlx::Postgres>,
 ) -> Result<(), crate::BoxedError> {
-    let start = Instant::now();
     for task in tasks.into_iter() {
         task.store(tx).await?;
     }
-    debug!("loaded data in {elapsed:?}", elapsed = start.elapsed());
-
     Ok(())
 }
-
-pub async fn download_status() -> Result<Vec<(String, i64)>, crate::BoxedError> {
-    let start = Instant::now();
+#[tracing::instrument]
+pub async fn download_status() -> Result<LimitedVec<(String, i64)>, crate::BoxedError> {
     let cdn_url = std::env::var("CDN_URL").unwrap_or_else(|_| "https://nav.tum.de/cdn".to_string());
     let tasks = reqwest::get(format!("{cdn_url}/status_data.json"))
         .await?
         .json::<Vec<(String, i64)>>()
         .await?;
-    debug!(
-        "downloaded current status in {elapsed:?}",
-        elapsed = start.elapsed()
-    );
-    Ok(tasks)
+    Ok(LimitedVec(tasks))
 }
diff --git a/server/main-api/src/setup/database/mod.rs b/server/main-api/src/setup/database/mod.rs
index e3acb76a9..e6af1d77b 100644
--- a/server/main-api/src/setup/database/mod.rs
+++ b/server/main-api/src/setup/database/mod.rs
@@ -1,39 +1,41 @@
-use std::time::Instant;
+use tracing::{debug, debug_span, info, info_span};
 
-use tracing::{debug, info};
+use crate::limited::vec::LimitedVec;
 
 mod alias;
 mod data;
 
+#[tracing::instrument(skip(pool))]
 pub async fn setup(pool: &sqlx::PgPool) -> Result<(), crate::BoxedError> {
     info!("setting up the database");
     sqlx::migrate!("./migrations").run(pool).await?;
     info!("migrations complete");
     Ok(())
 }
 
+#[tracing::instrument(skip(pool))]
 pub async fn load_data(pool: &sqlx::PgPool) -> Result<(), crate::BoxedError> {
-    let status = data::download_status().await?;
+    let status = data::download_status().await?.0;
     let new_keys = status
         .clone()
         .into_iter()
         .map(|(k, _)| k)
-        .collect::<Vec<String>>();
-    let new_hashes = status.into_iter().map(|(_, h)| h).collect::<Vec<i64>>();
-    info!("deleting old data");
+        .collect::<LimitedVec<String>>();
+    let new_hashes = status
+        .into_iter()
+        .map(|(_, h)| h)
+        .collect::<LimitedVec<i64>>();
     {
-        let start = Instant::now();
+        let _span = info_span!("deleting old data").entered();
         let mut tx = pool.begin().await?;
         cleanup_deleted(&new_keys, &mut tx).await?;
         tx.commit().await?;
-        debug!("deleted old data in {elapsed:?}", elapsed = start.elapsed());
     }
-
-    debug!("finding changed data");
-    let keys_which_need_updating =
-        find_keys_which_need_updating(pool, &new_keys, &new_hashes).await?;
-
+    let keys_which_need_updating = {
+        let _span = debug_span!("finding changed data").entered();
+        find_keys_which_need_updating(pool, &new_keys, &new_hashes).await?
+    };
     if !keys_which_need_updating.is_empty() {
-        info!("loading changed {} data", keys_which_need_updating.len());
+        let _span = info_span!("loading changed data").entered();
         let data = data::download_updates(&keys_which_need_updating).await?;
         let mut tx = pool.begin().await?;
         data::load_all_to_db(data, &mut tx).await?;
@@ -41,7 +43,7 @@ pub async fn load_data(pool: &sqlx::PgPool) -> Result<(), crate::BoxedError> {
     }
 
     if !keys_which_need_updating.is_empty() {
-        info!("loading new aliases");
+        let _span = info_span!("loading new aliases").entered();
         let aliases = alias::download_updates(&keys_which_need_updating).await?;
         let mut tx = pool.begin().await?;
         alias::load_all_to_db(aliases, &mut tx).await?;
@@ -50,12 +52,12 @@ pub async fn load_data(pool: &sqlx::PgPool) -> Result<(), crate::BoxedError> {
     Ok(())
 }
 
+#[tracing::instrument(skip(pool))]
 async fn find_keys_which_need_updating(
     pool: &sqlx::PgPool,
-    keys: &[String],
-    hashes: &[i64],
-) -> Result<Vec<String>, crate::BoxedError> {
-    let start = Instant::now();
+    keys: &LimitedVec<String>,
+    hashes: &LimitedVec<i64>,
+) -> Result<LimitedVec<String>, crate::BoxedError> {
     let number_of_keys = sqlx::query_scalar!("SELECT COUNT(*) FROM de")
         .fetch_one(pool)
         .await?;
@@ -64,41 +66,57 @@ async fn find_keys_which_need_updating(
             "all {updated_cnt} keys need updating",
             updated_cnt = keys.len()
         );
-        return Ok(keys.to_vec());
+        return Ok(keys.clone());
     }
 
-    let mut keys_which_need_updating = sqlx::query_scalar!(
-        r#"
+    let mut keys_which_need_updating = {
+        let _span = debug_span!("keys_which_need_updating").entered();
+        let keys_which_need_updating = sqlx::query_scalar!(
+            r#"
 SELECT de.key
 FROM de, (SELECT * FROM UNNEST($1::text[], $2::int8[])) as expected(key,hash)
 WHERE de.key = expected.key and de.hash != expected.hash
 "#,
-        keys,
-        hashes
-    )
-    .fetch_all(pool)
-    .await?;
-    debug!("find_keys_which_need_updating (update) took {elapsed:?} and yielded {updated_cnt} updated items", elapsed = start.elapsed(), updated_cnt=keys_which_need_updating.len());
+            keys.as_ref(),
+            hashes.as_ref(),
+        )
+        .fetch_all(pool)
+        .await?;
+        debug!(
+            "{updated_cnt} updated items",
+            updated_cnt = keys_which_need_updating.len()
+        );
+        keys_which_need_updating
+    };
 
-    let mut keys_which_need_removing = sqlx::query_scalar!(
-        r#"
+    let mut keys_which_need_removing = {
+        let _span = debug_span!("keys_which_need_removing").entered();
+        let keys_which_need_removing = sqlx::query_scalar!(
+            r#"
 SELECT de.key
 FROM de
 WHERE NOT EXISTS (SELECT * FROM UNNEST($1::text[]) as expected2(key) where de.key=expected2.key)
 "#,
-        keys
-    )
-    .fetch_all(pool)
-    .await?;
-    debug!("find_keys_which_need_updating (update+delete) took {elapsed:?} and yielded {deleted_cnt} deleted items", elapsed = start.elapsed(), deleted_cnt=keys_which_need_removing.len());
+            keys.as_ref()
+        )
+        .fetch_all(pool)
+        .await?;
+        debug!(
+            "{deleted_cnt} deleted items",
+            deleted_cnt = keys_which_need_removing.len()
+        );
+        keys_which_need_removing
+    };
     keys_which_need_updating.append(&mut keys_which_need_removing);
-    Ok(keys_which_need_updating)
+    Ok(LimitedVec(keys_which_need_updating))
 }
 
+#[tracing::instrument(skip(tx))]
 async fn cleanup_deleted(
-    keys: &[String],
+    keys: &LimitedVec<String>,
     tx: &mut sqlx::Transaction<'_, sqlx::Postgres>,
 ) -> Result<(), crate::BoxedError> {
+    let keys = &keys.0;
     sqlx::query!("DELETE FROM aliases WHERE NOT EXISTS (SELECT * FROM UNNEST($1::text[]) AS expected(key) WHERE aliases.key = expected.key)", keys)
         .execute(&mut **tx)
         .await?;
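On the span guards in `load_data` above: `Span::enter` returns an RAII guard, so discarding it with `let _ = ...` would close the span immediately; binding the result of `entered()` keeps the span open for the whole block. Holding a guard across an `.await` can still misattribute time between tasks, and attaching the span to the future via the `Instrument` combinator is the documented alternative, sketched here (helper name hypothetical):

    use tracing::{info_span, Instrument};

    async fn cleanup() { /* ... */ }

    async fn load() {
        // the span is entered/exited with every poll of the future, never held across .await
        cleanup()
            .instrument(info_span!("deleting old data"))
            .await;
    }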
diff --git a/server/main-api/src/setup/meilisearch.rs b/server/main-api/src/setup/meilisearch.rs
index 7bd90376d..b34e6eae6 100644
--- a/server/main-api/src/setup/meilisearch.rs
+++ b/server/main-api/src/setup/meilisearch.rs
@@ -18,7 +18,7 @@ impl Synonyms {
         serde_yaml::from_str(include_str!("search_synonyms.yaml"))
     }
 }
-
+#[tracing::instrument(skip(client))]
 async fn wait_for_healthy(client: &Client) {
     let mut counter = 0;
     loop {
@@ -43,7 +43,7 @@ async fn wait_for_healthy(client: &Client) {
         tokio::time::sleep(Duration::from_secs(1)).await;
     }
 }
-
+#[tracing::instrument(skip(client))]
 pub async fn setup(client: &Client) -> Result<(), crate::BoxedError> {
     debug!("waiting for Meilisearch to be healthy");
     wait_for_healthy(client).await;
@@ -99,7 +99,7 @@ pub async fn setup(client: &Client) -> Result<(), crate::BoxedError> {
     }
     Ok(())
 }
-
+#[tracing::instrument(skip(client))]
 pub async fn load_data(client: &Client) -> Result<(), crate::BoxedError> {
     let start = std::time::Instant::now();
     let entries = client.index("entries");