diff --git a/server/.sqlx/query-c1d72e1bd0c218581f5b6259c85aee13b64b16adb36252d6c3135e1ce7ff0887.json b/server/.sqlx/query-39cdd385e12341d2a7f05c50d9c399f6f367e997fd3a7da6c1c48a288202e82d.json
similarity index 75%
rename from server/.sqlx/query-c1d72e1bd0c218581f5b6259c85aee13b64b16adb36252d6c3135e1ce7ff0887.json
rename to server/.sqlx/query-39cdd385e12341d2a7f05c50d9c399f6f367e997fd3a7da6c1c48a288202e82d.json
index 371820156..2d3ac985a 100644
--- a/server/.sqlx/query-c1d72e1bd0c218581f5b6259c85aee13b64b16adb36252d6c3135e1ce7ff0887.json
+++ b/server/.sqlx/query-39cdd385e12341d2a7f05c50d9c399f6f367e997fd3a7da6c1c48a288202e82d.json
@@ -1,40 +1,45 @@
 {
   "db_name": "PostgreSQL",
-  "query": "SELECT name,last_calendar_scrape_at,calendar_url,type,type_common_name,lat,lon FROM en WHERE key = $1",
+  "query": "SELECT key,name,last_calendar_scrape_at,calendar_url,type,type_common_name,lat,lon FROM en WHERE key = $1",
   "describe": {
     "columns": [
       {
         "ordinal": 0,
-        "name": "name",
+        "name": "key",
         "type_info": "Text"
       },
       {
         "ordinal": 1,
+        "name": "name",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 2,
         "name": "last_calendar_scrape_at",
         "type_info": "Timestamptz"
       },
       {
-        "ordinal": 2,
+        "ordinal": 3,
         "name": "calendar_url",
         "type_info": "Text"
       },
       {
-        "ordinal": 3,
+        "ordinal": 4,
         "name": "type",
         "type_info": "Text"
       },
       {
-        "ordinal": 4,
+        "ordinal": 5,
         "name": "type_common_name",
         "type_info": "Text"
       },
       {
-        "ordinal": 5,
+        "ordinal": 6,
         "name": "lat",
         "type_info": "Float8"
       },
       {
-        "ordinal": 6,
+        "ordinal": 7,
         "name": "lon",
         "type_info": "Float8"
       }
@@ -45,6 +50,7 @@
       ]
     },
     "nullable": [
+      false,
       false,
       true,
       true,
@@ -54,5 +60,5 @@
       false
     ]
   },
-  "hash": "c1d72e1bd0c218581f5b6259c85aee13b64b16adb36252d6c3135e1ce7ff0887"
+  "hash": "39cdd385e12341d2a7f05c50d9c399f6f367e997fd3a7da6c1c48a288202e82d"
 }
diff --git a/server/.sqlx/query-c7501ce05d89b1453354acf8113b766a50309ca0b3dfb34b523468c5d13a4409.json b/server/.sqlx/query-77fcf65552da24afce976f79421934413c43f978f3608a08883919e3d3a983bf.json
similarity index 75%
rename from server/.sqlx/query-c7501ce05d89b1453354acf8113b766a50309ca0b3dfb34b523468c5d13a4409.json
rename to server/.sqlx/query-77fcf65552da24afce976f79421934413c43f978f3608a08883919e3d3a983bf.json
index 3b7205a88..d0083d63f 100644
--- a/server/.sqlx/query-c7501ce05d89b1453354acf8113b766a50309ca0b3dfb34b523468c5d13a4409.json
+++ b/server/.sqlx/query-77fcf65552da24afce976f79421934413c43f978f3608a08883919e3d3a983bf.json
@@ -1,40 +1,45 @@
 {
   "db_name": "PostgreSQL",
-  "query": "SELECT name,last_calendar_scrape_at,calendar_url,type,type_common_name,lat,lon FROM de WHERE key = $1",
+  "query": "SELECT key,name,last_calendar_scrape_at,calendar_url,type,type_common_name,lat,lon FROM de WHERE key = $1",
   "describe": {
     "columns": [
       {
         "ordinal": 0,
-        "name": "name",
+        "name": "key",
         "type_info": "Text"
       },
       {
         "ordinal": 1,
+        "name": "name",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 2,
         "name": "last_calendar_scrape_at",
         "type_info": "Timestamptz"
       },
       {
-        "ordinal": 2,
+        "ordinal": 3,
         "name": "calendar_url",
         "type_info": "Text"
       },
       {
-        "ordinal": 3,
+        "ordinal": 4,
         "name": "type",
         "type_info": "Text"
       },
       {
-        "ordinal": 4,
+        "ordinal": 5,
         "name": "type_common_name",
         "type_info": "Text"
       },
       {
-        "ordinal": 5,
+        "ordinal": 6,
         "name": "lat",
         "type_info": "Float8"
       },
       {
-        "ordinal": 6,
+        "ordinal": 7,
         "name": "lon",
         "type_info": "Float8"
       }
@@ -45,6 +50,7 @@
       ]
     },
     "nullable": [
+      false,
       false,
       true,
       true,
@@ -54,5 +60,5 @@
       false
     ]
   },
-  "hash": "c7501ce05d89b1453354acf8113b766a50309ca0b3dfb34b523468c5d13a4409"
+  "hash": "77fcf65552da24afce976f79421934413c43f978f3608a08883919e3d3a983bf"
 }
"77fcf65552da24afce976f79421934413c43f978f3608a08883919e3d3a983bf" } diff --git a/server/.sqlx/query-c76a20f4df953be9afcab6409a0434b86b92725b37511ea1df10e848a3e344f4.json b/server/.sqlx/query-c76a20f4df953be9afcab6409a0434b86b92725b37511ea1df10e848a3e344f4.json new file mode 100644 index 000000000..ef7fdf9f5 --- /dev/null +++ b/server/.sqlx/query-c76a20f4df953be9afcab6409a0434b86b92725b37511ea1df10e848a3e344f4.json @@ -0,0 +1,52 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT key,name,last_calendar_scrape_at,calendar_url,type,type_common_name FROM de WHERE key = ANY($1::text[])", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "key", + "type_info": "Text" + }, + { + "ordinal": 1, + "name": "name", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "last_calendar_scrape_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 3, + "name": "calendar_url", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "type", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "type_common_name", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "TextArray" + ] + }, + "nullable": [ + false, + false, + true, + true, + false, + false + ] + }, + "hash": "c76a20f4df953be9afcab6409a0434b86b92725b37511ea1df10e848a3e344f4" +} diff --git a/server/main-api/src/calendar/mod.rs b/server/main-api/src/calendar/mod.rs index 94d4723ba..785800ac9 100644 --- a/server/main-api/src/calendar/mod.rs +++ b/server/main-api/src/calendar/mod.rs @@ -1,11 +1,11 @@ +use std::collections::HashMap; use actix_web::{get, web, HttpResponse}; use chrono::{DateTime, Utc}; use log::error; use serde::Deserialize; use sqlx::PgPool; -use crate::calendar::models::Event; -use crate::models::Location; +use crate::calendar::models::{CalendarLocation, Event, LocationEvents}; mod connectum; mod models; @@ -13,74 +13,101 @@ pub mod refresh; #[derive(Deserialize, Debug)] pub struct QueryArguments { + ids: Vec, /// eg. 2039-01-19T03:14:07+1 start_after: DateTime, /// eg. 2042-01-07T00:00:00 UTC end_before: DateTime, } -#[get("/api/calendar/{id}")] +#[get("/api/calendar")] pub async fn calendar_handler( - params: web::Path, web::Query(args): web::Query, data: web::Data, ) -> HttpResponse { - let id = params - .into_inner() - .replace(|c: char| c.is_whitespace() || c.is_control(), ""); - let location = match get_location(&data.db, &id).await { - Err(e) => { - error!("could not refetch due to {e:?}"); - return HttpResponse::InternalServerError() - .body("could not get calendar entries, please try again later"); - } - Ok(None) => { - return HttpResponse::NotFound() - .content_type("text/plain") - .body("Room not found"); - } - Ok(Some(loc)) => loc, + let ids = args.ids.into_iter().map(|s| s.replace(|c: char| c.is_whitespace() || c.is_control(), "")).collect::>(); + if ids.len() > 10 { + return HttpResponse::BadRequest() + .body("Too many ids to query. We suspect that users don't need this. 
+            .body("Too many ids to query. We suspect that users don't need this. If you need this limit increased, please send us a message");
     };
-    let Some(last_sync) = location.last_calendar_scrape_at else {
-        return HttpResponse::ServiceUnavailable()
-            .body("This calendar entry is currently in the process of being scraped, please try again later");
+    if ids.is_empty() {
+        return HttpResponse::BadRequest()
+            .body("No id requested");
     };
-    let Some(calendar_url) = location.calendar_url else {
-        return HttpResponse::NotFound()
-            .content_type("text/plain")
-            .body("Room does not have a calendar");
+    let locations = match get_locations(&data.db, &ids).await {
+        Ok(l) => l,
+        Err(e) => return e,
     };
-    match get_from_db(&data.db, &id, &args.start_after, &args.end_before).await {
-        Ok(events) => HttpResponse::Ok().json(models::Events {
-            events,
-            last_sync,
-            calendar_url,
-        }),
+    if let Err(e) = validate_locations(&ids, &locations) {
+        return e;
+    }
+    match get_from_db(&data.db, &locations, &args.start_after, &args.end_before).await {
+        Ok(events) => HttpResponse::Ok().json(events),
         Err(e) => {
-            error!("could not get entries from the db for {id} because {e:?}");
+            error!("could not get entries from the db for {ids:?} because {e:?}");
             HttpResponse::InternalServerError()
                 .body("could not get calendar entries, please try again later")
         }
     }
 }
 
-async fn get_location(pool: &PgPool, id: &str) -> Result<Option<Location>, sqlx::Error> {
-    sqlx::query_as!(Location, "SELECT name,last_calendar_scrape_at,calendar_url,type,type_common_name,lat,lon FROM de WHERE key = $1", id)
-        .fetch_optional(pool)
-        .await
+fn validate_locations(ids: &[String], locations: &[CalendarLocation]) -> Result<(), HttpResponse> {
+    for id in ids {
+        if !locations.iter().any(|l| &l.key == id) {
+            return Err(HttpResponse::BadRequest()
+                .body(format!("Requested id {id} does not exist")));
+        }
+    }
+    assert_eq!(locations.len(), ids.len());
+    for loc in locations {
+        if loc.last_calendar_scrape_at.is_none() {
+            return Err(HttpResponse::ServiceUnavailable()
+                .body(format!("Room {key}/{url:?} calendar entry is currently in the process of being scraped, please try again later", key = loc.key, url = loc.calendar_url)));
+        };
+    }
+    for loc in locations {
+        if loc.calendar_url.is_none() {
+            return Err(HttpResponse::NotFound()
+                .content_type("text/plain")
+                .body(format!("Room {key}/{url:?} does not have a calendar", key = loc.key, url = loc.calendar_url)));
+        };
+    }
+    Ok(())
+}
+
+async fn get_locations(pool: &PgPool, ids: &[String]) -> Result<Vec<CalendarLocation>, HttpResponse> {
+    match sqlx::query_as!(CalendarLocation, "SELECT key,name,last_calendar_scrape_at,calendar_url,type,type_common_name FROM de WHERE key = ANY($1::text[])", ids)
+        .fetch_all(pool)
+        .await {
+        Err(e) => {
+            error!("could not refetch due to {e:?}");
+            Err(HttpResponse::InternalServerError()
+                .body("could not get calendar entries, please try again later"))
+        }
+        Ok(locations) => Ok(locations),
+    }
 }
 
 async fn get_from_db(
     pool: &PgPool,
-    id: &str,
+    locations: &[CalendarLocation],
     start_after: &DateTime<Utc>,
     end_before: &DateTime<Utc>,
-) -> Result<Vec<Event>, crate::BoxedError> {
-    let events = sqlx::query_as!(Event, r#"SELECT id,room_code,start_at,end_at,stp_title_de,stp_title_en,stp_type,entry_type AS "entry_type!:crate::calendar::models::EventType",detailed_entry_type
+) -> Result<HashMap<String, LocationEvents>, crate::BoxedError> {
+    let mut located_events: HashMap<String, LocationEvents> = HashMap::new();
+    for location in locations {
+        let events = sqlx::query_as!(Event, r#"SELECT id,room_code,start_at,end_at,stp_title_de,stp_title_en,stp_type,entry_type AS "entry_type!:crate::calendar::models::EventType",detailed_entry_type
 FROM calendar
 WHERE room_code = $1 AND start_at >= $2 AND end_at <= $3"#,
-        id, start_after, end_before)
-        .fetch_all(pool)
-        .await?;
-    Ok(events)
+            location.key, start_after, end_before)
+        .fetch_all(pool)
+        .await?;
+        located_events.insert(
+            location.key.clone(),
+            LocationEvents {
+                location: location.clone(),
+                events,
+            });
+    }
+    Ok(located_events)
 }
diff --git a/server/main-api/src/calendar/models.rs b/server/main-api/src/calendar/models.rs
index 34b3ff39a..9d4e069eb 100644
--- a/server/main-api/src/calendar/models.rs
+++ b/server/main-api/src/calendar/models.rs
@@ -1,14 +1,45 @@
+use std::collections::HashMap;
 use chrono::{DateTime, Utc};
 use serde::{Deserialize, Serialize};
+use crate::models::Location;
 
-#[derive(Serialize, Deserialize, Debug)]
-pub(super) struct Events {
+#[derive(Serialize, Deserialize, Clone, Debug)]
+pub(super) struct EventsCollection {
+    pub(super) events: HashMap<String, LocationEvents>,
+    pub(super) max_last_sync: DateTime<Utc>,
+}
+
+#[derive(Serialize, Deserialize, Clone, Debug)]
+pub(super) struct CalendarLocation {
+    pub key: String,
+    pub name: String,
+    pub last_calendar_scrape_at: Option<DateTime<Utc>>,
+    pub calendar_url: Option<String>,
+    pub type_common_name: String,
+    pub r#type: String,
+}
+
+impl From<Location> for CalendarLocation {
+    fn from(loc: Location) -> Self {
+        Self {
+            key: loc.key,
+            name: loc.name,
+            last_calendar_scrape_at: loc.last_calendar_scrape_at,
+            calendar_url: loc.calendar_url,
+            type_common_name: loc.type_common_name,
+            r#type: loc.r#type,
+        }
+    }
+}
+
+
+#[derive(Serialize, Deserialize, Clone, Debug)]
+pub(super) struct LocationEvents {
     pub(super) events: Vec<Event>,
-    pub(super) last_sync: DateTime<Utc>,
-    pub(super) calendar_url: String,
+    pub(super) location: CalendarLocation,
 }
 
-#[derive(Serialize, Deserialize, Debug, sqlx::Type)]
+#[derive(Serialize, Deserialize, Clone, Debug, sqlx::Type)]
 pub(super) struct Event {
     pub(super) id: i32,
     /// e.g. 5121.EG.003
diff --git a/server/main-api/src/maps/mod.rs b/server/main-api/src/maps/mod.rs
index b887cb2fe..5624a34f3 100644
--- a/server/main-api/src/maps/mod.rs
+++ b/server/main-api/src/maps/mod.rs
@@ -34,11 +34,11 @@ async fn get_localised_data(
     should_use_english: bool,
 ) -> Result<Location, crate::BoxedError> {
     let result = if should_use_english {
-        sqlx::query_as!(Location, "SELECT name,last_calendar_scrape_at,calendar_url,type,type_common_name,lat,lon FROM en WHERE key = $1", id)
+        sqlx::query_as!(Location, "SELECT key,name,last_calendar_scrape_at,calendar_url,type,type_common_name,lat,lon FROM en WHERE key = $1", id)
             .fetch_all(conn)
             .await
     } else {
-        sqlx::query_as!(Location, "SELECT name,last_calendar_scrape_at,calendar_url,type,type_common_name,lat,lon FROM de WHERE key = $1", id)
+        sqlx::query_as!(Location, "SELECT key,name,last_calendar_scrape_at,calendar_url,type,type_common_name,lat,lon FROM de WHERE key = $1", id)
             .fetch_all(conn)
             .await
     };
diff --git a/server/main-api/src/models.rs b/server/main-api/src/models.rs
index 04424a9cb..a1cb7a0ba 100644
--- a/server/main-api/src/models.rs
+++ b/server/main-api/src/models.rs
@@ -2,6 +2,7 @@ use chrono::{DateTime, Utc};
 
 #[derive(Debug, Clone)]
 pub struct Location {
+    pub key: String,
     pub name: String,
     pub last_calendar_scrape_at: Option<DateTime<Utc>>,
     pub calendar_url: Option<String>,
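
For reference, a minimal client-side sketch of how the batched `/api/calendar` endpoint introduced above could be called. Everything in it is an assumption for illustration, not part of the patch: the base URL, the reqwest/tokio/serde_json client crates, and the encoding of `ids` as repeated query parameters. Note that actix-web's default `web::Query` deserializer (serde_urlencoded) does not decode `Vec<String>` from repeated keys out of the box, so the server may additionally need a deserializer such as serde_qs for this exact wire format.

```rust
// Hypothetical usage sketch, not part of the diff above.
// Assumes the patched server is listening on localhost:8080 and accepts
// `ids` as a repeated query parameter (`?ids=a&ids=b`).
//
// [dependencies]
// reqwest = { version = "0.11", features = ["json"] }
// serde_json = "1"
// tokio = { version = "1", features = ["full"] }
use std::collections::HashMap;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let response = reqwest::Client::new()
        .get("http://localhost:8080/api/calendar")
        // room keys are illustrative; any key present in the `de` table works
        .query(&[
            ("ids", "5121.EG.003"),
            ("ids", "5121.EG.001"),
            ("start_after", "2024-01-01T00:00:00Z"),
            ("end_before", "2024-12-31T23:59:59Z"),
        ])
        .send()
        .await?
        .error_for_status()?;
    // The handler serializes a HashMap<String, LocationEvents>:
    // one entry per requested room key.
    let events: HashMap<String, serde_json::Value> = response.json().await?;
    for (key, location_events) in &events {
        println!("{key}: {location_events}");
    }
    Ok(())
}
```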