Skip to content

Commit

Permalink
Made sure that the calendar api can accept multi room calendar requests
Browse files Browse the repository at this point in the history
  • Loading branch information
CommanderStorm committed Jun 23, 2024
1 parent 16bde1d commit 7a1d0e2
Show file tree
Hide file tree
Showing 7 changed files with 190 additions and 67 deletions.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

115 changes: 71 additions & 44 deletions server/main-api/src/calendar/mod.rs
Original file line number Diff line number Diff line change
@@ -1,86 +1,113 @@
use std::collections::HashMap;
use actix_web::{get, web, HttpResponse};
use chrono::{DateTime, Utc};
use log::error;
use serde::Deserialize;
use sqlx::PgPool;

use crate::calendar::models::Event;
use crate::models::Location;
use crate::calendar::models::{CalendarLocation, Event, LocationEvents};

mod connectum;
mod models;
pub mod refresh;

/// Query parameters accepted by [`calendar_handler`].
#[derive(Deserialize, Debug)]
pub struct QueryArguments {
    /// Location keys whose calendars are requested.
    /// The handler rejects requests with zero ids or more than 10 ids.
    // NOTE(review): deserializing `Vec<String>` from a query string usually
    // needs a non-default deserializer (e.g. serde_qs) — confirm against the
    // actix-web extractor configuration used by this app.
    ids: Vec<String>,
    /// Only include events starting at/after this instant,
    /// e.g. 2039-01-19T03:14:07+1
    start_after: DateTime<Utc>,
    /// Only include events ending at/before this instant,
    /// e.g. 2042-01-07T00:00:00 UTC
    end_before: DateTime<Utc>,
}

#[get("/api/calendar/{id}")]
#[get("/api/calendar")]
pub async fn calendar_handler(
params: web::Path<String>,
web::Query(args): web::Query<QueryArguments>,
data: web::Data<crate::AppData>,
) -> HttpResponse {
let id = params
.into_inner()
.replace(|c: char| c.is_whitespace() || c.is_control(), "");
let location = match get_location(&data.db, &id).await {
Err(e) => {
error!("could not refetch due to {e:?}");
return HttpResponse::InternalServerError()
.body("could not get calendar entries, please try again later");
}
Ok(None) => {
return HttpResponse::NotFound()
.content_type("text/plain")
.body("Room not found");
}
Ok(Some(loc)) => loc,
let ids = args.ids.into_iter().map(|s| s.replace(|c: char| c.is_whitespace() || c.is_control(), "")).collect::<Vec<String>>();
if ids.len() > 10 {
return HttpResponse::BadRequest()
.body("Too many ids to query. We suspect that users don't need this. If you need this limit increased, please send us a message");
};
let Some(last_sync) = location.last_calendar_scrape_at else {
return HttpResponse::ServiceUnavailable()
.body("This calendar entry is currently in the process of being scraped, please try again later");
if ids.is_empty() {
return HttpResponse::BadRequest()
.body("No id requested");
};
let Some(calendar_url) = location.calendar_url else {
return HttpResponse::NotFound()
.content_type("text/plain")
.body("Room does not have a calendar");
let locations = match get_locations(&data.db, &ids).await {
Ok(l) => l,
Err(e) => return e
};
match get_from_db(&data.db, &id, &args.start_after, &args.end_before).await {
Ok(events) => HttpResponse::Ok().json(models::Events {
events,
last_sync,
calendar_url,
}),
if let Err(e) = validate_locations(&ids,&locations){
return e;
}
match get_from_db(&data.db, &locations, &args.start_after, &args.end_before).await {
Ok(events) => HttpResponse::Ok().json(events),
Err(e) => {
error!("could not get entries from the db for {id} because {e:?}");
error!("could not get entries from the db for {ids:?} because {e:?}");
HttpResponse::InternalServerError()
.body("could not get calendar entries, please try again later")
}
}
}

async fn get_location(pool: &PgPool, id: &str) -> Result<Option<Location>, sqlx::Error> {
sqlx::query_as!(Location, "SELECT name,last_calendar_scrape_at,calendar_url,type,type_common_name,lat,lon FROM de WHERE key = $1", id)
.fetch_optional(pool)
.await
/// Checks that every requested id resolved to a servable location.
///
/// Returns the ready-to-send error response on the first violation:
/// - `400 Bad Request` if a requested id does not exist,
/// - `503 Service Unavailable` if a location has not been scraped yet,
/// - `404 Not Found` if a location does not have a calendar at all.
fn validate_locations(ids: &[String], locations: &[CalendarLocation]) -> Result<(), HttpResponse> {
    for id in ids {
        if !locations.iter().any(|l| &l.key == id) {
            // Bug fix: the message was a plain string literal, so clients saw
            // the literal text "{id}" instead of the offending id.
            return Err(HttpResponse::BadRequest()
                .body(format!("Requested id {id} does not exist")));
        }
    }
    // Every id was found above, so the result set must line up 1:1.
    // NOTE(review): duplicate ids in one request would trip this assert —
    // confirm deduplication happens upstream, or dedup before querying.
    assert_eq!(locations.len(), ids.len());
    for loc in locations {
        if loc.last_calendar_scrape_at.is_none() {
            return Err(HttpResponse::ServiceUnavailable()
                .body(format!("Room {key}/{url:?} calendar entry is currently in the process of being scraped, please try again later", key = loc.key, url = loc.calendar_url)));
        }
    }
    for loc in locations {
        if loc.calendar_url.is_none() {
            return Err(HttpResponse::NotFound()
                .content_type("text/plain")
                .body(format!("Room {key}/{url:?} does not have a calendar", key = loc.key, url = loc.calendar_url)));
        }
    }
    Ok(())
}

/// Fetches all locations matching the given keys from the `de` table.
///
/// On a database error the error is logged and a ready-to-send
/// `500 Internal Server Error` response is returned instead.
async fn get_locations(pool: &PgPool, ids: &[String]) -> Result<Vec<CalendarLocation>, HttpResponse> {
    sqlx::query_as!(CalendarLocation, "SELECT key,name,last_calendar_scrape_at,calendar_url,type,type_common_name FROM de WHERE key = ANY($1::text[])", ids)
        .fetch_all(pool)
        .await
        .map_err(|e| {
            error!("could not refetch due to {e:?}");
            HttpResponse::InternalServerError()
                .body("could not get calendar entries, please try again later")
        })
}

async fn get_from_db(
pool: &PgPool,
id: &str,
locations: &[CalendarLocation],
start_after: &DateTime<Utc>,
end_before: &DateTime<Utc>,
) -> Result<Vec<Event>, crate::BoxedError> {
let events = sqlx::query_as!(Event, r#"SELECT id,room_code,start_at,end_at,stp_title_de,stp_title_en,stp_type,entry_type AS "entry_type!:crate::calendar::models::EventType",detailed_entry_type
) -> Result<HashMap<String, LocationEvents>, crate::BoxedError> {
let mut located_events: HashMap<String, LocationEvents> = HashMap::new();
for location in locations {
let events = sqlx::query_as!(Event, r#"SELECT id,room_code,start_at,end_at,stp_title_de,stp_title_en,stp_type,entry_type AS "entry_type!:crate::calendar::models::EventType",detailed_entry_type
FROM calendar
WHERE room_code = $1 AND start_at >= $2 AND end_at <= $3"#,
id, start_after, end_before)
.fetch_all(pool)
.await?;
Ok(events)
location.key, start_after, end_before)
.fetch_all(pool)
.await?;
located_events.insert(
location.key.clone(),
LocationEvents {
location: location.clone(),
events,
});
}
Ok(located_events)
}
41 changes: 36 additions & 5 deletions server/main-api/src/calendar/models.rs
Original file line number Diff line number Diff line change
@@ -1,14 +1,45 @@
use std::collections::HashMap;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use crate::models::Location;

#[derive(Serialize, Deserialize, Debug)]
pub(super) struct Events {
#[derive(Serialize, Deserialize, Clone, Debug)]
pub(super) struct EventsCollection {
pub(super) events: HashMap<String, LocationEvents>,
pub(super) max_last_sync: DateTime<Utc>,
}

/// Calendar-relevant subset of a [`Location`] (see the `From` impl below),
/// as selected from the `de` table.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub(super) struct CalendarLocation {
    // primary key of the location in the `de` table
    pub key: String,
    pub name: String,
    // `None` until the calendar scraper has processed this location
    pub last_calendar_scrape_at: Option<DateTime<Utc>>,
    // `None` when the location has no calendar at all
    pub calendar_url: Option<String>,
    pub type_common_name: String,
    // `type` is a Rust keyword, hence the raw identifier
    pub r#type: String,
}

// Straight field-by-field projection; drops the fields of `Location`
// (e.g. coordinates) that are irrelevant for calendar responses.
impl From<Location> for CalendarLocation {
    fn from(loc: Location) -> Self {
        Self {
            key: loc.key,
            name: loc.name,
            last_calendar_scrape_at: loc.last_calendar_scrape_at,
            calendar_url: loc.calendar_url,
            type_common_name: loc.type_common_name,
            r#type: loc.r#type,
        }
    }
}


/// The events of a single location, bundled with that location's metadata.
//
// NOTE(review): this span was diff residue — the removed `Events` struct's
// `last_sync`/`calendar_url` fields were fused into this declaration;
// reconstructed to the new two-field version actually built in `get_from_db`.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub(super) struct LocationEvents {
    pub(super) events: Vec<Event>,
    pub(super) location: CalendarLocation,
}

#[derive(Serialize, Deserialize, Debug, sqlx::Type)]
#[derive(Serialize, Deserialize, Clone, Debug, sqlx::Type)]
pub(super) struct Event {
pub(super) id: i32,
/// e.g. 5121.EG.003
Expand Down
Loading

0 comments on commit 7a1d0e2

Please sign in to comment.