From 4ce9392b3b5f7b510d55ff72be913b654cb13baa Mon Sep 17 00:00:00 2001 From: Frank Elsinga Date: Sun, 4 Aug 2024 20:37:07 +0200 Subject: [PATCH] migrated partially to the exploded schema added a migration to explode the schema Necessary to migrate to a parquet based initialisation --- ...02ebc44b6c9061b717e78a97c0a9549b5a84f.json | 46 +++++ ...2285a4722fa9ff9d44be1f705964bf593dc5.json} | 36 +++- ...815374ee3368f45250987475cce81910eec57.json | 46 +++++ ...0de7a221ce7d9bb8378028c695e1b665c55c5.json | 22 -- ...d9b32f08c86626db9f4d63249dfc2cd77b80c.json | 22 -- ...271f091f87dff0acdd61f74e97b36e47a6f1.json} | 36 +++- server/Cargo.toml | 1 + ...04182325_exploded_room_data_model.down.sql | 20 ++ ...0804182325_exploded_room_data_model.up.sql | 99 +++++++++ server/src/build.rs | 20 +- server/src/calendar/models.rs | 16 -- server/src/locations/details.rs | 191 ++++++++++++++++-- server/src/locations/preview.rs | 41 +++- server/src/maps/overlay_map.rs | 33 ++- server/src/models.rs | 15 -- 15 files changed, 521 insertions(+), 123 deletions(-) create mode 100644 server/.sqlx/query-1225dec3f33fb9e257be062530e02ebc44b6c9061b717e78a97c0a9549b5a84f.json rename server/.sqlx/{query-39cdd385e12341d2a7f05c50d9c399f6f367e997fd3a7da6c1c48a288202e82d.json => query-4bd354d9fc17a42a89874546dc5d2285a4722fa9ff9d44be1f705964bf593dc5.json} (58%) create mode 100644 server/.sqlx/query-4db847e83ba3061db424551812c815374ee3368f45250987475cce81910eec57.json delete mode 100644 server/.sqlx/query-9f518183559171969ae448a0f3f0de7a221ce7d9bb8378028c695e1b665c55c5.json delete mode 100644 server/.sqlx/query-a16d239da728e396e283aea0782d9b32f08c86626db9f4d63249dfc2cd77b80c.json rename server/.sqlx/{query-77fcf65552da24afce976f79421934413c43f978f3608a08883919e3d3a983bf.json => query-c6b3edf0fd2ae3d44ac6a041dabc271f091f87dff0acdd61f74e97b36e47a6f1.json} (58%) create mode 100644 server/migrations/20240804182325_exploded_room_data_model.down.sql create mode 100644 
server/migrations/20240804182325_exploded_room_data_model.up.sql diff --git a/server/.sqlx/query-1225dec3f33fb9e257be062530e02ebc44b6c9061b717e78a97c0a9549b5a84f.json b/server/.sqlx/query-1225dec3f33fb9e257be062530e02ebc44b6c9061b717e78a97c0a9549b5a84f.json new file mode 100644 index 000000000..b3ea124ae --- /dev/null +++ b/server/.sqlx/query-1225dec3f33fb9e257be062530e02ebc44b6c9061b717e78a97c0a9549b5a84f.json @@ -0,0 +1,46 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT type,lat,lon,name,type_common_name FROM de WHERE key = $1", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "type", + "type_info": "Text" + }, + { + "ordinal": 1, + "name": "lat", + "type_info": "Float8" + }, + { + "ordinal": 2, + "name": "lon", + "type_info": "Float8" + }, + { + "ordinal": 3, + "name": "name", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "type_common_name", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + false, + false, + false + ] + }, + "hash": "1225dec3f33fb9e257be062530e02ebc44b6c9061b717e78a97c0a9549b5a84f" +} diff --git a/server/.sqlx/query-39cdd385e12341d2a7f05c50d9c399f6f367e997fd3a7da6c1c48a288202e82d.json b/server/.sqlx/query-4bd354d9fc17a42a89874546dc5d2285a4722fa9ff9d44be1f705964bf593dc5.json similarity index 58% rename from server/.sqlx/query-39cdd385e12341d2a7f05c50d9c399f6f367e997fd3a7da6c1c48a288202e82d.json rename to server/.sqlx/query-4bd354d9fc17a42a89874546dc5d2285a4722fa9ff9d44be1f705964bf593dc5.json index 2d3ac985a..46aa7c93f 100644 --- a/server/.sqlx/query-39cdd385e12341d2a7f05c50d9c399f6f367e997fd3a7da6c1c48a288202e82d.json +++ b/server/.sqlx/query-4bd354d9fc17a42a89874546dc5d2285a4722fa9ff9d44be1f705964bf593dc5.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "SELECT key,name,last_calendar_scrape_at,calendar_url,type,type_common_name,lat,lon FROM en WHERE key = $1", + "query": "SELECT 
key,name,last_calendar_scrape_at,calendar_url,type,type_common_name,lat,lon,coordinate_source,rank_type,rank_combined,rank_usage,comment\n FROM en\n WHERE key = $1", "describe": { "columns": [ { @@ -42,6 +42,31 @@ "ordinal": 7, "name": "lon", "type_info": "Float8" + }, + { + "ordinal": 8, + "name": "coordinate_source", + "type_info": "Text" + }, + { + "ordinal": 9, + "name": "rank_type", + "type_info": "Text" + }, + { + "ordinal": 10, + "name": "rank_combined", + "type_info": "Text" + }, + { + "ordinal": 11, + "name": "rank_usage", + "type_info": "Text" + }, + { + "ordinal": 12, + "name": "comment", + "type_info": "Text" } ], "parameters": { @@ -57,8 +82,13 @@ false, false, false, - false + false, + false, + false, + false, + false, + true ] }, - "hash": "39cdd385e12341d2a7f05c50d9c399f6f367e997fd3a7da6c1c48a288202e82d" + "hash": "4bd354d9fc17a42a89874546dc5d2285a4722fa9ff9d44be1f705964bf593dc5" } diff --git a/server/.sqlx/query-4db847e83ba3061db424551812c815374ee3368f45250987475cce81910eec57.json b/server/.sqlx/query-4db847e83ba3061db424551812c815374ee3368f45250987475cce81910eec57.json new file mode 100644 index 000000000..7d5f17f87 --- /dev/null +++ b/server/.sqlx/query-4db847e83ba3061db424551812c815374ee3368f45250987475cce81910eec57.json @@ -0,0 +1,46 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT type,lat,lon,name,type_common_name FROM en WHERE key = $1", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "type", + "type_info": "Text" + }, + { + "ordinal": 1, + "name": "lat", + "type_info": "Float8" + }, + { + "ordinal": 2, + "name": "lon", + "type_info": "Float8" + }, + { + "ordinal": 3, + "name": "name", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "type_common_name", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + false, + false, + false + ] + }, + "hash": "4db847e83ba3061db424551812c815374ee3368f45250987475cce81910eec57" +} diff --git 
a/server/.sqlx/query-9f518183559171969ae448a0f3f0de7a221ce7d9bb8378028c695e1b665c55c5.json b/server/.sqlx/query-9f518183559171969ae448a0f3f0de7a221ce7d9bb8378028c695e1b665c55c5.json deleted file mode 100644 index 3daa59827..000000000 --- a/server/.sqlx/query-9f518183559171969ae448a0f3f0de7a221ce7d9bb8378028c695e1b665c55c5.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "SELECT data FROM de WHERE key = $1", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "data", - "type_info": "Jsonb" - } - ], - "parameters": { - "Left": [ - "Text" - ] - }, - "nullable": [ - false - ] - }, - "hash": "9f518183559171969ae448a0f3f0de7a221ce7d9bb8378028c695e1b665c55c5" -} diff --git a/server/.sqlx/query-a16d239da728e396e283aea0782d9b32f08c86626db9f4d63249dfc2cd77b80c.json b/server/.sqlx/query-a16d239da728e396e283aea0782d9b32f08c86626db9f4d63249dfc2cd77b80c.json deleted file mode 100644 index 5034d13df..000000000 --- a/server/.sqlx/query-a16d239da728e396e283aea0782d9b32f08c86626db9f4d63249dfc2cd77b80c.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "SELECT data FROM en WHERE key = $1", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "data", - "type_info": "Jsonb" - } - ], - "parameters": { - "Left": [ - "Text" - ] - }, - "nullable": [ - false - ] - }, - "hash": "a16d239da728e396e283aea0782d9b32f08c86626db9f4d63249dfc2cd77b80c" -} diff --git a/server/.sqlx/query-77fcf65552da24afce976f79421934413c43f978f3608a08883919e3d3a983bf.json b/server/.sqlx/query-c6b3edf0fd2ae3d44ac6a041dabc271f091f87dff0acdd61f74e97b36e47a6f1.json similarity index 58% rename from server/.sqlx/query-77fcf65552da24afce976f79421934413c43f978f3608a08883919e3d3a983bf.json rename to server/.sqlx/query-c6b3edf0fd2ae3d44ac6a041dabc271f091f87dff0acdd61f74e97b36e47a6f1.json index d0083d63f..06117805d 100644 --- a/server/.sqlx/query-77fcf65552da24afce976f79421934413c43f978f3608a08883919e3d3a983bf.json +++ 
b/server/.sqlx/query-c6b3edf0fd2ae3d44ac6a041dabc271f091f87dff0acdd61f74e97b36e47a6f1.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "SELECT key,name,last_calendar_scrape_at,calendar_url,type,type_common_name,lat,lon FROM de WHERE key = $1", + "query": "SELECT key,name,last_calendar_scrape_at,calendar_url,type,type_common_name,lat,lon,coordinate_source,rank_type,rank_combined,rank_usage,comment\n FROM de\n WHERE key = $1", "describe": { "columns": [ { @@ -42,6 +42,31 @@ "ordinal": 7, "name": "lon", "type_info": "Float8" + }, + { + "ordinal": 8, + "name": "coordinate_source", + "type_info": "Text" + }, + { + "ordinal": 9, + "name": "rank_type", + "type_info": "Text" + }, + { + "ordinal": 10, + "name": "rank_combined", + "type_info": "Text" + }, + { + "ordinal": 11, + "name": "rank_usage", + "type_info": "Text" + }, + { + "ordinal": 12, + "name": "comment", + "type_info": "Text" } ], "parameters": { @@ -57,8 +82,13 @@ false, false, false, - false + false, + false, + false, + false, + false, + true ] }, - "hash": "77fcf65552da24afce976f79421934413c43f978f3608a08883919e3d3a983bf" + "hash": "c6b3edf0fd2ae3d44ac6a041dabc271f091f87dff0acdd61f74e97b36e47a6f1" } diff --git a/server/Cargo.toml b/server/Cargo.toml index bea0a6f23..0a602cf9c 100644 --- a/server/Cargo.toml +++ b/server/Cargo.toml @@ -73,6 +73,7 @@ tempfile = "3.10.1" base64 = "0.22.1" time = "0.3.36" polars = { version = "0.41.3", features = ["parquet", "dtype-struct"] } +anyhow = "1.0.86" #polars = { git = "https://github.com/CommanderStorm/polars.git", branch = "serialisation-experiment", features = ["parquet", "serde", "dtype-full"] } [dev-dependencies] diff --git a/server/migrations/20240804182325_exploded_room_data_model.down.sql b/server/migrations/20240804182325_exploded_room_data_model.down.sql new file mode 100644 index 000000000..f2712a7f6 --- /dev/null +++ b/server/migrations/20240804182325_exploded_room_data_model.down.sql @@ -0,0 +1,20 @@ +-- Add down migration script here + +alter 
table de drop column coordinate_source;
+alter table en drop column coordinate_source;
+alter table de drop column coordinate_accuracy;
+alter table en drop column coordinate_accuracy;
+alter table de drop column rank_type;
+alter table en drop column rank_type;
+alter table de drop column rank_combined;
+alter table en drop column rank_combined;
+alter table de drop column rank_usage;
+alter table en drop column rank_usage;
+alter table de drop column comment;
+alter table en drop column comment;
+
+DROP MATERIALIZED VIEW ranking_factors;
+DROP MATERIALIZED VIEW operators_de;
+DROP MATERIALIZED VIEW operators_en;
+DROP MATERIALIZED VIEW usage;
+DROP MATERIALIZED VIEW computed_properties;
+DROP MATERIALIZED VIEW urls_de;
+DROP MATERIALIZED VIEW urls_en;
+DROP MATERIALIZED VIEW sources;
diff --git a/server/migrations/20240804182325_exploded_room_data_model.up.sql b/server/migrations/20240804182325_exploded_room_data_model.up.sql
new file mode 100644
index 000000000..c01c51247
--- /dev/null
+++ b/server/migrations/20240804182325_exploded_room_data_model.up.sql
@@ -0,0 +1,99 @@
+-- Add up migration script here
+
+alter table de
+    add coordinate_accuracy text generated always as ((((data -> 'coords'::text) ->> 'accuracy'::text))::text) stored null;
+alter table en
+    add coordinate_accuracy text generated always as ((((data -> 'coords'::text) ->> 'accuracy'::text))::text) stored null;
+alter table de
+    add coordinate_source text generated always as ((((data -> 'coords'::text) ->> 'source'::text))::text) stored not null;
+alter table en
+    add coordinate_source text generated always as ((((data -> 'coords'::text) ->> 'source'::text))::text) stored not null;
+alter table de
+    add comment text generated always as (((data -> 'props'::text) ->> 'comment'::text)::text) stored null;
+alter table en
+    add comment text generated always as (((data -> 'props'::text) ->> 'comment'::text)::text) stored null;
+
+-- NOTE(review): the prepared .sqlx queries in this patch SELECT rank_type/rank_combined/rank_usage
+-- as non-null text columns of `de`/`en`, and the down-migration drops them, but they were never
+-- added here => add them as generated columns like the ones above
+alter table de
+    add rank_type text generated always as ((((data -> 'ranking_factors'::text) ->> 'rank_type'::text))::text) stored not null;
+alter table en
+    add rank_type text generated always as ((((data -> 'ranking_factors'::text) ->> 'rank_type'::text))::text) stored not null;
+alter table de
+    add rank_combined text generated always as ((((data -> 'ranking_factors'::text) ->> 'rank_combined'::text))::text) stored not null;
+alter table en
+    add rank_combined text generated always as ((((data -> 'ranking_factors'::text) ->> 'rank_combined'::text))::text) stored not null;
+alter table de
+    add rank_usage text generated always as ((((data -> 'ranking_factors'::text) ->> 'rank_usage'::text))::text) stored not null;
+alter table en
+    add rank_usage text generated always as ((((data -> 'ranking_factors'::text) ->> 'rank_usage'::text))::text) stored not null;
+
+CREATE MATERIALIZED VIEW ranking_factors AS
+SELECT DISTINCT
+    data -> 'id' as id,
+    data -> 'ranking_factors' ->> 'rank_type' as rank_type,
+    data -> 'ranking_factors' ->> 'rank_combined' as rank_combined,
data -> 'ranking_factors' ->> 'rank_usage' as rank_usage,
+    data -> 'ranking_factors' ->> 'rank_custom' as rank_custom,
+    data -> 'ranking_factors' ->> 'rank_boost' as rank_boost
+from de;
+
+CREATE MATERIALIZED VIEW operators_de AS
+SELECT DISTINCT data -> 'props' -> 'operator' ->> 'id' as id,
+                data -> 'props' -> 'operator' ->> 'url' as url,
+                data -> 'props' -> 'operator' ->> 'code' as code,
+                data -> 'props' -> 'operator' ->> 'name' as name
+from de;
+
+CREATE MATERIALIZED VIEW operators_en AS
+SELECT DISTINCT data -> 'props' -> 'operator' ->> 'id' as id,
+                data -> 'props' -> 'operator' ->> 'url' as url,
+                data -> 'props' -> 'operator' ->> 'code' as code,
+                data -> 'props' -> 'operator' ->> 'name' as name
+from en;
+
+CREATE MATERIALIZED VIEW usage AS
+SELECT DISTINCT data -> 'usage' ->> 'name' as name,
+                data -> 'usage' ->> 'din_277' as din_277,
+                data -> 'usage' ->> 'din_277_desc' as din_277_desc
+from de
+UNION
+DISTINCT
+SELECT DISTINCT data -> 'usage' ->> 'name' as name,
+                data -> 'usage' ->> 'din_277' as din_277,
+                data -> 'usage' ->> 'din_277_desc' as din_277_desc
+from en;
+
+CREATE MATERIALIZED VIEW computed_properties as
+(
+with facts(key, fact) as (SELECT key, JSON_ARRAY_ELEMENTS((data -> 'props' -> 'computed')::json) as fact
+                          from de),
+     extracted_facts(key, name, value) as (Select key, fact ->> 'name' as name, fact ->> 'text' as value
+                                           From facts)
+
+select distinct f.key,
+                room_keys.value as room_key,
+                address.value as address,
+                level.value as level,
+                arch_name.value as arch_name,
+                room_cnt.value as room_cnt,
+                building_cnt.value as building_cnt
+from extracted_facts f
+         left outer join extracted_facts room_keys on f.key = room_keys.key and room_keys.name = 'Gebäudekennungen'
+         left outer join extracted_facts address on f.key = address.key and address.name = 'Adresse'
+         left outer join extracted_facts level on f.key = level.key and level.name = 'Stockwerk'
+         left outer join extracted_facts arch_name on f.key = arch_name.key and
arch_name.name = 'Architekten-Name' + left outer join extracted_facts room_cnt on f.key = room_cnt.key and room_cnt.name = 'Anzahl Räume' + left outer join extracted_facts building_cnt + on f.key = building_cnt.key and building_cnt.name = 'Anzahl Gebäude' + ); + +CREATE MATERIALIZED VIEW urls_de as +( +with unrolled_urls(key, url) as (SELECT key, JSON_ARRAY_ELEMENTS((data -> 'props' ->> 'links')::json) as url + from de) +SELECT key, url ->> 'url' as url, url ->> 'text' as text +FROM unrolled_urls); + +CREATE MATERIALIZED VIEW urls_en as +( +with unrolled_urls(key, url) as (SELECT key, JSON_ARRAY_ELEMENTS((data -> 'props' ->> 'links')::json) as url + from en) +SELECT key, url ->> 'url' as url, url ->> 'text' as text +FROM unrolled_urls); + +CREATE MATERIALIZED VIEW sources as +( +with unrolled_sources(key, source) as (SELECT key, + JSON_ARRAY_ELEMENTS((data -> 'sources' -> 'base')::json) as source + from de) +SELECT key, + source ->> 'url' as url, + source ->> 'name' as name +FROM unrolled_sources +ORDER BY key, source ->> 'name'); diff --git a/server/src/build.rs b/server/src/build.rs index 3a8149ef0..5cc22a4e1 100644 --- a/server/src/build.rs +++ b/server/src/build.rs @@ -1,3 +1,21 @@ +use std::{env, fs, path::Path}; + +use typify::{TypeSpace, TypeSpaceSettings}; + fn main() { + // sqlx println!("cargo:rerun-if-changed=migrations"); -} + // types + let content = std::fs::read_to_string("../openapi.yaml").unwrap(); + let schema = serde_json::from_str::(&content).unwrap(); + + let mut type_space = TypeSpace::new(TypeSpaceSettings::default().with_struct_builder(true)); + type_space.add_root_schema(schema).unwrap(); + + let contents = + prettyplease::unparse(&syn::parse2::(type_space.to_stream()).unwrap()); + + let mut out_file = Path::new(&env::var("OUT_DIR").unwrap()).to_path_buf(); + out_file.push("codegen.rs"); + fs::write(out_file, contents).unwrap(); +} \ No newline at end of file diff --git a/server/src/calendar/models.rs b/server/src/calendar/models.rs index 
bb06b6a98..37d8e911f 100644 --- a/server/src/calendar/models.rs +++ b/server/src/calendar/models.rs @@ -2,8 +2,6 @@ use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use std::fmt::Display; -use crate::models::Location; - #[derive(Serialize, Deserialize, Clone, Debug)] pub(super) struct CalendarLocation { pub key: String, @@ -13,20 +11,6 @@ pub(super) struct CalendarLocation { pub type_common_name: String, pub r#type: String, } - -impl From for CalendarLocation { - fn from(loc: Location) -> Self { - Self { - key: loc.key, - name: loc.name, - last_calendar_scrape_at: loc.last_calendar_scrape_at, - calendar_url: loc.calendar_url, - type_common_name: loc.type_common_name, - r#type: loc.r#type, - } - } -} - #[derive(Serialize, Deserialize, Clone, Debug)] pub(super) struct LocationEvents { pub(super) events: Vec, diff --git a/server/src/locations/details.rs b/server/src/locations/details.rs index 5552c6ba5..7d46c2b17 100644 --- a/server/src/locations/details.rs +++ b/server/src/locations/details.rs @@ -1,10 +1,174 @@ +use crate::localisation; +use crate::models::LocationKeyAlias; use actix_web::{get, web, HttpResponse}; +use chrono::DateTime; +use chrono::Utc; +use serde::{Deserialize, Serialize}; use sqlx::Error::RowNotFound; use sqlx::PgPool; use tracing::error; -use crate::localisation; -use crate::models::LocationKeyAlias; +#[derive(Serialize, Debug, Clone)] +struct Usage { + id: i64, + name: String, + din_277: String, + din_277_desc: String, +} + +#[derive(Serialize, Debug, Clone)] +struct Operator { + id: String, + url: String, + code: String, + name: String, +} + +#[derive(Serialize, Debug, Clone)] +struct Url { + name: String, + url: String, +} + +#[derive(Debug, Clone)] +struct DBLocationDetails { + key: String, + name: String, + last_calendar_scrape_at: Option>, + calendar_url: Option, + r#type: String, + type_common_name: String, + lat: f64, + lon: f64, + coordinate_source: String, + coordinate_accuracy: String, + comment: Option, +} + 
+#[derive(Serialize, Deserialize, Default)] +#[serde(rename_all = "snake_case")] +enum LocationType { + #[default] + Room, + Building, + JoinedBuilding, + Area, + Site, + Campus, + Poi, +} + +#[derive(Serialize, Default)] +struct GetLocationDetails { + /// The id, that was requested + id: String, + /// The type of the entry + r#type: LocationType, + /// The type of the entry in a human-readable form + type_common_name: String, + /// The name of the entry in a human-readable form + name: String, + /// A list of alternative ids for this entry. + /// + /// Not to be confused with + /// - [`id`] which is the unique identifier or + /// - [`visual-id`] which is an alternative identifier for the entry (only displayed in the URL). + aliases: Vec, + /// The ids of the parents. + /// They are ordered as they would appear in a Breadcrumb menu. + /// See [`parent_names`] for their human names. + parents: Vec, + /// The ids of the parents. They are ordered as they would appear in a Breadcrumb menu. + /// See [`parents`] for their actual ids. + parent_names: Vec, + /// Data for the info-card table + props: LocationProps, + /// The information you need to request Images from the /cdn/{size}/{id}_{counter}.webp endpoint + imgs: Vec, + ranking_factors: RankingFactors, + /// Where we got our data from, should be displayed at the bottom of any page containing this data + sources: Sources, + /// The url, this item should be displayed at. 
Present on both redirects and normal entries, to allow for the common /view/:id path
+    redirect_url: String,
+    coords: Coordinate,
+    maps: Maps,
+    sections: Sections,
+}
+
+#[derive(Serialize, Default)]
+struct Sections {}
+
+#[derive(Serialize, Default)]
+struct Maps {}
+
+#[derive(Serialize, Default)]
+struct LocationProps {
+    comment: Option<String>,
+    last_calendar_scrape_at: Option<DateTime<Utc>>,
+    calendar_url: Option<String>,
+}
+
+#[derive(Serialize, Default)]
+struct Sources {}
+
+#[derive(Serialize)]
+struct LocationImage {}
+
+#[derive(Serialize, Default)]
+struct RankingFactors {
+    rank_combined: u32,
+    rank_type: u32,
+    rank_usage: u32,
+    rank_boost: Option<u32>,
+    rank_custom: Option<u32>,
+}
+
+#[derive(Serialize, Default)]
+struct Coordinate {
+    lat: f64,
+    lon: f64,
+    source: CoordinateSource,
+    accuracy: Option<CoordinateAccuracy>,
+}
+
+#[derive(Serialize, Deserialize)]
+#[serde(rename_all = "snake_case")]
+enum CoordinateAccuracy {
+    Building,
+}
+
+#[derive(Serialize, Deserialize, Default)]
+#[serde(rename_all = "snake_case")]
+enum CoordinateSource {
+    Roomfinder,
+    #[default]
+    Navigatum,
+    Inferred,
+}
+
+impl TryFrom<DBLocationDetails> for GetLocationDetails {
+    type Error = anyhow::Error;
+
+    fn try_from(base: DBLocationDetails) -> anyhow::Result<Self> {
+        Ok(Self {
+            id: base.key,
+            name: base.name,
+            // the db stores bare strings ("room", "roomfinder", ...) => wrap them in a JSON
+            // string value to reuse the serde `rename_all = "snake_case"` enum mapping
+            r#type: serde_json::from_value(serde_json::Value::String(base.r#type))?,
+            type_common_name: base.type_common_name,
+            coords: Coordinate {
+                lat: base.lat,
+                lon: base.lon,
+                source: serde_json::from_value(serde_json::Value::String(base.coordinate_source))?,
+                // NOTE(review): assumes `coordinate_accuracy` is Option<String> — confirm in DBLocationDetails
+                accuracy: match base.coordinate_accuracy {
+                    Some(a) => Some(serde_json::from_value(serde_json::Value::String(a))?),
+                    None => None,
+                },
+            },
+            props: LocationProps {
+                comment: base.comment,
+                last_calendar_scrape_at: base.last_calendar_scrape_at,
+                calendar_url: base.calendar_url,
+            },
+            ranking_factors: Default::default(),
+            // remaining fields (aliases, parents, imgs, sources, redirect_url, ...) are not
+            // yet migrated to the exploded schema => fall back to their defaults
+            ..Default::default()
+        })
+    }
+}
 
 #[get("/{id}")]
 pub async fn get_handler(
@@ -19,11 +183,18 @@ pub async fn get_handler(
         return HttpResponse::NotFound().body("Not found");
     };
     let result = if args.should_use_english() {
-        sqlx::query_scalar!("SELECT data FROM en WHERE key = $1", probable_id)
+        sqlx::query_as!(DBLocationDetails,
+
r#"SELECT key,name,last_calendar_scrape_at,calendar_url,type,type_common_name,lat,lon,coordinate_source,rank_type,rank_combined,rank_usage,comment
+            FROM en
+            WHERE key = $1"#,
+            probable_id)
             .fetch_optional(&data.pool)
             .await
     } else {
-        sqlx::query_scalar!("SELECT data FROM de WHERE key = $1", probable_id)
+        sqlx::query_as!(DBLocationDetails,
+            r#"SELECT key,name,last_calendar_scrape_at,calendar_url,type,type_common_name,lat,lon,coordinate_source,rank_type,rank_combined,rank_usage,comment
+            FROM de
+            WHERE key = $1"#, probable_id)
             .fetch_optional(&data.pool)
             .await
     };
@@ -31,14 +202,10 @@
         Ok(d) => match d {
             None => HttpResponse::NotFound().body("Not found"),
             Some(d) => {
-                let mut response_json = serde_json::to_string(&d).unwrap();
-                // We don't want to serialise this data at any point in the server.
-                // This just flows through the server, but adding redirect_url to the response is necessary
-                response_json.pop(); // remove last }
-                response_json.push_str(&format!(",\"redirect_url\":\"{redirect_url}\"}}",));
-                HttpResponse::Ok()
-                    .content_type("application/json")
-                    .body(response_json)
+                // only `TryFrom` (not `From`) is implemented for `GetLocationDetails`
+                // => handle the conversion failure explicitly
+                let mut res = match GetLocationDetails::try_from(d) {
+                    Ok(res) => res,
+                    Err(e) => {
+                        error!("cannot convert db entry to a response: {e:?}");
+                        return HttpResponse::InternalServerError().body("could not format response");
+                    }
+                };
+                res.redirect_url = redirect_url;
+
+                HttpResponse::Ok().json(res)
             }
         },
         Err(e) => {
diff --git a/server/src/locations/preview.rs b/server/src/locations/preview.rs
index 073230734..bd9f4258c 100644
--- a/server/src/locations/preview.rs
+++ b/server/src/locations/preview.rs
@@ -13,9 +13,17 @@ use crate::limited::vec::LimitedVec;
 use crate::localisation;
 use crate::maps::overlay_map::OverlayMapTask;
 use crate::maps::overlay_text::{OverlayText, CANTARELL_BOLD, CANTARELL_REGULAR};
-use crate::models::Location;
 use crate::models::LocationKeyAlias;
 
+#[derive(Debug)]
+struct Location {
+    name: String,
+    r#type: String,
+    type_common_name: String,
+    lat: f64,
+    lon: f64,
+}
+
 #[tracing::instrument(skip(pool))]
 async fn get_localised_data(
     pool: &PgPool,
@@ -23,21 +31,29 @@
should_use_english: bool, ) -> Result { let result = if should_use_english { - sqlx::query_as!(Location, "SELECT key,name,last_calendar_scrape_at,calendar_url,type,type_common_name,lat,lon FROM en WHERE key = $1", id) - .fetch_all(pool) - .await + sqlx::query_as!( + Location, + "SELECT type,lat,lon,name,type_common_name FROM en WHERE key = $1", + id + ) + .fetch_all(pool) + .await } else { - sqlx::query_as!(Location, "SELECT key,name,last_calendar_scrape_at,calendar_url,type,type_common_name,lat,lon FROM de WHERE key = $1", id) - .fetch_all(pool) - .await + sqlx::query_as!( + Location, + "SELECT type,lat,lon,name,type_common_name FROM de WHERE key = $1", + id + ) + .fetch_all(pool) + .await }; match result { - Ok(r) => match r.len() { - 0 => Err(HttpResponse::NotFound() + Ok(mut r) => match r.pop() { + None => Err(HttpResponse::NotFound() .content_type("text/plain") .body("Not found")), - _ => Ok(r[0].clone()), + Some(item) => Ok(item), }, Err(e) => { error!("Error preparing statement: {e:?}"); @@ -59,7 +75,10 @@ async fn construct_image_from_data( }; // add the map - if !OverlayMapTask::from(&data).draw_onto(&mut img).await { + if !OverlayMapTask::new(&data.r#type, data.lat, data.lon) + .draw_onto(&mut img) + .await + { return None; } draw_pin(&mut img); diff --git a/server/src/maps/overlay_map.rs b/server/src/maps/overlay_map.rs index 3bcb61c51..632283726 100644 --- a/server/src/maps/overlay_map.rs +++ b/server/src/maps/overlay_map.rs @@ -5,7 +5,6 @@ use futures::{stream::FuturesUnordered, StreamExt}; use tracing::warn; use crate::maps::fetch_tile::FetchTileTask; -use crate::models::Location; pub struct OverlayMapTask { pub x: f64, @@ -13,23 +12,6 @@ pub struct OverlayMapTask { pub z: u32, } -impl From<&Location> for OverlayMapTask { - fn from(entry: &Location) -> Self { - let zoom = match entry.r#type.as_str() { - "campus" => 14, - "area" | "site" => 15, - "building" | "joined_building" => 16, - "virtual_room" | "room" | "poi" => 17, - _ => { - warn!("map 
generation encountered an type for {entry:?}. Assuming it to be a building"); - 16 - } - }; - let (x, y, z) = lat_lon_z_to_xyz(entry.lat, entry.lon, zoom); - Self { x, y, z } - } -} - impl fmt::Debug for OverlayMapTask { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("OverlayMapTask") @@ -43,6 +25,21 @@ impl fmt::Debug for OverlayMapTask { const POSSIBLE_INDEX_RANGE: Range = 0..7; impl OverlayMapTask { + pub fn new(r#type: &str, lat: f64, lon: f64) -> Self { + let zoom = match r#type { + "campus" => 14, + "area" | "site" => 15, + "building" | "joined_building" => 16, + "virtual_room" | "room" | "poi" => 17, + entry => { + warn!("map generation encountered an type for {entry:?}. Assuming it to be a building"); + 16 + } + }; + let (x, y, z) = lat_lon_z_to_xyz(lat, lon, zoom); + Self { x, y, z } + } + #[tracing::instrument(skip(img))] pub async fn draw_onto(&self, img: &mut image::RgbaImage) -> bool { // coordinate system is centered around the center of the image diff --git a/server/src/models.rs b/server/src/models.rs index 17e0c26f6..6327a06ff 100644 --- a/server/src/models.rs +++ b/server/src/models.rs @@ -1,18 +1,3 @@ -use chrono::{DateTime, Utc}; -use serde::{Deserialize, Serialize}; - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct Location { - pub key: String, - pub name: String, - pub last_calendar_scrape_at: Option>, - pub calendar_url: Option, - pub r#type: String, - pub type_common_name: String, - pub lat: f64, - pub lon: f64, -} - #[derive(Debug, Clone)] #[allow(dead_code)] // false positive. Clippy can't detect this due to macros pub struct LocationKeyAlias {