diff --git a/server/main-api/src/setup/database/alias.rs b/server/main-api/src/setup/database/alias.rs
index 6927237e9..adb38456a 100644
--- a/server/main-api/src/setup/database/alias.rs
+++ b/server/main-api/src/setup/database/alias.rs
@@ -1,6 +1,6 @@
-use std::io::Write;
 use crate::limited::vec::LimitedVec;
 use polars::prelude::*;
+use std::io::Write;
 use tempfile::tempfile;
 
 #[derive(Debug)]
@@ -40,26 +40,32 @@ pub async fn download_updates() -> Result<LimitedVec<Alias>, crate::BoxedError>
         .error_for_status()?
         .bytes()
         .await?;
-    let mut aliase=Vec::<Alias>::new();
+    let mut aliase = Vec::<Alias>::new();
     let mut file = tempfile()?;
     file.write_all(&body)?;
     let df = ParquetReader::new(&mut file)
-        .with_columns(Some(vec!["id".to_string(),"type".to_string(),"visible_id".to_string(),"aliases".to_string()]))
-        .finish().unwrap();
-    let id_col=df.column("id")?.str()?;
-    let type_col=df.column("type")?.str()?;
-    let visible_id_col=df.column("visible_id")?.str()?;
-    for index in 0..id_col.len(){
+        .with_columns(Some(vec![
+            "id".to_string(),
+            "type".to_string(),
+            "visible_id".to_string(),
+            "aliases".to_string(),
+        ]))
+        .finish()
+        .unwrap();
+    let id_col = df.column("id")?.str()?;
+    let type_col = df.column("type")?.str()?;
+    let visible_id_col = df.column("visible_id")?.str()?;
+    for index in 0..id_col.len() {
         let id = id_col.get(index).unwrap();
         let r#type = type_col.get(index).unwrap();
-        let visible_id=visible_id_col.get(index).unwrap();
-        aliase.push(Alias{
+        let visible_id = visible_id_col.get(index).unwrap();
+        aliase.push(Alias {
             alias: id.to_string(),
             key: id.to_string(),
             r#type: r#type.to_string(),
             visible_id: visible_id.to_string(),
         });
-        aliase.push(Alias{
+        aliase.push(Alias {
             alias: visible_id.to_string(),
             key: id.to_string(),
             r#type: r#type.to_string(),
@@ -67,17 +73,17 @@ pub async fn download_updates() -> Result<LimitedVec<Alias>, crate::BoxedError>
         });
     }
 
-    let df_expanded=df.explode(["aliases"])?;
-    let id_col=df_expanded.column("id")?.str()?;
-    let type_col=df_expanded.column("type")?.str()?;
-    let visible_id_col=df_expanded.column("visible_id")?.str()?;
-    let aliases_col=df_expanded.column("aliases")?.str()?;
-    for index in 0..id_col.len(){
+    let df_expanded = df.explode(["aliases"])?;
+    let id_col = df_expanded.column("id")?.str()?;
+    let type_col = df_expanded.column("type")?.str()?;
+    let visible_id_col = df_expanded.column("visible_id")?.str()?;
+    let aliases_col = df_expanded.column("aliases")?.str()?;
+    for index in 0..id_col.len() {
         let alias = aliases_col.get(index).unwrap();
         let id = id_col.get(index).unwrap();
         let r#type = type_col.get(index).unwrap();
-        let visible_id=visible_id_col.get(index).unwrap();
-        aliase.push(Alias{
+        let visible_id = visible_id_col.get(index).unwrap();
+        aliase.push(Alias {
             alias: alias.to_string(),
             key: id.to_string(),
             r#type: r#type.to_string(),
diff --git a/server/main-api/src/setup/database/data.rs b/server/main-api/src/setup/database/data.rs
index 7fe5e12b0..ca8258ee2 100644
--- a/server/main-api/src/setup/database/data.rs
+++ b/server/main-api/src/setup/database/data.rs
@@ -1,12 +1,12 @@
+use crate::limited::vec::LimitedVec;
+use polars::prelude::ParquetReader;
+use polars::prelude::*;
+use serde_json::Value;
 use std::collections::HashMap;
 use std::fmt;
 use std::hash::{Hash, Hasher};
 use std::io::Write;
-use polars::prelude::ParquetReader;
-use serde_json::Value;
 use tempfile::tempfile;
-use crate::limited::vec::LimitedVec;
-use polars::prelude::*;
 
 #[derive(Clone)]
 pub(super) struct DelocalisedValues {
@@ -136,13 +136,13 @@ pub async fn download_updates() -> Result<LimitedVec<DelocalisedValues>, crate::BoxedError>
     file.write_all(&body)?;
     let df = ParquetReader::new(&mut file).finish().unwrap();
     let mut vals = Vec::<DelocalisedValues>::new();
-    let col_names=df.get_column_names().clone();
+    let col_names = df.get_column_names().clone();
     for index in 0..df.get_columns()[0].len() {
-        let row=df.get_row(index)?;
-        let mut hm=HashMap::new();
-        for (i,a) in row.0.into_iter().enumerate(){
-            let v=serde_json::to_value(a)?;
-            hm.insert(col_names[i].to_string(),v);
+        let row = df.get_row(index)?;
+        let mut hm = HashMap::new();
+        for (i, a) in row.0.into_iter().enumerate() {
+            let v = serde_json::to_value(a)?;
+            hm.insert(col_names[i].to_string(), v);
         }
         vals.push(DelocalisedValues::from(hm));
     }
@@ -169,11 +169,15 @@ pub async fn download_status() -> Result<LimitedVec<(String, i64)>, crate::BoxedError>
     let mut file = tempfile()?;
     file.write_all(&body)?;
     let df = ParquetReader::new(&mut file).finish().unwrap();
-    let id_col=Vec::from(df.column("id")?.str()?);
-    let hash_col=Vec::from(df.column("id")?.i64()?);
-    let tasks=id_col.into_iter().zip(hash_col).flat_map(|(id,hash)| match (id,hash) {
-        (Some(id),Some(hash))=>Some((id.to_string(),hash)),
-        _=>None,
-    }).collect();
+    let id_col = Vec::from(df.column("id")?.str()?);
+    let hash_col = Vec::from(df.column("id")?.i64()?);
+    let tasks = id_col
+        .into_iter()
+        .zip(hash_col)
+        .flat_map(|(id, hash)| match (id, hash) {
+            (Some(id), Some(hash)) => Some((id.to_string(), hash)),
+            _ => None,
+        })
+        .collect();
     Ok(LimitedVec(tasks))
 }
diff --git a/server/main-api/src/setup/transportation.rs b/server/main-api/src/setup/transportation.rs
index 147532f3a..ed47f95e9 100644
--- a/server/main-api/src/setup/transportation.rs
+++ b/server/main-api/src/setup/transportation.rs
@@ -54,9 +54,11 @@ impl DBStation {
 #[tracing::instrument(skip(pool))]
 pub async fn setup(pool: &sqlx::PgPool) -> Result<(), crate::BoxedError> {
     let url = "https://raw.githubusercontent.com/TUM-Dev/NavigaTUM/main/data/external/results/public_transport.json";
-    let transportations = reqwest::get(url).await?
+    let transportations = reqwest::get(url)
+        .await?
         .error_for_status()?
-        .json::>().await?;
+        .json::>()
+        .await?;
     let transportations = transportations.into_iter().flat_map(|s| {
         let id = s.station.station_id.clone();
         let mut stations = vec![DBStation::from_station(s.station, None)];