diff --git a/server/.sqlx/query-0129b39fb399d5d3d41b2053c9bf617a646c40ec0e2f614d035948f240c8b245.json b/server/.sqlx/query-0129b39fb399d5d3d41b2053c9bf617a646c40ec0e2f614d035948f240c8b245.json deleted file mode 100644 index 7300f29d6..000000000 --- a/server/.sqlx/query-0129b39fb399d5d3d41b2053c9bf617a646c40ec0e2f614d035948f240c8b245.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "INSERT INTO en(key,data)VALUES ($1,$2)", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Jsonb" - ] - }, - "nullable": [] - }, - "hash": "0129b39fb399d5d3d41b2053c9bf617a646c40ec0e2f614d035948f240c8b245" -} diff --git a/server/.sqlx/query-1883c95212e7119034b648e74c6a5f5c40a99660e0413e0a26853ecb5c21462e.json b/server/.sqlx/query-1883c95212e7119034b648e74c6a5f5c40a99660e0413e0a26853ecb5c21462e.json deleted file mode 100644 index 714a9f23d..000000000 --- a/server/.sqlx/query-1883c95212e7119034b648e74c6a5f5c40a99660e0413e0a26853ecb5c21462e.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "DELETE FROM aliases", - "describe": { - "columns": [], - "parameters": { - "Left": [] - }, - "nullable": [] - }, - "hash": "1883c95212e7119034b648e74c6a5f5c40a99660e0413e0a26853ecb5c21462e" -} diff --git a/server/.sqlx/query-a09ff7e07e367385a4c9b7cfbfbc75fda0eb5321958547cb8a16a6892af10f76.json b/server/.sqlx/query-1ffddd843ece79fa10cfb98c2e8aff2842cad45867c3a72f807ae08c1b10cb37.json similarity index 65% rename from server/.sqlx/query-a09ff7e07e367385a4c9b7cfbfbc75fda0eb5321958547cb8a16a6892af10f76.json rename to server/.sqlx/query-1ffddd843ece79fa10cfb98c2e8aff2842cad45867c3a72f807ae08c1b10cb37.json index 3871a1c01..0d37f5584 100644 --- a/server/.sqlx/query-a09ff7e07e367385a4c9b7cfbfbc75fda0eb5321958547cb8a16a6892af10f76.json +++ b/server/.sqlx/query-1ffddd843ece79fa10cfb98c2e8aff2842cad45867c3a72f807ae08c1b10cb37.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "INSERT INTO calendar (id,room_code,start_at,end_at,stp_title_de,stp_title_en,stp_type,entry_type,detailed_entry_type)\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)\n ON CONFLICT (id) DO UPDATE SET\n room_code = $2,\n start_at = $3,\n end_at = $4,\n stp_title_de = $5,\n stp_title_en = $6,\n stp_type = $7,\n entry_type = $8,\n detailed_entry_type = $9", + "query": "INSERT INTO calendar (id,room_code,start_at,end_at,stp_title_de,stp_title_en,stp_type,entry_type,detailed_entry_type)\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)\n ON CONFLICT (id) DO UPDATE SET\n room_code = EXCLUDED.room_code,\n start_at = EXCLUDED.start_at,\n end_at = EXCLUDED.end_at,\n stp_title_de = EXCLUDED.stp_title_de,\n stp_title_en = EXCLUDED.stp_title_en,\n stp_type = EXCLUDED.stp_type,\n entry_type = EXCLUDED.entry_type,\n detailed_entry_type = EXCLUDED.detailed_entry_type", "describe": { "columns": [], "parameters": { @@ -31,5 +31,5 @@ }, "nullable": [] }, - "hash": "a09ff7e07e367385a4c9b7cfbfbc75fda0eb5321958547cb8a16a6892af10f76" + "hash": "1ffddd843ece79fa10cfb98c2e8aff2842cad45867c3a72f807ae08c1b10cb37" } diff --git a/server/.sqlx/query-213693b327a922e452caf987d332f3c6c177016c63bef328525829f02fc74538.json b/server/.sqlx/query-213693b327a922e452caf987d332f3c6c177016c63bef328525829f02fc74538.json new file mode 100644 index 000000000..ba3c13153 --- /dev/null +++ b/server/.sqlx/query-213693b327a922e452caf987d332f3c6c177016c63bef328525829f02fc74538.json @@ -0,0 +1,23 @@ +{ + "db_name": "PostgreSQL", + "query": "\nSELECT de.key\nFROM de, (SELECT * FROM UNNEST($1::text[], $2::int8[])) as 
expected(key,hash)\nWHERE de.key = expected.key and de.hash != expected.hash\n", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "key", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "TextArray", + "Int8Array" + ] + }, + "nullable": [ + false + ] + }, + "hash": "213693b327a922e452caf987d332f3c6c177016c63bef328525829f02fc74538" +} diff --git a/server/.sqlx/query-6eccbb21f50abb3aeb2fb43e9bc0d5edb9ca81cbc24948b4a69e83e1a2b9c802.json b/server/.sqlx/query-6eccbb21f50abb3aeb2fb43e9bc0d5edb9ca81cbc24948b4a69e83e1a2b9c802.json new file mode 100644 index 000000000..7a537cda2 --- /dev/null +++ b/server/.sqlx/query-6eccbb21f50abb3aeb2fb43e9bc0d5edb9ca81cbc24948b4a69e83e1a2b9c802.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM aliases WHERE NOT EXISTS (SELECT * FROM UNNEST($1::text[]) AS expected(key) WHERE aliases.key = expected.key)", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "TextArray" + ] + }, + "nullable": [] + }, + "hash": "6eccbb21f50abb3aeb2fb43e9bc0d5edb9ca81cbc24948b4a69e83e1a2b9c802" +} diff --git a/server/.sqlx/query-9cb68be06f7ef9404f25116da1c1ffc45bb09ab79457f84361e1e2a696e39c15.json b/server/.sqlx/query-9cb68be06f7ef9404f25116da1c1ffc45bb09ab79457f84361e1e2a696e39c15.json new file mode 100644 index 000000000..a382300cc --- /dev/null +++ b/server/.sqlx/query-9cb68be06f7ef9404f25116da1c1ffc45bb09ab79457f84361e1e2a696e39c15.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM de WHERE NOT EXISTS (SELECT * FROM UNNEST($1::text[]) AS expected(key) WHERE de.key = expected.key)", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "TextArray" + ] + }, + "nullable": [] + }, + "hash": "9cb68be06f7ef9404f25116da1c1ffc45bb09ab79457f84361e1e2a696e39c15" +} diff --git a/server/.sqlx/query-aa36ca2ae59d5d6f15bc59a0476a3d635a3befaf254645d97ff778e875c90129.json b/server/.sqlx/query-aa36ca2ae59d5d6f15bc59a0476a3d635a3befaf254645d97ff778e875c90129.json new file mode 100644 index 000000000..25d2647eb --- /dev/null +++ b/server/.sqlx/query-aa36ca2ae59d5d6f15bc59a0476a3d635a3befaf254645d97ff778e875c90129.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO en(key,data)\n VALUES ($1,$2)\n ON CONFLICT (key) DO UPDATE\n SET data = EXCLUDED.data", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Jsonb" + ] + }, + "nullable": [] + }, + "hash": "aa36ca2ae59d5d6f15bc59a0476a3d635a3befaf254645d97ff778e875c90129" +} diff --git a/server/.sqlx/query-acfccc2d96fcc85154dc3a6dd969ffdcaa2465cb27b510f9e50682f15980c45f.json b/server/.sqlx/query-acfccc2d96fcc85154dc3a6dd969ffdcaa2465cb27b510f9e50682f15980c45f.json deleted file mode 100644 index 5896614a3..000000000 --- a/server/.sqlx/query-acfccc2d96fcc85154dc3a6dd969ffdcaa2465cb27b510f9e50682f15980c45f.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "INSERT INTO de(key,data)VALUES ($1,$2)", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Jsonb" - ] - }, - "nullable": [] - }, - "hash": "acfccc2d96fcc85154dc3a6dd969ffdcaa2465cb27b510f9e50682f15980c45f" -} diff --git a/server/.sqlx/query-b59dbe8a5c3b6f7fdb9543508ae7b1473d24ffea9d69a8a96ca7bedf6aa2c6e4.json b/server/.sqlx/query-b59dbe8a5c3b6f7fdb9543508ae7b1473d24ffea9d69a8a96ca7bedf6aa2c6e4.json deleted file mode 100644 index e925b25fc..000000000 --- a/server/.sqlx/query-b59dbe8a5c3b6f7fdb9543508ae7b1473d24ffea9d69a8a96ca7bedf6aa2c6e4.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "PostgreSQL", - 
"query": "DELETE FROM en", - "describe": { - "columns": [], - "parameters": { - "Left": [] - }, - "nullable": [] - }, - "hash": "b59dbe8a5c3b6f7fdb9543508ae7b1473d24ffea9d69a8a96ca7bedf6aa2c6e4" -} diff --git a/server/.sqlx/query-fe797b7bbbe890d6aeb9878dc793ed3c8f876f1974161d94dbd27646143b8411.json b/server/.sqlx/query-b6c9c47d8d15c74d2d35fe34536d79f643839616e1ae8eebd3074d023797c915.json similarity index 60% rename from server/.sqlx/query-fe797b7bbbe890d6aeb9878dc793ed3c8f876f1974161d94dbd27646143b8411.json rename to server/.sqlx/query-b6c9c47d8d15c74d2d35fe34536d79f643839616e1ae8eebd3074d023797c915.json index 839dde1d2..44b14566f 100644 --- a/server/.sqlx/query-fe797b7bbbe890d6aeb9878dc793ed3c8f876f1974161d94dbd27646143b8411.json +++ b/server/.sqlx/query-b6c9c47d8d15c74d2d35fe34536d79f643839616e1ae8eebd3074d023797c915.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "INSERT INTO aliases (alias, key, type, visible_id)\n VALUES ($1, $2, $3, $4)\n ON CONFLICT (alias,key) DO UPDATE SET\n key = $2,\n type = $3,\n visible_id = $4", + "query": "INSERT INTO aliases (alias, key, type, visible_id)\n VALUES ($1, $2, $3, $4)\n ON CONFLICT (alias,key) DO UPDATE SET\n key = EXCLUDED.key,\n type = EXCLUDED.type,\n visible_id = EXCLUDED.visible_id", "describe": { "columns": [], "parameters": { @@ -13,5 +13,5 @@ }, "nullable": [] }, - "hash": "fe797b7bbbe890d6aeb9878dc793ed3c8f876f1974161d94dbd27646143b8411" + "hash": "b6c9c47d8d15c74d2d35fe34536d79f643839616e1ae8eebd3074d023797c915" } diff --git a/server/.sqlx/query-b85ef040a74d51bf6b539703028583c9a0917f5203ccdc4ed01e4272fb68d1d8.json b/server/.sqlx/query-b85ef040a74d51bf6b539703028583c9a0917f5203ccdc4ed01e4272fb68d1d8.json deleted file mode 100644 index 6de4edc5e..000000000 --- a/server/.sqlx/query-b85ef040a74d51bf6b539703028583c9a0917f5203ccdc4ed01e4272fb68d1d8.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "DELETE FROM de", - "describe": { - "columns": [], - "parameters": { - "Left": [] - }, - "nullable": [] - }, - "hash": "b85ef040a74d51bf6b539703028583c9a0917f5203ccdc4ed01e4272fb68d1d8" -} diff --git a/server/.sqlx/query-bcd73ce5d9d43948f0f568e0dee1b806e8fd88ce1c6ada7e52f96ee8cac6b471.json b/server/.sqlx/query-bcd73ce5d9d43948f0f568e0dee1b806e8fd88ce1c6ada7e52f96ee8cac6b471.json new file mode 100644 index 000000000..58d1ea19f --- /dev/null +++ b/server/.sqlx/query-bcd73ce5d9d43948f0f568e0dee1b806e8fd88ce1c6ada7e52f96ee8cac6b471.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\nSELECT de.key\nFROM de\nWHERE NOT EXISTS (SELECT * FROM UNNEST($1::text[]) as expected2(key) where de.key=expected2.key)\n", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "key", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "TextArray" + ] + }, + "nullable": [ + false + ] + }, + "hash": "bcd73ce5d9d43948f0f568e0dee1b806e8fd88ce1c6ada7e52f96ee8cac6b471" +} diff --git a/server/.sqlx/query-e380ed62114b399467b08a619d47e26e9f3b967cd5c916d5767a1437c22a984e.json b/server/.sqlx/query-e380ed62114b399467b08a619d47e26e9f3b967cd5c916d5767a1437c22a984e.json new file mode 100644 index 000000000..115b5a0b8 --- /dev/null +++ b/server/.sqlx/query-e380ed62114b399467b08a619d47e26e9f3b967cd5c916d5767a1437c22a984e.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO de(key,data,hash)\n VALUES ($1,$2,$3)\n ON CONFLICT (key) DO UPDATE\n SET data = EXCLUDED.data,\n hash = EXCLUDED.hash", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Jsonb", + "Int8" + ] + }, 
+ "nullable": [] + }, + "hash": "e380ed62114b399467b08a619d47e26e9f3b967cd5c916d5767a1437c22a984e" +} diff --git a/server/.sqlx/query-faf09d9ff2bb83f0b3efb0f7c8fe2b1159261aa471864faa47035e0187414d3b.json b/server/.sqlx/query-faf09d9ff2bb83f0b3efb0f7c8fe2b1159261aa471864faa47035e0187414d3b.json new file mode 100644 index 000000000..c50703a49 --- /dev/null +++ b/server/.sqlx/query-faf09d9ff2bb83f0b3efb0f7c8fe2b1159261aa471864faa47035e0187414d3b.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM en WHERE NOT EXISTS (SELECT * FROM UNNEST($1::text[]) AS expected(key) WHERE en.key = expected.key)", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "TextArray" + ] + }, + "nullable": [] + }, + "hash": "faf09d9ff2bb83f0b3efb0f7c8fe2b1159261aa471864faa47035e0187414d3b" +} diff --git a/server/main-api/migrations/20240628192147_hash_based_intialisation.down.sql b/server/main-api/migrations/20240628192147_hash_based_intialisation.down.sql new file mode 100644 index 000000000..1d6edf181 --- /dev/null +++ b/server/main-api/migrations/20240628192147_hash_based_intialisation.down.sql @@ -0,0 +1,3 @@ +-- Add down migration script here +DROP INDEX IF EXISTS hash_lut; +alter table de drop column hash; diff --git a/server/main-api/migrations/20240628192147_hash_based_intialisation.up.sql b/server/main-api/migrations/20240628192147_hash_based_intialisation.up.sql new file mode 100644 index 000000000..7e5fa6abe --- /dev/null +++ b/server/main-api/migrations/20240628192147_hash_based_intialisation.up.sql @@ -0,0 +1,3 @@ +-- Add up migration script here +alter table de add hash BIGINT default 0; -- the chance of an empty hash is astronomically slim +CREATE INDEX IF NOT EXISTS hash_lut ON de(key, hash); diff --git a/server/main-api/src/calendar/models.rs b/server/main-api/src/calendar/models.rs index 8e0971c70..8627d505c 100644 --- a/server/main-api/src/calendar/models.rs +++ b/server/main-api/src/calendar/models.rs @@ -68,14 +68,14 @@ impl Event { r#"INSERT INTO calendar (id,room_code,start_at,end_at,stp_title_de,stp_title_en,stp_type,entry_type,detailed_entry_type) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) ON CONFLICT (id) DO UPDATE SET - room_code = $2, - start_at = $3, - end_at = $4, - stp_title_de = $5, - stp_title_en = $6, - stp_type = $7, - entry_type = $8, - detailed_entry_type = $9"#, + room_code = EXCLUDED.room_code, + start_at = EXCLUDED.start_at, + end_at = EXCLUDED.end_at, + stp_title_de = EXCLUDED.stp_title_de, + stp_title_en = EXCLUDED.stp_title_en, + stp_type = EXCLUDED.stp_type, + entry_type = EXCLUDED.entry_type, + detailed_entry_type = EXCLUDED.detailed_entry_type"#, self.id, self.room_code, self.start_at, diff --git a/server/main-api/src/setup/database/alias.rs b/server/main-api/src/setup/database/alias.rs index 98cf55752..06543df3f 100644 --- a/server/main-api/src/setup/database/alias.rs +++ b/server/main-api/src/setup/database/alias.rs @@ -1,10 +1,10 @@ use std::time::Instant; -use log::info; +use log::debug; use serde::Deserialize; #[derive(Debug)] -struct Alias { +pub(super) struct Alias { alias: String, key: String, // the key is the id of the entry r#type: String, // what we display in the url @@ -90,9 +90,9 @@ impl Alias { r#"INSERT INTO aliases (alias, key, type, visible_id) VALUES ($1, $2, $3, $4) ON CONFLICT (alias,key) DO UPDATE SET - key = $2, - type = $3, - visible_id = $4"#, + key = EXCLUDED.key, + type = EXCLUDED.type, + visible_id = EXCLUDED.visible_id"#, self.alias, self.key, self.r#type, @@ -102,24 +102,29 @@ impl Alias { .await } } - 
-pub async fn load_all_to_db( - tx: &mut sqlx::Transaction<'_, sqlx::Postgres>, -) -> Result<(), crate::BoxedError> { +pub async fn download_updates( + keys_which_need_updating: &[String], +) -> Result<Vec<Alias>, crate::BoxedError> { let cdn_url = std::env::var("CDN_URL").unwrap_or_else(|_| "https://nav.tum.de/cdn".to_string()); - let raw_aliase = reqwest::get(format!("{cdn_url}/api_data.json")) + Ok(reqwest::get(format!("{cdn_url}/api_data.json")) .await? .json::>() - .await?; - let start = Instant::now(); - let set_aliase = raw_aliase + .await? .into_iter() + .filter(|d| keys_which_need_updating.is_empty() || keys_which_need_updating.contains(&d.id)) .map(AliasIterator::from) - .flat_map(IntoIterator::into_iter); - for task in set_aliase { + .flat_map(IntoIterator::into_iter) + .collect::<Vec<Alias>>()) +} +pub async fn load_all_to_db( + aliases: Vec<Alias>, + tx: &mut sqlx::Transaction<'_, sqlx::Postgres>, +) -> Result<(), crate::BoxedError> { + let start = Instant::now(); + for task in aliases { task.store(tx).await?; } - info!("loaded aliases in {elapsed:?}", elapsed = start.elapsed()); + debug!("loaded aliases in {elapsed:?}", elapsed = start.elapsed()); Ok(()) } diff --git a/server/main-api/src/setup/database/data.rs b/server/main-api/src/setup/database/data.rs index 853f79655..72439d82e 100644 --- a/server/main-api/src/setup/database/data.rs +++ b/server/main-api/src/setup/database/data.rs @@ -1,18 +1,32 @@ use std::collections::HashMap; use std::time::Instant; -use log::info; +use log::debug; use serde_json::Value; -struct DelocalisedValues { +pub(super) struct DelocalisedValues { key: String, + hash: i64, de: Value, en: Value, } impl From<HashMap<String, Value>> for DelocalisedValues { fn from(value: HashMap<String, Value>) -> Self { + let key = value + .get("id") + .expect("an ID should always exist") + .as_str() + .expect("the id should be a valid string") + .to_string(); + let hash = value + .get("hash") + .expect("a hash should always exist") + .as_i64() + .expect("a hash should be a valid i64"); Self { + key, + hash, de: value .clone() .into_iter() @@ -23,13 +37,6 @@ impl From<HashMap<String, Value>> for DelocalisedValues { .into_iter() .map(|(k, v)| (k, Self::delocalise(v.clone(), "en"))) .collect(), - key: value - .clone() - .get("id") - .unwrap() - .as_str() - .unwrap() - .to_string(), } } } @@ -63,17 +70,27 @@ impl DelocalisedValues { tx: &mut sqlx::Transaction<'_, sqlx::Postgres>, ) -> Result<(), sqlx::Error> { sqlx::query!( - r#"INSERT INTO de(key,data)VALUES ($1,$2)"#, + r#" + INSERT INTO de(key,data,hash) + VALUES ($1,$2,$3) + ON CONFLICT (key) DO UPDATE + SET data = EXCLUDED.data, + hash = EXCLUDED.hash"#, self.key, - self.de + self.de, + self.hash, ) .execute(&mut **tx) .await?; sqlx::query!( - r#"INSERT INTO en(key,data)VALUES ($1,$2)"#, + r#" + INSERT INTO en(key,data) + VALUES ($1,$2) + ON CONFLICT (key) DO UPDATE + SET data = EXCLUDED.data"#, self.key, - self.en + self.en, ) .execute(&mut **tx) .await?; @@ -82,9 +99,9 @@ impl DelocalisedValues { } } -pub async fn load_all_to_db( - tx: &mut sqlx::Transaction<'_, sqlx::Postgres>, -) -> Result<(), crate::BoxedError> { +pub async fn download_updates( + keys_which_need_updating: &[String], +) -> Result<Vec<DelocalisedValues>, crate::BoxedError> { let start = Instant::now(); let cdn_url = std::env::var("CDN_URL").unwrap_or_else(|_| "https://nav.tum.de/cdn".to_string()); let tasks = reqwest::get(format!("{cdn_url}/api_data.json")) .await? .json::<Vec<HashMap<String, Value>>>() .await?
.into_iter() - .map(DelocalisedValues::from); - info!("downloaded data in {elapsed:?}", elapsed = start.elapsed()); + .map(DelocalisedValues::from) + .filter(|d| keys_which_need_updating.contains(&d.key)) + .collect::<Vec<DelocalisedValues>>(); + debug!("downloaded data in {elapsed:?}", elapsed = start.elapsed()); + Ok(tasks) +} + +pub(super) async fn load_all_to_db( + tasks: Vec<DelocalisedValues>, + tx: &mut sqlx::Transaction<'_, sqlx::Postgres>, +) -> Result<(), crate::BoxedError> { let start = Instant::now(); - for task in tasks { + for task in tasks.into_iter() { task.store(tx).await?; } - info!("loaded data in {elapsed:?}", elapsed = start.elapsed()); + debug!("loaded data in {elapsed:?}", elapsed = start.elapsed()); Ok(()) } + +pub async fn download_status() -> Result<Vec<(String, i64)>, crate::BoxedError> { + let start = Instant::now(); + let cdn_url = std::env::var("CDN_URL").unwrap_or_else(|_| "https://nav.tum.de/cdn".to_string()); + let tasks = reqwest::get(format!("{cdn_url}/status_data.json")) + .await? + .json::<Vec<(String, i64)>>() + .await?; + debug!( + "downloaded current status in {elapsed:?}", + elapsed = start.elapsed() + ); + Ok(tasks) +} diff --git a/server/main-api/src/setup/database/mod.rs b/server/main-api/src/setup/database/mod.rs index f1c1681cc..970e8617e 100644 --- a/server/main-api/src/setup/database/mod.rs +++ b/server/main-api/src/setup/database/mod.rs @@ -1,4 +1,6 @@ -use log::info; +use std::time::Instant; + +use log::{debug, info}; mod alias; mod data; @@ -10,23 +12,90 @@ pub async fn setup(pool: &sqlx::PgPool) -> Result<(), crate::BoxedError> { Ok(()) } pub async fn load_data(pool: &sqlx::PgPool) -> Result<(), crate::BoxedError> { - let mut tx = pool.begin().await?; - + let status = data::download_status().await?; + let new_keys = status + .clone() + .into_iter() + .map(|(k, _)| k) + .collect::<Vec<String>>(); + let new_hashes = status.into_iter().map(|(_, h)| h).collect::<Vec<i64>>(); info!("deleting old data"); - cleanup(&mut tx).await?; - info!("loading new data"); - data::load_all_to_db(&mut tx).await?; - info!("loading new aliases"); - alias::load_all_to_db(&mut tx).await?; - tx.commit().await?; + { + let start = Instant::now(); + let mut tx = pool.begin().await?; + cleanup_deleted(&new_keys, &mut tx).await?; + tx.commit().await?; + debug!("deleted old data in {elapsed:?}", elapsed = start.elapsed()); + } + + debug!("finding changed data"); + let keys_which_need_updating = + find_keys_which_need_updating(pool, &new_keys, &new_hashes).await?; + + if !keys_which_need_updating.is_empty() { + info!("loading {} changed entries", keys_which_need_updating.len()); + let data = data::download_updates(&keys_which_need_updating).await?; + let mut tx = pool.begin().await?; + data::load_all_to_db(data, &mut tx).await?; + tx.commit().await?; + } + + if !keys_which_need_updating.is_empty() { + info!("loading new aliases"); + let aliases = alias::download_updates(&keys_which_need_updating).await?; + let mut tx = pool.begin().await?; + alias::load_all_to_db(aliases, &mut tx).await?; + tx.commit().await?; + } Ok(()) } -async fn cleanup(tx: &mut sqlx::Transaction<'_, sqlx::Postgres>) -> Result<(), crate::BoxedError> { - sqlx::query!("DELETE FROM aliases") +async fn find_keys_which_need_updating( + pool: &sqlx::PgPool, + keys: &[String], + hashes: &[i64], +) -> Result<Vec<String>, crate::BoxedError> { + let start = Instant::now(); + let mut keys_which_need_updating = sqlx::query_scalar!( + r#" +SELECT de.key +FROM de, (SELECT * FROM UNNEST($1::text[], $2::int8[])) as expected(key,hash) +WHERE de.key = expected.key and de.hash != expected.hash +"#, + keys, + hashes + ) +
.fetch_all(pool) + .await?; + debug!("find_keys_which_need_updating (update) took {elapsed:?} and yielded {updated_cnt} updated items", elapsed = start.elapsed(), updated_cnt=keys_which_need_updating.len()); + + let mut keys_which_need_removing = sqlx::query_scalar!( + r#" +SELECT de.key +FROM de +WHERE NOT EXISTS (SELECT * FROM UNNEST($1::text[]) as expected2(key) where de.key=expected2.key) +"#, + keys + ) + .fetch_all(pool) + .await?; + debug!("find_keys_which_need_updating (update+delete) took {elapsed:?} and yielded {deleted_cnt} deleted items", elapsed = start.elapsed(), deleted_cnt=keys_which_need_removing.len()); + keys_which_need_updating.append(&mut keys_which_need_removing); + Ok(keys_which_need_updating) +} + +async fn cleanup_deleted( + keys: &[String], + tx: &mut sqlx::Transaction<'_, sqlx::Postgres>, +) -> Result<(), crate::BoxedError> { + sqlx::query!("DELETE FROM aliases WHERE NOT EXISTS (SELECT * FROM UNNEST($1::text[]) AS expected(key) WHERE aliases.key = expected.key)", keys) + .execute(&mut **tx) + .await?; + sqlx::query!("DELETE FROM en WHERE NOT EXISTS (SELECT * FROM UNNEST($1::text[]) AS expected(key) WHERE en.key = expected.key)", keys) + .execute(&mut **tx) + .await?; + sqlx::query!("DELETE FROM de WHERE NOT EXISTS (SELECT * FROM UNNEST($1::text[]) AS expected(key) WHERE de.key = expected.key)", keys) .execute(&mut **tx) .await?; - sqlx::query!("DELETE FROM en").execute(&mut **tx).await?; - sqlx::query!("DELETE FROM de").execute(&mut **tx).await?; Ok(()) }
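For context, the update flow this patch introduces boils down to three statements: the new status_data.json download supplies one (key, hash) pair per entry, changed rows are found by joining that list against the stored hash column via UNNEST, stale rows are removed with an anti-join, and upserts reference EXCLUDED so the conflict branch always mirrors the insert column list. Below is a minimal, self-contained PostgreSQL sketch of those three statements; the de_demo table and the literal keys, hashes, and JSON values are illustrative stand-ins, not taken from the migration or the patch.

-- Illustrative stand-in for the real `de` table (which has more columns).
CREATE TABLE IF NOT EXISTS de_demo (
    key  TEXT PRIMARY KEY,
    data JSONB NOT NULL,
    hash BIGINT NOT NULL DEFAULT 0
);

-- Upsert: EXCLUDED is the row that was proposed for insertion, so the UPDATE
-- branch cannot drift out of sync with the VALUES column list.
INSERT INTO de_demo (key, data, hash)
VALUES ('mi', '{"name": "Mathe/Informatik"}', 42)
ON CONFLICT (key) DO UPDATE
    SET data = EXCLUDED.data,
        hash = EXCLUDED.hash;

-- Change detection: join the expected (key, hash) pairs against the table and
-- keep only the keys whose stored hash differs.
SELECT d.key
FROM de_demo d
JOIN UNNEST(ARRAY['mi', 'mw'], ARRAY[43, 7]::int8[]) AS expected(key, hash)
  ON d.key = expected.key
WHERE d.hash != expected.hash;

-- Deletion sweep: drop rows whose key no longer appears in the expected set.
DELETE FROM de_demo
WHERE NOT EXISTS (
    SELECT 1
    FROM UNNEST(ARRAY['mi', 'mw']) AS expected(key)
    WHERE de_demo.key = expected.key
);

The EXCLUDED-based rewrite is the same change the calendar and alias upserts in this patch make: it avoids re-binding positional parameters ($2, $3, ...) in the conflict branch, which is easy to get wrong once columns are added or reordered.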