Skip to content

Commit

Permalink
feat: Refactor DB communication, remove moka cache (#51)
Browse files Browse the repository at this point in the history
To accommodate the stringent requirement for less memory usage, the inefficient cache is no longer used.
Now random generation is done directly through queries on a "dynamic" table created at startup of the application, aggregating data from all the other tables (kinda violates the normal form, but we need it for efficient I/O and that's why the table is dynamic and not static).

* change optional data: listings now always return the least possible data (core+variant), while a single get allows choosing what to fetch with fine granularity
* Move to i64/f64 for every number type, wasting some memory but removing a lot of critical conversions between types
* Create a build phase, used to initialize a SQL table 'CREATURE_CORE' using data found in other DB's tables
* Remove Creature 'from' bloated code. Now all the Creature structs must be initialized directly
* remove creature info, squished the data in the core (roles)
* remove creature and scales caches. The cache now holds only the list of runtime values and the result of the get_all_creatures_core_data function
* remove rawcreature, a long time coming.
* add raw_query_builder.rs, helps building raw statement (no compile time checks) in a standardized way. This is done to quarantine the queries that are not statically checked
* update dependencies, remove bad implementation of validator length
* move cache to proxy.rs
* better naming convention. methods that query the DB are now "fetch" while methods that layer on top of them are "get" because the end user does not care where the data comes from.
* downgrade msrv
* limit db fetch for random generator to 20 elements
* fetch random elements for random generator from db (so it does not always return the same 20 elements)

closes: #50
  • Loading branch information
RakuJa authored May 19, 2024
1 parent e175cec commit 69237f1
Show file tree
Hide file tree
Showing 42 changed files with 1,828 additions and 1,519 deletions.
27 changes: 17 additions & 10 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ authors = ["RakuJa"]

# Compiler info
edition = "2021"
rust-version = "1.77.1"
rust-version = "1.75.0"

description = "Beyond Your Bestiary Explorer (BYBE) is a web service that provides tools to help Pathfinder 2e Game Masters."
readme = "README.md"
Expand All @@ -15,7 +15,7 @@ license = "MIT"
keywords = ["webservice", "pathfinder"]
publish = false

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
build = "build/main.rs"

[lints.rust]
unsafe_code = "forbid"
Expand All @@ -24,24 +24,31 @@ unsafe_code = "forbid"
actix-web = "4.5.1"
actix-cors = "0.7.0"
actix-web-validator = "5.0.1"
# Cannot be updated until actix-web updates validator dependency
validator = {version="0.16.1", features = ["derive"]}

utoipa = { version = "4.2.0", features = ["actix_extras"] }
utoipa-swagger-ui = { version = "6.0.0", features = ["actix-web"] }
utoipa = { version = "4.2.3", features = ["actix_extras"] }
utoipa-swagger-ui = { version = "7.0.1", features = ["actix-web"] }

sqlx = { version = "0.7.4", features = ["runtime-async-std", "sqlite"] }
mini-moka = "0.10.3"
cached = { version = "0.51.3", features = ["async"] }

anyhow = "1.0.81"
serde = { version = "1.0.197", features = ["derive"] }
serde_json = "1.0.115"
anyhow = "1.0.86"
serde = { version = "1.0.202", features = ["derive"] }
serde_json = "1.0.117"
strum = {version="0.26.2", features = ["derive"]}
rand = "0.9.0-alpha.0"
rand = "0.9.0-alpha.1"
counter = "0.5.7"
dotenvy = "0.15.7"
regex = "1.10.4"

env_logger = "0.11.3"
log = "0.4.21"
maplit = "1.0.2"
num-traits = "0.2.18"
num-traits = "0.2.19"

[build-dependencies]
tokio = { version = "1", features = ["macros", "rt-multi-thread", "rt"] }
anyhow = "1.0.86"
sqlx = {version = "0.7.4", features = ["runtime-async-std", "sqlite"]}
dotenvy = "0.15.7"
113 changes: 113 additions & 0 deletions build/creature_core_db_init.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
use anyhow::Result;
use sqlx::{Pool, Sqlite};

/// Build-time (re)creation of the denormalized CREATURE_CORE table.
///
/// Order matters: drop any stale CREATURE_CORE, materialize
/// TMP_CREATURE_CORE from the normalized tables, create the empty
/// CREATURE_CORE schema, then append the role percentage columns.
///
/// # Errors
/// Propagates any sqlx error raised by the underlying statements.
pub async fn create_creature_core_table(conn: &Pool<Sqlite>) -> Result<()> {
    delete_core_table(conn).await?;
    create_temporary_table(conn).await?;
    // Defaults act as sentinels for missing data (hp = -1, level = -99).
    sqlx::query!(
        "
CREATE TABLE IF NOT EXISTS CREATURE_CORE(
id INTEGER PRIMARY KEY NOT NULL,
aon_id INTEGER,
name TEXT NOT NULL DEFAULT '',
hp INTEGER NOT NULL DEFAULT -1,
level INTEGER NOT NULL DEFAULT -99,
size TEXT NOT NULL DEFAULT 'MEDIUM',
rarity TEXT NOT NULL DEFAULT 'COMMON',
is_melee BOOL NOT NULL DEFAULT 0,
is_ranged BOOL NOT NULL DEFAULT 0,
is_spell_caster BOOL NOT NULL DEFAULT 0,
archive_link TEXT,
cr_type TEXT NOT NULL DEFAULT 'MONSTER',
family TEXT NOT NULL DEFAULT '-',
license TEXT NOT NULL DEFAULT '',
source TEXT NOT NULL DEFAULT '',
remaster BOOL NOT NULL DEFAULT 0
)"
    )
    .execute(conn)
    .await?;
    insert_role_columns_in_core_table(conn).await?;
    Ok(())
}

/// Materializes TMP_CREATURE_CORE: one row per creature (GROUP BY ct.id)
/// aggregating CREATURE_TABLE with weapon and spell presence flags plus a
/// precomputed Archives of Nethys link.
///
/// NOTE(review): the LEFT JOIN on WEAPON_TABLE combined with GROUP BY ct.id
/// leaves SQLite free to pick an arbitrary joined weapon row, so is_melee /
/// is_ranged are derived from a single weapon — confirm a creature owning
/// both melee and ranged weapons gets both flags set as intended.
async fn create_temporary_table(conn: &Pool<Sqlite>) -> Result<()> {
    sqlx::query!("
CREATE TABLE IF NOT EXISTS TMP_CREATURE_CORE AS
SELECT
ct.id,
ct.aon_id,
ct.name,
ct.hp,
ct.LEVEL,
ct.SIZE,
ct.rarity,
ct.license,
ct.source,
ct.remaster,
CASE WHEN wt.creature_id IS NOT NULL AND UPPER(wt.wp_type)='MELEE' THEN TRUE ELSE FALSE END AS is_melee,
CASE WHEN wt.creature_id IS NOT NULL AND UPPER(wt.wp_type)='RANGED' THEN TRUE ELSE FALSE END AS is_ranged,
CASE WHEN st.creature_id IS NOT NULL THEN TRUE ELSE FALSE END AS is_spell_caster,
CASE WHEN ct.aon_id IS NOT NULL THEN CONCAT('https://2e.aonprd.com/', CAST(UPPER(COALESCE(UPPER(ct.cr_type) , 'MONSTER')) AS TEXT), 's' , '.aspx?ID=', CAST(ct.aon_id AS TEXT)) ELSE NULL END AS archive_link,
COALESCE(ct.cr_type , 'Monster') AS cr_type,
COALESCE(ct.family , '-') AS family
FROM CREATURE_TABLE ct
LEFT JOIN WEAPON_TABLE wt ON ct.id = wt.creature_id
LEFT JOIN SPELL_TABLE st ON ct.id = st.creature_id
GROUP BY ct.id;
"
    // Be careful, cr_type must be either Monster or NPC or we have runtime error
    ).execute(conn).await?;
    Ok(())
}

/// Copies the aggregated rows from TMP_CREATURE_CORE into CREATURE_CORE.
///
/// Uses the unchecked `sqlx::query` (not the `query!` macro) because
/// TMP_CREATURE_CORE only exists while the build script runs, so the
/// statement cannot be validated at compile time.
///
/// # Errors
/// Fails if either table is missing or the column sets diverge.
pub async fn initialize_data(conn: &Pool<Sqlite>) -> Result<()> {
    sqlx::query(
        "
INSERT INTO CREATURE_CORE (
id, aon_id, name, hp, level, size, rarity,
license, source, remaster, is_melee, is_ranged,
is_spell_caster, archive_link, cr_type, family
) SELECT
id, aon_id, name, hp, level, size, rarity,
license, source, remaster, is_melee, is_ranged,
is_spell_caster, archive_link, cr_type, family
FROM TMP_CREATURE_CORE;
",
    )
    .execute(conn)
    .await?;
    Ok(())
}

/// Appends one percentage column per creature role to CREATURE_CORE, all
/// defaulting to 0; the real percentages are filled in later, at app
/// startup, by the core initializer.
///
/// NOTE(review): this sends multiple ALTER TABLE statements in a single
/// query string — confirm the SQLite driver executes the whole batch and
/// not just the first statement.
async fn insert_role_columns_in_core_table(conn: &Pool<Sqlite>) -> Result<()> {
    sqlx::query(
        "
ALTER TABLE CREATURE_CORE ADD brute_percentage INTEGER NOT NULL DEFAULT 0;
ALTER TABLE CREATURE_CORE ADD magical_striker_percentage INTEGER NOT NULL DEFAULT 0;
ALTER TABLE CREATURE_CORE ADD skill_paragon_percentage INTEGER NOT NULL DEFAULT 0;
ALTER TABLE CREATURE_CORE ADD skirmisher_percentage INTEGER NOT NULL DEFAULT 0;
ALTER TABLE CREATURE_CORE ADD sniper_percentage INTEGER NOT NULL DEFAULT 0;
ALTER TABLE CREATURE_CORE ADD soldier_percentage INTEGER NOT NULL DEFAULT 0;
ALTER TABLE CREATURE_CORE ADD spell_caster_percentage INTEGER NOT NULL DEFAULT 0;
",
    )
    .execute(conn)
    .await?;
    Ok(())
}

/// Removes temporary tables created during execution of init.
///
/// # Errors
/// Fails if TMP_CREATURE_CORE does not exist (i.e. init never ran or
/// cleanup already happened).
pub async fn cleanup_db(conn: &Pool<Sqlite>) -> Result<()> {
    sqlx::query("DROP TABLE TMP_CREATURE_CORE")
        .execute(conn)
        .await?;
    Ok(())
}

/// Drops CREATURE_CORE if present, so every build starts from a clean slate.
async fn delete_core_table(conn: &Pool<Sqlite>) -> Result<()> {
    sqlx::query!("DROP TABLE IF EXISTS CREATURE_CORE")
        .execute(conn)
        .await?;
    Ok(())
}
31 changes: 31 additions & 0 deletions build/main.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
mod creature_core_db_init;

use dotenvy::dotenv;
use sqlx::sqlite::SqliteConnectOptions;
use sqlx::SqlitePool;
use std::env;
use std::str::FromStr;

/// Build-script entry point: connects to the SQLite database pointed to by
/// the DATABASE_URL environment variable and (re)builds the CREATURE_CORE
/// aggregation table, then drops the temporary scaffolding.
#[tokio::main]
async fn main() {
    // Load a .env file if one exists; its absence is not an error.
    dotenv().ok();
    let db_url = env::var("DATABASE_URL")
        .expect("DB URL IS NOT SET.. Aborting. Hint: set DATABASE_URL environmental variable");

    // Build the connection options first so each failure mode has its own message.
    let options = SqliteConnectOptions::from_str(&db_url)
        .expect("Could not find a valid db in the given path")
        .create_if_missing(true);
    let conn = SqlitePool::connect_with(options)
        .await
        .expect("Could not connect to the given db url, something went wrong..");

    // Order matters: schema first, then data copy, then temp-table cleanup.
    creature_core_db_init::create_creature_core_table(&conn)
        .await
        .expect("Could not initialize tables inside the db, something went wrong..");
    creature_core_db_init::initialize_data(&conn)
        .await
        .expect("Could not populate the db, something went wrong..");
    creature_core_db_init::cleanup_db(&conn)
        .await
        .expect("Could not clean up the db. Dirty state detected, closing..")
}
151 changes: 151 additions & 0 deletions src/db/cr_core_initializer.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,151 @@
use crate::db::data_providers::fetcher::{
fetch_creature_combat_data, fetch_creature_extra_data, fetch_creature_scales,
fetch_creature_spell_caster_data,
};
use crate::models::creature_component::creature_core::EssentialData;
use crate::models::creature_metadata::creature_role::CreatureRoleEnum;
use crate::models::creature_metadata::rarity_enum::RarityEnum;
use crate::models::creature_metadata::size_enum::SizeEnum;
use crate::models::creature_metadata::type_enum::CreatureTypeEnum;
use crate::models::routers_validator_structs::PaginatedRequest;
use anyhow::{bail, Result};
use serde::{Deserialize, Serialize};
use sqlx::{FromRow, Pool, Sqlite};

/// Handler for startup, first creature_core initialization. Then it shouldn't be used.
///
/// Walks every creature in CREATURE_TABLE, computes its role percentages
/// from scales/extra/combat/spell-caster data, and persists each percentage
/// into the matching CREATURE_CORE role column.
///
/// # Errors
/// Fails if any fetch or any role-column UPDATE fails.
pub async fn update_creature_core_table(conn: &Pool<Sqlite>) -> Result<()> {
    // page_size = -1 becomes "LIMIT 0,-1", which SQLite treats as unbounded:
    // we want every creature exactly once during initialization.
    let pagination = PaginatedRequest {
        cursor: 0,
        page_size: -1,
    };
    let scales = fetch_creature_scales(conn).await?;
    for cr in get_creatures_raw_essential_data(conn, &pagination).await? {
        let essential_data = EssentialData {
            id: cr.id,
            aon_id: cr.aon_id,
            name: cr.name,
            hp: cr.hp,
            level: cr.level,
            size: cr.size,
            // unwrap_or_else: only allocate the "-" fallback when family is None
            // (unwrap_or would build the String for every creature).
            family: cr.family.unwrap_or_else(|| String::from("-")),
            rarity: cr.rarity,
            license: cr.license,
            remaster: cr.remaster,
            source: cr.source,
            cr_type: CreatureTypeEnum::from(cr.cr_type),
        };
        // Auxiliary data required by the role heuristics.
        let extra_data = fetch_creature_extra_data(conn, essential_data.id).await?;
        let combat_data = fetch_creature_combat_data(conn, essential_data.id).await?;
        let spell_caster_data = fetch_creature_spell_caster_data(conn, essential_data.id).await?;
        let roles = CreatureRoleEnum::from_creature_with_given_scales(
            &essential_data,
            &extra_data,
            &combat_data,
            &spell_caster_data,
            &scales,
        );
        for (curr_role, curr_percentage) in roles {
            update_role_column_value(conn, curr_role, curr_percentage, essential_data.id).await?;
        }
    }
    Ok(())
}

/// Persists a single role percentage for the given creature in CREATURE_CORE.
///
/// Each match arm repeats the UPDATE with a different hard-coded column
/// name: the `query!` macro requires a string literal to compile-time check
/// the statement, so the column name cannot be interpolated.
///
/// # Errors
/// Bails if the UPDATE affects no row (i.e. the creature id is unknown).
async fn update_role_column_value(
    conn: &Pool<Sqlite>,
    role: CreatureRoleEnum,
    value: i64,
    creature_id: i64,
) -> Result<()> {
    let x = match role {
        CreatureRoleEnum::Brute => {
            sqlx::query!(
                "UPDATE CREATURE_CORE SET brute_percentage = ? WHERE id = ?",
                value,
                creature_id
            )
        }
        CreatureRoleEnum::MagicalStriker => {
            sqlx::query!(
                "UPDATE CREATURE_CORE SET magical_striker_percentage = ? WHERE id = ?",
                value,
                creature_id
            )
        }
        CreatureRoleEnum::SkillParagon => {
            sqlx::query!(
                "UPDATE CREATURE_CORE SET skill_paragon_percentage = ? WHERE id = ?",
                value,
                creature_id
            )
        }
        CreatureRoleEnum::Skirmisher => {
            sqlx::query!(
                "UPDATE CREATURE_CORE SET skirmisher_percentage = ? WHERE id = ?",
                value,
                creature_id
            )
        }
        CreatureRoleEnum::Sniper => {
            sqlx::query!(
                "UPDATE CREATURE_CORE SET sniper_percentage = ? WHERE id = ?",
                value,
                creature_id
            )
        }
        CreatureRoleEnum::Soldier => {
            sqlx::query!(
                "UPDATE CREATURE_CORE SET soldier_percentage = ? WHERE id = ?",
                value,
                creature_id
            )
        }
        CreatureRoleEnum::SpellCaster => {
            sqlx::query!(
                "UPDATE CREATURE_CORE SET spell_caster_percentage = ? WHERE id = ?",
                value,
                creature_id
            )
        }
    }
    .execute(conn)
    .await?;
    // Zero affected rows means the id never made it into CREATURE_CORE.
    if x.rows_affected() < 1 {
        bail!("Error encountered with creature id: {creature_id}. Could not update role: {role}")
    }
    Ok(())
}

/// Fetches the raw essential columns for creatures from CREATURE_TABLE,
/// ordered by name and paginated via `LIMIT cursor, page_size`.
///
/// A negative page_size (e.g. -1) means "no limit" in SQLite, which is how
/// the startup initializer retrieves every creature in a single call.
async fn get_creatures_raw_essential_data(
    conn: &Pool<Sqlite>,
    paginated_request: &PaginatedRequest,
) -> Result<Vec<RawEssentialData>> {
    Ok(sqlx::query_as!(
        RawEssentialData,
        "SELECT
        id, aon_id, name, hp, level, size, family, rarity,
        license, remaster, source, cr_type
        FROM CREATURE_TABLE ORDER BY name LIMIT ?,?",
        paginated_request.cursor,
        paginated_request.page_size
    )
    .fetch_all(conn)
    .await?)
}

/// Direct row mirror of the essential columns of CREATURE_TABLE, before any
/// enrichment (role percentages, archive link, ...) is applied.
#[derive(Serialize, Deserialize, FromRow, Clone)]
pub struct RawEssentialData {
    pub id: i64,
    // Archives of Nethys id, when the creature exists there.
    pub aon_id: Option<i64>,
    pub name: String,
    pub hp: i64,
    pub level: i64,
    pub size: SizeEnum,
    // None is normalized to "-" by the consumer.
    pub family: Option<String>,
    pub rarity: RarityEnum,
    pub license: String,
    pub remaster: bool,
    pub source: String,
    // Free-form in the DB; converted via CreatureTypeEnum::from downstream.
    pub cr_type: Option<String>,
}
Loading

0 comments on commit 69237f1

Please sign in to comment.