Lots of small refactorings #1107

Merged: 1 commit, Dec 26, 2023
4 changes: 2 additions & 2 deletions Cargo.lock

Some generated files are not rendered by default.

39 changes: 1 addition & 38 deletions docs/src/development.md
@@ -33,41 +33,4 @@ Install [Just](https://github.com/casey/just#readme) (improved makefile processor):
cargo install just
```

-When developing MBTiles SQL code, you may need to use `just prepare-sqlite` whenever SQL queries are modified. Run `just` to see all available commands:
-
-```shell, ignore
-❯ just
-Available recipes:
-    run *ARGS              # Start Martin server
-    run-release *ARGS      # Start release-compiled Martin server and a test database
-    debug-page *ARGS       # Start Martin server and open a test page
-    psql *ARGS             # Run PSQL utility against the test database
-    pg_dump *ARGS          # Run pg_dump utility against the test database
-    clean                  # Perform cargo clean to delete all build files
-    start                  # Start a test database
-    start-ssl              # Start an ssl-enabled test database
-    start-legacy           # Start a legacy test database
-    restart                # Restart the test database
-    stop                   # Stop the test database
-    bench                  # Run benchmark tests
-    bench-http             # Run HTTP requests benchmark using OHA tool. Use with `just run-release`
-    test                   # Run all tests using a test database
-    test-ssl               # Run all tests using an SSL connection to a test database. Expected output won't match.
-    test-legacy            # Run all tests using the oldest supported version of the database
-    test-cargo *ARGS       # Run Rust unit and doc tests (cargo test)
-    test-int               # Run integration tests
-    bless                  # Run integration tests and save its output as the new expected output
-    book                   # Build and open mdbook documentation
-    package-deb            # Build debian package
-    docs                   # Build and open code documentation
-    coverage FORMAT='html' # Run code coverage on tests and save its output in the coverage directory. Parameter could be html or lcov.
-    docker-build           # Build martin docker image
-    docker-run *ARGS       # Build and run martin docker image
-    git *ARGS              # Do any git command, ensuring that the testing environment is set up. Accepts the same arguments as git.
-    print-conn-str         # Print the connection string for the test database
-    lint                   # Run cargo fmt and cargo clippy
-    fmt                    # Run cargo fmt
-    fmt2                   # Run Nightly cargo fmt, ordering imports
-    clippy                 # Run cargo clippy
-    prepare-sqlite         # Update sqlite database schema.
-```
+When developing MBTiles SQL code, you may need to use `just prepare-sqlite` whenever SQL queries are modified. Run `just` to see all available commands.
2 changes: 1 addition & 1 deletion justfile
@@ -102,7 +102,7 @@ bench-server: start

# Run HTTP requests benchmark using OHA tool. Use with `just bench-server`
bench-http: (cargo-install "oha")
-    @echo "Make sure Martin was started with 'just run-release'"
+    @echo "ATTENTION: Make sure Martin was started with just bench-server"
    @echo "Warming up..."
    oha -z 5s --no-tui http://localhost:3000/function_zxy_query/18/235085/122323 > /dev/null
    oha -z 60s http://localhost:3000/function_zxy_query/18/235085/122323
2 changes: 1 addition & 1 deletion martin/Cargo.toml
@@ -60,7 +60,7 @@ harness = false

[features]
default = ["fonts", "mbtiles", "pmtiles", "postgres", "sprites"]
fonts = ["dep:bit-set","dep:pbf_font_tools"]
fonts = ["dep:bit-set", "dep:pbf_font_tools"]
mbtiles = []
pmtiles = ["dep:moka"]
postgres = ["dep:deadpool-postgres", "dep:json-patch", "dep:postgis", "dep:postgres", "dep:postgres-protocol", "dep:semver", "dep:tokio-postgres-rustls"]
4 changes: 2 additions & 2 deletions martin/benches/bench.rs
@@ -45,8 +45,8 @@ impl Source for NullSource {

    async fn get_tile(
        &self,
-        _xyz: &TileCoord,
-        _query: &Option<UrlQuery>,
+        _xyz: TileCoord,
+        _url_query: Option<&UrlQuery>,
    ) -> MartinResult<TileData> {
        Ok(Vec::new())
    }
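Note on the pattern above, which recurs throughout this PR: `TileCoord` is a small `Copy` struct, so passing it by value costs the same as passing a reference, and `Option<&UrlQuery>` replaces `&Option<UrlQuery>` so callers no longer need to own an `Option` just to pass nothing. A minimal sketch of the convention, using hypothetical stand-in types rather than the real martin definitions:

```rust
use std::collections::HashMap;

type UrlQuery = HashMap<String, String>;

#[derive(Clone, Copy, Debug)]
struct TileCoord {
    z: u8,
    x: u64,
    y: u64,
}

// New-style signature: coordinate by value, query as Option<&T>.
// Hypothetical free function; martin's real get_tile is an async trait method.
fn get_tile(xyz: TileCoord, url_query: Option<&UrlQuery>) -> Vec<u8> {
    let _ = (xyz, url_query); // a real source would build a tile here
    Vec::new()
}

fn main() {
    let xyz = TileCoord { z: 0, x: 0, y: 0 };
    let owned: Option<UrlQuery> = Some(UrlQuery::new());
    get_tile(xyz, None); // no owned Option needed at the call site
    get_tile(xyz, owned.as_ref()); // &Option<T> bridges to Option<&T> via as_ref()
}
```

The `owned.as_ref()` bridge is the standard way to lend an optional field to such an API; the reverse direction needs no conversion at all.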
2 changes: 1 addition & 1 deletion martin/src/bin/martin-cp.rs
@@ -311,7 +311,7 @@ async fn run_tile_copy(args: CopyArgs, state: ServerState) -> MartinCpResult<()>
        .try_for_each_concurrent(concurrency, |xyz| {
            let tx = tx.clone();
            async move {
-                let tile = get_tile_content(sources, info, &xyz, query, encodings).await?;
+                let tile = get_tile_content(sources, info, xyz, query, encodings).await?;
                let data = tile.data;
                tx.send(TileXyz { xyz, data })
                    .await
4 changes: 2 additions & 2 deletions martin/src/mbtiles/mod.rs
@@ -105,8 +105,8 @@ impl Source for MbtSource {

    async fn get_tile(
        &self,
-        xyz: &TileCoord,
-        _url_query: &Option<UrlQuery>,
+        xyz: TileCoord,
+        _url_query: Option<&UrlQuery>,
    ) -> MartinResult<TileData> {
        if let Some(tile) = self
            .mbtiles
2 changes: 1 addition & 1 deletion martin/src/pg/config_function.rs
@@ -80,6 +80,6 @@ impl PgInfo for FunctionInfo {
        tilejson.minzoom = self.minzoom;
        tilejson.maxzoom = self.maxzoom;
        tilejson.bounds = self.bounds;
-        patch_json(tilejson, &self.tilejson)
+        patch_json(tilejson, self.tilejson.as_ref())
    }
}
2 changes: 1 addition & 1 deletion martin/src/pg/config_table.rs
@@ -100,6 +100,6 @@ impl PgInfo for TableInfo {
            other: BTreeMap::default(),
        };
        tilejson.vector_layers = Some(vec![layer]);
-        patch_json(tilejson, &self.tilejson)
+        patch_json(tilejson, self.tilejson.as_ref())
    }
}
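Both `PgInfo` impls now lend the configured TileJSON patch via `Option::as_ref`, which converts `&Option<T>` into `Option<&T>` without cloning. A hedged sketch of the call-site mechanics; this `patch_json` stand-in only illustrates the signature, not martin's actual json-patch based merge:

```rust
use serde_json::{json, Value};

struct FunctionInfo {
    tilejson: Option<Value>,
}

// Illustrative stand-in: the real patch_json merges a JSON patch into a TileJSON.
fn patch_json(target: Value, patch: Option<&Value>) -> Value {
    match patch {
        Some(p) => json!({ "base": target, "patched_with": p }),
        None => target, // nothing to merge in, keep the original
    }
}

fn main() {
    let info = FunctionInfo {
        tilejson: Some(json!({ "minzoom": 0 })),
    };
    // `&info.tilejson` is a `&Option<Value>`; `.as_ref()` lends it as
    // `Option<&Value>` without moving the field out of `info`.
    let merged = patch_json(json!({ "name": "fn_source" }), info.tilejson.as_ref());
    println!("{merged}");
}
```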
14 changes: 7 additions & 7 deletions martin/src/pg/configurator.rs
@@ -214,7 +214,7 @@ impl PgBuilder {
                    continue;
                }
                Ok((id, pg_sql, src_inf)) => {
-                    debug!("{id} query: {}", pg_sql.query);
+                    debug!("{id} query: {}", pg_sql.sql_query);
                    self.add_func_src(&mut res, id.clone(), &src_inf, pg_sql.clone());
                    info_map.insert(id, src_inf);
                }
@@ -252,7 +252,7 @@ impl PgBuilder {
            warn_on_rename(id, &id2, "Function");
            let signature = &pg_sql.signature;
            info!("Configured {dup}source {id2} from the function {signature}");
-            debug!("{id2} query: {}", pg_sql.query);
+            debug!("{id2} query: {}", pg_sql.sql_query);
            info_map.insert(id2, merged_inf);
        }

@@ -285,7 +285,7 @@ impl PgBuilder {
                let id2 = self.resolve_id(&source_id, &db_inf);
                self.add_func_src(&mut res, id2.clone(), &db_inf, pg_sql.clone());
                info!("Discovered source {id2} from function {}", pg_sql.signature);
-                debug!("{id2} query: {}", pg_sql.query);
+                debug!("{id2} query: {}", pg_sql.sql_query);
                info_map.insert(id2, db_inf);
            }
        }
}
@@ -302,11 +302,11 @@ impl PgBuilder {
        &self,
        sources: &mut TileInfoSources,
        id: String,
-        info: &impl PgInfo,
-        sql: PgSqlInfo,
+        pg_info: &impl PgInfo,
+        sql_info: PgSqlInfo,
    ) {
-        let tilejson = info.to_tilejson(id.clone());
-        let source = PgSource::new(id, sql, tilejson, self.pool.clone());
+        let tilejson = pg_info.to_tilejson(id.clone());
+        let source = PgSource::new(id, sql_info, tilejson, self.pool.clone());
        sources.push(Box::new(source));
    }
}
4 changes: 2 additions & 2 deletions martin/src/pg/errors.rs
@@ -61,6 +61,6 @@ pub enum PgError {
#[error(r#"Unable to get tile {2:#} from {1}: {0}"#)]
GetTileError(#[source] TokioPgError, String, TileCoord),

#[error(r#"Unable to get tile {2:#} with {:?} params from {1}: {0}"#, query_to_json(.3))]
GetTileWithQueryError(#[source] TokioPgError, String, TileCoord, UrlQuery),
#[error(r#"Unable to get tile {2:#} with {:?} params from {1}: {0}"#, query_to_json(.3.as_ref()))]
GetTileWithQueryError(#[source] TokioPgError, String, TileCoord, Option<UrlQuery>),
}
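The `#[error]` attribute above leans on a thiserror feature worth calling out: format arguments may be arbitrary expressions over positional fields, so `.3.as_ref()` turns the stored `Option<UrlQuery>` back into the `Option<&UrlQuery>` that `query_to_json` now expects. A minimal sketch with stand-in types (assumes the thiserror crate):

```rust
use std::collections::HashMap;

use thiserror::Error;

type UrlQuery = HashMap<String, String>;

// Stand-in for martin's query_to_json: render the optional query for display.
fn render_query(query: Option<&UrlQuery>) -> String {
    query.map_or_else(|| "{}".to_string(), |q| format!("{q:?}"))
}

#[derive(Error, Debug)]
enum TileError {
    // `.1` is thiserror's shorthand for the variant's second field, and whole
    // expressions such as `.1.as_ref()` are allowed as format arguments,
    // the same mechanism as `query_to_json(.3.as_ref())` above.
    #[error("Unable to get tile from {0} with {} params", render_query(.1.as_ref()))]
    WithQuery(String, Option<UrlQuery>),
}

fn main() {
    println!("{}", TileError::WithQuery("pg_source".into(), None));
}
```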
12 changes: 6 additions & 6 deletions martin/src/pg/function_source.rs
@@ -29,10 +29,10 @@ pub async fn query_available_function(pool: &PgPool) -> PgResult<SqlFuncInfoMapMap> {
        let schema: String = row.get("schema");
        let function: String = row.get("name");
        let output_type: String = row.get("output_type");
-        let output_record_types = jsonb_to_vec(&row.get("output_record_types"));
-        let output_record_names = jsonb_to_vec(&row.get("output_record_names"));
-        let input_types = jsonb_to_vec(&row.get("input_types")).expect("Can't get input types");
-        let input_names = jsonb_to_vec(&row.get("input_names")).expect("Can't get input names");
+        let output_record_types = jsonb_to_vec(row.get("output_record_types"));
+        let output_record_names = jsonb_to_vec(row.get("output_record_names"));
+        let input_types = jsonb_to_vec(row.get("input_types")).expect("Can't get input types");
+        let input_names = jsonb_to_vec(row.get("input_names")).expect("Can't get input names");
        let tilejson = if let Some(text) = row.get("description") {
            match serde_json::from_str::<Value>(text) {
                Ok(v) => Some(v),
@@ -126,8 +126,8 @@ pub fn merge_func_info(cfg_inf: &FunctionInfo, db_inf: &FunctionInfo) -> FunctionInfo {
    }
}

-fn jsonb_to_vec(jsonb: &Option<Value>) -> Option<Vec<String>> {
-    jsonb.as_ref().map(|json| {
+fn jsonb_to_vec(jsonb: Option<Value>) -> Option<Vec<String>> {
+    jsonb.map(|json| {
        json.as_array()
            .unwrap()
            .iter()
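The `jsonb_to_vec` change works because tokio-postgres's `row.get(..)` already yields an owned `Option<Value>`, so taking the parameter by value drops a pointless `&Option<Value>` + `as_ref()` hop. A close, self-contained sketch of the reworked helper (panicking on malformed input as bluntly as the original `unwrap()` does):

```rust
use serde_json::{json, Value};

// Takes the JSONB column by value: `row.get(..)` already yields an owned
// `Option<Value>`, so there is nothing to borrow.
fn jsonb_to_vec(jsonb: Option<Value>) -> Option<Vec<String>> {
    jsonb.map(|json| {
        json.as_array()
            .expect("JSONB value must be an array")
            .iter()
            .map(|v| v.as_str().expect("array elements must be strings").to_string())
            .collect()
    })
}

fn main() {
    let names = jsonb_to_vec(Some(json!(["z", "x", "y"])));
    assert_eq!(names, Some(vec!["z".into(), "x".into(), "y".into()]));
    assert_eq!(jsonb_to_vec(None), None);
}
```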
27 changes: 11 additions & 16 deletions martin/src/pg/pg_source.rs
@@ -1,5 +1,3 @@
-use std::collections::HashMap;
-
use async_trait::async_trait;
use deadpool_postgres::tokio_postgres::types::{ToSql, Type};
use log::debug;
@@ -58,35 +56,32 @@ impl Source for PgSource {

    async fn get_tile(
        &self,
-        xyz: &TileCoord,
-        url_query: &Option<UrlQuery>,
+        xyz: TileCoord,
+        url_query: Option<&UrlQuery>,
    ) -> MartinResult<TileData> {
-        let empty_query = HashMap::new();
-        let url_query = url_query.as_ref().unwrap_or(&empty_query);
        let conn = self.pool.get().await?;

        let param_types: &[Type] = if self.support_url_query() {
            &[Type::INT2, Type::INT8, Type::INT8, Type::JSON]
        } else {
            &[Type::INT2, Type::INT8, Type::INT8]
        };

-        let query = &self.info.query;
+        let sql = &self.info.sql_query;
        let prep_query = conn
-            .prepare_typed_cached(query, param_types)
+            .prepare_typed_cached(sql, param_types)
            .await
            .map_err(|e| {
                PrepareQueryError(
                    e,
                    self.id.to_string(),
                    self.info.signature.to_string(),
-                    self.info.query.to_string(),
+                    self.info.sql_query.to_string(),
                )
            })?;

        let tile = if self.support_url_query() {
            let json = query_to_json(url_query);
-            debug!("SQL: {query} [{xyz}, {json:?}]");
+            debug!("SQL: {sql} [{xyz}, {json:?}]");
            let params: &[&(dyn ToSql + Sync)] = &[
                &i16::from(xyz.z),
                &i64::from(xyz.x),
@@ -95,7 +90,7 @@ impl Source for PgSource {
            ];
            conn.query_opt(&prep_query, params).await
        } else {
-            debug!("SQL: {query} [{xyz}]");
+            debug!("SQL: {sql} [{xyz}]");
            conn.query_opt(
                &prep_query,
                &[&i16::from(xyz.z), &i64::from(xyz.x), &i64::from(xyz.y)],
@@ -107,9 +102,9 @@ impl Source for PgSource {
        .map(|row| row.and_then(|r| r.get::<_, Option<TileData>>(0)))
        .map_err(|e| {
            if self.support_url_query() {
-                GetTileWithQueryError(e, self.id.to_string(), *xyz, url_query.clone())
+                GetTileWithQueryError(e, self.id.to_string(), xyz, url_query.cloned())
            } else {
-                GetTileError(e, self.id.to_string(), *xyz)
+                GetTileError(e, self.id.to_string(), xyz)
            }
        })?
        .unwrap_or_default();
@@ -120,7 +115,7 @@

#[derive(Clone, Debug)]
pub struct PgSqlInfo {
-    pub query: String,
+    pub sql_query: String,
    pub use_url_query: bool,
    pub signature: String,
}
@@ -129,7 +124,7 @@ impl PgSqlInfo {
    #[must_use]
    pub fn new(query: String, has_query_params: bool, signature: String) -> Self {
        Self {
-            query,
+            sql_query: query,
            use_url_query: has_query_params,
            signature,
        }
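The most visible win of `Option<&UrlQuery>` is at the top of `get_tile` above: the old signature forced the method to materialize an `empty_query` map just to unwrap the reference, while the new one lets `None` flow through (and `url_query.cloned()` recovers an owned `Option<UrlQuery>` for the error path). A before/after sketch under stand-in types:

```rust
use std::collections::HashMap;

type UrlQuery = HashMap<String, String>;

// Old shape: a placeholder map had to be materialized just to satisfy &UrlQuery.
fn old_way(url_query: &Option<UrlQuery>) -> usize {
    let empty_query = HashMap::new();
    let url_query = url_query.as_ref().unwrap_or(&empty_query);
    url_query.len()
}

// New shape: the None case flows through, no placeholder value needed.
fn new_way(url_query: Option<&UrlQuery>) -> usize {
    url_query.map_or(0, HashMap::len)
}

fn main() {
    assert_eq!(old_way(&None), new_way(None));
}
```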
14 changes: 8 additions & 6 deletions martin/src/pg/utils.rs
@@ -57,7 +57,7 @@ pub fn json_to_hashmap(value: &serde_json::Value) -> InfoMap<String> {
}

#[must_use]
-pub fn patch_json(target: TileJSON, patch: &Option<serde_json::Value>) -> TileJSON {
+pub fn patch_json(target: TileJSON, patch: Option<&serde_json::Value>) -> TileJSON {
    let Some(tj) = patch else {
        // Nothing to merge in, keep the original
        return target;
@@ -85,13 +85,15 @@ pub fn patch_json(target: TileJSON, patch: &Option<serde_json::Value>) -> TileJSON {
}

#[must_use]
-pub fn query_to_json(query: &UrlQuery) -> Json<HashMap<String, serde_json::Value>> {
+pub fn query_to_json(query: Option<&UrlQuery>) -> Json<HashMap<String, serde_json::Value>> {
    let mut query_as_json = HashMap::new();
-    for (k, v) in query {
-        let json_value: serde_json::Value =
-            serde_json::from_str(v).unwrap_or_else(|_| serde_json::Value::String(v.clone()));
+    if let Some(query) = query {
+        for (k, v) in query {
+            let json_value: serde_json::Value =
+                serde_json::from_str(v).unwrap_or_else(|_| serde_json::Value::String(v.clone()));

-        query_as_json.insert(k.clone(), json_value);
+            query_as_json.insert(k.clone(), json_value);
+        }
    }

    Json(query_as_json)
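`query_to_json` now owns the None handling, returning an empty JSON map when no query is present, so no caller has to fabricate an empty `UrlQuery` first. A simplified, runnable sketch of the new contract; the real function returns the postgres `Json` wrapper, which the plain `HashMap` below stands in for:

```rust
use std::collections::HashMap;

use serde_json::{json, Value};

type UrlQuery = HashMap<String, String>;

fn query_to_json(query: Option<&UrlQuery>) -> HashMap<String, Value> {
    let mut query_as_json = HashMap::new();
    if let Some(query) = query {
        for (k, v) in query {
            // Values that parse as JSON are kept structured; anything else
            // falls back to a plain string.
            let json_value =
                serde_json::from_str(v).unwrap_or_else(|_| Value::String(v.clone()));
            query_as_json.insert(k.clone(), json_value);
        }
    }
    query_as_json
}

fn main() {
    assert!(query_to_json(None).is_empty()); // None -> empty map, no caller-side dummy
    let mut q = UrlQuery::new();
    q.insert("limit".to_string(), "10".to_string());
    assert_eq!(query_to_json(Some(&q))["limit"], json!(10));
}
```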
10 changes: 5 additions & 5 deletions martin/src/pmtiles/mod.rs
@@ -114,14 +114,14 @@ impl ConfigExtras for PmtConfig {
        self.client = Some(Client::new());

        // Allow cache size to be disabled with 0
-        let cache_size = self.dir_cache_size_mb.unwrap_or(32) * 1024 * 1024;
-        if cache_size > 0 {
+        let dir_cache_size = self.dir_cache_size_mb.unwrap_or(32) * 1024 * 1024;
+        if dir_cache_size > 0 {
            self.cache = Some(
                Cache::builder()
                    .weigher(|_key, value: &Directory| -> u32 {
                        value.get_approx_byte_size().try_into().unwrap_or(u32::MAX)
                    })
-                    .max_capacity(cache_size)
+                    .max_capacity(dir_cache_size)
                    .build(),
            );
        }
@@ -264,8 +264,8 @@ macro_rules! impl_pmtiles_source {

        async fn get_tile(
            &self,
-            xyz: &TileCoord,
-            _url_query: &Option<UrlQuery>,
+            xyz: TileCoord,
+            _url_query: Option<&UrlQuery>,
        ) -> MartinResult<TileData> {
            // TODO: optimize to return Bytes
            if let Some(t) = self
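For readers unfamiliar with the cache being configured above: moka's builder takes a `weigher` so that `max_capacity` counts weighted units (here bytes) rather than entries, and a capacity of zero effectively disables caching, which is what the `dir_cache_size_mb = 0` escape hatch relies on. A hedged sketch using moka's sync cache with simplified key/value types (the real code weighs PMTiles `Directory` values via `get_approx_byte_size`):

```rust
use moka::sync::Cache;

// Returns None when caching is disabled (size 0), mirroring the Option<Cache>
// field in PmtConfig. Key/value types are simplified stand-ins.
fn build_dir_cache(dir_cache_size_mb: Option<u64>) -> Option<Cache<String, Vec<u8>>> {
    // Allow cache size to be disabled with 0; default to 32 MiB.
    let dir_cache_size = dir_cache_size_mb.unwrap_or(32) * 1024 * 1024;
    (dir_cache_size > 0).then(|| {
        Cache::builder()
            // Weigh entries by payload size so max_capacity is a byte budget.
            .weigher(|_key, value: &Vec<u8>| value.len().try_into().unwrap_or(u32::MAX))
            .max_capacity(dir_cache_size)
            .build()
    })
}

fn main() {
    assert!(build_dir_cache(Some(0)).is_none()); // cache disabled
    let cache = build_dir_cache(None).expect("default 32 MiB cache");
    cache.insert("root_directory".to_string(), vec![0u8; 1024]);
    assert!(cache.get("root_directory").is_some());
}
```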
6 changes: 5 additions & 1 deletion martin/src/source.rs
@@ -113,7 +113,11 @@
        false
    }

-    async fn get_tile(&self, xyz: &TileCoord, query: &Option<UrlQuery>) -> MartinResult<TileData>;
+    async fn get_tile(
+        &self,
+        xyz: TileCoord,
+        url_query: Option<&UrlQuery>,
+    ) -> MartinResult<TileData>;

    fn is_valid_zoom(&self, zoom: u8) -> bool {
        let tj = self.get_tilejson();
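Pulling it together: the trait change above is the contract that every implementor in this PR (`NullSource`, `MbtSource`, `PgSource`, the PMTiles sources) now conforms to. A compilable sketch of the reshaped trait under stand-in types, assuming the async-trait and futures crates (martin's `pg_source.rs` visibly imports `async_trait`):

```rust
use std::collections::HashMap;

use async_trait::async_trait;

type UrlQuery = HashMap<String, String>;
type TileData = Vec<u8>;

#[derive(Clone, Copy, Debug)]
struct TileCoord {
    z: u8,
    x: u64,
    y: u64,
}

#[async_trait]
trait Source: Send {
    // Coordinate by value (TileCoord is Copy), query as Option<&UrlQuery>.
    async fn get_tile(
        &self,
        xyz: TileCoord,
        url_query: Option<&UrlQuery>,
    ) -> Result<TileData, String>;
}

// Mirrors the benchmark's NullSource: always returns an empty tile.
struct NullSource;

#[async_trait]
impl Source for NullSource {
    async fn get_tile(
        &self,
        _xyz: TileCoord,
        _url_query: Option<&UrlQuery>,
    ) -> Result<TileData, String> {
        Ok(Vec::new())
    }
}

fn main() {
    let source = NullSource;
    let xyz = TileCoord { z: 0, x: 0, y: 0 };
    let tile = futures::executor::block_on(source.get_tile(xyz, None));
    assert_eq!(tile, Ok(Vec::new()));
}
```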