Commit

use btree and unpublished crates
nyurik committed Nov 20, 2023
1 parent 54231ae commit 2c06824
Showing 12 changed files with 49 additions and 67 deletions.
18 changes: 8 additions & 10 deletions Cargo.lock

Some generated files are not rendered by default.

8 changes: 5 additions & 3 deletions Cargo.toml
@@ -73,7 +73,7 @@ sqlite-hashes = { version = "0.5", default-features = false, features = ["md5",
sqlx = { version = "0.7", features = ["sqlite", "runtime-tokio"] }
subst = { version = "0.3", features = ["yaml"] }
thiserror = "1"
tilejson = "0.3.4"
tilejson = "0.4"
tokio = { version = "1", features = ["macros"] }
tokio-postgres-rustls = "0.10"

@@ -84,7 +84,9 @@ sqlx-macros.opt-level = 3
insta.opt-level = 3
similar.opt-level = 3

#[patch.crates-io]
[patch.crates-io]
#pmtiles = { path = "../pmtiles-rs" }
#sqlite-hashes = { path = "/home/nyurik/dev/rust/sqlite-hashes" }
#sqlite-hashes = { path = "../sqlite-hashes" }
#tilejson = { path = "../tilejson" }
pmtiles = { git = "https://github.com/nyurik/pmtiles-rs", branch = "override" }
tilejson = { git = "https://github.com/nyurik/tilejson", branch = "btreemap" }
17 changes: 8 additions & 9 deletions martin/src/file_config.rs
@@ -1,4 +1,4 @@
use std::collections::{HashMap, HashSet};
use std::collections::{BTreeMap, HashSet};
use std::future::Future;
use std::mem;
use std::path::PathBuf;
@@ -10,7 +10,7 @@ use serde::{Deserialize, Serialize};
use crate::config::{copy_unrecognized_config, UnrecognizedValues};
use crate::file_config::FileError::{InvalidFilePath, InvalidSourceFilePath, IoError};
use crate::source::{Source, TileInfoSources};
use crate::utils::{sorted_opt_map, Error, IdResolver, OptOneMany};
use crate::utils::{Error, IdResolver, OptOneMany};
use crate::OptOneMany::{Many, One};

#[derive(thiserror::Error, Debug)]
@@ -44,13 +44,13 @@ pub enum FileConfigEnum {
impl FileConfigEnum {
#[must_use]
pub fn new(paths: Vec<PathBuf>) -> FileConfigEnum {
Self::new_extended(paths, HashMap::new(), UnrecognizedValues::new())
Self::new_extended(paths, BTreeMap::new(), UnrecognizedValues::new())
}

#[must_use]
pub fn new_extended(
paths: Vec<PathBuf>,
configs: HashMap<String, FileConfigSrc>,
configs: BTreeMap<String, FileConfigSrc>,
unrecognized: UnrecognizedValues,
) -> FileConfigEnum {
if configs.is_empty() && unrecognized.is_empty() {
@@ -118,8 +118,7 @@ pub struct FileConfig {
#[serde(default, skip_serializing_if = "OptOneMany::is_none")]
pub paths: OptOneMany<PathBuf>,
/// A map of source IDs to file paths or config objects
#[serde(serialize_with = "sorted_opt_map")]
pub sources: Option<HashMap<String, FileConfigSrc>>,
pub sources: Option<BTreeMap<String, FileConfigSrc>>,
#[serde(flatten)]
pub unrecognized: UnrecognizedValues,
}
@@ -195,7 +194,7 @@ where
};

let mut results = TileInfoSources::default();
let mut configs = HashMap::new();
let mut configs = BTreeMap::new();
let mut files = HashSet::new();
let mut directories = Vec::new();

@@ -272,7 +271,7 @@ where

#[cfg(test)]
mod tests {
use std::collections::HashMap;
use std::collections::BTreeMap;
use std::path::PathBuf;

use indoc::indoc;
@@ -306,7 +305,7 @@ mod tests {
);
assert_eq!(
cfg.sources,
Some(HashMap::from_iter(vec![
Some(BTreeMap::from_iter(vec![
(
"pm-src1".to_string(),
FileConfigSrc::Path(PathBuf::from("/tmp/file.ext"))
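
The file_config.rs change above captures the point of this commit: serde serializes a BTreeMap in ascending key order out of the box, so switching the field type lets the #[serde(serialize_with = "sorted_opt_map")] attribute and its helper be dropped. A minimal sketch of the idea, with a hypothetical struct name and field values, assuming serde (with derive) and serde_json are available:

use std::collections::BTreeMap;

use serde::Serialize;

// Hypothetical stand-in for a config struct such as FileConfig.
#[derive(Serialize)]
struct Example {
    // No custom serializer needed: BTreeMap keys always serialize in sorted order.
    sources: Option<BTreeMap<String, String>>,
}

fn main() {
    let cfg = Example {
        sources: Some(BTreeMap::from([
            ("pm-src2".to_string(), "/tmp/file2.ext".to_string()),
            ("pm-src1".to_string(), "/tmp/file1.ext".to_string()),
        ])),
    };
    // Keys come out as pm-src1, then pm-src2, regardless of insertion order.
    println!("{}", serde_json::to_string_pretty(&cfg).unwrap());
}
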
12 changes: 5 additions & 7 deletions martin/src/pg/config.rs
@@ -13,7 +13,7 @@ use crate::pg::config_table::TableInfoSources;
use crate::pg::configurator::PgBuilder;
use crate::pg::Result;
use crate::source::TileInfoSources;
use crate::utils::{on_slow, sorted_opt_map, IdResolver, OptBoolObj, OptOneMany};
use crate::utils::{on_slow, IdResolver, OptBoolObj, OptOneMany};

pub trait PgInfo {
fn format_id(&self) -> String;
@@ -46,9 +46,7 @@ pub struct PgConfig {
pub pool_size: Option<usize>,
#[serde(default, skip_serializing_if = "OptBoolObj::is_none")]
pub auto_publish: OptBoolObj<PgCfgPublish>,
#[serde(serialize_with = "sorted_opt_map")]
pub tables: Option<TableInfoSources>,
#[serde(serialize_with = "sorted_opt_map")]
pub functions: Option<FuncInfoSources>,
}

@@ -139,7 +137,7 @@ impl PgConfig {

#[cfg(test)]
mod tests {
use std::collections::HashMap;
use std::collections::BTreeMap;

use indoc::indoc;
use tilejson::Bounds;
@@ -237,7 +235,7 @@ mod tests {
default_srid: Some(4326),
pool_size: Some(20),
max_feature_count: Some(100),
tables: Some(HashMap::from([(
tables: Some(BTreeMap::from([(
"table_source".to_string(),
TableInfo {
schema: "public".to_string(),
@@ -251,14 +249,14 @@ mod tests {
buffer: Some(10),
clip_geom: Some(false),
geometry_type: some("GEOMETRY"),
properties: Some(HashMap::from([(
properties: Some(BTreeMap::from([(
"gid".to_string(),
"int4".to_string(),
)])),
..Default::default()
},
)])),
functions: Some(HashMap::from([(
functions: Some(BTreeMap::from([(
"function_zxy_query".to_string(),
FunctionInfo::new_extended(
"public".to_string(),
8 changes: 3 additions & 5 deletions martin/src/pg/config_table.rs
@@ -1,12 +1,11 @@
use std::collections::HashMap;
use std::collections::{BTreeMap, HashMap};

use serde::{Deserialize, Serialize};
use tilejson::{Bounds, TileJSON, VectorLayer};

use crate::config::UnrecognizedValues;
use crate::pg::config::PgInfo;
use crate::pg::utils::{patch_json, InfoMap};
use crate::utils::sorted_opt_map;

pub type TableInfoSources = InfoMap<TableInfo>;

@@ -64,8 +63,7 @@ pub struct TableInfo {
pub geometry_type: Option<String>,

/// List of columns, that should be encoded as tile properties
#[serde(serialize_with = "sorted_opt_map")]
pub properties: Option<HashMap<String, String>>,
pub properties: Option<BTreeMap<String, String>>,

/// Mapping of properties to the actual table columns
#[serde(skip_deserializing, skip_serializing)]
@@ -99,7 +97,7 @@ impl PgInfo for TableInfo {
description: None,
maxzoom: None,
minzoom: None,
other: HashMap::default(),
other: BTreeMap::default(),
};
tilejson.vector_layers = Some(vec![layer]);
patch_json(tilejson, &self.tilejson)
4 changes: 2 additions & 2 deletions martin/src/pg/table_source.rs
@@ -1,4 +1,4 @@
use std::collections::HashMap;
use std::collections::{BTreeMap, HashMap};

use futures::pin_mut;
use log::{debug, info, warn};
@@ -250,7 +250,7 @@ pub fn merge_table_info(
_ => {}
}

let empty = HashMap::new();
let empty = BTreeMap::new();
let props = db_inf.properties.as_ref().unwrap_or(&empty);

if let Some(id_column) = &cfg_inf.id_column {
12 changes: 6 additions & 6 deletions martin/src/pg/utils.rs
@@ -1,4 +1,4 @@
use std::collections::HashMap;
use std::collections::{BTreeMap, HashMap};

use deadpool_postgres::tokio_postgres::types::Json;
use log::{error, info, warn};
@@ -9,15 +9,15 @@ use crate::source::UrlQuery;

#[must_use]
pub fn json_to_hashmap(value: &serde_json::Value) -> InfoMap<String> {
let mut hashmap = HashMap::new();
let mut result = BTreeMap::new();

let object = value.as_object().unwrap();
for (key, value) in object {
let string_value = value.as_str().unwrap().to_string();
hashmap.insert(key.clone(), string_value);
result.insert(key.clone(), string_value);
}

hashmap
result
}

#[must_use]
@@ -49,7 +49,7 @@ pub fn patch_json(target: TileJSON, patch: &Option<serde_json::Value>) -> TileJS
}

#[must_use]
pub fn query_to_json(query: &UrlQuery) -> Json<InfoMap<serde_json::Value>> {
pub fn query_to_json(query: &UrlQuery) -> Json<HashMap<String, serde_json::Value>> {
let mut query_as_json = HashMap::new();
for (k, v) in query {
let json_value: serde_json::Value =
@@ -78,7 +78,7 @@ pub fn polygon_to_bbox(polygon: &ewkb::Polygon) -> Option<Bounds> {
})
}

pub type InfoMap<T> = HashMap<String, T>;
pub type InfoMap<T> = BTreeMap<String, T>;

#[must_use]
pub fn normalize_key<T>(map: &InfoMap<T>, key: &str, info: &str, id: &str) -> Option<String> {
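
With InfoMap now aliased to BTreeMap, maps built by json_to_hashmap iterate and serialize in sorted key order, while query_to_json keeps a plain HashMap, presumably because URL query parameters are only passed through as a JSON argument where ordering does not matter. A small usage sketch with hypothetical values, assuming serde_json is available:

use std::collections::BTreeMap;

fn main() {
    let value = serde_json::json!({ "srid": "4326", "geom": "geometry" });

    // Mirrors what json_to_hashmap does after this change.
    let mut result = BTreeMap::new();
    for (key, val) in value.as_object().unwrap() {
        result.insert(key.clone(), val.as_str().unwrap().to_string());
    }

    // Iteration order is now deterministic: "geom" comes before "srid".
    let keys: Vec<&str> = result.keys().map(String::as_str).collect();
    assert_eq!(keys, ["geom", "srid"]);
}
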
2 changes: 1 addition & 1 deletion martin/src/sprites/mod.rs
@@ -61,7 +61,7 @@ impl SpriteSources {

let mut results = Self::default();
let mut directories = Vec::new();
let mut configs = HashMap::new();
let mut configs = BTreeMap::new();

if let Some(sources) = cfg.sources {
for (id, source) in sources {
10 changes: 5 additions & 5 deletions martin/src/srv/server.rs
@@ -541,7 +541,7 @@ fn parse_x_rewrite_url(header: &HeaderValue) -> Option<String> {

#[cfg(test)]
mod tests {
use std::collections::HashMap;
use std::collections::BTreeMap;

use async_trait::async_trait;
use tilejson::{tilejson, Bounds, VectorLayer};
@@ -589,7 +589,7 @@ mod tests {
bounds: Bounds::new(-10.0, -20.0, 10.0, 20.0),
vector_layers: vec![
VectorLayer::new("layer1".to_string(),
HashMap::from([
BTreeMap::from([
("a".to_string(), "x1".to_string()),
]))
],
@@ -613,7 +613,7 @@ mod tests {
bounds: Bounds::new(-20.0, -5.0, 5.0, 50.0),
vector_layers: vec![
VectorLayer::new("layer2".to_string(),
HashMap::from([
BTreeMap::from([
("b".to_string(), "x2".to_string()),
]))
],
@@ -631,11 +631,11 @@
Some(vec![
VectorLayer::new(
"layer1".to_string(),
HashMap::from([("a".to_string(), "x1".to_string())])
BTreeMap::from([("a".to_string(), "x1".to_string())])
),
VectorLayer::new(
"layer2".to_string(),
HashMap::from([("b".to_string(), "x2".to_string())])
BTreeMap::from([("b".to_string(), "x2".to_string())])
),
])
);
18 changes: 3 additions & 15 deletions martin/src/utils/utilities.rs
@@ -1,28 +1,16 @@
use std::collections::{BTreeMap, HashMap};
use std::future::Future;
use std::io::{Read as _, Write as _};
use std::time::Duration;

use flate2::read::GzDecoder;
use flate2::write::GzEncoder;
use futures::pin_mut;
use serde::{Serialize, Serializer};
#[cfg(test)]
use serde::Serialize as _;
use tokio::time::timeout;

/// Sort an optional hashmap by key, case-insensitive first, then case-sensitive
pub fn sorted_opt_map<S: Serializer, T: Serialize>(
value: &Option<HashMap<String, T>>,
serializer: S,
) -> Result<S::Ok, S::Error> {
value.as_ref().map(sorted_btree_map).serialize(serializer)
}

pub fn sorted_btree_map<K: Serialize + Ord, V>(value: &HashMap<K, V>) -> BTreeMap<&K, &V> {
value.iter().collect()
}

#[cfg(test)]
pub fn sorted_opt_set<S: Serializer>(
pub fn sorted_opt_set<S: serde::Serializer>(
value: &Option<std::collections::HashSet<String>>,
serializer: S,
) -> Result<S::Ok, S::Error> {
5 changes: 2 additions & 3 deletions mbtiles/src/metadata.rs
@@ -211,10 +211,9 @@ impl Mbtiles {

#[cfg(test)]
mod tests {
use std::collections::HashMap;

use martin_tile_utils::{Encoding, Format};
use sqlx::Executor as _;
use std::collections::BTreeMap;
use tilejson::VectorLayer;

use super::*;
@@ -267,7 +266,7 @@ mod tests {
description: Some(String::new()),
minzoom: Some(0),
maxzoom: Some(6),
other: HashMap::default()
other: BTreeMap::default()
}])
);
assert_eq!(metadata.id, "world_cities");
2 changes: 1 addition & 1 deletion tests/expected/martin-cp/flat_metadata.txt
@@ -19,7 +19,7 @@ tilejson:
- 45.0
name: table_source
foo: '{"bar":"foo"}'
generator: martin-cp v0.10.1
format: mvt
generator: martin-cp v0.10.1
json: {}
