use crate::cli::Commands;
use crate::config_t_rex as t_rex;
use crate::datasource::source_config_from_cli_arg;
use bbox_core::cli::CommonCommands;
use bbox_core::config::{
app_dir, error_exit, from_config_root_or_exit, ConfigError, DatasourceCfg, DsPostgisCfg,
NamedDatasourceCfg,
};
use bbox_core::service::ServiceConfig;
use clap::{ArgMatches, FromArgMatches};
use log::{info, warn};
use regex::Regex;
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet};
use std::convert::From;
use std::num::NonZeroU16;
use std::path::{Path, PathBuf};
/// Root configuration of the tile service.
///
/// All sections are optional (`#[serde(default)]`); the TOML file uses
/// singular section names: `[[grid]]`, `[[datasource]]`, `[[tileset]]`,
/// `[[tilestore]]`.
#[derive(Deserialize, Serialize, Default, Debug)]
#[serde(default)]
pub struct TileServiceCfg {
    /// Custom tile grid definitions (`[[grid]]`).
    #[serde(rename = "grid")]
    pub grids: Vec<GridCfg>,
    /// Named datasources referenced by tilesets (`[[datasource]]`).
    #[serde(rename = "datasource")]
    pub datasources: Vec<NamedDatasourceCfg>,
    /// Tileset definitions (`[[tileset]]`).
    #[serde(rename = "tileset")]
    pub tilesets: Vec<TileSetCfg>,
    /// Tile cache providers (`[[tilestore]]`).
    #[serde(rename = "tilestore")]
    pub tilestores: Vec<TileCacheProviderCfg>,
}
/// Configuration of a single tileset.
#[derive(Deserialize, Serialize, Clone, Debug)]
#[serde(deny_unknown_fields)]
pub struct TileSetCfg {
    /// Tileset name.
    pub name: String,
    /// Tile matrix sets this tileset is available in, with optional
    /// per-TMS zoom limits.
    #[serde(default)]
    pub tms: Vec<TilesetTmsCfg>,
    /// Tile source parameters; flattened, so the variant key
    /// (`postgis`, `wms_proxy`, ...) appears directly in the tileset table.
    #[serde(flatten)]
    pub source: SourceParamCfg,
    /// Name of the `[[tilestore]]` entry used as tile cache, if any.
    pub cache: Option<String>,
    /// Tile format stored in the cache — semantics not visible here,
    /// presumably a format name; TODO confirm against cache implementation.
    pub cache_format: Option<String>,
    /// Zoom range restriction for cached tiles.
    pub cache_limits: Option<CacheLimitCfg>,
    /// HTTP `Cache-Control` max-age settings per zoom range.
    #[serde(default)]
    pub cache_control: Vec<CacheControlCfg>,
}
/// Custom tile grid definition, loaded from a JSON file.
#[derive(Deserialize, Serialize, Debug)]
#[serde(deny_unknown_fields)]
pub struct GridCfg {
    /// Path to the grid JSON file (relative paths are resolved via `app_dir`).
    pub json: String,
}
impl GridCfg {
    /// Absolute path of the grid JSON file, resolved relative to the
    /// application directory (see `bbox_core::config::app_dir`).
    pub fn abs_path(&self) -> PathBuf {
        app_dir(&self.json)
    }
}
/// Tile matrix set reference of a tileset, with optional zoom limits.
#[derive(Deserialize, Serialize, Clone, Debug)]
#[serde(deny_unknown_fields)]
pub struct TilesetTmsCfg {
    /// Tile matrix set identifier (e.g. `WebMercatorQuad`).
    pub id: String,
    /// First zoom level served in this TMS (inclusive).
    pub minzoom: Option<u8>,
    /// Last zoom level served in this TMS (inclusive).
    pub maxzoom: Option<u8>,
}
/// Tile source parameters, keyed by source type in the config file.
#[derive(Deserialize, Serialize, Clone, Debug)]
#[serde(deny_unknown_fields)]
pub enum SourceParamCfg {
    /// Tiles proxied from an external WMS (`wms_proxy`).
    #[serde(rename = "wms_proxy")]
    WmsHttp(WmsHttpSourceParamsCfg),
    /// Tiles rendered by the embedded map service / FCGI WMS (`map_service`).
    #[serde(rename = "map_service")]
    WmsFcgi(WmsFcgiSourceParamsCfg),
    /// Vector tiles generated from a PostGIS datasource.
    #[serde(rename = "postgis")]
    Postgis(PostgisSourceParamsCfg),
    /// Tiles read from an MBTiles archive.
    #[serde(rename = "mbtiles")]
    Mbtiles(MbtilesStoreCfg),
    /// Tiles read from a PMTiles archive.
    #[serde(rename = "pmtiles")]
    Pmtiles(PmtilesStoreCfg),
}
/// Parameters for a proxied WMS tile source.
#[derive(Deserialize, Serialize, Clone, Debug)]
#[serde(deny_unknown_fields)]
pub struct WmsHttpSourceParamsCfg {
    /// Name of the WMS source to proxy — presumably declared elsewhere in
    /// the configuration; verify against the WMS proxy setup.
    pub source: String,
    /// WMS `LAYERS` request parameter value.
    pub layers: String,
}
/// Parameters for a map-service (FCGI WMS) tile source.
#[derive(Deserialize, Serialize, Clone, Debug)]
#[serde(deny_unknown_fields)]
pub struct WmsFcgiSourceParamsCfg {
    /// Map project name.
    pub project: String,
    /// Project file suffix selecting the FCGI backend.
    pub suffix: String,
    /// WMS `LAYERS` request parameter value.
    pub layers: String,
    /// Additional request parameters appended to the WMS request, if any.
    pub params: Option<String>,
    /// Edge length of rendered tiles in pixels (must be non-zero).
    pub tile_size: Option<NonZeroU16>,
}
/// Parameters for a PostGIS vector tile source.
#[derive(Deserialize, Serialize, Clone, Debug)]
#[serde(deny_unknown_fields)]
pub struct PostgisSourceParamsCfg {
    /// Name of the `[[datasource]]` entry to use (or the service default).
    pub datasource: Option<String>,
    /// Tileset extent; defaults to the whole world (see `get_extent`).
    pub extent: Option<ExtentCfg>,
    /// Map center (x, y) for viewers; computed from the extent if unset.
    pub center: Option<(f64, f64)>,
    /// Initial zoom level for viewers (default 2, see `get_start_zoom`).
    pub start_zoom: Option<u8>,
    /// Attribution text for the tileset.
    pub attribution: Option<String>,
    /// Enable PostGIS 2 compatibility mode — exact effect not visible here;
    /// confirm against the tile source implementation.
    #[serde(default)]
    pub postgis2: bool,
    /// Diagnostics settings (e.g. reference tile size warnings).
    pub diagnostics: Option<TileDiagnosticsCfg>,
    /// Vector layers of the tileset (`[[tileset.postgis.layer]]`).
    #[serde(rename = "layer")]
    pub layers: Vec<VectorLayerCfg>,
}
/// Rectangular extent (axis-aligned bounding box).
#[derive(Deserialize, Serialize, Clone, Debug)]
#[serde(deny_unknown_fields)]
pub struct ExtentCfg {
    pub minx: f64,
    pub miny: f64,
    pub maxx: f64,
    pub maxy: f64,
}
/// Tile diagnostics settings.
#[derive(Deserialize, Serialize, Clone, Debug)]
#[serde(deny_unknown_fields)]
pub struct TileDiagnosticsCfg {
    /// Reference tile size in bytes — presumably used to flag oversized
    /// tiles; confirm against the diagnostics implementation.
    pub reference_size: Option<u64>,
}
/// Configuration of a single vector layer within a PostGIS tileset.
#[derive(Deserialize, Serialize, Clone, Debug)]
#[serde(deny_unknown_fields)]
pub struct VectorLayerCfg {
    /// Layer name (used as MVT layer name).
    pub name: String,
    /// Geometry column name.
    pub geometry_field: Option<String>,
    /// Geometry type (e.g. `POLYGON`).
    pub geometry_type: Option<String>,
    /// Spatial reference system id of the source geometries.
    pub srid: Option<i32>,
    /// Skip coordinate transformation to the grid SRS.
    #[serde(default)]
    pub no_transform: bool,
    /// Feature id column name.
    pub fid_field: Option<String>,
    /// Source table name; unset when custom queries are used instead.
    pub table_name: Option<String>,
    /// Zoom-dependent custom queries (`[[...layer.query]]`).
    #[serde(default, rename = "query")]
    pub queries: Vec<VectorLayerQueryCfg>,
    /// First zoom level of the layer (see `minzoom()` for the fallback).
    pub minzoom: Option<u8>,
    /// Last zoom level of the layer (see `maxzoom()` for the fallback).
    pub maxzoom: Option<u8>,
    /// Maximal number of features to return per query.
    pub query_limit: Option<u32>,
    /// Tile extent in tile coordinate units (default: 4096).
    #[serde(default = "default_tile_size")]
    pub tile_size: u32,
    /// Buffer around the tile in tile coordinate units.
    pub buffer_size: Option<u32>,
    /// Simplify geometries (default: false).
    #[serde(default)]
    pub simplify: bool,
    /// Simplification tolerance expression (default: `!pixel_width!/2`).
    #[serde(default = "default_tolerance")]
    pub tolerance: String,
    /// Apply `ST_MakeValid`-style geometry repair — assumed; confirm
    /// against the query builder.
    #[serde(default)]
    pub make_valid: bool,
    /// Shift longitudes (e.g. for dateline handling) — assumed; confirm
    /// against the query builder.
    #[serde(default)]
    pub shift_longitude: bool,
}
/// Serde default for `VectorLayerCfg::tile_size` (standard MVT extent).
fn default_tile_size() -> u32 {
    4096
}

/// Default simplification tolerance: half the width of a pixel.
const DEFAULT_TOLERANCE: &str = "!pixel_width!/2";

/// Serde default for `VectorLayerCfg::tolerance`.
fn default_tolerance() -> String {
    DEFAULT_TOLERANCE.to_string()
}
/// Zoom-dependent query configuration of a vector layer.
///
/// Fields left unset fall back to the layer-level configuration
/// (see `VectorLayerCfg::simplify`/`tolerance`/`query`).
#[derive(Deserialize, Serialize, Clone, Debug)]
#[serde(deny_unknown_fields)]
pub struct VectorLayerQueryCfg {
    /// First zoom level this query applies to (inclusive).
    pub minzoom: Option<u8>,
    /// Last zoom level this query applies to (inclusive).
    pub maxzoom: Option<u8>,
    /// Override the layer's `simplify` flag for this zoom range.
    pub simplify: Option<bool>,
    /// Override the layer's `tolerance` for this zoom range.
    pub tolerance: Option<String>,
    /// Custom SQL query for this zoom range.
    pub sql: Option<String>,
}
/// Zoom range restriction for tile caching.
#[derive(Deserialize, Serialize, Clone, Debug)]
#[serde(deny_unknown_fields)]
pub struct CacheLimitCfg {
    /// First cached zoom level (default: 0).
    #[serde(default)]
    pub minzoom: u8,
    /// Last cached zoom level (unbounded if unset).
    pub maxzoom: Option<u8>,
}
/// HTTP `Cache-Control` setting for a zoom range.
#[derive(Deserialize, Serialize, Clone, Debug)]
#[serde(deny_unknown_fields)]
pub struct CacheControlCfg {
    /// `max-age` value in seconds.
    pub max_age: u64,
    /// First zoom level this setting applies to (inclusive).
    pub minzoom: Option<u8>,
    /// Last zoom level this setting applies to (inclusive).
    pub maxzoom: Option<u8>,
}
/// Named tile cache provider (`[[tilestore]]`).
#[derive(Deserialize, Serialize, Clone, Debug)]
#[serde(deny_unknown_fields)]
pub struct TileCacheProviderCfg {
    /// Cache name, referenced by `TileSetCfg::cache`.
    pub name: String,
    /// Compression applied to stored tiles.
    pub compression: Option<StoreCompressionCfg>,
    /// Store backend; flattened so the variant key (`files`, `s3`, ...)
    /// appears directly in the tilestore table.
    #[serde(flatten)]
    pub cache: TileStoreCfg,
}
/// Compression applied to tiles in a tile store.
#[derive(Deserialize, Serialize, PartialEq, Clone, Debug)]
pub enum StoreCompressionCfg {
    /// Store tiles uncompressed.
    None,
    /// Store tiles gzip-compressed.
    Gzip,
}
/// Tile store backend configuration.
#[derive(Deserialize, Serialize, Clone, Debug)]
#[serde(deny_unknown_fields)]
pub enum TileStoreCfg {
    /// Plain files in a directory tree.
    #[serde(rename = "files")]
    Files(FileStoreCfg),
    /// S3 object storage.
    #[serde(rename = "s3")]
    S3(S3StoreCfg),
    /// MBTiles (SQLite) archive.
    #[serde(rename = "mbtiles")]
    Mbtiles(MbtilesStoreCfg),
    /// PMTiles archive.
    #[serde(rename = "pmtiles")]
    Pmtiles(PmtilesStoreCfg),
    /// No persistent store (tiles are always generated on the fly).
    #[serde(rename = "nostore")]
    NoStore,
}
/// File system tile store configuration.
#[derive(Deserialize, Serialize, Clone, Debug)]
#[serde(deny_unknown_fields)]
pub struct FileStoreCfg {
    /// Base directory for stored tiles (relative paths resolved via `app_dir`).
    pub base_dir: PathBuf,
}
impl FileStoreCfg {
    /// Absolute base directory, resolved relative to the application directory.
    pub fn abs_path(&self) -> PathBuf {
        app_dir(&self.base_dir)
    }
}
/// S3 tile store configuration.
#[derive(Deserialize, Serialize, Clone, Debug)]
#[serde(deny_unknown_fields)]
pub struct S3StoreCfg {
    /// S3 path (bucket/prefix) — exact format not visible here; confirm
    /// against the S3 store implementation.
    pub path: String,
}
/// MBTiles store configuration.
#[derive(Deserialize, Serialize, Clone, Debug)]
#[serde(deny_unknown_fields)]
pub struct MbtilesStoreCfg {
    /// Path to the MBTiles file (relative paths resolved via `app_dir`).
    pub path: PathBuf,
}
impl MbtilesStoreCfg {
    /// Absolute path of the MBTiles file, resolved relative to the
    /// application directory.
    pub fn abs_path(&self) -> PathBuf {
        app_dir(&self.path)
    }
}
/// PMTiles store configuration.
#[derive(Deserialize, Serialize, Clone, Debug)]
#[serde(deny_unknown_fields)]
pub struct PmtilesStoreCfg {
    /// Path to the PMTiles file (relative paths resolved via `app_dir`).
    pub path: PathBuf,
}
impl PmtilesStoreCfg {
    /// Absolute path of the PMTiles file, resolved relative to the
    /// application directory.
    pub fn abs_path(&self) -> PathBuf {
        app_dir(&self.path)
    }
}
impl TileStoreCfg {
    /// Derive a tile store configuration from `seed` command line arguments.
    ///
    /// Returns `None` unless the CLI invocation is a `seed` command that
    /// selects one of the store backends (`tile_path`, `s3_path`, `mb_path`,
    /// `pm_path` or `no_store`). The options are checked in that order of
    /// precedence.
    pub fn from_cli_args(cli: &ArgMatches) -> Option<Self> {
        let Ok(Commands::Seed(args)) = Commands::from_arg_matches(cli) else {
            return None;
        };
        if let Some(base_dir) = &args.tile_path {
            return Some(TileStoreCfg::Files(FileStoreCfg {
                base_dir: base_dir.into(),
            }));
        }
        if let Some(s3_path) = &args.s3_path {
            return Some(TileStoreCfg::S3(S3StoreCfg {
                path: s3_path.to_string(),
            }));
        }
        if let Some(mb_path) = &args.mb_path {
            return Some(TileStoreCfg::Mbtiles(MbtilesStoreCfg {
                path: mb_path.into(),
            }));
        }
        if let Some(pm_path) = &args.pm_path {
            return Some(TileStoreCfg::Pmtiles(PmtilesStoreCfg {
                path: pm_path.into(),
            }));
        }
        if args.no_store {
            return Some(TileStoreCfg::NoStore);
        }
        None
    }
}
impl ServiceConfig for TileServiceCfg {
    /// Load the tile service configuration and apply CLI overrides.
    ///
    /// Precedence: the file from the config root is read first, optionally
    /// replaced wholesale by an imported t-rex config (`t_rex_config` CLI
    /// argument), then extended with a tile store and/or an ad-hoc tileset
    /// derived from CLI arguments.
    fn initialize(cli: &ArgMatches) -> Result<Self, ConfigError> {
        let mut cfg: TileServiceCfg = from_config_root_or_exit();
        // `t_rex_config`: import a t-rex config file, replacing the config
        // loaded above.
        if let Some(t_rex_config) = cli.get_one::<PathBuf>("t_rex_config") {
            let t_rex_cfg: t_rex::ApplicationCfg =
                t_rex::read_config(t_rex_config.to_str().expect("invalid string"))
                    .unwrap_or_else(error_exit);
            cfg = t_rex_cfg.into();
            info!("Imported t-rex config:\n{}", cfg.as_toml());
        }
        // Tile store selected on the command line is appended under the
        // reserved name `<cli>`.
        if let Some(cache) = TileStoreCfg::from_cli_args(cli) {
            cfg.tilestores.push(TileCacheProviderCfg {
                name: "<cli>".to_string(),
                compression: None,
                cache,
            });
        }
        // Both `serve` and `seed` accept a tile source file or URL argument.
        let file_or_url =
            if let Ok(CommonCommands::Serve(args)) = CommonCommands::from_arg_matches(cli) {
                args.file_or_url
            } else if let Ok(Commands::Seed(args)) = Commands::from_arg_matches(cli) {
                args.file_or_url
            } else {
                None
            };
        if let Some(file_or_url) = file_or_url {
            if let Some(source_cfg) = source_config_from_cli_arg(&file_or_url) {
                // Tileset name: the file stem if one exists, otherwise the
                // full argument (e.g. for URLs without a path component).
                let name = if let Some(name) = Path::new(&file_or_url).file_stem() {
                    name.to_string_lossy().to_string()
                } else {
                    file_or_url.to_string()
                };
                info!("Adding tileset `{name}`");
                let ts = TileSetCfg {
                    name,
                    tms: Vec::new(),
                    source: source_cfg,
                    cache: None,
                    cache_format: None,
                    cache_limits: None,
                    cache_control: Vec::new(),
                };
                cfg.tilesets.push(ts);
            }
        }
        Ok(cfg)
    }
}
impl TileServiceCfg {
    /// Serialize this configuration as a TOML document.
    ///
    /// # Panics
    /// Panics if the configuration cannot be represented as TOML, which
    /// would indicate a bug in the `Serialize` implementations of the
    /// config types.
    pub fn as_toml(&self) -> String {
        // `self` is already a reference; `&self` would serialize via a
        // needless double reference (clippy: needless_borrow).
        toml::to_string(self).expect("TileServiceCfg is TOML-serializable")
    }
}
impl From<t_rex::ApplicationCfg> for TileServiceCfg {
    /// Convert an imported t-rex configuration into a BBOX tile service
    /// configuration. Unsupported parts (GDAL datasources, user grids,
    /// non-file caches) are skipped with a warning or dropped.
    fn from(t_rex_config: t_rex::ApplicationCfg) -> Self {
        // Strips a trailing `) AS "alias"` from t-rex sub-select table names
        // when converting them into base queries (see below).
        let re = Regex::new(r#"\) AS "\w+"$"#).expect("re");
        // Only datasources with a `dbconn` (PostGIS) are converted; entries
        // with a `path` are assumed to be GDAL sources and skipped.
        let datasources = t_rex_config
            .datasource
            .into_iter()
            .filter(|ds| {
                if ds.path.is_some() {
                    warn!("Skipping GDAL datasource");
                }
                ds.dbconn.is_some()
            })
            .map(|ds| {
                let datasource = DatasourceCfg::Postgis(DsPostgisCfg {
                    // Safe: `filter` above guarantees `dbconn` is Some.
                    url: ds.dbconn.expect("dbconn"),
                });
                NamedDatasourceCfg {
                    name: ds.name.unwrap_or("default".to_string()),
                    datasource,
                }
            })
            .collect();
        // User defined grids cannot be converted automatically; emit a
        // placeholder grid JSON file name derived from the SRID.
        let grids = if let Some(g) = &t_rex_config.grid.user {
            warn!("User defined grid has to be configured manually");
            vec![GridCfg {
                json: format!("{}.json", g.srid),
            }]
        } else {
            Vec::new()
        };
        // Tile matrix set id: the SRID for user grids, otherwise mapped from
        // the t-rex predefined grid name (default: WebMercatorQuad).
        let tms = if let Some(g) = &t_rex_config.grid.user {
            format!("{}", g.srid)
        } else {
            match &t_rex_config.grid.predefined.as_deref() {
                Some("wgs84") => "WorldCRS84Quad".to_string(),
                Some("web_mercator") => "WebMercatorQuad".to_string(),
                _ => "WebMercatorQuad".to_string(),
            }
        };
        // Only the t-rex file cache is converted; its tiles are stored
        // gzip-compressed, hence the Gzip compression setting.
        let tilestore = t_rex_config
            .cache
            .and_then(|cache| {
                if let Some(fcache) = cache.file {
                    Some(TileStoreCfg::Files(FileStoreCfg {
                        base_dir: fcache.base.into(),
                    }))
                } else {
                    None
                }
            })
            .map(|cache| TileCacheProviderCfg {
                name: "tilecache".to_string(),
                compression: Some(StoreCompressionCfg::Gzip),
                cache,
            });
        let cache_name = tilestore.as_ref().map(|ts| ts.name.clone());
        let tilestores = if let Some(ts) = tilestore {
            vec![ts]
        } else {
            Vec::new()
        };
        let tilesets = t_rex_config
            .tilesets
            .into_iter()
            .map(|ts| {
                // A BBOX tileset has a single datasource; collect the distinct
                // datasource names of the t-rex layers and warn if there is
                // more than one (only the first is used).
                let dsnames = ts
                    .layers
                    .iter()
                    .map(|l| l.datasource.clone())
                    .collect::<HashSet<_>>()
                    .into_iter()
                    .collect::<Vec<_>>();
                if dsnames.len() > 1 {
                    warn!(
                        "Please group layers with datasources ({dsnames:?}) into separate tilesets"
                    )
                }
                let datasource = dsnames.first().expect("no datasource").clone();
                let layers = ts
                    .layers
                    .into_iter()
                    .map(|l| {
                        let mut queries = l
                            .query
                            .into_iter()
                            .map(|q| VectorLayerQueryCfg {
                                minzoom: Some(q.minzoom),
                                maxzoom: q.maxzoom,
                                simplify: q.simplify,
                                tolerance: q.tolerance,
                                sql: q.sql,
                            })
                            .collect::<Vec<_>>();
                        // t-rex allows a sub-select as `table_name`. Convert it
                        // into a base query (stripping the trailing alias via
                        // the regex above) and clear the table name.
                        let mut table_name = l.table_name.clone();
                        if let Some(table) = &l.table_name {
                            if table.starts_with("(SELECT ") {
                                let sql = Some(re.replace_all(table, ")").to_string());
                                queries.insert(
                                    0,
                                    VectorLayerQueryCfg {
                                        minzoom: l.minzoom,
                                        maxzoom: None,
                                        simplify: Some(l.simplify),
                                        tolerance: Some(l.tolerance.clone()),
                                        sql,
                                    },
                                );
                                table_name = None;
                            }
                        }
                        VectorLayerCfg {
                            name: l.name,
                            geometry_field: l.geometry_field,
                            geometry_type: l.geometry_type,
                            srid: l.srid,
                            no_transform: l.no_transform,
                            fid_field: l.fid_field,
                            table_name,
                            query_limit: l.query_limit,
                            queries,
                            minzoom: l.minzoom,
                            maxzoom: l.maxzoom,
                            tile_size: l.tile_size,
                            simplify: l.simplify,
                            tolerance: l.tolerance,
                            buffer_size: l.buffer_size,
                            make_valid: l.make_valid,
                            shift_longitude: l.shift_longitude,
                        }
                    })
                    .collect();
                let pgcfg = PostgisSourceParamsCfg {
                    datasource,
                    extent: ts.extent.map(|ext| ExtentCfg {
                        maxx: ext.maxx,
                        maxy: ext.maxy,
                        minx: ext.minx,
                        miny: ext.miny,
                    }),
                    center: ts.center,
                    start_zoom: ts.start_zoom,
                    attribution: ts.attribution,
                    postgis2: false,
                    diagnostics: None,
                    layers,
                };
                TileSetCfg {
                    name: ts.name,
                    tms: vec![TilesetTmsCfg {
                        id: tms.clone(),
                        minzoom: ts.minzoom,
                        maxzoom: ts.maxzoom,
                    }],
                    source: SourceParamCfg::Postgis(pgcfg),
                    // All tilesets share the single converted cache, if any.
                    cache: cache_name.clone(),
                    cache_format: None,
                    cache_limits: ts.cache_limits.map(|l| CacheLimitCfg {
                        minzoom: l.minzoom,
                        maxzoom: l.maxzoom,
                    }),
                    cache_control: Vec::new(),
                }
            })
            .collect();
        TileServiceCfg {
            grids,
            datasources,
            tilesets,
            tilestores,
        }
    }
}
/// Whole-world extent in WGS84 degrees; fallback when no extent is configured.
static WORLD_EXTENT: ExtentCfg = ExtentCfg {
    minx: -180.0,
    miny: -90.0,
    maxx: 180.0,
    maxy: 90.0,
};
impl PostgisSourceParamsCfg {
    /// Attribution text of the tileset, or an empty string if unset.
    pub fn attribution(&self) -> String {
        // `unwrap_or("".to_string())` allocated the default eagerly;
        // `unwrap_or_default` is equivalent and idiomatic.
        self.attribution.clone().unwrap_or_default()
    }
    /// Configured extent, falling back to the whole-world WGS84 extent.
    pub fn get_extent(&self) -> &ExtentCfg {
        self.extent.as_ref().unwrap_or(&WORLD_EXTENT)
    }
    /// Configured center, or the midpoint of the extent if unset.
    pub fn get_center(&self) -> (f64, f64) {
        // `center` is `Option<(f64, f64)>` (Copy); `unwrap_or_else` replaces
        // the former `is_none()` check + `unwrap()` and only computes the
        // midpoint when needed.
        self.center.unwrap_or_else(|| {
            let ext = self.get_extent();
            (
                ext.maxx - (ext.maxx - ext.minx) / 2.0,
                ext.maxy - (ext.maxy - ext.miny) / 2.0,
            )
        })
    }
    /// Initial zoom level for map viewers (default: 2).
    pub fn get_start_zoom(&self) -> u8 {
        self.start_zoom.unwrap_or(2)
    }
}
impl VectorLayerCfg {
    /// Minimal zoom level of the layer: the explicit setting, else the
    /// smallest query `minzoom`, else 0.
    pub fn minzoom(&self) -> u8 {
        self.minzoom.unwrap_or(
            self.queries
                .iter()
                .filter_map(|q| q.minzoom)
                .min()
                .unwrap_or(0),
        )
    }
    /// Maximal zoom level of the layer: the explicit setting, else the
    /// largest query `maxzoom` (queries without one count as `default`),
    /// else `default`.
    pub fn maxzoom(&self, default: u8) -> u8 {
        self.maxzoom.unwrap_or(
            self.queries
                .iter()
                .map(|q| q.maxzoom.unwrap_or(default))
                .max()
                .unwrap_or(default),
        )
    }
    /// Zoom levels at which the effective layer query may change.
    ///
    /// Includes the `minzoom` of every SQL query, the TMS zoom bounds
    /// (a TMS `maxzoom` introduces a step just above it), and always the
    /// layer's own `minzoom()`. Result is deduplicated and sorted ascending.
    pub fn zoom_steps(&self, tms_cfg: &[TilesetTmsCfg]) -> Vec<u8> {
        let mut zoom_steps: Vec<u8> = self
            .queries
            .iter()
            .filter(|q| q.sql.is_some())
            .filter_map(|q| q.minzoom)
            .chain(tms_cfg.iter().filter_map(|crs| crs.minzoom))
            .chain(tms_cfg.iter().filter_map(|crs| crs.maxzoom.map(|z| z + 1)))
            .filter(|z| *z >= self.minzoom())
            .chain([self.minzoom()])
            .collect::<HashSet<u8>>()
            .into_iter()
            .collect();
        zoom_steps.sort();
        zoom_steps
    }
    /// Step-function lookup: the entry whose key is the largest key `<= zoom`.
    pub fn zoom_step_entry<T>(lookup: &HashMap<u8, T>, zoom: u8) -> Option<&T> {
        // A single pass over the keys replaces the former clone-into-Vec,
        // sort, reverse-scan sequence: no allocation, O(n) instead of
        // O(n log n), same result (keys in a HashMap are unique).
        lookup
            .keys()
            .filter(|&&z| z <= zoom)
            .max()
            .and_then(|z| lookup.get(z))
    }
    /// Query configuration applicable at `level` and satisfying `check`.
    ///
    /// Among matching queries the one with the highest `minzoom` wins;
    /// ties are resolved in favor of the last declared query (stable sort
    /// followed by a reverse scan).
    fn query_cfg<F>(&self, level: u8, check: F) -> Option<&VectorLayerQueryCfg>
    where
        F: Fn(&VectorLayerQueryCfg) -> bool,
    {
        let mut queries = self
            .queries
            .iter()
            .map(|q| (q.minzoom.unwrap_or(0), q.maxzoom.unwrap_or(255), q))
            .collect::<Vec<_>>();
        queries.sort_by_key(|t| t.0);
        let query = queries
            .iter()
            .rev()
            .find(|q| level >= q.0 && level <= q.1 && check(q.2));
        query.map(|q| q.2)
    }
    /// Custom SQL query applicable at `level`, if any.
    pub fn query(&self, level: u8) -> Option<&String> {
        let query_cfg = self.query_cfg(level, |q| q.sql.is_some());
        query_cfg.and_then(|q| q.sql.as_ref())
    }
    /// Whether geometries are simplified at `level` (query override, else
    /// the layer setting).
    pub fn simplify(&self, level: u8) -> bool {
        let query_cfg = self.query_cfg(level, |q| q.simplify.is_some());
        query_cfg.and_then(|q| q.simplify).unwrap_or(self.simplify)
    }
    /// Simplification tolerance expression at `level` (query override, else
    /// the layer setting).
    pub fn tolerance(&self, level: u8) -> &String {
        let query_cfg = self.query_cfg(level, |q| q.tolerance.is_some());
        query_cfg
            .and_then(|q| q.tolerance.as_ref())
            .unwrap_or(&self.tolerance)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use toml::Value;

    /// Parse a TOML string into a config type, mapping errors to strings.
    fn parse_config<'a, T: Deserialize<'a>>(toml: &str) -> Result<T, String> {
        toml.parse::<Value>()
            .and_then(|cfg| cfg.try_into::<T>())
            .map_err(|err| format!("{err}"))
    }

    // Zoom steps are derived from the `minzoom` of each SQL query.
    #[test]
    fn zoom_steps() {
        const CONFIG: &str = r#"
[[datasource]]
name = "osmdb"
[datasource.postgis]
url = "postgres:///osmdb"
[[tileset]]
name = "osm"
[tileset.postgis]
datasource = "osmdb"
[[tileset.postgis.layer]]
geometry_field = "geom"
geometry_type = "POLYGON"
name = "ocean"
#srid = 8857 / 3857
[[tileset.postgis.layer.query]]
minzoom = 0
maxzoom = 2
sql = """SELECT "geom" FROM "eq"."ocean_low""""
[[tileset.postgis.layer.query]]
minzoom = 3
maxzoom = 9
sql = """SELECT "id","geom" FROM "merc"."ocean_low""""
[[tileset.postgis.layer.query]]
minzoom = 10
sql = """SELECT "id","geom" FROM "merc"."ocean""""
"#;
        let cfg: TileServiceCfg = parse_config(CONFIG).unwrap();
        let SourceParamCfg::Postgis(ref source) = cfg.tilesets[0].source else {
            panic!("Wrong tileset source")
        };
        assert_eq!(source.layers.len(), 1);
        assert_eq!(source.layers[0].minzoom(), 0);
        // Last query has no maxzoom, so the passed default (42) applies.
        assert_eq!(source.layers[0].maxzoom(42), 42);
        assert_eq!(source.layers[0].zoom_steps(&[]), vec![0, 3, 10]);
    }

    // min/max zoom fall back to the query bounds when not set on the layer.
    #[test]
    fn zoom_min_max() {
        const CONFIG: &str = r#"
[[tileset]]
name = "osm"
[tileset.postgis]
datasource = "osmdb"
[[tileset.postgis.layer]]
geometry_field = "geom"
geometry_type = "POLYGON"
name = "ocean"
[[tileset.postgis.layer.query]]
minzoom = 3
maxzoom = 9
sql = """SELECT "id","geom" FROM "merc"."ocean_low""""
[[tileset.postgis.layer.query]]
minzoom = 10
maxzoom = 24
sql = """SELECT "id","geom" FROM "merc"."ocean""""
"#;
        let cfg: TileServiceCfg = parse_config(CONFIG).unwrap();
        let SourceParamCfg::Postgis(ref source) = cfg.tilesets[0].source else {
            panic!("Wrong tileset source")
        };
        assert_eq!(source.layers[0].minzoom(), 3);
        assert_eq!(source.layers[0].maxzoom(42), 24);
        assert_eq!(source.layers[0].zoom_steps(&[]), vec![3, 10]);
    }

    // TMS zoom bounds add zoom steps even with a single unbounded query.
    #[test]
    fn multi_crs_projected() {
        const CONFIG: &str = r#"
[[grid]]
json = "EqualEarthGreenwichWGS84Quad.json"
[[tileset]]
name = "tracking"
[[tileset.tms]]
id = "EqualEarthGreenwichWGS84Quad"
maxzoom = 2
[[tileset.tms]]
id = "WebMercatorQuad"
minzoom = 3
[tileset.postgis]
datasource = "tracking"
[[tileset.postgis.layer]]
name = "waypoints"
geometry_field = "geom"
geometry_type = "POINT"
srid = 4326
[[tileset.postgis.layer.query]]
sql = """SELECT id, ts::TEXT, ST_Point(lon, lat, 4326) AS geom FROM gps.gpslog"""
"#;
        let cfg: TileServiceCfg = parse_config(CONFIG).unwrap();
        let ts = &cfg.tilesets[0];
        let SourceParamCfg::Postgis(ref source) = ts.source else {
            panic!("Wrong tileset source")
        };
        assert_eq!(source.layers[0].minzoom(), 0);
        assert_eq!(source.layers[0].maxzoom(42), 42);
        assert_eq!(source.layers[0].zoom_steps(&[]), vec![0]);
        assert_eq!(source.layers[0].zoom_steps(&ts.tms), vec![0, 3]);
    }

    // Query zoom bounds coinciding with TMS bounds do not duplicate steps.
    #[test]
    fn multi_crs_unprojected() {
        const CONFIG: &str = r#"
[[grid]]
json = "EqualEarthGreenwichWGS84Quad.json"
[[tileset]]
name = "ocean"
[[tileset.tms]]
id = "EqualEarthGreenwichWGS84Quad"
maxzoom = 2
[[tileset.tms]]
id = "WebMercatorQuad"
minzoom = 3
[tileset.postgis]
datasource = "osmdb"
[[tileset.postgis.layer]]
geometry_field = "geom"
geometry_type = "POLYGON"
name = "ocean"
#srid = 8857 / 3857
[[tileset.postgis.layer.query]]
minzoom = 0
maxzoom = 2
sql = """SELECT "geom" FROM "eq"."ocean_low""""
[[tileset.postgis.layer.query]]
minzoom = 3
maxzoom = 9
sql = """SELECT "id","geom" FROM "merc"."ocean_low""""
"#;
        let cfg: TileServiceCfg = parse_config(CONFIG).unwrap();
        let ts = &cfg.tilesets[0];
        let SourceParamCfg::Postgis(ref source) = ts.source else {
            panic!("Wrong tileset source")
        };
        assert_eq!(source.layers[0].minzoom(), 0);
        assert_eq!(source.layers[0].maxzoom(42), 9);
        assert_eq!(source.layers[0].zoom_steps(&[]), vec![0, 3]);
        assert_eq!(source.layers[0].zoom_steps(&ts.tms), vec![0, 3]);
    }
}