PMTiles cache, refactor file configs, modularize (#1094)

* Make it possible to have configuration specific to the file-based
config sections: pmtiles, mbtiles, and sprites.
* Implement a PMTiles directory cache shared between all PMTiles sources (both
HTTP and local), with a configurable max cache size in MB, or 0 to
disable. Defaults to 32 MB (see the config sketch after this list).
* PMTiles sources now share a single web client instance, which improves connection
reuse when multiple PMTiles files reside on the same host.
* Major refactoring to allow modular reuse, enabling the following build
features:
    * **postgres** - enable PostgreSQL/PostGIS tile sources
    * **pmtiles** - enable PMTiles tile sources
    * **mbtiles** - enable MBTiles tile sources
    * **fonts** - enable font sources
    * **sprites** - enable sprite sources
* Use justfile in the CI
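
A minimal YAML sketch of the new cache option (the `dir_cache_size_mb` key and the 32 MB default come from this change; the paths and the remote URL are illustrative):

```yaml
pmtiles:
  # Memory (in MB) to use for caching PMTiles directories [default: 32, 0 to disable]
  dir_cache_size_mb: 100
  paths:
    - /dir-path
    - /path/to/pmtiles.pmtiles
    - https://example.org/remote.pmtiles
```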

Fixes #1093
Yuri Astrakhan 2023-12-25 00:52:04 -05:00 committed by GitHub
parent 7ff2f5105a
commit 2def6288f1
31 changed files with 583 additions and 395 deletions

Cargo.lock generated

@ -1962,7 +1962,7 @@ dependencies = [
[[package]]
name = "martin"
version = "0.11.6"
version = "0.12.0"
dependencies = [
"actix-cors",
"actix-http",
@ -2014,7 +2014,7 @@ dependencies = [
[[package]]
name = "martin-tile-utils"
version = "0.3.1"
version = "0.4.0"
dependencies = [
"approx",
"insta",
@ -2022,7 +2022,7 @@ dependencies = [
[[package]]
name = "mbtiles"
version = "0.8.5"
version = "0.9.0"
dependencies = [
"actix-rt",
"anyhow",


@ -48,8 +48,8 @@ insta = "1"
itertools = "0.12"
json-patch = "1.2"
log = "0.4"
martin-tile-utils = { path = "./martin-tile-utils", version = "0.3.0" }
mbtiles = { path = "./mbtiles", version = "0.8.0" }
martin-tile-utils = { path = "./martin-tile-utils", version = "0.4.0" }
mbtiles = { path = "./mbtiles", version = "0.9.0" }
moka = { version = "0.12", features = ["future"] }
num_cpus = "1"
pbf_font_tools = { version = "2.5.0", features = ["freetype"] }


@ -106,6 +106,16 @@ Martin data is available via the HTTP `GET` endpoints:
| `/font/{font1},…,{fontN}/{start}-{end}` | Composite Font source |
| `/health` | Martin server health check: returns 200 `OK` |
## Re-use Martin as a library
Martin can be used as a standalone server or as a library in your own Rust application. When used as a library, you can enable the following features (see the dependency sketch after this list):
* **postgres** - enable PostgreSQL/PostGIS tile sources
* **pmtiles** - enable PMTiles tile sources
* **mbtiles** - enable MBTiles tile sources
* **fonts** - enable font sources
* **sprites** - enable sprite sources
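For example, a downstream application can depend on Martin with only the sources it needs. A minimal sketch, assuming the crate version published by this release and an app that only serves PMTiles and sprites:

```toml
[dependencies]
# Default features are fonts, mbtiles, pmtiles, postgres, and sprites;
# disable them and re-enable only what this application uses.
martin = { version = "0.12.0", default-features = false, features = ["pmtiles", "sprites"] }
```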
## Documentation
See [Martin book](https://maplibre.org/martin/) for complete documentation.

debian/config.yaml vendored

@ -17,6 +17,7 @@ worker_processes: 8
# auto_bounds: skip
# pmtiles:
# dir_cache_size_mb: 100
# paths:
# - /dir-path
# - /path/to/pmtiles.pmtiles
@ -32,6 +33,9 @@ worker_processes: 8
# sources:
# mb-src1: /path/to/mbtiles1.mbtiles
# sprites:
# - /path/to/sprites_dir
# fonts:
# - /path/to/font/file.ttf
# - /path/to/font_dir


@ -155,6 +155,8 @@ postgres:
# Publish PMTiles files from local disk or proxy to a web server
pmtiles:
# Memory (in MB) to use for caching PMTiles directories [default: 32, 0 to disable]
dir_cache_size_mb: 100
paths:
# scan this whole dir, matching all *.pmtiles files
- /dir-path


@ -273,7 +273,16 @@ fmt2:
# Run cargo check
check:
cargo check --workspace --all-targets --bins --tests --lib --benches
RUSTFLAGS='-D warnings' cargo check --bins --tests --lib --benches --examples -p martin-tile-utils
RUSTFLAGS='-D warnings' cargo check --bins --tests --lib --benches --examples -p mbtiles
RUSTFLAGS='-D warnings' cargo check --bins --tests --lib --benches --examples -p mbtiles --no-default-features
RUSTFLAGS='-D warnings' cargo check --bins --tests --lib --benches --examples -p martin
RUSTFLAGS='-D warnings' cargo check --bins --tests --lib --benches --examples -p martin --no-default-features
RUSTFLAGS='-D warnings' cargo check --bins --tests --lib --benches --examples -p martin --no-default-features --features fonts
RUSTFLAGS='-D warnings' cargo check --bins --tests --lib --benches --examples -p martin --no-default-features --features mbtiles
RUSTFLAGS='-D warnings' cargo check --bins --tests --lib --benches --examples -p martin --no-default-features --features pmtiles
RUSTFLAGS='-D warnings' cargo check --bins --tests --lib --benches --examples -p martin --no-default-features --features postgres
RUSTFLAGS='-D warnings' cargo check --bins --tests --lib --benches --examples -p martin --no-default-features --features sprites
# Verify doc build
check-doc:
@ -289,7 +298,7 @@ clippy-md:
'echo -e "/workdir/README.md\n$(find /workdir/docs/src -name "*.md")" | tr "\n" "\0" | xargs -0 -P 5 -n1 -I{} markdown-link-check --config /workdir/.github/files/markdown.links.config.json {}'
# These steps automatically run before git push via a git hook
git-pre-push: env-info restart fmt clippy check check-doc test
git-pre-push: env-info restart fmt clippy check-doc test check
# Get environment info
[private]


@ -2,7 +2,7 @@ lints.workspace = true
[package]
name = "martin-tile-utils"
version = "0.3.1"
version = "0.4.0"
authors = ["Yuri Astrakhan <YuriAstrakhan@gmail.com>", "MapLibre contributors"]
description = "Utilites to help with map tile processing, such as type and compression detection. Used by the MapLibre's Martin tile server."
keywords = ["maps", "tiles", "mvt", "tileserver"]


@ -3,7 +3,7 @@ lints.workspace = true
[package]
name = "martin"
# Once the release is published with the hash, update https://github.com/maplibre/homebrew-martin
version = "0.11.6"
version = "0.12.0"
authors = ["Stepan Kuzmin <to.stepan.kuzmin@gmail.com>", "Yuri Astrakhan <YuriAstrakhan@gmail.com>", "MapLibre contributors"]
description = "Blazing fast and lightweight tile server with PostGIS, MBTiles, and PMTiles support"
keywords = ["maps", "tiles", "mbtiles", "pmtiles", "postgis"]
@ -59,9 +59,12 @@ name = "bench"
harness = false
[features]
default = ["sprites", "fonts"]
sprites = []
fonts = []
default = ["fonts", "mbtiles", "pmtiles", "postgres", "sprites"]
fonts = ["dep:bit-set","dep:pbf_font_tools"]
mbtiles = []
pmtiles = ["dep:moka"]
postgres = ["dep:deadpool-postgres", "dep:json-patch", "dep:postgis", "dep:postgres", "dep:postgres-protocol", "dep:semver", "dep:tokio-postgres-rustls"]
sprites = ["dep:spreet"]
bless-tests = []
[dependencies]
@ -70,41 +73,41 @@ actix-http.workspace = true
actix-rt.workspace = true
actix-web.workspace = true
async-trait.workspace = true
bit-set.workspace = true
bit-set = { workspace = true, optional = true }
brotli.workspace = true
clap.workspace = true
deadpool-postgres.workspace = true
deadpool-postgres = { workspace = true, optional = true }
env_logger.workspace = true
flate2.workspace = true
futures.workspace = true
itertools.workspace = true
json-patch.workspace = true
json-patch = { workspace = true, optional = true }
log.workspace = true
martin-tile-utils.workspace = true
mbtiles.workspace = true
moka.workspace = true
moka = { workspace = true, optional = true }
num_cpus.workspace = true
pbf_font_tools.workspace = true
pbf_font_tools = { workspace = true, optional = true }
pmtiles.workspace = true
postgis.workspace = true
postgres-protocol.workspace = true
postgres.workspace = true
postgis = { workspace = true, optional = true }
postgres-protocol = { workspace = true, optional = true }
postgres = { workspace = true, optional = true }
regex.workspace = true
reqwest.workspace = true
rustls-native-certs.workspace = true
rustls-pemfile.workspace = true
rustls.workspace = true
semver.workspace = true
semver = { workspace = true, optional = true }
serde.workspace = true
serde_json.workspace = true
serde_with.workspace = true
serde_yaml.workspace = true
spreet.workspace = true
spreet = { workspace = true, optional = true }
subst.workspace = true
thiserror.workspace = true
tilejson.workspace = true
tokio = { workspace = true, features = ["io-std"] }
tokio-postgres-rustls.workspace = true
tokio-postgres-rustls = { workspace = true, optional = true }
url.workspace = true
[dev-dependencies]


@ -4,7 +4,9 @@ pub use connections::{Arguments, State};
mod environment;
pub use environment::{Env, OsEnv};
#[cfg(feature = "postgres")]
mod pg;
#[cfg(feature = "postgres")]
pub use pg::{BoundsCalcType, PgArgs, DEFAULT_BOUNDS_TIMEOUT};
mod root;


@ -2,14 +2,12 @@ use std::path::PathBuf;
use clap::Parser;
use log::warn;
use url::Url;
use crate::args::connections::Arguments;
use crate::args::environment::Env;
use crate::args::pg::PgArgs;
use crate::args::srv::SrvArgs;
use crate::args::State::{Ignore, Share, Take};
use crate::config::Config;
#[cfg(any(feature = "mbtiles", feature = "pmtiles", feature = "sprites"))]
use crate::file_config::FileConfigEnum;
use crate::MartinError::ConfigAndConnectionsError;
use crate::{MartinResult, OptOneMany};
@ -27,8 +25,9 @@ pub struct Args {
pub extras: ExtraArgs,
#[command(flatten)]
pub srv: SrvArgs,
#[cfg(feature = "postgres")]
#[command(flatten)]
pub pg: Option<PgArgs>,
pub pg: Option<crate::args::pg::PgArgs>,
}
// None of these params will be transferred to the config
@ -80,19 +79,26 @@ impl Args {
self.srv.merge_into_config(&mut config.srv);
#[allow(unused_mut)]
let mut cli_strings = Arguments::new(self.meta.connection);
let pg_args = self.pg.unwrap_or_default();
if config.postgres.is_none() {
config.postgres = pg_args.into_config(&mut cli_strings, env);
} else {
// config was loaded from a file, we can only apply a few CLI overrides to it
pg_args.override_config(&mut config.postgres, env);
#[cfg(feature = "postgres")]
{
let pg_args = self.pg.unwrap_or_default();
if config.postgres.is_none() {
config.postgres = pg_args.into_config(&mut cli_strings, env);
} else {
// config was loaded from a file, we can only apply a few CLI overrides to it
pg_args.override_config(&mut config.postgres, env);
}
}
#[cfg(feature = "pmtiles")]
if !cli_strings.is_empty() {
config.pmtiles = parse_file_args(&mut cli_strings, "pmtiles", true);
}
#[cfg(feature = "mbtiles")]
if !cli_strings.is_empty() {
config.mbtiles = parse_file_args(&mut cli_strings, "mbtiles", false);
}
@ -110,9 +116,10 @@ impl Args {
}
}
#[cfg(any(feature = "pmtiles", feature = "mbtiles"))]
fn is_url(s: &str, extension: &str) -> bool {
if s.starts_with("http") {
if let Ok(url) = Url::parse(s) {
if let Ok(url) = url::Url::parse(s) {
if url.scheme() == "http" || url.scheme() == "https" {
if let Some(ext) = url.path().rsplit('.').next() {
return ext == extension;
@ -123,11 +130,14 @@ fn is_url(s: &str, extension: &str) -> bool {
false
}
pub fn parse_file_args(
#[cfg(any(feature = "pmtiles", feature = "mbtiles"))]
pub fn parse_file_args<T: crate::file_config::ConfigExtras>(
cli_strings: &mut Arguments,
extension: &str,
allow_url: bool,
) -> FileConfigEnum {
) -> FileConfigEnum<T> {
use crate::args::State::{Ignore, Share, Take};
let paths = cli_strings.process(|s| match PathBuf::try_from(s) {
Ok(v) => {
if allow_url && is_url(s, extension) {
@ -149,9 +159,7 @@ pub fn parse_file_args(
#[cfg(test)]
mod tests {
use super::*;
use crate::pg::PgConfig;
use crate::test_utils::{some, FauxEnv};
use crate::utils::OptOneMany;
use crate::test_utils::FauxEnv;
use crate::MartinError::UnrecognizableConnections;
fn parse(args: &[&str]) -> MartinResult<(Config, MetaArgs)> {
@ -169,8 +177,12 @@ mod tests {
assert_eq!(args, expected);
}
#[cfg(feature = "postgres")]
#[test]
fn cli_with_config() {
use crate::test_utils::some;
use crate::utils::OptOneMany;
let args = parse(&["martin", "--config", "c.toml"]).unwrap();
let meta = MetaArgs {
config: Some(PathBuf::from("c.toml")),
@ -188,7 +200,7 @@ mod tests {
let args = parse(&["martin", "postgres://connection"]).unwrap();
let cfg = Config {
postgres: OptOneMany::One(PgConfig {
postgres: OptOneMany::One(crate::pg::PgConfig {
connection_string: some("postgres://connection"),
..Default::default()
}),


@ -11,7 +11,7 @@ use clap::Parser;
use futures::stream::{self, StreamExt};
use futures::TryStreamExt;
use log::{debug, error, info, log_enabled};
use martin::args::{Args, ExtraArgs, MetaArgs, OsEnv, PgArgs, SrvArgs};
use martin::args::{Args, ExtraArgs, MetaArgs, OsEnv, SrvArgs};
use martin::srv::{get_tile_content, merge_tilejson, RESERVED_KEYWORDS};
use martin::{
append_rect, read_config, Config, IdResolver, MartinError, MartinResult, ServerState, Source,
@ -46,8 +46,9 @@ pub struct CopierArgs {
pub copy: CopyArgs,
#[command(flatten)]
pub meta: MetaArgs,
#[cfg(feature = "postgres")]
#[command(flatten)]
pub pg: Option<PgArgs>,
pub pg: Option<martin::args::PgArgs>,
}
#[serde_with::serde_as]
@ -137,6 +138,7 @@ async fn start(copy_args: CopierArgs) -> MartinCpResult<()> {
meta: copy_args.meta,
extras: ExtraArgs::default(),
srv: SrvArgs::default(),
#[cfg(feature = "postgres")]
pg: copy_args.pg,
};


@ -11,15 +11,13 @@ use log::info;
use serde::{Deserialize, Serialize};
use subst::VariableMap;
use crate::file_config::{resolve_files, resolve_files_urls, FileConfigEnum};
#[cfg(any(feature = "mbtiles", feature = "pmtiles", feature = "sprites"))]
use crate::file_config::FileConfigEnum;
#[cfg(feature = "fonts")]
use crate::fonts::FontSources;
use crate::mbtiles::MbtSource;
use crate::pg::PgConfig;
use crate::pmtiles::{PmtFileSource, PmtHttpSource};
use crate::source::{TileInfoSources, TileSources};
#[cfg(feature = "sprites")]
use crate::sprites::SpriteSources;
use crate::sprites::{SpriteConfig, SpriteSources};
use crate::srv::SrvConfig;
use crate::MartinError::{ConfigLoadError, ConfigParseError, ConfigWriteError, NoSources};
use crate::{IdResolver, MartinResult, OptOneMany};
@ -39,18 +37,21 @@ pub struct Config {
#[serde(flatten)]
pub srv: SrvConfig,
#[cfg(feature = "postgres")]
#[serde(default, skip_serializing_if = "OptOneMany::is_none")]
pub postgres: OptOneMany<PgConfig>,
pub postgres: OptOneMany<crate::pg::PgConfig>,
#[cfg(feature = "pmtiles")]
#[serde(default, skip_serializing_if = "FileConfigEnum::is_none")]
pub pmtiles: FileConfigEnum,
pub pmtiles: FileConfigEnum<crate::pmtiles::PmtConfig>,
#[cfg(feature = "mbtiles")]
#[serde(default, skip_serializing_if = "FileConfigEnum::is_none")]
pub mbtiles: FileConfigEnum,
pub mbtiles: FileConfigEnum<crate::mbtiles::MbtConfig>,
#[cfg(feature = "sprites")]
#[serde(default, skip_serializing_if = "FileConfigEnum::is_none")]
pub sprites: FileConfigEnum,
pub sprites: FileConfigEnum<SpriteConfig>,
#[serde(default, skip_serializing_if = "OptOneMany::is_none")]
pub fonts: OptOneMany<PathBuf>,
@ -65,20 +66,33 @@ impl Config {
let mut res = UnrecognizedValues::new();
copy_unrecognized_config(&mut res, "", &self.unrecognized);
#[cfg(feature = "postgres")]
for pg in self.postgres.iter_mut() {
res.extend(pg.finalize()?);
}
#[cfg(feature = "pmtiles")]
res.extend(self.pmtiles.finalize("pmtiles.")?);
#[cfg(feature = "mbtiles")]
res.extend(self.mbtiles.finalize("mbtiles.")?);
#[cfg(feature = "sprites")]
res.extend(self.sprites.finalize("sprites.")?);
// TODO: support for unrecognized fonts?
// res.extend(self.fonts.finalize("fonts.")?);
let is_empty =
self.postgres.is_empty() && self.pmtiles.is_empty() && self.mbtiles.is_empty();
let is_empty = true;
#[cfg(feature = "postgres")]
let is_empty = is_empty && self.postgres.is_empty();
#[cfg(feature = "pmtiles")]
let is_empty = is_empty && self.pmtiles.is_empty();
#[cfg(feature = "mbtiles")]
let is_empty = is_empty && self.mbtiles.is_empty();
#[cfg(feature = "sprites")]
let is_empty = is_empty && self.sprites.is_empty();
@ -103,26 +117,30 @@ impl Config {
})
}
async fn resolve_tile_sources(&mut self, idr: IdResolver) -> MartinResult<TileSources> {
let new_pmt_src = &mut PmtFileSource::new_box;
let new_pmt_url_src = &mut PmtHttpSource::new_url_box;
let new_mbt_src = &mut MbtSource::new_box;
async fn resolve_tile_sources(
&mut self,
#[allow(unused_variables)] idr: IdResolver,
) -> MartinResult<TileSources> {
#[allow(unused_mut)]
let mut sources: Vec<Pin<Box<dyn Future<Output = MartinResult<TileInfoSources>>>>> =
Vec::new();
#[cfg(feature = "postgres")]
for s in self.postgres.iter_mut() {
sources.push(Box::pin(s.resolve(idr.clone())));
}
#[cfg(feature = "pmtiles")]
if !self.pmtiles.is_empty() {
let cfg = &mut self.pmtiles;
let val = resolve_files_urls(cfg, idr.clone(), "pmtiles", new_pmt_src, new_pmt_url_src);
let val = crate::file_config::resolve_files(cfg, idr.clone(), "pmtiles");
sources.push(Box::pin(val));
}
#[cfg(feature = "mbtiles")]
if !self.mbtiles.is_empty() {
let cfg = &mut self.mbtiles;
let val = resolve_files(cfg, idr.clone(), "mbtiles", new_mbt_src);
let val = crate::file_config::resolve_files(cfg, idr.clone(), "mbtiles");
sources.push(Box::pin(val));
}
@ -183,6 +201,7 @@ where
subst::yaml::from_str(contents, env).map_err(|e| ConfigParseError(e, file_name.into()))
}
#[cfg(feature = "postgres")]
#[cfg(test)]
pub mod tests {
use super::*;


@ -1,8 +1,9 @@
use std::collections::{BTreeMap, HashSet};
use std::future::Future;
use std::fmt::Debug;
use std::mem;
use std::path::{Path, PathBuf};
use async_trait::async_trait;
use futures::TryFutureExt;
use log::{info, warn};
use serde::{Deserialize, Serialize};
@ -39,36 +40,60 @@ pub enum FileError {
#[error(r"Unable to parse metadata in file {1}: {0}")]
InvalidUrlMetadata(String, Url),
#[error(r#"Unable to aquire connection to file: {0}"#)]
AquireConnError(String),
#[error(r#"Unable to acquire connection to file: {0}"#)]
AcquireConnError(String),
#[error(r#"PMTiles error {0} processing {1}"#)]
PmtError(pmtiles::PmtError, String),
}
pub trait ConfigExtras: Clone + Debug + Default + PartialEq + Send {
fn init_parsing(&mut self) -> FileResult<()> {
Ok(())
}
#[must_use]
fn is_default(&self) -> bool {
true
}
fn get_unrecognized(&self) -> &UnrecognizedValues;
}
#[async_trait]
pub trait SourceConfigExtras: ConfigExtras {
#[must_use]
fn parse_urls() -> bool {
false
}
async fn new_sources(&self, id: String, path: PathBuf) -> FileResult<Box<dyn Source>>;
async fn new_sources_url(&self, id: String, url: Url) -> FileResult<Box<dyn Source>>;
}
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)]
#[serde(untagged)]
pub enum FileConfigEnum {
pub enum FileConfigEnum<T> {
#[default]
None,
Path(PathBuf),
Paths(Vec<PathBuf>),
Config(FileConfig),
Config(FileConfig<T>),
}
impl FileConfigEnum {
impl<T: ConfigExtras> FileConfigEnum<T> {
#[must_use]
pub fn new(paths: Vec<PathBuf>) -> FileConfigEnum {
Self::new_extended(paths, BTreeMap::new(), UnrecognizedValues::new())
pub fn new(paths: Vec<PathBuf>) -> FileConfigEnum<T> {
Self::new_extended(paths, BTreeMap::new(), T::default())
}
#[must_use]
pub fn new_extended(
paths: Vec<PathBuf>,
configs: BTreeMap<String, FileConfigSrc>,
unrecognized: UnrecognizedValues,
) -> FileConfigEnum {
if configs.is_empty() && unrecognized.is_empty() {
custom: T,
) -> Self {
if configs.is_empty() && custom.is_default() {
match paths.len() {
0 => FileConfigEnum::None,
1 => FileConfigEnum::Path(paths.into_iter().next().unwrap()),
@ -82,7 +107,7 @@ impl FileConfigEnum {
} else {
Some(configs)
},
unrecognized,
custom,
})
}
}
@ -102,25 +127,27 @@ impl FileConfigEnum {
}
}
pub fn extract_file_config(&mut self) -> Option<FileConfig> {
match self {
FileConfigEnum::None => None,
FileConfigEnum::Path(path) => Some(FileConfig {
pub fn extract_file_config(&mut self) -> FileResult<Option<FileConfig<T>>> {
let mut res = match self {
FileConfigEnum::None => return Ok(None),
FileConfigEnum::Path(path) => FileConfig {
paths: One(mem::take(path)),
..FileConfig::default()
}),
FileConfigEnum::Paths(paths) => Some(FileConfig {
},
FileConfigEnum::Paths(paths) => FileConfig {
paths: Many(mem::take(paths)),
..Default::default()
}),
FileConfigEnum::Config(cfg) => Some(mem::take(cfg)),
}
},
FileConfigEnum::Config(cfg) => mem::take(cfg),
};
res.custom.init_parsing()?;
Ok(Some(res))
}
pub fn finalize(&self, prefix: &str) -> MartinResult<UnrecognizedValues> {
let mut res = UnrecognizedValues::new();
if let Self::Config(cfg) = self {
copy_unrecognized_config(&mut res, prefix, &cfg.unrecognized);
copy_unrecognized_config(&mut res, prefix, cfg.get_unrecognized());
}
Ok(res)
}
@ -128,20 +155,28 @@ impl FileConfigEnum {
#[serde_with::skip_serializing_none]
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)]
pub struct FileConfig {
pub struct FileConfig<T> {
/// A list of file paths
#[serde(default, skip_serializing_if = "OptOneMany::is_none")]
pub paths: OptOneMany<PathBuf>,
/// A map of source IDs to file paths or config objects
pub sources: Option<BTreeMap<String, FileConfigSrc>>,
/// Any customizations related to the specifics of the configuration section
#[serde(flatten)]
pub unrecognized: UnrecognizedValues,
pub custom: T,
}
impl FileConfig {
impl<T: ConfigExtras> FileConfig<T> {
#[must_use]
pub fn is_empty(&self) -> bool {
self.paths.is_none() && self.sources.is_none()
self.paths.is_none()
&& self.sources.is_none()
&& self.get_unrecognized().is_empty()
&& self.custom.is_default()
}
pub fn get_unrecognized(&self) -> &UnrecognizedValues {
self.custom.get_unrecognized()
}
}
@ -181,54 +216,22 @@ pub struct FileConfigSource {
pub path: PathBuf,
}
async fn dummy_resolver(_id: String, _url: Url) -> FileResult<Box<dyn Source>> {
unreachable!()
}
pub async fn resolve_files<Fut>(
config: &mut FileConfigEnum,
pub async fn resolve_files<T: SourceConfigExtras>(
config: &mut FileConfigEnum<T>,
idr: IdResolver,
extension: &str,
new_source: &mut impl FnMut(String, PathBuf) -> Fut,
) -> MartinResult<TileInfoSources>
where
Fut: Future<Output = Result<Box<dyn Source>, FileError>>,
{
let dummy = &mut dummy_resolver;
resolve_int(config, idr, extension, false, new_source, dummy)
) -> MartinResult<TileInfoSources> {
resolve_int(config, idr, extension)
.map_err(crate::MartinError::from)
.await
}
pub async fn resolve_files_urls<Fut1, Fut2>(
config: &mut FileConfigEnum,
async fn resolve_int<T: SourceConfigExtras>(
config: &mut FileConfigEnum<T>,
idr: IdResolver,
extension: &str,
new_source: &mut impl FnMut(String, PathBuf) -> Fut1,
new_url_source: &mut impl FnMut(String, Url) -> Fut2,
) -> MartinResult<TileInfoSources>
where
Fut1: Future<Output = Result<Box<dyn Source>, FileError>>,
Fut2: Future<Output = Result<Box<dyn Source>, FileError>>,
{
resolve_int(config, idr, extension, true, new_source, new_url_source)
.map_err(crate::MartinError::from)
.await
}
async fn resolve_int<Fut1, Fut2>(
config: &mut FileConfigEnum,
idr: IdResolver,
extension: &str,
parse_urls: bool,
new_source: &mut impl FnMut(String, PathBuf) -> Fut1,
new_url_source: &mut impl FnMut(String, Url) -> Fut2,
) -> FileResult<TileInfoSources>
where
Fut1: Future<Output = Result<Box<dyn Source>, FileError>>,
Fut2: Future<Output = Result<Box<dyn Source>, FileError>>,
{
let Some(cfg) = config.extract_file_config() else {
) -> FileResult<TileInfoSources> {
let Some(cfg) = config.extract_file_config()? else {
return Ok(TileInfoSources::default());
};
@ -239,12 +242,12 @@ where
if let Some(sources) = cfg.sources {
for (id, source) in sources {
if let Some(url) = parse_url(parse_urls, source.get_path())? {
if let Some(url) = parse_url(T::parse_urls(), source.get_path())? {
let dup = !files.insert(source.get_path().clone());
let dup = if dup { "duplicate " } else { "" };
let id = idr.resolve(&id, url.to_string());
configs.insert(id.clone(), source);
results.push(new_url_source(id.clone(), url.clone()).await?);
results.push(cfg.custom.new_sources_url(id.clone(), url.clone()).await?);
info!("Configured {dup}source {id} from {}", sanitize_url(&url));
} else {
let can = source.abs_path()?;
@ -258,13 +261,13 @@ where
let id = idr.resolve(&id, can.to_string_lossy().to_string());
info!("Configured {dup}source {id} from {}", can.display());
configs.insert(id.clone(), source.clone());
results.push(new_source(id, source.into_path()).await?);
results.push(cfg.custom.new_sources(id, source.into_path()).await?);
}
}
}
for path in cfg.paths {
if let Some(url) = parse_url(parse_urls, &path)? {
if let Some(url) = parse_url(T::parse_urls(), &path)? {
let id = url
.path_segments()
.and_then(Iterator::last)
@ -278,7 +281,7 @@ where
let id = idr.resolve(id, url.to_string());
configs.insert(id.clone(), FileConfigSrc::Path(path));
results.push(new_url_source(id.clone(), url.clone()).await?);
results.push(cfg.custom.new_sources_url(id.clone(), url.clone()).await?);
info!("Configured source {id} from URL {}", sanitize_url(&url));
} else {
let is_dir = path.is_dir();
@ -307,12 +310,12 @@ where
info!("Configured source {id} from {}", can.display());
files.insert(can);
configs.insert(id.clone(), FileConfigSrc::Path(path.clone()));
results.push(new_source(id, path).await?);
results.push(cfg.custom.new_sources(id, path).await?);
}
}
}
*config = FileConfigEnum::new_extended(directories, configs, cfg.unrecognized);
*config = FileConfigEnum::new_extended(directories, configs, cfg.custom);
Ok(results)
}
@ -351,70 +354,3 @@ fn parse_url(is_enabled: bool, path: &Path) -> Result<Option<Url>, FileError> {
.map(|v| Url::parse(v).map_err(|e| InvalidSourceUrl(e, v.to_string())))
.transpose()
}
#[cfg(test)]
mod tests {
use std::collections::BTreeMap;
use std::path::PathBuf;
use indoc::indoc;
use crate::file_config::{FileConfigEnum, FileConfigSource, FileConfigSrc};
#[test]
fn parse() {
let cfg = serde_yaml::from_str::<FileConfigEnum>(indoc! {"
paths:
- /dir-path
- /path/to/file2.ext
- http://example.org/file.ext
sources:
pm-src1: /tmp/file.ext
pm-src2:
path: /tmp/file.ext
pm-src3: https://example.org/file3.ext
pm-src4:
path: https://example.org/file4.ext
"})
.unwrap();
let res = cfg.finalize("").unwrap();
assert!(res.is_empty(), "unrecognized config: {res:?}");
let FileConfigEnum::Config(cfg) = cfg else {
panic!();
};
let paths = cfg.paths.clone().into_iter().collect::<Vec<_>>();
assert_eq!(
paths,
vec![
PathBuf::from("/dir-path"),
PathBuf::from("/path/to/file2.ext"),
PathBuf::from("http://example.org/file.ext"),
]
);
assert_eq!(
cfg.sources,
Some(BTreeMap::from_iter(vec![
(
"pm-src1".to_string(),
FileConfigSrc::Path(PathBuf::from("/tmp/file.ext"))
),
(
"pm-src2".to_string(),
FileConfigSrc::Obj(FileConfigSource {
path: PathBuf::from("/tmp/file.ext"),
})
),
(
"pm-src3".to_string(),
FileConfigSrc::Path(PathBuf::from("https://example.org/file3.ext"))
),
(
"pm-src4".to_string(),
FileConfigSrc::Obj(FileConfigSource {
path: PathBuf::from("https://example.org/file4.ext"),
})
),
]))
);
}
}


@ -17,8 +17,11 @@ pub mod args;
pub mod file_config;
#[cfg(feature = "fonts")]
pub mod fonts;
#[cfg(feature = "mbtiles")]
pub mod mbtiles;
#[cfg(feature = "postgres")]
pub mod pg;
#[cfg(feature = "pmtiles")]
pub mod pmtiles;
#[cfg(feature = "sprites")]
pub mod sprites;


@ -7,13 +7,39 @@ use async_trait::async_trait;
use log::trace;
use martin_tile_utils::TileInfo;
use mbtiles::MbtilesPool;
use serde::{Deserialize, Serialize};
use tilejson::TileJSON;
use url::Url;
use crate::file_config::FileError::{AquireConnError, InvalidMetadata, IoError};
use crate::file_config::FileResult;
use crate::config::UnrecognizedValues;
use crate::file_config::FileError::{AcquireConnError, InvalidMetadata, IoError};
use crate::file_config::{ConfigExtras, FileResult, SourceConfigExtras};
use crate::source::{TileData, UrlQuery};
use crate::{MartinResult, Source, TileCoord};
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)]
pub struct MbtConfig {
#[serde(flatten)]
pub unrecognized: UnrecognizedValues,
}
impl ConfigExtras for MbtConfig {
fn get_unrecognized(&self) -> &UnrecognizedValues {
&self.unrecognized
}
}
#[async_trait]
impl SourceConfigExtras for MbtConfig {
async fn new_sources(&self, id: String, path: PathBuf) -> FileResult<Box<dyn Source>> {
Ok(Box::new(MbtSource::new(id, path).await?))
}
async fn new_sources_url(&self, _id: String, _url: Url) -> FileResult<Box<dyn Source>> {
unreachable!()
}
}
#[derive(Clone)]
pub struct MbtSource {
id: String,
@ -34,10 +60,6 @@ impl Debug for MbtSource {
}
impl MbtSource {
pub async fn new_box(id: String, path: PathBuf) -> FileResult<Box<dyn Source>> {
Ok(Box::new(MbtSource::new(id, path).await?))
}
async fn new(id: String, path: PathBuf) -> FileResult<Self> {
let mbt = MbtilesPool::new(&path)
.await
@ -90,7 +112,7 @@ impl Source for MbtSource {
.mbtiles
.get_tile(xyz.z, xyz.x, xyz.y)
.await
.map_err(|_| AquireConnError(self.id.clone()))?
.map_err(|_| AcquireConnError(self.id.clone()))?
{
Ok(tile)
} else {
@ -105,3 +127,71 @@ impl Source for MbtSource {
}
}
}
#[cfg(test)]
mod tests {
use std::collections::BTreeMap;
use std::path::PathBuf;
use indoc::indoc;
use crate::file_config::{FileConfigEnum, FileConfigSource, FileConfigSrc};
use crate::mbtiles::MbtConfig;
#[test]
fn parse() {
let cfg = serde_yaml::from_str::<FileConfigEnum<MbtConfig>>(indoc! {"
paths:
- /dir-path
- /path/to/file2.ext
- http://example.org/file.ext
sources:
pm-src1: /tmp/file.ext
pm-src2:
path: /tmp/file.ext
pm-src3: https://example.org/file3.ext
pm-src4:
path: https://example.org/file4.ext
"})
.unwrap();
let res = cfg.finalize("").unwrap();
assert!(res.is_empty(), "unrecognized config: {res:?}");
let FileConfigEnum::Config(cfg) = cfg else {
panic!();
};
let paths = cfg.paths.clone().into_iter().collect::<Vec<_>>();
assert_eq!(
paths,
vec![
PathBuf::from("/dir-path"),
PathBuf::from("/path/to/file2.ext"),
PathBuf::from("http://example.org/file.ext"),
]
);
assert_eq!(
cfg.sources,
Some(BTreeMap::from_iter(vec![
(
"pm-src1".to_string(),
FileConfigSrc::Path(PathBuf::from("/tmp/file.ext"))
),
(
"pm-src2".to_string(),
FileConfigSrc::Obj(FileConfigSource {
path: PathBuf::from("/tmp/file.ext"),
})
),
(
"pm-src3".to_string(),
FileConfigSrc::Path(PathBuf::from("https://example.org/file3.ext"))
),
(
"pm-src4".to_string(),
FileConfigSrc::Obj(FileConfigSource {
path: PathBuf::from("https://example.org/file4.ext"),
})
),
]))
);
}
}


@ -11,9 +11,10 @@ use crate::config::{copy_unrecognized_config, UnrecognizedValues};
use crate::pg::config_function::FuncInfoSources;
use crate::pg::config_table::TableInfoSources;
use crate::pg::configurator::PgBuilder;
use crate::pg::utils::on_slow;
use crate::pg::PgResult;
use crate::source::TileInfoSources;
use crate::utils::{on_slow, IdResolver, OptBoolObj, OptOneMany};
use crate::utils::{IdResolver, OptBoolObj, OptOneMany};
use crate::MartinResult;
pub trait PgInfo {

View File

@ -36,7 +36,7 @@ pub struct PgBuilderFuncs {
#[derive(Debug, Default, PartialEq)]
#[cfg_attr(test, serde_with::skip_serializing_none, derive(serde::Serialize))]
pub struct PgBuilderTables {
#[cfg_attr(test, serde(serialize_with = "crate::utils::sorted_opt_set"))]
#[cfg_attr(test, serde(serialize_with = "crate::pg::utils::sorted_opt_set"))]
schemas: Option<HashSet<String>>,
source_id_format: String,
id_columns: Option<Vec<String>>,

View File

@ -1,13 +1,48 @@
use std::collections::{BTreeMap, HashMap};
use std::future::Future;
use std::time::Duration;
use deadpool_postgres::tokio_postgres::types::Json;
use futures::pin_mut;
use itertools::Itertools as _;
use log::{error, info, warn};
use postgis::{ewkb, LineString, Point, Polygon};
use tilejson::{Bounds, TileJSON};
use tokio::time::timeout;
use crate::source::UrlQuery;
#[cfg(test)]
pub fn sorted_opt_set<S: serde::Serializer>(
value: &Option<std::collections::HashSet<String>>,
serializer: S,
) -> Result<S::Ok, S::Error> {
use serde::Serialize as _;
value
.as_ref()
.map(|v| {
let mut v: Vec<_> = v.iter().collect();
v.sort();
v
})
.serialize(serializer)
}
pub async fn on_slow<T, S: FnOnce()>(
future: impl Future<Output = T>,
duration: Duration,
fn_on_slow: S,
) -> T {
pin_mut!(future);
if let Ok(result) = timeout(duration, &mut future).await {
result
} else {
fn_on_slow();
future.await
}
}
#[must_use]
pub fn json_to_hashmap(value: &serde_json::Value) -> InfoMap<String> {
let mut result = BTreeMap::new();


@ -1,58 +0,0 @@
use std::fmt::{Debug, Formatter};
use std::io;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use async_trait::async_trait;
use log::{trace, warn};
use martin_tile_utils::{Encoding, Format, TileInfo};
use pmtiles::async_reader::AsyncPmTilesReader;
use pmtiles::cache::NoCache;
use pmtiles::mmap::MmapBackend;
use pmtiles::{Compression, TileType};
use tilejson::TileJSON;
use crate::file_config::FileError::{InvalidMetadata, IoError};
use crate::file_config::FileResult;
use crate::pmtiles::impl_pmtiles_source;
use crate::source::{Source, UrlQuery};
use crate::{MartinResult, TileCoord, TileData};
impl_pmtiles_source!(
PmtFileSource,
MmapBackend,
NoCache,
PathBuf,
Path::display,
InvalidMetadata
);
impl PmtFileSource {
pub async fn new_box(id: String, path: PathBuf) -> FileResult<Box<dyn Source>> {
Ok(Box::new(PmtFileSource::new(id, path).await?))
}
async fn new(id: String, path: PathBuf) -> FileResult<Self> {
let backend = MmapBackend::try_from(path.as_path())
.await
.map_err(|e| {
io::Error::new(
io::ErrorKind::Other,
format!("{e:?}: Cannot open file {}", path.display()),
)
})
.map_err(|e| IoError(e, path.clone()))?;
let reader = AsyncPmTilesReader::try_from_source(backend).await;
let reader = reader
.map_err(|e| {
io::Error::new(
io::ErrorKind::Other,
format!("{e:?}: Cannot open file {}", path.display()),
)
})
.map_err(|e| IoError(e, path.clone()))?;
Self::new_int(id, path, reader).await
}
}


@ -1,76 +0,0 @@
use std::convert::identity;
use std::fmt::{Debug, Formatter};
use std::sync::Arc;
use async_trait::async_trait;
use log::{trace, warn};
use martin_tile_utils::{Encoding, Format, TileInfo};
use moka::future::Cache;
use pmtiles::async_reader::AsyncPmTilesReader;
use pmtiles::cache::{DirCacheResult, DirectoryCache};
use pmtiles::http::HttpBackend;
use pmtiles::{Compression, Directory, TileType};
use reqwest::Client;
use tilejson::TileJSON;
use url::Url;
use crate::file_config::FileError::InvalidUrlMetadata;
use crate::file_config::{FileError, FileResult};
use crate::pmtiles::impl_pmtiles_source;
use crate::source::{Source, UrlQuery};
use crate::{MartinResult, TileCoord, TileData};
struct PmtCache(Cache<usize, Directory>);
impl PmtCache {
fn new(max_capacity: u64) -> Self {
Self(
Cache::builder()
.weigher(|_key, value: &Directory| -> u32 {
value.get_approx_byte_size().try_into().unwrap_or(u32::MAX)
})
.max_capacity(max_capacity)
.build(),
)
}
}
#[async_trait]
impl DirectoryCache for PmtCache {
async fn get_dir_entry(&self, offset: usize, tile_id: u64) -> DirCacheResult {
match self.0.get(&offset).await {
Some(dir) => dir.find_tile_id(tile_id).into(),
None => DirCacheResult::NotCached,
}
}
async fn insert_dir(&self, offset: usize, directory: Directory) {
self.0.insert(offset, directory).await;
}
}
impl_pmtiles_source!(
PmtHttpSource,
HttpBackend,
PmtCache,
Url,
identity,
InvalidUrlMetadata
);
impl PmtHttpSource {
pub async fn new_url_box(id: String, url: Url) -> FileResult<Box<dyn Source>> {
let client = Client::new();
let cache = PmtCache::new(4 * 1024 * 1024);
Ok(Box::new(
PmtHttpSource::new_url(client, cache, id, url).await?,
))
}
async fn new_url(client: Client, cache: PmtCache, id: String, url: Url) -> FileResult<Self> {
let reader = AsyncPmTilesReader::new_with_cached_url(cache, client, url.clone()).await;
let reader = reader.map_err(|e| FileError::PmtError(e, url.to_string()))?;
Self::new_int(id, url, reader).await
}
}


@ -1,16 +1,173 @@
mod file_pmtiles;
mod http_pmtiles;
use std::convert::identity;
use std::fmt::{Debug, Formatter};
use std::io;
use std::path::{Path, PathBuf};
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering::Relaxed;
use std::sync::Arc;
pub use file_pmtiles::PmtFileSource;
pub use http_pmtiles::PmtHttpSource;
use async_trait::async_trait;
use log::{trace, warn};
use martin_tile_utils::{Encoding, Format, TileInfo};
use moka::future::Cache;
use pmtiles::async_reader::AsyncPmTilesReader;
use pmtiles::cache::{DirCacheResult, DirectoryCache};
use pmtiles::http::HttpBackend;
use pmtiles::mmap::MmapBackend;
use pmtiles::{Compression, Directory, TileType};
use reqwest::Client;
use serde::{Deserialize, Serialize};
use tilejson::TileJSON;
use url::Url;
use crate::config::UnrecognizedValues;
use crate::file_config::FileError::{InvalidMetadata, InvalidUrlMetadata, IoError};
use crate::file_config::{ConfigExtras, FileError, FileResult, SourceConfigExtras};
use crate::source::UrlQuery;
use crate::{MartinResult, Source, TileCoord, TileData};
type PmtCacheObject = Cache<(usize, usize), Directory>;
#[derive(Clone, Debug)]
pub struct PmtCache {
id: usize,
/// (id, offset) -> Directory, or None to disable caching
cache: Option<PmtCacheObject>,
}
impl PmtCache {
#[must_use]
pub fn new(id: usize, cache: Option<PmtCacheObject>) -> Self {
Self { id, cache }
}
}
#[async_trait]
impl DirectoryCache for PmtCache {
async fn get_dir_entry(&self, offset: usize, tile_id: u64) -> DirCacheResult {
if let Some(cache) = &self.cache {
if let Some(dir) = cache.get(&(self.id, offset)).await {
return dir.find_tile_id(tile_id).into();
}
}
DirCacheResult::NotCached
}
async fn insert_dir(&self, offset: usize, directory: Directory) {
if let Some(cache) = &self.cache {
cache.insert((self.id, offset), directory).await;
}
}
}
#[serde_with::skip_serializing_none]
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct PmtConfig {
pub dir_cache_size_mb: Option<u64>,
#[serde(flatten)]
pub unrecognized: UnrecognizedValues,
//
// The rest are internal state, not serialized
//
#[serde(skip)]
pub client: Option<Client>,
#[serde(skip)]
pub next_cache_id: AtomicUsize,
#[serde(skip)]
pub cache: Option<PmtCacheObject>,
}
impl PartialEq for PmtConfig {
fn eq(&self, other: &Self) -> bool {
self.dir_cache_size_mb == other.dir_cache_size_mb && self.unrecognized == other.unrecognized
}
}
impl Clone for PmtConfig {
fn clone(&self) -> Self {
// State is not shared between clones, only the serialized config
Self {
dir_cache_size_mb: self.dir_cache_size_mb,
unrecognized: self.unrecognized.clone(),
..Default::default()
}
}
}
impl PmtConfig {
/// Create a new cache object for a source, giving it a unique internal ID
/// and a reference to the global cache.
pub fn new_cached_source(&self) -> PmtCache {
PmtCache::new(self.next_cache_id.fetch_add(1, Relaxed), self.cache.clone())
}
}
impl ConfigExtras for PmtConfig {
fn init_parsing(&mut self) -> FileResult<()> {
assert!(self.client.is_none());
assert!(self.cache.is_none());
self.client = Some(Client::new());
// Allow cache size to be disabled with 0
let cache_size = self.dir_cache_size_mb.unwrap_or(32) * 1024 * 1024;
if cache_size > 0 {
self.cache = Some(
Cache::builder()
.weigher(|_key, value: &Directory| -> u32 {
value.get_approx_byte_size().try_into().unwrap_or(u32::MAX)
})
.max_capacity(cache_size)
.build(),
);
}
Ok(())
}
fn is_default(&self) -> bool {
true
}
fn get_unrecognized(&self) -> &UnrecognizedValues {
&self.unrecognized
}
}
#[async_trait]
impl SourceConfigExtras for PmtConfig {
fn parse_urls() -> bool {
true
}
async fn new_sources(&self, id: String, path: PathBuf) -> FileResult<Box<dyn Source>> {
Ok(Box::new(
PmtFileSource::new(self.new_cached_source(), id, path).await?,
))
}
async fn new_sources_url(&self, id: String, url: Url) -> FileResult<Box<dyn Source>> {
Ok(Box::new(
PmtHttpSource::new(
self.client.clone().unwrap(),
self.new_cached_source(),
id,
url,
)
.await?,
))
}
}
macro_rules! impl_pmtiles_source {
($name: ident, $backend: ty, $cache: ty, $path: ty, $display_path: path, $err: ident) => {
($name: ident, $backend: ty, $path: ty, $display_path: path, $err: ident) => {
#[derive(Clone)]
pub struct $name {
id: String,
path: $path,
pmtiles: Arc<AsyncPmTilesReader<$backend, $cache>>,
pmtiles: Arc<AsyncPmTilesReader<$backend, PmtCache>>,
tilejson: TileJSON,
tile_info: TileInfo,
}
@ -31,7 +188,7 @@ macro_rules! impl_pmtiles_source {
async fn new_int(
id: String,
path: $path,
reader: AsyncPmTilesReader<$backend, $cache>,
reader: AsyncPmTilesReader<$backend, PmtCache>,
) -> FileResult<Self> {
let hdr = &reader.get_header();
@ -132,4 +289,53 @@ macro_rules! impl_pmtiles_source {
};
}
pub(crate) use impl_pmtiles_source;
impl_pmtiles_source!(
PmtHttpSource,
HttpBackend,
Url,
identity,
InvalidUrlMetadata
);
impl PmtHttpSource {
pub async fn new(client: Client, cache: PmtCache, id: String, url: Url) -> FileResult<Self> {
let reader = AsyncPmTilesReader::new_with_cached_url(cache, client, url.clone()).await;
let reader = reader.map_err(|e| FileError::PmtError(e, url.to_string()))?;
Self::new_int(id, url, reader).await
}
}
impl_pmtiles_source!(
PmtFileSource,
MmapBackend,
PathBuf,
Path::display,
InvalidMetadata
);
impl PmtFileSource {
pub async fn new(cache: PmtCache, id: String, path: PathBuf) -> FileResult<Self> {
let backend = MmapBackend::try_from(path.as_path())
.await
.map_err(|e| {
io::Error::new(
io::ErrorKind::Other,
format!("{e:?}: Cannot open file {}", path.display()),
)
})
.map_err(|e| IoError(e, path.clone()))?;
let reader = AsyncPmTilesReader::try_from_cached_source(backend, cache).await;
let reader = reader
.map_err(|e| {
io::Error::new(
io::ErrorKind::Other,
format!("{e:?}: Cannot open file {}", path.display()),
)
})
.map_err(|e| IoError(e, path.clone()))?;
Self::new_int(id, path, reader).await
}
}


@ -3,6 +3,7 @@ use std::collections::{BTreeMap, HashMap};
use std::fmt::Debug;
use std::path::PathBuf;
use async_trait::async_trait;
use futures::future::try_join_all;
use log::{info, warn};
use serde::{Deserialize, Serialize};
@ -13,7 +14,8 @@ use spreet::{
use tokio::io::AsyncReadExt;
use self::SpriteError::{SpriteInstError, SpriteParsingError, SpriteProcessingError};
use crate::file_config::{FileConfigEnum, FileResult};
use crate::config::UnrecognizedValues;
use crate::file_config::{ConfigExtras, FileConfigEnum, FileResult};
pub type SpriteResult<T> = Result<T, SpriteError>;
@ -57,12 +59,25 @@ pub struct CatalogSpriteEntry {
pub type SpriteCatalog = BTreeMap<String, CatalogSpriteEntry>;
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)]
pub struct SpriteConfig {
#[serde(flatten)]
pub unrecognized: UnrecognizedValues,
}
#[async_trait]
impl ConfigExtras for SpriteConfig {
fn get_unrecognized(&self) -> &UnrecognizedValues {
&self.unrecognized
}
}
#[derive(Debug, Clone, Default)]
pub struct SpriteSources(HashMap<String, SpriteSource>);
impl SpriteSources {
pub fn resolve(config: &mut FileConfigEnum) -> FileResult<Self> {
let Some(cfg) = config.extract_file_config() else {
pub fn resolve(config: &mut FileConfigEnum<SpriteConfig>) -> FileResult<Self> {
let Some(cfg) = config.extract_file_config()? else {
return Ok(Self::default());
};
@ -89,7 +104,7 @@ impl SpriteSources {
results.add_source(name.to_string_lossy().to_string(), path);
}
*config = FileConfigEnum::new_extended(directories, configs, cfg.unrecognized);
*config = FileConfigEnum::new_extended(directories, configs, cfg.custom);
Ok(results)
}


@ -8,7 +8,6 @@ use mbtiles::MbtError;
use crate::file_config::FileError;
#[cfg(feature = "fonts")]
use crate::fonts::FontError;
use crate::pg::PgError;
#[cfg(feature = "sprites")]
use crate::sprites::SpriteError;
@ -58,8 +57,9 @@ pub enum MartinError {
#[error("Unrecognizable connection strings: {0:?}")]
UnrecognizableConnections(Vec<String>),
#[cfg(feature = "postgres")]
#[error(transparent)]
PostgresError(#[from] PgError),
PostgresError(#[from] crate::pg::PgError),
#[error(transparent)]
MbtilesError(#[from] MbtError),


@ -1,28 +1,7 @@
use std::future::Future;
use std::io::{Read as _, Write as _};
use std::time::Duration;
use flate2::read::GzDecoder;
use flate2::write::GzEncoder;
use futures::pin_mut;
use tokio::time::timeout;
#[cfg(test)]
pub fn sorted_opt_set<S: serde::Serializer>(
value: &Option<std::collections::HashSet<String>>,
serializer: S,
) -> Result<S::Ok, S::Error> {
use serde::Serialize as _;
value
.as_ref()
.map(|v| {
let mut v: Vec<_> = v.iter().collect();
v.sort();
v
})
.serialize(serializer)
}
pub fn decode_gzip(data: &[u8]) -> Result<Vec<u8>, std::io::Error> {
let mut decoder = GzDecoder::new(data);
@ -49,17 +28,3 @@ pub fn encode_brotli(data: &[u8]) -> Result<Vec<u8>, std::io::Error> {
encoder.write_all(data)?;
Ok(encoder.into_inner())
}
pub async fn on_slow<T, S: FnOnce()>(
future: impl Future<Output = T>,
duration: Duration,
fn_on_slow: S,
) -> T {
pin_mut!(future);
if let Ok(result) = timeout(duration, &mut future).await {
result
} else {
fn_on_slow();
future.await
}
}


@ -1,3 +1,5 @@
#![cfg(feature = "postgres")]
use ctor::ctor;
use indoc::indoc;
use insta::assert_yaml_snapshot;


@ -1,3 +1,5 @@
#![cfg(feature = "postgres")]
use actix_http::Request;
use actix_web::http::StatusCode;
use actix_web::test::{call_and_read_body_json, call_service, read_body, TestRequest};


@ -1,3 +1,5 @@
#![cfg(feature = "postgres")]
use ctor::ctor;
use indoc::indoc;
use insta::assert_yaml_snapshot;


@ -1,6 +1,5 @@
use indoc::formatdoc;
pub use martin::args::Env;
use martin::pg::TableInfo;
use martin::{Config, IdResolver, ServerState, Source};
use crate::mock_cfg;
@ -28,11 +27,12 @@ pub async fn mock_sources(mut config: Config) -> MockSource {
(res, config)
}
#[cfg(feature = "postgres")]
#[allow(dead_code)]
#[must_use]
pub fn table<'a>(mock: &'a MockSource, name: &str) -> &'a TableInfo {
pub fn table<'a>(mock: &'a MockSource, name: &str) -> &'a martin::pg::TableInfo {
let (_, config) = mock;
let vals: Vec<&TableInfo> = config
let vals: Vec<&martin::pg::TableInfo> = config
.postgres
.iter()
.flat_map(|v| v.tables.iter().map(|vv| vv.get(name)))


@ -2,7 +2,7 @@ lints.workspace = true
[package]
name = "mbtiles"
version = "0.8.5"
version = "0.9.0"
authors = ["Yuri Astrakhan <YuriAstrakhan@gmail.com>", "MapLibre contributors"]
description = "A simple low-level MbTiles access and processing library, with some tile format detection and other relevant heuristics."
keywords = ["mbtiles", "maps", "tiles", "mvt", "tilejson"]


@ -166,6 +166,7 @@ postgres:
pmtiles:
dir_cache_size_mb: 100
paths:
- http://localhost:5412/webp2.pmtiles
sources:


@ -165,6 +165,7 @@ pmtiles:
pmt: tests/fixtures/pmtiles/stamen_toner__raster_CC-BY+ODbL_z3.pmtiles
pmt2: http://localhost:5412/webp2.pmtiles
webp2: http://localhost:5412/webp2.pmtiles
dir_cache_size_mb: 100
sprites:
paths: tests/fixtures/sprites/src1
sources: