Mirror of https://github.com/maplibre/martin.git, synced 2024-12-20 05:11:57 +03:00
PMTiles cache, refactor file configs, modularize (#1094)
* Make it possible to have configuration specific to the file-based config sections: pmtiles, mbtiles, and sprites.
* Implement a PMTiles directory cache shared between all pmtiles sources (both HTTP and local), with a configurable maximum cache size in MB, or 0 to disable it. Defaults to 32 MB.
* PMTiles sources now share a web client instance, which optimizes connection reuse when multiple pmtiles files reside on the same host.
* Major refactoring to allow modular reuse, enabling the following build features:
  * **postgres** - enable PostgreSQL/PostGIS tile sources
  * **pmtiles** - enable PMTile tile sources
  * **mbtiles** - enable MBTile tile sources
  * **fonts** - enable font sources
  * **sprites** - enable sprite sources
* Use the justfile in the CI.

Fixes #1093
This commit is contained in:
parent 7ff2f5105a
commit 2def6288f1
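As an illustration of the new per-section file configuration, a Martin config file can now carry settings that apply only to the pmtiles section. The sketch below is assembled from the config examples changed in this commit; the key names come from the diff, while the values are illustrative:

    pmtiles:
      # Memory (in MB) to use for caching PMTiles directories [default: 32, 0 to disable]
      dir_cache_size_mb: 100
      paths:
        - /dir-path
        - /path/to/pmtiles.pmtiles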
Cargo.lock (generated): 6 lines changed
@@ -1962,7 +1962,7 @@ dependencies = [

 [[package]]
 name = "martin"
-version = "0.11.6"
+version = "0.12.0"
 dependencies = [
 "actix-cors",
 "actix-http",
@@ -2014,7 +2014,7 @@ dependencies = [

 [[package]]
 name = "martin-tile-utils"
-version = "0.3.1"
+version = "0.4.0"
 dependencies = [
 "approx",
 "insta",
@@ -2022,7 +2022,7 @@ dependencies = [

 [[package]]
 name = "mbtiles"
-version = "0.8.5"
+version = "0.9.0"
 dependencies = [
 "actix-rt",
 "anyhow",
@@ -48,8 +48,8 @@ insta = "1"
 itertools = "0.12"
 json-patch = "1.2"
 log = "0.4"
-martin-tile-utils = { path = "./martin-tile-utils", version = "0.3.0" }
-mbtiles = { path = "./mbtiles", version = "0.8.0" }
+martin-tile-utils = { path = "./martin-tile-utils", version = "0.4.0" }
+mbtiles = { path = "./mbtiles", version = "0.9.0" }
 moka = { version = "0.12", features = ["future"] }
 num_cpus = "1"
 pbf_font_tools = { version = "2.5.0", features = ["freetype"] }
README.md: 10 lines changed
@@ -106,6 +106,16 @@ Martin data is available via the HTTP `GET` endpoints:
 | `/font/{font1},…,{fontN}/{start}-{end}` | Composite Font source |
 | `/health` | Martin server health check: returns 200 `OK` |

+## Re-use Martin as a library
+
+Martin can be used as a standalone server, or as a library in your own Rust application. When used as a library, you can use the following features:
+
+* **postgres** - enable PostgreSQL/PostGIS tile sources
+* **pmtiles** - enable PMTile tile sources
+* **mbtiles** - enable MBTile tile sources
+* **fonts** - enable font sources
+* **sprites** - enable sprite sources
+
 ## Documentation

 See [Martin book](https://maplibre.org/martin/) for complete documentation.
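To go with the new "Re-use Martin as a library" section, a downstream crate would opt into only the sources it needs by disabling default features. The snippet below is a minimal sketch in standard Cargo syntax; the crate name, version, and feature names come from this commit, while the exact dependency declaration is an illustrative assumption about how a consuming project might be set up:

    [dependencies]
    # Hypothetical downstream Cargo.toml entry: only MBTiles and PMTiles support enabled.
    martin = { version = "0.12.0", default-features = false, features = ["mbtiles", "pmtiles"] }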
debian/config.yaml (vendored): 4 lines changed
@@ -17,6 +17,7 @@ worker_processes: 8
 # auto_bounds: skip

 # pmtiles:
+# dir_cache_size_mb: 100
 # paths:
 # - /dir-path
 # - /path/to/pmtiles.pmtiles
@@ -32,6 +33,9 @@ worker_processes: 8
 # sources:
 # mb-src1: /path/to/mbtiles1.mbtiles

+# sprites:
+# - /path/to/sprites_dir
+
 # fonts:
 # - /path/to/font/file.ttf
 # - /path/to/font_dir
@@ -155,6 +155,8 @@ postgres:

 # Publish PMTiles files from local disk or proxy to a web server
 pmtiles:
+# Memory (in MB) to use for caching PMTiles directories [default: 32, 0 to disable]]
+dir_cache_size_mb: 100
 paths:
 # scan this whole dir, matching all *.pmtiles files
 - /dir-path
justfile: 13 lines changed
@@ -273,7 +273,16 @@ fmt2:

 # Run cargo check
 check:
-cargo check --workspace --all-targets --bins --tests --lib --benches
+RUSTFLAGS='-D warnings' cargo check --bins --tests --lib --benches --examples -p martin-tile-utils
+RUSTFLAGS='-D warnings' cargo check --bins --tests --lib --benches --examples -p mbtiles
+RUSTFLAGS='-D warnings' cargo check --bins --tests --lib --benches --examples -p mbtiles --no-default-features
+RUSTFLAGS='-D warnings' cargo check --bins --tests --lib --benches --examples -p martin
+RUSTFLAGS='-D warnings' cargo check --bins --tests --lib --benches --examples -p martin --no-default-features
+RUSTFLAGS='-D warnings' cargo check --bins --tests --lib --benches --examples -p martin --no-default-features --features fonts
+RUSTFLAGS='-D warnings' cargo check --bins --tests --lib --benches --examples -p martin --no-default-features --features mbtiles
+RUSTFLAGS='-D warnings' cargo check --bins --tests --lib --benches --examples -p martin --no-default-features --features pmtiles
+RUSTFLAGS='-D warnings' cargo check --bins --tests --lib --benches --examples -p martin --no-default-features --features postgres
+RUSTFLAGS='-D warnings' cargo check --bins --tests --lib --benches --examples -p martin --no-default-features --features sprites

 # Verify doc build
 check-doc:
@@ -289,7 +298,7 @@ clippy-md:
 'echo -e "/workdir/README.md\n$(find /workdir/docs/src -name "*.md")" | tr "\n" "\0" | xargs -0 -P 5 -n1 -I{} markdown-link-check --config /workdir/.github/files/markdown.links.config.json {}'

 # These steps automatically run before git push via a git hook
-git-pre-push: env-info restart fmt clippy check check-doc test
+git-pre-push: env-info restart fmt clippy check-doc test check

 # Get environment info
 [private]
@@ -2,7 +2,7 @@ lints.workspace = true

 [package]
 name = "martin-tile-utils"
-version = "0.3.1"
+version = "0.4.0"
 authors = ["Yuri Astrakhan <YuriAstrakhan@gmail.com>", "MapLibre contributors"]
 description = "Utilites to help with map tile processing, such as type and compression detection. Used by the MapLibre's Martin tile server."
 keywords = ["maps", "tiles", "mvt", "tileserver"]
@@ -3,7 +3,7 @@ lints.workspace = true
 [package]
 name = "martin"
 # Once the release is published with the hash, update https://github.com/maplibre/homebrew-martin
-version = "0.11.6"
+version = "0.12.0"
 authors = ["Stepan Kuzmin <to.stepan.kuzmin@gmail.com>", "Yuri Astrakhan <YuriAstrakhan@gmail.com>", "MapLibre contributors"]
 description = "Blazing fast and lightweight tile server with PostGIS, MBTiles, and PMTiles support"
 keywords = ["maps", "tiles", "mbtiles", "pmtiles", "postgis"]
@@ -59,9 +59,12 @@ name = "bench"
 harness = false

 [features]
-default = ["sprites", "fonts"]
-sprites = []
-fonts = []
+default = ["fonts", "mbtiles", "pmtiles", "postgres", "sprites"]
+fonts = ["dep:bit-set","dep:pbf_font_tools"]
+mbtiles = []
+pmtiles = ["dep:moka"]
+postgres = ["dep:deadpool-postgres", "dep:json-patch", "dep:postgis", "dep:postgres", "dep:postgres-protocol", "dep:semver", "dep:tokio-postgres-rustls"]
+sprites = ["dep:spreet"]
 bless-tests = []

 [dependencies]
@@ -70,41 +73,41 @@ actix-http.workspace = true
 actix-rt.workspace = true
 actix-web.workspace = true
 async-trait.workspace = true
-bit-set.workspace = true
+bit-set = { workspace = true, optional = true }
 brotli.workspace = true
 clap.workspace = true
-deadpool-postgres.workspace = true
+deadpool-postgres = { workspace = true, optional = true }
 env_logger.workspace = true
 flate2.workspace = true
 futures.workspace = true
 itertools.workspace = true
-json-patch.workspace = true
+json-patch = { workspace = true, optional = true }
 log.workspace = true
 martin-tile-utils.workspace = true
 mbtiles.workspace = true
-moka.workspace = true
+moka = { workspace = true, optional = true }
 num_cpus.workspace = true
-pbf_font_tools.workspace = true
+pbf_font_tools = { workspace = true, optional = true }
 pmtiles.workspace = true
-postgis.workspace = true
-postgres-protocol.workspace = true
-postgres.workspace = true
+postgis = { workspace = true, optional = true }
+postgres-protocol = { workspace = true, optional = true }
+postgres = { workspace = true, optional = true }
 regex.workspace = true
 reqwest.workspace = true
 rustls-native-certs.workspace = true
 rustls-pemfile.workspace = true
 rustls.workspace = true
-semver.workspace = true
+semver = { workspace = true, optional = true }
 serde.workspace = true
 serde_json.workspace = true
 serde_with.workspace = true
 serde_yaml.workspace = true
-spreet.workspace = true
+spreet = { workspace = true, optional = true }
 subst.workspace = true
 thiserror.workspace = true
 tilejson.workspace = true
 tokio = { workspace = true, features = ["io-std"] }
-tokio-postgres-rustls.workspace = true
+tokio-postgres-rustls = { workspace = true, optional = true }
 url.workspace = true

 [dev-dependencies]
@@ -4,7 +4,9 @@ pub use connections::{Arguments, State};
 mod environment;
 pub use environment::{Env, OsEnv};

+#[cfg(feature = "postgres")]
 mod pg;
+#[cfg(feature = "postgres")]
 pub use pg::{BoundsCalcType, PgArgs, DEFAULT_BOUNDS_TIMEOUT};

 mod root;
@@ -2,14 +2,12 @@ use std::path::PathBuf;

 use clap::Parser;
 use log::warn;
-use url::Url;

 use crate::args::connections::Arguments;
 use crate::args::environment::Env;
-use crate::args::pg::PgArgs;
 use crate::args::srv::SrvArgs;
-use crate::args::State::{Ignore, Share, Take};
 use crate::config::Config;
+#[cfg(any(feature = "mbtiles", feature = "pmtiles", feature = "sprites"))]
 use crate::file_config::FileConfigEnum;
 use crate::MartinError::ConfigAndConnectionsError;
 use crate::{MartinResult, OptOneMany};
@@ -27,8 +25,9 @@ pub struct Args {
 pub extras: ExtraArgs,
 #[command(flatten)]
 pub srv: SrvArgs,
+#[cfg(feature = "postgres")]
 #[command(flatten)]
-pub pg: Option<PgArgs>,
+pub pg: Option<crate::args::pg::PgArgs>,
 }

 // None of these params will be transferred to the config
@@ -80,7 +79,11 @@ impl Args {

 self.srv.merge_into_config(&mut config.srv);

+#[allow(unused_mut)]
 let mut cli_strings = Arguments::new(self.meta.connection);

+#[cfg(feature = "postgres")]
+{
 let pg_args = self.pg.unwrap_or_default();
 if config.postgres.is_none() {
 config.postgres = pg_args.into_config(&mut cli_strings, env);
@@ -88,11 +91,14 @@ impl Args {
 // config was loaded from a file, we can only apply a few CLI overrides to it
 pg_args.override_config(&mut config.postgres, env);
 }
+}

+#[cfg(feature = "pmtiles")]
 if !cli_strings.is_empty() {
 config.pmtiles = parse_file_args(&mut cli_strings, "pmtiles", true);
 }

+#[cfg(feature = "mbtiles")]
 if !cli_strings.is_empty() {
 config.mbtiles = parse_file_args(&mut cli_strings, "mbtiles", false);
 }
@@ -110,9 +116,10 @@ impl Args {
 }
 }

+#[cfg(any(feature = "pmtiles", feature = "mbtiles"))]
 fn is_url(s: &str, extension: &str) -> bool {
 if s.starts_with("http") {
-if let Ok(url) = Url::parse(s) {
+if let Ok(url) = url::Url::parse(s) {
 if url.scheme() == "http" || url.scheme() == "https" {
 if let Some(ext) = url.path().rsplit('.').next() {
 return ext == extension;
@@ -123,11 +130,14 @@ fn is_url(s: &str, extension: &str) -> bool {
 false
 }

-pub fn parse_file_args(
+#[cfg(any(feature = "pmtiles", feature = "mbtiles"))]
+pub fn parse_file_args<T: crate::file_config::ConfigExtras>(
 cli_strings: &mut Arguments,
 extension: &str,
 allow_url: bool,
-) -> FileConfigEnum {
+) -> FileConfigEnum<T> {
+use crate::args::State::{Ignore, Share, Take};
+
 let paths = cli_strings.process(|s| match PathBuf::try_from(s) {
 Ok(v) => {
 if allow_url && is_url(s, extension) {
@@ -149,9 +159,7 @@ pub fn parse_file_args(
 #[cfg(test)]
 mod tests {
 use super::*;
-use crate::pg::PgConfig;
-use crate::test_utils::{some, FauxEnv};
-use crate::utils::OptOneMany;
+use crate::test_utils::FauxEnv;
 use crate::MartinError::UnrecognizableConnections;

 fn parse(args: &[&str]) -> MartinResult<(Config, MetaArgs)> {
@@ -169,8 +177,12 @@ mod tests {
 assert_eq!(args, expected);
 }

+#[cfg(feature = "postgres")]
 #[test]
 fn cli_with_config() {
+use crate::test_utils::some;
+use crate::utils::OptOneMany;
+
 let args = parse(&["martin", "--config", "c.toml"]).unwrap();
 let meta = MetaArgs {
 config: Some(PathBuf::from("c.toml")),
@@ -188,7 +200,7 @@ mod tests {

 let args = parse(&["martin", "postgres://connection"]).unwrap();
 let cfg = Config {
-postgres: OptOneMany::One(PgConfig {
+postgres: OptOneMany::One(crate::pg::PgConfig {
 connection_string: some("postgres://connection"),
 ..Default::default()
 }),
@@ -11,7 +11,7 @@ use clap::Parser;
 use futures::stream::{self, StreamExt};
 use futures::TryStreamExt;
 use log::{debug, error, info, log_enabled};
-use martin::args::{Args, ExtraArgs, MetaArgs, OsEnv, PgArgs, SrvArgs};
+use martin::args::{Args, ExtraArgs, MetaArgs, OsEnv, SrvArgs};
 use martin::srv::{get_tile_content, merge_tilejson, RESERVED_KEYWORDS};
 use martin::{
 append_rect, read_config, Config, IdResolver, MartinError, MartinResult, ServerState, Source,
@@ -46,8 +46,9 @@ pub struct CopierArgs {
 pub copy: CopyArgs,
 #[command(flatten)]
 pub meta: MetaArgs,
+#[cfg(feature = "postgres")]
 #[command(flatten)]
-pub pg: Option<PgArgs>,
+pub pg: Option<martin::args::PgArgs>,
 }

 #[serde_with::serde_as]
@@ -137,6 +138,7 @@ async fn start(copy_args: CopierArgs) -> MartinCpResult<()> {
 meta: copy_args.meta,
 extras: ExtraArgs::default(),
 srv: SrvArgs::default(),
+#[cfg(feature = "postgres")]
 pg: copy_args.pg,
 };

@@ -11,15 +11,13 @@ use log::info;
 use serde::{Deserialize, Serialize};
 use subst::VariableMap;

-use crate::file_config::{resolve_files, resolve_files_urls, FileConfigEnum};
+#[cfg(any(feature = "mbtiles", feature = "pmtiles", feature = "sprites"))]
+use crate::file_config::FileConfigEnum;
 #[cfg(feature = "fonts")]
 use crate::fonts::FontSources;
-use crate::mbtiles::MbtSource;
-use crate::pg::PgConfig;
-use crate::pmtiles::{PmtFileSource, PmtHttpSource};
 use crate::source::{TileInfoSources, TileSources};
 #[cfg(feature = "sprites")]
-use crate::sprites::SpriteSources;
+use crate::sprites::{SpriteConfig, SpriteSources};
 use crate::srv::SrvConfig;
 use crate::MartinError::{ConfigLoadError, ConfigParseError, ConfigWriteError, NoSources};
 use crate::{IdResolver, MartinResult, OptOneMany};
@@ -39,18 +37,21 @@ pub struct Config {
 #[serde(flatten)]
 pub srv: SrvConfig,

+#[cfg(feature = "postgres")]
 #[serde(default, skip_serializing_if = "OptOneMany::is_none")]
-pub postgres: OptOneMany<PgConfig>,
+pub postgres: OptOneMany<crate::pg::PgConfig>,

+#[cfg(feature = "pmtiles")]
 #[serde(default, skip_serializing_if = "FileConfigEnum::is_none")]
-pub pmtiles: FileConfigEnum,
+pub pmtiles: FileConfigEnum<crate::pmtiles::PmtConfig>,

+#[cfg(feature = "mbtiles")]
 #[serde(default, skip_serializing_if = "FileConfigEnum::is_none")]
-pub mbtiles: FileConfigEnum,
+pub mbtiles: FileConfigEnum<crate::mbtiles::MbtConfig>,

 #[cfg(feature = "sprites")]
 #[serde(default, skip_serializing_if = "FileConfigEnum::is_none")]
-pub sprites: FileConfigEnum,
+pub sprites: FileConfigEnum<SpriteConfig>,

 #[serde(default, skip_serializing_if = "OptOneMany::is_none")]
 pub fonts: OptOneMany<PathBuf>,
@@ -65,20 +66,33 @@ impl Config {
 let mut res = UnrecognizedValues::new();
 copy_unrecognized_config(&mut res, "", &self.unrecognized);

+#[cfg(feature = "postgres")]
 for pg in self.postgres.iter_mut() {
 res.extend(pg.finalize()?);
 }

+#[cfg(feature = "pmtiles")]
 res.extend(self.pmtiles.finalize("pmtiles.")?);

+#[cfg(feature = "mbtiles")]
 res.extend(self.mbtiles.finalize("mbtiles.")?);

 #[cfg(feature = "sprites")]
 res.extend(self.sprites.finalize("sprites.")?);

 // TODO: support for unrecognized fonts?
 // res.extend(self.fonts.finalize("fonts.")?);

-let is_empty =
-self.postgres.is_empty() && self.pmtiles.is_empty() && self.mbtiles.is_empty();
+let is_empty = true;
+
+#[cfg(feature = "postgres")]
+let is_empty = is_empty && self.postgres.is_empty();
+
+#[cfg(feature = "pmtiles")]
+let is_empty = is_empty && self.pmtiles.is_empty();
+
+#[cfg(feature = "mbtiles")]
+let is_empty = is_empty && self.mbtiles.is_empty();

 #[cfg(feature = "sprites")]
 let is_empty = is_empty && self.sprites.is_empty();
@@ -103,26 +117,30 @@ impl Config {
 })
 }

-async fn resolve_tile_sources(&mut self, idr: IdResolver) -> MartinResult<TileSources> {
-let new_pmt_src = &mut PmtFileSource::new_box;
-let new_pmt_url_src = &mut PmtHttpSource::new_url_box;
-let new_mbt_src = &mut MbtSource::new_box;
+async fn resolve_tile_sources(
+&mut self,
+#[allow(unused_variables)] idr: IdResolver,
+) -> MartinResult<TileSources> {
+#[allow(unused_mut)]
 let mut sources: Vec<Pin<Box<dyn Future<Output = MartinResult<TileInfoSources>>>>> =
 Vec::new();

+#[cfg(feature = "postgres")]
 for s in self.postgres.iter_mut() {
 sources.push(Box::pin(s.resolve(idr.clone())));
 }

+#[cfg(feature = "pmtiles")]
 if !self.pmtiles.is_empty() {
 let cfg = &mut self.pmtiles;
-let val = resolve_files_urls(cfg, idr.clone(), "pmtiles", new_pmt_src, new_pmt_url_src);
+let val = crate::file_config::resolve_files(cfg, idr.clone(), "pmtiles");
 sources.push(Box::pin(val));
 }

+#[cfg(feature = "mbtiles")]
 if !self.mbtiles.is_empty() {
 let cfg = &mut self.mbtiles;
-let val = resolve_files(cfg, idr.clone(), "mbtiles", new_mbt_src);
+let val = crate::file_config::resolve_files(cfg, idr.clone(), "mbtiles");
 sources.push(Box::pin(val));
 }

@@ -183,6 +201,7 @@ where
 subst::yaml::from_str(contents, env).map_err(|e| ConfigParseError(e, file_name.into()))
 }

+#[cfg(feature = "postgres")]
 #[cfg(test)]
 pub mod tests {
 use super::*;
@@ -1,8 +1,9 @@
 use std::collections::{BTreeMap, HashSet};
-use std::future::Future;
+use std::fmt::Debug;
 use std::mem;
 use std::path::{Path, PathBuf};

+use async_trait::async_trait;
 use futures::TryFutureExt;
 use log::{info, warn};
 use serde::{Deserialize, Serialize};
@@ -39,36 +40,60 @@ pub enum FileError {
 #[error(r"Unable to parse metadata in file {1}: {0}")]
 InvalidUrlMetadata(String, Url),

-#[error(r#"Unable to aquire connection to file: {0}"#)]
-AquireConnError(String),
+#[error(r#"Unable to acquire connection to file: {0}"#)]
+AcquireConnError(String),

 #[error(r#"PMTiles error {0} processing {1}"#)]
 PmtError(pmtiles::PmtError, String),
 }

+pub trait ConfigExtras: Clone + Debug + Default + PartialEq + Send {
+fn init_parsing(&mut self) -> FileResult<()> {
+Ok(())
+}
+
+#[must_use]
+fn is_default(&self) -> bool {
+true
+}
+
+fn get_unrecognized(&self) -> &UnrecognizedValues;
+}
+
+#[async_trait]
+pub trait SourceConfigExtras: ConfigExtras {
+#[must_use]
+fn parse_urls() -> bool {
+false
+}
+async fn new_sources(&self, id: String, path: PathBuf) -> FileResult<Box<dyn Source>>;
+
+async fn new_sources_url(&self, id: String, url: Url) -> FileResult<Box<dyn Source>>;
+}
+
 #[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)]
 #[serde(untagged)]
-pub enum FileConfigEnum {
+pub enum FileConfigEnum<T> {
 #[default]
 None,
 Path(PathBuf),
 Paths(Vec<PathBuf>),
-Config(FileConfig),
+Config(FileConfig<T>),
 }

-impl FileConfigEnum {
+impl<T: ConfigExtras> FileConfigEnum<T> {
 #[must_use]
-pub fn new(paths: Vec<PathBuf>) -> FileConfigEnum {
-Self::new_extended(paths, BTreeMap::new(), UnrecognizedValues::new())
+pub fn new(paths: Vec<PathBuf>) -> FileConfigEnum<T> {
+Self::new_extended(paths, BTreeMap::new(), T::default())
 }

 #[must_use]
 pub fn new_extended(
 paths: Vec<PathBuf>,
 configs: BTreeMap<String, FileConfigSrc>,
-unrecognized: UnrecognizedValues,
-) -> FileConfigEnum {
-if configs.is_empty() && unrecognized.is_empty() {
+custom: T,
+) -> Self {
+if configs.is_empty() && custom.is_default() {
 match paths.len() {
 0 => FileConfigEnum::None,
 1 => FileConfigEnum::Path(paths.into_iter().next().unwrap()),
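For orientation, the new ConfigExtras / SourceConfigExtras traits introduced above are the hook that lets each file-based section (pmtiles, mbtiles, sprites) carry its own settings and construct its own sources. A minimal implementor, condensed from the MbtConfig implementation added later in this commit, might look roughly like the sketch below; MyConfig and MySource are hypothetical names, not code from the commit:

    // Hypothetical per-section config with no extra options of its own;
    // unrecognized keys are collected so they can be reported during finalize().
    #[derive(Clone, Debug, Default, PartialEq, serde::Serialize, serde::Deserialize)]
    pub struct MyConfig {
        #[serde(flatten)]
        pub unrecognized: UnrecognizedValues,
    }

    impl ConfigExtras for MyConfig {
        fn get_unrecognized(&self) -> &UnrecognizedValues {
            &self.unrecognized
        }
    }

    #[async_trait]
    impl SourceConfigExtras for MyConfig {
        // Build a tile source from a local file path.
        async fn new_sources(&self, id: String, path: PathBuf) -> FileResult<Box<dyn Source>> {
            Ok(Box::new(MySource::new(id, path).await?))
        }

        // This section does not accept URLs (parse_urls() keeps its default of false),
        // so the URL constructor is never reached.
        async fn new_sources_url(&self, _id: String, _url: Url) -> FileResult<Box<dyn Source>> {
            unreachable!()
        }
    }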
@@ -82,7 +107,7 @@ impl FileConfigEnum {
 } else {
 Some(configs)
 },
-unrecognized,
+custom,
 })
 }
 }
@@ -102,25 +127,27 @@ impl FileConfigEnum {
 }
 }

-pub fn extract_file_config(&mut self) -> Option<FileConfig> {
-match self {
-FileConfigEnum::None => None,
-FileConfigEnum::Path(path) => Some(FileConfig {
+pub fn extract_file_config(&mut self) -> FileResult<Option<FileConfig<T>>> {
+let mut res = match self {
+FileConfigEnum::None => return Ok(None),
+FileConfigEnum::Path(path) => FileConfig {
 paths: One(mem::take(path)),
 ..FileConfig::default()
-}),
-FileConfigEnum::Paths(paths) => Some(FileConfig {
+},
+FileConfigEnum::Paths(paths) => FileConfig {
 paths: Many(mem::take(paths)),
 ..Default::default()
-}),
-FileConfigEnum::Config(cfg) => Some(mem::take(cfg)),
-}
+},
+FileConfigEnum::Config(cfg) => mem::take(cfg),
+};
+res.custom.init_parsing()?;
+Ok(Some(res))
 }

 pub fn finalize(&self, prefix: &str) -> MartinResult<UnrecognizedValues> {
 let mut res = UnrecognizedValues::new();
 if let Self::Config(cfg) = self {
-copy_unrecognized_config(&mut res, prefix, &cfg.unrecognized);
+copy_unrecognized_config(&mut res, prefix, cfg.get_unrecognized());
 }
 Ok(res)
 }
@@ -128,20 +155,28 @@ impl FileConfigEnum {

 #[serde_with::skip_serializing_none]
 #[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)]
-pub struct FileConfig {
+pub struct FileConfig<T> {
 /// A list of file paths
 #[serde(default, skip_serializing_if = "OptOneMany::is_none")]
 pub paths: OptOneMany<PathBuf>,
 /// A map of source IDs to file paths or config objects
 pub sources: Option<BTreeMap<String, FileConfigSrc>>,
+/// Any customizations related to the specifics of the configuration section
 #[serde(flatten)]
-pub unrecognized: UnrecognizedValues,
+pub custom: T,
 }

-impl FileConfig {
+impl<T: ConfigExtras> FileConfig<T> {
 #[must_use]
 pub fn is_empty(&self) -> bool {
-self.paths.is_none() && self.sources.is_none()
+self.paths.is_none()
+&& self.sources.is_none()
+&& self.get_unrecognized().is_empty()
+&& self.custom.is_default()
+}
+
+pub fn get_unrecognized(&self) -> &UnrecognizedValues {
+self.custom.get_unrecognized()
 }
 }

@@ -181,54 +216,22 @@ pub struct FileConfigSource {
 pub path: PathBuf,
 }

-async fn dummy_resolver(_id: String, _url: Url) -> FileResult<Box<dyn Source>> {
-unreachable!()
-}
-
-pub async fn resolve_files<Fut>(
-config: &mut FileConfigEnum,
+pub async fn resolve_files<T: SourceConfigExtras>(
+config: &mut FileConfigEnum<T>,
 idr: IdResolver,
 extension: &str,
-new_source: &mut impl FnMut(String, PathBuf) -> Fut,
-) -> MartinResult<TileInfoSources>
-where
-Fut: Future<Output = Result<Box<dyn Source>, FileError>>,
-{
-let dummy = &mut dummy_resolver;
-resolve_int(config, idr, extension, false, new_source, dummy)
+) -> MartinResult<TileInfoSources> {
+resolve_int(config, idr, extension)
 .map_err(crate::MartinError::from)
 .await
 }

-pub async fn resolve_files_urls<Fut1, Fut2>(
-config: &mut FileConfigEnum,
+async fn resolve_int<T: SourceConfigExtras>(
+config: &mut FileConfigEnum<T>,
 idr: IdResolver,
 extension: &str,
-new_source: &mut impl FnMut(String, PathBuf) -> Fut1,
-new_url_source: &mut impl FnMut(String, Url) -> Fut2,
-) -> MartinResult<TileInfoSources>
-where
-Fut1: Future<Output = Result<Box<dyn Source>, FileError>>,
-Fut2: Future<Output = Result<Box<dyn Source>, FileError>>,
-{
-resolve_int(config, idr, extension, true, new_source, new_url_source)
-.map_err(crate::MartinError::from)
-.await
-}
-
-async fn resolve_int<Fut1, Fut2>(
-config: &mut FileConfigEnum,
-idr: IdResolver,
-extension: &str,
-parse_urls: bool,
-new_source: &mut impl FnMut(String, PathBuf) -> Fut1,
-new_url_source: &mut impl FnMut(String, Url) -> Fut2,
-) -> FileResult<TileInfoSources>
-where
-Fut1: Future<Output = Result<Box<dyn Source>, FileError>>,
-Fut2: Future<Output = Result<Box<dyn Source>, FileError>>,
-{
-let Some(cfg) = config.extract_file_config() else {
+) -> FileResult<TileInfoSources> {
+let Some(cfg) = config.extract_file_config()? else {
 return Ok(TileInfoSources::default());
 };

@@ -239,12 +242,12 @@ where

 if let Some(sources) = cfg.sources {
 for (id, source) in sources {
-if let Some(url) = parse_url(parse_urls, source.get_path())? {
+if let Some(url) = parse_url(T::parse_urls(), source.get_path())? {
 let dup = !files.insert(source.get_path().clone());
 let dup = if dup { "duplicate " } else { "" };
 let id = idr.resolve(&id, url.to_string());
 configs.insert(id.clone(), source);
-results.push(new_url_source(id.clone(), url.clone()).await?);
+results.push(cfg.custom.new_sources_url(id.clone(), url.clone()).await?);
 info!("Configured {dup}source {id} from {}", sanitize_url(&url));
 } else {
 let can = source.abs_path()?;
@@ -258,13 +261,13 @@ where
 let id = idr.resolve(&id, can.to_string_lossy().to_string());
 info!("Configured {dup}source {id} from {}", can.display());
 configs.insert(id.clone(), source.clone());
-results.push(new_source(id, source.into_path()).await?);
+results.push(cfg.custom.new_sources(id, source.into_path()).await?);
 }
 }
 }

 for path in cfg.paths {
-if let Some(url) = parse_url(parse_urls, &path)? {
+if let Some(url) = parse_url(T::parse_urls(), &path)? {
 let id = url
 .path_segments()
 .and_then(Iterator::last)
@@ -278,7 +281,7 @@ where

 let id = idr.resolve(id, url.to_string());
 configs.insert(id.clone(), FileConfigSrc::Path(path));
-results.push(new_url_source(id.clone(), url.clone()).await?);
+results.push(cfg.custom.new_sources_url(id.clone(), url.clone()).await?);
 info!("Configured source {id} from URL {}", sanitize_url(&url));
 } else {
 let is_dir = path.is_dir();
@@ -307,12 +310,12 @@ where
 info!("Configured source {id} from {}", can.display());
 files.insert(can);
 configs.insert(id.clone(), FileConfigSrc::Path(path.clone()));
-results.push(new_source(id, path).await?);
+results.push(cfg.custom.new_sources(id, path).await?);
 }
 }
 }

-*config = FileConfigEnum::new_extended(directories, configs, cfg.unrecognized);
+*config = FileConfigEnum::new_extended(directories, configs, cfg.custom);

 Ok(results)
 }
@@ -351,70 +354,3 @@ fn parse_url(is_enabled: bool, path: &Path) -> Result<Option<Url>, FileError> {
 .map(|v| Url::parse(v).map_err(|e| InvalidSourceUrl(e, v.to_string())))
 .transpose()
 }
-
-#[cfg(test)]
-mod tests {
-use std::collections::BTreeMap;
-use std::path::PathBuf;
-
-use indoc::indoc;
-
-use crate::file_config::{FileConfigEnum, FileConfigSource, FileConfigSrc};
-
-#[test]
-fn parse() {
-let cfg = serde_yaml::from_str::<FileConfigEnum>(indoc! {"
-paths:
-- /dir-path
-- /path/to/file2.ext
-- http://example.org/file.ext
-sources:
-pm-src1: /tmp/file.ext
-pm-src2:
-path: /tmp/file.ext
-pm-src3: https://example.org/file3.ext
-pm-src4:
-path: https://example.org/file4.ext
-"})
-.unwrap();
-let res = cfg.finalize("").unwrap();
-assert!(res.is_empty(), "unrecognized config: {res:?}");
-let FileConfigEnum::Config(cfg) = cfg else {
-panic!();
-};
-let paths = cfg.paths.clone().into_iter().collect::<Vec<_>>();
-assert_eq!(
-paths,
-vec![
-PathBuf::from("/dir-path"),
-PathBuf::from("/path/to/file2.ext"),
-PathBuf::from("http://example.org/file.ext"),
-]
-);
-assert_eq!(
-cfg.sources,
-Some(BTreeMap::from_iter(vec![
-(
-"pm-src1".to_string(),
-FileConfigSrc::Path(PathBuf::from("/tmp/file.ext"))
-),
-(
-"pm-src2".to_string(),
-FileConfigSrc::Obj(FileConfigSource {
-path: PathBuf::from("/tmp/file.ext"),
-})
-),
-(
-"pm-src3".to_string(),
-FileConfigSrc::Path(PathBuf::from("https://example.org/file3.ext"))
-),
-(
-"pm-src4".to_string(),
-FileConfigSrc::Obj(FileConfigSource {
-path: PathBuf::from("https://example.org/file4.ext"),
-})
-),
-]))
-);
-}
-}
@@ -17,8 +17,11 @@ pub mod args;
 pub mod file_config;
 #[cfg(feature = "fonts")]
 pub mod fonts;
+#[cfg(feature = "mbtiles")]
 pub mod mbtiles;
+#[cfg(feature = "postgres")]
 pub mod pg;
+#[cfg(feature = "pmtiles")]
 pub mod pmtiles;
 #[cfg(feature = "sprites")]
 pub mod sprites;
@@ -7,13 +7,39 @@ use async_trait::async_trait;
 use log::trace;
 use martin_tile_utils::TileInfo;
 use mbtiles::MbtilesPool;
+use serde::{Deserialize, Serialize};
 use tilejson::TileJSON;
+use url::Url;

-use crate::file_config::FileError::{AquireConnError, InvalidMetadata, IoError};
-use crate::file_config::FileResult;
+use crate::config::UnrecognizedValues;
+use crate::file_config::FileError::{AcquireConnError, InvalidMetadata, IoError};
+use crate::file_config::{ConfigExtras, FileResult, SourceConfigExtras};
 use crate::source::{TileData, UrlQuery};
 use crate::{MartinResult, Source, TileCoord};

+#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)]
+pub struct MbtConfig {
+#[serde(flatten)]
+pub unrecognized: UnrecognizedValues,
+}
+
+impl ConfigExtras for MbtConfig {
+fn get_unrecognized(&self) -> &UnrecognizedValues {
+&self.unrecognized
+}
+}
+
+#[async_trait]
+impl SourceConfigExtras for MbtConfig {
+async fn new_sources(&self, id: String, path: PathBuf) -> FileResult<Box<dyn Source>> {
+Ok(Box::new(MbtSource::new(id, path).await?))
+}
+
+async fn new_sources_url(&self, _id: String, _url: Url) -> FileResult<Box<dyn Source>> {
+unreachable!()
+}
+}
+
 #[derive(Clone)]
 pub struct MbtSource {
 id: String,
@@ -34,10 +60,6 @@ impl Debug for MbtSource {
 }

 impl MbtSource {
-pub async fn new_box(id: String, path: PathBuf) -> FileResult<Box<dyn Source>> {
-Ok(Box::new(MbtSource::new(id, path).await?))
-}
-
 async fn new(id: String, path: PathBuf) -> FileResult<Self> {
 let mbt = MbtilesPool::new(&path)
 .await
@@ -90,7 +112,7 @@ impl Source for MbtSource {
 .mbtiles
 .get_tile(xyz.z, xyz.x, xyz.y)
 .await
-.map_err(|_| AquireConnError(self.id.clone()))?
+.map_err(|_| AcquireConnError(self.id.clone()))?
 {
 Ok(tile)
 } else {
@@ -105,3 +127,71 @@ impl Source for MbtSource {
 }
 }
 }
+
+#[cfg(test)]
+mod tests {
+use std::collections::BTreeMap;
+use std::path::PathBuf;
+
+use indoc::indoc;
+
+use crate::file_config::{FileConfigEnum, FileConfigSource, FileConfigSrc};
+use crate::mbtiles::MbtConfig;
+
+#[test]
+fn parse() {
+let cfg = serde_yaml::from_str::<FileConfigEnum<MbtConfig>>(indoc! {"
+paths:
+- /dir-path
+- /path/to/file2.ext
+- http://example.org/file.ext
+sources:
+pm-src1: /tmp/file.ext
+pm-src2:
+path: /tmp/file.ext
+pm-src3: https://example.org/file3.ext
+pm-src4:
+path: https://example.org/file4.ext
+"})
+.unwrap();
+let res = cfg.finalize("").unwrap();
+assert!(res.is_empty(), "unrecognized config: {res:?}");
+let FileConfigEnum::Config(cfg) = cfg else {
+panic!();
+};
+let paths = cfg.paths.clone().into_iter().collect::<Vec<_>>();
+assert_eq!(
+paths,
+vec![
+PathBuf::from("/dir-path"),
+PathBuf::from("/path/to/file2.ext"),
+PathBuf::from("http://example.org/file.ext"),
+]
+);
+assert_eq!(
+cfg.sources,
+Some(BTreeMap::from_iter(vec![
+(
+"pm-src1".to_string(),
+FileConfigSrc::Path(PathBuf::from("/tmp/file.ext"))
+),
+(
+"pm-src2".to_string(),
+FileConfigSrc::Obj(FileConfigSource {
+path: PathBuf::from("/tmp/file.ext"),
+})
+),
+(
+"pm-src3".to_string(),
+FileConfigSrc::Path(PathBuf::from("https://example.org/file3.ext"))
+),
+(
+"pm-src4".to_string(),
+FileConfigSrc::Obj(FileConfigSource {
+path: PathBuf::from("https://example.org/file4.ext"),
+})
+),
+]))
+);
+}
+}
@@ -11,9 +11,10 @@ use crate::config::{copy_unrecognized_config, UnrecognizedValues};
 use crate::pg::config_function::FuncInfoSources;
 use crate::pg::config_table::TableInfoSources;
 use crate::pg::configurator::PgBuilder;
+use crate::pg::utils::on_slow;
 use crate::pg::PgResult;
 use crate::source::TileInfoSources;
-use crate::utils::{on_slow, IdResolver, OptBoolObj, OptOneMany};
+use crate::utils::{IdResolver, OptBoolObj, OptOneMany};
 use crate::MartinResult;

 pub trait PgInfo {
@@ -36,7 +36,7 @@ pub struct PgBuilderFuncs {
 #[derive(Debug, Default, PartialEq)]
 #[cfg_attr(test, serde_with::skip_serializing_none, derive(serde::Serialize))]
 pub struct PgBuilderTables {
-#[cfg_attr(test, serde(serialize_with = "crate::utils::sorted_opt_set"))]
+#[cfg_attr(test, serde(serialize_with = "crate::pg::utils::sorted_opt_set"))]
 schemas: Option<HashSet<String>>,
 source_id_format: String,
 id_columns: Option<Vec<String>>,
@@ -1,13 +1,48 @@
 use std::collections::{BTreeMap, HashMap};
+use std::future::Future;
+use std::time::Duration;

 use deadpool_postgres::tokio_postgres::types::Json;
+use futures::pin_mut;
 use itertools::Itertools as _;
 use log::{error, info, warn};
 use postgis::{ewkb, LineString, Point, Polygon};
 use tilejson::{Bounds, TileJSON};
+use tokio::time::timeout;

 use crate::source::UrlQuery;

+#[cfg(test)]
+pub fn sorted_opt_set<S: serde::Serializer>(
+value: &Option<std::collections::HashSet<String>>,
+serializer: S,
+) -> Result<S::Ok, S::Error> {
+use serde::Serialize as _;
+
+value
+.as_ref()
+.map(|v| {
+let mut v: Vec<_> = v.iter().collect();
+v.sort();
+v
+})
+.serialize(serializer)
+}
+
+pub async fn on_slow<T, S: FnOnce()>(
+future: impl Future<Output = T>,
+duration: Duration,
+fn_on_slow: S,
+) -> T {
+pin_mut!(future);
+if let Ok(result) = timeout(duration, &mut future).await {
+result
+} else {
+fn_on_slow();
+future.await
+}
+}
+
 #[must_use]
 pub fn json_to_hashmap(value: &serde_json::Value) -> InfoMap<String> {
 let mut result = BTreeMap::new();
@@ -1,58 +0,0 @@
-use std::fmt::{Debug, Formatter};
-use std::io;
-use std::path::{Path, PathBuf};
-use std::sync::Arc;
-
-use async_trait::async_trait;
-use log::{trace, warn};
-use martin_tile_utils::{Encoding, Format, TileInfo};
-use pmtiles::async_reader::AsyncPmTilesReader;
-use pmtiles::cache::NoCache;
-use pmtiles::mmap::MmapBackend;
-use pmtiles::{Compression, TileType};
-use tilejson::TileJSON;
-
-use crate::file_config::FileError::{InvalidMetadata, IoError};
-use crate::file_config::FileResult;
-use crate::pmtiles::impl_pmtiles_source;
-use crate::source::{Source, UrlQuery};
-use crate::{MartinResult, TileCoord, TileData};
-
-impl_pmtiles_source!(
-PmtFileSource,
-MmapBackend,
-NoCache,
-PathBuf,
-Path::display,
-InvalidMetadata
-);
-
-impl PmtFileSource {
-pub async fn new_box(id: String, path: PathBuf) -> FileResult<Box<dyn Source>> {
-Ok(Box::new(PmtFileSource::new(id, path).await?))
-}
-
-async fn new(id: String, path: PathBuf) -> FileResult<Self> {
-let backend = MmapBackend::try_from(path.as_path())
-.await
-.map_err(|e| {
-io::Error::new(
-io::ErrorKind::Other,
-format!("{e:?}: Cannot open file {}", path.display()),
-)
-})
-.map_err(|e| IoError(e, path.clone()))?;
-
-let reader = AsyncPmTilesReader::try_from_source(backend).await;
-let reader = reader
-.map_err(|e| {
-io::Error::new(
-io::ErrorKind::Other,
-format!("{e:?}: Cannot open file {}", path.display()),
-)
-})
-.map_err(|e| IoError(e, path.clone()))?;
-
-Self::new_int(id, path, reader).await
-}
-}
@@ -1,76 +0,0 @@
-use std::convert::identity;
-use std::fmt::{Debug, Formatter};
-use std::sync::Arc;
-
-use async_trait::async_trait;
-use log::{trace, warn};
-use martin_tile_utils::{Encoding, Format, TileInfo};
-use moka::future::Cache;
-use pmtiles::async_reader::AsyncPmTilesReader;
-use pmtiles::cache::{DirCacheResult, DirectoryCache};
-use pmtiles::http::HttpBackend;
-use pmtiles::{Compression, Directory, TileType};
-use reqwest::Client;
-use tilejson::TileJSON;
-use url::Url;
-
-use crate::file_config::FileError::InvalidUrlMetadata;
-use crate::file_config::{FileError, FileResult};
-use crate::pmtiles::impl_pmtiles_source;
-use crate::source::{Source, UrlQuery};
-use crate::{MartinResult, TileCoord, TileData};
-
-struct PmtCache(Cache<usize, Directory>);
-
-impl PmtCache {
-fn new(max_capacity: u64) -> Self {
-Self(
-Cache::builder()
-.weigher(|_key, value: &Directory| -> u32 {
-value.get_approx_byte_size().try_into().unwrap_or(u32::MAX)
-})
-.max_capacity(max_capacity)
-.build(),
-)
-}
-}
-
-#[async_trait]
-impl DirectoryCache for PmtCache {
-async fn get_dir_entry(&self, offset: usize, tile_id: u64) -> DirCacheResult {
-match self.0.get(&offset).await {
-Some(dir) => dir.find_tile_id(tile_id).into(),
-None => DirCacheResult::NotCached,
-}
-}
-
-async fn insert_dir(&self, offset: usize, directory: Directory) {
-self.0.insert(offset, directory).await;
-}
-}
-
-impl_pmtiles_source!(
-PmtHttpSource,
-HttpBackend,
-PmtCache,
-Url,
-identity,
-InvalidUrlMetadata
-);
-
-impl PmtHttpSource {
-pub async fn new_url_box(id: String, url: Url) -> FileResult<Box<dyn Source>> {
-let client = Client::new();
-let cache = PmtCache::new(4 * 1024 * 1024);
-Ok(Box::new(
-PmtHttpSource::new_url(client, cache, id, url).await?,
-))
-}
-
-async fn new_url(client: Client, cache: PmtCache, id: String, url: Url) -> FileResult<Self> {
-let reader = AsyncPmTilesReader::new_with_cached_url(cache, client, url.clone()).await;
-let reader = reader.map_err(|e| FileError::PmtError(e, url.to_string()))?;
-
-Self::new_int(id, url, reader).await
-}
-}
@@ -1,16 +1,173 @@
-mod file_pmtiles;
-mod http_pmtiles;
-
-pub use file_pmtiles::PmtFileSource;
-pub use http_pmtiles::PmtHttpSource;
+use std::convert::identity;
+use std::fmt::{Debug, Formatter};
+use std::io;
+use std::path::{Path, PathBuf};
+use std::sync::atomic::AtomicUsize;
+use std::sync::atomic::Ordering::Relaxed;
+use std::sync::Arc;
+
+use async_trait::async_trait;
+use log::{trace, warn};
+use martin_tile_utils::{Encoding, Format, TileInfo};
+use moka::future::Cache;
+use pmtiles::async_reader::AsyncPmTilesReader;
+use pmtiles::cache::{DirCacheResult, DirectoryCache};
+use pmtiles::http::HttpBackend;
+use pmtiles::mmap::MmapBackend;
+use pmtiles::{Compression, Directory, TileType};
+use reqwest::Client;
+use serde::{Deserialize, Serialize};
+use tilejson::TileJSON;
+use url::Url;
+
+use crate::config::UnrecognizedValues;
+use crate::file_config::FileError::{InvalidMetadata, InvalidUrlMetadata, IoError};
+use crate::file_config::{ConfigExtras, FileError, FileResult, SourceConfigExtras};
+use crate::source::UrlQuery;
+use crate::{MartinResult, Source, TileCoord, TileData};
+
+type PmtCacheObject = Cache<(usize, usize), Directory>;
+
+#[derive(Clone, Debug)]
+pub struct PmtCache {
+    id: usize,
+    /// (id, offset) -> Directory, or None to disable caching
+    cache: Option<PmtCacheObject>,
+}
+
+impl PmtCache {
+    #[must_use]
+    pub fn new(id: usize, cache: Option<PmtCacheObject>) -> Self {
+        Self { id, cache }
+    }
+}
+
+#[async_trait]
+impl DirectoryCache for PmtCache {
+    async fn get_dir_entry(&self, offset: usize, tile_id: u64) -> DirCacheResult {
+        if let Some(cache) = &self.cache {
+            if let Some(dir) = cache.get(&(self.id, offset)).await {
+                return dir.find_tile_id(tile_id).into();
+            }
+        }
+        DirCacheResult::NotCached
+    }
+
+    async fn insert_dir(&self, offset: usize, directory: Directory) {
+        if let Some(cache) = &self.cache {
+            cache.insert((self.id, offset), directory).await;
+        }
+    }
+}
+
+#[serde_with::skip_serializing_none]
+#[derive(Debug, Default, Serialize, Deserialize)]
+pub struct PmtConfig {
+    pub dir_cache_size_mb: Option<u64>,
+
+    #[serde(flatten)]
+    pub unrecognized: UnrecognizedValues,
+
+    //
+    // The rest are internal state, not serialized
+    //
+    #[serde(skip)]
+    pub client: Option<Client>,
+
+    #[serde(skip)]
+    pub next_cache_id: AtomicUsize,
+
+    #[serde(skip)]
+    pub cache: Option<PmtCacheObject>,
+}
+
+impl PartialEq for PmtConfig {
+    fn eq(&self, other: &Self) -> bool {
+        self.dir_cache_size_mb == other.dir_cache_size_mb && self.unrecognized == other.unrecognized
+    }
+}
+
+impl Clone for PmtConfig {
+    fn clone(&self) -> Self {
+        // State is not shared between clones, only the serialized config
+        Self {
+            dir_cache_size_mb: self.dir_cache_size_mb,
+            unrecognized: self.unrecognized.clone(),
+            ..Default::default()
+        }
+    }
+}
+
+impl PmtConfig {
+    /// Create a new cache object for a source, giving it a unique internal ID
+    /// and a reference to the global cache.
+    pub fn new_cached_source(&self) -> PmtCache {
+        PmtCache::new(self.next_cache_id.fetch_add(1, Relaxed), self.cache.clone())
+    }
+}
+
+impl ConfigExtras for PmtConfig {
+    fn init_parsing(&mut self) -> FileResult<()> {
+        assert!(self.client.is_none());
+        assert!(self.cache.is_none());
+
+        self.client = Some(Client::new());
+
+        // Allow cache size to be disabled with 0
+        let cache_size = self.dir_cache_size_mb.unwrap_or(32) * 1024 * 1024;
+        if cache_size > 0 {
+            self.cache = Some(
+                Cache::builder()
+                    .weigher(|_key, value: &Directory| -> u32 {
+                        value.get_approx_byte_size().try_into().unwrap_or(u32::MAX)
+                    })
+                    .max_capacity(cache_size)
+                    .build(),
+            );
+        }
+        Ok(())
+    }
+
+    fn is_default(&self) -> bool {
+        true
+    }
+
+    fn get_unrecognized(&self) -> &UnrecognizedValues {
+        &self.unrecognized
+    }
+}
+#[async_trait]
+impl SourceConfigExtras for PmtConfig {
+    fn parse_urls() -> bool {
+        true
+    }
+
+    async fn new_sources(&self, id: String, path: PathBuf) -> FileResult<Box<dyn Source>> {
+        Ok(Box::new(
+            PmtFileSource::new(self.new_cached_source(), id, path).await?,
+        ))
+    }
+
+    async fn new_sources_url(&self, id: String, url: Url) -> FileResult<Box<dyn Source>> {
+        Ok(Box::new(
+            PmtHttpSource::new(
+                self.client.clone().unwrap(),
+                self.new_cached_source(),
+                id,
+                url,
+            )
+            .await?,
+        ))
+    }
+}
 
 macro_rules! impl_pmtiles_source {
-    ($name: ident, $backend: ty, $cache: ty, $path: ty, $display_path: path, $err: ident) => {
+    ($name: ident, $backend: ty, $path: ty, $display_path: path, $err: ident) => {
         #[derive(Clone)]
         pub struct $name {
             id: String,
             path: $path,
-            pmtiles: Arc<AsyncPmTilesReader<$backend, $cache>>,
+            pmtiles: Arc<AsyncPmTilesReader<$backend, PmtCache>>,
             tilejson: TileJSON,
             tile_info: TileInfo,
         }
@@ -31,7 +188,7 @@ macro_rules! impl_pmtiles_source {
            async fn new_int(
                id: String,
                path: $path,
-               reader: AsyncPmTilesReader<$backend, $cache>,
+               reader: AsyncPmTilesReader<$backend, PmtCache>,
            ) -> FileResult<Self> {
                let hdr = &reader.get_header();
 
@@ -132,4 +289,53 @@ macro_rules! impl_pmtiles_source {
    };
 }
 
-pub(crate) use impl_pmtiles_source;
+impl_pmtiles_source!(
+    PmtHttpSource,
+    HttpBackend,
+    Url,
+    identity,
+    InvalidUrlMetadata
+);
+
+impl PmtHttpSource {
+    pub async fn new(client: Client, cache: PmtCache, id: String, url: Url) -> FileResult<Self> {
+        let reader = AsyncPmTilesReader::new_with_cached_url(cache, client, url.clone()).await;
+        let reader = reader.map_err(|e| FileError::PmtError(e, url.to_string()))?;
+
+        Self::new_int(id, url, reader).await
+    }
+}
+
+impl_pmtiles_source!(
+    PmtFileSource,
+    MmapBackend,
+    PathBuf,
+    Path::display,
+    InvalidMetadata
+);
+
+impl PmtFileSource {
+    pub async fn new(cache: PmtCache, id: String, path: PathBuf) -> FileResult<Self> {
+        let backend = MmapBackend::try_from(path.as_path())
+            .await
+            .map_err(|e| {
+                io::Error::new(
+                    io::ErrorKind::Other,
+                    format!("{e:?}: Cannot open file {}", path.display()),
+                )
+            })
+            .map_err(|e| IoError(e, path.clone()))?;
+
+        let reader = AsyncPmTilesReader::try_from_cached_source(backend, cache).await;
+        let reader = reader
+            .map_err(|e| {
+                io::Error::new(
+                    io::ErrorKind::Other,
+                    format!("{e:?}: Cannot open file {}", path.display()),
+                )
+            })
+            .map_err(|e| IoError(e, path.clone()))?;
+
+        Self::new_int(id, path, reader).await
+    }
+}
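
Aside: the per-source PmtCache above is only a thin view over one global moka cache keyed by (source id, directory offset), so every PMTiles source, local or HTTP, shares a single size-bounded pool. Below is a minimal, hedged sketch of that keying and weigher pattern with a toy value type; the Dir stand-in, the 32 MiB figure, and the tokio runtime setup are illustrative assumptions, not Martin's actual types or defaults resolution.

use moka::future::Cache;

// Stand-in for pmtiles::Directory; only its approximate byte size matters here.
#[derive(Clone)]
struct Dir(Vec<u8>);

#[tokio::main]
async fn main() {
    // One cache shared by every source, keyed by (source id, byte offset) and
    // bounded by the summed weight (approximate byte size) of cached values.
    let global: Cache<(usize, usize), Dir> = Cache::builder()
        .weigher(|_key, dir: &Dir| dir.0.len().try_into().unwrap_or(u32::MAX))
        .max_capacity(32 * 1024 * 1024) // ~32 MiB of total weight
        .build();

    // Distinct source ids keep identical offsets from colliding across files.
    let (src_a, src_b) = (0_usize, 1_usize);
    global.insert((src_a, 127), Dir(vec![0; 1024])).await;

    assert!(global.get(&(src_a, 127)).await.is_some());
    assert!(global.get(&(src_b, 127)).await.is_none());
}
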
@@ -3,6 +3,7 @@ use std::collections::{BTreeMap, HashMap};
 use std::fmt::Debug;
 use std::path::PathBuf;
 
+use async_trait::async_trait;
 use futures::future::try_join_all;
 use log::{info, warn};
 use serde::{Deserialize, Serialize};
@@ -13,7 +14,8 @@ use spreet::{
 use tokio::io::AsyncReadExt;
 
 use self::SpriteError::{SpriteInstError, SpriteParsingError, SpriteProcessingError};
-use crate::file_config::{FileConfigEnum, FileResult};
+use crate::config::UnrecognizedValues;
+use crate::file_config::{ConfigExtras, FileConfigEnum, FileResult};
 
 pub type SpriteResult<T> = Result<T, SpriteError>;
 
@@ -57,12 +59,25 @@ pub struct CatalogSpriteEntry {
 
 pub type SpriteCatalog = BTreeMap<String, CatalogSpriteEntry>;
 
+#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)]
+pub struct SpriteConfig {
+    #[serde(flatten)]
+    pub unrecognized: UnrecognizedValues,
+}
+
+#[async_trait]
+impl ConfigExtras for SpriteConfig {
+    fn get_unrecognized(&self) -> &UnrecognizedValues {
+        &self.unrecognized
+    }
+}
+
 #[derive(Debug, Clone, Default)]
 pub struct SpriteSources(HashMap<String, SpriteSource>);
 
 impl SpriteSources {
-    pub fn resolve(config: &mut FileConfigEnum) -> FileResult<Self> {
-        let Some(cfg) = config.extract_file_config() else {
+    pub fn resolve(config: &mut FileConfigEnum<SpriteConfig>) -> FileResult<Self> {
+        let Some(cfg) = config.extract_file_config()? else {
            return Ok(Self::default());
        };
 
@@ -89,7 +104,7 @@ impl SpriteSources {
            results.add_source(name.to_string_lossy().to_string(), path);
        }
 
-        *config = FileConfigEnum::new_extended(directories, configs, cfg.unrecognized);
+        *config = FileConfigEnum::new_extended(directories, configs, cfg.custom);
 
        Ok(results)
    }
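
Aside: SpriteConfig above, like PmtConfig earlier, relies on a #[serde(flatten)] map field to swallow unknown keys instead of failing deserialization, so they can be reported as unrecognized later. A minimal sketch of that serde pattern, assuming a plain HashMap<String, serde_json::Value> in place of Martin's UnrecognizedValues type:

use std::collections::HashMap;

use serde::Deserialize;

// Stand-in for Martin's UnrecognizedValues (assumption for this sketch).
type Extra = HashMap<String, serde_json::Value>;

#[derive(Debug, Default, Deserialize)]
struct SectionConfig {
    // Any key not matched by a named field lands here instead of causing an
    // error, so it can be surfaced as an "unrecognized option" warning later.
    #[serde(flatten)]
    unrecognized: Extra,
}

fn main() {
    let cfg: SectionConfig = serde_json::from_str(r#"{ "unexpected_key": 1 }"#).unwrap();
    assert!(cfg.unrecognized.contains_key("unexpected_key"));
}
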
@@ -8,7 +8,6 @@ use mbtiles::MbtError;
 use crate::file_config::FileError;
 #[cfg(feature = "fonts")]
 use crate::fonts::FontError;
-use crate::pg::PgError;
 #[cfg(feature = "sprites")]
 use crate::sprites::SpriteError;
 
@@ -58,8 +57,9 @@ pub enum MartinError {
    #[error("Unrecognizable connection strings: {0:?}")]
    UnrecognizableConnections(Vec<String>),
 
+    #[cfg(feature = "postgres")]
    #[error(transparent)]
-    PostgresError(#[from] PgError),
+    PostgresError(#[from] crate::pg::PgError),
 
    #[error(transparent)]
    MbtilesError(#[from] MbtError),
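
Aside: the change above drops the unconditional use of crate::pg::PgError and gates the variant itself, which is the usual way to keep a thiserror enum compiling when an optional backend feature is disabled. A hedged, self-contained sketch of that pattern; the enum name, the "pg" feature name, and the wrapped error type here are illustrative only, not Martin's real types:

use thiserror::Error;

#[derive(Debug, Error)]
pub enum ExampleError {
    #[error("Unrecognizable connection strings: {0:?}")]
    UnrecognizableConnections(Vec<String>),

    // Compiled only when the crate is built with `--features pg`; with the
    // feature off, neither the variant nor its error type needs to exist.
    #[cfg(feature = "pg")]
    #[error(transparent)]
    Postgres(#[from] std::num::ParseIntError), // placeholder error type
}

fn main() {
    let err = ExampleError::UnrecognizableConnections(vec!["bad://uri".to_string()]);
    println!("{err}");
}
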
@@ -1,28 +1,7 @@
-use std::future::Future;
 use std::io::{Read as _, Write as _};
-use std::time::Duration;
 
 use flate2::read::GzDecoder;
 use flate2::write::GzEncoder;
-use futures::pin_mut;
-use tokio::time::timeout;
-
-#[cfg(test)]
-pub fn sorted_opt_set<S: serde::Serializer>(
-    value: &Option<std::collections::HashSet<String>>,
-    serializer: S,
-) -> Result<S::Ok, S::Error> {
-    use serde::Serialize as _;
-
-    value
-        .as_ref()
-        .map(|v| {
-            let mut v: Vec<_> = v.iter().collect();
-            v.sort();
-            v
-        })
-        .serialize(serializer)
-}
 
 pub fn decode_gzip(data: &[u8]) -> Result<Vec<u8>, std::io::Error> {
    let mut decoder = GzDecoder::new(data);
@@ -49,17 +28,3 @@ pub fn encode_brotli(data: &[u8]) -> Result<Vec<u8>, std::io::Error> {
    encoder.write_all(data)?;
    Ok(encoder.into_inner())
 }
-
-pub async fn on_slow<T, S: FnOnce()>(
-    future: impl Future<Output = T>,
-    duration: Duration,
-    fn_on_slow: S,
-) -> T {
-    pin_mut!(future);
-    if let Ok(result) = timeout(duration, &mut future).await {
-        result
-    } else {
-        fn_on_slow();
-        future.await
-    }
-}
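
Aside: on_slow, removed from this module above, wraps a future so a callback fires once if it runs past a deadline while the wrapped future still completes normally. Below is a self-contained usage sketch built around the same helper body; the 250 ms threshold, the sleep, and the log message are illustrative, and it assumes the futures crate plus tokio with timers and macros enabled.

use std::future::Future;
use std::time::Duration;

use futures::pin_mut;
use tokio::time::timeout;

// Same shape as the helper removed above: wait up to `duration`, run the
// callback once if the deadline passes, then keep waiting for the result.
pub async fn on_slow<T, S: FnOnce()>(
    future: impl Future<Output = T>,
    duration: Duration,
    fn_on_slow: S,
) -> T {
    pin_mut!(future);
    if let Ok(result) = timeout(duration, &mut future).await {
        result
    } else {
        fn_on_slow();
        future.await
    }
}

#[tokio::main]
async fn main() {
    let value = on_slow(
        async {
            tokio::time::sleep(Duration::from_millis(500)).await;
            42
        },
        Duration::from_millis(250),
        || eprintln!("still working; this is taking longer than expected..."),
    )
    .await;
    assert_eq!(value, 42);
}
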
@@ -1,3 +1,5 @@
+#![cfg(feature = "postgres")]
+
 use ctor::ctor;
 use indoc::indoc;
 use insta::assert_yaml_snapshot;
@@ -1,3 +1,5 @@
+#![cfg(feature = "postgres")]
+
 use actix_http::Request;
 use actix_web::http::StatusCode;
 use actix_web::test::{call_and_read_body_json, call_service, read_body, TestRequest};
@@ -1,3 +1,5 @@
+#![cfg(feature = "postgres")]
+
 use ctor::ctor;
 use indoc::indoc;
 use insta::assert_yaml_snapshot;
@@ -1,6 +1,5 @@
 use indoc::formatdoc;
 pub use martin::args::Env;
-use martin::pg::TableInfo;
 use martin::{Config, IdResolver, ServerState, Source};
 
 use crate::mock_cfg;
@@ -28,11 +27,12 @@ pub async fn mock_sources(mut config: Config) -> MockSource {
    (res, config)
 }
 
+#[cfg(feature = "postgres")]
 #[allow(dead_code)]
 #[must_use]
-pub fn table<'a>(mock: &'a MockSource, name: &str) -> &'a TableInfo {
+pub fn table<'a>(mock: &'a MockSource, name: &str) -> &'a martin::pg::TableInfo {
    let (_, config) = mock;
-    let vals: Vec<&TableInfo> = config
+    let vals: Vec<&martin::pg::TableInfo> = config
        .postgres
        .iter()
        .flat_map(|v| v.tables.iter().map(|vv| vv.get(name)))
@@ -2,7 +2,7 @@ lints.workspace = true
 
 [package]
 name = "mbtiles"
-version = "0.8.5"
+version = "0.9.0"
 authors = ["Yuri Astrakhan <YuriAstrakhan@gmail.com>", "MapLibre contributors"]
 description = "A simple low-level MbTiles access and processing library, with some tile format detection and other relevant heuristics."
 keywords = ["mbtiles", "maps", "tiles", "mvt", "tilejson"]
@@ -166,6 +166,7 @@ postgres:
 
 
 pmtiles:
+  dir_cache_size_mb: 100
   paths:
     - http://localhost:5412/webp2.pmtiles
   sources:
@@ -165,6 +165,7 @@ pmtiles:
     pmt: tests/fixtures/pmtiles/stamen_toner__raster_CC-BY+ODbL_z3.pmtiles
     pmt2: http://localhost:5412/webp2.pmtiles
     webp2: http://localhost:5412/webp2.pmtiles
+  dir_cache_size_mb: 100
 sprites:
   paths: tests/fixtures/sprites/src1
   sources: