Mirror of https://github.com/maplibre/martin.git (synced 2024-12-18 12:21:56 +03:00)
Implement tile caching (#1105)
Add a top-level config parameter for the amount of cache memory (in MB) to use for caching tiles and PMTiles directories, defaulting to 512, with 0 disabling the cache. This also removes the `pmtiles.dir_cache_size_mb` parameter (it is now ignored, but produces a warning).

```yaml
cache_size_mb: 512
```

The new cache stores tiles exactly as provided by the source. So if PostgreSQL returns a non-compressed tile, the cache will hold the uncompressed variant, and it will be compressed for each response. This will be fixed in a later release. Note that fonts and sprites are not cached at this time and remain a TODO.
This commit is contained in:
parent 4f7487b448
commit 3dc54d7f9e
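For readers unfamiliar with how a budget like `cache_size_mb: 512` translates into cache behaviour: the commit weighs every cache entry by its byte size and caps the cache's total weight. Below is a minimal standalone sketch of that pattern, not Martin's actual code; the key type, the local `cache_size_mb` variable, and the use of `tokio` are illustrative assumptions (Martin's real weigher handles both tiles and PMTiles directories, as shown in the `Config::resolve` hunk further down).

```rust
use moka::future::Cache;

#[tokio::main]
async fn main() {
    // Hypothetical stand-in for the new top-level config value.
    let cache_size_mb: u64 = 512;
    let max_bytes = cache_size_mb * 1024 * 1024;

    // Entries are weighed by payload size, so `max_capacity` acts as a byte
    // budget rather than an entry count.
    let cache: Cache<String, Vec<u8>> = Cache::builder()
        .weigher(|_key, value: &Vec<u8>| value.len().try_into().unwrap_or(u32::MAX))
        .max_capacity(max_bytes)
        .build();

    // Illustrative key: one cached tile payload of 4 KiB.
    cache.insert("tile/0/0/0".to_string(), vec![0u8; 4096]).await;
    assert!(cache.get(&"tile/0/0/0".to_string()).await.is_some());
}
```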
Cargo.lock (generated): 40 changed lines
@@ -326,9 +326,9 @@ dependencies = [
 
 [[package]]
 name = "anyhow"
-version = "1.0.77"
+version = "1.0.78"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c9d19de80eff169429ac1e9f48fffb163916b448a44e8e046186232046d9e1f9"
+checksum = "ca87830a3e3fb156dc96cfbd31cb620265dd053be734723f22b760d6cc3c3051"
 
 [[package]]
 name = "approx"
@@ -389,9 +389,9 @@ dependencies = [
 
 [[package]]
 name = "async-trait"
-version = "0.1.75"
+version = "0.1.76"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fdf6721fb0140e4f897002dd086c06f6c27775df19cfe1fccb21181a48fd2c98"
+checksum = "531b97fb4cd3dfdce92c35dedbfdc1f0b9d8091c8ca943d6dae340ef5012d514"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1025,9 +1025,9 @@ dependencies = [
 
 [[package]]
 name = "deranged"
-version = "0.3.10"
+version = "0.3.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8eb30d70a07a3b04884d2677f06bec33509dc67ca60d92949e5535352d3191dc"
+checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4"
 dependencies = [
  "powerfmt",
  "serde",
@@ -1707,9 +1707,9 @@ dependencies = [
 
 [[package]]
 name = "iana-time-zone"
-version = "0.1.58"
+version = "0.1.59"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8326b86b6cff230b97d0d312a6c40a60726df3332e721f72a1b035f451663b20"
+checksum = "b6a67363e2aa4443928ce15e57ebae94fd8949958fd1223c4cfc0cd473ad7539"
 dependencies = [
  "android_system_properties",
  "core-foundation-sys",
@@ -2010,7 +2010,7 @@ dependencies = [
 
 [[package]]
 name = "martin"
-version = "0.12.0"
+version = "0.13.0"
 dependencies = [
  "actix-cors",
  "actix-http",
@@ -2063,7 +2063,7 @@ dependencies = [
 
 [[package]]
 name = "martin-tile-utils"
-version = "0.4.0"
+version = "0.4.1"
 dependencies = [
  "approx",
  "insta",
@@ -2071,7 +2071,7 @@ dependencies = [
 
 [[package]]
 name = "mbtiles"
-version = "0.9.0"
+version = "0.9.1"
 dependencies = [
  "actix-rt",
  "anyhow",
@@ -2703,9 +2703,9 @@ dependencies = [
 
 [[package]]
 name = "proc-macro2"
-version = "1.0.71"
+version = "1.0.72"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "75cb1540fadbd5b8fbccc4dddad2734eba435053f725621c070711a14bb5f4b8"
+checksum = "a293318316cf6478ec1ad2a21c49390a8d5b5eae9fab736467d93fbc0edc29c5"
 dependencies = [
  "unicode-ident",
 ]
@@ -4004,18 +4004,18 @@ dependencies = [
 
 [[package]]
 name = "thiserror"
-version = "1.0.52"
+version = "1.0.53"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "83a48fd946b02c0a526b2e9481c8e2a17755e47039164a86c4070446e3a4614d"
+checksum = "b2cd5904763bad08ad5513ddbb12cf2ae273ca53fa9f68e843e236ec6dfccc09"
 dependencies = [
  "thiserror-impl",
 ]
 
 [[package]]
 name = "thiserror-impl"
-version = "1.0.52"
+version = "1.0.53"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e7fbe9b594d6568a6a1443250a7e67d80b74e1e96f6d1715e1e21cc1888291d3"
+checksum = "3dcf4a824cce0aeacd6f38ae6f24234c8e80d68632338ebaa1443b5df9e29e19"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -4674,11 +4674,11 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
 
 [[package]]
 name = "windows-core"
-version = "0.51.1"
+version = "0.52.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f1f8cf84f35d2db49a46868f947758c7a1138116f7fac3bc844f43ade1292e64"
+checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9"
 dependencies = [
- "windows-targets 0.48.5",
+ "windows-targets 0.52.0",
 ]
 
 [[package]]

debian/config.yaml (vendored): 4 changed lines
@@ -7,6 +7,9 @@ listen_addresses: '0.0.0.0:3000'
 # Number of web server workers
 worker_processes: 8
 
+# Amount of memory (in MB) to use for caching tiles [default: 512, 0 to disable]
+cache_size_mb: 512
+
 # see https://maplibre.org/martin/config-file.html
 
 # postgres:
@@ -17,7 +20,6 @@ worker_processes: 8
 #   auto_bounds: skip
 
 # pmtiles:
-#   dir_cache_size_mb: 100
 #   paths:
 #     - /dir-path
 #     - /path/to/pmtiles.pmtiles

@@ -24,6 +24,9 @@ listen_addresses: '0.0.0.0:3000'
 # Number of web server workers
 worker_processes: 8
 
+# Amount of memory (in MB) to use for caching tiles [default: 512, 0 to disable]
+cache_size_mb: 1024
+
 # Database configuration. This can also be a list of PG configs.
 postgres:
   # Database connection string. You can use env vars too, for example:
@@ -155,8 +158,6 @@ postgres:
 
 # Publish PMTiles files from local disk or proxy to a web server
 pmtiles:
-  # Memory (in MB) to use for caching PMTiles directories [default: 32, 0 to disable]]
-  dir_cache_size_mb: 100
   paths:
     # scan this whole dir, matching all *.pmtiles files
     - /dir-path

@@ -2,7 +2,7 @@ lints.workspace = true
 
 [package]
 name = "martin-tile-utils"
-version = "0.4.0"
+version = "0.4.1"
 authors = ["Yuri Astrakhan <YuriAstrakhan@gmail.com>", "MapLibre contributors"]
 description = "Utilites to help with map tile processing, such as type and compression detection. Used by the MapLibre's Martin tile server."
 keywords = ["maps", "tiles", "mvt", "tileserver"]

@@ -86,7 +86,7 @@ impl Display for Format {
     }
 }
 
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
 pub enum Encoding {
     /// Data is not compressed, but it can be
     Uncompressed = 0b0000_0000,

@@ -3,7 +3,7 @@ lints.workspace = true
 [package]
 name = "martin"
 # Once the release is published with the hash, update https://github.com/maplibre/homebrew-martin
-version = "0.12.0"
+version = "0.13.0"
 authors = ["Stepan Kuzmin <to.stepan.kuzmin@gmail.com>", "Yuri Astrakhan <YuriAstrakhan@gmail.com>", "MapLibre contributors"]
 description = "Blazing fast and lightweight tile server with PostGIS, MBTiles, and PMTiles support"
 keywords = ["maps", "tiles", "mbtiles", "pmtiles", "postgis"]
@@ -62,7 +62,7 @@ harness = false
 default = ["fonts", "mbtiles", "pmtiles", "postgres", "sprites"]
 fonts = ["dep:bit-set", "dep:pbf_font_tools"]
 mbtiles = []
-pmtiles = ["dep:moka"]
+pmtiles = []
 postgres = ["dep:deadpool-postgres", "dep:json-patch", "dep:postgis", "dep:postgres", "dep:postgres-protocol", "dep:semver", "dep:tokio-postgres-rustls"]
 sprites = ["dep:spreet"]
 bless-tests = []
@@ -85,7 +85,7 @@ json-patch = { workspace = true, optional = true }
 log.workspace = true
 martin-tile-utils.workspace = true
 mbtiles.workspace = true
-moka = { workspace = true, optional = true }
+moka.workspace = true
 num_cpus.workspace = true
 pbf_font_tools = { workspace = true, optional = true }
 pmtiles.workspace = true

@@ -1,7 +1,7 @@
 use async_trait::async_trait;
 use criterion::async_executor::FuturesExecutor;
 use criterion::{criterion_group, criterion_main, Criterion};
-use martin::srv::get_tile_response;
+use martin::srv::DynTileSource;
 use martin::{
     CatalogSourceEntry, MartinResult, Source, TileCoord, TileData, TileSources, UrlQuery,
 };
@@ -58,7 +58,8 @@ impl Source for NullSource {
     }
 }
 
 async fn process_tile(sources: &TileSources) {
-    get_tile_response(sources, TileCoord { z: 0, x: 0, y: 0 }, "null", "", None)
+    let src = DynTileSource::new(sources, "null", Some(0), "", None, None).unwrap();
+    src.get_http_response(TileCoord { z: 0, x: 0, y: 0 })
         .await
         .unwrap();
 }

@@ -43,6 +43,9 @@ pub struct MetaArgs {
     /// By default, only print if sources are auto-detected.
     #[arg(long)]
     pub save_config: Option<PathBuf>,
+    /// Main cache size (in MB)
+    #[arg(short = 'C', long)]
+    pub cache_size: Option<u64>,
     /// **Deprecated** Scan for new sources on sources list requests
     #[arg(short, long, hide = true)]
     pub watch: bool,
@@ -74,6 +77,10 @@ impl Args {
             return Err(ConfigAndConnectionsError(self.meta.connection));
         }
 
+        if self.meta.cache_size.is_some() {
+            config.cache_size_mb = self.meta.cache_size;
+        }
+
         self.srv.merge_into_config(&mut config.srv);
 
         #[allow(unused_mut)]

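The hunk above also exposes the cache size on the command line. As a rough illustration of how a `short = 'C', long` clap attribute behaves, here is a self-contained sketch; `DemoArgs` and the `parse_from` input are made up for the example and are not Martin's real `MetaArgs`.

```rust
use clap::Parser;

/// Illustrative stand-in for the new argument, not Martin's actual struct.
#[derive(Parser, Debug)]
struct DemoArgs {
    /// Main cache size (in MB)
    #[arg(short = 'C', long)]
    cache_size: Option<u64>,
}

fn main() {
    // Both `-C 1024` and `--cache-size 1024` parse to Some(1024); omitting the
    // flag leaves None, so the 512 MB default applies downstream in the config.
    let args = DemoArgs::parse_from(["martin", "-C", "1024"]);
    assert_eq!(args.cache_size, Some(1024));
}
```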
@@ -12,10 +12,10 @@ use futures::stream::{self, StreamExt};
 use futures::TryStreamExt;
 use log::{debug, error, info, log_enabled};
 use martin::args::{Args, ExtraArgs, MetaArgs, OsEnv, SrvArgs};
-use martin::srv::{get_tile_content, merge_tilejson, RESERVED_KEYWORDS};
+use martin::srv::{merge_tilejson, DynTileSource};
 use martin::{
-    append_rect, read_config, Config, IdResolver, MartinError, MartinResult, ServerState, Source,
-    TileCoord, TileData, TileRect,
+    append_rect, read_config, Config, MartinError, MartinResult, ServerState, Source, TileCoord,
+    TileData, TileRect,
 };
 use martin_tile_utils::{bbox_to_xyz, TileInfo};
 use mbtiles::sqlx::SqliteConnection;
@@ -144,7 +144,8 @@ async fn start(copy_args: CopierArgs) -> MartinCpResult<()> {
 
     args.merge_into_config(&mut config, &env)?;
     config.finalize()?;
-    let sources = config.resolve(IdResolver::new(RESERVED_KEYWORDS)).await?;
+
+    let sources = config.resolve().await?;
 
     if let Some(file_name) = save_config {
         config.save_to_file(file_name)?;
@@ -274,9 +275,18 @@ fn iterate_tiles(tiles: Vec<TileRect>) -> impl Iterator<Item = TileCoord> {
 async fn run_tile_copy(args: CopyArgs, state: ServerState) -> MartinCpResult<()> {
     let output_file = &args.output_file;
     let concurrency = args.concurrency.unwrap_or(1);
-    let (sources, _use_url_query, info) = state.tiles.get_sources(args.source.as_str(), None)?;
-    let sources = sources.as_slice();
-    let tile_info = sources.first().unwrap().get_tile_info();
+    let src = DynTileSource::new(
+        &state.tiles,
+        args.source.as_str(),
+        None,
+        args.url_query.as_deref().unwrap_or_default(),
+        Some(parse_encoding(args.encoding.as_str())?),
+        None,
+    )?;
+    // parallel async below uses move, so we must only use copyable types
+    let src = &src;
+
     let (tx, mut rx) = channel::<TileXyz>(500);
     let tiles = compute_tile_ranges(&args);
     let mbt = Mbtiles::new(output_file)?;
@@ -288,30 +298,26 @@ async fn run_tile_copy(args: CopyArgs, state: ServerState) -> MartinCpResult<()>
     } else {
         CopyDuplicateMode::Override
     };
-    let mbt_type = init_schema(&mbt, &mut conn, sources, tile_info, &args).await?;
-    let query = args.url_query.as_deref();
-    let req = TestRequest::default()
-        .insert_header((ACCEPT_ENCODING, args.encoding.as_str()))
-        .finish();
-    let accept_encoding = AcceptEncoding::parse(&req)?;
-    let encodings = Some(&accept_encoding);
+    let mbt_type = init_schema(&mbt, &mut conn, src.sources.as_slice(), src.info, &args).await?;
 
     let progress = Progress::new(&tiles);
     info!(
-        "Copying {} {tile_info} tiles from {} to {}",
+        "Copying {} {} tiles from {} to {}",
         progress.total,
+        src.info,
         args.source,
         args.output_file.display()
     );
 
     try_join!(
+        // Note: for some reason, tests hang here without the `move` keyword
         async move {
             stream::iter(iterate_tiles(tiles))
                 .map(MartinResult::Ok)
                 .try_for_each_concurrent(concurrency, |xyz| {
                     let tx = tx.clone();
                     async move {
-                        let tile = get_tile_content(sources, info, xyz, query, encodings).await?;
+                        let tile = src.get_tile_content(xyz).await?;
                         let data = tile.data;
                         tx.send(TileXyz { xyz, data })
                             .await
@@ -375,6 +381,13 @@ async fn run_tile_copy(args: CopyArgs, state: ServerState) -> MartinCpResult<()>
     Ok(())
 }
 
+fn parse_encoding(encoding: &str) -> MartinCpResult<AcceptEncoding> {
+    let req = TestRequest::default()
+        .insert_header((ACCEPT_ENCODING, encoding))
+        .finish();
+    Ok(AcceptEncoding::parse(&req)?)
+}
+
 async fn init_schema(
     mbt: &Mbtiles,
     conn: &mut SqliteConnection,

@@ -4,8 +4,8 @@ use actix_web::dev::Server;
 use clap::Parser;
 use log::{error, info, log_enabled};
 use martin::args::{Args, OsEnv};
-use martin::srv::{new_server, RESERVED_KEYWORDS};
-use martin::{read_config, Config, IdResolver, MartinResult};
+use martin::srv::new_server;
+use martin::{read_config, Config, MartinResult};
 
 const VERSION: &str = env!("CARGO_PKG_VERSION");
 
@@ -24,7 +24,7 @@ async fn start(args: Args) -> MartinResult<Server> {
 
     args.merge_into_config(&mut config, &env)?;
     config.finalize()?;
-    let sources = config.resolve(IdResolver::new(RESERVED_KEYWORDS)).await?;
+    let sources = config.resolve().await?;
 
     if let Some(file_name) = save_config {
         config.save_to_file(file_name)?;

@@ -18,13 +18,15 @@ use crate::fonts::FontSources;
 use crate::source::{TileInfoSources, TileSources};
 #[cfg(feature = "sprites")]
 use crate::sprites::{SpriteConfig, SpriteSources};
-use crate::srv::SrvConfig;
+use crate::srv::{SrvConfig, RESERVED_KEYWORDS};
+use crate::utils::{CacheValue, MainCache, OptMainCache};
 use crate::MartinError::{ConfigLoadError, ConfigParseError, ConfigWriteError, NoSources};
 use crate::{IdResolver, MartinResult, OptOneMany};
 
 pub type UnrecognizedValues = HashMap<String, serde_yaml::Value>;
 
 pub struct ServerState {
+    pub cache: OptMainCache,
     pub tiles: TileSources,
     #[cfg(feature = "sprites")]
     pub sprites: SpriteSources,
@@ -32,8 +34,11 @@ pub struct ServerState {
     pub fonts: FontSources,
 }
 
+#[serde_with::skip_serializing_none]
 #[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)]
 pub struct Config {
+    pub cache_size_mb: Option<u64>,
+
     #[serde(flatten)]
     pub srv: SrvConfig,
 
@@ -107,19 +112,43 @@ impl Config {
         }
     }
 
-    pub async fn resolve(&mut self, idr: IdResolver) -> MartinResult<ServerState> {
+    pub async fn resolve(&mut self) -> MartinResult<ServerState> {
+        let resolver = IdResolver::new(RESERVED_KEYWORDS);
+        let cache_size = self.cache_size_mb.unwrap_or(512) * 1024 * 1024;
+        let cache = if cache_size > 0 {
+            info!("Initializing main cache with maximum size {cache_size}B");
+            Some(
+                MainCache::builder()
+                    .weigher(|_key, value: &CacheValue| -> u32 {
+                        match value {
+                            CacheValue::Tile(v) => v.len().try_into().unwrap_or(u32::MAX),
+                            CacheValue::PmtDirectory(v) => {
+                                v.get_approx_byte_size().try_into().unwrap_or(u32::MAX)
+                            }
+                        }
+                    })
+                    .max_capacity(cache_size)
+                    .build(),
+            )
+        } else {
+            info!("Caching is disabled");
+            None
+        };
+
         Ok(ServerState {
-            tiles: self.resolve_tile_sources(idr).await?,
+            tiles: self.resolve_tile_sources(&resolver, cache.clone()).await?,
             #[cfg(feature = "sprites")]
             sprites: SpriteSources::resolve(&mut self.sprites)?,
             #[cfg(feature = "fonts")]
            fonts: FontSources::resolve(&mut self.fonts)?,
+            cache,
         })
     }
 
     async fn resolve_tile_sources(
         &mut self,
-        #[allow(unused_variables)] idr: IdResolver,
+        #[allow(unused_variables)] idr: &IdResolver,
+        #[allow(unused_variables)] cache: OptMainCache,
     ) -> MartinResult<TileSources> {
         #[allow(unused_mut)]
         let mut sources: Vec<Pin<Box<dyn Future<Output = MartinResult<TileInfoSources>>>>> =
@@ -133,14 +162,14 @@ impl Config {
         #[cfg(feature = "pmtiles")]
         if !self.pmtiles.is_empty() {
             let cfg = &mut self.pmtiles;
-            let val = crate::file_config::resolve_files(cfg, idr.clone(), "pmtiles");
+            let val = crate::file_config::resolve_files(cfg, idr, cache.clone(), "pmtiles");
             sources.push(Box::pin(val));
         }
 
         #[cfg(feature = "mbtiles")]
         if !self.mbtiles.is_empty() {
             let cfg = &mut self.mbtiles;
-            let val = crate::file_config::resolve_files(cfg, idr.clone(), "mbtiles");
+            let val = crate::file_config::resolve_files(cfg, idr, cache.clone(), "mbtiles");
             sources.push(Box::pin(val));
         }
 

@@ -14,7 +14,7 @@ use crate::file_config::FileError::{
     InvalidFilePath, InvalidSourceFilePath, InvalidSourceUrl, IoError,
 };
 use crate::source::{Source, TileInfoSources};
-use crate::utils::{IdResolver, OptOneMany};
+use crate::utils::{IdResolver, OptMainCache, OptOneMany};
 use crate::MartinResult;
 use crate::OptOneMany::{Many, One};
 
@@ -48,7 +48,7 @@ pub enum FileError {
 }
 
 pub trait ConfigExtras: Clone + Debug + Default + PartialEq + Send {
-    fn init_parsing(&mut self) -> FileResult<()> {
+    fn init_parsing(&mut self, _cache: OptMainCache) -> FileResult<()> {
         Ok(())
     }
 
@@ -127,7 +127,10 @@ impl<T: ConfigExtras> FileConfigEnum<T> {
         }
     }
 
-    pub fn extract_file_config(&mut self) -> FileResult<Option<FileConfig<T>>> {
+    pub fn extract_file_config(
+        &mut self,
+        cache: OptMainCache,
+    ) -> FileResult<Option<FileConfig<T>>> {
         let mut res = match self {
             FileConfigEnum::None => return Ok(None),
             FileConfigEnum::Path(path) => FileConfig {
@@ -140,7 +143,7 @@ impl<T: ConfigExtras> FileConfigEnum<T> {
             },
             FileConfigEnum::Config(cfg) => mem::take(cfg),
         };
-        res.custom.init_parsing()?;
+        res.custom.init_parsing(cache)?;
         Ok(Some(res))
     }
 
@@ -218,20 +221,22 @@ pub struct FileConfigSource {
 
 pub async fn resolve_files<T: SourceConfigExtras>(
     config: &mut FileConfigEnum<T>,
-    idr: IdResolver,
+    idr: &IdResolver,
+    cache: OptMainCache,
     extension: &str,
 ) -> MartinResult<TileInfoSources> {
-    resolve_int(config, idr, extension)
+    resolve_int(config, idr, cache, extension)
         .map_err(crate::MartinError::from)
         .await
 }
 
 async fn resolve_int<T: SourceConfigExtras>(
     config: &mut FileConfigEnum<T>,
-    idr: IdResolver,
+    idr: &IdResolver,
+    cache: OptMainCache,
     extension: &str,
 ) -> FileResult<TileInfoSources> {
-    let Some(cfg) = config.extract_file_config()? else {
+    let Some(cfg) = config.extract_file_config(cache)? else {
        return Ok(TileInfoSources::default());
     };
 

@@ -10,7 +10,7 @@ pub use source::{CatalogSourceEntry, Source, Tile, TileData, TileSources, UrlQue
 mod utils;
 pub use utils::{
     append_rect, decode_brotli, decode_gzip, IdResolver, MartinError, MartinResult, OptBoolObj,
-    OptOneMany, TileCoord, TileRect,
+    OptOneMany, TileCoord, TileRect, NO_MAIN_CACHE,
 };
 
 pub mod args;

@@ -9,7 +9,6 @@ use std::sync::Arc;
 use async_trait::async_trait;
 use log::{trace, warn};
 use martin_tile_utils::{Encoding, Format, TileInfo};
-use moka::future::Cache;
 use pmtiles::async_reader::AsyncPmTilesReader;
 use pmtiles::cache::{DirCacheResult, DirectoryCache};
 use pmtiles::http::HttpBackend;
@@ -24,20 +23,20 @@ use crate::config::UnrecognizedValues;
 use crate::file_config::FileError::{InvalidMetadata, InvalidUrlMetadata, IoError};
 use crate::file_config::{ConfigExtras, FileError, FileResult, SourceConfigExtras};
 use crate::source::UrlQuery;
+use crate::utils::cache::get_cached_value;
+use crate::utils::{CacheKey, CacheValue, OptMainCache};
 use crate::{MartinResult, Source, TileCoord, TileData};
 
-type PmtCacheObject = Cache<(usize, usize), Directory>;
-
 #[derive(Clone, Debug)]
 pub struct PmtCache {
     id: usize,
-    /// (id, offset) -> Directory, or None to disable caching
-    cache: Option<PmtCacheObject>,
+    /// Storing (id, offset) -> Directory, or None to disable caching
+    cache: OptMainCache,
 }
 
 impl PmtCache {
     #[must_use]
-    pub fn new(id: usize, cache: Option<PmtCacheObject>) -> Self {
+    pub fn new(id: usize, cache: OptMainCache) -> Self {
         Self { id, cache }
     }
 }
@@ -45,17 +44,23 @@ impl PmtCache {
 #[async_trait]
 impl DirectoryCache for PmtCache {
     async fn get_dir_entry(&self, offset: usize, tile_id: u64) -> DirCacheResult {
-        if let Some(cache) = &self.cache {
-            if let Some(dir) = cache.get(&(self.id, offset)).await {
-                return dir.find_tile_id(tile_id).into();
-            }
+        if let Some(dir) = get_cached_value!(&self.cache, CacheValue::PmtDirectory, {
+            CacheKey::PmtDirectory(self.id, offset)
+        }) {
+            dir.find_tile_id(tile_id).into()
+        } else {
+            DirCacheResult::NotCached
         }
-        DirCacheResult::NotCached
     }
 
     async fn insert_dir(&self, offset: usize, directory: Directory) {
         if let Some(cache) = &self.cache {
-            cache.insert((self.id, offset), directory).await;
+            cache
+                .insert(
+                    CacheKey::PmtDirectory(self.id, offset),
+                    CacheValue::PmtDirectory(directory),
+                )
+                .await;
         }
     }
 }
@@ -63,8 +68,6 @@ impl DirectoryCache for PmtCache {
 #[serde_with::skip_serializing_none]
 #[derive(Debug, Default, Serialize, Deserialize)]
 pub struct PmtConfig {
-    pub dir_cache_size_mb: Option<u64>,
-
     #[serde(flatten)]
     pub unrecognized: UnrecognizedValues,
 
@@ -78,12 +81,12 @@ pub struct PmtConfig {
     pub next_cache_id: AtomicUsize,
 
     #[serde(skip)]
-    pub cache: Option<PmtCacheObject>,
+    pub cache: OptMainCache,
 }
 
 impl PartialEq for PmtConfig {
     fn eq(&self, other: &Self) -> bool {
-        self.dir_cache_size_mb == other.dir_cache_size_mb && self.unrecognized == other.unrecognized
+        self.unrecognized == other.unrecognized
     }
 }
 
@@ -91,7 +94,6 @@ impl Clone for PmtConfig {
     fn clone(&self) -> Self {
         // State is not shared between clones, only the serialized config
         Self {
-            dir_cache_size_mb: self.dir_cache_size_mb,
             unrecognized: self.unrecognized.clone(),
             ..Default::default()
         }
@@ -107,24 +109,17 @@ impl PmtConfig {
 }
 
 impl ConfigExtras for PmtConfig {
-    fn init_parsing(&mut self) -> FileResult<()> {
+    fn init_parsing(&mut self, cache: OptMainCache) -> FileResult<()> {
         assert!(self.client.is_none());
         assert!(self.cache.is_none());
 
         self.client = Some(Client::new());
+        self.cache = cache;
 
-        // Allow cache size to be disabled with 0
-        let dir_cache_size = self.dir_cache_size_mb.unwrap_or(32) * 1024 * 1024;
-        if dir_cache_size > 0 {
-            self.cache = Some(
-                Cache::builder()
-                    .weigher(|_key, value: &Directory| -> u32 {
-                        value.get_approx_byte_size().try_into().unwrap_or(u32::MAX)
-                    })
-                    .max_capacity(dir_cache_size)
-                    .build(),
-            );
+        if self.unrecognized.contains_key("dir_cache_size_mb") {
+            warn!("dir_cache_size_mb is no longer used. Instead, use cache_size_mb param in the root of the config file.");
         }
 
         Ok(())
     }
 

@@ -167,6 +167,7 @@ mod tests {
     }
 }
 
+#[derive(Debug, Clone)]
 pub struct Tile {
     pub data: TileData,
     pub info: TileInfo,

@@ -77,7 +77,7 @@ pub struct SpriteSources(HashMap<String, SpriteSource>);
 
 impl SpriteSources {
     pub fn resolve(config: &mut FileConfigEnum<SpriteConfig>) -> FileResult<Self> {
-        let Some(cfg) = config.extract_file_config()? else {
+        let Some(cfg) = config.extract_file_config(None)? else {
             return Ok(Self::default());
         };
 

martin/src/srv/fonts.rs: 0 changed lines (file mode changed from executable to normal)
@@ -1,14 +1,14 @@
 mod config;
 pub use config::{SrvConfig, KEEP_ALIVE_DEFAULT, LISTEN_ADDRESSES_DEFAULT};
 
+#[cfg(feature = "fonts")]
+mod fonts;
+
 mod server;
 pub use server::{new_server, router, Catalog, RESERVED_KEYWORDS};
 
 mod tiles;
-pub use tiles::{get_tile_content, get_tile_response, TileRequest};
+pub use tiles::{DynTileSource, TileRequest};
 
-#[cfg(feature = "fonts")]
-mod fonts;
-
 mod tiles_info;
 pub use tiles_info::{merge_tilejson, SourceIDsRequest};

@@ -112,7 +112,9 @@ pub fn new_server(config: SrvConfig, state: ServerState) -> MartinResult<(Server
         .allow_any_origin()
         .allowed_methods(vec!["GET"]);
 
-    let app = App::new().app_data(Data::new(state.tiles.clone()));
+    let app = App::new()
+        .app_data(Data::new(state.tiles.clone()))
+        .app_data(Data::new(state.cache.clone()));
 
     #[cfg(feature = "sprites")]
     let app = app.app_data(Data::new(state.sprites.clone()));

@@ -4,28 +4,18 @@ use actix_web::error::ErrorNotFound;
 use actix_web::http::header::ContentType;
 use actix_web::web::{Data, Path};
 use actix_web::{middleware, route, HttpResponse, Result as ActixResult};
+use spreet::Spritesheet;
 
 use crate::sprites::{SpriteError, SpriteSources};
 use crate::srv::server::map_internal_error;
 use crate::srv::SourceIDsRequest;
 
-pub fn map_sprite_error(e: SpriteError) -> actix_web::Error {
-    use SpriteError::SpriteNotFound;
-    match e {
-        SpriteNotFound(_) => ErrorNotFound(e.to_string()),
-        _ => map_internal_error(e),
-    }
-}
-
 #[route("/sprite/{source_ids}.png", method = "GET", method = "HEAD")]
 async fn get_sprite_png(
     path: Path<SourceIDsRequest>,
     sprites: Data<SpriteSources>,
 ) -> ActixResult<HttpResponse> {
-    let sheet = sprites
-        .get_sprites(&path.source_ids)
-        .await
-        .map_err(map_sprite_error)?;
+    let sheet = get_sprite(&path, &sprites).await?;
     Ok(HttpResponse::Ok()
         .content_type(ContentType::png())
         .body(sheet.encode_png().map_err(map_internal_error)?))
@@ -41,9 +31,16 @@ async fn get_sprite_json(
     path: Path<SourceIDsRequest>,
     sprites: Data<SpriteSources>,
 ) -> ActixResult<HttpResponse> {
-    let sheet = sprites
-        .get_sprites(&path.source_ids)
-        .await
-        .map_err(map_sprite_error)?;
+    let sheet = get_sprite(&path, &sprites).await?;
     Ok(HttpResponse::Ok().json(sheet.get_index()))
 }
 
+async fn get_sprite(path: &SourceIDsRequest, sprites: &SpriteSources) -> ActixResult<Spritesheet> {
+    sprites
+        .get_sprites(&path.source_ids)
+        .await
+        .map_err(|e| match e {
+            SpriteError::SpriteNotFound(_) => ErrorNotFound(e.to_string()),
+            _ => map_internal_error(e),
+        })
+}

martin/src/srv/tiles.rs: 263 changed lines (file mode changed from normal to executable)
@@ -6,13 +6,18 @@ use actix_web::http::header::{
 use actix_web::web::{Data, Path, Query};
 use actix_web::{route, HttpMessage, HttpRequest, HttpResponse, Result as ActixResult};
 use futures::future::try_join_all;
+use log::trace;
 use martin_tile_utils::{Encoding, Format, TileInfo};
 use serde::Deserialize;
 
 use crate::source::{Source, TileSources, UrlQuery};
 use crate::srv::server::map_internal_error;
-use crate::utils::{decode_brotli, decode_gzip, encode_brotli, encode_gzip};
-use crate::{Tile, TileCoord};
+use crate::utils::cache::get_or_insert_cached_value;
+use crate::utils::{
+    decode_brotli, decode_gzip, encode_brotli, encode_gzip, CacheKey, CacheValue, MainCache,
+    OptMainCache,
+};
+use crate::{Tile, TileCoord, TileData};
 
 static SUPPORTED_ENCODINGS: &[HeaderEnc] = &[
     HeaderEnc::brotli(),
@@ -33,125 +38,165 @@ async fn get_tile(
     req: HttpRequest,
     path: Path<TileRequest>,
     sources: Data<TileSources>,
+    cache: Data<OptMainCache>,
 ) -> ActixResult<HttpResponse> {
-    let xyz = TileCoord {
-        z: path.z,
-        x: path.x,
-        y: path.y,
-    };
-
-    let source_ids = &path.source_ids;
-    let query = req.query_string();
-    let encodings = req.get_header::<AcceptEncoding>();
-
-    get_tile_response(sources.as_ref(), xyz, source_ids, query, encodings).await
-}
-
-pub async fn get_tile_response(
-    sources: &TileSources,
-    xyz: TileCoord,
-    source_ids: &str,
-    query: &str,
-    encodings: Option<AcceptEncoding>,
-) -> ActixResult<HttpResponse> {
-    let (sources, use_url_query, info) = sources.get_sources(source_ids, Some(xyz.z))?;
-
-    let query = use_url_query.then_some(query);
-    let tile = get_tile_content(sources.as_slice(), info, xyz, query, encodings.as_ref()).await?;
-
-    Ok(if tile.data.is_empty() {
-        HttpResponse::NoContent().finish()
-    } else {
-        let mut response = HttpResponse::Ok();
-        response.content_type(tile.info.format.content_type());
-        if let Some(val) = tile.info.encoding.content_encoding() {
-            response.insert_header((CONTENT_ENCODING, val));
-        }
-        response.body(tile.data)
-    })
-}
-
-pub async fn get_tile_content(
-    sources: &[&dyn Source],
-    info: TileInfo,
-    xyz: TileCoord,
-    query: Option<&str>,
-    encodings: Option<&AcceptEncoding>,
-) -> ActixResult<Tile> {
-    if sources.is_empty() {
-        return Err(ErrorNotFound("No valid sources found"));
-    }
-    let query_str = query.filter(|v| !v.is_empty());
-    let query = match query_str {
-        Some(v) => Some(Query::<UrlQuery>::from_query(v)?.into_inner()),
-        None => None,
-    };
-
-    let mut tiles = try_join_all(sources.iter().map(|s| s.get_tile(xyz, query.as_ref())))
-        .await
-        .map_err(map_internal_error)?;
-
-    let mut layer_count = 0;
-    let mut last_non_empty_layer = 0;
-    for (idx, tile) in tiles.iter().enumerate() {
-        if !tile.is_empty() {
-            layer_count += 1;
-            last_non_empty_layer = idx;
-        }
-    }
-
-    // Minor optimization to prevent concatenation if there are less than 2 tiles
-    let data = match layer_count {
-        1 => tiles.swap_remove(last_non_empty_layer),
-        0 => return Ok(Tile::new(Vec::new(), info)),
-        _ => {
-            // Make sure tiles can be concatenated, or if not, that there is only one non-empty tile for each zoom level
-            // TODO: can zlib, brotli, or zstd be concatenated?
-            // TODO: implement decompression step for other concatenate-able formats
-            let can_join = info.format == Format::Mvt
-                && (info.encoding == Encoding::Uncompressed || info.encoding == Encoding::Gzip);
-            if !can_join {
-                return Err(ErrorBadRequest(format!(
-                    "Can't merge {info} tiles. Make sure there is only one non-empty tile source at zoom level {}",
-                    xyz.z
-                )))?;
-            }
-            tiles.concat()
-        }
-    };
-
-    // decide if (re-)encoding of the tile data is needed, and recompress if so
-    let tile = recompress(Tile::new(data, info), encodings)?;
-
-    Ok(tile)
-}
-
-fn recompress(mut tile: Tile, accept_enc: Option<&AcceptEncoding>) -> ActixResult<Tile> {
-    if let Some(accept_enc) = accept_enc {
-        if tile.info.encoding.is_encoded() {
-            // already compressed, see if we can send it as is, or need to re-compress
-            if !accept_enc.iter().any(|e| {
-                if let Preference::Specific(HeaderEnc::Known(enc)) = e.item {
-                    to_encoding(enc) == Some(tile.info.encoding)
-                } else {
-                    false
-                }
-            }) {
-                // need to re-compress the tile - uncompress it first
-                tile = decode(tile)?;
-            }
-        }
-        if tile.info.encoding == Encoding::Uncompressed {
-            // only apply compression if the content supports it
-            if let Some(HeaderEnc::Known(enc)) = accept_enc.negotiate(SUPPORTED_ENCODINGS.iter()) {
-                // (re-)compress the tile into the preferred encoding
-                tile = encode(tile, enc)?;
-            }
-        }
-        Ok(tile)
-    } else {
-        // no accepted-encoding header, decode the tile if compressed
-        decode(tile)
-    }
-}
+    let src = DynTileSource::new(
+        sources.as_ref(),
+        &path.source_ids,
+        Some(path.z),
+        req.query_string(),
+        req.get_header::<AcceptEncoding>(),
+        cache.as_ref().as_ref(),
+    )?;
+
+    src.get_http_response(TileCoord {
+        z: path.z,
+        x: path.x,
+        y: path.y,
+    })
+    .await
+}
+
+pub struct DynTileSource<'a> {
+    pub sources: Vec<&'a dyn Source>,
+    pub info: TileInfo,
+    pub query_str: Option<&'a str>,
+    pub query_obj: Option<UrlQuery>,
+    pub encodings: Option<AcceptEncoding>,
+    pub cache: Option<&'a MainCache>,
+}
+
+impl<'a> DynTileSource<'a> {
+    pub fn new(
+        sources: &'a TileSources,
+        source_ids: &str,
+        zoom: Option<u8>,
+        query: &'a str,
+        encodings: Option<AcceptEncoding>,
+        cache: Option<&'a MainCache>,
+    ) -> ActixResult<Self> {
+        let (sources, use_url_query, info) = sources.get_sources(source_ids, zoom)?;
+
+        if sources.is_empty() {
+            return Err(ErrorNotFound("No valid sources found"));
+        }
+
+        let mut query_obj = None;
+        let mut query_str = None;
+        if use_url_query && !query.is_empty() {
+            query_obj = Some(Query::<UrlQuery>::from_query(query)?.into_inner());
+            query_str = Some(query);
+        }
+
+        Ok(Self {
+            sources,
+            info,
+            query_str,
+            query_obj,
+            encodings,
+            cache,
+        })
+    }
+
+    pub async fn get_http_response(&self, xyz: TileCoord) -> ActixResult<HttpResponse> {
+        let tile = self.get_tile_content(xyz).await?;
+
+        Ok(if tile.data.is_empty() {
+            HttpResponse::NoContent().finish()
+        } else {
+            let mut response = HttpResponse::Ok();
+            response.content_type(tile.info.format.content_type());
+            if let Some(val) = tile.info.encoding.content_encoding() {
+                response.insert_header((CONTENT_ENCODING, val));
+            }
+            response.body(tile.data)
+        })
+    }
+
+    pub async fn get_tile_content(&self, xyz: TileCoord) -> ActixResult<Tile> {
+        let mut tiles = try_join_all(self.sources.iter().map(|s| async {
+            get_or_insert_cached_value!(
+                self.cache,
+                CacheValue::Tile,
+                s.get_tile(xyz, self.query_obj.as_ref()),
+                {
+                    let id = s.get_id().to_owned();
+                    if let Some(query_str) = self.query_str {
+                        CacheKey::TileWithQuery(id, xyz, query_str.to_owned())
+                    } else {
+                        CacheKey::Tile(id, xyz)
+                    }
+                }
+            )
+        }))
+        .await
+        .map_err(map_internal_error)?;
+
+        let mut layer_count = 0;
+        let mut last_non_empty_layer = 0;
+        for (idx, tile) in tiles.iter().enumerate() {
+            if !tile.is_empty() {
+                layer_count += 1;
+                last_non_empty_layer = idx;
+            }
+        }
+
+        // Minor optimization to prevent concatenation if there are less than 2 tiles
+        let data = match layer_count {
+            1 => tiles.swap_remove(last_non_empty_layer),
+            0 => return Ok(Tile::new(Vec::new(), self.info)),
+            _ => {
+                // Make sure tiles can be concatenated, or if not, that there is only one non-empty tile for each zoom level
+                // TODO: can zlib, brotli, or zstd be concatenated?
+                // TODO: implement decompression step for other concatenate-able formats
+                let can_join = self.info.format == Format::Mvt
+                    && (self.info.encoding == Encoding::Uncompressed
+                        || self.info.encoding == Encoding::Gzip);
+                if !can_join {
+                    return Err(ErrorBadRequest(format!(
+                        "Can't merge {} tiles. Make sure there is only one non-empty tile source at zoom level {}",
+                        self.info,
+                        xyz.z
+                    )))?;
+                }
+                tiles.concat()
+            }
+        };
+
+        // decide if (re-)encoding of the tile data is needed, and recompress if so
+        self.recompress(data)
+    }
+
+    fn recompress(&self, tile: TileData) -> ActixResult<Tile> {
+        let mut tile = Tile::new(tile, self.info);
+        if let Some(accept_enc) = &self.encodings {
+            if self.info.encoding.is_encoded() {
+                // already compressed, see if we can send it as is, or need to re-compress
+                if !accept_enc.iter().any(|e| {
+                    if let Preference::Specific(HeaderEnc::Known(enc)) = e.item {
+                        to_encoding(enc) == Some(tile.info.encoding)
+                    } else {
+                        false
+                    }
+                }) {
+                    // need to re-compress the tile - uncompress it first
+                    tile = decode(tile)?;
+                }
+            }
+            if tile.info.encoding == Encoding::Uncompressed {
+                // only apply compression if the content supports it
+                if let Some(HeaderEnc::Known(enc)) =
+                    accept_enc.negotiate(SUPPORTED_ENCODINGS.iter())
+                {
+                    // (re-)compress the tile into the preferred encoding
+                    tile = encode(tile, enc)?;
+                }
+            }
+            Ok(tile)
+        } else {
+            // no accepted-encoding header, decode the tile if compressed
+            decode(tile)
+        }
+    }
 
@@ -189,7 +234,7 @@ fn decode(tile: Tile) -> ActixResult<Tile> {
     })
 }
 
-fn to_encoding(val: ContentEncoding) -> Option<Encoding> {
+pub fn to_encoding(val: ContentEncoding) -> Option<Encoding> {
     Some(match val {
         ContentEncoding::Identity => Encoding::Uncompressed,
         ContentEncoding::Gzip => Encoding::Gzip,
@@ -233,15 +278,9 @@ mod tests {
             ("empty,non-empty", vec![1_u8, 2, 3]),
             ("empty,non-empty,empty", vec![1_u8, 2, 3]),
         ] {
-            let (src, _, info) = sources.get_sources(source_id, None).unwrap();
+            let src = DynTileSource::new(&sources, source_id, None, "", None, None).unwrap();
             let xyz = TileCoord { z: 0, x: 0, y: 0 };
-            assert_eq!(
-                expected,
-                &get_tile_content(src.as_slice(), info, xyz, None, None)
-                    .await
-                    .unwrap()
-                    .data
-            );
+            assert_eq!(expected, &src.get_tile_content(xyz).await.unwrap().data);
         }
     }
 }

martin/src/srv/tiles_info.rs: 0 changed lines (file mode changed from normal to executable)
martin/src/utils/cache.rs: 91 lines (new executable file)
@ -0,0 +1,91 @@
+use moka::future::Cache;
+use pmtiles::Directory;
+
+use crate::{TileCoord, TileData};
+
+pub type MainCache = Cache<CacheKey, CacheValue>;
+pub type OptMainCache = Option<MainCache>;
+pub const NO_MAIN_CACHE: OptMainCache = None;
+
+#[derive(Debug, Hash, PartialEq, Eq)]
+pub enum CacheKey {
+    /// (pmtiles_id, offset)
+    PmtDirectory(usize, usize),
+    /// (source_id, xyz)
+    Tile(String, TileCoord),
+    /// (source_id, xyz, url_query)
+    TileWithQuery(String, TileCoord, String),
+}
+
+#[derive(Debug, Clone)]
+pub enum CacheValue {
+    Tile(TileData),
+    PmtDirectory(Directory),
+}
+
+macro_rules! trace_cache {
+    ($typ: literal, $cache: expr, $key: expr) => {
+        trace!(
+            "Cache {} for {:?} in {:?} that has {} entries taking up {} space",
+            $typ,
+            $key,
+            $cache.name(),
+            $cache.entry_count(),
+            $cache.weighted_size(),
+        );
+    };
+}
+
+macro_rules! from_cache_value {
+    ($value_type: path, $data: expr, $key: expr) => {
+        if let $value_type(data) = $data {
+            data
+        } else {
+            panic!("Unexpected value type {:?} for key {:?} cache", $data, $key)
+        }
+    };
+}
+#[cfg(feature = "pmtiles")]
+macro_rules! get_cached_value {
+    ($cache: expr, $value_type: path, $make_key: expr) => {
+        if let Some(cache) = $cache {
+            let key = $make_key;
+            if let Some(data) = cache.get(&key).await {
+                $crate::utils::cache::trace_cache!("HIT", cache, key);
+                Some($crate::utils::cache::from_cache_value!(
+                    $value_type,
+                    data,
+                    key
+                ))
+            } else {
+                $crate::utils::cache::trace_cache!("MISS", cache, key);
+                None
+            }
+        } else {
+            None
+        }
+    };
+}
+
+macro_rules! get_or_insert_cached_value {
+    ($cache: expr, $value_type: path, $make_item:expr, $make_key: expr) => {{
+        if let Some(cache) = $cache {
+            let key = $make_key;
+            Ok(if let Some(data) = cache.get(&key).await {
+                $crate::utils::cache::trace_cache!("HIT", cache, key);
+                $crate::utils::cache::from_cache_value!($value_type, data, key)
+            } else {
+                $crate::utils::cache::trace_cache!("MISS", cache, key);
+                let data = $make_item.await?;
+                cache.insert(key, $value_type(data.clone())).await;
+                data
+            })
+        } else {
+            $make_item.await
+        }
+    }};
+}
+
+#[cfg(feature = "pmtiles")]
+pub(crate) use get_cached_value;
+pub(crate) use {from_cache_value, get_or_insert_cached_value, trace_cache};
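The new module above only defines the cache key/value types and the hit/miss lookup macros; the cache itself is built elsewhere from the `cache_size_mb` setting. The snippet below is a minimal, self-contained sketch (not the exact code from this commit) of how a size-bounded tile cache can be built and queried with moka's async `Cache`; the `TileKey` type, the weigher, and the MB-to-bytes conversion are illustrative assumptions.

```rust
use moka::future::Cache;

// Illustrative key type; Martin's real key is the CacheKey enum shown above.
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
struct TileKey {
    source_id: String,
    z: u8,
    x: u32,
    y: u32,
}

#[tokio::main]
async fn main() {
    // Assumed conversion of the `cache_size_mb` setting into a byte budget.
    let cache_size_mb: u64 = 512;
    let cache: Cache<TileKey, Vec<u8>> = Cache::builder()
        // Weigh entries by tile size so max_capacity acts as a memory bound,
        // not an entry count.
        .weigher(|_key, tile: &Vec<u8>| tile.len().try_into().unwrap_or(u32::MAX))
        .max_capacity(cache_size_mb * 1024 * 1024)
        .build();

    let key = TileKey { source_id: "table1".into(), z: 0, x: 0, y: 0 };

    // Get-or-insert: on a miss, produce the tile and store it; on a hit, reuse the bytes.
    let tile = match cache.get(&key).await {
        Some(data) => data,
        None => {
            let data = vec![0_u8; 1024]; // stand-in for rendering or fetching a real tile
            cache.insert(key.clone(), data.clone()).await;
            data
        }
    };
    assert_eq!(tile.len(), 1024);
}
```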
@@ -1,3 +1,6 @@
+pub(crate) mod cache;
+pub use cache::{CacheKey, CacheValue, MainCache, OptMainCache, NO_MAIN_CACHE};
+
 mod cfg_containers;
 pub use cfg_containers::{OptBoolObj, OptOneMany};
 
@@ -1,6 +1,6 @@
 use std::fmt::{Display, Formatter};
 
-#[derive(Debug, Copy, Clone)]
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
 pub struct TileCoord {
     pub z: u8,
     pub x: u32,
@@ -22,6 +22,7 @@ macro_rules! create_app {
                 .app_data(actix_web::web::Data::new(
                     ::martin::srv::Catalog::new(&state).unwrap(),
                 ))
+                .app_data(actix_web::web::Data::new(::martin::NO_MAIN_CACHE))
                 .app_data(actix_web::web::Data::new(state.tiles))
                 .configure(::martin::srv::router),
         )
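In these test macros the cache is registered as actix-web app data, always as `NO_MAIN_CACHE` (`None`) since the tests bypass caching. Below is a hypothetical, self-contained sketch of the same pattern outside Martin: an optional moka cache stored via `Data::new(...)` and read back in a handler. The route, key/value types, and handler are illustrative, not Martin's actual API.

```rust
use actix_web::{get, web, App, HttpResponse, HttpServer, Responder};
use moka::future::Cache;

// Illustrative stand-in for Martin's OptMainCache.
type OptCache = Option<Cache<String, Vec<u8>>>;

#[get("/cache-status")]
async fn cache_status(cache: web::Data<OptCache>) -> impl Responder {
    // The handler sees whatever value was registered with .app_data(Data::new(...)).
    match cache.get_ref() {
        Some(c) => HttpResponse::Ok().body(format!("cache enabled: {} entries", c.entry_count())),
        None => HttpResponse::Ok().body("cache disabled"),
    }
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    // Passing None mirrors registering NO_MAIN_CACHE in the tests above.
    let cache: OptCache = None;
    HttpServer::new(move || {
        App::new()
            .app_data(web::Data::new(cache.clone()))
            .service(cache_status)
    })
    .bind(("127.0.0.1", 3000))?
    .run()
    .await
}
```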
@@ -26,6 +26,7 @@ macro_rules! create_app {
                 .app_data(actix_web::web::Data::new(
                     ::martin::srv::Catalog::new(&state).unwrap(),
                 ))
+                .app_data(actix_web::web::Data::new(::martin::NO_MAIN_CACHE))
                 .app_data(actix_web::web::Data::new(state.tiles))
                 .configure(::martin::srv::router),
         )
@@ -1086,6 +1087,7 @@ tables:
                 .app_data(actix_web::web::Data::new(
                     ::martin::srv::Catalog::new(&state).unwrap(),
                 ))
+                .app_data(actix_web::web::Data::new(::martin::NO_MAIN_CACHE))
                 .app_data(actix_web::web::Data::new(state.tiles))
                 .configure(::martin::srv::router),
         )
@@ -22,6 +22,7 @@ macro_rules! create_app {
                 .app_data(actix_web::web::Data::new(
                     ::martin::srv::Catalog::new(&state).unwrap(),
                 ))
+                .app_data(actix_web::web::Data::new(::martin::NO_MAIN_CACHE))
                 .app_data(actix_web::web::Data::new(state.tiles))
                 .configure(::martin::srv::router),
         )
@@ -1,6 +1,6 @@
 use indoc::formatdoc;
 pub use martin::args::Env;
-use martin::{Config, IdResolver, ServerState, Source};
+use martin::{Config, ServerState, Source};
 
 use crate::mock_cfg;
 
@@ -22,7 +22,7 @@ pub fn mock_pgcfg(yaml: &str) -> Config {
 
 #[allow(dead_code)]
 pub async fn mock_sources(mut config: Config) -> MockSource {
-    let res = config.resolve(IdResolver::default()).await;
+    let res = config.resolve().await;
     let res = res.unwrap_or_else(|e| panic!("Failed to resolve config {config:?}: {e}"));
     (res, config)
 }
@@ -2,7 +2,7 @@ lints.workspace = true
 
 [package]
 name = "mbtiles"
-version = "0.9.0"
+version = "0.9.1"
 authors = ["Yuri Astrakhan <YuriAstrakhan@gmail.com>", "MapLibre contributors"]
 description = "A simple low-level MbTiles access and processing library, with some tile format detection and other relevant heuristics."
 keywords = ["mbtiles", "maps", "tiles", "mvt", "tilejson"]
@@ -8,6 +8,9 @@ listen_addresses: '0.0.0.0:3000'
 # Number of web server workers
 worker_processes: 8
 
+# Amount of memory (in MB) to use for caching tiles [default: 512, 0 to disable]
+cache_size_mb: 8
+
 # Database configuration. This can also be a list of PG configs.
 postgres:
   # Database connection string
@@ -166,7 +169,6 @@ postgres:
 
 
 pmtiles:
-  dir_cache_size_mb: 100
   paths:
     - http://localhost:5412/webp2.pmtiles
   sources:
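Taken together, the config changes above replace the PMTiles-specific `dir_cache_size_mb` with a single top-level `cache_size_mb` covering both tiles and PMTiles directories. A minimal configuration using the new knob could look like the following sketch (the 512 MB value is illustrative; 0 disables the cache, and a leftover `dir_cache_size_mb` is ignored with a warning):

```yaml
# Shared cache for tiles and PMTiles directories, in MB (0 disables caching)
cache_size_mb: 512

pmtiles:
  # dir_cache_size_mb is no longer read from this section
  paths:
    - http://localhost:5412/webp2.pmtiles
```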
@@ -1,3 +1,4 @@
+cache_size_mb: 8
 keep_alive: 75
 listen_addresses: localhost:3111
 worker_processes: 1
@@ -165,7 +166,6 @@ pmtiles:
     pmt: tests/fixtures/pmtiles/stamen_toner__raster_CC-BY+ODbL_z3.pmtiles
     pmt2: http://localhost:5412/webp2.pmtiles
     webp2: http://localhost:5412/webp2.pmtiles
-  dir_cache_size_mb: 100
 sprites:
   paths: tests/fixtures/sprites/src1
   sources: