fix (re-)compression of images (#579)

The compression middleware turned out to be hard to use for image cases: it
only looks at the content-encoding header, and if that is not set, it tries to
compress the response whenever the client accepts compression.

Instead, individual routes are now configured explicitly: non-tile routes keep
that middleware, while tile routes decompress the tile and optionally
recompress it when applicable.

The encoding is now tracked separately from the tile content, which makes the
code cleaner too. Also adds lots of tests for MBTiles & PMTiles.
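
Roughly, the per-tile decision works like the sketch below. This is an
illustrative, self-contained outline only: the Enc enum and the
serve/negotiate/compress/decompress names are stand-ins (the compression
helpers are stubbed), not the functions added by this commit; the real logic
lives in recompress()/encode()/decode() in the server code further down.

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Enc {
    Uncompressed,
    Internal, // PNG/JPEG/WebP/GIF: compressed by the image format itself
    Gzip,
    Brotli,
}

/// Decide how to serve a tile given its stored encoding and what the client accepts.
fn serve(tile: Vec<u8>, enc: Enc, client_accepts: &[Enc]) -> (Vec<u8>, Enc) {
    match enc {
        // Image tiles are passed through untouched; re-compressing them wastes CPU.
        Enc::Internal => (tile, enc),
        // Already compressed: pass through if acceptable, otherwise decompress first.
        Enc::Gzip | Enc::Brotli if client_accepts.contains(&enc) => (tile, enc),
        Enc::Gzip | Enc::Brotli => negotiate(decompress(tile, enc), client_accepts),
        Enc::Uncompressed => negotiate(tile, client_accepts),
    }
}

/// Re-compress into the best encoding the client accepts (brotli preferred, then gzip).
fn negotiate(tile: Vec<u8>, client_accepts: &[Enc]) -> (Vec<u8>, Enc) {
    for enc in [Enc::Brotli, Enc::Gzip] {
        if client_accepts.contains(&enc) {
            return (compress(tile, enc), enc);
        }
    }
    (tile, Enc::Uncompressed)
}

// Stubs standing in for the flate2/brotli based helpers added in utils.rs.
fn decompress(data: Vec<u8>, _enc: Enc) -> Vec<u8> { data }
fn compress(data: Vec<u8>, _enc: Enc) -> Vec<u8> { data }

fn main() {
    // A gzip-stored MVT tile served to a client that only accepts brotli
    // is decompressed and re-compressed as brotli.
    let (tile, enc) = serve(vec![0x1f, 0x8b], Enc::Gzip, &[Enc::Brotli]);
    println!("serving {} bytes as {enc:?}", tile.len());
    // An image tile is returned as-is, with no content-encoding applied.
    let (_img, img_enc) = serve(vec![0x89, 0x50], Enc::Internal, &[Enc::Gzip]);
    assert_eq!(img_enc, Enc::Internal);
}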

Fixes #577
Yuri Astrakhan 2023-02-20 10:44:22 -05:00 committed by GitHub
parent 1f0ca167bc
commit 3fcad46500
32 changed files with 841 additions and 231 deletions

19
Cargo.lock generated

@ -1160,9 +1160,9 @@ dependencies = [
[[package]]
name = "http"
version = "0.2.8"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75f43d41e26995c17e71ee126451dd3941010b0514a81a9d11f3b341debc2399"
checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482"
dependencies = [
"bytes",
"fnv",
@ -1363,7 +1363,7 @@ dependencies = [
[[package]]
name = "martin"
version = "0.7.0"
version = "0.7.1"
dependencies = [
"actix",
"actix-cors",
@ -1371,6 +1371,7 @@ dependencies = [
"actix-rt",
"actix-web",
"async-trait",
"brotli",
"cargo-husky",
"clap 4.1.6",
"criterion",
@ -2221,9 +2222,9 @@ checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de"
[[package]]
name = "slab"
version = "0.4.7"
version = "0.4.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4614a76b2a8be0058caa9dbbaf66d988527d86d003c11a94fbd335d7661edcef"
checksum = "6528351c9bc8ab22353f9d776db39a20288e8d6c37ef8cfe3317cf875eecfc2d"
dependencies = [
"autocfg",
]
@ -2482,9 +2483,9 @@ dependencies = [
[[package]]
name = "time"
version = "0.3.18"
version = "0.3.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af0097eaf301d576d0b2aead7a59facab6d53cc636340f0291fab8446a2e8613"
checksum = "53250a3b3fed8ff8fd988587d8925d26a83ac3845d9e03b220b37f34c2b8d6c2"
dependencies = [
"itoa",
"serde",
@ -2500,9 +2501,9 @@ checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd"
[[package]]
name = "time-macros"
version = "0.2.6"
version = "0.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d967f99f534ca7e495c575c62638eebc2898a8c84c119b89e250477bc4ba16b2"
checksum = "a460aeb8de6dcb0f381e1ee05f1cd56fcf5a5f6eb8187ff3d8f0b11078d38b7c"
dependencies = [
"time-core",
]


@ -8,7 +8,7 @@ opt-level = 3
[package]
name = "martin"
version = "0.7.0"
version = "0.7.1"
edition = "2021"
authors = ["Stepan Kuzmin <to.stepan.kuzmin@gmail.com>", "Yuri Astrakhan <YuriAstrakhan@gmail.com>", "MapLibre contributors"]
description = "Blazing fast and lightweight tile server with PostGIS, MBTiles, and PMTiles support"
@ -36,6 +36,7 @@ actix-http = "3"
actix-rt = "2"
actix-web = "4"
async-trait = "0.1"
brotli = "3"
clap = { version = "4", features = ["derive"] }
deadpool-postgres = "0.10"
env_logger = "0.10"


@ -4,7 +4,7 @@
![Security audit](https://github.com/maplibre/martin/workflows/Security%20audit/badge.svg)
[![Docker pulls](https://img.shields.io/docker/pulls/maplibre/martin.svg)](https://hub.docker.com/r/maplibre/martin)
Martin is a tile server able to generate [vector tiles](https://github.com/mapbox/vector-tile-spec) from the large [PostGIS](https://github.com/postgis/postgis) databases on the fly, or serve tiles from [PMTile](https://protomaps.com/blog/pmtiles-v3-whats-new) and [MBTile](https://github.com/mapbox/mbtiles-spec) files. Martin optimizes for speed and heavy traffic, and is written in [Rust](https://github.com/rust-lang/rust).
Martin is a tile server able to generate [vector tiles](https://github.com/mapbox/vector-tile-spec) from large [PostGIS](https://github.com/postgis/postgis) databases on the fly, or serve tiles from [PMTile](https://protomaps.com/blog/pmtiles-v3-whats-new) and [MBTile](https://github.com/mapbox/mbtiles-spec) files. Martin optimizes for speed and heavy traffic, and is written in [Rust](https://github.com/rust-lang/rust).
![Martin](https://raw.githubusercontent.com/maplibre/martin/main/logo.png)


@ -4,12 +4,12 @@ extern crate core;
use std::ffi::OsStr;
use std::fmt::Display;
use std::path::Path;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use futures::TryStreamExt;
use log::{debug, warn};
use martin_tile_utils::DataFormat;
use log::{debug, info, warn};
use martin_tile_utils::{Format, TileInfo};
use serde_json::{Value as JSONValue, Value};
use sqlx::pool::PoolConnection;
use sqlx::sqlite::SqlitePool;
@ -21,8 +21,11 @@ pub enum MbtError {
#[error("SQL Error {0}")]
SqlError(#[from] sqlx::Error),
#[error(r"Inconsistent tile formats detected: {0:?} vs {1:?}")]
InconsistentMetadata(DataFormat, DataFormat),
#[error(r"MBTile filepath contains unsupported characters: {}", .0.display())]
UnsupportedCharsInFilepath(PathBuf),
#[error(r"Inconsistent tile formats detected: {0} vs {1}")]
InconsistentMetadata(TileInfo, TileInfo),
#[error("No tiles found")]
NoTilesFound,
@ -39,17 +42,21 @@ pub struct Mbtiles {
#[derive(Clone, Debug, PartialEq)]
pub struct Metadata {
pub id: String,
pub tile_format: DataFormat,
pub tile_info: TileInfo,
pub layer_type: Option<String>,
pub tilejson: TileJSON,
pub json: Option<JSONValue>,
}
impl Mbtiles {
pub async fn new(file: &Path) -> MbtResult<Self> {
// TODO: introduce a new error type for invalid file, instead of using lossy
let pool = SqlitePool::connect(&file.to_string_lossy()).await?;
let filename = file
pub async fn new<P: AsRef<Path>>(filepath: P) -> MbtResult<Self> {
let file = filepath
.as_ref()
.to_str()
.ok_or_else(|| MbtError::UnsupportedCharsInFilepath(filepath.as_ref().to_path_buf()))?;
let pool = SqlitePool::connect(file).await?;
let filename = filepath
.as_ref()
.file_stem()
.unwrap_or_else(|| OsStr::new("unknown"))
.to_string_lossy()
@ -75,7 +82,7 @@ impl Mbtiles {
Ok(Metadata {
id: self.filename.to_string(),
tile_format: self.detect_format(&tj, &mut conn).await?,
tile_info: self.detect_format(&tj, &mut conn).await?,
tilejson: tj,
layer_type,
json,
@ -140,15 +147,15 @@ impl Mbtiles {
&self,
tilejson: &TileJSON,
conn: &mut PoolConnection<Sqlite>,
) -> MbtResult<DataFormat> {
let mut format = None;
) -> MbtResult<TileInfo> {
let mut tile_info = None;
let mut tested_zoom = -1_i64;
// First, pick any random tile
let query = query! {"SELECT zoom_level, tile_column, tile_row, tile_data FROM tiles WHERE zoom_level >= 0 LIMIT 1"};
let row = query.fetch_optional(&mut *conn).await?;
if let Some(r) = row {
format = self.parse_tile(r.zoom_level, r.tile_column, r.tile_row, r.tile_data);
tile_info = self.parse_tile(r.zoom_level, r.tile_column, r.tile_row, r.tile_data);
tested_zoom = r.zoom_level.unwrap_or(-1);
}
@ -161,11 +168,11 @@ impl Mbtiles {
let row = query.fetch_optional(&mut *conn).await?;
if let Some(r) = row {
match (
format,
tile_info,
self.parse_tile(Some(z.into()), r.tile_column, r.tile_row, r.tile_data),
) {
(_, None) => {}
(None, new) => format = new,
(None, new) => tile_info = new,
(Some(old), Some(new)) if old == new => {}
(Some(old), Some(new)) => {
return Err(MbtError::InconsistentMetadata(old, new));
@ -174,32 +181,37 @@ impl Mbtiles {
}
}
if let Some(Value::String(tj_fmt)) = tilejson.other.get("format") {
match (format, DataFormat::parse(tj_fmt)) {
if let Some(Value::String(fmt)) = tilejson.other.get("format") {
let file = &self.filename;
match (tile_info, Format::parse(fmt)) {
(_, None) => {
warn!("Unknown format value in metadata: {tj_fmt}");
warn!("Unknown format value in metadata: {fmt}");
}
(None, Some(new)) => {
warn!("Unable to detect tile format, will use metadata.format '{new:?}' for file {}", self.filename);
format = Some(new);
(None, Some(fmt)) => {
if fmt.is_detectable() {
warn!("Metadata table sets detectable '{fmt}' tile format, but it could not be verified for file {file}");
} else {
info!("Using '{fmt}' tile format from metadata table in file {file}");
}
tile_info = Some(fmt.into());
}
(Some(old), Some(new)) if old == new || (old.is_mvt() && new.is_mvt()) => {
debug!("Detected tile format {old:?} matches metadata.format '{tj_fmt}' in file {}", self.filename);
(Some(info), Some(fmt)) if info.format == fmt => {
debug!("Detected tile format {info} matches metadata.format '{fmt}' in file {file}");
}
(Some(old), _) => {
warn!("Found inconsistency: metadata.format='{tj_fmt}', but tiles were detected as {old:?} in file {}. Tiles will be returned as {old:?}.", self.filename);
(Some(info), _) => {
warn!("Found inconsistency: metadata.format='{fmt}', but tiles were detected as {info:?} in file {file}. Tiles will be returned as {info:?}.");
}
}
}
if let Some(format) = format {
if !format.is_mvt() && tilejson.vector_layers.is_some() {
if let Some(info) = tile_info {
if info.format != Format::Mvt && tilejson.vector_layers.is_some() {
warn!(
"{} has vector_layers metadata but non-vector tiles",
self.filename
);
}
Ok(format)
Ok(info)
} else {
Err(MbtError::NoTilesFound)
}
@ -211,17 +223,17 @@ impl Mbtiles {
x: Option<i64>,
y: Option<i64>,
tile: Option<Vec<u8>>,
) -> Option<DataFormat> {
) -> Option<TileInfo> {
if let (Some(z), Some(x), Some(y), Some(tile)) = (z, x, y, tile) {
let fmt = DataFormat::detect(&tile);
if let Some(format) = fmt {
let info = TileInfo::detect(&tile);
if let Some(info) = info {
debug!(
"Tile {z}/{x}/{} is detected as {format:?} in file {}",
"Tile {z}/{x}/{} is detected as {info} in file {}",
(1 << z) - 1 - y,
self.filename,
);
}
fmt
info
} else {
None
}
@ -245,13 +257,17 @@ impl Mbtiles {
mod tests {
use std::collections::HashMap;
use martin_tile_utils::Encoding;
use tilejson::VectorLayer;
use super::*;
#[actix_rt::test]
async fn test_metadata_jpeg() {
let mbt = Mbtiles::new(Path::new("../tests/fixtures/geography-class-jpg.mbtiles")).await;
async fn metadata_jpeg() {
let mbt = Mbtiles::new(Path::new(
"../tests/fixtures/files/geography-class-jpg.mbtiles",
))
.await;
let mbt = mbt.unwrap();
let metadata = mbt.get_metadata().await.unwrap();
let tj = metadata.tilejson;
@ -264,12 +280,12 @@ mod tests {
assert_eq!(tj.template.unwrap(),"{{#__location__}}{{/__location__}}{{#__teaser__}}<div style=\"text-align:center;\">\n\n<img src=\"data:image/png;base64,{{flag_png}}\" style=\"-moz-box-shadow:0px 1px 3px #222;-webkit-box-shadow:0px 1px 5px #222;box-shadow:0px 1px 3px #222;\"><br>\n<strong>{{admin}}</strong>\n\n</div>{{/__teaser__}}{{#__full__}}{{/__full__}}");
assert_eq!(tj.version.unwrap(), "1.0.0");
assert_eq!(metadata.id, "geography-class-jpg");
assert_eq!(metadata.tile_format, DataFormat::Jpeg);
assert_eq!(metadata.tile_info, Format::Jpeg.into());
}
#[actix_rt::test]
async fn test_metadata_mvt() {
let mbt = Mbtiles::new(Path::new("../tests/fixtures/world_cities.mbtiles")).await;
async fn metadata_mvt() {
let mbt = Mbtiles::new(Path::new("../tests/fixtures/files/world_cities.mbtiles")).await;
let mbt = mbt.unwrap();
let metadata = mbt.get_metadata().await.unwrap();
let tj = metadata.tilejson;
@ -292,7 +308,10 @@ mod tests {
}])
);
assert_eq!(metadata.id, "world_cities");
assert_eq!(metadata.tile_format, DataFormat::GzipMvt);
assert_eq!(
metadata.tile_info,
TileInfo::new(Format::Mvt, Encoding::Gzip)
);
assert_eq!(metadata.layer_type, Some("overlay".to_string()));
}
}


@ -1,85 +1,181 @@
// This code was partially adapted from https://github.com/maplibre/mbtileserver-rs
// project originally written by Kaveh Karimi and licensed under MIT/Apache-2.0
use std::fmt::Display;
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum DataFormat {
Png,
Jpeg,
Webp,
pub enum Format {
Gif,
Jpeg,
Json,
Mvt,
GzipMvt,
ZlibMvt,
BrotliMvt,
ZstdMvt,
Png,
Webp,
}
impl DataFormat {
impl Format {
#[must_use]
pub fn parse(value: &str) -> Option<Self> {
Some(match value.to_ascii_lowercase().as_str() {
"pbf" | "mvt" => Self::Mvt,
"jpg" | "jpeg" => Self::Jpeg,
"png" => Self::Png,
"gif" => Self::Gif,
"jpg" | "jpeg" => Self::Jpeg,
"json" => Self::Json,
"pbf" | "mvt" => Self::Mvt,
"png" => Self::Png,
"webp" => Self::Webp,
_ => None?,
})
}
#[must_use]
pub fn detect(data: &[u8]) -> Option<Self> {
Some(match data {
// Compressed prefixes assume MVT content
v if &v[0..2] == b"\x1f\x8b" => Self::GzipMvt,
v if &v[0..2] == b"\x78\x9c" => Self::ZlibMvt,
v if &v[0..8] == b"\x89\x50\x4E\x47\x0D\x0A\x1A\x0A" => Self::Png,
v if &v[0..6] == b"\x47\x49\x46\x38\x39\x61" => Self::Gif,
v if &v[0..3] == b"\xFF\xD8\xFF" => Self::Jpeg,
v if &v[0..4] == b"RIFF" && &v[8..12] == b"WEBP" => Self::Webp,
v if &v[0..1] == b"{" => Self::Json,
_ => None?,
})
}
#[must_use]
pub fn content_type(&self) -> &str {
match *self {
Self::Png => "image/png",
Self::Jpeg => "image/jpeg",
Self::Gif => "image/gif",
Self::Webp => "image/webp",
Self::Jpeg => "image/jpeg",
Self::Json => "application/json",
Self::Mvt | Self::GzipMvt | Self::ZlibMvt | Self::BrotliMvt | Self::ZstdMvt => {
"application/x-protobuf"
}
Self::Mvt => "application/x-protobuf",
Self::Png => "image/png",
Self::Webp => "image/webp",
}
}
#[must_use]
pub fn is_detectable(&self) -> bool {
match *self {
Self::Png | Self::Jpeg | Self::Gif | Self::Webp => true,
// TODO: Json can be detected, but currently we only detect it
// when it's not compressed, so to avoid a warning, keeping it as false for now.
// Once we can detect it inside compressed data, change it to true.
Self::Mvt | Self::Json => false,
}
}
}
impl Display for Format {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match *self {
Self::Gif => write!(f, "gif"),
Self::Jpeg => write!(f, "jpeg"),
Self::Json => write!(f, "json"),
Self::Mvt => write!(f, "mvt"),
Self::Png => write!(f, "png"),
Self::Webp => write!(f, "webp"),
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum Encoding {
/// Data is not compressed, but it can be
Uncompressed = 0b0000_0000,
/// Some formats like JPEG and PNG are already compressed
Internal = 0b0000_0001,
Gzip = 0b0000_0010,
Zlib = 0b0000_0100,
Brotli = 0b0000_1000,
Zstd = 0b0001_0000,
}
impl Encoding {
#[must_use]
pub fn parse(value: &str) -> Option<Self> {
Some(match value.to_ascii_lowercase().as_str() {
"none" => Self::Uncompressed,
"gzip" => Self::Gzip,
"zlib" => Self::Zlib,
"brotli" => Self::Brotli,
"zstd" => Self::Zstd,
_ => None?,
})
}
#[must_use]
pub fn content_encoding(&self) -> Option<&str> {
// We could also return http::ContentEncoding,
// but it seems like overkill to add a dep for that
match *self {
Self::BrotliMvt => Some("br"),
Self::GzipMvt => Some("gzip"),
Self::ZlibMvt => Some("deflate"),
Self::ZstdMvt => Some("zstd"),
Self::Png | Self::Jpeg | Self::Webp | Self::Gif | Self::Json | Self::Mvt => None,
Self::Uncompressed | Self::Internal => None,
Self::Gzip => Some("gzip"),
Self::Zlib => Some("deflate"),
Self::Brotli => Some("br"),
Self::Zstd => Some("zstd"),
}
}
#[must_use]
pub fn is_mvt(&self) -> bool {
pub fn is_encoded(&self) -> bool {
match *self {
Self::Mvt | Self::GzipMvt | Self::ZlibMvt | Self::BrotliMvt | Self::ZstdMvt => true,
Self::Png | Self::Jpeg | Self::Webp | Self::Gif | Self::Json => false,
Self::Uncompressed | Self::Internal => false,
Self::Gzip | Self::Zlib | Self::Brotli | Self::Zstd => true,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct TileInfo {
pub format: Format,
pub encoding: Encoding,
}
impl TileInfo {
#[must_use]
pub fn new(format: Format, encoding: Encoding) -> Self {
Self { format, encoding }
}
/// Try to figure out the format and encoding of the raw tile data
#[must_use]
#[allow(clippy::enum_glob_use)]
pub fn detect(value: &[u8]) -> Option<Self> {
use Encoding::*;
use Format::*;
// TODO: Make detection slower but more accurate:
// - uncompress gzip/zlib/... and run detection again. If detection fails, assume MVT
// - detect json inside compressed data
// - json should be fully parsed
// - possibly keep the current `detect()` available as a fast path for those who may need it
Some(match value {
// Compressed prefixes assume MVT content
v if v.starts_with(b"\x1f\x8b") => Self::new(Mvt, Gzip),
v if v.starts_with(b"\x78\x9c") => Self::new(Mvt, Zlib),
v if v.starts_with(b"\x89\x50\x4E\x47\x0D\x0A\x1A\x0A") => Self::new(Png, Internal),
v if v.starts_with(b"\x47\x49\x46\x38\x39\x61") => Self::new(Gif, Internal),
v if v.starts_with(b"\xFF\xD8\xFF") => Self::new(Jpeg, Internal),
v if v.starts_with(b"RIFF") && v[8..].starts_with(b"WEBP") => Self::new(Webp, Internal),
v if v.starts_with(b"{") => Self::new(Json, Uncompressed),
_ => None?,
})
}
#[must_use]
pub fn encoding(self, encoding: Encoding) -> Self {
Self { encoding, ..self }
}
}
impl From<Format> for TileInfo {
fn from(format: Format) -> Self {
Self::new(
format,
match format {
Format::Png | Format::Jpeg | Format::Webp | Format::Gif => Encoding::Internal,
Format::Mvt | Format::Json => Encoding::Uncompressed,
},
)
}
}
impl Display for TileInfo {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.format.content_type())?;
if let Some(encoding) = self.encoding.content_encoding() {
write!(f, "; encoding={encoding}")?;
} else if self.encoding != Encoding::Uncompressed {
write!(f, "; uncompressed")?;
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use std::fs::read;
@ -89,24 +185,32 @@ mod tests {
#[test]
fn test_data_format_png() {
assert_eq!(
DataFormat::detect(&read("./fixtures/world.png").unwrap()),
Some(DataFormat::Png)
TileInfo::detect(&read("./fixtures/world.png").unwrap()).unwrap(),
TileInfo::new(Format::Png, Encoding::Internal)
);
}
#[test]
fn test_data_format_jpg() {
assert_eq!(
DataFormat::detect(&read("./fixtures/world.jpg").unwrap()),
Some(DataFormat::Jpeg)
TileInfo::detect(&read("./fixtures/world.jpg").unwrap()).unwrap(),
TileInfo::new(Format::Jpeg, Encoding::Internal)
);
}
#[test]
fn test_data_format_webp() {
assert_eq!(
DataFormat::detect(&read("./fixtures/dc.webp").unwrap()),
Some(DataFormat::Webp)
TileInfo::detect(&read("./fixtures/dc.webp").unwrap()).unwrap(),
TileInfo::new(Format::Webp, Encoding::Internal)
);
}
#[test]
fn test_data_format_json() {
assert_eq!(
TileInfo::detect(br#"{"foo":"bar"}"#).unwrap(),
TileInfo::new(Format::Json, Encoding::Uncompressed)
);
}
}


@ -9,15 +9,15 @@ use serde::{Deserialize, Serialize};
use serde_yaml::Value;
use crate::config::{copy_unrecognized_config, Unrecognized};
use crate::file_config::FileError::{InvalidFilePath, InvalidSourceFilePath};
use crate::file_config::FileError::{InvalidFilePath, InvalidSourceFilePath, IoError};
use crate::utils::sorted_opt_map;
use crate::OneOrMany::{Many, One};
use crate::{Error, IdResolver, OneOrMany, Source, Sources, Xyz};
#[derive(thiserror::Error, Debug)]
pub enum FileError {
#[error("IO Error {0}")]
IoError(#[from] std::io::Error),
#[error("IO error {0}: {}", .1.display())]
IoError(std::io::Error, PathBuf),
#[error("Source path is not a file: {}", .0.display())]
InvalidFilePath(PathBuf),
@ -144,7 +144,8 @@ where
if let Some(sources) = cfg.sources {
for (id, source) in sources {
let can = source.path().canonicalize()?;
let path = source.path();
let can = path.canonicalize().map_err(|e| IoError(e, path.clone()))?;
if !can.is_file() {
// todo: maybe warn instead?
return Err(InvalidSourceFilePath(id.to_string(), can));
@ -170,7 +171,8 @@ where
let dir_files = if is_dir {
// directories will be kept in the config just in case there are new files
directories.push(path.clone());
path.read_dir()?
path.read_dir()
.map_err(|e| IoError(e, path.clone()))?
.filter_map(std::result::Result::ok)
.filter(|f| {
f.path().extension().filter(|e| *e == extension).is_some()
@ -184,7 +186,7 @@ where
return Err(InvalidFilePath(path.canonicalize().unwrap_or(path)));
};
for path in dir_files {
let can = path.canonicalize()?;
let can = path.canonicalize().map_err(|e| IoError(e, path.clone()))?;
if files.contains(&can) {
if !is_dir {
warn!("Ignoring duplicate MBTiles path: {}", can.display());


@ -26,7 +26,7 @@ mod test_utils;
pub use crate::args::Env;
pub use crate::config::{read_config, Config};
pub use crate::source::{IdResolver, Source, Sources, Xyz};
pub use crate::utils::{BoolOrObject, Error, OneOrMany, Result};
pub use crate::utils::{decode_brotli, decode_gzip, BoolOrObject, Error, OneOrMany, Result};
// Ensure README.md contains valid code
#[cfg(doctest)]


@ -5,11 +5,11 @@ use std::sync::Arc;
use async_trait::async_trait;
use martin_mbtiles::Mbtiles;
use martin_tile_utils::DataFormat;
use martin_tile_utils::TileInfo;
use tilejson::TileJSON;
use crate::file_config::FileError;
use crate::file_config::FileError::{GetTileError, InvalidMetadata};
use crate::file_config::FileError::{GetTileError, InvalidMetadata, IoError};
use crate::source::{Tile, UrlQuery};
use crate::utils::is_valid_zoom;
use crate::{Error, Source, Xyz};
@ -19,7 +19,7 @@ pub struct MbtSource {
id: String,
mbtiles: Arc<Mbtiles>,
tilejson: TileJSON,
format: DataFormat,
tile_info: TileInfo,
}
impl Debug for MbtSource {
@ -39,12 +39,15 @@ impl MbtSource {
}
async fn new(id: String, path: PathBuf) -> Result<Self, FileError> {
let mbt = Mbtiles::new(&path).await.map_err(|e| {
io::Error::new(
io::ErrorKind::Other,
format!("{e:?}: Cannot open file {}", path.display()),
)
})?;
let mbt = Mbtiles::new(&path)
.await
.map_err(|e| {
io::Error::new(
io::ErrorKind::Other,
format!("{e:?}: Cannot open file {}", path.display()),
)
})
.map_err(|e| IoError(e, path.clone()))?;
let meta = mbt
.get_metadata()
@ -55,7 +58,7 @@ impl MbtSource {
id,
mbtiles: Arc::new(mbt),
tilejson: meta.tilejson,
format: meta.tile_format,
tile_info: meta.tile_info,
})
}
}
@ -66,8 +69,8 @@ impl Source for MbtSource {
self.tilejson.clone()
}
fn get_format(&self) -> DataFormat {
self.format
fn get_tile_info(&self) -> TileInfo {
self.tile_info
}
fn clone_source(&self) -> Box<dyn Source> {


@ -3,7 +3,9 @@ use std::collections::HashMap;
use async_trait::async_trait;
use deadpool_postgres::tokio_postgres::types::{ToSql, Type};
use log::debug;
use martin_tile_utils::DataFormat;
use martin_tile_utils::Encoding::Uncompressed;
use martin_tile_utils::Format::Mvt;
use martin_tile_utils::TileInfo;
use tilejson::TileJSON;
use crate::pg::pool::PgPool;
@ -38,8 +40,8 @@ impl Source for PgSource {
self.tilejson.clone()
}
fn get_format(&self) -> DataFormat {
DataFormat::Mvt
fn get_tile_info(&self) -> TileInfo {
TileInfo::new(Mvt, Uncompressed)
}
fn clone_source(&self) -> Box<dyn Source> {


@ -5,14 +5,14 @@ use std::sync::Arc;
use async_trait::async_trait;
use log::warn;
use martin_tile_utils::DataFormat;
use martin_tile_utils::{Encoding, Format, TileInfo};
use pmtiles::async_reader::AsyncPmTilesReader;
use pmtiles::mmap::MmapBackend;
use pmtiles::{Compression, TileType};
use tilejson::TileJSON;
use crate::file_config::FileError;
use crate::file_config::FileError::{GetTileError, InvalidMetadata};
use crate::file_config::FileError::{GetTileError, InvalidMetadata, IoError};
use crate::source::{Source, Tile, UrlQuery, Xyz};
use crate::utils::is_valid_zoom;
use crate::Error;
@ -23,7 +23,7 @@ pub struct PmtSource {
path: PathBuf,
pmtiles: Arc<AsyncPmTilesReader<MmapBackend>>,
tilejson: TileJSON,
format: DataFormat,
tile_info: TileInfo,
}
impl Debug for PmtSource {
@ -38,20 +38,25 @@ impl PmtSource {
}
async fn new(id: String, path: PathBuf) -> Result<Self, FileError> {
let backend = MmapBackend::try_from(path.as_path()).await.map_err(|e| {
io::Error::new(
io::ErrorKind::Other,
format!("{e:?}: Cannot open file {}", path.display()),
)
})?;
let backend = MmapBackend::try_from(path.as_path())
.await
.map_err(|e| {
io::Error::new(
io::ErrorKind::Other,
format!("{e:?}: Cannot open file {}", path.display()),
)
})
.map_err(|e| IoError(e, path.clone()))?;
let reader = AsyncPmTilesReader::try_from_source(backend).await;
let reader = reader.map_err(|e| {
io::Error::new(
io::ErrorKind::Other,
format!("{e:?}: Cannot open file {}", path.display()),
)
})?;
let reader = reader
.map_err(|e| {
io::Error::new(
io::ErrorKind::Other,
format!("{e:?}: Cannot open file {}", path.display()),
)
})
.map_err(|e| IoError(e, path.clone()))?;
let hdr = &reader.header;
if hdr.tile_type != TileType::Mvt && hdr.tile_compression != Compression::None {
@ -65,22 +70,25 @@ impl PmtSource {
}
let format = match hdr.tile_type {
TileType::Mvt => match hdr.tile_compression {
Compression::None => DataFormat::Mvt,
Compression::Unknown => {
warn!(
"MVT tiles have unknown compression in file {}",
path.display()
);
DataFormat::Mvt
}
Compression::Gzip => DataFormat::GzipMvt,
Compression::Brotli => DataFormat::BrotliMvt,
Compression::Zstd => DataFormat::ZstdMvt,
},
TileType::Png => DataFormat::Png,
TileType::Jpeg => DataFormat::Jpeg,
TileType::Webp => DataFormat::Webp,
TileType::Mvt => TileInfo::new(
Format::Mvt,
match hdr.tile_compression {
Compression::None => Encoding::Uncompressed,
Compression::Unknown => {
warn!(
"MVT tiles have unknown compression in file {}",
path.display()
);
Encoding::Uncompressed
}
Compression::Gzip => Encoding::Gzip,
Compression::Brotli => Encoding::Brotli,
Compression::Zstd => Encoding::Zstd,
},
),
TileType::Png => Format::Png.into(),
TileType::Jpeg => Format::Jpeg.into(),
TileType::Webp => Format::Webp.into(),
TileType::Unknown => {
return Err(InvalidMetadata(
"Unknown tile type".to_string(),
@ -99,7 +107,7 @@ impl PmtSource {
path,
pmtiles: Arc::new(reader),
tilejson,
format,
tile_info: format,
})
}
}
@ -110,8 +118,8 @@ impl Source for PmtSource {
self.tilejson.clone()
}
fn get_format(&self) -> DataFormat {
self.format
fn get_tile_info(&self) -> TileInfo {
self.tile_info
}
fn clone_source(&self) -> Box<dyn Source> {


@ -4,7 +4,7 @@ use std::fmt::{Debug, Display, Formatter, Write};
use std::sync::{Arc, Mutex};
use async_trait::async_trait;
use martin_tile_utils::DataFormat;
use martin_tile_utils::TileInfo;
use tilejson::TileJSON;
use crate::utils::Result;
@ -34,7 +34,7 @@ pub type Sources = HashMap<String, Box<dyn Source>>;
pub trait Source: Send + Debug {
fn get_tilejson(&self) -> TileJSON;
fn get_format(&self) -> DataFormat;
fn get_tile_info(&self) -> TileInfo;
fn clone_source(&self) -> Box<dyn Source>;


@ -3,25 +3,29 @@ use std::string::ToString;
use std::time::Duration;
use actix_cors::Cors;
use actix_http::header::HeaderValue;
use actix_http::ContentEncoding;
use actix_web::dev::Server;
use actix_web::http::header::{CACHE_CONTROL, CONTENT_ENCODING};
use actix_web::error::ErrorBadRequest;
use actix_web::http::header::{
AcceptEncoding, Encoding as HeaderEnc, HeaderValue, Preference, CACHE_CONTROL, CONTENT_ENCODING,
};
use actix_web::http::Uri;
use actix_web::middleware::TrailingSlash;
use actix_web::web::{Data, Path, Query};
use actix_web::{
error, middleware, route, web, App, Error, HttpRequest, HttpResponse, HttpServer, Responder,
Result,
error, middleware, route, web, App, Error, HttpMessage, HttpRequest, HttpResponse, HttpServer,
Responder, Result,
};
use futures::future::try_join_all;
use itertools::Itertools;
use log::{debug, error};
use martin_tile_utils::DataFormat;
use martin_tile_utils::{Encoding, Format, TileInfo};
use serde::{Deserialize, Serialize};
use tilejson::TileJSON;
use crate::source::{Source, Sources, UrlQuery, Xyz};
use crate::srv::config::{SrvConfig, KEEP_ALIVE_DEFAULT, LISTEN_ADDRESSES_DEFAULT};
use crate::utils::{decode_brotli, decode_gzip, encode_brotli, encode_gzip};
use crate::Error::BindingError;
/// List of keywords that cannot be used as source IDs. Some of these are reserved for future use.
@ -30,6 +34,12 @@ pub const RESERVED_KEYWORDS: &[&str] = &[
"catalog", "config", "health", "help", "index", "manifest", "refresh", "reload", "status",
];
static SUPPORTED_ENCODINGS: &[HeaderEnc] = &[
HeaderEnc::brotli(),
HeaderEnc::gzip(),
HeaderEnc::identity(),
];
pub struct AppState {
pub sources: Sources,
}
@ -47,23 +57,22 @@ impl AppState {
&self,
source_ids: &str,
zoom: Option<u8>,
) -> Result<(Vec<&dyn Source>, bool, DataFormat)> {
// TODO?: optimize by pre-allocating max allowed layer count on stack
) -> Result<(Vec<&dyn Source>, bool, TileInfo)> {
let mut sources = Vec::new();
let mut format: Option<DataFormat> = None;
let mut info: Option<TileInfo> = None;
let mut use_url_query = false;
for id in source_ids.split(',') {
let src = self.get_source(id)?;
let src_fmt = src.get_format();
let src_inf = src.get_tile_info();
use_url_query |= src.support_url_query();
// make sure all sources have the same format
match format {
Some(fmt) if fmt == src_fmt => {}
Some(fmt) => Err(error::ErrorNotFound(format!(
"Cannot merge sources with {fmt:?} with {src_fmt:?}"
match info {
Some(inf) if inf == src_inf => {}
Some(inf) => Err(error::ErrorNotFound(format!(
"Cannot merge sources with {inf} with {src_inf}"
)))?,
None => format = Some(src_fmt),
None => info = Some(src_inf),
}
// TODO: Use chained-if-let once available
@ -77,7 +86,7 @@ impl AppState {
}
// format is guaranteed to be Some() here
Ok((sources, use_url_query, format.unwrap()))
Ok((sources, use_url_query, info.unwrap()))
}
}
@ -129,7 +138,9 @@ fn map_internal_error<T: std::fmt::Display>(e: T) -> Error {
#[route("/", method = "GET", method = "HEAD")]
#[allow(clippy::unused_async)]
async fn get_index() -> &'static str {
"Martin server is running. Eventually this will be a nice web front."
"Martin server is running. Eventually this will be a nice web front.\n\n\
A list of all available sources is at /catalog\n\n\
See documentation https://github.com/maplibre/martin"
}
/// Return 200 OK if healthy. Used for readiness and liveness probes.
@ -141,7 +152,12 @@ async fn get_health() -> impl Responder {
.message_body("OK")
}
#[route("/catalog", method = "GET", method = "HEAD")]
#[route(
"/catalog",
method = "GET",
method = "HEAD",
wrap = "middleware::Compress::default()"
)]
#[allow(clippy::unused_async)]
async fn get_catalog(state: Data<AppState>) -> impl Responder {
let info: Vec<_> = state
@ -149,11 +165,11 @@ async fn get_catalog(state: Data<AppState>) -> impl Responder {
.iter()
.map(|(id, src)| {
let tilejson = src.get_tilejson();
let format = src.get_format();
let info = src.get_tile_info();
IndexEntry {
id: id.clone(),
content_type: format.content_type().to_string(),
content_encoding: format.content_encoding().map(ToString::to_string),
content_type: info.format.content_type().to_string(),
content_encoding: info.encoding.content_encoding().map(ToString::to_string),
name: tilejson.name,
description: tilejson.description,
attribution: tilejson.attribution,
@ -164,7 +180,12 @@ async fn get_catalog(state: Data<AppState>) -> impl Responder {
HttpResponse::Ok().json(info)
}
#[route("/{source_ids}", method = "GET", method = "HEAD")]
#[route(
"/{source_ids}",
method = "GET",
method = "HEAD",
wrap = "middleware::Compress::default()"
)]
#[allow(clippy::unused_async)]
async fn git_source_info(
req: HttpRequest,
@ -198,7 +219,7 @@ fn get_tiles_url(scheme: &str, host: &str, query_string: &str, tiles_path: &str)
.path_and_query(path_and_query)
.build()
.map(|tiles_url| tiles_url.to_string())
.map_err(|e| error::ErrorBadRequest(format!("Can't build tiles URL: {e}")))
.map_err(|e| ErrorBadRequest(format!("Can't build tiles URL: {e}")))
}
fn merge_tilejson(sources: Vec<&dyn Source>, tiles_url: String) -> TileJSON {
@ -253,8 +274,8 @@ async fn get_tile(
};
// Optimization for a single-source request.
let (tile, format) = if path.source_ids.contains(',') {
let (sources, use_url_query, format) = state.get_sources(&path.source_ids, Some(path.z))?;
let (tile, info) = if path.source_ids.contains(',') {
let (sources, use_url_query, info) = state.get_sources(&path.source_ids, Some(path.z))?;
if sources.is_empty() {
return Err(error::ErrorNotFound("No valid sources found"))?;
}
@ -269,14 +290,15 @@ async fn get_tile(
// Make sure tiles can be concatenated, or if not, that there is only one non-empty tile for each zoom level
// TODO: can zlib, brotli, or zstd be concatenated?
// TODO: implement decompression step for other concatenate-able formats
let can_join = format == DataFormat::Mvt || format == DataFormat::GzipMvt;
if !can_join && tiles.iter().map(|v| i32::from(!v.is_empty())).sum::<i32>() > 1 {
let can_join = info.format == Format::Mvt
&& (info.encoding == Encoding::Uncompressed || info.encoding == Encoding::Gzip);
if !can_join && tiles.iter().filter(|v| !v.is_empty()).count() > 1 {
return Err(error::ErrorBadRequest(format!(
"Can't merge {format:?} tiles. Make sure there is only one non-empty tile source at zoom level {}",
"Can't merge {info} tiles. Make sure there is only one non-empty tile source at zoom level {}",
xyz.z
)))?;
}
(tiles.concat(), format)
(tiles.concat(), info)
} else {
let id = &path.source_ids;
let zoom = xyz.z;
@ -295,21 +317,88 @@ async fn get_tile(
.get_tile(&xyz, &query)
.await
.map_err(map_internal_error)?;
(tile, src.get_format())
(tile, src.get_tile_info())
};
Ok(if tile.is_empty() {
HttpResponse::NoContent().finish()
} else {
// decide if (re-)encoding of the tile data is needed, and recompress if so
let (tile, info) = recompress(tile, info, req.get_header::<AcceptEncoding>())?;
let mut response = HttpResponse::Ok();
response.content_type(format.content_type());
if let Some(val) = format.content_encoding() {
response.content_type(info.format.content_type());
if let Some(val) = info.encoding.content_encoding() {
response.insert_header((CONTENT_ENCODING, val));
}
response.body(tile)
})
}
fn recompress(
mut tile: Vec<u8>,
mut info: TileInfo,
accept_enc: Option<AcceptEncoding>,
) -> Result<(Vec<u8>, TileInfo)> {
if let Some(accept_enc) = accept_enc {
if info.encoding.is_encoded() {
// already compressed, see if we can send it as is, or need to re-compress
if !accept_enc.iter().any(|e| {
if let Preference::Specific(HeaderEnc::Known(enc)) = e.item {
to_encoding(enc) == Some(info.encoding)
} else {
false
}
}) {
// need to re-compress the tile - uncompress it first
(tile, info) = decode(tile, info)?;
}
}
if info.encoding == Encoding::Uncompressed {
// only apply compression if the content supports it
if let Some(HeaderEnc::Known(enc)) = accept_enc.negotiate(SUPPORTED_ENCODINGS.iter()) {
// (re-)compress the tile into the preferred encoding
(tile, info) = encode(tile, info, enc)?;
}
}
Ok((tile, info))
} else {
// no Accept-Encoding header, decode the tile if it is compressed
decode(tile, info)
}
}
fn encode(tile: Vec<u8>, info: TileInfo, enc: ContentEncoding) -> Result<(Vec<u8>, TileInfo)> {
Ok(match enc {
ContentEncoding::Brotli => (encode_brotli(&tile)?, info.encoding(Encoding::Brotli)),
ContentEncoding::Gzip => (encode_gzip(&tile)?, info.encoding(Encoding::Gzip)),
_ => (tile, info),
})
}
fn decode(tile: Vec<u8>, info: TileInfo) -> Result<(Vec<u8>, TileInfo)> {
Ok(if info.encoding.is_encoded() {
match info.encoding {
Encoding::Gzip => (decode_gzip(&tile)?, info.encoding(Encoding::Uncompressed)),
Encoding::Brotli => (decode_brotli(&tile)?, info.encoding(Encoding::Uncompressed)),
_ => Err(ErrorBadRequest(format!(
"Tile is is stored as {info}, but the client does not accept this encoding"
)))?,
}
} else {
(tile, info)
})
}
fn to_encoding(val: ContentEncoding) -> Option<Encoding> {
Some(match val {
ContentEncoding::Identity => Encoding::Uncompressed,
ContentEncoding::Gzip => Encoding::Gzip,
ContentEncoding::Brotli => Encoding::Brotli,
// TODO: Deflate => Encoding::Zstd or Encoding::Zlib ?
_ => None?,
})
}
pub fn router(cfg: &mut web::ServiceConfig) {
cfg.service(get_health)
.service(get_index)
@ -340,7 +429,6 @@ pub fn new_server(config: SrvConfig, sources: Sources) -> crate::Result<(Server,
.wrap(cors_middleware)
.wrap(middleware::NormalizePath::new(TrailingSlash::MergeOnly))
.wrap(middleware::Logger::default())
.wrap(middleware::Compress::default())
.configure(router)
})
.bind(listen_addresses.clone())


@ -1,6 +1,9 @@
use std::cmp::Ordering::Equal;
use std::collections::{BTreeMap, HashMap};
use std::io::{Read as _, Write as _};
use flate2::read::GzDecoder;
use flate2::write::GzEncoder;
use itertools::Itertools;
use log::{error, info, warn};
use serde::{Deserialize, Serialize, Serializer};
@ -94,3 +97,29 @@ pub fn sorted_opt_map<S: Serializer, T: Serialize>(
})
.serialize(serializer)
}
pub fn decode_gzip(data: &[u8]) -> Result<Vec<u8>, std::io::Error> {
let mut decoder = GzDecoder::new(data);
let mut decompressed = Vec::new();
decoder.read_to_end(&mut decompressed)?;
Ok(decompressed)
}
pub fn encode_gzip(data: &[u8]) -> Result<Vec<u8>, std::io::Error> {
let mut encoder = GzEncoder::new(Vec::new(), flate2::Compression::default());
encoder.write_all(data)?;
encoder.finish()
}
pub fn decode_brotli(data: &[u8]) -> Result<Vec<u8>, std::io::Error> {
let mut decoder = brotli::Decompressor::new(data, 4096);
let mut decompressed = Vec::new();
decoder.read_to_end(&mut decompressed)?;
Ok(decompressed)
}
pub fn encode_brotli(data: &[u8]) -> Result<Vec<u8>, std::io::Error> {
let mut encoder = brotli::CompressorWriter::new(Vec::new(), 4096, 11, 22);
encoder.write_all(data)?;
Ok(encoder.into_inner())
}


@ -153,4 +153,4 @@ postgres:
pmtiles:
sources:
pmt: tests/fixtures/stamen_toner__raster_CC-BY+ODbL_z3.pmtiles
pmt: tests/fixtures/files/stamen_toner__raster_CC-BY+ODbL_z3.pmtiles


@ -82,6 +82,16 @@
"id": "geography-class-png-no-bounds",
"name": "Geography Class"
},
{
"content_type": "application/json",
"id": "json",
"name": "Dummy json data"
},
{
"content_type": "image/png",
"id": "png",
"name": "ne2sr"
},
{
"content_type": "application/x-protobuf",
"id": "points1",
@ -126,6 +136,22 @@
"id": "table_source_multiple_geom.1",
"name": "public.table_source_multiple_geom.geom2"
},
{
"content_type": "application/x-protobuf",
"description": "Major cities from Natural Earth data",
"id": "uncompressed_mvt",
"name": "Major cities from Natural Earth data"
},
{
"content_type": "image/webp",
"id": "webp",
"name": "ne2sr"
},
{
"content_type": "image/webp",
"id": "webp.1",
"name": "ne2sr"
},
{
"content_encoding": "gzip",
"content_type": "application/x-protobuf",


@ -144,13 +144,18 @@ postgres:
schema: public
function: function_zxy_row_key
pmtiles:
paths: tests/fixtures
paths: tests/fixtures/files
sources:
stamen_toner__raster_CC-BY-ODbL_z3: tests/fixtures/stamen_toner__raster_CC-BY+ODbL_z3.pmtiles
png: tests/fixtures/files/png.pmtiles
stamen_toner__raster_CC-BY-ODbL_z3: tests/fixtures/files/stamen_toner__raster_CC-BY+ODbL_z3.pmtiles
webp: tests/fixtures/files/webp.pmtiles
mbtiles:
paths: tests/fixtures
paths: tests/fixtures/files
sources:
geography-class-jpg: tests/fixtures/geography-class-jpg.mbtiles
geography-class-png: tests/fixtures/geography-class-png.mbtiles
geography-class-png-no-bounds: tests/fixtures/geography-class-png-no-bounds.mbtiles
world_cities: tests/fixtures/world_cities.mbtiles
geography-class-jpg: tests/fixtures/files/geography-class-jpg.mbtiles
geography-class-png: tests/fixtures/files/geography-class-png.mbtiles
geography-class-png-no-bounds: tests/fixtures/files/geography-class-png-no-bounds.mbtiles
json: tests/fixtures/files/json.mbtiles
uncompressed_mvt: tests/fixtures/files/uncompressed_mvt.mbtiles
webp.1: tests/fixtures/files/webp.mbtiles
world_cities: tests/fixtures/files/world_cities.mbtiles


@ -115,4 +115,4 @@ postgres:
- 90.0
pmtiles:
sources:
pmt: tests/fixtures/stamen_toner__raster_CC-BY+ODbL_z3.pmtiles
pmt: tests/fixtures/files/stamen_toner__raster_CC-BY+ODbL_z3.pmtiles

BIN
tests/fixtures/files/json.mbtiles vendored Normal file


BIN
tests/fixtures/files/png.pmtiles vendored Normal file


1
tests/fixtures/files/tmp.json vendored Normal file

@ -0,0 +1 @@
{"foo":"bar"}


BIN
tests/fixtures/files/webp.mbtiles vendored Normal file


BIN
tests/fixtures/files/webp.pmtiles vendored Normal file


270
tests/mb_server_test.rs Normal file

@ -0,0 +1,270 @@
use actix_web::http::header::{ACCEPT_ENCODING, CONTENT_ENCODING, CONTENT_TYPE};
use actix_web::test::{call_service, read_body, read_body_json, TestRequest};
use ctor::ctor;
use indoc::indoc;
use martin::decode_gzip;
use martin::srv::IndexEntry;
use tilejson::TileJSON;
pub mod utils;
pub use utils::*;
#[ctor]
fn init() {
let _ = env_logger::builder().is_test(true).try_init();
}
macro_rules! create_app {
($sources:expr) => {{
let sources = mock_sources(mock_cfg($sources)).await.0;
let state = crate::utils::mock_app_data(sources).await;
::actix_web::test::init_service(
::actix_web::App::new()
.app_data(state)
.configure(::martin::srv::router),
)
.await
}};
}
fn test_get(path: &str) -> TestRequest {
TestRequest::get().uri(path)
}
const CONFIG: &str = indoc! {"
mbtiles:
sources:
m_json: tests/fixtures/files/json.mbtiles
m_mvt: tests/fixtures/files/world_cities.mbtiles
m_raw_mvt: tests/fixtures/files/uncompressed_mvt.mbtiles
m_webp: tests/fixtures/files/webp.mbtiles
"};
#[actix_rt::test]
async fn mbt_get_catalog() {
let app = create_app! { CONFIG };
let req = test_get("/catalog").to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
let body = read_body(response).await;
let sources: Vec<IndexEntry> = serde_json::from_slice(&body).unwrap();
assert_eq!(sources.iter().filter(|v| v.id == "m_mvt").count(), 1);
assert_eq!(sources.iter().filter(|v| v.id == "m_webp").count(), 1);
assert_eq!(sources.iter().filter(|v| v.id == "m_raw_mvt").count(), 1);
}
#[actix_rt::test]
async fn mbt_get_catalog_gzip() {
let app = create_app! { CONFIG };
let accept = (ACCEPT_ENCODING, "gzip");
let req = test_get("/catalog").insert_header(accept).to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
let body = decode_gzip(&read_body(response).await).unwrap();
let sources: Vec<IndexEntry> = serde_json::from_slice(&body).unwrap();
assert_eq!(sources.iter().filter(|v| v.id == "m_mvt").count(), 1);
assert_eq!(sources.iter().filter(|v| v.id == "m_webp").count(), 1);
assert_eq!(sources.iter().filter(|v| v.id == "m_raw_mvt").count(), 1);
}
#[actix_rt::test]
async fn mbt_get_tilejson() {
let app = create_app! { CONFIG };
let req = test_get("/m_mvt").to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
let headers = response.headers();
assert_eq!(headers.get(CONTENT_TYPE).unwrap(), "application/json");
assert!(headers.get(CONTENT_ENCODING).is_none());
let body: TileJSON = read_body_json(response).await;
assert_eq!(body.maxzoom, Some(6));
}
#[actix_rt::test]
async fn mbt_get_tilejson_gzip() {
let app = create_app! { CONFIG };
let accept = (ACCEPT_ENCODING, "gzip");
let req = test_get("/m_webp").insert_header(accept).to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
let headers = response.headers();
assert_eq!(headers.get(CONTENT_TYPE).unwrap(), "application/json");
assert_eq!(headers.get(CONTENT_ENCODING).unwrap(), "gzip");
let body = decode_gzip(&read_body(response).await).unwrap();
let body: TileJSON = serde_json::from_slice(body.as_slice()).unwrap();
assert_eq!(body.maxzoom, Some(0));
}
#[actix_rt::test]
async fn mbt_get_raster() {
let app = create_app! { CONFIG };
let req = test_get("/m_webp/0/0/0").to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_eq!(response.headers().get(CONTENT_TYPE).unwrap(), "image/webp");
assert!(response.headers().get(CONTENT_ENCODING).is_none());
let body = read_body(response).await;
assert_eq!(body.len(), 11586);
}
/// get a raster tile with accepted gzip enc, but should still be non-gzipped
#[actix_rt::test]
async fn mbt_get_raster_gzip() {
let app = create_app! { CONFIG };
let accept = (ACCEPT_ENCODING, "gzip");
let req = test_get("/m_webp/0/0/0").insert_header(accept).to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_eq!(response.headers().get(CONTENT_TYPE).unwrap(), "image/webp");
assert!(response.headers().get(CONTENT_ENCODING).is_none());
let body = read_body(response).await;
assert_eq!(body.len(), 11586);
}
#[actix_rt::test]
async fn mbt_get_mvt() {
let app = create_app! { CONFIG };
let req = test_get("/m_mvt/0/0/0").to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_eq!(
response.headers().get(CONTENT_TYPE).unwrap(),
"application/x-protobuf"
);
assert!(response.headers().get(CONTENT_ENCODING).is_none());
let body = read_body(response).await;
assert_eq!(body.len(), 1828);
}
/// get an MVT tile with accepted gzip enc
#[actix_rt::test]
async fn mbt_get_mvt_gzip() {
let app = create_app! { CONFIG };
let accept = (ACCEPT_ENCODING, "gzip");
let req = test_get("/m_mvt/0/0/0").insert_header(accept).to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_eq!(
response.headers().get(CONTENT_TYPE).unwrap(),
"application/x-protobuf"
);
assert_eq!(response.headers().get(CONTENT_ENCODING).unwrap(), "gzip");
let body = read_body(response).await;
assert_eq!(body.len(), 1107); // this number could change if compression gets more optimized
let body = decode_gzip(&body).unwrap();
assert_eq!(body.len(), 1828);
}
/// get an MVT tile with accepted brotli enc
#[actix_rt::test]
async fn mbt_get_mvt_brotli() {
let app = create_app! { CONFIG };
let accept = (ACCEPT_ENCODING, "br");
let req = test_get("/m_mvt/0/0/0").insert_header(accept).to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_eq!(
response.headers().get(CONTENT_TYPE).unwrap(),
"application/x-protobuf"
);
assert_eq!(response.headers().get(CONTENT_ENCODING).unwrap(), "br");
let body = read_body(response).await;
assert_eq!(body.len(), 871); // this number could change if compression gets more optimized
let body = martin::decode_brotli(&body).unwrap();
assert_eq!(body.len(), 1828);
}
/// get an uncompressed MVT tile
#[actix_rt::test]
async fn mbt_get_raw_mvt() {
let app = create_app! { CONFIG };
let req = test_get("/m_raw_mvt/0/0/0").to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_eq!(
response.headers().get(CONTENT_TYPE).unwrap(),
"application/x-protobuf"
);
assert!(response.headers().get(CONTENT_ENCODING).is_none());
let body = read_body(response).await;
assert_eq!(body.len(), 1828);
}
/// get an uncompressed MVT tile with accepted gzip
#[actix_rt::test]
async fn mbt_get_raw_mvt_gzip() {
let app = create_app! { CONFIG };
let accept = (ACCEPT_ENCODING, "gzip");
let req = test_get("/m_raw_mvt/0/0/0")
.insert_header(accept)
.to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_eq!(
response.headers().get(CONTENT_TYPE).unwrap(),
"application/x-protobuf"
);
assert_eq!(response.headers().get(CONTENT_ENCODING).unwrap(), "gzip");
let body = read_body(response).await;
assert_eq!(body.len(), 1107); // this number could change if compression gets more optimized
let body = martin::decode_gzip(&body).unwrap();
assert_eq!(body.len(), 1828);
}
/// get an uncompressed MVT tile with both gzip and brotli accepted
#[actix_rt::test]
async fn mbt_get_raw_mvt_gzip_br() {
let app = create_app! { CONFIG };
// Sadly, most browsers prefer to ask for gzip - maybe we should force brotli if supported.
let accept = (ACCEPT_ENCODING, "br, gzip, deflate");
let req = test_get("/m_raw_mvt/0/0/0")
.insert_header(accept)
.to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_eq!(
response.headers().get(CONTENT_TYPE).unwrap(),
"application/x-protobuf"
);
assert_eq!(response.headers().get(CONTENT_ENCODING).unwrap(), "br");
let body = read_body(response).await;
assert_eq!(body.len(), 871); // this number could change if compression gets more optimized
let body = martin::decode_brotli(&body).unwrap();
assert_eq!(body.len(), 1828);
}
/// get a JSON tile
#[actix_rt::test]
async fn mbt_get_json() {
let app = create_app! { CONFIG };
let req = test_get("/m_json/0/0/0").to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_eq!(
response.headers().get(CONTENT_TYPE).unwrap(),
"application/json"
);
assert!(response.headers().get(CONTENT_ENCODING).is_none());
let body = read_body(response).await;
assert_eq!(body.len(), 13);
}
/// get a JSON tile with accepted gzip
#[actix_rt::test]
async fn mbt_get_json_gzip() {
let app = create_app! { CONFIG };
let accept = (ACCEPT_ENCODING, "gzip");
let req = test_get("/m_json/0/0/0").insert_header(accept).to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_eq!(
response.headers().get(CONTENT_TYPE).unwrap(),
"application/json"
);
assert_eq!(response.headers().get(CONTENT_ENCODING).unwrap(), "gzip");
let body = read_body(response).await;
assert_eq!(body.len(), 33); // this number could change if compression gets more optimized
let body = martin::decode_gzip(&body).unwrap();
assert_eq!(body.len(), 13);
}


@ -1,7 +1,8 @@
use actix_http::Request;
use actix_web::http::header::{ACCEPT_ENCODING, CONTENT_ENCODING, CONTENT_TYPE};
use actix_web::test::{call_service, read_body, read_body_json, TestRequest};
use ctor::ctor;
use indoc::indoc;
use martin::decode_gzip;
use martin::srv::IndexEntry;
use tilejson::TileJSON;
@ -26,15 +27,22 @@ macro_rules! create_app {
}};
}
fn test_get(path: &str) -> Request {
TestRequest::get().uri(path).to_request()
fn test_get(path: &str) -> TestRequest {
TestRequest::get().uri(path)
}
#[actix_rt::test]
async fn pmt_get_catalog_ok() {
let app = create_app! { "pmtiles: tests/fixtures/stamen_toner__raster_CC-BY+ODbL_z3.pmtiles" };
const CONFIG: &str = indoc! {"
pmtiles:
sources:
p_png: tests/fixtures/files/stamen_toner__raster_CC-BY+ODbL_z3.pmtiles
"};
let req = test_get("/catalog");
#[actix_rt::test]
async fn pmt_get_catalog() {
let path = "pmtiles: tests/fixtures/files/stamen_toner__raster_CC-BY+ODbL_z3.pmtiles";
let app = create_app! { path };
let req = test_get("/catalog").to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
let body = read_body(response).await;
@ -45,27 +53,67 @@ async fn pmt_get_catalog_ok() {
}
#[actix_rt::test]
async fn pmt_get_raster() {
let app = create_app! { indoc!{"
pmtiles:
sources:
pmt: tests/fixtures/stamen_toner__raster_CC-BY+ODbL_z3.pmtiles
"} };
let req = test_get("/pmt/0/0/0");
async fn pmt_get_catalog_gzip() {
let app = create_app! { CONFIG };
let accept = (ACCEPT_ENCODING, "gzip");
let req = test_get("/catalog").insert_header(accept).to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_eq!(response.headers().get("content-type").unwrap(), "image/png");
let body = read_body(response).await;
assert_eq!(body.len(), 18404);
let body = decode_gzip(&read_body(response).await).unwrap();
let sources: Vec<IndexEntry> = serde_json::from_slice(&body).unwrap();
assert_eq!(sources.iter().filter(|v| v.id == "p_png").count(), 1);
}
let req = test_get("/pmt");
#[actix_rt::test]
async fn pmt_get_tilejson() {
let app = create_app! { CONFIG };
let req = test_get("/p_png").to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_eq!(
response.headers().get("content-type").unwrap(),
"application/json"
);
let headers = response.headers();
assert_eq!(headers.get(CONTENT_TYPE).unwrap(), "application/json");
assert!(headers.get(CONTENT_ENCODING).is_none());
let body: TileJSON = read_body_json(response).await;
assert_eq!(body.maxzoom, Some(3));
}
#[actix_rt::test]
async fn pmt_get_tilejson_gzip() {
let app = create_app! { CONFIG };
let accept = (ACCEPT_ENCODING, "gzip");
let req = test_get("/p_png").insert_header(accept).to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
let headers = response.headers();
assert_eq!(headers.get(CONTENT_TYPE).unwrap(), "application/json");
assert_eq!(headers.get(CONTENT_ENCODING).unwrap(), "gzip");
let body = decode_gzip(&read_body(response).await).unwrap();
let body: TileJSON = serde_json::from_slice(body.as_slice()).unwrap();
assert_eq!(body.maxzoom, Some(3));
}
#[actix_rt::test]
async fn pmt_get_raster() {
let app = create_app! { CONFIG };
let req = test_get("/p_png/0/0/0").to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_eq!(response.headers().get(CONTENT_TYPE).unwrap(), "image/png");
assert!(response.headers().get(CONTENT_ENCODING).is_none());
let body = read_body(response).await;
assert_eq!(body.len(), 18404);
}
/// get a raster tile with accepted gzip enc, but should still be non-gzipped
#[actix_rt::test]
async fn pmt_get_raster_gzip() {
let app = create_app! { CONFIG };
let accept = (ACCEPT_ENCODING, "gzip");
let req = test_get("/p_png/0/0/0").insert_header(accept).to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_eq!(response.headers().get(CONTENT_TYPE).unwrap(), "image/png");
assert!(response.headers().get(CONTENT_ENCODING).is_none());
let body = read_body(response).await;
assert_eq!(body.len(), 18404);
}


@ -2,7 +2,7 @@
set -euo pipefail
# TODO: use --fail-with-body to get the response body on failure
CURL=${CURL:-curl -sSf}
CURL=${CURL:-curl --silent --show-error --fail --compressed}
DATABASE_URL="${DATABASE_URL:-postgres://postgres@localhost/db}"
MARTIN_BUILD="${MARTIN_BUILD:-cargo build --all-features}"
MARTIN_PORT="${MARTIN_PORT:-3111}"
@ -16,9 +16,9 @@ function wait_for_martin {
PROCESS_ID=$1
echo "Waiting for Martin ($PROCESS_ID) to start by checking $MARTIN_URL/health to be valid..."
for i in {1..60}; do
if curl -sSf "$MARTIN_URL/health" 2>/dev/null >/dev/null; then
if $CURL "$MARTIN_URL/health" 2>/dev/null >/dev/null; then
echo "Martin is up!"
curl -s "$MARTIN_URL/health"
$CURL "$MARTIN_URL/health"
return
fi
if ps -p $PROCESS_ID > /dev/null ; then
@ -70,7 +70,7 @@ test_pbf()
URL="$MARTIN_URL/$2"
echo "Testing $(basename "$FILENAME") from $URL"
$CURL --compressed "$URL" > "$FILENAME"
$CURL "$URL" > "$FILENAME"
if [[ $OSTYPE == linux* ]]; then
./tests/fixtures/vtzero-check "$FILENAME"
@ -143,7 +143,7 @@ echo "Test auto configured Martin"
TEST_OUT_DIR="$(dirname "$0")/output/auto"
mkdir -p "$TEST_OUT_DIR"
ARG=(--default-srid 900913 --disable-bounds --save-config "$(dirname "$0")/output/generated_config.yaml" tests/fixtures)
ARG=(--default-srid 900913 --disable-bounds --save-config "$(dirname "$0")/output/generated_config.yaml" tests/fixtures/files)
set -x
$MARTIN_BIN "${ARG[@]}" 2>&1 | tee test_log_1.txt &
PROCESS_ID=`jobs -p`


@ -5,6 +5,7 @@
mod pg_utils;
use actix_web::web::Data;
use log::warn;
use martin::srv::AppState;
use martin::{Config, Sources};
pub use pg_utils::*;
@ -20,10 +21,12 @@ pub async fn mock_app_data(sources: Sources) -> Data<AppState> {
#[must_use]
pub fn mock_cfg(yaml: &str) -> Config {
let Ok(db_url) = std::env::var("DATABASE_URL") else {
panic!("DATABASE_URL env var is not set. Unable to do integration tests");
let env = if let Ok(db_url) = std::env::var("DATABASE_URL") {
FauxEnv(vec![("DATABASE_URL", db_url.into())].into_iter().collect())
} else {
warn!("DATABASE_URL env var is not set. Might not be able to do integration tests");
FauxEnv::default()
};
let env = FauxEnv(vec![("DATABASE_URL", db_url.into())].into_iter().collect());
let mut cfg: Config = subst::yaml::from_str(yaml, &env).unwrap();
let res = cfg.finalize().unwrap();
assert!(res.is_empty(), "unrecognized config: {res:?}");