Lots of small refactorings (#1107)

* Use `Option<&T>` instead of `&Option<T>` in function arguments (a sketch follows the commit metadata below).
* Cleaner variable names.
* Slight optimization of `get_tile` with query params.
* Split up srv/server.rs into fonts, sprites, tiles, and tiles_info files.
* Better error reporting in tests.
Yuri Astrakhan, 2023-12-26 02:43:47 -05:00 (committed by GitHub)
parent 35faf420f4
commit 61d3286815
29 changed files with 745 additions and 708 deletions
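
The `Option<&T>` pattern from the first bullet recurs throughout the diffs below. A minimal sketch of why it is the friendlier argument type (hypothetical `describe` function, not from this commit): callers holding an owned `Option<T>` borrow into it with `.as_ref()`, and callers with nothing to pass write `None` directly instead of needing an `Option` binding to reference.

```rust
// Hypothetical example illustrating the `Option<&T>` argument pattern.
fn describe(name: Option<&String>) -> String {
    match name {
        Some(n) => format!("named {n}"),
        None => "unnamed".to_string(),
    }
}

fn main() {
    let owned: Option<String> = Some("tiles".to_string());
    // An owned Option borrows in with `.as_ref()`:
    assert_eq!(describe(owned.as_ref()), "named tiles");
    // ...and `None` can be passed without constructing a `&None`:
    assert_eq!(describe(None), "unnamed");
}
```

The same mechanical rewrite shows up at the call sites below: `patch_json(tilejson, &self.tilejson)` becomes `patch_json(tilejson, self.tilejson.as_ref())`, and `get_tile` switches from `&Option<UrlQuery>` to `Option<&UrlQuery>` across the trait and all of its implementations.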

Cargo.lock (generated, 4 changed lines)

@@ -1189,9 +1189,9 @@ checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5"
[[package]]
name = "fdeflate"
version = "0.3.1"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64d6dafc854908ff5da46ff3f8f473c6984119a2876a383a860246dd7841a868"
checksum = "7caf4086251adeba90011a7ff9bd1f6d7f7595be0871867daa4dbb0fcf2ca932"
dependencies = [
"simd-adler32",
]

@@ -33,41 +33,4 @@ Install [Just](https://github.com/casey/just#readme) (improved makefile processo
cargo install just
```
When developing MBTiles SQL code, you may need to use `just prepare-sqlite` whenever SQL queries are modified. Run `just` to see all available commands:
```shell, ignore
just
Available recipes:
run *ARGS # Start Martin server
run-release *ARGS # Start release-compiled Martin server and a test database
debug-page *ARGS # Start Martin server and open a test page
psql *ARGS # Run PSQL utility against the test database
pg_dump *ARGS # Run pg_dump utility against the test database
clean # Perform cargo clean to delete all build files
start # Start a test database
start-ssl # Start an ssl-enabled test database
start-legacy # Start a legacy test database
restart # Restart the test database
stop # Stop the test database
bench # Run benchmark tests
bench-http # Run HTTP requests benchmark using OHA tool. Use with `just run-release`
test # Run all tests using a test database
test-ssl # Run all tests using an SSL connection to a test database. Expected output won't match.
test-legacy # Run all tests using the oldest supported version of the database
test-cargo *ARGS # Run Rust unit and doc tests (cargo test)
test-int # Run integration tests
bless # Run integration tests and save its output as the new expected output
book # Build and open mdbook documentation
package-deb # Build debian package
docs # Build and open code documentation
coverage FORMAT='html' # Run code coverage on tests and save its output in the coverage directory. Parameter could be html or lcov.
docker-build # Build martin docker image
docker-run *ARGS # Build and run martin docker image
git *ARGS # Do any git command, ensuring that the testing environment is set up. Accepts the same arguments as git.
print-conn-str # Print the connection string for the test database
lint # Run cargo fmt and cargo clippy
fmt # Run cargo fmt
fmt2 # Run Nightly cargo fmt, ordering imports
clippy # Run cargo clippy
prepare-sqlite # Update sqlite database schema.
```
When developing MBTiles SQL code, you may need to use `just prepare-sqlite` whenever SQL queries are modified. Run `just` to see all available commands.

@@ -102,7 +102,7 @@ bench-server: start
# Run HTTP requests benchmark using OHA tool. Use with `just bench-server`
bench-http: (cargo-install "oha")
@echo "Make sure Martin was started with 'just run-release'"
@echo "ATTENTION: Make sure Martin was started with just bench-server"
@echo "Warming up..."
oha -z 5s --no-tui http://localhost:3000/function_zxy_query/18/235085/122323 > /dev/null
oha -z 60s http://localhost:3000/function_zxy_query/18/235085/122323

@@ -60,7 +60,7 @@ harness = false
[features]
default = ["fonts", "mbtiles", "pmtiles", "postgres", "sprites"]
fonts = ["dep:bit-set","dep:pbf_font_tools"]
fonts = ["dep:bit-set", "dep:pbf_font_tools"]
mbtiles = []
pmtiles = ["dep:moka"]
postgres = ["dep:deadpool-postgres", "dep:json-patch", "dep:postgis", "dep:postgres", "dep:postgres-protocol", "dep:semver", "dep:tokio-postgres-rustls"]

@@ -45,8 +45,8 @@ impl Source for NullSource {
async fn get_tile(
&self,
_xyz: &TileCoord,
_query: &Option<UrlQuery>,
_xyz: TileCoord,
_url_query: Option<&UrlQuery>,
) -> MartinResult<TileData> {
Ok(Vec::new())
}

@@ -311,7 +311,7 @@ async fn run_tile_copy(args: CopyArgs, state: ServerState) -> MartinCpResult<()>
.try_for_each_concurrent(concurrency, |xyz| {
let tx = tx.clone();
async move {
let tile = get_tile_content(sources, info, &xyz, query, encodings).await?;
let tile = get_tile_content(sources, info, xyz, query, encodings).await?;
let data = tile.data;
tx.send(TileXyz { xyz, data })
.await

@@ -105,8 +105,8 @@ impl Source for MbtSource {
async fn get_tile(
&self,
xyz: &TileCoord,
_url_query: &Option<UrlQuery>,
xyz: TileCoord,
_url_query: Option<&UrlQuery>,
) -> MartinResult<TileData> {
if let Some(tile) = self
.mbtiles

@@ -80,6 +80,6 @@ impl PgInfo for FunctionInfo {
tilejson.minzoom = self.minzoom;
tilejson.maxzoom = self.maxzoom;
tilejson.bounds = self.bounds;
patch_json(tilejson, &self.tilejson)
patch_json(tilejson, self.tilejson.as_ref())
}
}

@@ -100,6 +100,6 @@ impl PgInfo for TableInfo {
other: BTreeMap::default(),
};
tilejson.vector_layers = Some(vec![layer]);
patch_json(tilejson, &self.tilejson)
patch_json(tilejson, self.tilejson.as_ref())
}
}

@@ -214,7 +214,7 @@ impl PgBuilder {
continue;
}
Ok((id, pg_sql, src_inf)) => {
debug!("{id} query: {}", pg_sql.query);
debug!("{id} query: {}", pg_sql.sql_query);
self.add_func_src(&mut res, id.clone(), &src_inf, pg_sql.clone());
info_map.insert(id, src_inf);
}
@@ -252,7 +252,7 @@ impl PgBuilder {
warn_on_rename(id, &id2, "Function");
let signature = &pg_sql.signature;
info!("Configured {dup}source {id2} from the function {signature}");
debug!("{id2} query: {}", pg_sql.query);
debug!("{id2} query: {}", pg_sql.sql_query);
info_map.insert(id2, merged_inf);
}
@@ -285,7 +285,7 @@ impl PgBuilder {
let id2 = self.resolve_id(&source_id, &db_inf);
self.add_func_src(&mut res, id2.clone(), &db_inf, pg_sql.clone());
info!("Discovered source {id2} from function {}", pg_sql.signature);
debug!("{id2} query: {}", pg_sql.query);
debug!("{id2} query: {}", pg_sql.sql_query);
info_map.insert(id2, db_inf);
}
}
@@ -302,11 +302,11 @@ impl PgBuilder {
&self,
sources: &mut TileInfoSources,
id: String,
info: &impl PgInfo,
sql: PgSqlInfo,
pg_info: &impl PgInfo,
sql_info: PgSqlInfo,
) {
let tilejson = info.to_tilejson(id.clone());
let source = PgSource::new(id, sql, tilejson, self.pool.clone());
let tilejson = pg_info.to_tilejson(id.clone());
let source = PgSource::new(id, sql_info, tilejson, self.pool.clone());
sources.push(Box::new(source));
}
}

@@ -61,6 +61,6 @@ pub enum PgError {
#[error(r#"Unable to get tile {2:#} from {1}: {0}"#)]
GetTileError(#[source] TokioPgError, String, TileCoord),
#[error(r#"Unable to get tile {2:#} with {:?} params from {1}: {0}"#, query_to_json(.3))]
GetTileWithQueryError(#[source] TokioPgError, String, TileCoord, UrlQuery),
#[error(r#"Unable to get tile {2:#} with {:?} params from {1}: {0}"#, query_to_json(.3.as_ref()))]
GetTileWithQueryError(#[source] TokioPgError, String, TileCoord, Option<UrlQuery>),
}
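
The updated `GetTileWithQueryError` variant formats its new `Option<UrlQuery>` field through `query_to_json(.3.as_ref())`. `thiserror` accepts extra format arguments after the format string, with `.N` referring to the variant's N-th tuple field; a minimal sketch of that mechanism, using a hypothetical `fmt_params` helper in place of `query_to_json`:

```rust
use thiserror::Error;

// Hypothetical stand-in for query_to_json().
fn fmt_params(params: Option<&String>) -> String {
    params.cloned().unwrap_or_else(|| "<no params>".to_string())
}

#[derive(Error, Debug)]
pub enum DemoError {
    // `.1` is the second tuple field; the helper call after the format
    // string is evaluated and bound to the `{}` placeholder.
    #[error("unable to get tile from {0} with {} params", fmt_params(.1.as_ref()))]
    Tile(String, Option<String>),
}

fn main() {
    let e = DemoError::Tile("src".to_string(), None);
    assert_eq!(e.to_string(), "unable to get tile from src with <no params> params");
}
```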

@@ -29,10 +29,10 @@ pub async fn query_available_function(pool: &PgPool) -> PgResult<SqlFuncInfoMapM
let schema: String = row.get("schema");
let function: String = row.get("name");
let output_type: String = row.get("output_type");
let output_record_types = jsonb_to_vec(&row.get("output_record_types"));
let output_record_names = jsonb_to_vec(&row.get("output_record_names"));
let input_types = jsonb_to_vec(&row.get("input_types")).expect("Can't get input types");
let input_names = jsonb_to_vec(&row.get("input_names")).expect("Can't get input names");
let output_record_types = jsonb_to_vec(row.get("output_record_types"));
let output_record_names = jsonb_to_vec(row.get("output_record_names"));
let input_types = jsonb_to_vec(row.get("input_types")).expect("Can't get input types");
let input_names = jsonb_to_vec(row.get("input_names")).expect("Can't get input names");
let tilejson = if let Some(text) = row.get("description") {
match serde_json::from_str::<Value>(text) {
Ok(v) => Some(v),
@@ -126,8 +126,8 @@ pub fn merge_func_info(cfg_inf: &FunctionInfo, db_inf: &FunctionInfo) -> Functio
}
}
fn jsonb_to_vec(jsonb: &Option<Value>) -> Option<Vec<String>> {
jsonb.as_ref().map(|json| {
fn jsonb_to_vec(jsonb: Option<Value>) -> Option<Vec<String>> {
jsonb.map(|json| {
json.as_array()
.unwrap()
.iter()

@@ -1,5 +1,3 @@
use std::collections::HashMap;
use async_trait::async_trait;
use deadpool_postgres::tokio_postgres::types::{ToSql, Type};
use log::debug;
@@ -58,35 +56,32 @@ impl Source for PgSource {
async fn get_tile(
&self,
xyz: &TileCoord,
url_query: &Option<UrlQuery>,
xyz: TileCoord,
url_query: Option<&UrlQuery>,
) -> MartinResult<TileData> {
let empty_query = HashMap::new();
let url_query = url_query.as_ref().unwrap_or(&empty_query);
let conn = self.pool.get().await?;
let param_types: &[Type] = if self.support_url_query() {
&[Type::INT2, Type::INT8, Type::INT8, Type::JSON]
} else {
&[Type::INT2, Type::INT8, Type::INT8]
};
let query = &self.info.query;
let sql = &self.info.sql_query;
let prep_query = conn
.prepare_typed_cached(query, param_types)
.prepare_typed_cached(sql, param_types)
.await
.map_err(|e| {
PrepareQueryError(
e,
self.id.to_string(),
self.info.signature.to_string(),
self.info.query.to_string(),
self.info.sql_query.to_string(),
)
})?;
let tile = if self.support_url_query() {
let json = query_to_json(url_query);
debug!("SQL: {query} [{xyz}, {json:?}]");
debug!("SQL: {sql} [{xyz}, {json:?}]");
let params: &[&(dyn ToSql + Sync)] = &[
&i16::from(xyz.z),
&i64::from(xyz.x),
@@ -95,7 +90,7 @@ impl Source for PgSource {
];
conn.query_opt(&prep_query, params).await
} else {
debug!("SQL: {query} [{xyz}]");
debug!("SQL: {sql} [{xyz}]");
conn.query_opt(
&prep_query,
&[&i16::from(xyz.z), &i64::from(xyz.x), &i64::from(xyz.y)],
@@ -107,9 +102,9 @@ impl Source for PgSource {
.map(|row| row.and_then(|r| r.get::<_, Option<TileData>>(0)))
.map_err(|e| {
if self.support_url_query() {
GetTileWithQueryError(e, self.id.to_string(), *xyz, url_query.clone())
GetTileWithQueryError(e, self.id.to_string(), xyz, url_query.cloned())
} else {
GetTileError(e, self.id.to_string(), *xyz)
GetTileError(e, self.id.to_string(), xyz)
}
})?
.unwrap_or_default();
@@ -120,7 +115,7 @@ impl Source for PgSource {
#[derive(Clone, Debug)]
pub struct PgSqlInfo {
pub query: String,
pub sql_query: String,
pub use_url_query: bool,
pub signature: String,
}
@@ -129,7 +124,7 @@ impl PgSqlInfo {
#[must_use]
pub fn new(query: String, has_query_params: bool, signature: String) -> Self {
Self {
query,
sql_query: query,
use_url_query: has_query_params,
signature,
}
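
The deleted `empty_query` lines in `get_tile` above are the "slight optimization of `get_tile` with query params" from the summary: with `&Option<UrlQuery>`, the old code had to materialize an empty `HashMap` just to unwrap the reference even when no URL query was present, while with `Option<&UrlQuery>` the option flows through as-is. A before/after sketch (hypothetical `count_params` functions, assuming `UrlQuery` is a `HashMap<String, String>` as the deleted code implies):

```rust
use std::collections::HashMap;

type UrlQuery = HashMap<String, String>;

// Old shape: allocates a throwaway empty map whenever the query is None.
fn count_params_old(url_query: &Option<UrlQuery>) -> usize {
    let empty_query = HashMap::new();
    let url_query = url_query.as_ref().unwrap_or(&empty_query);
    url_query.len()
}

// New shape: no placeholder value needed; None stays None.
fn count_params_new(url_query: Option<&UrlQuery>) -> usize {
    url_query.map_or(0, |q| q.len())
}

fn main() {
    assert_eq!(count_params_old(&None), 0);
    assert_eq!(count_params_new(None), 0);
}
```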

@@ -57,7 +57,7 @@ pub fn json_to_hashmap(value: &serde_json::Value) -> InfoMap<String> {
}
#[must_use]
pub fn patch_json(target: TileJSON, patch: &Option<serde_json::Value>) -> TileJSON {
pub fn patch_json(target: TileJSON, patch: Option<&serde_json::Value>) -> TileJSON {
let Some(tj) = patch else {
// Nothing to merge in, keep the original
return target;
@@ -85,13 +85,15 @@ pub fn patch_json(target: TileJSON, patch: &Option<serde_json::Value>) -> TileJS
}
#[must_use]
pub fn query_to_json(query: &UrlQuery) -> Json<HashMap<String, serde_json::Value>> {
pub fn query_to_json(query: Option<&UrlQuery>) -> Json<HashMap<String, serde_json::Value>> {
let mut query_as_json = HashMap::new();
for (k, v) in query {
let json_value: serde_json::Value =
serde_json::from_str(v).unwrap_or_else(|_| serde_json::Value::String(v.clone()));
if let Some(query) = query {
for (k, v) in query {
let json_value: serde_json::Value =
serde_json::from_str(v).unwrap_or_else(|_| serde_json::Value::String(v.clone()));
query_as_json.insert(k.clone(), json_value);
query_as_json.insert(k.clone(), json_value);
}
}
Json(query_as_json)
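
`query_to_json` now takes the option directly and returns an empty JSON map for `None`. For `Some`, the per-value fallback rule is unchanged: each query parameter is parsed as JSON if possible, otherwise kept as a plain string. A standalone sketch of that rule (same `UrlQuery = HashMap<String, String>` assumption as above):

```rust
use std::collections::HashMap;

fn main() {
    let mut query: HashMap<String, String> = HashMap::new();
    query.insert("limit".to_string(), "10".to_string()); // valid JSON number
    query.insert("token".to_string(), "martin".to_string()); // not JSON

    // Try JSON first, fall back to a string value, as in the diff above.
    let as_json: HashMap<String, serde_json::Value> = query
        .iter()
        .map(|(k, v)| {
            let value = serde_json::from_str(v)
                .unwrap_or_else(|_| serde_json::Value::String(v.clone()));
            (k.clone(), value)
        })
        .collect();

    assert_eq!(as_json["limit"], serde_json::json!(10));
    assert_eq!(as_json["token"], serde_json::json!("martin"));
}
```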

@@ -114,14 +114,14 @@ impl ConfigExtras for PmtConfig {
self.client = Some(Client::new());
// Allow cache size to be disabled with 0
let cache_size = self.dir_cache_size_mb.unwrap_or(32) * 1024 * 1024;
if cache_size > 0 {
let dir_cache_size = self.dir_cache_size_mb.unwrap_or(32) * 1024 * 1024;
if dir_cache_size > 0 {
self.cache = Some(
Cache::builder()
.weigher(|_key, value: &Directory| -> u32 {
value.get_approx_byte_size().try_into().unwrap_or(u32::MAX)
})
.max_capacity(cache_size)
.max_capacity(dir_cache_size)
.build(),
);
}
@@ -264,8 +264,8 @@ macro_rules! impl_pmtiles_source {
async fn get_tile(
&self,
xyz: &TileCoord,
_url_query: &Option<UrlQuery>,
xyz: TileCoord,
_url_query: Option<&UrlQuery>,
) -> MartinResult<TileData> {
// TODO: optimize to return Bytes
if let Some(t) = self
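
The rename to `dir_cache_size` also clarifies what the number configures: a size-aware moka cache for PMTiles directories. With a `weigher` installed, moka interprets `max_capacity` as a total weight budget (here, approximate bytes) rather than an entry count, which is why a zero size disables the cache entirely. A standalone sketch of the same pattern, assuming the synchronous `moka::sync::Cache` with byte-vector values:

```rust
use moka::sync::Cache;

fn main() {
    let dir_cache_size: u64 = 32 * 1024 * 1024; // 32 MiB, the default above

    let cache: Cache<String, Vec<u8>> = Cache::builder()
        // Weigh each entry by its payload size in bytes.
        .weigher(|_key, value: &Vec<u8>| -> u32 {
            value.len().try_into().unwrap_or(u32::MAX)
        })
        // With a weigher, max_capacity is a weight budget, not an entry count.
        .max_capacity(dir_cache_size)
        .build();

    cache.insert("dir0".to_string(), vec![0_u8; 1024]);
    assert!(cache.get("dir0").is_some());
}
```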

@@ -113,7 +113,11 @@ pub trait Source: Send + Debug {
false
}
async fn get_tile(&self, xyz: &TileCoord, query: &Option<UrlQuery>) -> MartinResult<TileData>;
async fn get_tile(
&self,
xyz: TileCoord,
url_query: Option<&UrlQuery>,
) -> MartinResult<TileData>;
fn is_valid_zoom(&self, zoom: u8) -> bool {
let tj = self.get_tilejson();

martin/src/srv/fonts.rs (new executable file, 44 lines)

@@ -0,0 +1,44 @@
use std::string::ToString;
use actix_web::error::{ErrorBadRequest, ErrorNotFound};
use actix_web::web::{Data, Path};
use actix_web::{middleware, route, HttpResponse, Result as ActixResult};
use serde::Deserialize;
use crate::fonts::{FontError, FontSources};
use crate::srv::server::map_internal_error;
#[derive(Deserialize, Debug)]
struct FontRequest {
fontstack: String,
start: u32,
end: u32,
}
#[route(
"/font/{fontstack}/{start}-{end}",
method = "GET",
wrap = "middleware::Compress::default()"
)]
#[allow(clippy::unused_async)]
async fn get_font(path: Path<FontRequest>, fonts: Data<FontSources>) -> ActixResult<HttpResponse> {
let data = fonts
.get_font_range(&path.fontstack, path.start, path.end)
.map_err(map_font_error)?;
Ok(HttpResponse::Ok()
.content_type("application/x-protobuf")
.body(data))
}
pub fn map_font_error(e: FontError) -> actix_web::Error {
#[allow(clippy::enum_glob_use)]
use FontError::*;
match e {
FontNotFound(_) => ErrorNotFound(e.to_string()),
InvalidFontRangeStartEnd(_, _)
| InvalidFontRangeStart(_)
| InvalidFontRangeEnd(_)
| InvalidFontRange(_, _) => ErrorBadRequest(e.to_string()),
_ => map_internal_error(e),
}
}

@@ -2,7 +2,16 @@ mod config;
pub use config::{SrvConfig, KEEP_ALIVE_DEFAULT, LISTEN_ADDRESSES_DEFAULT};
mod server;
pub use server::{
get_tile_content, get_tile_response, merge_tilejson, new_server, router, Catalog, TileRequest,
RESERVED_KEYWORDS,
};
pub use server::{new_server, router, Catalog, RESERVED_KEYWORDS};
mod tiles;
pub use tiles::{get_tile_content, get_tile_response, TileRequest};
#[cfg(feature = "fonts")]
mod fonts;
mod tiles_info;
pub use tiles_info::{merge_tilejson, SourceIDsRequest};
#[cfg(feature = "sprites")]
mod sprites;

@@ -2,36 +2,22 @@ use std::string::ToString;
use std::time::Duration;
use actix_cors::Cors;
use actix_http::ContentEncoding;
use actix_web::dev::Server;
use actix_web::error::{ErrorBadRequest, ErrorInternalServerError, ErrorNotFound};
use actix_web::http::header::{
AcceptEncoding, Encoding as HeaderEnc, Preference, CACHE_CONTROL, CONTENT_ENCODING,
};
use actix_web::http::Uri;
use actix_web::error::ErrorInternalServerError;
use actix_web::http::header::CACHE_CONTROL;
use actix_web::middleware::TrailingSlash;
use actix_web::web::{Data, Path, Query};
use actix_web::{
middleware, route, web, App, HttpMessage, HttpRequest, HttpResponse, HttpServer, Responder,
Result as ActixResult,
};
use futures::future::try_join_all;
use itertools::Itertools as _;
use actix_web::web::Data;
use actix_web::{middleware, route, web, App, HttpResponse, HttpServer, Responder};
use log::error;
use martin_tile_utils::{Encoding, Format, TileInfo};
use serde::{Deserialize, Serialize};
use tilejson::{tilejson, TileJSON};
use crate::config::ServerState;
#[cfg(feature = "fonts")]
use crate::fonts::{FontCatalog, FontError, FontSources};
use crate::source::{Source, TileCatalog, TileSources, UrlQuery};
#[cfg(feature = "sprites")]
use crate::sprites::{SpriteCatalog, SpriteError, SpriteSources};
use crate::source::TileCatalog;
use crate::srv::config::{SrvConfig, KEEP_ALIVE_DEFAULT, LISTEN_ADDRESSES_DEFAULT};
use crate::utils::{decode_brotli, decode_gzip, encode_brotli, encode_gzip};
use crate::srv::tiles::get_tile;
use crate::srv::tiles_info::git_source_info;
use crate::MartinError::BindingError;
use crate::{MartinResult, Tile, TileCoord};
use crate::MartinResult;
/// List of keywords that cannot be used as source IDs. Some of these are reserved for future use.
/// Reserved keywords must never end in a "dot number" (e.g. ".1").
@@ -41,19 +27,13 @@ pub const RESERVED_KEYWORDS: &[&str] = &[
"reload", "sprite", "status",
];
static SUPPORTED_ENCODINGS: &[HeaderEnc] = &[
HeaderEnc::brotli(),
HeaderEnc::gzip(),
HeaderEnc::identity(),
];
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct Catalog {
pub tiles: TileCatalog,
#[cfg(feature = "sprites")]
pub sprites: SpriteCatalog,
pub sprites: crate::sprites::SpriteCatalog,
#[cfg(feature = "fonts")]
pub fonts: FontCatalog,
pub fonts: crate::fonts::FontCatalog,
}
impl Catalog {
@@ -68,47 +48,11 @@ impl Catalog {
}
}
#[derive(Deserialize)]
struct TileJsonRequest {
source_ids: String,
}
#[derive(Deserialize, Clone)]
pub struct TileRequest {
source_ids: String,
z: u8,
x: u32,
y: u32,
}
pub fn map_internal_error<T: std::fmt::Display>(e: T) -> actix_web::Error {
error!("{e}");
ErrorInternalServerError(e.to_string())
}
#[cfg(feature = "sprites")]
pub fn map_sprite_error(e: SpriteError) -> actix_web::Error {
use SpriteError::SpriteNotFound;
match e {
SpriteNotFound(_) => ErrorNotFound(e.to_string()),
_ => map_internal_error(e),
}
}
#[cfg(feature = "fonts")]
pub fn map_font_error(e: FontError) -> actix_web::Error {
#[allow(clippy::enum_glob_use)]
use FontError::*;
match e {
FontNotFound(_) => ErrorNotFound(e.to_string()),
InvalidFontRangeStartEnd(_, _)
| InvalidFontRangeStart(_)
| InvalidFontRangeEnd(_)
| InvalidFontRange(_, _) => ErrorBadRequest(e.to_string()),
_ => map_internal_error(e),
}
}
/// Root path will eventually have a web front. For now, just a stub.
#[route("/", method = "GET", method = "HEAD")]
#[allow(clippy::unused_async)]
@@ -139,369 +83,6 @@ async fn get_catalog(catalog: Data<Catalog>) -> impl Responder {
HttpResponse::Ok().json(catalog)
}
#[cfg(feature = "sprites")]
#[route("/sprite/{source_ids}.png", method = "GET", method = "HEAD")]
async fn get_sprite_png(
path: Path<TileJsonRequest>,
sprites: Data<SpriteSources>,
) -> ActixResult<HttpResponse> {
let sheet = sprites
.get_sprites(&path.source_ids)
.await
.map_err(map_sprite_error)?;
Ok(HttpResponse::Ok()
.content_type(actix_web::http::header::ContentType::png())
.body(sheet.encode_png().map_err(map_internal_error)?))
}
#[cfg(feature = "sprites")]
#[route(
"/sprite/{source_ids}.json",
method = "GET",
method = "HEAD",
wrap = "middleware::Compress::default()"
)]
async fn get_sprite_json(
path: Path<TileJsonRequest>,
sprites: Data<SpriteSources>,
) -> ActixResult<HttpResponse> {
let sheet = sprites
.get_sprites(&path.source_ids)
.await
.map_err(map_sprite_error)?;
Ok(HttpResponse::Ok().json(sheet.get_index()))
}
#[cfg(feature = "fonts")]
#[derive(Deserialize, Debug)]
struct FontRequest {
fontstack: String,
start: u32,
end: u32,
}
#[cfg(feature = "fonts")]
#[route(
"/font/{fontstack}/{start}-{end}",
method = "GET",
wrap = "middleware::Compress::default()"
)]
#[allow(clippy::unused_async)]
async fn get_font(path: Path<FontRequest>, fonts: Data<FontSources>) -> ActixResult<HttpResponse> {
let data = fonts
.get_font_range(&path.fontstack, path.start, path.end)
.map_err(map_font_error)?;
Ok(HttpResponse::Ok()
.content_type("application/x-protobuf")
.body(data))
}
#[route(
"/{source_ids}",
method = "GET",
method = "HEAD",
wrap = "middleware::Compress::default()"
)]
#[allow(clippy::unused_async)]
async fn git_source_info(
req: HttpRequest,
path: Path<TileJsonRequest>,
sources: Data<TileSources>,
) -> ActixResult<HttpResponse> {
let sources = sources.get_sources(&path.source_ids, None)?.0;
// Get `X-REWRITE-URL` header value, and extract its `path` component.
// If the header is not present or cannot be parsed as a URL, return the request path.
let tiles_path = req
.headers()
.get("x-rewrite-url")
.and_then(|v| v.to_str().ok())
.and_then(|v| v.parse::<Uri>().ok())
.map_or_else(|| req.path().to_owned(), |v| v.path().to_owned());
let query_string = req.query_string();
let path_and_query = if query_string.is_empty() {
format!("{tiles_path}/{{z}}/{{x}}/{{y}}")
} else {
format!("{tiles_path}/{{z}}/{{x}}/{{y}}?{query_string}")
};
// Construct a tiles URL from the request info, including the query string if present.
let info = req.connection_info();
let tiles_url = Uri::builder()
.scheme(info.scheme())
.authority(info.host())
.path_and_query(path_and_query)
.build()
.map(|tiles_url| tiles_url.to_string())
.map_err(|e| ErrorBadRequest(format!("Can't build tiles URL: {e}")))?;
Ok(HttpResponse::Ok().json(merge_tilejson(&sources, tiles_url)))
}
#[must_use]
pub fn merge_tilejson(sources: &[&dyn Source], tiles_url: String) -> TileJSON {
if sources.len() == 1 {
let mut tj = sources[0].get_tilejson().clone();
tj.tiles = vec![tiles_url];
return tj;
}
let mut attributions = vec![];
let mut descriptions = vec![];
let mut names = vec![];
let mut result = tilejson! {
tiles: vec![tiles_url],
};
for src in sources {
let tj = src.get_tilejson();
if let Some(vector_layers) = &tj.vector_layers {
if let Some(ref mut a) = result.vector_layers {
a.extend(vector_layers.iter().cloned());
} else {
result.vector_layers = Some(vector_layers.clone());
}
}
if let Some(v) = &tj.attribution {
if !attributions.contains(&v) {
attributions.push(v);
}
}
if let Some(bounds) = tj.bounds {
if let Some(a) = result.bounds {
result.bounds = Some(a + bounds);
} else {
result.bounds = tj.bounds;
}
}
if result.center.is_none() {
// Use first found center. Averaging multiple centers might create a center in the middle of nowhere.
result.center = tj.center;
}
if let Some(v) = &tj.description {
if !descriptions.contains(&v) {
descriptions.push(v);
}
}
if let Some(maxzoom) = tj.maxzoom {
if let Some(a) = result.maxzoom {
if a < maxzoom {
result.maxzoom = tj.maxzoom;
}
} else {
result.maxzoom = tj.maxzoom;
}
}
if let Some(minzoom) = tj.minzoom {
if let Some(a) = result.minzoom {
if a > minzoom {
result.minzoom = tj.minzoom;
}
} else {
result.minzoom = tj.minzoom;
}
}
if let Some(name) = &tj.name {
if !names.contains(&name) {
names.push(name);
}
}
}
if !attributions.is_empty() {
result.attribution = Some(attributions.into_iter().join("\n"));
}
if !descriptions.is_empty() {
result.description = Some(descriptions.into_iter().join("\n"));
}
if !names.is_empty() {
result.name = Some(names.into_iter().join(","));
}
result
}
#[route("/{source_ids}/{z}/{x}/{y}", method = "GET", method = "HEAD")]
async fn get_tile(
req: HttpRequest,
path: Path<TileRequest>,
sources: Data<TileSources>,
) -> ActixResult<HttpResponse> {
let xyz = TileCoord {
z: path.z,
x: path.x,
y: path.y,
};
let source_ids = &path.source_ids;
let query = req.query_string();
let encodings = req.get_header::<AcceptEncoding>();
get_tile_response(sources.as_ref(), xyz, source_ids, query, encodings).await
}
pub async fn get_tile_response(
sources: &TileSources,
xyz: TileCoord,
source_ids: &str,
query: &str,
encodings: Option<AcceptEncoding>,
) -> ActixResult<HttpResponse> {
let (sources, use_url_query, info) = sources.get_sources(source_ids, Some(xyz.z))?;
let query = use_url_query.then_some(query);
let tile = get_tile_content(sources.as_slice(), info, &xyz, query, encodings.as_ref()).await?;
Ok(if tile.data.is_empty() {
HttpResponse::NoContent().finish()
} else {
let mut response = HttpResponse::Ok();
response.content_type(tile.info.format.content_type());
if let Some(val) = tile.info.encoding.content_encoding() {
response.insert_header((CONTENT_ENCODING, val));
}
response.body(tile.data)
})
}
pub async fn get_tile_content(
sources: &[&dyn Source],
info: TileInfo,
xyz: &TileCoord,
query: Option<&str>,
encodings: Option<&AcceptEncoding>,
) -> ActixResult<Tile> {
if sources.is_empty() {
return Err(ErrorNotFound("No valid sources found"));
}
let query = match query {
Some(v) if !v.is_empty() => Some(Query::<UrlQuery>::from_query(v)?.into_inner()),
_ => None,
};
let mut tiles = try_join_all(sources.iter().map(|s| s.get_tile(xyz, &query)))
.await
.map_err(map_internal_error)?;
let mut layer_count = 0;
let mut last_non_empty_layer = 0;
for (idx, tile) in tiles.iter().enumerate() {
if !tile.is_empty() {
layer_count += 1;
last_non_empty_layer = idx;
}
}
// Minor optimization to prevent concatenation if there are less than 2 tiles
let data = match layer_count {
1 => tiles.swap_remove(last_non_empty_layer),
0 => return Ok(Tile::new(Vec::new(), info)),
_ => {
// Make sure tiles can be concatenated, or if not, that there is only one non-empty tile for each zoom level
// TODO: can zlib, brotli, or zstd be concatenated?
// TODO: implement decompression step for other concatenate-able formats
let can_join = info.format == Format::Mvt
&& (info.encoding == Encoding::Uncompressed || info.encoding == Encoding::Gzip);
if !can_join {
return Err(ErrorBadRequest(format!(
"Can't merge {info} tiles. Make sure there is only one non-empty tile source at zoom level {}",
xyz.z
)))?;
}
tiles.concat()
}
};
// decide if (re-)encoding of the tile data is needed, and recompress if so
let tile = recompress(Tile::new(data, info), encodings)?;
Ok(tile)
}
fn recompress(mut tile: Tile, accept_enc: Option<&AcceptEncoding>) -> ActixResult<Tile> {
if let Some(accept_enc) = accept_enc {
if tile.info.encoding.is_encoded() {
// already compressed, see if we can send it as is, or need to re-compress
if !accept_enc.iter().any(|e| {
if let Preference::Specific(HeaderEnc::Known(enc)) = e.item {
to_encoding(enc) == Some(tile.info.encoding)
} else {
false
}
}) {
// need to re-compress the tile - uncompress it first
tile = decode(tile)?;
}
}
if tile.info.encoding == Encoding::Uncompressed {
// only apply compression if the content supports it
if let Some(HeaderEnc::Known(enc)) = accept_enc.negotiate(SUPPORTED_ENCODINGS.iter()) {
// (re-)compress the tile into the preferred encoding
tile = encode(tile, enc)?;
}
}
Ok(tile)
} else {
// no accepted-encoding header, decode the tile if compressed
decode(tile)
}
}
fn encode(tile: Tile, enc: ContentEncoding) -> ActixResult<Tile> {
Ok(match enc {
ContentEncoding::Brotli => Tile::new(
encode_brotli(&tile.data)?,
tile.info.encoding(Encoding::Brotli),
),
ContentEncoding::Gzip => {
Tile::new(encode_gzip(&tile.data)?, tile.info.encoding(Encoding::Gzip))
}
_ => tile,
})
}
fn decode(tile: Tile) -> ActixResult<Tile> {
let info = tile.info;
Ok(if info.encoding.is_encoded() {
match info.encoding {
Encoding::Gzip => Tile::new(
decode_gzip(&tile.data)?,
info.encoding(Encoding::Uncompressed),
),
Encoding::Brotli => Tile::new(
decode_brotli(&tile.data)?,
info.encoding(Encoding::Uncompressed),
),
_ => Err(ErrorBadRequest(format!(
"Tile is is stored as {info}, but the client does not accept this encoding"
)))?,
}
} else {
tile
})
}
fn to_encoding(val: ContentEncoding) -> Option<Encoding> {
Some(match val {
ContentEncoding::Identity => Encoding::Uncompressed,
ContentEncoding::Gzip => Encoding::Gzip,
ContentEncoding::Brotli => Encoding::Brotli,
// TODO: Deflate => Encoding::Zstd or Encoding::Zlib ?
_ => None?,
})
}
pub fn router(cfg: &mut web::ServiceConfig) {
cfg.service(get_health)
.service(get_index)
@@ -510,10 +91,11 @@ pub fn router(cfg: &mut web::ServiceConfig) {
.service(get_tile);
#[cfg(feature = "sprites")]
cfg.service(get_sprite_json).service(get_sprite_png);
cfg.service(crate::srv::sprites::get_sprite_json)
.service(crate::srv::sprites::get_sprite_png);
#[cfg(feature = "fonts")]
cfg.service(get_font);
cfg.service(crate::srv::fonts::get_font);
}
/// Create a new initialized Actix `App` instance together with the listening address.
@@ -555,20 +137,20 @@ pub fn new_server(config: SrvConfig, state: ServerState) -> MartinResult<(Server
}
#[cfg(test)]
mod tests {
use std::collections::BTreeMap;
pub mod tests {
use async_trait::async_trait;
use tilejson::{tilejson, Bounds, VectorLayer};
use martin_tile_utils::{Encoding, Format, TileInfo};
use tilejson::TileJSON;
use super::*;
use crate::source::{Source, TileData};
use crate::{TileCoord, UrlQuery};
#[derive(Debug, Clone)]
struct TestSource {
id: &'static str,
tj: TileJSON,
data: TileData,
pub struct TestSource {
pub id: &'static str,
pub tj: TileJSON,
pub data: TileData,
}
#[async_trait]
@@ -591,117 +173,10 @@ mod tests {
async fn get_tile(
&self,
_xyz: &TileCoord,
_url_query: &Option<UrlQuery>,
_xyz: TileCoord,
_url_query: Option<&UrlQuery>,
) -> MartinResult<TileData> {
Ok(self.data.clone())
}
}
#[test]
fn test_merge_tilejson() {
let url = "http://localhost:8888/foo/{z}/{x}/{y}".to_string();
let src1 = TestSource {
id: "id",
tj: tilejson! {
tiles: vec![],
name: "layer1".to_string(),
minzoom: 5,
maxzoom: 10,
bounds: Bounds::new(-10.0, -20.0, 10.0, 20.0),
vector_layers: vec![
VectorLayer::new("layer1".to_string(),
BTreeMap::from([
("a".to_string(), "x1".to_string()),
]))
],
},
data: Vec::default(),
};
let tj = merge_tilejson(&[&src1], url.clone());
assert_eq!(
TileJSON {
tiles: vec![url.clone()],
..src1.tj.clone()
},
tj
);
let src2 = TestSource {
id: "id",
tj: tilejson! {
tiles: vec![],
name: "layer2".to_string(),
minzoom: 7,
maxzoom: 12,
bounds: Bounds::new(-20.0, -5.0, 5.0, 50.0),
vector_layers: vec![
VectorLayer::new("layer2".to_string(),
BTreeMap::from([
("b".to_string(), "x2".to_string()),
]))
],
},
data: Vec::default(),
};
let tj = merge_tilejson(&[&src1, &src2], url.clone());
assert_eq!(tj.tiles, vec![url]);
assert_eq!(tj.name, Some("layer1,layer2".to_string()));
assert_eq!(tj.minzoom, Some(5));
assert_eq!(tj.maxzoom, Some(12));
assert_eq!(tj.bounds, Some(Bounds::new(-20.0, -20.0, 10.0, 50.0)));
assert_eq!(
tj.vector_layers,
Some(vec![
VectorLayer::new(
"layer1".to_string(),
BTreeMap::from([("a".to_string(), "x1".to_string())])
),
VectorLayer::new(
"layer2".to_string(),
BTreeMap::from([("b".to_string(), "x2".to_string())])
),
])
);
}
#[actix_rt::test]
async fn test_tile_content() {
let non_empty_source = TestSource {
id: "non-empty",
tj: tilejson! { tiles: vec![] },
data: vec![1_u8, 2, 3],
};
let empty_source = TestSource {
id: "empty",
tj: tilejson! { tiles: vec![] },
data: Vec::default(),
};
let sources = TileSources::new(vec![vec![
Box::new(non_empty_source),
Box::new(empty_source),
]]);
for (source_id, expected) in &[
("non-empty", vec![1_u8, 2, 3]),
("empty", Vec::<u8>::new()),
("empty,empty", Vec::<u8>::new()),
("non-empty,non-empty", vec![1_u8, 2, 3, 1_u8, 2, 3]),
("non-empty,empty", vec![1_u8, 2, 3]),
("non-empty,empty,non-empty", vec![1_u8, 2, 3, 1_u8, 2, 3]),
("empty,non-empty", vec![1_u8, 2, 3]),
("empty,non-empty,empty", vec![1_u8, 2, 3]),
] {
let (src, _, info) = sources.get_sources(source_id, None).unwrap();
let xyz = TileCoord { z: 0, x: 0, y: 0 };
assert_eq!(
expected,
&get_tile_content(src.as_slice(), info, &xyz, None, None)
.await
.unwrap()
.data
);
}
}
}

martin/src/srv/sprites.rs (new file, 49 lines)

@@ -0,0 +1,49 @@
use std::string::ToString;
use actix_web::error::ErrorNotFound;
use actix_web::http::header::ContentType;
use actix_web::web::{Data, Path};
use actix_web::{middleware, route, HttpResponse, Result as ActixResult};
use crate::sprites::{SpriteError, SpriteSources};
use crate::srv::server::map_internal_error;
use crate::srv::SourceIDsRequest;
pub fn map_sprite_error(e: SpriteError) -> actix_web::Error {
use SpriteError::SpriteNotFound;
match e {
SpriteNotFound(_) => ErrorNotFound(e.to_string()),
_ => map_internal_error(e),
}
}
#[route("/sprite/{source_ids}.png", method = "GET", method = "HEAD")]
async fn get_sprite_png(
path: Path<SourceIDsRequest>,
sprites: Data<SpriteSources>,
) -> ActixResult<HttpResponse> {
let sheet = sprites
.get_sprites(&path.source_ids)
.await
.map_err(map_sprite_error)?;
Ok(HttpResponse::Ok()
.content_type(ContentType::png())
.body(sheet.encode_png().map_err(map_internal_error)?))
}
#[route(
"/sprite/{source_ids}.json",
method = "GET",
method = "HEAD",
wrap = "middleware::Compress::default()"
)]
async fn get_sprite_json(
path: Path<SourceIDsRequest>,
sprites: Data<SpriteSources>,
) -> ActixResult<HttpResponse> {
let sheet = sprites
.get_sprites(&path.source_ids)
.await
.map_err(map_sprite_error)?;
Ok(HttpResponse::Ok().json(sheet.get_index()))
}

martin/src/srv/tiles.rs (new file, 247 lines)

@@ -0,0 +1,247 @@
use actix_http::ContentEncoding;
use actix_web::error::{ErrorBadRequest, ErrorNotFound};
use actix_web::http::header::{
AcceptEncoding, Encoding as HeaderEnc, Preference, CONTENT_ENCODING,
};
use actix_web::web::{Data, Path, Query};
use actix_web::{route, HttpMessage, HttpRequest, HttpResponse, Result as ActixResult};
use futures::future::try_join_all;
use martin_tile_utils::{Encoding, Format, TileInfo};
use serde::Deserialize;
use crate::source::{Source, TileSources, UrlQuery};
use crate::srv::server::map_internal_error;
use crate::utils::{decode_brotli, decode_gzip, encode_brotli, encode_gzip};
use crate::{Tile, TileCoord};
static SUPPORTED_ENCODINGS: &[HeaderEnc] = &[
HeaderEnc::brotli(),
HeaderEnc::gzip(),
HeaderEnc::identity(),
];
#[derive(Deserialize, Clone)]
pub struct TileRequest {
source_ids: String,
z: u8,
x: u32,
y: u32,
}
#[route("/{source_ids}/{z}/{x}/{y}", method = "GET", method = "HEAD")]
async fn get_tile(
req: HttpRequest,
path: Path<TileRequest>,
sources: Data<TileSources>,
) -> ActixResult<HttpResponse> {
let xyz = TileCoord {
z: path.z,
x: path.x,
y: path.y,
};
let source_ids = &path.source_ids;
let query = req.query_string();
let encodings = req.get_header::<AcceptEncoding>();
get_tile_response(sources.as_ref(), xyz, source_ids, query, encodings).await
}
pub async fn get_tile_response(
sources: &TileSources,
xyz: TileCoord,
source_ids: &str,
query: &str,
encodings: Option<AcceptEncoding>,
) -> ActixResult<HttpResponse> {
let (sources, use_url_query, info) = sources.get_sources(source_ids, Some(xyz.z))?;
let query = use_url_query.then_some(query);
let tile = get_tile_content(sources.as_slice(), info, xyz, query, encodings.as_ref()).await?;
Ok(if tile.data.is_empty() {
HttpResponse::NoContent().finish()
} else {
let mut response = HttpResponse::Ok();
response.content_type(tile.info.format.content_type());
if let Some(val) = tile.info.encoding.content_encoding() {
response.insert_header((CONTENT_ENCODING, val));
}
response.body(tile.data)
})
}
pub async fn get_tile_content(
sources: &[&dyn Source],
info: TileInfo,
xyz: TileCoord,
query: Option<&str>,
encodings: Option<&AcceptEncoding>,
) -> ActixResult<Tile> {
if sources.is_empty() {
return Err(ErrorNotFound("No valid sources found"));
}
let query_str = query.filter(|v| !v.is_empty());
let query = match query_str {
Some(v) => Some(Query::<UrlQuery>::from_query(v)?.into_inner()),
None => None,
};
let mut tiles = try_join_all(sources.iter().map(|s| s.get_tile(xyz, query.as_ref())))
.await
.map_err(map_internal_error)?;
let mut layer_count = 0;
let mut last_non_empty_layer = 0;
for (idx, tile) in tiles.iter().enumerate() {
if !tile.is_empty() {
layer_count += 1;
last_non_empty_layer = idx;
}
}
// Minor optimization to prevent concatenation if there are less than 2 tiles
let data = match layer_count {
1 => tiles.swap_remove(last_non_empty_layer),
0 => return Ok(Tile::new(Vec::new(), info)),
_ => {
// Make sure tiles can be concatenated, or if not, that there is only one non-empty tile for each zoom level
// TODO: can zlib, brotli, or zstd be concatenated?
// TODO: implement decompression step for other concatenate-able formats
let can_join = info.format == Format::Mvt
&& (info.encoding == Encoding::Uncompressed || info.encoding == Encoding::Gzip);
if !can_join {
return Err(ErrorBadRequest(format!(
"Can't merge {info} tiles. Make sure there is only one non-empty tile source at zoom level {}",
xyz.z
)))?;
}
tiles.concat()
}
};
// decide if (re-)encoding of the tile data is needed, and recompress if so
let tile = recompress(Tile::new(data, info), encodings)?;
Ok(tile)
}
fn recompress(mut tile: Tile, accept_enc: Option<&AcceptEncoding>) -> ActixResult<Tile> {
if let Some(accept_enc) = accept_enc {
if tile.info.encoding.is_encoded() {
// already compressed, see if we can send it as is, or need to re-compress
if !accept_enc.iter().any(|e| {
if let Preference::Specific(HeaderEnc::Known(enc)) = e.item {
to_encoding(enc) == Some(tile.info.encoding)
} else {
false
}
}) {
// need to re-compress the tile - uncompress it first
tile = decode(tile)?;
}
}
if tile.info.encoding == Encoding::Uncompressed {
// only apply compression if the content supports it
if let Some(HeaderEnc::Known(enc)) = accept_enc.negotiate(SUPPORTED_ENCODINGS.iter()) {
// (re-)compress the tile into the preferred encoding
tile = encode(tile, enc)?;
}
}
Ok(tile)
} else {
// no accepted-encoding header, decode the tile if compressed
decode(tile)
}
}
fn encode(tile: Tile, enc: ContentEncoding) -> ActixResult<Tile> {
Ok(match enc {
ContentEncoding::Brotli => Tile::new(
encode_brotli(&tile.data)?,
tile.info.encoding(Encoding::Brotli),
),
ContentEncoding::Gzip => {
Tile::new(encode_gzip(&tile.data)?, tile.info.encoding(Encoding::Gzip))
}
_ => tile,
})
}
fn decode(tile: Tile) -> ActixResult<Tile> {
let info = tile.info;
Ok(if info.encoding.is_encoded() {
match info.encoding {
Encoding::Gzip => Tile::new(
decode_gzip(&tile.data)?,
info.encoding(Encoding::Uncompressed),
),
Encoding::Brotli => Tile::new(
decode_brotli(&tile.data)?,
info.encoding(Encoding::Uncompressed),
),
_ => Err(ErrorBadRequest(format!(
"Tile is is stored as {info}, but the client does not accept this encoding"
)))?,
}
} else {
tile
})
}
fn to_encoding(val: ContentEncoding) -> Option<Encoding> {
Some(match val {
ContentEncoding::Identity => Encoding::Uncompressed,
ContentEncoding::Gzip => Encoding::Gzip,
ContentEncoding::Brotli => Encoding::Brotli,
// TODO: Deflate => Encoding::Zstd or Encoding::Zlib ?
_ => None?,
})
}
#[cfg(test)]
mod tests {
use tilejson::tilejson;
use super::*;
use crate::srv::server::tests::TestSource;
#[actix_rt::test]
async fn test_tile_content() {
let non_empty_source = TestSource {
id: "non-empty",
tj: tilejson! { tiles: vec![] },
data: vec![1_u8, 2, 3],
};
let empty_source = TestSource {
id: "empty",
tj: tilejson! { tiles: vec![] },
data: Vec::default(),
};
let sources = TileSources::new(vec![vec![
Box::new(non_empty_source),
Box::new(empty_source),
]]);
for (source_id, expected) in &[
("non-empty", vec![1_u8, 2, 3]),
("empty", Vec::<u8>::new()),
("empty,empty", Vec::<u8>::new()),
("non-empty,non-empty", vec![1_u8, 2, 3, 1_u8, 2, 3]),
("non-empty,empty", vec![1_u8, 2, 3]),
("non-empty,empty,non-empty", vec![1_u8, 2, 3, 1_u8, 2, 3]),
("empty,non-empty", vec![1_u8, 2, 3]),
("empty,non-empty,empty", vec![1_u8, 2, 3]),
] {
let (src, _, info) = sources.get_sources(source_id, None).unwrap();
let xyz = TileCoord { z: 0, x: 0, y: 0 };
assert_eq!(
expected,
&get_tile_content(src.as_slice(), info, xyz, None, None)
.await
.unwrap()
.data
);
}
}
}

martin/src/srv/tiles_info.rs (new file, 230 lines)

@@ -0,0 +1,230 @@
use std::string::ToString;
use actix_web::error::ErrorBadRequest;
use actix_web::http::Uri;
use actix_web::web::{Data, Path};
use actix_web::{middleware, route, HttpRequest, HttpResponse, Result as ActixResult};
use itertools::Itertools as _;
use serde::Deserialize;
use tilejson::{tilejson, TileJSON};
use crate::source::{Source, TileSources};
#[derive(Deserialize)]
pub struct SourceIDsRequest {
pub source_ids: String,
}
#[route(
"/{source_ids}",
method = "GET",
method = "HEAD",
wrap = "middleware::Compress::default()"
)]
#[allow(clippy::unused_async)]
async fn git_source_info(
req: HttpRequest,
path: Path<SourceIDsRequest>,
sources: Data<TileSources>,
) -> ActixResult<HttpResponse> {
let sources = sources.get_sources(&path.source_ids, None)?.0;
// Get `X-REWRITE-URL` header value, and extract its `path` component.
// If the header is not present or cannot be parsed as a URL, return the request path.
let tiles_path = req
.headers()
.get("x-rewrite-url")
.and_then(|v| v.to_str().ok())
.and_then(|v| v.parse::<Uri>().ok())
.map_or_else(|| req.path().to_owned(), |v| v.path().to_owned());
let query_string = req.query_string();
let path_and_query = if query_string.is_empty() {
format!("{tiles_path}/{{z}}/{{x}}/{{y}}")
} else {
format!("{tiles_path}/{{z}}/{{x}}/{{y}}?{query_string}")
};
// Construct a tiles URL from the request info, including the query string if present.
let info = req.connection_info();
let tiles_url = Uri::builder()
.scheme(info.scheme())
.authority(info.host())
.path_and_query(path_and_query)
.build()
.map(|tiles_url| tiles_url.to_string())
.map_err(|e| ErrorBadRequest(format!("Can't build tiles URL: {e}")))?;
Ok(HttpResponse::Ok().json(merge_tilejson(&sources, tiles_url)))
}
#[must_use]
pub fn merge_tilejson(sources: &[&dyn Source], tiles_url: String) -> TileJSON {
if sources.len() == 1 {
let mut tj = sources[0].get_tilejson().clone();
tj.tiles = vec![tiles_url];
return tj;
}
let mut attributions = vec![];
let mut descriptions = vec![];
let mut names = vec![];
let mut result = tilejson! {
tiles: vec![tiles_url],
};
for src in sources {
let tj = src.get_tilejson();
if let Some(vector_layers) = &tj.vector_layers {
if let Some(ref mut a) = result.vector_layers {
a.extend(vector_layers.iter().cloned());
} else {
result.vector_layers = Some(vector_layers.clone());
}
}
if let Some(v) = &tj.attribution {
if !attributions.contains(&v) {
attributions.push(v);
}
}
if let Some(bounds) = tj.bounds {
if let Some(a) = result.bounds {
result.bounds = Some(a + bounds);
} else {
result.bounds = tj.bounds;
}
}
if result.center.is_none() {
// Use first found center. Averaging multiple centers might create a center in the middle of nowhere.
result.center = tj.center;
}
if let Some(v) = &tj.description {
if !descriptions.contains(&v) {
descriptions.push(v);
}
}
if let Some(maxzoom) = tj.maxzoom {
if let Some(a) = result.maxzoom {
if a < maxzoom {
result.maxzoom = tj.maxzoom;
}
} else {
result.maxzoom = tj.maxzoom;
}
}
if let Some(minzoom) = tj.minzoom {
if let Some(a) = result.minzoom {
if a > minzoom {
result.minzoom = tj.minzoom;
}
} else {
result.minzoom = tj.minzoom;
}
}
if let Some(name) = &tj.name {
if !names.contains(&name) {
names.push(name);
}
}
}
if !attributions.is_empty() {
result.attribution = Some(attributions.into_iter().join("\n"));
}
if !descriptions.is_empty() {
result.description = Some(descriptions.into_iter().join("\n"));
}
if !names.is_empty() {
result.name = Some(names.into_iter().join(","));
}
result
}
#[cfg(test)]
pub mod tests {
use std::collections::BTreeMap;
use tilejson::{tilejson, Bounds, VectorLayer};
use super::*;
use crate::srv::server::tests::TestSource;
#[test]
fn test_merge_tilejson() {
let url = "http://localhost:8888/foo/{z}/{x}/{y}".to_string();
let src1 = TestSource {
id: "id",
tj: tilejson! {
tiles: vec![],
name: "layer1".to_string(),
minzoom: 5,
maxzoom: 10,
bounds: Bounds::new(-10.0, -20.0, 10.0, 20.0),
vector_layers: vec![
VectorLayer::new("layer1".to_string(),
BTreeMap::from([
("a".to_string(), "x1".to_string()),
]))
],
},
data: Vec::default(),
};
let tj = merge_tilejson(&[&src1], url.clone());
assert_eq!(
TileJSON {
tiles: vec![url.clone()],
..src1.tj.clone()
},
tj
);
let src2 = TestSource {
id: "id",
tj: tilejson! {
tiles: vec![],
name: "layer2".to_string(),
minzoom: 7,
maxzoom: 12,
bounds: Bounds::new(-20.0, -5.0, 5.0, 50.0),
vector_layers: vec![
VectorLayer::new("layer2".to_string(),
BTreeMap::from([
("b".to_string(), "x2".to_string()),
]))
],
},
data: Vec::default(),
};
let tj = merge_tilejson(&[&src1, &src2], url.clone());
assert_eq!(tj.tiles, vec![url]);
assert_eq!(tj.name, Some("layer1,layer2".to_string()));
assert_eq!(tj.minzoom, Some(5));
assert_eq!(tj.maxzoom, Some(12));
assert_eq!(tj.bounds, Some(Bounds::new(-20.0, -20.0, 10.0, 50.0)));
assert_eq!(
tj.vector_layers,
Some(vec![
VectorLayer::new(
"layer1".to_string(),
BTreeMap::from([("a".to_string(), "x1".to_string())])
),
VectorLayer::new(
"layer2".to_string(),
BTreeMap::from([("b".to_string(), "x2".to_string())])
),
])
);
}
}

@@ -48,7 +48,7 @@ async fn mbt_get_catalog() {
let req = test_get("/catalog").to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
let response = assert_response(response).await;
let body: serde_json::Value = read_body_json(response).await;
assert_yaml_snapshot!(body, @r###"
---
@@ -79,7 +79,7 @@ async fn mbt_get_catalog_gzip() {
let accept = (ACCEPT_ENCODING, "gzip");
let req = test_get("/catalog").insert_header(accept).to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
let response = assert_response(response).await;
let body = decode_gzip(&read_body(response).await).unwrap();
let body: serde_json::Value = serde_json::from_slice(&body).unwrap();
assert_yaml_snapshot!(body, @r###"
@@ -110,7 +110,7 @@ async fn mbt_get_tilejson() {
let app = create_app! { CONFIG };
let req = test_get("/m_mvt").to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
let response = assert_response(response).await;
let headers = response.headers();
assert_eq!(headers.get(CONTENT_TYPE).unwrap(), "application/json");
assert!(headers.get(CONTENT_ENCODING).is_none());
@@ -124,7 +124,7 @@ async fn mbt_get_tilejson_gzip() {
let accept = (ACCEPT_ENCODING, "gzip");
let req = test_get("/m_webp").insert_header(accept).to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
let response = assert_response(response).await;
let headers = response.headers();
assert_eq!(headers.get(CONTENT_TYPE).unwrap(), "application/json");
assert_eq!(headers.get(CONTENT_ENCODING).unwrap(), "gzip");
@@ -138,7 +138,7 @@ async fn mbt_get_raster() {
let app = create_app! { CONFIG };
let req = test_get("/m_webp/0/0/0").to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
let response = assert_response(response).await;
assert_eq!(response.headers().get(CONTENT_TYPE).unwrap(), "image/webp");
assert!(response.headers().get(CONTENT_ENCODING).is_none());
let body = read_body(response).await;
@@ -152,7 +152,7 @@ async fn mbt_get_raster_gzip() {
let accept = (ACCEPT_ENCODING, "gzip");
let req = test_get("/m_webp/0/0/0").insert_header(accept).to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
let response = assert_response(response).await;
assert_eq!(response.headers().get(CONTENT_TYPE).unwrap(), "image/webp");
assert!(response.headers().get(CONTENT_ENCODING).is_none());
let body = read_body(response).await;
@@ -164,7 +164,7 @@ async fn mbt_get_mvt() {
let app = create_app! { CONFIG };
let req = test_get("/m_mvt/0/0/0").to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
let response = assert_response(response).await;
assert_eq!(
response.headers().get(CONTENT_TYPE).unwrap(),
"application/x-protobuf"
@@ -181,7 +181,7 @@ async fn mbt_get_mvt_gzip() {
let accept = (ACCEPT_ENCODING, "gzip");
let req = test_get("/m_mvt/0/0/0").insert_header(accept).to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
let response = assert_response(response).await;
assert_eq!(
response.headers().get(CONTENT_TYPE).unwrap(),
"application/x-protobuf"
@@ -200,7 +200,7 @@ async fn mbt_get_mvt_brotli() {
let accept = (ACCEPT_ENCODING, "br");
let req = test_get("/m_mvt/0/0/0").insert_header(accept).to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
let response = assert_response(response).await;
assert_eq!(
response.headers().get(CONTENT_TYPE).unwrap(),
"application/x-protobuf"
@@ -218,7 +218,7 @@ async fn mbt_get_raw_mvt() {
let app = create_app! { CONFIG };
let req = test_get("/m_raw_mvt/0/0/0").to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
let response = assert_response(response).await;
assert_eq!(
response.headers().get(CONTENT_TYPE).unwrap(),
"application/x-protobuf"
@@ -237,7 +237,7 @@ async fn mbt_get_raw_mvt_gzip() {
.insert_header(accept)
.to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
let response = assert_response(response).await;
assert_eq!(
response.headers().get(CONTENT_TYPE).unwrap(),
"application/x-protobuf"
@@ -259,7 +259,7 @@ async fn mbt_get_raw_mvt_gzip_br() {
.insert_header(accept)
.to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
let response = assert_response(response).await;
assert_eq!(
response.headers().get(CONTENT_TYPE).unwrap(),
"application/x-protobuf"
@@ -277,7 +277,7 @@ async fn mbt_get_json() {
let app = create_app! { CONFIG };
let req = test_get("/m_json/0/0/0").to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
let response = assert_response(response).await;
assert_eq!(
response.headers().get(CONTENT_TYPE).unwrap(),
"application/json"
@@ -294,7 +294,7 @@ async fn mbt_get_json_gzip() {
let accept = (ACCEPT_ENCODING, "gzip");
let req = test_get("/m_json/0/0/0").insert_header(accept).to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
let response = assert_response(response).await;
assert_eq!(
response.headers().get(CONTENT_TYPE).unwrap(),
"application/json"

@@ -32,14 +32,14 @@ async fn function_source_tile() {
let mock = mock_sources(mock_pgcfg("connection_string: $DATABASE_URL")).await;
let src = source(&mock, "function_zxy_query");
let tile = src
.get_tile(&TileCoord { z: 0, x: 0, y: 0 }, &None)
.get_tile(TileCoord { z: 0, x: 0, y: 0 }, None)
.await
.unwrap();
assert!(!tile.is_empty());
let src = source(&mock, "function_zxy_query_jsonb");
let tile = src
.get_tile(&TileCoord { z: 0, x: 0, y: 0 }, &None)
.get_tile(TileCoord { z: 0, x: 0, y: 0 }, None)
.await
.unwrap();
assert!(!tile.is_empty());

@@ -46,7 +46,7 @@ postgres:
let req = test_get("/catalog");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
let response = assert_response(response).await;
let body = read_body(response).await;
let body: serde_json::Value = serde_json::from_slice(&body).unwrap();
assert_yaml_snapshot!(body, @r###"
@@ -285,7 +285,7 @@ postgres:
let req = test_get("/table_source/0/0/0");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
}
#[actix_rt::test]
@@ -371,11 +371,11 @@ postgres:
let req = test_get("/table_source_multiple_geom.geom1/0/0/0");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
let req = test_get("/table_source_multiple_geom.geom2/0/0/0");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
}
#[actix_rt::test]
@@ -433,12 +433,12 @@ postgres:
// zoom = 6 (points1)
let req = test_get("/points1/6/38/20");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
// zoom = 12 (points1)
let req = test_get("/points1/12/2476/1280");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
// zoom = 13 (nothing)
let req = test_get("/points1/13/4952/2560");
@@ -448,22 +448,22 @@ postgres:
// zoom = 0 (points2)
let req = test_get("/points2/0/0/0");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
// zoom = 6 (points2)
let req = test_get("/points2/6/38/20");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
// zoom = 12 (points2)
let req = test_get("/points2/12/2476/1280");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
// zoom = 13 (points2)
let req = test_get("/points2/13/4952/2560");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
// zoom = 0 (nothing)
let req = test_get("/points3857/0/0/0");
@@ -473,12 +473,12 @@ postgres:
// zoom = 12 (points3857)
let req = test_get("/points3857/12/2476/1280");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
// zoom = 0 (table_source)
let req = test_get("/table_source/0/0/0");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
// zoom = 12 (nothing)
let req = test_get("/table_source/12/2476/1280");
@@ -604,7 +604,7 @@ postgres:
let req = test_get("/points1,points2,points3857");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
}
#[actix_rt::test]
@@ -694,7 +694,7 @@ postgres:
let req = test_get("/points1,points2,points3857/0/0/0");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
}
#[actix_rt::test]
@@ -735,27 +735,27 @@ postgres:
// zoom = 6 (points1)
let req = test_get("/points1,points2/6/38/20");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
// zoom = 12 (points1)
let req = test_get("/points1,points2/12/2476/1280");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
// zoom = 13 (points1, points2)
let req = test_get("/points1,points2/13/4952/2560");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
// zoom = 14 (points2)
let req = test_get("/points1,points2/14/9904/5121");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
// zoom = 20 (points2)
let req = test_get("/points1,points2/20/633856/327787");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
// zoom = 21 (nothing)
let req = test_get("/points1,points2/21/1267712/655574");
@@ -796,35 +796,35 @@ postgres:
let req = test_get("/function_zoom_xy");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
let req = test_get("/function_zxy");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
let req = test_get("/function_zxy_query");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
let req = test_get("/function_zxy_query_jsonb");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
let req = test_get("/function_zxy_query_test");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
let req = test_get("/function_zxy_row");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
let req = test_get("/function_Mixed_Name");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
let req = test_get("/function_zxy_row_key");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
}
#[actix_rt::test]
@@ -867,7 +867,7 @@ postgres:
let req = test_get("/function_zxy_query/0/0/0");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
}
#[actix_rt::test]
@@ -890,22 +890,22 @@ postgres:
// zoom = 0 (function_source1)
let req = test_get("/function_source1/0/0/0");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
// zoom = 6 (function_source1)
let req = test_get("/function_source1/6/38/20");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
// zoom = 12 (function_source1)
let req = test_get("/function_source1/12/2476/1280");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
// zoom = 13 (function_source1)
let req = test_get("/function_source1/13/4952/2560");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
// zoom = 0 (nothing)
let req = test_get("/function_source2/0/0/0");
@@ -915,12 +915,12 @@ postgres:
// zoom = 6 (function_source2)
let req = test_get("/function_source2/6/38/20");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
// zoom = 12 (function_source2)
let req = test_get("/function_source2/12/2476/1280");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
// zoom = 13 (nothing)
let req = test_get("/function_source2/13/4952/2560");
@@ -941,7 +941,7 @@ postgres:
let req = test_get("/function_zxy_query_test/0/0/0?token=martin");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
}
#[actix_rt::test]
@@ -953,7 +953,7 @@ postgres:
let req = test_get("/health");
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
}
#[actix_rt::test]
@@ -1097,6 +1097,6 @@ tables:
for (name, _) in cfg.tables.unwrap_or_default() {
let req = test_get(format!("/{name}/0/0/0").as_str());
let response = call_service(&app, req).await;
assert!(response.status().is_success());
assert_response(response).await;
}
}

@@ -129,7 +129,7 @@ async fn tables_tilejson() {
async fn tables_tile_ok() {
let mock = mock_sources(mock_pgcfg("connection_string: $DATABASE_URL")).await;
let tile = source(&mock, "table_source")
.get_tile(&TileCoord { z: 0, x: 0, y: 0 }, &None)
.get_tile(TileCoord { z: 0, x: 0, y: 0 }, None)
.await
.unwrap();

@@ -46,7 +46,7 @@ async fn pmt_get_catalog() {
let req = test_get("/catalog").to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
let response = assert_response(response).await;
let body: serde_json::Value = read_body_json(response).await;
assert_yaml_snapshot!(body, @r###"
---
@@ -64,7 +64,7 @@ async fn pmt_get_catalog_gzip() {
let accept = (ACCEPT_ENCODING, "gzip");
let req = test_get("/catalog").insert_header(accept).to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
let response = assert_response(response).await;
let body = decode_gzip(&read_body(response).await).unwrap();
let body: serde_json::Value = serde_json::from_slice(&body).unwrap();
assert_yaml_snapshot!(body, @r###"
@@ -82,7 +82,7 @@ async fn pmt_get_tilejson() {
let app = create_app! { CONFIG };
let req = test_get("/p_png").to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
let response = assert_response(response).await;
let headers = response.headers();
assert_eq!(headers.get(CONTENT_TYPE).unwrap(), "application/json");
assert!(headers.get(CONTENT_ENCODING).is_none());
@@ -96,7 +96,7 @@ async fn pmt_get_tilejson_gzip() {
let accept = (ACCEPT_ENCODING, "gzip");
let req = test_get("/p_png").insert_header(accept).to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
let response = assert_response(response).await;
let headers = response.headers();
assert_eq!(headers.get(CONTENT_TYPE).unwrap(), "application/json");
assert_eq!(headers.get(CONTENT_ENCODING).unwrap(), "gzip");
@@ -110,7 +110,7 @@ async fn pmt_get_raster() {
let app = create_app! { CONFIG };
let req = test_get("/p_png/0/0/0").to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
let response = assert_response(response).await;
assert_eq!(response.headers().get(CONTENT_TYPE).unwrap(), "image/png");
assert!(response.headers().get(CONTENT_ENCODING).is_none());
let body = read_body(response).await;
@@ -124,7 +124,7 @@ async fn pmt_get_raster_gzip() {
let accept = (ACCEPT_ENCODING, "gzip");
let req = test_get("/p_png/0/0/0").insert_header(accept).to_request();
let response = call_service(&app, req).await;
assert!(response.status().is_success());
let response = assert_response(response).await;
assert_eq!(response.headers().get(CONTENT_TYPE).unwrap(), "image/png");
assert!(response.headers().get(CONTENT_ENCODING).is_none());
let body = read_body(response).await;

@@ -4,6 +4,8 @@
mod pg_utils;
use actix_web::dev::ServiceResponse;
use actix_web::test::read_body;
use log::warn;
use martin::Config;
pub use pg_utils::*;
@@ -26,3 +28,14 @@ pub fn mock_cfg(yaml: &str) -> Config {
assert!(res.is_empty(), "unrecognized config: {res:?}");
cfg
}
pub async fn assert_response(response: ServiceResponse) -> ServiceResponse {
if !response.status().is_success() {
let status = response.status();
let headers = response.headers().clone();
let bytes = read_body(response).await;
let body = String::from_utf8_lossy(&bytes);
panic!("response status: {status}\nresponse headers: {headers:?}\nresponse body: {body}");
}
response
}
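
The new `assert_response` helper is the "better error reporting in tests" bullet: a bare `assert!(response.status().is_success())` only says that something failed, while the helper consumes a failing response and panics with its status, headers, and body, then hands the response back on success so the test can keep reading it. A minimal sketch of that consume-and-return shape, without the actix types:

```rust
// Plain stand-in for the actix ServiceResponse used above.
struct Resp {
    status: u16,
    body: String,
}

// Take ownership (reading the body consumes the response), panic with
// full context on failure, and return the response on success.
fn assert_ok(resp: Resp) -> Resp {
    if !(200..300).contains(&resp.status) {
        panic!("response status: {}\nresponse body: {}", resp.status, resp.body);
    }
    resp
}

fn main() {
    let resp = assert_ok(Resp { status: 200, body: "{}".to_string() });
    assert_eq!(resp.body, "{}");
}
```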

@@ -192,13 +192,19 @@ impl MbtileCopierInt {
let rusqlite_conn = unsafe { Connection::from_handle(handle) }?;
if self.options.copy.copy_tiles() {
self.copy_tiles(&rusqlite_conn, &dif, src_type, dst_type, on_duplicate)?;
self.copy_tiles(
&rusqlite_conn,
dif.as_ref(),
src_type,
dst_type,
on_duplicate,
)?;
} else {
debug!("Skipping copying tiles");
}
if self.options.copy.copy_metadata() {
self.copy_metadata(&rusqlite_conn, &dif, on_duplicate)?;
self.copy_metadata(&rusqlite_conn, dif.as_ref(), on_duplicate)?;
} else {
debug!("Skipping copying metadata");
}
@@ -218,7 +224,7 @@ impl MbtileCopierInt {
fn copy_metadata(
&self,
rusqlite_conn: &Connection,
dif: &Option<(Mbtiles, MbtType, MbtType)>,
dif: Option<&(Mbtiles, MbtType, MbtType)>,
on_duplicate: CopyDuplicateMode,
) -> Result<(), MbtError> {
let on_dupl = on_duplicate.to_sql();
@@ -277,7 +283,7 @@ impl MbtileCopierInt {
fn copy_tiles(
&self,
rusqlite_conn: &Connection,
dif: &Option<(Mbtiles, MbtType, MbtType)>,
dif: Option<&(Mbtiles, MbtType, MbtType)>,
src_type: MbtType,
dst_type: MbtType,
on_duplicate: CopyDuplicateMode,