Mirror of https://github.com/maplibre/martin.git (synced 2024-12-18 20:31:54 +03:00)
Rename MartinError, benchmarks, streamline get_tile (#1016)
Use MartinError, MartinResult, FileError and FileResult, and other similar enums.
This commit is contained in:
parent 566e8fa948
commit 0f2cd100cf
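The renames follow one pattern throughout the crate: the old catch-all Error and Result names become MartinError and MartinResult, and per-module aliases (PgResult, FileResult, FontResult) are built the same way. A minimal sketch of that pattern follows; the variant names and the alias are taken from the diff below, but the list is abridged and the thiserror derive and error attributes of the real enum are omitted here:

pub enum MartinError {
    // Two of the variants that appear in the diff; the rest are elided.
    UnrecognizableConnections(Vec<String>),
    InternalError(Box<dyn std::error::Error>),
}

/// A convenience [`Result`] for Martin crate.
pub type MartinResult<T> = Result<T, MartinError>;

// Tile sources are updated to the renamed types as well, e.g. (signature from the diff below):
// async fn get_tile(&self, xyz: &TileCoord, query: &Option<UrlQuery>) -> MartinResult<TileData>;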
Cargo.lock (generated, 2 changed lines)
@@ -1852,7 +1852,7 @@ checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
 
 [[package]]
 name = "martin"
-version = "0.11.0"
+version = "0.11.1"
 dependencies = [
  "actix-cors",
  "actix-http",

martin/Cargo.toml
@@ -3,7 +3,7 @@ lints.workspace = true
 [package]
 name = "martin"
 # Once the release is published with the hash, update https://github.com/maplibre/homebrew-martin
-version = "0.11.0"
+version = "0.11.1"
 authors = ["Stepan Kuzmin <to.stepan.kuzmin@gmail.com>", "Yuri Astrakhan <YuriAstrakhan@gmail.com>", "MapLibre contributors"]
 description = "Blazing fast and lightweight tile server with PostGIS, MBTiles, and PMTiles support"
 keywords = ["maps", "tiles", "mbtiles", "pmtiles", "postgis"]
@@ -54,6 +54,10 @@ path = "src/bin/martin.rs"
 name = "martin-cp"
 path = "src/bin/martin-cp.rs"
 
+[[bench]]
+name = "bench"
+harness = false
+
 [features]
 default = []
 bless-tests = []
martin/benches/bench.rs (new file, 73 lines)
@@ -0,0 +1,73 @@
+use async_trait::async_trait;
+use criterion::async_executor::FuturesExecutor;
+use criterion::{criterion_group, criterion_main, Criterion};
+use martin::srv::get_tile_response;
+use martin::{
+    CatalogSourceEntry, MartinResult, Source, TileCoord, TileData, TileSources, UrlQuery,
+};
+use martin_tile_utils::{Encoding, Format, TileInfo};
+use tilejson::{tilejson, TileJSON};
+
+#[derive(Clone, Debug)]
+struct NullSource {
+    tilejson: TileJSON,
+}
+
+impl NullSource {
+    fn new() -> Self {
+        Self {
+            tilejson: tilejson! { "https://example.com/".to_string() },
+        }
+    }
+}
+
+#[async_trait]
+impl Source for NullSource {
+    fn get_id(&self) -> &str {
+        "null"
+    }
+
+    fn get_tilejson(&self) -> &TileJSON {
+        &self.tilejson
+    }
+
+    fn get_tile_info(&self) -> TileInfo {
+        TileInfo::new(Format::Png, Encoding::Internal)
+    }
+
+    fn clone_source(&self) -> Box<dyn Source> {
+        Box::new(self.clone())
+    }
+
+    fn support_url_query(&self) -> bool {
+        false
+    }
+
+    async fn get_tile(
+        &self,
+        _xyz: &TileCoord,
+        _query: &Option<UrlQuery>,
+    ) -> MartinResult<TileData> {
+        Ok(Vec::new())
+    }
+
+    fn get_catalog_entry(&self) -> CatalogSourceEntry {
+        CatalogSourceEntry::default()
+    }
+}
+
+async fn process_tile(sources: &TileSources) {
+    get_tile_response(sources, TileCoord { z: 0, x: 0, y: 0 }, "null", "", None)
+        .await
+        .unwrap();
+}
+
+fn bench_null_source(c: &mut Criterion) {
+    let sources = TileSources::new(vec![vec![Box::new(NullSource::new())]]);
+    c.bench_function("get_table_source_tile", |b| {
+        b.to_async(FuturesExecutor).iter(|| process_tile(&sources));
+    });
+}
+
+criterion_group!(benches, bench_null_source);
+criterion_main!(benches);
@ -1,4 +1,4 @@
|
||||
use crate::Error;
|
||||
use crate::{MartinError, MartinResult};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum State<T: Clone> {
|
||||
@ -58,7 +58,7 @@ impl Arguments {
|
||||
}
|
||||
|
||||
/// Check that all params have been claimed
|
||||
pub fn check(self) -> Result<(), Error> {
|
||||
pub fn check(self) -> MartinResult<()> {
|
||||
let mut unrecognized = Vec::new();
|
||||
for (i, value) in self.values.into_iter().enumerate() {
|
||||
if let State::Ignore = self.state[i] {
|
||||
@ -68,7 +68,7 @@ impl Arguments {
|
||||
if unrecognized.is_empty() {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(Error::UnrecognizableConnections(unrecognized))
|
||||
Err(MartinError::UnrecognizableConnections(unrecognized))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -203,7 +203,7 @@ mod tests {
|
||||
|
||||
use super::*;
|
||||
use crate::test_utils::{os, some, FauxEnv};
|
||||
use crate::Error;
|
||||
use crate::MartinError;
|
||||
|
||||
#[test]
|
||||
fn test_extract_conn_strings() {
|
||||
@ -217,7 +217,7 @@ mod tests {
|
||||
vec!["postgresql://localhost:5432", "postgres://localhost:5432"]
|
||||
);
|
||||
assert!(matches!(args.check(), Err(
|
||||
Error::UnrecognizableConnections(v)) if v == vec!["mysql://localhost:3306"]));
|
||||
MartinError::UnrecognizableConnections(v)) if v == vec!["mysql://localhost:3306"]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -10,7 +10,8 @@ use crate::args::srv::SrvArgs;
|
||||
use crate::args::State::{Ignore, Share, Take};
|
||||
use crate::config::Config;
|
||||
use crate::file_config::FileConfigEnum;
|
||||
use crate::{Error, OptOneMany, Result};
|
||||
use crate::MartinError::ConfigAndConnectionsError;
|
||||
use crate::{MartinResult, OptOneMany};
|
||||
|
||||
#[derive(Parser, Debug, PartialEq, Default)]
|
||||
#[command(about, version)]
|
||||
@ -57,7 +58,11 @@ pub struct ExtraArgs {
|
||||
}
|
||||
|
||||
impl Args {
|
||||
pub fn merge_into_config<'a>(self, config: &mut Config, env: &impl Env<'a>) -> Result<()> {
|
||||
pub fn merge_into_config<'a>(
|
||||
self,
|
||||
config: &mut Config,
|
||||
env: &impl Env<'a>,
|
||||
) -> MartinResult<()> {
|
||||
if self.meta.watch {
|
||||
warn!("The --watch flag is no longer supported, and will be ignored");
|
||||
}
|
||||
@ -65,7 +70,7 @@ impl Args {
|
||||
warn!("The WATCH_MODE env variable is no longer supported, and will be ignored");
|
||||
}
|
||||
if self.meta.config.is_some() && !self.meta.connection.is_empty() {
|
||||
return Err(Error::ConfigAndConnectionsError(self.meta.connection));
|
||||
return Err(ConfigAndConnectionsError(self.meta.connection));
|
||||
}
|
||||
|
||||
self.srv.merge_into_config(&mut config.srv);
|
||||
@ -122,8 +127,9 @@ mod tests {
|
||||
use crate::pg::PgConfig;
|
||||
use crate::test_utils::{some, FauxEnv};
|
||||
use crate::utils::OptOneMany;
|
||||
use crate::MartinError::UnrecognizableConnections;
|
||||
|
||||
fn parse(args: &[&str]) -> Result<(Config, MetaArgs)> {
|
||||
fn parse(args: &[&str]) -> MartinResult<(Config, MetaArgs)> {
|
||||
let args = Args::parse_from(args);
|
||||
let meta = args.meta.clone();
|
||||
let mut config = Config::default();
|
||||
@ -188,7 +194,7 @@ mod tests {
|
||||
let env = FauxEnv::default();
|
||||
let mut config = Config::default();
|
||||
let err = args.merge_into_config(&mut config, &env).unwrap_err();
|
||||
assert!(matches!(err, crate::Error::ConfigAndConnectionsError(..)));
|
||||
assert!(matches!(err, ConfigAndConnectionsError(..)));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -199,6 +205,6 @@ mod tests {
|
||||
let mut config = Config::default();
|
||||
let err = args.merge_into_config(&mut config, &env).unwrap_err();
|
||||
let bad = vec!["foobar".to_string()];
|
||||
assert!(matches!(err, crate::Error::UnrecognizableConnections(v) if v == bad));
|
||||
assert!(matches!(err, UnrecognizableConnections(v) if v == bad));
|
||||
}
|
||||
}
|
||||
|
@ -9,9 +9,10 @@ use futures::stream::{self, StreamExt};
|
||||
use futures::TryStreamExt;
|
||||
use log::{debug, error, info, log_enabled};
|
||||
use martin::args::{Args, ExtraArgs, MetaArgs, OsEnv, PgArgs, SrvArgs};
|
||||
use martin::srv::{get_composite_tile, merge_tilejson, RESERVED_KEYWORDS};
|
||||
use martin::srv::{get_tile_content, merge_tilejson, RESERVED_KEYWORDS};
|
||||
use martin::{
|
||||
append_rect, read_config, Config, Error, IdResolver, Result, ServerState, TileRect, Xyz,
|
||||
append_rect, read_config, Config, IdResolver, MartinError, MartinResult, ServerState,
|
||||
TileCoord, TileData, TileRect,
|
||||
};
|
||||
use mbtiles::{
|
||||
init_mbtiles_schema, is_empty_database, CopyDuplicateMode, MbtType, MbtTypeCli, Mbtiles,
|
||||
@ -81,7 +82,7 @@ pub struct CopyArgs {
|
||||
pub zoom_levels: Vec<u8>,
|
||||
}
|
||||
|
||||
async fn start(copy_args: CopierArgs) -> Result<()> {
|
||||
async fn start(copy_args: CopierArgs) -> MartinResult<()> {
|
||||
info!("Starting Martin v{VERSION}");
|
||||
|
||||
let env = OsEnv::default();
|
||||
@ -153,12 +154,12 @@ fn compute_tile_ranges(args: &CopyArgs) -> Vec<TileRect> {
|
||||
ranges
|
||||
}
|
||||
|
||||
struct Tile {
|
||||
xyz: Xyz,
|
||||
data: Vec<u8>,
|
||||
struct TileXyz {
|
||||
xyz: TileCoord,
|
||||
data: TileData,
|
||||
}
|
||||
|
||||
impl Debug for Tile {
|
||||
impl Debug for TileXyz {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{} - {} bytes", self.xyz, self.data.len())
|
||||
}
|
||||
@ -216,24 +217,24 @@ impl Display for Progress {
|
||||
}
|
||||
|
||||
/// Given a list of tile ranges, iterate over all tiles in the ranges
|
||||
fn iterate_tiles(tiles: Vec<TileRect>) -> impl Iterator<Item = Xyz> {
|
||||
fn iterate_tiles(tiles: Vec<TileRect>) -> impl Iterator<Item = TileCoord> {
|
||||
tiles.into_iter().flat_map(|t| {
|
||||
let z = t.zoom;
|
||||
(t.min_x..=t.max_x).flat_map(move |x| (t.min_y..=t.max_y).map(move |y| Xyz { z, x, y }))
|
||||
(t.min_x..=t.max_x)
|
||||
.flat_map(move |x| (t.min_y..=t.max_y).map(move |y| TileCoord { z, x, y }))
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn run_tile_copy(args: CopyArgs, state: ServerState) -> Result<()> {
|
||||
async fn run_tile_copy(args: CopyArgs, state: ServerState) -> MartinResult<()> {
|
||||
let output_file = &args.output_file;
|
||||
let concurrency = args.concurrency.unwrap_or(1);
|
||||
let (sources, _use_url_query, info) = state.tiles.get_sources(args.source.as_str(), None)?;
|
||||
let sources = sources.as_slice();
|
||||
let tile_info = sources.first().unwrap().get_tile_info();
|
||||
let (tx, mut rx) = channel::<Tile>(500);
|
||||
let (tx, mut rx) = channel::<TileXyz>(500);
|
||||
let tiles = compute_tile_ranges(&args);
|
||||
let mbt = Mbtiles::new(output_file)?;
|
||||
let mut conn = mbt.open_or_new().await?;
|
||||
let on_dupl = args.on_duplicate;
|
||||
|
||||
let dst_type = if is_empty_database(&mut conn).await? {
|
||||
let dst_type = match args.dst_type.unwrap_or(MbtTypeCli::Normalized) {
|
||||
@ -269,14 +270,15 @@ pub async fn run_tile_copy(args: CopyArgs, state: ServerState) -> Result<()> {
|
||||
try_join!(
|
||||
async move {
|
||||
stream::iter(iterate_tiles(tiles))
|
||||
.map(Ok::<Xyz, Error>)
|
||||
.map(MartinResult::Ok)
|
||||
.try_for_each_concurrent(concurrency, |xyz| {
|
||||
let tx = tx.clone();
|
||||
async move {
|
||||
let data = get_composite_tile(sources, info, &xyz, None).await?;
|
||||
tx.send(Tile { xyz, data })
|
||||
let tile = get_tile_content(sources, info, &xyz, None, None).await?;
|
||||
let data = tile.data;
|
||||
tx.send(TileXyz { xyz, data })
|
||||
.await
|
||||
.map_err(|e| Error::InternalError(e.to_string()))?;
|
||||
.map_err(|e| MartinError::InternalError(e.into()))?;
|
||||
Ok(())
|
||||
}
|
||||
})
|
||||
@ -293,7 +295,7 @@ pub async fn run_tile_copy(args: CopyArgs, state: ServerState) -> Result<()> {
|
||||
} else {
|
||||
batch.push((tile.xyz.z, tile.xyz.x, tile.xyz.y, tile.data));
|
||||
if batch.len() >= BATCH_SIZE || last_saved.elapsed() > SAVE_EVERY {
|
||||
mbt.insert_tiles(&mut conn, dst_type, on_dupl, &batch)
|
||||
mbt.insert_tiles(&mut conn, dst_type, args.on_duplicate, &batch)
|
||||
.await?;
|
||||
batch.clear();
|
||||
last_saved = Instant::now();
|
||||
@ -308,7 +310,7 @@ pub async fn run_tile_copy(args: CopyArgs, state: ServerState) -> Result<()> {
|
||||
}
|
||||
}
|
||||
if !batch.is_empty() {
|
||||
mbt.insert_tiles(&mut conn, dst_type, on_dupl, &batch)
|
||||
mbt.insert_tiles(&mut conn, dst_type, args.on_duplicate, &batch)
|
||||
.await?;
|
||||
}
|
||||
Ok(())
|
||||
|
martin/src/bin/martin.rs
@@ -5,11 +5,11 @@ use clap::Parser;
 use log::{error, info, log_enabled};
 use martin::args::{Args, OsEnv};
 use martin::srv::{new_server, RESERVED_KEYWORDS};
-use martin::{read_config, Config, IdResolver, Result};
+use martin::{read_config, Config, IdResolver, MartinResult};
 
 const VERSION: &str = env!("CARGO_PKG_VERSION");
 
-async fn start(args: Args) -> Result<Server> {
+async fn start(args: Args) -> MartinResult<Server> {
     info!("Starting Martin v{VERSION}");
 
     let env = OsEnv::default();
@ -19,8 +19,8 @@ use crate::pmtiles::PmtSource;
|
||||
use crate::source::{TileInfoSources, TileSources};
|
||||
use crate::sprites::SpriteSources;
|
||||
use crate::srv::SrvConfig;
|
||||
use crate::Error::{ConfigLoadError, ConfigParseError, ConfigWriteError, NoSources};
|
||||
use crate::{IdResolver, OptOneMany, Result};
|
||||
use crate::MartinError::{ConfigLoadError, ConfigParseError, ConfigWriteError, NoSources};
|
||||
use crate::{IdResolver, MartinResult, OptOneMany};
|
||||
|
||||
pub type UnrecognizedValues = HashMap<String, serde_yaml::Value>;
|
||||
|
||||
@ -56,7 +56,7 @@ pub struct Config {
|
||||
|
||||
impl Config {
|
||||
/// Apply defaults to the config, and validate if there is a connection string
|
||||
pub fn finalize(&mut self) -> Result<UnrecognizedValues> {
|
||||
pub fn finalize(&mut self) -> MartinResult<UnrecognizedValues> {
|
||||
let mut res = UnrecognizedValues::new();
|
||||
copy_unrecognized_config(&mut res, "", &self.unrecognized);
|
||||
|
||||
@ -83,7 +83,7 @@ impl Config {
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn resolve(&mut self, idr: IdResolver) -> Result<ServerState> {
|
||||
pub async fn resolve(&mut self, idr: IdResolver) -> MartinResult<ServerState> {
|
||||
Ok(ServerState {
|
||||
tiles: self.resolve_tile_sources(idr).await?,
|
||||
sprites: SpriteSources::resolve(&mut self.sprites)?,
|
||||
@ -91,10 +91,11 @@ impl Config {
|
||||
})
|
||||
}
|
||||
|
||||
async fn resolve_tile_sources(&mut self, idr: IdResolver) -> Result<TileSources> {
|
||||
async fn resolve_tile_sources(&mut self, idr: IdResolver) -> MartinResult<TileSources> {
|
||||
let new_pmt_src = &mut PmtSource::new_box;
|
||||
let new_mbt_src = &mut MbtSource::new_box;
|
||||
let mut sources: Vec<Pin<Box<dyn Future<Output = Result<TileInfoSources>>>>> = Vec::new();
|
||||
let mut sources: Vec<Pin<Box<dyn Future<Output = MartinResult<TileInfoSources>>>>> =
|
||||
Vec::new();
|
||||
|
||||
for s in self.postgres.iter_mut() {
|
||||
sources.push(Box::pin(s.resolve(idr.clone())));
|
||||
@ -113,7 +114,7 @@ impl Config {
|
||||
Ok(TileSources::new(try_join_all(sources).await?))
|
||||
}
|
||||
|
||||
pub fn save_to_file(&self, file_name: PathBuf) -> Result<()> {
|
||||
pub fn save_to_file(&self, file_name: PathBuf) -> MartinResult<()> {
|
||||
let yaml = serde_yaml::to_string(&self).expect("Unable to serialize config");
|
||||
if file_name.as_os_str() == OsStr::new("-") {
|
||||
info!("Current system configuration:");
|
||||
@ -147,7 +148,7 @@ pub fn copy_unrecognized_config(
|
||||
}
|
||||
|
||||
/// Read config from a file
|
||||
pub fn read_config<'a, M>(file_name: &Path, env: &'a M) -> Result<Config>
|
||||
pub fn read_config<'a, M>(file_name: &Path, env: &'a M) -> MartinResult<Config>
|
||||
where
|
||||
M: VariableMap<'a>,
|
||||
M::Value: AsRef<str>,
|
||||
@ -159,7 +160,7 @@ where
|
||||
parse_config(&contents, env, file_name)
|
||||
}
|
||||
|
||||
pub fn parse_config<'a, M>(contents: &str, env: &'a M, file_name: &Path) -> Result<Config>
|
||||
pub fn parse_config<'a, M>(contents: &str, env: &'a M, file_name: &Path) -> MartinResult<Config>
|
||||
where
|
||||
M: VariableMap<'a>,
|
||||
M::Value: AsRef<str>,
|
||||
|
@ -10,9 +10,12 @@ use serde::{Deserialize, Serialize};
|
||||
use crate::config::{copy_unrecognized_config, UnrecognizedValues};
|
||||
use crate::file_config::FileError::{InvalidFilePath, InvalidSourceFilePath, IoError};
|
||||
use crate::source::{Source, TileInfoSources};
|
||||
use crate::utils::{Error, IdResolver, OptOneMany};
|
||||
use crate::utils::{IdResolver, OptOneMany};
|
||||
use crate::MartinResult;
|
||||
use crate::OptOneMany::{Many, One};
|
||||
|
||||
pub type FileResult<T> = Result<T, FileError>;
|
||||
|
||||
#[derive(thiserror::Error, Debug)]
|
||||
pub enum FileError {
|
||||
#[error("IO error {0}: {}", .1.display())]
|
||||
@ -102,7 +105,7 @@ impl FileConfigEnum {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn finalize(&self, prefix: &str) -> Result<UnrecognizedValues, Error> {
|
||||
pub fn finalize(&self, prefix: &str) -> MartinResult<UnrecognizedValues> {
|
||||
let mut res = UnrecognizedValues::new();
|
||||
if let Self::Config(cfg) = self {
|
||||
copy_unrecognized_config(&mut res, prefix, &cfg.unrecognized);
|
||||
@ -155,7 +158,7 @@ impl FileConfigSrc {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn abs_path(&self) -> Result<PathBuf, FileError> {
|
||||
pub fn abs_path(&self) -> FileResult<PathBuf> {
|
||||
let path = self.get_path();
|
||||
path.canonicalize().map_err(|e| IoError(e, path.clone()))
|
||||
}
|
||||
@ -171,12 +174,12 @@ pub async fn resolve_files<Fut>(
|
||||
idr: IdResolver,
|
||||
extension: &str,
|
||||
new_source: &mut impl FnMut(String, PathBuf) -> Fut,
|
||||
) -> Result<TileInfoSources, Error>
|
||||
) -> MartinResult<TileInfoSources>
|
||||
where
|
||||
Fut: Future<Output = Result<Box<dyn Source>, FileError>>,
|
||||
{
|
||||
resolve_int(config, idr, extension, new_source)
|
||||
.map_err(crate::Error::from)
|
||||
.map_err(crate::MartinError::from)
|
||||
.await
|
||||
}
|
||||
|
||||
@ -185,7 +188,7 @@ async fn resolve_int<Fut>(
|
||||
idr: IdResolver,
|
||||
extension: &str,
|
||||
new_source: &mut impl FnMut(String, PathBuf) -> Fut,
|
||||
) -> Result<TileInfoSources, FileError>
|
||||
) -> FileResult<TileInfoSources>
|
||||
where
|
||||
Fut: Future<Output = Result<Box<dyn Source>, FileError>>,
|
||||
{
|
||||
|
@ -28,6 +28,8 @@ const CUTOFF: f64 = 0.25_f64;
|
||||
/// Each range is 256 codepoints long, so the highest range ID is 0xFFFF / 256 = 255.
|
||||
const MAX_UNICODE_CP_RANGE_ID: usize = MAX_UNICODE_CP / CP_RANGE_SIZE;
|
||||
|
||||
pub type FontResult<T> = Result<T, FontError>;
|
||||
|
||||
#[derive(thiserror::Error, Debug)]
|
||||
pub enum FontError {
|
||||
#[error("Font {0} not found")]
|
||||
@ -118,7 +120,7 @@ pub struct CatalogFontEntry {
|
||||
}
|
||||
|
||||
impl FontSources {
|
||||
pub fn resolve(config: &mut OptOneMany<PathBuf>) -> Result<Self, FontError> {
|
||||
pub fn resolve(config: &mut OptOneMany<PathBuf>) -> FontResult<Self> {
|
||||
if config.is_empty() {
|
||||
return Ok(Self::default());
|
||||
}
|
||||
@ -155,7 +157,7 @@ impl FontSources {
|
||||
|
||||
/// Given a list of IDs in a format "id1,id2,id3", return a combined font.
|
||||
#[allow(clippy::cast_possible_truncation)]
|
||||
pub fn get_font_range(&self, ids: &str, start: u32, end: u32) -> Result<Vec<u8>, FontError> {
|
||||
pub fn get_font_range(&self, ids: &str, start: u32, end: u32) -> FontResult<Vec<u8>> {
|
||||
if start > end {
|
||||
return Err(FontError::InvalidFontRangeStartEnd(start, end));
|
||||
}
|
||||
@ -185,7 +187,7 @@ impl FontSources {
|
||||
}
|
||||
}
|
||||
})
|
||||
.collect::<Result<Vec<_>, FontError>>()?;
|
||||
.collect::<FontResult<Vec<_>>>()?;
|
||||
|
||||
if fonts.is_empty() {
|
||||
return Ok(Vec::new());
|
||||
@ -242,7 +244,7 @@ fn recurse_dirs(
|
||||
path: PathBuf,
|
||||
fonts: &mut HashMap<String, FontSource>,
|
||||
is_top_level: bool,
|
||||
) -> Result<(), FontError> {
|
||||
) -> FontResult<()> {
|
||||
let start_count = fonts.len();
|
||||
if path.is_dir() {
|
||||
for dir_entry in path
|
||||
@ -275,7 +277,7 @@ fn parse_font(
|
||||
lib: &Library,
|
||||
fonts: &mut HashMap<String, FontSource>,
|
||||
path: PathBuf,
|
||||
) -> Result<(), FontError> {
|
||||
) -> FontResult<()> {
|
||||
static RE_SPACES: OnceLock<Regex> = OnceLock::new();
|
||||
|
||||
let mut face = lib.new_face(&path, 0)?;
|
||||
|
martin/src/lib.rs
@@ -5,12 +5,12 @@ mod config;
 pub use config::{read_config, Config, ServerState};
 
 mod source;
-pub use source::Source;
+pub use source::{CatalogSourceEntry, Source, Tile, TileData, TileSources, UrlQuery};
 
 mod utils;
 pub use utils::{
-    append_rect, decode_brotli, decode_gzip, Error, IdResolver, OptBoolObj, OptOneMany, Result,
-    TileRect, Xyz,
+    append_rect, decode_brotli, decode_gzip, IdResolver, MartinError, MartinResult, OptBoolObj,
+    OptOneMany, TileCoord, TileRect,
 };
 
 pub mod args;
@ -9,10 +9,10 @@ use martin_tile_utils::TileInfo;
|
||||
use mbtiles::MbtilesPool;
|
||||
use tilejson::TileJSON;
|
||||
|
||||
use crate::file_config::FileError;
|
||||
use crate::file_config::FileError::{AquireConnError, InvalidMetadata, IoError};
|
||||
use crate::source::{Tile, UrlQuery};
|
||||
use crate::{Error, Source, Xyz};
|
||||
use crate::file_config::FileResult;
|
||||
use crate::source::{TileData, UrlQuery};
|
||||
use crate::{MartinResult, Source, TileCoord};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct MbtSource {
|
||||
@ -34,11 +34,11 @@ impl Debug for MbtSource {
|
||||
}
|
||||
|
||||
impl MbtSource {
|
||||
pub async fn new_box(id: String, path: PathBuf) -> Result<Box<dyn Source>, FileError> {
|
||||
pub async fn new_box(id: String, path: PathBuf) -> FileResult<Box<dyn Source>> {
|
||||
Ok(Box::new(MbtSource::new(id, path).await?))
|
||||
}
|
||||
|
||||
async fn new(id: String, path: PathBuf) -> Result<Self, FileError> {
|
||||
async fn new(id: String, path: PathBuf) -> FileResult<Self> {
|
||||
let mbt = MbtilesPool::new(&path)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
@ -81,7 +81,11 @@ impl Source for MbtSource {
|
||||
Box::new(self.clone())
|
||||
}
|
||||
|
||||
async fn get_tile(&self, xyz: &Xyz, _url_query: &Option<UrlQuery>) -> Result<Tile, Error> {
|
||||
async fn get_tile(
|
||||
&self,
|
||||
xyz: &TileCoord,
|
||||
_url_query: &Option<UrlQuery>,
|
||||
) -> MartinResult<TileData> {
|
||||
if let Some(tile) = self
|
||||
.mbtiles
|
||||
.get_tile(xyz.z, xyz.x, xyz.y)
|
||||
|
@ -11,9 +11,10 @@ use crate::config::{copy_unrecognized_config, UnrecognizedValues};
|
||||
use crate::pg::config_function::FuncInfoSources;
|
||||
use crate::pg::config_table::TableInfoSources;
|
||||
use crate::pg::configurator::PgBuilder;
|
||||
use crate::pg::Result;
|
||||
use crate::pg::PgResult;
|
||||
use crate::source::TileInfoSources;
|
||||
use crate::utils::{on_slow, IdResolver, OptBoolObj, OptOneMany};
|
||||
use crate::MartinResult;
|
||||
|
||||
pub trait PgInfo {
|
||||
fn format_id(&self) -> String;
|
||||
@ -92,7 +93,7 @@ pub struct PgCfgPublishFuncs {
|
||||
|
||||
impl PgConfig {
|
||||
/// Apply defaults to the config, and validate if there is a connection string
|
||||
pub fn finalize(&mut self) -> Result<UnrecognizedValues> {
|
||||
pub fn finalize(&mut self) -> PgResult<UnrecognizedValues> {
|
||||
let mut res = UnrecognizedValues::new();
|
||||
if let Some(ref ts) = self.tables {
|
||||
for (k, v) in ts {
|
||||
@ -111,7 +112,7 @@ impl PgConfig {
|
||||
Ok(res)
|
||||
}
|
||||
|
||||
pub async fn resolve(&mut self, id_resolver: IdResolver) -> crate::Result<TileInfoSources> {
|
||||
pub async fn resolve(&mut self, id_resolver: IdResolver) -> MartinResult<TileInfoSources> {
|
||||
let pg = PgBuilder::new(self, id_resolver).await?;
|
||||
let inst_tables = on_slow(
|
||||
pg.instantiate_tables(),
|
||||
|
@ -17,7 +17,7 @@ use crate::pg::table_source::{
|
||||
};
|
||||
use crate::pg::utils::{find_info, find_kv_ignore_case, normalize_key, InfoMap};
|
||||
use crate::pg::PgError::InvalidTableExtent;
|
||||
use crate::pg::{PgCfgPublish, PgCfgPublishFuncs, Result};
|
||||
use crate::pg::{PgCfgPublish, PgCfgPublishFuncs, PgResult};
|
||||
use crate::source::TileInfoSources;
|
||||
use crate::utils::IdResolver;
|
||||
use crate::utils::OptOneMany::NoVals;
|
||||
@ -79,7 +79,7 @@ macro_rules! get_auto_schemas {
|
||||
}
|
||||
|
||||
impl PgBuilder {
|
||||
pub async fn new(config: &PgConfig, id_resolver: IdResolver) -> Result<Self> {
|
||||
pub async fn new(config: &PgConfig, id_resolver: IdResolver) -> PgResult<Self> {
|
||||
let pool = PgPool::new(config).await?;
|
||||
|
||||
let (auto_tables, auto_functions) = calc_auto(config);
|
||||
@ -107,7 +107,7 @@ impl PgBuilder {
|
||||
|
||||
// FIXME: this function has gotten too long due to the new formatting rules, need to be refactored
|
||||
#[allow(clippy::too_many_lines)]
|
||||
pub async fn instantiate_tables(&self) -> Result<(TileInfoSources, TableInfoSources)> {
|
||||
pub async fn instantiate_tables(&self) -> PgResult<(TileInfoSources, TableInfoSources)> {
|
||||
let mut db_tables_info = query_available_tables(&self.pool).await?;
|
||||
|
||||
// Match configured sources with the discovered ones and add them to the pending list.
|
||||
@ -224,7 +224,7 @@ impl PgBuilder {
|
||||
Ok((res, info_map))
|
||||
}
|
||||
|
||||
pub async fn instantiate_functions(&self) -> Result<(TileInfoSources, FuncInfoSources)> {
|
||||
pub async fn instantiate_functions(&self) -> PgResult<(TileInfoSources, FuncInfoSources)> {
|
||||
let mut db_funcs_info = query_available_function(&self.pool).await?;
|
||||
let mut res = TileInfoSources::default();
|
||||
let mut info_map = FuncInfoSources::new();
|
||||
|
martin/src/pg/errors.rs
@@ -7,9 +7,9 @@ use semver::Version;
 
 use crate::pg::utils::query_to_json;
 use crate::source::UrlQuery;
-use crate::Xyz;
+use crate::TileCoord;
 
-pub type Result<T> = std::result::Result<T, PgError>;
+pub type PgResult<T> = Result<T, PgError>;
 
 #[derive(thiserror::Error, Debug)]
 pub enum PgError {
@@ -59,8 +59,8 @@ pub enum PgError {
     PrepareQueryError(#[source] TokioPgError, String, String, String),
 
     #[error(r#"Unable to get tile {2:#} from {1}: {0}"#)]
-    GetTileError(#[source] TokioPgError, String, Xyz),
+    GetTileError(#[source] TokioPgError, String, TileCoord),
 
     #[error(r#"Unable to get tile {2:#} with {:?} params from {1}: {0}"#, query_to_json(.3))]
-    GetTileWithQueryError(#[source] TokioPgError, String, Xyz, UrlQuery),
+    GetTileWithQueryError(#[source] TokioPgError, String, TileCoord, UrlQuery),
 }
@ -10,13 +10,13 @@ use crate::pg::configurator::SqlFuncInfoMapMap;
|
||||
use crate::pg::pg_source::PgSqlInfo;
|
||||
use crate::pg::pool::PgPool;
|
||||
use crate::pg::PgError::PostgresError;
|
||||
use crate::pg::Result;
|
||||
use crate::pg::PgResult;
|
||||
|
||||
/// Get the list of functions from the database
|
||||
///
|
||||
/// # Panics
|
||||
/// Panics if the built-in query returns unexpected results.
|
||||
pub async fn query_available_function(pool: &PgPool) -> Result<SqlFuncInfoMapMap> {
|
||||
pub async fn query_available_function(pool: &PgPool) -> PgResult<SqlFuncInfoMapMap> {
|
||||
let mut res = SqlFuncInfoMapMap::new();
|
||||
|
||||
pool.get()
|
||||
|
@ -13,6 +13,6 @@ mod utils;
|
||||
pub use config::{PgCfgPublish, PgCfgPublishFuncs, PgCfgPublishTables, PgConfig, PgSslCerts};
|
||||
pub use config_function::FunctionInfo;
|
||||
pub use config_table::TableInfo;
|
||||
pub use errors::{PgError, Result};
|
||||
pub use errors::{PgError, PgResult};
|
||||
pub use function_source::query_available_function;
|
||||
pub use pool::{PgPool, POOL_SIZE_DEFAULT};
|
||||
|
@ -11,8 +11,8 @@ use tilejson::TileJSON;
|
||||
use crate::pg::pool::PgPool;
|
||||
use crate::pg::utils::query_to_json;
|
||||
use crate::pg::PgError::{GetTileError, GetTileWithQueryError, PrepareQueryError};
|
||||
use crate::source::{Source, Tile, UrlQuery};
|
||||
use crate::{Result, Xyz};
|
||||
use crate::source::{Source, TileData, UrlQuery};
|
||||
use crate::{MartinResult, TileCoord};
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct PgSource {
|
||||
@ -56,7 +56,11 @@ impl Source for PgSource {
|
||||
self.info.use_url_query
|
||||
}
|
||||
|
||||
async fn get_tile(&self, xyz: &Xyz, url_query: &Option<UrlQuery>) -> Result<Tile> {
|
||||
async fn get_tile(
|
||||
&self,
|
||||
xyz: &TileCoord,
|
||||
url_query: &Option<UrlQuery>,
|
||||
) -> MartinResult<TileData> {
|
||||
let empty_query = HashMap::new();
|
||||
let url_query = url_query.as_ref().unwrap_or(&empty_query);
|
||||
let conn = self.pool.get().await?;
|
||||
@ -100,7 +104,7 @@ impl Source for PgSource {
|
||||
};
|
||||
|
||||
let tile = tile
|
||||
.map(|row| row.and_then(|r| r.get::<_, Option<Tile>>(0)))
|
||||
.map(|row| row.and_then(|r| r.get::<_, Option<TileData>>(0)))
|
||||
.map_err(|e| {
|
||||
if self.support_url_query() {
|
||||
GetTileWithQueryError(e, self.id.to_string(), *xyz, url_query.clone())
|
||||
|
@ -8,7 +8,7 @@ use crate::pg::tls::{make_connector, parse_conn_str, SslModeOverride};
|
||||
use crate::pg::PgError::{
|
||||
BadPostgisVersion, PostgisTooOld, PostgresError, PostgresPoolBuildError, PostgresPoolConnError,
|
||||
};
|
||||
use crate::pg::Result;
|
||||
use crate::pg::PgResult;
|
||||
|
||||
pub const POOL_SIZE_DEFAULT: usize = 20;
|
||||
|
||||
@ -27,7 +27,7 @@ pub struct PgPool {
|
||||
}
|
||||
|
||||
impl PgPool {
|
||||
pub async fn new(config: &PgConfig) -> Result<Self> {
|
||||
pub async fn new(config: &PgConfig) -> PgResult<Self> {
|
||||
let (id, mgr) = Self::parse_config(config)?;
|
||||
|
||||
let pool = Pool::builder(mgr)
|
||||
@ -64,7 +64,7 @@ SELECT
|
||||
Ok(Self { id, pool, margin })
|
||||
}
|
||||
|
||||
fn parse_config(config: &PgConfig) -> Result<(String, Manager)> {
|
||||
fn parse_config(config: &PgConfig) -> PgResult<(String, Manager)> {
|
||||
let conn_str = config.connection_string.as_ref().unwrap().as_str();
|
||||
let (pg_cfg, ssl_mode) = parse_conn_str(conn_str)?;
|
||||
|
||||
@ -100,7 +100,7 @@ SELECT
|
||||
Ok((id, mgr))
|
||||
}
|
||||
|
||||
pub async fn get(&self) -> Result<Object> {
|
||||
pub async fn get(&self) -> PgResult<Object> {
|
||||
get_conn(&self.pool, self.id.as_str()).await
|
||||
}
|
||||
|
||||
@ -115,7 +115,7 @@ SELECT
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_conn(pool: &Pool, id: &str) -> Result<Object> {
|
||||
async fn get_conn(pool: &Pool, id: &str) -> PgResult<Object> {
|
||||
pool.get()
|
||||
.await
|
||||
.map_err(|e| PostgresPoolConnError(e, id.to_string()))
|
||||
|
@ -16,13 +16,13 @@ use crate::pg::pg_source::PgSqlInfo;
|
||||
use crate::pg::pool::PgPool;
|
||||
use crate::pg::utils::{json_to_hashmap, normalize_key, polygon_to_bbox};
|
||||
use crate::pg::PgError::PostgresError;
|
||||
use crate::pg::Result;
|
||||
use crate::pg::PgResult;
|
||||
|
||||
static DEFAULT_EXTENT: u32 = 4096;
|
||||
static DEFAULT_BUFFER: u32 = 64;
|
||||
static DEFAULT_CLIP_GEOM: bool = true;
|
||||
|
||||
pub async fn query_available_tables(pool: &PgPool) -> Result<SqlTableInfoMapMapMap> {
|
||||
pub async fn query_available_tables(pool: &PgPool) -> PgResult<SqlTableInfoMapMapMap> {
|
||||
let conn = pool.get().await?;
|
||||
let rows = conn
|
||||
.query(include_str!("scripts/query_available_tables.sql"), &[])
|
||||
@ -101,7 +101,7 @@ pub async fn table_to_query(
|
||||
pool: PgPool,
|
||||
bounds_type: BoundsCalcType,
|
||||
max_feature_count: Option<usize>,
|
||||
) -> Result<(String, PgSqlInfo, TableInfo)> {
|
||||
) -> PgResult<(String, PgSqlInfo, TableInfo)> {
|
||||
let schema = escape_identifier(&info.schema);
|
||||
let table = escape_identifier(&info.table);
|
||||
let geometry_column = escape_identifier(&info.geometry_column);
|
||||
@ -200,7 +200,7 @@ async fn calc_bounds(
|
||||
table: &str,
|
||||
geometry_column: &str,
|
||||
srid: i32,
|
||||
) -> Result<Option<Bounds>> {
|
||||
) -> PgResult<Option<Bounds>> {
|
||||
Ok(pool.get()
|
||||
.await?
|
||||
.query_one(&format!(
|
||||
|
@ -16,7 +16,7 @@ use crate::pg::PgError::{
|
||||
BadConnectionString, CannotLoadRoots, CannotOpenCert, CannotParseCert, CannotUseClientKey,
|
||||
InvalidPrivateKey, UnknownSslMode,
|
||||
};
|
||||
use crate::pg::{PgSslCerts, Result};
|
||||
use crate::pg::{PgResult, PgSslCerts};
|
||||
|
||||
/// A temporary workaround for <https://github.com/sfackler/rust-postgres/pull/988>
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
@ -27,7 +27,7 @@ pub enum SslModeOverride {
|
||||
}
|
||||
|
||||
/// Special treatment for sslmode=verify-ca & sslmode=verify-full - if found, replace them with sslmode=require
|
||||
pub fn parse_conn_str(conn_str: &str) -> Result<(Config, SslModeOverride)> {
|
||||
pub fn parse_conn_str(conn_str: &str) -> PgResult<(Config, SslModeOverride)> {
|
||||
let mut mode = SslModeOverride::Unmodified(SslMode::Disable);
|
||||
|
||||
let exp = r"(?P<before>(^|\?|&| )sslmode=)(?P<mode>verify-(ca|full))(?P<after>$|&| )";
|
||||
@ -62,12 +62,12 @@ impl rustls::client::ServerCertVerifier for NoCertificateVerification {
|
||||
_scts: &mut dyn Iterator<Item = &[u8]>,
|
||||
_ocsp: &[u8],
|
||||
_now: std::time::SystemTime,
|
||||
) -> std::result::Result<rustls::client::ServerCertVerified, rustls::Error> {
|
||||
) -> Result<rustls::client::ServerCertVerified, rustls::Error> {
|
||||
Ok(rustls::client::ServerCertVerified::assertion())
|
||||
}
|
||||
}
|
||||
|
||||
fn read_certs(file: &PathBuf) -> Result<Vec<Certificate>> {
|
||||
fn read_certs(file: &PathBuf) -> PgResult<Vec<Certificate>> {
|
||||
Ok(rustls_pemfile::certs(&mut cert_reader(file)?)
|
||||
.map_err(|e| CannotParseCert(e, file.clone()))?
|
||||
.into_iter()
|
||||
@ -75,7 +75,7 @@ fn read_certs(file: &PathBuf) -> Result<Vec<Certificate>> {
|
||||
.collect())
|
||||
}
|
||||
|
||||
fn cert_reader(file: &PathBuf) -> Result<BufReader<File>> {
|
||||
fn cert_reader(file: &PathBuf) -> PgResult<BufReader<File>> {
|
||||
Ok(BufReader::new(
|
||||
File::open(file).map_err(|e| CannotOpenCert(e, file.clone()))?,
|
||||
))
|
||||
@ -84,7 +84,7 @@ fn cert_reader(file: &PathBuf) -> Result<BufReader<File>> {
|
||||
pub fn make_connector(
|
||||
pg_certs: &PgSslCerts,
|
||||
ssl_mode: SslModeOverride,
|
||||
) -> Result<MakeRustlsConnect> {
|
||||
) -> PgResult<MakeRustlsConnect> {
|
||||
let (verify_ca, _verify_hostname) = match ssl_mode {
|
||||
SslModeOverride::Unmodified(mode) => match mode {
|
||||
SslMode::Disable | SslMode::Prefer => (false, false),
|
||||
|
@ -11,10 +11,10 @@ use pmtiles::mmap::MmapBackend;
|
||||
use pmtiles::{Compression, TileType};
|
||||
use tilejson::TileJSON;
|
||||
|
||||
use crate::file_config::FileError;
|
||||
use crate::file_config::FileError::{InvalidMetadata, IoError};
|
||||
use crate::source::{Source, Tile, UrlQuery};
|
||||
use crate::{Error, Xyz};
|
||||
use crate::file_config::FileResult;
|
||||
use crate::source::{Source, TileData, UrlQuery};
|
||||
use crate::{MartinResult, TileCoord};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct PmtSource {
|
||||
@ -32,11 +32,11 @@ impl Debug for PmtSource {
|
||||
}
|
||||
|
||||
impl PmtSource {
|
||||
pub async fn new_box(id: String, path: PathBuf) -> Result<Box<dyn Source>, FileError> {
|
||||
pub async fn new_box(id: String, path: PathBuf) -> FileResult<Box<dyn Source>> {
|
||||
Ok(Box::new(PmtSource::new(id, path).await?))
|
||||
}
|
||||
|
||||
async fn new(id: String, path: PathBuf) -> Result<Self, FileError> {
|
||||
async fn new(id: String, path: PathBuf) -> FileResult<Self> {
|
||||
let backend = MmapBackend::try_from(path.as_path())
|
||||
.await
|
||||
.map_err(|e| {
|
||||
@ -129,7 +129,11 @@ impl Source for PmtSource {
|
||||
Box::new(self.clone())
|
||||
}
|
||||
|
||||
async fn get_tile(&self, xyz: &Xyz, _url_query: &Option<UrlQuery>) -> Result<Tile, Error> {
|
||||
async fn get_tile(
|
||||
&self,
|
||||
xyz: &TileCoord,
|
||||
_url_query: &Option<UrlQuery>,
|
||||
) -> MartinResult<TileData> {
|
||||
// TODO: optimize to return Bytes
|
||||
if let Some(t) = self
|
||||
.pmtiles
|
||||
|
@ -8,9 +8,9 @@ use martin_tile_utils::TileInfo;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tilejson::TileJSON;
|
||||
|
||||
use crate::{Result, Xyz};
|
||||
use crate::{MartinResult, TileCoord};
|
||||
|
||||
pub type Tile = Vec<u8>;
|
||||
pub type TileData = Vec<u8>;
|
||||
pub type UrlQuery = HashMap<String, String>;
|
||||
|
||||
pub type TileInfoSource = Box<dyn Source>;
|
||||
@ -33,6 +33,7 @@ impl TileSources {
|
||||
)
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn get_catalog(&self) -> TileCatalog {
|
||||
self.0
|
||||
.iter()
|
||||
@ -48,6 +49,9 @@ impl TileSources {
|
||||
.as_ref())
|
||||
}
|
||||
|
||||
/// Get a list of sources, and the tile info for the merged sources.
|
||||
/// Ensure that all sources have the same format and encoding.
|
||||
/// If zoom is specified, filter out sources that do not support it.
|
||||
pub fn get_sources(
|
||||
&self,
|
||||
source_ids: &str,
|
||||
@ -56,12 +60,14 @@ impl TileSources {
|
||||
let mut sources = Vec::new();
|
||||
let mut info: Option<TileInfo> = None;
|
||||
let mut use_url_query = false;
|
||||
|
||||
for id in source_ids.split(',') {
|
||||
let src = self.get_source(id)?;
|
||||
let src_inf = src.get_tile_info();
|
||||
use_url_query |= src.support_url_query();
|
||||
|
||||
// make sure all sources have the same format
|
||||
// make sure all sources have the same format and encoding
|
||||
// TODO: support multiple encodings of the same format
|
||||
match info {
|
||||
Some(inf) if inf == src_inf => {}
|
||||
Some(inf) => Err(ErrorNotFound(format!(
|
||||
@ -107,7 +113,7 @@ pub trait Source: Send + Debug {
|
||||
false
|
||||
}
|
||||
|
||||
async fn get_tile(&self, xyz: &Xyz, query: &Option<UrlQuery>) -> Result<Tile>;
|
||||
async fn get_tile(&self, xyz: &TileCoord, query: &Option<UrlQuery>) -> MartinResult<TileData>;
|
||||
|
||||
fn is_valid_zoom(&self, zoom: u8) -> bool {
|
||||
let tj = self.get_tilejson();
|
||||
@ -151,8 +157,20 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn xyz_format() {
|
||||
let xyz = Xyz { z: 1, x: 2, y: 3 };
|
||||
let xyz = TileCoord { z: 1, x: 2, y: 3 };
|
||||
assert_eq!(format!("{xyz}"), "1,2,3");
|
||||
assert_eq!(format!("{xyz:#}"), "1/2/3");
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Tile {
|
||||
pub data: TileData,
|
||||
pub info: TileInfo,
|
||||
}
|
||||
|
||||
impl Tile {
|
||||
#[must_use]
|
||||
pub fn new(data: TileData, info: TileInfo) -> Self {
|
||||
Self { data, info }
|
||||
}
|
||||
}
|
||||
|
@ -11,7 +11,7 @@ use spreet::resvg::usvg::{Error as ResvgError, Options, Tree, TreeParsing};
|
||||
use spreet::sprite::{sprite_name, Sprite, Spritesheet, SpritesheetBuilder};
|
||||
use tokio::io::AsyncReadExt;
|
||||
|
||||
use crate::file_config::{FileConfigEnum, FileError};
|
||||
use crate::file_config::{FileConfigEnum, FileResult};
|
||||
|
||||
#[derive(thiserror::Error, Debug)]
|
||||
pub enum SpriteError {
|
||||
@ -54,7 +54,7 @@ pub type SpriteCatalog = BTreeMap<String, CatalogSpriteEntry>;
|
||||
pub struct SpriteSources(HashMap<String, SpriteSource>);
|
||||
|
||||
impl SpriteSources {
|
||||
pub fn resolve(config: &mut FileConfigEnum) -> Result<Self, FileError> {
|
||||
pub fn resolve(config: &mut FileConfigEnum) -> FileResult<Self> {
|
||||
let Some(cfg) = config.extract_file_config() else {
|
||||
return Ok(Self::default());
|
||||
};
|
||||
@ -87,7 +87,7 @@ impl SpriteSources {
|
||||
Ok(results)
|
||||
}
|
||||
|
||||
pub fn get_catalog(&self) -> Result<SpriteCatalog, FileError> {
|
||||
pub fn get_catalog(&self) -> FileResult<SpriteCatalog> {
|
||||
// TODO: all sprite generation should be pre-cached
|
||||
Ok(self
|
||||
.0
|
||||
|
martin/src/srv/mod.rs
@@ -3,5 +3,6 @@ pub use config::{SrvConfig, KEEP_ALIVE_DEFAULT, LISTEN_ADDRESSES_DEFAULT};
 
 mod server;
 pub use server::{
-    get_composite_tile, merge_tilejson, new_server, router, Catalog, RESERVED_KEYWORDS,
+    get_tile_content, get_tile_response, merge_tilejson, new_server, router, Catalog, TileRequest,
+    RESERVED_KEYWORDS,
 };
@ -14,7 +14,7 @@ use actix_web::middleware::TrailingSlash;
|
||||
use actix_web::web::{Data, Path, Query};
|
||||
use actix_web::{
|
||||
middleware, route, web, App, HttpMessage, HttpRequest, HttpResponse, HttpServer, Responder,
|
||||
Result,
|
||||
Result as ActixResult,
|
||||
};
|
||||
use futures::future::try_join_all;
|
||||
use itertools::Itertools as _;
|
||||
@ -29,8 +29,8 @@ use crate::source::{Source, TileCatalog, TileSources, UrlQuery};
|
||||
use crate::sprites::{SpriteCatalog, SpriteError, SpriteSources};
|
||||
use crate::srv::config::{SrvConfig, KEEP_ALIVE_DEFAULT, LISTEN_ADDRESSES_DEFAULT};
|
||||
use crate::utils::{decode_brotli, decode_gzip, encode_brotli, encode_gzip};
|
||||
use crate::Error::BindingError;
|
||||
use crate::{Error, Xyz};
|
||||
use crate::MartinError::BindingError;
|
||||
use crate::{MartinResult, Tile, TileCoord};
|
||||
|
||||
/// List of keywords that cannot be used as source IDs. Some of these are reserved for future use.
|
||||
/// Reserved keywords must never end in a "dot number" (e.g. ".1").
|
||||
@ -54,7 +54,7 @@ pub struct Catalog {
|
||||
}
|
||||
|
||||
impl Catalog {
|
||||
pub fn new(state: &ServerState) -> Result<Self, Error> {
|
||||
pub fn new(state: &ServerState) -> MartinResult<Self> {
|
||||
Ok(Self {
|
||||
tiles: state.tiles.get_catalog(),
|
||||
sprites: state.sprites.get_catalog()?,
|
||||
@ -68,8 +68,8 @@ struct TileJsonRequest {
|
||||
source_ids: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct TileRequest {
|
||||
#[derive(Deserialize, Clone)]
|
||||
pub struct TileRequest {
|
||||
source_ids: String,
|
||||
z: u8,
|
||||
x: u32,
|
||||
@ -136,7 +136,7 @@ async fn get_catalog(catalog: Data<Catalog>) -> impl Responder {
|
||||
async fn get_sprite_png(
|
||||
path: Path<TileJsonRequest>,
|
||||
sprites: Data<SpriteSources>,
|
||||
) -> Result<HttpResponse> {
|
||||
) -> ActixResult<HttpResponse> {
|
||||
let sheet = sprites
|
||||
.get_sprites(&path.source_ids)
|
||||
.await
|
||||
@ -155,7 +155,7 @@ async fn get_sprite_png(
|
||||
async fn get_sprite_json(
|
||||
path: Path<TileJsonRequest>,
|
||||
sprites: Data<SpriteSources>,
|
||||
) -> Result<HttpResponse> {
|
||||
) -> ActixResult<HttpResponse> {
|
||||
let sheet = sprites
|
||||
.get_sprites(&path.source_ids)
|
||||
.await
|
||||
@ -176,7 +176,7 @@ struct FontRequest {
|
||||
wrap = "middleware::Compress::default()"
|
||||
)]
|
||||
#[allow(clippy::unused_async)]
|
||||
async fn get_font(path: Path<FontRequest>, fonts: Data<FontSources>) -> Result<HttpResponse> {
|
||||
async fn get_font(path: Path<FontRequest>, fonts: Data<FontSources>) -> ActixResult<HttpResponse> {
|
||||
let data = fonts
|
||||
.get_font_range(&path.fontstack, path.start, path.end)
|
||||
.map_err(map_font_error)?;
|
||||
@ -196,7 +196,7 @@ async fn git_source_info(
|
||||
req: HttpRequest,
|
||||
path: Path<TileJsonRequest>,
|
||||
sources: Data<TileSources>,
|
||||
) -> Result<HttpResponse> {
|
||||
) -> ActixResult<HttpResponse> {
|
||||
let sources = sources.get_sources(&path.source_ids, None)?.0;
|
||||
let info = req.connection_info();
|
||||
let tiles_path = get_request_path(&req);
|
||||
@ -212,7 +212,12 @@ fn get_request_path(req: &HttpRequest) -> String {
|
||||
.unwrap_or_else(|| req.path().to_owned())
|
||||
}
|
||||
|
||||
fn get_tiles_url(scheme: &str, host: &str, query_string: &str, tiles_path: &str) -> Result<String> {
|
||||
fn get_tiles_url(
|
||||
scheme: &str,
|
||||
host: &str,
|
||||
query_string: &str,
|
||||
tiles_path: &str,
|
||||
) -> ActixResult<String> {
|
||||
let path_and_query = if query_string.is_empty() {
|
||||
format!("{tiles_path}/{{z}}/{{x}}/{{y}}")
|
||||
} else {
|
||||
@ -326,66 +331,53 @@ async fn get_tile(
|
||||
req: HttpRequest,
|
||||
path: Path<TileRequest>,
|
||||
sources: Data<TileSources>,
|
||||
) -> Result<HttpResponse> {
|
||||
let xyz = Xyz {
|
||||
) -> ActixResult<HttpResponse> {
|
||||
let xyz = TileCoord {
|
||||
z: path.z,
|
||||
x: path.x,
|
||||
y: path.y,
|
||||
};
|
||||
|
||||
// Optimization for a single-source request.
|
||||
let (tile, info) = if path.source_ids.contains(',') {
|
||||
let (sources, use_url_query, info) = sources.get_sources(&path.source_ids, Some(path.z))?;
|
||||
let query = if use_url_query {
|
||||
Some(req.query_string())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
(
|
||||
get_composite_tile(sources.as_slice(), info, &xyz, query).await?,
|
||||
info,
|
||||
)
|
||||
} else {
|
||||
let id = &path.source_ids;
|
||||
let zoom = xyz.z;
|
||||
let src = sources.get_source(id)?;
|
||||
if !TileSources::check_zoom(src, id, zoom) {
|
||||
return Err(ErrorNotFound(format!(
|
||||
"Zoom {zoom} is not valid for source {id}",
|
||||
)));
|
||||
}
|
||||
let query = if src.support_url_query() {
|
||||
Some(Query::<UrlQuery>::from_query(req.query_string())?.into_inner())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let tile = src
|
||||
.get_tile(&xyz, &query)
|
||||
.await
|
||||
.map_err(map_internal_error)?;
|
||||
(tile, src.get_tile_info())
|
||||
};
|
||||
let source_ids = &path.source_ids;
|
||||
let query = req.query_string();
|
||||
let encodings = req.get_header::<AcceptEncoding>();
|
||||
|
||||
Ok(if tile.is_empty() {
|
||||
get_tile_response(sources.as_ref(), xyz, source_ids, query, encodings).await
|
||||
}
|
||||
|
||||
pub async fn get_tile_response(
|
||||
sources: &TileSources,
|
||||
xyz: TileCoord,
|
||||
source_ids: &str,
|
||||
query: &str,
|
||||
encodings: Option<AcceptEncoding>,
|
||||
) -> ActixResult<HttpResponse> {
|
||||
let (sources, use_url_query, info) = sources.get_sources(source_ids, Some(xyz.z))?;
|
||||
|
||||
let sources = sources.as_slice();
|
||||
let query = use_url_query.then_some(query);
|
||||
|
||||
let tile = get_tile_content(sources, info, &xyz, query, encodings.as_ref()).await?;
|
||||
|
||||
Ok(if tile.data.is_empty() {
|
||||
HttpResponse::NoContent().finish()
|
||||
} else {
|
||||
// decide if (re-)encoding of the tile data is needed, and recompress if so
|
||||
let (tile, info) = recompress(tile, info, req.get_header::<AcceptEncoding>())?;
|
||||
let mut response = HttpResponse::Ok();
|
||||
response.content_type(info.format.content_type());
|
||||
if let Some(val) = info.encoding.content_encoding() {
|
||||
response.content_type(tile.info.format.content_type());
|
||||
if let Some(val) = tile.info.encoding.content_encoding() {
|
||||
response.insert_header((CONTENT_ENCODING, val));
|
||||
}
|
||||
response.body(tile)
|
||||
response.body(tile.data)
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn get_composite_tile(
|
||||
pub async fn get_tile_content(
|
||||
sources: &[&dyn Source],
|
||||
info: TileInfo,
|
||||
xyz: &Xyz,
|
||||
xyz: &TileCoord,
|
||||
query: Option<&str>,
|
||||
) -> Result<Vec<u8>> {
|
||||
encodings: Option<&AcceptEncoding>,
|
||||
) -> ActixResult<Tile> {
|
||||
if sources.is_empty() {
|
||||
return Err(ErrorNotFound("No valid sources found"));
|
||||
}
|
||||
@ -394,9 +386,11 @@ pub async fn get_composite_tile(
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let mut tiles = try_join_all(sources.iter().map(|s| s.get_tile(xyz, &query)))
|
||||
.await
|
||||
.map_err(map_internal_error)?;
|
||||
|
||||
// Make sure tiles can be concatenated, or if not, that there is only one non-empty tile for each zoom level
|
||||
// TODO: can zlib, brotli, or zstd be concatenated?
|
||||
// TODO: implement decompression step for other concatenate-able formats
|
||||
@ -409,70 +403,80 @@ pub async fn get_composite_tile(
|
||||
xyz.z
|
||||
)))?;
|
||||
}
|
||||
Ok(
|
||||
// Minor optimization to prevent concatenation if there are less than 2 tiles
|
||||
if layer_count == 1 {
|
||||
tiles.swap_remove(0)
|
||||
} else if layer_count == 0 {
|
||||
Vec::new()
|
||||
} else {
|
||||
tiles.concat()
|
||||
},
|
||||
)
|
||||
|
||||
// Minor optimization to prevent concatenation if there are less than 2 tiles
|
||||
let data = match layer_count {
|
||||
1 => tiles.swap_remove(0),
|
||||
0 => return Ok(Tile::new(Vec::new(), info)),
|
||||
_ => tiles.concat(),
|
||||
};
|
||||
|
||||
// decide if (re-)encoding of the tile data is needed, and recompress if so
|
||||
let tile = recompress(Tile::new(data, info), encodings)?;
|
||||
|
||||
Ok(tile)
|
||||
}
|
||||
|
||||
fn recompress(
|
||||
mut tile: Vec<u8>,
|
||||
mut info: TileInfo,
|
||||
accept_enc: Option<AcceptEncoding>,
|
||||
) -> Result<(Vec<u8>, TileInfo)> {
|
||||
fn recompress(mut tile: Tile, accept_enc: Option<&AcceptEncoding>) -> ActixResult<Tile> {
|
||||
if let Some(accept_enc) = accept_enc {
|
||||
if info.encoding.is_encoded() {
|
||||
if tile.info.encoding.is_encoded() {
|
||||
// already compressed, see if we can send it as is, or need to re-compress
|
||||
if !accept_enc.iter().any(|e| {
|
||||
if let Preference::Specific(HeaderEnc::Known(enc)) = e.item {
|
||||
to_encoding(enc) == Some(info.encoding)
|
||||
to_encoding(enc) == Some(tile.info.encoding)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}) {
|
||||
// need to re-compress the tile - uncompress it first
|
||||
(tile, info) = decode(tile, info)?;
|
||||
tile = decode(tile)?;
|
||||
}
|
||||
}
|
||||
if info.encoding == Encoding::Uncompressed {
|
||||
if tile.info.encoding == Encoding::Uncompressed {
|
||||
// only apply compression if the content supports it
|
||||
if let Some(HeaderEnc::Known(enc)) = accept_enc.negotiate(SUPPORTED_ENCODINGS.iter()) {
|
||||
// (re-)compress the tile into the preferred encoding
|
||||
(tile, info) = encode(tile, info, enc)?;
|
||||
tile = encode(tile, enc)?;
|
||||
}
|
||||
}
|
||||
Ok((tile, info))
|
||||
Ok(tile)
|
||||
} else {
|
||||
// no accepted-encoding header, decode the tile if compressed
|
||||
decode(tile, info)
|
||||
decode(tile)
|
||||
}
|
||||
}
|
||||
|
||||
fn encode(tile: Vec<u8>, info: TileInfo, enc: ContentEncoding) -> Result<(Vec<u8>, TileInfo)> {
|
||||
fn encode(tile: Tile, enc: ContentEncoding) -> ActixResult<Tile> {
|
||||
Ok(match enc {
|
||||
ContentEncoding::Brotli => (encode_brotli(&tile)?, info.encoding(Encoding::Brotli)),
|
||||
ContentEncoding::Gzip => (encode_gzip(&tile)?, info.encoding(Encoding::Gzip)),
|
||||
_ => (tile, info),
|
||||
ContentEncoding::Brotli => Tile::new(
|
||||
encode_brotli(&tile.data)?,
|
||||
tile.info.encoding(Encoding::Brotli),
|
||||
),
|
||||
ContentEncoding::Gzip => {
|
||||
Tile::new(encode_gzip(&tile.data)?, tile.info.encoding(Encoding::Gzip))
|
||||
}
|
||||
_ => tile,
|
||||
})
|
||||
}
|
||||
|
||||
fn decode(tile: Vec<u8>, info: TileInfo) -> Result<(Vec<u8>, TileInfo)> {
|
||||
fn decode(tile: Tile) -> ActixResult<Tile> {
|
||||
let info = tile.info;
|
||||
Ok(if info.encoding.is_encoded() {
|
||||
match info.encoding {
|
||||
Encoding::Gzip => (decode_gzip(&tile)?, info.encoding(Encoding::Uncompressed)),
|
||||
Encoding::Brotli => (decode_brotli(&tile)?, info.encoding(Encoding::Uncompressed)),
|
||||
Encoding::Gzip => Tile::new(
|
||||
decode_gzip(&tile.data)?,
|
||||
info.encoding(Encoding::Uncompressed),
|
||||
),
|
||||
Encoding::Brotli => Tile::new(
|
||||
decode_brotli(&tile.data)?,
|
||||
info.encoding(Encoding::Uncompressed),
|
||||
),
|
||||
_ => Err(ErrorBadRequest(format!(
|
||||
"Tile is is stored as {info}, but the client does not accept this encoding"
|
||||
)))?,
|
||||
}
|
||||
} else {
|
||||
(tile, info)
|
||||
tile
|
||||
})
|
||||
}
|
||||
|
||||
@ -498,7 +502,7 @@ pub fn router(cfg: &mut web::ServiceConfig) {
|
||||
}
|
||||
|
||||
/// Create a new initialized Actix `App` instance together with the listening address.
|
||||
pub fn new_server(config: SrvConfig, state: ServerState) -> crate::Result<(Server, String)> {
|
||||
pub fn new_server(config: SrvConfig, state: ServerState) -> MartinResult<(Server, String)> {
|
||||
let catalog = Catalog::new(&state)?;
|
||||
let keep_alive = Duration::from_secs(config.keep_alive.unwrap_or(KEEP_ALIVE_DEFAULT));
|
||||
let worker_processes = config.worker_processes.unwrap_or_else(num_cpus::get);
|
||||
@ -547,7 +551,7 @@ mod tests {
|
||||
use tilejson::{tilejson, Bounds, VectorLayer};
|
||||
|
||||
use super::*;
|
||||
use crate::source::{Source, Tile};
|
||||
use crate::source::{Source, TileData};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct TestSource {
|
||||
@ -572,7 +576,11 @@ mod tests {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
async fn get_tile(&self, _xyz: &Xyz, _url_query: &Option<UrlQuery>) -> Result<Tile, Error> {
|
||||
async fn get_tile(
|
||||
&self,
|
||||
_xyz: &TileCoord,
|
||||
_url_query: &Option<UrlQuery>,
|
||||
) -> MartinResult<TileData> {
|
||||
unimplemented!()
|
||||
}
|
||||
}
|
||||
|
@@ -1,3 +1,4 @@
+use std::error::Error;
 use std::fmt::Write;
 use std::io;
 use std::path::PathBuf;
@@ -9,7 +10,8 @@ use crate::fonts::FontError;
 use crate::pg::PgError;
 use crate::sprites::SpriteError;
 
-pub type Result<T> = std::result::Result<T, Error>;
+/// A convenience [`Result`] for Martin crate.
+pub type MartinResult<T> = Result<T, MartinError>;
 
 fn elide_vec(vec: &[String], max_items: usize, max_len: usize) -> String {
     let mut s = String::new();
@@ -32,7 +34,7 @@ fn elide_vec(vec: &[String], max_items: usize, max_len: usize) -> String {
 }
 
 #[derive(thiserror::Error, Debug)]
-pub enum Error {
+pub enum MartinError {
     #[error("The --config and the connection parameters cannot be used together. Please remove unsupported parameters '{}'", elide_vec(.0, 3, 15))]
     ConfigAndConnectionsError(Vec<String>),
 
@@ -73,5 +75,5 @@ pub enum Error {
     WebError(#[from] actix_web::Error),
 
     #[error("Internal error: {0}")]
-    InternalError(String),
+    InternalError(Box<dyn Error>),
 }
martin/src/utils/mod.rs
@@ -14,4 +14,4 @@ mod utilities;
 pub use utilities::*;
 
 mod xyz;
-pub use xyz::Xyz;
+pub use xyz::TileCoord;
martin/src/utils/xyz.rs
@@ -1,13 +1,13 @@
 use std::fmt::{Display, Formatter};
 
 #[derive(Debug, Copy, Clone)]
-pub struct Xyz {
+pub struct TileCoord {
     pub z: u8,
     pub x: u32,
     pub y: u32,
 }
 
-impl Display for Xyz {
+impl Display for TileCoord {
     fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
         if f.alternate() {
             write!(f, "{}/{}/{}", self.z, self.x, self.y)
@ -1,7 +1,7 @@
|
||||
use ctor::ctor;
|
||||
use indoc::indoc;
|
||||
use insta::assert_yaml_snapshot;
|
||||
use martin::Xyz;
|
||||
use martin::TileCoord;
|
||||
|
||||
pub mod utils;
|
||||
pub use utils::*;
|
||||
@ -30,14 +30,14 @@ async fn function_source_tile() {
|
||||
let mock = mock_sources(mock_pgcfg("connection_string: $DATABASE_URL")).await;
|
||||
let src = source(&mock, "function_zxy_query");
|
||||
let tile = src
|
||||
.get_tile(&Xyz { z: 0, x: 0, y: 0 }, &None)
|
||||
.get_tile(&TileCoord { z: 0, x: 0, y: 0 }, &None)
|
||||
.await
|
||||
.unwrap();
|
||||
assert!(!tile.is_empty());
|
||||
|
||||
let src = source(&mock, "function_zxy_query_jsonb");
|
||||
let tile = src
|
||||
.get_tile(&Xyz { z: 0, x: 0, y: 0 }, &None)
|
||||
.get_tile(&TileCoord { z: 0, x: 0, y: 0 }, &None)
|
||||
.await
|
||||
.unwrap();
|
||||
assert!(!tile.is_empty());
|
||||
|
@ -1,7 +1,7 @@
|
||||
use ctor::ctor;
|
||||
use indoc::indoc;
|
||||
use insta::assert_yaml_snapshot;
|
||||
use martin::Xyz;
|
||||
use martin::TileCoord;
|
||||
|
||||
pub mod utils;
|
||||
pub use utils::*;
|
||||
@ -127,7 +127,7 @@ async fn tables_tilejson() {
|
||||
async fn tables_tile_ok() {
|
||||
let mock = mock_sources(mock_pgcfg("connection_string: $DATABASE_URL")).await;
|
||||
let tile = source(&mock, "table_source")
|
||||
.get_tile(&Xyz { z: 0, x: 0, y: 0 }, &None)
|
||||
.get_tile(&TileCoord { z: 0, x: 0, y: 0 }, &None)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
|
@@ -20,6 +20,6 @@ tilejson:
   name: table_source
   foo: '{"bar":"foo"}'
   format: mvt
-  generator: martin-cp v0.11.0
+  generator: martin-cp v0.11.1
   json: {}
 
@@ -20,6 +20,6 @@ tilejson:
   name: table_source
   foo: '{"bar":"foo"}'
   format: mvt
-  generator: martin-cp v0.11.0
+  generator: martin-cp v0.11.1
   json: {}
 
@@ -20,6 +20,6 @@ tilejson:
   name: table_source
   foo: '{"bar":"foo"}'
   format: mvt
-  generator: martin-cp v0.11.0
+  generator: martin-cp v0.11.1
   json: {}
 