Refactor mbtiles lib, mbtiles tool tests & CI (#702)

* Broke up martin-mbtiles into multiple files
* Made all mbtiles functions take a `SqliteExecutor` -- this way they
can be used with any SQLx connection struct, either a pooled connection
or an individual non-pooled one (see the sketch below).
* Simplified the mbtiles bin a bit -- I realized there is really no need to
pretty-print the output for single-value retrieval. Easier to just
dump it to the console as-is.
* Bumped martin-mbtiles to v0.2.0
* Made minor fixes in the tools docs, Cargo.toml, and justfile
* Added mbtiles tool integration tests and release publishing

Major thanks to @cafce25 for the quick
[Stack Overflow](https://stackoverflow.com/questions/76394665/how-to-pass-sqlx-connection-a-mut-trait-as-a-fn-parameter-in-rust/76395111)
reply on how to use a generic SQL executor!
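Below is a minimal sketch of the resulting pattern (a simplified free function, not the exact library code in this PR): accepting `&mut T` where every `&mut T` acts as a `SqliteExecutor` lets the same query run against either a plain `SqliteConnection` or a pooled `PoolConnection<Sqlite>`.

```rust
use sqlx::SqliteExecutor;

// Sketch only: in martin-mbtiles the real methods live on the `Mbtiles` struct.
async fn get_metadata_value<T>(conn: &mut T, key: &str) -> Result<Option<String>, sqlx::Error>
where
    // Any `T` whose mutable reference is a SQLite executor is accepted,
    // so both `SqliteConnection` and `PoolConnection<Sqlite>` work here.
    for<'e> &'e mut T: SqliteExecutor<'e>,
{
    sqlx::query_scalar("SELECT value FROM metadata WHERE name = ?")
        .bind(key)
        .fetch_optional(conn)
        .await
}
```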
Yuri Astrakhan 2023-06-03 18:54:50 -04:00 committed by GitHub
parent 78e67c3ad2
commit 2cdd373044
20 changed files with 578 additions and 433 deletions

@@ -61,15 +61,19 @@ jobs:
if [[ "${{ matrix.target }}" == "aarch64-apple-darwin" ]]; then
rustup target add aarch64-apple-darwin
# compile without debug symbols
RUSTFLAGS='-C link-arg=-s' cargo build --release --target ${{ matrix.target }} --features=vendored-openssl
RUSTFLAGS='-C link-arg=-s' cargo build --release --target ${{ matrix.target }} --features=vendored-openssl --package martin
RUSTFLAGS='-C link-arg=-s' cargo build --release --target ${{ matrix.target }} --features=tools --package martin-mbtiles
else
cargo build --release --target ${{ matrix.target }} --features=ssl
cargo build --release --target ${{ matrix.target }} --features=ssl --package martin
cargo build --release --target ${{ matrix.target }} --features=tools --package martin-mbtiles
fi
mkdir target_releases
if [[ "${{ runner.os }}" == "Windows" ]]; then
mv target/${{ matrix.target }}/release/martin.exe target_releases
mv target/${{ matrix.target }}/release/mbtiles.exe target_releases
else
mv target/${{ matrix.target }}/release/martin target_releases
mv target/${{ matrix.target }}/release/mbtiles target_releases
fi
- name: Save build artifact build-${{ matrix.target }}
uses: actions/upload-artifact@v3
@@ -86,19 +90,19 @@ jobs:
matrix:
include:
- os: ubuntu-latest
bin: martin
ext: ''
target: x86_64-unknown-linux-gnu
name: martin-Linux-x86_64.tar.gz
- os: windows-latest
bin: martin.exe
ext: '.exe'
target: x86_64-pc-windows-msvc
name: martin-Windows-x86_64.zip
- os: macOS-latest
bin: martin
ext: ''
target: x86_64-apple-darwin
name: martin-Darwin-x86_64.tar.gz
- os: ubuntu-latest
bin: martin
ext: ''
target: aarch64-apple-darwin
name: martin-Darwin-aarch64.tar.gz
runs-on: ${{ matrix.os }}
@@ -147,13 +151,17 @@ jobs:
shell: bash
run: |
if [[ "${{ runner.os }}" != "Windows" ]]; then
chmod +x target/${{ matrix.bin }}
chmod +x target/martin${{ matrix.ext }}
chmod +x target/mbtiles${{ matrix.ext }}
fi
tests/test.sh
env:
DATABASE_URL: ${{ steps.pg.outputs.connection-uri }}
MARTIN_BUILD: "-"
MARTIN_BIN: target/${{ matrix.bin }}
MARTIN_BIN: target/martin${{ matrix.ext }}
MBTILES_BUILD: "-"
MBTILES_BIN: target/mbtiles${{ matrix.ext }}
- name: Compare test output results (Linux)
if: matrix.target == 'x86_64-unknown-linux-gnu'
run: diff --brief --recursive --new-file tests/output tests/expected
@@ -170,12 +178,13 @@ jobs:
cd target/
# Symbol stripping does not work cross-platform
if [[ "${{ matrix.target }}" != "aarch64-apple-darwin" ]]; then
strip ${{ matrix.bin }}
strip martin${{ matrix.ext }}
strip mbtiles${{ matrix.ext }}
fi
if [[ "${{ runner.os }}" == "Windows" ]]; then
7z a ../${{ matrix.name }} ${{ matrix.bin }}
7z a ../${{ matrix.name }} martin${{ matrix.ext }} mbtiles${{ matrix.ext }}
else
tar czvf ../${{ matrix.name }} ${{ matrix.bin }}
tar czvf ../${{ matrix.name }} martin${{ matrix.ext }} mbtiles${{ matrix.ext }}
fi
cd -
- name: Generate SHA-256 (MacOS)
@@ -276,9 +285,11 @@ jobs:
path: target/
- name: Integration Tests
run: |
chmod +x target/martin
chmod +x target/martin target/mbtiles
tests/test.sh
env:
DATABASE_URL: postgres://${{ env.PGUSER }}:${{ env.PGUSER }}@${{ env.PGHOST }}:${{ job.services.postgres.ports[5432] }}/${{ env.PGDATABASE }}?sslmode=${{ matrix.sslmode }}
MARTIN_BUILD: "-"
MARTIN_BIN: target/martin
MBTILES_BUILD: "-"
MBTILES_BIN: target/mbtiles

@@ -94,6 +94,8 @@ jobs:
if: matrix.platform == 'linux/amd64'
run: |
TAG=$(echo '${{ steps.docker_meta.outputs.json }}' | jq -r '.tags[0]')
export MBTILES_BUILD=-
export MBTILES_BIN=-
export MARTIN_BUILD=-
export MARTIN_BIN="docker run --rm --net host -e DATABASE_URL -v $PWD/tests:/tests $TAG"
echo "MARTIN_BIN=$MARTIN_BIN"

Cargo.lock (generated)

@@ -1449,7 +1449,7 @@ dependencies = [
[[package]]
name = "martin-mbtiles"
version = "0.1.0"
version = "0.2.0"
dependencies = [
"actix-rt",
"anyhow",

@@ -27,11 +27,11 @@ ssl = ["openssl", "postgres-openssl"]
vendored-openssl = ["ssl", "openssl/vendored"]
[dependencies]
actix.workspace = true
actix-cors.workspace = true
actix-http.workspace = true
actix-rt.workspace = true
actix-web.workspace = true
actix.workspace = true
async-trait.workspace = true
brotli.workspace = true
clap.workspace = true
@@ -41,15 +41,13 @@ flate2.workspace = true
futures.workspace = true
itertools.workspace = true
log.workspace = true
martin-mbtiles = { path = "./martin-mbtiles", version = "0.1.0" }
martin-tile-utils = { path = "./martin-tile-utils", version = "0.1.0" }
martin-mbtiles.workspace = true
martin-tile-utils.workspace = true
num_cpus.workspace = true
openssl = { workspace = true, optional = true }
pmtiles.workspace = true
postgis.workspace = true
postgres.workspace = true
postgres-openssl = { workspace = true, optional = true }
postgres-protocol.workspace = true
postgres.workspace = true
regex.workspace = true
semver.workspace = true
serde.workspace = true
@@ -59,6 +57,10 @@ subst.workspace = true
thiserror.workspace = true
tilejson.workspace = true
# Optional dependencies for openssl support
openssl = { workspace = true, optional = true }
postgres-openssl = { workspace = true, optional = true }
[dev-dependencies]
cargo-husky.workspace = true
criterion.workspace = true
@@ -104,6 +106,8 @@ futures = "0.3"
indoc = "2"
itertools = "0.10"
log = "0.4"
martin-mbtiles = { path = "./martin-mbtiles", version = "0.2.0" }
martin-tile-utils = { path = "./martin-tile-utils", version = "0.1.0" }
num_cpus = "1"
openssl = "0.10"
pmtiles = { version = "0.2.2", features = ["mmap-async-tokio", "tilejson"] }

@@ -1,7 +1,13 @@
# Tools
## MBTiles tools
A small utility that allows users to interact with mbtiles files from the CLI as follows: `mbtiles <command> ...`
Martin has a few additional tools that can be used to interact with the data.
#### `meta-get`
Retrieve a metadata value by its name: `mbtiles meta-get <file.mbtiles> <key>`. See `mbtiles meta-get --help` to see available options.
## MBTiles tools
A small utility that allows users to interact with `*.mbtiles` files from the command line. Use `mbtiles --help` to see a list of available commands, and `mbtiles <command> --help` to see help for a specific command.
### meta-get
Retrieve a raw metadata value by its name. The value is printed to stdout without any modifications.
```shell
mbtiles meta-get <file.mbtiles> <key>
```
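For example, reading the `name` key from the `world_cities.mbtiles` fixture used by this repository's integration tests prints the raw value:
```shell
mbtiles meta-get ./tests/fixtures/files/world_cities.mbtiles name
# Major cities from Natural Earth data
```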

@@ -176,9 +176,15 @@ print-conn-str:
@echo {{ DATABASE_URL }}
# Run cargo fmt and cargo clippy
lint:
lint: fmt clippy
# Run cargo fmt
fmt:
cargo fmt --all -- --check
cargo clippy --workspace --all-targets --all-features -- -D warnings
# Run cargo clippy
clippy:
cargo clippy --workspace --all-targets --all-features --bins --tests --lib --benches -- -D warnings
# These steps automatically run before git push via a git hook
[private]

@@ -1,6 +1,6 @@
[package]
name = "martin-mbtiles"
version = "0.1.0"
version = "0.2.0"
authors = ["Yuri Astrakhan <YuriAstrakhan@gmail.com>", "MapLibre contributors"]
description = "A simple low-level MbTiles access and processing library, with some tile format detection and other relevant heuristics."
keywords = ["mbtiles", "maps", "tiles", "mvt", "tilejson"]
@@ -13,19 +13,24 @@ rust-version.workspace = true
repository.workspace = true
license.workspace = true
[features]
# TODO: Disable "tools" feature in default builds
default = ["tools"]
tools = ["anyhow", "clap", "tokio"]
[dependencies]
futures.workspace = true
log.workspace = true
martin-tile-utils = { path = "../martin-tile-utils", version = "0.1.0" }
martin-tile-utils.workspace = true
serde_json.workspace = true
sqlx.workspace = true
thiserror.workspace = true
tilejson.workspace = true
# Bin dependencies
anyhow.workspace = true
clap.workspace = true
tokio.workspace = true
anyhow = { workspace = true, optional = true }
clap = { workspace = true, optional = true }
tokio = { workspace = true, optional = true }
[dev-dependencies]
# For testing, might as well use the same async framework as the Martin itself
@@ -37,3 +42,4 @@ path = "src/lib.rs"
[[bin]]
name = "mbtiles"
path = "src/bin/main.rs"
required-features = ["tools"]

@@ -1,13 +1,16 @@
use std::path::{Path, PathBuf};
use anyhow::Result;
use clap::{Parser, Subcommand};
use martin_mbtiles::Mbtiles;
use std::path::PathBuf;
use sqlx::sqlite::SqliteConnectOptions;
use sqlx::{Connection, SqliteConnection};
#[derive(Parser, Debug)]
#[command(
version,
name = "mbtiles",
about = "A utility to work with .mbtiles files content"
about = "A utility to work with .mbtiles file content"
)]
pub struct Args {
#[command(subcommand)]
@@ -16,12 +19,12 @@ pub struct Args {
#[derive(Subcommand, Debug)]
enum Commands {
/// Prints all values in the metadata table.
#[command(name = "meta-all")]
MetaAll {
/// MBTiles file to read from
file: PathBuf,
},
// /// Prints all values in the metadata table.
// #[command(name = "meta-all")]
// MetaAll {
// /// MBTiles file to read from
// file: PathBuf,
// },
/// Gets a single value from metadata table.
#[command(name = "meta-get")]
MetaGetValue {
@@ -29,23 +32,20 @@ enum Commands {
file: PathBuf,
/// Value to read
key: String,
/// Output the raw value
#[arg(short, long)]
raw: bool,
},
/// Sets a single value in the metadata table, or deletes it if no value.
#[command(name = "meta-set")]
MetaSetValue {
/// MBTiles file to modify
file: PathBuf,
},
/// Copy tiles from one mbtiles file to another.
Copy {
/// MBTiles file to read from
src_file: PathBuf,
/// MBTiles file to write to
dst_file: PathBuf,
},
// /// Sets a single value in the metadata table, or deletes it if no value.
// #[command(name = "meta-set")]
// MetaSetValue {
// /// MBTiles file to modify
// file: PathBuf,
// },
// /// Copy tiles from one mbtiles file to another.
// Copy {
// /// MBTiles file to read from
// src_file: PathBuf,
// /// MBTiles file to write to
// dst_file: PathBuf,
// },
}
#[tokio::main]
@@ -53,26 +53,20 @@ async fn main() -> Result<()> {
let args = Args::parse();
match args.command {
Commands::MetaGetValue { file, key, raw } => {
let mbt = Mbtiles::new(&file).await?;
let value = mbt.get_metadata_value(&key).await?;
if raw {
if let Some(s) = value {
println!("{s}")
}
} else {
match value {
Some(s) => println!(r#"The value for metadata key "{key}" is:\n "{s}""#),
None => println!(r#"No value for metadata key "{key}""#),
}
}
}
_ => {
unimplemented!("Oops! This command is not yet available, stay tuned for future updates")
Commands::MetaGetValue { file, key } => {
meta_get_value(file.as_path(), &key).await?;
}
}
Ok(())
}
async fn meta_get_value(file: &Path, key: &str) -> Result<()> {
let mbt = Mbtiles::new(file)?;
let opt = SqliteConnectOptions::new().filename(file).read_only(true);
let mut conn = SqliteConnection::connect_with(&opt).await?;
if let Some(s) = mbt.get_metadata_value(&mut conn, key).await? {
println!("{s}")
}
Ok(())
}

@@ -0,0 +1,20 @@
use std::path::PathBuf;
use martin_tile_utils::TileInfo;
#[derive(thiserror::Error, Debug)]
pub enum MbtError {
#[error("SQL Error {0}")]
SqlError(#[from] sqlx::Error),
#[error("MBTile filepath contains unsupported characters: {}", .0.display())]
UnsupportedCharsInFilepath(PathBuf),
#[error("Inconsistent tile formats detected: {0} vs {1}")]
InconsistentMetadata(TileInfo, TileInfo),
#[error("No tiles found")]
NoTilesFound,
}
pub type MbtResult<T> = Result<T, MbtError>;

@@ -1,355 +1,9 @@
#![allow(clippy::missing_errors_doc)]
extern crate core;
mod errors;
mod mbtiles;
mod mbtiles_pool;
use std::ffi::OsStr;
use std::fmt::Display;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use futures::TryStreamExt;
use log::{debug, info, warn};
use martin_tile_utils::{Format, TileInfo};
use serde_json::{Value as JSONValue, Value};
use sqlx::pool::PoolConnection;
use sqlx::sqlite::SqlitePool;
use sqlx::{query, Pool, Sqlite};
use tilejson::{tilejson, Bounds, Center, TileJSON};
#[derive(thiserror::Error, Debug)]
pub enum MbtError {
#[error("SQL Error {0}")]
SqlError(#[from] sqlx::Error),
#[error("MBTile filepath contains unsupported characters: {}", .0.display())]
UnsupportedCharsInFilepath(PathBuf),
#[error("Inconsistent tile formats detected: {0} vs {1}")]
InconsistentMetadata(TileInfo, TileInfo),
#[error("No tiles found")]
NoTilesFound,
}
type MbtResult<T> = Result<T, MbtError>;
#[derive(Clone, Debug)]
pub struct Mbtiles {
filename: String,
pool: Pool<Sqlite>,
}
#[derive(Clone, Debug, PartialEq)]
pub struct Metadata {
pub id: String,
pub tile_info: TileInfo,
pub layer_type: Option<String>,
pub tilejson: TileJSON,
pub json: Option<JSONValue>,
}
impl Mbtiles {
pub async fn new<P: AsRef<Path>>(filepath: P) -> MbtResult<Self> {
let file = filepath
.as_ref()
.to_str()
.ok_or_else(|| MbtError::UnsupportedCharsInFilepath(filepath.as_ref().to_path_buf()))?;
let pool = SqlitePool::connect(file).await?;
let filename = filepath
.as_ref()
.file_stem()
.unwrap_or_else(|| OsStr::new("unknown"))
.to_string_lossy()
.to_string();
Ok(Self { filename, pool })
}
fn to_val<V, E: Display>(&self, val: Result<V, E>, title: &str) -> Option<V> {
match val {
Ok(v) => Some(v),
Err(err) => {
let name = &self.filename;
warn!("Unable to parse metadata {title} value in {name}: {err}");
None
}
}
}
pub async fn get_metadata_value(&self, key: &str) -> MbtResult<Option<String>> {
let mut conn = self.pool.acquire().await?;
let query = query! {"SELECT value from metadata where name = ?", key};
let row = query.fetch_optional(&mut conn).await?;
if let Some(row) = row {
if let Some(value) = row.value {
return Ok(Some(value));
}
}
Ok(None)
}
pub async fn get_metadata(&self) -> MbtResult<Metadata> {
let mut conn = self.pool.acquire().await?;
let (tj, layer_type, json) = self.parse_metadata(&mut conn).await?;
Ok(Metadata {
id: self.filename.to_string(),
tile_info: self.detect_format(&tj, &mut conn).await?,
tilejson: tj,
layer_type,
json,
})
}
async fn parse_metadata(
&self,
conn: &mut PoolConnection<Sqlite>,
) -> MbtResult<(TileJSON, Option<String>, Option<Value>)> {
let query = query!("SELECT name, value FROM metadata WHERE value IS NOT ''");
let mut rows = query.fetch(conn);
let mut tj = tilejson! { tiles: vec![] };
let mut layer_type: Option<String> = None;
let mut json: Option<JSONValue> = None;
while let Some(row) = rows.try_next().await? {
if let (Some(name), Some(value)) = (row.name, row.value) {
match name.as_ref() {
"name" => tj.name = Some(value),
"version" => tj.version = Some(value),
"bounds" => tj.bounds = self.to_val(Bounds::from_str(value.as_str()), &name),
"center" => tj.center = self.to_val(Center::from_str(value.as_str()), &name),
"minzoom" => tj.minzoom = self.to_val(value.parse(), &name),
"maxzoom" => tj.maxzoom = self.to_val(value.parse(), &name),
"description" => tj.description = Some(value),
"attribution" => tj.attribution = Some(value),
"type" => layer_type = Some(value),
"legend" => tj.legend = Some(value),
"template" => tj.template = Some(value),
"json" => json = self.to_val(serde_json::from_str(&value), &name),
"format" | "generator" => {
tj.other.insert(name, Value::String(value));
}
_ => {
let file = &self.filename;
warn!("{file} has an unrecognized metadata value {name}={value}");
tj.other.insert(name, Value::String(value));
}
}
}
}
if let Some(JSONValue::Object(obj)) = &mut json {
if let Some(value) = obj.remove("vector_layers") {
if let Ok(v) = serde_json::from_value(value) {
tj.vector_layers = Some(v);
} else {
warn!(
"Unable to parse metadata vector_layers value in {}",
self.filename
);
}
}
}
Ok((tj, layer_type, json))
}
async fn detect_format(
&self,
tilejson: &TileJSON,
conn: &mut PoolConnection<Sqlite>,
) -> MbtResult<TileInfo> {
let mut tile_info = None;
let mut tested_zoom = -1_i64;
// First, pick any random tile
let query = query! {"SELECT zoom_level, tile_column, tile_row, tile_data FROM tiles WHERE zoom_level >= 0 LIMIT 1"};
let row = query.fetch_optional(&mut *conn).await?;
if let Some(r) = row {
tile_info = self.parse_tile(r.zoom_level, r.tile_column, r.tile_row, r.tile_data);
tested_zoom = r.zoom_level.unwrap_or(-1);
}
// Afterwards, iterate over tiles in all allowed zooms and check for consistency
for z in tilejson.minzoom.unwrap_or(0)..=tilejson.maxzoom.unwrap_or(18) {
if i64::from(z) == tested_zoom {
continue;
}
let query = query! {"SELECT tile_column, tile_row, tile_data FROM tiles WHERE zoom_level = ? LIMIT 1", z};
let row = query.fetch_optional(&mut *conn).await?;
if let Some(r) = row {
match (
tile_info,
self.parse_tile(Some(z.into()), r.tile_column, r.tile_row, r.tile_data),
) {
(_, None) => {}
(None, new) => tile_info = new,
(Some(old), Some(new)) if old == new => {}
(Some(old), Some(new)) => {
return Err(MbtError::InconsistentMetadata(old, new));
}
}
}
}
if let Some(Value::String(fmt)) = tilejson.other.get("format") {
let file = &self.filename;
match (tile_info, Format::parse(fmt)) {
(_, None) => {
warn!("Unknown format value in metadata: {fmt}");
}
(None, Some(fmt)) => {
if fmt.is_detectable() {
warn!("Metadata table sets detectable '{fmt}' tile format, but it could not be verified for file {file}");
} else {
info!("Using '{fmt}' tile format from metadata table in file {file}");
}
tile_info = Some(fmt.into());
}
(Some(info), Some(fmt)) if info.format == fmt => {
debug!("Detected tile format {info} matches metadata.format '{fmt}' in file {file}");
}
(Some(info), _) => {
warn!("Found inconsistency: metadata.format='{fmt}', but tiles were detected as {info:?} in file {file}. Tiles will be returned as {info:?}.");
}
}
}
if let Some(info) = tile_info {
if info.format != Format::Mvt && tilejson.vector_layers.is_some() {
warn!(
"{} has vector_layers metadata but non-vector tiles",
self.filename
);
}
Ok(info)
} else {
Err(MbtError::NoTilesFound)
}
}
fn parse_tile(
&self,
z: Option<i64>,
x: Option<i64>,
y: Option<i64>,
tile: Option<Vec<u8>>,
) -> Option<TileInfo> {
if let (Some(z), Some(x), Some(y), Some(tile)) = (z, x, y, tile) {
let info = TileInfo::detect(&tile);
if let Some(info) = info {
debug!(
"Tile {z}/{x}/{} is detected as {info} in file {}",
(1 << z) - 1 - y,
self.filename,
);
}
info
} else {
None
}
}
pub async fn get_tile(&self, z: u8, x: u32, y: u32) -> MbtResult<Option<Vec<u8>>> {
let mut conn = self.pool.acquire().await?;
let y = (1 << z) - 1 - y;
let query = query! {"SELECT tile_data from tiles where zoom_level = ? AND tile_column = ? AND tile_row = ?", z, x, y};
let row = query.fetch_optional(&mut conn).await?;
if let Some(row) = row {
if let Some(tile_data) = row.tile_data {
return Ok(Some(tile_data));
}
}
Ok(None)
}
}
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use martin_tile_utils::Encoding;
use tilejson::VectorLayer;
use super::*;
#[actix_rt::test]
async fn metadata_jpeg() {
let mbt = Mbtiles::new(Path::new(
"../tests/fixtures/files/geography-class-jpg.mbtiles",
))
.await;
let mbt = mbt.unwrap();
let metadata = mbt.get_metadata().await.unwrap();
let tj = metadata.tilejson;
assert_eq!(tj.description.unwrap(), "One of the example maps that comes with TileMill - a bright & colorful world map that blends retro and high-tech with its folded paper texture and interactive flag tooltips. ");
assert!(tj.legend.unwrap().starts_with("<div style="));
assert_eq!(tj.maxzoom.unwrap(), 1);
assert_eq!(tj.minzoom.unwrap(), 0);
assert_eq!(tj.name.unwrap(), "Geography Class");
assert_eq!(tj.template.unwrap(),"{{#__location__}}{{/__location__}}{{#__teaser__}}<div style=\"text-align:center;\">\n\n<img src=\"data:image/png;base64,{{flag_png}}\" style=\"-moz-box-shadow:0px 1px 3px #222;-webkit-box-shadow:0px 1px 5px #222;box-shadow:0px 1px 3px #222;\"><br>\n<strong>{{admin}}</strong>\n\n</div>{{/__teaser__}}{{#__full__}}{{/__full__}}");
assert_eq!(tj.version.unwrap(), "1.0.0");
assert_eq!(metadata.id, "geography-class-jpg");
assert_eq!(metadata.tile_info, Format::Jpeg.into());
}
#[actix_rt::test]
async fn metadata_mvt() {
let mbt = Mbtiles::new(Path::new("../tests/fixtures/files/world_cities.mbtiles")).await;
let mbt = mbt.unwrap();
let metadata = mbt.get_metadata().await.unwrap();
let tj = metadata.tilejson;
assert_eq!(tj.maxzoom.unwrap(), 6);
assert_eq!(tj.minzoom.unwrap(), 0);
assert_eq!(tj.name.unwrap(), "Major cities from Natural Earth data");
assert_eq!(tj.version.unwrap(), "2");
assert_eq!(
tj.vector_layers,
Some(vec![VectorLayer {
id: "cities".to_string(),
fields: vec![("name".to_string(), "String".to_string())]
.into_iter()
.collect(),
description: Some(String::new()),
minzoom: Some(0),
maxzoom: Some(6),
other: HashMap::default()
}])
);
assert_eq!(metadata.id, "world_cities");
assert_eq!(
metadata.tile_info,
TileInfo::new(Format::Mvt, Encoding::Gzip)
);
assert_eq!(metadata.layer_type, Some("overlay".to_string()));
}
#[actix_rt::test]
async fn metadata_get_key() {
let mbt = Mbtiles::new(Path::new("../tests/fixtures/files/world_cities.mbtiles"))
.await
.unwrap();
assert_eq!(
mbt.get_metadata_value("bounds").await.unwrap().unwrap(),
"-123.123590,-37.818085,174.763027,59.352706"
);
assert_eq!(
mbt.get_metadata_value("name").await.unwrap().unwrap(),
"Major cities from Natural Earth data"
);
assert_eq!(
mbt.get_metadata_value("maxzoom").await.unwrap().unwrap(),
"6"
);
assert_eq!(
mbt.get_metadata_value("nonexistent_key").await.unwrap(),
None
);
assert_eq!(mbt.get_metadata_value("").await.unwrap(), None);
}
}
pub use errors::MbtError;
pub use mbtiles::{Mbtiles, Metadata};
pub use mbtiles_pool::MbtilesPool;

@@ -0,0 +1,362 @@
#![allow(clippy::missing_errors_doc)]
extern crate core;
use std::ffi::OsStr;
use std::fmt::Display;
use std::path::Path;
use std::str::FromStr;
use futures::TryStreamExt;
use log::{debug, info, warn};
use martin_tile_utils::{Format, TileInfo};
use serde_json::{Value as JSONValue, Value};
use sqlx::{query, SqliteExecutor};
use tilejson::{tilejson, Bounds, Center, TileJSON};
use crate::errors::{MbtError, MbtResult};
#[derive(Clone, Debug, PartialEq)]
pub struct Metadata {
pub id: String,
pub tile_info: TileInfo,
pub layer_type: Option<String>,
pub tilejson: TileJSON,
pub json: Option<JSONValue>,
}
#[derive(Clone, Debug)]
pub struct Mbtiles {
filepath: String,
filename: String,
}
impl Mbtiles {
pub fn new<P: AsRef<Path>>(filepath: P) -> MbtResult<Self> {
let path = filepath.as_ref();
Ok(Self {
filepath: path
.to_str()
.ok_or_else(|| MbtError::UnsupportedCharsInFilepath(path.to_path_buf()))?
.to_string(),
filename: path
.file_stem()
.unwrap_or_else(|| OsStr::new("unknown"))
.to_string_lossy()
.to_string(),
})
}
pub fn filepath(&self) -> &str {
&self.filepath
}
pub fn filename(&self) -> &str {
&self.filename
}
fn to_val<V, E: Display>(&self, val: Result<V, E>, title: &str) -> Option<V> {
match val {
Ok(v) => Some(v),
Err(err) => {
let name = &self.filename;
warn!("Unable to parse metadata {title} value in {name}: {err}");
None
}
}
}
pub async fn get_metadata_value<T>(&self, conn: &mut T, key: &str) -> MbtResult<Option<String>>
where
for<'e> &'e mut T: SqliteExecutor<'e>,
{
let query = query!("SELECT value from metadata where name = ?", key);
let row = query.fetch_optional(conn).await?;
if let Some(row) = row {
if let Some(value) = row.value {
return Ok(Some(value));
}
}
Ok(None)
}
pub async fn get_metadata<T>(&self, conn: &mut T) -> MbtResult<Metadata>
where
for<'e> &'e mut T: SqliteExecutor<'e>,
{
let (tj, layer_type, json) = self.parse_metadata(conn).await?;
Ok(Metadata {
id: self.filename.to_string(),
tile_info: self.detect_format(&tj, conn).await?,
tilejson: tj,
layer_type,
json,
})
}
async fn parse_metadata<T>(
&self,
conn: &mut T,
) -> MbtResult<(TileJSON, Option<String>, Option<Value>)>
where
for<'e> &'e mut T: SqliteExecutor<'e>,
{
let query = query!("SELECT name, value FROM metadata WHERE value IS NOT ''");
let mut rows = query.fetch(conn);
let mut tj = tilejson! { tiles: vec![] };
let mut layer_type: Option<String> = None;
let mut json: Option<JSONValue> = None;
while let Some(row) = rows.try_next().await? {
if let (Some(name), Some(value)) = (row.name, row.value) {
match name.as_ref() {
"name" => tj.name = Some(value),
"version" => tj.version = Some(value),
"bounds" => tj.bounds = self.to_val(Bounds::from_str(value.as_str()), &name),
"center" => tj.center = self.to_val(Center::from_str(value.as_str()), &name),
"minzoom" => tj.minzoom = self.to_val(value.parse(), &name),
"maxzoom" => tj.maxzoom = self.to_val(value.parse(), &name),
"description" => tj.description = Some(value),
"attribution" => tj.attribution = Some(value),
"type" => layer_type = Some(value),
"legend" => tj.legend = Some(value),
"template" => tj.template = Some(value),
"json" => json = self.to_val(serde_json::from_str(&value), &name),
"format" | "generator" => {
tj.other.insert(name, Value::String(value));
}
_ => {
let file = &self.filename;
warn!("{file} has an unrecognized metadata value {name}={value}");
tj.other.insert(name, Value::String(value));
}
}
}
}
if let Some(JSONValue::Object(obj)) = &mut json {
if let Some(value) = obj.remove("vector_layers") {
if let Ok(v) = serde_json::from_value(value) {
tj.vector_layers = Some(v);
} else {
warn!(
"Unable to parse metadata vector_layers value in {}",
self.filename
);
}
}
}
Ok((tj, layer_type, json))
}
async fn detect_format<T>(&self, tilejson: &TileJSON, conn: &mut T) -> MbtResult<TileInfo>
where
for<'e> &'e mut T: SqliteExecutor<'e>,
{
let mut tile_info = None;
let mut tested_zoom = -1_i64;
// First, pick any random tile
let query = query!("SELECT zoom_level, tile_column, tile_row, tile_data FROM tiles WHERE zoom_level >= 0 LIMIT 1");
let row = query.fetch_optional(&mut *conn).await?;
if let Some(r) = row {
tile_info = self.parse_tile(r.zoom_level, r.tile_column, r.tile_row, r.tile_data);
tested_zoom = r.zoom_level.unwrap_or(-1);
}
// Afterwards, iterate over tiles in all allowed zooms and check for consistency
for z in tilejson.minzoom.unwrap_or(0)..=tilejson.maxzoom.unwrap_or(18) {
if i64::from(z) == tested_zoom {
continue;
}
let query = query! {"SELECT tile_column, tile_row, tile_data FROM tiles WHERE zoom_level = ? LIMIT 1", z};
let row = query.fetch_optional(&mut *conn).await?;
if let Some(r) = row {
match (
tile_info,
self.parse_tile(Some(z.into()), r.tile_column, r.tile_row, r.tile_data),
) {
(_, None) => {}
(None, new) => tile_info = new,
(Some(old), Some(new)) if old == new => {}
(Some(old), Some(new)) => {
return Err(MbtError::InconsistentMetadata(old, new));
}
}
}
}
if let Some(Value::String(fmt)) = tilejson.other.get("format") {
let file = &self.filename;
match (tile_info, Format::parse(fmt)) {
(_, None) => {
warn!("Unknown format value in metadata: {fmt}");
}
(None, Some(fmt)) => {
if fmt.is_detectable() {
warn!("Metadata table sets detectable '{fmt}' tile format, but it could not be verified for file {file}");
} else {
info!("Using '{fmt}' tile format from metadata table in file {file}");
}
tile_info = Some(fmt.into());
}
(Some(info), Some(fmt)) if info.format == fmt => {
debug!("Detected tile format {info} matches metadata.format '{fmt}' in file {file}");
}
(Some(info), _) => {
warn!("Found inconsistency: metadata.format='{fmt}', but tiles were detected as {info:?} in file {file}. Tiles will be returned as {info:?}.");
}
}
}
if let Some(info) = tile_info {
if info.format != Format::Mvt && tilejson.vector_layers.is_some() {
warn!(
"{} has vector_layers metadata but non-vector tiles",
self.filename
);
}
Ok(info)
} else {
Err(MbtError::NoTilesFound)
}
}
fn parse_tile(
&self,
z: Option<i64>,
x: Option<i64>,
y: Option<i64>,
tile: Option<Vec<u8>>,
) -> Option<TileInfo> {
if let (Some(z), Some(x), Some(y), Some(tile)) = (z, x, y, tile) {
let info = TileInfo::detect(&tile);
if let Some(info) = info {
debug!(
"Tile {z}/{x}/{} is detected as {info} in file {}",
(1 << z) - 1 - y,
self.filename,
);
}
info
} else {
None
}
}
pub async fn get_tile<T>(
&self,
conn: &mut T,
z: u8,
x: u32,
y: u32,
) -> MbtResult<Option<Vec<u8>>>
where
for<'e> &'e mut T: SqliteExecutor<'e>,
{
// let mut conn = self.pool.acquire().await?;
let y = (1 << z) - 1 - y;
let query = query! {"SELECT tile_data from tiles where zoom_level = ? AND tile_column = ? AND tile_row = ?", z, x, y};
let row = query.fetch_optional(conn).await?;
if let Some(row) = row {
if let Some(tile_data) = row.tile_data {
return Ok(Some(tile_data));
}
}
Ok(None)
}
}
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use martin_tile_utils::Encoding;
use sqlx::{Connection, SqliteConnection};
use tilejson::VectorLayer;
use super::*;
async fn open(filepath: &str) -> (SqliteConnection, Mbtiles) {
let mbt = Mbtiles::new(filepath).unwrap();
(
SqliteConnection::connect(mbt.filepath()).await.unwrap(),
mbt,
)
}
#[actix_rt::test]
async fn mbtiles_meta() {
let filepath = "../tests/fixtures/files/geography-class-jpg.mbtiles";
let mbt = Mbtiles::new(filepath).unwrap();
assert_eq!(mbt.filepath(), filepath);
assert_eq!(mbt.filename(), "geography-class-jpg");
}
#[actix_rt::test]
async fn metadata_jpeg() {
let (mut conn, mbt) = open("../tests/fixtures/files/geography-class-jpg.mbtiles").await;
let metadata = mbt.get_metadata(&mut conn).await.unwrap();
let tj = metadata.tilejson;
assert_eq!(tj.description.unwrap(), "One of the example maps that comes with TileMill - a bright & colorful world map that blends retro and high-tech with its folded paper texture and interactive flag tooltips. ");
assert!(tj.legend.unwrap().starts_with("<div style="));
assert_eq!(tj.maxzoom.unwrap(), 1);
assert_eq!(tj.minzoom.unwrap(), 0);
assert_eq!(tj.name.unwrap(), "Geography Class");
assert_eq!(tj.template.unwrap(),"{{#__location__}}{{/__location__}}{{#__teaser__}}<div style=\"text-align:center;\">\n\n<img src=\"data:image/png;base64,{{flag_png}}\" style=\"-moz-box-shadow:0px 1px 3px #222;-webkit-box-shadow:0px 1px 5px #222;box-shadow:0px 1px 3px #222;\"><br>\n<strong>{{admin}}</strong>\n\n</div>{{/__teaser__}}{{#__full__}}{{/__full__}}");
assert_eq!(tj.version.unwrap(), "1.0.0");
assert_eq!(metadata.id, "geography-class-jpg");
assert_eq!(metadata.tile_info, Format::Jpeg.into());
}
#[actix_rt::test]
async fn metadata_mvt() {
let (mut conn, mbt) = open("../tests/fixtures/files/world_cities.mbtiles").await;
let metadata = mbt.get_metadata(&mut conn).await.unwrap();
let tj = metadata.tilejson;
assert_eq!(tj.maxzoom.unwrap(), 6);
assert_eq!(tj.minzoom.unwrap(), 0);
assert_eq!(tj.name.unwrap(), "Major cities from Natural Earth data");
assert_eq!(tj.version.unwrap(), "2");
assert_eq!(
tj.vector_layers,
Some(vec![VectorLayer {
id: "cities".to_string(),
fields: vec![("name".to_string(), "String".to_string())]
.into_iter()
.collect(),
description: Some(String::new()),
minzoom: Some(0),
maxzoom: Some(6),
other: HashMap::default()
}])
);
assert_eq!(metadata.id, "world_cities");
assert_eq!(
metadata.tile_info,
TileInfo::new(Format::Mvt, Encoding::Gzip)
);
assert_eq!(metadata.layer_type, Some("overlay".to_string()));
}
#[actix_rt::test]
async fn metadata_get_key() {
let (mut conn, mbt) = open("../tests/fixtures/files/world_cities.mbtiles").await;
let res = mbt.get_metadata_value(&mut conn, "bounds").await.unwrap();
assert_eq!(res.unwrap(), "-123.123590,-37.818085,174.763027,59.352706");
let res = mbt.get_metadata_value(&mut conn, "name").await.unwrap();
assert_eq!(res.unwrap(), "Major cities from Natural Earth data");
let res = mbt.get_metadata_value(&mut conn, "maxzoom").await.unwrap();
assert_eq!(res.unwrap(), "6");
let res = mbt.get_metadata_value(&mut conn, "nonexistent_key").await;
assert_eq!(res.unwrap(), None);
let res = mbt.get_metadata_value(&mut conn, "").await;
assert_eq!(res.unwrap(), None);
}
}

@@ -0,0 +1,30 @@
use std::path::Path;
use sqlx::{Pool, Sqlite, SqlitePool};
use crate::errors::MbtResult;
use crate::{Mbtiles, Metadata};
#[derive(Clone, Debug)]
pub struct MbtilesPool {
mbtiles: Mbtiles,
pool: Pool<Sqlite>,
}
impl MbtilesPool {
pub async fn new<P: AsRef<Path>>(filepath: P) -> MbtResult<Self> {
let mbtiles = Mbtiles::new(filepath)?;
let pool = SqlitePool::connect(mbtiles.filepath()).await?;
Ok(Self { mbtiles, pool })
}
pub async fn get_metadata(&self) -> MbtResult<Metadata> {
let mut conn = self.pool.acquire().await?;
self.mbtiles.get_metadata(&mut conn).await
}
pub async fn get_tile(&self, z: u8, x: u32, y: u32) -> MbtResult<Option<Vec<u8>>> {
let mut conn = self.pool.acquire().await?;
self.mbtiles.get_tile(&mut conn, z, x, y).await
}
}

@@ -182,10 +182,11 @@ impl Display for TileInfo {
mod tests {
use std::fs::read;
use super::*;
use Encoding::{Internal, Uncompressed};
use Format::{Jpeg, Json, Png, Webp};
use super::*;
fn detect(path: &str) -> Option<TileInfo> {
TileInfo::detect(&read(path).unwrap())
}

@@ -4,7 +4,7 @@ use std::path::PathBuf;
use std::sync::Arc;
use async_trait::async_trait;
use martin_mbtiles::Mbtiles;
use martin_mbtiles::MbtilesPool;
use martin_tile_utils::TileInfo;
use tilejson::TileJSON;
@@ -17,7 +17,7 @@ use crate::{Error, Source, Xyz};
#[derive(Clone)]
pub struct MbtSource {
id: String,
mbtiles: Arc<Mbtiles>,
mbtiles: Arc<MbtilesPool>,
tilejson: TileJSON,
tile_info: TileInfo,
}
@@ -39,7 +39,7 @@ impl MbtSource {
}
async fn new(id: String, path: PathBuf) -> Result<Self, FileError> {
let mbt = Mbtiles::new(&path)
let mbt = MbtilesPool::new(&path)
.await
.map_err(|e| {
io::Error::new(

@@ -512,12 +512,14 @@ fn parse_x_rewrite_url(header: &HeaderValue) -> Option<String> {
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use async_trait::async_trait;
use tilejson::{tilejson, Bounds, VectorLayer};
use super::*;
use crate::source::{Source, Tile};
use crate::utils;
use async_trait::async_trait;
use std::collections::HashMap;
use tilejson::{tilejson, Bounds, VectorLayer};
#[derive(Debug, Clone)]
struct TestSource {

@@ -0,0 +1,11 @@
A utility to work with .mbtiles file content
Usage: mbtiles <COMMAND>
Commands:
meta-get Gets a single value from metadata table
help Print this message or the help of the given subcommand(s)
Options:
-h, --help Print help
-V, --version Print version

@@ -0,0 +1,10 @@
Gets a single value from metadata table
Usage: mbtiles meta-get <FILE> <KEY>
Arguments:
<FILE> MBTiles file to read a value from
<KEY> Value to read
Options:
-h, --help Print help

@@ -0,0 +1 @@
Major cities from Natural Earth data

@@ -10,6 +10,9 @@ MARTIN_URL="http://localhost:${MARTIN_PORT}"
MARTIN_ARGS="${MARTIN_ARGS:---listen-addresses localhost:${MARTIN_PORT}}"
MARTIN_BIN="${MARTIN_BIN:-cargo run --all-features --} ${MARTIN_ARGS}"
MBTILES_BUILD="${MBTILES_BUILD:-cargo build -p martin-mbtiles}"
MBTILES_BIN="${MBTILES_BIN:-target/debug/mbtiles}"
function wait_for_martin {
# Seems the --retry-all-errors option is not available on older curl versions, but maybe in the future we can just use this:
# timeout -k 20s 20s curl --retry 10 --retry-all-errors --retry-delay 1 -sS "$MARTIN_URL/health"
@@ -130,11 +133,14 @@ validate_log()
curl --version
# Make sure martin is built - this way it won't timeout while waiting for it to start
# If MARTIN_BUILD is set to "-", don't build
# Make sure martin and mbtiles are built - this way it won't timeout while waiting for it to start
# If set to "-", don't build
if [[ "$MARTIN_BUILD" != "-" ]]; then
$MARTIN_BUILD
fi
if [[ "$MBTILES_BUILD" != "-" ]]; then
$MBTILES_BUILD
fi
echo "------------------------------------------------------------------------------------------------------------------------"
@@ -251,4 +257,23 @@ validate_log test_log_2.txt
remove_line "$(dirname "$0")/output/given_config.yaml" " connection_string: "
remove_line "$(dirname "$0")/output/generated_config.yaml" " connection_string: "
echo "------------------------------------------------------------------------------------------------------------------------"
echo "Test mbtiles utility"
if [[ "$MBTILES_BIN" != "-" ]]; then
TEST_OUT_DIR="$(dirname "$0")/output/mbtiles"
mkdir -p "$TEST_OUT_DIR"
set -x
$MBTILES_BIN --help 2>&1 | tee "$TEST_OUT_DIR/help.txt"
$MBTILES_BIN meta-get --help 2>&1 | tee "$TEST_OUT_DIR/meta-get_help.txt"
$MBTILES_BIN meta-get ./tests/fixtures/files/world_cities.mbtiles name 2>&1 | tee "$TEST_OUT_DIR/meta-get_name.txt"
$MBTILES_BIN meta-get ./tests/fixtures/files/world_cities.mbtiles missing_value 2>&1 | tee "$TEST_OUT_DIR/meta-get_missing_value.txt"
{ set +x; } 2> /dev/null
else
echo "Skipping mbtiles utility tests"
fi
>&2 echo "All integration tests have passed"