fix for mbtiles apply-patch with raw bindiff (#1384)

When applying raw (non-gzip) patches, `mbtiles` was trying to gzip-decode the tiles first. It now decodes and re-encodes tiles only for gzip-based patches, and adds a number of tests to catch these cases.
Yuri Astrakhan 2024-06-26 16:37:20 -04:00 committed by GitHub
parent 5d52ca2ddb
commit b71c846abc
25 changed files with 754 additions and 125 deletions
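
The gist of the change: when applying a bin-diff patch, the stored tile is now gzip-decoded (and the patched result re-gzipped) only for gzip-based patches, while bin-diff-raw patches operate on the blob exactly as stored. The following is a minimal, hedged sketch of that logic, not the actual mbtiles code: it uses `flate2` for gzip and takes the bsdiff step as a caller-supplied closure instead of the crate's `martin_tile_utils` and `sqlite_compressions` helpers.

// A minimal sketch (not the actual mbtiles code): gzip handling is conditional
// on the patch type. `apply_bsdiff` is a hypothetical stand-in for the bsdiff step.
use std::io::{Read, Write};

use flate2::read::GzDecoder;
use flate2::write::GzEncoder;
use flate2::Compression;

// Mirrors the new `PatchType` introduced by this commit (the "whole tile" case is
// modeled as `Option::<PatchType>::None` elsewhere and never reaches this point).
enum PatchType {
    BinDiffGz,
    BinDiffRaw,
}

fn apply_tile_patch(
    stored_tile: Vec<u8>,
    patch: &[u8],
    patch_type: &PatchType,
    apply_bsdiff: impl Fn(&[u8], &[u8]) -> std::io::Result<Vec<u8>>,
) -> std::io::Result<Vec<u8>> {
    // Before the fix, the stored tile was unconditionally gzip-decoded, which broke
    // bin-diff-raw patches; now only gzip-based patches decode the blob first.
    let old_tile = match patch_type {
        PatchType::BinDiffGz => {
            let mut decoded = Vec::new();
            GzDecoder::new(stored_tile.as_slice()).read_to_end(&mut decoded)?;
            decoded
        }
        PatchType::BinDiffRaw => stored_tile,
    };

    let new_tile = apply_bsdiff(&old_tile, patch)?;

    // Likewise, the patched tile is re-gzipped only for gzip-based patches.
    match patch_type {
        PatchType::BinDiffGz => {
            let mut enc = GzEncoder::new(Vec::new(), Compression::default());
            enc.write_all(&new_tile)?;
            enc.finish()
        }
        PatchType::BinDiffRaw => Ok(new_tile),
    }
}

In the real implementation this lives in `BinDiffPatcher::process` (see the diff below), which additionally verifies an xxh3 hash of the uncompressed patched tile before re-encoding.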

Cargo.lock (generated)

@@ -2401,7 +2401,7 @@ dependencies = [
 [[package]]
 name = "mbtiles"
-version = "0.10.0"
+version = "0.11.0"
 dependencies = [
  "actix-rt",
  "anyhow",


@@ -52,7 +52,7 @@ lambda-web = { version = "0.2.1", features = ["actix4"] }
 libsqlite3-sys = { version = ">=0.27", features = ["bundled"] }
 log = "0.4"
 martin-tile-utils = { path = "./martin-tile-utils", version = "0.5.0" }
-mbtiles = { path = "./mbtiles", version = "0.10.0" }
+mbtiles = { path = "./mbtiles", version = "0.11.0" }
 md5 = "0.7.0"
 moka = { version = "0.12", features = ["future"] }
 num_cpus = "1"


@@ -2,7 +2,7 @@ lints.workspace = true
 [package]
 name = "mbtiles"
-version = "0.10.0"
+version = "0.11.0"
 authors = ["Yuri Astrakhan <YuriAstrakhan@gmail.com>", "MapLibre contributors"]
 description = "A simple low-level MbTiles access and processing library, with some tile format detection and other relevant heuristics."
 keywords = ["mbtiles", "maps", "tiles", "mvt", "tilejson"]


@@ -4,7 +4,7 @@ use clap::{Parser, Subcommand};
 use log::error;
 use mbtiles::{
     apply_patch, AggHashType, CopyDuplicateMode, CopyType, IntegrityCheckType, MbtResult,
-    MbtTypeCli, Mbtiles, MbtilesCopier, PatchType, UpdateZoomType,
+    MbtTypeCli, Mbtiles, MbtilesCopier, PatchTypeCli, UpdateZoomType,
 };
 use tilejson::Bounds;
@@ -116,8 +116,8 @@ pub struct CopyArgs {
     #[arg(long, conflicts_with("diff_with_file"))]
     apply_patch: Option<PathBuf>,
     /// Specify the type of patch file to generate.
-    #[arg(long, requires("diff_with_file"), default_value_t=PatchType::default())]
-    patch_type: PatchType,
+    #[arg(long, requires("diff_with_file"), default_value_t=PatchTypeCli::default())]
+    patch_type: PatchTypeCli,
 }
 
 #[allow(clippy::doc_markdown)]
@@ -130,8 +130,8 @@ pub struct DiffArgs {
     /// Output file to write the resulting difference to
     diff: PathBuf,
     /// Specify the type of patch file to generate.
-    #[arg(long, default_value_t=PatchType::default())]
-    patch_type: PatchType,
+    #[arg(long, default_value_t=PatchTypeCli::default())]
+    patch_type: PatchTypeCli,
 
     #[command(flatten)]
     pub options: SharedCopyOpts,
@@ -181,12 +181,12 @@ impl SharedCopyOpts {
         dst_file: PathBuf,
         diff_with_file: Option<PathBuf>,
         apply_patch: Option<PathBuf>,
-        patch_type: PatchType,
+        patch_type: PatchTypeCli,
     ) -> MbtilesCopier {
         MbtilesCopier {
             src_file,
             dst_file,
-            diff_with_file: diff_with_file.map(|p| (p, patch_type)),
+            diff_with_file: diff_with_file.map(|p| (p, patch_type.into())),
             apply_patch,
             // Shared
             copy: self.copy,
@@ -329,7 +329,6 @@ mod tests {
     use clap::error::ErrorKind;
     use clap::Parser;
     use mbtiles::CopyDuplicateMode;
-    use mbtiles::PatchType::Whole;
 
     use super::*;
     use crate::Commands::{ApplyPatch, Copy, Diff, MetaGetValue, MetaSetValue, Validate};
@@ -540,7 +539,7 @@ mod tests {
                 file1: PathBuf::from("file1.mbtiles"),
                 file2: PathBuf::from("file2.mbtiles"),
                 diff: PathBuf::from("../delta.mbtiles"),
-                patch_type: Whole,
+                patch_type: PatchTypeCli::Whole,
                 options: SharedCopyOpts {
                     on_duplicate: Some(CopyDuplicateMode::Override),
                     ..Default::default()


@@ -4,19 +4,51 @@ use std::sync::atomic::Ordering::Relaxed;
 use std::sync::Arc;
 use std::time::Instant;
 
+use enum_display::EnumDisplay;
 use flume::{bounded, Receiver, Sender};
 use futures::TryStreamExt;
 use log::{debug, error, info};
 use martin_tile_utils::{decode_brotli, decode_gzip, encode_brotli, encode_gzip, TileCoord};
+use serde::{Deserialize, Serialize};
 use sqlite_compressions::{BsdiffRawDiffer, Differ as _};
 use sqlx::{query, Executor, Row, SqliteConnection};
 use xxhash_rust::xxh3::xxh3_64;
 
 use crate::MbtType::{Flat, FlatWithHash, Normalized};
-use crate::PatchType::Whole;
-use crate::{
-    create_bsdiffraw_tables, get_bsdiff_tbl_name, MbtError, MbtResult, MbtType, Mbtiles, PatchType,
-};
+use crate::PatchType::{BinDiffGz, BinDiffRaw};
+use crate::{create_bsdiffraw_tables, get_bsdiff_tbl_name, MbtError, MbtResult, MbtType, Mbtiles};
+
+#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, EnumDisplay)]
+#[enum_display(case = "Kebab")]
+#[cfg_attr(feature = "cli", derive(clap::ValueEnum))]
+pub enum PatchTypeCli {
+    /// Patch file will contain the entire tile if it is different from the source
+    #[default]
+    Whole,
+    /// Use bin-diff to store only the bytes changed between two versions of each tile. Treats content as gzipped blobs, decoding them before diffing.
+    BinDiffGz,
+    /// Use bin-diff to store only the bytes changed between two versions of each tile. Treats content as blobs without any special encoding.
+    BinDiffRaw,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, EnumDisplay)]
+#[enum_display(case = "Kebab")]
+pub enum PatchType {
+    /// Use bin-diff to store only the bytes changed between two versions of each tile. Treats content as gzipped blobs, decoding them before diffing.
+    BinDiffGz,
+    /// Use bin-diff to store only the bytes changed between two versions of each tile. Treats content as blobs without any special encoding.
+    BinDiffRaw,
+}
+
+impl From<PatchTypeCli> for Option<PatchType> {
+    fn from(cli: PatchTypeCli) -> Self {
+        match cli {
+            PatchTypeCli::Whole => None,
+            PatchTypeCli::BinDiffGz => Some(BinDiffGz),
+            PatchTypeCli::BinDiffRaw => Some(BinDiffRaw),
+        }
+    }
+}
 
 pub trait BinDiffer<S: Send + 'static, T: Send + 'static>: Sized + Send + Sync + 'static {
     fn query(
@@ -153,7 +185,6 @@ impl BinDiffDiffer {
         dif_type: MbtType,
         patch_type: PatchType,
     ) -> Self {
-        assert_ne!(patch_type, Whole, "Invalid for BinDiffDiffer");
         let insert_sql = format!(
             "INSERT INTO {}(zoom_level, tile_column, tile_row, patch_data, tile_xxh3_64_hash) VALUES (?, ?, ?, ?, ?)",
             get_bsdiff_tbl_name(patch_type));
@@ -219,7 +250,7 @@ impl BinDiffer<DifferBefore, DifferAfter> for BinDiffDiffer {
     fn process(&self, value: DifferBefore) -> MbtResult<DifferAfter> {
         let mut old_tile = value.old_tile_data;
         let mut new_tile = value.new_tile_data;
-        if self.patch_type == PatchType::BinDiffGz {
+        if self.patch_type == BinDiffGz {
             old_tile = decode_gzip(&old_tile).inspect_err(|e| {
                 error!("Unable to gzip-decode source tile {:?}: {e}", value.coord);
             })?;
@@ -258,14 +289,14 @@ impl BinDiffer<DifferBefore, DifferAfter> for BinDiffDiffer {
 
 pub struct ApplierBefore {
     coord: TileCoord,
-    tile_data: Vec<u8>,
+    old_tile: Vec<u8>,
     patch_data: Vec<u8>,
     uncompressed_tile_hash: u64,
 }
 
 pub struct ApplierAfter {
     coord: TileCoord,
-    data: Vec<u8>,
+    new_tile: Vec<u8>,
     new_tile_hash: String,
 }
 
@@ -322,7 +353,7 @@ impl BinDiffer<ApplierBefore, ApplierAfter> for BinDiffPatcher {
                     x: row.get(1),
                     y: row.get(2),
                 },
-                tile_data: row.get(3),
+                old_tile: row.get(3),
                 patch_data: row.get(4),
                 #[allow(clippy::cast_sign_loss)]
                 uncompressed_tile_hash: row.get::<i64, _>(5) as u64,
@@ -336,12 +367,21 @@ impl BinDiffer<ApplierBefore, ApplierAfter> for BinDiffPatcher {
     }
 
     fn process(&self, value: ApplierBefore) -> MbtResult<ApplierAfter> {
-        let tile_data = decode_gzip(&value.tile_data)
-            .inspect_err(|e| error!("Unable to gzip-decode source tile {:?}: {e}", value.coord))?;
+        let old_tile = if self.patch_type == BinDiffGz {
+            decode_gzip(&value.old_tile).inspect_err(|e| {
+                error!("Unable to gzip-decode source tile {:?}: {e}", value.coord);
+            })?
+        } else {
+            value.old_tile
+        };
         let patch_data = decode_brotli(&value.patch_data)
            .inspect_err(|e| error!("Unable to brotli-decode patch data {:?}: {e}", value.coord))?;
-        let new_tile = BsdiffRawDiffer::patch(&tile_data, &patch_data)
+        let mut new_tile = BsdiffRawDiffer::patch(&old_tile, &patch_data)
             .inspect_err(|e| error!("Unable to patch tile {:?}: {e}", value.coord))?;
+
+        // Verify the hash of the patched tile is what we expect
         let new_tile_hash = xxh3_64(&new_tile);
         if new_tile_hash != value.uncompressed_tile_hash {
             return Err(MbtError::BinDiffIncorrectTileHash(
@@ -351,16 +391,18 @@ impl BinDiffer<ApplierBefore, ApplierAfter> for BinDiffPatcher {
             ));
         }
 
-        let data = encode_gzip(&new_tile)?;
+        if self.patch_type == BinDiffGz {
+            new_tile = encode_gzip(&new_tile)?;
+        };
 
         Ok(ApplierAfter {
             coord: value.coord,
             new_tile_hash: if self.dst_type == FlatWithHash {
-                format!("{:X}", md5::compute(&data))
+                format!("{:X}", md5::compute(&new_tile))
             } else {
                 String::default() // This is a fast noop, no memory alloc is performed
             },
-            data,
+            new_tile,
        })
    }
 
@@ -378,7 +420,7 @@ impl BinDiffer<ApplierBefore, ApplierAfter> for BinDiffPatcher {
             .bind(value.coord.z)
             .bind(value.coord.x)
             .bind(value.coord.y)
-            .bind(value.data);
+            .bind(value.new_tile);
 
         if self.dst_type == FlatWithHash {
             q = q.bind(value.new_tile_hash);


@@ -10,7 +10,8 @@ use sqlite_hashes::rusqlite::Connection;
 use sqlx::{query, Connection as _, Executor as _, Row, SqliteConnection};
 use tilejson::Bounds;
 
-use crate::bindiff::{BinDiffDiffer, BinDiffPatcher, BinDiffer as _};
+use crate::bindiff::PatchType::BinDiffGz;
+use crate::bindiff::{BinDiffDiffer, BinDiffPatcher, BinDiffer as _, PatchType};
 use crate::errors::MbtResult;
 use crate::mbtiles::PatchFileInfo;
 use crate::queries::{
@@ -19,7 +20,7 @@ use crate::queries::{
 use crate::AggHashType::Verify;
 use crate::IntegrityCheckType::Quick;
 use crate::MbtType::{Flat, FlatWithHash, Normalized};
-use crate::PatchType::{BinDiffGz, BinDiffRaw, Whole};
+use crate::PatchType::BinDiffRaw;
 use crate::{
     action_with_rusqlite, get_bsdiff_tbl_name, invert_y_value, reset_db_settings, AggHashType,
     CopyType, MbtError, MbtType, MbtTypeCli, Mbtiles, AGG_TILES_HASH, AGG_TILES_HASH_AFTER_APPLY,
@@ -35,19 +36,6 @@ pub enum CopyDuplicateMode {
     Abort,
 }
 
-#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, EnumDisplay)]
-#[enum_display(case = "Kebab")]
-#[cfg_attr(feature = "cli", derive(clap::ValueEnum))]
-pub enum PatchType {
-    /// Patch file will contain the entire tile if it is different from the source
-    #[default]
-    Whole,
-    /// Use bin-diff to store only the bytes changed between two versions of each tile. Treats content as gzipped blobs, decoding them before diffing.
-    BinDiffGz,
-    /// Use bin-diff to store only the bytes changed between two versions of each tile. Treats content as blobs without any special encoding.
-    BinDiffRaw,
-}
-
 impl CopyDuplicateMode {
     #[must_use]
     pub fn to_sql(&self) -> &'static str {
@@ -82,7 +70,7 @@ pub struct MbtilesCopier {
     /// Bounding box to copy, in the format `min_lon,min_lat,max_lon,max_lat`. Can be used multiple times.
     pub bbox: Vec<Bounds>,
     /// Compare source file with this file, and only copy non-identical tiles to destination. Also specifies the type of patch to generate.
-    pub diff_with_file: Option<(PathBuf, PatchType)>,
+    pub diff_with_file: Option<(PathBuf, Option<PatchType>)>,
     /// Apply a patch file while copying src to dst.
     pub apply_patch: Option<PathBuf>,
     /// Skip generating a global hash for mbtiles validation. By default, `mbtiles` will compute `agg_tiles_hash` metadata value.
@@ -213,7 +201,7 @@ impl MbtileCopierInt {
     async fn run_with_diff(
         self,
         dif_mbt: Mbtiles,
-        patch_type: PatchType,
+        patch_type: Option<PatchType>,
     ) -> MbtResult<SqliteConnection> {
         let mut dif_conn = dif_mbt.open_readonly().await?;
         let dif_info = dif_mbt.examine_diff(&mut dif_conn).await?;
@@ -231,7 +219,7 @@ impl MbtileCopierInt {
         dif_mbt.attach_to(&mut conn, "diffDb").await?;
         let dst_type = self.options.dst_type().unwrap_or(src_info.mbt_type);
 
-        if patch_type != Whole && matches!(dst_type, Normalized { .. }) {
+        if patch_type.is_some() && matches!(dst_type, Normalized { .. }) {
             return Err(MbtError::BinDiffRequiresFlatWithHash(dst_type));
         }
@@ -243,11 +231,7 @@ impl MbtileCopierInt {
             dif_type = dif_info.mbt_type,
             what = self.copy_text(),
             dst_path = self.dst_mbt.filepath(),
-            patch = match patch_type {
-                Whole => {""}
-                BinDiffGz => {" with bin-diff"}
-                BinDiffRaw => {" with bin-diff-raw"}
-            }
+            patch = patch_type_str(patch_type),
         );
 
         self.init_schema(&mut conn, src_info.mbt_type, dst_type)
@@ -265,7 +249,7 @@ impl MbtileCopierInt {
         detach_db(&mut conn, "diffDb").await?;
         detach_db(&mut conn, "sourceDb").await?;
 
-        if patch_type != Whole {
+        if let Some(patch_type) = patch_type {
             BinDiffDiffer::new(self.src_mbt.clone(), dif_mbt, dif_info.mbt_type, patch_type)
                 .run(&mut conn, self.get_where_clause("srcTiles."))
                 .await?;
@@ -302,7 +286,7 @@ impl MbtileCopierInt {
         let src_type = self.validate_src_file().await?.mbt_type;
         let dst_type = self.options.dst_type().unwrap_or(src_type);
 
-        if dif_info.patch_type != Whole && matches!(dst_type, Normalized { .. }) {
+        if dif_info.patch_type.is_some() && matches!(dst_type, Normalized { .. }) {
             return Err(MbtError::BinDiffRequiresFlatWithHash(dst_type));
         }
@@ -320,11 +304,7 @@ impl MbtileCopierInt {
             src_mbt = self.src_mbt,
             what = self.copy_text(),
             dst_path = self.dst_mbt.filepath(),
-            patch = match dif_info.patch_type {
-                Whole => {""}
-                BinDiffGz => {" with bin-diff"}
-                BinDiffRaw => {" with bin-diff-raw"}
-            }
+            patch = patch_type_str(dif_info.patch_type),
         );
 
         self.init_schema(&mut conn, src_type, dst_type).await?;
@@ -339,13 +319,8 @@ impl MbtileCopierInt {
         detach_db(&mut conn, "diffDb").await?;
         detach_db(&mut conn, "sourceDb").await?;
 
-        if dif_info.patch_type != Whole {
-            BinDiffPatcher::new(
-                self.src_mbt.clone(),
-                dif_mbt.clone(),
-                dst_type,
-                dif_info.patch_type,
-            )
+        if let Some(patch_type) = dif_info.patch_type {
+            BinDiffPatcher::new(self.src_mbt.clone(), dif_mbt.clone(), dst_type, patch_type)
                 .run(&mut conn, self.get_where_clause("srcTiles."))
                 .await?;
         }
@@ -353,7 +328,7 @@ impl MbtileCopierInt {
         // TODO: perhaps disable all except --copy all when using with diffs, or else is not making much sense
         if self.options.copy.copy_tiles() && !self.options.skip_agg_tiles_hash {
             self.dst_mbt.update_agg_tiles_hash(&mut conn).await?;
-            if dif_info.patch_type == BinDiffGz {
+            if matches!(dif_info.patch_type, Some(BinDiffGz)) {
                 info!("Skipping {AGG_TILES_HASH_AFTER_APPLY} validation because re-gzip-ing could produce different tile data. Each bindiff-ed tile was still verified with a hash value");
             } else {
                 let new_hash = self.dst_mbt.get_agg_tiles_hash(&mut conn).await?;
@@ -375,7 +350,8 @@ impl MbtileCopierInt {
             }
         }
 
-        let hash_type = if dif_info.patch_type == BinDiffGz || self.options.skip_agg_tiles_hash {
+        let hash_type =
+            if matches!(dif_info.patch_type, Some(BinDiffGz)) || self.options.skip_agg_tiles_hash {
                 AggHashType::Off
             } else {
                 Verify
@@ -731,11 +707,9 @@ fn get_select_from_apply_patch(
     let src_tiles = query_for_dst("sourceDb", src_type, dst_type);
     let diff_tiles = query_for_dst("diffDb", dif_info.mbt_type, dst_type);
 
-    let (bindiff_from, bindiff_cond) = if dif_info.patch_type == Whole {
-        (String::new(), "")
-    } else {
+    let (bindiff_from, bindiff_cond) = if let Some(patch_type) = dif_info.patch_type {
         // do not copy any tiles that are in the patch table
-        let tbl = get_bsdiff_tbl_name(dif_info.patch_type);
+        let tbl = get_bsdiff_tbl_name(patch_type);
         (
             format!(
                 "
@@ -746,6 +720,8 @@ fn get_select_from_apply_patch(
             ),
             "AND bdTbl.patch_data ISNULL",
         )
+    } else {
+        (String::new(), "")
     };
 
     // Take dif tile_data if it is set, otherwise take the one from src
@@ -769,7 +745,7 @@ fn get_select_from_apply_patch(
 fn get_select_from_with_diff(
     dif_type: MbtType,
     dst_type: MbtType,
-    patch_type: PatchType,
+    patch_type: Option<PatchType>,
 ) -> String {
     let tile_hash_expr;
     let diff_tiles;
@@ -792,10 +768,10 @@ fn get_select_from_with_diff(
         };
     }
 
-    let sql_cond = if patch_type == Whole {
-        "OR srcTiles.tile_data != difTiles.tile_data"
-    } else {
+    let sql_cond = if patch_type.is_some() {
         ""
+    } else {
+        "OR srcTiles.tile_data != difTiles.tile_data"
     };
     format!(
         "
@@ -842,6 +818,17 @@ fn get_select_from(src_type: MbtType, dst_type: MbtType) -> &'static str {
     }
 }
 
+fn patch_type_str(patch_type: Option<PatchType>) -> &'static str {
+    if let Some(v) = patch_type {
+        match v {
+            BinDiffGz => " with bin-diff on gzip-ed tiles",
+            BinDiffRaw => " with bin-diff-raw",
+        }
+    } else {
+        ""
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use sqlx::{Decode, Sqlite, SqliteConnection, Type};
@@ -1022,7 +1009,7 @@ mod tests {
         let opt = MbtilesCopier {
             src_file: src.clone(),
             dst_file: dst.clone(),
-            diff_with_file: Some((diff_file.clone(), Whole)),
+            diff_with_file: Some((diff_file.clone(), None)),
             force: true,
             ..Default::default()
         };


@@ -102,7 +102,7 @@ pub enum MbtError {
     #[error("Applying bindiff to tile {0} resulted in mismatching hash: expecting `{1}` != computed uncompressed value `{2}`")]
     BinDiffIncorrectTileHash(String, String, String),
 
-    #[error("Internal error creating bin-diff table")]
+    #[error("Unable to generate or apply bin-diff patch")]
     BindiffError,
 
     #[error("BinDiff patch files can be only applied with `mbtiles copy --apply-patch` command")]


@@ -1,10 +1,11 @@
 #![doc = include_str!("../README.md")]
 
 // Re-export sqlx
+pub use bindiff::{PatchType, PatchTypeCli};
 pub use sqlx;
 
 mod copier;
-pub use copier::{CopyDuplicateMode, MbtilesCopier, PatchType};
+pub use copier::{CopyDuplicateMode, MbtilesCopier};
 
 mod errors;
 pub use errors::{MbtError, MbtResult};


@@ -10,8 +10,9 @@ use sqlite_hashes::register_md5_functions;
 use sqlx::sqlite::SqliteConnectOptions;
 use sqlx::{query, Connection as _, Executor, SqliteConnection, SqliteExecutor, Statement};
 
+use crate::bindiff::PatchType;
 use crate::errors::{MbtError, MbtResult};
-use crate::{invert_y_value, CopyDuplicateMode, MbtType, PatchType};
+use crate::{invert_y_value, CopyDuplicateMode, MbtType};
 
 #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, Serialize, Deserialize, EnumDisplay)]
 #[enum_display(case = "Kebab")]
@@ -48,7 +49,7 @@ pub struct PatchFileInfo {
     pub agg_tiles_hash: Option<String>,
     pub agg_tiles_hash_before_apply: Option<String>,
     pub agg_tiles_hash_after_apply: Option<String>,
-    pub patch_type: PatchType,
+    pub patch_type: Option<PatchType>,
 }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]

View File

@@ -5,7 +5,6 @@ use sqlx::{query, Connection as _};
 
 use crate::queries::detach_db;
 use crate::MbtType::{Flat, FlatWithHash, Normalized};
-use crate::PatchType::Whole;
 use crate::{
     MbtError, MbtResult, MbtType, Mbtiles, AGG_TILES_HASH, AGG_TILES_HASH_AFTER_APPLY,
     AGG_TILES_HASH_BEFORE_APPLY,
@@ -17,7 +16,7 @@ pub async fn apply_patch(base_file: PathBuf, patch_file: PathBuf, force: bool) -
     let mut conn = patch_mbt.open_readonly().await?;
     let patch_info = patch_mbt.examine_diff(&mut conn).await?;
 
-    if patch_info.patch_type != Whole {
+    if patch_info.patch_type.is_some() {
         return Err(MbtError::UnsupportedPatchType);
     }
     patch_mbt.validate_diff_info(&patch_info, force)?;


@@ -3,9 +3,10 @@ use martin_tile_utils::MAX_ZOOM;
 use sqlite_compressions::rusqlite::Connection;
 use sqlx::{query, Executor as _, Row, SqliteConnection, SqliteExecutor};
 
+use crate::bindiff::PatchType;
 use crate::errors::MbtResult;
 use crate::MbtError::InvalidZoomValue;
-use crate::{MbtType, PatchType};
+use crate::MbtType;
 
 /// Returns true if the database is empty (no tables/indexes/...)
 pub async fn is_empty_database<T>(conn: &mut T) -> MbtResult<bool>
@@ -215,7 +216,6 @@ pub fn get_bsdiff_tbl_name(patch_type: PatchType) -> &'static str {
     match patch_type {
         PatchType::BinDiffRaw => "bsdiffraw",
         PatchType::BinDiffGz => "bsdiffrawgz",
-        PatchType::Whole => panic!("Unexpected PatchType::Whole"),
     }
 }
 
@@ -241,7 +241,7 @@ where
 
 /// Check if `MBTiles` has a table or a view named `bsdiffraw` or `bsdiffrawgz` with needed fields,
 /// and return the corresponding patch type. If missing, return `PatchType::Whole`
-pub async fn get_patch_type<T>(conn: &mut T) -> MbtResult<PatchType>
+pub async fn get_patch_type<T>(conn: &mut T) -> MbtResult<Option<PatchType>>
 where
     for<'e> &'e mut T: SqliteExecutor<'e>,
 {
@@ -272,11 +272,11 @@ where
             .unwrap_or_default()
             == 1
         {
-            return Ok(pt);
+            return Ok(Some(pt));
         }
     }
 
-    Ok(PatchType::Whole)
+    Ok(None)
 }
 
 pub async fn create_normalized_tables<T>(conn: &mut T) -> MbtResult<()>
pub async fn create_normalized_tables<T>(conn: &mut T) -> MbtResult<()> pub async fn create_normalized_tables<T>(conn: &mut T) -> MbtResult<()>


@@ -11,10 +11,10 @@ use martin_tile_utils::xyz_to_bbox;
 use mbtiles::AggHashType::Verify;
 use mbtiles::IntegrityCheckType::Off;
 use mbtiles::MbtTypeCli::{Flat, FlatWithHash, Normalized};
-use mbtiles::PatchType::{BinDiffRaw, Whole};
+use mbtiles::PatchTypeCli::{BinDiffGz, BinDiffRaw};
 use mbtiles::{
     apply_patch, init_mbtiles_schema, invert_y_value, CopyType, MbtResult, MbtTypeCli, Mbtiles,
-    MbtilesCopier, PatchType, UpdateZoomType,
+    MbtilesCopier, PatchTypeCli, UpdateZoomType,
 };
 use pretty_assertions::assert_eq as pretty_assert_eq;
 use rstest::{fixture, rstest};
@@ -22,6 +22,8 @@ use serde::Serialize;
 use sqlx::{query, query_as, Executor as _, Row, SqliteConnection};
 use tokio::runtime::Handle;
 
+const GZIP_TILES: &str = "UPDATE tiles SET tile_data = gzip(tile_data);";
+
 const TILES_V1: &str = "
     INSERT INTO tiles (zoom_level, tile_column, tile_row, tile_data) VALUES
       --(z, x, y, data) -- rules: keep if x=0, edit if x=1, remove if x=2
@@ -138,21 +140,28 @@ macro_rules! open {
 /// Create a new `SQLite` file of given type without `agg_tiles_hash` metadata value
 macro_rules! new_file_no_hash {
     ($function:ident, $dst_type_cli:expr, $sql_meta:expr, $sql_data:expr, $($arg:tt)*) => {{
-        new_file!(@true, $function, $dst_type_cli, $sql_meta, $sql_data, $($arg)*)
+        new_file!(@true, $function, $dst_type_cli, $sql_meta, $sql_data, "", $($arg)*)
     }};
 }
 
 /// Create a new `SQLite` file of type `$dst_type_cli` with the given metadata and tiles
 macro_rules! new_file {
     ($function:ident, $dst_type_cli:expr, $sql_meta:expr, $sql_data:expr, $($arg:tt)*) => {
-        new_file!(@false, $function, $dst_type_cli, $sql_meta, $sql_data, $($arg)*)
+        new_file!(@false, $function, $dst_type_cli, $sql_meta, $sql_data, "", $($arg)*)
     };
 
-    (@$skip_agg:expr, $function:tt, $dst_type_cli:expr, $sql_meta:expr, $sql_data:expr, $($arg:tt)*) => {{
+    (+ $sql:expr, $function:ident, $dst_type_cli:expr, $sql_meta:expr, $sql_data:expr, $($arg:tt)*) => {
+        new_file!(@false, $function, $dst_type_cli, $sql_meta, $sql_data, $sql, $($arg)*)
+    };
+
+    (@ $skip_agg:expr, $function:tt, $dst_type_cli:expr, $sql_meta:expr, $sql_data:expr, $sql:expr, $($arg:tt)*) => {{
         let (tmp_mbt, mut cn_tmp) = open!(@"temp", $function, $($arg)*);
         init_mbtiles_schema(&mut cn_tmp, mbtiles::MbtType::Flat).await.unwrap();
         cn_tmp.execute($sql_data).await.unwrap();
         cn_tmp.execute($sql_meta).await.unwrap();
+        if $sql != "" {
+            cn_tmp.execute($sql).await.unwrap();
+        }
 
         let (dst_mbt, cn_dst) = open!($function, $($arg)*);
         copy! {
@@ -292,7 +301,7 @@ fn databases() -> Databases {
         copy! {
             result.path("v1", mbt_typ),
             path(&dif_mbt),
-            diff_with_file => Some((result.path("v2", mbt_typ), Whole)),
+            diff_with_file => Some((result.path("v2", mbt_typ), None)),
         };
         let dmp = dump(&mut dif_cn).await.unwrap();
         assert_dump!(&dmp, "{typ}__dif");
@@ -302,21 +311,60 @@ fn databases() -> Databases {
         }
         result.add("dif", mbt_typ, dmp, dif_mbt, Some(hash), dif_cn);
 
-        // ----------------- bdr (v1 -> v2) -- bin-diff-raw -----------------
-        if mbt_typ == Flat || mbt_typ == FlatWithHash {
-            let (bdr_mbt, mut bdr_cn) = open!(databases, "{typ}__bdr");
-            copy! {
-                result.path("v1", mbt_typ),
-                path(&bdr_mbt),
-                diff_with_file => Some((result.path("v2", mbt_typ), BinDiffRaw)),
-            };
-            let dmp = dump(&mut bdr_cn).await.unwrap();
-            assert_dump!(&dmp, "{typ}__bdr");
-            let hash = bdr_mbt.open_and_validate(Off, Verify).await.unwrap();
-            allow_duplicates! {
-                assert_snapshot!(hash, @"585A88FEEC740448FF1EB4F96088FFE3");
-            }
-            result.add("bdr", mbt_typ, dmp, bdr_mbt, Some(hash), bdr_cn);
-        }
+        // ----------------- v1z -----------------
+        let (v1z_mbt, mut v1z_cn) =
+            new_file!(+GZIP_TILES, databases, mbt_typ, METADATA_V1, TILES_V1, "{typ}__v1z");
+        let dmp = dump(&mut v1z_cn).await.unwrap();
+        assert_dump!(&dmp, "{typ}__v1z");
+        let hash = v1z_mbt.open_and_validate(Off, Verify).await.unwrap();
+        allow_duplicates! {
+            assert_snapshot!(hash, @"C0CA886B149CE416242AB2AFE8E641AD");
+        }
+        result.add("v1z", mbt_typ, dmp, v1z_mbt, Some(hash), v1z_cn);
+
+        // ----------------- v2z -----------------
+        let (v2z_mbt, mut v2z_cn) =
+            new_file!(+GZIP_TILES, databases, mbt_typ, METADATA_V2, TILES_V2, "{typ}__v2z");
+        let dmp = dump(&mut v2z_cn).await.unwrap();
+        assert_dump!(&dmp, "{typ}__v2");
+        let hash = v2z_mbt.open_and_validate(Off, Verify).await.unwrap();
+        allow_duplicates! {
+            assert_snapshot!(hash, @"A18D0C39730FB52E5A547F096F5C60E8");
+        }
+        result.add("v2z", mbt_typ, dmp, v2z_mbt, Some(hash), v2z_cn);
+
+        // ----------------- bin-diff (v1 -> v2) -----------------
+        if mbt_typ == Flat || mbt_typ == FlatWithHash {
+            for (a, b, patch_type, pt) in [
+                ("v1", "v2", BinDiffRaw, "bdr"),
+                ("v1z", "v2z", BinDiffGz, "bdz"),
+            ] {
+                let (bd_mbt, mut bd_cn) = open!(databases, "{typ}__{pt}");
+                copy! {
+                    result.path(a, mbt_typ),
+                    path(&bd_mbt),
+                    diff_with_file => Some((result.path(b, mbt_typ), patch_type.into())),
+                };
+                let dmp = dump(&mut bd_cn).await.unwrap();
+                assert_dump!(&dmp, "{typ}__{pt}");
+                let hash = bd_mbt.open_and_validate(Off, Verify).await.unwrap();
+                match patch_type {
+                    PatchTypeCli::Whole => {
+                        unreachable!()
+                    }
+                    BinDiffGz => {
+                        allow_duplicates!(
+                            assert_snapshot!(hash, @"9AFEC3326B465CB939664C47A572D4C6")
+                        );
+                    }
+                    BinDiffRaw => {
+                        allow_duplicates!(
+                            assert_snapshot!(hash, @"585A88FEEC740448FF1EB4F96088FFE3")
+                        );
+                    }
+                }
+                result.add(pt, mbt_typ, dmp, bd_mbt, Some(hash), bd_cn);
+            }
+        }
 
         // ----------------- v1_clone -----------------
@@ -340,7 +388,7 @@ fn databases() -> Databases {
         copy! {
             result.path("v1", mbt_typ),
             path(&dif_empty_mbt),
-            diff_with_file => Some((result.path("v1_clone", mbt_typ), Whole)),
+            diff_with_file => Some((result.path("v1_clone", mbt_typ), None)),
         };
         let dmp = dump(&mut dif_empty_cn).await.unwrap();
         assert_dump!(&dmp, "{typ}__dif_empty");
@@ -491,7 +539,7 @@ async fn diff_and_patch(
     copy! {
         databases.path(a_db, a_type),
         path(&dif_mbt),
-        diff_with_file => Some((databases.path(b_db, b_type), Whole)),
+        diff_with_file => Some((databases.path(b_db, b_type), None)),
         dst_type_cli => dif_type,
     };
     pretty_assert_eq!(
@@ -533,12 +581,15 @@ async fn diff_and_patch_bsdiff(
     #[values(Flat, FlatWithHash)] a_type: MbtTypeCli,
     #[values(Flat, FlatWithHash)] b_type: MbtTypeCli,
    #[values(Flat, FlatWithHash)] dif_type: MbtTypeCli,
-    #[values(BinDiffRaw)] patch_type: PatchType,
     #[values(Flat, FlatWithHash)] dst_type: MbtTypeCli,
-    #[values(("v1", "v2", "bdr"))] tilesets: (&'static str, &'static str, &'static str),
+    #[values(
+        ("v1", "v2", "bdr", BinDiffRaw),
+        ("v1z", "v2z", "bdz", BinDiffGz),
+    )]
+    tilesets: (&'static str, &'static str, &'static str, PatchTypeCli),
     #[notrace] databases: &Databases,
 ) -> MbtResult<()> {
-    let (a_db, b_db, dif_db) = tilesets;
+    let (a_db, b_db, dif_db, patch_type) = tilesets;
     let dif = shorten(dif_type);
     let prefix = format!(
         "{a_db}_{}--{b_db}_{}={dif}_{patch_type}",
@@ -551,7 +602,7 @@ async fn diff_and_patch_bsdiff(
     copy! {
         databases.path(a_db, a_type),
         path(&dif_mbt),
-        diff_with_file => Some((databases.path(b_db, b_type), patch_type)),
+        diff_with_file => Some((databases.path(b_db, b_type), patch_type.into())),
         dst_type_cli => Some(dif_type),
     };
     pretty_assert_eq!(&dump(&mut dif_cn).await?, databases.dump(dif_db, dif_type));
@@ -624,9 +675,8 @@ async fn test_one() {
         FlatWithHash,
         FlatWithHash,
         FlatWithHash,
-        BinDiffRaw,
         FlatWithHash,
-        ("v1", "v2", "bdr"),
+        ("v1", "v2", "bdr", BinDiffRaw),
         &db,
     )
     .await


@@ -0,0 +1,67 @@
---
source: mbtiles/tests/copy.rs
expression: actual_value
---
[[]]
type = 'table'
tbl_name = 'bsdiffrawgz'
sql = '''
CREATE TABLE bsdiffrawgz (
zoom_level integer NOT NULL,
tile_column integer NOT NULL,
tile_row integer NOT NULL,
patch_data blob NOT NULL,
tile_xxh3_64_hash integer NOT NULL,
PRIMARY KEY(zoom_level, tile_column, tile_row))'''
values = [
'( 5, 1, 1, blob(1B1E00F80799700B0AE100F45284A210A00708A0C03B), 479130493 )',
'( 5, 1, 2, blob(1B2000F8079970D30C62EDF2D8285E11400000BB2F01), -1097843426 )',
'( 5, 1, 3, blob(;), 953390274 )',
'( 6, 1, 4, blob(1B1F00F8077163E37063303653B324A12804E8010450E0ED01), 386481748 )',
]
[[]]
type = 'table'
tbl_name = 'metadata'
sql = '''
CREATE TABLE metadata (
name text NOT NULL PRIMARY KEY,
value text)'''
values = [
'( "agg_tiles_hash", "9AFEC3326B465CB939664C47A572D4C6" )',
'( "agg_tiles_hash_after_apply", "A18D0C39730FB52E5A547F096F5C60E8" )',
'( "agg_tiles_hash_before_apply", "C0CA886B149CE416242AB2AFE8E641AD" )',
'( "md-edit", "value - v2" )',
'( "md-new", "value - new" )',
'( "md-remove", NULL )',
]
[[]]
type = 'table'
tbl_name = 'tiles'
sql = '''
CREATE TABLE tiles (
zoom_level integer NOT NULL,
tile_column integer NOT NULL,
tile_row integer NOT NULL,
tile_data blob,
PRIMARY KEY(zoom_level, tile_column, tile_row))'''
values = [
'( 5, 2, 2, NULL )',
'( 5, 2, 3, NULL )',
'( 5, 3, 7, blob(1F8B08000000000000FFCB4B2D07004544E36B03000000) )',
'( 5, 3, 8, blob(1F8B08000000000000FFCB4B2D07004544E36B03000000) )',
'( 6, 2, 6, NULL )',
]
[[]]
type = 'index'
tbl_name = 'bsdiffrawgz'
[[]]
type = 'index'
tbl_name = 'metadata'
[[]]
type = 'index'
tbl_name = 'tiles'


@@ -0,0 +1,50 @@
---
source: mbtiles/tests/copy.rs
expression: actual_value
---
[[]]
type = 'table'
tbl_name = 'metadata'
sql = '''
CREATE TABLE metadata (
name text NOT NULL PRIMARY KEY,
value text)'''
values = [
'( "agg_tiles_hash", "C0CA886B149CE416242AB2AFE8E641AD" )',
'( "md-edit", "value - v1" )',
'( "md-remove", "value - remove" )',
'( "md-same", "value - same" )',
]
[[]]
type = 'table'
tbl_name = 'tiles'
sql = '''
CREATE TABLE tiles (
zoom_level integer NOT NULL,
tile_column integer NOT NULL,
tile_row integer NOT NULL,
tile_data blob,
PRIMARY KEY(zoom_level, tile_column, tile_row))'''
values = [
'( 3, 6, 7, blob(1F8B08000000000000FF2BCACF2F01005BF9F41604000000) )',
'( 5, 0, 0, blob(1F8B08000000000000FF2B4ECC4D050044F150FC04000000) )',
'( 5, 0, 1, blob(1F8B08000000000000FF03000000000000000000) )',
'( 5, 1, 1, blob(1F8B08000000000000FF4B4DC92CD12D3304007ED8D6BF07000000) )',
'( 5, 1, 2, blob(1F8B08000000000000FF03000000000000000000) )',
'( 5, 1, 3, blob(1F8B08000000000000FFCBCBCFD34DCD2D28A9040006173DB509000000) )',
'( 5, 2, 2, blob(1F8B08000000000000FF2B4ACDCD2F4B0500301D806806000000) )',
'( 5, 2, 3, blob(1F8B08000000000000FF03000000000000000000) )',
'( 6, 0, 3, blob(1F8B08000000000000FF2B4ECC4D050044F150FC04000000) )',
'( 6, 0, 5, blob(1F8B08000000000000FF33D4CD4E4D2DD035D42DCA050026D508C30B000000) )',
'( 6, 1, 4, blob(1F8B08000000000000FF4B4DC92CD12D3304007ED8D6BF07000000) )',
'( 6, 2, 6, blob(1F8B08000000000000FF33D4CD4E4D2DD035D42DCA050026D508C30B000000) )',
]
[[]]
type = 'index'
tbl_name = 'metadata'
[[]]
type = 'index'
tbl_name = 'tiles'


@@ -0,0 +1,49 @@
---
source: mbtiles/tests/copy.rs
expression: actual_value
---
[[]]
type = 'table'
tbl_name = 'metadata'
sql = '''
CREATE TABLE metadata (
name text NOT NULL PRIMARY KEY,
value text)'''
values = [
'( "agg_tiles_hash", "A18D0C39730FB52E5A547F096F5C60E8" )',
'( "md-edit", "value - v2" )',
'( "md-new", "value - new" )',
'( "md-same", "value - same" )',
]
[[]]
type = 'table'
tbl_name = 'tiles'
sql = '''
CREATE TABLE tiles (
zoom_level integer NOT NULL,
tile_column integer NOT NULL,
tile_row integer NOT NULL,
tile_data blob,
PRIMARY KEY(zoom_level, tile_column, tile_row))'''
values = [
'( 3, 6, 7, blob(1F8B08000000000000FF2BCACF2F01005BF9F41604000000) )',
'( 5, 0, 0, blob(1F8B08000000000000FF2B4ECC4D050044F150FC04000000) )',
'( 5, 0, 1, blob(1F8B08000000000000FF03000000000000000000) )',
'( 5, 1, 1, blob(1F8B08000000000000FF4B4DC92CD12D330200C489DF2607000000) )',
'( 5, 1, 2, blob(1F8B08000000000000FFCBCB2FD14DCD2D28A9040086D4937609000000) )',
'( 5, 1, 3, blob(1F8B08000000000000FF03000000000000000000) )',
'( 5, 3, 7, blob(1F8B08000000000000FFCB4B2D07004544E36B03000000) )',
'( 5, 3, 8, blob(1F8B08000000000000FFCB4B2D07004544E36B03000000) )',
'( 6, 0, 3, blob(1F8B08000000000000FF2B4ECC4D050044F150FC04000000) )',
'( 6, 0, 5, blob(1F8B08000000000000FF33D4CD4E4D2DD035D42DCA050026D508C30B000000) )',
'( 6, 1, 4, blob(1F8B08000000000000FF4B4DC92CD12D334A04006367987408000000) )',
]
[[]]
type = 'index'
tbl_name = 'metadata'
[[]]
type = 'index'
tbl_name = 'tiles'


@@ -0,0 +1,75 @@
---
source: mbtiles/tests/copy.rs
expression: actual_value
---
[[]]
type = 'table'
tbl_name = 'bsdiffrawgz'
sql = '''
CREATE TABLE bsdiffrawgz (
zoom_level integer NOT NULL,
tile_column integer NOT NULL,
tile_row integer NOT NULL,
patch_data blob NOT NULL,
tile_xxh3_64_hash integer NOT NULL,
PRIMARY KEY(zoom_level, tile_column, tile_row))'''
values = [
'( 5, 1, 1, blob(1B1E00F80799700B0AE100F45284A210A00708A0C03B), 479130493 )',
'( 5, 1, 2, blob(1B2000F8079970D30C62EDF2D8285E11400000BB2F01), -1097843426 )',
'( 5, 1, 3, blob(;), 953390274 )',
'( 6, 1, 4, blob(1B1F00F8077163E37063303653B324A12804E8010450E0ED01), 386481748 )',
]
[[]]
type = 'table'
tbl_name = 'metadata'
sql = '''
CREATE TABLE metadata (
name text NOT NULL PRIMARY KEY,
value text)'''
values = [
'( "agg_tiles_hash", "9AFEC3326B465CB939664C47A572D4C6" )',
'( "agg_tiles_hash_after_apply", "A18D0C39730FB52E5A547F096F5C60E8" )',
'( "agg_tiles_hash_before_apply", "C0CA886B149CE416242AB2AFE8E641AD" )',
'( "md-edit", "value - v2" )',
'( "md-new", "value - new" )',
'( "md-remove", NULL )',
]
[[]]
type = 'table'
tbl_name = 'tiles_with_hash'
sql = '''
CREATE TABLE tiles_with_hash (
zoom_level integer NOT NULL,
tile_column integer NOT NULL,
tile_row integer NOT NULL,
tile_data blob,
tile_hash text,
PRIMARY KEY(zoom_level, tile_column, tile_row))'''
values = [
'( 5, 2, 2, NULL, "" )',
'( 5, 2, 3, NULL, "" )',
'( 5, 3, 7, blob(1F8B08000000000000FFCB4B2D07004544E36B03000000), "E1A151E7B18F8B2C53F94DF4CA201026" )',
'( 5, 3, 8, blob(1F8B08000000000000FFCB4B2D07004544E36B03000000), "E1A151E7B18F8B2C53F94DF4CA201026" )',
'( 6, 2, 6, NULL, "" )',
]
[[]]
type = 'index'
tbl_name = 'bsdiffrawgz'
[[]]
type = 'index'
tbl_name = 'metadata'
[[]]
type = 'index'
tbl_name = 'tiles_with_hash'
[[]]
type = 'view'
tbl_name = 'tiles'
sql = '''
CREATE VIEW tiles AS
SELECT zoom_level, tile_column, tile_row, tile_data FROM tiles_with_hash'''


@@ -0,0 +1,58 @@
---
source: mbtiles/tests/copy.rs
expression: actual_value
---
[[]]
type = 'table'
tbl_name = 'metadata'
sql = '''
CREATE TABLE metadata (
name text NOT NULL PRIMARY KEY,
value text)'''
values = [
'( "agg_tiles_hash", "C0CA886B149CE416242AB2AFE8E641AD" )',
'( "md-edit", "value - v1" )',
'( "md-remove", "value - remove" )',
'( "md-same", "value - same" )',
]
[[]]
type = 'table'
tbl_name = 'tiles_with_hash'
sql = '''
CREATE TABLE tiles_with_hash (
zoom_level integer NOT NULL,
tile_column integer NOT NULL,
tile_row integer NOT NULL,
tile_data blob,
tile_hash text,
PRIMARY KEY(zoom_level, tile_column, tile_row))'''
values = [
'( 3, 6, 7, blob(1F8B08000000000000FF2BCACF2F01005BF9F41604000000), "0A62AF3A2A3D38C0E8FF098A684C3EC1" )',
'( 5, 0, 0, blob(1F8B08000000000000FF2B4ECC4D050044F150FC04000000), "98AD49106F1CE0AA003027C229A70F7E" )',
'( 5, 0, 1, blob(1F8B08000000000000FF03000000000000000000), "163BE0A88C70CA629FD516DBAADAD96A" )',
'( 5, 1, 1, blob(1F8B08000000000000FF4B4DC92CD12D3304007ED8D6BF07000000), "A4D55DDE3B49D78DD9846688A0786F2D" )',
'( 5, 1, 2, blob(1F8B08000000000000FF03000000000000000000), "163BE0A88C70CA629FD516DBAADAD96A" )',
'( 5, 1, 3, blob(1F8B08000000000000FFCBCBCFD34DCD2D28A9040006173DB509000000), "4DE1D83BFD7CB6ACE583AA7D5A18725A" )',
'( 5, 2, 2, blob(1F8B08000000000000FF2B4ACDCD2F4B0500301D806806000000), "0D9BABF1C0099632D55F6274FB15419F" )',
'( 5, 2, 3, blob(1F8B08000000000000FF03000000000000000000), "163BE0A88C70CA629FD516DBAADAD96A" )',
'( 6, 0, 3, blob(1F8B08000000000000FF2B4ECC4D050044F150FC04000000), "98AD49106F1CE0AA003027C229A70F7E" )',
'( 6, 0, 5, blob(1F8B08000000000000FF33D4CD4E4D2DD035D42DCA050026D508C30B000000), "3CD93E79D6812F995906036D24282DBE" )',
'( 6, 1, 4, blob(1F8B08000000000000FF4B4DC92CD12D3304007ED8D6BF07000000), "A4D55DDE3B49D78DD9846688A0786F2D" )',
'( 6, 2, 6, blob(1F8B08000000000000FF33D4CD4E4D2DD035D42DCA050026D508C30B000000), "3CD93E79D6812F995906036D24282DBE" )',
]
[[]]
type = 'index'
tbl_name = 'metadata'
[[]]
type = 'index'
tbl_name = 'tiles_with_hash'
[[]]
type = 'view'
tbl_name = 'tiles'
sql = '''
CREATE VIEW tiles AS
SELECT zoom_level, tile_column, tile_row, tile_data FROM tiles_with_hash'''


@@ -0,0 +1,57 @@
---
source: mbtiles/tests/copy.rs
expression: actual_value
---
[[]]
type = 'table'
tbl_name = 'metadata'
sql = '''
CREATE TABLE metadata (
name text NOT NULL PRIMARY KEY,
value text)'''
values = [
'( "agg_tiles_hash", "A18D0C39730FB52E5A547F096F5C60E8" )',
'( "md-edit", "value - v2" )',
'( "md-new", "value - new" )',
'( "md-same", "value - same" )',
]
[[]]
type = 'table'
tbl_name = 'tiles_with_hash'
sql = '''
CREATE TABLE tiles_with_hash (
zoom_level integer NOT NULL,
tile_column integer NOT NULL,
tile_row integer NOT NULL,
tile_data blob,
tile_hash text,
PRIMARY KEY(zoom_level, tile_column, tile_row))'''
values = [
'( 3, 6, 7, blob(1F8B08000000000000FF2BCACF2F01005BF9F41604000000), "0A62AF3A2A3D38C0E8FF098A684C3EC1" )',
'( 5, 0, 0, blob(1F8B08000000000000FF2B4ECC4D050044F150FC04000000), "98AD49106F1CE0AA003027C229A70F7E" )',
'( 5, 0, 1, blob(1F8B08000000000000FF03000000000000000000), "163BE0A88C70CA629FD516DBAADAD96A" )',
'( 5, 1, 1, blob(1F8B08000000000000FF4B4DC92CD12D330200C489DF2607000000), "39794F1D8EEDEE7777FA89D4FD8D3154" )',
'( 5, 1, 2, blob(1F8B08000000000000FFCBCB2FD14DCD2D28A9040086D4937609000000), "94335B9F0CCBA1C1CB7F91B49EB34344" )',
'( 5, 1, 3, blob(1F8B08000000000000FF03000000000000000000), "163BE0A88C70CA629FD516DBAADAD96A" )',
'( 5, 3, 7, blob(1F8B08000000000000FFCB4B2D07004544E36B03000000), "E1A151E7B18F8B2C53F94DF4CA201026" )',
'( 5, 3, 8, blob(1F8B08000000000000FFCB4B2D07004544E36B03000000), "E1A151E7B18F8B2C53F94DF4CA201026" )',
'( 6, 0, 3, blob(1F8B08000000000000FF2B4ECC4D050044F150FC04000000), "98AD49106F1CE0AA003027C229A70F7E" )',
'( 6, 0, 5, blob(1F8B08000000000000FF33D4CD4E4D2DD035D42DCA050026D508C30B000000), "3CD93E79D6812F995906036D24282DBE" )',
'( 6, 1, 4, blob(1F8B08000000000000FF4B4DC92CD12D334A04006367987408000000), "59A99A65DF08F16CE984BDFA6EBC95CF" )',
]
[[]]
type = 'index'
tbl_name = 'metadata'
[[]]
type = 'index'
tbl_name = 'tiles_with_hash'
[[]]
type = 'view'
tbl_name = 'tiles'
sql = '''
CREATE VIEW tiles AS
SELECT zoom_level, tile_column, tile_row, tile_data FROM tiles_with_hash'''


@@ -0,0 +1,97 @@
---
source: mbtiles/tests/copy.rs
expression: actual_value
---
[[]]
type = 'table'
tbl_name = 'images'
sql = '''
CREATE TABLE images (
tile_id text NOT NULL PRIMARY KEY,
tile_data blob)'''
values = [
'( "0A62AF3A2A3D38C0E8FF098A684C3EC1", blob(1F8B08000000000000FF2BCACF2F01005BF9F41604000000) )',
'( "0D9BABF1C0099632D55F6274FB15419F", blob(1F8B08000000000000FF2B4ACDCD2F4B0500301D806806000000) )',
'( "163BE0A88C70CA629FD516DBAADAD96A", blob(1F8B08000000000000FF03000000000000000000) )',
'( "3CD93E79D6812F995906036D24282DBE", blob(1F8B08000000000000FF33D4CD4E4D2DD035D42DCA050026D508C30B000000) )',
'( "4DE1D83BFD7CB6ACE583AA7D5A18725A", blob(1F8B08000000000000FFCBCBCFD34DCD2D28A9040006173DB509000000) )',
'( "98AD49106F1CE0AA003027C229A70F7E", blob(1F8B08000000000000FF2B4ECC4D050044F150FC04000000) )',
'( "A4D55DDE3B49D78DD9846688A0786F2D", blob(1F8B08000000000000FF4B4DC92CD12D3304007ED8D6BF07000000) )',
]
[[]]
type = 'table'
tbl_name = 'map'
sql = '''
CREATE TABLE map (
zoom_level integer NOT NULL,
tile_column integer NOT NULL,
tile_row integer NOT NULL,
tile_id text,
PRIMARY KEY(zoom_level, tile_column, tile_row))'''
values = [
'( 3, 6, 7, "0A62AF3A2A3D38C0E8FF098A684C3EC1" )',
'( 5, 0, 0, "98AD49106F1CE0AA003027C229A70F7E" )',
'( 5, 0, 1, "163BE0A88C70CA629FD516DBAADAD96A" )',
'( 5, 1, 1, "A4D55DDE3B49D78DD9846688A0786F2D" )',
'( 5, 1, 2, "163BE0A88C70CA629FD516DBAADAD96A" )',
'( 5, 1, 3, "4DE1D83BFD7CB6ACE583AA7D5A18725A" )',
'( 5, 2, 2, "0D9BABF1C0099632D55F6274FB15419F" )',
'( 5, 2, 3, "163BE0A88C70CA629FD516DBAADAD96A" )',
'( 6, 0, 3, "98AD49106F1CE0AA003027C229A70F7E" )',
'( 6, 0, 5, "3CD93E79D6812F995906036D24282DBE" )',
'( 6, 1, 4, "A4D55DDE3B49D78DD9846688A0786F2D" )',
'( 6, 2, 6, "3CD93E79D6812F995906036D24282DBE" )',
]
[[]]
type = 'table'
tbl_name = 'metadata'
sql = '''
CREATE TABLE metadata (
name text NOT NULL PRIMARY KEY,
value text)'''
values = [
'( "agg_tiles_hash", "C0CA886B149CE416242AB2AFE8E641AD" )',
'( "md-edit", "value - v1" )',
'( "md-remove", "value - remove" )',
'( "md-same", "value - same" )',
]
[[]]
type = 'index'
tbl_name = 'images'
[[]]
type = 'index'
tbl_name = 'map'
[[]]
type = 'index'
tbl_name = 'metadata'
[[]]
type = 'view'
tbl_name = 'tiles'
sql = '''
CREATE VIEW tiles AS
SELECT map.zoom_level AS zoom_level,
map.tile_column AS tile_column,
map.tile_row AS tile_row,
images.tile_data AS tile_data
FROM map
JOIN images ON images.tile_id = map.tile_id'''
[[]]
type = 'view'
tbl_name = 'tiles_with_hash'
sql = '''
CREATE VIEW tiles_with_hash AS
SELECT
map.zoom_level AS zoom_level,
map.tile_column AS tile_column,
map.tile_row AS tile_row,
images.tile_data AS tile_data,
images.tile_id AS tile_hash
FROM map
JOIN images ON images.tile_id = map.tile_id'''


@@ -0,0 +1,97 @@
---
source: mbtiles/tests/copy.rs
expression: actual_value
---
[[]]
type = 'table'
tbl_name = 'images'
sql = '''
CREATE TABLE images (
tile_id text NOT NULL PRIMARY KEY,
tile_data blob)'''
values = [
'( "0A62AF3A2A3D38C0E8FF098A684C3EC1", blob(1F8B08000000000000FF2BCACF2F01005BF9F41604000000) )',
'( "163BE0A88C70CA629FD516DBAADAD96A", blob(1F8B08000000000000FF03000000000000000000) )',
'( "39794F1D8EEDEE7777FA89D4FD8D3154", blob(1F8B08000000000000FF4B4DC92CD12D330200C489DF2607000000) )',
'( "3CD93E79D6812F995906036D24282DBE", blob(1F8B08000000000000FF33D4CD4E4D2DD035D42DCA050026D508C30B000000) )',
'( "59A99A65DF08F16CE984BDFA6EBC95CF", blob(1F8B08000000000000FF4B4DC92CD12D334A04006367987408000000) )',
'( "94335B9F0CCBA1C1CB7F91B49EB34344", blob(1F8B08000000000000FFCBCB2FD14DCD2D28A9040086D4937609000000) )',
'( "98AD49106F1CE0AA003027C229A70F7E", blob(1F8B08000000000000FF2B4ECC4D050044F150FC04000000) )',
'( "E1A151E7B18F8B2C53F94DF4CA201026", blob(1F8B08000000000000FFCB4B2D07004544E36B03000000) )',
]
[[]]
type = 'table'
tbl_name = 'map'
sql = '''
CREATE TABLE map (
zoom_level integer NOT NULL,
tile_column integer NOT NULL,
tile_row integer NOT NULL,
tile_id text,
PRIMARY KEY(zoom_level, tile_column, tile_row))'''
values = [
'( 3, 6, 7, "0A62AF3A2A3D38C0E8FF098A684C3EC1" )',
'( 5, 0, 0, "98AD49106F1CE0AA003027C229A70F7E" )',
'( 5, 0, 1, "163BE0A88C70CA629FD516DBAADAD96A" )',
'( 5, 1, 1, "39794F1D8EEDEE7777FA89D4FD8D3154" )',
'( 5, 1, 2, "94335B9F0CCBA1C1CB7F91B49EB34344" )',
'( 5, 1, 3, "163BE0A88C70CA629FD516DBAADAD96A" )',
'( 5, 3, 7, "E1A151E7B18F8B2C53F94DF4CA201026" )',
'( 5, 3, 8, "E1A151E7B18F8B2C53F94DF4CA201026" )',
'( 6, 0, 3, "98AD49106F1CE0AA003027C229A70F7E" )',
'( 6, 0, 5, "3CD93E79D6812F995906036D24282DBE" )',
'( 6, 1, 4, "59A99A65DF08F16CE984BDFA6EBC95CF" )',
]
[[]]
type = 'table'
tbl_name = 'metadata'
sql = '''
CREATE TABLE metadata (
name text NOT NULL PRIMARY KEY,
value text)'''
values = [
'( "agg_tiles_hash", "A18D0C39730FB52E5A547F096F5C60E8" )',
'( "md-edit", "value - v2" )',
'( "md-new", "value - new" )',
'( "md-same", "value - same" )',
]
[[]]
type = 'index'
tbl_name = 'images'
[[]]
type = 'index'
tbl_name = 'map'
[[]]
type = 'index'
tbl_name = 'metadata'
[[]]
type = 'view'
tbl_name = 'tiles'
sql = '''
CREATE VIEW tiles AS
SELECT map.zoom_level AS zoom_level,
map.tile_column AS tile_column,
map.tile_row AS tile_row,
images.tile_data AS tile_data
FROM map
JOIN images ON images.tile_id = map.tile_id'''
[[]]
type = 'view'
tbl_name = 'tiles_with_hash'
sql = '''
CREATE VIEW tiles_with_hash AS
SELECT
map.zoom_level AS zoom_level,
map.tile_column AS tile_column,
map.tile_row AS tile_row,
images.tile_data AS tile_data,
images.tile_id AS tile_hash
FROM map
JOIN images ON images.tile_id = map.tile_id'''


@@ -1,3 +1,3 @@
-[INFO ] Comparing ./tests/fixtures/mbtiles/world_cities.mbtiles (flat) and ./tests/fixtures/mbtiles/world_cities_modified.mbtiles (flat) into a new file tests/mbtiles_temp_files/world_cities_bindiff.mbtiles (flat) with bin-diff
+[INFO ] Comparing ./tests/fixtures/mbtiles/world_cities.mbtiles (flat) and ./tests/fixtures/mbtiles/world_cities_modified.mbtiles (flat) into a new file tests/mbtiles_temp_files/world_cities_bindiff.mbtiles (flat) with bin-diff on gzip-ed tiles
 [INFO ] Finished processing 1 bindiff tiles
-[INFO ] Adding a new metadata value agg_tiles_hash = 8D0CA32D7634E1278476D3D22A0F7B01 in tests/mbtiles_temp_files/world_cities_bindiff.mbtiles
+[INFO ] Adding a new metadata value agg_tiles_hash = 0A21AAF2C177B86DA3342A4F65794E49 in tests/mbtiles_temp_files/world_cities_bindiff.mbtiles


@@ -1,5 +1,5 @@
 [INFO ] The patch file tests/mbtiles_temp_files/world_cities_bindiff.mbtiles expects to be applied to a tileset with agg_tiles_hash=84792BF4EE9AEDDC5B1A60E707011FEE, and should result in hash 578FB5BD64746C39E3D344662947FD0D after applying
-[INFO ] Applying patch from tests/mbtiles_temp_files/world_cities_bindiff.mbtiles (flat) to ./tests/fixtures/mbtiles/world_cities.mbtiles (flat) into a new file tests/mbtiles_temp_files/world_cities_modified2.mbtiles (flat) with bin-diff
+[INFO ] Applying patch from tests/mbtiles_temp_files/world_cities_bindiff.mbtiles (flat) to ./tests/fixtures/mbtiles/world_cities.mbtiles (flat) into a new file tests/mbtiles_temp_files/world_cities_modified2.mbtiles (flat) with bin-diff on gzip-ed tiles
 [INFO ] Finished processing 1 bindiff tiles
-[INFO ] Adding a new metadata value agg_tiles_hash = F863A28565B06142AED3E01EF8198CF5 in tests/mbtiles_temp_files/world_cities_modified2.mbtiles
+[INFO ] Adding a new metadata value agg_tiles_hash = 623863EF20ABEFCB4E30EEFBD82FFFDC in tests/mbtiles_temp_files/world_cities_modified2.mbtiles
 [INFO ] Skipping agg_tiles_hash_after_apply validation because re-gzip-ing could produce different tile data. Each bindiff-ed tile was still verified with a hash value


@@ -1,2 +1,2 @@
 [INFO ] Comparing ./tests/fixtures/mbtiles/world_cities.mbtiles (flat) and ./tests/fixtures/mbtiles/world_cities_modified.mbtiles (flat) into a new file tests/mbtiles_temp_files/world_cities_diff.mbtiles (flat)
-[INFO ] Adding a new metadata value agg_tiles_hash = 958D6E4F59AC4B5334076BADBC505CAB in tests/mbtiles_temp_files/world_cities_diff.mbtiles
+[INFO ] Adding a new metadata value agg_tiles_hash = C9B90B490DF381D93F8CD8102A469F47 in tests/mbtiles_temp_files/world_cities_diff.mbtiles

Binary file not shown.