Implement postgres auto-publish (#546)

* NEW: support for #512 - pg table/function auto-discovery
  * can filter schemas
  * can use patterns like `{schema}.{table}.{column}` and `{schema}.{function}`
* NEW: add a `disable_bounds` bool flag to allow disabling the bounds computation
* reworked integration tests to use YAML
Yuri Astrakhan 2023-01-03 11:09:41 -05:00 committed by GitHub
parent 64ad07ce36
commit 928a700150
19 changed files with 401 additions and 196 deletions


@ -465,7 +465,7 @@ If you don't want to expose all of your tables and functions, you can list your
martin --config config.yaml
```
You can find an example of a configuration file [here](https://github.com/maplibre/martin/blob/main/tests/config.yaml).
You may wish to auto-generate a config file with the `--save-config` argument. This generates a config file with all of your tables and functions; you can then edit it to remove any sources you don't want to expose.
```yaml
# Connection keep alive timeout [default: 75]
@ -489,6 +489,28 @@ postgres:
# Maximum connections pool size [default: 20]
pool_size: 20
# Control the automatic generation of bounds for spatial tables [default: false]
# If bounds generation is enabled (the default), Martin will spend some time on startup computing geometry bounds.
disable_bounds: false
# Enable automatic discovery of tables and functions. You may set this to `false` to disable.
auto_publish:
# Optionally limit to just these schemas
from_schemas:
- public
- my_schema
# Here we enable auto-discovery of both tables and functions.
# To enable just one of them, omit the other or set it to false;
# explicitly setting one to true disables the other.
# E.g. `tables: false` enables just the functions auto-discovery.
tables:
# Optionally set a custom source ID based on the table name
id_format: 'table.{schema}.{table}.{column}'
# Add more schemas to the ones listed above
from_schemas: my_other_schema
functions:
id_format: '{schema}.{function}'
# Associative arrays of table sources
tables:
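
The `id_format` patterns above are expanded by plain placeholder substitution, exactly as the configurator code later in this commit does with `String::replace`. A minimal sketch (the schema, table, and column values here are illustrative, not taken from this commit):

```rust
// Sketch of `id_format` expansion for a discovered table source.
fn main() {
    let id_format = "table.{schema}.{table}.{column}";
    let source_id = id_format
        .replace("{schema}", "public")
        .replace("{table}", "table_source")
        .replace("{column}", "geom");
    assert_eq!(source_id, "table.public.table_source.geom");
}
```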


@ -8,6 +8,9 @@ use crate::utils::OneOrMany;
#[derive(clap::Args, Debug, PartialEq, Default)]
#[command(about, version)]
pub struct PgArgs {
/// Disable the automatic generation of bounds for spatial tables.
#[arg(short = 'b', long)]
pub disable_bounds: bool,
/// Loads trusted root certificates from a file. The file should contain a sequence of PEM-formatted CA certificates.
#[cfg(feature = "ssl")]
#[arg(long)]
@ -46,7 +49,14 @@ impl PgArgs {
danger_accept_invalid_certs,
default_srid,
pool_size: self.pool_size,
..Default::default()
disable_bounds: if self.disable_bounds {
Some(true)
} else {
None
},
auto_publish: None,
tables: None,
functions: None,
})
.collect();
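
The `Option<bool>` mapping above means the CLI flag can only force `disable_bounds` on; when the flag is absent, the value stays `None` and does not clobber a config-file setting. A standalone sketch of the same idea (the helper name is hypothetical):

```rust
// Hypothetical helper mirroring the mapping above: the flag can only
// produce `Some(true)`; when not passed, `None` lets the config file win.
fn flag_to_option(disable_bounds: bool) -> Option<bool> {
    if disable_bounds { Some(true) } else { None }
}

fn main() {
    assert_eq!(flag_to_option(true), Some(true));
    assert_eq!(flag_to_option(false), None);
}
```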


@ -7,7 +7,6 @@ use actix_web::dev::Server;
use clap::Parser;
use log::{error, info, log_enabled};
use martin::args::{Args, OsEnv};
use martin::pg::PgConfig;
use martin::srv::{new_server, RESERVED_KEYWORDS};
use martin::Error::ConfigWriteError;
use martin::{read_config, Config, IdResolver, Result};
@ -46,12 +45,7 @@ async fn start(args: Args) -> Result<Server> {
.write_all(yaml.as_bytes())
.map_err(|e| ConfigWriteError(e, file_name.clone()))?;
}
} else if config
.postgres
.iter()
.any(|v| v.as_slice().iter().any(PgConfig::is_autodetect))
{
info!("Martin has been configured with automatic settings.");
} else {
info!("Use --save-config to save or print Martin configuration.");
}


@ -23,7 +23,7 @@ mod test_utils;
pub use crate::args::Env;
pub use crate::config::{read_config, Config};
pub use crate::source::{IdResolver, Source, Sources, Xyz};
pub use crate::utils::{Error, Result};
pub use crate::utils::{BoolOrObject, Error, OneOrMany, Result};
// Ensure README.md contains valid code
#[cfg(doctest)]


@ -6,9 +6,9 @@ use crate::config::report_unrecognized_config;
use crate::pg::config_function::FuncInfoSources;
use crate::pg::config_table::TableInfoSources;
use crate::pg::configurator::PgBuilder;
use crate::pg::utils::{Result, Schemas};
use crate::pg::utils::Result;
use crate::source::{IdResolver, Sources};
use crate::utils::{is_false, sorted_opt_map};
use crate::utils::{sorted_opt_map, BoolOrObject, OneOrMany};
pub trait PgInfo {
fn format_id(&self) -> String;
@ -27,19 +27,30 @@ pub struct PgConfig {
#[serde(skip_serializing_if = "Option::is_none")]
pub default_srid: Option<i32>,
#[serde(skip_serializing_if = "Option::is_none")]
pub disable_bounds: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub pool_size: Option<u32>,
#[serde(skip)]
pub auto_tables: Option<Schemas>,
#[serde(skip)]
pub auto_functions: Option<Schemas>,
#[serde(skip_serializing_if = "Option::is_none")]
pub auto_publish: Option<BoolOrObject<PgCfgPublish>>,
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(serialize_with = "sorted_opt_map")]
pub tables: Option<TableInfoSources>,
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(serialize_with = "sorted_opt_map")]
pub functions: Option<FuncInfoSources>,
#[serde(skip)]
pub run_autodiscovery: bool,
}
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)]
pub struct PgCfgPublish {
pub from_schemas: Option<OneOrMany<String>>,
pub tables: Option<BoolOrObject<PgCfgPublishType>>,
pub functions: Option<BoolOrObject<PgCfgPublishType>>,
}
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)]
pub struct PgCfgPublishType {
pub from_schemas: Option<OneOrMany<String>>,
pub id_format: Option<String>,
}
impl PgConfig {
@ -55,7 +66,9 @@ impl PgConfig {
report_unrecognized_config(&format!("functions.{k}."), &v.unrecognized);
}
}
self.run_autodiscovery = self.tables.is_none() && self.functions.is_none();
if self.tables.is_none() && self.functions.is_none() && self.auto_publish.is_none() {
self.auto_publish = Some(BoolOrObject::Bool(true));
}
Ok(self)
}
@ -70,11 +83,6 @@ impl PgConfig {
tables.extend(funcs);
Ok(tables)
}
#[must_use]
pub fn is_autodetect(&self) -> bool {
self.run_autodiscovery
}
}
#[cfg(test)]
@ -102,7 +110,7 @@ mod tests {
&Config {
postgres: Some(One(PgConfig {
connection_string: some("postgresql://postgres@localhost/db"),
run_autodiscovery: true,
auto_publish: Some(BoolOrObject::Bool(true)),
..Default::default()
})),
..Default::default()
@ -122,12 +130,12 @@ mod tests {
postgres: Some(Many(vec![
PgConfig {
connection_string: some("postgres://postgres@localhost:5432/db"),
run_autodiscovery: true,
auto_publish: Some(BoolOrObject::Bool(true)),
..Default::default()
},
PgConfig {
connection_string: some("postgresql://postgres@localhost:5433/db"),
run_autodiscovery: true,
auto_publish: Some(BoolOrObject::Bool(true)),
..Default::default()
},
])),
@ -144,7 +152,7 @@ mod tests {
connection_string: 'postgres://postgres@localhost:5432/db'
default_srid: 4326
pool_size: 20
tables:
table_source:
schema: public
@ -161,7 +169,7 @@ mod tests {
geometry_type: GEOMETRY
properties:
gid: int4
functions:
function_zxy_query:
schema: public
@ -213,3 +221,10 @@ mod tests {
);
}
}
/// Helper to skip serialization if the value is `false`
#[allow(clippy::trivially_copy_pass_by_ref)]
#[cfg(feature = "ssl")]
pub fn is_false(value: &bool) -> bool {
!*value
}
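
Per the `finalize` change above, auto-discovery turns on only when no `tables`, `functions`, or `auto_publish` section was configured. A hedged sketch of that default, assuming `serde_yaml` (already used by this crate's tests) and the `finalize` signature suggested by the diff:

```rust
#[test]
fn defaults_to_auto_publish() {
    // Not part of this commit: a config carrying only a connection string
    // should end up with full auto-discovery after `finalize`.
    let cfg: PgConfig =
        serde_yaml::from_str("connection_string: postgresql://postgres@localhost/db").unwrap();
    let cfg = cfg.finalize().unwrap();
    assert_eq!(cfg.auto_publish, Some(BoolOrObject::Bool(true)));
}
```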


@ -13,18 +13,38 @@ use crate::pg::pg_source::{PgSource, PgSqlInfo};
use crate::pg::pool::Pool;
use crate::pg::table_source::{calc_srid, get_table_sources, merge_table_info, table_to_query};
use crate::pg::utils::PgError::InvalidTableExtent;
use crate::pg::utils::{Result, Schemas};
use crate::pg::utils::Result;
use crate::source::{IdResolver, Sources};
use crate::utils::{find_info, normalize_key, InfoMap};
use crate::utils::{find_info, normalize_key, BoolOrObject, InfoMap, OneOrMany};
pub type SqlFuncInfoMapMap = InfoMap<InfoMap<(PgSqlInfo, FunctionInfo)>>;
pub type SqlTableInfoMapMapMap = InfoMap<InfoMap<InfoMap<TableInfo>>>;
#[derive(Debug, PartialEq)]
pub struct PgBuilderPublish {
id_format: String,
schemas: Option<HashSet<String>>,
}
impl PgBuilderPublish {
pub fn new(
is_function: bool,
id_format: Option<&String>,
schemas: Option<HashSet<String>>,
) -> Self {
let id_format = id_format
.cloned()
.unwrap_or_else(|| (if is_function { "{function}" } else { "{table}" }).to_string());
Self { id_format, schemas }
}
}
pub struct PgBuilder {
pool: Pool,
default_srid: Option<i32>,
auto_functions: Schemas,
auto_tables: Schemas,
disable_bounds: bool,
auto_functions: Option<PgBuilderPublish>,
auto_tables: Option<PgBuilderPublish>,
id_resolver: IdResolver,
tables: TableInfoSources,
functions: FuncInfoSources,
@ -33,15 +53,16 @@ pub struct PgBuilder {
impl PgBuilder {
pub async fn new(config: &PgConfig, id_resolver: IdResolver) -> Result<Self> {
let pool = Pool::new(config).await?;
let auto = config.run_autodiscovery;
Ok(Self {
pool,
default_srid: config.default_srid,
auto_functions: config.auto_functions.clone().unwrap_or(Schemas::Bool(auto)),
auto_tables: config.auto_tables.clone().unwrap_or(Schemas::Bool(auto)),
disable_bounds: config.disable_bounds.unwrap_or_default(),
id_resolver,
tables: config.tables.clone().unwrap_or_default(),
functions: config.functions.clone().unwrap_or_default(),
auto_functions: new_auto_publish(config, true),
auto_tables: new_auto_publish(config, false),
})
}
@ -70,24 +91,45 @@ impl PgBuilder {
let Some(cfg_inf) = merge_table_info(self.default_srid, &id2, cfg_inf, src_inf) else { continue };
warn_on_rename(id, &id2, "Table");
info!("Configured {dup}source {id2} from {}", summary(&cfg_inf));
pending.push(table_to_query(id2, cfg_inf, self.pool.clone()));
pending.push(table_to_query(
id2,
cfg_inf,
self.pool.clone(),
self.disable_bounds,
));
}
// Sort the discovered sources by schema, table and geometry column to ensure consistent behavior
for schema in self.auto_tables.get(|| all_tables.keys()) {
let Some(schema) = normalize_key(&all_tables, &schema, "schema", "") else { continue };
let tables = all_tables.remove(&schema).unwrap();
for (table, geoms) in tables.into_iter().sorted_by(by_key) {
for (column, mut src_inf) in geoms.into_iter().sorted_by(by_key) {
if used.contains(&(schema.as_str(), table.as_str(), column.as_str())) {
continue;
if let Some(auto_tables) = &self.auto_tables {
let schemas = auto_tables
.schemas
.as_ref()
.cloned()
.unwrap_or_else(|| all_tables.keys().cloned().collect());
for schema in schemas.iter().sorted() {
let Some(schema) = normalize_key(&all_tables, schema, "schema", "") else { continue };
let tables = all_tables.remove(&schema).unwrap();
for (table, geoms) in tables.into_iter().sorted_by(by_key) {
for (column, mut src_inf) in geoms.into_iter().sorted_by(by_key) {
if used.contains(&(schema.as_str(), table.as_str(), column.as_str())) {
continue;
}
let source_id = auto_tables
.id_format
.replace("{schema}", &schema)
.replace("{table}", &table)
.replace("{column}", &column);
let id2 = self.resolve_id(&source_id, &src_inf);
let Some(srid) = calc_srid(&src_inf.format_id(), &id2, src_inf.srid, 0, self.default_srid) else { continue };
src_inf.srid = srid;
info!("Discovered source {id2} from {}", summary(&src_inf));
pending.push(table_to_query(
id2,
src_inf,
self.pool.clone(),
self.disable_bounds,
));
}
let source_id = &table;
let id2 = self.resolve_id(source_id, &src_inf);
let Some(srid) = calc_srid(&src_inf.format_id(), &id2, src_inf.srid, 0, self.default_srid) else { continue };
src_inf.srid = srid;
info!("Discovered source {id2} from {}", summary(&src_inf));
pending.push(table_to_query(id2, src_inf, self.pool.clone()));
}
}
}
@ -139,22 +181,32 @@ impl PgBuilder {
}
// Sort the discovered sources by schema and function name to ensure consistent behavior
for schema in self.auto_functions.get(|| all_funcs.keys()) {
let Some(schema) = normalize_key(&all_funcs, &schema, "schema", "") else { continue };
let funcs = all_funcs.remove(&schema).unwrap();
for (name, (pg_sql, src_inf)) in funcs.into_iter().sorted_by(by_key) {
if used.contains(&(schema.as_str(), name.as_str())) {
continue;
if let Some(auto_funcs) = &self.auto_functions {
let schemas = auto_funcs
.schemas
.as_ref()
.cloned()
.unwrap_or_else(|| all_funcs.keys().cloned().collect());
for schema in schemas.iter().sorted() {
let Some(schema) = normalize_key(&all_funcs, schema, "schema", "") else { continue; };
let funcs = all_funcs.remove(&schema).unwrap();
for (name, (pg_sql, src_inf)) in funcs.into_iter().sorted_by(by_key) {
if used.contains(&(schema.as_str(), name.as_str())) {
continue;
}
let source_id = auto_funcs
.id_format
.replace("{schema}", &schema)
.replace("{function}", &name);
let id2 = self.resolve_id(&source_id, &src_inf);
self.add_func_src(&mut res, id2.clone(), &src_inf, pg_sql.clone());
info!("Discovered source {id2} from function {}", pg_sql.signature);
debug!("{}", pg_sql.query);
info_map.insert(id2, src_inf);
}
let source_id = &name;
let id2 = self.resolve_id(source_id, &src_inf);
self.add_func_src(&mut res, id2.clone(), &src_inf, pg_sql.clone());
info!("Discovered source {id2} from function {}", pg_sql.signature);
debug!("{}", pg_sql.query);
info_map.insert(id2, src_inf);
}
}
Ok((res, info_map))
}
@ -169,6 +221,41 @@ impl PgBuilder {
}
}
fn new_auto_publish(config: &PgConfig, is_function: bool) -> Option<PgBuilderPublish> {
let default = |schemas| Some(PgBuilderPublish::new(is_function, None, schemas));
if let Some(bo_a) = &config.auto_publish {
match bo_a {
BoolOrObject::Object(a) => match if is_function { &a.functions } else { &a.tables } {
Some(bo_i) => match bo_i {
BoolOrObject::Object(item) => Some(PgBuilderPublish::new(
is_function,
item.id_format.as_ref(),
merge_opt_hs(&a.from_schemas, &item.from_schemas),
)),
BoolOrObject::Bool(true) => default(merge_opt_hs(&a.from_schemas, &None)),
BoolOrObject::Bool(false) => None,
},
// If only one of auto_publish.tables/auto_publish.functions is set, the missing one
// becomes its inverse: an object or `true` on one side disables the other,
// while an explicit `false` enables it.
None => match if is_function { &a.tables } else { &a.functions } {
Some(bo_i) => match bo_i {
BoolOrObject::Object(_) | BoolOrObject::Bool(true) => None,
BoolOrObject::Bool(false) => default(merge_opt_hs(&a.from_schemas, &None)),
},
None => default(merge_opt_hs(&a.from_schemas, &None)),
},
},
BoolOrObject::Bool(true) => default(None),
BoolOrObject::Bool(false) => None,
}
} else if config.tables.is_some() || config.functions.is_some() {
None
} else {
default(None)
}
}
fn warn_on_rename(old_id: &String, new_id: &String, typ: &str) {
if old_id != new_id {
warn!("{typ} source {old_id} was renamed to {new_id} due to ID conflict");
@ -192,3 +279,126 @@ fn summary(info: &TableInfo) -> String {
fn by_key<T>(a: &(String, T), b: &(String, T)) -> Ordering {
a.0.cmp(&b.0)
}
/// Merge two optional lists of strings into a single hashset
fn merge_opt_hs(
a: &Option<OneOrMany<String>>,
b: &Option<OneOrMany<String>>,
) -> Option<HashSet<String>> {
if let Some(a) = a {
let mut res: HashSet<_> = a.iter().cloned().collect();
if let Some(b) = b {
res.extend(b.iter().cloned());
}
Some(res)
} else {
b.as_ref().map(|b| b.iter().cloned().collect())
}
}
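// Behavior illustration (comment added for exposition, not in the commit;
// `One(..)` is shorthand for `OneOrMany::One`):
//   merge_opt_hs(&Some(One("public")), &Some(One("osm"))) == Some({"public", "osm"})
//   merge_opt_hs(&Some(One("public")), &None)             == Some({"public"})
//   merge_opt_hs(&None, &None)                            == None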
#[cfg(test)]
mod tests {
use indoc::indoc;
use super::*;
#[allow(clippy::unnecessary_wraps)]
fn builder(id_format: &str, schemas: Option<&[&str]>) -> Option<PgBuilderPublish> {
Some(PgBuilderPublish {
id_format: id_format.to_string(),
schemas: schemas.map(|s| s.iter().map(|s| (*s).to_string()).collect()),
})
}
fn parse_yaml(content: &str) -> PgConfig {
serde_yaml::from_str(content).unwrap()
}
#[test]
fn test_auto_publish_no_auto() {
let config = parse_yaml("{}");
let res = new_auto_publish(&config, false);
assert_eq!(res, builder("{table}", None));
let res = new_auto_publish(&config, true);
assert_eq!(res, builder("{function}", None));
let config = parse_yaml("tables: {}");
assert_eq!(new_auto_publish(&config, false), None);
assert_eq!(new_auto_publish(&config, true), None);
let config = parse_yaml("functions: {}");
assert_eq!(new_auto_publish(&config, false), None);
assert_eq!(new_auto_publish(&config, true), None);
}
#[test]
fn test_auto_publish_bool() {
let config = parse_yaml("auto_publish: true");
let res = new_auto_publish(&config, false);
assert_eq!(res, builder("{table}", None));
let res = new_auto_publish(&config, true);
assert_eq!(res, builder("{function}", None));
let config = parse_yaml("auto_publish: false");
assert_eq!(new_auto_publish(&config, false), None);
assert_eq!(new_auto_publish(&config, true), None);
}
#[test]
fn test_auto_publish_obj_bool() {
let config = parse_yaml(indoc! {"
auto_publish:
from_schemas: public
tables: true"});
let res = new_auto_publish(&config, false);
assert_eq!(res, builder("{table}", Some(&["public"])));
assert_eq!(new_auto_publish(&config, true), None);
let config = parse_yaml(indoc! {"
auto_publish:
from_schemas: public
functions: true"});
assert_eq!(new_auto_publish(&config, false), None);
let res = new_auto_publish(&config, true);
assert_eq!(res, builder("{function}", Some(&["public"])));
let config = parse_yaml(indoc! {"
auto_publish:
from_schemas: public
tables: false"});
assert_eq!(new_auto_publish(&config, false), None);
let res = new_auto_publish(&config, true);
assert_eq!(res, builder("{function}", Some(&["public"])));
let config = parse_yaml(indoc! {"
auto_publish:
from_schemas: public
functions: false"});
let res = new_auto_publish(&config, false);
assert_eq!(res, builder("{table}", Some(&["public"])));
assert_eq!(new_auto_publish(&config, true), None);
}
#[test]
fn test_auto_publish_obj_obj() {
let config = parse_yaml(indoc! {"
auto_publish:
from_schemas: public
tables:
from_schemas: osm
id_format: '{schema}.{table}'"});
let res = new_auto_publish(&config, false);
assert_eq!(res, builder("{schema}.{table}", Some(&["public", "osm"])));
assert_eq!(new_auto_publish(&config, true), None);
let config = parse_yaml(indoc! {"
auto_publish:
tables:
from_schemas:
- osm
- public"});
let res = new_auto_publish(&config, false);
assert_eq!(res, builder("{table}", Some(&["public", "osm"])));
assert_eq!(new_auto_publish(&config, true), None);
}
}


@ -8,9 +8,11 @@ mod pool;
mod table_source;
mod utils;
pub use config::PgConfig;
pub use config::{PgCfgPublish, PgCfgPublishType, PgConfig};
pub use config_function::FunctionInfo;
pub use config_table::TableInfo;
pub use function_source::get_function_sources;
pub use pool::{Pool, POOL_SIZE_DEFAULT};
pub use utils::{PgError, Schemas};
pub use utils::PgError;
pub use crate::utils::BoolOrObject;


@ -71,13 +71,14 @@ pub async fn table_to_query(
id: String,
mut info: TableInfo,
pool: Pool,
disable_bounds: bool,
) -> Result<(String, PgSqlInfo, TableInfo)> {
let schema = escape_identifier(&info.schema);
let table = escape_identifier(&info.table);
let geometry_column = escape_identifier(&info.geometry_column);
let srid = info.srid;
if info.bounds.is_none() {
if info.bounds.is_none() && !disable_bounds {
info.bounds = pool
.get()
.await?


@ -1,10 +1,8 @@
use std::collections::HashMap;
use itertools::Itertools;
use postgis::{ewkb, LineString, Point, Polygon};
use postgres::types::Json;
use semver::Version;
use serde::{Deserialize, Serialize};
use tilejson::Bounds;
use crate::source::{UrlQuery, Xyz};
@ -105,33 +103,3 @@ pub enum PgError {
UrlQuery,
),
}
/// A list of schemas to include in the discovery process, or a boolean to
/// indicate whether to run discovery at all.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(untagged)]
pub enum Schemas {
Bool(bool),
List(Vec<String>),
}
impl Schemas {
/// Returns a list of schemas to include in the discovery process.
/// If self is `true`, returns a list of all schemas produced by the callback.
pub fn get<'a, I, F>(&self, keys: F) -> Vec<String>
where
I: Iterator<Item = &'a String>,
F: FnOnce() -> I,
{
match self {
Schemas::List(lst) => lst.clone(),
Schemas::Bool(all) => {
if *all {
keys().sorted().map(String::to_string).collect()
} else {
Vec::new()
}
}
}
}
}


@ -1,11 +1,11 @@
use itertools::Itertools;
use std::cmp::Ordering::Equal;
use std::collections::{BTreeMap, HashMap};
use std::io;
use std::path::PathBuf;
use itertools::Itertools;
use log::{error, info, warn};
use serde::{Serialize, Serializer};
use serde::{Deserialize, Serialize, Serializer};
use tilejson::{Bounds, TileJSON, VectorLayer};
use crate::pg::PgError;
@ -105,6 +105,14 @@ pub fn is_valid_zoom(zoom: i32, minzoom: Option<u8>, maxzoom: Option<u8>) -> boo
&& maxzoom.map_or(true, |maxzoom| zoom <= maxzoom.into())
}
/// A serde helper that accepts either a boolean or an object of type `T`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(untagged)]
pub enum BoolOrObject<T> {
Bool(bool),
Object(T),
}
#[must_use]
pub fn create_tilejson(
name: String,
@ -148,9 +156,3 @@ pub fn sorted_opt_map<S: Serializer, T: Serialize>(
})
.serialize(serializer)
}
/// Helper to skip serialization if the value is `false`
#[allow(clippy::trivially_copy_pass_by_ref)]
pub fn is_false(value: &bool) -> bool {
!*value
}
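
Because `BoolOrObject` is `#[serde(untagged)]`, a single config key accepts either a boolean or a nested object, which is what makes `auto_publish: true` and the structured form interchangeable. A minimal sketch; the `Probe` wrapper is hypothetical, and `serde_yaml` is assumed, as in the crate's tests:

```rust
use std::collections::HashMap;

use martin::BoolOrObject;
use serde::Deserialize;

// Hypothetical wrapper used only for this illustration.
#[derive(Debug, Deserialize)]
struct Probe {
    auto_publish: BoolOrObject<HashMap<String, String>>,
}

fn main() {
    let b: Probe = serde_yaml::from_str("auto_publish: true").unwrap();
    assert!(matches!(b.auto_publish, BoolOrObject::Bool(true)));

    let o: Probe = serde_yaml::from_str("auto_publish:\n  id_format: '{table}'").unwrap();
    assert!(matches!(o.auto_publish, BoolOrObject::Object(_)));
}
```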


@ -1,17 +1,14 @@
listen_addresses: localhost:3111
postgres:
default_srid: 900913
disable_bounds: true
auto_publish: true
tables:
MixPoints:
schema: MixedCase
table: MixPoints
srid: 4326
geometry_column: Geom
bounds:
- -170.94984639004662
- -84.20025580733805
- 167.70892858284475
- 74.23573284753762
extent: 4096
buffer: 64
clip_geom: true
@ -24,11 +21,6 @@ postgres:
table: points1
srid: 4326
geometry_column: geom
bounds:
- -179.27313970132585
- -67.52518563265659
- 162.60117193735186
- 84.93092095128937
extent: 4096
buffer: 64
clip_geom: true
@ -40,11 +32,6 @@ postgres:
table: points2
srid: 4326
geometry_column: geom
bounds:
- -174.050750735362
- -80.46177157848345
- 179.11187181086706
- 81.13068764165727
extent: 4096
buffer: 64
clip_geom: true
@ -56,11 +43,6 @@ postgres:
table: points3857
srid: 3857
geometry_column: geom
bounds:
- -161.40590777554058
- -81.50727021609012
- 172.51549126768532
- 84.2440187164111
extent: 4096
buffer: 64
clip_geom: true
@ -72,11 +54,6 @@ postgres:
table: points_empty_srid
srid: 900913
geometry_column: geom
bounds:
- -162.35196679784573
- -84.49919770031491
- 178.47294677445652
- 82.7000012450467
extent: 4096
buffer: 64
clip_geom: true
@ -88,11 +65,6 @@ postgres:
table: table_source
srid: 4326
geometry_column: geom
bounds:
- -2.0
- -1.0
- 142.84131509869133
- 45.0
extent: 4096
buffer: 64
clip_geom: true
@ -104,11 +76,6 @@ postgres:
table: table_source_multiple_geom
srid: 4326
geometry_column: geom1
bounds:
- -136.62076049706184
- -78.3350299285405
- 176.56297743499888
- 75.78731065954437
extent: 4096
buffer: 64
clip_geom: true
@ -121,11 +88,6 @@ postgres:
table: table_source_multiple_geom
srid: 4326
geometry_column: geom2
bounds:
- -136.62076049706184
- -78.3350299285405
- 176.56297743499888
- 75.78731065954437
extent: 4096
buffer: 64
clip_geom: true


@ -1,12 +1,11 @@
use ctor::ctor;
use indoc::indoc;
use itertools::Itertools;
use martin::pg::{get_function_sources, Schemas};
use martin::pg::get_function_sources;
use martin::Xyz;
#[path = "pg_utils.rs"]
mod utils;
#[allow(clippy::wildcard_imports)]
use utils::*;
pub mod utils;
pub use utils::*;
#[ctor]
fn init() {
@ -35,7 +34,7 @@ async fn get_function_sources_ok() {
#[actix_rt::test]
async fn function_source_tilejson() {
let mock = mock_sources(mock_cfg("connection_string: $DATABASE_URL")).await;
let mock = mock_sources(mock_pgcfg("connection_string: $DATABASE_URL")).await;
let tilejson = source(&mock, "function_zxy_query").get_tilejson();
assert_eq!(tilejson.tilejson, "2.2.0");
@ -50,7 +49,7 @@ async fn function_source_tilejson() {
#[actix_rt::test]
async fn function_source_tile() {
let mock = mock_sources(mock_cfg("connection_string: $DATABASE_URL")).await;
let mock = mock_sources(mock_pgcfg("connection_string: $DATABASE_URL")).await;
let src = source(&mock, "function_zxy_query");
let tile = src
.get_tile(&Xyz { z: 0, x: 0, y: 0 }, &None)
@ -68,9 +67,13 @@ async fn function_source_tile() {
#[actix_rt::test]
async fn function_source_schemas() {
let mut cfg = mock_cfg("connection_string: $DATABASE_URL");
cfg.auto_functions = Some(Schemas::List(vec!["MixedCase".to_owned()]));
cfg.auto_tables = Some(Schemas::Bool(false));
let cfg = mock_pgcfg(indoc! {"
connection_string: $DATABASE_URL
auto_publish:
tables: false
functions:
from_schemas: MixedCase
"});
let sources = mock_sources(cfg).await.0;
assert_eq!(
sources.keys().sorted().collect::<Vec<_>>(),


@ -6,10 +6,8 @@ use indoc::indoc;
use martin::srv::IndexEntry;
use tilejson::{Bounds, TileJSON};
#[path = "pg_utils.rs"]
mod utils;
#[allow(clippy::wildcard_imports)]
use utils::*;
pub mod utils;
pub use utils::*;
#[ctor]
fn init() {
@ -18,7 +16,7 @@ fn init() {
macro_rules! create_app {
($sources:literal) => {{
let sources = mock_sources(mock_cfg($sources)).await.0;
let sources = mock_sources(mock_pgcfg($sources)).await.0;
let state = crate::utils::mock_app_data(sources).await;
::actix_web::test::init_service(
::actix_web::App::new()
@ -835,7 +833,7 @@ async fn get_health_returns_ok() {
#[actix_rt::test]
async fn tables_feature_id() {
let cfg = mock_cfg(indoc! {"
let cfg = mock_pgcfg(indoc! {"
connection_string: $DATABASE_URL
tables:
id_and_prop:


@ -1,14 +1,11 @@
use martin::pg::Schemas;
use std::collections::HashMap;
use ctor::ctor;
use indoc::indoc;
use martin::Xyz;
#[path = "pg_utils.rs"]
mod utils;
#[allow(clippy::wildcard_imports)]
use utils::*;
pub mod utils;
pub use utils::*;
#[ctor]
fn init() {
@ -17,7 +14,7 @@ fn init() {
#[actix_rt::test]
async fn table_source() {
let mock = mock_sources(mock_cfg("connection_string: $DATABASE_URL")).await;
let mock = mock_sources(mock_pgcfg("connection_string: $DATABASE_URL")).await;
assert!(!mock.0.is_empty());
let source = table(&mock, "table_source");
@ -41,7 +38,7 @@ async fn table_source() {
#[actix_rt::test]
async fn tables_tilejson_ok() {
let mock = mock_sources(mock_cfg("connection_string: $DATABASE_URL")).await;
let mock = mock_sources(mock_pgcfg("connection_string: $DATABASE_URL")).await;
let tilejson = source(&mock, "table_source").get_tilejson();
assert_eq!(tilejson.tilejson, "2.2.0");
@ -56,7 +53,7 @@ async fn tables_tilejson_ok() {
#[actix_rt::test]
async fn tables_tile_ok() {
let mock = mock_sources(mock_cfg("connection_string: $DATABASE_URL")).await;
let mock = mock_sources(mock_pgcfg("connection_string: $DATABASE_URL")).await;
let tile = source(&mock, "table_source")
.get_tile(&Xyz { z: 0, x: 0, y: 0 }, &None)
.await
@ -67,7 +64,7 @@ async fn tables_tile_ok() {
#[actix_rt::test]
async fn tables_srid_ok() {
let mock = mock_sources(mock_cfg(indoc! {"
let mock = mock_sources(mock_pgcfg(indoc! {"
connection_string: $DATABASE_URL
default_srid: 900913
"}))
@ -88,7 +85,7 @@ async fn tables_srid_ok() {
#[actix_rt::test]
async fn tables_multiple_geom_ok() {
let mock = mock_sources(mock_cfg("connection_string: $DATABASE_URL")).await;
let mock = mock_sources(mock_pgcfg("connection_string: $DATABASE_URL")).await;
let source = table(&mock, "table_source_multiple_geom");
assert_eq!(source.geometry_column, "geom1");
@ -99,9 +96,13 @@ async fn tables_multiple_geom_ok() {
#[actix_rt::test]
async fn table_source_schemas() {
let mut cfg = mock_cfg("connection_string: $DATABASE_URL");
cfg.auto_functions = Some(Schemas::Bool(false));
cfg.auto_tables = Some(Schemas::List(vec!["MixedCase".to_owned()]));
let cfg = mock_pgcfg(indoc! {"
connection_string: $DATABASE_URL
auto_publish:
tables:
from_schemas: MixedCase
functions: false
"});
let sources = mock_sources(cfg).await.0;
assert_eq!(sources.keys().collect::<Vec<_>>(), vec!["MixPoints"],);
}


@ -64,8 +64,8 @@ test_pbf()
$CURL "$URL" > "$FILENAME"
if [[ $OSTYPE == linux* ]]; then
./tests/vtzero-check "$FILENAME"
./tests/vtzero-show "$FILENAME" > "$FILENAME.txt"
./tests/fixtures/vtzero-check "$FILENAME"
./tests/fixtures/vtzero-show "$FILENAME" > "$FILENAME.txt"
fi
}
@ -93,7 +93,7 @@ echo "Test auto configured Martin"
TEST_OUT_DIR="$(dirname "$0")/output/auto"
mkdir -p "$TEST_OUT_DIR"
ARG=(--default-srid 900913 --save-config "$(dirname "$0")/output/generated_config.yaml")
ARG=(--default-srid 900913 --disable-bounds --save-config "$(dirname "$0")/output/generated_config.yaml")
set -x
$MARTIN_BIN "${ARG[@]}" 2>&1 | tee test_log_1.txt &
PROCESS_ID=`jobs -p`

tests/utils/mod.rs Normal file

@ -0,0 +1,30 @@
#![allow(clippy::missing_panics_doc)]
#![allow(clippy::redundant_clone)]
#![allow(clippy::unused_async)]
mod pg_utils;
use actix_web::web::Data;
use martin::srv::AppState;
use martin::{Config, Sources};
pub use pg_utils::*;
#[path = "../../src/utils/test_utils.rs"]
mod test_utils;
#[allow(clippy::wildcard_imports)]
pub use test_utils::*;
pub async fn mock_app_data(sources: Sources) -> Data<AppState> {
Data::new(AppState { sources })
}
#[must_use]
pub fn mock_cfg(yaml: &str) -> Config {
let Ok(db_url) = std::env::var("DATABASE_URL") else {
panic!("DATABASE_URL env var is not set. Unable to do integration tests");
};
let env = FauxEnv(vec![("DATABASE_URL", db_url.into())].into_iter().collect());
let mut cfg: Config = subst::yaml::from_str(yaml, &env).unwrap();
cfg.finalize().unwrap();
cfg
}


@ -1,19 +1,11 @@
#![allow(clippy::missing_panics_doc)]
#![allow(clippy::redundant_clone)]
#![allow(clippy::unused_async)]
use actix_web::web::Data;
pub use martin::args::Env;
use martin::pg::{PgConfig, Pool, TableInfo};
use martin::srv::AppState;
use martin::{IdResolver, Source, Sources};
#[path = "../src/utils/test_utils.rs"]
mod test_utils;
#[allow(clippy::wildcard_imports)]
pub use test_utils::*;
use crate::FauxEnv;
//
// This file is used by many tests and benchmarks using the #[path] attribute.
// This file is used by many tests and benchmarks.
// Each function should allow dead_code, as it might not be used by every test file.
//
@ -21,7 +13,7 @@ pub type MockSource = (Sources, PgConfig);
#[allow(dead_code)]
#[must_use]
pub fn mock_cfg(yaml: &str) -> PgConfig {
pub fn mock_pgcfg(yaml: &str) -> PgConfig {
let Ok(db_url) = std::env::var("DATABASE_URL") else {
panic!("DATABASE_URL env var is not set. Unable to do integration tests");
};
@ -33,7 +25,7 @@ pub fn mock_cfg(yaml: &str) -> PgConfig {
#[allow(dead_code)]
pub async fn mock_pool() -> Pool {
let cfg = mock_cfg("connection_string: $DATABASE_URL");
let cfg = mock_pgcfg("connection_string: $DATABASE_URL");
let res = Pool::new(&cfg).await;
res.expect("Failed to create pool")
}
@ -45,11 +37,6 @@ pub async fn mock_sources(mut config: PgConfig) -> MockSource {
(res, config)
}
#[allow(dead_code)]
pub async fn mock_app_data(sources: Sources) -> Data<AppState> {
Data::new(AppState { sources })
}
#[allow(dead_code)]
#[must_use]
pub fn table<'a>(mock: &'a MockSource, name: &str) -> &'a TableInfo {