cleanup func docs, lints (#842)

This also fixes lints introduced by the new Rust version.
Yuri Astrakhan 2023-08-27 21:07:29 -04:00 committed by GitHub
parent b3fb720a94
commit 1131c28c99
9 changed files with 75 additions and 36 deletions

View File

@@ -1,4 +1,5 @@
 [workspace]
+resolver = "2"
 members = ["martin", "martin-tile-utils", "martin-mbtiles"]
 
 [workspace.package]

View File

@@ -97,9 +97,9 @@ You can access these params using [json operators](https://www.postgresql.org/doc
 ...WHERE answer = (query_params->'objectParam'->>'answer')::int;
 ```
-## Tilejson
+## Modifying TileJSON
-Martin will generate a basic [tilejson](https://github.com/mapbox/tilejson-spec) for each `function source`, eg, if there is a function `public.function_zxy_query_jsonb` , the basic `tilejson` will be:
+Martin will automatically generate a basic [TileJSON](https://github.com/mapbox/tilejson-spec) manifest for each function source that will contain the name and description of the function, plus optionally `minzoom`, `maxzoom`, and `bounds` (if they were specified via one of the configuration methods). For example, if there is a function `public.function_zxy_query_jsonb`, the default `TileJSON` might look like this (note that the URL will be automatically adjusted to match the request host):
 ```json
 {
@@ -107,28 +107,32 @@ Martin will generate a basic [tilejson](https://github.com/mapbox/tilejson-spec)
   "tiles": [
     "http://localhost:3111/function_zxy_query_jsonb/{z}/{x}/{y}"
   ],
-  "description": "public.function_zxy_query_jsonb",
-  "name": "function_zxy_query_jsonb"
+  "name": "function_zxy_query_jsonb",
+  "description": "public.function_zxy_query_jsonb"
 }
 ```
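For context, a function source is any PostgreSQL function with a tile-producing signature; as the builder code later in this commit notes, only functions shaped roughly like `(z, x, y, ...) -> bytea` are considered. Below is a minimal sketch of such a function, assuming a hypothetical table `my_table` with a `geom` column in Web Mercator (SRID 3857); the body is illustrative, not Martin's required implementation:

```sql
-- Hypothetical function source matching the docs above.
-- Assumes a table `my_table` with a PostGIS `geom` column in SRID 3857.
CREATE OR REPLACE FUNCTION public.function_zxy_query_jsonb(
    z integer, x integer, y integer, query_params jsonb
) RETURNS bytea AS $$
    SELECT ST_AsMVT(tile, 'my_layer_id', 4096, 'geom')
    FROM (
        -- Clip geometries to the requested tile envelope
        SELECT ST_AsMVTGeom(geom, ST_TileEnvelope(z, x, y)) AS geom
        FROM my_table
        WHERE geom && ST_TileEnvelope(z, x, y)
    ) AS tile
    WHERE geom IS NOT NULL;
$$ LANGUAGE sql STABLE PARALLEL SAFE;
```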
-But it's not too helpful for clients like `maplibre`. There are no `bounds`, `minzoom`, `maxzoom`, and even `vector_layers`.
-### Comments as tilejson
+### TileJSON in SQL Comments
-For a helpful `tilejson`, Martin will read comments on function source as `tilejson`. This comment will be merged into the basic `tilejson` as a patch.
+To modify the automatically generated `TileJSON`, you can add valid JSON as an SQL comment on the function. Martin will merge the function comment into the generated `TileJSON` using a [JSON Merge Patch](https://www.rfc-editor.org/rfc/rfc7386). The following example sets the `description` and adds a `vector_layers` entry to the `TileJSON`.
-1. Martin will generate a basic `tilejson` for each `function source`.
-2. Matin will try to read comment on `function source` as `tilejson`.
-3. If `step 2` failed, Martin will log a warn, and use the basic `tilejson` directly.
-4. Else, Martin uses the generated `tilejson` as the "base", and applies all fields from the comment's `tilejson` as a "patch".
-To add a comment as `tilejson` with validation:
+**Note:** This example uses `EXECUTE` to ensure that the comment is valid JSON (or else PostgreSQL will throw an error). You can use other methods of creating SQL comments.
 ```sql
 DO $do$ BEGIN
-    EXECUTE 'COMMENT ON FUNCTION YOUR_FUNCTION (ARG1_TYPE,ARG2_TYPE,..ARGN_TYPE) IS $tj$' || $$
-    YOUR TILEJSO HERE
+    EXECUTE 'COMMENT ON FUNCTION my_function_name(INT4, INT4, INT4) IS $tj$' || $$
+    {
+        "description": "my new description",
+        "vector_layers": [
+            {
+                "id": "my_layer_id",
+                "fields": {
+                    "field1": "String",
+                    "field2": "Number"
+                }
+            }
+        ]
+    }
     $$::json || '$tj$';
 END $do$;
 ```
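For illustration (a sketch of the RFC 7386 merge semantics, not actual Martin output): if a comment like the one above were attached to `public.function_zxy_query_jsonb`, the merged `TileJSON` would combine the generated base with the comment's fields, with the comment winning on conflicts:

```json
{
  "tiles": [
    "http://localhost:3111/function_zxy_query_jsonb/{z}/{x}/{y}"
  ],
  "name": "function_zxy_query_jsonb",
  "description": "my new description",
  "vector_layers": [
    {
      "id": "my_layer_id",
      "fields": {
        "field1": "String",
        "field2": "Number"
      }
    }
  ]
}
```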

View File

@@ -1,7 +1,7 @@
-use sqlite_hashes::rusqlite;
 use std::path::PathBuf;
 use martin_tile_utils::TileInfo;
+use sqlite_hashes::rusqlite;
 use crate::mbtiles::MbtType;

View File

@@ -14,7 +14,8 @@ use futures::TryStreamExt;
 use log::{debug, info, warn};
 use martin_tile_utils::{Format, TileInfo};
 use serde_json::{Value as JSONValue, Value};
-use sqlite_hashes::{register_md5_function, rusqlite::Connection as RusqliteConnection};
+use sqlite_hashes::register_md5_function;
+use sqlite_hashes::rusqlite::Connection as RusqliteConnection;
 use sqlx::{query, Row, SqliteExecutor};
 use tilejson::{tilejson, Bounds, Center, TileJSON};

View File

@@ -5,8 +5,8 @@ use std::path::{Path, PathBuf};
 #[cfg(feature = "cli")]
 use clap::{builder::ValueParser, error::ErrorKind, Args, ValueEnum};
-use sqlite_hashes::rusqlite::params_from_iter;
-use sqlite_hashes::{register_md5_function, rusqlite::Connection as RusqliteConnection};
+use sqlite_hashes::register_md5_function;
+use sqlite_hashes::rusqlite::{params_from_iter, Connection as RusqliteConnection};
 use sqlx::sqlite::SqliteConnectOptions;
 use sqlx::{query, Connection, Row, SqliteConnection};

View File

@@ -60,6 +60,8 @@ impl PgBuilder {
         })
     }
 
+    // FIXME: this function has gotten too long due to the new formatting rules, needs to be refactored
+    #[allow(clippy::too_many_lines)]
     pub async fn instantiate_tables(&self) -> Result<(Sources, TableInfoSources)> {
         let mut db_tables_info = query_available_tables(&self.pool).await?;
@@ -74,15 +76,29 @@
                 }
             }
-            let Some(db_tables) = find_info(&db_tables_info, &cfg_inf.schema, "schema", id) else { continue };
-            let Some(db_geo_columns) = find_info(db_tables, &cfg_inf.table, "table", id) else { continue };
-            let Some(db_inf) = find_info(db_geo_columns, &cfg_inf.geometry_column, "geometry column", id) else { continue };
+            let Some(db_tables) = find_info(&db_tables_info, &cfg_inf.schema, "schema", id) else {
+                continue;
+            };
+            let Some(db_geo_columns) = find_info(db_tables, &cfg_inf.table, "table", id) else {
+                continue;
+            };
+            let Some(db_inf) = find_info(
+                db_geo_columns,
+                &cfg_inf.geometry_column,
+                "geometry column",
+                id,
+            ) else {
+                continue;
+            };
             let dup = !used.insert((&cfg_inf.schema, &cfg_inf.table, &cfg_inf.geometry_column));
             let dup = if dup { "duplicate " } else { "" };
             let id2 = self.resolve_id(id, cfg_inf);
-            let Some(merged_inf) = merge_table_info(self.default_srid, &id2, cfg_inf, db_inf) else { continue };
+            let Some(merged_inf) = merge_table_info(self.default_srid, &id2, cfg_inf, db_inf)
+            else {
+                continue;
+            };
             warn_on_rename(id, &id2, "Table");
             info!("Configured {dup}source {id2} from {}", summary(&merged_inf));
             pending.push(table_to_query(
@@ -108,7 +124,9 @@
         );
         for schema in schemas.iter().sorted() {
-            let Some(schema) = normalize_key(&db_tables_info, schema, "schema", "") else { continue };
+            let Some(schema) = normalize_key(&db_tables_info, schema, "schema", "") else {
+                continue;
+            };
             let db_tables = db_tables_info.remove(&schema).unwrap();
             for (table, geoms) in db_tables.into_iter().sorted_by(by_key) {
                 for (geom_column, mut db_inf) in geoms.into_iter().sorted_by(by_key) {
@@ -121,7 +139,11 @@
                         .replace("{table}", &table)
                         .replace("{column}", &geom_column);
                     let id2 = self.resolve_id(&source_id, &db_inf);
-                    let Some(srid) = calc_srid(&db_inf.format_id(), &id2, db_inf.srid, 0, self.default_srid) else { continue };
+                    let Some(srid) =
+                        calc_srid(&db_inf.format_id(), &id2, db_inf.srid, 0, self.default_srid)
+                    else {
+                        continue;
+                    };
                     db_inf.srid = srid;
                     update_id_column(&id2, &mut db_inf, auto_tables);
                     info!("Discovered source {id2} from {}", summary(&db_inf));
@@ -164,12 +186,16 @@
         let mut used = HashSet::<(&str, &str)>::new();
         for (id, cfg_inf) in &self.functions {
-            let Some(db_funcs) = find_info(&db_funcs_info, &cfg_inf.schema, "schema", id) else { continue };
+            let Some(db_funcs) = find_info(&db_funcs_info, &cfg_inf.schema, "schema", id) else {
+                continue;
+            };
             if db_funcs.is_empty() {
                 warn!("No functions found in schema {}. Only functions like (z,x,y) -> bytea and similar are considered. See README.md", cfg_inf.schema);
                 continue;
             }
-            let Some((pg_sql, _)) = find_info(db_funcs, &cfg_inf.function, "function", id) else { continue };
+            let Some((pg_sql, _)) = find_info(db_funcs, &cfg_inf.function, "function", id) else {
+                continue;
+            };
             let dup = !used.insert((&cfg_inf.schema, &cfg_inf.function));
             let dup = if dup { "duplicate " } else { "" };
@@ -197,7 +223,9 @@
         );
         for schema in schemas.iter().sorted() {
-            let Some(schema) = normalize_key(&db_funcs_info, schema, "schema", "") else { continue; };
+            let Some(schema) = normalize_key(&db_funcs_info, schema, "schema", "") else {
+                continue;
+            };
             let db_funcs = db_funcs_info.remove(&schema).unwrap();
             for (func, (pg_sql, db_inf)) in db_funcs.into_iter().sorted_by(by_key) {
                 if used.contains(&(schema.as_str(), func.as_str())) {
@@ -237,10 +265,14 @@
 /// Try to find any ID column in a list of table columns (properties) that match one of the given `id_column` values.
 /// If found, modify `id_column` value on the table info.
 fn update_id_column(id: &str, inf: &mut TableInfo, auto_tables: &PgBuilderAuto) {
-    let Some(props) = inf.properties.as_mut() else { return };
-    let Some(try_columns) = &auto_tables.id_columns else { return };
+    let Some(props) = inf.properties.as_mut() else {
+        return;
+    };
+    let Some(try_columns) = &auto_tables.id_columns else {
+        return;
+    };
-    for key in try_columns.iter() {
+    for key in try_columns {
         let (column, typ) = if let Some(typ) = props.get(key) {
             (key, typ)
         } else {
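Nearly all of the reformatting in this file comes from `let ... else` statements: the syntax has been stable since Rust 1.65, and rustfmt only began formatting it in Rust 1.72, which expands the previous single-line `else { continue };` forms. A minimal, self-contained sketch of the construct, using hypothetical names:

```rust
// Standalone sketch (hypothetical names) of the `let ... else` pattern
// used throughout this file: when the pattern on the left does not
// match, the `else` block runs and must diverge
// (`continue`, `return`, `break`, or a panic).
fn first_even(ids: &[&str]) -> Option<String> {
    for id in ids {
        // Skip entries that fail the fallible parse instead of unwrapping.
        let Ok(num) = id.parse::<u32>() else {
            continue;
        };
        if num % 2 == 0 {
            return Some(format!("even-{num}"));
        }
    }
    None
}
```

The requirement that the `else` block diverge is what makes `let ... else` a good fit for the skip-and-continue loops in the builder code.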

View File

@@ -50,14 +50,14 @@ impl PgPool {
         let version = get_conn(&pool, id.as_str())
             .await?
             .query_one(
-                r#"
+                r"
 SELECT
     (regexp_matches(
         PostGIS_Lib_Version(),
         '^(\d+\.\d+\.\d+)',
         'g'
     ))[1] as version;
-"#,
+",
                 &[],
             )
             .await

View File

@@ -23,7 +23,7 @@ pub fn json_to_hashmap(value: &serde_json::Value) -> InfoMap<String> {
 #[must_use]
 pub fn query_to_json(query: &UrlQuery) -> Json<InfoMap<serde_json::Value>> {
     let mut query_as_json = HashMap::new();
-    for (k, v) in query.iter() {
+    for (k, v) in query {
         let json_value: serde_json::Value =
             serde_json::from_str(v).unwrap_or_else(|_| serde_json::Value::String(v.clone()));

View File

@@ -1,9 +1,10 @@
-use log::warn;
 use std::collections::hash_map::Entry;
 use std::collections::{HashMap, HashSet};
 use std::fmt::Write;
 use std::sync::{Arc, Mutex};
+
+use log::warn;
 
 #[derive(Debug, Default, Clone)]
 pub struct IdResolver {
     /// name -> unique name