martin/justfile

#!/usr/bin/env just --justfile
set shell := ["bash", "-c"]
#export DATABASE_URL="postgres://postgres:postgres@localhost:5411/db"
# Set additional database connection parameters, e.g. just PGPARAMS='keepalives=0&keepalives_idle=15' psql
PGPARAMS := ""
PGPORT := "5411"
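# With the defaults above, the export below expands to postgres://postgres:postgres@localhost:5411/db;
# when PGPARAMS is set, it is appended as a query string, e.g. ...?keepalives=0&keepalives_idle=15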
export DATABASE_URL := "postgres://postgres:postgres@localhost:" + PGPORT + "/db" + (if PGPARAMS != "" { "?" + PGPARAMS } else { "" })
export CARGO_TERM_COLOR := "always"
#export RUST_LOG := "debug"
#export RUST_LOG := "sqlx::query=info,trace"
#export RUST_BACKTRACE := "1"
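# Use the standalone docker-compose binary when it is available, otherwise fall back to the `docker compose` plugin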
dockercompose := `if docker-compose --version &> /dev/null; then echo "docker-compose"; else echo "docker compose"; fi`
@_default:
    {{just_executable()}} --list --unsorted
# Start Martin server
run *ARGS="--webui enable-for-all":
    cargo run -p martin -- {{ARGS}}
# Run martin-cp command
cp *ARGS:
    cargo run --bin martin-cp -- {{ARGS}}
# Run mbtiles command
mbtiles *ARGS:
    cargo run -p mbtiles -- {{ARGS}}
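    # Example (assumes the mbtiles `summary` subcommand): just mbtiles summary tests/fixtures/mbtiles/world_cities.mbtiles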
# Start release-compiled Martin server and a test database
run-release *ARGS: start
    cargo run --release -- {{ARGS}}
# Start Martin server and open a test page
debug-page *ARGS: start
    open tests/debug.html # run will not exit, so open debug page first
    {{just_executable()}} run {{ARGS}}
# Run PSQL utility against the test database
psql *ARGS:
    psql {{ARGS}} {{quote(DATABASE_URL)}}
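    # Example: just psql -c 'SELECT version();'  (extra arguments are passed to psql before the connection string)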
# Run pg_dump utility against the test database
pg_dump *ARGS:
    pg_dump {{ARGS}} {{quote(DATABASE_URL)}}
# Perform cargo clean to delete all build files
clean: clean-test stop && clean-martin-ui
    cargo clean
clean-martin-ui:
    rm -rf martin-ui/dist martin-ui/node_modules
    cargo clean -p static-files
# Delete test output files
[private]
clean-test:
    rm -rf tests/output
# Start a test database
start: (docker-up "db") docker-is-ready
# Start an ssl-enabled test database
start-ssl: (docker-up "db-ssl") docker-is-ready
# Start an ssl-enabled test database that requires a client certificate
start-ssl-cert: (docker-up "db-ssl-cert") docker-is-ready
# Start a legacy test database
start-legacy: (docker-up "db-legacy") docker-is-ready
# Start a specific test database, e.g. db or db-legacy
[private]
docker-up name: start-pmtiles-server
    {{dockercompose}} up -d {{name}}
# Wait for the test database to be ready
[private]
docker-is-ready:
    {{dockercompose}} run -T --rm db-is-ready
alias _down := stop
alias _stop-db := stop
# Restart the test database
restart:
    # sometimes Just optimizes targets, so here we force stop & start by using external just executable
    {{just_executable()}} stop
    {{just_executable()}} start
# Stop the test database
stop:
    {{dockercompose}} down --remove-orphans
# Start test server for testing HTTP pmtiles
start-pmtiles-server:
    {{dockercompose}} up -d fileserver
# Run benchmark tests
bench:
    cargo bench --bench bench
    open target/criterion/report/index.html
# Run benchmark tests showing a flamegraph
flamegraph:
    cargo bench --bench bench -- --profile-time=10
    /opt/google/chrome/chrome "file://$PWD/target/criterion/get_table_source_tile/profile/flamegraph.svg"
# Start release-compiled Martin server and a test database
bench-server: start
    cargo run --release -- tests/fixtures/mbtiles tests/fixtures/pmtiles
# Run HTTP request benchmarks using the oha tool. Use together with `just bench-server`
bench-http: (cargo-install "oha")
@echo "ATTENTION: Make sure Martin was started with just bench-server"
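    # Each endpoint below gets a short warm-up run with discarded output, followed by a 60-second measured run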
@echo "Warming up..."
oha -z 5s --no-tui http://localhost:3000/function_zxy_query/18/235085/122323 > /dev/null
2023-12-26 07:55:20 +03:00
oha -z 60s http://localhost:3000/function_zxy_query/18/235085/122323
oha -z 5s --no-tui http://localhost:3000/png/0/0/0 > /dev/null
oha -z 60s http://localhost:3000/png/0/0/0
oha -z 5s --no-tui http://localhost:3000/stamen_toner__raster_CC-BY-ODbL_z3/0/0/0 > /dev/null
oha -z 60s http://localhost:3000/stamen_toner__raster_CC-BY-ODbL_z3/0/0/0
# Run all tests using a test database
test: start (test-cargo "--all-targets") test-doc test-int
# Run all tests using an SSL connection to a test database. Expected output won't match.
test-ssl: start-ssl (test-cargo "--all-targets") test-doc clean-test
    tests/test.sh
# Run all tests using an SSL connection with client cert to a test database. Expected output won't match.
test-ssl-cert: start-ssl-cert
    #!/usr/bin/env bash
    set -euxo pipefail
    # copy client cert to the tests folder from the docker container
    KEY_DIR=target/certs
    mkdir -p $KEY_DIR
    docker cp martin-db-ssl-cert-1:/etc/ssl/certs/ssl-cert-snakeoil.pem $KEY_DIR/ssl-cert-snakeoil.pem
    docker cp martin-db-ssl-cert-1:/etc/ssl/private/ssl-cert-snakeoil.key $KEY_DIR/ssl-cert-snakeoil.key
    # export DATABASE_URL="$DATABASE_URL?sslmode=verify-full&sslrootcert=$KEY_DIR/ssl-cert-snakeoil.pem&sslcert=$KEY_DIR/ssl-cert-snakeoil.pem&sslkey=$KEY_DIR/ssl-cert-snakeoil.key"
    export PGSSLROOTCERT="$KEY_DIR/ssl-cert-snakeoil.pem"
    export PGSSLCERT="$KEY_DIR/ssl-cert-snakeoil.pem"
    export PGSSLKEY="$KEY_DIR/ssl-cert-snakeoil.key"
    {{just_executable()}} test-cargo --all-targets
    {{just_executable()}} clean-test
    {{just_executable()}} test-doc
    tests/test.sh
# Run all tests using the oldest supported version of the database
test-legacy: start-legacy (test-cargo "--all-targets") test-doc test-int
# Run Rust unit tests (cargo test)
test-cargo *ARGS:
    cargo test {{ARGS}}
# Run Rust doc tests
test-doc *ARGS:
    cargo test --doc {{ARGS}}
# Run integration tests
test-int: clean-test install-sqlx
    #!/usr/bin/env bash
    set -euo pipefail
    tests/test.sh
    if [ "{{os()}}" != "linux" ]; then
        echo "** Integration tests are only supported on Linux"
        echo "** Skipping diffing with the expected output"
    else
        echo "** Comparing actual output with expected output..."
        if ! diff --brief --recursive --new-file tests/output tests/expected; then
            echo "** Expected output does not match actual output"
            echo "** If this is expected, run 'just bless' to update expected output"
            exit 1
        else
            echo "** Expected output matches actual output"
        fi
    fi
# Run AWS Lambda smoke test against SAM local
test-lambda:
    tests/test-aws-lambda.sh
# Re-run tests and save their output as the new expected output (ordering is important; in some cases run `bless-tests` before the others)
bless: restart clean-test bless-insta-martin bless-insta-mbtiles bless-tests bless-int
# Bless integration tests
bless-int:
    rm -rf tests/temp
    tests/test.sh
    rm -rf tests/expected && mv tests/output tests/expected
# Run tests with the bless-tests feature
bless-tests:
    cargo test -p martin --features bless-tests
# Run insta tests for the mbtiles crate and accept new snapshots as the expected output
bless-insta-mbtiles *ARGS: (cargo-install "cargo-insta")
    #rm -rf mbtiles/tests/snapshots
    cargo insta test --accept --unreferenced=auto -p mbtiles {{ARGS}}
# Run insta tests for the martin crate and accept new snapshots as the expected output
bless-insta-martin *ARGS: (cargo-install "cargo-insta")
    cargo insta test --accept --unreferenced=auto -p martin {{ARGS}}
# Run insta tests for martin-cp and accept new snapshots as the expected output
bless-insta-cp *ARGS: (cargo-install "cargo-insta")
    cargo insta test --accept --bin martin-cp {{ARGS}}
# Build and open mdbook documentation
book: (cargo-install "mdbook")
    mdbook serve docs --open --port 8321
# Build debian package
package-deb: (cargo-install "cargo-deb")
    cargo deb -v -p martin --output target/debian/martin.deb
# Build and open code documentation
docs:
    cargo doc --no-deps --open
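# Coverage rebuilds with RUSTFLAGS=-Cinstrument-coverage, runs the unit and integration tests, then aggregates the .profraw files with grcov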
# Run code coverage on tests and save the output in the coverage directory. FORMAT can be html or lcov.
coverage FORMAT='html': (cargo-install "grcov")
    #!/usr/bin/env bash
    set -euo pipefail
    if ! rustup component list | grep llvm-tools-preview > /dev/null; then \
        echo "llvm-tools-preview could not be found. Installing..." ;\
        rustup component add llvm-tools-preview ;\
    fi

    {{just_executable()}} clean
    {{just_executable()}} start

    PROF_DIR=target/prof
    mkdir -p "$PROF_DIR"
    PROF_DIR=$(realpath "$PROF_DIR")

    OUTPUT_RESULTS_DIR=target/coverage/{{FORMAT}}
    mkdir -p "$OUTPUT_RESULTS_DIR"

    export CARGO_INCREMENTAL=0
    export RUSTFLAGS=-Cinstrument-coverage
    # Avoid problems with relative paths
    export LLVM_PROFILE_FILE=$PROF_DIR/cargo-test-%p-%m.profraw
    export MARTIN_PORT=3111

    cargo test --all-targets
    tests/test.sh

    set -x
    grcov --binary-path ./target/debug \
        -s . \
        -t {{FORMAT}} \
        --branch \
        --ignore 'benches/*' \
        --ignore 'tests/*' \
        --ignore-not-existing \
        -o target/coverage/{{FORMAT}} \
        --llvm \
        "$PROF_DIR"
    { set +x; } 2>/dev/null

    # if this is html, open it in the browser
    if [ "{{FORMAT}}" = "html" ]; then
        open "$OUTPUT_RESULTS_DIR/index.html"
    fi
# Build and run martin docker image
docker-run *ARGS:
    docker run -it --rm --net host -e DATABASE_URL -v $PWD/tests:/tests ghcr.io/maplibre/martin {{ARGS}}
# Do any git command, ensuring that the testing environment is set up. Accepts the same arguments as git.
[no-exit-message]
git *ARGS: start
    git {{ARGS}}
# Print the connection string for the test database
print-conn-str:
    @echo {{quote(DATABASE_URL)}}
# Run cargo fmt and cargo clippy
lint: fmt clippy
# Run cargo fmt
fmt:
    cargo fmt --all -- --check
# Reformat markdown files using markdownlint-cli2
fmt-md:
    docker run -it --rm -v $PWD:/workdir davidanson/markdownlint-cli2 --config /workdir/.github/files/config.markdownlint-cli2.jsonc --fix
# Run Nightly cargo fmt, ordering imports
fmt2:
    cargo +nightly fmt -- --config imports_granularity=Module,group_imports=StdExternalCrate
# Run cargo check
check:
    RUSTFLAGS='-D warnings' cargo check --all-targets -p martin-tile-utils
    RUSTFLAGS='-D warnings' cargo check --all-targets -p mbtiles
    RUSTFLAGS='-D warnings' cargo check --all-targets -p mbtiles --no-default-features
    RUSTFLAGS='-D warnings' cargo check --all-targets -p martin
    RUSTFLAGS='-D warnings' cargo check --all-targets -p martin --no-default-features
    RUSTFLAGS='-D warnings' cargo check --all-targets -p martin --no-default-features --features fonts
    RUSTFLAGS='-D warnings' cargo check --all-targets -p martin --no-default-features --features mbtiles
    RUSTFLAGS='-D warnings' cargo check --all-targets -p martin --no-default-features --features pmtiles
    RUSTFLAGS='-D warnings' cargo check --all-targets -p martin --no-default-features --features postgres
    RUSTFLAGS='-D warnings' cargo check --all-targets -p martin --no-default-features --features sprites
# Verify doc build
check-doc:
    RUSTDOCFLAGS="-D warnings" cargo doc --no-deps --workspace
# Run cargo clippy
clippy:
    cargo clippy --workspace --all-targets -- -D warnings
# Validate markdown URLs with markdown-link-check
clippy-md:
    docker run -it --rm -v ${PWD}:/workdir --entrypoint sh ghcr.io/tcort/markdown-link-check -c \
        'echo -e "/workdir/README.md\n$(find /workdir/docs/src -name "*.md")" | tr "\n" "\0" | xargs -0 -P 5 -n1 -I{} markdown-link-check --config /workdir/.github/files/markdown.links.config.json {}'
# Update all dependencies including the breaking ones
update-breaking:
    cargo +nightly -Z unstable-options update --breaking
# These steps automatically run before git push via a git hook
git-pre-push: env-info restart fmt clippy check-doc test check
# Get environment info
[private]
env-info:
    @echo "OS is {{os()}}, arch is {{arch()}}"
    {{just_executable()}} --version
    rustc --version
    cargo --version
    rustup --version
# Update the sqlx offline query metadata for the SQLite database schema.
prepare-sqlite: install-sqlx
    mkdir -p mbtiles/.sqlx
    cd mbtiles && cargo sqlx prepare --database-url sqlite://$PWD/../tests/fixtures/mbtiles/world_cities.mbtiles -- --lib --tests
# Install the sqlx CLI if it is not already installed.
[private]
install-sqlx: (cargo-install "cargo-sqlx" "sqlx-cli" "--no-default-features" "--features" "sqlite,native-tls")
# Check if a certain Cargo command is installed, and install it if needed
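# Prefers cargo-binstall (prebuilt binaries) when it is available, otherwise falls back to cargo install --locked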
[private]
cargo-install $COMMAND $INSTALL_CMD="" *ARGS="":
    #!/usr/bin/env sh
    set -eu
    if ! command -v $COMMAND > /dev/null; then
        if ! command -v cargo-binstall > /dev/null; then
            echo "$COMMAND could not be found. Installing it with cargo install ${INSTALL_CMD:-$COMMAND} --locked {{ARGS}}"
            cargo install ${INSTALL_CMD:-$COMMAND} --locked {{ARGS}}
        else
            echo "$COMMAND could not be found. Installing it with cargo binstall ${INSTALL_CMD:-$COMMAND} --locked {{ARGS}}"
            cargo binstall ${INSTALL_CMD:-$COMMAND} --locked {{ARGS}}
        fi
    fi