set positional-arguments := true

set shell := ["bash", "-c"]

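# list all available recipes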
default:
  just --list

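# build all targets in release mode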
build:
  cargo build --release --all-targets

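# check dependencies for known security advisories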
audit:
  cargo audit

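# attempt to automatically fix advisories reported by cargo audit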
audit-fix:
  cargo audit fix

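# check formatting of Rust, Prettier-managed and Nix files without modifying them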
format:
  cargo fmt --check
  npx --yes prettier --check .
  ! command -v nix || nix fmt -- --check .

alias fmt := format

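# apply automatic fixes from clippy, rustfmt, prettier and nix fmt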
fix:
  cargo clippy --all-targets --no-deps --fix --allow-no-vcs
  cargo fmt
  just fix-format
  ! command -v nix || nix fmt

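# reformat files managed by Prettier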
fix-format:
  npx --yes prettier --write .

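# run the custom connector, auth webhook and engine as local processes, prefixing their output with ts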
run-local-with-shell:
  #!/usr/bin/env bash
  cargo run --bin custom-connector | ts "custom-connector:" &
  OTLP_ENDPOINT=http://localhost:4317 \
    cargo run --bin dev-auth-webhook | ts "dev-auth-webhook:" &
  RUST_LOG=DEBUG cargo run --bin engine -- \
    --otlp-endpoint http://localhost:4317 \
    --authn-config-path static/auth/auth_config.json \
    --metadata-path static/sample-schema.json \
    --expose-internal-errors | ts "engine: " &
  wait

# start all the docker deps for running tests (not engine)
start-docker-test-deps:
  # start connectors and wait for health
  docker compose -f ci.docker-compose.yaml up --wait postgres postgres_connector custom_connector custom_connector_ndc_v01 auth_hook

# start all the docker runtime deps for the engine
start-docker-run-deps:
  # start auth_hook and jaeger
  docker compose up --wait auth_hook jaeger

# pull / build all docker deps
docker-refresh: stop-docker
  docker compose -f ci.docker-compose.yaml pull postgres_connector
  docker compose -f ci.docker-compose.yaml build custom_connector

alias refresh-docker := docker-refresh

# stop all the docker deps
stop-docker:
  docker compose -f ci.docker-compose.yaml down -v
  docker compose down -v

# run the tests using the local engine (once)
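# extra arguments are passed through to cargo test / cargo nextest, e.g. just test -p some-crate (crate name here is illustrative)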
test *ARGS: start-docker-test-deps
  #!/usr/bin/env bash
  if command -v cargo-nextest; then
    COMMAND=(cargo nextest run)
  else
    COMMAND=(cargo test)
  fi
  COMMAND+=(--no-fail-fast "$@")
  echo "${COMMAND[*]}"
  "${COMMAND[@]}"

# run a watch process that runs the tests locally
watch: start-docker-test-deps start-docker-run-deps
  RUST_LOG=DEBUG \
    cargo watch -i "**/*.snap.new" \
      -x test \
      -x 'clippy --no-deps' \
      -x 'run --bin engine -- \
        --otlp-endpoint http://localhost:4317 \
        --authn-config-path static/auth/auth_config.json \
        --metadata-path static/sample-schema.json \
        --unstable-feature enable-json-api \
        --expose-internal-errors'

# check the code is fine
lint:
  cargo clippy --all-targets --no-deps
  ! command -v nix || nix flake check

# ensure we don't have unused dependencies
machete:
  cargo machete --with-metadata

# update golden tests
update-golden-files: start-docker-test-deps
  UPDATE_GOLDENFILES=1 just test
  just fix-format

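# rebuild the custom connector and refresh its schema in the test metadata (port 8102, NDC v0.2)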
update-custom-connector-schema-in-test-metadata:
  docker compose -f ci.docker-compose.yaml up --build --wait custom_connector

  just update-schema-in-test-metadata "8102" "v0.2"

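# rebuild the postgres connector and refresh its schema in the test metadata (port 8080, NDC v0.1)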
update-postgres-schema-in-test-metadata:
  docker compose -f ci.docker-compose.yaml up --build --wait postgres postgres_connector

  just update-schema-in-test-metadata "8080" "v0.1"

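# fetch capabilities and schema from the connector on PORT and splice them into matching DataConnectorLinks in the test metadata (requires curl, jq and sponge)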
update-schema-in-test-metadata PORT NDC_VERSION: && fix-format
  #!/usr/bin/env bash
  set -e

  new_capabilities=$(curl http://localhost:{{ PORT }}/capabilities | jq)
  new_schema=$(curl http://localhost:{{ PORT }}/schema | jq)
  ndc_version="{{ NDC_VERSION }}"

  # These should only be directories of tests that actually talk to the running connector and therefore must be kept up to date
  test_directories=(./crates/engine/tests/execute)

  find "${test_directories[@]}" -name '*.json' -print0 |
    while IFS= read -r -d '' file; do
      # Check if the file actually contains a custom connector DataConnectorLink
      if jq -e '
        (. | type == "object") and has("subgraphs") and (.subgraphs | length > 0) and (.subgraphs[] | has("objects") and (.objects | length > 0))
        and any(.subgraphs[].objects[]; .kind == "DataConnectorLink" and .definition.url.singleUrl.value == "http://localhost:{{ PORT }}")' "$file" >/dev/null; then

        # Update its schema, capabilities and version
        jq --argjson newCapabilities "$new_capabilities" --argjson newSchema "$new_schema" --arg ndcVersion "$ndc_version" '
          (.subgraphs[].objects[] | select(.kind == "DataConnectorLink" and .definition.url.singleUrl.value == "http://localhost:{{ PORT }}").definition.schema)
          |= (.capabilities = $newCapabilities | .schema = $newSchema | .version = $ndcVersion)
        ' "$file" \
          | sponge "$file"

        echo "Updated $file"
      else
        echo "Skipping $file: Does not appear to be a metadata file with a matching connector"
      fi
    done

  docker compose -f ci.docker-compose.yaml down

# ensures metadata objects in test json have their properties ordered as kind, version, then definition
reorder-json-in-test-metadata: && fix-format
  #!/usr/bin/env bash
  set -e

  # These should only be folders that contain json metadata tests
  test_directories=(./crates/engine/tests/execute ./crates/metadata-resolve/tests)

  find "${test_directories[@]}" -name '*.json' -print0 |
    while IFS= read -r -d '' file; do
      # Check if the file actually contains metadata
      if jq -e '(. | type == "object") and has("subgraphs") and (.subgraphs | length > 0) and (.subgraphs[] | has("objects") and (.objects | length > 0))' "$file" >/dev/null; then

        # Reformat each metadata object so that kind, version, and definition properties come first
        jq '.subgraphs[].objects |= map({ kind: .kind, version: .version, definition: .definition } + .)' "$file" \
          | sponge "$file"

        echo "Updated $file"
      else
        echo "Skipping $file: Does not appear to be a metadata file"
      fi
    done

# run the engine using the schema from tests
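# pass a different metadata file as the first argument, e.g. just run path/to/metadata.json (illustrative path)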
run METADATA_PATH="static/sample-schema.json": start-docker-test-deps start-docker-run-deps
  RUST_LOG=DEBUG cargo run --bin engine -- \
    --otlp-endpoint http://localhost:4317 \
    --authn-config-path static/auth/auth_config.json \
    --metadata-path {{ METADATA_PATH }} \
    --unstable-feature enable-json-api \
    --expose-internal-errors