mirror of
https://github.com/hasura/graphql-engine.git
synced 2024-12-15 01:12:56 +03:00
server/tests: ephemeral BigQuery projects for CI test jobs
_Problem_ We currently run teardown/`DELETE` statements on the same, shared `hasura_test` dataset. This is not ideal as parallel test runs can and do affect each other, resulting in nondeterministic CI failures. Closes https://github.com/hasura/graphql-engine-mono/issues/2521 _Solution and design_ This PR introduces ephemeral, isolated projects for each test run _in CI only_. Projects are created within [the Google Cloud Platform `data-sources-test-bigquery` directory](https://console.cloud.google.com/iam-admin/settings?folder=704256416468&orgonly=true&supportedpurview=organizationId) on each test run, and destroyed afterwards. I've only introduced this change in CI for the time being: 1. this isn't as much of an issue locally because we're less likely to run bigquery tests in parallel. 2. to more quickly unblock https://github.com/hasura/graphql-engine/issues/7929. 3. to limit the number of new projects created until we have a better idea of our usage vs GCP quota/limits. Also updated the [internal wiki here](https://github.com/hasura/graphql-engine-mono/wiki/Testing-BigQuery) with this info. _To verify_ - CI: [this job](https://buildkite.com/hasura/graphql-engine-mono/builds/3770#89e5bac6-16fe-447e-bcda-85cd47ea1b77) successfully runs all tests on a temporary project & dataset - local: follow [these steps](https://github.com/hasura/graphql-engine-mono/wiki/Testing-BigQuery#ci--optional-dedicated-gcp-project-for-tests) to try the same setup locally PR-URL: https://github.com/hasura/graphql-engine-mono/pull/3240 GitOrigin-RevId: d88d9cb7922266bfa962cfcb481e0272b8929a5d
This commit is contained in:
parent
c861f0b09d
commit
a091110364
@ -1151,14 +1151,15 @@ backend-citus)
|
||||
backend-bigquery)
|
||||
echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH BIGQUERY BACKEND ###########################################>\n"
|
||||
|
||||
source_data_sources_utils
|
||||
verify_bigquery_pytest_env
|
||||
source "$CIRCLECI_FOLDER/../scripts/bigquery.sh" && verify_bigquery_pytest_env
|
||||
|
||||
export HASURA_BIGQUERY_SERVICE_ACCOUNT=$(cat "$HASURA_BIGQUERY_SERVICE_ACCOUNT_FILE")
|
||||
HASURA_BIGQUERY_SERVICE_ACCOUNT=$(cat "$HASURA_BIGQUERY_SERVICE_ACCOUNT_FILE")
|
||||
export HASURA_BIGQUERY_SERVICE_ACCOUNT
|
||||
|
||||
run_hge_with_args serve
|
||||
wait_for_port 8080
|
||||
|
||||
source_data_sources_utils
|
||||
add_bigquery_source 8080
|
||||
|
||||
# See note [Specifying Pytests with -k flag]
|
||||
|
68
scripts/bigquery.sh
Normal file
68
scripts/bigquery.sh
Normal file
@ -0,0 +1,68 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# helper functions used to run BigQuery tests
|
||||
# https://github.com/hasura/graphql-engine/tree/master/server/tests-py#running-bigquery-tests
|
||||
|
||||
|
||||
# === functions to test BigQuery locally and in CI
|
||||
|
||||
# Ensure the environment variables required by the BigQuery pytest suite are
# present; print guidance and exit 1 when either is missing.
function verify_bigquery_pytest_env() {
  if [[ -n "${HASURA_BIGQUERY_SERVICE_ACCOUNT_FILE:-}" && -n "${HASURA_BIGQUERY_PROJECT_ID:-}" ]]; then
    return 0
  fi
  echo "HASURA_BIGQUERY_SERVICE_ACCOUNT_FILE and HASURA_BIGQUERY_PROJECT_ID environment variables are needed to run these tests."
  echo "See https://github.com/hasura/graphql-engine/tree/master/server/tests-py#running-bigquery-tests for more information."
  exit 1
}
|
||||
|
||||
# === functions to test BigQuery in CI with ephemeral Google Cloud projects
|
||||
|
||||
# Ensure the optional environment variables needed to create a temporary test
# project are present; print guidance and exit 1 otherwise.
#   HASURA_BIGQUERY_TEST_DIR     - ID of the new project's parent directory
#     https://cloud.google.com/iam/docs/resource-hierarchy-access-control
#   HASURA_BIGQUERY_BILLING_ACCT - billing account ID that must be linked to
#     the project in order to run queries
#     https://cloud.google.com/billing/docs/how-to/manage-billing-account
function verify_temp_project_env() {
  if [[ -n "${HASURA_BIGQUERY_TEST_DIR:-}" && -n "${HASURA_BIGQUERY_BILLING_ACCT:-}" ]]; then
    return 0
  fi
  echo "HASURA_BIGQUERY_TEST_DIR and HASURA_BIGQUERY_BILLING_ACCT environment variables are needed to create a temporary test project."
  exit 1
}
|
||||
|
||||
# Create a short-lived Google Cloud project for a test run and export its ID
# as HASURA_BIGQUERY_PROJECT_ID.
# Arguments:
#   $1 - optional seed for the project ID (defaults to a fresh UUID)
function generate_test_project() {
  local seed=${1:-$(uuidgen)}
  # gcloud project IDs are limited to 30 chars and must be lowercase, so the
  # seed may be shortened & altered to meet those requirements:
  # https://cloud.google.com/resource-manager/docs/creating-managing-projects
  local raw_id="bq-${seed}"
  HASURA_BIGQUERY_PROJECT_ID=$(printf '%s\n' "$raw_id" | cut -c1-30 | tr '[:upper:]' '[:lower:]')
  echo ""
  echo "--- create a short-lived bigquery project id: $HASURA_BIGQUERY_PROJECT_ID"
  gcloud projects create "$HASURA_BIGQUERY_PROJECT_ID" --folder="$HASURA_BIGQUERY_TEST_DIR"
  # projects require linking to a billing account to run any queries
  # https://cloud.google.com/billing/docs
  gcloud beta billing projects link "$HASURA_BIGQUERY_PROJECT_ID" --billing-account "$HASURA_BIGQUERY_BILLING_ACCT"
  export HASURA_BIGQUERY_PROJECT_ID
}
|
||||
|
||||
# Create the standard test dataset inside the project named by
# HASURA_BIGQUERY_PROJECT_ID; prints "ok" on completion.
function create_hasura_test_dataset() {
  local dataset='hasura_test' # all bigquery tests expect a dataset to exist with this name
  echo ""
  echo "--- create a test dataset id: $HASURA_BIGQUERY_PROJECT_ID:$dataset"
  bq --location=US mk -d --project_id "$HASURA_BIGQUERY_PROJECT_ID" "$dataset"
  echo "ok"
}
|
||||
|
||||
# End-to-end setup of an ephemeral BigQuery test project: validate the
# required env vars, create the project, then provision the test dataset.
# Arguments:
#   $1 - optional seed for the project ID (defaults to a fresh UUID)
function create_temp_bigquery_project() {
  local id_seed=${1:-$(uuidgen)}
  verify_temp_project_env
  generate_test_project "$id_seed"
  create_hasura_test_dataset
}
|
||||
|
||||
# Delete the ephemeral BigQuery project created for a test run.
# Arguments:
#   $1 - ID of the project to delete (required)
function delete_temp_bigquery_project() {
  # project deletion is destructive: refuse to proceed with a missing/empty
  # ID instead of handing gcloud a blank argument
  local project_id=${1:?"delete_temp_bigquery_project: project id argument is required"}
  echo ""
  echo "--- delete bigquery project id: $project_id"
  gcloud projects delete "$project_id" --quiet
}
|
@ -126,12 +126,3 @@ function add_bigquery_source() {
|
||||
}
|
||||
'
|
||||
}
|
||||
|
||||
# Abort (exit 1) with guidance unless both environment variables required by
# the BigQuery tests are present.
function verify_bigquery_pytest_env() {
  if [[ -n "${HASURA_BIGQUERY_SERVICE_ACCOUNT_FILE:-}" && -n "${HASURA_BIGQUERY_PROJECT_ID:-}" ]]; then
    return 0
  fi
  echo "HASURA_BIGQUERY_SERVICE_ACCOUNT_FILE and HASURA_BIGQUERY_PROJECT_ID environment variables are needed to run these tests."
  echo "See https://github.com/hasura/graphql-engine/blob/master/server/py-tests/README.md#running-bigquery-tests for more information."
  exit 1
}
|
||||
|
@ -35,7 +35,7 @@ case "$BACKEND" in
|
||||
postgres)
|
||||
;;
|
||||
bigquery)
|
||||
source "scripts/data-sources-util.sh"
|
||||
source "scripts/bigquery.sh"
|
||||
|
||||
verify_bigquery_pytest_env
|
||||
export HASURA_BIGQUERY_SERVICE_ACCOUNT=$(cat "$HASURA_BIGQUERY_SERVICE_ACCOUNT_FILE")
|
||||
|
Loading…
Reference in New Issue
Block a user