mirror of
https://github.com/hasura/graphql-engine.git
synced 2024-12-14 17:02:49 +03:00
server/tests: fix BigQuery test failure
This PR addresses a couple of recent issues with BigQuery tests in CI, plus some refactoring. TL;DR: most issues were fixed by being [explicit about failures](http://redsymbol.net/articles/unofficial-bash-strict-mode/) and variables in scope. Thanks to @qrilka for addressing a couple of these in your last PR already, sharing my notes about them here too for posterity as I found the troubleshooting exercise useful. _commands listed in execution order_ `generate_test_project` * issue: this rarely fails AFAICT, but a failure in any of the `gcloud` commands will result in unclear failures further along the call stack * fix: fail faster with `set -e` `create_bigquery_dataset` * error: `BigQuery error in mk operation: Not found: Project`. examples [[1]](https://github.com/hasura/graphql-engine-mono/issues/3600)[[2]](https://buildkite.com/hasura/graphql-engine-mono/builds/4857#f70990eb-3721-45c6-b11a-af7e4ebd9fe3) * issue: a failure on a missing project, which I could verify was created successfully * cause: missing `HASURA_BIGQUERY_PROJECT_ID` , most likely caused by the above failure and dodgy subshell scoping * fix: pass `project_id` as an argument. This might not have been necessary since `generate_test_project` fails faster, but I thought it was clearer to pass explicitly. `ensure_bigquery_db` * issue: this could fail, but return a 0 exit code. [example](https://buildkite.com/hasura/graphql-engine-mono/builds/4836#e3dfbc1e-4034-40bb-beb1-181fb5d9489f) * fix: add a `--fail` flag to `curl` call. Kirill addressed in https://github.com/hasura/graphql-engine-mono/pull/3435/files#diff-0525000dbb36f436dcc17570541378de51471200d294b468c4b288e0292441b6R94 `delete_temp_bigquery_project` * error: `(gcloud.projects.delete) value for field [projectId] in collection [cloudresourcemanager.projects] is required but was not provided`. 
[example](https://buildkite.com/hasura/graphql-engine-mono/builds/4836#e3dfbc1e-4034-40bb-beb1-181fb5d9489f) * issue: attempting to delete a tmp file that didn't exist due to earlier failures * fix: Kirill addressed in https://github.com/hasura/graphql-engine-mono/pull/3435/files#diff-0525000dbb36f436dcc17570541378de51471200d294b468c4b288e0292441b6R106 to verify: check [bigquery](https://buildkite.com/hasura/graphql-engine-mono/builds/5115#41d059eb-329c-46fb-a745-b2b97fffd328) and [hspec](https://buildkite.com/hasura/graphql-engine-mono/builds/5115#5623a53d-c18d-478e-bf44-446e1287453b) jobs in CI PR-URL: https://github.com/hasura/graphql-engine-mono/pull/3616 Co-authored-by: Divi <32202683+imperfect-fourth@users.noreply.github.com> GitOrigin-RevId: b85420ef57036b4dbb05202b73ee9e954113bf9d
This commit is contained in:
parent
e87433c2bb
commit
fc0352a995
@ -7,7 +7,7 @@
|
||||
# === functions to test BigQuery locally and in CI
|
||||
|
||||
# checks that the required bigquery environment variables are available to run tests
|
||||
function verify_bigquery_pytest_env() {
|
||||
verify_bigquery_pytest_env() {
|
||||
if [[ -z "${HASURA_BIGQUERY_SERVICE_ACCOUNT_FILE:-}" || -z "${HASURA_BIGQUERY_PROJECT_ID:-}" ]]; then
|
||||
echo "HASURA_BIGQUERY_SERVICE_ACCOUNT_FILE and HASURA_BIGQUERY_PROJECT_ID environment variables are needed to run these tests."
|
||||
echo "See https://github.com/hasura/graphql-engine/tree/master/server/tests-py#running-bigquery-tests for more information."
|
||||
@ -18,56 +18,81 @@ function verify_bigquery_pytest_env() {
|
||||
# === functions to test BigQuery in CI with ephemeral Google Cloud projects
|
||||
|
||||
# checks that the optional bigquery environment variables are available to run tests against a temporary project
|
||||
# HASURA_BIGQUERY_TEST_DIR is the ID for the new project's parent directory
|
||||
# https://cloud.google.com/iam/docs/resource-hierarchy-access-control
|
||||
# HASURA_BIGQUERY_BILLING_ACCT is the billing account ID that should be linked to the project in order to run queries
|
||||
# https://cloud.google.com/billing/docs/how-to/manage-billing-account
|
||||
# Checks that the optional bigquery environment variables needed to create a
# temporary test project are all set; prints guidance and exits 1 otherwise.
#
# Required variables:
#   HASURA_BIGQUERY_API_KEY      - API key associated with the project
#                                  https://cloud.google.com/docs/authentication/api-keys
#   HASURA_BIGQUERY_BILLING_ACCT - billing account ID to link so queries can run
#                                  https://cloud.google.com/billing/docs/how-to/manage-billing-account
#   HASURA_BIGQUERY_IAM_ACCOUNT  - application default credentials
#                                  https://google.aip.dev/auth/4110
#   HASURA_BIGQUERY_TEST_DIR     - ID of the new project's parent directory
#                                  https://cloud.google.com/iam/docs/resource-hierarchy-access-control
verify_temp_project_env() {
  # ${VAR:-} keeps 'set -u' callers safe when a variable is entirely unset.
  if [[ -z "${HASURA_BIGQUERY_TEST_DIR:-}" ||
        -z "${HASURA_BIGQUERY_BILLING_ACCT:-}" ||
        -z "${HASURA_BIGQUERY_API_KEY:-}" ||
        -z "${HASURA_BIGQUERY_IAM_ACCOUNT:-}" ]]; then
    echo "the following environment variables are needed to create a temporary test project:"
    # fix: original message said "the API associated" — it is an API *key*
    echo "HASURA_BIGQUERY_API_KEY: the API key associated with the project"
    # https://cloud.google.com/docs/authentication/api-keys
    echo "HASURA_BIGQUERY_BILLING_ACCT: the billing account ID that should be linked to the project in order to run queries"
    # https://cloud.google.com/billing/docs/how-to/manage-billing-account
    echo "HASURA_BIGQUERY_IAM_ACCOUNT: application default credentials"
    # https://google.aip.dev/auth/4110
    echo "HASURA_BIGQUERY_TEST_DIR: the ID for the new project's parent directory"
    # https://cloud.google.com/iam/docs/resource-hierarchy-access-control
    exit 1
  fi
}
|
||||
|
||||
# Generates a BigQuery project with a given or random ID.
# https://cloud.google.com/resource-manager/docs/creating-managing-projects
#
# Arguments:
#   $1 - path of a temp file used to hand the generated project ID back to the caller
#   $2 - optional seed for the project ID (defaults to a fresh uuid)
# Globals read: HASURA_BIGQUERY_TEST_DIR, HASURA_BIGQUERY_BILLING_ACCT
#
# Runs in a subshell with 'set -e' so any failing gcloud step aborts immediately
# without leaking shell options into the caller.
generate_test_project() (
  set -e
  local id_file=${1}
  # the seed may be shortened & lowercased to meet gcloud project ID requirements
  local id_seed=${2:-$(uuidgen)}

  local project_id
  project_id=$(echo bq-"$id_seed" | cut -c1-30 | tr '[:upper:]' '[:lower:]')

  echo ""
  echo "--- create a short-lived bigquery project id: $project_id"
  gcloud projects create "$project_id" --folder="$HASURA_BIGQUERY_TEST_DIR"

  # verify the project was created successfully
  gcloud projects describe "$project_id"

  # store the project_id in a temporary file, so it's accessible outside of this subshell
  echo "$project_id" > "$id_file"

  # link to a billing account so we can run queries
  # https://cloud.google.com/billing/docs
  gcloud beta billing projects link "$project_id" --billing-account "$HASURA_BIGQUERY_BILLING_ACCT"
)
|
||||
|
||||
# Create a dataset within a specified project.
# https://cloud.google.com/bigquery/docs/datasets-intro
#
# Arguments:
#   $1 - ID of the project that should own the dataset
#   $2 - name of the dataset to create
#
# Runs in a subshell with 'set -e' so a failing 'bq mk' surfaces as a
# non-zero exit instead of being silently ignored.
create_bigquery_dataset() (
  set -e
  local target_project=${1}
  local target_dataset=${2}

  echo ""
  echo "--- create a test dataset id: $target_project:$target_dataset"
  bq --location=US mk -d --project_id "$target_project" "$target_dataset"
)
|
||||
|
||||
# Helper function to setup and verify a bigquery project:
# checks the env, creates the project, creates a dataset inside it, and waits
# until the dataset is reachable over the BigQuery REST API.
#
# Arguments:
#   $1 - temp file the generated project ID is written to (and read back from)
#   $2 - dataset name to create inside the new project
#   $3 - optional project ID seed (defaults to a fresh uuid)
setup_temp_bigquery_project() (
  local id_file=${1}
  local dataset=${2}
  local id_seed=${3:-$(uuidgen)}
  local project_id

  verify_temp_project_env
  generate_test_project "${id_file}" "$id_seed"
  # the project ID is handed back via the temp file because
  # generate_test_project runs in its own subshell
  project_id="$(cat "$id_file")"
  create_bigquery_dataset "$project_id" "$dataset"
  ensure_bigquery_dataset "$project_id" "$dataset"

  # suggested usage for CI tests:
  # export HASURA_BIGQUERY_PROJECT_ID=$(cat $tmp_id_file)
)
|
||||
|
||||
delete_temp_bigquery_project() {
|
||||
local project_id=${1}
|
||||
echo ""
|
||||
echo "--- delete bigquery project id: $project_id"
|
||||
@ -87,14 +112,16 @@ authenticate_bigquery() {
|
||||
}
|
||||
|
||||
ensure_bigquery_dataset() {
|
||||
local dataset_name=${1}
|
||||
echo "--- :database: ensure the bigquery data source is accessible, i.e. we can access the $dataset_name dataset in bigquery project"
|
||||
local project_id=${1}
|
||||
local dataset_name=${2}
|
||||
|
||||
echo "--- :database: ensure we can access the $dataset_name dataset in bigquery project $project_id"
|
||||
for _ in $(seq 1 60);
|
||||
do
|
||||
curl --fail --output /dev/null \
|
||||
"https://content-bigquery.googleapis.com/bigquery/v2/projects/$HASURA_BIGQUERY_PROJECT_ID/datasets/$dataset_name/tables?alt=json&key=$HASURA_BIGQUERY_API_KEY" \
|
||||
"https://content-bigquery.googleapis.com/bigquery/v2/projects/$project_id/datasets/$dataset_name/tables?alt=json&key=$HASURA_BIGQUERY_API_KEY" \
|
||||
-H "Authorization: Bearer $(gcloud auth print-access-token "$HASURA_BIGQUERY_IAM_ACCOUNT" \
|
||||
--project="$HASURA_BIGQUERY_PROJECT_ID")" \
|
||||
--project="$project_id")" \
|
||||
&& echo "Success" && return 0
|
||||
echo -n .
|
||||
sleep 1
|
||||
|
@ -74,7 +74,7 @@ function add_bigquery_source() {
|
||||
metadata_url=http://127.0.0.1:$hasura_graphql_server_port/v1/metadata
|
||||
|
||||
echo ""
|
||||
echo "Adding BigQuery sources"
|
||||
echo "Adding BigQuery sources to project $HASURA_BIGQUERY_PROJECT_ID"
|
||||
curl --fail "$metadata_url" \
|
||||
--data-raw '
|
||||
{
|
||||
|
Loading…
Reference in New Issue
Block a user