server/tests: unify HASURA_BIGQUERY_SERVICE_KEY and HASURA_BIGQUERY_SERVICE_ACCOUNT env vars

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/3914
GitOrigin-RevId: 66f75420504d1b864b91599c2bdaa832784bb956
Rakesh Emmadi 2022-03-14 13:19:36 +05:30 committed by hasura-bot
parent cf820852b6
commit b844c5d732
12 changed files with 34 additions and 35 deletions
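
For context: before this commit the test scripts read a key file path from `HASURA_BIGQUERY_SERVICE_ACCOUNT_FILE` and exported its contents as `HASURA_BIGQUERY_SERVICE_ACCOUNT`, while other places expected `HASURA_BIGQUERY_SERVICE_KEY` (hence "unify" in the title); afterwards everything reads the single `HASURA_BIGQUERY_SERVICE_KEY` variable. A minimal sketch of the workflow change (the key path is illustrative):

```bash
# Before: a file path plus a variable derived from it (illustrative path)
export HASURA_BIGQUERY_SERVICE_ACCOUNT_FILE=/path/to/key.json
export HASURA_BIGQUERY_SERVICE_ACCOUNT="$(cat "$HASURA_BIGQUERY_SERVICE_ACCOUNT_FILE")"

# After: one variable holding the service account key contents
export HASURA_BIGQUERY_SERVICE_KEY="$(cat /path/to/key.json)"
```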

View File

@@ -1224,9 +1224,6 @@ backend-bigquery)
  source "$CIRCLECI_FOLDER/../scripts/bigquery.sh" && verify_bigquery_pytest_env
  HASURA_BIGQUERY_SERVICE_ACCOUNT=$(cat "$HASURA_BIGQUERY_SERVICE_ACCOUNT_FILE")
  export HASURA_BIGQUERY_SERVICE_ACCOUNT
  run_hge_with_args serve
  wait_for_port 8080

View File

@@ -8,8 +8,8 @@
# checks that the required bigquery environment variables are available to run tests
verify_bigquery_pytest_env() {
  if [[ -z "${HASURA_BIGQUERY_SERVICE_ACCOUNT_FILE:-}" || -z "${HASURA_BIGQUERY_PROJECT_ID:-}" ]]; then
    echo "HASURA_BIGQUERY_SERVICE_ACCOUNT_FILE and HASURA_BIGQUERY_PROJECT_ID environment variables are needed to run these tests."
  if [[ -z "${HASURA_BIGQUERY_SERVICE_KEY:-}" || -z "${HASURA_BIGQUERY_PROJECT_ID:-}" ]]; then
    echo "HASURA_BIGQUERY_SERVICE_KEY and HASURA_BIGQUERY_PROJECT_ID environment variables are needed to run these tests."
    echo "See https://github.com/hasura/graphql-engine/tree/master/server/tests-py#running-bigquery-tests for more information."
    exit 1
  fi
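
A sketch of how this guard is consumed elsewhere in this commit (cf. the `case "$BACKEND"` hunk below): source the helper, then run the check before starting any tests.

```bash
# minimal sketch; the script path matches the repo layout referenced in this commit
source "scripts/bigquery.sh"
verify_bigquery_pytest_env  # exits with status 1 unless both variables are set
```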

View File

@@ -89,7 +89,7 @@ function add_bigquery_source() {
"tables": [],
"configuration": {
  "service_account": {
    "from_env": "HASURA_BIGQUERY_SERVICE_ACCOUNT"
    "from_env": "HASURA_BIGQUERY_SERVICE_KEY"
  },
  "project_id": { "from_env": "HASURA_BIGQUERY_PROJECT_ID" },
  "datasets": ["hasura"]
@@ -101,7 +101,7 @@ function add_bigquery_source() {
"tables": [],
"configuration": {
  "service_account": {
    "from_env": "HASURA_BIGQUERY_SERVICE_ACCOUNT"
    "from_env": "HASURA_BIGQUERY_SERVICE_KEY"
  },
  "project_id": { "from_env": "HASURA_BIGQUERY_PROJECT_ID" },
  "datasets": ["hasura"]
@@ -114,7 +114,7 @@ function add_bigquery_source() {
"configuration": {
  "global_select_limit": 1,
  "service_account": {
    "from_env": "HASURA_BIGQUERY_SERVICE_ACCOUNT"
    "from_env": "HASURA_BIGQUERY_SERVICE_KEY"
  },
  "project_id": { "from_env": "HASURA_BIGQUERY_PROJECT_ID" },
  "datasets": ["hasura"]

View File

@@ -38,7 +38,6 @@ case "$BACKEND" in
  source "scripts/bigquery.sh"
  verify_bigquery_pytest_env
  export HASURA_BIGQUERY_SERVICE_ACCOUNT=$(cat "$HASURA_BIGQUERY_SERVICE_ACCOUNT_FILE")
  ;;
citus)
  ;;

View File

@@ -4,9 +4,8 @@
# https://cloud.google.com/iam/docs/creating-managing-service-accounts#iam-service-accounts-create-rest
project_id=${1}
service_account_file=${2}
api_key=${2}
service_account_email=${3} # eg. "<<SERVICE_ACCOUNT_NAME>>@<<PROJECT_NAME>>.iam.gserviceaccount.com"
api_key=$(cat "$service_account_file")
curl "https://content-bigquery.googleapis.com/bigquery/v2/projects/$project_id/queries?alt=json&key=$api_key" \
--data-binary '{"query":"select 123"}' \

View File

@@ -31,6 +31,6 @@ See the [hasura.io: BigQuery getting started guide](https://hasura.io/docs/lates
Integration tests are run against short-lived projects. The following prerequisites are expected:
- A Google Cloud Console service account
- `HASURA_BIGQUERY_PROJECT_ID` environment variable
- `HASURA_BIGQUERY_SERVICE_ACCOUNT_FILE` environment variable
- `HASURA_BIGQUERY_SERVICE_KEY` environment variable
See [these docs](https://github.com/hasura/graphql-engine/tree/master/server/tests-py#running-bigquery-tests) for more guidance on testing against a BigQuery data source.

View File

@@ -25,7 +25,7 @@ import Data.Text.Extended (commaSeparated)
import GHC.Stack
import Harness.Constants as Constants
import Harness.Env
import Harness.Exceptions (SomeException, handle, tryInOrder)
import Harness.Exceptions (SomeException, handle)
import Harness.GraphqlEngine qualified as GraphqlEngine
import Harness.Quoter.Yaml (yaml)
import Harness.State (State)
@@ -36,10 +36,7 @@ import Hasura.Backends.BigQuery.Source (ServiceAccount)
import Prelude

getServiceAccount :: HasCallStack => IO ServiceAccount
getServiceAccount =
  tryInOrder
    (getEnvJson Constants.bigqueryServiceAccountVar)
    (getEnvJsonFile Constants.bigqueryServiceAccountFileVar)
getServiceAccount = getEnvJson Constants.bigqueryServiceKeyVar

getProjectId :: (HasCallStack) => IO Text
getProjectId = getEnvString Constants.bigqueryProjectIdVar

View File

@@ -25,8 +25,7 @@ module Harness.Constants
    sqlserverLivenessCheckIntervalMicroseconds,
    sqlserverConnectInfo,
    sqlserverDb,
    bigqueryServiceAccountFileVar,
    bigqueryServiceAccountVar,
    bigqueryServiceKeyVar,
    bigqueryProjectIdVar,
    bigqueryDataset,
    httpHealthCheckAttempts,
@@ -193,11 +192,8 @@ mysqlConnectInfo =
      Mysql.connectPort = mysqlPort
    }

bigqueryServiceAccountFileVar :: String
bigqueryServiceAccountFileVar = "HASURA_BIGQUERY_SERVICE_ACCOUNT_FILE"

bigqueryServiceAccountVar :: String
bigqueryServiceAccountVar = "HASURA_BIGQUERY_SERVICE_ACCOUNT"

bigqueryServiceKeyVar :: String
bigqueryServiceKeyVar = "HASURA_BIGQUERY_SERVICE_KEY"

bigqueryProjectIdVar :: String
bigqueryProjectIdVar = "HASURA_BIGQUERY_PROJECT_ID"

View File

@@ -164,16 +164,23 @@ Running integration tests against a BigQuery data source is a little more involv
```
HASURA_BIGQUERY_PROJECT_ID=# the project ID of the service account
HASURA_BIGQUERY_SERVICE_ACCOUNT_EMAIL=# eg. "<<SERVICE_ACCOUNT_NAME>>@<<PROJECT_NAME>>.iam.gserviceaccount.com"
HASURA_BIGQUERY_SERVICE_ACCOUNT_FILE=# the filepath to the downloaded service account key
HASURA_BIGQUERY_SERVICE_KEY=# the service account key
```
Before running the test suite either manually or via `dev.sh`:
1. Ensure you have access to a [Google Cloud Console service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts#creating). Store the project ID in the `HASURA_BIGQUERY_PROJECT_ID` variable and, optionally, the account email in `HASURA_BIGQUERY_SERVICE_ACCOUNT_EMAIL`.
2. [Create and download a new service account key](https://cloud.google.com/iam/docs/creating-managing-service-account-keys). Store the filepath in a `HASURA_BIGQUERY_SERVICE_ACCOUNT_FILE` variable.
2. [Create and download a new service account key](https://cloud.google.com/iam/docs/creating-managing-service-account-keys). Store the contents of the file in a `HASURA_BIGQUERY_SERVICE_KEY` variable.
```bash
export HASURA_BIGQUERY_SERVICE_KEY=$(cat /path/to/service/account)
```
3. [Login and activate the service account](https://cloud.google.com/sdk/gcloud/reference/auth/activate-service-account), if it is not already activated.
4. Verify the service account is accessible via the [BigQuery API](https://cloud.google.com/bigquery/docs/reference/rest):
1. Run `source scripts/verify-bigquery-creds.sh $HASURA_BIGQUERY_PROJECT_ID $HASURA_BIGQUERY_SERVICE_ACCOUNT_FILE $HASURA_BIGQUERY_SERVICE_ACCOUNT_EMAIL`. If the query succeeds, the service account is set up correctly to run tests against BigQuery locally.
5. Finally, run the BigQuery test suite with `HASURA_BIGQUERY_SERVICE_ACCOUNT_FILE` and `HASURA_BIGQUERY_PROJECT_ID` environment variables set. For example:
1. Run the following command:
```bash
source scripts/verify-bigquery-creds.sh $HASURA_BIGQUERY_PROJECT_ID $HASURA_BIGQUERY_SERVICE_KEY $HASURA_BIGQUERY_SERVICE_ACCOUNT_EMAIL
```
If the query succeeds, the service account is set up correctly to run tests against BigQuery locally.
5. Finally, run the BigQuery test suite with `HASURA_BIGQUERY_SERVICE_KEY` and `HASURA_BIGQUERY_PROJECT_ID` environment variables set. For example:
```
scripts/dev.sh test --integration --backend bigquery -k TestGraphQLQueryBasicBigquery
```
@@ -256,7 +263,7 @@ This means, for example, that if `teardown.yaml` untracks a table, and `schema_t
The current convention is to indicate in the class name which backend(s) a test can be run against. For example:
* `TestGraphQLQueryBasicMySQL` for tests that can only be run on MySQL
* `TestGraphQLQueryBasicCommon` for tests that can be run against more than one backend
* If a test class doesn't have a suffix specifying the backend, nor does its name end in `Common`, then it is likely a test written pre-v2.0 that
can only be run on Postgres
This naming convention enables easier test filtering with [pytest command line flags](https://docs.pytest.org/en/6.2.x/usage.html#specifying-tests-selecting-tests).
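
Given that convention, one illustrative invocation (following the `dev.sh` pattern shown earlier; the backend choice here is arbitrary) that selects only the cross-backend basic query tests:

```bash
scripts/dev.sh test --integration --backend citus -k TestGraphQLQueryBasicCommon
```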

View File

@@ -7,7 +7,7 @@ args:
- name: hasura_global_limited
  kind: bigquery
  configuration:
    service_account: {from_env: HASURA_BIGQUERY_SERVICE_ACCOUNT}
    service_account: {from_env: HASURA_BIGQUERY_SERVICE_KEY}
    project_id: {from_env: HASURA_BIGQUERY_PROJECT_ID}
    datasets:
    - hasura
@@ -17,7 +17,7 @@ args:
- name: bigquery
  kind: bigquery
  configuration:
    service_account: {from_env: HASURA_BIGQUERY_SERVICE_ACCOUNT}
    service_account: {from_env: HASURA_BIGQUERY_SERVICE_KEY}
    project_id: {from_env: HASURA_BIGQUERY_PROJECT_ID}
    datasets:
    - hasura
@@ -27,7 +27,7 @@ args:
- name: bigquery2
  kind: bigquery
  configuration:
    service_account: {from_env: HASURA_BIGQUERY_SERVICE_ACCOUNT}
    service_account: {from_env: HASURA_BIGQUERY_SERVICE_KEY}
    project_id: {from_env: HASURA_BIGQUERY_PROJECT_ID}
    datasets:
    - hasura

View File

@@ -12,7 +12,7 @@ query:
- name: hasura_global_limited
  kind: bigquery
  configuration:
    service_account: {from_env: HASURA_BIGQUERY_SERVICE_ACCOUNT}
    service_account: {from_env: HASURA_BIGQUERY_SERVICE_KEY}
    project_id: {from_env: HASURA_BIGQUERY_PROJECT_ID}
    datasets:
    - hasura

View File

@@ -40,7 +40,9 @@
type: bigquery_set_table_customization
args:
  source: bigquery
  table: table_to_customize
  table:
    dataset: hasura
    name: table_to_customize
  configuration:
    custom_name: customized_table
    custom_column_names:
@@ -70,7 +72,9 @@
type: bigquery_set_table_customization
args:
  source: bigquery
  table: table_to_customize
  table:
    dataset: hasura
    name: table_to_customize
  configuration:
    custom_name: customized_table
    custom_column_names: