server/bigquery: allow empty tables in replace_metadata. Default BigQueryField mode to Nullable

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/2466
GitOrigin-RevId: 1f3599d1317235a31c98d7ed1ece2db92d82e916
This commit is contained in:
Abby Sassel 2021-10-01 19:29:03 +01:00 committed by hasura-bot
parent 80cb68332c
commit 3a6f4e1737
7 changed files with 91 additions and 14 deletions

View File

@ -14,10 +14,11 @@ function add_sources() {
add_mssql_source "$hasura_graphql_server_port" "$MSSQL_CONN_STR"
;;
mysql)
add_mysql_source "$hasura_graphql_server_port" "$MSSQL_CONN_STR"
add_mysql_source "$hasura_graphql_server_port"
;;
bigquery)
add_bigquery_source "$hasura_graphql_server_port"
;;
# bigquery deliberately omitted as its test setup is atypical. See:
# https://github.com/hasura/graphql-engine/blob/master/server/CONTRIBUTING.md#running-the-python-test-suite-on-bigquery
esac
echo ""
@ -68,6 +69,41 @@ function add_mysql_source() {
--data-raw '{"type":"replace_metadata","args":{"version":3,"sources":[{"name":"mysql","kind":"mysql","tables":[],"configuration":{"database":"hasura","user":"'"$MYSQL_USER"'","password":"'"$MYSQL_PASSWORD"'","host":"127.0.0.1","port":'"$MYSQL_PORT"',"pool_settings":{}}}]}}'
}
function add_bigquery_source() {
# Register a BigQuery source named "hasura_global_limited" on the
# graphql-engine instance listening on the given port, using the v1/metadata
# replace_metadata API.
#   $1 - port the local graphql-engine HTTP server is listening on
# NOTE(review): relies on HASURA_BIGQUERY_SERVICE_ACCOUNT and
# HASURA_BIGQUERY_PROJECT_ID being exported in the server's environment
# ("from_env" references below) — presumably checked elsewhere by
# verify_bigquery_pytest_env; confirm before reuse.
hasura_graphql_server_port=${1}
metadata_url=http://127.0.0.1:$hasura_graphql_server_port/v1/metadata
echo ""
echo "Adding BigQuery source"
# The payload registers the source with an empty "tables" list;
# allow_inconsistent_metadata lets the call succeed even if the source
# cannot be fully resolved at replace time.
curl "$metadata_url" \
--data-raw '
{
  "type": "replace_metadata",
  "args": {
    "allow_inconsistent_metadata": true,
    "metadata": {
      "version": 3,
      "sources": [
        {
          "name": "hasura_global_limited",
          "kind": "bigquery",
          "tables": [],
          "configuration": {
            "global_select_limit": 1,
            "service_account": {
              "from_env": "HASURA_BIGQUERY_SERVICE_ACCOUNT"
            },
            "project_id": { "from_env": "HASURA_BIGQUERY_PROJECT_ID" },
            "datasets": ["hasura_test"]
          }
        }
      ]
    }
  }
}
'
}
function verify_bigquery_pytest_env() {
# check that required bigquery environment variables are present
if [[ -z "${HASURA_BIGQUERY_SERVICE_ACCOUNT_FILE:-}" || -z "${HASURA_BIGQUERY_PROJECT_ID:-}" ]]; then

View File

@ -508,6 +508,7 @@ elif [ "$MODE" = "test" ]; then
echo ""
echo " Ok"
export HASURA_BIGQUERY_SERVICE_ACCOUNT=$(cat "$HASURA_BIGQUERY_SERVICE_ACCOUNT_FILE")
add_sources $HASURA_GRAPHQL_SERVER_PORT
cd "$PROJECT_ROOT/server/tests-py"

View File

@ -21,7 +21,7 @@ import Control.Concurrent
import Control.Exception.Safe
import Control.Monad.Except
import Control.Monad.Reader
import Data.Aeson ((.:), (.:?), (.=))
import Data.Aeson ((.!=), (.:), (.:?), (.=))
import Data.Aeson qualified as Aeson
import Data.Aeson.Types qualified as Aeson
import Data.ByteString.Lazy qualified as L
@ -353,10 +353,7 @@ streamBigQuery ::
streamBigQuery credentials bigquery = do
jobResult <- createQueryJob credentials bigquery
case jobResult of
Right job -> do
records <- loop Nothing Nothing
-- liftIO (print records)
pure records
Right job -> loop Nothing Nothing
where
loop pageToken mrecordSet = do
results <- getJobResults credentials job Fetch {pageToken}
@ -568,8 +565,8 @@ createQueryJob sc@BigQuerySourceConfig {..} BigQuery {..} =
parseRecordSetPayload :: Aeson.Object -> Aeson.Parser RecordSet
parseRecordSetPayload resp = do
schema <- resp .: "schema"
columns <- schema .: "fields" :: Aeson.Parser (Vector BigQueryField)
mSchema <- resp .:? "schema"
columns <- maybe mempty (.: "fields") mSchema :: Aeson.Parser (Vector BigQueryField)
rowsJSON <- fmap (fromMaybe mempty) (resp .:? "rows" :: Aeson.Parser (Maybe (Vector Aeson.Value)))
rows <-
V.imapM
@ -754,7 +751,7 @@ instance Aeson.FromJSON BigQueryField where
fields <- o .: "fields"
pure (FieldSTRUCT fields)
| otherwise -> fail ("Unsupported field type: " ++ show flag)
mode <- o .: "mode"
mode <- o .:? "mode" .!= Nullable
pure BigQueryField {..}
)

View File

@ -59,7 +59,7 @@ instance FromJSON RestTableList where
case kind of
("bigquery#tableList" :: Text) -> do
nextPageToken <- o .:? "nextPageToken"
tables <- o .: "tables"
tables <- o .:? "tables" .!= []
pure RestTableList {..}
_ -> fail "Expected kind of bigquery#tableList"
)
@ -117,7 +117,7 @@ instance FromJSON RestFieldSchema where
( \o -> do
type' <- o .: "type"
name <- o .: "name"
mode <- fmap (fromMaybe Nullable) (o .:? "mode")
mode <- o .:? "mode" .!= Nullable
pure RestFieldSchema {..}
)

View File

@ -0,0 +1,20 @@
# Regression fixture: replace_metadata must accept a BigQuery source whose
# "tables" list is empty (previously this was rejected). Expects a plain
# success response from /v1/metadata.
description: Replace schema cache (metadata) with no initial tables
url: /v1/metadata
status: 200
response:
message: "success"
query:
type: replace_metadata
args:
version: 3
# Strict mode: the empty-tables source must be fully consistent, not
# merely tolerated as inconsistent metadata.
allow_inconsistent_metadata: false
sources:
- name: hasura_global_limited
kind: bigquery
configuration:
# Credentials are resolved from the server's environment at load time.
service_account: {from_env: HASURA_BIGQUERY_SERVICE_ACCOUNT}
project_id: {from_env: HASURA_BIGQUERY_PROJECT_ID}
datasets:
- hasura_test
global_select_limit: 1
tables: []

View File

@ -0,0 +1,9 @@
description: Replace schema cache (metadata) with no initial tables
url: /v1/metadata
status: 200
response:
message: success
query:
type: replace_metadata
args:
tables: []

View File

@ -31,6 +31,9 @@ class TestMetadata:
def test_replace_metadata(self, hge_ctx):
    # Round-trip the full replace_metadata fixture against the server and
    # compare the response with the expected YAML.
    check_query_f(hge_ctx, self.dir() + '/replace_metadata.yaml')
def test_replace_metadata_no_tables(self, hge_ctx):
    # Regression: replace_metadata must succeed when "tables" is empty.
    check_query_f(hge_ctx, self.dir() + '/replace_metadata_no_tables.yaml')
def test_replace_metadata_wo_remote_schemas(self, hge_ctx):
    # replace_metadata without any remote schemas configured.
    check_query_f(hge_ctx, self.dir() + '/replace_metadata_wo_rs.yaml')
@ -156,7 +159,7 @@ class TestMetadata:
def test_pg_function_tracking_with_comment(self, hge_ctx):
check_query_f(hge_ctx, self.dir() + '/pg_track_function_with_comment_setup.yaml')
# make an introspection query to see if the description of the function has changed
introspection_query = """{
__schema {
@ -421,3 +424,14 @@ class TestSetTableCustomizationCommon:
def test_set_table_customization(self, hge_ctx):
    # Backend-agnostic customization test: backend_suffix picks the
    # fixture variant matching the backend under test.
    check_query_f(hge_ctx, self.dir() + hge_ctx.backend_suffix('/set_table_customization') + '.yaml')
@pytest.mark.parametrize("backend", ['bigquery'])
@usefixtures('per_method_tests_db_state')
class TestMetadataBigquery:
    """Metadata API tests that run only against the BigQuery backend."""

    def test_replace_metadata_no_tables(self, hge_ctx):
        # Regression: a BigQuery source with an empty "tables" list must be
        # accepted by replace_metadata.
        check_query_f(hge_ctx, self.dir() + '/replace_metadata_no_tables.yaml')

    @classmethod
    def dir(cls):
        # Directory holding the BigQuery-specific metadata test fixtures.
        return "queries/v1/metadata/bigquery"