server/tests/bigquery: Expose internal error information + handle teardown robustly + retry on jobRateLimitExceeded error

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/4810
Co-authored-by: Samir Talwar <47582+SamirTalwar@users.noreply.github.com>
GitOrigin-RevId: e8696491475a668976a0e86322b7a8772ed9f946
Gil Mizrahi authored on 2022-06-27 17:32:31 +03:00, committed by hasura-bot
parent d70ba47f5f
commit 38b1c0d774
19 changed files with 310 additions and 317 deletions

View File

@ -84,7 +84,7 @@ runSQL_ f (BigQueryRunSQL query source) = do
Execute.BigQuery {query = LT.fromStrict query, parameters = mempty}
case result of
Left executeProblem -> do
let errorMessage = Execute.executeProblemMessage executeProblem
let errorMessage = Execute.executeProblemMessage Execute.HideDetails executeProblem
throwError (err400 BigQueryError errorMessage) {qeInternal = Just $ ExtraInternal $ J.toJSON executeProblem}
Right recordSet ->
pure

View File

@ -10,12 +10,13 @@ module Hasura.Backends.BigQuery.Execute
executeBigQuery,
executeProblemMessage,
BigQuery (..),
OutputValue (..),
RecordSet (..),
Execute,
ExecuteProblem (..),
Value (..),
FieldNameText (..),
OutputValue (..),
RecordSet (..),
ShowDetails (..),
Value (..),
)
where
@ -33,6 +34,7 @@ import Data.Maybe
import Data.Text qualified as T
import Data.Text.Lazy qualified as LT
import Data.Text.Lazy.Builder qualified as LT
import Data.Text.Lazy.Encoding qualified as LT
import Data.Text.Read qualified as TR
import Data.Time
import Data.Time.Format.ISO8601 (iso8601Show)
@ -116,6 +118,12 @@ data ExecuteProblem
| RESTRequestNonOK Status Aeson.Value
deriving (Generic)
-- | We use this to hide certain details from the front-end, while allowing
-- them in tests. We have not actually decided whether showing the details is
-- insecure, but until we decide otherwise, it's probably best to err on the side
-- of caution.
data ShowDetails = HideDetails | InsecurelyShowDetails
instance Aeson.ToJSON ExecuteProblem where
toJSON =
Aeson.object . \case
@ -124,12 +132,16 @@ instance Aeson.ToJSON ExecuteProblem where
ExecuteRunBigQueryProblem problem -> ["execute_run_bigquery_problem" Aeson..= problem]
RESTRequestNonOK _ resp -> ["rest_request_non_ok" Aeson..= resp]
executeProblemMessage :: ExecuteProblem -> Text
executeProblemMessage = \case
GetJobDecodeProblem err -> "Fetching bigquery job status, cannot decode HTTP response; " <> tshow err
CreateQueryJobDecodeProblem err -> "Creating bigquery job, cannot decode HTTP response: " <> tshow err
ExecuteRunBigQueryProblem _ -> "Cannot execute bigquery request"
RESTRequestNonOK status _ -> "Bigquery HTTP request failed with status code " <> tshow (statusCode status) <> " and status message " <> tshow (statusMessage status)
executeProblemMessage :: ShowDetails -> ExecuteProblem -> Text
executeProblemMessage showDetails = \case
GetJobDecodeProblem err -> "Fetching BigQuery job status, cannot decode HTTP response; " <> tshow err
CreateQueryJobDecodeProblem err -> "Creating BigQuery job, cannot decode HTTP response: " <> tshow err
ExecuteRunBigQueryProblem _ -> "Cannot execute BigQuery request"
RESTRequestNonOK status body ->
let summary = "BigQuery HTTP request failed with status " <> tshow (statusCode status) <> " " <> tshow (statusMessage status)
in case showDetails of
HideDetails -> summary
InsecurelyShowDetails -> summary <> " and body:\n" <> LT.toStrict (LT.decodeUtf8 (Aeson.encode body))
-- | Execute monad; as queries are performed, the record sets are
-- stored in the map.
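To make the new ShowDetails flag concrete: a minimal sketch (not part of the commit) of how the two levels render the same problem. status400 comes from Network.HTTP.Types; the response body is a stand-in:

{-# LANGUAGE ImportQualifiedPost #-}
{-# LANGUAGE OverloadedStrings #-}

import Data.Aeson qualified as Aeson
import Data.Text (Text)
import Hasura.Backends.BigQuery.Execute
import Network.HTTP.Types (status400)

renderings :: (Text, Text)
renderings =
  let problem = RESTRequestNonOK status400 (Aeson.object ["reason" Aeson..= ("rateLimitExceeded" :: Text)])
   in ( -- what API clients get: just the summary
        executeProblemMessage HideDetails problem,
        -- what tests get: the summary plus the JSON-encoded response body
        executeProblemMessage InsecurelyShowDetails problem
      )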

View File

@ -10,7 +10,6 @@ import Data.Text qualified as T
import Data.Text.Lazy qualified as LT
import Data.Text.Lazy.Builder qualified as LT
import Data.Vector qualified as V
import Hasura.Backends.BigQuery.Execute (executeProblemMessage)
import Hasura.Backends.BigQuery.Execute qualified as DataLoader
import Hasura.Backends.BigQuery.FromIr qualified as BigQuery
import Hasura.Backends.BigQuery.Plan
@ -78,7 +77,7 @@ bqDBQueryPlan userInfo sourceName sourceConfig qrf = do
sourceConfig
(DataLoader.executeSelect select)
case result of
Left err -> throw500WithDetail (executeProblemMessage err) $ Aeson.toJSON err
Left err -> throw500WithDetail (DataLoader.executeProblemMessage DataLoader.HideDetails err) $ Aeson.toJSON err
Right recordSet -> pure $! recordSetToEncJSON (BigQuery.selectCardinality select) recordSet
pure $ DBStepInfo @'BigQuery sourceName sourceConfig (Just (selectSQLTextForExplain select)) action

View File

@ -18,21 +18,17 @@ module Harness.Backend.BigQuery
dropTable,
untrackTable,
setup,
setupWithAdditionalRelationship,
teardown,
teardownWithAdditionalRelationship,
setupTablesAction,
setupPermissionsAction,
)
where
import Control.Concurrent.Extended
import Control.Monad (void)
import Data.Aeson
( Value (..),
object,
(.=),
)
import Data.Aeson.Key qualified as K
import Data.Foldable (for_)
import Data.String
import Data.Text (Text, pack, replace)
@ -53,8 +49,6 @@ import Harness.Test.Permissions qualified as Permissions
import Harness.Test.Schema
( BackendScalarType (..),
BackendScalarValue (..),
ManualRelationship (..),
Reference (..),
ScalarValue (..),
Table (..),
)
@ -63,7 +57,7 @@ import Harness.TestEnvironment (TestEnvironment)
import Hasura.Backends.BigQuery.Connection (initConnection)
import Hasura.Backends.BigQuery.Execute qualified as Execute
import Hasura.Backends.BigQuery.Source (ServiceAccount)
import Hasura.Prelude (onLeft, tshow)
import Hasura.Prelude (onLeft, seconds, tshow)
import Prelude
getServiceAccount :: HasCallStack => IO ServiceAccount
@ -110,7 +104,7 @@ bigQueryError e query =
error
( unlines
[ "BigQuery query error:",
T.unpack (Execute.executeProblemMessage e),
T.unpack (Execute.executeProblemMessage Execute.InsecurelyShowDetails e),
"SQL was:",
query
]
@ -266,40 +260,22 @@ args:
datasets: [*dataset]
|]
-- | Converts 'ManualRelationship' to 'Table'. Should be only used for
-- building the relationship.
relationshipToTable :: ManualRelationship -> Schema.Table
relationshipToTable ManualRelationship {..} =
(Schema.table relSourceTable)
{ tablePrimaryKey = [],
tableColumns = [],
tableData = [],
tableReferences =
[ Reference
{ referenceLocalColumn = relSourceColumn,
referenceTargetTable = relTargetTable,
referenceTargetColumn = relTargetColumn
}
]
}
-- | Same as 'setup' but additionally sets up the manual
-- relationship that might be required for some cases.
setupWithAdditionalRelationship :: [Schema.Table] -> [ManualRelationship] -> (TestEnvironment, ()) -> IO ()
setupWithAdditionalRelationship tables rels (testEnvironment, _) = do
setup tables (testEnvironment, ())
let relTables = map relationshipToTable rels
for_ relTables $ \table -> do
trackObjectRelationships BigQuery table testEnvironment
trackArrayRelationships BigQuery table testEnvironment
-- | Setup the schema in the most expected way.
-- NOTE: Certain test modules may warrant having their own local version.
setup :: [Schema.Table] -> (TestEnvironment, ()) -> IO ()
setup tables (testEnvironment, _) = do
setup tables' (testEnvironment, _) = do
let dataset = Constants.bigqueryDataset
source = defaultSource BigQuery
backendType = defaultBackendTypeString BigQuery
tables =
map
( \t ->
t
{ tableReferences = [],
tableManualRelationships = tableReferences t <> tableManualRelationships t
}
)
tables'
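-- Rationale (inferred, not stated in the commit): BigQuery does not enforce
-- foreign-key constraints, so the mapping above re-registers every
-- 'tableReferences' entry as a manual relationship before tracking.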
-- Clear and reconfigure the metadata
serviceAccount <- getServiceAccount
projectId <- getProjectId
@ -320,144 +296,29 @@ args:
|]
-- Setup and track tables
for_ tables $ \table -> do
createTable table
insertTable table
retryIfJobRateLimitExceeded $ createTable table
retryIfJobRateLimitExceeded $ insertTable table
trackTable testEnvironment table
-- Setup relationships
for_ tables $ \table -> do
trackObjectRelationships BigQuery table testEnvironment
trackArrayRelationships BigQuery table testEnvironment
Schema.trackObjectRelationships BigQuery table testEnvironment
Schema.trackArrayRelationships BigQuery table testEnvironment
-- | Teardown the schema and tracking in the most expected way.
-- NOTE: Certain test modules may warrant having their own version.
teardown :: [Schema.Table] -> (TestEnvironment, ()) -> IO ()
teardown (reverse -> tables) (testEnvironment, _) = do
-- Teardown relationships first
forFinally_ tables $ \table ->
untrackRelationships BigQuery table testEnvironment
-- Then teardown tables
forFinally_ tables $ \table -> do
untrackTable testEnvironment table
dropTable table
-- | Same as 'teardown' but also tears down the manual relationship that
-- was set up.
teardownWithAdditionalRelationship :: [Schema.Table] -> [ManualRelationship] -> (TestEnvironment, ()) -> IO ()
teardownWithAdditionalRelationship tables rels (testEnvironment, _) = do
let relTables = map relationshipToTable rels
for_ relTables $ \table -> do
untrackRelationships BigQuery table testEnvironment
-- We do teardown in the reverse order to ensure that the tables
-- that have dependency are removed first. This has to be only done
-- for BigQuery backend since the metadata tracks the relationship
-- between them.
teardown (reverse tables) (testEnvironment, ())
-- | Bigquery specific function for tracking array relationships
trackArrayRelationships :: HasCallStack => BackendType -> Table -> TestEnvironment -> IO ()
trackArrayRelationships backend Table {tableName, tableReferences} testEnvironment = do
let source = defaultSource backend
dataset = Constants.bigqueryDataset
requestType = source <> "_create_array_relationship"
for_ tableReferences $ \Reference {referenceLocalColumn, referenceTargetTable, referenceTargetColumn} -> do
let relationshipName = Schema.mkArrayRelationshipName referenceTargetTable referenceTargetColumn
manualConfiguration :: Value
manualConfiguration =
object
[ "remote_table"
.= object
[ "dataset" .= String (T.pack dataset),
"name" .= String referenceTargetTable
],
"column_mapping"
.= object [K.fromText referenceLocalColumn .= referenceTargetColumn]
]
payload =
[yaml|
type: *requestType
args:
source: *source
table:
dataset: *dataset
name: *tableName
name: *relationshipName
using:
manual_configuration: *manualConfiguration
|]
GraphqlEngine.postMetadata_
testEnvironment
payload
-- | Bigquery specific function for tracking object relationships
trackObjectRelationships :: HasCallStack => BackendType -> Table -> TestEnvironment -> IO ()
trackObjectRelationships backend Table {tableName, tableReferences} testEnvironment = do
let source = defaultSource backend
dataset = Constants.bigqueryDataset
requestType = source <> "_create_object_relationship"
for_ tableReferences $ \ref@Reference {referenceLocalColumn, referenceTargetTable, referenceTargetColumn} -> do
let relationshipName = Schema.mkObjectRelationshipName ref
manualConfiguration :: Value
manualConfiguration =
object
[ "remote_table"
.= object
[ "dataset" .= String (T.pack dataset),
"name" .= String referenceTargetTable
],
"column_mapping"
.= object [K.fromText referenceLocalColumn .= referenceTargetColumn]
]
payload =
[yaml|
type: *requestType
args:
source: *source
table:
dataset: *dataset
name: *tableName
name: *relationshipName
using:
manual_configuration: *manualConfiguration
|]
GraphqlEngine.postMetadata_
testEnvironment
payload
-- | Bigquery specific function for untracking relationships
-- Overriding `Schema.untrackRelationships` here because bigquery's API expects a `dataset` key
untrackRelationships :: HasCallStack => BackendType -> Table -> TestEnvironment -> IO ()
untrackRelationships backend Table {tableName, tableReferences} testEnvironment = do
let source = defaultSource backend
dataset = Constants.bigqueryDataset
requestType = source <> "_drop_relationship"
for_ tableReferences $ \ref@Reference {referenceTargetTable, referenceTargetColumn} -> do
let arrayRelationshipName = Schema.mkArrayRelationshipName referenceTargetTable referenceTargetColumn
objectRelationshipName = Schema.mkObjectRelationshipName ref
-- drop array relationships
GraphqlEngine.postMetadata_
testEnvironment
[yaml|
type: *requestType
args:
source: *source
table:
dataset: *dataset
name: *tableName
relationship: *arrayRelationshipName
|]
-- drop object relationships
GraphqlEngine.postMetadata_
testEnvironment
[yaml|
type: *requestType
args:
source: *source
table:
dataset: *dataset
name: *tableName
relationship: *objectRelationshipName
|]
finally
-- Teardown relationships first
( forFinally_ tables $ \table ->
Schema.untrackRelationships BigQuery table testEnvironment
)
-- Then teardown tables
( forFinally_ tables $ \table -> do
finally
(untrackTable testEnvironment table)
(dropTable table)
)
setupTablesAction :: [Schema.Table] -> TestEnvironment -> SetupAction
setupTablesAction ts env =
@ -478,3 +339,21 @@ setupPermissions permissions env = Permissions.setup "bq" permissions env
-- | Remove the given permissions from the graphql engine in a TestEnvironment.
teardownPermissions :: [Permissions.Permission] -> TestEnvironment -> IO ()
teardownPermissions permissions env = Permissions.teardown "bq" permissions env
-- | We get @jobRateLimitExceeded@ errors from BigQuery if we run too many DML operations in short intervals.
-- This function tries to fix that by retrying after a few seconds if there's an error.
-- Will always try at least once.
--
-- See <https://cloud.google.com/bigquery/docs/troubleshoot-quotas>.
retryIfJobRateLimitExceeded :: IO () -> IO ()
retryIfJobRateLimitExceeded action = retry 0
where
retry retryNumber = do
action `catch` \(SomeException err) ->
if "jobRateLimitExceeded" `T.isInfixOf` (tshow err)
&& retryNumber < maxRetriesRateLimitExceeded
then do
-- exponential backoff
sleep (seconds $ 2 ^ retryNumber)
retry (retryNumber + 1)
else throwIO err
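With maxRetriesRateLimitExceeded = 4 (added to Harness.Constants below), the sleeps are 2^0, 2^1, 2^2 and 2^3 seconds: at most five attempts, and 1 + 2 + 4 + 8 = 15 seconds of total backoff before the exception is rethrown. A usage sketch mirroring the call sites in 'setup' above:

-- Wrap each BigQuery DML call that can trip the job-rate quota.
setupOne :: Schema.Table -> IO ()
setupOne table = do
  retryIfJobRateLimitExceeded (createTable table)
  retryIfJobRateLimitExceeded (insertTable table)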

View File

@ -112,7 +112,7 @@ createTable Schema.Table {tableName, tableColumns, tablePrimaryKey = pk, tableRe
T.unpack $
T.unwords
[ "CREATE TABLE",
T.pack Constants.citusDb <> "." <> tableName,
T.pack Constants.citusDb <> "." <> wrapIdentifier tableName,
"(",
commaSeparated $
(mkColumn <$> tableColumns)
@ -263,14 +263,18 @@ setupPermissionsAction permissions env =
-- | Teardown the schema and tracking in the most expected way.
-- NOTE: Certain test modules may warrant having their own version.
teardown :: HasCallStack => [Schema.Table] -> (TestEnvironment, ()) -> IO ()
teardown tables (testEnvironment, _) = do
forFinally_ (reverse tables) $ \table ->
finally
(Schema.untrackRelationships Citus table testEnvironment)
( finally
teardown (reverse -> tables) (testEnvironment, _) = do
finally
-- Teardown relationships first
( forFinally_ tables $ \table ->
Schema.untrackRelationships Citus table testEnvironment
)
-- Then teardown tables
( forFinally_ tables $ \table ->
finally
(untrackTable testEnvironment table)
(dropTable table)
)
)
-- | Setup the given permissions to the graphql engine in a TestEnvironment.
setupPermissions :: [Permissions.Permission] -> TestEnvironment -> IO ()
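This nested 'finally' teardown appears for BigQuery above, for Citus here, and for MySQL, Postgres, and SQL Server below. Reduced to a skeleton, with hypothetical per-table cleanup actions standing in for the real ones:

-- 'forFinally_' (from Harness.Exceptions) is assumed to keep iterating past
-- individual failures; the outer 'finally' guarantees the table-teardown
-- phase runs even if untracking relationships throws, and the inner
-- 'finally' drops a table even when untracking it fails.
robustTeardown :: [Schema.Table] -> IO ()
robustTeardown tables =
  finally
    (forFinally_ tables untrackRelationshipsFor)
    ( forFinally_ tables $ \table ->
        finally (untrackTableFor table) (dropTableFor table)
    )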

View File

@ -237,14 +237,18 @@ setup tables (testEnvironment, _) = do
-- | Teardown the schema and tracking in the most expected way.
-- NOTE: Certain test modules may warrant having their own version.
teardown :: [Schema.Table] -> (TestEnvironment, ()) -> IO ()
teardown tables (testEnvironment, _) = do
forFinally_ (reverse tables) $ \table ->
finally
(Schema.untrackRelationships MySQL table testEnvironment)
( finally
teardown (reverse -> tables) (testEnvironment, _) = do
finally
-- Teardown relationships first
( forFinally_ tables $ \table ->
Schema.untrackRelationships MySQL table testEnvironment
)
-- Then teardown tables
( forFinally_ tables $ \table ->
finally
(untrackTable testEnvironment table)
(dropTable table)
)
)
setupTablesAction :: [Schema.Table] -> TestEnvironment -> SetupAction
setupTablesAction ts env =

View File

@ -39,7 +39,7 @@ import Harness.Constants as Constants
import Harness.Exceptions
import Harness.GraphqlEngine qualified as GraphqlEngine
import Harness.Quoter.Yaml (yaml)
import Harness.Test.Context (BackendType (Postgres), defaultBackendTypeString, defaultSource)
import Harness.Test.Context (BackendType (Postgres), defaultBackendTypeString, defaultSchema, defaultSource)
import Harness.Test.Fixture (SetupAction (..))
import Harness.Test.Permissions qualified as Permissions
import Harness.Test.Schema (BackendScalarType (..), BackendScalarValue (..), ScalarValue (..))
@ -119,7 +119,7 @@ createTable Schema.Table {tableName, tableColumns, tablePrimaryKey = pk, tableRe
T.unpack $
T.unwords
[ "CREATE TABLE",
T.pack Constants.postgresDb <> "." <> tableName,
T.pack Constants.postgresDb <> "." <> wrapIdentifier tableName,
"(",
commaSeparated $
(mkColumn <$> tableColumns)
@ -170,7 +170,7 @@ mkReference Schema.Reference {referenceLocalColumn, referenceTargetTable, refere
wrapIdentifier referenceLocalColumn,
")",
"REFERENCES",
referenceTargetTable,
T.pack (defaultSchema Postgres) <> "." <> wrapIdentifier referenceTargetTable,
"(",
wrapIdentifier referenceTargetColumn,
")",
@ -261,14 +261,18 @@ setup tables (testEnvironment, _) = do
-- | Teardown the schema and tracking in the most expected way.
-- NOTE: Certain test modules may warrant having their own version.
teardown :: [Schema.Table] -> (TestEnvironment, ()) -> IO ()
teardown tables (testEnvironment, _) = do
forFinally_ (reverse tables) $ \table ->
finally
(Schema.untrackRelationships Postgres table testEnvironment)
( finally
teardown (reverse -> tables) (testEnvironment, _) = do
finally
-- Teardown relationships first
( forFinally_ tables $ \table ->
Schema.untrackRelationships Postgres table testEnvironment
)
-- Then teardown tables
( forFinally_ tables $ \table ->
finally
(untrackTable testEnvironment table)
(dropTable table)
)
)
setupTablesAction :: [Schema.Table] -> TestEnvironment -> SetupAction
setupTablesAction ts env =

View File

@ -257,14 +257,18 @@ setup tables (testEnvironment, _) = do
-- | Teardown the schema and tracking in the most expected way.
-- NOTE: Certain test modules may warrant having their own version.
teardown :: HasCallStack => [Schema.Table] -> (TestEnvironment, ()) -> IO ()
teardown tables (testEnvironment, _) = do
forFinally_ (reverse tables) $ \table ->
finally
(Schema.untrackRelationships SQLServer table testEnvironment)
( finally
teardown (reverse -> tables) (testEnvironment, _) = do
finally
-- Teardown relationships first
( forFinally_ tables $ \table ->
Schema.untrackRelationships SQLServer table testEnvironment
)
-- Then teardown tables
( forFinally_ tables $ \table ->
finally
(untrackTable testEnvironment table)
(dropTable table)
)
)
setupTablesAction :: [Schema.Table] -> TestEnvironment -> SetupAction
setupTablesAction ts env =

View File

@ -35,6 +35,7 @@ module Harness.Constants
citusDb,
serveOptions,
dataConnectorDb,
maxRetriesRateLimitExceeded,
)
where
@ -280,3 +281,6 @@ engineLogLevel = Nothing
-- These are important for the test suite.
testSuiteEnabledApis :: HashSet API
testSuiteEnabledApis = Set.fromList [METADATA, GRAPHQL, DEVELOPER, CONFIG]
maxRetriesRateLimitExceeded :: Int
maxRetriesRateLimitExceeded = 4

View File

@ -4,9 +4,11 @@ module Harness.Test.BackendType
defaultSource,
defaultBackendTypeString,
defaultSchema,
schemaKeyword,
)
where
import Data.Aeson.Key (Key)
import Harness.Constants qualified as Constants (bigqueryDataset, citusDb, dataConnectorDb, mysqlDb, postgresDb, sqlserverDb)
import Prelude
@ -49,3 +51,12 @@ defaultSchema = \case
BigQuery -> Constants.bigqueryDataset
Citus -> Constants.citusDb
DataConnector -> Constants.dataConnectorDb
schemaKeyword :: BackendType -> Key
schemaKeyword = \case
Postgres -> "schema"
MySQL -> "schema"
SQLServer -> "schema"
BigQuery -> "dataset"
Citus -> "schema"
DataConnector -> "schema"
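A sketch (not in the commit) of how schemaKeyword pairs with defaultSchema when building table objects for metadata requests; BigQuery takes a "dataset" key where every other backend takes "schema":

{-# LANGUAGE OverloadedStrings #-}

import Data.Aeson (Value, object, (.=))
import Data.Text (Text)
import Harness.Test.BackendType

-- tableObjFor BigQuery "author" ~ {"dataset": <the BigQuery dataset>, "name": "author"}
-- tableObjFor Postgres "author" ~ {"schema": <the Postgres schema>, "name": "author"}
tableObjFor :: BackendType -> Text -> Value
tableObjFor backend name =
  object
    [ schemaKeyword backend .= defaultSchema backend,
      "name" .= name
    ]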

View File

@ -14,6 +14,7 @@ module Harness.Test.Context
defaultSource,
defaultBackendTypeString,
defaultSchema,
schemaKeyword,
noLocalTestEnvironment,
Options (..),
combineOptions,

View File

@ -33,14 +33,20 @@ module Harness.Test.Schema
)
where
import Data.Aeson
( Value (..),
object,
(.=),
)
import Data.Aeson.Key qualified as K
import Data.Foldable (for_)
import Data.Text (Text)
import Data.Text (Text, pack)
import Data.Time (UTCTime, defaultTimeLocale)
import Data.Time.Format (parseTimeOrError)
import Harness.Exceptions
import Harness.GraphqlEngine qualified as GraphqlEngine
import Harness.Quoter.Yaml (yaml)
import Harness.Test.Context (BackendType, defaultBackendTypeString, defaultSchema, defaultSource)
import Harness.Test.Context (BackendType, defaultBackendTypeString, defaultSchema, defaultSource, schemaKeyword)
import Harness.TestEnvironment (TestEnvironment)
import Prelude
@ -59,6 +65,7 @@ data Table = Table
tableColumns :: [Column],
tablePrimaryKey :: [Text],
tableReferences :: [Reference],
tableManualRelationships :: [Reference],
tableData :: [[ScalarValue]],
tableUniqueConstraints :: [UniqueConstraint]
}
@ -70,7 +77,7 @@ data UniqueConstraint = UniqueConstraintColumns [Text] | UniqueConstraintExpress
-- | Create a table from just a name.
-- Use record updates to modify the result.
table :: Text -> Table
table tableName = Table tableName [] [] [] [] []
table tableName = Table tableName [] [] [] [] [] []
-- | Foreign keys for backends that support it.
data Reference = Reference
@ -273,13 +280,19 @@ args:
-- | Helper to create the object relationship name
mkObjectRelationshipName :: Reference -> Text
mkObjectRelationshipName Reference {referenceLocalColumn, referenceTargetTable} = referenceTargetTable <> "_by_" <> referenceLocalColumn
mkObjectRelationshipName Reference {referenceLocalColumn, referenceTargetTable, referenceTargetColumn} =
referenceTargetTable <> "_by_" <> referenceLocalColumn <> "_to_" <> referenceTargetColumn
-- | Unified track object relationships
trackObjectRelationships :: HasCallStack => BackendType -> Table -> TestEnvironment -> IO ()
trackObjectRelationships backend Table {tableName, tableReferences} testEnvironment = do
trackObjectRelationships backend Table {tableName, tableReferences, tableManualRelationships} testEnvironment = do
let source = defaultSource backend
schema = defaultSchema backend
tableObj =
object
[ schemaKeyword backend .= String (pack schema),
"name" .= String tableName
]
requestType = source <> "_create_object_relationship"
for_ tableReferences $ \ref@Reference {referenceLocalColumn} -> do
let relationshipName = mkObjectRelationshipName ref
@ -289,74 +302,141 @@ trackObjectRelationships backend Table {tableName, tableReferences} testEnvironm
type: *requestType
args:
source: *source
table:
name: *tableName
schema: *schema
table: *tableObj
name: *relationshipName
using:
foreign_key_constraint_on: *referenceLocalColumn
|]
for_ tableManualRelationships $ \ref@Reference {referenceLocalColumn, referenceTargetTable, referenceTargetColumn} -> do
let relationshipName = mkObjectRelationshipName ref
targetTableObj =
object
[ schemaKeyword backend .= String (pack schema),
"name" .= String referenceTargetTable
]
manualConfiguration :: Value
manualConfiguration =
object
[ "remote_table" .= targetTableObj,
"column_mapping"
.= object [K.fromText referenceLocalColumn .= referenceTargetColumn]
]
payload =
[yaml|
type: *requestType
args:
source: *source
table: *tableObj
name: *relationshipName
using:
manual_configuration: *manualConfiguration
|]
GraphqlEngine.postMetadata_ testEnvironment payload
-- | Helper to create the array relationship name
mkArrayRelationshipName :: Text -> Text -> Text
mkArrayRelationshipName tableName referenceLocalColumn = tableName <> "s_by_" <> referenceLocalColumn
mkArrayRelationshipName :: Text -> Text -> Text -> Text
mkArrayRelationshipName tableName referenceLocalColumn referenceTargetColumn =
tableName <> "s_by_" <> referenceLocalColumn <> "_to_" <> referenceTargetColumn
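-- Worked example (illustration, not in the diff): for an "article" table with
-- Reference {referenceLocalColumn = "author_id", referenceTargetTable = "author",
-- referenceTargetColumn = "id"}, 'mkObjectRelationshipName' gives
-- "author_by_author_id_to_id", and the call sites below (which pass the target
-- column before the local one) give the array relationship name
-- "articles_by_id_to_author_id"; hence the renames in the test files further down.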
-- | Unified track array relationships
trackArrayRelationships :: HasCallStack => BackendType -> Table -> TestEnvironment -> IO ()
trackArrayRelationships backend Table {tableName, tableReferences} testEnvironment = do
trackArrayRelationships backend Table {tableName, tableReferences, tableManualRelationships} testEnvironment = do
let source = defaultSource backend
schema = defaultSchema backend
tableObj =
object
[ schemaKeyword backend .= String (pack schema),
"name" .= String tableName
]
requestType = source <> "_create_array_relationship"
for_ tableReferences $ \Reference {referenceLocalColumn, referenceTargetTable} -> do
let relationshipName = mkArrayRelationshipName tableName referenceLocalColumn
for_ tableReferences $ \Reference {referenceLocalColumn, referenceTargetTable, referenceTargetColumn} -> do
let relationshipName = mkArrayRelationshipName tableName referenceTargetColumn referenceLocalColumn
targetTableObj =
object
[ schemaKeyword backend .= String (pack schema),
"name" .= String referenceTargetTable
]
GraphqlEngine.postMetadata_
testEnvironment
[yaml|
type: *requestType
args:
source: *source
table:
name: *referenceTargetTable
schema: *schema
table: *targetTableObj
name: *relationshipName
using:
foreign_key_constraint_on:
table:
name: *tableName
schema: *schema
table: *tableObj
column: *referenceLocalColumn
|]
for_ tableManualRelationships $ \Reference {referenceLocalColumn, referenceTargetTable, referenceTargetColumn} -> do
let relationshipName = mkArrayRelationshipName tableName referenceTargetColumn referenceLocalColumn
targetTableObj =
object
[ schemaKeyword backend .= String (pack schema),
"name" .= String referenceTargetTable
]
manualConfiguration :: Value
manualConfiguration =
object
[ "remote_table"
.= tableObj,
"column_mapping"
.= object [K.fromText referenceTargetColumn .= referenceLocalColumn]
]
payload =
[yaml|
type: *requestType
args:
source: *source
table: *targetTableObj
name: *relationshipName
using:
manual_configuration: *manualConfiguration
|]
GraphqlEngine.postMetadata_ testEnvironment payload
-- | Unified untrack relationships
untrackRelationships :: HasCallStack => BackendType -> Table -> TestEnvironment -> IO ()
untrackRelationships backend Table {tableName, tableReferences} testEnvironment = do
untrackRelationships backend Table {tableName, tableReferences, tableManualRelationships} testEnvironment = do
let source = defaultSource backend
schema = defaultSchema backend
tableObj =
object
[ schemaKeyword backend .= String (pack schema),
"name" .= String tableName
]
requestType = source <> "_drop_relationship"
for_ tableReferences $ \ref@Reference {referenceLocalColumn, referenceTargetTable} -> do
let arrayRelationshipName = mkArrayRelationshipName tableName referenceLocalColumn
forFinally_ (tableManualRelationships <> tableReferences) $ \ref@Reference {referenceLocalColumn, referenceTargetTable, referenceTargetColumn} -> do
let arrayRelationshipName = mkArrayRelationshipName tableName referenceTargetColumn referenceLocalColumn
objectRelationshipName = mkObjectRelationshipName ref
-- drop array relationships
GraphqlEngine.postMetadata_
testEnvironment
[yaml|
type: *requestType
args:
source: *source
table:
schema: *schema
name: *referenceTargetTable
relationship: *arrayRelationshipName
|]
-- drop object relationships
GraphqlEngine.postMetadata_
testEnvironment
[yaml|
type: *requestType
args:
source: *source
table:
schema: *schema
name: *tableName
relationship: *objectRelationshipName
|]
targetTableObj =
object
[ schemaKeyword backend .= String (pack schema),
"name" .= String referenceTargetTable
]
finally
( -- drop array relationship
GraphqlEngine.postMetadata_
testEnvironment
[yaml|
type: *requestType
args:
source: *source
table: *targetTableObj
relationship: *arrayRelationshipName
|]
)
( -- drop object relationship
GraphqlEngine.postMetadata_
testEnvironment
[yaml|
type: *requestType
args:
source: *source
table: *tableObj
relationship: *objectRelationshipName
|]
)
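For the article/author pair used in the tests below, the manual-relationship branch of trackArrayRelationships above would post a payload along these lines (a sketch; the source and dataset names are stand-ins for the Constants values):

examplePayload :: Value
examplePayload =
  [yaml|
type: bigquery_create_array_relationship
args:
  source: bigquery
  table:
    dataset: hasura_test
    name: author
  name: articles_by_id_to_author_id
  using:
    manual_configuration:
      remote_table:
        dataset: hasura_test
        name: article
      column_mapping:
        id: author_id
|]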

View File

@ -111,8 +111,8 @@ query {
# we put id=1 restrictions here because we don't assume ordering support
hasura_author(where: {id: {_eq: 1}}) {
id
# the _by_author_id part is necessary to distinguish between multiple foreign key relationships between the same two tables
articles_by_author_id(where: {id: {_eq: 1}}) {
# the _by_id_to_author_id part is necessary to distinguish between multiple foreign key relationships between the same two tables
articles_by_id_to_author_id(where: {id: {_eq: 1}}) {
id
}
}
@ -123,6 +123,6 @@ query {
data:
hasura_author:
- id: 1
articles_by_author_id:
articles_by_id_to_author_id:
- id: 1
|]

View File

@ -11,6 +11,7 @@ module Test.DisableRootFields.SelectPermission.DisableAllRootFieldsRelationshipS
import Harness.Backend.Postgres qualified as Postgres
import Harness.Backend.Sqlserver qualified as SQLServer
import Harness.Exceptions
import Harness.GraphqlEngine qualified as GraphqlEngine
import Harness.Quoter.Graphql (graphql)
import Harness.Quoter.Yaml (shouldReturnYaml, yaml)
@ -101,8 +102,9 @@ postgresSetup (testEnvironment, localTestEnvironment) = do
postgresTeardown :: (TestEnvironment, ()) -> IO ()
postgresTeardown (testEnvironment, localTestEnvironment) = do
postgresDropPermissions testEnvironment
Postgres.teardown schema (testEnvironment, localTestEnvironment)
finally
(postgresDropPermissions testEnvironment)
(Postgres.teardown schema (testEnvironment, localTestEnvironment))
-- No 'article' root fields will be exposed.
-- This scenario tests, when we want to disable querying a specific table but allow
@ -135,7 +137,7 @@ args:
role: user
permission:
filter:
articles_by_author_id:
articles_by_id_to_author_id:
author_id:
_eq: X-Hasura-User-Id
columns: '*'
@ -177,8 +179,9 @@ sqlServerSetup (testEnvironment, localTestEnvironment) = do
sqlServerTeardown :: (TestEnvironment, ()) -> IO ()
sqlServerTeardown (testEnvironment, localTestEnvironment) = do
mssqlDropPermissions testEnvironment
SQLServer.teardown schema (testEnvironment, localTestEnvironment)
finally
(mssqlDropPermissions testEnvironment)
(SQLServer.teardown schema (testEnvironment, localTestEnvironment))
-- No 'article' root fields will be exposed.
-- This scenario tests, when we want to disable querying a specific table but allow
@ -211,7 +214,7 @@ args:
role: user
permission:
filter:
articles_by_author_id:
articles_by_id_to_author_id:
author_id:
_eq: X-Hasura-User-Id
columns: '*'
@ -256,8 +259,8 @@ tests opts = describe "DisableAllRootFieldsRelationshipSpec" $ do
# we put id=1 restrictions here because we don't assume ordering support
hasura_author {
id
# the _by_author_id part is necessary to distinguish between multiple foreign key relationships between the same two tables
articles_by_author_id{
# the _by_id_to_author_id part is necessary to distinguish between multiple foreign key relationships between the same two tables
articles_by_id_to_author_id {
title
}
}
@ -269,7 +272,7 @@ tests opts = describe "DisableAllRootFieldsRelationshipSpec" $ do
data:
hasura_author:
- id: 1
articles_by_author_id:
articles_by_id_to_author_id:
- title: Article 1
- title: Article 3
|]

View File

@ -4,6 +4,7 @@
module Test.InsertCheckPermissionSpec (spec) where
import Harness.Backend.Sqlserver qualified as Sqlserver
import Harness.Exceptions
import Harness.GraphqlEngine qualified as GraphqlEngine
import Harness.Quoter.Graphql (graphql)
import Harness.Quoter.Yaml (shouldReturnYaml, yaml)
@ -87,7 +88,7 @@ args:
role: user
permission:
check:
author_by_author_id:
author_by_author_id_to_id:
id: X-Hasura-User-Id
columns:
- id
@ -103,7 +104,7 @@ args:
role: user
permission:
filter:
author_by_author_id:
author_by_author_id_to_id:
id: X-Hasura-User-Id
columns:
- id
@ -128,8 +129,9 @@ args:
mssqlTeardown :: (TestEnvironment, ()) -> IO ()
mssqlTeardown (testEnvironment, ()) = do
-- teardown permissions
GraphqlEngine.postMetadata_ testEnvironment $
[yaml|
let teardownPermissions =
GraphqlEngine.postMetadata_ testEnvironment $
[yaml|
type: bulk
args:
- type: mssql_drop_insert_permission
@ -155,8 +157,10 @@ args:
role: user
|]
-- and then rest of the teardown
Sqlserver.teardown schema (testEnvironment, ())
finally
teardownPermissions
-- and then rest of the teardown
(Sqlserver.teardown schema (testEnvironment, ()))
--------------------------------------------------------------------------------

View File

@ -268,7 +268,7 @@ data:
[graphql|
mutation {
insert_hasura_withrelationship(
objects: [{ nickname: "the a", alldefaults_by_time_id: {data: {} } }]
objects: [{ nickname: "the a", alldefaults_by_time_id_to_id: {data: {} } }]
on_conflict: {
constraint: withrelationship_pkey,
update_columns: []
@ -278,7 +278,7 @@ mutation {
returning {
id
nickname
alldefaults_by_time_id {
alldefaults_by_time_id_to_id {
id
}
}
@ -293,7 +293,7 @@ data:
returning:
- id: 1
nickname: "the a"
alldefaults_by_time_id:
alldefaults_by_time_id_to_id:
id: 1
|]

View File

@ -19,7 +19,6 @@ import Harness.Test.Context qualified as Context
import Harness.Test.Schema
( BackendScalarType (..),
BackendScalarValue (..),
ManualRelationship (..),
ScalarType (..),
ScalarValue (..),
Table (..),
@ -71,14 +70,8 @@ spec =
Context.Context
{ name = Context.Backend Context.BigQuery,
mkLocalTestEnvironment = Context.noLocalTestEnvironment,
setup =
Bigquery.setupWithAdditionalRelationship
schema
[authorArticles],
teardown =
Bigquery.teardownWithAdditionalRelationship
schema
[authorArticles],
setup = Bigquery.setup schema,
teardown = Bigquery.teardown schema,
customOptions =
Just $
Context.Options
@ -93,15 +86,6 @@ spec =
schema :: [Schema.Table]
schema = [author, article]
authorArticles :: ManualRelationship
authorArticles =
ManualRelationship
{ relSourceTable = "author",
relTargetTable = "article",
relSourceColumn = "id",
relTargetColumn = "author_id"
}
author :: Schema.Table
author =
(table "author")
@ -258,15 +242,15 @@ tests opts = do
query {
hasura_article(where: {id: {_eq: 1}}) {
id
author_by_author_id {
author_by_author_id_to_id {
id
articles_by_author_id(where: {id: {_eq: 1}}) {
articles_by_id_to_author_id(where: {id: {_eq: 1}}) {
id
author_by_author_id {
author_by_author_id_to_id {
id
articles_by_author_id(where: {id: {_eq: 1}}) {
articles_by_id_to_author_id(where: {id: {_eq: 1}}) {
id
author_by_author_id {
author_by_author_id_to_id {
id
}
}
@ -281,15 +265,15 @@ query {
data:
hasura_article:
- id: 1
author_by_author_id:
author_by_author_id_to_id:
id: 1
articles_by_author_id:
articles_by_id_to_author_id:
- id: 1
author_by_author_id:
author_by_author_id_to_id:
id: 1
articles_by_author_id:
articles_by_id_to_author_id:
- id: 1
author_by_author_id:
author_by_author_id_to_id:
id: 1
|]
-- Equivalent python suite: test_nested_select_query_where
@ -304,7 +288,7 @@ query {
hasura_author (where: {name: {_eq: "Author 1"}}) {
id
name
articles_by_author_id (where: {is_published: {_eq: true}}) {
articles_by_id_to_author_id (where: {is_published: {_eq: true}}) {
id
title
content
@ -318,7 +302,7 @@ data:
hasura_author:
- id: 1
name: Author 1
articles_by_author_id:
articles_by_id_to_author_id:
- id: 2
title: Article 2
content: Sample article content 2
@ -331,7 +315,7 @@ data:
id: 1
title: Article 1
content: Sample article content 1
author_by_author_id:
author_by_author_id_to_id:
id: 1
name: Author 1
|]
@ -340,7 +324,7 @@ author_by_author_id:
id: 2
title: Article 2
content: Sample article content 2
author_by_author_id:
author_by_author_id_to_id:
id: 1
name: Author 1
|]
@ -349,7 +333,7 @@ author_by_author_id:
id: 3
title: Article 3
content: Sample article content 3
author_by_author_id:
author_by_author_id_to_id:
id: 2
name: Author 2
|]
@ -363,7 +347,7 @@ query {
id
title
content
author_by_author_id {
author_by_author_id_to_id {
id
name
}
@ -382,11 +366,11 @@ query {
testEnvironment
[graphql|
query {
hasura_article (where: {author_by_author_id: {name: {_eq: "Author 1"}}} ) {
hasura_article (where: {author_by_author_id_to_id: {name: {_eq: "Author 1"}}} ) {
id
title
content
author_by_author_id {
author_by_author_id_to_id {
id
name
}
@ -402,7 +386,7 @@ query {
id: 1
title: Article 1
content: Sample article content 1
author_by_author_id:
author_by_author_id_to_id:
id: 1
name: Author 1
|],
@ -410,7 +394,7 @@ author_by_author_id:
id: 2
title: Article 2
content: Sample article content 2
author_by_author_id:
author_by_author_id_to_id:
id: 1
name: Author 1
|]

View File

@ -114,7 +114,7 @@ usingWhereClause opts = do
query {
hasura_article(where: {id: {_eq: 1}}) {
id
author_by_author_id {
author_by_author_id_to_id {
id
}
}
@ -125,7 +125,7 @@ query {
data:
hasura_article:
- id: 1
author_by_author_id:
author_by_author_id_to_id:
id: 1
|]
@ -140,7 +140,7 @@ nullField opts = do
query {
hasura_article(where: {id: {_eq: 4}}) {
id
author_by_author_id {
author_by_author_id_to_id {
id
}
}
@ -150,6 +150,6 @@ query {
[yaml|
data:
hasura_article:
- author_by_author_id: null
- author_by_author_id_to_id: null
id: 4
|]

View File

@ -111,7 +111,7 @@ internal:
locationType: parameter
message: 'Syntax error: Expected end of input but got keyword SOME at [1:1]'
path: "$"
error: Bigquery HTTP request failed with status code 400 and status message "Bad Request"
error: BigQuery HTTP request failed with status 400 "Bad Request"
code: bigquery-error
|]