{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE ViewPatterns #-}

{-# OPTIONS -Wno-redundant-constraints #-}

-- | BigQuery helpers. This module contains BigQuery-specific schema
-- setup/teardown functions, because the BigQuery backend's API differs
-- from the other backends' (a @dataset@ field, a @manual_configuration@
-- field, etc.).
module Harness.Backend.BigQuery
  ( run_,
    runSql_,
    getServiceAccount,
    getProjectId,
    createTable,
    defaultSourceMetadata,
    insertTable,
    trackTable,
    dropTable,
    untrackTable,
    setup,
    setupWithAdditionalRelationship,
    teardown,
    teardownWithAdditionalRelationship,
  )
where

import Data.Aeson
  ( Value (..),
    object,
    (.=),
  )
import Data.Aeson.Key qualified as K
import Data.Foldable (for_)
import Data.String
import Data.Text (Text, pack, replace)
import Data.Text qualified as T
import Data.Text.Extended (commaSeparated)
import Data.Time (defaultTimeLocale, formatTime)
import GHC.Stack
import Harness.Constants as Constants
import Harness.Env
import Harness.Exceptions
import Harness.GraphqlEngine qualified as GraphqlEngine
import Harness.Quoter.Yaml (yaml)
import Harness.Test.Context
  ( BackendType (BigQuery),
    defaultBackendTypeString,
    defaultSource,
  )
import Harness.Test.Schema
  ( BackendScalarType (..),
    BackendScalarValue (..),
    ManualRelationship (..),
    Reference (..),
    ScalarValue (..),
    Table (..),
  )
import Harness.Test.Schema qualified as Schema
import Harness.TestEnvironment (TestEnvironment)
import Hasura.Backends.BigQuery.Connection (initConnection)
import Hasura.Backends.BigQuery.Execute qualified as Execute
import Hasura.Backends.BigQuery.Source (ServiceAccount)
import Hasura.Prelude (onLeft, tshow)
import Prelude

getServiceAccount :: HasCallStack => IO ServiceAccount
getServiceAccount = getEnvJson Constants.bigqueryServiceKeyVar

getProjectId :: HasCallStack => IO Text
getProjectId = getEnvString Constants.bigqueryProjectIdVar

-- | Run a plain Standard SQL string against the server, ignoring the
-- result. Just checks for errors.
run_ :: HasCallStack => ServiceAccount -> Text -> String -> IO ()
run_ serviceAccount projectId query = do
  conn <- initConnection serviceAccount projectId Nothing
  res <- Execute.executeBigQuery conn Execute.BigQuery {Execute.query = fromString query, Execute.parameters = mempty}
  res `onLeft` (`bigQueryError` query)

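-- A minimal usage sketch (the SQL string is hypothetical; credentials
-- come from the environment via 'getServiceAccount' and 'getProjectId'):
--
-- > do serviceAccount <- getServiceAccount
-- >    projectId <- getProjectId
-- >    run_ serviceAccount projectId "SELECT 1;"
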
runSql_ :: HasCallStack => String -> IO ()
runSql_ query = do
  serviceAccount <- getServiceAccount
  projectId <- getProjectId
  catch
    ( bracket
        (initConnection serviceAccount projectId Nothing)
        -- BigQuery connections need no explicit cleanup.
        (const (pure ()))
        -- Bind the result with '>>=' so that 'handleResult' actually
        -- runs; merely mapping it over the result would build the inner
        -- 'IO' action without executing it, silently swallowing failures.
        (\conn -> Execute.executeBigQuery conn Execute.BigQuery {Execute.query = fromString query, Execute.parameters = mempty} >>= handleResult)
    )
    ( \(e :: SomeException) ->
        error
          ( unlines
              [ "BigQuery error:",
                show e,
                "SQL was:",
                query
              ]
          )
    )
  where
    handleResult :: Either Execute.ExecuteProblem () -> IO ()
    handleResult (Left e) = throwString (T.unpack (Execute.executeProblemMessage e))
    handleResult (Right ()) = pure ()

bigQueryError :: HasCallStack => Execute.ExecuteProblem -> String -> IO ()
bigQueryError e query =
  error
    ( unlines
        [ "BigQuery query error:",
          T.unpack (Execute.executeProblemMessage e),
          "SQL was:",
          query
        ]
    )

-- | Serialize 'Schema.Table' into a CREATE TABLE statement, as needed, and
-- execute it on the BigQuery backend.
createTable :: Schema.Table -> IO ()
createTable Schema.Table {tableName, tableColumns} = do
  serviceAccount <- getServiceAccount
  projectId <- getProjectId
  run_
    serviceAccount
    projectId
    $ T.unpack $
      T.unwords
        [ "CREATE TABLE",
          T.pack Constants.bigqueryDataset <> "." <> tableName,
          "(",
          commaSeparated (mkColumn <$> tableColumns),
          -- Primary keys are not supported by BigQuery
          -- Foreign keys are not supported by BigQuery
          ");"
        ]

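-- As an illustration, for a hypothetical table @author@ with columns
-- @id :: TInt@ and @name :: TStr@ (and assuming the dataset is named
-- @hasura_test@), 'createTable' would run roughly:
--
-- > CREATE TABLE hasura_test.author ( id INT64, name STRING );
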
scalarType :: HasCallStack => Schema.ScalarType -> Text
scalarType = \case
  Schema.TInt -> "INT64"
  Schema.TStr -> "STRING"
  Schema.TUTCTime -> "DATETIME"
  -- BigQuery's boolean type is BOOL; it has no BIT type.
  Schema.TBool -> "BOOL"
  Schema.TCustomType txt -> Schema.getBackendScalarType txt bstBigQuery

-- | Render a column definition. BigQuery doesn't support default
-- values, and we currently don't support specifying NOT NULL
-- constraints.
mkColumn :: Schema.Column -> Text
mkColumn Schema.Column {columnName, columnType} =
  T.unwords
    [ columnName,
      scalarType columnType
    ]

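-- For example, a column named @id@ of type 'Schema.TInt' renders as
-- @id INT64@.
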
-- | Serialize tableData into an SQL INSERT statement and execute it.
insertTable :: Schema.Table -> IO ()
insertTable Schema.Table {tableName, tableColumns, tableData}
  | null tableData = pure ()
  | otherwise = do
      serviceAccount <- getServiceAccount
      projectId <- getProjectId
      run_
        serviceAccount
        projectId
        $ T.unpack $
          T.unwords
            [ "INSERT INTO",
              T.pack Constants.bigqueryDataset <> "." <> tableName,
              "(",
              commaSeparated (Schema.columnName <$> tableColumns),
              ")",
              "VALUES",
              commaSeparated $ mkRow <$> tableData,
              ";"
            ]

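-- Continuing the hypothetical @author@ example above, inserting two rows
-- would run roughly:
--
-- > INSERT INTO hasura_test.author ( id, name ) VALUES ( 1, 'Author 1' ), ( 2, 'Author 2' );
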
-- | 'ScalarValue' serializer for BigQuery
serialize :: ScalarValue -> Text
serialize = \case
  VInt i -> tshow i
  -- Escape single quotes with a backslash, per BigQuery Standard SQL
  -- string-literal rules.
  VStr s -> "'" <> replace "'" "\\'" s <> "'"
  VUTCTime t -> pack $ formatTime defaultTimeLocale "'%F %T'" t
  -- BigQuery's BOOL literals are TRUE and FALSE.
  VBool b -> if b then "TRUE" else "FALSE"
  VNull -> "NULL"
  VCustomValue bsv -> Schema.formatBackendScalarValueType $ Schema.backendScalarValue bsv bsvBigQuery

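-- A few illustrative renderings (not golden outputs):
--
-- > serialize (VInt 1)      == "1"
-- > serialize (VStr "it's") == "'it\\'s'"
-- > serialize (VBool True)  == "TRUE"
-- > serialize VNull         == "NULL"
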
mkRow :: [Schema.ScalarValue] -> Text
mkRow row =
  T.unwords
    [ "(",
      commaSeparated $ serialize <$> row,
      ")"
    ]

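-- For instance, @mkRow [VInt 1, VStr "Author 1"]@ renders as
-- @( 1, 'Author 1' )@.
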
-- | Serialize Table into an SQL DROP statement and execute it
dropTable :: Schema.Table -> IO ()
dropTable Schema.Table {tableName} = do
  serviceAccount <- getServiceAccount
  projectId <- getProjectId
  run_
    serviceAccount
    projectId
    $ T.unpack $
      T.unwords
        [ "DROP TABLE", -- we don't want @IF EXISTS@ here, because we don't want this to fail silently
          T.pack Constants.bigqueryDataset <> "." <> tableName,
          ";"
        ]

-- | Post an HTTP request to start tracking the table.
-- Overriding 'Schema.trackTable' here because BigQuery's API expects a @dataset@ key.
trackTable :: TestEnvironment -> Schema.Table -> IO ()
trackTable testEnvironment Schema.Table {tableName} = do
  let datasetName = T.pack Constants.bigqueryDataset
      source = defaultSource BigQuery
  GraphqlEngine.postMetadata_
    testEnvironment
    [yaml|
type: bigquery_track_table
args:
  source: *source
  table:
    dataset: *datasetName
    name: *tableName
|]

-- | Post an HTTP request to stop tracking the table.
-- Overriding 'Schema.untrackTable' here because BigQuery's API expects a @dataset@ key.
untrackTable :: TestEnvironment -> Schema.Table -> IO ()
untrackTable testEnvironment Schema.Table {tableName} = do
  let datasetName = T.pack Constants.bigqueryDataset
      source = defaultSource BigQuery
  GraphqlEngine.postMetadata_
    testEnvironment
    [yaml|
type: bigquery_untrack_table
args:
  source: *source
  table:
    dataset: *datasetName
    name: *tableName
|]

-- | A 'replace_metadata' payload that registers the default BigQuery
-- source and its configuration.
defaultSourceMetadata :: IO Value
defaultSourceMetadata = do
  let dataset = Constants.bigqueryDataset
      source = defaultSource BigQuery
      backendType = defaultBackendTypeString BigQuery
  serviceAccount <- getServiceAccount
  projectId <- getProjectId
  pure $
    [yaml|
type: replace_metadata
args:
  version: 3
  sources:
  - name: *source
    kind: *backendType
    tables: []
    configuration:
      service_account: *serviceAccount
      project_id: *projectId
      datasets: [*dataset]
|]

-- | Converts a 'ManualRelationship' to a 'Table'. Should only be used
-- for building relationships.
relationshipToTable :: ManualRelationship -> Schema.Table
relationshipToTable ManualRelationship {..} =
  Table
    { tableName = relSourceTable,
      tablePrimaryKey = [],
      tableColumns = [],
      tableData = [],
      tableReferences =
        [ Reference
            { referenceLocalColumn = relSourceColumn,
              referenceTargetTable = relTargetTable,
              referenceTargetColumn = relTargetColumn
            }
        ]
    }

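-- That is, a hypothetical relationship from @article.author_id@ to
-- @author.id@ becomes a stub 'Table' named @article@ whose only content
-- is a 'Reference' from @author_id@ to @author.id@, which the
-- track/untrack helpers below then turn into metadata requests.
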
-- | Same as 'setup', but additionally sets up the manual relationships
-- that might be required for some test cases.
setupWithAdditionalRelationship :: [Schema.Table] -> [ManualRelationship] -> (TestEnvironment, ()) -> IO ()
setupWithAdditionalRelationship tables rels (testEnvironment, _) = do
  setup tables (testEnvironment, ())
  let relTables = map relationshipToTable rels
  for_ relTables $ \table -> do
    trackObjectRelationships BigQuery table testEnvironment
    trackArrayRelationships BigQuery table testEnvironment

-- | Set up the schema in the most expected way.
-- NOTE: Certain test modules may warrant having their own local version.
setup :: [Schema.Table] -> (TestEnvironment, ()) -> IO ()
setup tables (testEnvironment, _) = do
  let dataset = Constants.bigqueryDataset
      source = defaultSource BigQuery
      backendType = defaultBackendTypeString BigQuery
  -- Clear and reconfigure the metadata
  serviceAccount <- getServiceAccount
  projectId <- getProjectId
  GraphqlEngine.postMetadata_
    testEnvironment
    [yaml|
type: replace_metadata
args:
  version: 3
  sources:
  - name: *source
    kind: *backendType
    tables: []
    configuration:
      service_account: *serviceAccount
      project_id: *projectId
      datasets: [*dataset]
|]
  -- Set up and track tables
  for_ tables $ \table -> do
    createTable table
    insertTable table
    trackTable testEnvironment table
  -- Set up relationships
  for_ tables $ \table -> do
    trackObjectRelationships BigQuery table testEnvironment
    trackArrayRelationships BigQuery table testEnvironment

-- | Tear down the schema and tracking in the most expected way.
-- NOTE: Certain test modules may warrant having their own version.
teardown :: [Schema.Table] -> (TestEnvironment, ()) -> IO ()
teardown (reverse -> tables) (testEnvironment, _) = do
  -- Tear down relationships first
  forFinally_ tables $ \table ->
    untrackRelationships BigQuery table testEnvironment
  -- Then tear down tables
  forFinally_ tables $ \table -> do
    untrackTable testEnvironment table
    dropTable table

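-- A minimal wiring sketch (the @schema@ table list is hypothetical): a
-- test module would typically call
--
-- > setup schema (testEnvironment, ())
--
-- before its tests run, and
--
-- > teardown schema (testEnvironment, ())
--
-- afterwards, so that tables are created, tracked, untracked, and
-- dropped symmetrically.
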
-- | Same as 'teardown', but also tears down the manual relationships
-- that were set up.
teardownWithAdditionalRelationship :: [Schema.Table] -> [ManualRelationship] -> (TestEnvironment, ()) -> IO ()
teardownWithAdditionalRelationship tables rels (testEnvironment, _) = do
  let relTables = map relationshipToTable rels
  for_ relTables $ \table -> do
    untrackRelationships BigQuery table testEnvironment
  -- We tear down in reverse order to ensure that tables with
  -- dependencies are removed first. This only has to be done for the
  -- BigQuery backend, since the metadata tracks the relationships
  -- between them.
  teardown (reverse tables) (testEnvironment, ())

-- | BigQuery-specific function for tracking array relationships
trackArrayRelationships :: HasCallStack => BackendType -> Table -> TestEnvironment -> IO ()
trackArrayRelationships backend Table {tableName, tableReferences} testEnvironment = do
  let source = defaultSource backend
      dataset = Constants.bigqueryDataset
      requestType = source <> "_create_array_relationship"
  for_ tableReferences $ \Reference {referenceLocalColumn, referenceTargetTable, referenceTargetColumn} -> do
    let relationshipName = Schema.mkArrayRelationshipName referenceTargetTable referenceTargetColumn
        manualConfiguration :: Value
        manualConfiguration =
          object
            [ "remote_table"
                .= object
                  [ "dataset" .= String (T.pack dataset),
                    "name" .= String referenceTargetTable
                  ],
              "column_mapping"
                .= object [K.fromText referenceLocalColumn .= referenceTargetColumn]
            ]
        payload =
          [yaml|
type: *requestType
args:
  source: *source
  table:
    dataset: *dataset
    name: *tableName
  name: *relationshipName
  using:
    manual_configuration: *manualConfiguration
|]
    GraphqlEngine.postMetadata_
      testEnvironment
      payload

-- | BigQuery-specific function for tracking object relationships
trackObjectRelationships :: HasCallStack => BackendType -> Table -> TestEnvironment -> IO ()
trackObjectRelationships backend Table {tableName, tableReferences} testEnvironment = do
  let source = defaultSource backend
      dataset = Constants.bigqueryDataset
      requestType = source <> "_create_object_relationship"
  for_ tableReferences $ \ref@Reference {referenceLocalColumn, referenceTargetTable, referenceTargetColumn} -> do
    let relationshipName = Schema.mkObjectRelationshipName ref
        manualConfiguration :: Value
        manualConfiguration =
          object
            [ "remote_table"
                .= object
                  [ "dataset" .= String (T.pack dataset),
                    "name" .= String referenceTargetTable
                  ],
              "column_mapping"
                .= object [K.fromText referenceLocalColumn .= referenceTargetColumn]
            ]
        payload =
          [yaml|
type: *requestType
args:
  source: *source
  table:
    dataset: *dataset
    name: *tableName
  name: *relationshipName
  using:
    manual_configuration: *manualConfiguration
|]
    GraphqlEngine.postMetadata_
      testEnvironment
      payload

-- | BigQuery-specific function for untracking relationships.
-- Overriding 'Schema.untrackRelationships' here because BigQuery's API expects a @dataset@ key.
untrackRelationships :: HasCallStack => BackendType -> Table -> TestEnvironment -> IO ()
untrackRelationships backend Table {tableName, tableReferences} testEnvironment = do
  let source = defaultSource backend
      dataset = Constants.bigqueryDataset
      requestType = source <> "_drop_relationship"
  for_ tableReferences $ \ref@Reference {referenceTargetTable, referenceTargetColumn} -> do
    let arrayRelationshipName = Schema.mkArrayRelationshipName referenceTargetTable referenceTargetColumn
        objectRelationshipName = Schema.mkObjectRelationshipName ref
    -- drop array relationships
    GraphqlEngine.postMetadata_
      testEnvironment
      [yaml|
type: *requestType
args:
  source: *source
  table:
    dataset: *dataset
    name: *tableName
  relationship: *arrayRelationshipName
|]
    -- drop object relationships
    GraphqlEngine.postMetadata_
      testEnvironment
      [yaml|
type: *requestType
args:
  source: *source
  table:
    dataset: *dataset
    name: *tableName
  relationship: *objectRelationshipName
|]