tests: create BigQuery datasets for each test
PR-URL: https://github.com/hasura/graphql-engine-mono/pull/5288
GitOrigin-RevId: 7caedacd280fc6f8ea71af16397bb2c9641effc8
parent 6c14a018e4
commit 744c03a84e
@@ -1159,6 +1159,7 @@ test-suite tests-hspec
     , unliftio-core
     , unordered-containers
     , utf8-string
+    , uuid
     , vector
     , warp
     , websockets
@@ -1212,6 +1213,7 @@ test-suite tests-hspec
       Harness.Test.Introspection
       Harness.Test.Schema
       Harness.Test.Permissions
+      Harness.Test.SchemaName

       -- Harness.Quoter
       Harness.Quoter.Graphql
@@ -12,7 +12,7 @@ module Harness.Backend.BigQuery
     getServiceAccount,
     getProjectId,
     createTable,
-    defaultSourceMetadata,
+    createDataset,
     trackTable,
     dropTable,
     untrackTable,
@@ -24,7 +24,6 @@ module Harness.Backend.BigQuery
   where

 import Control.Concurrent.Extended
 import Data.Aeson (Value (..))
 import Data.List qualified as List
 import Data.String
 import Data.Text qualified as T
@@ -36,8 +35,8 @@ import Harness.Env
 import Harness.Exceptions
 import Harness.GraphqlEngine qualified as GraphqlEngine
 import Harness.Quoter.Yaml (yaml)
-import Harness.Test.BackendType (BackendType (BigQuery), defaultBackendTypeString, defaultSource)
-import Harness.Test.Fixture (SetupAction (..))
+import Harness.Test.BackendType (BackendType (BigQuery))
+import Harness.Test.Fixture (SetupAction (..), defaultBackendTypeString, defaultSource)
 import Harness.Test.Permissions qualified as Permissions
 import Harness.Test.Schema
   ( BackendScalarType (..),
@@ -46,7 +45,8 @@ import Harness.Test.Schema
     Table (..),
   )
 import Harness.Test.Schema qualified as Schema
-import Harness.TestEnvironment (TestEnvironment)
+import Harness.Test.SchemaName
+import Harness.TestEnvironment (TestEnvironment (..))
 import Hasura.Backends.BigQuery.Connection (initConnection)
 import Hasura.Backends.BigQuery.Execute qualified as Execute
 import Hasura.Backends.BigQuery.Source (ServiceAccount)
@@ -102,18 +102,41 @@ bigQueryError e query =
       ]
   )

--- | Serialize Table into a SQL statement, as needed, and execute it on the BigQuery backend
-createTable :: Schema.Table -> IO ()
-createTable table@Schema.Table {tableName, tableColumns} = do
+-- | create a new BigQuery dataset
+createDataset :: SchemaName -> IO ()
+createDataset schemaName = do
+  serviceAccount <- getServiceAccount
+  projectId <- getProjectId
+  conn <- initConnection serviceAccount projectId Nothing
+  res <- runExceptT $ Execute.insertDataset conn (unSchemaName schemaName)
+  case res of
+    Right _ -> pure ()
+    Left e -> bigQueryError e mempty
+
+-- | remove a BigQuery dataset, used at the end of tests to clean up
+removeDataset :: SchemaName -> IO ()
+removeDataset schemaName = do
+  serviceAccount <- getServiceAccount
+  projectId <- getProjectId
+  conn <- initConnection serviceAccount projectId Nothing
+  res <- runExceptT $ Execute.deleteDataset conn (unSchemaName schemaName)
+  case res of
+    Right _ -> pure ()
+    Left e -> bigQueryError e mempty
+
+-- | Serialize Table into a SQL statement, as needed, and execute it on the BigQuery backend
+createTable :: SchemaName -> Schema.Table -> IO ()
+createTable schemaName table@Schema.Table {tableName, tableColumns} = do
   serviceAccount <- getServiceAccount
   projectId <- getProjectId

   run_
     serviceAccount
     projectId
     $ T.unpack $
       T.unwords
         ( [ "CREATE TABLE",
-            T.pack Constants.bigqueryDataset <> "." <> tableName,
+            unSchemaName schemaName <> "." <> tableName,
             "(",
             commaSeparated (mkColumn <$> tableColumns),
             -- Primary keys are not supported by BigQuery
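A note on the two new helpers: `createDataset` and `removeDataset` are symmetric wrappers around `Execute.insertDataset` and `Execute.deleteDataset`, both opening a connection with `initConnection` and routing failures through `bigQueryError`. The diff wires them into `setup`/`teardown` below; condensed into one acquire/release pair the idea looks like this (sketch only; `withTestDataset` is not in the diff, and `bracket_` comes from `Control.Exception`):

    import Control.Exception (bracket_)

    -- Sketch: create the per-test dataset, and drop it even if the body throws.
    withTestDataset :: SchemaName -> IO a -> IO a
    withTestDataset schemaName =
      bracket_ (createDataset schemaName) (removeDataset schemaName)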
@@ -169,84 +192,60 @@ serialize = \case
   VCustomValue bsv -> Schema.formatBackendScalarValueType $ Schema.backendScalarValue bsv bsvBigQuery

 -- | Serialize Table into an SQL DROP statement and execute it
-dropTable :: Schema.Table -> IO ()
-dropTable Schema.Table {tableName} = do
+dropTable :: SchemaName -> Schema.Table -> IO ()
+dropTable schemaName Schema.Table {tableName} = do
   serviceAccount <- getServiceAccount
   projectId <- getProjectId

   run_
     serviceAccount
     projectId
     $ T.unpack $
       T.unwords
         [ "DROP TABLE", -- we don't want @IF EXISTS@ here, because we don't want this to fail silently
-          T.pack Constants.bigqueryDataset <> "." <> tableName,
+          unSchemaName schemaName <> "." <> tableName,
           ";"
         ]

 -- | Post an http request to start tracking
 -- Overriding here because bigquery's API is uncommon
-trackTable :: TestEnvironment -> Schema.Table -> IO ()
-trackTable testEnvironment Schema.Table {tableName} = do
-  let datasetName = T.pack Constants.bigqueryDataset
-      source = defaultSource BigQuery
+trackTable :: TestEnvironment -> SchemaName -> Schema.Table -> IO ()
+trackTable testEnvironment schemaName Schema.Table {tableName} = do
+  let source = defaultSource BigQuery
   GraphqlEngine.postMetadata_
     testEnvironment
     [yaml|
-type: bigquery_track_table
-args:
-  source: *source
-  table:
-    dataset: *datasetName
-    name: *tableName
-|]
+type: bigquery_track_table
+args:
+  source: *source
+  table:
+    dataset: *schemaName
+    name: *tableName
+|]

 -- | Post an http request to stop tracking the table
 -- Overriding `Schema.trackTable` here because bigquery's API expects a `dataset` key
-untrackTable :: TestEnvironment -> Schema.Table -> IO ()
-untrackTable testEnvironment Schema.Table {tableName} = do
-  let datasetName = T.pack Constants.bigqueryDataset
-      source = defaultSource BigQuery
+untrackTable :: TestEnvironment -> SchemaName -> Schema.Table -> IO ()
+untrackTable testEnvironment schemaName Schema.Table {tableName} = do
+  let source = defaultSource BigQuery
   GraphqlEngine.postMetadata_
     testEnvironment
     [yaml|
-type: bigquery_untrack_table
-args:
-  source: *source
-  table:
-    dataset: *datasetName
-    name: *tableName
-|]
-
--- | Metadata source information for the default BigQuery instance
-defaultSourceMetadata :: IO Value
-defaultSourceMetadata = do
-  let dataset = Constants.bigqueryDataset
-      source = defaultSource BigQuery
-      backendType = defaultBackendTypeString BigQuery
-  serviceAccount <- getServiceAccount
-  projectId <- getProjectId
-  pure $
-    [yaml|
-type: replace_metadata
-args:
-  version: 3
-  sources:
-  - name: *source
-    kind: *backendType
-    tables: []
-    configuration:
-      service_account: *serviceAccount
-      project_id: *projectId
-      datasets: [*dataset]
-|]
+type: bigquery_untrack_table
+args:
+  source: *source
+  table:
+    dataset: *schemaName
+    name: *tableName
+|]

 -- | Setup the schema in the most expected way.
 -- NOTE: Certain test modules may warrant having their own local version.
 setup :: [Schema.Table] -> (TestEnvironment, ()) -> IO ()
 setup tables' (testEnvironment, _) = do
-  let dataset = Constants.bigqueryDataset
-      source = defaultSource BigQuery
+  let source = defaultSource BigQuery
       backendType = defaultBackendTypeString BigQuery
+      schemaName = getSchemaName testEnvironment
       tables =
         map
           ( \t ->
@@ -256,28 +255,30 @@ setup tables' (testEnvironment, _) = do
           }
       )
       tables'
-  -- Clear and reconfigure the metadata
   serviceAccount <- getServiceAccount
   projectId <- getProjectId
+  -- create the dataset
+  createDataset schemaName
+  -- Clear and reconfigure the metadata
   GraphqlEngine.postMetadata_
     testEnvironment
     [yaml|
-type: replace_metadata
-args:
-  version: 3
-  sources:
-  - name: *source
-    kind: *backendType
-    tables: []
-    configuration:
-      service_account: *serviceAccount
-      project_id: *projectId
-      datasets: [*dataset]
-|]
+type: replace_metadata
+args:
+  version: 3
+  sources:
+  - name: *source
+    kind: *backendType
+    tables: []
+    configuration:
+      service_account: *serviceAccount
+      project_id: *projectId
+      datasets: [*schemaName]
+|]
   -- Setup and track tables
   for_ tables $ \table -> do
-    retryIfJobRateLimitExceeded $ createTable table
-    trackTable testEnvironment table
+    retryIfJobRateLimitExceeded $ createTable schemaName table
+    trackTable testEnvironment schemaName table
   -- Setup relationships
   for_ tables $ \table -> do
     Schema.trackObjectRelationships BigQuery table testEnvironment
@@ -287,16 +288,21 @@ args:
 -- NOTE: Certain test modules may warrant having their own version.
 teardown :: [Schema.Table] -> (TestEnvironment, ()) -> IO ()
 teardown (reverse -> tables) (testEnvironment, _) = do
+  let schemaName = getSchemaName testEnvironment
   finally
     -- Teardown relationships first
     ( forFinally_ tables $ \table ->
         Schema.untrackRelationships BigQuery table testEnvironment
     )
     -- Then teardown tables
-    ( forFinally_ tables $ \table -> do
-        finally
-          (untrackTable testEnvironment table)
-          (dropTable table)
+    ( finally
+        ( forFinally_ tables $ \table -> do
+            finally
+              (untrackTable testEnvironment schemaName table)
+              (dropTable schemaName table)
+        )
+        -- remove test dataset
+        (removeDataset schemaName)
     )

 setupTablesAction :: [Schema.Table] -> TestEnvironment -> SetupAction
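The nested `finally` structure in the new `teardown` encodes a strict cleanup order: untrack relationships, then untrack and drop each table, then remove the dataset, while guaranteeing that later steps still run when an earlier one throws. Reduced to its skeleton (illustrative sketch, not from the diff):

    import Control.Exception (finally)

    -- Each cleanup phase runs even if the previous one failed.
    cleanupInOrder :: IO () -> IO () -> IO () -> IO ()
    cleanupInOrder untrackRels dropTables removeDs =
      untrackRels `finally` (dropTables `finally` removeDs)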
@@ -36,11 +36,12 @@ import Harness.Constants as Constants
 import Harness.Exceptions
 import Harness.GraphqlEngine qualified as GraphqlEngine
 import Harness.Quoter.Yaml (yaml)
-import Harness.Test.Context (BackendType (Postgres), defaultBackendTypeString, defaultSchema, defaultSource)
+import Harness.Test.BackendType (BackendType (Postgres), defaultBackendTypeString, defaultSource)
 import Harness.Test.Fixture (SetupAction (..))
 import Harness.Test.Permissions qualified as Permissions
 import Harness.Test.Schema (BackendScalarType (..), BackendScalarValue (..), ScalarValue (..))
 import Harness.Test.Schema qualified as Schema
+import Harness.Test.SchemaName
 import Harness.TestEnvironment (TestEnvironment)
 import Hasura.Prelude
 import System.Process.Typed
@@ -108,8 +109,9 @@ connection_info:
 |]

 -- | Serialize Table into a PL-SQL statement, as needed, and execute it on the Postgres backend
-createTable :: Schema.Table -> IO ()
-createTable Schema.Table {tableName, tableColumns, tablePrimaryKey = pk, tableReferences, tableUniqueConstraints} = do
+createTable :: TestEnvironment -> Schema.Table -> IO ()
+createTable testEnv Schema.Table {tableName, tableColumns, tablePrimaryKey = pk, tableReferences, tableUniqueConstraints} = do
+  let schemaName = getSchemaName testEnv
   run_ $
     T.unpack $
       T.unwords
@@ -119,7 +121,7 @@ createTable Schema.Table {tableName, tableColumns, tablePrimaryKey = pk, tableRe
         commaSeparated $
           (mkColumn <$> tableColumns)
             <> (bool [mkPrimaryKey pk] [] (null pk))
-            <> (mkReference <$> tableReferences),
+            <> (mkReference schemaName <$> tableReferences),
         ");"
       ]
@@ -157,15 +159,15 @@ mkPrimaryKey key =
       ")"
     ]

-mkReference :: Schema.Reference -> Text
-mkReference Schema.Reference {referenceLocalColumn, referenceTargetTable, referenceTargetColumn} =
+mkReference :: SchemaName -> Schema.Reference -> Text
+mkReference schemaName Schema.Reference {referenceLocalColumn, referenceTargetTable, referenceTargetColumn} =
   T.unwords
     [ "FOREIGN KEY",
       "(",
      wrapIdentifier referenceLocalColumn,
      ")",
      "REFERENCES",
-      T.pack (defaultSchema Postgres) <> "." <> wrapIdentifier referenceTargetTable,
+      unSchemaName schemaName <> "." <> wrapIdentifier referenceTargetTable,
      "(",
      wrapIdentifier referenceTargetColumn,
      ")",
@@ -245,7 +247,7 @@ setup tables (testEnvironment, _) = do
   GraphqlEngine.setSource testEnvironment defaultSourceMetadata Nothing
   -- Setup and track tables
   for_ tables $ \table -> do
-    createTable table
+    createTable testEnvironment table
     insertTable table
     trackTable testEnvironment table
   -- Setup relationships
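Threading the schema name through `mkReference` changes only the qualifier on the referenced table. For illustration, assuming `wrapIdentifier` double-quotes its argument as in the Postgres harness, a hypothetical reference renders as:

    -- mkReference (SchemaName "hasura")
    --   Schema.Reference
    --     { referenceLocalColumn = "author_id",
    --       referenceTargetTable = "author",
    --       referenceTargetColumn = "id"
    --     }
    -- ==> FOREIGN KEY ( "author_id" ) REFERENCES hasura."author" ( "id" )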
@@ -6,6 +6,7 @@
 module Harness.Quoter.Yaml
   ( yaml,
     interpolateYaml,
+    ToYamlString (..),
   )
 where
@@ -5,6 +5,7 @@
 -- | Templating yaml files.
 module Harness.Quoter.Yaml.InterpolateYaml
   ( interpolateYaml,
+    ToYamlString (..),
   )
 where
@@ -3,13 +3,11 @@ module Harness.Test.BackendType
   ( BackendType (..),
     defaultSource,
     defaultBackendTypeString,
-    defaultSchema,
     schemaKeyword,
   )
 where

 import Data.Aeson.Key (Key)
-import Harness.Constants qualified as Constants (bigqueryDataset, citusDb, dataConnectorDb, mysqlDb, postgresDb, sqlserverDb)
 import Hasura.Prelude

 -- | A supported backend type.
@@ -42,16 +40,6 @@ defaultBackendTypeString = \case
   Citus -> "citus"
   DataConnector -> "data-connector"

--- | The default hasura metadata schema name used for a given backend in this test suite project.
-defaultSchema :: BackendType -> String
-defaultSchema = \case
-  Postgres -> Constants.postgresDb
-  MySQL -> Constants.mysqlDb
-  SQLServer -> Constants.sqlserverDb
-  BigQuery -> Constants.bigqueryDataset
-  Citus -> Constants.citusDb
-  DataConnector -> Constants.dataConnectorDb
-
 schemaKeyword :: BackendType -> Key
 schemaKeyword = \case
   Postgres -> "schema"
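Deleting `defaultSchema` removes the last per-backend hard-coded schema name; callers migrate to `getSchemaName` from the new `Harness.Test.SchemaName` module (shown later in this diff), which can take the test run into account. The practical difference, roughly (illustrative values):

    -- Before: static and shared across all test runs
    --   defaultSchema BigQuery  -- always Constants.bigqueryDataset
    -- After: derived from the TestEnvironment, unique per BigQuery run
    --   getSchemaName testEnv   -- SchemaName "hasura_test_<sanitised uuid>"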
@@ -13,7 +13,6 @@ module Harness.Test.Context
     BackendType (..),
     defaultSource,
     defaultBackendTypeString,
-    defaultSchema,
     schemaKeyword,
     noLocalTestEnvironment,
     Options (..),
@@ -23,11 +22,12 @@ module Harness.Test.Context
   )
 where

+import Data.UUID.V4 (nextRandom)
 import Harness.Exceptions
 import Harness.Test.BackendType
 import Harness.Test.CustomOptions
 import Harness.Test.Hspec.Extended
-import Harness.TestEnvironment (TestEnvironment)
+import Harness.TestEnvironment (TestEnvironment (..))
 import Hasura.Prelude
 import Test.Hspec (ActionWith, SpecWith, aroundAllWith, describe)
 import Test.Hspec.Core.Spec (mapSpecItem)
@@ -100,10 +100,22 @@ contextBracket ::
   ((TestEnvironment, a) -> IO ()) ->
   TestEnvironment ->
   IO ()
-contextBracket Context {mkLocalTestEnvironment, setup, teardown} actionWith globalTestEnvironment =
+contextBracket Context {name, mkLocalTestEnvironment, setup, teardown} actionWith globalTestEnvironment =
   mask \restore -> do
     localTestEnvironment <- mkLocalTestEnvironment globalTestEnvironment
-    let testEnvironment = (globalTestEnvironment, localTestEnvironment)
+
+    -- create a unique id to differentiate this set of tests
+    uniqueTestId <- nextRandom
+
+    let globalTestEnvWithUnique =
+          globalTestEnvironment
+            { backendType = case name of
+                Backend db -> Just db
+                _ -> Nothing,
+              uniqueTestId = uniqueTestId
+            }
+
+    let testEnvironment = (globalTestEnvWithUnique, localTestEnvironment)

     _ <-
       catchRethrow
@@ -115,6 +127,7 @@ contextBracket Context {mkLocalTestEnvironment, setup, teardown} actionWith glob
       catchRethrow
         (restore $ actionWith testEnvironment)
         (teardown testEnvironment)

     -- If no exception occurred, run the normal teardown function.
     teardown testEnvironment
@@ -11,7 +11,6 @@ module Harness.Test.Fixture
     BackendType (..),
     defaultSource,
     defaultBackendTypeString,
-    defaultSchema,
     noLocalTestEnvironment,
     SetupAction (..),
     Options (..),
@@ -21,11 +20,12 @@ module Harness.Test.Fixture
   )
 where

+import Data.UUID.V4 (nextRandom)
 import Harness.Exceptions
 import Harness.Test.BackendType
 import Harness.Test.CustomOptions
 import Harness.Test.Hspec.Extended
-import Harness.TestEnvironment (TestEnvironment)
+import Harness.TestEnvironment (TestEnvironment (..))
 import Hasura.Prelude
 import Test.Hspec (ActionWith, SpecWith, aroundAllWith, describe)
 import Test.Hspec.Core.Spec (mapSpecItem)
@@ -91,10 +91,22 @@ runWithLocalTestEnvironment fixtures tests =
 -- and at teardown, which is why we use a custom re-implementation of
 -- @bracket@.
 fixtureBracket :: Fixture b -> ((TestEnvironment, b) -> IO a) -> TestEnvironment -> IO ()
-fixtureBracket Fixture {mkLocalTestEnvironment, setupTeardown} actionWith globalTestEnvironment =
+fixtureBracket Fixture {name, mkLocalTestEnvironment, setupTeardown} actionWith globalTestEnvironment =
   mask \restore -> do
     localTestEnvironment <- mkLocalTestEnvironment globalTestEnvironment
-    let testEnvironment = (globalTestEnvironment, localTestEnvironment)
+
+    -- create a unique id to differentiate this set of tests
+    uniqueTestId <- nextRandom
+
+    let globalTestEnvWithUnique =
+          globalTestEnvironment
+            { backendType = case name of
+                Backend db -> Just db
+                _ -> Nothing,
+              uniqueTestId = uniqueTestId
+            }
+
+    let testEnvironment = (globalTestEnvWithUnique, localTestEnvironment)

     cleanup <- runSetupActions (setupTeardown testEnvironment)
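`contextBracket` and `fixtureBracket` gain the same preamble: before setup runs, the global `TestEnvironment` is stamped with the backend under test (when the context/fixture name is `Backend db`) and a fresh UUID, so every later `getSchemaName` call in the run resolves to one dataset name. Factored out as a sketch (hypothetical helper, with `Name` standing in for the actual context/fixture name type):

    import Data.UUID.V4 (nextRandom)

    -- Sketch of the enrichment both bracket functions now perform inline.
    enrichTestEnvironment :: Name -> TestEnvironment -> IO TestEnvironment
    enrichTestEnvironment name env = do
      uniqueTestId <- nextRandom -- one uuid per set of tests
      pure
        env
          { backendType = case name of
              Backend db -> Just db
              _ -> Nothing,
            uniqueTestId = uniqueTestId
          }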
@@ -40,13 +40,13 @@ import Data.Aeson
     (.=),
   )
 import Data.Aeson.Key qualified as K
 import Data.Text qualified as T
 import Data.Time (UTCTime, defaultTimeLocale)
 import Data.Time.Format (parseTimeOrError)
 import Harness.Exceptions
 import Harness.GraphqlEngine qualified as GraphqlEngine
 import Harness.Quoter.Yaml (yaml)
-import Harness.Test.Context (BackendType, defaultBackendTypeString, defaultSchema, defaultSource, schemaKeyword)
+import Harness.Test.BackendType
+import Harness.Test.SchemaName
 import Harness.TestEnvironment (TestEnvironment)
 import Hasura.Prelude
@@ -259,7 +259,7 @@ parseUTCTimeOrError = VUTCTime . parseTimeOrError True defaultTimeLocale "%F %T"
 trackTable :: HasCallStack => BackendType -> String -> Table -> TestEnvironment -> IO ()
 trackTable backend source Table {tableName} testEnvironment = do
   let backendType = defaultBackendTypeString backend
-      schema = defaultSchema backend
+      schema = getSchemaName testEnvironment
       requestType = backendType <> "_track_table"
   GraphqlEngine.postMetadata_
     testEnvironment
@@ -276,7 +276,7 @@ args:
 untrackTable :: HasCallStack => BackendType -> String -> Table -> TestEnvironment -> IO ()
 untrackTable backend source Table {tableName} testEnvironment = do
   let backendType = defaultBackendTypeString backend
-      schema = defaultSchema backend
+      schema = getSchemaName testEnvironment
   let requestType = backendType <> "_untrack_table"
   GraphqlEngine.postMetadata_
     testEnvironment
@@ -297,11 +297,11 @@ mkObjectRelationshipName Reference {referenceLocalColumn, referenceTargetTable,
 -- | Unified track object relationships
 trackObjectRelationships :: HasCallStack => BackendType -> Table -> TestEnvironment -> IO ()
 trackObjectRelationships backend Table {tableName, tableReferences, tableManualRelationships} testEnvironment = do
+  let schema = getSchemaName testEnvironment
   let source = defaultSource backend
-      schema = defaultSchema backend
       tableObj =
         object
-          [ schemaKeyword backend .= String (T.pack schema),
+          [ schemaKeyword backend .= String (unSchemaName schema),
             "name" .= String tableName
           ]
       requestType = source <> "_create_object_relationship"
@@ -322,7 +322,7 @@ args:
     let relationshipName = mkObjectRelationshipName ref
         targetTableObj =
           object
-            [ schemaKeyword backend .= String (T.pack schema),
+            [ schemaKeyword backend .= String (unSchemaName schema),
               "name" .= String referenceTargetTable
             ]
         manualConfiguration :: Value
@@ -353,11 +353,11 @@ mkArrayRelationshipName tableName referenceLocalColumn referenceTargetColumn =
 -- | Unified track array relationships
 trackArrayRelationships :: HasCallStack => BackendType -> Table -> TestEnvironment -> IO ()
 trackArrayRelationships backend Table {tableName, tableReferences, tableManualRelationships} testEnvironment = do
+  let schema = getSchemaName testEnvironment
   let source = defaultSource backend
-      schema = defaultSchema backend
       tableObj =
         object
-          [ schemaKeyword backend .= String (T.pack schema),
+          [ schemaKeyword backend .= String (unSchemaName schema),
             "name" .= String tableName
           ]
       requestType = source <> "_create_array_relationship"
@@ -365,7 +365,7 @@ trackArrayRelationships backend Table {tableName, tableReferences, tableManualRe
     let relationshipName = mkArrayRelationshipName tableName referenceTargetColumn referenceLocalColumn
         targetTableObj =
           object
-            [ schemaKeyword backend .= String (T.pack schema),
+            [ schemaKeyword backend .= String (unSchemaName schema),
               "name" .= String referenceTargetTable
             ]
     GraphqlEngine.postMetadata_
@@ -385,7 +385,7 @@ args:
     let relationshipName = mkArrayRelationshipName tableName referenceTargetColumn referenceLocalColumn
         targetTableObj =
           object
-            [ schemaKeyword backend .= String (T.pack schema),
+            [ schemaKeyword backend .= String (unSchemaName schema),
               "name" .= String referenceTargetTable
             ]
         manualConfiguration :: Value
@@ -412,11 +412,11 @@ args:
 -- | Unified untrack relationships
 untrackRelationships :: HasCallStack => BackendType -> Table -> TestEnvironment -> IO ()
 untrackRelationships backend Table {tableName, tableReferences, tableManualRelationships} testEnvironment = do
+  let schema = getSchemaName testEnvironment
   let source = defaultSource backend
-      schema = defaultSchema backend
       tableObj =
         object
-          [ schemaKeyword backend .= String (T.pack schema),
+          [ schemaKeyword backend .= String (unSchemaName schema),
             "name" .= String tableName
           ]
       requestType = source <> "_drop_relationship"
@@ -425,7 +425,7 @@ untrackRelationships backend Table {tableName, tableReferences, tableManualRelat
         objectRelationshipName = mkObjectRelationshipName ref
         targetTableObj =
           object
-            [ schemaKeyword backend .= String (T.pack schema),
+            [ schemaKeyword backend .= String (unSchemaName schema),
               "name" .= String referenceTargetTable
             ]
     finally
server/tests-hspec/Harness/Test/SchemaName.hs (new file, 70 lines)
@@ -0,0 +1,70 @@
+-- | This module defines `SchemaName`, for naming DB schemas/datasets used in
+-- tests
+module Harness.Test.SchemaName (SchemaName (..), getSchemaName) where
+
+import Data.Aeson (ToJSON (..))
+import Data.Char qualified
+import Data.String
+import Data.Text (Text)
+import Data.Text qualified as T
+import Data.UUID
+import Harness.Constants qualified as Constants
+import Harness.Quoter.Graphql
+import Harness.Quoter.Yaml
+import Harness.Test.BackendType
+import Harness.TestEnvironment
+import Prelude
+
+newtype SchemaName = SchemaName {unSchemaName :: Text}
+  deriving newtype (Semigroup)
+
+instance ToJSON SchemaName where
+  toJSON (SchemaName sn) = toJSON sn
+
+instance IsString SchemaName where
+  fromString s = SchemaName (T.pack s)
+
+instance ToGraphqlString SchemaName where
+  showGql (SchemaName sn) = T.unpack sn
+
+instance ToYamlString SchemaName where
+  showYml (SchemaName sn) = T.unpack sn
+
+-- | Given a `TestEnvironment`, returns a `SchemaName` to use in the test, used
+-- to separate out different test suites
+--
+-- This is used both in setup and teardown, and in individual tests
+--
+-- The `TestEnvironment` contains a `uniqueTestId` and `backendType`, from
+-- which we decide what the `SchemaName` should be.
+--
+-- The backendType is only required so we make changes for BigQuery for now,
+-- once we do this for all backends we'll just need the unique id.
+--
+-- For all other backends, we fall back to the Constants that were used before
+getSchemaName :: TestEnvironment -> SchemaName
+getSchemaName testEnv = case backendType testEnv of
+  Nothing -> SchemaName "hasura" -- the `Nothing` case is for tests with multiple schemas
+  Just db -> case db of
+    Postgres -> SchemaName $ T.pack Constants.postgresDb
+    MySQL -> SchemaName $ T.pack Constants.mysqlDb
+    SQLServer -> SchemaName $ T.pack Constants.sqlserverDb
+    BigQuery ->
+      SchemaName $
+        T.pack $
+          "hasura_test_"
+            <> showUUID (uniqueTestId testEnv)
+    Citus -> SchemaName $ T.pack Constants.citusDb
+    DataConnector -> SchemaName $ T.pack Constants.dataConnectorDb
+
+-- | Sanitise UUID for use in BigQuery dataset name
+-- must be alphanumeric (plus underscores)
+showUUID :: UUID -> String
+showUUID =
+  map
+    ( \a ->
+        if Data.Char.isAlphaNum a
+          then a
+          else '_'
+    )
+    . show
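Since `show` on a `UUID` yields the canonical hyphenated rendering, the only characters `showUUID` ever has to replace are the hyphens. A worked example with an illustrative UUID:

    -- showUUID 550e8400-e29b-41d4-a716-446655440000
    --   == "550e8400_e29b_41d4_a716_446655440000"
    -- so for a BigQuery run, getSchemaName produces:
    --   SchemaName "hasura_test_550e8400_e29b_41d4_a716_446655440000"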
@@ -12,15 +12,26 @@ module Harness.TestEnvironment
   where

 import Control.Concurrent (ThreadId, killThread)
+import Data.UUID (UUID)
 import Data.Word
+import Harness.Test.BackendType
 import Hasura.Prelude
 import System.Log.FastLogger qualified as FL

 -- | A testEnvironment that's passed to all tests.
 data TestEnvironment = TestEnvironment
-  { server :: Server,
+  { -- | connection details for the instance of HGE we're connecting to
+    server :: Server,
+    -- | shared function to log information from tests
     logger :: FL.LogStr -> IO (),
-    loggerCleanup :: IO ()
+    -- | action to clean up logger
+    loggerCleanup :: IO (),
+    -- | a uuid generated for each test suite used to generate a unique
+    -- `SchemaName`
+    uniqueTestId :: UUID,
+    -- | the main backend type of the test, if applicable (ie, where we are not
+    -- testing `remote <-> remote` joins or something similarly esoteric)
+    backendType :: Maybe BackendType
   }

 instance Show TestEnvironment where
@@ -6,6 +6,7 @@ module SpecHook
   where

 import Control.Exception.Safe (bracket)
+import Data.UUID.V4 (nextRandom)
 import Harness.GraphqlEngine (startServerThread)
 import Harness.TestEnvironment (TestEnvironment (..), stopServer)
 import Hasura.Prelude
@@ -20,7 +21,15 @@ setupTestEnvironment = do
   server <- startServerThread ((,) <$> murlPrefix <*> mport)
   let logType = FL.LogFileNoRotate "tests-hspec.log" 1024
   (logger, loggerCleanup) <- FL.newFastLogger logType
-  pure TestEnvironment {..}
+  uniqueTestId <- nextRandom
+  pure
+    TestEnvironment
+      { server = server,
+        uniqueTestId = uniqueTestId,
+        backendType = Nothing,
+        logger = logger,
+        loggerCleanup = loggerCleanup
+      }

 teardownTestEnvironment :: TestEnvironment -> IO ()
 teardownTestEnvironment TestEnvironment {..} = do
@@ -6,14 +6,14 @@ module Test.BigQuery.ComputedFieldSpec (spec) where
 import Data.List.NonEmpty qualified as NE
 import Data.Text qualified as T
 import Harness.Backend.BigQuery qualified as BigQuery
-import Harness.Constants qualified as Constants
 import Harness.Exceptions (finally)
 import Harness.GraphqlEngine qualified as GraphqlEngine
 import Harness.Quoter.Graphql (graphql)
-import Harness.Quoter.Yaml (yaml)
+import Harness.Quoter.Yaml (interpolateYaml, yaml)
 import Harness.Test.Context qualified as Context
 import Harness.Test.Schema (Table (..), table)
 import Harness.Test.Schema qualified as Schema
+import Harness.Test.SchemaName
 import Harness.TestEnvironment (TestEnvironment)
 import Harness.Yaml (shouldReturnYaml)
 import Hasura.Prelude
@@ -41,12 +41,12 @@
 bigquerySetup :: (TestEnvironment, ()) -> IO ()
 bigquerySetup (testEnv, ()) = do
   BigQuery.setup [authorTable, articleTable] (testEnv, ())
+  let schemaName = getSchemaName testEnv

   -- Create functions in BigQuery
-  BigQuery.runSql_ createFunctionsSQL
+  BigQuery.runSql_ (createFunctionsSQL schemaName)

   -- Add computed fields and define select permissions
-  let dataset = Constants.bigqueryDataset
   GraphqlEngine.postMetadata_
     testEnv
     [yaml|
@@ -57,11 +57,11 @@ args:
     source: bigquery
     name: search_articles_1
     table:
-      dataset: *dataset
+      dataset: *schemaName
       name: author
     definition:
       function:
-        dataset: *dataset
+        dataset: *schemaName
         name: fetch_articles_returns_table
       argument_mapping:
         a_id: id
@@ -71,17 +71,17 @@ args:
     source: bigquery
     name: search_articles_2
     table:
-      dataset: *dataset
+      dataset: *schemaName
       name: author
     definition:
       function:
-        dataset: *dataset
+        dataset: *schemaName
         name: fetch_articles
       argument_mapping:
         a_id: id
       return_table:
         name: article
-        dataset: *dataset
+        dataset: *schemaName

 # Role user_1 has select permissions on author and article tables.
 # user_1 can query search_articles_1 computed field.
@@ -89,7 +89,7 @@ args:
   args:
     source: bigquery
     table:
-      dataset: *dataset
+      dataset: *schemaName
       name: author
     role: user_1
     permission:
@@ -102,7 +102,7 @@ args:
   args:
     source: bigquery
     table:
-      dataset: *dataset
+      dataset: *schemaName
      name: article
     role: user_1
     permission:
@@ -114,7 +114,7 @@ args:
   args:
     source: bigquery
     table:
-      dataset: *dataset
+      dataset: *schemaName
       name: author
     role: user_2
     permission:
@@ -125,8 +125,9 @@ args:
 bigqueryTeardown :: (TestEnvironment, ()) -> IO ()
 bigqueryTeardown (testEnv, ()) = do
   -- Drop permissions and computed fields metadata
-  let dataset = Constants.bigqueryDataset
-      dropComputedFieldsYaml =
+  let schemaName = getSchemaName testEnv
+
+  let dropComputedFieldsYaml =
         [yaml|
 type: bulk
 args:
@@ -134,7 +135,7 @@ args:
   args:
     source: bigquery
     table:
-      dataset: *dataset
+      dataset: *schemaName
       name: author
     role: user_1

@@ -142,7 +143,7 @@ args:
   args:
     source: bigquery
     table:
-      dataset: *dataset
+      dataset: *schemaName
       name: article
     role: user_1

@@ -150,7 +151,7 @@ args:
   args:
     source: bigquery
     table:
-      dataset: *dataset
+      dataset: *schemaName
       name: author
     role: user_2

@@ -159,7 +160,7 @@ args:
     source: bigquery
     name: search_articles_1
     table:
-      dataset: *dataset
+      dataset: *schemaName
       name: author

 - type: bigquery_drop_computed_field
@@ -167,14 +168,14 @@ args:
     source: bigquery
     name: search_articles_2
     table:
-      dataset: *dataset
+      dataset: *schemaName
       name: author
 |]
   finally
     (GraphqlEngine.postMetadata_ testEnv dropComputedFieldsYaml)
     ( finally
         -- Drop functions in BigQuery database
-        (BigQuery.runSql_ dropFunctionsSQL)
+        (BigQuery.runSql_ (dropFunctionsSQL schemaName))
         -- Teardown schema
         (BigQuery.teardown [authorTable, articleTable] (testEnv, ()))
     )
@@ -226,20 +227,20 @@ articleTable =
       ]
   }

-fetch_articles_returns_table :: T.Text
-fetch_articles_returns_table =
-  T.pack Constants.bigqueryDataset <> ".fetch_articles_returns_table"
+fetch_articles_returns_table :: SchemaName -> T.Text
+fetch_articles_returns_table schemaName =
+  unSchemaName schemaName <> ".fetch_articles_returns_table"

-fetch_articles :: T.Text
-fetch_articles =
-  T.pack Constants.bigqueryDataset <> ".fetch_articles"
+fetch_articles :: SchemaName -> T.Text
+fetch_articles schemaName =
+  unSchemaName schemaName <> ".fetch_articles"

-createFunctionsSQL :: String
-createFunctionsSQL =
+createFunctionsSQL :: SchemaName -> String
+createFunctionsSQL schemaName =
   T.unpack $
     T.unwords $
       [ "CREATE TABLE FUNCTION ",
-        fetch_articles_returns_table,
+        fetch_articles_returns_table schemaName,
         "(a_id INT64, search STRING)",
         "RETURNS TABLE<id INT64, title STRING, content STRING>",
         "AS (",
@@ -249,7 +250,7 @@ createFunctionsSQL =
         ");"
       ]
         <> [ "CREATE TABLE FUNCTION ",
-             fetch_articles,
+             fetch_articles schemaName,
              "(a_id INT64, search STRING)",
              "AS (",
              "SELECT t.* FROM",
@@ -258,28 +259,30 @@ createFunctionsSQL =
              ");"
           ]
   where
-    articleTableSQL = T.pack Constants.bigqueryDataset <> ".article"
+    articleTableSQL = unSchemaName schemaName <> ".article"

-dropFunctionsSQL :: String
-dropFunctionsSQL =
+dropFunctionsSQL :: SchemaName -> String
+dropFunctionsSQL schemaName =
   T.unpack $
     T.unwords $
-      [ "DROP TABLE FUNCTION " <> fetch_articles_returns_table <> ";",
-        "DROP TABLE FUNCTION " <> fetch_articles <> ";"
+      [ "DROP TABLE FUNCTION " <> fetch_articles_returns_table schemaName <> ";",
+        "DROP TABLE FUNCTION " <> fetch_articles schemaName <> ";"
       ]

 -- * Tests

 tests :: Context.Options -> SpecWith TestEnvironment
 tests opts = do
-  it "Query with computed fields" $ \testEnv ->
+  it "Query with computed fields" $ \testEnv -> do
+    let schemaName = getSchemaName testEnv
+
     shouldReturnYaml
       opts
      ( GraphqlEngine.postGraphql
          testEnv
          [graphql|
 query {
-  hasura_author(order_by: {id: asc}){
+  #{schemaName}_author(order_by: {id: asc}){
     id
     name
     search_articles_1(args: {search: "%1%"}){
@@ -297,9 +300,9 @@ query {
 }
 |]
       )
-      [yaml|
+      [interpolateYaml|
 data:
-  hasura_author:
+  #{schemaName}_author:
   - id: '1'
     name: Author 1
     search_articles_1:
@@ -317,14 +320,16 @@ data:
       author_id: '2'
 |]

-  it "Query with computed fields using limit and order_by" $ \testEnv ->
+  it "Query with computed fields using limit and order_by" $ \testEnv -> do
+    let schemaName = getSchemaName testEnv
+
     shouldReturnYaml
       opts
      ( GraphqlEngine.postGraphql
          testEnv
          [graphql|
 query {
-  hasura_author(order_by: {id: asc}){
+  #{schemaName}_author(order_by: {id: asc}){
     id
     name
     search_articles_2(args: {search: "%by%"} limit: 1 order_by: {id: asc}){
@@ -337,9 +342,9 @@ query {
 }
 |]
       )
-      [yaml|
+      [interpolateYaml|
 data:
-  hasura_author:
+  #{schemaName}_author:
   - id: '1'
     name: Author 1
     search_articles_2:
@@ -356,7 +361,9 @@ data:
         title: Article 2 Title
 |]

-  it "Query with computed fields as user_1 role" $ \testEnv ->
+  it "Query with computed fields as user_1 role" $ \testEnv -> do
+    let schemaName = getSchemaName testEnv
+
     shouldReturnYaml
       opts
      ( GraphqlEngine.postGraphqlWithHeaders
@@ -364,7 +371,7 @@ data:
          [("X-Hasura-Role", "user_1")]
          [graphql|
 query {
-  hasura_author(order_by: {id: asc}){
+  #{schemaName}_author(order_by: {id: asc}){
     id
     name
     search_articles_1(args: {search: "%1%"}){
@@ -382,9 +389,9 @@ query {
 }
 |]
       )
-      [yaml|
+      [interpolateYaml|
 data:
-  hasura_author:
+  #{schemaName}_author:
   - id: '1'
     name: Author 1
     search_articles_1:
@@ -402,7 +409,9 @@ data:
       author_id: '2'
 |]

-  it "Query with computed field search_articles_1 as user_2 role" $ \testEnv ->
+  it "Query with computed field search_articles_1 as user_2 role" $ \testEnv -> do
+    let schemaName = getSchemaName testEnv
+
     shouldReturnYaml
       opts
      ( GraphqlEngine.postGraphqlWithHeaders
@@ -410,7 +419,7 @@ data:
          [("X-Hasura-Role", "user_2")]
          [graphql|
 query {
-  hasura_author(order_by: {id: asc}){
+  #{schemaName}_author(order_by: {id: asc}){
     id
     name
     search_articles_1(args: {search: "%1%"}){
@@ -422,16 +431,18 @@ query {
 }
 |]
       )
-      [yaml|
+      [interpolateYaml|
 errors:
 - extensions:
-    path: "$.selectionSet.hasura_author.selectionSet.search_articles_1"
+    path: "$.selectionSet.#{schemaName}_author.selectionSet.search_articles_1"
     code: validation-failed
   message: |-
-    field 'search_articles_1' not found in type: 'hasura_author'
+    field 'search_articles_1' not found in type: '#{schemaName}_author'
 |]

-  it "Query with computed field search_articles_2 as user_2 role" $ \testEnv ->
+  it "Query with computed field search_articles_2 as user_2 role" $ \testEnv -> do
+    let schemaName = getSchemaName testEnv
+
     shouldReturnYaml
       opts
      ( GraphqlEngine.postGraphqlWithHeaders
@@ -439,7 +450,7 @@ errors:
          [("X-Hasura-Role", "user_2")]
          [graphql|
 query {
-  hasura_author(order_by: {id: asc}){
+  #{schemaName}_author(order_by: {id: asc}){
     id
     name
     search_articles_2(args: {search: "%keyword%"}){
@@ -452,11 +463,11 @@ query {
 }
 |]
       )
-      [yaml|
+      [interpolateYaml|
 errors:
 - extensions:
-    path: "$.selectionSet.hasura_author.selectionSet.search_articles_2"
+    path: "$.selectionSet.#{schemaName}_author.selectionSet.search_articles_2"
     code: validation-failed
   message: |-
-    field 'search_articles_2' not found in type: 'hasura_author'
+    field 'search_articles_2' not found in type: '#{schemaName}_author'
 |]
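A note on the quasiquoter switch running through this spec: `yaml` can only bind whole Haskell values at YAML anchor positions (`*schemaName`), while `interpolateYaml` performs textual `#{…}` substitution, which is what allows the schema name to appear inside a larger token such as the `#{schemaName}_author` type name. A contrived side-by-side (illustrative):

    -- yaml: the spliced value must occupy a whole YAML node
    --   dataset: *schemaName
    -- interpolateYaml: the value may be embedded mid-string
    --   #{schemaName}_author: []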
@@ -6,12 +6,12 @@ module Test.BigQuery.Metadata.ComputedFieldSpec (spec) where
 import Data.List.NonEmpty qualified as NE
 import Data.Text qualified as T
 import Harness.Backend.BigQuery qualified as BigQuery
-import Harness.Constants qualified as Constants
 import Harness.GraphqlEngine qualified as GraphqlEngine
-import Harness.Quoter.Yaml (yaml)
+import Harness.Quoter.Yaml (interpolateYaml, yaml)
 import Harness.Test.Context qualified as Context
 import Harness.Test.Schema (Table (..), table)
 import Harness.Test.Schema qualified as Schema
+import Harness.Test.SchemaName
 import Harness.TestEnvironment (TestEnvironment)
 import Harness.Yaml (shouldReturnYaml)
 import Hasura.Prelude
@@ -40,13 +40,17 @@ bigquerySetup :: (TestEnvironment, ()) -> IO ()
 bigquerySetup (testEnv, ()) = do
   BigQuery.setup [authorTable, articleTable] (testEnv, ())

+  let schemaName = getSchemaName testEnv
+
   -- Create functions in BigQuery
-  BigQuery.runSql_ createFunctionsSQL
+  BigQuery.runSql_ (createFunctionsSQL schemaName)

 bigqueryTeardown :: (TestEnvironment, ()) -> IO ()
 bigqueryTeardown (testEnv, ()) = do
+  let schemaName = getSchemaName testEnv
+
   -- Drop functions in BigQuery database
-  BigQuery.runSql_ dropFunctionsSQL
+  BigQuery.runSql_ (dropFunctionsSQL schemaName)

   -- Teardown schema
   BigQuery.teardown [authorTable, articleTable] (testEnv, ())
@@ -73,28 +77,28 @@ articleTable =
     tablePrimaryKey = ["id"]
   }

-fetch_articles_returns_table :: T.Text
-fetch_articles_returns_table =
-  T.pack Constants.bigqueryDataset <> ".fetch_articles_returns_table"
+fetch_articles_returns_table :: SchemaName -> T.Text
+fetch_articles_returns_table schemaName =
+  unSchemaName schemaName <> ".fetch_articles_returns_table"

-fetch_articles :: T.Text
-fetch_articles =
-  T.pack Constants.bigqueryDataset <> ".fetch_articles"
+fetch_articles :: SchemaName -> T.Text
+fetch_articles schemaName =
+  unSchemaName schemaName <> ".fetch_articles"

-function_no_args :: T.Text
-function_no_args =
-  T.pack Constants.bigqueryDataset <> ".function_no_args"
+function_no_args :: SchemaName -> T.Text
+function_no_args schemaName =
+  unSchemaName schemaName <> ".function_no_args"

-add_int :: T.Text
-add_int =
-  T.pack Constants.bigqueryDataset <> ".add_int"
+add_int :: SchemaName -> T.Text
+add_int schemaName =
+  unSchemaName schemaName <> ".add_int"

-createFunctionsSQL :: String
-createFunctionsSQL =
+createFunctionsSQL :: SchemaName -> String
+createFunctionsSQL schemaName =
   T.unpack $
     T.unwords $
       [ "CREATE TABLE FUNCTION ",
-        fetch_articles_returns_table,
+        fetch_articles_returns_table schemaName,
         "(a_id INT64, search STRING)",
         "RETURNS TABLE<id INT64, title STRING, content STRING>",
         "AS (",
@@ -104,7 +108,7 @@ createFunctionsSQL =
         ");"
       ]
        <> [ "CREATE TABLE FUNCTION ",
-             fetch_articles,
+             fetch_articles schemaName,
             "(a_id INT64, search STRING)",
             "AS (",
             "SELECT t.* FROM",
@@ -113,7 +117,7 @@ createFunctionsSQL =
             ");"
           ]
        <> [ "CREATE TABLE FUNCTION ",
-             function_no_args <> "()",
+             function_no_args schemaName <> "()",
             "AS (",
             "SELECT t.* FROM",
             articleTableSQL,
@@ -121,28 +125,29 @@ createFunctionsSQL =
           ]
        -- A scalar function
        <> [ "CREATE FUNCTION ",
-             add_int <> "(a INT64, b INT64)",
+             add_int schemaName <> "(a INT64, b INT64)",
             "RETURNS INT64 AS (a + b);"
           ]
   where
-    articleTableSQL = T.pack Constants.bigqueryDataset <> ".article"
+    articleTableSQL = unSchemaName schemaName <> ".article"

-dropFunctionsSQL :: String
-dropFunctionsSQL =
+dropFunctionsSQL :: SchemaName -> String
+dropFunctionsSQL schemaName =
   T.unpack $
     T.unwords $
-      [ "DROP TABLE FUNCTION " <> fetch_articles_returns_table <> ";",
-        "DROP TABLE FUNCTION " <> fetch_articles <> ";",
-        "DROP TABLE FUNCTION " <> function_no_args <> ";",
-        "DROP FUNCTION " <> add_int <> ";"
+      [ "DROP TABLE FUNCTION " <> fetch_articles_returns_table schemaName <> ";",
+        "DROP TABLE FUNCTION " <> fetch_articles schemaName <> ";",
+        "DROP TABLE FUNCTION " <> function_no_args schemaName <> ";",
+        "DROP FUNCTION " <> add_int schemaName <> ";"
       ]

 -- * Tests

 tests :: Context.Options -> SpecWith TestEnvironment
 tests opts = do
-  let dataset = Constants.bigqueryDataset
-  it "Add computed field with non exist function - exception" $ \testEnv ->
+  it "Add computed field with non exist function - exception" $ \testEnv -> do
+    let schemaName = getSchemaName testEnv
+
     shouldReturnYaml
       opts
      ( GraphqlEngine.postMetadataWithStatus
@@ -154,22 +159,22 @@ args:
     source: bigquery
     name: search_articles_1
     table:
-      dataset: *dataset
+      dataset: *schemaName
       name: author
     definition:
       function:
-        dataset: *dataset
+        dataset: *schemaName
         name: non_exist_function_name
       argument_mapping:
         a_id: id
 |]
       )
-      [yaml|
+      [interpolateYaml|
 internal:
 - definition:
     definition:
       function:
-        dataset: hasura
+        dataset: #{schemaName}
         name: non_exist_function_name
       argument_mapping:
         a_id: id
@@ -177,19 +182,21 @@ internal:
     source: bigquery
     comment:
   table:
-    dataset: hasura
+    dataset: #{schemaName}
     name: author
-  reason: 'Inconsistent object: in table "hasura.author": in computed field "search_articles_1":
-    no such function exists: "hasura.non_exist_function_name"'
-  name: computed_field search_articles_1 in table hasura.author in source bigquery
+  reason: 'Inconsistent object: in table "#{schemaName}.author": in computed field "search_articles_1":
+    no such function exists: "#{schemaName}.non_exist_function_name"'
+  name: computed_field search_articles_1 in table #{schemaName}.author in source bigquery
   type: computed_field
 path: "$.args"
-error: 'Inconsistent object: in table "hasura.author": in computed field "search_articles_1":
-  no such function exists: "hasura.non_exist_function_name"'
+error: 'Inconsistent object: in table "#{schemaName}.author": in computed field "search_articles_1":
+  no such function exists: "#{schemaName}.non_exist_function_name"'
 code: invalid-configuration
 |]

-  it "Add computed field without returning table - exception" $ \testEnv ->
+  it "Add computed field without returning table - exception" $ \testEnv -> do
+    let schemaName = getSchemaName testEnv
+
     -- The function 'fetch_articles' is not defined with 'RETURNS TABLE<>' clause,
     -- we need to provide `return_table` in the payload
     shouldReturnYaml
@@ -203,25 +210,25 @@ args:
     source: bigquery
     name: search_articles
     table:
-      dataset: *dataset
+      dataset: *schemaName
       name: author
     definition:
       function:
-        dataset: *dataset
+        dataset: *schemaName
         name: fetch_articles
       argument_mapping:
         a_id: id
       # return_table:
       #   name: article
-      #   dataset: *dataset
+      #   dataset: *schemaName
 |]
       )
-      [yaml|
+      [interpolateYaml|
 internal:
 - definition:
     definition:
       function:
-        dataset: hasura
+        dataset: #{schemaName}
         name: fetch_articles
       argument_mapping:
         a_id: id
@@ -229,23 +236,25 @@ internal:
     source: bigquery
     comment:
   table:
-    dataset: hasura
+    dataset: #{schemaName}
     name: author
-  reason: 'Inconsistent object: in table "hasura.author": in computed field "search_articles":
-    the computed field "search_articles" cannot be added to table "hasura.author"
-    because function "hasura.fetch_articles" is not defined with ''RETURNS TABLE''.
+  reason: 'Inconsistent object: in table "#{schemaName}.author": in computed field "search_articles":
+    the computed field "search_articles" cannot be added to table "#{schemaName}.author"
+    because function "#{schemaName}.fetch_articles" is not defined with ''RETURNS TABLE''.
     Expecting return table name.'
-  name: computed_field search_articles in table hasura.author in source bigquery
+  name: computed_field search_articles in table #{schemaName}.author in source bigquery
   type: computed_field
 path: "$.args"
-error: 'Inconsistent object: in table "hasura.author": in computed field "search_articles":
-  the computed field "search_articles" cannot be added to table "hasura.author" because
-  function "hasura.fetch_articles" is not defined with ''RETURNS TABLE''. Expecting
+error: 'Inconsistent object: in table "#{schemaName}.author": in computed field "search_articles":
+  the computed field "search_articles" cannot be added to table "#{schemaName}.author" because
+  function "#{schemaName}.fetch_articles" is not defined with ''RETURNS TABLE''. Expecting
   return table name.'
 code: invalid-configuration
 |]

-  it "Add computed field with non exist returning table - exception" $ \testEnv ->
+  it "Add computed field with non exist returning table - exception" $ \testEnv -> do
+    let schemaName = getSchemaName testEnv
+
     -- The function 'fetch_articles' is not defined with 'RETURNS TABLE<>' clause,
     -- we need to provide `return_table` in the payload
     shouldReturnYaml
@@ -259,28 +268,28 @@ args:
     source: bigquery
     name: search_articles
     table:
-      dataset: *dataset
+      dataset: *schemaName
       name: author
     definition:
       function:
-        dataset: *dataset
+        dataset: *schemaName
         name: fetch_articles
       argument_mapping:
         a_id: id
       return_table:
         name: non_exist_table
-        dataset: *dataset
+        dataset: *schemaName
 |]
       )
-      [yaml|
+      [interpolateYaml|
 internal:
 - definition:
     definition:
       function:
-        dataset: hasura
+        dataset: #{schemaName}
         name: fetch_articles
       return_table:
-        dataset: hasura
+        dataset: #{schemaName}
         name: non_exist_table
       argument_mapping:
         a_id: id
@@ -288,23 +297,25 @@ internal:
     source: bigquery
     comment:
   table:
-    dataset: hasura
+    dataset: #{schemaName}
     name: author
-  reason: 'Inconsistent object: in table "hasura.author": in computed field "search_articles":
-    the computed field "search_articles" cannot be added to table "hasura.author"
-    because function "hasura.fetch_articles" returning set of table "hasura.non_exist_table"
+  reason: 'Inconsistent object: in table "#{schemaName}.author": in computed field "search_articles":
+    the computed field "search_articles" cannot be added to table "#{schemaName}.author"
+    because function "#{schemaName}.fetch_articles" returning set of table "#{schemaName}.non_exist_table"
     is not tracked'
-  name: computed_field search_articles in table hasura.author in source bigquery
+  name: computed_field search_articles in table #{schemaName}.author in source bigquery
   type: computed_field
 path: "$.args"
-error: 'Inconsistent object: in table "hasura.author": in computed field "search_articles":
-  the computed field "search_articles" cannot be added to table "hasura.author" because
-  function "hasura.fetch_articles" returning set of table "hasura.non_exist_table"
+error: 'Inconsistent object: in table "#{schemaName}.author": in computed field "search_articles":
+  the computed field "search_articles" cannot be added to table "#{schemaName}.author" because
+  function "#{schemaName}.fetch_articles" returning set of table "#{schemaName}.non_exist_table"
   is not tracked'
 code: invalid-configuration
 |]

-  it "Add computed field with returning table when it is not required - exception" $ \testEnv ->
+  it "Add computed field with returning table when it is not required - exception" $ \testEnv -> do
+    let schemaName = getSchemaName testEnv
+
     -- The function 'fetch_articles_returns_table' is defined with 'RETURNS TABLE<>' clause,
     -- we don't need to provide 'return_table' in the payload as the returning fields are inferred
     -- from the function definition
@@ -319,28 +330,28 @@ args:
     source: bigquery
     name: search_articles
     table:
-      dataset: *dataset
+      dataset: *schemaName
       name: author
     definition:
       function:
-        dataset: *dataset
+        dataset: *schemaName
         name: fetch_articles_returns_table
       argument_mapping:
         a_id: id
       return_table:
         name: article
-        dataset: *dataset
+        dataset: *schemaName
 |]
       )
-      [yaml|
+      [interpolateYaml|
 internal:
 - definition:
     definition:
       function:
-        dataset: hasura
+        dataset: #{schemaName}
         name: fetch_articles_returns_table
       return_table:
-        dataset: hasura
+        dataset: #{schemaName}
         name: article
       argument_mapping:
         a_id: id
@@ -348,23 +359,25 @@ internal:
     source: bigquery
     comment:
   table:
-    dataset: hasura
+    dataset: #{schemaName}
     name: author
-  reason: 'Inconsistent object: in table "hasura.author": in computed field "search_articles":
-    the computed field "search_articles" cannot be added to table "hasura.author"
-    because return table "hasura.article" is not required as the function "hasura.fetch_articles_returns_table"
+  reason: 'Inconsistent object: in table "#{schemaName}.author": in computed field "search_articles":
+    the computed field "search_articles" cannot be added to table "#{schemaName}.author"
+    because return table "#{schemaName}.article" is not required as the function "#{schemaName}.fetch_articles_returns_table"
     returns arbitrary column fields'
-  name: computed_field search_articles in table hasura.author in source bigquery
+  name: computed_field search_articles in table #{schemaName}.author in source bigquery
   type: computed_field
 path: "$.args"
-error: 'Inconsistent object: in table "hasura.author": in computed field "search_articles":
-  the computed field "search_articles" cannot be added to table "hasura.author" because
-  return table "hasura.article" is not required as the function "hasura.fetch_articles_returns_table"
+error: 'Inconsistent object: in table "#{schemaName}.author": in computed field "search_articles":
+  the computed field "search_articles" cannot be added to table "#{schemaName}.author" because
+  return table "#{schemaName}.article" is not required as the function "#{schemaName}.fetch_articles_returns_table"
  returns arbitrary column fields'
 code: invalid-configuration
 |]

-  it "Add computed field with a function that has no input arguments - exception" $ \testEnv ->
+  it "Add computed field with a function that has no input arguments - exception" $ \testEnv -> do
+    let schemaName = getSchemaName testEnv
+
     -- The function 'function_no_args' has no input arguments
     shouldReturnYaml
       opts
@@ -377,28 +390,28 @@ args:
     source: bigquery
     name: search_articles
     table:
-      dataset: *dataset
+      dataset: *schemaName
       name: author
     definition:
       function:
-        dataset: *dataset
+        dataset: *schemaName
         name: function_no_args
       argument_mapping:
         a_id: id
       return_table:
         name: article
-        dataset: *dataset
+        dataset: *schemaName
 |]
       )
-      [yaml|
+      [interpolateYaml|
 internal:
 - definition:
     definition:
       function:
-        dataset: hasura
+        dataset: #{schemaName}
         name: function_no_args
       return_table:
-        dataset: hasura
+        dataset: #{schemaName}
         name: article
       argument_mapping:
         a_id: id
@@ -406,21 +419,23 @@ internal:
     source: bigquery
     comment:
   table:
-    dataset: hasura
+    dataset: #{schemaName}
     name: author
-  reason: 'Inconsistent object: in table "hasura.author": in computed field "search_articles":
-    the computed field "search_articles" cannot be added to table "hasura.author"
-    because function "hasura.function_no_args" has no input arguments defined'
-  name: computed_field search_articles in table hasura.author in source bigquery
+  reason: 'Inconsistent object: in table "#{schemaName}.author": in computed field "search_articles":
+    the computed field "search_articles" cannot be added to table "#{schemaName}.author"
+    because function "#{schemaName}.function_no_args" has no input arguments defined'
+  name: computed_field search_articles in table #{schemaName}.author in source bigquery
   type: computed_field
 path: "$.args"
-error: 'Inconsistent object: in table "hasura.author": in computed field "search_articles":
-  the computed field "search_articles" cannot be added to table "hasura.author" because
-  function "hasura.function_no_args" has no input arguments defined'
+error: 'Inconsistent object: in table "#{schemaName}.author": in computed field "search_articles":
+  the computed field "search_articles" cannot be added to table "#{schemaName}.author" because
+  function "#{schemaName}.function_no_args" has no input arguments defined'
 code: invalid-configuration
 |]

-  it "Add computed field with a function that returns a scalar value - exception" $ \testEnv ->
+  it "Add computed field with a function that returns a scalar value - exception" $ \testEnv -> do
+    let schemaName = getSchemaName testEnv
+
     -- The function 'add_int' returns a scalar value of type 'INT64', as of now we do not support
     -- scalar computed fields.
     shouldReturnYaml
@@ -434,44 +449,46 @@ args:
     source: bigquery
     name: add_int
     table:
-      dataset: *dataset
+      dataset: *schemaName
       name: author
     definition:
      function:
-        dataset: *dataset
+        dataset: *schemaName
         name: add_int
      argument_mapping: {}
 |]
       )
-      [yaml|
+      [interpolateYaml|
 internal:
 - definition:
     definition:
       function:
-        dataset: hasura
+        dataset: #{schemaName}
         name: add_int
       argument_mapping: {}
     name: add_int
     source: bigquery
     comment:
   table:
-    dataset: hasura
+    dataset: #{schemaName}
     name: author
   reason: |
-    Inconsistent object: in table "hasura.author": in computed field "add_int": the computed field "add_int" cannot be added to table "hasura.author" for the following reasons:
-      • function "hasura.add_int" is not a TABLE_VALUED_FUNCTION
-      • function "hasura.add_int" is not defined with 'RETURNS TABLE'. Expecting return table name.
-  name: computed_field add_int in table hasura.author in source bigquery
+    Inconsistent object: in table "#{schemaName}.author": in computed field "add_int": the computed field "add_int" cannot be added to table "#{schemaName}.author" for the following reasons:
|
||||
• function "#{schemaName}.add_int" is not a TABLE_VALUED_FUNCTION
|
||||
• function "#{schemaName}.add_int" is not defined with 'RETURNS TABLE'. Expecting return table name.
|
||||
name: computed_field add_int in table #{schemaName}.author in source bigquery
|
||||
type: computed_field
|
||||
path: "$.args"
|
||||
error: |
|
||||
Inconsistent object: in table "hasura.author": in computed field "add_int": the computed field "add_int" cannot be added to table "hasura.author" for the following reasons:
|
||||
• function "hasura.add_int" is not a TABLE_VALUED_FUNCTION
|
||||
• function "hasura.add_int" is not defined with 'RETURNS TABLE'. Expecting return table name.
|
||||
Inconsistent object: in table "#{schemaName}.author": in computed field "add_int": the computed field "add_int" cannot be added to table "#{schemaName}.author" for the following reasons:
|
||||
• function "#{schemaName}.add_int" is not a TABLE_VALUED_FUNCTION
|
||||
• function "#{schemaName}.add_int" is not defined with 'RETURNS TABLE'. Expecting return table name.
|
||||
code: invalid-configuration
|
||||
|]
|
||||
|
||||
it "Add computed field with invalid argument name in argument_mapping - exception" $ \testEnv ->
|
||||
it "Add computed field with invalid argument name in argument_mapping - exception" $ \testEnv -> do
|
||||
let schemaName = getSchemaName testEnv
|
||||
|
||||
shouldReturnYaml
|
||||
opts
|
||||
( GraphqlEngine.postMetadataWithStatus
|
||||
@ -483,22 +500,22 @@ args:
|
||||
source: bigquery
|
||||
name: search_articles
|
||||
table:
|
||||
dataset: *dataset
|
||||
dataset: *schemaName
|
||||
name: author
|
||||
definition:
|
||||
function:
|
||||
dataset: *dataset
|
||||
dataset: *schemaName
|
||||
name: fetch_articles_returns_table
|
||||
argument_mapping:
|
||||
invalid_argument: id
|
||||
|]
|
||||
)
|
||||
[yaml|
|
||||
[interpolateYaml|
|
||||
internal:
|
||||
- definition:
|
||||
definition:
|
||||
function:
|
||||
dataset: hasura
|
||||
dataset: #{schemaName}
|
||||
name: fetch_articles_returns_table
|
||||
argument_mapping:
|
||||
invalid_argument: id
|
||||
@ -506,18 +523,18 @@ internal:
|
||||
source: bigquery
|
||||
comment:
|
||||
table:
|
||||
dataset: hasura
|
||||
dataset: #{schemaName}
|
||||
name: author
|
||||
reason: 'Inconsistent object: in table "hasura.author": in computed field "search_articles":
|
||||
the computed field "search_articles" cannot be added to table "hasura.author"
|
||||
because the argument "invalid_argument" is not one of function "hasura.fetch_articles_returns_table"
|
||||
reason: 'Inconsistent object: in table "#{schemaName}.author": in computed field "search_articles":
|
||||
the computed field "search_articles" cannot be added to table "#{schemaName}.author"
|
||||
because the argument "invalid_argument" is not one of function "#{schemaName}.fetch_articles_returns_table"
|
||||
input arguments'
|
||||
name: computed_field search_articles in table hasura.author in source bigquery
|
||||
name: computed_field search_articles in table #{schemaName}.author in source bigquery
|
||||
type: computed_field
|
||||
path: "$.args"
|
||||
error: 'Inconsistent object: in table "hasura.author": in computed field "search_articles":
|
||||
the computed field "search_articles" cannot be added to table "hasura.author" because
|
||||
the argument "invalid_argument" is not one of function "hasura.fetch_articles_returns_table"
|
||||
error: 'Inconsistent object: in table "#{schemaName}.author": in computed field "search_articles":
|
||||
the computed field "search_articles" cannot be added to table "#{schemaName}.author" because
|
||||
the argument "invalid_argument" is not one of function "#{schemaName}.fetch_articles_returns_table"
|
||||
input arguments'
|
||||
code: invalid-configuration
|
||||
|]
|
||||
|
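Every hunk above applies the same refactor: the hardcoded `hasura` dataset is replaced by a per-test schema name, looked up from the test environment with `getSchemaName` and spliced into GraphQL documents and expected YAML via the `#{...}` syntax of the `graphql` and `interpolateYaml` quasiquoters. A minimal sketch of the pattern, assuming `getSchemaName :: TestEnvironment -> SchemaName` and the harness functions imported in these specs; the test body itself is illustrative, not part of the commit:

```haskell
-- Sketch only: shows how a spec resolves its per-test BigQuery dataset name
-- at runtime instead of hardcoding "hasura".
spec :: Context.Options -> SpecWith TestEnvironment
spec opts =
  it "selects from the per-test dataset" \testEnvironment -> do
    -- Each test run gets its own dataset, so the table's GraphQL field name
    -- is prefixed with the runtime schema name rather than a fixed one.
    let schemaName = getSchemaName testEnvironment
    shouldReturnYaml
      opts
      ( GraphqlEngine.postGraphql
          testEnvironment
          [graphql|
            query {
              #{schemaName}_author(order_by: [{ id: asc }]) {
                id
              }
            }
          |]
      )
      [interpolateYaml|
        data:
          #{schemaName}_author:
            - id: 1
      |]
```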
@ -10,10 +10,11 @@ import Data.List.NonEmpty qualified as NE
import Harness.Backend.BigQuery qualified as BigQuery
import Harness.GraphqlEngine (postGraphql)
import Harness.Quoter.Graphql (graphql)
import Harness.Quoter.Yaml (yaml)
import Harness.Quoter.Yaml (interpolateYaml)
import Harness.Test.Context qualified as Context
import Harness.Test.Schema (Table (..), table)
import Harness.Test.Schema qualified as Schema
import Harness.Test.SchemaName
import Harness.TestEnvironment (TestEnvironment)
import Harness.Yaml (shouldReturnYaml)
import Hasura.Prelude
@ -120,11 +121,13 @@ tests opts = do

describe "Understanding BigQuery values via GraphQL" do
it "Selects all types" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
data:
hasura_all_types:
#{schemaName}_all_types:
- string: 'ANOTHER STRING'
bytes: BQQDAgEA
integer: '3'
@ -163,7 +166,7 @@ tests opts = do
testEnvironment
[graphql|
query {
hasura_all_types(order_by: [{ string: asc }]) {
#{schemaName}_all_types(order_by: [{ string: asc }]) {
string
bytes
integer
@ -184,11 +187,13 @@ tests opts = do
actual `shouldBe` expected

it "Aggregates all comparable types" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
data:
hasura_all_types_aggregate:
#{schemaName}_all_types_aggregate:
aggregate:
max:
bignumeric: '23456789098765432'
@ -222,7 +227,7 @@ tests opts = do
testEnvironment
[graphql|
query {
hasura_all_types_aggregate {
#{schemaName}_all_types_aggregate {
aggregate {
max {
string

@ -14,7 +14,7 @@ import Harness.Backend.DataConnector qualified as DataConnector
import Harness.GraphqlEngine qualified as GraphqlEngine
import Harness.Quoter.Graphql (graphql)
import Harness.Quoter.Yaml (yaml)
import Harness.Test.BackendType (BackendType (..), defaultBackendTypeString, defaultSource)
import Harness.Test.BackendType (BackendType (DataConnector), defaultBackendTypeString, defaultSource)
import Harness.Test.Context qualified as Context
import Harness.TestEnvironment (TestEnvironment)
import Harness.Yaml (shouldReturnYaml)

@ -11,10 +11,11 @@ import Harness.Backend.Postgres qualified as Postgres
import Harness.Backend.Sqlserver qualified as Sqlserver
import Harness.GraphqlEngine qualified as GraphqlEngine
import Harness.Quoter.Graphql (graphql)
import Harness.Quoter.Yaml (yaml)
import Harness.Quoter.Yaml (interpolateYaml)
import Harness.Test.Context qualified as Context
import Harness.Test.Schema (Table (..), table)
import Harness.Test.Schema qualified as Schema
import Harness.Test.SchemaName
import Harness.TestEnvironment (TestEnvironment)
import Harness.Yaml (shouldReturnYaml)
import Hasura.Prelude
@ -124,34 +125,38 @@ longtable =

tests :: Context.Options -> SpecWith TestEnvironment
tests opts = do
it "select long table" $ \testEnvironment ->
it "select long table" $ \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

shouldReturnYaml
opts
( GraphqlEngine.postGraphql
testEnvironment
[graphql|
query {
hasura_i_need_a_table_with_a_long_name_to_test_rename_identifiers(order_by:[{id:asc}]) {
#{schemaName}_i_need_a_table_with_a_long_name_to_test_rename_identifiers(order_by:[{id:asc}]) {
id
}
}
|]
)
[yaml|
[interpolateYaml|
data:
hasura_i_need_a_table_with_a_long_name_to_test_rename_identifiers:
#{schemaName}_i_need_a_table_with_a_long_name_to_test_rename_identifiers:
- id: 1
- id: 2
|]

it "select long column" $ \testEnvironment ->
it "select long column" $ \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

shouldReturnYaml
opts
( GraphqlEngine.postGraphql
testEnvironment
[yaml|
[interpolateYaml|
query {
hasura_i_need_a_table_with_a_long_name_to_test_rename_identifiers(order_by:[{i_need_a_column_with_a_long_name_to_test_rename_identifiers:asc, i_need_a_column_with_a_long_name_but_is_different:asc}]) {
#{schemaName}_i_need_a_table_with_a_long_name_to_test_rename_identifiers(order_by:[{i_need_a_column_with_a_long_name_to_test_rename_identifiers:asc, i_need_a_column_with_a_long_name_but_is_different:asc}]) {
id
regular_id
i_need_a_column_with_a_long_name_to_test_rename_identifiers
@ -159,9 +164,9 @@ query {
}
|]
)
[yaml|
[interpolateYaml|
data:
hasura_i_need_a_table_with_a_long_name_to_test_rename_identifiers:
#{schemaName}_i_need_a_table_with_a_long_name_to_test_rename_identifiers:
- id: 1
regular_id: 1
i_need_a_column_with_a_long_name_to_test_rename_identifiers: 1
@ -170,14 +175,16 @@ data:
i_need_a_column_with_a_long_name_to_test_rename_identifiers: 2
|]

it "select long column via array relationship" $ \testEnvironment ->
it "select long column via array relationship" $ \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

shouldReturnYaml
opts
( GraphqlEngine.postGraphql
testEnvironment
[yaml|
[interpolateYaml|
query {
hasura_regular(order_by:[{id:asc}]) {
#{schemaName}_regular(order_by:[{id:asc}]) {
id
i_need_a_table_with_a_long_name_to_test_rename_identifierss_by_id_to_regular_id(order_by:[{i_need_a_column_with_a_long_name_to_test_rename_identifiers:asc, i_need_a_column_with_a_long_name_but_is_different:asc}]) {
i_need_a_column_with_a_long_name_to_test_rename_identifiers
@ -187,9 +194,9 @@ query {
}
|]
)
[yaml|
[interpolateYaml|
data:
hasura_regular:
#{schemaName}_regular:
- id: 1
i_need_a_table_with_a_long_name_to_test_rename_identifierss_by_id_to_regular_id:
- i_need_a_column_with_a_long_name_to_test_rename_identifiers: 1

@ -17,11 +17,11 @@ import Harness.Backend.Postgres qualified as Postgres
import Harness.Backend.Sqlserver qualified as Sqlserver
import Harness.GraphqlEngine (postGraphql, postGraphqlWithPair)
import Harness.Quoter.Graphql (graphql)
import Harness.Quoter.Yaml (yaml)
import Harness.Test.Context (Options (..))
import Harness.Quoter.Yaml (interpolateYaml)
import Harness.Test.Fixture qualified as Fixture
import Harness.Test.Schema (Table (..), table)
import Harness.Test.Schema qualified as Schema
import Harness.Test.SchemaName
import Harness.TestEnvironment (TestEnvironment)
import Harness.Yaml (shouldReturnYaml)
import Hasura.Prelude
@ -91,11 +91,13 @@ tests opts = do

describe "Mixes @include and @skip directives" do
it "Returns the field when @include(if: true) and @skip(if: false)" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
data:
hasura_author:
#{schemaName}_author:
- id: 1
name: Author 1
- id: 2
@ -108,7 +110,7 @@ tests opts = do
testEnvironment
[graphql|
query {
hasura_author(order_by: [{ id: asc }]) {
#{schemaName}_author(order_by: [{ id: asc }]) {
id @include(if: true) @skip(if: false)
name
}
@ -118,11 +120,13 @@ tests opts = do
actual `shouldBe` expected

it "Doesn't return the field when @include(if: false) and @skip(if: false)" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
data:
hasura_author:
#{schemaName}_author:
- name: Author 1
- name: Author 2
|]
@ -133,7 +137,7 @@ tests opts = do
testEnvironment
[graphql|
query {
hasura_author(order_by: [{ id: asc }]) {
#{schemaName}_author(order_by: [{ id: asc }]) {
id @include(if: false) @skip(if: false)
name
}
@ -143,11 +147,13 @@ tests opts = do
actual `shouldBe` expected

it "Doesn't return the field when @include(if: false) and @skip(if: true)" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
data:
hasura_author:
#{schemaName}_author:
- name: Author 1
- name: Author 2
|]
@ -158,7 +164,7 @@ tests opts = do
testEnvironment
[graphql|
query {
hasura_author(order_by: [{ id: asc }]) {
#{schemaName}_author(order_by: [{ id: asc }]) {
id @include(if: false) @skip(if: true)
name
}
@ -168,11 +174,13 @@ tests opts = do
actual `shouldBe` expected

it "Doesn't return the field when @include(if: true) and @skip(if: true)" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
data:
hasura_author:
#{schemaName}_author:
- name: Author 1
- name: Author 2
|]
@ -183,7 +191,7 @@ tests opts = do
testEnvironment
[graphql|
query test($skip: Boolean!, $include: Boolean!) {
hasura_author(order_by: [{ id: asc }]) {
#{schemaName}_author(order_by: [{ id: asc }]) {
id @include(if: $include) @skip(if: $skip)
name
}

@ -17,11 +17,11 @@ import Harness.Backend.Postgres qualified as Postgres
import Harness.Backend.Sqlserver qualified as Sqlserver
import Harness.GraphqlEngine (postGraphql, postGraphqlWithPair)
import Harness.Quoter.Graphql (graphql)
import Harness.Quoter.Yaml (yaml)
import Harness.Test.Context (Options (..))
import Harness.Quoter.Yaml (interpolateYaml)
import Harness.Test.Fixture qualified as Fixture
import Harness.Test.Schema (Table (..), table)
import Harness.Test.Schema qualified as Schema
import Harness.Test.SchemaName
import Harness.TestEnvironment (TestEnvironment)
import Harness.Yaml (shouldReturnYaml)
import Hasura.Prelude
@ -94,11 +94,13 @@ tests opts = do

describe "Include fields conditionally" do
it "Includes field with @include(if: true)" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
data:
hasura_author:
#{schemaName}_author:
- id: 1
name: Author 1
- id: 2
@ -111,7 +113,7 @@ tests opts = do
testEnvironment
[graphql|
query test($include: Boolean!) {
hasura_author(order_by: [{ id: asc }]) {
#{schemaName}_author(order_by: [{ id: asc }]) {
id @include(if: $include)
name
}
@ -122,11 +124,13 @@ tests opts = do
actual `shouldBe` expected

it "Doesn't include field with @include(if: false)" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
data:
hasura_author:
#{schemaName}_author:
- name: Author 1
- name: Author 2
|]
@ -137,7 +141,7 @@ tests opts = do
testEnvironment
[graphql|
query {
hasura_author(order_by: [{ id: asc }]) {
#{schemaName}_author(order_by: [{ id: asc }]) {
id @include(if: false)
name
}

@ -17,11 +17,11 @@ import Harness.Backend.Postgres qualified as Postgres
import Harness.Backend.Sqlserver qualified as Sqlserver
import Harness.GraphqlEngine (postGraphql, postGraphqlWithPair)
import Harness.Quoter.Graphql (graphql)
import Harness.Quoter.Yaml (yaml)
import Harness.Test.Context (Options (..))
import Harness.Quoter.Yaml (interpolateYaml)
import Harness.Test.Fixture qualified as Fixture
import Harness.Test.Schema (Table (..), table)
import Harness.Test.Schema qualified as Schema
import Harness.Test.SchemaName
import Harness.TestEnvironment (TestEnvironment)
import Harness.Yaml (shouldReturnYaml)
import Hasura.Prelude
@ -94,11 +94,13 @@ tests opts = do

describe "Skip fields conditionally" do
it "Skips field with @skip(if: true)" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
data:
hasura_author:
#{schemaName}_author:
- name: Author 1
- name: Author 2
|]
@ -109,7 +111,7 @@ tests opts = do
testEnvironment
[graphql|
query {
hasura_author(order_by: [{ id: asc }]) {
#{schemaName}_author(order_by: [{ id: asc }]) {
id @skip(if: true)
name
}
@ -119,11 +121,13 @@ tests opts = do
actual `shouldBe` expected

it "Doesn't skip field with @skip(if: false)" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
data:
hasura_author:
#{schemaName}_author:
- id: 1
name: Author 1
- id: 2
@ -136,7 +140,7 @@ tests opts = do
testEnvironment
[graphql|
query test($skip: Boolean!) {
hasura_author(order_by: [{ id: asc }]) {
#{schemaName}_author(order_by: [{ id: asc }]) {
id @skip(if: $skip)
name
}

@ -17,11 +17,11 @@ import Harness.Backend.Postgres qualified as Postgres
import Harness.Backend.Sqlserver qualified as Sqlserver
import Harness.GraphqlEngine (postGraphql)
import Harness.Quoter.Graphql (graphql)
import Harness.Quoter.Yaml (yaml)
import Harness.Test.Context (Options (..))
import Harness.Quoter.Yaml (interpolateYaml, yaml)
import Harness.Test.Fixture qualified as Fixture
import Harness.Test.Schema (Table (..), table)
import Harness.Test.Schema qualified as Schema
import Harness.Test.SchemaName
import Harness.TestEnvironment (TestEnvironment)
import Harness.Yaml (shouldReturnYaml)
import Hasura.Prelude
@ -91,12 +91,14 @@ tests opts = do

describe "Directives" do
it "Rejects unknown directives" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
errors:
- extensions:
path: $.selectionSet.hasura_author.selectionSet
path: $.selectionSet.#{schemaName}_author.selectionSet
code: validation-failed
message: |-
directive 'exclude' is not defined in the schema
@ -108,7 +110,7 @@ tests opts = do
testEnvironment
[graphql|
query {
hasura_author {
#{schemaName}_author {
id @exclude(if: true)
name
}
@ -118,12 +120,14 @@ tests opts = do
actual `shouldBe` expected

it "Rejects duplicate directives" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
errors:
- extensions:
path: $.selectionSet.hasura_author.selectionSet
path: $.selectionSet.#{schemaName}_author.selectionSet
code: validation-failed
message: |-
the following directives are used more than once: ['include']
@ -135,7 +139,7 @@ tests opts = do
testEnvironment
[graphql|
query {
hasura_author {
#{schemaName}_author {
id @include(if: true) @include(if: true)
name
}
@ -145,6 +149,8 @@ tests opts = do
actual `shouldBe` expected

it "Rejects directives on wrong element" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
@ -162,7 +168,7 @@ tests opts = do
testEnvironment
[graphql|
query @include(if: true) {
hasura_author {
#{schemaName}_author {
id
name
}

@ -16,11 +16,11 @@ import Harness.Backend.Postgres qualified as Postgres
import Harness.Backend.Sqlserver qualified as Sqlserver
import Harness.GraphqlEngine (postGraphql)
import Harness.Quoter.Graphql (graphql)
import Harness.Quoter.Yaml (yaml)
import Harness.Test.Context (Options (..))
import Harness.Quoter.Yaml (interpolateYaml)
import Harness.Test.Fixture qualified as Fixture
import Harness.Test.Schema (Table (..), table)
import Harness.Test.Schema qualified as Schema
import Harness.Test.SchemaName
import Harness.TestEnvironment (TestEnvironment)
import Harness.Yaml (shouldReturnYaml)
import Hasura.Prelude
@ -94,11 +94,13 @@ tests opts = do
shouldBe = shouldReturnYaml opts

it "Select by id" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
data:
hasura_author:
#{schemaName}_author:
- name: Author 1
id: 1
|]
@ -109,7 +111,7 @@ tests opts = do
testEnvironment
[graphql|
query {
hasura_author(where: {id: {_eq: 1}}) {
#{schemaName}_author(where: {id: {_eq: 1}}) {
name
id
}

@ -17,11 +17,12 @@ import Harness.Backend.Postgres qualified as Postgres
import Harness.Backend.Sqlserver qualified as Sqlserver
import Harness.GraphqlEngine (postGraphql)
import Harness.Quoter.Graphql (graphql)
import Harness.Quoter.Yaml (yaml)
import Harness.Quoter.Yaml (interpolateYaml)
import Harness.Test.Context (Options (..))
import Harness.Test.Context qualified as Context
import Harness.Test.Schema (Table (..), table)
import Harness.Test.Schema qualified as Schema
import Harness.Test.SchemaName
import Harness.TestEnvironment (TestEnvironment)
import Harness.Yaml (shouldReturnYaml)
import Hasura.Prelude
@ -120,11 +121,13 @@ tests opts = do

describe "Nested relationship queries" do
it "Nests with 'where' clauses" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
data:
hasura_author:
#{schemaName}_author:
- name: "Author 1"
articles_by_id_to_author_id:
- id: 2
@ -136,7 +139,7 @@ tests opts = do
testEnvironment
[graphql|
query {
hasura_author(
#{schemaName}_author(
order_by: [{ id: asc }],
where: { id: { _eq: 1 } }
) {
@ -155,11 +158,13 @@ tests opts = do
actual `shouldBe` expected

it "Nesting in the 'where' clause" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
data:
hasura_article:
#{schemaName}_article:
- id: 1
author_id: 1
- id: 2
@ -172,7 +177,7 @@ tests opts = do
testEnvironment
[graphql|
query {
hasura_article (
#{schemaName}_article (
order_by: [{ id: asc }],
where: { author_by_author_id_to_id: { name: { _eq: "Author 1" } } }
) {
@ -185,11 +190,13 @@ tests opts = do
actual `shouldBe` expected

it "Deep nesting" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
data:
hasura_article:
#{schemaName}_article:
- id: 1
author_by_author_id_to_id:
id: 1
@ -209,7 +216,7 @@ tests opts = do
testEnvironment
[graphql|
query {
hasura_article(where: {id: {_eq: 1}}) {
#{schemaName}_article(where: {id: {_eq: 1}}) {
id
author_by_author_id_to_id {
id

@ -16,11 +16,11 @@ import Harness.Backend.Postgres qualified as Postgres
import Harness.Backend.Sqlserver qualified as Sqlserver
import Harness.GraphqlEngine (postGraphql)
import Harness.Quoter.Graphql (graphql)
import Harness.Quoter.Yaml (yaml)
import Harness.Test.Context (Options (..))
import Harness.Quoter.Yaml (interpolateYaml)
import Harness.Test.Fixture qualified as Fixture
import Harness.Test.Schema (Table (..), table)
import Harness.Test.Schema qualified as Schema
import Harness.Test.SchemaName
import Harness.TestEnvironment (TestEnvironment)
import Harness.Yaml (shouldReturnYaml)
import Hasura.Prelude
@ -95,11 +95,13 @@ tests opts = do

describe "Paginate query results" do
it "Returns one element" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
data:
hasura_author:
#{schemaName}_author:
- name: Author 1
id: 1
|]
@ -110,7 +112,7 @@ tests opts = do
testEnvironment
[graphql|
query {
hasura_author(order_by: [{ id: asc }], limit: 1) {
#{schemaName}_author(order_by: [{ id: asc }], limit: 1) {
name
id
}

@ -16,11 +16,11 @@ import Harness.Backend.Postgres qualified as Postgres
import Harness.Backend.Sqlserver qualified as Sqlserver
import Harness.GraphqlEngine (postGraphql)
import Harness.Quoter.Graphql (graphql)
import Harness.Quoter.Yaml (yaml)
import Harness.Test.Context (Options (..))
import Harness.Quoter.Yaml (interpolateYaml)
import Harness.Test.Fixture qualified as Fixture
import Harness.Test.Schema (Table (..), table)
import Harness.Test.Schema qualified as Schema
import Harness.Test.SchemaName
import Harness.TestEnvironment (TestEnvironment)
import Harness.Yaml (shouldReturnYaml)
import Hasura.Prelude
@ -95,11 +95,13 @@ tests opts = do

describe "Paginate query results" do
it "Offsets results by one element" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
data:
hasura_author:
#{schemaName}_author:
- name: Author 2
id: 2
- name: Author 3
@ -114,7 +116,7 @@ tests opts = do
testEnvironment
[graphql|
query {
hasura_author(order_by: [{ id: asc }], offset: 1) {
#{schemaName}_author(order_by: [{ id: asc }], offset: 1) {
name
id
}
@ -124,11 +126,13 @@ tests opts = do
actual `shouldBe` expected

it "Correctly handles ordering, offsets, and limits" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
data:
hasura_author:
#{schemaName}_author:
- id: 2
name: Author 2
|]
@ -139,7 +143,7 @@ tests opts = do
testEnvironment
[graphql|
query {
hasura_author(limit: 1, offset: 2, order_by: [{ id: desc }]) {
#{schemaName}_author(limit: 1, offset: 2, order_by: [{ id: desc }]) {
id
name
}

@ -17,11 +17,12 @@ import Harness.Backend.Postgres qualified as Postgres
import Harness.Backend.Sqlserver qualified as Sqlserver
import Harness.GraphqlEngine (postGraphql)
import Harness.Quoter.Graphql (graphql)
import Harness.Quoter.Yaml (yaml)
import Harness.Quoter.Yaml (interpolateYaml, yaml)
import Harness.Test.Context (Options (..))
import Harness.Test.Context qualified as Context
import Harness.Test.Schema (Table (..), table)
import Harness.Test.Schema qualified as Schema
import Harness.Test.SchemaName
import Harness.TestEnvironment (TestEnvironment)
import Harness.Yaml (shouldReturnYaml)
import Hasura.Prelude
@ -106,11 +107,13 @@ tests opts = do

describe "Simple object queries" do
it "Fetch a list of authors" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
data:
hasura_author:
#{schemaName}_author:
- id: 1
name: "Author 1"
- id: 2
@ -125,7 +128,7 @@ tests opts = do
testEnvironment
[graphql|
query {
hasura_author(order_by: [{ id: asc }]) {
#{schemaName}_author(order_by: [{ id: asc }]) {
id
name
}
@ -186,15 +189,17 @@ tests opts = do
actual `shouldBe` expected

it "Fails on missing fields" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
errors:
- extensions:
code: validation-failed
path: $.selectionSet.hasura_author.selectionSet.notPresentCol
path: $.selectionSet.#{schemaName}_author.selectionSet.notPresentCol
message: |-
field 'notPresentCol' not found in type: 'hasura_author'
field 'notPresentCol' not found in type: '#{schemaName}_author'
|]

actual :: IO Value
@ -203,7 +208,7 @@ tests opts = do
testEnvironment
[graphql|
query {
hasura_author {
#{schemaName}_author {
id
name
notPresentCol

@ -13,11 +13,12 @@ import Harness.Backend.Mysql qualified as Mysql
import Harness.Backend.Postgres qualified as Postgres
import Harness.Backend.Sqlserver qualified as Sqlserver
import Harness.GraphqlEngine (postGraphqlYaml)
import Harness.Quoter.Yaml (yaml)
import Harness.Quoter.Yaml (interpolateYaml)
import Harness.Test.Context (Options (..))
import Harness.Test.Fixture qualified as Fixture
import Harness.Test.Schema (Table (..), table)
import Harness.Test.Schema qualified as Schema
import Harness.Test.SchemaName
import Harness.TestEnvironment (TestEnvironment)
import Harness.Yaml (shouldReturnYaml)
import Hasura.Prelude
@ -91,11 +92,13 @@ tests opts = describe "BasicFieldsSpec" do

describe "Use the `operationName` key" do
it "Selects the correct operation" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
data:
hasura_author:
#{schemaName}_author:
- name: Author 1
id: 1
- name: Author 2
@ -106,7 +109,7 @@ tests opts = describe "BasicFieldsSpec" do
actual =
postGraphqlYaml
testEnvironment
[yaml|
[interpolateYaml|
operationName: chooseThisOne
query: |
query ignoreThisOne {
@ -115,7 +118,7 @@ tests opts = describe "BasicFieldsSpec" do
}
}
query chooseThisOne {
hasura_author(order_by:[{id:asc}]) {
#{schemaName}_author(order_by:[{id:asc}]) {
id
name
}

@ -16,11 +16,11 @@ import Harness.Backend.Postgres qualified as Postgres
import Harness.Backend.Sqlserver qualified as Sqlserver
import Harness.GraphqlEngine (postGraphql)
import Harness.Quoter.Graphql (graphql)
import Harness.Quoter.Yaml (yaml)
import Harness.Test.Context (Options (..))
import Harness.Quoter.Yaml (interpolateYaml)
import Harness.Test.Fixture qualified as Fixture
import Harness.Test.Schema (Table (..), table)
import Harness.Test.Schema qualified as Schema
import Harness.Test.SchemaName
import Harness.TestEnvironment (TestEnvironment)
import Harness.Yaml (shouldReturnYaml)
import Hasura.Prelude
@ -93,11 +93,13 @@ tests opts = do

describe "Sorting results by IDs" do
it "Ascending" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
data:
hasura_author:
#{schemaName}_author:
- name: Bob
id: 1
- name: Alice
@ -110,7 +112,7 @@ tests opts = do
testEnvironment
[graphql|
query {
hasura_author (order_by: [{ id: asc }]) {
#{schemaName}_author (order_by: [{ id: asc }]) {
name
id
}
@ -120,11 +122,13 @@ tests opts = do
actual `shouldBe` expected

it "Descending" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
data:
hasura_author:
#{schemaName}_author:
- name: Alice
id: 2
- name: Bob
@ -137,7 +141,7 @@ tests opts = do
testEnvironment
[graphql|
query {
hasura_author (order_by: [{ id: desc }]) {
#{schemaName}_author (order_by: [{ id: desc }]) {
name
id
}
@ -148,11 +152,13 @@ tests opts = do

describe "Sorting results by strings" do
it "Ascending" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
data:
hasura_author:
#{schemaName}_author:
- name: Alice
id: 2
- name: Bob
@ -165,7 +171,7 @@ tests opts = do
testEnvironment
[graphql|
query {
hasura_author (order_by: [{ name: asc }]) {
#{schemaName}_author (order_by: [{ name: asc }]) {
name
id
}
@ -175,11 +181,13 @@ tests opts = do
actual `shouldBe` expected

it "Descending" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
data:
hasura_author:
#{schemaName}_author:
- name: Bob
id: 1
- name: Alice
@ -192,7 +200,7 @@ tests opts = do
testEnvironment
[graphql|
query {
hasura_author (order_by: [{ name: desc }]) {
#{schemaName}_author (order_by: [{ name: desc }]) {
name
id
}

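The setup hunks that follow change `Postgres.createTable` to take the `TestEnvironment` as an extra first argument, so tables are created in the per-test schema rather than a fixed one. A short sketch of a setup action under that assumption; the signature is inferred from the call sites, and `lhsSetup` itself is a hypothetical wrapper, not a function from the commit:

```haskell
-- Sketch only, assuming the new signature
-- Postgres.createTable :: TestEnvironment -> Schema.Table -> IO ().
lhsSetup :: TestEnvironment -> IO ()
lhsSetup testEnvironment = do
  Postgres.createTable testEnvironment track -- created in the per-test schema
  Postgres.insertTable track                 -- row insertion is unchanged
  Schema.trackTable Context.Postgres "source" track testEnvironment
```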
@ -260,7 +260,7 @@ args:
configuration: *sourceConfig
|]
-- setup tables only
Postgres.createTable track
Postgres.createTable testEnvironment track
Postgres.insertTable track
Schema.trackTable Context.Postgres sourceName track testEnvironment

@ -49,6 +49,7 @@ import Harness.Test.Context (Context (..))
import Harness.Test.Context qualified as Context
import Harness.Test.Schema (Table (..), table)
import Harness.Test.Schema qualified as Schema
import Harness.Test.SchemaName
import Harness.TestEnvironment (Server, TestEnvironment, stopServer)
import Hasura.Prelude

@ -196,7 +197,7 @@ args:
configuration: *sourceConfig
|]
-- setup tables only
Postgres.createTable albumTable
Postgres.createTable testEnvironment albumTable
Postgres.insertTable albumTable
Schema.trackTable Context.Postgres sourceName albumTable testEnvironment

@ -218,13 +219,13 @@ args:
configuration: *sourceConfig
|]
-- setup tables only
Postgres.createTable track
Postgres.createTable testEnvironment track
Postgres.insertTable track
Schema.trackTable Context.Postgres sourceName track testEnvironment

createSourceRemoteRelationship :: TestEnvironment -> IO ()
createSourceRemoteRelationship testEnvironment = do
let schemaName = Context.defaultSchema Context.Postgres
let schemaName = getSchemaName testEnvironment
GraphqlEngine.postMetadata_
testEnvironment
[yaml|
@ -259,7 +260,8 @@ lhsPostgresTeardown = Postgres.dropTable track

createRemoteSchemaRemoteRelationship :: TestEnvironment -> IO ()
createRemoteSchemaRemoteRelationship testEnvironment = do
let schemaName = Context.defaultSchema Context.Postgres
let schemaName = getSchemaName testEnvironment

GraphqlEngine.postMetadata_
testEnvironment
[yaml|

@ -34,6 +34,7 @@ import Harness.Test.Context (Context (..))
import Harness.Test.Context qualified as Context
import Harness.Test.Schema (Table (..))
import Harness.Test.Schema qualified as Schema
import Harness.Test.SchemaName
import Harness.TestEnvironment (Server, TestEnvironment, stopServer)
import Harness.Yaml (shouldBeYaml, shouldReturnYaml)
import Hasura.Prelude
@ -213,9 +214,10 @@ lhsPostgresMkLocalTestEnvironment _ = pure Nothing

lhsPostgresSetup :: Value -> (TestEnvironment, Maybe Server) -> IO ()
lhsPostgresSetup rhsTableName (testEnvironment, _) = do
let schemaName = getSchemaName testEnvironment

let sourceName = "source"
sourceConfig = Postgres.defaultSourceConfiguration
schemaName = Context.defaultSchema Context.Postgres
-- Add remote source
GraphqlEngine.postMetadata_
testEnvironment
@ -226,7 +228,7 @@ args:
configuration: *sourceConfig
|]
-- setup tables only
Postgres.createTable artist
Postgres.createTable testEnvironment artist
Postgres.insertTable artist
Schema.trackTable Context.Postgres sourceName artist testEnvironment

@ -282,9 +284,10 @@ lhsSQLServerMkLocalTestEnvironment _ = pure Nothing

lhsSQLServerSetup :: Value -> (TestEnvironment, Maybe Server) -> IO ()
lhsSQLServerSetup rhsTableName (testEnvironment, _) = do
let schemaName = getSchemaName testEnvironment

let sourceName = "source"
sourceConfig = SQLServer.defaultSourceConfiguration
schemaName = Context.defaultSchema Context.SQLServer
-- Add remote source
GraphqlEngine.postMetadata_
testEnvironment
@ -536,9 +539,9 @@ lhsRemoteServerTeardown (_, maybeServer) = traverse_ stopServer maybeServer

rhsPostgresSetup :: (TestEnvironment, ()) -> IO ()
rhsPostgresSetup (testEnvironment, _) = do
let schemaName = getSchemaName testEnvironment
let sourceName = "target"
sourceConfig = Postgres.defaultSourceConfiguration
schemaName = Context.defaultSchema Context.Postgres
-- Add remote source
GraphqlEngine.postMetadata_
testEnvironment
@ -549,7 +552,7 @@ args:
configuration: *sourceConfig
|]
-- setup tables only
Postgres.createTable album
Postgres.createTable testEnvironment album
Postgres.insertTable album
Schema.trackTable Context.Postgres sourceName album testEnvironment

@ -596,9 +599,10 @@ rhsPostgresTeardown _ = Postgres.dropTable album

rhsSQLServerSetup :: (TestEnvironment, ()) -> IO ()
rhsSQLServerSetup (testEnvironment, _) = do
let schemaName = getSchemaName testEnvironment

let sourceName = "target"
sourceConfig = SQLServer.defaultSourceConfiguration
schemaName = Context.defaultSchema Context.SQLServer
-- Add remote source
GraphqlEngine.postMetadata_
testEnvironment

@ -31,6 +31,7 @@ import Harness.Test.Context (Context (..))
import Harness.Test.Context qualified as Context
import Harness.Test.Schema (Table (..))
import Harness.Test.Schema qualified as Schema
import Harness.Test.SchemaName
import Harness.TestEnvironment (Server, TestEnvironment, stopServer)
import Harness.Yaml (shouldReturnYaml)
import Hasura.Prelude
@ -217,7 +218,7 @@ lhsPostgresSetup :: Value -> (TestEnvironment, Maybe Server) -> IO ()
lhsPostgresSetup rhsTableName (testEnvironment, _) = do
let sourceName = "source"
sourceConfig = Postgres.defaultSourceConfiguration
schemaName = Context.defaultSchema Context.Postgres
schemaName = getSchemaName testEnvironment
-- Add remote source
GraphqlEngine.postMetadata_
testEnvironment
@ -228,7 +229,7 @@ args:
configuration: *sourceConfig
|]
-- setup tables only
Postgres.createTable track
Postgres.createTable testEnvironment track
Postgres.insertTable track
Schema.trackTable Context.Postgres sourceName track testEnvironment
GraphqlEngine.postMetadata_
@ -285,7 +286,8 @@ lhsSQLServerSetup :: Value -> (TestEnvironment, Maybe Server) -> IO ()
lhsSQLServerSetup rhsTableName (testEnvironment, _) = do
let sourceName = "source"
sourceConfig = SQLServer.defaultSourceConfiguration
schemaName = Context.defaultSchema Context.SQLServer
schemaName = getSchemaName testEnvironment

-- Add remote source
GraphqlEngine.postMetadata_
testEnvironment
@ -551,7 +553,8 @@ rhsPostgresSetup :: (TestEnvironment, ()) -> IO ()
rhsPostgresSetup (testEnvironment, _) = do
let sourceName = "target"
sourceConfig = Postgres.defaultSourceConfiguration
schemaName = Context.defaultSchema Context.Postgres
schemaName = getSchemaName testEnvironment

-- Add remote source
GraphqlEngine.postMetadata_
testEnvironment
@ -562,7 +565,7 @@ args:
configuration: *sourceConfig
|]
-- setup tables only
Postgres.createTable album
Postgres.createTable testEnvironment album
Postgres.insertTable album
Schema.trackTable Context.Postgres sourceName album testEnvironment

@ -611,7 +614,8 @@ rhsSQLServerSetup :: (TestEnvironment, ()) -> IO ()
rhsSQLServerSetup (testEnvironment, _) = do
let sourceName = "target"
sourceConfig = SQLServer.defaultSourceConfiguration
schemaName = Context.defaultSchema Context.SQLServer
schemaName = getSchemaName testEnvironment

-- Add remote source
GraphqlEngine.postMetadata_
testEnvironment

@ -30,6 +30,7 @@ import Harness.Test.Context (Context (..))
import Harness.Test.Context qualified as Context
import Harness.Test.Schema (Table (..), table)
import Harness.Test.Schema qualified as Schema
import Harness.Test.SchemaName
import Harness.TestEnvironment (Server, TestEnvironment, stopServer)
import Harness.Yaml (shouldReturnYaml)
import Hasura.Prelude
@ -137,7 +138,7 @@ lhsPostgresSetup :: (TestEnvironment, Maybe Server) -> IO ()
lhsPostgresSetup (testEnvironment, _) = do
let sourceName = "source"
sourceConfig = Postgres.defaultSourceConfiguration
schemaName = Context.defaultSchema Context.Postgres
schemaName = getSchemaName testEnvironment
-- Add remote source
GraphqlEngine.postMetadata_
testEnvironment
@ -148,7 +149,7 @@ args:
configuration: *sourceConfig
|]
-- setup tables only
Postgres.createTable track
Postgres.createTable testEnvironment track
Postgres.insertTable track
Schema.trackTable Context.Postgres sourceName track testEnvironment
GraphqlEngine.postMetadata_
@ -189,7 +190,8 @@ lhsSQLServerSetup :: (TestEnvironment, Maybe Server) -> IO ()
lhsSQLServerSetup (testEnvironment, _) = do
let sourceName = "source"
sourceConfig = SQLServer.defaultSourceConfiguration
schemaName = Context.defaultSchema Context.SQLServer
schemaName = getSchemaName testEnvironment

-- Add remote source
GraphqlEngine.postMetadata_
testEnvironment

@ -8,10 +8,11 @@ import Harness.Backend.BigQuery qualified as BigQuery
import Harness.Exceptions
import Harness.GraphqlEngine qualified as GraphqlEngine
import Harness.Quoter.Graphql (graphql)
import Harness.Quoter.Yaml (yaml)
import Harness.Quoter.Yaml (interpolateYaml, yaml)
import Harness.Test.Context qualified as Context
import Harness.Test.Schema (Table (..), table)
import Harness.Test.Schema qualified as Schema
import Harness.Test.SchemaName
import Harness.TestEnvironment (TestEnvironment)
import Harness.Yaml (shouldReturnYaml)
import Hasura.Prelude
@ -93,6 +94,8 @@ bigquerySetup :: (TestEnvironment, ()) -> IO ()
bigquerySetup (testEnvironment, ()) = do
BigQuery.setup schema (testEnvironment, ())

let schemaName = getSchemaName testEnvironment

-- also setup permissions
GraphqlEngine.postMetadata_ testEnvironment $
[yaml|
@ -102,7 +105,7 @@ bigquerySetup (testEnvironment, ()) = do
args:
source: bigquery
table:
dataset: hasura
dataset: *schemaName
name: article
role: author
permission:
@ -114,7 +117,7 @@ bigquerySetup (testEnvironment, ()) = do
args:
source: bigquery
table:
dataset: hasura
dataset: *schemaName
name: article
role: user
permission:
@ -126,6 +129,8 @@ bigquerySetup (testEnvironment, ()) = do

bigqueryTeardown :: (TestEnvironment, ()) -> IO ()
bigqueryTeardown (testEnvironment, ()) = do
let schemaName = getSchemaName testEnvironment

-- teardown permissions
let teardownPermissions =
GraphqlEngine.postMetadata_ testEnvironment $
@ -134,7 +139,7 @@ bigqueryTeardown (testEnvironment, ()) = do
args:
table:
name: article
dataset: hasura
dataset: *schemaName
role: user
source: bigquery
|]
@ -152,6 +157,8 @@ tests :: Context.Options -> SpecWith TestEnvironment
tests opts = do
it "Author role cannot select articles with mismatching author_id and X-Hasura-User-Id" $ \testEnvironment -> do
let userHeaders = [("X-Hasura-Role", "author"), ("X-Hasura-User-Id", "0")]
schemaName = getSchemaName testEnvironment

shouldReturnYaml
opts
( GraphqlEngine.postGraphqlWithHeaders
@ -159,20 +166,22 @@ tests opts = do
userHeaders
[graphql|
query {
hasura_article {
#{schemaName}_article {
id
author_id
}
}
|]
)
[yaml|
[interpolateYaml|
data:
hasura_article: []
#{schemaName}_article: []
|]

it "Author role can select articles with matching author_id and X-Hasura-User-Id" $ \testEnvironment -> do
let userHeaders = [("X-Hasura-Role", "author"), ("X-Hasura-User-Id", "1")]
schemaName = getSchemaName testEnvironment

shouldReturnYaml
opts
( GraphqlEngine.postGraphqlWithHeaders
@ -180,22 +189,24 @@ tests opts = do
userHeaders
[graphql|
query {
hasura_article {
#{schemaName}_article {
id
author_id
}
}
|]
)
[yaml|
[interpolateYaml|
data:
hasura_article:
#{schemaName}_article:
- id: '1'
author_id: '1'
|]

it "User role can select published articles only" $ \testEnvironment -> do
let userHeaders = [("X-Hasura-Role", "user"), ("X-Hasura-User-Id", "2")]
schemaName = getSchemaName testEnvironment

shouldReturnYaml
opts
( GraphqlEngine.postGraphqlWithHeaders
@ -203,7 +214,7 @@ tests opts = do
userHeaders
[graphql|
query {
hasura_article {
#{schemaName}_article {
title
content
author_id
@ -211,9 +222,9 @@ tests opts = do
}
|]
)
[yaml|
[interpolateYaml|
data:
hasura_article:
#{schemaName}_article:
- author_id: '2'
content: Sample article content 2
title: Article 2

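Two splice styles appear in the permission hunks above: inside the `yaml` quasiquoter, the in-scope Haskell binding is referenced as a YAML anchor (`dataset: *schemaName`), while `interpolateYaml` splices it textually (`#{schemaName}_article`). A sketch of the anchor style, assuming the quasiquoter resolves the binding introduced by `let schemaName = getSchemaName testEnvironment`; the metadata type name is assumed, since the hunks cut off before the `type:` line:

```haskell
-- Sketch only: the args body mirrors the teardown hunk above; the
-- "bigquery_drop_select_permission" type name is an assumption.
dropUserSelectPermission :: TestEnvironment -> IO ()
dropUserSelectPermission testEnvironment = do
  let schemaName = getSchemaName testEnvironment
  GraphqlEngine.postMetadata_ testEnvironment $
    [yaml|
      type: bigquery_drop_select_permission
      args:
        table:
          name: article
          dataset: *schemaName  # anchor resolved from the Haskell binding
        role: user
        source: bigquery
    |]
```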
@ -11,10 +11,11 @@ import Harness.Backend.Postgres qualified as Postgres
import Harness.Backend.Sqlserver qualified as Sqlserver
import Harness.GraphqlEngine (postGraphql)
import Harness.Quoter.Graphql (graphql)
import Harness.Quoter.Yaml (yaml)
import Harness.Quoter.Yaml (interpolateYaml)
import Harness.Test.Context qualified as Context
import Harness.Test.Schema (Table (..), table)
import Harness.Test.Schema qualified as Schema
import Harness.Test.SchemaName
import Harness.TestEnvironment (TestEnvironment)
import Harness.Yaml (shouldReturnYaml)
import Hasura.Prelude
@ -120,11 +121,13 @@ tests opts = do

describe "Array relationships" do
it "Select authors and their articles" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
data:
hasura_author:
#{schemaName}_author:
- id: 1
articles_by_id_to_author_id:
- id: 1
@ -143,7 +146,7 @@ tests opts = do
testEnvironment
[graphql|
query {
hasura_author(order_by: [{ id: asc }]) {
#{schemaName}_author(order_by: [{ id: asc }]) {
id

articles_by_id_to_author_id(order_by: [{ id: asc }]) {

@ -16,12 +16,13 @@ import Harness.Backend.Mysql qualified as Mysql
import Harness.Backend.Postgres qualified as Postgres
import Harness.GraphqlEngine (postGraphql)
import Harness.Quoter.Graphql (graphql)
import Harness.Quoter.Yaml (yaml)
import Harness.Quoter.Yaml (interpolateYaml)
import Harness.Test.BackendType (BackendType (..))
import Harness.Test.Context (Options (..))
import Harness.Test.Context qualified as Context
import Harness.Test.Schema (Table (..), table)
import Harness.Test.Schema qualified as Schema
import Harness.Test.SchemaName
import Harness.TestEnvironment (TestEnvironment)
import Harness.Yaml (shouldReturnYaml)
import Hasura.Prelude
@ -151,11 +152,13 @@ tests backend opts = describe "Object relationships" do
shouldBe = shouldReturnYaml opts

it "Select articles and their authors" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
data:
hasura_article:
#{schemaName}_article:
- id: 1
author_by_author_id_to_id:
id: 1
@ -177,7 +180,7 @@ tests backend opts = describe "Object relationships" do
testEnvironment
[graphql|
query {
hasura_article(order_by: [{ id: asc }]) {
#{schemaName}_article(order_by: [{ id: asc }]) {
id

author_by_author_id_to_id {
@ -192,11 +195,13 @@ tests backend opts = describe "Object relationships" do
unless (backend `elem` [MySQL, BigQuery]) do
describe "Null relationships" do
it "Select articles their (possibly null) co-authors" \testEnvironment -> do
let schemaName = getSchemaName testEnvironment

let expected :: Value
expected =
[yaml|
[interpolateYaml|
data:
hasura_article:
#{schemaName}_article:
- id: 1
author_by_co_author_id_to_id: null
- id: 2
@ -212,7 +217,7 @@ tests backend opts = describe "Object relationships" do
testEnvironment
[graphql|
query {
hasura_article(order_by: [{ id: asc }]) {
#{schemaName}_article(order_by: [{ id: asc }]) {
id

author_by_co_author_id_to_id {