mirror of
https://github.com/hasura/graphql-engine.git
synced 2024-12-13 19:33:55 +03:00
server: Split the integration tests out into their own directories.
We currently have a fairly intricate way of running our PostgreSQL and MSSQL integration tests (not the API tests). By splitting them out, we can simplify this a lot. Most prominently, we can rely on Cabal to be our argument parser instead of writing our own. We can also simplify how they're run in CI. They are currently (weirdly) run alongside the Python integration tests. This breaks them out into their own jobs for better visibility, and to avoid conflating the two. The changes are as follows: - The "unit" tests that rely on a running PostgreSQL database are extracted out to a new test directory so they can be run separately. - Most of the `Main` module comes with them. - We now refer to these as "integration" tests instead. - Likewise for the "unit" tests that rely on a running MS SQL Server database. These are a little simpler and we can use `hspec-discover`, with a `SpecHook` to extract the connection string from an environment variable. - Henceforth, these are the MS SQL Server integration tests. - New CI jobs have been added for each of these. - There wasn't actually a job for the MS SQL Server integration tests. It's pretty amazing they still run well. - The "haskell-tests" CI job, which used to run the PostgreSQL integration tests, has been removed. - The makefiles and contributing guide have been updated to run these. PR-URL: https://github.com/hasura/graphql-engine-mono/pull/6912 GitOrigin-RevId: 67bbe2941bba31793f63d04a9a693779d4463ee1
This commit is contained in:
parent
8fec25349f
commit
80c977da85
@ -1,4 +1,3 @@
|
||||
haskell-tests
|
||||
no-auth
|
||||
admin-secret
|
||||
admin-secret-unauthorized-role
|
||||
|
@ -223,11 +223,6 @@ run_pytest_parallel() {
|
||||
}
|
||||
|
||||
case "$SERVER_TEST_TO_RUN" in
|
||||
haskell-tests)
|
||||
echo -e "\n$(time_elapsed): <########## RUN GRAPHQL-ENGINE HASKELL TESTS ###########################################>\n"
|
||||
"${GRAPHQL_ENGINE_TESTS:?}" postgres
|
||||
;;
|
||||
|
||||
no-auth)
|
||||
echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITHOUT ADMIN SECRET ###########################################>\n"
|
||||
|
||||
|
@ -65,8 +65,7 @@ ghcid-test-harness:
|
||||
|
||||
.PHONY: ghcid-test-backends
|
||||
## ghcid-test-backends: run all api tests in ghcid
|
||||
ghcid-test-backends: start-sqlserver remove-tix-file
|
||||
docker compose up -d --wait postgres citus cockroach mariadb dc-reference-agent dc-sqlite-agent
|
||||
ghcid-test-backends: start-backends remove-tix-file
|
||||
$(call run_ghcid_api_tests,api-tests:exe:api-tests)
|
||||
|
||||
.PHONY: ghcid-test-bigquery
|
||||
@ -78,8 +77,8 @@ ghcid-test-bigquery: remove-tix-file
|
||||
|
||||
.PHONY: ghcid-test-sqlserver
|
||||
## ghcid-test-sqlserver: run tests for SQL Server backend in ghcid
|
||||
ghcid-test-sqlserver: start-sqlserver remove-tix-file
|
||||
docker compose up -d --wait postgres
|
||||
ghcid-test-sqlserver: remove-tix-file
|
||||
docker compose up -d --wait postgres sqlserver{,-healthcheck,-init}
|
||||
$(call run_ghcid_api_tests,api-tests:exe:api-tests,SQLServer)
|
||||
|
||||
.PHONY: ghcid-test-mysql
|
||||
@ -103,6 +102,7 @@ ghcid-test-cockroach: remove-tix-file
|
||||
.PHONY: ghcid-test-data-connectors
|
||||
## ghcid-test-data-connectors: run tests for DataConnectors in ghcid
|
||||
ghcid-test-data-connectors: remove-tix-file
|
||||
docker compose build
|
||||
docker compose up -d --wait postgres dc-reference-agent dc-sqlite-agent
|
||||
$(call run_ghcid_api_tests,api-tests:exe:api-tests,DataConnector)
|
||||
|
||||
|
@ -8,6 +8,9 @@ endif
|
||||
|
||||
export MSSQL_IMAGE
|
||||
|
||||
TEST_MSSQL_CONNECTION_STRING = Driver={ODBC Driver 18 for SQL Server};Server=localhost,65003;Uid=sa;Pwd=Password!;Encrypt=optional
|
||||
TEST_POSTGRES_URL = postgres://hasura:hasura@localhost:65002/hasura
|
||||
|
||||
define stop_after
|
||||
@ echo $1 >&2
|
||||
@ $1 || EXIT_STATUS=$$?; \
|
||||
|
@ -7,9 +7,9 @@ test-bigquery: remove-tix-file
|
||||
cabal run api-tests:exe:api-tests -- -m 'BigQuery')
|
||||
|
||||
.PHONY: test-sqlserver
|
||||
## test-sqlserver: run tests for SQL Server backend
|
||||
test-sqlserver: start-sqlserver remove-tix-file
|
||||
docker compose up -d --wait postgres
|
||||
## test-sqlserver: run tests for MS SQL Server backend
|
||||
test-sqlserver: remove-tix-file
|
||||
docker compose up -d --wait postgres sqlserver{,-healthcheck,-init}
|
||||
$(call stop_after, \
|
||||
cabal run api-tests:exe:api-tests -- -m 'SQLServer')
|
||||
|
||||
@ -30,6 +30,7 @@ test-citus: remove-tix-file
|
||||
.PHONY: test-data-connectors
|
||||
## test-data-connectors: run tests for Data Connectors
|
||||
test-data-connectors: remove-tix-file
|
||||
docker compose build
|
||||
docker compose up -d --wait postgres dc-reference-agent dc-sqlite-agent
|
||||
$(call stop_after, \
|
||||
cabal run api-tests:exe:api-tests -- -m 'DataConnector')
|
||||
@ -43,7 +44,8 @@ test-cockroach: remove-tix-file
|
||||
|
||||
.PHONY: test-postgres
|
||||
## test-postgres: run tests for Postgres backend
|
||||
test-postgres: start-backends remove-tix-file
|
||||
test-postgres: remove-tix-file
|
||||
docker compose up -d --wait postgres
|
||||
$(call stop_after, \
|
||||
cabal run api-tests:exe:api-tests -- -m 'Postgres')
|
||||
|
||||
@ -57,4 +59,20 @@ test-backends: start-backends remove-tix-file
|
||||
.PHONY: test-unit
|
||||
## test-unit: run unit tests from main suite
|
||||
test-unit: remove-tix-file
|
||||
cabal run graphql-engine-tests -- unit
|
||||
cabal run graphql-engine:test:graphql-engine-tests
|
||||
|
||||
.PHONY: test-integration-mssql
|
||||
## test-integration-mssql: run MS SQL Server integration tests
|
||||
test-integration-mssql: remove-tix-file
|
||||
docker compose up -d --wait sqlserver{,-healthcheck,-init}
|
||||
$(call stop_after, \
|
||||
HASURA_MSSQL_CONN_STR='$(TEST_MSSQL_CONNECTION_STRING)' \
|
||||
cabal run graphql-engine:test:graphql-engine-test-mssql)
|
||||
|
||||
.PHONY: test-integration-postgres
|
||||
## test-integration-postgres: run PostgreSQL integration tests
|
||||
test-integration-postgres: remove-tix-file
|
||||
docker compose up -d --wait postgres
|
||||
$(call stop_after, \
|
||||
HASURA_GRAPHQL_DATABASE_URL='$(TEST_POSTGRES_URL)' \
|
||||
cabal run graphql-engine:test:graphql-engine-test-postgres)
|
||||
|
@ -129,9 +129,9 @@ If you want, you can also run the server and test suite manually against an inst
|
||||
The following command can be used to build and launch a local `graphql-engine` instance:
|
||||
|
||||
```
|
||||
cabal new-run -- exe:graphql-engine \
|
||||
--database-url='postgres://<user>:<password>@<host>:<port>/<dbname>' \
|
||||
serve --enable-console --console-assets-dir=console/static/dist
|
||||
$ cabal new-run -- exe:graphql-engine \
|
||||
--database-url='postgres://<user>:<password>@<host>:<port>/<dbname>' \
|
||||
serve --enable-console --console-assets-dir=console/static/dist
|
||||
```
|
||||
|
||||
This will launch a server on port 8080, and it will serve the console assets if they were built with `npm run server-build` as mentioned above.
|
||||
@ -157,7 +157,7 @@ All sets of tests require running databases:
|
||||
The easiest way to run the Python integration test suite is by running:
|
||||
|
||||
```sh
|
||||
scripts/dev.sh test --integration
|
||||
$ scripts/dev.sh test --integration
|
||||
```
|
||||
|
||||
For more details please check out the [README](./tests-py/README.md).
|
||||
@ -166,46 +166,39 @@ For more details please check out the [README](./tests-py/README.md).
|
||||
|
||||
There are three categories of unit tests:
|
||||
|
||||
- true unit tests
|
||||
- Postgres unit tests (require a postgres instance)
|
||||
- MSSQL unit tests (require a MSSQL instance)
|
||||
- unit tests
|
||||
- PostgreSQL integration tests (requires a PostgreSQL instance)
|
||||
- MS SQL Server integration tests (requires a MS SQL Server instance)
|
||||
|
||||
The easiest way to run these tests is through `dev.sh`:
|
||||
The easiest way to run these tests is through `make`, which will automatically spin up and shut down Docker containers for the databases:
|
||||
|
||||
```
|
||||
./scripts/dev.sh test --unit
|
||||
$ make test-unit
|
||||
$ make test-integration-postgres
|
||||
$ make test-integration-mssql
|
||||
```
|
||||
|
||||
If you want to limit to a specific set of tests:
|
||||
If you want to limit to a specific set of tests, use `HSPEC_MATCH`:
|
||||
|
||||
```
|
||||
./scripts/dev.sh test --unit --match "some pattern" mssql
|
||||
$ make test-unit HSPEC_MATCH='Memoize'
|
||||
```
|
||||
|
||||
Note that you have to use one of 'unit', 'postgres' or 'mssql' when
|
||||
using '--match'. There is no way to match without specifying the subset
|
||||
of tests to run.
|
||||
|
||||
Alternatively, you can run unit tests directly through cabal:
|
||||
Alternatively, you can use Cabal directly (though you'll have to start the databases yourself):
|
||||
|
||||
```
|
||||
cabal new-run -- test:graphql-engine-tests unit
|
||||
HASURA_GRAPHQL_DATABASE_URL='postgres://<user>:<password>@<host>:<port>/<dbname>' \
|
||||
cabal new-run -- test:graphql-engine-tests postgres
|
||||
$ cabal run -- graphql-engine:test:graphql-engine-tests
|
||||
$ HASURA_GRAPHQL_DATABASE_URL='postgres://<user>:<password>@<host>:<port>/<dbname>' \
|
||||
cabal run -- graphql-engine:test:graphql-engine-test-postgres
|
||||
```
|
||||
|
||||
##### Running the Haskell integration test suite
|
||||
|
||||
1. To run the Haskell integration test suite, you'll first need to bring up the database containers:
|
||||
Run `make test-backends`. This effectively runs the following two commands:
|
||||
|
||||
```sh
|
||||
docker compose up
|
||||
```
|
||||
|
||||
2. Once the containers are up, you can run the test suite via
|
||||
|
||||
```sh
|
||||
cabal run api-tests
|
||||
$ docker compose up --detach --wait
|
||||
$ cabal run api-tests:exe:api-tests
|
||||
```
|
||||
|
||||
For more details please check out the [README](./lib/api-tests/README.md).
|
||||
@ -218,7 +211,7 @@ workaround to allow loading both the `graphql-engine` library and the unit
|
||||
testing library in `ghcid` at the same time:
|
||||
|
||||
```sh
|
||||
ghcid -a -c "cabal repl graphql-engine-tests -f -O0 -fghci-load-test-with-lib" --test Main.main
|
||||
$ ghcid -a -c "cabal repl graphql-engine-tests -f -O0 -fghci-load-test-with-lib" --test Main.main
|
||||
```
|
||||
|
||||
This assumes you already have `HASURA_GRAPHQL_DATABASE_URL` and `HASURA_MSSQL_CONN_STR`
|
||||
@ -234,7 +227,7 @@ To build with profiling support, you need to both enable profiling via `cabal`
|
||||
and set the `profiling` flag. E.g.
|
||||
|
||||
```
|
||||
cabal build exe:graphql-engine -f profiling --enable-profiling
|
||||
$ cabal build exe:graphql-engine -f profiling --enable-profiling
|
||||
```
|
||||
|
||||
### Create Pull Request
|
||||
@ -279,7 +272,7 @@ instructions help in setting up a local hoogle server that enables searching thr
|
||||
Installing `hoogle` is fairly simple with `cabal`.
|
||||
|
||||
```bash
|
||||
cabal install hoogle
|
||||
$ cabal install hoogle
|
||||
```
|
||||
|
||||
### Step 2: Generating hoogle database
|
||||
|
@ -374,5 +374,5 @@ Use [ghcid](https://hackage.haskell.org/package/ghcid)
|
||||
And run this:
|
||||
|
||||
```sh
|
||||
ghcid -a -c "cabal repl graphql-engine:graphql-engine-tests -f -O0" --test Main.main --setup ":set args unit --match TheNameOfTestsIWantToRun" --width 100 --height 30
|
||||
ghcid -a -c "cabal repl graphql-engine:graphql-engine-tests -f -O0" --test Main.main --setup ":set args --match TheNameOfTestsIWantToRun" --width 100 --height 30
|
||||
```
|
||||
|
@ -1015,8 +1015,8 @@ test-suite graphql-engine-tests
|
||||
, graphql-parser
|
||||
, data-has
|
||||
, hedgehog
|
||||
, hspec >=2.8.3 && <3
|
||||
, hspec-core >=2.8.3 && <3
|
||||
, hspec
|
||||
, hspec-core
|
||||
, hspec-expectations
|
||||
, hspec-expectations-json
|
||||
, hspec-expectations-lifted
|
||||
@ -1036,7 +1036,6 @@ test-suite graphql-engine-tests
|
||||
, mmorph
|
||||
, monad-control
|
||||
, mtl
|
||||
, natural-transformation >=0.4 && <0.5
|
||||
, network-uri
|
||||
, openapi3
|
||||
, optparse-applicative
|
||||
@ -1060,9 +1059,6 @@ test-suite graphql-engine-tests
|
||||
, vector
|
||||
, yaml
|
||||
, shakespeare
|
||||
-- mssql support
|
||||
, odbc
|
||||
, resource-pool
|
||||
|
||||
if !flag(ghci-load-test-with-lib)
|
||||
build-depends: graphql-engine
|
||||
@ -1086,8 +1082,6 @@ test-suite graphql-engine-tests
|
||||
Data.Text.RawString
|
||||
Data.TimeSpec
|
||||
Data.TrieSpec
|
||||
Database.MSSQL.TransactionSuite
|
||||
Discover
|
||||
Hasura.AppSpec
|
||||
Hasura.Base.Error.TestInstances
|
||||
Hasura.Backends.BigQuery.SourceSpec
|
||||
@ -1103,7 +1097,6 @@ test-suite graphql-engine-tests
|
||||
Hasura.Backends.DataConnector.API.V0.ScalarSpec
|
||||
Hasura.Backends.DataConnector.API.V0.SchemaSpec
|
||||
Hasura.Backends.DataConnector.API.V0.TableSpec
|
||||
Hasura.Backends.MSSQL.ErrorSpec
|
||||
Hasura.Backends.MySQL.DataLoader.ExecuteTests
|
||||
Hasura.Backends.MySQL.TypesSpec
|
||||
Hasura.Backends.Postgres.Connection.VersionCheckSpec
|
||||
@ -1122,7 +1115,6 @@ test-suite graphql-engine-tests
|
||||
Hasura.Backends.Postgres.Translate.UpdateSpec
|
||||
Hasura.EncJSONSpec
|
||||
Hasura.EventingSpec
|
||||
Hasura.EventTriggerCleanupSuite
|
||||
Hasura.Generator.Common
|
||||
Hasura.GraphQL.NamespaceSpec
|
||||
Hasura.GraphQL.Schema.BoolExp.AggregationPredicatesSpec
|
||||
@ -1146,7 +1138,6 @@ test-suite graphql-engine-tests
|
||||
Hasura.Server.AuthSpec
|
||||
Hasura.Server.InitSpec
|
||||
Hasura.Server.Init.ArgSpec
|
||||
Hasura.Server.MigrateSuite
|
||||
Hasura.Server.Migrate.VersionSpec
|
||||
Hasura.Server.TelemetrySpec
|
||||
Hasura.Server.VersionSpec
|
||||
@ -1154,7 +1145,6 @@ test-suite graphql-engine-tests
|
||||
Hasura.Server.Init.ArgSpec
|
||||
Hasura.SessionSpec
|
||||
Hasura.SQL.WKTSpec
|
||||
Hasura.StreamingSubscriptionSuite
|
||||
Hasura.Tracing.TraceIdSpec
|
||||
Network.HTTP.Client.TransformableSpec
|
||||
Test.Aeson.Expectation
|
||||
@ -1172,3 +1162,35 @@ test-suite graphql-engine-tests
|
||||
Test.Parser.Monad
|
||||
Test.QuickCheck.Extended
|
||||
Test.SIString
|
||||
|
||||
test-suite graphql-engine-test-mssql
|
||||
import: common-all, common-exe, lib-depends
|
||||
build-tool-depends: hspec-discover:hspec-discover
|
||||
type: exitcode-stdio-1.0
|
||||
build-depends:
|
||||
graphql-engine
|
||||
, hspec
|
||||
, odbc
|
||||
hs-source-dirs: test-mssql
|
||||
main-is: Main.hs
|
||||
other-modules:
|
||||
Database.MSSQL.TransactionSpec
|
||||
Hasura.Backends.MSSQL.ErrorSpec
|
||||
SpecHook
|
||||
|
||||
test-suite graphql-engine-test-postgres
|
||||
import: common-all, common-exe, lib-depends
|
||||
build-tool-depends: hspec-discover:hspec-discover
|
||||
type: exitcode-stdio-1.0
|
||||
build-depends:
|
||||
graphql-engine
|
||||
, hspec
|
||||
, hspec-core
|
||||
, hspec-expectations-lifted
|
||||
, natural-transformation
|
||||
hs-source-dirs: test-postgres
|
||||
main-is: Main.hs
|
||||
other-modules:
|
||||
Hasura.EventTriggerCleanupSuite
|
||||
Hasura.StreamingSubscriptionSuite
|
||||
Hasura.Server.MigrateSuite
|
||||
|
@ -1,2 +0,0 @@
|
||||
{-# OPTIONS_GHC -F -pgmF hspec-discover -optF --module-name=Discover #-}
|
||||
{-# OPTIONS_GHC -Wno-missing-export-lists #-}
|
@ -1,225 +1,2 @@
|
||||
{-# LANGUAGE UndecidableInstances #-}
|
||||
|
||||
module Main (main) where
|
||||
|
||||
import Control.Concurrent.MVar
|
||||
import Control.Natural ((:~>) (..))
|
||||
import Data.Aeson qualified as A
|
||||
import Data.ByteString.Lazy.Char8 qualified as BL
|
||||
import Data.ByteString.Lazy.UTF8 qualified as LBS
|
||||
import Data.Environment qualified as Env
|
||||
import Data.Time.Clock (getCurrentTime)
|
||||
import Data.URL.Template
|
||||
import Database.MSSQL.TransactionSuite qualified as TransactionSuite
|
||||
import Database.PG.Query qualified as PG
|
||||
import Discover qualified
|
||||
import Hasura.App
|
||||
( PGMetadataStorageAppT (..),
|
||||
mkMSSQLSourceResolver,
|
||||
mkPgSourceResolver,
|
||||
)
|
||||
import Hasura.Backends.Postgres.Connection.MonadTx
|
||||
import Hasura.Backends.Postgres.Connection.Settings
|
||||
import Hasura.Backends.Postgres.Execute.Types
|
||||
import Hasura.EventTriggerCleanupSuite qualified as EventTriggerCleanupSuite
|
||||
import Hasura.GraphQL.Schema.Options qualified as Options
|
||||
import Hasura.Logging
|
||||
import Hasura.Metadata.Class
|
||||
import Hasura.Prelude
|
||||
import Hasura.RQL.DDL.Schema.Cache
|
||||
import Hasura.RQL.DDL.Schema.Cache.Common
|
||||
import Hasura.RQL.Types.Common
|
||||
import Hasura.RQL.Types.Metadata (emptyMetadataDefaults)
|
||||
import Hasura.RQL.Types.ResizePool
|
||||
import Hasura.RQL.Types.SchemaCache.Build
|
||||
import Hasura.Server.Init
|
||||
import Hasura.Server.Migrate
|
||||
import Hasura.Server.MigrateSuite qualified as MigrateSuite
|
||||
import Hasura.Server.Types
|
||||
import Hasura.StreamingSubscriptionSuite qualified as StreamingSubscriptionSuite
|
||||
import Network.HTTP.Client qualified as HTTP
|
||||
import Network.HTTP.Client.TLS qualified as HTTP
|
||||
import Options.Applicative
|
||||
import System.Environment (getEnvironment)
|
||||
import System.Exit (exitFailure)
|
||||
import Test.Hspec
|
||||
import Test.Hspec.Runner qualified as Hspec
|
||||
|
||||
data TestSuites
|
||||
= -- | Run all test suites. It probably doesn't make sense to be able to specify additional
|
||||
-- hspec args here.
|
||||
AllSuites
|
||||
| -- | Args to pass through to hspec (as if from 'getArgs'), and the specific suite to run.
|
||||
SingleSuite ![String] !TestSuite
|
||||
|
||||
data TestSuite
|
||||
= UnitSuite
|
||||
| PostgresSuite
|
||||
| MSSQLSuite
|
||||
|
||||
main :: IO ()
|
||||
main = do
|
||||
parseArgs >>= \case
|
||||
AllSuites -> do
|
||||
streamingSubscriptionSuite <- StreamingSubscriptionSuite.buildStreamingSubscriptionSuite
|
||||
eventTriggerLogCleanupSuite <- EventTriggerCleanupSuite.buildEventTriggerCleanupSuite
|
||||
postgresSpecs <- buildPostgresSpecs
|
||||
mssqlSpecs <- buildMSSQLSpecs
|
||||
runHspec [] (Discover.spec *> postgresSpecs *> mssqlSpecs *> streamingSubscriptionSuite *> eventTriggerLogCleanupSuite)
|
||||
SingleSuite hspecArgs suite -> do
|
||||
runHspec hspecArgs =<< case suite of
|
||||
UnitSuite -> pure Discover.spec
|
||||
PostgresSuite -> buildPostgresSpecs
|
||||
MSSQLSuite -> buildMSSQLSpecs
|
||||
|
||||
buildMSSQLSpecs :: IO (SpecWith ())
|
||||
buildMSSQLSpecs = do
|
||||
env <- liftIO getEnvironment
|
||||
connStr <- flip onLeft printErrExit $
|
||||
runWithEnv env $ do
|
||||
let envVar = fst mssqlConnectionString
|
||||
maybeV <- considerEnv envVar
|
||||
onNothing maybeV $
|
||||
throwError $
|
||||
"Expected: " <> envVar
|
||||
|
||||
-- We use "suite" to denote a set of tests that can't (yet) be detected and
|
||||
-- run by @hspec-discover@.
|
||||
pure $ describe "Database.MSSQL.TransactionSuite" $ TransactionSuite.suite connStr
|
||||
|
||||
mssqlConnectionString :: (String, String)
|
||||
mssqlConnectionString =
|
||||
( "HASURA_MSSQL_CONN_STR",
|
||||
"SQL Server database connection string. Example DRIVER={ODBC Driver 18 for SQL Server};SERVER=localhost,1433;Uid=user;Pwd=pass;Encrypt=optional"
|
||||
)
|
||||
|
||||
buildPostgresSpecs :: IO Spec
|
||||
buildPostgresSpecs = do
|
||||
env <- getEnvironment
|
||||
let envMap = Env.mkEnvironment env
|
||||
|
||||
pgUrlText <- flip onLeft printErrExit $
|
||||
runWithEnv env $ do
|
||||
let envVar = _envVar databaseUrlOption
|
||||
maybeV <- considerEnv envVar
|
||||
onNothing maybeV $
|
||||
throwError $
|
||||
"Expected: " <> envVar
|
||||
|
||||
let pgConnInfo = PG.ConnInfo 1 $ PG.CDDatabaseURI $ txtToBs pgUrlText
|
||||
urlConf = UrlValue $ InputWebhook $ mkPlainURLTemplate pgUrlText
|
||||
sourceConnInfo =
|
||||
PostgresSourceConnInfo urlConf (Just setPostgresPoolSettings) True PG.ReadCommitted Nothing
|
||||
sourceConfig = PostgresConnConfiguration sourceConnInfo Nothing defaultPostgresExtensionsSchema
|
||||
|
||||
pgPool <- PG.initPGPool pgConnInfo PG.defaultConnParams {PG.cpConns = 1} print
|
||||
let pgContext = mkPGExecCtx PG.Serializable pgPool NeverResizePool
|
||||
|
||||
logger :: Logger Hasura = Logger $ \l -> do
|
||||
let (logLevel, logType :: EngineLogType Hasura, logDetail) = toEngineLog l
|
||||
t <- liftIO $ getFormattedTime Nothing
|
||||
liftIO $ putStrLn $ LBS.toString $ A.encode $ EngineLog t logLevel logType logDetail
|
||||
|
||||
setupCacheRef = do
|
||||
httpManager <- HTTP.newManager HTTP.tlsManagerSettings
|
||||
let sqlGenCtx =
|
||||
SQLGenCtx
|
||||
Options.Don'tStringifyNumbers
|
||||
Options.Don'tDangerouslyCollapseBooleans
|
||||
Options.Don'tOptimizePermissionFilters
|
||||
Options.EnableBigQueryStringNumericInput
|
||||
maintenanceMode = MaintenanceModeDisabled
|
||||
readOnlyMode = ReadOnlyModeDisabled
|
||||
serverConfigCtx =
|
||||
ServerConfigCtx
|
||||
Options.InferFunctionPermissions
|
||||
Options.DisableRemoteSchemaPermissions
|
||||
sqlGenCtx
|
||||
maintenanceMode
|
||||
mempty
|
||||
EventingEnabled
|
||||
readOnlyMode
|
||||
Nothing -- We are not testing the naming convention here, so defaulting to hasura-default
|
||||
emptyMetadataDefaults
|
||||
cacheBuildParams = CacheBuildParams httpManager (mkPgSourceResolver print) mkMSSQLSourceResolver serverConfigCtx
|
||||
pgLogger = print
|
||||
|
||||
run :: MetadataStorageT (PGMetadataStorageAppT CacheBuild) a -> IO a
|
||||
run =
|
||||
runMetadataStorageT
|
||||
>>> flip runPGMetadataStorageAppT (pgPool, pgLogger)
|
||||
>>> runCacheBuild cacheBuildParams
|
||||
>>> runExceptT
|
||||
>=> flip onLeft printErrJExit
|
||||
>=> flip onLeft printErrJExit
|
||||
|
||||
(metadata, schemaCache) <- run do
|
||||
metadata <-
|
||||
snd
|
||||
<$> (liftEitherM . runExceptT . runTx pgContext PG.ReadWrite)
|
||||
(migrateCatalog (Just sourceConfig) defaultPostgresExtensionsSchema maintenanceMode =<< liftIO getCurrentTime)
|
||||
schemaCache <- lift $ lift $ buildRebuildableSchemaCache logger envMap metadata
|
||||
pure (metadata, schemaCache)
|
||||
|
||||
cacheRef <- newMVar schemaCache
|
||||
pure $ NT (run . flip MigrateSuite.runCacheRefT cacheRef . fmap fst . runMetadataT metadata emptyMetadataDefaults)
|
||||
|
||||
-- We use "suite" to denote a set of tests that can't (yet) be detected and
|
||||
-- run by @hspec-discover@.
|
||||
streamingSubscriptionSuite <- StreamingSubscriptionSuite.buildStreamingSubscriptionSuite
|
||||
eventTriggerLogCleanupSuite <- EventTriggerCleanupSuite.buildEventTriggerCleanupSuite
|
||||
|
||||
pure $ do
|
||||
describe "Migrate suite" $
|
||||
beforeAll setupCacheRef $
|
||||
describe "Hasura.Server.Migrate" $
|
||||
MigrateSuite.suite sourceConfig pgContext pgConnInfo
|
||||
describe "Streaming subscription suite" $ streamingSubscriptionSuite
|
||||
describe "Event trigger log cleanup suite" $ eventTriggerLogCleanupSuite
|
||||
|
||||
parseArgs :: IO TestSuites
|
||||
parseArgs =
|
||||
execParser $
|
||||
info (helper <*> (parseNoCommand <|> parseSubCommand)) $
|
||||
fullDesc <> header "Hasura GraphQL Engine test suite"
|
||||
where
|
||||
parseNoCommand = pure AllSuites
|
||||
parseSubCommand = SingleSuite <$> parseHspecPassThroughArgs <*> subCmd
|
||||
where
|
||||
subCmd =
|
||||
subparser $
|
||||
mconcat
|
||||
[ command "unit" $
|
||||
info (pure UnitSuite) $
|
||||
progDesc "Only run unit tests",
|
||||
command "postgres" $
|
||||
info (pure PostgresSuite) $
|
||||
progDesc "Only run Postgres integration tests",
|
||||
command "mssql" $
|
||||
info (pure MSSQLSuite) $
|
||||
progDesc "Only run SQL Server unit tests"
|
||||
]
|
||||
-- Add additional arguments and tweak as needed:
|
||||
hspecArgs = ["match", "skip"]
|
||||
-- parse to a list of arguments as they'd appear from 'getArgs':
|
||||
parseHspecPassThroughArgs :: Parser [String]
|
||||
parseHspecPassThroughArgs = fmap concat $
|
||||
for hspecArgs $ \nm ->
|
||||
fmap (maybe [] (\a -> ["--" <> nm, a])) $
|
||||
optional $
|
||||
strOption
|
||||
( long nm
|
||||
<> metavar "<PATTERN>"
|
||||
<> help "Flag passed through to hspec (see hspec docs)."
|
||||
)
|
||||
|
||||
runHspec :: [String] -> Spec -> IO ()
|
||||
runHspec hspecArgs m = do
|
||||
config <- Hspec.readConfig Hspec.defaultConfig hspecArgs
|
||||
Hspec.evaluateSummary =<< Hspec.runSpec m config
|
||||
|
||||
printErrExit :: String -> IO a
|
||||
printErrExit = (*> exitFailure) . putStrLn
|
||||
|
||||
printErrJExit :: (A.ToJSON a) => a -> IO b
|
||||
printErrJExit = (*> exitFailure) . BL.putStrLn . A.encode
|
||||
{-# OPTIONS_GHC -F -pgmF hspec-discover #-}
|
||||
{-# OPTIONS_GHC -Wno-missing-export-lists #-}
|
||||
|
@ -1,4 +1,4 @@
|
||||
module Database.MSSQL.TransactionSuite (suite) where
|
||||
module Database.MSSQL.TransactionSpec (spec) where
|
||||
|
||||
import Control.Exception.Base (bracket)
|
||||
import Data.ByteString (ByteString)
|
||||
@ -41,13 +41,13 @@ newtype Transaction = Transaction
|
||||
{ unTransaction :: [Query]
|
||||
}
|
||||
|
||||
suite :: Text -> Spec
|
||||
suite connString = do
|
||||
runBasicChecks connString
|
||||
transactionStateTests connString
|
||||
spec :: SpecWith ConnectionString
|
||||
spec = do
|
||||
runBasicChecks
|
||||
transactionStateTests
|
||||
|
||||
runBasicChecks :: Text -> Spec
|
||||
runBasicChecks connString =
|
||||
runBasicChecks :: SpecWith ConnectionString
|
||||
runBasicChecks =
|
||||
describe "runTx transaction basic checks" $ do
|
||||
run
|
||||
TestCase
|
||||
@ -115,10 +115,6 @@ runBasicChecks connString =
|
||||
runWith = unitQuery,
|
||||
description = "Bad syntax error/transaction rollback"
|
||||
}
|
||||
where
|
||||
-- Partially apply connString to runTest for convenience
|
||||
run :: forall a. Eq a => Show a => TestCase a -> Spec
|
||||
run = runTest connString
|
||||
|
||||
-- | Test COMMIT and ROLLBACK for Active and NoActive states.
|
||||
--
|
||||
@ -126,8 +122,8 @@ runBasicChecks connString =
|
||||
-- in a TRY..CATCH block, which is not currently doable with our current API.
|
||||
-- Consider changing the API to allow such a test if we ever end up having
|
||||
-- bugs because of it.
|
||||
transactionStateTests :: Text -> Spec
|
||||
transactionStateTests connString =
|
||||
transactionStateTests :: SpecWith ConnectionString
|
||||
transactionStateTests =
|
||||
describe "runTx Transaction State -> Action" $ do
|
||||
run
|
||||
TestCase
|
||||
@ -183,10 +179,6 @@ transactionStateTests connString =
|
||||
runWith = unitQuery,
|
||||
description = "NoActive -> ROLLBACK"
|
||||
}
|
||||
where
|
||||
-- Partially apply connString to runTest for convenience
|
||||
run :: forall a. Eq a => Show a => TestCase a -> Spec
|
||||
run = runTest connString
|
||||
|
||||
-- | Run a 'TestCase' by executing the queries in order. The last 'ODBC.Query'
|
||||
-- is the one we check the result against.
|
||||
@ -204,14 +196,14 @@ transactionStateTests connString =
|
||||
--
|
||||
-- Please also note that we are discarding 'Left's from "setup" transactions
|
||||
-- (all but the last transaction). See the 'runSetup' helper below.
|
||||
runTest :: forall a. Eq a => Show a => Text -> TestCase a -> Spec
|
||||
runTest connString TestCase {..} =
|
||||
it description do
|
||||
run :: forall a. Eq a => Show a => TestCase a -> SpecWith ConnectionString
|
||||
run TestCase {..} =
|
||||
it description \connString -> do
|
||||
case reverse transactions of
|
||||
[] -> expectationFailure "Empty transaction list: nothing to do."
|
||||
(mainTransaction : leadingTransactions) -> do
|
||||
-- Run all transactions before the last (main) transaction.
|
||||
runSetup (reverse leadingTransactions)
|
||||
runSetup connString (reverse leadingTransactions)
|
||||
-- Get the result from the last transaction.
|
||||
result <-
|
||||
runInConn connString $
|
||||
@ -235,12 +227,12 @@ runTest connString TestCase {..} =
|
||||
expectationFailure $
|
||||
"Expected error but got success: " <> show res
|
||||
where
|
||||
runSetup :: [Transaction] -> IO ()
|
||||
runSetup [] = pure ()
|
||||
runSetup (t : ts) = do
|
||||
runSetup :: ConnectionString -> [Transaction] -> IO ()
|
||||
runSetup _ [] = pure ()
|
||||
runSetup connString (t : ts) = do
|
||||
-- Discards 'Left's.
|
||||
_ <- runInConn connString (runQueries unitQuery $ unTransaction t)
|
||||
runSetup ts
|
||||
runSetup connString ts
|
||||
|
||||
runQueries :: (Query -> TxT IO x) -> [Query] -> TxT IO x
|
||||
runQueries _ [] = error $ "Expected at least one query per transaction in " <> description
|
||||
@ -248,16 +240,16 @@ runTest connString TestCase {..} =
|
||||
runQueries f (x : xs) = unitQuery x *> runQueries f xs
|
||||
|
||||
-- | spec helper functions
|
||||
runInConn :: Text -> TxT IO a -> IO (Either MSSQLTxError a)
|
||||
runInConn :: ConnectionString -> TxT IO a -> IO (Either MSSQLTxError a)
|
||||
runInConn connString query =
|
||||
bracket
|
||||
(createMinimalPool connString)
|
||||
drainMSSQLPool
|
||||
(runExceptT . runTx ReadCommitted query)
|
||||
|
||||
createMinimalPool :: Text -> IO MSSQLPool
|
||||
createMinimalPool :: ConnectionString -> IO MSSQLPool
|
||||
createMinimalPool connString =
|
||||
initMSSQLPool (ConnectionString connString) $ ConnectionOptions 1 1 5
|
||||
initMSSQLPool connString $ ConnectionOptions 1 1 5
|
||||
|
||||
invalidSyntaxError :: String
|
||||
invalidSyntaxError =
|
@ -4,10 +4,10 @@ import Hasura.Backends.MSSQL.SQL.Error
|
||||
import Hasura.Prelude
|
||||
import Test.Hspec
|
||||
|
||||
spec :: Spec
|
||||
spec :: SpecWith a
|
||||
spec = do
|
||||
it "test parseErrorClass all classes" $ mapM_ testParseErrorClass testCases
|
||||
it "test parseErrorClass invalid SQLSTATE" $ (parseErrorClass "99999") `shouldBe` Nothing
|
||||
it "test parseErrorClass all classes" $ const $ mapM_ testParseErrorClass testCases
|
||||
it "test parseErrorClass invalid SQLSTATE" $ const $ (parseErrorClass "99999") `shouldBe` Nothing
|
||||
where
|
||||
testParseErrorClass :: (String, ErrorClass) -> Expectation
|
||||
testParseErrorClass (sqlStateCode, expectedClass) =
|
2
server/test-mssql/Main.hs
Normal file
2
server/test-mssql/Main.hs
Normal file
@ -0,0 +1,2 @@
|
||||
{-# OPTIONS_GHC -F -pgmF hspec-discover #-}
|
||||
{-# OPTIONS_GHC -Wno-missing-export-lists #-}
|
13
server/test-mssql/SpecHook.hs
Normal file
13
server/test-mssql/SpecHook.hs
Normal file
@ -0,0 +1,13 @@
|
||||
module SpecHook (hook) where
|
||||
|
||||
import Data.Text qualified as Text
|
||||
import Database.MSSQL.Pool (ConnectionString (..))
|
||||
import Hasura.Prelude
|
||||
import System.Environment (getEnv)
|
||||
import Test.Hspec
|
||||
|
||||
hook :: SpecWith ConnectionString -> Spec
|
||||
hook = beforeWith (\_ -> ConnectionString . Text.pack <$> getEnv connectionStringEnvironmentVariable)
|
||||
|
||||
connectionStringEnvironmentVariable :: String
|
||||
connectionStringEnvironmentVariable = "HASURA_MSSQL_CONN_STR"
|
130
server/test-postgres/Main.hs
Normal file
130
server/test-postgres/Main.hs
Normal file
@ -0,0 +1,130 @@
|
||||
{-# LANGUAGE UndecidableInstances #-}
|
||||
|
||||
module Main (main) where
|
||||
|
||||
import Control.Concurrent.MVar
|
||||
import Control.Natural ((:~>) (..))
|
||||
import Data.Aeson qualified as A
|
||||
import Data.ByteString.Lazy.Char8 qualified as BL
|
||||
import Data.ByteString.Lazy.UTF8 qualified as LBS
|
||||
import Data.Environment qualified as Env
|
||||
import Data.Time.Clock (getCurrentTime)
|
||||
import Data.URL.Template
|
||||
import Database.PG.Query qualified as PG
|
||||
import Hasura.App
|
||||
( PGMetadataStorageAppT (..),
|
||||
mkMSSQLSourceResolver,
|
||||
mkPgSourceResolver,
|
||||
)
|
||||
import Hasura.Backends.Postgres.Connection.MonadTx
|
||||
import Hasura.Backends.Postgres.Connection.Settings
|
||||
import Hasura.Backends.Postgres.Execute.Types
|
||||
import Hasura.EventTriggerCleanupSuite qualified as EventTriggerCleanupSuite
|
||||
import Hasura.GraphQL.Schema.Options qualified as Options
|
||||
import Hasura.Logging
|
||||
import Hasura.Metadata.Class
|
||||
import Hasura.Prelude
|
||||
import Hasura.RQL.DDL.Schema.Cache
|
||||
import Hasura.RQL.DDL.Schema.Cache.Common
|
||||
import Hasura.RQL.Types.Common
|
||||
import Hasura.RQL.Types.Metadata (emptyMetadataDefaults)
|
||||
import Hasura.RQL.Types.ResizePool
|
||||
import Hasura.RQL.Types.SchemaCache.Build
|
||||
import Hasura.Server.Init
|
||||
import Hasura.Server.Migrate
|
||||
import Hasura.Server.MigrateSuite qualified as MigrateSuite
|
||||
import Hasura.Server.Types
|
||||
import Hasura.StreamingSubscriptionSuite qualified as StreamingSubscriptionSuite
|
||||
import Network.HTTP.Client qualified as HTTP
|
||||
import Network.HTTP.Client.TLS qualified as HTTP
|
||||
import System.Environment (getEnvironment)
|
||||
import System.Exit (exitFailure)
|
||||
import Test.Hspec
|
||||
|
||||
-- | Entry point for the PostgreSQL integration tests.
--
-- Reads the connection string of the PostgreSQL instance under test from the
-- environment (the variable named by 'databaseUrlOption'), migrates the
-- catalog, builds a schema cache, and then runs three hspec suites: the
-- migration suite, the streaming subscription suite, and the event trigger
-- log cleanup suite.
main :: IO ()
main = do
  env <- getEnvironment
  let envMap = Env.mkEnvironment env

  -- Resolve the database URL from the environment, exiting with a message
  -- (via 'printErrExit') when the expected variable is unset.
  pgUrlText <- flip onLeft printErrExit $
    runWithEnv env $ do
      let envVar = _envVar databaseUrlOption
      maybeV <- considerEnv envVar
      onNothing maybeV $
        throwError $
          "Expected: " <> envVar

  let pgConnInfo = PG.ConnInfo 1 $ PG.CDDatabaseURI $ txtToBs pgUrlText
      urlConf = UrlValue $ InputWebhook $ mkPlainURLTemplate pgUrlText
      sourceConnInfo =
        PostgresSourceConnInfo urlConf (Just setPostgresPoolSettings) True PG.ReadCommitted Nothing
      sourceConfig = PostgresConnConfiguration sourceConnInfo Nothing defaultPostgresExtensionsSchema

  -- A single-connection pool (cpConns = 1) is used for these tests.
  pgPool <- PG.initPGPool pgConnInfo PG.defaultConnParams {PG.cpConns = 1} print
  let pgContext = mkPGExecCtx PG.Serializable pgPool NeverResizePool

      -- Writes engine logs to stdout, JSON-encoded.
      logger :: Logger Hasura = Logger $ \l -> do
        let (logLevel, logType :: EngineLogType Hasura, logDetail) = toEngineLog l
        t <- liftIO $ getFormattedTime Nothing
        liftIO $ putStrLn $ LBS.toString $ A.encode $ EngineLog t logLevel logType logDetail

      -- Builds the rebuildable schema cache once (migrating the catalog
      -- first) and returns the natural transformation the migrate suite uses
      -- to run its actions against that shared cache.
      setupCacheRef = do
        httpManager <- HTTP.newManager HTTP.tlsManagerSettings
        let sqlGenCtx =
              SQLGenCtx
                Options.Don'tStringifyNumbers
                Options.Don'tDangerouslyCollapseBooleans
                Options.Don'tOptimizePermissionFilters
                Options.EnableBigQueryStringNumericInput
            maintenanceMode = MaintenanceModeDisabled
            readOnlyMode = ReadOnlyModeDisabled
            serverConfigCtx =
              ServerConfigCtx
                Options.InferFunctionPermissions
                Options.DisableRemoteSchemaPermissions
                sqlGenCtx
                maintenanceMode
                mempty
                EventingEnabled
                readOnlyMode
                Nothing -- We are not testing the naming convention here, so defaulting to hasura-default
                emptyMetadataDefaults
            cacheBuildParams = CacheBuildParams httpManager (mkPgSourceResolver print) mkMSSQLSourceResolver serverConfigCtx
            pgLogger = print

            -- Unwraps the metadata-storage/app/cache-build stack down to IO,
            -- exiting the process (via 'printErrJExit') if either the cache
            -- build or the metadata storage layer reports an error.
            run :: MetadataStorageT (PGMetadataStorageAppT CacheBuild) a -> IO a
            run =
              runMetadataStorageT
                >>> flip runPGMetadataStorageAppT (pgPool, pgLogger)
                >>> runCacheBuild cacheBuildParams
                >>> runExceptT
                >=> flip onLeft printErrJExit
                >=> flip onLeft printErrJExit

        -- Migrate the catalog first, then build the schema cache from the
        -- resulting metadata.
        (metadata, schemaCache) <- run do
          metadata <-
            snd
              <$> (liftEitherM . runExceptT . runTx pgContext PG.ReadWrite)
                (migrateCatalog (Just sourceConfig) defaultPostgresExtensionsSchema maintenanceMode =<< liftIO getCurrentTime)
          schemaCache <- lift $ lift $ buildRebuildableSchemaCache logger envMap metadata
          pure (metadata, schemaCache)

        -- Share the schema cache through an MVar; the migrate suite runs
        -- against it via 'MigrateSuite.runCacheRefT'.
        cacheRef <- newMVar schemaCache
        pure $ NT (run . flip MigrateSuite.runCacheRefT cacheRef . fmap fst . runMetadataT metadata emptyMetadataDefaults)

  streamingSubscriptionSuite <- StreamingSubscriptionSuite.buildStreamingSubscriptionSuite
  eventTriggerLogCleanupSuite <- EventTriggerCleanupSuite.buildEventTriggerCleanupSuite

  hspec do
    describe "Migrate suite" $
      beforeAll setupCacheRef $
        describe "Hasura.Server.Migrate" $
          MigrateSuite.suite sourceConfig pgContext pgConnInfo
    describe "Streaming subscription suite" $ streamingSubscriptionSuite
    describe "Event trigger log cleanup suite" $ eventTriggerLogCleanupSuite
|
||||
|
||||
-- | Print the given message to stdout, then abort with a non-zero exit code.
printErrExit :: String -> IO a
printErrExit message = do
  putStrLn message
  exitFailure
|
||||
|
||||
-- | Print the JSON encoding of the given value to stdout, then abort with a
-- non-zero exit code.
printErrJExit :: (A.ToJSON a) => a -> IO b
printErrJExit value = do
  BL.putStrLn (A.encode value)
  exitFailure
|
Loading…
Reference in New Issue
Block a user