feat(tests): Introduce AggregationPredicatesSpec

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/5686
GitOrigin-RevId: 85b39ad569180929e5620c45bf9a98ef6ee99d42
Philip Lykke Carlsen 2022-09-07 12:09:41 +00:00 committed by hasura-bot
parent 8bc34e12e3
commit d43a30e8fc
11 changed files with 568 additions and 17 deletions

View File

@@ -936,6 +936,8 @@ test-suite graphql-engine-tests
type: exitcode-stdio-1.0
build-depends:
aeson
, aeson-pretty
, aeson-qq
, async
, attoparsec
, base
@@ -1058,7 +1060,9 @@ test-suite graphql-engine-tests
Hasura.EventingSpec
Hasura.Generator.Common
Hasura.GraphQL.NamespaceSpec
Hasura.GraphQL.Schema.BoolExp.AggregationPredicatesSpec
Hasura.GraphQL.Schema.Build.UpdateSpec
Hasura.GraphQL.Schema.Introspection
Hasura.GraphQL.Schema.RemoteTest
Hasura.IncrementalSpec
Hasura.Metadata.DTO.MetadataDTOSpec
@@ -1086,11 +1090,13 @@ test-suite graphql-engine-tests
Hasura.SQL.WKTSpec
Hasura.StreamingSubscriptionSuite
Network.HTTP.Client.TransformableSpec
Test.Aeson.Expectation
Test.Aeson.Utils
Test.Backend.Postgres.Delete
Test.Backend.Postgres.Insert
Test.Backend.Postgres.Misc
Test.Backend.Postgres.Update
Test.Hspec.Extended
Test.Parser.Delete
Test.Parser.Expectation
Test.Parser.Field

View File

@@ -58,7 +58,7 @@ defaultAggregationPredicatesParser aggFns si ti = runMaybeT do
arrayRelationships <&> \rel -> do
relTable <- askTableInfo si (riRTable rel)
relGqlName <- textToName $ relNameToTxt $ riName rel
typeGqlName <- (<> Name.__ <> relGqlName) <$> getTableGQLName relTable
typeGqlName <- (<> Name.__ <> relGqlName <> Name.__ <> Name._aggregate) <$> getTableGQLName ti
-- We only make a field for aggregations over a relation if at least
-- some aggregation predicates are callable.
@@ -102,7 +102,7 @@ defaultAggregationPredicatesParser aggFns si ti = runMaybeT do
G.Name ->
(InputFieldsParser n [AggregationPredicate b (UnpreparedValue b)]) ->
(InputFieldsParser n (Maybe (AggregationPredicatesImplementation b (UnpreparedValue b))))
relAggregateField rel typeGqlName relGqlName =
relAggregateField rel relGqlName typeGqlName =
P.fieldOptional (relGqlName <> Name.__ <> Name._aggregate) Nothing
. P.object typeGqlName Nothing
. fmap (AggregationPredicatesImplementation rel)

View File

@@ -61,7 +61,7 @@ data SourceInfo b = SourceInfo
{ _siName :: SourceName,
_siTables :: TableCache b,
_siFunctions :: FunctionCache b,
_siConfiguration :: SourceConfig b,
_siConfiguration :: ~(SourceConfig b),
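-- Note: making this field lazy allows tests to supply a placeholder
-- configuration (e.g. 'notImplementedYet') that is never forced.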
_siQueryTagsConfig :: Maybe QueryTagsConfig,
_siCustomization :: SourceCustomization
}

View File

@@ -0,0 +1,306 @@
{-# LANGUAGE QuasiQuotes #-}
-- | This module contains unit tests for the schema of the default
-- implementation of aggregation predicates.
module Hasura.GraphQL.Schema.BoolExp.AggregationPredicatesSpec (spec) where
import Data.Aeson.QQ (aesonQQ)
import Data.HashMap.Strict qualified as HM
import Data.Text.NonEmpty (nonEmptyTextQQ)
import Hasura.Backends.Postgres.Instances.Schema ()
import Hasura.Backends.Postgres.SQL.Types
( PGScalarType (PGInteger, PGText),
QualifiedTable,
)
import Hasura.Backends.Postgres.SQL.Value (PGScalarValue (..))
import Hasura.GraphQL.Parser.Internal.Input (ifParser)
import Hasura.GraphQL.Schema.BoolExp.AggregationPredicates
( ArgumentsSignature (..),
FunctionSignature (..),
defaultAggregationPredicatesParser,
)
import Hasura.GraphQL.Schema.Introspection (queryInputFieldsParserIntrospection)
import Hasura.Prelude
import Hasura.RQL.IR.BoolExp (OpExpG (AEQ))
import Hasura.RQL.IR.BoolExp.AggregationPredicates
import Hasura.RQL.IR.Value (UnpreparedValue (UVParameter))
import Hasura.RQL.Types.Column (ColumnType (ColumnScalar), ColumnValue (..))
import Hasura.RQL.Types.Common (InsertOrder (..), RelName (..), RelType (..), SourceName (..))
import Hasura.RQL.Types.Relationships.Local (RelInfo (..))
import Hasura.RQL.Types.Source (SourceInfo (..))
import Hasura.RQL.Types.SourceCustomization (SourceCustomization (SourceCustomization))
import Hasura.RQL.Types.Table
( TableCoreInfoG (_tciName),
TableInfo (_tiCoreInfo),
)
import Hasura.SQL.Backend (BackendType (Postgres), PostgresKind (Vanilla))
import Language.GraphQL.Draft.Syntax qualified as G
import Language.GraphQL.Draft.Syntax.QQ qualified as G
import Test.Aeson.Expectation (shouldBeSubsetOf)
import Test.Hspec
import Test.Hspec.Extended
import Test.Parser.Field qualified as GQL
import Test.Parser.Internal
( ColumnInfoBuilder
( ColumnInfoBuilder,
cibIsPrimaryKey,
cibName,
cibNullable,
cibType
),
TableInfoBuilder (columns, relations),
buildTableInfo,
mkTable,
tableInfoBuilder,
)
import Test.Parser.Monad
( ParserTest (runParserTest),
notImplementedYet,
runSchemaTest,
)
import Type.Reflection (Typeable, typeRep)
{- Notes:
AggregationPredicates are defined as a standalone feature. It should be possible
to test them without reference to an existing backend.
We cannot do that, however, since backends have the closed datakind `BackendType`.
-}
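-- | Wrapper that gives any 'Typeable' value a 'Show' instance based solely on
-- its type, so that values without 'Show' (such as parsers) can be used with
-- 'shouldSatisfy'. For illustration, @show (Unshowable (Just True))@ would
-- render as @"Unshowable<Maybe Bool>"@.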
newtype Unshowable a = Unshowable {unUnshowable :: a}
deriving (Eq, Ord)
instance Typeable a => Show (Unshowable a) where
show _ = "Unshowable<" ++ show (typeRep @a) ++ ">"
spec :: Spec
spec = do
describe "Aggregation Predicates Schema Parsers" do
describe "When no aggregation functions are given" do
it "Yields no parsers" do
let maybeParser =
runSchemaTest $
defaultAggregationPredicatesParser @('Postgres 'Vanilla) @_ @_ @ParserTest
[]
sourceInfo
albumTableInfo
(Unshowable maybeParser) `shouldSatisfy` (isNothing . unUnshowable)
describe "When some aggregation functions are given" do
let maybeParser =
runSchemaTest $
defaultAggregationPredicatesParser @('Postgres 'Vanilla) @_ @_ @ParserTest
[ FunctionSignature
{ fnName = "count",
fnGQLName = [G.name|count|],
fnArguments = ArgumentsStar,
fnReturnType = PGInteger
}
]
sourceInfo
albumTableInfo
it "Positively yields a parser" do
(Unshowable maybeParser) `shouldSatisfy` (isJust . unUnshowable)
dependentSpec maybeParser $ do
it "Defines the expected GraphQL types" \parser -> do
introspectionResult <-
queryInputFieldsParserIntrospection
parser
[GQL.field|
__schema {
types {
name
fields {
name
type { name }
}
inputFields {
name
type { name }
}
}
} |]
let expectedTopLevel =
[aesonQQ|
{ "types": [
{
"name": "album_tracks_aggregate",
"fields": null,
"inputFields": [
{
"name": "count",
"type": {
"name": "album_tracks_aggregate_count"
}
}
]
}
]
}
|]
let expectedCountType =
[aesonQQ|
{ "types": [
{
"name": "album_tracks_aggregate_count",
"fields": null,
"inputFields": [
{
"name": "arguments",
"type": {
"name": null
}
},
{
"name": "distinct",
"type": {
"name": "Boolean"
}
},
{
"name": "filter",
"type": {
"name": "track_bool_exp"
}
},
{
"name": "predicate",
"type": {
"name": null
}
}
]
}
]
}
|]
expectedTopLevel `shouldBeSubsetOf` introspectionResult
expectedCountType `shouldBeSubsetOf` introspectionResult
it "Parses an example field" \parser -> do
let input =
[GQL.inputfields|
tracks_aggregate: {
count: {
arguments: [],
predicate: {_eq : 42 },
distinct: true
}
}
|]
actual <- runParserTest $ ifParser parser input
let expected :: [AggregationPredicatesImplementation ('Postgres 'Vanilla) (UnpreparedValue ('Postgres 'Vanilla))]
expected =
[ AggregationPredicatesImplementation
{ aggRelation = tracksRel,
aggPredicates =
[ AggregationPredicate
{ aggPredFunctionName = "count",
aggPredArguments = AggregationPredicateArgumentsStar,
aggPredDistinct = True,
aggPredFilter = Nothing,
aggPredPredicate =
[ AEQ
True
( UVParameter
Nothing
( ColumnValue
{ cvType = ColumnScalar PGInteger,
cvValue = PGValInteger 42
}
)
)
]
}
]
}
]
actual `shouldBe` expected
where
albumTableInfo :: TableInfo ('Postgres 'Vanilla)
albumTableInfo =
buildTableInfo
( (tableInfoBuilder (mkTable "album"))
{ columns =
[ ColumnInfoBuilder
{ cibName = "id",
cibType = ColumnScalar PGInteger,
cibNullable = False,
cibIsPrimaryKey = True
},
ColumnInfoBuilder
{ cibName = "title",
cibType = ColumnScalar PGText,
cibNullable = False,
cibIsPrimaryKey = False
}
],
relations = [tracksRel]
}
)
trackTableInfo :: TableInfo ('Postgres 'Vanilla)
trackTableInfo =
buildTableInfo
( (tableInfoBuilder (mkTable "track"))
{ columns =
[ ColumnInfoBuilder
{ cibName = "id",
cibType = ColumnScalar PGInteger,
cibNullable = False,
cibIsPrimaryKey = True
},
ColumnInfoBuilder
{ cibName = "title",
cibType = ColumnScalar PGText,
cibNullable = False,
cibIsPrimaryKey = False
},
ColumnInfoBuilder
{ cibName = "duration_seconds",
cibType = ColumnScalar PGInteger,
cibNullable = False,
cibIsPrimaryKey = False
},
ColumnInfoBuilder
{ cibName = "album_id",
cibType = ColumnScalar PGInteger,
cibNullable = False,
cibIsPrimaryKey = False
}
]
}
)
tracksRel :: RelInfo ('Postgres 'Vanilla)
tracksRel =
RelInfo
{ riName = RelName [nonEmptyTextQQ|tracks|],
riType = ArrRel,
riMapping = HM.fromList [("id", "album_id")],
riRTable = (mkTable "track"),
riIsManual = False,
riInsertOrder = AfterParent
}
sourceInfo :: SourceInfo ('Postgres 'Vanilla)
sourceInfo =
SourceInfo
{ _siName = SNDefault,
_siTables = makeTableCache [albumTableInfo, trackTableInfo],
_siFunctions = mempty,
_siConfiguration = notImplementedYet "SourceConfig",
_siQueryTagsConfig = Nothing,
_siCustomization = SourceCustomization Nothing Nothing Nothing
}
makeTableCache :: [TableInfo ('Postgres 'Vanilla)] -> HashMap QualifiedTable (TableInfo ('Postgres 'Vanilla))
makeTableCache tables = HM.fromList [(_tciName $ _tiCoreInfo ti, ti) | ti <- tables]

View File

@@ -0,0 +1,86 @@
{-# OPTIONS_GHC -Wno-unused-imports #-}
-- | This module contains functions to help with making assertions on the result
-- of parser introspection queries.
module Hasura.GraphQL.Schema.Introspection
( queryInputFieldsParserIntrospection,
)
where
import Data.Aeson qualified as A
import Data.Aeson.Encode.Pretty qualified as AP
import Data.Aeson.Ordered qualified as AO
import Data.ByteString.Lazy.Char8 qualified as LBS
import Data.HashMap.Strict qualified as HM
import Data.Text qualified as T
import Data.Text.NonEmpty
import Hasura.Backends.Postgres.Instances.Schema ()
import Hasura.Backends.Postgres.SQL.DML (SQLExp (SELit))
import Hasura.Backends.Postgres.SQL.Types
import Hasura.Backends.Postgres.SQL.Value (PGScalarValue (..))
import Hasura.Base.ErrorMessage
import Hasura.Base.ToErrorValue (ToErrorValue (toErrorValue))
import Hasura.GraphQL.Parser.Name qualified as GName
import Hasura.GraphQL.Schema.BoolExp.AggregationPredicates
( ArgumentsSignature (..),
FunctionSignature (..),
defaultAggregationPredicatesParser,
)
import Hasura.GraphQL.Schema.Introspect qualified as I
import Hasura.GraphQL.Schema.Parser qualified as P
import Hasura.Prelude
import Hasura.RQL.IR.BoolExp (OpExpG (AEQ))
import Hasura.RQL.IR.BoolExp.AggregationPredicates
import Hasura.RQL.IR.Value
import Hasura.RQL.Types.Column (ColumnType (ColumnScalar), ColumnValue (..))
import Hasura.RQL.Types.Common (InsertOrder (..), RelName (..), RelType (..), SourceName (..))
import Hasura.RQL.Types.Relationships.Local
import Hasura.RQL.Types.Source
import Hasura.RQL.Types.SourceCustomization (SourceCustomization (SourceCustomization))
import Hasura.RQL.Types.Table
import Hasura.SQL.Backend (BackendType (Postgres), PostgresKind (Vanilla))
import Language.GraphQL.Draft.Syntax qualified as G
import Language.GraphQL.Draft.Syntax.QQ qualified as G
import Test.Hspec
import Test.Hspec.Extended (dependentSpec)
import Test.Parser.Field qualified as GQL
import Test.Parser.Internal
import Test.Parser.Monad
import Type.Reflection (Typeable, typeRep)
-- | Produce an introspection parser for an 'InputFieldsParser'.
-- Use the "Test.Parser.Field.field" quasi-quoter to construct the introspection query.
queryInputFieldsParserIntrospection ::
forall n a.
-- | The Parser to introspect
P.InputFieldsParser n a ->
-- | The Introspection query
G.Field G.NoFragments P.Variable ->
IO A.Value
queryInputFieldsParserIntrospection parser field = do
introspectionParser <- introspectDefintions (P.ifDefinitions parser)
runParserTest $ P.fParser introspectionParser field
introspectDefintions ::
forall n a.
(P.HasTypeDefinitions a, P.MonadParse n) =>
a ->
IO (P.FieldParser n A.Value)
introspectDefintions definitions = do
let introParser :: Either P.ConflictingDefinitions (P.FieldParser n A.Value) = do
types <- P.collectTypeDefinitions [P.TypeDefinitionsWrapper definitions]
let schema =
P.Schema
{ sDescription = Nothing,
sTypes = types,
sQueryType =
P.TNamed
P.NonNullable
$ P.Definition GName._String Nothing Nothing [] (P.TIObject (P.ObjectInfo [] [])),
sMutationType = Nothing,
sSubscriptionType = Nothing,
sDirectives = []
}
return $ (AO.fromOrdered . ($ schema)) <$> I.schema @n
onLeft introParser (error . T.unpack . fromErrorMessage . toErrorValue)

View File

@@ -0,0 +1,60 @@
-- | This module contains functions that help express expectations about json
-- values.
module Test.Aeson.Expectation
( shouldBeSubsetOf,
jsonSubsetOf,
)
where
import Data.Aeson qualified as A
import Data.Aeson.Encode.Pretty qualified as AP
import Data.Aeson.KeyMap qualified as A
import Data.ByteString.Lazy.Char8 qualified as LBS
import Data.Text qualified as T
import Data.Text.Encoding (decodeUtf8)
import Data.These
import Data.Vector qualified as V
import Hasura.Backends.Postgres.Instances.Schema ()
import Hasura.Prelude
import Test.Hspec
-- | Assert that one json value should be a subset of another, in the sense of 'jsonSubsetOf'.
shouldBeSubsetOf :: A.Value -> A.Value -> IO ()
shouldBeSubsetOf subset superset | subset `jsonSubsetOf` superset = return ()
shouldBeSubsetOf subset superset =
expectationFailure $
T.unpack $
decodeUtf8 $
LBS.toStrict $
AP.encodePretty subset <> " is not a subset of " <> AP.encodePretty superset
-- | Compute whether one json value 'sub' is a subset of another value 'sup', in the sense that:
--
-- * For arrays, the elements of 'sub' must match, in order and in the subset sense, the elements of some contiguous segment of 'sup'
-- * For objects, the keys of 'sub' are a subset of those of 'sup', and all their associated values are also subset-related
-- * Leaf values are identical
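--
-- For illustration: @{"a": 1}@ is a subset of @{"a": 1, "b": 2}@, and @[2, 3]@
-- is a subset of @[1, 2, 3]@, since its elements match a contiguous segment of
-- the larger array.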
jsonSubsetOf :: A.Value -> A.Value -> Bool
jsonSubsetOf (A.Array sub) (A.Array sup) = sub `subarrayOf` sup
jsonSubsetOf (A.Object sub) (A.Object sup) = sub `subobjectOf` sup
jsonSubsetOf (A.String sub) (A.String sup) = sub == sup
jsonSubsetOf (A.Number sub) (A.Number sup) = sub == sup
jsonSubsetOf (A.Bool sub) (A.Bool sup) = sub == sup
jsonSubsetOf A.Null A.Null = True
jsonSubsetOf _sub _sup = False
subobjectOf :: A.KeyMap A.Value -> A.KeyMap A.Value -> Bool
subobjectOf sub sup =
A.foldr (&&) True $
A.alignWith
( \case
This _ -> False -- key is only in the sub
That _ -> True -- key is only in sup
These l r -> l `jsonSubsetOf` r
)
sub
sup
subarrayOf :: V.Vector A.Value -> V.Vector A.Value -> Bool
subarrayOf sub sup | V.length sub > V.length sup = False
subarrayOf sub sup | V.and $ V.zipWith jsonSubsetOf sub sup = True
subarrayOf sub sup = subarrayOf sub (V.tail sup)

View File

@@ -0,0 +1,40 @@
-- | This module contains useful generic extensions to the Hspec testing framework.
--
-- Note that, at the time of this writing, this module has a namesake in the
-- 'tests-hspec' test suite. We might consider merging the two.
module Test.Hspec.Extended
( dependentSpec,
dependentSpecWith,
)
where
import Test.Hspec
import Prelude
-- | Mark specs as pending when the value they depend on is absent.
--
-- Using this function to build specs results in clearer test results when a set
-- of tests can only conceivably succeed if some earlier test also succeeded.
dependentSpecWith ::
forall a b c.
(HasCallStack) =>
(a -> b -> c) ->
Maybe a ->
SpecWith c ->
SpecWith b
dependentSpecWith inject (Just a) specs = aroundWith (\spec b -> spec (inject a b)) specs
dependentSpecWith _ Nothing specs = aroundWith (\_ _ -> pendingWith "Depends on the success of a previous test") specs
-- | Mark specs as pending when the value they depend on is absent.
--
-- Using this function to build specs results in clearer test results when a set
-- of tests can only conceivably succeed if some earlier test also succeeded.
--
-- This is a simplified version in which only the dependent specs take an
-- argument, namely the dependency value itself.
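--
-- A sketch of typical usage (mirroring its use in the aggregation predicates spec):
--
-- > dependentSpec maybeParser $ do
-- >   it "uses the parser from an earlier test" \parser -> do
-- >     ...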
dependentSpec ::
forall a.
(HasCallStack) =>
Maybe a ->
SpecWith a ->
Spec
dependentSpec = dependentSpecWith const

View File

@@ -84,7 +84,7 @@ data UpdateExpectationBuilder = UpdateExpectationBuilder
-- | Run a test given the schema and field.
runUpdateFieldTest :: UpdateTestSetup -> Expectation
runUpdateFieldTest UpdateTestSetup {..} =
case runSchemaTest $ mkParser (TableInfoBuilder table utsColumns) of
case runSchemaTest $ mkParser ((tableInfoBuilder table) {columns = utsColumns}) of
[] -> expectationFailure "expected at least one parser"
parsers ->
case find (byName (Syntax._fName utsField)) parsers of

View File

@@ -1,5 +1,12 @@
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskellQuotes #-}
-- | QuasiQuoter for parsing GraphQL fields in tests. See 'field' for details.
module Test.Parser.Field (field) where
module Test.Parser.Field
( field,
inputfields,
)
where
import Control.Monad.Trans.Except
import Data.Attoparsec.Text qualified as Parser
@@ -46,3 +53,30 @@ field =
fixField f = do
x <- except $ mapLeft (T.unpack . showQErr) $ runInlineM mempty . inlineField $ f
traverse (throwE . ("Variables are not supported in tests yet: " ++) . show) x
-- | Quasi-Quoter for GraphQL input fields.
-- Example usage:
-- > [GQL.inputfields|
-- > where: { name: { _eq: "old name"}},
-- > _set: { name: "new name" }
-- > |],
--
-- Note that, because the graphql parser library does not expose a parser for
-- input fields directly, we instead wrap the input text in dummy field syntax,
-- delegate to the 'field' quasi-quoter, and extract the input fields from there.
inputfields :: QuasiQuoter
inputfields =
QuasiQuoter
{ quoteExp = inputfieldExp,
quotePat = \_ -> fail "invalid",
quoteType = \_ -> fail "invalid",
quoteDec = \_ -> fail "invalid"
}
where
inputfieldExp :: String -> ExpQ
inputfieldExp input = do
applied <- TH.AppE <$> [e|fmap GraphQLValue . GraphQL._fArguments|] <*> quoteExp field ("field(" ++ input ++ ")")
-- For some reason the type is 'InputValue v' for some rigid 'v' if we
-- don't add this type annotation.
annotated <- TH.SigE applied <$> [t|HashMap GraphQL.Name (InputValue Variable)|]
return annotated

View File

@@ -7,6 +7,7 @@ module Test.Parser.Internal
mkParser,
Parser,
TableInfoBuilder (..),
tableInfoBuilder,
buildTableInfo,
)
where
@@ -29,6 +30,7 @@ import Hasura.RQL.Types.Column (ColumnInfo (..), ColumnMutability (..), ColumnTy
import Hasura.RQL.Types.Common (Comment (..), FieldName (..), OID (..))
import Hasura.RQL.Types.Instances ()
import Hasura.RQL.Types.Permission (AllowedRootFields (..))
import Hasura.RQL.Types.Relationships.Local (RelInfo (..), fromRel)
import Hasura.RQL.Types.Source (SourceInfo)
import Hasura.RQL.Types.Table (Constraint (..), CustomRootField (..), FieldInfo (..), PrimaryKey (..), RolePermInfo (..), SelPermInfo (..), TableConfig (..), TableCoreInfoG (..), TableCustomRootFields (..), TableInfo (..), UpdPermInfo (..))
import Hasura.SQL.Backend (BackendType (Postgres), PostgresKind (Vanilla))
@@ -106,17 +108,21 @@ mkParser tib =
name :: C.GQLNameIdentifier
name = C.fromAutogeneratedName (unsafeMkName $ getTableTxt $ qName (table tib))
toHashPair :: ColumnInfoBuilder -> (FieldName, FieldInfo PG)
toHashPair cib = (coerce $ cibName cib, FIColumn $ mkColumnInfo cib)
-- | Inputs for building 'TableInfo's.
-- The expectation is that this will be extended freely as new tests need more
-- elaborate setup.
data TableInfoBuilder = TableInfoBuilder
{ table :: QualifiedTable,
columns :: [ColumnInfoBuilder]
columns :: [ColumnInfoBuilder],
relations :: [RelInfo PG]
}
-- | A smart constructor for an empty 'TableInfoBuilder'.
-- This should make it easier to maintain existing test code when new fields are
-- added.
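--
-- For example (sketch; @idColumn@ stands for some 'ColumnInfoBuilder'):
--
-- > buildTableInfo ((tableInfoBuilder (mkTable "example")) {columns = [idColumn]})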
tableInfoBuilder :: QualifiedTable -> TableInfoBuilder
tableInfoBuilder table = TableInfoBuilder {columns = [], relations = [], ..}
-- | Build a 'TableInfo' from a 'TableInfoBuilder'.
-- The expectation is that this will be extended freely as new tests need more
-- elaborate setup.
@@ -172,11 +178,23 @@ buildTableInfo TableInfoBuilder {..} = tableInfo
}
fieldInfoMap :: HM.HashMap FieldName (FieldInfo PG)
fieldInfoMap =
fieldInfoMap = HM.unions [columnFields, relationFields]
columnFields :: HM.HashMap FieldName (FieldInfo PG)
columnFields =
HM.fromList
. fmap toHashPair
. fmap toCIHashPair
$ columns
toCIHashPair :: ColumnInfoBuilder -> (FieldName, FieldInfo PG)
toCIHashPair cib = (coerce $ cibName cib, FIColumn $ mkColumnInfo cib)
toRelHashPair :: RelInfo PG -> (FieldName, FieldInfo PG)
toRelHashPair ri = (fromRel $ riName ri, FIRelationship ri)
relationFields :: HM.HashMap FieldName (FieldInfo PG)
relationFields = HM.fromList . fmap toRelHashPair $ relations
tableConfig :: TableConfig PG
tableConfig =
TableConfig

View File

@@ -36,12 +36,13 @@ import Test.HUnit.Lang (assertFailure)
-- | Placeholder value for test inputs that are not relevant yet.
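--
-- Used, for instance, as @notImplementedYet "SourceConfig"@ when building a
-- 'SourceInfo' whose configuration the test never forces.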
notImplementedYet :: HasCallStack => String -> a
notImplementedYet thing =
error $
( unlines
[ "\"" ++ thing ++ "\" is not yet defined, because it hasn't been touched by tests yet.",
"If you see this message you likely need to provide/mock a value here"
]
)
withFrozenCallStack $
error $
( unlines
[ "\"" ++ thing ++ "\" is not yet defined, because it hasn't been touched by tests yet.",
"If you see this message you likely need to provide/mock a value here"
]
)
-- | Monad builder environment.
--