Remove Unique from Definition

GraphQL types can refer to each other in a circular way. The PDV framework uses values of type `Unique` to recognize two fragments of GraphQL schema as being the same instance. Internally, this is based on `Data.Unique` from the `base` package, which simply increments a counter on every creation of a `Unique` value.

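For reference, a minimal runnable sketch of how `Data.Unique` behaves (using only its documented `base` API): every `newUnique` call yields a value that is equal only to itself, backed by that counter.

```haskell
-- Sketch of Data.Unique behaviour (base package): distinct creations are
-- never equal; hashUnique exposes the underlying counter value.
import Data.Unique (hashUnique, newUnique)

main :: IO ()
main = do
  u1 <- newUnique
  u2 <- newUnique
  print (u1 == u2)                     -- False: a Unique is only equal to itself
  print (hashUnique u1, hashUnique u2) -- two distinct Ints from the counter
```
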
**NB**: The `Unique` values are _not_ used for knot-tying the schema combinators themselves (i.e. the `Parser`s); that knot-tying is based purely on the keys provided to `memoizeOn`. The `Unique` values are _only_ used to recognize two pieces of GraphQL _schema_ as being identical. Originally, the idea was that this would enable a perfectly correct identification of GraphQL types, but that fully correct equality check was never implemented and does not appear to be necessary to prevent bugs.

Specifically, these `Unique` values are stored as part of `data Definition a`, which specifies a part of our internal abstract syntax tree for the GraphQL types that we expose. The `Unique` values get initialized by the `SchemaT` effect.

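As a hedged, self-contained sketch of the moving parts (field and helper names follow the diff below; `Name` and `Description` are simplified to `String`, and the `HasDefinition` lens indirection and the laziness annotation on `dInfo` are dropped):

```haskell
import Control.Applicative ((<|>))
import Data.Unique (Unique, newUnique)

-- Pre-commit shape of a schema definition; dUnique is the field this commit
-- removes. (In the real code, dInfo is lazy to allow mutually recursive types.)
data Definition a = Definition
  { dName :: String,
    dUnique :: Maybe Unique,
    dDescription :: Maybe String,
    dInfo :: a
  }

-- Two definitions with matching Uniques could be assumed identical without
-- inspecting dInfo; otherwise equality fell back to names and payloads.
sameDefinition :: Eq a => Definition a -> Definition a -> Bool
sameDefinition d1 d2
  | Just u1 <- dUnique d1, Just u2 <- dUnique d2, u1 == u2 = True
  | otherwise = dName d1 == dName d2 && dInfo d1 == dInfo d2

-- The SchemaT effect stamped a fresh Unique onto each definition it built,
-- keeping any Unique that was already present.
addDefinitionUnique :: Unique -> Definition a -> Definition a
addDefinitionUnique unique definition =
  definition {dUnique = dUnique definition <|> Just unique}

initializeUnique :: Definition a -> IO (Definition a)
initializeUnique definition = do
  unique <- newUnique
  pure (addDefinitionUnique unique definition)

main :: IO ()
main = do
  let d = Definition "Float" Nothing Nothing ()
  d' <- initializeUnique d
  print (sameDefinition d d')  -- True, via the structural fallback
  print (sameDefinition d' d') -- True, via the Unique fast path
```

The diff below removes `dUnique`, `mkDefinition`, `addDefinitionUnique`, and the `Unique` shortcut in the `Eq1` instance accordingly.
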
In #2894 and #2895, we are experimenting with hiding (parts of) the GraphQL types behind permission predicates. This would allow a single GraphQL schema in memory to serve all roles, implementing #2711. The permission predicates are evaluated at query parsing time, when we know which role is making the request, and thus yield the correct GraphQL types for that role.

If the approach of #2895 is followed, the `Definition` objects, and with them the `Unique` values, would be hidden behind the permission predicates. Since those predicates are evaluated only after the schema is supposed to have been built, they would prevent us from ever initializing the `Unique` values, rendering the values useless.

The simplest remedy is to remove our usage of `Unique` from the GraphQL schema and schema combinators altogether: it serves no functional purpose, prevents no bugs, and requires extra bookkeeping.

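Concretely, the change that recurs throughout the diff below is mechanical: the `mkDefinition` smart constructor (which merely filled in `dUnique = Nothing`) disappears, and call sites apply the now three-field `Definition` constructor directly. For example, quoted from the `Float` parser hunk below:

```haskell
-- before
schemaType = NonNullable $ TNamed $ mkDefinition "Float" Nothing TIScalar
-- after
schemaType = NonNullable $ TNamed $ Definition "Float" Nothing TIScalar
```
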
PR-URL: https://github.com/hasura/graphql-engine-mono/pull/2980
GitOrigin-RevId: 50d3f9e0b9fbf578ac49c8fc773ba64a94b1f43d
Author: Auke Booij, 2021-12-01 17:20:35 +01:00 (committed by hasura-bot)
Parent: 0a4194a1bc
Commit: caf9957aca
31 changed files with 101 additions and 228 deletions

View File

@ -261,7 +261,7 @@ float = Parser
v -> typeMismatch floatScalar "a float" v
}
where
schemaType = NonNullable $ TNamed $ mkDefinition "Float" Nothing TIScalar
schemaType = NonNullable $ TNamed $ Definition "Float" Nothing TIScalar
```
This allows us to incrementally unpack JSON values without having to fully

View File

@ -52,7 +52,6 @@ import Control.Monad.STM (atomically)
import Control.Monad.Stateless
import Control.Monad.Trans.Control (MonadBaseControl (..))
import Control.Monad.Trans.Managed (ManagedT (..), allocate_)
import Control.Monad.Unique
import Data.Aeson qualified as A
import Data.ByteString.Char8 qualified as BC
import Data.ByteString.Lazy.Char8 qualified as BLC
@ -308,7 +307,6 @@ newtype PGMetadataStorageAppT m a = PGMetadataStorageAppT {runPGMetadataStorageA
MonadMask,
HasHttpManagerM,
HasServerConfigCtx,
MonadUnique,
MonadReader (Q.PGPool, Q.PGLogger)
)
via (ReaderT (Q.PGPool, Q.PGLogger) m)

View File

@ -208,7 +208,7 @@ bqColumnParser columnType (G.Nullability isNullable) =
BigQuery.DatetimeScalarType -> pure $ possiblyNullable scalarType $ BigQuery.DatetimeValue . BigQuery.Datetime <$> P.string
BigQuery.GeographyScalarType -> pure $ possiblyNullable scalarType $ BigQuery.GeographyValue . BigQuery.Geography <$> P.string
BigQuery.TimestampScalarType -> do
let schemaType = P.Nullable . P.TNamed $ P.mkDefinition stringScalar Nothing P.TIScalar
let schemaType = P.Nullable . P.TNamed $ P.Definition stringScalar Nothing P.TIScalar
pure $
possiblyNullable scalarType $
Parser
@ -233,11 +233,11 @@ bqColumnParser columnType (G.Nullability isNullable) =
| otherwise = id
mkEnumValue :: (EnumValue, EnumValueInfo) -> (P.Definition P.EnumValueInfo, ScalarValue 'BigQuery)
mkEnumValue (EnumValue value, EnumValueInfo description) =
( P.mkDefinition value (G.Description <$> description) P.EnumValueInfo,
( P.Definition value (G.Description <$> description) P.EnumValueInfo,
BigQuery.StringValue $ G.unName value
)
throughJSON scalarName =
let schemaType = P.NonNullable $ P.TNamed $ P.mkDefinition scalarName Nothing P.TIScalar
let schemaType = P.NonNullable $ P.TNamed $ P.Definition scalarName Nothing P.TIScalar
in Parser
{ pType = schemaType,
pParser =
@ -278,7 +278,7 @@ bqOrderByOperators =
)
]
where
define name desc = P.mkDefinition name (Just desc) P.EnumValueInfo
define name desc = P.Definition name (Just desc) P.EnumValueInfo
bqComparisonExps ::
forall m n r.

View File

@ -275,7 +275,7 @@ msColumnParser columnType (G.Nullability isNullable) =
MSSQL.BitType -> pure $ ODBC.BoolValue <$> P.boolean
_ -> do
name <- MSSQL.mkMSSQLScalarTypeName scalarType
let schemaType = P.NonNullable $ P.TNamed $ P.mkDefinition name Nothing P.TIScalar
let schemaType = P.NonNullable $ P.TNamed $ P.Definition name Nothing P.TIScalar
pure $
Parser
{ pType = schemaType,
@ -296,7 +296,7 @@ msColumnParser columnType (G.Nullability isNullable) =
| otherwise = id
mkEnumValue :: (EnumValue, EnumValueInfo) -> (P.Definition P.EnumValueInfo, ScalarValue 'MSSQL)
mkEnumValue (EnumValue value, EnumValueInfo description) =
( P.mkDefinition value (G.Description <$> description) P.EnumValueInfo,
( P.Definition value (G.Description <$> description) P.EnumValueInfo,
ODBC.TextValue $ G.unName value
)
@ -333,7 +333,7 @@ msOrderByOperators =
)
]
where
define name desc = P.mkDefinition name (Just desc) P.EnumValueInfo
define name desc = P.Definition name (Just desc) P.EnumValueInfo
msComparisonExps ::
forall m n r.

View File

@ -185,7 +185,7 @@ columnParser' columnType (G.Nullability isNullable) =
MySQL.Timestamp -> pure $ possiblyNullable scalarType $ MySQL.TimestampValue <$> P.string
_ -> do
name <- MySQL.mkMySQLScalarTypeName scalarType
let schemaType = P.NonNullable $ P.TNamed $ P.mkDefinition name Nothing P.TIScalar
let schemaType = P.NonNullable $ P.TNamed $ P.Definition name Nothing P.TIScalar
pure $
Parser
{ pType = schemaType,
@ -207,11 +207,11 @@ columnParser' columnType (G.Nullability isNullable) =
| otherwise = id
mkEnumValue :: (EnumValue, EnumValueInfo) -> (P.Definition P.EnumValueInfo, RQL.ScalarValue 'MySQL)
mkEnumValue (RQL.EnumValue value, EnumValueInfo description) =
( P.mkDefinition value (G.Description <$> description) P.EnumValueInfo,
( P.Definition value (G.Description <$> description) P.EnumValueInfo,
MySQL.VarcharValue $ G.unName value
)
throughJSON scalarName =
let schemaType = P.NonNullable $ P.TNamed $ P.mkDefinition scalarName Nothing P.TIScalar
let schemaType = P.NonNullable $ P.TNamed $ P.Definition scalarName Nothing P.TIScalar
in Parser
{ pType = schemaType,
pParser =
@ -248,7 +248,7 @@ orderByOperators' =
)
]
where
define name desc = P.mkDefinition name (Just desc) P.EnumValueInfo
define name desc = P.Definition name (Just desc) P.EnumValueInfo
-- | TODO: Make this as thorough as the one for MSSQL/PostgreSQL
comparisonExps' ::

View File

@ -43,7 +43,6 @@ where
import Control.Lens (makeLenses)
import Control.Monad.Morph (hoist)
import Control.Monad.Trans.Control (MonadBaseControl (..))
import Control.Monad.Unique
import Control.Monad.Validate
import Data.Aeson
import Data.Aeson.Casing (aesonDrop)
@ -176,9 +175,6 @@ withTraceContext ctx tx = setTraceContextInTx ctx >> tx
deriving instance Tracing.MonadTrace m => Tracing.MonadTrace (Q.TxET e m)
instance (MonadIO m) => MonadUnique (Q.TxET e m) where
newUnique = liftIO newUnique
checkDbConnection :: MonadIO m => Q.PGPool -> m Bool
checkDbConnection pool = do
e <- liftIO $ runExceptT $ Q.runTx' pool select1Query

View File

@ -214,7 +214,7 @@ columnParser columnType (G.Nullability isNullable) =
--
-- TODO: introduce new dedicated scalars for Postgres column types.
name <- mkScalarTypeName scalarType
let schemaType = P.NonNullable $ P.TNamed $ P.mkDefinition name Nothing P.TIScalar
let schemaType = P.NonNullable $ P.TNamed $ P.Definition name Nothing P.TIScalar
pure $
Parser
{ pType = schemaType,
@ -237,7 +237,7 @@ columnParser columnType (G.Nullability isNullable) =
| otherwise = id
mkEnumValue :: (EnumValue, EnumValueInfo) -> (P.Definition P.EnumValueInfo, PGScalarValue)
mkEnumValue (EnumValue value, EnumValueInfo description) =
( P.mkDefinition value (G.Description <$> description) P.EnumValueInfo,
( P.Definition value (G.Description <$> description) P.EnumValueInfo,
PGValText $ G.unName value
)
@ -283,7 +283,7 @@ orderByOperators =
)
]
where
define name desc = P.mkDefinition name (Just desc) P.EnumValueInfo
define name desc = P.Definition name (Just desc) P.EnumValueInfo
comparisonExps ::
forall pgKind m n r.

View File

@ -9,18 +9,15 @@ where
import Data.Has
import Data.HashMap.Strict qualified as Map
import Data.Text.Extended
import Data.Tuple.Extended
import GHC.Stack (HasCallStack)
import Hasura.Base.Error
import Hasura.GraphQL.Parser.Class.Parse
import Hasura.GraphQL.Parser.Internal.Types
import Hasura.GraphQL.Parser.Schema (HasDefinition)
import Hasura.Prelude
import Hasura.RQL.Types.Backend
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.Source
import Hasura.RQL.Types.Table
-- import Hasura.SQL.Backend
import Hasura.Session (RoleName)
import Language.Haskell.TH qualified as TH
import Type.Reflection (Typeable)
@ -103,8 +100,8 @@ class (Monad m, MonadParse n) => MonadSchema n m | m -> n where
-- 'memoizeOn' :: 'MonadSchema' n m => 'TH.Name' -> a -> m (FieldParser n b) -> m (FieldParser n b)
-- @
memoizeOn ::
forall p d a b.
(HasCallStack, HasDefinition (p n b) d, Ord a, Typeable p, Typeable a, Typeable b) =>
forall p a b.
(HasCallStack, Ord a, Typeable p, Typeable a, Typeable b) =>
-- | A unique name used to identify the function being memoized. There isn't
-- really any metaprogramming going on here, we just use a Template Haskell
-- 'TH.Name' as a convenient source for a static, unique identifier.
@ -152,46 +149,3 @@ memoize ::
(a -> m (Parser k n b)) ->
(a -> m (Parser k n b))
memoize name f a = memoizeOn name a (f a)
memoize2 ::
(HasCallStack, MonadSchema n m, Ord a, Ord b, Typeable a, Typeable b, Typeable c, Typeable k) =>
TH.Name ->
(a -> b -> m (Parser k n c)) ->
(a -> b -> m (Parser k n c))
memoize2 name = curry . memoize name . uncurry
memoize3 ::
( HasCallStack,
MonadSchema n m,
Ord a,
Ord b,
Ord c,
Typeable a,
Typeable b,
Typeable c,
Typeable d,
Typeable k
) =>
TH.Name ->
(a -> b -> c -> m (Parser k n d)) ->
(a -> b -> c -> m (Parser k n d))
memoize3 name = curry3 . memoize name . uncurry3
memoize4 ::
( HasCallStack,
MonadSchema n m,
Ord a,
Ord b,
Ord c,
Ord d,
Typeable a,
Typeable b,
Typeable c,
Typeable d,
Typeable e,
Typeable k
) =>
TH.Name ->
(a -> b -> c -> d -> m (Parser k n e)) ->
(a -> b -> c -> d -> m (Parser k n e))
memoize4 name = curry4 . memoize name . uncurry4

View File

@ -188,7 +188,7 @@ field ::
field name description parser = case pType parser of
NonNullable typ ->
InputFieldsParser
{ ifDefinitions = [mkDefinition name description $ IFRequired typ],
{ ifDefinitions = [Definition name description $ IFRequired typ],
ifParser = \values -> withPath (++ [Key (unName name)]) do
value <-
onNothing (M.lookup name values) $
@ -215,7 +215,7 @@ fieldOptional ::
fieldOptional name description parser =
InputFieldsParser
{ ifDefinitions =
[ mkDefinition name description $
[ Definition name description $
IFOptional (nullableType $ pType parser) Nothing
],
ifParser =
@ -240,7 +240,7 @@ fieldWithDefault ::
InputFieldsParser m a
fieldWithDefault name description defaultValue parser =
InputFieldsParser
{ ifDefinitions = [mkDefinition name description $ IFOptional (pType parser) (Just defaultValue)],
{ ifDefinitions = [Definition name description $ IFOptional (pType parser) (Just defaultValue)],
ifParser =
M.lookup name
>>> withPath (++ [Key (unName name)]) . \case
@ -270,7 +270,7 @@ enum name description values =
other -> typeMismatch name "an enum value" other
}
where
schemaType = NonNullable $ TNamed $ mkDefinition name description $ TIEnum (fst <$> values)
schemaType = NonNullable $ TNamed $ Definition name description $ TIEnum (fst <$> values)
valuesMap = M.fromList $ over (traverse . _1) dName $ toList values
validate value =
onNothing (M.lookup value valuesMap) $
@ -314,7 +314,7 @@ object name description parser =
schemaType =
NonNullable $
TNamed $
mkDefinition name description $
Definition name description $
TIInputObject (InputObjectInfo (ifDefinitions parser))
fieldNames = S.fromList (dName <$> ifDefinitions parser)
parseFields fields = do

View File

@ -50,9 +50,6 @@ data FieldParser m a = FieldParser
}
deriving (Functor)
instance HasDefinition (FieldParser m a) FieldInfo where
definitionLens f parser = definitionLens f (fDefinition parser) <&> \fDefinition -> parser {fDefinition}
infixl 1 `bindField`
bindField :: Monad m => FieldParser m a -> (a -> m b) -> FieldParser m b
@ -88,17 +85,17 @@ nullable parser =
-- | Decorate a schema field as NON_NULL
nonNullableField :: forall m a. FieldParser m a -> FieldParser m a
nonNullableField (FieldParser (Definition n u d (FieldInfo as t)) p) =
FieldParser (Definition n u d (FieldInfo as (nonNullableType t))) p
nonNullableField (FieldParser (Definition n d (FieldInfo as t)) p) =
FieldParser (Definition n d (FieldInfo as (nonNullableType t))) p
-- | Decorate a schema field as NULL
nullableField :: forall m a. FieldParser m a -> FieldParser m a
nullableField (FieldParser (Definition n u d (FieldInfo as t)) p) =
FieldParser (Definition n u d (FieldInfo as (nullableType t))) p
nullableField (FieldParser (Definition n d (FieldInfo as t)) p) =
FieldParser (Definition n d (FieldInfo as (nullableType t))) p
multipleField :: forall m a. FieldParser m a -> FieldParser m a
multipleField (FieldParser (Definition n u d (FieldInfo as t)) p) =
FieldParser (Definition n u d (FieldInfo as (Nullable (TList t)))) p
multipleField (FieldParser (Definition n d (FieldInfo as t)) p) =
FieldParser (Definition n d (FieldInfo as (Nullable (TList t)))) p
-- | Decorate a schema field with reference to given @'G.GType'
wrapFieldParser :: forall m a. G.GType -> FieldParser m a -> FieldParser m a
@ -164,7 +161,7 @@ selectionSetObject name description parsers implementsInterfaces =
{ pType =
Nullable $
TNamed $
mkDefinition name description $
Definition name description $
TIObject $ ObjectInfo (map fDefinition parsers) interfaces,
pParser = \input -> withPath (++ [Key "selectionSet"]) do
-- Not all fields have a selection set, but if they have one, it
@ -221,7 +218,7 @@ selectionSetInterface name description fields objectImplementations =
{ pType =
Nullable $
TNamed $
mkDefinition name description $
Definition name description $
TIInterface $ InterfaceInfo (map fDefinition fields) objects,
pParser = \input -> for objectImplementations (($ input) . pParser)
-- Note: This is somewhat suboptimal, since it parses a query against every
@ -249,7 +246,7 @@ selectionSetUnion name description objectImplementations =
{ pType =
Nullable $
TNamed $
mkDefinition name description $
Definition name description $
TIUnion $ UnionInfo objects,
pParser = \input -> for objectImplementations (($ input) . pParser)
}
@ -289,7 +286,7 @@ rawSelection ::
rawSelection name description argumentsParser resultParser =
FieldParser
{ fDefinition =
mkDefinition name description $
Definition name description $
FieldInfo (ifDefinitions argumentsParser) (pType resultParser),
fParser = \Field {_fAlias, _fArguments, _fSelectionSet} -> do
unless (null _fSelectionSet) $
@ -341,7 +338,7 @@ rawSubselection ::
rawSubselection name description argumentsParser bodyParser =
FieldParser
{ fDefinition =
mkDefinition name description $
Definition name description $
FieldInfo (ifDefinitions argumentsParser) (pType bodyParser),
fParser = \Field {_fAlias, _fArguments, _fSelectionSet} -> do
-- check for extraneous arguments here, since the InputFieldsParser just

View File

@ -135,7 +135,7 @@ unsafeRawScalar ::
Parser 'Both n (InputValue Variable)
unsafeRawScalar name description =
Parser
{ pType = NonNullable $ TNamed $ mkDefinition name description TIScalar,
{ pType = NonNullable $ TNamed $ Definition name description TIScalar,
pParser = pure
}
@ -148,7 +148,7 @@ jsonScalar name description =
pParser = valueToJSON $ toGraphQLType schemaType
}
where
schemaType = NonNullable $ TNamed $ mkDefinition name description TIScalar
schemaType = NonNullable $ TNamed $ Definition name description TIScalar
--------------------------------------------------------------------------------
-- Local helpers
@ -165,7 +165,7 @@ mkScalar name description parser =
pParser = peelVariable (toGraphQLType schemaType) >=> parser
}
where
schemaType = NonNullable $ TNamed $ mkDefinition name description TIScalar
schemaType = NonNullable $ TNamed $ Definition name description TIScalar
convertWith ::
MonadParse m =>

View File

@ -61,9 +61,6 @@ data Parser k m a = Parser
instance HasName (Parser k m a) where
getName = getName . pType
instance HasDefinition (Parser k m a) (TypeInfo k) where
definitionLens f parser = definitionLens f (pType parser) <&> \pType -> parser {pType}
type family ParserInput k where
-- see Note [The 'Both kind] in Hasura.GraphQL.Parser.Schema
ParserInput 'Both = InputValue Variable

View File

@ -9,7 +9,6 @@ module Hasura.GraphQL.Parser.Monad
)
where
import Control.Monad.Unique
import Control.Monad.Validate
import Data.Dependent.Map (DMap)
import Data.Dependent.Map qualified as DM
@ -21,7 +20,6 @@ import Data.Proxy (Proxy (..))
import Data.Sequence.NonEmpty qualified as NE
import Hasura.Base.Error
import Hasura.GraphQL.Parser.Class
import Hasura.GraphQL.Parser.Schema
import Hasura.Prelude
import Language.Haskell.TH qualified as TH
import System.IO.Unsafe (unsafeInterleaveIO)
@ -40,7 +38,7 @@ runSchemaT = flip evalStateT mempty . unSchemaT
-- | see Note [SchemaT requires MonadIO]
instance
(MonadIO m, MonadUnique m, MonadParse n) =>
(MonadIO m, MonadParse n) =>
MonadSchema n (SchemaT n m)
where
memoizeOn name key buildParser = SchemaT do
@ -84,8 +82,7 @@ instance
]
put $! DM.insert parserId parserById parsersById
unique <- newUnique
parser <- addDefinitionUnique unique <$> unSchemaT buildParser
parser <- unSchemaT buildParser
liftIO $ writeIORef cell (Just parser)
pure parser
@ -96,7 +93,7 @@ instance
deriving instance Monad m => MonadReader a (SchemaT n (ReaderT a m))
instance
(MonadIO m, MonadUnique m, MonadParse n) =>
(MonadIO m, MonadParse n) =>
MonadSchema n (ReaderT a (SchemaT n m))
where
memoizeOn name key = mapReaderT (memoizeOn name key)

View File

@ -12,6 +12,7 @@ module Hasura.GraphQL.Parser.Schema
Type (..),
NonNullableType (..),
TypeInfo (..),
getTypeInfo,
SomeTypeInfo (..),
eqType,
eqNonNullableType,
@ -32,9 +33,6 @@ module Hasura.GraphQL.Parser.Schema
-- * Definitions
Definition (..),
mkDefinition,
addDefinitionUnique,
HasDefinition (..),
-- * Schemas
Schema (..),
@ -53,7 +51,6 @@ module Hasura.GraphQL.Parser.Schema
where
import Control.Lens.Extended
import Control.Monad.Unique
import Data.Aeson qualified as J
import Data.Functor.Classes
import Data.Has
@ -324,10 +321,6 @@ eqType _ _ = False
instance HasName (Type k) where
getName = getName . discardNullability
instance HasDefinition (Type k) (TypeInfo k) where
definitionLens f (NonNullable t) = NonNullable <$> definitionLens f t
definitionLens f (Nullable t) = Nullable <$> definitionLens f t
discardNullability :: Type k -> NonNullableType k
discardNullability (NonNullable t) = t
discardNullability (Nullable t) = t
@ -367,10 +360,6 @@ instance HasName (NonNullableType k) where
getName (TNamed definition) = getName definition
getName (TList t) = getName t
instance HasDefinition (NonNullableType k) (TypeInfo k) where
definitionLens f (TNamed definition) = TNamed <$> f definition
definitionLens f (TList t) = TList <$> definitionLens f t
{- Note [The interfaces story]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
GraphQL interfaces are not conceptually complicated, but they pose some
@ -541,19 +530,20 @@ eqTypeInfo (TIUnion (UnionInfo objects1)) (TIUnion (UnionInfo objects2)) =
Set.fromList (fmap dName objects1) == Set.fromList (fmap dName objects2)
eqTypeInfo _ _ = False
getObjectInfo :: Type k -> Maybe (Definition ObjectInfo)
getObjectInfo = traverse getTI . (^. definitionLens)
where
getTI :: TypeInfo k -> Maybe ObjectInfo
getTI (TIObject oi) = Just oi
getTI _ = Nothing
getTypeInfo :: Type k -> Definition (TypeInfo k)
getTypeInfo t = case discardNullability t of
TNamed d -> d
TList t' -> getTypeInfo t'
getInterfaceInfo :: Type 'Output -> Maybe (Definition InterfaceInfo)
getInterfaceInfo = traverse getTI . (^. definitionLens)
where
getTI :: TypeInfo 'Output -> Maybe InterfaceInfo
getTI (TIInterface ii) = Just ii
getTI _ = Nothing
getObjectInfo :: Type k -> Maybe (Definition ObjectInfo)
getObjectInfo t = case getTypeInfo t of
d@Definition {dInfo = TIObject oi} -> Just d {dInfo = oi}
_ -> Nothing
getInterfaceInfo :: Type k -> Maybe (Definition InterfaceInfo)
getInterfaceInfo t = case getTypeInfo t of
d@Definition {dInfo = TIInterface ii} -> Just d {dInfo = ii}
_ -> Nothing
data SomeTypeInfo = forall k. SomeTypeInfo (TypeInfo k)
@ -562,13 +552,6 @@ instance Eq SomeTypeInfo where
data Definition a = Definition
{ dName :: Name,
-- | A unique identifier used to break cycles in mutually-recursive type
-- definitions. If two 'Definition's have the same 'Unique', they can be
-- assumed to be identical. Note that the inverse is /not/ true: two
-- definitions with different 'Unique's might still be otherwise identical.
--
-- Also see Note [Tying the knot] in Hasura.GraphQL.Parser.Class.
dUnique :: Maybe Unique,
dDescription :: Maybe Description,
-- | Lazy to allow mutually-recursive type definitions.
dInfo :: ~a
@ -579,39 +562,19 @@ instance Hashable a => Hashable (Definition a) where
hashWithSalt salt Definition {..} =
salt `hashWithSalt` dName `hashWithSalt` dInfo
mkDefinition :: Name -> Maybe Description -> a -> Definition a
mkDefinition name description info = Definition name Nothing description info
instance Eq a => Eq (Definition a) where
(==) = eq1
instance Eq1 Definition where
liftEq
eq
(Definition name1 maybeUnique1 _ info1)
(Definition name2 maybeUnique2 _ info2)
| Just unique1 <- maybeUnique1,
Just unique2 <- maybeUnique2,
unique1 == unique2 =
True
| otherwise =
name1 == name2 && eq info1 info2
(Definition name1 _ info1)
(Definition name2 _ info2) =
name1 == name2 && eq info1 info2
instance HasName (Definition a) where
getName = dName
class HasDefinition s a | s -> a where
definitionLens :: Lens' s (Definition a)
instance HasDefinition (Definition a) a where
definitionLens = id
-- | Adds a 'Unique' to a 'Definition' that does not yet have one. If the
-- definition already has a 'Unique', the existing 'Unique' is kept.
addDefinitionUnique :: HasDefinition s a => Unique -> s -> s
addDefinitionUnique unique = over definitionLens \definition ->
definition {dUnique = dUnique definition <|> Just unique}
-- | Enum values have no extra information except for the information common to
-- all definitions, so this is just a placeholder for use as @'Definition'
-- 'EnumValueInfo'@.

View File

@ -12,7 +12,6 @@ where
import Control.Arrow.Extended (left)
import Control.Exception (try)
import Control.Lens (set, (^.))
import Control.Monad.Unique
import Data.Aeson ((.:), (.:?))
import Data.Aeson qualified as J
import Data.Aeson.Types qualified as J
@ -142,7 +141,7 @@ validateSchemaCustomizationsDistinct remoteSchemaCustomizer (RemoteSchemaIntrosp
-- and also is called by schema cache rebuilding code in "Hasura.RQL.DDL.Schema.Cache".
fetchRemoteSchema ::
forall m.
(MonadIO m, MonadUnique m, MonadError QErr m, Tracing.MonadTrace m) =>
(MonadIO m, MonadError QErr m, Tracing.MonadTrace m) =>
Env.Environment ->
HTTP.Manager ->
RemoteSchemaName ->

View File

@ -6,7 +6,6 @@ module Hasura.GraphQL.Schema
where
import Control.Lens.Extended
import Control.Monad.Unique
import Data.Aeson.Ordered qualified as JO
import Data.Has
import Data.HashMap.Strict qualified as Map
@ -51,7 +50,6 @@ buildGQLContext ::
forall m.
( MonadError QErr m,
MonadIO m,
MonadUnique m,
HasServerConfigCtx m
) =>
GraphQLQueryType ->
@ -102,7 +100,7 @@ buildGQLContext queryType sources allRemoteSchemas allActions nonObjectCustomTyp
queryFieldNames :: [G.Name] <-
case P.discardNullability $ P.parserType $ fst adminHasuraDBContext of
-- It really ought to be this case; anything else is a programming error.
P.TNamed (P.Definition _ _ _ (P.TIObject (P.ObjectInfo rootFields _interfaces))) ->
P.TNamed (P.Definition _ _ (P.TIObject (P.ObjectInfo rootFields _interfaces))) ->
pure $ fmap P.dName rootFields
_ -> throw500 "We encountered an root query of unexpected GraphQL type. It should be an object type."
let mutationFieldNames :: [G.Name]
@ -158,7 +156,7 @@ customizeFields SourceCustomization {..} =
buildRoleContext ::
forall m.
(MonadError QErr m, MonadIO m, MonadUnique m) =>
(MonadError QErr m, MonadIO m) =>
(SQLGenCtx, GraphQLQueryType, FunctionPermissionsCtx) ->
SourceCache ->
RemoteSchemaCache ->
@ -259,7 +257,7 @@ buildRoleContext
buildRelayRoleContext ::
forall m.
(MonadError QErr m, MonadIO m, MonadUnique m) =>
(MonadError QErr m, MonadIO m) =>
(SQLGenCtx, GraphQLQueryType, FunctionPermissionsCtx) ->
SourceCache ->
[ActionInfo] ->
@ -344,7 +342,7 @@ buildRelayRoleContext
buildFullestDBSchema ::
forall m.
(MonadError QErr m, MonadIO m, MonadUnique m) =>
(MonadError QErr m, MonadIO m) =>
QueryContext ->
SourceCache ->
[ActionInfo] ->
@ -403,8 +401,7 @@ buildFullestDBSchema queryContext sources allActionInfos nonObjectCustomTypes =
unauthenticatedContext ::
forall m.
( MonadError QErr m,
MonadIO m,
MonadUnique m
MonadIO m
) =>
[P.FieldParser (P.ParseT Identity) (NamespacedField RemoteField)] ->
[P.FieldParser (P.ParseT Identity) (NamespacedField RemoteField)] ->
@ -426,7 +423,7 @@ unauthenticatedContext adminQueryRemotes adminMutationRemotes remoteSchemaPermsC
buildRoleBasedRemoteSchemaParser ::
forall m.
(MonadError QErr m, MonadUnique m, MonadIO m) =>
(MonadError QErr m, MonadIO m) =>
RoleName ->
RemoteSchemaCache ->
m [(RemoteSchemaName, RemoteRelationshipQueryContext)]

View File

@ -360,7 +360,7 @@ customScalarParser = \case
| _stdName == boolScalar -> J.toJSON <$> P.boolean
| otherwise -> P.jsonScalar _stdName _stdDescription
ASTReusedScalar name pgScalarType ->
let schemaType = P.NonNullable $ P.TNamed $ P.mkDefinition name Nothing P.TIScalar
let schemaType = P.NonNullable $ P.TNamed $ P.Definition name Nothing P.TIScalar
in P.Parser
{ pType = schemaType,
pParser =
@ -381,7 +381,7 @@ customEnumParser (EnumTypeDefinition typeName description enumValues) =
enumValues <&> \enumValue ->
let valueName = G.unEnumValue $ _evdValue enumValue
in (,J.toJSON valueName) $
P.mkDefinition
P.Definition
valueName
(_evdDescription enumValue)
P.EnumValueInfo

View File

@ -155,8 +155,8 @@ typeIntrospection fakeSchema = do
name'printer <- P.subselection $$(G.litName "__type") Nothing nameArg typeField
return $ case Map.lookup (fst name'printer) (sTypes fakeSchema) of
Nothing -> J.Null
Just (P.Definition n u d (P.SomeTypeInfo i)) ->
snd name'printer (SomeType (P.Nullable (P.TNamed (P.Definition n u d i))))
Just (P.Definition n d (P.SomeTypeInfo i)) ->
snd name'printer (SomeType (P.Nullable (P.TNamed (P.Definition n d i))))
-- | Generate a __schema introspection parser.
schema ::
@ -215,17 +215,17 @@ typeField =
J.String "NON_NULL"
P.Nullable (P.TList _) ->
J.String "LIST"
P.Nullable (P.TNamed (P.Definition _ _ _ P.TIScalar)) ->
P.Nullable (P.TNamed (P.Definition _ _ P.TIScalar)) ->
J.String "SCALAR"
P.Nullable (P.TNamed (P.Definition _ _ _ (P.TIEnum _))) ->
P.Nullable (P.TNamed (P.Definition _ _ (P.TIEnum _))) ->
J.String "ENUM"
P.Nullable (P.TNamed (P.Definition _ _ _ (P.TIInputObject _))) ->
P.Nullable (P.TNamed (P.Definition _ _ (P.TIInputObject _))) ->
J.String "INPUT_OBJECT"
P.Nullable (P.TNamed (P.Definition _ _ _ (P.TIObject _))) ->
P.Nullable (P.TNamed (P.Definition _ _ (P.TIObject _))) ->
J.String "OBJECT"
P.Nullable (P.TNamed (P.Definition _ _ _ (P.TIInterface _))) ->
P.Nullable (P.TNamed (P.Definition _ _ (P.TIInterface _))) ->
J.String "INTERFACE"
P.Nullable (P.TNamed (P.Definition _ _ _ (P.TIUnion _))) ->
P.Nullable (P.TNamed (P.Definition _ _ (P.TIUnion _))) ->
J.String "UNION"
name :: FieldParser n (SomeType -> J.Value)
name =
@ -233,7 +233,7 @@ typeField =
$> \case
SomeType tp ->
case tp of
P.Nullable (P.TNamed (P.Definition name' _ _ _)) ->
P.Nullable (P.TNamed (P.Definition name' _ _)) ->
nameAsJSON name'
_ -> J.Null
description :: FieldParser n (SomeType -> J.Value)
@ -242,7 +242,7 @@ typeField =
$> \case
SomeType tp ->
case P.discardNullability tp of
P.TNamed (P.Definition _ _ (Just desc) _) ->
P.TNamed (P.Definition _ (Just desc) _) ->
J.String (G.unDescription desc)
_ -> J.Null
fields :: FieldParser n (SomeType -> J.Value)
@ -253,9 +253,9 @@ typeField =
\case
SomeType tp ->
case tp of
P.Nullable (P.TNamed (P.Definition _ _ _ (P.TIObject (P.ObjectInfo fields' _interfaces')))) ->
P.Nullable (P.TNamed (P.Definition _ _ (P.TIObject (P.ObjectInfo fields' _interfaces')))) ->
J.Array $ V.fromList $ snd includeDeprecated'printer <$> sortOn P.dName fields'
P.Nullable (P.TNamed (P.Definition _ _ _ (P.TIInterface (P.InterfaceInfo fields' _objects')))) ->
P.Nullable (P.TNamed (P.Definition _ _ (P.TIInterface (P.InterfaceInfo fields' _objects')))) ->
J.Array $ V.fromList $ snd includeDeprecated'printer <$> sortOn P.dName fields'
_ -> J.Null
interfaces :: FieldParser n (SomeType -> J.Value)
@ -265,7 +265,7 @@ typeField =
\case
SomeType tp ->
case tp of
P.Nullable (P.TNamed (P.Definition _ _ _ (P.TIObject (P.ObjectInfo _fields' interfaces')))) ->
P.Nullable (P.TNamed (P.Definition _ _ (P.TIObject (P.ObjectInfo _fields' interfaces')))) ->
J.Array $ V.fromList $ printer . SomeType . P.Nullable . P.TNamed . fmap P.TIInterface <$> sortOn P.dName interfaces'
_ -> J.Null
possibleTypes :: FieldParser n (SomeType -> J.Value)
@ -275,9 +275,9 @@ typeField =
\case
SomeType tp ->
case tp of
P.Nullable (P.TNamed (P.Definition _ _ _ (P.TIInterface (P.InterfaceInfo _fields' objects')))) ->
P.Nullable (P.TNamed (P.Definition _ _ (P.TIInterface (P.InterfaceInfo _fields' objects')))) ->
J.Array $ V.fromList $ printer . SomeType . P.Nullable . P.TNamed . fmap P.TIObject <$> sortOn P.dName objects'
P.Nullable (P.TNamed (P.Definition _ _ _ (P.TIUnion (P.UnionInfo objects')))) ->
P.Nullable (P.TNamed (P.Definition _ _ (P.TIUnion (P.UnionInfo objects')))) ->
J.Array $ V.fromList $ printer . SomeType . P.Nullable . P.TNamed . fmap P.TIObject <$> sortOn P.dName objects'
_ -> J.Null
enumValues :: FieldParser n (SomeType -> J.Value)
@ -288,7 +288,7 @@ typeField =
\case
SomeType tp ->
case tp of
P.Nullable (P.TNamed (P.Definition _ _ _ (P.TIEnum vals))) ->
P.Nullable (P.TNamed (P.Definition _ _ (P.TIEnum vals))) ->
J.Array $ V.fromList $ fmap (snd includeDeprecated'printer) $ sortOn P.dName $ toList vals
_ -> J.Null
inputFields :: FieldParser n (SomeType -> J.Value)
@ -298,7 +298,7 @@ typeField =
\case
SomeType tp ->
case tp of
P.Nullable (P.TNamed (P.Definition _ _ _ (P.TIInputObject (P.InputObjectInfo fieldDefs)))) ->
P.Nullable (P.TNamed (P.Definition _ _ (P.TIInputObject (P.InputObjectInfo fieldDefs)))) ->
J.Array $ V.fromList $ map printer $ sortOn P.dName fieldDefs
_ -> J.Null
ofType :: FieldParser n (SomeType -> J.Value)
@ -440,7 +440,7 @@ typeKind =
]
)
where
mkDefinition name = (P.Definition name Nothing Nothing P.EnumValueInfo, ())
mkDefinition name = (P.Definition name Nothing P.EnumValueInfo, ())
{-
type __Field {
@ -581,8 +581,8 @@ schemaSet fakeSchema =
schemaTypeToSomeType ::
P.Definition P.SomeTypeInfo ->
SomeType
schemaTypeToSomeType (P.Definition n u d (P.SomeTypeInfo i)) =
SomeType $ P.Nullable $ P.TNamed (P.Definition n u d i)
schemaTypeToSomeType (P.Definition n d (P.SomeTypeInfo i)) =
SomeType $ P.Nullable $ P.TNamed (P.Definition n d i)
queryType :: FieldParser n J.Value
queryType = do
printer <- P.subselection_ $$(G.litName "queryType") Nothing typeField

View File

@ -384,7 +384,7 @@ conflictConstraint constraints sourceName tableInfo =
constraintEnumValues <- for constraints \constraint -> do
name <- textToName $ toTxt $ _cName constraint
pure
( P.mkDefinition name (Just "unique or primary key constraint") P.EnumValueInfo,
( P.Definition name (Just "unique or primary key constraint") P.EnumValueInfo,
_cName constraint
)
enumName <- P.mkTypename $ tableGQLName <> $$(G.litName "_constraint")

View File

@ -9,7 +9,6 @@ module Hasura.GraphQL.Schema.Remote
)
where
import Control.Monad.Unique
import Data.Has
import Data.HashMap.Strict qualified as Map
import Data.HashMap.Strict.InsOrd qualified as OMap
@ -35,7 +34,7 @@ import Language.GraphQL.Draft.Syntax qualified as G
buildRemoteParser ::
forall m n.
(MonadIO m, MonadUnique m, MonadError QErr m, MonadParse n) =>
(MonadIO m, MonadError QErr m, MonadParse n) =>
IntrospectionResult ->
RemoteSchemaInfo ->
m (ParsedIntrospectionG n)
@ -350,7 +349,7 @@ remoteFieldScalarParser customizeTypename (G.ScalarTypeDefinition description na
}
where
customizedTypename = runMkTypename customizeTypename name
schemaType = NonNullable $ TNamed $ mkDefinition customizedTypename description TIScalar
schemaType = NonNullable $ TNamed $ Definition customizedTypename description TIScalar
gType = toGraphQLType schemaType
mkRemoteGType = \case
@ -365,7 +364,7 @@ remoteFieldEnumParser ::
remoteFieldEnumParser customizeTypename (G.EnumTypeDefinition desc name _directives valueDefns) =
let enumValDefns =
valueDefns <&> \(G.EnumValueDefinition enumDesc enumName _) ->
( mkDefinition (G.unEnumValue enumName) enumDesc P.EnumValueInfo,
( Definition (G.unEnumValue enumName) enumDesc P.EnumValueInfo,
G.VEnum enumName
)
in fmap (Altered False,) $ P.enum (runMkTypename customizeTypename name) desc $ NE.fromList enumValDefns
@ -841,12 +840,12 @@ remoteFieldFromDefinition ::
m (FieldParser n (G.Field G.NoFragments RemoteSchemaVariable))
remoteFieldFromDefinition schemaDoc parentTypeName (G.FieldDefinition description name argsDefinition gType _) = do
let addNullableList :: FieldParser n a -> FieldParser n a
addNullableList (P.FieldParser (Definition name' un desc (FieldInfo args typ)) parser) =
P.FieldParser (Definition name' un desc (FieldInfo args (Nullable (TList typ)))) parser
addNullableList (P.FieldParser (Definition name' desc (FieldInfo args typ)) parser) =
P.FieldParser (Definition name' desc (FieldInfo args (Nullable (TList typ)))) parser
addNonNullableList :: FieldParser n a -> FieldParser n a
addNonNullableList (P.FieldParser (Definition name' un desc (FieldInfo args typ)) parser) =
P.FieldParser (Definition name' un desc (FieldInfo args (NonNullable (TList typ)))) parser
addNonNullableList (P.FieldParser (Definition name' desc (FieldInfo args typ)) parser) =
P.FieldParser (Definition name' desc (FieldInfo args (NonNullable (TList typ)))) parser
-- TODO add directives, deprecation
convertType :: G.GType -> m (FieldParser n (G.Field G.NoFragments RemoteSchemaVariable))

View File

@ -80,7 +80,7 @@ tableSelectColumnsEnum sourceName tableInfo selectPermissions = do
]
where
define name =
P.mkDefinition name (Just $ G.Description "column name") P.EnumValueInfo
P.Definition name (Just $ G.Description "column name") P.EnumValueInfo
-- | Table update columns enum
--
@ -111,8 +111,8 @@ tableUpdateColumnsEnum tableInfo updatePermissions = do
Just values -> P.enum enumName enumDesc values
Nothing -> P.enum enumName altDesc $ pure (placeholder, Nothing)
where
define name = P.mkDefinition name (Just $ G.Description "column name") P.EnumValueInfo
placeholder = P.mkDefinition @P.EnumValueInfo $$(G.litName "_PLACEHOLDER") (Just $ G.Description "placeholder (do not use)") P.EnumValueInfo
define name = P.Definition name (Just $ G.Description "column name") P.EnumValueInfo
placeholder = P.Definition @P.EnumValueInfo $$(G.litName "_PLACEHOLDER") (Just $ G.Description "placeholder (do not use)") P.EnumValueInfo
tablePermissions ::
forall m n r b.

View File

@ -14,7 +14,6 @@ module Hasura.RQL.DDL.RemoteSchema
where
import Control.Lens ((^.))
import Control.Monad.Unique
import Data.Environment qualified as Env
import Data.HashMap.Strict qualified as Map
import Data.HashMap.Strict.InsOrd qualified as OMap
@ -34,7 +33,6 @@ runAddRemoteSchema ::
( QErrM m,
CacheRWM m,
MonadIO m,
MonadUnique m,
HasHttpManagerM m,
MetadataM m,
Tracing.MonadTrace m
@ -125,7 +123,7 @@ addRemoteSchemaP1 name = do
<> name <<> " already exists"
addRemoteSchemaP2Setup ::
(QErrM m, MonadIO m, MonadUnique m, HasHttpManagerM m, Tracing.MonadTrace m) =>
(QErrM m, MonadIO m, HasHttpManagerM m, Tracing.MonadTrace m) =>
Env.Environment ->
AddRemoteSchemaQuery ->
m RemoteSchemaCtx
@ -213,7 +211,6 @@ runUpdateRemoteSchema ::
( QErrM m,
CacheRWM m,
MonadIO m,
MonadUnique m,
HasHttpManagerM m,
MetadataM m,
Tracing.MonadTrace m

View File

@ -24,7 +24,6 @@ import Control.Arrow.Extended
import Control.Concurrent.Async.Lifted.Safe qualified as LA
import Control.Lens hiding ((.=))
import Control.Monad.Trans.Control (MonadBaseControl)
import Control.Monad.Unique
import Control.Retry qualified as Retry
import Data.Aeson
import Data.Align (align)
@ -147,7 +146,6 @@ newtype CacheRWT m a
Applicative,
Monad,
MonadIO,
MonadUnique,
MonadReader r,
MonadError e,
MonadTx,
@ -227,7 +225,6 @@ buildSchemaCacheRule ::
Inc.ArrowDistribute arr,
Inc.ArrowCache m arr,
MonadIO m,
MonadUnique m,
MonadBaseControl IO m,
MonadError QErr m,
MonadReader BuildReason m,
@ -560,7 +557,6 @@ buildSchemaCacheRule logger env = proc (metadata, invalidationKeys) -> do
Inc.ArrowCache m arr,
ArrowWriter (Seq CollectedInfo) arr,
MonadIO m,
MonadUnique m,
MonadError QErr m,
MonadReader BuildReason m,
MonadBaseControl IO m,
@ -1134,7 +1130,6 @@ buildSchemaCacheRule logger env = proc (metadata, invalidationKeys) -> do
ArrowWriter (Seq CollectedInfo) arr,
Inc.ArrowCache m arr,
MonadIO m,
MonadUnique m,
HasHttpManagerM m
) =>
( Inc.Dependency (HashMap RemoteSchemaName Inc.InvalidationKey),

View File

@ -30,7 +30,6 @@ module Hasura.RQL.Types
where
import Control.Lens (Traversal', at, preview, (^.))
import Control.Monad.Unique
import Data.HashMap.Strict qualified as M
import Data.Text.Extended
import Database.PG.Query qualified as Q
@ -242,7 +241,6 @@ newtype HasSystemDefinedT m a = HasSystemDefinedT {unHasSystemDefinedT :: Reader
Monad,
MonadTrans,
MonadIO,
MonadUnique,
MonadError e,
MonadTx,
HasHttpManagerM,

View File

@ -8,7 +8,6 @@ module Hasura.RQL.Types.Run
where
import Control.Monad.Trans.Control (MonadBaseControl)
import Control.Monad.Unique
import Hasura.Base.Error
import Hasura.Metadata.Class
import Hasura.Prelude
@ -35,9 +34,6 @@ newtype RunT m a = RunT {unRunT :: ReaderT RunCtx (ExceptT QErr m) a}
Tracing.MonadTrace
)
instance (MonadIO m) => MonadUnique (RunT m) where
newUnique = liftIO newUnique
instance (MonadMetadataStorage m) => MonadMetadataStorageQueryAPI (RunT m)
deriving instance (MonadIO m, MonadBase IO m) => MonadBase IO (RunT m)

View File

@ -28,7 +28,6 @@ import Control.Arrow.Extended
import Control.Lens
import Control.Monad.Morph
import Control.Monad.Trans.Control (MonadBaseControl)
import Control.Monad.Unique
import Data.Aeson (Value, toJSON)
import Data.Aeson.TH
import Data.HashMap.Strict.Extended qualified as M
@ -210,7 +209,6 @@ newtype MetadataT m a = MetadataT {unMetadataT :: StateT Metadata m a}
Monad,
MonadTrans,
MonadIO,
MonadUnique,
MonadReader r,
MonadError e,
MonadTx,

View File

@ -9,7 +9,6 @@ module Hasura.Server.API.Metadata
where
import Control.Monad.Trans.Control (MonadBaseControl)
import Control.Monad.Unique
import Data.Aeson
import Data.Aeson.Casing
import Data.Aeson.Types qualified as A
@ -355,7 +354,6 @@ runMetadataQueryM ::
CacheRWM m,
Tracing.MonadTrace m,
UserInfoM m,
MonadUnique m,
HTTP.HasHttpManagerM m,
MetadataM m,
MonadMetadataStorageQueryAPI m,
@ -379,7 +377,6 @@ runMetadataQueryV1M ::
CacheRWM m,
Tracing.MonadTrace m,
UserInfoM m,
MonadUnique m,
HTTP.HasHttpManagerM m,
MetadataM m,
MonadMetadataStorageQueryAPI m,

View File

@ -8,7 +8,6 @@ module Hasura.Server.API.Query
where
import Control.Monad.Trans.Control (MonadBaseControl)
import Control.Monad.Unique
import Data.Aeson
import Data.Aeson.Casing
import Data.Aeson.TH
@ -288,7 +287,6 @@ runQueryM ::
UserInfoM m,
MonadBaseControl IO m,
MonadIO m,
MonadUnique m,
HasHttpManagerM m,
HasServerConfigCtx m,
Tracing.MonadTrace m,

View File

@ -24,7 +24,6 @@ import Control.Lens (over, view, (^?))
import Control.Monad.Catch (MonadCatch, MonadMask, MonadThrow)
import Control.Monad.Morph
import Control.Monad.Trans.Control
import Control.Monad.Unique
import Data.Aeson qualified as J
import Data.Aeson.Lens qualified as JL
import Data.Binary qualified as Bin
@ -84,7 +83,7 @@ data TraceContext = TraceContext
-- | The 'TraceT' monad transformer adds the ability to keep track of
-- the current trace context.
newtype TraceT m a = TraceT {unTraceT :: ReaderT (TraceContext, Reporter) (WriterT TracingMetadata m) a}
deriving (Functor, Applicative, Monad, MonadIO, MonadUnique, MonadMask, MonadCatch, MonadThrow)
deriving (Functor, Applicative, Monad, MonadIO, MonadMask, MonadCatch, MonadThrow)
instance MonadTrans TraceT where
lift = TraceT . lift . lift

View File

@ -44,12 +44,12 @@ fakeInputFieldValue = \case
fromNNT :: forall k. ('Input <: k) => NonNullableType k -> G.Value Variable
fromNNT = \case
TList t -> G.VList [fromT t, fromT t]
TNamed (Definition name _ _ info) -> case info of
TNamed (Definition name _ info) -> case info of
TIScalar -> fakeScalar name
TIEnum ei -> G.VEnum $ G.EnumValue $ dName $ NE.head ei
TIInputObject (InputObjectInfo oi) -> G.VObject $
M.fromList $ do
Definition fieldName _ _ fieldInfo <- oi
Definition fieldName _ fieldInfo <- oi
pure (fieldName, fakeInputFieldValue fieldInfo)
_ -> error "impossible"
@ -57,5 +57,5 @@ fakeDirective :: DirectiveInfo -> G.Directive Variable
fakeDirective DirectiveInfo {..} =
G.Directive diName $
M.fromList $
diArguments <&> \(Definition argName _ _ argInfo) ->
diArguments <&> \(Definition argName _ argInfo) ->
(argName, fakeInputFieldValue argInfo)

View File

@ -5,7 +5,6 @@ module Hasura.Server.MigrateSpec (CacheRefT (..), spec) where
import Control.Concurrent.MVar.Lifted
import Control.Monad.Morph
import Control.Monad.Trans.Control (MonadBaseControl)
import Control.Monad.Unique
import Control.Natural ((:~>) (..))
import Data.Aeson (encode)
import Data.ByteString.Lazy.UTF8 qualified as LBS
@ -43,7 +42,6 @@ newtype CacheRefT m a = CacheRefT {runCacheRefT :: MVar RebuildableSchemaCache -
MonadBase b,
MonadBaseControl b,
MonadTx,
MonadUnique,
UserInfoM,
HTTP.HasHttpManagerM,
HasServerConfigCtx,