server: support for custom directives

Co-authored-by: Aravind K P <8335904+scriptonist@users.noreply.github.com>
GitOrigin-RevId: f11b3b2e964af4860c3bb0fd9efec6be54c2e88b
This commit is contained in:
Antoine Leblanc 2021-05-20 11:03:02 +01:00 committed by hasura-bot
parent 9e3f5a9f01
commit 5238bb8011
26 changed files with 1486 additions and 939 deletions

View File

@ -2,11 +2,6 @@
## Next release
### Bug fixes and improvements
(Add entries below in the order of: server, console, cli, docs, others)
### Breaking Changes
- In this release, the name of the computed field argument has changed from `<function_name>_args` to
@ -16,6 +11,8 @@
### Bug fixes and improvements
- server: fix a bug where `@skip` and `@include` were not allowed on the same field
- server: properly reject queries containing unknown or misplaced directives
- server: fix bigint overflow, incorrect geojson format in event trigger payload (fix #3697) (fix #2368)
- server: fix introspection output not being consistently ordered
- server: forward the x-request-id header when generated by graphql-engine (instead of being user-provided) (fix #6654)

File diff suppressed because it is too large Load Diff

View File

@ -112,6 +112,7 @@ library
, containers
, deepseq
, dependent-map >=0.4 && <0.5
, dependent-sum
, exceptions
, safe-exceptions
, fast-logger
@ -538,6 +539,7 @@ library
, Hasura.GraphQL.Parser.Class.Parse
, Hasura.GraphQL.Parser.Collect
, Hasura.GraphQL.Parser.Column
, Hasura.GraphQL.Parser.Directives
, Hasura.GraphQL.Parser.Internal.Parser
, Hasura.GraphQL.Parser.Internal.Types
, Hasura.GraphQL.Parser.Monad
@ -610,17 +612,22 @@ test-suite graphql-engine-tests
, bytestring
, containers
, cron
, dependent-map
, dependent-sum
, graphql-engine
, graphql-parser
, hspec >=2.6.1 && <3
, hspec-core >=2.6.1 && <3
, hspec-expectations
, hspec-expectations-lifted
, http-client
, http-types
, http-client-tls
, http-types
, jose
, kan-extensions
, lens
, lifted-base
, kan-extensions
, mmorph
, monad-control
, mtl
, natural-transformation >=0.4 && <0.5
@ -628,14 +635,12 @@ test-suite graphql-engine-tests
, pg-client
, process
, QuickCheck
, text
, safe
, split
, text
, time
, transformers-base
, unordered-containers
, text
, mmorph
hs-source-dirs: src-test
main-is: Main.hs
other-modules:
@ -646,6 +651,8 @@ test-suite graphql-engine-tests
Data.TimeSpec
Hasura.CacheBoundedSpec
Hasura.EventingSpec
Hasura.GraphQL.Parser.DirectivesTest
Hasura.GraphQL.Parser.TestUtils
Hasura.IncrementalSpec
Hasura.RQL.MetadataSpec
Hasura.RQL.Types.EndpointSpec

View File

@ -53,12 +53,11 @@ bqDBQueryPlan
-> HTTP.Manager
-> [HTTP.Header]
-> UserInfo
-> [G.Directive G.Name]
-> SourceName
-> SourceConfig 'BigQuery
-> QueryDB 'BigQuery (UnpreparedValue 'BigQuery)
-> m ExecutionStep
bqDBQueryPlan _env _manager _reqHeaders userInfo _directives sourceName sourceConfig qrf = do
bqDBQueryPlan _env _manager _reqHeaders userInfo sourceName sourceConfig qrf = do
select <- planNoPlan userInfo qrf
let (!headAndTail, !plannedActionsList) =
DataLoader.runPlan

View File

@ -61,12 +61,11 @@ msDBQueryPlan
-> HTTP.Manager
-> [HTTP.Header]
-> UserInfo
-> [G.Directive G.Name]
-> SourceName
-> SourceConfig 'MSSQL
-> QueryDB 'MSSQL (UnpreparedValue 'MSSQL)
-> m ExecutionStep
msDBQueryPlan _env _manager _reqHeaders userInfo _directives sourceName sourceConfig qrf = do
msDBQueryPlan _env _manager _reqHeaders userInfo sourceName sourceConfig qrf = do
select <- fromSelect <$> planNoPlan userInfo qrf
let queryString = ODBC.renderQuery $ toQueryPretty select
pool = _mscConnectionPool sourceConfig

View File

@ -66,12 +66,11 @@ pgDBQueryPlan
-> HTTP.Manager
-> [HTTP.Header]
-> UserInfo
-> [G.Directive G.Name]
-> SourceName
-> SourceConfig ('Postgres pgKind)
-> QueryDB ('Postgres pgKind) (UnpreparedValue ('Postgres pgKind))
-> m ExecutionStep
pgDBQueryPlan env manager reqHeaders userInfo _directives sourceName sourceConfig qrf = do
pgDBQueryPlan env manager reqHeaders userInfo sourceName sourceConfig qrf = do
(preparedQuery, PlanningSt _ _ planVals expectedVariables) <- flip runStateT initPlanningSt $ traverseQueryDB @('Postgres pgKind) prepareWithPlan qrf
validateSessionVariables expectedVariables $ _uiSession userInfo
let (action, preparedSQL) = mkCurPlanTx env manager reqHeaders userInfo $ irToRootFieldPlan planVals preparedQuery

View File

@ -49,6 +49,8 @@ import qualified Hasura.Tracing as Tracing
import Hasura.Base.Error
import Hasura.EncJSON
import Hasura.GraphQL.Parser.Column (UnpreparedValue)
import Hasura.GraphQL.Parser.Directives
import Hasura.GraphQL.Parser.Monad
import Hasura.GraphQL.Parser.Schema (Variable)
import Hasura.GraphQL.RemoteServer (execRemoteGQ)
import Hasura.GraphQL.Transport.HTTP.Protocol
@ -300,7 +302,7 @@ getResolvedExecPlan
-> (GQLReqUnparsed, GQLReqParsed)
-> m (Telem.CacheHit, (G.SelectionSet G.NoFragments Variable, ResolvedExecutionPlan))
getResolvedExecPlan env logger {- planCache-} userInfo sqlGenCtx
sc scVer queryType httpManager reqHeaders (reqUnparsed, reqParsed) = -- do
sc _scVer queryType httpManager reqHeaders (reqUnparsed, reqParsed) = -- do
-- See Note [Temporarily disabling query plan caching]
-- planM <- liftIO $ EP.getPlan scVer (_uiRole userInfo) opNameM queryStr
@ -316,7 +318,7 @@ getResolvedExecPlan env logger {- planCache-} userInfo sqlGenCtx
-- Nothing -> (Telem.Miss,) <$> noExistingPlan
(Telem.Miss,) <$> noExistingPlan
where
GQLReq opNameM queryStr queryVars = reqUnparsed
-- GQLReq opNameM queryStr queryVars = reqUnparsed
-- addPlanToCache plan =
-- liftIO $ EP.addPlan scVer (userRole userInfo)
-- opNameM queryStr plan planCache
@ -332,21 +334,20 @@ getResolvedExecPlan env logger {- planCache-} userInfo sqlGenCtx
(gCtx, queryParts) <- getExecPlanPartial userInfo sc queryType reqParsed
case queryParts of
G.TypedOperationDefinition G.OperationTypeQuery _ varDefs dirs selSet -> do
G.TypedOperationDefinition G.OperationTypeQuery _ varDefs directives selSet -> do
-- (Here the above fragment inlining is actually executed.)
inlinedSelSet <- EI.inlineSelectionSet fragments selSet
(executionPlan, queryRootFields, normalizedSelectionSet) <-
EQ.convertQuerySelSet env logger gCtx userInfo httpManager reqHeaders dirs inlinedSelSet varDefs (_grVariables reqUnparsed) (scSetGraphqlIntrospectionOptions sc)
EQ.convertQuerySelSet env logger gCtx userInfo httpManager reqHeaders directives inlinedSelSet varDefs (_grVariables reqUnparsed) (scSetGraphqlIntrospectionOptions sc)
pure $ (normalizedSelectionSet, QueryExecutionPlan executionPlan queryRootFields)
-- See Note [Temporarily disabling query plan caching]
-- traverse_ (addPlanToCache . EP.RPQuery) plan
G.TypedOperationDefinition G.OperationTypeMutation _ varDefs _ selSet -> do
G.TypedOperationDefinition G.OperationTypeMutation _ varDefs directives selSet -> do
-- (Here the above fragment inlining is actually executed.)
inlinedSelSet <- EI.inlineSelectionSet fragments selSet
(executionPlan, normalizedSelectionSet) <-
EM.convertMutationSelectionSet env logger gCtx sqlGenCtx userInfo httpManager reqHeaders
inlinedSelSet varDefs (_grVariables reqUnparsed) (scSetGraphqlIntrospectionOptions sc)
EM.convertMutationSelectionSet env logger gCtx sqlGenCtx userInfo httpManager reqHeaders directives inlinedSelSet varDefs (_grVariables reqUnparsed) (scSetGraphqlIntrospectionOptions sc)
pure $ (normalizedSelectionSet, MutationExecutionPlan executionPlan)
-- See Note [Temporarily disabling query plan caching]
-- traverse_ (addPlanToCache . EP.RPQuery) plan
@ -354,22 +355,20 @@ getResolvedExecPlan env logger {- planCache-} userInfo sqlGenCtx
-- (Here the above fragment inlining is actually executed.)
inlinedSelSet <- EI.inlineSelectionSet fragments selSet
-- Parse as query to check correctness
(unpreparedAST, _reusability, normalizedSelectionSet) <-
EQ.parseGraphQLQuery gCtx varDefs (_grVariables reqUnparsed) inlinedSelSet
-- A subscription should have exactly one root field
-- As an internal testing feature, we support subscribing to multiple
-- root fields in a subcription. First, we check if the corresponding directive
-- (@_multiple_top_level_fields) is set.
(unpreparedAST, _reusability, normalizedDirectives, normalizedSelectionSet) <-
EQ.parseGraphQLQuery gCtx varDefs (_grVariables reqUnparsed) directives inlinedSelSet
-- Process directives on the subscription
(dirMap, _) <- (`onLeft` reportParseErrors) =<<
runParseT (parseDirectives customDirectives (G.DLExecutable G.EDLSUBSCRIPTION) normalizedDirectives)
-- A subscription should have exactly one root field.
-- However, for testing purposes, we may allow several root fields; we check for this by
-- looking for directive "_multiple_top_level_fields" on the subscription. THIS IS NOT A
-- SUPPORTED FEATURE. We might remove it in the future without warning. DO NOT USE THIS.
allowMultipleRootFields <- withDirective dirMap multipleRootFields $ pure . isJust
case inlinedSelSet of
[] -> throw500 "empty selset for subscription"
[_] -> pure ()
(_:rst) ->
let multipleAllowed =
-- TODO!!!
-- We support directives we don't expose in the schema?!
G.Directive $$(G.litName "_multiple_top_level_fields") mempty `elem` directives
in
unless (multipleAllowed || null rst) $
throw400 ValidationFailed "subscriptions must select one top level field"
[] -> throw500 "empty selset for subscription"
_ -> unless allowMultipleRootFields $
throw400 ValidationFailed "subscriptions must select one top level field"
subscriptionPlan <- buildSubscriptionPlan userInfo unpreparedAST
pure (normalizedSelectionSet, SubscriptionExecutionPlan subscriptionPlan)

View File

@ -57,7 +57,6 @@ class ( Backend b
-> HTTP.Manager
-> [HTTP.Header]
-> UserInfo
-> [G.Directive G.Name]
-> SourceName
-> SourceConfig b
-> QueryDB b (UnpreparedValue b)

View File

@ -7,7 +7,6 @@ import Hasura.Prelude
import qualified Data.Environment as Env
import qualified Data.HashMap.Strict as Map
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Data.Sequence.NonEmpty as NE
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Types as HTTP
@ -26,6 +25,7 @@ import Hasura.GraphQL.Execute.Instances ()
import Hasura.GraphQL.Execute.Remote
import Hasura.GraphQL.Execute.Resolve
import Hasura.GraphQL.Parser
import Hasura.GraphQL.Parser.Directives
import Hasura.Metadata.Class
import Hasura.RQL.Types
import Hasura.Server.Version (HasVersion)
@ -69,21 +69,26 @@ convertMutationSelectionSet
-> UserInfo
-> HTTP.Manager
-> HTTP.RequestHeaders
-> [G.Directive G.Name]
-> G.SelectionSet G.NoFragments G.Name
-> [G.VariableDefinition]
-> Maybe GH.VariableValues
-> SetGraphqlIntrospectionOptions
-> m (ExecutionPlan, G.SelectionSet G.NoFragments Variable)
convertMutationSelectionSet env logger gqlContext SQLGenCtx{stringifyNum} userInfo manager reqHeaders fields varDefs varValsM introspectionDisabledRoles = do
convertMutationSelectionSet env logger gqlContext SQLGenCtx{stringifyNum} userInfo manager reqHeaders directives fields varDefs varValsM introspectionDisabledRoles = do
mutationParser <- onNothing (gqlMutationParser gqlContext) $
throw400 ValidationFailed "no mutations exist"
resolvedSelSet <- resolveVariables varDefs (fromMaybe Map.empty varValsM) fields
(resolvedDirectives, resolvedSelSet) <- resolveVariables varDefs (fromMaybe Map.empty varValsM) directives fields
-- Parse the GraphQL query into the RQL AST
(unpreparedQueries, _reusability)
:: (OMap.InsOrdHashMap G.Name (MutationRootField UnpreparedValue), QueryReusability)
<-(mutationParser >>> (`onLeft` reportParseErrors)) resolvedSelSet
-- Process directives on the mutation
(_dirMap, _) <- (`onLeft` reportParseErrors) =<<
runParseT (parseDirectives customDirectives (G.DLExecutable G.EDLMUTATION) resolvedDirectives)
-- Transform the RQL AST into a prepared SQL query
txs <- for unpreparedQueries \case
RFDB sourceName exists ->
@ -102,10 +107,3 @@ convertMutationSelectionSet env logger gqlContext SQLGenCtx{stringifyNum} userIn
-- definition which is currently being ignored for actions that are mutations
RFRaw s -> flip onLeft throwError =<< executeIntrospection userInfo s introspectionDisabledRoles
return (txs, resolvedSelSet)
where
reportParseErrors errs = case NE.head errs of
-- TODO: Our error reporting machinery doesnt currently support reporting
-- multiple errors at once, so were throwing away all but the first one
-- here. It would be nice to report all of them!
ParseError{ pePath, peMessage, peCode } ->
throwError (err400 peCode peMessage){ qePath = pePath }

View File

@ -13,7 +13,6 @@ import qualified Data.Aeson as J
import qualified Data.Environment as Env
import qualified Data.HashMap.Strict as Map
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Data.Sequence.NonEmpty as NESeq
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Types as HTTP
@ -31,6 +30,7 @@ import Hasura.GraphQL.Execute.Instances ()
import Hasura.GraphQL.Execute.Remote
import Hasura.GraphQL.Execute.Resolve
import Hasura.GraphQL.Parser
import Hasura.GraphQL.Parser.Directives
import Hasura.RQL.Types
import Hasura.Server.Version (HasVersion)
import Hasura.Session
@ -41,22 +41,18 @@ parseGraphQLQuery
=> GQLContext
-> [G.VariableDefinition]
-> Maybe (HashMap G.Name J.Value)
-> [G.Directive G.Name]
-> G.SelectionSet G.NoFragments G.Name
-> m ( InsOrdHashMap G.Name (QueryRootField UnpreparedValue)
, QueryReusability
, [G.Directive Variable]
, G.SelectionSet G.NoFragments Variable
)
parseGraphQLQuery gqlContext varDefs varValsM fields = do
resolvedSelSet <- resolveVariables varDefs (fromMaybe Map.empty varValsM) fields
parseGraphQLQuery gqlContext varDefs varValsM directives fields = do
(resolvedDirectives, resolvedSelSet) <- resolveVariables varDefs (fromMaybe Map.empty varValsM) directives fields
(parsedQuery, queryReusability) <- (gqlQueryParser gqlContext >>> (`onLeft` reportParseErrors)) resolvedSelSet
pure (parsedQuery, queryReusability, resolvedSelSet)
where
reportParseErrors errs = case NESeq.head errs of
-- TODO: Our error reporting machinery doesnt currently support reporting
-- multiple errors at once, so were throwing away all but the first one
-- here. It would be nice to report all of them!
ParseError{ pePath, peMessage, peCode } ->
throwError (err400 peCode peMessage){ qePath = pePath }
pure (parsedQuery, queryReusability, resolvedDirectives, resolvedSelSet)
convertQuerySelSet
:: forall m .
@ -79,15 +75,21 @@ convertQuerySelSet
convertQuerySelSet env logger gqlContext userInfo manager reqHeaders directives fields varDefs varValsM
introspectionDisabledRoles = do
-- Parse the GraphQL query into the RQL AST
(unpreparedQueries, _reusability, normalizedSelectionSet) <- parseGraphQLQuery gqlContext varDefs varValsM fields
(unpreparedQueries, _reusability, normalizedDirectives, normalizedSelectionSet) <-
parseGraphQLQuery gqlContext varDefs varValsM directives fields
-- Transform the query plans into an execution plan
let usrVars = _uiSession userInfo
-- Process directives on the query
(_dirMap, _) <- (`onLeft` reportParseErrors) =<<
runParseT (parseDirectives customDirectives (G.DLExecutable G.EDLQUERY) normalizedDirectives)
executionPlan <- for unpreparedQueries \case
RFDB sourceName exists ->
AB.dispatchAnyBackend @BackendExecute exists
\(SourceConfigWith sourceConfig (QDBR db)) ->
mkDBQueryPlan env manager reqHeaders userInfo directives sourceName sourceConfig db
mkDBQueryPlan env manager reqHeaders userInfo sourceName sourceConfig db
RFRemote rf -> do
RemoteFieldG remoteSchemaInfo remoteField <- for rf $ resolveRemoteVariable userInfo
pure $ buildExecStepRemote remoteSchemaInfo G.OperationTypeQuery [G.SelectionField remoteField]

View File

@ -23,9 +23,12 @@ resolveVariables
. (MonadError QErr m, Traversable fragments)
=> [G.VariableDefinition]
-> GH.VariableValues
-> [G.Directive G.Name]
-> G.SelectionSet fragments G.Name
-> m (G.SelectionSet fragments Variable)
resolveVariables definitions jsonValues selSet = do
-> m ( [G.Directive Variable]
, G.SelectionSet fragments Variable
)
resolveVariables definitions jsonValues directives selSet = do
variablesByName <- Map.groupOnNE getName <$> traverse buildVariable definitions
uniqueVariables <- flip Map.traverseWithKey variablesByName
\variableName variableDefinitions ->
@ -33,8 +36,10 @@ resolveVariables definitions jsonValues selSet = do
a :| [] -> return a
_ -> throw400 ParseFailed
$ "multiple definitions for variable " <>> variableName
(selSet', usedVariables) <- flip runStateT mempty $
traverse (traverse (resolveVariable uniqueVariables)) selSet
((directives', selSet'), usedVariables) <- flip runStateT mempty $ do
d <- traverse (traverse (resolveVariable uniqueVariables)) directives
s <- traverse (traverse (resolveVariable uniqueVariables)) selSet
pure (d, s)
let variablesByNameSet = HS.fromList . Map.keys $ variablesByName
jsonVariableNames = HS.fromList $ Map.keys jsonValues
-- At the time of writing, this check is disabled using
@ -60,7 +65,8 @@ resolveVariables definitions jsonValues selSet = do
<> T.concat (L.intersperse ", " $
map G.unName $ HS.toList $
HS.difference jsonVariableNames usedVariables)
return selSet'
return (directives', selSet')
where
buildVariable :: G.VariableDefinition -> m Variable
buildVariable G.VariableDefinition{ G._vdName, G._vdType, G._vdDefaultValue } = do

View File

@ -89,21 +89,23 @@ explainGQLQuery sc (GQLExplain query userVarsRaw maybeIsRelay) = do
fragments = mapMaybe takeFragment $ GH.unGQLExecDoc $ GH._grQuery query
(graphQLContext, queryParts) <- E.getExecPlanPartial userInfo sc queryType query
case queryParts of
G.TypedOperationDefinition G.OperationTypeQuery _ varDefs _ selSet -> do
G.TypedOperationDefinition G.OperationTypeQuery _ varDefs directives selSet -> do
-- (Here the above fragment inlining is actually executed.)
inlinedSelSet <- E.inlineSelectionSet fragments selSet
(unpreparedQueries, _, _) <-
E.parseGraphQLQuery graphQLContext varDefs (GH._grVariables query) inlinedSelSet
(unpreparedQueries, _, _, _) <-
E.parseGraphQLQuery graphQLContext varDefs (GH._grVariables query) directives inlinedSelSet
-- TODO: validate directives here
encJFromList <$>
for (OMap.toList unpreparedQueries) (uncurry (explainQueryField userInfo))
G.TypedOperationDefinition G.OperationTypeMutation _ _ _ _ ->
throw400 InvalidParams "only queries can be explained"
G.TypedOperationDefinition G.OperationTypeSubscription _ varDefs _ selSet -> do
G.TypedOperationDefinition G.OperationTypeSubscription _ varDefs directives selSet -> do
-- (Here the above fragment inlining is actually executed.)
inlinedSelSet <- E.inlineSelectionSet fragments selSet
(unpreparedQueries, _, _) <- E.parseGraphQLQuery graphQLContext varDefs (GH._grVariables query) inlinedSelSet
(unpreparedQueries, _, _, _) <- E.parseGraphQLQuery graphQLContext varDefs (GH._grVariables query) directives inlinedSelSet
-- TODO: validate directives here
validSubscription <- E.buildSubscriptionPlan userInfo unpreparedQueries
case validSubscription of
E.SEAsyncActionsWithNoRelationships _ -> throw400 NotSupported "async action query fields without relationships to table cannot be explained"

View File

@ -14,14 +14,13 @@ module Hasura.GraphQL.Parser.Collect
import Hasura.Prelude
import qualified Data.HashMap.Strict.Extended as Map
import qualified Data.HashMap.Strict.InsOrd as OMap
import Data.List.Extended (duplicates)
import Language.GraphQL.Draft.Syntax
import qualified Data.HashMap.Strict.InsOrd as OMap
import Data.Text.Extended
import Language.GraphQL.Draft.Syntax
import Hasura.GraphQL.Parser.Class
import Hasura.GraphQL.Parser.Directives
import Hasura.GraphQL.Parser.Schema
@ -32,12 +31,10 @@ collectFields
=> t Name
-- ^ The names of the object types and interface types the 'SelectionSet' is
-- selecting against.
-> (InputValue Variable -> m Bool)
-- ^ Please pass 'runParser boolean' here (passed explicitly to avoid cyclic imports)
-> SelectionSet NoFragments Variable
-> m (InsOrdHashMap Name (Field NoFragments Variable))
collectFields objectTypeNames boolParser selectionSet =
mergeFields =<< flattenSelectionSet objectTypeNames boolParser selectionSet
collectFields objectTypeNames selectionSet =
mergeFields =<< flattenSelectionSet objectTypeNames selectionSet
-- | Flattens inline fragments in a selection set. For example,
--
@ -93,24 +90,21 @@ flattenSelectionSet
:: (MonadParse m, Foldable t)
=> t Name
-- ^ The name of the object type the 'SelectionSet' is selecting against.
-> (InputValue Variable -> m Bool)
-- ^ Please pass 'runParser boolean' here (passed explicitly to avoid cyclic imports)
-> SelectionSet NoFragments Variable
-> m [Field NoFragments Variable]
flattenSelectionSet objectTypeNames boolParser = fmap concat . traverse flattenSelection
flattenSelectionSet objectTypeNames = fmap concat . traverse flattenSelection
where
-- The easy case: just a single field.
flattenSelection (SelectionField field) = do
validateDirectives (_fDirectives field)
applyInclusionDirectives (_fDirectives field) $ pure [field]
applyInclusionDirectives EDLFIELD (_fDirectives field) $ pure [field]
-- Note: The 'SelectionFragmentSpread' case has already been eliminated by
-- the fragment inliner.
-- TODO: handle directives on fragment spread.
-- The involved case: we have an inline fragment to process.
flattenSelection (SelectionInlineFragment fragment) = do
validateDirectives (_ifDirectives fragment)
applyInclusionDirectives (_ifDirectives fragment) $
applyInclusionDirectives EDLINLINE_FRAGMENT (_ifDirectives fragment) $
case _ifTypeCondition fragment of
-- No type condition, so the fragment unconditionally applies.
Nothing -> flattenInlineFragment fragment
@ -131,29 +125,16 @@ flattenSelectionSet objectTypeNames boolParser = fmap concat . traverse flattenS
Text.intercalate ", " (fmap dquoteTxt (toList objectTypeNames))
-}
flattenInlineFragment InlineFragment{ _ifDirectives, _ifSelectionSet } = do
validateDirectives _ifDirectives
flattenSelectionSet objectTypeNames boolParser _ifSelectionSet
flattenInlineFragment InlineFragment{ _ifSelectionSet } = do
flattenSelectionSet objectTypeNames _ifSelectionSet
applyInclusionDirectives directives continue
| Just directive <- find ((== $$(litName "include")) . _dName) directives
= applyInclusionDirective id directive continue
| Just directive <- find ((== $$(litName "skip")) . _dName) directives
= applyInclusionDirective not directive continue
| otherwise = continue
applyInclusionDirective adjust Directive{ _dName, _dArguments } continue = do
ifArgument <- Map.lookup $$(litName "if") _dArguments `onNothing`
parseError ("missing \"if\" argument for " <> _dName <<> " directive")
value <- boolParser $ GraphQLValue ifArgument
if adjust value then continue else pure []
validateDirectives directives =
case nonEmpty $ toList $ duplicates $ map _dName directives of
Nothing -> pure ()
Just duplicatedDirectives -> parseError
$ "the following directives are used more than once: "
<> commaSeparated duplicatedDirectives
applyInclusionDirectives location directives continue = do
dirMap <- parseDirectives inclusionDirectives (DLExecutable location) directives
shouldSkip <- withDirective dirMap skip $ pure . fromMaybe False
shouldInclude <- withDirective dirMap include $ pure . fromMaybe True
if shouldInclude && not shouldSkip
then continue
else pure []
-- | Merges fields according to the rules in the GraphQL specification, specifically
-- <§ 5.3.2 Field Selection Merging http://spec.graphql.org/June2018/#sec-Field-Selection-Merging>.
@ -185,36 +166,12 @@ mergeFields = foldM addField OMap.empty
{ _fAlias = Just alias
, _fName = _fName oldField
, _fArguments = _fArguments oldField
-- see Note [Drop directives from merged fields]
, _fDirectives = []
, _fDirectives = _fDirectives oldField <> _fDirectives newField
-- see Note [Lazily merge selection sets]
, _fSelectionSet = _fSelectionSet oldField ++ _fSelectionSet newField
}
{- Note [Drop directives from merged fields]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we merge two fields, what do we do with directives? The GraphQL spec isnt
very clear here, but it does explicitly state that directives only need to be
unique per unmerged field (§ 5.7.3 Directives Are Unique Per Location,
http://spec.graphql.org/June2018/#sec-Directives-Are-Unique-Per-Location). For
clarity, here is the example given by the spec:
query ($foo: Boolean = true, $bar: Boolean = false) {
field @skip(if: $foo) {
subfieldA
}
field @skip(if: $bar) {
subfieldB
}
}
The spec says this is totally fine, since the @skip directives appear in
different places. This forces our hand: we *must* process @include/@skip
directives prior to merging fields. And conveniently, aside from @include/@skip,
we dont care about directives, so we dont bother reconciling them during field
merging---we just drop them.
Note [Lazily merge selection sets]
{- Note [Lazily merge selection sets]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Field merging is described in a recursive way in the GraphQL spec (§ 5.3.2 Field
Selection Merging http://spec.graphql.org/June2018/#sec-Field-Selection-Merging).

View File

@ -0,0 +1,14 @@
module Hasura.GraphQL.Parser.Collect where
import Hasura.Prelude
import Language.GraphQL.Draft.Syntax
import Hasura.GraphQL.Parser.Class
import Hasura.GraphQL.Parser.Schema
collectFields
:: (MonadParse m, Foldable t)
=> t Name
-> SelectionSet NoFragments Variable
-> m (InsOrdHashMap Name (Field NoFragments Variable))

View File

@ -0,0 +1,274 @@
-- | Definition of all supported GraphQL directives.
module Hasura.GraphQL.Parser.Directives
( -- list of directives, for the schema
directivesInfo
, inclusionDirectives
, customDirectives
-- lookup keys for directives
, include
, skip
, cached
, multipleRootFields
-- parsing utilities
, parseDirectives
, withDirective
-- exposed for tests
, Directive(..)
, DirectiveKey(..)
, skipDirective
, includeDirective
, cachedDirective
, multipleRootFieldsDirective
) where
import Hasura.Prelude
import qualified Data.Dependent.Map as DM
import qualified Data.HashMap.Strict.Extended as M
import qualified Data.HashSet as S
import qualified Language.GraphQL.Draft.Syntax as G
import Data.Dependent.Sum (DSum (..))
import Data.GADT.Compare.Extended
import Data.List.Extended (duplicates)
import Data.Parser.JSONPath
import Data.Text.Extended
import Data.Typeable (eqT)
import Type.Reflection (Typeable, typeRep, (:~:) (..))
import Hasura.GraphQL.Parser.Class
import Hasura.GraphQL.Parser.Internal.Parser
import Hasura.GraphQL.Parser.Schema
-- | Returns the schema information for all supported directives, for each of
-- which a @DirectiveDefinition@ will be inserted into the schema.
--
-- To add a new custom directives to the schema, add it to @customDirectives@.
-- Inclusion directives are treated separately, as they are not concerned with
-- altering the result of the parsers, but about whether a given field should be
-- parsed at all (see @collectFields@ for more information).
--
-- Alternatively, you could manually add a @DirectiveInfo@ to this list;
-- however, if you do so, and do not provide a corresponding definition in
-- @customDirectives@, calls to @parseDirectives@ will NOT attempt to parse that
-- new directive: they will only validate that it is used correctly. You can use
-- this option to implement directives that do not match our simple inclusion /
-- custom dichotomy.
--
-- To create a directive definition, you need to create a @Directive@ using
-- @mkDirective@, and export from this module its corresponding @DirectiveKey@
-- (see @skip@ below for an example). Make sure the type in the key matches the
-- type of the parser!
--
-- Directives may be "hidden", in which case they won't advertised in the
-- schema, but silently accepted. This is un-advisable and should only be used
-- when there's no other way around it.
directivesInfo :: forall m. MonadParse m => [DirectiveInfo]
directivesInfo = do
dir <- inclusionDirectives @m <> customDirectives @m
guard $ dAdvertised dir
pure $ dDefinition dir
-- | Not exported, only used internally; identical to 'directivesInfo', but also
-- contains hidden directives.
allDirectives :: forall m. MonadParse m => [DirectiveInfo]
allDirectives = map dDefinition $ inclusionDirectives @m <> customDirectives @m
inclusionDirectives :: forall m. MonadParse m => [Directive m]
inclusionDirectives = [includeDirective @m, skipDirective @m]
customDirectives :: forall m. MonadParse m => [Directive m]
customDirectives = [cachedDirective @m, multipleRootFieldsDirective @m]
-- | Parses directives, given a location. Ensures that all directives are known
-- and match the location; subsequently builds a dependent map of the results,
-- that can be then introspected with @withDirective@. The list of parsers that
-- should be applied is given as an argument: if a valid directive is found, but
-- for which no parser is provided, it will be ignored.
--
-- Example use:
--
--     dMap <- parseDirectives customDirectives (DLExecutable EDLQUERY) directives
--     withDirective dMap cached $ onJust \_ -> tagAsCached
parseDirectives
  :: forall m. MonadParse m
  => [Directive m]          -- ^ parsers to run on the directives we recognize
  -> G.DirectiveLocation    -- ^ where in the document these directives appear
  -> [G.Directive Variable] -- ^ the directives to validate and parse
  -> m (DM.DMap DirectiveKey Identity)
parseDirectives directiveParsers location givenDirectives = do
  result <- catMaybes <$> for givenDirectives \directive -> do
    let name = G._dName directive
    -- check the directive has a matching definition
    DirectiveInfo { diLocations } <-
      find (\di -> diName di == name) (allDirectives @m)
      `onNothing` parseError ("directive " <> name <<> " is not defined in the schema")
    -- check that it is allowed at the current location
    unless (location `elem` diLocations) $
      parseError $ "directive " <> name <<> " is not allowed on " <> humanReadable location
    -- if we are expecting to parse it now, create a dmap entry
    case find (\d -> diName (dDefinition d) == name) directiveParsers of
      Nothing                      -> pure Nothing
      Just (Directive { dParser }) -> do
        result <- dParser directive
        pure $ Just (name, DirectiveKey name :=> pure result)
  -- check that the result does not contain duplicates
  -- NOTE(review): only directives that were actually parsed make it into
  -- @result@, so a known directive repeated at a location where no parser
  -- handles it is silently ignored rather than rejected — confirm intended.
  let dups = duplicates $ fst <$> result
  unless (null dups) $
    parseError $ "the following directives are used more than once: " <> commaSeparated dups
  pure $ DM.fromList $ snd <$> result
  where
    -- Human-readable name of each directive location, used in error messages.
    humanReadable = \case
      G.DLExecutable G.EDLQUERY                  -> "a query"
      G.DLExecutable G.EDLMUTATION               -> "a mutation"
      G.DLExecutable G.EDLSUBSCRIPTION           -> "a subscription"
      G.DLExecutable G.EDLFIELD                  -> "a field"
      G.DLExecutable G.EDLFRAGMENT_DEFINITION    -> "a fragment definition"
      G.DLExecutable G.EDLFRAGMENT_SPREAD        -> "a fragment spread"
      G.DLExecutable G.EDLINLINE_FRAGMENT        -> "an inline fragment"
      G.DLTypeSystem G.TSDLSCHEMA                -> "the schema"
      G.DLTypeSystem G.TSDLSCALAR                -> "a scalar definition"
      G.DLTypeSystem G.TSDLOBJECT                -> "an object definition"
      G.DLTypeSystem G.TSDLFIELD_DEFINITION      -> "a field definition"
      G.DLTypeSystem G.TSDLARGUMENT_DEFINITION   -> "an argument definition"
      G.DLTypeSystem G.TSDLINTERFACE             -> "an interface definition"
      -- fixed grammar: "an union" -> "a union"
      G.DLTypeSystem G.TSDLUNION                 -> "a union definition"
      G.DLTypeSystem G.TSDLENUM                  -> "an enum definition"
      G.DLTypeSystem G.TSDLENUM_VALUE            -> "an enum value definition"
      G.DLTypeSystem G.TSDLINPUT_OBJECT          -> "an input object definition"
      G.DLTypeSystem G.TSDLINPUT_FIELD_DEFINITION -> "an input field definition"
-- | Retrieves the result of a parsed directive from the map built by
-- 'parseDirectives', and passes it ('Nothing' if the directive was absent)
-- to the given continuation.
withDirective
  :: DM.DMap DirectiveKey Identity
  -> DirectiveKey a
  -> (Maybe a -> m b)
  -> m b
withDirective dmap key callback = callback $ runIdentity <$> DM.lookup key dmap
-- Cached custom directive.

-- | The @cached directive (Hasura Cloud only), declared on queries; its @ttl@
-- argument, measured in seconds, defaults to 60.
cachedDirective :: forall m. MonadParse m => Directive m
cachedDirective = mkDirective
  $$(G.litName "cached")
  (Just "whether this query should be cached (Hasura Cloud only)")
  True
  [G.DLExecutable G.EDLQUERY]
  ttlArgument
  where
    -- optional @ttl@ argument, defaulting to 60 when not provided
    ttlArgument :: InputFieldsParser m Int
    ttlArgument = fieldWithDefault $$(G.litName "ttl") (Just "measured in seconds") (G.VInt 60) $ fromIntegral <$> int

-- | Key to retrieve the parsed @ttl@ of the @cached directive from the dmap.
cached :: DirectiveKey Int
cached = DirectiveKey $$(G.litName "cached")
-- Subscription tests custom directive.

-- | Internal testing directive declared on subscriptions; takes no arguments
-- and is hidden from the advertised schema.
multipleRootFieldsDirective :: forall m. MonadParse m => Directive m
multipleRootFieldsDirective = mkDirective
  $$(G.litName "_multiple_top_level_fields")
  (Just "INTERNAL TESTING TOOL DO NOT USE")
  False -- not advertised in the schema
  [G.DLExecutable G.EDLSUBSCRIPTION]
  (pure ())

-- | Key to check for the presence of @_multiple_top_level_fields in the dmap;
-- the directive carries no data, hence the unit result type.
multipleRootFields :: DirectiveKey ()
multipleRootFields = DirectiveKey $$(G.litName "_multiple_top_level_fields")
-- Built-in inclusion directives

-- | The built-in @skip directive, declared on fields, fragment spreads, and
-- inline fragments; parses its boolean @if@ argument.
-- NOTE(review): the description says "query", but the directive is declared on
-- fields and fragments — consider rewording.
skipDirective :: MonadParse m => Directive m
skipDirective = mkDirective
  $$(G.litName "skip")
  (Just "whether this query should be skipped")
  True
  [ G.DLExecutable G.EDLFIELD
  , G.DLExecutable G.EDLFRAGMENT_SPREAD
  , G.DLExecutable G.EDLINLINE_FRAGMENT
  ]
  ifArgument

-- | The built-in @include directive, declared on fields, fragment spreads, and
-- inline fragments; parses its boolean @if@ argument.
-- NOTE(review): same wording concern as 'skipDirective' above.
includeDirective :: MonadParse m => Directive m
includeDirective = mkDirective
  $$(G.litName "include")
  (Just "whether this query should be included")
  True
  [ G.DLExecutable G.EDLFIELD
  , G.DLExecutable G.EDLFRAGMENT_SPREAD
  , G.DLExecutable G.EDLINLINE_FRAGMENT
  ]
  ifArgument

-- | Key to retrieve the parsed @if@ value of @skip from the dmap.
skip :: DirectiveKey Bool
skip = DirectiveKey $$(G.litName "skip")

-- | Key to retrieve the parsed @if@ value of @include from the dmap.
include :: DirectiveKey Bool
include = DirectiveKey $$(G.litName "include")

-- | Required boolean @if@ argument, shared by @skip and @include.
ifArgument :: MonadParse m => InputFieldsParser m Bool
ifArgument = field $$(G.litName "if") Nothing boolean
-- Parser type for directives.

-- | A directive parser: its introspection definition, whether it is shown in
-- the advertised schema, and the parser run on each invocation. The result
-- type @a@ is existentially hidden; it is recovered via a matching
-- 'DirectiveKey' (hence the 'Typeable' constraint).
data Directive m where
  Directive :: forall m a. (MonadParse m, Typeable a) =>
    { dDefinition :: DirectiveInfo              -- ^ name, description, arguments, locations
    , dAdvertised :: Bool                       -- ^ whether introspection should list it
    , dParser     :: G.Directive Variable -> m a -- ^ parses one invocation of the directive
    } -> Directive m

-- | Key into the dependent map of parsed directives: a directive name tagged
-- with the type its parser produces.
data DirectiveKey a where
  DirectiveKey :: Typeable a => G.Name -> DirectiveKey a
-- | Two keys match only when both the directive name and the result type
-- agree; the type equality ('eqT') is what lets us recover @a@ from the map.
instance GEq DirectiveKey where
  geq (DirectiveKey name1 :: DirectiveKey a1)
      (DirectiveKey name2 :: DirectiveKey a2)
    | name1 == name2
    , Just Refl <- eqT @a1 @a2
    = Just Refl
    | otherwise = Nothing

-- | Keys are ordered by name first, then by the 'typeRep' of the result type,
-- consistent with the 'GEq' instance above.
instance GCompare DirectiveKey where
  gcompare (DirectiveKey name1 :: DirectiveKey a1)
           (DirectiveKey name2 :: DirectiveKey a2)
    = strengthenOrdering (compare name1 name2)
      `extendGOrdering` gcompare (typeRep @a1) (typeRep @a2)
      `extendGOrdering` GEQ
-- | Helper to construct a 'Directive': builds the 'DirectiveInfo' from the
-- given pieces, and wraps the argument parser so that unknown argument names
-- are rejected with a parse error at a path rooted at the directive's name.
mkDirective
  :: (MonadParse m, Typeable a)
  => G.Name                 -- ^ directive name
  -> Maybe G.Description    -- ^ description, shown in introspection
  -> Bool                   -- ^ whether the directive is advertised in the schema
  -> [G.DirectiveLocation]  -- ^ locations at which the directive may appear
  -> InputFieldsParser m a  -- ^ parser for the directive's arguments
  -> Directive m
mkDirective name description advertised location argsParser = Directive
  { dDefinition = DirectiveInfo name description (ifDefinitions argsParser) location
  , dAdvertised = advertised
  , dParser = \(G.Directive _name arguments) -> withPath (++[Key $ G.unName name]) $ do
      -- reject any argument the directive does not declare
      for_ (M.keys arguments) \argumentName ->
        unless (argumentName `S.member` argumentNames) $
          parseError $ name <<> " has no argument named " <>> argumentName
      withPath (++[Key "args"]) $ ifParser argsParser $ GraphQLValue <$> arguments
  }
  where
    -- names of the arguments declared by the directive's argument parser
    argumentNames = S.fromList (dName <$> ifDefinitions argsParser)

View File

@ -0,0 +1,42 @@
-- NOTE(review): this appears to be the @hs-boot@ interface of
-- "Hasura.GraphQL.Parser.Directives", imported with a {-# SOURCE #-} pragma
-- to break the import cycle with the parser internals; it only declares the
-- exported signatures, without definitions. Keep it in sync with the real
-- module.
module Hasura.GraphQL.Parser.Directives
  ( customDirectives
  , parseDirectives
  , withDirective
  ) where

import           Hasura.Prelude

import qualified Data.Dependent.Map            as DM
import qualified Language.GraphQL.Draft.Syntax as G

import           Type.Reflection               (Typeable)

import           Hasura.GraphQL.Parser.Class
import           Hasura.GraphQL.Parser.Schema

-- | Hasura-specific custom directives.
customDirectives :: forall m. MonadParse m => [Directive m]

-- | Validates and parses directives found at a given location.
parseDirectives
  :: forall m. MonadParse m
  => [Directive m]
  -> G.DirectiveLocation
  -> [G.Directive Variable]
  -> m (DM.DMap DirectiveKey Identity)

-- | Retrieves a parsed directive's result from the dependent map.
withDirective
  :: DM.DMap DirectiveKey Identity
  -> DirectiveKey a
  -> (Maybe a -> m b)
  -> m b

data Directive m where
  Directive :: forall m a. (MonadParse m, Typeable a) =>
    { dDefinition :: DirectiveInfo
    , dParser     :: G.Directive Variable -> m a
    } -> Directive m

data DirectiveKey a where
  DirectiveKey :: Typeable a => G.Name -> DirectiveKey a

View File

@ -10,31 +10,32 @@ module Hasura.GraphQL.Parser.Internal.Parser
, ParserInput
) where
import Hasura.Prelude
import Hasura.Prelude
import qualified Data.Aeson as A
import qualified Data.HashMap.Strict.Extended as M
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Data.HashSet as S
import qualified Data.List.Extended as LE
import qualified Data.UUID as UUID
import qualified Data.Aeson as A
import qualified Data.HashMap.Strict.Extended as M
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Data.HashSet as S
import qualified Data.List.Extended as LE
import qualified Data.UUID as UUID
import Control.Lens.Extended hiding (enum, index)
import Data.Int (Int32, Int64)
import Data.Parser.JSONPath
import Data.Scientific (toBoundedInteger)
import Data.Text.Extended
import Data.Type.Equality
import Language.GraphQL.Draft.Syntax hiding (Definition)
import Control.Lens.Extended hiding (enum, index)
import Data.Int (Int32, Int64)
import Data.Parser.JSONPath
import Data.Scientific (toBoundedInteger)
import Data.Text.Extended
import Data.Type.Equality
import Language.GraphQL.Draft.Syntax hiding (Definition)
import Hasura.Backends.Postgres.SQL.Value
import Hasura.Base.Error
import Hasura.GraphQL.Parser.Class.Parse
import Hasura.GraphQL.Parser.Collect
import Hasura.GraphQL.Parser.Internal.Types
import Hasura.GraphQL.Parser.Schema
import Hasura.RQL.Types.CustomTypes
import Hasura.Server.Utils (englishList)
import Hasura.Backends.Postgres.SQL.Value
import Hasura.Base.Error
import Hasura.GraphQL.Parser.Class.Parse
import {-# SOURCE #-} Hasura.GraphQL.Parser.Collect
import {-# SOURCE #-} Hasura.GraphQL.Parser.Directives
import Hasura.GraphQL.Parser.Internal.Types
import Hasura.GraphQL.Parser.Schema
import Hasura.RQL.Types.CustomTypes
import Hasura.Server.Utils (englishList)
-- | The constraint @(''Input' '<:' k)@ entails @('ParserInput' k ~ 'Value')@,
@ -624,16 +625,21 @@ selectionSetObject name description parsers implementsInterfaces = Parser
-- TODO(PDV) This probably accepts invalid queries, namely queries that use
-- type names that do not exist.
fields <- collectFields (name:parsedInterfaceNames) (runParser boolean) input
for fields \selectionField@Field{ _fName, _fAlias } -> if
| _fName == $$(litName "__typename") ->
pure $ SelectTypename name
| Just parser <- M.lookup _fName parserMap ->
withPath (++[Key (unName _fName)]) $
SelectField <$> parser selectionField
| otherwise ->
withPath (++[Key (unName _fName)]) $
parseError $ "field " <> _fName <<> " not found in type: " <> squote name
fields <- collectFields (name:parsedInterfaceNames) input
for fields \selectionField@Field{ _fName, _fAlias, _fDirectives } -> do
parsedValue <-
if | _fName == $$(litName "__typename") ->
pure $ SelectTypename name
| Just parser <- M.lookup _fName parserMap ->
withPath (++[Key (unName _fName)]) $
SelectField <$> parser selectionField
| otherwise ->
withPath (++[Key (unName _fName)]) $
parseError $ "field " <> _fName <<> " not found in type: " <> squote name
_dirMap <- parseDirectives customDirectives (DLExecutable EDLFIELD) _fDirectives
-- insert processing of custom directives here
pure parsedValue
}
where
parserMap = parsers
@ -763,8 +769,8 @@ subselection
. MonadParse m
=> Name
-> Maybe Description
-> InputFieldsParser m a -- ^ parser for the input arguments
-> Parser 'Output m b -- ^ parser for the subselection set
-> InputFieldsParser m a -- ^ parser for the input arguments
-> Parser 'Output m b -- ^ parser for the subselection set
-> FieldParser m (a, b)
subselection name description argumentsParser bodyParser =
rawSubselection name description argumentsParser bodyParser
@ -775,8 +781,8 @@ rawSubselection
. MonadParse m
=> Name
-> Maybe Description
-> InputFieldsParser m a -- ^ parser for the input arguments
-> Parser 'Output m b -- ^ parser for the subselection set
-> InputFieldsParser m a -- ^ parser for the input arguments
-> Parser 'Output m b -- ^ parser for the subselection set
-> FieldParser m (Maybe Name, HashMap Name (Value Variable), a, b)
rawSubselection name description argumentsParser bodyParser = FieldParser
{ fDefinition = mkDefinition name description $

View File

@ -8,6 +8,7 @@ module Hasura.GraphQL.Parser.Monad
, ParseT
, runParseT
, ParseError(..)
, reportParseErrors
) where
import Hasura.Prelude
@ -27,10 +28,11 @@ import Data.Proxy (Proxy (..))
import System.IO.Unsafe (unsafeInterleaveIO)
import Type.Reflection (Typeable, typeRep, (:~:) (..))
import Hasura.Base.Error (Code)
import Hasura.Base.Error
import Hasura.GraphQL.Parser.Class
import Hasura.GraphQL.Parser.Schema
-- -------------------------------------------------------------------------------------------------
-- schema construction
@ -189,3 +191,14 @@ data ParseError = ParseError
, peMessage :: Text
, peCode :: Code
}
-- | Converts accumulated parse errors into a 'QErr' and throws it; only the
-- first error of the sequence is reported.
reportParseErrors
  :: MonadError QErr m
  => NESeq ParseError
  -> m a
reportParseErrors errs = case NE.head errs of
  -- TODO: Our error reporting machinery doesn't currently support reporting
  -- multiple errors at once, so we're throwing away all but the first one
  -- here. It would be nice to report all of them!
  ParseError{ pePath, peMessage, peCode } ->
    throwError (err400 peCode peMessage){ qePath = pePath }

View File

@ -28,6 +28,7 @@ import Hasura.GraphQL.Execute.Types
import Hasura.GraphQL.Parser (Kind (..), Parser, Schema (..),
UnpreparedValue (..))
import Hasura.GraphQL.Parser.Class
import Hasura.GraphQL.Parser.Directives (directivesInfo)
import Hasura.GraphQL.Parser.Internal.Parser (FieldParser (..))
import Hasura.GraphQL.Schema.Backend
import Hasura.GraphQL.Schema.Common
@ -551,7 +552,7 @@ buildQueryParser pgQueryFields remoteFields allActions nonObjectCustomTypes muta
queryWithIntrospectionHelper allQueryFields mutationParser subscriptionParser
queryWithIntrospectionHelper
:: (MonadSchema n m, MonadError QErr m)
:: forall n m. (MonadSchema n m, MonadError QErr m)
=> [P.FieldParser n (QueryRootField UnpreparedValue)]
-> Maybe (Parser 'Output n (OMap.InsOrdHashMap G.Name (MutationRootField UnpreparedValue)))
-> Parser 'Output n (OMap.InsOrdHashMap G.Name (QueryRootField UnpreparedValue))
@ -575,7 +576,7 @@ queryWithIntrospectionHelper basicQueryFP mutationP subscriptionP = do
, sQueryType = P.parserType basicQueryP
, sMutationType = P.parserType <$> mutationP
, sSubscriptionType = Just $ P.parserType subscriptionP
, sDirectives = defaultDirectives
, sDirectives = directivesInfo @n
}
let partialQueryFields =
basicQueryFP ++ (fmap RFRaw <$> [schema partialSchema, typeIntrospection partialSchema])

View File

@ -98,19 +98,6 @@ mkDescriptionWith descM defaultTxt = G.Description $ case descM of
Nothing -> defaultTxt
Just (PG.PGDescription descTxt) -> T.unlines [descTxt, "\n", defaultTxt]
-- | The default @'skip' and @'include' directives
defaultDirectives :: [P.DirectiveInfo]
defaultDirectives =
[mkDirective $$(G.litName "skip"), mkDirective $$(G.litName "include")]
where
ifInputField =
P.mkDefinition $$(G.litName "if") Nothing $ P.IFRequired $ P.TNamed $
P.mkDefinition $$(G.litName "Boolean") Nothing P.TIScalar
dirLocs = map G.DLExecutable
[G.EDLFIELD, G.EDLFRAGMENT_SPREAD, G.EDLINLINE_FRAGMENT]
mkDirective name =
P.DirectiveInfo name Nothing [ifInputField] dirLocs
-- TODO why do we do these validations at this point? What does it mean to track
-- a function but not add it to the schema...?
-- Auke:

View File

@ -0,0 +1,32 @@
module Hasura.GraphQL.Parser.DirectivesTest where
import Hasura.Prelude
import qualified Data.Dependent.Map as DM
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
import Test.Hspec
import Hasura.GraphQL.Parser.Directives
import Hasura.GraphQL.Parser.Schema
import Hasura.GraphQL.Parser.TestUtils
-- | Runs 'testDirective' over every built-in and custom directive, pairing
-- each with its corresponding lookup key.
spec :: Spec
spec = do
  testDirective skipDirective skip
  testDirective includeDirective include
  testDirective cachedDirective cached
  testDirective multipleRootFieldsDirective multipleRootFields
-- | Checks that a fake invocation of the given directive parses successfully
-- and that its result can be retrieved with the given key — i.e. that the
-- key's type parameter matches the type produced by the directive's parser.
testDirective :: Directive TestMonad -> DirectiveKey a -> Spec
testDirective dir key = do
  let name = diName $ dDefinition dir
      -- the first declared location is used to run the parser
      location = head $ diLocations $ dDefinition dir
      directive = fakeDirective $ dDefinition dir
  describe (T.unpack $ G.unName name) $ do
    it "has the same type in the key and the directive" $
      flip shouldBe (Right True) $ runTest $ do
        dmap <- parseDirectives [dir] location [directive]
        pure $ isJust $ runIdentity <$> DM.lookup key dmap

View File

@ -0,0 +1,56 @@
module Hasura.GraphQL.Parser.TestUtils where
import Hasura.Prelude
import qualified Data.HashMap.Strict as M
import qualified Data.List.NonEmpty as NE
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
import Hasura.GraphQL.Parser.Class.Parse
import Hasura.GraphQL.Parser.Schema
-- test monad

-- | Minimal monad implementing 'MonadParse', used to run parsers in unit
-- tests; a parse error is collapsed to its message in a 'Left'.
newtype TestMonad a = TestMonad { runTest :: Either Text a }
  deriving (Functor, Applicative, Monad)

instance MonadParse TestMonad where
  withPath = const id                       -- paths are irrelevant in tests
  parseErrorWith = const $ TestMonad . Left -- keep only the error message
  markNotReusable = pure ()                 -- reusability tracking is a no-op
-- values generation

-- | Produces an arbitrary value for a scalar, by name; only Int and Boolean
-- are implemented, which covers the arguments of the directives under test.
fakeScalar :: G.Name -> G.Value Variable
fakeScalar = G.unName >>> \case
  "Int" -> G.VInt 4242
  "Boolean" -> G.VBoolean False
  name -> error $ "no test value implemented for scalar " <> T.unpack name
-- | Recursively builds an arbitrary value matching an input field's type:
-- lists get two elements, input objects get every field populated, enums get
-- their first declared value, and scalars go through 'fakeScalar'.
fakeInputFieldValue :: InputFieldInfo -> G.Value Variable
fakeInputFieldValue = \case
  IFOptional t _ -> fromT t
  IFRequired nnt -> fromNNT nnt
  where
    -- nullability is ignored: a value is generated either way
    fromT :: forall k. ('Input <: k) => Type k -> G.Value Variable
    fromT = \case
      NonNullable nnt -> fromNNT nnt
      Nullable nnt -> fromNNT nnt
    fromNNT :: forall k. ('Input <: k) => NonNullableType k -> G.Value Variable
    fromNNT = \case
      TList t -> G.VList [fromT t, fromT t]
      TNamed (Definition name _ _ info) -> case info of
        TIScalar -> fakeScalar name
        TIEnum ei -> G.VEnum $ G.EnumValue $ dName $ NE.head ei
        TIInputObject (InputObjectInfo oi) -> G.VObject $ M.fromList $ do
          Definition fieldName _ _ fieldInfo <- oi
          pure (fieldName, fakeInputFieldValue fieldInfo)
        -- non-input type infos cannot occur under ('Input <: k)
        _ -> error "impossible"
-- | Builds a fake invocation of the given directive, generating a value for
-- each of its declared arguments with 'fakeInputFieldValue'.
fakeDirective :: DirectiveInfo -> G.Directive Variable
fakeDirective DirectiveInfo{..} =
  G.Directive diName $ M.fromList $ diArguments <&> \(Definition argName _ _ argInfo) ->
    (argName, fakeInputFieldValue argInfo)

View File

@ -2,22 +2,37 @@ module Main (main) where
import Hasura.Prelude
import qualified Data.Aeson as A
import qualified Data.ByteString.Lazy.Char8 as BL
import qualified Data.Environment as Env
import qualified Database.PG.Query as Q
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Client.TLS as HTTP
import qualified Test.Hspec.Runner as Hspec
import Control.Concurrent.MVar
import Control.Natural ((:~>) (..))
import Data.Time.Clock (getCurrentTime)
import Control.Natural ((:~>) (..))
import Data.Time.Clock (getCurrentTime)
import Data.URL.Template
import Options.Applicative
import System.Environment (getEnvironment)
import System.Exit (exitFailure)
import System.Environment (getEnvironment)
import System.Exit (exitFailure)
import Test.Hspec
import qualified Data.Aeson as A
import qualified Data.ByteString.Lazy.Char8 as BL
import qualified Data.Environment as Env
import qualified Database.PG.Query as Q
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Client.TLS as HTTP
import qualified Test.Hspec.Runner as Hspec
import qualified Data.NonNegativeIntSpec as NonNegetiveIntSpec
import qualified Data.Parser.CacheControlSpec as CacheControlParser
import qualified Data.Parser.JSONPathSpec as JsonPath
import qualified Data.Parser.URLTemplate as URLTemplate
import qualified Data.TimeSpec as TimeSpec
import qualified Hasura.CacheBoundedSpec as CacheBoundedSpec
import qualified Hasura.EventingSpec as EventingSpec
import qualified Hasura.GraphQL.Parser.DirectivesTest as GraphQLDirectivesSpec
import qualified Hasura.IncrementalSpec as IncrementalSpec
import qualified Hasura.RQL.Types.EndpointSpec as EndpointSpec
import qualified Hasura.SQL.WKTSpec as WKTSpec
import qualified Hasura.Server.AuthSpec as AuthSpec
import qualified Hasura.Server.MigrateSpec as MigrateSpec
import qualified Hasura.Server.TelemetrySpec as TelemetrySpec
import Hasura.RQL.DDL.Schema.Cache
import Hasura.RQL.DDL.Schema.Cache.Common
@ -28,20 +43,6 @@ import Hasura.Server.Migrate
import Hasura.Server.Types
import Hasura.Server.Version
import qualified Data.NonNegativeIntSpec as NonNegetiveIntSpec
import qualified Data.Parser.CacheControlSpec as CacheControlParser
import qualified Data.Parser.JSONPathSpec as JsonPath
import qualified Data.Parser.URLTemplate as URLTemplate
import qualified Data.TimeSpec as TimeSpec
import qualified Hasura.IncrementalSpec as IncrementalSpec
-- import qualified Hasura.RQL.MetadataSpec as MetadataSpec
import qualified Hasura.CacheBoundedSpec as CacheBoundedSpec
import qualified Hasura.EventingSpec as EventingSpec
import qualified Hasura.RQL.Types.EndpointSpec as EndpointSpec
import qualified Hasura.SQL.WKTSpec as WKTSpec
import qualified Hasura.Server.AuthSpec as AuthSpec
import qualified Hasura.Server.MigrateSpec as MigrateSpec
import qualified Hasura.Server.TelemetrySpec as TelemetrySpec
data TestSuites
= AllSuites !(Maybe URLTemplate)
@ -65,19 +66,20 @@ main = withVersion $$(getVersionFromEnvironment) $ parseArgs >>= \case
unitSpecs :: Spec
unitSpecs = do
describe "Data.Parser.CacheControl" CacheControlParser.spec
describe "Data.Parser.URLTemplate" URLTemplate.spec
describe "Data.Parser.JSONPath" JsonPath.spec
describe "Hasura.Incremental" IncrementalSpec.spec
-- describe "Hasura.RQL.Metadata" MetadataSpec.spec -- Commenting until optimizing the test in CI
describe "Data.Time" TimeSpec.spec
describe "Data.NonNegativeInt" NonNegetiveIntSpec.spec
describe "Hasura.Server.Telemetry" TelemetrySpec.spec
describe "Hasura.Server.Auth" AuthSpec.spec
describe "Data.Parser.CacheControl" CacheControlParser.spec
describe "Data.Parser.JSONPath" JsonPath.spec
describe "Data.Parser.URLTemplate" URLTemplate.spec
describe "Data.Time" TimeSpec.spec
describe "Hasura.Cache.Bounded" CacheBoundedSpec.spec
describe "Hasura.Eventing" EventingSpec.spec
describe "Hasura.GraphQL.Parser.Directives" GraphQLDirectivesSpec.spec
describe "Hasura.Incremental" IncrementalSpec.spec
describe "Hasura.RQL.Types.Endpoint" EndpointSpec.spec
describe "Hasura.SQL.WKT" WKTSpec.spec
describe "Hasura.Eventing" EventingSpec.spec
describe "Hasura.Server.Auth" AuthSpec.spec
describe "Hasura.Server.Telemetry" TelemetrySpec.spec
buildPostgresSpecs :: HasVersion => Maybe URLTemplate -> IO Spec
buildPostgresSpecs maybeUrlTemplate = do

View File

@ -0,0 +1,74 @@
# Exercises @include and @skip combined on the same field: `id` is returned
# only when includeId is true AND skipId is false.

# include: false, skip: false -> id omitted
- url: /v1/graphql
  status: 200
  query:
    query: |
      query author_with_both($includeId: Boolean!, $skipId: Boolean!) {
        author {
          id @include(if: $includeId) @skip(if: $skipId)
          name
        }
      }
    variables:
      includeId: false
      skipId: false
  response:
    data:
      author:
      - name: Author 1
      - name: Author 2

# include: true, skip: false -> id present
- url: /v1/graphql
  status: 200
  query:
    query: |
      query author_with_both($includeId: Boolean!, $skipId: Boolean!) {
        author {
          id @include(if: $includeId) @skip(if: $skipId)
          name
        }
      }
    variables:
      includeId: true
      skipId: false
  response:
    data:
      author:
      - id: 1
        name: Author 1
      - id: 2
        name: Author 2

# include: false, skip: true -> id omitted
- url: /v1/graphql
  status: 200
  query:
    query: |
      query author_with_both($includeId: Boolean!, $skipId: Boolean!) {
        author {
          id @include(if: $includeId) @skip(if: $skipId)
          name
        }
      }
    variables:
      includeId: false
      skipId: true
  response:
    data:
      author:
      - name: Author 1
      - name: Author 2

# include: true, skip: true -> id omitted (skip wins over include)
- url: /v1/graphql
  status: 200
  query:
    query: |
      query author_with_both($includeId: Boolean!, $skipId: Boolean!) {
        author {
          id @include(if: $includeId) @skip(if: $skipId)
          name
        }
      }
    variables:
      includeId: true
      skipId: true
  response:
    data:
      author:
      - name: Author 1
      - name: Author 2

View File

@ -0,0 +1,53 @@
# Negative tests for directive validation: unknown directives, repeated
# non-repeatable directives, and directives used at a disallowed location must
# all be rejected with a validation-failed error.

- description: rejects unknown directives
  url: /v1/graphql
  status: 200
  query:
    query: |
      query {
        author {
          id @exclude(if: true)
          name
        }
      }
  response:
    errors:
    - extensions:
        path: $.selectionSet.author.selectionSet
        code: validation-failed
      message: directive "exclude" is not defined in the schema

- description: rejects duplicate directives
  url: /v1/graphql
  status: 200
  query:
    query: |
      query {
        author {
          id @include(if: true) @include(if: true)
          name
        }
      }
  response:
    errors:
    - extensions:
        path: $.selectionSet.author.selectionSet
        code: validation-failed
      message: 'the following directives are used more than once: include'

- description: rejects directives on wrong element
  url: /v1/graphql
  status: 200
  query:
    query: |
      query @include(if: true) {
        author {
          id
          name
        }
      }
  response:
    errors:
    - extensions:
        path: $
        code: validation-failed
      message: directive "include" is not allowed on a query

View File

@ -127,6 +127,12 @@ class TestGraphQLQueryBasicCommon:
    def test_select_query_author_with_include_directive(self, hge_ctx, transport):
        """Checks @include on a field against the expected fixture responses."""
        check_query_f(hge_ctx, self.dir() + '/select_query_author_include_directive.yaml', transport)
    def test_select_query_author_with_skip_include_directive(self, hge_ctx, transport):
        """Checks that @skip and @include can be combined on the same field."""
        check_query_f(hge_ctx, self.dir() + '/select_query_author_skip_include_directives.yaml', transport)
    def test_select_query_author_with_wrong_directive_err(self, hge_ctx, transport):
        """Checks that unknown, duplicated, or misplaced directives are rejected."""
        check_query_f(hge_ctx, self.dir() + '/select_query_author_wrong_directive_err.yaml', transport)
    def test_select_query_where(self, hge_ctx, transport):
        """Checks a basic select query with a where clause against its fixture."""
        check_query_f(hge_ctx, self.dir() + '/select_query_author_where.yaml', transport)