A few fixes and improvements to actions (fix #3977, #4061 & close #4021) (#4109)

* add 'ID' to default scalars for custom types, fix #4061

* preserve cookie headers from sync action webhook, close #4021

* validate action webhook response to conform to output type, fix #3977 (a simplified sketch follows this list)

* fix tests, don't run actions' tests on PG version < 10

* update CHANGELOG.md

* no-op refactor, use types from http-network more
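
For the webhook response validation (fix #3977 above), a minimal standalone sketch of the rule the server now enforces is shown below. This is an editor's illustration, not the project code: output fields are encoded as a simple name-to-nullability map and errors as Either Text, standing in for the real ActionOutputFields / G.GType / QErr machinery in the diff further down.

{-# LANGUAGE OverloadedStrings #-}
-- Sketch: a webhook response object is rejected if it contains fields not declared
-- in the action's output type, or if a non-nullable output field is missing or null.
import           Control.Monad       (forM_, unless)
import qualified Data.Aeson          as J
import qualified Data.HashMap.Strict as Map
import           Data.Text           (Text)
import qualified Data.Text           as T

-- True = the output field is nullable (stands in for G.GType + G.isNullable)
type OutputFields = Map.HashMap Text Bool

validateResponseObject :: OutputFields -> Map.HashMap Text J.Value -> Either Text ()
validateResponseObject outputFields obj = do
  -- fields not declared in the output type must not appear in the response
  let extraFields = filter (not . (`Map.member` outputFields)) (Map.keys obj)
  unless (null extraFields) $
    Left $ "unexpected fields in webhook response: " <> T.intercalate ", " extraFields
  -- every non-nullable output field must be present with a non-null value
  forM_ (Map.toList outputFields) $ \(name, nullable) ->
    unless nullable $ case Map.lookup name obj of
      Nothing     -> Left $ "field " <> name <> " expected in webhook response, but not found"
      Just J.Null -> Left $ "expecting not null value for field " <> name
      Just _      -> Right ()

-- e.g., with the OutObject type from the tests below (id: ID!, name: String):
--   validateResponseObject (Map.fromList [("id", False), ("name", True)])
--                          (Map.fromList [("name", J.String "Alice")])
-- evaluates to Left "field id expected in webhook response, but not found"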

Co-authored-by: Vamshi Surabhi <0x777@users.noreply.github.com>
Rakesh Emmadi 2020-03-20 12:16:45 +05:30 committed by GitHub
parent 2a24b660cd
commit f80b69e931
28 changed files with 390 additions and 169 deletions

View File

@ -87,4 +87,7 @@
- auto-include `__typename` field in custom types' objects (fix #4063)
- squash some potential space leaks (#3937)
- docs: bump MarkupSafe version (#4102)
- console: add design system base components (#3866)
- server: validate action webhook response to conform to action output type (fix #3977)
- server: preserve cookie headers from sync action webhook (close #4021)
- server: add 'ID' to default scalars in custom types (fix #4061)
- console: add design system base components (#3866)

View File

@ -47,7 +47,8 @@ import Hasura.Prelude
import Hasura.RQL.DDL.Headers
import Hasura.RQL.Types
import Hasura.Server.Context
import Hasura.Server.Utils (RequestId, mkClientHeadersForward)
import Hasura.Server.Utils (RequestId, mkClientHeadersForward,
mkSetCookieHeaders)
import Hasura.Server.Version (HasVersion)
import qualified Hasura.GraphQL.Execute.LiveQuery as EL
@ -170,12 +171,11 @@ getExecPlanPartial userInfo sc enableAL req = do
-- to be executed
data ExecOp
= ExOpQuery !LazyRespTx !(Maybe EQ.GeneratedSqlMap)
| ExOpMutation !LazyRespTx
| ExOpMutation !N.ResponseHeaders !LazyRespTx
| ExOpSubs !EL.LiveQueryPlan
-- The graphql query is resolved into an execution operation
type ExecPlanResolved
= GQExecPlan ExecOp
type ExecPlanResolved = GQExecPlan ExecOp
getResolvedExecPlan
:: (HasVersion, MonadError QErr m, MonadIO m)
@ -215,8 +215,9 @@ getResolvedExecPlan pgExecCtx planCache userInfo sqlGenCtx
getExecPlanPartial userInfo sc enableAL req
forM partialExecPlan $ \(gCtx, rootSelSet) ->
case rootSelSet of
VQ.RMutation selSet ->
ExOpMutation <$> getMutOp gCtx sqlGenCtx userInfo httpManager reqHeaders selSet
VQ.RMutation selSet -> do
(tx, respHeaders) <- getMutOp gCtx sqlGenCtx userInfo httpManager reqHeaders selSet
pure $ ExOpMutation respHeaders tx
VQ.RQuery selSet -> do
(queryTx, plan, genSql) <- getQueryOp gCtx sqlGenCtx userInfo queryReusability selSet
traverse_ (addPlanToCache . EP.RPQuery) plan
@ -286,16 +287,16 @@ resolveMutSelSet
, MonadIO m
)
=> VQ.SelSet
-> m LazyRespTx
-> m (LazyRespTx, N.ResponseHeaders)
resolveMutSelSet fields = do
aliasedTxs <- forM (toList fields) $ \fld -> do
fldRespTx <- case VQ._fName fld of
"__typename" -> return $ return $ encJFromJValue mutationRootName
_ -> fmap liftTx . evalReusabilityT $ GR.mutFldToTx fld
"__typename" -> return (return $ encJFromJValue mutationRootName, [])
_ -> evalReusabilityT $ GR.mutFldToTx fld
return (G.unName $ G.unAlias $ VQ._fAlias fld, fldRespTx)
-- combines all transactions into a single transaction
return $ liftTx $ toSingleTx aliasedTxs
return (liftTx $ toSingleTx aliasedTxs, concatMap (snd . snd) aliasedTxs)
where
-- A list of aliased transactions for eg
-- [("f1", Tx r1), ("f2", Tx r2)]
@ -304,7 +305,7 @@ resolveMutSelSet fields = do
-- toSingleTx :: [(Text, LazyRespTx)] -> LazyRespTx
toSingleTx aliasedTxs =
fmap encJFromAssocList $
forM aliasedTxs $ \(al, tx) -> (,) al <$> tx
forM aliasedTxs $ \(al, (tx, _)) -> (,) al <$> tx
getMutOp
:: (HasVersion, MonadError QErr m, MonadIO m)
@ -314,17 +315,16 @@ getMutOp
-> HTTP.Manager
-> [N.Header]
-> VQ.SelSet
-> m LazyRespTx
-> m (LazyRespTx, N.ResponseHeaders)
getMutOp ctx sqlGenCtx userInfo manager reqHeaders selSet =
runE_ $ resolveMutSelSet selSet
peelReaderT $ resolveMutSelSet selSet
where
runE_ action = do
res <- runExceptT $ runReaderT action
peelReaderT action =
runReaderT action
( userInfo, queryCtxMap, mutationCtxMap
, typeMap, fldMap, ordByCtx, insCtxMap, sqlGenCtx
, manager, reqHeaders
)
either throwError return res
where
queryCtxMap = _gQueryCtxMap ctx
mutationCtxMap = _gMutationCtxMap ctx
@ -414,9 +414,7 @@ execRemoteGQ reqId userInfo reqHdrs q rsi opDef = do
L.unLogger logger $ QueryLog q Nothing reqId
(time, res) <- withElapsedTime $ liftIO $ try $ HTTP.httpLbs req manager
resp <- either httpThrow return res
let cookieHdrs = getCookieHdr (resp ^.. Wreq.responseHeader "Set-Cookie")
respHdrs = Just $ mkRespHeaders cookieHdrs
!httpResp = HttpResponse (encJFromLBS $ resp ^. Wreq.responseBody) respHdrs
let !httpResp = HttpResponse (encJFromLBS $ resp ^. Wreq.responseBody) $ mkSetCookieHeaders resp
return (time, httpResp)
where
@ -428,7 +426,3 @@ execRemoteGQ reqId userInfo reqHdrs q rsi opDef = do
userInfoToHdrs = map (\(k, v) -> (CI.mk $ CS.cs k, CS.cs v)) $
userInfoToList userInfo
getCookieHdr = fmap (\h -> ("Set-Cookie", h))
mkRespHeaders = map (\(k, v) -> Header (bsToTxt $ CI.original k, bsToTxt v))

View File

@ -1,5 +1,6 @@
module Hasura.GraphQL.Resolve
( mutFldToTx
, queryFldToPGAST
, traverseQueryRootFldAST
, UnresolvedVal(..)
@ -120,29 +121,30 @@ mutFldToTx
, MonadIO m
)
=> V.Field
-> m RespTx
-> m (RespTx, HTTP.ResponseHeaders)
mutFldToTx fld = do
userInfo <- asks getter
opCtx <- getOpCtx $ V._fName fld
let noRespHeaders = fmap (,[])
case opCtx of
MCInsert ctx -> do
validateHdrs userInfo (_iocHeaders ctx)
RI.convertInsert (userRole userInfo) (_iocTable ctx) fld
noRespHeaders $ RI.convertInsert (userRole userInfo) (_iocTable ctx) fld
MCInsertOne ctx -> do
validateHdrs userInfo (_iocHeaders ctx)
RI.convertInsertOne (userRole userInfo) (_iocTable ctx) fld
noRespHeaders $ RI.convertInsertOne (userRole userInfo) (_iocTable ctx) fld
MCUpdate ctx -> do
validateHdrs userInfo (_uocHeaders ctx)
RM.convertUpdate ctx fld
noRespHeaders $ RM.convertUpdate ctx fld
MCUpdateByPk ctx -> do
validateHdrs userInfo (_uocHeaders ctx)
RM.convertUpdateByPk ctx fld
noRespHeaders $ RM.convertUpdateByPk ctx fld
MCDelete ctx -> do
validateHdrs userInfo (_docHeaders ctx)
RM.convertDelete ctx fld
noRespHeaders $ RM.convertDelete ctx fld
MCDeleteByPk ctx -> do
validateHdrs userInfo (_docHeaders ctx)
RM.convertDeleteByPk ctx fld
noRespHeaders $ RM.convertDeleteByPk ctx fld
MCAction ctx ->
RA.resolveActionMutation fld ctx (userVars userInfo)

View File

@ -41,7 +41,7 @@ import Hasura.RQL.DDL.Schema.Cache
import Hasura.RQL.DML.Select (asSingleRowJsonResp)
import Hasura.RQL.Types
import Hasura.RQL.Types.Run
import Hasura.Server.Utils (mkClientHeadersForward)
import Hasura.Server.Utils (mkClientHeadersForward, mkSetCookieHeaders)
import Hasura.Server.Version (HasVersion)
import Hasura.SQL.Types
import Hasura.SQL.Value (PGScalarValue (..), pgScalarValueToJson,
@ -97,13 +97,13 @@ resolveActionMutation
=> Field
-> ActionExecutionContext
-> UserVars
-> m RespTx
-> m (RespTx, HTTP.ResponseHeaders)
resolveActionMutation field executionContext sessionVariables =
case executionContext of
ActionExecutionSyncWebhook executionContextSync ->
resolveActionMutationSync field executionContextSync sessionVariables
ActionExecutionAsync ->
resolveActionMutationAsync field sessionVariables
(,[]) <$> resolveActionMutationAsync field sessionVariables
-- | Synchronously execute webhook handler and resolve response to action "output"
resolveActionMutationSync
@ -121,14 +121,15 @@ resolveActionMutationSync
=> Field
-> SyncActionExecutionContext
-> UserVars
-> m RespTx
-> m (RespTx, HTTP.ResponseHeaders)
resolveActionMutationSync field executionContext sessionVariables = do
let inputArgs = J.toJSON $ fmap annInpValueToJson $ _fArguments field
actionContext = ActionContext actionName
handlerPayload = ActionWebhookPayload actionContext sessionVariables inputArgs
manager <- asks getter
reqHeaders <- asks getter
webhookRes <- callWebhook manager outputType reqHeaders confHeaders forwardClientHeaders resolvedWebhook handlerPayload
(webhookRes, respHeaders) <- callWebhook manager outputType outputFields reqHeaders confHeaders
forwardClientHeaders resolvedWebhook handlerPayload
let webhookResponseExpression = RS.AEInput $ UVSQL $
toTxtValue $ WithScalarType PGJSONB $ PGValJSONB $ Q.JSONB $ J.toJSON webhookRes
selectAstUnresolved <-
@ -136,9 +137,9 @@ resolveActionMutationSync field executionContext sessionVariables = do
(_fType field) $ _fSelSet field
astResolved <- RS.traverseAnnSimpleSel resolveValTxt selectAstUnresolved
let jsonAggType = mkJsonAggSelect outputType
return $ asSingleRowJsonResp (RS.selectQuerySQL jsonAggType astResolved) []
return $ (,respHeaders) $ asSingleRowJsonResp (RS.selectQuerySQL jsonAggType astResolved) []
where
SyncActionExecutionContext actionName outputType definitionList resolvedWebhook confHeaders
SyncActionExecutionContext actionName outputType outputFields definitionList resolvedWebhook confHeaders
forwardClientHeaders = executionContext
{- Note: [Async action architecture]
@ -281,9 +282,6 @@ asyncActionsProcessor cacheRef pgPool httpManager = forever $ do
A.mapConcurrently_ (callHandler actionCache) asyncInvocations
threadDelay (1 * 1000 * 1000)
where
getActionDefinition actionCache actionName =
_aiDefinition <$> Map.lookup actionName actionCache
runTx :: (Monoid a) => Q.TxE QErr a -> IO a
runTx q = do
res <- runExceptT $ Q.runTx' pgPool q
@ -293,20 +291,23 @@ asyncActionsProcessor cacheRef pgPool httpManager = forever $ do
callHandler actionCache actionLogItem = do
let ActionLogItem actionId actionName reqHeaders
sessionVariables inputPayload = actionLogItem
case getActionDefinition actionCache actionName of
case Map.lookup actionName actionCache of
Nothing -> return ()
Just definition -> do
let webhookUrl = _adHandler definition
Just actionInfo -> do
let definition = _aiDefinition actionInfo
outputFields = _aiOutputFields actionInfo
webhookUrl = _adHandler definition
forwardClientHeaders = _adForwardClientHeaders definition
confHeaders = _adHeaders definition
outputType = _adOutputType definition
actionContext = ActionContext actionName
eitherRes <- runExceptT $ callWebhook httpManager outputType reqHeaders confHeaders
forwardClientHeaders webhookUrl $
ActionWebhookPayload actionContext sessionVariables inputPayload
eitherRes <- runExceptT $
callWebhook httpManager outputType outputFields reqHeaders confHeaders
forwardClientHeaders webhookUrl $
ActionWebhookPayload actionContext sessionVariables inputPayload
case eitherRes of
Left e -> setError actionId e
Right responsePayload -> setCompleted actionId $ J.toJSON responsePayload
Left e -> setError actionId e
Right (responsePayload, _) -> setCompleted actionId $ J.toJSON responsePayload
setError :: UUID.UUID -> QErr -> IO ()
setError actionId e =
@ -361,13 +362,15 @@ callWebhook
:: (HasVersion, MonadIO m, MonadError QErr m)
=> HTTP.Manager
-> GraphQLType
-> ActionOutputFields
-> [HTTP.Header]
-> [HeaderConf]
-> Bool
-> ResolvedWebhook
-> ActionWebhookPayload
-> m ActionWebhookResponse
callWebhook manager outputType reqHeaders confHeaders forwardClientHeaders resolvedWebhook actionWebhookPayload = do
-> m (ActionWebhookResponse, HTTP.ResponseHeaders)
callWebhook manager outputType outputFields reqHeaders confHeaders
forwardClientHeaders resolvedWebhook actionWebhookPayload = do
resolvedConfHeaders <- makeHeadersFromConf confHeaders
let clientHeaders = if forwardClientHeaders then mkClientHeadersForward reqHeaders else []
contentType = ("Content-Type", "application/json")
@ -396,14 +399,19 @@ callWebhook manager outputType reqHeaders confHeaders forwardClientHeaders resol
if | HTTP.statusIsSuccessful responseStatus -> do
let expectingArray = isListType outputType
addInternalToErr e = e{qeInternal = Just webhookResponseObject}
throw400Detail t = throwError $ addInternalToErr $ err400 Unexpected t
webhookResponse <- modifyQErr addInternalToErr $ decodeValue responseValue
case webhookResponse of
AWRArray{} -> when (not expectingArray) $
throw400Detail "expecting object for action webhook response but got array"
AWRObject{} -> when expectingArray $
throw400Detail "expecting array for action webhook response but got object"
pure webhookResponse
-- In case of any error, add the webhook response to the error's internal field
modifyQErr addInternalToErr $ do
webhookResponse <- decodeValue responseValue
case webhookResponse of
AWRArray objs -> do
when (not expectingArray) $
throwUnexpected "expecting object for action webhook response but got array"
mapM_ validateResponseObject objs
AWRObject obj -> do
when expectingArray $
throwUnexpected "expecting array for action webhook response but got object"
validateResponseObject obj
pure (webhookResponse, mkSetCookieHeaders responseWreq)
| HTTP.statusIsClientError responseStatus -> do
ActionWebhookErrorResponse message maybeCode <-
@ -414,6 +422,23 @@ callWebhook manager outputType reqHeaders confHeaders forwardClientHeaders resol
| otherwise ->
throw500WithDetail "internal error" webhookResponseObject
where
throwUnexpected = throw400 Unexpected
-- Webhook response object should conform to action output fields
validateResponseObject obj = do
-- Fields not specified in the output type shouldn't be present in the response
let extraFields = filter (not . flip Map.member outputFields) $ map G.Name $ Map.keys obj
when (not $ null extraFields) $ throwUnexpected $
"unexpected fields in webhook response: " <> showNames extraFields
void $ flip Map.traverseWithKey outputFields $ \fieldName fieldTy ->
-- When a field is non-nullable, it has to be present in the response with a non-null value
when (not $ G.isNullable fieldTy) $ case Map.lookup (G.unName fieldName) obj of
Nothing -> throwUnexpected $
"field " <> fieldName <<> " expected in webhook response, but not found"
Just v -> when (v == J.Null) $ throwUnexpected $
"expecting not null value for field " <>> fieldName
annInpValueToJson :: AnnInpVal -> J.Value
annInpValueToJson annInpValue =

View File

@ -107,6 +107,7 @@ data SyncActionExecutionContext
= SyncActionExecutionContext
{ _saecName :: !ActionName
, _saecOutputType :: !GraphQLType
, _saecOutputFields :: !ActionOutputFields
, _saecDefinitionList :: ![(PGCol, PGScalarType)]
, _saecWebhook :: !ResolvedWebhook
, _saecHeaders :: ![HeaderConf]

View File

@ -68,6 +68,7 @@ mkMutationField actionName actionInfo definitionList =
ActionSynchronous ->
ActionExecutionSyncWebhook $ SyncActionExecutionContext actionName
(_adOutputType definition)
(_aiOutputFields actionInfo)
definitionList
(_adHandler definition)
(_adHeaders definition)

View File

@ -20,6 +20,7 @@ import qualified Hasura.GraphQL.Execute as E
import qualified Hasura.Logging as L
import qualified Hasura.Server.Telemetry.Counters as Telem
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Network.HTTP.Types as HTTP
runGQ
:: ( HasVersion
@ -41,8 +42,8 @@ runGQ reqId userInfo reqHdrs req = do
userInfo sqlGenCtx enableAL sc scVer httpManager reqHdrs req
case execPlan of
E.GExPHasura resolvedOp -> do
(telemTimeIO, telemQueryType, resp) <- runHasuraGQ reqId req userInfo resolvedOp
return (telemCacheHit, Telem.Local, (telemTimeIO, telemQueryType, HttpResponse resp Nothing))
(telemTimeIO, telemQueryType, respHdrs, resp) <- runHasuraGQ reqId req userInfo resolvedOp
return (telemCacheHit, Telem.Local, (telemTimeIO, telemQueryType, HttpResponse resp respHdrs))
E.GExPRemote rsi opDef -> do
let telemQueryType | G._todType opDef == G.OperationTypeMutation = Telem.Mutation
| otherwise = Telem.Query
@ -73,7 +74,7 @@ runGQBatched reqId userInfo reqHdrs reqs =
-- responses with distinct headers, so just do the simplest thing
-- in this case, and don't forward any.
let removeHeaders =
flip HttpResponse Nothing
flip HttpResponse []
. encJFromList
. map (either (encJFromJValue . encodeGQErr False) _hrBody)
try = flip catchError (pure . Left) . fmap Right
@ -89,7 +90,7 @@ runHasuraGQ
-> GQLReqUnparsed
-> UserInfo
-> E.ExecOp
-> m (DiffTime, Telem.QueryType, EncJSON)
-> m (DiffTime, Telem.QueryType, HTTP.ResponseHeaders, EncJSON)
-- ^ Also return 'Mutation' when the operation was a mutation, and the time
-- spent in the PG query; for telemetry.
runHasuraGQ reqId query userInfo resolvedOp = do
@ -98,15 +99,15 @@ runHasuraGQ reqId query userInfo resolvedOp = do
E.ExOpQuery tx genSql -> do
-- log the generated SQL and the graphql query
L.unLogger logger $ QueryLog query genSql reqId
runLazyTx' pgExecCtx tx
E.ExOpMutation tx -> do
([],) <$> runLazyTx' pgExecCtx tx
E.ExOpMutation respHeaders tx -> do
-- log the graphql query
L.unLogger logger $ QueryLog query Nothing reqId
runLazyTx pgExecCtx Q.ReadWrite $ withUserInfo userInfo tx
(respHeaders,) <$> runLazyTx pgExecCtx Q.ReadWrite (withUserInfo userInfo tx)
E.ExOpSubs _ ->
throw400 UnexpectedPayload
"subscriptions are not supported over HTTP, use websockets instead"
resp <- liftEither respE
(respHdrs, resp) <- liftEither respE
let !json = encodeGQResp $ GQSuccess $ encJToLBS resp
telemQueryType = case resolvedOp of E.ExOpMutation{} -> Telem.Mutation ; _ -> Telem.Query
return (telemTimeIO, telemQueryType, json)
return (telemTimeIO, telemQueryType, respHdrs, json)

View File

@ -63,7 +63,7 @@ import qualified Hasura.Server.Telemetry.Counters as Telem
-- this to track a connection's operations so we can remove them from 'LiveQueryState', and
-- log.
--
-- NOTE!: This must be kept consistent with the global 'LiveQueryState', in 'onClose'
-- NOTE!: This must be kept consistent with the global 'LiveQueryState', in 'onClose'
-- and 'onStart'.
type OperationMap
= STMMap.Map OperationId (LQ.LiveQueryId, Maybe OperationName)
@ -325,7 +325,8 @@ onStart serverEnv wsConn (StartMsg opId q) = catchAndIgnore $ do
runHasuraGQ timerTot telemCacheHit reqId query userInfo = \case
E.ExOpQuery opTx genSql ->
execQueryOrMut Telem.Query genSql $ runLazyTx' pgExecCtx opTx
E.ExOpMutation opTx ->
-- Response headers are discarded over websockets
E.ExOpMutation _ opTx ->
execQueryOrMut Telem.Mutation Nothing $
runLazyTx pgExecCtx Q.ReadWrite $ withUserInfo userInfo opTx
E.ExOpSubs lqOp -> do

View File

@ -81,7 +81,7 @@ resolveAction
:: (QErrM m, MonadIO m)
=> (NonObjectTypeMap, AnnotatedObjects)
-> ActionDefinitionInput
-> m ResolvedActionDefinition
-> m (ResolvedActionDefinition, ActionOutputFields)
resolveAction customTypes actionDefinition = do
let responseType = unGraphQLType $ _adOutputType actionDefinition
responseBaseType = G.getBaseType responseType
@ -96,8 +96,10 @@ resolveAction customTypes actionDefinition = do
<> showNamedTy argumentBaseType <>
" should be a scalar/enum/input_object"
-- Check if the response type is an object
getObjectTypeInfo responseBaseType
traverse resolveWebhook actionDefinition
annFields <- _aotAnnotatedFields <$> getObjectTypeInfo responseBaseType
let outputFields = Map.fromList $ map (unObjectFieldName *** fst) $ Map.toList annFields
resolvedDef <- traverse resolveWebhook actionDefinition
pure (resolvedDef, outputFields)
where
getNonObjectTypeInfo typeName = do
let nonObjectTypeMap = unNonObjectTypeMap $ fst $ customTypes

View File

@ -48,8 +48,7 @@ validateCustomTypeDefinitions tableCache customTypes = do
enumTypes =
Set.fromList $ map (unEnumTypeName . _etdName) enumDefinitions
-- TODO, clean it up maybe?
defaultScalars = map G.NamedType ["Int", "Float", "String", "Boolean"]
defaultScalars = map G.NamedType ["Int", "Float", "String", "Boolean", "ID"]
validateEnum
:: (MonadValidate [CustomTypeValidationError] m)

View File

@ -256,7 +256,7 @@ class (ToJSON a) => IsPerm a where
getPermAcc2
:: DropPerm a -> PermAccessor (PermInfo a)
getPermAcc2 _ = permAccessor
addPermP2 :: (IsPerm a, MonadTx m, HasSystemDefined m) => QualifiedTable -> PermDef a -> m ()
addPermP2 tn pd = do
let pt = permAccToType $ getPermAcc1 pd

View File

@ -266,10 +266,10 @@ buildSchemaCacheRule = proc (catalogMetadata, invalidationKeys) -> do
addActionContext e = "in action " <> name <<> "; " <> e
(| withRecordInconsistency (
(| modifyErrA ( do
resolvedDef <- bindErrorA -< resolveAction resolvedCustomTypes def
(resolvedDef, outFields) <- bindErrorA -< resolveAction resolvedCustomTypes def
let permissionInfos = map (ActionPermissionInfo . _apmRole) actionPermissions
permissionMap = mapFromL _apiRole permissionInfos
returnA -< ActionInfo name resolvedDef permissionMap comment
returnA -< ActionInfo name outFields resolvedDef permissionMap comment
)
|) addActionContext)
|) metadataObj)

View File

@ -13,8 +13,10 @@ module Hasura.RQL.Types.Action
, ResolvedWebhook(..)
, ResolvedActionDefinition
, ActionOutputFields
, ActionInfo(..)
, aiName
, aiOutputFields
, aiDefinition
, aiPermissions
, aiComment
@ -117,13 +119,15 @@ data ActionPermissionInfo
$(J.deriveToJSON (J.aesonDrop 4 J.snakeCase) ''ActionPermissionInfo)
type ActionPermissionMap = Map.HashMap RoleName ActionPermissionInfo
type ActionOutputFields = Map.HashMap G.Name G.GType
data ActionInfo
= ActionInfo
{ _aiName :: !ActionName
, _aiDefinition :: !ResolvedActionDefinition
, _aiPermissions :: !ActionPermissionMap
, _aiComment :: !(Maybe Text)
{ _aiName :: !ActionName
, _aiOutputFields :: !ActionOutputFields
, _aiDefinition :: !ResolvedActionDefinition
, _aiPermissions :: !ActionPermissionMap
, _aiComment :: !(Maybe Text)
} deriving (Show, Eq)
$(J.deriveToJSON (J.aesonDrop 3 J.snakeCase) ''ActionInfo)
$(makeLenses ''ActionInfo)

View File

@ -7,7 +7,7 @@ import Control.Concurrent.MVar.Lifted
import Control.Exception (IOException, try)
import Control.Lens (view, _2)
import Control.Monad.Stateless
import Control.Monad.Trans.Control (MonadBaseControl)
import Control.Monad.Trans.Control (MonadBaseControl)
import Data.Aeson hiding (json)
import Data.Either (isRight)
import Data.Int (Int64)
@ -21,6 +21,7 @@ import Web.Spock.Core ((<//>))
import qualified Control.Concurrent.Async.Lifted.Safe as LA
import qualified Data.ByteString.Lazy as BL
import qualified Data.CaseInsensitive as CI
import qualified Data.HashMap.Strict as M
import qualified Data.HashSet as S
import qualified Data.Text as T
@ -71,7 +72,7 @@ data SchemaCacheRef
-- 1. Allow maximum throughput for serving requests (/v1/graphql) (as each
-- request reads the current schemacache)
-- 2. We don't want to process more than one request at any point of time
-- which would modify the schema cache as such queries are expensive.
-- which would modify the schema cache as such queries are expensive.
--
-- Another option is to consider removing this lock in place of `_scrCache ::
-- MVar ...` if it's okay or in fact correct to block during schema update in
@ -79,7 +80,7 @@ data SchemaCacheRef
-- situation (in between building new schemacache and before writing it to
-- the IORef) where we serve a request with a stale schemacache but I guess
-- it is an okay trade-off to pay for a higher throughput (I remember doing a
-- bunch of benchmarks to test this hypothesis).
-- bunch of benchmarks to test this hypothesis).
, _scrCache :: IORef (RebuildableSchemaCache Run, SchemaCacheVer)
, _scrOnChange :: IO ()
-- ^ an action to run when schemacache changes
@ -143,7 +144,7 @@ withSCUpdate scr logger action = do
(!res, !newSC) <- action
liftIO $ do
-- update schemacache in IO reference
modifyIORef' cacheRef $ \(_, prevVer) ->
modifyIORef' cacheRef $ \(_, prevVer) ->
let !newVer = incSchemaCacheVer prevVer
in (newSC, newVer)
-- log any inconsistent objects
@ -198,6 +199,10 @@ buildQCtx = do
sqlGenCtx <- scSQLGenCtx . hcServerCtx <$> ask
return $ QCtx userInfo cache sqlGenCtx
setHeader :: MonadIO m => HTTP.Header -> Spock.ActionT m ()
setHeader (headerName, headerValue) =
Spock.setHeader (bsToTxt $ CI.original headerName) (bsToTxt headerValue)
-- | Typeclass representing the metadata API authorization effect
class MetadataApiAuthorization m where
authorizeMetadataApi :: RQLQuery -> UserInfo -> Handler m ()
@ -270,24 +275,22 @@ mkSpockAction serverCtx qErrEncoder qErrModifier apiHandler = do
case result of
JSONResp (HttpResponse encJson h) ->
possiblyCompressedLazyBytes userInfo reqId req reqBody qTime (encJToLBS encJson)
(pure jsonHeader <> mkHeaders h) reqHeaders
(pure jsonHeader <> h) reqHeaders
RawResp (HttpResponse rawBytes h) ->
possiblyCompressedLazyBytes userInfo reqId req reqBody qTime rawBytes (mkHeaders h) reqHeaders
possiblyCompressedLazyBytes userInfo reqId req reqBody qTime rawBytes h reqHeaders
possiblyCompressedLazyBytes userInfo reqId req reqBody qTime respBytes respHeaders reqHeaders = do
let (compressedResp, mEncodingHeader, mCompressionType) =
compressResponse (Wai.requestHeaders req) respBytes
encodingHeader = maybe [] pure mEncodingHeader
reqIdHeader = (requestIdHeader, unRequestId reqId)
reqIdHeader = (requestIdHeader, txtToBs $ unRequestId reqId)
allRespHeaders = pure reqIdHeader <> encodingHeader <> respHeaders
lift $ logHttpSuccess logger userInfo reqId req reqBody respBytes compressedResp qTime mCompressionType reqHeaders
mapM_ (uncurry Spock.setHeader) allRespHeaders
mapM_ setHeader allRespHeaders
Spock.lazyBytes compressedResp
mkHeaders = maybe [] (map unHeader)
v1QueryHandler
:: (HasVersion, MonadIO m, MonadBaseControl IO m, MetadataApiAuthorization m)
v1QueryHandler
:: (HasVersion, MonadIO m, MonadBaseControl IO m, MetadataApiAuthorization m)
=> RQLQuery -> Handler m (HttpResponse EncJSON)
v1QueryHandler query = do
userInfo <- asks hcUser
@ -296,7 +299,7 @@ v1QueryHandler query = do
logger <- scLogger . hcServerCtx <$> ask
res <- bool (fst <$> dbAction) (withSCUpdate scRef logger dbAction) $
queryModifiesSchemaCache query
return $ HttpResponse res Nothing
return $ HttpResponse res []
where
-- Hit postgres
dbAction = do
@ -341,14 +344,14 @@ gqlExplainHandler query = do
sqlGenCtx <- scSQLGenCtx . hcServerCtx <$> ask
enableAL <- scEnableAllowlist . hcServerCtx <$> ask
res <- GE.explainGQLQuery pgExecCtx sc sqlGenCtx enableAL query
return $ HttpResponse res Nothing
return $ HttpResponse res []
v1Alpha1PGDumpHandler :: (MonadIO m) => PGD.PGDumpReqBody -> Handler m APIResp
v1Alpha1PGDumpHandler b = do
onlyAdmin
ci <- scConnInfo . hcServerCtx <$> ask
output <- PGD.execPGDump b ci
return $ RawResp $ HttpResponse output (Just [Header sqlHeader])
return $ RawResp $ HttpResponse output [sqlHeader]
consoleAssetsHandler
:: (MonadIO m, HttpLog m)
@ -366,7 +369,7 @@ consoleAssetsHandler logger dir path = do
either (onError reqHeaders) onSuccess eFileContents
where
onSuccess c = do
mapM_ (uncurry Spock.setHeader) headers
mapM_ setHeader headers
Spock.lazyBytes c
onError :: (MonadIO m, HttpLog m) => [HTTP.Header] -> IOException -> Spock.ActionT m ()
onError hdrs = raiseGenericApiError logger hdrs . err404 NotFound . T.pack . show
@ -375,7 +378,7 @@ consoleAssetsHandler logger dir path = do
(fileName, encHeader) = case T.stripSuffix ".gz" fn of
Just v -> (v, [gzipHeader])
Nothing -> (fn, [])
mimeType = bsToTxt $ defaultMimeLookup fileName
mimeType = defaultMimeLookup fileName
headers = ("Content-Type", mimeType) : encHeader
class (Monad m) => ConsoleRenderer m where
@ -552,7 +555,7 @@ httpApp corsCfg serverCtx enableConsole consoleAssetsDir enableTelemetry = do
else Spock.setStatus HTTP.status500 >> Spock.text "ERROR"
Spock.get "v1/version" $ do
uncurry Spock.setHeader jsonHeader
setHeader jsonHeader
Spock.lazyBytes $ encode $ object [ "version" .= currentVersion ]
when enableMetadata $ do
@ -578,7 +581,7 @@ httpApp corsCfg serverCtx enableConsole consoleAssetsDir enableTelemetry = do
mkGetHandler $ do
onlyAdmin
let res = encJFromJValue $ runGetConfig (scAuthMode serverCtx)
return $ JSONResp $ HttpResponse res Nothing
return $ JSONResp $ HttpResponse res []
when enableGraphQL $ do
Spock.post "v1alpha1/graphql" $ spockAction GH.encodeGQErr id $
@ -592,22 +595,22 @@ httpApp corsCfg serverCtx enableConsole consoleAssetsDir enableTelemetry = do
mkGetHandler $ do
onlyAdmin
respJ <- liftIO $ EKG.sampleAll $ scEkgStore serverCtx
return $ JSONResp $ HttpResponse (encJFromJValue $ EKG.sampleToJson respJ) Nothing
return $ JSONResp $ HttpResponse (encJFromJValue $ EKG.sampleToJson respJ) []
Spock.get "dev/plan_cache" $ spockAction encodeQErr id $
mkGetHandler $ do
onlyAdmin
respJ <- liftIO $ E.dumpPlanCache $ scPlanCache serverCtx
return $ JSONResp $ HttpResponse (encJFromJValue respJ) Nothing
return $ JSONResp $ HttpResponse (encJFromJValue respJ) []
Spock.get "dev/subscriptions" $ spockAction encodeQErr id $
mkGetHandler $ do
onlyAdmin
respJ <- liftIO $ EL.dumpLiveQueriesState False $ scLQState serverCtx
return $ JSONResp $ HttpResponse (encJFromJValue respJ) Nothing
return $ JSONResp $ HttpResponse (encJFromJValue respJ) []
Spock.get "dev/subscriptions/extended" $ spockAction encodeQErr id $
mkGetHandler $ do
onlyAdmin
respJ <- liftIO $ EL.dumpLiveQueriesState True $ scLQState serverCtx
return $ JSONResp $ HttpResponse (encJFromJValue respJ) Nothing
return $ JSONResp $ HttpResponse (encJFromJValue respJ) []
forM_ [Spock.GET, Spock.POST] $ \m -> Spock.hookAny m $ \_ -> do
req <- Spock.request
@ -672,6 +675,6 @@ raiseGenericApiError logger headers qErr = do
reqBody <- liftIO $ Wai.strictRequestBody req
reqId <- getRequestId $ Wai.requestHeaders req
lift $ logHttpError logger Nothing reqId req (Left reqBody) qErr headers
uncurry Spock.setHeader jsonHeader
setHeader jsonHeader
Spock.setStatus $ qeStatus qErr
Spock.lazyBytes $ encode qErr

View File

@ -19,20 +19,20 @@ module Hasura.Server.Auth
) where
import Control.Concurrent.Extended (forkImmortal)
import Control.Exception (try)
import Control.Exception (try)
import Control.Lens
import Data.Aeson
import Data.IORef (newIORef)
import Data.Time.Clock (UTCTime)
import Hasura.Server.Version (HasVersion)
import Data.IORef (newIORef)
import Data.Time.Clock (UTCTime)
import Hasura.Server.Version (HasVersion)
import qualified Data.Aeson as J
import qualified Data.ByteString.Lazy as BL
import qualified Data.HashMap.Strict as Map
import qualified Data.Text as T
import qualified Network.HTTP.Client as H
import qualified Network.HTTP.Types as N
import qualified Network.Wreq as Wreq
import qualified Data.Aeson as J
import qualified Data.ByteString.Lazy as BL
import qualified Data.HashMap.Strict as Map
import qualified Data.Text as T
import qualified Network.HTTP.Client as H
import qualified Network.HTTP.Types as N
import qualified Network.Wreq as Wreq
import Hasura.HTTP
import Hasura.Logging
@ -294,7 +294,7 @@ getUserInfoWithExpTime logger manager rawHeaders = \case
userInfoWhenNoAdminSecret = \case
Nothing -> throw401 $ adminSecretHeader <> "/"
<> deprecatedAccessKeyHeader <> " required, but not found"
<> deprecatedAccessKeyHeader <> " required, but not found"
Just role -> return $ mkUserInfo role usrVars
withNoExpTime a = (, Nothing) <$> a

View File

@ -29,7 +29,7 @@ import Hasura.Prelude
import Hasura.RQL.Types
import Hasura.Server.Auth.JWT.Internal (parseHmacKey, parseRsaKey)
import Hasura.Server.Auth.JWT.Logging
import Hasura.Server.Utils (fmapL, userRoleHeader)
import Hasura.Server.Utils (fmapL, getRequestHeader, userRoleHeader)
import Hasura.Server.Version (HasVersion)
import qualified Control.Concurrent.Extended as C
@ -297,8 +297,7 @@ processAuthZHeader jwtCtx headers authzHeader = do
-- see if there is a x-hasura-role header, or else pick the default role
getCurrentRole defaultRole =
let userRoleHeaderB = CS.cs userRoleHeader
mUserRole = snd <$> find (\h -> fst h == CI.mk userRoleHeaderB) headers
let mUserRole = getRequestHeader userRoleHeader headers
in maybe defaultRole RoleName $ mUserRole >>= mkNonEmptyText . bsToTxt
decodeJSON val = case J.fromJSON val of

View File

@ -24,7 +24,7 @@ compressionTypeToTxt CTGZip = "gzip"
compressResponse
:: NH.RequestHeaders
-> BL.ByteString
-> (BL.ByteString, Maybe (Text, Text), Maybe CompressionType)
-> (BL.ByteString, Maybe NH.Header, Maybe CompressionType)
compressResponse reqHeaders unCompressedResp =
let compressionTypeM = getRequestedCompression reqHeaders
appendCompressionType (res, headerM) = (res, headerM, compressionTypeM)

View File

@ -1,20 +1,13 @@
module Hasura.Server.Context
( HttpResponse(..)
, Header (..)
, Headers
)
(HttpResponse(..))
where
import Hasura.Prelude
newtype Header
= Header { unHeader :: (Text, Text) }
deriving (Show, Eq)
type Headers = [Header]
import qualified Network.HTTP.Types as HTTP
data HttpResponse a
= HttpResponse
{ _hrBody :: !a
, _hrHeaders :: !(Maybe Headers)
, _hrHeaders :: !HTTP.ResponseHeaders
} deriving (Functor, Foldable, Traversable)

View File

@ -1,6 +1,7 @@
{-# LANGUAGE TypeApplications #-}
module Hasura.Server.Utils where
import Control.Lens ((^..))
import Data.Aeson
import Data.Char
import Data.List (find)
@ -21,6 +22,7 @@ import qualified Data.UUID.V4 as UUID
import qualified Language.Haskell.TH.Syntax as TH
import qualified Network.HTTP.Client as HC
import qualified Network.HTTP.Types as HTTP
import qualified Network.Wreq as Wreq
import qualified Text.Regex.TDFA as TDFA
import qualified Text.Regex.TDFA.ByteString as TDFA
@ -30,45 +32,42 @@ newtype RequestId
= RequestId { unRequestId :: Text }
deriving (Show, Eq, ToJSON, FromJSON)
jsonHeader :: (T.Text, T.Text)
jsonHeader :: HTTP.Header
jsonHeader = ("Content-Type", "application/json; charset=utf-8")
sqlHeader :: (T.Text, T.Text)
sqlHeader :: HTTP.Header
sqlHeader = ("Content-Type", "application/sql; charset=utf-8")
htmlHeader :: (T.Text, T.Text)
htmlHeader :: HTTP.Header
htmlHeader = ("Content-Type", "text/html; charset=utf-8")
gzipHeader :: (T.Text, T.Text)
gzipHeader :: HTTP.Header
gzipHeader = ("Content-Encoding", "gzip")
brHeader :: (T.Text, T.Text)
brHeader = ("Content-Encoding", "br")
userRoleHeader :: T.Text
userRoleHeader :: IsString a => a
userRoleHeader = "x-hasura-role"
deprecatedAccessKeyHeader :: T.Text
deprecatedAccessKeyHeader :: IsString a => a
deprecatedAccessKeyHeader = "x-hasura-access-key"
adminSecretHeader :: T.Text
adminSecretHeader :: IsString a => a
adminSecretHeader = "x-hasura-admin-secret"
userIdHeader :: T.Text
userIdHeader :: IsString a => a
userIdHeader = "x-hasura-user-id"
requestIdHeader :: T.Text
requestIdHeader :: IsString a => a
requestIdHeader = "x-request-id"
getRequestHeader :: B.ByteString -> [HTTP.Header] -> Maybe B.ByteString
getRequestHeader :: HTTP.HeaderName -> [HTTP.Header] -> Maybe B.ByteString
getRequestHeader hdrName hdrs = snd <$> mHeader
where
mHeader = find (\h -> fst h == CI.mk hdrName) hdrs
mHeader = find (\h -> fst h == hdrName) hdrs
getRequestId :: (MonadIO m) => [HTTP.Header] -> m RequestId
getRequestId headers =
-- generate a request id for every request if the client has not sent it
case getRequestHeader (txtToBs requestIdHeader) headers of
case getRequestHeader requestIdHeader headers of
Nothing -> RequestId <$> liftIO generateFingerprint
Just reqId -> return $ RequestId $ bsToTxt reqId
@ -173,6 +172,12 @@ mkClientHeadersForward reqHeaders =
"User-Agent" -> Just ("X-Forwarded-User-Agent", hdrValue)
_ -> Nothing
mkSetCookieHeaders :: Wreq.Response a -> HTTP.ResponseHeaders
mkSetCookieHeaders resp =
map (headerName,) $ resp ^.. Wreq.responseHeader headerName
where
headerName = "Set-Cookie"
filterRequestHeaders :: [HTTP.Header] -> [HTTP.Header]
filterRequestHeaders =
filterHeaders $ Set.fromList commonClientHeadersIgnored
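
As a usage note for mkSetCookieHeaders above: the headers it collects are eventually written back to the client via the setHeader helper added to Hasura/Server/App.hs earlier in this diff. Below is a minimal, self-contained Spock sketch of that emission step; it is illustrative only, and the route, port and header value are made up.

{-# LANGUAGE OverloadedStrings #-}
-- Sketch: emitting HTTP.ResponseHeaders (e.g. forwarded Set-Cookie headers) from a
-- Spock action, mirroring the setHeader helper in Hasura/Server/App.hs above.
import           Control.Monad.IO.Class (MonadIO)
import qualified Data.CaseInsensitive   as CI
import qualified Data.Text.Encoding     as TE
import qualified Network.HTTP.Types     as HTTP
import qualified Web.Spock.Core         as Spock

emitHeader :: MonadIO m => HTTP.Header -> Spock.ActionT m ()
emitHeader (name, value) =
  Spock.setHeader (TE.decodeUtf8 $ CI.original name) (TE.decodeUtf8 value)

main :: IO ()
main = Spock.runSpock 8080 $ Spock.spockT id $
  Spock.post "action" $ do
    -- pretend these came back from a sync action webhook call
    let respHeaders = [("Set-Cookie", "abcd")] :: HTTP.ResponseHeaders
    mapM_ emitHeader respHeaders
    Spock.text "ok"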

View File

@ -257,7 +257,12 @@ def evts_webhook(request):
web_server.join()
@pytest.fixture(scope='module')
def actions_webhook(hge_ctx):
def actions_fixture(hge_ctx):
pg_version = hge_ctx.pg_version
if pg_version < 100000: # version less than 10.0
pytest.skip('Actions are not supported on Postgres version < 10')
# Start actions' webhook server
webhook_httpd = ActionsWebhookServer(hge_ctx, server_address=('127.0.0.1', 5593))
web_server = threading.Thread(target=webhook_httpd.serve_forever)
web_server.start()

View File

@ -186,6 +186,10 @@ class ActionsWebhookHandler(http.server.BaseHTTPRequestHandler):
elif req_path == "/invalid-response":
self._send_response(HTTPStatus.OK, "some-string")
elif req_path == "/mirror-action":
resp, status = self.mirror_action()
self._send_response(status, resp)
else:
self.send_response(HTTPStatus.NO_CONTENT)
self.end_headers()
@ -263,6 +267,11 @@ class ActionsWebhookHandler(http.server.BaseHTTPRequestHandler):
response = resp['data']['insert_user']['returning']
return response, HTTPStatus.OK
def mirror_action(self):
response = self.req_json['input']['arg']
return response, HTTPStatus.OK
def check_email(self, email):
regex = '^\w+([\.-]?\w+)*@\w+([\.-]?\w+)*(\.\w{2,3})+$'
return re.search(regex,email)
@ -279,6 +288,7 @@ class ActionsWebhookHandler(http.server.BaseHTTPRequestHandler):
def _send_response(self, status, body):
self.send_response(status)
self.send_header('Content-Type', 'application/json')
self.send_header('Set-Cookie', 'abcd')
self.end_headers()
self.wfile.write(json.dumps(body).encode("utf-8"))
@ -333,7 +343,7 @@ class EvtsWebhookHandler(http.server.BaseHTTPRequestHandler):
"headers": req_headers})
# A very slightly more sane/performant http server.
# See: https://stackoverflow.com/a/14089457/176841
# See: https://stackoverflow.com/a/14089457/176841
#
# TODO use this elsewhere, or better yet: use e.g. bottle + waitress
class ThreadedHTTPServer(ThreadingMixIn, http.server.HTTPServer):
@ -409,7 +419,7 @@ class HGECtx:
self.ws_client = GQLWsClient(self, '/v1/graphql')
# HGE version
result = subprocess.run(['../../scripts/get-version.sh'], shell=False, stdout=subprocess.PIPE, check=True)
env_version = os.getenv('VERSION')
self.version = env_version if env_version else result.stdout.decode('utf-8').strip()
@ -421,6 +431,11 @@ class HGECtx:
raise HGECtxError(repr(e))
assert st_code == 200, resp
# Postgres version
pg_version_text = self.sql('show server_version_num').fetchone()['server_version_num']
self.pg_version = int(pg_version_text)
def reflect_tables(self):
self.meta.reflect(bind=self.engine)

View File

@ -0,0 +1,22 @@
description: Expected field not found in response
url: /v1/graphql
status: 200
response:
errors:
- extensions:
internal:
webhook_response:
name: Alice
path: $
code: unexpected
message: field "id" expected in webhook response, but not found
query:
variables:
name: Alice
query: |
mutation ($name: String) {
mirror(arg: {name: $name}){
id
name
}
}

View File

@ -0,0 +1,24 @@
description: Null value for non-null output field
url: /v1/graphql
status: 200
response:
errors:
- extensions:
internal:
webhook_response:
name: Alice
id: null
path: $
code: unexpected
message: expecting not null value for field "id"
query:
variables:
id: null
name: Alice
query: |
mutation ($id: ID, $name: String) {
mirror(arg: {id: $id, name: $name}){
id
name
}
}

View File

@ -0,0 +1,19 @@
description: A successful query to mirror action
url: /v1/graphql
status: 200
response:
data:
mirror:
id: some-id
name: Alice
query:
variables:
id: some-id
name: Alice
query: |
mutation ($id: ID, $name: String) {
mirror(arg: {id: $id, name: $name}){
id
name
}
}

View File

@ -0,0 +1,26 @@
description: Unexpected extra field in response
url: /v1/graphql
status: 200
response:
errors:
- extensions:
internal:
webhook_response:
age: 25
name: Alice
id: some-id
path: $
code: unexpected
message: 'unexpected fields in webhook response: age'
query:
variables:
id: some-id
name: Alice
age: 25
query: |
mutation ($id: ID, $name: String, $age: Int) {
mirror(arg: {id: $id, name: $name, age: $age}){
id
name
}
}

View File

@ -25,6 +25,15 @@ args:
- name: email
type: String!
- name: InObject
fields:
- name: id
type: ID
- name: name
type: String
- name: age
type: Int
objects:
- name: UserId
fields:
@ -37,6 +46,13 @@ args:
field_mapping:
id: id
- name: OutObject
fields:
- name: id
type: ID! # For issue https://github.com/hasura/graphql-engine/issues/4061
- name: name
type: String
- type: create_action
args:
name: create_user
@ -60,3 +76,14 @@ args:
type: '[UserInput!]!'
output_type: '[UserId]'
handler: http://127.0.0.1:5593/create-users
- type: create_action
args:
name: mirror
definition:
kind: synchronous
arguments:
- name: arg
type: 'InObject!'
output_type: 'OutObject'
handler: http://127.0.0.1:5593/mirror-action

View File

@ -8,6 +8,10 @@ args:
args:
name: create_users
clear_data: true
- type: drop_action
args:
name: mirror
clear_data: true
# clear custom types
- type: set_custom_types
args: {}

View File

@ -10,14 +10,14 @@ TODO:- Test Actions metadata
"""
use_action_fixtures = pytest.mark.usefixtures(
"actions_webhook",
"actions_fixture",
'per_class_db_schema_for_mutation_tests',
'per_method_db_data_for_mutation_tests'
)
@pytest.mark.parametrize("transport", ['http', 'websocket'])
@use_action_fixtures
class TestActionsSync:
class TestActionsSyncWebsocket:
@classmethod
def dir(cls):
@ -35,28 +35,74 @@ class TestActionsSync:
def test_create_users_success(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/create_users_success.yaml', transport)
def test_invalid_webhook_response(self, hge_ctx, transport):
@use_action_fixtures
class TestActionsSync:
@classmethod
def dir(cls):
return 'queries/actions/sync'
def test_invalid_webhook_response(self, hge_ctx):
check_query_f(hge_ctx, self.dir() + '/invalid_webhook_response.yaml')
def test_expecting_object_response(self, hge_ctx, transport):
def test_expecting_object_response(self, hge_ctx):
check_query_f(hge_ctx, self.dir() + '/expecting_object_response.yaml')
def test_expecting_array_response(self, hge_ctx, transport):
def test_expecting_array_response(self, hge_ctx):
check_query_f(hge_ctx, self.dir() + '/expecting_array_response.yaml')
# Webhook response validation tests. See https://github.com/hasura/graphql-engine/issues/3977
def test_mirror_action_not_null(self, hge_ctx):
check_query_f(hge_ctx, self.dir() + '/mirror_action_not_null.yaml')
def test_mirror_action_unexpected_field(self, hge_ctx):
check_query_f(hge_ctx, self.dir() + '/mirror_action_unexpected_field.yaml')
def test_mirror_action_no_field(self, hge_ctx):
check_query_f(hge_ctx, self.dir() + '/mirror_action_no_field.yaml')
def test_mirror_action_success(self, hge_ctx):
check_query_f(hge_ctx, self.dir() + '/mirror_action_success.yaml')
def mk_headers_with_secret(hge_ctx, headers={}):
admin_secret = hge_ctx.hge_key
if admin_secret:
headers['X-Hasura-Admin-Secret'] = admin_secret
return headers
@use_action_fixtures
class TestActionsSyncResponseHeaders:
@classmethod
def dir(cls):
return 'queries/actions/sync'
# See https://github.com/hasura/graphql-engine/issues/4021
def test_set_cookie_header(self, hge_ctx):
mutation = '''
mutation {
create_user(email: "clarke@gmail.com", name: "Clarke"){
id
}
}
'''
query = {
'query': mutation,
'variables': {}
}
status, resp, resp_headers = hge_ctx.anyq('/v1/graphql', query, mk_headers_with_secret(hge_ctx))
assert status == 200, resp
assert 'data' in resp, resp
assert ('Set-Cookie' in resp_headers and
resp_headers['Set-Cookie'] == 'abcd'), resp_headers
@use_action_fixtures
class TestActionsAsync:
@classmethod
def dir(cls):
return 'queries/actions/async'
def mk_headers_with_secret(self, hge_ctx, headers={}):
admin_secret = hge_ctx.hge_key
if admin_secret:
headers['X-Hasura-Admin-Secret'] = admin_secret
return headers
def test_create_user_fail(self, hge_ctx):
graphql_mutation = '''
mutation {
@ -67,11 +113,11 @@ class TestActionsAsync:
'query': graphql_mutation,
'variables': {}
}
status, resp, _ = hge_ctx.anyq('/v1/graphql', query, self.mk_headers_with_secret(hge_ctx))
status, resp, _ = hge_ctx.anyq('/v1/graphql', query, mk_headers_with_secret(hge_ctx))
assert status == 200, resp
assert 'data' in resp
action_id = resp['data']['create_user']
time.sleep(2)
time.sleep(3)
query_async = '''
query ($action_id: uuid!){
@ -118,11 +164,11 @@ class TestActionsAsync:
'query': graphql_mutation,
'variables': {}
}
status, resp, _ = hge_ctx.anyq('/v1/graphql', query, self.mk_headers_with_secret(hge_ctx))
status, resp, _ = hge_ctx.anyq('/v1/graphql', query, mk_headers_with_secret(hge_ctx))
assert status == 200, resp
assert 'data' in resp
action_id = resp['data']['create_user']
time.sleep(2)
time.sleep(3)
query_async = '''
query ($action_id: uuid!){
@ -185,7 +231,7 @@ class TestActionsAsync:
'query': graphql_mutation,
'variables': {}
}
headers_user_1 = self.mk_headers_with_secret(hge_ctx, {
headers_user_1 = mk_headers_with_secret(hge_ctx, {
'X-Hasura-Role': 'user',
'X-Hasura-User-Id': '1'
})
@ -194,7 +240,7 @@ class TestActionsAsync:
assert status == 200, resp
assert 'data' in resp
action_id = resp['data']['create_user']
time.sleep(2)
time.sleep(3)
query_async = '''
query ($action_id: uuid!){
@ -213,7 +259,7 @@ class TestActionsAsync:
}
}
headers_user_2 = self.mk_headers_with_secret(hge_ctx, {
headers_user_2 = mk_headers_with_secret(hge_ctx, {
'X-Hasura-Role': 'user',
'X-Hasura-User-Id': '2'
})