Mirror of https://github.com/hasura/graphql-engine.git (synced 2024-12-15 01:12:56 +03:00)

Commit 00d5a5c1a3 (parent 5abd18a156)
@@ -235,6 +235,14 @@ Input Object

   {
     field1: value,
     field2: value,
     <object-rel-name>: {
       data: <Input-Object>!,
       on_conflict: <Conflict-Clause>
     },
     <array-rel-name>: {
       data: [<Input-Object>!]!,
       on_conflict: <Conflict-Clause>
     }
     ..
   },
   ..
@@ -249,6 +257,12 @@ E.g.:

   {
     title: "Software is eating the world",
     content: "This week, Hewlett-Packard...",
     author: {
       data: {
         id: 1,
         name: "Sydney"
       }
     }
   }
   ]

@@ -132,6 +132,60 @@ Insert multiple objects of the same type in the same mutation
      }
    }

Insert nested object and get nested object in response
-------------------------------------------------------
**Example:** Insert a new ``article`` object with its ``author`` and return the inserted article object with its author in the response

.. graphiql::
  :view_only:
  :query:
    mutation insert_article {
      insert_article(
        objects: [
          {
            id: 21,
            title: "Article 1",
            content: "Sample article content",
            author: {
              data: {
                id: 3,
                name: "Sidney"
              }
            }
          }
        ]
      ) {
        returning {
          id
          title
          author {
            id
            name
          }
        }
      }
    }
  :response:
    {
      "data": {
        "insert_article": {
          "affected_rows": 2,
          "returning": [
            {
              "id": 21,
              "title": "Article 1",
              "author": {
                "id": 3,
                "name": "Sidney"
              }
            }
          ]
        }
      }
    }


Insert object and get nested object in response
-----------------------------------------------
**Example:** Insert a new ``article`` object and return the inserted article object with its author in the response

@@ -255,3 +255,52 @@ ignore the request:
    }

In this case, the insert mutation is ignored because there is a conflict.

Upsert in nested mutations
--------------------------
You can specify the ``on_conflict`` clause while inserting nested objects:

.. graphiql::
  :view_only:
  :query:
    mutation upsert_author_article {
      insert_author(
        objects: [
          {
            name: "John",
            id: 10,
            articles: {
              data: [
                {
                  id: 1,
                  title: "Article 1 title",
                  content: "Article 1 content"
                }
              ],
              on_conflict: {
                constraint: article_pkey,
                update_columns: [title, content]
              }
            }
          }
        ]
      ) {
        affected_rows
      }
    }
  :response:
    {
      "data": {
        "insert_author": {
          "affected_rows": 2
        }
      }
    }


.. warning::

   Inserting nested objects fails when:

   1. any upsert in an object relationship does not affect any rows (``update_columns: []`` or ``action: ignore``), or

   2. array relationships are queued for insert and the parent insert does not affect any rows (``update_columns: []`` or ``action: ignore``).

@@ -189,6 +189,7 @@ library
                     , Hasura.GraphQL.Resolve.Context
                     , Hasura.GraphQL.Resolve.InputValue
                     , Hasura.GraphQL.Resolve.Introspect
                     , Hasura.GraphQL.Resolve.Insert
                     , Hasura.GraphQL.Resolve.Mutation
                     , Hasura.GraphQL.Resolve.Select

@@ -23,6 +23,7 @@ import Hasura.GraphQL.Validate.Field
import Hasura.RQL.Types
import Hasura.SQL.Types

import qualified Hasura.GraphQL.Resolve.Insert   as RI
import qualified Hasura.GraphQL.Resolve.Mutation as RM
import qualified Hasura.GraphQL.Resolve.Select   as RS

@@ -30,7 +31,7 @@ import qualified Hasura.GraphQL.Resolve.Select as RS
buildTx :: UserInfo -> GCtx -> Field -> Q.TxE QErr BL.ByteString
buildTx userInfo gCtx fld = do
  opCxt <- getOpCtx $ _fName fld
  join $ fmap fst $ runConvert (fldMap, orderByCtx) $ case opCxt of
  join $ fmap fst $ runConvert (fldMap, orderByCtx, insCtxMap) $ case opCxt of

    OCSelect tn permFilter permLimit hdrs ->
      validateHdrs hdrs >> RS.convertSelect tn permFilter permLimit fld
@@ -38,8 +39,8 @@ buildTx userInfo gCtx fld = do
    OCSelectPkey tn permFilter hdrs ->
      validateHdrs hdrs >> RS.convertSelectByPKey tn permFilter fld
      -- RS.convertSelect tn permFilter fld
    OCInsert tn vn cols hdrs ->
      validateHdrs hdrs >> RM.convertInsert roleName (tn, vn) cols fld
    OCInsert tn hdrs ->
      validateHdrs hdrs >> RI.convertInsert roleName tn fld
      -- RM.convertInsert (tn, vn) cols fld
    OCUpdate tn permFilter hdrs ->
      validateHdrs hdrs >> RM.convertUpdate tn permFilter fld
@@ -52,6 +53,7 @@ buildTx userInfo gCtx fld = do
    opCtxMap = _gOpCtxMap gCtx
    fldMap = _gFields gCtx
    orderByCtx = _gOrdByEnums gCtx
    insCtxMap = _gInsCtxMap gCtx

    getOpCtx f =
      onNothing (Map.lookup f opCtxMap) $ throw500 $

@@ -3,13 +3,18 @@
{-# LANGUAGE MultiWayIf        #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell   #-}

module Hasura.GraphQL.Resolve.Context
  ( FieldMap
  ( InsResp(..)
  , FieldMap
  , OrdByResolveCtx
  , OrdByResolveCtxElem
  , NullsOrder(..)
  , OrdTy(..)
  , RelationInfoMap
  , InsCtx(..)
  , InsCtxMap
  , RespTx
  , InsertTxConflictCtx(..)
  , getFldInfo
@@ -28,6 +33,9 @@ module Hasura.GraphQL.Resolve.Context
import Data.Has
import Hasura.Prelude

import qualified Data.Aeson           as J
import qualified Data.Aeson.Casing    as J
import qualified Data.Aeson.TH        as J
import qualified Data.ByteString.Lazy as BL
import qualified Data.HashMap.Strict  as Map
import qualified Data.Sequence        as Seq
@@ -44,6 +52,13 @@ import Hasura.SQL.Value
import qualified Hasura.RQL.DML.Select as RS
import qualified Hasura.SQL.DML        as S

data InsResp
  = InsResp
  { _irAffectedRows :: !Int
  , _irResponse     :: !(Maybe J.Object)
  } deriving (Show, Eq)
$(J.deriveJSON (J.aesonDrop 3 J.snakeCase) ''InsResp)

type FieldMap
  = Map.HashMap (G.NamedType, G.Name)
    (Either PGColInfo (RelInfo, S.BoolExp, Maybe Int, Bool))
@@ -66,6 +81,18 @@ type OrdByResolveCtxElem = RS.AnnOrderByItem
type OrdByResolveCtx
  = Map.HashMap (G.NamedType, G.EnumValue) OrdByResolveCtxElem

-- insert context
type RelationInfoMap = Map.HashMap RelName RelInfo

data InsCtx
  = InsCtx
  { icView      :: !QualifiedTable
  , icColumns   :: ![PGColInfo]
  , icRelations :: !RelationInfoMap
  } deriving (Show, Eq)

type InsCtxMap = Map.HashMap QualifiedTable InsCtx

getFldInfo
  :: (MonadError QErr m, MonadReader r m, Has FieldMap r)
  => G.NamedType -> G.Name -> m (Either PGColInfo (RelInfo, S.BoolExp, Maybe Int, Bool))
@@ -126,7 +153,7 @@ withArgM args arg f = prependArgsInPath $ nameAsPath arg $
type PrepArgs = Seq.Seq Q.PrepArg

type Convert =
  StateT PrepArgs (ReaderT (FieldMap, OrdByResolveCtx) (Except QErr))
  StateT PrepArgs (ReaderT (FieldMap, OrdByResolveCtx, InsCtxMap) (Except QErr))

prepare
  :: (MonadState PrepArgs m)
@@ -138,7 +165,7 @@ prepare (colTy, colVal) = do

runConvert
  :: (MonadError QErr m)
  => (FieldMap, OrdByResolveCtx) -> Convert a -> m (a, PrepArgs)
  => (FieldMap, OrdByResolveCtx, InsCtxMap) -> Convert a -> m (a, PrepArgs)
runConvert ctx m =
  either throwError return $
  runExcept $ runReaderT (runStateT m Seq.empty) ctx
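-- Illustrative usage sketch (not part of this commit): with the reader
-- environment extended above, callers supply the insert-context map alongside
-- the field and order-by maps, e.g.
--
--   runConvert (fldMap, orderByCtx, insCtxMap) (RI.convertInsert roleName tn fld)
--
-- so nested-insert resolution can look up the per-table InsCtx without
-- threading extra arguments through every converter.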
@@ -12,8 +12,10 @@ module Hasura.GraphQL.Resolve.InputValue
  , asPGColVal
  , asEnumVal
  , withObject
  , asObject
  , withObjectM
  , withArray
  , asArray
  , withArrayM
  , parseMany
  , asPGColText
@@ -80,6 +82,11 @@ withObject fn v = case v of
    <> G.showGT (G.TypeNamed nt)
  _ -> tyMismatch "object" v

asObject
  :: (MonadError QErr m)
  => AnnGValue -> m AnnGObject
asObject = withObject (\_ o -> return o)

withObjectM
  :: (MonadError QErr m)
  => (G.NamedType -> Maybe AnnGObject -> m a) -> AnnGValue -> m a
@@ -103,6 +110,11 @@ withArray fn v = case v of
    <> G.showGT (G.TypeList lt)
  _ -> tyMismatch "array" v

asArray
  :: (MonadError QErr m)
  => AnnGValue -> m [AnnGValue]
asArray = withArray (\_ vals -> return vals)

parseMany
  :: (MonadError QErr m)
  => (AnnGValue -> m a) -> AnnGValue -> m (Maybe [a])

server/src-lib/Hasura/GraphQL/Resolve/Insert.hs (new file, 587 lines)
@@ -0,0 +1,587 @@
|
||||
{-# LANGUAGE FlexibleContexts #-}
|
||||
{-# LANGUAGE MultiParamTypeClasses #-}
|
||||
{-# LANGUAGE OverloadedStrings #-}
|
||||
|
||||
module Hasura.GraphQL.Resolve.Insert
|
||||
(convertInsert)
|
||||
where
|
||||
|
||||
import Data.Foldable (foldrM)
|
||||
import Data.Has
|
||||
import Data.List (intersect, union)
|
||||
import Hasura.Prelude
|
||||
|
||||
import qualified Data.Aeson as J
|
||||
import qualified Data.ByteString.Builder as BB
|
||||
import qualified Data.HashMap.Strict as Map
|
||||
import qualified Data.HashSet as Set
|
||||
import qualified Data.Sequence as Seq
|
||||
import qualified Data.Text as T
|
||||
import qualified Data.Vector as V
|
||||
import qualified Language.GraphQL.Draft.Syntax as G
|
||||
|
||||
import qualified Database.PG.Query as Q
|
||||
import qualified Hasura.RQL.DML.Insert as RI
|
||||
import qualified Hasura.RQL.DML.Returning as RR
|
||||
import qualified Hasura.RQL.DML.Select as RS
|
||||
import qualified Hasura.RQL.GBoolExp as RG
|
||||
import qualified Hasura.RQL.GBoolExp as RB
|
||||
|
||||
import qualified Hasura.SQL.DML as S
|
||||
|
||||
import Hasura.GraphQL.Resolve.BoolExp
|
||||
import Hasura.GraphQL.Resolve.Context
|
||||
import Hasura.GraphQL.Resolve.InputValue
|
||||
import Hasura.GraphQL.Resolve.Mutation
|
||||
import Hasura.GraphQL.Resolve.Select
|
||||
import Hasura.GraphQL.Validate.Field
|
||||
import Hasura.GraphQL.Validate.Types
|
||||
import Hasura.RQL.DML.Internal (dmlTxErrorHandler)
|
||||
import Hasura.RQL.Types
|
||||
import Hasura.SQL.Types
|
||||
import Hasura.SQL.Value
|
||||
|
||||
data RelData a
|
||||
= RelData
|
||||
{ _rdInsObj :: a
|
||||
, _rdConflictClause :: !(Maybe AnnGValue)
|
||||
} deriving (Show, Eq)
|
||||
|
||||
type ObjRelData = RelData AnnGObject
|
||||
type ArrRelData = RelData [AnnGObject]
|
||||
|
||||
type PGColWithValue = (PGCol, PGColValue)
|
||||
|
||||
type AnnSelFlds = [(FieldName, RS.AnnFld)]
|
||||
type WithExp = (S.CTE, Seq.Seq Q.PrepArg)
|
||||
|
||||
mkConflictClause :: RI.ConflictCtx -> RI.ConflictClauseP1
|
||||
mkConflictClause (RI.CCDoNothing constrM) =
|
||||
RI.CP1DoNothing $ fmap RI.Constraint constrM
|
||||
mkConflictClause (RI.CCUpdate constr updCols) =
|
||||
RI.CP1Update (RI.Constraint constr) updCols
|
||||
|
||||
parseAction
|
||||
:: (MonadError QErr m)
|
||||
=> AnnGObject -> m (Maybe ConflictAction)
|
||||
parseAction obj = withPathK "action" $
|
||||
mapM parseVal $ Map.lookup "action" obj
|
||||
where
|
||||
parseVal val = do
|
||||
(enumTy, enumVal) <- asEnumVal val
|
||||
case G.unName $ G.unEnumValue enumVal of
|
||||
"ignore" -> return CAIgnore
|
||||
"update" -> return CAUpdate
|
||||
_ -> throw500 $
|
||||
"only \"ignore\" and \"updated\" allowed for enum type "
|
||||
<> showNamedTy enumTy
|
||||
|
||||
parseConstraint
|
||||
:: (MonadError QErr m)
|
||||
=> AnnGObject -> m ConstraintName
|
||||
parseConstraint obj = withPathK "constraint" $ do
|
||||
v <- onNothing (Map.lookup "constraint" obj) $ throw500
|
||||
"\"constraint\" is expected, but not found"
|
||||
parseVal v
|
||||
where
|
||||
parseVal v = do
|
||||
(_, enumVal) <- asEnumVal v
|
||||
return $ ConstraintName $ G.unName $ G.unEnumValue enumVal
|
||||
|
||||
parseUpdCols
|
||||
:: (MonadError QErr m)
|
||||
=> AnnGObject -> m (Maybe [PGCol])
|
||||
parseUpdCols obj = withPathK "update_columns" $
|
||||
mapM parseVal $ Map.lookup "update_columns" obj
|
||||
where
|
||||
parseVal val = flip withArray val $ \_ enumVals ->
|
||||
forM enumVals $ \eVal -> do
|
||||
(_, v) <- asEnumVal eVal
|
||||
return $ PGCol $ G.unName $ G.unEnumValue v
|
||||
|
||||
parseOnConflict
|
||||
:: (MonadError QErr m)
|
||||
=> [PGCol] -> AnnGValue -> m RI.ConflictClauseP1
|
||||
parseOnConflict inpCols val = withPathK "on_conflict" $
|
||||
flip withObject val $ \_ obj -> do
|
||||
actionM <- parseAction obj
|
||||
constraint <- parseConstraint obj
|
||||
updColsM <- parseUpdCols obj
|
||||
-- consider "action" if "update_columns" is not mentioned
|
||||
return $ mkConflictClause $ case (updColsM, actionM) of
|
||||
(Just [], _) -> RI.CCDoNothing $ Just constraint
|
||||
(Just cols, _) -> RI.CCUpdate constraint cols
|
||||
(Nothing, Just CAIgnore) -> RI.CCDoNothing $ Just constraint
|
||||
(Nothing, _) -> RI.CCUpdate constraint inpCols
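-- Illustrative sketch (not from this commit): how the combinations above are
-- expected to resolve, assuming a hypothetical constraint "article_pkey" and
-- input columns [id, title]:
--
--   update_columns: []                ~> RI.CP1DoNothing (Just (RI.Constraint article_pkey))
--   update_columns: [title]           ~> RI.CP1Update (RI.Constraint article_pkey) [title]
--   action: ignore, no update_columns ~> RI.CP1DoNothing (Just (RI.Constraint article_pkey))
--   neither given                     ~> RI.CP1Update (RI.Constraint article_pkey) [id, title]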
|
||||
|
||||
parseRelObj
|
||||
:: MonadError QErr m
|
||||
=> AnnGObject
|
||||
-> m (Either ObjRelData ArrRelData)
|
||||
parseRelObj annObj = do
|
||||
let conflictClauseM = Map.lookup "on_conflict" annObj
|
||||
dataVal <- onNothing (Map.lookup "data" annObj) $ throw500 "\"data\" object not found"
|
||||
case dataVal of
|
||||
AGObject _ (Just obj) -> return $ Left $ RelData obj conflictClauseM
|
||||
AGArray _ (Just vals) -> do
|
||||
objs <- forM vals asObject
|
||||
return $ Right $ RelData objs conflictClauseM
|
||||
_ -> throw500 "unexpected type for \"data\""
|
||||
|
||||
toSQLExps :: (MonadError QErr m, MonadState PrepArgs m)
|
||||
=> [(PGCol, AnnGValue)] -> m [(PGCol, S.SQLExp)]
|
||||
toSQLExps cols =
|
||||
forM cols $ \(c, v) -> do
|
||||
prepExpM <- asPGColValM v >>= mapM prepare
|
||||
let prepExp = fromMaybe (S.SEUnsafe "NULL") prepExpM
|
||||
return (c, prepExp)
|
||||
|
||||
mkSQLRow :: [PGCol] -> [(PGCol, S.SQLExp)] -> [S.SQLExp]
|
||||
mkSQLRow tableCols withPGCol =
|
||||
Map.elems $ Map.union (Map.fromList withPGCol) defVals
|
||||
where
|
||||
defVals = Map.fromList $ zip tableCols (repeat $ S.SEUnsafe "DEFAULT")
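-- Illustrative sketch (not part of this commit): defVals maps every table
-- column to the literal DEFAULT, and the left-biased Map.union keeps any
-- expression supplied in withPGCol, so a row that only provides "name" ends
-- up inserting its prepared value for "name" and DEFAULT for the columns the
-- insert object did not mention.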
|
||||
|
||||
mkInsertQ :: QualifiedTable
|
||||
-> Maybe RI.ConflictClauseP1 -> [(PGCol, AnnGValue)]
|
||||
-> [PGCol] -> RoleName
|
||||
-> Q.TxE QErr WithExp
|
||||
mkInsertQ vn onConflictM insCols tableCols role = do
|
||||
(givenCols, args) <- flip runStateT Seq.Empty $ toSQLExps insCols
|
||||
let sqlConflict = RI.toSQLConflict <$> onConflictM
|
||||
sqlExps = mkSQLRow tableCols givenCols
|
||||
sqlInsert = S.SQLInsert vn tableCols [sqlExps] sqlConflict $ Just S.returningStar
|
||||
if isAdmin role then return (S.CTEInsert sqlInsert, args)
|
||||
else do
|
||||
ccM <- mapM RI.extractConflictCtx onConflictM
|
||||
RI.setConflictCtx ccM
|
||||
return (S.CTEInsert (sqlInsert{S.siConflict=Nothing}), args)
|
||||
|
||||
-- | resolve a graphQL object to columns, object and array relations
|
||||
fetchColsAndRels
|
||||
:: MonadError QErr m
|
||||
=> AnnGObject
|
||||
-> m ( [(PGCol, PGColType, PGColValue)] -- ^ columns
|
||||
, [(RelName, ObjRelData)] -- ^ object relations
|
||||
, [(RelName, ArrRelData)] -- ^ array relations
|
||||
)
|
||||
fetchColsAndRels annObj = foldrM go ([], [], []) $ Map.toList annObj
|
||||
where
|
||||
go (gName, annVal) (cols, objRels, arrRels) =
|
||||
case annVal of
|
||||
AGScalar colty mColVal -> do
|
||||
let col = PGCol $ G.unName gName
|
||||
colVal = fromMaybe (PGNull colty) mColVal
|
||||
return ((col, colty, colVal):cols, objRels, arrRels)
|
||||
AGObject _ (Just obj) -> do
|
||||
let relName = RelName $ G.unName gName
|
||||
relObj <- parseRelObj obj
|
||||
return $ either
|
||||
(\relData -> (cols, (relName, relData):objRels, arrRels))
|
||||
(\relData -> (cols, objRels, (relName, relData):arrRels))
|
||||
relObj
|
||||
_ -> throw500 "unexpected Array or Enum for input cols"
|
||||
|
||||
-- | process an object relation and return relation data, insert context
-- | of the remote table and relation info
|
||||
processObjRel
|
||||
:: (MonadError QErr m)
|
||||
=> InsCtxMap
|
||||
-> [(RelName, ObjRelData)]
|
||||
-> RelationInfoMap
|
||||
-> m [(ObjRelData, InsCtx, RelInfo)]
|
||||
processObjRel insCtxMap objRels relInfoMap =
|
||||
forM objRels $ \(relName, rd) -> withPathK (getRelTxt relName) $ do
|
||||
relInfo <- onNothing (Map.lookup relName relInfoMap) $ throw500 $
|
||||
"object relationship with name " <> relName <<> " not found"
|
||||
let remoteTable = riRTable relInfo
|
||||
insCtx <- getInsCtx insCtxMap remoteTable
|
||||
return (rd, insCtx, relInfo)
|
||||
|
||||
-- | process array relation and return dependent columns,
|
||||
-- | relation data, insert context of remote table and relation info
|
||||
processArrRel
|
||||
:: (MonadError QErr m)
|
||||
=> InsCtxMap
|
||||
-> [(RelName, ArrRelData)]
|
||||
-> RelationInfoMap
|
||||
-> m [([PGCol], ArrRelData, InsCtx, RelInfo)]
|
||||
processArrRel insCtxMap arrRels relInfoMap =
|
||||
forM arrRels $ \(relName, rd) -> withPathK (getRelTxt relName) $ do
|
||||
relInfo <- onNothing (Map.lookup relName relInfoMap) $ throw500 $
|
||||
"relation with name " <> relName <<> " not found"
|
||||
let depCols = map fst $ riMapping relInfo
|
||||
remoteTable = riRTable relInfo
|
||||
insCtx <- getInsCtx insCtxMap remoteTable
|
||||
return (depCols, rd, insCtx, relInfo)
|
||||
|
||||
-- | insert an object relationship and return affected rows
|
||||
-- | and parent dependent columns
|
||||
insertObjRel
|
||||
:: RoleName
|
||||
-> InsCtxMap
|
||||
-> InsCtx
|
||||
-> RelInfo
|
||||
-> ObjRelData
|
||||
-> Q.TxE QErr (Int, [PGColWithValue])
|
||||
insertObjRel role insCtxMap insCtx relInfo relData =
|
||||
withPathK relNameTxt $ do
|
||||
(aRows, withExp) <- insertObj role insCtxMap tn insObj
|
||||
insCtx [] onConflictM "data"
|
||||
when (aRows == 0) $ throwVE $ "cannot proceed to insert object relation "
|
||||
<> relName <<> " since insert to table " <> tn <<> " affects zero rows"
|
||||
retColsWithVals <- insertAndRetCols tn withExp $
|
||||
getColInfos rCols allCols
|
||||
let c = mergeListsWith mapCols retColsWithVals
|
||||
(\(_, rCol) (col, _) -> rCol == col)
|
||||
(\(lCol, _) (_, colVal) -> (lCol, colVal))
|
||||
return (aRows, c)
|
||||
where
|
||||
RelData insObj onConflictM = relData
|
||||
relName = riName relInfo
|
||||
relNameTxt = getRelTxt relName
|
||||
mapCols = riMapping relInfo
|
||||
tn = riRTable relInfo
|
||||
rCols = map snd mapCols
|
||||
allCols = icColumns insCtx
|
||||
|
||||
-- | insert an array relationship and return affected rows
|
||||
insertArrRel
|
||||
:: RoleName
|
||||
-> InsCtxMap
|
||||
-> InsCtx
|
||||
-> RelInfo
|
||||
-> [PGColWithValue]
|
||||
-> ArrRelData
|
||||
-> Q.TxE QErr Int
|
||||
insertArrRel role insCtxMap insCtx relInfo resCols relData =
|
||||
withPathK relNameTxt $ do
|
||||
let addCols = mergeListsWith resCols colMapping
|
||||
(\(col, _) (lCol, _) -> col == lCol)
|
||||
(\(_, colVal) (_, rCol) -> (rCol, colVal))
|
||||
|
||||
resBS <- insertMultipleObjects role insCtxMap tn insCtx
|
||||
insObjs addCols mutFlds onConflictM True
|
||||
resObj <- decodeFromBS resBS
|
||||
onNothing (Map.lookup ("affected_rows" :: T.Text) resObj) $
|
||||
throw500 "affected_rows not returned in array rel insert"
|
||||
where
|
||||
colMapping = riMapping relInfo
|
||||
tn = riRTable relInfo
|
||||
relNameTxt = getRelTxt $ riName relInfo
|
||||
RelData insObjs onConflictM = relData
|
||||
mutFlds = [("affected_rows", RR.MCount)]
|
||||
|
||||
-- | validate an insert object based on insert columns,
|
||||
-- | insert object relations and additional columns from parent
|
||||
validateInsert
|
||||
:: (MonadError QErr m)
|
||||
=> [PGCol] -- ^ inserting columns
|
||||
-> [RelInfo] -- ^ object relation inserts
|
||||
-> [PGCol] -- ^ additional fields from parent
|
||||
-> m ()
|
||||
validateInsert insCols objRels addCols = do
|
||||
-- validate insertCols
|
||||
unless (null insConflictCols) $ throwVE $
|
||||
"cannot insert " <> pgColsToText insConflictCols
|
||||
<> " columns as their values are already being determined by parent insert"
|
||||
|
||||
forM_ objRels $ \relInfo -> do
|
||||
let lCols = map fst $ riMapping relInfo
|
||||
relName = riName relInfo
|
||||
relNameTxt = getRelTxt relName
|
||||
lColConflicts = lCols `intersect` (addCols <> insCols)
|
||||
withPathK relNameTxt $ unless (null lColConflicts) $ throwVE $
|
||||
"cannot insert object relation ship " <> relName
|
||||
<<> " as " <> pgColsToText lColConflicts
|
||||
<> " column values are already determined"
|
||||
where
|
||||
insConflictCols = insCols `intersect` addCols
|
||||
pgColsToText cols = T.intercalate ", " $ map getPGColTxt cols
|
||||
|
||||
-- | insert an object with object and array relationships
|
||||
insertObj
|
||||
:: RoleName
|
||||
-> InsCtxMap
|
||||
-> QualifiedTable
|
||||
-> AnnGObject -- ^ object to be inserted
|
||||
-> InsCtx -- ^ required insert context
|
||||
-> [PGColWithValue] -- ^ additional fields
|
||||
-> Maybe AnnGValue -- ^ on conflict context
|
||||
-> T.Text -- ^ error path
|
||||
-> Q.TxE QErr (Int, WithExp)
|
||||
insertObj role insCtxMap tn annObj ctx addCols onConflictValM errP = do
|
||||
-- get all insertable columns, object and array relations
|
||||
(cols, objRels, arrRels) <- withErrPath $ fetchColsAndRels annObj
|
||||
|
||||
processedObjRels <- processObjRel insCtxMap objRels relInfoMap
|
||||
|
||||
-- validate insert
|
||||
validateInsert (map _1 cols) (map _3 processedObjRels) $ map fst addCols
|
||||
|
||||
-- insert all object relations and fetch this insert dependent column values
|
||||
objInsRes <- forM processedObjRels $ \(relData, insCtx, relInfo) ->
|
||||
insertObjRel role insCtxMap insCtx relInfo relData
|
||||
|
||||
-- prepare final insert columns
|
||||
let objInsAffRows = sum $ map fst objInsRes
|
||||
objRelDeterminedCols = concatMap snd objInsRes
|
||||
objRelInsCols = mkPGColWithTypeAndVal tableColInfos objRelDeterminedCols
|
||||
addInsCols = mkPGColWithTypeAndVal tableColInfos addCols
|
||||
finalInsCols = map pgColToAnnGVal (cols <> objRelInsCols <> addInsCols)
|
||||
|
||||
-- fetch array rel deps Cols
|
||||
processedArrRels <- processArrRel insCtxMap arrRels relInfoMap
|
||||
|
||||
-- prepare final returning columns
|
||||
let arrDepCols = concatMap (\(a, _, _, _) -> a) processedArrRels
|
||||
arrDepColsWithInfo = getColInfos arrDepCols tableColInfos
|
||||
|
||||
onConflictM <- forM onConflictValM $ parseOnConflict (map fst finalInsCols)
|
||||
|
||||
-- calculate affected rows
|
||||
let anyRowsAffected = not $ or $ fmap RI.isDoNothing onConflictM
|
||||
thisInsAffRows = bool 0 1 anyRowsAffected
|
||||
preArrRelInsAffRows = objInsAffRows + thisInsAffRows
|
||||
|
||||
-- prepare insert query as with expression
|
||||
insQ <- mkInsertQ vn onConflictM finalInsCols (map pgiName tableColInfos) role
|
||||
|
||||
let insertWithArrRels = cannotInsArrRelErr thisInsAffRows >>
|
||||
withArrRels preArrRelInsAffRows insQ
|
||||
arrDepColsWithInfo processedArrRels
|
||||
insertWithoutArrRels = withNoArrRels preArrRelInsAffRows insQ
|
||||
|
||||
bool insertWithArrRels insertWithoutArrRels $ null arrDepColsWithInfo
|
||||
|
||||
where
|
||||
InsCtx vn tableColInfos relInfoMap = ctx
|
||||
withErrPath = withPathK errP
|
||||
|
||||
withNoArrRels affRows insQ = return (affRows, insQ)
|
||||
|
||||
withArrRels affRows insQ arrDepColsWithType processedArrRels = do
|
||||
arrDepColsWithVal <- insertAndRetCols tn insQ arrDepColsWithType
|
||||
|
||||
arrInsARows <- forM processedArrRels $ \(_, rd, insCtx, relInfo) ->
|
||||
insertArrRel role insCtxMap insCtx relInfo arrDepColsWithVal rd
|
||||
|
||||
let totalAffRows = affRows + sum arrInsARows
|
||||
|
||||
selQ <- mkSelQ tn tableColInfos arrDepColsWithVal
|
||||
return (totalAffRows, selQ)
|
||||
|
||||
cannotInsArrRelErr affRows = when (affRows == 0) $ throwVE $
|
||||
"cannot proceed to insert array relations since insert to table "
|
||||
<> tn <<> " affects zero rows"
|
||||
|
||||
|
||||
mkBoolExp
|
||||
:: (MonadError QErr m, MonadState PrepArgs m)
|
||||
=> QualifiedTable -> [(PGColInfo, PGColValue)]
|
||||
-> m (GBoolExp RG.AnnSQLBoolExp)
|
||||
mkBoolExp tn colInfoVals =
|
||||
RG.convBoolRhs (RG.mkBoolExpBuilder prepare) (S.mkQual tn) boolExp
|
||||
where
|
||||
boolExp = BoolAnd $ map (BoolCol . uncurry f) colInfoVals
|
||||
f ci@(PGColInfo _ colTy _) colVal =
|
||||
RB.AVCol ci [RB.OEVal $ RB.AEQ (colTy, colVal)]
|
||||
|
||||
mkSelQ :: QualifiedTable
|
||||
-> [PGColInfo] -> [PGColWithValue] -> Q.TxE QErr WithExp
|
||||
mkSelQ tn allColInfos pgColsWithVal = do
|
||||
(whereExp, args) <- flip runStateT Seq.Empty $ mkBoolExp tn colWithInfos
|
||||
let sqlSel = S.mkSelect { S.selExtr = [S.selectStar]
|
||||
, S.selFrom = Just $ S.mkSimpleFromExp tn
|
||||
, S.selWhere = Just $ S.WhereFrag $ RG.cBoolExp whereExp
|
||||
}
|
||||
|
||||
return (S.CTESelect sqlSel, args)
|
||||
where
|
||||
colWithInfos = mergeListsWith pgColsWithVal allColInfos
|
||||
(\(c, _) ci -> c == pgiName ci)
|
||||
(\(_, v) ci -> (ci, v))
|
||||
|
||||
execWithExp
|
||||
:: QualifiedTable
|
||||
-> WithExp
|
||||
-> AnnSelFlds
|
||||
-> Q.TxE QErr RespBody
|
||||
execWithExp tn (withExp, args) annFlds = do
|
||||
let annSel = RS.AnnSel annFlds tn frmItemM
|
||||
(S.BELit True) Nothing RS.noTableArgs
|
||||
sqlSel = RS.mkSQLSelect True annSel
|
||||
selWith = S.SelectWith [(alias, withExp)] sqlSel
|
||||
sqlBuilder = toSQL selWith
|
||||
runIdentity . Q.getRow
|
||||
<$> Q.rawQE dmlTxErrorHandler (Q.fromBuilder sqlBuilder) (toList args) True
|
||||
where
|
||||
alias = S.Alias $ Iden $ snakeCaseTable tn <> "__rel_insert_result"
|
||||
frmItemM = Just $ S.FIIden $ toIden alias
|
||||
|
||||
insertAndRetCols
|
||||
:: QualifiedTable
|
||||
-> WithExp
|
||||
-> [PGColInfo]
|
||||
-> Q.TxE QErr [PGColWithValue]
|
||||
insertAndRetCols tn withExp retCols = do
|
||||
resBS <- execWithExp tn withExp annSelFlds
|
||||
resObj <- decodeFromBS resBS
|
||||
forM retCols $ \(PGColInfo col colty _) -> do
|
||||
val <- onNothing (Map.lookup (getPGColTxt col) resObj) $
|
||||
throw500 $ "column " <> col <<> "not returned by postgres"
|
||||
pgColVal <- RB.pgValParser colty val
|
||||
return (col, pgColVal)
|
||||
where
|
||||
annSelFlds = flip map retCols $ \pgci ->
|
||||
(fromPGCol $ pgiName pgci, RS.FCol pgci)
|
||||
|
||||
buildReturningResp
|
||||
:: QualifiedTable
|
||||
-> [WithExp]
|
||||
-> AnnSelFlds
|
||||
-> Q.TxE QErr RespBody
|
||||
buildReturningResp tn withExps annFlds = do
|
||||
respList <- forM withExps $ \withExp ->
|
||||
execWithExp tn withExp annFlds
|
||||
let bsVector = V.fromList respList
|
||||
return $ BB.toLazyByteString $ RR.encodeJSONVector BB.lazyByteString bsVector
|
||||
|
||||
-- | insert multiple Objects in postgres
|
||||
insertMultipleObjects
|
||||
:: RoleName -- ^ role name
|
||||
-> InsCtxMap -- ^ insert context map
|
||||
-> QualifiedTable -- ^ table
|
||||
-> InsCtx -- ^ insert context
|
||||
-> [AnnGObject] -- ^ objects to be inserted
|
||||
-> [PGColWithValue] -- ^ additional fields
|
||||
-> RR.MutFlds -- ^ returning fields
|
||||
-> Maybe AnnGValue -- ^ On Conflict Clause
|
||||
-> Bool -- ^ is an Array relation
|
||||
-> Q.TxE QErr RespBody
|
||||
insertMultipleObjects role insCtxMap tn ctx insObjs
|
||||
addCols mutFlds onConflictValM isArrRel
|
||||
= do
|
||||
|
||||
-- fetch insertable columns, object and array relationships
|
||||
colsObjArrRels <- withErrPath $ indexedMapM fetchColsAndRels insObjs
|
||||
let insCols = map _1 colsObjArrRels
|
||||
insColNames = Set.toList $ Set.fromList $
|
||||
concatMap (map _1) insCols
|
||||
allInsObjRels = concatMap _2 colsObjArrRels
|
||||
allInsArrRels = concatMap _3 colsObjArrRels
|
||||
anyRelsToInsert = not $ null allInsArrRels && null allInsObjRels
|
||||
|
||||
onConflictM <- forM onConflictValM $ parseOnConflict insColNames
|
||||
|
||||
let withoutRels = withoutRelsInsert insCols onConflictM
|
||||
|
||||
bool withoutRels withRelsInsert anyRelsToInsert
|
||||
|
||||
where
|
||||
InsCtx vn tableColInfos _ = ctx
|
||||
tableCols = map pgiName tableColInfos
|
||||
|
||||
errP = bool "objects" "data" isArrRel
|
||||
withErrPath = withPathK errP
|
||||
|
||||
-- insert all column rows at one go
|
||||
withoutRelsInsert insCols onConflictM = withErrPath $ do
|
||||
indexedForM_ insCols $ \insCol ->
|
||||
validateInsert (map _1 insCol) [] $ map fst addCols
|
||||
|
||||
let addColsWithType = mkPGColWithTypeAndVal tableColInfos addCols
|
||||
withAddCols = flip map insCols $ union addColsWithType
|
||||
|
||||
(sqlRows, prepArgs) <- flip runStateT Seq.Empty $ do
|
||||
rowsWithCol <- mapM (toSQLExps . map pgColToAnnGVal) withAddCols
|
||||
return $ map (mkSQLRow tableCols) rowsWithCol
|
||||
|
||||
let insQP1 = RI.InsertQueryP1 tn vn tableCols sqlRows onConflictM mutFlds
|
||||
p1 = (insQP1, prepArgs)
|
||||
bool (RI.nonAdminInsert p1) (RI.insertP2 p1) $ isAdmin role
|
||||
|
||||
-- insert each object with relations
|
||||
withRelsInsert = withErrPath $ do
|
||||
insResps <- indexedForM insObjs $ \obj ->
|
||||
insertObj role insCtxMap tn obj ctx addCols onConflictValM errP
|
||||
|
||||
let affRows = sum $ map fst insResps
|
||||
withExps = map snd insResps
|
||||
respTups <- forM mutFlds $ \(t, mutFld) -> do
|
||||
jsonVal <- case mutFld of
|
||||
RR.MCount -> do
|
||||
-- when it is an array relation, perform the insert
|
||||
-- and return calculated affected rows
|
||||
when isArrRel $ void $ buildReturningResp tn withExps []
|
||||
return $ J.toJSON affRows
|
||||
RR.MExp txt -> return $ J.toJSON txt
|
||||
RR.MRet annSel -> do
|
||||
let annFlds = RS._asFields annSel
|
||||
bs <- buildReturningResp tn withExps annFlds
|
||||
decodeFromBS bs
|
||||
return (t, jsonVal)
|
||||
return $ J.encode $ Map.fromList respTups
|
||||
|
||||
prefixErrPath :: (MonadError QErr m) => Field -> m a -> m a
|
||||
prefixErrPath fld =
|
||||
withPathK "selectionSet" . fieldAsPath fld . withPathK "args"
|
||||
|
||||
convertInsert
|
||||
:: RoleName
|
||||
-> QualifiedTable -- table
|
||||
-> Field -- the mutation field
|
||||
-> Convert RespTx
|
||||
convertInsert role tn fld = prefixErrPath fld $ do
|
||||
insCtxMap <- getInsCtxMap
|
||||
insCtx <- getInsCtx insCtxMap tn
|
||||
annVals <- withArg arguments "objects" asArray
|
||||
annObjs <- forM annVals asObject
|
||||
mutFlds <- convertMutResp tn (_fType fld) $ _fSelSet fld
|
||||
return $ prefixErrPath fld $ insertMultipleObjects role insCtxMap tn
|
||||
insCtx annObjs [] mutFlds onConflictM False
|
||||
where
|
||||
arguments = _fArguments fld
|
||||
onConflictM = Map.lookup "on_conflict" arguments
|
||||
|
||||
-- helper functions
|
||||
getInsCtxMap
|
||||
:: (Has InsCtxMap r, MonadReader r m)
|
||||
=> m InsCtxMap
|
||||
getInsCtxMap = asks getter
|
||||
|
||||
getInsCtx
|
||||
:: MonadError QErr m
|
||||
=> InsCtxMap -> QualifiedTable -> m InsCtx
|
||||
getInsCtx ctxMap tn =
|
||||
onNothing (Map.lookup tn ctxMap) $ throw500 $ "table " <> tn <<> " not found"
|
||||
|
||||
mergeListsWith
|
||||
:: [a] -> [b] -> (a -> b -> Bool) -> (a -> b -> c) -> [c]
|
||||
mergeListsWith _ [] _ _ = []
|
||||
mergeListsWith [] _ _ _ = []
|
||||
mergeListsWith (x:xs) l b f = case find (b x) l of
|
||||
Nothing -> mergeListsWith xs l b f
|
||||
Just y -> f x y : mergeListsWith xs l b f
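-- Usage sketch (not part of this commit): mergeListsWith keeps only the
-- elements of the first list that find a match in the second, combining each
-- matched pair, e.g.
--
--   mergeListsWith [("id", 1), ("name", 2)] ["name", "title"]
--                  (\(c, _) c' -> c == c')
--                  (\(_, v) c' -> (c', v))
--   == [("name", 2)]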
|
||||
|
||||
mkPGColWithTypeAndVal :: [PGColInfo] -> [PGColWithValue]
|
||||
-> [(PGCol, PGColType, PGColValue)]
|
||||
mkPGColWithTypeAndVal pgColInfos pgColWithVal =
|
||||
mergeListsWith pgColInfos pgColWithVal
|
||||
(\ci (c, _) -> pgiName ci == c)
|
||||
(\ci (c, v) -> (c, pgiType ci, v))
|
||||
|
||||
pgColToAnnGVal
|
||||
:: (PGCol, PGColType, PGColValue)
|
||||
-> (PGCol, AnnGValue)
|
||||
pgColToAnnGVal (col, colTy, colVal) =
|
||||
(col, pgColValToAnnGVal colTy colVal)
|
||||
|
||||
_1 :: (a, b, c) -> a
|
||||
_1 (x, _, _) = x
|
||||
|
||||
_2 :: (a, b, c) -> b
|
||||
_2 (_, y, _) = y
|
||||
|
||||
_3 :: (a, b, c) -> c
|
||||
_3 (_, _, z) = z
|
@ -6,18 +6,16 @@
|
||||
|
||||
module Hasura.GraphQL.Resolve.Mutation
|
||||
( convertUpdate
|
||||
, convertInsert
|
||||
, convertDelete
|
||||
, convertMutResp
|
||||
) where
|
||||
|
||||
import Hasura.Prelude
|
||||
|
||||
import qualified Data.HashMap.Strict as Map
|
||||
import qualified Data.HashSet as Set
|
||||
import qualified Language.GraphQL.Draft.Syntax as G
|
||||
|
||||
import qualified Hasura.RQL.DML.Delete as RD
|
||||
import qualified Hasura.RQL.DML.Insert as RI
|
||||
import qualified Hasura.RQL.DML.Returning as RR
|
||||
import qualified Hasura.RQL.DML.Select as RS
|
||||
import qualified Hasura.RQL.DML.Update as RU
|
||||
@ -68,94 +66,6 @@ convertRowObj val =
|
||||
let prepExp = fromMaybe (S.SEUnsafe "NULL") prepExpM
|
||||
return (PGCol $ G.unName k, prepExp)
|
||||
|
||||
mkConflictClause :: RI.ConflictCtx -> RI.ConflictClauseP1
|
||||
mkConflictClause (RI.CCDoNothing constrM) =
|
||||
RI.CP1DoNothing $ fmap RI.Constraint constrM
|
||||
mkConflictClause (RI.CCUpdate constr updCols) =
|
||||
RI.CP1Update (RI.Constraint constr) updCols
|
||||
|
||||
parseAction
|
||||
:: (MonadError QErr m)
|
||||
=> AnnGObject -> m (Maybe ConflictAction)
|
||||
parseAction obj =
|
||||
mapM parseVal $ Map.lookup "action" obj
|
||||
where
|
||||
parseVal val = do
|
||||
(enumTy, enumVal) <- asEnumVal val
|
||||
withPathK "action" $ case G.unName $ G.unEnumValue enumVal of
|
||||
"ignore" -> return CAIgnore
|
||||
"update" -> return CAUpdate
|
||||
_ -> throw500 $
|
||||
"only \"ignore\" and \"updated\" allowed for enum type "
|
||||
<> showNamedTy enumTy
|
||||
|
||||
parseConstraint
|
||||
:: (MonadError QErr m)
|
||||
=> AnnGObject -> m ConstraintName
|
||||
parseConstraint obj = do
|
||||
v <- onNothing (Map.lookup "constraint" obj) $ throw500
|
||||
"\"constraint\" is expected, but not found"
|
||||
parseVal v
|
||||
where
|
||||
parseVal v = do
|
||||
(_, enumVal) <- asEnumVal v
|
||||
return $ ConstraintName $ G.unName $ G.unEnumValue enumVal
|
||||
|
||||
parseUpdCols
|
||||
:: (MonadError QErr m)
|
||||
=> AnnGObject -> m (Maybe [PGCol])
|
||||
parseUpdCols obj =
|
||||
mapM parseVal $ Map.lookup "update_columns" obj
|
||||
where
|
||||
parseVal val = flip withArray val $ \_ enumVals ->
|
||||
forM enumVals $ \eVal -> do
|
||||
(_, v) <- asEnumVal eVal
|
||||
return $ PGCol $ G.unName $ G.unEnumValue v
|
||||
|
||||
parseOnConflict
|
||||
:: (MonadError QErr m)
|
||||
=> [PGCol] -> AnnGValue -> m RI.ConflictCtx
|
||||
parseOnConflict inpCols val =
|
||||
flip withObject val $ \_ obj -> do
|
||||
actionM <- parseAction obj
|
||||
constraint <- parseConstraint obj
|
||||
updColsM <- parseUpdCols obj
|
||||
-- consider "action" if "update_columns" is not mentioned
|
||||
return $ case (updColsM, actionM) of
|
||||
(Just [], _) -> RI.CCDoNothing $ Just constraint
|
||||
(Just cols, _) -> RI.CCUpdate constraint cols
|
||||
(Nothing, Just CAIgnore) -> RI.CCDoNothing $ Just constraint
|
||||
(Nothing, _) -> RI.CCUpdate constraint inpCols
|
||||
|
||||
convertInsert
|
||||
:: RoleName
|
||||
-> (QualifiedTable, QualifiedTable) -- table, view
|
||||
-> [PGCol] -- all the columns in this table
|
||||
-> Field -- the mutation field
|
||||
-> Convert RespTx
|
||||
convertInsert role (tn, vn) tableCols fld = do
|
||||
insTuples <- withArg arguments "objects" asRowExps
|
||||
let inpCols = Set.toList $ Set.fromList $ concatMap fst insTuples
|
||||
conflictCtxM <- withArgM arguments "on_conflict" $ parseOnConflict inpCols
|
||||
let onConflictM = fmap mkConflictClause conflictCtxM
|
||||
mutFlds <- convertMutResp tn (_fType fld) $ _fSelSet fld
|
||||
args <- get
|
||||
let rows = map snd insTuples
|
||||
p1Query = RI.InsertQueryP1 tn vn tableCols rows onConflictM mutFlds
|
||||
p1 = (p1Query, args)
|
||||
return $
|
||||
bool (RI.nonAdminInsert p1) (RI.insertP2 p1) $ isAdmin role
|
||||
where
|
||||
arguments = _fArguments fld
|
||||
asRowExps = withArray (const $ mapM rowExpWithDefaults)
|
||||
rowExpWithDefaults val = do
|
||||
givenCols <- convertRowObj val
|
||||
let inpCols = map fst givenCols
|
||||
sqlExps = Map.elems $ Map.union (Map.fromList givenCols) defVals
|
||||
return (inpCols, sqlExps)
|
||||
|
||||
defVals = Map.fromList $ zip tableCols (repeat $ S.SEUnsafe "DEFAULT")
|
||||
|
||||
type ApplySQLOp = (PGCol, S.SQLExp) -> S.SQLExp
|
||||
|
||||
rhsExpOp :: S.SQLOp -> S.AnnType -> ApplySQLOp
|
||||
|
@@ -9,6 +9,7 @@ module Hasura.GraphQL.Resolve.Select
  ( convertSelect
  , convertSelectByPKey
  , fromSelSet
  , fieldAsPath
  ) where

import Data.Has

@ -18,6 +18,9 @@ module Hasura.GraphQL.Schema
|
||||
, OrdByResolveCtxElem
|
||||
, NullsOrder(..)
|
||||
, OrdTy(..)
|
||||
, InsCtx(..)
|
||||
, InsCtxMap
|
||||
, RelationInfoMap
|
||||
) where
|
||||
|
||||
import Data.Has
|
||||
@ -32,6 +35,7 @@ import Hasura.GraphQL.Resolve.Context
|
||||
import Hasura.GraphQL.Validate.Types
|
||||
|
||||
import Hasura.Prelude
|
||||
import Hasura.RQL.DML.Internal (mkAdminRolePermInfo)
|
||||
import Hasura.RQL.Types
|
||||
import Hasura.SQL.Types
|
||||
|
||||
@ -41,18 +45,31 @@ import qualified Hasura.SQL.DML as S
|
||||
defaultTypes :: [TypeInfo]
|
||||
defaultTypes = $(fromSchemaDocQ defaultSchema)
|
||||
|
||||
getInsPerm :: TableInfo -> RoleName -> Maybe InsPermInfo
|
||||
getInsPerm tabInfo role
|
||||
| role == adminRole = _permIns $ mkAdminRolePermInfo tabInfo
|
||||
| otherwise = Map.lookup role rolePermInfoMap >>= _permIns
|
||||
where
|
||||
rolePermInfoMap = tiRolePermInfoMap tabInfo
|
||||
|
||||
getTabInfo
|
||||
:: MonadError QErr m
|
||||
=> TableCache -> QualifiedTable -> m TableInfo
|
||||
getTabInfo tc t =
|
||||
onNothing (Map.lookup t tc) $
|
||||
throw500 $ "table not found: " <>> t
|
||||
|
||||
type OpCtxMap = Map.HashMap G.Name OpCtx
|
||||
|
||||
data OpCtx
|
||||
-- tn, vn, cols, req hdrs
|
||||
= OCInsert QualifiedTable QualifiedTable [PGCol] [T.Text]
|
||||
-- table, req hdrs
|
||||
= OCInsert QualifiedTable [T.Text]
|
||||
-- tn, filter exp, limit, req hdrs
|
||||
| OCSelect QualifiedTable S.BoolExp (Maybe Int) [T.Text]
|
||||
-- tn, filter exp, req hdrs
|
||||
| OCSelectPkey QualifiedTable S.BoolExp [T.Text]
|
||||
-- tn, filter exp, req hdrs
|
||||
| OCUpdate QualifiedTable S.BoolExp [T.Text]
|
||||
|
||||
-- tn, filter exp, req hdrs
|
||||
| OCDelete QualifiedTable S.BoolExp [T.Text]
|
||||
deriving (Show, Eq)
|
||||
@ -66,6 +83,7 @@ data GCtx
|
||||
, _gMutRoot :: !(Maybe ObjTyInfo)
|
||||
, _gSubRoot :: !(Maybe ObjTyInfo)
|
||||
, _gOpCtxMap :: !OpCtxMap
|
||||
, _gInsCtxMap :: !InsCtxMap
|
||||
} deriving (Show, Eq)
|
||||
|
||||
instance Has TypeMap GCtx where
|
||||
@ -126,6 +144,12 @@ isRelNullable fim ri = isNullable
|
||||
lColInfos = getColInfos lCols allCols
|
||||
isNullable = any pgiIsNullable lColInfos
|
||||
|
||||
isUpsertAllowed :: [TableConstraint] -> Bool -> Bool
|
||||
isUpsertAllowed constraints upsertPerm =
|
||||
not (null uniqueOrPrimaryCons) && upsertPerm
|
||||
where
|
||||
uniqueOrPrimaryCons = filter isUniqueOrPrimary constraints
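-- Reading of the above (sketch, not part of this commit): upsert is exposed
-- only when the table has at least one unique or primary-key constraint to
-- target AND the role's insert permission allows upsert, e.g.
--
--   isUpsertAllowed []          True  == False
--   isUpsertAllowed [pkeyCons]  False == False
--   isUpsertAllowed [pkeyCons]  True  == True   -- pkeyCons being unique/primary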
|
||||
|
||||
mkColName :: PGCol -> G.Name
|
||||
mkColName (PGCol n) = G.Name n
|
||||
|
||||
@ -668,6 +692,17 @@ mkInsInpTy :: QualifiedTable -> G.NamedType
|
||||
mkInsInpTy tn =
|
||||
G.NamedType $ qualTableToName tn <> "_insert_input"
|
||||
|
||||
-- table_obj_rel_insert_input
|
||||
mkObjInsInpTy :: QualifiedTable -> G.NamedType
|
||||
mkObjInsInpTy tn =
|
||||
G.NamedType $ qualTableToName tn <> "_obj_rel_insert_input"
|
||||
|
||||
-- table_arr_rel_insert_input
|
||||
mkArrInsInpTy :: QualifiedTable -> G.NamedType
|
||||
mkArrInsInpTy tn =
|
||||
G.NamedType $ qualTableToName tn <> "_arr_rel_insert_input"
|
||||
|
||||
|
||||
-- table_on_conflict
|
||||
mkOnConflictInpTy :: QualifiedTable -> G.NamedType
|
||||
mkOnConflictInpTy tn =
|
||||
@ -682,6 +717,46 @@ mkConstraintInpTy tn =
|
||||
mkColumnInpTy :: QualifiedTable -> G.NamedType
|
||||
mkColumnInpTy tn =
|
||||
G.NamedType $ qualTableToName tn <> "_column"
|
||||
{-
|
||||
input table_obj_rel_insert_input {
|
||||
data: table_insert_input!
|
||||
on_conflict: table_on_conflict
|
||||
}
|
||||
|
||||
-}
|
||||
|
||||
{-
|
||||
input table_arr_rel_insert_input {
|
||||
data: [table_insert_input!]!
|
||||
on_conflict: table_on_conflict
|
||||
}
|
||||
|
||||
-}
|
||||
|
||||
mkRelInsInps
|
||||
:: QualifiedTable -> Bool -> [InpObjTyInfo]
|
||||
mkRelInsInps tn upsertAllowed = [objRelInsInp, arrRelInsInp]
|
||||
where
|
||||
onConflictInpVal =
|
||||
InpValInfo Nothing "on_conflict" $ G.toGT $ mkOnConflictInpTy tn
|
||||
|
||||
onConflictInp = bool [] [onConflictInpVal] upsertAllowed
|
||||
|
||||
objRelDesc = G.Description $
|
||||
"input type for inserting object relation for remote table " <>> tn
|
||||
|
||||
objRelDataInp = InpValInfo Nothing "data" $ G.toGT $
|
||||
G.toNT $ mkInsInpTy tn
|
||||
objRelInsInp = InpObjTyInfo (Just objRelDesc) (mkObjInsInpTy tn)
|
||||
$ fromInpValL $ objRelDataInp : onConflictInp
|
||||
|
||||
arrRelDesc = G.Description $
|
||||
"input type for inserting array relation for remote table " <>> tn
|
||||
|
||||
arrRelDataInp = InpValInfo Nothing "data" $ G.toGT $
|
||||
G.toNT $ G.toLT $ G.toNT $ mkInsInpTy tn
|
||||
arrRelInsInp = InpObjTyInfo (Just arrRelDesc) (mkArrInsInpTy tn)
|
||||
$ fromInpValL $ arrRelDataInp : onConflictInp
|
||||
|
||||
{-
|
||||
|
||||
@ -695,13 +770,25 @@ input table_insert_input {
|
||||
-}
|
||||
|
||||
mkInsInp
|
||||
:: QualifiedTable -> [PGColInfo] -> InpObjTyInfo
|
||||
mkInsInp tn cols =
|
||||
:: QualifiedTable -> InsCtx -> InpObjTyInfo
|
||||
mkInsInp tn insCtx =
|
||||
InpObjTyInfo (Just desc) (mkInsInpTy tn) $ fromInpValL $
|
||||
map mkPGColInp cols
|
||||
map mkPGColInp cols <> relInps
|
||||
where
|
||||
desc = G.Description $
|
||||
"input type for inserting data into table " <>> tn
|
||||
cols = icColumns insCtx
|
||||
relInfoMap = icRelations insCtx
|
||||
|
||||
relInps = flip map (Map.toList relInfoMap) $
|
||||
\(relName, relInfo) ->
|
||||
let rty = riType relInfo
|
||||
remoteQT = riRTable relInfo
|
||||
in case rty of
|
||||
ObjRel -> InpValInfo Nothing (G.Name $ getRelTxt relName) $
|
||||
G.toGT $ mkObjInsInpTy remoteQT
|
||||
ArrRel -> InpValInfo Nothing (G.Name $ getRelTxt relName) $
|
||||
G.toGT $ mkArrInsInpTy remoteQT
|
||||
|
||||
{-
|
||||
|
||||
@ -740,8 +827,8 @@ insert_table(
|
||||
-}
|
||||
|
||||
mkInsMutFld
|
||||
:: QualifiedTable -> [TableConstraint] -> Bool -> ObjFldInfo
|
||||
mkInsMutFld tn constraints isUpsertAllowed =
|
||||
:: QualifiedTable -> Bool -> ObjFldInfo
|
||||
mkInsMutFld tn upsertAllowed =
|
||||
ObjFldInfo (Just desc) fldName (fromInpValL inputVals) $
|
||||
G.toGT $ mkMutRespTy tn
|
||||
where
|
||||
@ -756,9 +843,8 @@ mkInsMutFld tn constraints isUpsertAllowed =
|
||||
InpValInfo (Just objsArgDesc) "objects" $ G.toGT $
|
||||
G.toNT $ G.toLT $ G.toNT $ mkInsInpTy tn
|
||||
|
||||
uniqueOrPrimaryCons = filter isUniqueOrPrimary constraints
|
||||
onConflictInpVal = bool (Just onConflictArg) Nothing
|
||||
(null uniqueOrPrimaryCons || not isUpsertAllowed)
|
||||
onConflictInpVal = bool Nothing (Just onConflictArg)
|
||||
upsertAllowed
|
||||
|
||||
onConflictDesc = "on conflict condition"
|
||||
onConflictArg =
|
||||
@ -868,8 +954,7 @@ instance Monoid RootFlds where
|
||||
|
||||
mkOnConflictTypes
|
||||
:: QualifiedTable -> [TableConstraint] -> [PGCol] -> Bool -> [TypeInfo]
|
||||
mkOnConflictTypes tn c cols isUpsertAllowed =
|
||||
bool tyInfos [] (null constraints || not isUpsertAllowed)
|
||||
mkOnConflictTypes tn c cols = bool [] tyInfos
|
||||
where
|
||||
tyInfos = [ TIEnum mkConflictActionTy
|
||||
, TIEnum $ mkConstriantTy tn constraints
|
||||
@ -880,8 +965,8 @@ mkOnConflictTypes tn c cols isUpsertAllowed =
|
||||
|
||||
mkGCtxRole'
|
||||
:: QualifiedTable
|
||||
-- insert cols, is upsert allowed
|
||||
-> Maybe ([PGColInfo], Bool)
|
||||
-- insert perm
|
||||
-> Maybe (InsCtx, Bool)
|
||||
-- select permission
|
||||
-> Maybe [SelField]
|
||||
-- update cols
|
||||
@ -900,12 +985,14 @@ mkGCtxRole' tn insPermM selFldsM updColsM delPermM pkeyCols constraints allCols
|
||||
|
||||
where
|
||||
|
||||
upsertPerm = or $ fmap snd insPermM
|
||||
upsertAllowed = isUpsertAllowed constraints upsertPerm
|
||||
ordByEnums = fromMaybe Map.empty ordByResCtxM
|
||||
onConflictTypes = mkOnConflictTypes tn constraints allCols $
|
||||
or $ fmap snd insPermM
|
||||
onConflictTypes = mkOnConflictTypes tn constraints allCols upsertAllowed
|
||||
jsonOpTys = fromMaybe [] updJSONOpInpObjTysM
|
||||
relInsInpObjTys = map TIInpObj relInsInpObjs
|
||||
|
||||
allTypes = onConflictTypes <> jsonOpTys <> catMaybes
|
||||
allTypes = relInsInpObjTys <> onConflictTypes <> jsonOpTys <> catMaybes
|
||||
[ TIInpObj <$> insInpObjM
|
||||
, TIInpObj <$> updSetInpObjM
|
||||
, TIInpObj <$> updIncInpObjM
|
||||
@ -927,12 +1014,14 @@ mkGCtxRole' tn insPermM selFldsM updColsM delPermM pkeyCols constraints allCols
|
||||
-- helper
|
||||
mkColFldMap ty = mapFromL ((ty,) . nameFromSelFld) . map Left
|
||||
|
||||
insColsM = fst <$> insPermM
|
||||
insCtxM = fst <$> insPermM
|
||||
insColsM = icColumns <$> insCtxM
|
||||
-- insert input type
|
||||
insInpObjM = mkInsInp tn <$> insColsM
|
||||
-- fields used in insert input object
|
||||
insInpObjM = mkInsInp tn <$> insCtxM
|
||||
-- column fields used in insert input object
|
||||
insInpObjFldsM = mkColFldMap (mkInsInpTy tn) <$> insColsM
|
||||
|
||||
-- relationship input objects
|
||||
relInsInpObjs = maybe [] (const $ mkRelInsInps tn upsertAllowed) insCtxM
|
||||
-- update set input type
|
||||
updSetInpObjM = mkUpdSetInp tn <$> updColsM
|
||||
-- update increment input type
|
||||
@ -960,7 +1049,7 @@ mkGCtxRole' tn insPermM selFldsM updColsM delPermM pkeyCols constraints allCols
|
||||
|
||||
-- mut resp obj
|
||||
mutRespObjM =
|
||||
if isJust insColsM || isJust updColsM || isJust delPermM
|
||||
if isJust insCtxM || isJust updColsM || isJust delPermM
|
||||
then Just $ mkMutRespObj tn $ isJust selFldsM
|
||||
else Nothing
|
||||
|
||||
@ -983,7 +1072,7 @@ getRootFldsRole'
|
||||
-> [PGCol]
|
||||
-> [TableConstraint]
|
||||
-> FieldInfoMap
|
||||
-> Maybe (QualifiedTable, [T.Text], Bool) -- insert perm
|
||||
-> Maybe ([T.Text], Bool) -- insert perm
|
||||
-> Maybe (S.BoolExp, Maybe Int, [T.Text]) -- select filter
|
||||
-> Maybe ([PGCol], S.BoolExp, [T.Text]) -- update filter
|
||||
-> Maybe (S.BoolExp, [T.Text]) -- delete filter
|
||||
@ -997,10 +1086,11 @@ getRootFldsRole' tn primCols constraints fields insM selM updM delM =
|
||||
, getPKeySelDet selM $ getColInfos primCols colInfos
|
||||
]
|
||||
colInfos = fst $ validPartitionFieldInfoMap fields
|
||||
getInsDet (vn, hdrs, isUpsertAllowed) =
|
||||
( OCInsert tn vn (map pgiName colInfos) hdrs
|
||||
, Right $ mkInsMutFld tn constraints isUpsertAllowed
|
||||
)
|
||||
getInsDet (hdrs, upsertPerm) =
|
||||
let upsertAllowed = isUpsertAllowed constraints upsertPerm
|
||||
in ( OCInsert tn hdrs
|
||||
, Right $ mkInsMutFld tn upsertAllowed
|
||||
)
|
||||
getUpdDet (updCols, updFltr, hdrs) =
|
||||
( OCUpdate tn updFltr hdrs
|
||||
, Right $ mkUpdMutFld tn $ getColInfos updCols colInfos
|
||||
@ -1039,7 +1129,7 @@ getSelFlds tableCache fields role selPermInfo =
|
||||
return $ fmap Left $ bool Nothing (Just pgColInfo) $
|
||||
Set.member (pgiName pgColInfo) allowedCols
|
||||
FIRelationship relInfo -> do
|
||||
remTableInfo <- getTabInfo $ riRTable relInfo
|
||||
remTableInfo <- getTabInfo tableCache $ riRTable relInfo
|
||||
let remTableSelPermM =
|
||||
Map.lookup role (tiRolePermInfoMap remTableInfo) >>= _permSel
|
||||
return $ flip fmap remTableSelPermM $
|
||||
@ -1050,9 +1140,34 @@ getSelFlds tableCache fields role selPermInfo =
|
||||
)
|
||||
where
|
||||
allowedCols = spiCols selPermInfo
|
||||
getTabInfo tn =
|
||||
onNothing (Map.lookup tn tableCache) $
|
||||
throw500 $ "remote table not found: " <>> tn
|
||||
|
||||
mkInsCtx
|
||||
:: MonadError QErr m
|
||||
=> RoleName
|
||||
-> TableCache -> FieldInfoMap -> InsPermInfo -> m InsCtx
|
||||
mkInsCtx role tableCache fields insPermInfo = do
|
||||
relTupsM <- forM rels $ \relInfo -> do
|
||||
let remoteTable = riRTable relInfo
|
||||
relName = riName relInfo
|
||||
remoteTableInfo <- getTabInfo tableCache remoteTable
|
||||
case getInsPerm remoteTableInfo role of
|
||||
Nothing -> return Nothing
|
||||
Just _ -> return $ Just (relName, relInfo)
|
||||
|
||||
let relInfoMap = Map.fromList $ catMaybes relTupsM
|
||||
return $ InsCtx iView cols relInfoMap
|
||||
where
|
||||
cols = getCols fields
|
||||
rels = getRels fields
|
||||
iView = ipiView insPermInfo
|
||||
|
||||
mkAdminInsCtx :: QualifiedTable -> FieldInfoMap -> InsCtx
|
||||
mkAdminInsCtx tn fields =
|
||||
InsCtx tn cols relInfoMap
|
||||
where
|
||||
relInfoMap = mapFromL riName rels
|
||||
cols = getCols fields
|
||||
rels = getRels fields
|
||||
|
||||
mkGCtxRole
|
||||
:: (MonadError QErr m)
|
||||
@ -1063,15 +1178,18 @@ mkGCtxRole
|
||||
-> [TableConstraint]
|
||||
-> RoleName
|
||||
-> RolePermInfo
|
||||
-> m (TyAgg, RootFlds)
|
||||
-> m (TyAgg, RootFlds, InsCtxMap)
|
||||
mkGCtxRole tableCache tn fields pCols constraints role permInfo = do
|
||||
selFldsM <- mapM (getSelFlds tableCache fields role) $ _permSel permInfo
|
||||
let insColsM = ((colInfos,) . ipiAllowUpsert) <$> _permIns permInfo
|
||||
updColsM = filterColInfos . upiCols <$> _permUpd permInfo
|
||||
tyAgg = mkGCtxRole' tn insColsM selFldsM updColsM
|
||||
tabInsCtxM <- forM (_permIns permInfo) $ \ipi -> do
|
||||
tic <- mkInsCtx role tableCache fields ipi
|
||||
return (tic, ipiAllowUpsert ipi)
|
||||
let updColsM = filterColInfos . upiCols <$> _permUpd permInfo
|
||||
tyAgg = mkGCtxRole' tn tabInsCtxM selFldsM updColsM
|
||||
(void $ _permDel permInfo) pColInfos constraints allCols
|
||||
rootFlds = getRootFldsRole tn pCols constraints fields permInfo
|
||||
return (tyAgg, rootFlds)
|
||||
insCtxMap = maybe Map.empty (Map.singleton tn) $ fmap fst tabInsCtxM
|
||||
return (tyAgg, rootFlds, insCtxMap)
|
||||
where
|
||||
colInfos = fst $ validPartitionFieldInfoMap fields
|
||||
allCols = map pgiName colInfos
|
||||
@ -1091,7 +1209,7 @@ getRootFldsRole tn pCols constraints fields (RolePermInfo insM selM updM delM) =
|
||||
(mkIns <$> insM) (mkSel <$> selM)
|
||||
(mkUpd <$> updM) (mkDel <$> delM)
|
||||
where
|
||||
mkIns i = (ipiView i, ipiRequiredHeaders i, ipiAllowUpsert i)
|
||||
mkIns i = (ipiRequiredHeaders i, ipiAllowUpsert i)
|
||||
mkSel s = (spiFilter s, spiLimit s, spiRequiredHeaders s)
|
||||
mkUpd u = ( Set.toList $ upiCols u
|
||||
, upiFilter u
|
||||
@ -1103,13 +1221,15 @@ mkGCtxMapTable
|
||||
:: (MonadError QErr m)
|
||||
=> TableCache
|
||||
-> TableInfo
|
||||
-> m (Map.HashMap RoleName (TyAgg, RootFlds))
|
||||
-> m (Map.HashMap RoleName (TyAgg, RootFlds, InsCtxMap))
|
||||
mkGCtxMapTable tableCache (TableInfo tn _ fields rolePerms constraints pkeyCols _) = do
|
||||
m <- Map.traverseWithKey (mkGCtxRole tableCache tn fields pkeyCols validConstraints) rolePerms
|
||||
let adminCtx = mkGCtxRole' tn (Just (colInfos, True))
|
||||
let adminInsCtx = mkAdminInsCtx tn fields
|
||||
adminCtx = mkGCtxRole' tn (Just (adminInsCtx, True))
|
||||
(Just selFlds) (Just colInfos) (Just ())
|
||||
pkeyColInfos validConstraints allCols
|
||||
return $ Map.insert adminRole (adminCtx, adminRootFlds) m
|
||||
adminInsCtxMap = Map.singleton tn adminInsCtx
|
||||
return $ Map.insert adminRole (adminCtx, adminRootFlds, adminInsCtxMap) m
|
||||
where
|
||||
validConstraints = mkValidConstraints constraints
|
||||
colInfos = fst $ validPartitionFieldInfoMap fields
|
||||
@ -1121,7 +1241,7 @@ mkGCtxMapTable tableCache (TableInfo tn _ fields rolePerms constraints pkeyCols
|
||||
noFilter = S.BELit True
|
||||
adminRootFlds =
|
||||
getRootFldsRole' tn pkeyCols constraints fields
|
||||
(Just (tn, [], True)) (Just (noFilter, Nothing, []))
|
||||
(Just ([], True)) (Just (noFilter, Nothing, []))
|
||||
(Just (allCols, noFilter, [])) (Just (noFilter, []))
|
||||
|
||||
mkScalarTyInfo :: PGColType -> ScalarTyInfo
|
||||
@ -1136,13 +1256,14 @@ mkGCtxMap tableCache = do
|
||||
typesMapL <- mapM (mkGCtxMapTable tableCache) $
|
||||
filter tableFltr $ Map.elems tableCache
|
||||
let typesMap = foldr (Map.unionWith mappend) Map.empty typesMapL
|
||||
return $ Map.map (uncurry mkGCtx) typesMap
|
||||
return $ flip Map.map typesMap $ \(ty, flds, insCtxMap) ->
|
||||
mkGCtx ty flds insCtxMap
|
||||
where
|
||||
tableFltr ti = not (tiSystemDefined ti)
|
||||
&& isValidTableName (tiName ti)
|
||||
|
||||
mkGCtx :: TyAgg -> RootFlds -> GCtx
|
||||
mkGCtx (TyAgg tyInfos fldInfos ordByEnums) (RootFlds flds) =
|
||||
mkGCtx :: TyAgg -> RootFlds -> InsCtxMap -> GCtx
|
||||
mkGCtx (TyAgg tyInfos fldInfos ordByEnums) (RootFlds flds) insCtxMap =
|
||||
let queryRoot = mkObjTyInfo (Just "query root") (G.NamedType "query_root") $
|
||||
mapFromL _fiName (schemaFld:typeFld:qFlds)
|
||||
colTys = Set.toList $ Set.fromList $ map pgiType $
|
||||
@ -1156,8 +1277,8 @@ mkGCtx (TyAgg tyInfos fldInfos ordByEnums) (RootFlds flds) =
|
||||
] <>
|
||||
scalarTys <> compTys <> defaultTypes
|
||||
-- for now subscription root is query root
|
||||
in GCtx allTys fldInfos ordByEnums queryRoot mutRootM (Just queryRoot) $
|
||||
Map.map fst flds
|
||||
in GCtx allTys fldInfos ordByEnums queryRoot mutRootM (Just queryRoot)
|
||||
(Map.map fst flds) insCtxMap
|
||||
where
|
||||
|
||||
mkMutRoot =
|
||||
@ -1187,4 +1308,4 @@ mkGCtx (TyAgg tyInfos fldInfos ordByEnums) (RootFlds flds) =
|
||||
|
||||
getGCtx :: RoleName -> Map.HashMap RoleName GCtx -> GCtx
|
||||
getGCtx rn =
|
||||
fromMaybe (mkGCtx mempty mempty) . Map.lookup rn
|
||||
fromMaybe (mkGCtx mempty mempty mempty) . Map.lookup rn
|
||||
|
@@ -29,6 +29,7 @@ module Hasura.GraphQL.Validate.Types
  , getObjTyM
  , mkScalarTy
  , pgColTyToScalar
  , pgColValToAnnGVal
  , getNamedTy
  , mkTyInfoMap
  , fromTyDef
@@ -285,6 +286,9 @@ instance J.ToJSON AnnGValue where
    -- J.
    -- J.toJSON [J.toJSON ty, J.toJSON valM]

pgColValToAnnGVal :: PGColType -> PGColValue -> AnnGValue
pgColValToAnnGVal colTy colVal = AGScalar colTy $ Just colVal

hasNullVal :: AnnGValue -> Bool
hasNullVal = \case
  AGScalar _ Nothing -> True

@@ -36,6 +36,10 @@ data ConflictClauseP1
  | CP1Update !ConflictTarget ![PGCol]
  deriving (Show, Eq)

isDoNothing :: ConflictClauseP1 -> Bool
isDoNothing (CP1DoNothing _) = True
isDoNothing _                = False

data InsertQueryP1
  = InsertQueryP1
  { iqp1Table :: !QualifiedTable
@@ -51,14 +55,16 @@ mkSQLInsert (InsertQueryP1 tn vn cols vals c mutFlds) =
  mkSelWith tn (S.CTEInsert insert) mutFlds
  where
    insert =
      S.SQLInsert vn cols vals (toSQLConflict c) $ Just S.returningStar
    toSQLConflict conflict = case conflict of
      Nothing -> Nothing
      Just (CP1DoNothing Nothing) -> Just $ S.DoNothing Nothing
      Just (CP1DoNothing (Just ct)) -> Just $ S.DoNothing $ Just $ toSQLCT ct
      Just (CP1Update ct inpCols) -> Just $ S.Update (toSQLCT ct)
                                     (S.buildSEWithExcluded inpCols)
      S.SQLInsert vn cols vals (toSQLConflict <$> c) $ Just S.returningStar

toSQLConflict :: ConflictClauseP1 -> S.SQLConflict
toSQLConflict conflict = case conflict of
  (CP1DoNothing Nothing) -> S.DoNothing Nothing
  (CP1DoNothing (Just ct)) -> S.DoNothing $ Just $ toSQLCT ct
  (CP1Update ct inpCols) -> S.Update (toSQLCT ct)
    (S.buildSEWithExcluded inpCols)

  where
    toSQLCT ct = case ct of
      Column pgCols -> S.SQLColumn pgCols
      Constraint cn -> S.SQLConstraint cn
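-- Sketch (not from this commit) of the SQL the branches above correspond to,
-- assuming a hypothetical constraint "article_pkey" and input columns
-- [title, content]:
--
--   CP1DoNothing Nothing                        => ON CONFLICT DO NOTHING
--   CP1DoNothing (Just (Constraint ..))         => ON CONFLICT ON CONSTRAINT article_pkey DO NOTHING
--   CP1Update (Constraint ..) [title, content]  => ON CONFLICT ON CONSTRAINT article_pkey DO UPDATE
--                                                  SET title = EXCLUDED.title, content = EXCLUDED.content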
@@ -21,6 +21,7 @@ module Hasura.RQL.Types.Error
  -- Aeson helpers
  , runAesonParser
  , decodeValue
  , decodeFromBS

  -- Modify error messages
  , modifyErr
@@ -38,12 +39,13 @@ module Hasura.RQL.Types.Error
import Data.Aeson
import Data.Aeson.Internal
import Data.Aeson.Types
import qualified Database.PG.Query  as Q
import qualified Database.PG.Query     as Q
import Hasura.Prelude
import Text.Show          (Show (..))
import Text.Show             (Show (..))

import qualified Data.Text          as T
import qualified Network.HTTP.Types as N
import qualified Data.ByteString.Lazy  as BL
import qualified Data.Text             as T
import qualified Network.HTTP.Types    as N

data Code
  = PermissionDenied
@@ -276,3 +278,6 @@ runAesonParser p =

decodeValue :: (FromJSON a, QErrM m) => Value -> m a
decodeValue = liftIResult . ifromJSON

decodeFromBS :: (FromJSON a, QErrM m) => BL.ByteString -> m a
decodeFromBS = either (throw500 . T.pack) decodeValue . eitherDecode

@ -316,8 +316,8 @@ mkSQLOpExp
  -> SQLExp -- result
mkSQLOpExp op lhs rhs = SEOpApp op [lhs, rhs]

toEmptyArrWhenNull :: SQLExp -> SQLExp
toEmptyArrWhenNull e = SEFnApp "coalesce" [e, SELit "[]"] Nothing
handleIfNull :: SQLExp -> SQLExp -> SQLExp
handleIfNull l e = SEFnApp "coalesce" [e, l] Nothing

getExtrAlias :: Extractor -> Maybe Alias
getExtrAlias (Extractor _ ma) = ma
@ -564,8 +564,11 @@ instance ToSQL UsingExp where
newtype RetExp = RetExp [Extractor]
  deriving (Show, Eq)

selectStar :: Extractor
selectStar = Extractor SEStar Nothing

returningStar :: RetExp
returningStar = RetExp [Extractor SEStar Nothing]
returningStar = RetExp [selectStar]

instance ToSQL RetExp where
  toSQL (RetExp [])
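In the hunk above, the special-purpose ``toEmptyArrWhenNull`` gives way to ``handleIfNull``, which takes the fallback expression as its first argument; the old behaviour is recovered with ``handleIfNull (SELit "[]")``. A tiny standalone sketch of that generalisation over plain values (illustrative names only)::

    import Data.Maybe (fromMaybe)

    -- coalesce-style fallback: take the value when present, else the default.
    handleIfNull :: String -> Maybe String -> String
    handleIfNull fallback = fromMaybe fallback

    -- The old, specialised helper is handleIfNull applied to "[]".
    toEmptyArrWhenNull :: Maybe String -> String
    toEmptyArrWhenNull = handleIfNull "[]"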
@ -170,6 +170,10 @@ qualTableToTxt (QualifiedTable (SchemaName "public") tn) =
qualTableToTxt (QualifiedTable sn tn) =
  getSchemaTxt sn <> "." <> getTableTxt tn

snakeCaseTable :: QualifiedTable -> T.Text
snakeCaseTable (QualifiedTable sn tn) =
  getSchemaTxt sn <> "_" <> getTableTxt tn

newtype PGCol
  = PGCol { getPGColTxt :: T.Text }
  deriving (Show, Eq, Ord, FromJSON, ToJSON, Hashable, Q.ToPrepArg, Q.FromCol, ToJSONKey, FromJSONKey, Lift)
@ -55,9 +55,13 @@ gqlSpecFiles =
  [ "introspection.yaml"
  , "introspection_user_role.yaml"
  , "insert_mutation/author.yaml"
  , "insert_mutation/author_articles_nested.yaml"
  , "insert_mutation/author_articles_nested_error.yaml"
  , "simple_select_query_author.yaml"
  , "select_query_author_by_pkey.yaml"
  , "insert_mutation/article.yaml"
  , "insert_mutation/article_author_nested.yaml"
  , "insert_mutation/article_author_nested_error.yaml"
  , "insert_mutation/article_on_conflict.yaml"
  , "insert_mutation/article_on_conflict_user_role.yaml"
  , "insert_mutation/article_on_conflict_update_columns.yaml"
@ -12,6 +12,6 @@ query:
    }
response:
  errors:
  - path: $
  - path: $.selectionSet.insert_address.args.objects
    error: Check constraint violation. insert check constraint failed
    code: permission-error
@ -14,6 +14,6 @@ query:
    }
response:
  errors:
  - path: $
  - path: $.selectionSet.insert_address.args.objects
    error: "Not-NULL violation. null value in column \"door_no\" violates not-null constraint"
    code: constraint-violation
@ -0,0 +1,42 @@
description: Insert article and its author via nested mutation
url: /v1alpha1/graphql
status: 200
query:
  query: |
    mutation article_author{
      insert_article(
        objects: [
          {
            title: "Article by author 4",
            content: "Article content for article by author 4",
            is_published: true
            author: {
              data: {
                name: "Article 4"
              }
            }
          },
          {
            title: "Article by author 5",
            content: "Article content for article by author 5",
            is_published: true
            author: {
              data: {
                name: "Article 5"
              }
            }
          }
        ]
      ){
        affected_rows
        returning{
          id
          title
          content
          author{
            id
            name
          }
        }
      }
    }
@ -0,0 +1,44 @@
description: Insert article and its author via nested mutation (Error)
url: /v1alpha1/graphql
status: 400
query:
  query: |
    mutation article_author{
      insert_article(
        objects: [
          {
            title: "Article by author 4",
            content: "Article content for article by author 4",
            is_published: true,
            author_id: 4
            author: {
              data: {
                name: "Article 4"
              }
            }
          },
          {
            title: "Article by author 5",
            content: "Article content for article by author 5",
            is_published: true,
            author_id: 5
            author: {
              data: {
                name: "Article 5"
              }
            }
          }
        ]
      ){
        affected_rows
        returning{
          id
          title
          content
          author{
            id
            name
          }
        }
      }
    }
@ -0,0 +1,32 @@
description: Insert author and its articles via nested mutation
url: /v1alpha1/graphql
status: 200
query:
  query: |
    mutation nested_author_insert {
      insert_author(
        objects: [
          {
            name: "Author 3",
            articles: {
              data: [
                {
                  title: "An article by author 3",
                  content: "Content for article by author 4",
                  is_published: false
                }
              ]
            }
          }
        ]
      ) {
        affected_rows
        returning {
          id
          name
          articles {
            id
          }
        }
      }
    }
@ -0,0 +1,33 @@
description: Insert author and its articles via nested mutation (Error)
url: /v1alpha1/graphql
status: 400
query:
  query: |
    mutation nested_author_insert {
      insert_author(
        objects: [
          {
            name: "Author 3",
            articles: {
              data: [
                {
                  title: "An article by author 3",
                  content: "Content for article by author 4",
                  is_published: false,
                  author_id: 3
                }
              ]
            }
          }
        ]
      ) {
        affected_rows
        returning {
          id
          name
          articles {
            id
          }
        }
      }
    }
@ -14,6 +14,6 @@ query:
    }
response:
  errors:
  - path: $
  - path: $.selectionSet.insert_author.args.objects
    error: "Uniqueness violation. duplicate key value violates unique constraint \"author_name_key\""
    code: constraint-violation
@ -14,6 +14,6 @@ query:
    }
response:
  errors:
  - path: $
  - path: $.selectionSet.insert_address.args.objects
    error: "Not-NULL violation. null value in column \"door_no\" violates not-null constraint"
    code: constraint-violation
@ -22,6 +22,6 @@ query:
    }
response:
  errors:
  - path: $
  - path: $.selectionSet.insert_author.args.objects
    error: "Uniqueness violation. duplicate key value violates unique constraint \"author_name_key\""
    code: constraint-violation
@ -0,0 +1,36 @@
description: Insert article while upserting (do nothing) its author (Error)
url: /v1alpha1/graphql
status: 400
query:
  query: |
    mutation article_author{
      insert_article(
        objects: [
          {
            title: "Article 1 by Author 2",
            content: "Article content for Article 1 by Author 2",
            is_published: true
            author: {
              data: {
                name: "Author 2"
              }
              on_conflict: {
                constraint: author_pkey,
                update_columns: []
              }
            }
          }
        ]
      ){
        affected_rows
        returning{
          id
          title
          content
          author{
            id
            name
          }
        }
      }
    }
@ -0,0 +1,64 @@
description: Insert article and its author via nested mutation
url: /v1alpha1/graphql
status: 200
query:
  query: |
    mutation article_author{
      insert_article(
        objects: [
          {
            id: 3,
            title: "Article 3 by Author 2",
            content: "Article content for Article 1 by Author 2",
            is_published: true
            author: {
              data: {
                id: 2,
                name: "Author 2"
              }
            }
          },
          {
            id: 4,
            title: "Article 4 by Author 3",
            content: "Article content for Article 1 by Author 3",
            is_published: true
            author: {
              data: {
                id: 3,
                name: "Author 3"
              }
            }
          }
        ]
      ){
        affected_rows
        returning{
          id
          title
          content
          author{
            id
            name
          }
        }
      }
    }

response:
  data:
    insert_article:
      affected_rows: 4
      returning:
      - id: 3
        title: "Article 3 by Author 2"
        content: "Article content for Article 1 by Author 2"
        author:
          id: 2
          name: Author 2
      - id: 4
        title: "Article 4 by Author 3"
        content: "Article content for Article 1 by Author 3"
        author:
          id: 3
          name: Author 3
@ -0,0 +1,50 @@
description: Insert article and its author via nested mutation (Error)
url: /v1alpha1/graphql
status: 400
query:
  query: |
    mutation article_author{
      insert_article(
        objects: [
          {
            title: "Article by author 4",
            content: "Article content for article by author 4",
            is_published: true,
            author_id: 4
            author: {
              data: {
                name: "Article 4"
              }
            }
          },
          {
            title: "Article by author 5",
            content: "Article content for article by author 5",
            is_published: true,
            author_id: 5
            author: {
              data: {
                name: "Article 5"
              }
            }
          }
        ]
      ){
        affected_rows
        returning{
          id
          title
          content
          author{
            id
            name
          }
        }
      }
    }

response:
  errors:
  - path: $.selectionSet.insert_article.args.objects[0].author
    error: "cannot insert object relation ship \"author\" as author_id column values are already determined"
    code: validation-failed
@ -0,0 +1,44 @@
description: Upsert author (do nothing) and its articles (Error)
url: /v1alpha1/graphql
status: 400
query:
  query: |
    mutation nested_author_insert {
      insert_author(
        objects: [
          {
            name: "Author 1",
            articles: {
              data: [
                {
                  title: "Article 1 by Author 1",
                  content: "Content for Article 1 by Author 1",
                  is_published: false
                },
                {
                  title: "Article 2 by Author 1",
                  content: "Content for Article 2 by Author 1",
                  is_published: false
                }
              ]
            }
          }
        ],
        on_conflict: {
          constraint: author_pkey,
          update_columns: []
        }
      ) {
        affected_rows
        returning {
          id
          name
          articles {
            id
            title
            content
            is_published
          }
        }
      }
    }
@ -0,0 +1,60 @@
description: Insert author and its articles via nested mutation
url: /v1alpha1/graphql
status: 200
query:
  query: |
    mutation nested_author_insert {
      insert_author(
        objects: [
          {
            id: 1,
            name: "Author 1",
            articles: {
              data: [
                {
                  id: 1,
                  title: "Article 1 by Author 1",
                  content: "Content for Article 1 by Author 1",
                  is_published: false
                },
                {
                  id: 2,
                  title: "Article 2 by Author 1",
                  content: "Content for Article 2 by Author 1",
                  is_published: false
                }
              ]
            }
          }
        ]
      ) {
        affected_rows
        returning {
          id
          name
          articles {
            id
            title
            content
            is_published
          }
        }
      }
    }

response:
  data:
    insert_author:
      affected_rows: 3
      returning:
      - id: 1
        name: Author 1
        articles:
        - id: 1
          title: "Article 1 by Author 1"
          content: "Content for Article 1 by Author 1"
          is_published: false
        - id: 2
          title: "Article 2 by Author 1"
          content: "Content for Article 2 by Author 1"
          is_published: false
@ -0,0 +1,38 @@
description: Insert author and its articles via nested mutation (Error)
url: /v1alpha1/graphql
status: 400
response:
  errors:
  - path: $.selectionSet.insert_author.args.objects[0].articles.data[0]
    error: cannot insert author_id columns as their values are already being determined by parent insert
    code: validation-failed
query:
  query: |
    mutation nested_author_insert {
      insert_author(
        objects: [
          {
            name: "Author 3",
            articles: {
              data: [
                {
                  title: "An article by author 3",
                  content: "Content for article by author 4",
                  is_published: false,
                  author_id: 3
                }
              ]
            }
          }
        ]
      ) {
        affected_rows
        returning {
          id
          name
          articles {
            id
          }
        }
      }
    }
@ -0,0 +1,50 @@
type: bulk
args:

#Author table
- type: run_sql
  args:
    sql: |
      create table author(
        id serial primary key,
        name text unique,
        is_registered boolean not null default false
      );
- type: track_table
  args:
    schema: public
    name: author

#Article table
- type: run_sql
  args:
    sql: |
      CREATE TABLE article (
        id SERIAL PRIMARY KEY,
        title TEXT,
        content TEXT,
        author_id INTEGER REFERENCES author(id),
        is_published BOOLEAN,
        published_on TIMESTAMP
      )
- type: track_table
  args:
    schema: public
    name: article

#Create relationships
- type: create_object_relationship
  args:
    table: article
    name: author
    using:
      foreign_key_constraint_on: author_id

- type: create_array_relationship
  args:
    table: author
    name: articles
    using:
      foreign_key_constraint_on:
        table: article
        column: author_id
@ -0,0 +1,18 @@
type: bulk
args:
#Drop relationship first
- type: drop_relationship
  args:
    relationship: articles
    table:
      schema: public
      name: author

- type: run_sql
  args:
    sql: |
      drop table article
- type: run_sql
  args:
    sql: |
      drop table author
@ -133,6 +133,34 @@ class TestGraphqlInsertConstraints(object):
        st_code, resp = hge_ctx.v1q_f(self.dir + '/teardown.yaml')
        assert st_code == 200, resp

class TestGraphqlNestedInserts(object):

    def test_author_with_articles(self, hge_ctx):
        check_query_f(hge_ctx, self.dir + "/author_with_articles.yaml")

    def test_author_with_articles_author_id_fail(self, hge_ctx):
        check_query_f(hge_ctx, self.dir + "/author_with_articles_author_id_fail.yaml")

    def test_articles_with_author(self, hge_ctx):
        check_query_f(hge_ctx, self.dir + "/articles_with_author.yaml")

    def test_articles_with_author_author_id_fail(self, hge_ctx):
        check_query_f(hge_ctx, self.dir + "/articles_with_author_author_id_fail.yaml")

    def test_author_upsert_articles_fail(self, hge_ctx):
        check_query_f(hge_ctx, self.dir + "/author_upsert_articles_fail.yaml")

    def test_articles_author_upsert_fail(self, hge_ctx):
        check_query_f(hge_ctx, self.dir + "/articles_author_upsert_fail.yaml")

    @pytest.fixture(autouse=True)
    def transact(self, request, hge_ctx):
        self.dir = "queries/graphql_mutation/insert/nested"
        st_code, resp = hge_ctx.v1q_f(self.dir + '/setup.yaml')
        assert st_code == 200, resp
        yield
        st_code, resp = hge_ctx.v1q_f(self.dir + '/teardown.yaml')
        assert st_code == 200, resp

class TestGraphqlUpdateBasic: