From 27e2d647bbae678d03995d724f35c30069db8cd7 Mon Sep 17 00:00:00 2001 From: Rakesh Emmadi Date: Fri, 20 Jul 2018 16:21:20 +0530 Subject: [PATCH] add _inc and jsonb operators to update_mutation (close #159) (#169) --- .../Hasura/GraphQL/Resolve/Mutation.hs | 79 +++++- server/src-lib/Hasura/GraphQL/Schema.hs | 242 ++++++++++++++++-- server/src-lib/Hasura/RQL/DML/Internal.hs | 2 +- server/src-lib/Hasura/RQL/DML/Update.hs | 4 +- .../src-lib/Hasura/RQL/Types/SchemaCache.hs | 8 + server/src-lib/Hasura/SQL/DML.hs | 58 ++++- server/src-lib/Hasura/SQL/Types.hs | 10 + server/test/Spec.hs | 30 ++- server/test/testcases/create_tables.yaml | 7 + .../article.yaml} | 0 .../testcases/insert_mutation/article.yaml | 32 +++ .../article_on_conflict.yaml} | 0 .../article_on_conflict_error_01.yaml} | 0 .../article_on_conflict_error_02.yaml} | 0 .../article_on_conflict_error_03.yaml} | 0 .../article_on_conflict_ignore.yaml} | 0 ...rticle_on_conflict_ignore_constraint.yaml} | 0 .../author.yaml} | 0 .../testcases/insert_mutation/person.yaml | 24 ++ .../insert_mutation/person_array.yaml | 27 ++ server/test/testcases/track_tables.yaml | 4 + .../author.yaml} | 0 .../update_mutation/person_append.yaml | 24 ++ .../person_delete_at_path.yaml | 19 ++ .../update_mutation/person_delete_elem.yaml | 19 ++ .../update_mutation/person_delete_key.yaml | 19 ++ .../update_mutation/person_error_01.yaml | 16 ++ .../testcases/update_mutation/person_inc.yaml | 19 ++ .../update_mutation/person_prepend.yaml | 23 ++ .../testcases/update_mutation/person_set.yaml | 24 ++ 30 files changed, 643 insertions(+), 47 deletions(-) rename server/test/testcases/{delete_mutation_article.yaml => delete_mutation/article.yaml} (100%) create mode 100644 server/test/testcases/insert_mutation/article.yaml rename server/test/testcases/{insert_mutation_article_on_conflict.yaml => insert_mutation/article_on_conflict.yaml} (100%) rename server/test/testcases/{insert_mutation_article_on_conflict_error_01.yaml => insert_mutation/article_on_conflict_error_01.yaml} (100%) rename server/test/testcases/{insert_mutation_article_on_conflict_error_02.yaml => insert_mutation/article_on_conflict_error_02.yaml} (100%) rename server/test/testcases/{insert_mutation_article_on_conflict_error_03.yaml => insert_mutation/article_on_conflict_error_03.yaml} (100%) rename server/test/testcases/{insert_mutation_article_on_conflict_ignore.yaml => insert_mutation/article_on_conflict_ignore.yaml} (100%) rename server/test/testcases/{insert_mutation_article_on_conflict_ignore_constraint.yaml => insert_mutation/article_on_conflict_ignore_constraint.yaml} (100%) rename server/test/testcases/{insert_mutation_author.yaml => insert_mutation/author.yaml} (100%) create mode 100644 server/test/testcases/insert_mutation/person.yaml create mode 100644 server/test/testcases/insert_mutation/person_array.yaml rename server/test/testcases/{update_mutation_author.yaml => update_mutation/author.yaml} (100%) create mode 100644 server/test/testcases/update_mutation/person_append.yaml create mode 100644 server/test/testcases/update_mutation/person_delete_at_path.yaml create mode 100644 server/test/testcases/update_mutation/person_delete_elem.yaml create mode 100644 server/test/testcases/update_mutation/person_delete_key.yaml create mode 100644 server/test/testcases/update_mutation/person_error_01.yaml create mode 100644 server/test/testcases/update_mutation/person_inc.yaml create mode 100644 server/test/testcases/update_mutation/person_prepend.yaml create mode 100644 
server/test/testcases/update_mutation/person_set.yaml diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Mutation.hs b/server/src-lib/Hasura/GraphQL/Resolve/Mutation.hs index 39a8988ac7a..80c8d485331 100644 --- a/server/src-lib/Hasura/GraphQL/Resolve/Mutation.hs +++ b/server/src-lib/Hasura/GraphQL/Resolve/Mutation.hs @@ -1,7 +1,8 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE MultiWayIf #-} -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE MultiWayIf #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} module Hasura.GraphQL.Resolve.Mutation ( convertUpdate @@ -29,6 +30,7 @@ import Hasura.GraphQL.Validate.Field import Hasura.GraphQL.Validate.Types import Hasura.RQL.Types import Hasura.SQL.Types +import Hasura.SQL.Value withSelSet :: (Monad m) => SelSet -> (Field -> m a) -> m (Map.HashMap Text a) withSelSet selSet f = @@ -133,6 +135,44 @@ convertInsert (tn, vn) tableCols fld = do return $ Map.elems $ Map.union (Map.fromList givenCols) defVals defVals = Map.fromList $ zip tableCols (repeat $ S.SEUnsafe "DEFAULT") +type ApplySQLOp = (PGCol, S.SQLExp) -> S.SQLExp + +rhsExpOp :: S.SQLOp -> S.AnnType -> ApplySQLOp +rhsExpOp op annTy (col, e) = + S.mkSQLOpExp op (S.SEIden $ toIden col) annExp + where + annExp = S.SETyAnn e annTy + +lhsExpOp :: S.SQLOp -> S.AnnType -> ApplySQLOp +lhsExpOp op annTy (col, e) = + S.mkSQLOpExp op annExp $ S.SEIden $ toIden col + where + annExp = S.SETyAnn e annTy + +convObjWithOp + :: (MonadError QErr m) + => ApplySQLOp -> AnnGValue -> m [(PGCol, S.SQLExp)] +convObjWithOp opFn val = + flip withObject val $ \_ obj -> forM (Map.toList obj) $ \(k, v) -> do + (_, colVal) <- asPGColVal v + let pgCol = PGCol $ G.unName k + encVal = txtEncoder colVal + sqlExp = opFn (pgCol, encVal) + return (pgCol, sqlExp) + +convDeleteAtPathObj + :: (MonadError QErr m) + => AnnGValue -> m [(PGCol, S.SQLExp)] +convDeleteAtPathObj val = + flip withObject val $ \_ obj -> forM (Map.toList obj) $ \(k, v) -> do + vals <- flip withArray v $ \_ annVals -> mapM asPGColVal annVals + let valExps = map (txtEncoder . 
snd) vals + pgCol = PGCol $ G.unName k + annEncVal = S.SETyAnn (S.SEArray valExps) S.textArrType + sqlExp = S.SEOpApp S.jsonbDeleteAtPathOp + [S.SEIden $ toIden pgCol, annEncVal] + return (pgCol, sqlExp) + convertUpdate :: QualifiedTable -- table -> S.BoolExp -- the filter expression @@ -140,11 +180,38 @@ convertUpdate -> Convert RespTx convertUpdate tn filterExp fld = do -- a set expression is same as a row object - setExp <- withArg args "_set" convertRowObj + setExpM <- withArgM args "_set" convertRowObj + -- where bool expression to filter rows whereExp <- withArg args "where" $ convertBoolExp tn + -- increment operator on integer columns + incExpM <- withArgM args "_inc" $ + convObjWithOp $ rhsExpOp S.incOp S.intType + -- append jsonb value + appendExpM <- withArgM args "_append" $ + convObjWithOp $ rhsExpOp S.jsonbConcatOp S.jsonbType + -- prepend jsonb value + prependExpM <- withArgM args "_prepend" $ + convObjWithOp $ lhsExpOp S.jsonbConcatOp S.jsonbType + -- delete a key in jsonb object + deleteKeyExpM <- withArgM args "_delete_key" $ + convObjWithOp $ rhsExpOp S.jsonbDeleteOp S.textType + -- delete an element in jsonb array + deleteElemExpM <- withArgM args "_delete_elem" $ + convObjWithOp $ rhsExpOp S.jsonbDeleteOp S.intType + -- delete at path in jsonb value + deleteAtPathExpM <- withArgM args "_delete_at_path" convDeleteAtPathObj + mutFlds <- convertMutResp (_fType fld) $ _fSelSet fld prepArgs <- get - let p1 = RU.UpdateQueryP1 tn setExp (filterExp, whereExp) mutFlds + let updExpsM = [ setExpM, incExpM, appendExpM, prependExpM + , deleteKeyExpM, deleteElemExpM, deleteAtPathExpM + ] + updExp = concat $ catMaybes updExpsM + -- at least one of the update operators is expected + unless (any isJust updExpsM) $ throw400 Unexpected $ + "at least one of the _set, _inc, _append, _prepend, _delete_key, _delete_elem or " + <> "_delete_at_path operators is expected" + let p1 = RU.UpdateQueryP1 tn updExp (filterExp, whereExp) mutFlds return $ RU.updateP2 (p1, prepArgs) where args = _fArguments fld diff --git a/server/src-lib/Hasura/GraphQL/Schema.hs b/server/src-lib/Hasura/GraphQL/Schema.hs index a50c2e4c1fe..8620f608571 100644 --- a/server/src-lib/Hasura/GraphQL/Schema.hs +++ b/server/src-lib/Hasura/GraphQL/Schema.hs @@ -339,30 +339,208 @@ input table_set_input { coln: coltyn } -} -mkUpdInp +mkUpdSetInp :: QualifiedTable -> [PGColInfo] -> InpObjTyInfo -mkUpdInp tn cols = +mkUpdSetInp tn cols = InpObjTyInfo (Just desc) (mkUpdSetTy tn) $ fromInpValL $ map mkPGColInp cols where desc = G.Description $ "input type for updating data in table " <>> tn +-- table_inc_input +mkUpdIncTy :: QualifiedTable -> G.NamedType +mkUpdIncTy tn = + G.NamedType $ qualTableToName tn <> "_inc_input" + +{- +input table_inc_input { + integer-col1: int + . + . + integer-coln: int +} +-} + +mkUpdIncInp + :: QualifiedTable -> Maybe [PGColInfo] -> Maybe InpObjTyInfo +mkUpdIncInp tn = maybe Nothing mkType + where + mkType cols = let intCols = onlyIntCols cols + incObjTy = + InpObjTyInfo (Just desc) (mkUpdIncTy tn) $ + fromInpValL $ map mkPGColInp intCols + in bool (Just incObjTy) Nothing $ null intCols + desc = G.Description $ + "input type for incrementing integer columns in table " <>> tn + +-- table_<op>_input +mkJSONOpTy :: QualifiedTable -> G.Name -> G.NamedType +mkJSONOpTy tn op = + G.NamedType $ qualTableToName tn <> op <> "_input" + +-- json ops are _concat, _delete_key, _delete_elem, _delete_at_path +{- +input table_concat_input { + jsonb-col1: json + . + . 
+ jsonb-coln: json +} +-} + +{- +input table_delete_key_input { + jsonb-col1: string + . + . + jsonb-coln: string +} +-} + +{- +input table_delete_elem_input { + jsonb-col1: int + . + . + jsonb-coln: int +} +-} + +{- +input table_delete_at_path_input { + jsonb-col1: [string] + . + . + jsonb-coln: [string] +} +-} + +-- jsonb operators and descriptions +prependOp :: G.Name +prependOp = "_prepend" + +prependDesc :: G.Description +prependDesc = "prepend existing jsonb value of filtered columns with new jsonb value" + +appendOp :: G.Name +appendOp = "_append" + +appendDesc :: G.Description +appendDesc = "append existing jsonb value of filtered columns with new jsonb value" + +deleteKeyOp :: G.Name +deleteKeyOp = "_delete_key" + +deleteKeyDesc :: G.Description +deleteKeyDesc = "delete key/value pair or string element." + <> " key/value pairs are matched based on their key value" + +deleteElemOp :: G.Name +deleteElemOp = "_delete_elem" + +deleteElemDesc :: G.Description +deleteElemDesc = "delete the array element with specified index (negative integers count from the end)." + <> " throws an error if top level container is not an array" + +deleteAtPathOp :: G.Name +deleteAtPathOp = "_delete_at_path" + +deleteAtPathDesc :: G.Description +deleteAtPathDesc = "delete the field or element with specified path" + <> " (for JSON arrays, negative integers count from the end)" + +mkUpdJSONOpInp + :: QualifiedTable -> [PGColInfo] -> [InpObjTyInfo] +mkUpdJSONOpInp tn cols = bool inpObjs [] $ null jsonbCols + where + jsonbCols = onlyJSONBCols cols + jsonbColNames = map pgiName jsonbCols + + inpObjs = [ prependInpObj, appendInpObj, deleteKeyInpObj + , deleteElemInpObj, deleteAtPathInpObj + ] + + appendInpObj = + InpObjTyInfo (Just appendDesc) (mkJSONOpTy tn appendOp) $ + fromInpValL $ map mkPGColInp jsonbCols + + prependInpObj = + InpObjTyInfo (Just prependDesc) (mkJSONOpTy tn prependOp) $ + fromInpValL $ map mkPGColInp jsonbCols + + deleteKeyInpObj = + InpObjTyInfo (Just deleteKeyDesc) (mkJSONOpTy tn deleteKeyOp) $ + fromInpValL $ map deleteKeyInpVal jsonbColNames + deleteKeyInpVal c = InpValInfo Nothing (G.Name $ getPGColTxt c) $ + G.toGT $ G.NamedType "String" + + deleteElemInpObj = + InpObjTyInfo (Just deleteElemDesc) (mkJSONOpTy tn deleteElemOp) $ + fromInpValL $ map deleteElemInpVal jsonbColNames + deleteElemInpVal c = InpValInfo Nothing (G.Name $ getPGColTxt c) $ + G.toGT $ G.NamedType "Int" + + deleteAtPathInpObj = + InpObjTyInfo (Just deleteAtPathDesc) (mkJSONOpTy tn deleteAtPathOp) $ + fromInpValL $ map deleteAtPathInpVal jsonbColNames + deleteAtPathInpVal c = InpValInfo Nothing (G.Name $ getPGColTxt c) $ + G.toGT $ G.toLT $ G.NamedType "String" + {- update_table( where : table_bool_exp! - _set : table_set_input! 
+ _set : table_set_input + _inc : table_inc_input + _concat: table_concat_input + _delete_key: table_delete_key_input + _delete_elem: table_delete_elem_input + _delete_at_path: table_delete_at_path_input ): table_mutation_response -} +mkIncInpVal :: QualifiedTable -> [PGColInfo] -> Maybe InpValInfo +mkIncInpVal tn cols = bool (Just incArg) Nothing $ null intCols + where + intCols = onlyIntCols cols + incArgDesc = "increments the integer columns of the filtered rows with the given values" + incArg = InpValInfo (Just incArgDesc) "_inc" $ G.toGT $ mkUpdIncTy tn + +mkJSONOpInpVals :: QualifiedTable -> [PGColInfo] -> [InpValInfo] +mkJSONOpInpVals tn cols = bool jsonbOpArgs [] $ null jsonbCols + where + jsonbCols = onlyJSONBCols cols + jsonbOpArgs = [appendArg, prependArg, deleteKeyArg, deleteElemArg, deleteAtPathArg] + + appendArg = + InpValInfo (Just appendDesc) appendOp $ G.toGT $ mkJSONOpTy tn appendOp + + prependArg = InpValInfo (Just prependDesc) prependOp $ G.toGT $ mkJSONOpTy tn prependOp + + deleteKeyArg = InpValInfo (Just deleteKeyDesc) deleteKeyOp $ + G.toGT $ mkJSONOpTy tn deleteKeyOp + + deleteElemArg = InpValInfo (Just deleteElemDesc) deleteElemOp $ + G.toGT $ mkJSONOpTy tn deleteElemOp + + deleteAtPathArg = InpValInfo (Just deleteAtPathDesc) deleteAtPathOp $ + G.toGT $ mkJSONOpTy tn deleteAtPathOp + mkUpdMutFld - :: QualifiedTable -> ObjFldInfo -mkUpdMutFld tn = - ObjFldInfo (Just desc) fldName (fromInpValL [filterArg, setArg]) $ + :: QualifiedTable -> [PGColInfo] -> ObjFldInfo +mkUpdMutFld tn cols = + ObjFldInfo (Just desc) fldName (fromInpValL inputValues) $ G.toGT $ mkMutRespTy tn where + inputValues = [filterArg, setArg] <> incArg + <> mkJSONOpInpVals tn cols desc = G.Description $ "update data of the table: " <>> tn fldName = "update_" <> qualTableToName tn @@ -374,8 +552,9 @@ mkUpdMutFld tn = setArgDesc = "sets the columns of the filtered rows to the given values" setArg = - InpValInfo (Just setArgDesc) "_set" $ G.toGT $ - G.toNT $ mkUpdSetTy tn + InpValInfo (Just setArgDesc) "_set" $ G.toGT $ mkUpdSetTy tn + + incArg = maybeToList $ mkIncInpVal tn cols {- @@ -484,9 +663,8 @@ mkInsMutFld tn constraints = InpValInfo (Just objsArgDesc) "objects" $ G.toGT $ G.toNT $ G.toLT $ G.toNT $ mkInsInpTy tn - onConflictInpVal = case filter isUniqueOrPrimary constraints of - [] -> Nothing - _ -> Just onConflictArg + uniqueOrPrimaryCons = filter isUniqueOrPrimary constraints + onConflictInpVal = bool (Just onConflictArg) Nothing $ null uniqueOrPrimaryCons onConflictDesc = "on conflict condition" onConflictArg = @@ -580,12 +758,13 @@ instance Monoid RootFlds where mappend = (<>) mkOnConflictTypes :: QualifiedTable -> [TableConstraint] -> [TypeInfo] -mkOnConflictTypes tn c = case filter isUniqueOrPrimary c of - [] -> [] - constraints -> [ TIEnum mkConflictActionTy - , TIEnum $ mkConstriantTy tn constraints - , TIInpObj $ mkOnConflictInp tn - ] +mkOnConflictTypes tn c = bool tyInfos [] $ null constraints + where + tyInfos = [ TIEnum mkConflictActionTy + , TIEnum $ mkConstriantTy tn constraints + , TIInpObj $ mkOnConflictInp tn + ] + constraints = filter isUniqueOrPrimary c mkGCtxRole' :: QualifiedTable @@ -607,10 +786,12 @@ mkGCtxRole' tn insColsM selFldsM updColsM delPermM constraints = ordByEnums = fromMaybe Map.empty ordByResCtxM onConflictTypes = mkOnConflictTypes tn constraints + jsonOpTys = fromMaybe [] updJSONOpInpObjTysM - allTypes = onConflictTypes <> catMaybes + allTypes = onConflictTypes <> jsonOpTys <> catMaybes [ TIInpObj <$> insInpObjM , TIInpObj <$> updSetInpObjM + , 
TIInpObj <$> updIncInpObjM , TIInpObj <$> boolExpInpObjM , TIObj <$> noRelsObjM , TIObj <$> mutRespObjM @@ -636,7 +817,12 @@ mkGCtxRole' tn insColsM selFldsM updColsM delPermM constraints = insInpObjFldsM = mkColFldMap (mkInsInpTy tn) <$> insColsM -- update set input type - updSetInpObjM = mkUpdInp tn <$> updColsM + updSetInpObjM = mkUpdSetInp tn <$> updColsM + -- update increment input type + updIncInpObjM = mkUpdIncInp tn updColsM + -- update json operator input type + updJSONOpInpObjsM = mkUpdJSONOpInp tn <$> updColsM + updJSONOpInpObjTysM = map TIInpObj <$> updJSONOpInpObjsM -- fields used in set input object updSetInpObjFldsM = mkColFldMap (mkUpdSetTy tn) <$> updColsM @@ -685,20 +871,24 @@ getRootFldsRole' -> FieldInfoMap -> Maybe (QualifiedTable, [T.Text]) -- insert view -> Maybe (S.BoolExp, [T.Text]) -- select filter - -> Maybe (S.BoolExp, [T.Text]) -- update filter + -> Maybe ([PGCol], S.BoolExp, [T.Text]) -- update filter -> Maybe (S.BoolExp, [T.Text]) -- delete filter -> RootFlds getRootFldsRole' tn constraints fields insM selM updM delM = RootFlds mFlds where + getUpdColInfos cols = flip filter (getCols fields) $ \c -> + pgiName c `elem` cols mFlds = mapFromL (either _fiName _fiName . snd) $ catMaybes [ getInsDet <$> insM, getSelDet <$> selM , getUpdDet <$> updM, getDelDet <$> delM] colInfos = fst $ partitionFieldInfos $ Map.elems fields getInsDet (vn, hdrs) = (OCInsert tn vn (map pgiName colInfos) hdrs, Right $ mkInsMutFld tn constraints) - getUpdDet (updFltr, hdrs) = - (OCUpdate tn updFltr hdrs, Right $ mkUpdMutFld tn) + getUpdDet (updCols, updFltr, hdrs) = + ( OCUpdate tn updFltr hdrs + , Right $ mkUpdMutFld tn $ getUpdColInfos updCols + ) getDelDet (delFltr, hdrs) = (OCDelete tn delFltr hdrs, Right $ mkDelMutFld tn) getSelDet (selFltr, hdrs) = @@ -772,7 +962,10 @@ getRootFldsRole tn constraints fields (RolePermInfo insM selM updM delM) = where mkIns i = (ipiView i, ipiRequiredHeaders i) mkSel s = (spiFilter s, spiRequiredHeaders s) - mkUpd u = (upiFilter u, upiRequiredHeaders u) + mkUpd u = ( Set.toList $ upiCols u + , upiFilter u + , upiRequiredHeaders u + ) mkDel d = (dpiFilter d, dpiRequiredHeaders d) mkGCtxMapTable @@ -787,13 +980,14 @@ mkGCtxMapTable tableCache (TableInfo tn _ fields rolePerms constraints) = do return $ Map.insert adminRole (adminCtx, adminRootFlds) m where colInfos = fst $ partitionFieldInfos $ Map.elems fields + allCols = map pgiName colInfos selFlds = flip map (Map.elems fields) $ \case FIColumn pgColInfo -> Left pgColInfo FIRelationship relInfo -> Right (relInfo, noFilter) noFilter = S.BELit True adminRootFlds = getRootFldsRole' tn constraints fields (Just (tn, [])) (Just (noFilter, [])) - (Just (noFilter, [])) (Just (noFilter, [])) + (Just (allCols, noFilter, [])) (Just (noFilter, [])) mkScalarTyInfo :: PGColType -> ScalarTyInfo mkScalarTyInfo = ScalarTyInfo Nothing diff --git a/server/src-lib/Hasura/RQL/DML/Internal.hs b/server/src-lib/Hasura/RQL/DML/Internal.hs index 83af333b6cb..4df7df1d8fc 100644 --- a/server/src-lib/Hasura/RQL/DML/Internal.hs +++ b/server/src-lib/Hasura/RQL/DML/Internal.hs @@ -236,7 +236,7 @@ mkColExtrAl :: (IsIden a) => Maybe a -> (PGCol, PGColType) -> S.Extractor mkColExtrAl alM (c, pct) = if pct == PGGeometry || pct == PGGeography then S.mkAliasedExtrFromExp - ((S.SEFnApp "ST_AsGeoJSON" [S.mkSIdenExp c] Nothing) `S.SETyAnn` "json") alM + ((S.SEFnApp "ST_AsGeoJSON" [S.mkSIdenExp c] Nothing) `S.SETyAnn` S.jsonType) alM else S.mkAliasedExtr c alM -- validate headers diff --git a/server/src-lib/Hasura/RQL/DML/Update.hs 
b/server/src-lib/Hasura/RQL/DML/Update.hs index e3ad1a42a9b..cfe99038d48 100644 --- a/server/src-lib/Hasura/RQL/DML/Update.hs +++ b/server/src-lib/Hasura/RQL/DML/Update.hs @@ -59,7 +59,7 @@ convInc -> m (PGCol, S.SQLExp) convInc f col colType val = do prepExp <- f colType val - return (col, S.SEOpApp "+" [S.mkSIdenExp col, prepExp]) + return (col, S.SEOpApp S.incOp [S.mkSIdenExp col, prepExp]) convMul :: (QErrM m) @@ -70,7 +70,7 @@ convMul -> m (PGCol, S.SQLExp) convMul f col colType val = do prepExp <- f colType val - return (col, S.SEOpApp "*" [S.mkSIdenExp col, prepExp]) + return (col, S.SEOpApp S.mulOp [S.mkSIdenExp col, prepExp]) convSet :: (QErrM m) diff --git a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs index 70a1002a470..1813d96a121 100644 --- a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs +++ b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs @@ -14,6 +14,8 @@ module Hasura.RQL.Types.SchemaCache , TableInfo(..) , TableConstraint(..) , ConstraintType(..) + , onlyIntCols + , onlyJSONBCols , isUniqueOrPrimary , mkTableInfo , addTableToCache @@ -183,6 +185,12 @@ data PGColInfo $(deriveToJSON (aesonDrop 3 snakeCase) ''PGColInfo) +onlyIntCols :: [PGColInfo] -> [PGColInfo] +onlyIntCols = filter (isIntegerType . pgiType) + +onlyJSONBCols :: [PGColInfo] -> [PGColInfo] +onlyJSONBCols = filter (isJSONBType . pgiType) + data RelInfo = RelInfo { riName :: !RelName diff --git a/server/src-lib/Hasura/SQL/DML.hs b/server/src-lib/Hasura/SQL/DML.hs index 336d6443b03..259b0d65935 100644 --- a/server/src-lib/Hasura/SQL/DML.hs +++ b/server/src-lib/Hasura/SQL/DML.hs @@ -198,6 +198,44 @@ instance ToSQL QIden where toSQL (QIden qual iden) = mconcat [toSQL qual, BB.char7 '.', toSQL iden] +newtype SQLOp + = SQLOp {sqlOpTxt :: T.Text} + deriving (Show, Eq) + +incOp :: SQLOp +incOp = SQLOp "+" + +mulOp :: SQLOp +mulOp = SQLOp "*" + +jsonbConcatOp :: SQLOp +jsonbConcatOp = SQLOp "||" + +jsonbDeleteOp :: SQLOp +jsonbDeleteOp = SQLOp "-" + +jsonbDeleteAtPathOp :: SQLOp +jsonbDeleteAtPathOp = SQLOp "#-" + +newtype AnnType + = AnnType {unAnnType :: T.Text} + deriving (Show, Eq) + +intType :: AnnType +intType = AnnType "int" + +textType :: AnnType +textType = AnnType "text" + +textArrType :: AnnType +textArrType = AnnType "text[]" + +jsonType :: AnnType +jsonType = AnnType "json" + +jsonbType :: AnnType +jsonbType = AnnType "jsonb" + data SQLExp = SEPrep !Int | SELit !T.Text @@ -207,11 +245,12 @@ data SQLExp | SEIden !Iden | SEQIden !QIden | SEFnApp !T.Text ![SQLExp] !(Maybe OrderByExp) - | SEOpApp !T.Text ![SQLExp] - | SETyAnn !SQLExp !T.Text + | SEOpApp !SQLOp ![SQLExp] + | SETyAnn !SQLExp !AnnType | SECond !BoolExp !SQLExp !SQLExp | SEBool !BoolExp | SEExcluded !T.Text + | SEArray ![SQLExp] deriving (Show, Eq) newtype Alias @@ -240,9 +279,10 @@ instance ToSQL SQLExp where toSQL (SEFnApp name args mObe) = TE.encodeUtf8Builder name <> paren ((", " <+> args) <-> toSQL mObe) toSQL (SEOpApp op args) = - paren (op <+> args) + paren (sqlOpTxt op <+> args) toSQL (SETyAnn e ty) = - paren (toSQL e) <> BB.string7 "::" <> TE.encodeUtf8Builder ty + paren (toSQL e) <> BB.string7 "::" + <> TE.encodeUtf8Builder (unAnnType ty) toSQL (SECond cond te fe) = BB.string7 "CASE WHEN" <-> toSQL cond <-> BB.string7 "THEN" <-> toSQL te <-> @@ -251,10 +291,19 @@ instance ToSQL SQLExp where toSQL (SEBool be) = toSQL be toSQL (SEExcluded t) = BB.string7 "EXCLUDED." 
<> toSQL (PGCol t) + toSQL (SEArray exps) = BB.string7 "ARRAY" <> BB.char7 '[' + <> (", " <+> exps) <> BB.char7 ']' data Extractor = Extractor !SQLExp !(Maybe Alias) deriving (Show, Eq) +mkSQLOpExp + :: SQLOp + -> SQLExp -- lhs + -> SQLExp -- rhs + -> SQLExp -- result +mkSQLOpExp op lhs rhs = SEOpApp op [lhs, rhs] + getExtrAlias :: Extractor -> Maybe Alias getExtrAlias (Extractor _ ma) = ma @@ -565,3 +614,4 @@ instance ToSQL SelectWith where "WITH " <> (", " <+> map f ctes) <-> toSQL sel where f (Alias al, q) = toSQL al <-> "AS" <-> paren (toSQL q) + diff --git a/server/src-lib/Hasura/SQL/Types.hs b/server/src-lib/Hasura/SQL/Types.hs index c2850c650cb..55483aa9398 100644 --- a/server/src-lib/Hasura/SQL/Types.hs +++ b/server/src-lib/Hasura/SQL/Types.hs @@ -303,3 +303,13 @@ pgTypeOid PGJSONB = PTI.jsonb pgTypeOid PGGeometry = PTI.text pgTypeOid PGGeography = PTI.text pgTypeOid (PGUnknown _) = PTI.auto + +isIntegerType :: PGColType -> Bool +isIntegerType PGInteger = True +isIntegerType PGSmallInt = True +isIntegerType PGBigInt = True +isIntegerType _ = False + +isJSONBType :: PGColType -> Bool +isJSONBType PGJSONB = True +isJSONBType _ = False diff --git a/server/test/Spec.hs b/server/test/Spec.hs index 0f6b2edb47e..f2c7cf8c020 100644 --- a/server/test/Spec.hs +++ b/server/test/Spec.hs @@ -39,18 +39,28 @@ querySpecFiles = gqlSpecFiles :: [FilePath] gqlSpecFiles = [ "introspection.yaml" - , "insert_mutation_author.yaml" + , "insert_mutation/author.yaml" , "simple_select_query_author.yaml" - , "insert_mutation_article.yaml" - , "insert_mutation_article_on_conflict.yaml" - , "insert_mutation_article_on_conflict_ignore.yaml" - , "insert_mutation_article_on_conflict_ignore_constraint.yaml" - , "insert_mutation_article_on_conflict_error_01.yaml" - , "insert_mutation_article_on_conflict_error_02.yaml" - , "insert_mutation_article_on_conflict_error_03.yaml" + , "insert_mutation/article.yaml" + , "insert_mutation/article_on_conflict.yaml" + , "insert_mutation/article_on_conflict_ignore.yaml" + , "insert_mutation/article_on_conflict_ignore_constraint.yaml" + , "insert_mutation/article_on_conflict_error_01.yaml" + , "insert_mutation/article_on_conflict_error_02.yaml" + , "insert_mutation/article_on_conflict_error_03.yaml" + , "insert_mutation/person.yaml" + , "insert_mutation/person_array.yaml" , "nested_select_query_article.yaml" - , "update_mutation_author.yaml" - , "delete_mutation_article.yaml" + , "update_mutation/author.yaml" + , "update_mutation/person_set.yaml" + , "update_mutation/person_append.yaml" + , "update_mutation/person_prepend.yaml" + , "update_mutation/person_delete_key.yaml" + , "update_mutation/person_delete_elem.yaml" + , "update_mutation/person_delete_at_path.yaml" + , "update_mutation/person_inc.yaml" + , "update_mutation/person_error_01.yaml" + , "delete_mutation/article.yaml" ] readTestCase :: FilePath -> IO TestCase diff --git a/server/test/testcases/create_tables.yaml b/server/test/testcases/create_tables.yaml index 1a421dd5602..d24772b7661 100644 --- a/server/test/testcases/create_tables.yaml +++ b/server/test/testcases/create_tables.yaml @@ -18,3 +18,10 @@ query: is_published BOOLEAN, published_on TIMESTAMP ) + - type: run_sql + args: + sql: | + CREATE TABLE person ( + id SERIAL PRIMARY KEY, + details JSONB NOT NULL + ) diff --git a/server/test/testcases/delete_mutation_article.yaml b/server/test/testcases/delete_mutation/article.yaml similarity index 100% rename from server/test/testcases/delete_mutation_article.yaml rename to 
server/test/testcases/delete_mutation/article.yaml diff --git a/server/test/testcases/insert_mutation/article.yaml b/server/test/testcases/insert_mutation/article.yaml new file mode 100644 index 00000000000..6237c33a086 --- /dev/null +++ b/server/test/testcases/insert_mutation/article.yaml @@ -0,0 +1,32 @@ +description: Inserts article data via GraphQL mutation +url: /v1alpha1/graphql +status: 200 +query: + query: | + mutation insert_article { + insert_article ( + objects: [ + { + title: "Article 1", + content: "Sample article content", + author_id: 1 + }, + { + title: "Article 2", + content: "Sample article content", + author_id: 1 + }, + { + title: "Article 3", + content: "Sample article content", + author_id: 2 + } + ] + ) { + returning { + id + title + content + } + } + } diff --git a/server/test/testcases/insert_mutation_article_on_conflict.yaml b/server/test/testcases/insert_mutation/article_on_conflict.yaml similarity index 100% rename from server/test/testcases/insert_mutation_article_on_conflict.yaml rename to server/test/testcases/insert_mutation/article_on_conflict.yaml diff --git a/server/test/testcases/insert_mutation_article_on_conflict_error_01.yaml b/server/test/testcases/insert_mutation/article_on_conflict_error_01.yaml similarity index 100% rename from server/test/testcases/insert_mutation_article_on_conflict_error_01.yaml rename to server/test/testcases/insert_mutation/article_on_conflict_error_01.yaml diff --git a/server/test/testcases/insert_mutation_article_on_conflict_error_02.yaml b/server/test/testcases/insert_mutation/article_on_conflict_error_02.yaml similarity index 100% rename from server/test/testcases/insert_mutation_article_on_conflict_error_02.yaml rename to server/test/testcases/insert_mutation/article_on_conflict_error_02.yaml diff --git a/server/test/testcases/insert_mutation_article_on_conflict_error_03.yaml b/server/test/testcases/insert_mutation/article_on_conflict_error_03.yaml similarity index 100% rename from server/test/testcases/insert_mutation_article_on_conflict_error_03.yaml rename to server/test/testcases/insert_mutation/article_on_conflict_error_03.yaml diff --git a/server/test/testcases/insert_mutation_article_on_conflict_ignore.yaml b/server/test/testcases/insert_mutation/article_on_conflict_ignore.yaml similarity index 100% rename from server/test/testcases/insert_mutation_article_on_conflict_ignore.yaml rename to server/test/testcases/insert_mutation/article_on_conflict_ignore.yaml diff --git a/server/test/testcases/insert_mutation_article_on_conflict_ignore_constraint.yaml b/server/test/testcases/insert_mutation/article_on_conflict_ignore_constraint.yaml similarity index 100% rename from server/test/testcases/insert_mutation_article_on_conflict_ignore_constraint.yaml rename to server/test/testcases/insert_mutation/article_on_conflict_ignore_constraint.yaml diff --git a/server/test/testcases/insert_mutation_author.yaml b/server/test/testcases/insert_mutation/author.yaml similarity index 100% rename from server/test/testcases/insert_mutation_author.yaml rename to server/test/testcases/insert_mutation/author.yaml diff --git a/server/test/testcases/insert_mutation/person.yaml b/server/test/testcases/insert_mutation/person.yaml new file mode 100644 index 00000000000..19333d2b709 --- /dev/null +++ b/server/test/testcases/insert_mutation/person.yaml @@ -0,0 +1,24 @@ +description: Inserts person data via GraphQL mutation +url: /v1alpha1/graphql +status: 200 +query: + variables: + value: + name: + first: john + last: murphy + query: | + mutation 
insert_person($value: jsonb) { + insert_person( + objects: [ + { + details: $value + } + ] + ) { + returning { + id + details + } + } + } diff --git a/server/test/testcases/insert_mutation/person_array.yaml b/server/test/testcases/insert_mutation/person_array.yaml new file mode 100644 index 00000000000..ad84a17e3ea --- /dev/null +++ b/server/test/testcases/insert_mutation/person_array.yaml @@ -0,0 +1,27 @@ +description: Inserts persons data via GraphQL mutation +url: /v1alpha1/graphql +status: 200 +query: + variables: + value: + - name: + first: thelonious + last: jaha + - name: + first: clarke + last: griffin + query: | + mutation insert_person($value: jsonb) { + insert_person( + objects: [ + { + details: $value + } + ] + ) { + returning { + id + details + } + } + } diff --git a/server/test/testcases/track_tables.yaml b/server/test/testcases/track_tables.yaml index 10d737fe6c1..92034be0a6b 100644 --- a/server/test/testcases/track_tables.yaml +++ b/server/test/testcases/track_tables.yaml @@ -12,3 +12,7 @@ query: args: schema: public name: article + - type: track_table + args: + schema: public + name: person diff --git a/server/test/testcases/update_mutation_author.yaml b/server/test/testcases/update_mutation/author.yaml similarity index 100% rename from server/test/testcases/update_mutation_author.yaml rename to server/test/testcases/update_mutation/author.yaml diff --git a/server/test/testcases/update_mutation/person_append.yaml b/server/test/testcases/update_mutation/person_append.yaml new file mode 100644 index 00000000000..13631bb91bc --- /dev/null +++ b/server/test/testcases/update_mutation/person_append.yaml @@ -0,0 +1,24 @@ +description: Updated person data using _append operator +url: /v1alpha1/graphql +status: 200 +query: + variables: + value: + address: + country: Australia + city: Sydney + query: | + mutation update_person($value: jsonb) { + update_person( + where: {id: {_eq: 1}}, + _append: { + details: $value + } + ){ + affected_rows + returning{ + id + details + } + } + } diff --git a/server/test/testcases/update_mutation/person_delete_at_path.yaml b/server/test/testcases/update_mutation/person_delete_at_path.yaml new file mode 100644 index 00000000000..dba228d6d22 --- /dev/null +++ b/server/test/testcases/update_mutation/person_delete_at_path.yaml @@ -0,0 +1,19 @@ +description: Updated person data using _delete_at_path operator +url: /v1alpha1/graphql +status: 200 +query: + query: | + mutation update_person($value: jsonb) { + update_person( + where: {id: {_eq: 1}}, + _delete_at_path: { + details: ["name", "last"] + } + ){ + affected_rows + returning{ + id + details + } + } + } diff --git a/server/test/testcases/update_mutation/person_delete_elem.yaml b/server/test/testcases/update_mutation/person_delete_elem.yaml new file mode 100644 index 00000000000..406680f3ded --- /dev/null +++ b/server/test/testcases/update_mutation/person_delete_elem.yaml @@ -0,0 +1,19 @@ +description: Updated person data using _delete_elem operator +url: /v1alpha1/graphql +status: 200 +query: + query: | + mutation update_person($value: jsonb) { + update_person( + where: {id: {_eq: 2}}, + _delete_elem: { + details: 0 + } + ){ + affected_rows + returning{ + id + details + } + } + } diff --git a/server/test/testcases/update_mutation/person_delete_key.yaml b/server/test/testcases/update_mutation/person_delete_key.yaml new file mode 100644 index 00000000000..449c131a890 --- /dev/null +++ b/server/test/testcases/update_mutation/person_delete_key.yaml @@ -0,0 +1,19 @@ +description: Updated person data using 
_delete_key operator +url: /v1alpha1/graphql +status: 200 +query: + query: | + mutation update_person($value: jsonb) { + update_person( + where: {id: {_eq: 1}}, + _delete_key: { + details: "address" + } + ){ + affected_rows + returning{ + id + details + } + } + } diff --git a/server/test/testcases/update_mutation/person_error_01.yaml b/server/test/testcases/update_mutation/person_error_01.yaml new file mode 100644 index 00000000000..d7e35befa9d --- /dev/null +++ b/server/test/testcases/update_mutation/person_error_01.yaml @@ -0,0 +1,16 @@ +description: Updated person data without any operator +url: /v1alpha1/graphql +status: 400 +query: + query: | + mutation update_person($value: jsonb) { + update_person( + where: {id: {_eq: 2}} + ){ + affected_rows + returning{ + id + details + } + } + } diff --git a/server/test/testcases/update_mutation/person_inc.yaml b/server/test/testcases/update_mutation/person_inc.yaml new file mode 100644 index 00000000000..b6962458ebd --- /dev/null +++ b/server/test/testcases/update_mutation/person_inc.yaml @@ -0,0 +1,19 @@ +description: Updated person data using _inc operator +url: /v1alpha1/graphql +status: 200 +query: + query: | + mutation update_person($value: jsonb) { + update_person( + where: {id: {_eq: 2}}, + _inc: { + id: 1 + } + ){ + affected_rows + returning{ + id + details + } + } + } diff --git a/server/test/testcases/update_mutation/person_prepend.yaml b/server/test/testcases/update_mutation/person_prepend.yaml new file mode 100644 index 00000000000..346f934cb4c --- /dev/null +++ b/server/test/testcases/update_mutation/person_prepend.yaml @@ -0,0 +1,23 @@ +description: Updated person data using _prepend operator +url: /v1alpha1/graphql +status: 200 +query: + variables: + value: + university: + name: Sydney university + query: | + mutation update_person($value: jsonb) { + update_person( + where: {id: {_eq: 1}}, + _prepend: { + details: $value + } + ){ + affected_rows + returning{ + id + details + } + } + } diff --git a/server/test/testcases/update_mutation/person_set.yaml b/server/test/testcases/update_mutation/person_set.yaml new file mode 100644 index 00000000000..4c51a3e2b72 --- /dev/null +++ b/server/test/testcases/update_mutation/person_set.yaml @@ -0,0 +1,24 @@ +description: Updated person data using _set operator +url: /v1alpha1/graphql +status: 200 +query: + variables: + value: + name: + first: john + last: taylor + query: | + mutation update_person($value: jsonb) { + update_person( + where: {id: {_eq: 1}}, + _set: { + details: $value + } + ){ + affected_rows + returning{ + id + details + } + } + }