From 9be6f706e6d3b1b265d5bb7ed5563cb169a22219 Mon Sep 17 00:00:00 2001 From: Alexis King Date: Tue, 23 Jul 2019 16:42:59 +0530 Subject: [PATCH 01/10] Refactor GCtx to split query and mutation root operations MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This mostly simplifies the RootFlds type to make it clearer what it’s used for, but it has the convenient side-effect of preventing some “impossible” cases using the type system. --- server/src-lib/Hasura/GraphQL/Context.hs | 27 +++-- server/src-lib/Hasura/GraphQL/Execute.hs | 12 +- .../src-lib/Hasura/GraphQL/Execute/Query.hs | 2 +- server/src-lib/Hasura/GraphQL/Explain.hs | 4 +- server/src-lib/Hasura/GraphQL/Resolve.hs | 42 +++---- .../src-lib/Hasura/GraphQL/Resolve/Types.hs | 18 ++- server/src-lib/Hasura/GraphQL/Schema.hs | 104 ++++++++++-------- 7 files changed, 116 insertions(+), 93 deletions(-) diff --git a/server/src-lib/Hasura/GraphQL/Context.hs b/server/src-lib/Hasura/GraphQL/Context.hs index 488f005c5ba..737e5c4fe94 100644 --- a/server/src-lib/Hasura/GraphQL/Context.hs +++ b/server/src-lib/Hasura/GraphQL/Context.hs @@ -12,16 +12,24 @@ import Hasura.GraphQL.Resolve.Types import Hasura.GraphQL.Validate.Types import Hasura.RQL.Types.Permission +-- | A /GraphQL context/, aka the final output of GraphQL schema generation. Used to both validate +-- incoming queries and respond to introspection queries. +-- +-- Combines information from 'TyAgg', 'RootFields', and 'InsCtxMap' datatypes and adds a bit more on +-- top. Constructed via the 'mkGCtx' smart constructor. data GCtx = GCtx - { _gTypes :: !TypeMap - , _gFields :: !FieldMap - , _gOrdByCtx :: !OrdByCtx - , _gQueryRoot :: !ObjTyInfo - , _gMutRoot :: !(Maybe ObjTyInfo) - , _gSubRoot :: !(Maybe ObjTyInfo) - , _gOpCtxMap :: !OpCtxMap - , _gInsCtxMap :: !InsCtxMap + -- GraphQL type information + { _gTypes :: !TypeMap + , _gFields :: !FieldMap + , _gQueryRoot :: !ObjTyInfo + , _gMutRoot :: !(Maybe ObjTyInfo) + , _gSubRoot :: !(Maybe ObjTyInfo) + -- Postgres type information + , _gOrdByCtx :: !OrdByCtx + , _gQueryCtxMap :: !QueryCtxMap + , _gMutationCtxMap :: !MutationCtxMap + , _gInsCtxMap :: !InsCtxMap } deriving (Show, Eq) data RemoteGCtx @@ -60,8 +68,7 @@ emptyGCtx = let queryRoot = mkQueryRootTyInfo [] allTys = mkTyInfoMap $ TIObj queryRoot:defaultTypes -- for now subscription root is query root - in GCtx allTys mempty mempty queryRoot Nothing Nothing - mempty mempty + in GCtx allTys mempty queryRoot Nothing Nothing mempty mempty mempty mempty defaultTypes :: [TypeInfo] defaultTypes = $(fromSchemaDocQ defaultSchema TLHasuraType) diff --git a/server/src-lib/Hasura/GraphQL/Execute.hs b/server/src-lib/Hasura/GraphQL/Execute.hs index 4b7b851f56f..f5810b1efa6 100644 --- a/server/src-lib/Hasura/GraphQL/Execute.hs +++ b/server/src-lib/Hasura/GraphQL/Execute.hs @@ -224,7 +224,8 @@ getResolvedExecPlan pgExecCtx planCache userInfo sqlGenCtx -- Monad for resolving a hasura query/mutation type E m = ReaderT ( UserInfo - , OpCtxMap + , QueryCtxMap + , MutationCtxMap , TypeMap , FieldMap , OrdByCtx @@ -241,10 +242,11 @@ runE -> m a runE ctx sqlGenCtx userInfo action = do res <- runExceptT $ runReaderT action - (userInfo, opCtxMap, typeMap, fldMap, ordByCtx, insCtxMap, sqlGenCtx) + (userInfo, queryCtxMap, mutationCtxMap, typeMap, fldMap, ordByCtx, insCtxMap, sqlGenCtx) either throwError return res where - opCtxMap = _gOpCtxMap ctx + queryCtxMap = _gQueryCtxMap ctx + mutationCtxMap = _gMutationCtxMap ctx typeMap = _gTypes ctx fldMap = _gFields ctx ordByCtx = 
_gOrdByCtx ctx @@ -268,7 +270,7 @@ resolveMutSelSet :: ( MonadError QErr m , MonadReader r m , Has UserInfo r - , Has OpCtxMap r + , Has MutationCtxMap r , Has FieldMap r , Has OrdByCtx r , Has SQLGenCtx r @@ -308,7 +310,7 @@ getMutOp ctx sqlGenCtx userInfo selSet = getSubsOpM :: ( MonadError QErr m , MonadReader r m - , Has OpCtxMap r + , Has QueryCtxMap r , Has FieldMap r , Has OrdByCtx r , Has SQLGenCtx r diff --git a/server/src-lib/Hasura/GraphQL/Execute/Query.hs b/server/src-lib/Hasura/GraphQL/Execute/Query.hs index 26c8cb9bc30..f315bc949d8 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/Query.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/Query.hs @@ -208,7 +208,7 @@ convertQuerySelSet :: ( MonadError QErr m , MonadReader r m , Has TypeMap r - , Has OpCtxMap r + , Has QueryCtxMap r , Has FieldMap r , Has OrdByCtx r , Has SQLGenCtx r diff --git a/server/src-lib/Hasura/GraphQL/Explain.hs b/server/src-lib/Hasura/GraphQL/Explain.hs index e7000785b80..70504fde889 100644 --- a/server/src-lib/Hasura/GraphQL/Explain.hs +++ b/server/src-lib/Hasura/GraphQL/Explain.hs @@ -87,7 +87,7 @@ explainField userInfo gCtx sqlGenCtx fld = "__typename" -> return $ FieldPlan fName Nothing Nothing _ -> do unresolvedAST <- - runExplain (opCtxMap, userInfo, fldMap, orderByCtx, sqlGenCtx) $ + runExplain (queryCtxMap, userInfo, fldMap, orderByCtx, sqlGenCtx) $ RS.queryFldToPGAST fld resolvedAST <- RS.traverseQueryRootFldAST (resolveVal userInfo) unresolvedAST @@ -99,7 +99,7 @@ explainField userInfo gCtx sqlGenCtx fld = where fName = GV._fName fld - opCtxMap = _gOpCtxMap gCtx + queryCtxMap = _gQueryCtxMap gCtx fldMap = _gFields gCtx orderByCtx = _gOrdByCtx gCtx diff --git a/server/src-lib/Hasura/GraphQL/Resolve.hs b/server/src-lib/Hasura/GraphQL/Resolve.hs index 28e11c0dfe2..fe865a28e0a 100644 --- a/server/src-lib/Hasura/GraphQL/Resolve.hs +++ b/server/src-lib/Hasura/GraphQL/Resolve.hs @@ -44,7 +44,7 @@ validateHdrs userInfo hdrs = do queryFldToPGAST :: ( MonadError QErr m, MonadReader r m, Has FieldMap r , Has OrdByCtx r, Has SQLGenCtx r, Has UserInfo r - , Has OpCtxMap r + , Has QueryCtxMap r ) => V.Field -> m RS.QueryRootFldUnresolved @@ -52,32 +52,26 @@ queryFldToPGAST fld = do opCtx <- getOpCtx $ V._fName fld userInfo <- asks getter case opCtx of - OCSelect ctx -> do + QCSelect ctx -> do validateHdrs userInfo (_socHeaders ctx) RS.convertSelect ctx fld - OCSelectPkey ctx -> do + QCSelectPkey ctx -> do validateHdrs userInfo (_spocHeaders ctx) RS.convertSelectByPKey ctx fld - OCSelectAgg ctx -> do + QCSelectAgg ctx -> do validateHdrs userInfo (_socHeaders ctx) RS.convertAggSelect ctx fld - OCFuncQuery ctx -> do + QCFuncQuery ctx -> do validateHdrs userInfo (_fqocHeaders ctx) RS.convertFuncQuerySimple ctx fld - OCFuncAggQuery ctx -> do + QCFuncAggQuery ctx -> do validateHdrs userInfo (_fqocHeaders ctx) RS.convertFuncQueryAgg ctx fld - OCInsert _ -> - throw500 "unexpected OCInsert for query field context" - OCUpdate _ -> - throw500 "unexpected OCUpdate for query field context" - OCDelete _ -> - throw500 "unexpected OCDelete for query field context" queryFldToSQL :: ( MonadError QErr m, MonadReader r m, Has FieldMap r , Has OrdByCtx r, Has SQLGenCtx r, Has UserInfo r - , Has OpCtxMap r + , Has QueryCtxMap r ) => PrepFn m -> V.Field @@ -94,7 +88,7 @@ mutFldToTx :: ( MonadError QErr m , MonadReader r m , Has UserInfo r - , Has OpCtxMap r + , Has MutationCtxMap r , Has FieldMap r , Has OrdByCtx r , Has SQLGenCtx r @@ -106,33 +100,23 @@ mutFldToTx fld = do userInfo <- asks getter opCtx <- getOpCtx $ V._fName fld 
case opCtx of - OCInsert ctx -> do + MCInsert ctx -> do let roleName = userRole userInfo validateHdrs userInfo (_iocHeaders ctx) RI.convertInsert roleName (_iocTable ctx) fld - OCUpdate ctx -> do + MCUpdate ctx -> do validateHdrs userInfo (_uocHeaders ctx) RM.convertUpdate ctx fld - OCDelete ctx -> do + MCDelete ctx -> do validateHdrs userInfo (_docHeaders ctx) RM.convertDelete ctx fld - OCSelect _ -> - throw500 "unexpected query field context for a mutation field" - OCSelectPkey _ -> - throw500 "unexpected query field context for a mutation field" - OCSelectAgg _ -> - throw500 "unexpected query field context for a mutation field" - OCFuncQuery _ -> - throw500 "unexpected query field context for a mutation field" - OCFuncAggQuery _ -> - throw500 "unexpected query field context for a mutation field" getOpCtx :: ( MonadError QErr m , MonadReader r m - , Has OpCtxMap r + , Has (OpCtxMap a) r ) - => G.Name -> m OpCtx + => G.Name -> m a getOpCtx f = do opCtxMap <- asks getter onNothing (Map.lookup f opCtxMap) $ throw500 $ diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Types.hs b/server/src-lib/Hasura/GraphQL/Resolve/Types.hs index 82783d4cec3..d24156330e5 100644 --- a/server/src-lib/Hasura/GraphQL/Resolve/Types.hs +++ b/server/src-lib/Hasura/GraphQL/Resolve/Types.hs @@ -15,7 +15,23 @@ import Hasura.SQL.Value import qualified Hasura.SQL.DML as S -type OpCtxMap = Map.HashMap G.Name OpCtx +data QueryCtx + = QCSelect !SelOpCtx + | QCSelectPkey !SelPkOpCtx + | QCSelectAgg !SelOpCtx + | QCFuncQuery !FuncQOpCtx + | QCFuncAggQuery !FuncQOpCtx + deriving (Show, Eq) + +data MutationCtx + = MCInsert !InsOpCtx + | MCUpdate !UpdOpCtx + | MCDelete !DelOpCtx + deriving (Show, Eq) + +type OpCtxMap a = Map.HashMap G.Name a +type QueryCtxMap = OpCtxMap QueryCtx +type MutationCtxMap = OpCtxMap MutationCtx data InsOpCtx = InsOpCtx diff --git a/server/src-lib/Hasura/GraphQL/Schema.hs b/server/src-lib/Hasura/GraphQL/Schema.hs index 883d7ccc14f..0ab00e18f43 100644 --- a/server/src-lib/Hasura/GraphQL/Schema.hs +++ b/server/src-lib/Hasura/GraphQL/Schema.hs @@ -4,7 +4,8 @@ module Hasura.GraphQL.Schema , buildGCtxMapPG , getGCtx , GCtx(..) - , OpCtx(..) + , QueryCtx(..) + , MutationCtx(..) , InsCtx(..) , InsCtxMap , RelationInfoMap @@ -291,22 +292,26 @@ getRootFldsRole' -> Maybe ([PGCol], PreSetColsPartial, AnnBoolExpPartialSQL, [T.Text]) -- update filter -> Maybe (AnnBoolExpPartialSQL, [T.Text]) -- delete filter -> Maybe ViewInfo - -> RootFlds + -> RootFields getRootFldsRole' tn primCols constraints fields funcs insM selM updM delM viM = - RootFlds mFlds + RootFields + { rootQueryFields = makeFieldMap + $ funcQueries + <> funcAggQueries + <> catMaybes + [ getSelDet <$> selM + , getSelAggDet selM + , getPKeySelDet selM $ getColInfos primCols colInfos + ] + , rootMutationFields = makeFieldMap $ catMaybes + [ mutHelper viIsInsertable getInsDet insM + , mutHelper viIsUpdatable getUpdDet updM + , mutHelper viIsDeletable getDelDet delM + ] + } where + makeFieldMap = mapFromL (_fiName . snd) allCols = getCols fields - mFlds = mapFromL (either _fiName _fiName . 
snd) $ - funcQueries <> - funcAggQueries <> - catMaybes - [ mutHelper viIsInsertable getInsDet insM - , mutHelper viIsUpdatable getUpdDet updM - , mutHelper viIsDeletable getDelDet delM - , getSelDet <$> selM, getSelAggDet selM - , getPKeySelDet selM $ getColInfos primCols colInfos - ] - funcQueries = maybe [] getFuncQueryFlds selM funcAggQueries = maybe [] getFuncAggQueryFlds selM @@ -317,50 +322,50 @@ getRootFldsRole' tn primCols constraints fields funcs insM selM updM delM viM = colInfos = fst $ validPartitionFieldInfoMap fields getInsDet (hdrs, upsertPerm) = let isUpsertable = upsertable constraints upsertPerm $ isJust viM - in ( OCInsert $ InsOpCtx tn $ hdrs `union` maybe [] (\(_, _, _, x) -> x) updM - , Right $ mkInsMutFld tn isUpsertable + in ( MCInsert . InsOpCtx tn $ hdrs `union` maybe [] (\(_, _, _, x) -> x) updM + , mkInsMutFld tn isUpsertable ) getUpdDet (updCols, preSetCols, updFltr, hdrs) = - ( OCUpdate $ UpdOpCtx tn hdrs updFltr preSetCols allCols - , Right $ mkUpdMutFld tn $ getColInfos updCols colInfos + ( MCUpdate $ UpdOpCtx tn hdrs updFltr preSetCols allCols + , mkUpdMutFld tn $ getColInfos updCols colInfos ) getDelDet (delFltr, hdrs) = - ( OCDelete $ DelOpCtx tn hdrs delFltr allCols - , Right $ mkDelMutFld tn + ( MCDelete $ DelOpCtx tn hdrs delFltr allCols + , mkDelMutFld tn ) getSelDet (selFltr, pLimit, hdrs, _) = - selFldHelper OCSelect mkSelFld selFltr pLimit hdrs + selFldHelper QCSelect mkSelFld selFltr pLimit hdrs getSelAggDet (Just (selFltr, pLimit, hdrs, True)) = - Just $ selFldHelper OCSelectAgg mkAggSelFld selFltr pLimit hdrs + Just $ selFldHelper QCSelectAgg mkAggSelFld selFltr pLimit hdrs getSelAggDet _ = Nothing selFldHelper f g pFltr pLimit hdrs = ( f $ SelOpCtx tn hdrs pFltr pLimit - , Left $ g tn + , g tn ) getPKeySelDet Nothing _ = Nothing getPKeySelDet _ [] = Nothing getPKeySelDet (Just (selFltr, _, hdrs, _)) pCols = Just - ( OCSelectPkey $ SelPkOpCtx tn hdrs selFltr $ + ( QCSelectPkey . SelPkOpCtx tn hdrs selFltr $ mapFromL (mkColName . pgiName) pCols - , Left $ mkSelFldPKey tn pCols + , mkSelFldPKey tn pCols ) getFuncQueryFlds (selFltr, pLimit, hdrs, _) = - funcFldHelper OCFuncQuery mkFuncQueryFld selFltr pLimit hdrs + funcFldHelper QCFuncQuery mkFuncQueryFld selFltr pLimit hdrs getFuncAggQueryFlds (selFltr, pLimit, hdrs, True) = - funcFldHelper OCFuncAggQuery mkFuncAggQueryFld selFltr pLimit hdrs + funcFldHelper QCFuncAggQuery mkFuncAggQueryFld selFltr pLimit hdrs getFuncAggQueryFlds _ = [] funcFldHelper f g pFltr pLimit hdrs = flip map funcs $ \fi -> - ( f $ FuncQOpCtx tn hdrs pFltr pLimit (fiName fi) $ mkFuncArgItemSeq fi - , Left $ g fi + ( f . 
FuncQOpCtx tn hdrs pFltr pLimit (fiName fi) $ mkFuncArgItemSeq fi + , g fi ) mkFuncArgItemSeq fi = Seq.fromList $ @@ -465,7 +470,7 @@ mkGCtxRole -> Maybe ViewInfo -> RoleName -> RolePermInfo - -> m (TyAgg, RootFlds, InsCtxMap) + -> m (TyAgg, RootFields, InsCtxMap) mkGCtxRole tableCache tn fields pCols constraints funcs viM role permInfo = do selPermM <- mapM (getSelPerm tableCache fields role) $ _permSel permInfo tabInsInfoM <- forM (_permIns permInfo) $ \ipi -> do @@ -496,7 +501,7 @@ getRootFldsRole -> [FunctionInfo] -> Maybe ViewInfo -> RolePermInfo - -> RootFlds + -> RootFields getRootFldsRole tn pCols constraints fields funcs viM (RolePermInfo insM selM updM delM) = getRootFldsRole' tn pCols constraints fields funcs (mkIns <$> insM) (mkSel <$> selM) @@ -519,7 +524,7 @@ mkGCtxMapTable => TableCache -> FunctionCache -> TableInfo - -> m (Map.HashMap RoleName (TyAgg, RootFlds, InsCtxMap)) + -> m (Map.HashMap RoleName (TyAgg, RootFields, InsCtxMap)) mkGCtxMapTable tableCache funcCache tabInfo = do m <- Map.traverseWithKey (mkGCtxRole tableCache tn fields pkeyCols validConstraints tabFuncs viewInfo) rolePerms @@ -598,6 +603,10 @@ ppGCtx gCtx = mRootO = _gMutRoot gCtx sRootO = _gSubRoot gCtx +-- | A /types aggregate/, which holds role-specific information about visible GraphQL types. +-- Importantly, it holds more than just the information needed by GraphQL: it also includes how the +-- GraphQL types relate to Postgres types, which is used to validate literals provided for +-- Postgres-specific scalars. data TyAgg = TyAgg { _taTypes :: !TypeMap @@ -615,21 +624,23 @@ instance Monoid TyAgg where mempty = TyAgg Map.empty Map.empty Set.empty Map.empty mappend = (<>) -newtype RootFlds - = RootFlds - { _taMutation :: Map.HashMap G.Name (OpCtx, Either ObjFldInfo ObjFldInfo) +-- | A role-specific mapping from root field names to allowed operations. +data RootFields + = RootFields + { rootQueryFields :: !(Map.HashMap G.Name (QueryCtx, ObjFldInfo)) + , rootMutationFields :: !(Map.HashMap G.Name (MutationCtx, ObjFldInfo)) } deriving (Show, Eq) -instance Semigroup RootFlds where - (RootFlds m1) <> (RootFlds m2) - = RootFlds (Map.union m1 m2) +instance Semigroup RootFields where + RootFields a1 b1 <> RootFields a2 b2 + = RootFields (Map.union a1 a2) (Map.union b1 b2) -instance Monoid RootFlds where - mempty = RootFlds Map.empty +instance Monoid RootFields where + mempty = RootFields Map.empty Map.empty mappend = (<>) -mkGCtx :: TyAgg -> RootFlds -> InsCtxMap -> GCtx -mkGCtx tyAgg (RootFlds flds) insCtxMap = +mkGCtx :: TyAgg -> RootFields -> InsCtxMap -> GCtx +mkGCtx tyAgg (RootFields queryFields mutationFields) insCtxMap = let queryRoot = mkQueryRootTyInfo qFlds scalarTys = map (TIScalar . mkHsraScalarTyInfo) (Set.toList allScalarTypes) compTys = map (TIInpObj . mkCompExpInp) (Set.toList allComparableTypes) @@ -642,8 +653,8 @@ mkGCtx tyAgg (RootFlds flds) insCtxMap = ] <> scalarTys <> compTys <> defaultTypes <> wiredInGeoInputTypes -- for now subscription root is query root - in GCtx allTys fldInfos ordByEnums queryRoot mutRootM subRootM - (Map.map fst flds) insCtxMap + in GCtx allTys fldInfos queryRoot mutRootM subRootM ordByEnums + (Map.map fst queryFields) (Map.map fst mutationFields) insCtxMap where TyAgg tyInfos fldInfos scalars ordByEnums = tyAgg colTys = Set.fromList $ map pgiType $ lefts $ Map.elems fldInfos @@ -655,7 +666,10 @@ mkGCtx tyAgg (RootFlds flds) insCtxMap = mkHsraObjTyInfo (Just "subscription root") (G.NamedType "subscription_root") Set.empty . 
mapFromL _fiName subRootM = bool (Just $ mkSubRoot qFlds) Nothing $ null qFlds - (qFlds, mFlds) = partitionEithers $ map snd $ Map.elems flds + + qFlds = rootFieldInfos queryFields + mFlds = rootFieldInfos mutationFields + rootFieldInfos = map snd . Map.elems anyGeoTypes = any isGeoType colTys allComparableTypes = From d1179f7f9859cbd735d64a9e12f3f3c007b6917f Mon Sep 17 00:00:00 2001 From: Alexis King Date: Tue, 6 Aug 2019 10:24:08 -0500 Subject: [PATCH 02/10] Don’t use Show for converting PGColTypes to SQL MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src-lib/Hasura/GraphQL/Execute/Query.hs | 2 +- .../Hasura/GraphQL/Resolve/InputValue.hs | 8 +- .../src-lib/Hasura/GraphQL/Schema/BoolExp.hs | 33 ++--- .../src-lib/Hasura/GraphQL/Validate/Types.hs | 2 +- server/src-lib/Hasura/RQL/GBoolExp.hs | 15 +-- .../src-lib/Hasura/RQL/Types/SchemaCache.hs | 2 +- server/src-lib/Hasura/SQL/DML.hs | 4 +- server/src-lib/Hasura/SQL/Types.hs | 120 +++++++++--------- 8 files changed, 86 insertions(+), 100 deletions(-) diff --git a/server/src-lib/Hasura/GraphQL/Execute/Query.hs b/server/src-lib/Hasura/GraphQL/Execute/Query.hs index f315bc949d8..858aac29447 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/Query.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/Query.hs @@ -79,7 +79,7 @@ data ReusableQueryPlan instance J.ToJSON ReusableQueryPlan where toJSON (ReusableQueryPlan varTypes fldPlans) = - J.object [ "variables" J..= show varTypes + J.object [ "variables" J..= varTypes , "field_plans" J..= fldPlans ] diff --git a/server/src-lib/Hasura/GraphQL/Resolve/InputValue.hs b/server/src-lib/Hasura/GraphQL/Resolve/InputValue.hs index fa703e93e28..8015b102afd 100644 --- a/server/src-lib/Hasura/GraphQL/Resolve/InputValue.hs +++ b/server/src-lib/Hasura/GraphQL/Resolve/InputValue.hs @@ -19,12 +19,12 @@ module Hasura.GraphQL.Resolve.InputValue import Hasura.Prelude -import qualified Data.Text as T import qualified Language.GraphQL.Draft.Syntax as G import Hasura.GraphQL.Resolve.Context import Hasura.GraphQL.Validate.Types import Hasura.RQL.Types +import Hasura.SQL.Types ((<>>)) import Hasura.SQL.Value withNotNull @@ -58,10 +58,8 @@ asPGColVal asPGColVal v = case _aivValue v of AGScalar colTy (Just val) -> return $ AnnPGVal (_aivVariable v) (G.isNullable (_aivType v)) colTy val - AGScalar colTy Nothing -> - throw500 $ "unexpected null for ty " - <> T.pack (show colTy) - _ -> tyMismatch "pgvalue" v + AGScalar colTy Nothing -> throw500 $ "unexpected null for ty " <>> colTy + _ -> tyMismatch "pgvalue" v asEnumVal :: (MonadError QErr m) diff --git a/server/src-lib/Hasura/GraphQL/Schema/BoolExp.hs b/server/src-lib/Hasura/GraphQL/Schema/BoolExp.hs index 258d6de50d2..36d6e14feb1 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/BoolExp.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/BoolExp.hs @@ -16,19 +16,17 @@ import Hasura.Prelude import Hasura.RQL.Types import Hasura.SQL.Types +addTypeSuffix :: T.Text -> G.NamedType -> G.NamedType +addTypeSuffix suffix baseType = G.NamedType $ G.unNamedType baseType <> G.Name suffix + +typeToDescription :: G.NamedType -> G.Description +typeToDescription = G.Description . G.unName . G.unNamedType + mkCompExpTy :: PGColType -> G.NamedType -mkCompExpTy = G.NamedType . 
mkCompExpName - -mkCompExpName :: PGColType -> G.Name -mkCompExpName pgColTy = - G.Name $ T.pack (show pgColTy) <> "_comparison_exp" - -mkCastExpName :: PGColType -> G.Name -mkCastExpName pgColTy = G.Name $ T.pack (show pgColTy) <> "_cast_exp" +mkCompExpTy = addTypeSuffix "_comparison_exp" . mkScalarTy mkCastExpTy :: PGColType -> G.NamedType -mkCastExpTy = G.NamedType . mkCastExpName +mkCastExpTy = addTypeSuffix "_cast_exp" . mkScalarTy -- TODO(shahidhk) this should ideally be st_d_within_geometry {- @@ -59,13 +57,13 @@ mkCastExpressionInputType sourceType targetTypes = where description = mconcat [ "Expression to compare the result of casting a column of type " - , G.Description (T.pack $ show sourceType) + , typeToDescription $ mkScalarTy sourceType , ". Multiple cast targets are combined with logical 'AND'." ] targetFields = map targetField targetTypes targetField targetType = InpValInfo Nothing - (G.Name . T.pack $ show targetType) + (G.unNamedType $ mkScalarTy targetType) Nothing (G.toGT $ mkCompExpTy targetType) @@ -85,11 +83,9 @@ mkCompExpInp colTy = , maybeToList castOpInputValue ]) TLHasuraType where - tyDesc = mconcat - [ "expression to compare columns of type " - , G.Description (T.pack $ show colTy) - , ". All fields are combined with logical 'AND'." - ] + tyDesc = + "expression to compare columns of type " <> colTyDesc + <> ". All fields are combined with logical 'AND'." isStringTy = case colTy of PGVarchar -> True PGText -> True @@ -158,8 +154,7 @@ mkCompExpInp colTy = geoOpToInpVal (op, desc) = InpValInfo (Just desc) op Nothing $ G.toGT $ mkScalarTy colTy - - colTyDesc = G.Description $ T.pack $ show colTy + colTyDesc = typeToDescription $ mkScalarTy colTy -- operators applicable only to geometry types geomOps :: [(G.Name, G.Description)] diff --git a/server/src-lib/Hasura/GraphQL/Validate/Types.hs b/server/src-lib/Hasura/GraphQL/Validate/Types.hs index d85a83beefb..94541d48515 100644 --- a/server/src-lib/Hasura/GraphQL/Validate/Types.hs +++ b/server/src-lib/Hasura/GraphQL/Validate/Types.hs @@ -553,7 +553,7 @@ pgColTyToScalar = \case PGFloat -> "Float" PGText -> "String" PGVarchar -> "String" - t -> T.pack $ show t + t -> toSQLTxt t mkScalarTy :: PGColType -> G.NamedType mkScalarTy = diff --git a/server/src-lib/Hasura/RQL/GBoolExp.hs b/server/src-lib/Hasura/RQL/GBoolExp.hs index fcfe725a4e8..8f25dc8dd02 100644 --- a/server/src-lib/Hasura/RQL/GBoolExp.hs +++ b/server/src-lib/Hasura/RQL/GBoolExp.hs @@ -40,7 +40,7 @@ instance DQuote ColumnReference where ColumnReferenceColumn column -> getPGColTxt $ pgiName column ColumnReferenceCast reference targetType -> - dquoteTxt reference <> "::" <> T.pack (show targetType) + dquoteTxt reference <> "::" <> dquoteTxt targetType parseOperationsExpression :: forall m v @@ -266,13 +266,9 @@ parseOperationsExpression rhsParser fim columnInfo = parseVal = decodeValue val buildMsg :: PGColType -> [PGColType] -> QErr -buildMsg ty expTys = - err400 UnexpectedPayload $ mconcat - [ " is of type " <> T.pack (show ty) - , "; this operator works " - , "only on columns of type " - , T.intercalate "/" $ map (T.dquote . T.pack . show) expTys - ] +buildMsg ty expTys = err400 UnexpectedPayload + $ " is of type " <> ty <<> "; this operator works only on columns of type " + <> T.intercalate "/" (map dquote expTys) textOnlyOp :: (MonadError QErr m) => PGColType -> m () textOnlyOp PGText = return () @@ -432,11 +428,10 @@ mkColCompExp qual lhsCol = mkCompExp (mkQCol lhsCol) mkCastsExp casts = sqlAll . 
flip map (M.toList casts) $ \(targetType, operations) -> - let targetAnn = pgTypeToAnnType targetType + let targetAnn = S.mkTypeAnn $ PgTypeSimple targetType in sqlAll $ map (mkCompExp (S.SETyAnn lhs targetAnn)) operations sqlAll = foldr (S.BEBin S.AndOp) (S.BELit True) - pgTypeToAnnType = S.TypeAnn . T.pack . show hasStaticExp :: OpExpG PartialSQLExp -> Bool hasStaticExp = has (template . filtered isStaticValue) diff --git a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs index e78f88cfc25..963394fba16 100644 --- a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs +++ b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs @@ -382,7 +382,7 @@ data FunctionArg = FunctionArg { faName :: !(Maybe FunctionArgName) , faType :: !PGColType - } deriving(Show, Eq) + } deriving (Show, Eq) $(deriveToJSON (aesonDrop 2 snakeCase) ''FunctionArg) diff --git a/server/src-lib/Hasura/SQL/DML.hs b/server/src-lib/Hasura/SQL/DML.hs index f7db300e9cf..9c681c46d1a 100644 --- a/server/src-lib/Hasura/SQL/DML.hs +++ b/server/src-lib/Hasura/SQL/DML.hs @@ -226,7 +226,7 @@ newtype TypeAnn deriving (Show, Eq, Data) mkTypeAnn :: PgType -> TypeAnn -mkTypeAnn = TypeAnn . T.pack . show +mkTypeAnn = TypeAnn . toSQLTxt intTypeAnn :: TypeAnn intTypeAnn = mkTypeAnn $ PgTypeSimple PGInteger @@ -286,7 +286,7 @@ data SQLExp deriving (Show, Eq, Data) withTyAnn :: PGColType -> SQLExp -> SQLExp -withTyAnn colTy v = SETyAnn v $ TypeAnn $ T.pack $ show colTy +withTyAnn colTy v = SETyAnn v . mkTypeAnn $ PgTypeSimple colTy instance J.ToJSON SQLExp where toJSON = J.toJSON . toSQLTxt diff --git a/server/src-lib/Hasura/SQL/Types.hs b/server/src-lib/Hasura/SQL/Types.hs index c8c35f7e915..e3784f48cfe 100644 --- a/server/src-lib/Hasura/SQL/Types.hs +++ b/server/src-lib/Hasura/SQL/Types.hs @@ -8,7 +8,6 @@ import Hasura.Prelude import Data.Aeson import Data.Aeson.Encoding (text) import Data.Aeson.Types (toJSONKeyText) -import Data.String (fromString) import Instances.TH.Lift () import Language.Haskell.TH.Syntax (Lift) @@ -52,6 +51,21 @@ class DQuote a where instance DQuote T.Text where dquoteTxt = id + {-# INLINE dquoteTxt #-} + +dquote :: (DQuote a) => a -> T.Text +dquote = T.dquote . 
dquoteTxt +{-# INLINE dquote #-} + +infixr 6 <>> +(<>>) :: (DQuote a) => T.Text -> a -> T.Text +(<>>) lTxt a = lTxt <> dquote a +{-# INLINE (<>>) #-} + +infixr 6 <<> +(<<>) :: (DQuote a) => a -> T.Text -> T.Text +(<<>) a rTxt = dquote a <> rTxt +{-# INLINE (<<>) #-} pgFmtIden :: T.Text -> T.Text pgFmtIden x = @@ -69,18 +83,6 @@ pgFmtLit x = trimNullChars :: T.Text -> T.Text trimNullChars = T.takeWhile (/= '\x0') -infixr 6 <>> -(<>>) :: (DQuote a) => T.Text -> a -> T.Text -(<>>) lTxt a = - lTxt <> T.dquote (dquoteTxt a) -{-# INLINE (<>>) #-} - -infixr 6 <<> -(<<>) :: (DQuote a) => a -> T.Text -> T.Text -(<<>) a rTxt = - T.dquote (dquoteTxt a) <> rTxt -{-# INLINE (<<>) #-} - instance (ToSQL a) => ToSQL (Maybe a) where toSQL (Just a) = toSQL a toSQL Nothing = mempty @@ -265,43 +267,41 @@ data PGColType | PGGeometry | PGGeography | PGUnknown !T.Text - deriving (Eq, Lift, Generic, Data) + deriving (Show, Eq, Lift, Generic, Data) instance Hashable PGColType -instance Show PGColType where - show PGSmallInt = "smallint" - show PGInteger = "integer" - show PGBigInt = "bigint" - show PGSerial = "serial" - show PGBigSerial = "bigserial" - show PGFloat = "real" - show PGDouble = "float8" - show PGNumeric = "numeric" - show PGBoolean = "boolean" - show PGChar = "character" - show PGVarchar = "varchar" - show PGText = "text" - show PGDate = "date" - show PGTimeStampTZ = "timestamptz" - show PGTimeTZ = "timetz" - show PGJSON = "json" - show PGJSONB = "jsonb" - show PGGeometry = "geometry" - show PGGeography = "geography" - show (PGUnknown t) = T.unpack t +instance ToSQL PGColType where + toSQL = \case + PGSmallInt -> "smallint" + PGInteger -> "integer" + PGBigInt -> "bigint" + PGSerial -> "serial" + PGBigSerial -> "bigserial" + PGFloat -> "real" + PGDouble -> "float8" + PGNumeric -> "numeric" + PGBoolean -> "boolean" + PGChar -> "character" + PGVarchar -> "varchar" + PGText -> "text" + PGDate -> "date" + PGTimeStampTZ -> "timestamptz" + PGTimeTZ -> "timetz" + PGJSON -> "json" + PGJSONB -> "jsonb" + PGGeometry -> "geometry" + PGGeography -> "geography" + PGUnknown t -> TB.text t instance ToJSON PGColType where - toJSON pct = String $ T.pack $ show pct + toJSON = String . toSQLTxt instance ToJSONKey PGColType where - toJSONKey = toJSONKeyText (T.pack . show) - -instance ToSQL PGColType where - toSQL pct = fromString $ show pct + toJSONKey = toJSONKeyText toSQLTxt instance DQuote PGColType where - dquoteTxt = T.pack . show + dquoteTxt = toSQLTxt txtToPgColTy :: Text -> PGColType txtToPgColTy t = case t of @@ -380,26 +380,6 @@ pgTypeOid PGGeometry = PTI.text pgTypeOid PGGeography = PTI.text pgTypeOid (PGUnknown _) = PTI.auto --- TODO: This is incorrect modelling as PGColType --- will capture anything under PGUnknown --- This should be fixed when support for --- all types is merged. - -data PgType - = PgTypeSimple !PGColType - | PgTypeArray !PGColType - deriving (Eq, Data) - -instance Show PgType where - show = \case - PgTypeSimple ty -> show ty - -- typename array is an sql standard way - -- of declaring types - PgTypeArray ty -> show ty <> " array" - -instance ToJSON PgType where - toJSON = toJSON . show - isIntegerType :: PGColType -> Bool isIntegerType PGInteger = True isIntegerType PGSmallInt = True @@ -438,3 +418,21 @@ isGeoType = \case PGGeometry -> True PGGeography -> True _ -> False + +-- | The type of all Postgres types (i.e. scalars and arrays). +-- +-- TODO: This is incorrect modeling, as 'PGScalarType' will capture anything (under 'PGUnknown'). 
+-- This should be fixed when support for all types is merged. +data PgType + = PgTypeSimple !PGColType + | PgTypeArray !PGColType + deriving (Show, Eq, Data) + +instance ToSQL PgType where + toSQL = \case + PgTypeSimple ty -> toSQL ty + -- typename array is an sql standard way of declaring types + PgTypeArray ty -> toSQL ty <> " array" + +instance ToJSON PgType where + toJSON = toJSON . toSQLTxt From 86663f9af7232061b6d345091d1b04fa4caa919b Mon Sep 17 00:00:00 2001 From: Alexis King Date: Tue, 6 Aug 2019 10:27:35 -0500 Subject: [PATCH 03/10] Rename PGColType to PGScalarType --- .../src-lib/Hasura/GraphQL/Execute/Query.hs | 6 ++-- .../src-lib/Hasura/GraphQL/Resolve/BoolExp.hs | 2 +- .../src-lib/Hasura/GraphQL/Resolve/Context.hs | 2 +- .../src-lib/Hasura/GraphQL/Resolve/Insert.hs | 8 ++--- .../src-lib/Hasura/GraphQL/Resolve/Types.hs | 2 +- server/src-lib/Hasura/GraphQL/Schema.hs | 2 +- .../src-lib/Hasura/GraphQL/Schema/BoolExp.hs | 8 ++--- .../src-lib/Hasura/GraphQL/Schema/Function.hs | 2 +- .../src-lib/Hasura/GraphQL/Schema/Select.hs | 4 +-- server/src-lib/Hasura/GraphQL/Validate.hs | 6 ++-- .../src-lib/Hasura/GraphQL/Validate/Field.hs | 2 +- .../src-lib/Hasura/GraphQL/Validate/Types.hs | 12 +++---- server/src-lib/Hasura/RQL/DDL/Schema/Diff.hs | 2 +- .../src-lib/Hasura/RQL/DDL/Schema/Function.hs | 4 +-- server/src-lib/Hasura/RQL/DML/Count.hs | 2 +- server/src-lib/Hasura/RQL/DML/Delete.hs | 2 +- server/src-lib/Hasura/RQL/DML/Insert.hs | 4 +-- server/src-lib/Hasura/RQL/DML/Internal.hs | 6 ++-- server/src-lib/Hasura/RQL/DML/Returning.hs | 4 +-- server/src-lib/Hasura/RQL/DML/Select.hs | 6 ++-- server/src-lib/Hasura/RQL/DML/Update.hs | 18 +++++----- server/src-lib/Hasura/RQL/GBoolExp.hs | 12 +++---- server/src-lib/Hasura/RQL/Types.hs | 2 +- server/src-lib/Hasura/RQL/Types/BoolExp.hs | 2 +- server/src-lib/Hasura/RQL/Types/Common.hs | 2 +- .../src-lib/Hasura/RQL/Types/SchemaCache.hs | 2 +- server/src-lib/Hasura/SQL/DML.hs | 2 +- server/src-lib/Hasura/SQL/Types.hs | 36 +++++++++---------- server/src-lib/Hasura/SQL/Value.hs | 16 ++++----- 29 files changed, 89 insertions(+), 89 deletions(-) diff --git a/server/src-lib/Hasura/GraphQL/Execute/Query.hs b/server/src-lib/Hasura/GraphQL/Execute/Query.hs index 858aac29447..85e0bc03476 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/Query.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/Query.hs @@ -34,7 +34,7 @@ import Hasura.RQL.Types import Hasura.SQL.Types import Hasura.SQL.Value -type PlanVariables = Map.HashMap G.Variable (Int, PGColType) +type PlanVariables = Map.HashMap G.Variable (Int, PGScalarType) type PrepArgMap = IntMap.IntMap Q.PrepArg data PGPlan @@ -63,7 +63,7 @@ instance J.ToJSON RootFieldPlan where RFPRaw encJson -> J.toJSON $ TBS.fromBS encJson RFPPostgres pgPlan -> J.toJSON pgPlan -type VariableTypes = Map.HashMap G.Variable PGColType +type VariableTypes = Map.HashMap G.Variable PGScalarType data QueryPlan = QueryPlan @@ -156,7 +156,7 @@ initPlanningSt = getVarArgNum :: (MonadState PlanningSt m) - => G.Variable -> PGColType -> m Int + => G.Variable -> PGScalarType -> m Int getVarArgNum var colTy = do PlanningSt curArgNum vars prepped <- get case Map.lookup var vars of diff --git a/server/src-lib/Hasura/GraphQL/Resolve/BoolExp.hs b/server/src-lib/Hasura/GraphQL/Resolve/BoolExp.hs index 17fadb527d3..0e358e01279 100644 --- a/server/src-lib/Hasura/GraphQL/Resolve/BoolExp.hs +++ b/server/src-lib/Hasura/GraphQL/Resolve/BoolExp.hs @@ -22,7 +22,7 @@ type OpExp = OpExpG UnresolvedVal parseOpExps :: (MonadError QErr m) - => PGColType -> AnnInpVal 
-> m [OpExp] + => PGScalarType -> AnnInpVal -> m [OpExp] parseOpExps colTy annVal = do opExpsM <- flip withObjectM annVal $ \nt objM -> forM objM $ \obj -> forM (OMap.toList obj) $ \(k, v) -> diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Context.hs b/server/src-lib/Hasura/GraphQL/Resolve/Context.hs index 7d466e1d870..833bcf7d45d 100644 --- a/server/src-lib/Hasura/GraphQL/Resolve/Context.hs +++ b/server/src-lib/Hasura/GraphQL/Resolve/Context.hs @@ -136,7 +136,7 @@ withPrepArgs m = runStateT m Seq.empty prepareColVal :: (MonadState PrepArgs m) - => PGColType -> PGColValue -> m S.SQLExp + => PGScalarType -> PGColValue -> m S.SQLExp prepareColVal colTy colVal = do preparedArgs <- get put (preparedArgs Seq.|> binEncoder colVal) diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Insert.hs b/server/src-lib/Hasura/GraphQL/Resolve/Insert.hs index ce9d30cfed6..aa89ded5c46 100644 --- a/server/src-lib/Hasura/GraphQL/Resolve/Insert.hs +++ b/server/src-lib/Hasura/GraphQL/Resolve/Insert.hs @@ -81,7 +81,7 @@ data CTEExp data AnnInsObj = AnnInsObj - { _aioColumns :: ![(PGCol, PGColType, PGColValue)] + { _aioColumns :: ![(PGCol, PGScalarType, PGColValue)] , _aioObjRels :: ![ObjRelIns] , _aioArrRels :: ![ArrRelIns] } deriving (Show, Eq) @@ -185,7 +185,7 @@ parseOnConflict tn updFiltrM val = withPathK "on_conflict" $ toSQLExps :: (MonadError QErr m, MonadState PrepArgs m) - => [(PGCol, PGColType, PGColValue)] + => [(PGCol, PGScalarType, PGColValue)] -> m [(PGCol, S.SQLExp)] toSQLExps cols = forM cols $ \(c, ty, v) -> do @@ -200,7 +200,7 @@ mkInsertQ :: MonadError QErr m => QualifiedTable -> Maybe RI.ConflictClauseP1 - -> [(PGCol, PGColType, PGColValue)] + -> [(PGCol, PGScalarType, PGColValue)] -> [PGCol] -> Map.HashMap PGCol S.SQLExp -> RoleName @@ -535,7 +535,7 @@ mergeListsWith (x:xs) l b f = case find (b x) l of Just y -> f x y : mergeListsWith xs l b f mkPGColWithTypeAndVal :: [PGColInfo] -> [PGColWithValue] - -> [(PGCol, PGColType, PGColValue)] + -> [(PGCol, PGScalarType, PGColValue)] mkPGColWithTypeAndVal pgColInfos pgColWithVal = mergeListsWith pgColInfos pgColWithVal (\ci (c, _) -> pgiName ci == c) diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Types.hs b/server/src-lib/Hasura/GraphQL/Resolve/Types.hs index d24156330e5..a79d4698ecf 100644 --- a/server/src-lib/Hasura/GraphQL/Resolve/Types.hs +++ b/server/src-lib/Hasura/GraphQL/Resolve/Types.hs @@ -141,7 +141,7 @@ data AnnPGVal = AnnPGVal { _apvVariable :: !(Maybe G.Variable) , _apvIsNullable :: !Bool - , _apvType :: !PGColType + , _apvType :: !PGScalarType , _apvValue :: !PGColValue } deriving (Show, Eq) diff --git a/server/src-lib/Hasura/GraphQL/Schema.hs b/server/src-lib/Hasura/GraphQL/Schema.hs index 0ab00e18f43..c5c3054a51b 100644 --- a/server/src-lib/Hasura/GraphQL/Schema.hs +++ b/server/src-lib/Hasura/GraphQL/Schema.hs @@ -611,7 +611,7 @@ data TyAgg = TyAgg { _taTypes :: !TypeMap , _taFields :: !FieldMap - , _taScalars :: !(Set.HashSet PGColType) + , _taScalars :: !(Set.HashSet PGScalarType) , _taOrdBy :: !OrdByCtx } deriving (Show, Eq) diff --git a/server/src-lib/Hasura/GraphQL/Schema/BoolExp.hs b/server/src-lib/Hasura/GraphQL/Schema/BoolExp.hs index 36d6e14feb1..6ccbbb2fa77 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/BoolExp.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/BoolExp.hs @@ -22,10 +22,10 @@ addTypeSuffix suffix baseType = G.NamedType $ G.unNamedType baseType <> G.Name s typeToDescription :: G.NamedType -> G.Description typeToDescription = G.Description . G.unName . 
G.unNamedType -mkCompExpTy :: PGColType -> G.NamedType +mkCompExpTy :: PGScalarType -> G.NamedType mkCompExpTy = addTypeSuffix "_comparison_exp" . mkScalarTy -mkCastExpTy :: PGColType -> G.NamedType +mkCastExpTy :: PGScalarType -> G.NamedType mkCastExpTy = addTypeSuffix "_cast_exp" . mkScalarTy -- TODO(shahidhk) this should ideally be st_d_within_geometry @@ -51,7 +51,7 @@ stDWithinGeographyInpTy = G.NamedType "st_d_within_geography_input" -- | Makes an input type declaration for the @_cast@ field of a comparison expression. -- (Currently only used for casting between geometry and geography types.) -mkCastExpressionInputType :: PGColType -> [PGColType] -> InpObjTyInfo +mkCastExpressionInputType :: PGScalarType -> [PGScalarType] -> InpObjTyInfo mkCastExpressionInputType sourceType targetTypes = mkHsraInpTyInfo (Just description) (mkCastExpTy sourceType) (fromInpValL targetFields) where @@ -68,7 +68,7 @@ mkCastExpressionInputType sourceType targetTypes = (G.toGT $ mkCompExpTy targetType) --- | make compare expression input type -mkCompExpInp :: PGColType -> InpObjTyInfo +mkCompExpInp :: PGScalarType -> InpObjTyInfo mkCompExpInp colTy = InpObjTyInfo (Just tyDesc) (mkCompExpTy colTy) (fromInpValL $ concat [ map (mk colScalarTy) typedOps diff --git a/server/src-lib/Hasura/GraphQL/Schema/Function.hs b/server/src-lib/Hasura/GraphQL/Schema/Function.hs index 66569770cee..8e96526b6d3 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/Function.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/Function.hs @@ -36,7 +36,7 @@ input function_args { procFuncArgs :: Seq.Seq FunctionArg - -> (PGColType -> Text -> a) -> [a] + -> (PGScalarType -> Text -> a) -> [a] procFuncArgs argSeq f = fst $ foldl mkItem ([], 1::Int) argSeq where diff --git a/server/src-lib/Hasura/GraphQL/Schema/Select.hs b/server/src-lib/Hasura/GraphQL/Schema/Select.hs index ef6fc05f22d..80b0c098c5a 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/Select.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/Select.hs @@ -50,7 +50,7 @@ mkTableByPkName :: QualifiedTable -> G.Name mkTableByPkName tn = qualObjectToName tn <> "_by_pk" -- Support argument params for PG columns -mkPGColParams :: PGColType -> ParamMap +mkPGColParams :: PGScalarType -> ParamMap mkPGColParams = \case PGJSONB -> jsonParams PGJSON -> jsonParams @@ -222,7 +222,7 @@ type table__fields{ mkTableColAggFldsObj :: QualifiedTable -> G.Name - -> (PGColType -> G.NamedType) + -> (PGScalarType -> G.NamedType) -> [PGColInfo] -> ObjTyInfo mkTableColAggFldsObj tn op f cols = diff --git a/server/src-lib/Hasura/GraphQL/Validate.hs b/server/src-lib/Hasura/GraphQL/Validate.hs index 5bb0e36f6be..3ba96158d16 100644 --- a/server/src-lib/Hasura/GraphQL/Validate.hs +++ b/server/src-lib/Hasura/GraphQL/Validate.hs @@ -33,7 +33,7 @@ import Hasura.GraphQL.Validate.Types import Hasura.RQL.Types import Hasura.RQL.Types.QueryCollection -import Hasura.SQL.Types (PGColType) +import Hasura.SQL.Types (PGScalarType) import Hasura.SQL.Value (PGColValue, parsePGValue) @@ -118,8 +118,8 @@ getAnnVarVals varDefsL inpVals = withPathK "variableValues" $ do showVars :: (Functor f, Foldable f) => f G.Variable -> Text showVars = showNames . 
fmap G.unVariable -type VarPGTypes = Map.HashMap G.Variable PGColType -type AnnPGVarVals = Map.HashMap G.Variable (PGColType, PGColValue) +type VarPGTypes = Map.HashMap G.Variable PGScalarType +type AnnPGVarVals = Map.HashMap G.Variable (PGScalarType, PGColValue) -- this is in similar spirit to getAnnVarVals, however -- here it is much simpler and can get rid of typemap requirement diff --git a/server/src-lib/Hasura/GraphQL/Validate/Field.hs b/server/src-lib/Hasura/GraphQL/Validate/Field.hs index 0ce137859e5..49a247fc236 100644 --- a/server/src-lib/Hasura/GraphQL/Validate/Field.hs +++ b/server/src-lib/Hasura/GraphQL/Validate/Field.hs @@ -26,7 +26,7 @@ import Hasura.SQL.Value -- data ScalarInfo -- = SIBuiltin !GBuiltin --- | SICustom !PGColType +-- | SICustom !PGScalarType -- deriving (Show, Eq) -- data GBuiltin diff --git a/server/src-lib/Hasura/GraphQL/Validate/Types.hs b/server/src-lib/Hasura/GraphQL/Validate/Types.hs index 94541d48515..840dc8e0c5a 100644 --- a/server/src-lib/Hasura/GraphQL/Validate/Types.hs +++ b/server/src-lib/Hasura/GraphQL/Validate/Types.hs @@ -324,15 +324,15 @@ mkHsraInpTyInfo descM ty flds = data ScalarTyInfo = ScalarTyInfo { _stiDesc :: !(Maybe G.Description) - , _stiType :: !PGColType + , _stiType :: !PGScalarType , _stiLoc :: !TypeLoc } deriving (Show, Eq, TH.Lift) -mkHsraScalarTyInfo :: PGColType -> ScalarTyInfo +mkHsraScalarTyInfo :: PGScalarType -> ScalarTyInfo mkHsraScalarTyInfo ty = ScalarTyInfo Nothing ty TLHasuraType instance EquatableGType ScalarTyInfo where - type EqProps ScalarTyInfo = PGColType + type EqProps ScalarTyInfo = PGScalarType getEqProps = _stiType fromScalarTyDef @@ -546,7 +546,7 @@ isSubTypeBase subTyInfo supTyInfo = case (subTyInfo,supTyInfo) of notSubTyErr = throwError $ "Type " <> showTy subTyInfo <> " is not a sub type of " <> showTy supTyInfo -- map postgres types to builtin scalars -pgColTyToScalar :: PGColType -> Text +pgColTyToScalar :: PGScalarType -> Text pgColTyToScalar = \case PGInteger -> "Int" PGBoolean -> "Boolean" @@ -555,7 +555,7 @@ pgColTyToScalar = \case PGVarchar -> "String" t -> toSQLTxt t -mkScalarTy :: PGColType -> G.NamedType +mkScalarTy :: PGScalarType -> G.NamedType mkScalarTy = G.NamedType . G.Name . 
pgColTyToScalar @@ -660,7 +660,7 @@ data AnnInpVal type AnnGObject = OMap.InsOrdHashMap G.Name AnnInpVal data AnnGValue - = AGScalar !PGColType !(Maybe PGColValue) + = AGScalar !PGScalarType !(Maybe PGColValue) | AGEnum !G.NamedType !(Maybe G.EnumValue) | AGObject !G.NamedType !(Maybe AnnGObject) | AGArray !G.ListType !(Maybe [AnnInpVal]) diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Diff.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Diff.hs index 701351d842b..aae14ae5139 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Diff.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Diff.hs @@ -40,7 +40,7 @@ data PGColMeta = PGColMeta { pcmColumnName :: !PGCol , pcmOrdinalPosition :: !Int - , pcmDataType :: !PGColType + , pcmDataType :: !PGScalarType , pcmIsNullable :: !Bool } deriving (Show, Eq) diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Function.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Function.hs index 1807770c6d0..23b58865d4b 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Function.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Function.hs @@ -42,13 +42,13 @@ data RawFuncInfo , rfiReturnTypeName :: !T.Text , rfiReturnTypeType :: !PGTypType , rfiReturnsSet :: !Bool - , rfiInputArgTypes :: ![PGColType] + , rfiInputArgTypes :: ![PGScalarType] , rfiInputArgNames :: ![T.Text] , rfiReturnsTable :: !Bool } deriving (Show, Eq) $(deriveJSON (aesonDrop 3 snakeCase) ''RawFuncInfo) -mkFunctionArgs :: [PGColType] -> [T.Text] -> [FunctionArg] +mkFunctionArgs :: [PGScalarType] -> [T.Text] -> [FunctionArg] mkFunctionArgs tys argNames = bool withNames withNoNames $ null argNames where diff --git a/server/src-lib/Hasura/RQL/DML/Count.hs b/server/src-lib/Hasura/RQL/DML/Count.hs index 8289f350afc..67b1d4b15ef 100644 --- a/server/src-lib/Hasura/RQL/DML/Count.hs +++ b/server/src-lib/Hasura/RQL/DML/Count.hs @@ -63,7 +63,7 @@ mkSQLCount (CountQueryP1 tn (permFltr, mWc) mDistCols) = validateCountQWith :: (UserInfoM m, QErrM m, CacheRM m) => SessVarBldr m - -> (PGColType -> Value -> m S.SQLExp) + -> (PGScalarType -> Value -> m S.SQLExp) -> CountQuery -> m CountQueryP1 validateCountQWith sessVarBldr prepValBldr (CountQuery qt mDistCols mWhere) = do diff --git a/server/src-lib/Hasura/RQL/DML/Delete.hs b/server/src-lib/Hasura/RQL/DML/Delete.hs index 244c88aaa3d..7fa9a5f1c7a 100644 --- a/server/src-lib/Hasura/RQL/DML/Delete.hs +++ b/server/src-lib/Hasura/RQL/DML/Delete.hs @@ -60,7 +60,7 @@ mkDeleteCTE (AnnDel tn (fltr, wc) _ _) = validateDeleteQWith :: (UserInfoM m, QErrM m, CacheRM m) => SessVarBldr m - -> (PGColType -> Value -> m S.SQLExp) + -> (PGScalarType -> Value -> m S.SQLExp) -> DeleteQuery -> m AnnDel validateDeleteQWith sessVarBldr prepValBldr diff --git a/server/src-lib/Hasura/RQL/DML/Insert.hs b/server/src-lib/Hasura/RQL/DML/Insert.hs index bd0260ae2bf..cc6281886be 100644 --- a/server/src-lib/Hasura/RQL/DML/Insert.hs +++ b/server/src-lib/Hasura/RQL/DML/Insert.hs @@ -64,7 +64,7 @@ toSQLConflict conflict = case conflict of convObj :: (UserInfoM m, QErrM m) - => (PGColType -> Value -> m S.SQLExp) + => (PGScalarType -> Value -> m S.SQLExp) -> HM.HashMap PGCol S.SQLExp -> HM.HashMap PGCol S.SQLExp -> FieldInfoMap @@ -160,7 +160,7 @@ convInsertQuery :: (UserInfoM m, QErrM m, CacheRM m) => (Value -> m [InsObj]) -> SessVarBldr m - -> (PGColType -> Value -> m S.SQLExp) + -> (PGScalarType -> Value -> m S.SQLExp) -> InsertQuery -> m InsertQueryP1 convInsertQuery objsParser sessVarBldr prepFn (InsertQuery tableName val oC mRetCols) = do diff --git a/server/src-lib/Hasura/RQL/DML/Internal.hs 
b/server/src-lib/Hasura/RQL/DML/Internal.hs index 3015230fc0a..a0113466b65 100644 --- a/server/src-lib/Hasura/RQL/DML/Internal.hs +++ b/server/src-lib/Hasura/RQL/DML/Internal.hs @@ -142,7 +142,7 @@ checkPermOnCol pt allowedCols pgCol = do ] binRHSBuilder - :: PGColType -> Value -> DMLP1 S.SQLExp + :: PGScalarType -> Value -> DMLP1 S.SQLExp binRHSBuilder colType val = do preparedArgs <- get binVal <- runAesonParser (convToBin colType) val @@ -245,7 +245,7 @@ convBoolExp -> SelPermInfo -> BoolExp -> SessVarBldr m - -> (PGColType -> Value -> m S.SQLExp) + -> (PGScalarType -> Value -> m S.SQLExp) -> m AnnBoolExpSQL convBoolExp cim spi be sessVarBldr prepValBldr = do abe <- annBoolExp rhsParser cim be @@ -266,7 +266,7 @@ dmlTxErrorHandler p2Res = Just (code, msg) -> err400 code msg where err = simplifyError p2Res -toJSONableExp :: Bool -> PGColType -> S.SQLExp -> S.SQLExp +toJSONableExp :: Bool -> PGScalarType -> S.SQLExp -> S.SQLExp toJSONableExp strfyNum colTy expn | colTy == PGGeometry || colTy == PGGeography = S.SEFnApp "ST_AsGeoJSON" diff --git a/server/src-lib/Hasura/RQL/DML/Returning.hs b/server/src-lib/Hasura/RQL/DML/Returning.hs index f81de53d5bb..87fa172d4fa 100644 --- a/server/src-lib/Hasura/RQL/DML/Returning.hs +++ b/server/src-lib/Hasura/RQL/DML/Returning.hs @@ -50,7 +50,7 @@ hasNestedFld = any isNestedMutFld FArr _ -> True _ -> False -pgColsFromMutFld :: MutFld -> [(PGCol, PGColType)] +pgColsFromMutFld :: MutFld -> [(PGCol, PGScalarType)] pgColsFromMutFld = \case MCount -> [] MExp _ -> [] @@ -59,7 +59,7 @@ pgColsFromMutFld = \case FCol (PGColInfo col colTy _) _ -> Just (col, colTy) _ -> Nothing -pgColsFromMutFlds :: MutFlds -> [(PGCol, PGColType)] +pgColsFromMutFlds :: MutFlds -> [(PGCol, PGScalarType)] pgColsFromMutFlds = concatMap (pgColsFromMutFld . 
snd) pgColsToSelFlds :: [PGColInfo] -> [(FieldName, AnnFld)] diff --git a/server/src-lib/Hasura/RQL/DML/Select.hs b/server/src-lib/Hasura/RQL/DML/Select.hs index 919e4ad08c4..6d2408c6dec 100644 --- a/server/src-lib/Hasura/RQL/DML/Select.hs +++ b/server/src-lib/Hasura/RQL/DML/Select.hs @@ -149,7 +149,7 @@ convSelectQ -> SelPermInfo -- Additional select permission info -> SelectQExt -- Given Select Query -> SessVarBldr m - -> (PGColType -> Value -> m S.SQLExp) + -> (PGScalarType -> Value -> m S.SQLExp) -> m AnnSimpleSel convSelectQ fieldInfoMap selPermInfo selQ sessVarBldr prepValBldr = do @@ -217,7 +217,7 @@ convExtRel -> Maybe RelName -> SelectQExt -> SessVarBldr m - -> (PGColType -> Value -> m S.SQLExp) + -> (PGScalarType -> Value -> m S.SQLExp) -> m (Either ObjSel ArrSel) convExtRel fieldInfoMap relName mAlias selQ sessVarBldr prepValBldr = do -- Point to the name key @@ -250,7 +250,7 @@ convExtRel fieldInfoMap relName mAlias selQ sessVarBldr prepValBldr = do convSelectQuery :: (UserInfoM m, QErrM m, CacheRM m, HasSQLGenCtx m) => SessVarBldr m - -> (PGColType -> Value -> m S.SQLExp) + -> (PGScalarType -> Value -> m S.SQLExp) -> SelectQuery -> m AnnSimpleSel convSelectQuery sessVarBldr prepArgBuilder (DMLQuery qt selQ) = do diff --git a/server/src-lib/Hasura/RQL/DML/Update.hs b/server/src-lib/Hasura/RQL/DML/Update.hs index 9bb6e2c9d2d..f57fdb6361e 100644 --- a/server/src-lib/Hasura/RQL/DML/Update.hs +++ b/server/src-lib/Hasura/RQL/DML/Update.hs @@ -67,9 +67,9 @@ mkUpdateCTE (AnnUpd tn setExps (permFltr, wc) _ _) = convInc :: (QErrM m) - => (PGColType -> Value -> m S.SQLExp) + => (PGScalarType -> Value -> m S.SQLExp) -> PGCol - -> PGColType + -> PGScalarType -> Value -> m (PGCol, S.SQLExp) convInc f col colType val = do @@ -78,9 +78,9 @@ convInc f col colType val = do convMul :: (QErrM m) - => (PGColType -> Value -> m S.SQLExp) + => (PGScalarType -> Value -> m S.SQLExp) -> PGCol - -> PGColType + -> PGScalarType -> Value -> m (PGCol, S.SQLExp) convMul f col colType val = do @@ -89,16 +89,16 @@ convMul f col colType val = do convSet :: (QErrM m) - => (PGColType -> Value -> m S.SQLExp) + => (PGScalarType -> Value -> m S.SQLExp) -> PGCol - -> PGColType + -> PGScalarType -> Value -> m (PGCol, S.SQLExp) convSet f col colType val = do prepExp <- f colType val return (col, prepExp) -convDefault :: (Monad m) => PGCol -> PGColType -> () -> m (PGCol, S.SQLExp) +convDefault :: (Monad m) => PGCol -> PGScalarType -> () -> m (PGCol, S.SQLExp) convDefault col _ _ = return (col, S.SEUnsafe "DEFAULT") convOp @@ -107,7 +107,7 @@ convOp -> [PGCol] -> UpdPermInfo -> [(PGCol, a)] - -> (PGCol -> PGColType -> a -> m (PGCol, S.SQLExp)) + -> (PGCol -> PGScalarType -> a -> m (PGCol, S.SQLExp)) -> m [(PGCol, S.SQLExp)] convOp fieldInfoMap preSetCols updPerm objs conv = forM objs $ \(pgCol, a) -> do @@ -129,7 +129,7 @@ convOp fieldInfoMap preSetCols updPerm objs conv = validateUpdateQueryWith :: (UserInfoM m, QErrM m, CacheRM m) => SessVarBldr m - -> (PGColType -> Value -> m S.SQLExp) + -> (PGScalarType -> Value -> m S.SQLExp) -> UpdateQuery -> m AnnUpd validateUpdateQueryWith sessVarBldr prepValBldr uq = do diff --git a/server/src-lib/Hasura/RQL/GBoolExp.hs b/server/src-lib/Hasura/RQL/GBoolExp.hs index 8f25dc8dd02..18ac6d7796e 100644 --- a/server/src-lib/Hasura/RQL/GBoolExp.hs +++ b/server/src-lib/Hasura/RQL/GBoolExp.hs @@ -27,10 +27,10 @@ type OpRhsParser m v = -- number of times. Used within 'parseOperationsExpression' for bookkeeping. 
data ColumnReference = ColumnReferenceColumn !PGColInfo - | ColumnReferenceCast !ColumnReference !PGColType + | ColumnReferenceCast !ColumnReference !PGScalarType deriving (Show, Eq) -columnReferenceType :: ColumnReference -> PGColType +columnReferenceType :: ColumnReference -> PGScalarType columnReferenceType = \case ColumnReferenceColumn column -> pgiType column ColumnReferenceCast _ targetType -> targetType @@ -265,12 +265,12 @@ parseOperationsExpression rhsParser fim columnInfo = parseVal :: (FromJSON a) => m a parseVal = decodeValue val -buildMsg :: PGColType -> [PGColType] -> QErr +buildMsg :: PGScalarType -> [PGScalarType] -> QErr buildMsg ty expTys = err400 UnexpectedPayload $ " is of type " <> ty <<> "; this operator works only on columns of type " <> T.intercalate "/" (map dquote expTys) -textOnlyOp :: (MonadError QErr m) => PGColType -> m () +textOnlyOp :: (MonadError QErr m) => PGScalarType -> m () textOnlyOp PGText = return () textOnlyOp PGVarchar = return () textOnlyOp ty = @@ -357,13 +357,13 @@ convColRhs tableQual = \case pgValParser :: (MonadError QErr m) - => PGColType -> Value -> m PGColValue + => PGScalarType -> Value -> m PGColValue pgValParser ty = runAesonParser (parsePGValue ty) txtRHSBuilder :: (MonadError QErr m) - => PGColType -> Value -> m S.SQLExp + => PGScalarType -> Value -> m S.SQLExp txtRHSBuilder ty val = toTxtValue ty <$> pgValParser ty val diff --git a/server/src-lib/Hasura/RQL/Types.hs b/server/src-lib/Hasura/RQL/Types.hs index 4e83aa7a067..c2cbd3c8bcb 100644 --- a/server/src-lib/Hasura/RQL/Types.hs +++ b/server/src-lib/Hasura/RQL/Types.hs @@ -176,7 +176,7 @@ askPGType => FieldInfoMap -> PGCol -> T.Text - -> m PGColType + -> m PGScalarType askPGType m c msg = pgiType <$> askPGColInfo m c msg diff --git a/server/src-lib/Hasura/RQL/Types/BoolExp.hs b/server/src-lib/Hasura/RQL/Types/BoolExp.hs index 8cd06711a2b..55b3739d78c 100644 --- a/server/src-lib/Hasura/RQL/Types/BoolExp.hs +++ b/server/src-lib/Hasura/RQL/Types/BoolExp.hs @@ -121,7 +121,7 @@ data DWithinGeogOp a = } deriving (Show, Eq, Functor, Foldable, Traversable, Data) $(deriveJSON (aesonDrop 6 snakeCase) ''DWithinGeogOp) -type CastExp a = M.HashMap PGColType [OpExpG a] +type CastExp a = M.HashMap PGScalarType [OpExpG a] data OpExpG a = ACast !(CastExp a) diff --git a/server/src-lib/Hasura/RQL/Types/Common.hs b/server/src-lib/Hasura/RQL/Types/Common.hs index 908262b708b..0d837c95b4d 100644 --- a/server/src-lib/Hasura/RQL/Types/Common.hs +++ b/server/src-lib/Hasura/RQL/Types/Common.hs @@ -41,7 +41,7 @@ import qualified PostgreSQL.Binary.Decoding as PD data PGColInfo = PGColInfo { pgiName :: !PGCol - , pgiType :: !PGColType + , pgiType :: !PGScalarType , pgiIsNullable :: !Bool } deriving (Show, Eq) diff --git a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs index 963394fba16..580afb1f4b8 100644 --- a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs +++ b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs @@ -381,7 +381,7 @@ newtype FunctionArgName = data FunctionArg = FunctionArg { faName :: !(Maybe FunctionArgName) - , faType :: !PGColType + , faType :: !PGScalarType } deriving (Show, Eq) $(deriveToJSON (aesonDrop 2 snakeCase) ''FunctionArg) diff --git a/server/src-lib/Hasura/SQL/DML.hs b/server/src-lib/Hasura/SQL/DML.hs index 9c681c46d1a..845ae24b9bb 100644 --- a/server/src-lib/Hasura/SQL/DML.hs +++ b/server/src-lib/Hasura/SQL/DML.hs @@ -285,7 +285,7 @@ data SQLExp | SECount !CountType deriving (Show, Eq, Data) -withTyAnn :: PGColType -> SQLExp -> SQLExp 
+withTyAnn :: PGScalarType -> SQLExp -> SQLExp withTyAnn colTy v = SETyAnn v . mkTypeAnn $ PgTypeSimple colTy instance J.ToJSON SQLExp where diff --git a/server/src-lib/Hasura/SQL/Types.hs b/server/src-lib/Hasura/SQL/Types.hs index e3784f48cfe..7f955dbbf30 100644 --- a/server/src-lib/Hasura/SQL/Types.hs +++ b/server/src-lib/Hasura/SQL/Types.hs @@ -246,7 +246,7 @@ showPGCols :: (Foldable t) => t PGCol -> T.Text showPGCols cols = T.intercalate ", " $ map (T.dquote . getPGColTxt) $ toList cols -data PGColType +data PGScalarType = PGSmallInt | PGInteger | PGBigInt @@ -269,9 +269,9 @@ data PGColType | PGUnknown !T.Text deriving (Show, Eq, Lift, Generic, Data) -instance Hashable PGColType +instance Hashable PGScalarType -instance ToSQL PGColType where +instance ToSQL PGScalarType where toSQL = \case PGSmallInt -> "smallint" PGInteger -> "integer" @@ -294,16 +294,16 @@ instance ToSQL PGColType where PGGeography -> "geography" PGUnknown t -> TB.text t -instance ToJSON PGColType where +instance ToJSON PGScalarType where toJSON = String . toSQLTxt -instance ToJSONKey PGColType where +instance ToJSONKey PGScalarType where toJSONKey = toJSONKeyText toSQLTxt -instance DQuote PGColType where +instance DQuote PGScalarType where dquoteTxt = toSQLTxt -txtToPgColTy :: Text -> PGColType +txtToPgColTy :: Text -> PGScalarType txtToPgColTy t = case t of "serial" -> PGSerial "bigserial" -> PGBigSerial @@ -353,11 +353,11 @@ txtToPgColTy t = case t of _ -> PGUnknown t -instance FromJSON PGColType where +instance FromJSON PGScalarType where parseJSON (String t) = return $ txtToPgColTy t - parseJSON _ = fail "Expecting a string for PGColType" + parseJSON _ = fail "Expecting a string for PGScalarType" -pgTypeOid :: PGColType -> PQ.Oid +pgTypeOid :: PGScalarType -> PQ.Oid pgTypeOid PGSmallInt = PTI.int2 pgTypeOid PGInteger = PTI.int4 pgTypeOid PGBigInt = PTI.int8 @@ -380,23 +380,23 @@ pgTypeOid PGGeometry = PTI.text pgTypeOid PGGeography = PTI.text pgTypeOid (PGUnknown _) = PTI.auto -isIntegerType :: PGColType -> Bool +isIntegerType :: PGScalarType -> Bool isIntegerType PGInteger = True isIntegerType PGSmallInt = True isIntegerType PGBigInt = True isIntegerType _ = False -isNumType :: PGColType -> Bool +isNumType :: PGScalarType -> Bool isNumType PGFloat = True isNumType PGDouble = True isNumType PGNumeric = True isNumType ty = isIntegerType ty -isJSONBType :: PGColType -> Bool +isJSONBType :: PGScalarType -> Bool isJSONBType PGJSONB = True isJSONBType _ = False -isComparableType :: PGColType -> Bool +isComparableType :: PGScalarType -> Bool isComparableType PGJSON = False isComparableType PGJSONB = False isComparableType PGGeometry = False @@ -405,7 +405,7 @@ isComparableType PGBoolean = False isComparableType (PGUnknown _) = False isComparableType _ = True -isBigNum :: PGColType -> Bool +isBigNum :: PGScalarType -> Bool isBigNum = \case PGBigInt -> True PGBigSerial -> True @@ -413,7 +413,7 @@ isBigNum = \case PGDouble -> True _ -> False -isGeoType :: PGColType -> Bool +isGeoType :: PGScalarType -> Bool isGeoType = \case PGGeometry -> True PGGeography -> True @@ -424,8 +424,8 @@ isGeoType = \case -- TODO: This is incorrect modeling, as 'PGScalarType' will capture anything (under 'PGUnknown'). -- This should be fixed when support for all types is merged. 
data PgType - = PgTypeSimple !PGColType - | PgTypeArray !PGColType + = PgTypeSimple !PGScalarType + | PgTypeArray !PGScalarType deriving (Show, Eq, Data) instance ToSQL PgType where diff --git a/server/src-lib/Hasura/SQL/Value.hs b/server/src-lib/Hasura/SQL/Value.hs index 9f06725ca56..9f39b1c483e 100644 --- a/server/src-lib/Hasura/SQL/Value.hs +++ b/server/src-lib/Hasura/SQL/Value.hs @@ -39,7 +39,7 @@ data PGColValue | PGValDate !Day | PGValTimeStampTZ !UTCTime | PGValTimeTZ !ZonedTimeOfDay - | PGNull !PGColType + | PGNull !PGScalarType | PGValJSON !Q.JSON | PGValJSONB !Q.JSONB | PGValGeo !GeometryWithCRS @@ -133,7 +133,7 @@ textToPrepVal :: Text -> Q.PrepArg textToPrepVal t = (PTI.auto, Just (TE.encodeUtf8 t, PQ.Text)) -parsePGValue' :: PGColType +parsePGValue' :: PGScalarType -> Value -> AT.Parser PGColValue parsePGValue' ty Null = @@ -181,19 +181,19 @@ parsePGValue' (PGUnknown _) (String t) = parsePGValue' (PGUnknown tyName) _ = fail $ "A string is expected for type : " ++ T.unpack tyName -parsePGValue :: PGColType -> Value -> AT.Parser PGColValue +parsePGValue :: PGScalarType -> Value -> AT.Parser PGColValue parsePGValue pct val = case val of String t -> parsePGValue' pct val <|> return (PGValUnknown t) _ -> parsePGValue' pct val -convToBin :: PGColType +convToBin :: PGScalarType -> Value -> AT.Parser Q.PrepArg convToBin ty val = binEncoder <$> parsePGValue ty val -convToTxt :: PGColType +convToTxt :: PGScalarType -> Value -> AT.Parser S.SQLExp convToTxt ty val = @@ -209,7 +209,7 @@ iresToEither (ISuccess a) = return a pgValFromJVal :: (FromJSON a) => Value -> Either String a pgValFromJVal = iresToEither . ifromJSON -withGeoVal :: PGColType -> S.SQLExp -> S.SQLExp +withGeoVal :: PGScalarType -> S.SQLExp -> S.SQLExp withGeoVal ty v = bool v applyGeomFromGeoJson isGeoTy where @@ -221,11 +221,11 @@ withGeoVal ty v = PGGeography -> True _ -> False -toPrepParam :: Int -> PGColType -> S.SQLExp +toPrepParam :: Int -> PGScalarType -> S.SQLExp toPrepParam i ty = withGeoVal ty $ S.SEPrep i -toTxtValue :: PGColType -> PGColValue -> S.SQLExp +toTxtValue :: PGScalarType -> PGColValue -> S.SQLExp toTxtValue ty val = S.withTyAnn ty txtVal where From ed26da59a6d00b34e67707b754a9622848064e75 Mon Sep 17 00:00:00 2001 From: Alexis King Date: Mon, 22 Jul 2019 18:17:13 +0530 Subject: [PATCH 04/10] Add support for GraphQL enum types via enum table references These changes also add a new type, PGColumnType, between PGColInfo and PGScalarType, and they process PGRawColumnType values into PGColumnType values during schema cache generation. 
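For reference, this is roughly the shape of the new types, reconstructed
from how they are used in the diff below; the authoritative definitions
live in the new Hasura.RQL.Types.Column module and may differ in detail:

    -- A column is either an ordinary scalar or a reference to a table
    -- that has been marked as an enum table.
    data PGColumnType
      = PGColumnScalar !PGScalarType
      | PGColumnEnumReference !EnumReference
      deriving (Show, Eq)

    -- Records which table an enum column refers to, along with the
    -- permitted values.
    data EnumReference
      = EnumReference !QualifiedTable !EnumValues
      deriving (Show, Eq)

    -- A value paired with the scalar type it was parsed at, so it can
    -- later be bound as a prepared argument of the right type. (The
    -- Foldable/Traversable derivations rely on the Derive* extensions
    -- already enabled in the cabal file.)
    data PGScalarTyped a
      = PGScalarTyped
      { pstType  :: !PGScalarType
      , pstValue :: !a
      } deriving (Show, Eq, Functor, Foldable, Traversable)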
--- .circleci/config.yml | 10 +- .circleci/server-builder.dockerfile | 2 + cli/commands/migrate_test.go | 5 +- server/graphql-engine.cabal | 80 +-- server/src-exec/Migrate.hs | 103 ++-- server/src-lib/Control/Lens/Extended.hs | 19 + server/src-lib/Hasura/Db.hs | 5 +- server/src-lib/Hasura/Events/Lib.hs | 2 +- .../Hasura/GraphQL/Execute/LiveQuery.hs | 24 +- .../src-lib/Hasura/GraphQL/Execute/Query.hs | 18 +- server/src-lib/Hasura/GraphQL/Explain.hs | 4 +- .../src-lib/Hasura/GraphQL/Resolve/BoolExp.hs | 31 +- .../src-lib/Hasura/GraphQL/Resolve/Context.hs | 15 +- .../Hasura/GraphQL/Resolve/InputValue.hs | 75 +-- .../src-lib/Hasura/GraphQL/Resolve/Insert.hs | 54 +- .../Hasura/GraphQL/Resolve/Introspect.hs | 17 +- .../Hasura/GraphQL/Resolve/Mutation.hs | 10 +- .../src-lib/Hasura/GraphQL/Resolve/Select.hs | 10 +- .../src-lib/Hasura/GraphQL/Resolve/Types.hs | 11 +- server/src-lib/Hasura/GraphQL/Schema.hs | 113 ++-- .../src-lib/Hasura/GraphQL/Schema/BoolExp.hs | 81 +-- .../src-lib/Hasura/GraphQL/Schema/Common.hs | 23 +- .../Hasura/GraphQL/Schema/Mutation/Common.hs | 2 +- .../Hasura/GraphQL/Schema/Mutation/Insert.hs | 14 +- .../src-lib/Hasura/GraphQL/Schema/OrderBy.hs | 4 +- .../src-lib/Hasura/GraphQL/Schema/Select.hs | 33 +- server/src-lib/Hasura/GraphQL/Validate.hs | 11 +- .../Hasura/GraphQL/Validate/InputValue.hs | 26 +- .../src-lib/Hasura/GraphQL/Validate/Types.hs | 54 +- server/src-lib/Hasura/Prelude.hs | 11 +- server/src-lib/Hasura/RQL/DDL/EventTrigger.hs | 10 +- server/src-lib/Hasura/RQL/DDL/Metadata.hs | 29 +- server/src-lib/Hasura/RQL/DDL/Permission.hs | 32 +- .../Hasura/RQL/DDL/Permission/Internal.hs | 52 +- server/src-lib/Hasura/RQL/DDL/Relationship.hs | 20 +- .../Hasura/RQL/DDL/Relationship/Rename.hs | 2 +- server/src-lib/Hasura/RQL/DDL/Schema/Diff.hs | 13 +- server/src-lib/Hasura/RQL/DDL/Schema/Enum.hs | 135 +++++ .../src-lib/Hasura/RQL/DDL/Schema/Rename.hs | 7 +- server/src-lib/Hasura/RQL/DDL/Schema/Table.hs | 501 +++++++++++------- server/src-lib/Hasura/RQL/DML/Count.hs | 4 +- server/src-lib/Hasura/RQL/DML/Delete.hs | 6 +- server/src-lib/Hasura/RQL/DML/Insert.hs | 20 +- server/src-lib/Hasura/RQL/DML/Internal.hs | 72 +-- server/src-lib/Hasura/RQL/DML/Mutation.hs | 2 +- server/src-lib/Hasura/RQL/DML/Returning.hs | 6 +- server/src-lib/Hasura/RQL/DML/Select.hs | 28 +- server/src-lib/Hasura/RQL/DML/Update.hs | 24 +- server/src-lib/Hasura/RQL/GBoolExp.hs | 120 ++--- server/src-lib/Hasura/RQL/Types.hs | 33 +- server/src-lib/Hasura/RQL/Types/BoolExp.hs | 8 +- server/src-lib/Hasura/RQL/Types/Catalog.hs | 48 +- server/src-lib/Hasura/RQL/Types/Column.hs | 156 ++++++ server/src-lib/Hasura/RQL/Types/Common.hs | 12 +- server/src-lib/Hasura/RQL/Types/Metadata.hs | 8 +- .../src-lib/Hasura/RQL/Types/SchemaCache.hs | 145 +++-- server/src-lib/Hasura/SQL/DML.hs | 17 +- server/src-lib/Hasura/SQL/Error.hs | 95 ++++ server/src-lib/Hasura/SQL/Rewrite.hs | 1 + server/src-lib/Hasura/SQL/Types.hs | 50 +- server/src-lib/Hasura/SQL/Value.hs | 102 ++-- server/src-lib/Hasura/Server/Init.hs | 4 +- server/src-lib/Hasura/Server/Query.hs | 3 + server/src-lib/Hasura/Server/Telemetry.hs | 16 +- server/src-rsr/catalog_metadata.sql | 44 +- server/src-rsr/initialise.sql | 64 ++- server/src-rsr/migrate_from_19_to_20.sql | 158 ++++++ server/src-rsr/table_meta.sql | 19 +- server/stack.yaml | 1 + server/stack.yaml.lock | 7 + .../enums/delete_where_enum_field.yaml | 21 + .../enums/insert_enum_field.yaml | 24 + .../enums/insert_enum_field_bad_value.yaml | 19 + .../graphql_mutation/enums/schema_setup.yaml | 28 + 
.../enums/schema_teardown.yaml | 8 + .../enums/update_enum_field.yaml | 21 + .../enums/update_where_enum_field.yaml | 21 + .../graphql_mutation/enums/values_setup.yaml | 11 + .../enums/values_teardown.yaml | 8 + ...hor_article_operator_ne_not_found_err.yaml | 2 +- ...icle_unexpected_operator_in_where_err.yaml | 2 +- .../graphql_query/enums/introspect.yaml | 57 ++ .../enums/select_enum_field.yaml | 18 + .../enums/select_where_enum_eq.yaml | 17 + .../enums/select_where_enum_eq_bad_value.yaml | 14 + .../enums/select_where_enum_eq_string.yaml | 14 + .../enums/select_where_enum_eq_variable.yaml | 21 + ...lect_where_enum_eq_variable_bad_value.yaml | 16 + .../queries/graphql_query/enums/setup.yaml | 31 ++ .../queries/graphql_query/enums/teardown.yaml | 8 + .../queries/inconsistent_objects/test.yaml | 15 +- .../queries/v1/metadata/export_metadata.yaml | 2 + .../v1/set_table_is_enum/add_and_remove.yaml | 128 +++++ .../v1/set_table_is_enum/add_invalid.yaml | 15 + .../queries/v1/set_table_is_enum/setup.yaml | 45 ++ .../v1/set_table_is_enum/teardown.yaml | 9 + server/tests-py/test_graphql_mutations.py | 21 + server/tests-py/test_graphql_queries.py | 27 + server/tests-py/test_inconsistent_meta.py | 1 + server/tests-py/test_v1_queries.py | 11 + 100 files changed, 2436 insertions(+), 1140 deletions(-) create mode 100644 server/src-lib/Control/Lens/Extended.hs create mode 100644 server/src-lib/Hasura/RQL/DDL/Schema/Enum.hs create mode 100644 server/src-lib/Hasura/RQL/Types/Column.hs create mode 100644 server/src-lib/Hasura/SQL/Error.hs create mode 100644 server/src-rsr/migrate_from_19_to_20.sql create mode 100644 server/tests-py/queries/graphql_mutation/enums/delete_where_enum_field.yaml create mode 100644 server/tests-py/queries/graphql_mutation/enums/insert_enum_field.yaml create mode 100644 server/tests-py/queries/graphql_mutation/enums/insert_enum_field_bad_value.yaml create mode 100644 server/tests-py/queries/graphql_mutation/enums/schema_setup.yaml create mode 100644 server/tests-py/queries/graphql_mutation/enums/schema_teardown.yaml create mode 100644 server/tests-py/queries/graphql_mutation/enums/update_enum_field.yaml create mode 100644 server/tests-py/queries/graphql_mutation/enums/update_where_enum_field.yaml create mode 100644 server/tests-py/queries/graphql_mutation/enums/values_setup.yaml create mode 100644 server/tests-py/queries/graphql_mutation/enums/values_teardown.yaml create mode 100644 server/tests-py/queries/graphql_query/enums/introspect.yaml create mode 100644 server/tests-py/queries/graphql_query/enums/select_enum_field.yaml create mode 100644 server/tests-py/queries/graphql_query/enums/select_where_enum_eq.yaml create mode 100644 server/tests-py/queries/graphql_query/enums/select_where_enum_eq_bad_value.yaml create mode 100644 server/tests-py/queries/graphql_query/enums/select_where_enum_eq_string.yaml create mode 100644 server/tests-py/queries/graphql_query/enums/select_where_enum_eq_variable.yaml create mode 100644 server/tests-py/queries/graphql_query/enums/select_where_enum_eq_variable_bad_value.yaml create mode 100644 server/tests-py/queries/graphql_query/enums/setup.yaml create mode 100644 server/tests-py/queries/graphql_query/enums/teardown.yaml create mode 100644 server/tests-py/queries/v1/set_table_is_enum/add_and_remove.yaml create mode 100644 server/tests-py/queries/v1/set_table_is_enum/add_invalid.yaml create mode 100644 server/tests-py/queries/v1/set_table_is_enum/setup.yaml create mode 100644 server/tests-py/queries/v1/set_table_is_enum/teardown.yaml diff --git 
a/.circleci/config.yml b/.circleci/config.yml index 34ba13f93b2..8022ca007d8 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -159,7 +159,7 @@ jobs: # build the server binary, and package into docker image build_server: docker: - - image: hasura/graphql-engine-server-builder:20190507-1 + - image: hasura/graphql-engine-server-builder:20190811 working_directory: ~/graphql-engine steps: - attach_workspace: @@ -235,7 +235,7 @@ jobs: environment: PG_VERSION: "11_1" docker: - - image: hasura/graphql-engine-server-builder:20190507-1 + - image: hasura/graphql-engine-server-builder:20190811 # TODO: change this to circleci postgis when they have one for pg 11 - image: mdillon/postgis:11-alpine <<: *test_pg_env @@ -245,7 +245,7 @@ jobs: environment: PG_VERSION: "10_6" docker: - - image: hasura/graphql-engine-server-builder:20190507-1 + - image: hasura/graphql-engine-server-builder:20190811 - image: circleci/postgres:10.6-alpine-postgis <<: *test_pg_env @@ -254,7 +254,7 @@ jobs: environment: PG_VERSION: "9_6" docker: - - image: hasura/graphql-engine-server-builder:20190507-1 + - image: hasura/graphql-engine-server-builder:20190811 - image: circleci/postgres:9.6-alpine-postgis <<: *test_pg_env @@ -263,7 +263,7 @@ jobs: environment: PG_VERSION: "9_5" docker: - - image: hasura/graphql-engine-server-builder:20190507-1 + - image: hasura/graphql-engine-server-builder:20190811 - image: circleci/postgres:9.5-alpine-postgis <<: *test_pg_env diff --git a/.circleci/server-builder.dockerfile b/.circleci/server-builder.dockerfile index 55b3b23c771..683f2ababb4 100644 --- a/.circleci/server-builder.dockerfile +++ b/.circleci/server-builder.dockerfile @@ -28,3 +28,5 @@ RUN apt-get -y update \ && rm -rf /usr/share/doc/ \ && rm -rf /usr/share/man/ \ && rm -rf /usr/share/locale/ + +ENV LANG=C.UTF-8 LC_ALL=C.UTF-8 diff --git a/cli/commands/migrate_test.go b/cli/commands/migrate_test.go index 6a4d5009034..c0f1276ff65 100644 --- a/cli/commands/migrate_test.go +++ b/cli/commands/migrate_test.go @@ -34,7 +34,6 @@ var testMetadataPrev = map[string][]byte{ "metadata": []byte(`allowlist: [] functions: [] query_collections: [] -query_templates: [] remote_schemas: [] tables: - array_relationships: [] @@ -49,7 +48,6 @@ tables: "empty-metadata": []byte(`allowlist: [] functions: [] query_collections: [] -query_templates: [] remote_schemas: [] tables: [] `), @@ -65,6 +63,7 @@ tables: delete_permissions: [] event_triggers: [] insert_permissions: [] + is_enum: false object_relationships: [] select_permissions: [] table: test @@ -264,7 +263,7 @@ func mustWriteFile(t testing.TB, dir, file string, body string) { func compareMetadata(t testing.TB, metadataFile string, actualType string, serverVersion *semver.Version) { var actualData []byte - c, err := semver.NewConstraint("<= v1.0.0-beta.3") + c, err := semver.NewConstraint("<= v1.0.0-beta.5") if err != nil { t.Fatal(err) } diff --git a/server/graphql-engine.cabal b/server/graphql-engine.cabal index 8f28d2c6232..137318fd74b 100644 --- a/server/graphql-engine.cabal +++ b/server/graphql-engine.cabal @@ -56,6 +56,7 @@ library , containers , monad-control , monad-time + , monad-validate , fast-logger , wai , postgresql-binary @@ -171,16 +172,17 @@ library , Hasura.RQL.Instances , Hasura.RQL.Types.SchemaCache , Hasura.RQL.Types.SchemaCacheTypes - , Hasura.RQL.Types.Common - , Hasura.RQL.Types.Catalog , Hasura.RQL.Types.BoolExp - , Hasura.RQL.Types.Permission - , Hasura.RQL.Types.Error + , Hasura.RQL.Types.Catalog + , Hasura.RQL.Types.Column + , Hasura.RQL.Types.Common , 
Hasura.RQL.Types.DML + , Hasura.RQL.Types.Error , Hasura.RQL.Types.EventTrigger - , Hasura.RQL.Types.RemoteSchema , Hasura.RQL.Types.Metadata + , Hasura.RQL.Types.Permission , Hasura.RQL.Types.QueryCollection + , Hasura.RQL.Types.RemoteSchema , Hasura.RQL.DDL.Deps , Hasura.RQL.DDL.Permission.Internal , Hasura.RQL.DDL.Permission.Triggers @@ -188,10 +190,11 @@ library , Hasura.RQL.DDL.Relationship , Hasura.RQL.DDL.Relationship.Rename , Hasura.RQL.DDL.Relationship.Types - , Hasura.RQL.DDL.Schema.Table - , Hasura.RQL.DDL.Schema.Rename - , Hasura.RQL.DDL.Schema.Function + , Hasura.RQL.DDL.Schema.Enum , Hasura.RQL.DDL.Schema.Diff + , Hasura.RQL.DDL.Schema.Function + , Hasura.RQL.DDL.Schema.Rename + , Hasura.RQL.DDL.Schema.Table , Hasura.RQL.DDL.Metadata , Hasura.RQL.DDL.Utils , Hasura.RQL.DDL.EventTrigger @@ -258,6 +261,7 @@ library , Hasura.HTTP + , Control.Lens.Extended , Data.Text.Extended , Data.Aeson.Extended , Data.Sequence.NonEmpty @@ -266,11 +270,12 @@ library , Data.Parser.JSONPath , Hasura.SQL.DML + , Hasura.SQL.Error + , Hasura.SQL.GeoJSON + , Hasura.SQL.Rewrite + , Hasura.SQL.Time , Hasura.SQL.Types , Hasura.SQL.Value - , Hasura.SQL.GeoJSON - , Hasura.SQL.Time - , Hasura.SQL.Rewrite , Network.URI.Extended , Ops , Migrate @@ -278,28 +283,29 @@ library other-modules: Hasura.Server.Auth.JWT.Internal , Hasura.Server.Auth.JWT.Logging - default-extensions: EmptyCase - FlexibleContexts - FlexibleInstances - InstanceSigs - MultiParamTypeClasses - LambdaCase - MultiWayIf - TupleSections + default-extensions: ApplicativeDo + BangPatterns + DeriveDataTypeable DeriveFoldable DeriveFunctor DeriveGeneric DeriveLift DeriveTraversable + EmptyCase + FlexibleContexts + FlexibleInstances GeneralizedNewtypeDeriving - BangPatterns + InstanceSigs + LambdaCase + MultiParamTypeClasses + MultiWayIf + NoImplicitPrelude OverloadedStrings + QuasiQuotes ScopedTypeVariables TemplateHaskell - QuasiQuotes + TupleSections TypeFamilies - NoImplicitPrelude - DeriveDataTypeable if flag(profile) @@ -308,6 +314,8 @@ library cpp-options: -DDeveloperAPIs ghc-options: -O2 + -foptimal-applicative-do + -fdefer-typed-holes -Wall -Wcompat -Wincomplete-record-updates @@ -315,27 +323,29 @@ library -Wredundant-constraints executable graphql-engine - default-extensions: EmptyCase - FlexibleContexts - FlexibleInstances - InstanceSigs - MultiParamTypeClasses - LambdaCase - MultiWayIf - TupleSections + default-extensions: ApplicativeDo + BangPatterns + DeriveDataTypeable DeriveFoldable DeriveFunctor DeriveGeneric DeriveLift DeriveTraversable + EmptyCase + FlexibleContexts + FlexibleInstances GeneralizedNewtypeDeriving - BangPatterns + InstanceSigs + LambdaCase + MultiParamTypeClasses + MultiWayIf + NoImplicitPrelude OverloadedStrings + QuasiQuotes ScopedTypeVariables TemplateHaskell - QuasiQuotes + TupleSections TypeFamilies - NoImplicitPrelude main-is: Main.hs default-language: Haskell2010 @@ -368,6 +378,8 @@ executable graphql-engine ghc-prof-options: -rtsopts -fprof-auto -fno-prof-count-entries ghc-options: -O2 + -foptimal-applicative-do + -fdefer-typed-holes -Wall -Wcompat -Wincomplete-record-updates diff --git a/server/src-exec/Migrate.hs b/server/src-exec/Migrate.hs index 05964267179..722fd7257d6 100644 --- a/server/src-exec/Migrate.hs +++ b/server/src-exec/Migrate.hs @@ -19,7 +19,7 @@ import qualified Data.Yaml.TH as Y import qualified Database.PG.Query as Q curCatalogVer :: T.Text -curCatalogVer = "19" +curCatalogVer = "20" migrateMetadata :: ( MonadTx m @@ -344,6 +344,12 @@ from18To19 = do $(Q.sqlFromFile 
"src-rsr/migrate_from_18_to_19.sql") return () +from19To20 :: (MonadTx m) => m () +from19To20 = do + Q.Discard () <- liftTx $ Q.multiQE defaultTxErrorHandler + $(Q.sqlFromFile "src-rsr/migrate_from_19_to_20.sql") + pure () + migrateCatalog :: ( MonadTx m , CacheRWM m @@ -353,70 +359,39 @@ migrateCatalog , HasSQLGenCtx m ) => UTCTime -> m String -migrateCatalog migrationTime = do - preVer <- getCatalogVersion - if | preVer == curCatalogVer -> - return $ "already at the latest version. current version: " - <> show curCatalogVer - | preVer == "0.8" -> from08ToCurrent - | preVer == "1" -> from1ToCurrent - | preVer == "2" -> from2ToCurrent - | preVer == "3" -> from3ToCurrent - | preVer == "4" -> from4ToCurrent - | preVer == "5" -> from5ToCurrent - | preVer == "6" -> from6ToCurrent - | preVer == "7" -> from7ToCurrent - | preVer == "8" -> from8ToCurrent - | preVer == "9" -> from9ToCurrent - | preVer == "10" -> from10ToCurrent - | preVer == "11" -> from11ToCurrent - | preVer == "12" -> from12ToCurrent - | preVer == "13" -> from13ToCurrent - | preVer == "14" -> from14ToCurrent - | preVer == "15" -> from15ToCurrent - | preVer == "16" -> from16ToCurrent - | preVer == "17" -> from17ToCurrent - | preVer == "18" -> from18ToCurrent - | otherwise -> throw400 NotSupported $ - "unsupported version : " <> preVer +migrateCatalog migrationTime = migrateFrom =<< getCatalogVersion where - from18ToCurrent = from18To19 >> postMigrate - - from17ToCurrent = from17To18 >> from18ToCurrent - - from16ToCurrent = from16To17 >> from17ToCurrent - - from15ToCurrent = from15To16 >> from16ToCurrent - - from14ToCurrent = from14To15 >> from15ToCurrent - - from13ToCurrent = from13To14 >> from14ToCurrent - - from12ToCurrent = from12To13 >> from13ToCurrent - - from11ToCurrent = from11To12 >> from12ToCurrent - - from10ToCurrent = from10To11 >> from11ToCurrent - - from9ToCurrent = from9To10 >> from10ToCurrent - - from8ToCurrent = from8To9 >> from9ToCurrent - - from7ToCurrent = from7To8 >> from8ToCurrent - - from6ToCurrent = from6To7 >> from7ToCurrent - - from5ToCurrent = from5To6 >> from6ToCurrent - - from4ToCurrent = from4To5 >> from5ToCurrent - - from3ToCurrent = from3To4 >> from4ToCurrent - - from2ToCurrent = from2To3 >> from3ToCurrent - - from1ToCurrent = from1To2 >> from2ToCurrent - - from08ToCurrent = from08To1 >> from1ToCurrent + migrateFrom previousVersion + | previousVersion == curCatalogVer = + return $ "already at the latest version. current version: " <> show curCatalogVer + | [] <- neededMigrations = + throw400 NotSupported $ "unsupported version : " <> previousVersion + | otherwise = + traverse_ snd neededMigrations >> postMigrate + where + neededMigrations = dropWhile ((/= previousVersion) . fst) migrations + migrations = + [ ("0.8", from08To1) + , ("1", from1To2) + , ("2", from2To3) + , ("3", from3To4) + , ("4", from4To5) + , ("5", from5To6) + , ("6", from6To7) + , ("7", from7To8) + , ("8", from8To9) + , ("9", from9To10) + , ("10", from10To11) + , ("11", from11To12) + , ("12", from12To13) + , ("13", from13To14) + , ("14", from14To15) + , ("15", from15To16) + , ("16", from16To17) + , ("17", from17To18) + , ("18", from18To19) + , ("19", from19To20) + ] postMigrate = do -- update the catalog version diff --git a/server/src-lib/Control/Lens/Extended.hs b/server/src-lib/Control/Lens/Extended.hs new file mode 100644 index 00000000000..2a1346f523d --- /dev/null +++ b/server/src-lib/Control/Lens/Extended.hs @@ -0,0 +1,19 @@ +module Control.Lens.Extended + ( module Control.Lens + , (^..) + , (^@..) 
+ ) where + +import Control.Lens hiding ((^..), (^@..)) +import Data.Monoid (Endo) +import GHC.Exts (IsList, Item, fromList) + +infixl 8 ^.. +(^..) :: (IsList l, Item l ~ a) => s -> Getting (Endo [a]) s a -> l +v ^.. l = fromList (toListOf l v) +{-# INLINE (^..) #-} + +infixl 8 ^@.. +(^@..) :: (IsList l, Item l ~ (i, a)) => s -> IndexedGetting i (Endo [(i, a)]) s a -> l +v ^@.. l = fromList (itoListOf l v) +{-# INLINE (^@..) #-} diff --git a/server/src-lib/Hasura/Db.hs b/server/src-lib/Hasura/Db.hs index 446e8a3306f..25796f478ea 100644 --- a/server/src-lib/Hasura/Db.hs +++ b/server/src-lib/Hasura/Db.hs @@ -14,6 +14,8 @@ module Hasura.Db , defaultTxErrorHandler ) where +import Control.Monad.Validate + import qualified Data.Aeson.Extended as J import qualified Database.PG.Query as Q @@ -34,9 +36,10 @@ class (MonadError QErr m) => MonadTx m where instance (MonadTx m) => MonadTx (StateT s m) where liftTx = lift . liftTx - instance (MonadTx m) => MonadTx (ReaderT s m) where liftTx = lift . liftTx +instance (MonadTx m) => MonadTx (ValidateT e m) where + liftTx = lift . liftTx data LazyTx e a = LTErr !e diff --git a/server/src-lib/Hasura/Events/Lib.hs b/server/src-lib/Hasura/Events/Lib.hs index 573cc474361..7610d541574 100644 --- a/server/src-lib/Hasura/Events/Lib.hs +++ b/server/src-lib/Hasura/Events/Lib.hs @@ -432,7 +432,7 @@ tryWebhook headers responseTimeout ep webhook = do getEventTriggerInfoFromEvent :: SchemaCache -> Event -> Maybe EventTriggerInfo getEventTriggerInfoFromEvent sc e = let table = eTable e tableInfo = M.lookup table $ scTables sc - in M.lookup ( tmName $ eTrigger e) =<< (tiEventTriggerInfoMap <$> tableInfo) + in M.lookup ( tmName $ eTrigger e) =<< (_tiEventTriggerInfoMap <$> tableInfo) fetchEvents :: Q.TxE QErr [Event] fetchEvents = diff --git a/server/src-lib/Hasura/GraphQL/Execute/LiveQuery.hs b/server/src-lib/Hasura/GraphQL/Execute/LiveQuery.hs index fa442f27f8b..6f43aea375e 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/LiveQuery.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/LiveQuery.hs @@ -166,18 +166,18 @@ toMultiplexedQueryVar => GR.UnresolvedVal -> m S.SQLExp toMultiplexedQueryVar = \case GR.UVPG annPGVal -> - let GR.AnnPGVal varM isNullable colTy colVal = annPGVal + let GR.AnnPGVal varM isNullable _ colVal = annPGVal in case (varM, isNullable) of -- we don't check for nullability as -- this is only used for reusable plans -- the check has to be made before this (Just var, _) -> do - modify $ Map.insert var (colTy, colVal) - return $ fromResVars (PgTypeSimple colTy) + modify $ Map.insert var colVal + return $ fromResVars (PGTypeSimple $ pstType colVal) [ "variables" , G.unName $ G.unVariable var ] - _ -> return $ toTxtValue colTy colVal + _ -> return $ toTxtValue colVal GR.UVSessVar ty sessVar -> return $ fromResVars ty [ "user", T.toLower sessVar] GR.UVSQL sqlExp -> return sqlExp @@ -198,19 +198,19 @@ subsOpFromPGAST , MonadIO m ) - -- | to validate arguments => PGExecCtx + -- ^ to validate arguments - -- | used as part of an identifier in the underlying live query systems - -- to avoid unnecessary load on Postgres where possible -> GH.GQLReqUnparsed + -- ^ used as part of an identifier in the underlying live query systems + -- to avoid unnecessary load on Postgres where possible - -- | variable definitions as seen in the subscription, needed in - -- checking whether the subscription can be multiplexed or not -> [G.VariableDefinition] + -- ^ variable definitions as seen in the subscription, needed in + -- checking whether the subscription can be multiplexed or 
not - -- | The alias and the partially processed live query field -> (G.Alias, GR.QueryRootFldUnresolved) + -- ^ The alias and the partially processed live query field -> m (LiveQueryOp, Maybe SubsPlan) subsOpFromPGAST pgExecCtx reqUnparsed varDefs (fldAls, astUnresolved) = do @@ -273,10 +273,10 @@ validateAnnVarValsOnPg pgExecCtx annVarVals = do Q.Discard _ <- runTx' $ liftTx $ Q.rawQE valPgErrHandler (Q.fromBuilder $ toSQL valSel) [] False - return $ fmap (txtEncodedPGVal . snd) annVarVals + return $ fmap (txtEncodedPGVal . pstValue) annVarVals where - mkExtrs = map (flip S.Extractor Nothing . uncurry toTxtValue) + mkExtrs = map (flip S.Extractor Nothing . toTxtValue) mkValidationSel vars = S.mkSelect { S.selExtr = mkExtrs vars } runTx' tx = do diff --git a/server/src-lib/Hasura/GraphQL/Execute/Query.hs b/server/src-lib/Hasura/GraphQL/Execute/Query.hs index 85e0bc03476..66c4390a0b2 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/Query.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/Query.hs @@ -34,7 +34,7 @@ import Hasura.RQL.Types import Hasura.SQL.Types import Hasura.SQL.Value -type PlanVariables = Map.HashMap G.Variable (Int, PGScalarType) +type PlanVariables = Map.HashMap G.Variable (Int, PGColumnType) type PrepArgMap = IntMap.IntMap Q.PrepArg data PGPlan @@ -63,7 +63,7 @@ instance J.ToJSON RootFieldPlan where RFPRaw encJson -> J.toJSON $ TBS.fromBS encJson RFPPostgres pgPlan -> J.toJSON pgPlan -type VariableTypes = Map.HashMap G.Variable PGScalarType +type VariableTypes = Map.HashMap G.Variable PGColumnType data QueryPlan = QueryPlan @@ -116,9 +116,9 @@ withPlan usrVars (PGPlan q reqVars prepMap) annVars = do where getVar accum (var, (prepNo, _)) = do let varName = G.unName $ G.unVariable var - (_, colVal) <- onNothing (Map.lookup var annVars) $ + colVal <- onNothing (Map.lookup var annVars) $ throw500 $ "missing variable in annVars : " <> varName - let prepVal = binEncoder colVal + let prepVal = toBinaryValue colVal return $ IntMap.insert prepNo prepVal accum -- turn the current plan into a transaction @@ -156,7 +156,7 @@ initPlanningSt = getVarArgNum :: (MonadState PlanningSt m) - => G.Variable -> PGScalarType -> m Int + => G.Variable -> PGColumnType -> m Int getVarArgNum var colTy = do PlanningSt curArgNum vars prepped <- get case Map.lookup var vars of @@ -190,15 +190,15 @@ prepareWithPlan = \case argNum <- case (varM, isNullable) of (Just var, False) -> getVarArgNum var colTy _ -> getNextArgNum - addPrepArg argNum $ binEncoder colVal - return $ toPrepParam argNum colTy + addPrepArg argNum $ toBinaryValue colVal + return $ toPrepParam argNum (pstType colVal) R.UVSessVar ty sessVar -> do let sessVarVal = S.SEOpApp (S.SQLOp "->>") [S.SEPrep 1, S.SELit $ T.toLower sessVar] return $ flip S.SETyAnn (S.mkTypeAnn ty) $ case ty of - PgTypeSimple colTy -> withGeoVal colTy sessVarVal - PgTypeArray _ -> sessVarVal + PGTypeSimple colTy -> withGeoVal colTy sessVarVal + PGTypeArray _ -> sessVarVal R.UVSQL sqlExp -> return sqlExp queryRootName :: Text diff --git a/server/src-lib/Hasura/GraphQL/Explain.hs b/server/src-lib/Hasura/GraphQL/Explain.hs index 70504fde889..7c7ea0b7d93 100644 --- a/server/src-lib/Hasura/GraphQL/Explain.hs +++ b/server/src-lib/Hasura/GraphQL/Explain.hs @@ -61,8 +61,8 @@ resolveVal userInfo = \case RS.UVSessVar ty sessVar -> do sessVarVal <- S.SELit <$> getSessVarVal userInfo sessVar return $ flip S.SETyAnn (S.mkTypeAnn ty) $ case ty of - PgTypeSimple colTy -> withGeoVal colTy sessVarVal - PgTypeArray _ -> sessVarVal + PGTypeSimple colTy -> withGeoVal colTy sessVarVal 
+ PGTypeArray _ -> sessVarVal RS.UVSQL sqlExp -> return sqlExp getSessVarVal diff --git a/server/src-lib/Hasura/GraphQL/Resolve/BoolExp.hs b/server/src-lib/Hasura/GraphQL/Resolve/BoolExp.hs index 0e358e01279..bff7b79480d 100644 --- a/server/src-lib/Hasura/GraphQL/Resolve/BoolExp.hs +++ b/server/src-lib/Hasura/GraphQL/Resolve/BoolExp.hs @@ -22,7 +22,7 @@ type OpExp = OpExpG UnresolvedVal parseOpExps :: (MonadError QErr m) - => PGScalarType -> AnnInpVal -> m [OpExp] + => PGColumnType -> AnnInpVal -> m [OpExp] parseOpExps colTy annVal = do opExpsM <- flip withObjectM annVal $ \nt objM -> forM objM $ \obj -> forM (OMap.toList obj) $ \(k, v) -> @@ -56,8 +56,8 @@ parseOpExps colTy annVal = do "_contained_in" -> fmap AContainedIn <$> asOpRhs v "_has_key" -> fmap AHasKey <$> asOpRhs v - "_has_keys_any" -> fmap AHasKeysAny <$> asPGArray PGText v - "_has_keys_all" -> fmap AHasKeysAll <$> asPGArray PGText v + "_has_keys_any" -> fmap AHasKeysAny <$> asPGArray (PGColumnScalar PGText) v + "_has_keys_all" -> fmap AHasKeysAll <$> asPGArray (PGColumnScalar PGText) v -- geometry/geography type related operators "_st_contains" -> fmap ASTContains <$> asOpRhs v @@ -77,13 +77,18 @@ parseOpExps colTy annVal = do <> showName k return $ catMaybes $ fromMaybe [] opExpsM where - asOpRhs = fmap (fmap UVPG) . asPGColValM + asOpRhs = fmap (fmap UVPG) . asPGColumnValueM asPGArray rhsTy v = do - valsM <- parseMany asPGColVal v + valsM <- parseMany asPGColumnValue v forM valsM $ \vals -> do - let arrayExp = S.SEArray $ map (txtEncoder . _apvValue) vals - return $ UVSQL $ S.SETyAnn arrayExp $ S.mkTypeAnn $ PgTypeArray rhsTy + let arrayExp = S.SEArray $ map (txtEncoder . pstValue . _apvValue) vals + return $ UVSQL $ S.SETyAnn arrayExp $ S.mkTypeAnn $ + -- Safe here because asPGColumnValue ensured all the values are of the right type, but if the + -- list is empty, we don’t actually have a scalar type to use, so we need to use + -- unsafePGColumnToRepresentation to create it. (It would be nice to refactor things to + -- somehow get rid of this.) 
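+ -- (Concretely: for the text-typed _has_keys_any/_has_keys_all operators
+ -- above, this renders roughly as ARRAY['k1','k2']::text[]; the
+ -- PGTypeArray annotation is what supplies the trailing ::text[].)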
+ PGTypeArray (unsafePGColumnToRepresentation rhsTy) resolveIsNull v = case _aivValue v of AGScalar _ Nothing -> return Nothing @@ -95,18 +100,18 @@ parseOpExps colTy annVal = do parseAsSTDWithinObj obj = do distanceVal <- onNothing (OMap.lookup "distance" obj) $ throw500 "expected \"distance\" input field in st_d_within" - dist <- UVPG <$> asPGColVal distanceVal + dist <- UVPG <$> asPGColumnValue distanceVal fromVal <- onNothing (OMap.lookup "from" obj) $ throw500 "expected \"from\" input field in st_d_within" - from <- UVPG <$> asPGColVal fromVal + from <- UVPG <$> asPGColumnValue fromVal case colTy of - PGGeography -> do + PGColumnScalar PGGeography -> do useSpheroidVal <- onNothing (OMap.lookup "use_spheroid" obj) $ throw500 "expected \"use_spheroid\" input field in st_d_within" - useSpheroid <- UVPG <$> asPGColVal useSpheroidVal + useSpheroid <- UVPG <$> asPGColumnValue useSpheroidVal return $ ASTDWithinGeog $ DWithinGeogOp dist from useSpheroid - PGGeometry -> + PGColumnScalar PGGeometry -> return $ ASTDWithinGeom $ DWithinGeomOp dist from _ -> throw500 "expected PGGeometry/PGGeography column for st_d_within" @@ -117,7 +122,7 @@ parseCastExpression = withObjectM $ \_ objM -> forM objM $ \obj -> do targetExps <- forM (OMap.toList obj) $ \(targetTypeName, castedComparisonExpressionInput) -> do let targetType = txtToPgColTy $ G.unName targetTypeName - castedComparisonExpressions <- parseOpExps targetType castedComparisonExpressionInput + castedComparisonExpressions <- parseOpExps (PGColumnScalar targetType) castedComparisonExpressionInput return (targetType, castedComparisonExpressions) return $ Map.fromList targetExps diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Context.hs b/server/src-lib/Hasura/GraphQL/Resolve/Context.hs index 833bcf7d45d..c38e5e0b0cf 100644 --- a/server/src-lib/Hasura/GraphQL/Resolve/Context.hs +++ b/server/src-lib/Hasura/GraphQL/Resolve/Context.hs @@ -112,10 +112,8 @@ withArgM args arg f = prependArgsInPath $ nameAsPath arg $ type PrepArgs = Seq.Seq Q.PrepArg -prepare - :: (MonadState PrepArgs m) => AnnPGVal -> m S.SQLExp -prepare (AnnPGVal _ _ colTy colVal) = - prepareColVal colTy colVal +prepare :: (MonadState PrepArgs m) => AnnPGVal -> m S.SQLExp +prepare (AnnPGVal _ _ _ scalarValue) = prepareColVal scalarValue resolveValPrep :: (MonadState PrepArgs m) @@ -136,15 +134,14 @@ withPrepArgs m = runStateT m Seq.empty prepareColVal :: (MonadState PrepArgs m) - => PGScalarType -> PGColValue -> m S.SQLExp -prepareColVal colTy colVal = do + => PGScalarTyped PGColValue -> m S.SQLExp +prepareColVal (PGScalarTyped scalarType colVal) = do preparedArgs <- get put (preparedArgs Seq.|> binEncoder colVal) - return $ toPrepParam (Seq.length preparedArgs + 1) colTy + return $ toPrepParam (Seq.length preparedArgs + 1) scalarType txtConverter :: Applicative f => AnnPGVal -> f S.SQLExp -txtConverter (AnnPGVal _ _ a b) = - pure $ toTxtValue a b +txtConverter (AnnPGVal _ _ _ scalarValue) = pure $ toTxtValue scalarValue withSelSet :: (Monad m) => SelSet -> (Field -> m a) -> m [(Text, a)] withSelSet selSet f = diff --git a/server/src-lib/Hasura/GraphQL/Resolve/InputValue.hs b/server/src-lib/Hasura/GraphQL/Resolve/InputValue.hs index 8015b102afd..dd3c11d8495 100644 --- a/server/src-lib/Hasura/GraphQL/Resolve/InputValue.hs +++ b/server/src-lib/Hasura/GraphQL/Resolve/InputValue.hs @@ -1,8 +1,9 @@ module Hasura.GraphQL.Resolve.InputValue ( withNotNull , tyMismatch - , asPGColValM - , asPGColVal + , asPGColumnTypeAndValueM + , asPGColumnValueM + , asPGColumnValue , asEnumVal , 
asEnumValM , withObject @@ -21,10 +22,12 @@ import Hasura.Prelude import qualified Language.GraphQL.Draft.Syntax as G +import qualified Hasura.RQL.Types as RQL + import Hasura.GraphQL.Resolve.Context import Hasura.GraphQL.Validate.Types import Hasura.RQL.Types -import Hasura.SQL.Types ((<>>)) +import Hasura.SQL.Types import Hasura.SQL.Value withNotNull @@ -41,41 +44,43 @@ tyMismatch expectedTy v = getAnnInpValKind (_aivValue v) <> " for value of type " <> G.showGT (_aivType v) -asPGColValM +asPGColumnTypeAndValueM :: (MonadError QErr m) - => AnnInpVal -> m (Maybe AnnPGVal) -asPGColValM annInpVal = case val of - AGScalar colTy valM -> - return $ fmap (AnnPGVal varM (G.isNullable ty) colTy) valM - _ -> - tyMismatch "pgvalue" annInpVal - where - AnnInpVal ty varM val = annInpVal - -asPGColVal - :: (MonadError QErr m) - => AnnInpVal -> m AnnPGVal -asPGColVal v = case _aivValue v of - AGScalar colTy (Just val) -> - return $ AnnPGVal (_aivVariable v) (G.isNullable (_aivType v)) colTy val - AGScalar colTy Nothing -> throw500 $ "unexpected null for ty " <>> colTy + => AnnInpVal + -> m (PGColumnType, PGScalarTyped (Maybe PGColValue)) +asPGColumnTypeAndValueM v = case _aivValue v of + AGScalar colTy val -> pure (PGColumnScalar colTy, PGScalarTyped colTy val) + AGEnum _ (AGEReference reference maybeValue) -> do + let maybeScalarValue = PGValText . RQL.getEnumValue <$> maybeValue + pure (PGColumnEnumReference reference, PGScalarTyped PGText maybeScalarValue) _ -> tyMismatch "pgvalue" v -asEnumVal - :: (MonadError QErr m) - => AnnInpVal -> m (G.NamedType, G.EnumValue) -asEnumVal v = case _aivValue v of - AGEnum ty (Just val) -> return (ty, val) - AGEnum ty Nothing -> - throw500 $ "unexpected null for ty " <> showNamedTy ty - _ -> tyMismatch "enum" v +asPGColumnTypeAndAnnValueM :: (MonadError QErr m) => AnnInpVal -> m (PGColumnType, Maybe AnnPGVal) +asPGColumnTypeAndAnnValueM v = do + (columnType, scalarValueM) <- asPGColumnTypeAndValueM v + let mkAnnPGColVal = AnnPGVal (_aivVariable v) (G.isNullable (_aivType v)) columnType + pure (columnType, mkAnnPGColVal <$> sequence scalarValueM) -asEnumValM - :: (MonadError QErr m) - => AnnInpVal -> m (G.NamedType, Maybe G.EnumValue) +asPGColumnValueM :: (MonadError QErr m) => AnnInpVal -> m (Maybe AnnPGVal) +asPGColumnValueM = fmap snd . asPGColumnTypeAndAnnValueM + +asPGColumnValue :: (MonadError QErr m) => AnnInpVal -> m AnnPGVal +asPGColumnValue v = do + (columnType, annPGValM) <- asPGColumnTypeAndAnnValueM v + onNothing annPGValM $ throw500 ("unexpected null for type " <>> columnType) + +-- | Note: only handles “synthetic” enums (see 'EnumValuesInfo'). Enum table references are handled +-- by 'asPGColumnTypeAndValueM' and its variants. +asEnumVal :: (MonadError QErr m) => AnnInpVal -> m (G.NamedType, G.EnumValue) +asEnumVal = asEnumValM >=> \case + (ty, Just val) -> pure (ty, val) + (ty, Nothing) -> throw500 $ "unexpected null for ty " <> showNamedTy ty + +-- | Like 'asEnumVal', only handles “synthetic” enums. +asEnumValM :: (MonadError QErr m) => AnnInpVal -> m (G.NamedType, Maybe G.EnumValue) asEnumValM v = case _aivValue v of - AGEnum ty valM -> return (ty, valM) - _ -> tyMismatch "enum" v + AGEnum ty (AGESynthetic valM) -> return (ty, valM) + _ -> tyMismatch "enum" v withObject :: (MonadError QErr m) @@ -144,12 +149,12 @@ asPGColText :: (MonadError QErr m) => AnnInpVal -> m Text asPGColText val = do - pgColVal <- _apvValue <$> asPGColVal val + pgColVal <- pstValue . 
_apvValue <$> asPGColumnValue val onlyText pgColVal asPGColTextM :: (MonadError QErr m) => AnnInpVal -> m (Maybe Text) asPGColTextM val = do - pgColValM <- fmap _apvValue <$> asPGColValM val + pgColValM <- fmap (pstValue . _apvValue) <$> asPGColumnValueM val mapM onlyText pgColValM diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Insert.hs b/server/src-lib/Hasura/GraphQL/Resolve/Insert.hs index aa89ded5c46..1fee4e5f5d6 100644 --- a/server/src-lib/Hasura/GraphQL/Resolve/Insert.hs +++ b/server/src-lib/Hasura/GraphQL/Resolve/Insert.hs @@ -5,7 +5,6 @@ where import Data.Has import Hasura.EncJSON import Hasura.Prelude -import Hasura.Server.Utils import qualified Data.Aeson as J import qualified Data.Aeson.Casing as J @@ -19,7 +18,6 @@ import qualified Language.GraphQL.Draft.Syntax as G import qualified Database.PG.Query as Q import qualified Hasura.RQL.DML.Insert as RI import qualified Hasura.RQL.DML.Returning as RR -import qualified Hasura.RQL.GBoolExp as RB import qualified Hasura.SQL.DML as S @@ -71,7 +69,7 @@ data RelIns a type ObjRelIns = RelIns SingleObjIns type ArrRelIns = RelIns MultiObjIns -type PGColWithValue = (PGCol, PGColValue) +type PGColWithValue = (PGCol, PGScalarTyped PGColValue) data CTEExp = CTEExp @@ -81,7 +79,7 @@ data CTEExp data AnnInsObj = AnnInsObj - { _aioColumns :: ![(PGCol, PGScalarType, PGColValue)] + { _aioColumns :: ![PGColWithValue] , _aioObjRels :: ![ObjRelIns] , _aioArrRels :: ![ArrRelIns] } deriving (Show, Eq) @@ -104,12 +102,17 @@ traverseInsObj -> m AnnInsObj traverseInsObj rim (gName, annVal) defVal@(AnnInsObj cols objRels arrRels) = case _aivValue annVal of - AGScalar colty mColVal -> do - let col = PGCol $ G.unName gName - colVal = fromMaybe (PGNull colty) mColVal - return (AnnInsObj ((col, colty, colVal):cols) objRels arrRels) + AGScalar{} -> parseValue + AGEnum{} -> parseValue + _ -> parseObject + where + parseValue = do + (_, PGScalarTyped scalarType maybeScalarValue) <- asPGColumnTypeAndValueM annVal + let columnName = PGCol $ G.unName gName + scalarValue = fromMaybe (PGNull scalarType) maybeScalarValue + pure $ AnnInsObj ((columnName, PGScalarTyped scalarType scalarValue):cols) objRels arrRels - _ -> do + parseObject = do objM <- asObjectM annVal -- if relational insert input is 'null' then ignore -- return default value @@ -124,8 +127,7 @@ traverseInsObj rim (gName, annVal) defVal@(AnnInsObj cols objRels arrRels) = let rTable = riRTable relInfo InsCtx rtView rtCols rtDefVals rtRelInfoMap rtUpdPerm <- getInsCtx rTable - rtDefValsRes <- mapM (convPartialSQLExp sessVarFromCurrentSetting) - rtDefVals + rtDefValsRes <- mapM (convPartialSQLExp sessVarFromCurrentSetting) rtDefVals withPathK (G.unName gName) $ case riType relInfo of ObjRel -> do @@ -185,11 +187,11 @@ parseOnConflict tn updFiltrM val = withPathK "on_conflict" $ toSQLExps :: (MonadError QErr m, MonadState PrepArgs m) - => [(PGCol, PGScalarType, PGColValue)] + => [PGColWithValue] -> m [(PGCol, S.SQLExp)] toSQLExps cols = - forM cols $ \(c, ty, v) -> do - prepExp <- prepareColVal ty v + forM cols $ \(c, v) -> do + prepExp <- prepareColVal v return (c, prepExp) mkSQLRow :: Map.HashMap PGCol S.SQLExp -> [(PGCol, S.SQLExp)] -> [S.SQLExp] @@ -200,7 +202,7 @@ mkInsertQ :: MonadError QErr m => QualifiedTable -> Maybe RI.ConflictClauseP1 - -> [(PGCol, PGScalarType, PGColValue)] + -> [PGColWithValue] -> [PGCol] -> Map.HashMap PGCol S.SQLExp -> RoleName @@ -232,13 +234,13 @@ fetchFromColVals => ColVals -> [PGColInfo] -> (PGColInfo -> a) - -> m [(a, PGColValue)] + -> m [(a, PGScalarTyped PGColValue)] 
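+-- Note: each looked-up value is re-parsed at the column's type via
+-- 'parsePGScalarValue' (see below), so results come back tagged with
+-- their scalar types.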
fetchFromColVals colVal reqCols f = forM reqCols $ \ci -> do let valM = Map.lookup (pgiName ci) colVal val <- onNothing valM $ throw500 $ "column " <> pgiName ci <<> " not found in given colVal" - pgColVal <- RB.pgValParser (pgiType ci) val + pgColVal <- parsePGScalarValue (pgiType ci) val return (f ci, pgColVal) mkSelCTE @@ -365,7 +367,7 @@ insertObj -> Q.TxE QErr (Int, CTEExp) insertObj strfyNum role tn singleObjIns addCols = do -- validate insert - validateInsert (map _1 cols) (map _riRelInfo objRels) $ map fst addCols + validateInsert (map fst cols) (map _riRelInfo objRels) $ map fst addCols -- insert all object relations and fetch this insert dependent column values objInsRes <- forM objRels $ insertObjRel strfyNum role @@ -373,9 +375,7 @@ insertObj strfyNum role tn singleObjIns addCols = do -- prepare final insert columns let objRelAffRows = sum $ map fst objInsRes objRelDeterminedCols = concatMap snd objInsRes - objRelInsCols = mkPGColWithTypeAndVal allCols objRelDeterminedCols - addInsCols = mkPGColWithTypeAndVal allCols addCols - finalInsCols = cols <> objRelInsCols <> addInsCols + finalInsCols = cols <> objRelDeterminedCols <> addCols -- prepare insert query as with expression (CTEExp cte insPArgs, ccM) <- @@ -435,10 +435,9 @@ insertMultipleObjects strfyNum role tn multiObjIns addCols mutFlds errP = -- insert all column rows at one go withoutRelsInsert = withErrPath $ do indexedForM_ insCols $ \insCol -> - validateInsert (map _1 insCol) [] $ map fst addCols + validateInsert (map fst insCol) [] $ map fst addCols - let addColsWithType = mkPGColWithTypeAndVal tableColInfos addCols - withAddCols = flip map insCols $ union addColsWithType + let withAddCols = flip map insCols $ union addCols tableCols = map pgiName tableColInfos (sqlRows, prepArgs) <- flip runStateT Seq.Empty $ do @@ -533,10 +532,3 @@ mergeListsWith [] _ _ _ = [] mergeListsWith (x:xs) l b f = case find (b x) l of Nothing -> mergeListsWith xs l b f Just y -> f x y : mergeListsWith xs l b f - -mkPGColWithTypeAndVal :: [PGColInfo] -> [PGColWithValue] - -> [(PGCol, PGScalarType, PGColValue)] -mkPGColWithTypeAndVal pgColInfos pgColWithVal = - mergeListsWith pgColInfos pgColWithVal - (\ci (c, _) -> pgiName ci == c) - (\ci (c, v) -> (c, pgiType ci, v)) diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Introspect.hs b/server/src-lib/Hasura/GraphQL/Resolve/Introspect.hs index a095b641b2a..29dfb0fb72f 100644 --- a/server/src-lib/Hasura/GraphQL/Resolve/Introspect.hs +++ b/server/src-lib/Hasura/GraphQL/Resolve/Introspect.hs @@ -6,19 +6,20 @@ module Hasura.GraphQL.Resolve.Introspect import Data.Has import Hasura.Prelude -import qualified Data.Aeson as J -import qualified Data.HashMap.Strict as Map -import qualified Data.HashSet as Set -import qualified Data.Text as T -import qualified Language.GraphQL.Draft.Syntax as G +import qualified Data.Aeson as J +import qualified Data.HashMap.Strict as Map +import qualified Data.HashSet as Set +import qualified Data.Text as T +import qualified Language.GraphQL.Draft.Syntax as G import Hasura.GraphQL.Resolve.Context import Hasura.GraphQL.Resolve.InputValue -import Hasura.GraphQL.Validate.InputValue import Hasura.GraphQL.Validate.Context import Hasura.GraphQL.Validate.Field +import Hasura.GraphQL.Validate.InputValue import Hasura.GraphQL.Validate.Types import Hasura.RQL.Types +import Hasura.SQL.Types import Hasura.SQL.Value data TypeKind @@ -163,7 +164,7 @@ enumTypeR (EnumTyInfo descM n vals _) fld = "name" -> retJ $ namedTyToTxt n "description" -> retJ $ fmap G.unDescription descM 
"enumValues" -> fmap J.toJSON $ mapM (enumValueR subFld) $ - sortOn _eviVal $ Map.elems vals + sortOn _eviVal $ Map.elems (normalizeEnumValues vals) _ -> return J.Null -- 4.5.2.6 @@ -339,7 +340,7 @@ typeR => Field -> m J.Value typeR fld = do name <- withArg args "name" $ \arg -> do - pgColVal <- _apvValue <$> asPGColVal arg + pgColVal <- pstValue . _apvValue <$> asPGColumnValue arg case pgColVal of PGValText t -> return t _ -> throw500 "expecting string for name arg of __type" diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Mutation.hs b/server/src-lib/Hasura/GraphQL/Resolve/Mutation.hs index a88308ffb45..b34bc3d4445 100644 --- a/server/src-lib/Hasura/GraphQL/Resolve/Mutation.hs +++ b/server/src-lib/Hasura/GraphQL/Resolve/Mutation.hs @@ -18,8 +18,8 @@ import qualified Hasura.RQL.DML.Delete as RD import qualified Hasura.RQL.DML.Returning as RR import qualified Hasura.RQL.DML.Update as RU -import qualified Hasura.SQL.DML as S import qualified Hasura.RQL.DML.Select as RS +import qualified Hasura.SQL.DML as S import Hasura.EncJSON import Hasura.GraphQL.Resolve.BoolExp @@ -60,7 +60,7 @@ convertRowObj convertRowObj val = flip withObject val $ \_ obj -> forM (OMap.toList obj) $ \(k, v) -> do - prepExpM <- fmap UVPG <$> asPGColValM v + prepExpM <- fmap UVPG <$> asPGColumnValueM v let prepExp = fromMaybe (UVSQL $ S.SEUnsafe "NULL") prepExpM return (PGCol $ G.unName k, prepExp) @@ -83,7 +83,7 @@ convObjWithOp => ApplySQLOp -> AnnInpVal -> m [(PGCol, UnresolvedVal)] convObjWithOp opFn val = flip withObject val $ \_ obj -> forM (OMap.toList obj) $ \(k, v) -> do - colVal <- _apvValue <$> asPGColVal v + colVal <- pstValue . _apvValue <$> asPGColumnValue v let pgCol = PGCol $ G.unName k -- TODO: why are we using txtEncoder here? encVal = txtEncoder colVal @@ -95,8 +95,8 @@ convDeleteAtPathObj => AnnInpVal -> m [(PGCol, UnresolvedVal)] convDeleteAtPathObj val = flip withObject val $ \_ obj -> forM (OMap.toList obj) $ \(k, v) -> do - vals <- flip withArray v $ \_ annVals -> mapM asPGColVal annVals - let valExps = map (txtEncoder . _apvValue) vals + vals <- flip withArray v $ \_ annVals -> mapM asPGColumnValue annVals + let valExps = map (txtEncoder . pstValue . _apvValue) vals pgCol = PGCol $ G.unName k annEncVal = S.SETyAnn (S.SEArray valExps) S.textArrTypeAnn sqlExp = S.SEOpApp S.jsonbDeleteAtPathOp diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Select.hs b/server/src-lib/Hasura/GraphQL/Resolve/Select.hs index b5f03777b49..b5d568999c9 100644 --- a/server/src-lib/Hasura/GraphQL/Resolve/Select.hs +++ b/server/src-lib/Hasura/GraphQL/Resolve/Select.hs @@ -115,7 +115,7 @@ parseTableArgs args = do ordByExpML <- withArgM args "order_by" parseOrderBy let ordByExpM = NE.nonEmpty =<< ordByExpML limitExpM <- withArgM args "limit" parseLimit - offsetExpM <- withArgM args "offset" $ asPGColVal >=> txtConverter + offsetExpM <- withArgM args "offset" $ asPGColumnValue >=> txtConverter distOnColsML <- withArgM args "distinct_on" parseColumns let distOnColsM = NE.nonEmpty =<< distOnColsML mapM_ (validateDistOn ordByExpM) distOnColsM @@ -255,7 +255,7 @@ parseOrderByEnum = \case parseLimit :: ( MonadError QErr m ) => AnnInpVal -> m Int parseLimit v = do - pgColVal <- _apvValue <$> asPGColVal v + pgColVal <- pstValue . _apvValue <$> asPGColumnValue v limit <- maybe noIntErr return $ pgColValueToInt pgColVal -- validate int value onlyPositiveInt limit @@ -273,7 +273,7 @@ pgColValToBoolExp pgColValToBoolExp colArgMap colValMap = do colExps <- forM colVals $ \(name, val) -> BoolFld <$> do - opExp <- AEQ True . 
UVPG <$> asPGColVal val + opExp <- AEQ True . UVPG <$> asPGColumnValue val colInfo <- onNothing (Map.lookup name colArgMap) $ throw500 $ "column name " <> showName name <> " not found in column arguments map" @@ -341,7 +341,7 @@ convertCount args = do maybe (return S.CTStar) (mkCType isDistinct) columnsM where parseDistinct v = do - val <- _apvValue <$> asPGColVal v + val <- pstValue . _apvValue <$> asPGColumnValue v case val of PGValBoolean b -> return b _ -> @@ -417,7 +417,7 @@ parseFunctionArgs parseFunctionArgs argSeq val = fmap catMaybes $ flip withObject val $ \_ obj -> fmap toList $ forM argSeq $ \(FuncArgItem argName) -> - forM (OMap.lookup argName obj) $ fmap (maybe nullSQL UVPG) . asPGColValM + forM (OMap.lookup argName obj) $ fmap (maybe nullSQL UVPG) . asPGColumnValueM where nullSQL = UVSQL $ S.SEUnsafe "NULL" diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Types.hs b/server/src-lib/Hasura/GraphQL/Resolve/Types.hs index a79d4698ecf..992e266a54b 100644 --- a/server/src-lib/Hasura/GraphQL/Resolve/Types.hs +++ b/server/src-lib/Hasura/GraphQL/Resolve/Types.hs @@ -8,6 +8,7 @@ import qualified Data.Text as T import qualified Language.GraphQL.Draft.Syntax as G import Hasura.RQL.Types.BoolExp +import Hasura.RQL.Types.Column import Hasura.RQL.Types.Common import Hasura.RQL.Types.Permission import Hasura.SQL.Types @@ -141,8 +142,12 @@ data AnnPGVal = AnnPGVal { _apvVariable :: !(Maybe G.Variable) , _apvIsNullable :: !Bool - , _apvType :: !PGScalarType - , _apvValue :: !PGColValue + , _apvType :: !PGColumnType + -- ^ Note: '_apvValue' is a @'PGScalarTyped' 'PGColValue'@, so it includes its type as a + -- 'PGScalarType'. However, we /also/ need to keep the original 'PGColumnType' information around + -- in case we need to re-parse a new value with its type because we’re reusing a cached query + -- plan. + , _apvValue :: !(PGScalarTyped PGColValue) } deriving (Show, Eq) type PrepFn m = AnnPGVal -> m S.SQLExp @@ -156,7 +161,7 @@ partialSQLExpToUnresolvedVal = \case -- A value that will be converted to an sql expression eventually data UnresolvedVal -- From a session variable - = UVSessVar !PgType !SessVar + = UVSessVar !(PGType PGScalarType) !SessVar -- This is postgres | UVPG !AnnPGVal -- This is a full resolved sql expression diff --git a/server/src-lib/Hasura/GraphQL/Schema.hs b/server/src-lib/Hasura/GraphQL/Schema.hs index c5c3054a51b..747eec20f72 100644 --- a/server/src-lib/Hasura/GraphQL/Schema.hs +++ b/server/src-lib/Hasura/GraphQL/Schema.hs @@ -17,6 +17,7 @@ module Hasura.GraphQL.Schema , checkSchemaConflicts ) where +import Control.Lens.Extended hiding (op) import qualified Data.HashMap.Strict as Map import qualified Data.HashSet as Set @@ -44,16 +45,16 @@ import Hasura.GraphQL.Schema.OrderBy import Hasura.GraphQL.Schema.Select import Hasura.GraphQL.Schema.Merge -getInsPerm :: TableInfo -> RoleName -> Maybe InsPermInfo +getInsPerm :: TableInfo PGColInfo -> RoleName -> Maybe InsPermInfo getInsPerm tabInfo role | role == adminRole = _permIns $ mkAdminRolePermInfo tabInfo | otherwise = Map.lookup role rolePermInfoMap >>= _permIns where - rolePermInfoMap = tiRolePermInfoMap tabInfo + rolePermInfoMap = _tiRolePermInfoMap tabInfo getTabInfo :: MonadError QErr m - => TableCache -> QualifiedTable -> m TableInfo + => TableCache PGColInfo -> QualifiedTable -> m (TableInfo PGColInfo) getTabInfo tc t = onNothing (Map.lookup t tc) $ throw500 $ "table not found: " <>> t @@ -67,32 +68,32 @@ isValidCol = isValidName . G.Name . 
getPGColTxt isValidRel :: ToTxt a => RelName -> QualifiedObject a -> Bool isValidRel rn rt = isValidName (mkRelName rn) && isValidObjectName rt -isValidField :: FieldInfo -> Bool +isValidField :: FieldInfo PGColInfo -> Bool isValidField = \case FIColumn (PGColInfo col _ _) -> isValidCol col FIRelationship (RelInfo rn _ _ remTab _) -> isValidRel rn remTab upsertable :: [ConstraintName] -> Bool -> Bool -> Bool -upsertable uniqueOrPrimaryCons isUpsertAllowed view = - not (null uniqueOrPrimaryCons) && isUpsertAllowed && not view +upsertable uniqueOrPrimaryCons isUpsertAllowed isAView = + not (null uniqueOrPrimaryCons) && isUpsertAllowed && not isAView -toValidFieldInfos :: FieldInfoMap -> [FieldInfo] +toValidFieldInfos :: FieldInfoMap PGColInfo -> [FieldInfo PGColInfo] toValidFieldInfos = filter isValidField . Map.elems -validPartitionFieldInfoMap :: FieldInfoMap -> ([PGColInfo], [RelInfo]) +validPartitionFieldInfoMap :: FieldInfoMap PGColInfo -> ([PGColInfo], [RelInfo]) validPartitionFieldInfoMap = partitionFieldInfos . toValidFieldInfos -getValidCols :: FieldInfoMap -> [PGColInfo] +getValidCols :: FieldInfoMap PGColInfo -> [PGColInfo] getValidCols = fst . validPartitionFieldInfoMap -getValidRels :: FieldInfoMap -> [RelInfo] +getValidRels :: FieldInfoMap PGColInfo -> [RelInfo] getValidRels = snd . validPartitionFieldInfoMap mkValidConstraints :: [ConstraintName] -> [ConstraintName] mkValidConstraints = filter (isValidName . G.Name . getConstraintTxt) -isRelNullable :: FieldInfoMap -> RelInfo -> Bool +isRelNullable :: FieldInfoMap PGColInfo -> RelInfo -> Bool isRelNullable fim ri = isNullable where lCols = map fst $ riMapping ri @@ -113,24 +114,26 @@ isAggFld = flip elem (numAggOps <> compAggOps) mkGCtxRole' :: QualifiedTable - -- insert permission -> Maybe ([PGColInfo], RelationInfoMap) - -- select permission + -- ^ insert permission -> Maybe (Bool, [SelField]) - -- update cols + -- ^ select permission -> Maybe [PGColInfo] - -- delete cols + -- ^ update cols -> Maybe () - -- primary key columns + -- ^ delete cols -> [PGColInfo] - -- constraints + -- ^ primary key columns -> [ConstraintName] + -- ^ constraints -> Maybe ViewInfo - -- all functions -> [FunctionInfo] + -- ^ all functions + -> Maybe EnumValues + -- ^ present iff this table is an enum table (see "Hasura.RQL.Schema.Enum") -> TyAgg mkGCtxRole' tn insPermM selPermM updColsM - delPermM pkeyCols constraints viM funcs = + delPermM pkeyCols constraints viM funcs enumValuesM = TyAgg (mkTyInfoMap allTypes) fieldMap scalars ordByCtx where @@ -163,6 +166,7 @@ mkGCtxRole' tn insPermM selPermM updColsM , TIInpObj <$> mutHelper viIsUpdatable updIncInpObjM , TIObj <$> mutRespObjM , TIEnum <$> selColInpTyM + , TIEnum <$> tableEnumTypeM ] mutHelper :: (ViewInfo -> Bool) -> Maybe a -> Maybe a @@ -172,7 +176,7 @@ mkGCtxRole' tn insPermM selPermM updColsM [ insInpObjFldsM, updSetInpObjFldsM , boolExpInpObjFldsM , selObjFldsM ] - scalars = Set.unions [selByPkScalarSet, funcArgScalarSet] + scalars = selByPkScalarSet <> funcArgScalarSet -- helper mkColFldMap ty cols = Map.fromList $ flip map cols $ @@ -210,8 +214,7 @@ mkGCtxRole' tn insPermM selPermM updColsM -- funcargs input type funcArgInpObjs = mapMaybe mkFuncArgsInp funcs -- funcArgCtx = Map.unions funcArgCtxs - funcArgScalarSet = Set.fromList $ - concatMap (map faType . toList . fiInputArgs) funcs + funcArgScalarSet = funcs ^.. folded.to fiInputArgs.folded.to faType -- helper mkFldMap ty = Map.fromList . 
concatMap (mkFld ty) @@ -260,32 +263,36 @@ mkGCtxRole' tn insPermM selPermM updColsM getCompCols = onlyComparableCols . lefts onlyFloat = const $ mkScalarTy PGFloat - mkTypeMaker "sum" = mkScalarTy + mkTypeMaker "sum" = mkColumnType mkTypeMaker _ = onlyFloat mkColAggFldsObjs flds = let numCols = getNumCols flds compCols = getCompCols flds mkNumObjFld n = mkTableColAggFldsObj tn n (mkTypeMaker n) numCols - mkCompObjFld n = mkTableColAggFldsObj tn n mkScalarTy compCols + mkCompObjFld n = mkTableColAggFldsObj tn n mkColumnType compCols numFldsObjs = bool (map mkNumObjFld numAggOps) [] $ null numCols compFldsObjs = bool (map mkCompObjFld compAggOps) [] $ null compCols in numFldsObjs <> compFldsObjs -- the fields used in table object selObjFldsM = mkFldMap (mkTableTy tn) <$> selFldsM -- the scalar set for table_by_pk arguments - selByPkScalarSet = Set.fromList $ map pgiType pkeyCols + selByPkScalarSet = pkeyCols ^.. folded.to pgiType._PGColumnScalar ordByInpCtxM = mkOrdByInpObj tn <$> selFldsM (ordByInpObjM, ordByCtxM) = case ordByInpCtxM of Just (a, b) -> (Just a, Just b) Nothing -> (Nothing, Nothing) + tableEnumTypeM = enumValuesM <&> \enumValues -> + mkHsraEnumTyInfo Nothing (mkTableEnumType tn) $ + EnumValuesReference (EnumReference tn enumValues) + getRootFldsRole' :: QualifiedTable -> [PGCol] -> [ConstraintName] - -> FieldInfoMap + -> FieldInfoMap PGColInfo -> [FunctionInfo] -> Maybe ([T.Text], Bool) -- insert perm -> Maybe (AnnBoolExpPartialSQL, Maybe Int, [T.Text], Bool) -- select filter @@ -372,15 +379,15 @@ getRootFldsRole' tn primCols constraints fields funcs insM selM updM delM viM = procFuncArgs (fiInputArgs fi) $ \_ t -> FuncArgItem $ G.Name t -getSelPermission :: TableInfo -> RoleName -> Maybe SelPermInfo +getSelPermission :: TableInfo PGColInfo -> RoleName -> Maybe SelPermInfo getSelPermission tabInfo role = - Map.lookup role (tiRolePermInfoMap tabInfo) >>= _permSel + Map.lookup role (_tiRolePermInfoMap tabInfo) >>= _permSel getSelPerm :: (MonadError QErr m) - => TableCache + => TableCache PGColInfo -- all the fields of a table - -> FieldInfoMap + -> FieldInfoMap PGColInfo -- role and its permission -> RoleName -> SelPermInfo -> m (Bool, [SelField]) @@ -406,8 +413,8 @@ getSelPerm tableCache fields role selPermInfo = do mkInsCtx :: MonadError QErr m => RoleName - -> TableCache - -> FieldInfoMap + -> TableCache PGColInfo + -> FieldInfoMap PGColInfo -> InsPermInfo -> Maybe UpdPermInfo -> m InsCtx @@ -417,7 +424,7 @@ mkInsCtx role tableCache fields insPermInfo updPermM = do relName = riName relInfo remoteTableInfo <- getTabInfo tableCache remoteTable let insPermM = getInsPerm remoteTableInfo role - viewInfoM = tiViewInfo remoteTableInfo + viewInfoM = _tiViewInfo remoteTableInfo return $ bool Nothing (Just (relName, relInfo)) $ isInsertable insPermM viewInfoM && isValidRel relName remoteTable @@ -438,15 +445,15 @@ mkInsCtx role tableCache fields insPermInfo updPermM = do mkAdminInsCtx :: MonadError QErr m => QualifiedTable - -> TableCache - -> FieldInfoMap + -> TableCache PGColInfo + -> FieldInfoMap PGColInfo -> m InsCtx mkAdminInsCtx tn tc fields = do relTupsM <- forM rels $ \relInfo -> do let remoteTable = riRTable relInfo relName = riName relInfo remoteTableInfo <- getTabInfo tc remoteTable - let viewInfoM = tiViewInfo remoteTableInfo + let viewInfoM = _tiViewInfo remoteTableInfo return $ bool Nothing (Just (relName, relInfo)) $ isMutable viIsInsertable viewInfoM && isValidRel relName remoteTable @@ -461,17 +468,18 @@ mkAdminInsCtx tn tc fields = do mkGCtxRole :: (MonadError QErr 
m) - => TableCache + => TableCache PGColInfo -> QualifiedTable - -> FieldInfoMap + -> FieldInfoMap PGColInfo -> [PGCol] -> [ConstraintName] -> [FunctionInfo] -> Maybe ViewInfo + -> Maybe EnumValues -> RoleName -> RolePermInfo -> m (TyAgg, RootFields, InsCtxMap) -mkGCtxRole tableCache tn fields pCols constraints funcs viM role permInfo = do +mkGCtxRole tableCache tn fields pCols constraints funcs viM enumValuesM role permInfo = do selPermM <- mapM (getSelPerm tableCache fields role) $ _permSel permInfo tabInsInfoM <- forM (_permIns permInfo) $ \ipi -> do ctx <- mkInsCtx role tableCache fields ipi $ _permUpd permInfo @@ -482,7 +490,7 @@ mkGCtxRole tableCache tn fields pCols constraints funcs viM role permInfo = do insCtxM = fst <$> tabInsInfoM updColsM = filterColInfos . upiCols <$> _permUpd permInfo tyAgg = mkGCtxRole' tn insPermM selPermM updColsM - (void $ _permDel permInfo) pColInfos constraints viM funcs + (void $ _permDel permInfo) pColInfos constraints viM funcs enumValuesM rootFlds = getRootFldsRole tn pCols constraints fields funcs viM permInfo insCtxMap = maybe Map.empty (Map.singleton tn) insCtxM return (tyAgg, rootFlds, insCtxMap) @@ -497,7 +505,7 @@ getRootFldsRole :: QualifiedTable -> [PGCol] -> [ConstraintName] - -> FieldInfoMap + -> FieldInfoMap PGColInfo -> [FunctionInfo] -> Maybe ViewInfo -> RolePermInfo @@ -521,21 +529,22 @@ getRootFldsRole tn pCols constraints fields funcs viM (RolePermInfo insM selM up mkGCtxMapTable :: (MonadError QErr m) - => TableCache + => TableCache PGColInfo -> FunctionCache - -> TableInfo + -> TableInfo PGColInfo -> m (Map.HashMap RoleName (TyAgg, RootFields, InsCtxMap)) mkGCtxMapTable tableCache funcCache tabInfo = do m <- Map.traverseWithKey - (mkGCtxRole tableCache tn fields pkeyCols validConstraints tabFuncs viewInfo) rolePerms + (mkGCtxRole tableCache tn fields pkeyCols validConstraints tabFuncs viewInfo enumValues) + rolePerms adminInsCtx <- mkAdminInsCtx tn tableCache fields let adminCtx = mkGCtxRole' tn (Just (colInfos, icRelations adminInsCtx)) (Just (True, selFlds)) (Just colInfos) (Just ()) - pkeyColInfos validConstraints viewInfo tabFuncs + pkeyColInfos validConstraints viewInfo tabFuncs enumValues adminInsCtxMap = Map.singleton tn adminInsCtx return $ Map.insert adminRole (adminCtx, adminRootFlds, adminInsCtxMap) m where - TableInfo tn _ fields rolePerms constraints pkeyCols viewInfo _ = tabInfo + TableInfo tn _ fields rolePerms constraints pkeyCols viewInfo _ enumValues = tabInfo validConstraints = mkValidConstraints constraints colInfos = getValidCols fields validColNames = map pgiName colInfos @@ -556,7 +565,7 @@ noFilter = annBoolExpTrue mkGCtxMap :: (MonadError QErr m) - => TableCache -> FunctionCache -> m GCtxMap + => TableCache PGColInfo -> FunctionCache -> m GCtxMap mkGCtxMap tableCache functionCache = do typesMapL <- mapM (mkGCtxMapTable tableCache functionCache) $ filter tableFltr $ Map.elems tableCache @@ -564,8 +573,8 @@ mkGCtxMap tableCache functionCache = do return $ flip Map.map typesMap $ \(ty, flds, insCtxMap) -> mkGCtx ty flds insCtxMap where - tableFltr ti = not (tiSystemDefined ti) - && isValidObjectName (tiName ti) + tableFltr ti = not (_tiSystemDefined ti) + && isValidObjectName (_tiName ti) -- | build GraphQL schema from postgres tables and functions buildGCtxMapPG @@ -671,13 +680,13 @@ mkGCtx tyAgg (RootFields queryFields mutationFields) insCtxMap = mFlds = rootFieldInfos mutationFields rootFieldInfos = map snd . 
Map.elems - anyGeoTypes = any isGeoType colTys + anyGeoTypes = any (isScalarColumnWhere isGeoType) colTys allComparableTypes = if anyGeoTypes -- due to casting, we need to generate both geometry and geography -- operations even if just one of the two appears in the schema - then Set.union (Set.fromList [PGGeometry, PGGeography]) colTys + then Set.union (Set.fromList [PGColumnScalar PGGeometry, PGColumnScalar PGGeography]) colTys else colTys - allScalarTypes = allComparableTypes <> scalars + allScalarTypes = (allComparableTypes ^.. folded._PGColumnScalar) <> scalars wiredInGeoInputTypes = guard anyGeoTypes *> map TIInpObj geoInputTypes diff --git a/server/src-lib/Hasura/GraphQL/Schema/BoolExp.hs b/server/src-lib/Hasura/GraphQL/Schema/BoolExp.hs index 6ccbbb2fa77..3137a2a0f76 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/BoolExp.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/BoolExp.hs @@ -6,7 +6,6 @@ module Hasura.GraphQL.Schema.BoolExp , mkBoolExpInp ) where -import qualified Data.Text as T import qualified Data.HashMap.Strict as Map import qualified Language.GraphQL.Draft.Syntax as G @@ -16,17 +15,14 @@ import Hasura.Prelude import Hasura.RQL.Types import Hasura.SQL.Types -addTypeSuffix :: T.Text -> G.NamedType -> G.NamedType -addTypeSuffix suffix baseType = G.NamedType $ G.unNamedType baseType <> G.Name suffix - typeToDescription :: G.NamedType -> G.Description typeToDescription = G.Description . G.unName . G.unNamedType -mkCompExpTy :: PGScalarType -> G.NamedType -mkCompExpTy = addTypeSuffix "_comparison_exp" . mkScalarTy +mkCompExpTy :: PGColumnType -> G.NamedType +mkCompExpTy = addTypeSuffix "_comparison_exp" . mkColumnType -mkCastExpTy :: PGScalarType -> G.NamedType -mkCastExpTy = addTypeSuffix "_cast_exp" . mkScalarTy +mkCastExpTy :: PGColumnType -> G.NamedType +mkCastExpTy = addTypeSuffix "_cast_exp" . mkColumnType -- TODO(shahidhk) this should ideally be st_d_within_geometry {- @@ -51,48 +47,46 @@ stDWithinGeographyInpTy = G.NamedType "st_d_within_geography_input" -- | Makes an input type declaration for the @_cast@ field of a comparison expression. -- (Currently only used for casting between geometry and geography types.) -mkCastExpressionInputType :: PGScalarType -> [PGScalarType] -> InpObjTyInfo +mkCastExpressionInputType :: PGColumnType -> [PGColumnType] -> InpObjTyInfo mkCastExpressionInputType sourceType targetTypes = mkHsraInpTyInfo (Just description) (mkCastExpTy sourceType) (fromInpValL targetFields) where description = mconcat [ "Expression to compare the result of casting a column of type " - , typeToDescription $ mkScalarTy sourceType + , typeToDescription $ mkColumnType sourceType , ". Multiple cast targets are combined with logical 'AND'." 
] targetFields = map targetField targetTypes targetField targetType = InpValInfo Nothing - (G.unNamedType $ mkScalarTy targetType) + (G.unNamedType $ mkColumnType targetType) Nothing (G.toGT $ mkCompExpTy targetType) --- | make compare expression input type -mkCompExpInp :: PGScalarType -> InpObjTyInfo +mkCompExpInp :: PGColumnType -> InpObjTyInfo mkCompExpInp colTy = InpObjTyInfo (Just tyDesc) (mkCompExpTy colTy) (fromInpValL $ concat - [ map (mk colScalarTy) typedOps - , map (mk $ G.toLT $ G.toNT colScalarTy) listOps - , bool [] (map (mk $ mkScalarTy PGText) stringOps) isStringTy - , bool [] (map jsonbOpToInpVal jsonbOps) isJsonbTy - , bool [] (stDWithinGeoOpInpVal stDWithinGeometryInpTy : - map geoOpToInpVal (geoOps ++ geomOps)) isGeometryType - , bool [] (stDWithinGeoOpInpVal stDWithinGeographyInpTy : - map geoOpToInpVal geoOps) isGeographyType + [ map (mk colGqlType) typedOps + , map (mk $ G.toLT $ G.toNT colGqlType) listOps + , guard (isScalarWhere isStringType) *> map (mk $ mkScalarTy PGText) stringOps + , guard (isScalarWhere (== PGJSONB)) *> map jsonbOpToInpVal jsonbOps + , guard (isScalarWhere (== PGGeometry)) *> + (stDWithinGeoOpInpVal stDWithinGeometryInpTy : map geoOpToInpVal (geoOps ++ geomOps)) + , guard (isScalarWhere (== PGGeography)) *> + (stDWithinGeoOpInpVal stDWithinGeographyInpTy : map geoOpToInpVal geoOps) , [InpValInfo Nothing "_is_null" Nothing $ G.TypeNamed (G.Nullability True) $ G.NamedType "Boolean"] - , maybeToList castOpInputValue + , castOpInputValues ]) TLHasuraType where + colGqlType = mkColumnType colTy + colTyDesc = typeToDescription colGqlType tyDesc = "expression to compare columns of type " <> colTyDesc <> ". All fields are combined with logical 'AND'." - isStringTy = case colTy of - PGVarchar -> True - PGText -> True - _ -> False + isScalarWhere = flip isScalarColumnWhere colTy mk t n = InpValInfo Nothing n Nothing $ G.toGT t - colScalarTy = mkScalarTy colTy - -- colScalarListTy = GA.GTList colGTy + typedOps = ["_eq", "_neq", "_gt", "_lt", "_gte", "_lte"] listOps = @@ -105,10 +99,7 @@ mkCompExpInp colTy = , "_similar", "_nsimilar" ] - isJsonbTy = case colTy of - PGJSONB -> True - _ -> False - jsonbOpToInpVal (op, ty, desc) = InpValInfo (Just desc) op Nothing ty + jsonbOpToInpVal (opName, ty, desc) = InpValInfo (Just desc) opName Nothing ty jsonbOps = [ ( "_contains" , G.toGT $ mkScalarTy PGJSONB @@ -132,9 +123,9 @@ mkCompExpInp colTy = ) ] - castOpInputValue = + castOpInputValues = -- currently, only geometry/geography types support casting - guard (isGeoType colTy) $> + guard (isScalarWhere isGeoType) $> InpValInfo Nothing "_cast" Nothing (G.toGT $ mkCastExpTy colTy) stDWithinGeoOpInpVal ty = @@ -142,19 +133,8 @@ mkCompExpInp colTy = stDWithinGeoDesc = "is the column within a distance from a " <> colTyDesc <> " value" - -- Geometry related ops - isGeometryType = case colTy of - PGGeometry -> True - _ -> False - - -- Geography related ops - isGeographyType = case colTy of - PGGeography -> True - _ -> False - - geoOpToInpVal (op, desc) = - InpValInfo (Just desc) op Nothing $ G.toGT $ mkScalarTy colTy - colTyDesc = typeToDescription $ mkScalarTy colTy + geoOpToInpVal (opName, desc) = + InpValInfo (Just desc) opName Nothing $ G.toGT colGqlType -- operators applicable only to geometry types geomOps :: [(G.Name, G.Description)] @@ -192,12 +172,10 @@ geoInputTypes :: [InpObjTyInfo] geoInputTypes = [ stDWithinGeometryInputType , stDWithinGeographyInputType - , castGeometryInputType - , castGeographyInputType + , mkCastExpressionInputType (PGColumnScalar 
PGGeometry) [PGColumnScalar PGGeography] + , mkCastExpressionInputType (PGColumnScalar PGGeography) [PGColumnScalar PGGeometry] ] - where - stDWithinGeometryInputType = mkHsraInpTyInfo Nothing stDWithinGeometryInpTy $ fromInpValL [ InpValInfo Nothing "from" Nothing $ G.toGT $ G.toNT $ mkScalarTy PGGeometry @@ -211,9 +189,6 @@ geoInputTypes = Nothing "use_spheroid" (Just $ G.VCBoolean True) $ G.toGT $ mkScalarTy PGBoolean ] - castGeometryInputType = mkCastExpressionInputType PGGeometry [PGGeography] - castGeographyInputType = mkCastExpressionInputType PGGeography [PGGeometry] - mkBoolExpName :: QualifiedTable -> G.Name mkBoolExpName tn = qualObjectToName tn <> "_bool_exp" diff --git a/server/src-lib/Hasura/GraphQL/Schema/Common.hs b/server/src-lib/Hasura/GraphQL/Schema/Common.hs index 809a4ad0bf6..0d9c8e479f5 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/Common.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/Common.hs @@ -1,14 +1,17 @@ module Hasura.GraphQL.Schema.Common ( qualObjectToName + , addTypeSuffix , fromInpValL , mkColName + , mkColumnType , mkRelName , mkAggRelName , SelField , mkTableTy + , mkTableEnumType , mkTableAggTy , mkColumnEnumVal @@ -22,12 +25,14 @@ import Hasura.Prelude import Hasura.RQL.Types import Hasura.SQL.Types -type SelField = - Either PGColInfo (RelInfo, Bool, AnnBoolExpPartialSQL, Maybe Int, Bool) +type SelField = Either PGColInfo (RelInfo, Bool, AnnBoolExpPartialSQL, Maybe Int, Bool) qualObjectToName :: (ToTxt a) => QualifiedObject a -> G.Name qualObjectToName = G.Name . snakeCaseQualObject +addTypeSuffix :: Text -> G.NamedType -> G.NamedType +addTypeSuffix suffix baseType = G.NamedType $ G.unNamedType baseType <> G.Name suffix + fromInpValL :: [InpValInfo] -> Map.HashMap G.Name InpValInfo fromInpValL = mapFromL _iviName @@ -40,13 +45,19 @@ mkAggRelName rn = G.Name $ relNameToTxt rn <> "_aggregate" mkColName :: PGCol -> G.Name mkColName (PGCol n) = G.Name n +mkColumnType :: PGColumnType -> G.NamedType +mkColumnType = \case + PGColumnScalar scalarType -> mkScalarTy scalarType + PGColumnEnumReference (EnumReference enumTable _) -> mkTableEnumType enumTable + mkTableTy :: QualifiedTable -> G.NamedType -mkTableTy = - G.NamedType . qualObjectToName +mkTableTy = G.NamedType . qualObjectToName + +mkTableEnumType :: QualifiedTable -> G.NamedType +mkTableEnumType = addTypeSuffix "_enum" . mkTableTy mkTableAggTy :: QualifiedTable -> G.NamedType -mkTableAggTy tn = - G.NamedType $ qualObjectToName tn <> "_aggregate" +mkTableAggTy = addTypeSuffix "_aggregate" . 
mkTableTy -- used for 'distinct_on' in select and upsert's 'update columns' mkColumnEnumVal :: PGCol -> EnumValInfo diff --git a/server/src-lib/Hasura/GraphQL/Schema/Mutation/Common.hs b/server/src-lib/Hasura/GraphQL/Schema/Mutation/Common.hs index c1c6441f8c7..cf33345af9d 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/Mutation/Common.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/Mutation/Common.hs @@ -17,7 +17,7 @@ import Hasura.SQL.Types mkPGColInp :: PGColInfo -> InpValInfo mkPGColInp (PGColInfo colName colTy _) = InpValInfo Nothing (G.Name $ getPGColTxt colName) Nothing $ - G.toGT $ mkScalarTy colTy + G.toGT $ mkColumnType colTy -- table_mutation_response mkMutRespTy :: QualifiedTable -> G.NamedType diff --git a/server/src-lib/Hasura/GraphQL/Schema/Mutation/Insert.hs b/server/src-lib/Hasura/GraphQL/Schema/Mutation/Insert.hs index 5e1f54f7df4..9f4171aa444 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/Mutation/Insert.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/Mutation/Insert.hs @@ -177,11 +177,11 @@ mkInsMutFld tn isUpsertable = onConflictArg = InpValInfo (Just onConflictDesc) "on_conflict" Nothing $ G.toGT $ mkOnConflictInpTy tn -mkConstriantTy :: QualifiedTable -> [ConstraintName] -> EnumTyInfo -mkConstriantTy tn cons = enumTyInfo +mkConstraintTy :: QualifiedTable -> [ConstraintName] -> EnumTyInfo +mkConstraintTy tn cons = enumTyInfo where enumTyInfo = mkHsraEnumTyInfo (Just desc) (mkConstraintInpTy tn) $ - mapFromL _eviVal $ map mkConstraintEnumVal cons + EnumValuesSynthetic . mapFromL _eviVal $ map mkConstraintEnumVal cons desc = G.Description $ "unique or primary key constraints on table " <>> tn @@ -194,15 +194,15 @@ mkUpdColumnTy :: QualifiedTable -> [PGCol] -> EnumTyInfo mkUpdColumnTy tn cols = enumTyInfo where enumTyInfo = mkHsraEnumTyInfo (Just desc) (mkUpdColumnInpTy tn) $ - mapFromL _eviVal $ map mkColumnEnumVal cols + EnumValuesSynthetic . mapFromL _eviVal $ map mkColumnEnumVal cols desc = G.Description $ "update columns of table " <>> tn mkConflictActionTy :: Bool -> EnumTyInfo mkConflictActionTy updAllowed = - mkHsraEnumTyInfo (Just desc) conflictActionTy $ mapFromL _eviVal $ - [enumValIgnore] <> bool [] [enumValUpdate] updAllowed + mkHsraEnumTyInfo (Just desc) conflictActionTy $ + EnumValuesSynthetic . mapFromL _eviVal $ [enumValIgnore] <> bool [] [enumValUpdate] updAllowed where desc = G.Description "conflict action" enumValIgnore = EnumValInfo (Just "ignore the insert on this row") @@ -216,7 +216,7 @@ mkOnConflictTypes tn uniqueOrPrimaryCons cols = bool [] tyInfos where tyInfos = [ TIEnum $ mkConflictActionTy isUpdAllowed - , TIEnum $ mkConstriantTy tn uniqueOrPrimaryCons + , TIEnum $ mkConstraintTy tn uniqueOrPrimaryCons , TIEnum $ mkUpdColumnTy tn cols , TIInpObj $ mkOnConflictInp tn ] diff --git a/server/src-lib/Hasura/GraphQL/Schema/OrderBy.hs b/server/src-lib/Hasura/GraphQL/Schema/OrderBy.hs index 14c331da700..6aae3eafbd5 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/OrderBy.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/OrderBy.hs @@ -21,8 +21,8 @@ ordByTy = G.NamedType "order_by" ordByEnumTy :: EnumTyInfo ordByEnumTy = - mkHsraEnumTyInfo (Just desc) ordByTy $ mapFromL _eviVal $ - map mkEnumVal enumVals + mkHsraEnumTyInfo (Just desc) ordByTy $ + EnumValuesSynthetic . 
mapFromL _eviVal $ map mkEnumVal enumVals where desc = G.Description "column ordering options" mkEnumVal (n, d) = diff --git a/server/src-lib/Hasura/GraphQL/Schema/Select.hs b/server/src-lib/Hasura/GraphQL/Schema/Select.hs index 80b0c098c5a..706b34011d0 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/Select.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/Select.hs @@ -28,7 +28,7 @@ mkSelColumnTy :: QualifiedTable -> [PGCol] -> EnumTyInfo mkSelColumnTy tn cols = enumTyInfo where enumTyInfo = mkHsraEnumTyInfo (Just desc) (mkSelColumnInpTy tn) $ - mapFromL _eviVal $ map mkColumnEnumVal cols + EnumValuesSynthetic . mapFromL _eviVal $ map mkColumnEnumVal cols desc = G.Description $ "select columns of table " <>> tn @@ -39,8 +39,7 @@ mkSelColumnInpTy tn = G.NamedType $ qualObjectToName tn <> "_select_column" mkTableAggFldsTy :: QualifiedTable -> G.NamedType -mkTableAggFldsTy tn = - G.NamedType $ qualObjectToName tn <> "_aggregate_fields" +mkTableAggFldsTy = addTypeSuffix "_aggregate_fields" . mkTableTy mkTableColAggFldsTy :: G.Name -> QualifiedTable -> G.NamedType mkTableColAggFldsTy op tn = @@ -50,17 +49,13 @@ mkTableByPkName :: QualifiedTable -> G.Name mkTableByPkName tn = qualObjectToName tn <> "_by_pk" -- Support argument params for PG columns -mkPGColParams :: PGScalarType -> ParamMap -mkPGColParams = \case - PGJSONB -> jsonParams - PGJSON -> jsonParams - _ -> Map.empty - where - pathDesc = "JSON select path" - jsonParams = Map.fromList - [ (G.Name "path", InpValInfo (Just pathDesc) "path" Nothing $ - G.toGT $ mkScalarTy PGText) - ] +mkPGColParams :: PGColumnType -> ParamMap +mkPGColParams colType + | isScalarColumnWhere isJSONType colType = + let pathDesc = "JSON select path" + in Map.fromList + [ (G.Name "path", InpValInfo (Just pathDesc) "path" Nothing $ G.toGT $ mkScalarTy PGText) ] + | otherwise = Map.empty mkPGColFld :: PGColInfo -> ObjFldInfo mkPGColFld (PGColInfo colName colTy isNullable) = @@ -68,9 +63,9 @@ mkPGColFld (PGColInfo colName colTy isNullable) = where n = G.Name $ getPGColTxt colName ty = bool notNullTy nullTy isNullable - scalarTy = mkScalarTy colTy - notNullTy = G.toGT $ G.toNT scalarTy - nullTy = G.toGT scalarTy + columnType = mkColumnType colTy + notNullTy = G.toGT $ G.toNT columnType + nullTy = G.toGT columnType -- where: table_bool_exp -- limit: Int @@ -222,7 +217,7 @@ type table__fields{ mkTableColAggFldsObj :: QualifiedTable -> G.Name - -> (PGScalarType -> G.NamedType) + -> (PGColumnType -> G.NamedType) -> [PGColInfo] -> ObjTyInfo mkTableColAggFldsObj tn op f cols = @@ -274,7 +269,7 @@ mkSelFldPKey tn cols = args = fromInpValL $ map colInpVal cols ty = G.toGT $ mkTableTy tn colInpVal (PGColInfo n typ _) = - InpValInfo Nothing (mkColName n) Nothing $ G.toGT $ G.toNT $ mkScalarTy typ + InpValInfo Nothing (mkColName n) Nothing $ G.toGT $ G.toNT $ mkColumnType typ {- diff --git a/server/src-lib/Hasura/GraphQL/Validate.hs b/server/src-lib/Hasura/GraphQL/Validate.hs index 3ba96158d16..03979662c97 100644 --- a/server/src-lib/Hasura/GraphQL/Validate.hs +++ b/server/src-lib/Hasura/GraphQL/Validate.hs @@ -33,9 +33,8 @@ import Hasura.GraphQL.Validate.Types import Hasura.RQL.Types import Hasura.RQL.Types.QueryCollection -import Hasura.SQL.Types (PGScalarType) -import Hasura.SQL.Value (PGColValue, - parsePGValue) +import Hasura.SQL.Types (PGScalarTyped) +import Hasura.SQL.Value (PGColValue) data QueryParts = QueryParts @@ -118,8 +117,8 @@ getAnnVarVals varDefsL inpVals = withPathK "variableValues" $ do showVars :: (Functor f, Foldable f) => f G.Variable -> Text showVars = 
showNames . fmap G.unVariable -type VarPGTypes = Map.HashMap G.Variable PGScalarType -type AnnPGVarVals = Map.HashMap G.Variable (PGScalarType, PGColValue) +type VarPGTypes = Map.HashMap G.Variable PGColumnType +type AnnPGVarVals = Map.HashMap G.Variable (PGScalarTyped PGColValue) -- this is in similar spirit to getAnnVarVals, however -- here it is much simpler and can get rid of typemap requirement @@ -142,7 +141,7 @@ getAnnPGVarVals varTypes varValsM = -- TODO: we don't have the graphql type -- " of type: " <> T.pack (show varType) <> " in variableValues" - (varType,) <$> runAesonParser (parsePGValue varType) varVal + parsePGScalarValue varType varVal where varVals = fromMaybe Map.empty varValsM diff --git a/server/src-lib/Hasura/GraphQL/Validate/InputValue.hs b/server/src-lib/Hasura/GraphQL/Validate/InputValue.hs index a08858c28c0..1582ae13dd9 100644 --- a/server/src-lib/Hasura/GraphQL/Validate/InputValue.hs +++ b/server/src-lib/Hasura/GraphQL/Validate/InputValue.hs @@ -19,6 +19,8 @@ import qualified Data.Text as T import qualified Data.Vector as V import qualified Language.GraphQL.Draft.Syntax as G +import qualified Hasura.RQL.Types as RQL + import Hasura.GraphQL.Utils import Hasura.GraphQL.Validate.Context import Hasura.GraphQL.Validate.Types @@ -249,21 +251,27 @@ validateNamedTypeVal inpValParser (nullability, nt) val = do fmap (AGObject nt) . mapM (validateObject inpValParser ioti) TIEnum eti -> withParsed gType (getEnum inpValParser) val $ - fmap (AGEnum nt) . mapM (validateEnum eti) + fmap (AGEnum nt) . validateEnum eti TIScalar (ScalarTyInfo _ pgColTy _) -> withParsed gType (getScalar inpValParser) val $ fmap (AGScalar pgColTy) . mapM (validateScalar pgColTy) where throwUnexpTypeErr ty = throw500 $ "unexpected " <> ty <> " type info for: " <> showNamedTy nt - validateEnum enumTyInfo enumVal = - if Map.member enumVal (_etiValues enumTyInfo) - then return enumVal - else throwVE $ "unexpected value " <> - showName (G.unEnumValue enumVal) <> - " for enum: " <> showNamedTy nt - validateScalar pgColTy = - runAesonParser (parsePGValue pgColTy) + + validateEnum enumTyInfo maybeEnumValue = case (_etiValues enumTyInfo, maybeEnumValue) of + (EnumValuesSynthetic _, Nothing) -> pure $ AGESynthetic Nothing + (EnumValuesReference reference, Nothing) -> pure $ AGEReference reference Nothing + (EnumValuesSynthetic values, Just enumValue) + | Map.member enumValue values -> pure $ AGESynthetic (Just enumValue) + (EnumValuesReference reference@(EnumReference _ values), Just enumValue) + | rqlEnumValue <- RQL.EnumValue . G.unName $ G.unEnumValue enumValue + , Map.member rqlEnumValue values + -> pure $ AGEReference reference (Just rqlEnumValue) + (_, Just enumValue) -> throwVE $ + "unexpected value " <> showName (G.unEnumValue enumValue) <> " for enum: " <> showNamedTy nt + + validateScalar pgColTy = runAesonParser (parsePGValue pgColTy) gType = G.TypeNamed nullability nt validateList diff --git a/server/src-lib/Hasura/GraphQL/Validate/Types.hs b/server/src-lib/Hasura/GraphQL/Validate/Types.hs index 840dc8e0c5a..9f30caed059 100644 --- a/server/src-lib/Hasura/GraphQL/Validate/Types.hs +++ b/server/src-lib/Hasura/GraphQL/Validate/Types.hs @@ -21,6 +21,8 @@ module Hasura.GraphQL.Validate.Types , EnumTyInfo(..) , mkHsraEnumTyInfo + , EnumValuesInfo(..) + , normalizeEnumValues , EnumValInfo(..) , InpObjFldMap , InpObjTyInfo(..) @@ -52,6 +54,7 @@ module Hasura.GraphQL.Validate.Types , TypeLoc (..) , typeEq , AnnGValue(..) + , AnnGEnumValue(..) 
, AnnGObject , hasNullVal , getAnnInpValKind @@ -60,7 +63,6 @@ module Hasura.GraphQL.Validate.Types ) where import Hasura.Prelude -import Instances.TH.Lift () import qualified Data.Aeson as J import qualified Data.Aeson.Casing as J @@ -73,14 +75,15 @@ import qualified Language.GraphQL.Draft.Syntax as G import qualified Language.GraphQL.Draft.TH as G import qualified Language.Haskell.TH.Syntax as TH +import qualified Hasura.RQL.Types.Column as RQL + import Hasura.GraphQL.Utils import Hasura.RQL.Instances () import Hasura.RQL.Types.RemoteSchema import Hasura.SQL.Types import Hasura.SQL.Value --- | Typeclass for equating relevant properties of various GraphQL types --- | defined below +-- | Typeclass for equating relevant properties of various GraphQL types defined below class EquatableGType a where type EqProps a getEqProps :: a -> EqProps a @@ -99,21 +102,39 @@ fromEnumValDef :: G.EnumValueDefinition -> EnumValInfo fromEnumValDef (G.EnumValueDefinition descM val _) = EnumValInfo descM val False +data EnumValuesInfo + = EnumValuesSynthetic !(Map.HashMap G.EnumValue EnumValInfo) + -- ^ Values for an enum that exists only in the GraphQL schema and does not have any external + -- source of truth. + | EnumValuesReference !RQL.EnumReference + -- ^ Values for an enum that is backed by an enum table reference (see "Hasura.RQL.Schema.Enum"). + deriving (Show, Eq, TH.Lift) + +normalizeEnumValues :: EnumValuesInfo -> Map.HashMap G.EnumValue EnumValInfo +normalizeEnumValues = \case + EnumValuesSynthetic values -> values + EnumValuesReference (RQL.EnumReference _ values) -> + mapFromL _eviVal . flip map (Map.toList values) $ + \(RQL.EnumValue name, RQL.EnumValueInfo maybeDescription) -> EnumValInfo + { _eviVal = G.EnumValue $ G.Name name + , _eviDesc = G.Description <$> maybeDescription + , _eviIsDeprecated = False } + data EnumTyInfo = EnumTyInfo { _etiDesc :: !(Maybe G.Description) , _etiName :: !G.NamedType - , _etiValues :: !(Map.HashMap G.EnumValue EnumValInfo) + , _etiValues :: !EnumValuesInfo , _etiLoc :: !TypeLoc } deriving (Show, Eq, TH.Lift) instance EquatableGType EnumTyInfo where type EqProps EnumTyInfo = (G.NamedType, Map.HashMap G.EnumValue EnumValInfo) - getEqProps ety = (,) (_etiName ety) (_etiValues ety) + getEqProps ety = (,) (_etiName ety) (normalizeEnumValues $ _etiValues ety) fromEnumTyDef :: G.EnumTypeDefinition -> TypeLoc -> EnumTyInfo fromEnumTyDef (G.EnumTypeDefinition descM n _ valDefs) loc = - EnumTyInfo descM (G.NamedType n) enumVals loc + EnumTyInfo descM (G.NamedType n) (EnumValuesSynthetic enumVals) loc where enumVals = Map.fromList [(G._evdName valDef, fromEnumValDef valDef) | valDef <- valDefs] @@ -121,7 +142,7 @@ fromEnumTyDef (G.EnumTypeDefinition descM n _ valDefs) loc = mkHsraEnumTyInfo :: Maybe G.Description -> G.NamedType - -> Map.HashMap G.EnumValue EnumValInfo + -> EnumValuesInfo -> EnumTyInfo mkHsraEnumTyInfo descM ty enumVals = EnumTyInfo descM ty enumVals TLHasuraType @@ -659,9 +680,15 @@ data AnnInpVal type AnnGObject = OMap.InsOrdHashMap G.Name AnnInpVal +-- | See 'EnumValuesInfo' for information about what these cases mean. 
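+--
+-- A rough illustration of the two cases (the @colorsTable@ and @colorsValues@
+-- bindings below are hypothetical, not part of this patch):
+--
+-- > -- a value of a purely schema-level enum, such as the generated order_by type:
+-- > AGESynthetic (Just (G.EnumValue (G.Name "asc")))
+-- > -- a value of a table-backed enum, carrying a reference to its source of truth:
+-- > AGEReference (RQL.EnumReference colorsTable colorsValues) (Just (RQL.EnumValue "red"))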
+data AnnGEnumValue + = AGESynthetic !(Maybe G.EnumValue) + | AGEReference !RQL.EnumReference !(Maybe RQL.EnumValue) + deriving (Show, Eq) + data AnnGValue = AGScalar !PGScalarType !(Maybe PGColValue) - | AGEnum !G.NamedType !(Maybe G.EnumValue) + | AGEnum !G.NamedType !AnnGEnumValue | AGObject !G.NamedType !(Maybe AnnGObject) | AGArray !G.ListType !(Maybe [AnnInpVal]) deriving (Show, Eq) @@ -678,11 +705,12 @@ instance J.ToJSON AnnGValue where hasNullVal :: AnnGValue -> Bool hasNullVal = \case - AGScalar _ Nothing -> True - AGEnum _ Nothing -> True - AGObject _ Nothing -> True - AGArray _ Nothing -> True - _ -> False + AGScalar _ Nothing -> True + AGEnum _ (AGESynthetic Nothing) -> True + AGEnum _ (AGEReference _ Nothing) -> True + AGObject _ Nothing -> True + AGArray _ Nothing -> True + _ -> False getAnnInpValKind :: AnnGValue -> Text getAnnInpValKind = \case diff --git a/server/src-lib/Hasura/Prelude.hs b/server/src-lib/Hasura/Prelude.hs index f724ab3a758..a9b60af9c6a 100644 --- a/server/src-lib/Hasura/Prelude.hs +++ b/server/src-lib/Hasura/Prelude.hs @@ -3,11 +3,12 @@ module Hasura.Prelude , onNothing , onJust , onLeft + , choice , bsToTxt , txtToBs ) where -import Control.Applicative as M ((<|>)) +import Control.Applicative as M (Alternative (..)) import Control.Monad as M (void, when) import Control.Monad.Base as M import Control.Monad.Except as M @@ -19,7 +20,9 @@ import Data.Bool as M (bool) import Data.Data as M (Data (..)) import Data.Either as M (lefts, partitionEithers, rights) -import Data.Foldable as M (foldrM, toList) +import Data.Foldable as M (foldrM, for_, toList, + traverse_) +import Data.Function as M (on, (&)) import Data.Functor as M (($>), (<&>)) import Data.Hashable as M (Hashable) import Data.List as M (find, foldl', group, @@ -33,6 +36,7 @@ import Data.Ord as M (comparing) import Data.Semigroup as M (Semigroup (..)) import Data.String as M (IsString) import Data.Text as M (Text) +import Data.Traversable as M (for) import Data.Word as M (Word64) import GHC.Generics as M (Generic) import Prelude as M hiding (fail, init, lookup) @@ -51,6 +55,9 @@ onJust m action = maybe (return ()) action m onLeft :: (Monad m) => Either e a -> (e -> m a) -> m a onLeft e f = either f return e +choice :: (Alternative f) => [f a] -> f a +choice = foldr (<|>) empty + bsToTxt :: B.ByteString -> Text bsToTxt = TE.decodeUtf8With TE.lenientDecode diff --git a/server/src-lib/Hasura/RQL/DDL/EventTrigger.hs b/server/src-lib/Hasura/RQL/DDL/EventTrigger.hs index 2a62a888116..1435cec2432 100644 --- a/server/src-lib/Hasura/RQL/DDL/EventTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/EventTrigger.hs @@ -228,7 +228,7 @@ subTableP1 (CreateEventTriggerQuery name qt insert update delete enableManual re -- can only replace for same table when replace $ do ti' <- askTabInfoFromTrigger name - when (tiName ti' /= tiName ti) $ throw400 NotSupported "cannot replace table or schema for trigger" + when (_tiName ti' /= _tiName ti) $ throw400 NotSupported "cannot replace table or schema for trigger" assertCols ti insert assertCols ti update @@ -242,7 +242,7 @@ subTableP1 (CreateEventTriggerQuery name qt insert update delete enableManual re let cols = sosColumns sos case cols of SubCStar -> return () - SubCArray pgcols -> forM_ pgcols (assertPGCol (tiFieldInfoMap ti) "") + SubCArray pgcols -> forM_ pgcols (assertPGCol (_tiFieldInfoMap ti) "") --(QErrM m, CacheRWM m, MonadTx m, MonadIO m) @@ -285,7 +285,7 @@ subTableP2 :: (QErrM m, CacheRWM m, MonadTx m, MonadIO m, HasSQLGenCtx m) => QualifiedTable -> Bool -> 
EventTriggerConf -> m () subTableP2 qt replace etc = do - allCols <- getCols . tiFieldInfoMap <$> askTabInfo qt + allCols <- getCols . _tiFieldInfoMap <$> askTabInfo qt strfyNum <- stringifyNum <$> askSQLGenCtx if replace then do @@ -309,7 +309,7 @@ unsubTableP1 unsubTableP1 (DeleteEventTriggerQuery name) = do adminOnly ti <- askTabInfoFromTrigger name - return $ tiName ti + return $ _tiName ti unsubTableP2 :: (QErrM m, CacheRWM m, MonadTx m) @@ -363,7 +363,7 @@ runInvokeEventTrigger (InvokeEventTriggerQuery name payload) = do trigInfo <- askEventTriggerInfo name assertManual $ etiOpsDef trigInfo ti <- askTabInfoFromTrigger name - eid <-liftTx $ insertManualEvent (tiName ti) name payload + eid <-liftTx $ insertManualEvent (_tiName ti) name payload return $ encJFromJValue $ object ["event_id" .= eid] where assertManual (TriggerOpsDef _ _ _ man) = case man of diff --git a/server/src-lib/Hasura/RQL/DDL/Metadata.hs b/server/src-lib/Hasura/RQL/DDL/Metadata.hs index 8369e1f5520..70904e034d1 100644 --- a/server/src-lib/Hasura/RQL/DDL/Metadata.hs +++ b/server/src-lib/Hasura/RQL/DDL/Metadata.hs @@ -55,6 +55,7 @@ import qualified Hasura.RQL.Types.RemoteSchema as TRS data TableMeta = TableMeta { _tmTable :: !QualifiedTable + , _tmIsEnum :: !Bool , _tmObjectRelationships :: ![DR.ObjRelDef] , _tmArrayRelationships :: ![DR.ArrRelDef] , _tmInsertPermissions :: ![DP.InsPermDef] @@ -64,9 +65,9 @@ data TableMeta , _tmEventTriggers :: ![DTS.EventTriggerConf] } deriving (Show, Eq, Lift) -mkTableMeta :: QualifiedTable -> TableMeta -mkTableMeta qt = - TableMeta qt [] [] [] [] [] [] [] +mkTableMeta :: QualifiedTable -> Bool -> TableMeta +mkTableMeta qt isEnum = + TableMeta qt isEnum [] [] [] [] [] [] [] makeLenses ''TableMeta @@ -78,6 +79,7 @@ instance FromJSON TableMeta where TableMeta <$> o .: tableKey + <*> o .:? isEnumKey .!= False <*> o .:? orKey .!= [] <*> o .:? arKey .!= [] <*> o .:? ipKey .!= [] @@ -88,6 +90,7 @@ instance FromJSON TableMeta where where tableKey = "table" + isEnumKey = "is_enum" orKey = "object_relationships" arKey = "array_relationships" ipKey = "insert_permissions" @@ -100,8 +103,8 @@ instance FromJSON TableMeta where HS.fromList (M.keys o) `HS.difference` expectedKeySet expectedKeySet = - HS.fromList [ tableKey, orKey, arKey, ipKey - , spKey, upKey, dpKey, etKey + HS.fromList [ tableKey, isEnumKey, orKey, arKey + , ipKey, spKey, upKey, dpKey, etKey ] parseJSON _ = @@ -225,8 +228,11 @@ applyQP2 (ReplaceMetadata tables mFunctions mSchemas mCollections mAllowlist) = withPathK "tables" $ do -- tables and views - indexedForM_ (map _tmTable tables) $ \tableName -> - void $ DT.trackExistingTableOrViewP2 tableName False + indexedForM_ tables $ \tableMeta -> do + let trackQuery = DT.TrackTable + { DT.tName = tableMeta ^. tmTable + , DT.tIsEnum = tableMeta ^. 
tmIsEnum } + void $ DT.trackExistingTableOrViewP2 trackQuery -- Relationships indexedForM_ tables $ \table -> do @@ -288,7 +294,7 @@ applyQP2 (ReplaceMetadata tables mFunctions mSchemas mCollections mAllowlist) = processPerms tabInfo perms = indexedForM_ perms $ \permDef -> do permInfo <- DP.addPermP1 tabInfo permDef - DP.addPermP2 (tiName tabInfo) permDef permInfo + DP.addPermP2 (_tiName tabInfo) permDef permInfo runReplaceMetadata :: ( QErrM m, UserInfoM m, CacheRWM m, MonadTx m @@ -311,8 +317,9 @@ $(deriveToJSON defaultOptions ''ExportMetadata) fetchMetadata :: Q.TxE QErr ReplaceMetadata fetchMetadata = do tables <- Q.catchE defaultTxErrorHandler fetchTables - let qts = map (uncurry QualifiedObject) tables - tableMetaMap = M.fromList $ zip qts $ map mkTableMeta qts + let tableMetaMap = M.fromList . flip map tables $ \(schema, name, isEnum) -> + let qualifiedName = QualifiedObject schema name + in (qualifiedName, mkTableMeta qualifiedName isEnum) -- Fetch all the relationships relationships <- Q.catchE defaultTxErrorHandler fetchRelationships @@ -384,7 +391,7 @@ fetchMetadata = do fetchTables = Q.listQ [Q.sql| - SELECT table_schema, table_name from hdb_catalog.hdb_table + SELECT table_schema, table_name, is_enum from hdb_catalog.hdb_table WHERE is_system_defined = 'false' |] () False diff --git a/server/src-lib/Hasura/RQL/DDL/Permission.hs b/server/src-lib/Hasura/RQL/DDL/Permission.hs index c5ac70dd5ca..c3703bdd528 100644 --- a/server/src-lib/Hasura/RQL/DDL/Permission.hs +++ b/server/src-lib/Hasura/RQL/DDL/Permission.hs @@ -108,20 +108,20 @@ dropView vn = procSetObj :: (QErrM m) - => TableInfo -> Maybe ColVals + => TableInfo PGColInfo -> Maybe ColVals -> m (PreSetColsPartial, [Text], [SchemaDependency]) procSetObj ti mObj = do (setColTups, deps) <- withPathK "set" $ fmap unzip $ forM (HM.toList setObj) $ \(pgCol, val) -> do ty <- askPGType fieldInfoMap pgCol $ "column " <> pgCol <<> " not found in table " <>> tn - sqlExp <- valueParser (PgTypeSimple ty) val + sqlExp <- valueParser (PGTypeSimple ty) val let dep = mkColDep (getDepReason sqlExp) tn pgCol return ((pgCol, sqlExp), dep) return (HM.fromList setColTups, depHeaders, deps) where - fieldInfoMap = tiFieldInfoMap ti - tn = tiName ti + fieldInfoMap = _tiFieldInfoMap ti + tn = _tiName ti setObj = fromMaybe mempty mObj depHeaders = getDepHeadersFromVal $ Object $ HM.fromList $ map (first getPGColTxt) $ HM.toList setObj @@ -130,7 +130,7 @@ procSetObj ti mObj = do buildInsPermInfo :: (QErrM m, CacheRM m) - => TableInfo + => TableInfo PGColInfo -> PermDef InsPerm -> m (WithDeps InsPermInfo) buildInsPermInfo tabInfo (PermDef rn (InsPerm chk set mCols) _) = @@ -148,8 +148,8 @@ buildInsPermInfo tabInfo (PermDef rn (InsPerm chk set mCols) _) = insColsWithoutPresets = insCols \\ HM.keys setColsSQL return (InsPermInfo (HS.fromList insColsWithoutPresets) vn be setColsSQL reqHdrs, deps) where - fieldInfoMap = tiFieldInfoMap tabInfo - tn = tiName tabInfo + fieldInfoMap = _tiFieldInfoMap tabInfo + tn = _tiName tabInfo vn = buildViewName tn rn PTInsert allCols = map pgiName $ getCols fieldInfoMap insCols = fromMaybe allCols $ convColSpec fieldInfoMap <$> mCols @@ -213,7 +213,7 @@ $(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''SelPerm) buildSelPermInfo :: (QErrM m, CacheRM m) - => TableInfo + => TableInfo PGColInfo -> SelPerm -> m (WithDeps SelPermInfo) buildSelPermInfo tabInfo sp = do @@ -235,8 +235,8 @@ buildSelPermInfo tabInfo sp = do return (SelPermInfo (HS.fromList pgCols) tn be mLimit allowAgg depHeaders, deps) where - tn = 
tiName tabInfo - fieldInfoMap = tiFieldInfoMap tabInfo + tn = _tiName tabInfo + fieldInfoMap = _tiFieldInfoMap tabInfo allowAgg = or $ spAllowAggregations sp autoInferredErr = "permissions for relationships are automatically inferred" @@ -283,7 +283,7 @@ type CreateUpdPerm = CreatePerm UpdPerm buildUpdPermInfo :: (QErrM m, CacheRM m) - => TableInfo + => TableInfo PGColInfo -> UpdPerm -> m (WithDeps UpdPermInfo) buildUpdPermInfo tabInfo (UpdPerm colSpec set fltr) = do @@ -305,8 +305,8 @@ buildUpdPermInfo tabInfo (UpdPerm colSpec set fltr) = do return (UpdPermInfo (HS.fromList updColsWithoutPreSets) tn be setColsSQL reqHeaders, deps) where - tn = tiName tabInfo - fieldInfoMap = tiFieldInfoMap tabInfo + tn = _tiName tabInfo + fieldInfoMap = _tiFieldInfoMap tabInfo updCols = convColSpec fieldInfoMap colSpec relInUpdErr = "relationships can't be used in update" @@ -347,7 +347,7 @@ type CreateDelPerm = CreatePerm DelPerm buildDelPermInfo :: (QErrM m, CacheRM m) - => TableInfo + => TableInfo PGColInfo -> DelPerm -> m (WithDeps DelPermInfo) buildDelPermInfo tabInfo (DelPerm fltr) = do @@ -357,8 +357,8 @@ buildDelPermInfo tabInfo (DelPerm fltr) = do depHeaders = getDependentHeaders fltr return (DelPermInfo tn be depHeaders, deps) where - tn = tiName tabInfo - fieldInfoMap = tiFieldInfoMap tabInfo + tn = _tiName tabInfo + fieldInfoMap = _tiFieldInfoMap tabInfo type DropDelPerm = DropPerm DelPerm diff --git a/server/src-lib/Hasura/RQL/DDL/Permission/Internal.hs b/server/src-lib/Hasura/RQL/DDL/Permission/Internal.hs index 98042702b00..dcab14a3ff2 100644 --- a/server/src-lib/Hasura/RQL/DDL/Permission/Internal.hs +++ b/server/src-lib/Hasura/RQL/DDL/Permission/Internal.hs @@ -23,6 +23,7 @@ import Hasura.RQL.GBoolExp import Hasura.RQL.Types import Hasura.Server.Utils import Hasura.SQL.Types +import Hasura.SQL.Value import qualified Database.PG.Query as Q @@ -39,7 +40,7 @@ instance ToJSON PermColSpec where toJSON (PCCols cols) = toJSON cols toJSON PCStar = "*" -convColSpec :: FieldInfoMap -> PermColSpec -> [PGCol] +convColSpec :: FieldInfoMap PGColInfo -> PermColSpec -> [PGCol] convColSpec _ (PCCols cols) = cols convColSpec cim PCStar = map pgiName $ getCols cim @@ -47,18 +48,18 @@ assertPermNotDefined :: (MonadError QErr m) => RoleName -> PermAccessor a - -> TableInfo + -> TableInfo PGColInfo -> m () assertPermNotDefined roleName pa tableInfo = when (permissionIsDefined rpi pa || roleName == adminRole) $ throw400 AlreadyExists $ mconcat [ "'" <> T.pack (show $ permAccToType pa) <> "'" - , " permission on " <>> tiName tableInfo + , " permission on " <>> _tiName tableInfo , " for role " <>> roleName , " already exists" ] where - rpi = M.lookup roleName $ tiRolePermInfoMap tableInfo + rpi = M.lookup roleName $ _tiRolePermInfoMap tableInfo permissionIsDefined :: Maybe RolePermInfo -> PermAccessor a -> Bool @@ -69,21 +70,21 @@ assertPermDefined :: (MonadError QErr m) => RoleName -> PermAccessor a - -> TableInfo + -> TableInfo PGColInfo -> m () assertPermDefined roleName pa tableInfo = unless (permissionIsDefined rpi pa) $ throw400 PermissionDenied $ mconcat [ "'" <> T.pack (show $ permAccToType pa) <> "'" - , " permission on " <>> tiName tableInfo + , " permission on " <>> _tiName tableInfo , " for role " <>> roleName , " does not exist" ] where - rpi = M.lookup roleName $ tiRolePermInfoMap tableInfo + rpi = M.lookup roleName $ _tiRolePermInfoMap tableInfo askPermInfo :: (MonadError QErr m) - => TableInfo + => TableInfo PGColInfo -> RoleName -> PermAccessor c -> m c @@ -91,14 +92,14 @@ askPermInfo tabInfo 
roleName pa = case M.lookup roleName rpim >>= (^. paL) of Just c -> return c Nothing -> throw400 PermissionDenied $ mconcat - [ pt <> " permisison on " <>> tiName tabInfo + [ pt <> " permisison on " <>> _tiName tabInfo , " for role " <>> roleName , " does not exist" ] where paL = permAccToLens pa pt = permTypeToCode $ permAccToType pa - rpim = tiRolePermInfoMap tabInfo + rpim = _tiRolePermInfoMap tabInfo savePermToCatalog :: (ToJSON a) @@ -174,7 +175,7 @@ data CreatePermP1Res a procBoolExp :: (QErrM m, CacheRM m) - => QualifiedTable -> FieldInfoMap -> BoolExp + => QualifiedTable -> FieldInfoMap PGColInfo -> BoolExp -> m (AnnBoolExpPartialSQL, [SchemaDependency]) procBoolExp tn fieldInfoMap be = do abe <- annBoolExp valueParser fieldInfoMap be @@ -204,22 +205,21 @@ getDependentHeaders (BoolExp boolExp) = valueParser :: (MonadError QErr m) - => PgType -> Value -> m PartialSQLExp + => PGType PGColumnType -> Value -> m PartialSQLExp valueParser pgType = \case -- When it is a special variable String t - | isUserVar t -> return $ PSESessVar pgType t - | isReqUserId t -> return $ PSESessVar pgType userIdHeader - | otherwise -> return $ PSESQLExp $ - S.SETyAnn (S.SELit t) $ S.mkTypeAnn pgType - + | isUserVar t -> return $ mkScalarSessionVar pgType t + | isReqUserId t -> return $ mkScalarSessionVar pgType userIdHeader -- Typical value as Aeson's value val -> case pgType of - PgTypeSimple columnType -> PSESQLExp <$> txtRHSBuilder columnType val - PgTypeArray ofType -> do + PGTypeSimple columnType -> PSESQLExp . toTxtValue <$> parsePGScalarValue columnType val + PGTypeArray ofType -> do vals <- runAesonParser parseJSON val - arrayExp <- S.SEArray <$> indexedForM vals (txtRHSBuilder ofType) - return $ PSESQLExp $ S.SETyAnn arrayExp $ S.mkTypeAnn pgType + PGScalarTyped scalarType scalarValues <- parsePGScalarValues ofType vals + return . PSESQLExp $ S.SETyAnn + (S.SEArray $ map (toTxtValue . 
PGScalarTyped scalarType) scalarValues) + (S.mkTypeAnn $ PGTypeArray scalarType) injectDefaults :: QualifiedTable -> QualifiedTable -> Q.Query injectDefaults qv qt = @@ -258,7 +258,7 @@ class (ToJSON a) => IsPerm a where buildPermInfo :: (QErrM m, CacheRM m) - => TableInfo + => TableInfo PGColInfo -> PermDef a -> m (WithDeps (PermInfo a)) @@ -282,7 +282,7 @@ class (ToJSON a) => IsPerm a where getPermAcc2 _ = permAccessor validateViewPerm - :: (IsPerm a, QErrM m) => PermDef a -> TableInfo -> m () + :: (IsPerm a, QErrM m) => PermDef a -> TableInfo PGColInfo -> m () validateViewPerm permDef tableInfo = case permAcc of PASelect -> return () @@ -290,13 +290,13 @@ validateViewPerm permDef tableInfo = PAUpdate -> mutableView tn viIsUpdatable viewInfo "updatable" PADelete -> mutableView tn viIsDeletable viewInfo "deletable" where - tn = tiName tableInfo - viewInfo = tiViewInfo tableInfo + tn = _tiName tableInfo + viewInfo = _tiViewInfo tableInfo permAcc = getPermAcc1 permDef addPermP1 :: (QErrM m, CacheRM m, IsPerm a) - => TableInfo -> PermDef a -> m (WithDeps (PermInfo a)) + => TableInfo PGColInfo -> PermDef a -> m (WithDeps (PermInfo a)) addPermP1 tabInfo pd = do assertPermNotDefined (pdRole pd) (getPermAcc1 pd) tabInfo buildPermInfo tabInfo pd diff --git a/server/src-lib/Hasura/RQL/DDL/Relationship.hs b/server/src-lib/Hasura/RQL/DDL/Relationship.hs index 4995a4425d3..0ae16f68a79 100644 --- a/server/src-lib/Hasura/RQL/DDL/Relationship.hs +++ b/server/src-lib/Hasura/RQL/DDL/Relationship.hs @@ -35,14 +35,14 @@ import Instances.TH.Lift () validateManualConfig :: (QErrM m, CacheRM m) - => FieldInfoMap + => FieldInfoMap PGColInfo -> RelManualConfig -> m () validateManualConfig fim rm = do let colMapping = M.toList $ rmColumns rm remoteQt = rmTable rm remoteTabInfo <- askTabInfo remoteQt - let remoteFim = tiFieldInfoMap remoteTabInfo + let remoteFim = _tiFieldInfoMap remoteTabInfo forM_ colMapping $ \(lCol, rCol) -> do assertPGCol fim "" lCol assertPGCol remoteFim "" rCol @@ -70,14 +70,14 @@ persistRel (QualifiedObject sn tn) rn relType relDef comment = checkForFldConfilct :: (MonadError QErr m) - => TableInfo + => TableInfo PGColInfo -> FieldName -> m () checkForFldConfilct tabInfo f = - case HM.lookup f (tiFieldInfoMap tabInfo) of + case HM.lookup f (_tiFieldInfoMap tabInfo) of Just _ -> throw400 AlreadyExists $ mconcat [ "column/relationship " <>> f - , " of table " <>> tiName tabInfo + , " of table " <>> _tiName tabInfo , " already exists" ] Nothing -> return () @@ -90,7 +90,7 @@ validateObjRel validateObjRel qt (RelDef rn ru _) = do tabInfo <- askTabInfo qt checkForFldConfilct tabInfo (fromRel rn) - let fim = tiFieldInfoMap tabInfo + let fim = _tiFieldInfoMap tabInfo case ru of RUFKeyOn cn -> assertPGCol fim "" cn RUManual (ObjRelManualConfig rm) -> validateManualConfig fim rm @@ -168,11 +168,11 @@ validateArrRel validateArrRel qt (RelDef rn ru _) = do tabInfo <- askTabInfo qt checkForFldConfilct tabInfo (fromRel rn) - let fim = tiFieldInfoMap tabInfo + let fim = _tiFieldInfoMap tabInfo case ru of RUFKeyOn (ArrRelUsingFKeyOn remoteQt rcn) -> do remoteTabInfo <- askTabInfo remoteQt - let rfim = tiFieldInfoMap remoteTabInfo + let rfim = _tiFieldInfoMap remoteTabInfo -- Check if 'using' column exists assertPGCol rfim "" rcn RUManual (ArrRelManualConfig rm) -> @@ -229,7 +229,7 @@ dropRelP1 :: (UserInfoM m, QErrM m, CacheRM m) => DropRel -> m [SchemaObjId] dropRelP1 (DropRel qt rn cascade) = do adminOnly tabInfo <- askTabInfo qt - _ <- askRelType (tiFieldInfoMap tabInfo) rn "" + _ <- askRelType 
(_tiFieldInfoMap tabInfo) rn "" sc <- askSchemaCache let depObjs = getDependentObjs sc relObjId when (depObjs /= [] && not (or cascade)) $ reportDeps depObjs @@ -279,7 +279,7 @@ validateRelP1 validateRelP1 qt rn = do adminOnly tabInfo <- askTabInfo qt - askRelType (tiFieldInfoMap tabInfo) rn "" + askRelType (_tiFieldInfoMap tabInfo) rn "" setRelCommentP2 :: (QErrM m, MonadTx m) diff --git a/server/src-lib/Hasura/RQL/DDL/Relationship/Rename.hs b/server/src-lib/Hasura/RQL/DDL/Relationship/Rename.hs index c2f7594e94d..839ccbbc9ba 100644 --- a/server/src-lib/Hasura/RQL/DDL/Relationship/Rename.hs +++ b/server/src-lib/Hasura/RQL/DDL/Relationship/Rename.hs @@ -27,7 +27,7 @@ renameRelP2 qt newRN relInfo = do oldSC <- askSchemaCache tabInfo <- askTabInfo qt -- check for conflicts in fieldInfoMap - case HM.lookup (fromRel newRN) $ tiFieldInfoMap tabInfo of + case HM.lookup (fromRel newRN) $ _tiFieldInfoMap tabInfo of Nothing -> return () Just _ -> throw400 AlreadyExists $ "cannot rename relationship " <> oldRN diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Diff.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Diff.hs index aae14ae5139..d39486e60d2 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Diff.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Diff.hs @@ -42,6 +42,7 @@ data PGColMeta , pcmOrdinalPosition :: !Int , pcmDataType :: !PGScalarType , pcmIsNullable :: !Bool + , pcmReferences :: ![QualifiedTable] } deriving (Show, Eq) $(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''PGColMeta) @@ -87,8 +88,8 @@ data TableDiff = TableDiff { _tdNewName :: !(Maybe QualifiedTable) , _tdDroppedCols :: ![PGCol] - , _tdAddedCols :: ![PGColInfo] - , _tdAlteredCols :: ![(PGColInfo, PGColInfo)] + , _tdAddedCols :: ![PGRawColInfo] + , _tdAlteredCols :: ![(PGRawColInfo, PGRawColInfo)] , _tdDroppedFKeyCons :: ![ConstraintName] -- The final list of uniq/primary constraint names -- used for generating types on_conflict clauses @@ -116,8 +117,8 @@ getTableDiff oldtm newtm = existingCols = getOverlap pcmOrdinalPosition oldCols newCols - pcmToPci (PGColMeta colName _ colType isNullable) - = PGColInfo colName colType isNullable + pcmToPci (PGColMeta colName _ colType isNullable references) + = PGRawColInfo colName colType isNullable references alteredCols = flip map (filter (uncurry (/=)) existingCols) $ pcmToPci *** pcmToPci @@ -137,7 +138,7 @@ getTableDiff oldtm newtm = getTableChangeDeps :: (QErrM m, CacheRWM m) - => TableInfo -> TableDiff -> m [SchemaObjId] + => TableInfo PGColInfo -> TableDiff -> m [SchemaObjId] getTableChangeDeps ti tableDiff = do sc <- askSchemaCache -- for all the dropped columns @@ -150,7 +151,7 @@ getTableChangeDeps ti tableDiff = do return $ getDependentObjs sc objId return $ droppedConsDeps <> droppedColDeps where - tn = tiName ti + tn = _tiName ti TableDiff _ droppedCols _ _ droppedFKeyConstraints _ = tableDiff data SchemaDiff diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Enum.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Enum.hs new file mode 100644 index 00000000000..e856bce6bdf --- /dev/null +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Enum.hs @@ -0,0 +1,135 @@ +-- | Types and functions for interacting with and manipulating SQL enums represented by +-- /single-column tables/, __not__ native Postgres enum types. Native enum types in Postgres are +-- difficult to change, so we discourage their use, but we might add support for native enum types +-- in the future. +module Hasura.RQL.DDL.Schema.Enum ( + -- * Re-exports from "Hasura.RQL.Types.Column" + EnumReference(..) 
+ , EnumValues + , EnumValueInfo(..) + , EnumValue(..) + + -- * Loading enum values + , fetchAndValidateEnumValues + ) where + +import Hasura.Prelude + +import Control.Monad.Validate +import Data.List (delete) + +import qualified Data.HashMap.Strict as M +import qualified Data.List.NonEmpty as NE +import qualified Data.Text as T +import qualified Database.PG.Query as Q +import qualified Language.GraphQL.Draft.Syntax as G + +import Hasura.Db +import Hasura.GraphQL.Utils +import Hasura.RQL.Types.Column +import Hasura.RQL.Types.Error +import Hasura.SQL.Types + +import qualified Hasura.SQL.DML as S + +data EnumTableIntegrityError + = EnumTableMissingPrimaryKey + | EnumTableMultiColumnPrimaryKey ![PGCol] + | EnumTableNonTextualPrimaryKey !PGRawColInfo + | EnumTableNoEnumValues + | EnumTableInvalidEnumValueNames !(NE.NonEmpty T.Text) + | EnumTableNonTextualCommentColumn !PGRawColInfo + | EnumTableTooManyColumns ![PGCol] + deriving (Show, Eq) + +fetchAndValidateEnumValues + :: (MonadTx m) + => QualifiedTable + -> [PGRawColInfo] + -> [PGRawColInfo] + -> m EnumValues +fetchAndValidateEnumValues tableName primaryKeyColumns columnInfos = + either (throw400 ConstraintViolation . showErrors) pure =<< runValidateT fetchAndValidate + where + fetchAndValidate :: (MonadTx m, MonadValidate [EnumTableIntegrityError] m) => m EnumValues + fetchAndValidate = do + maybePrimaryKey <- tolerate validatePrimaryKey + maybeCommentColumn <- validateColumns maybePrimaryKey + enumValues <- maybe (refute mempty) (fetchEnumValues maybeCommentColumn) maybePrimaryKey + validateEnumValues enumValues + pure enumValues + where + validatePrimaryKey = case primaryKeyColumns of + [] -> refute [EnumTableMissingPrimaryKey] + [column] -> case prciType column of + PGText -> pure column + _ -> refute [EnumTableNonTextualPrimaryKey column] + _ -> refute [EnumTableMultiColumnPrimaryKey $ map prciName primaryKeyColumns] + + validateColumns primaryKeyColumn = do + let nonPrimaryKeyColumns = maybe columnInfos (`delete` columnInfos) primaryKeyColumn + case nonPrimaryKeyColumns of + [] -> pure Nothing + [column] -> case prciType column of + PGText -> pure $ Just column + _ -> dispute [EnumTableNonTextualCommentColumn column] $> Nothing + columns -> dispute [EnumTableTooManyColumns $ map prciName columns] $> Nothing + + fetchEnumValues maybeCommentColumn primaryKeyColumn = do + let nullExtr = S.Extractor S.SENull Nothing + commentExtr = maybe nullExtr (S.mkExtr . prciName) maybeCommentColumn + query = Q.fromBuilder $ toSQL S.mkSelect + { S.selFrom = Just $ S.mkSimpleFromExp tableName + , S.selExtr = [S.mkExtr (prciName primaryKeyColumn), commentExtr] } + fmap mkEnumValues . liftTx $ Q.withQE defaultTxErrorHandler query () True + + mkEnumValues rows = M.fromList . flip map rows $ \(key, comment) -> + (EnumValue key, EnumValueInfo comment) + + validateEnumValues enumValues = do + let enumValueNames = map (G.Name . getEnumValue) (M.keys enumValues) + when (null enumValueNames) $ + refute [EnumTableNoEnumValues] + let badNames = map G.unName $ filter (not . 
isValidEnumName) enumValueNames + for_ (NE.nonEmpty badNames) $ \someBadNames -> + refute [EnumTableInvalidEnumValueNames someBadNames] + + -- https://graphql.github.io/graphql-spec/June2018/#EnumValue + isValidEnumName name = + isValidName name && name `notElem` ["true", "false", "null"] + + showErrors :: [EnumTableIntegrityError] -> T.Text + showErrors allErrors = + "the table " <> tableName <<> " cannot be used as an enum " <> reasonsMessage + where + reasonsMessage = case allErrors of + [singleError] -> "because " <> showOne singleError + _ -> "for the following reasons:\n" <> T.unlines + (map ((" • " <>) . showOne) allErrors) + + showOne :: EnumTableIntegrityError -> T.Text + showOne = \case + EnumTableMissingPrimaryKey -> "the table must have a primary key" + EnumTableMultiColumnPrimaryKey cols -> + "the table’s primary key must not span multiple columns (" + <> T.intercalate ", " (map dquoteTxt $ sort cols) <> ")" + EnumTableNonTextualPrimaryKey colInfo -> typeMismatch "primary key" colInfo PGText + EnumTableNoEnumValues -> "the table must have at least one row" + EnumTableInvalidEnumValueNames values -> + let pluralString = " are not valid GraphQL enum value names" + valuesString = case NE.reverse (NE.sort values) of + value NE.:| [] -> "value " <> value <<> " is not a valid GraphQL enum value name" + value2 NE.:| [value1] -> "values " <> value1 <<> " and " <> value2 <<> pluralString + lastValue NE.:| otherValues -> + "values " <> T.intercalate ", " (map dquoteTxt $ reverse otherValues) <> ", and " + <> lastValue <<> pluralString + in "the " <> valuesString + EnumTableNonTextualCommentColumn colInfo -> typeMismatch "comment column" colInfo PGText + EnumTableTooManyColumns cols -> + "the table must have exactly one primary key and optionally one comment column, not " + <> T.pack (show $ length cols) <> " columns (" + <> T.intercalate ", " (map dquoteTxt $ sort cols) <> ")" + where + typeMismatch description colInfo expected = + "the table’s " <> description <> " (" <> prciName colInfo <<> ") must have type " + <> expected <<> ", not type " <>> prciType colInfo diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Rename.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Rename.hs index 44f64fd5d8d..4a6eb0e7b09 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Rename.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Rename.hs @@ -1,3 +1,6 @@ +-- | Functions for mutating the catalog (with integrity checking) to incorporate schema changes +-- discovered after applying a user-supplied SQL query. None of these functions modify the schema +-- cache, so it must be reloaded after the catalog is updated. 
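+--
+-- As a sketch of the intended call pattern (names borrowed from elsewhere in
+-- this patch; the enclosing transaction and error handling are elided):
+--
+-- > renameTableInCatalog newQT oldQT
+-- > buildSchemaCacheFor (MOTable newQT) -- the cache is stale until rebuilt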
module Hasura.RQL.DDL.Schema.Rename ( renameTableInCatalog , renameColInCatalog @@ -70,7 +73,7 @@ renameTableInCatalog newQT oldQT = do renameColInCatalog :: (MonadTx m, CacheRM m) - => PGCol -> PGCol -> QualifiedTable -> TableInfo -> m () + => PGCol -> PGCol -> QualifiedTable -> TableInfo PGColInfo -> m () renameColInCatalog oCol nCol qt ti = do sc <- askSchemaCache -- Check if any relation exists with new column name @@ -90,7 +93,7 @@ renameColInCatalog oCol nCol qt ti = do where errMsg = "cannot rename column " <> oCol <<> " to " <>> nCol assertFldNotExists = - case M.lookup (fromPGCol oCol) $ tiFieldInfoMap ti of + case M.lookup (fromPGCol oCol) $ _tiFieldInfoMap ti of Just (FIRelationship _) -> throw400 AlreadyExists $ "cannot rename column " <> oCol <<> " to " <> nCol <<> " in table " <> qt <<> diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Table.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Table.hs index ed60b710e09..72b1e346113 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Table.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Table.hs @@ -16,6 +16,7 @@ import Hasura.RQL.DDL.Permission.Internal import Hasura.RQL.DDL.Relationship import Hasura.RQL.DDL.RemoteSchema import Hasura.RQL.DDL.Schema.Diff +import Hasura.RQL.DDL.Schema.Enum import Hasura.RQL.DDL.Schema.Function import Hasura.RQL.DDL.Schema.Rename import Hasura.RQL.DDL.Utils @@ -28,6 +29,7 @@ import Hasura.SQL.Types import qualified Database.PG.Query as Q import qualified Hasura.GraphQL.Schema as GS +import Control.Lens.Extended hiding ((.=)) import Data.Aeson import Data.Aeson.Casing import Data.Aeson.TH @@ -48,23 +50,52 @@ delTableFromCatalog (QualifiedObject sn tn) = WHERE table_schema = $1 AND table_name = $2 |] (sn, tn) False -saveTableToCatalog :: QualifiedTable -> Q.Tx () -saveTableToCatalog (QualifiedObject sn tn) = +saveTableToCatalog :: TrackTable -> Q.Tx () +saveTableToCatalog (TrackTable (QualifiedObject sn tn) isEnum) = Q.unitQ [Q.sql| - INSERT INTO "hdb_catalog"."hdb_table" VALUES ($1, $2) - |] (sn, tn) False + INSERT INTO "hdb_catalog"."hdb_table" (table_schema, table_name, is_enum) + VALUES ($1, $2, $3) + |] (sn, tn, isEnum) False -newtype TrackTable +data TrackTable = TrackTable - { tName :: QualifiedTable } - deriving (Show, Eq, FromJSON, ToJSON, Lift) + { tName :: !QualifiedTable + , tIsEnum :: !Bool + } deriving (Show, Eq, Lift) + +instance FromJSON TrackTable where + parseJSON v = withOptions <|> withoutOptions + where + withOptions = flip (withObject "TrackTable") v $ \o -> TrackTable + <$> o .: "table" + <*> o .:? "is_enum" .!= False + withoutOptions = TrackTable <$> parseJSON v <*> pure False + +instance ToJSON TrackTable where + toJSON (TrackTable name isEnum) + | isEnum = object [ "table" .= name, "is_enum" .= isEnum ] + | otherwise = toJSON name + +data SetTableIsEnum + = SetTableIsEnum + { stieTable :: !QualifiedTable + , stieIsEnum :: !Bool + } deriving (Show, Eq, Lift) +$(deriveJSON (aesonDrop 4 snakeCase) ''SetTableIsEnum) + +data UntrackTable = + UntrackTable + { utTable :: !QualifiedTable + , utCascade :: !(Maybe Bool) + } deriving (Show, Eq, Lift) +$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''UntrackTable) -- | Track table/view, Phase 1: -- Validate table tracking operation. Fails if table is already being tracked, -- or if a function with the same name is being tracked. 
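+--
+-- With the new 'FromJSON' instance for 'TrackTable' above, the @track_table@
+-- API accepts either the old payload or the extended form (a sketch, assuming
+-- the usual RQL @{type, args}@ envelope):
+--
+-- > {"type": "track_table", "args": "colors"}
+-- > {"type": "track_table", "args": {"table": "colors", "is_enum": true}}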
 -- | Track table/view, Phase 1:
 -- Validate table tracking operation. Fails if table is already being tracked,
 -- or if a function with the same name is being tracked.
 trackExistingTableOrViewP1
   :: (CacheRM m, UserInfoM m, QErrM m) => TrackTable -> m ()
-trackExistingTableOrViewP1 (TrackTable vn) = do
+trackExistingTableOrViewP1 TrackTable { tName = vn } = do
   adminOnly
   rawSchemaCache <- askSchemaCache
   when (M.member vn $ scTables rawSchemaCache) $
@@ -74,42 +105,39 @@ trackExistingTableOrViewP1 (TrackTable vn) = do
     throw400 NotSupported $ "function with name " <> vn <<> " already exists"
 
 trackExistingTableOrViewP2
-  :: (QErrM m, CacheRWM m, MonadTx m)
-  => QualifiedTable -> Bool -> m EncJSON
-trackExistingTableOrViewP2 vn isSystemDefined = do
+  :: (QErrM m, CacheRWM m, MonadTx m, MonadIO m, HasHttpManager m, HasSQLGenCtx m)
+  => TrackTable -> m EncJSON
+trackExistingTableOrViewP2 query@TrackTable { tName = tableName } = do
   sc <- askSchemaCache
   let defGCtx = scDefaultRemoteGCtx sc
-  GS.checkConflictingNode defGCtx $ GS.qualObjectToName vn
+  GS.checkConflictingNode defGCtx $ GS.qualObjectToName tableName
 
-  tables <- liftTx fetchTableCatalog
-  case tables of
-    []   -> throw400 NotExists $ "no such table/view exists in postgres : " <>> vn
-    [ti] -> addTableToCache ti
-    _    -> throw500 $ "more than one row found for: " <>> vn
-  liftTx $ Q.catchE defaultTxErrorHandler $ saveTableToCatalog vn
+  liftTx $ Q.catchE defaultTxErrorHandler $ saveTableToCatalog query
+  buildSchemaCacheFor (MOTable tableName)
   return successMsg
-  where
-    QualifiedObject sn tn = vn
-    mkTableInfo (cols, pCols, constraints, viewInfoM) =
-      let colMap = M.fromList $ flip map (Q.getAltJ cols) $
-                   \c -> (fromPGCol $ pgiName c, FIColumn c)
-      in TableInfo vn isSystemDefined colMap mempty (Q.getAltJ constraints)
-         (Q.getAltJ pCols) (Q.getAltJ viewInfoM) mempty
-    fetchTableCatalog = map mkTableInfo <$>
-      Q.listQE defaultTxErrorHandler [Q.sql|
-           SELECT columns, primary_key_columns,
-                  constraints, view_info
-             FROM hdb_catalog.hdb_table_info_agg
-            WHERE table_schema = $1 AND table_name = $2
-          |] (sn, tn) True
 
 runTrackTableQ
-  :: (QErrM m, CacheRWM m, MonadTx m, UserInfoM m)
+  :: (QErrM m, CacheRWM m, MonadTx m, UserInfoM m, MonadIO m, HasHttpManager m, HasSQLGenCtx m)
   => TrackTable -> m EncJSON
 runTrackTableQ q = do
   trackExistingTableOrViewP1 q
-  trackExistingTableOrViewP2 (tName q) False
+  trackExistingTableOrViewP2 q
+
+runSetExistingTableIsEnumQ
+  :: (QErrM m, CacheRWM m, MonadTx m, UserInfoM m, MonadIO m, HasHttpManager m, HasSQLGenCtx m)
+  => SetTableIsEnum -> m EncJSON
+runSetExistingTableIsEnumQ (SetTableIsEnum tableName isEnum) = do
+  adminOnly
+  void $ askTabInfo tableName -- assert that table is tracked
+
+  liftTx $ Q.unitQE defaultTxErrorHandler [Q.sql|
+      UPDATE "hdb_catalog"."hdb_table" SET is_enum = $3
+      WHERE table_schema = $1 AND table_name = $2
+    |] (qSchema tableName, qName tableName, isEnum) False
+
+  buildSchemaCacheFor (MOTable tableName)
+  return successMsg
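-- A usage sketch for the handler above, as a metadata API payload. The
-- command name and table are assumptions for illustration (this patch shows
-- the handler itself, not its route registration):
--
--   {
--     "type": "set_table_is_enum",
--     "args": {
--       "table": { "schema": "public", "name": "colors" },
--       "is_enum": true
--     }
--   }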
 
 purgeDep :: (CacheRWM m, MonadTx m)
          => SchemaObjId -> m ()
@@ -133,134 +161,6 @@ purgeDep schemaObjId = case schemaObjId of
   _ -> throw500 $ "unexpected dependent object : "
        <> reportSchemaObj schemaObjId
 
-processTableChanges :: (MonadTx m, CacheRWM m)
-                    => TableInfo -> TableDiff -> m Bool
-processTableChanges ti tableDiff = do
-  -- If table rename occurs then don't replace constraints and
-  -- process dropped/added columns, because schema reload happens eventually
-  sc <- askSchemaCache
-  let tn = tiName ti
-      withOldTabName = do
-        -- replace constraints
-        replaceConstraints tn
-        -- for all the dropped columns
-        procDroppedCols tn
-        -- for all added columns
-        procAddedCols tn
-        -- for all altered columns
-        procAlteredCols sc tn
-
-      withNewTabName newTN = do
-        let tnGQL = GS.qualObjectToName newTN
-            defGCtx = scDefaultRemoteGCtx sc
-        -- check for GraphQL schema conflicts on new name
-        GS.checkConflictingNode defGCtx tnGQL
-        void $ procAlteredCols sc tn
-        -- update new table in catalog
-        renameTableInCatalog newTN tn
-        return True
-
-  maybe withOldTabName withNewTabName mNewName
-
-  where
-    TableDiff mNewName droppedCols addedCols alteredCols _ constraints = tableDiff
-    replaceConstraints tn = flip modTableInCache tn $ \tInfo ->
-      return $ tInfo {tiUniqOrPrimConstraints = constraints}
-
-    procDroppedCols tn =
-      forM_ droppedCols $ \droppedCol ->
-        -- Drop the column from the cache
-        delColFromCache droppedCol tn
-
-    procAddedCols tn =
-      -- In the newly added columns check that there is no conflict with relationships
-      forM_ addedCols $ \pci@(PGColInfo colName _ _) ->
-        case M.lookup (fromPGCol colName) $ tiFieldInfoMap ti of
-          Just (FIRelationship _) ->
-            throw400 AlreadyExists $ "cannot add column " <> colName
-            <<> " in table " <> tn <<>
-            " as a relationship with the name already exists"
-          _ -> addColToCache colName pci tn
-
-    procAlteredCols sc tn = fmap or $ forM alteredCols $
-      \( PGColInfo oColName oColTy oNullable
-       , npci@(PGColInfo nColName nColTy nNullable)
-       ) ->
-        if | oColName /= nColName -> do
-               renameColInCatalog oColName nColName tn ti
-               return True
-
-           | oColTy /= nColTy -> do
-               let colId = SOTableObj tn $ TOCol oColName
-                   typeDepObjs = getDependentObjsWith (== DROnType) sc colId
-
-               -- Raise exception if any objects found which are dependant on column type
-               unless (null typeDepObjs) $ throw400 DependencyError $
-                 "cannot change type of column " <> oColName <<> " in table "
-                 <> tn <<> " because of the following dependencies : " <>
-                 reportSchemaObjs typeDepObjs
-
-               -- Update column type in cache
-               updColInCache nColName npci tn
-
-               -- If any dependant permissions found with the column whose type
-               -- being altered is provided with a session variable,
-               -- then rebuild permission info and update the cache
-               let sessVarDepObjs =
-                     getDependentObjsWith (== DRSessionVariable) sc colId
-               forM_ sessVarDepObjs $ \objId ->
-                 case objId of
-                   SOTableObj qt (TOPerm rn pt) -> rebuildPermInfo qt rn pt
-                   _ -> throw500
-                     "unexpected schema dependency found for altering column type"
-               return False
-
-           | oNullable /= nNullable -> do
-               updColInCache nColName npci tn
-               return False
-
-           | otherwise -> return False
-
-delTableAndDirectDeps
-  :: (QErrM m, CacheRWM m, MonadTx m) => QualifiedTable -> m ()
-delTableAndDirectDeps qtn@(QualifiedObject sn tn) = do
-  liftTx $ Q.catchE defaultTxErrorHandler $ do
-    Q.unitQ [Q.sql|
-             DELETE FROM "hdb_catalog"."hdb_relationship"
-             WHERE table_schema = $1 AND table_name = $2
-              |] (sn, tn) False
-    Q.unitQ [Q.sql|
-             DELETE FROM "hdb_catalog"."hdb_permission"
-             WHERE table_schema = $1 AND table_name = $2
-              |] (sn, tn) False
-    Q.unitQ [Q.sql|
-             DELETE FROM "hdb_catalog"."event_triggers"
-             WHERE schema_name = $1 AND table_name = $2
-              |] (sn, tn) False
-    delTableFromCatalog qtn
-  delTableFromCache qtn
-
-processSchemaChanges :: (MonadTx m, CacheRWM m) => SchemaDiff -> m Bool
-processSchemaChanges schemaDiff = do
-  -- Purge the dropped tables
-  mapM_ delTableAndDirectDeps droppedTables
-
-  sc <- askSchemaCache
-  fmap or $ forM alteredTables $ \(oldQtn, tableDiff) -> do
-    ti <- case M.lookup oldQtn $ scTables sc of
-      Just ti -> return ti
-      Nothing -> throw500 $ "old table metadata not found in cache : " <>> oldQtn
-    processTableChanges ti tableDiff
-  where
-    SchemaDiff droppedTables alteredTables = schemaDiff
-
-data UntrackTable =
-  UntrackTable
-  { utTable :: !QualifiedTable
-  , utCascade :: !(Maybe Bool)
-  } deriving (Show, Eq, Lift)
-$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''UntrackTable)
-
 unTrackExistingTableOrViewP1
   :: (CacheRM m, UserInfoM m, QErrM m) => UntrackTable -> m ()
 unTrackExistingTableOrViewP1 (UntrackTable vn _) = do
@@ -269,7 +169,7 @@ unTrackExistingTableOrViewP1 (UntrackTable vn _) = do
   case M.lookup vn (scTables rawSchemaCache) of
     Just ti ->
       -- Check if table/view is system defined
-      when (tiSystemDefined ti) $ throw400 NotSupported $
+      when (_tiSystemDefined ti) $ throw400 NotSupported $
         vn <<> " is system defined, cannot untrack"
     Nothing -> throw400 AlreadyUntracked $
       "view/table already untracked : " <>> vn
@@ -306,17 +206,220 @@ runUntrackTableQ q = do
   unTrackExistingTableOrViewP1 q
   unTrackExistingTableOrViewP2 q
 
-handleInconsistentObj
+processTableChanges :: (MonadTx m, CacheRWM m)
+                    => TableInfo PGColInfo -> TableDiff -> m Bool
+processTableChanges ti tableDiff = do
+  -- If table rename occurs then don't replace constraints and
+  -- process dropped/added columns, because schema reload happens eventually
+  sc <- askSchemaCache
+  let tn = _tiName ti
+      withOldTabName = do
+        replaceConstraints tn
+        procDroppedCols tn
+        procAddedCols tn
+        procAlteredCols sc tn
+
+      withNewTabName newTN = do
+        let tnGQL = GS.qualObjectToName newTN
+            defGCtx = scDefaultRemoteGCtx sc
+        -- check for GraphQL schema conflicts on new name
+        GS.checkConflictingNode defGCtx tnGQL
+        void $ procAlteredCols sc tn
+        -- update new table in catalog
+        renameTableInCatalog newTN tn
+        return True
+
+  maybe withOldTabName withNewTabName mNewName
+
+  where
+    TableDiff mNewName droppedCols addedCols alteredCols _ constraints = tableDiff
+    replaceConstraints tn = flip modTableInCache tn $ \tInfo ->
+      return $ tInfo {_tiUniqOrPrimConstraints = constraints}
+
+    procDroppedCols tn =
+      forM_ droppedCols $ \droppedCol ->
+        -- Drop the column from the cache
+        delColFromCache droppedCol tn
+
+    procAddedCols tn =
+      -- In the newly added columns check that there is no conflict with relationships
+      forM_ addedCols $ \rawInfo@(PGRawColInfo colName _ _ _) ->
+        case M.lookup (fromPGCol colName) $ _tiFieldInfoMap ti of
+          Just (FIRelationship _) ->
+            throw400 AlreadyExists $ "cannot add column " <> colName
+            <<> " in table " <> tn <<>
+            " as a relationship with the name already exists"
+          _ -> do
+            info <- processColumnInfoUsingCache tn rawInfo
+            addColToCache colName info tn
+
+    procAlteredCols sc tn = fmap or $ forM alteredCols $
+      \( PGRawColInfo oldName oldType _ _
+       , newRawInfo@(PGRawColInfo newName newType _ _) ) -> do
+        let performColumnUpdate = do
+              newInfo <- processColumnInfoUsingCache tn newRawInfo
+              updColInCache newName newInfo tn
+
+        if | oldName /= newName -> renameColInCatalog oldName newName tn ti $> True
+
+           | oldType /= newType -> do
+               let colId = SOTableObj tn $ TOCol oldName
+                   typeDepObjs = getDependentObjsWith (== DROnType) sc colId
+
+               unless (null typeDepObjs) $ throw400 DependencyError $
+                 "cannot change type of column " <> oldName <<> " in table "
+                 <> tn <<> " because of the following dependencies : " <>
+                 reportSchemaObjs typeDepObjs
+
+               performColumnUpdate
+
+               -- If any dependent permissions are found that use a session variable with the
+               -- column whose type is being altered, rebuild their permission info and update
+               -- the cache
+               let sessVarDepObjs = getDependentObjsWith (== DRSessionVariable) sc colId
+               forM_ sessVarDepObjs $ \case
+                 SOTableObj qt (TOPerm rn pt) -> rebuildPermInfo qt rn pt
+                 _ -> throw500 "unexpected schema dependency found for altering column type"
+               pure False
+
+           | otherwise -> performColumnUpdate $> False
+
+delTableAndDirectDeps
+  :: (QErrM m, CacheRWM m, MonadTx m) => QualifiedTable -> m ()
+delTableAndDirectDeps qtn@(QualifiedObject sn tn) = do
+  liftTx $ Q.catchE defaultTxErrorHandler $ do
+    Q.unitQ [Q.sql|
+             DELETE FROM "hdb_catalog"."hdb_relationship"
+             WHERE table_schema = $1 AND table_name = $2
+              |] (sn, tn) False
+    Q.unitQ [Q.sql|
+             DELETE FROM "hdb_catalog"."hdb_permission"
+             WHERE table_schema = $1 AND table_name = $2
+              |] (sn, tn) False
+    Q.unitQ [Q.sql|
+             DELETE FROM "hdb_catalog"."event_triggers"
+             WHERE schema_name = $1 AND table_name = $2
+              |] (sn, tn) False
+    delTableFromCatalog qtn
+  delTableFromCache qtn
+
+processSchemaChanges :: (MonadTx m, CacheRWM m) => SchemaDiff -> m Bool
+processSchemaChanges schemaDiff = do
+  -- Purge the dropped tables
+  mapM_ delTableAndDirectDeps droppedTables
+
+  sc <- askSchemaCache
+  fmap or $ forM alteredTables $ \(oldQtn, tableDiff) -> do
+    ti <- case M.lookup oldQtn $ scTables sc of
+      Just ti -> return ti
+      Nothing -> throw500 $ "old table metadata not found in cache : " <>> oldQtn
+    processTableChanges ti tableDiff
+  where
+    SchemaDiff droppedTables alteredTables = schemaDiff
+
+-- | Builds an initial @'TableCache' 'PGColInfo'@ from catalog information. Does not fill in
+-- '_tiRolePermInfoMap' or '_tiEventTriggerInfoMap' at all, and '_tiFieldInfoMap' only contains
+-- columns, not relationships; those pieces of information are filled in by later stages.
+buildTableCache
+  :: forall m. (MonadTx m, CacheRWM m)
+  => [CatalogTable] -> m (TableCache PGColInfo)
+buildTableCache = processTableCache <=< buildRawTableCache
+  where
+    withTable name = withSchemaObject $
+      InconsistentMetadataObj (MOTable name) MOTTable (toJSON name)
+
+    -- Step 1: Build the raw table cache from metadata information.
+    buildRawTableCache :: [CatalogTable] -> m (TableCache PGRawColInfo)
+    buildRawTableCache catalogTables = fmap (M.fromList . catMaybes) . for catalogTables $
+      \(CatalogTable name isSystemDefined isEnum maybeInfo) -> withTable name $ do
+        catalogInfo <- onNothing maybeInfo $
+          throw400 NotExists $ "no such table/view exists in postgres: " <>> name
+
+        let CatalogTableInfo columns constraints primaryKeyColumnNames viewInfo = catalogInfo
+            columnFields = M.fromList . flip map columns $ \column ->
+              (fromPGCol $ prciName column, FIColumn column)
+
+            primaryKeyColumns = flip filter columns $ \column ->
+              prciName column `elem` primaryKeyColumnNames
+            fetchEnumValues = fetchAndValidateEnumValues name primaryKeyColumns columns
+
+        maybeEnumValues <- if isEnum then Just <$> fetchEnumValues else pure Nothing
+
+        let info = TableInfo
+              { _tiName = name
+              , _tiSystemDefined = isSystemDefined
+              , _tiFieldInfoMap = columnFields
+              , _tiRolePermInfoMap = mempty
+              , _tiUniqOrPrimConstraints = constraints
+              , _tiPrimaryKeyCols = primaryKeyColumnNames
+              , _tiViewInfo = viewInfo
+              , _tiEventTriggerInfoMap = mempty
+              , _tiEnumValues = maybeEnumValues }
+        pure (name, info)
+
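-- Why buildTableCache runs in two steps: a column's logical type can depend
-- on *another* table's enum values, so every raw table must be loaded before
-- any column type is resolved. A stand-in sketch of that shape (simplified
-- types, not the real ones; for brevity, every referenced table is treated
-- here as a potential enum table):
import qualified Data.Map.Strict as Map

data RawColumn = RawColumn { rawColName :: String, rawColReferences :: [String] }

resolveTypes :: Map.Map String [RawColumn] -> Map.Map String [(String, String)]
resolveTypes rawTables = fmap (map resolve) rawTables
  where
    -- pass 2 consults the full table map built by pass 1
    resolve col = case rawColReferences col of
      [ref] | Map.member ref rawTables -> (rawColName col, "enum reference to " <> ref)
      _                                -> (rawColName col, "scalar")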
+    -- Step 2: Process the raw table cache to replace Postgres column types with logical column
+    -- types.
+    processTableCache :: TableCache PGRawColInfo -> m (TableCache PGColInfo)
+    processTableCache rawTables = fmap (M.mapMaybe id) . for rawTables $ \rawInfo -> do
+      let tableName = _tiName rawInfo
+      withTable tableName $ rawInfo
+        & tiFieldInfoMap.traverse._FIColumn %%~ processColumnInfo enumTables tableName
+      where
+        enumTables = M.mapMaybe _tiEnumValues rawTables
+
+-- | “Processes” a 'PGRawColInfo' into a 'PGColInfo' by resolving its type using a map of known
+-- enum tables.
+processColumnInfo
+  :: (QErrM m)
+  => M.HashMap QualifiedTable EnumValues -- ^ known enum tables
+  -> QualifiedTable -- ^ the table this column belongs to
+  -> PGRawColInfo -- ^ the column’s raw information
+  -> m PGColInfo
processColumnInfo enumTables tableName rawInfo = do
+  resolvedType <- resolveColumnType
+  pure PGColInfo
+    { pgiName = prciName rawInfo
+    , pgiType = resolvedType
+    , pgiIsNullable = prciIsNullable rawInfo }
+  where
+    resolveColumnType =
+      case prciReferences rawInfo of
+        -- no referenced tables? definitely not an enum
+        [] -> pure $ PGColumnScalar (prciType rawInfo)
+
+        -- one referenced table? might be an enum, so check if the referenced table is an enum
+        [referencedTableName] -> pure $ M.lookup referencedTableName enumTables & maybe
+          (PGColumnScalar $ prciType rawInfo)
+          (PGColumnEnumReference . EnumReference referencedTableName)
+
+        -- multiple referenced tables? we could check if any of them are enums, but the schema is
+        -- strange, so let’s just reject it
+        referencedTables -> throw400 ConstraintViolation
+          $ "cannot handle exotic schema: column " <> prciName rawInfo <<> " in table "
+          <> tableName <<> " references multiple foreign tables ("
+          <> T.intercalate ", " (map dquote referencedTables) <> ")?"
+
+-- | Like 'processColumnInfo', but uses the information in the current schema cache to resolve a
+-- column’s type.
+processColumnInfoUsingCache :: (CacheRM m, QErrM m) => QualifiedTable -> PGRawColInfo -> m PGColInfo
+processColumnInfoUsingCache tableName rawInfo = do
+  tables <- scTables <$> askSchemaCache
+  processColumnInfo (M.mapMaybe _tiEnumValues tables) tableName rawInfo
+
+withSchemaObject
   :: (QErrM m, CacheRWM m)
   => (T.Text -> InconsistentMetadataObj)
-  -> m ()
-  -> m ()
-handleInconsistentObj f action =
-  action `catchError` \err -> do
+  -> m a
+  -> m (Maybe a)
+withSchemaObject f action =
+  (Just <$> action) `catchError` \err -> do
     sc <- askSchemaCache
     let inconsObj = f $ qeError err
        allInconsObjs = inconsObj:scInconsistentObjs sc
-    writeSchemaCache $ sc{scInconsistentObjs = allInconsObjs}
+    writeSchemaCache sc { scInconsistentObjs = allInconsObjs }
+    pure Nothing
+
+withSchemaObject_ :: (QErrM m, CacheRWM m) => (T.Text -> InconsistentMetadataObj) -> m () -> m ()
+withSchemaObject_ f = void . withSchemaObject f
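-- The recovery pattern withSchemaObject implements, sketched with IO
-- exceptions instead of the real QErr/CacheRWM stack: run an action and, on
-- failure, record-and-continue (here simply return Nothing) rather than
-- abort the whole cache rebuild. A minimal stand-in:
{-# LANGUAGE ScopedTypeVariables #-}
import Control.Exception (SomeException, try)

withRecovery :: IO a -> IO (Maybe a)
withRecovery action = do
  result <- try action
  case result of
    Left (_ :: SomeException) -> pure Nothing       -- record as inconsistent, keep going
    Right value               -> pure (Just value)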
 
 checkNewInconsistentMeta
   :: (QErrM m)
@@ -344,6 +447,26 @@ buildSchemaCacheStrict = do
     let err = err400 Unexpected "cannot continue due to inconsistent metadata"
     throwError err{qeInternal = Just $ toJSON inconsObjs}
 
+-- | Rebuilds the schema cache. If an object with the given object id became newly inconsistent,
+-- raises an error about it specifically. Otherwise, raises a generic metadata inconsistency error.
+buildSchemaCacheFor
+  :: (MonadTx m, CacheRWM m, MonadIO m, HasHttpManager m, HasSQLGenCtx m)
+  => MetadataObjId -> m ()
+buildSchemaCacheFor objectId = do
+  oldSchemaCache <- askSchemaCache
+  buildSchemaCache
+  newSchemaCache <- askSchemaCache
+
+  let diffInconsistentObjects = getDifference _moId `on` scInconsistentObjs
+      newInconsistentObjects = newSchemaCache `diffInconsistentObjects` oldSchemaCache
+
+  for_ (find ((== objectId) . _moId) newInconsistentObjects) $ \matchingObject ->
+    throw400 ConstraintViolation (_moReason matchingObject)
+
+  unless (null newInconsistentObjects) $
+    throwError (err400 Unexpected "cannot continue due to new inconsistent metadata")
+      { qeInternal = Just $ toJSON newInconsistentObjects }
+
 buildSchemaCache
   :: (MonadTx m, CacheRWM m, MonadIO m, HasHttpManager m, HasSQLGenCtx m)
   => m ()
@@ -372,17 +495,7 @@ buildSchemaCacheG withSetup = do
   let fkeys = HS.fromList fkeys'
 
   -- tables
-  forM_ tables $ \ct -> do
-    let qt = _ctTable ct
-        isSysDef = _ctSystemDefined ct
-        tableInfoM = _ctInfo ct
-        mkInconsObj = InconsistentMetadataObj (MOTable qt)
-                      MOTTable $ toJSON $ TrackTable qt
-    modifyErr (\e -> "table " <> qt <<> "; " <> e) $
-      handleInconsistentObj mkInconsObj $ do
-        ti <- onNothing tableInfoM $ throw400 NotExists $
-              "no such table/view exists in postgres : " <>> qt
-        addTableToCache $ ti{tiSystemDefined = isSysDef}
+  modTableCache =<< buildTableCache tables
 
   -- relationships
   forM_ relationships $ \(CatalogRelation qt rn rt rDef cmnt) -> do
@@ -390,7 +503,7 @@ buildSchemaCacheG withSetup = do
         def = toJSON $ WithTable qt $ RelDef rn rDef cmnt
         mkInconsObj = InconsistentMetadataObj objId (MOTRel rt) def
     modifyErr (\e -> "table " <> qt <<> "; rel " <> rn <<> "; " <> e) $
-      handleInconsistentObj mkInconsObj $
+      withSchemaObject_ mkInconsObj $
       case rt of
        ObjRel -> do
          using <- decodeValue rDef
@@ -409,7 +522,7 @@ buildSchemaCacheG withSetup = do
         def = toJSON $ WithTable qt $ PermDef rn pDef cmnt
         mkInconsObj = InconsistentMetadataObj objId (MOTPerm pt) def
     modifyErr (\e -> "table " <> qt <<> "; role " <> rn <<> "; " <> e) $
-      handleInconsistentObj mkInconsObj $
+      withSchemaObject_ mkInconsObj $
      case pt of
        PTInsert -> permHelper withSetup sqlGenCtx qt rn pDef PAInsert
        PTSelect -> permHelper withSetup sqlGenCtx qt rn pDef PASelect
@@ -421,10 +534,10 @@ buildSchemaCacheG withSetup = do
     let objId = MOTableObj qt $ MTOTrigger trn
         def = object ["table" .= qt, "configuration" .= configuration]
         mkInconsObj = InconsistentMetadataObj objId MOTEventTrigger def
-    handleInconsistentObj mkInconsObj $ do
+    withSchemaObject_ mkInconsObj $ do
      etc <- decodeValue configuration
      subTableP2Setup qt etc
-      allCols <- getCols . tiFieldInfoMap <$> askTabInfo qt
+      allCols <- getCols . _tiFieldInfoMap <$> askTabInfo qt
      when withSetup $ liftTx $
        mkAllTriggersQ trn qt allCols (stringifyNum sqlGenCtx) (etcDefinition etc)
@@ -434,7 +547,7 @@ buildSchemaCacheG withSetup = do
         mkInconsObj =
           InconsistentMetadataObj (MOFunction qf) MOTFunction def
     modifyErr (\e -> "function " <> qf <<> "; " <> e) $
-      handleInconsistentObj mkInconsObj $ do
+      withSchemaObject_ mkInconsObj $ do
        rawfi <- onNothing rawfiM $
          throw400 NotExists $ "no such function exists in postgres : " <>> qf
        trackFunctionP2Setup qf rawfi
@@ -463,7 +576,7 @@ buildSchemaCacheG withSetup = do
     let AddRemoteSchemaQuery name _ _ = rs
         mkInconsObj = InconsistentMetadataObj (MORemoteSchema name)
                       MOTRemoteSchema (toJSON rs)
-    handleInconsistentObj mkInconsObj $ do
+    withSchemaObject_ mkInconsObj $ do
      rsCtx <- addRemoteSchemaP2Setup rs
      sc <- askSchemaCache
      let gCtxMap = scGCtxMap sc
@@ -475,11 +588,6 @@ buildSchemaCacheG withSetup = do
                 , scDefaultRemoteGCtx = mergedDefGCtx
                 }
 
-fetchCatalogData :: Q.TxE QErr CatalogMetadata
-fetchCatalogData =
-  (Q.getAltJ . runIdentity . Q.getRow) <$> Q.withQE defaultTxErrorHandler
-  $(Q.sqlFromFile "src-rsr/catalog_metadata.sql") () True
-
 data RunSQL
   = RunSQL
   { rSql :: T.Text
@@ -517,7 +625,6 @@ execWithMDCheck
   :: (QErrM m, CacheRWM m, MonadTx m, MonadIO m, HasHttpManager m, HasSQLGenCtx m)
   => RunSQL -> m EncJSON
 execWithMDCheck (RunSQL t cascade _) = do
-
   -- Drop hdb_views so no interference is caused to the sql query
   liftTx $ Q.catchE defaultTxErrorHandler clearHdbViews
@@ -582,16 +689,16 @@ execWithMDCheck (RunSQL t cascade _) = do
   postSc <- askSchemaCache
   -- recreate the insert permission infra
   forM_ (M.elems $ scTables postSc) $ \ti -> do
-    let tn = tiName ti
-    forM_ (M.elems $ tiRolePermInfoMap ti) $ \rpi ->
+    let tn = _tiName ti
+    forM_ (M.elems $ _tiRolePermInfoMap ti) $ \rpi ->
       maybe (return ()) (liftTx . buildInsInfra tn) $ _permIns rpi
 
   strfyNum <- stringifyNum <$> askSQLGenCtx
   --recreate triggers
   forM_ (M.elems $ scTables postSc) $ \ti -> do
-    let tn = tiName ti
-        cols = getCols $ tiFieldInfoMap ti
-    forM_ (M.toList $ tiEventTriggerInfoMap ti) $ \(trn, eti) -> do
+    let tn = _tiName ti
+        cols = getCols $ _tiFieldInfoMap ti
+    forM_ (M.toList $ _tiEventTriggerInfoMap ti) $ \(trn, eti) -> do
       let fullspec = etiOpsDef eti
       liftTx $ mkAllTriggersQ trn tn cols strfyNum fullspec
diff --git a/server/src-lib/Hasura/RQL/DML/Count.hs b/server/src-lib/Hasura/RQL/DML/Count.hs
index 67b1d4b15ef..c0f683aa73b 100644
--- a/server/src-lib/Hasura/RQL/DML/Count.hs
+++ b/server/src-lib/Hasura/RQL/DML/Count.hs
@@ -63,7 +63,7 @@ mkSQLCount (CountQueryP1 tn (permFltr, mWc) mDistCols) =
 
 validateCountQWith
   :: (UserInfoM m, QErrM m, CacheRM m)
   => SessVarBldr m
-  -> (PGScalarType -> Value -> m S.SQLExp)
+  -> (PGColumnType -> Value -> m S.SQLExp)
   -> CountQuery
   -> m CountQueryP1
 validateCountQWith sessVarBldr prepValBldr (CountQuery qt mDistCols mWhere) = do
@@ -73,7 +73,7 @@ validateCountQWith sessVarBldr prepValBldr (CountQuery qt mDistCols mWhere) = do
   selPerm <- modifyErr (<> selNecessaryMsg) $
              askSelPermInfo tableInfo
 
-  let colInfoMap = tiFieldInfoMap tableInfo
+  let colInfoMap = _tiFieldInfoMap tableInfo
 
   forM_ mDistCols $ \distCols -> do
     let distColAsrns = [ checkSelOnCol selPerm
diff --git a/server/src-lib/Hasura/RQL/DML/Delete.hs b/server/src-lib/Hasura/RQL/DML/Delete.hs
index 7fa9a5f1c7a..33f5025fc81 100644
--- a/server/src-lib/Hasura/RQL/DML/Delete.hs
+++ b/server/src-lib/Hasura/RQL/DML/Delete.hs
@@ -60,7 +60,7 @@ mkDeleteCTE (AnnDel tn (fltr, wc) _ _) =
 
 validateDeleteQWith
   :: (UserInfoM m, QErrM m, CacheRM m)
   => SessVarBldr m
-  -> (PGScalarType -> Value -> m S.SQLExp)
+  -> (PGColumnType -> Value -> m S.SQLExp)
   -> DeleteQuery
   -> m AnnDel
 validateDeleteQWith sessVarBldr prepValBldr
@@ -69,7 +69,7 @@ validateDeleteQWith sessVarBldr prepValBldr
 
   -- If table is view then check if it deletable
   mutableView tableName viIsDeletable
-    (tiViewInfo tableInfo) "deletable"
+    (_tiViewInfo tableInfo) "deletable"
 
   -- Check if the role has delete permissions
   delPerm <- askDelPermInfo tableInfo
@@ -81,7 +81,7 @@ validateDeleteQWith sessVarBldr prepValBldr
   selPerm <- modifyErr (<> selNecessaryMsg) $
              askSelPermInfo tableInfo
 
-  let fieldInfoMap = tiFieldInfoMap tableInfo
+  let fieldInfoMap = _tiFieldInfoMap tableInfo
       allCols = getCols fieldInfoMap
 
   -- convert the returning cols into sql returing exp
diff --git a/server/src-lib/Hasura/RQL/DML/Insert.hs b/server/src-lib/Hasura/RQL/DML/Insert.hs
index cc6281886be..be473aa9948 100644
--- a/server/src-lib/Hasura/RQL/DML/Insert.hs
+++ b/server/src-lib/Hasura/RQL/DML/Insert.hs
@@ -64,10 +64,10 @@ toSQLConflict conflict = case conflict of
 
 convObj
   :: (UserInfoM m, QErrM m)
-  => (PGScalarType -> Value -> m S.SQLExp)
+  => (PGColumnType -> Value -> m S.SQLExp)
   -> HM.HashMap PGCol S.SQLExp
   -> HM.HashMap PGCol S.SQLExp
-  -> FieldInfoMap
+  -> FieldInfoMap PGColInfo
   -> InsObj
   -> m ([PGCol], [S.SQLExp])
 convObj prepFn defInsVals setInsVals fieldInfoMap insObj = do
@@ -99,7 +99,7 @@ validateInpCols inpCols updColsPerm = forM_ inpCols $ \inpCol ->
 
 buildConflictClause
   :: (UserInfoM m, QErrM m)
   => SessVarBldr m
-  -> TableInfo
+  -> TableInfo PGColInfo
   -> [PGCol]
   -> OnConflict
   -> m ConflictClauseP1
@@ -131,8 +131,8 @@ buildConflictClause sessVarBldr tableInfo inpCols (OnConflict mTCol mTCons act)
     (Just _, Just _, _) -> throw400 UnexpectedPayload
       "'constraint' and 'constraint_on' cannot be set at a time"
   where
-    fieldInfoMap = tiFieldInfoMap tableInfo
-    toSQLBool = toSQLBoolExp (S.mkQual $ tiName tableInfo)
+    fieldInfoMap = _tiFieldInfoMap tableInfo
+    toSQLBool = toSQLBoolExp (S.mkQual $ _tiName tableInfo)
 
     validateCols c = do
       let targetcols = getPGCols c
@@ -140,11 +140,11 @@ buildConflictClause sessVarBldr tableInfo inpCols (OnConflict mTCol mTCons act)
         \pgCol -> askPGType fieldInfoMap pgCol ""
 
     validateConstraint c = do
-      let tableConsNames = tiUniqOrPrimConstraints tableInfo
+      let tableConsNames = _tiUniqOrPrimConstraints tableInfo
       withPathK "constraint" $
        unless (c `elem` tableConsNames) $
        throw400 Unexpected $ "constraint " <> getConstraintTxt c
-                   <<> " for table " <> tiName tableInfo
+                   <<> " for table " <> _tiName tableInfo
                    <<> " does not exist"
 
     getUpdPerm = do
@@ -160,7 +160,7 @@ convInsertQuery
   :: (UserInfoM m, QErrM m, CacheRM m)
   => (Value -> m [InsObj])
   -> SessVarBldr m
-  -> (PGScalarType -> Value -> m S.SQLExp)
+  -> (PGColumnType -> Value -> m S.SQLExp)
   -> InsertQuery
   -> m InsertQueryP1
 convInsertQuery objsParser sessVarBldr prepFn (InsertQuery tableName val oC mRetCols) = do
@@ -172,7 +172,7 @@ convInsertQuery objsParser sessVarBldr prepFn (InsertQuery tableName val oC mRet
 
   -- If table is view then check if it is insertable
   mutableView tableName viIsInsertable
-    (tiViewInfo tableInfo) "insertable"
+    (_tiViewInfo tableInfo) "insertable"
 
   -- Check if the role has insert permissions
   insPerm <- askInsPermInfo tableInfo
@@ -180,7 +180,7 @@ convInsertQuery objsParser sessVarBldr prepFn (InsertQuery tableName val oC mRet
   -- Check if all dependent headers are present
   validateHeaders $ ipiRequiredHeaders insPerm
 
-  let fieldInfoMap = tiFieldInfoMap tableInfo
+  let fieldInfoMap = _tiFieldInfoMap tableInfo
       setInsVals = ipiSet insPerm
 
   -- convert the returning cols into sql returing exp
diff --git a/server/src-lib/Hasura/RQL/DML/Internal.hs b/server/src-lib/Hasura/RQL/DML/Internal.hs
index a0113466b65..224fcbaebe5 100644
--- a/server/src-lib/Hasura/RQL/DML/Internal.hs
+++ b/server/src-lib/Hasura/RQL/DML/Internal.hs
@@ -41,13 +41,13 @@ instance UserInfoM DMLP1 where
 instance HasSQLGenCtx DMLP1 where
   askSQLGenCtx = DMLP1 $ lift askSQLGenCtx
 
-mkAdminRolePermInfo :: TableInfo -> RolePermInfo
+mkAdminRolePermInfo :: TableInfo PGColInfo -> RolePermInfo
 mkAdminRolePermInfo ti =
   RolePermInfo (Just i) (Just s) (Just u) (Just d)
   where
-    pgCols = map pgiName $ getCols $ tiFieldInfoMap ti
+    pgCols = map pgiName $ getCols $ _tiFieldInfoMap ti
 
-    tn = tiName ti
+    tn = _tiName ti
     i = InsPermInfo (HS.fromList pgCols) tn annBoolExpTrue M.empty []
     s = SelPermInfo (HS.fromList pgCols) tn annBoolExpTrue Nothing True []
@@ -57,14 +57,14 @@ mkAdminRolePermInfo ti =
 askPermInfo'
   :: (UserInfoM m)
   => PermAccessor c
-  -> TableInfo
+  -> TableInfo PGColInfo
   -> m (Maybe c)
 askPermInfo' pa tableInfo = do
   roleName <- askCurRole
   let mrpi = getRolePermInfo roleName
   return $ mrpi >>= (^. permAccToLens pa)
   where
-    rpim = tiRolePermInfoMap tableInfo
+    rpim = _tiRolePermInfoMap tableInfo
     getRolePermInfo roleName
       | roleName == adminRole = Just $ mkAdminRolePermInfo tableInfo
       | otherwise             = M.lookup roleName rpim
@@ -72,7 +72,7 @@ askPermInfo' pa tableInfo = do
 askPermInfo
   :: (UserInfoM m, QErrM m)
   => PermAccessor c
-  -> TableInfo
+  -> TableInfo PGColInfo
   -> m c
 askPermInfo pa tableInfo = do
   roleName <- askCurRole
@@ -80,38 +80,38 @@ askPermInfo pa tableInfo = do
   case mPermInfo of
     Just c  -> return c
     Nothing -> throw400 PermissionDenied $ mconcat
-      [ pt <> " on " <>> tiName tableInfo
+      [ pt <> " on " <>> _tiName tableInfo
      , " for role " <>> roleName
      , " is not allowed. "
      ]
   where
     pt = permTypeToCode $ permAccToType pa
 
-isTabUpdatable :: RoleName -> TableInfo -> Bool
+isTabUpdatable :: RoleName -> TableInfo PGColInfo -> Bool
 isTabUpdatable role ti
   | role == adminRole = True
   | otherwise = isJust $ M.lookup role rpim >>= _permUpd
   where
-    rpim = tiRolePermInfoMap ti
+    rpim = _tiRolePermInfoMap ti
 
 askInsPermInfo
   :: (UserInfoM m, QErrM m)
-  => TableInfo -> m InsPermInfo
+  => TableInfo PGColInfo -> m InsPermInfo
 askInsPermInfo = askPermInfo PAInsert
 
 askSelPermInfo
   :: (UserInfoM m, QErrM m)
-  => TableInfo -> m SelPermInfo
+  => TableInfo PGColInfo -> m SelPermInfo
 askSelPermInfo = askPermInfo PASelect
 
 askUpdPermInfo
   :: (UserInfoM m, QErrM m)
-  => TableInfo -> m UpdPermInfo
+  => TableInfo PGColInfo -> m UpdPermInfo
 askUpdPermInfo = askPermInfo PAUpdate
 
 askDelPermInfo
   :: (UserInfoM m, QErrM m)
-  => TableInfo -> m DelPermInfo
+  => TableInfo PGColInfo -> m DelPermInfo
 askDelPermInfo = askPermInfo PADelete
 
 verifyAsrns :: (MonadError QErr m) => [a -> m ()] -> [a] -> m ()
@@ -142,27 +142,27 @@ checkPermOnCol pt allowedCols pgCol = do
     ]
 
 binRHSBuilder
-  :: PGScalarType -> Value -> DMLP1 S.SQLExp
+  :: PGColumnType -> Value -> DMLP1 S.SQLExp
 binRHSBuilder colType val = do
   preparedArgs <- get
-  binVal <- runAesonParser (convToBin colType) val
-  put (preparedArgs DS.|> binVal)
-  return $ toPrepParam (DS.length preparedArgs + 1) colType
+  scalarValue <- parsePGScalarValue colType val
+  put (preparedArgs DS.|> toBinaryValue scalarValue)
+  return $ toPrepParam (DS.length preparedArgs + 1) (pstType scalarValue)
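-- A sketch of the prepared-argument bookkeeping above, with stand-in types
-- (the real code threads a Seq of binary-encoded values through DMLP1): each
-- new value is appended to the argument list and the returned expression
-- refers to it by a positional $n placeholder.
import Control.Monad.State

addPrepArg :: String -> State [String] String
addPrepArg value = do
  args <- get
  put (args ++ [value])
  pure ("$" <> show (length args + 1))  -- placeholder for the freshly added argument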
 
 fetchRelTabInfo
   :: (QErrM m, CacheRM m)
   => QualifiedTable
-  -> m TableInfo
+  -> m (TableInfo PGColInfo)
 fetchRelTabInfo refTabName =
   -- Internal error
   modifyErrAndSet500 ("foreign " <> ) $ askTabInfo refTabName
 
-type SessVarBldr m = PgType -> SessVar -> m S.SQLExp
+type SessVarBldr m = PGType PGScalarType -> SessVar -> m S.SQLExp
 
 fetchRelDet
   :: (UserInfoM m, QErrM m, CacheRM m)
   => RelName -> QualifiedTable
-  -> m (FieldInfoMap, SelPermInfo)
+  -> m (FieldInfoMap PGColInfo, SelPermInfo)
 fetchRelDet relName refTabName = do
   roleName <- askCurRole
   -- Internal error
@@ -171,7 +171,7 @@ fetchRelDet relName refTabName = do
   refSelPerm <- modifyErr (relPermErr refTabName roleName) $
                 askSelPermInfo refTabInfo
 
-  return (tiFieldInfoMap refTabInfo, refSelPerm)
+  return (_tiFieldInfoMap refTabInfo, refSelPerm)
   where
     relPermErr rTable roleName _ =
       mconcat
@@ -215,16 +215,16 @@ convPartialSQLExp f = \case
   PSESessVar colTy sessVar -> f colTy sessVar
 
 sessVarFromCurrentSetting
-  :: (Applicative f) => PgType -> SessVar -> f S.SQLExp
+  :: (Applicative f) => PGType PGScalarType -> SessVar -> f S.SQLExp
 sessVarFromCurrentSetting pgType sessVar =
   pure $ sessVarFromCurrentSetting' pgType sessVar
 
-sessVarFromCurrentSetting' :: PgType -> SessVar -> S.SQLExp
+sessVarFromCurrentSetting' :: PGType PGScalarType -> SessVar -> S.SQLExp
 sessVarFromCurrentSetting' ty sessVar =
   flip S.SETyAnn (S.mkTypeAnn ty) $ case ty of
-    PgTypeSimple baseTy -> withGeoVal baseTy sessVarVal
-    PgTypeArray _       -> sessVarVal
+    PGTypeSimple baseTy -> withGeoVal baseTy sessVarVal
+    PGTypeArray _       -> sessVarVal
   where
     curSess = S.SEUnsafe "current_setting('hasura.user')::json"
     sessVarVal = S.SEOpApp (S.SQLOp "->>")
@@ -241,23 +241,25 @@ checkSelPerm spi sessVarBldr =
 
 convBoolExp
   :: (UserInfoM m, QErrM m, CacheRM m)
-  => FieldInfoMap
+  => FieldInfoMap PGColInfo
   -> SelPermInfo
   -> BoolExp
   -> SessVarBldr m
-  -> (PGScalarType -> Value -> m S.SQLExp)
+  -> (PGColumnType -> Value -> m S.SQLExp)
   -> m AnnBoolExpSQL
 convBoolExp cim spi be sessVarBldr prepValBldr = do
   abe <- annBoolExp rhsParser cim be
   checkSelPerm spi sessVarBldr abe
   where
    rhsParser pgType val = case pgType of
-      PgTypeSimple ty -> prepValBldr ty val
-      PgTypeArray ofTy -> do
-        -- for arrays we don't use the prepared builder
-        vals <- runAesonParser parseJSON val
-        arrayExp <- S.SEArray <$> indexedForM vals (txtRHSBuilder ofTy)
-        return $ S.SETyAnn arrayExp $ S.mkTypeAnn pgType
+      PGTypeSimple ty -> prepValBldr ty val
+      PGTypeArray ofTy -> do
+        -- for arrays, we don't use the prepared builder
+        vals <- runAesonParser parseJSON val
+        PGScalarTyped scalarType scalarValues <- parsePGScalarValues ofTy vals
+        return $ S.SETyAnn
+          (S.SEArray $ map (toTxtValue . PGScalarTyped scalarType) scalarValues)
+          (S.mkTypeAnn $ PGTypeArray scalarType)
 
 dmlTxErrorHandler :: Q.PGTxErr -> QErr
 dmlTxErrorHandler p2Res =
@@ -266,16 +268,16 @@ dmlTxErrorHandler p2Res =
     Just (code, msg) -> err400 code msg
   where err = simplifyError p2Res
 
-toJSONableExp :: Bool -> PGScalarType -> S.SQLExp -> S.SQLExp
+toJSONableExp :: Bool -> PGColumnType -> S.SQLExp -> S.SQLExp
 toJSONableExp strfyNum colTy expn
-  | colTy == PGGeometry || colTy == PGGeography =
+  | isScalarColumnWhere isGeoType colTy =
      S.SEFnApp "ST_AsGeoJSON"
      [ expn
      , S.SEUnsafe "15" -- max decimal digits
      , S.SEUnsafe "4"  -- to print out crs
      ] Nothing
      `S.SETyAnn` S.jsonTypeAnn
-  | isBigNum colTy && strfyNum =
+  | isScalarColumnWhere isBigNum colTy && strfyNum =
     expn `S.SETyAnn` S.textTypeAnn
   | otherwise = expn
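-- What toJSONableExp renders to, roughly, for the two special cases above
-- (column names are hypothetical):
--
--   geometry/geography column:            ST_AsGeoJSON("geom", 15, 4)::json
--   bignum column, stringify-numbers on:  ("amount")::text
--
-- A stand-in sketch of the same decision in plain Haskell (not the real
-- SQL-expression types):
data DemoColKind = DemoGeo | DemoBigNum | DemoOther

renderForJSON :: Bool -> DemoColKind -> String -> String
renderForJSON stringifyNum kind expr = case kind of
  DemoGeo                   -> "ST_AsGeoJSON(" <> expr <> ", 15, 4)::json"
  DemoBigNum | stringifyNum -> "(" <> expr <> ")::text"  -- keep precision for JSON clients
  _                         -> expr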
diff --git a/server/src-lib/Hasura/RQL/DML/Mutation.hs b/server/src-lib/Hasura/RQL/DML/Mutation.hs
index 1885ae904f9..3688c55b7ea 100644
--- a/server/src-lib/Hasura/RQL/DML/Mutation.hs
+++ b/server/src-lib/Hasura/RQL/DML/Mutation.hs
@@ -108,7 +108,7 @@ mkSelCTEFromColVals qt allCols colVals =
           let pgCol = pgiName ci
           val <- onNothing (Map.lookup pgCol colVal) $
             throw500 $ "column " <> pgCol <<> " not found in returning values"
-          runAesonParser (convToTxt (pgiType ci)) val
+          toTxtValue <$> parsePGScalarValue (pgiType ci) val
 
     selNoRows =
       S.mkSelect { S.selExtr = [S.selectStar]
diff --git a/server/src-lib/Hasura/RQL/DML/Returning.hs b/server/src-lib/Hasura/RQL/DML/Returning.hs
index 87fa172d4fa..662c60b754c 100644
--- a/server/src-lib/Hasura/RQL/DML/Returning.hs
+++ b/server/src-lib/Hasura/RQL/DML/Returning.hs
@@ -50,7 +50,7 @@ hasNestedFld = any isNestedMutFld
       FArr _ -> True
       _      -> False
 
-pgColsFromMutFld :: MutFld -> [(PGCol, PGScalarType)]
+pgColsFromMutFld :: MutFld -> [(PGCol, PGColumnType)]
 pgColsFromMutFld = \case
   MCount -> []
   MExp _ -> []
   MRet selFlds ->
     flip mapMaybe selFlds $ \(_, annFld) -> case annFld of
     FCol (PGColInfo col colTy _) _ -> Just (col, colTy)
     _                              -> Nothing
 
-pgColsFromMutFlds :: MutFlds -> [(PGCol, PGScalarType)]
+pgColsFromMutFlds :: MutFlds -> [(PGCol, PGColumnType)]
 pgColsFromMutFlds = concatMap (pgColsFromMutFld . snd)
 
 pgColsToSelFlds :: [PGColInfo] -> [(FieldName, AnnFld)]
@@ -111,7 +111,7 @@ mkSelWith qt cte mutFlds singleObj strfyNum =
 
 checkRetCols
   :: (UserInfoM m, QErrM m)
-  => FieldInfoMap
+  => FieldInfoMap PGColInfo
   -> SelPermInfo
   -> [PGCol]
   -> m [PGColInfo]
diff --git a/server/src-lib/Hasura/RQL/DML/Select.hs b/server/src-lib/Hasura/RQL/DML/Select.hs
index 6d2408c6dec..75e7b390fd8 100644
--- a/server/src-lib/Hasura/RQL/DML/Select.hs
+++ b/server/src-lib/Hasura/RQL/DML/Select.hs
@@ -30,7 +30,7 @@ import qualified Database.PG.Query as Q
 import qualified Hasura.SQL.DML as S
 
 convSelCol :: (UserInfoM m, QErrM m, CacheRM m)
-           => FieldInfoMap
+           => FieldInfoMap PGColInfo
            -> SelPermInfo
            -> SelCol
            -> m [ExtCol]
@@ -50,7 +50,7 @@ convSelCol fieldInfoMap spi (SCStar wildcard) =
 
 convWildcard
   :: (UserInfoM m, QErrM m, CacheRM m)
-  => FieldInfoMap
+  => FieldInfoMap PGColInfo
   -> SelPermInfo
   -> Wildcard
   -> m [ExtCol]
@@ -71,14 +71,14 @@ convWildcard fieldInfoMap (SelPermInfo cols _ _ _ _ _) wildcard =
       mRelSelPerm <- askPermInfo' PASelect relTabInfo
 
       forM mRelSelPerm $ \rspi -> do
-        rExtCols <- convWildcard (tiFieldInfoMap relTabInfo) rspi wc
+        rExtCols <- convWildcard (_tiFieldInfoMap relTabInfo) rspi wc
        return $ ECRel relName Nothing $
          SelectG rExtCols Nothing Nothing Nothing Nothing
 
     relExtCols wc = mapM (mkRelCol wc) relColInfos
 
 resolveStar :: (UserInfoM m, QErrM m, CacheRM m)
-            => FieldInfoMap
+            => FieldInfoMap PGColInfo
             -> SelPermInfo
             -> SelectQ
             -> m SelectQExt
@@ -105,7 +105,7 @@ resolveStar fim spi (SelectG selCols mWh mOb mLt mOf) = do
 
 convOrderByElem
   :: (UserInfoM m, QErrM m, CacheRM m)
   => SessVarBldr m
-  -> (FieldInfoMap, SelPermInfo)
+  -> (FieldInfoMap PGColInfo, SelPermInfo)
   -> OrderByCol
   -> m AnnObCol
 convOrderByElem sessVarBldr (flds, spi) = \case
@@ -115,7 +115,7 @@ convOrderByElem sessVarBldr (flds, spi) = \case
       FIColumn colInfo -> do
        checkSelOnCol spi (pgiName colInfo)
        let ty = pgiType colInfo
-        if ty == PGGeography || ty == PGGeometry
+        if isScalarColumnWhere isGeoType ty
          then throw400 UnexpectedPayload $ mconcat
           [ fldName <<> " has type 'geometry'"
           , " and cannot be used in order_by"
@@ -145,11 +145,11 @@ convOrderByElem sessVarBldr (flds, spi) = \case
 
 convSelectQ
   :: (UserInfoM m, QErrM m, CacheRM m, HasSQLGenCtx m)
-  => FieldInfoMap  -- Table information of current table
+  => FieldInfoMap PGColInfo  -- Table information of current table
   -> SelPermInfo   -- Additional select permission info
   -> SelectQExt     -- Given Select Query
   -> SessVarBldr m
-  -> (PGScalarType -> Value -> m S.SQLExp)
+  -> (PGColumnType -> Value -> m S.SQLExp)
   -> m AnnSimpleSel
 convSelectQ fieldInfoMap selPermInfo selQ sessVarBldr prepValBldr = do
@@ -200,7 +200,7 @@ convSelectQ fieldInfoMap selPermInfo selQ sessVarBldr prepValBldr = do
 
 convExtSimple
   :: (UserInfoM m, QErrM m)
-  => FieldInfoMap
+  => FieldInfoMap PGColInfo
   -> SelPermInfo
   -> PGCol
   -> m PGColInfo
@@ -212,12 +212,12 @@ convExtSimple fieldInfoMap selPermInfo pgCol = do
 
 convExtRel
   :: (UserInfoM m, QErrM m, CacheRM m, HasSQLGenCtx m)
-  => FieldInfoMap
+  => FieldInfoMap PGColInfo
   -> RelName
   -> Maybe RelName
   -> SelectQExt
   -> SessVarBldr m
-  -> (PGScalarType -> Value -> m S.SQLExp)
+  -> (PGColumnType -> Value -> m S.SQLExp)
   -> m (Either ObjSel ArrSel)
 convExtRel fieldInfoMap relName mAlias selQ sessVarBldr prepValBldr = do
   -- Point to the name key
@@ -250,15 +250,15 @@ convExtRel fieldInfoMap relName mAlias selQ sessVarBldr prepValBldr = do
 
 convSelectQuery
   :: (UserInfoM m, QErrM m, CacheRM m, HasSQLGenCtx m)
   => SessVarBldr m
-  -> (PGScalarType -> Value -> m S.SQLExp)
+  -> (PGColumnType -> Value -> m S.SQLExp)
   -> SelectQuery
   -> m AnnSimpleSel
 convSelectQuery sessVarBldr prepArgBuilder (DMLQuery qt selQ) = do
   tabInfo <- withPathK "table" $ askTabInfo qt
   selPermInfo <- askSelPermInfo tabInfo
-  extSelQ <- resolveStar (tiFieldInfoMap tabInfo) selPermInfo selQ
+  extSelQ <- resolveStar (_tiFieldInfoMap tabInfo) selPermInfo selQ
   validateHeaders $ spiRequiredHeaders selPermInfo
-  convSelectQ (tiFieldInfoMap tabInfo) selPermInfo
+  convSelectQ (_tiFieldInfoMap tabInfo) selPermInfo
     extSelQ sessVarBldr prepArgBuilder
 
 mkFuncSelectSimple
diff --git a/server/src-lib/Hasura/RQL/DML/Update.hs b/server/src-lib/Hasura/RQL/DML/Update.hs
index f57fdb6361e..3a0b958ad5d 100644
--- a/server/src-lib/Hasura/RQL/DML/Update.hs
+++ b/server/src-lib/Hasura/RQL/DML/Update.hs
@@ -67,9 +67,9 @@ mkUpdateCTE (AnnUpd tn setExps (permFltr, wc) _ _) =
 
 convInc
   :: (QErrM m)
-  => (PGScalarType -> Value -> m S.SQLExp)
+  => (PGColumnType -> Value -> m S.SQLExp)
   -> PGCol
-  -> PGScalarType
+  -> PGColumnType
   -> Value
   -> m (PGCol, S.SQLExp)
 convInc f col colType val = do
@@ -78,9 +78,9 @@ convInc f col colType val = do
 
 convMul
   :: (QErrM m)
-  => (PGScalarType -> Value -> m S.SQLExp)
+  => (PGColumnType -> Value -> m S.SQLExp)
   -> PGCol
-  -> PGScalarType
+  -> PGColumnType
   -> Value
   -> m (PGCol, S.SQLExp)
 convMul f col colType val = do
@@ -89,25 +89,25 @@ convMul f col colType val = do
 
 convSet
   :: (QErrM m)
-  => (PGScalarType -> Value -> m S.SQLExp)
+  => (PGColumnType -> Value -> m S.SQLExp)
   -> PGCol
-  -> PGScalarType
+  -> PGColumnType
   -> Value
   -> m (PGCol, S.SQLExp)
 convSet f col colType val = do
   prepExp <- f colType val
   return (col, prepExp)
 
-convDefault :: (Monad m) => PGCol -> PGScalarType -> () -> m (PGCol, S.SQLExp)
+convDefault :: (Monad m) => PGCol -> PGColumnType -> () -> m (PGCol, S.SQLExp)
 convDefault col _ _ = return (col, S.SEUnsafe "DEFAULT")
 
 convOp
   :: (UserInfoM m, QErrM m)
-  => FieldInfoMap
+  => FieldInfoMap PGColInfo
   -> [PGCol]
   -> UpdPermInfo
   -> [(PGCol, a)]
-  -> (PGCol -> PGScalarType -> a -> m (PGCol, S.SQLExp))
+  -> (PGCol -> PGColumnType -> a -> m (PGCol, S.SQLExp))
   -> m [(PGCol, S.SQLExp)]
 convOp fieldInfoMap preSetCols updPerm objs conv =
   forM objs $ \(pgCol, a) -> do
@@ -129,7 +129,7 @@ convOp fieldInfoMap preSetCols updPerm objs conv =
 
 validateUpdateQueryWith
   :: (UserInfoM m, QErrM m, CacheRM m)
   => SessVarBldr m
-  -> (PGScalarType -> Value -> m S.SQLExp)
+  -> (PGColumnType -> Value -> m S.SQLExp)
   -> UpdateQuery
   -> m AnnUpd
 validateUpdateQueryWith sessVarBldr prepValBldr uq = do
@@ -138,7 +138,7 @@ validateUpdateQueryWith sessVarBldr prepValBldr uq = do
 
   -- If it is view then check if it is updatable
   mutableView tableName viIsUpdatable
-    (tiViewInfo tableInfo) "updatable"
+    (_tiViewInfo tableInfo) "updatable"
 
   -- Check if the role has update permissions
   updPerm <- askUpdPermInfo tableInfo
@@ -150,7 +150,7 @@ validateUpdateQueryWith sessVarBldr prepValBldr uq = do
   selPerm <- modifyErr (<> selNecessaryMsg) $
              askSelPermInfo tableInfo
 
-  let fieldInfoMap = tiFieldInfoMap tableInfo
+  let fieldInfoMap = _tiFieldInfoMap tableInfo
       allCols = getCols fieldInfoMap
       preSetObj = upiSet updPerm
       preSetCols = M.keys preSetObj
diff --git a/server/src-lib/Hasura/RQL/GBoolExp.hs b/server/src-lib/Hasura/RQL/GBoolExp.hs
index 18ac6d7796e..54cfd84bdfb 100644
--- a/server/src-lib/Hasura/RQL/GBoolExp.hs
+++ b/server/src-lib/Hasura/RQL/GBoolExp.hs
@@ -2,14 +2,11 @@ module Hasura.RQL.GBoolExp
   ( toSQLBoolExp
   , getBoolExpDeps
   , annBoolExp
-  , txtRHSBuilder
-  , pgValParser
   ) where
 
 import Hasura.Prelude
 import Hasura.RQL.Types
 import Hasura.SQL.Types
-import Hasura.SQL.Value
 
 import qualified Hasura.SQL.DML as S
@@ -21,16 +18,16 @@ import qualified Data.HashMap.Strict as M
 import qualified Data.Text.Extended as T
 
 type OpRhsParser m v =
-  PgType -> Value -> m v
+  PGType PGColumnType -> Value -> m v
 
 -- | Represents a reference to a Postgres column, possibly casted an arbitrary
 -- number of times. Used within 'parseOperationsExpression' for bookkeeping.
 data ColumnReference
   = ColumnReferenceColumn !PGColInfo
-  | ColumnReferenceCast !ColumnReference !PGScalarType
+  | ColumnReferenceCast !ColumnReference !PGColumnType
   deriving (Show, Eq)
 
-columnReferenceType :: ColumnReference -> PGScalarType
+columnReferenceType :: ColumnReference -> PGColumnType
 columnReferenceType = \case
   ColumnReferenceColumn column   -> pgiType column
   ColumnReferenceCast _ targetType -> targetType
@@ -46,7 +43,7 @@ parseOperationsExpression
   :: forall m v . (MonadError QErr m)
   => OpRhsParser m v
-  -> FieldInfoMap
+  -> FieldInfoMap PGColInfo
   -> PGColInfo
   -> Value
   -> m [OpExpG v]
@@ -59,7 +56,7 @@ parseOperationsExpression rhsParser fim columnInfo =
       Object o -> mapM (parseOperation column) (M.toList o)
       val      -> pure . AEQ False <$> rhsParser columnType val
       where
-        columnType = PgTypeSimple $ columnReferenceType column
+        columnType = PGTypeSimple $ columnReferenceType column
 
     parseOperation :: ColumnReference -> (T.Text, Value) -> m (OpExpG v)
     parseOperation column (opStr, val) = withPathK opStr $
@@ -114,17 +111,17 @@ parseOperationsExpression rhsParser fim columnInfo =
          "_is_null"      -> parseIsNull
 
          -- jsonb type
-          "_contains"     -> jsonbOnlyOp $ AContains <$> parseOne
-          "$contains"     -> jsonbOnlyOp $ AContains <$> parseOne
-          "_contained_in" -> jsonbOnlyOp $ AContainedIn <$> parseOne
-          "$contained_in" -> jsonbOnlyOp $ AContainedIn <$> parseOne
-          "_has_key"      -> jsonbOnlyOp $ AHasKey <$> parseWithTy PGText
-          "$has_key"      -> jsonbOnlyOp $ AHasKey <$> parseWithTy PGText
+          "_contains"     -> guardType [PGJSONB] >> AContains <$> parseOne
+          "$contains"     -> guardType [PGJSONB] >> AContains <$> parseOne
+          "_contained_in" -> guardType [PGJSONB] >> AContainedIn <$> parseOne
+          "$contained_in" -> guardType [PGJSONB] >> AContainedIn <$> parseOne
+          "_has_key"      -> guardType [PGJSONB] >> AHasKey <$> parseWithTy (PGColumnScalar PGText)
+          "$has_key"      -> guardType [PGJSONB] >> AHasKey <$> parseWithTy (PGColumnScalar PGText)
 
-          "_has_keys_any" -> jsonbOnlyOp $ AHasKeysAny <$> parseManyWithType PGText
-          "$has_keys_any" -> jsonbOnlyOp $ AHasKeysAny <$> parseManyWithType PGText
-          "_has_keys_all" -> jsonbOnlyOp $ AHasKeysAll <$> parseManyWithType PGText
-          "$has_keys_all" -> jsonbOnlyOp $ AHasKeysAll <$> parseManyWithType PGText
+          "_has_keys_any" -> guardType [PGJSONB] >> AHasKeysAny <$> parseManyWithType (PGColumnScalar PGText)
+          "$has_keys_any" -> guardType [PGJSONB] >> AHasKeysAny <$> parseManyWithType (PGColumnScalar PGText)
+          "_has_keys_all" -> guardType [PGJSONB] >> AHasKeysAll <$> parseManyWithType (PGColumnScalar PGText)
+          "$has_keys_all" -> guardType [PGJSONB] >> AHasKeysAll <$> parseManyWithType (PGColumnScalar PGText)
 
          -- geometry types
          "_st_contains"   -> parseGeometryOp ASTContains
@@ -177,12 +174,12 @@ parseOperationsExpression rhsParser fim columnInfo =
        parseLt       = ALT  <$> parseOne -- <
        parseGte      = AGTE <$> parseOne -- >=
        parseLte      = ALTE <$> parseOne -- <=
-        parseLike     = textOnlyOp colTy >> ALIKE     <$> parseOne
-        parseNlike    = textOnlyOp colTy >> ANLIKE    <$> parseOne
-        parseIlike    = textOnlyOp colTy >> AILIKE    <$> parseOne
-        parseNilike   = textOnlyOp colTy >> ANILIKE   <$> parseOne
-        parseSimilar  = textOnlyOp colTy >> ASIMILAR  <$> parseOne
-        parseNsimilar = textOnlyOp colTy >> ANSIMILAR <$> parseOne
+        parseLike     = guardType stringTypes >> ALIKE     <$> parseOne
+        parseNlike    = guardType stringTypes >> ANLIKE    <$> parseOne
+        parseIlike    = guardType stringTypes >> AILIKE    <$> parseOne
+        parseNilike   = guardType stringTypes >> ANILIKE   <$> parseOne
+        parseSimilar  = guardType stringTypes >> ASIMILAR  <$> parseOne
+        parseNsimilar = guardType stringTypes >> ANSIMILAR <$> parseOne
 
        parseIsNull   = bool ANISNOTNULL ANISNULL -- is null
                        <$> parseVal
@@ -199,7 +196,7 @@ parseOperationsExpression rhsParser fim columnInfo =
          parsedCastOperations <-
            forM (M.toList castOperations) $ \(targetTypeName, castedComparisons) -> do
              let targetType = txtToPgColTy targetTypeName
-                  castedColumn = ColumnReferenceCast column targetType
+                  castedColumn = ColumnReferenceCast column (PGColumnScalar targetType)
              checkValidCast targetType
              parsedCastedComparisons <- withPathK targetTypeName $
                parseOperations castedColumn castedComparisons
          return . ACast $ M.fromList parsedCastOperations
 
        checkValidCast targetType = case (colTy, targetType) of
-          (PGGeometry, PGGeography) -> return ()
-          (PGGeography, PGGeometry) -> return ()
+          (PGColumnScalar PGGeometry, PGGeography) -> return ()
+          (PGColumnScalar PGGeography, PGGeometry) -> return ()
          _ -> throw400 UnexpectedPayload $
            "cannot cast column of type " <> colTy <<> " to type " <>> targetType
 
-        jsonbOnlyOp m = case colTy of
-          PGJSONB -> m
-          ty      -> throwError $ buildMsg ty [PGJSONB]
 
        parseGeometryOp f =
-          geometryOp colTy >> f <$> parseOneNoSess colTy val
+          guardType [PGGeometry] >> f <$> parseOneNoSess colTy val
        parseGeometryOrGeographyOp f =
-          geometryOrGeographyOp colTy >> f <$> parseOneNoSess colTy val
+          guardType geoTypes >> f <$> parseOneNoSess colTy val
 
        parseSTDWithinObj = case colTy of
-          PGGeometry -> do
+          PGColumnScalar PGGeometry -> do
            DWithinGeomOp distVal fromVal <- parseVal
-            dist <- withPathK "distance" $ parseOneNoSess PGFloat distVal
+            dist <- withPathK "distance" $ parseOneNoSess (PGColumnScalar PGFloat) distVal
            from <- withPathK "from" $ parseOneNoSess colTy fromVal
            return $ ASTDWithinGeom $ DWithinGeomOp dist from
-          PGGeography -> do
+          PGColumnScalar PGGeography -> do
            DWithinGeogOp distVal fromVal sphVal <- parseVal
-            dist <- withPathK "distance" $ parseOneNoSess PGFloat distVal
+            dist <- withPathK "distance" $ parseOneNoSess (PGColumnScalar PGFloat) distVal
            from <- withPathK "from" $ parseOneNoSess colTy fromVal
-            useSpheroid <- withPathK "use_spheroid" $ parseOneNoSess PGBoolean sphVal
+            useSpheroid <- withPathK "use_spheroid" $ parseOneNoSess (PGColumnScalar PGBoolean) sphVal
            return $ ASTDWithinGeog $ DWithinGeogOp dist from useSpheroid
          _ -> throwError $ buildMsg colTy [PGGeometry, PGGeography]
@@ -246,36 +239,23 @@ parseOperationsExpression rhsParser fim columnInfo =
              "incompatible column types : " <> column <<> ", " <>> rhsCol
            else return rhsCol
 
-        geometryOp PGGeometry = return ()
-        geometryOp ty =
-          throwError $ buildMsg ty [PGGeometry]
-        geometryOrGeographyOp PGGeometry = return ()
-        geometryOrGeographyOp PGGeography = return ()
-        geometryOrGeographyOp ty =
-          throwError $ buildMsg ty [PGGeometry, PGGeography]
-
-        parseWithTy ty = rhsParser (PgTypeSimple ty) val
+        parseWithTy ty = rhsParser (PGTypeSimple ty) val
 
        -- parse one with the column's type
        parseOne = parseWithTy colTy
-        parseOneNoSess ty = rhsParser (PgTypeSimple ty)
+        parseOneNoSess ty = rhsParser (PGTypeSimple ty)
 
-        parseManyWithType ty = rhsParser (PgTypeArray ty) val
+        parseManyWithType ty = rhsParser (PGTypeArray ty) val
+
+        guardType validTys = unless (isScalarColumnWhere (`elem` validTys) colTy) $
+          throwError $ buildMsg colTy validTys
+        buildMsg ty expTys = err400 UnexpectedPayload
+          $ " is of type " <> ty <<> "; this operator works only on columns of type "
+          <> T.intercalate "/" (map dquote expTys)
 
        parseVal :: (FromJSON a) => m a
        parseVal = decodeValue val
 
-buildMsg :: PGScalarType -> [PGScalarType] -> QErr
-buildMsg ty expTys = err400 UnexpectedPayload
-  $ " is of type " <> ty <<> "; this operator works only on columns of type "
-  <> T.intercalate "/" (map dquote expTys)
-
-textOnlyOp :: (MonadError QErr m) => PGScalarType -> m ()
-textOnlyOp PGText = return ()
-textOnlyOp PGVarchar = return ()
-textOnlyOp ty =
-  throwError $ buildMsg ty [PGVarchar, PGText]
-
 -- This convoluted expression instead of col = val
 -- to handle the case of col : null
 equalsBoolExpBuilder :: S.SQLExp -> S.SQLExp -> S.BoolExp
@@ -295,7 +275,7 @@ notEqualsBoolExpBuilder qualColExp rhsExp =
 
 annBoolExp
   :: (QErrM m, CacheRM m)
   => OpRhsParser m v
-  -> FieldInfoMap
+  -> FieldInfoMap PGColInfo
   -> BoolExp
   -> m (AnnBoolExp v)
 annBoolExp rhsParser fim (BoolExp boolExp) =
@@ -304,13 +284,13 @@ annBoolExp rhsParser fim (BoolExp boolExp) =
 
 annColExp
   :: (QErrM m, CacheRM m)
   => OpRhsParser m v
-  -> FieldInfoMap
+  -> FieldInfoMap PGColInfo
   -> ColExp
   -> m (AnnBoolExpFld v)
 annColExp rhsParser colInfoMap (ColExp fieldName colVal) = do
   colInfo <- askFieldInfo colInfoMap fieldName
   case colInfo of
-    FIColumn (PGColInfo _ PGJSON _) ->
+    FIColumn (PGColInfo _ (PGColumnScalar PGJSON) _) ->
       throwError (err400 UnexpectedPayload "JSON column can not be part of where clause")
     FIColumn pgi ->
       AVCol pgi <$> parseOperationsExpression rhsParser colInfoMap pgi colVal
@@ -355,18 +335,6 @@ convColRhs tableQual = \case
   where
     mkQCol q = S.SEQIden . S.QIden q . toIden
 
-pgValParser
-  :: (MonadError QErr m)
-  => PGScalarType -> Value -> m PGColValue
-pgValParser ty =
-  runAesonParser (parsePGValue ty)
-
-txtRHSBuilder
-  :: (MonadError QErr m)
-  => PGScalarType -> Value -> m S.SQLExp
-txtRHSBuilder ty val =
-  toTxtValue ty <$> pgValParser ty val
-
 mkColCompExp :: S.Qual -> PGCol -> OpExpG S.SQLExp -> S.BoolExp
 mkColCompExp qual lhsCol = mkCompExp (mkQCol lhsCol)
@@ -428,7 +396,7 @@ mkColCompExp qual lhsCol = mkCompExp (mkQCol lhsCol)
 
     mkCastsExp casts =
       sqlAll . flip map (M.toList casts) $ \(targetType, operations) ->
-        let targetAnn = S.mkTypeAnn $ PgTypeSimple targetType
+        let targetAnn = S.mkTypeAnn $ PGTypeSimple targetType
        in sqlAll $ map (mkCompExp (S.SETyAnn lhs targetAnn)) operations
 
     sqlAll = foldr (S.BEBin S.AndOp) (S.BELit True)
diff --git a/server/src-lib/Hasura/RQL/Types.hs b/server/src-lib/Hasura/RQL/Types.hs
index c2cbd3c8bcb..db114bd0c02 100644
--- a/server/src-lib/Hasura/RQL/Types.hs
+++ b/server/src-lib/Hasura/RQL/Types.hs
@@ -41,6 +41,7 @@ import Hasura.Db as R
 import Hasura.EncJSON
 import Hasura.Prelude
 import Hasura.RQL.Types.BoolExp as R
+import Hasura.RQL.Types.Column as R
 import Hasura.RQL.Types.Common as R
 import Hasura.RQL.Types.DML as R
 import Hasura.RQL.Types.Error as R
@@ -60,9 +61,9 @@ import qualified Network.HTTP.Client as HTTP
 
 getFieldInfoMap
   :: QualifiedTable
-  -> SchemaCache -> Maybe FieldInfoMap
+  -> SchemaCache -> Maybe (FieldInfoMap PGColInfo)
 getFieldInfoMap tn =
-  fmap tiFieldInfoMap . M.lookup tn . scTables
+  fmap _tiFieldInfoMap . M.lookup tn . scTables
 
 data QCtx
   = QCtx
@@ -85,7 +86,7 @@ class (Monad m) => UserInfoM m where
 
 askTabInfo
   :: (QErrM m, CacheRM m)
-  => QualifiedTable -> m TableInfo
+  => QualifiedTable -> m (TableInfo PGColInfo)
 askTabInfo tabName = do
   rawSchemaCache <- askSchemaCache
   liftMaybe (err400 NotExists errMsg) $ M.lookup tabName $ scTables rawSchemaCache
@@ -94,11 +95,11 @@ askTabInfo tabName = do
 
 askTabInfoFromTrigger
   :: (QErrM m, CacheRM m)
-  => TriggerName -> m TableInfo
+  => TriggerName -> m (TableInfo PGColInfo)
 askTabInfoFromTrigger trn = do
   sc <- askSchemaCache
   let tabInfos = M.elems $ scTables sc
-  liftMaybe (err400 NotExists errMsg) $ find (isJust.M.lookup trn.tiEventTriggerInfoMap) tabInfos
+  liftMaybe (err400 NotExists errMsg) $ find (isJust.M.lookup trn._tiEventTriggerInfoMap) tabInfos
   where
     errMsg = "event trigger " <> triggerNameToTxt trn <<> " does not exist"
 
@@ -107,7 +108,7 @@ askEventTriggerInfo
   => TriggerName -> m EventTriggerInfo
 askEventTriggerInfo trn = do
   ti <- askTabInfoFromTrigger trn
-  let etim = tiEventTriggerInfoMap ti
+  let etim = _tiEventTriggerInfoMap ti
   liftMaybe (err400 NotExists errMsg) $ M.lookup trn etim
   where
     errMsg = "event trigger " <> triggerNameToTxt trn <<> " does not exist"
@@ -164,7 +165,7 @@ liftP1WithQCtx r m =
 
 askFieldInfoMap
   :: (QErrM m, CacheRM m)
-  => QualifiedTable -> m FieldInfoMap
+  => QualifiedTable -> m (FieldInfoMap PGColInfo)
 askFieldInfoMap tabName = do
   mFieldInfoMap <- getFieldInfoMap tabName <$> askSchemaCache
   maybe (throw400 NotExists errMsg) return mFieldInfoMap
@@ -173,19 +174,19 @@ askFieldInfoMap tabName = do
 
 askPGType
   :: (MonadError QErr m)
-  => FieldInfoMap
+  => FieldInfoMap PGColInfo
   -> PGCol
   -> T.Text
-  -> m PGScalarType
+  -> m PGColumnType
 askPGType m c msg =
   pgiType <$> askPGColInfo m c msg
 
 askPGColInfo
   :: (MonadError QErr m)
-  => FieldInfoMap
+  => FieldInfoMap columnInfo
   -> PGCol
   -> T.Text
-  -> m PGColInfo
+  -> m columnInfo
 askPGColInfo m c msg = do
   colInfo <- modifyErr ("column " <>) $
             askFieldInfo m (fromPGCol c)
@@ -200,16 +201,16 @@ askPGColInfo m c msg = do
       ]
 
 assertPGCol :: (MonadError QErr m)
-            => FieldInfoMap
+            => FieldInfoMap columnInfo
             -> T.Text
            -> PGCol
            -> m ()
 assertPGCol m msg c = do
-  _ <- askPGType m c msg
+  _ <- askPGColInfo m c msg
   return ()
 
 askRelType :: (MonadError QErr m)
-           => FieldInfoMap
+           => FieldInfoMap columnInfo
            -> RelName
           -> T.Text
           -> m RelInfo
@@ -226,9 +227,9 @@ askRelType m r msg = do
       ]
 
 askFieldInfo :: (MonadError QErr m)
-             => FieldInfoMap
+             => FieldInfoMap columnInfo
             -> FieldName
-             -> m FieldInfo
+             -> m (FieldInfo columnInfo)
 askFieldInfo m f =
   case M.lookup f m of
   Just colInfo -> return colInfo
diff --git a/server/src-lib/Hasura/RQL/Types/BoolExp.hs b/server/src-lib/Hasura/RQL/Types/BoolExp.hs
index 55b3739d78c..c8b1fd2e601 100644
--- a/server/src-lib/Hasura/RQL/Types/BoolExp.hs
+++ b/server/src-lib/Hasura/RQL/Types/BoolExp.hs
@@ -20,6 +20,7 @@ module Hasura.RQL.Types.BoolExp
   , AnnBoolExpFldSQL
   , AnnBoolExpSQL
   , PartialSQLExp(..)
+  , mkScalarSessionVar
   , isStaticValue
   , AnnBoolExpFldPartialSQL
   , AnnBoolExpPartialSQL
@@ -30,6 +31,7 @@ module Hasura.RQL.Types.BoolExp
   ) where
 
 import Hasura.Prelude
+import Hasura.RQL.Types.Column
 import Hasura.RQL.Types.Common
 import Hasura.RQL.Types.Permission
 
 import qualified Hasura.SQL.DML as S
@@ -280,10 +282,14 @@ type PreSetCols = M.HashMap PGCol S.SQLExp
 
 -- doesn't resolve the session variable
 data PartialSQLExp
-  = PSESessVar !PgType !SessVar
+  = PSESessVar !(PGType PGScalarType) !SessVar
   | PSESQLExp !S.SQLExp
   deriving (Show, Eq, Data)
 
+mkScalarSessionVar :: PGType PGColumnType -> SessVar -> PartialSQLExp
+mkScalarSessionVar columnType =
+  PSESessVar (unsafePGColumnToRepresentation <$> columnType)
+
 instance ToJSON PartialSQLExp where
   toJSON = \case
     PSESessVar colTy sessVar -> toJSON (colTy, sessVar)
diff --git a/server/src-lib/Hasura/RQL/Types/Catalog.hs b/server/src-lib/Hasura/RQL/Types/Catalog.hs
index 2e6b5329139..faecdeed0aa 100644
--- a/server/src-lib/Hasura/RQL/Types/Catalog.hs
+++ b/server/src-lib/Hasura/RQL/Types/Catalog.hs
@@ -1,8 +1,30 @@
-module Hasura.RQL.Types.Catalog where
+-- | This module provides 'fetchCatalogData', which loads the entire catalog in one go from the
+-- database, consulting tables such as @hdb_catalog.hdb_table@. It is used by
+-- 'Hasura.RQL.DDL.Schema.Table.buildSchemaCache' to seed or reload the schema cache.
+module Hasura.RQL.Types.Catalog
+  ( fetchCatalogData
+  , CatalogMetadata(..)
+
+  , CatalogTable(..)
+  , CatalogTableInfo(..)
+
+  , CatalogRelation(..)
+  , CatalogPermission(..)
+  , CatalogEventTrigger(..)
+  , CatalogFunction(..)
+  ) where
 
 import Hasura.Prelude
 
+import qualified Database.PG.Query as Q
+
+import Data.Aeson
+import Data.Aeson.Casing
+import Data.Aeson.TH
+
+import Hasura.Db
 import Hasura.RQL.DDL.Schema.Function
+import Hasura.RQL.Types.Column
 import Hasura.RQL.Types.Common
 import Hasura.RQL.Types.EventTrigger
 import Hasura.RQL.Types.Permission
@@ -11,15 +33,21 @@ import Hasura.RQL.Types.RemoteSchema
 import Hasura.RQL.Types.SchemaCache
 import Hasura.SQL.Types
 
-import Data.Aeson
-import Data.Aeson.Casing
-import Data.Aeson.TH
+data CatalogTableInfo
+  = CatalogTableInfo
+  { _ctiColumns           :: ![PGRawColInfo]
+  , _ctiConstraints       :: ![ConstraintName]
+  , _ctiPrimaryKeyColumns :: ![PGCol]
+  , _ctiViewInfo          :: !(Maybe ViewInfo)
+  } deriving (Show, Eq)
+$(deriveJSON (aesonDrop 4 snakeCase) ''CatalogTableInfo)
 
 data CatalogTable
   = CatalogTable
-  { _ctTable         :: !QualifiedTable
-  , _ctSystemDefined :: !Bool
-  , _ctInfo          :: !(Maybe TableInfo)
+  { _ctName            :: !QualifiedTable
+  , _ctIsSystemDefined :: !Bool
+  , _ctIsEnum          :: !Bool
+  , _ctInfo            :: !(Maybe CatalogTableInfo)
   } deriving (Show, Eq)
 $(deriveJSON (aesonDrop 3 snakeCase) ''CatalogTable)
@@ -70,3 +98,9 @@ data CatalogMetadata
   , _cmAllowlistCollections :: ![CollectionDef]
   } deriving (Show, Eq)
 $(deriveJSON (aesonDrop 3 snakeCase) ''CatalogMetadata)
+
+-- | See "Hasura.RQL.Types.Catalog".
+fetchCatalogData :: (MonadTx m) => m CatalogMetadata
+fetchCatalogData =
+  liftTx $ Q.getAltJ . runIdentity . Q.getRow <$> Q.withQE defaultTxErrorHandler
+    $(Q.sqlFromFile "src-rsr/catalog_metadata.sql") () True
diff --git a/server/src-lib/Hasura/RQL/Types/Column.hs b/server/src-lib/Hasura/RQL/Types/Column.hs
new file mode 100644
index 00000000000..75ba84f9b3e
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/Types/Column.hs
@@ -0,0 +1,156 @@
+module Hasura.RQL.Types.Column
+  ( PGColumnType(..)
+ , _PGColumnScalar + , _PGColumnEnumReference + , isScalarColumnWhere + + , parsePGScalarValue + , parsePGScalarValues + , unsafePGColumnToRepresentation + + , PGColInfo(..) + , PGRawColInfo(..) + , onlyIntCols + , onlyNumCols + , onlyJSONBCols + , onlyComparableCols + , getColInfos + + , EnumReference(..) + , EnumValues + , EnumValue(..) + , EnumValueInfo(..) + ) where + +import Hasura.Prelude + +import qualified Data.HashMap.Strict as M +import qualified Data.Text as T + +import Control.Lens.TH +import Data.Aeson +import Data.Aeson.Casing +import Data.Aeson.TH +import Language.Haskell.TH.Syntax (Lift) + +import Hasura.RQL.Instances () +import Hasura.RQL.Types.Error +import Hasura.SQL.Types +import Hasura.SQL.Value + +newtype EnumValue + = EnumValue { getEnumValue :: T.Text } + deriving (Show, Eq, Lift, Hashable, ToJSON, ToJSONKey, FromJSON, FromJSONKey) + +newtype EnumValueInfo + = EnumValueInfo + { evComment :: Maybe T.Text + } deriving (Show, Eq, Lift, Hashable) +$(deriveJSON (aesonDrop 2 snakeCase) ''EnumValueInfo) + +type EnumValues = M.HashMap EnumValue EnumValueInfo + +-- | Represents a reference to an “enum table,” a single-column Postgres table that is referenced +-- via foreign key. +data EnumReference + = EnumReference + { erTable :: !QualifiedTable + , erValues :: !EnumValues + } deriving (Show, Eq, Generic, Lift) +instance Hashable EnumReference +$(deriveJSON (aesonDrop 2 snakeCase) ''EnumReference) + +-- | The type we use for columns, which are currently always “scalars” (though see the note about +-- 'PGType'). Unlike 'PGScalarType', which represents a type that /Postgres/ knows about, this type +-- characterizes distinctions we make but Postgres doesn’t. +data PGColumnType + -- | Ordinary Postgres columns. + = PGColumnScalar !PGScalarType + -- | Columns that reference enum tables (see "Hasura.RQL.Schema.Enum"). This is not actually a + -- distinct type from the perspective of Postgres (at the time of this writing, we ensure they + -- always have type @text@), but we really want to distinguish this case, since we treat it + -- /completely/ differently in the GraphQL schema. + | PGColumnEnumReference !EnumReference + deriving (Show, Eq, Generic) +instance Hashable PGColumnType +$(deriveToJSON defaultOptions{constructorTagModifier = drop 8} ''PGColumnType) +$(makePrisms ''PGColumnType) + +instance DQuote PGColumnType where + dquoteTxt = \case + PGColumnScalar scalar -> dquoteTxt scalar + PGColumnEnumReference (EnumReference tableName _) -> dquoteTxt tableName + +isScalarColumnWhere :: (PGScalarType -> Bool) -> PGColumnType -> Bool +isScalarColumnWhere f = \case + PGColumnScalar scalar -> f scalar + PGColumnEnumReference _ -> False + +-- | Gets the representation type associated with a 'PGColumnType'. Avoid using this if possible. +-- Prefer 'parsePGScalarValue', 'parsePGScalarValues', or +-- 'Hasura.RQL.Types.BoolExp.mkScalarSessionVar'. +unsafePGColumnToRepresentation :: PGColumnType -> PGScalarType +unsafePGColumnToRepresentation = \case + PGColumnScalar scalarType -> scalarType + PGColumnEnumReference _ -> PGText + +parsePGScalarValue :: (MonadError QErr m) => PGColumnType -> Value -> m (PGScalarTyped PGColValue) +parsePGScalarValue columnType value = case columnType of + PGColumnScalar scalarType -> + PGScalarTyped scalarType <$> runAesonParser (parsePGValue scalarType) value + PGColumnEnumReference (EnumReference tableName enumValues) -> do + let typeName = snakeCaseQualObject tableName + flip runAesonParser value . 
withText (T.unpack typeName) $ \textValue -> do
+      let enumTextValues = map getEnumValue $ M.keys enumValues
+      unless (textValue `elem` enumTextValues) $
+        fail . T.unpack
+          $ "expected one of the values " <> T.intercalate ", " (map dquote enumTextValues)
+          <> " for type " <> typeName <<> ", given " <>> textValue
+      pure $ PGScalarTyped PGText (PGValText textValue)
+
+parsePGScalarValues
+  :: (MonadError QErr m)
+  => PGColumnType -> [Value] -> m (PGScalarTyped [PGColValue])
+parsePGScalarValues columnType values = do
+  scalarValues <- indexedMapM (fmap pstValue . parsePGScalarValue columnType) values
+  pure $ PGScalarTyped (unsafePGColumnToRepresentation columnType) scalarValues
+
+-- | “Raw” column info, as stored in the catalog (but not in the schema cache). Instead of
+-- containing a 'PGColumnType', it only contains a 'PGScalarType', which is combined with the
+-- 'prciReferences' field and other table data to eventually resolve the type to a 'PGColumnType'.
+data PGRawColInfo
+  = PGRawColInfo
+  { prciName :: !PGCol
+  , prciType :: !PGScalarType
+  , prciIsNullable :: !Bool
+  , prciReferences :: ![QualifiedTable]
+  -- ^ only stores single-column references to primary keys of foreign tables (used for detecting
+  -- references to enum tables)
+  } deriving (Show, Eq)
+$(deriveJSON (aesonDrop 4 snakeCase) ''PGRawColInfo)
+
+-- | “Resolved” column info, produced from a 'PGRawColInfo' value that has been combined with other
+-- schema information to produce a 'PGColumnType'.
+data PGColInfo
+  = PGColInfo
+  { pgiName :: !PGCol
+  , pgiType :: !PGColumnType
+  , pgiIsNullable :: !Bool
+  } deriving (Show, Eq)
+$(deriveToJSON (aesonDrop 3 snakeCase) ''PGColInfo)
+
+onlyIntCols :: [PGColInfo] -> [PGColInfo]
+onlyIntCols = filter (isScalarColumnWhere isIntegerType . pgiType)
+
+onlyNumCols :: [PGColInfo] -> [PGColInfo]
+onlyNumCols = filter (isScalarColumnWhere isNumType . pgiType)
+
+onlyJSONBCols :: [PGColInfo] -> [PGColInfo]
+onlyJSONBCols = filter (isScalarColumnWhere (== PGJSONB) . pgiType)
+
+onlyComparableCols :: [PGColInfo] -> [PGColInfo]
+onlyComparableCols = filter (isScalarColumnWhere isComparableType . pgiType)
+
+getColInfos :: [PGCol] -> [PGColInfo] -> [PGColInfo]
+getColInfos cols allColInfos =
+  flip filter allColInfos $ \ci -> pgiName ci `elem` cols
diff --git a/server/src-lib/Hasura/RQL/Types/Common.hs b/server/src-lib/Hasura/RQL/Types/Common.hs
index 0d837c95b4d..0816c58e9b3 100644
--- a/server/src-lib/Hasura/RQL/Types/Common.hs
+++ b/server/src-lib/Hasura/RQL/Types/Common.hs
@@ -1,6 +1,5 @@
 module Hasura.RQL.Types.Common
-  ( PGColInfo(..)
-  , RelName(..)
+  ( RelName(..)
   , relNameToTxt
   , RelType(..)
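+  -- 'PGColInfo' now lives in Hasura.RQL.Types.Column. A hedged sketch of the
+  -- raw-to-resolved step described there ('resolveColumn' is illustrative; the
+  -- real resolution happens while the schema cache is being built):
+  --
+  -- @
+  -- resolveColumn :: M.HashMap QualifiedTable EnumValues -> PGRawColInfo -> PGColInfo
+  -- resolveColumn enumTables raw = PGColInfo (prciName raw) columnType (prciIsNullable raw)
+  --   where
+  --     columnType = case prciReferences raw of
+  --       [ref] | Just values <- M.lookup ref enumTables ->
+  --                 PGColumnEnumReference (EnumReference ref values)
+  --       _ -> PGColumnScalar (prciType raw)
+  -- @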
, rootRelName @@ -38,15 +37,6 @@ import Instances.TH.Lift () import Language.Haskell.TH.Syntax (Lift) import qualified PostgreSQL.Binary.Decoding as PD -data PGColInfo - = PGColInfo - { pgiName :: !PGCol - , pgiType :: !PGScalarType - , pgiIsNullable :: !Bool - } deriving (Show, Eq) - -$(deriveJSON (aesonDrop 3 snakeCase) ''PGColInfo) - newtype NonEmptyText = NonEmptyText {unNonEmptyText :: T.Text} deriving (Show, Eq, Ord, Hashable, ToJSON, ToJSONKey, Lift, Q.ToPrepArg, DQuote) diff --git a/server/src-lib/Hasura/RQL/Types/Metadata.hs b/server/src-lib/Hasura/RQL/Types/Metadata.hs index 41afd1d5071..ff890eab4c3 100644 --- a/server/src-lib/Hasura/RQL/Types/Metadata.hs +++ b/server/src-lib/Hasura/RQL/Types/Metadata.hs @@ -18,7 +18,8 @@ data MetadataObjType | MOTEventTrigger | MOTFunction | MOTRemoteSchema - deriving (Eq) + deriving (Eq, Generic) +instance Hashable MetadataObjType instance Show MetadataObjType where show MOTTable = "table" @@ -36,7 +37,6 @@ data TableMetadataObjId | MTOPerm !RoleName !PermType | MTOTrigger !TriggerName deriving (Show, Eq, Generic) - instance Hashable TableMetadataObjId data MetadataObjId @@ -45,7 +45,6 @@ data MetadataObjId | MORemoteSchema !RemoteSchemaName | MOTableObj !QualifiedTable !TableMetadataObjId deriving (Show, Eq, Generic) - instance Hashable MetadataObjId data InconsistentMetadataObj @@ -54,7 +53,8 @@ data InconsistentMetadataObj , _moType :: !MetadataObjType , _moDef :: !Value , _moReason :: !T.Text - } deriving (Show, Eq) + } deriving (Show, Eq, Generic) +instance Hashable InconsistentMetadataObj instance ToJSON InconsistentMetadataObj where toJSON (InconsistentMetadataObj _ ty info rsn) = diff --git a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs index 580afb1f4b8..b2bf1f6cfd7 100644 --- a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs +++ b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs @@ -2,27 +2,36 @@ {-# LANGUAGE RankNTypes #-} module Hasura.RQL.Types.SchemaCache - ( TableCache - , SchemaCache(..) + ( SchemaCache(..) , SchemaCacheVer , initSchemaCacheVer , incSchemaCacheVer , emptySchemaCache + + , TableCache + , modTableCache + , addTableToCache + , modTableInCache + , delTableFromCache + , TableInfo(..) + , tiName + , tiSystemDefined + , tiFieldInfoMap + , tiRolePermInfoMap + , tiUniqOrPrimConstraints + , tiPrimaryKeyCols + , tiViewInfo + , tiEventTriggerInfoMap + , tiEnumValues + , TableConstraint(..) , ConstraintType(..) , ViewInfo(..) , isMutable , mutableView - , onlyIntCols - , onlyNumCols - , onlyJSONBCols - , onlyComparableCols , isUniqueOrPrimary , isForeignKey - , addTableToCache - , modTableInCache - , delTableFromCache , RemoteSchemaCtx(..) , RemoteSchemaMap @@ -36,17 +45,16 @@ module Hasura.RQL.Types.SchemaCache , FieldInfoMap , FieldInfo(..) + , _FIColumn + , _FIRelationship , fieldInfoToEither , partitionFieldInfos , partitionFieldInfosWith , getCols , getRels - , PGColInfo(..) , isPGColInfo - , getColInfos , RelInfo(..) - -- , addFldToCache , addColToCache , addRelToCache @@ -107,6 +115,7 @@ import qualified Hasura.GraphQL.Context as GC import Hasura.Prelude import Hasura.RQL.Types.BoolExp +import Hasura.RQL.Types.Column import Hasura.RQL.Types.Common import Hasura.RQL.Types.Error import Hasura.RQL.Types.EventTrigger @@ -137,58 +146,42 @@ mkColDep :: DependencyReason -> QualifiedTable -> PGCol -> SchemaDependency mkColDep reason tn col = flip SchemaDependency reason . SOTableObj tn $ TOCol col -onlyIntCols :: [PGColInfo] -> [PGColInfo] -onlyIntCols = filter (isIntegerType . 
pgiType) - -onlyNumCols :: [PGColInfo] -> [PGColInfo] -onlyNumCols = filter (isNumType . pgiType) - -onlyJSONBCols :: [PGColInfo] -> [PGColInfo] -onlyJSONBCols = filter (isJSONBType . pgiType) - -onlyComparableCols :: [PGColInfo] -> [PGColInfo] -onlyComparableCols = filter (isComparableType . pgiType) - -getColInfos :: [PGCol] -> [PGColInfo] -> [PGColInfo] -getColInfos cols allColInfos = - flip filter allColInfos $ \ci -> pgiName ci `elem` cols - type WithDeps a = (a, [SchemaDependency]) -data FieldInfo - = FIColumn !PGColInfo +data FieldInfo columnInfo + = FIColumn !columnInfo | FIRelationship !RelInfo deriving (Show, Eq) - $(deriveToJSON defaultOptions { constructorTagModifier = snakeCase . drop 2 , sumEncoding = TaggedObject "type" "detail" } ''FieldInfo) +$(makePrisms ''FieldInfo) -fieldInfoToEither :: FieldInfo -> Either PGColInfo RelInfo +fieldInfoToEither :: FieldInfo columnInfo -> Either columnInfo RelInfo fieldInfoToEither (FIColumn l) = Left l fieldInfoToEither (FIRelationship r) = Right r -partitionFieldInfos :: [FieldInfo] -> ([PGColInfo], [RelInfo]) +partitionFieldInfos :: [FieldInfo columnInfo] -> ([columnInfo], [RelInfo]) partitionFieldInfos = partitionFieldInfosWith (id, id) -partitionFieldInfosWith :: (PGColInfo -> a, RelInfo -> b) - -> [FieldInfo] -> ([a], [b]) +partitionFieldInfosWith :: (columnInfo -> a, RelInfo -> b) + -> [FieldInfo columnInfo] -> ([a], [b]) partitionFieldInfosWith fns = partitionEithers . map (biMapEither fns . fieldInfoToEither) where biMapEither (f1, f2) = either (Left . f1) (Right . f2) -type FieldInfoMap = M.HashMap FieldName FieldInfo +type FieldInfoMap columnInfo = M.HashMap FieldName (FieldInfo columnInfo) -getCols :: FieldInfoMap -> [PGColInfo] +getCols :: FieldInfoMap columnInfo -> [columnInfo] getCols fim = lefts $ map fieldInfoToEither $ M.elems fim -getRels :: FieldInfoMap -> [RelInfo] +getRels :: FieldInfoMap columnInfo -> [RelInfo] getRels fim = rights $ map fieldInfoToEither $ M.elems fim -isPGColInfo :: FieldInfo -> Bool +isPGColInfo :: FieldInfo columnInfo -> Bool isPGColInfo (FIColumn _) = True isPGColInfo _ = False @@ -331,32 +324,20 @@ mutableView qt f mVI operation = unless (isMutable f mVI) $ throw400 NotSupported $ "view " <> qt <<> " is not " <> operation -data TableInfo +data TableInfo columnInfo = TableInfo - { tiName :: !QualifiedTable - , tiSystemDefined :: !Bool - , tiFieldInfoMap :: !FieldInfoMap - , tiRolePermInfoMap :: !RolePermInfoMap - , tiUniqOrPrimConstraints :: ![ConstraintName] - , tiPrimaryKeyCols :: ![PGCol] - , tiViewInfo :: !(Maybe ViewInfo) - , tiEventTriggerInfoMap :: !EventTriggerInfoMap + { _tiName :: !QualifiedTable + , _tiSystemDefined :: !Bool + , _tiFieldInfoMap :: !(FieldInfoMap columnInfo) + , _tiRolePermInfoMap :: !RolePermInfoMap + , _tiUniqOrPrimConstraints :: ![ConstraintName] + , _tiPrimaryKeyCols :: ![PGCol] + , _tiViewInfo :: !(Maybe ViewInfo) + , _tiEventTriggerInfoMap :: !EventTriggerInfoMap + , _tiEnumValues :: !(Maybe EnumValues) } deriving (Show, Eq) - $(deriveToJSON (aesonDrop 2 snakeCase) ''TableInfo) - -instance FromJSON TableInfo where - parseJSON = withObject "TableInfo" $ \o -> do - name <- o .: "name" - columns <- o .: "columns" - pkeyCols <- o .: "primary_key_columns" - constraints <- o .: "constraints" - viewInfoM <- o .:? "view_info" - isSystemDefined <- o .:? 
"is_system_defined" .!= False - let colMap = M.fromList $ flip map columns $ - \c -> (fromPGCol $ pgiName c, FIColumn c) - return $ TableInfo name isSystemDefined colMap mempty - constraints pkeyCols viewInfoM mempty +$(makeLenses ''TableInfo) data FunctionType = FTVOLATILE @@ -398,7 +379,7 @@ data FunctionInfo $(deriveToJSON (aesonDrop 2 snakeCase) ''FunctionInfo) -type TableCache = M.HashMap QualifiedTable TableInfo -- info of all tables +type TableCache columnInfo = M.HashMap QualifiedTable (TableInfo columnInfo) -- info of all tables type FunctionCache = M.HashMap QualifiedFunction FunctionInfo -- info of all functions data RemoteSchemaCtx @@ -443,7 +424,7 @@ incSchemaCacheVer (SchemaCacheVer prev) = data SchemaCache = SchemaCache - { scTables :: !TableCache + { scTables :: !(TableCache PGColInfo) , scFunctions :: !FunctionCache , scRemoteSchemas :: !RemoteSchemaMap , scAllowlist :: !(HS.HashSet GQLQuery) @@ -486,19 +467,19 @@ emptySchemaCache = SchemaCache M.empty M.empty M.empty HS.empty M.empty GC.emptyGCtx mempty [] -modTableCache :: (CacheRWM m) => TableCache -> m () +modTableCache :: (CacheRWM m) => TableCache PGColInfo -> m () modTableCache tc = do sc <- askSchemaCache writeSchemaCache $ sc { scTables = tc } addTableToCache :: (QErrM m, CacheRWM m) - => TableInfo -> m () + => TableInfo PGColInfo -> m () addTableToCache ti = do sc <- askSchemaCache assertTableNotExists tn sc modTableCache $ M.insert tn ti $ scTables sc where - tn = tiName ti + tn = _tiName ti delTableFromCache :: (QErrM m, CacheRWM m) => QualifiedTable -> m () @@ -514,7 +495,7 @@ delTableFromCache tn = do getTableInfoFromCache :: (QErrM m) => QualifiedTable -> SchemaCache - -> m TableInfo + -> m (TableInfo PGColInfo) getTableInfoFromCache tn sc = case M.lookup tn (scTables sc) of Nothing -> throw500 $ "table not found in cache : " <>> tn @@ -530,7 +511,7 @@ assertTableNotExists tn sc = Just _ -> throw500 $ "table exists in cache : " <>> tn modTableInCache :: (QErrM m, CacheRWM m) - => (TableInfo -> m TableInfo) + => (TableInfo PGColInfo -> m (TableInfo PGColInfo)) -> QualifiedTable -> m () modTableInCache f tn = do @@ -558,17 +539,17 @@ addRelToCache rn ri deps tn = do addFldToCache :: (QErrM m, CacheRWM m) - => FieldName -> FieldInfo + => FieldName -> FieldInfo PGColInfo -> QualifiedTable -> m () addFldToCache fn fi = modTableInCache modFieldInfoMap where modFieldInfoMap ti = do - let fim = tiFieldInfoMap ti + let fim = _tiFieldInfoMap ti case M.lookup fn fim of Just _ -> throw500 "field already exists " Nothing -> return $ - ti { tiFieldInfoMap = M.insert fn fi fim } + ti { _tiFieldInfoMap = M.insert fn fi fim } delFldFromCache :: (QErrM m, CacheRWM m) => FieldName -> QualifiedTable -> m () @@ -576,10 +557,10 @@ delFldFromCache fn = modTableInCache modFieldInfoMap where modFieldInfoMap ti = do - let fim = tiFieldInfoMap ti + let fim = _tiFieldInfoMap ti case M.lookup fn fim of Just _ -> return $ - ti { tiFieldInfoMap = M.delete fn fim } + ti { _tiFieldInfoMap = M.delete fn fim } Nothing -> throw500 "field does not exist" delColFromCache :: (QErrM m, CacheRWM m) @@ -639,8 +620,8 @@ addEventTriggerToCache qt eti deps = do where trn = etiName eti modEventTriggerInfo ti = do - let etim = tiEventTriggerInfoMap ti - return $ ti { tiEventTriggerInfoMap = M.insert trn eti etim} + let etim = _tiEventTriggerInfoMap ti + return $ ti { _tiEventTriggerInfoMap = M.insert trn eti etim} schObjId = SOTableObj qt $ TOTrigger trn delEventTriggerFromCache @@ -653,8 +634,8 @@ delEventTriggerFromCache qt trn = do modDepMapInCache 
(removeFromDepMap schObjId) where modEventTriggerInfo ti = do - let etim = tiEventTriggerInfoMap ti - return $ ti { tiEventTriggerInfoMap = M.delete trn etim } + let etim = _tiEventTriggerInfoMap ti + return $ ti { _tiEventTriggerInfoMap = M.delete trn etim } schObjId = SOTableObj qt $ TOTrigger trn addFunctionToCache @@ -713,11 +694,11 @@ addPermToCache tn rn pa i deps = do where paL = permAccToLens pa modRolePermInfo ti = do - let rpim = tiRolePermInfoMap ti + let rpim = _tiRolePermInfoMap ti rpi = fromMaybe mkRolePermInfo $ M.lookup rn rpim newRPI = rpi & paL ?~ i assertPermNotExists pa rpi - return $ ti { tiRolePermInfoMap = M.insert rn newRPI rpim } + return $ ti { _tiRolePermInfoMap = M.insert rn newRPI rpim } schObjId = SOTableObj tn $ TOPerm rn $ permAccToType pa assertPermNotExists @@ -746,11 +727,11 @@ delPermFromCache pa rn tn = do where paL = permAccToLens pa modRolePermInfo ti = do - let rpim = tiRolePermInfoMap ti + let rpim = _tiRolePermInfoMap ti rpi = fromMaybe mkRolePermInfo $ M.lookup rn rpim assertPermExists pa rpi let newRPI = rpi & paL .~ Nothing - return $ ti { tiRolePermInfoMap = M.insert rn newRPI rpim } + return $ ti { _tiRolePermInfoMap = M.insert rn newRPI rpim } schObjId = SOTableObj tn $ TOPerm rn $ permAccToType pa addRemoteSchemaToCache diff --git a/server/src-lib/Hasura/SQL/DML.hs b/server/src-lib/Hasura/SQL/DML.hs index 845ae24b9bb..ff8bd543d4b 100644 --- a/server/src-lib/Hasura/SQL/DML.hs +++ b/server/src-lib/Hasura/SQL/DML.hs @@ -225,23 +225,23 @@ newtype TypeAnn = TypeAnn {unTypeAnn :: T.Text} deriving (Show, Eq, Data) -mkTypeAnn :: PgType -> TypeAnn +mkTypeAnn :: PGType PGScalarType -> TypeAnn mkTypeAnn = TypeAnn . toSQLTxt intTypeAnn :: TypeAnn -intTypeAnn = mkTypeAnn $ PgTypeSimple PGInteger +intTypeAnn = mkTypeAnn $ PGTypeSimple PGInteger textTypeAnn :: TypeAnn -textTypeAnn = mkTypeAnn $ PgTypeSimple PGText +textTypeAnn = mkTypeAnn $ PGTypeSimple PGText textArrTypeAnn :: TypeAnn -textArrTypeAnn = mkTypeAnn $ PgTypeArray PGText +textArrTypeAnn = mkTypeAnn $ PGTypeArray PGText jsonTypeAnn :: TypeAnn -jsonTypeAnn = mkTypeAnn $ PgTypeSimple PGJSON +jsonTypeAnn = mkTypeAnn $ PGTypeSimple PGJSON jsonbTypeAnn :: TypeAnn -jsonbTypeAnn = mkTypeAnn $ PgTypeSimple PGJSONB +jsonbTypeAnn = mkTypeAnn $ PGTypeSimple PGJSONB data CountType = CTStar @@ -266,6 +266,7 @@ instance ToSQL TupleExp where data SQLExp = SEPrep !Int + | SENull | SELit !T.Text | SEUnsafe !T.Text | SESelect !Select @@ -286,7 +287,7 @@ data SQLExp deriving (Show, Eq, Data) withTyAnn :: PGScalarType -> SQLExp -> SQLExp -withTyAnn colTy v = SETyAnn v . mkTypeAnn $ PgTypeSimple colTy +withTyAnn colTy v = SETyAnn v . mkTypeAnn $ PGTypeSimple colTy instance J.ToJSON SQLExp where toJSON = J.toJSON . toSQLTxt @@ -310,6 +311,8 @@ countStar = SECount CTStar instance ToSQL SQLExp where toSQL (SEPrep argNumber) = TB.char '$' <> fromString (show argNumber) + toSQL SENull = + TB.text "null" toSQL (SELit tv) = TB.text $ pgFmtLit tv toSQL (SEUnsafe t) = diff --git a/server/src-lib/Hasura/SQL/Error.hs b/server/src-lib/Hasura/SQL/Error.hs new file mode 100644 index 00000000000..25d6def5a5d --- /dev/null +++ b/server/src-lib/Hasura/SQL/Error.hs @@ -0,0 +1,95 @@ +-- | Functions and datatypes for interpreting Postgres errors. +module Hasura.SQL.Error where + +import Hasura.Prelude + +import Control.Lens.TH (makePrisms) + +import qualified Data.Text as T +import qualified Database.PG.Query.Connection as Q + +-- | The top-level error code type. 
Errors in Postgres are divided into different /classes/, which +-- are further subdivided into individual error codes. Even if a particular status code is not known +-- to the application, it’s possible to determine its class and handle it appropriately. +data PgErrorType + = PgDataException !(Maybe (PgErrorCode PgDataException)) + | PgIntegrityConstraintViolation !(Maybe (PgErrorCode PgIntegrityConstraintViolation)) + | PgSyntaxErrorOrAccessRuleViolation !(Maybe (PgErrorCode PgSyntaxErrorOrAccessRuleViolation)) + deriving (Show, Eq) + +data PgErrorCode a + = PgErrorGeneric + -- ^ represents errors that have the non-specific @000@ status code + | PgErrorSpecific !a + -- ^ represents errors with a known, more specific status code + deriving (Show, Eq, Functor) + +data PgDataException + = PgInvalidDatetimeFormat + | PgInvalidParameterValue + | PgInvalidTextRepresentation + deriving (Show, Eq) + +data PgIntegrityConstraintViolation + = PgRestrictViolation + | PgNotNullViolation + | PgForeignKeyViolation + | PgUniqueViolation + | PgCheckViolation + | PgExclusionViolation + deriving (Show, Eq) + +data PgSyntaxErrorOrAccessRuleViolation + = PgUndefinedObject + | PgInvalidColumnReference + deriving (Show, Eq) + +$(makePrisms ''PgErrorType) +$(makePrisms ''PgErrorCode) + +pgErrorType :: Q.PGStmtErrDetail -> Maybe PgErrorType +pgErrorType errorDetails = parseTypes =<< Q.edStatusCode errorDetails + where + parseTypes fullCodeText = choice + [ withClass "22" PgDataException + [ code "007" PgInvalidDatetimeFormat + , code "023" PgInvalidParameterValue + , code "P02" PgInvalidTextRepresentation + ] + , withClass "23" PgIntegrityConstraintViolation + [ code "001" PgRestrictViolation + , code "502" PgNotNullViolation + , code "503" PgForeignKeyViolation + , code "505" PgUniqueViolation + , code "514" PgCheckViolation + , code "P01" PgExclusionViolation + ] + , withClass "42" PgSyntaxErrorOrAccessRuleViolation + [ code "704" PgUndefinedObject + , code "P10" PgInvalidColumnReference + ] + ] + where + (classText, codeText) = T.splitAt 2 fullCodeText + + withClass :: T.Text -> (Maybe a -> b) -> [Maybe a] -> Maybe b + withClass expectedClassText mkClass codes = + guard (classText == expectedClassText) $> mkClass (choice codes) + + code :: T.Text -> a -> Maybe (PgErrorCode a) + code expectedCodeText codeValue = + guard (codeText == expectedCodeText) $> PgErrorSpecific codeValue + +pgErrorToText :: Q.PGStmtErrDetail -> T.Text +pgErrorToText errorDetail = + fromMaybe "postgres error" (Q.edMessage errorDetail) + <> maybe "" formatDescription (Q.edDescription errorDetail) + <> maybe "" formatHint (Q.edHint errorDetail) + where + formatDescription description = ";\n" <> prefixLines " " description + formatHint hint = "\n hint: " <> prefixLinesExceptFirst " " hint + + prefixLinesExceptFirst prefix content = + T.intercalate ("\n" <> prefix) (T.lines content) + prefixLines prefix content = + prefix <> prefixLinesExceptFirst prefix content diff --git a/server/src-lib/Hasura/SQL/Rewrite.hs b/server/src-lib/Hasura/SQL/Rewrite.hs index 420f22c342b..2e11e911b47 100644 --- a/server/src-lib/Hasura/SQL/Rewrite.hs +++ b/server/src-lib/Hasura/SQL/Rewrite.hs @@ -146,6 +146,7 @@ uOrderBy (S.OrderByExp ordByItems) = uSqlExp :: S.SQLExp -> Uniq S.SQLExp uSqlExp = restoringIdens . 
\case S.SEPrep i -> return $ S.SEPrep i + S.SENull -> return S.SENull S.SELit t -> return $ S.SELit t S.SEUnsafe t -> return $ S.SEUnsafe t S.SESelect s -> S.SESelect <$> uSelect s diff --git a/server/src-lib/Hasura/SQL/Types.hs b/server/src-lib/Hasura/SQL/Types.hs index 7f955dbbf30..d3fbc123b13 100644 --- a/server/src-lib/Hasura/SQL/Types.hs +++ b/server/src-lib/Hasura/SQL/Types.hs @@ -7,6 +7,7 @@ import Hasura.Prelude import Data.Aeson import Data.Aeson.Encoding (text) +import Data.Aeson.TH import Data.Aeson.Types (toJSONKeyText) import Instances.TH.Lift () import Language.Haskell.TH.Syntax (Lift) @@ -392,9 +393,15 @@ isNumType PGDouble = True isNumType PGNumeric = True isNumType ty = isIntegerType ty -isJSONBType :: PGScalarType -> Bool -isJSONBType PGJSONB = True -isJSONBType _ = False +stringTypes :: [PGScalarType] +stringTypes = [PGVarchar, PGText] +isStringType :: PGScalarType -> Bool +isStringType = (`elem` stringTypes) + +jsonTypes :: [PGScalarType] +jsonTypes = [PGJSON, PGJSONB] +isJSONType :: PGScalarType -> Bool +isJSONType = (`elem` jsonTypes) isComparableType :: PGScalarType -> Bool isComparableType PGJSON = False @@ -413,26 +420,33 @@ isBigNum = \case PGDouble -> True _ -> False +geoTypes :: [PGScalarType] +geoTypes = [PGGeometry, PGGeography] isGeoType :: PGScalarType -> Bool -isGeoType = \case - PGGeometry -> True - PGGeography -> True - _ -> False +isGeoType = (`elem` geoTypes) --- | The type of all Postgres types (i.e. scalars and arrays). +data PGScalarTyped a + = PGScalarTyped + { pstType :: !PGScalarType + , pstValue :: !a + } deriving (Show, Eq, Functor, Foldable, Traversable) + +-- | The type of all Postgres types (i.e. scalars and arrays). This type is parameterized so that +-- we can have both @'PGType' 'PGScalarType'@ and @'PGType' 'Hasura.RQL.Types.PGColumnType'@, for +-- when we care about the distinction made by 'Hasura.RQL.Types.PGColumnType'. If we ever change +-- 'Hasura.RQL.Types.PGColumnType' to handle arrays, not just scalars, then the parameterization can +-- go away. -- -- TODO: This is incorrect modeling, as 'PGScalarType' will capture anything (under 'PGUnknown'). -- This should be fixed when support for all types is merged. -data PgType - = PgTypeSimple !PGScalarType - | PgTypeArray !PGScalarType - deriving (Show, Eq, Data) +data PGType a + = PGTypeSimple !a + | PGTypeArray !a + deriving (Show, Eq, Data, Functor) +$(deriveJSON defaultOptions{constructorTagModifier = drop 6} ''PGType) -instance ToSQL PgType where +instance (ToSQL a) => ToSQL (PGType a) where toSQL = \case - PgTypeSimple ty -> toSQL ty + PGTypeSimple ty -> toSQL ty -- typename array is an sql standard way of declaring types - PgTypeArray ty -> toSQL ty <> " array" - -instance ToJSON PgType where - toJSON = toJSON . 
toSQLTxt + PGTypeArray ty -> toSQL ty <> " array" diff --git a/server/src-lib/Hasura/SQL/Value.hs b/server/src-lib/Hasura/SQL/Value.hs index 9f39b1c483e..13560c5713b 100644 --- a/server/src-lib/Hasura/SQL/Value.hs +++ b/server/src-lib/Hasura/SQL/Value.hs @@ -136,50 +136,29 @@ textToPrepVal t = parsePGValue' :: PGScalarType -> Value -> AT.Parser PGColValue -parsePGValue' ty Null = - return $ PGNull ty -parsePGValue' PGSmallInt val = - PGValSmallInt <$> parseJSON val -parsePGValue' PGInteger val = - PGValInteger <$> parseJSON val -parsePGValue' PGBigInt val = - PGValBigInt <$> parseJSON val -parsePGValue' PGSerial val = - PGValInteger <$> parseJSON val -parsePGValue' PGBigSerial val = - PGValBigInt <$> parseJSON val -parsePGValue' PGFloat val = - PGValFloat <$> parseJSON val -parsePGValue' PGDouble val = - PGValDouble <$> parseJSON val -parsePGValue' PGNumeric val = - PGValNumeric <$> parseJSON val -parsePGValue' PGBoolean val = - PGValBoolean <$> parseJSON val -parsePGValue' PGChar val = - PGValChar <$> parseJSON val -parsePGValue' PGVarchar val = - PGValVarchar <$> parseJSON val -parsePGValue' PGText val = - PGValText <$> parseJSON val -parsePGValue' PGDate val = - PGValDate <$> parseJSON val -parsePGValue' PGTimeStampTZ val = - PGValTimeStampTZ <$> parseJSON val -parsePGValue' PGTimeTZ val = - PGValTimeTZ <$> parseJSON val -parsePGValue' PGJSON val = - PGValJSON . Q.JSON <$> parseJSON val -parsePGValue' PGJSONB val = - PGValJSONB . Q.JSONB <$> parseJSON val -parsePGValue' PGGeometry val = - PGValGeo <$> parseJSON val -parsePGValue' PGGeography val = - PGValGeo <$> parseJSON val -parsePGValue' (PGUnknown _) (String t) = - return $ PGValUnknown t -parsePGValue' (PGUnknown tyName) _ = - fail $ "A string is expected for type : " ++ T.unpack tyName +parsePGValue' ty v = case (ty, v) of + (_, Null) -> return $ PGNull ty + (PGSmallInt, val) -> PGValSmallInt <$> parseJSON val + (PGInteger, val) -> PGValInteger <$> parseJSON val + (PGBigInt, val) -> PGValBigInt <$> parseJSON val + (PGSerial, val) -> PGValInteger <$> parseJSON val + (PGBigSerial, val) -> PGValBigInt <$> parseJSON val + (PGFloat, val) -> PGValFloat <$> parseJSON val + (PGDouble, val) -> PGValDouble <$> parseJSON val + (PGNumeric, val) -> PGValNumeric <$> parseJSON val + (PGBoolean, val) -> PGValBoolean <$> parseJSON val + (PGChar, val) -> PGValChar <$> parseJSON val + (PGVarchar, val) -> PGValVarchar <$> parseJSON val + (PGText, val) -> PGValText <$> parseJSON val + (PGDate, val) -> PGValDate <$> parseJSON val + (PGTimeStampTZ, val) -> PGValTimeStampTZ <$> parseJSON val + (PGTimeTZ, val) -> PGValTimeTZ <$> parseJSON val + (PGJSON, val) -> PGValJSON . Q.JSON <$> parseJSON val + (PGJSONB, val) -> PGValJSONB . Q.JSONB <$> parseJSON val + (PGGeometry, val) -> PGValGeo <$> parseJSON val + (PGGeography, val) -> PGValGeo <$> parseJSON val + (PGUnknown _, String t) -> return $ PGValUnknown t + (PGUnknown tyName, _) -> fail $ "A string is expected for type : " ++ T.unpack tyName parsePGValue :: PGScalarType -> Value -> AT.Parser PGColValue parsePGValue pct val = @@ -187,18 +166,6 @@ parsePGValue pct val = String t -> parsePGValue' pct val <|> return (PGValUnknown t) _ -> parsePGValue' pct val -convToBin :: PGScalarType - -> Value - -> AT.Parser Q.PrepArg -convToBin ty val = - binEncoder <$> parsePGValue ty val - -convToTxt :: PGScalarType - -> Value - -> AT.Parser S.SQLExp -convToTxt ty val = - toTxtValue ty <$> parsePGValue ty val - readEitherTxt :: (Read a) => T.Text -> Either String a readEitherTxt = readEither . 
T.unpack @@ -210,26 +177,19 @@ pgValFromJVal :: (FromJSON a) => Value -> Either String a pgValFromJVal = iresToEither . ifromJSON withGeoVal :: PGScalarType -> S.SQLExp -> S.SQLExp -withGeoVal ty v = - bool v applyGeomFromGeoJson isGeoTy - where - applyGeomFromGeoJson = - S.SEFnApp "ST_GeomFromGeoJSON" [v] Nothing - - isGeoTy = case ty of - PGGeometry -> True - PGGeography -> True - _ -> False +withGeoVal ty v + | isGeoType ty = S.SEFnApp "ST_GeomFromGeoJSON" [v] Nothing + | otherwise = v toPrepParam :: Int -> PGScalarType -> S.SQLExp toPrepParam i ty = withGeoVal ty $ S.SEPrep i -toTxtValue :: PGScalarType -> PGColValue -> S.SQLExp -toTxtValue ty val = - S.withTyAnn ty txtVal - where - txtVal = withGeoVal ty $ txtEncoder val +toBinaryValue :: PGScalarTyped PGColValue -> Q.PrepArg +toBinaryValue = binEncoder . pstValue + +toTxtValue :: PGScalarTyped PGColValue -> S.SQLExp +toTxtValue (PGScalarTyped ty val) = S.withTyAnn ty . withGeoVal ty $ txtEncoder val pgColValueToInt :: PGColValue -> Maybe Int pgColValueToInt (PGValInteger i) = Just $ fromIntegral i diff --git a/server/src-lib/Hasura/Server/Init.hs b/server/src-lib/Hasura/Server/Init.hs index af63f7bb880..f1463e2b0ff 100644 --- a/server/src-lib/Hasura/Server/Init.hs +++ b/server/src-lib/Hasura/Server/Init.hs @@ -656,11 +656,11 @@ connInfoErrModifier :: String -> String connInfoErrModifier s = "Fatal Error : " ++ s mkConnInfo ::RawConnInfo -> Either String Q.ConnInfo -mkConnInfo (RawConnInfo mHost mPort mUser pass mURL mDB opts mRetries) = +mkConnInfo (RawConnInfo mHost mPort mUser password mURL mDB opts mRetries) = case (mHost, mPort, mUser, mDB, mURL) of (Just host, Just port, Just user, Just db, Nothing) -> - return $ Q.ConnInfo host port user pass db opts retries + return $ Q.ConnInfo host port user password db opts retries (_, _, _, _, Just dbURL) -> maybe (throwError invalidUrlMsg) withRetries $ parseDatabaseUrl dbURL opts diff --git a/server/src-lib/Hasura/Server/Query.hs b/server/src-lib/Hasura/Server/Query.hs index dc6a2acaf82..2753491b69f 100644 --- a/server/src-lib/Hasura/Server/Query.hs +++ b/server/src-lib/Hasura/Server/Query.hs @@ -33,6 +33,7 @@ data RQLQuery = RQAddExistingTableOrView !TrackTable | RQTrackTable !TrackTable | RQUntrackTable !UntrackTable + | RQSetTableIsEnum !SetTableIsEnum | RQTrackFunction !TrackFunction | RQUntrackFunction !UnTrackFunction @@ -173,6 +174,7 @@ queryNeedsReload qi = case qi of RQUntrackTable _ -> True RQTrackFunction _ -> True RQUntrackFunction _ -> True + RQSetTableIsEnum _ -> True RQCreateObjectRelationship _ -> True RQCreateArrayRelationship _ -> True @@ -242,6 +244,7 @@ runQueryM rq = RQAddExistingTableOrView q -> runTrackTableQ q RQTrackTable q -> runTrackTableQ q RQUntrackTable q -> runUntrackTableQ q + RQSetTableIsEnum q -> runSetExistingTableIsEnumQ q RQTrackFunction q -> runTrackFunc q RQUntrackFunction q -> runUntrackFunc q diff --git a/server/src-lib/Hasura/Server/Telemetry.hs b/server/src-lib/Hasura/Server/Telemetry.hs index cd40cc09f31..2e72175aa62 100644 --- a/server/src-lib/Hasura/Server/Telemetry.hs +++ b/server/src-lib/Hasura/Server/Telemetry.hs @@ -128,8 +128,8 @@ runTelemetry (Logger logger) manager cacheRef dbId instanceId = do computeMetrics :: SchemaCache -> Metrics computeMetrics sc = - let nTables = Map.size $ Map.filter (isNothing . tiViewInfo) usrTbls - nViews = Map.size $ Map.filter (isJust . tiViewInfo) usrTbls + let nTables = Map.size $ Map.filter (isNothing . _tiViewInfo) usrTbls + nViews = Map.size $ Map.filter (isJust . 
_tiViewInfo) usrTbls allRels = join $ Map.elems $ Map.map relsOfTbl usrTbls (manualRels, autoRels) = partition riIsManual allRels relMetrics = RelationshipMetric (length manualRels) (length autoRels) @@ -143,23 +143,23 @@ computeMetrics sc = permMetrics = PermissionMetric selPerms insPerms updPerms delPerms nRoles evtTriggers = Map.size $ Map.filter (not . Map.null) - $ Map.map tiEventTriggerInfoMap usrTbls + $ Map.map _tiEventTriggerInfoMap usrTbls rmSchemas = Map.size $ scRemoteSchemas sc funcs = Map.size $ Map.filter (not . fiSystemDefined) $ scFunctions sc in Metrics nTables nViews relMetrics permMetrics evtTriggers rmSchemas funcs where - usrTbls = Map.filter (not . tiSystemDefined) $ scTables sc + usrTbls = Map.filter (not . _tiSystemDefined) $ scTables sc calcPerms :: (RolePermInfo -> Maybe a) -> [RolePermInfo] -> Int calcPerms fn perms = length $ catMaybes $ map fn perms - relsOfTbl :: TableInfo -> [RelInfo] - relsOfTbl = rights . Map.elems . Map.map fieldInfoToEither . tiFieldInfoMap + relsOfTbl :: TableInfo PGColInfo -> [RelInfo] + relsOfTbl = rights . Map.elems . Map.map fieldInfoToEither . _tiFieldInfoMap - permsOfTbl :: TableInfo -> [(RoleName, RolePermInfo)] - permsOfTbl = Map.toList . tiRolePermInfoMap + permsOfTbl :: TableInfo PGColInfo -> [(RoleName, RolePermInfo)] + permsOfTbl = Map.toList . _tiRolePermInfoMap getDbId :: Q.TxE QErr Text diff --git a/server/src-rsr/catalog_metadata.sql b/server/src-rsr/catalog_metadata.sql index efed613b804..693ebf292b8 100644 --- a/server/src-rsr/catalog_metadata.sql +++ b/server/src-rsr/catalog_metadata.sql @@ -14,38 +14,28 @@ from select coalesce(json_agg( json_build_object( - 'table', - json_build_object( + 'name', json_build_object( 'name', ht.table_name, 'schema', ht.table_schema ), - 'system_defined', ht.is_system_defined, - 'info', tables.info + 'is_enum', ht.is_enum, + 'is_system_defined', ht.is_system_defined, + 'info', t.info ) ), '[]') as items - from - hdb_catalog.hdb_table as ht - left outer join ( - select - table_schema, - table_name, - json_build_object( - 'name', - json_build_object( - 'schema', table_schema, - 'name', table_name - ), - 'columns', columns, - 'primary_key_columns', primary_key_columns, - 'constraints', constraints, - 'view_info', view_info - ) as info - from - hdb_catalog.hdb_table_info_agg - ) as tables on ( - tables.table_schema = ht.table_schema - and tables.table_name = ht.table_name - ) + from hdb_catalog.hdb_table as ht + left outer join ( + select + table_schema, + table_name, + jsonb_build_object( + 'columns', columns, + 'primary_key_columns', primary_key_columns, + 'constraints', constraints, + 'view_info', view_info + ) as info + from hdb_catalog.hdb_table_info_agg + ) as t using (table_schema, table_name) ) as tables, ( select diff --git a/server/src-rsr/initialise.sql b/server/src-rsr/initialise.sql index 568e3b3b295..ff63f74f8ad 100644 --- a/server/src-rsr/initialise.sql +++ b/server/src-rsr/initialise.sql @@ -14,26 +14,11 @@ CREATE TABLE hdb_catalog.hdb_table table_schema TEXT, table_name TEXT, is_system_defined boolean default false, + is_enum boolean NOT NULL DEFAULT false, PRIMARY KEY (table_schema, table_name) ); -CREATE FUNCTION hdb_catalog.hdb_table_oid_check() RETURNS trigger AS -$function$ - BEGIN - IF (EXISTS (SELECT 1 FROM information_schema.tables st WHERE st.table_schema = NEW.table_schema AND st.table_name = NEW.table_name)) THEN - return NEW; - ELSE - RAISE foreign_key_violation using message = 'table_schema, table_name not in information_schema.tables'; - return NULL; - END IF; 
- END; -$function$ -LANGUAGE plpgsql; - -CREATE TRIGGER hdb_table_oid_check BEFORE INSERT OR UPDATE ON hdb_catalog.hdb_table - FOR EACH ROW EXECUTE PROCEDURE hdb_catalog.hdb_table_oid_check(); - CREATE TABLE hdb_catalog.hdb_relationship ( table_schema TEXT, @@ -83,7 +68,9 @@ SELECT min(q.ref_table) :: text as ref_table, json_object_agg(ac.attname, afc.attname) as column_mapping, min(q.confupdtype) :: text as on_update, - min(q.confdeltype) :: text as on_delete + min(q.confdeltype) :: text as on_delete, + json_agg(ac.attname) as columns, + json_agg(afc.attname) as ref_columns FROM (SELECT ctn.nspname AS table_schema, @@ -431,6 +418,37 @@ CREATE TRIGGER hdb_schema_update_event_notifier AFTER INSERT OR UPDATE ON hdb_catalog.hdb_schema_update_event FOR EACH ROW EXECUTE PROCEDURE hdb_catalog.hdb_schema_update_event_notifier(); +CREATE VIEW hdb_catalog.hdb_column AS + WITH primary_key_references AS ( + SELECT fkey.table_schema AS src_table_schema + , fkey.table_name AS src_table_name + , fkey.columns->>0 AS src_column_name + , json_agg(json_build_object( + 'schema', fkey.ref_table_table_schema, + 'name', fkey.ref_table + )) AS ref_tables + FROM hdb_catalog.hdb_foreign_key_constraint AS fkey + JOIN hdb_catalog.hdb_primary_key AS pkey + ON pkey.table_schema = fkey.ref_table_table_schema + AND pkey.table_name = fkey.ref_table + AND pkey.columns::jsonb = fkey.ref_columns::jsonb + WHERE json_array_length(fkey.columns) = 1 + GROUP BY fkey.table_schema + , fkey.table_name + , fkey.columns->>0) + SELECT columns.table_schema + , columns.table_name + , columns.column_name AS name + , columns.udt_name AS type + , columns.is_nullable + , columns.ordinal_position + , coalesce(pkey_refs.ref_tables, '[]') AS primary_key_references + FROM information_schema.columns +LEFT JOIN primary_key_references AS pkey_refs + ON columns.table_schema = pkey_refs.src_table_schema + AND columns.table_name = pkey_refs.src_table_name + AND columns.column_name = pkey_refs.src_column_name; + CREATE VIEW hdb_catalog.hdb_table_info_agg AS ( select tables.table_name as table_name, @@ -447,16 +465,14 @@ from c.table_schema, json_agg( json_build_object( - 'name', - column_name, - 'type', - udt_name, - 'is_nullable', - is_nullable :: boolean + 'name', name, + 'type', type, + 'is_nullable', is_nullable :: boolean, + 'references', primary_key_references ) ) as columns from - information_schema.columns c + hdb_catalog.hdb_column c group by c.table_schema, c.table_name diff --git a/server/src-rsr/migrate_from_19_to_20.sql b/server/src-rsr/migrate_from_19_to_20.sql new file mode 100644 index 00000000000..bb8d5b1a82f --- /dev/null +++ b/server/src-rsr/migrate_from_19_to_20.sql @@ -0,0 +1,158 @@ +ALTER TABLE hdb_catalog.hdb_table + ADD COLUMN is_enum boolean NOT NULL DEFAULT false; + +DROP TRIGGER hdb_table_oid_check ON hdb_catalog.hdb_table; +DROP FUNCTION hdb_catalog.hdb_table_oid_check(); + +CREATE OR REPLACE VIEW hdb_catalog.hdb_foreign_key_constraint AS +SELECT + q.table_schema :: text, + q.table_name :: text, + q.constraint_name :: text, + min(q.constraint_oid) :: integer as constraint_oid, + min(q.ref_table_table_schema) :: text as ref_table_table_schema, + min(q.ref_table) :: text as ref_table, + json_object_agg(ac.attname, afc.attname) as column_mapping, + min(q.confupdtype) :: text as on_update, + min(q.confdeltype) :: text as on_delete, + json_agg(ac.attname) as columns, + json_agg(afc.attname) as ref_columns +FROM + (SELECT + ctn.nspname AS table_schema, + ct.relname AS table_name, + r.conrelid AS table_id, + r.conname as 
constraint_name, + r.oid as constraint_oid, + cftn.nspname AS ref_table_table_schema, + cft.relname as ref_table, + r.confrelid as ref_table_id, + r.confupdtype, + r.confdeltype, + UNNEST (r.conkey) AS column_id, + UNNEST (r.confkey) AS ref_column_id + FROM + pg_catalog.pg_constraint r + JOIN pg_catalog.pg_class ct + ON r.conrelid = ct.oid + JOIN pg_catalog.pg_namespace ctn + ON ct.relnamespace = ctn.oid + JOIN pg_catalog.pg_class cft + ON r.confrelid = cft.oid + JOIN pg_catalog.pg_namespace cftn + ON cft.relnamespace = cftn.oid + WHERE + r.contype = 'f' + ) q + JOIN pg_catalog.pg_attribute ac + ON q.column_id = ac.attnum + AND q.table_id = ac.attrelid + JOIN pg_catalog.pg_attribute afc + ON q.ref_column_id = afc.attnum + AND q.ref_table_id = afc.attrelid +GROUP BY q.table_schema, q.table_name, q.constraint_name; + +CREATE VIEW hdb_catalog.hdb_column AS + WITH primary_key_references AS ( + SELECT fkey.table_schema AS src_table_schema + , fkey.table_name AS src_table_name + , fkey.columns->>0 AS src_column_name + , json_agg(json_build_object( + 'schema', fkey.ref_table_table_schema, + 'name', fkey.ref_table + )) AS ref_tables + FROM hdb_catalog.hdb_foreign_key_constraint AS fkey + JOIN hdb_catalog.hdb_primary_key AS pkey + ON pkey.table_schema = fkey.ref_table_table_schema + AND pkey.table_name = fkey.ref_table + AND pkey.columns::jsonb = fkey.ref_columns::jsonb + WHERE json_array_length(fkey.columns) = 1 + GROUP BY fkey.table_schema + , fkey.table_name + , fkey.columns->>0) + SELECT columns.table_schema + , columns.table_name + , columns.column_name AS name + , columns.udt_name AS type + , columns.is_nullable + , columns.ordinal_position + , coalesce(pkey_refs.ref_tables, '[]') AS primary_key_references + FROM information_schema.columns +LEFT JOIN primary_key_references AS pkey_refs + ON columns.table_schema = pkey_refs.src_table_schema + AND columns.table_name = pkey_refs.src_table_name + AND columns.column_name = pkey_refs.src_column_name; + +CREATE OR REPLACE VIEW hdb_catalog.hdb_table_info_agg AS ( +select + tables.table_name as table_name, + tables.table_schema as table_schema, + coalesce(columns.columns, '[]') as columns, + coalesce(pk.columns, '[]') as primary_key_columns, + coalesce(constraints.constraints, '[]') as constraints, + coalesce(views.view_info, 'null') as view_info +from + information_schema.tables as tables + left outer join ( + select + c.table_name, + c.table_schema, + json_agg( + json_build_object( + 'name', name, + 'type', type, + 'is_nullable', is_nullable :: boolean, + 'references', primary_key_references + ) + ) as columns + from + hdb_catalog.hdb_column c + group by + c.table_schema, + c.table_name + ) columns on ( + tables.table_schema = columns.table_schema + AND tables.table_name = columns.table_name + ) + left outer join ( + select * from hdb_catalog.hdb_primary_key + ) pk on ( + tables.table_schema = pk.table_schema + AND tables.table_name = pk.table_name + ) + left outer join ( + select + c.table_schema, + c.table_name, + json_agg(constraint_name) as constraints + from + information_schema.table_constraints c + where + c.constraint_type = 'UNIQUE' + or c.constraint_type = 'PRIMARY KEY' + group by + c.table_schema, + c.table_name + ) constraints on ( + tables.table_schema = constraints.table_schema + AND tables.table_name = constraints.table_name + ) + left outer join ( + select + table_schema, + table_name, + json_build_object( + 'is_updatable', + (is_updatable::boolean OR is_trigger_updatable::boolean), + 'is_deletable', + (is_updatable::boolean OR 
is_trigger_deletable::boolean), + 'is_insertable', + (is_insertable_into::boolean OR is_trigger_insertable_into::boolean) + ) as view_info + from + information_schema.views v + ) views on ( + tables.table_schema = views.table_schema + AND tables.table_name = views.table_name + ) +); diff --git a/server/src-rsr/table_meta.sql b/server/src-rsr/table_meta.sql index b6c539c9291..087094fe3b4 100644 --- a/server/src-rsr/table_meta.sql +++ b/server/src-rsr/table_meta.sql @@ -20,21 +20,16 @@ FROM table_schema, table_name, json_agg( - ( - SELECT - r - FROM - ( - SELECT - column_name, - udt_name AS data_type, - ordinal_position, - is_nullable :: boolean - ) r + json_build_object( + 'column_name', name, + 'data_type', type, + 'is_nullable', is_nullable :: boolean, + 'ordinal_position', ordinal_position, + 'references', primary_key_references ) ) as columns FROM - information_schema.columns + hdb_catalog.hdb_column GROUP BY table_schema, table_name diff --git a/server/stack.yaml b/server/stack.yaml index 6b78d5fadd9..9b61426f9d0 100644 --- a/server/stack.yaml +++ b/server/stack.yaml @@ -32,6 +32,7 @@ extra-deps: - reroute-0.5.0.0 - Spock-core-0.13.0.0 +- monad-validate-1.2.0.0 # Override default flag values for local packages and extra-deps flags: {} diff --git a/server/stack.yaml.lock b/server/stack.yaml.lock index 5367dd13598..2297ee47c55 100644 --- a/server/stack.yaml.lock +++ b/server/stack.yaml.lock @@ -95,6 +95,13 @@ packages: sha256: 86140298020f68bb09d07b26a6a6f1666fc3a02715d7986b09150727247a1a84 original: hackage: Spock-core-0.13.0.0 +- completed: + hackage: monad-validate-1.2.0.0@sha256:9850f408431098b28806dd464b6825a88a0b56c84f380d7fe0454c1df9d6f881,3505 + pantry-tree: + size: 713 + sha256: 8e049bd12ce2bd470909578f2ee8eb80b89d5ff88860afa30e29dd4eafecfa3e + original: + hackage: monad-validate-1.2.0.0 snapshots: - completed: size: 498167 diff --git a/server/tests-py/queries/graphql_mutation/enums/delete_where_enum_field.yaml b/server/tests-py/queries/graphql_mutation/enums/delete_where_enum_field.yaml new file mode 100644 index 00000000000..e8a9bcb2a93 --- /dev/null +++ b/server/tests-py/queries/graphql_mutation/enums/delete_where_enum_field.yaml @@ -0,0 +1,21 @@ +description: Test deleting records filtered by an enum reference +url: /v1/graphql +status: 200 +response: + data: + delete_users: + affected_rows: 1 + returning: + - name: Alyssa + favorite_color: red +query: + query: | + mutation { + delete_users(where: {favorite_color: {_eq: red}}) { + affected_rows + returning { + name + favorite_color + } + } + } diff --git a/server/tests-py/queries/graphql_mutation/enums/insert_enum_field.yaml b/server/tests-py/queries/graphql_mutation/enums/insert_enum_field.yaml new file mode 100644 index 00000000000..bebef11882b --- /dev/null +++ b/server/tests-py/queries/graphql_mutation/enums/insert_enum_field.yaml @@ -0,0 +1,24 @@ +description: Test inserting a record that references an enum table +url: /v1/graphql +status: 200 +response: + data: + insert_users: + returning: + - name: Matthew + favorite_color: yellow + - name: Robby + favorite_color: purple +query: + query: | + mutation { + insert_users(objects: [ + { name: "Matthew", favorite_color: yellow }, + { name: "Robby", favorite_color: purple } + ]) { + returning { + name + favorite_color + } + } + } diff --git a/server/tests-py/queries/graphql_mutation/enums/insert_enum_field_bad_value.yaml b/server/tests-py/queries/graphql_mutation/enums/insert_enum_field_bad_value.yaml new file mode 100644 index 00000000000..ca4f5d6b647 --- /dev/null +++ 
b/server/tests-py/queries/graphql_mutation/enums/insert_enum_field_bad_value.yaml @@ -0,0 +1,19 @@ +description: Test inserting a record with an invalid enum value +url: /v1/graphql +status: 200 +response: + errors: + - message: 'unexpected value "not_a_real_color" for enum: ''colors_enum''' + extensions: + code: validation-failed + path: $.selectionSet.insert_users.args.objects[0].favorite_color +query: + query: | + mutation { + insert_users(objects: [{ name: "Matthew", favorite_color: not_a_real_color }]) { + returning { + name + favorite_color + } + } + } diff --git a/server/tests-py/queries/graphql_mutation/enums/schema_setup.yaml b/server/tests-py/queries/graphql_mutation/enums/schema_setup.yaml new file mode 100644 index 00000000000..dd42ea54377 --- /dev/null +++ b/server/tests-py/queries/graphql_mutation/enums/schema_setup.yaml @@ -0,0 +1,28 @@ +type: bulk +args: + +- type: run_sql + args: + sql: | + CREATE TABLE colors + ( value text PRIMARY KEY + , comment text ); + INSERT INTO colors (value, comment) VALUES + ('red', '#FF0000'), + ('green', '#00FF00'), + ('blue', '#0000FF'), + ('orange', '#FFFF00'), + ('yellow', '#00FFFF'), + ('purple', '#FF00FF'); + + CREATE TABLE users + ( id serial PRIMARY KEY + , name text NOT NULL + , favorite_color text NOT NULL REFERENCES colors ); + +- type: track_table + args: + table: colors + is_enum: true +- type: track_table + args: users diff --git a/server/tests-py/queries/graphql_mutation/enums/schema_teardown.yaml b/server/tests-py/queries/graphql_mutation/enums/schema_teardown.yaml new file mode 100644 index 00000000000..e72c029f262 --- /dev/null +++ b/server/tests-py/queries/graphql_mutation/enums/schema_teardown.yaml @@ -0,0 +1,8 @@ +type: bulk +args: +- type: run_sql + args: + sql: | + DROP TABLE users; + DROP TABLE colors; + cascade: true diff --git a/server/tests-py/queries/graphql_mutation/enums/update_enum_field.yaml b/server/tests-py/queries/graphql_mutation/enums/update_enum_field.yaml new file mode 100644 index 00000000000..c9f5e398537 --- /dev/null +++ b/server/tests-py/queries/graphql_mutation/enums/update_enum_field.yaml @@ -0,0 +1,21 @@ +description: Test updating a record that references an enum table +url: /v1/graphql +status: 200 +response: + data: + update_users: + affected_rows: 1 + returning: + - name: Alyssa + favorite_color: blue +query: + query: | + mutation { + update_users(where: {id: {_eq: 1}}, _set: {favorite_color: blue}) { + affected_rows + returning { + name + favorite_color + } + } + } diff --git a/server/tests-py/queries/graphql_mutation/enums/update_where_enum_field.yaml b/server/tests-py/queries/graphql_mutation/enums/update_where_enum_field.yaml new file mode 100644 index 00000000000..c7ba85e9345 --- /dev/null +++ b/server/tests-py/queries/graphql_mutation/enums/update_where_enum_field.yaml @@ -0,0 +1,21 @@ +description: Test updating records filtered by an enum reference +url: /v1/graphql +status: 200 +response: + data: + update_users: + affected_rows: 1 + returning: + - name: Alyssa + favorite_color: blue +query: + query: | + mutation { + update_users(where: {favorite_color: {_eq: red}}, _set: {favorite_color: blue}) { + affected_rows + returning { + name + favorite_color + } + } + } diff --git a/server/tests-py/queries/graphql_mutation/enums/values_setup.yaml b/server/tests-py/queries/graphql_mutation/enums/values_setup.yaml new file mode 100644 index 00000000000..3a449b9d286 --- /dev/null +++ b/server/tests-py/queries/graphql_mutation/enums/values_setup.yaml @@ -0,0 +1,11 @@ +type: bulk +args: + +- type: 
insert + args: + table: users + objects: + - name: Alyssa + favorite_color: red + - name: Ben + favorite_color: blue diff --git a/server/tests-py/queries/graphql_mutation/enums/values_teardown.yaml b/server/tests-py/queries/graphql_mutation/enums/values_teardown.yaml new file mode 100644 index 00000000000..8256707cb06 --- /dev/null +++ b/server/tests-py/queries/graphql_mutation/enums/values_teardown.yaml @@ -0,0 +1,8 @@ +type: bulk +args: + +- type: run_sql + args: + sql: | + DELETE FROM users; + SELECT setval('users_id_seq', 1, FALSE); diff --git a/server/tests-py/queries/graphql_query/boolexp/basic/select_author_article_operator_ne_not_found_err.yaml b/server/tests-py/queries/graphql_query/boolexp/basic/select_author_article_operator_ne_not_found_err.yaml index 4ae94d325ef..bc147af7686 100644 --- a/server/tests-py/queries/graphql_query/boolexp/basic/select_author_article_operator_ne_not_found_err.yaml +++ b/server/tests-py/queries/graphql_query/boolexp/basic/select_author_article_operator_ne_not_found_err.yaml @@ -7,7 +7,7 @@ response: code: validation-failed path: $.selectionSet.author.args.where.name._ne message: |- - field "_ne" not found in type: 'text_comparison_exp' + field "_ne" not found in type: 'String_comparison_exp' query: query: | query { diff --git a/server/tests-py/queries/graphql_query/boolexp/basic/select_author_article_unexpected_operator_in_where_err.yaml b/server/tests-py/queries/graphql_query/boolexp/basic/select_author_article_unexpected_operator_in_where_err.yaml index d78c7b1d507..fd2682baa7b 100644 --- a/server/tests-py/queries/graphql_query/boolexp/basic/select_author_article_unexpected_operator_in_where_err.yaml +++ b/server/tests-py/queries/graphql_query/boolexp/basic/select_author_article_unexpected_operator_in_where_err.yaml @@ -7,7 +7,7 @@ response: code: validation-failed path: $.selectionSet.author.args.where.id._unexpected message: |- - field "_unexpected" not found in type: 'integer_comparison_exp' + field "_unexpected" not found in type: 'Int_comparison_exp' query: query: | query { diff --git a/server/tests-py/queries/graphql_query/enums/introspect.yaml b/server/tests-py/queries/graphql_query/enums/introspect.yaml new file mode 100644 index 00000000000..33d84f923bb --- /dev/null +++ b/server/tests-py/queries/graphql_query/enums/introspect.yaml @@ -0,0 +1,57 @@ +description: Test introspecting enum types +url: /v1/graphql +status: 200 +response: + data: + colors: + name: colors_enum + kind: ENUM + enumValues: + - name: blue + description: '#0000FF' + - name: green + description: '#00FF00' + - name: orange + description: '#FFFF00' + - name: purple + description: '#FF00FF' + - name: red + description: '#FF0000' + - name: yellow + description: '#00FFFF' + users: + fields: + - name: favorite_color + type: + ofType: + name: colors_enum + - name: id + type: + ofType: + name: Int + - name: name + type: + ofType: + name: String +query: + query: | + { + colors: __type(name: "colors_enum") { + name + kind + enumValues { + name + description + } + } + users: __type(name: "users") { + fields { + name + type { + ofType { + name + } + } + } + } + } diff --git a/server/tests-py/queries/graphql_query/enums/select_enum_field.yaml b/server/tests-py/queries/graphql_query/enums/select_enum_field.yaml new file mode 100644 index 00000000000..82c6220138c --- /dev/null +++ b/server/tests-py/queries/graphql_query/enums/select_enum_field.yaml @@ -0,0 +1,18 @@ +description: Test querying a table that references an enum table +url: /v1/graphql +status: 200 +response: + data: + 
users: + - name: Alyssa + favorite_color: red + - name: Ben + favorite_color: blue +query: + query: | + { + users { + name + favorite_color + } + } diff --git a/server/tests-py/queries/graphql_query/enums/select_where_enum_eq.yaml b/server/tests-py/queries/graphql_query/enums/select_where_enum_eq.yaml new file mode 100644 index 00000000000..cdc3c3806bf --- /dev/null +++ b/server/tests-py/queries/graphql_query/enums/select_where_enum_eq.yaml @@ -0,0 +1,17 @@ +description: Test querying a table that references an enum table and filtering on enum equality +url: /v1/graphql +status: 200 +response: + data: + like_red: + - name: Alyssa + like_blue: + - name: Ben + like_green: [] +query: + query: | + { + like_red: users(where: { favorite_color: { _eq: red }}) { name } + like_blue: users(where: { favorite_color: { _eq: blue }}) { name } + like_green: users(where: { favorite_color: { _eq: green }}) { name } + } diff --git a/server/tests-py/queries/graphql_query/enums/select_where_enum_eq_bad_value.yaml b/server/tests-py/queries/graphql_query/enums/select_where_enum_eq_bad_value.yaml new file mode 100644 index 00000000000..7a359e73eec --- /dev/null +++ b/server/tests-py/queries/graphql_query/enums/select_where_enum_eq_bad_value.yaml @@ -0,0 +1,14 @@ +description: Test validation of enum input values +url: /v1/graphql +status: 200 +response: + errors: + - message: 'unexpected value "not_a_real_color" for enum: ''colors_enum''' + extensions: + code: validation-failed + path: $.selectionSet.users.args.where.favorite_color._eq +query: + query: | + { + users(where: { favorite_color: { _eq: not_a_real_color }}) { name } + } diff --git a/server/tests-py/queries/graphql_query/enums/select_where_enum_eq_string.yaml b/server/tests-py/queries/graphql_query/enums/select_where_enum_eq_string.yaml new file mode 100644 index 00000000000..2f8d378208a --- /dev/null +++ b/server/tests-py/queries/graphql_query/enums/select_where_enum_eq_string.yaml @@ -0,0 +1,14 @@ +description: Test enum input values cannot be string literals +url: /v1/graphql +status: 200 +response: + errors: + - message: expecting an enum + extensions: + code: validation-failed + path: $.selectionSet.users.args.where.favorite_color._eq +query: + query: | + { + users(where: { favorite_color: { _eq: "not_a_real_color" }}) { name } + } diff --git a/server/tests-py/queries/graphql_query/enums/select_where_enum_eq_variable.yaml b/server/tests-py/queries/graphql_query/enums/select_where_enum_eq_variable.yaml new file mode 100644 index 00000000000..446db21ec1b --- /dev/null +++ b/server/tests-py/queries/graphql_query/enums/select_where_enum_eq_variable.yaml @@ -0,0 +1,21 @@ +description: Test querying a table that references an enum table and filtering on enum equality via a variable +url: /v1/graphql +status: 200 +response: + data: + like_1: + - name: Alyssa + like_2: + - name: Ben + like_3: [] +query: + query: | + query ($color_1: colors_enum, $color_2: colors_enum, $color_3: colors_enum) { + like_1: users(where: { favorite_color: { _eq: $color_1 }}) { name } + like_2: users(where: { favorite_color: { _eq: $color_2 }}) { name } + like_3: users(where: { favorite_color: { _eq: $color_3 }}) { name } + } + variables: + color_1: red + color_2: blue + color_3: green diff --git a/server/tests-py/queries/graphql_query/enums/select_where_enum_eq_variable_bad_value.yaml b/server/tests-py/queries/graphql_query/enums/select_where_enum_eq_variable_bad_value.yaml new file mode 100644 index 00000000000..36a663ddf01 --- /dev/null +++ 
b/server/tests-py/queries/graphql_query/enums/select_where_enum_eq_variable_bad_value.yaml @@ -0,0 +1,16 @@ +description: Test validation of enum values in variables +url: /v1/graphql +status: 200 +response: + errors: + - message: 'unexpected value "not_a_real_color" for enum: ''colors_enum''' + extensions: + code: validation-failed + path: $.variableValues.color +query: + query: | + query ($color: colors_enum) { + users(where: { favorite_color: { _eq: $color }}) { name } + } + variables: + color: not_a_real_color diff --git a/server/tests-py/queries/graphql_query/enums/setup.yaml b/server/tests-py/queries/graphql_query/enums/setup.yaml new file mode 100644 index 00000000000..ae88c2f1e50 --- /dev/null +++ b/server/tests-py/queries/graphql_query/enums/setup.yaml @@ -0,0 +1,31 @@ +type: bulk +args: + +- type: run_sql + args: + sql: | + CREATE TABLE colors + ( value text PRIMARY KEY + , comment text ); + INSERT INTO colors (value, comment) VALUES + ('red', '#FF0000'), + ('green', '#00FF00'), + ('blue', '#0000FF'), + ('orange', '#FFFF00'), + ('yellow', '#00FFFF'), + ('purple', '#FF00FF'); + + CREATE TABLE users + ( id serial PRIMARY KEY + , name text NOT NULL + , favorite_color text NOT NULL REFERENCES colors ); + INSERT INTO users (name, favorite_color) VALUES + ('Alyssa', 'red'), + ('Ben', 'blue'); + +- type: track_table + args: + table: colors + is_enum: true +- type: track_table + args: users diff --git a/server/tests-py/queries/graphql_query/enums/teardown.yaml b/server/tests-py/queries/graphql_query/enums/teardown.yaml new file mode 100644 index 00000000000..e72c029f262 --- /dev/null +++ b/server/tests-py/queries/graphql_query/enums/teardown.yaml @@ -0,0 +1,8 @@ +type: bulk +args: +- type: run_sql + args: + sql: | + DROP TABLE users; + DROP TABLE colors; + cascade: true diff --git a/server/tests-py/queries/inconsistent_objects/test.yaml b/server/tests-py/queries/inconsistent_objects/test.yaml index c39db09d678..8301c3199a0 100644 --- a/server/tests-py/queries/inconsistent_objects/test.yaml +++ b/server/tests-py/queries/inconsistent_objects/test.yaml @@ -7,7 +7,7 @@ setup: args: sql: | create table author( - id serial primary key, + id serial primary key, name text unique ); create table article( @@ -24,7 +24,7 @@ setup: args: schema: public name: article - + #Object relationship - type: create_object_relationship args: @@ -32,7 +32,7 @@ setup: name: author using: foreign_key_constraint_on: author_id - + #Array relationship - type: create_array_relationship args: @@ -53,7 +53,7 @@ inconsistent_objects: column: author_id table: article name: articles - comment: + comment: table: author reason: table "article" does not exist type: array_relation @@ -61,15 +61,15 @@ inconsistent_objects: using: foreign_key_constraint_on: author_id name: author - comment: + comment: table: article reason: table "article" does not exist type: object_relation - definition: article - reason: 'no such table/view exists in postgres : "article"' + reason: 'no such table/view exists in postgres: "article"' type: table -# Teardown +# Teardown teardown: type: bulk args: @@ -77,4 +77,3 @@ teardown: args: sql: | drop table author - diff --git a/server/tests-py/queries/v1/metadata/export_metadata.yaml b/server/tests-py/queries/v1/metadata/export_metadata.yaml index c7e4846e451..9ee6ebc0c35 100644 --- a/server/tests-py/queries/v1/metadata/export_metadata.yaml +++ b/server/tests-py/queries/v1/metadata/export_metadata.yaml @@ -5,6 +5,7 @@ response: remote_schemas: [] tables: - table: author + is_enum: false 
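#   (Editorial sketch, not part of the patch: the hunk above now expects an
#   is_enum flag on every exported table entry. An enum-tracked table — e.g.
#   the colors fixture tracked with is_enum: true in the test setup earlier in
#   this patch — would plausibly export as:
#
#     - table: colors
#       is_enum: true
#       object_relationships: []
#       array_relationships: []
#
#   with the remaining permission and event-trigger keys shaped like the
#   author and article entries in this file.)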
object_relationships: [] array_relationships: - using: @@ -19,6 +20,7 @@ response: delete_permissions: [] event_triggers: [] - table: article + is_enum: false object_relationships: - using: foreign_key_constraint_on: author_id diff --git a/server/tests-py/queries/v1/set_table_is_enum/add_and_remove.yaml b/server/tests-py/queries/v1/set_table_is_enum/add_and_remove.yaml new file mode 100644 index 00000000000..d79680b6cce --- /dev/null +++ b/server/tests-py/queries/v1/set_table_is_enum/add_and_remove.yaml @@ -0,0 +1,128 @@ +- description: Mark a valid enum table as an enum + url: /v1/query + status: 200 + response: + message: success + query: + type: set_table_is_enum + args: + table: weekdays + is_enum: true + +- description: Check that marking a table as an enum changed the schema + url: /v1/graphql + status: 200 + response: + data: + weekdays: + name: weekdays_enum + kind: ENUM + enumValues: + - name: friday + description: null + - name: monday + description: null + - name: saturday + description: null + - name: sunday + description: null + - name: thursday + description: null + - name: tuesday + description: null + - name: wednesday + description: null + employees: + fields: + - name: favorite_color + type: + ofType: + name: colors_enum + - name: gets_paid_on + type: + ofType: + name: weekdays_enum + - name: id + type: + ofType: + name: Int + - name: name + type: + ofType: + name: String + query: + query: | + { + weekdays: __type(name: "weekdays_enum") { + name + kind + enumValues { + name + description + } + } + employees: __type(name: "employees") { + fields { + name + type { + ofType { + name + } + } + } + } + } + +- description: Mark an existing enum table as not an enum + url: /v1/query + status: 200 + response: + message: success + query: + type: set_table_is_enum + args: + table: colors + is_enum: false + +- description: Check that marking a table as not an enum changed the schema + url: /v1/graphql + status: 200 + response: + data: + colors: null + employees: + fields: + - name: favorite_color + type: + ofType: + name: String + - name: gets_paid_on + type: + ofType: + name: weekdays_enum + - name: id + type: + ofType: + name: Int + - name: name + type: + ofType: + name: String + query: + query: | + { + colors: __type(name: "colors_enum") { + name + kind + } + employees: __type(name: "employees") { + fields { + name + type { + ofType { + name + } + } + } + } + } diff --git a/server/tests-py/queries/v1/set_table_is_enum/add_invalid.yaml b/server/tests-py/queries/v1/set_table_is_enum/add_invalid.yaml new file mode 100644 index 00000000000..c0735b27448 --- /dev/null +++ b/server/tests-py/queries/v1/set_table_is_enum/add_invalid.yaml @@ -0,0 +1,15 @@ +description: Attempts to mark a non-enum table as an enum are rejected +url: /v1/query +status: 400 +response: + code: constraint-violation + error: | + the table "employees" cannot be used as an enum for the following reasons: + • the table’s primary key ("id") must have type "text", not type "integer" + • the table must have exactly one primary key and optionally one comment column, not 4 columns (favorite_color, gets_paid_on, id, name) + path: $.args +query: + type: set_table_is_enum + args: + table: employees + is_enum: true diff --git a/server/tests-py/queries/v1/set_table_is_enum/setup.yaml b/server/tests-py/queries/v1/set_table_is_enum/setup.yaml new file mode 100644 index 00000000000..1e7fafa33ce --- /dev/null +++ b/server/tests-py/queries/v1/set_table_is_enum/setup.yaml @@ -0,0 +1,45 @@ +type: bulk +args: + +- type: run_sql + 
args: + sql: | + CREATE TABLE colors + ( value text PRIMARY KEY + , comment text ); + INSERT INTO colors (value, comment) VALUES + ('red', '#FF0000'), + ('green', '#00FF00'), + ('blue', '#0000FF'), + ('orange', '#FFFF00'), + ('yellow', '#00FFFF'), + ('purple', '#FF00FF'); + + CREATE TABLE weekdays + ( value text PRIMARY KEY ); + INSERT INTO weekdays (value) VALUES + ('sunday'), + ('monday'), + ('tuesday'), + ('wednesday'), + ('thursday'), + ('friday'), + ('saturday'); + + CREATE TABLE employees + ( id serial PRIMARY KEY + , name text NOT NULL + , favorite_color text NOT NULL REFERENCES colors + , gets_paid_on text NOT NULL REFERENCES weekdays ); + INSERT INTO employees (name, favorite_color, gets_paid_on) VALUES + ('Alyssa', 'red', 'monday'), + ('Ben', 'blue', 'friday'); + +- type: track_table + args: + table: colors + is_enum: true +- type: track_table + args: weekdays +- type: track_table + args: employees diff --git a/server/tests-py/queries/v1/set_table_is_enum/teardown.yaml b/server/tests-py/queries/v1/set_table_is_enum/teardown.yaml new file mode 100644 index 00000000000..da709797a36 --- /dev/null +++ b/server/tests-py/queries/v1/set_table_is_enum/teardown.yaml @@ -0,0 +1,9 @@ +type: bulk +args: +- type: run_sql + args: + sql: | + DROP TABLE employees; + DROP TABLE weekdays; + DROP TABLE colors; + cascade: true diff --git a/server/tests-py/test_graphql_mutations.py b/server/tests-py/test_graphql_mutations.py index d710076b71b..1f2498e9da8 100644 --- a/server/tests-py/test_graphql_mutations.py +++ b/server/tests-py/test_graphql_mutations.py @@ -385,3 +385,24 @@ class TestGraphqlDeletePermissions(DefaultTestMutations): @classmethod def dir(cls): return "queries/graphql_mutation/delete/permissions" + +@pytest.mark.parametrize('transport', ['http', 'websocket']) +class TestGraphQLMutateEnums(DefaultTestMutations): + @classmethod + def dir(cls): + return 'queries/graphql_mutation/enums' + + def test_insert_enum_field(self, hge_ctx, transport): + check_query_f(hge_ctx, self.dir() + '/insert_enum_field.yaml', transport) + + def test_insert_enum_field_bad_value(self, hge_ctx, transport): + check_query_f(hge_ctx, self.dir() + '/insert_enum_field_bad_value.yaml', transport) + + def test_update_enum_field(self, hge_ctx, transport): + check_query_f(hge_ctx, self.dir() + '/update_enum_field.yaml', transport) + + def test_update_where_enum_field(self, hge_ctx, transport): + check_query_f(hge_ctx, self.dir() + '/update_where_enum_field.yaml', transport) + + def test_delete_where_enum_field(self, hge_ctx, transport): + check_query_f(hge_ctx, self.dir() + '/delete_where_enum_field.yaml', transport) diff --git a/server/tests-py/test_graphql_queries.py b/server/tests-py/test_graphql_queries.py index 9dbc1765a11..20874081014 100644 --- a/server/tests-py/test_graphql_queries.py +++ b/server/tests-py/test_graphql_queries.py @@ -417,3 +417,30 @@ class TestGraphQLQueryFunctions(DefaultTestSelectQueries): @classmethod def dir(cls): return 'queries/graphql_query/functions' + +@pytest.mark.parametrize('transport', ['http', 'websocket']) +class TestGraphQLQueryEnums(DefaultTestSelectQueries): + @classmethod + def dir(cls): + return 'queries/graphql_query/enums' + + def test_introspect(self, hge_ctx, transport): + check_query_f(hge_ctx, self.dir() + '/introspect.yaml', transport) + + def test_select_enum_field(self, hge_ctx, transport): + check_query_f(hge_ctx, self.dir() + '/select_enum_field.yaml', transport) + + def test_select_where_enum_eq(self, hge_ctx, transport): + check_query_f(hge_ctx, self.dir() + 
'/select_where_enum_eq.yaml', transport) + + def test_select_where_enum_eq_bad_value(self, hge_ctx, transport): + check_query_f(hge_ctx, self.dir() + '/select_where_enum_eq_bad_value.yaml', transport) + + def test_select_where_enum_eq_string(self, hge_ctx, transport): + check_query_f(hge_ctx, self.dir() + '/select_where_enum_eq_string.yaml', transport) + + def test_select_where_enum_eq_variable(self, hge_ctx, transport): + check_query_f(hge_ctx, self.dir() + '/select_where_enum_eq_variable.yaml', transport) + + def test_select_where_enum_eq_variable_bad_value(self, hge_ctx, transport): + check_query_f(hge_ctx, self.dir() + '/select_where_enum_eq_variable_bad_value.yaml', transport) diff --git a/server/tests-py/test_inconsistent_meta.py b/server/tests-py/test_inconsistent_meta.py index fe1aa43e8a0..1af8e8419ee 100644 --- a/server/tests-py/test_inconsistent_meta.py +++ b/server/tests-py/test_inconsistent_meta.py @@ -1,6 +1,7 @@ import pytest import yaml import json +import jsondiff from validate import json_ordered diff --git a/server/tests-py/test_v1_queries.py b/server/tests-py/test_v1_queries.py index f669de253d4..c4ba9cf4684 100644 --- a/server/tests-py/test_v1_queries.py +++ b/server/tests-py/test_v1_queries.py @@ -624,3 +624,14 @@ class TestNonEmptyText: @classmethod def dir(cls): return "queries/v1/non_empty_text" + +class TestSetTableIsEnum(DefaultTestQueries): + @classmethod + def dir(cls): + return 'queries/v1/set_table_is_enum' + + def test_add_and_remove(self, hge_ctx): + check_query_f(hge_ctx, self.dir() + '/add_and_remove.yaml') + + def test_add_invalid(self, hge_ctx): + check_query_f(hge_ctx, self.dir() + '/add_invalid.yaml') From 7fa1452ca045af1e17e21f09a5351c32d52a38b2 Mon Sep 17 00:00:00 2001 From: Alexis King Date: Sun, 11 Aug 2019 10:34:38 -0500 Subject: [PATCH 05/10] server: Rename a whole bunch of things MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * PGTypeSimple → PGTypeScalar * PGScalarTyped → WithScalarType * PGColValue → PGScalarValue * PGColInfo → PGColumnInfo * PGRawColInfo → PGRawColumnInfo * mkScalarSessionVar → mkTypedSessionVar --- .../Hasura/GraphQL/Execute/LiveQuery.hs | 2 +- .../src-lib/Hasura/GraphQL/Execute/Query.hs | 2 +- server/src-lib/Hasura/GraphQL/Explain.hs | 2 +- .../src-lib/Hasura/GraphQL/Resolve/Context.hs | 8 +-- .../Hasura/GraphQL/Resolve/InputValue.hs | 8 +-- .../src-lib/Hasura/GraphQL/Resolve/Insert.hs | 16 +++--- .../src-lib/Hasura/GraphQL/Resolve/Types.hs | 16 +++--- server/src-lib/Hasura/GraphQL/Schema.hs | 52 +++++++++---------- .../src-lib/Hasura/GraphQL/Schema/BoolExp.hs | 2 +- .../src-lib/Hasura/GraphQL/Schema/Common.hs | 2 +- .../Hasura/GraphQL/Schema/Mutation/Common.hs | 4 +- .../Hasura/GraphQL/Schema/Mutation/Insert.hs | 2 +- .../Hasura/GraphQL/Schema/Mutation/Update.hs | 12 ++--- .../src-lib/Hasura/GraphQL/Schema/Select.hs | 10 ++-- server/src-lib/Hasura/GraphQL/Validate.hs | 6 +-- .../src-lib/Hasura/GraphQL/Validate/Types.hs | 2 +- server/src-lib/Hasura/RQL/DDL/EventTrigger.hs | 10 ++-- server/src-lib/Hasura/RQL/DDL/Permission.hs | 12 ++--- .../Hasura/RQL/DDL/Permission/Internal.hs | 26 +++++----- server/src-lib/Hasura/RQL/DDL/Relationship.hs | 4 +- server/src-lib/Hasura/RQL/DDL/Schema/Diff.hs | 8 +-- server/src-lib/Hasura/RQL/DDL/Schema/Enum.hs | 8 +-- .../src-lib/Hasura/RQL/DDL/Schema/Rename.hs | 2 +- server/src-lib/Hasura/RQL/DDL/Schema/Table.hs | 26 +++++----- server/src-lib/Hasura/RQL/DML/Delete.hs | 2 +- server/src-lib/Hasura/RQL/DML/Insert.hs | 6 +-- 
server/src-lib/Hasura/RQL/DML/Internal.hs | 32 ++++++------ server/src-lib/Hasura/RQL/DML/Mutation.hs | 6 +-- server/src-lib/Hasura/RQL/DML/Returning.hs | 12 ++--- server/src-lib/Hasura/RQL/DML/Select.hs | 16 +++--- .../src-lib/Hasura/RQL/DML/Select/Internal.hs | 2 +- server/src-lib/Hasura/RQL/DML/Select/Types.hs | 4 +- server/src-lib/Hasura/RQL/DML/Update.hs | 4 +- server/src-lib/Hasura/RQL/GBoolExp.hs | 22 ++++---- server/src-lib/Hasura/RQL/Types.hs | 10 ++-- server/src-lib/Hasura/RQL/Types/BoolExp.hs | 8 +-- server/src-lib/Hasura/RQL/Types/Catalog.hs | 2 +- server/src-lib/Hasura/RQL/Types/Column.hs | 40 +++++++------- .../src-lib/Hasura/RQL/Types/SchemaCache.hs | 16 +++--- server/src-lib/Hasura/SQL/DML.hs | 10 ++-- server/src-lib/Hasura/SQL/Types.hs | 8 +-- server/src-lib/Hasura/SQL/Value.hs | 20 +++---- server/src-lib/Hasura/Server/Telemetry.hs | 4 +- 43 files changed, 233 insertions(+), 233 deletions(-) diff --git a/server/src-lib/Hasura/GraphQL/Execute/LiveQuery.hs b/server/src-lib/Hasura/GraphQL/Execute/LiveQuery.hs index 6f43aea375e..c2d5c25b8cc 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/LiveQuery.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/LiveQuery.hs @@ -173,7 +173,7 @@ toMultiplexedQueryVar = \case -- the check has to be made before this (Just var, _) -> do modify $ Map.insert var colVal - return $ fromResVars (PGTypeSimple $ pstType colVal) + return $ fromResVars (PGTypeScalar $ pstType colVal) [ "variables" , G.unName $ G.unVariable var ] diff --git a/server/src-lib/Hasura/GraphQL/Execute/Query.hs b/server/src-lib/Hasura/GraphQL/Execute/Query.hs index 66c4390a0b2..b3411e3455a 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/Query.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/Query.hs @@ -197,7 +197,7 @@ prepareWithPlan = \case S.SEOpApp (S.SQLOp "->>") [S.SEPrep 1, S.SELit $ T.toLower sessVar] return $ flip S.SETyAnn (S.mkTypeAnn ty) $ case ty of - PGTypeSimple colTy -> withGeoVal colTy sessVarVal + PGTypeScalar colTy -> withGeoVal colTy sessVarVal PGTypeArray _ -> sessVarVal R.UVSQL sqlExp -> return sqlExp diff --git a/server/src-lib/Hasura/GraphQL/Explain.hs b/server/src-lib/Hasura/GraphQL/Explain.hs index 7c7ea0b7d93..277b5ea4f96 100644 --- a/server/src-lib/Hasura/GraphQL/Explain.hs +++ b/server/src-lib/Hasura/GraphQL/Explain.hs @@ -61,7 +61,7 @@ resolveVal userInfo = \case RS.UVSessVar ty sessVar -> do sessVarVal <- S.SELit <$> getSessVarVal userInfo sessVar return $ flip S.SETyAnn (S.mkTypeAnn ty) $ case ty of - PGTypeSimple colTy -> withGeoVal colTy sessVarVal + PGTypeScalar colTy -> withGeoVal colTy sessVarVal PGTypeArray _ -> sessVarVal RS.UVSQL sqlExp -> return sqlExp diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Context.hs b/server/src-lib/Hasura/GraphQL/Resolve/Context.hs index c38e5e0b0cf..2859dba8395 100644 --- a/server/src-lib/Hasura/GraphQL/Resolve/Context.hs +++ b/server/src-lib/Hasura/GraphQL/Resolve/Context.hs @@ -52,7 +52,7 @@ import qualified Hasura.SQL.DML as S getFldInfo :: (MonadError QErr m, MonadReader r m, Has FieldMap r) => G.NamedType -> G.Name - -> m (Either PGColInfo (RelInfo, Bool, AnnBoolExpPartialSQL, Maybe Int)) + -> m (Either PGColumnInfo (RelInfo, Bool, AnnBoolExpPartialSQL, Maybe Int)) getFldInfo nt n = do fldMap <- asks getter onNothing (Map.lookup (nt,n) fldMap) $ @@ -61,7 +61,7 @@ getFldInfo nt n = do getPGColInfo :: (MonadError QErr m, MonadReader r m, Has FieldMap r) - => G.NamedType -> G.Name -> m PGColInfo + => G.NamedType -> G.Name -> m PGColumnInfo getPGColInfo nt n = do fldInfo <- getFldInfo nt n case fldInfo 
of @@ -134,8 +134,8 @@ withPrepArgs m = runStateT m Seq.empty prepareColVal :: (MonadState PrepArgs m) - => PGScalarTyped PGColValue -> m S.SQLExp -prepareColVal (PGScalarTyped scalarType colVal) = do + => WithScalarType PGScalarValue -> m S.SQLExp +prepareColVal (WithScalarType scalarType colVal) = do preparedArgs <- get put (preparedArgs Seq.|> binEncoder colVal) return $ toPrepParam (Seq.length preparedArgs + 1) scalarType diff --git a/server/src-lib/Hasura/GraphQL/Resolve/InputValue.hs b/server/src-lib/Hasura/GraphQL/Resolve/InputValue.hs index dd3c11d8495..acbbe77c93f 100644 --- a/server/src-lib/Hasura/GraphQL/Resolve/InputValue.hs +++ b/server/src-lib/Hasura/GraphQL/Resolve/InputValue.hs @@ -47,12 +47,12 @@ tyMismatch expectedTy v = asPGColumnTypeAndValueM :: (MonadError QErr m) => AnnInpVal - -> m (PGColumnType, PGScalarTyped (Maybe PGColValue)) + -> m (PGColumnType, WithScalarType (Maybe PGScalarValue)) asPGColumnTypeAndValueM v = case _aivValue v of - AGScalar colTy val -> pure (PGColumnScalar colTy, PGScalarTyped colTy val) + AGScalar colTy val -> pure (PGColumnScalar colTy, WithScalarType colTy val) AGEnum _ (AGEReference reference maybeValue) -> do let maybeScalarValue = PGValText . RQL.getEnumValue <$> maybeValue - pure (PGColumnEnumReference reference, PGScalarTyped PGText maybeScalarValue) + pure (PGColumnEnumReference reference, WithScalarType PGText maybeScalarValue) _ -> tyMismatch "pgvalue" v asPGColumnTypeAndAnnValueM :: (MonadError QErr m) => AnnInpVal -> m (PGColumnType, Maybe AnnPGVal) @@ -139,7 +139,7 @@ parseMany fn v = case _aivValue v of onlyText :: (MonadError QErr m) - => PGColValue -> m Text + => PGScalarValue -> m Text onlyText = \case PGValText t -> return t PGValVarchar t -> return t diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Insert.hs b/server/src-lib/Hasura/GraphQL/Resolve/Insert.hs index 1fee4e5f5d6..80afdffa14a 100644 --- a/server/src-lib/Hasura/GraphQL/Resolve/Insert.hs +++ b/server/src-lib/Hasura/GraphQL/Resolve/Insert.hs @@ -47,7 +47,7 @@ data AnnIns a { _aiInsObj :: !a , _aiConflictClause :: !(Maybe RI.ConflictClauseP1) , _aiView :: !QualifiedTable - , _aiTableCols :: ![PGColInfo] + , _aiTableCols :: ![PGColumnInfo] , _aiDefVals :: !(Map.HashMap PGCol S.SQLExp) } deriving (Show, Eq, Functor, Foldable, Traversable) @@ -69,7 +69,7 @@ data RelIns a type ObjRelIns = RelIns SingleObjIns type ArrRelIns = RelIns MultiObjIns -type PGColWithValue = (PGCol, PGScalarTyped PGColValue) +type PGColWithValue = (PGCol, WithScalarType PGScalarValue) data CTEExp = CTEExp @@ -107,10 +107,10 @@ traverseInsObj rim (gName, annVal) defVal@(AnnInsObj cols objRels arrRels) = _ -> parseObject where parseValue = do - (_, PGScalarTyped scalarType maybeScalarValue) <- asPGColumnTypeAndValueM annVal + (_, WithScalarType scalarType maybeScalarValue) <- asPGColumnTypeAndValueM annVal let columnName = PGCol $ G.unName gName scalarValue = fromMaybe (PGNull scalarType) maybeScalarValue - pure $ AnnInsObj ((columnName, PGScalarTyped scalarType scalarValue):cols) objRels arrRels + pure $ AnnInsObj ((columnName, WithScalarType scalarType scalarValue):cols) objRels arrRels parseObject = do objM <- asObjectM annVal @@ -232,9 +232,9 @@ asSingleObject = \case fetchFromColVals :: MonadError QErr m => ColVals - -> [PGColInfo] - -> (PGColInfo -> a) - -> m [(a, PGScalarTyped PGColValue)] + -> [PGColumnInfo] + -> (PGColumnInfo -> a) + -> m [(a, WithScalarType PGScalarValue)] fetchFromColVals colVal reqCols f = forM reqCols $ \ci -> do let valM = Map.lookup (pgiName ci) colVal @@ -246,7 
+246,7 @@ fetchFromColVals colVal reqCols f = mkSelCTE :: MonadError QErr m => QualifiedTable - -> [PGColInfo] + -> [PGColumnInfo] -> Maybe ColVals -> m CTEExp mkSelCTE tn allCols colValM = do diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Types.hs b/server/src-lib/Hasura/GraphQL/Resolve/Types.hs index 992e266a54b..3b509a17284 100644 --- a/server/src-lib/Hasura/GraphQL/Resolve/Types.hs +++ b/server/src-lib/Hasura/GraphQL/Resolve/Types.hs @@ -72,7 +72,7 @@ data UpdOpCtx , _uocHeaders :: ![T.Text] , _uocFilter :: !AnnBoolExpPartialSQL , _uocPresetCols :: !PreSetColsPartial - , _uocAllCols :: ![PGColInfo] + , _uocAllCols :: ![PGColumnInfo] } deriving (Show, Eq) data DelOpCtx @@ -80,7 +80,7 @@ data DelOpCtx { _docTable :: !QualifiedTable , _docHeaders :: ![T.Text] , _docFilter :: !AnnBoolExpPartialSQL - , _docAllCols :: ![PGColInfo] + , _docAllCols :: ![PGColumnInfo] } deriving (Show, Eq) data OpCtx @@ -96,11 +96,11 @@ data OpCtx type FieldMap = Map.HashMap (G.NamedType, G.Name) - (Either PGColInfo (RelInfo, Bool, AnnBoolExpPartialSQL, Maybe Int)) + (Either PGColumnInfo (RelInfo, Bool, AnnBoolExpPartialSQL, Maybe Int)) -- order by context data OrdByItem - = OBIPGCol !PGColInfo + = OBIPGCol !PGColumnInfo | OBIRel !RelInfo !AnnBoolExpPartialSQL | OBIAgg !RelInfo !AnnBoolExpPartialSQL deriving (Show, Eq) @@ -128,7 +128,7 @@ data UpdPermForIns data InsCtx = InsCtx { icView :: !QualifiedTable - , icAllCols :: ![PGColInfo] + , icAllCols :: ![PGColumnInfo] , icSet :: !PreSetColsPartial , icRelations :: !RelationInfoMap , icUpdPerm :: !(Maybe UpdPermForIns) @@ -136,18 +136,18 @@ data InsCtx type InsCtxMap = Map.HashMap QualifiedTable InsCtx -type PGColArgMap = Map.HashMap G.Name PGColInfo +type PGColArgMap = Map.HashMap G.Name PGColumnInfo data AnnPGVal = AnnPGVal { _apvVariable :: !(Maybe G.Variable) , _apvIsNullable :: !Bool , _apvType :: !PGColumnType - -- ^ Note: '_apvValue' is a @'PGScalarTyped' 'PGColValue'@, so it includes its type as a + -- ^ Note: '_apvValue' is a @'WithScalarType' 'PGScalarValue'@, so it includes its type as a -- 'PGScalarType'. However, we /also/ need to keep the original 'PGColumnType' information around -- in case we need to re-parse a new value with its type because we’re reusing a cached query -- plan. - , _apvValue :: !(PGScalarTyped PGColValue) + , _apvValue :: !(WithScalarType PGScalarValue) } deriving (Show, Eq) type PrepFn m = AnnPGVal -> m S.SQLExp diff --git a/server/src-lib/Hasura/GraphQL/Schema.hs b/server/src-lib/Hasura/GraphQL/Schema.hs index 747eec20f72..c3590ada824 100644 --- a/server/src-lib/Hasura/GraphQL/Schema.hs +++ b/server/src-lib/Hasura/GraphQL/Schema.hs @@ -45,7 +45,7 @@ import Hasura.GraphQL.Schema.OrderBy import Hasura.GraphQL.Schema.Select import Hasura.GraphQL.Schema.Merge -getInsPerm :: TableInfo PGColInfo -> RoleName -> Maybe InsPermInfo +getInsPerm :: TableInfo PGColumnInfo -> RoleName -> Maybe InsPermInfo getInsPerm tabInfo role | role == adminRole = _permIns $ mkAdminRolePermInfo tabInfo | otherwise = Map.lookup role rolePermInfoMap >>= _permIns @@ -54,7 +54,7 @@ getInsPerm tabInfo role getTabInfo :: MonadError QErr m - => TableCache PGColInfo -> QualifiedTable -> m (TableInfo PGColInfo) + => TableCache PGColumnInfo -> QualifiedTable -> m (TableInfo PGColumnInfo) getTabInfo tc t = onNothing (Map.lookup t tc) $ throw500 $ "table not found: " <>> t @@ -68,32 +68,32 @@ isValidCol = isValidName . G.Name . 
getPGColTxt isValidRel :: ToTxt a => RelName -> QualifiedObject a -> Bool isValidRel rn rt = isValidName (mkRelName rn) && isValidObjectName rt -isValidField :: FieldInfo PGColInfo -> Bool +isValidField :: FieldInfo PGColumnInfo -> Bool isValidField = \case - FIColumn (PGColInfo col _ _) -> isValidCol col + FIColumn (PGColumnInfo col _ _) -> isValidCol col FIRelationship (RelInfo rn _ _ remTab _) -> isValidRel rn remTab upsertable :: [ConstraintName] -> Bool -> Bool -> Bool upsertable uniqueOrPrimaryCons isUpsertAllowed isAView = not (null uniqueOrPrimaryCons) && isUpsertAllowed && not isAView -toValidFieldInfos :: FieldInfoMap PGColInfo -> [FieldInfo PGColInfo] +toValidFieldInfos :: FieldInfoMap PGColumnInfo -> [FieldInfo PGColumnInfo] toValidFieldInfos = filter isValidField . Map.elems -validPartitionFieldInfoMap :: FieldInfoMap PGColInfo -> ([PGColInfo], [RelInfo]) +validPartitionFieldInfoMap :: FieldInfoMap PGColumnInfo -> ([PGColumnInfo], [RelInfo]) validPartitionFieldInfoMap = partitionFieldInfos . toValidFieldInfos -getValidCols :: FieldInfoMap PGColInfo -> [PGColInfo] +getValidCols :: FieldInfoMap PGColumnInfo -> [PGColumnInfo] getValidCols = fst . validPartitionFieldInfoMap -getValidRels :: FieldInfoMap PGColInfo -> [RelInfo] +getValidRels :: FieldInfoMap PGColumnInfo -> [RelInfo] getValidRels = snd . validPartitionFieldInfoMap mkValidConstraints :: [ConstraintName] -> [ConstraintName] mkValidConstraints = filter (isValidName . G.Name . getConstraintTxt) -isRelNullable :: FieldInfoMap PGColInfo -> RelInfo -> Bool +isRelNullable :: FieldInfoMap PGColumnInfo -> RelInfo -> Bool isRelNullable fim ri = isNullable where lCols = map fst $ riMapping ri @@ -114,15 +114,15 @@ isAggFld = flip elem (numAggOps <> compAggOps) mkGCtxRole' :: QualifiedTable - -> Maybe ([PGColInfo], RelationInfoMap) + -> Maybe ([PGColumnInfo], RelationInfoMap) -- ^ insert permission -> Maybe (Bool, [SelField]) -- ^ select permission - -> Maybe [PGColInfo] + -> Maybe [PGColumnInfo] -- ^ update cols -> Maybe () -- ^ delete cols - -> [PGColInfo] + -> [PGColumnInfo] -- ^ primary key columns -> [ConstraintName] -- ^ constraints @@ -292,7 +292,7 @@ getRootFldsRole' :: QualifiedTable -> [PGCol] -> [ConstraintName] - -> FieldInfoMap PGColInfo + -> FieldInfoMap PGColumnInfo -> [FunctionInfo] -> Maybe ([T.Text], Bool) -- insert perm -> Maybe (AnnBoolExpPartialSQL, Maybe Int, [T.Text], Bool) -- select filter @@ -379,15 +379,15 @@ getRootFldsRole' tn primCols constraints fields funcs insM selM updM delM viM = procFuncArgs (fiInputArgs fi) $ \_ t -> FuncArgItem $ G.Name t -getSelPermission :: TableInfo PGColInfo -> RoleName -> Maybe SelPermInfo +getSelPermission :: TableInfo PGColumnInfo -> RoleName -> Maybe SelPermInfo getSelPermission tabInfo role = Map.lookup role (_tiRolePermInfoMap tabInfo) >>= _permSel getSelPerm :: (MonadError QErr m) - => TableCache PGColInfo + => TableCache PGColumnInfo -- all the fields of a table - -> FieldInfoMap PGColInfo + -> FieldInfoMap PGColumnInfo -- role and its permission -> RoleName -> SelPermInfo -> m (Bool, [SelField]) @@ -413,8 +413,8 @@ getSelPerm tableCache fields role selPermInfo = do mkInsCtx :: MonadError QErr m => RoleName - -> TableCache PGColInfo - -> FieldInfoMap PGColInfo + -> TableCache PGColumnInfo + -> FieldInfoMap PGColumnInfo -> InsPermInfo -> Maybe UpdPermInfo -> m InsCtx @@ -445,8 +445,8 @@ mkInsCtx role tableCache fields insPermInfo updPermM = do mkAdminInsCtx :: MonadError QErr m => QualifiedTable - -> TableCache PGColInfo - -> FieldInfoMap PGColInfo + -> TableCache 
PGColumnInfo + -> FieldInfoMap PGColumnInfo -> m InsCtx mkAdminInsCtx tn tc fields = do relTupsM <- forM rels $ \relInfo -> do @@ -468,9 +468,9 @@ mkAdminInsCtx tn tc fields = do mkGCtxRole :: (MonadError QErr m) - => TableCache PGColInfo + => TableCache PGColumnInfo -> QualifiedTable - -> FieldInfoMap PGColInfo + -> FieldInfoMap PGColumnInfo -> [PGCol] -> [ConstraintName] -> [FunctionInfo] @@ -505,7 +505,7 @@ getRootFldsRole :: QualifiedTable -> [PGCol] -> [ConstraintName] - -> FieldInfoMap PGColInfo + -> FieldInfoMap PGColumnInfo -> [FunctionInfo] -> Maybe ViewInfo -> RolePermInfo @@ -529,9 +529,9 @@ getRootFldsRole tn pCols constraints fields funcs viM (RolePermInfo insM selM up mkGCtxMapTable :: (MonadError QErr m) - => TableCache PGColInfo + => TableCache PGColumnInfo -> FunctionCache - -> TableInfo PGColInfo + -> TableInfo PGColumnInfo -> m (Map.HashMap RoleName (TyAgg, RootFields, InsCtxMap)) mkGCtxMapTable tableCache funcCache tabInfo = do m <- Map.traverseWithKey @@ -565,7 +565,7 @@ noFilter = annBoolExpTrue mkGCtxMap :: (MonadError QErr m) - => TableCache PGColInfo -> FunctionCache -> m GCtxMap + => TableCache PGColumnInfo -> FunctionCache -> m GCtxMap mkGCtxMap tableCache functionCache = do typesMapL <- mapM (mkGCtxMapTable tableCache functionCache) $ filter tableFltr $ Map.elems tableCache diff --git a/server/src-lib/Hasura/GraphQL/Schema/BoolExp.hs b/server/src-lib/Hasura/GraphQL/Schema/BoolExp.hs index 3137a2a0f76..1e15d270a3b 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/BoolExp.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/BoolExp.hs @@ -228,7 +228,7 @@ mkBoolExpInp tn fields = ] mkFldExpInp = \case - Left (PGColInfo colName colTy _) -> + Left (PGColumnInfo colName colTy _) -> mk (mkColName colName) (mkCompExpTy colTy) Right (RelInfo relName _ _ remTab _, _, _, _, _) -> mk (mkRelName relName) (mkBoolExpTy remTab) diff --git a/server/src-lib/Hasura/GraphQL/Schema/Common.hs b/server/src-lib/Hasura/GraphQL/Schema/Common.hs index 0d9c8e479f5..fda7b9326c4 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/Common.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/Common.hs @@ -25,7 +25,7 @@ import Hasura.Prelude import Hasura.RQL.Types import Hasura.SQL.Types -type SelField = Either PGColInfo (RelInfo, Bool, AnnBoolExpPartialSQL, Maybe Int, Bool) +type SelField = Either PGColumnInfo (RelInfo, Bool, AnnBoolExpPartialSQL, Maybe Int, Bool) qualObjectToName :: (ToTxt a) => QualifiedObject a -> G.Name qualObjectToName = G.Name . 
snakeCaseQualObject diff --git a/server/src-lib/Hasura/GraphQL/Schema/Mutation/Common.hs b/server/src-lib/Hasura/GraphQL/Schema/Mutation/Common.hs index cf33345af9d..87db87d2136 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/Mutation/Common.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/Mutation/Common.hs @@ -14,8 +14,8 @@ import Hasura.Prelude import Hasura.RQL.Types import Hasura.SQL.Types -mkPGColInp :: PGColInfo -> InpValInfo -mkPGColInp (PGColInfo colName colTy _) = +mkPGColInp :: PGColumnInfo -> InpValInfo +mkPGColInp (PGColumnInfo colName colTy _) = InpValInfo Nothing (G.Name $ getPGColTxt colName) Nothing $ G.toGT $ mkColumnType colTy diff --git a/server/src-lib/Hasura/GraphQL/Schema/Mutation/Insert.hs b/server/src-lib/Hasura/GraphQL/Schema/Mutation/Insert.hs index 9f4171aa444..44f3b4c9160 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/Mutation/Insert.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/Mutation/Insert.hs @@ -105,7 +105,7 @@ input table_insert_input { -} mkInsInp - :: QualifiedTable -> [PGColInfo] -> RelationInfoMap -> InpObjTyInfo + :: QualifiedTable -> [PGColumnInfo] -> RelationInfoMap -> InpObjTyInfo mkInsInp tn insCols relInfoMap = mkHsraInpTyInfo (Just desc) (mkInsInpTy tn) $ fromInpValL $ map mkPGColInp insCols <> relInps diff --git a/server/src-lib/Hasura/GraphQL/Schema/Mutation/Update.hs b/server/src-lib/Hasura/GraphQL/Schema/Mutation/Update.hs index 07b033e8404..9ce80d9746b 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/Mutation/Update.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/Mutation/Update.hs @@ -30,7 +30,7 @@ input table_set_input { } -} mkUpdSetInp - :: QualifiedTable -> [PGColInfo] -> InpObjTyInfo + :: QualifiedTable -> [PGColumnInfo] -> InpObjTyInfo mkUpdSetInp tn cols = mkHsraInpTyInfo (Just desc) (mkUpdSetTy tn) $ fromInpValL $ map mkPGColInp cols @@ -53,7 +53,7 @@ input table_inc_input { -} mkUpdIncInp - :: QualifiedTable -> Maybe [PGColInfo] -> Maybe InpObjTyInfo + :: QualifiedTable -> Maybe [PGColumnInfo] -> Maybe InpObjTyInfo mkUpdIncInp tn = maybe Nothing mkType where mkType cols = let intCols = onlyIntCols cols @@ -141,7 +141,7 @@ deleteAtPathDesc = "delete the field or element with specified path" <> " (for JSON arrays, negative integers count from the end)" mkUpdJSONOpInp - :: QualifiedTable -> [PGColInfo] -> [InpObjTyInfo] + :: QualifiedTable -> [PGColumnInfo] -> [InpObjTyInfo] mkUpdJSONOpInp tn cols = bool inpObjs [] $ null jsonbCols where jsonbCols = onlyJSONBCols cols @@ -191,7 +191,7 @@ update_table( -} -mkIncInpVal :: QualifiedTable -> [PGColInfo] -> Maybe InpValInfo +mkIncInpVal :: QualifiedTable -> [PGColumnInfo] -> Maybe InpValInfo mkIncInpVal tn cols = bool (Just incArg) Nothing $ null intCols where intCols = onlyIntCols cols @@ -199,7 +199,7 @@ mkIncInpVal tn cols = bool (Just incArg) Nothing $ null intCols incArg = InpValInfo (Just incArgDesc) "_inc" Nothing $ G.toGT $ mkUpdIncTy tn -mkJSONOpInpVals :: QualifiedTable -> [PGColInfo] -> [InpValInfo] +mkJSONOpInpVals :: QualifiedTable -> [PGColumnInfo] -> [InpValInfo] mkJSONOpInpVals tn cols = bool jsonbOpArgs [] $ null jsonbCols where jsonbCols = onlyJSONBCols cols @@ -224,7 +224,7 @@ mkJSONOpInpVals tn cols = bool jsonbOpArgs [] $ null jsonbCols G.toGT $ mkJSONOpTy tn deleteAtPathOp mkUpdMutFld - :: QualifiedTable -> [PGColInfo] -> ObjFldInfo + :: QualifiedTable -> [PGColumnInfo] -> ObjFldInfo mkUpdMutFld tn cols = mkHsraObjFldInfo (Just desc) fldName (fromInpValL inputValues) $ G.toGT $ mkMutRespTy tn diff --git a/server/src-lib/Hasura/GraphQL/Schema/Select.hs 
b/server/src-lib/Hasura/GraphQL/Schema/Select.hs index 706b34011d0..830c81a0384 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/Select.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/Select.hs @@ -57,8 +57,8 @@ mkPGColParams colType [ (G.Name "path", InpValInfo (Just pathDesc) "path" Nothing $ G.toGT $ mkScalarTy PGText) ] | otherwise = Map.empty -mkPGColFld :: PGColInfo -> ObjFldInfo -mkPGColFld (PGColInfo colName colTy isNullable) = +mkPGColFld :: PGColumnInfo -> ObjFldInfo +mkPGColFld (PGColumnInfo colName colTy isNullable) = mkHsraObjFldInfo Nothing n (mkPGColParams colTy) ty where n = G.Name $ getPGColTxt colName @@ -218,7 +218,7 @@ mkTableColAggFldsObj :: QualifiedTable -> G.Name -> (PGColumnType -> G.NamedType) - -> [PGColInfo] + -> [PGColumnInfo] -> ObjTyInfo mkTableColAggFldsObj tn op f cols = mkHsraObjTyInfo (Just desc) (mkTableColAggFldsTy op tn) Set.empty $ mapFromL _fiName $ @@ -258,7 +258,7 @@ table_by_pk( ): table -} mkSelFldPKey - :: QualifiedTable -> [PGColInfo] + :: QualifiedTable -> [PGColumnInfo] -> ObjFldInfo mkSelFldPKey tn cols = mkHsraObjFldInfo (Just desc) fldName args ty @@ -268,7 +268,7 @@ mkSelFldPKey tn cols = fldName = mkTableByPkName tn args = fromInpValL $ map colInpVal cols ty = G.toGT $ mkTableTy tn - colInpVal (PGColInfo n typ _) = + colInpVal (PGColumnInfo n typ _) = InpValInfo Nothing (mkColName n) Nothing $ G.toGT $ G.toNT $ mkColumnType typ {- diff --git a/server/src-lib/Hasura/GraphQL/Validate.hs b/server/src-lib/Hasura/GraphQL/Validate.hs index 03979662c97..a4befe6c115 100644 --- a/server/src-lib/Hasura/GraphQL/Validate.hs +++ b/server/src-lib/Hasura/GraphQL/Validate.hs @@ -33,8 +33,8 @@ import Hasura.GraphQL.Validate.Types import Hasura.RQL.Types import Hasura.RQL.Types.QueryCollection -import Hasura.SQL.Types (PGScalarTyped) -import Hasura.SQL.Value (PGColValue) +import Hasura.SQL.Types (WithScalarType) +import Hasura.SQL.Value (PGScalarValue) data QueryParts = QueryParts @@ -118,7 +118,7 @@ showVars :: (Functor f, Foldable f) => f G.Variable -> Text showVars = showNames . 
fmap G.unVariable type VarPGTypes = Map.HashMap G.Variable PGColumnType -type AnnPGVarVals = Map.HashMap G.Variable (PGScalarTyped PGColValue) +type AnnPGVarVals = Map.HashMap G.Variable (WithScalarType PGScalarValue) -- this is in similar spirit to getAnnVarVals, however -- here it is much simpler and can get rid of typemap requirement diff --git a/server/src-lib/Hasura/GraphQL/Validate/Types.hs b/server/src-lib/Hasura/GraphQL/Validate/Types.hs index 9f30caed059..a1ff7f0386e 100644 --- a/server/src-lib/Hasura/GraphQL/Validate/Types.hs +++ b/server/src-lib/Hasura/GraphQL/Validate/Types.hs @@ -687,7 +687,7 @@ data AnnGEnumValue deriving (Show, Eq) data AnnGValue - = AGScalar !PGScalarType !(Maybe PGColValue) + = AGScalar !PGScalarType !(Maybe PGScalarValue) | AGEnum !G.NamedType !AnnGEnumValue | AGObject !G.NamedType !(Maybe AnnGObject) | AGArray !G.ListType !(Maybe [AnnInpVal]) diff --git a/server/src-lib/Hasura/RQL/DDL/EventTrigger.hs b/server/src-lib/Hasura/RQL/DDL/EventTrigger.hs index 1435cec2432..94afe4fd124 100644 --- a/server/src-lib/Hasura/RQL/DDL/EventTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/EventTrigger.hs @@ -55,7 +55,7 @@ getTriggerSql :: Ops -> TriggerName -> QualifiedTable - -> [PGColInfo] + -> [PGColumnInfo] -> Bool -> SubscribeOpSpec -> Maybe T.Text @@ -118,7 +118,7 @@ getTriggerSql op trn qt allCols strfyNum spec = mkAllTriggersQ :: TriggerName -> QualifiedTable - -> [PGColInfo] + -> [PGColumnInfo] -> Bool -> TriggerOpsDef -> Q.TxE QErr () @@ -133,7 +133,7 @@ mkAllTriggersQ trn qt allCols strfyNum fullspec = do mkTriggerQ :: TriggerName -> QualifiedTable - -> [PGColInfo] + -> [PGColumnInfo] -> Bool -> Ops -> SubscribeOpSpec @@ -151,7 +151,7 @@ delTriggerQ trn = mapM_ (\op -> Q.unitQE addEventTriggerToCatalog :: QualifiedTable - -> [PGColInfo] + -> [PGColumnInfo] -> Bool -> EventTriggerConf -> Q.TxE QErr () @@ -179,7 +179,7 @@ delEventTriggerFromCatalog trn = do updateEventTriggerToCatalog :: QualifiedTable - -> [PGColInfo] + -> [PGColumnInfo] -> Bool -> EventTriggerConf -> Q.TxE QErr () diff --git a/server/src-lib/Hasura/RQL/DDL/Permission.hs b/server/src-lib/Hasura/RQL/DDL/Permission.hs index c3703bdd528..ff2b7144de6 100644 --- a/server/src-lib/Hasura/RQL/DDL/Permission.hs +++ b/server/src-lib/Hasura/RQL/DDL/Permission.hs @@ -108,14 +108,14 @@ dropView vn = procSetObj :: (QErrM m) - => TableInfo PGColInfo -> Maybe ColVals + => TableInfo PGColumnInfo -> Maybe ColVals -> m (PreSetColsPartial, [Text], [SchemaDependency]) procSetObj ti mObj = do (setColTups, deps) <- withPathK "set" $ fmap unzip $ forM (HM.toList setObj) $ \(pgCol, val) -> do ty <- askPGType fieldInfoMap pgCol $ "column " <> pgCol <<> " not found in table " <>> tn - sqlExp <- valueParser (PGTypeSimple ty) val + sqlExp <- valueParser (PGTypeScalar ty) val let dep = mkColDep (getDepReason sqlExp) tn pgCol return ((pgCol, sqlExp), dep) return (HM.fromList setColTups, depHeaders, deps) @@ -130,7 +130,7 @@ procSetObj ti mObj = do buildInsPermInfo :: (QErrM m, CacheRM m) - => TableInfo PGColInfo + => TableInfo PGColumnInfo -> PermDef InsPerm -> m (WithDeps InsPermInfo) buildInsPermInfo tabInfo (PermDef rn (InsPerm chk set mCols) _) = @@ -213,7 +213,7 @@ $(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''SelPerm) buildSelPermInfo :: (QErrM m, CacheRM m) - => TableInfo PGColInfo + => TableInfo PGColumnInfo -> SelPerm -> m (WithDeps SelPermInfo) buildSelPermInfo tabInfo sp = do @@ -283,7 +283,7 @@ type CreateUpdPerm = CreatePerm UpdPerm buildUpdPermInfo :: (QErrM m, CacheRM m) - => TableInfo 
PGColInfo + => TableInfo PGColumnInfo -> UpdPerm -> m (WithDeps UpdPermInfo) buildUpdPermInfo tabInfo (UpdPerm colSpec set fltr) = do @@ -347,7 +347,7 @@ type CreateDelPerm = CreatePerm DelPerm buildDelPermInfo :: (QErrM m, CacheRM m) - => TableInfo PGColInfo + => TableInfo PGColumnInfo -> DelPerm -> m (WithDeps DelPermInfo) buildDelPermInfo tabInfo (DelPerm fltr) = do diff --git a/server/src-lib/Hasura/RQL/DDL/Permission/Internal.hs b/server/src-lib/Hasura/RQL/DDL/Permission/Internal.hs index dcab14a3ff2..82023cc7f5a 100644 --- a/server/src-lib/Hasura/RQL/DDL/Permission/Internal.hs +++ b/server/src-lib/Hasura/RQL/DDL/Permission/Internal.hs @@ -40,7 +40,7 @@ instance ToJSON PermColSpec where toJSON (PCCols cols) = toJSON cols toJSON PCStar = "*" -convColSpec :: FieldInfoMap PGColInfo -> PermColSpec -> [PGCol] +convColSpec :: FieldInfoMap PGColumnInfo -> PermColSpec -> [PGCol] convColSpec _ (PCCols cols) = cols convColSpec cim PCStar = map pgiName $ getCols cim @@ -48,7 +48,7 @@ assertPermNotDefined :: (MonadError QErr m) => RoleName -> PermAccessor a - -> TableInfo PGColInfo + -> TableInfo PGColumnInfo -> m () assertPermNotDefined roleName pa tableInfo = when (permissionIsDefined rpi pa || roleName == adminRole) @@ -70,7 +70,7 @@ assertPermDefined :: (MonadError QErr m) => RoleName -> PermAccessor a - -> TableInfo PGColInfo + -> TableInfo PGColumnInfo -> m () assertPermDefined roleName pa tableInfo = unless (permissionIsDefined rpi pa) $ throw400 PermissionDenied $ mconcat @@ -84,7 +84,7 @@ assertPermDefined roleName pa tableInfo = askPermInfo :: (MonadError QErr m) - => TableInfo PGColInfo + => TableInfo PGColumnInfo -> RoleName -> PermAccessor c -> m c @@ -175,7 +175,7 @@ data CreatePermP1Res a procBoolExp :: (QErrM m, CacheRM m) - => QualifiedTable -> FieldInfoMap PGColInfo -> BoolExp + => QualifiedTable -> FieldInfoMap PGColumnInfo -> BoolExp -> m (AnnBoolExpPartialSQL, [SchemaDependency]) procBoolExp tn fieldInfoMap be = do abe <- annBoolExp valueParser fieldInfoMap be @@ -209,16 +209,16 @@ valueParser valueParser pgType = \case -- When it is a special variable String t - | isUserVar t -> return $ mkScalarSessionVar pgType t - | isReqUserId t -> return $ mkScalarSessionVar pgType userIdHeader + | isUserVar t -> return $ mkTypedSessionVar pgType t + | isReqUserId t -> return $ mkTypedSessionVar pgType userIdHeader -- Typical value as Aeson's value val -> case pgType of - PGTypeSimple columnType -> PSESQLExp . toTxtValue <$> parsePGScalarValue columnType val + PGTypeScalar columnType -> PSESQLExp . toTxtValue <$> parsePGScalarValue columnType val PGTypeArray ofType -> do vals <- runAesonParser parseJSON val - PGScalarTyped scalarType scalarValues <- parsePGScalarValues ofType vals + WithScalarType scalarType scalarValues <- parsePGScalarValues ofType vals return . PSESQLExp $ S.SETyAnn - (S.SEArray $ map (toTxtValue . PGScalarTyped scalarType) scalarValues) + (S.SEArray $ map (toTxtValue . 
WithScalarType scalarType) scalarValues) (S.mkTypeAnn $ PGTypeArray scalarType) injectDefaults :: QualifiedTable -> QualifiedTable -> Q.Query @@ -258,7 +258,7 @@ class (ToJSON a) => IsPerm a where buildPermInfo :: (QErrM m, CacheRM m) - => TableInfo PGColInfo + => TableInfo PGColumnInfo -> PermDef a -> m (WithDeps (PermInfo a)) @@ -282,7 +282,7 @@ class (ToJSON a) => IsPerm a where getPermAcc2 _ = permAccessor validateViewPerm - :: (IsPerm a, QErrM m) => PermDef a -> TableInfo PGColInfo -> m () + :: (IsPerm a, QErrM m) => PermDef a -> TableInfo PGColumnInfo -> m () validateViewPerm permDef tableInfo = case permAcc of PASelect -> return () @@ -296,7 +296,7 @@ validateViewPerm permDef tableInfo = addPermP1 :: (QErrM m, CacheRM m, IsPerm a) - => TableInfo PGColInfo -> PermDef a -> m (WithDeps (PermInfo a)) + => TableInfo PGColumnInfo -> PermDef a -> m (WithDeps (PermInfo a)) addPermP1 tabInfo pd = do assertPermNotDefined (pdRole pd) (getPermAcc1 pd) tabInfo buildPermInfo tabInfo pd diff --git a/server/src-lib/Hasura/RQL/DDL/Relationship.hs b/server/src-lib/Hasura/RQL/DDL/Relationship.hs index 0ae16f68a79..6d45ac79e50 100644 --- a/server/src-lib/Hasura/RQL/DDL/Relationship.hs +++ b/server/src-lib/Hasura/RQL/DDL/Relationship.hs @@ -35,7 +35,7 @@ import Instances.TH.Lift () validateManualConfig :: (QErrM m, CacheRM m) - => FieldInfoMap PGColInfo + => FieldInfoMap PGColumnInfo -> RelManualConfig -> m () validateManualConfig fim rm = do @@ -70,7 +70,7 @@ persistRel (QualifiedObject sn tn) rn relType relDef comment = checkForFldConfilct :: (MonadError QErr m) - => TableInfo PGColInfo + => TableInfo PGColumnInfo -> FieldName -> m () checkForFldConfilct tabInfo f = diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Diff.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Diff.hs index d39486e60d2..8d749dd34d0 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Diff.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Diff.hs @@ -88,8 +88,8 @@ data TableDiff = TableDiff { _tdNewName :: !(Maybe QualifiedTable) , _tdDroppedCols :: ![PGCol] - , _tdAddedCols :: ![PGRawColInfo] - , _tdAlteredCols :: ![(PGRawColInfo, PGRawColInfo)] + , _tdAddedCols :: ![PGRawColumnInfo] + , _tdAlteredCols :: ![(PGRawColumnInfo, PGRawColumnInfo)] , _tdDroppedFKeyCons :: ![ConstraintName] -- The final list of uniq/primary constraint names -- used for generating types on_conflict clauses @@ -118,7 +118,7 @@ getTableDiff oldtm newtm = existingCols = getOverlap pcmOrdinalPosition oldCols newCols pcmToPci (PGColMeta colName _ colType isNullable references) - = PGRawColInfo colName colType isNullable references + = PGRawColumnInfo colName colType isNullable references alteredCols = flip map (filter (uncurry (/=)) existingCols) $ pcmToPci *** pcmToPci @@ -138,7 +138,7 @@ getTableDiff oldtm newtm = getTableChangeDeps :: (QErrM m, CacheRWM m) - => TableInfo PGColInfo -> TableDiff -> m [SchemaObjId] + => TableInfo PGColumnInfo -> TableDiff -> m [SchemaObjId] getTableChangeDeps ti tableDiff = do sc <- askSchemaCache -- for all the dropped columns diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Enum.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Enum.hs index e856bce6bdf..442a24e8360 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Enum.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Enum.hs @@ -35,18 +35,18 @@ import qualified Hasura.SQL.DML as S data EnumTableIntegrityError = EnumTableMissingPrimaryKey | EnumTableMultiColumnPrimaryKey ![PGCol] - | EnumTableNonTextualPrimaryKey !PGRawColInfo + | EnumTableNonTextualPrimaryKey !PGRawColumnInfo | 
EnumTableNoEnumValues | EnumTableInvalidEnumValueNames !(NE.NonEmpty T.Text) - | EnumTableNonTextualCommentColumn !PGRawColInfo + | EnumTableNonTextualCommentColumn !PGRawColumnInfo | EnumTableTooManyColumns ![PGCol] deriving (Show, Eq) fetchAndValidateEnumValues :: (MonadTx m) => QualifiedTable - -> [PGRawColInfo] - -> [PGRawColInfo] + -> [PGRawColumnInfo] + -> [PGRawColumnInfo] -> m EnumValues fetchAndValidateEnumValues tableName primaryKeyColumns columnInfos = either (throw400 ConstraintViolation . showErrors) pure =<< runValidateT fetchAndValidate diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Rename.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Rename.hs index 4a6eb0e7b09..2ff5595fedd 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Rename.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Rename.hs @@ -73,7 +73,7 @@ renameTableInCatalog newQT oldQT = do renameColInCatalog :: (MonadTx m, CacheRM m) - => PGCol -> PGCol -> QualifiedTable -> TableInfo PGColInfo -> m () + => PGCol -> PGCol -> QualifiedTable -> TableInfo PGColumnInfo -> m () renameColInCatalog oCol nCol qt ti = do sc <- askSchemaCache -- Check if any relation exists with new column name diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Table.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Table.hs index 72b1e346113..3b9a6d134aa 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Table.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Table.hs @@ -207,7 +207,7 @@ runUntrackTableQ q = do unTrackExistingTableOrViewP2 q processTableChanges :: (MonadTx m, CacheRWM m) - => TableInfo PGColInfo -> TableDiff -> m Bool + => TableInfo PGColumnInfo -> TableDiff -> m Bool processTableChanges ti tableDiff = do -- If table rename occurs then don't replace constraints and -- process dropped/added columns, because schema reload happens eventually @@ -243,7 +243,7 @@ processTableChanges ti tableDiff = do procAddedCols tn = -- In the newly added columns check that there is no conflict with relationships - forM_ addedCols $ \rawInfo@(PGRawColInfo colName _ _ _) -> + forM_ addedCols $ \rawInfo@(PGRawColumnInfo colName _ _ _) -> case M.lookup (fromPGCol colName) $ _tiFieldInfoMap ti of Just (FIRelationship _) -> throw400 AlreadyExists $ "cannot add column " <> colName @@ -254,8 +254,8 @@ processTableChanges ti tableDiff = do addColToCache colName info tn procAlteredCols sc tn = fmap or $ forM alteredCols $ - \( PGRawColInfo oldName oldType _ _ - , newRawInfo@(PGRawColInfo newName newType _ _) ) -> do + \( PGRawColumnInfo oldName oldType _ _ + , newRawInfo@(PGRawColumnInfo newName newType _ _) ) -> do let performColumnUpdate = do newInfo <- processColumnInfoUsingCache tn newRawInfo updColInCache newName newInfo tn @@ -316,19 +316,19 @@ processSchemaChanges schemaDiff = do where SchemaDiff droppedTables alteredTables = schemaDiff --- | Builds an initial @'TableCache' 'PGColInfo'@ from catalog information. Does not fill in +-- | Builds an initial @'TableCache' 'PGColumnInfo'@ from catalog information. Does not fill in -- '_tiRolePermInfoMap' or '_tiEventTriggerInfoMap' at all, and '_tiFieldInfoMap' only contains -- columns, not relationships; those pieces of information are filled in by later stages. buildTableCache :: forall m. 
(MonadTx m, CacheRWM m) - => [CatalogTable] -> m (TableCache PGColInfo) + => [CatalogTable] -> m (TableCache PGColumnInfo) buildTableCache = processTableCache <=< buildRawTableCache where withTable name = withSchemaObject $ InconsistentMetadataObj (MOTable name) MOTTable (toJSON name) -- Step 1: Build the raw table cache from metadata information. - buildRawTableCache :: [CatalogTable] -> m (TableCache PGRawColInfo) + buildRawTableCache :: [CatalogTable] -> m (TableCache PGRawColumnInfo) buildRawTableCache catalogTables = fmap (M.fromList . catMaybes) . for catalogTables $ \(CatalogTable name isSystemDefined isEnum maybeInfo) -> withTable name $ do catalogInfo <- onNothing maybeInfo $ @@ -358,7 +358,7 @@ buildTableCache = processTableCache <=< buildRawTableCache -- Step 2: Process the raw table cache to replace Postgres column types with logical column -- types. - processTableCache :: TableCache PGRawColInfo -> m (TableCache PGColInfo) + processTableCache :: TableCache PGRawColumnInfo -> m (TableCache PGColumnInfo) processTableCache rawTables = fmap (M.mapMaybe id) . for rawTables $ \rawInfo -> do let tableName = _tiName rawInfo withTable tableName $ rawInfo @@ -366,17 +366,17 @@ buildTableCache = processTableCache <=< buildRawTableCache where enumTables = M.mapMaybe _tiEnumValues rawTables --- | “Processes” a 'PGRawColInfo' into a 'PGColInfo' by resolving its type using a map of known +-- | “Processes” a 'PGRawColumnInfo' into a 'PGColumnInfo' by resolving its type using a map of known -- enum tables. processColumnInfo :: (QErrM m) => M.HashMap QualifiedTable EnumValues -- ^ known enum tables -> QualifiedTable -- ^ the table this column belongs to - -> PGRawColInfo -- ^ the column’s raw information - -> m PGColInfo + -> PGRawColumnInfo -- ^ the column’s raw information + -> m PGColumnInfo processColumnInfo enumTables tableName rawInfo = do resolvedType <- resolveColumnType - pure PGColInfo + pure PGColumnInfo { pgiName = prciName rawInfo , pgiType = resolvedType , pgiIsNullable = prciIsNullable rawInfo } @@ -400,7 +400,7 @@ processColumnInfo enumTables tableName rawInfo = do -- | Like 'processColumnInfo', but uses the information in the current schema cache to resolve a -- column’s type. 
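-- (Editorial sketch, not part of the patch: the resolveColumnType helper used
-- by processColumnInfo above plausibly branches on whether the raw column
-- references a known enum table. The prciReferences and prciType accessors
-- and the EnumReference constructor below are assumptions, not taken from
-- these hunks:
--
--   resolveColumnType = case prciReferences rawInfo of
--     [referencedTable]
--       | Just enumValues <- M.lookup referencedTable enumTables ->
--           pure . PGColumnEnumReference $ EnumReference referencedTable enumValues
--     _ -> pure . PGColumnScalar $ prciType rawInfo
--
-- i.e. a column counts as an enum reference only when it references exactly
-- one table and that table is present in the known-enum-tables map.)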
-processColumnInfoUsingCache :: (CacheRM m, QErrM m) => QualifiedTable -> PGRawColInfo -> m PGColInfo +processColumnInfoUsingCache :: (CacheRM m, QErrM m) => QualifiedTable -> PGRawColumnInfo -> m PGColumnInfo processColumnInfoUsingCache tableName rawInfo = do tables <- scTables <$> askSchemaCache processColumnInfo (M.mapMaybe _tiEnumValues tables) tableName rawInfo diff --git a/server/src-lib/Hasura/RQL/DML/Delete.hs b/server/src-lib/Hasura/RQL/DML/Delete.hs index 33f5025fc81..6ca995f12de 100644 --- a/server/src-lib/Hasura/RQL/DML/Delete.hs +++ b/server/src-lib/Hasura/RQL/DML/Delete.hs @@ -30,7 +30,7 @@ data AnnDelG v { dqp1Table :: !QualifiedTable , dqp1Where :: !(AnnBoolExp v, AnnBoolExp v) , dqp1MutFlds :: !(MutFldsG v) - , dqp1AllCols :: ![PGColInfo] + , dqp1AllCols :: ![PGColumnInfo] } deriving (Show, Eq) traverseAnnDel diff --git a/server/src-lib/Hasura/RQL/DML/Insert.hs b/server/src-lib/Hasura/RQL/DML/Insert.hs index be473aa9948..cea0ff33881 100644 --- a/server/src-lib/Hasura/RQL/DML/Insert.hs +++ b/server/src-lib/Hasura/RQL/DML/Insert.hs @@ -39,7 +39,7 @@ data InsertQueryP1 , iqp1Tuples :: ![[S.SQLExp]] , iqp1Conflict :: !(Maybe ConflictClauseP1) , iqp1MutFlds :: !MutFlds - , iqp1AllCols :: ![PGColInfo] + , iqp1AllCols :: ![PGColumnInfo] } deriving (Show, Eq) mkInsertCTE :: InsertQueryP1 -> S.CTE @@ -67,7 +67,7 @@ convObj => (PGColumnType -> Value -> m S.SQLExp) -> HM.HashMap PGCol S.SQLExp -> HM.HashMap PGCol S.SQLExp - -> FieldInfoMap PGColInfo + -> FieldInfoMap PGColumnInfo -> InsObj -> m ([PGCol], [S.SQLExp]) convObj prepFn defInsVals setInsVals fieldInfoMap insObj = do @@ -99,7 +99,7 @@ validateInpCols inpCols updColsPerm = forM_ inpCols $ \inpCol -> buildConflictClause :: (UserInfoM m, QErrM m) => SessVarBldr m - -> TableInfo PGColInfo + -> TableInfo PGColumnInfo -> [PGCol] -> OnConflict -> m ConflictClauseP1 diff --git a/server/src-lib/Hasura/RQL/DML/Internal.hs b/server/src-lib/Hasura/RQL/DML/Internal.hs index 224fcbaebe5..2c84fc3adf0 100644 --- a/server/src-lib/Hasura/RQL/DML/Internal.hs +++ b/server/src-lib/Hasura/RQL/DML/Internal.hs @@ -41,7 +41,7 @@ instance UserInfoM DMLP1 where instance HasSQLGenCtx DMLP1 where askSQLGenCtx = DMLP1 $ lift askSQLGenCtx -mkAdminRolePermInfo :: TableInfo PGColInfo -> RolePermInfo +mkAdminRolePermInfo :: TableInfo PGColumnInfo -> RolePermInfo mkAdminRolePermInfo ti = RolePermInfo (Just i) (Just s) (Just u) (Just d) where @@ -57,7 +57,7 @@ mkAdminRolePermInfo ti = askPermInfo' :: (UserInfoM m) => PermAccessor c - -> TableInfo PGColInfo + -> TableInfo PGColumnInfo -> m (Maybe c) askPermInfo' pa tableInfo = do roleName <- askCurRole @@ -72,7 +72,7 @@ askPermInfo' pa tableInfo = do askPermInfo :: (UserInfoM m, QErrM m) => PermAccessor c - -> TableInfo PGColInfo + -> TableInfo PGColumnInfo -> m c askPermInfo pa tableInfo = do roleName <- askCurRole @@ -87,7 +87,7 @@ askPermInfo pa tableInfo = do where pt = permTypeToCode $ permAccToType pa -isTabUpdatable :: RoleName -> TableInfo PGColInfo -> Bool +isTabUpdatable :: RoleName -> TableInfo PGColumnInfo -> Bool isTabUpdatable role ti | role == adminRole = True | otherwise = isJust $ M.lookup role rpim >>= _permUpd @@ -96,22 +96,22 @@ isTabUpdatable role ti askInsPermInfo :: (UserInfoM m, QErrM m) - => TableInfo PGColInfo -> m InsPermInfo + => TableInfo PGColumnInfo -> m InsPermInfo askInsPermInfo = askPermInfo PAInsert askSelPermInfo :: (UserInfoM m, QErrM m) - => TableInfo PGColInfo -> m SelPermInfo + => TableInfo PGColumnInfo -> m SelPermInfo askSelPermInfo = askPermInfo PASelect 
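-- (Editorial sketch, not part of the patch: for orientation, the
-- WithScalarType wrapper — renamed from PGScalarTyped by this commit — pairs
-- a value with its Postgres scalar type, roughly:
--
--   data WithScalarType a = WithScalarType
--     { pstType  :: !PGScalarType
--     , pstValue :: !a
--     } deriving (Show, Eq, Functor)
--
-- Only pstType is attested in these hunks (see the LiveQuery change above);
-- pstValue and the deriving clause are assumptions. Positional matches such
-- as WithScalarType scalarType scalarValues in convBoolExp below rely on this
-- two-field shape.)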
askUpdPermInfo :: (UserInfoM m, QErrM m) - => TableInfo PGColInfo -> m UpdPermInfo + => TableInfo PGColumnInfo -> m UpdPermInfo askUpdPermInfo = askPermInfo PAUpdate askDelPermInfo :: (UserInfoM m, QErrM m) - => TableInfo PGColInfo -> m DelPermInfo + => TableInfo PGColumnInfo -> m DelPermInfo askDelPermInfo = askPermInfo PADelete verifyAsrns :: (MonadError QErr m) => [a -> m ()] -> [a] -> m () @@ -152,7 +152,7 @@ binRHSBuilder colType val = do fetchRelTabInfo :: (QErrM m, CacheRM m) => QualifiedTable - -> m (TableInfo PGColInfo) + -> m (TableInfo PGColumnInfo) fetchRelTabInfo refTabName = -- Internal error modifyErrAndSet500 ("foreign " <> ) $ askTabInfo refTabName @@ -162,7 +162,7 @@ type SessVarBldr m = PGType PGScalarType -> SessVar -> m S.SQLExp fetchRelDet :: (UserInfoM m, QErrM m, CacheRM m) => RelName -> QualifiedTable - -> m (FieldInfoMap PGColInfo, SelPermInfo) + -> m (FieldInfoMap PGColumnInfo, SelPermInfo) fetchRelDet relName refTabName = do roleName <- askCurRole -- Internal error @@ -188,7 +188,7 @@ checkOnColExp -> AnnBoolExpFldSQL -> m AnnBoolExpFldSQL checkOnColExp spi sessVarBldr annFld = case annFld of - AVCol (PGColInfo cn _ _) _ -> do + AVCol (PGColumnInfo cn _ _) _ -> do checkSelOnCol spi cn return annFld AVRel relInfo nesAnn -> do @@ -223,7 +223,7 @@ sessVarFromCurrentSetting' :: PGType PGScalarType -> SessVar -> S.SQLExp sessVarFromCurrentSetting' ty sessVar = flip S.SETyAnn (S.mkTypeAnn ty) $ case ty of - PGTypeSimple baseTy -> withGeoVal baseTy sessVarVal + PGTypeScalar baseTy -> withGeoVal baseTy sessVarVal PGTypeArray _ -> sessVarVal where curSess = S.SEUnsafe "current_setting('hasura.user')::json" @@ -241,7 +241,7 @@ checkSelPerm spi sessVarBldr = convBoolExp :: (UserInfoM m, QErrM m, CacheRM m) - => FieldInfoMap PGColInfo + => FieldInfoMap PGColumnInfo -> SelPermInfo -> BoolExp -> SessVarBldr m @@ -252,13 +252,13 @@ convBoolExp cim spi be sessVarBldr prepValBldr = do checkSelPerm spi sessVarBldr abe where rhsParser pgType val = case pgType of - PGTypeSimple ty -> prepValBldr ty val + PGTypeScalar ty -> prepValBldr ty val PGTypeArray ofTy -> do -- for arrays, we don't use the prepared builder vals <- runAesonParser parseJSON val - PGScalarTyped scalarType scalarValues <- parsePGScalarValues ofTy vals + WithScalarType scalarType scalarValues <- parsePGScalarValues ofTy vals return $ S.SETyAnn - (S.SEArray $ map (toTxtValue . PGScalarTyped scalarType) scalarValues) + (S.SEArray $ map (toTxtValue . 
WithScalarType scalarType) scalarValues) (S.mkTypeAnn $ PGTypeArray scalarType) dmlTxErrorHandler :: Q.PGTxErr -> QErr diff --git a/server/src-lib/Hasura/RQL/DML/Mutation.hs b/server/src-lib/Hasura/RQL/DML/Mutation.hs index 3688c55b7ea..dc0b7a83422 100644 --- a/server/src-lib/Hasura/RQL/DML/Mutation.hs +++ b/server/src-lib/Hasura/RQL/DML/Mutation.hs @@ -27,7 +27,7 @@ data Mutation { _mTable :: !QualifiedTable , _mQuery :: !(S.CTE, DS.Seq Q.PrepArg) , _mFields :: !MutFlds - , _mCols :: ![PGColInfo] + , _mCols :: ![PGColumnInfo] , _mStrfyNum :: !Bool } deriving (Show, Eq) @@ -57,7 +57,7 @@ mutateAndSel (Mutation qt q mutFlds allCols strfyNum) = do mutateAndFetchCols :: QualifiedTable - -> [PGColInfo] + -> [PGColumnInfo] -> (S.CTE, DS.Seq Q.PrepArg) -> Bool -> Q.TxE QErr MutateResp @@ -89,7 +89,7 @@ mutateAndFetchCols qt cols (cte, p) strfyNum = mkSelCTEFromColVals :: MonadError QErr m - => QualifiedTable -> [PGColInfo] -> [ColVals] -> m S.CTE + => QualifiedTable -> [PGColumnInfo] -> [ColVals] -> m S.CTE mkSelCTEFromColVals qt allCols colVals = S.CTESelect <$> case colVals of [] -> return selNoRows diff --git a/server/src-lib/Hasura/RQL/DML/Returning.hs b/server/src-lib/Hasura/RQL/DML/Returning.hs index 662c60b754c..9ae1b2567c3 100644 --- a/server/src-lib/Hasura/RQL/DML/Returning.hs +++ b/server/src-lib/Hasura/RQL/DML/Returning.hs @@ -56,18 +56,18 @@ pgColsFromMutFld = \case MExp _ -> [] MRet selFlds -> flip mapMaybe selFlds $ \(_, annFld) -> case annFld of - FCol (PGColInfo col colTy _) _ -> Just (col, colTy) - _ -> Nothing + FCol (PGColumnInfo col colTy _) _ -> Just (col, colTy) + _ -> Nothing pgColsFromMutFlds :: MutFlds -> [(PGCol, PGColumnType)] pgColsFromMutFlds = concatMap (pgColsFromMutFld . snd) -pgColsToSelFlds :: [PGColInfo] -> [(FieldName, AnnFld)] +pgColsToSelFlds :: [PGColumnInfo] -> [(FieldName, AnnFld)] pgColsToSelFlds cols = flip map cols $ \pgColInfo -> (fromPGCol $ pgiName pgColInfo, FCol pgColInfo Nothing) -mkDefaultMutFlds :: Maybe [PGColInfo] -> MutFlds +mkDefaultMutFlds :: Maybe [PGColumnInfo] -> MutFlds mkDefaultMutFlds = \case Nothing -> mutFlds Just cols -> ("returning", MRet $ pgColsToSelFlds cols):mutFlds @@ -111,10 +111,10 @@ mkSelWith qt cte mutFlds singleObj strfyNum = checkRetCols :: (UserInfoM m, QErrM m) - => FieldInfoMap PGColInfo + => FieldInfoMap PGColumnInfo -> SelPermInfo -> [PGCol] - -> m [PGColInfo] + -> m [PGColumnInfo] checkRetCols fieldInfoMap selPermInfo cols = do mapM_ (checkSelOnCol selPermInfo) cols forM cols $ \col -> askPGColInfo fieldInfoMap col relInRetErr diff --git a/server/src-lib/Hasura/RQL/DML/Select.hs b/server/src-lib/Hasura/RQL/DML/Select.hs index 75e7b390fd8..bcc5a48a761 100644 --- a/server/src-lib/Hasura/RQL/DML/Select.hs +++ b/server/src-lib/Hasura/RQL/DML/Select.hs @@ -30,7 +30,7 @@ import qualified Database.PG.Query as Q import qualified Hasura.SQL.DML as S convSelCol :: (UserInfoM m, QErrM m, CacheRM m) - => FieldInfoMap PGColInfo + => FieldInfoMap PGColumnInfo -> SelPermInfo -> SelCol -> m [ExtCol] @@ -50,7 +50,7 @@ convSelCol fieldInfoMap spi (SCStar wildcard) = convWildcard :: (UserInfoM m, QErrM m, CacheRM m) - => FieldInfoMap PGColInfo + => FieldInfoMap PGColumnInfo -> SelPermInfo -> Wildcard -> m [ExtCol] @@ -78,7 +78,7 @@ convWildcard fieldInfoMap (SelPermInfo cols _ _ _ _ _) wildcard = relExtCols wc = mapM (mkRelCol wc) relColInfos resolveStar :: (UserInfoM m, QErrM m, CacheRM m) - => FieldInfoMap PGColInfo + => FieldInfoMap PGColumnInfo -> SelPermInfo -> SelectQ -> m SelectQExt @@ -105,7 +105,7 @@ resolveStar fim spi 
(SelectG selCols mWh mOb mLt mOf) = do convOrderByElem :: (UserInfoM m, QErrM m, CacheRM m) => SessVarBldr m - -> (FieldInfoMap PGColInfo, SelPermInfo) + -> (FieldInfoMap PGColumnInfo, SelPermInfo) -> OrderByCol -> m AnnObCol convOrderByElem sessVarBldr (flds, spi) = \case @@ -145,7 +145,7 @@ convOrderByElem sessVarBldr (flds, spi) = \case convSelectQ :: (UserInfoM m, QErrM m, CacheRM m, HasSQLGenCtx m) - => FieldInfoMap PGColInfo -- Table information of current table + => FieldInfoMap PGColumnInfo -- Table information of current table -> SelPermInfo -- Additional select permission info -> SelectQExt -- Given Select Query -> SessVarBldr m @@ -200,10 +200,10 @@ convSelectQ fieldInfoMap selPermInfo selQ sessVarBldr prepValBldr = do convExtSimple :: (UserInfoM m, QErrM m) - => FieldInfoMap PGColInfo + => FieldInfoMap PGColumnInfo -> SelPermInfo -> PGCol - -> m PGColInfo + -> m PGColumnInfo convExtSimple fieldInfoMap selPermInfo pgCol = do checkSelOnCol selPermInfo pgCol askPGColInfo fieldInfoMap pgCol relWhenPGErr @@ -212,7 +212,7 @@ convExtSimple fieldInfoMap selPermInfo pgCol = do convExtRel :: (UserInfoM m, QErrM m, CacheRM m, HasSQLGenCtx m) - => FieldInfoMap PGColInfo + => FieldInfoMap PGColumnInfo -> RelName -> Maybe RelName -> SelectQExt diff --git a/server/src-lib/Hasura/RQL/DML/Select/Internal.hs b/server/src-lib/Hasura/RQL/DML/Select/Internal.hs index 8b523939c99..f76d608f446 100644 --- a/server/src-lib/Hasura/RQL/DML/Select/Internal.hs +++ b/server/src-lib/Hasura/RQL/DML/Select/Internal.hs @@ -203,7 +203,7 @@ buildJsonObject pfx parAls arrRelCtx strfyNum flds = ANIField (fldAls, arrSel) in S.mkQIdenExp arrPfx fldAls - toSQLCol :: PGColInfo -> Maybe ColOp -> S.SQLExp + toSQLCol :: PGColumnInfo -> Maybe ColOp -> S.SQLExp toSQLCol col colOpM = toJSONableExp strfyNum (pgiType col) $ case colOpM of Nothing -> colNameExp diff --git a/server/src-lib/Hasura/RQL/DML/Select/Types.hs b/server/src-lib/Hasura/RQL/DML/Select/Types.hs index ca735502c6b..7658449d91a 100644 --- a/server/src-lib/Hasura/RQL/DML/Select/Types.hs +++ b/server/src-lib/Hasura/RQL/DML/Select/Types.hs @@ -49,7 +49,7 @@ data AnnAggOrdBy deriving (Show, Eq) data AnnObColG v - = AOCPG !PGColInfo + = AOCPG !PGColumnInfo | AOCObj !RelInfo !(AnnBoolExp v) !(AnnObColG v) | AOCAgg !RelInfo !(AnnBoolExp v) !AnnAggOrdBy deriving (Show, Eq) @@ -121,7 +121,7 @@ data ColOp } deriving (Show, Eq) data AnnFldG v - = FCol !PGColInfo !(Maybe ColOp) + = FCol !PGColumnInfo !(Maybe ColOp) | FObj !(ObjSelG v) | FArr !(ArrSelG v) | FExp !T.Text diff --git a/server/src-lib/Hasura/RQL/DML/Update.hs b/server/src-lib/Hasura/RQL/DML/Update.hs index 3a0b958ad5d..1627c5d26bd 100644 --- a/server/src-lib/Hasura/RQL/DML/Update.hs +++ b/server/src-lib/Hasura/RQL/DML/Update.hs @@ -36,7 +36,7 @@ data AnnUpdG v -- however the session variable can still be -- converted as desired , uqp1MutFlds :: !(MutFldsG v) - , uqp1AllCols :: ![PGColInfo] + , uqp1AllCols :: ![PGColumnInfo] } deriving (Show, Eq) traverseAnnUpd @@ -103,7 +103,7 @@ convDefault col _ _ = return (col, S.SEUnsafe "DEFAULT") convOp :: (UserInfoM m, QErrM m) - => FieldInfoMap PGColInfo + => FieldInfoMap PGColumnInfo -> [PGCol] -> UpdPermInfo -> [(PGCol, a)] diff --git a/server/src-lib/Hasura/RQL/GBoolExp.hs b/server/src-lib/Hasura/RQL/GBoolExp.hs index 54cfd84bdfb..d438cea777f 100644 --- a/server/src-lib/Hasura/RQL/GBoolExp.hs +++ b/server/src-lib/Hasura/RQL/GBoolExp.hs @@ -23,7 +23,7 @@ type OpRhsParser m v = -- | Represents a reference to a Postgres column, possibly casted an arbitrary -- number 
of times. Used within 'parseOperationsExpression' for bookkeeping. data ColumnReference - = ColumnReferenceColumn !PGColInfo + = ColumnReferenceColumn !PGColumnInfo | ColumnReferenceCast !ColumnReference !PGColumnType deriving (Show, Eq) @@ -43,8 +43,8 @@ parseOperationsExpression :: forall m v . (MonadError QErr m) => OpRhsParser m v - -> FieldInfoMap PGColInfo - -> PGColInfo + -> FieldInfoMap PGColumnInfo + -> PGColumnInfo -> Value -> m [OpExpG v] parseOperationsExpression rhsParser fim columnInfo = @@ -56,7 +56,7 @@ parseOperationsExpression rhsParser fim columnInfo = Object o -> mapM (parseOperation column) (M.toList o) val -> pure . AEQ False <$> rhsParser columnType val where - columnType = PGTypeSimple $ columnReferenceType column + columnType = PGTypeScalar $ columnReferenceType column parseOperation :: ColumnReference -> (T.Text, Value) -> m (OpExpG v) parseOperation column (opStr, val) = withPathK opStr $ @@ -239,11 +239,11 @@ parseOperationsExpression rhsParser fim columnInfo = "incompatible column types : " <> column <<> ", " <>> rhsCol else return rhsCol - parseWithTy ty = rhsParser (PGTypeSimple ty) val + parseWithTy ty = rhsParser (PGTypeScalar ty) val -- parse one with the column's type parseOne = parseWithTy colTy - parseOneNoSess ty = rhsParser (PGTypeSimple ty) + parseOneNoSess ty = rhsParser (PGTypeScalar ty) parseManyWithType ty = rhsParser (PGTypeArray ty) val @@ -275,7 +275,7 @@ notEqualsBoolExpBuilder qualColExp rhsExp = annBoolExp :: (QErrM m, CacheRM m) => OpRhsParser m v - -> FieldInfoMap PGColInfo + -> FieldInfoMap PGColumnInfo -> BoolExp -> m (AnnBoolExp v) annBoolExp rhsParser fim (BoolExp boolExp) = @@ -284,13 +284,13 @@ annBoolExp rhsParser fim (BoolExp boolExp) = annColExp :: (QErrM m, CacheRM m) => OpRhsParser m v - -> FieldInfoMap PGColInfo + -> FieldInfoMap PGColumnInfo -> ColExp -> m (AnnBoolExpFld v) annColExp rhsParser colInfoMap (ColExp fieldName colVal) = do colInfo <- askFieldInfo colInfoMap fieldName case colInfo of - FIColumn (PGColInfo _ (PGColumnScalar PGJSON) _) -> + FIColumn (PGColumnInfo _ (PGColumnScalar PGJSON) _) -> throwError (err400 UnexpectedPayload "JSON column can not be part of where clause") FIColumn pgi -> AVCol pgi <$> parseOperationsExpression rhsParser colInfoMap pgi colVal @@ -313,7 +313,7 @@ convBoolRhs' tq = convColRhs :: S.Qual -> AnnBoolExpFldSQL -> State Word64 S.BoolExp convColRhs tableQual = \case - AVCol (PGColInfo cn _ _) opExps -> do + AVCol (PGColumnInfo cn _ _) opExps -> do let bExps = map (mkColCompExp tableQual cn) opExps return $ foldr (S.BEBin S.AndOp) (S.BELit True) bExps @@ -396,7 +396,7 @@ mkColCompExp qual lhsCol = mkCompExp (mkQCol lhsCol) mkCastsExp casts = sqlAll . flip map (M.toList casts) $ \(targetType, operations) -> - let targetAnn = S.mkTypeAnn $ PGTypeSimple targetType + let targetAnn = S.mkTypeAnn $ PGTypeScalar targetType in sqlAll $ map (mkCompExp (S.SETyAnn lhs targetAnn)) operations sqlAll = foldr (S.BEBin S.AndOp) (S.BELit True) diff --git a/server/src-lib/Hasura/RQL/Types.hs b/server/src-lib/Hasura/RQL/Types.hs index db114bd0c02..07a303b792f 100644 --- a/server/src-lib/Hasura/RQL/Types.hs +++ b/server/src-lib/Hasura/RQL/Types.hs @@ -61,7 +61,7 @@ import qualified Network.HTTP.Client as HTTP getFieldInfoMap :: QualifiedTable - -> SchemaCache -> Maybe (FieldInfoMap PGColInfo) + -> SchemaCache -> Maybe (FieldInfoMap PGColumnInfo) getFieldInfoMap tn = fmap _tiFieldInfoMap . M.lookup tn . 
scTables @@ -86,7 +86,7 @@ class (Monad m) => UserInfoM m where askTabInfo :: (QErrM m, CacheRM m) - => QualifiedTable -> m (TableInfo PGColInfo) + => QualifiedTable -> m (TableInfo PGColumnInfo) askTabInfo tabName = do rawSchemaCache <- askSchemaCache liftMaybe (err400 NotExists errMsg) $ M.lookup tabName $ scTables rawSchemaCache @@ -95,7 +95,7 @@ askTabInfo tabName = do askTabInfoFromTrigger :: (QErrM m, CacheRM m) - => TriggerName -> m (TableInfo PGColInfo) + => TriggerName -> m (TableInfo PGColumnInfo) askTabInfoFromTrigger trn = do sc <- askSchemaCache let tabInfos = M.elems $ scTables sc @@ -165,7 +165,7 @@ liftP1WithQCtx r m = askFieldInfoMap :: (QErrM m, CacheRM m) - => QualifiedTable -> m (FieldInfoMap PGColInfo) + => QualifiedTable -> m (FieldInfoMap PGColumnInfo) askFieldInfoMap tabName = do mFieldInfoMap <- getFieldInfoMap tabName <$> askSchemaCache maybe (throw400 NotExists errMsg) return mFieldInfoMap @@ -174,7 +174,7 @@ askFieldInfoMap tabName = do askPGType :: (MonadError QErr m) - => FieldInfoMap PGColInfo + => FieldInfoMap PGColumnInfo -> PGCol -> T.Text -> m PGColumnType diff --git a/server/src-lib/Hasura/RQL/Types/BoolExp.hs b/server/src-lib/Hasura/RQL/Types/BoolExp.hs index c8b1fd2e601..154ceaf0943 100644 --- a/server/src-lib/Hasura/RQL/Types/BoolExp.hs +++ b/server/src-lib/Hasura/RQL/Types/BoolExp.hs @@ -20,7 +20,7 @@ module Hasura.RQL.Types.BoolExp , AnnBoolExpFldSQL , AnnBoolExpSQL , PartialSQLExp(..) - , mkScalarSessionVar + , mkTypedSessionVar , isStaticValue , AnnBoolExpFldPartialSQL , AnnBoolExpPartialSQL @@ -238,7 +238,7 @@ opExpToJPair f = \case opExpsToJSON = object . map (opExpToJPair f) data AnnBoolExpFld a - = AVCol !PGColInfo ![OpExpG a] + = AVCol !PGColumnInfo ![OpExpG a] | AVRel !RelInfo !(AnnBoolExp a) deriving (Show, Eq, Functor, Foldable, Traversable) @@ -286,8 +286,8 @@ data PartialSQLExp | PSESQLExp !S.SQLExp deriving (Show, Eq, Data) -mkScalarSessionVar :: PGType PGColumnType -> SessVar -> PartialSQLExp -mkScalarSessionVar columnType = +mkTypedSessionVar :: PGType PGColumnType -> SessVar -> PartialSQLExp +mkTypedSessionVar columnType = PSESessVar (unsafePGColumnToRepresentation <$> columnType) instance ToJSON PartialSQLExp where diff --git a/server/src-lib/Hasura/RQL/Types/Catalog.hs b/server/src-lib/Hasura/RQL/Types/Catalog.hs index faecdeed0aa..2ea43051151 100644 --- a/server/src-lib/Hasura/RQL/Types/Catalog.hs +++ b/server/src-lib/Hasura/RQL/Types/Catalog.hs @@ -35,7 +35,7 @@ import Hasura.SQL.Types data CatalogTableInfo = CatalogTableInfo - { _ctiColumns :: ![PGRawColInfo] + { _ctiColumns :: ![PGRawColumnInfo] , _ctiConstraints :: ![ConstraintName] , _ctiPrimaryKeyColumns :: ![PGCol] , _ctiViewInfo :: !(Maybe ViewInfo) diff --git a/server/src-lib/Hasura/RQL/Types/Column.hs b/server/src-lib/Hasura/RQL/Types/Column.hs index 75ba84f9b3e..908ca58c997 100644 --- a/server/src-lib/Hasura/RQL/Types/Column.hs +++ b/server/src-lib/Hasura/RQL/Types/Column.hs @@ -8,8 +8,8 @@ module Hasura.RQL.Types.Column , parsePGScalarValues , unsafePGColumnToRepresentation - , PGColInfo(..) - , PGRawColInfo(..) + , PGColumnInfo(..) + , PGRawColumnInfo(..) , onlyIntCols , onlyNumCols , onlyJSONBCols @@ -88,16 +88,16 @@ isScalarColumnWhere f = \case -- | Gets the representation type associated with a 'PGColumnType'. Avoid using this if possible. -- Prefer 'parsePGScalarValue', 'parsePGScalarValues', or --- 'Hasura.RQL.Types.BoolExp.mkScalarSessionVar'. +-- 'Hasura.RQL.Types.BoolExp.mkTypedSessionVar'. 
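--
-- A rough sketch of its behavior (illustrative equations, not part of this patch):
--
-- > unsafePGColumnToRepresentation (PGColumnScalar PGInteger)      == PGInteger
-- > unsafePGColumnToRepresentation (PGColumnEnumReference enumRef) == PGText
--
-- Enum references collapse to 'PGText' because enum values are stored as text, so the original
-- 'PGColumnType' cannot be recovered from the result; prefer the parsing functions above whenever
-- the column type is still needed.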
unsafePGColumnToRepresentation :: PGColumnType -> PGScalarType unsafePGColumnToRepresentation = \case PGColumnScalar scalarType -> scalarType PGColumnEnumReference _ -> PGText -parsePGScalarValue :: (MonadError QErr m) => PGColumnType -> Value -> m (PGScalarTyped PGColValue) +parsePGScalarValue :: (MonadError QErr m) => PGColumnType -> Value -> m (WithScalarType PGScalarValue) parsePGScalarValue columnType value = case columnType of PGColumnScalar scalarType -> - PGScalarTyped scalarType <$> runAesonParser (parsePGValue scalarType) value + WithScalarType scalarType <$> runAesonParser (parsePGValue scalarType) value PGColumnEnumReference (EnumReference tableName enumValues) -> do let typeName = snakeCaseQualObject tableName flip runAesonParser value . withText (T.unpack typeName) $ \textValue -> do @@ -106,20 +106,20 @@ parsePGScalarValue columnType value = case columnType of fail . T.unpack $ "expected one of the values " <> T.intercalate ", " (map dquote enumTextValues) <> " for type " <> typeName <<> ", given " <>> textValue - pure $ PGScalarTyped PGText (PGValText textValue) + pure $ WithScalarType PGText (PGValText textValue) parsePGScalarValues :: (MonadError QErr m) - => PGColumnType -> [Value] -> m (PGScalarTyped [PGColValue]) + => PGColumnType -> [Value] -> m (WithScalarType [PGScalarValue]) parsePGScalarValues columnType values = do scalarValues <- indexedMapM (fmap pstValue . parsePGScalarValue columnType) values - pure $ PGScalarTyped (unsafePGColumnToRepresentation columnType) scalarValues + pure $ WithScalarType (unsafePGColumnToRepresentation columnType) scalarValues -- | “Raw” column info, as stored in the catalog (but not in the schema cache). Instead of -- containing a 'PGColumnType', it only contains a 'PGScalarType', which is combined with the -- 'pcirReferences' field and other table data to eventually resolve the type to a 'PGColumnType'. -data PGRawColInfo - = PGRawColInfo +data PGRawColumnInfo + = PGRawColumnInfo { prciName :: !PGCol , prciType :: !PGScalarType , prciIsNullable :: !Bool @@ -127,30 +127,30 @@ data PGRawColInfo -- ^ only stores single-column references to primary key of foreign tables (used for detecting -- references to enum tables) } deriving (Show, Eq) -$(deriveJSON (aesonDrop 4 snakeCase) ''PGRawColInfo) +$(deriveJSON (aesonDrop 4 snakeCase) ''PGRawColumnInfo) --- | “Resolved” column info, produced from a 'PGRawColInfo' value that has been combined with other +-- | “Resolved” column info, produced from a 'PGRawColumnInfo' value that has been combined with other -- schema information to produce a 'PGColumnType'. -data PGColInfo - = PGColInfo +data PGColumnInfo + = PGColumnInfo { pgiName :: !PGCol , pgiType :: !PGColumnType , pgiIsNullable :: !Bool } deriving (Show, Eq) -$(deriveToJSON (aesonDrop 3 snakeCase) ''PGColInfo) +$(deriveToJSON (aesonDrop 3 snakeCase) ''PGColumnInfo) -onlyIntCols :: [PGColInfo] -> [PGColInfo] +onlyIntCols :: [PGColumnInfo] -> [PGColumnInfo] onlyIntCols = filter (isScalarColumnWhere isIntegerType . pgiType) -onlyNumCols :: [PGColInfo] -> [PGColInfo] +onlyNumCols :: [PGColumnInfo] -> [PGColumnInfo] onlyNumCols = filter (isScalarColumnWhere isNumType . pgiType) -onlyJSONBCols :: [PGColInfo] -> [PGColInfo] +onlyJSONBCols :: [PGColumnInfo] -> [PGColumnInfo] onlyJSONBCols = filter (isScalarColumnWhere (== PGJSONB) . pgiType) -onlyComparableCols :: [PGColInfo] -> [PGColInfo] +onlyComparableCols :: [PGColumnInfo] -> [PGColumnInfo] onlyComparableCols = filter (isScalarColumnWhere isComparableType . 
pgiType) -getColInfos :: [PGCol] -> [PGColInfo] -> [PGColInfo] +getColInfos :: [PGCol] -> [PGColumnInfo] -> [PGColumnInfo] getColInfos cols allColInfos = flip filter allColInfos $ \ci -> pgiName ci `elem` cols diff --git a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs index b2bf1f6cfd7..20167bbd549 100644 --- a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs +++ b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs @@ -424,7 +424,7 @@ incSchemaCacheVer (SchemaCacheVer prev) = data SchemaCache = SchemaCache - { scTables :: !(TableCache PGColInfo) + { scTables :: !(TableCache PGColumnInfo) , scFunctions :: !FunctionCache , scRemoteSchemas :: !RemoteSchemaMap , scAllowlist :: !(HS.HashSet GQLQuery) @@ -467,13 +467,13 @@ emptySchemaCache = SchemaCache M.empty M.empty M.empty HS.empty M.empty GC.emptyGCtx mempty [] -modTableCache :: (CacheRWM m) => TableCache PGColInfo -> m () +modTableCache :: (CacheRWM m) => TableCache PGColumnInfo -> m () modTableCache tc = do sc <- askSchemaCache writeSchemaCache $ sc { scTables = tc } addTableToCache :: (QErrM m, CacheRWM m) - => TableInfo PGColInfo -> m () + => TableInfo PGColumnInfo -> m () addTableToCache ti = do sc <- askSchemaCache assertTableNotExists tn sc @@ -495,7 +495,7 @@ delTableFromCache tn = do getTableInfoFromCache :: (QErrM m) => QualifiedTable -> SchemaCache - -> m (TableInfo PGColInfo) + -> m (TableInfo PGColumnInfo) getTableInfoFromCache tn sc = case M.lookup tn (scTables sc) of Nothing -> throw500 $ "table not found in cache : " <>> tn @@ -511,7 +511,7 @@ assertTableNotExists tn sc = Just _ -> throw500 $ "table exists in cache : " <>> tn modTableInCache :: (QErrM m, CacheRWM m) - => (TableInfo PGColInfo -> m (TableInfo PGColInfo)) + => (TableInfo PGColumnInfo -> m (TableInfo PGColumnInfo)) -> QualifiedTable -> m () modTableInCache f tn = do @@ -522,7 +522,7 @@ modTableInCache f tn = do addColToCache :: (QErrM m, CacheRWM m) - => PGCol -> PGColInfo + => PGCol -> PGColumnInfo -> QualifiedTable -> m () addColToCache cn ci = addFldToCache (fromPGCol cn) (FIColumn ci) @@ -539,7 +539,7 @@ addRelToCache rn ri deps tn = do addFldToCache :: (QErrM m, CacheRWM m) - => FieldName -> FieldInfo PGColInfo + => FieldName -> FieldInfo PGColumnInfo -> QualifiedTable -> m () addFldToCache fn fi = modTableInCache modFieldInfoMap @@ -578,7 +578,7 @@ delRelFromCache rn tn = do updColInCache :: (QErrM m, CacheRWM m) - => PGCol -> PGColInfo + => PGCol -> PGColumnInfo -> QualifiedTable -> m () updColInCache cn ci tn = do delColFromCache cn tn diff --git a/server/src-lib/Hasura/SQL/DML.hs b/server/src-lib/Hasura/SQL/DML.hs index ff8bd543d4b..fd29daf6ea0 100644 --- a/server/src-lib/Hasura/SQL/DML.hs +++ b/server/src-lib/Hasura/SQL/DML.hs @@ -229,19 +229,19 @@ mkTypeAnn :: PGType PGScalarType -> TypeAnn mkTypeAnn = TypeAnn . 
toSQLTxt intTypeAnn :: TypeAnn -intTypeAnn = mkTypeAnn $ PGTypeSimple PGInteger +intTypeAnn = mkTypeAnn $ PGTypeScalar PGInteger textTypeAnn :: TypeAnn -textTypeAnn = mkTypeAnn $ PGTypeSimple PGText +textTypeAnn = mkTypeAnn $ PGTypeScalar PGText textArrTypeAnn :: TypeAnn textArrTypeAnn = mkTypeAnn $ PGTypeArray PGText jsonTypeAnn :: TypeAnn -jsonTypeAnn = mkTypeAnn $ PGTypeSimple PGJSON +jsonTypeAnn = mkTypeAnn $ PGTypeScalar PGJSON jsonbTypeAnn :: TypeAnn -jsonbTypeAnn = mkTypeAnn $ PGTypeSimple PGJSONB +jsonbTypeAnn = mkTypeAnn $ PGTypeScalar PGJSONB data CountType = CTStar @@ -287,7 +287,7 @@ data SQLExp deriving (Show, Eq, Data) withTyAnn :: PGScalarType -> SQLExp -> SQLExp -withTyAnn colTy v = SETyAnn v . mkTypeAnn $ PGTypeSimple colTy +withTyAnn colTy v = SETyAnn v . mkTypeAnn $ PGTypeScalar colTy instance J.ToJSON SQLExp where toJSON = J.toJSON . toSQLTxt diff --git a/server/src-lib/Hasura/SQL/Types.hs b/server/src-lib/Hasura/SQL/Types.hs index d3fbc123b13..05253147d79 100644 --- a/server/src-lib/Hasura/SQL/Types.hs +++ b/server/src-lib/Hasura/SQL/Types.hs @@ -425,8 +425,8 @@ geoTypes = [PGGeometry, PGGeography] isGeoType :: PGScalarType -> Bool isGeoType = (`elem` geoTypes) -data PGScalarTyped a - = PGScalarTyped +data WithScalarType a + = WithScalarType { pstType :: !PGScalarType , pstValue :: !a } deriving (Show, Eq, Functor, Foldable, Traversable) @@ -440,13 +440,13 @@ data PGScalarTyped a -- TODO: This is incorrect modeling, as 'PGScalarType' will capture anything (under 'PGUnknown'). -- This should be fixed when support for all types is merged. data PGType a - = PGTypeSimple !a + = PGTypeScalar !a | PGTypeArray !a deriving (Show, Eq, Data, Functor) $(deriveJSON defaultOptions{constructorTagModifier = drop 6} ''PGType) instance (ToSQL a) => ToSQL (PGType a) where toSQL = \case - PGTypeSimple ty -> toSQL ty + PGTypeScalar ty -> toSQL ty -- typename array is an sql standard way of declaring types PGTypeArray ty -> toSQL ty <> " array" diff --git a/server/src-lib/Hasura/SQL/Value.hs b/server/src-lib/Hasura/SQL/Value.hs index 13560c5713b..1743b528a46 100644 --- a/server/src-lib/Hasura/SQL/Value.hs +++ b/server/src-lib/Hasura/SQL/Value.hs @@ -25,7 +25,7 @@ import qualified Database.PostgreSQL.LibPQ as PQ import qualified PostgreSQL.Binary.Encoding as PE -- Binary value. 
Used in prepared sq -data PGColValue +data PGScalarValue = PGValInteger !Int32 | PGValSmallInt !Int16 | PGValBigInt !Int64 @@ -58,7 +58,7 @@ instance ToJSON TxtEncodedPGVal where TENull -> Null TELit t -> String t -txtEncodedPGVal :: PGColValue -> TxtEncodedPGVal +txtEncodedPGVal :: PGScalarValue -> TxtEncodedPGVal txtEncodedPGVal colVal = case colVal of PGValInteger i -> TELit $ T.pack $ show i PGValSmallInt i -> TELit $ T.pack $ show i @@ -85,12 +85,12 @@ txtEncodedPGVal colVal = case colVal of AE.encodeToLazyText o PGValUnknown t -> TELit t -txtEncoder :: PGColValue -> S.SQLExp +txtEncoder :: PGScalarValue -> S.SQLExp txtEncoder colVal = case txtEncodedPGVal colVal of TENull -> S.SEUnsafe "NULL" TELit t -> S.SELit t -binEncoder :: PGColValue -> Q.PrepArg +binEncoder :: PGScalarValue -> Q.PrepArg binEncoder colVal = case colVal of PGValInteger i -> Q.toPrepVal i @@ -135,7 +135,7 @@ textToPrepVal t = parsePGValue' :: PGScalarType -> Value - -> AT.Parser PGColValue + -> AT.Parser PGScalarValue parsePGValue' ty v = case (ty, v) of (_, Null) -> return $ PGNull ty (PGSmallInt, val) -> PGValSmallInt <$> parseJSON val @@ -160,7 +160,7 @@ parsePGValue' ty v = case (ty, v) of (PGUnknown _, String t) -> return $ PGValUnknown t (PGUnknown tyName, _) -> fail $ "A string is expected for type : " ++ T.unpack tyName -parsePGValue :: PGScalarType -> Value -> AT.Parser PGColValue +parsePGValue :: PGScalarType -> Value -> AT.Parser PGScalarValue parsePGValue pct val = case val of String t -> parsePGValue' pct val <|> return (PGValUnknown t) @@ -185,13 +185,13 @@ toPrepParam :: Int -> PGScalarType -> S.SQLExp toPrepParam i ty = withGeoVal ty $ S.SEPrep i -toBinaryValue :: PGScalarTyped PGColValue -> Q.PrepArg +toBinaryValue :: WithScalarType PGScalarValue -> Q.PrepArg toBinaryValue = binEncoder . pstValue -toTxtValue :: PGScalarTyped PGColValue -> S.SQLExp -toTxtValue (PGScalarTyped ty val) = S.withTyAnn ty . withGeoVal ty $ txtEncoder val +toTxtValue :: WithScalarType PGScalarValue -> S.SQLExp +toTxtValue (WithScalarType ty val) = S.withTyAnn ty . withGeoVal ty $ txtEncoder val -pgColValueToInt :: PGColValue -> Maybe Int +pgColValueToInt :: PGScalarValue -> Maybe Int pgColValueToInt (PGValInteger i) = Just $ fromIntegral i pgColValueToInt (PGValSmallInt i) = Just $ fromIntegral i pgColValueToInt (PGValBigInt i) = Just $ fromIntegral i diff --git a/server/src-lib/Hasura/Server/Telemetry.hs b/server/src-lib/Hasura/Server/Telemetry.hs index 2e72175aa62..6d7cd4d16f5 100644 --- a/server/src-lib/Hasura/Server/Telemetry.hs +++ b/server/src-lib/Hasura/Server/Telemetry.hs @@ -155,10 +155,10 @@ computeMetrics sc = calcPerms :: (RolePermInfo -> Maybe a) -> [RolePermInfo] -> Int calcPerms fn perms = length $ catMaybes $ map fn perms - relsOfTbl :: TableInfo PGColInfo -> [RelInfo] + relsOfTbl :: TableInfo PGColumnInfo -> [RelInfo] relsOfTbl = rights . Map.elems . Map.map fieldInfoToEither . _tiFieldInfoMap - permsOfTbl :: TableInfo PGColInfo -> [(RoleName, RolePermInfo)] + permsOfTbl :: TableInfo PGColumnInfo -> [(RoleName, RolePermInfo)] permsOfTbl = Map.toList . 
_tiRolePermInfoMap From 00862fcad805e6c283a6598493383ed36119708e Mon Sep 17 00:00:00 2001 From: Alexis King Date: Fri, 23 Aug 2019 07:57:09 -0500 Subject: [PATCH 06/10] Centralize handling of expected PostgreSQL errors --- server/src-lib/Hasura/Db.hs | 44 ++++++- .../Hasura/GraphQL/Execute/LiveQuery.hs | 29 +---- server/src-lib/Hasura/RQL/DML/Internal.hs | 65 +++------- server/src-lib/Hasura/SQL/Error.hs | 112 +++++++++--------- 4 files changed, 116 insertions(+), 134 deletions(-) diff --git a/server/src-lib/Hasura/Db.hs b/server/src-lib/Hasura/Db.hs index 25796f478ea..6d1ec196b49 100644 --- a/server/src-lib/Hasura/Db.hs +++ b/server/src-lib/Hasura/Db.hs @@ -12,17 +12,21 @@ module Hasura.Db , RespTx , LazyRespTx , defaultTxErrorHandler + , mkTxErrorHandler ) where +import Control.Lens import Control.Monad.Validate -import qualified Data.Aeson.Extended as J -import qualified Database.PG.Query as Q +import qualified Data.Aeson.Extended as J +import qualified Database.PG.Query as Q +import qualified Database.PG.Query.Connection as Q import Hasura.EncJSON import Hasura.Prelude import Hasura.RQL.Types.Error import Hasura.RQL.Types.Permission +import Hasura.SQL.Error import Hasura.SQL.Types data PGExecCtx @@ -79,9 +83,39 @@ setHeadersTx uVars = pgFmtLit (J.encodeToStrictText uVars) defaultTxErrorHandler :: Q.PGTxErr -> QErr -defaultTxErrorHandler txe = - let e = internalError "postgres query error" - in e {qeInternal = Just $ J.toJSON txe} +defaultTxErrorHandler = mkTxErrorHandler (const False) + +-- | Constructs a transaction error handler given a predicate that determines which errors are +-- expected and should be reported to the user. All other errors are considered internal errors. +mkTxErrorHandler :: (PGErrorType -> Bool) -> Q.PGTxErr -> QErr +mkTxErrorHandler isExpectedError txe = fromMaybe unexpectedError expectedError + where + unexpectedError = (internalError "postgres query error") { qeInternal = Just $ J.toJSON txe } + expectedError = uncurry err400 <$> do + errorDetail <- Q.getPGStmtErr txe + message <- Q.edMessage errorDetail + errorType <- pgErrorType errorDetail + guard $ isExpectedError errorType + pure $ case errorType of + PGIntegrityConstraintViolation code -> + let cv = (ConstraintViolation,) + customMessage = (code ^? _Just._PGErrorSpecific) <&> \case + PGRestrictViolation -> cv "Can not delete or update due to data being referred. " + PGNotNullViolation -> cv "Not-NULL violation. " + PGForeignKeyViolation -> cv "Foreign key violation. " + PGUniqueViolation -> cv "Uniqueness violation. " + PGCheckViolation -> (PermissionError, "Check constraint violation. ") + PGExclusionViolation -> cv "Exclusion violation. 
" + in maybe (ConstraintViolation, message) (fmap (<> message)) customMessage + + PGDataException code -> case code of + Just (PGErrorSpecific PGInvalidEscapeSequence) -> (BadRequest, message) + _ -> (DataException, message) + + PGSyntaxErrorOrAccessRuleViolation code -> (ConstraintError,) $ case code of + Just (PGErrorSpecific PGInvalidColumnReference) -> + "there is no unique or exclusion constraint on target column(s)" + _ -> message withUserInfo :: UserInfo -> LazyTx QErr a -> LazyTx QErr a withUserInfo uInfo = \case diff --git a/server/src-lib/Hasura/GraphQL/Execute/LiveQuery.hs b/server/src-lib/Hasura/GraphQL/Execute/LiveQuery.hs index c2d5c25b8cc..974e4dafc45 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/LiveQuery.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/LiveQuery.hs @@ -24,6 +24,7 @@ module Hasura.GraphQL.Execute.LiveQuery , subsOpFromPGAST ) where +import Control.Lens import Data.Has import qualified Control.Concurrent.STM as STM @@ -32,7 +33,6 @@ import qualified Data.HashMap.Strict as Map import qualified Data.HashSet as Set import qualified Data.Text as T import qualified Database.PG.Query as Q -import qualified Database.PG.Query.Connection as Q import qualified Language.GraphQL.Draft.Syntax as G import qualified Hasura.GraphQL.Execute.LiveQuery.Fallback as LQF @@ -49,6 +49,7 @@ import Hasura.Prelude import Hasura.RQL.DML.Select (asSingleRowJsonResp) import Hasura.RQL.Types +import Hasura.SQL.Error import Hasura.SQL.Types import Hasura.SQL.Value @@ -272,7 +273,7 @@ validateAnnVarValsOnPg pgExecCtx annVarVals = do let valSel = mkValidationSel $ Map.elems annVarVals Q.Discard _ <- runTx' $ liftTx $ - Q.rawQE valPgErrHandler (Q.fromBuilder $ toSQL valSel) [] False + Q.rawQE dataExnErrHandler (Q.fromBuilder $ toSQL valSel) [] False return $ fmap (txtEncodedPGVal . pstValue) annVarVals where @@ -283,27 +284,9 @@ validateAnnVarValsOnPg pgExecCtx annVarVals = do res <- liftIO $ runExceptT (runLazyTx' pgExecCtx tx) liftEither res --- | The error handler that is used to errors in the validation SQL. --- It tries to specifically read few PG error codes which indicate --- that the format of the value provided for a type is incorrect -valPgErrHandler :: Q.PGTxErr -> QErr -valPgErrHandler txErr = - fromMaybe (defaultTxErrorHandler txErr) $ do - stmtErr <- Q.getPGStmtErr txErr - codeMsg <- getPGCodeMsg stmtErr - (qErrCode, qErrMsg) <- extractError codeMsg - return $ err400 qErrCode qErrMsg - where - getPGCodeMsg pged = - (,) <$> Q.edStatusCode pged <*> Q.edMessage pged - extractError = \case - -- invalid text representation - ("22P02", msg) -> return (DataException, msg) - -- invalid parameter value - ("22023", msg) -> return (DataException, msg) - -- invalid input values - ("22007", msg) -> return (DataException, msg) - _ -> Nothing + -- Explicitly look for the class of errors raised when the format of a value provided + -- for a type is incorrect. 
+ dataExnErrHandler = mkTxErrorHandler (has _PGDataException) -- | Use the existing plan with new variables and session variables -- to create a live query operation diff --git a/server/src-lib/Hasura/RQL/DML/Internal.hs b/server/src-lib/Hasura/RQL/DML/Internal.hs index 2c84fc3adf0..cd181471f5d 100644 --- a/server/src-lib/Hasura/RQL/DML/Internal.hs +++ b/server/src-lib/Hasura/RQL/DML/Internal.hs @@ -1,22 +1,22 @@ module Hasura.RQL.DML.Internal where -import qualified Database.PG.Query as Q -import qualified Database.PG.Query.Connection as Q -import qualified Hasura.SQL.DML as S +import qualified Database.PG.Query as Q +import qualified Hasura.SQL.DML as S import Hasura.Prelude import Hasura.RQL.GBoolExp import Hasura.RQL.Types +import Hasura.SQL.Error import Hasura.SQL.Types import Hasura.SQL.Value import Control.Lens import Data.Aeson.Types -import qualified Data.HashMap.Strict as M -import qualified Data.HashSet as HS -import qualified Data.Sequence as DS -import qualified Data.Text as T +import qualified Data.HashMap.Strict as M +import qualified Data.HashSet as HS +import qualified Data.Sequence as DS +import qualified Data.Text as T newtype DMLP1 a = DMLP1 {unDMLP1 :: StateT (DS.Seq Q.PrepArg) P1 a} @@ -262,11 +262,13 @@ convBoolExp cim spi be sessVarBldr prepValBldr = do (S.mkTypeAnn $ PGTypeArray scalarType) dmlTxErrorHandler :: Q.PGTxErr -> QErr -dmlTxErrorHandler p2Res = - case err of - Nothing -> defaultTxErrorHandler p2Res - Just (code, msg) -> err400 code msg - where err = simplifyError p2Res +dmlTxErrorHandler = mkTxErrorHandler $ \case + PGIntegrityConstraintViolation _ -> True + PGDataException _ -> True + PGSyntaxErrorOrAccessRuleViolation (Just (PGErrorSpecific code)) -> code `elem` + [ PGUndefinedObject + , PGInvalidColumnReference ] + _ -> False toJSONableExp :: Bool -> PGColumnType -> S.SQLExp -> S.SQLExp toJSONableExp strfyNum colTy expn @@ -289,45 +291,6 @@ validateHeaders depHeaders = do unless (hdr `elem` map T.toLower headers) $ throw400 NotFound $ hdr <<> " header is expected but not found" -simplifyError :: Q.PGTxErr -> Maybe (Code, T.Text) -simplifyError txErr = do - stmtErr <- Q.getPGStmtErr txErr - codeMsg <- getPGCodeMsg stmtErr - extractError codeMsg - where - getPGCodeMsg pged = - (,) <$> Q.edStatusCode pged <*> Q.edMessage pged - extractError = \case - -- restrict violation - ("23001", msg) -> - return (ConstraintViolation, "Can not delete or update due to data being referred. " <> msg) - -- not null violation - ("23502", msg) -> - return (ConstraintViolation, "Not-NULL violation. " <> msg) - -- foreign key violation - ("23503", msg) -> - return (ConstraintViolation, "Foreign key violation. " <> msg) - -- unique violation - ("23505", msg) -> - return (ConstraintViolation, "Uniqueness violation. " <> msg) - -- check violation - ("23514", msg) -> - return (PermissionError, "Check constraint violation. 
" <> msg) - -- invalid text representation - ("22P02", msg) -> return (DataException, msg) - -- invalid parameter value - ("22023", msg) -> return (DataException, msg) - -- no unique constraint on the columns - ("42P10", _) -> - return (ConstraintError, "there is no unique or exclusion constraint on target column(s)") - -- no constraint - ("42704", msg) -> return (ConstraintError, msg) - -- invalid input values - ("22007", msg) -> return (DataException, msg) - -- invalid escape sequence - ("22025", msg) -> return (BadRequest, msg) - _ -> Nothing - -- validate limit and offset int values onlyPositiveInt :: MonadError QErr m => Int -> m () onlyPositiveInt i = when (i < 0) $ throw400 NotSupported diff --git a/server/src-lib/Hasura/SQL/Error.hs b/server/src-lib/Hasura/SQL/Error.hs index 25d6def5a5d..37f96e56a10 100644 --- a/server/src-lib/Hasura/SQL/Error.hs +++ b/server/src-lib/Hasura/SQL/Error.hs @@ -1,5 +1,19 @@ -- | Functions and datatypes for interpreting Postgres errors. -module Hasura.SQL.Error where +module Hasura.SQL.Error + ( PGErrorType(..) + , _PGDataException + , _PGIntegrityConstraintViolation + , _PGSyntaxErrorOrAccessRuleViolation + , pgErrorType + + , PGErrorCode(..) + , _PGErrorGeneric + , _PGErrorSpecific + + , PGDataException(..) + , PGIntegrityConstraintViolation(..) + , PGSyntaxErrorOrAccessRuleViolation(..) + ) where import Hasura.Prelude @@ -11,62 +25,64 @@ import qualified Database.PG.Query.Connection as Q -- | The top-level error code type. Errors in Postgres are divided into different /classes/, which -- are further subdivided into individual error codes. Even if a particular status code is not known -- to the application, it’s possible to determine its class and handle it appropriately. -data PgErrorType - = PgDataException !(Maybe (PgErrorCode PgDataException)) - | PgIntegrityConstraintViolation !(Maybe (PgErrorCode PgIntegrityConstraintViolation)) - | PgSyntaxErrorOrAccessRuleViolation !(Maybe (PgErrorCode PgSyntaxErrorOrAccessRuleViolation)) +data PGErrorType + = PGDataException !(Maybe (PGErrorCode PGDataException)) + | PGIntegrityConstraintViolation !(Maybe (PGErrorCode PGIntegrityConstraintViolation)) + | PGSyntaxErrorOrAccessRuleViolation !(Maybe (PGErrorCode PGSyntaxErrorOrAccessRuleViolation)) deriving (Show, Eq) -data PgErrorCode a - = PgErrorGeneric +data PGErrorCode a + = PGErrorGeneric -- ^ represents errors that have the non-specific @000@ status code - | PgErrorSpecific !a + | PGErrorSpecific !a -- ^ represents errors with a known, more specific status code deriving (Show, Eq, Functor) -data PgDataException - = PgInvalidDatetimeFormat - | PgInvalidParameterValue - | PgInvalidTextRepresentation +data PGDataException + = PGInvalidDatetimeFormat + | PGInvalidParameterValue + | PGInvalidEscapeSequence + | PGInvalidTextRepresentation deriving (Show, Eq) -data PgIntegrityConstraintViolation - = PgRestrictViolation - | PgNotNullViolation - | PgForeignKeyViolation - | PgUniqueViolation - | PgCheckViolation - | PgExclusionViolation +data PGIntegrityConstraintViolation + = PGRestrictViolation + | PGNotNullViolation + | PGForeignKeyViolation + | PGUniqueViolation + | PGCheckViolation + | PGExclusionViolation deriving (Show, Eq) -data PgSyntaxErrorOrAccessRuleViolation - = PgUndefinedObject - | PgInvalidColumnReference +data PGSyntaxErrorOrAccessRuleViolation + = PGUndefinedObject + | PGInvalidColumnReference deriving (Show, Eq) -$(makePrisms ''PgErrorType) -$(makePrisms ''PgErrorCode) +$(makePrisms ''PGErrorType) +$(makePrisms ''PGErrorCode) -pgErrorType :: 
Q.PGStmtErrDetail -> Maybe PgErrorType +pgErrorType :: Q.PGStmtErrDetail -> Maybe PGErrorType pgErrorType errorDetails = parseTypes =<< Q.edStatusCode errorDetails where parseTypes fullCodeText = choice - [ withClass "22" PgDataException - [ code "007" PgInvalidDatetimeFormat - , code "023" PgInvalidParameterValue - , code "P02" PgInvalidTextRepresentation + [ withClass "22" PGDataException + [ code "007" PGInvalidDatetimeFormat + , code "023" PGInvalidParameterValue + , code "025" PGInvalidEscapeSequence + , code "P02" PGInvalidTextRepresentation ] - , withClass "23" PgIntegrityConstraintViolation - [ code "001" PgRestrictViolation - , code "502" PgNotNullViolation - , code "503" PgForeignKeyViolation - , code "505" PgUniqueViolation - , code "514" PgCheckViolation - , code "P01" PgExclusionViolation + , withClass "23" PGIntegrityConstraintViolation + [ code "001" PGRestrictViolation + , code "502" PGNotNullViolation + , code "503" PGForeignKeyViolation + , code "505" PGUniqueViolation + , code "514" PGCheckViolation + , code "P01" PGExclusionViolation ] - , withClass "42" PgSyntaxErrorOrAccessRuleViolation - [ code "704" PgUndefinedObject - , code "P10" PgInvalidColumnReference + , withClass "42" PGSyntaxErrorOrAccessRuleViolation + [ code "704" PGUndefinedObject + , code "P10" PGInvalidColumnReference ] ] where @@ -76,20 +92,6 @@ pgErrorType errorDetails = parseTypes =<< Q.edStatusCode errorDetails withClass expectedClassText mkClass codes = guard (classText == expectedClassText) $> mkClass (choice codes) - code :: T.Text -> a -> Maybe (PgErrorCode a) + code :: T.Text -> a -> Maybe (PGErrorCode a) code expectedCodeText codeValue = - guard (codeText == expectedCodeText) $> PgErrorSpecific codeValue - -pgErrorToText :: Q.PGStmtErrDetail -> T.Text -pgErrorToText errorDetail = - fromMaybe "postgres error" (Q.edMessage errorDetail) - <> maybe "" formatDescription (Q.edDescription errorDetail) - <> maybe "" formatHint (Q.edHint errorDetail) - where - formatDescription description = ";\n" <> prefixLines " " description - formatHint hint = "\n hint: " <> prefixLinesExceptFirst " " hint - - prefixLinesExceptFirst prefix content = - T.intercalate ("\n" <> prefix) (T.lines content) - prefixLines prefix content = - prefix <> prefixLinesExceptFirst prefix content + guard (codeText == expectedCodeText) $> PGErrorSpecific codeValue From d4dcd28baae7b78de666ac2a0fe64a3b516195f2 Mon Sep 17 00:00:00 2001 From: Alexis King Date: Mon, 12 Aug 2019 14:41:05 -0500 Subject: [PATCH 07/10] Slightly rearrange and cleanup Hasura.SQL.Value --- server/src-lib/Hasura/SQL/Value.hs | 178 +++++++++++++---------------- 1 file changed, 77 insertions(+), 101 deletions(-) diff --git a/server/src-lib/Hasura/SQL/Value.hs b/server/src-lib/Hasura/SQL/Value.hs index 1743b528a46..d4b4b54c724 100644 --- a/server/src-lib/Hasura/SQL/Value.hs +++ b/server/src-lib/Hasura/SQL/Value.hs @@ -1,4 +1,18 @@ -module Hasura.SQL.Value where +module Hasura.SQL.Value + ( PGScalarValue(..) 
+ , pgColValueToInt + , withGeoVal + , parsePGValue + + , TxtEncodedPGVal + , txtEncodedPGVal + + , binEncoder + , txtEncoder + , toBinaryValue + , toTxtValue + , toPrepParam + ) where import Hasura.SQL.GeoJSON import Hasura.SQL.Time @@ -9,7 +23,6 @@ import qualified Database.PG.Query.PTI as PTI import qualified Hasura.SQL.DML as S import Data.Aeson -import Data.Aeson.Internal import Data.Int import Data.Scientific import Data.Time @@ -46,6 +59,46 @@ data PGScalarValue | PGValUnknown !T.Text deriving (Show, Eq) +pgColValueToInt :: PGScalarValue -> Maybe Int +pgColValueToInt (PGValInteger i) = Just $ fromIntegral i +pgColValueToInt (PGValSmallInt i) = Just $ fromIntegral i +pgColValueToInt (PGValBigInt i) = Just $ fromIntegral i +pgColValueToInt _ = Nothing + +withGeoVal :: PGScalarType -> S.SQLExp -> S.SQLExp +withGeoVal ty v + | isGeoType ty = S.SEFnApp "ST_GeomFromGeoJSON" [v] Nothing + | otherwise = v + +parsePGValue :: PGScalarType -> Value -> AT.Parser PGScalarValue +parsePGValue ty val = case (ty, val) of + (_ , Null) -> pure $ PGNull ty + (PGUnknown _, String t) -> pure $ PGValUnknown t + (_ , String t) -> parseTyped <|> pure (PGValUnknown t) + (_ , _) -> parseTyped + where + parseTyped = case ty of + PGSmallInt -> PGValSmallInt <$> parseJSON val + PGInteger -> PGValInteger <$> parseJSON val + PGBigInt -> PGValBigInt <$> parseJSON val + PGSerial -> PGValInteger <$> parseJSON val + PGBigSerial -> PGValBigInt <$> parseJSON val + PGFloat -> PGValFloat <$> parseJSON val + PGDouble -> PGValDouble <$> parseJSON val + PGNumeric -> PGValNumeric <$> parseJSON val + PGBoolean -> PGValBoolean <$> parseJSON val + PGChar -> PGValChar <$> parseJSON val + PGVarchar -> PGValVarchar <$> parseJSON val + PGText -> PGValText <$> parseJSON val + PGDate -> PGValDate <$> parseJSON val + PGTimeStampTZ -> PGValTimeStampTZ <$> parseJSON val + PGTimeTZ -> PGValTimeTZ <$> parseJSON val + PGJSON -> PGValJSON . Q.JSON <$> parseJSON val + PGJSONB -> PGValJSONB . 
Q.JSONB <$> parseJSON val + PGGeometry -> PGValGeo <$> parseJSON val + PGGeography -> PGValGeo <$> parseJSON val + PGUnknown tyName -> fail $ "A string is expected for type : " ++ T.unpack tyName + data TxtEncodedPGVal = TENull | TELit !Text @@ -85,114 +138,37 @@ txtEncodedPGVal colVal = case colVal of AE.encodeToLazyText o PGValUnknown t -> TELit t +binEncoder :: PGScalarValue -> Q.PrepArg +binEncoder colVal = case colVal of + PGValInteger i -> Q.toPrepVal i + PGValSmallInt i -> Q.toPrepVal i + PGValBigInt i -> Q.toPrepVal i + PGValFloat f -> Q.toPrepVal f + PGValDouble d -> Q.toPrepVal d + PGValNumeric sc -> Q.toPrepVal sc + PGValBoolean b -> Q.toPrepVal b + PGValChar t -> Q.toPrepVal t + PGValVarchar t -> Q.toPrepVal t + PGValText t -> Q.toPrepVal t + PGValDate d -> Q.toPrepVal d + PGValTimeStampTZ u -> Q.toPrepVal u + PGValTimeTZ (ZonedTimeOfDay t z) -> Q.toPrepValHelper PTI.timetz PE.timetz_int (t, z) + PGNull ty -> (pgTypeOid ty, Nothing) + PGValJSON u -> Q.toPrepVal u + PGValJSONB u -> Q.toPrepVal u + PGValGeo o -> Q.toPrepVal $ TL.toStrict $ AE.encodeToLazyText o + PGValUnknown t -> (PTI.auto, Just (TE.encodeUtf8 t, PQ.Text)) + txtEncoder :: PGScalarValue -> S.SQLExp txtEncoder colVal = case txtEncodedPGVal colVal of TENull -> S.SEUnsafe "NULL" TELit t -> S.SELit t -binEncoder :: PGScalarValue -> Q.PrepArg -binEncoder colVal = case colVal of - PGValInteger i -> - Q.toPrepVal i - PGValSmallInt i -> - Q.toPrepVal i - PGValBigInt i -> - Q.toPrepVal i - PGValFloat f -> - Q.toPrepVal f - PGValDouble d -> - Q.toPrepVal d - PGValNumeric sc -> - Q.toPrepVal sc - PGValBoolean b -> - Q.toPrepVal b - PGValChar t -> - Q.toPrepVal t - PGValVarchar t -> - Q.toPrepVal t - PGValText t -> - Q.toPrepVal t - PGValDate d -> - Q.toPrepVal d - PGValTimeStampTZ u -> - Q.toPrepVal u - PGValTimeTZ (ZonedTimeOfDay t z) -> - Q.toPrepValHelper PTI.timetz PE.timetz_int (t, z) - PGNull ty -> - (pgTypeOid ty, Nothing) - PGValJSON u -> - Q.toPrepVal u - PGValJSONB u -> - Q.toPrepVal u - PGValGeo o -> - Q.toPrepVal $ TL.toStrict $ AE.encodeToLazyText o - PGValUnknown t -> - textToPrepVal t - -textToPrepVal :: Text -> Q.PrepArg -textToPrepVal t = - (PTI.auto, Just (TE.encodeUtf8 t, PQ.Text)) - -parsePGValue' :: PGScalarType - -> Value - -> AT.Parser PGScalarValue -parsePGValue' ty v = case (ty, v) of - (_, Null) -> return $ PGNull ty - (PGSmallInt, val) -> PGValSmallInt <$> parseJSON val - (PGInteger, val) -> PGValInteger <$> parseJSON val - (PGBigInt, val) -> PGValBigInt <$> parseJSON val - (PGSerial, val) -> PGValInteger <$> parseJSON val - (PGBigSerial, val) -> PGValBigInt <$> parseJSON val - (PGFloat, val) -> PGValFloat <$> parseJSON val - (PGDouble, val) -> PGValDouble <$> parseJSON val - (PGNumeric, val) -> PGValNumeric <$> parseJSON val - (PGBoolean, val) -> PGValBoolean <$> parseJSON val - (PGChar, val) -> PGValChar <$> parseJSON val - (PGVarchar, val) -> PGValVarchar <$> parseJSON val - (PGText, val) -> PGValText <$> parseJSON val - (PGDate, val) -> PGValDate <$> parseJSON val - (PGTimeStampTZ, val) -> PGValTimeStampTZ <$> parseJSON val - (PGTimeTZ, val) -> PGValTimeTZ <$> parseJSON val - (PGJSON, val) -> PGValJSON . Q.JSON <$> parseJSON val - (PGJSONB, val) -> PGValJSONB . 
Q.JSONB <$> parseJSON val - (PGGeometry, val) -> PGValGeo <$> parseJSON val - (PGGeography, val) -> PGValGeo <$> parseJSON val - (PGUnknown _, String t) -> return $ PGValUnknown t - (PGUnknown tyName, _) -> fail $ "A string is expected for type : " ++ T.unpack tyName - -parsePGValue :: PGScalarType -> Value -> AT.Parser PGScalarValue -parsePGValue pct val = - case val of - String t -> parsePGValue' pct val <|> return (PGValUnknown t) - _ -> parsePGValue' pct val - -readEitherTxt :: (Read a) => T.Text -> Either String a -readEitherTxt = readEither . T.unpack - -iresToEither :: IResult a -> Either String a -iresToEither (IError _ msg) = Left msg -iresToEither (ISuccess a) = return a - -pgValFromJVal :: (FromJSON a) => Value -> Either String a -pgValFromJVal = iresToEither . ifromJSON - -withGeoVal :: PGScalarType -> S.SQLExp -> S.SQLExp -withGeoVal ty v - | isGeoType ty = S.SEFnApp "ST_GeomFromGeoJSON" [v] Nothing - | otherwise = v - toPrepParam :: Int -> PGScalarType -> S.SQLExp -toPrepParam i ty = - withGeoVal ty $ S.SEPrep i +toPrepParam i ty = withGeoVal ty $ S.SEPrep i toBinaryValue :: WithScalarType PGScalarValue -> Q.PrepArg toBinaryValue = binEncoder . pstValue toTxtValue :: WithScalarType PGScalarValue -> S.SQLExp toTxtValue (WithScalarType ty val) = S.withTyAnn ty . withGeoVal ty $ txtEncoder val - -pgColValueToInt :: PGScalarValue -> Maybe Int -pgColValueToInt (PGValInteger i) = Just $ fromIntegral i -pgColValueToInt (PGValSmallInt i) = Just $ fromIntegral i -pgColValueToInt (PGValBigInt i) = Just $ fromIntegral i -pgColValueToInt _ = Nothing From c46ecc72dc7654bdcfa4b98feaba1d7c307ee03e Mon Sep 17 00:00:00 2001 From: Alexis King Date: Tue, 13 Aug 2019 18:34:37 -0500 Subject: [PATCH 08/10] Refactor non-table functions out of Hasura.RQL.DDL.Table --- server/graphql-engine.cabal | 9 +- server/src-exec/Migrate.hs | 14 +- server/src-exec/Ops.hs | 2 +- server/src-lib/Hasura/RQL/DDL/Metadata.hs | 24 +- .../Hasura/RQL/DDL/Relationship/Rename.hs | 12 +- server/src-lib/Hasura/RQL/DDL/Schema.hs | 117 +++++ server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs | 353 ++++++++++++++ .../Hasura/RQL/DDL/Schema/Cache.hs-boot | 20 + .../src-lib/Hasura/RQL/DDL/Schema/Catalog.hs | 39 ++ server/src-lib/Hasura/RQL/DDL/Schema/Table.hs | 455 ++---------------- server/src-lib/Hasura/RQL/Types/Catalog.hs | 17 +- .../src-lib/Hasura/RQL/Types/SchemaCache.hs | 4 - server/src-lib/Hasura/Server/App.hs | 2 +- server/src-lib/Hasura/Server/Query.hs | 3 +- server/src-lib/Hasura/Server/SchemaUpdate.hs | 22 +- 15 files changed, 606 insertions(+), 487 deletions(-) create mode 100644 server/src-lib/Hasura/RQL/DDL/Schema.hs create mode 100644 server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs create mode 100644 server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs-boot create mode 100644 server/src-lib/Hasura/RQL/DDL/Schema/Catalog.hs diff --git a/server/graphql-engine.cabal b/server/graphql-engine.cabal index 137318fd74b..ae471383803 100644 --- a/server/graphql-engine.cabal +++ b/server/graphql-engine.cabal @@ -190,8 +190,11 @@ library , Hasura.RQL.DDL.Relationship , Hasura.RQL.DDL.Relationship.Rename , Hasura.RQL.DDL.Relationship.Types - , Hasura.RQL.DDL.Schema.Enum + , Hasura.RQL.DDL.Schema + , Hasura.RQL.DDL.Schema.Cache + , Hasura.RQL.DDL.Schema.Catalog , Hasura.RQL.DDL.Schema.Diff + , Hasura.RQL.DDL.Schema.Enum , Hasura.RQL.DDL.Schema.Function , Hasura.RQL.DDL.Schema.Rename , Hasura.RQL.DDL.Schema.Table @@ -285,6 +288,7 @@ library default-extensions: ApplicativeDo BangPatterns + ConstraintKinds DeriveDataTypeable DeriveFoldable 
DeriveFunctor @@ -305,6 +309,7 @@ library ScopedTypeVariables TemplateHaskell TupleSections + TypeApplications TypeFamilies @@ -325,6 +330,7 @@ library executable graphql-engine default-extensions: ApplicativeDo BangPatterns + ConstraintKinds DeriveDataTypeable DeriveFoldable DeriveFunctor @@ -345,6 +351,7 @@ executable graphql-engine ScopedTypeVariables TemplateHaskell TupleSections + TypeApplications TypeFamilies main-is: Main.hs diff --git a/server/src-exec/Migrate.hs b/server/src-exec/Migrate.hs index 722fd7257d6..f12b59960ad 100644 --- a/server/src-exec/Migrate.hs +++ b/server/src-exec/Migrate.hs @@ -4,19 +4,19 @@ module Migrate ) where -import Data.Time.Clock (UTCTime) -import Language.Haskell.TH.Syntax (Q, TExp, unTypeQ) +import Data.Time.Clock (UTCTime) +import Language.Haskell.TH.Syntax (Q, TExp, unTypeQ) import Hasura.Prelude -import Hasura.RQL.DDL.Schema.Table +import Hasura.RQL.DDL.Schema import Hasura.RQL.Types import Hasura.Server.Query -import qualified Data.Aeson as A -import qualified Data.Text as T -import qualified Data.Yaml.TH as Y +import qualified Data.Aeson as A +import qualified Data.Text as T +import qualified Data.Yaml.TH as Y -import qualified Database.PG.Query as Q +import qualified Database.PG.Query as Q curCatalogVer :: T.Text curCatalogVer = "20" diff --git a/server/src-exec/Ops.hs b/server/src-exec/Ops.hs index a10ec65b741..747ff094fc0 100644 --- a/server/src-exec/Ops.hs +++ b/server/src-exec/Ops.hs @@ -10,7 +10,7 @@ import Migrate (curCatalogVer) import Hasura.EncJSON import Hasura.Prelude -import Hasura.RQL.DDL.Schema.Table +import Hasura.RQL.DDL.Schema import Hasura.RQL.Types import Hasura.Server.Query import Hasura.SQL.Types diff --git a/server/src-lib/Hasura/RQL/DDL/Metadata.hs b/server/src-lib/Hasura/RQL/DDL/Metadata.hs index 70904e034d1..38c80a5d97d 100644 --- a/server/src-lib/Hasura/RQL/DDL/Metadata.hs +++ b/server/src-lib/Hasura/RQL/DDL/Metadata.hs @@ -47,8 +47,7 @@ import qualified Hasura.RQL.DDL.Permission.Internal as DP import qualified Hasura.RQL.DDL.QueryCollection as DQC import qualified Hasura.RQL.DDL.Relationship as DR import qualified Hasura.RQL.DDL.RemoteSchema as DRS -import qualified Hasura.RQL.DDL.Schema.Function as DF -import qualified Hasura.RQL.DDL.Schema.Table as DT +import qualified Hasura.RQL.DDL.Schema as DS import qualified Hasura.RQL.Types.EventTrigger as DTS import qualified Hasura.RQL.Types.RemoteSchema as TRS @@ -139,7 +138,7 @@ runClearMetadata runClearMetadata _ = do adminOnly liftTx clearMetadata - DT.buildSchemaCacheStrict + DS.buildSchemaCacheStrict return successMsg data ReplaceMetadata @@ -223,16 +222,16 @@ applyQP2 applyQP2 (ReplaceMetadata tables mFunctions mSchemas mCollections mAllowlist) = do liftTx clearMetadata - DT.buildSchemaCacheStrict + DS.buildSchemaCacheStrict withPathK "tables" $ do -- tables and views indexedForM_ tables $ \tableMeta -> do - let trackQuery = DT.TrackTable - { DT.tName = tableMeta ^. tmTable - , DT.tIsEnum = tableMeta ^. tmIsEnum } - void $ DT.trackExistingTableOrViewP2 trackQuery + let trackQuery = DS.TrackTable + { DS.tName = tableMeta ^. tmTable + , DS.tIsEnum = tableMeta ^. tmIsEnum } + void $ DS.trackExistingTableOrViewP2 trackQuery -- Relationships indexedForM_ tables $ \table -> do @@ -263,7 +262,7 @@ applyQP2 (ReplaceMetadata tables mFunctions mSchemas mCollections mAllowlist) = -- sql functions withPathK "functions" $ - indexedMapM_ (void . DF.trackFunctionP2) functions + indexedMapM_ (void . 
DS.trackFunctionP2) functions
 
   -- query collections
   withPathK "query_collections" $
@@ -444,7 +443,7 @@ runReloadMetadata
   => ReloadMetadata -> m EncJSON
 runReloadMetadata _ = do
   adminOnly
-  DT.buildSchemaCache
+  DS.buildSchemaCache
   return successMsg
 
 data DumpInternalState
@@ -506,9 +505,8 @@ runDropInconsistentMetadata _ = do
 
 purgeMetadataObj :: MonadTx m => MetadataObjId -> m ()
 purgeMetadataObj = liftTx . \case
-  (MOTable qt) ->
-    Q.catchE defaultTxErrorHandler $ DT.delTableFromCatalog qt
-  (MOFunction qf) -> DF.delFunctionFromCatalog qf
+  (MOTable qt) -> DS.deleteTableFromCatalog qt
+  (MOFunction qf) -> DS.delFunctionFromCatalog qf
   (MORemoteSchema rsn) -> DRS.removeRemoteSchemaFromCatalog rsn
   (MOTableObj qt (MTORel rn _)) -> DR.delRelFromCatalog qt rn
   (MOTableObj qt (MTOPerm rn pt)) -> DP.dropPermFromCatalog qt rn pt
diff --git a/server/src-lib/Hasura/RQL/DDL/Relationship/Rename.hs b/server/src-lib/Hasura/RQL/DDL/Relationship/Rename.hs
index 839ccbbc9ba..e11dcf5cbe1 100644
--- a/server/src-lib/Hasura/RQL/DDL/Relationship/Rename.hs
+++ b/server/src-lib/Hasura/RQL/DDL/Relationship/Rename.hs
@@ -6,9 +6,9 @@ import Hasura.EncJSON
 import Hasura.Prelude
 import Hasura.RQL.DDL.Relationship (validateRelP1)
 import Hasura.RQL.DDL.Relationship.Types
-import Hasura.RQL.DDL.Schema.Rename (renameRelInCatalog)
-import Hasura.RQL.DDL.Schema.Table (buildSchemaCache,
-                                    checkNewInconsistentMeta)
+import Hasura.RQL.DDL.Schema (buildSchemaCache,
+                              renameRelInCatalog,
+                              withNewInconsistentObjsCheck)
 import Hasura.RQL.Types
 import Hasura.SQL.Types
@@ -23,8 +23,7 @@ renameRelP2
      , HasSQLGenCtx m
      )
   => QualifiedTable -> RelName -> RelInfo -> m ()
-renameRelP2 qt newRN relInfo = do
-  oldSC <- askSchemaCache
+renameRelP2 qt newRN relInfo = withNewInconsistentObjsCheck $ do
   tabInfo <- askTabInfo qt
   -- check for conflicts in fieldInfoMap
   case HM.lookup (fromRel newRN) $ _tiFieldInfoMap tabInfo of
@@ -37,9 +36,6 @@ renameRelP2 qt newRN relInfo = do
   renameRelInCatalog qt oldRN newRN
   -- update schema cache
   buildSchemaCache
-  newSC <- askSchemaCache
-  -- check for new inconsistency
-  checkNewInconsistentMeta oldSC newSC
   where
     oldRN = riName relInfo
diff --git a/server/src-lib/Hasura/RQL/DDL/Schema.hs b/server/src-lib/Hasura/RQL/DDL/Schema.hs
new file mode 100644
index 00000000000..aeefb130944
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/DDL/Schema.hs
@@ -0,0 +1,117 @@
+{-| This module (along with the various @Hasura.RQL.DDL.Schema.*@ modules) provides operations to
+load and modify the Hasura catalog and schema cache.
+
+* The /catalog/ refers to the set of PostgreSQL tables and views that store all schema information
+  known by Hasura. This includes any tracked Postgres tables, views, and functions, all remote
+  schemas, and any additional Hasura-specific information such as permissions and relationships.
+
+  Primitive functions for loading and modifying the catalog are defined in
+  "Hasura.RQL.DDL.Schema.Catalog", but most uses are wrapped by other functions to synchronize
+  catalog information with the information in the schema cache.
+
+* The /schema cache/ is a process-global value of type 'SchemaCache' that stores an in-memory
+  representation of the data stored in the catalog. The in-memory representation is not identical
+  to the data in the catalog, since it has some post-processing applied to it in order to make it
+  easier to consume for other parts of the system, such as GraphQL schema generation.
For example, + although column information is represented by 'PGRawColumnInfo', the schema cache contains + “processed” 'PGColumnInfo' values, instead. + + Ultimately, the catalog is the source of truth for all information contained in the schema + cache, but to avoid rebuilding the entire schema cache on every change to the catalog, various + functions incrementally update the cache when they modify the catalog. +-} +module Hasura.RQL.DDL.Schema + ( module Hasura.RQL.DDL.Schema.Cache + , module Hasura.RQL.DDL.Schema.Catalog + , module Hasura.RQL.DDL.Schema.Function + , module Hasura.RQL.DDL.Schema.Rename + , module Hasura.RQL.DDL.Schema.Table + + , RunSQL(..) + , runRunSQL + ) where + +import Hasura.Prelude + +import qualified Data.Text as T +import qualified Data.Text.Encoding as TE +import qualified Database.PG.Query as Q +import qualified Database.PostgreSQL.LibPQ as PQ + +import Data.Aeson +import Data.Aeson.Casing +import Data.Aeson.TH +import Language.Haskell.TH.Syntax (Lift) + +import Hasura.EncJSON +import Hasura.RQL.DDL.Schema.Cache +import Hasura.RQL.DDL.Schema.Catalog +import Hasura.RQL.DDL.Schema.Function +import Hasura.RQL.DDL.Schema.Rename +import Hasura.RQL.DDL.Schema.Table +import Hasura.RQL.Instances () +import Hasura.RQL.Types +import Hasura.Server.Utils (matchRegex) + +data RunSQL + = RunSQL + { rSql :: Text + , rCascade :: !(Maybe Bool) + , rCheckMetadataConsistency :: !(Maybe Bool) + } deriving (Show, Eq, Lift) +$(deriveJSON (aesonDrop 1 snakeCase){omitNothingFields=True} ''RunSQL) + +runRunSQL + :: (QErrM m, UserInfoM m, CacheRWM m, MonadTx m, MonadIO m, HasHttpManager m, HasSQLGenCtx m) + => RunSQL -> m EncJSON +runRunSQL (RunSQL t cascade mChkMDCnstcy) = do + adminOnly + isMDChkNeeded <- maybe (isAltrDropReplace t) return mChkMDCnstcy + bool (execRawSQL t) (withMetadataCheck (or cascade) $ execRawSQL t) isMDChkNeeded + where + execRawSQL :: (MonadTx m) => Text -> m EncJSON + execRawSQL = + fmap (encJFromJValue @RunSQLRes) . + liftTx . Q.multiQE rawSqlErrHandler . Q.fromText + where + rawSqlErrHandler txe = + let e = err400 PostgresError "query execution failed" + in e {qeInternal = Just $ toJSON txe} + + isAltrDropReplace :: QErrM m => T.Text -> m Bool + isAltrDropReplace = either throwErr return . matchRegex regex False + where + throwErr s = throw500 $ "compiling regex failed: " <> T.pack s + regex = "alter|drop|replace|create function" + +data RunSQLRes + = RunSQLRes + { rrResultType :: !Text + , rrResult :: !Value + } deriving (Show, Eq) +$(deriveJSON (aesonDrop 2 snakeCase) ''RunSQLRes) + +instance Q.FromRes RunSQLRes where + fromRes (Q.ResultOkEmpty _) = + return $ RunSQLRes "CommandOk" Null + fromRes (Q.ResultOkData res) = do + csvRows <- resToCSV res + return $ RunSQLRes "TuplesOk" $ toJSON csvRows + where + resToCSV :: PQ.Result -> ExceptT T.Text IO [[Text]] + resToCSV r = do + nr <- liftIO $ PQ.ntuples r + nc <- liftIO $ PQ.nfields r + + hdr <- forM [0..pred nc] $ \ic -> do + colNameBS <- liftIO $ PQ.fname r ic + maybe (return "unknown") decodeBS colNameBS + + rows <- forM [0..pred nr] $ \ir -> + forM [0..pred nc] $ \ic -> do + cellValBS <- liftIO $ PQ.getvalue r ir ic + maybe (return "NULL") decodeBS cellValBS + + return $ hdr:rows + + decodeBS = either (throwError . T.pack . show) return . 
TE.decodeUtf8' diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs new file mode 100644 index 00000000000..b69a2f51767 --- /dev/null +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs @@ -0,0 +1,353 @@ +{-| Top-level functions concerned specifically with operations on the schema cache, such as +rebuilding it from the catalog and incorporating schema changes. See the module documentation for +"Hasura.RQL.DDL.Schema" for more details. + +__Note__: this module is __mutually recursive__ with other @Hasura.RQL.DDL.Schema.*@ modules, which +both define pieces of the implementation of building the schema cache and define handlers that +trigger schema cache rebuilds. -} +module Hasura.RQL.DDL.Schema.Cache + ( CacheBuildM + , buildSchemaCache + , buildSchemaCacheFor + , buildSchemaCacheStrict + , buildSchemaCacheWithoutSetup + + , withNewInconsistentObjsCheck + , withMetadataCheck + , purgeDependentObject + + , withSchemaObject + , withSchemaObject_ + ) where + +import Hasura.Prelude + +import qualified Data.HashMap.Strict as M +import qualified Data.HashSet as HS +import qualified Data.Text as T +import qualified Database.PG.Query as Q + +import Data.Aeson + +import qualified Hasura.GraphQL.Schema as GS + +import Hasura.Db +import Hasura.GraphQL.RemoteServer +import Hasura.RQL.DDL.Deps +import Hasura.RQL.DDL.EventTrigger +import Hasura.RQL.DDL.Permission +import Hasura.RQL.DDL.Permission.Internal +import Hasura.RQL.DDL.Relationship +import Hasura.RQL.DDL.RemoteSchema +import Hasura.RQL.DDL.Schema.Catalog +import Hasura.RQL.DDL.Schema.Diff +import Hasura.RQL.DDL.Schema.Function +import Hasura.RQL.DDL.Schema.Table +import Hasura.RQL.DDL.Utils +import Hasura.RQL.Types +import Hasura.RQL.Types.Catalog +import Hasura.RQL.Types.QueryCollection +import Hasura.SQL.Types + +type CacheBuildM m = (CacheRWM m, MonadTx m, MonadIO m, HasHttpManager m, HasSQLGenCtx m) + +buildSchemaCache :: (CacheBuildM m) => m () +buildSchemaCache = buildSchemaCacheWithOptions True + +buildSchemaCacheWithoutSetup :: (CacheBuildM m) => m () +buildSchemaCacheWithoutSetup = buildSchemaCacheWithOptions False + +buildSchemaCacheWithOptions :: (CacheBuildM m) => Bool -> m () +buildSchemaCacheWithOptions withSetup = do + -- clean hdb_views + when withSetup $ liftTx $ Q.catchE defaultTxErrorHandler clearHdbViews + -- reset the current schemacache + writeSchemaCache emptySchemaCache + sqlGenCtx <- askSQLGenCtx + + -- fetch all catalog metadata + CatalogMetadata tables relationships permissions + eventTriggers remoteSchemas functions fkeys' allowlistDefs + <- liftTx fetchCatalogData + + let fkeys = HS.fromList fkeys' + + -- tables + modTableCache =<< buildTableCache tables + + -- relationships + forM_ relationships $ \(CatalogRelation qt rn rt rDef cmnt) -> do + let objId = MOTableObj qt $ MTORel rn rt + def = toJSON $ WithTable qt $ RelDef rn rDef cmnt + mkInconsObj = InconsistentMetadataObj objId (MOTRel rt) def + modifyErr (\e -> "table " <> qt <<> "; rel " <> rn <<> "; " <> e) $ + withSchemaObject_ mkInconsObj $ + case rt of + ObjRel -> do + using <- decodeValue rDef + let relDef = RelDef rn using Nothing + validateObjRel qt relDef + objRelP2Setup qt fkeys relDef + ArrRel -> do + using <- decodeValue rDef + let relDef = RelDef rn using Nothing + validateArrRel qt relDef + arrRelP2Setup qt fkeys relDef + + -- permissions + forM_ permissions $ \(CatalogPermission qt rn pt pDef cmnt) -> do + let objId = MOTableObj qt $ MTOPerm rn pt + def = toJSON $ WithTable qt $ PermDef rn pDef 
cmnt + mkInconsObj = InconsistentMetadataObj objId (MOTPerm pt) def + modifyErr (\e -> "table " <> qt <<> "; role " <> rn <<> "; " <> e) $ + withSchemaObject_ mkInconsObj $ + case pt of + PTInsert -> permHelper withSetup sqlGenCtx qt rn pDef PAInsert + PTSelect -> permHelper withSetup sqlGenCtx qt rn pDef PASelect + PTUpdate -> permHelper withSetup sqlGenCtx qt rn pDef PAUpdate + PTDelete -> permHelper withSetup sqlGenCtx qt rn pDef PADelete + + -- event triggers + forM_ eventTriggers $ \(CatalogEventTrigger qt trn configuration) -> do + let objId = MOTableObj qt $ MTOTrigger trn + def = object ["table" .= qt, "configuration" .= configuration] + mkInconsObj = InconsistentMetadataObj objId MOTEventTrigger def + withSchemaObject_ mkInconsObj $ do + etc <- decodeValue configuration + subTableP2Setup qt etc + allCols <- getCols . _tiFieldInfoMap <$> askTabInfo qt + when withSetup $ liftTx $ + mkAllTriggersQ trn qt allCols (stringifyNum sqlGenCtx) (etcDefinition etc) + + -- sql functions + forM_ functions $ \(CatalogFunction qf rawfiM) -> do + let def = toJSON $ TrackFunction qf + mkInconsObj = + InconsistentMetadataObj (MOFunction qf) MOTFunction def + modifyErr (\e -> "function " <> qf <<> "; " <> e) $ + withSchemaObject_ mkInconsObj $ do + rawfi <- onNothing rawfiM $ + throw400 NotExists $ "no such function exists in postgres : " <>> qf + trackFunctionP2Setup qf rawfi + + -- allow list + replaceAllowlist $ concatMap _cdQueries allowlistDefs + + -- build GraphQL context with tables and functions + GS.buildGCtxMapPG + + -- remote schemas + forM_ remoteSchemas resolveSingleRemoteSchema + + where + permHelper setup sqlGenCtx qt rn pDef pa = do + qCtx <- mkAdminQCtx sqlGenCtx <$> askSchemaCache + perm <- decodeValue pDef + let permDef = PermDef rn perm Nothing + createPerm = WithTable qt permDef + (permInfo, deps) <- liftP1WithQCtx qCtx $ createPermP1 createPerm + when setup $ addPermP2Setup qt permDef permInfo + addPermToCache qt rn pa permInfo deps + -- p2F qt rn p1Res + + resolveSingleRemoteSchema rs = do + let AddRemoteSchemaQuery name _ _ = rs + mkInconsObj = InconsistentMetadataObj (MORemoteSchema name) + MOTRemoteSchema (toJSON rs) + withSchemaObject_ mkInconsObj $ do + rsCtx <- addRemoteSchemaP2Setup rs + sc <- askSchemaCache + let gCtxMap = scGCtxMap sc + defGCtx = scDefaultRemoteGCtx sc + rGCtx = convRemoteGCtx $ rscGCtx rsCtx + mergedGCtxMap <- mergeRemoteSchema gCtxMap rGCtx + mergedDefGCtx <- mergeGCtx defGCtx rGCtx + writeSchemaCache sc { scGCtxMap = mergedGCtxMap + , scDefaultRemoteGCtx = mergedDefGCtx + } + +-- | Rebuilds the schema cache. If an object with the given object id became newly inconsistent, +-- raises an error about it specifically. Otherwise, raises a generic metadata inconsistency error. +buildSchemaCacheFor :: (CacheBuildM m) => MetadataObjId -> m () +buildSchemaCacheFor objectId = do + oldSchemaCache <- askSchemaCache + buildSchemaCache + newSchemaCache <- askSchemaCache + + let diffInconsistentObjects = getDifference _moId `on` scInconsistentObjs + newInconsistentObjects = newSchemaCache `diffInconsistentObjects` oldSchemaCache + + for_ (find ((== objectId) . _moId) newInconsistentObjects) $ \matchingObject -> + throw400 ConstraintViolation (_moReason matchingObject) + + unless (null newInconsistentObjects) $ + throwError (err400 Unexpected "cannot continue due to new inconsistent metadata") + { qeInternal = Just $ toJSON newInconsistentObjects } + +-- | Like 'buildSchemaCache', but fails if there is any inconsistent metadata. 
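+--
+-- For illustration, a caller that must not proceed with partially-consistent metadata might use
+-- it as a drop-in replacement for 'buildSchemaCache' (here @applyCatalogMigrations@ stands in
+-- for an arbitrary catalog-modifying action, not a function defined in this codebase):
+--
+-- > runMigrations :: (CacheBuildM m) => m ()
+-- > runMigrations = applyCatalogMigrations >> buildSchemaCacheStrict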
+buildSchemaCacheStrict :: (CacheBuildM m) => m () +buildSchemaCacheStrict = do + buildSchemaCache + sc <- askSchemaCache + let inconsObjs = scInconsistentObjs sc + unless (null inconsObjs) $ do + let err = err400 Unexpected "cannot continue due to inconsistent metadata" + throwError err{qeInternal = Just $ toJSON inconsObjs} + +-- | Executes the given action, and if any new 'InconsistentMetadataObj's are added to the schema +-- cache as a result of its execution, raises an error. +withNewInconsistentObjsCheck :: (QErrM m, CacheRM m) => m a -> m a +withNewInconsistentObjsCheck action = do + originalObjects <- scInconsistentObjs <$> askSchemaCache + result <- action + currentObjects <- scInconsistentObjs <$> askSchemaCache + checkNewInconsistentMeta originalObjects currentObjects + pure result + +-- | @'withMetadataCheck' cascade action@ runs @action@ and checks if the schema changed as a +-- result. If it did, it checks to ensure the changes do not violate any integrity constraints, and +-- if not, incorporates them into the schema cache. +withMetadataCheck :: (CacheBuildM m) => Bool -> m a -> m a +withMetadataCheck cascade action = do + -- Drop hdb_views so no interference is caused to the sql query + liftTx $ Q.catchE defaultTxErrorHandler clearHdbViews + + -- Get the metadata before the sql query, everything, need to filter this + oldMetaU <- liftTx $ Q.catchE defaultTxErrorHandler fetchTableMeta + oldFuncMetaU <- liftTx $ Q.catchE defaultTxErrorHandler fetchFunctionMeta + + -- Run the action + res <- action + + -- Get the metadata after the sql query + newMeta <- liftTx $ Q.catchE defaultTxErrorHandler fetchTableMeta + newFuncMeta <- liftTx $ Q.catchE defaultTxErrorHandler fetchFunctionMeta + sc <- askSchemaCache + let existingInconsistentObjs = scInconsistentObjs sc + existingTables = M.keys $ scTables sc + oldMeta = flip filter oldMetaU $ \tm -> tmTable tm `elem` existingTables + schemaDiff = getSchemaDiff oldMeta newMeta + existingFuncs = M.keys $ scFunctions sc + oldFuncMeta = flip filter oldFuncMetaU $ \fm -> funcFromMeta fm `elem` existingFuncs + FunctionDiff droppedFuncs alteredFuncs = getFuncDiff oldFuncMeta newFuncMeta + overloadedFuncs = getOverloadedFuncs existingFuncs newFuncMeta + + -- Do not allow overloading functions + unless (null overloadedFuncs) $ + throw400 NotSupported $ "the following tracked function(s) cannot be overloaded: " + <> reportFuncs overloadedFuncs + + indirectDeps <- getSchemaChangeDeps schemaDiff + + -- Report back with an error if cascade is not set + when (indirectDeps /= [] && not cascade) $ reportDepsExt indirectDeps [] + + -- Purge all the indirect dependents from state + mapM_ purgeDependentObject indirectDeps + + -- Purge all dropped functions + let purgedFuncs = flip mapMaybe indirectDeps $ \dep -> + case dep of + SOFunction qf -> Just qf + _ -> Nothing + + forM_ (droppedFuncs \\ purgedFuncs) $ \qf -> do + liftTx $ delFunctionFromCatalog qf + delFunctionFromCache qf + + -- Process altered functions + forM_ alteredFuncs $ \(qf, newTy) -> + when (newTy == FTVOLATILE) $ + throw400 NotSupported $ + "type of function " <> qf <<> " is altered to \"VOLATILE\" which is not supported now" + + -- update the schema cache and hdb_catalog with the changes + reloadRequired <- processSchemaChanges schemaDiff + + let withReload = do -- in case of any rename + buildSchemaCache + currentInconsistentObjs <- scInconsistentObjs <$> askSchemaCache + checkNewInconsistentMeta existingInconsistentObjs currentInconsistentObjs + + withoutReload = do + postSc <- 
askSchemaCache + -- recreate the insert permission infra + forM_ (M.elems $ scTables postSc) $ \ti -> do + let tn = _tiName ti + forM_ (M.elems $ _tiRolePermInfoMap ti) $ \rpi -> + maybe (return ()) (liftTx . buildInsInfra tn) $ _permIns rpi + + strfyNum <- stringifyNum <$> askSQLGenCtx + --recreate triggers + forM_ (M.elems $ scTables postSc) $ \ti -> do + let tn = _tiName ti + cols = getCols $ _tiFieldInfoMap ti + forM_ (M.toList $ _tiEventTriggerInfoMap ti) $ \(trn, eti) -> do + let fullspec = etiOpsDef eti + liftTx $ mkAllTriggersQ trn tn cols strfyNum fullspec + + bool withoutReload withReload reloadRequired + + return res + where + reportFuncs = T.intercalate ", " . map dquoteTxt + + processSchemaChanges :: (MonadTx m, CacheRWM m) => SchemaDiff -> m Bool + processSchemaChanges schemaDiff = do + -- Purge the dropped tables + mapM_ delTableAndDirectDeps droppedTables + + sc <- askSchemaCache + fmap or $ forM alteredTables $ \(oldQtn, tableDiff) -> do + ti <- case M.lookup oldQtn $ scTables sc of + Just ti -> return ti + Nothing -> throw500 $ "old table metadata not found in cache : " <>> oldQtn + processTableChanges ti tableDiff + where + SchemaDiff droppedTables alteredTables = schemaDiff + +checkNewInconsistentMeta + :: (QErrM m) + => [InconsistentMetadataObj] -> [InconsistentMetadataObj] -> m () +checkNewInconsistentMeta originalInconsMeta currentInconsMeta = + unless (null newInconsMetaObjects) $ + throwError (err500 Unexpected "cannot continue due to newly found inconsistent metadata") + { qeInternal = Just $ toJSON newInconsMetaObjects } + where + newInconsMetaObjects = getDifference _moId currentInconsMeta originalInconsMeta + +purgeDependentObject :: (CacheRWM m, MonadTx m) => SchemaObjId -> m () +purgeDependentObject schemaObjId = case schemaObjId of + (SOTableObj tn (TOPerm rn pt)) -> do + liftTx $ dropPermFromCatalog tn rn pt + withPermType pt delPermFromCache rn tn + + (SOTableObj qt (TORel rn)) -> do + liftTx $ delRelFromCatalog qt rn + delRelFromCache rn qt + + (SOFunction qf) -> do + liftTx $ delFunctionFromCatalog qf + delFunctionFromCache qf + + (SOTableObj qt (TOTrigger trn)) -> do + liftTx $ delEventTriggerFromCatalog trn + delEventTriggerFromCache qt trn + + _ -> throw500 $ + "unexpected dependent object : " <> reportSchemaObj schemaObjId + +-- | @'withSchemaObject' f action@ runs @action@, and if it raises any errors, applies @f@ to the +-- error message to produce an 'InconsistentMetadataObj', then adds the object to the schema cache +-- and returns 'Nothing' instead of aborting. +withSchemaObject :: (QErrM m, CacheRWM m) => (Text -> InconsistentMetadataObj) -> m a -> m (Maybe a) +withSchemaObject f action = + (Just <$> action) `catchError` \err -> do + sc <- askSchemaCache + let inconsObj = f $ qeError err + allInconsObjs = inconsObj:scInconsistentObjs sc + writeSchemaCache sc { scInconsistentObjs = allInconsObjs } + pure Nothing + +withSchemaObject_ :: (QErrM m, CacheRWM m) => (Text -> InconsistentMetadataObj) -> m () -> m () +withSchemaObject_ f = void . 
withSchemaObject f diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs-boot b/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs-boot new file mode 100644 index 00000000000..277a141fab0 --- /dev/null +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs-boot @@ -0,0 +1,20 @@ +module Hasura.RQL.DDL.Schema.Cache where + +import Hasura.Prelude + +import Hasura.Db +import Hasura.RQL.Types + +type CacheBuildM m = (CacheRWM m, MonadTx m, MonadIO m, HasHttpManager m, HasSQLGenCtx m) + +buildSchemaCacheStrict :: (CacheBuildM m) => m () +buildSchemaCacheFor :: (CacheBuildM m) => MetadataObjId -> m () +buildSchemaCache :: (CacheBuildM m) => m () +buildSchemaCacheWithoutSetup :: (CacheBuildM m) => m () + +withNewInconsistentObjsCheck :: (QErrM m, CacheRM m) => m a -> m a +withMetadataCheck :: (CacheBuildM m) => Bool -> m a -> m a +purgeDependentObject :: (CacheRWM m, MonadTx m) => SchemaObjId -> m () + +withSchemaObject :: (QErrM m, CacheRWM m) => (Text -> InconsistentMetadataObj) -> m a -> m (Maybe a) +withSchemaObject_ :: (QErrM m, CacheRWM m) => (Text -> InconsistentMetadataObj) -> m () -> m () diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Catalog.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Catalog.hs new file mode 100644 index 00000000000..ce60454bc9c --- /dev/null +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Catalog.hs @@ -0,0 +1,39 @@ +-- | Functions for loading and modifying the catalog. See the module documentation for +-- "Hasura.RQL.DDL.Schema" for more details. +module Hasura.RQL.DDL.Schema.Catalog + ( fetchCatalogData + , saveTableToCatalog + , updateTableIsEnumInCatalog + , deleteTableFromCatalog + ) where + +import Hasura.Prelude + +import qualified Database.PG.Query as Q + +import Hasura.Db +import Hasura.RQL.Types.Catalog +import Hasura.SQL.Types + +fetchCatalogData :: (MonadTx m) => m CatalogMetadata +fetchCatalogData = liftTx $ Q.getAltJ . runIdentity . Q.getRow <$> Q.withQE defaultTxErrorHandler + $(Q.sqlFromFile "src-rsr/catalog_metadata.sql") () True + +saveTableToCatalog :: (MonadTx m) => QualifiedTable -> Bool -> m () +saveTableToCatalog (QualifiedObject sn tn) isEnum = liftTx $ Q.unitQE defaultTxErrorHandler [Q.sql| + INSERT INTO "hdb_catalog"."hdb_table" (table_schema, table_name, is_enum) + VALUES ($1, $2, $3) + |] (sn, tn, isEnum) False + +updateTableIsEnumInCatalog :: (MonadTx m) => QualifiedTable -> Bool -> m () +updateTableIsEnumInCatalog (QualifiedObject sn tn) isEnum = + liftTx $ Q.unitQE defaultTxErrorHandler [Q.sql| + UPDATE "hdb_catalog"."hdb_table" SET is_enum = $3 + WHERE table_schema = $1 AND table_name = $2 + |] (sn, tn, isEnum) False + +deleteTableFromCatalog :: (MonadTx m) => QualifiedTable -> m () +deleteTableFromCatalog (QualifiedObject sn tn) = liftTx $ Q.unitQE defaultTxErrorHandler [Q.sql| + DELETE FROM "hdb_catalog"."hdb_table" + WHERE table_schema = $1 AND table_name = $2 + |] (sn, tn) False diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Table.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Table.hs index 3b9a6d134aa..6a1fc9e23bb 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Table.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Table.hs @@ -1,61 +1,46 @@ -{- | -Description: Create/delete SQL tables to/from Hasura metadata. --} +-- | Description: Create/delete SQL tables to/from Hasura metadata. +module Hasura.RQL.DDL.Schema.Table + ( TrackTable(..) + , runTrackTableQ + , trackExistingTableOrViewP2 -{-# LANGUAGE TypeApplications #-} + , UntrackTable(..) + , runUntrackTableQ -module Hasura.RQL.DDL.Schema.Table where + , SetTableIsEnum(..) 
+ , runSetExistingTableIsEnumQ + + , buildTableCache + , delTableAndDirectDeps + , processTableChanges + ) where import Hasura.EncJSON -import Hasura.GraphQL.RemoteServer import Hasura.Prelude import Hasura.RQL.DDL.Deps -import Hasura.RQL.DDL.EventTrigger import Hasura.RQL.DDL.Permission -import Hasura.RQL.DDL.Permission.Internal -import Hasura.RQL.DDL.Relationship -import Hasura.RQL.DDL.RemoteSchema +import {-# SOURCE #-} Hasura.RQL.DDL.Schema.Cache +import Hasura.RQL.DDL.Schema.Catalog import Hasura.RQL.DDL.Schema.Diff import Hasura.RQL.DDL.Schema.Enum -import Hasura.RQL.DDL.Schema.Function import Hasura.RQL.DDL.Schema.Rename -import Hasura.RQL.DDL.Utils import Hasura.RQL.Types import Hasura.RQL.Types.Catalog -import Hasura.RQL.Types.QueryCollection -import Hasura.Server.Utils (matchRegex) import Hasura.SQL.Types -import qualified Database.PG.Query as Q -import qualified Hasura.GraphQL.Schema as GS +import qualified Database.PG.Query as Q +import qualified Hasura.GraphQL.Schema as GS -import Control.Lens.Extended hiding ((.=)) +import Control.Lens.Extended hiding ((.=)) import Data.Aeson import Data.Aeson.Casing import Data.Aeson.TH -import Instances.TH.Lift () -import Language.Haskell.TH.Syntax (Lift) -import Network.URI.Extended () +import Instances.TH.Lift () +import Language.Haskell.TH.Syntax (Lift) +import Network.URI.Extended () -import qualified Data.HashMap.Strict as M -import qualified Data.HashSet as HS -import qualified Data.Text as T -import qualified Data.Text.Encoding as TE -import qualified Database.PostgreSQL.LibPQ as PQ - -delTableFromCatalog :: QualifiedTable -> Q.Tx () -delTableFromCatalog (QualifiedObject sn tn) = - Q.unitQ [Q.sql| - DELETE FROM "hdb_catalog"."hdb_table" - WHERE table_schema = $1 AND table_name = $2 - |] (sn, tn) False - -saveTableToCatalog :: TrackTable -> Q.Tx () -saveTableToCatalog (TrackTable (QualifiedObject sn tn) isEnum) = - Q.unitQ [Q.sql| - INSERT INTO "hdb_catalog"."hdb_table" (table_schema, table_name, is_enum) - VALUES ($1, $2, $3) - |] (sn, tn, isEnum) False +import qualified Data.HashMap.Strict as M +import qualified Data.Text as T data TrackTable = TrackTable @@ -107,14 +92,12 @@ trackExistingTableOrViewP1 TrackTable { tName = vn } = do trackExistingTableOrViewP2 :: (QErrM m, CacheRWM m, MonadTx m, MonadIO m, HasHttpManager m, HasSQLGenCtx m) => TrackTable -> m EncJSON -trackExistingTableOrViewP2 query@TrackTable { tName = tableName } = do +trackExistingTableOrViewP2 (TrackTable tableName isEnum) = do sc <- askSchemaCache let defGCtx = scDefaultRemoteGCtx sc GS.checkConflictingNode defGCtx $ GS.qualObjectToName tableName - - liftTx $ Q.catchE defaultTxErrorHandler $ saveTableToCatalog query + saveTableToCatalog tableName isEnum buildSchemaCacheFor (MOTable tableName) - return successMsg runTrackTableQ @@ -130,37 +113,10 @@ runSetExistingTableIsEnumQ runSetExistingTableIsEnumQ (SetTableIsEnum tableName isEnum) = do adminOnly void $ askTabInfo tableName -- assert that table is tracked - - liftTx $ Q.unitQE defaultTxErrorHandler [Q.sql| - UPDATE "hdb_catalog"."hdb_table" SET is_enum = $3 - WHERE table_schema = $1 AND table_name = $2 - |] (qSchema tableName, qName tableName, isEnum) False - + updateTableIsEnumInCatalog tableName isEnum buildSchemaCacheFor (MOTable tableName) return successMsg -purgeDep :: (CacheRWM m, MonadTx m) - => SchemaObjId -> m () -purgeDep schemaObjId = case schemaObjId of - (SOTableObj tn (TOPerm rn pt)) -> do - liftTx $ dropPermFromCatalog tn rn pt - withPermType pt delPermFromCache rn tn - - (SOTableObj qt (TORel 
rn)) -> do - liftTx $ delRelFromCatalog qt rn - delRelFromCache rn qt - - (SOFunction qf) -> do - liftTx $ delFunctionFromCatalog qf - delFunctionFromCache qf - - (SOTableObj qt (TOTrigger trn)) -> do - liftTx $ delEventTriggerFromCatalog trn - delEventTriggerFromCache qt trn - - _ -> throw500 $ - "unexpected dependent object : " <> reportSchemaObj schemaObjId - unTrackExistingTableOrViewP1 :: (CacheRM m, UserInfoM m, QErrM m) => UntrackTable -> m () unTrackExistingTableOrViewP1 (UntrackTable vn _) = do @@ -187,8 +143,8 @@ unTrackExistingTableOrViewP2 (UntrackTable qtn cascade) = do -- Report bach with an error if cascade is not set when (indirectDeps /= [] && not (or cascade)) $ reportDepsExt indirectDeps [] - -- Purge all the dependants from state - mapM_ purgeDep indirectDeps + -- Purge all the dependents from state + mapM_ purgeDependentObject indirectDeps -- delete the table and its direct dependencies delTableAndDirectDeps qtn @@ -299,23 +255,9 @@ delTableAndDirectDeps qtn@(QualifiedObject sn tn) = do DELETE FROM "hdb_catalog"."event_triggers" WHERE schema_name = $1 AND table_name = $2 |] (sn, tn) False - delTableFromCatalog qtn + deleteTableFromCatalog qtn delTableFromCache qtn -processSchemaChanges :: (MonadTx m, CacheRWM m) => SchemaDiff -> m Bool -processSchemaChanges schemaDiff = do - -- Purge the dropped tables - mapM_ delTableAndDirectDeps droppedTables - - sc <- askSchemaCache - fmap or $ forM alteredTables $ \(oldQtn, tableDiff) -> do - ti <- case M.lookup oldQtn $ scTables sc of - Just ti -> return ti - Nothing -> throw500 $ "old table metadata not found in cache : " <>> oldQtn - processTableChanges ti tableDiff - where - SchemaDiff droppedTables alteredTables = schemaDiff - -- | Builds an initial @'TableCache' 'PGColumnInfo'@ from catalog information. Does not fill in -- '_tiRolePermInfoMap' or '_tiEventTriggerInfoMap' at all, and '_tiFieldInfoMap' only contains -- columns, not relationships; those pieces of information are filled in by later stages. @@ -404,340 +346,3 @@ processColumnInfoUsingCache :: (CacheRM m, QErrM m) => QualifiedTable -> PGRawCo processColumnInfoUsingCache tableName rawInfo = do tables <- scTables <$> askSchemaCache processColumnInfo (M.mapMaybe _tiEnumValues tables) tableName rawInfo - -withSchemaObject - :: (QErrM m, CacheRWM m) - => (T.Text -> InconsistentMetadataObj) - -> m a - -> m (Maybe a) -withSchemaObject f action = - (Just <$> action) `catchError` \err -> do - sc <- askSchemaCache - let inconsObj = f $ qeError err - allInconsObjs = inconsObj:scInconsistentObjs sc - writeSchemaCache sc { scInconsistentObjs = allInconsObjs } - pure Nothing - -withSchemaObject_ :: (QErrM m, CacheRWM m) => (T.Text -> InconsistentMetadataObj) -> m () -> m () -withSchemaObject_ f = void . 
withSchemaObject f - -checkNewInconsistentMeta - :: (QErrM m) - => SchemaCache -- old schema cache - -> SchemaCache -- new schema cache - -> m () -checkNewInconsistentMeta oldSC newSC = - unless (null newInconsMetaObjects) $ do - let err = err500 Unexpected - "cannot continue due to newly found inconsistent metadata" - throwError err{qeInternal = Just $ toJSON newInconsMetaObjects} - where - oldInconsMeta = scInconsistentObjs oldSC - newInconsMeta = scInconsistentObjs newSC - newInconsMetaObjects = getDifference _moId newInconsMeta oldInconsMeta - -buildSchemaCacheStrict - :: (MonadTx m, CacheRWM m, MonadIO m, HasHttpManager m, HasSQLGenCtx m) - => m () -buildSchemaCacheStrict = do - buildSchemaCache - sc <- askSchemaCache - let inconsObjs = scInconsistentObjs sc - unless (null inconsObjs) $ do - let err = err400 Unexpected "cannot continue due to inconsistent metadata" - throwError err{qeInternal = Just $ toJSON inconsObjs} - --- | Rebuilds the schema cache. If an object with the given object id became newly inconsistent, --- raises an error about it specifically. Otherwise, raises a generic metadata inconsistency error. -buildSchemaCacheFor - :: (MonadTx m, CacheRWM m, MonadIO m, HasHttpManager m, HasSQLGenCtx m) - => MetadataObjId -> m () -buildSchemaCacheFor objectId = do - oldSchemaCache <- askSchemaCache - buildSchemaCache - newSchemaCache <- askSchemaCache - - let diffInconsistentObjects = getDifference _moId `on` scInconsistentObjs - newInconsistentObjects = newSchemaCache `diffInconsistentObjects` oldSchemaCache - - for_ (find ((== objectId) . _moId) newInconsistentObjects) $ \matchingObject -> - throw400 ConstraintViolation (_moReason matchingObject) - - unless (null newInconsistentObjects) $ - throwError (err400 Unexpected "cannot continue due to new inconsistent metadata") - { qeInternal = Just $ toJSON newInconsistentObjects } - -buildSchemaCache - :: (MonadTx m, CacheRWM m, MonadIO m, HasHttpManager m, HasSQLGenCtx m) - => m () -buildSchemaCache = buildSchemaCacheG True - -buildSCWithoutSetup - :: (MonadTx m, CacheRWM m, MonadIO m, HasHttpManager m, HasSQLGenCtx m) - => m () -buildSCWithoutSetup = buildSchemaCacheG False - -buildSchemaCacheG - :: (MonadTx m, CacheRWM m, MonadIO m, HasHttpManager m, HasSQLGenCtx m) - => Bool -> m () -buildSchemaCacheG withSetup = do - -- clean hdb_views - when withSetup $ liftTx $ Q.catchE defaultTxErrorHandler clearHdbViews - -- reset the current schemacache - writeSchemaCache emptySchemaCache - sqlGenCtx <- askSQLGenCtx - - -- fetch all catalog metadata - CatalogMetadata tables relationships permissions - eventTriggers remoteSchemas functions fkeys' allowlistDefs - <- liftTx fetchCatalogData - - let fkeys = HS.fromList fkeys' - - -- tables - modTableCache =<< buildTableCache tables - - -- relationships - forM_ relationships $ \(CatalogRelation qt rn rt rDef cmnt) -> do - let objId = MOTableObj qt $ MTORel rn rt - def = toJSON $ WithTable qt $ RelDef rn rDef cmnt - mkInconsObj = InconsistentMetadataObj objId (MOTRel rt) def - modifyErr (\e -> "table " <> qt <<> "; rel " <> rn <<> "; " <> e) $ - withSchemaObject_ mkInconsObj $ - case rt of - ObjRel -> do - using <- decodeValue rDef - let relDef = RelDef rn using Nothing - validateObjRel qt relDef - objRelP2Setup qt fkeys relDef - ArrRel -> do - using <- decodeValue rDef - let relDef = RelDef rn using Nothing - validateArrRel qt relDef - arrRelP2Setup qt fkeys relDef - - -- permissions - forM_ permissions $ \(CatalogPermission qt rn pt pDef cmnt) -> do - let objId = MOTableObj qt $ MTOPerm rn 
pt - def = toJSON $ WithTable qt $ PermDef rn pDef cmnt - mkInconsObj = InconsistentMetadataObj objId (MOTPerm pt) def - modifyErr (\e -> "table " <> qt <<> "; role " <> rn <<> "; " <> e) $ - withSchemaObject_ mkInconsObj $ - case pt of - PTInsert -> permHelper withSetup sqlGenCtx qt rn pDef PAInsert - PTSelect -> permHelper withSetup sqlGenCtx qt rn pDef PASelect - PTUpdate -> permHelper withSetup sqlGenCtx qt rn pDef PAUpdate - PTDelete -> permHelper withSetup sqlGenCtx qt rn pDef PADelete - - -- event triggers - forM_ eventTriggers $ \(CatalogEventTrigger qt trn configuration) -> do - let objId = MOTableObj qt $ MTOTrigger trn - def = object ["table" .= qt, "configuration" .= configuration] - mkInconsObj = InconsistentMetadataObj objId MOTEventTrigger def - withSchemaObject_ mkInconsObj $ do - etc <- decodeValue configuration - subTableP2Setup qt etc - allCols <- getCols . _tiFieldInfoMap <$> askTabInfo qt - when withSetup $ liftTx $ - mkAllTriggersQ trn qt allCols (stringifyNum sqlGenCtx) (etcDefinition etc) - - -- sql functions - forM_ functions $ \(CatalogFunction qf rawfiM) -> do - let def = toJSON $ TrackFunction qf - mkInconsObj = - InconsistentMetadataObj (MOFunction qf) MOTFunction def - modifyErr (\e -> "function " <> qf <<> "; " <> e) $ - withSchemaObject_ mkInconsObj $ do - rawfi <- onNothing rawfiM $ - throw400 NotExists $ "no such function exists in postgres : " <>> qf - trackFunctionP2Setup qf rawfi - - -- allow list - replaceAllowlist $ concatMap _cdQueries allowlistDefs - - -- build GraphQL context with tables and functions - GS.buildGCtxMapPG - - -- remote schemas - forM_ remoteSchemas resolveSingleRemoteSchema - - where - permHelper setup sqlGenCtx qt rn pDef pa = do - qCtx <- mkAdminQCtx sqlGenCtx <$> askSchemaCache - perm <- decodeValue pDef - let permDef = PermDef rn perm Nothing - createPerm = WithTable qt permDef - (permInfo, deps) <- liftP1WithQCtx qCtx $ createPermP1 createPerm - when setup $ addPermP2Setup qt permDef permInfo - addPermToCache qt rn pa permInfo deps - -- p2F qt rn p1Res - - resolveSingleRemoteSchema rs = do - let AddRemoteSchemaQuery name _ _ = rs - mkInconsObj = InconsistentMetadataObj (MORemoteSchema name) - MOTRemoteSchema (toJSON rs) - withSchemaObject_ mkInconsObj $ do - rsCtx <- addRemoteSchemaP2Setup rs - sc <- askSchemaCache - let gCtxMap = scGCtxMap sc - defGCtx = scDefaultRemoteGCtx sc - rGCtx = convRemoteGCtx $ rscGCtx rsCtx - mergedGCtxMap <- mergeRemoteSchema gCtxMap rGCtx - mergedDefGCtx <- mergeGCtx defGCtx rGCtx - writeSchemaCache sc { scGCtxMap = mergedGCtxMap - , scDefaultRemoteGCtx = mergedDefGCtx - } - -data RunSQL - = RunSQL - { rSql :: T.Text - , rCascade :: !(Maybe Bool) - , rCheckMetadataConsistency :: !(Maybe Bool) - } deriving (Show, Eq, Lift) - -$(deriveJSON (aesonDrop 1 snakeCase){omitNothingFields=True} ''RunSQL) - -data RunSQLRes - = RunSQLRes - { rrResultType :: !T.Text - , rrResult :: !Value - } deriving (Show, Eq) - -$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''RunSQLRes) - -instance Q.FromRes RunSQLRes where - fromRes (Q.ResultOkEmpty _) = - return $ RunSQLRes "CommandOk" Null - fromRes (Q.ResultOkData res) = do - csvRows <- resToCSV res - return $ RunSQLRes "TuplesOk" $ toJSON csvRows - -execRawSQL :: (MonadTx m) => T.Text -> m EncJSON -execRawSQL = - fmap (encJFromJValue @RunSQLRes) . - liftTx . Q.multiQE rawSqlErrHandler . 
Q.fromText - where - rawSqlErrHandler txe = - let e = err400 PostgresError "query execution failed" - in e {qeInternal = Just $ toJSON txe} - -execWithMDCheck - :: (QErrM m, CacheRWM m, MonadTx m, MonadIO m, HasHttpManager m, HasSQLGenCtx m) - => RunSQL -> m EncJSON -execWithMDCheck (RunSQL t cascade _) = do - -- Drop hdb_views so no interference is caused to the sql query - liftTx $ Q.catchE defaultTxErrorHandler clearHdbViews - - -- Get the metadata before the sql query, everything, need to filter this - oldMetaU <- liftTx $ Q.catchE defaultTxErrorHandler fetchTableMeta - oldFuncMetaU <- - liftTx $ Q.catchE defaultTxErrorHandler fetchFunctionMeta - - -- Run the SQL - res <- execRawSQL t - - -- Get the metadata after the sql query - newMeta <- liftTx $ Q.catchE defaultTxErrorHandler fetchTableMeta - newFuncMeta <- liftTx $ Q.catchE defaultTxErrorHandler fetchFunctionMeta - sc <- askSchemaCache - let existingTables = M.keys $ scTables sc - oldMeta = flip filter oldMetaU $ \tm -> tmTable tm `elem` existingTables - schemaDiff = getSchemaDiff oldMeta newMeta - existingFuncs = M.keys $ scFunctions sc - oldFuncMeta = flip filter oldFuncMetaU $ \fm -> funcFromMeta fm `elem` existingFuncs - FunctionDiff droppedFuncs alteredFuncs = getFuncDiff oldFuncMeta newFuncMeta - overloadedFuncs = getOverloadedFuncs existingFuncs newFuncMeta - - -- Do not allow overloading functions - unless (null overloadedFuncs) $ - throw400 NotSupported $ "the following tracked function(s) cannot be overloaded: " - <> reportFuncs overloadedFuncs - - indirectDeps <- getSchemaChangeDeps schemaDiff - - -- Report back with an error if cascade is not set - when (indirectDeps /= [] && not (or cascade)) $ reportDepsExt indirectDeps [] - - -- Purge all the indirect dependents from state - mapM_ purgeDep indirectDeps - - -- Purge all dropped functions - let purgedFuncs = flip mapMaybe indirectDeps $ \dep -> - case dep of - SOFunction qf -> Just qf - _ -> Nothing - - forM_ (droppedFuncs \\ purgedFuncs) $ \qf -> do - liftTx $ delFunctionFromCatalog qf - delFunctionFromCache qf - - -- Process altered functions - forM_ alteredFuncs $ \(qf, newTy) -> - when (newTy == FTVOLATILE) $ - throw400 NotSupported $ - "type of function " <> qf <<> " is altered to \"VOLATILE\" which is not supported now" - - -- update the schema cache and hdb_catalog with the changes - reloadRequired <- processSchemaChanges schemaDiff - - let withReload = do -- in case of any rename - buildSchemaCache - newSC <- askSchemaCache - checkNewInconsistentMeta sc newSC - - withoutReload = do - postSc <- askSchemaCache - -- recreate the insert permission infra - forM_ (M.elems $ scTables postSc) $ \ti -> do - let tn = _tiName ti - forM_ (M.elems $ _tiRolePermInfoMap ti) $ \rpi -> - maybe (return ()) (liftTx . buildInsInfra tn) $ _permIns rpi - - strfyNum <- stringifyNum <$> askSQLGenCtx - --recreate triggers - forM_ (M.elems $ scTables postSc) $ \ti -> do - let tn = _tiName ti - cols = getCols $ _tiFieldInfoMap ti - forM_ (M.toList $ _tiEventTriggerInfoMap ti) $ \(trn, eti) -> do - let fullspec = etiOpsDef eti - liftTx $ mkAllTriggersQ trn tn cols strfyNum fullspec - - bool withoutReload withReload reloadRequired - - return res - where - reportFuncs = T.intercalate ", " . map dquoteTxt - -isAltrDropReplace :: QErrM m => T.Text -> m Bool -isAltrDropReplace = either throwErr return . 
matchRegex regex False - where - throwErr s = throw500 $ "compiling regex failed: " <> T.pack s - regex = "alter|drop|replace|create function" - -runRunSQL - :: (QErrM m, UserInfoM m, CacheRWM m, MonadTx m, MonadIO m, HasHttpManager m, HasSQLGenCtx m) - => RunSQL -> m EncJSON -runRunSQL q@(RunSQL t _ mChkMDCnstcy) = do - adminOnly - isMDChkNeeded <- maybe (isAltrDropReplace t) return mChkMDCnstcy - bool (execRawSQL t) (execWithMDCheck q) isMDChkNeeded - --- Should be used only after checking the status -resToCSV :: PQ.Result -> ExceptT T.Text IO [[T.Text]] -resToCSV r = do - nr <- liftIO $ PQ.ntuples r - nc <- liftIO $ PQ.nfields r - - hdr <- forM [0..pred nc] $ \ic -> do - colNameBS <- liftIO $ PQ.fname r ic - maybe (return "unknown") decodeBS colNameBS - - rows <- forM [0..pred nr] $ \ir -> - forM [0..pred nc] $ \ic -> do - cellValBS <- liftIO $ PQ.getvalue r ir ic - maybe (return "NULL") decodeBS cellValBS - - return $ hdr:rows - - where - decodeBS = either (throwError . T.pack . show) return . TE.decodeUtf8' diff --git a/server/src-lib/Hasura/RQL/Types/Catalog.hs b/server/src-lib/Hasura/RQL/Types/Catalog.hs index 2ea43051151..3c95ff66d53 100644 --- a/server/src-lib/Hasura/RQL/Types/Catalog.hs +++ b/server/src-lib/Hasura/RQL/Types/Catalog.hs @@ -1,9 +1,7 @@ --- | This module provides 'fetchCatalogData', which loads the entire catalog in one go from the --- database, consulting tables such as @hdb_catalog.hdb_table@. It is used by --- 'Hasura.RQL.Schema.Table.buildSchemaCache' to seed or reload the schema cache. +-- | Types that represent the raw data stored in the catalog. See also: the module documentation for +-- "Hasura.RQL.DDL.Schema". module Hasura.RQL.Types.Catalog - ( fetchCatalogData - , CatalogMetadata(..) + ( CatalogMetadata(..) , CatalogTable(..) , CatalogTableInfo(..) @@ -16,13 +14,10 @@ module Hasura.RQL.Types.Catalog import Hasura.Prelude -import qualified Database.PG.Query as Q - import Data.Aeson import Data.Aeson.Casing import Data.Aeson.TH -import Hasura.Db import Hasura.RQL.DDL.Schema.Function import Hasura.RQL.Types.Column import Hasura.RQL.Types.Common @@ -98,9 +93,3 @@ data CatalogMetadata , _cmAllowlistCollections :: ![CollectionDef] } deriving (Show, Eq) $(deriveJSON (aesonDrop 3 snakeCase) ''CatalogMetadata) - --- | See "Hasura.RQL.Types.Catalog". -fetchCatalogData :: (MonadTx m) => m CatalogMetadata -fetchCatalogData = - liftTx $ Q.getAltJ . runIdentity . 
Q.getRow <$> Q.withQE defaultTxErrorHandler - $(Q.sqlFromFile "src-rsr/catalog_metadata.sql") () True diff --git a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs index 20167bbd549..46a44b2799d 100644 --- a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs +++ b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs @@ -447,16 +447,12 @@ modDepMapInCache f = do writeSchemaCache $ sc { scDepMap = f (scDepMap sc)} class (Monad m) => CacheRM m where - - -- Get the schema cache askSchemaCache :: m SchemaCache instance (Monad m) => CacheRM (StateT SchemaCache m) where askSchemaCache = get class (CacheRM m) => CacheRWM m where - - -- Get the schema cache writeSchemaCache :: SchemaCache -> m () instance (Monad m) => CacheRWM (StateT SchemaCache m) where diff --git a/server/src-lib/Hasura/Server/App.hs b/server/src-lib/Hasura/Server/App.hs index 6c8472d62cd..734489ee929 100644 --- a/server/src-lib/Hasura/Server/App.hs +++ b/server/src-lib/Hasura/Server/App.hs @@ -45,7 +45,7 @@ import qualified Hasura.Server.PGDump as PGD import Hasura.EncJSON import Hasura.Prelude hiding (get, put) -import Hasura.RQL.DDL.Schema.Table +import Hasura.RQL.DDL.Schema import Hasura.RQL.Types import Hasura.Server.Auth (AuthMode (..), getUserInfo) diff --git a/server/src-lib/Hasura/Server/Query.hs b/server/src-lib/Hasura/Server/Query.hs index 2753491b69f..671165bbbe0 100644 --- a/server/src-lib/Hasura/Server/Query.hs +++ b/server/src-lib/Hasura/Server/Query.hs @@ -16,8 +16,7 @@ import Hasura.RQL.DDL.QueryCollection import Hasura.RQL.DDL.Relationship import Hasura.RQL.DDL.Relationship.Rename import Hasura.RQL.DDL.RemoteSchema -import Hasura.RQL.DDL.Schema.Function -import Hasura.RQL.DDL.Schema.Table +import Hasura.RQL.DDL.Schema import Hasura.RQL.DML.Count import Hasura.RQL.DML.Delete import Hasura.RQL.DML.Insert diff --git a/server/src-lib/Hasura/Server/SchemaUpdate.hs b/server/src-lib/Hasura/Server/SchemaUpdate.hs index 56e704f1860..1ed8c9d26af 100644 --- a/server/src-lib/Hasura/Server/SchemaUpdate.hs +++ b/server/src-lib/Hasura/Server/SchemaUpdate.hs @@ -5,10 +5,10 @@ where import Hasura.Prelude import Hasura.Logging -import Hasura.RQL.DDL.Schema.Table (buildSCWithoutSetup) +import Hasura.RQL.DDL.Schema (buildSchemaCacheWithoutSetup) import Hasura.RQL.Types -import Hasura.Server.App (SchemaCacheRef (..), withSCUpdate) -import Hasura.Server.Init (InstanceId (..)) +import Hasura.Server.App (SchemaCacheRef (..), withSCUpdate) +import Hasura.Server.Init (InstanceId (..)) import Hasura.Server.Logging import Hasura.Server.Query @@ -16,13 +16,13 @@ import Data.Aeson import Data.Aeson.Casing import Data.Aeson.TH -import qualified Control.Concurrent as C -import qualified Control.Concurrent.STM as STM -import qualified Data.Text as T -import qualified Data.Time as UTC -import qualified Database.PG.Query as PG -import qualified Database.PostgreSQL.LibPQ as PQ -import qualified Network.HTTP.Client as HTTP +import qualified Control.Concurrent as C +import qualified Control.Concurrent.STM as STM +import qualified Data.Text as T +import qualified Data.Time as UTC +import qualified Database.PG.Query as PG +import qualified Database.PostgreSQL.LibPQ as PQ +import qualified Network.HTTP.Client as HTTP pgChannel :: PG.PGChannel pgChannel = "hasura_schema_update" @@ -204,7 +204,7 @@ refreshSchemaCache sqlGenCtx pool logger httpManager cacheRef threadType msg = d -- Reload schema cache from catalog resE <- liftIO $ runExceptT $ withSCUpdate cacheRef logger $ peelRun emptySchemaCache adminUserInfo - 
httpManager sqlGenCtx (PGExecCtx pool PG.Serializable) buildSCWithoutSetup + httpManager sqlGenCtx (PGExecCtx pool PG.Serializable) buildSchemaCacheWithoutSetup case resE of Left e -> logError logger threadType $ TEQueryError e Right _ -> From c48904551e396e1484816f61f3d4ed31bfd21e1a Mon Sep 17 00:00:00 2001 From: Alexis King Date: Thu, 15 Aug 2019 16:19:47 -0500 Subject: [PATCH 09/10] Document enum tables and enum table metadata APIs --- docs/_static/hasura-custom.css | 4 + .../schema-metadata-api/table-view.rst | 53 +++- docs/graphql/manual/schema/enums.rst | 257 +++++++++++------- 3 files changed, 221 insertions(+), 93 deletions(-) diff --git a/docs/_static/hasura-custom.css b/docs/_static/hasura-custom.css index d97496eae19..a34142e648a 100644 --- a/docs/_static/hasura-custom.css +++ b/docs/_static/hasura-custom.css @@ -186,6 +186,10 @@ ul { position: relative; } +#docs-content span.target { + font-style: italic; +} + /*** random overrides ***/ .wy-plain-list-decimal ol, diff --git a/docs/graphql/manual/api-reference/schema-metadata-api/table-view.rst b/docs/graphql/manual/api-reference/schema-metadata-api/table-view.rst index cc8d8279fd8..d76c799573a 100644 --- a/docs/graphql/manual/api-reference/schema-metadata-api/table-view.rst +++ b/docs/graphql/manual/api-reference/schema-metadata-api/table-view.rst @@ -49,6 +49,57 @@ Args syntax - true - :ref:`TableName ` - Name of the table + * - is_enum + - false + - Boolean + - When set to ``true``, creates the table as an :ref:`enum table `. + +.. _set_table_is_enum: + +set_table_is_enum +----------------- + +``set_table_is_enum`` sets whether an already-tracked table should be used as an :ref:`enum table `. + +Use table ``user_role`` as an enum table: + +.. code-block:: http + + POST /v1/query HTTP/1.1 + Content-Type: application/json + X-Hasura-Role: admin + + { + "type": "set_table_is_enum", + "args": { + "table": { + "schema": "public", + "name": "user_role" + }, + "is_enum": true + } + } + +.. _set_table_is_enum_syntax: + +Args syntax +^^^^^^^^^^^ + +.. list-table:: + :header-rows: 1 + + * - Key + - Required + - Schema + - Description + * - table + - true + - :ref:`TableName ` + - Name of the table + * - is_enum + - true + - Boolean + - Whether or not the table should be used as an :ref:`enum table `. .. _untrack_table: @@ -76,7 +127,7 @@ Remove a table/view ``author``: } } - + .. _untrack_table_syntax: Args syntax diff --git a/docs/graphql/manual/schema/enums.rst b/docs/graphql/manual/schema/enums.rst index df775303e89..2bd835aeac4 100644 --- a/docs/graphql/manual/schema/enums.rst +++ b/docs/graphql/manual/schema/enums.rst @@ -1,103 +1,176 @@ Enum type fields ================ -.. contents:: Table of contents - :backlinks: none - :depth: 1 - :local: +Enum type fields are restricted to a fixed set of allowed values. In a relational database such as +Postgres, an enum type field in a table can be defined in two ways: -Enum type fields can only take a value from a fixed set of allowed values. +1. Using `native Postgres enum types `__. -In a relational database such as Postgres, an enum type field in a table can be defined by: + While the most obvious solution, native enum types have significant drawbacks: they are not easily mutable. + New values cannot be added to an enum inside a transaction (that is, ``ALTER TYPE ... 
ADD VALUE`` is not + supported by transactional DDL), and values cannot be removed from an enum at all without completely dropping + and recreating it (which cannot be done if the enum is in use by *any* tables, views, or functions). Therefore, + native enum types should only be used for enums that are guaranteed to *never* change, such as days of the + week. -- using native database enum types -- setting a foreign-key to a reference table which contains the list of allowed values. +2. Using `foreign-key references `__ to a single-column + table. -`Postgres Enum types `__ are not easily mutable. Hence -they should be used only for enums which are not going to change over time. e.g. measurement units, days of the -week, etc. + This approach represents an enum using ordinary relational database concepts. The enum type is represented by a + table, and the values of the enum are rows in the table. Columns in other tables that use the enum are ordinary + foreign-key references to the enum table. -For enums whose values are dynamic and will require updates, the reference table approach is recommended. e.g. list -of tags, list of teams, etc. + For enums with values that are dynamic and may require updates, such as a list of tags or user roles, this + approach is strongly recommended. Modifying an enum defined this way is easy: simply insert, update, or delete + rows in the enum table (and updates or deletes can even be cascaded to references, and they may be done within + a transaction). + +Given the limitations of native Postgres enum types, Hasura currently only generates GraphQL enum types for enums +defined using the second approach (i.e. referenced tables). You may use native Postgres enum types in your database +schema, but they will essentially be treated like text fields in the generated GraphQL schema. Therefore, this guide +focuses primarily on modeling an enum using a reference table, but you may still use native Postgres enum types to +help maintain data consistency in your database. + +Example: Modeling an enum using an enum table +--------------------------------------------- + +Let’s say we have a database that tracks user information, and users may only have one of three specific roles: user, +moderator, or administrator. To represent that, we might have a ``users`` table with the following schema: + +.. code-block:: sql + + CREATE TABLE users ( + id serial PRIMARY KEY, + name text NOT NULL, + role text NOT NULL + ); + +Now we can insert some users into our database: + +.. code-block:: sql + + INSERT INTO users (name, role) VALUES + ('Alyssa', 'administrator'), + ('Ben', 'moderator'), + ('Gerald', 'user'); + +This works alright, but it doesn’t prevent us from inserting nonsensical values for ``role``, such as + +.. code-block:: sql + + INSERT INTO users (name, role) VALUES + ('Hal', 'spaghetti'); + +which we certainly don’t want. Let’s create an enum to restrict the allowed values. + +Create an enum table +^^^^^^^^^^^^^^^^^^^^ + +To represent our enum, we’re going to create an _`enum table`, which for Hasura’s purposes is any table that meets +the following restrictions: + +1. The table must have a single-column primary key of type ``text``. The values of this column are the legal values + of the enum, and they must all be `valid GraphQL enum value names + `__. +2. Optionally, the table may have a second column, also of type ``text``, which will be used as a description of each + value in the generated GraphQL schema. +3. The table may not contain any other columns. 
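+
+To make these restrictions concrete, here is a sketch of a table (the table and column names are
+hypothetical) that would *not* qualify as an enum table:
+
+.. code-block:: sql
+
+   -- not a valid enum table: the primary key is an integer rather than text,
+   -- and "priority" is a third column beyond the value and description columns
+   CREATE TABLE invalid_user_role (
+     id serial PRIMARY KEY,
+     value text NOT NULL,
+     priority integer
+   );
+
+Likewise, a table whose primary-key values are not valid GraphQL enum value names (for example,
+values containing spaces) would violate the first restriction.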
+ +For example, to create an enum that represents our user roles, we would create the following table: + +.. code-block:: sql + + CREATE TABLE user_role ( + value text PRIMARY KEY, + comment text + ); + + INSERT INTO user_role (value, comment) VALUES + ('user', 'Ordinary users'), + ('moderator', 'Users with the privilege to ban users'), + ('administrator', 'Users with the privilege to set users’ roles'); + +Use the enum table +^^^^^^^^^^^^^^^^^^ + +Now that we’ve created an enum table, we need to update our ``users`` table to reference it: + +.. code-block:: sql + + ALTER TABLE users ADD CONSTRAINT + users_role_fkey FOREIGN KEY (role) REFERENCES user_role; + +Next, we need to tell Hasura that this table represents an enum. We can do that by passing ``true`` for the +``is_enum`` option of the :ref:`track_table` API, or we can use the :ref:`set_table_is_enum` API to change whether or +not an already-tracked table should be used as an enum: + +.. code-block:: http + + POST /v1/query HTTP/1.1 + Content-Type: application/json + X-Hasura-Role: admin + + { + "type": "track_table", + "args": { + "table": { + "schema": "public", + "name": "user_role" + }, + "is_enum": true + } + } + +Make queries using enum values +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Once the table has been tracked as an enum, the GraphQL schema will be updated to reflect that the ``role`` column of +the ``users`` table only permits the values in the ``user_role`` table: + +.. code-block:: graphql + + type users { + id: Int! + name: String! + role: user_role_enum! + } + + enum user_role_enum { + "Users with the privilege to set users’ roles" + administrator + + "Users with the privilege to ban users" + moderator + + "Ordinary users" + user + } + +When making queries that filter on the ``role`` column, use the name of the enum value directly rather than providing +a string: + +.. graphiql:: + :view_only: + :query: + { + users(where: {role: {_eq: administrator}}) { + id + name + } + } + :response: + { + "data": { + "users": [ + { + "id": 1, + "name": "Alyssa" + } + ] + } + } .. admonition:: Current limitations - Hasura currently does not generate GraphQL enums. This feature is being worked upon. Hence this guide is currently - only tailored towards helping you maintain data consistency in your database - - -**For example**, let's say we have a table ``magazine`` with fields ``(id, title, issue_month, issue_year)`` -and we would like to restrict the values of the ``issue_month`` field to just the months of the year (i.e. January, -February, and so on). - -The following are the approaches we can use to achieve this: - -Option 1: Using native Postgres enum type ------------------------------------------ - -Create a Postgres enum type -^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Open the Hasura console and head to the ``Data -> SQL`` interface. - -Run the following SQL statement: - -.. code-block:: sql - - CREATE TYPE month AS ENUM ('January', 'February', 'March', 'and so on...'); - -Set column type as the Postgres enum type -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Run the following SQL statement if the table doesn't yet exist: - -.. code-block:: sql - :emphasize-lines: 4 - - CREATE TABLE magazine( - id serial PRIMARY KEY, - title text NOT NULL, - issue_month month, - issue_year integer - ); - -If table exists, run the following SQL statement: - -.. code-block:: sql - - ALTER TABLE magazine - ALTER COLUMN issue_month TYPE month using issue_month::month; - - -Now the ``issue_month`` field can only take values from the months of the year. 
- -See `Postgres Enum types documentation `__ for more info. - -Option 2: Using a reference table ---------------------------------- - -Create a reference table for the enum -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Open the Hasura console and head to the ``Data -> Create table`` interface. - -Create a table ``months_of_the_year`` with just one column ``month``, which is the primary key: - -.. thumbnail:: ../../../img/graphql/manual/schema/enum-create-ref-table.png - -Add the allowed enum values to the reference table -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Head to the ``GraphiQL`` tab of the console and run an insert mutation to insert the allowed enum values: - -.. thumbnail:: ../../../img/graphql/manual/schema/enum-insert-ref-values.png - -Add a foreign-key constraint to the reference table -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Head to the ``Data -> magazine -> Modify`` tab of the console and set a foreign-key to the ``months_of_the_year`` table -using the fields: ``issue_month -> months_of_the_year :: month``: - -.. thumbnail:: ../../../img/graphql/manual/schema/enum-set-foreign-key.png - -Now the ``issue_month`` field can only take values from the months of the year. + Currently, Hasura does not automatically detect changes to the contents of enum tables, so the GraphQL schema will + only be updated after manually reloading metadata after inserting, updating, or deleting rows from an enum table. From 78ec90679401dba4e3870759f22f7fa3e3bcb9bc Mon Sep 17 00:00:00 2001 From: Alexis King Date: Fri, 23 Aug 2019 08:17:27 -0500 Subject: [PATCH 10/10] server: Include number of enum tables in metrics --- server/src-lib/Hasura/Server/Telemetry.hs | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/server/src-lib/Hasura/Server/Telemetry.hs b/server/src-lib/Hasura/Server/Telemetry.hs index 6d7cd4d16f5..b38bb826252 100644 --- a/server/src-lib/Hasura/Server/Telemetry.hs +++ b/server/src-lib/Hasura/Server/Telemetry.hs @@ -58,6 +58,7 @@ data Metrics = Metrics { _mtTables :: !Int , _mtViews :: !Int + , _mtEnumTables :: !Int , _mtRelationships :: !RelationshipMetric , _mtPermissions :: !PermissionMetric , _mtEventTriggers :: !Int @@ -128,12 +129,13 @@ runTelemetry (Logger logger) manager cacheRef dbId instanceId = do computeMetrics :: SchemaCache -> Metrics computeMetrics sc = - let nTables = Map.size $ Map.filter (isNothing . _tiViewInfo) usrTbls - nViews = Map.size $ Map.filter (isJust . _tiViewInfo) usrTbls - allRels = join $ Map.elems $ Map.map relsOfTbl usrTbls + let nTables = countUserTables (isNothing . _tiViewInfo) + nViews = countUserTables (isJust . _tiViewInfo) + nEnumTables = countUserTables (isJust . _tiEnumValues) + allRels = join $ Map.elems $ Map.map relsOfTbl userTables (manualRels, autoRels) = partition riIsManual allRels relMetrics = RelationshipMetric (length manualRels) (length autoRels) - rolePerms = join $ Map.elems $ Map.map permsOfTbl usrTbls + rolePerms = join $ Map.elems $ Map.map permsOfTbl userTables nRoles = length $ nub $ fst <$> rolePerms allPerms = snd <$> rolePerms insPerms = calcPerms _permIns allPerms @@ -143,14 +145,15 @@ computeMetrics sc = permMetrics = PermissionMetric selPerms insPerms updPerms delPerms nRoles evtTriggers = Map.size $ Map.filter (not . Map.null) - $ Map.map _tiEventTriggerInfoMap usrTbls + $ Map.map _tiEventTriggerInfoMap userTables rmSchemas = Map.size $ scRemoteSchemas sc funcs = Map.size $ Map.filter (not . 
fiSystemDefined) $ scFunctions sc - in Metrics nTables nViews relMetrics permMetrics evtTriggers rmSchemas funcs + in Metrics nTables nViews nEnumTables relMetrics permMetrics evtTriggers rmSchemas funcs where - usrTbls = Map.filter (not . _tiSystemDefined) $ scTables sc + userTables = Map.filter (not . _tiSystemDefined) $ scTables sc + countUserTables predicate = length . filter predicate $ Map.elems userTables calcPerms :: (RolePermInfo -> Maybe a) -> [RolePermInfo] -> Int calcPerms fn perms = length $ catMaybes $ map fn perms