Use a significantly more efficient table_info_agg view
Also, use the view in Schema.Diff to share some more logic.
parent 447bac74e7
commit c322e8a5d4
@ -1,27 +0,0 @@
module Control.Arrow.Embed
  ( ArrowEmbed(..)
  ) where

import Prelude hiding (id, (.))

import Control.Arrow.Extended
import Control.Category

-- | Allows “embedding” an arrow in another arrow, assuming the target arrow supports the
-- necessary operations. For example, a 'Kleisli' arrow can be embedded in any arrow that implements
-- 'ArrowKleisli'.
class (Arrow arr1, Arrow arr2) => ArrowEmbed arr1 arr2 where
  embedA :: arr1 a b -> arr2 a b

instance (ArrowKleisli m arr) => ArrowEmbed (Kleisli m) arr where
  embedA (Kleisli f) = arrM f

instance (ArrowChoice arr1, ArrowChoice arr2, ArrowError e arr2, ArrowEmbed arr1 arr2)
  => ArrowEmbed (ErrorA e arr1) arr2 where
  embedA (ErrorA f) = embedA f >>> (throwA ||| returnA)

instance (ArrowReader r arr2, ArrowEmbed arr1 arr2) => ArrowEmbed (ReaderA r arr1) arr2 where
  embedA (ReaderA f) = (id &&& askA) >>> embedA f

instance (ArrowWriter w arr2, ArrowEmbed arr1 arr2) => ArrowEmbed (WriterA w arr1) arr2 where
  embedA (WriterA f) = embedA f >>> second tellA >>> arr fst
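The comment above describes what the removed 'embedA' was for: lifting a computation written in a smaller arrow into any arrow that supports the corresponding operations. A minimal sketch of the idea, not part of this patch, assuming the ArrowKleisli class and arrM from the engine's Control.Arrow.Extended; the I/O example itself is hypothetical:

import Control.Arrow (Kleisli (..))
import Control.Arrow.Extended (ArrowKleisli, arrM)  -- engine-internal module

-- A monadic computation packaged as a Kleisli arrow (hypothetical example).
promptLine :: Kleisli IO String String
promptLine = Kleisli (\prompt -> putStrLn prompt >> getLine)

-- Embedding it into any richer arrow is exactly what the deleted
-- 'ArrowEmbed (Kleisli m) arr' instance did: embedA (Kleisli f) = arrM f.
promptLineA :: (ArrowKleisli IO arr) => arr String String
promptLineA = arrM (\prompt -> putStrLn prompt >> getLine)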
@ -23,6 +23,7 @@ module Control.Arrow.Extended
  , bindA

  , ArrowError(..)
  , liftEitherA
  , mapErrorA
  , ErrorA(..)
@ -181,6 +182,9 @@ class (Arrow arr) => ArrowError e arr | arr -> e where
  -- see Note [Weird control operator types]
  catchA :: arr (a, s) b -> arr (a, (e, s)) b -> arr (a, s) b

liftEitherA :: (ArrowChoice arr, ArrowError e arr) => arr (Either e a) a
liftEitherA = throwA ||| returnA

mapErrorA :: (ArrowError e arr) => arr (a, s) b -> arr (a, ((e -> e), s)) b
mapErrorA f = proc (a, (g, s)) -> (f -< (a, s)) `catchA` \e -> throwA -< g e
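liftEitherA is the newly exported combinator here; it is just throwA ||| returnA, so a step that produces an Either can push its Left through the arrow's error channel. A small usage sketch, not part of the patch, with a hypothetical validation function:

{-# LANGUAGE Arrows #-}

import Control.Arrow (ArrowChoice, returnA)
import Control.Arrow.Extended (ArrowError (..), liftEitherA)

-- Hypothetical validation step: collapse a pure Either into the arrow,
-- rethrowing the Left side instead of pattern matching by hand.
parsePort :: (ArrowChoice arr, ArrowError String arr) => arr String Int
parsePort = proc txt -> do
  n <- liftEitherA -< if not (null txt) && all (`elem` ['0' .. '9']) txt
                        then Right (read txt)
                        else Left ("not a port number: " <> txt)
  returnA -< n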
@ -169,7 +169,7 @@ parseOnConflict
  -> m RI.ConflictClauseP1
parseOnConflict tn updFiltrM allColMap val = withPathK "on_conflict" $
  flip withObject val $ \_ obj -> do
    constraint <- RI.Constraint <$> parseConstraint obj
    constraint <- RI.CTConstraint <$> parseConstraint obj
    updCols <- getUpdCols obj
    case updCols of
      [] -> return $ RI.CP1DoNothing $ Just constraint
@ -126,8 +126,7 @@ mkGCtxRole'
|
||||
-- ^ update cols
|
||||
-> Maybe ()
|
||||
-- ^ delete cols
|
||||
-> [PGColumnInfo]
|
||||
-- ^ primary key columns
|
||||
-> Maybe (PrimaryKey PGColumnInfo)
|
||||
-> [ConstraintName]
|
||||
-- ^ constraints
|
||||
-> Maybe ViewInfo
|
||||
@ -283,7 +282,7 @@ mkGCtxRole' tn descM insPermM selPermM updColsM delPermM pkeyCols constraints vi
|
||||
-- the fields used in table object
|
||||
selObjFldsM = mkFldMap (mkTableTy tn) <$> selFldsM
|
||||
-- the scalar set for table_by_pk arguments
|
||||
selByPkScalarSet = pkeyCols ^.. folded.to pgiType._PGColumnScalar
|
||||
selByPkScalarSet = pkeyCols ^.. folded.to _pkColumns.folded.to pgiType._PGColumnScalar
|
||||
|
||||
ordByInpCtxM = mkOrdByInpObj tn <$> selFldsM
|
||||
(ordByInpObjM, ordByCtxM) = case ordByInpCtxM of
|
||||
@ -318,7 +317,7 @@ mkGCtxRole' tn descM insPermM selPermM updColsM delPermM pkeyCols constraints vi
|
||||
|
||||
getRootFldsRole'
|
||||
:: QualifiedTable
|
||||
-> [PGColumnInfo]
|
||||
-> Maybe (PrimaryKey PGColumnInfo)
|
||||
-> [ConstraintName]
|
||||
-> FieldInfoMap FieldInfo
|
||||
-> [FunctionInfo]
|
||||
@ -329,7 +328,7 @@ getRootFldsRole'
|
||||
-> Maybe ViewInfo
|
||||
-> TableConfig -- custom config
|
||||
-> RootFields
|
||||
getRootFldsRole' tn primCols constraints fields funcs insM
|
||||
getRootFldsRole' tn primaryKey constraints fields funcs insM
|
||||
selM updM delM viM tableConfig =
|
||||
RootFields
|
||||
{ rootQueryFields = makeFieldMap $
|
||||
@ -338,7 +337,7 @@ getRootFldsRole' tn primCols constraints fields funcs insM
|
||||
<> catMaybes
|
||||
[ getSelDet <$> selM
|
||||
, getSelAggDet selM
|
||||
, getPKeySelDet selM primCols
|
||||
, getPKeySelDet <$> selM <*> primaryKey
|
||||
]
|
||||
, rootMutationFields = makeFieldMap $ catMaybes
|
||||
[ mutHelper viIsInsertable getInsDet insM
|
||||
@ -397,12 +396,11 @@ getRootFldsRole' tn primCols constraints fields funcs insM
|
||||
)
|
||||
|
||||
selByPkCustName = getCustomNameWith _tcrfSelectByPk
|
||||
getPKeySelDet Nothing _ = Nothing
|
||||
getPKeySelDet _ [] = Nothing
|
||||
getPKeySelDet (Just (selFltr, _, hdrs, _)) pCols = Just
|
||||
( QCSelectPkey . SelPkOpCtx tn hdrs selFltr $ mkPGColGNameMap pCols
|
||||
, mkSelFldPKey selByPkCustName tn pCols
|
||||
)
|
||||
getPKeySelDet (selFltr, _, hdrs, _) key =
|
||||
let keyColumns = toList $ _pkColumns key
|
||||
in ( QCSelectPkey . SelPkOpCtx tn hdrs selFltr $ mkPGColGNameMap keyColumns
|
||||
, mkSelFldPKey selByPkCustName tn keyColumns
|
||||
)
|
||||
|
||||
getFuncQueryFlds (selFltr, pLimit, hdrs, _) =
|
||||
funcFldHelper QCFuncQuery mkFuncQueryFld selFltr pLimit hdrs
|
||||
@ -602,7 +600,7 @@ mkGCtxRole
|
||||
-> QualifiedTable
|
||||
-> Maybe PGDescription
|
||||
-> FieldInfoMap FieldInfo
|
||||
-> [PGColumnInfo]
|
||||
-> Maybe (PrimaryKey PGColumnInfo)
|
||||
-> [ConstraintName]
|
||||
-> [FunctionInfo]
|
||||
-> Maybe ViewInfo
|
||||
@ -610,7 +608,7 @@ mkGCtxRole
|
||||
-> RoleName
|
||||
-> RolePermInfo
|
||||
-> m (TyAgg, RootFields, InsCtxMap)
|
||||
mkGCtxRole tableCache tn descM fields pColInfos constraints funcs viM tabConfigM role permInfo = do
|
||||
mkGCtxRole tableCache tn descM fields primaryKey constraints funcs viM tabConfigM role permInfo = do
|
||||
selPermM <- mapM (getSelPerm tableCache fields role) $ _permSel permInfo
|
||||
tabInsInfoM <- forM (_permIns permInfo) $ \ipi -> do
|
||||
ctx <- mkInsCtx role tableCache fields ipi $ _permUpd permInfo
|
||||
@ -620,8 +618,8 @@ mkGCtxRole tableCache tn descM fields pColInfos constraints funcs viM tabConfigM
|
||||
insCtxM = fst <$> tabInsInfoM
|
||||
updColsM = filterColFlds . upiCols <$> _permUpd permInfo
|
||||
tyAgg = mkGCtxRole' tn descM insPermM selPermM updColsM
|
||||
(void $ _permDel permInfo) pColInfos constraints viM funcs
|
||||
rootFlds = getRootFldsRole tn pColInfos constraints fields funcs
|
||||
(void $ _permDel permInfo) primaryKey constraints viM funcs
|
||||
rootFlds = getRootFldsRole tn primaryKey constraints fields funcs
|
||||
viM permInfo tabConfigM
|
||||
insCtxMap = maybe Map.empty (Map.singleton tn) insCtxM
|
||||
return (tyAgg, rootFlds, insCtxMap)
|
||||
@ -633,7 +631,7 @@ mkGCtxRole tableCache tn descM fields pColInfos constraints funcs viM tabConfigM
|
||||
|
||||
getRootFldsRole
|
||||
:: QualifiedTable
|
||||
-> [PGColumnInfo]
|
||||
-> Maybe (PrimaryKey PGColumnInfo)
|
||||
-> [ConstraintName]
|
||||
-> FieldInfoMap FieldInfo
|
||||
-> [FunctionInfo]
|
||||
@ -667,26 +665,24 @@ mkGCtxMapTable
|
||||
-> m (Map.HashMap RoleName (TyAgg, RootFields, InsCtxMap))
|
||||
mkGCtxMapTable tableCache funcCache tabInfo = do
|
||||
m <- flip Map.traverseWithKey rolePerms $
|
||||
mkGCtxRole tableCache tn descM fields pkeyColInfos validConstraints
|
||||
mkGCtxRole tableCache tn descM fields primaryKey validConstraints
|
||||
tabFuncs viewInfo customConfig
|
||||
adminInsCtx <- mkAdminInsCtx tn tableCache fields
|
||||
adminSelFlds <- mkAdminSelFlds fields tableCache
|
||||
let adminCtx = mkGCtxRole' tn descM (Just (cols, icRelations adminInsCtx))
|
||||
(Just (True, adminSelFlds)) (Just cols) (Just ())
|
||||
pkeyColInfos validConstraints viewInfo tabFuncs
|
||||
primaryKey validConstraints viewInfo tabFuncs
|
||||
adminInsCtxMap = Map.singleton tn adminInsCtx
|
||||
return $ Map.insert adminRole (adminCtx, adminRootFlds, adminInsCtxMap) m
|
||||
where
|
||||
TableInfo coreInfo rolePerms _ = tabInfo
|
||||
TableCoreInfo tn descM _ fields constraints pkeyCols viewInfo _ customConfig = coreInfo
|
||||
validConstraints = mkValidConstraints constraints
|
||||
TableCoreInfo tn descM _ fields primaryKey _ _ viewInfo _ customConfig = coreInfo
|
||||
validConstraints = mkValidConstraints $ map _cName (tciUniqueOrPrimaryKeyConstraints coreInfo)
|
||||
cols = getValidCols fields
|
||||
colInfos = getCols fields
|
||||
pkeyColInfos = getColInfos pkeyCols colInfos
|
||||
tabFuncs = filter (isValidObjectName . fiName) $
|
||||
getFuncsOfTable tn funcCache
|
||||
adminRootFlds =
|
||||
getRootFldsRole' tn pkeyColInfos validConstraints fields tabFuncs
|
||||
getRootFldsRole' tn primaryKey validConstraints fields tabFuncs
|
||||
(Just ([], True)) (Just (noFilter, Nothing, [], True))
|
||||
(Just (cols, mempty, noFilter, [])) (Just (noFilter, []))
|
||||
viewInfo customConfig
|
||||
|
@ -73,7 +73,7 @@ parseDropNotice t = do
|
||||
[_, cn, _, _, tn] -> do
|
||||
qt <- dottedTxtToQualTable tn
|
||||
return $ Right $ SOTableObj qt $
|
||||
TOCons $ ConstraintName cn
|
||||
TOForeignKey $ ConstraintName cn
|
||||
_ -> throw500 $ "failed to parse constraint cascade line : " <> cl
|
||||
| otherwise = return $ Left cl
|
||||
|
||||
|
@ -109,7 +109,7 @@ dropView vn =
|
||||
|
||||
procSetObj
|
||||
:: (QErrM m)
|
||||
=> TableCoreInfo FieldInfo -> Maybe (ColumnValues Value)
|
||||
=> TableCoreInfo -> Maybe (ColumnValues Value)
|
||||
-> m (PreSetColsPartial, [Text], [SchemaDependency])
|
||||
procSetObj ti mObj = do
|
||||
(setColTups, deps) <- withPathK "set" $
|
||||
@ -131,7 +131,7 @@ procSetObj ti mObj = do
|
||||
|
||||
buildInsPermInfo
|
||||
:: (QErrM m, TableCoreInfoRM m)
|
||||
=> TableCoreInfo FieldInfo
|
||||
=> TableCoreInfo
|
||||
-> PermDef InsPerm
|
||||
-> m (WithDeps InsPermInfo)
|
||||
buildInsPermInfo tabInfo (PermDef rn (InsPerm chk set mCols) _) =
|
||||
@ -221,7 +221,7 @@ instance FromJSON SelPerm where
|
||||
|
||||
buildSelPermInfo
|
||||
:: (QErrM m, TableCoreInfoRM m)
|
||||
=> TableCoreInfo FieldInfo
|
||||
=> TableCoreInfo
|
||||
-> SelPerm
|
||||
-> m (WithDeps SelPermInfo)
|
||||
buildSelPermInfo tabInfo sp = withPathK "permission" $ do
|
||||
@ -304,7 +304,7 @@ type CreateUpdPerm = CreatePerm UpdPerm
|
||||
|
||||
buildUpdPermInfo
|
||||
:: (QErrM m, TableCoreInfoRM m)
|
||||
=> TableCoreInfo FieldInfo
|
||||
=> TableCoreInfo
|
||||
-> UpdPerm
|
||||
-> m (WithDeps UpdPermInfo)
|
||||
buildUpdPermInfo tabInfo (UpdPerm colSpec set fltr) = do
|
||||
@ -366,7 +366,7 @@ type CreateDelPerm = CreatePerm DelPerm
|
||||
|
||||
buildDelPermInfo
|
||||
:: (QErrM m, TableCoreInfoRM m)
|
||||
=> TableCoreInfo FieldInfo
|
||||
=> TableCoreInfo
|
||||
-> DelPerm
|
||||
-> m (WithDeps DelPermInfo)
|
||||
buildDelPermInfo tabInfo (DelPerm fltr) = do
|
||||
|
@ -260,7 +260,7 @@ class (ToJSON a) => IsPerm a where
|
||||
|
||||
buildPermInfo
|
||||
:: (QErrM m, TableCoreInfoRM m)
|
||||
=> TableCoreInfo FieldInfo
|
||||
=> TableCoreInfo
|
||||
-> PermDef a
|
||||
-> m (WithDeps (PermInfo a))
|
||||
|
||||
@ -284,7 +284,7 @@ class (ToJSON a) => IsPerm a where
|
||||
|
||||
-- FIXME: Push into addPermP1
|
||||
validateViewPerm
|
||||
:: (IsPerm a, QErrM m) => PermDef a -> TableCoreInfo fieldInfo -> m ()
|
||||
:: (IsPerm a, QErrM m) => PermDef a -> TableCoreInfo -> m ()
|
||||
validateViewPerm permDef tableInfo =
|
||||
case permAcc of
|
||||
PASelect -> return ()
|
||||
|
@ -24,7 +24,6 @@ import Hasura.SQL.Types
|
||||
|
||||
import Data.Aeson.Types
|
||||
import qualified Data.HashMap.Strict as HM
|
||||
import qualified Data.HashSet as HS
|
||||
import qualified Data.Map.Strict as M
|
||||
import Data.Tuple (swap)
|
||||
import Instances.TH.Lift ()
|
||||
@ -87,10 +86,10 @@ delRelFromCatalog (QualifiedObject sn tn) rn =
|
||||
objRelP2Setup
|
||||
:: (QErrM m)
|
||||
=> QualifiedTable
|
||||
-> HS.HashSet ForeignKey
|
||||
-> [ForeignKey]
|
||||
-> RelDef ObjRelUsing
|
||||
-> m (RelInfo, [SchemaDependency])
|
||||
objRelP2Setup qt fkeys (RelDef rn ru _) = case ru of
|
||||
objRelP2Setup qt foreignKeys (RelDef rn ru _) = case ru of
|
||||
RUManual (ObjRelManualConfig rm) -> do
|
||||
let refqt = rmTable rm
|
||||
(lCols, rCols) = unzip $ M.toList $ rmColumns rm
|
||||
@ -98,33 +97,35 @@ objRelP2Setup qt fkeys (RelDef rn ru _) = case ru of
|
||||
dependencies = map (mkDependency qt DRLeftColumn) lCols
|
||||
<> map (mkDependency refqt DRRightColumn) rCols
|
||||
pure (RelInfo rn ObjRel (zip lCols rCols) refqt True, dependencies)
|
||||
|
||||
RUFKeyOn cn -> do
|
||||
ForeignKey _ refqt _ consName colMap <-
|
||||
getRequiredFkey cn fkeys $ \fk -> _fkTable fk == qt
|
||||
RUFKeyOn columnName -> do
|
||||
ForeignKey constraint foreignTable colMap <- getRequiredFkey columnName foreignKeys
|
||||
let dependencies =
|
||||
[ SchemaDependency (SOTableObj qt $ TOCons consName) DRFkey
|
||||
, SchemaDependency (SOTableObj qt $ TOCol cn) DRUsingColumn
|
||||
[ SchemaDependency (SOTableObj qt $ TOForeignKey (_cName constraint)) DRFkey
|
||||
, SchemaDependency (SOTableObj qt $ TOCol columnName) DRUsingColumn
|
||||
-- this needs to be added explicitly to handle the remote table being untracked. In this case,
|
||||
-- neither the using_col nor the constraint name will help.
|
||||
, SchemaDependency (SOTable refqt) DRRemoteTable
|
||||
, SchemaDependency (SOTable foreignTable) DRRemoteTable
|
||||
]
|
||||
pure (RelInfo rn ObjRel (HM.toList colMap) refqt False, dependencies)
|
||||
pure (RelInfo rn ObjRel (HM.toList colMap) foreignTable False, dependencies)
|
||||
|
||||
arrRelP2Setup
|
||||
:: (QErrM m)
|
||||
=> QualifiedTable -> HS.HashSet ForeignKey -> ArrRelDef -> m (RelInfo, [SchemaDependency])
|
||||
arrRelP2Setup qt fkeys (RelDef rn ru _) = case ru of
|
||||
=> HashMap QualifiedTable [ForeignKey]
|
||||
-> QualifiedTable
|
||||
-> ArrRelDef
|
||||
-> m (RelInfo, [SchemaDependency])
|
||||
arrRelP2Setup foreignKeys qt (RelDef rn ru _) = case ru of
|
||||
RUManual (ArrRelManualConfig rm) -> do
|
||||
let refqt = rmTable rm
|
||||
(lCols, rCols) = unzip $ M.toList $ rmColumns rm
|
||||
deps = map (\c -> SchemaDependency (SOTableObj qt $ TOCol c) DRLeftColumn) lCols
|
||||
<> map (\c -> SchemaDependency (SOTableObj refqt $ TOCol c) DRRightColumn) rCols
|
||||
return (RelInfo rn ArrRel (zip lCols rCols) refqt True, deps)
|
||||
pure (RelInfo rn ArrRel (zip lCols rCols) refqt True, deps)
|
||||
RUFKeyOn (ArrRelUsingFKeyOn refqt refCol) -> do
|
||||
ForeignKey _ _ _ consName colMap <- getRequiredFkey refCol fkeys $
|
||||
\fk -> _fkTable fk == refqt && _fkRefTable fk == qt
|
||||
let deps = [ SchemaDependency (SOTableObj refqt $ TOCons consName) DRRemoteFkey
|
||||
foreignTableForeignKeys <- getTableInfo refqt foreignKeys
|
||||
let keysThatReferenceUs = filter ((== qt) . _fkForeignTable) foreignTableForeignKeys
|
||||
ForeignKey constraint _ colMap <- getRequiredFkey refCol keysThatReferenceUs
|
||||
let deps = [ SchemaDependency (SOTableObj refqt $ TOForeignKey (_cName constraint)) DRRemoteFkey
|
||||
, SchemaDependency (SOTableObj refqt $ TOCol refCol) DRUsingColumn
|
||||
-- we don't need to necessarily track the remote table like we did in
|
||||
-- case of obj relationships as the remote table is indirectly
|
||||
@ -132,7 +133,7 @@ arrRelP2Setup qt fkeys (RelDef rn ru _) = case ru of
|
||||
, SchemaDependency (SOTable refqt) DRRemoteTable
|
||||
]
|
||||
mapping = HM.toList colMap
|
||||
return (RelInfo rn ArrRel (map swap mapping) refqt False, deps)
|
||||
pure (RelInfo rn ArrRel (map swap mapping) refqt False, deps)
|
||||
|
||||
purgeRelDep :: (MonadTx m) => SchemaObjId -> m ()
|
||||
purgeRelDep (SOTableObj tn (TOPerm rn pt)) = purgePerm tn rn pt
|
||||
@ -176,16 +177,14 @@ setRelComment (SetRelComment (QualifiedObject sn tn) rn comment) =
|
||||
getRequiredFkey
|
||||
:: (QErrM m)
|
||||
=> PGCol
|
||||
-> HS.HashSet ForeignKey
|
||||
-> (ForeignKey -> Bool)
|
||||
-> [ForeignKey]
|
||||
-> m ForeignKey
|
||||
getRequiredFkey col fkeySet preCondition =
|
||||
case filterFkeys of
|
||||
getRequiredFkey col fkeys =
|
||||
case filteredFkeys of
|
||||
[] -> throw400 ConstraintError
|
||||
"no foreign constraint exists on the given column"
|
||||
[k] -> return k
|
||||
_ -> throw400 ConstraintError
|
||||
"more than one foreign key constraint exists on the given column"
|
||||
where
|
||||
filterFkeys = HS.toList $ HS.filter filterFn fkeySet
|
||||
filterFn k = preCondition k && HM.keys (_fkColumnMapping k) == [col]
|
||||
filteredFkeys = filter ((== [col]) . HM.keys . _fkColumnMapping) fkeys
|
||||
|
@ -138,24 +138,22 @@ buildSchemaCacheRule = proc inputs -> do
|
||||
=> (CatalogMetadata, InvalidationMap) `arr` BuildOutputs
|
||||
buildAndCollectInfo = proc (catalogMetadata, invalidationMap) -> do
|
||||
let CatalogMetadata tables relationships permissions
|
||||
eventTriggers remoteSchemas functions fkeys' allowlistDefs
|
||||
eventTriggers remoteSchemas functions allowlistDefs
|
||||
computedFields = catalogMetadata
|
||||
|
||||
-- tables
|
||||
tableRawInfos <- buildTableCache -< tables
|
||||
let tableNames = HS.fromList $ M.keys tableRawInfos
|
||||
|
||||
-- relationships and computed fields
|
||||
let relationshipsByTable = M.groupOn _crTable relationships
|
||||
computedFieldsByTable = M.groupOn (_afcTable . _cccComputedField) computedFields
|
||||
fkeys = HS.fromList fkeys'
|
||||
tableCoreInfos <- (tableRawInfos >- returnA)
|
||||
>-> (\info -> (info, relationshipsByTable) >- alignExtraTableInfo mkRelationshipMetadataObject)
|
||||
>-> (\info -> (info, computedFieldsByTable) >- alignExtraTableInfo mkComputedFieldMetadataObject)
|
||||
>-> (| Inc.keyed (\_ ((tableRawInfo, tableRelationships), tableComputedFields) -> do
|
||||
let columns = _tciFieldInfoMap tableRawInfo
|
||||
allFields <- addNonColumnFields -<
|
||||
(fkeys, tableNames, columns, tableRelationships, tableComputedFields)
|
||||
(tableRawInfos, columns, tableRelationships, tableComputedFields)
|
||||
returnA -< tableRawInfo { _tciFieldInfoMap = allFields }) |)
|
||||
|
||||
-- permissions and event triggers
|
||||
@ -183,6 +181,7 @@ buildSchemaCacheRule = proc inputs -> do
|
||||
-- bindA -< liftIO $ putStrLn $ "----> [build/events] " <> show eventTime
|
||||
|
||||
-- sql functions
|
||||
let tableNames = HS.fromList $ M.keys tableCache
|
||||
functionCache <- (mapFromL _cfFunction functions >- returnA)
|
||||
>-> (| Inc.keyed (\_ (CatalogFunction qf systemDefined config funcDefs) -> do
|
||||
let definition = toJSON $ TrackFunction qf
|
||||
@ -299,7 +298,7 @@ buildSchemaCacheRule = proc inputs -> do
|
||||
(| onNothingA (returnA -< (remoteSchemas, gCtxMap, defGCtx)) |) <-<
|
||||
(| withRecordInconsistency (case M.lookup name remoteSchemas of
|
||||
Just _ -> throwA -< err400 AlreadyExists "duplicate definition for remote schema"
|
||||
Nothing -> (throwA ||| returnA) <<< bindA -< runExceptT do
|
||||
Nothing -> liftEitherA <<< bindA -< runExceptT do
|
||||
rsCtx <- addRemoteSchemaP2Setup remoteSchema
|
||||
let rGCtx = convRemoteGCtx $ rscGCtx rsCtx
|
||||
mergedGCtxMap <- mergeRemoteSchema gCtxMap rGCtx
|
||||
|
@ -64,7 +64,7 @@ type CacheBuildA = WriterA (Seq CollectedInfo) (Inc.Rule CacheBuildM)
|
||||
bindErrorA
|
||||
:: (ArrowChoice arr, ArrowKleisli m arr, ArrowError e arr, MonadError e m)
|
||||
=> arr (m a) a
|
||||
bindErrorA = (throwA ||| returnA) <<< arrM \m -> (Right <$> m) `catchError` (pure . Left)
|
||||
bindErrorA = liftEitherA <<< arrM \m -> (Right <$> m) `catchError` (pure . Left)
|
||||
{-# INLINABLE bindErrorA #-}
|
||||
|
||||
withRecordDependencies
|
||||
|
@ -96,7 +96,6 @@ pruneDanglingDependents cache = fmap (M.filter (not . null)) . traverse do
|
||||
Left $ "function " <> functionName <<> " is not tracked"
|
||||
SOTableObj tableName tableObjectId -> do
|
||||
tableInfo <- resolveTable tableName
|
||||
-- let coreInfo = _tiCoreInfo tableInfo
|
||||
case tableObjectId of
|
||||
TOCol columnName ->
|
||||
void $ resolveField tableInfo (fromPGCol columnName) _FIColumn "column"
|
||||
@ -104,12 +103,11 @@ pruneDanglingDependents cache = fmap (M.filter (not . null)) . traverse do
|
||||
void $ resolveField tableInfo (fromRel relName) _FIRelationship "relationship"
|
||||
TOComputedField fieldName ->
|
||||
void $ resolveField tableInfo (fromComputedField fieldName) _FIComputedField "computed field"
|
||||
TOCons _constraintName ->
|
||||
-- FIXME: foreign key constraints
|
||||
pure ()
|
||||
-- unless (constraintName `elem` _tciUniqueOrPrimaryKeyConstraints coreInfo) $ do
|
||||
-- Left $ "no unique or primary key constraint named " <> constraintName <<> " is "
|
||||
-- <> "defined for table " <>> tableName
|
||||
TOForeignKey constraintName -> do
|
||||
let foreignKeys = _tciForeignKeys $ _tiCoreInfo tableInfo
|
||||
unless (isJust $ find ((== constraintName) . _cName . _fkConstraint) foreignKeys) $
|
||||
Left $ "no foreign key constraint named " <> constraintName <<> " is "
|
||||
<> "defined for table " <>> tableName
|
||||
TOPerm roleName permType -> withPermType permType \accessor -> do
|
||||
let permLens = permAccToLens accessor
|
||||
unless (has (tiRolePermInfoMap.ix roleName.permLens._Just) tableInfo) $
|
||||
|
@ -8,6 +8,7 @@ module Hasura.RQL.DDL.Schema.Cache.Fields
|
||||
import Hasura.Prelude
|
||||
|
||||
import qualified Data.HashMap.Strict.Extended as M
|
||||
import qualified Data.HashSet as HS
|
||||
import qualified Data.Sequence as Seq
|
||||
|
||||
import Control.Arrow.Extended
|
||||
@ -26,8 +27,7 @@ import Hasura.SQL.Types
|
||||
-- see Note [Specialization of buildRebuildableSchemaCache] in Hasura.RQL.DDL.Schema.Cache
|
||||
{-# SPECIALIZE addNonColumnFields
|
||||
:: CacheBuildA
|
||||
( HashSet ForeignKey
|
||||
, HashSet QualifiedTable
|
||||
( HashMap QualifiedTable TableRawInfo
|
||||
, FieldInfoMap PGColumnInfo
|
||||
, [CatalogRelation]
|
||||
, [CatalogComputedField]
|
||||
@ -36,14 +36,14 @@ import Hasura.SQL.Types
|
||||
addNonColumnFields
|
||||
:: ( Inc.ArrowDistribute arr, ArrowWriter (Seq CollectedInfo) arr
|
||||
, ArrowKleisli m arr, MonadError QErr m )
|
||||
=> ( HashSet ForeignKey -- ^ all foreign keys
|
||||
, HashSet QualifiedTable -- ^ the names of all tracked tables
|
||||
=> ( HashMap QualifiedTable TableRawInfo
|
||||
, FieldInfoMap PGColumnInfo
|
||||
, [CatalogRelation]
|
||||
, [CatalogComputedField]
|
||||
) `arr` FieldInfoMap FieldInfo
|
||||
addNonColumnFields =
|
||||
proc (foreignKeys, trackedTableNames, columns, relationships, computedFields) -> do
|
||||
proc (rawTableInfo, columns, relationships, computedFields) -> do
|
||||
let foreignKeys = _tciForeignKeys <$> rawTableInfo
|
||||
relationshipInfos <-
|
||||
(| Inc.keyed (\_ relationshipsByName -> do
|
||||
maybeRelationship <- noDuplicates mkRelationshipMetadataObject -< relationshipsByName
|
||||
@ -54,6 +54,7 @@ addNonColumnFields =
|
||||
|) maybeRelationship)
|
||||
|) (M.groupOn _crRelName relationships)
|
||||
|
||||
let trackedTableNames = HS.fromList $ M.keys rawTableInfo
|
||||
computedFieldInfos <-
|
||||
(| Inc.keyed (\_ computedFieldsByName -> do
|
||||
maybeComputedField <- noDuplicates mkComputedFieldMetadataObject -< computedFieldsByName
|
||||
@ -97,9 +98,8 @@ mkRelationshipMetadataObject (CatalogRelation qt rn rt rDef cmnt) =
|
||||
in MetadataObject objectId definition
|
||||
|
||||
buildRelationship
|
||||
:: ( Inc.ArrowDistribute arr, ArrowWriter (Seq CollectedInfo) arr
|
||||
, ArrowKleisli m arr, MonadError QErr m )
|
||||
=> (HashSet ForeignKey, CatalogRelation) `arr` Maybe RelInfo
|
||||
:: (Inc.ArrowDistribute arr, ArrowWriter (Seq CollectedInfo) arr)
|
||||
=> (HashMap QualifiedTable [ForeignKey], CatalogRelation) `arr` Maybe RelInfo
|
||||
buildRelationship = proc (foreignKeys, relationship) -> do
|
||||
let CatalogRelation tableName rn rt rDef _ = relationship
|
||||
metadataObject = mkRelationshipMetadataObject relationship
|
||||
@ -107,13 +107,14 @@ buildRelationship = proc (foreignKeys, relationship) -> do
|
||||
addRelationshipContext e = "in relationship " <> rn <<> ": " <> e
|
||||
(| withRecordInconsistency (
|
||||
(| modifyErrA (do
|
||||
(info, dependencies) <- bindErrorA -< case rt of
|
||||
(info, dependencies) <- liftEitherA -< case rt of
|
||||
ObjRel -> do
|
||||
using <- decodeValue rDef
|
||||
objRelP2Setup tableName foreignKeys (RelDef rn using Nothing)
|
||||
tableForeignKeys <- getTableInfo tableName foreignKeys
|
||||
objRelP2Setup tableName tableForeignKeys (RelDef rn using Nothing)
|
||||
ArrRel -> do
|
||||
using <- decodeValue rDef
|
||||
arrRelP2Setup tableName foreignKeys (RelDef rn using Nothing)
|
||||
arrRelP2Setup foreignKeys tableName (RelDef rn using Nothing)
|
||||
recordDependencies -< (metadataObject, schemaObject, dependencies)
|
||||
returnA -< info)
|
||||
|) (addTableContext tableName . addRelationshipContext))
|
||||
|
@ -27,7 +27,7 @@ import Hasura.SQL.Types
|
||||
{-# SPECIALIZE buildTablePermissions
|
||||
:: CacheBuildA
|
||||
( TableCoreCache
|
||||
, TableCoreInfo FieldInfo
|
||||
, TableCoreInfo
|
||||
, [CatalogPermission]
|
||||
) RolePermInfoMap #-}
|
||||
|
||||
@ -35,7 +35,7 @@ buildTablePermissions
|
||||
:: ( Inc.ArrowCache arr, Inc.ArrowDistribute arr, ArrowKleisli m arr
|
||||
, ArrowWriter (Seq CollectedInfo) arr, MonadTx m, MonadReader BuildReason m )
|
||||
=> ( TableCoreCache
|
||||
, TableCoreInfo FieldInfo
|
||||
, TableCoreInfo
|
||||
, [CatalogPermission]
|
||||
) `arr` RolePermInfoMap
|
||||
buildTablePermissions = proc (tableCache, tableInfo, tablePermissions) ->
|
||||
@ -90,7 +90,7 @@ buildPermission
|
||||
, ArrowWriter (Seq CollectedInfo) arr, MonadTx m, MonadReader BuildReason m
|
||||
, Eq a, IsPerm a, FromJSON a, Eq (PermInfo a) )
|
||||
=> ( TableCoreCache
|
||||
, TableCoreInfo FieldInfo
|
||||
, TableCoreInfo
|
||||
, [CatalogPermission]
|
||||
) `arr` Maybe (PermInfo a)
|
||||
buildPermission = proc (tableCache, tableInfo, permissions) ->
|
||||
|
@ -1,7 +1,5 @@
|
||||
module Hasura.RQL.DDL.Schema.Diff
|
||||
( TableMeta(..)
|
||||
, PGColMeta(..)
|
||||
, ConstraintMeta(..)
|
||||
, fetchTableMeta
|
||||
, ComputedFieldMeta(..)
|
||||
|
||||
@ -25,6 +23,7 @@ module Hasura.RQL.DDL.Schema.Diff
|
||||
|
||||
import Hasura.Prelude
|
||||
import Hasura.RQL.Types
|
||||
import Hasura.RQL.Types.Catalog
|
||||
import Hasura.Server.Utils (duplicates)
|
||||
import Hasura.SQL.Types
|
||||
|
||||
@ -38,28 +37,9 @@ import qualified Data.HashMap.Strict as M
import qualified Data.HashSet as HS
import qualified Data.List.NonEmpty as NE

data PGColMeta
  = PGColMeta
  { pcmColumnName :: !PGCol
  , pcmOrdinalPosition :: !Int
  , pcmDataType :: !PGScalarType
  , pcmIsNullable :: !Bool
  , pcmReferences :: ![QualifiedTable]
  , pcmDescription :: !(Maybe PGDescription)
  } deriving (Show, Eq)
$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''PGColMeta)

data ConstraintMeta
  = ConstraintMeta
  { cmName :: !ConstraintName
  , cmOid :: !Int
  , cmType :: !ConstraintType
  } deriving (Show, Eq)
$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''ConstraintMeta)

data FunctionMeta
  = FunctionMeta
  { fmOid :: !Int
  { fmOid :: !OID
  , fmFunction :: !QualifiedFunction
  , fmType :: !FunctionType
  } deriving (Show, Eq)
|
||||
@ -74,21 +54,15 @@ $(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''ComputedFieldMeta

data TableMeta
  = TableMeta
  { tmOid :: !Int
  , tmTable :: !QualifiedTable
  , tmDescription :: !(Maybe PGDescription)
  , tmColumns :: ![PGColMeta]
  , tmConstraints :: ![ConstraintMeta]
  , tmForeignKeys :: ![ForeignKey]
  { tmTable :: !QualifiedTable
  , tmInfo :: !CatalogTableInfo
  , tmComputedFields :: ![ComputedFieldMeta]
  } deriving (Show, Eq)

fetchTableMeta :: Q.Tx [TableMeta]
fetchTableMeta = do
  res <- Q.listQ $(Q.sqlFromFile "src-rsr/table_meta.sql") () False
  forM res $ \(ts, tn, toid, descM, cols, constrnts, fkeys, computedFields) ->
    return $ TableMeta toid (QualifiedObject ts tn) descM (Q.getAltJ cols)
      (Q.getAltJ constrnts) (Q.getAltJ fkeys) (Q.getAltJ computedFields)
fetchTableMeta = Q.listQ $(Q.sqlFromFile "src-rsr/table_meta.sql") () False <&>
  map \(schema, name, Q.AltJ info, Q.AltJ computedFields) ->
    TableMeta (QualifiedObject schema name) info computedFields

getOverlap :: (Eq k, Hashable k) => (v -> k) -> [v] -> [v] -> [(v, v)]
getOverlap getKey left right =
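After this change everything per-table that Schema.Diff needs (columns, primary key, unique constraints, foreign keys, description) arrives inside tmInfo :: CatalogTableInfo rather than in separate TableMeta fields. A small sketch, not part of the patch, of reading it back out using only accessors that appear elsewhere in this diff:

-- hypothetical helpers over the new TableMeta shape
columnCount :: TableMeta -> Int
columnCount = length . _ctiColumns . tmInfo

foreignKeyNames :: TableMeta -> [ConstraintName]
foreignKeyNames =
  map (_cName . _fkConstraint . unCatalogForeignKey) . _ctiForeignKeys . tmInfo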
|
||||
@ -130,40 +104,30 @@ getTableDiff oldtm newtm =
|
||||
droppedFKeyConstraints computedFieldDiff uniqueOrPrimaryCons mNewDesc
|
||||
where
|
||||
mNewName = bool (Just $ tmTable newtm) Nothing $ tmTable oldtm == tmTable newtm
|
||||
oldCols = tmColumns oldtm
|
||||
newCols = tmColumns newtm
|
||||
oldCols = _ctiColumns $ tmInfo oldtm
|
||||
newCols = _ctiColumns $ tmInfo newtm
|
||||
|
||||
uniqueOrPrimaryCons =
|
||||
[cmName cm | cm <- tmConstraints newtm, isUniqueOrPrimary (cmType cm)]
|
||||
uniqueOrPrimaryCons = map _cName $
|
||||
maybeToList (_pkConstraint <$> _ctiPrimaryKey (tmInfo newtm))
|
||||
<> _ctiUniqueConstraints (tmInfo newtm)
|
||||
|
||||
mNewDesc = tmDescription newtm
|
||||
mNewDesc = _ctiDescription $ tmInfo newtm
|
||||
|
||||
droppedCols =
|
||||
map pcmColumnName $ getDifference pcmOrdinalPosition oldCols newCols
|
||||
|
||||
addedCols =
|
||||
map pcmToPci $ getDifference pcmOrdinalPosition newCols oldCols
|
||||
|
||||
existingCols = getOverlap pcmOrdinalPosition oldCols newCols
|
||||
|
||||
pcmToPci (PGColMeta colName _ colType isNullable references descM)
|
||||
= PGRawColumnInfo colName colType isNullable references descM
|
||||
|
||||
alteredCols =
|
||||
flip map (filter (uncurry (/=)) existingCols) $ pcmToPci *** pcmToPci
|
||||
droppedCols = map prciName $ getDifference prciPosition oldCols newCols
|
||||
addedCols = getDifference prciPosition newCols oldCols
|
||||
existingCols = getOverlap prciPosition oldCols newCols
|
||||
alteredCols = filter (uncurry (/=)) existingCols
|
||||
|
||||
-- foreign keys are considered dropped only if their oid
|
||||
-- and (ref-table, column mapping) are changed
|
||||
droppedFKeyConstraints = map _fkConstraint $ HS.toList $
|
||||
droppedFKeyConstraints = map (_cName . _fkConstraint) $ HS.toList $
|
||||
droppedFKeysWithOid `HS.intersection` droppedFKeysWithUniq
|
||||
|
||||
tmForeignKeys = fmap unCatalogForeignKey . _ctiForeignKeys . tmInfo
|
||||
droppedFKeysWithOid = HS.fromList $
|
||||
getDifference _fkOid (tmForeignKeys oldtm) (tmForeignKeys newtm)
|
||||
|
||||
(getDifference (_cOid . _fkConstraint) `on` tmForeignKeys) oldtm newtm
|
||||
droppedFKeysWithUniq = HS.fromList $
|
||||
getDifference mkFKeyUniqId (tmForeignKeys oldtm) (tmForeignKeys newtm)
|
||||
|
||||
mkFKeyUniqId (ForeignKey _ reftn _ _ colMap) = (reftn, colMap)
|
||||
(getDifference mkFKeyUniqId `on` tmForeignKeys) oldtm newtm
|
||||
mkFKeyUniqId (ForeignKey _ reftn colMap) = (reftn, colMap)
|
||||
|
||||
-- calculate computed field diff
|
||||
oldComputedFieldMeta = tmComputedFields oldtm
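The comment above is the key point: a foreign key counts as dropped only when it disappears both by constraint oid and by its (referenced table, column mapping) identity, so a constraint that was merely dropped and recreated is not reported. Restated as a standalone sketch, not part of the patch, reusing getDifference from this module:

droppedForeignKeys :: [ForeignKey] -> [ForeignKey] -> [ConstraintName]
droppedForeignKeys oldFks newFks =
  map (_cName . _fkConstraint) $ HS.toList $
    HS.fromList (getDifference (_cOid . _fkConstraint) oldFks newFks)
      `HS.intersection`
    HS.fromList (getDifference fkIdentity oldFks newFks)
  where
    -- the identity that survives a drop-and-recreate: where the key points and how
    fkIdentity fk = (_fkForeignTable fk, _fkColumnMapping fk)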
|
||||
@ -196,7 +160,7 @@ getTableChangeDeps tn tableDiff = do
|
||||
return $ getDependentObjs sc objId
|
||||
-- for all dropped constraints
|
||||
droppedConsDeps <- fmap concat $ forM droppedFKeyConstraints $ \droppedCons -> do
|
||||
let objId = SOTableObj tn $ TOCons droppedCons
|
||||
let objId = SOTableObj tn $ TOForeignKey droppedCons
|
||||
return $ getDependentObjs sc objId
|
||||
return $ droppedConsDeps <> droppedColDeps <> droppedComputedFieldDeps
|
||||
where
|
||||
@ -213,9 +177,9 @@ getSchemaDiff :: [TableMeta] -> [TableMeta] -> SchemaDiff
|
||||
getSchemaDiff oldMeta newMeta =
|
||||
SchemaDiff droppedTables survivingTables
|
||||
where
|
||||
droppedTables = map tmTable $ getDifference tmOid oldMeta newMeta
|
||||
droppedTables = map tmTable $ getDifference (_ctiOid . tmInfo) oldMeta newMeta
|
||||
survivingTables =
|
||||
flip map (getOverlap tmOid oldMeta newMeta) $ \(oldtm, newtm) ->
|
||||
flip map (getOverlap (_ctiOid . tmInfo) oldMeta newMeta) $ \(oldtm, newtm) ->
|
||||
(tmTable oldtm, getTableDiff oldtm newtm)
|
||||
|
||||
getSchemaChangeDeps
|
||||
|
@ -9,7 +9,8 @@ module Hasura.RQL.DDL.Schema.Enum (
|
||||
, EnumValueInfo(..)
|
||||
, EnumValue(..)
|
||||
|
||||
-- * Loading enum values
|
||||
-- * Loading table info
|
||||
, resolveEnumReferences
|
||||
, fetchAndValidateEnumValues
|
||||
) where
|
||||
|
||||
@ -25,6 +26,7 @@ import qualified Database.PG.Query as Q
|
||||
import qualified Language.GraphQL.Draft.Syntax as G
|
||||
|
||||
import Hasura.Db
|
||||
import Hasura.RQL.Types.Common
|
||||
import Hasura.RQL.Types.Column
|
||||
import Hasura.RQL.Types.Error
|
||||
import Hasura.Server.Utils (makeReasonMessage)
|
||||
@ -32,6 +34,26 @@ import Hasura.SQL.Types
import qualified Hasura.SQL.DML as S

-- | Given a map of enum tables, computes all enum references implied by the given set of foreign
-- keys. A foreign key constitutes an enum reference iff the following conditions hold:
--
-- 1. The key only includes a single column.
-- 2. The referenced column is the table’s primary key.
-- 3. The referenced table is, in fact, an enum table.
resolveEnumReferences
  :: HashMap QualifiedTable (PrimaryKey PGCol, EnumValues)
  -> [ForeignKey]
  -> HashMap PGCol (NonEmpty EnumReference)
resolveEnumReferences enumTables =
  M.fromListWith (<>) . map (fmap (:|[])) . mapMaybe resolveEnumReference
  where
    resolveEnumReference :: ForeignKey -> Maybe (PGCol, EnumReference)
    resolveEnumReference foreignKey = do
      [(localColumn, foreignColumn)] <- pure $ M.toList (_fkColumnMapping foreignKey)
      (primaryKey, enumValues) <- M.lookup (_fkForeignTable foreignKey) enumTables
      guard (_pkColumns primaryKey == foreignColumn:|[])
      pure (localColumn, EnumReference (_fkForeignTable foreignKey) enumValues)

data EnumTableIntegrityError
  = EnumTableMissingPrimaryKey
  | EnumTableMultiColumnPrimaryKey ![PGCol]
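A small sketch, not part of the patch, of the three conditions above in action; every lower-case binding below (colorTable, colorValueCol, articleColorCol, the constraint names and enum values) is a hypothetical placeholder rather than something defined in the codebase:

-- colorTable is an enum table whose single-column, text primary key is
-- colorValueCol, and articleColorCol on some tracked table references it
articleEnumRefs :: HashMap PGCol (NonEmpty EnumReference)
articleEnumRefs = resolveEnumReferences enumTables [articleColorFk]
  where
    enumTables = M.singleton colorTable
      ( PrimaryKey (Constraint colorPkeyName (OID 41)) (colorValueCol :| [])
      , colorEnumValues )
    articleColorFk = ForeignKey (Constraint articleFkName (OID 42)) colorTable
      (M.singleton articleColorCol colorValueCol)
-- result: articleColorCol maps to EnumReference colorTable colorEnumValues :| [];
-- a composite key, or one not targeting the primary key, yields no entry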
|
||||
@ -45,26 +67,27 @@ data EnumTableIntegrityError
|
||||
fetchAndValidateEnumValues
|
||||
:: (MonadTx m)
|
||||
=> QualifiedTable
|
||||
-> [PGRawColumnInfo]
|
||||
-> Maybe (PrimaryKey PGRawColumnInfo)
|
||||
-> [PGRawColumnInfo]
|
||||
-> m EnumValues
|
||||
fetchAndValidateEnumValues tableName primaryKeyColumns columnInfos =
|
||||
fetchAndValidateEnumValues tableName maybePrimaryKey columnInfos =
|
||||
either (throw400 ConstraintViolation . showErrors) pure =<< runValidateT fetchAndValidate
|
||||
where
|
||||
fetchAndValidate :: (MonadTx m, MonadValidate [EnumTableIntegrityError] m) => m EnumValues
|
||||
fetchAndValidate = do
|
||||
maybePrimaryKey <- tolerate validatePrimaryKey
|
||||
maybeCommentColumn <- validateColumns maybePrimaryKey
|
||||
enumValues <- maybe (refute mempty) (fetchEnumValues maybeCommentColumn) maybePrimaryKey
|
||||
primaryKeyColumn <- tolerate validatePrimaryKey
|
||||
maybeCommentColumn <- validateColumns primaryKeyColumn
|
||||
enumValues <- maybe (refute mempty) (fetchEnumValues maybeCommentColumn) primaryKeyColumn
|
||||
validateEnumValues enumValues
|
||||
pure enumValues
|
||||
where
|
||||
validatePrimaryKey = case primaryKeyColumns of
|
||||
[] -> refute [EnumTableMissingPrimaryKey]
|
||||
[column] -> case prciType column of
|
||||
PGText -> pure column
|
||||
_ -> refute [EnumTableNonTextualPrimaryKey column]
|
||||
_ -> refute [EnumTableMultiColumnPrimaryKey $ map prciName primaryKeyColumns]
|
||||
validatePrimaryKey = case maybePrimaryKey of
|
||||
Nothing -> refute [EnumTableMissingPrimaryKey]
|
||||
Just primaryKey -> case _pkColumns primaryKey of
|
||||
column :| [] -> case prciType column of
|
||||
PGText -> pure column
|
||||
_ -> refute [EnumTableNonTextualPrimaryKey column]
|
||||
columns -> refute [EnumTableMultiColumnPrimaryKey $ map prciName (toList columns)]
|
||||
|
||||
validateColumns primaryKeyColumn = do
|
||||
let nonPrimaryKeyColumns = maybe columnInfos (`delete` columnInfos) primaryKeyColumn
|
||||
|
@ -204,7 +204,7 @@ runUntrackTableQ q = do
|
||||
unTrackExistingTableOrViewP1 q
|
||||
unTrackExistingTableOrViewP2 q
|
||||
|
||||
processTableChanges :: (MonadTx m, CacheRM m) => TableCoreInfo FieldInfo -> TableDiff -> m ()
|
||||
processTableChanges :: (MonadTx m, CacheRM m) => TableCoreInfo -> TableDiff -> m ()
|
||||
processTableChanges ti tableDiff = do
|
||||
-- If table rename occurs then don't replace constraints and
|
||||
-- process dropped/added columns, because schema reload happens eventually
|
||||
@ -237,8 +237,8 @@ processTableChanges ti tableDiff = do
|
||||
liftTx $ updateTableConfig tn $ TableConfig customFields modifiedCustomColumnNames
|
||||
|
||||
procAlteredCols sc tn = for_ alteredCols $
|
||||
\( PGRawColumnInfo oldName oldType _ _ _
|
||||
, PGRawColumnInfo newName newType _ _ _ ) -> do
|
||||
\( PGRawColumnInfo oldName _ oldType _ _
|
||||
, PGRawColumnInfo newName _ newType _ _ ) -> do
|
||||
if | oldName /= newName -> renameColInCatalog oldName newName tn (_tciFieldInfoMap ti)
|
||||
|
||||
| oldType /= newType -> do
|
||||
@ -293,13 +293,14 @@ delTableAndDirectDeps qtn@(QualifiedObject sn tn) = do
|
||||
buildTableCache
|
||||
:: forall arr m
|
||||
. (Inc.ArrowDistribute arr, ArrowWriter (Seq CollectedInfo) arr, ArrowKleisli m arr, MonadTx m)
|
||||
=> [CatalogTable] `arr` M.HashMap QualifiedTable (TableCoreInfo PGColumnInfo)
|
||||
=> [CatalogTable] `arr` M.HashMap QualifiedTable TableRawInfo
|
||||
buildTableCache = proc catalogTables -> do
|
||||
rawTableInfos <-
|
||||
(| Inc.keyed (| withTable (\tables -> buildRawTableInfo <<< noDuplicateTables -< tables) |)
|
||||
|) (M.groupOnNE _ctName catalogTables)
|
||||
let rawTableCache = M.catMaybes rawTableInfos
|
||||
enumTables = M.mapMaybe _tciEnumValues rawTableCache
|
||||
enumTables = flip M.mapMaybe rawTableCache \rawTableInfo ->
|
||||
(,) <$> _tciPrimaryKey rawTableInfo <*> _tciEnumValues rawTableInfo
|
||||
tableInfos <-
|
||||
(| Inc.keyed (| withTable (\table -> processTableInfo -< (enumTables, table)) |)
|
||||
|) rawTableCache
|
||||
@ -314,18 +315,20 @@ buildTableCache = proc catalogTables -> do
|
||||
_ -> throwA -< err400 AlreadyExists "duplication definition for table"
|
||||
|
||||
-- Step 1: Build the raw table cache from metadata information.
|
||||
buildRawTableInfo :: ErrorA QErr arr CatalogTable (TableCoreInfo PGRawColumnInfo)
|
||||
buildRawTableInfo :: ErrorA QErr arr CatalogTable (TableCoreInfoG PGRawColumnInfo PGCol)
|
||||
buildRawTableInfo = proc (CatalogTable name systemDefined isEnum config maybeInfo) -> do
|
||||
catalogInfo <-
|
||||
(| onNothingA (throwA -<
|
||||
err400 NotExists $ "no such table/view exists in postgres: " <>> name)
|
||||
|) maybeInfo
|
||||
|
||||
let CatalogTableInfo columns constraints primaryKeyColumnNames viewInfo maybeDesc = catalogInfo
|
||||
primaryKeyColumns = flip filter columns $ \column ->
|
||||
prciName column `elem` primaryKeyColumnNames
|
||||
maybeEnumValues <- if isEnum
|
||||
then bindA -< Just <$> fetchAndValidateEnumValues name primaryKeyColumns columns
|
||||
let columns = _ctiColumns catalogInfo
|
||||
columnMap = mapFromL (fromPGCol . prciName) columns
|
||||
primaryKey = _ctiPrimaryKey catalogInfo
|
||||
|
||||
rawPrimaryKey <- liftEitherA -< traverse (resolvePrimaryKeyColumns columnMap) primaryKey
|
||||
enumValues <- if isEnum
|
||||
then bindErrorA -< Just <$> fetchAndValidateEnumValues name rawPrimaryKey columns
|
||||
else returnA -< Nothing
|
||||
|
||||
-- validate tableConfig
|
||||
@ -334,13 +337,14 @@ buildTableCache = proc catalogTables -> do
|
||||
returnA -< TableCoreInfo
|
||||
{ _tciName = name
|
||||
, _tciSystemDefined = systemDefined
|
||||
, _tciFieldInfoMap = mapFromL (fromPGCol . prciName) columns
|
||||
, _tciUniqueOrPrimaryKeyConstraints = constraints
|
||||
, _tciPrimaryKeyColumns = primaryKeyColumnNames
|
||||
, _tciViewInfo = viewInfo
|
||||
, _tciEnumValues = maybeEnumValues
|
||||
, _tciFieldInfoMap = columnMap
|
||||
, _tciPrimaryKey = primaryKey
|
||||
, _tciUniqueConstraints = _ctiUniqueConstraints catalogInfo
|
||||
, _tciForeignKeys = unCatalogForeignKey <$> _ctiForeignKeys catalogInfo
|
||||
, _tciViewInfo = _ctiViewInfo catalogInfo
|
||||
, _tciEnumValues = enumValues
|
||||
, _tciCustomConfig = config
|
||||
, _tciDescription = maybeDesc
|
||||
, _tciDescription = _ctiDescription catalogInfo
|
||||
}
|
||||
|
||||
-- validateTableConfig :: TableCoreInfo a -> TableConfig -> m ()
|
||||
@ -363,25 +367,39 @@ buildTableCache = proc catalogTables -> do
|
||||
-- types.
|
||||
processTableInfo
|
||||
:: ErrorA QErr arr
|
||||
( M.HashMap QualifiedTable EnumValues
|
||||
, TableCoreInfo PGRawColumnInfo
|
||||
) (TableCoreInfo PGColumnInfo)
|
||||
processTableInfo = (throwA ||| returnA) <<< arr \(enumTables, rawInfo) -> runExcept do
|
||||
( M.HashMap QualifiedTable (PrimaryKey PGCol, EnumValues)
|
||||
, TableCoreInfoG PGRawColumnInfo PGCol
|
||||
) TableRawInfo
|
||||
processTableInfo = proc (enumTables, rawInfo) -> liftEitherA -< do
|
||||
let tableName = _tciName rawInfo
|
||||
enumReferences = resolveEnumReferences enumTables (_tciForeignKeys rawInfo)
|
||||
customFields = _tcCustomColumnNames $ _tciCustomConfig rawInfo
|
||||
process = processColumnInfo enumTables customFields tableName
|
||||
traverseOf (tciFieldInfoMap.traverse) process rawInfo
|
||||
|
||||
columnInfoMap <- _tciFieldInfoMap rawInfo
|
||||
& traverse (processColumnInfo enumReferences customFields tableName)
|
||||
primaryKey <- traverse (resolvePrimaryKeyColumns columnInfoMap) (_tciPrimaryKey rawInfo)
|
||||
|
||||
pure rawInfo
|
||||
{ _tciFieldInfoMap = columnInfoMap
|
||||
, _tciPrimaryKey = primaryKey
|
||||
}
|
||||
|
||||
resolvePrimaryKeyColumns
|
||||
:: (QErrM n) => HashMap FieldName a -> PrimaryKey PGCol -> n (PrimaryKey a)
|
||||
resolvePrimaryKeyColumns columnMap = traverseOf (pkColumns.traverse) \columnName ->
|
||||
M.lookup (fromPGCol columnName) columnMap
|
||||
`onNothing` throw500 "column in primary key not in table!"
|
||||
|
||||
-- | “Processes” a 'PGRawColumnInfo' into a 'PGColumnInfo' by resolving its type using a map of
|
||||
-- known enum tables.
|
||||
processColumnInfo
|
||||
:: (QErrM n)
|
||||
=> M.HashMap QualifiedTable EnumValues -- ^ known enum tables
|
||||
=> M.HashMap PGCol (NonEmpty EnumReference)
|
||||
-> CustomColumnNames -- ^ customised graphql names
|
||||
-> QualifiedTable -- ^ the table this column belongs to
|
||||
-> PGRawColumnInfo -- ^ the column’s raw information
|
||||
-> n PGColumnInfo
|
||||
processColumnInfo enumTables customFields tableName rawInfo = do
|
||||
processColumnInfo tableEnumReferences customFields tableName rawInfo = do
|
||||
resolvedType <- resolveColumnType
|
||||
pure PGColumnInfo
|
||||
{ pgiColumn = pgCol
|
||||
@ -392,25 +410,19 @@ buildTableCache = proc catalogTables -> do
|
||||
}
|
||||
where
|
||||
pgCol = prciName rawInfo
|
||||
graphqlName = fromMaybe (G.Name $ getPGColTxt pgCol) $
|
||||
M.lookup pgCol customFields
|
||||
graphqlName = fromMaybe (G.Name $ getPGColTxt pgCol) $ M.lookup pgCol customFields
|
||||
resolveColumnType =
|
||||
case prciReferences rawInfo of
|
||||
-- no referenced tables? definitely not an enum
|
||||
[] -> pure $ PGColumnScalar (prciType rawInfo)
|
||||
|
||||
-- one referenced table? might be an enum, so check if the referenced table is an enum
|
||||
[referencedTableName] -> pure $ M.lookup referencedTableName enumTables & maybe
|
||||
(PGColumnScalar $ prciType rawInfo)
|
||||
(PGColumnEnumReference . EnumReference referencedTableName)
|
||||
|
||||
-- multiple referenced tables? we could check if any of them are enums, but the schema
|
||||
-- is strange, so let’s just reject it
|
||||
referencedTables -> throw400 ConstraintViolation
|
||||
$ "cannot handle exotic schema: column " <> prciName rawInfo <<> " in table "
|
||||
<> tableName <<> " references multiple foreign tables ("
|
||||
<> T.intercalate ", " (map dquote referencedTables) <> ")?"
|
||||
case M.lookup pgCol tableEnumReferences of
|
||||
-- no references? not an enum
|
||||
Nothing -> pure $ PGColumnScalar (prciType rawInfo)
|
||||
-- one reference? is an enum
|
||||
Just (enumReference:|[]) -> pure $ PGColumnEnumReference enumReference
|
||||
-- multiple referenced enums? the schema is strange, so let’s reject it
|
||||
Just enumReferences -> throw400 ConstraintViolation
|
||||
$ "column " <> prciName rawInfo <<> " in table " <> tableName
|
||||
<<> " references multiple enum tables ("
|
||||
<> T.intercalate ", " (map (dquote . erTable) $ toList enumReferences) <> ")"
|
||||
|
||||
-- see Note [Specialization of buildRebuildableSchemaCache] in Hasura.RQL.DDL.Schema.Cache
|
||||
{-# SPECIALIZE buildTableCache
|
||||
:: CacheBuildA [CatalogTable] (M.HashMap QualifiedTable (TableCoreInfo PGColumnInfo)) #-}
|
||||
:: CacheBuildA [CatalogTable] (M.HashMap QualifiedTable TableRawInfo) #-}
|
||||
|
@ -22,8 +22,8 @@ import qualified Database.PG.Query as Q
|
||||
import qualified Hasura.SQL.DML as S
|
||||
|
||||
data ConflictTarget
|
||||
= Column ![PGCol]
|
||||
| Constraint !ConstraintName
|
||||
= CTColumn ![PGCol]
|
||||
| CTConstraint !ConstraintName
|
||||
deriving (Show, Eq)
|
||||
|
||||
data ConflictClauseP1
|
||||
@ -59,8 +59,8 @@ toSQLConflict conflict = case conflict of
|
||||
|
||||
where
|
||||
toSQLCT ct = case ct of
|
||||
Column pgCols -> S.SQLColumn pgCols
|
||||
Constraint cn -> S.SQLConstraint cn
|
||||
CTColumn pgCols -> S.SQLColumn pgCols
|
||||
CTConstraint cn -> S.SQLConstraint cn
|
||||
|
||||
convObj
|
||||
:: (UserInfoM m, QErrM m)
|
||||
@ -108,10 +108,10 @@ buildConflictClause sessVarBldr tableInfo inpCols (OnConflict mTCol mTCons act)
|
||||
(Nothing, Nothing, CAIgnore) -> return $ CP1DoNothing Nothing
|
||||
(Just col, Nothing, CAIgnore) -> do
|
||||
validateCols col
|
||||
return $ CP1DoNothing $ Just $ Column $ getPGCols col
|
||||
return $ CP1DoNothing $ Just $ CTColumn $ getPGCols col
|
||||
(Nothing, Just cons, CAIgnore) -> do
|
||||
validateConstraint cons
|
||||
return $ CP1DoNothing $ Just $ Constraint cons
|
||||
return $ CP1DoNothing $ Just $ CTConstraint cons
|
||||
(Nothing, Nothing, CAUpdate) -> throw400 UnexpectedPayload
|
||||
"Expecting 'constraint' or 'constraint_on' when the 'action' is 'update'"
|
||||
(Just col, Nothing, CAUpdate) -> do
|
||||
@ -119,14 +119,14 @@ buildConflictClause sessVarBldr tableInfo inpCols (OnConflict mTCol mTCons act)
|
||||
(updFltr, preSet) <- getUpdPerm
|
||||
resolvedUpdFltr <- convAnnBoolExpPartialSQL sessVarBldr updFltr
|
||||
resolvedPreSet <- mapM (convPartialSQLExp sessVarBldr) preSet
|
||||
return $ CP1Update (Column $ getPGCols col) inpCols resolvedPreSet $
|
||||
return $ CP1Update (CTColumn $ getPGCols col) inpCols resolvedPreSet $
|
||||
toSQLBool resolvedUpdFltr
|
||||
(Nothing, Just cons, CAUpdate) -> do
|
||||
validateConstraint cons
|
||||
(updFltr, preSet) <- getUpdPerm
|
||||
resolvedUpdFltr <- convAnnBoolExpPartialSQL sessVarBldr updFltr
|
||||
resolvedPreSet <- mapM (convPartialSQLExp sessVarBldr) preSet
|
||||
return $ CP1Update (Constraint cons) inpCols resolvedPreSet $
|
||||
return $ CP1Update (CTConstraint cons) inpCols resolvedPreSet $
|
||||
toSQLBool resolvedUpdFltr
|
||||
(Just _, Just _, _) -> throw400 UnexpectedPayload
|
||||
"'constraint' and 'constraint_on' cannot be set at a time"
|
||||
@ -141,7 +141,7 @@ buildConflictClause sessVarBldr tableInfo inpCols (OnConflict mTCol mTCons act)
|
||||
\pgCol -> askPGType fieldInfoMap pgCol ""
|
||||
|
||||
validateConstraint c = do
|
||||
let tableConsNames = _tciUniqueOrPrimaryKeyConstraints coreInfo
|
||||
let tableConsNames = _cName <$> tciUniqueOrPrimaryKeyConstraints coreInfo
|
||||
withPathK "constraint" $
|
||||
unless (c `elem` tableConsNames) $
|
||||
throw400 Unexpected $ "constraint " <> getConstraintTxt c
|
||||
@ -270,7 +270,7 @@ extractConflictCtx cp =
|
||||
constraintName <- extractConstraintName conflictTar
|
||||
return $ CCUpdate constraintName inpCols preSet filtr
|
||||
where
|
||||
extractConstraintName (Constraint cn) = return cn
|
||||
extractConstraintName (CTConstraint cn) = return cn
|
||||
extractConstraintName _ = throw400 NotSupported
|
||||
"\"constraint_on\" not supported for non admin insert. use \"constraint\" instead"
|
||||
|
||||
|
@ -28,7 +28,7 @@ newtype DMLP1T m a
|
||||
runDMLP1T :: DMLP1T m a -> m (a, DS.Seq Q.PrepArg)
|
||||
runDMLP1T = flip runStateT DS.empty . unDMLP1T
|
||||
|
||||
mkAdminRolePermInfo :: TableCoreInfo FieldInfo -> RolePermInfo
|
||||
mkAdminRolePermInfo :: TableCoreInfo -> RolePermInfo
|
||||
mkAdminRolePermInfo ti =
|
||||
RolePermInfo (Just i) (Just s) (Just u) (Just d)
|
||||
where
|
||||
|
@ -19,6 +19,7 @@ module Hasura.RQL.Types
|
||||
, mkAdminQCtx
|
||||
, askTabInfo
|
||||
, isTableTracked
|
||||
, getTableInfo
|
||||
, askTableCoreInfo
|
||||
, askFieldInfoMap
|
||||
, askPGType
|
||||
@ -182,9 +183,16 @@ instance (Monad m) => HasSystemDefined (HasSystemDefinedT m) where
|
||||
liftMaybe :: (QErrM m) => QErr -> Maybe a -> m a
|
||||
liftMaybe e = maybe (throwError e) return
|
||||
|
||||
askTableCoreInfo :: (QErrM m, TableCoreInfoRM m) => QualifiedTable -> m (TableCoreInfo FieldInfo)
|
||||
askTableCoreInfo tableName = lookupTableCoreInfo tableName >>=
|
||||
(`onNothing` throw400 NotExists ("table " <> tableName <<> " does not exist"))
|
||||
throwTableDoesNotExist :: (QErrM m) => QualifiedTable -> m a
|
||||
throwTableDoesNotExist tableName = throw400 NotExists ("table " <> tableName <<> " does not exist")
|
||||
|
||||
getTableInfo :: (QErrM m) => QualifiedTable -> HashMap QualifiedTable a -> m a
|
||||
getTableInfo tableName infoMap =
|
||||
M.lookup tableName infoMap `onNothing` throwTableDoesNotExist tableName
|
||||
|
||||
askTableCoreInfo :: (QErrM m, TableCoreInfoRM m) => QualifiedTable -> m TableCoreInfo
|
||||
askTableCoreInfo tableName =
|
||||
lookupTableCoreInfo tableName >>= (`onNothing` throwTableDoesNotExist tableName)
|
||||
|
||||
askFieldInfoMap :: (QErrM m, TableCoreInfoRM m) => QualifiedTable -> m (FieldInfoMap FieldInfo)
|
||||
askFieldInfoMap = fmap _tciFieldInfoMap . askTableCoreInfo
|
||||
|
@ -5,6 +5,7 @@ module Hasura.RQL.Types.Catalog
|
||||
|
||||
, CatalogTable(..)
|
||||
, CatalogTableInfo(..)
|
||||
, CatalogForeignKey(..)
|
||||
|
||||
, CatalogRelation(..)
|
||||
, CatalogComputedField(..)
|
||||
@ -15,6 +16,8 @@ module Hasura.RQL.Types.Catalog
|
||||
|
||||
import Hasura.Prelude
|
||||
|
||||
import qualified Data.HashMap.Strict as M
|
||||
|
||||
import Data.Aeson
|
||||
import Data.Aeson.Casing
|
||||
import Data.Aeson.TH
|
||||
@ -30,16 +33,40 @@ import Hasura.RQL.Types.RemoteSchema
import Hasura.RQL.Types.SchemaCache
import Hasura.SQL.Types

newtype CatalogForeignKey
  = CatalogForeignKey
  { unCatalogForeignKey :: ForeignKey
  } deriving (Show, Eq, NFData)

instance FromJSON CatalogForeignKey where
  parseJSON = withObject "CatalogForeignKey" \o -> do
    constraint <- o .: "constraint"
    foreignTable <- o .: "foreign_table"

    columns <- o .: "columns"
    foreignColumns <- o .: "foreign_columns"
    unless (length columns == length foreignColumns) $
      fail "columns and foreign_columns differ in length"

    pure $ CatalogForeignKey ForeignKey
      { _fkConstraint = constraint
      , _fkForeignTable = foreignTable
      , _fkColumnMapping = M.fromList $ zip columns foreignColumns
      }

data CatalogTableInfo
  = CatalogTableInfo
  { _ctiColumns :: ![PGRawColumnInfo]
  , _ctiConstraints :: ![ConstraintName]
  , _ctiPrimaryKeyColumns :: ![PGCol]
  { _ctiOid :: !OID
  , _ctiColumns :: ![PGRawColumnInfo]
  , _ctiPrimaryKey :: !(Maybe (PrimaryKey PGCol))
  , _ctiUniqueConstraints :: ![Constraint]
  -- ^ Does /not/ include the primary key!
  , _ctiForeignKeys :: ![CatalogForeignKey]
  , _ctiViewInfo :: !(Maybe ViewInfo)
  , _ctiDescription :: !(Maybe PGDescription)
  } deriving (Show, Eq, Generic)
instance NFData CatalogTableInfo
$(deriveJSON (aesonDrop 4 snakeCase) ''CatalogTableInfo)
$(deriveFromJSON (aesonDrop 4 snakeCase) ''CatalogTableInfo)
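A sketch, not part of the patch, of the JSON this new FromJSON instance accepts. The top-level field names come from the parser above; the nested encodings assume the usual {name, oid} object for Constraint and {schema, name} object for QualifiedTable, and all concrete values are made up:

{-# LANGUAGE OverloadedStrings #-}

import Data.Text (Text)
import qualified Data.Aeson as J
import qualified Data.Aeson.Types as J (parseEither)

sampleForeignKeyJson :: J.Value
sampleForeignKeyJson = J.object
  [ "constraint"      J..= J.object [ "name" J..= ("article_author_id_fkey" :: Text)
                                    , "oid"  J..= (21507 :: Int) ]
  , "foreign_table"   J..= J.object [ "schema" J..= ("public" :: Text)
                                    , "name"   J..= ("author" :: Text) ]
  , "columns"         J..= (["author_id"] :: [Text])
  , "foreign_columns" J..= (["id"] :: [Text])
  ]

decodedForeignKey :: Either String CatalogForeignKey
decodedForeignKey = J.parseEither J.parseJSON sampleForeignKeyJson
-- on success, _fkColumnMapping is fromList [("author_id", "id")]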
|
||||
|
||||
data CatalogTable
|
||||
= CatalogTable
|
||||
@ -50,7 +77,7 @@ data CatalogTable
|
||||
, _ctInfo :: !(Maybe CatalogTableInfo)
|
||||
} deriving (Show, Eq, Generic)
|
||||
instance NFData CatalogTable
|
||||
$(deriveJSON (aesonDrop 3 snakeCase) ''CatalogTable)
|
||||
$(deriveFromJSON (aesonDrop 3 snakeCase) ''CatalogTable)
|
||||
|
||||
data CatalogRelation
|
||||
= CatalogRelation
|
||||
@ -61,7 +88,7 @@ data CatalogRelation
|
||||
, _crComment :: !(Maybe Text)
|
||||
} deriving (Show, Eq, Generic)
|
||||
instance NFData CatalogRelation
|
||||
$(deriveJSON (aesonDrop 3 snakeCase) ''CatalogRelation)
|
||||
$(deriveFromJSON (aesonDrop 3 snakeCase) ''CatalogRelation)
|
||||
|
||||
data CatalogPermission
|
||||
= CatalogPermission
|
||||
@ -72,7 +99,7 @@ data CatalogPermission
|
||||
, _cpComment :: !(Maybe Text)
|
||||
} deriving (Show, Eq, Generic)
|
||||
instance NFData CatalogPermission
|
||||
$(deriveJSON (aesonDrop 3 snakeCase) ''CatalogPermission)
|
||||
$(deriveFromJSON (aesonDrop 3 snakeCase) ''CatalogPermission)
|
||||
|
||||
data CatalogComputedField
|
||||
= CatalogComputedField
|
||||
@ -80,7 +107,7 @@ data CatalogComputedField
|
||||
, _cccFunctionInfo :: ![RawFunctionInfo] -- ^ multiple functions with same name
|
||||
} deriving (Show, Eq, Generic)
|
||||
instance NFData CatalogComputedField
|
||||
$(deriveJSON (aesonDrop 4 snakeCase) ''CatalogComputedField)
|
||||
$(deriveFromJSON (aesonDrop 4 snakeCase) ''CatalogComputedField)
|
||||
|
||||
data CatalogEventTrigger
|
||||
= CatalogEventTrigger
|
||||
@ -89,7 +116,7 @@ data CatalogEventTrigger
|
||||
, _cetDef :: !Value
|
||||
} deriving (Show, Eq, Generic)
|
||||
instance NFData CatalogEventTrigger
|
||||
$(deriveJSON (aesonDrop 4 snakeCase) ''CatalogEventTrigger)
|
||||
$(deriveFromJSON (aesonDrop 4 snakeCase) ''CatalogEventTrigger)
|
||||
|
||||
data CatalogFunction
|
||||
= CatalogFunction
|
||||
@ -99,7 +126,7 @@ data CatalogFunction
|
||||
, _cfInfo :: ![RawFunctionInfo] -- ^ multiple functions with same name
|
||||
} deriving (Show, Eq, Generic)
|
||||
instance NFData CatalogFunction
|
||||
$(deriveJSON (aesonDrop 3 snakeCase) ''CatalogFunction)
|
||||
$(deriveFromJSON (aesonDrop 3 snakeCase) ''CatalogFunction)
|
||||
|
||||
data CatalogMetadata
|
||||
= CatalogMetadata
|
||||
@ -109,9 +136,8 @@ data CatalogMetadata
|
||||
, _cmEventTriggers :: ![CatalogEventTrigger]
|
||||
, _cmRemoteSchemas :: ![AddRemoteSchemaQuery]
|
||||
, _cmFunctions :: ![CatalogFunction]
|
||||
, _cmForeignKeys :: ![ForeignKey]
|
||||
, _cmAllowlistCollections :: ![CollectionDef]
|
||||
, _cmComputedFields :: ![CatalogComputedField]
|
||||
} deriving (Show, Eq, Generic)
|
||||
instance NFData CatalogMetadata
|
||||
$(deriveJSON (aesonDrop 3 snakeCase) ''CatalogMetadata)
|
||||
$(deriveFromJSON (aesonDrop 3 snakeCase) ''CatalogMetadata)
|
||||
|
@ -126,18 +126,19 @@ parsePGScalarValues columnType values = do
|
||||
data PGRawColumnInfo
|
||||
= PGRawColumnInfo
|
||||
{ prciName :: !PGCol
|
||||
, prciPosition :: !Int
|
||||
-- ^ The “ordinal position” of the column according to Postgres. Numbering starts at 1 and
|
||||
-- increases. Dropping a column does /not/ cause the columns to be renumbered, so a column can be
|
||||
-- consistently identified by its position.
|
||||
, prciType :: !PGScalarType
|
||||
, prciIsNullable :: !Bool
|
||||
, prciReferences :: ![QualifiedTable]
|
||||
-- ^ only stores single-column references to primary key of foreign tables (used for detecting
|
||||
-- references to enum tables)
|
||||
, prciDescription :: !(Maybe PGDescription)
|
||||
} deriving (Show, Eq, Generic)
|
||||
instance NFData PGRawColumnInfo
|
||||
$(deriveJSON (aesonDrop 4 snakeCase) ''PGRawColumnInfo)
|
||||
|
||||
-- | “Resolved” column info, produced from a 'PGRawColumnInfo' value that has been combined with other
|
||||
-- schema information to produce a 'PGColumnType'.
|
||||
-- | “Resolved” column info, produced from a 'PGRawColumnInfo' value that has been combined with
|
||||
-- other schema information to produce a 'PGColumnType'.
|
||||
data PGColumnInfo
|
||||
= PGColumnInfo
|
||||
{ pgiColumn :: !PGCol
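The ordinal-position note above is what lets the reworked Schema.Diff treat a renamed column as the same column across two catalog snapshots. A minimal sketch, not part of the patch, borrowing getOverlap from Hasura.RQL.DDL.Schema.Diff:

-- pair old/new snapshots of a table's columns by ordinal position, so a
-- renamed column is reported as a rename rather than an unrelated drop + add
renamedColumns :: [PGRawColumnInfo] -> [PGRawColumnInfo] -> [(PGCol, PGCol)]
renamedColumns oldCols newCols =
  [ (prciName old, prciName new)
  | (old, new) <- getOverlap prciPosition oldCols newCols
  , prciName old /= prciName new
  ]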
|
||||
|
@ -14,6 +14,12 @@ module Hasura.RQL.Types.Common
|
||||
, WithTable(..)
|
||||
, ColumnValues
|
||||
, MutateResp(..)
|
||||
|
||||
, OID(..)
|
||||
, Constraint(..)
|
||||
, PrimaryKey(..)
|
||||
, pkConstraint
|
||||
, pkColumns
|
||||
, ForeignKey(..)
|
||||
, CustomColumnNames
|
||||
|
||||
@ -36,6 +42,7 @@ import Data.Aeson.TH
|
||||
import Data.Aeson.Types
|
||||
import Instances.TH.Lift ()
|
||||
import Language.Haskell.TH.Syntax (Lift)
|
||||
import Control.Lens (makeLenses)
|
||||
|
||||
import qualified Data.HashMap.Strict as HM
|
||||
import qualified Data.Text as T
|
||||
@ -173,18 +180,37 @@ $(deriveJSON (aesonDrop 3 snakeCase) ''MutateResp)
|
||||
|
||||
type ColMapping = HM.HashMap PGCol PGCol
|
||||
|
||||
-- | Postgres OIDs. <https://www.postgresql.org/docs/12/datatype-oid.html>
|
||||
newtype OID = OID { unOID :: Int }
|
||||
deriving (Show, Eq, NFData, Hashable, ToJSON, FromJSON, Q.FromCol)
|
||||
|
||||
data Constraint
|
||||
= Constraint
|
||||
{ _cName :: !ConstraintName
|
||||
, _cOid :: !OID
|
||||
} deriving (Show, Eq, Generic)
|
||||
instance NFData Constraint
|
||||
instance Hashable Constraint
|
||||
$(deriveJSON (aesonDrop 2 snakeCase) ''Constraint)
|
||||
|
||||
data PrimaryKey a
|
||||
= PrimaryKey
|
||||
{ _pkConstraint :: !Constraint
|
||||
, _pkColumns :: !(NonEmpty a)
|
||||
} deriving (Show, Eq, Generic)
|
||||
instance (NFData a) => NFData (PrimaryKey a)
|
||||
$(makeLenses ''PrimaryKey)
|
||||
$(deriveJSON (aesonDrop 3 snakeCase) ''PrimaryKey)
|
||||
|
||||
data ForeignKey
|
||||
= ForeignKey
|
||||
{ _fkTable :: !QualifiedTable
|
||||
, _fkRefTable :: !QualifiedTable
|
||||
, _fkOid :: !Int
|
||||
, _fkConstraint :: !ConstraintName
|
||||
{ _fkConstraint :: !Constraint
|
||||
, _fkForeignTable :: !QualifiedTable
|
||||
, _fkColumnMapping :: !ColMapping
|
||||
} deriving (Show, Eq, Generic)
|
||||
instance NFData ForeignKey
|
||||
$(deriveJSON (aesonDrop 3 snakeCase) ''ForeignKey)
|
||||
|
||||
instance Hashable ForeignKey
|
||||
$(deriveJSON (aesonDrop 3 snakeCase) ''ForeignKey)
|
||||
|
||||
type CustomColumnNames = HM.HashMap PGCol G.Name
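For orientation, a minimal sketch of how the new Constraint and PrimaryKey types compose and how the generated pkColumns lens is used. The concrete names, OID and column below are hypothetical, and ConstraintName/PGCol are assumed to be plain newtype wrappers over Text:

{-# LANGUAGE OverloadedStrings #-}
import Control.Lens ((^.))
import Data.List.NonEmpty (NonEmpty (..), toList)

-- Hypothetical single-column primary key value.
examplePk :: PrimaryKey PGCol
examplePk = PrimaryKey
  { _pkConstraint = Constraint (ConstraintName "users_pkey") (OID 16435)
  , _pkColumns    = PGCol "id" :| []
  }

-- Reading the key columns back through the generated lens.
examplePkColumns :: [PGCol]
examplePkColumns = toList (examplePk ^. pkColumns)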
|
||||
|
||||
|
@ -17,30 +17,30 @@ module Hasura.RQL.Types.SchemaCache
|
||||
, TableCoreCache
|
||||
, TableCache
|
||||
|
||||
, TableCoreInfo(..)
|
||||
, TableCoreInfoG(..)
|
||||
, TableRawInfo
|
||||
, TableCoreInfo
|
||||
, tciName
|
||||
, tciDescription
|
||||
, tciSystemDefined
|
||||
, tciFieldInfoMap
|
||||
, tciUniqueOrPrimaryKeyConstraints
|
||||
, tciPrimaryKeyColumns
|
||||
, tciPrimaryKey
|
||||
, tciUniqueConstraints
|
||||
, tciForeignKeys
|
||||
, tciViewInfo
|
||||
, tciEnumValues
|
||||
, tciCustomConfig
|
||||
, tciUniqueOrPrimaryKeyConstraints
|
||||
|
||||
, TableInfo(..)
|
||||
, tiCoreInfo
|
||||
, tiRolePermInfoMap
|
||||
, tiEventTriggerInfoMap
|
||||
|
||||
, TableConstraint(..)
|
||||
, ConstraintType(..)
|
||||
, ViewInfo(..)
|
||||
, checkForFieldConflict
|
||||
, isMutable
|
||||
, mutableView
|
||||
, isUniqueOrPrimary
|
||||
, isForeignKey
|
||||
|
||||
, RemoteSchemaCtx(..)
|
||||
, RemoteSchemaMap
|
||||
@ -276,55 +276,6 @@ $(deriveToJSON (aesonDrop 3 snakeCase) ''EventTriggerInfo)
|
||||
|
||||
type EventTriggerInfoMap = M.HashMap TriggerName EventTriggerInfo
|
||||
|
||||
data ConstraintType
|
||||
= CTCHECK
|
||||
| CTFOREIGNKEY
|
||||
| CTPRIMARYKEY
|
||||
| CTUNIQUE
|
||||
deriving (Eq, Generic)
|
||||
instance NFData ConstraintType
|
||||
|
||||
constraintTyToTxt :: ConstraintType -> T.Text
|
||||
constraintTyToTxt ty = case ty of
|
||||
CTCHECK -> "CHECK"
|
||||
CTFOREIGNKEY -> "FOREIGN KEY"
|
||||
CTPRIMARYKEY -> "PRIMARY KEY"
|
||||
CTUNIQUE -> "UNIQUE"
|
||||
|
||||
instance Show ConstraintType where
|
||||
show = T.unpack . constraintTyToTxt
|
||||
|
||||
instance FromJSON ConstraintType where
|
||||
parseJSON = withText "ConstraintType" $ \case
|
||||
"CHECK" -> return CTCHECK
|
||||
"FOREIGN KEY" -> return CTFOREIGNKEY
|
||||
"PRIMARY KEY" -> return CTPRIMARYKEY
|
||||
"UNIQUE" -> return CTUNIQUE
|
||||
c -> fail $ "unexpected ConstraintType: " <> T.unpack c
|
||||
|
||||
instance ToJSON ConstraintType where
|
||||
toJSON = String . constraintTyToTxt
|
||||
|
||||
isUniqueOrPrimary :: ConstraintType -> Bool
|
||||
isUniqueOrPrimary = \case
|
||||
CTPRIMARYKEY -> True
|
||||
CTUNIQUE -> True
|
||||
_ -> False
|
||||
|
||||
isForeignKey :: ConstraintType -> Bool
|
||||
isForeignKey = \case
|
||||
CTFOREIGNKEY -> True
|
||||
_ -> False
|
||||
|
||||
data TableConstraint
|
||||
= TableConstraint
|
||||
{ tcType :: !ConstraintType
|
||||
, tcName :: !ConstraintName
|
||||
} deriving (Show, Eq, Generic)
|
||||
instance NFData TableConstraint
|
||||
|
||||
$(deriveJSON (aesonDrop 2 snakeCase) ''TableConstraint)
|
||||
|
||||
data ViewInfo
|
||||
= ViewInfo
|
||||
{ viIsUpdatable :: !Bool
|
||||
@ -363,24 +314,38 @@ instance FromJSON TableConfig where
|
||||
<$> obj .:? "custom_root_fields" .!= GC.emptyCustomRootFields
|
||||
<*> obj .:? "custom_column_names" .!= M.empty
|
||||
|
||||
data TableCoreInfo fieldInfo
|
||||
-- | The @field@ and @primaryKeyColumn@ type parameters vary as the schema cache is built and more
|
||||
-- information is accumulated. See 'TableRawInfo' and 'TableCoreInfo'.
|
||||
data TableCoreInfoG field primaryKeyColumn
|
||||
= TableCoreInfo
|
||||
{ _tciName :: !QualifiedTable
|
||||
, _tciDescription :: !(Maybe PGDescription)
|
||||
, _tciSystemDefined :: !SystemDefined
|
||||
, _tciFieldInfoMap :: !(FieldInfoMap fieldInfo)
|
||||
, _tciUniqueOrPrimaryKeyConstraints :: ![ConstraintName]
|
||||
, _tciPrimaryKeyColumns :: ![PGCol]
|
||||
, _tciFieldInfoMap :: !(FieldInfoMap field)
|
||||
, _tciPrimaryKey :: !(Maybe (PrimaryKey primaryKeyColumn))
|
||||
, _tciUniqueConstraints :: ![Constraint]
|
||||
-- ^ Does /not/ include the primary key; use 'tciUniqueOrPrimaryKeyConstraints' if you need both.
|
||||
, _tciForeignKeys :: ![ForeignKey]
|
||||
, _tciViewInfo :: !(Maybe ViewInfo)
|
||||
, _tciEnumValues :: !(Maybe EnumValues)
|
||||
, _tciCustomConfig :: !TableConfig
|
||||
} deriving (Show, Eq)
|
||||
$(deriveToJSON (aesonDrop 4 snakeCase) ''TableCoreInfo)
|
||||
$(makeLenses ''TableCoreInfo)
|
||||
$(deriveToJSON (aesonDrop 4 snakeCase) ''TableCoreInfoG)
|
||||
$(makeLenses ''TableCoreInfoG)
|
||||
|
||||
-- | The result of the initial processing step for table info. Includes all basic information, but
|
||||
-- is missing non-column fields.
|
||||
type TableRawInfo = TableCoreInfoG PGColumnInfo PGColumnInfo
|
||||
-- | Fully-processed table info that includes non-column fields.
|
||||
type TableCoreInfo = TableCoreInfoG FieldInfo PGColumnInfo
|
||||
|
||||
tciUniqueOrPrimaryKeyConstraints :: TableCoreInfoG a b -> [Constraint]
|
||||
tciUniqueOrPrimaryKeyConstraints info =
|
||||
maybeToList (_pkConstraint <$> _tciPrimaryKey info) <> _tciUniqueConstraints info
|
||||
|
||||
data TableInfo
|
||||
= TableInfo
|
||||
{ _tiCoreInfo :: TableCoreInfo FieldInfo
|
||||
{ _tiCoreInfo :: TableCoreInfo
|
||||
, _tiRolePermInfoMap :: !RolePermInfoMap
|
||||
, _tiEventTriggerInfoMap :: !EventTriggerInfoMap
|
||||
} deriving (Show, Eq)
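Code that previously consumed the old _tciPrimaryKeyColumns list can recover it from the new _tciPrimaryKey field; a small sketch (the helper name is made up for illustration):

import Data.Foldable (toList)

-- Hypothetical helper: the primary-key columns of a table, or [] when the
-- table has no primary key.
tablePrimaryKeyColumns :: TableCoreInfoG field PGColumnInfo -> [PGColumnInfo]
tablePrimaryKeyColumns info = maybe [] (toList . _pkColumns) (_tciPrimaryKey info)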
|
||||
@ -389,7 +354,7 @@ $(makeLenses ''TableInfo)
|
||||
|
||||
checkForFieldConflict
|
||||
:: (MonadError QErr m)
|
||||
=> TableCoreInfo fieldInfo
|
||||
=> TableCoreInfoG a b
|
||||
-> FieldName
|
||||
-> m ()
|
||||
checkForFieldConflict tableInfo f =
|
||||
@ -401,7 +366,7 @@ checkForFieldConflict tableInfo f =
|
||||
]
|
||||
Nothing -> return ()
|
||||
|
||||
type TableCoreCache = M.HashMap QualifiedTable (TableCoreInfo FieldInfo)
|
||||
type TableCoreCache = M.HashMap QualifiedTable TableCoreInfo
|
||||
type TableCache = M.HashMap QualifiedTable TableInfo -- info of all tables
|
||||
type FunctionCache = M.HashMap QualifiedFunction FunctionInfo -- info of all functions
|
||||
|
||||
@ -451,8 +416,8 @@ getFuncsOfTable qt fc = flip filter allFuncs $ \f -> qt == fiReturnType f
|
||||
-- | A more limited version of 'CacheRM' that is used when building the schema cache, since the
|
||||
-- entire schema cache has not been built yet.
|
||||
class (Monad m) => TableCoreInfoRM m where
|
||||
lookupTableCoreInfo :: QualifiedTable -> m (Maybe (TableCoreInfo FieldInfo))
|
||||
default lookupTableCoreInfo :: (CacheRM m) => QualifiedTable -> m (Maybe (TableCoreInfo FieldInfo))
|
||||
lookupTableCoreInfo :: QualifiedTable -> m (Maybe TableCoreInfo)
|
||||
default lookupTableCoreInfo :: (CacheRM m) => QualifiedTable -> m (Maybe TableCoreInfo)
|
||||
lookupTableCoreInfo tableName = fmap _tiCoreInfo . M.lookup tableName . scTables <$> askSchemaCache
|
||||
|
||||
instance (TableCoreInfoRM m) => TableCoreInfoRM (ReaderT r m) where
|
||||
|
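A short sketch of how the restricted TableCoreInfoRM interface can be consumed while the cache is still being built (the helper is hypothetical):

-- Hypothetical helper: look up a table's primary key using only
-- TableCoreInfoRM, without requiring the full schema cache.
lookupPrimaryKey
  :: (TableCoreInfoRM m)
  => QualifiedTable -> m (Maybe (PrimaryKey PGColumnInfo))
lookupPrimaryKey tableName = (>>= _tciPrimaryKey) <$> lookupTableCoreInfo tableName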
@ -18,11 +18,10 @@ data TableObjId
|
||||
= TOCol !PGCol
|
||||
| TORel !RelName
|
||||
| TOComputedField !ComputedFieldName
|
||||
| TOCons !ConstraintName
|
||||
| TOForeignKey !ConstraintName
|
||||
| TOPerm !RoleName !PermType
|
||||
| TOTrigger !TriggerName
|
||||
deriving (Show, Eq, Generic)
|
||||
|
||||
instance Hashable TableObjId
|
||||
|
||||
data SchemaObjId
|
||||
@ -40,7 +39,7 @@ reportSchemaObj (SOTableObj tn (TOCol cn)) =
|
||||
"column " <> qualObjectToText tn <> "." <> getPGColTxt cn
|
||||
reportSchemaObj (SOTableObj tn (TORel cn)) =
|
||||
"relationship " <> qualObjectToText tn <> "." <> relNameToTxt cn
|
||||
reportSchemaObj (SOTableObj tn (TOCons cn)) =
|
||||
reportSchemaObj (SOTableObj tn (TOForeignKey cn)) =
|
||||
"constraint " <> qualObjectToText tn <> "." <> getConstraintTxt cn
|
||||
reportSchemaObj (SOTableObj tn (TOPerm rn pt)) =
|
||||
"permission " <> qualObjectToText tn <> "." <> roleNameToTxt rn
|
||||
|
@ -12,7 +12,7 @@ import Hasura.Prelude
|
||||
import qualified Data.Text as T
|
||||
|
||||
latestCatalogVersion :: Integer
|
||||
latestCatalogVersion = 28
|
||||
latestCatalogVersion = 29
|
||||
|
||||
latestCatalogVersionString :: T.Text
|
||||
latestCatalogVersionString = T.pack $ show latestCatalogVersion
|
||||
|
@ -1,4 +1,4 @@
|
||||
{-# OPTIONS_GHC -fforce-recomp #-}
|
||||
-- {-# OPTIONS_GHC -fforce-recomp #-}
|
||||
module Hasura.Server.Version
|
||||
( currentVersion
|
||||
, consoleVersion
|
||||
|
@ -1,21 +1,20 @@
|
||||
select
|
||||
json_build_object(
|
||||
'tables', tables.items,
|
||||
'tables', tables.items :: json,
|
||||
'relations', relations.items,
|
||||
'permissions', permissions.items,
|
||||
'event_triggers', event_triggers.items,
|
||||
'remote_schemas', remote_schemas.items,
|
||||
'functions', functions.items,
|
||||
'foreign_keys', foreign_keys.items,
|
||||
'allowlist_collections', allowlist.item,
|
||||
'computed_fields', computed_field.items
|
||||
)
|
||||
from
|
||||
(
|
||||
select
|
||||
coalesce(json_agg(
|
||||
json_build_object(
|
||||
'name', json_build_object(
|
||||
coalesce(jsonb_agg(
|
||||
jsonb_build_object(
|
||||
'name', jsonb_build_object(
|
||||
'name', ht.table_name,
|
||||
'schema', ht.table_schema
|
||||
),
|
||||
@ -25,20 +24,8 @@ from
|
||||
'info', t.info
|
||||
)
|
||||
), '[]') as items
|
||||
from hdb_catalog.hdb_table as ht
|
||||
left outer join (
|
||||
select
|
||||
table_schema,
|
||||
table_name,
|
||||
jsonb_build_object(
|
||||
'description', description,
|
||||
'columns', columns,
|
||||
'primary_key_columns', primary_key_columns,
|
||||
'constraints', constraints,
|
||||
'view_info', view_info
|
||||
) as info
|
||||
from hdb_catalog.hdb_table_info_agg
|
||||
) as t using (table_schema, table_name)
|
||||
from hdb_catalog.hdb_table ht
|
||||
left join hdb_catalog.hdb_table_info_agg t using (table_schema, table_name)
|
||||
) as tables,
|
||||
(
|
||||
select
|
||||
@ -144,35 +131,6 @@ from
|
||||
) hf_agg on 'true'
|
||||
) as q
|
||||
) as functions,
|
||||
(
|
||||
select
|
||||
coalesce(json_agg(foreign_key.info), '[]') as items
|
||||
from
|
||||
(
|
||||
select
|
||||
json_build_object(
|
||||
'table',
|
||||
json_build_object(
|
||||
'schema', f.table_schema,
|
||||
'name', f.table_name
|
||||
),
|
||||
'ref_table',
|
||||
json_build_object(
|
||||
'schema', f.ref_table_table_schema,
|
||||
'name', f.ref_table
|
||||
),
|
||||
'oid', f.constraint_oid,
|
||||
'constraint', f.constraint_name,
|
||||
'column_mapping', f.column_mapping
|
||||
) as info
|
||||
from
|
||||
hdb_catalog.hdb_foreign_key_constraint f
|
||||
left outer join hdb_catalog.hdb_table ht
|
||||
on ( ht.table_schema = f.table_schema
|
||||
and ht.table_name = f.table_name
|
||||
)
|
||||
) as foreign_key
|
||||
) as foreign_keys,
|
||||
(
|
||||
select
|
||||
coalesce(json_agg(hqc.collection_defn), '[]') as item
|
||||
|
@ -426,129 +426,108 @@ CREATE TRIGGER hdb_schema_update_event_notifier AFTER INSERT OR UPDATE ON
|
||||
hdb_catalog.hdb_schema_update_event FOR EACH ROW EXECUTE PROCEDURE
|
||||
hdb_catalog.hdb_schema_update_event_notifier();
|
||||
|
||||
CREATE VIEW hdb_catalog.hdb_column AS
|
||||
WITH primary_key_references AS (
|
||||
SELECT fkey.table_schema AS src_table_schema
|
||||
, fkey.table_name AS src_table_name
|
||||
, fkey.columns->>0 AS src_column_name
|
||||
, json_agg(json_build_object(
|
||||
'schema', fkey.ref_table_table_schema,
|
||||
'name', fkey.ref_table
|
||||
)) AS ref_tables
|
||||
FROM hdb_catalog.hdb_foreign_key_constraint AS fkey
|
||||
JOIN hdb_catalog.hdb_primary_key AS pkey
|
||||
ON pkey.table_schema = fkey.ref_table_table_schema
|
||||
AND pkey.table_name = fkey.ref_table
|
||||
AND pkey.columns::jsonb = fkey.ref_columns::jsonb
|
||||
WHERE json_array_length(fkey.columns) = 1
|
||||
GROUP BY fkey.table_schema
|
||||
, fkey.table_name
|
||||
, fkey.columns->>0)
|
||||
SELECT columns.table_schema
|
||||
, columns.table_name
|
||||
, columns.column_name AS name
|
||||
, columns.udt_name AS type
|
||||
, columns.is_nullable
|
||||
, columns.ordinal_position
|
||||
, coalesce(pkey_refs.ref_tables, '[]') AS primary_key_references
|
||||
, col_description(pg_class.oid, columns.ordinal_position) AS description
|
||||
FROM information_schema.columns
|
||||
JOIN pg_class ON pg_class.relname = columns.table_name
|
||||
JOIN pg_namespace ON pg_namespace.oid = pg_class.relnamespace
|
||||
AND pg_namespace.nspname = columns.table_schema
|
||||
LEFT JOIN primary_key_references AS pkey_refs
|
||||
ON columns.table_schema = pkey_refs.src_table_schema
|
||||
AND columns.table_name = pkey_refs.src_table_name
|
||||
AND columns.column_name = pkey_refs.src_column_name;
|
||||
CREATE VIEW hdb_catalog.hdb_table_info_agg AS
|
||||
SELECT
|
||||
schema.nspname AS table_schema,
|
||||
"table".relname AS table_name,
|
||||
|
||||
CREATE VIEW hdb_catalog.hdb_table_info_agg AS (
|
||||
select
|
||||
tables.table_name as table_name,
|
||||
tables.table_schema as table_schema,
|
||||
descriptions.description,
|
||||
coalesce(columns.columns, '[]') as columns,
|
||||
coalesce(pk.columns, '[]') as primary_key_columns,
|
||||
coalesce(constraints.constraints, '[]') as constraints,
|
||||
coalesce(views.view_info, 'null') as view_info
|
||||
from
|
||||
information_schema.tables as tables
|
||||
left outer join (
|
||||
select
|
||||
c.table_name,
|
||||
c.table_schema,
|
||||
json_agg(
|
||||
json_build_object(
|
||||
'name', name,
|
||||
'type', type,
|
||||
'is_nullable', is_nullable :: boolean,
|
||||
'references', primary_key_references,
|
||||
'description', description
|
||||
)
|
||||
) as columns
|
||||
from
|
||||
hdb_catalog.hdb_column c
|
||||
group by
|
||||
c.table_schema,
|
||||
c.table_name
|
||||
) columns on (
|
||||
tables.table_schema = columns.table_schema
|
||||
AND tables.table_name = columns.table_name
|
||||
)
|
||||
left outer join (
|
||||
select * from hdb_catalog.hdb_primary_key
|
||||
) pk on (
|
||||
tables.table_schema = pk.table_schema
|
||||
AND tables.table_name = pk.table_name
|
||||
)
|
||||
left outer join (
|
||||
select
|
||||
c.table_schema,
|
||||
c.table_name,
|
||||
json_agg(constraint_name) as constraints
|
||||
from
|
||||
information_schema.table_constraints c
|
||||
where
|
||||
c.constraint_type = 'UNIQUE'
|
||||
or c.constraint_type = 'PRIMARY KEY'
|
||||
group by
|
||||
c.table_schema,
|
||||
c.table_name
|
||||
) constraints on (
|
||||
tables.table_schema = constraints.table_schema
|
||||
AND tables.table_name = constraints.table_name
|
||||
)
|
||||
left outer join (
|
||||
select
|
||||
table_schema,
|
||||
table_name,
|
||||
json_build_object(
|
||||
'is_updatable',
|
||||
(is_updatable::boolean OR is_trigger_updatable::boolean),
|
||||
'is_deletable',
|
||||
(is_updatable::boolean OR is_trigger_deletable::boolean),
|
||||
'is_insertable',
|
||||
(is_insertable_into::boolean OR is_trigger_insertable_into::boolean)
|
||||
) as view_info
|
||||
from
|
||||
information_schema.views v
|
||||
) views on (
|
||||
tables.table_schema = views.table_schema
|
||||
AND tables.table_name = views.table_name
|
||||
)
|
||||
left outer join (
|
||||
select
|
||||
pc.relname as table_name,
|
||||
pn.nspname as table_schema,
|
||||
pd.description
|
||||
from pg_class pc
|
||||
left join pg_namespace pn on pn.oid = pc.relnamespace
|
||||
left join pg_description pd on pd.objoid = pc.oid
|
||||
where pd.objsubid = 0
|
||||
) descriptions on (
|
||||
tables.table_schema = descriptions.table_schema
|
||||
AND tables.table_name = descriptions.table_name
|
||||
)
|
||||
);
|
||||
-- This field corresponds to the `CatalogTableInfo` Haskell type
|
||||
jsonb_build_object(
|
||||
'oid', "table".oid :: integer,
|
||||
'columns', coalesce(columns.info, '[]'),
|
||||
'primary_key', primary_key.info,
|
||||
-- Note: unique_constraints does NOT include primary key constraints!
|
||||
'unique_constraints', coalesce(unique_constraints.info, '[]'),
|
||||
'foreign_keys', coalesce(foreign_key_constraints.info, '[]'),
|
||||
'view_info', CASE "table".relkind WHEN 'v' THEN jsonb_build_object(
|
||||
'is_updatable', ((pg_catalog.pg_relation_is_updatable("table".oid, true) & 4) = 4),
|
||||
'is_insertable', ((pg_catalog.pg_relation_is_updatable("table".oid, true) & 8) = 8),
|
||||
'is_deletable', ((pg_catalog.pg_relation_is_updatable("table".oid, true) & 16) = 16)
|
||||
) END,
|
||||
'description', description.description
|
||||
) AS info
|
||||
|
||||
-- table & schema
|
||||
FROM pg_catalog.pg_class "table"
|
||||
JOIN pg_catalog.pg_namespace schema
|
||||
ON schema.oid = "table".relnamespace
|
||||
|
||||
-- description
|
||||
LEFT JOIN pg_catalog.pg_description description
|
||||
ON description.classoid = 'pg_catalog.pg_class'::regclass
|
||||
AND description.objoid = "table".oid
|
||||
AND description.objsubid = 0
|
||||
|
||||
-- columns
|
||||
LEFT JOIN LATERAL
|
||||
( SELECT jsonb_agg(jsonb_build_object(
|
||||
'name', "column".attname,
|
||||
'position', "column".attnum,
|
||||
'type', "type".typname,
|
||||
'is_nullable', NOT "column".attnotnull,
|
||||
'description', pg_catalog.col_description("table".oid, "column".attnum)
|
||||
)) AS info
|
||||
FROM pg_catalog.pg_attribute "column"
|
||||
LEFT JOIN pg_catalog.pg_type "type"
|
||||
ON "type".oid = "column".atttypid
|
||||
WHERE "column".attrelid = "table".oid
|
||||
-- columns where attnum <= 0 are special, system-defined columns
|
||||
AND "column".attnum > 0
|
||||
-- dropped columns still exist in the system catalog as “zombie” columns, so ignore those
|
||||
AND NOT "column".attisdropped
|
||||
) columns ON true
|
||||
|
||||
-- primary key
|
||||
LEFT JOIN LATERAL
|
||||
( SELECT jsonb_build_object(
|
||||
'constraint', jsonb_build_object('name', class.relname, 'oid', class.oid :: integer),
|
||||
'columns', coalesce(columns.info, '[]')
|
||||
) AS info
|
||||
FROM pg_catalog.pg_index index
|
||||
JOIN pg_catalog.pg_class class
|
||||
ON class.oid = index.indexrelid
|
||||
LEFT JOIN LATERAL
|
||||
( SELECT jsonb_agg("column".attname) AS info
|
||||
FROM pg_catalog.pg_attribute "column"
|
||||
WHERE "column".attrelid = "table".oid
|
||||
AND "column".attnum = ANY (index.indkey)
|
||||
) AS columns ON true
|
||||
WHERE index.indrelid = "table".oid
|
||||
AND index.indisprimary
|
||||
) primary_key ON true
|
||||
|
||||
-- unique constraints
|
||||
LEFT JOIN LATERAL
|
||||
( SELECT jsonb_agg(jsonb_build_object('name', class.relname, 'oid', class.oid :: integer)) AS info
|
||||
FROM pg_catalog.pg_index index
|
||||
JOIN pg_catalog.pg_class class
|
||||
ON class.oid = index.indexrelid
|
||||
WHERE index.indrelid = "table".oid
|
||||
AND index.indisunique
|
||||
AND NOT index.indisprimary
|
||||
) unique_constraints ON true
|
||||
|
||||
-- foreign keys
|
||||
LEFT JOIN LATERAL
|
||||
( SELECT jsonb_agg(jsonb_build_object(
|
||||
'constraint', jsonb_build_object(
|
||||
'name', foreign_key.constraint_name,
|
||||
'oid', foreign_key.constraint_oid :: integer
|
||||
),
|
||||
'columns', foreign_key.columns,
|
||||
'foreign_table', jsonb_build_object(
|
||||
'schema', foreign_key.ref_table_table_schema,
|
||||
'name', foreign_key.ref_table
|
||||
),
|
||||
'foreign_columns', foreign_key.ref_columns
|
||||
)) AS info
|
||||
FROM hdb_catalog.hdb_foreign_key_constraint foreign_key
|
||||
WHERE foreign_key.table_schema = schema.nspname
|
||||
AND foreign_key.table_name = "table".relname
|
||||
) foreign_key_constraints ON true
|
||||
|
||||
-- all these identify table-like things
|
||||
WHERE "table".relkind IN ('r', 't', 'v', 'm', 'f', 'p');
|
||||
|
||||
CREATE VIEW hdb_catalog.hdb_function_info_agg AS (
|
||||
SELECT
|
||||
|
105
server/src-rsr/migrations/28_to_29.sql
Normal file
@ -0,0 +1,105 @@
|
||||
DROP VIEW hdb_catalog.hdb_table_info_agg;
|
||||
DROP VIEW hdb_catalog.hdb_column;
|
||||
|
||||
CREATE VIEW hdb_catalog.hdb_table_info_agg AS
|
||||
SELECT
|
||||
schema.nspname AS table_schema,
|
||||
"table".relname AS table_name,
|
||||
|
||||
-- This field corresponds to the `CatalogTableInfo` Haskell type
|
||||
jsonb_build_object(
|
||||
'oid', "table".oid :: integer,
|
||||
'columns', coalesce(columns.info, '[]'),
|
||||
'primary_key', primary_key.info,
|
||||
-- Note: unique_constraints does NOT include primary key constraints!
|
||||
'unique_constraints', coalesce(unique_constraints.info, '[]'),
|
||||
'foreign_keys', coalesce(foreign_key_constraints.info, '[]'),
|
||||
'view_info', CASE "table".relkind WHEN 'v' THEN jsonb_build_object(
|
||||
'is_updatable', ((pg_catalog.pg_relation_is_updatable("table".oid, true) & 4) = 4),
|
||||
'is_insertable', ((pg_catalog.pg_relation_is_updatable("table".oid, true) & 8) = 8),
|
||||
'is_deletable', ((pg_catalog.pg_relation_is_updatable("table".oid, true) & 16) = 16)
|
||||
) END,
|
||||
'description', description.description
|
||||
) AS info
|
||||
|
||||
-- table & schema
|
||||
FROM pg_catalog.pg_class "table"
|
||||
JOIN pg_catalog.pg_namespace schema
|
||||
ON schema.oid = "table".relnamespace
|
||||
|
||||
-- description
|
||||
LEFT JOIN pg_catalog.pg_description description
|
||||
ON description.classoid = 'pg_catalog.pg_class'::regclass
|
||||
AND description.objoid = "table".oid
|
||||
AND description.objsubid = 0
|
||||
|
||||
-- columns
|
||||
LEFT JOIN LATERAL
|
||||
( SELECT jsonb_agg(jsonb_build_object(
|
||||
'name', "column".attname,
|
||||
'position', "column".attnum,
|
||||
'type', "type".typname,
|
||||
'is_nullable', NOT "column".attnotnull,
|
||||
'description', pg_catalog.col_description("table".oid, "column".attnum)
|
||||
)) AS info
|
||||
FROM pg_catalog.pg_attribute "column"
|
||||
LEFT JOIN pg_catalog.pg_type "type"
|
||||
ON "type".oid = "column".atttypid
|
||||
WHERE "column".attrelid = "table".oid
|
||||
-- columns where attnum <= 0 are special, system-defined columns
|
||||
AND "column".attnum > 0
|
||||
-- dropped columns still exist in the system catalog as “zombie” columns, so ignore those
|
||||
AND NOT "column".attisdropped
|
||||
) columns ON true
|
||||
|
||||
-- primary key
|
||||
LEFT JOIN LATERAL
|
||||
( SELECT jsonb_build_object(
|
||||
'constraint', jsonb_build_object('name', class.relname, 'oid', class.oid :: integer),
|
||||
'columns', coalesce(columns.info, '[]')
|
||||
) AS info
|
||||
FROM pg_catalog.pg_index index
|
||||
JOIN pg_catalog.pg_class class
|
||||
ON class.oid = index.indexrelid
|
||||
LEFT JOIN LATERAL
|
||||
( SELECT jsonb_agg("column".attname) AS info
|
||||
FROM pg_catalog.pg_attribute "column"
|
||||
WHERE "column".attrelid = "table".oid
|
||||
AND "column".attnum = ANY (index.indkey)
|
||||
) AS columns ON true
|
||||
WHERE index.indrelid = "table".oid
|
||||
AND index.indisprimary
|
||||
) primary_key ON true
|
||||
|
||||
-- unique constraints
|
||||
LEFT JOIN LATERAL
|
||||
( SELECT jsonb_agg(jsonb_build_object('name', class.relname, 'oid', class.oid :: integer)) AS info
|
||||
FROM pg_catalog.pg_index index
|
||||
JOIN pg_catalog.pg_class class
|
||||
ON class.oid = index.indexrelid
|
||||
WHERE index.indrelid = "table".oid
|
||||
AND index.indisunique
|
||||
AND NOT index.indisprimary
|
||||
) unique_constraints ON true
|
||||
|
||||
-- foreign keys
|
||||
LEFT JOIN LATERAL
|
||||
( SELECT jsonb_agg(jsonb_build_object(
|
||||
'constraint', jsonb_build_object(
|
||||
'name', foreign_key.constraint_name,
|
||||
'oid', foreign_key.constraint_oid :: integer
|
||||
),
|
||||
'columns', foreign_key.columns,
|
||||
'foreign_table', jsonb_build_object(
|
||||
'schema', foreign_key.ref_table_table_schema,
|
||||
'name', foreign_key.ref_table
|
||||
),
|
||||
'foreign_columns', foreign_key.ref_columns
|
||||
)) AS info
|
||||
FROM hdb_catalog.hdb_foreign_key_constraint foreign_key
|
||||
WHERE foreign_key.table_schema = schema.nspname
|
||||
AND foreign_key.table_name = "table".relname
|
||||
) foreign_key_constraints ON true
|
||||
|
||||
-- all these identify table-like things
|
||||
WHERE "table".relkind IN ('r', 't', 'v', 'm', 'f', 'p');
|
@ -1,108 +1,9 @@
|
||||
SELECT
|
||||
t.table_schema,
|
||||
t.table_name,
|
||||
t.table_oid,
|
||||
t.description,
|
||||
coalesce(c.columns, '[]') as columns,
|
||||
coalesce(f.constraints, '[]') as constraints,
|
||||
coalesce(fk.fkeys, '[]') as foreign_keys,
|
||||
t.info :: json,
|
||||
coalesce(cc.computed_fields, '[]') as computed_fields
|
||||
FROM
|
||||
(
|
||||
SELECT
|
||||
c.oid as table_oid,
|
||||
c.relname as table_name,
|
||||
n.nspname as table_schema,
|
||||
pd.description as description
|
||||
FROM
|
||||
pg_catalog.pg_class c
|
||||
JOIN pg_catalog.pg_namespace as n ON c.relnamespace = n.oid
|
||||
LEFT JOIN pg_catalog.pg_description pd on (c.oid = pd.objoid and pd.objsubid = 0)
|
||||
) t
|
||||
LEFT OUTER JOIN (
|
||||
SELECT
|
||||
table_schema,
|
||||
table_name,
|
||||
json_agg(
|
||||
json_build_object(
|
||||
'column_name', name,
|
||||
'data_type', type,
|
||||
'is_nullable', is_nullable :: boolean,
|
||||
'ordinal_position', ordinal_position,
|
||||
'references', primary_key_references,
|
||||
'description', description
|
||||
)
|
||||
) as columns
|
||||
FROM
|
||||
hdb_catalog.hdb_column
|
||||
GROUP BY
|
||||
table_schema,
|
||||
table_name
|
||||
) c ON (
|
||||
t.table_schema = c.table_schema
|
||||
AND t.table_name = c.table_name
|
||||
)
|
||||
LEFT OUTER JOIN (
|
||||
SELECT
|
||||
tc.table_schema,
|
||||
tc.table_name,
|
||||
json_agg(
|
||||
json_build_object(
|
||||
'name',
|
||||
tc.constraint_name,
|
||||
'oid',
|
||||
r.oid :: integer,
|
||||
'type',
|
||||
tc.constraint_type
|
||||
)
|
||||
) as constraints
|
||||
FROM
|
||||
information_schema.table_constraints tc
|
||||
JOIN pg_catalog.pg_constraint r ON tc.constraint_name = r.conname
|
||||
GROUP BY
|
||||
table_schema,
|
||||
table_name
|
||||
) f ON (
|
||||
t.table_schema = f.table_schema
|
||||
AND t.table_name = f.table_name
|
||||
)
|
||||
LEFT OUTER JOIN (
|
||||
SELECT
|
||||
f.table_schema,
|
||||
f.table_name,
|
||||
json_agg(
|
||||
json_build_object(
|
||||
'table',
|
||||
json_build_object(
|
||||
'schema',
|
||||
f.table_schema,
|
||||
'name',
|
||||
f.table_name
|
||||
),
|
||||
'ref_table',
|
||||
json_build_object(
|
||||
'schema',
|
||||
f.ref_table_table_schema,
|
||||
'name',
|
||||
f.ref_table
|
||||
),
|
||||
'oid',
|
||||
f.constraint_oid,
|
||||
'constraint',
|
||||
f.constraint_name,
|
||||
'column_mapping',
|
||||
f.column_mapping
|
||||
)
|
||||
) as fkeys
|
||||
FROM
|
||||
hdb_catalog.hdb_foreign_key_constraint f
|
||||
GROUP BY
|
||||
table_schema,
|
||||
table_name
|
||||
) fk ON (
|
||||
fk.table_schema = t.table_schema
|
||||
AND fk.table_name = t.table_name
|
||||
)
|
||||
FROM hdb_catalog.hdb_table_info_agg t
|
||||
LEFT OUTER JOIN (
|
||||
SELECT
|
||||
c.table_schema,
|
||||
@ -135,3 +36,4 @@ WHERE
|
||||
t.table_schema NOT LIKE 'pg_%'
|
||||
AND t.table_schema <> 'information_schema'
|
||||
AND t.table_schema <> 'hdb_catalog'
|
||||
AND t.table_schema <> 'hdb_views'
|
||||
|
@ -69,7 +69,7 @@ inconsistent_objects:
|
||||
comment:
|
||||
table: author
|
||||
reason: >-
|
||||
in table "author": in relationship "articles": no foreign constraint exists on the given column
|
||||
in table "author": in relationship "articles": table "article" does not exist
|
||||
type: array_relation
|
||||
|
||||
# Teardown
|
||||
|
@ -2,12 +2,19 @@ description: Attempts to mark a non-enum table as an enum are rejected
|
||||
url: /v1/query
|
||||
status: 400
|
||||
response:
|
||||
code: constraint-violation
|
||||
error: |
|
||||
the table "employees" cannot be used as an enum for the following reasons:
|
||||
• the table’s primary key ("id") must have type "text", not type "integer"
|
||||
• the table must have exactly one primary key and optionally one comment column, not 4 columns (favorite_color, gets_paid_on, id, name)
|
||||
internal:
|
||||
- definition: employees
|
||||
reason: "the table \"employees\" cannot be used as an enum for the following reasons:\n\
|
||||
\ • the table’s primary key (\"id\") must have type \"text\", not type \"integer\"\
|
||||
\n • the table must have exactly one primary key and optionally one comment\
|
||||
\ column, not 4 columns (favorite_color, gets_paid_on, id, name)\n"
|
||||
type: table
|
||||
path: $.args
|
||||
error: "the table \"employees\" cannot be used as an enum for the following reasons:\n\
|
||||
\ • the table’s primary key (\"id\") must have type \"text\", not type \"integer\"\
|
||||
\n • the table must have exactly one primary key and optionally one comment column,\
|
||||
\ not 4 columns (favorite_color, gets_paid_on, id, name)\n"
|
||||
code: constraint-violation
|
||||
query:
|
||||
type: set_table_is_enum
|
||||
args:
|
||||
|