{-# LANGUAGE Arrows #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE UndecidableInstances #-}

-- | Types/functions shared between modules that implement "Hasura.RQL.DDL.Schema.Cache". Other
-- modules should not import this module directly.
module Hasura.RQL.DDL.Schema.Cache.Common where

import           Hasura.Prelude

import qualified Data.HashMap.Strict.Extended as M
import qualified Data.HashMap.Strict.InsOrd   as OMap
import qualified Data.HashSet                 as HS
import qualified Data.Sequence                as Seq
import qualified Network.HTTP.Client          as HTTP

import           Control.Arrow.Extended
import           Control.Lens
import           Control.Monad.Trans.Control  (MonadBaseControl)
import           Control.Monad.Unique
import           Data.Text.Extended

import qualified Hasura.Incremental           as Inc

import           Hasura.Backends.Postgres.SQL.Types
import           Hasura.RQL.Types

-- | 'InvalidationKeys' used to apply requested 'CacheInvalidations'.
data InvalidationKeys = InvalidationKeys
  { _ikMetadata      :: !Inc.InvalidationKey
  , _ikRemoteSchemas :: !(HashMap RemoteSchemaName Inc.InvalidationKey)
  } deriving (Show, Eq, Generic)
instance Inc.Cacheable InvalidationKeys
instance Inc.Select InvalidationKeys
$(makeLenses ''InvalidationKeys)

initialInvalidationKeys :: InvalidationKeys
initialInvalidationKeys = InvalidationKeys Inc.initialInvalidationKey mempty

invalidateKeys :: CacheInvalidations -> InvalidationKeys -> InvalidationKeys
invalidateKeys CacheInvalidations{..} InvalidationKeys{..} = InvalidationKeys
  { _ikMetadata = if ciMetadata then Inc.invalidate _ikMetadata else _ikMetadata
  , _ikRemoteSchemas = foldl' (flip invalidateRemoteSchema) _ikRemoteSchemas ciRemoteSchemas }
  where
    invalidateRemoteSchema = M.alter $ Just . maybe Inc.initialInvalidationKey Inc.invalidate
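
-- An illustrative sketch (not part of the original module): assuming a value
-- @invalidations :: CacheInvalidations@ with @ciMetadata = True@ and
-- @ciRemoteSchemas@ containing a single remote schema name, applying it to the
-- initial keys bumps the metadata key and inserts a fresh key for that schema:
--
-- > let keys = invalidateKeys invalidations initialInvalidationKeys
-- > -- _ikMetadata keys      == Inc.invalidate Inc.initialInvalidationKey
-- > -- _ikRemoteSchemas keys == a singleton map holding a fresh invalidation key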

data TableBuildInput
  = TableBuildInput
  { _tbiName          :: !QualifiedTable
  , _tbiIsEnum        :: !Bool
  , _tbiConfiguration :: !TableConfig
  } deriving (Show, Eq, Generic)
instance NFData TableBuildInput
instance Inc.Cacheable TableBuildInput

data NonColumnTableInputs
  = NonColumnTableInputs
  { _nctiTable               :: !QualifiedTable
  , _nctiObjectRelationships :: ![ObjRelDef]
  , _nctiArrayRelationships  :: ![ArrRelDef]
  , _nctiComputedFields      :: ![ComputedFieldMetadata]
  , _nctiRemoteRelationships :: ![RemoteRelationshipMetadata]
  } deriving (Show, Eq, Generic)
-- instance NFData NonColumnTableInputs
-- instance Inc.Cacheable NonColumnTableInputs

data TablePermissionInputs
  = TablePermissionInputs
  { _tpiTable  :: !QualifiedTable
  , _tpiInsert :: ![InsPermDef 'Postgres]
  , _tpiSelect :: ![SelPermDef 'Postgres]
  , _tpiUpdate :: ![UpdPermDef 'Postgres]
  , _tpiDelete :: ![DelPermDef 'Postgres]
  } deriving (Show, Eq, Generic)
instance Inc.Cacheable TablePermissionInputs

mkTableInputs :: TableMetadata -> (TableBuildInput, NonColumnTableInputs, TablePermissionInputs)
mkTableInputs TableMetadata{..} =
  (buildInput, nonColumns, permissions)
  where
    buildInput = TableBuildInput _tmTable _tmIsEnum _tmConfiguration
    nonColumns = NonColumnTableInputs _tmTable
                 (OMap.elems _tmObjectRelationships)
                 (OMap.elems _tmArrayRelationships)
                 (OMap.elems _tmComputedFields)
                 (OMap.elems _tmRemoteRelationships)
    permissions = TablePermissionInputs _tmTable
                  (OMap.elems _tmInsertPermissions)
                  (OMap.elems _tmSelectPermissions)
                  (OMap.elems _tmUpdatePermissions)
                  (OMap.elems _tmDeletePermissions)
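
-- An illustrative sketch (not part of the original module): splitting a
-- table's metadata into its three build inputs, assuming some hypothetical
-- @tableMetadata :: TableMetadata@ is at hand:
--
-- > let (buildInput, nonColumnInputs, permissionInputs) = mkTableInputs tableMetadata
-- > -- 'buildInput' seeds the core table info, while relationships, computed
-- > -- fields, remote relationships and permissions are processed by separate,
-- > -- independently cached build steps.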

-- | The direct output of 'buildSchemaCacheRule'. Contains most of the things necessary to build a
-- schema cache, but dependencies and inconsistent metadata objects are collected via a separate
-- 'MonadWriter' side channel.
data BuildOutputs
  = BuildOutputs
  { _boTables        :: !(TableCache 'Postgres)
  , _boActions       :: !ActionCache
  , _boFunctions     :: !FunctionCache
  , _boRemoteSchemas :: !(HashMap RemoteSchemaName (RemoteSchemaCtx, MetadataObject))
  -- ^ We preserve the 'MetadataObject' from the original catalog metadata in the output so we can
  -- reuse it later if we need to mark the remote schema inconsistent during GraphQL schema
  -- generation (because of field conflicts).
  , _boAllowlist     :: !(HS.HashSet GQLQuery)
  , _boCustomTypes   :: !(AnnotatedCustomTypes 'Postgres)
  , _boCronTriggers  :: !(M.HashMap TriggerName CronTriggerInfo)
  }
$(makeLenses ''BuildOutputs)
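
-- An illustrative sketch (not part of the original module): the generated
-- lenses give later build phases focused access to these intermediate outputs.
-- For example, assuming 'TableCache' is a 'HashMap' keyed by 'QualifiedTable':
--
-- > lookupBuiltTable :: QualifiedTable -> BuildOutputs -> Maybe (TableInfo 'Postgres)
-- > lookupBuiltTable name outputs = M.lookup name (outputs ^. boTables)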

-- | Parameters required for schema cache build
data CacheBuildParams
  = CacheBuildParams
  { _cbpManager   :: !HTTP.Manager
  , _cbpSqlGenCtx :: !SQLGenCtx
  }

-- | The monad in which @'RebuildableSchemaCache' is being run
newtype CacheBuild a
  = CacheBuild {unCacheBuild :: ReaderT CacheBuildParams (LazyTxT QErr IO) a}
  deriving ( Functor, Applicative, Monad
           , MonadError QErr
           , MonadReader CacheBuildParams
           , MonadIO
           , MonadTx
           , MonadBase IO
           , MonadBaseControl IO
           , MonadUnique
           )

instance HasHttpManager CacheBuild where
  askHttpManager = asks _cbpManager

instance HasSQLGenCtx CacheBuild where
  askSQLGenCtx = asks _cbpSqlGenCtx

runCacheBuild
  :: ( MonadIO m
     , HasHttpManager m
     , HasSQLGenCtx m
     , MonadTx m
     )
  => CacheBuild a -> m a
runCacheBuild (CacheBuild m) = do
  httpManager <- askHttpManager
  sqlGenCtx   <- askSQLGenCtx
  let params = CacheBuildParams httpManager sqlGenCtx
  liftTx $ lazyTxToQTx (runReaderT m params)
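
-- An illustrative sketch (not part of the original module): any monad that can
-- supply an 'HTTP.Manager' and an 'SQLGenCtx' and run database transactions
-- can discharge a 'CacheBuild' action, e.g. with a hypothetical
-- @buildAction :: CacheBuild SchemaCache@:
--
-- > rebuild :: (MonadIO m, HasHttpManager m, HasSQLGenCtx m, MonadTx m) => m SchemaCache
-- > rebuild = runCacheBuild buildAction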

data RebuildableSchemaCache
  = RebuildableSchemaCache
  { lastBuiltSchemaCache :: !SchemaCache
  , _rscInvalidationMap  :: !InvalidationKeys
  , _rscRebuild          :: !(Inc.Rule (ReaderT BuildReason CacheBuild) (Metadata, InvalidationKeys) SchemaCache)
  }
$(makeLenses ''RebuildableSchemaCache)

bindErrorA
  :: (ArrowChoice arr, ArrowKleisli m arr, ArrowError e arr, MonadError e m)
  => arr (m a) a
bindErrorA = liftEitherA <<< arrM \m -> (Right <$> m) `catchError` (pure . Left)
{-# INLINE bindErrorA #-}
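
-- An illustrative sketch (not part of the original module): 'bindErrorA' runs
-- a monadic action that may 'throwError' inside an arrow computation, routing
-- any error through the arrow's 'ArrowError' channel instead. With a
-- hypothetical @fetchTable :: QualifiedTable -> m TableInfo@ that may fail:
--
-- > resolveTable = proc tableName -> do
-- >   tableInfo <- bindErrorA -< fetchTable tableName
-- >   returnA -< tableInfo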

withRecordDependencies
  :: (ArrowWriter (Seq CollectedInfo) arr)
  => WriterA (Seq SchemaDependency) arr (e, s) a
  -> arr (e, (MetadataObject, (SchemaObjId, s))) a
withRecordDependencies f = proc (e, (metadataObject, (schemaObjectId, s))) -> do
  (result, dependencies) <- runWriterA f -< (e, s)
  recordDependencies -< (metadataObject, schemaObjectId, toList dependencies)
  returnA -< result
{-# INLINABLE withRecordDependencies #-}
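
-- An illustrative sketch (not part of the original module): wrapping a build
-- step so that the 'SchemaDependency's it emits are recorded against the
-- object being built, with hypothetical @buildColumn@, @columnMetadataObject@
-- and @columnObjectId@ describing that step and object:
--
-- > builtColumn <- withRecordDependencies buildColumn
-- >   -< (env, (columnMetadataObject, (columnObjectId, columnInput)))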

noDuplicates
  :: (ArrowChoice arr, ArrowWriter (Seq CollectedInfo) arr)
  => (a -> MetadataObject)
  -> [a] `arr` Maybe a
noDuplicates mkMetadataObject = proc values -> case values of
  []      -> returnA -< Nothing
  [value] -> returnA -< Just value
  value:_ -> do
    let objectId = _moId $ mkMetadataObject value
        definitions = map (_moDefinition . mkMetadataObject) values
    tellA -< Seq.singleton $ CIInconsistency (DuplicateObjects objectId definitions)
    returnA -< Nothing
{-# INLINABLE noDuplicates #-}
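
-- An illustrative sketch (not part of the original module): collapsing a group
-- of definitions that share a name, with a hypothetical
-- @mkPermissionMetadataObject@ describing each definition:
--
-- > maybePermission <- noDuplicates mkPermissionMetadataObject -< permissionsForRole
-- > -- Just p   when exactly one definition exists
-- > -- Nothing  when there are none, or when duplicates exist (which are also
-- > --          reported as a 'DuplicateObjects' inconsistency)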

-- | Processes a list of catalog metadata into a map of processed information, marking any duplicate
-- entries inconsistent.
buildInfoMap
  :: ( ArrowChoice arr, Inc.ArrowDistribute arr, ArrowWriter (Seq CollectedInfo) arr
     , Eq k, Hashable k )
  => (a -> k)
  -> (a -> MetadataObject)
  -> (e, a) `arr` Maybe b
  -> (e, [a]) `arr` HashMap k b
buildInfoMap extractKey mkMetadataObject buildInfo = proc (e, infos) ->
      (M.groupOn extractKey infos >- returnA)
  >-> (| Inc.keyed (\_ duplicateInfos ->
           (duplicateInfos >- noDuplicates mkMetadataObject)
       >-> (| traverseA (\info -> (e, info) >- buildInfo) |)
       >-> (\info -> join info >- returnA)) |)
  >-> (\infoMap -> M.catMaybes infoMap >- returnA)
{-# INLINABLE buildInfoMap #-}
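
-- An illustrative sketch (not part of the original module): building a cache
-- of function infos keyed by function name, with hypothetical helpers
-- @functionName@, @mkFunctionMetadataObject@ and @buildFunctionInfo@:
--
-- > functionCache <- buildInfoMap functionName mkFunctionMetadataObject buildFunctionInfo
-- >   -< (env, functionMetadataList)
-- > -- duplicate names are reported on the writer channel and dropped, and
-- > -- entries for which 'buildFunctionInfo' yields 'Nothing' are filtered out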

-- | Like 'buildInfoMap', but includes each processed info's associated 'MetadataObject' in the result.
-- This is useful if the results will be further processed, and the 'MetadataObject' is still needed
-- to mark the object inconsistent.
buildInfoMapPreservingMetadata
  :: ( ArrowChoice arr, Inc.ArrowDistribute arr, ArrowWriter (Seq CollectedInfo) arr
     , Eq k, Hashable k )
  => (a -> k)
  -> (a -> MetadataObject)
  -> (e, a) `arr` Maybe b
  -> (e, [a]) `arr` HashMap k (b, MetadataObject)
buildInfoMapPreservingMetadata extractKey mkMetadataObject buildInfo =
  buildInfoMap extractKey mkMetadataObject proc (e, info) ->
    ((e, info) >- buildInfo) >-> \result -> result <&> (, mkMetadataObject info) >- returnA
{-# INLINABLE buildInfoMapPreservingMetadata #-}

addTableContext :: QualifiedTable -> Text -> Text
addTableContext tableName e = "in table " <> tableName <<> ": " <> e
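
-- An illustrative sketch (not part of the original module): prefixing an error
-- message with the table it belongs to (the exact rendering of the table name
-- comes from the 'ToTxt'-based '<<>' operator):
--
-- > addTableContext tableName "no such column"
-- > -- ==> "in table <rendered tableName>: no such column"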