graphql-engine/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Common.hs
Rakesh Emmadi a2cf9a53c2 server: move to storing metadata as a json blob (#115)
GitOrigin-RevId: 3d1a7618a4ec086c2d255549a6c15087201e9ab0
2020-12-08 14:23:28 +00:00


{-# LANGUAGE Arrows #-}
{-# LANGUAGE RecordWildCards #-}

-- | Types/functions shared between modules that implement "Hasura.RQL.DDL.Schema.Cache". Other
-- modules should not import this module directly.
module Hasura.RQL.DDL.Schema.Cache.Common where

import Hasura.Prelude

import qualified Data.HashMap.Strict.Extended as M
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Data.HashSet as HS
import qualified Data.Sequence as Seq

import Control.Arrow.Extended
import Control.Lens
import Data.Text.Extended

import qualified Hasura.Incremental as Inc

import Hasura.Backends.Postgres.SQL.Types
import Hasura.RQL.Types
import Hasura.RQL.Types.Run

-- | 'InvalidationKeys' used to apply requested 'CacheInvalidations'.
data InvalidationKeys = InvalidationKeys
  { _ikMetadata :: !Inc.InvalidationKey
  , _ikRemoteSchemas :: !(HashMap RemoteSchemaName Inc.InvalidationKey)
  } deriving (Show, Eq, Generic)
instance Inc.Cacheable InvalidationKeys
instance Inc.Select InvalidationKeys
$(makeLenses ''InvalidationKeys)

initialInvalidationKeys :: InvalidationKeys
initialInvalidationKeys = InvalidationKeys Inc.initialInvalidationKey mempty

invalidateKeys :: CacheInvalidations -> InvalidationKeys -> InvalidationKeys
invalidateKeys CacheInvalidations{..} InvalidationKeys{..} = InvalidationKeys
  { _ikMetadata = if ciMetadata then Inc.invalidate _ikMetadata else _ikMetadata
  , _ikRemoteSchemas = foldl' (flip invalidateRemoteSchema) _ikRemoteSchemas ciRemoteSchemas }
  where
    invalidateRemoteSchema = M.alter $ Just . maybe Inc.initialInvalidationKey Inc.invalidate
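
{- A minimal usage sketch. The 'ciMetadata' and 'ciRemoteSchemas' fields are the ones consumed
   above via RecordWildCards; the remote schema name is purely illustrative. Bumping the
   metadata key and the key of one remote schema means only the corresponding cached results
   are recomputed on the next build:

     invalidateKeys
       (CacheInvalidations { ciMetadata = True, ciRemoteSchemas = HS.singleton remoteName })
       initialInvalidationKeys
     -- where remoteName :: RemoteSchemaName is the schema to re-fetch (illustrative)
-}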

data TableBuildInput
  = TableBuildInput
  { _tbiName :: !QualifiedTable
  , _tbiIsEnum :: !Bool
  , _tbiConfiguration :: !TableConfig
  } deriving (Show, Eq, Generic)
instance NFData TableBuildInput
instance Inc.Cacheable TableBuildInput

data NonColumnTableInputs
  = NonColumnTableInputs
  { _nctiTable :: !QualifiedTable
  , _nctiObjectRelationships :: ![ObjRelDef]
  , _nctiArrayRelationships :: ![ArrRelDef]
  , _nctiComputedFields :: ![ComputedFieldMetadata]
  , _nctiRemoteRelationships :: ![RemoteRelationshipMetadata]
  } deriving (Show, Eq, Generic)
-- instance NFData NonColumnTableInputs
-- instance Inc.Cacheable NonColumnTableInputs

data TablePermissionInputs
  = TablePermissionInputs
  { _tpiTable :: !QualifiedTable
  , _tpiInsert :: ![InsPermDef 'Postgres]
  , _tpiSelect :: ![SelPermDef 'Postgres]
  , _tpiUpdate :: ![UpdPermDef 'Postgres]
  , _tpiDelete :: ![DelPermDef 'Postgres]
  } deriving (Show, Eq, Generic)
instance Inc.Cacheable TablePermissionInputs

mkTableInputs :: TableMetadata -> (TableBuildInput, NonColumnTableInputs, TablePermissionInputs)
mkTableInputs TableMetadata{..} =
  (buildInput, nonColumns, permissions)
  where
    buildInput = TableBuildInput _tmTable _tmIsEnum _tmConfiguration
    nonColumns = NonColumnTableInputs _tmTable
      (OMap.elems _tmObjectRelationships)
      (OMap.elems _tmArrayRelationships)
      (OMap.elems _tmComputedFields)
      (OMap.elems _tmRemoteRelationships)
    permissions = TablePermissionInputs _tmTable
      (OMap.elems _tmInsertPermissions)
      (OMap.elems _tmSelectPermissions)
      (OMap.elems _tmUpdatePermissions)
      (OMap.elems _tmDeletePermissions)
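
{- A minimal usage sketch ('tableMetadata' is a hypothetical value of type 'TableMetadata'):
   splitting one metadata entry into the three pieces that the cache build rule can process
   and cache independently:

     let (buildInput, nonColumnInputs, permissionInputs) = mkTableInputs tableMetadata
-}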

-- | The direct output of 'buildSchemaCacheRule'. Contains most of the things necessary to build a
-- schema cache, but dependencies and inconsistent metadata objects are collected via a separate
-- 'MonadWriter' side channel.
data BuildOutputs
  = BuildOutputs
  { _boTables :: !(TableCache 'Postgres)
  , _boActions :: !ActionCache
  , _boFunctions :: !FunctionCache
  , _boRemoteSchemas :: !(HashMap RemoteSchemaName (RemoteSchemaCtx, MetadataObject))
  -- ^ We preserve the 'MetadataObject' from the original catalog metadata in the output so we can
  -- reuse it later if we need to mark the remote schema inconsistent during GraphQL schema
  -- generation (because of field conflicts).
  , _boAllowlist :: !(HS.HashSet GQLQuery)
  , _boCustomTypes :: !(AnnotatedCustomTypes 'Postgres)
  , _boCronTriggers :: !(M.HashMap TriggerName CronTriggerInfo)
  }
$(makeLenses ''BuildOutputs)

data RebuildableSchemaCache m
  = RebuildableSchemaCache
  { lastBuiltSchemaCache :: !SchemaCache
  , _rscInvalidationMap :: !InvalidationKeys
  , _rscRebuild :: !(Inc.Rule (ReaderT BuildReason m) (Metadata, InvalidationKeys) SchemaCache)
  }
$(makeLenses ''RebuildableSchemaCache)

type CacheBuildM = ReaderT BuildReason Run
type CacheBuildA = WriterA (Seq CollectedInfo) (Inc.Rule CacheBuildM)

bindErrorA
  :: (ArrowChoice arr, ArrowKleisli m arr, ArrowError e arr, MonadError e m)
  => arr (m a) a
bindErrorA = liftEitherA <<< arrM \m -> (Right <$> m) `catchError` (pure . Left)
{-# INLINE bindErrorA #-}
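
{- A minimal usage sketch ('fetchThing' and 'name' are hypothetical): running a monadic action
   inside a 'proc' block so that a 'throwError' raised in the underlying monad is re-thrown on
   the arrow's own error channel instead of escaping the arrow computation:

     buildThing = proc name -> do
       thing <- bindErrorA -< fetchThing name  -- fetchThing is a placeholder monadic action
       returnA -< thing
-}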

withRecordDependencies
  :: (ArrowWriter (Seq CollectedInfo) arr)
  => WriterA (Seq SchemaDependency) arr (e, s) a
  -> arr (e, (MetadataObject, (SchemaObjId, s))) a
withRecordDependencies f = proc (e, (metadataObject, (schemaObjectId, s))) -> do
  (result, dependencies) <- runWriterA f -< (e, s)
  recordDependencies -< (metadataObject, schemaObjectId, toList dependencies)
  returnA -< result
{-# INLINABLE withRecordDependencies #-}

noDuplicates
  :: (ArrowChoice arr, ArrowWriter (Seq CollectedInfo) arr)
  => (a -> MetadataObject)
  -> [a] `arr` Maybe a
noDuplicates mkMetadataObject = proc values -> case values of
  [] -> returnA -< Nothing
  [value] -> returnA -< Just value
  value:_ -> do
    let objectId = _moId $ mkMetadataObject value
        definitions = map (_moDefinition . mkMetadataObject) values
    tellA -< Seq.singleton $ CIInconsistency (DuplicateObjects objectId definitions)
    returnA -< Nothing
{-# INLINABLE noDuplicates #-}
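
{- A behaviour sketch ('mkRelMetadataObject' is a hypothetical helper): within a 'proc' block,

     maybeRel <- noDuplicates mkRelMetadataObject -< relsWithSameName

   yields 'Just' the element for a singleton list, 'Nothing' for an empty list, and for two or
   more elements yields 'Nothing' while recording a 'DuplicateObjects' inconsistency built from
   every element's 'MetadataObject'.
-}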

-- | Processes a list of catalog metadata into a map of processed information, marking any
-- duplicate entries inconsistent.
buildInfoMap
  :: ( ArrowChoice arr, Inc.ArrowDistribute arr, ArrowWriter (Seq CollectedInfo) arr
     , Eq k, Hashable k )
  => (a -> k)
  -> (a -> MetadataObject)
  -> (e, a) `arr` Maybe b
  -> (e, [a]) `arr` HashMap k b
buildInfoMap extractKey mkMetadataObject buildInfo = proc (e, infos) ->
      (M.groupOn extractKey infos >- returnA)
  >-> (| Inc.keyed (\_ duplicateInfos ->
            (duplicateInfos >- noDuplicates mkMetadataObject)
        >-> (| traverseA (\info -> (e, info) >- buildInfo) |)
        >-> (\info -> join info >- returnA)) |)
  >-> (\infoMap -> M.catMaybes infoMap >- returnA)
{-# INLINABLE buildInfoMap #-}
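
{- A minimal usage sketch (the key extractor and helper names are hypothetical): building a
   cache keyed by function name, where entries sharing a key are dropped and reported through
   'noDuplicates':

     functionCache <-
       buildInfoMap fmFunctionName mkFunctionMetadataObject buildFunctionInfo
         -< (env, functionMetadatas)
-}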

-- | Like 'buildInfoMap', but includes each processed info's associated 'MetadataObject' in the
-- result. This is useful if the results will be further processed, and the 'MetadataObject' is
-- still needed to mark the object inconsistent.
buildInfoMapPreservingMetadata
  :: ( ArrowChoice arr, Inc.ArrowDistribute arr, ArrowWriter (Seq CollectedInfo) arr
     , Eq k, Hashable k )
  => (a -> k)
  -> (a -> MetadataObject)
  -> (e, a) `arr` Maybe b
  -> (e, [a]) `arr` HashMap k (b, MetadataObject)
buildInfoMapPreservingMetadata extractKey mkMetadataObject buildInfo =
  buildInfoMap extractKey mkMetadataObject proc (e, info) ->
    ((e, info) >- buildInfo) >-> \result -> result <&> (, mkMetadataObject info) >- returnA
{-# INLINABLE buildInfoMapPreservingMetadata #-}
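
{- The same sketch, but preserving metadata (helper names again hypothetical): each built value
   is paired with its 'MetadataObject', matching the shape of '_boRemoteSchemas' above, so the
   object can still be marked inconsistent during later processing:

     remoteSchemaMap <-
       buildInfoMapPreservingMetadata _rsmName mkRemoteSchemaMetadataObject buildRemoteSchema
         -< (env, remoteSchemas)
-}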

addTableContext :: QualifiedTable -> Text -> Text
addTableContext tableName e = "in table " <> tableName <<> ": " <> e