diff --git a/server/graphql-engine.cabal b/server/graphql-engine.cabal index 08d8de26e0e..6581ce27763 100644 --- a/server/graphql-engine.cabal +++ b/server/graphql-engine.cabal @@ -881,6 +881,7 @@ library , Hasura.RQL.DDL.Schema , Hasura.RQL.DDL.Schema.Cache , Hasura.RQL.DDL.Schema.Cache.Common + , Hasura.RQL.DDL.Schema.Cache.Config , Hasura.RQL.DDL.Schema.Cache.Dependencies , Hasura.RQL.DDL.Schema.Cache.Fields , Hasura.RQL.DDL.Schema.Cache.Permission diff --git a/server/src-lib/Hasura/App.hs b/server/src-lib/Hasura/App.hs index 2a258138c5c..9db6ed1bfe5 100644 --- a/server/src-lib/Hasura/App.hs +++ b/server/src-lib/Hasura/App.hs @@ -122,6 +122,7 @@ import Hasura.RQL.DDL.ApiLimit (MonadGetApiTimeLimit (..)) import Hasura.RQL.DDL.EventTrigger (MonadEventLogCleanup (..)) import Hasura.RQL.DDL.Schema.Cache import Hasura.RQL.DDL.Schema.Cache.Common +import Hasura.RQL.DDL.Schema.Cache.Config import Hasura.RQL.DDL.Schema.Catalog import Hasura.RQL.Types.Allowlist import Hasura.RQL.Types.Backend @@ -500,21 +501,18 @@ initialiseAppContext :: AppInit -> m (AppStateRef Hasura) initialiseAppContext env serveOptions@ServeOptions {..} AppInit {..} = do - AppEnv {..} <- askAppEnv + appEnv@AppEnv {..} <- askAppEnv let Loggers _ logger pgLogger = appEnvLoggers sqlGenCtx = initSQLGenCtx soExperimentalFeatures soStringifyNum soDangerousBooleanCollapse - serverConfigCtx = - ServerConfigCtx + cacheStaticConfig = buildCacheStaticConfig appEnv + cacheDynamicConfig = + CacheDynamicConfig soInferFunctionPermissions soEnableRemoteSchemaPermissions sqlGenCtx - soEnableMaintenanceMode soExperimentalFeatures - soEventingMode - soReadOnlyMode soDefaultNamingConvention soMetadataDefaults - (CheckFeatureFlag $ checkFeatureFlag env) soApolloFederationStatus -- Create the schema cache @@ -522,10 +520,11 @@ initialiseAppContext env serveOptions@ServeOptions {..} AppInit {..} = do buildFirstSchemaCache env logger - serverConfigCtx (mkPgSourceResolver pgLogger) mkMSSQLSourceResolver aiMetadataWithResourceVersion + cacheStaticConfig + cacheDynamicConfig appEnvManager -- Build the RebuildableAppContext. 
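[Reviewer note, not part of the patch] The hunk above replaces the monolithic `ServerConfigCtx` with two narrower values at app initialisation: `buildCacheStaticConfig` projects the settings that never change out of `AppEnv`, while `CacheDynamicConfig` carries the per-reload settings that used to ride along with them. The sketch below illustrates how a monad that can reach `AppEnv` would pick up the new `HasCacheStaticConfig` and `HasFeatureFlagChecker` capabilities, mirroring the `AppM` instances added later in this diff. It is a minimal sketch only: the monad name `IllustrationM` is hypothetical, and the exact module paths of the imports are assumptions based on the modules touched by this patch.

```haskell
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}

import Control.Monad.IO.Class (MonadIO (..))
import Control.Monad.Reader (MonadReader, ReaderT, ask, asks)
import Hasura.App.State (AppEnv (..), buildCacheStaticConfig)
import Hasura.RQL.DDL.Schema.Cache.Config (HasCacheStaticConfig (..))
import Hasura.Server.Init.FeatureFlag (HasFeatureFlagChecker (..))
import Hasura.Server.Types (CheckFeatureFlag (..))

-- Hypothetical monad that, like AppM in this patch, has access to the
-- immutable AppEnv.
newtype IllustrationM a = IllustrationM (ReaderT AppEnv IO a)
  deriving newtype (Functor, Applicative, Monad, MonadIO, MonadReader AppEnv)

-- The static cache configuration is a pure projection of AppEnv, so the
-- instance is just buildCacheStaticConfig applied to the environment.
instance HasCacheStaticConfig IllustrationM where
  askCacheStaticConfig = buildCacheStaticConfig <$> ask

-- Feature-flag checks go through the CheckFeatureFlag callback stored in
-- AppEnv, instead of threading a whole ServerConfigCtx around.
instance HasFeatureFlagChecker IllustrationM where
  checkFlag flag = do
    CheckFeatureFlag runCheck <- asks appEnvCheckFeatureFlag
    liftIO $ runCheck flag
```

The payoff of this split, as the rest of the diff shows, is that cache-building code only sees the static subset (maintenance mode, eventing mode, read-only mode, feature-flag checker) via `HasCacheStaticConfig`, and the per-reload subset via `CacheDynamicConfig`, rather than the whole server configuration.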
@@ -589,26 +588,28 @@ buildFirstSchemaCache :: (MonadIO m) => Env.Environment -> Logger Hasura -> - ServerConfigCtx -> SourceResolver ('Postgres 'Vanilla) -> SourceResolver ('MSSQL) -> MetadataWithResourceVersion -> + CacheStaticConfig -> + CacheDynamicConfig -> HTTP.Manager -> m RebuildableSchemaCache buildFirstSchemaCache env logger - serverConfigCtx pgSourceResolver mssqlSourceResolver metadataWithVersion + cacheStaticConfig + cacheDynamicConfig httpManager = do - let cacheBuildParams = CacheBuildParams httpManager pgSourceResolver mssqlSourceResolver + let cacheBuildParams = CacheBuildParams httpManager pgSourceResolver mssqlSourceResolver cacheStaticConfig buildReason = CatalogSync result <- runExceptT $ runCacheBuild cacheBuildParams $ - buildRebuildableSchemaCacheWithReason buildReason logger env metadataWithVersion serverConfigCtx + buildRebuildableSchemaCacheWithReason buildReason logger env metadataWithVersion cacheDynamicConfig result `onLeft` \err -> do -- TODO: we used to bundle the first schema cache build with the catalog -- migration, using the same error handler for both, meaning that an @@ -665,6 +666,14 @@ runAppM c (AppM a) = ignoreTraceT $ runReaderT a c instance HasAppEnv AppM where askAppEnv = ask +instance HasFeatureFlagChecker AppM where + checkFlag f = AppM do + CheckFeatureFlag runCheckFeatureFlag <- asks appEnvCheckFeatureFlag + liftIO $ runCheckFeatureFlag f + +instance HasCacheStaticConfig AppM where + askCacheStaticConfig = buildCacheStaticConfig <$> askAppEnv + instance MonadTrace AppM where newTraceWith c p n (AppM a) = AppM $ newTraceWith c p n a newSpanWith i n (AppM a) = AppM $ newSpanWith i n a @@ -879,6 +888,8 @@ runHGEServer :: UserAuthentication m, HttpLog m, HasAppEnv m, + HasCacheStaticConfig m, + HasFeatureFlagChecker m, ConsoleRenderer m, MonadVersionAPIWithExtraData m, MonadMetadataApiAuthorization m, @@ -971,6 +982,8 @@ mkHGEServer :: UserAuthentication m, HttpLog m, HasAppEnv m, + HasCacheStaticConfig m, + HasFeatureFlagChecker m, ConsoleRenderer m, MonadVersionAPIWithExtraData m, MonadMetadataApiAuthorization m, diff --git a/server/src-lib/Hasura/App/State.hs b/server/src-lib/Hasura/App/State.hs index a9f7d31999b..42bc23c5968 100644 --- a/server/src-lib/Hasura/App/State.hs +++ b/server/src-lib/Hasura/App/State.hs @@ -13,10 +13,11 @@ module Hasura.App.State -- * init functions buildRebuildableAppContext, rebuildRebuildableAppContext, - initSQLGenCtx, - -- * server config - buildServerConfigCtx, + -- * subsets + initSQLGenCtx, + buildCacheStaticConfig, + buildCacheDynamicConfig, ) where @@ -36,6 +37,7 @@ import Hasura.GraphQL.Schema.Options qualified as Options import Hasura.Incremental qualified as Inc import Hasura.Logging qualified as L import Hasura.Prelude +import Hasura.RQL.DDL.Schema.Cache.Config import Hasura.RQL.Types.Common import Hasura.RQL.Types.Metadata import Hasura.RQL.Types.SchemaCache (MetadataResourceVersion) @@ -300,6 +302,9 @@ buildAppContextRule = proc (ServeOptions {..}, env, _keys) -> do | otherwise -> InternalErrorsDisabled returnA -< responseInternalErrorsConfig +-------------------------------------------------------------------------------- +-- subsets + initSQLGenCtx :: HashSet ExperimentalFeature -> Options.StringifyNumbers -> Options.DangerouslyCollapseBooleans -> SQLGenCtx initSQLGenCtx experimentalFeatures stringifyNum dangerousBooleanCollapse = let optimizePermissionFilters @@ -311,26 +316,23 @@ initSQLGenCtx experimentalFeatures stringifyNum dangerousBooleanCollapse = | otherwise = 
Options.DisableBigQueryStringNumericInput in SQLGenCtx stringifyNum dangerousBooleanCollapse optimizePermissionFilters bigqueryStringNumericInput --------------------------------------------------------------------------------- --- server config - --- | We are trying to slowly get rid of 'HasServerConfigCtx' (and consequently --- of 'ServercConfigtx') in favour of smaller / more specific ad-hoc --- types. However, in the meantime, it is often required to builda --- 'ServerConfigCtx' at the boundary between parts of the code that use it and --- part of the code that use the new 'AppEnv' and 'AppContext'. -buildServerConfigCtx :: AppEnv -> AppContext -> ServerConfigCtx -buildServerConfigCtx AppEnv {..} AppContext {..} = - ServerConfigCtx - { _sccFunctionPermsCtx = acFunctionPermsCtx, - _sccRemoteSchemaPermsCtx = acRemoteSchemaPermsCtx, - _sccSQLGenCtx = acSQLGenCtx, - _sccMaintenanceMode = appEnvEnableMaintenanceMode, - _sccExperimentalFeatures = acExperimentalFeatures, - _sccEventingMode = appEnvEventingMode, - _sccReadOnlyMode = appEnvEnableReadOnlyMode, - _sccDefaultNamingConvention = acDefaultNamingConvention, - _sccMetadataDefaults = acMetadataDefaults, - _sccCheckFeatureFlag = appEnvCheckFeatureFlag, - _sccApolloFederationStatus = acApolloFederationStatus +buildCacheStaticConfig :: AppEnv -> CacheStaticConfig +buildCacheStaticConfig AppEnv {..} = + CacheStaticConfig + { _cscMaintenanceMode = appEnvEnableMaintenanceMode, + _cscEventingMode = appEnvEventingMode, + _cscReadOnlyMode = appEnvEnableReadOnlyMode, + _cscCheckFeatureFlag = appEnvCheckFeatureFlag + } + +buildCacheDynamicConfig :: AppContext -> CacheDynamicConfig +buildCacheDynamicConfig AppContext {..} = + CacheDynamicConfig + { _cdcFunctionPermsCtx = acFunctionPermsCtx, + _cdcRemoteSchemaPermsCtx = acRemoteSchemaPermsCtx, + _cdcSQLGenCtx = acSQLGenCtx, + _cdcExperimentalFeatures = acExperimentalFeatures, + _cdcDefaultNamingConvention = acDefaultNamingConvention, + _cdcMetadataDefaults = acMetadataDefaults, + _cdcApolloFederationStatus = acApolloFederationStatus } diff --git a/server/src-lib/Hasura/Backends/MSSQL/DDL/EventTrigger.hs b/server/src-lib/Hasura/Backends/MSSQL/DDL/EventTrigger.hs index db5ce0739c3..4b5b6fb3819 100644 --- a/server/src-lib/Hasura/Backends/MSSQL/DDL/EventTrigger.hs +++ b/server/src-lib/Hasura/Backends/MSSQL/DDL/EventTrigger.hs @@ -214,7 +214,7 @@ dropDanglingSQLTrigger sourceConfig triggerName table ops = createTableEventTrigger :: MonadIO m => - ServerConfigCtx -> + SQLGenCtx -> MSSQLSourceConfig -> TableName -> [ColumnInfo 'MSSQL] -> @@ -223,7 +223,7 @@ createTableEventTrigger :: TriggerOpsDef 'MSSQL -> Maybe (PrimaryKey 'MSSQL (ColumnInfo 'MSSQL)) -> m (Either QErr ()) -createTableEventTrigger _serverConfigCtx sourceConfig table columns triggerName triggerOnReplication opsDefinition primaryKeyMaybe = do +createTableEventTrigger _sqlGen sourceConfig table columns triggerName triggerOnReplication opsDefinition primaryKeyMaybe = do liftIO $ runMSSQLSourceWriteTx sourceConfig $ do mkAllTriggersQ triggerName table triggerOnReplication columns opsDefinition primaryKeyMaybe @@ -233,7 +233,7 @@ createMissingSQLTriggers :: MonadError QErr m, MonadBaseControl IO m ) => - ServerConfigCtx -> + SQLGenCtx -> MSSQLSourceConfig -> TableName -> ([ColumnInfo 'MSSQL], Maybe (PrimaryKey 'MSSQL (ColumnInfo 'MSSQL))) -> diff --git a/server/src-lib/Hasura/Backends/Postgres/DDL/EventTrigger.hs b/server/src-lib/Hasura/Backends/Postgres/DDL/EventTrigger.hs index 88954b31ca6..10dc5837001 100644 --- 
a/server/src-lib/Hasura/Backends/Postgres/DDL/EventTrigger.hs +++ b/server/src-lib/Hasura/Backends/Postgres/DDL/EventTrigger.hs @@ -205,7 +205,7 @@ createMissingSQLTriggers :: MonadBaseControl IO m, Backend ('Postgres pgKind) ) => - ServerConfigCtx -> + SQLGenCtx -> PGSourceConfig -> TableName ('Postgres pgKind) -> ([(ColumnInfo ('Postgres pgKind))], Maybe (PrimaryKey ('Postgres pgKind) (ColumnInfo ('Postgres pgKind)))) -> @@ -241,7 +241,7 @@ createMissingSQLTriggers serverConfigCtx sourceConfig table (allCols, _) trigger createTableEventTrigger :: (Backend ('Postgres pgKind), MonadIO m, MonadBaseControl IO m) => - ServerConfigCtx -> + SQLGenCtx -> PGSourceConfig -> QualifiedTable -> [ColumnInfo ('Postgres pgKind)] -> @@ -674,7 +674,7 @@ pgIdenTrigger op = QualifiedTriggerName . pgFmtIdentifier . unQualifiedTriggerNa -- | Define the pgSQL trigger functions on database events. mkTriggerFunctionQ :: forall pgKind m. - (Backend ('Postgres pgKind), MonadTx m, MonadReader ServerConfigCtx m) => + (Backend ('Postgres pgKind), MonadTx m, MonadReader SQLGenCtx m) => TriggerName -> QualifiedTable -> [ColumnInfo ('Postgres pgKind)] -> @@ -682,7 +682,7 @@ mkTriggerFunctionQ :: SubscribeOpSpec ('Postgres pgKind) -> m QualifiedTriggerName mkTriggerFunctionQ triggerName (QualifiedObject schema table) allCols op (SubscribeOpSpec listenColumns deliveryColumns') = do - strfyNum <- stringifyNum . _sccSQLGenCtx <$> ask + strfyNum <- asks stringifyNum let dbQualifiedTriggerName = pgIdenTrigger op triggerName () <- liftTx $ @@ -795,7 +795,7 @@ checkIfFunctionExistsQ triggerName op = do mkTrigger :: forall pgKind m. - (Backend ('Postgres pgKind), MonadTx m, MonadReader ServerConfigCtx m) => + (Backend ('Postgres pgKind), MonadTx m, MonadReader SQLGenCtx m) => TriggerName -> QualifiedTable -> TriggerOnReplication -> @@ -830,7 +830,7 @@ mkTrigger triggerName table triggerOnReplication allCols op subOpSpec = do mkAllTriggersQ :: forall pgKind m. 
- (Backend ('Postgres pgKind), MonadTx m, MonadReader ServerConfigCtx m) => + (Backend ('Postgres pgKind), MonadTx m, MonadReader SQLGenCtx m) => TriggerName -> QualifiedTable -> TriggerOnReplication -> diff --git a/server/src-lib/Hasura/Backends/Postgres/DDL/RunSQL.hs b/server/src-lib/Hasura/Backends/Postgres/DDL/RunSQL.hs index bdc70050252..5939ef2ce1f 100644 --- a/server/src-lib/Hasura/Backends/Postgres/DDL/RunSQL.hs +++ b/server/src-lib/Hasura/Backends/Postgres/DDL/RunSQL.hs @@ -59,7 +59,6 @@ import Hasura.RQL.Types.Source import Hasura.RQL.Types.Table import Hasura.SQL.AnyBackend qualified as AB import Hasura.SQL.Backend -import Hasura.Server.Types import Hasura.Server.Utils (quoteRegex) import Hasura.Session import Hasura.Tracing qualified as Tracing @@ -207,7 +206,6 @@ runRunSQL :: FetchTableMetadata pgKind, FetchFunctionMetadata pgKind, CacheRWM m, - HasServerConfigCtx m, MetadataM m, MonadBaseControl IO m, MonadError QErr m, @@ -215,9 +213,10 @@ runRunSQL :: Tracing.MonadTrace m, UserInfoM m ) => + SQLGenCtx -> RunSQL -> m EncJSON -runRunSQL q@RunSQL {..} = do +runRunSQL sqlGen q@RunSQL {..} = do sourceConfig <- askSourceConfig @('Postgres pgKind) rSource traceCtx <- Tracing.currentContext userInfo <- askUserInfo @@ -225,7 +224,7 @@ runRunSQL q@RunSQL {..} = do if (isSchemaCacheBuildRequiredRunSQL q) then do -- see Note [Checking metadata consistency in run_sql] - withMetadataCheck @pgKind rSource rCascade rTxAccessMode $ + withMetadataCheck @pgKind sqlGen rSource rCascade rTxAccessMode $ withTraceContext traceCtx $ withUserInfo userInfo $ execRawSQL rSql @@ -250,18 +249,18 @@ withMetadataCheck :: FetchTableMetadata pgKind, FetchFunctionMetadata pgKind, CacheRWM m, - HasServerConfigCtx m, MetadataM m, MonadBaseControl IO m, MonadError QErr m, MonadIO m ) => + SQLGenCtx -> SourceName -> Bool -> PG.TxAccess -> PG.TxET QErr m a -> m a -withMetadataCheck source cascade txAccess runSQLQuery = do +withMetadataCheck sqlGen source cascade txAccess runSQLQuery = do SourceInfo _ tableCache functionCache _logicalModels _customReturnTypes sourceConfig _ _ <- askSourceInfo @('Postgres pgKind) source -- Run SQL query and metadata checker in a transaction @@ -281,14 +280,14 @@ withMetadataCheck source cascade txAccess runSQLQuery = do recreateEventTriggers :: PGSourceConfig -> SchemaCache -> m () recreateEventTriggers sourceConfig schemaCache = do let tables = fromMaybe mempty $ unsafeTableCache @('Postgres pgKind) source $ scSources schemaCache - serverConfigCtx <- askServerConfigCtx liftEitherM $ runPgSourceWriteTx sourceConfig RunSQLQuery $ forM_ (M.elems tables) $ \(TableInfo coreInfo _ eventTriggers _) -> do let table = _tciName coreInfo columns = getCols $ _tciFieldInfoMap coreInfo forM_ (M.toList eventTriggers) $ \(triggerName, EventTriggerInfo {etiOpsDef, etiTriggerOnReplication}) -> do - flip runReaderT serverConfigCtx $ mkAllTriggersQ triggerName table etiTriggerOnReplication columns etiOpsDef + flip runReaderT sqlGen $ + mkAllTriggersQ triggerName table etiTriggerOnReplication columns etiOpsDef -- | @'runTxWithMetadataCheck source sourceConfig txAccess tableCache functionCache cascadeDependencies tx' checks for -- changes in GraphQL Engine metadata when a @'tx' is executed on the database alters Postgres diff --git a/server/src-lib/Hasura/CustomReturnType/API.hs b/server/src-lib/Hasura/CustomReturnType/API.hs index 83e52abaae6..3c344f7b214 100644 --- a/server/src-lib/Hasura/CustomReturnType/API.hs +++ b/server/src-lib/Hasura/CustomReturnType/API.hs @@ -42,8 +42,8 @@ import 
Hasura.RQL.Types.Permission (PermDef (_pdRole), SelPerm) import Hasura.RQL.Types.SchemaCache.Build import Hasura.SQL.AnyBackend qualified as AB import Hasura.SQL.Backend -import Hasura.Server.Init.FeatureFlag as FF -import Hasura.Server.Types (HasServerConfigCtx (..), ServerConfigCtx (..)) +import Hasura.Server.Init.FeatureFlag (HasFeatureFlagChecker (..)) +import Hasura.Server.Init.FeatureFlag qualified as FF import Hasura.Session (RoleName) -- | Default implementation of the 'track_custom_return_type' request payload. @@ -122,8 +122,7 @@ runGetCustomReturnType :: forall b m. ( BackendMetadata b, MetadataM m, - HasServerConfigCtx m, - MonadIO m, + HasFeatureFlagChecker m, MonadError QErr m ) => GetCustomReturnType b -> @@ -151,8 +150,7 @@ runTrackCustomReturnType :: CacheRWM m, MetadataM m, MonadError QErr m, - HasServerConfigCtx m, - MonadIO m + HasFeatureFlagChecker m ) => TrackCustomReturnType b -> m EncJSON @@ -273,7 +271,7 @@ instance runCreateSelectCustomReturnTypePermission :: forall b m. - (Backend b, CacheRWM m, MetadataM m, MonadError QErr m, MonadIO m, HasServerConfigCtx m) => + (Backend b, CacheRWM m, MetadataM m, MonadError QErr m, HasFeatureFlagChecker m) => CreateCustomReturnTypePermission SelPerm b -> m EncJSON runCreateSelectCustomReturnTypePermission CreateCustomReturnTypePermission {..} = do @@ -312,7 +310,7 @@ instance FromJSON (DropCustomReturnTypePermission b) where runDropSelectCustomReturnTypePermission :: forall b m. - (Backend b, CacheRWM m, MetadataM m, MonadError QErr m, MonadIO m, HasServerConfigCtx m) => + (Backend b, CacheRWM m, MetadataM m, MonadError QErr m, HasFeatureFlagChecker m) => DropCustomReturnTypePermission b -> m EncJSON runDropSelectCustomReturnTypePermission DropCustomReturnTypePermission {..} = do @@ -340,14 +338,10 @@ dropCustomReturnTypeInMetadata source name = do %~ OMap.delete name -- | check feature flag is enabled before carrying out any actions -throwIfFeatureDisabled :: (HasServerConfigCtx m, MonadIO m, MonadError QErr m) => m () +throwIfFeatureDisabled :: (HasFeatureFlagChecker m, MonadError QErr m) => m () throwIfFeatureDisabled = do - configCtx <- askServerConfigCtx - let CheckFeatureFlag runCheckFeatureFlag = _sccCheckFeatureFlag configCtx - - enableCustomReturnTypes <- liftIO (runCheckFeatureFlag FF.logicalModelInterface) - - unless enableCustomReturnTypes (throw500 "CustomReturnTypes is disabled!") + enableCustomReturnTypes <- checkFlag FF.logicalModelInterface + unless enableCustomReturnTypes $ throw500 "CustomReturnTypes is disabled!" -- | Check whether a custom return type with the given root field name exists for -- the given source. diff --git a/server/src-lib/Hasura/GraphQL/Schema.hs b/server/src-lib/Hasura/GraphQL/Schema.hs index 9a5e878c1f6..e8e90413311 100644 --- a/server/src-lib/Hasura/GraphQL/Schema.hs +++ b/server/src-lib/Hasura/GraphQL/Schema.hs @@ -97,7 +97,11 @@ buildGQLContext :: ( MonadError QErr m, MonadIO m ) => - ServerConfigCtx -> + Options.InferFunctionPermissions -> + Options.RemoteSchemaPermissions -> + HashSet ExperimentalFeature -> + SQLGenCtx -> + ApolloFederationStatus -> SourceCache -> HashMap RemoteSchemaName (RemoteSchemaCtx, MetadataObject) -> ActionCache -> @@ -114,66 +118,75 @@ buildGQLContext :: GQLContext ) ) -buildGQLContext ServerConfigCtx {..} sources allRemoteSchemas allActions customTypes = do - let remoteSchemasRoles = concatMap (Map.keys . _rscPermissions . fst . snd) $ Map.toList allRemoteSchemas - actionRoles = - Set.insert adminRoleName $ - Set.fromList (allActionInfos ^.. folded . 
aiPermissions . to Map.keys . folded) - <> Set.fromList (bool mempty remoteSchemasRoles $ _sccRemoteSchemaPermsCtx == Options.EnableRemoteSchemaPermissions) - allActionInfos = Map.elems allActions - allTableRoles = Set.fromList $ getTableRoles =<< Map.elems sources - allCustomReturnTypeRoles = Set.fromList $ getCustomReturnTypeRoles =<< Map.elems sources - allRoles = actionRoles <> allTableRoles <> allCustomReturnTypeRoles +buildGQLContext + functionPermissions + remoteSchemaPermissions + experimentalFeatures + sqlGen + apolloFederationStatus + sources + allRemoteSchemas + allActions + customTypes = do + let remoteSchemasRoles = concatMap (Map.keys . _rscPermissions . fst . snd) $ Map.toList allRemoteSchemas + actionRoles = + Set.insert adminRoleName $ + Set.fromList (allActionInfos ^.. folded . aiPermissions . to Map.keys . folded) + <> Set.fromList (bool mempty remoteSchemasRoles $ remoteSchemaPermissions == Options.EnableRemoteSchemaPermissions) + allActionInfos = Map.elems allActions + allTableRoles = Set.fromList $ getTableRoles =<< Map.elems sources + allCustomReturnTypeRoles = Set.fromList $ getCustomReturnTypeRoles =<< Map.elems sources + allRoles = actionRoles <> allTableRoles <> allCustomReturnTypeRoles - contexts <- - -- Buld role contexts in parallel. We'd prefer deterministic parallelism - -- but that isn't really acheivable (see mono #3829). NOTE: the admin role - -- will still be a bottleneck here, even on huge_schema which has many - -- roles. - fmap Map.fromList $ - forConcurrentlyEIO 10 (Set.toList allRoles) $ \role -> do - (role,) - <$> concurrentlyEIO - ( buildRoleContext - (_sccSQLGenCtx, _sccFunctionPermsCtx) - sources - allRemoteSchemas - allActionInfos - customTypes - role - _sccRemoteSchemaPermsCtx - _sccExperimentalFeatures - _sccApolloFederationStatus - ) - ( buildRelayRoleContext - (_sccSQLGenCtx, _sccFunctionPermsCtx) - sources - allActionInfos - customTypes - role - _sccExperimentalFeatures - ) - let hasuraContexts = fst <$> contexts - relayContexts = snd <$> contexts + contexts <- + -- Buld role contexts in parallel. We'd prefer deterministic parallelism + -- but that isn't really acheivable (see mono #3829). NOTE: the admin role + -- will still be a bottleneck here, even on huge_schema which has many + -- roles. + fmap Map.fromList $ + forConcurrentlyEIO 10 (Set.toList allRoles) $ \role -> do + (role,) + <$> concurrentlyEIO + ( buildRoleContext + (sqlGen, functionPermissions) + sources + allRemoteSchemas + allActionInfos + customTypes + role + remoteSchemaPermissions + experimentalFeatures + apolloFederationStatus + ) + ( buildRelayRoleContext + (sqlGen, functionPermissions) + sources + allActionInfos + customTypes + role + experimentalFeatures + ) + let hasuraContexts = fst <$> contexts + relayContexts = snd <$> contexts - adminIntrospection <- - case Map.lookup adminRoleName hasuraContexts of - Just (_context, _errors, introspection) -> pure introspection - Nothing -> throw500 "buildGQLContext failed to build for the admin role" - (unauthenticated, unauthenticatedRemotesErrors) <- unauthenticatedContext allRemoteSchemas _sccRemoteSchemaPermsCtx - pure - ( ( adminIntrospection, - view _1 <$> hasuraContexts, - unauthenticated, - Set.unions $ unauthenticatedRemotesErrors : (view _2 <$> Map.elems hasuraContexts) - ), - ( relayContexts, - -- Currently, remote schemas are exposed through Relay, but ONLY through - -- the unauthenticated role. This is probably an oversight. See - -- hasura/graphql-engine-mono#3883. 
- unauthenticated + adminIntrospection <- + case Map.lookup adminRoleName hasuraContexts of + Just (_context, _errors, introspection) -> pure introspection + Nothing -> throw500 "buildGQLContext failed to build for the admin role" + (unauthenticated, unauthenticatedRemotesErrors) <- unauthenticatedContext allRemoteSchemas remoteSchemaPermissions + pure + ( ( adminIntrospection, + view _1 <$> hasuraContexts, + unauthenticated, + Set.unions $ unauthenticatedRemotesErrors : (view _2 <$> Map.elems hasuraContexts) + ), + ( relayContexts, + -- Currently, remote schemas are exposed through Relay, but ONLY through + -- the unauthenticated role. This is probably an oversight. See + -- hasura/graphql-engine-mono#3883. + unauthenticated + ) ) - ) buildSchemaOptions :: (SQLGenCtx, Options.InferFunctionPermissions) -> diff --git a/server/src-lib/Hasura/LogicalModel/API.hs b/server/src-lib/Hasura/LogicalModel/API.hs index cdcfbe8fe95..a0d8a95c156 100644 --- a/server/src-lib/Hasura/LogicalModel/API.hs +++ b/server/src-lib/Hasura/LogicalModel/API.hs @@ -37,8 +37,8 @@ import Hasura.RQL.Types.SchemaCache.Build import Hasura.SQL.AnyBackend qualified as AB import Hasura.SQL.Backend import Hasura.SQL.Tag -import Hasura.Server.Init.FeatureFlag as FF -import Hasura.Server.Types (HasServerConfigCtx (..), ServerConfigCtx (..)) +import Hasura.Server.Init.FeatureFlag (HasFeatureFlagChecker (..)) +import Hasura.Server.Init.FeatureFlag qualified as FF -- | Default implementation of the 'track_logical_model' request payload. data TrackLogicalModel (b :: BackendType) = TrackLogicalModel @@ -90,9 +90,9 @@ deriving via logicalModelTrackToMetadata :: forall b m. ( BackendMetadata b, - MetadataM m, + MonadError QErr m, MonadIO m, - MonadError QErr m + MetadataM m ) => Env.Environment -> SourceConnConfiguration b -> @@ -145,8 +145,7 @@ runGetLogicalModel :: forall b m. ( BackendMetadata b, MetadataM m, - HasServerConfigCtx m, - MonadIO m, + HasFeatureFlagChecker m, MonadError QErr m ) => GetLogicalModel b -> @@ -167,11 +166,11 @@ runGetLogicalModel q = do runTrackLogicalModel :: forall b m. 
( BackendMetadata b, + MonadError QErr m, + MonadIO m, CacheRWM m, MetadataM m, - MonadError QErr m, - HasServerConfigCtx m, - MonadIO m + HasFeatureFlagChecker m ) => Env.Environment -> TrackLogicalModel b -> @@ -273,13 +272,9 @@ dropLogicalModelInMetadata source rootFieldName = do %~ OMap.delete rootFieldName -- | check feature flag is enabled before carrying out any actions -throwIfFeatureDisabled :: (HasServerConfigCtx m, MonadIO m, MonadError QErr m) => m () +throwIfFeatureDisabled :: (HasFeatureFlagChecker m, MonadError QErr m) => m () throwIfFeatureDisabled = do - configCtx <- askServerConfigCtx - let CheckFeatureFlag runCheckFeatureFlag = _sccCheckFeatureFlag configCtx - - enableLogicalModels <- liftIO (runCheckFeatureFlag FF.logicalModelInterface) - + enableLogicalModels <- checkFlag FF.logicalModelInterface unless enableLogicalModels (throw500 "LogicalModels is disabled!") -- | Check whether a logical model with the given root field name exists for diff --git a/server/src-lib/Hasura/RQL/DDL/FeatureFlag.hs b/server/src-lib/Hasura/RQL/DDL/FeatureFlag.hs index 9dd5d4444b5..d49a91838ba 100644 --- a/server/src-lib/Hasura/RQL/DDL/FeatureFlag.hs +++ b/server/src-lib/Hasura/RQL/DDL/FeatureFlag.hs @@ -9,7 +9,6 @@ where -------------------------------------------------------------------------------- --------------------------------------------------------------------------------- import Data.Aeson (FromJSON, (.:), (.=)) import Data.Aeson qualified as Aeson import Data.HashMap.Strict qualified as HashMap @@ -31,13 +30,12 @@ instance FromJSON GetFeatureFlag where runGetFeatureFlag :: ( MonadError Error.QErr m, - Types.HasServerConfigCtx m, MonadIO m ) => + Types.CheckFeatureFlag -> GetFeatureFlag -> m EncJSON -runGetFeatureFlag GetFeatureFlag {..} = do - Types.CheckFeatureFlag getFeatureFlag <- Types._sccCheckFeatureFlag <$> Types.askServerConfigCtx +runGetFeatureFlag (Types.CheckFeatureFlag getFeatureFlag) GetFeatureFlag {..} = do let flagM = HashMap.lookup gfgIdentifier $ FeatureFlag.getFeatureFlags $ FeatureFlag.featureFlags case flagM of Nothing -> Error.throw400 Error.NotFound $ "Feature Flag '" <> gfgIdentifier <> "' not found" diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs index 3d4e795fc47..6e6770d485d 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs @@ -61,6 +61,7 @@ import Hasura.RQL.DDL.InheritedRoles (resolveInheritedRole) import Hasura.RQL.DDL.RemoteRelationship (CreateRemoteSchemaRemoteRelationship (..), PartiallyResolvedSource (..), buildRemoteFieldInfo, getRemoteSchemaEntityJoinColumns) import Hasura.RQL.DDL.ScheduledTrigger import Hasura.RQL.DDL.Schema.Cache.Common +import Hasura.RQL.DDL.Schema.Cache.Config import Hasura.RQL.DDL.Schema.Cache.Dependencies import Hasura.RQL.DDL.Schema.Cache.Fields import Hasura.RQL.DDL.Schema.Cache.Permission @@ -147,7 +148,7 @@ buildRebuildableSchemaCache :: Logger Hasura -> Env.Environment -> MetadataWithResourceVersion -> - ServerConfigCtx -> + CacheDynamicConfig -> CacheBuild RebuildableSchemaCache buildRebuildableSchemaCache = buildRebuildableSchemaCacheWithReason CatalogSync @@ -157,12 +158,12 @@ buildRebuildableSchemaCacheWithReason :: Logger Hasura -> Env.Environment -> MetadataWithResourceVersion -> - ServerConfigCtx -> + CacheDynamicConfig -> CacheBuild RebuildableSchemaCache -buildRebuildableSchemaCacheWithReason reason logger env metadataWithVersion serverConfigCtx = do +buildRebuildableSchemaCacheWithReason 
reason logger env metadataWithVersion dynamicConfig = do result <- flip runReaderT reason $ - Inc.build (buildSchemaCacheRule logger env) (metadataWithVersion, serverConfigCtx, initialInvalidationKeys, Nothing) + Inc.build (buildSchemaCacheRule logger env) (metadataWithVersion, dynamicConfig, initialInvalidationKeys, Nothing) pure $ RebuildableSchemaCache (Inc.result result) initialInvalidationKeys (Inc.rebuildRule result) @@ -172,11 +173,11 @@ newtype CacheRWT m a -- (which added Control.Monad.Trans.Writer.CPS) are leaky, and we don’t have -- that yet. -- - -- The use of 'ReaderT ServerConfigCtx' is only here to avoid manually - -- passing the 'ServerConfigCtx' to every function that builds the cache. It + -- The use of 'ReaderT CacheDynamicConfig' is only here to avoid manually + -- passing the 'CacheDynamicConfig' to every function that builds the cache. It -- should ultimately be reduced to 'AppContext', or even better a relevant -- subset thereof. - CacheRWT (ReaderT ServerConfigCtx (StateT (RebuildableSchemaCache, CacheInvalidations) m) a) + CacheRWT (ReaderT CacheDynamicConfig (StateT (RebuildableSchemaCache, CacheInvalidations) m) a) deriving newtype ( Functor, Applicative, @@ -188,13 +189,11 @@ newtype CacheRWT m a Tracing.MonadTrace, MonadBase b, MonadBaseControl b, - ProvidesNetwork + ProvidesNetwork, + FF.HasFeatureFlagChecker ) deriving anyclass (MonadQueryTags) -instance Monad m => HasServerConfigCtx (CacheRWT m) where - askServerConfigCtx = CacheRWT ask - instance MonadReader r m => MonadReader r (CacheRWT m) where ask = lift ask local f (CacheRWT m) = CacheRWT $ mapReaderT (local f) m @@ -209,7 +208,7 @@ instance (MonadGetApiTimeLimit m) => MonadGetApiTimeLimit (CacheRWT m) where runCacheRWT :: Monad m => - ServerConfigCtx -> + CacheDynamicConfig -> RebuildableSchemaCache -> CacheRWT m a -> m (a, RebuildableSchemaCache, CacheInvalidations) @@ -228,19 +227,20 @@ instance ( MonadIO m, MonadError QErr m, ProvidesNetwork m, - MonadResolveSource m + MonadResolveSource m, + HasCacheStaticConfig m ) => CacheRWM (CacheRWT m) where buildSchemaCacheWithOptions buildReason invalidations metadata = CacheRWT do - serverConfigCtx <- ask + dynamicConfig <- ask (RebuildableSchemaCache lastBuiltSC invalidationKeys rule, oldInvalidations) <- get let metadataWithVersion = MetadataWithResourceVersion metadata $ scMetadataResourceVersion lastBuiltSC newInvalidationKeys = invalidateKeys invalidations invalidationKeys result <- runCacheBuildM $ flip runReaderT buildReason $ - Inc.build rule (metadataWithVersion, serverConfigCtx, newInvalidationKeys, Nothing) + Inc.build rule (metadataWithVersion, dynamicConfig, newInvalidationKeys, Nothing) let schemaCache = Inc.result result prunedInvalidationKeys = pruneInvalidationKeys schemaCache newInvalidationKeys !newCache = RebuildableSchemaCache schemaCache prunedInvalidationKeys (Inc.rebuildRule result) @@ -335,19 +335,20 @@ buildSchemaCacheRule :: MonadError QErr m, MonadReader BuildReason m, ProvidesNetwork m, - MonadResolveSource m + MonadResolveSource m, + HasCacheStaticConfig m ) => Logger Hasura -> Env.Environment -> - (MetadataWithResourceVersion, ServerConfigCtx, InvalidationKeys, Maybe StoredIntrospection) `arr` SchemaCache -buildSchemaCacheRule logger env = proc (MetadataWithResourceVersion metadataNoDefaults resourceVersion, serverConfigCtx, invalidationKeys, storedIntrospection) -> do + (MetadataWithResourceVersion, CacheDynamicConfig, InvalidationKeys, Maybe StoredIntrospection) `arr` SchemaCache +buildSchemaCacheRule logger env = proc 
(MetadataWithResourceVersion metadataNoDefaults resourceVersion, dynamicConfig, invalidationKeys, storedIntrospection) -> do invalidationKeysDep <- Inc.newDependency -< invalidationKeys - let metadataDefaults = _sccMetadataDefaults serverConfigCtx + let metadataDefaults = _cdcMetadataDefaults dynamicConfig metadata@Metadata {..} = overrideMetadataDefaults metadataNoDefaults metadataDefaults metadataDep <- Inc.newDependency -< metadata (inconsistentObjects, (resolvedOutputs, dependencyInconsistentObjects, resolvedDependencies), ((adminIntrospection, gqlContext, gqlContextUnauth, inconsistentRemoteSchemas), (relayContext, relayContextUnauth))) <- - Inc.cache buildOutputsAndSchema -< (metadataDep, serverConfigCtx, invalidationKeysDep, storedIntrospection) + Inc.cache buildOutputsAndSchema -< (metadataDep, dynamicConfig, invalidationKeysDep, storedIntrospection) let (resolvedEndpoints, endpointCollectedInfo) = runIdentity $ runWriterT $ buildRESTEndpoints _metaQueryCollections (OMap.elems _metaRestEndpoints) (cronTriggersMap, cronTriggersCollectedInfo) = runIdentity $ runWriterT $ buildCronTriggers (OMap.elems _metaCronTriggers) @@ -442,15 +443,19 @@ buildSchemaCacheRule logger env = proc (MetadataWithResourceVersion metadataNoDe } where -- See Note [Avoiding GraphQL schema rebuilds when changing irrelevant Metadata] - buildOutputsAndSchema = proc (metadataDep, serverConfigCtx, invalidationKeysDep, storedIntrospection) -> do - (outputs, collectedInfo) <- runWriterA buildAndCollectInfo -< (serverConfigCtx, metadataDep, invalidationKeysDep, storedIntrospection) + buildOutputsAndSchema = proc (metadataDep, dynamicConfig, invalidationKeysDep, storedIntrospection) -> do + (outputs, collectedInfo) <- runWriterA buildAndCollectInfo -< (dynamicConfig, metadataDep, invalidationKeysDep, storedIntrospection) let (inconsistentObjects, unresolvedDependencies) = partitionEithers $ toList collectedInfo out2@(resolvedOutputs, _dependencyInconsistentObjects, _resolvedDependencies) <- resolveDependencies -< (outputs, unresolvedDependencies) out3 <- bindA -< do buildGQLContext - serverConfigCtx + (_cdcFunctionPermsCtx dynamicConfig) + (_cdcRemoteSchemaPermsCtx dynamicConfig) + (_cdcExperimentalFeatures dynamicConfig) + (_cdcSQLGenCtx dynamicConfig) + (_cdcApolloFederationStatus dynamicConfig) (_boSources resolvedOutputs) (_boRemoteSchemas resolvedOutputs) (_boActions resolvedOutputs) @@ -484,7 +489,8 @@ buildSchemaCacheRule logger env = proc (MetadataWithResourceVersion metadataNoDe ArrowWriter (Seq (Either InconsistentMetadata MetadataDependency)) arr, MonadIO m, MonadBaseControl IO m, - ProvidesNetwork m + ProvidesNetwork m, + HasCacheStaticConfig m ) => (Inc.Dependency (BackendMap BackendInvalidationKeysWrapper), [AB.AnyBackend BackendConfigWrapper]) `arr` BackendCache resolveBackendCache = proc (backendInvalidationMap, backendConfigs) -> do @@ -586,17 +592,19 @@ buildSchemaCacheRule logger env = proc (MetadataWithResourceVersion metadataNoDe MonadIO m, BackendMetadata b, MonadError QErr m, - MonadBaseControl IO m + MonadBaseControl IO m, + HasCacheStaticConfig m ) => - (Proxy b, ServerConfigCtx, Bool, SourceConfig b) `arr` (RecreateEventTriggers, SourceCatalogMigrationState) - initCatalogIfNeeded = Inc.cache proc (Proxy, serverConfigCtx, atleastOneTrigger, sourceConfig) -> do + (Proxy b, Bool, SourceConfig b) `arr` (RecreateEventTriggers, SourceCatalogMigrationState) + initCatalogIfNeeded = Inc.cache proc (Proxy, atleastOneTrigger, sourceConfig) -> do bindA -< do if atleastOneTrigger then do - let 
maintenanceMode = _sccMaintenanceMode serverConfigCtx - eventingMode = _sccEventingMode serverConfigCtx - readOnlyMode = _sccReadOnlyMode serverConfigCtx + cacheStaticConfig <- askCacheStaticConfig + let maintenanceMode = _cscMaintenanceMode cacheStaticConfig + eventingMode = _cscEventingMode cacheStaticConfig + readOnlyMode = _cscReadOnlyMode cacheStaticConfig if -- when safe mode is enabled, don't perform any migrations @@ -632,9 +640,10 @@ buildSchemaCacheRule logger env = proc (MetadataWithResourceVersion metadataNoDe MonadError QErr m, MonadIO m, BackendMetadata b, - GetAggregationPredicatesDeps b + GetAggregationPredicatesDeps b, + HasCacheStaticConfig m ) => - ( ServerConfigCtx, + ( CacheDynamicConfig, HashMap SourceName (AB.AnyBackend PartiallyResolvedSource), SourceMetadata b, SourceConfig b, @@ -646,7 +655,7 @@ buildSchemaCacheRule logger env = proc (MetadataWithResourceVersion metadataNoDe OrderedRoles ) `arr` (SourceInfo b) - buildSource = proc (serverConfigCtx, allSources, sourceMetadata, sourceConfig, tablesRawInfo, eventTriggerInfoMaps, _dbTables, dbFunctions, remoteSchemaMap, orderedRoles) -> do + buildSource = proc (dynamicConfig, allSources, sourceMetadata, sourceConfig, tablesRawInfo, eventTriggerInfoMaps, _dbTables, dbFunctions, remoteSchemaMap, orderedRoles) -> do let SourceMetadata sourceName _backendKind tables functions logicalModels customReturnTypes _ queryTagsConfig sourceCustomization _healthCheckConfig = sourceMetadata tablesMetadata = OMap.elems tables (_, nonColumnInputs, permissions) = unzip3 $ map mkTableInputs tablesMetadata @@ -683,8 +692,8 @@ buildSchemaCacheRule logger env = proc (MetadataWithResourceVersion metadataNoDe -- not forcing the evaluation here results in a measurable negative impact -- on memory residency as measured by our benchmark - let !defaultNC = _sccDefaultNamingConvention serverConfigCtx - !isNamingConventionEnabled = EFNamingConventions `elem` (_sccExperimentalFeatures serverConfigCtx) + let !defaultNC = _cdcDefaultNamingConvention dynamicConfig + !isNamingConventionEnabled = EFNamingConventions `elem` (_cdcExperimentalFeatures dynamicConfig) !namingConv <- bindA -< @@ -732,7 +741,7 @@ buildSchemaCacheRule logger env = proc (MetadataWithResourceVersion metadataNoDe areLogicalModelsEnabled <- bindA -< do - let CheckFeatureFlag checkFeatureFlag = _sccCheckFeatureFlag serverConfigCtx + CheckFeatureFlag checkFeatureFlag <- _cscCheckFeatureFlag <$> askCacheStaticConfig liftIO @m $ checkFeatureFlag FF.logicalModelInterface let mkCustomReturnTypeMetadataObject :: CustomReturnTypeMetadata b -> MetadataObject @@ -821,10 +830,11 @@ buildSchemaCacheRule logger env = proc (MetadataWithResourceVersion metadataNoDe MonadReader BuildReason m, MonadBaseControl IO m, ProvidesNetwork m, - MonadResolveSource m + MonadResolveSource m, + HasCacheStaticConfig m ) => - (ServerConfigCtx, Inc.Dependency Metadata, Inc.Dependency InvalidationKeys, Maybe StoredIntrospection) `arr` BuildOutputs - buildAndCollectInfo = proc (serverConfigCtx, metadataDep, invalidationKeys, storedIntrospection) -> do + (CacheDynamicConfig, Inc.Dependency Metadata, Inc.Dependency InvalidationKeys, Maybe StoredIntrospection) `arr` BuildOutputs + buildAndCollectInfo = proc (dynamicConfig, metadataDep, invalidationKeys, storedIntrospection) -> do sources <- Inc.dependOn -< Inc.selectD #_metaSources metadataDep remoteSchemas <- Inc.dependOn -< Inc.selectD #_metaRemoteSchemas metadataDep customTypes <- Inc.dependOn -< Inc.selectD #_metaCustomTypes metadataDep @@ -860,8 +870,8 @@ 
buildSchemaCacheRule logger env = proc (MetadataWithResourceVersion metadataNoDe let remoteSchemaInvalidationKeys = Inc.selectD #_ikRemoteSchemas invalidationKeys remoteSchemaMap <- buildRemoteSchemas env -< ((remoteSchemaInvalidationKeys, orderedRoles, fmap encJToLBS . siRemotes <$> storedIntrospection), OMap.elems remoteSchemas) let remoteSchemaCtxMap = M.map fst remoteSchemaMap - !defaultNC = _sccDefaultNamingConvention serverConfigCtx - !isNamingConventionEnabled = EFNamingConventions `elem` (_sccExperimentalFeatures serverConfigCtx) + !defaultNC = _cdcDefaultNamingConvention dynamicConfig + !isNamingConventionEnabled = EFNamingConventions `elem` (_cdcExperimentalFeatures dynamicConfig) let backendInvalidationKeys = Inc.selectD #_ikBackends invalidationKeys backendCache <- resolveBackendCache -< (backendInvalidationKeys, BackendMap.elems backendConfigs) @@ -875,7 +885,7 @@ buildSchemaCacheRule logger env = proc (MetadataWithResourceVersion metadataNoDe Inc.keyed ( \_ exists -> AB.dispatchAnyBackendArrow @BackendMetadata @BackendEventTrigger - ( proc (backendInfoAndSourceMetadata :: BackendInfoAndSourceMetadata b, (serverConfigCtx, invalidationKeys, storedIntrospection, defaultNC, isNamingConventionEnabled)) -> do + ( proc (backendInfoAndSourceMetadata :: BackendInfoAndSourceMetadata b, (dynamicConfig, invalidationKeys, storedIntrospection, defaultNC, isNamingConventionEnabled)) -> do let sourceMetadata = _bcasmSourceMetadata backendInfoAndSourceMetadata sourceName = _smName sourceMetadata sourceInvalidationsKeys = Inc.selectD #_ikSources invalidationKeys @@ -904,7 +914,7 @@ buildSchemaCacheRule logger env = proc (MetadataWithResourceVersion metadataNoDe eventTriggers = map (_tmTable &&& OMap.elems . _tmEventTriggers) tablesMetadata numEventTriggers = sum $ map (length . 
snd) eventTriggers - (recreateEventTriggers, sourceCatalogMigrationState) <- initCatalogIfNeeded -< (Proxy :: Proxy b, serverConfigCtx, numEventTriggers > 0, sourceConfig) + (recreateEventTriggers, sourceCatalogMigrationState) <- initCatalogIfNeeded -< (Proxy :: Proxy b, numEventTriggers > 0, sourceConfig) bindA -< unLogger logger (sourceName, sourceCatalogMigrationState) @@ -915,7 +925,7 @@ buildSchemaCacheRule logger env = proc (MetadataWithResourceVersion metadataNoDe (| Inc.keyed ( \_ (tableCoreInfo, (_, eventTriggerConfs)) -> - buildTableEventTriggers -< (serverConfigCtx, sourceName, sourceConfig, tableCoreInfo, eventTriggerConfs, metadataInvalidationKey, recreateEventTriggers) + buildTableEventTriggers -< (dynamicConfig, sourceName, sourceConfig, tableCoreInfo, eventTriggerConfs, metadataInvalidationKey, recreateEventTriggers) ) |) (tablesCoreInfo `alignTableMap` mapFromL fst eventTriggers) @@ -926,7 +936,7 @@ buildSchemaCacheRule logger env = proc (MetadataWithResourceVersion metadataNoDe PartiallyResolvedSource sourceMetadata sourceConfig source tablesCoreInfo eventTriggerInfoMaps ) -< - (exists, (serverConfigCtx, invalidationKeys, storedIntrospection, defaultNC, isNamingConventionEnabled)) + (exists, (dynamicConfig, invalidationKeys, storedIntrospection, defaultNC, isNamingConventionEnabled)) ) |) (M.fromList $ OMap.toList backendInfoAndSourceMetadata) let partiallyResolvedSources = catMaybes partiallyResolvedSourcesMaybes @@ -946,7 +956,7 @@ buildSchemaCacheRule logger env = proc (MetadataWithResourceVersion metadataNoDe AB.dispatchAnyBackendArrow @BackendMetadata @GetAggregationPredicatesDeps ( proc ( partiallyResolvedSource :: PartiallyResolvedSource b, - (serverConfigCtx, allResolvedSources, remoteSchemaCtxMap, orderedRoles) + (dynamicConfig, allResolvedSources, remoteSchemaCtxMap, orderedRoles) ) -> do let PartiallyResolvedSource sourceMetadata sourceConfig introspection tablesInfo eventTriggers = partiallyResolvedSource @@ -954,7 +964,7 @@ buildSchemaCacheRule logger env = proc (MetadataWithResourceVersion metadataNoDe so <- Inc.cache buildSource -< - ( serverConfigCtx, + ( dynamicConfig, allResolvedSources, sourceMetadata, sourceConfig, @@ -969,7 +979,7 @@ buildSchemaCacheRule logger env = proc (MetadataWithResourceVersion metadataNoDe ) -< ( exists, - (serverConfigCtx, partiallyResolvedSources, remoteSchemaCtxMap, orderedRoles) + (dynamicConfig, partiallyResolvedSources, remoteSchemaCtxMap, orderedRoles) ) ) |) partiallyResolvedSources @@ -1098,7 +1108,7 @@ buildSchemaCacheRule logger env = proc (MetadataWithResourceVersion metadataNoDe mkEventTriggerMetadataObject :: forall b a c. 
Backend b => - (ServerConfigCtx, a, SourceName, c, TableName b, RecreateEventTriggers, EventTriggerConf b) -> + (CacheDynamicConfig, a, SourceName, c, TableName b, RecreateEventTriggers, EventTriggerConf b) -> MetadataObject mkEventTriggerMetadataObject (_, _, source, _, table, _, eventTriggerConf) = let objectId = @@ -1133,9 +1143,10 @@ buildSchemaCacheRule logger env = proc (MetadataWithResourceVersion metadataNoDe MonadBaseControl IO m, MonadReader BuildReason m, BackendMetadata b, - BackendEventTrigger b + BackendEventTrigger b, + HasCacheStaticConfig m ) => - ( ServerConfigCtx, + ( CacheDynamicConfig, SourceName, SourceConfig b, TableCoreInfoG b (ColumnInfo b) (ColumnInfo b), @@ -1144,15 +1155,15 @@ buildSchemaCacheRule logger env = proc (MetadataWithResourceVersion metadataNoDe RecreateEventTriggers ) `arr` (EventTriggerInfoMap b) - buildTableEventTriggers = proc (serverConfigCtx, sourceName, sourceConfig, tableInfo, eventTriggerConfs, metadataInvalidationKey, migrationRecreateEventTriggers) -> + buildTableEventTriggers = proc (dynamicConfig, sourceName, sourceConfig, tableInfo, eventTriggerConfs, metadataInvalidationKey, migrationRecreateEventTriggers) -> buildInfoMap (etcName . (^. _7)) (mkEventTriggerMetadataObject @b) buildEventTrigger -< - (tableInfo, map (serverConfigCtx,metadataInvalidationKey,sourceName,sourceConfig,_tciName tableInfo,migrationRecreateEventTriggers,) eventTriggerConfs) + (tableInfo, map (dynamicConfig,metadataInvalidationKey,sourceName,sourceConfig,_tciName tableInfo,migrationRecreateEventTriggers,) eventTriggerConfs) where - buildEventTrigger = proc (tableInfo, (serverConfigCtx, metadataInvalidationKey, source, sourceConfig, table, migrationRecreateEventTriggers, eventTriggerConf)) -> do + buildEventTrigger = proc (tableInfo, (dynamicConfig, metadataInvalidationKey, source, sourceConfig, table, migrationRecreateEventTriggers, eventTriggerConf)) -> do let triggerName = etcName eventTriggerConf triggerOnReplication = etcTriggerOnReplication eventTriggerConf - metadataObject = mkEventTriggerMetadataObject @b (serverConfigCtx, metadataInvalidationKey, source, sourceConfig, table, migrationRecreateEventTriggers, eventTriggerConf) + metadataObject = mkEventTriggerMetadataObject @b (dynamicConfig, metadataInvalidationKey, source, sourceConfig, table, migrationRecreateEventTriggers, eventTriggerConf) schemaObjectId = SOSourceObj source $ AB.mkAnyBackend $ @@ -1169,13 +1180,14 @@ buildSchemaCacheRule logger env = proc (MetadataWithResourceVersion metadataNoDe withRecordInconsistency ( do (info, dependencies) <- bindErrorA -< modifyErr (addTableContext @b table . 
addTriggerContext) $ buildEventTriggerInfo @b env source table eventTriggerConf + staticConfig <- bindA -< askCacheStaticConfig let isCatalogUpdate = case buildReason of CatalogUpdate _ -> True CatalogSync -> False tableColumns = M.elems $ _tciFieldInfoMap tableInfo - if ( _sccMaintenanceMode serverConfigCtx == MaintenanceModeDisabled - && _sccReadOnlyMode serverConfigCtx == ReadOnlyModeDisabled + if ( _cscMaintenanceMode staticConfig == MaintenanceModeDisabled + && _cscReadOnlyMode staticConfig == ReadOnlyModeDisabled ) then do bindA @@ -1187,7 +1199,7 @@ buildSchemaCacheRule logger env = proc (MetadataWithResourceVersion metadataNoDe liftEitherM $ createTableEventTrigger @b - serverConfigCtx + (_cdcSQLGenCtx dynamicConfig) sourceConfig table tableColumns @@ -1199,7 +1211,7 @@ buildSchemaCacheRule logger env = proc (MetadataWithResourceVersion metadataNoDe then do recreateTriggerIfNeeded -< - ( serverConfigCtx, + ( dynamicConfig, table, tableColumns, triggerName, @@ -1214,7 +1226,7 @@ buildSchemaCacheRule logger env = proc (MetadataWithResourceVersion metadataNoDe bindA -< createMissingSQLTriggers - serverConfigCtx + (_cdcSQLGenCtx dynamicConfig) sourceConfig table (tableColumns, _tciPrimaryKey tableInfo) @@ -1234,7 +1246,7 @@ buildSchemaCacheRule logger env = proc (MetadataWithResourceVersion metadataNoDe -- computation will not be done again. Inc.cache proc - ( serverConfigCtx, + ( dynamicConfig, tableName, tableColumns, triggerName, @@ -1248,7 +1260,7 @@ buildSchemaCacheRule logger env = proc (MetadataWithResourceVersion metadataNoDe -< do liftEitherM $ createTableEventTrigger @b - serverConfigCtx + (_cdcSQLGenCtx dynamicConfig) sourceConfig tableName tableColumns diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Common.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Common.hs index d5242f0fc9d..4cc5bf2dacc 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Common.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Common.hs @@ -59,6 +59,7 @@ import Hasura.CustomReturnType.Types (CustomReturnTypeName) import Hasura.EncJSON import Hasura.Incremental qualified as Inc import Hasura.Prelude +import Hasura.RQL.DDL.Schema.Cache.Config import Hasura.RQL.Types.Backend import Hasura.RQL.Types.Column import Hasura.RQL.Types.Common @@ -79,7 +80,6 @@ import Hasura.SQL.AnyBackend import Hasura.SQL.Backend import Hasura.SQL.BackendMap (BackendMap) import Hasura.SQL.BackendMap qualified as BackendMap -import Hasura.Server.Types import Hasura.Services import Hasura.Session import Network.HTTP.Client.Transformable qualified as HTTP @@ -257,7 +257,8 @@ $(makeLenses ''BuildOutputs) data CacheBuildParams = CacheBuildParams { _cbpManager :: HTTP.Manager, _cbpPGSourceResolver :: SourceResolver ('Postgres 'Vanilla), - _cbpMSSQLSourceResolver :: SourceResolver 'MSSQL + _cbpMSSQLSourceResolver :: SourceResolver 'MSSQL, + _cbpStaticConfig :: CacheStaticConfig } -- | The monad in which @'RebuildableSchemaCache' is being run @@ -273,6 +274,9 @@ newtype CacheBuild a = CacheBuild (ReaderT CacheBuildParams (ExceptT QErr IO) a) MonadBaseControl IO ) +instance HasCacheStaticConfig CacheBuild where + askCacheStaticConfig = asks _cbpStaticConfig + instance ProvidesNetwork CacheBuild where askHTTPManager = asks _cbpManager @@ -294,7 +298,8 @@ runCacheBuildM :: ( MonadIO m, MonadError QErr m, MonadResolveSource m, - ProvidesNetwork m + ProvidesNetwork m, + HasCacheStaticConfig m ) => CacheBuild a -> m a @@ -304,12 +309,13 @@ runCacheBuildM m = do <$> askHTTPManager <*> getPGSourceResolver <*> 
getMSSQLSourceResolver + <*> askCacheStaticConfig runCacheBuild params m data RebuildableSchemaCache = RebuildableSchemaCache { lastBuiltSchemaCache :: SchemaCache, _rscInvalidationMap :: InvalidationKeys, - _rscRebuild :: Inc.Rule (ReaderT BuildReason CacheBuild) (MetadataWithResourceVersion, ServerConfigCtx, InvalidationKeys, Maybe StoredIntrospection) SchemaCache + _rscRebuild :: Inc.Rule (ReaderT BuildReason CacheBuild) (MetadataWithResourceVersion, CacheDynamicConfig, InvalidationKeys, Maybe StoredIntrospection) SchemaCache } bindErrorA :: diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Config.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Config.hs new file mode 100644 index 00000000000..29eca90e0ad --- /dev/null +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Config.hs @@ -0,0 +1,70 @@ +module Hasura.RQL.DDL.Schema.Cache.Config + ( -- * static config + CacheStaticConfig (..), + HasCacheStaticConfig (..), + + -- * dynamic config + CacheDynamicConfig (..), + ) +where + +import Hasura.GraphQL.Schema.NamingCase (NamingCase) +import Hasura.GraphQL.Schema.Options qualified as Options +import Hasura.Prelude +import Hasura.RQL.Types.Common (SQLGenCtx) +import Hasura.RQL.Types.Metadata (MetadataDefaults) +import Hasura.Server.Types + +-------------------------------------------------------------------------------- +-- static config + +-- | This type aggregates all of the "static" configuration of the cache build. +-- +-- Static arguments are the ones that will not change during the execution of +-- the engine. They are a subset of the environment of the engine (see 'AppEnv' +-- and Note [Hasura Application State] for more information). +-- +-- While 'AppEnv' has access to the union of *all* the static configuration of +-- the engine, more specific parts of the code should avoid relying directly on +-- it to avoid being tied to unrelated parts of the codebase. (See FIXME). +data CacheStaticConfig = CacheStaticConfig + { _cscMaintenanceMode :: MaintenanceMode (), + _cscEventingMode :: EventingMode, + _cscReadOnlyMode :: ReadOnlyMode, + _cscCheckFeatureFlag :: CheckFeatureFlag + } + +class Monad m => HasCacheStaticConfig m where + askCacheStaticConfig :: m CacheStaticConfig + +instance HasCacheStaticConfig m => HasCacheStaticConfig (ReaderT r m) where + askCacheStaticConfig = lift askCacheStaticConfig + +instance HasCacheStaticConfig m => HasCacheStaticConfig (ExceptT e m) where + askCacheStaticConfig = lift askCacheStaticConfig + +instance HasCacheStaticConfig m => HasCacheStaticConfig (StateT s m) where + askCacheStaticConfig = lift askCacheStaticConfig + +-------------------------------------------------------------------------------- +-- dynamic config + +-- | This type aggregates all of the "dynamic" configuration of the cache build. +-- +-- Dynamic arguments are the ones that might change during the execution of the +-- engine. They are a subset of the 'AppContext' (see +-- Note [Hasura Application State] for more information). +-- +-- While 'AppContext' has access to the union of *all* the dynamic configuration +-- of the engine, more specific parts of the code should avoid relying directly +-- on it to avoid being tied to unrelated parts of the codebase. (See FIXME). 
+data CacheDynamicConfig = CacheDynamicConfig + { _cdcFunctionPermsCtx :: Options.InferFunctionPermissions, + _cdcRemoteSchemaPermsCtx :: Options.RemoteSchemaPermissions, + _cdcSQLGenCtx :: SQLGenCtx, + _cdcExperimentalFeatures :: HashSet ExperimentalFeature, + _cdcDefaultNamingConvention :: NamingCase, + _cdcMetadataDefaults :: MetadataDefaults, + _cdcApolloFederationStatus :: ApolloFederationStatus + } + deriving (Eq) diff --git a/server/src-lib/Hasura/RQL/DML/Delete.hs b/server/src-lib/Hasura/RQL/DML/Delete.hs index 717b729472c..82600218a91 100644 --- a/server/src-lib/Hasura/RQL/DML/Delete.hs +++ b/server/src-lib/Hasura/RQL/DML/Delete.hs @@ -29,7 +29,6 @@ import Hasura.RQL.Types.Common import Hasura.RQL.Types.Metadata import Hasura.RQL.Types.SchemaCache import Hasura.SQL.Backend -import Hasura.Server.Types import Hasura.Session import Hasura.Tracing qualified as Tracing @@ -109,17 +108,17 @@ runDelete :: ( QErrM m, UserInfoM m, CacheRM m, - HasServerConfigCtx m, MonadIO m, Tracing.MonadTrace m, MonadBaseControl IO m, MetadataM m ) => + SQLGenCtx -> DeleteQuery -> m EncJSON -runDelete q = do +runDelete sqlGen q = do sourceConfig <- askSourceConfig @('Postgres 'Vanilla) (doSource q) - strfyNum <- stringifyNum . _sccSQLGenCtx <$> askServerConfigCtx + let strfyNum = stringifyNum sqlGen userInfo <- askUserInfo validateDeleteQ q >>= runTxWithCtx (_pscExecCtx sourceConfig) (Tx PG.ReadWrite Nothing) LegacyRQLQuery diff --git a/server/src-lib/Hasura/RQL/DML/Insert.hs b/server/src-lib/Hasura/RQL/DML/Insert.hs index accfd82b5d9..5ac093053ee 100644 --- a/server/src-lib/Hasura/RQL/DML/Insert.hs +++ b/server/src-lib/Hasura/RQL/DML/Insert.hs @@ -29,7 +29,6 @@ import Hasura.RQL.Types.Metadata import Hasura.RQL.Types.SchemaCache import Hasura.RQL.Types.Table import Hasura.SQL.Backend -import Hasura.Server.Types import Hasura.Session import Hasura.Tracing qualified as Tracing @@ -243,19 +242,19 @@ runInsert :: ( QErrM m, UserInfoM m, CacheRM m, - HasServerConfigCtx m, MonadIO m, Tracing.MonadTrace m, MonadBaseControl IO m, MetadataM m ) => + SQLGenCtx -> InsertQuery -> m EncJSON -runInsert q = do +runInsert sqlGen q = do sourceConfig <- askSourceConfig @('Postgres 'Vanilla) (iqSource q) userInfo <- askUserInfo res <- convInsQ q - strfyNum <- stringifyNum . 
_sccSQLGenCtx <$> askServerConfigCtx + let strfyNum = stringifyNum sqlGen runTxWithCtx (_pscExecCtx sourceConfig) (Tx PG.ReadWrite Nothing) LegacyRQLQuery $ flip runReaderT emptyQueryTagsComment $ execInsertQuery strfyNum Nothing userInfo res diff --git a/server/src-lib/Hasura/RQL/DML/Internal.hs b/server/src-lib/Hasura/RQL/DML/Internal.hs index 2605a8db9c5..35670d5e61a 100644 --- a/server/src-lib/Hasura/RQL/DML/Internal.hs +++ b/server/src-lib/Hasura/RQL/DML/Internal.hs @@ -55,7 +55,6 @@ import Hasura.RQL.Types.SchemaCache import Hasura.RQL.Types.Table import Hasura.SQL.Backend import Hasura.SQL.Types -import Hasura.Server.Types import Hasura.Session newtype DMLP1T m a = DMLP1T {unDMLP1T :: StateT (DS.Seq PG.PrepArg) m a} @@ -69,8 +68,7 @@ newtype DMLP1T m a = DMLP1T {unDMLP1T :: StateT (DS.Seq PG.PrepArg) m a} TableCoreInfoRM b, TableInfoRM b, CacheRM, - UserInfoM, - HasServerConfigCtx + UserInfoM ) runDMLP1T :: DMLP1T m a -> m (a, DS.Seq PG.PrepArg) diff --git a/server/src-lib/Hasura/RQL/DML/Select.hs b/server/src-lib/Hasura/RQL/DML/Select.hs index 72628e5e45b..4f8ae259522 100644 --- a/server/src-lib/Hasura/RQL/DML/Select.hs +++ b/server/src-lib/Hasura/RQL/DML/Select.hs @@ -31,7 +31,6 @@ import Hasura.RQL.Types.Relationships.Local import Hasura.RQL.Types.SchemaCache import Hasura.RQL.Types.Table import Hasura.SQL.Backend -import Hasura.Server.Types import Hasura.Session import Hasura.Tracing qualified as Tracing @@ -174,9 +173,9 @@ convOrderByElem sessVarBldr (flds, spi) = \case convSelectQ :: ( UserInfoM m, QErrM m, - TableInfoRM ('Postgres 'Vanilla) m, - HasServerConfigCtx m + TableInfoRM ('Postgres 'Vanilla) m ) => + SQLGenCtx -> TableName ('Postgres 'Vanilla) -> FieldInfoMap (FieldInfo ('Postgres 'Vanilla)) -> -- Table information of current table SelPermInfo ('Postgres 'Vanilla) -> -- Additional select permission info @@ -184,7 +183,7 @@ convSelectQ :: SessionVariableBuilder m -> ValueParser ('Postgres 'Vanilla) m S.SQLExp -> m (AnnSimpleSelect ('Postgres 'Vanilla)) -convSelectQ table fieldInfoMap selPermInfo selQ sessVarBldr prepValBldr = do +convSelectQ sqlGen table fieldInfoMap selPermInfo selQ sessVarBldr prepValBldr = do -- Convert where clause wClause <- forM (sqWhere selQ) $ \boolExp -> withPathK "where" $ @@ -200,6 +199,7 @@ convSelectQ table fieldInfoMap selPermInfo selQ sessVarBldr prepValBldr = do (ECRel relName mAlias relSelQ) -> do annRel <- convExtRel + sqlGen fieldInfoMap relName mAlias @@ -230,8 +230,7 @@ convSelectQ table fieldInfoMap selPermInfo selQ sessVarBldr prepValBldr = do let tabFrom = FromTable table tabPerm = TablePerm resolvedSelFltr mPermLimit tabArgs = SelectArgs wClause annOrdByM mQueryLimit (fromIntegral <$> mQueryOffset) Nothing - - strfyNum <- stringifyNum . 
_sccSQLGenCtx <$> askServerConfigCtx + strfyNum = stringifyNum sqlGen pure $ AnnSelectG annFlds tabFrom tabPerm tabArgs strfyNum Nothing where mQueryOffset = sqOffset selQ @@ -254,9 +253,9 @@ convExtSimple fieldInfoMap selPermInfo pgCol = do convExtRel :: ( UserInfoM m, QErrM m, - TableInfoRM ('Postgres 'Vanilla) m, - HasServerConfigCtx m + TableInfoRM ('Postgres 'Vanilla) m ) => + SQLGenCtx -> FieldInfoMap (FieldInfo ('Postgres 'Vanilla)) -> RelName -> Maybe RelName -> @@ -264,14 +263,14 @@ convExtRel :: SessionVariableBuilder m -> ValueParser ('Postgres 'Vanilla) m S.SQLExp -> m (Either (ObjectRelationSelect ('Postgres 'Vanilla)) (ArraySelect ('Postgres 'Vanilla))) -convExtRel fieldInfoMap relName mAlias selQ sessVarBldr prepValBldr = do +convExtRel sqlGen fieldInfoMap relName mAlias selQ sessVarBldr prepValBldr = do -- Point to the name key relInfo <- withPathK "name" $ askRelType fieldInfoMap relName pgWhenRelErr let (RelInfo _ relTy colMapping relTab _ _) = relInfo (relCIM, relSPI) <- fetchRelDet relName relTab - annSel <- convSelectQ relTab relCIM relSPI selQ sessVarBldr prepValBldr + annSel <- convSelectQ sqlGen relTab relCIM relSPI selQ sessVarBldr prepValBldr case relTy of ObjRel -> do when misused $ throw400 UnexpectedPayload objRelMisuseMsg @@ -304,20 +303,20 @@ convExtRel fieldInfoMap relName mAlias selQ sessVarBldr prepValBldr = do convSelectQuery :: ( UserInfoM m, QErrM m, - TableInfoRM ('Postgres 'Vanilla) m, - HasServerConfigCtx m + TableInfoRM ('Postgres 'Vanilla) m ) => + SQLGenCtx -> SessionVariableBuilder m -> ValueParser ('Postgres 'Vanilla) m S.SQLExp -> SelectQuery -> m (AnnSimpleSelect ('Postgres 'Vanilla)) -convSelectQuery sessVarBldr prepArgBuilder (DMLQuery _ qt selQ) = do +convSelectQuery sqlGen sessVarBldr prepArgBuilder (DMLQuery _ qt selQ) = do tabInfo <- withPathK "table" $ askTableInfoSource qt selPermInfo <- askSelPermInfo tabInfo let fieldInfo = _tciFieldInfoMap $ _tiCoreInfo tabInfo extSelQ <- resolveStar fieldInfo selPermInfo selQ validateHeaders $ spiRequiredHeaders selPermInfo - convSelectQ qt fieldInfo selPermInfo extSelQ sessVarBldr prepArgBuilder + convSelectQ sqlGen qt fieldInfo selPermInfo extSelQ sessVarBldr prepArgBuilder selectP2 :: JsonAggSelect -> (AnnSimpleSelect ('Postgres 'Vanilla), DS.Seq PG.PrepArg) -> PG.TxE QErr EncJSON selectP2 jsonAggSelect (sel, p) = @@ -330,15 +329,16 @@ selectP2 jsonAggSelect (sel, p) = mkSQLSelect jsonAggSelect sel phaseOne :: - (QErrM m, UserInfoM m, CacheRM m, HasServerConfigCtx m) => + (QErrM m, UserInfoM m, CacheRM m) => + SQLGenCtx -> SelectQuery -> m (AnnSimpleSelect ('Postgres 'Vanilla), DS.Seq PG.PrepArg) -phaseOne query = do +phaseOne sqlGen query = do let sourceName = getSourceDMLQuery query tableCache :: TableCache ('Postgres 'Vanilla) <- fold <$> askTableCache sourceName flip runTableCacheRT tableCache $ runDMLP1T $ - convSelectQuery sessVarFromCurrentSetting (valueParserWithCollectableType binRHSBuilder) query + convSelectQuery sqlGen sessVarFromCurrentSetting (valueParserWithCollectableType binRHSBuilder) query phaseTwo :: (MonadTx m) => (AnnSimpleSelect ('Postgres 'Vanilla), DS.Seq PG.PrepArg) -> m EncJSON phaseTwo = @@ -348,14 +348,14 @@ runSelect :: ( QErrM m, UserInfoM m, CacheRM m, - HasServerConfigCtx m, MonadIO m, MonadBaseControl IO m, Tracing.MonadTrace m, MetadataM m ) => + SQLGenCtx -> SelectQuery -> m EncJSON -runSelect q = do +runSelect sqlGen q = do sourceConfig <- askSourceConfig @('Postgres 'Vanilla) (getSourceDMLQuery q) - phaseOne q >>= runTxWithCtx (_pscExecCtx sourceConfig) (Tx 
PG.ReadOnly Nothing) LegacyRQLQuery . phaseTwo + phaseOne sqlGen q >>= runTxWithCtx (_pscExecCtx sourceConfig) (Tx PG.ReadOnly Nothing) LegacyRQLQuery . phaseTwo diff --git a/server/src-lib/Hasura/RQL/DML/Update.hs b/server/src-lib/Hasura/RQL/DML/Update.hs index f9f40a8394e..7d64818ef36 100644 --- a/server/src-lib/Hasura/RQL/DML/Update.hs +++ b/server/src-lib/Hasura/RQL/DML/Update.hs @@ -34,7 +34,6 @@ import Hasura.RQL.Types.SchemaCache import Hasura.RQL.Types.Table import Hasura.SQL.Backend import Hasura.SQL.Types -import Hasura.Server.Types import Hasura.Session import Hasura.Tracing qualified as Tracing @@ -224,18 +223,18 @@ runUpdate :: ( QErrM m, UserInfoM m, CacheRM m, - HasServerConfigCtx m, MonadBaseControl IO m, MonadIO m, Tracing.MonadTrace m, MetadataM m ) => + SQLGenCtx -> UpdateQuery -> m EncJSON -runUpdate q = do +runUpdate sqlGen q = do sourceConfig <- askSourceConfig @('Postgres 'Vanilla) (uqSource q) userInfo <- askUserInfo - strfyNum <- stringifyNum . _sccSQLGenCtx <$> askServerConfigCtx + let strfyNum = stringifyNum sqlGen validateUpdateQuery q >>= runTxWithCtx (_pscExecCtx sourceConfig) (Tx PG.ReadWrite Nothing) LegacyRQLQuery . flip runReaderT emptyQueryTagsComment diff --git a/server/src-lib/Hasura/RQL/Types/Eventing/Backend.hs b/server/src-lib/Hasura/RQL/Types/Eventing/Backend.hs index f8fe69bc708..3f74b72a8d6 100644 --- a/server/src-lib/Hasura/RQL/Types/Eventing/Backend.hs +++ b/server/src-lib/Hasura/RQL/Types/Eventing/Backend.hs @@ -19,7 +19,7 @@ import Hasura.RQL.Types.Eventing import Hasura.RQL.Types.Source import Hasura.RQL.Types.Table (PrimaryKey) import Hasura.SQL.Backend -import Hasura.Server.Types (MaintenanceMode, ServerConfigCtx) +import Hasura.Server.Types (MaintenanceMode) import Hasura.Session (UserInfo) import Hasura.Tracing qualified as Tracing @@ -188,7 +188,7 @@ class Backend b => BackendEventTrigger (b :: BackendType) where -- exist then it will create it. 
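With the HasServerConfigCtx constraint dropped from the DML entry points, runDelete (and likewise runInsert, runUpdate and runSelect) now takes the SQLGenCtx as an explicit first argument. A minimal sketch of a caller under the new signature, assuming the usual Hasura imports are in scope; deleteViaContext is a hypothetical helper, while runDelete and acSQLGenCtx are names used in this change.

deleteViaContext ::
  ( QErrM m,
    UserInfoM m,
    CacheRM m,
    MetadataM m,
    MonadIO m,
    MonadBaseControl IO m,
    Tracing.MonadTrace m
  ) =>
  AppContext ->
  DeleteQuery ->
  m EncJSON
deleteViaContext appContext query =
  -- the SQL generation settings travel as an ordinary argument instead of
  -- being read back out of a HasServerConfigCtx environment
  runDelete (acSQLGenCtx appContext) query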
createMissingSQLTriggers :: (MonadIO m, MonadError QErr m, MonadBaseControl IO m, Backend b) => - ServerConfigCtx -> + SQLGenCtx -> SourceConfig b -> TableName b -> ([ColumnInfo b], Maybe (PrimaryKey b (ColumnInfo b))) -> @@ -199,7 +199,7 @@ class Backend b => BackendEventTrigger (b :: BackendType) where createTableEventTrigger :: (MonadBaseControl IO m, MonadIO m, MonadError QErr m) => - ServerConfigCtx -> + SQLGenCtx -> SourceConfig b -> TableName b -> [ColumnInfo b] -> diff --git a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs index 3947ce6bc0d..f9e0b1900d6 100644 --- a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs +++ b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs @@ -161,7 +161,6 @@ import Hasura.SQL.Backend import Hasura.SQL.BackendMap (BackendMap) import Hasura.SQL.BackendMap qualified as BackendMap import Hasura.SQL.Tag (HasTag (backendTag), reify) -import Hasura.Server.Types import Hasura.Session import Hasura.Tracing (TraceT) import Language.GraphQL.Draft.Syntax qualified as G @@ -651,7 +650,7 @@ instance (TableInfoRM b m) => TableInfoRM b (TraceT m) where newtype TableCacheRT b m a = TableCacheRT {runTableCacheRT :: TableCache b -> m a} deriving - (Functor, Applicative, Monad, MonadIO, MonadError e, MonadState s, MonadWriter w, Postgres.MonadTx, UserInfoM, HasServerConfigCtx) + (Functor, Applicative, Monad, MonadIO, MonadError e, MonadState s, MonadWriter w, Postgres.MonadTx, UserInfoM) via (ReaderT (TableCache b) m) deriving (MonadTrans) via (ReaderT (TableCache b)) diff --git a/server/src-lib/Hasura/RQL/Types/SchemaCache/Build.hs b/server/src-lib/Hasura/RQL/Types/SchemaCache/Build.hs index b871ee34543..1bf8b2f2247 100644 --- a/server/src-lib/Hasura/RQL/Types/SchemaCache/Build.hs +++ b/server/src-lib/Hasura/RQL/Types/SchemaCache/Build.hs @@ -59,7 +59,7 @@ import Hasura.RQL.Types.Metadata.Object import Hasura.RQL.Types.QueryCollection import Hasura.RQL.Types.SchemaCache import Hasura.RemoteSchema.Metadata (RemoteSchemaName) -import Hasura.Server.Types +import Hasura.Server.Init.FeatureFlag (HasFeatureFlagChecker) import Hasura.Services.Network import Hasura.Session import Hasura.Tracing (TraceT) @@ -239,7 +239,8 @@ newtype MetadataT m a = MetadataT {unMetadataT :: StateT Metadata m a} Tracing.MonadTrace, MonadBase b, MonadBaseControl b, - ProvidesNetwork + ProvidesNetwork, + HasFeatureFlagChecker ) deriving anyclass (MonadQueryTags) @@ -250,9 +251,6 @@ instance (Monad m) => MetadataM (MetadataT m) where instance (UserInfoM m) => UserInfoM (MetadataT m) where askUserInfo = lift askUserInfo -instance HasServerConfigCtx m => HasServerConfigCtx (MetadataT m) where - askServerConfigCtx = lift askServerConfigCtx - -- | @runMetadataT@ puts a stateful metadata in scope. @MetadataDefaults@ is -- provided so that it can be considered from the --metadataDefaults arguments. 
runMetadataT :: Metadata -> MetadataDefaults -> MetadataT m a -> m (a, Metadata) diff --git a/server/src-lib/Hasura/RemoteSchema/MetadataAPI/Permission.hs b/server/src-lib/Hasura/RemoteSchema/MetadataAPI/Permission.hs index 818705892ae..b1f5a8fd73d 100644 --- a/server/src-lib/Hasura/RemoteSchema/MetadataAPI/Permission.hs +++ b/server/src-lib/Hasura/RemoteSchema/MetadataAPI/Permission.hs @@ -23,7 +23,6 @@ import Hasura.RQL.Types.SchemaCache import Hasura.RQL.Types.SchemaCache.Build import Hasura.RemoteSchema.Metadata import Hasura.RemoteSchema.SchemaCache.Permission -import Hasura.Server.Types import Hasura.Session data AddRemoteSchemaPermission = AddRemoteSchemaPermission @@ -51,14 +50,13 @@ $(J.deriveJSON hasuraJSON ''DropRemoteSchemaPermissions) runAddRemoteSchemaPermissions :: ( QErrM m, CacheRWM m, - HasServerConfigCtx m, MetadataM m ) => + Options.RemoteSchemaPermissions -> AddRemoteSchemaPermission -> m EncJSON -runAddRemoteSchemaPermissions q = do +runAddRemoteSchemaPermissions remoteSchemaPermsCtx q = do metadata <- getMetadata - remoteSchemaPermsCtx <- _sccRemoteSchemaPermsCtx <$> askServerConfigCtx unless (remoteSchemaPermsCtx == Options.EnableRemoteSchemaPermissions) $ do throw400 ConstraintViolation $ "remote schema permissions can only be added when " diff --git a/server/src-lib/Hasura/Server/API/Metadata.hs b/server/src-lib/Hasura/Server/API/Metadata.hs index 32e451d56e4..9c613998374 100644 --- a/server/src-lib/Hasura/Server/API/Metadata.hs +++ b/server/src-lib/Hasura/Server/API/Metadata.hs @@ -23,6 +23,7 @@ import Hasura.Base.Error import Hasura.CustomReturnType.API qualified as CustomReturnType import Hasura.EncJSON import Hasura.Function.API qualified as Functions +import Hasura.GraphQL.Schema.Options qualified as Options import Hasura.Logging qualified as L import Hasura.LogicalModel.API qualified as LogicalModels import Hasura.Metadata.Class @@ -50,6 +51,7 @@ import Hasura.RQL.DDL.Relationship.Suggest import Hasura.RQL.DDL.RemoteRelationship import Hasura.RQL.DDL.ScheduledTrigger import Hasura.RQL.DDL.Schema +import Hasura.RQL.DDL.Schema.Cache.Config import Hasura.RQL.DDL.Schema.Source import Hasura.RQL.DDL.SourceKinds import Hasura.RQL.DDL.Webhook.Transform.Validation @@ -79,6 +81,7 @@ import Hasura.SQL.AnyBackend import Hasura.SQL.Backend import Hasura.Server.API.Backend import Hasura.Server.API.Instances () +import Hasura.Server.Init.FeatureFlag (HasFeatureFlagChecker) import Hasura.Server.Logging (SchemaSyncLog (..), SchemaSyncThreadType (TTMetadataApi)) import Hasura.Server.Types import Hasura.Server.Utils (APIVersion (..)) @@ -388,6 +391,8 @@ runMetadataQuery :: MonadError QErr m, MonadBaseControl IO m, HasAppEnv m, + HasCacheStaticConfig m, + HasFeatureFlagChecker m, Tracing.MonadTrace m, MonadMetadataStorage m, MonadResolveSource m, @@ -401,7 +406,7 @@ runMetadataQuery :: RQLMetadata -> m (EncJSON, RebuildableSchemaCache) runMetadataQuery appContext schemaCache RQLMetadata {..} = do - appEnv@AppEnv {..} <- askAppEnv + AppEnv {..} <- askAppEnv let logger = _lsLogger appEnvLoggers MetadataWithResourceVersion metadata currentResourceVersion <- Tracing.newSpan "fetchMetadata" $ liftEitherM fetchMetadata let exportsMetadata = \case @@ -427,13 +432,18 @@ runMetadataQuery appContext schemaCache RQLMetadata {..} = do if (exportsMetadata _rqlMetadata || queryModifiesMetadata _rqlMetadata) then emptyMetadataDefaults else acMetadataDefaults appContext - serverConfigCtx = buildServerConfigCtx appEnv appContext + dynamicConfig = buildCacheDynamicConfig appContext ((r, 
modMetadata), modSchemaCache, cacheInvalidations) <- - runMetadataQueryM (acEnvironment appContext) currentResourceVersion _rqlMetadata + runMetadataQueryM + (acEnvironment appContext) + appEnvCheckFeatureFlag + (acRemoteSchemaPermsCtx appContext) + currentResourceVersion + _rqlMetadata -- TODO: remove this straight runReaderT that provides no actual new info & flip runReaderT logger & runMetadataT metadata metadataDefaults - & runCacheRWT serverConfigCtx schemaCache + & runCacheRWT dynamicConfig schemaCache -- set modified metadata in storage if queryModifiesMetadata _rqlMetadata then case (appEnvEnableMaintenanceMode, appEnvEnableReadOnlyMode) of @@ -464,7 +474,7 @@ runMetadataQuery appContext schemaCache RQLMetadata {..} = do (_, modSchemaCache', _) <- Tracing.newSpan "setMetadataResourceVersionInSchemaCache" $ setMetadataResourceVersionInSchemaCache newResourceVersion - & runCacheRWT serverConfigCtx modSchemaCache + & runCacheRWT dynamicConfig modSchemaCache pure (r, modSchemaCache') (MaintenanceModeEnabled (), ReadOnlyModeDisabled) -> @@ -607,25 +617,27 @@ runMetadataQueryM :: UserInfoM m, MetadataM m, MonadMetadataStorage m, - HasServerConfigCtx m, MonadReader r m, Has (L.Logger L.Hasura) r, MonadError QErr m, MonadEventLogCleanup m, ProvidesHasuraServices m, - MonadGetApiTimeLimit m + MonadGetApiTimeLimit m, + HasFeatureFlagChecker m ) => Env.Environment -> + CheckFeatureFlag -> + Options.RemoteSchemaPermissions -> MetadataResourceVersion -> RQLMetadataRequest -> m EncJSON -runMetadataQueryM env currentResourceVersion = +runMetadataQueryM env checkFeatureFlag remoteSchemaPerms currentResourceVersion = withPathK "args" . \case -- NOTE: This is a good place to install tracing, since it's involved in -- the recursive case via "bulk": RMV1 q -> Tracing.newSpan ("v1 " <> T.pack (constrName q)) $ - runMetadataQueryV1M env currentResourceVersion q + runMetadataQueryV1M env checkFeatureFlag remoteSchemaPerms currentResourceVersion q RMV2 q -> Tracing.newSpan ("v2 " <> T.pack (constrName q)) $ runMetadataQueryV2M currentResourceVersion q @@ -639,19 +651,21 @@ runMetadataQueryV1M :: UserInfoM m, MetadataM m, MonadMetadataStorage m, - HasServerConfigCtx m, MonadReader r m, Has (L.Logger L.Hasura) r, MonadError QErr m, MonadEventLogCleanup m, ProvidesHasuraServices m, - MonadGetApiTimeLimit m + MonadGetApiTimeLimit m, + HasFeatureFlagChecker m ) => Env.Environment -> + CheckFeatureFlag -> + Options.RemoteSchemaPermissions -> MetadataResourceVersion -> RQLMetadataV1 -> m EncJSON -runMetadataQueryV1M env currentResourceVersion = \case +runMetadataQueryV1M env checkFeatureFlag remoteSchemaPerms currentResourceVersion = \case RMAddSource q -> dispatchMetadata (runAddSource env) q RMDropSource q -> runDropSource q RMRenameSource q -> runRenameSource q @@ -718,7 +732,7 @@ runMetadataQueryV1M env currentResourceVersion = \case RMRemoveRemoteSchema q -> runRemoveRemoteSchema q RMReloadRemoteSchema q -> runReloadRemoteSchema q RMIntrospectRemoteSchema q -> runIntrospectRemoteSchema q - RMAddRemoteSchemaPermissions q -> runAddRemoteSchemaPermissions q + RMAddRemoteSchemaPermissions q -> runAddRemoteSchemaPermissions remoteSchemaPerms q RMDropRemoteSchemaPermissions q -> runDropRemoteSchemaPermissions q RMCreateRemoteSchemaRemoteRelationship q -> runCreateRemoteSchemaRemoteRelationship q RMUpdateRemoteSchemaRemoteRelationship q -> runUpdateRemoteSchemaRemoteRelationship q @@ -790,8 +804,8 @@ runMetadataQueryV1M env currentResourceVersion = \case RMSetQueryTagsConfig q -> runSetQueryTagsConfig q 
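Where runMetadataQuery previously assembled a full ServerConfigCtx (buildServerConfigCtx appEnv appContext) before rebuilding the schema cache, it now derives only the dynamic subset from the AppContext and receives the static subset through the HasCacheStaticConfig constraint. A minimal sketch of that shape; rebuildCacheWith is a hypothetical helper whose constraints (those of runMetadataQuery, HasCacheStaticConfig m among them) are left implicit.

rebuildCacheWith appContext schemaCache action = do
  let dynamicConfig = buildCacheDynamicConfig appContext
  -- the static half (maintenance mode, eventing mode, read-only mode, the
  -- feature-flag checker) no longer travels here; it is supplied through the
  -- HasCacheStaticConfig instance of the ambient monad
  runCacheRWT dynamicConfig schemaCache action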
RMSetOpenTelemetryConfig q -> runSetOpenTelemetryConfig q RMSetOpenTelemetryStatus q -> runSetOpenTelemetryStatus q - RMGetFeatureFlag q -> runGetFeatureFlag q - RMBulk q -> encJFromList <$> indexedMapM (runMetadataQueryM env currentResourceVersion) q + RMGetFeatureFlag q -> runGetFeatureFlag checkFeatureFlag q + RMBulk q -> encJFromList <$> indexedMapM (runMetadataQueryM env checkFeatureFlag remoteSchemaPerms currentResourceVersion) q where dispatch :: (forall b. Backend b => i b -> a) -> diff --git a/server/src-lib/Hasura/Server/API/Query.hs b/server/src-lib/Hasura/Server/API/Query.hs index e7757633f8a..a030d12ec87 100644 --- a/server/src-lib/Hasura/Server/API/Query.hs +++ b/server/src-lib/Hasura/Server/API/Query.hs @@ -38,6 +38,7 @@ import Hasura.RQL.DDL.Relationship.Rename import Hasura.RQL.DDL.RemoteRelationship import Hasura.RQL.DDL.ScheduledTrigger import Hasura.RQL.DDL.Schema +import Hasura.RQL.DDL.Schema.Cache.Config import Hasura.RQL.DML.Count import Hasura.RQL.DML.Delete import Hasura.RQL.DML.Insert @@ -180,6 +181,7 @@ runQuery :: ( MonadIO m, MonadError QErr m, HasAppEnv m, + HasCacheStaticConfig m, Tracing.MonadTrace m, MonadBaseControl IO m, MonadMetadataStorage m, @@ -195,7 +197,7 @@ runQuery :: RQLQuery -> m (EncJSON, RebuildableSchemaCache) runQuery appContext sc query = do - appEnv@AppEnv {..} <- askAppEnv + AppEnv {..} <- askAppEnv let logger = _lsLogger appEnvLoggers when ((appEnvEnableReadOnlyMode == ReadOnlyModeEnabled) && queryModifiesUserDB query) $ throw400 NotSupported "Cannot run write queries when read-only mode is enabled" @@ -207,15 +209,15 @@ runQuery appContext sc query = do if (exportsMetadata query) then emptyMetadataDefaults else acMetadataDefaults appContext - serverConfigCtx = buildServerConfigCtx appEnv appContext + dynamicConfig = buildCacheDynamicConfig appContext MetadataWithResourceVersion metadata currentResourceVersion <- liftEitherM fetchMetadata ((result, updatedMetadata), updatedCache, invalidations) <- - runQueryM (acEnvironment appContext) query + runQueryM (acEnvironment appContext) (acSQLGenCtx appContext) query -- TODO: remove this straight runReaderT that provides no actual new info & flip runReaderT logger & runMetadataT metadata metadataDefaults - & runCacheRWT serverConfigCtx sc + & runCacheRWT dynamicConfig sc when (queryModifiesSchemaCache query) $ do case appEnvEnableMaintenanceMode of MaintenanceModeDisabled -> do @@ -391,7 +393,6 @@ runQueryM :: UserInfoM m, MonadBaseControl IO m, MonadIO m, - HasServerConfigCtx m, Tracing.MonadTrace m, MetadataM m, MonadMetadataStorage m, @@ -404,9 +405,10 @@ runQueryM :: MonadGetApiTimeLimit m ) => Env.Environment -> + SQLGenCtx -> RQLQuery -> m EncJSON -runQueryM env rq = withPathK "args" $ case rq of +runQueryM env sqlGen rq = withPathK "args" $ case rq of RQV1 q -> runQueryV1M q RQV2 q -> runQueryV2M q where @@ -436,10 +438,10 @@ runQueryM env rq = withPathK "args" $ case rq of RQSetPermissionComment q -> runSetPermComment q RQGetInconsistentMetadata q -> runGetInconsistentMetadata q RQDropInconsistentMetadata q -> runDropInconsistentMetadata q - RQInsert q -> runInsert q - RQSelect q -> runSelect q - RQUpdate q -> runUpdate q - RQDelete q -> runDelete q + RQInsert q -> runInsert sqlGen q + RQSelect q -> runSelect sqlGen q + RQUpdate q -> runUpdate sqlGen q + RQDelete q -> runDelete sqlGen q RQCount q -> runCount q RQAddRemoteSchema q -> runAddRemoteSchema env q RQUpdateRemoteSchema q -> runUpdateRemoteSchema env q @@ -475,9 +477,9 @@ runQueryM env rq = withPathK "args" $ case rq of 
RQCreateRestEndpoint q -> runCreateEndpoint q RQDropRestEndpoint q -> runDropEndpoint q RQDumpInternalState q -> runDumpInternalState q - RQRunSql q -> runRunSQL @'Vanilla q + RQRunSql q -> runRunSQL @'Vanilla sqlGen q RQSetCustomTypes q -> runSetCustomTypes q - RQBulk qs -> encJFromList <$> indexedMapM (runQueryM env) qs + RQBulk qs -> encJFromList <$> indexedMapM (runQueryM env sqlGen) qs runQueryV2M = \case RQV2TrackTable q -> runTrackTableV2Q q diff --git a/server/src-lib/Hasura/Server/API/V2Query.hs b/server/src-lib/Hasura/Server/API/V2Query.hs index 25b54008f7c..88c391c22a7 100644 --- a/server/src-lib/Hasura/Server/API/V2Query.hs +++ b/server/src-lib/Hasura/Server/API/V2Query.hs @@ -13,7 +13,6 @@ import Control.Lens (preview, _Right) import Control.Monad.Trans.Control (MonadBaseControl) import Data.Aeson import Data.Aeson.Types (Parser) -import Data.Environment qualified as Env import Data.Text qualified as T import GHC.Generics.Extended (constrName) import Hasura.App.State @@ -29,6 +28,7 @@ import Hasura.Metadata.Class import Hasura.Prelude import Hasura.QueryTags import Hasura.RQL.DDL.Schema +import Hasura.RQL.DDL.Schema.Cache.Config import Hasura.RQL.DML.Count import Hasura.RQL.DML.Delete import Hasura.RQL.DML.Insert @@ -41,6 +41,7 @@ import Hasura.RQL.DML.Types UpdateQuery, ) import Hasura.RQL.DML.Update +import Hasura.RQL.Types.Common import Hasura.RQL.Types.Metadata import Hasura.RQL.Types.SchemaCache (MetadataWithResourceVersion (MetadataWithResourceVersion)) import Hasura.RQL.Types.SchemaCache.Build @@ -107,6 +108,7 @@ runQuery :: MonadBaseControl IO m, MonadError QErr m, HasAppEnv m, + HasCacheStaticConfig m, Tracing.MonadTrace m, MonadMetadataStorage m, MonadResolveSource m, @@ -119,17 +121,17 @@ runQuery :: RQLQuery -> m (EncJSON, RebuildableSchemaCache) runQuery appContext schemaCache rqlQuery = do - appEnv@AppEnv {..} <- askAppEnv + AppEnv {..} <- askAppEnv when ((appEnvEnableReadOnlyMode == ReadOnlyModeEnabled) && queryModifiesUserDB rqlQuery) $ throw400 NotSupported "Cannot run write queries when read-only mode is enabled" - let serverConfigCtx = buildServerConfigCtx appEnv appContext + let dynamicConfig = buildCacheDynamicConfig appContext MetadataWithResourceVersion metadata currentResourceVersion <- Tracing.newSpan "fetchMetadata" $ liftEitherM fetchMetadata ((result, updatedMetadata), updatedCache, invalidations) <- - runQueryM (acEnvironment appContext) rqlQuery + runQueryM (acSQLGenCtx appContext) rqlQuery -- We can use defaults here unconditionally, since there is no MD export function in V2Query & runMetadataT metadata (acMetadataDefaults appContext) - & runCacheRWT serverConfigCtx schemaCache + & runCacheRWT dynamicConfig schemaCache when (queryModifiesSchema rqlQuery) $ do case appEnvEnableMaintenanceMode of MaintenanceModeDisabled -> do @@ -170,33 +172,32 @@ runQueryM :: MonadBaseControl IO m, UserInfoM m, CacheRWM m, - HasServerConfigCtx m, Tracing.MonadTrace m, MetadataM m, MonadQueryTags m ) => - Env.Environment -> + SQLGenCtx -> RQLQuery -> m EncJSON -runQueryM env rq = Tracing.newSpan (T.pack $ constrName rq) $ case rq of - RQInsert q -> runInsert q - RQSelect q -> runSelect q - RQUpdate q -> runUpdate q - RQDelete q -> runDelete q +runQueryM sqlGen rq = Tracing.newSpan (T.pack $ constrName rq) $ case rq of + RQInsert q -> runInsert sqlGen q + RQSelect q -> runSelect sqlGen q + RQUpdate q -> runUpdate sqlGen q + RQDelete q -> runDelete sqlGen q RQCount q -> runCount q - RQRunSql q -> Postgres.runRunSQL @'Vanilla q + RQRunSql q -> Postgres.runRunSQL 
@'Vanilla sqlGen q RQMssqlRunSql q -> MSSQL.runSQL q RQMysqlRunSql q -> MySQL.runSQL q - RQCitusRunSql q -> Postgres.runRunSQL @'Citus q - RQCockroachRunSql q -> Postgres.runRunSQL @'Cockroach q + RQCitusRunSql q -> Postgres.runRunSQL @'Citus sqlGen q + RQCockroachRunSql q -> Postgres.runRunSQL @'Cockroach sqlGen q RQBigqueryRunSql q -> BigQuery.runSQL q RQDataConnectorRunSql t q -> DataConnector.runSQL t q RQBigqueryDatabaseInspection q -> BigQuery.runDatabaseInspection q - RQBulk l -> encJFromList <$> indexedMapM (runQueryM env) l + RQBulk l -> encJFromList <$> indexedMapM (runQueryM sqlGen) l RQConcurrentBulk l -> do when (queryModifiesSchema rq) $ throw500 "Only read-only queries are allowed in a concurrent_bulk" - encJFromList <$> mapConcurrently (runQueryM env) l + encJFromList <$> mapConcurrently (runQueryM sqlGen) l queryModifiesUserDB :: RQLQuery -> Bool queryModifiesUserDB = \case diff --git a/server/src-lib/Hasura/Server/App.hs b/server/src-lib/Hasura/Server/App.hs index 86ce307ab7a..e85431ca7ac 100644 --- a/server/src-lib/Hasura/Server/App.hs +++ b/server/src-lib/Hasura/Server/App.hs @@ -68,6 +68,7 @@ import Hasura.QueryTags import Hasura.RQL.DDL.ApiLimit (MonadGetApiTimeLimit) import Hasura.RQL.DDL.EventTrigger (MonadEventLogCleanup) import Hasura.RQL.DDL.Schema +import Hasura.RQL.DDL.Schema.Cache.Config import Hasura.RQL.Types.Endpoint as EP import Hasura.RQL.Types.SchemaCache import Hasura.RQL.Types.Source @@ -134,6 +135,8 @@ newtype Handler m a = Handler (ReaderT HandlerCtx (ExceptT QErr m) a) MonadError QErr, MonadTrace, HasAppEnv, + HasCacheStaticConfig, + HasFeatureFlagChecker, HasResourceLimits, MonadResolveSource, E.MonadGQLExecutionCheck, @@ -423,6 +426,7 @@ v1QueryHandler :: MonadMetadataStorage m, MonadResolveSource m, HasAppEnv m, + HasCacheStaticConfig m, MonadQueryTags m, MonadEventLogCleanup m, ProvidesNetwork m, @@ -457,6 +461,8 @@ v1MetadataHandler :: MonadMetadataApiAuthorization m, MonadEventLogCleanup m, HasAppEnv m, + HasCacheStaticConfig m, + HasFeatureFlagChecker m, ProvidesNetwork m, MonadGetApiTimeLimit m, UserInfoM m @@ -486,6 +492,7 @@ v2QueryHandler :: MonadMetadataStorage m, MonadResolveSource m, HasAppEnv m, + HasCacheStaticConfig m, MonadQueryTags m, ProvidesNetwork m, UserInfoM m @@ -713,6 +720,8 @@ mkWaiApp :: MonadVersionAPIWithExtraData m, HttpLog m, HasAppEnv m, + HasCacheStaticConfig m, + HasFeatureFlagChecker m, UserAuthentication m, MonadMetadataApiAuthorization m, E.MonadGQLExecutionCheck m, @@ -759,6 +768,8 @@ httpApp :: MonadVersionAPIWithExtraData m, HttpLog m, HasAppEnv m, + HasCacheStaticConfig m, + HasFeatureFlagChecker m, UserAuthentication m, MonadMetadataApiAuthorization m, E.MonadGQLExecutionCheck m, diff --git a/server/src-lib/Hasura/Server/Init/FeatureFlag.hs b/server/src-lib/Hasura/Server/Init/FeatureFlag.hs index 75692308fb3..07dcd3994c5 100644 --- a/server/src-lib/Hasura/Server/Init/FeatureFlag.hs +++ b/server/src-lib/Hasura/Server/Init/FeatureFlag.hs @@ -7,6 +7,7 @@ module Hasura.Server.Init.FeatureFlag checkFeatureFlag, Identifier (..), FeatureFlags (..), + HasFeatureFlagChecker (..), featureFlags, logicalModelInterface, ) @@ -59,6 +60,20 @@ featureFlags = -------------------------------------------------------------------------------- +class Monad m => HasFeatureFlagChecker m where + checkFlag :: FeatureFlag -> m Bool + +instance HasFeatureFlagChecker m => HasFeatureFlagChecker (ReaderT r m) where + checkFlag = lift . 
checkFlag + +instance HasFeatureFlagChecker m => HasFeatureFlagChecker (ExceptT e m) where + checkFlag = lift . checkFlag + +instance HasFeatureFlagChecker m => HasFeatureFlagChecker (StateT s m) where + checkFlag = lift . checkFlag + +-------------------------------------------------------------------------------- + testFlag :: FeatureFlag testFlag = FeatureFlag diff --git a/server/src-lib/Hasura/Server/SchemaUpdate.hs b/server/src-lib/Hasura/Server/SchemaUpdate.hs index 90ec44743e3..4d7f5e41ff1 100644 --- a/server/src-lib/Hasura/Server/SchemaUpdate.hs +++ b/server/src-lib/Hasura/Server/SchemaUpdate.hs @@ -27,6 +27,7 @@ import Hasura.Logging import Hasura.Metadata.Class import Hasura.Prelude import Hasura.RQL.DDL.Schema (runCacheRWT) +import Hasura.RQL.DDL.Schema.Cache.Config import Hasura.RQL.DDL.Schema.Catalog import Hasura.RQL.Types.SchemaCache import Hasura.RQL.Types.SchemaCache.Build @@ -139,6 +140,7 @@ startSchemaSyncListenerThread logger pool instanceId interval metaVersionRef = d startSchemaSyncProcessorThread :: ( C.ForkableMonadIO m, HasAppEnv m, + HasCacheStaticConfig m, MonadMetadataStorage m, MonadResolveSource m, ProvidesNetwork m @@ -244,6 +246,7 @@ processor :: forall m void impl. ( C.ForkableMonadIO m, HasAppEnv m, + HasCacheStaticConfig m, MonadMetadataStorage m, MonadResolveSource m, ProvidesNetwork m @@ -263,6 +266,7 @@ refreshSchemaCache :: ( MonadIO m, MonadBaseControl IO m, HasAppEnv m, + HasCacheStaticConfig m, MonadMetadataStorage m, MonadResolveSource m, ProvidesNetwork m @@ -277,15 +281,15 @@ refreshSchemaCache appStateRef threadType logTVar = do - appEnv@AppEnv {..} <- askAppEnv + AppEnv {..} <- askAppEnv let logger = _lsLogger appEnvLoggers respErr <- runExceptT $ withSchemaCacheUpdate appStateRef logger (Just logTVar) $ do rebuildableCache <- liftIO $ fst <$> getRebuildableSchemaCacheWithVersion appStateRef appContext <- liftIO $ getAppContext appStateRef - let serverConfigCtx = buildServerConfigCtx appEnv appContext + let dynamicConfig = buildCacheDynamicConfig appContext (msg, cache, _) <- - runCacheRWT serverConfigCtx rebuildableCache $ do + runCacheRWT dynamicConfig rebuildableCache $ do schemaCache <- askSchemaCache let engineResourceVersion = scMetadataResourceVersion schemaCache unless (engineResourceVersion == resourceVersion) $ do diff --git a/server/src-lib/Hasura/Server/Types.hs b/server/src-lib/Hasura/Server/Types.hs index 72b9cee38a1..f2b301f8023 100644 --- a/server/src-lib/Hasura/Server/Types.hs +++ b/server/src-lib/Hasura/Server/Types.hs @@ -13,10 +13,7 @@ module Hasura.Server.Types PGVersion (PGVersion), pgToDbVersion, RequestId (..), - ServerConfigCtx (..), - HasServerConfigCtx (..), CheckFeatureFlag (..), - askMetadataDefaults, getRequestId, ApolloFederationStatus (..), isApolloFederationEnabled, @@ -24,14 +21,9 @@ module Hasura.Server.Types where import Data.Aeson -import Data.HashSet qualified as Set import Data.Text (intercalate, unpack) import Database.PG.Query qualified as PG -import Hasura.GraphQL.Schema.NamingCase -import Hasura.GraphQL.Schema.Options qualified as Options import Hasura.Prelude hiding (intercalate) -import Hasura.RQL.Types.Common -import Hasura.RQL.Types.Metadata (MetadataDefaults) import Hasura.Server.Init.FeatureFlag (CheckFeatureFlag (..)) import Hasura.Server.Utils import Network.HTTP.Types qualified as HTTP @@ -141,69 +133,6 @@ data ReadOnlyMode = ReadOnlyModeEnabled | ReadOnlyModeDisabled data EventingMode = EventingEnabled | EventingDisabled deriving (Show, Eq) --- | This type represents an aggregate of 
different configuration options used --- throughout the engine. The fields are the union of a subset of 'AppEnv' and a --- subset of 'AppContext'. --- --- This type should be considered as deprecated: avoid using it directly when --- you can use 'AppEnv' or 'AppContext', and avoid using the entirety of it when --- you only need a subset of the fields. Also avoid adding new fields if --- possible, but if you do so, make sure to adjust the @Eq@ instance --- accordingly. -data ServerConfigCtx = ServerConfigCtx - { _sccFunctionPermsCtx :: Options.InferFunctionPermissions, - _sccRemoteSchemaPermsCtx :: Options.RemoteSchemaPermissions, - _sccSQLGenCtx :: SQLGenCtx, - _sccMaintenanceMode :: MaintenanceMode (), - _sccExperimentalFeatures :: Set.HashSet ExperimentalFeature, - _sccEventingMode :: EventingMode, - _sccReadOnlyMode :: ReadOnlyMode, - -- | stores global default naming convention - _sccDefaultNamingConvention :: NamingCase, - _sccMetadataDefaults :: MetadataDefaults, - _sccCheckFeatureFlag :: CheckFeatureFlag, - _sccApolloFederationStatus :: ApolloFederationStatus - } - --- We are currently using the entire 'ServerConfigCtx' as an input to the schema --- cache build, and it therefore requires an 'Eq' instance. However, only a few --- fields will change over time: those coming from the 'AppContext', and not --- those coming from the 'AppEnv'. Consequently, this instance only checks the --- relevant fields. --- --- The way to fix this will be to use a smaller type as the input to the schema --- build, such as 'AppContext' (or, rather, a relevant subset), on which a --- "correct" @Eq@ instance can be defined. -instance Eq ServerConfigCtx where - (==) = (==) `on` extractDynamicFields - where - extractDynamicFields ServerConfigCtx {..} = - ( _sccFunctionPermsCtx, - _sccRemoteSchemaPermsCtx, - _sccSQLGenCtx, - _sccExperimentalFeatures, - _sccDefaultNamingConvention, - _sccMetadataDefaults, - _sccApolloFederationStatus - ) - -askMetadataDefaults :: HasServerConfigCtx m => m MetadataDefaults -askMetadataDefaults = do - ServerConfigCtx {_sccMetadataDefaults} <- askServerConfigCtx - pure _sccMetadataDefaults - -class (Monad m) => HasServerConfigCtx m where - askServerConfigCtx :: m ServerConfigCtx - -instance HasServerConfigCtx m => HasServerConfigCtx (ReaderT r m) where - askServerConfigCtx = lift askServerConfigCtx - -instance HasServerConfigCtx m => HasServerConfigCtx (ExceptT e m) where - askServerConfigCtx = lift askServerConfigCtx - -instance HasServerConfigCtx m => HasServerConfigCtx (StateT s m) where - askServerConfigCtx = lift askServerConfigCtx - -- | Whether or not to enable apollo federation fields. 
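Feature-flag lookups that previously went through the CheckFeatureFlag stored in ServerConfigCtx are now phrased against the HasFeatureFlagChecker class added above. A small usage sketch, assuming Hasura.Prelude is in scope; whenFlagEnabled and someExperimentalAction are hypothetical, while checkFlag and testFlag are defined above.

whenFlagEnabled :: (HasFeatureFlagChecker m) => FeatureFlag -> m () -> m ()
whenFlagEnabled flag action = do
  enabled <- checkFlag flag
  when enabled action

-- e.g. gating an experimental code path on the flag defined for tests:
--   whenFlagEnabled testFlag someExperimentalAction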
data ApolloFederationStatus = ApolloFederationEnabled | ApolloFederationDisabled deriving stock (Show, Eq, Ord, Generic) diff --git a/server/test-postgres/Main.hs b/server/test-postgres/Main.hs index 661a998d822..f3f0d5a830f 100644 --- a/server/test-postgres/Main.hs +++ b/server/test-postgres/Main.hs @@ -31,13 +31,13 @@ import Hasura.Logging import Hasura.Prelude import Hasura.RQL.DDL.Schema.Cache import Hasura.RQL.DDL.Schema.Cache.Common +import Hasura.RQL.DDL.Schema.Cache.Config import Hasura.RQL.Types.Common import Hasura.RQL.Types.Metadata (emptyMetadataDefaults) import Hasura.RQL.Types.ResizePool import Hasura.RQL.Types.SchemaCache import Hasura.RQL.Types.SchemaCache.Build import Hasura.Server.Init -import Hasura.Server.Init.FeatureFlag as FF import Hasura.Server.Metrics (ServerMetricsSpec, createServerMetrics) import Hasura.Server.Migrate import Hasura.Server.Prometheus (makeDummyPrometheusMetrics) @@ -107,20 +107,22 @@ main = do Options.EnableBigQueryStringNumericInput maintenanceMode = MaintenanceModeDisabled readOnlyMode = ReadOnlyModeDisabled - serverConfigCtx = - ServerConfigCtx + staticConfig = + CacheStaticConfig + maintenanceMode + EventingEnabled + readOnlyMode + (CheckFeatureFlag $ checkFeatureFlag mempty) + dynamicConfig = + CacheDynamicConfig Options.InferFunctionPermissions Options.DisableRemoteSchemaPermissions sqlGenCtx - maintenanceMode mempty - EventingEnabled - readOnlyMode (_default defaultNamingConventionOption) emptyMetadataDefaults - (CheckFeatureFlag $ FF.checkFeatureFlag mempty) ApolloFederationDisabled - cacheBuildParams = CacheBuildParams httpManager (mkPgSourceResolver print) mkMSSQLSourceResolver + cacheBuildParams = CacheBuildParams httpManager (mkPgSourceResolver print) mkMSSQLSourceResolver staticConfig (_appInit, appEnv) <- lowerManagedT $ @@ -145,11 +147,11 @@ main = do snd <$> (liftEitherM . runExceptT . _pecRunTx pgContext (PGExecCtxInfo (Tx PG.ReadWrite Nothing) InternalRawQuery)) (migrateCatalog (Just sourceConfig) defaultPostgresExtensionsSchema maintenanceMode =<< liftIO getCurrentTime) - schemaCache <- runCacheBuild cacheBuildParams $ buildRebuildableSchemaCache logger envMap metadataWithVersion serverConfigCtx + schemaCache <- runCacheBuild cacheBuildParams $ buildRebuildableSchemaCache logger envMap metadataWithVersion dynamicConfig pure (_mwrvMetadata metadataWithVersion, schemaCache) cacheRef <- newMVar schemaCache - pure $ NT (run . flip MigrateSuite.runCacheRefT (serverConfigCtx, cacheRef) . fmap fst . runMetadataT metadata emptyMetadataDefaults) + pure $ NT (run . flip MigrateSuite.runCacheRefT (dynamicConfig, cacheRef) . fmap fst . 
runMetadataT metadata emptyMetadataDefaults) streamingSubscriptionSuite <- StreamingSubscriptionSuite.buildStreamingSubscriptionSuite eventTriggerLogCleanupSuite <- EventTriggerCleanupSuite.buildEventTriggerCleanupSuite diff --git a/server/test-postgres/Test/Hasura/Server/MigrateSuite.hs b/server/test-postgres/Test/Hasura/Server/MigrateSuite.hs index 608d1229140..13d42cb8b2f 100644 --- a/server/test-postgres/Test/Hasura/Server/MigrateSuite.hs +++ b/server/test-postgres/Test/Hasura/Server/MigrateSuite.hs @@ -22,6 +22,7 @@ import Hasura.RQL.DDL.EventTrigger (MonadEventLogCleanup (..)) import Hasura.RQL.DDL.Metadata (ClearMetadata (..), runClearMetadata) import Hasura.RQL.DDL.Schema import Hasura.RQL.DDL.Schema.Cache.Common +import Hasura.RQL.DDL.Schema.Cache.Config import Hasura.RQL.DDL.Schema.LegacyCatalog (recreateSystemMetadata) import Hasura.RQL.Types.Common import Hasura.RQL.Types.SchemaCache @@ -38,7 +39,7 @@ import Test.Hspec.Expectations.Lifted -- -- NOTE: downgrade test disabled for now (see #5273) -newtype CacheRefT m a = CacheRefT {runCacheRefT :: (ServerConfigCtx, MVar RebuildableSchemaCache) -> m a} +newtype CacheRefT m a = CacheRefT {runCacheRefT :: (CacheDynamicConfig, MVar RebuildableSchemaCache) -> m a} deriving ( Functor, Applicative, @@ -47,18 +48,16 @@ newtype CacheRefT m a = CacheRefT {runCacheRefT :: (ServerConfigCtx, MVar Rebuil MonadError e, MonadBase b, MonadBaseControl b, - MonadReader (ServerConfigCtx, MVar RebuildableSchemaCache), + MonadReader (CacheDynamicConfig, MVar RebuildableSchemaCache), MonadTx, + HasCacheStaticConfig, UserInfoM, MonadMetadataStorage, MonadResolveSource, ProvidesNetwork, MonadGetApiTimeLimit ) - via (ReaderT (ServerConfigCtx, MVar RebuildableSchemaCache) m) - -instance Monad m => HasServerConfigCtx (CacheRefT m) where - askServerConfigCtx = asks fst + via (ReaderT (CacheDynamicConfig, MVar RebuildableSchemaCache) m) instance MonadTrans CacheRefT where lift = CacheRefT . 
const @@ -80,20 +79,21 @@ instance MonadBaseControl IO m, MonadError QErr m, MonadResolveSource m, - ProvidesNetwork m + ProvidesNetwork m, + HasCacheStaticConfig m ) => CacheRWM (CacheRefT m) where buildSchemaCacheWithOptions reason invalidations metadata = do - (serverConfigCtx, scVar) <- ask + (dynamicConfig, scVar) <- ask modifyMVar scVar \schemaCache -> do - ((), cache, _) <- runCacheRWT serverConfigCtx schemaCache (buildSchemaCacheWithOptions reason invalidations metadata) + ((), cache, _) <- runCacheRWT dynamicConfig schemaCache (buildSchemaCacheWithOptions reason invalidations metadata) pure (cache, ()) setMetadataResourceVersionInSchemaCache resourceVersion = do - (serverConfigCtx, scVar) <- ask + (dynamicConfig, scVar) <- ask modifyMVar scVar \schemaCache -> do - ((), cache, _) <- runCacheRWT serverConfigCtx schemaCache (setMetadataResourceVersionInSchemaCache resourceVersion) + ((), cache, _) <- runCacheRWT dynamicConfig schemaCache (setMetadataResourceVersionInSchemaCache resourceVersion) pure (cache, ()) instance Example (MetadataT (CacheRefT m) ()) where @@ -114,7 +114,8 @@ suite :: MonadMetadataStorage m, MonadEventLogCleanup m, ProvidesNetwork m, - MonadGetApiTimeLimit m + MonadGetApiTimeLimit m, + HasCacheStaticConfig m ) => PostgresConnConfiguration -> PGExecCtx -> @@ -127,9 +128,9 @@ suite srcConfig pgExecCtx pgConnInfo = do liftIO $ putStrLn $ LBS.toString $ encode $ EngineLog t logLevel logType logDetail migrateCatalogAndBuildCache env time = do - serverConfigCtx <- askServerConfigCtx + dynamicConfig <- asks fst (migrationResult, metadataWithVersion) <- runTx' pgExecCtx $ migrateCatalog (Just srcConfig) (ExtensionsSchema "public") MaintenanceModeDisabled time - (,migrationResult) <$> runCacheBuildM (buildRebuildableSchemaCache logger env metadataWithVersion serverConfigCtx) + (,migrationResult) <$> runCacheBuildM (buildRebuildableSchemaCache logger env metadataWithVersion dynamicConfig) dropAndInit env time = lift do scVar <- asks snd
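In the migration test suite, CacheRefT's reader environment now pairs the CacheDynamicConfig with the schema cache MVar, so helpers read the config with asks fst (as migrateCatalogAndBuildCache does) rather than calling askServerConfigCtx. A minimal sketch; askDynamicConfig is a hypothetical helper.

askDynamicConfig :: (Monad m) => CacheRefT m CacheDynamicConfig
askDynamicConfig = asks fst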