diff --git a/server/graphql-engine.cabal b/server/graphql-engine.cabal index fc61774d90a..f0de283b4a7 100644 --- a/server/graphql-engine.cabal +++ b/server/graphql-engine.cabal @@ -756,6 +756,8 @@ library , Hasura.Server.Migrate.Internal , Hasura.Server.Auth.JWT.Internal , Hasura.Server.Auth.JWT.Logging + , Hasura.Services + , Hasura.Services.Network , Hasura.RemoteSchema.Metadata.Base , Hasura.RemoteSchema.Metadata.Customization , Hasura.RemoteSchema.Metadata.Permission @@ -987,7 +989,6 @@ library , Hasura.Tracing.TraceId , Hasura.QueryTags , Network.HTTP.Client.Transformable - , Network.HTTP.Client.Manager , Network.HTTP.Client.DynamicTlsPermissions , Network.HTTP.Client.Restricted , Network.HTTP.Client.Blocklisting diff --git a/server/lib/test-harness/src/Harness/GraphqlEngine.hs b/server/lib/test-harness/src/Harness/GraphqlEngine.hs index 14623e69831..33d1f216c91 100644 --- a/server/lib/test-harness/src/Harness/GraphqlEngine.hs +++ b/server/lib/test-harness/src/Harness/GraphqlEngine.hs @@ -317,24 +317,29 @@ runApp serveOptions = do env <- Env.getEnvironment initTime <- liftIO getCurrentTime globalCtx <- App.initGlobalCtx env metadataDbUrl rci - (ekgStore, serverMetrics) <- liftIO do - store <- EKG.newStore @TestMetricsSpec - serverMetrics <- liftIO . createServerMetrics $ EKG.subset ServerSubset store - pure (EKG.subset EKG.emptyOf store, serverMetrics) + (ekgStore, serverMetrics) <- + liftIO $ do + store <- EKG.newStore @TestMetricsSpec + serverMetrics <- + liftIO $ createServerMetrics $ EKG.subset ServerSubset store + pure (EKG.subset EKG.emptyOf store, serverMetrics) prometheusMetrics <- makeDummyPrometheusMetrics - let managedServerCtx = App.initialiseServerCtx env globalCtx serveOptions Nothing serverMetrics prometheusMetrics sampleAlways (FeatureFlag.checkFeatureFlag env) + let featureFlag = FeatureFlag.checkFeatureFlag env + managedServerCtx = App.initialiseServerCtx env globalCtx serveOptions Nothing serverMetrics prometheusMetrics sampleAlways featureFlag runManagedT managedServerCtx \serverCtx@ServerCtx {..} -> do let Loggers _ _ pgLogger = scLoggers - flip App.runPGMetadataStorageAppT (scMetadataDbPool, pgLogger) . lowerManagedT $ - App.runHGEServer - (const $ pure ()) - env - serveOptions - serverCtx - initTime - Nothing - ekgStore - (FeatureFlag.checkFeatureFlag env) + appContext = App.AppContext scManager pgLogger scMetadataDbPool + flip App.runPGMetadataStorageAppT appContext $ + lowerManagedT $ + App.runHGEServer + (const $ pure ()) + env + serveOptions + serverCtx + initTime + Nothing + ekgStore + featureFlag -- | Used only for 'runApp' above. 
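Note: the cabal hunk above registers the new Hasura.Services and Hasura.Services.Network modules, but their contents never appear in this diff. As a reading aid, here is a minimal sketch of what Hasura.Services.Network plausibly contains, reconstructed only from the askHTTPManager call sites and the derived ProvidesNetwork instances that show up in later hunks; everything beyond the class name and its single method is an assumption.

```haskell
{-# LANGUAGE ImportQualifiedPost #-}

-- Hypothetical reconstruction of Hasura.Services.Network; the real module
-- may differ in naming and carry additional members.
module Hasura.Services.Network
  ( ProvidesNetwork (..),
  )
where

import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Except (ExceptT)
import Control.Monad.Trans.Reader (ReaderT)
import Control.Monad.Trans.State (StateT)
import Network.HTTP.Client qualified as HTTP

-- | Replacement for the removed 'Network.HTTP.Client.Manager.HasHttpManagerM':
-- any monad that can supply the shared HTTP manager.
class Monad m => ProvidesNetwork m where
  askHTTPManager :: m HTTP.Manager

-- Lifting instances let the constraint travel through ordinary transformer
-- stacks (and through newtype deriving, as CacheRWT, MetadataT and RunT do
-- later in this diff) without hand-written boilerplate.
instance ProvidesNetwork m => ProvidesNetwork (ReaderT r m) where
  askHTTPManager = lift askHTTPManager

instance ProvidesNetwork m => ProvidesNetwork (StateT s m) where
  askHTTPManager = lift askHTTPManager

instance ProvidesNetwork m => ProvidesNetwork (ExceptT e m) where
  askHTTPManager = lift askHTTPManager

-- Hasura.Services presumably re-exports this class and bundles it into the
-- 'ProvidesHasuraServices' constraint used in signatures further down.
```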
data TestMetricsSpec name metricType tags diff --git a/server/src-emit-metadata-openapi/Main.hs b/server/src-emit-metadata-openapi/Main.hs index afee344a397..1cf73c2fb7d 100644 --- a/server/src-emit-metadata-openapi/Main.hs +++ b/server/src-emit-metadata-openapi/Main.hs @@ -1,11 +1,9 @@ module Main (main) where import Data.Aeson.Encode.Pretty (encodePretty) -import Data.ByteString.Lazy qualified as LBS +import Data.ByteString.Lazy.Char8 qualified as LBS import Hasura.Server.MetadataOpenAPI (metadataOpenAPI) import Prelude main :: IO () -main = do - LBS.putStr $ encodePretty metadataOpenAPI - putStrLn "" +main = LBS.putStrLn $ encodePretty metadataOpenAPI diff --git a/server/src-exec/Main.hs b/server/src-exec/Main.hs index 5897917c546..7291a9d2df2 100644 --- a/server/src-exec/Main.hs +++ b/server/src-exec/Main.hs @@ -97,7 +97,10 @@ runApp env (HGEOptions rci metadataDbUrl hgeCmd) = do C.forkImmortal "ourIdleGC" logger $ GC.ourIdleGC logger (seconds 0.3) (seconds 10) (seconds 60) - flip runPGMetadataStorageAppT (scMetadataDbPool, pgLogger) . lowerManagedT $ do + -- TODO: why don't we just run a reader with ServerCtx from here? + -- the AppContext doesn't add any new information + let appContext = AppContext scManager pgLogger scMetadataDbPool + flip runPGMetadataStorageAppT appContext . lowerManagedT $ do runHGEServer (const $ pure ()) env serveOptions serverCtx initTime Nothing ekgStore (FeatureFlag.checkFeatureFlag env) HCExport -> do GlobalCtx {..} <- initGlobalCtx env metadataDbUrl rci diff --git a/server/src-lib/Hasura/App.hs b/server/src-lib/Hasura/App.hs index f522c95fc78..41f2d3769b6 100644 --- a/server/src-lib/Hasura/App.hs +++ b/server/src-lib/Hasura/App.hs @@ -3,11 +3,16 @@ {-# LANGUAGE UndecidableInstances #-} {-# LANGUAGE ViewPatterns #-} --- | Imported by 'server/src-exec/Main.hs'. +-- | Defines the CE version of the engine. +-- +-- This module contains everything that is required to run the community edition +-- of the engine: the base application monad and the implementation of all its +-- behaviour classes. module Hasura.App ( ExitCode (AuthConfigurationError, DatabaseMigrationError, DowngradeProcessError, MetadataCleanError, MetadataExportError, SchemaCacheInitError), ExitException (ExitException), GlobalCtx (..), + AppContext (..), PGMetadataStorageAppT (runPGMetadataStorageAppT), accessDeniedErrMsg, flushLogger, @@ -140,13 +145,13 @@ import Hasura.Server.SchemaUpdate import Hasura.Server.Telemetry import Hasura.Server.Types import Hasura.Server.Version +import Hasura.Services import Hasura.Session import Hasura.ShutdownLatch import Hasura.Tracing qualified as Tracing import Network.HTTP.Client qualified as HTTP import Network.HTTP.Client.Blocklisting (Blocklist) import Network.HTTP.Client.CreateManager (mkHttpManager) -import Network.HTTP.Client.Manager (HasHttpManagerM (..)) import Network.Wai (Application) import Network.Wai.Handler.Warp qualified as Warp import Options.Applicative @@ -267,8 +272,17 @@ initGlobalCtx env metadataDbUrl defaultPgConnInfo = do let mdConnInfo = mkConnInfoFromMDb mdUrl mkGlobalCtx mdConnInfo (Just (dbUrl, srcConnInfo)) +-- | Base application context. +-- +-- This defines all base information required to run the engine. 
+data AppContext = AppContext + { _acHTTPManager :: HTTP.Manager, + _acPGLogger :: PG.PGLogger, + _acPGPool :: PG.PGPool + } + -- | An application with Postgres database as a metadata storage -newtype PGMetadataStorageAppT m a = PGMetadataStorageAppT {runPGMetadataStorageAppT :: (PG.PGPool, PG.PGLogger) -> m a} +newtype PGMetadataStorageAppT m a = PGMetadataStorageAppT {runPGMetadataStorageAppT :: AppContext -> m a} deriving ( Functor, Applicative, @@ -278,17 +292,19 @@ newtype PGMetadataStorageAppT m a = PGMetadataStorageAppT {runPGMetadataStorageA MonadCatch, MonadThrow, MonadMask, - HasHttpManagerM, HasServerConfigCtx, - MonadReader (PG.PGPool, PG.PGLogger), + MonadReader AppContext, MonadBase b, MonadBaseControl b ) - via (ReaderT (PG.PGPool, PG.PGLogger) m) + via (ReaderT AppContext m) deriving ( MonadTrans ) - via (ReaderT (PG.PGPool, PG.PGLogger)) + via (ReaderT AppContext) + +instance Monad m => ProvidesNetwork (PGMetadataStorageAppT m) where + askHTTPManager = asks _acHTTPManager resolvePostgresConnInfo :: (MonadIO m) => Env.Environment -> UrlConf -> Maybe Int -> m PG.ConnInfo @@ -596,7 +612,8 @@ runHGEServer :: MonadMetadataStorageQueryAPI m, MonadResolveSource m, EB.MonadQueryTags m, - MonadEventLogCleanup m + MonadEventLogCleanup m, + ProvidesHasuraServices m ) => (ServerCtx -> Spock.SpockT m ()) -> Env.Environment -> @@ -687,7 +704,8 @@ mkHGEServer :: MonadMetadataStorageQueryAPI m, MonadResolveSource m, EB.MonadQueryTags m, - MonadEventLogCleanup m + MonadEventLogCleanup m, + ProvidesHasuraServices m ) => (ServerCtx -> Spock.SpockT m ()) -> Env.Environment -> @@ -765,7 +783,6 @@ mkHGEServer setupHook env ServeOptions {..} serverCtx@ServerCtx {..} ekgStore ch _ <- startSchemaSyncProcessorThread logger - scManager scMetaVersionRef cacheRef scInstanceId @@ -1006,7 +1023,6 @@ mkHGEServer setupHook env ServeOptions {..} serverCtx@ServerCtx {..} ekgStore ch logger (getSchemaCache cacheRef) (leActionEvents lockedEventsCtx) - scManager scPrometheusMetrics sleepTime Nothing @@ -1137,7 +1153,7 @@ instance (MonadIO m) => WS.MonadWSLog (PGMetadataStorageAppT m) where logWSLog logger = unLogger logger instance (Monad m) => MonadResolveSource (PGMetadataStorageAppT m) where - getPGSourceResolver = mkPgSourceResolver <$> asks snd + getPGSourceResolver = mkPgSourceResolver <$> asks _acPGLogger getMSSQLSourceResolver = return mkMSSQLSourceResolver instance (Monad m) => EB.MonadQueryTags (PGMetadataStorageAppT m) where @@ -1153,7 +1169,7 @@ runInSeparateTx :: PG.TxE QErr a -> PGMetadataStorageAppT m (Either QErr a) runInSeparateTx tx = do - pool <- asks fst + pool <- asks _acPGPool liftIO $ runExceptT $ PG.runTx pool (PG.RepeatableRead, Nothing) tx notifySchemaCacheSyncTx :: MetadataResourceVersion -> InstanceId -> CacheInvalidations -> PG.TxE QErr () diff --git a/server/src-lib/Hasura/Backends/DataConnector/Adapter/Metadata.hs b/server/src-lib/Hasura/Backends/DataConnector/Adapter/Metadata.hs index caa95b60f69..b3348fb3516 100644 --- a/server/src-lib/Hasura/Backends/DataConnector/Adapter/Metadata.hs +++ b/server/src-lib/Hasura/Backends/DataConnector/Adapter/Metadata.hs @@ -46,11 +46,11 @@ import Hasura.SQL.Backend (BackendSourceKind (..), BackendType (..)) import Hasura.SQL.Types (CollectableType (..)) import Hasura.Server.Migrate.Version (SourceCatalogMigrationState (..)) import Hasura.Server.Utils qualified as HSU +import Hasura.Services.Network import Hasura.Session (SessionVariable, mkSessionVariable) import Hasura.Tracing (ignoreTraceT) import Language.GraphQL.Draft.Syntax qualified as 
GQL import Network.HTTP.Client qualified as HTTP -import Network.HTTP.Client.Manager import Servant.Client.Core.HasClient ((//)) import Servant.Client.Generic (genericClient) import Witch qualified @@ -82,7 +82,7 @@ resolveBackendInfo' :: ArrowWriter (Seq (Either InconsistentMetadata MetadataDependency)) arr, MonadIO m, MonadBaseControl IO m, - HasHttpManagerM m + ProvidesNetwork m ) => Logger Hasura -> (Inc.Dependency (Maybe (HashMap DC.DataConnectorName Inc.InvalidationKey)), InsOrdHashMap DC.DataConnectorName DC.DataConnectorOptions) `arr` HashMap DC.DataConnectorName DC.DataConnectorInfo @@ -103,12 +103,12 @@ resolveBackendInfo' logger = proc (invalidationKeys, optionsMap) -> do ArrowWriter (Seq (Either InconsistentMetadata MetadataDependency)) arr, MonadIO m, MonadBaseControl IO m, - HasHttpManagerM m + ProvidesNetwork m ) => (Inc.Dependency (Maybe (HashMap DC.DataConnectorName Inc.InvalidationKey)), DC.DataConnectorName, DC.DataConnectorOptions) `arr` Maybe DC.DataConnectorInfo getDataConnectorCapabilitiesIfNeeded = Inc.cache proc (invalidationKeys, dataConnectorName, dataConnectorOptions) -> do let metadataObj = MetadataObject (MODataConnectorAgent dataConnectorName) $ J.toJSON dataConnectorName - httpMgr <- bindA -< askHttpManager + httpMgr <- bindA -< askHTTPManager Inc.dependOn -< Inc.selectMaybeD (Inc.ConstS dataConnectorName) invalidationKeys (| withRecordInconsistency diff --git a/server/src-lib/Hasura/GraphQL/Execute.hs b/server/src-lib/Hasura/GraphQL/Execute.hs index 09aefea50c2..0f140abf914 100644 --- a/server/src-lib/Hasura/GraphQL/Execute.hs +++ b/server/src-lib/Hasura/GraphQL/Execute.hs @@ -59,19 +59,21 @@ import Hasura.SQL.Backend import Hasura.Server.Init qualified as Init import Hasura.Server.Prometheus (PrometheusMetrics) import Hasura.Server.Types (ReadOnlyMode (..), RequestId (..)) +import Hasura.Services import Hasura.Session import Hasura.Tracing qualified as Tracing import Language.GraphQL.Draft.Syntax qualified as G -import Network.HTTP.Client qualified as HTTP import Network.HTTP.Types qualified as HTTP -- | Execution context +-- +-- TODO: can this be deduplicated with Run? is there anything in here that isn't +-- already in the stack? data ExecutionCtx = ExecutionCtx { _ecxLogger :: L.Logger L.Hasura, _ecxSqlGenCtx :: SQLGenCtx, _ecxSchemaCache :: SchemaCache, _ecxSchemaCacheVer :: SchemaCacheVer, - _ecxHttpManager :: HTTP.Manager, _ecxEnableAllowList :: Init.AllowListStatus, _ecxReadOnlyMode :: ReadOnlyMode, _ecxPrometheusMetrics :: PrometheusMetrics @@ -310,6 +312,8 @@ checkQueryInAllowlist allowListStatus allowlistMode userInfo req schemaCache = -- | Construct a 'ResolvedExecutionPlan' from a 'GQLReqParsed' and a -- bunch of metadata. +-- +-- Labelling it as inlineable fixed a performance regression on GHC 8.10.7. {-# INLINEABLE getResolvedExecPlan #-} getResolvedExecPlan :: forall m. 
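The hunks around this point drop the _ecxHttpManager field from ExecutionCtx and the explicit HTTP.Manager argument from getResolvedExecPlan. A self-contained sketch of that shape, with an invented Ctx record and handler standing in for ExecutionCtx and its consumers; the ProvidesNetwork class is redeclared locally only so the snippet compiles on its own:

```haskell
{-# LANGUAGE ImportQualifiedPost #-}

module ContextSketch where

import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Reader (ReaderT, asks)
import Network.HTTP.Client qualified as HTTP

-- Local stand-in for the class from Hasura.Services.Network.
class Monad m => ProvidesNetwork m where
  askHTTPManager :: m HTTP.Manager

-- A trimmed-down analogue of ExecutionCtx after this change: the manager
-- field is gone, so the record only carries request-shaped data.
data Ctx = Ctx {ctxRequestId :: Int}

-- A handler that previously would have read the manager out of the record
-- ('asks _ecxHttpManager') now asks the base monad for it instead.
handler :: ProvidesNetwork m => ReaderT Ctx m Int
handler = do
  requestId <- asks ctxRequestId
  _manager <- lift askHTTPManager
  pure requestId
```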
@@ -319,7 +323,8 @@ getResolvedExecPlan :: MonadBaseControl IO m, Tracing.MonadTrace m, EC.MonadGQLExecutionCheck m, - EB.MonadQueryTags m + EB.MonadQueryTags m, + ProvidesNetwork m ) => Env.Environment -> L.Logger L.Hasura -> @@ -330,7 +335,6 @@ getResolvedExecPlan :: SchemaCache -> SchemaCacheVer -> ET.GraphQLQueryType -> - HTTP.Manager -> [HTTP.Header] -> GQLReqUnparsed -> SingleOperation -> -- the first step of the execution plan @@ -347,7 +351,6 @@ getResolvedExecPlan sc _scVer queryType - httpManager reqHeaders reqUnparsed queryParts -- the first step of the execution plan @@ -366,7 +369,6 @@ getResolvedExecPlan prometheusMetrics gCtx userInfo - httpManager reqHeaders directives inlinedSelSet @@ -387,7 +389,6 @@ getResolvedExecPlan gCtx sqlGenCtx userInfo - httpManager reqHeaders directives inlinedSelSet diff --git a/server/src-lib/Hasura/GraphQL/Execute/Action.hs b/server/src-lib/Hasura/GraphQL/Execute/Action.hs index 25ea6aa5488..eaba304865f 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/Action.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/Action.hs @@ -79,6 +79,7 @@ import Hasura.Server.Utils ( mkClientHeadersForward, mkSetCookieHeaders, ) +import Hasura.Services.Network import Hasura.Session import Hasura.Tracing qualified as Tracing import Language.GraphQL.Draft.Syntax qualified as G @@ -137,6 +138,7 @@ asSingleRowJsonResp query args = -- | Synchronously execute webhook handler and resolve response to action "output" resolveActionExecution :: + HTTP.Manager -> Env.Environment -> L.Logger L.Hasura -> PrometheusMetrics -> @@ -145,7 +147,7 @@ resolveActionExecution :: ActionExecContext -> Maybe GQLQueryText -> ActionExecution -resolveActionExecution env logger prometheusMetrics _userInfo IR.AnnActionExecution {..} ActionExecContext {..} gqlQueryText = +resolveActionExecution httpManager env logger prometheusMetrics _userInfo IR.AnnActionExecution {..} ActionExecContext {..} gqlQueryText = ActionExecution $ first (encJFromOrderedValue . makeActionResponseNoRelations _aaeFields _aaeOutputType _aaeOutputFields True) <$> runWebhook where handlerPayload = ActionWebhookPayload (ActionContext _aaeName) _aecSessionVariables _aaePayload gqlQueryText @@ -154,10 +156,11 @@ resolveActionExecution env logger prometheusMetrics _userInfo IR.AnnActionExecut (MonadIO m, MonadError QErr m, Tracing.MonadTrace m) => m (ActionWebhookResponse, HTTP.ResponseHeaders) runWebhook = + -- TODO: do we need to add the logger as a reader? can't we just give it as an argument? 
flip runReaderT logger $ callWebhook env - _aecManager + httpManager prometheusMetrics _aaeOutputType _aaeOutputFields @@ -430,18 +433,18 @@ asyncActionsProcessor :: MonadBaseControl IO m, LA.Forall (LA.Pure m), Tracing.HasReporter m, - MonadMetadataStorage m + MonadMetadataStorage m, + ProvidesNetwork m ) => Env.Environment -> L.Logger L.Hasura -> IO SchemaCache -> STM.TVar (Set LockedActionEventId) -> - HTTP.Manager -> PrometheusMetrics -> Milliseconds -> Maybe GH.GQLQueryText -> m (Forever m) -asyncActionsProcessor env logger getSCFromRef' lockedActionEvents httpManager prometheusMetrics sleepTime gqlQueryText = +asyncActionsProcessor env logger getSCFromRef' lockedActionEvents prometheusMetrics sleepTime gqlQueryText = return $ Forever () $ const $ do @@ -467,6 +470,7 @@ asyncActionsProcessor env logger getSCFromRef' lockedActionEvents httpManager pr where callHandler :: ActionCache -> ActionLogItem -> m () callHandler actionCache actionLogItem = Tracing.runTraceT Tracing.sampleAlways "async actions processor" do + httpManager <- askHTTPManager let ActionLogItem actionId actionName @@ -488,6 +492,7 @@ asyncActionsProcessor env logger getSCFromRef' lockedActionEvents httpManager pr metadataResponseTransform = _adResponseTransform definition eitherRes <- runExceptT $ + -- TODO: do we need to add the logger as a reader? can't we just give it as an argument? flip runReaderT logger $ callWebhook env diff --git a/server/src-lib/Hasura/GraphQL/Execute/Mutation.hs b/server/src-lib/Hasura/GraphQL/Execute/Mutation.hs index 0213ce1acb5..d85e8340834 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/Mutation.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/Mutation.hs @@ -34,34 +34,36 @@ import Hasura.RQL.Types.QueryTags import Hasura.SQL.AnyBackend qualified as AB import Hasura.Server.Prometheus (PrometheusMetrics (..)) import Hasura.Server.Types (RequestId (..)) +import Hasura.Services import Hasura.Session import Hasura.Tracing qualified as Tracing import Language.GraphQL.Draft.Syntax qualified as G -import Network.HTTP.Client qualified as HTTP import Network.HTTP.Types qualified as HTTP convertMutationAction :: ( MonadIO m, MonadError QErr m, - MonadMetadataStorage m + MonadMetadataStorage m, + ProvidesNetwork m ) => Env.Environment -> L.Logger L.Hasura -> PrometheusMetrics -> UserInfo -> - HTTP.Manager -> HTTP.RequestHeaders -> Maybe GH.GQLQueryText -> ActionMutation Void -> m ActionExecutionPlan -convertMutationAction env logger prometheusMetrics userInfo manager reqHeaders gqlQueryText = \case - AMSync s -> - pure $ AEPSync $ resolveActionExecution env logger prometheusMetrics userInfo s actionExecContext gqlQueryText - AMAsync s -> - AEPAsyncMutation <$> resolveActionMutationAsync s reqHeaders userSession +convertMutationAction env logger prometheusMetrics userInfo reqHeaders gqlQueryText action = do + httpManager <- askHTTPManager + case action of + AMSync s -> + pure $ AEPSync $ resolveActionExecution httpManager env logger prometheusMetrics userInfo s actionExecContext gqlQueryText + AMAsync s -> + AEPAsyncMutation <$> resolveActionMutationAsync s reqHeaders userSession where userSession = _uiSession userInfo - actionExecContext = ActionExecContext manager reqHeaders $ _uiSession userInfo + actionExecContext = ActionExecContext reqHeaders (_uiSession userInfo) convertMutationSelectionSet :: forall m. 
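The action hunks above give resolveActionExecution an explicit HTTP.Manager first argument, drop the manager from ActionExecContext, and have convertMutationAction fetch it with askHTTPManager. A small stand-alone sketch of that division of labour, with invented names (invokeWebhook / dispatchSyncAction) in place of the real callWebhook / convertMutationAction:

```haskell
{-# LANGUAGE ImportQualifiedPost #-}

module BridgeSketch where

import Network.HTTP.Client qualified as HTTP

-- Local stand-in for the class from Hasura.Services.Network.
class Monad m => ProvidesNetwork m where
  askHTTPManager :: m HTTP.Manager

-- Leaf code that actually performs I/O keeps taking a concrete manager,
-- mirroring how resolveActionExecution gains an explicit first argument.
invokeWebhook :: Monad m => HTTP.Manager -> String -> m ()
invokeWebhook _manager _url = pure ()

-- The monadic caller (the analogue of convertMutationAction) fetches the
-- manager from its context once and hands it down explicitly.
dispatchSyncAction :: ProvidesNetwork m => String -> m ()
dispatchSyncAction url = do
  manager <- askHTTPManager
  invokeWebhook manager url
```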
@@ -70,7 +72,8 @@ convertMutationSelectionSet :: MonadError QErr m, MonadMetadataStorage m, MonadGQLExecutionCheck m, - MonadQueryTags m + MonadQueryTags m, + ProvidesNetwork m ) => Env.Environment -> L.Logger L.Hasura -> @@ -78,7 +81,6 @@ convertMutationSelectionSet :: GQLContext -> SQLGenCtx -> UserInfo -> - HTTP.Manager -> HTTP.RequestHeaders -> [G.Directive G.Name] -> G.SelectionSet G.NoFragments G.Name -> @@ -96,7 +98,6 @@ convertMutationSelectionSet gqlContext SQLGenCtx {stringifyNum} userInfo - manager reqHeaders directives fields @@ -146,7 +147,7 @@ convertMutationSelectionSet (actionName, _fch) <- pure $ case noRelsDBAST of AMSync s -> (_aaeName s, _aaeForwardClientHeaders s) AMAsync s -> (_aamaName s, _aamaForwardClientHeaders s) - plan <- convertMutationAction env logger prometheusMetrics userInfo manager reqHeaders (Just (GH._grQuery gqlUnparsed)) noRelsDBAST + plan <- convertMutationAction env logger prometheusMetrics userInfo reqHeaders (Just (GH._grQuery gqlUnparsed)) noRelsDBAST pure $ ExecStepAction plan (ActionsInfo actionName _fch) remoteJoins -- `_fch` represents the `forward_client_headers` option from the action -- definition which is currently being ignored for actions that are mutations RFRaw customFieldVal -> flip onLeft throwError =<< executeIntrospection userInfo customFieldVal introspectionDisabledRoles diff --git a/server/src-lib/Hasura/GraphQL/Execute/Query.hs b/server/src-lib/Hasura/GraphQL/Execute/Query.hs index 9e8bde4f8e4..e7b5f60c597 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/Query.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/Query.hs @@ -34,9 +34,9 @@ import Hasura.RQL.Types.QueryTags import Hasura.SQL.AnyBackend qualified as AB import Hasura.Server.Prometheus (PrometheusMetrics (..)) import Hasura.Server.Types (RequestId (..)) +import Hasura.Services.Network import Hasura.Session import Language.GraphQL.Draft.Syntax qualified as G -import Network.HTTP.Client qualified as HTTP import Network.HTTP.Types qualified as HTTP parseGraphQLQuery :: @@ -61,14 +61,14 @@ convertQuerySelSet :: forall m. 
( MonadError QErr m, MonadGQLExecutionCheck m, - MonadQueryTags m + MonadQueryTags m, + ProvidesNetwork m ) => Env.Environment -> L.Logger L.Hasura -> PrometheusMetrics -> GQLContext -> UserInfo -> - HTTP.Manager -> HTTP.RequestHeaders -> [G.Directive G.Name] -> G.SelectionSet G.NoFragments G.Name -> @@ -85,7 +85,6 @@ convertQuerySelSet prometheusMetrics gqlContext userInfo - manager reqHeaders directives fields @@ -127,9 +126,23 @@ convertQuerySelSet let (noRelsRemoteField, remoteJoins) = RJ.getRemoteJoinsGraphQLField remoteField pure $ buildExecStepRemote remoteSchemaInfo resultCustomizer G.OperationTypeQuery noRelsRemoteField remoteJoins (GH._grOperationName gqlUnparsed) RFAction action -> do + httpManager <- askHTTPManager let (noRelsDBAST, remoteJoins) = RJ.getRemoteJoinsActionQuery action (actionExecution, actionName, fch) <- pure $ case noRelsDBAST of - AQQuery s -> (AEPSync $ resolveActionExecution env logger prometheusMetrics userInfo s (ActionExecContext manager reqHeaders (_uiSession userInfo)) (Just (GH._grQuery gqlUnparsed)), _aaeName s, _aaeForwardClientHeaders s) + AQQuery s -> + ( AEPSync $ + resolveActionExecution + httpManager + env + logger + prometheusMetrics + userInfo + s + (ActionExecContext reqHeaders (_uiSession userInfo)) + (Just (GH._grQuery gqlUnparsed)), + _aaeName s, + _aaeForwardClientHeaders s + ) AQAsync s -> (AEPAsyncQuery $ AsyncActionQueryExecutionPlan (_aaaqActionId s) $ resolveAsyncActionQuery userInfo s, _aaaqName s, _aaaqForwardClientHeaders s) pure $ ExecStepAction actionExecution (ActionsInfo actionName fch) remoteJoins RFRaw r -> flip onLeft throwError =<< executeIntrospection userInfo r introspectionDisabledRoles diff --git a/server/src-lib/Hasura/GraphQL/Execute/RemoteJoin/Join.hs b/server/src-lib/Hasura/GraphQL/Execute/RemoteJoin/Join.hs index 811a10c1372..7404f530f36 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/RemoteJoin/Join.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/RemoteJoin/Join.hs @@ -34,10 +34,10 @@ import Hasura.RQL.Types.Common import Hasura.RemoteSchema.SchemaCache import Hasura.SQL.AnyBackend qualified as AB import Hasura.Server.Types (RequestId) +import Hasura.Services.Network import Hasura.Session import Hasura.Tracing qualified as Tracing import Language.GraphQL.Draft.Syntax qualified as G -import Network.HTTP.Client qualified as HTTP import Network.HTTP.Types qualified as HTTP ------------------------------------------------------------------------------- @@ -58,19 +58,19 @@ processRemoteJoins :: MonadBaseControl IO m, EB.MonadQueryTags m, MonadQueryLog m, - Tracing.MonadTrace m + Tracing.MonadTrace m, + ProvidesNetwork m ) => RequestId -> L.Logger L.Hasura -> Env.Environment -> - HTTP.Manager -> [HTTP.Header] -> UserInfo -> EncJSON -> Maybe RemoteJoins -> GQLReqUnparsed -> m EncJSON -processRemoteJoins requestId logger env manager requestHeaders userInfo lhs maybeJoinTree gqlreq = +processRemoteJoins requestId logger env requestHeaders userInfo lhs maybeJoinTree gqlreq = forRemoteJoins maybeJoinTree lhs \joinTree -> do lhsParsed <- JO.eitherDecode (encJToLBS lhs) @@ -117,7 +117,7 @@ processRemoteJoins requestId logger env manager requestHeaders userInfo lhs mayb m BL.ByteString callRemoteServer remoteSchemaInfo request = fmap (view _3) $ - execRemoteGQ env manager userInfo requestHeaders remoteSchemaInfo request + execRemoteGQ env userInfo requestHeaders remoteSchemaInfo request -- | Fold the join tree. 
-- diff --git a/server/src-lib/Hasura/GraphQL/RemoteServer.hs b/server/src-lib/Hasura/GraphQL/RemoteServer.hs index c59930cb433..18dcb0766ba 100644 --- a/server/src-lib/Hasura/GraphQL/RemoteServer.hs +++ b/server/src-lib/Hasura/GraphQL/RemoteServer.hs @@ -36,6 +36,7 @@ import Hasura.RQL.Types.Common import Hasura.RemoteSchema.Metadata import Hasura.RemoteSchema.SchemaCache.Types import Hasura.Server.Utils +import Hasura.Services.Network import Hasura.Session import Hasura.Tracing qualified as Tracing import Language.GraphQL.Draft.Parser qualified as G @@ -53,15 +54,14 @@ import Network.Wreq qualified as Wreq -- and also is called by schema cache rebuilding code in "Hasura.RQL.DDL.Schema.Cache". fetchRemoteSchema :: forall m. - (MonadIO m, MonadError QErr m, Tracing.MonadTrace m) => + (MonadIO m, MonadError QErr m, Tracing.MonadTrace m, ProvidesNetwork m) => Env.Environment -> - HTTP.Manager -> RemoteSchemaName -> ValidatedRemoteSchemaDef -> m (IntrospectionResult, BL.ByteString, RemoteSchemaInfo) -fetchRemoteSchema env manager _rscName rsDef = do +fetchRemoteSchema env _rscName rsDef = do (_, _, rawIntrospectionResult) <- - execRemoteGQ env manager adminUserInfo [] rsDef introspectionQuery + execRemoteGQ env adminUserInfo [] rsDef introspectionQuery (ir, rsi) <- stitchRemoteSchema rawIntrospectionResult _rscName rsDef -- The 'rawIntrospectionResult' contains the 'Bytestring' response of -- the introspection result of the remote server. We store this in the @@ -128,10 +128,10 @@ stitchRemoteSchema rawIntrospectionResult _rscName rsDef@ValidatedRemoteSchemaDe execRemoteGQ :: ( MonadIO m, MonadError QErr m, - Tracing.MonadTrace m + Tracing.MonadTrace m, + ProvidesNetwork m ) => Env.Environment -> - HTTP.Manager -> UserInfo -> [HTTP.Header] -> ValidatedRemoteSchemaDef -> @@ -139,7 +139,7 @@ execRemoteGQ :: -- | Returns the response body and headers, along with the time taken for the -- HTTP request to complete m (DiffTime, [HTTP.Header], BL.ByteString) -execRemoteGQ env manager userInfo reqHdrs rsdef gqlReq@GQLReq {..} = do +execRemoteGQ env userInfo reqHdrs rsdef gqlReq@GQLReq {..} = do let gqlReqUnparsed = renderGQLReqOutgoing gqlReq when (G._todType _grQuery == G.OperationTypeSubscription) $ @@ -163,6 +163,7 @@ execRemoteGQ env manager userInfo reqHdrs rsdef gqlReq@GQLReq {..} = do & set HTTP.body (Just $ J.encode gqlReqUnparsed) & set HTTP.timeout (HTTP.responseTimeoutMicro (timeout * 1000000)) + manager <- askHTTPManager Tracing.tracedHttpRequest req \req' -> do (time, res) <- withElapsedTime $ liftIO $ try $ HTTP.performRequest req' manager resp <- onLeft res (throwRemoteSchemaHttp webhookEnvRecord) diff --git a/server/src-lib/Hasura/GraphQL/Transport/HTTP.hs b/server/src-lib/Hasura/GraphQL/Transport/HTTP.hs index 3266b3eeb10..07c60341307 100644 --- a/server/src-lib/Hasura/GraphQL/Transport/HTTP.hs +++ b/server/src-lib/Hasura/GraphQL/Transport/HTTP.hs @@ -82,11 +82,11 @@ import Hasura.Server.Prometheus ) import Hasura.Server.Telemetry.Counters qualified as Telem import Hasura.Server.Types (RequestId) +import Hasura.Services.Network import Hasura.Session import Hasura.Tracing (MonadTrace, TraceT, trace) import Hasura.Tracing qualified as Tracing import Language.GraphQL.Draft.Syntax qualified as G -import Network.HTTP.Client qualified as HTTP import Network.HTTP.Types qualified as HTTP import Network.Wai.Extended qualified as Wai import System.Metrics.Prometheus.Counter qualified as Prometheus.Counter @@ -307,7 +307,8 @@ runGQ :: MonadExecuteQuery m, MonadMetadataStorage m, 
EB.MonadQueryTags m, - HasResourceLimits m + HasResourceLimits m, + ProvidesNetwork m ) => Env.Environment -> L.Logger L.Hasura -> @@ -319,7 +320,7 @@ runGQ :: GQLReqUnparsed -> m (GQLQueryOperationSuccessLog, HttpResponse (Maybe GQResponse, EncJSON)) runGQ env logger reqId userInfo ipAddress reqHeaders queryType reqUnparsed = do - E.ExecutionCtx _ sqlGenCtx sc scVer httpManager enableAL readOnlyMode prometheusMetrics <- ask + E.ExecutionCtx _ sqlGenCtx sc scVer enableAL readOnlyMode prometheusMetrics <- ask let gqlMetrics = pmGraphQLRequestMetrics prometheusMetrics (totalTime, (response, parameterizedQueryHash, gqlOpType)) <- withElapsedTime $ do @@ -351,7 +352,6 @@ runGQ env logger reqId userInfo ipAddress reqHeaders queryType reqUnparsed = do sc scVer queryType - httpManager reqHeaders reqUnparsed queryParts @@ -359,7 +359,7 @@ runGQ env logger reqId userInfo ipAddress reqHeaders queryType reqUnparsed = do reqId -- 4. Execute the execution plan producing a 'AnnotatedResponse'. - response <- executePlan httpManager reqParsed runLimits execPlan + response <- executePlan reqParsed runLimits execPlan return (response, parameterizedQueryHash, gqlOpType) -- 5. Record telemetry @@ -382,12 +382,11 @@ runGQ env logger reqId userInfo ipAddress reqHeaders queryType reqUnparsed = do forWithKey = flip OMap.traverseWithKey executePlan :: - HTTP.Manager -> GQLReqParsed -> (m AnnotatedResponse -> m AnnotatedResponse) -> E.ResolvedExecutionPlan -> m AnnotatedResponse - executePlan httpManager reqParsed runLimits execPlan = case execPlan of + executePlan reqParsed runLimits execPlan = case execPlan of E.QueryExecutionPlan queryPlans asts dirMap -> trace "Query" $ do -- Attempt to lookup a cached response in the query cache. -- 'keyedLookup' is a monadic action possibly returning a cache hit. @@ -408,7 +407,8 @@ runGQ env logger reqId userInfo ipAddress reqHeaders queryType reqUnparsed = do -- If we get a cache miss, we must run the query against the graphql engine. Nothing -> runLimits $ do -- 1. 'traverse' the 'ExecutionPlan' executing every step. - conclusion <- runExceptT $ forWithKey queryPlans $ executeQueryStep httpManager + -- TODO: can this be a `catch` rather than a `runExceptT`? + conclusion <- runExceptT $ forWithKey queryPlans executeQueryStep -- 2. Construct an 'AnnotatedResponse' from the results of all steps in the 'ExecutionPlan'. result <- buildResponseFromParts Telem.Query conclusion cachingHeaders let response@(HttpResponse responseData _) = arResponse result @@ -435,6 +435,7 @@ runGQ env logger reqId userInfo ipAddress reqHeaders queryType reqUnparsed = do -- we are in the aforementioned case; we circumvent the normal process Just (sourceConfig, resolvedConnectionTemplate, pgMutations) -> do res <- + -- TODO: can this be a `catch` rather than a `runExceptT`? runExceptT $ doQErr $ runPGMutationTransaction reqId reqUnparsed userInfo logger sourceConfig resolvedConnectionTemplate pgMutations @@ -453,17 +454,17 @@ runGQ env logger reqId userInfo ipAddress reqHeaders queryType reqUnparsed = do -- we are not in the transaction case; proceeding normally Nothing -> do - conclusion <- runExceptT $ forWithKey mutationPlans $ executeMutationStep httpManager + -- TODO: can this be a `catch` rather than a `runExceptT`? 
+ conclusion <- runExceptT $ forWithKey mutationPlans executeMutationStep buildResponseFromParts Telem.Mutation conclusion [] E.SubscriptionExecutionPlan _sub -> throw400 UnexpectedPayload "subscriptions are not supported over HTTP, use websockets instead" executeQueryStep :: - HTTP.Manager -> RootFieldAlias -> EB.ExecutionStep -> ExceptT (Either GQExecError QErr) m AnnotatedResponsePart - executeQueryStep httpManager fieldName = \case + executeQueryStep fieldName = \case E.ExecStepDB _headers exists remoteJoins -> doQErr $ do (telemTimeIO_DT, resp) <- AB.dispatchAnyBackend @BackendTransport @@ -471,17 +472,17 @@ runGQ env logger reqId userInfo ipAddress reqHeaders queryType reqUnparsed = do \(EB.DBStepInfo _ sourceConfig genSql tx resolvedConnectionTemplate :: EB.DBStepInfo b) -> runDBQuery @b reqId reqUnparsed fieldName userInfo logger sourceConfig tx genSql resolvedConnectionTemplate finalResponse <- - RJ.processRemoteJoins reqId logger env httpManager reqHeaders userInfo resp remoteJoins reqUnparsed + RJ.processRemoteJoins reqId logger env reqHeaders userInfo resp remoteJoins reqUnparsed pure $ AnnotatedResponsePart telemTimeIO_DT Telem.Local finalResponse [] E.ExecStepRemote rsi resultCustomizer gqlReq remoteJoins -> do logQueryLog logger $ QueryLog reqUnparsed Nothing reqId QueryLogKindRemoteSchema - runRemoteGQ httpManager fieldName rsi resultCustomizer gqlReq remoteJoins + runRemoteGQ fieldName rsi resultCustomizer gqlReq remoteJoins E.ExecStepAction aep _ remoteJoins -> do logQueryLog logger $ QueryLog reqUnparsed Nothing reqId QueryLogKindAction (time, resp) <- doQErr $ do (time, (resp, _)) <- EA.runActionExecution userInfo aep finalResponse <- - RJ.processRemoteJoins reqId logger env httpManager reqHeaders userInfo resp remoteJoins reqUnparsed + RJ.processRemoteJoins reqId logger env reqHeaders userInfo resp remoteJoins reqUnparsed pure (time, finalResponse) pure $ AnnotatedResponsePart time Telem.Empty resp [] E.ExecStepRaw json -> do @@ -489,15 +490,14 @@ runGQ env logger reqId userInfo ipAddress reqHeaders queryType reqUnparsed = do buildRaw json -- For `ExecStepMulti`, execute all steps and then concat them in a list E.ExecStepMulti lst -> do - _all <- traverse (executeQueryStep httpManager fieldName) lst + _all <- traverse (executeQueryStep fieldName) lst pure $ AnnotatedResponsePart 0 Telem.Local (encJFromList (map arpResponse _all)) [] executeMutationStep :: - HTTP.Manager -> RootFieldAlias -> EB.ExecutionStep -> ExceptT (Either GQExecError QErr) m AnnotatedResponsePart - executeMutationStep httpManager fieldName = \case + executeMutationStep fieldName = \case E.ExecStepDB responseHeaders exists remoteJoins -> doQErr $ do (telemTimeIO_DT, resp) <- AB.dispatchAnyBackend @BackendTransport @@ -505,17 +505,17 @@ runGQ env logger reqId userInfo ipAddress reqHeaders queryType reqUnparsed = do \(EB.DBStepInfo _ sourceConfig genSql tx resolvedConnectionTemplate :: EB.DBStepInfo b) -> runDBMutation @b reqId reqUnparsed fieldName userInfo logger sourceConfig tx genSql resolvedConnectionTemplate finalResponse <- - RJ.processRemoteJoins reqId logger env httpManager reqHeaders userInfo resp remoteJoins reqUnparsed + RJ.processRemoteJoins reqId logger env reqHeaders userInfo resp remoteJoins reqUnparsed pure $ AnnotatedResponsePart telemTimeIO_DT Telem.Local finalResponse responseHeaders E.ExecStepRemote rsi resultCustomizer gqlReq remoteJoins -> do logQueryLog logger $ QueryLog reqUnparsed Nothing reqId QueryLogKindRemoteSchema - runRemoteGQ httpManager fieldName rsi resultCustomizer 
gqlReq remoteJoins + runRemoteGQ fieldName rsi resultCustomizer gqlReq remoteJoins E.ExecStepAction aep _ remoteJoins -> do logQueryLog logger $ QueryLog reqUnparsed Nothing reqId QueryLogKindAction (time, (resp, hdrs)) <- doQErr $ do (time, (resp, hdrs)) <- EA.runActionExecution userInfo aep finalResponse <- - RJ.processRemoteJoins reqId logger env httpManager reqHeaders userInfo resp remoteJoins reqUnparsed + RJ.processRemoteJoins reqId logger env reqHeaders userInfo resp remoteJoins reqUnparsed pure (time, (finalResponse, hdrs)) pure $ AnnotatedResponsePart time Telem.Empty resp $ fromMaybe [] hdrs E.ExecStepRaw json -> do @@ -523,12 +523,12 @@ runGQ env logger reqId userInfo ipAddress reqHeaders queryType reqUnparsed = do buildRaw json -- For `ExecStepMulti`, execute all steps and then concat them in a list E.ExecStepMulti lst -> do - _all <- traverse (executeQueryStep httpManager fieldName) lst + _all <- traverse (executeQueryStep fieldName) lst pure $ AnnotatedResponsePart 0 Telem.Local (encJFromList (map arpResponse _all)) [] - runRemoteGQ httpManager fieldName rsi resultCustomizer gqlReq remoteJoins = do + runRemoteGQ fieldName rsi resultCustomizer gqlReq remoteJoins = do (telemTimeIO_DT, remoteResponseHeaders, resp) <- - doQErr $ E.execRemoteGQ env httpManager userInfo reqHeaders (rsDef rsi) gqlReq + doQErr $ E.execRemoteGQ env userInfo reqHeaders (rsDef rsi) gqlReq value <- extractFieldFromResponse fieldName resultCustomizer resp finalResponse <- doQErr $ @@ -536,7 +536,6 @@ runGQ env logger reqId userInfo ipAddress reqHeaders queryType reqUnparsed = do reqId logger env - httpManager reqHeaders userInfo -- TODO: avoid encode and decode here @@ -736,7 +735,8 @@ runGQBatched :: MonadExecuteQuery m, MonadMetadataStorage m, EB.MonadQueryTags m, - HasResourceLimits m + HasResourceLimits m, + ProvidesNetwork m ) => Env.Environment -> L.Logger L.Hasura -> diff --git a/server/src-lib/Hasura/GraphQL/Transport/WSServerApp.hs b/server/src-lib/Hasura/GraphQL/Transport/WSServerApp.hs index 6e6e8132876..8cc3bf48a76 100644 --- a/server/src-lib/Hasura/GraphQL/Transport/WSServerApp.hs +++ b/server/src-lib/Hasura/GraphQL/Transport/WSServerApp.hs @@ -43,6 +43,7 @@ import Hasura.Server.Prometheus incWebsocketConnections, ) import Hasura.Server.Types (ReadOnlyMode) +import Hasura.Services.Network import Hasura.Tracing qualified as Tracing import Network.HTTP.Client qualified as HTTP import Network.WebSockets qualified as WS @@ -60,7 +61,8 @@ createWSServerApp :: MonadExecuteQuery m, MonadMetadataStorage m, EB.MonadQueryTags m, - HasResourceLimits m + HasResourceLimits m, + ProvidesNetwork m ) => Env.Environment -> HashSet (L.EngineLogType L.Hasura) -> diff --git a/server/src-lib/Hasura/GraphQL/Transport/WebSocket.hs b/server/src-lib/Hasura/GraphQL/Transport/WebSocket.hs index bd074ca9082..1fd14e79fac 100644 --- a/server/src-lib/Hasura/GraphQL/Transport/WebSocket.hs +++ b/server/src-lib/Hasura/GraphQL/Transport/WebSocket.hs @@ -88,6 +88,7 @@ import Hasura.Server.Prometheus ) import Hasura.Server.Telemetry.Counters qualified as Telem import Hasura.Server.Types (RequestId, getRequestId) +import Hasura.Services.Network import Hasura.Session import Hasura.Tracing qualified as Tracing import Language.GraphQL.Draft.Syntax (Name (..)) @@ -408,7 +409,8 @@ onStart :: MC.MonadBaseControl IO m, MonadMetadataStorage m, EB.MonadQueryTags m, - HasResourceLimits m + HasResourceLimits m, + ProvidesNetwork m ) => Env.Environment -> HashSet (L.EngineLogType L.Hasura) -> @@ -467,7 +469,6 @@ onStart env enabledLogTypes 
serverEnv wsConn shouldCaptureVariables (StartMsg op sc scVer queryType - httpMgr reqHdrs q queryParts @@ -526,7 +527,7 @@ onStart env enabledLogTypes serverEnv wsConn shouldCaptureVariables (StartMsg op genSql resolvedConnectionTemplate finalResponse <- - RJ.processRemoteJoins requestId logger env httpMgr reqHdrs userInfo resp remoteJoins q + RJ.processRemoteJoins requestId logger env reqHdrs userInfo resp remoteJoins q pure $ AnnotatedResponsePart telemTimeIO_DT Telem.Local finalResponse [] E.ExecStepRemote rsi resultCustomizer gqlReq remoteJoins -> do logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindRemoteSchema @@ -536,7 +537,7 @@ onStart env enabledLogTypes serverEnv wsConn shouldCaptureVariables (StartMsg op (time, (resp, _)) <- doQErr $ do (time, (resp, hdrs)) <- EA.runActionExecution userInfo actionExecPlan finalResponse <- - RJ.processRemoteJoins requestId logger env httpMgr reqHdrs userInfo resp remoteJoins q + RJ.processRemoteJoins requestId logger env reqHdrs userInfo resp remoteJoins q pure (time, (finalResponse, hdrs)) pure $ AnnotatedResponsePart time Telem.Empty resp [] E.ExecStepRaw json -> do @@ -604,14 +605,14 @@ onStart env enabledLogTypes serverEnv wsConn shouldCaptureVariables (StartMsg op genSql resolvedConnectionTemplate finalResponse <- - RJ.processRemoteJoins requestId logger env httpMgr reqHdrs userInfo resp remoteJoins q + RJ.processRemoteJoins requestId logger env reqHdrs userInfo resp remoteJoins q pure $ AnnotatedResponsePart telemTimeIO_DT Telem.Local finalResponse [] E.ExecStepAction actionExecPlan _ remoteJoins -> do logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindAction (time, (resp, hdrs)) <- doQErr $ do (time, (resp, hdrs)) <- EA.runActionExecution userInfo actionExecPlan finalResponse <- - RJ.processRemoteJoins requestId logger env httpMgr reqHdrs userInfo resp remoteJoins q + RJ.processRemoteJoins requestId logger env reqHdrs userInfo resp remoteJoins q pure (time, (finalResponse, hdrs)) pure $ AnnotatedResponsePart time Telem.Empty resp $ fromMaybe [] hdrs E.ExecStepRemote rsi resultCustomizer gqlReq remoteJoins -> do @@ -768,7 +769,7 @@ onStart env enabledLogTypes serverEnv wsConn shouldCaptureVariables (StartMsg op runRemoteGQ requestId reqUnparsed fieldName userInfo reqHdrs rsi resultCustomizer gqlReq remoteJoins = do (telemTimeIO_DT, _respHdrs, resp) <- doQErr $ - E.execRemoteGQ env httpMgr userInfo reqHdrs (rsDef rsi) gqlReq + E.execRemoteGQ env userInfo reqHdrs (rsDef rsi) gqlReq value <- mapExceptT lift $ extractFieldFromResponse fieldName resultCustomizer resp finalResponse <- doQErr $ @@ -776,7 +777,6 @@ onStart env enabledLogTypes serverEnv wsConn shouldCaptureVariables (StartMsg op requestId logger env - httpMgr reqHdrs userInfo -- TODO: avoid encode and decode here @@ -789,7 +789,7 @@ onStart env enabledLogTypes serverEnv wsConn shouldCaptureVariables (StartMsg op logger subscriptionsState getSchemaCache - httpMgr + _ _ sqlGenCtx readOnlyMode @@ -1004,7 +1004,8 @@ onMessage :: MC.MonadBaseControl IO m, MonadMetadataStorage m, EB.MonadQueryTags m, - HasResourceLimits m + HasResourceLimits m, + ProvidesNetwork m ) => Env.Environment -> HashSet (L.EngineLogType L.Hasura) -> diff --git a/server/src-lib/Hasura/Metadata/Class.hs b/server/src-lib/Hasura/Metadata/Class.hs index abd7a86e5f0..888208cf5c1 100644 --- a/server/src-lib/Hasura/Metadata/Class.hs +++ b/server/src-lib/Hasura/Metadata/Class.hs @@ -1,5 +1,3 @@ -{-# LANGUAGE UndecidableInstances #-} - -- | This module has type class and types which implements the 
Metadata Storage Abstraction module Hasura.Metadata.Class ( SchemaSyncEventProcessResult (..), diff --git a/server/src-lib/Hasura/RQL/DDL/DataConnector.hs b/server/src-lib/Hasura/RQL/DDL/DataConnector.hs index fd44a971613..d061ec81b32 100644 --- a/server/src-lib/Hasura/RQL/DDL/DataConnector.hs +++ b/server/src-lib/Hasura/RQL/DDL/DataConnector.hs @@ -36,8 +36,8 @@ import Hasura.RQL.Types.Metadata qualified as Metadata import Hasura.RQL.Types.SchemaCache.Build qualified as SC.Build import Hasura.SQL.Backend qualified as Backend import Hasura.SQL.BackendMap qualified as BackendMap +import Hasura.Services.Network import Hasura.Tracing (ignoreTraceT) -import Network.HTTP.Client.Manager qualified as HTTP import Servant.Client qualified as Servant import Servant.Client.Core.HasClient ((//)) import Servant.Client.Generic (genericClient) @@ -81,13 +81,13 @@ instance ToJSON DCAddAgent where -- | Insert a new Data Connector Agent into Metadata. runAddDataConnectorAgent :: ( Metadata.MetadataM m, + ProvidesNetwork m, SC.Build.CacheRWM m, Has (L.Logger L.Hasura) r, MonadReader r m, MonadBaseControl IO m, MonadError Error.QErr m, - MonadIO m, - HTTP.HasHttpManagerM m + MonadIO m ) => DCAddAgent -> m EncJSON @@ -123,16 +123,16 @@ data Availability = Available | NotAvailable Error.QErr -- | Check DC Agent availability by checking its Capabilities endpoint. checkAgentAvailability :: - ( Has (L.Logger L.Hasura) r, + ( ProvidesNetwork m, + Has (L.Logger L.Hasura) r, MonadReader r m, MonadIO m, - MonadBaseControl IO m, - HTTP.HasHttpManagerM m + MonadBaseControl IO m ) => Servant.BaseUrl -> m Availability checkAgentAvailability url = do - manager <- HTTP.askHttpManager + manager <- askHTTPManager logger <- asks getter res <- runExceptT $ do capabilitiesU <- (ignoreTraceT . 
flip runAgentClientT (AgentClientContext logger url manager Nothing) $ genericClient @API.Routes // API._capabilities) diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs index 9561fee79f2..8dad06965ea 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs @@ -90,10 +90,10 @@ import Hasura.SQL.BackendMap qualified as BackendMap import Hasura.SQL.Tag import Hasura.Server.Migrate.Version import Hasura.Server.Types +import Hasura.Services import Hasura.Session import Hasura.Tracing qualified as Tracing import Language.GraphQL.Draft.Syntax qualified as G -import Network.HTTP.Client.Manager (HasHttpManagerM (..)) {- Note [Roles Inheritance] ~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -167,13 +167,13 @@ newtype CacheRWT m a MonadReader r, MonadError e, UserInfoM, - HasHttpManagerM, MonadMetadataStorage, MonadMetadataStorageQueryAPI, Tracing.MonadTrace, HasServerConfigCtx, MonadBase b, - MonadBaseControl b + MonadBaseControl b, + ProvidesNetwork ) instance (MonadEventLogCleanup m) => MonadEventLogCleanup (CacheRWT m) where @@ -198,7 +198,7 @@ instance (Monad m) => CacheRM (CacheRWT m) where instance ( MonadIO m, MonadError QErr m, - HasHttpManagerM m, + ProvidesNetwork m, MonadResolveSource m, HasServerConfigCtx m ) => @@ -306,7 +306,7 @@ buildSchemaCacheRule :: MonadBaseControl IO m, MonadError QErr m, MonadReader BuildReason m, - HasHttpManagerM m, + ProvidesNetwork m, MonadResolveSource m, HasServerConfigCtx m ) => @@ -440,7 +440,7 @@ buildSchemaCacheRule logger env = proc (metadataNoDefaults, invalidationKeys, st ArrowWriter (Seq (Either InconsistentMetadata MetadataDependency)) arr, MonadIO m, MonadBaseControl IO m, - HasHttpManagerM m + ProvidesNetwork m ) => (BackendConfigWrapper b, Inc.Dependency (BackendMap BackendInvalidationKeysWrapper)) `arr` BackendCache resolveBackendInfo' = proc (backendConfigWrapper, backendInvalidationMap) -> do @@ -458,7 +458,7 @@ buildSchemaCacheRule logger env = proc (metadataNoDefaults, invalidationKeys, st ArrowWriter (Seq (Either InconsistentMetadata MetadataDependency)) arr, MonadIO m, MonadBaseControl IO m, - HasHttpManagerM m + ProvidesNetwork m ) => (Inc.Dependency (BackendMap BackendInvalidationKeysWrapper), [AB.AnyBackend BackendConfigWrapper]) `arr` BackendCache resolveBackendCache = proc (backendInvalidationMap, backendConfigs) -> do @@ -478,7 +478,7 @@ buildSchemaCacheRule logger env = proc (metadataNoDefaults, invalidationKeys, st MonadIO m, MonadBaseControl IO m, MonadResolveSource m, - HasHttpManagerM m, + ProvidesNetwork m, BackendMetadata b ) => ( Inc.Dependency (HashMap SourceName Inc.InvalidationKey), @@ -490,7 +490,11 @@ buildSchemaCacheRule logger env = proc (metadataNoDefaults, invalidationKeys, st `arr` Maybe (SourceConfig b) tryGetSourceConfig = Inc.cache proc (invalidationKeys, sourceName, sourceConfig, backendKind, backendInfo) -> do let metadataObj = MetadataObject (MOSource sourceName) $ toJSON sourceName - httpMgr <- bindA -< askHttpManager + -- TODO: if we make all of 'resolveSourceConfig' a Service, we could + -- delegate to it the responsibility of extracting the HTTP manager, and + -- avoid having to thread 'ProvidesNetwork' throughout the cache building + -- code. 
+ httpMgr <- bindA -< askHTTPManager Inc.dependOn -< Inc.selectKeyD sourceName invalidationKeys (| withRecordInconsistency @@ -506,7 +510,7 @@ buildSchemaCacheRule logger env = proc (metadataNoDefaults, invalidationKeys, st MonadIO m, MonadBaseControl IO m, MonadResolveSource m, - HasHttpManagerM m, + ProvidesNetwork m, BackendMetadata b ) => ( Inc.Dependency (HashMap SourceName Inc.InvalidationKey), @@ -711,7 +715,7 @@ buildSchemaCacheRule logger env = proc (metadataNoDefaults, invalidationKeys, st MonadError QErr m, MonadReader BuildReason m, MonadBaseControl IO m, - HasHttpManagerM m, + ProvidesNetwork m, HasServerConfigCtx m, MonadResolveSource m ) => diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Common.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Common.hs index 95a26ed3234..ef445dcb175 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Common.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Common.hs @@ -78,8 +78,8 @@ import Hasura.SQL.Backend import Hasura.SQL.BackendMap (BackendMap) import Hasura.SQL.BackendMap qualified as BackendMap import Hasura.Server.Types +import Hasura.Services import Hasura.Session -import Network.HTTP.Client.Manager (HasHttpManagerM (..)) import Network.HTTP.Client.Transformable qualified as HTTP newtype BackendInvalidationKeysWrapper (b :: BackendType) = BackendInvalidationKeysWrapper @@ -272,8 +272,8 @@ newtype CacheBuild a = CacheBuild (ReaderT CacheBuildParams (ExceptT QErr IO) a) MonadBaseControl IO ) -instance HasHttpManagerM CacheBuild where - askHttpManager = asks _cbpManager +instance ProvidesNetwork CacheBuild where + askHTTPManager = asks _cbpManager instance HasServerConfigCtx CacheBuild where askServerConfigCtx = asks _cbpServerConfigCtx @@ -295,16 +295,16 @@ runCacheBuild params (CacheBuild m) = do runCacheBuildM :: ( MonadIO m, MonadError QErr m, - HasHttpManagerM m, HasServerConfigCtx m, - MonadResolveSource m + MonadResolveSource m, + ProvidesNetwork m ) => CacheBuild a -> m a runCacheBuildM m = do params <- CacheBuildParams - <$> askHttpManager + <$> askHTTPManager <*> getPGSourceResolver <*> getMSSQLSourceResolver <*> askServerConfigCtx diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Source.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Source.hs index f381d8c27b9..636baca9e43 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Source.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Source.hs @@ -77,8 +77,9 @@ import Hasura.SQL.Backend import Hasura.SQL.Backend qualified as Backend import Hasura.SQL.BackendMap qualified as BackendMap import Hasura.Server.Logging (MetadataLog (..)) +import Hasura.Services import Hasura.Tracing qualified as Tracing -import Network.HTTP.Client.Manager qualified as HTTP.Manager +import Network.HTTP.Client qualified as HTTP import Servant.API (Union) import Servant.Client (BaseUrl, (//)) import Servant.Client.Generic qualified as Servant.Client @@ -356,12 +357,12 @@ instance FromJSON GetSourceTables where runGetSourceTables :: ( CacheRM m, Has (L.Logger L.Hasura) r, - HTTP.Manager.HasHttpManagerM m, MonadReader r m, MonadError Error.QErr m, Metadata.MetadataM m, MonadIO m, - MonadBaseControl IO m + MonadBaseControl IO m, + ProvidesNetwork m ) => Env.Environment -> GetSourceTables -> @@ -378,7 +379,7 @@ runGetSourceTables env GetSourceTables {..} = do case _smKind of Backend.DataConnectorKind dcName -> do logger :: L.Logger L.Hasura <- asks getter - manager <- HTTP.Manager.askHttpManager + manager <- askHTTPManager let timeout = DC.Types.timeout _smConfiguration DC.Types.DataConnectorOptions {..} 
<- lookupDataConnectorOptions dcName bmap @@ -408,7 +409,7 @@ instance FromJSON GetTableInfo where runGetTableInfo :: ( CacheRM m, Has (L.Logger L.Hasura) r, - HTTP.Manager.HasHttpManagerM m, + ProvidesNetwork m, MonadReader r m, MonadError Error.QErr m, Metadata.MetadataM m, @@ -430,7 +431,7 @@ runGetTableInfo env GetTableInfo {..} = do case _smKind of Backend.DataConnectorKind dcName -> do logger :: L.Logger L.Hasura <- asks getter - manager <- HTTP.Manager.askHttpManager + manager <- askHTTPManager let timeout = DC.Types.timeout _smConfiguration DC.Types.DataConnectorOptions {..} <- lookupDataConnectorOptions dcName bmap @@ -459,7 +460,7 @@ lookupDataConnectorOptions dcName bmap = querySourceSchema :: (MonadIO m, MonadBaseControl IO m, MonadError QErr m) => L.Logger L.Hasura -> - HTTP.Manager.Manager -> + HTTP.Manager -> Maybe DC.Types.SourceTimeout -> BaseUrl -> SourceName -> diff --git a/server/src-lib/Hasura/RQL/Types/Action.hs b/server/src-lib/Hasura/RQL/Types/Action.hs index c8611510a6c..2bf142c4a2e 100644 --- a/server/src-lib/Hasura/RQL/Types/Action.hs +++ b/server/src-lib/Hasura/RQL/Types/Action.hs @@ -79,7 +79,6 @@ import Hasura.RQL.Types.CustomTypes import Hasura.RQL.Types.Eventing (EventId (..)) import Hasura.Session import Language.GraphQL.Draft.Syntax qualified as G -import Network.HTTP.Client qualified as HTTP import Network.HTTP.Types qualified as HTTP import PostgreSQL.Binary.Encoding qualified as PE @@ -289,8 +288,7 @@ newtype ActionPermissionInfo = ActionPermissionInfo -- GraphQL.Execute. data ActionExecContext = ActionExecContext - { _aecManager :: HTTP.Manager, - _aecHeaders :: HTTP.RequestHeaders, + { _aecHeaders :: HTTP.RequestHeaders, _aecSessionVariables :: SessionVariables } diff --git a/server/src-lib/Hasura/RQL/Types/Metadata/Backend.hs b/server/src-lib/Hasura/RQL/Types/Metadata/Backend.hs index cbd89d39194..b93bc4cf796 100644 --- a/server/src-lib/Hasura/RQL/Types/Metadata/Backend.hs +++ b/server/src-lib/Hasura/RQL/Types/Metadata/Backend.hs @@ -31,8 +31,8 @@ import Hasura.RQL.Types.Table import Hasura.SQL.Backend import Hasura.SQL.Types import Hasura.Server.Migrate.Version +import Hasura.Services.Network import Network.HTTP.Client qualified as HTTP -import Network.HTTP.Client.Manager (HasHttpManagerM) class ( Backend b, @@ -76,7 +76,7 @@ class ArrowWriter (Seq (Either InconsistentMetadata MetadataDependency)) arr, MonadIO m, MonadBaseControl IO m, - HasHttpManagerM m + ProvidesNetwork m ) => Logger Hasura -> (Inc.Dependency (Maybe (BackendInvalidationKeys b)), BackendConfig b) `arr` BackendInfo b diff --git a/server/src-lib/Hasura/RQL/Types/Run.hs b/server/src-lib/Hasura/RQL/Types/Run.hs index 24bd6128fcb..84351d0476c 100644 --- a/server/src-lib/Hasura/RQL/Types/Run.hs +++ b/server/src-lib/Hasura/RQL/Types/Run.hs @@ -7,19 +7,19 @@ module Hasura.RQL.Types.Run ) where +import Control.Monad.Trans import Control.Monad.Trans.Control (MonadBaseControl) import Hasura.Metadata.Class import Hasura.Prelude import Hasura.RQL.DDL.EventTrigger (MonadEventLogCleanup (..)) import Hasura.RQL.Types.Source import Hasura.Server.Types +import Hasura.Services import Hasura.Session import Hasura.Tracing qualified as Tracing -import Network.HTTP.Client.Manager qualified as HTTP data RunCtx = RunCtx { _rcUserInfo :: UserInfo, - _rcHttpMgr :: HTTP.Manager, _rcServerConfigCtx :: ServerConfigCtx } @@ -35,7 +35,8 @@ newtype RunT m a = RunT {unRunT :: ReaderT RunCtx m a} MonadBase b, MonadBaseControl b, MonadMetadataStorage, - MonadMetadataStorageQueryAPI + MonadMetadataStorageQueryAPI, + 
ProvidesNetwork ) instance MonadTrans RunT where @@ -44,9 +45,6 @@ instance MonadTrans RunT where instance (Monad m) => UserInfoM (RunT m) where askUserInfo = asks _rcUserInfo -instance (Monad m) => HTTP.HasHttpManagerM (RunT m) where - askHttpManager = asks _rcHttpMgr - instance (Monad m) => HasServerConfigCtx (RunT m) where askServerConfigCtx = asks _rcServerConfigCtx diff --git a/server/src-lib/Hasura/RQL/Types/SchemaCache/Build.hs b/server/src-lib/Hasura/RQL/Types/SchemaCache/Build.hs index 98d1ce60224..ce57e32ddfc 100644 --- a/server/src-lib/Hasura/RQL/Types/SchemaCache/Build.hs +++ b/server/src-lib/Hasura/RQL/Types/SchemaCache/Build.hs @@ -58,11 +58,11 @@ import Hasura.RQL.Types.QueryCollection import Hasura.RQL.Types.SchemaCache import Hasura.RemoteSchema.Metadata (RemoteSchemaName) import Hasura.Server.Types +import Hasura.Services.Network import Hasura.Session import Hasura.Tracing (TraceT) import Hasura.Tracing qualified as Tracing import Language.GraphQL.Draft.Syntax qualified as G -import Network.HTTP.Client.Manager (HasHttpManagerM (..)) -- * Inconsistencies @@ -237,16 +237,14 @@ newtype MetadataT m a = MetadataT {unMetadataT :: StateT Metadata m a} MFunctor, Tracing.MonadTrace, MonadBase b, - MonadBaseControl b + MonadBaseControl b, + ProvidesNetwork ) instance (Monad m) => MetadataM (MetadataT m) where getMetadata = MetadataT get putMetadata = MetadataT . put -instance (HasHttpManagerM m) => HasHttpManagerM (MetadataT m) where - askHttpManager = lift askHttpManager - instance (UserInfoM m) => UserInfoM (MetadataT m) where askUserInfo = lift askUserInfo diff --git a/server/src-lib/Hasura/RemoteSchema/MetadataAPI/Core.hs b/server/src-lib/Hasura/RemoteSchema/MetadataAPI/Core.hs index a0e99183051..d1473222409 100644 --- a/server/src-lib/Hasura/RemoteSchema/MetadataAPI/Core.hs +++ b/server/src-lib/Hasura/RemoteSchema/MetadataAPI/Core.hs @@ -33,9 +33,9 @@ import Hasura.RQL.Types.SchemaCacheTypes import Hasura.RemoteSchema.Metadata import Hasura.RemoteSchema.SchemaCache.Build (addRemoteSchemaP2Setup) import Hasura.RemoteSchema.SchemaCache.Types +import Hasura.Services import Hasura.Session import Hasura.Tracing qualified as Tracing -import Network.HTTP.Client.Manager (HasHttpManagerM (..)) -- | The payload for 'add_remote_schema', and a component of 'Metadata'. 
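Several hunks above (CacheRWT, RunT, MetadataT) replace a hand-written HasHttpManagerM instance with ProvidesNetwork in a newtype-deriving list. A compact sketch of why plain GeneralizedNewtypeDeriving suffices once the underlying transformer has a lifting instance; MetadataTLike is an invented stand-in for MetadataT, and the class and instance are local stand-ins for Hasura.Services.Network:

```haskell
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE ImportQualifiedPost #-}

module DerivingSketch where

import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.State (StateT)
import Network.HTTP.Client qualified as HTTP

class Monad m => ProvidesNetwork m where
  askHTTPManager :: m HTTP.Manager

instance ProvidesNetwork m => ProvidesNetwork (StateT s m) where
  askHTTPManager = lift askHTTPManager

-- Mirrors MetadataT, which wraps 'StateT Metadata m': because the underlying
-- transformer already has an instance, newtype deriving can reuse it, and the
-- manual "askHttpManager = lift askHttpManager" instance that this diff
-- deletes is no longer needed.
newtype MetadataTLike m a = MetadataTLike (StateT () m a)
  deriving (Functor, Applicative, Monad, ProvidesNetwork)
```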
data AddRemoteSchemaQuery = AddRemoteSchemaQuery @@ -62,7 +62,7 @@ runAddRemoteSchema :: ( QErrM m, CacheRWM m, MonadIO m, - HasHttpManagerM m, + ProvidesNetwork m, MetadataM m, Tracing.MonadTrace m ) => @@ -163,7 +163,7 @@ runUpdateRemoteSchema :: ( QErrM m, CacheRWM m, MonadIO m, - HasHttpManagerM m, + ProvidesNetwork m, MetadataM m, Tracing.MonadTrace m ) => @@ -196,9 +196,8 @@ runUpdateRemoteSchema env (AddRemoteSchemaQuery name defn comment) = do ( (isJust metadataRMSchemaURL && isJust currentRMSchemaURL && metadataRMSchemaURL == currentRMSchemaURL) || (isJust metadataRMSchemaURLFromEnv && isJust currentRMSchemaURLFromEnv && metadataRMSchemaURLFromEnv == currentRMSchemaURLFromEnv) ) - $ do - httpMgr <- askHttpManager - void $ fetchRemoteSchema env httpMgr name rsi + $ void + $ fetchRemoteSchema env name rsi -- This will throw an error if the new schema fetched in incompatible -- with the existing permissions and relations diff --git a/server/src-lib/Hasura/RemoteSchema/SchemaCache/Build.hs b/server/src-lib/Hasura/RemoteSchema/SchemaCache/Build.hs index cf5d565c00e..2eca2a1a469 100644 --- a/server/src-lib/Hasura/RemoteSchema/SchemaCache/Build.hs +++ b/server/src-lib/Hasura/RemoteSchema/SchemaCache/Build.hs @@ -28,9 +28,9 @@ import Hasura.RQL.Types.SchemaCache.Build import Hasura.RemoteSchema.Metadata import Hasura.RemoteSchema.SchemaCache.Permission (resolveRoleBasedRemoteSchema) import Hasura.RemoteSchema.SchemaCache.Types +import Hasura.Services import Hasura.Session import Hasura.Tracing qualified as Tracing -import Network.HTTP.Client.Manager (HasHttpManagerM (..)) -- Resolves a user specified `RemoteSchemaMetadata` into information rich `RemoteSchemaCtx` -- However, given the nature of remote relationships, we cannot fully 'resolve' them, so @@ -42,10 +42,10 @@ buildRemoteSchemas :: Inc.ArrowCache m arr, MonadIO m, MonadBaseControl IO m, - HasHttpManagerM m, Eq remoteRelationshipDefinition, ToJSON remoteRelationshipDefinition, - MonadError QErr m + MonadError QErr m, + ProvidesNetwork m ) => Env.Environment -> ( (Inc.Dependency (HashMap RemoteSchemaName Inc.InvalidationKey), OrderedRoles, Maybe (HashMap RemoteSchemaName BL.ByteString)), @@ -170,12 +170,11 @@ buildRemoteSchemaPermissions = proc ((remoteSchemaName, originalIntrospection, o in MetadataObject objectId $ toJSON defn addRemoteSchemaP2Setup :: - (QErrM m, MonadIO m, HasHttpManagerM m, Tracing.MonadTrace m) => + (QErrM m, MonadIO m, ProvidesNetwork m, Tracing.MonadTrace m) => Env.Environment -> RemoteSchemaName -> RemoteSchemaDef -> m (IntrospectionResult, BL.ByteString, RemoteSchemaInfo) addRemoteSchemaP2Setup env name def = do rsi <- validateRemoteSchemaDef env def - httpMgr <- askHttpManager - fetchRemoteSchema env httpMgr name rsi + fetchRemoteSchema env name rsi diff --git a/server/src-lib/Hasura/Server/API/Metadata.hs b/server/src-lib/Hasura/Server/API/Metadata.hs index 23f66936d95..9622c0c62fa 100644 --- a/server/src-lib/Hasura/Server/API/Metadata.hs +++ b/server/src-lib/Hasura/Server/API/Metadata.hs @@ -80,9 +80,9 @@ import Hasura.Server.Logging (SchemaSyncLog (..), SchemaSyncThreadType (TTMetada import Hasura.Server.SchemaCacheRef import Hasura.Server.Types import Hasura.Server.Utils (APIVersion (..)) +import Hasura.Services import Hasura.Session import Hasura.Tracing qualified as Tracing -import Network.HTTP.Client.Manager qualified as HTTP data RQLMetadataV1 = -- Sources @@ -383,18 +383,18 @@ runMetadataQuery :: Tracing.MonadTrace m, MonadMetadataStorageQueryAPI m, MonadResolveSource m, - MonadEventLogCleanup m 
+ MonadEventLogCleanup m, + ProvidesHasuraServices m ) => Env.Environment -> L.Logger L.Hasura -> InstanceId -> UserInfo -> - HTTP.Manager -> ServerConfigCtx -> SchemaCacheRef -> RQLMetadata -> m (EncJSON, RebuildableSchemaCache) -runMetadataQuery env logger instanceId userInfo httpManager serverConfigCtx schemaCacheRef RQLMetadata {..} = do +runMetadataQuery env logger instanceId userInfo serverConfigCtx schemaCacheRef RQLMetadata {..} = do schemaCache <- liftIO $ fst <$> readSchemaCacheRef schemaCacheRef (metadata, currentResourceVersion) <- Tracing.trace "fetchMetadata" $ liftEitherM fetchMetadata let exportsMetadata = \case @@ -425,7 +425,7 @@ runMetadataQuery env logger instanceId userInfo httpManager serverConfigCtx sche & flip runReaderT logger & runMetadataT metadata metadataDefaults & runCacheRWT schemaCache - & peelRun (RunCtx userInfo httpManager serverConfigCtx) + & peelRun (RunCtx userInfo serverConfigCtx) -- set modified metadata in storage if queryModifiesMetadata _rqlMetadata then case (_sccMaintenanceMode serverConfigCtx, _sccReadOnlyMode serverConfigCtx) of @@ -457,7 +457,7 @@ runMetadataQuery env logger instanceId userInfo httpManager serverConfigCtx sche Tracing.trace "setMetadataResourceVersionInSchemaCache" $ setMetadataResourceVersionInSchemaCache newResourceVersion & runCacheRWT modSchemaCache - & peelRun (RunCtx userInfo httpManager serverConfigCtx) + & peelRun (RunCtx userInfo serverConfigCtx) pure (r, modSchemaCache') (MaintenanceModeEnabled (), ReadOnlyModeDisabled) -> @@ -593,14 +593,14 @@ runMetadataQueryM :: CacheRWM m, Tracing.MonadTrace m, UserInfoM m, - HTTP.HasHttpManagerM m, MetadataM m, MonadMetadataStorageQueryAPI m, HasServerConfigCtx m, MonadReader r m, Has (L.Logger L.Hasura) r, MonadError QErr m, - MonadEventLogCleanup m + MonadEventLogCleanup m, + ProvidesHasuraServices m ) => Env.Environment -> MetadataResourceVersion -> @@ -624,14 +624,14 @@ runMetadataQueryV1M :: CacheRWM m, Tracing.MonadTrace m, UserInfoM m, - HTTP.HasHttpManagerM m, MetadataM m, MonadMetadataStorageQueryAPI m, HasServerConfigCtx m, MonadReader r m, Has (L.Logger L.Hasura) r, MonadError QErr m, - MonadEventLogCleanup m + MonadEventLogCleanup m, + ProvidesHasuraServices m ) => Env.Environment -> MetadataResourceVersion -> diff --git a/server/src-lib/Hasura/Server/API/Query.hs b/server/src-lib/Hasura/Server/API/Query.hs index 7f3d6797947..8b97831278a 100644 --- a/server/src-lib/Hasura/Server/API/Query.hs +++ b/server/src-lib/Hasura/Server/API/Query.hs @@ -56,10 +56,9 @@ import Hasura.RemoteSchema.MetadataAPI import Hasura.SQL.Backend import Hasura.Server.Types import Hasura.Server.Utils +import Hasura.Services import Hasura.Session import Hasura.Tracing qualified as Tracing -import Network.HTTP.Client qualified as HTTP -import Network.HTTP.Client.Manager (HasHttpManagerM (..)) data RQLQueryV1 = RQAddExistingTableOrView !(TrackTable ('Postgres 'Vanilla)) @@ -182,18 +181,18 @@ runQuery :: MonadMetadataStorageQueryAPI m, MonadResolveSource m, MonadQueryTags m, - MonadEventLogCleanup m + MonadEventLogCleanup m, + ProvidesHasuraServices m ) => Env.Environment -> L.Logger L.Hasura -> InstanceId -> UserInfo -> RebuildableSchemaCache -> - HTTP.Manager -> ServerConfigCtx -> RQLQuery -> m (EncJSON, RebuildableSchemaCache) -runQuery env logger instanceId userInfo sc hMgr serverConfigCtx query = do +runQuery env logger instanceId userInfo sc serverConfigCtx query = do when ((_sccReadOnlyMode serverConfigCtx == ReadOnlyModeEnabled) && queryModifiesUserDB query) $ throw400 NotSupported 
"Cannot run write queries when read-only mode is enabled" @@ -216,7 +215,7 @@ runQuery env logger instanceId userInfo sc hMgr serverConfigCtx query = do pure (js, rsc, ci, meta) withReload currentResourceVersion result where - runCtx = RunCtx userInfo hMgr serverConfigCtx + runCtx = RunCtx userInfo serverConfigCtx withReload currentResourceVersion (result, updatedCache, invalidations, updatedMetadata) = do when (queryModifiesSchemaCache query) $ do @@ -394,7 +393,6 @@ runQueryM :: UserInfoM m, MonadBaseControl IO m, MonadIO m, - HasHttpManagerM m, HasServerConfigCtx m, Tracing.MonadTrace m, MetadataM m, @@ -403,7 +401,8 @@ runQueryM :: MonadReader r m, MonadError QErr m, Has (L.Logger L.Hasura) r, - MonadEventLogCleanup m + MonadEventLogCleanup m, + ProvidesHasuraServices m ) => Env.Environment -> RQLQuery -> diff --git a/server/src-lib/Hasura/Server/API/V2Query.hs b/server/src-lib/Hasura/Server/API/V2Query.hs index 31f7947baf4..d914d1b2a84 100644 --- a/server/src-lib/Hasura/Server/API/V2Query.hs +++ b/server/src-lib/Hasura/Server/API/V2Query.hs @@ -46,10 +46,10 @@ import Hasura.RQL.Types.SchemaCache.Build import Hasura.RQL.Types.Source import Hasura.SQL.Backend import Hasura.Server.Types +import Hasura.Services import Hasura.Session import Hasura.Tracing qualified as Tracing import Language.GraphQL.Draft.Syntax qualified as GQL -import Network.HTTP.Client qualified as HTTP data RQLQuery = RQInsert !InsertQuery @@ -108,17 +108,17 @@ runQuery :: Tracing.MonadTrace m, MonadMetadataStorage m, MonadResolveSource m, - MonadQueryTags m + MonadQueryTags m, + ProvidesHasuraServices m ) => Env.Environment -> InstanceId -> UserInfo -> RebuildableSchemaCache -> - HTTP.Manager -> ServerConfigCtx -> RQLQuery -> m (EncJSON, RebuildableSchemaCache) -runQuery env instanceId userInfo schemaCache httpManager serverConfigCtx rqlQuery = do +runQuery env instanceId userInfo schemaCache serverConfigCtx rqlQuery = do when ((_sccReadOnlyMode serverConfigCtx == ReadOnlyModeEnabled) && queryModifiesUserDB rqlQuery) $ throw400 NotSupported "Cannot run write queries when read-only mode is enabled" @@ -134,7 +134,7 @@ runQuery env instanceId userInfo schemaCache httpManager serverConfigCtx rqlQuer pure (js, rsc, ci, meta) withReload currentResourceVersion result where - runCtx = RunCtx userInfo httpManager serverConfigCtx + runCtx = RunCtx userInfo serverConfigCtx withReload currentResourceVersion (result, updatedCache, invalidations, updatedMetadata) = do when (queryModifiesSchema rqlQuery) $ do diff --git a/server/src-lib/Hasura/Server/App.hs b/server/src-lib/Hasura/Server/App.hs index 3e64af50759..8ad9b58eddc 100644 --- a/server/src-lib/Hasura/Server/App.hs +++ b/server/src-lib/Hasura/Server/App.hs @@ -1,4 +1,5 @@ {-# LANGUAGE CPP #-} +{-# LANGUAGE UndecidableInstances #-} module Hasura.Server.App ( APIResp (JSONResp, RawResp), @@ -100,6 +101,7 @@ import Hasura.Server.SchemaCacheRef import Hasura.Server.Types import Hasura.Server.Utils import Hasura.Server.Version +import Hasura.Services import Hasura.Session import Hasura.ShutdownLatch import Hasura.Tracing (MonadTrace) @@ -165,7 +167,40 @@ data HandlerCtx = HandlerCtx hcSourceIpAddress :: !Wai.IpAddress } -type Handler m = ReaderT HandlerCtx (ExceptT QErr m) +newtype Handler m a = Handler (ReaderT HandlerCtx (ExceptT QErr m) a) + deriving + ( Functor, + Applicative, + Monad, + MonadIO, + MonadFix, + MonadBase b, + MonadBaseControl b, + MonadReader HandlerCtx, + MonadError QErr, + -- Tracing.HasReporter, + Tracing.MonadTrace, + HasResourceLimits, + 
MonadResolveSource, + HasServerConfigCtx, + E.MonadGQLExecutionCheck, + MonadEventLogCleanup, + MonadQueryLog, + EB.MonadQueryTags, + GH.MonadExecuteQuery, + MonadMetadataApiAuthorization, + MonadMetadataStorage, + MonadMetadataStorageQueryAPI, + ProvidesNetwork + ) + +instance MonadTrans Handler where + lift = Handler . lift . lift + +runHandler :: (HasResourceLimits m, MonadBaseControl IO m) => HandlerCtx -> Handler m a -> m (Either QErr a) +runHandler ctx (Handler r) = do + handlerLimit <- askHTTPHandlerLimit + runExceptT $ flip runReaderT ctx $ runResourceLimits handlerLimit r data APIResp = JSONResp !(HttpResponse EncJSON) @@ -311,7 +346,6 @@ mkSpockAction serverCtx@ServerCtx {..} qErrEncoder qErrModifier apiHandler = do (requestId, headers) <- getRequestId origHeaders tracingCtx <- liftIO $ Tracing.extractB3HttpContext headers - handlerLimit <- lift askHTTPHandlerLimit let runTraceT :: forall m1 a1. @@ -323,14 +357,6 @@ mkSpockAction serverCtx@ServerCtx {..} qErrEncoder qErrModifier apiHandler = do scTraceSamplingPolicy (fromString (B8.unpack pathInfo)) - runHandler :: - MonadBaseControl IO m2 => - HandlerCtx -> - ReaderT HandlerCtx (ExceptT QErr m2) a2 -> - m2 (Either QErr a2) - runHandler handlerCtx handler = - runExceptT $ flip runReaderT handlerCtx $ runResourceLimits handlerLimit $ handler - getInfo parsedRequest = do authenticationResp <- lift (resolveUserInfo (_lsLogger scLoggers) scManager headers scAuthMode parsedRequest) authInfo <- onLeft authenticationResp (logErrorAndResp Nothing requestId req (reqBody, Nothing) False origHeaders (ExtraUserInfo Nothing) . qErrModifier) @@ -430,7 +456,8 @@ v1QueryHandler :: MonadMetadataStorageQueryAPI m, MonadResolveSource m, EB.MonadQueryTags m, - MonadEventLogCleanup m + MonadEventLogCleanup m, + ProvidesNetwork m ) => RQLQuery -> m (HttpResponse EncJSON) @@ -446,7 +473,6 @@ v1QueryHandler query = do scRef <- asks (scCacheRef . hcServerCtx) metadataDefaults <- asks (scMetadataDefaults . hcServerCtx) schemaCache <- liftIO $ fst <$> readSchemaCacheRef scRef - httpMgr <- asks (scManager . hcServerCtx) sqlGenCtx <- asks (scSQLGenCtx . hcServerCtx) instanceId <- asks (scInstanceId . hcServerCtx) env <- asks (scEnvironment . hcServerCtx) @@ -476,7 +502,6 @@ v1QueryHandler query = do instanceId userInfo schemaCache - httpMgr serverConfigCtx query @@ -489,7 +514,8 @@ v1MetadataHandler :: MonadMetadataStorageQueryAPI m, MonadResolveSource m, MonadMetadataApiAuthorization m, - MonadEventLogCleanup m + MonadEventLogCleanup m, + ProvidesNetwork m ) => RQLMetadata -> m (HttpResponse EncJSON) @@ -497,7 +523,6 @@ v1MetadataHandler query = Tracing.trace "Metadata" $ do (liftEitherM . authorizeV1MetadataApi query) =<< ask userInfo <- asks hcUser scRef <- asks (scCacheRef . hcServerCtx) - httpMgr <- asks (scManager . hcServerCtx) _sccSQLGenCtx <- asks (scSQLGenCtx . hcServerCtx) env <- asks (scEnvironment . hcServerCtx) instanceId <- asks (scInstanceId . hcServerCtx) @@ -522,7 +547,6 @@ v1MetadataHandler query = Tracing.trace "Metadata" $ do logger instanceId userInfo - httpMgr serverConfigCtx scRef query @@ -537,7 +561,8 @@ v2QueryHandler :: MonadReader HandlerCtx m, MonadMetadataStorage m, MonadResolveSource m, - EB.MonadQueryTags m + EB.MonadQueryTags m, + ProvidesNetwork m ) => V2Q.RQLQuery -> m (HttpResponse EncJSON) @@ -555,7 +580,6 @@ v2QueryHandler query = Tracing.trace "v2 Query" $ do userInfo <- asks hcUser scRef <- asks (scCacheRef . hcServerCtx) schemaCache <- liftIO $ fst <$> readSchemaCacheRef scRef - httpMgr <- asks (scManager . 
hcServerCtx) sqlGenCtx <- asks (scSQLGenCtx . hcServerCtx) instanceId <- asks (scInstanceId . hcServerCtx) env <- asks (scEnvironment . hcServerCtx) @@ -581,7 +605,7 @@ v2QueryHandler query = Tracing.trace "v2 Query" $ do defaultMetadata checkFeatureFlag - V2Q.runQuery env instanceId userInfo schemaCache httpMgr serverConfigCtx query + V2Q.runQuery env instanceId userInfo schemaCache serverConfigCtx query v1Alpha1GQHandler :: ( MonadIO m, @@ -594,7 +618,8 @@ v1Alpha1GQHandler :: MonadReader HandlerCtx m, MonadMetadataStorage m, EB.MonadQueryTags m, - HasResourceLimits m + HasResourceLimits m, + ProvidesNetwork m ) => E.GraphQLQueryType -> GH.GQLBatchedReqs (GH.GQLReq GH.GQLQueryText) -> @@ -619,7 +644,6 @@ mkExecutionContext :: ) => m E.ExecutionCtx mkExecutionContext = do - manager <- asks (scManager . hcServerCtx) scRef <- asks (scCacheRef . hcServerCtx) (sc, scVer) <- liftIO $ readSchemaCacheRef scRef sqlGenCtx <- asks (scSQLGenCtx . hcServerCtx) @@ -627,7 +651,7 @@ mkExecutionContext = do logger <- asks (_lsLogger . scLoggers . hcServerCtx) readOnlyMode <- asks (scEnableReadOnlyMode . hcServerCtx) prometheusMetrics <- asks (scPrometheusMetrics . hcServerCtx) - pure $ E.ExecutionCtx logger sqlGenCtx (lastBuiltSchemaCache sc) scVer manager enableAL readOnlyMode prometheusMetrics + pure $ E.ExecutionCtx logger sqlGenCtx (lastBuiltSchemaCache sc) scVer enableAL readOnlyMode prometheusMetrics v1GQHandler :: ( MonadIO m, @@ -640,7 +664,8 @@ v1GQHandler :: MonadReader HandlerCtx m, MonadMetadataStorage m, EB.MonadQueryTags m, - HasResourceLimits m + HasResourceLimits m, + ProvidesNetwork m ) => GH.GQLBatchedReqs (GH.GQLReq GH.GQLQueryText) -> m (HttpLogGraphQLInfo, HttpResponse EncJSON) @@ -657,7 +682,8 @@ v1GQRelayHandler :: MonadReader HandlerCtx m, MonadMetadataStorage m, EB.MonadQueryTags m, - HasResourceLimits m + HasResourceLimits m, + ProvidesNetwork m ) => GH.GQLBatchedReqs (GH.GQLReq GH.GQLQueryText) -> m (HttpLogGraphQLInfo, HttpResponse EncJSON) @@ -749,7 +775,13 @@ renderHtmlTemplate template jVal = -- | Default implementation of the 'MonadConfigApiHandler' configApiGetHandler :: forall m. 
- (MonadIO m, MonadBaseControl IO m, UserAuthentication (Tracing.TraceT m), HttpLog m, Tracing.HasReporter m, HasResourceLimits m) => + ( MonadIO m, + MonadBaseControl IO m, + UserAuthentication (Tracing.TraceT m), + HttpLog m, + Tracing.HasReporter m, + HasResourceLimits m + ) => ServerCtx -> Maybe Text -> Spock.SpockCtxT () m () @@ -802,7 +834,8 @@ mkWaiApp :: MonadMetadataStorageQueryAPI m, MonadResolveSource m, EB.MonadQueryTags m, - MonadEventLogCleanup m + MonadEventLogCleanup m, + ProvidesNetwork m ) => (ServerCtx -> Spock.SpockT m ()) -> -- | Set of environment variables for reference in UIs @@ -840,15 +873,14 @@ mkWaiApp wsConnInitTimeout ekgStore = do let getSchemaCache' = first lastBuiltSchemaCache <$> readSchemaCacheRef schemaCacheRef - - let corsPolicy = mkDefaultCorsPolicy corsCfg - + corsPolicy = mkDefaultCorsPolicy corsCfg + httpManager <- askHTTPManager wsServerEnv <- WS.createWSServerEnv (_lsLogger scLoggers) scSubscriptionState getSchemaCache' - scManager + httpManager corsPolicy scSQLGenCtx scEnableReadOnlyMode @@ -890,7 +922,8 @@ httpApp :: HasResourceLimits m, MonadResolveSource m, EB.MonadQueryTags m, - MonadEventLogCleanup m + MonadEventLogCleanup m, + ProvidesNetwork m ) => (ServerCtx -> Spock.SpockT m ()) -> CorsConfig -> @@ -970,7 +1003,8 @@ httpApp setupHook corsCfg serverCtx consoleStatus consoleAssetsDir consoleSentry GH.MonadExecuteQuery n, MonadMetadataStorage n, EB.MonadQueryTags n, - HasResourceLimits n + HasResourceLimits n, + ProvidesNetwork n ) => RestRequest Spock.SpockMethod -> Handler (Tracing.TraceT n) (HttpLogGraphQLInfo, APIResp) @@ -1138,7 +1172,14 @@ httpApp setupHook corsCfg serverCtx consoleStatus consoleAssetsDir consoleSentry spockAction :: forall a n. - (FromJSON a, MonadIO n, MonadBaseControl IO n, UserAuthentication (Tracing.TraceT n), HttpLog n, Tracing.HasReporter n, HasResourceLimits n) => + ( FromJSON a, + MonadIO n, + MonadBaseControl IO n, + UserAuthentication (Tracing.TraceT n), + HttpLog n, + Tracing.HasReporter n, + HasResourceLimits n + ) => (Bool -> QErr -> Value) -> (QErr -> QErr) -> APIHandler (Tracing.TraceT n) a -> diff --git a/server/src-lib/Hasura/Server/Rest.hs b/server/src-lib/Hasura/Server/Rest.hs index f98594a5bfb..e61e471bf24 100644 --- a/server/src-lib/Hasura/Server/Rest.hs +++ b/server/src-lib/Hasura/Server/Rest.hs @@ -32,6 +32,7 @@ import Hasura.Server.Limits import Hasura.Server.Logging import Hasura.Server.Name qualified as Name import Hasura.Server.Types +import Hasura.Services.Network import Hasura.Session import Hasura.Tracing qualified as Tracing import Language.GraphQL.Draft.Syntax qualified as G @@ -100,7 +101,8 @@ runCustomEndpoint :: GH.MonadExecuteQuery m, MonadMetadataStorage m, EB.MonadQueryTags m, - HasResourceLimits m + HasResourceLimits m, + ProvidesNetwork m ) => Env.Environment -> E.ExecutionCtx -> diff --git a/server/src-lib/Hasura/Server/SchemaUpdate.hs b/server/src-lib/Hasura/Server/SchemaUpdate.hs index 388cce64ed8..907843804d6 100644 --- a/server/src-lib/Hasura/Server/SchemaUpdate.hs +++ b/server/src-lib/Hasura/Server/SchemaUpdate.hs @@ -39,8 +39,8 @@ import Hasura.Server.SchemaCacheRef withSchemaCacheUpdate, ) import Hasura.Server.Types +import Hasura.Services import Hasura.Session -import Network.HTTP.Client qualified as HTTP import Refined (NonNegative, Refined, unrefine) data ThreadError @@ -138,10 +138,10 @@ startSchemaSyncListenerThread logger pool instanceId interval metaVersionRef = d startSchemaSyncProcessorThread :: ( C.ForkableMonadIO m, MonadMetadataStorage m, - 
MonadResolveSource m + MonadResolveSource m, + ProvidesHasuraServices m ) => Logger Hasura -> - HTTP.Manager -> STM.TMVar MetadataResourceVersion -> SchemaCacheRef -> InstanceId -> @@ -150,7 +150,6 @@ startSchemaSyncProcessorThread :: ManagedT m Immortal.Thread startSchemaSyncProcessorThread logger - httpMgr schemaSyncEventRef cacheRef instanceId @@ -159,7 +158,7 @@ startSchemaSyncProcessorThread -- Start processor thread processorThread <- C.forkManagedT "SchemeUpdate.processor" logger $ - processor logger httpMgr schemaSyncEventRef cacheRef instanceId serverConfigCtx logTVar + processor logger schemaSyncEventRef cacheRef instanceId serverConfigCtx logTVar logThreadStarted logger instanceId TTProcessor processorThread pure processorThread @@ -251,10 +250,10 @@ processor :: forall m void. ( C.ForkableMonadIO m, MonadMetadataStorage m, - MonadResolveSource m + MonadResolveSource m, + ProvidesHasuraServices m ) => Logger Hasura -> - HTTP.Manager -> STM.TMVar MetadataResourceVersion -> SchemaCacheRef -> InstanceId -> @@ -263,28 +262,24 @@ processor :: m void processor logger - httpMgr metaVersionRef cacheRef instanceId serverConfigCtx logTVar = forever $ do metaVersion <- liftIO $ STM.atomically $ STM.takeTMVar metaVersionRef - respErr <- - runExceptT $ - refreshSchemaCache metaVersion instanceId logger httpMgr cacheRef TTProcessor serverConfigCtx logTVar - onLeft respErr (logError logger TTProcessor . TEQueryError) + refreshSchemaCache metaVersion instanceId logger cacheRef TTProcessor serverConfigCtx logTVar refreshSchemaCache :: ( MonadIO m, MonadBaseControl IO m, MonadMetadataStorage m, - MonadResolveSource m + MonadResolveSource m, + ProvidesNetwork m ) => MetadataResourceVersion -> InstanceId -> Logger Hasura -> - HTTP.Manager -> SchemaCacheRef -> SchemaSyncThreadType -> ServerConfigCtx -> @@ -294,7 +289,6 @@ refreshSchemaCache resourceVersion instanceId logger - httpManager cacheRef threadType serverConfigCtx @@ -368,7 +362,7 @@ refreshSchemaCache pure (msg, cache) onLeft respErr (logError logger threadType . TEQueryError) where - runCtx = RunCtx adminUserInfo httpManager serverConfigCtx + runCtx = RunCtx adminUserInfo serverConfigCtx logInfo :: (MonadIO m) => Logger Hasura -> SchemaSyncThreadType -> Value -> m () logInfo logger threadType val = diff --git a/server/src-lib/Hasura/Services.hs b/server/src-lib/Hasura/Services.hs new file mode 100644 index 00000000000..77a34126808 --- /dev/null +++ b/server/src-lib/Hasura/Services.hs @@ -0,0 +1,22 @@ +module Hasura.Services (module Services, ProvidesHasuraServices) where + +import Hasura.Services.Network as Services + +{- Note [Services] + +Different editions of the GraphQL Engine use the same common core, but provide +different features. To avoid having logic deep within the common core that +decides whether a feature is active or not, we favour an "injection" approach: +the core of the engine delegates the implementation details of features / +external dependencies to class constraints, and it's the role of the top-level +caller to implement those constraints. + +Those services are implemented on the base monad on which we run the engine. See +'PGMetadataStorageT' in Hasura/App. + +-} + +-- | A constraint alias that groups all services together. 
+type ProvidesHasuraServices m =
+  ( ProvidesNetwork m
+  )
diff --git a/server/src-lib/Hasura/Services/Network.hs b/server/src-lib/Hasura/Services/Network.hs
new file mode 100644
index 00000000000..3854c06cff6
--- /dev/null
+++ b/server/src-lib/Hasura/Services/Network.hs
@@ -0,0 +1,37 @@
+-- | Network service provider.
+--
+-- This module defines a Service (see Note [Services]) that provides access to
+-- the network; for now, that only means providing an HTTP Manager. This is
+-- consequently a simple analogue to `(MonadReader r m, Has Manager r)`, but
+-- could be updated to either encompass other network utilities, or to provide a
+-- more restricted interface if deemed useful. Alternatively this could be
+-- removed altogether if all network calls were to be hidden behind more
+-- specific services.
+module Hasura.Services.Network
+  ( ProvidesNetwork (..),
+  )
+where
+
+import Hasura.Prelude
+import Hasura.Tracing
+import Network.HTTP.Client qualified as HTTP
+
+--------------------------------------------------------------------------------
+
+class Monad m => ProvidesNetwork m where
+  askHTTPManager :: m HTTP.Manager
+
+instance ProvidesNetwork m => ProvidesNetwork (ReaderT r m) where
+  askHTTPManager = lift askHTTPManager
+
+instance (Monoid w, ProvidesNetwork m) => ProvidesNetwork (WriterT w m) where
+  askHTTPManager = lift askHTTPManager
+
+instance ProvidesNetwork m => ProvidesNetwork (StateT s m) where
+  askHTTPManager = lift askHTTPManager
+
+instance ProvidesNetwork m => ProvidesNetwork (ExceptT e m) where
+  askHTTPManager = lift askHTTPManager
+
+instance ProvidesNetwork m => ProvidesNetwork (TraceT m) where
+  askHTTPManager = lift askHTTPManager
diff --git a/server/src-lib/Hasura/Tracing.hs b/server/src-lib/Hasura/Tracing.hs
index a9971b90404..56cc337e4de 100644
--- a/server/src-lib/Hasura/Tracing.hs
+++ b/server/src-lib/Hasura/Tracing.hs
@@ -46,7 +46,6 @@ import Hasura.Tracing.TraceId
     traceIdFromHex,
     traceIdToHex,
   )
-import Network.HTTP.Client.Manager (HasHttpManagerM (..))
 import Network.HTTP.Client.Transformable qualified as HTTP
 import Refined (Positive, Refined, unrefine)
 import System.Random.Stateful qualified as Random
@@ -190,7 +189,18 @@ sampleOneInN denominator
 -- | The 'TraceT' monad transformer adds the ability to keep track of
 -- the current trace context.
 newtype TraceT m a = TraceT {unTraceT :: ReaderT TraceTEnv m a}
-  deriving (Functor, Applicative, Monad, MonadIO, MonadFix, MonadMask, MonadCatch, MonadThrow, MonadBase b, MonadBaseControl b)
+  deriving
+    ( Functor,
+      Applicative,
+      Monad,
+      MonadIO,
+      MonadFix,
+      MonadMask,
+      MonadCatch,
+      MonadThrow,
+      MonadBase b,
+      MonadBaseControl b
+    )
 
 instance MonadTrans TraceT where
   lift = TraceT . lift
@@ -206,9 +216,6 @@ instance MonadReader r m => MonadReader r (TraceT m) where
   ask = TraceT $ lift ask
   local f m = TraceT $ mapReaderT (local f) (unTraceT m)
 
-instance (HasHttpManagerM m) => HasHttpManagerM (TraceT m) where
-  askHttpManager = lift askHttpManager
-
 -- | Run an action in the 'TraceT' monad transformer.
 -- 'runTraceT' delimits a new trace with its root span, and the arguments
 -- specify a name and metadata for that span.
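-- Editor's sketch (not part of the patch above): what a concrete base monad has
-- to supply for 'ProvidesNetwork'. Per Note [Services], the patch implements the
-- class for the engine's base monad in Hasura.App, which already holds the shared
-- manager in its context; the 'ExampleAppM' monad and 'fetchUrl' helper below are
-- hypothetical stand-ins that only mirror that shape.

{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE ImportQualifiedPost #-}

module Example.NetworkService (runExampleAppM, fetchUrl) where

import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Reader (MonadReader, ReaderT, ask, runReaderT)
import Data.ByteString.Lazy qualified as BL
import Hasura.Services.Network (ProvidesNetwork (..))
import Network.HTTP.Client qualified as HTTP

-- A hypothetical application monad: the shared 'HTTP.Manager' lives in a
-- reader environment, which is all the instance needs to consult.
newtype ExampleAppM a = ExampleAppM (ReaderT HTTP.Manager IO a)
  deriving newtype (Functor, Applicative, Monad, MonadIO, MonadReader HTTP.Manager)

-- The sole obligation of the service: hand back the shared manager.
instance ProvidesNetwork ExampleAppM where
  askHTTPManager = ask

-- Downstream code states its dependency as a constraint instead of taking an
-- explicit 'HTTP.Manager' argument, mirroring the signature changes above.
fetchUrl :: (MonadIO m, ProvidesNetwork m) => String -> m BL.ByteString
fetchUrl url = do
  manager <- askHTTPManager
  request <- liftIO $ HTTP.parseRequest url
  HTTP.responseBody <$> liftIO (HTTP.httpLbs request manager)

-- The manager is created once at the edge and shared through the monad.
runExampleAppM :: ExampleAppM a -> IO a
runExampleAppM (ExampleAppM action) = do
  manager <- HTTP.newManager HTTP.defaultManagerSettings
  runReaderT action manager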
diff --git a/server/src-lib/Network/HTTP/Client/Manager.hs b/server/src-lib/Network/HTTP/Client/Manager.hs deleted file mode 100644 index 7971268d1d8..00000000000 --- a/server/src-lib/Network/HTTP/Client/Manager.hs +++ /dev/null @@ -1,26 +0,0 @@ -module Network.HTTP.Client.Manager - ( HasHttpManagerM (..), - HTTP.Manager, - ) -where - -import Control.Monad.Except -import Control.Monad.Reader -import Control.Monad.State.Strict -import Control.Monad.Writer.Strict -import Network.HTTP.Client as HTTP - -class (Monad m) => HasHttpManagerM m where - askHttpManager :: m HTTP.Manager - -instance (HasHttpManagerM m) => HasHttpManagerM (ExceptT e m) where - askHttpManager = lift askHttpManager - -instance (HasHttpManagerM m) => HasHttpManagerM (ReaderT r m) where - askHttpManager = lift askHttpManager - -instance (HasHttpManagerM m) => HasHttpManagerM (StateT s m) where - askHttpManager = lift askHttpManager - -instance (Monoid w, HasHttpManagerM m) => HasHttpManagerM (WriterT w m) where - askHttpManager = lift askHttpManager diff --git a/server/test-postgres/Main.hs b/server/test-postgres/Main.hs index 35b541f653d..012882996e3 100644 --- a/server/test-postgres/Main.hs +++ b/server/test-postgres/Main.hs @@ -12,7 +12,8 @@ import Data.Time.Clock (getCurrentTime) import Data.URL.Template import Database.PG.Query qualified as PG import Hasura.App - ( PGMetadataStorageAppT (..), + ( AppContext (..), + PGMetadataStorageAppT (..), mkMSSQLSourceResolver, mkPgSourceResolver, ) @@ -91,12 +92,12 @@ main = do emptyMetadataDefaults (FF.checkFeatureFlag mempty) cacheBuildParams = CacheBuildParams httpManager (mkPgSourceResolver print) mkMSSQLSourceResolver serverConfigCtx - pgLogger = print + appContext = AppContext httpManager print pgPool run :: ExceptT QErr (PGMetadataStorageAppT CacheBuild) a -> IO a run = runExceptT - >>> flip runPGMetadataStorageAppT (pgPool, pgLogger) + >>> flip runPGMetadataStorageAppT appContext >>> runCacheBuild cacheBuildParams >>> runExceptT >=> flip onLeft printErrJExit diff --git a/server/test-postgres/Test/Hasura/Server/MigrateSuite.hs b/server/test-postgres/Test/Hasura/Server/MigrateSuite.hs index a00549b4f8d..5c581a5cfb9 100644 --- a/server/test-postgres/Test/Hasura/Server/MigrateSuite.hs +++ b/server/test-postgres/Test/Hasura/Server/MigrateSuite.hs @@ -30,8 +30,8 @@ import Hasura.Server.API.PGDump import Hasura.Server.Init (DowngradeOptions (..)) import Hasura.Server.Migrate import Hasura.Server.Types +import Hasura.Services.Network import Hasura.Session -import Network.HTTP.Client.Manager qualified as HTTP import Test.Hspec.Core.Spec import Test.Hspec.Expectations.Lifted @@ -48,10 +48,10 @@ newtype CacheRefT m a = CacheRefT {runCacheRefT :: MVar RebuildableSchemaCache - MonadBaseControl b, MonadTx, UserInfoM, - HTTP.HasHttpManagerM, HasServerConfigCtx, MonadMetadataStorage, - MonadMetadataStorageQueryAPI + MonadMetadataStorageQueryAPI, + ProvidesNetwork ) via (ReaderT (MVar RebuildableSchemaCache) m) @@ -74,9 +74,9 @@ instance ( MonadIO m, MonadBaseControl IO m, MonadError QErr m, - HTTP.HasHttpManagerM m, MonadResolveSource m, - HasServerConfigCtx m + HasServerConfigCtx m, + ProvidesNetwork m ) => CacheRWM (CacheRefT m) where @@ -104,11 +104,11 @@ suite :: ( MonadIO m, MonadError QErr m, MonadBaseControl IO m, - HTTP.HasHttpManagerM m, HasServerConfigCtx m, MonadResolveSource m, MonadMetadataStorageQueryAPI m, - MonadEventLogCleanup m + MonadEventLogCleanup m, + ProvidesNetwork m ) => PostgresConnConfiguration -> PGExecCtx ->
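-- Editor's sketch (not part of the patch above): why wrappers such as the new
-- 'Handler' newtype in Hasura.Server.App and the test suite's 'CacheRefT' can
-- simply list 'ProvidesNetwork' in their deriving clauses. The transformer
-- instances in Hasura.Services.Network all just 'lift', so a newtype over a
-- 'ReaderT' (or similar) stack inherits the service from its base monad. The
-- 'RequestM' wrapper and 'RequestEnv' type below are hypothetical.

{-# LANGUAGE DerivingVia #-}
{-# LANGUAGE ImportQualifiedPost #-}

module Example.DerivedNetworkService (RequestM (..), runRequestM, currentManager) where

import Control.Monad.Reader (ReaderT (..))
import Hasura.Services.Network (ProvidesNetwork (..))
import Network.HTTP.Client qualified as HTTP

-- A hypothetical per-request environment, standing in for 'HandlerCtx' or the
-- 'MVar RebuildableSchemaCache' used by 'CacheRefT'.
newtype RequestEnv = RequestEnv {reRequestId :: String}

-- Deriving via the underlying 'ReaderT' yields
--   instance ProvidesNetwork m => ProvidesNetwork (RequestM m)
-- so the wrapper provides the manager whenever its base monad does.
newtype RequestM m a = RequestM (ReaderT RequestEnv m a)
  deriving
    (Functor, Applicative, Monad, ProvidesNetwork)
    via (ReaderT RequestEnv m)

runRequestM :: RequestEnv -> RequestM m a -> m a
runRequestM env (RequestM action) = runReaderT action env

-- Code running inside the wrapper can still reach the shared manager, which is
-- why the explicit 'HTTP.Manager' parameters could be dropped from signatures.
currentManager :: ProvidesNetwork m => RequestM m HTTP.Manager
currentManager = askHTTPManager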