server: add auth hook span and correct server SpanKind for OpenTelemetry spans

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/10887
GitOrigin-RevId: b6b947c00a24a30c480427b533ab617f217c6884
This commit is contained in:
Toan Nguyen 2024-06-21 23:57:43 +07:00 committed by hasura-bot
parent 5d8333753d
commit 7aef8f7102
28 changed files with 173 additions and 93 deletions

View File

@ -55,6 +55,7 @@ services:
- postgres
- postgres-replica
- redis
- auth-hook
restart: always
## uncomment the auto migration image and volumes to explore the example
volumes:
@ -67,6 +68,7 @@ services:
HASURA_GRAPHQL_METADATA_DATABASE_URL: ${HGE_METADATA_DATABASE_URL}
HGE_DATABASE_URL: ${HGE_DATABASE_URL}
HGE_DATABASE_REPLICA_URL: ${HGE_DATABASE_REPLICA_URL}
HASURA_GRAPHQL_AUTH_HOOK: http://auth-hook:4200
## Optional settings:
## enable the console served by server
@ -81,7 +83,7 @@ services:
HASURA_GRAPHQL_CACHE_MAX_ENTRY_SIZE: "200"
# HASURA_GRAPHQL_SERVER_PORT: "8080"
HASURA_GRAPHQL_ENABLED_APIS: ${HGE_ENABLED_APIS}
HASURA_GRAPHQL_UNAUTHORIZED_ROLE: anonymous
# HASURA_GRAPHQL_UNAUTHORIZED_ROLE: anonymous
JAEGER_HOST: http://jaeger:4318
OTEL_COLLECTOR_HOST: http://otel-collector:4318
@ -213,3 +215,7 @@ services:
HONEYCOMB_API_KEY: ${HONEYCOMB_API_KEY}
HONEYCOMB_DATASET: ${HONEYCOMB_DATASET}
# GOOGLE_APPLICATION_CREDENTIALS: /etc/otel/gcp-sa.json
auth-hook:
build:
context: examples/auth-hook

View File

@ -0,0 +1,11 @@
# Deno-based auth-hook service for the Hasura docker-compose example.
FROM denoland/deno
# main.ts listens on 4200 (HASURA_GRAPHQL_AUTH_HOOK points at
# http://auth-hook:4200), so document that port — not 3000.
EXPOSE 4200
WORKDIR /app
# COPY is preferred over ADD for plain local files (no archive/URL semantics).
COPY . /app
# Pre-fetch and compile the module graph at build time for fast startup.
RUN deno cache main.ts
# The base image's entrypoint is `deno`, so CMD supplies its arguments.
CMD ["run", "--allow-net", "main.ts"]

View File

@ -0,0 +1,17 @@
// Minimal Hasura auth-hook: answer every request with an admin session
// payload. Intended only for the docker-compose example, not production.
const port = 4200;

const handler = (): Response => {
  // Session variables Hasura will adopt for the authenticated request.
  const sessionVariables = {
    "x-hasura-role": "admin",
  };
  return new Response(JSON.stringify(sessionVariables), {
    headers: {
      "Content-Type": "application/json",
    },
    status: 200,
  });
};

console.log(`HTTP server running. Access it at: http://localhost:4200/`);
Deno.serve({ port }, handler);

View File

@ -697,7 +697,7 @@ instance HasCacheStaticConfig AppM where
instance MonadTrace AppM where
newTraceWith c p n (AppM a) = AppM $ newTraceWith c p n a
newSpanWith i n (AppM a) = AppM $ newSpanWith i n a
newSpanWith i n k (AppM a) = AppM $ newSpanWith i n k a
attachMetadata = AppM . attachMetadata
instance MonadTraceContext AppM where

View File

@ -30,7 +30,7 @@ import Hasura.RQL.Types.BackendType (BackendType (DataConnector))
import Hasura.RQL.Types.Common qualified as RQL
import Hasura.SQL.AnyBackend (mkAnyBackend)
import Hasura.Session
import Hasura.Tracing (MonadTrace)
import Hasura.Tracing (MonadTrace, SpanKind (..))
import Hasura.Tracing qualified as Tracing
import Language.GraphQL.Draft.Syntax qualified as G
@ -125,7 +125,7 @@ instance BackendExecute 'DataConnector where
buildQueryAction :: (MonadIO m, MonadTrace m, MonadError QErr m) => RQL.SourceName -> SourceConfig -> Plan API.QueryRequest API.QueryResponse -> AgentClientT m EncJSON
buildQueryAction sourceName SourceConfig {..} Plan {..} = do
queryResponse <- Client.query sourceName _scConfig _pRequest
reshapedResponse <- Tracing.newSpan "QueryResponse reshaping" $ _pResponseReshaper queryResponse
reshapedResponse <- Tracing.newSpan "QueryResponse reshaping" SKInternal $ _pResponseReshaper queryResponse
pure $ encJFromJEncoding reshapedResponse
-- Delegates the generation to the Agent's /explain endpoint if it has that capability,
@ -150,5 +150,5 @@ toExplainPlan fieldName queryRequest =
buildMutationAction :: (MonadIO m, MonadTrace m, MonadError QErr m) => RQL.SourceName -> SourceConfig -> Plan API.MutationRequest API.MutationResponse -> AgentClientT m EncJSON
buildMutationAction sourceName SourceConfig {..} Plan {..} = do
mutationResponse <- Client.mutation sourceName _scConfig _pRequest
reshapedResponse <- Tracing.newSpan "MutationResponse reshaping" $ _pResponseReshaper mutationResponse
reshapedResponse <- Tracing.newSpan "MutationResponse reshaping" SKInternal $ _pResponseReshaper mutationResponse
pure $ encJFromJEncoding reshapedResponse

View File

@ -72,7 +72,7 @@ runDBQuery' requestId query fieldName _userInfo logger licenseKeyCacheMaybe sour
-- _ -> do
void $ HGL.logQueryLog logger $ mkQueryLog query fieldName queryRequest requestId
withElapsedTime
. Tracing.newSpan ("Data Connector backend query for root field " <>> fieldName)
. Tracing.newSpan ("Data Connector backend query for root field " <>> fieldName) Tracing.SKClient
. (<* Tracing.attachSourceConfigAttributes @'DataConnector sourceConfig)
. flip runAgentClientT (AgentClientContext logger _scEndpoint _scManager _scTimeoutMicroseconds agentAuthKey)
. runOnBaseMonad
@ -149,7 +149,7 @@ runDBMutation' requestId query fieldName _userInfo logger licenseKeyCacheMaybe s
-- _ -> do
void $ HGL.logQueryLog logger $ mkQueryLog query fieldName queryRequest requestId
withElapsedTime
. Tracing.newSpan ("Data Connector backend mutation for root field " <>> fieldName)
. Tracing.newSpan ("Data Connector backend mutation for root field " <>> fieldName) Tracing.SKClient
. (<* Tracing.attachSourceConfigAttributes @'DataConnector sourceConfig)
. flip runAgentClientT (AgentClientContext logger _scEndpoint _scManager _scTimeoutMicroseconds agentAuthKey)
. runOnBaseMonad

View File

@ -78,7 +78,7 @@ runQuery ::
runQuery reqId query fieldName _userInfo logger _ sourceConfig tx genSql _ = do
logQueryLog logger $ mkQueryLog query fieldName genSql reqId
withElapsedTime
$ newSpan ("MSSQL Query for root field " <>> fieldName)
$ newSpan ("MSSQL Query for root field " <>> fieldName) SKInternal
$ (<* attachSourceConfigAttributes @'MSSQL sourceConfig)
$ fmap snd (run tx)
@ -116,7 +116,7 @@ runMutation ::
runMutation reqId query fieldName _userInfo logger _ sourceConfig tx _genSql _ = do
logQueryLog logger $ mkQueryLog query fieldName Nothing reqId
withElapsedTime
$ newSpan ("MSSQL Mutation for root field " <>> fieldName)
$ newSpan ("MSSQL Mutation for root field " <>> fieldName) SKInternal
$ (<* attachSourceConfigAttributes @'MSSQL sourceConfig)
$ run tx

View File

@ -119,7 +119,7 @@ insertMultipleObjects multiObjIns additionalColumns userInfo mutationOutput plan
mutationOutput
columnInfos
rowCount = tshow . length $ IR._aiInsertObject multiObjIns
Tracing.newSpan ("Insert (" <> rowCount <> ") " <> qualifiedObjectToText table) do
Tracing.newSpan ("Insert (" <> rowCount <> ") " <> qualifiedObjectToText table) Tracing.SKInternal do
Tracing.attachMetadata [("count", rowCount)]
PGE.execInsertQuery stringifyNum tCase userInfo (insertQuery, planVars)
@ -158,7 +158,7 @@ insertObject ::
Maybe NamingCase ->
m (Int, Maybe (ColumnValues ('Postgres pgKind) TxtEncodedVal))
insertObject singleObjIns additionalColumns userInfo planVars stringifyNum tCase =
Tracing.newSpan ("Insert " <> qualifiedObjectToText table) do
Tracing.newSpan ("Insert " <> qualifiedObjectToText table) Tracing.SKInternal do
validateInsert (HashMap.keys columns) (map IR._riRelationInfo objectRels) (HashMap.keys additionalColumns)
-- insert all object relations and fetch this insert dependent column values

View File

@ -85,7 +85,7 @@ runPGQuery reqId query fieldName _userInfo logger _ sourceConfig tx genSql resol
-- log the generated SQL and the graphql query
logQueryLog logger $ mkQueryLog query fieldName genSql reqId (resolvedConnectionTemplate <$ resolvedConnectionTemplate)
withElapsedTime
$ newSpan ("Postgres Query for root field " <>> fieldName)
$ newSpan ("Postgres Query for root field " <>> fieldName) SKInternal
$ (<* attachSourceConfigAttributes @('Postgres pgKind) sourceConfig)
$ runQueryTx (_pscExecCtx sourceConfig) (GraphQLQuery resolvedConnectionTemplate)
$ fmap snd (runOnBaseMonad tx)
@ -114,7 +114,7 @@ runPGMutation reqId query fieldName userInfo logger _ sourceConfig tx _genSql re
-- log the graphql query
logQueryLog logger $ mkQueryLog query fieldName Nothing reqId (resolvedConnectionTemplate <$ resolvedConnectionTemplate)
withElapsedTime
$ newSpan ("Postgres Mutation for root field " <>> fieldName)
$ newSpan ("Postgres Mutation for root field " <>> fieldName) SKInternal
$ (<* attachSourceConfigAttributes @('Postgres pgKind) sourceConfig)
$ runTxWithCtxAndUserInfo userInfo (_pscExecCtx sourceConfig) (Tx PG.ReadWrite Nothing) (GraphQLQuery resolvedConnectionTemplate)
$ runOnBaseMonad tx
@ -204,7 +204,7 @@ runPGMutationTransaction reqId query userInfo logger sourceConfig resolvedConnec
withElapsedTime
$ runTxWithCtxAndUserInfo userInfo (_pscExecCtx sourceConfig) (Tx PG.ReadWrite Nothing) (GraphQLQuery resolvedConnectionTemplate)
$ flip InsOrdHashMap.traverseWithKey mutations \fieldName dbsi ->
newSpan ("Postgres Mutation for root field " <>> fieldName)
newSpan ("Postgres Mutation for root field " <>> fieldName) SKInternal
$ (<* attachSourceConfigAttributes @('Postgres pgKind) sourceConfig)
$ fmap arResult
$ runOnBaseMonad

View File

@ -389,7 +389,7 @@ getResolvedExecPlan
-- Construct the full 'ResolvedExecutionPlan' from the 'queryParts :: SingleOperation'.
(parameterizedQueryHash, resolvedExecPlan, modelInfoList') <-
case queryParts of
G.TypedOperationDefinition G.OperationTypeQuery _ varDefs directives inlinedSelSet -> Tracing.newSpan "Resolve query execution plan" $ do
G.TypedOperationDefinition G.OperationTypeQuery _ varDefs directives inlinedSelSet -> Tracing.newSpan "Resolve query execution plan" Tracing.SKInternal $ do
(executionPlan, queryRootFields, dirMap, parameterizedQueryHash, modelInfoList) <-
EQ.convertQuerySelSet
env
@ -412,7 +412,7 @@ getResolvedExecPlan
traceQueryStatus
Tracing.attachMetadata [("graphql.operation.type", "query"), ("parameterized_query_hash", bsToTxt $ unParamQueryHash parameterizedQueryHash)]
pure (parameterizedQueryHash, QueryExecutionPlan executionPlan queryRootFields dirMap, modelInfoList)
G.TypedOperationDefinition G.OperationTypeMutation _ varDefs directives inlinedSelSet -> Tracing.newSpan "Resolve mutation execution plan" $ do
G.TypedOperationDefinition G.OperationTypeMutation _ varDefs directives inlinedSelSet -> Tracing.newSpan "Resolve mutation execution plan" Tracing.SKInternal $ do
when (readOnlyMode == ReadOnlyModeEnabled)
$ throw400 NotSupported "Mutations are not allowed when read-only mode is enabled"
(executionPlan, parameterizedQueryHash, modelInfoList) <-
@ -436,7 +436,7 @@ getResolvedExecPlan
traceQueryStatus
Tracing.attachMetadata [("graphql.operation.type", "mutation")]
pure (parameterizedQueryHash, MutationExecutionPlan executionPlan, modelInfoList)
G.TypedOperationDefinition G.OperationTypeSubscription _ varDefs directives inlinedSelSet -> Tracing.newSpan "Resolve subscription execution plan" $ do
G.TypedOperationDefinition G.OperationTypeSubscription _ varDefs directives inlinedSelSet -> Tracing.newSpan "Resolve subscription execution plan" Tracing.SKInternal $ do
(normalizedDirectives, normalizedSelectionSet) <-
ER.resolveVariables
(nullInNonNullableVariables sqlGenCtx)
@ -445,7 +445,7 @@ getResolvedExecPlan
directives
inlinedSelSet
subscriptionParser <- C.gqlSubscriptionParser gCtx `onNothing` throw400 ValidationFailed "no subscriptions exist"
unpreparedAST <- Tracing.newSpan "Parse subscription IR" $ liftEither $ subscriptionParser normalizedSelectionSet
unpreparedAST <- Tracing.newSpan "Parse subscription IR" Tracing.SKInternal $ liftEither $ subscriptionParser normalizedSelectionSet
let parameterizedQueryHash = calculateParameterizedQueryHash normalizedSelectionSet
-- Process directives on the subscription
dirMap <-

View File

@ -125,13 +125,13 @@ convertMutationSelectionSet
(resolvedDirectives, resolvedSelSet) <- resolveVariables nullInNonNullableVariables varDefs (fromMaybe HashMap.empty (GH._grVariables gqlUnparsed)) directives fields
-- Parse the GraphQL query into the RQL AST
(unpreparedQueries :: RootFieldMap (MutationRootField UnpreparedValue)) <-
Tracing.newSpan "Parse mutation IR" $ liftEither $ mutationParser resolvedSelSet
Tracing.newSpan "Parse mutation IR" Tracing.SKInternal $ liftEither $ mutationParser resolvedSelSet
-- Process directives on the mutation
_dirMap <- toQErr $ runParse (parseDirectives customDirectives (G.DLExecutable G.EDLMUTATION) resolvedDirectives)
let parameterizedQueryHash = calculateParameterizedQueryHash resolvedSelSet
resolveExecutionSteps rootFieldName rootFieldUnpreparedValue = Tracing.newSpan ("Resolve execution step for " <>> rootFieldName) do
resolveExecutionSteps rootFieldName rootFieldUnpreparedValue = Tracing.newSpan ("Resolve execution step for " <>> rootFieldName) Tracing.SKInternal do
case rootFieldUnpreparedValue of
RFDB sourceName exists ->
AB.dispatchAnyBackend @BackendExecute

View File

@ -117,14 +117,14 @@ convertQuerySelSet
traceQueryStatus = do
-- 1. Parse the GraphQL query into the 'RootFieldMap' and a 'SelectionSet'
(unpreparedQueries, normalizedDirectives, normalizedSelectionSet) <-
Tracing.newSpan "Parse query IR" $ parseGraphQLQuery nullInNonNullableVariables gqlContext varDefs (GH._grVariables gqlUnparsed) directives fields
Tracing.newSpan "Parse query IR" Tracing.SKInternal $ parseGraphQLQuery nullInNonNullableVariables gqlContext varDefs (GH._grVariables gqlUnparsed) directives fields
-- 2. Parse directives on the query
dirMap <- toQErr $ runParse (parseDirectives customDirectives (G.DLExecutable G.EDLQUERY) normalizedDirectives)
let parameterizedQueryHash = calculateParameterizedQueryHash normalizedSelectionSet
resolveExecutionSteps rootFieldName rootFieldUnpreparedValue = Tracing.newSpan ("Resolve execution step for " <>> rootFieldName) do
resolveExecutionSteps rootFieldName rootFieldUnpreparedValue = Tracing.newSpan ("Resolve execution step for " <>> rootFieldName) Tracing.SKInternal do
case rootFieldUnpreparedValue of
RFMulti lst -> do
allSteps <- traverse (resolveExecutionSteps rootFieldName) lst

View File

@ -81,7 +81,7 @@ processRemoteJoins ::
TraceQueryStatus ->
m (EncJSON, [ModelInfoPart])
processRemoteJoins requestId logger agentLicenseKey env requestHeaders userInfo lhs maybeJoinTree gqlreq tracesPropagator traceQueryStatus =
Tracing.newSpan "Process remote joins" $ forRemoteJoins maybeJoinTree (lhs, []) \joinTree -> do
Tracing.newSpan "Process remote joins" Tracing.SKInternal $ forRemoteJoins maybeJoinTree (lhs, []) \joinTree -> do
lhsParsed <-
JO.eitherDecode (encJToLBS lhs)
`onLeft` (throw500 . T.pack)
@ -190,7 +190,7 @@ foldJoinTreeWith callSource callRemoteSchema userInfo lhs joinTree reqHeaders op
(intMap, model) = unzip compositeValue'
joinIndices' = IntMap.fromList $ zip key intMap
modelInfoList = concat model
Tracing.newSpan "Join remote join results"
Tracing.newSpan "Join remote join results" Tracing.SKInternal
$ (,(modelInfoList))
<$> (joinResults joinIndices' compositeValue)

View File

@ -70,17 +70,17 @@ makeRemoteSchemaJoinCall ::
-- | The resulting join index (see 'buildJoinIndex') if any.
m (Maybe (IntMap.IntMap AO.Value))
makeRemoteSchemaJoinCall networkFunction userInfo remoteSchemaJoin jaFieldName joinArguments = do
Tracing.newSpan ("Remote join to remote schema for field " <>> jaFieldName) do
Tracing.newSpan ("Remote join to remote schema for field " <>> jaFieldName) Tracing.SKClient do
-- step 1: construct the internal intermediary representation
maybeRemoteCall <-
Tracing.newSpan "Resolve execution step for remote join field"
Tracing.newSpan "Resolve execution step for remote join field" Tracing.SKInternal
$ buildRemoteSchemaCall remoteSchemaJoin joinArguments userInfo
-- if there actually is a remote call:
for maybeRemoteCall \remoteCall -> do
-- step 2: execute it over the network
responseValue <- executeRemoteSchemaCall networkFunction remoteCall
-- step 3: build the join index
Tracing.newSpan "Build remote join index"
Tracing.newSpan "Build remote join index" Tracing.SKInternal
$ buildJoinIndex remoteCall responseValue
-------------------------------------------------------------------------------

View File

@ -74,7 +74,7 @@ makeSourceJoinCall ::
-- | The resulting join index (see 'buildJoinIndex') if any.
m (Maybe (IntMap.IntMap AO.Value, [ModelInfoPart]))
makeSourceJoinCall networkFunction userInfo remoteSourceJoin jaFieldName joinArguments reqHeaders operationName traceQueryStatus =
Tracing.newSpan ("Remote join to data source " <> sourceName <<> " for field " <>> jaFieldName) do
Tracing.newSpan ("Remote join to data source " <> sourceName <<> " for field " <>> jaFieldName) Tracing.SKClient do
-- step 1: create the SourceJoinCall
-- maybeSourceCall <-
-- AB.dispatchAnyBackend @EB.BackendExecute remoteSourceJoin \(sjc :: SourceJoinCall b) ->
@ -87,7 +87,7 @@ makeSourceJoinCall networkFunction userInfo remoteSourceJoin jaFieldName joinArg
-- step 2: send this call over the network
sourceResponse <- networkFunction sourceCall
-- step 3: build the join index
Tracing.newSpan "Build remote join index"
Tracing.newSpan "Build remote join index" Tracing.SKInternal
$ (,(modelInfoList))
<$> buildJoinIndex sourceResponse
where
@ -120,7 +120,7 @@ buildSourceJoinCall ::
RemoteSourceJoin b ->
m (Maybe (AB.AnyBackend SourceJoinCall, [ModelInfoPart]))
buildSourceJoinCall userInfo jaFieldName joinArguments reqHeaders operationName traceQueryStatus remoteSourceJoin = do
Tracing.newSpan "Resolve execution step for remote join field" do
Tracing.newSpan "Resolve execution step for remote join field" Tracing.SKInternal do
let rows =
IntMap.toList joinArguments <&> \(argumentId, argument) ->
KM.insert "__argument_id__" (J.toJSON argumentId)

View File

@ -339,7 +339,7 @@ runGQ env sqlGenCtx sc enableAL readOnlyMode remoteSchemaResponsePriority header
let gqlMetrics = pmGraphQLRequestMetrics prometheusMetrics
(totalTime, (response, parameterizedQueryHash, gqlOpType, gqlOperationName, modelInfoListForLogging, queryCachedStatus)) <- withElapsedTime $ do
(reqParsed, runLimits, queryParts) <- Tracing.newSpan "Parse GraphQL" $ observeGQLQueryError granularPrometheusMetricsState gqlMetrics Nothing (_grOperationName reqUnparsed) Nothing $ do
(reqParsed, runLimits, queryParts) <- Tracing.newSpan "Parse GraphQL" Tracing.SKInternal $ observeGQLQueryError granularPrometheusMetricsState gqlMetrics Nothing (_grOperationName reqUnparsed) Nothing $ do
-- 1. Run system authorization on the 'reqUnparsed :: GQLReqUnparsed' query.
reqParsed <-
E.checkGQLExecution userInfo (reqHeaders, ipAddress) enableAL sc reqUnparsed reqId
@ -572,7 +572,7 @@ runGQ env sqlGenCtx sc enableAL readOnlyMode remoteSchemaResponsePriority header
let (allResponses, allModelInfo) = unzip _all
pure $ (AnnotatedResponsePart 0 Telem.Local (encJFromList (map arpResponse allResponses)) [], concat allModelInfo)
runRemoteGQ fieldName rsi resultCustomizer gqlReq remoteJoins = Tracing.newSpan ("Remote schema query for root field " <>> fieldName) $ do
runRemoteGQ fieldName rsi resultCustomizer gqlReq remoteJoins = Tracing.newSpan ("Remote schema query for root field " <>> fieldName) Tracing.SKInternal $ do
(telemTimeIO_DT, remoteResponseHeaders, resp) <-
doQErr $ E.execRemoteGQ env tracesPropagator userInfo reqHeaders (rsDef rsi) gqlReq
value <- extractFieldFromResponse remoteSchemaResponsePriority fieldName resultCustomizer resp

View File

@ -490,7 +490,7 @@ onStart enabledLogTypes agentLicenseKey serverEnv wsConn shouldCaptureVariables
enableAL <- liftIO $ acEnableAllowlist <$> getAppContext appStateRef
remoteSchemaResponsePriority <- liftIO $ acRemoteSchemaResponsePriority <$> getAppContext appStateRef
(reqParsed, queryParts) <- Tracing.newSpan "Parse GraphQL" $ do
(reqParsed, queryParts) <- Tracing.newSpan "Parse GraphQL" Tracing.SKInternal $ do
reqParsedE <- lift $ E.checkGQLExecution userInfo (reqHdrs, ipAddress) enableAL sc q requestId
reqParsed <- onLeft reqParsedE (withComplete . preExecErr granularPrometheusMetricsState requestId Nothing (_grOperationName q) Nothing)
queryPartsE <- runExceptT $ getSingleOperation reqParsed
@ -843,7 +843,7 @@ onStart enabledLogTypes agentLicenseKey serverEnv wsConn shouldCaptureVariables
Tracing.HttpPropagator ->
RemoteSchemaResponsePriority ->
ExceptT (Either GQExecError QErr) (ExceptT () m) (AnnotatedResponsePart, [ModelInfoPart])
runRemoteGQ requestId reqUnparsed fieldName userInfo reqHdrs rsi resultCustomizer gqlReq remoteJoins tracesPropagator remoteSchemaResponsePriority = Tracing.newSpan ("Remote schema query for root field " <>> fieldName) $ do
runRemoteGQ requestId reqUnparsed fieldName userInfo reqHdrs rsi resultCustomizer gqlReq remoteJoins tracesPropagator remoteSchemaResponsePriority = Tracing.newSpan ("Remote schema query for root field " <>> fieldName) Tracing.SKInternal $ do
env <- liftIO $ acEnvironment <$> getAppContext appStateRef
(telemTimeIO_DT, _respHdrs, resp) <-
doQErr

View File

@ -113,7 +113,7 @@ runMetadataQuery ::
runMetadataQuery appContext schemaCache closeWebsocketsOnMetadataChange RQLMetadata {..} = do
AppEnv {..} <- askAppEnv
let logger = _lsLogger appEnvLoggers
MetadataWithResourceVersion metadata currentResourceVersion <- Tracing.newSpan "fetchMetadata" $ liftEitherM fetchMetadata
MetadataWithResourceVersion metadata currentResourceVersion <- Tracing.newSpan "fetchMetadata" Tracing.SKInternal $ liftEitherM fetchMetadata
let exportsMetadata = \case
RMV1 (RMExportMetadata _) -> True
RMV2 (RMV2ExportMetadata _) -> True
@ -160,7 +160,7 @@ runMetadataQuery appContext schemaCache closeWebsocketsOnMetadataChange RQLMetad
$ "Attempting to insert new metadata in storage"
newResourceVersion <-
Tracing.newSpan "updateMetadataAndNotifySchemaSync"
Tracing.newSpan "updateMetadataAndNotifySchemaSync" Tracing.SKInternal
$ liftEitherM
$ updateMetadataAndNotifySchemaSync appEnvInstanceId (fromMaybe currentResourceVersion _rqlMetadataResourceVersion) modMetadata cacheInvalidations
@ -177,24 +177,24 @@ runMetadataQuery appContext schemaCache closeWebsocketsOnMetadataChange RQLMetad
<> showMetadataResourceVersion newResourceVersion
-- save sources introspection to stored-introspection DB
Tracing.newSpan "storeSourcesIntrospection"
Tracing.newSpan "storeSourcesIntrospection" Tracing.SKInternal
$ saveSourcesIntrospection logger sourcesIntrospection newResourceVersion
-- run the schema registry action
Tracing.newSpan "runSchemaRegistryAction"
Tracing.newSpan "runSchemaRegistryAction" Tracing.SKInternal
$ for_ schemaRegistryAction
$ \action -> do
liftIO $ action newResourceVersion (scInconsistentObjs (lastBuiltSchemaCache modSchemaCache)) modMetadata
(_, modSchemaCache', _, _, _) <-
Tracing.newSpan "setMetadataResourceVersionInSchemaCache"
Tracing.newSpan "setMetadataResourceVersionInSchemaCache" Tracing.SKInternal
$ setMetadataResourceVersionInSchemaCache newResourceVersion
& runCacheRWT dynamicConfig modSchemaCache
-- Close all subscriptions with 1012 code (subscribers should reconnect)
-- and close poller threads
when ((_cdcCloseWebsocketsOnMetadataChangeStatus dynamicConfig) == CWMCEnabled)
$ Tracing.newSpan "closeWebsocketsOnMetadataChange"
$ Tracing.newSpan "closeWebsocketsOnMetadataChange" Tracing.SKInternal
$ liftIO
$ WS.runWebsocketCloseOnMetadataChangeAction closeWebsocketsOnMetadataChange
@ -369,10 +369,10 @@ runMetadataQueryM env schemaSampledFeatureFlags remoteSchemaPerms currentResourc
-- NOTE: This is a good place to install tracing, since it's involved in
-- the recursive case via "bulk":
RMV1 q ->
Tracing.newSpan ("v1 " <> T.pack (constrName q))
Tracing.newSpan ("v1 " <> T.pack (constrName q)) Tracing.SKInternal
$ runMetadataQueryV1M env schemaSampledFeatureFlags remoteSchemaPerms currentResourceVersion q
RMV2 q ->
Tracing.newSpan ("v2 " <> T.pack (constrName q))
Tracing.newSpan ("v2 " <> T.pack (constrName q)) Tracing.SKInternal
$ runMetadataQueryV2M currentResourceVersion q
runMetadataQueryV1M ::

View File

@ -225,7 +225,7 @@ runQuery appContext sc query = do
saveSourcesIntrospection logger sourcesIntrospection newResourceVersion
(_, modSchemaCache', _, _, _) <-
Tracing.newSpan "setMetadataResourceVersionInSchemaCache"
Tracing.newSpan "setMetadataResourceVersionInSchemaCache" Tracing.SKInternal
$ setMetadataResourceVersionInSchemaCache newResourceVersion
& runCacheRWT dynamicConfig modSchemaCache

View File

@ -123,7 +123,7 @@ runQuery appContext schemaCache rqlQuery = do
$ throw400 NotSupported "Cannot run write queries when read-only mode is enabled"
let dynamicConfig = buildCacheDynamicConfig appContext
MetadataWithResourceVersion metadata currentResourceVersion <- Tracing.newSpan "fetchMetadata" $ liftEitherM fetchMetadata
MetadataWithResourceVersion metadata currentResourceVersion <- Tracing.newSpan "fetchMetadata" Tracing.SKInternal $ liftEitherM fetchMetadata
((result, updatedMetadata), modSchemaCache, invalidations, sourcesIntrospection, schemaRegistryAction) <-
runQueryM (acSQLGenCtx appContext) rqlQuery
-- We can use defaults here unconditionally, since there is no MD export function in V2Query
@ -134,21 +134,21 @@ runQuery appContext schemaCache rqlQuery = do
MaintenanceModeDisabled -> do
-- set modified metadata in storage and notify schema sync
newResourceVersion <-
Tracing.newSpan "updateMetadataAndNotifySchemaSync"
Tracing.newSpan "updateMetadataAndNotifySchemaSync" Tracing.SKInternal
$ liftEitherM
$ updateMetadataAndNotifySchemaSync appEnvInstanceId currentResourceVersion updatedMetadata invalidations
-- save sources introspection to stored-introspection DB
Tracing.newSpan "storeSourcesIntrospection"
Tracing.newSpan "storeSourcesIntrospection" Tracing.SKInternal
$ saveSourcesIntrospection (_lsLogger appEnvLoggers) sourcesIntrospection newResourceVersion
(_, modSchemaCache', _, _, _) <-
Tracing.newSpan "setMetadataResourceVersionInSchemaCache"
Tracing.newSpan "setMetadataResourceVersionInSchemaCache" Tracing.SKInternal
$ setMetadataResourceVersionInSchemaCache newResourceVersion
& runCacheRWT dynamicConfig modSchemaCache
-- run schema registry action
Tracing.newSpan "runSchemaRegistryAction"
Tracing.newSpan "runSchemaRegistryAction" Tracing.SKInternal
$ for_ schemaRegistryAction
$ \action -> do
liftIO $ action newResourceVersion (scInconsistentObjs (lastBuiltSchemaCache modSchemaCache')) updatedMetadata
@ -188,7 +188,7 @@ runQueryM ::
SQLGenCtx ->
RQLQuery ->
m EncJSON
runQueryM sqlGen rq = Tracing.newSpan (T.pack $ constrName rq) $ case rq of
runQueryM sqlGen rq = Tracing.newSpan (T.pack $ constrName rq) Tracing.SKInternal $ case rq of
RQInsert q -> runInsert sqlGen q
RQSelect q -> runSelect sqlGen q
RQUpdate q -> runUpdate sqlGen q

View File

@ -517,7 +517,7 @@ v1MetadataHandler ::
WS.WebsocketCloseOnMetadataChangeAction ->
RQLMetadata ->
m (HttpResponse EncJSON)
v1MetadataHandler schemaCacheRefUpdater closeWebsocketsOnMetadataChangeAction query = Tracing.newSpan "Metadata" $ do
v1MetadataHandler schemaCacheRefUpdater closeWebsocketsOnMetadataChangeAction query = Tracing.newSpan "Metadata" Tracing.SKInternal $ do
(liftEitherM . authorizeV1MetadataApi query) =<< ask
appContext <- asks hcAppContext
r <-
@ -547,7 +547,7 @@ v2QueryHandler ::
((RebuildableSchemaCache -> m (EncJSON, RebuildableSchemaCache)) -> m EncJSON) ->
V2Q.RQLQuery ->
m (HttpResponse EncJSON)
v2QueryHandler schemaCacheRefUpdater query = Tracing.newSpan "v2 Query" $ do
v2QueryHandler schemaCacheRefUpdater query = Tracing.newSpan "v2 Query" Tracing.SKInternal $ do
schemaCache <- asks hcSchemaCache
(liftEitherM . authorizeV2QueryApi query) =<< ask
res <-

View File

@ -48,6 +48,7 @@ import Hasura.Server.Auth.JWT hiding (processJwt_)
import Hasura.Server.Auth.WebHook
import Hasura.Server.Utils
import Hasura.Session (ExtraUserInfo, UserAdminSecret (..), UserInfo, UserRoleBuild (..), getSessionVariableValue, mkSessionVariablesHeaders, mkUserInfo)
import Hasura.Tracing qualified as Tracing
import Network.HTTP.Client qualified as HTTP
import Network.HTTP.Types qualified as HTTP
@ -71,8 +72,8 @@ class (Monad m) => UserAuthentication m where
-- Although this exists only in memory we store only a hash of the admin secret
-- primarily in order to:
--
-- - prevent theoretical timing attacks from a naive `==` check
-- - prevent misuse or inadvertent leaking of the secret
-- - prevent theoretical timing attacks from a naive `==` check
-- - prevent misuse or inadvertent leaking of the secret
newtype AdminSecretHash = AdminSecretHash (Crypto.Digest Crypto.SHA512)
deriving (Ord, Eq)
@ -229,7 +230,7 @@ updateJwkFromUrl contextAdvice (JWTCtx url ref _ _ _ _ _) httpManager logger =
-- | Authenticate the request using the headers and the configured 'AuthMode'.
getUserInfoWithExpTime ::
forall m.
(MonadIO m, MonadBaseControl IO m, MonadError QErr m) =>
(MonadIO m, MonadBaseControl IO m, MonadError QErr m, Tracing.MonadTrace m) =>
Logger Hasura ->
HTTP.Manager ->
[HTTP.Header] ->

View File

@ -24,6 +24,7 @@ import Hasura.Prelude
import Hasura.Server.Logging
import Hasura.Server.Utils
import Hasura.Session
import Hasura.Tracing qualified as Tracing
import Network.HTTP.Client.Transformable qualified as HTTP
import Network.Wreq qualified as Wreq
@ -55,7 +56,11 @@ hookMethod authHook = case ahType authHook of
-- for finer-grained auth. (#2666)
userInfoFromAuthHook ::
forall m.
(MonadIO m, MonadBaseControl IO m, MonadError QErr m) =>
( MonadIO m,
MonadBaseControl IO m,
MonadError QErr m,
Tracing.MonadTrace m
) =>
Logger Hasura ->
HTTP.Manager ->
AuthHook ->
@ -74,31 +79,34 @@ userInfoFromAuthHook logger manager hook reqHeaders reqs = do
performHTTPRequest = do
let url = T.unpack $ ahUrl hook
req <- liftIO $ HTTP.mkRequestThrow $ T.pack url
liftIO do
case ahType hook of
AHTGet -> do
let isCommonHeader = (`elem` commonClientHeadersIgnored)
filteredHeaders = filter (not . isCommonHeader . fst) reqHeaders
req' = req & set HTTP.headers (addDefaultHeaders filteredHeaders)
HTTP.httpLbs req' manager
AHTPost -> do
let contentType = ("Content-Type", "application/json")
headersPayload = J.toJSON $ HashMap.fromList $ hdrsToText reqHeaders
req' =
req
& set HTTP.method "POST"
& set HTTP.headers (addDefaultHeaders [contentType])
& set
HTTP.body
( HTTP.RequestBodyLBS
$ J.encode
$ object
( ["headers" J..= headersPayload]
-- We will only send the request if `ahSendRequestBody` is set to true
<> ["request" J..= reqs | ahSendRequestBody hook]
)
)
HTTP.httpLbs req' manager
case ahType hook of
AHTGet -> do
let isCommonHeader = (`elem` commonClientHeadersIgnored)
filteredHeaders = filter (not . isCommonHeader . fst) reqHeaders
req' = req & set HTTP.headers (addDefaultHeaders filteredHeaders)
doHTTPRequest req'
AHTPost -> do
let contentType = ("Content-Type", "application/json")
headersPayload = J.toJSON $ HashMap.fromList $ hdrsToText reqHeaders
req' =
req
& set HTTP.method "POST"
& set HTTP.headers (addDefaultHeaders [contentType])
& set
HTTP.body
( HTTP.RequestBodyLBS
$ J.encode
$ object
( ["headers" J..= headersPayload]
-- We will only send the request if `ahSendRequestBody` is set to true
<> ["request" J..= reqs | ahSendRequestBody hook]
)
)
doHTTPRequest req'
doHTTPRequest :: HTTP.Request -> m (HTTP.Response BL.ByteString)
doHTTPRequest req = Tracing.traceHTTPRequest Tracing.composedPropagator req
$ \req' -> liftIO $ HTTP.httpLbs req' manager
logAndThrow :: HTTP.HttpException -> m a
logAndThrow err = do

View File

@ -41,6 +41,7 @@ class (MonadTraceContext m) => MonadTrace m where
newSpanWith ::
SpanId ->
Text ->
SpanKind ->
m a ->
m a
@ -49,22 +50,22 @@ class (MonadTraceContext m) => MonadTrace m where
instance (MonadTrace m) => MonadTrace (ReaderT r m) where
newTraceWith c p n = mapReaderT (newTraceWith c p n)
newSpanWith i n = mapReaderT (newSpanWith i n)
newSpanWith i n kind = mapReaderT (newSpanWith i n kind)
attachMetadata = lift . attachMetadata
instance (MonadTrace m) => MonadTrace (StateT e m) where
newTraceWith c p n = mapStateT (newTraceWith c p n)
newSpanWith i n = mapStateT (newSpanWith i n)
newSpanWith i n k = mapStateT (newSpanWith i n k)
attachMetadata = lift . attachMetadata
instance (MonadTrace m) => MonadTrace (ExceptT e m) where
newTraceWith c p n = mapExceptT (newTraceWith c p n)
newSpanWith i n = mapExceptT (newSpanWith i n)
newSpanWith i n k = mapExceptT (newSpanWith i n k)
attachMetadata = lift . attachMetadata
instance (MonadTrace m) => MonadTrace (MaybeT m) where
newTraceWith c p n = mapMaybeT (newTraceWith c p n)
newSpanWith i n = mapMaybeT (newSpanWith i n)
newSpanWith i n k = mapMaybeT (newSpanWith i n k)
attachMetadata = lift . attachMetadata
-- | Access to the current tracing context, factored out of 'MonadTrace' so we
@ -110,7 +111,7 @@ newTrace policy name body = do
newTraceWith context policy name body
-- | Create a new span with a randomly-generated id.
newSpan :: (MonadIO m, MonadTrace m) => Text -> m a -> m a
newSpan name body = do
newSpan :: (MonadIO m, MonadTrace m) => Text -> SpanKind -> m a -> m a
newSpan name kind body = do
spanId <- randomSpanId
newSpanWith spanId name body
newSpanWith spanId name kind body

View File

@ -19,6 +19,7 @@ import Hasura.Tracing.Class
import Hasura.Tracing.Context
import Hasura.Tracing.Reporter
import Hasura.Tracing.Sampling
import Hasura.Tracing.TraceId
--------------------------------------------------------------------------------
-- TraceT
@ -75,7 +76,7 @@ instance (MonadIO m, MonadBaseControl IO m) => MonadTrace (TraceT m) where
metadataRef <- liftIO $ newIORef []
let report = case samplingDecision of
SampleNever -> id
SampleAlways -> runReporter reporter context name (readIORef metadataRef)
SampleAlways -> runReporter reporter context name SKServer (readIORef metadataRef)
updatedContext =
context
{ tcSamplingState = updateSamplingState samplingDecision (tcSamplingState context)
@ -83,7 +84,7 @@ instance (MonadIO m, MonadBaseControl IO m) => MonadTrace (TraceT m) where
traceEnv = TraceEnv updatedContext metadataRef samplingDecision
report $ local (_2 .~ Just traceEnv) body
newSpanWith spanId name (TraceT body) = TraceT do
newSpanWith spanId name kind (TraceT body) = TraceT do
(reporter, traceEnv) <- ask
case traceEnv of
-- we are not currently in a trace: ignore this span
@ -103,7 +104,7 @@ instance (MonadIO m, MonadBaseControl IO m) => MonadTrace (TraceT m) where
{ teTraceContext = subContext,
teMetadataRef = metadataRef
}
runReporter reporter subContext name (readIORef metadataRef)
runReporter reporter subContext name kind (readIORef metadataRef)
$ local (_2 .~ Just subTraceEnv) body
attachMetadata metadata = TraceT do

View File

@ -7,6 +7,7 @@ where
import Control.Monad.Trans.Control
import Hasura.Prelude
import Hasura.Tracing.Context
import Hasura.Tracing.TraceId (SpanKind)
newtype Reporter = Reporter
{ runReporter ::
@ -16,6 +17,8 @@ newtype Reporter = Reporter
TraceContext ->
-- \| Human readable name of this span.
Text ->
-- \| Kind of the current span.
SpanKind ->
-- \| IO action that retrieves the metadata associated with the
-- current span.
IO TraceMetadata ->
@ -25,4 +28,4 @@ newtype Reporter = Reporter
}
noReporter :: Reporter
noReporter = Reporter \_ _ _ -> id
noReporter = Reporter \_ _ _ _ -> id

View File

@ -14,6 +14,9 @@ module Hasura.Tracing.TraceId
spanIdToBytes,
spanIdFromHex,
spanIdToHex,
-- * SpanKind
SpanKind (..),
)
where
@ -150,3 +153,24 @@ spanIdFromHex = spanIdFromBytes <=< eitherToMaybe . Base16.decode
-- | Render a 'SpanId' as its standard lowercase ASCII hex encoding
-- (the inverse of 'spanIdFromHex').
spanIdToHex :: SpanId -> ByteString
spanIdToHex spanId = Base16.encode (spanIdToBytes spanId)
-- | SpanKind describes the relationship between the Span, its parents, and its children in a Trace.
--
-- Specification: https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/api.md#spankind
data SpanKind
  = -- | Indicates that the span describes a request to some remote service.
    -- Use this kind for outgoing webhook invocations (action, remote schema, etc...).
    SKClient
  | -- | Indicates that the span covers server-side handling of a synchronous RPC or other remote request.
    -- Use this kind for the root span of HTTP handlers.
    SKServer
  | -- | Default value. Indicates that the span represents an internal operation within an application.
    SKInternal
-- | Serializes a span kind to its OpenTelemetry wire name, delegating to
-- the 'Show' instance (e.g. @SKServer -> "server"@).
instance J.ToJSON SpanKind where
  toJSON kind = J.toJSON (show kind)
-- | NOTE(review): this 'Show' instance is not lawful — it does not produce
-- valid Haskell syntax. It is deliberately (ab)used as the serialization to
-- the lowercase OpenTelemetry span-kind wire names, and the 'J.ToJSON'
-- instance above relies on it. Changing any of these strings changes the
-- exported span kind; keep them in sync with the OTel specification
-- ("client", "server", "internal"). A lawful refactor would derive 'Show'
-- and move these strings to a dedicated @spanKindToText@ — TODO confirm
-- no other call sites depend on 'show' before doing so.
instance Show SpanKind where
  show SKClient = "client"
  show SKServer = "server"
  show SKInternal = "internal"

View File

@ -5,6 +5,7 @@
module Hasura.Tracing.Utils
( traceHTTPRequest,
attachSourceConfigAttributes,
composedPropagator,
)
where
@ -15,7 +16,10 @@ import Hasura.Prelude
import Hasura.RQL.Types.SourceConfiguration (HasSourceConfiguration (..))
import Hasura.Tracing.Class
import Hasura.Tracing.Context
import Hasura.Tracing.Propagator (HttpPropagator, inject)
import Hasura.Tracing.Propagator
import Hasura.Tracing.Propagator.B3 (b3TraceContextPropagator)
import Hasura.Tracing.Propagator.W3CTraceContext (w3cTraceContextPropagator)
import Hasura.Tracing.TraceId (SpanKind (SKClient))
import Network.HTTP.Client.Transformable qualified as HTTP
-- | Wrap the execution of an HTTP request in a span in the current
@ -36,7 +40,7 @@ traceHTTPRequest ::
traceHTTPRequest propagator req f = do
let method = bsToTxt (view HTTP.method req)
uri = view HTTP.url req
newSpan (method <> " " <> uri) do
newSpan (method <> " " <> uri) SKClient do
let reqBytes = HTTP.getReqSize req
attachMetadata [("request_body_bytes", fromString (show reqBytes))]
headers <- fmap (maybe [] toHeaders) currentContext
@ -49,3 +53,7 @@ attachSourceConfigAttributes :: forall b m. (HasSourceConfiguration b, MonadTrac
-- | Attach the backend kind of the given source configuration to the
-- current span as the @source.kind@ metadata attribute. The @b@ type
-- application picks the backend from the enclosing signature's @forall b@.
attachSourceConfigAttributes sourceConfig = do
  let backendSourceKind = sourceConfigBackendSourceKind @b sourceConfig
  -- HLint: redundant '$' removed (was: toTxt $ backendSourceKind).
  attachMetadata [("source.kind", toTxt backendSourceKind)]
-- | Composite propagator combining Zipkin B3 headers and the W3C Trace
-- Context format, so peers speaking either convention can join the trace.
-- Composed as B3 '<>' W3C; presumably the 'Semigroup' instance of
-- 'HttpPropagator' applies both on inject and tries each on extract —
-- NOTE(review): verify against the Semigroup instance's definition.
composedPropagator :: HttpPropagator
composedPropagator = b3TraceContextPropagator <> w3cTraceContextPropagator