Mirror of https://github.com/hasura/graphql-engine.git, synced 2024-12-14 17:02:49 +03:00
Split Hasura.RQL.DDL.Headers to put types in Hasura.RQL.Types.Headers
PR-URL: https://github.com/hasura/graphql-engine-mono/pull/8873
GitOrigin-RevId: 566cb4271f0eb27e6688c2e0fbc26711bdf8baa9
commit 8cf134dad1
parent 7fcb3bc8d4
@@ -803,6 +803,7 @@ library
, Hasura.RQL.Types.EventTrigger
, Hasura.RQL.Types.Eventing
, Hasura.RQL.Types.Eventing.Backend
, Hasura.RQL.Types.Headers
, Hasura.RQL.Types.HealthCheck
, Hasura.RQL.Types.HealthCheckImplementation
, Hasura.RQL.Types.GraphqlSchemaIntrospection
@@ -74,7 +74,6 @@ import Hasura.Eventing.HTTP
import Hasura.HTTP (getHTTPExceptionStatus)
import Hasura.Logging qualified as L
import Hasura.Prelude
import Hasura.RQL.DDL.Headers
import Hasura.RQL.DDL.Webhook.Transform
import Hasura.RQL.Types.Backend
import Hasura.RQL.Types.Common
@@ -65,11 +65,11 @@ import Data.Text.Encoding.Error qualified as TE
import Hasura.HTTP
import Hasura.Logging
import Hasura.Prelude
import Hasura.RQL.DDL.Headers
import Hasura.RQL.DDL.Webhook.Transform qualified as Transform
import Hasura.RQL.Types.Common (ResolvedWebhook (..))
import Hasura.RQL.Types.EventTrigger
import Hasura.RQL.Types.Eventing
import Hasura.RQL.Types.Headers
import Hasura.Session (SessionVariables)
import Hasura.Tracing
import Network.HTTP.Client.Transformable qualified as HTTP
@@ -152,7 +152,6 @@ import Hasura.Logging qualified as L
import Hasura.Metadata.Class
import Hasura.Prelude
import Hasura.RQL.DDL.EventTrigger (ResolveHeaderError, getHeaderInfosFromConfEither)
import Hasura.RQL.DDL.Headers
import Hasura.RQL.DDL.Webhook.Transform
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.EventTrigger
@@ -60,7 +60,7 @@ import Hasura.Logging qualified as L
import Hasura.Metadata.Class
import Hasura.Name qualified as Name
import Hasura.Prelude
import Hasura.RQL.DDL.Headers
import Hasura.RQL.DDL.Headers (makeHeadersFromConf, toHeadersConf)
import Hasura.RQL.DDL.Webhook.Transform
import Hasura.RQL.IR.Action qualified as IR
import Hasura.RQL.IR.BoolExp
@@ -72,6 +72,7 @@ import Hasura.RQL.Types.Common
import Hasura.RQL.Types.ComputedField
import Hasura.RQL.Types.CustomTypes
import Hasura.RQL.Types.Eventing
import Hasura.RQL.Types.Headers (HeaderConf)
import Hasura.RQL.Types.Roles (adminRoleName)
import Hasura.RQL.Types.Schema.Options qualified as Options
import Hasura.RQL.Types.SchemaCache
@@ -27,12 +27,12 @@ import Hasura.EncJSON
import Hasura.GraphQL.Transport.HTTP.Protocol
import Hasura.Logging qualified as L
import Hasura.Prelude
import Hasura.RQL.DDL.Headers
import Hasura.RQL.IR.Select qualified as RS
import Hasura.RQL.IR.Value
import Hasura.RQL.Types.Action
import Hasura.RQL.Types.Backend
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.Headers (HeaderConf)
import Hasura.SQL.Backend
import Hasura.Session
import Hasura.Tracing qualified as Tracing
@@ -63,13 +63,13 @@ import Hasura.EncJSON
import Hasura.Eventing.EventTrigger (logQErr)
import Hasura.Logging qualified as L
import Hasura.Prelude
import Hasura.RQL.DDL.Headers
import Hasura.RQL.DDL.Webhook.Transform (MetadataResponseTransform, RequestTransform)
import Hasura.RQL.Types.Backend
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.EventTrigger
import Hasura.RQL.Types.Eventing
import Hasura.RQL.Types.Eventing.Backend
import Hasura.RQL.Types.Headers (HeaderValue (..))
import Hasura.RQL.Types.Metadata
import Hasura.RQL.Types.Metadata.Backend
import Hasura.RQL.Types.Metadata.Object
@@ -1,82 +1,18 @@
module Hasura.RQL.DDL.Headers
  ( HeaderConf (..),
    HeaderValue (HVEnv, HVValue),
    makeHeadersFromConf,
  ( makeHeadersFromConf,
    toHeadersConf,
  )
where

import Autodocodec (HasCodec (codec), bimapCodec, disjointEitherCodec, requiredField')
import Autodocodec qualified as AC
import Data.Aeson
import Data.CaseInsensitive qualified as CI
import Data.Environment qualified as Env
import Data.Text qualified as T
import Hasura.Base.Error
import Hasura.Base.Instances ()
import Hasura.Prelude
import Hasura.RQL.Types.Headers
import Network.HTTP.Types qualified as HTTP

data HeaderConf = HeaderConf HeaderName HeaderValue
  deriving (Show, Eq, Generic)

instance NFData HeaderConf

instance Hashable HeaderConf

type HeaderName = Text

data HeaderValue = HVValue Text | HVEnv Text
  deriving (Show, Eq, Generic)

instance NFData HeaderValue

instance Hashable HeaderValue

instance HasCodec HeaderConf where
  codec = bimapCodec dec enc $ disjointEitherCodec valCodec fromEnvCodec
    where
      valCodec =
        AC.object "HeaderConfValue" $
          (,)
            <$> requiredField' "name" AC..= fst
            <*> requiredField' "value" AC..= snd

      fromEnvCodec =
        AC.object "HeaderConfFromEnv" $
          (,)
            <$> requiredField' "name" AC..= fst
            <*> requiredField' "value_from_env" AC..= snd

      dec (Left (name, value)) = Right $ HeaderConf name (HVValue value)
      dec (Right (name, valueFromEnv)) =
        if T.isPrefixOf "HASURA_GRAPHQL_" valueFromEnv
          then Left $ "env variables starting with \"HASURA_GRAPHQL_\" are not allowed in value_from_env: " <> T.unpack valueFromEnv
          else Right $ HeaderConf name (HVEnv valueFromEnv)

      enc (HeaderConf name (HVValue val)) = Left (name, val)
      enc (HeaderConf name (HVEnv val)) = Right (name, val)

instance FromJSON HeaderConf where
  parseJSON (Object o) = do
    name <- o .: "name"
    value <- o .:? "value"
    valueFromEnv <- o .:? "value_from_env"
    case (value, valueFromEnv) of
      (Nothing, Nothing) -> fail "expecting value or value_from_env keys"
      (Just val, Nothing) -> return $ HeaderConf name (HVValue val)
      (Nothing, Just val) -> do
        when (T.isPrefixOf "HASURA_GRAPHQL_" val) $
          fail $
            "env variables starting with \"HASURA_GRAPHQL_\" are not allowed in value_from_env: " <> T.unpack val
        return $ HeaderConf name (HVEnv val)
      (Just _, Just _) -> fail "expecting only one of value or value_from_env keys"
  parseJSON _ = fail "expecting object for headers"

instance ToJSON HeaderConf where
  toJSON (HeaderConf name (HVValue val)) = object ["name" .= name, "value" .= val]
  toJSON (HeaderConf name (HVEnv val)) = object ["name" .= name, "value_from_env" .= val]

-- | Resolve configuration headers
makeHeadersFromConf ::
  MonadError QErr m => Env.Environment -> [HeaderConf] -> m [HTTP.Header]
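Not part of the diff — a minimal usage sketch for the signature above. It assumes Hasura's Data.Environment wrapper exposes getEnvironment; the helper name and the ExceptT stack are illustrative glue, not code from this commit:

-- Sketch: resolve configured headers against the process environment.
-- Assumed imports: Control.Monad.Except (ExceptT), Control.Monad.IO.Class (liftIO),
-- plus this module's imports (Env, HTTP, HeaderConf, QErr).
resolveConfiguredHeaders :: [HeaderConf] -> ExceptT QErr IO [HTTP.Header]
resolveConfiguredHeaders confs = do
  env <- liftIO Env.getEnvironment -- assumption: Data.Environment exports getEnvironment
  makeHeadersFromConf env confs -- raises a QErr if a referenced env var is missing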
@@ -1,19 +1,28 @@
module Hasura.RQL.DDL.OpenTelemetry
  ( runSetOpenTelemetryConfig,
    runSetOpenTelemetryStatus,
    parseOtelExporterConfig,
    parseOtelBatchSpanProcessorConfig,
  )
where

import Control.Lens ((.~))
import Hasura.Base.Error
import Data.Bifunctor (first)
import Data.Environment (Environment)
import Data.Map.Strict qualified as Map
import Data.Text qualified as Text
import Hasura.Base.Error (Code (InvalidParams), QErr, err400)
import Hasura.EncJSON
import Hasura.Metadata.Class ()
import Hasura.Prelude
import Hasura.Prelude hiding (first)
import Hasura.RQL.DDL.Headers (makeHeadersFromConf)
import Hasura.RQL.Types.Common (successMsg)
import Hasura.RQL.Types.Metadata
import Hasura.RQL.Types.Metadata.Object
import Hasura.RQL.Types.OpenTelemetry
import Hasura.RQL.Types.SchemaCache.Build
import Network.HTTP.Client (Request (requestHeaders), requestFromURI)
import Network.URI (parseURI)

-- | Set the OpenTelemetry configuration to the provided value.
runSetOpenTelemetryConfig ::
@@ -38,3 +47,57 @@ runSetOpenTelemetryStatus otelStatus = do
      MetadataModifier $
        metaOpenTelemetryConfig . ocStatus .~ otelStatus
  pure successMsg

-- | Smart constructor for 'OtelExporterInfo'.
--
-- Returns a @Left qErr@ to signal a validation error. Returns @Right Nothing@
-- to signal that the exporter should be disabled without raising an error.
--
-- Allows the trace endpoint to be unset if the entire OpenTelemetry system is
-- disabled.
parseOtelExporterConfig ::
  OtelStatus ->
  Environment ->
  OtelExporterConfig ->
  Either QErr (Maybe OtelExporterInfo)
parseOtelExporterConfig otelStatus env OtelExporterConfig {..} = do
  -- First validate everything but the trace endpoint
  headers <- makeHeadersFromConf env _oecHeaders
  -- Allow the trace endpoint to be unset when OpenTelemetry is disabled
  case _oecTracesEndpoint of
    Nothing ->
      case otelStatus of
        OtelDisabled ->
          pure Nothing
        OtelEnabled -> Left (err400 InvalidParams "Missing traces endpoint")
    Just rawTracesEndpoint -> do
      tracesUri <-
        maybeToEither (err400 InvalidParams "Invalid URL") $
          parseURI $
            Text.unpack rawTracesEndpoint
      uriRequest <-
        first (err400 InvalidParams . tshow) $ requestFromURI tracesUri
      pure $
        Just $
          OtelExporterInfo
            { _oteleiTracesBaseRequest =
                uriRequest
                  { requestHeaders = headers ++ requestHeaders uriRequest
                  },
              _oteleiResourceAttributes =
                Map.fromList $
                  map
                    (\NameValue {nv_name, nv_value} -> (nv_name, nv_value))
                    _oecResourceAttributes
            }

-- Smart constructor. Consistent with defaults.
parseOtelBatchSpanProcessorConfig ::
  OtelBatchSpanProcessorConfig -> Either QErr OtelBatchSpanProcessorInfo
parseOtelBatchSpanProcessorConfig OtelBatchSpanProcessorConfig {..} = do
  _obspiMaxExportBatchSize <-
    if _obspcMaxExportBatchSize > 0
      then Right _obspcMaxExportBatchSize
      else Left (err400 InvalidParams "max_export_batch_size must be a positive integer")
  let _obspiMaxQueueSize = 4 * _obspiMaxExportBatchSize -- consistent with default value of 2048
  pure OtelBatchSpanProcessorInfo {..}
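Not part of the diff — a sketch of how a caller might wire the two parsers above together when building the schema cache. The wrapper name and the tuple result are made up; the two functions, their argument order, and the config/info types are the ones shown in the hunk:

-- Illustrative wrapper only; both parsers run in the Either QErr monad.
buildOtelInfo ::
  OtelStatus ->
  Environment ->
  OtelExporterConfig ->
  OtelBatchSpanProcessorConfig ->
  Either QErr (Maybe OtelExporterInfo, OtelBatchSpanProcessorInfo)
buildOtelInfo otelStatus env exporterCfg processorCfg = do
  exporter <- parseOtelExporterConfig otelStatus env exporterCfg
  processor <- parseOtelBatchSpanProcessorConfig processorCfg
  pure (exporter, processor)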
@@ -58,6 +58,7 @@ import Hasura.RQL.DDL.ApiLimit (MonadGetApiTimeLimit (..))
import Hasura.RQL.DDL.CustomTypes
import Hasura.RQL.DDL.EventTrigger (MonadEventLogCleanup (..), buildEventTriggerInfo)
import Hasura.RQL.DDL.InheritedRoles (resolveInheritedRole)
import Hasura.RQL.DDL.OpenTelemetry (parseOtelBatchSpanProcessorConfig, parseOtelExporterConfig)
import Hasura.RQL.DDL.Relationship
import Hasura.RQL.DDL.RemoteRelationship (CreateRemoteSchemaRemoteRelationship (..), PartiallyResolvedSource (..), buildRemoteFieldInfo, getRemoteSchemaEntityJoinColumns)
import Hasura.RQL.DDL.ScheduledTrigger
@@ -48,7 +48,6 @@ import Data.Aeson qualified as J
import Data.HashMap.Strict qualified as Map
import Data.Kind (Type)
import Hasura.Prelude
import Hasura.RQL.DDL.Headers
import Hasura.RQL.DDL.Webhook.Transform (MetadataResponseTransform, RequestTransform)
import Hasura.RQL.Types.Action qualified as RQL
import Hasura.RQL.Types.Backend
@@ -60,6 +59,7 @@ import Hasura.RQL.Types.CustomTypes
    ObjectFieldDefinition (..),
    ObjectFieldName (..),
  )
import Hasura.RQL.Types.Headers
import Hasura.RQL.Types.Schema.Options (StringifyNumbers)
import Hasura.SQL.Backend
import Language.GraphQL.Draft.Syntax qualified as G
@@ -72,11 +72,11 @@ import Database.PG.Query.PTI qualified as PTI
import Hasura.Base.Error
import Hasura.Metadata.DTO.Utils (discriminatorField)
import Hasura.Prelude
import Hasura.RQL.DDL.Headers
import Hasura.RQL.DDL.Webhook.Transform (MetadataResponseTransform, RequestTransform)
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.CustomTypes
import Hasura.RQL.Types.Eventing (EventId (..))
import Hasura.RQL.Types.Headers (HeaderConf)
import Hasura.RQL.Types.Roles (RoleName)
import Hasura.Session (SessionVariables)
import Language.GraphQL.Draft.Syntax qualified as G
@@ -53,11 +53,11 @@ import Data.Time.Clock qualified as Time
import Database.PG.Query qualified as PG
import Hasura.Metadata.DTO.Utils (boolConstCodec, codecNamePrefix)
import Hasura.Prelude
import Hasura.RQL.DDL.Headers
import Hasura.RQL.DDL.Webhook.Transform (MetadataResponseTransform, RequestTransform)
import Hasura.RQL.Types.Backend
import Hasura.RQL.Types.Common (EnvRecord, InputWebhook, ResolvedWebhook, SourceName (..), TriggerOnReplication (..))
import Hasura.RQL.Types.Eventing
import Hasura.RQL.Types.Headers (HeaderConf (..))
import Hasura.SQL.Backend
import System.Cron (CronSchedule)
import Text.Regex.TDFA qualified as TDFA
@@ -24,7 +24,7 @@ import Data.Text.Extended
import Database.PG.Query qualified as PG
import Database.PG.Query.PTI qualified as PTI
import Hasura.Prelude
import Hasura.RQL.DDL.Headers
import Hasura.RQL.Types.Headers (HeaderConf)
import PostgreSQL.Binary.Encoding qualified as PE

newtype EventId = EventId {unEventId :: Text}
server/src-lib/Hasura/RQL/Types/Headers.hs (new file, 71 lines)
@@ -0,0 +1,71 @@
module Hasura.RQL.Types.Headers
  ( HeaderConf (..),
    HeaderValue (HVEnv, HVValue),
  )
where

import Autodocodec (HasCodec (codec), bimapCodec, disjointEitherCodec, requiredField')
import Autodocodec qualified as AC
import Data.Aeson
import Data.Text qualified as T
import Hasura.Prelude

data HeaderConf = HeaderConf HeaderName HeaderValue
  deriving (Show, Eq, Generic)

instance NFData HeaderConf

instance Hashable HeaderConf

type HeaderName = Text

data HeaderValue = HVValue Text | HVEnv Text
  deriving (Show, Eq, Generic)

instance NFData HeaderValue

instance Hashable HeaderValue

instance HasCodec HeaderConf where
  codec = bimapCodec dec enc $ disjointEitherCodec valCodec fromEnvCodec
    where
      valCodec =
        AC.object "HeaderConfValue" $
          (,)
            <$> requiredField' "name" AC..= fst
            <*> requiredField' "value" AC..= snd

      fromEnvCodec =
        AC.object "HeaderConfFromEnv" $
          (,)
            <$> requiredField' "name" AC..= fst
            <*> requiredField' "value_from_env" AC..= snd

      dec (Left (name, value)) = Right $ HeaderConf name (HVValue value)
      dec (Right (name, valueFromEnv)) =
        if T.isPrefixOf "HASURA_GRAPHQL_" valueFromEnv
          then Left $ "env variables starting with \"HASURA_GRAPHQL_\" are not allowed in value_from_env: " <> T.unpack valueFromEnv
          else Right $ HeaderConf name (HVEnv valueFromEnv)

      enc (HeaderConf name (HVValue val)) = Left (name, val)
      enc (HeaderConf name (HVEnv val)) = Right (name, val)

instance FromJSON HeaderConf where
  parseJSON (Object o) = do
    name <- o .: "name"
    value <- o .:? "value"
    valueFromEnv <- o .:? "value_from_env"
    case (value, valueFromEnv) of
      (Nothing, Nothing) -> fail "expecting value or value_from_env keys"
      (Just val, Nothing) -> return $ HeaderConf name (HVValue val)
      (Nothing, Just val) -> do
        when (T.isPrefixOf "HASURA_GRAPHQL_" val) $
          fail $
            "env variables starting with \"HASURA_GRAPHQL_\" are not allowed in value_from_env: " <> T.unpack val
        return $ HeaderConf name (HVEnv val)
      (Just _, Just _) -> fail "expecting only one of value or value_from_env keys"
  parseJSON _ = fail "expecting object for headers"

instance ToJSON HeaderConf where
  toJSON (HeaderConf name (HVValue val)) = object ["name" .= name, "value" .= val]
  toJSON (HeaderConf name (HVEnv val)) = object ["name" .= name, "value_from_env" .= val]
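Not part of the diff — a small sketch of the two JSON shapes the FromJSON/ToJSON instances above accept. The header and environment-variable names are made-up examples; it assumes Data.Aeson's decode is in scope and OverloadedStrings is enabled for the ByteString literals:

-- Assumed import: Data.Aeson (decode); plus this module's exports.
literalHeader :: Maybe HeaderConf
literalHeader = decode "{\"name\": \"X-Api-Key\", \"value\": \"secret\"}"
-- Just (HeaderConf "X-Api-Key" (HVValue "secret"))

envHeader :: Maybe HeaderConf
envHeader = decode "{\"name\": \"X-Api-Key\", \"value_from_env\": \"API_KEY\"}"
-- Just (HeaderConf "X-Api-Key" (HVEnv "API_KEY"))
-- Supplying both keys, neither key, or a value_from_env starting with
-- "HASURA_GRAPHQL_" makes the parse fail.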
@@ -18,18 +18,17 @@ module Hasura.RQL.Types.OpenTelemetry
    OtlpProtocol (..),
    OtelBatchSpanProcessorConfig (..),
    defaultOtelBatchSpanProcessorConfig,
    NameValue (..),

    -- * Parsed configuration (schema cache)
    OpenTelemetryInfo (..),
    otiExporterOtlp,
    otiBatchSpanProcessor,
    emptyOpenTelemetryInfo,
    OtelExporterInfo,
    parseOtelExporterConfig,
    OtelExporterInfo (..),
    getOtelExporterTracesBaseRequest,
    getOtelExporterResourceAttributes,
    OtelBatchSpanProcessorInfo,
    parseOtelBatchSpanProcessorConfig,
    OtelBatchSpanProcessorInfo (..),
    getMaxExportBatchSize,
    getMaxQueueSize,
    defaultOtelBatchSpanProcessorInfo,
@@ -42,20 +41,14 @@ import Autodocodec.Extended (boundedEnumCodec)
import Control.Lens.TH (makeLenses)
import Data.Aeson (FromJSON, ToJSON (..), (.!=), (.:), (.:?), (.=))
import Data.Aeson qualified as Aeson
import Data.Bifunctor (first)
import Data.Environment (Environment)
import Data.Map.Strict (Map)
import Data.Map.Strict qualified as Map
import Data.Set (Set)
import Data.Set qualified as Set
import Data.Text qualified as Text
import GHC.Generics
import Hasura.Base.Error (Code (InvalidParams), QErr, err400)
import Hasura.Prelude hiding (first)
import Hasura.RQL.DDL.Headers
import Hasura.RQL.Types.Headers (HeaderConf)
import Language.Haskell.TH.Syntax (Lift)
import Network.HTTP.Client (Request (requestHeaders), requestFromURI)
import Network.URI (parseURI)
import Network.HTTP.Client (Request)

--------------------------------------------------------------------------------
@@ -351,49 +344,6 @@ data OtelExporterInfo = OtelExporterInfo
    _oteleiResourceAttributes :: Map Text Text
  }

-- | Smart constructor for 'OtelExporterInfo'.
--
-- Returns a @Left qErr@ to signal a validation error. Returns @Right Nothing@
-- to signal that the exporter should be disabled without raising an error.
--
-- Allows the trace endpoint to be unset if the entire OpenTelemetry system is
-- disabled.
parseOtelExporterConfig ::
  OtelStatus ->
  Environment ->
  OtelExporterConfig ->
  Either QErr (Maybe OtelExporterInfo)
parseOtelExporterConfig otelStatus env OtelExporterConfig {..} = do
  -- First validate everything but the trace endpoint
  headers <- makeHeadersFromConf env _oecHeaders
  -- Allow the trace endpoint to be unset when OpenTelemetry is disabled
  case _oecTracesEndpoint of
    Nothing ->
      case otelStatus of
        OtelDisabled ->
          pure Nothing
        OtelEnabled -> Left (err400 InvalidParams "Missing traces endpoint")
    Just rawTracesEndpoint -> do
      tracesUri <-
        maybeToEither (err400 InvalidParams "Invalid URL") $
          parseURI $
            Text.unpack rawTracesEndpoint
      uriRequest <-
        first (err400 InvalidParams . tshow) $ requestFromURI tracesUri
      pure $
        Just $
          OtelExporterInfo
            { _oteleiTracesBaseRequest =
                uriRequest
                  { requestHeaders = headers ++ requestHeaders uriRequest
                  },
              _oteleiResourceAttributes =
                Map.fromList $
                  map
                    (\NameValue {nv_name, nv_value} -> (nv_name, nv_value))
                    _oecResourceAttributes
            }

getOtelExporterTracesBaseRequest :: OtelExporterInfo -> Request
getOtelExporterTracesBaseRequest = _oteleiTracesBaseRequest

@@ -410,17 +360,6 @@ data OtelBatchSpanProcessorInfo = OtelBatchSpanProcessorInfo
  }
  deriving (Lift)

-- Smart constructor. Consistent with defaults.
parseOtelBatchSpanProcessorConfig ::
  OtelBatchSpanProcessorConfig -> Either QErr OtelBatchSpanProcessorInfo
parseOtelBatchSpanProcessorConfig OtelBatchSpanProcessorConfig {..} = do
  _obspiMaxExportBatchSize <-
    if _obspcMaxExportBatchSize > 0
      then Right _obspcMaxExportBatchSize
      else Left (err400 InvalidParams "max_export_batch_size must be a positive integer")
  let _obspiMaxQueueSize = 4 * _obspiMaxExportBatchSize -- consistent with default value of 2048
  pure OtelBatchSpanProcessorInfo {..}

getMaxExportBatchSize :: OtelBatchSpanProcessorInfo -> Int
getMaxExportBatchSize = _obspiMaxExportBatchSize
@@ -29,8 +29,8 @@ import Data.Text qualified as T
import Data.Typeable (Typeable)
import Hasura.Base.Error
import Hasura.Prelude
import Hasura.RQL.DDL.Headers (HeaderConf (..))
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.Headers (HeaderConf (..))
import Hasura.RemoteSchema.Metadata.Base
import Hasura.RemoteSchema.Metadata.Customization
import Hasura.RemoteSchema.Metadata.Permission
@@ -63,8 +63,8 @@ import Hasura.Base.Error
import Hasura.GraphQL.Parser.Variable
import Hasura.GraphQL.Schema.Typename
import Hasura.Prelude
import Hasura.RQL.DDL.Headers (HeaderConf (..))
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.Headers (HeaderConf (..))
import Hasura.RQL.Types.Roles (RoleName)
import Hasura.RemoteSchema.Metadata
import Hasura.Session (SessionVariable)