server: null forwarding feature flag

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/9861
Co-authored-by: paritosh-08 <85472423+paritosh-08@users.noreply.github.com>
Co-authored-by: Rob Dominguez <24390149+robertjdominguez@users.noreply.github.com>
GitOrigin-RevId: 46153d68adf1acc707dc451555055600b006629e
Auke Booij 2023-07-21 17:10:53 +02:00 committed by hasura-bot
parent a2e15d9ae6
commit dad9a80dac
20 changed files with 174 additions and 76 deletions

@@ -113,8 +113,8 @@ When you use [webhook or JWT mode for authentication](/auth/authentication/index
### Admin Secrets
A list of valid admin [secret keys](/auth/authentication/multiple-admin-secrets.mdx) any one of which can be used to access the
Hasura instance.
A list of valid admin [secret keys](/auth/authentication/multiple-admin-secrets.mdx) any one of which can be used to
access the Hasura instance.
| | |
| ------------------- | ------------------------------ |
@@ -344,13 +344,13 @@ subgraph in an Apollo supergraph.
Sets the maximum cumulative length of all headers in bytes.
| | |
| ------------------- | ----------------------------------------- |
| **Flag** | `--max-total-header-length` |
| **Env var** | `HASURA_GRAPHQL_MAX_TOTAL_HEADER_LENGTH` |
| **Accepted values** | Integer |
| **Default** | `1024*1024` (1MB) |
| **Supported in** | CE, Enterprise Edition |
| | |
| ------------------- | ---------------------------------------- |
| **Flag** | `--max-total-header-length` |
| **Env var** | `HASURA_GRAPHQL_MAX_TOTAL_HEADER_LENGTH` |
| **Accepted values** | Integer |
| **Default** | `1024*1024` (1MB) |
| **Supported in** | CE, Enterprise Edition |
### Enable Console
@@ -367,8 +367,9 @@ Enable the Hasura Console (served by the server on `/` and `/console`).
### Enable High-cardinality Labels for Metrics
Enable high-cardinality labels for [Prometheus Metrics](/observability/enterprise-edition/prometheus/metrics.mdx). Enabling this setting
will add more labels to some of the metrics (e.g. `operation_name` label for Graphql subscription metrics).
Enable high-cardinality labels for [Prometheus Metrics](/observability/enterprise-edition/prometheus/metrics.mdx).
Enabling this setting will add more labels to some of the metrics (e.g. `operation_name` label for Graphql subscription
metrics).
| | |
| ------------------- | ------------------------------------------------------- |
@@ -804,7 +805,8 @@ The maximum number of query plans that can be cached, where `0` disables the cac
### Rate-Limit Redis TLS Hostname
The hostname to use for SNI when connecting to a rate-limiting [Redis instance over TLS](/caching/enterprise-caching.mdx).
The hostname to use for SNI when connecting to a rate-limiting
[Redis instance over TLS](/caching/enterprise-caching.mdx).
| | |
| ------------------- | ---------------------------------------------- |
@@ -885,8 +887,8 @@ The path to a shared CA store to use to connect to both (caching and rate-limiti
### Redis URL
The Redis URL to use for [query caching](/caching/enterprise-caching.mdx) and [Webhook Auth
Caching](/auth/authentication/webhook.mdx#webhook-auth-caching).
The Redis URL to use for [query caching](/caching/enterprise-caching.mdx) and
[Webhook Auth Caching](/auth/authentication/webhook.mdx#webhook-auth-caching).
| | |
| ------------------- | ---------------------------------------- |
@@ -946,6 +948,39 @@ List of third-party identity providers to enable Single Sign-on authentication f
| **Example** | `[{\"client_id\": \"<client-id-from-idp>\", \"name\": \"<display-name>\", \"scope\": \"openid\", \"authorization_url\": \"<login-url>\", \"request_token_url\": \"<request-token-url>\", \"admin_roles\": [\"admin\"], \"jwt_secret\": {\"type\": \"RS256\", \"jwk_url\": \"https://...\", \"issuer\": \"myapp\"}}]` |
| **Supported in** | EE |
### Skip Arguments with `null` Values for Remote Schema
Skip arguments with `null` values while resolving fields from Remote Schemas. For instance:
```graphql
query RemoteQuery {
users(type:null) {
id
...
}
}
```
is forwarded as:
```graphql
query RemoteQuery {
users {
id
...
}
}
```
| | |
| ------------------- | ----------------------------------------- |
| **Flag** | `--remote-schema-skip-nulls` |
| **Env var** | `HASURA_GRAPHQL_REMOTE_SCHEMA_SKIP_NULLS` |
| **Accepted values** | Boolean |
| **Options** | `true` or `false` |
| **Default** | `false` |
| **Supported in** | CE, Enterprise Edition, Cloud |
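To restate the flag's semantics outside GraphQL syntax, here is a small standalone Haskell sketch (the `Value` and `forwardArguments` names are illustrative only, not graphql-engine internals): with the default setting, arguments are forwarded exactly as written, explicit `null`s included; with the flag enabled, `null`-valued arguments are dropped before the query reaches the remote schema.
```haskell
-- Illustrative model only; not graphql-engine code.
import qualified Data.Map.Strict as Map

data Value
  = VNull
  | VInt Int
  | VString String
  deriving (Show, Eq)

type Arguments = Map.Map String Value

data NullForwarding
  = ForwardAccurately   -- default: forward arguments as written, nulls included
  | OnlyForwardNonNull  -- --remote-schema-skip-nulls: drop null-valued arguments
  deriving (Show, Eq)

forwardArguments :: NullForwarding -> Arguments -> Arguments
forwardArguments ForwardAccurately  = id
forwardArguments OnlyForwardNonNull = Map.filter (/= VNull)

main :: IO ()
main =
  -- prints: fromList [("limit",VInt 10)] -- the null-valued "type" argument is dropped
  print (forwardArguments OnlyForwardNonNull (Map.fromList [("type", VNull), ("limit", VInt 10)]))
```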
### Streaming Queries Multiplexed Batch Size
Multiplexed [streaming queries](/subscriptions/postgres/streaming/index.mdx) are split into batches of the specified

@@ -280,6 +280,7 @@ serveOptions =
soEnableTelemetry = Init.TelemetryDisabled,
soStringifyNum = Options.Don'tStringifyNumbers,
soDangerousBooleanCollapse = Options.Don'tDangerouslyCollapseBooleans,
soRemoteNullForwardingPolicy = Options.RemoteForwardAccurately,
soEnabledAPIs = testSuiteEnabledApis,
soLiveQueryOpts = ES.mkSubscriptionsOptions Nothing Nothing,
soStreamingQueryOpts = ES.mkSubscriptionsOptions Nothing Nothing,

@@ -502,7 +502,7 @@ initialiseAppContext env serveOptions@ServeOptions {..} AppInit {..} = do
appEnv@AppEnv {..} <- askAppEnv
let cacheStaticConfig = buildCacheStaticConfig appEnv
Loggers _ logger pgLogger = appEnvLoggers
sqlGenCtx = initSQLGenCtx soExperimentalFeatures soStringifyNum soDangerousBooleanCollapse
sqlGenCtx = initSQLGenCtx soExperimentalFeatures soStringifyNum soDangerousBooleanCollapse soRemoteNullForwardingPolicy
cacheDynamicConfig =
CacheDynamicConfig
soInferFunctionPermissions

@@ -247,7 +247,7 @@ buildAppContextRule ::
(ServeOptions impl, E.Environment, InvalidationKeys) `arr` AppContext
buildAppContextRule = proc (ServeOptions {..}, env, _keys) -> do
authMode <- buildAuthMode -< (soAdminSecret, soAuthHook, soJwtSecret, soUnAuthRole)
sqlGenCtx <- buildSqlGenCtx -< (soExperimentalFeatures, soStringifyNum, soDangerousBooleanCollapse)
let sqlGenCtx = initSQLGenCtx soExperimentalFeatures soStringifyNum soDangerousBooleanCollapse soRemoteNullForwardingPolicy
responseInternalErrorsConfig <- buildResponseInternalErrorsConfig -< (soAdminInternalErrors, soDevMode)
eventEngineCtx <- buildEventEngineCtx -< (soEventsHttpPoolSize, soEventsFetchInterval, soEventsFetchBatchSize)
returnA
@@ -275,10 +275,6 @@ buildAppContextRule = proc (ServeOptions {..}, env, _keys) -> do
acCloseWebsocketsOnMetadataChangeStatus = soCloseWebsocketsOnMetadataChangeStatus
}
where
buildSqlGenCtx = Inc.cache proc (experimentalFeatures, stringifyNum, dangerousBooleanCollapse) -> do
let sqlGenCtx = initSQLGenCtx experimentalFeatures stringifyNum dangerousBooleanCollapse
returnA -< sqlGenCtx
buildEventEngineCtx = Inc.cache proc (httpPoolSize, fetchInterval, fetchBatchSize) -> do
eventEngineCtx <- bindA -< initEventEngineCtx httpPoolSize fetchInterval fetchBatchSize
returnA -< eventEngineCtx
@@ -313,8 +309,8 @@ buildAppContextRule = proc (ServeOptions {..}, env, _keys) -> do
--------------------------------------------------------------------------------
-- subsets
initSQLGenCtx :: HashSet ExperimentalFeature -> Options.StringifyNumbers -> Options.DangerouslyCollapseBooleans -> SQLGenCtx
initSQLGenCtx experimentalFeatures stringifyNum dangerousBooleanCollapse =
initSQLGenCtx :: HashSet ExperimentalFeature -> Options.StringifyNumbers -> Options.DangerouslyCollapseBooleans -> Options.RemoteNullForwardingPolicy -> SQLGenCtx
initSQLGenCtx experimentalFeatures stringifyNum dangerousBooleanCollapse remoteNullForwardingPolicy =
let optimizePermissionFilters
| EFOptimizePermissionFilters `elem` experimentalFeatures = Options.OptimizePermissionFilters
| otherwise = Options.Don'tOptimizePermissionFilters
@@ -322,7 +318,7 @@ initSQLGenCtx experimentalFeatures stringifyNum dangerousBooleanCollapse =
bigqueryStringNumericInput
| EFBigQueryStringNumericInput `elem` experimentalFeatures = Options.EnableBigQueryStringNumericInput
| otherwise = Options.DisableBigQueryStringNumericInput
in SQLGenCtx stringifyNum dangerousBooleanCollapse optimizePermissionFilters bigqueryStringNumericInput
in SQLGenCtx stringifyNum dangerousBooleanCollapse remoteNullForwardingPolicy optimizePermissionFilters bigqueryStringNumericInput
buildCacheStaticConfig :: AppEnv -> CacheStaticConfig
buildCacheStaticConfig AppEnv {..} =

@@ -34,6 +34,7 @@ import Hasura.Prelude
import Hasura.RQL.DDL.Headers (makeHeadersFromConf)
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.Roles (adminRoleName)
import Hasura.RQL.Types.Schema.Options qualified as Options
import Hasura.RemoteSchema.Metadata
import Hasura.RemoteSchema.SchemaCache.Types
import Hasura.Server.Utils
@@ -93,7 +94,7 @@ stitchRemoteSchema rawIntrospectionResult rsDef@ValidatedRemoteSchemaDef {..} =
-- quickly reject an invalid schema.
void
$ runMemoizeT
$ runRemoteSchema minimumValidContext
$ runRemoteSchema minimumValidContext Options.RemoteForwardAccurately
$ buildRemoteParser @_ @_ @Parse
_rscIntroOriginal
mempty -- remote relationships

@@ -244,13 +244,14 @@ buildSchemaOptions ::
HashSet ExperimentalFeature ->
SchemaOptions
buildSchemaOptions
( SQLGenCtx stringifyNum dangerousBooleanCollapse optimizePermissionFilters bigqueryStringNumericInput,
( SQLGenCtx stringifyNum dangerousBooleanCollapse remoteNullForwardingPolicy optimizePermissionFilters bigqueryStringNumericInput,
functionPermsCtx
)
expFeatures =
SchemaOptions
{ soStringifyNumbers = stringifyNum,
soDangerousBooleanCollapse = dangerousBooleanCollapse,
soRemoteNullForwardingPolicy = remoteNullForwardingPolicy,
soInferFunctionPermissions = functionPermsCtx,
soOptimizePermissionFilters = optimizePermissionFilters,
soIncludeUpdateManyFields =
@@ -315,7 +316,7 @@ buildRoleContext options sources remotes actions customTypes role remoteSchemaPe
-- build all remote schemas
-- we only keep the ones that don't result in a name conflict
(remoteSchemaFields, !remoteSchemaErrors) <-
runRemoteSchema schemaContext
runRemoteSchema schemaContext (soRemoteNullForwardingPolicy schemaOptions)
$ buildAndValidateRemoteSchemas remotes sourcesQueryFields sourcesMutationBackendFields role remoteSchemaPermsCtx
let remotesQueryFields = concatMap piQuery remoteSchemaFields
remotesMutationFields = concat $ mapMaybe piMutation remoteSchemaFields
@@ -610,7 +611,7 @@ unauthenticatedContext options sources allRemotes expFeatures remoteSchemaPermsC
Options.DisableRemoteSchemaPermissions -> do
-- Permissions are disabled, unauthenticated users have access to remote schemas.
(remoteFields, remoteSchemaErrors) <-
runRemoteSchema fakeSchemaContext
runRemoteSchema fakeSchemaContext (soRemoteNullForwardingPolicy schemaOptions)
$ buildAndValidateRemoteSchemas allRemotes [] [] fakeRole remoteSchemaPermsCtx
pure
( fmap (fmap RFRemote) <$> concatMap piQuery remoteFields,
@@ -650,6 +651,7 @@ buildAndValidateRemoteSchemas ::
Options.RemoteSchemaPermissions ->
SchemaT
( SchemaContext,
Options.RemoteNullForwardingPolicy,
MkTypename,
CustomizeRemoteFieldName
)
@@ -707,6 +709,7 @@ buildRemoteSchemaParser ::
RemoteSchemaCtx ->
SchemaT
( SchemaContext,
Options.RemoteNullForwardingPolicy,
MkTypename,
CustomizeRemoteFieldName
)

@@ -261,6 +261,7 @@ runSourceSchema context options sourceInfo (SchemaT action) = runReaderT action
type MonadBuildRemoteSchema r m n =
( MonadBuildSchemaBase m n,
Has SchemaContext r,
Has Options.RemoteNullForwardingPolicy r,
Has CustomizeRemoteFieldName r,
Has MkTypename r
)
@@ -268,15 +269,17 @@ type MonadBuildRemoteSchema r m n =
-- | Runs a schema-building computation with all the context required to build a remote schema.
runRemoteSchema ::
SchemaContext ->
Options.RemoteNullForwardingPolicy ->
SchemaT
( SchemaContext,
Options.RemoteNullForwardingPolicy,
MkTypename,
CustomizeRemoteFieldName
)
m
a ->
m a
runRemoteSchema context (SchemaT action) = runReaderT action (context, mempty, mempty)
runRemoteSchema context nullForwarding (SchemaT action) = runReaderT action (context, nullForwarding, mempty, mempty)
type MonadBuildActionSchema r m n =
( MonadBuildSchemaBase m n,

@@ -33,6 +33,7 @@ import Hasura.RQL.IR.Root qualified as IR
import Hasura.RQL.IR.Value qualified as IR
import Hasura.RQL.Types.Relationships.Remote
import Hasura.RQL.Types.ResultCustomization
import Hasura.RQL.Types.Schema.Options qualified as Options
import Hasura.RQL.Types.SchemaCache
import Hasura.RemoteSchema.SchemaCache.Types
import Language.GraphQL.Draft.Syntax qualified as G
@@ -270,15 +271,18 @@ inputValueDefinitionParser schemaDoc (G.InputValueDefinition desc name fieldType
doNullability ::
forall k.
('Input <: k) =>
Options.RemoteNullForwardingPolicy ->
G.Nullability ->
Parser k n (Maybe (Altered, G.Value RemoteSchemaVariable)) ->
Parser k n (Maybe (Altered, G.Value RemoteSchemaVariable))
doNullability (G.Nullability True) parser =
doNullability Options.RemoteOnlyForwardNonNull (G.Nullability True) parser =
nullable parser `bind` pure . join
doNullability Options.RemoteForwardAccurately (G.Nullability True) parser =
P.nullableExact parser `bind` \case
P.NullableInputValue x -> pure x
P.NullableInputNull -> pure $ Just (Altered False, G.VNull)
P.NullableInputAbsent -> pure Nothing
doNullability (G.Nullability False) parser = parser
doNullability _nullForwarding (G.Nullability False) parser = parser
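The two policy-specific clauses above hinge on the three-way result returned by `P.nullableExact`: a present non-null value, a present explicit `null`, and an absent argument. A minimal standalone sketch of that distinction (the helper functions are illustrative and not part of the parser machinery):
```haskell
{-# LANGUAGE LambdaCase #-}

-- Illustrative model of how the two policies treat an explicit null
-- versus an argument that was not supplied at all.
data NullableInput a
  = NullableInputValue a  -- present, non-null
  | NullableInputNull     -- present, explicitly null
  | NullableInputAbsent   -- not supplied

-- In the result, Just (Just x) means "forward x", Just Nothing means
-- "forward an explicit null", Nothing means "omit the argument".

-- RemoteForwardAccurately (default): an explicit null is forwarded as null,
-- an absent argument is omitted.
forwardAccurately :: NullableInput a -> Maybe (Maybe a)
forwardAccurately = \case
  NullableInputValue x -> Just (Just x)
  NullableInputNull    -> Just Nothing
  NullableInputAbsent  -> Nothing

-- RemoteOnlyForwardNonNull (--remote-schema-skip-nulls): explicit null and
-- absent collapse to the same thing, so the argument is dropped either way.
onlyForwardNonNull :: NullableInput a -> Maybe (Maybe a)
onlyForwardNonNull = \case
  NullableInputValue x -> Just (Just x)
  _                    -> Nothing

main :: IO ()
main = do
  print (forwardAccurately  (NullableInputNull :: NullableInput Int))  -- Just Nothing: null is forwarded
  print (onlyForwardNonNull (NullableInputNull :: NullableInput Int))  -- Nothing: argument is dropped
```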
fieldConstructor ::
forall k.
@@ -301,49 +305,51 @@ inputValueDefinitionParser schemaDoc (G.InputValueDefinition desc name fieldType
) ->
G.GType ->
SchemaT r m (InputFieldsParser n (Maybe (Altered, G.Value RemoteSchemaVariable)))
buildField mkInputFieldsParser = \case
G.TypeNamed nullability typeName ->
case lookupType schemaDoc typeName of
Nothing -> throw400 RemoteSchemaError $ "Could not find type with name " <>> typeName
Just typeDef -> do
customizeTypename <- asks getter
case typeDef of
G.TypeDefinitionScalar scalarTypeDefn ->
pure $ mkInputFieldsParser $ doNullability nullability $ Just <$> remoteFieldScalarParser customizeTypename scalarTypeDefn
G.TypeDefinitionEnum defn ->
pure $ mkInputFieldsParser $ doNullability nullability $ Just <$> remoteFieldEnumParser customizeTypename defn
G.TypeDefinitionObject _ ->
throw400 RemoteSchemaError "expected input type, but got output type"
G.TypeDefinitionInputObject defn -> do
potentialObject <- remoteInputObjectParser schemaDoc defn
pure $ case potentialObject of
Left dummyInputFieldsParser -> do
-- We couldn't create a parser, meaning we can't create a field for this
-- object. Instead we must return a "pure" InputFieldsParser that always yields
-- the needed result without containing a field definition.
--
-- !!! WARNING #1 !!!
-- Since we have no input field in the schema for this field, we can't make the
-- distinction between it being actually present at parsing time or not. We
-- therefore choose to behave as if it was always present, and we always
-- include the preset values in the result.
--
-- !!! WARNING #2 !!!
-- We are re-using an 'InputFieldsParser' that was created earlier! Won't that
-- create new fields in the current context? No, it won't, but only because in
-- this case we know that it was created from the preset fields in
-- 'argumentsParser', and therefore contains no field definition.
Just <$> dummyInputFieldsParser
Right actualParser -> do
-- We're in the normal case: we do have a parser for the input object, which is
-- therefore valid (non-empty).
mkInputFieldsParser $ doNullability nullability $ Just <$> actualParser
G.TypeDefinitionUnion _ ->
throw400 RemoteSchemaError "expected input type, but got output type"
G.TypeDefinitionInterface _ ->
throw400 RemoteSchemaError "expected input type, but got output type"
G.TypeList nullability subType -> do
buildField (mkInputFieldsParser . doNullability nullability . fmap (Just . fmap G.VList . aggregateListAndAlteration) . P.list) subType
buildField mkInputFieldsParser gType = do
nullForwarding <- asks getter
case gType of
G.TypeNamed nullability typeName ->
case lookupType schemaDoc typeName of
Nothing -> throw400 RemoteSchemaError $ "Could not find type with name " <>> typeName
Just typeDef -> do
customizeTypename <- asks getter
case typeDef of
G.TypeDefinitionScalar scalarTypeDefn ->
pure $ mkInputFieldsParser $ doNullability nullForwarding nullability $ Just <$> remoteFieldScalarParser customizeTypename scalarTypeDefn
G.TypeDefinitionEnum defn ->
pure $ mkInputFieldsParser $ doNullability nullForwarding nullability $ Just <$> remoteFieldEnumParser customizeTypename defn
G.TypeDefinitionObject _ ->
throw400 RemoteSchemaError "expected input type, but got output type"
G.TypeDefinitionInputObject defn -> do
potentialObject <- remoteInputObjectParser schemaDoc defn
pure $ case potentialObject of
Left dummyInputFieldsParser -> do
-- We couldn't create a parser, meaning we can't create a field for this
-- object. Instead we must return a "pure" InputFieldsParser that always yields
-- the needed result without containing a field definition.
--
-- !!! WARNING #1 !!!
-- Since we have no input field in the schema for this field, we can't make the
-- distinction between it being actually present at parsing time or not. We
-- therefore choose to behave as if it was always present, and we always
-- include the preset values in the result.
--
-- !!! WARNING #2 !!!
-- We are re-using an 'InputFieldsParser' that was created earlier! Won't that
-- create new fields in the current context? No, it won't, but only because in
-- this case we know that it was created from the preset fields in
-- 'argumentsParser', and therefore contains no field definition.
Just <$> dummyInputFieldsParser
Right actualParser -> do
-- We're in the normal case: we do have a parser for the input object, which is
-- therefore valid (non-empty).
mkInputFieldsParser $ doNullability nullForwarding nullability $ Just <$> actualParser
G.TypeDefinitionUnion _ ->
throw400 RemoteSchemaError "expected input type, but got output type"
G.TypeDefinitionInterface _ ->
throw400 RemoteSchemaError "expected input type, but got output type"
G.TypeList nullability subType -> do
buildField (mkInputFieldsParser . doNullability nullForwarding nullability . fmap (Just . fmap G.VList . aggregateListAndAlteration) . P.list) subType
-- | remoteFieldScalarParser attempts to parse a scalar value for a given remote field
--

@@ -56,7 +56,7 @@ remoteRelationshipField schemaContext schemaOptions sourceCache remoteSchemaCach
ExcludeRemoteSourceRelationship -> pure Nothing
RFISchema remoteSchema ->
-- see Note [SchemaT and stacking]
runRemoteSchema schemaContext do
runRemoteSchema schemaContext (soRemoteNullForwardingPolicy schemaOptions) do
fields <- remoteRelationshipToSchemaField remoteSchemaCache remoteSchemaPermissions _rfiLHS remoteSchema
pure $ fmap (pure . fmap IR.RemoteSchemaField) fields

@@ -299,6 +299,7 @@ isSystemDefined = unSystemDefined
data SQLGenCtx = SQLGenCtx
{ stringifyNum :: Options.StringifyNumbers,
dangerousBooleanCollapse :: Options.DangerouslyCollapseBooleans,
remoteNullForwardingPolicy :: Options.RemoteNullForwardingPolicy,
optimizePermissionFilters :: Options.OptimizePermissionFilters,
bigqueryStringNumericInput :: Options.BigQueryStringNumericInput
}

@@ -4,6 +4,7 @@ module Hasura.RQL.Types.Schema.Options
( SchemaOptions (..),
StringifyNumbers (..),
DangerouslyCollapseBooleans (..),
RemoteNullForwardingPolicy (..),
InferFunctionPermissions (..),
RemoteSchemaPermissions (..),
OptimizePermissionFilters (..),
@@ -24,6 +25,7 @@ import Hasura.Prelude
data SchemaOptions = SchemaOptions
{ soStringifyNumbers :: StringifyNumbers,
soDangerousBooleanCollapse :: DangerouslyCollapseBooleans,
soRemoteNullForwardingPolicy :: RemoteNullForwardingPolicy,
soInferFunctionPermissions :: InferFunctionPermissions,
soOptimizePermissionFilters :: OptimizePermissionFilters,
soIncludeUpdateManyFields :: IncludeUpdateManyFields,
@@ -87,6 +89,24 @@ instance ToJSON DangerouslyCollapseBooleans where
DangerouslyCollapseBooleans -> Bool True
Don'tDangerouslyCollapseBooleans -> Bool False
data RemoteNullForwardingPolicy
= RemoteForwardAccurately
| RemoteOnlyForwardNonNull
deriving (Show, Eq)
instance FromJSON RemoteNullForwardingPolicy where
parseJSON =
withBool "RemoteNullForwardingPolicy"
$ pure
. \case
False -> RemoteForwardAccurately
True -> RemoteOnlyForwardNonNull
instance ToJSON RemoteNullForwardingPolicy where
toJSON = \case
RemoteForwardAccurately -> Bool False
RemoteOnlyForwardNonNull -> Bool True
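Both the JSON and (further below) the environment-variable encodings are plain booleans in which `true` means "skip nulls", so the default `RemoteForwardAccurately` corresponds to `false`. A quick round-trip check of the instances above, assuming the exports shown at the top of this module and a modern aeson that decodes top-level booleans:
```haskell
{-# LANGUAGE OverloadedStrings #-}

import Data.Aeson (decode, encode)
import Hasura.RQL.Types.Schema.Options (RemoteNullForwardingPolicy (..))

main :: IO ()
main = do
  -- false <-> RemoteForwardAccurately (the default), true <-> RemoteOnlyForwardNonNull
  print (decode "false" :: Maybe RemoteNullForwardingPolicy)  -- Just RemoteForwardAccurately
  print (decode "true"  :: Maybe RemoteNullForwardingPolicy)  -- Just RemoteOnlyForwardNonNull
  print (encode RemoteOnlyForwardNonNull)                     -- "true"
```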
-- | Should we infer function permissions? If this flag is set to
-- 'InferFunctionPermissions', we may fail to build expression parsers
-- in 'buildQueryAndSubscriptionFields' for users with unrecognised roles.

@@ -169,6 +169,7 @@ mkServeOptions sor@ServeOptionsRaw {..} = do
Options.Don'tStringifyNumbers -> withOptionDefault Nothing stringifyNumOption
stringifyNums -> pure stringifyNums
soDangerousBooleanCollapse <- withOptionDefault rsoDangerousBooleanCollapse dangerousBooleanCollapseOption
soRemoteNullForwardingPolicy <- withOptionDefault rsoRemoteNullForwardingPolicy remoteNullForwardingPolicyOption
soEnabledAPIs <- withOptionDefault rsoEnabledAPIs enabledAPIsOption
soLiveQueryOpts <- do
_lqoRefetchInterval <- withOptionDefault rsoMxRefetchInt mxRefetchDelayOption

@@ -31,6 +31,7 @@ module Hasura.Server.Init.Arg.Command.Serve
wsReadCookieOption,
stringifyNumOption,
dangerousBooleanCollapseOption,
remoteNullForwardingPolicyOption,
enabledAPIsOption,
mxRefetchDelayOption,
mxBatchSizeOption,
@@ -119,6 +120,7 @@ serveCommandParser =
<*> parseWsReadCookie
<*> parseStringifyNum
<*> parseDangerousBooleanCollapse
<*> parseRemoteNullForwardingPolicy
<*> parseEnabledAPIs
<*> parseMxRefetchDelay
<*> parseMxBatchSize
@@ -628,6 +630,24 @@ dangerousBooleanCollapseOption =
<> " [DEPRECATED, WILL BE REMOVED SOON] (default: false)"
}
parseRemoteNullForwardingPolicy :: Opt.Parser (Maybe Options.RemoteNullForwardingPolicy)
parseRemoteNullForwardingPolicy =
fmap (bool Nothing (Just Options.RemoteOnlyForwardNonNull))
$ Opt.switch
( Opt.long "remote-schema-skip-nulls"
<> Opt.help (Config._helpMessage remoteNullForwardingPolicyOption)
)
remoteNullForwardingPolicyOption :: Config.Option Options.RemoteNullForwardingPolicy
remoteNullForwardingPolicyOption =
Config.Option
{ Config._default = Options.RemoteForwardAccurately,
Config._envVar = "HASURA_GRAPHQL_REMOTE_SCHEMA_SKIP_NULLS",
Config._helpMessage =
"Skip null values from arguments while resolving fields from remote schemas. (default: false, i.e."
<> " forward null values in argument as well)"
}
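Combined with `withOptionDefault` in `mkServeOptions` above, the switch produces `Nothing` when the flag is absent (falling back to `RemoteForwardAccurately`) and `Just RemoteOnlyForwardNonNull` when it is passed. A standalone sketch of that wiring using optparse-applicative (the local `Policy` type and `main` wrapper are illustrative, not graphql-engine code):
```haskell
import Data.Bool (bool)
import Data.Maybe (fromMaybe)
import Options.Applicative

-- Local stand-in for Options.RemoteNullForwardingPolicy.
data Policy = ForwardAccurately | OnlyForwardNonNull
  deriving (Show)

-- Mirrors parseRemoteNullForwardingPolicy: a bare switch lifted into Maybe,
-- where Nothing means "flag not given, use the default".
policyParser :: Parser (Maybe Policy)
policyParser =
  bool Nothing (Just OnlyForwardNonNull)
    <$> switch (long "remote-schema-skip-nulls" <> help "Skip null arguments for remote schemas")

main :: IO ()
main = do
  mPolicy <- execParser (info (policyParser <**> helper) fullDesc)
  -- Stand-in for withOptionDefault: fall back to the documented default.
  print (fromMaybe ForwardAccurately mPolicy)
```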
parseEnabledAPIs :: Opt.Parser (Maybe (HashSet Config.API))
parseEnabledAPIs =
Opt.optional
@@ -1295,6 +1315,7 @@ serveCmdFooter =
Config.optionPP metadataDBExtensionsSchemaOption,
Config.optionPP apolloFederationStatusOption,
Config.optionPP closeWebsocketsOnMetadataChangeOption,
Config.optionPP maxTotalHeaderLengthOption
Config.optionPP maxTotalHeaderLengthOption,
Config.optionPP remoteNullForwardingPolicyOption
]
eventEnvs = [Config.optionPP graphqlEventsHttpPoolSizeOption, Config.optionPP graphqlEventsFetchIntervalOption]

@@ -290,6 +290,7 @@ data ServeOptionsRaw impl = ServeOptionsRaw
rsoWsReadCookie :: WsReadCookieStatus,
rsoStringifyNum :: Schema.Options.StringifyNumbers,
rsoDangerousBooleanCollapse :: Maybe Schema.Options.DangerouslyCollapseBooleans,
rsoRemoteNullForwardingPolicy :: Maybe Schema.Options.RemoteNullForwardingPolicy,
rsoEnabledAPIs :: Maybe (HashSet API),
rsoMxRefetchInt :: Maybe Subscription.Options.RefetchInterval,
rsoMxBatchSize :: Maybe Subscription.Options.BatchSize,
@@ -591,6 +592,7 @@ data ServeOptions impl = ServeOptions
soEnableTelemetry :: TelemetryStatus,
soStringifyNum :: Schema.Options.StringifyNumbers,
soDangerousBooleanCollapse :: Schema.Options.DangerouslyCollapseBooleans,
soRemoteNullForwardingPolicy :: Schema.Options.RemoteNullForwardingPolicy,
soEnabledAPIs :: HashSet API,
soLiveQueryOpts :: Subscription.Options.LiveQueriesOptions,
soStreamingQueryOpts :: Subscription.Options.StreamQueriesOptions,

@@ -253,6 +253,9 @@ instance FromEnv Options.RemoteSchemaPermissions where
instance FromEnv Options.DangerouslyCollapseBooleans where
fromEnv = fmap (bool Options.Don'tDangerouslyCollapseBooleans Options.DangerouslyCollapseBooleans) . fromEnv @Bool
instance FromEnv Options.RemoteNullForwardingPolicy where
fromEnv = fmap (bool Options.RemoteForwardAccurately Options.RemoteOnlyForwardNonNull) . fromEnv @Bool
instance FromEnv Options.InferFunctionPermissions where
fromEnv = fmap (bool Options.Don'tInferFunctionPermissions Options.InferFunctionPermissions) . fromEnv @Bool

@@ -32,6 +32,7 @@ import Hasura.RQL.IR.Root
import Hasura.RQL.IR.Value
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.Roles (adminRoleName)
import Hasura.RQL.Types.Schema.Options qualified as Options
import Hasura.RemoteSchema.SchemaCache
import Hasura.Session (BackendOnlyFieldAccess (..), SessionVariables, UserInfo (..), mkSessionVariable)
import Language.GraphQL.Draft.Parser qualified as G
@@ -137,7 +138,7 @@ buildQueryParsers introspection = do
RemoteSchemaParser query _ _ <-
runError
$ runMemoizeT
$ runRemoteSchema schemaContext
$ runRemoteSchema schemaContext Options.RemoteForwardAccurately
$ buildRemoteParser introResult remoteSchemaRels remoteSchemaInfo
pure
$ head query

@@ -64,6 +64,7 @@ emptyServeOptionsRaw =
rsoWsReadCookie = UUT.WsReadCookieDisabled,
rsoStringifyNum = Options.Don'tStringifyNumbers,
rsoDangerousBooleanCollapse = Nothing,
rsoRemoteNullForwardingPolicy = Nothing,
rsoEnabledAPIs = Nothing,
rsoMxRefetchInt = Nothing,
rsoMxBatchSize = Nothing,

@@ -64,6 +64,7 @@ defaultSchemaOptions =
SchemaOptions
{ soStringifyNumbers = Options.Don'tStringifyNumbers,
soDangerousBooleanCollapse = Options.Don'tDangerouslyCollapseBooleans,
soRemoteNullForwardingPolicy = Options.RemoteForwardAccurately,
soInferFunctionPermissions = Options.InferFunctionPermissions,
soOptimizePermissionFilters = Options.Don'tOptimizePermissionFilters,
soIncludeUpdateManyFields = Options.IncludeUpdateManyFields,

@@ -62,6 +62,7 @@ serveOptions =
soEnableTelemetry = Init.TelemetryDisabled,
soStringifyNum = Options.Don'tStringifyNumbers,
soDangerousBooleanCollapse = Options.Don'tDangerouslyCollapseBooleans,
soRemoteNullForwardingPolicy = Options.RemoteForwardAccurately,
soEnabledAPIs = testSuiteEnabledApis,
soLiveQueryOpts = ES.mkSubscriptionsOptions Nothing Nothing,
soStreamingQueryOpts = ES.mkSubscriptionsOptions Nothing Nothing,

@@ -105,6 +105,7 @@ main = do
SQLGenCtx
Options.Don'tStringifyNumbers
Options.Don'tDangerouslyCollapseBooleans
Options.RemoteForwardAccurately
Options.Don'tOptimizePermissionFilters
Options.EnableBigQueryStringNumericInput
maintenanceMode = MaintenanceModeDisabled