diff --git a/server/graphql-engine.cabal b/server/graphql-engine.cabal index 60c22eb6e22..bb417c5e142 100644 --- a/server/graphql-engine.cabal +++ b/server/graphql-engine.cabal @@ -409,6 +409,7 @@ library , Hasura.Backends.BigQuery.Instances.Metadata , Hasura.Backends.BigQuery.Meta , Hasura.Backends.BigQuery.Name + , Hasura.Backends.BigQuery.Parser.Scalars , Hasura.Backends.BigQuery.Plan , Hasura.Backends.BigQuery.Source , Hasura.Backends.BigQuery.ToQuery diff --git a/server/src-lib/Hasura/App.hs b/server/src-lib/Hasura/App.hs index 91eed0cf5fa..180f7574d26 100644 --- a/server/src-lib/Hasura/App.hs +++ b/server/src-lib/Hasura/App.hs @@ -366,7 +366,11 @@ initialiseServeCtx env GlobalCtx {..} so@ServeOptions {..} serverMetrics = do optimizePermissionFilters | EFOptimizePermissionFilters `elem` soExperimentalFeatures = Options.OptimizePermissionFilters | otherwise = Options.Don'tOptimizePermissionFilters - sqlGenCtx = SQLGenCtx soStringifyNum soDangerousBooleanCollapse optimizePermissionFilters + + bigqueryStringNumericInput + | EFBigQueryStringNumericInput `elem` soExperimentalFeatures = Options.EnableBigQueryStringNumericInput + | otherwise = Options.DisableBigQueryStringNumericInput + sqlGenCtx = SQLGenCtx soStringifyNum soDangerousBooleanCollapse optimizePermissionFilters bigqueryStringNumericInput let serverConfigCtx = ServerConfigCtx @@ -679,7 +683,12 @@ mkHGEServer setupHook env ServeOptions {..} ServeCtx {..} initTime postPollHook let optimizePermissionFilters | EFOptimizePermissionFilters `elem` soExperimentalFeatures = Options.OptimizePermissionFilters | otherwise = Options.Don'tOptimizePermissionFilters - sqlGenCtx = SQLGenCtx soStringifyNum soDangerousBooleanCollapse optimizePermissionFilters + + bigqueryStringNumericInput + | EFBigQueryStringNumericInput `elem` soExperimentalFeatures = Options.EnableBigQueryStringNumericInput + | otherwise = Options.DisableBigQueryStringNumericInput + + sqlGenCtx = SQLGenCtx soStringifyNum soDangerousBooleanCollapse 
optimizePermissionFilters bigqueryStringNumericInput Loggers loggerCtx logger _ = _scLoggers authModeRes <- diff --git a/server/src-lib/Hasura/Backends/BigQuery/Instances/Schema.hs b/server/src-lib/Hasura/Backends/BigQuery/Instances/Schema.hs index ced978abec4..9ce945660f0 100644 --- a/server/src-lib/Hasura/Backends/BigQuery/Instances/Schema.hs +++ b/server/src-lib/Hasura/Backends/BigQuery/Instances/Schema.hs @@ -12,6 +12,7 @@ import Data.Text qualified as T import Data.Text.Casing qualified as C import Data.Text.Extended import Hasura.Backends.BigQuery.Name +import Hasura.Backends.BigQuery.Parser.Scalars qualified as BQP import Hasura.Backends.BigQuery.Types qualified as BigQuery import Hasura.Base.Error import Hasura.Base.ErrorMessage (toErrorMessage) @@ -70,7 +71,7 @@ instance BackendSchema 'BigQuery where -- individual components columnParser = bqColumnParser enumParser = bqEnumParser - possiblyNullable = bqPossiblyNullable + possiblyNullable = const bqPossiblyNullable scalarSelectionArgumentsParser _ = pure Nothing orderByOperators _sourceInfo = bqOrderByOperators comparisonExps = const bqComparisonExps @@ -92,33 +93,54 @@ bqColumnParser :: ColumnType 'BigQuery -> G.Nullability -> SchemaT r m (Parser 'Both n (IR.ValueWithOrigin (ColumnValue 'BigQuery))) -bqColumnParser columnType nullability = +bqColumnParser columnType nullability = do + Options.SchemaOptions {soBigQueryStringNumericInput} <- asks getter + let numericInputParser :: forall a. a -> a -> a + numericInputParser builtin custom = + case soBigQueryStringNumericInput of + Options.EnableBigQueryStringNumericInput -> custom + Options.DisableBigQueryStringNumericInput -> builtin peelWithOrigin . 
fmap (ColumnValue columnType) <$> case columnType of - ColumnScalar scalarType -> case scalarType of - -- bytestrings - -- we only accept string literals - BigQuery.BytesScalarType -> pure $ bqPossiblyNullable scalarType nullability $ BigQuery.StringValue <$> stringBased _Bytes - -- text - BigQuery.StringScalarType -> pure $ bqPossiblyNullable scalarType nullability $ BigQuery.StringValue <$> P.string - -- floating point values - -- TODO: we do not perform size checks here, meaning we would accept an - -- out-of-bounds value as long as it can be represented by a GraphQL float; this - -- will in all likelihood error on the BigQuery side. Do we want to handle those - -- properly here? - BigQuery.FloatScalarType -> pure $ bqPossiblyNullable scalarType nullability $ BigQuery.FloatValue . BigQuery.doubleToFloat64 <$> P.float - BigQuery.IntegerScalarType -> pure $ bqPossiblyNullable scalarType nullability $ BigQuery.IntegerValue . BigQuery.intToInt64 . fromIntegral <$> P.int - BigQuery.DecimalScalarType -> pure $ bqPossiblyNullable scalarType nullability $ BigQuery.DecimalValue . BigQuery.Decimal . BigQuery.scientificToText <$> P.scientific - BigQuery.BigDecimalScalarType -> pure $ bqPossiblyNullable scalarType nullability $ BigQuery.BigDecimalValue . BigQuery.BigDecimal . BigQuery.scientificToText <$> P.scientific - -- boolean type - BigQuery.BoolScalarType -> pure $ bqPossiblyNullable scalarType nullability $ BigQuery.BoolValue <$> P.boolean - BigQuery.DateScalarType -> pure $ bqPossiblyNullable scalarType nullability $ BigQuery.DateValue . BigQuery.Date <$> stringBased _Date - BigQuery.TimeScalarType -> pure $ bqPossiblyNullable scalarType nullability $ BigQuery.TimeValue . BigQuery.Time <$> stringBased _Time - BigQuery.DatetimeScalarType -> pure $ bqPossiblyNullable scalarType nullability $ BigQuery.DatetimeValue . 
BigQuery.Datetime <$> stringBased _Datetime - BigQuery.GeographyScalarType -> - pure $ bqPossiblyNullable scalarType nullability $ BigQuery.GeographyValue . BigQuery.Geography <$> throughJSON _Geography - BigQuery.TimestampScalarType -> - pure $ bqPossiblyNullable scalarType nullability $ BigQuery.TimestampValue . BigQuery.Timestamp <$> stringBased _Timestamp - ty -> throwError $ internalError $ T.pack $ "Type currently unsupported for BigQuery: " ++ show ty + ColumnScalar scalarType -> do + p <- case scalarType of + -- bytestrings + -- we only accept string literals + BigQuery.BytesScalarType -> pure $ BigQuery.StringValue <$> stringBased _Bytes + -- text + BigQuery.StringScalarType -> pure $ BigQuery.StringValue <$> P.string + -- floating point values + + BigQuery.FloatScalarType -> + pure $ + BigQuery.FloatValue + <$> numericInputParser (BigQuery.doubleToFloat64 <$> P.float) BQP.bqFloat64 + BigQuery.IntegerScalarType -> + pure $ + BigQuery.IntegerValue + <$> numericInputParser (BigQuery.intToInt64 . fromIntegral <$> P.int) BQP.bqInt64 + BigQuery.DecimalScalarType -> + pure $ + BigQuery.DecimalValue + <$> numericInputParser + (BigQuery.Decimal . BigQuery.scientificToText <$> P.scientific) + BQP.bqDecimal + BigQuery.BigDecimalScalarType -> + pure $ + BigQuery.BigDecimalValue + <$> numericInputParser + (BigQuery.BigDecimal . BigQuery.scientificToText <$> P.scientific) + BQP.bqBigDecimal + -- boolean type + BigQuery.BoolScalarType -> pure $ BigQuery.BoolValue <$> P.boolean + BigQuery.DateScalarType -> pure $ BigQuery.DateValue . BigQuery.Date <$> stringBased _Date + BigQuery.TimeScalarType -> pure $ BigQuery.TimeValue . BigQuery.Time <$> stringBased _Time + BigQuery.DatetimeScalarType -> pure $ BigQuery.DatetimeValue . BigQuery.Datetime <$> stringBased _Datetime + BigQuery.GeographyScalarType -> + pure $ BigQuery.GeographyValue . BigQuery.Geography <$> throughJSON _Geography + BigQuery.TimestampScalarType -> + pure $ BigQuery.TimestampValue . 
BigQuery.Timestamp <$> stringBased _Timestamp + ty -> throwError $ internalError $ T.pack $ "Type currently unsupported for BigQuery: " ++ show ty + return $ bqPossiblyNullable nullability p ColumnEnumReference (EnumReference tableName enumValues customTableName) -> case nonEmpty (Map.toList enumValues) of Just enumValuesList -> bqEnumParser tableName enumValuesList customTableName nullability @@ -145,7 +167,7 @@ bqEnumParser :: SchemaT r m (Parser 'Both n (ScalarValue 'BigQuery)) bqEnumParser tableName enumValues customTableName nullability = do enumName <- mkEnumTypeName @'BigQuery tableName customTableName - pure $ bqPossiblyNullable BigQuery.StringScalarType nullability $ P.enum enumName Nothing (mkEnumValue <$> enumValues) + pure $ bqPossiblyNullable nullability $ P.enum enumName Nothing (mkEnumValue <$> enumValues) where mkEnumValue :: (EnumValue, EnumValueInfo) -> (P.Definition P.EnumValueInfo, ScalarValue 'BigQuery) mkEnumValue (EnumValue value, EnumValueInfo description) = @@ -155,11 +177,10 @@ bqEnumParser tableName enumValues customTableName nullability = do bqPossiblyNullable :: MonadParse m => - ScalarType 'BigQuery -> G.Nullability -> Parser 'Both m (ScalarValue 'BigQuery) -> Parser 'Both m (ScalarValue 'BigQuery) -bqPossiblyNullable _scalarType (G.Nullability isNullable) +bqPossiblyNullable (G.Nullability isNullable) | isNullable = fmap (fromMaybe BigQuery.NullValue) . P.nullable | otherwise = id diff --git a/server/src-lib/Hasura/Backends/BigQuery/Parser/Scalars.hs b/server/src-lib/Hasura/Backends/BigQuery/Parser/Scalars.hs new file mode 100644 index 00000000000..9cbb1acebe2 --- /dev/null +++ b/server/src-lib/Hasura/Backends/BigQuery/Parser/Scalars.hs @@ -0,0 +1,148 @@ +{-# LANGUAGE QuasiQuotes #-} + +-- | This module defines the scalars we use specific to the BigQuery +-- schema. 
+-- +-- An idiosyncrasy of BigQuery is that numbers serialized via JSON use string +-- literals instead of number literals, because BigQuery handles wider-bit +-- numbers than JSON/JavaScript does. +-- +-- Therefore, the BigQuery Backend uses bespoke parsers for numeric scalar +-- input, which accept string literals as well as number literals, such that we +-- preserve symmetry with output formats. +module Hasura.Backends.BigQuery.Parser.Scalars + ( bqInt64, + bqFloat64, + bqDecimal, + bqBigDecimal, + ) +where + +import Data.Aeson qualified as A +import Data.Int (Int64) +import Data.Scientific (Scientific) +import Data.Scientific qualified as S +import Data.Scientific qualified as Scientific +import Data.Text qualified as Text +import Hasura.Backends.BigQuery.Types qualified as BigQuery +import Hasura.Base.ErrorMessage (toErrorMessage) +import Hasura.Base.ErrorValue (dquote) +import Hasura.GraphQL.Parser.Class +import Hasura.GraphQL.Parser.ErrorCode +import Hasura.GraphQL.Parser.Internal.TypeChecking +import Hasura.GraphQL.Parser.Internal.Types +import Hasura.GraphQL.Parser.Schema +import Hasura.GraphQL.Parser.Variable +import Hasura.Prelude +import Language.GraphQL.Draft.Syntax hiding (Definition) +import Language.GraphQL.Draft.Syntax qualified as G +import Language.GraphQL.Draft.Syntax.QQ qualified as G +import Text.ParserCombinators.ReadP + +bqInt64 :: forall origin m. MonadParse m => Parser origin 'Both m BigQuery.Int64 +bqInt64 = mkScalar name "64-bit integers. Accepts both string and number literals." 
\case + GraphQLValue (VInt i) + | checkIntegerBounds i -> return $ BigQuery.Int64 (tshow i) + | otherwise -> boundsFailure (tshow i) + GraphQLValue (VString s) -> integralText s + JSONValue (A.String s) -> integralText s + JSONValue (A.Number n) -> integralSci (tshow n) n + v -> typeMismatch name "a 64-bit integer" v + where + name = [G.name|bigquery_int|] + + checkIntegerBounds :: Integer -> Bool + checkIntegerBounds i = toInteger (minBound @Int64) <= i && i <= toInteger (maxBound @Int64) + + integralText :: Text -> m BigQuery.Int64 + integralText inputText + | [(sci, "")] <- readP_to_S Scientific.scientificP (Text.unpack inputText) = integralSci inputText sci + | otherwise = stringNotationError name inputText + + integralSci :: Text -> Scientific -> m BigQuery.Int64 + integralSci inputText sci + | Scientific.isInteger sci = + case Scientific.toBoundedInteger @Int64 sci of + Just v -> return $ BigQuery.intToInt64 v + Nothing -> boundsFailure inputText + | otherwise = integralFailure inputText + + boundsFailure, integralFailure :: forall a. Text -> m a + boundsFailure inputText = parseErrorWith ParseFailed $ "The value " <> toErrorMessage inputText <> " lies outside the accepted numerical integral bounds." + integralFailure inputText = parseErrorWith ParseFailed $ "The value " <> toErrorMessage inputText <> " has a non-zero fractional part." + +bqFloat64 :: forall origin m. MonadParse m => Parser origin 'Both m BigQuery.Float64 +bqFloat64 = mkScalar name "64-bit floats. Accepts both string and number literals." 
\case + GraphQLValue (VFloat f) -> floatSci (tshow f) f + GraphQLValue (VInt i) -> floatSci (tshow i) (fromInteger i) + GraphQLValue (VString s) -> floatText s + JSONValue (A.String s) -> floatText s + JSONValue (A.Number n) -> floatSci (tshow n) n + v -> typeMismatch name "a 64-bit float" v + where + name = [G.name|bigquery_float|] + + floatText :: Text -> m BigQuery.Float64 + floatText inputText + | [(sci, "")] <- readP_to_S Scientific.scientificP (Text.unpack inputText) = floatSci inputText sci + | otherwise = stringNotationError name inputText + + floatSci :: Text -> Scientific -> m BigQuery.Float64 + floatSci inputText sci = + case Scientific.toBoundedRealFloat @Double sci of + Right v -> return $ BigQuery.doubleToFloat64 v + Left _ -> boundsFailure inputText + + boundsFailure :: forall a. Text -> m a + boundsFailure inputText = parseErrorWith ParseFailed $ "The value " <> toErrorMessage inputText <> " lies outside the accepted numerical integral bounds." + +bqBigDecimal :: MonadParse m => Parser origin 'Both m BigQuery.BigDecimal +bqBigDecimal = mkScalar name "BigDecimals. Accepts both string and number literals." $ fmap (BigQuery.BigDecimal . BigQuery.scientificToText) . decimal name + where + name = [G.name|bigquery_bigdecimal|] + +bqDecimal :: MonadParse m => Parser origin 'Both m BigQuery.Decimal +bqDecimal = mkScalar name "Decimals. Accepts both string and number literals." $ fmap (BigQuery.Decimal . BigQuery.scientificToText) . 
decimal name + where + name = [G.name|bigquery_decimal|] + +decimal :: MonadParse f => Name -> InputValue Variable -> f Scientific +decimal name = \case + GraphQLValue (VFloat f) -> pure f + GraphQLValue (VInt i) -> pure $ S.scientific i 0 + GraphQLValue (VString s) + | Just sci <- readMaybe (Text.unpack s) -> pure $ sci + | otherwise -> stringNotationError name s + JSONValue (A.Number n) -> pure n + JSONValue (A.String s) + | Just sci <- readMaybe (Text.unpack s) -> pure $ sci + | otherwise -> stringNotationError name s + v -> typeMismatch name "decimal" v + +-------------------------------------------------------------------------------- +-- Local helpers + +mkScalar :: + MonadParse m => + Name -> + Description -> + (InputValue Variable -> m a) -> + Parser origin 'Both m a +mkScalar name desc parser = + Parser + { pType = schemaType, + pParser = peelVariable (toGraphQLType schemaType) >=> parser + } + where + schemaType = typeNamed name (Just desc) + +typeNamed :: Name -> Maybe Description -> Type origin 'Both +typeNamed name description = TNamed NonNullable $ Definition name description Nothing [] TIScalar + +stringNotationError :: MonadParse m => G.Name -> Text -> m a +stringNotationError typeName actualString = + parseError $ + "expected " <> toErrorMessage (tshow typeName) <> " represented as a string, but got " <> dquote actualString + <> ", which is not a recognizable " + <> toErrorMessage (tshow typeName) + <> "." 
diff --git a/server/src-lib/Hasura/GraphQL/Schema.hs b/server/src-lib/Hasura/GraphQL/Schema.hs index c93d23fbe4c..804997715bc 100644 --- a/server/src-lib/Hasura/GraphQL/Schema.hs +++ b/server/src-lib/Hasura/GraphQL/Schema.hs @@ -190,7 +190,7 @@ buildRoleContext :: G.SchemaIntrospection ) buildRoleContext options sources remotes actions customTypes role remoteSchemaPermsCtx expFeatures = do - let ( SQLGenCtx stringifyNum dangerousBooleanCollapse optimizePermissionFilters, + let ( SQLGenCtx stringifyNum dangerousBooleanCollapse optimizePermissionFilters bigqueryStringNumericInput, functionPermsCtx ) = options schemaOptions = @@ -202,7 +202,8 @@ buildRoleContext options sources remotes actions customTypes role remoteSchemaPe soIncludeUpdateManyFields = if EFHideUpdateManyFields `Set.member` expFeatures then Options.DontIncludeUpdateManyFields - else Options.IncludeUpdateManyFields + else Options.IncludeUpdateManyFields, + soBigQueryStringNumericInput = bigqueryStringNumericInput } schemaContext = SchemaContext @@ -354,7 +355,7 @@ buildRelayRoleContext :: Set.HashSet ExperimentalFeature -> m (RoleContext GQLContext) buildRelayRoleContext options sources actions customTypes role expFeatures = do - let ( SQLGenCtx stringifyNum dangerousBooleanCollapse optimizePermissionFilters, + let ( SQLGenCtx stringifyNum dangerousBooleanCollapse optimizePermissionFilters bigqueryStringNumericInput, functionPermsCtx ) = options schemaOptions = @@ -366,7 +367,8 @@ buildRelayRoleContext options sources actions customTypes role expFeatures = do soIncludeUpdateManyFields = if EFHideUpdateManyFields `Set.member` expFeatures then Options.DontIncludeUpdateManyFields - else Options.IncludeUpdateManyFields + else Options.IncludeUpdateManyFields, + soBigQueryStringNumericInput = bigqueryStringNumericInput } -- TODO: At the time of writing this, remote schema queries are not supported in relay. -- When they are supported, we should get do what `buildRoleContext` does. 
Since, they diff --git a/server/src-lib/Hasura/GraphQL/Schema/Options.hs b/server/src-lib/Hasura/GraphQL/Schema/Options.hs index 4fde9b12988..964d7adfdcc 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/Options.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/Options.hs @@ -8,6 +8,7 @@ module Hasura.GraphQL.Schema.Options RemoteSchemaPermissions (..), OptimizePermissionFilters (..), IncludeUpdateManyFields (..), + BigQueryStringNumericInput (..), ) where @@ -21,7 +22,8 @@ data SchemaOptions = SchemaOptions soDangerousBooleanCollapse :: DangerouslyCollapseBooleans, soInferFunctionPermissions :: InferFunctionPermissions, soOptimizePermissionFilters :: OptimizePermissionFilters, - soIncludeUpdateManyFields :: IncludeUpdateManyFields + soIncludeUpdateManyFields :: IncludeUpdateManyFields, + soBigQueryStringNumericInput :: BigQueryStringNumericInput } -- | Should we represent numbers in our responses as numbers, or strings? @@ -106,3 +108,9 @@ data OptimizePermissionFilters = OptimizePermissionFilters | Don'tOptimizePermissionFilters deriving (Eq, Show) + +-- | Should we enable string-accepting scalar parsers for BigQuery sources +data BigQueryStringNumericInput + = EnableBigQueryStringNumericInput + | DisableBigQueryStringNumericInput + deriving (Eq, Show) diff --git a/server/src-lib/Hasura/RQL/Types/Common.hs b/server/src-lib/Hasura/RQL/Types/Common.hs index 12b089620bf..17ee0447597 100644 --- a/server/src-lib/Hasura/RQL/Types/Common.hs +++ b/server/src-lib/Hasura/RQL/Types/Common.hs @@ -260,7 +260,8 @@ isSystemDefined = unSystemDefined data SQLGenCtx = SQLGenCtx { stringifyNum :: Options.StringifyNumbers, dangerousBooleanCollapse :: Options.DangerouslyCollapseBooleans, - optimizePermissionFilters :: Options.OptimizePermissionFilters + optimizePermissionFilters :: Options.OptimizePermissionFilters, + bigqueryStringNumericInput :: Options.BigQueryStringNumericInput } deriving (Show, Eq) diff --git a/server/src-lib/Hasura/Server/Init/Env.hs 
b/server/src-lib/Hasura/Server/Init/Env.hs index 957c3f5b3c0..f63cab9c653 100644 --- a/server/src-lib/Hasura/Server/Init/Env.hs +++ b/server/src-lib/Hasura/Server/Init/Env.hs @@ -250,10 +250,11 @@ instance FromEnv (HashSet Server.Types.ExperimentalFeature) where "naming_convention" -> Right Server.Types.EFNamingConventions "apollo_federation" -> Right Server.Types.EFApolloFederation "hide_update_many_fields" -> Right Server.Types.EFHideUpdateManyFields + "bigquery_string_numeric_input" -> Right Server.Types.EFBigQueryStringNumericInput _ -> Left $ "Only expecting list of comma separated experimental features, options are:" - ++ "inherited_roles, streaming_subscriptions, hide_update_many_fields, optimize_permission_filters, naming_convention, apollo_federation" + ++ "inherited_roles, streaming_subscriptions, hide_update_many_fields, optimize_permission_filters, naming_convention, apollo_federation, bigquery_string_numeric_input" instance FromEnv Subscription.Options.BatchSize where fromEnv s = do diff --git a/server/src-lib/Hasura/Server/Types.hs b/server/src-lib/Hasura/Server/Types.hs index 5df03fd8181..9ff2aaaa99a 100644 --- a/server/src-lib/Hasura/Server/Types.hs +++ b/server/src-lib/Hasura/Server/Types.hs @@ -76,6 +76,7 @@ data ExperimentalFeature | EFStreamingSubscriptions | EFApolloFederation | EFHideUpdateManyFields + | EFBigQueryStringNumericInput deriving (Show, Eq, Generic) instance Hashable ExperimentalFeature @@ -88,7 +89,8 @@ instance FromJSON ExperimentalFeature where "streaming_subscriptions" -> pure EFStreamingSubscriptions "hide_update_many_fields" -> pure EFHideUpdateManyFields "apollo_federation" -> pure EFApolloFederation - _ -> fail "ExperimentalFeature can only be one of these value: inherited_roles, optimize_permission_filters, hide_update_many_fields, naming_convention, streaming_subscriptions or apollo_federation" + "bigquery_string_numeric_input" -> pure EFBigQueryStringNumericInput + _ -> fail "ExperimentalFeature can only be one of these 
value: inherited_roles, optimize_permission_filters, hide_update_many_fields, naming_convention, streaming_subscriptions apollo_federation, or bigquery_string_numeric_input" instance ToJSON ExperimentalFeature where toJSON = \case @@ -98,6 +100,7 @@ instance ToJSON ExperimentalFeature where EFStreamingSubscriptions -> "streaming_subscriptions" EFApolloFederation -> "apollo_federation" EFHideUpdateManyFields -> "hide_update_many_fields" + EFBigQueryStringNumericInput -> "bigquery_string_numeric_input" data MaintenanceMode a = MaintenanceModeEnabled a | MaintenanceModeDisabled deriving (Show, Eq) diff --git a/server/src-test/Main.hs b/server/src-test/Main.hs index b867a589f37..e1e7d93b736 100644 --- a/server/src-test/Main.hs +++ b/server/src-test/Main.hs @@ -119,7 +119,12 @@ buildPostgresSpecs = do setupCacheRef = do httpManager <- HTTP.newManager HTTP.tlsManagerSettings - let sqlGenCtx = SQLGenCtx Options.Don'tStringifyNumbers Options.Don'tDangerouslyCollapseBooleans Options.Don'tOptimizePermissionFilters + let sqlGenCtx = + SQLGenCtx + Options.Don'tStringifyNumbers + Options.Don'tDangerouslyCollapseBooleans + Options.Don'tOptimizePermissionFilters + Options.EnableBigQueryStringNumericInput maintenanceMode = MaintenanceModeDisabled readOnlyMode = ReadOnlyModeDisabled serverConfigCtx = diff --git a/server/src-test/Test/Parser/Monad.hs b/server/src-test/Test/Parser/Monad.hs index b5ef4368657..44791d464e2 100644 --- a/server/src-test/Test/Parser/Monad.hs +++ b/server/src-test/Test/Parser/Monad.hs @@ -71,7 +71,8 @@ instance Has SchemaOptions SchemaEnvironment where soDangerousBooleanCollapse = Options.Don'tDangerouslyCollapseBooleans, soInferFunctionPermissions = Options.InferFunctionPermissions, soOptimizePermissionFilters = Options.Don'tOptimizePermissionFilters, - soIncludeUpdateManyFields = Options.IncludeUpdateManyFields + soIncludeUpdateManyFields = Options.IncludeUpdateManyFields, + soBigQueryStringNumericInput = Options.EnableBigQueryStringNumericInput } 
modifier :: (SchemaOptions -> SchemaOptions) -> SchemaEnvironment -> SchemaEnvironment diff --git a/server/tests-hspec/Harness/Constants.hs b/server/tests-hspec/Harness/Constants.hs index ef4bd2030ab..4b44de501c2 100644 --- a/server/tests-hspec/Harness/Constants.hs +++ b/server/tests-hspec/Harness/Constants.hs @@ -60,7 +60,7 @@ import Hasura.Server.Init qualified as Init import Hasura.Server.Logging (MetadataQueryLoggingMode (MetadataQueryLoggingDisabled)) import Hasura.Server.Types ( EventingMode (EventingEnabled), - ExperimentalFeature (EFStreamingSubscriptions), + ExperimentalFeature (..), MaintenanceMode (MaintenanceModeDisabled), ReadOnlyMode (ReadOnlyModeDisabled), ) @@ -271,7 +271,7 @@ serveOptions = soEnableMaintenanceMode = MaintenanceModeDisabled, -- MUST be disabled to be able to modify schema. soSchemaPollInterval = Interval $$(refineTH 10), - soExperimentalFeatures = Set.singleton EFStreamingSubscriptions, + soExperimentalFeatures = Set.fromList [EFStreamingSubscriptions, EFBigQueryStringNumericInput], soEventsFetchBatchSize = $$(refineTH 1), soDevMode = True, soGracefulShutdownTimeout = $$(refineTH 0), -- Don't wait to shutdown. 
diff --git a/server/tests-hspec/Test/BigQuery/TypeInterpretationSpec.hs b/server/tests-hspec/Test/BigQuery/TypeInterpretationSpec.hs index bf6e87a0429..10f8215b234 100644 --- a/server/tests-hspec/Test/BigQuery/TypeInterpretationSpec.hs +++ b/server/tests-hspec/Test/BigQuery/TypeInterpretationSpec.hs @@ -183,6 +183,43 @@ tests opts = do actual `shouldBe` expected + it "Accepts strings for numbers in input fields (experimental feature 'bigquery_string_numeric_input')" \testEnvironment -> do + let schemaName = Schema.getSchemaName testEnvironment + + let expected :: Value + expected = + [interpolateYaml| + data: + #{schemaName}_all_types: + - float: "0.5" + numeric: "1234" + bignumeric: "23456789098765432" + |] + + actual :: IO Value + actual = + postGraphql + testEnvironment + [graphql| + query { + #{schemaName}_all_types(where: + {_and: [ + { float: { _eq: "0.5" }} + { integer: { _eq: "3" }} + { numeric: { _eq: "1234" }} + { bignumeric: { _eq: "23456789098765432" }} + ] + } + ) { + float + numeric + bignumeric + } + } + |] + + actual `shouldBe` expected + it "Aggregates all comparable types" \testEnvironment -> do let schemaName = Schema.getSchemaName testEnvironment