chore(server): fix types of BigQuery parameters

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/9511
GitOrigin-RevId: f291814fab80060d83fabc34af918682c8108b7c
Daniel Harvey 2023-06-12 18:11:28 +01:00 committed by hasura-bot
parent be658e532c
commit aeb8bc4c7a
7 changed files with 88 additions and 115 deletions


@@ -4,6 +4,7 @@ module Hasura.Backends.BigQuery.DDL
updateColumnInEventTrigger,
parseBoolExpOperations,
parseCollectableType,
scalarTypeFromColumnType,
module M,
)
where
@@ -74,10 +75,14 @@ parseCollectableType collectableType = \case
| isReqUserId t -> pure $ mkTypedSessionVar collectableType userIdHeader
val -> case collectableType of
CollectableTypeScalar scalarType ->
PSESQLExp . BigQuery.ValueExpression <$> parseScalarValueColumnType scalarType val
PSESQLExp . BigQuery.ValueExpression . BigQuery.TypedValue (scalarTypeFromColumnType scalarType) <$> parseScalarValueColumnType scalarType val
CollectableTypeArray _ ->
throw400 NotSupported "Array types are not supported in BigQuery backend"
scalarTypeFromColumnType :: ColumnType 'BigQuery -> BigQuery.ScalarType
scalarTypeFromColumnType (ColumnEnumReference _) = BigQuery.StringScalarType
scalarTypeFromColumnType (ColumnScalar scalar) = scalar
mkTypedSessionVar ::
CollectableType (ColumnType 'BigQuery) ->
SessionVariable ->
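The new scalarTypeFromColumnType helper is the bridge used throughout this PR: enum-backed columns are sent to BigQuery as strings, while ordinary scalar columns keep their own type. A minimal sketch of the pattern the hunk above introduces (hypothetical standalone helper, not part of the diff):

-- Hypothetical helper: pair an already-parsed value with its column's
-- BigQuery scalar type before turning it into a SQL expression.
typedScalarExp :: ColumnType 'BigQuery -> BigQuery.Value -> PartialSQLExp 'BigQuery
typedScalarExp columnType parsedValue =
  PSESQLExp
    ( BigQuery.ValueExpression
        (BigQuery.TypedValue (scalarTypeFromColumnType columnType) parsedValue)
    )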


@@ -173,25 +173,6 @@ newtype Execute a = Execute
MonadError ExecuteProblem
)
-- | Big query parameters must be accompanied by an explicit type
-- signature.
data BigQueryType
= DECIMAL
| INTEGER
| FLOAT
| BYTES
| STRING
| BOOL
| ARRAY BigQueryType
| GEOGRAPHY
| DATE
| TIMESTAMP
| DATETIME
| TIME
| JSON
| BIGDECIMAL
deriving (Show, Eq)
data BigQuery = BigQuery
{ query :: LT.Text,
parameters :: InsOrdHashMap ParameterName Parameter
@@ -199,7 +180,7 @@ data BigQuery = BigQuery
deriving (Show)
data Parameter = Parameter
{ typ :: BigQueryType,
{ typ :: ScalarType,
value :: Value
}
deriving (Show)
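For orientation, a value of the reworked record might look like this (values invented):

-- Illustrative only: a parameter now carries the backend ScalarType directly,
-- instead of a separate BigQueryType re-derived from the value at send time.
exampleParameter :: Parameter
exampleParameter =
  Parameter
    { typ = IntegerScalarType,
      value = IntegerValue (intToInt64 42)
    }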
@@ -307,9 +288,9 @@ selectToBigQuery select =
parameters =
InsOrdHashMap.fromList
( map
( \(int, value) ->
( \(int, (TypedValue typ value)) ->
( ParameterName (LT.toLazyText (ToQuery.paramName int)),
Parameter {typ = valueType value, value}
Parameter {typ, value}
)
)
(InsOrdHashMap.toList params)
@@ -319,53 +300,36 @@ selectToBigQuery select =
(query, params) =
ToQuery.renderBuilderPretty (ToQuery.fromSelect select)
--------------------------------------------------------------------------------
-- Type system
-- | Make a BigQuery type for the given value.
valueType :: Value -> BigQueryType
valueType =
\case
DecimalValue {} -> DECIMAL
BigDecimalValue {} -> BIGDECIMAL
IntegerValue {} -> INTEGER
FloatValue {} -> FLOAT
GeographyValue {} -> GEOGRAPHY
StringValue {} -> STRING
BytesValue {} -> BYTES
BoolValue {} -> BOOL
DatetimeValue {} -> DATETIME
TimeValue {} -> TIME
DateValue {} -> DATE
TimestampValue {} -> TIMESTAMP
JsonValue {} -> JSON
ArrayValue values ->
ARRAY
( maybe
STRING
-- Above: If the array is null, it doesn't matter what type
-- the element is. So we put STRING.
valueType
(values V.!? 0)
-- Above: We base the type from the first element. Later,
-- we could add some kind of sanity check that they are all
-- the same type.
)
NullValue -> STRING
-- Above: If the value is null, it doesn't matter what type
-- the element is. So we put STRING.
--------------------------------------------------------------------------------
-- JSON serialization
typeToBigQueryJson :: ScalarType -> J.Value
typeToBigQueryJson =
\case
DecimalScalarType -> atomic "NUMERIC"
BigDecimalScalarType -> atomic "BIGNUMERIC"
IntegerScalarType -> atomic "INTEGER"
DateScalarType -> atomic "DATE"
TimeScalarType -> atomic "TIME"
DatetimeScalarType -> atomic "DATETIME"
JsonScalarType -> atomic "JSON"
TimestampScalarType -> atomic "TIMESTAMP"
FloatScalarType -> atomic "FLOAT"
GeographyScalarType -> atomic "GEOGRAPHY"
StringScalarType -> atomic "STRING"
BytesScalarType -> atomic "BYTES"
BoolScalarType -> atomic "BOOL"
StructScalarType -> atomic "STRUCT"
where
atomic ty = J.object ["type" J..= (ty :: Text)]
-- | Make a JSON representation of the type of the given value.
valueToBigQueryJson :: Value -> J.Value
valueToBigQueryJson = go
where
go =
\case
NullValue -> J.Null -- TODO: I haven't tested whether BigQuery is happy with this null value.
NullValue -> J.object [("value", J.Null)]
DecimalValue i -> J.object ["value" .= i]
BigDecimalValue i -> J.object ["value" .= i]
IntegerValue i -> J.object ["value" .= i]
@@ -580,7 +544,7 @@ createQueryJob conn BigQuery {..} = do
( \(name, Parameter {..}) ->
J.object
[ "name" .= J.toJSON name,
"parameterType" .= J.toJSON typ,
"parameterType" .= typeToBigQueryJson typ,
"parameterValue" .= valueToBigQueryJson value
]
)
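Concretely, each entry of the job's query parameters now takes its type from the stored ScalarType rather than from the value's constructor. A rough sketch of one serialized entry (name and value invented, output comment not verified):

-- Sketch only: one parameter as sent to BigQuery's REST API.
exampleParameterJson :: J.Value
exampleParameterJson =
  J.object
    [ "name" .= ("param0" :: Text),
      "parameterType" .= typeToBigQueryJson IntegerScalarType,
      "parameterValue" .= valueToBigQueryJson (IntegerValue (intToInt64 42))
    ]
-- roughly: {"name":"param0","parameterType":{"type":"INTEGER"},"parameterValue":{"value":"42"}}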
@@ -860,26 +824,6 @@ has_v_generic f =
--------------------------------------------------------------------------------
-- Generic JSON deserialization
instance J.ToJSON BigQueryType where
toJSON =
\case
ARRAY t -> J.object ["type" .= ("ARRAY" :: Text), "arrayType" .= t]
DECIMAL -> atomic "NUMERIC"
BIGDECIMAL -> atomic "BIGNUMERIC"
INTEGER -> atomic "INTEGER"
DATE -> atomic "DATE"
TIME -> atomic "TIME"
DATETIME -> atomic "DATETIME"
JSON -> atomic "JSON"
TIMESTAMP -> atomic "TIMESTAMP"
FLOAT -> atomic "FLOAT"
GEOGRAPHY -> atomic "GEOGRAPHY"
STRING -> atomic "STRING"
BYTES -> atomic "BYTES"
BOOL -> atomic "BOOL"
where
atomic ty = J.object ["type" .= (ty :: Text)]
instance J.FromJSON BigQueryField where
parseJSON =
J.withObject


@@ -979,7 +979,7 @@ fromTableAggregateFieldG args permissionBasedTop (Rql.FieldName name, field) =
pure
( ExpressionFieldSource
Aliased
{ aliasedThing = BigQuery.ValueExpression (StringValue text),
{ aliasedThing = BigQuery.ValueExpression (BigQuery.TypedValue BigQuery.StringScalarType (StringValue text)),
aliasedAlias = name
}
)
@@ -1019,7 +1019,7 @@ fromAggregateField aggregateField =
expression' <-
case columnField of
Ir.SFCol column _columnType -> fmap ColumnExpression (fromColumn column)
Ir.SFExp text -> pure (ValueExpression (StringValue text))
Ir.SFExp text -> pure (ValueExpression (BigQuery.TypedValue BigQuery.StringScalarType (StringValue text)))
-- See Hasura.RQL.Types.Backend.supportsAggregateComputedFields
Ir.SFComputedField _ _ -> error "Aggregate computed fields aren't currently supported for BigQuery!"
pure (fieldName, expression')
@@ -1044,7 +1044,7 @@ fromAnnFieldsG existingJoins (Rql.FieldName name, field) =
pure
( ExpressionFieldSource
Aliased
{ aliasedThing = BigQuery.ValueExpression (StringValue text),
{ aliasedThing = BigQuery.ValueExpression (BigQuery.TypedValue BigQuery.StringScalarType (StringValue text)),
aliasedAlias = name
}
)
@@ -1857,7 +1857,7 @@ selectProjectionsFromFieldSources keepJoinField fieldSources = do
Nothing -> refute (pure NoProjectionFields)
trueExpression :: Expression
trueExpression = ValueExpression (BoolValue True)
trueExpression = ValueExpression (TypedValue BoolScalarType (BoolValue True))
--------------------------------------------------------------------------------
-- Constants
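The same TypedValue wrapping recurs for every literal string and boolean in this module; a tiny helper could factor it out if it keeps spreading (hypothetical, not part of this PR):

-- Hypothetical helper: build a typed string literal expression.
textExpression :: Text -> Expression
textExpression = ValueExpression . TypedValue StringScalarType . StringValue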


@@ -11,6 +11,7 @@ import Data.List.NonEmpty qualified as NE
import Data.Text qualified as T
import Data.Text.Casing qualified as C
import Data.Text.Extended
import Hasura.Backends.BigQuery.DDL (scalarTypeFromColumnType)
import Hasura.Backends.BigQuery.Name
import Hasura.Backends.BigQuery.Parser.Scalars qualified as BQP
import Hasura.Backends.BigQuery.Types qualified as BigQuery
@@ -261,7 +262,16 @@ bqComparisonExps = P.memoize 'comparisonExps $ \columnType -> do
columnListParser = fmap IR.openValueOrigin <$> P.list typedParser
mkListLiteral :: [ColumnValue 'BigQuery] -> IR.UnpreparedValue 'BigQuery
mkListLiteral =
IR.UVLiteral . BigQuery.ListExpression . fmap (BigQuery.ValueExpression . cvValue)
IR.UVLiteral
. BigQuery.ListExpression
. fmap
( \columnValue ->
BigQuery.ValueExpression
( BigQuery.TypedValue
(scalarTypeFromColumnType (cvType columnValue))
(cvValue columnValue)
)
)
pure
$ P.object name (Just desc)
$ fmap catMaybes
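For a sense of what mkListLiteral now produces: an _in list over an integer column yields one typed expression per element (values invented):

-- Illustrative only: every element keeps its scalar type, so the generated
-- UNNEST parameter can be typed as well.
exampleInList :: IR.UnpreparedValue 'BigQuery
exampleInList =
  IR.UVLiteral
    ( BigQuery.ListExpression
        [ BigQuery.ValueExpression
            (BigQuery.TypedValue BigQuery.IntegerScalarType (BigQuery.IntegerValue (BigQuery.intToInt64 1))),
          BigQuery.ValueExpression
            (BigQuery.TypedValue BigQuery.IntegerScalarType (BigQuery.IntegerValue (BigQuery.intToInt64 2)))
        ]
    )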


@@ -12,6 +12,7 @@ import Data.List.NonEmpty qualified as NE
import Data.Map.Strict qualified as Map
import Data.Text.Extended
import Data.Text.Lazy qualified as LT
import Hasura.Backends.BigQuery.DDL (scalarTypeFromColumnType)
import Hasura.Backends.BigQuery.FromIr as BigQuery
import Hasura.Backends.BigQuery.Types
import Hasura.Base.Error qualified as E
@@ -76,8 +77,12 @@ prepareValueNoPlan sessionVariables =
)
CollectableTypeArray {} ->
throwError $ E.internalError "Cannot currently prepare array types in BigQuery."
UVParameter _ RQL.ColumnValue {..} -> pure (ValueExpression cvValue)
UVParameter _ RQL.ColumnValue {..} ->
pure (ValueExpression (TypedValue (scalarTypeFromColumnType cvType) cvValue))
where
globalSessionExpression =
ValueExpression
(StringValue (LT.toStrict (encodeToLazyText sessionVariables)))
( TypedValue
StringScalarType
(StringValue (LT.toStrict (encodeToLazyText sessionVariables)))
)


@@ -43,7 +43,7 @@ data Printer
| NewlinePrinter
| UnsafeTextPrinter Text
| IndentPrinter Int Printer
| ValuePrinter Value
| ValuePrinter TypedValue
deriving (Show, Eq)
instance IsString Printer where
@@ -68,12 +68,12 @@ fromExpression =
\case
CastExpression e scalarType ->
"CAST(" <+> fromExpression e <+> " AS " <+> fromScalarType scalarType <+> ")"
InExpression e value ->
"(" <+> fromExpression e <+> ") IN UNNEST(" <+> fromValue value <+> ")"
InExpression e (TypedValue ty val) ->
"(" <+> fromExpression e <+> ") IN UNNEST(" <+> fromValue ty val <+> ")"
JsonQueryExpression e -> "JSON_QUERY(" <+> fromExpression e <+> ")"
JsonValueExpression e path ->
"JSON_VALUE(" <+> fromExpression e <+> fromPath path <+> ")"
ValueExpression value -> fromValue value
ValueExpression (TypedValue ty val) -> fromValue ty val
AndExpression xs ->
SepByPrinter
(NewlinePrinter <+> "AND ")
@@ -161,6 +161,7 @@ fromPath path =
string =
fromExpression
. ValueExpression
. TypedValue StringScalarType
. StringValue
. LT.toStrict
. LT.toLazyText
@@ -293,10 +294,10 @@ fromOrderBys top moffset morderBys =
-- present.
<+> " OFFSET "
<+> fromExpression offset
(Top n, Nothing) -> "LIMIT " <+> fromValue (IntegerValue (intToInt64 n))
(Top n, Nothing) -> "LIMIT " <+> fromValue IntegerScalarType (IntegerValue (intToInt64 n))
(Top n, Just offset) ->
"LIMIT "
<+> fromValue (IntegerValue (intToInt64 n))
<+> fromValue IntegerScalarType (IntegerValue (intToInt64 n))
<+> " OFFSET "
<+> fromExpression offset
]
@@ -489,7 +490,7 @@ fromAggregate =
)
)
<+> ")"
TextAggregate text -> fromExpression (ValueExpression (StringValue text))
TextAggregate text -> fromExpression (ValueExpression (TypedValue StringScalarType (StringValue text)))
fromCountable :: Countable FieldName -> Printer
fromCountable =
@@ -582,13 +583,13 @@ fromNameText :: Text -> Printer
fromNameText t = UnsafeTextPrinter ("`" <> t <> "`")
trueExpression :: Expression
trueExpression = ValueExpression (BoolValue True)
trueExpression = ValueExpression (TypedValue BoolScalarType (BoolValue True))
falseExpression :: Expression
falseExpression = ValueExpression (BoolValue False)
falseExpression = ValueExpression (TypedValue BoolScalarType (BoolValue False))
fromValue :: Value -> Printer
fromValue = ValuePrinter
fromValue :: ScalarType -> Value -> Printer
fromValue ty val = ValuePrinter (TypedValue ty val)
parens :: Printer -> Printer
parens x = "(" <+> IndentPrinter 1 x <+> ")"
@@ -612,14 +613,14 @@ toTextFlat = LT.toStrict . LT.toLazyText . toBuilderFlat
-- Printer ready for consumption
-- | Produces a query with holes, and a mapping for each
renderBuilderFlat :: Printer -> (Builder, InsOrdHashMap Int Value)
renderBuilderFlat :: Printer -> (Builder, InsOrdHashMap Int TypedValue)
renderBuilderFlat =
second (InsOrdHashMap.fromList . map swap . InsOrdHashMap.toList)
. flip runState mempty
. runBuilderFlat
-- | Produces a query with holes, and a mapping for each
renderBuilderPretty :: Printer -> (Builder, InsOrdHashMap Int Value)
renderBuilderPretty :: Printer -> (Builder, InsOrdHashMap Int TypedValue)
renderBuilderPretty =
second (InsOrdHashMap.fromList . map swap . InsOrdHashMap.toList)
. flip runState mempty
@@ -631,7 +632,7 @@ renderBuilderPretty =
paramName :: Int -> Builder
paramName next = "param" <> fromString (show next)
runBuilderFlat :: Printer -> State (InsOrdHashMap Value Int) Builder
runBuilderFlat :: Printer -> State (InsOrdHashMap TypedValue Int) Builder
runBuilderFlat = go 0
where
go level =
@@ -643,18 +644,18 @@ runBuilderFlat = go 0
fmap (mconcat . intersperse i . filter notEmpty) (mapM (go level) xs)
NewlinePrinter -> pure " "
IndentPrinter n p -> go (level + n) p
ValuePrinter (ArrayValue x) | V.null x -> pure "[]"
ValuePrinter v -> do
ValuePrinter (TypedValue _ (ArrayValue x)) | V.null x -> pure "[]"
ValuePrinter tv -> do
themap <- get
next <-
InsOrdHashMap.lookup v themap `onNothing` do
InsOrdHashMap.lookup tv themap `onNothing` do
next <- gets InsOrdHashMap.size
modify (InsOrdHashMap.insert v next)
modify (InsOrdHashMap.insert tv next)
pure next
pure ("@" <> paramName next)
notEmpty = (/= mempty)
runBuilderPretty :: Printer -> State (InsOrdHashMap Value Int) Builder
runBuilderPretty :: Printer -> State (InsOrdHashMap TypedValue Int) Builder
runBuilderPretty = go 0
where
go level =
@@ -666,14 +667,14 @@ runBuilderPretty = go 0
fmap (mconcat . intersperse i . filter notEmpty) (mapM (go level) xs)
NewlinePrinter -> pure ("\n" <> indentation level)
IndentPrinter n p -> go (level + n) p
ValuePrinter (ArrayValue x)
ValuePrinter (TypedValue _ (ArrayValue x))
| V.null x -> pure "[]"
ValuePrinter v -> do
ValuePrinter tv -> do
themap <- get
next <-
InsOrdHashMap.lookup v themap `onNothing` do
InsOrdHashMap.lookup tv themap `onNothing` do
next <- gets InsOrdHashMap.size
modify (InsOrdHashMap.insert v next)
modify (InsOrdHashMap.insert tv next)
pure next
pure ("@" <> paramName next)
indentation n = LT.fromText (T.replicate n " ")
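A rough usage sketch of the reworked renderer (output shown as a comment, not verified): every hole now maps to a TypedValue, which is exactly what selectToBigQuery needs to build typed Parameters.

-- Sketch only: render a single typed literal and inspect the parameter map.
exampleRender :: (Builder, InsOrdHashMap Int TypedValue)
exampleRender =
  renderBuilderPretty (fromValue IntegerScalarType (IntegerValue (intToInt64 10)))
-- roughly: ("@param0", fromList [(0, TypedValue IntegerScalarType (IntegerValue (Int64 "10")))])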


@@ -49,6 +49,7 @@ module Hasura.Backends.BigQuery.Types
Time (..),
Timestamp (..),
Top (..),
TypedValue (..),
Value (..),
Where (..),
With (..),
@@ -285,8 +286,8 @@ instance Semigroup Top where
(<>) (Top x) (Top y) = Top (min x y)
data Expression
= ValueExpression Value
| InExpression Expression Value
= ValueExpression TypedValue
| InExpression Expression TypedValue
| AndExpression [Expression]
| OrExpression [Expression]
| NotExpression Expression
@@ -542,11 +543,18 @@ instance FromJSON Int64 where parseJSON = liberalInt64Parser Int64
instance ToJSON Int64 where toJSON = liberalIntegralPrinter
data TypedValue = TypedValue
{ tvType :: ScalarType,
tvValue :: Value
}
deriving stock (Eq, Ord, Show, Generic, Data, Lift)
deriving anyclass (Hashable, NFData)
intToInt64 :: Int.Int64 -> Int64
intToInt64 = Int64 . tshow
int64Expr :: Int.Int64 -> Expression
int64Expr = ValueExpression . IntegerValue . intToInt64
int64Expr i = ValueExpression (TypedValue IntegerScalarType (IntegerValue (intToInt64 i)))
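For reference, the smart constructor now bakes the type in (Int64 is text-backed, so the literal travels as a string):

-- Illustrative expansion of the new int64Expr:
exampleLimit :: Expression
exampleLimit = int64Expr 10
-- equivalent to ValueExpression (TypedValue IntegerScalarType (IntegerValue (Int64 "10")))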
-- | BigQuery's conception of a fixed precision decimal.
newtype Decimal = Decimal Text