chore(server): fix types of BigQuery parameters

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/9511
GitOrigin-RevId: f291814fab80060d83fabc34af918682c8108b7c
Author: Daniel Harvey
Date: 2023-06-12 18:11:28 +01:00
Committed by: hasura-bot
Parent: be658e532c
Commit: aeb8bc4c7a

7 changed files with 88 additions and 115 deletions
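BigQuery requires every query parameter to be sent with an explicit type. Previously that type was re-derived from the runtime Value via the now-deleted BigQueryType enum; after this change each literal carries the ScalarType of its column inside the new TypedValue pair, and that declared type is what gets serialised into the query job request. As a rough, illustrative sketch (the exampleParameter binding below is made up, but the JSON keys and shapes follow typeToBigQueryJson, valueToBigQueryJson and createQueryJob in this diff), a single named parameter ends up looking like this:

{-# LANGUAGE ImportQualifiedPost #-}
{-# LANGUAGE OverloadedStrings #-}

import Data.Aeson ((.=))
import Data.Aeson qualified as J
import Data.Text (Text)

-- Illustrative only: one named parameter as it would appear in the
-- BigQuery job request body. The declared scalar type is serialised
-- next to the value instead of being guessed from the value itself.
exampleParameter :: J.Value
exampleParameter =
  J.object
    [ "name" .= ("param0" :: Text),
      -- from typeToBigQueryJson IntegerScalarType
      "parameterType" .= J.object ["type" .= ("INTEGER" :: Text)],
      -- from valueToBigQueryJson (IntegerValue ...)
      "parameterValue" .= J.object ["value" .= ("1000" :: Text)]
    ]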


@@ -4,6 +4,7 @@ module Hasura.Backends.BigQuery.DDL
     updateColumnInEventTrigger,
     parseBoolExpOperations,
     parseCollectableType,
+    scalarTypeFromColumnType,
     module M,
   )
 where
@@ -74,10 +75,14 @@ parseCollectableType collectableType = \case
       | isReqUserId t -> pure $ mkTypedSessionVar collectableType userIdHeader
     val -> case collectableType of
       CollectableTypeScalar scalarType ->
-        PSESQLExp . BigQuery.ValueExpression <$> parseScalarValueColumnType scalarType val
+        PSESQLExp . BigQuery.ValueExpression . BigQuery.TypedValue (scalarTypeFromColumnType scalarType) <$> parseScalarValueColumnType scalarType val
       CollectableTypeArray _ ->
         throw400 NotSupported "Array types are not supported in BigQuery backend"
 
+scalarTypeFromColumnType :: ColumnType 'BigQuery -> BigQuery.ScalarType
+scalarTypeFromColumnType (ColumnEnumReference _) = BigQuery.StringScalarType
+scalarTypeFromColumnType (ColumnScalar scalar) = scalar
+
 mkTypedSessionVar ::
   CollectableType (ColumnType 'BigQuery) ->
   SessionVariable ->


@@ -173,25 +173,6 @@ newtype Execute a = Execute
       MonadError ExecuteProblem
     )
 
--- | Big query parameters must be accompanied by an explicit type
--- signature.
-data BigQueryType
-  = DECIMAL
-  | INTEGER
-  | FLOAT
-  | BYTES
-  | STRING
-  | BOOL
-  | ARRAY BigQueryType
-  | GEOGRAPHY
-  | DATE
-  | TIMESTAMP
-  | DATETIME
-  | TIME
-  | JSON
-  | BIGDECIMAL
-  deriving (Show, Eq)
-
 data BigQuery = BigQuery
   { query :: LT.Text,
     parameters :: InsOrdHashMap ParameterName Parameter
@@ -199,7 +180,7 @@ data BigQuery = BigQuery
   deriving (Show)
 
 data Parameter = Parameter
-  { typ :: BigQueryType,
+  { typ :: ScalarType,
     value :: Value
   }
   deriving (Show)
@@ -307,9 +288,9 @@ selectToBigQuery select =
     parameters =
       InsOrdHashMap.fromList
         ( map
-            ( \(int, value) ->
+            ( \(int, (TypedValue typ value)) ->
                 ( ParameterName (LT.toLazyText (ToQuery.paramName int)),
-                  Parameter {typ = valueType value, value}
+                  Parameter {typ, value}
                 )
             )
             (InsOrdHashMap.toList params)
@@ -319,53 +300,36 @@ selectToBigQuery select =
     (query, params) =
      ToQuery.renderBuilderPretty (ToQuery.fromSelect select)
 
---------------------------------------------------------------------------------
--- Type system
-
--- | Make a BigQuery type for the given value.
-valueType :: Value -> BigQueryType
-valueType =
-  \case
-    DecimalValue {} -> DECIMAL
-    BigDecimalValue {} -> BIGDECIMAL
-    IntegerValue {} -> INTEGER
-    FloatValue {} -> FLOAT
-    GeographyValue {} -> GEOGRAPHY
-    StringValue {} -> STRING
-    BytesValue {} -> BYTES
-    BoolValue {} -> BOOL
-    DatetimeValue {} -> DATETIME
-    TimeValue {} -> TIME
-    DateValue {} -> DATE
-    TimestampValue {} -> TIMESTAMP
-    JsonValue {} -> JSON
-    ArrayValue values ->
-      ARRAY
-        ( maybe
-            STRING
-            -- Above: If the array is null, it doesn't matter what type
-            -- the element is. So we put STRING.
-            valueType
-            (values V.!? 0)
-            -- Above: We base the type from the first element. Later,
-            -- we could add some kind of sanity check that they are all
-            -- the same type.
-        )
-    NullValue -> STRING
-    -- Above: If the value is null, it doesn't matter what type
-    -- the element is. So we put STRING.
-
 --------------------------------------------------------------------------------
 -- JSON serialization
 
+typeToBigQueryJson :: ScalarType -> J.Value
+typeToBigQueryJson =
+  \case
+    DecimalScalarType -> atomic "NUMERIC"
+    BigDecimalScalarType -> atomic "BIGNUMERIC"
+    IntegerScalarType -> atomic "INTEGER"
+    DateScalarType -> atomic "DATE"
+    TimeScalarType -> atomic "TIME"
+    DatetimeScalarType -> atomic "DATETIME"
+    JsonScalarType -> atomic "JSON"
+    TimestampScalarType -> atomic "TIMESTAMP"
+    FloatScalarType -> atomic "FLOAT"
+    GeographyScalarType -> atomic "GEOGRAPHY"
+    StringScalarType -> atomic "STRING"
+    BytesScalarType -> atomic "BYTES"
+    BoolScalarType -> atomic "BOOL"
+    StructScalarType -> atomic "STRUCT"
+  where
+    atomic ty = J.object ["type" J..= (ty :: Text)]
+
 -- | Make a JSON representation of the type of the given value.
 valueToBigQueryJson :: Value -> J.Value
 valueToBigQueryJson = go
   where
     go =
       \case
-        NullValue -> J.Null -- TODO: I haven't tested whether BigQuery is happy with this null value.
+        NullValue -> J.object [("value", J.Null)]
         DecimalValue i -> J.object ["value" .= i]
         BigDecimalValue i -> J.object ["value" .= i]
        IntegerValue i -> J.object ["value" .= i]
@@ -580,7 +544,7 @@ createQueryJob conn BigQuery {..} = do
             ( \(name, Parameter {..}) ->
                 J.object
                   [ "name" .= J.toJSON name,
-                    "parameterType" .= J.toJSON typ,
+                    "parameterType" .= typeToBigQueryJson typ,
                     "parameterValue" .= valueToBigQueryJson value
                   ]
             )
@@ -860,26 +824,6 @@ has_v_generic f =
 --------------------------------------------------------------------------------
 -- Generic JSON deserialization
 
-instance J.ToJSON BigQueryType where
-  toJSON =
-    \case
-      ARRAY t -> J.object ["type" .= ("ARRAY" :: Text), "arrayType" .= t]
-      DECIMAL -> atomic "NUMERIC"
-      BIGDECIMAL -> atomic "BIGNUMERIC"
-      INTEGER -> atomic "INTEGER"
-      DATE -> atomic "DATE"
-      TIME -> atomic "TIME"
-      DATETIME -> atomic "DATETIME"
-      JSON -> atomic "JSON"
-      TIMESTAMP -> atomic "TIMESTAMP"
-      FLOAT -> atomic "FLOAT"
-      GEOGRAPHY -> atomic "GEOGRAPHY"
-      STRING -> atomic "STRING"
-      BYTES -> atomic "BYTES"
-      BOOL -> atomic "BOOL"
-    where
-      atomic ty = J.object ["type" .= (ty :: Text)]
-
 instance J.FromJSON BigQueryField where
   parseJSON =
     J.withObject


@@ -979,7 +979,7 @@ fromTableAggregateFieldG args permissionBasedTop (Rql.FieldName name, field) =
       pure
         ( ExpressionFieldSource
             Aliased
-              { aliasedThing = BigQuery.ValueExpression (StringValue text),
+              { aliasedThing = BigQuery.ValueExpression (BigQuery.TypedValue BigQuery.StringScalarType (StringValue text)),
                 aliasedAlias = name
               }
         )
@@ -1019,7 +1019,7 @@ fromAggregateField aggregateField =
       expression' <-
         case columnField of
           Ir.SFCol column _columnType -> fmap ColumnExpression (fromColumn column)
-          Ir.SFExp text -> pure (ValueExpression (StringValue text))
+          Ir.SFExp text -> pure (ValueExpression (BigQuery.TypedValue BigQuery.StringScalarType (StringValue text)))
          -- See Hasura.RQL.Types.Backend.supportsAggregateComputedFields
          Ir.SFComputedField _ _ -> error "Aggregate computed fields aren't currently supported for BigQuery!"
       pure (fieldName, expression')
@@ -1044,7 +1044,7 @@ fromAnnFieldsG existingJoins (Rql.FieldName name, field) =
       pure
         ( ExpressionFieldSource
             Aliased
-              { aliasedThing = BigQuery.ValueExpression (StringValue text),
+              { aliasedThing = BigQuery.ValueExpression (BigQuery.TypedValue BigQuery.StringScalarType (StringValue text)),
                 aliasedAlias = name
              }
         )
@@ -1857,7 +1857,7 @@ selectProjectionsFromFieldSources keepJoinField fieldSources = do
     Nothing -> refute (pure NoProjectionFields)
 
 trueExpression :: Expression
-trueExpression = ValueExpression (BoolValue True)
+trueExpression = ValueExpression (TypedValue BoolScalarType (BoolValue True))
 
 --------------------------------------------------------------------------------
 -- Constants


@@ -11,6 +11,7 @@ import Data.List.NonEmpty qualified as NE
 import Data.Text qualified as T
 import Data.Text.Casing qualified as C
 import Data.Text.Extended
+import Hasura.Backends.BigQuery.DDL (scalarTypeFromColumnType)
 import Hasura.Backends.BigQuery.Name
 import Hasura.Backends.BigQuery.Parser.Scalars qualified as BQP
 import Hasura.Backends.BigQuery.Types qualified as BigQuery
@@ -261,7 +262,16 @@ bqComparisonExps = P.memoize 'comparisonExps $ \columnType -> do
     columnListParser = fmap IR.openValueOrigin <$> P.list typedParser
     mkListLiteral :: [ColumnValue 'BigQuery] -> IR.UnpreparedValue 'BigQuery
     mkListLiteral =
-      IR.UVLiteral . BigQuery.ListExpression . fmap (BigQuery.ValueExpression . cvValue)
+      IR.UVLiteral
+        . BigQuery.ListExpression
+        . fmap
+          ( \columnValue ->
+              BigQuery.ValueExpression
+                ( BigQuery.TypedValue
+                    (scalarTypeFromColumnType (cvType columnValue))
+                    (cvValue columnValue)
+                )
+          )
   pure
     $ P.object name (Just desc)
     $ fmap catMaybes


@@ -12,6 +12,7 @@ import Data.List.NonEmpty qualified as NE
 import Data.Map.Strict qualified as Map
 import Data.Text.Extended
 import Data.Text.Lazy qualified as LT
+import Hasura.Backends.BigQuery.DDL (scalarTypeFromColumnType)
 import Hasura.Backends.BigQuery.FromIr as BigQuery
 import Hasura.Backends.BigQuery.Types
 import Hasura.Base.Error qualified as E
@@ -76,8 +77,12 @@ prepareValueNoPlan sessionVariables =
             )
         CollectableTypeArray {} ->
           throwError $ E.internalError "Cannot currently prepare array types in BigQuery."
-    UVParameter _ RQL.ColumnValue {..} -> pure (ValueExpression cvValue)
+    UVParameter _ RQL.ColumnValue {..} ->
+      pure (ValueExpression (TypedValue (scalarTypeFromColumnType cvType) cvValue))
   where
     globalSessionExpression =
       ValueExpression
-        (StringValue (LT.toStrict (encodeToLazyText sessionVariables)))
+        ( TypedValue
+            StringScalarType
+            (StringValue (LT.toStrict (encodeToLazyText sessionVariables)))
+        )


@@ -43,7 +43,7 @@ data Printer
   | NewlinePrinter
   | UnsafeTextPrinter Text
   | IndentPrinter Int Printer
-  | ValuePrinter Value
+  | ValuePrinter TypedValue
   deriving (Show, Eq)
 
 instance IsString Printer where
@@ -68,12 +68,12 @@ fromExpression =
   \case
     CastExpression e scalarType ->
       "CAST(" <+> fromExpression e <+> " AS " <+> fromScalarType scalarType <+> ")"
-    InExpression e value ->
-      "(" <+> fromExpression e <+> ") IN UNNEST(" <+> fromValue value <+> ")"
+    InExpression e (TypedValue ty val) ->
+      "(" <+> fromExpression e <+> ") IN UNNEST(" <+> fromValue ty val <+> ")"
     JsonQueryExpression e -> "JSON_QUERY(" <+> fromExpression e <+> ")"
     JsonValueExpression e path ->
       "JSON_VALUE(" <+> fromExpression e <+> fromPath path <+> ")"
-    ValueExpression value -> fromValue value
+    ValueExpression (TypedValue ty val) -> fromValue ty val
     AndExpression xs ->
       SepByPrinter
         (NewlinePrinter <+> "AND ")
@@ -161,6 +161,7 @@ fromPath path =
     string =
       fromExpression
        . ValueExpression
+        . TypedValue StringScalarType
        . StringValue
        . LT.toStrict
        . LT.toLazyText
@@ -293,10 +294,10 @@ fromOrderBys top moffset morderBys =
           -- present.
             <+> " OFFSET "
             <+> fromExpression offset
-        (Top n, Nothing) -> "LIMIT " <+> fromValue (IntegerValue (intToInt64 n))
+        (Top n, Nothing) -> "LIMIT " <+> fromValue IntegerScalarType (IntegerValue (intToInt64 n))
         (Top n, Just offset) ->
           "LIMIT "
-            <+> fromValue (IntegerValue (intToInt64 n))
+            <+> fromValue IntegerScalarType (IntegerValue (intToInt64 n))
            <+> " OFFSET "
            <+> fromExpression offset
       ]
@@ -489,7 +490,7 @@ fromAggregate =
              )
          )
        <+> ")"
-    TextAggregate text -> fromExpression (ValueExpression (StringValue text))
+    TextAggregate text -> fromExpression (ValueExpression (TypedValue StringScalarType (StringValue text)))
 
 fromCountable :: Countable FieldName -> Printer
 fromCountable =
@@ -582,13 +583,13 @@ fromNameText :: Text -> Printer
 fromNameText t = UnsafeTextPrinter ("`" <> t <> "`")
 
 trueExpression :: Expression
-trueExpression = ValueExpression (BoolValue True)
+trueExpression = ValueExpression (TypedValue BoolScalarType (BoolValue True))
 
 falseExpression :: Expression
-falseExpression = ValueExpression (BoolValue False)
+falseExpression = ValueExpression (TypedValue BoolScalarType (BoolValue False))
 
-fromValue :: Value -> Printer
-fromValue = ValuePrinter
+fromValue :: ScalarType -> Value -> Printer
+fromValue ty val = ValuePrinter (TypedValue ty val)
 
 parens :: Printer -> Printer
 parens x = "(" <+> IndentPrinter 1 x <+> ")"
@@ -612,14 +613,14 @@ toTextFlat = LT.toStrict . LT.toLazyText . toBuilderFlat
 -- Printer ready for consumption
 
 -- | Produces a query with holes, and a mapping for each
-renderBuilderFlat :: Printer -> (Builder, InsOrdHashMap Int Value)
+renderBuilderFlat :: Printer -> (Builder, InsOrdHashMap Int TypedValue)
 renderBuilderFlat =
   second (InsOrdHashMap.fromList . map swap . InsOrdHashMap.toList)
     . flip runState mempty
     . runBuilderFlat
 
 -- | Produces a query with holes, and a mapping for each
-renderBuilderPretty :: Printer -> (Builder, InsOrdHashMap Int Value)
+renderBuilderPretty :: Printer -> (Builder, InsOrdHashMap Int TypedValue)
 renderBuilderPretty =
   second (InsOrdHashMap.fromList . map swap . InsOrdHashMap.toList)
    . flip runState mempty
@@ -631,7 +632,7 @@ renderBuilderPretty =
 paramName :: Int -> Builder
 paramName next = "param" <> fromString (show next)
 
-runBuilderFlat :: Printer -> State (InsOrdHashMap Value Int) Builder
+runBuilderFlat :: Printer -> State (InsOrdHashMap TypedValue Int) Builder
 runBuilderFlat = go 0
   where
    go level =
@@ -643,18 +644,18 @@ runBuilderFlat = go 0
          fmap (mconcat . intersperse i . filter notEmpty) (mapM (go level) xs)
        NewlinePrinter -> pure " "
        IndentPrinter n p -> go (level + n) p
-        ValuePrinter (ArrayValue x) | V.null x -> pure "[]"
-        ValuePrinter v -> do
+        ValuePrinter (TypedValue _ (ArrayValue x)) | V.null x -> pure "[]"
+        ValuePrinter tv -> do
          themap <- get
          next <-
-            InsOrdHashMap.lookup v themap `onNothing` do
+            InsOrdHashMap.lookup tv themap `onNothing` do
              next <- gets InsOrdHashMap.size
-              modify (InsOrdHashMap.insert v next)
+              modify (InsOrdHashMap.insert tv next)
              pure next
          pure ("@" <> paramName next)
    notEmpty = (/= mempty)
 
-runBuilderPretty :: Printer -> State (InsOrdHashMap Value Int) Builder
+runBuilderPretty :: Printer -> State (InsOrdHashMap TypedValue Int) Builder
 runBuilderPretty = go 0
   where
    go level =
@@ -666,14 +667,14 @@ runBuilderPretty = go 0
          fmap (mconcat . intersperse i . filter notEmpty) (mapM (go level) xs)
        NewlinePrinter -> pure ("\n" <> indentation level)
        IndentPrinter n p -> go (level + n) p
-        ValuePrinter (ArrayValue x)
+        ValuePrinter (TypedValue _ (ArrayValue x))
          | V.null x -> pure "[]"
-        ValuePrinter v -> do
+        ValuePrinter tv -> do
          themap <- get
          next <-
-            InsOrdHashMap.lookup v themap `onNothing` do
+            InsOrdHashMap.lookup tv themap `onNothing` do
             next <- gets InsOrdHashMap.size
-              modify (InsOrdHashMap.insert v next)
+              modify (InsOrdHashMap.insert tv next)
             pure next
          pure ("@" <> paramName next)
    indentation n = LT.fromText (T.replicate n " ")
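A rough usage sketch of the flat renderer with the new TypedValue-keyed parameter map (this assumes the definitions from this module — Printer, (<+>), ValuePrinter, renderBuilderFlat, intToInt64 — plus OverloadedStrings; the exampleRender name and the expected output are illustrative only). Because the state map is keyed by the whole TypedValue, two occurrences of the same typed literal share a single parameter hole:

-- Illustrative only: render a printer mentioning the same typed literal twice.
-- Both ValuePrinter occurrences look up the same TypedValue in the state map,
-- so they are assigned one shared parameter slot.
exampleRender :: (Builder, InsOrdHashMap Int TypedValue)
exampleRender =
  let p = ValuePrinter (TypedValue IntegerScalarType (IntegerValue (intToInt64 1)))
   in renderBuilderFlat ("SELECT " <+> p <+> " + " <+> p)

-- expected, roughly:
--   query text:    "SELECT @param0 + @param0"
--   parameter map: [(0, TypedValue IntegerScalarType (IntegerValue (intToInt64 1)))]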


@@ -49,6 +49,7 @@ module Hasura.Backends.BigQuery.Types
     Time (..),
     Timestamp (..),
     Top (..),
+    TypedValue (..),
     Value (..),
     Where (..),
     With (..),
@@ -285,8 +286,8 @@ instance Semigroup Top where
   (<>) (Top x) (Top y) = Top (min x y)
 
 data Expression
-  = ValueExpression Value
-  | InExpression Expression Value
+  = ValueExpression TypedValue
+  | InExpression Expression TypedValue
   | AndExpression [Expression]
   | OrExpression [Expression]
   | NotExpression Expression
@@ -542,11 +543,18 @@ instance FromJSON Int64 where parseJSON = liberalInt64Parser Int64
 
 instance ToJSON Int64 where toJSON = liberalIntegralPrinter
 
+data TypedValue = TypedValue
+  { tvType :: ScalarType,
+    tvValue :: Value
+  }
+  deriving stock (Eq, Ord, Show, Generic, Data, Lift)
+  deriving anyclass (Hashable, NFData)
+
 intToInt64 :: Int.Int64 -> Int64
 intToInt64 = Int64 . tshow
 
 int64Expr :: Int.Int64 -> Expression
-int64Expr = ValueExpression . IntegerValue . intToInt64
+int64Expr i = ValueExpression (TypedValue IntegerScalarType (IntegerValue (intToInt64 i)))
 
 -- | BigQuery's conception of a fixed precision decimal.
 newtype Decimal = Decimal Text