mirror of
https://github.com/hasura/graphql-engine.git
synced 2024-10-05 06:18:04 +03:00
Fix Decimal serialization for BigQuery backend
PR-URL: https://github.com/hasura/graphql-engine-mono/pull/4168 GitOrigin-RevId: 16072ce326ede22ba8be05f92fcfa0aaa2a7a644
This commit is contained in:
parent
3c15fa9f93
commit
74328156c5
35
CHANGELOG.md
35
CHANGELOG.md
@ -11,6 +11,41 @@
|
|||||||
|
|
||||||
- The `query` and `raw-query` field from http-logs for metadata requests are removed by default. Use
|
- The `query` and `raw-query` field from http-logs for metadata requests are removed by default. Use
|
||||||
`HASURA_GRAPHQL_ENABLE_METADATA_QUERY_LOGGING` to re-enable those fields.
|
`HASURA_GRAPHQL_ENABLE_METADATA_QUERY_LOGGING` to re-enable those fields.
|
||||||
|
- server: Fix BigQuery overflow issue when using Decimal/NUMERIC data
|
||||||
|
type. The Hasura GraphQL Engine renders the column value as a string instead
|
||||||
|
of a numeric value to avoid precision loss. If your endpoint was
|
||||||
|
returning this result:
|
||||||
|
|
||||||
|
``` json
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"hasura_author": [
|
||||||
|
{
|
||||||
|
"name": "Author 3",
|
||||||
|
"tax_id": 44403
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
It would now instead return this:
|
||||||
|
|
||||||
|
``` json
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"hasura_author": [
|
||||||
|
{
|
||||||
|
"name": "Author 3",
|
||||||
|
"tax_id": "44403"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Note that the column type of `tax_id` is Decimal and in the second
|
||||||
|
case it is represented as a string.
|
||||||
|
|
||||||
### Bug fixes and improvements
|
### Bug fixes and improvements
|
||||||
|
|
||||||
|
@ -1102,6 +1102,7 @@ test-suite tests-hspec
|
|||||||
Test.RemoteRelationship.XToDBObjectRelationshipSpec
|
Test.RemoteRelationship.XToDBObjectRelationshipSpec
|
||||||
Test.RemoteRelationship.XToRemoteSchemaRelationshipSpec
|
Test.RemoteRelationship.XToRemoteSchemaRelationshipSpec
|
||||||
Test.RequestHeadersSpec
|
Test.RequestHeadersSpec
|
||||||
|
Test.SerializationSpec
|
||||||
Test.ServiceLivenessSpec
|
Test.ServiceLivenessSpec
|
||||||
Test.ViewsSpec
|
Test.ViewsSpec
|
||||||
Test.WhereSpec
|
Test.WhereSpec
|
||||||
|
@ -158,7 +158,7 @@ bqColumnParser columnType (G.Nullability isNullable) =
|
|||||||
-- properly here?
|
-- properly here?
|
||||||
BigQuery.FloatScalarType -> pure $ possiblyNullable scalarType $ BigQuery.FloatValue . BigQuery.doubleToFloat64 <$> P.float
|
BigQuery.FloatScalarType -> pure $ possiblyNullable scalarType $ BigQuery.FloatValue . BigQuery.doubleToFloat64 <$> P.float
|
||||||
BigQuery.IntegerScalarType -> pure $ possiblyNullable scalarType $ BigQuery.IntegerValue . BigQuery.intToInt64 . fromIntegral <$> P.int
|
BigQuery.IntegerScalarType -> pure $ possiblyNullable scalarType $ BigQuery.IntegerValue . BigQuery.intToInt64 . fromIntegral <$> P.int
|
||||||
BigQuery.DecimalScalarType -> pure $ possiblyNullable scalarType $ BigQuery.DecimalValue . BigQuery.doubleToDecimal <$> P.float
|
BigQuery.DecimalScalarType -> pure $ possiblyNullable scalarType $ BigQuery.DecimalValue . BigQuery.Decimal . BigQuery.scientificToText <$> P.scientific
|
||||||
BigQuery.BigDecimalScalarType -> pure $ possiblyNullable scalarType $ BigQuery.BigDecimalValue . BigQuery.doubleToBigDecimal <$> P.float
|
BigQuery.BigDecimalScalarType -> pure $ possiblyNullable scalarType $ BigQuery.BigDecimalValue . BigQuery.doubleToBigDecimal <$> P.float
|
||||||
-- boolean type
|
-- boolean type
|
||||||
BigQuery.BoolScalarType -> pure $ possiblyNullable scalarType $ BigQuery.BoolValue <$> P.boolean
|
BigQuery.BoolScalarType -> pure $ possiblyNullable scalarType $ BigQuery.BoolValue <$> P.boolean
|
||||||
|
@ -13,7 +13,7 @@ module Hasura.Backends.BigQuery.Types
|
|||||||
Countable (..),
|
Countable (..),
|
||||||
Date (..),
|
Date (..),
|
||||||
Datetime (..),
|
Datetime (..),
|
||||||
Decimal,
|
Decimal (..),
|
||||||
EntityAlias (..),
|
EntityAlias (..),
|
||||||
Expression (..),
|
Expression (..),
|
||||||
FieldName (..),
|
FieldName (..),
|
||||||
@ -49,7 +49,6 @@ module Hasura.Backends.BigQuery.Types
|
|||||||
WindowFunction (..),
|
WindowFunction (..),
|
||||||
aggregateProjectionsFieldOrigin,
|
aggregateProjectionsFieldOrigin,
|
||||||
doubleToBigDecimal,
|
doubleToBigDecimal,
|
||||||
doubleToDecimal,
|
|
||||||
doubleToFloat64,
|
doubleToFloat64,
|
||||||
getGQLTableName,
|
getGQLTableName,
|
||||||
intToInt64,
|
intToInt64,
|
||||||
@ -59,6 +58,7 @@ module Hasura.Backends.BigQuery.Types
|
|||||||
parseScalarValue,
|
parseScalarValue,
|
||||||
projectionAlias,
|
projectionAlias,
|
||||||
scalarTypeGraphQLName,
|
scalarTypeGraphQLName,
|
||||||
|
scientificToText,
|
||||||
)
|
)
|
||||||
where
|
where
|
||||||
|
|
||||||
@ -75,6 +75,9 @@ import Data.Scientific
|
|||||||
import Data.Text qualified as T
|
import Data.Text qualified as T
|
||||||
import Data.Text.Encoding qualified as T
|
import Data.Text.Encoding qualified as T
|
||||||
import Data.Text.Extended
|
import Data.Text.Extended
|
||||||
|
import Data.Text.Lazy (toStrict)
|
||||||
|
import Data.Text.Lazy.Builder (toLazyText)
|
||||||
|
import Data.Text.Lazy.Builder.Scientific (formatScientificBuilder)
|
||||||
import Data.Vector (Vector)
|
import Data.Vector (Vector)
|
||||||
import Data.Vector.Instances ()
|
import Data.Vector.Instances ()
|
||||||
import Hasura.Base.Error
|
import Hasura.Base.Error
|
||||||
@ -702,12 +705,17 @@ int64Expr = ValueExpression . IntegerValue . intToInt64
|
|||||||
newtype Decimal = Decimal Text
|
newtype Decimal = Decimal Text
|
||||||
deriving (Show, Eq, Ord, Generic, Data, Cacheable, NFData, Hashable, Lift)
|
deriving (Show, Eq, Ord, Generic, Data, Cacheable, NFData, Hashable, Lift)
|
||||||
|
|
||||||
instance FromJSON Decimal where parseJSON = liberalDecimalParser Decimal
|
instance FromJSON Decimal where
|
||||||
|
parseJSON (J.Number num) = pure $ Decimal $ scientificToText num
|
||||||
|
parseJSON (J.String num) = pure $ Decimal num
|
||||||
|
parseJSON _ = fail "parseJSON: FromJSON Decimal failure"
|
||||||
|
|
||||||
instance ToJSON Decimal where toJSON = liberalDecimalPrinter
|
instance ToJSON Decimal where
|
||||||
|
toJSON (Decimal x) = J.toJSON x
|
||||||
|
|
||||||
doubleToDecimal :: Double -> Decimal
|
-- | Convert 'Scientific' to 'Text'
|
||||||
doubleToDecimal = Decimal . T.decodeUtf8 . L.toStrict . J.encode
|
scientificToText :: Scientific -> Text
|
||||||
|
scientificToText num = toStrict $ toLazyText $ formatScientificBuilder Fixed Nothing num
|
||||||
|
|
||||||
-- | BigQuery's conception of a \"big\" fixed precision decimal.
|
-- | BigQuery's conception of a \"big\" fixed precision decimal.
|
||||||
newtype BigDecimal = BigDecimal Text
|
newtype BigDecimal = BigDecimal Text
|
||||||
|
@ -17,6 +17,7 @@ module Hasura.GraphQL.Parser
|
|||||||
jsonb,
|
jsonb,
|
||||||
nonNegativeInt,
|
nonNegativeInt,
|
||||||
bigInt,
|
bigInt,
|
||||||
|
scientific,
|
||||||
unsafeRawScalar,
|
unsafeRawScalar,
|
||||||
jsonScalar,
|
jsonScalar,
|
||||||
enum,
|
enum,
|
||||||
|
@ -15,6 +15,7 @@ module Hasura.GraphQL.Parser.Internal.Scalars
|
|||||||
jsonb,
|
jsonb,
|
||||||
nonNegativeInt,
|
nonNegativeInt,
|
||||||
bigInt,
|
bigInt,
|
||||||
|
scientific,
|
||||||
-- internal
|
-- internal
|
||||||
unsafeRawScalar,
|
unsafeRawScalar,
|
||||||
jsonScalar,
|
jsonScalar,
|
||||||
@ -24,6 +25,8 @@ where
|
|||||||
import Data.Aeson qualified as A
|
import Data.Aeson qualified as A
|
||||||
import Data.Aeson.Types qualified as A
|
import Data.Aeson.Types qualified as A
|
||||||
import Data.Int (Int32, Int64)
|
import Data.Int (Int32, Int64)
|
||||||
|
import Data.Scientific (Scientific)
|
||||||
|
import Data.Scientific qualified as S
|
||||||
import Data.Text.Read (decimal)
|
import Data.Text.Read (decimal)
|
||||||
import Data.UUID qualified as UUID
|
import Data.UUID qualified as UUID
|
||||||
import Hasura.Backends.Postgres.SQL.Value
|
import Hasura.Backends.Postgres.SQL.Value
|
||||||
@ -117,6 +120,17 @@ bigInt = mkScalar intScalar Nothing \case
|
|||||||
pure i
|
pure i
|
||||||
v -> typeMismatch intScalar "a 32-bit integer, or a 64-bit integer represented as a string" v
|
v -> typeMismatch intScalar "a 32-bit integer, or a 64-bit integer represented as a string" v
|
||||||
|
|
||||||
|
-- | Parser for 'Scientific'. Certain backends like BigQuery support
|
||||||
|
-- Decimal/BigDecimal and need an arbitrary precision number.
|
||||||
|
scientific :: MonadParse m => Parser 'Both m Scientific
|
||||||
|
scientific = mkScalar name Nothing \case
|
||||||
|
GraphQLValue (VFloat f) -> pure f
|
||||||
|
GraphQLValue (VInt i) -> pure $ S.scientific i 0
|
||||||
|
JSONValue (A.Number n) -> pure n
|
||||||
|
v -> typeMismatch name "Decimal represented as a string" v
|
||||||
|
where
|
||||||
|
name = $$(litName "decimal")
|
||||||
|
|
||||||
--------------------------------------------------------------------------------
|
--------------------------------------------------------------------------------
|
||||||
-- Internal tools
|
-- Internal tools
|
||||||
|
|
||||||
|
@ -6,9 +6,11 @@
|
|||||||
-- | BigQuery helpers.
|
-- | BigQuery helpers.
|
||||||
module Harness.Backend.BigQuery
|
module Harness.Backend.BigQuery
|
||||||
( run_,
|
( run_,
|
||||||
|
runSql_,
|
||||||
getServiceAccount,
|
getServiceAccount,
|
||||||
getProjectId,
|
getProjectId,
|
||||||
createTable,
|
createTable,
|
||||||
|
defaultSourceMetadata,
|
||||||
insertTable,
|
insertTable,
|
||||||
trackTable,
|
trackTable,
|
||||||
dropTable,
|
dropTable,
|
||||||
@ -18,6 +20,8 @@ module Harness.Backend.BigQuery
|
|||||||
)
|
)
|
||||||
where
|
where
|
||||||
|
|
||||||
|
import Control.Monad (void)
|
||||||
|
import Data.Aeson (Value)
|
||||||
import Data.Bool (bool)
|
import Data.Bool (bool)
|
||||||
import Data.Foldable (for_)
|
import Data.Foldable (for_)
|
||||||
import Data.String
|
import Data.String
|
||||||
@ -28,7 +32,7 @@ import Data.Time (defaultTimeLocale, formatTime)
|
|||||||
import GHC.Stack
|
import GHC.Stack
|
||||||
import Harness.Constants as Constants
|
import Harness.Constants as Constants
|
||||||
import Harness.Env
|
import Harness.Env
|
||||||
import Harness.Exceptions (forFinally_)
|
import Harness.Exceptions
|
||||||
import Harness.GraphqlEngine qualified as GraphqlEngine
|
import Harness.GraphqlEngine qualified as GraphqlEngine
|
||||||
import Harness.Quoter.Yaml (yaml)
|
import Harness.Quoter.Yaml (yaml)
|
||||||
import Harness.State (State)
|
import Harness.State (State)
|
||||||
@ -55,6 +59,31 @@ run_ serviceAccount projectId query = do
|
|||||||
res <- Execute.executeBigQuery conn Execute.BigQuery {Execute.query = fromString query, Execute.parameters = mempty}
|
res <- Execute.executeBigQuery conn Execute.BigQuery {Execute.query = fromString query, Execute.parameters = mempty}
|
||||||
res `onLeft` (`bigQueryError` query)
|
res `onLeft` (`bigQueryError` query)
|
||||||
|
|
||||||
|
runSql_ :: HasCallStack => String -> IO ()
|
||||||
|
runSql_ query = do
|
||||||
|
serviceAccount <- getServiceAccount
|
||||||
|
projectId <- getProjectId
|
||||||
|
catch
|
||||||
|
( bracket
|
||||||
|
(initConnection serviceAccount projectId Nothing)
|
||||||
|
(const (pure ()))
|
||||||
|
(\conn -> void $ handleResult <$> (Execute.executeBigQuery conn Execute.BigQuery {Execute.query = fromString query, Execute.parameters = mempty}))
|
||||||
|
)
|
||||||
|
( \(e :: SomeException) ->
|
||||||
|
error
|
||||||
|
( unlines
|
||||||
|
[ "BigQuery error:",
|
||||||
|
show e,
|
||||||
|
"SQL was:",
|
||||||
|
query
|
||||||
|
]
|
||||||
|
)
|
||||||
|
)
|
||||||
|
where
|
||||||
|
handleResult :: Either Execute.ExecuteProblem () -> IO ()
|
||||||
|
handleResult (Left _) = throwString "Error handling bigquery"
|
||||||
|
handleResult (Right ()) = pure ()
|
||||||
|
|
||||||
bigQueryError :: HasCallStack => Execute.ExecuteProblem -> String -> IO ()
|
bigQueryError :: HasCallStack => Execute.ExecuteProblem -> String -> IO ()
|
||||||
bigQueryError e query =
|
bigQueryError e query =
|
||||||
error
|
error
|
||||||
@ -192,6 +221,29 @@ args:
|
|||||||
name: *tableName
|
name: *tableName
|
||||||
|]
|
|]
|
||||||
|
|
||||||
|
-- | Metadata source information for the default BigQuery instance
|
||||||
|
defaultSourceMetadata :: IO Value
|
||||||
|
defaultSourceMetadata = do
|
||||||
|
let dataset = Constants.bigqueryDataset
|
||||||
|
source = defaultSource BigQuery
|
||||||
|
backendType = defaultBackendTypeString BigQuery
|
||||||
|
serviceAccount <- getServiceAccount
|
||||||
|
projectId <- getProjectId
|
||||||
|
pure $
|
||||||
|
[yaml|
|
||||||
|
type: replace_metadata
|
||||||
|
args:
|
||||||
|
version: 3
|
||||||
|
sources:
|
||||||
|
- name: *source
|
||||||
|
kind: *backendType
|
||||||
|
tables: []
|
||||||
|
configuration:
|
||||||
|
service_account: *serviceAccount
|
||||||
|
project_id: *projectId
|
||||||
|
datasets: [*dataset]
|
||||||
|
|]
|
||||||
|
|
||||||
-- | Setup the schema in the most expected way.
|
-- | Setup the schema in the most expected way.
|
||||||
-- NOTE: Certain test modules may warrant having their own local version.
|
-- NOTE: Certain test modules may warrant having their own local version.
|
||||||
setup :: [Schema.Table] -> (State, ()) -> IO ()
|
setup :: [Schema.Table] -> (State, ()) -> IO ()
|
||||||
|
93
server/tests-hspec/Test/SerializationSpec.hs
Normal file
93
server/tests-hspec/Test/SerializationSpec.hs
Normal file
@ -0,0 +1,93 @@
|
|||||||
|
{-# LANGUAGE QuasiQuotes #-}
|
||||||
|
|
||||||
|
-- | Serialization test for specific data types
|
||||||
|
module Test.SerializationSpec (spec) where
|
||||||
|
|
||||||
|
import Harness.Backend.BigQuery qualified as Bigquery
|
||||||
|
import Harness.GraphqlEngine qualified as GraphqlEngine
|
||||||
|
import Harness.Quoter.Graphql (graphql)
|
||||||
|
import Harness.Quoter.Sql
|
||||||
|
import Harness.Quoter.Yaml (shouldReturnYaml, yaml)
|
||||||
|
import Harness.State (State)
|
||||||
|
import Harness.Test.Context qualified as Context
|
||||||
|
import Harness.Test.Schema qualified as Schema
|
||||||
|
import Test.Hspec (SpecWith, describe, it)
|
||||||
|
import Prelude
|
||||||
|
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
-- Preamble
|
||||||
|
|
||||||
|
spec :: SpecWith State
|
||||||
|
spec =
|
||||||
|
Context.run
|
||||||
|
[ Context.Context
|
||||||
|
{ name = Context.Backend Context.BigQuery,
|
||||||
|
mkLocalState = Context.noLocalState,
|
||||||
|
setup = bigQuerySetup,
|
||||||
|
teardown = const bigQueryTeardown,
|
||||||
|
customOptions =
|
||||||
|
Just $
|
||||||
|
Context.Options
|
||||||
|
{ stringifyNumbers = True
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
tests
|
||||||
|
|
||||||
|
authorTable :: Schema.Table
|
||||||
|
authorTable =
|
||||||
|
Schema.Table
|
||||||
|
{ tableName = "author",
|
||||||
|
tableColumns = [],
|
||||||
|
tablePrimaryKey = [],
|
||||||
|
tableReferences = [],
|
||||||
|
tableData = []
|
||||||
|
}
|
||||||
|
|
||||||
|
-- todo: Remove this when this gets merged: https://github.com/hasura/graphql-engine-mono/pull/4246
|
||||||
|
bigQuerySetup :: (State, ()) -> IO ()
|
||||||
|
bigQuerySetup (state, _) = do
|
||||||
|
sourceMetadata <- Bigquery.defaultSourceMetadata
|
||||||
|
GraphqlEngine.postMetadata_ state sourceMetadata
|
||||||
|
|
||||||
|
Bigquery.runSql_
|
||||||
|
[sql|
|
||||||
|
CREATE TABLE hasura.author (
|
||||||
|
id INT,
|
||||||
|
name STRING,
|
||||||
|
tax_id DECIMAL,
|
||||||
|
);
|
||||||
|
|]
|
||||||
|
Bigquery.runSql_
|
||||||
|
[sql|
|
||||||
|
INSERT hasura.author (id, name, tax_id)
|
||||||
|
VALUES (1, "sibi", 5555555555555556666);
|
||||||
|
|]
|
||||||
|
|
||||||
|
Bigquery.trackTable state authorTable
|
||||||
|
|
||||||
|
bigQueryTeardown :: IO ()
|
||||||
|
bigQueryTeardown = do
|
||||||
|
Bigquery.dropTable authorTable
|
||||||
|
|
||||||
|
tests :: Context.Options -> SpecWith State
|
||||||
|
tests opts = describe "SerializationSpec" $ do
|
||||||
|
it "serde Decimal column" $ \state ->
|
||||||
|
shouldReturnYaml
|
||||||
|
opts
|
||||||
|
( GraphqlEngine.postGraphql
|
||||||
|
state
|
||||||
|
[graphql|
|
||||||
|
query MyQuery {
|
||||||
|
hasura_author(where: {tax_id: {_eq: 5555555555555556666}}) {
|
||||||
|
id
|
||||||
|
tax_id
|
||||||
|
}
|
||||||
|
}|]
|
||||||
|
)
|
||||||
|
[yaml|
|
||||||
|
data:
|
||||||
|
hasura_author:
|
||||||
|
- tax_id: "5555555555555556666"
|
||||||
|
id: 1
|
||||||
|
|]
|
Loading…
Reference in New Issue
Block a user