server: add type information to aggregates and stringify them (closes #5704)

Fixes https://github.com/hasura/graphql-engine/issues/5704 by checking, for aggregate fields, whether we are handling a numeric aggregation.

This PR also adds type information to `ColFld`, so that the translator knows each field's column type.

This is the second attempt. See #319 for a less invasive approach. @nicuveo suggested type information might be useful, and since it wasn't hard to add, I think this version is better as well.
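
At the SQL level, the fix wraps each aggregate expression in the same stringification step used for plain column selections. A standalone toy model of the idea (the types and names below are illustrative stand-ins, not Hasura's actual IR):

    {-# LANGUAGE LambdaCase #-}

    -- Toy model of the fix: a column field inside an aggregate now
    -- carries its column type, so the translator can decide whether the
    -- aggregate's result must be stringified. Illustrative names only.
    data ColumnType = ColInteger | ColNumeric | ColText
      deriving (Eq, Show)

    data ColFld
      = CFCol String ColumnType  -- before this PR: just CFCol String
      | CFExp String

    -- Render one aggregate field, casting to text when stringification
    -- is on and the column is a numeric type JSON can't carry exactly.
    aggFieldToSQL :: Bool -> String -> ColFld -> String
    aggFieldToSQL strfyNum op = \case
      CFCol col ty
        | strfyNum && ty == ColNumeric
            -> "(" <> op <> "(" <> col <> "))::text"
        | otherwise
            -> op <> "(" <> col <> ")"
      CFExp e -> e

    -- >>> aggFieldToSQL True "sum" (CFCol "read_time" ColNumeric)
    -- "(sum(read_time))::text"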

GitOrigin-RevId: aa6a259fd5debe9466df6302839ddbbd0ea659b5
Author: Vladimir Ciobanu, 2021-01-18 15:51:36 +02:00 (committed by hasura-bot)
parent 6c22132061
commit 6e752a7876
6 changed files with 80 additions and 18 deletions

View File

@@ -93,6 +93,7 @@ and be accessible according to the permissions that were configured for the role
 - server: fix issue with non-optional fields of the remote schema being added as optional in the graphql-engine (fix #6401)
 - server: accept new config `allowed_skew` in JWT config to provide leeway for JWT expiry (fixes #2109)
 - server: fix issue with query actions with relationship with permissions configured on the remote table (fix #6385)
+- server: fix issue with `--stringify-numeric-types` not stringifying aggregate fields (fix #5704)
 - console: allow user to cascade Postgres dependencies when dropping Postgres objects (close #5109) (#5248)
 - console: mark inconsistent remote schemas in the UI (close #5093) (#5181)
 - console: remove ONLY as default for ALTER TABLE in column alter operations (close #5512) #5706

View File

@@ -78,8 +78,8 @@ selectFromToQual = \case
   FromIdentifier i -> S.QualifiedIdentifier i Nothing
   FromFunction qf _ _ -> S.QualifiedIdentifier (functionToIdentifier qf) Nothing
 
-aggregateFieldToExp :: AggregateFields 'Postgres -> S.SQLExp
-aggregateFieldToExp aggFlds = jsonRow
+aggregateFieldToExp :: AggregateFields 'Postgres -> Bool -> S.SQLExp
+aggregateFieldToExp aggFlds strfyNum = jsonRow
   where
     jsonRow = S.applyJsonBuildObj (concatMap aggToFlds aggFlds)
     withAls fldName sqlExp = [S.SELit fldName, sqlExp]
@@ -91,9 +91,10 @@ aggregateFieldToExp aggFlds = jsonRow
     aggOpToObj (AggregateOp opText flds) =
       S.applyJsonBuildObj $ concatMap (colFldsToExtr opText) flds
 
-    colFldsToExtr opText (FieldName t, CFCol col) =
+    colFldsToExtr opText (FieldName t, CFCol col ty) =
       [ S.SELit t
-      , S.SEFnApp opText [S.SEIdentifier $ toIdentifier col] Nothing
+      , toJSONableExp strfyNum ty False
+        $ S.SEFnApp opText [S.SEIdentifier $ toIdentifier col] Nothing
       ]
     colFldsToExtr _ (FieldName t, CFExp e) =
       [ S.SELit t , S.SELit e]
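
The `toJSONableExp` call above is the same helper that ordinary column selections already go through: roughly, it casts the expression to `text` when stringification is enabled and the column's type cannot be represented faithfully in JSON. A simplified standalone approximation (the scalar list is an assumption, and the real helper handles non-numeric cases this sketch ignores):

    -- Rough stand-in for the stringification decision inside
    -- toJSONableExp. Scalar names here are assumed for illustration.
    data PGScalar = PGInteger | PGBigInt | PGNumeric | PGDouble | PGText
      deriving (Eq, Show)

    -- Assumption: "big" numerics are the types whose values JSON
    -- clients (JavaScript in particular) may round or truncate.
    isBigNum :: PGScalar -> Bool
    isBigNum ty = ty `elem` [PGBigInt, PGNumeric, PGDouble]

    toJSONableSQL :: Bool -> PGScalar -> String -> String
    toJSONableSQL strfyNum ty sqlExp
      | strfyNum && isBigNum ty = "(" <> sqlExp <> ")::text"
      | otherwise               = sqlExp
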
@@ -271,8 +272,16 @@ mkAggregateOrderByExtractorAndFields annAggOrderBy =
       )
     AAOOp opText pgColumnInfo ->
       let pgColumn = pgiColumn pgColumnInfo
+          pgType = pgiType pgColumnInfo
       in ( S.Extractor (S.SEFnApp opText [S.SEIdentifier $ toIdentifier pgColumn] Nothing) alias
-         , [(FieldName opText, AFOp $ AggregateOp opText [(fromCol @'Postgres pgColumn, CFCol pgColumn)])]
+         , [ ( FieldName opText
+             , AFOp $ AggregateOp opText
+               [ ( fromCol @'Postgres pgColumn
+                 , CFCol pgColumn pgType
+                 )
+               ]
+             )
+           ]
         )
   where
     alias = Just $ mkAggregateOrderByAlias annAggOrderBy
@@ -479,7 +488,7 @@ processAnnAggregateSelect sourcePrefixes fieldAlias annAggSel = do
     case field of
       TAFAgg aggFields ->
        pure ( aggregateFieldsToExtractorExps thisSourcePrefix aggFields
-             , aggregateFieldToExp aggFields
+             , aggregateFieldToExp aggFields strfyNum
             )
      TAFNodes annFields -> do
        annFieldExtr <- processAnnFields thisSourcePrefix fieldName similarArrayFields annFields
@@ -501,7 +510,7 @@ processAnnAggregateSelect sourcePrefixes fieldAlias annAggSel = do
   pure (selectSource, nodeExtractors, topLevelExtractor)
   where
-    AnnSelectG aggSelFields tableFrom tablePermissions tableArgs _ = annAggSel
+    AnnSelectG aggSelFields tableFrom tablePermissions tableArgs strfyNum = annAggSel
     permLimit = _tpLimit tablePermissions
     orderBy = _saOrderBy tableArgs
     permLimitSubQuery = mkPermissionLimitSubQuery permLimit aggSelFields orderBy
 
@@ -735,14 +744,17 @@ aggregateFieldsToExtractorExps sourcePrefix aggregateFields =
       AFOp aggOp -> aggOpToExps aggOp
       AFExp _ -> []
   where
-    colsToExps = mapMaybe (mkColExp . CFCol)
-    aggOpToExps = mapMaybe (mkColExp . snd) . _aoFields
-    mkColExp (CFCol c) =
+    colsToExps = fmap mkColExp
+    aggOpToExps = mapMaybe colToMaybeExp . _aoFields
+    colToMaybeExp = \case
+      (_, CFCol col _) -> Just $ mkColExp col
+      _ -> Nothing
+    mkColExp c =
       let qualCol = S.mkQIdenExp (mkBaseTableAlias sourcePrefix) (toIdentifier c)
           colAls = toIdentifier c
-      in Just (S.Alias colAls, qualCol)
-    mkColExp _ = Nothing
+      in (S.Alias colAls, qualCol)
 
 processAnnFields
   :: forall m . ( MonadReader Bool m
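
An aside on the refactor above: once `CFCol` carries two arguments, the old point-free `mapMaybe (mkColExp . CFCol)` no longer typechecks, which is why the code switches to an explicit `colToMaybeExp`. A minimal standalone illustration (toy names):

    -- A one-argument constructor composes like any function; a
    -- two-argument one only yields a partially applied value, which no
    -- longer fits where a Fld is expected.
    data Fld = OneArg String | TwoArg String Int

    render :: Fld -> String
    render _ = "..."

    fine :: [String] -> [String]
    fine = fmap (render . OneArg)      -- OneArg :: String -> Fld

    -- broken :: [String] -> [String]
    -- broken = fmap (render . TwoArg) -- TwoArg "x" :: Int -> Fld, not Fld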

View File

@@ -790,14 +790,19 @@ tableAggregationFields table selectPermissions = do
     mkNumericAggFields name
       | name == $$(G.litName "sum") = traverse mkColumnAggField
       | otherwise = traverse \columnInfo ->
-          pure $ P.selection_ (pgiName columnInfo) (pgiDescription columnInfo)
-            (P.nullable P.float) $> IR.CFCol (pgiColumn columnInfo)
+          pure $ P.selection_
+            (pgiName columnInfo)
+            (pgiDescription columnInfo)
+            (P.nullable P.float)
+            $> IR.CFCol (pgiColumn columnInfo) (pgiType columnInfo)
 
     mkColumnAggField :: ColumnInfo b -> m (FieldParser n (IR.ColFld b))
     mkColumnAggField columnInfo = do
       field <- columnParser (pgiType columnInfo) (G.Nullability True)
-      pure $ P.selection_ (pgiName columnInfo) (pgiDescription columnInfo) field
-        $> IR.CFCol (pgiColumn columnInfo)
+      pure $ P.selection_
+        (pgiName columnInfo)
+        (pgiDescription columnInfo) field
+        $> IR.CFCol (pgiColumn columnInfo) (pgiType columnInfo)
 
     countField :: m (FieldParser n (IR.AggregateField b))
     countField = do
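
In miniature, what this hunk encodes: `sum` keeps the column's own GraphQL type, while the statistical aggregates always advertise a nullable Float; either way, the IR now records the column's real type for the SQL layer to consult. A toy sketch of that split (illustrative names, not Hasura's parser API):

    -- Which GraphQL output type an aggregate field advertises. The
    -- advertised type is independent of the stringification decision,
    -- which is made later from the column type carried in the IR.
    data GQLOutput = SameAsColumn | NullableFloat
      deriving Show

    aggOutputType :: String -> GQLOutput
    aggOutputType op
      | op == "sum" = SameAsColumn   -- sum of a NUMERIC column is NUMERIC
      | otherwise   = NullableFloat  -- avg, stddev, variance, ...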

View File

@@ -245,7 +245,7 @@ noSelectArgs :: SelectArgsG backend v
 noSelectArgs = SelectArgs Nothing Nothing Nothing Nothing Nothing
 
 data ColFld (b :: BackendType)
-  = CFCol !(Column b)
+  = CFCol !(Column b) !(ColumnType b)
   | CFExp !Text
 {-
 deriving instance Eq (Column b) => Eq (ColFld b)

View File

@@ -13,81 +13,97 @@ response:
           id_sum: 5
           author_id: 3
           author_id_sum: 3
+          read_time: '11.1'
         sum_fields:
           id: 5
           id_sum: 5
           author_id: 3
           author_id_sum: 3
+          read_time: '11.1'
         avg:
           id: 2.5
           id_avg: 2.5
           author_id: 1.5
           author_id_avg: 1.5
+          read_time: '5.5500000000000000'
         avg_fields:
           id: 2.5
           id_avg: 2.5
           author_id: 1.5
           author_id_avg: 1.5
+          read_time: '5.5500000000000000'
         stddev:
           id: 0.7071067811865476
           id_stddev: 0.7071067811865476
           author_id: 0.7071067811865476
           author_id_stddev: 0.7071067811865476
+          read_time: '3.1819805153394639'
         stddev_fields:
           id: 0.7071067811865476
           id_stddev: 0.7071067811865476
           author_id: 0.7071067811865476
           author_id_stddev: 0.7071067811865476
+          read_time: '3.1819805153394639'
         stddev_samp:
           id: 0.7071067811865476
           id_stddev_samp: 0.7071067811865476
           author_id: 0.7071067811865476
           author_id_stddev_samp: 0.7071067811865476
+          read_time: '3.1819805153394639'
         stddev_samp_fields:
           id: 0.7071067811865476
           id_stddev_samp: 0.7071067811865476
           author_id: 0.7071067811865476
           author_id_stddev_samp: 0.7071067811865476
+          read_time: '3.1819805153394639'
         stddev_pop:
           id: 0.5
           id_stddev_pop: 0.5
           author_id: 0.5
           author_id_stddev_pop: 0.5
+          read_time: '2.2500000000000000'
         stddev_pop_fields:
           id: 0.5
           id_stddev_pop: 0.5
           author_id: 0.5
           author_id_stddev_pop: 0.5
+          read_time: '2.2500000000000000'
         variance:
           id: 0.5
           id_variance: 0.5
           author_id: 0.5
           author_id_variance: 0.5
+          read_time: '10.1250000000000000'
         variance_fields:
           id: 0.5
           id_variance: 0.5
           author_id: 0.5
           author_id_variance: 0.5
+          read_time: '10.1250000000000000'
         var_samp:
           id: 0.5
           id_var_samp: 0.5
           author_id: 0.5
           author_id_var_samp: 0.5
+          read_time: '10.1250000000000000'
         var_samp_fields:
           id: 0.25
           id_var_pop: 0.25
           author_id: 0.25
           author_id_var_pop: 0.25
+          read_time: '5.0625000000000000'
         var_pop:
           id: 0.25
           id_var_pop: 0.25
           author_id: 0.25
           author_id_var_pop: 0.25
+          read_time: '5.0625000000000000'
         var_pop_fields:
           id: 0.25
           id_var_pop: 0.25
           author_id: 0.25
           author_id_var_pop: 0.25
+          read_time: '5.0625000000000000'
         max:
           id: 3
           id_max: 3
@@ -99,6 +115,7 @@ response:
           author_id_max: 2
           published_on: '1999-01-09T04:05:06'
           published_on_max: '1999-01-09T04:05:06'
+          read_time: '7.8'
         max_fields:
           id: 3
           id_max: 3
@@ -110,6 +127,7 @@ response:
           author_id_max: 2
           published_on: '1999-01-09T04:05:06'
           published_on_max: '1999-01-09T04:05:06'
+          read_time: '7.8'
         min:
           id: 2
           id_min: 2
@@ -121,6 +139,7 @@ response:
           author_id_min: 1
           published_on: '1999-01-08T04:05:07'
           published_on_min: '1999-01-08T04:05:07'
+          read_time: '3.3'
         min_fields:
           id: 2
           id_min: 2
@@ -132,6 +151,7 @@ response:
           author_id_min: 1
           published_on: '1999-01-08T04:05:07'
           published_on_min: '1999-01-08T04:05:07'
+          read_time: '3.3'
       nodes:
       - id: 2
         title: Article 2
@@ -180,96 +200,112 @@ query:
           id_sum: id
           author_id
           author_id_sum: author_id
+          read_time
         }
         sum_fields: sum {
           id
           id_sum: id
           author_id
           author_id_sum: author_id
+          read_time
         }
         avg {
           id
           id_avg: id
           author_id
           author_id_avg: author_id
+          read_time
         }
         avg_fields: avg {
           id
           id_avg: id
           author_id
           author_id_avg: author_id
+          read_time
         }
         stddev {
           id
           id_stddev: id
           author_id
           author_id_stddev: author_id
+          read_time
         }
         stddev_fields: stddev {
           id
           id_stddev: id
           author_id
           author_id_stddev: author_id
+          read_time
         }
         stddev_samp {
           id
           id_stddev_samp: id
           author_id
           author_id_stddev_samp: author_id
+          read_time
         }
         stddev_samp_fields: stddev_samp {
           id
           id_stddev_samp: id
           author_id
           author_id_stddev_samp: author_id
+          read_time
         }
         stddev_pop {
           id
           id_stddev_pop: id
           author_id
           author_id_stddev_pop: author_id
+          read_time
         }
         stddev_pop_fields: stddev_pop {
           id
           id_stddev_pop: id
           author_id
           author_id_stddev_pop: author_id
+          read_time
         }
         variance {
           id
           id_variance: id
           author_id
           author_id_variance: author_id
+          read_time
         }
         variance_fields: variance {
           id
           id_variance: id
           author_id
           author_id_variance: author_id
+          read_time
         }
         var_samp {
           id
           id_var_samp: id
           author_id
           author_id_var_samp: author_id
+          read_time
         }
         var_samp_fields: var_pop {
           id
           id_var_pop: id
           author_id
           author_id_var_pop: author_id
+          read_time
         }
         var_pop {
           id
           id_var_pop: id
           author_id
           author_id_var_pop: author_id
+          read_time
         }
         var_pop_fields: var_pop {
           id
           id_var_pop: id
           author_id
           author_id_var_pop: author_id
+          read_time
         }
         max {
           id
@@ -282,6 +318,7 @@ query:
           author_id_max: author_id
           published_on
           published_on_max: published_on
+          read_time
         }
         max_fields: max {
           id
@@ -294,6 +331,7 @@ query:
           author_id_max: author_id
           published_on
           published_on_max: published_on
+          read_time
         }
         min {
           id
@@ -306,6 +344,7 @@ query:
           author_id_min: author_id
           published_on
           published_on_min: published_on
+          read_time
         }
         min_fields: min {
           id
@@ -318,6 +357,7 @@ query:
           author_id_min: author_id
           published_on
           published_on_min: published_on
+          read_time
         }
       }
       nodes {

View File

@@ -16,19 +16,21 @@ args:
         content TEXT,
         author_id INTEGER REFERENCES author(id),
         is_published BOOLEAN,
+        read_time NUMERIC,
         published_on TIMESTAMP NOT NULL DEFAULT NOW()
       );
       insert into author (name)
       values
       ('Author 1'),
       ('Author 2');
-      insert into article (title,content,author_id,is_published,published_on)
+      insert into article (title,content,author_id,is_published,read_time,published_on)
       values
       (
         'Article 1',
         'Sample article content 1',
         1,
         false,
+        1.5,
         '1999-01-08 04:05:06'
       ),
       (
@@ -36,6 +38,7 @@ args:
         'Sample article content 2',
         1,
         true,
+        3.3,
         '1999-01-08 04:05:07'
       ),
       (
@@ -43,6 +46,7 @@ args:
         'Sample article content 3',
         2,
         true,
+        7.8,
         '1999-01-09 04:05:06'
       );