{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}

module Hasura.Backends.DataConnector.Adapter.Schema () where

--------------------------------------------------------------------------------

import Data.Has
import Data.List.NonEmpty qualified as NE
import Data.Text.Casing (GQLNameIdentifier)
import Data.Text.Extended ((<<>))
import Data.Text.NonEmpty qualified as NET
import Hasura.Backends.DataConnector.Adapter.Types qualified as Adapter
import Hasura.Backends.DataConnector.IR.Aggregate qualified as IR.A
import Hasura.Backends.DataConnector.IR.Column qualified as IR.C
import Hasura.Backends.DataConnector.IR.OrderBy qualified as IR.O
import Hasura.Backends.DataConnector.IR.Scalar.Type qualified as IR.S.T
import Hasura.Backends.DataConnector.IR.Scalar.Value qualified as IR.S.V
import Hasura.Base.Error
import Hasura.GraphQL.Parser.Class
import Hasura.GraphQL.Schema.Backend (BackendSchema (..), BackendTableSelectSchema (..), ComparisonExp, MonadBuildSchema)
import Hasura.GraphQL.Schema.BoolExp qualified as GS.BE
import Hasura.GraphQL.Schema.Build qualified as GS.B
import Hasura.GraphQL.Schema.Common qualified as GS.C
import Hasura.GraphQL.Schema.NamingCase
import Hasura.GraphQL.Schema.Options (SchemaOptions)
import Hasura.GraphQL.Schema.Options qualified as Options
import Hasura.GraphQL.Schema.Parser qualified as P
import Hasura.GraphQL.Schema.Select qualified as GS.S
import Hasura.Name qualified as Name
import Hasura.Prelude
import Hasura.RQL.IR.Select qualified as IR
import Hasura.RQL.IR.Value qualified as IR
import Hasura.RQL.Types.Backend qualified as RQL
import Hasura.RQL.Types.Column qualified as RQL
import Hasura.RQL.Types.Source qualified as RQL
import Hasura.RQL.Types.Table qualified as RQL
import Hasura.SQL.Backend (BackendType (..))
import Language.GraphQL.Draft.Syntax qualified as GQL

--------------------------------------------------------------------------------
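-- | Schema builders for the Data Connector backend. Table queries,
-- subscriptions and aggregations delegate to the shared default builders;
-- functions, mutations, Relay and streaming subscriptions are not
-- implemented and therefore contribute no fields to the generated schema.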
instance BackendSchema 'DataConnector where
  -- top level parsers
  buildTableQueryAndSubscriptionFields = GS.B.buildTableQueryAndSubscriptionFields
  buildTableRelayQueryFields = experimentalBuildTableRelayQueryFields
  buildFunctionQueryFields _ _ _ _ = pure []
  buildFunctionRelayQueryFields _ _ _ _ _ = pure []
  buildFunctionMutationFields _ _ _ _ = pure []
  buildTableInsertMutationFields _ _ _ _ _ = pure []
  buildTableUpdateMutationFields _ _ _ _ _ = pure []
  buildTableDeleteMutationFields _ _ _ _ _ = pure []
  buildTableStreamingSubscriptionFields _ _ _ _ = pure []

  -- backend extensions
  relayExtension = Nothing
  nodesAggExtension = Just ()
  streamSubscriptionExtension = Nothing

  -- individual components
  columnParser = columnParser'
  scalarSelectionArgumentsParser _ = pure Nothing
  orderByOperators = orderByOperators'
  comparisonExps = comparisonExps'
  countTypeInput = countTypeInput'
  aggregateOrderByCountType = IR.S.T.Number
  computedField =
    error "computedField: not implemented for the Data Connector backend."

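-- | Table selection schema for the Data Connector backend: everything uses
-- the default implementations except the table arguments, which are
-- restricted to the subset supported by 'tableArgs''.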
instance BackendTableSelectSchema 'DataConnector where
  tableArguments = tableArgs'
  selectTable = GS.S.defaultSelectTable
  selectTableAggregate = GS.S.defaultSelectTableAggregate
  tableSelectionSet = GS.S.defaultTableSelectionSet

--------------------------------------------------------------------------------
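-- | Relay is not supported by the Data Connector backend ('relayExtension'
-- is 'Nothing'), so this builder ignores its inputs and produces no query
-- fields.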
experimentalBuildTableRelayQueryFields ::
  MonadBuildSchema 'DataConnector r m n =>
  RQL.SourceInfo 'DataConnector ->
  RQL.TableName 'DataConnector ->
  RQL.TableInfo 'DataConnector ->
  GQLNameIdentifier ->
  NESeq (RQL.ColumnInfo 'DataConnector) ->
  m [a]
experimentalBuildTableRelayQueryFields _sourceName _tableName _tableInfo _gqlName _pkeyColumns =
  pure []

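-- | Builds a parser for the scalar values of a column. Only the three
-- scalar types currently modelled by the Data Connector IR (strings,
-- numbers and booleans) are supported; any other column type is rejected
-- with a @NotSupported@ error.
--
-- A sketch of how a caller might obtain a parser for a nullable number
-- column (the binding name is illustrative only):
--
-- > numberParser <- columnParser' (RQL.ColumnScalar IR.S.T.Number) (GQL.Nullability True)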
columnParser' ::
  (MonadSchema n m, MonadError QErr m) =>
  RQL.ColumnType 'DataConnector ->
  GQL.Nullability ->
  m (P.Parser 'P.Both n (IR.ValueWithOrigin (RQL.ColumnValue 'DataConnector)))
columnParser' columnType (GQL.Nullability isNullable) = do
  parser <- case columnType of
    RQL.ColumnScalar IR.S.T.String -> pure (IR.S.V.String <$> P.string)
    RQL.ColumnScalar IR.S.T.Number -> pure (IR.S.V.Number <$> P.scientific)
    RQL.ColumnScalar IR.S.T.Bool -> pure (IR.S.V.Boolean <$> P.boolean)
    _ -> throw400 NotSupported "This column type is unsupported by the Data Connector backend"
  pure . GS.C.peelWithOrigin . fmap (RQL.ColumnValue columnType) . possiblyNullable $ parser
  where
    possiblyNullable ::
      MonadParse m =>
      P.Parser 'P.Both m IR.S.V.Value ->
      P.Parser 'P.Both m IR.S.V.Value
    possiblyNullable
      | isNullable = fmap (fromMaybe IR.S.V.Null) . P.nullable
      | otherwise = id

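-- | The order-by enum for this backend. The enum is named after the data
-- connector (falling back to the plain @order_by@ name when the prefixed
-- name is not a valid GraphQL name) and only offers ascending and
-- descending ordering; nulls ordering is not configurable, hence the @()@
-- nulls-order type.
--
-- A sketch of the resulting enum in GraphQL SDL, assuming a connector
-- named @reference@ whose name forms a valid GraphQL prefix:
--
-- > enum reference_order_by {
-- >   asc
-- >   desc
-- > }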
orderByOperators' :: RQL.SourceInfo 'DataConnector -> NamingCase -> (GQL.Name, NonEmpty (P.Definition P.EnumValueInfo, (RQL.BasicOrderType 'DataConnector, RQL.NullsOrderType 'DataConnector)))
orderByOperators' RQL.SourceInfo {_siConfiguration} _tCase =
  let dcName = Adapter._scDataConnectorName _siConfiguration
      orderBy = fromMaybe Name._order_by $ GQL.mkName $ NET.unNonEmptyText (Adapter.unDataConnectorName dcName) <> "_order_by"
   in (orderBy,) $
        -- NOTE: NamingCase is not being used here as we don't support naming conventions for this DB
        NE.fromList
          [ ( define $$(GQL.litName "asc") "in ascending order",
              (IR.O.Ascending, ())
            ),
            ( define $$(GQL.litName "desc") "in descending order",
              (IR.O.Descending, ())
            )
          ]
  where
    define name desc = P.Definition name (Just desc) Nothing P.EnumValueInfo

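-- | Builds the comparison-expression input object used in boolean
-- expressions for a given column type. The parser is memoized per column
-- type and only exposes the generic equality and ordering operators from
-- "Hasura.GraphQL.Schema.BoolExp"; no backend-specific operators are added.
--
-- A rough, illustrative sketch of the generated input object; the exact
-- field list is whatever 'GS.BE.equalityOperators' and
-- 'GS.BE.comparisonOperators' emit:
--
-- > input Number_Dynamic_comparison_exp {
-- >   _eq: Number
-- >   _neq: Number
-- >   _in: [Number!]
-- >   _nin: [Number!]
-- >   _is_null: Boolean
-- >   _gt: Number
-- >   _gte: Number
-- >   _lt: Number
-- >   _lte: Number
-- > }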
comparisonExps' ::
  forall m n r.
  ( BackendSchema 'DataConnector,
    MonadSchema n m,
    MonadError QErr m,
    MonadReader r m,
    Has SchemaOptions r,
    Has NamingCase r
  ) =>
  RQL.ColumnType 'DataConnector ->
  m (P.Parser 'P.Input n [ComparisonExp 'DataConnector])
comparisonExps' = P.memoize 'comparisonExps' $ \columnType -> do
  tCase <- asks getter
  collapseIfNull <- GS.C.retrieve Options.soDangerousBooleanCollapse

  typedParser <- columnParser' columnType (GQL.Nullability False)
  nullableTextParser <- columnParser' (RQL.ColumnScalar IR.S.T.String) (GQL.Nullability True)
  textParser <- columnParser' (RQL.ColumnScalar IR.S.T.String) (GQL.Nullability False)
  let name = P.getName typedParser <> $$(GQL.litName "_Dynamic_comparison_exp")
      desc =
        GQL.Description $
          "Boolean expression to compare columns of type "
            <> P.getName typedParser
            <<> ". All fields are combined with logical 'AND'."
      textListParser = fmap IR.openValueOrigin <$> P.list textParser
      columnListParser = fmap IR.openValueOrigin <$> P.list typedParser
  pure $
    P.object name (Just desc) $
      fmap catMaybes $
        sequenceA $
          concat
            [ GS.BE.equalityOperators
                tCase
                collapseIfNull
                (IR.mkParameter <$> typedParser)
                (mkListLiteral <$> columnListParser),
              GS.BE.comparisonOperators
                tCase
                collapseIfNull
                (IR.mkParameter <$> typedParser)
            ]
  where
    mkListLiteral :: [RQL.ColumnValue 'DataConnector] -> IR.UnpreparedValue 'DataConnector
    mkListLiteral columnValues =
      IR.UVLiteral . IR.S.V.ArrayLiteral $ RQL.cvValue <$> columnValues

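-- | Argument parsers for table selection: @where@, @order_by@, @limit@ and
-- @offset@. @distinct_on@ is not supported by this backend, so
-- '_saDistinct' is always 'Nothing'.
--
-- A sketch of a query using these arguments (table, column and value names
-- are illustrative only):
--
-- > query {
-- >   artists(where: { name: { _eq: "AC/DC" } }, order_by: { id: asc }, limit: 10, offset: 0) {
-- >     id
-- >     name
-- >   }
-- > }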
tableArgs' ::
  forall r m n.
  MonadBuildSchema 'DataConnector r m n =>
  RQL.SourceInfo 'DataConnector ->
  RQL.TableInfo 'DataConnector ->
  m (P.InputFieldsParser n (IR.SelectArgsG 'DataConnector (IR.UnpreparedValue 'DataConnector)))
tableArgs' sourceName tableInfo = do
  whereParser <- GS.S.tableWhereArg sourceName tableInfo
  orderByParser <- GS.S.tableOrderByArg sourceName tableInfo
  let mkSelectArgs whereArg orderByArg limitArg offsetArg =
        IR.SelectArgs
          { _saWhere = whereArg,
            _saOrderBy = orderByArg,
            _saLimit = limitArg,
            _saOffset = offsetArg,
            _saDistinct = Nothing
          }
  pure $
    mkSelectArgs
      <$> whereParser
      <*> orderByParser
      <*> GS.S.tableLimitArg
      <*> GS.S.tableOffsetArg

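-- | Parses the optional @columns@ argument of a count aggregate and turns
-- it into the IR count aggregate: no columns (or no column enum at all)
-- means a star count, otherwise a plain or distinct column count depending
-- on the distinct flag. For example (illustrative column name), a
-- @count(columns: [name])@ selection with the distinct flag set maps to
-- 'IR.A.ColumnDistinctCount'; with no @columns@ argument it maps to
-- 'IR.A.StarCount'.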
countTypeInput' ::
  MonadParse n =>
  Maybe (P.Parser 'P.Both n IR.C.Name) ->
  P.InputFieldsParser n (IR.CountDistinct -> IR.A.CountAggregate)
countTypeInput' = \case
  Just columnEnum -> mkCountAggregate <$> P.fieldOptional Name._columns Nothing (P.list columnEnum)
  Nothing -> pure $ mkCountAggregate Nothing
  where
    mkCountAggregate :: Maybe [IR.C.Name] -> IR.CountDistinct -> IR.A.CountAggregate
    mkCountAggregate Nothing _ = IR.A.StarCount
    mkCountAggregate (Just cols) IR.SelectCountDistinct = maybe IR.A.StarCount IR.A.ColumnDistinctCount $ nonEmpty cols
    mkCountAggregate (Just cols) IR.SelectCountNonDistinct = maybe IR.A.StarCount IR.A.ColumnCount $ nonEmpty cols