graphql-engine/server/src-lib/Hasura/Backends/DataConnector/Adapter/Schema.hs

{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}

module Hasura.Backends.DataConnector.Adapter.Schema () where

--------------------------------------------------------------------------------

import Data.Has
import Data.List.NonEmpty qualified as NE
import Data.Text.Casing (GQLNameIdentifier)
import Data.Text.Extended ((<<>))
import Data.Text.NonEmpty qualified as NET
import Hasura.Backends.DataConnector.Adapter.Types qualified as Adapter
import Hasura.Backends.DataConnector.IR.Aggregate qualified as IR.A
import Hasura.Backends.DataConnector.IR.Column qualified as IR.C
import Hasura.Backends.DataConnector.IR.OrderBy qualified as IR.O
import Hasura.Backends.DataConnector.IR.Scalar.Type qualified as IR.S.T
import Hasura.Backends.DataConnector.IR.Scalar.Value qualified as IR.S.V
import Hasura.Base.Error
import Hasura.GraphQL.Parser.Class
import Hasura.GraphQL.Schema.Backend (BackendSchema (..), BackendTableSelectSchema (..), ComparisonExp, MonadBuildSchema)
import Hasura.GraphQL.Schema.BoolExp qualified as GS.BE
import Hasura.GraphQL.Schema.Build qualified as GS.B
import Hasura.GraphQL.Schema.Common qualified as GS.C
import Hasura.GraphQL.Schema.NamingCase
import Hasura.GraphQL.Schema.Options (SchemaOptions)
import Hasura.GraphQL.Schema.Options qualified as Options
import Hasura.GraphQL.Schema.Parser qualified as P
import Hasura.GraphQL.Schema.Select qualified as GS.S
import Hasura.Name qualified as Name
import Hasura.Prelude
import Hasura.RQL.IR.Select qualified as IR
import Hasura.RQL.IR.Value qualified as IR
import Hasura.RQL.Types.Backend qualified as RQL
import Hasura.RQL.Types.Column qualified as RQL
import Hasura.RQL.Types.Source qualified as RQL
import Hasura.RQL.Types.SourceCustomization qualified as RQL
import Hasura.RQL.Types.Table qualified as RQL
import Hasura.SQL.Backend (BackendType (..))
import Language.GraphQL.Draft.Syntax qualified as GQL

--------------------------------------------------------------------------------
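
-- | 'BackendSchema' instance for the Data Connector backend. As the field
-- builders below show, only table query, subscription, and aggregation fields
-- are generated; function fields, mutations, Relay, and streaming
-- subscriptions all resolve to empty field lists or disabled extensions.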
instance BackendSchema 'DataConnector where
  -- top level parsers
  buildTableQueryAndSubscriptionFields = GS.B.buildTableQueryAndSubscriptionFields
  buildTableRelayQueryFields = experimentalBuildTableRelayQueryFields
  buildFunctionQueryFields _ _ _ _ _ = pure []
  buildFunctionRelayQueryFields _ _ _ _ _ _ = pure []
  buildFunctionMutationFields _ _ _ _ _ = pure []
  buildTableInsertMutationFields _ _ _ _ _ _ = pure []
  buildTableUpdateMutationFields _ _ _ _ _ _ = pure []
  buildTableDeleteMutationFields _ _ _ _ _ _ = pure []
  buildTableStreamingSubscriptionFields _ _ _ _ _ = pure []

  -- backend extensions
  relayExtension = Nothing
  nodesAggExtension = Just ()
  streamSubscriptionExtension = Nothing

  -- individual components
  columnParser = columnParser'
  scalarSelectionArgumentsParser _ = pure Nothing
  orderByOperators = orderByOperators'
  comparisonExps = comparisonExps'
  countTypeInput = countTypeInput'
  aggregateOrderByCountType = IR.S.T.Number
  computedField =
    error "computedField: not implemented for the Data Connector backend."

instance BackendTableSelectSchema 'DataConnector where
  tableArguments = tableArgs'
  selectTable = GS.S.defaultSelectTable
  selectTableAggregate = GS.S.defaultSelectTableAggregate
  tableSelectionSet = GS.S.defaultTableSelectionSet

--------------------------------------------------------------------------------
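
-- | Relay query fields are not supported for Data Connector sources (note
-- @relayExtension = Nothing@ above), so this builder simply produces no
-- fields.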
experimentalBuildTableRelayQueryFields ::
  MonadBuildSchema 'DataConnector r m n =>
  RQL.MkRootFieldName ->
  RQL.SourceInfo 'DataConnector ->
  RQL.TableName 'DataConnector ->
  RQL.TableInfo 'DataConnector ->
  GQLNameIdentifier ->
  NESeq (RQL.ColumnInfo 'DataConnector) ->
  m [P.FieldParser n a]
experimentalBuildTableRelayQueryFields _mkRootFieldName _sourceName _tableName _tableInfo _gqlName _pkeyColumns =
  pure []
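
-- | Build a value parser for a Data Connector column. Only the
-- 'IR.S.T.String', 'IR.S.T.Number', and 'IR.S.T.Bool' scalar types are
-- supported; any other column type is rejected with a @NotSupported@ error.
-- When the column is nullable, a @null@ literal is accepted and parsed as
-- 'IR.S.V.Null'.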
columnParser' ::
  (MonadParse n, MonadError QErr m) =>
  RQL.ColumnType 'DataConnector ->
  GQL.Nullability ->
  m (P.Parser 'P.Both n (IR.ValueWithOrigin (RQL.ColumnValue 'DataConnector)))
columnParser' columnType (GQL.Nullability isNullable) = do
  parser <- case columnType of
    RQL.ColumnScalar IR.S.T.String -> pure (IR.S.V.String <$> P.string)
    RQL.ColumnScalar IR.S.T.Number -> pure (IR.S.V.Number <$> P.scientific)
    RQL.ColumnScalar IR.S.T.Bool -> pure (IR.S.V.Boolean <$> P.boolean)
    _ -> throw400 NotSupported "This column type is unsupported by the Data Connector backend"
  pure . GS.C.peelWithOrigin . fmap (RQL.ColumnValue columnType) . possiblyNullable $ parser
  where
    possiblyNullable ::
      MonadParse m =>
      P.Parser 'P.Both m IR.S.V.Value ->
      P.Parser 'P.Both m IR.S.V.Value
    possiblyNullable
      | isNullable = fmap (fromMaybe IR.S.V.Null) . P.nullable
      | otherwise = id
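
-- | Order-by operators for the Data Connector backend. The enum type is named
-- after the connector, e.g. (illustrative) a connector named @reference@
-- would produce an enum roughly like
--
-- > enum reference_order_by {
-- >   asc
-- >   desc
-- > }
--
-- The 'NamingCase' argument is ignored because naming conventions are not
-- supported for this backend.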
orderByOperators' :: RQL.SourceInfo 'DataConnector -> NamingCase -> (GQL.Name, NonEmpty (P.Definition P.EnumValueInfo, (RQL.BasicOrderType 'DataConnector, RQL.NullsOrderType 'DataConnector)))
orderByOperators' RQL.SourceInfo {_siConfiguration} _tCase =
  let dcName = Adapter._scDataConnectorName _siConfiguration
      orderBy = fromMaybe Name._order_by $ GQL.mkName $ NET.unNonEmptyText (Adapter.unDataConnectorName dcName) <> "_order_by"
   in (orderBy,) $
        -- NOTE: NamingCase is not being used here as we don't support naming conventions for this DB
        NE.fromList
          [ ( define $$(GQL.litName "asc") "in ascending order",
              (IR.O.Ascending, ())
            ),
            ( define $$(GQL.litName "desc") "in descending order",
              (IR.O.Descending, ())
            )
          ]
  where
    define name desc = P.Definition name (Just desc) Nothing [] P.EnumValueInfo
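
-- | Build the boolean comparison-expression input object for a column type,
-- memoized per column type. The generated type is named after the column's
-- scalar parser, e.g. (illustrative) a @String@ column yields an input object
-- called @String_Dynamic_comparison_exp@, combining the equality operators
-- (such as @_eq@) and ordering operators (such as @_gt@) provided by
-- "Hasura.GraphQL.Schema.BoolExp".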
comparisonExps' ::
  forall m n r.
  ( BackendSchema 'DataConnector,
    P.MonadMemoize m,
    MonadParse n,
    MonadError QErr m,
    MonadReader r m,
    Has SchemaOptions r,
    Has NamingCase r
  ) =>
  RQL.ColumnType 'DataConnector ->
  m (P.Parser 'P.Input n [ComparisonExp 'DataConnector])
comparisonExps' = P.memoize 'comparisonExps' $ \columnType -> do
  tCase <- asks getter
  collapseIfNull <- GS.C.retrieve Options.soDangerousBooleanCollapse
  typedParser <- columnParser' columnType (GQL.Nullability False)
  let name = P.getName typedParser <> $$(GQL.litName "_Dynamic_comparison_exp")
      desc =
        GQL.Description $
          "Boolean expression to compare columns of type "
            <> P.getName typedParser
            <<> ". All fields are combined with logical 'AND'."
      columnListParser = fmap IR.openValueOrigin <$> P.list typedParser
  pure $
    P.object name (Just desc) $
      fmap catMaybes $
        sequenceA $
          concat
            [ GS.BE.equalityOperators
                tCase
                collapseIfNull
                (IR.mkParameter <$> typedParser)
                (mkListLiteral <$> columnListParser),
              GS.BE.comparisonOperators
                tCase
                collapseIfNull
                (IR.mkParameter <$> typedParser)
            ]
  where
    mkListLiteral :: [RQL.ColumnValue 'DataConnector] -> IR.UnpreparedValue 'DataConnector
    mkListLiteral columnValues =
      IR.UVLiteral . IR.S.V.ArrayLiteral $ RQL.cvValue <$> columnValues
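
-- | Argument parsers for table selection: @where@, @order_by@, @limit@, and
-- @offset@. @distinct_on@ is not supported, so '_saDistinct' is always
-- 'Nothing'.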
tableArgs' ::
  forall r m n.
  MonadBuildSchema 'DataConnector r m n =>
  RQL.SourceInfo 'DataConnector ->
  RQL.TableInfo 'DataConnector ->
  m (P.InputFieldsParser n (IR.SelectArgsG 'DataConnector (IR.UnpreparedValue 'DataConnector)))
tableArgs' sourceName tableInfo = do
  whereParser <- GS.S.tableWhereArg sourceName tableInfo
  orderByParser <- GS.S.tableOrderByArg sourceName tableInfo
  let mkSelectArgs whereArg orderByArg limitArg offsetArg =
        IR.SelectArgs
          { _saWhere = whereArg,
            _saOrderBy = orderByArg,
            _saLimit = limitArg,
            _saOffset = offsetArg,
            _saDistinct = Nothing
          }
  pure $
    mkSelectArgs
      <$> whereParser
      <*> orderByParser
      <*> GS.S.tableLimitArg
      <*> GS.S.tableOffsetArg
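
-- | Parse the input of a count aggregate. When a column enum parser is
-- available, an optional @column@ argument selects which column to count.
-- The resulting function maps:
--
-- * no column selected                        -> 'IR.A.StarCount'
-- * a column with 'IR.SelectCountDistinct'    -> 'IR.A.ColumnDistinctCount'
-- * a column with 'IR.SelectCountNonDistinct' -> 'IR.A.ColumnCount'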
countTypeInput' ::
  MonadParse n =>
  Maybe (P.Parser 'P.Both n IR.C.Name) ->
  P.InputFieldsParser n (IR.CountDistinct -> IR.A.CountAggregate)
countTypeInput' = \case
  Just columnEnum -> mkCountAggregate <$> P.fieldOptional Name._column Nothing columnEnum
  Nothing -> pure $ mkCountAggregate Nothing
  where
    mkCountAggregate :: Maybe IR.C.Name -> IR.CountDistinct -> IR.A.CountAggregate
    mkCountAggregate Nothing _ = IR.A.StarCount
    mkCountAggregate (Just column) IR.SelectCountDistinct = IR.A.ColumnDistinctCount column
    mkCountAggregate (Just column) IR.SelectCountNonDistinct = IR.A.ColumnCount column