-- graphql-engine/server/src-lib/Hasura/Backends/DataConnector/Adapter/Schema.hs
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Hasura.Backends.DataConnector.Adapter.Schema () where
--------------------------------------------------------------------------------
import Control.Lens ((^.))
import Data.Aeson qualified as J
import Data.Has
import Data.HashMap.Strict qualified as Map
import Data.List.NonEmpty qualified as NE
import Data.Text.Casing (GQLNameIdentifier, fromCustomName)
import Data.Text.Extended ((<<>))
import Data.Text.NonEmpty qualified as NET
import Hasura.Backends.DataConnector.API.V0.Capabilities (lookupComparisonInputObjectDefinition)
import Hasura.Backends.DataConnector.Adapter.Backend (CustomBooleanOperator (..))
import Hasura.Backends.DataConnector.Adapter.Types qualified as Adapter
import Hasura.Backends.DataConnector.IR.Aggregate qualified as IR.A
import Hasura.Backends.DataConnector.IR.Column qualified as IR.C
import Hasura.Backends.DataConnector.IR.OrderBy qualified as IR.O
import Hasura.Backends.DataConnector.IR.Scalar.Type qualified as IR.S.T
import Hasura.Backends.DataConnector.IR.Scalar.Value qualified as IR.S.V
import Hasura.Base.Error
import Hasura.GraphQL.Parser.Class
import Hasura.GraphQL.Schema.Backend (BackendSchema (..), BackendTableSelectSchema (..), ComparisonExp, MonadBuildSchema)
import Hasura.GraphQL.Schema.BoolExp qualified as GS.BE
import Hasura.GraphQL.Schema.Build qualified as GS.B
import Hasura.GraphQL.Schema.Common qualified as GS.C
import Hasura.GraphQL.Schema.NamingCase
import Hasura.GraphQL.Schema.Options qualified as Options
import Hasura.GraphQL.Schema.Parser qualified as P
import Hasura.GraphQL.Schema.Select qualified as GS.S
import Hasura.Name qualified as Name
import Hasura.Prelude
import Hasura.RQL.IR.BoolExp qualified as IR
import Hasura.RQL.IR.Select qualified as IR
import Hasura.RQL.IR.Value qualified as IR
import Hasura.RQL.Types.Backend qualified as RQL
import Hasura.RQL.Types.Column qualified as RQL
import Hasura.RQL.Types.Source qualified as RQL
import Hasura.RQL.Types.SourceCustomization qualified as RQL
import Hasura.RQL.Types.Table qualified as RQL
import Hasura.SQL.Backend (BackendType (..))
import Language.GraphQL.Draft.Syntax qualified as GQL
--------------------------------------------------------------------------------
-- | Wires the Data Connector backend into the GraphQL schema-building
-- machinery.  Only query-side schema is generated: every function,
-- mutation and streaming-subscription builder contributes no fields.
instance BackendSchema 'DataConnector where
  -- top level parsers
  buildTableQueryAndSubscriptionFields = GS.B.buildTableQueryAndSubscriptionFields
  buildTableRelayQueryFields = experimentalBuildTableRelayQueryFields
  -- Functions, mutations and streaming subscriptions are unsupported:
  -- these builders always produce an empty field list.
  buildFunctionQueryFields _ _ _ _ _ = pure []
  buildFunctionRelayQueryFields _ _ _ _ _ _ = pure []
  buildFunctionMutationFields _ _ _ _ _ = pure []
  buildTableInsertMutationFields _ _ _ _ _ _ = pure []
  buildTableUpdateMutationFields _ _ _ _ _ _ = pure []
  buildTableDeleteMutationFields _ _ _ _ _ _ = pure []
  buildTableStreamingSubscriptionFields _ _ _ _ _ = pure []

  -- backend extensions: no Relay, no streaming; nodes-aggregates enabled
  relayExtension = Nothing
  nodesAggExtension = Just ()
  streamSubscriptionExtension = Nothing

  -- individual components
  columnParser = columnParser'
  enumParser = enumParser'
  possiblyNullable = possiblyNullable'
  scalarSelectionArgumentsParser _ = pure Nothing
  orderByOperators = orderByOperators'
  comparisonExps = comparisonExps'
  countTypeInput = countTypeInput'
  aggregateOrderByCountType = IR.S.T.Number
  computedField =
    -- NOTE(review): partial — this crashes if ever evaluated.  Presumably
    -- computed fields are unreachable for this backend; confirm before
    -- relying on it.
    error "computedField: not implemented for the Data Connector backend."
-- | Table SELECT schema generation uses the backend-agnostic defaults,
-- except for table arguments (where/order_by/limit/offset), which are
-- customised in 'tableArgs''.
instance BackendTableSelectSchema 'DataConnector where
  tableArguments = tableArgs'
  selectTable = GS.S.defaultSelectTable
  selectTableAggregate = GS.S.defaultSelectTableAggregate
  tableSelectionSet = GS.S.defaultTableSelectionSet
--------------------------------------------------------------------------------
-- | Relay (Node interface) query fields for the Data Connector backend.
--
-- Relay is not supported by this backend ('relayExtension' is 'Nothing'),
-- so this builder always contributes no fields.
experimentalBuildTableRelayQueryFields ::
  MonadBuildSchema 'DataConnector r m n =>
  RQL.MkRootFieldName ->
  RQL.SourceInfo 'DataConnector ->
  RQL.TableName 'DataConnector ->
  RQL.TableInfo 'DataConnector ->
  GQLNameIdentifier ->
  NESeq (RQL.ColumnInfo 'DataConnector) ->
  GS.C.SchemaT r m [P.FieldParser n a]
experimentalBuildTableRelayQueryFields _mkRootFieldName _sourceName _tableName _tableInfo _gqlName _pkeyColumns =
  pure []
-- | Build a parser for values of a Data Connector column type.
--
-- Primitive scalars (string, number, bool) are parsed with the matching
-- GraphQL scalar parser; connector-defined custom scalars are accepted as
-- opaque JSON; enum references are delegated to 'enumParser'' (which
-- currently rejects them as unsupported).
columnParser' ::
  (MonadParse n, MonadError QErr m) =>
  RQL.ColumnType 'DataConnector ->
  GQL.Nullability ->
  GS.C.SchemaT r m (P.Parser 'P.Both n (IR.ValueWithOrigin (RQL.ColumnValue 'DataConnector)))
columnParser' columnType nullability = do
  parser <- case columnType of
    RQL.ColumnScalar scalarType@IR.S.T.String -> pure . possiblyNullable' scalarType nullability $ J.String <$> P.string
    RQL.ColumnScalar scalarType@IR.S.T.Number -> pure . possiblyNullable' scalarType nullability $ J.Number <$> P.scientific
    RQL.ColumnScalar scalarType@IR.S.T.Bool -> pure . possiblyNullable' scalarType nullability $ J.Bool <$> P.boolean
    RQL.ColumnScalar scalarType@(IR.S.T.Custom name) -> do
      -- The custom scalar's name comes from the connector and may not be
      -- a legal GraphQL name, so validate it before use.
      gqlName <-
        GQL.mkName name
          `onNothing` throw400 ValidationFailed ("The column type name " <> name <<> " is not a valid GraphQL name")
      pure . possiblyNullable' scalarType nullability $ P.jsonScalar gqlName (Just "A custom scalar type")
    RQL.ColumnEnumReference (RQL.EnumReference tableName enumValues customTableName) ->
      case nonEmpty (Map.toList enumValues) of
        Just enumValuesList -> enumParser' tableName enumValuesList customTableName nullability
        Nothing -> throw400 ValidationFailed "empty enum values"
  -- Tag each parsed value with its origin (variable or literal) so later
  -- error messages can point back at the input that produced it.
  pure . GS.C.peelWithOrigin . fmap (RQL.ColumnValue columnType) $ parser
-- | Parser for enum-reference column types.
--
-- Enum table references are not supported by the Data Connector backend:
-- schema building fails with a @NotSupported@ error whenever one is
-- encountered, regardless of the enum's contents.
enumParser' ::
  MonadError QErr m =>
  RQL.TableName 'DataConnector ->
  NonEmpty (RQL.EnumValue, RQL.EnumValueInfo) ->
  Maybe GQL.Name ->
  GQL.Nullability ->
  GS.C.SchemaT r m (P.Parser 'P.Both n (RQL.ScalarValue 'DataConnector))
enumParser' _ _ _ _ =
  throw400 NotSupported "This column type is unsupported by the Data Connector backend"
-- | Adapt a scalar value parser to the column's declared nullability.
--
-- For a nullable column, GraphQL @null@ is accepted and mapped to
-- 'J.Null'; for a non-nullable column the parser is used unchanged.
possiblyNullable' ::
  MonadParse m =>
  RQL.ScalarType 'DataConnector ->
  GQL.Nullability ->
  P.Parser 'P.Both m J.Value ->
  P.Parser 'P.Both m J.Value
possiblyNullable' _scalarType (GQL.Nullability nullable) valueParser =
  if nullable
    then fromMaybe J.Null <$> P.nullable valueParser
    else valueParser
-- | The order-by enum for this source: ascending/descending directions.
--
-- The enum's type name is the connector name prefixed onto @_order_by@;
-- if that does not form a valid GraphQL name we fall back to the plain
-- @order_by@ name.  The nulls-order component is @()@ for this backend.
orderByOperators' :: RQL.SourceInfo 'DataConnector -> NamingCase -> (GQL.Name, NonEmpty (P.Definition P.EnumValueInfo, (RQL.BasicOrderType 'DataConnector, RQL.NullsOrderType 'DataConnector)))
orderByOperators' RQL.SourceInfo {_siConfiguration} _tCase =
  -- NOTE: NamingCase is ignored here as naming conventions are not
  -- supported for this backend.
  let connectorName = Adapter._scDataConnectorName _siConfiguration
      prefixedName = GQL.mkName $ NET.unNonEmptyText (Adapter.unDataConnectorName connectorName) <> "_order_by"
      enumName = fromMaybe Name._order_by prefixedName
      directions =
        NE.fromList
          [ (mkDefinition $$(GQL.litName "asc") "in ascending order", (IR.O.Ascending, ())),
            (mkDefinition $$(GQL.litName "desc") "in descending order", (IR.O.Descending, ()))
          ]
   in (enumName, directions)
  where
    mkDefinition name description = P.Definition name (Just description) Nothing [] P.EnumValueInfo
-- | Build the boolean comparison input object for a column type: the
-- standard equality and ordering operators plus any custom comparison
-- operators advertised in the connector's capabilities.
--
-- Memoized per (connector name, column type) so the same input object
-- definition is shared across the whole schema.
comparisonExps' ::
  forall m n r.
  MonadBuildSchema 'DataConnector r m n =>
  RQL.SourceInfo 'DataConnector ->
  RQL.ColumnType 'DataConnector ->
  GS.C.SchemaT r m (P.Parser 'P.Input n [ComparisonExp 'DataConnector])
comparisonExps' sourceInfo columnType = P.memoizeOn 'comparisonExps' (dataConnectorName, columnType) $ do
  tCase <- asks getter
  collapseIfNull <- GS.C.retrieve Options.soDangerousBooleanCollapse
  -- Parser for single values of this column type; its GraphQL name seeds
  -- the comparison-exp type name and the custom-operator lookup below.
  typedParser <- columnParser' columnType (GQL.Nullability False)
  let name = P.getName typedParser <> $$(GQL.litName "_Dynamic_comparison_exp")
      desc =
        GQL.Description $
          "Boolean expression to compare columns of type "
            <> P.getName typedParser
            <<> ". All fields are combined with logical 'AND'."
      -- List-of-values parser (used by list-valued operators such as _in).
      columnListParser = fmap IR.openValueOrigin <$> P.list typedParser
  customOperators <- (fmap . fmap . fmap) IR.ABackendSpecific <$> mkCustomOperators tCase collapseIfNull (P.getName typedParser)
  pure $
    P.object name (Just desc) $
      fmap catMaybes $
        sequenceA $
          concat
            [ GS.BE.equalityOperators
                tCase
                collapseIfNull
                (IR.mkParameter <$> typedParser)
                (mkListLiteral <$> columnListParser),
              GS.BE.comparisonOperators
                tCase
                collapseIfNull
                (IR.mkParameter <$> typedParser),
              customOperators
            ]
  where
    -- Part of the memoization key: comparison exps are shared per connector.
    dataConnectorName = sourceInfo ^. RQL.siConfiguration . Adapter.scDataConnectorName

    -- Wrap a list of column values as a single array literal.
    mkListLiteral :: [RQL.ColumnValue 'DataConnector] -> IR.UnpreparedValue 'DataConnector
    mkListLiteral columnValues =
      IR.UVLiteral . IR.S.V.ArrayLiteral $ RQL.cvValue <$> columnValues

    -- Look up this type's comparison input object in the connector's
    -- capabilities and build a field parser for each declared operator.
    mkCustomOperators ::
      NamingCase ->
      Options.DangerouslyCollapseBooleans ->
      GQL.Name ->
      GS.C.SchemaT r m [P.InputFieldsParser n (Maybe (CustomBooleanOperator (IR.UnpreparedValue 'DataConnector)))]
    mkCustomOperators tCase collapseIfNull typeName = do
      let capabilities = sourceInfo ^. RQL.siConfiguration . Adapter.scCapabilities
      case lookupComparisonInputObjectDefinition capabilities typeName of
        Nothing -> pure []
        Just GQL.InputObjectTypeDefinition {..} -> do
          traverse (mkCustomOperator tCase collapseIfNull) _iotdValueDefinitions

    -- One optional input field per custom operator declaration.
    mkCustomOperator ::
      NamingCase ->
      Options.DangerouslyCollapseBooleans ->
      GQL.InputValueDefinition ->
      GS.C.SchemaT r m (P.InputFieldsParser n (Maybe (CustomBooleanOperator (IR.UnpreparedValue 'DataConnector))))
    mkCustomOperator tCase collapseIfNull GQL.InputValueDefinition {..} = do
      argParser <- mkArgParser _ivdType
      pure $
        GS.BE.mkBoolOperator tCase collapseIfNull (fromCustomName _ivdName) _ivdDescription $
          CustomBooleanOperator (GQL.unName _ivdName) . Just . Right <$> argParser

    -- Parser for a custom operator's argument, derived from its declared
    -- GraphQL type (base type + nullability).
    mkArgParser :: GQL.GType -> GS.C.SchemaT r m (P.Parser 'P.Both n (IR.UnpreparedValue 'DataConnector))
    mkArgParser argType =
      fmap IR.mkParameter
        <$> columnParser'
          (RQL.ColumnScalar $ IR.S.T.fromGQLType $ GQL.getBaseType argType)
          (GQL.Nullability $ GQL.isNotNull argType)
-- | Argument parsers for table selection fields: @where@, @order_by@,
-- @limit@ and @offset@.  @distinct_on@ is not supported by this backend
-- and is always 'Nothing'.
tableArgs' ::
  forall r m n.
  MonadBuildSchema 'DataConnector r m n =>
  RQL.SourceInfo 'DataConnector ->
  RQL.TableInfo 'DataConnector ->
  GS.C.SchemaT r m (P.InputFieldsParser n (IR.SelectArgsG 'DataConnector (IR.UnpreparedValue 'DataConnector)))
tableArgs' sourceName tableInfo = do
  whereParser <- GS.S.tableWhereArg sourceName tableInfo
  orderByParser <- GS.S.tableOrderByArg sourceName tableInfo
  let mkSelectArgs whereArg orderByArg limitArg offsetArg =
        IR.SelectArgs
          { _saWhere = whereArg,
            _saOrderBy = orderByArg,
            _saLimit = limitArg,
            _saOffset = offsetArg,
            -- distinct_on is not supported by the Data Connector backend
            _saDistinct = Nothing
          }
  pure $
    mkSelectArgs
      <$> whereParser
      <*> orderByParser
      <*> GS.S.tableLimitArg
      <*> GS.S.tableOffsetArg
-- | Input parser for the @count@ aggregate's optional @column@ argument.
--
-- When a column-enum parser is available, an optional @column@ field is
-- offered; a chosen column yields a per-column (optionally distinct)
-- count, otherwise the aggregate is a plain star count.
countTypeInput' ::
  MonadParse n =>
  Maybe (P.Parser 'P.Both n IR.C.Name) ->
  P.InputFieldsParser n (IR.CountDistinct -> IR.A.CountAggregate)
countTypeInput' mColumnEnum =
  case mColumnEnum of
    Nothing -> pure (toCountAggregate Nothing)
    Just enumParser -> toCountAggregate <$> P.fieldOptional Name._column Nothing enumParser
  where
    -- Combine the (possibly absent) column choice with the distinctness
    -- flag into a concrete count aggregate.
    toCountAggregate :: Maybe IR.C.Name -> IR.CountDistinct -> IR.A.CountAggregate
    toCountAggregate mColumn distinctness =
      case (mColumn, distinctness) of
        (Nothing, _) -> IR.A.StarCount
        (Just column, IR.SelectCountDistinct) -> IR.A.ColumnDistinctCount column
        (Just column, IR.SelectCountNonDistinct) -> IR.A.ColumnCount column