server: support for Apollo federation

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/4584
Co-authored-by: Auke Booij <164426+abooij@users.noreply.github.com>
GitOrigin-RevId: 0f60c263efb5fbaa25620dd8159e8cfda25a61b2
paritosh-08 2022-07-25 21:23:25 +05:30 committed by hasura-bot
parent 73d8d35df3
commit 95adde4ce2
57 changed files with 3174 additions and 1032 deletions

View File

@@ -36,6 +36,7 @@ post-webhook
get-webhook
insecure-webhook
insecure-webhook-with-admin-secret
apollo-federation
allowlist-queries
jwk-url
horizontal-scaling

View File

@@ -1024,6 +1024,22 @@ insecure-webhook-with-admin-secret)
kill $WH_PID
;;
apollo-federation)
echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH APOLLO FEDERATION ########>\n"
export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM"
export HASURA_GRAPHQL_EXPERIMENTAL_FEATURES="apollo_federation"
run_hge_with_args serve
wait_for_port 8080
pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" test_apollo_federation.py
unset HASURA_GRAPHQL_EXPERIMENTAL_FEATURES
unset HASURA_GRAPHQL_ADMIN_SECRET
kill_hge_servers
;;
allowlist-queries)
# allowlist queries test
# unset HASURA_GRAPHQL_AUTH_HOOK

View File

@@ -2,6 +2,48 @@
## Next release
### Introducing Apollo Federation v1 support (experimental)
HGE can now be used as a subgraph in an Apollo federated GraphQL server.
You can read more about this feature in [the RFC](https://github.com/hasura/graphql-engine/blob/master/rfcs/apollo-federation.md).
This is an experimental feature and can be enabled by setting
`HASURA_GRAPHQL_EXPERIMENTAL_FEATURES: apollo_federation`. It is supported
across all databases. To expose a table through an Apollo federated gateway,
Apollo federation has to be enabled in the table's metadata. This can be done
via the `*_track_table` metadata API; console support will be added soon.
For example, given a table called `user` in a Postgres database that is not yet
being tracked by Hasura, we can run `pg_track_table` (the Postgres variant of
`*_track_table`) to track it and enable Apollo federation for it:
```
POST /v1/metadata HTTP/1.1
Content-Type: application/json
X-Hasura-Role: admin
```
```json
{
  "type": "pg_track_table",
  "args": {
    "table": "user",
    "schema": "public",
    "apollo_federation_config": {
      "enable": "v1"
    }
  }
}
```
The above API call adds the `@key` directive to the table's GraphQL type, with
its `fields` argument set to the table's primary key (say `id`), i.e.:
```graphql
type user @key(fields: "id") {
  id: Int!
  name: String
  ...
}
```
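As an illustrative sanity check (the exact query an Apollo gateway sends may
differ), the new `_service` and `_entities` root fields can also be queried
directly against HGE, using the `user` type from the example above:
```graphql
query ($representations: [_Any!]!) {
  _service {
    sdl
  }
  _entities(representations: $representations) {
    ... on user {
      id
      name
    }
  }
}
```
with variables such as:
```json
{
  "representations": [{ "__typename": "user", "id": 1 }]
}
```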
### Behaviour changes
- server: When providing a JSON path in a JWT claims map, you can now use

View File

@@ -526,8 +526,8 @@ elif [ "$MODE" = "test" ]; then
# are defined.
export HASURA_GRAPHQL_PG_SOURCE_URL_1=${HASURA_GRAPHQL_PG_SOURCE_URL_1-$PG_DB_URL}
export HASURA_GRAPHQL_PG_SOURCE_URL_2=${HASURA_GRAPHQL_PG_SOURCE_URL_2-$PG_DB_URL}
export HASURA_GRAPHQL_EXPERIMENTAL_FEATURES="inherited_roles, naming_convention"
export HASURA_GRAPHQL_MSSQL_SOURCE_URL=$MSSQL_CONN_STR
export HGE_URL="http://127.0.0.1:$HASURA_GRAPHQL_SERVER_PORT"
# Using --metadata-database-url flag to test multiple backends
# HASURA_GRAPHQL_PG_SOURCE_URL_* For a couple multi-source pytests:

View File

@@ -762,6 +762,7 @@ library
, Hasura.RQL.IR.Root
, Hasura.RQL.IR
, Hasura.GraphQL.Analyse
, Hasura.GraphQL.ApolloFederation
, Hasura.GraphQL.Context
, Hasura.GraphQL.Execute
, Hasura.GraphQL.Execute.Action

View File

@@ -26,6 +26,7 @@ module Data.Aeson.Ordered
Data.Aeson.Ordered.lookup,
toOrdered,
fromOrdered,
fromOrderedHashMap,
)
where
@@ -102,6 +103,10 @@ safeUnion (Object_ x) (Object_ y) =
empty :: Object
empty = Object_ mempty
-- | Ordered Value from ordered hashmap
fromOrderedHashMap :: InsOrdHashMap Text Value -> Value
fromOrderedHashMap = Object . Object_
-- | Insert before the element at index i. Think of it in terms of
-- 'splitAt', which is (take k, drop k). Deletes existing key, if any.
insert :: (Int, Text) -> Value -> Object -> Object

View File

@@ -208,11 +208,11 @@ bqColumnParser columnType (G.Nullability isNullable) =
| otherwise = id
mkEnumValue :: (EnumValue, EnumValueInfo) -> (P.Definition P.EnumValueInfo, ScalarValue 'BigQuery)
mkEnumValue (EnumValue value, EnumValueInfo description) =
( P.Definition value (G.Description <$> description) Nothing P.EnumValueInfo,
( P.Definition value (G.Description <$> description) Nothing [] P.EnumValueInfo,
BigQuery.StringValue $ G.unName value
)
throughJSON scalarName =
let schemaType = P.TNamed P.NonNullable $ P.Definition scalarName Nothing Nothing P.TIScalar
let schemaType = P.TNamed P.NonNullable $ P.Definition scalarName Nothing Nothing [] P.TIScalar
in P.Parser
{ pType = schemaType,
pParser =
@@ -221,7 +221,7 @@ bqColumnParser columnType (G.Nullability isNullable) =
}
stringBased :: MonadParse m => G.Name -> Parser 'Both m Text
stringBased scalarName =
P.string {P.pType = P.TNamed P.NonNullable $ P.Definition scalarName Nothing Nothing P.TIScalar}
P.string {P.pType = P.TNamed P.NonNullable $ P.Definition scalarName Nothing Nothing [] P.TIScalar}
bqScalarSelectionArgumentsParser ::
MonadParse n =>
@@ -261,7 +261,7 @@ bqOrderByOperators _tCase =
)
]
where
define name desc = P.Definition name (Just desc) Nothing P.EnumValueInfo
define name desc = P.Definition name (Just desc) Nothing [] P.EnumValueInfo
bqComparisonExps ::
forall m n r.

View File

@@ -125,7 +125,7 @@ orderByOperators' RQL.SourceInfo {_siConfiguration} _tCase =
)
]
where
define name desc = P.Definition name (Just desc) Nothing P.EnumValueInfo
define name desc = P.Definition name (Just desc) Nothing [] P.EnumValueInfo
comparisonExps' ::
forall m n r.

View File

@@ -283,7 +283,7 @@ msColumnParser columnType (G.Nullability isNullable) =
MSSQL.BitType -> pure $ ODBC.BoolValue <$> P.boolean
_ -> do
name <- MSSQL.mkMSSQLScalarTypeName scalarType
let schemaType = P.TNamed P.NonNullable $ P.Definition name Nothing Nothing P.TIScalar
let schemaType = P.TNamed P.NonNullable $ P.Definition name Nothing Nothing [] P.TIScalar
pure $
P.Parser
{ pType = schemaType,
@@ -303,7 +303,7 @@ msColumnParser columnType (G.Nullability isNullable) =
| otherwise = id
mkEnumValue :: (EnumValue, EnumValueInfo) -> (P.Definition P.EnumValueInfo, ScalarValue 'MSSQL)
mkEnumValue (EnumValue value, EnumValueInfo description) =
( P.Definition value (G.Description <$> description) Nothing P.EnumValueInfo,
( P.Definition value (G.Description <$> description) Nothing [] P.EnumValueInfo,
ODBC.TextValue $ G.unName value
)
@@ -345,7 +345,7 @@ msOrderByOperators _tCase =
)
]
where
define name desc = P.Definition name (Just desc) Nothing P.EnumValueInfo
define name desc = P.Definition name (Just desc) Nothing [] P.EnumValueInfo
msComparisonExps ::
forall m n r.

View File

@@ -132,7 +132,7 @@ tableInsertMatchColumnsEnum sourceInfo tableInfo = do
]
where
define name =
P.Definition name (Just $ G.Description "column name") Nothing P.EnumValueInfo
P.Definition name (Just $ G.Description "column name") Nothing [] P.EnumValueInfo
-- | Check whether a column can be used for match_columns.
isMatchColumnValid :: ColumnInfo 'MSSQL -> Bool

View File

@@ -196,7 +196,7 @@ columnParser' columnType (GQL.Nullability isNullable) =
MySQL.Timestamp -> pure $ possiblyNullable scalarType $ MySQL.TimestampValue <$> P.string
_ -> do
name <- MySQL.mkMySQLScalarTypeName scalarType
let schemaType = P.TNamed P.NonNullable $ P.Definition name Nothing Nothing P.TIScalar
let schemaType = P.TNamed P.NonNullable $ P.Definition name Nothing Nothing [] P.TIScalar
pure $
P.Parser
{ pType = schemaType,
@@ -217,7 +217,7 @@ columnParser' columnType (GQL.Nullability isNullable) =
| otherwise = id
mkEnumValue :: (EnumValue, EnumValueInfo) -> (P.Definition P.EnumValueInfo, RQL.ScalarValue 'MySQL)
mkEnumValue (RQL.EnumValue value, EnumValueInfo description) =
( P.Definition value (GQL.Description <$> description) Nothing P.EnumValueInfo,
( P.Definition value (GQL.Description <$> description) Nothing [] P.EnumValueInfo,
MySQL.VarcharValue $ GQL.unName value
)
@@ -252,7 +252,7 @@ orderByOperators' _tCase =
)
]
where
define name desc = P.Definition name (Just desc) Nothing P.EnumValueInfo
define name desc = P.Definition name (Just desc) Nothing [] P.EnumValueInfo
-- | TODO: Make this as thorough as the one for MSSQL/PostgreSQL
comparisonExps' ::

View File

@@ -368,7 +368,7 @@ columnParser columnType (G.Nullability isNullable) = do
--
-- TODO: introduce new dedicated scalars for Postgres column types.
name <- mkScalarTypeName scalarType
let schemaType = P.TNamed P.NonNullable $ P.Definition name Nothing Nothing P.TIScalar
let schemaType = P.TNamed P.NonNullable $ P.Definition name Nothing Nothing [] P.TIScalar
pure $
P.Parser
{ pType = schemaType,
@@ -392,7 +392,7 @@ columnParser columnType (G.Nullability isNullable) = do
| otherwise = id
mkEnumValue :: NamingCase -> (EnumValue, EnumValueInfo) -> (P.Definition P.EnumValueInfo, PGScalarValue)
mkEnumValue tCase (EnumValue value, EnumValueInfo description) =
( P.Definition (applyEnumValueCase tCase value) (G.Description <$> description) Nothing P.EnumValueInfo,
( P.Definition (applyEnumValueCase tCase value) (G.Description <$> description) Nothing [] P.EnumValueInfo,
PGValText $ G.unName value
)
@@ -450,7 +450,7 @@ orderByOperators tCase =
)
]
where
define name desc = P.Definition name (Just desc) Nothing P.EnumValueInfo
define name desc = P.Definition name (Just desc) Nothing [] P.EnumValueInfo
comparisonExps ::
forall pgKind m n r.

View File

@@ -147,6 +147,7 @@ conflictConstraint constraints sourceInfo tableInfo =
name
(Just $ "unique or primary key constraint on columns " <> coerce (showPGCols (HS.toList cCols)))
Nothing
[]
P.EnumValueInfo,
c
)

View File

@@ -0,0 +1,284 @@
-- | Tools for generating fields for Apollo federation
module Hasura.GraphQL.ApolloFederation
( -- * Field Parser generators
mkEntityUnionFieldParser,
mkServiceField,
apolloRootFields,
ApolloFederationParserFunction (..),
convertToApolloFedParserFunc,
)
where
import Control.Lens ((??))
import Data.Aeson qualified as J
import Data.Aeson.Key qualified as K
import Data.Aeson.KeyMap qualified as KMap
import Data.Aeson.Ordered qualified as JO
import Data.HashMap.Strict qualified as Map
import Data.HashMap.Strict.InsOrd qualified as OMap
import Data.HashSet qualified as Set
import Data.Text qualified as T
import Hasura.Base.Error
import Hasura.Base.ErrorMessage (toErrorMessage)
import Hasura.GraphQL.Parser qualified as P
import Hasura.GraphQL.Schema.Common
import Hasura.GraphQL.Schema.NamingCase
import Hasura.GraphQL.Schema.Options (StringifyNumbers)
import Hasura.GraphQL.Schema.Parser
import Hasura.Name qualified as Name
import Hasura.Prelude
import Hasura.RQL.IR qualified as IR
import Hasura.RQL.IR.Root
import Hasura.RQL.IR.Select
import Hasura.RQL.IR.Value (UnpreparedValue, ValueWithOrigin (ValueNoOrigin))
import Hasura.RQL.Types.Backend
import Hasura.RQL.Types.Column
import Hasura.RQL.Types.Source
import Hasura.RQL.Types.Table
import Hasura.SQL.AnyBackend qualified as AB
import Hasura.Server.Types
import Language.GraphQL.Draft.Printer qualified as Printer
import Language.GraphQL.Draft.Syntax qualified as G
import Text.Builder qualified as Builder
-- | Internal parser function for entities field
data ApolloFederationParserFunction n = ApolloFederationParserFunction
{ aafuGetRootField :: ApolloFederationAnyType -> n (QueryRootField UnpreparedValue)
}
-- | Haskell representation of _Any scalar
data ApolloFederationAnyType = ApolloFederationAnyType
{ afTypename :: G.Name,
afPKValues :: J.Object
}
deriving stock (Show)
-- | Parser for _Any scalar
anyParser :: P.Parser origin 'Both Parse ApolloFederationAnyType
anyParser =
jsonScalar Name.__Any (Just "Scalar _Any") `bind` \val -> do
let typenameKey = K.fromText "__typename"
case val of
J.Object obj -> case KMap.lookup typenameKey obj of
Just (J.String txt) -> case G.mkName txt of
Just tName ->
pure $
ApolloFederationAnyType
{ afTypename = tName,
afPKValues = KMap.delete typenameKey obj
}
Nothing -> P.parseError $ toErrorMessage $ txt <> " is not a valid graphql name"
Nothing -> P.parseError $ toErrorMessage "__typename key not found"
_ -> P.parseError $ toErrorMessage "__typename can only be a string value"
_ -> P.parseError $ toErrorMessage "representations is expecting a list of objects only"
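-- For example, the representation @{"__typename": "user", "id": 1}@ parses into an
-- 'ApolloFederationAnyType' with @afTypename = "user"@ and @afPKValues@ holding the
-- remaining @"id"@ key; inputs without a string @__typename@ key are rejected.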
convertToApolloFedParserFunc ::
(Monad n, MonadParse n, Backend b) =>
SourceInfo b ->
TableInfo b ->
TablePermG b (UnpreparedValue b) ->
StringifyNumbers ->
Maybe NamingCase ->
NESeq (ColumnInfo b) ->
Parser 'Output n (AnnotatedFields b) ->
Parser 'Output n (ApolloFederationParserFunction n)
convertToApolloFedParserFunc sInfo tInfo selPerm stringifyNumbers tCase pKeys =
fmap (modifyApolloFedParserFunc sInfo tInfo selPerm stringifyNumbers tCase pKeys)
modifyApolloFedParserFunc ::
(MonadParse n, Backend b) =>
SourceInfo b ->
TableInfo b ->
TablePermG b (UnpreparedValue b) ->
StringifyNumbers ->
Maybe NamingCase ->
NESeq (ColumnInfo b) ->
AnnotatedFields b ->
ApolloFederationParserFunction n
modifyApolloFedParserFunc
SourceInfo {..}
TableInfo {..}
selectPermissions
stringifyNumbers
tCase
primaryKeys
annField = ApolloFederationParserFunction $ \ApolloFederationAnyType {..} -> do
allConstraints <-
for primaryKeys \columnInfo -> do
let colName = G.unName $ ciName columnInfo
cvType = ciType columnInfo
cvValue <- case KMap.lookup (K.fromText colName) afPKValues of
Nothing -> P.parseError . toErrorMessage $ "cannot find " <> colName <> " in _Any type"
Just va -> liftQErr $ parseScalarValueColumnType (ciType columnInfo) va
pure $
IR.BoolField . IR.AVColumn columnInfo . pure . IR.AEQ True . IR.mkParameter $
ValueNoOrigin $ ColumnValue {..}
let whereExpr = Just $ IR.BoolAnd $ toList allConstraints
sourceName = _siName
sourceConfig = _siConfiguration
tableName = _tciName _tiCoreInfo
queryDBRoot =
IR.QDBR $
IR.QDBSingleRow $
IR.AnnSelectG
{ IR._asnFields = annField,
IR._asnFrom = IR.FromTable tableName,
IR._asnPerm = selectPermissions,
IR._asnArgs = IR.noSelectArgs {IR._saWhere = whereExpr},
IR._asnStrfyNum = stringifyNumbers,
IR._asnNamingConvention = tCase
}
pure $
IR.RFDB sourceName $
AB.mkAnyBackend $
IR.SourceConfigWith sourceConfig Nothing $
queryDBRoot
where
liftQErr = either (P.parseError . toErrorMessage . qeError) pure . runExcept
-------------------------------------------------------------------------------
-- Related to @service@ field
-- main function
-- | Creates @_service@ @FieldParser@ using the schema introspection.
-- This will allow us to process the following query:
--
-- > query {
-- > _service {
-- > sdl
-- > }
-- > }
mkServiceField ::
FieldParser P.Parse (G.SchemaIntrospection -> QueryRootField UnpreparedValue)
mkServiceField = serviceFieldParser
where
sdlField = JO.String . generateSDL <$ P.selection_ Name._sdl (Just "SDL representation of schema") P.string
serviceParser = P.nonNullableParser $ P.selectionSet Name.__Service Nothing [sdlField]
serviceFieldParser =
P.subselection_ Name.__service Nothing serviceParser `bindField` \selSet -> do
let partialValue = OMap.map (\ps -> handleTypename (\tName _ -> JO.toOrdered tName) ps) (OMap.mapKeys G.unName selSet)
pure \schemaIntrospection -> RFRaw . JO.fromOrderedHashMap $ (partialValue ?? schemaIntrospection)
apolloRootFields ::
Set.HashSet ExperimentalFeature ->
[(G.Name, Parser 'Output P.Parse (ApolloFederationParserFunction P.Parse))] ->
[FieldParser P.Parse (G.SchemaIntrospection -> QueryRootField UnpreparedValue)]
apolloRootFields expFeatures apolloFedTableParsers =
let -- generate the `_service` field parser
serviceField = mkServiceField
-- generate the `_entities` field parser
entityField = const <$> mkEntityUnionFieldParser apolloFedTableParsers
in -- we would want to expose these fields inorder to support apollo federation
-- refer https://www.apollographql.com/docs/federation/federation-spec
-- `serviceField` is essential to connect hasura to gateway, `entityField`
-- is essential only if we have types that has @key directive
if
| EFApolloFederation `elem` expFeatures && not (null apolloFedTableParsers) ->
[serviceField, entityField]
| EFApolloFederation `elem` expFeatures ->
[serviceField]
| otherwise -> []
-- helpers
-- | Generate sdl from the schema introspection
generateSDL :: G.SchemaIntrospection -> Text
generateSDL (G.SchemaIntrospection sIntro) = sdl
where
-- NOTE: add this to the sdl to support apollo v2 directive
_supportV2 :: Text
_supportV2 = "\n\nextend schema\n@link(url: \"https://specs.apollo.dev/federation/v2.0\",\nimport: [\"@key\", \"@shareable\"])"
-- first we filter out the type definitions which are not relevent such as
-- schema fields and types (starts with `__`)
typeDefns = mapMaybe filterAndWrapTypeSystemDefinition (Map.elems sIntro)
-- next we get the root operation type definitions
rootOpTypeDefns =
mapMaybe
( \(fieldName, operationType) ->
Map.lookup fieldName sIntro
$> G.RootOperationTypeDefinition operationType fieldName
)
[ (Name._query_root, G.OperationTypeQuery),
(Name._mutation_root, G.OperationTypeMutation),
(Name._subscription_root, G.OperationTypeSubscription)
]
-- finally we gather everything, run the printer and generate full sdl in `Text`
sdl = Builder.run $ Printer.schemaDocument getSchemaDocument
getSchemaDocument :: G.SchemaDocument
getSchemaDocument =
G.SchemaDocument $
G.TypeSystemDefinitionSchema (G.SchemaDefinition Nothing (rootOpTypeDefns)) : typeDefns
-- | Filter out schema components from sdl which are not required by apollo federation and
-- wraps it in `TypeSystemDefinition`
filterAndWrapTypeSystemDefinition :: G.TypeDefinition [G.Name] G.InputValueDefinition -> Maybe G.TypeSystemDefinition
filterAndWrapTypeSystemDefinition = \case
G.TypeDefinitionScalar (G.ScalarTypeDefinition {}) -> Nothing
G.TypeDefinitionInterface (G.InterfaceTypeDefinition a b c d _) ->
Just $ G.TypeSystemDefinitionType (G.TypeDefinitionInterface (G.InterfaceTypeDefinition a b c d ()))
G.TypeDefinitionObject (G.ObjectTypeDefinition a b c d e) ->
-- We are skipping the schema types here
Just . G.TypeSystemDefinitionType . G.TypeDefinitionObject $
G.ObjectTypeDefinition a b c d (filter (not . T.isPrefixOf "__" . G.unName . G._fldName) e)
G.TypeDefinitionUnion defn -> Just $ G.TypeSystemDefinitionType (G.TypeDefinitionUnion defn)
G.TypeDefinitionEnum defn -> Just $ G.TypeSystemDefinitionType (G.TypeDefinitionEnum defn)
G.TypeDefinitionInputObject defn -> Just $ G.TypeSystemDefinitionType (G.TypeDefinitionInputObject defn)
-------------------------------------------------------------------------------
-- Related to @_entities@ field
-- main function
-- | Creates @_entities@ @FieldParser@ using `Parser`s for Entity union, schema
-- introspection and a list of all query `FieldParser`.
-- This will allow us to process the following query:
--
-- > query ($representations: [_Any!]!) {
-- > _entities(representations: $representations) {
-- > ... on SomeType {
-- > foo
-- > bar
-- > }
-- > }
-- > }
mkEntityUnionFieldParser ::
[(G.Name, Parser 'Output Parse (ApolloFederationParserFunction Parse))] ->
FieldParser P.Parse (QueryRootField UnpreparedValue)
mkEntityUnionFieldParser apolloFedTableParsers =
let entityParserMap = Map.fromList apolloFedTableParsers
-- the Union `Entities`
bodyParser = P.selectionSetUnion Name.__Entity (Just "A union of all types that use the @key directive") entityParserMap
-- name of the field
name = Name.__entities
-- description of the field
description = Just "query _Entity union"
representationParser =
field Name._representations Nothing $ list $ anyParser
entityParser =
subselection name description representationParser bodyParser
`bindField` \(parsedArgs, parsedBody) -> do
rootFields <-
for
parsedArgs
( \anyArg ->
case Map.lookup (afTypename anyArg) parsedBody of
Nothing -> (P.parseError . toErrorMessage) $ G.unName (afTypename anyArg) <> " is not found in selection set or apollo federation is not enabled for the type"
Just aafus -> (aafuGetRootField aafus) anyArg
)
pure $ concatQueryRootFields rootFields
in entityParser
-- | concatenates multiple fields
concatQueryRootFields :: [QueryRootField UnpreparedValue] -> QueryRootField UnpreparedValue
concatQueryRootFields = RFMulti

View File

@@ -217,6 +217,7 @@ buildSubscriptionPlan userInfo rootFields parameterizedQueryHash = do
go (accLiveQueryFields, accStreamingFields) (gName, field) = case field of
IR.RFRemote _ -> throw400 NotSupported "subscription to remote server is not supported"
IR.RFRaw _ -> throw400 NotSupported "Introspection not supported over subscriptions"
IR.RFMulti _ -> throw400 NotSupported "not supported over subscriptions"
IR.RFDB src e -> do
let subscriptionType =
case AB.unpackAnyBackend @('Postgres 'Vanilla) e of

View File

@@ -261,6 +261,9 @@ data ExecutionStep where
ExecStepRaw ::
JO.Value ->
ExecutionStep
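-- | Multiple execution steps arising from a single root field (used for the
-- Apollo Federation @_entities@ field, which resolves to one root field per
-- entity representation).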
ExecStepMulti ::
[ExecutionStep] ->
ExecutionStep
-- | The series of steps that need to be executed for a given query. For now, those steps are all
-- independent. In the future, when we implement a client-side dataloader and generalized joins,

View File

@@ -117,30 +117,35 @@ convertMutationSelectionSet
let parameterizedQueryHash = calculateParameterizedQueryHash resolvedSelSet
resolveExecutionSteps rootFieldName rootFieldUnpreparedValue = do
case rootFieldUnpreparedValue of
RFDB sourceName exists ->
AB.dispatchAnyBackend @BackendExecute
exists
\(SourceConfigWith (sourceConfig :: SourceConfig b) queryTagsConfig (MDBR db)) -> do
let mutationQueryTagsAttributes = encodeQueryTags $ QTMutation $ MutationMetadata reqId maybeOperationName rootFieldName parameterizedQueryHash
queryTagsComment = Tagged.untag $ createQueryTags @m mutationQueryTagsAttributes queryTagsConfig
(noRelsDBAST, remoteJoins) = RJ.getRemoteJoinsMutationDB db
dbStepInfo <- flip runReaderT queryTagsComment $ mkDBMutationPlan @b userInfo stringifyNum sourceName sourceConfig noRelsDBAST
pure $ ExecStepDB [] (AB.mkAnyBackend dbStepInfo) remoteJoins
RFRemote remoteField -> do
RemoteSchemaRootField remoteSchemaInfo resultCustomizer resolvedRemoteField <- runVariableCache $ resolveRemoteField userInfo remoteField
let (noRelsRemoteField, remoteJoins) = RJ.getRemoteJoinsGraphQLField resolvedRemoteField
pure $
buildExecStepRemote remoteSchemaInfo resultCustomizer G.OperationTypeMutation noRelsRemoteField remoteJoins (GH._grOperationName gqlUnparsed)
RFAction action -> do
let (noRelsDBAST, remoteJoins) = RJ.getRemoteJoinsActionMutation action
(actionName, _fch) <- pure $ case noRelsDBAST of
AMSync s -> (_aaeName s, _aaeForwardClientHeaders s)
AMAsync s -> (_aamaName s, _aamaForwardClientHeaders s)
plan <- convertMutationAction env logger userInfo manager reqHeaders (Just (GH._grQuery gqlUnparsed)) noRelsDBAST
pure $ ExecStepAction plan (ActionsInfo actionName _fch) remoteJoins -- `_fch` represents the `forward_client_headers` option from the action
-- definition which is currently being ignored for actions that are mutations
RFRaw customFieldVal -> flip onLeft throwError =<< executeIntrospection userInfo customFieldVal introspectionDisabledRoles
RFMulti lst -> do
allSteps <- traverse (resolveExecutionSteps rootFieldName) lst
pure $ ExecStepMulti allSteps
-- Transform the RQL AST into a prepared SQL query
txs <- flip OMap.traverseWithKey unpreparedQueries $ \rootFieldName rootFieldUnpreparedValue -> do
txs <- flip OMap.traverseWithKey unpreparedQueries $ resolveExecutionSteps
case rootFieldUnpreparedValue of
RFDB sourceName exists ->
AB.dispatchAnyBackend @BackendExecute
exists
\(SourceConfigWith (sourceConfig :: SourceConfig b) queryTagsConfig (MDBR db)) -> do
let mutationQueryTagsAttributes = encodeQueryTags $ QTMutation $ MutationMetadata reqId maybeOperationName rootFieldName parameterizedQueryHash
queryTagsComment = Tagged.untag $ createQueryTags @m mutationQueryTagsAttributes queryTagsConfig
(noRelsDBAST, remoteJoins) = RJ.getRemoteJoinsMutationDB db
dbStepInfo <- flip runReaderT queryTagsComment $ mkDBMutationPlan @b userInfo stringifyNum sourceName sourceConfig noRelsDBAST
pure $ ExecStepDB [] (AB.mkAnyBackend dbStepInfo) remoteJoins
RFRemote remoteField -> do
RemoteSchemaRootField remoteSchemaInfo resultCustomizer resolvedRemoteField <- runVariableCache $ resolveRemoteField userInfo remoteField
let (noRelsRemoteField, remoteJoins) = RJ.getRemoteJoinsGraphQLField resolvedRemoteField
pure $
buildExecStepRemote remoteSchemaInfo resultCustomizer G.OperationTypeMutation noRelsRemoteField remoteJoins (GH._grOperationName gqlUnparsed)
RFAction action -> do
let (noRelsDBAST, remoteJoins) = RJ.getRemoteJoinsActionMutation action
(actionName, _fch) <- pure $ case noRelsDBAST of
AMSync s -> (_aaeName s, _aaeForwardClientHeaders s)
AMAsync s -> (_aamaName s, _aamaForwardClientHeaders s)
plan <- convertMutationAction env logger userInfo manager reqHeaders (Just (GH._grQuery gqlUnparsed)) noRelsDBAST
pure $ ExecStepAction plan (ActionsInfo actionName _fch) remoteJoins -- `_fch` represents the `forward_client_headers` option from the action
-- definition which is currently being ignored for actions that are mutations
RFRaw s -> flip onLeft throwError =<< executeIntrospection userInfo s introspectionDisabledRoles
return (txs, parameterizedQueryHash)

View File

@@ -101,27 +101,31 @@ convertQuerySelSet
let parameterizedQueryHash = calculateParameterizedQueryHash normalizedSelectionSet
resolveExecutionSteps rootFieldName rootFieldUnpreparedValue = do
case rootFieldUnpreparedValue of
RFMulti lst -> do
allSteps <- traverse (resolveExecutionSteps rootFieldName) lst
pure $ ExecStepMulti allSteps
RFDB sourceName exists ->
AB.dispatchAnyBackend @BackendExecute
exists
\(SourceConfigWith (sourceConfig :: (SourceConfig b)) queryTagsConfig (QDBR db)) -> do
let queryTagsAttributes = encodeQueryTags $ QTQuery $ QueryMetadata reqId maybeOperationName rootFieldName parameterizedQueryHash
queryTagsComment = Tagged.untag $ createQueryTags @m queryTagsAttributes queryTagsConfig
(noRelsDBAST, remoteJoins) = RJ.getRemoteJoinsQueryDB db
dbStepInfo <- flip runReaderT queryTagsComment $ mkDBQueryPlan @b userInfo env sourceName sourceConfig noRelsDBAST
pure $ ExecStepDB [] (AB.mkAnyBackend dbStepInfo) remoteJoins
RFRemote rf -> do
RemoteSchemaRootField remoteSchemaInfo resultCustomizer remoteField <- runVariableCache $ for rf $ resolveRemoteVariable userInfo
let (noRelsRemoteField, remoteJoins) = RJ.getRemoteJoinsGraphQLField remoteField
pure $ buildExecStepRemote remoteSchemaInfo resultCustomizer G.OperationTypeQuery noRelsRemoteField remoteJoins (GH._grOperationName gqlUnparsed)
RFAction action -> do
let (noRelsDBAST, remoteJoins) = RJ.getRemoteJoinsActionQuery action
(actionExecution, actionName, fch) <- pure $ case noRelsDBAST of
AQQuery s -> (AEPSync $ resolveActionExecution env logger userInfo s (ActionExecContext manager reqHeaders (_uiSession userInfo)) (Just (GH._grQuery gqlUnparsed)), _aaeName s, _aaeForwardClientHeaders s)
AQAsync s -> (AEPAsyncQuery $ AsyncActionQueryExecutionPlan (_aaaqActionId s) $ resolveAsyncActionQuery userInfo s, _aaaqName s, _aaaqForwardClientHeaders s)
pure $ ExecStepAction actionExecution (ActionsInfo actionName fch) remoteJoins
RFRaw r -> flip onLeft throwError =<< executeIntrospection userInfo r introspectionDisabledRoles
-- 3. Transform the 'RootFieldMap' into an execution plan
executionPlan <- flip OMap.traverseWithKey unpreparedQueries $ \rootFieldName rootFieldUnpreparedValue -> do
executionPlan <- flip OMap.traverseWithKey unpreparedQueries $ resolveExecutionSteps
case rootFieldUnpreparedValue of
RFDB sourceName exists ->
AB.dispatchAnyBackend @BackendExecute
exists
\(SourceConfigWith (sourceConfig :: (SourceConfig b)) queryTagsConfig (QDBR db)) -> do
let queryTagsAttributes = encodeQueryTags $ QTQuery $ QueryMetadata reqId maybeOperationName rootFieldName parameterizedQueryHash
queryTagsComment = Tagged.untag $ createQueryTags @m queryTagsAttributes queryTagsConfig
(noRelsDBAST, remoteJoins) = RJ.getRemoteJoinsQueryDB db
dbStepInfo <- flip runReaderT queryTagsComment $ mkDBQueryPlan @b userInfo env sourceName sourceConfig noRelsDBAST
pure $ ExecStepDB [] (AB.mkAnyBackend dbStepInfo) remoteJoins
RFRemote rf -> do
RemoteSchemaRootField remoteSchemaInfo resultCustomizer remoteField <- runVariableCache $ for rf $ resolveRemoteVariable userInfo
let (noRelsRemoteField, remoteJoins) = RJ.getRemoteJoinsGraphQLField remoteField
pure $ buildExecStepRemote remoteSchemaInfo resultCustomizer G.OperationTypeQuery noRelsRemoteField remoteJoins (GH._grOperationName gqlUnparsed)
RFAction action -> do
let (noRelsDBAST, remoteJoins) = RJ.getRemoteJoinsActionQuery action
(actionExecution, actionName, fch) <- pure $ case noRelsDBAST of
AQQuery s -> (AEPSync $ resolveActionExecution env logger userInfo s (ActionExecContext manager reqHeaders (_uiSession userInfo)) (Just (GH._grQuery gqlUnparsed)), _aaeName s, _aaeForwardClientHeaders s)
AQAsync s -> (AEPAsyncQuery $ AsyncActionQueryExecutionPlan (_aaaqActionId s) $ resolveAsyncActionQuery userInfo s, _aaaqName s, _aaaqForwardClientHeaders s)
pure $ ExecStepAction actionExecution (ActionsInfo actionName fch) remoteJoins
RFRaw r -> flip onLeft throwError =<< executeIntrospection userInfo r introspectionDisabledRoles
pure (executionPlan, OMap.elems unpreparedQueries, dirMap, parameterizedQueryHash)

View File

@@ -61,6 +61,7 @@ explainQueryField userInfo fieldName rootField = do
RFRemote _ -> throw400 InvalidParams "only hasura queries can be explained"
RFAction _ -> throw400 InvalidParams "query actions cannot be explained"
RFRaw _ -> pure $ encJFromJValue $ ExplainPlan fieldName Nothing Nothing
RFMulti _ -> pure $ encJFromJValue $ ExplainPlan fieldName Nothing Nothing
RFDB sourceName exists -> do
step <- AB.dispatchAnyBackend @BackendExecute
exists

View File

@@ -242,7 +242,7 @@ field ::
InputFieldsParser origin m a
field name description parser =
InputFieldsParser
{ ifDefinitions = [Definition name description Nothing $ InputFieldInfo (pType parser) Nothing],
{ ifDefinitions = [Definition name description Nothing [] $ InputFieldInfo (pType parser) Nothing],
ifParser = \values -> withKey (A.Key (K.fromText (unName name))) do
value <-
maybe (parseError ("missing required field " <> toErrorValue name)) pure $ M.lookup name values <|> nullableDefault
@@ -271,7 +271,7 @@ fieldOptional ::
fieldOptional name description parser =
InputFieldsParser
{ ifDefinitions =
[ Definition name description Nothing $
[ Definition name description Nothing [] $
InputFieldInfo (nullableType $ pType parser) Nothing
],
ifParser =
@@ -295,7 +295,7 @@ fieldWithDefault ::
InputFieldsParser origin m a
fieldWithDefault name description defaultValue parser =
InputFieldsParser
{ ifDefinitions = [Definition name description Nothing $ InputFieldInfo (pType parser) (Just defaultValue)],
{ ifDefinitions = [Definition name description Nothing [] $ InputFieldInfo (pType parser) (Just defaultValue)],
ifParser =
M.lookup name
>>> withKey (A.Key (K.fromText (unName name))) . \case
@@ -325,7 +325,7 @@ enum name description values =
other -> typeMismatch name "an enum value" other
}
where
schemaType = TNamed NonNullable $ Definition name description Nothing $ TIEnum (fst <$> values)
schemaType = TNamed NonNullable $ Definition name description Nothing [] $ TIEnum (fst <$> values)
valuesMap = M.fromList $ over (traverse . _1) dName $ NonEmpty.toList values
validate value =
maybe invalidType pure $ M.lookup value valuesMap
@@ -369,7 +369,7 @@ object name description parser =
where
schemaType =
TNamed NonNullable $
Definition name description Nothing $
Definition name description Nothing [] $
TIInputObject (InputObjectInfo (ifDefinitions parser))
fieldNames = S.fromList (dName <$> ifDefinitions parser)
parseFields fields = do

View File

@@ -30,6 +30,7 @@ import Data.Hashable (Hashable)
import Data.Maybe qualified as Maybe
import Data.Traversable (for)
import Data.Type.Equality
import Data.Void (Void)
import Hasura.Base.Error
import Hasura.Base.ErrorMessage
import Hasura.Base.ToErrorValue
@@ -100,17 +101,17 @@ nullable parser =
-- | Decorate a schema field as NON_NULL
nonNullableField :: forall m origin a. FieldParser origin m a -> FieldParser origin m a
nonNullableField (FieldParser (Definition n d o (FieldInfo as t)) p) =
FieldParser (Definition n d o (FieldInfo as (nonNullableType t))) p
nonNullableField (FieldParser (Definition n d o dLst (FieldInfo as t)) p) =
FieldParser (Definition n d o dLst (FieldInfo as (nonNullableType t))) p
-- | Decorate a schema field as NULL
nullableField :: forall m origin a. FieldParser origin m a -> FieldParser origin m a
nullableField (FieldParser (Definition n d o (FieldInfo as t)) p) =
FieldParser (Definition n d o (FieldInfo as (nullableType t))) p
nullableField (FieldParser (Definition n d o dLst (FieldInfo as t)) p) =
FieldParser (Definition n d o dLst (FieldInfo as (nullableType t))) p
multipleField :: forall m origin a. FieldParser origin m a -> FieldParser origin m a
multipleField (FieldParser (Definition n d o (FieldInfo as t)) p) =
FieldParser (Definition n d o (FieldInfo as (TList Nullable t))) p
multipleField (FieldParser (Definition n d o dLst (FieldInfo as t)) p) =
FieldParser (Definition n d o dLst (FieldInfo as (TList Nullable t))) p
-- | Decorate a schema field with reference to given @'G.GType'
wrapFieldParser :: forall m origin a. G.GType -> FieldParser origin m a -> FieldParser origin m a
@@ -142,14 +143,33 @@ setParserOrigin o (Parser typ p) =
-- | Set the metadata origin of a 'FieldParser'
setFieldParserOrigin :: forall m origin a. origin -> FieldParser origin m a -> FieldParser origin m a
setFieldParserOrigin o (FieldParser (Definition n d _ i) p) =
FieldParser (Definition n d (Just o) i) p
setFieldParserOrigin o (FieldParser (Definition n d _ dLst i) p) =
FieldParser (Definition n d (Just o) dLst i) p
-- | Set the metadata origin of the arguments in a 'InputFieldsParser'
setInputFieldsParserOrigin :: forall m origin a. origin -> InputFieldsParser origin m a -> InputFieldsParser origin m a
setInputFieldsParserOrigin o (InputFieldsParser defs p) =
InputFieldsParser (map (setDefinitionOrigin o) defs) p
-- | Set the directives of a 'Definition'
setDefinitionDirectives :: [G.Directive Void] -> Definition origin a -> Definition origin a
setDefinitionDirectives dLst def = def {dDirectives = dLst}
-- | Set the directives of a 'Parser'
setParserDirectives :: forall origin k m a. [G.Directive Void] -> Parser origin k m a -> Parser origin k m a
setParserDirectives dLst (Parser typ p) =
Parser (onTypeDef (setDefinitionDirectives dLst) typ) p
-- | Set the directives of a 'FieldParser'
setFieldParserDirectives :: forall m origin a. [G.Directive Void] -> FieldParser origin m a -> FieldParser origin m a
setFieldParserDirectives dLst (FieldParser (Definition n d o _ i) p) =
FieldParser (Definition n d o dLst i) p
-- | Set the directives of the arguments in a 'InputFieldsParser'
setInputFieldsParserDirectives :: forall m origin a. [G.Directive Void] -> InputFieldsParser origin m a -> InputFieldsParser origin m a
setInputFieldsParserDirectives dLst (InputFieldsParser defs p) =
InputFieldsParser (map (setDefinitionDirectives dLst) defs) p
-- | A variant of 'selectionSetObject' which doesn't implement any interfaces
selectionSet ::
MonadParse m =>
@@ -207,7 +227,7 @@ selectionSetObject name description parsers implementsInterfaces =
Parser
{ pType =
TNamed Nullable $
Definition name description Nothing $
Definition name description Nothing [] $
TIObject $ ObjectInfo (map fDefinition parsers) interfaces,
pParser = \input -> withKey (Key "selectionSet") do
-- Not all fields have a selection set, but if they have one, it
@@ -263,7 +283,7 @@ selectionSetInterface name description fields objectImplementations =
Parser
{ pType =
TNamed Nullable $
Definition name description Nothing $
Definition name description Nothing [] $
TIInterface $ InterfaceInfo (map fDefinition fields) objects,
pParser = \input -> for objectImplementations (($ input) . pParser)
-- Note: This is somewhat suboptimal, since it parses a query against every
@@ -290,7 +310,7 @@ selectionSetUnion name description objectImplementations =
Parser
{ pType =
TNamed Nullable $
Definition name description Nothing $
Definition name description Nothing [] $
TIUnion $ UnionInfo objects,
pParser = \input -> for objectImplementations (($ input) . pParser)
}
@@ -330,7 +350,7 @@ rawSelection ::
rawSelection name description argumentsParser resultParser =
FieldParser
{ fDefinition =
Definition name description Nothing $
Definition name description Nothing [] $
FieldInfo (ifDefinitions argumentsParser) (pType resultParser),
fParser = \Field {_fAlias, _fArguments, _fSelectionSet} -> do
unless (null _fSelectionSet) $
@@ -386,7 +406,7 @@ rawSubselection ::
rawSubselection name description argumentsParser bodyParser =
FieldParser
{ fDefinition =
Definition name description Nothing $
Definition name description Nothing [] $
FieldInfo (ifDefinitions argumentsParser) (pType bodyParser),
fParser = \Field {_fAlias, _fArguments, _fSelectionSet} -> do
-- check for extraneous arguments here, since the InputFieldsParser just

View File

@@ -161,7 +161,7 @@ unsafeRawScalar ::
Parser origin 'Both n (InputValue Variable)
unsafeRawScalar name description =
Parser
{ pType = TNamed NonNullable $ Definition name description Nothing TIScalar,
{ pType = TNamed NonNullable $ Definition name description Nothing [] TIScalar,
pParser = pure
}
@@ -174,7 +174,7 @@ jsonScalar name description =
pParser = valueToJSON $ toGraphQLType schemaType
}
where
schemaType = TNamed NonNullable $ Definition name description Nothing TIScalar
schemaType = TNamed NonNullable $ Definition name description Nothing [] TIScalar
--------------------------------------------------------------------------------
-- Local helpers
@@ -191,7 +191,7 @@ mkScalar name description parser =
pParser = peelVariable (toGraphQLType schemaType) >=> parser
}
where
schemaType = TNamed NonNullable $ Definition name description Nothing TIScalar
schemaType = TNamed NonNullable $ Definition name description Nothing [] TIScalar
convertWith ::
MonadParse m =>

View File

@@ -570,12 +570,12 @@ getInterfaceInfo t = case getTypeInfo t of
data SomeDefinitionTypeInfo origin = forall k. SomeDefinitionTypeInfo (Definition origin (TypeInfo origin k))
instance HasName (SomeDefinitionTypeInfo origin) where
getName (SomeDefinitionTypeInfo (Definition n _ _ _)) = n
getName (SomeDefinitionTypeInfo (Definition n _ _ _ _)) = n
instance Eq (SomeDefinitionTypeInfo origin) where
-- Same as instance Eq Definition
SomeDefinitionTypeInfo (Definition name1 _ _ ti1)
== SomeDefinitionTypeInfo (Definition name2 _ _ ti2) =
SomeDefinitionTypeInfo (Definition name1 _ _ _ ti1)
== SomeDefinitionTypeInfo (Definition name2 _ _ _ ti2) =
name1 == name2 && eqTypeInfo ti1 ti2
data Definition origin a = Definition
@@ -598,6 +598,8 @@ data Definition origin a = Definition
-- Maybe, at some point, it makes sense to represent the above options more
-- accurately in the type of 'dOrigin'.
dOrigin :: Maybe origin,
-- | The directives for this object.
dDirectives :: [G.Directive Void],
-- | Lazy to allow mutually-recursive type definitions.
dInfo :: ~a
}
@@ -613,8 +615,8 @@ instance Eq a => Eq (Definition origin a) where
instance Eq1 (Definition origin) where
liftEq
eq
(Definition name1 _ _ info1)
(Definition name2 _ _ info2) =
(Definition name1 _ _ _ info1)
(Definition name2 _ _ _ info2) =
name1 == name2 && eq info1 info2
instance HasName (Definition origin a) where

View File

@@ -6,7 +6,6 @@ module Hasura.GraphQL.Parser.Schema.Convert
where
import Data.List.NonEmpty qualified as NonEmpty
import Data.Void (Void)
import Hasura.GraphQL.Parser.Schema
import Language.GraphQL.Draft.Syntax qualified as G
import Prelude
@@ -27,14 +26,14 @@ convertType (SomeDefinitionTypeInfo Definition {..}) = case dInfo of
G.ScalarTypeDefinition
{ G._stdDescription = dDescription,
G._stdName = dName,
G._stdDirectives = noDirectives
G._stdDirectives = dDirectives
}
TIEnum enumInfo ->
G.TypeDefinitionEnum $
G.EnumTypeDefinition
{ G._etdDescription = dDescription,
G._etdName = dName,
G._etdDirectives = noDirectives,
G._etdDirectives = dDirectives,
G._etdValueDefinitions = map convertEnumValue $ NonEmpty.toList enumInfo
}
TIInputObject (InputObjectInfo values) ->
@@ -42,7 +41,7 @@ convertType (SomeDefinitionTypeInfo Definition {..}) = case dInfo of
G.InputObjectTypeDefinition
{ G._iotdDescription = dDescription,
G._iotdName = dName,
G._iotdDirectives = noDirectives,
G._iotdDirectives = dDirectives,
G._iotdValueDefinitions = map convertInputField values
}
TIObject (ObjectInfo fields interfaces) ->
@@ -50,7 +49,7 @@ convertType (SomeDefinitionTypeInfo Definition {..}) = case dInfo of
G.ObjectTypeDefinition
{ G._otdDescription = dDescription,
G._otdName = dName,
G._otdDirectives = noDirectives,
G._otdDirectives = dDirectives,
G._otdImplementsInterfaces = map getDefinitionName interfaces,
G._otdFieldsDefinition = map convertField fields
}
@@ -59,7 +58,7 @@ convertType (SomeDefinitionTypeInfo Definition {..}) = case dInfo of
G.InterfaceTypeDefinition
{ G._itdDescription = dDescription,
G._itdName = dName,
G._itdDirectives = noDirectives,
G._itdDirectives = dDirectives,
G._itdFieldsDefinition = map convertField fields,
G._itdPossibleTypes = map getDefinitionName possibleTypes
}
@@ -68,7 +67,7 @@ convertType (SomeDefinitionTypeInfo Definition {..}) = case dInfo of
G.UnionTypeDefinition
{ G._utdDescription = dDescription,
G._utdName = dName,
G._utdDirectives = noDirectives,
G._utdDirectives = dDirectives,
G._utdMemberTypes = map getDefinitionName possibleTypes
}
@@ -77,7 +76,7 @@ convertEnumValue Definition {..} =
G.EnumValueDefinition
{ G._evdDescription = dDescription,
G._evdName = G.EnumValue dName,
G._evdDirectives = noDirectives
G._evdDirectives = dDirectives
}
convertInputField :: Definition origin (InputFieldInfo origin) -> G.InputValueDefinition
@@ -88,7 +87,7 @@ convertInputField Definition {..} = case dInfo of
G._ivdName = dName,
G._ivdType = toGraphQLType typeInfo,
G._ivdDefaultValue = defaultValue,
G._ivdDirectives = noDirectives
G._ivdDirectives = dDirectives
}
convertField :: Definition origin (FieldInfo origin) -> G.FieldDefinition G.InputValueDefinition
@@ -99,13 +98,10 @@ convertField Definition {..} = case dInfo of
G._fldName = dName,
G._fldArgumentsDefinition = map convertInputField arguments,
G._fldType = toGraphQLType typeInfo,
G._fldDirectives = noDirectives
G._fldDirectives = dDirectives
}
-------------------------------------------------------------------------------
getDefinitionName :: Definition origin a -> G.Name
getDefinitionName = dName
noDirectives :: [G.Directive Void]
noDirectives = []

View File

@@ -1,5 +1,6 @@
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
module Hasura.GraphQL.Schema
( buildGQLContext,
@@ -17,6 +18,7 @@ import Data.Text.Extended
import Data.Text.NonEmpty qualified as NT
import Hasura.Base.Error
import Hasura.Base.ErrorMessage (toErrorMessage)
import Hasura.GraphQL.ApolloFederation
import Hasura.GraphQL.Context
import Hasura.GraphQL.Execute.Types
import Hasura.GraphQL.Namespace
@@ -194,8 +196,9 @@ buildRoleContext options sources remotes allActionInfos customTypes role remoteS
HasuraSchema
(remoteRelationshipField sources (fst <$> remotes) remoteSchemaPermsCtx)
runMonadSchema schemaOptions schemaContext role $ do
-- build all sources
(sourcesQueryFields, sourcesMutationFrontendFields, sourcesMutationBackendFields, subscriptionFields) <-
-- build all sources (`apolloFedTableParsers` contains all the parsers and
-- type names, which are eligible for the `_Entity` Union)
(sourcesQueryFields, sourcesMutationFrontendFields, sourcesMutationBackendFields, subscriptionFields, apolloFedTableParsers) <-
fmap mconcat $ traverse (buildBackendSource buildSource) $ toList sources
-- build all remote schemas
-- we only keep the ones that don't result in a name conflict
@@ -204,6 +207,7 @@ buildRoleContext options sources remotes allActionInfos customTypes role remoteS
let remotesQueryFields = concatMap piQuery remoteSchemaFields
remotesMutationFields = concat $ mapMaybe piMutation remoteSchemaFields
remotesSubscriptionFields = concat $ mapMaybe piSubscription remoteSchemaFields
apolloFields = apolloRootFields expFeatures apolloFedTableParsers
mutationParserFrontend <-
buildMutationParser remotesMutationFields allActionInfos customTypes sourcesMutationFrontendFields
@@ -212,9 +216,9 @@ buildRoleContext options sources remotes allActionInfos customTypes role remoteS
subscriptionParser <-
buildSubscriptionParser subscriptionFields allActionInfos customTypes remotesSubscriptionFields
queryParserFrontend <-
buildQueryParser sourcesQueryFields remotesQueryFields allActionInfos customTypes mutationParserFrontend subscriptionParser
buildQueryParser sourcesQueryFields remotesQueryFields apolloFields allActionInfos customTypes mutationParserFrontend subscriptionParser
queryParserBackend <-
buildQueryParser sourcesQueryFields remotesQueryFields allActionInfos customTypes mutationParserBackend subscriptionParser
buildQueryParser sourcesQueryFields remotesQueryFields apolloFields allActionInfos customTypes mutationParserBackend subscriptionParser
-- In order to catch errors early, we attempt to generate the data
-- required for introspection, which ends up doing a few correctness
@@ -228,6 +232,7 @@ buildRoleContext options sources remotes allActionInfos customTypes role remoteS
(P.parserType <$> mutationParserBackend)
(P.parserType <$> subscriptionParser)
pure $
-- We don't need to persist the introspection schema for all the roles here.
-- TODO(nicuveo): we treat the admin role differently in this function,
-- which is a bit inelegant; we might want to refactor this function and
-- split it into several steps, so that we can make a separate function for
@@ -235,6 +240,7 @@ buildRoleContext options sources remotes allActionInfos customTypes role remoteS
if role == adminRoleName
then result
else G.SchemaIntrospection mempty
void $
buildIntrospectionSchema
(P.parserType queryParserFrontend)
@@ -268,16 +274,17 @@ buildRoleContext options sources remotes allActionInfos customTypes role remoteS
( [FieldParser P.Parse (NamespacedField (QueryRootField UnpreparedValue))],
[FieldParser P.Parse (NamespacedField (MutationRootField UnpreparedValue))],
[FieldParser P.Parse (NamespacedField (MutationRootField UnpreparedValue))],
[FieldParser P.Parse (NamespacedField (QueryRootField UnpreparedValue))]
[FieldParser P.Parse (NamespacedField (QueryRootField UnpreparedValue))],
[(G.Name, Parser 'Output P.Parse (ApolloFederationParserFunction P.Parse))]
)
buildSource sourceInfo@(SourceInfo _ tables functions _ _ sourceCustomization') =
withSourceCustomization sourceCustomization (namingConventionSupport @b) globalDefaultNC do
let validFunctions = takeValidFunctions functions
validTables = takeValidTables tables
makeTypename <- asks getter
(uncustomizedQueryRootFields, uncustomizedSubscriptionRootFields) <-
(uncustomizedQueryRootFields, uncustomizedSubscriptionRootFields, apolloFedTableParsers) <-
buildQueryAndSubscriptionFields sourceInfo validTables validFunctions streamingSubscriptionsCtx
(,,,)
(,,,,apolloFedTableParsers)
<$> customizeFields
sourceCustomization
(makeTypename <> MkTypename (<> Name.__query))
@@ -571,7 +578,7 @@ buildQueryAndSubscriptionFields ::
TableCache b ->
FunctionCache b ->
StreamingSubscriptionsCtx ->
m ([P.FieldParser n (QueryRootField UnpreparedValue)], [P.FieldParser n (SubscriptionRootField UnpreparedValue)])
m ([P.FieldParser n (QueryRootField UnpreparedValue)], [P.FieldParser n (SubscriptionRootField UnpreparedValue)], [(G.Name, Parser 'Output n (ApolloFederationParserFunction n))])
buildQueryAndSubscriptionFields sourceInfo tables (takeExposedAs FEAQuery -> functions) streamingSubsCtx = do buildQueryAndSubscriptionFields sourceInfo tables (takeExposedAs FEAQuery -> functions) streamingSubsCtx = do
roleName <- asks getter roleName <- asks getter
functionPermsCtx <- retrieve Options.soInferFunctionPermissions functionPermsCtx <- retrieve Options.soInferFunctionPermissions
@ -585,8 +592,8 @@ buildQueryAndSubscriptionFields sourceInfo tables (takeExposedAs FEAQuery -> fun
let targetTableName = _fiReturnType functionInfo let targetTableName = _fiReturnType functionInfo
lift $ mkRFs $ buildFunctionQueryFields sourceInfo functionName functionInfo targetTableName lift $ mkRFs $ buildFunctionQueryFields sourceInfo functionName functionInfo targetTableName
(tableQueryFields, tableSubscriptionFields) <- (tableQueryFields, tableSubscriptionFields, apolloFedTableParsers) <-
unzip . catMaybes unzip3 . catMaybes
<$> for (Map.toList tables) \(tableName, tableInfo) -> runMaybeT $ do <$> for (Map.toList tables) \(tableName, tableInfo) -> runMaybeT $ do
tableIdentifierName <- getTableIdentifierName @b tableInfo tableIdentifierName <- getTableIdentifierName @b tableInfo
lift $ buildTableQueryAndSubscriptionFields sourceInfo tableName tableInfo streamingSubsCtx tableIdentifierName lift $ buildTableQueryAndSubscriptionFields sourceInfo tableName tableInfo streamingSubsCtx tableIdentifierName
@ -596,7 +603,8 @@ buildQueryAndSubscriptionFields sourceInfo tables (takeExposedAs FEAQuery -> fun
pure pure
( tableQueryRootFields <> functionSelectExpParsers, ( tableQueryRootFields <> functionSelectExpParsers,
tableSubscriptionRootFields <> functionSelectExpParsers tableSubscriptionRootFields <> functionSelectExpParsers,
catMaybes apolloFedTableParsers
) )
where where
mkRFs = mkRootFields sourceName sourceConfig queryTagsConfig QDBR mkRFs = mkRootFields sourceName sourceConfig queryTagsConfig QDBR
@ -689,14 +697,35 @@ buildQueryParser ::
MonadBuildSchemaBase r m n => MonadBuildSchemaBase r m n =>
[P.FieldParser n (NamespacedField (QueryRootField UnpreparedValue))] -> [P.FieldParser n (NamespacedField (QueryRootField UnpreparedValue))] ->
[P.FieldParser n (NamespacedField (RemoteSchemaRootField (RemoteRelationshipField UnpreparedValue) RemoteSchemaVariable))] -> [P.FieldParser n (NamespacedField (RemoteSchemaRootField (RemoteRelationshipField UnpreparedValue) RemoteSchemaVariable))] ->
[P.FieldParser n (G.SchemaIntrospection -> QueryRootField UnpreparedValue)] ->
[ActionInfo] -> [ActionInfo] ->
AnnotatedCustomTypes -> AnnotatedCustomTypes ->
Maybe (Parser 'Output n (RootFieldMap (MutationRootField UnpreparedValue))) -> Maybe (Parser 'Output n (RootFieldMap (MutationRootField UnpreparedValue))) ->
Maybe (Parser 'Output n (RootFieldMap (QueryRootField UnpreparedValue))) -> Maybe (Parser 'Output n (RootFieldMap (QueryRootField UnpreparedValue))) ->
m (Parser 'Output n (RootFieldMap (QueryRootField UnpreparedValue))) m (Parser 'Output n (RootFieldMap (QueryRootField UnpreparedValue)))
buildQueryParser sourceQueryFields remoteQueryFields allActions customTypes mutationParser subscriptionParser = do buildQueryParser sourceQueryFields remoteQueryFields apolloFederationFields allActions customTypes mutationParser subscriptionParser = do
actionQueryFields <- concat <$> traverse (buildActionQueryFields customTypes) allActions actionQueryFields <- concat <$> traverse (buildActionQueryFields customTypes) allActions
let allQueryFields = sourceQueryFields <> fmap (fmap NotNamespaced) actionQueryFields <> fmap (fmap $ fmap RFRemote) remoteQueryFields -- This method is aware of our rudimentary support for Apollo federation.
-- Apollo federation adds two fields, `_service` and `_entities`. The
-- `_service` field parser is a selection set that contains an `sdl` field.
-- The `sdl` field exposes a _serialized_ introspection of the schema. So in -- The `sdl` field exposes a _serialized_ introspection of the schema. So in
-- that sense it is similar to the `__type` and `__schema` introspection
-- fields. However, a few things must be excluded from this introspection
-- data, notably the Apollo federation fields `_service` and `_entities`
-- themselves. So in this method we build a version of the introspection for
-- Apollo federation purposes.
let partialApolloQueryFP = sourceQueryFields <> fmap (fmap NotNamespaced) actionQueryFields <> fmap (fmap $ fmap RFRemote) remoteQueryFields
basicQueryPForApollo <- queryRootFromFields partialApolloQueryFP
let buildApolloIntrospection buildQRF = do
partialSchema <-
parseBuildIntrospectionSchema
(P.parserType basicQueryPForApollo)
(P.parserType <$> mutationParser)
(P.parserType <$> subscriptionParser)
pure $ NotNamespaced $ buildQRF $ convertToSchemaIntrospection partialSchema
apolloFederationFieldsWithIntrospection :: [P.FieldParser n (NamespacedField (QueryRootField UnpreparedValue))]
apolloFederationFieldsWithIntrospection = apolloFederationFields <&> (`P.bindField` buildApolloIntrospection)
allQueryFields = partialApolloQueryFP <> apolloFederationFieldsWithIntrospection
queryWithIntrospectionHelper allQueryFields mutationParser subscriptionParser queryWithIntrospectionHelper allQueryFields mutationParser subscriptionParser
-- | Builds a @Schema@ at query parsing time -- | Builds a @Schema@ at query parsing time
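The comment above describes the two extra root fields a federation v1 gateway calls on a subgraph. A hedged sketch of what those calls look like against HGE (the field names `_service`, `sdl`, `_entities`, `representations` and the `_Any` type come from this diff; the operation names are arbitrary, and `user` with its columns is just the example table used in the test fixtures below):

```graphql
# Fetch the serialized subgraph schema (the gateway uses this to compose the supergraph).
query {
  _service {
    sdl
  }
}

# Resolve entities from the representations the gateway collected elsewhere.
query ResolveUsers($representations: [_Any!]!) {
  _entities(representations: $representations) {
    ... on user {
      id
      name
    }
  }
}
```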
@ -178,6 +178,8 @@ actionAsyncQuery objectTypes actionInfo = runMaybeT do
actionOutputParser <- lift $ actionOutputFields outputType aot objectTypes actionOutputParser <- lift $ actionOutputFields outputType aot objectTypes
let desc = G.Description $ "fields of action: " <>> actionName let desc = G.Description $ "fields of action: " <>> actionName
selectionSet = selectionSet =
-- Note: If we want support for Apollo Federation for Actions later,
-- we'd need to add support for "key" directive here as well.
P.selectionSet outputTypeName (Just desc) (allFieldParsers actionOutputParser) P.selectionSet outputTypeName (Just desc) (allFieldParsers actionOutputParser)
<&> parsedSelectionsToFields IR.AsyncTypename <&> parsedSelectionsToFields IR.AsyncTypename
pure $ P.subselection fieldName description actionIdInputField selectionSet pure $ P.subselection fieldName description actionIdInputField selectionSet
@ -422,7 +424,7 @@ customScalarParser = \case
| _stdName == GName._Boolean -> J.toJSON <$> P.boolean | _stdName == GName._Boolean -> J.toJSON <$> P.boolean
| otherwise -> P.jsonScalar _stdName _stdDescription | otherwise -> P.jsonScalar _stdName _stdDescription
ASTReusedScalar name backendScalarType -> ASTReusedScalar name backendScalarType ->
let schemaType = P.TNamed P.NonNullable $ P.Definition name Nothing Nothing P.TIScalar let schemaType = P.TNamed P.NonNullable $ P.Definition name Nothing Nothing [] P.TIScalar
backendScalarValidator = backendScalarValidator =
AB.dispatchAnyBackend @Backend backendScalarType \(scalarType :: ScalarWrapper b) jsonInput -> do AB.dispatchAnyBackend @Backend backendScalarType \(scalarType :: ScalarWrapper b) jsonInput -> do
-- We attempt to parse the value from JSON to validate it, but still -- We attempt to parse the value from JSON to validate it, but still
@ -456,5 +458,6 @@ customEnumParser (EnumTypeDefinition typeName description enumValues) =
valueName valueName
(_evdDescription enumValue) (_evdDescription enumValue)
Nothing Nothing
[]
P.EnumValueInfo P.EnumValueInfo
in P.enum enumName description enumValueDefinitions in P.enum enumName description enumValueDefinitions
@ -39,6 +39,7 @@ where
import Data.Has import Data.Has
import Data.Text.Casing (GQLNameIdentifier) import Data.Text.Casing (GQLNameIdentifier)
import Hasura.Base.Error import Hasura.Base.Error
import Hasura.GraphQL.ApolloFederation (ApolloFederationParserFunction)
import Hasura.GraphQL.Schema.Common import Hasura.GraphQL.Schema.Common
import Hasura.GraphQL.Schema.NamingCase import Hasura.GraphQL.Schema.NamingCase
import Hasura.GraphQL.Schema.Parser hiding (Type) import Hasura.GraphQL.Schema.Parser hiding (Type)
@ -104,7 +105,8 @@ class
GQLNameIdentifier -> GQLNameIdentifier ->
m m
( [FieldParser n (QueryDB b (RemoteRelationshipField UnpreparedValue) (UnpreparedValue b))], ( [FieldParser n (QueryDB b (RemoteRelationshipField UnpreparedValue) (UnpreparedValue b))],
[FieldParser n (QueryDB b (RemoteRelationshipField UnpreparedValue) (UnpreparedValue b))] [FieldParser n (QueryDB b (RemoteRelationshipField UnpreparedValue) (UnpreparedValue b))],
Maybe (G.Name, Parser 'Output n (ApolloFederationParserFunction n))
) )
buildTableStreamingSubscriptionFields :: buildTableStreamingSubscriptionFields ::
MonadBuildSchema b r m n => MonadBuildSchema b r m n =>
@ -57,14 +57,17 @@ where
import Data.Has (getter) import Data.Has (getter)
import Data.Text.Casing qualified as C import Data.Text.Casing qualified as C
import Data.Text.Extended import Data.Text.Extended
import Hasura.GraphQL.ApolloFederation
import Hasura.GraphQL.Schema.Backend (BackendTableSelectSchema (..), MonadBuildSchema) import Hasura.GraphQL.Schema.Backend (BackendTableSelectSchema (..), MonadBuildSchema)
import Hasura.GraphQL.Schema.Common import Hasura.GraphQL.Schema.Common
import Hasura.GraphQL.Schema.Mutation import Hasura.GraphQL.Schema.Mutation
import Hasura.GraphQL.Schema.NamingCase import Hasura.GraphQL.Schema.NamingCase
import Hasura.GraphQL.Schema.Options qualified as Options
import Hasura.GraphQL.Schema.Parser hiding (EnumValueInfo, field) import Hasura.GraphQL.Schema.Parser hiding (EnumValueInfo, field)
import Hasura.GraphQL.Schema.Select import Hasura.GraphQL.Schema.Select
import Hasura.GraphQL.Schema.SubscriptionStream (selectStreamTable) import Hasura.GraphQL.Schema.SubscriptionStream (selectStreamTable)
import Hasura.GraphQL.Schema.Table (tableSelectPermissions) import Hasura.GraphQL.Schema.Table (getTableGQLName, tableSelectPermissions)
import Hasura.GraphQL.Schema.Typename (mkTypename)
import Hasura.GraphQL.Schema.Update (updateTable, updateTableByPk) import Hasura.GraphQL.Schema.Update (updateTable, updateTableByPk)
import Hasura.Prelude import Hasura.Prelude
import Hasura.RQL.IR import Hasura.RQL.IR
@ -110,7 +113,8 @@ buildTableQueryAndSubscriptionFields ::
C.GQLNameIdentifier -> C.GQLNameIdentifier ->
m m
( [FieldParser n (QueryDB b (RemoteRelationshipField UnpreparedValue) (UnpreparedValue b))], ( [FieldParser n (QueryDB b (RemoteRelationshipField UnpreparedValue) (UnpreparedValue b))],
[FieldParser n (QueryDB b (RemoteRelationshipField UnpreparedValue) (UnpreparedValue b))] [FieldParser n (QueryDB b (RemoteRelationshipField UnpreparedValue) (UnpreparedValue b))],
Maybe (G.Name, Parser 'Output n (ApolloFederationParserFunction n))
) )
buildTableQueryAndSubscriptionFields sourceInfo tableName tableInfo streamSubCtx gqlName = do buildTableQueryAndSubscriptionFields sourceInfo tableName tableInfo streamSubCtx gqlName = do
tCase <- asks getter tCase <- asks getter
@ -130,7 +134,7 @@ buildTableQueryAndSubscriptionFields sourceInfo tableName tableInfo streamSubCtx
case selectPermission of case selectPermission of
-- No select permission found for the current role, so -- No select permission found for the current role, so
-- no root fields will be accessible to the role -- no root fields will be accessible to the role
Nothing -> pure (mempty, mempty) Nothing -> pure (mempty, mempty, Nothing)
-- Filter the root fields which have been enabled -- Filter the root fields which have been enabled
Just SelPermInfo {..} -> do Just SelPermInfo {..} -> do
selectStreamParser <- selectStreamParser <-
@ -161,7 +165,19 @@ buildTableQueryAndSubscriptionFields sourceInfo tableName tableInfo streamSubCtx
selectStreamParser selectStreamParser
<> catMaybes [subscriptionSelectTableParser, subscriptionSelectTableByPkParser, subscriptionSelectTableAggParser] <> catMaybes [subscriptionSelectTableParser, subscriptionSelectTableByPkParser, subscriptionSelectTableAggParser]
pure (queryRootFields, subscriptionRootFields) -- This parser generates the Apollo federation `_entities` field
apolloFedTableParser <- runMaybeT do
guard $ isApolloFedV1enabled (_tciApolloFederationConfig (_tiCoreInfo tableInfo))
tableSelSet <- MaybeT $ tableSelectionSet sourceInfo tableInfo
selectPerm <- MaybeT $ tableSelectPermissions tableInfo
stringifyNumbers <- retrieve Options.soStringifyNumbers
primaryKeys <- hoistMaybe $ fmap _pkColumns . _tciPrimaryKey . _tiCoreInfo $ tableInfo
let tableSelPerm = tablePermissionsInfo selectPerm
tableGQLName <- getTableGQLName tableInfo
objectTypename <- mkTypename tableGQLName
pure $ (objectTypename, convertToApolloFedParserFunc sourceInfo tableInfo tableSelPerm stringifyNumbers (Just tCase) primaryKeys tableSelSet)
pure (queryRootFields, subscriptionRootFields, apolloFedTableParser)
where where
selectDesc = buildFieldDescription defaultSelectDesc $ _crfComment _tcrfSelect selectDesc = buildFieldDescription defaultSelectDesc $ _crfComment _tcrfSelect
selectPKDesc = buildFieldDescription defaultSelectPKDesc $ _crfComment _tcrfSelectByPk selectPKDesc = buildFieldDescription defaultSelectPKDesc $ _crfComment _tcrfSelectByPk
@ -334,17 +334,17 @@ typeField =
J.String "NON_NULL" J.String "NON_NULL"
P.TList P.Nullable _ -> P.TList P.Nullable _ ->
J.String "LIST" J.String "LIST"
P.TNamed P.Nullable (P.Definition _ _ _ P.TIScalar) -> P.TNamed P.Nullable (P.Definition _ _ _ _ P.TIScalar) ->
J.String "SCALAR" J.String "SCALAR"
P.TNamed P.Nullable (P.Definition _ _ _ (P.TIEnum _)) -> P.TNamed P.Nullable (P.Definition _ _ _ _ (P.TIEnum _)) ->
J.String "ENUM" J.String "ENUM"
P.TNamed P.Nullable (P.Definition _ _ _ (P.TIInputObject _)) -> P.TNamed P.Nullable (P.Definition _ _ _ _ (P.TIInputObject _)) ->
J.String "INPUT_OBJECT" J.String "INPUT_OBJECT"
P.TNamed P.Nullable (P.Definition _ _ _ (P.TIObject _)) -> P.TNamed P.Nullable (P.Definition _ _ _ _ (P.TIObject _)) ->
J.String "OBJECT" J.String "OBJECT"
P.TNamed P.Nullable (P.Definition _ _ _ (P.TIInterface _)) -> P.TNamed P.Nullable (P.Definition _ _ _ _ (P.TIInterface _)) ->
J.String "INTERFACE" J.String "INTERFACE"
P.TNamed P.Nullable (P.Definition _ _ _ (P.TIUnion _)) -> P.TNamed P.Nullable (P.Definition _ _ _ _ (P.TIUnion _)) ->
J.String "UNION" J.String "UNION"
name :: FieldParser n (SomeType -> J.Value) name :: FieldParser n (SomeType -> J.Value)
name = name =
@ -352,14 +352,14 @@ typeField =
$> \case $> \case
SomeType tp -> SomeType tp ->
case tp of case tp of
P.TNamed P.Nullable (P.Definition name' _ _ _) -> P.TNamed P.Nullable (P.Definition name' _ _ _ _) ->
nameAsJSON name' nameAsJSON name'
_ -> J.Null _ -> J.Null
description :: FieldParser n (SomeType -> J.Value) description :: FieldParser n (SomeType -> J.Value)
description = description =
P.selection_ GName._description Nothing P.string P.selection_ GName._description Nothing P.string
$> \case $> \case
SomeType (P.TNamed _ (P.Definition _ (Just desc) _ _)) -> SomeType (P.TNamed _ (P.Definition _ (Just desc) _ _ _)) ->
J.String (G.unDescription desc) J.String (G.unDescription desc)
_ -> J.Null _ -> J.Null
fields :: FieldParser n (SomeType -> J.Value) fields :: FieldParser n (SomeType -> J.Value)
@ -370,9 +370,9 @@ typeField =
\case \case
SomeType tp -> SomeType tp ->
case tp of case tp of
P.TNamed P.Nullable (P.Definition _ _ _ (P.TIObject (P.ObjectInfo fields' _interfaces'))) -> P.TNamed P.Nullable (P.Definition _ _ _ _ (P.TIObject (P.ObjectInfo fields' _interfaces'))) ->
J.Array $ V.fromList $ printer <$> fields' J.Array $ V.fromList $ printer <$> fields'
P.TNamed P.Nullable (P.Definition _ _ _ (P.TIInterface (P.InterfaceInfo fields' _objects'))) -> P.TNamed P.Nullable (P.Definition _ _ _ _ (P.TIInterface (P.InterfaceInfo fields' _objects'))) ->
J.Array $ V.fromList $ printer <$> fields' J.Array $ V.fromList $ printer <$> fields'
_ -> J.Null _ -> J.Null
interfaces :: FieldParser n (SomeType -> J.Value) interfaces :: FieldParser n (SomeType -> J.Value)
@ -382,7 +382,7 @@ typeField =
\case \case
SomeType tp -> SomeType tp ->
case tp of case tp of
P.TNamed P.Nullable (P.Definition _ _ _ (P.TIObject (P.ObjectInfo _fields' interfaces'))) -> P.TNamed P.Nullable (P.Definition _ _ _ _ (P.TIObject (P.ObjectInfo _fields' interfaces'))) ->
J.Array $ V.fromList $ printer . SomeType . P.TNamed P.Nullable . fmap P.TIInterface <$> interfaces' J.Array $ V.fromList $ printer . SomeType . P.TNamed P.Nullable . fmap P.TIInterface <$> interfaces'
_ -> J.Null _ -> J.Null
possibleTypes :: FieldParser n (SomeType -> J.Value) possibleTypes :: FieldParser n (SomeType -> J.Value)
@ -392,9 +392,9 @@ typeField =
\case \case
SomeType tp -> SomeType tp ->
case tp of case tp of
P.TNamed P.Nullable (P.Definition _ _ _ (P.TIInterface (P.InterfaceInfo _fields' objects'))) -> P.TNamed P.Nullable (P.Definition _ _ _ _ (P.TIInterface (P.InterfaceInfo _fields' objects'))) ->
J.Array $ V.fromList $ printer . SomeType . P.TNamed P.Nullable . fmap P.TIObject <$> objects' J.Array $ V.fromList $ printer . SomeType . P.TNamed P.Nullable . fmap P.TIObject <$> objects'
P.TNamed P.Nullable (P.Definition _ _ _ (P.TIUnion (P.UnionInfo objects'))) -> P.TNamed P.Nullable (P.Definition _ _ _ _ (P.TIUnion (P.UnionInfo objects'))) ->
J.Array $ V.fromList $ printer . SomeType . P.TNamed P.Nullable . fmap P.TIObject <$> objects' J.Array $ V.fromList $ printer . SomeType . P.TNamed P.Nullable . fmap P.TIObject <$> objects'
_ -> J.Null _ -> J.Null
enumValues :: FieldParser n (SomeType -> J.Value) enumValues :: FieldParser n (SomeType -> J.Value)
@ -405,7 +405,7 @@ typeField =
\case \case
SomeType tp -> SomeType tp ->
case tp of case tp of
P.TNamed P.Nullable (P.Definition _ _ _ (P.TIEnum vals)) -> P.TNamed P.Nullable (P.Definition _ _ _ _ (P.TIEnum vals)) ->
J.Array $ V.fromList $ fmap printer $ toList vals J.Array $ V.fromList $ fmap printer $ toList vals
_ -> J.Null _ -> J.Null
inputFields :: FieldParser n (SomeType -> J.Value) inputFields :: FieldParser n (SomeType -> J.Value)
@ -415,7 +415,7 @@ typeField =
\case \case
SomeType tp -> SomeType tp ->
case tp of case tp of
P.TNamed P.Nullable (P.Definition _ _ _ (P.TIInputObject (P.InputObjectInfo fieldDefs))) -> P.TNamed P.Nullable (P.Definition _ _ _ _ (P.TIInputObject (P.InputObjectInfo fieldDefs))) ->
J.Array $ V.fromList $ map printer fieldDefs J.Array $ V.fromList $ map printer fieldDefs
_ -> J.Null _ -> J.Null
-- ofType peels modalities off of types -- ofType peels modalities off of types
@ -562,7 +562,7 @@ typeKind =
] ]
) )
where where
mkDefinition name = (P.Definition name Nothing Nothing P.EnumValueInfo, ()) mkDefinition name = (P.Definition name Nothing Nothing [] P.EnumValueInfo, ())
{- {-
type __Field { type __Field {
@ -376,7 +376,7 @@ remoteFieldScalarParser customizeTypename (G.ScalarTypeDefinition description na
} }
where where
customizedTypename = runMkTypename customizeTypename name customizedTypename = runMkTypename customizeTypename name
schemaType = TNamed NonNullable $ Definition customizedTypename description Nothing TIScalar schemaType = TNamed NonNullable $ Definition customizedTypename description Nothing [] TIScalar
gType = toGraphQLType schemaType gType = toGraphQLType schemaType
mkRemoteGType = \case mkRemoteGType = \case
@ -391,7 +391,7 @@ remoteFieldEnumParser ::
remoteFieldEnumParser customizeTypename (G.EnumTypeDefinition desc name _directives valueDefns) = remoteFieldEnumParser customizeTypename (G.EnumTypeDefinition desc name _directives valueDefns) =
let enumValDefns = let enumValDefns =
valueDefns <&> \(G.EnumValueDefinition enumDesc enumName _) -> valueDefns <&> \(G.EnumValueDefinition enumDesc enumName _) ->
( Definition (G.unEnumValue enumName) enumDesc Nothing P.EnumValueInfo, ( Definition (G.unEnumValue enumName) enumDesc Nothing [] P.EnumValueInfo,
G.VEnum enumName G.VEnum enumName
) )
in fmap (Altered False,) $ P.enum (runMkTypename customizeTypename name) desc $ NE.fromList enumValDefns in fmap (Altered False,) $ P.enum (runMkTypename customizeTypename name) desc $ NE.fromList enumValDefns
@ -839,12 +839,12 @@ remoteFieldFromDefinition schemaDoc parentTypeName remoteRelationships (G.FieldD
convertType gType convertType gType
where where
addNullableList :: FieldParser n a -> FieldParser n a addNullableList :: FieldParser n a -> FieldParser n a
addNullableList (P.FieldParser (Definition name' desc origin (FieldInfo args typ)) parser) = addNullableList (P.FieldParser (Definition name' desc origin dLst (FieldInfo args typ)) parser) =
P.FieldParser (Definition name' desc origin (FieldInfo args (TList Nullable typ))) parser P.FieldParser (Definition name' desc origin dLst (FieldInfo args (TList Nullable typ))) parser
addNonNullableList :: FieldParser n a -> FieldParser n a addNonNullableList :: FieldParser n a -> FieldParser n a
addNonNullableList (P.FieldParser (Definition name' desc origin (FieldInfo args typ)) parser) = addNonNullableList (P.FieldParser (Definition name' desc origin dLst (FieldInfo args typ)) parser) =
P.FieldParser (Definition name' desc origin (FieldInfo args (TList NonNullable typ))) parser P.FieldParser (Definition name' desc origin dLst (FieldInfo args (TList NonNullable typ))) parser
-- TODO add directives, deprecation -- TODO add directives, deprecation
convertType :: convertType ::
@ -35,6 +35,7 @@ import Data.Has
import Data.HashMap.Strict.Extended qualified as Map import Data.HashMap.Strict.Extended qualified as Map
import Data.Int (Int64) import Data.Int (Int64)
import Data.List.NonEmpty qualified as NE import Data.List.NonEmpty qualified as NE
import Data.Text qualified as T
import Data.Text.Extended import Data.Text.Extended
import Hasura.Backends.Postgres.SQL.Types qualified as PG import Hasura.Backends.Postgres.SQL.Types qualified as PG
import Hasura.Base.Error import Hasura.Base.Error
@ -383,6 +384,19 @@ defaultTableSelectionSet sourceInfo tableInfo = runMaybeT do
let xRelay = relayExtension @b let xRelay = relayExtension @b
tableFields = Map.elems $ _tciFieldInfoMap tableCoreInfo tableFields = Map.elems $ _tciFieldInfoMap tableCoreInfo
tablePkeyColumns = _pkColumns <$> _tciPrimaryKey tableCoreInfo tablePkeyColumns = _pkColumns <$> _tciPrimaryKey tableCoreInfo
pkFields = concatMap toList tablePkeyColumns
pkFieldDirective = T.intercalate " " $ map (G.unName . ciName) pkFields
-- Adding `@key` directives to the type for Apollo federation. An example
-- of a type with a key directive:
-- type Product @key(fields: "upc sku") {
-- upc: UPC!
-- sku: SKU!
-- name: String
-- }
pkDirectives =
if isApolloFedV1enabled (_tciApolloFederationConfig tableCoreInfo) && (not . null) pkFields
then [(G.Directive Name._key . Map.singleton Name._fields . G.VString) pkFieldDirective]
else mempty
description = G.Description . PG.getPGDescription <$> _tciDescription tableCoreInfo description = G.Description . PG.getPGDescription <$> _tciDescription tableCoreInfo
fieldParsers <- fieldParsers <-
concat concat
@ -406,16 +420,19 @@ defaultTableSelectionSet sourceInfo tableInfo = runMaybeT do
allFieldParsers = fieldParsers <> [nodeIdFieldParser] allFieldParsers = fieldParsers <> [nodeIdFieldParser]
nodeInterface <- runNodeBuilder nodeBuilder nodeInterface <- runNodeBuilder nodeBuilder
pure $ pure $
P.selectionSetObject objectTypename description allFieldParsers [nodeInterface] selectionSetObjectWithDirective objectTypename description allFieldParsers [nodeInterface] pkDirectives
<&> parsedSelectionsToFields IR.AFExpression <&> parsedSelectionsToFields IR.AFExpression
_ -> _ ->
pure $ pure $
P.selectionSetObject objectTypename description fieldParsers [] selectionSetObjectWithDirective objectTypename description fieldParsers [] pkDirectives
<&> parsedSelectionsToFields IR.AFExpression <&> parsedSelectionsToFields IR.AFExpression
where where
sourceName = _siName sourceInfo sourceName = _siName sourceInfo
tableName = tableInfoName tableInfo tableName = tableInfoName tableInfo
tableCoreInfo = _tiCoreInfo tableInfo tableCoreInfo = _tiCoreInfo tableInfo
selectionSetObjectWithDirective name description parsers implementsInterfaces directives =
P.setParserDirectives directives $
P.selectionSetObject name description parsers implementsInterfaces
-- | List of table fields object. -- | List of table fields object.
-- Just a @'nonNullableObjectList' wrapper over @'tableSelectionSet'. -- Just a @'nonNullableObjectList' wrapper over @'tableSelectionSet'.
@ -78,7 +78,7 @@ cursorOrderingArgParser = do
where where
define (name, val) = define (name, val) =
let orderingTypeDesc = bool "descending" "ascending" $ val == COAscending let orderingTypeDesc = bool "descending" "ascending" $ val == COAscending
in P.Definition name (Just $ G.Description $ orderingTypeDesc <> " ordering of the cursor") Nothing P.EnumValueInfo in P.Definition name (Just $ G.Description $ orderingTypeDesc <> " ordering of the cursor") Nothing [] P.EnumValueInfo
-- | Argument to specify the ordering of the cursor. -- | Argument to specify the ordering of the cursor.
-- > ordering: cursor_ordering -- > ordering: cursor_ordering
@ -106,7 +106,7 @@ tableSelectColumnsEnum sourceInfo tableInfo = do
] ]
where where
define name = define name =
P.Definition name (Just $ G.Description "column name") Nothing P.EnumValueInfo P.Definition name (Just $ G.Description "column name") Nothing [] P.EnumValueInfo
-- | Table update columns enum -- | Table update columns enum
-- --
@ -129,7 +129,7 @@ tableUpdateColumnsEnum tableInfo = do
pure (define $ ciName column, ciColumn column) pure (define $ ciName column, ciColumn column)
pure $ P.enum enumName enumDesc <$> nonEmpty enumValues pure $ P.enum enumName enumDesc <$> nonEmpty enumValues
where where
define name = P.Definition name (Just $ G.Description "column name") Nothing P.EnumValueInfo define name = P.Definition name (Just $ G.Description "column name") Nothing [] P.EnumValueInfo
-- If there's no column for which the current user has "update" -- If there's no column for which the current user has "update"
-- permissions, this functions returns an enum that only contains a -- permissions, this functions returns an enum that only contains a
@ -148,7 +148,7 @@ updateColumnsPlaceholderParser tableInfo = do
pure $ pure $
P.enum enumName (Just $ G.Description $ "placeholder for update columns of table " <> tableInfoName tableInfo <<> " (current role has no relevant permissions)") $ P.enum enumName (Just $ G.Description $ "placeholder for update columns of table " <> tableInfoName tableInfo <<> " (current role has no relevant permissions)") $
pure pure
( P.Definition @_ @P.EnumValueInfo Name.__PLACEHOLDER (Just $ G.Description "placeholder (do not use)") Nothing P.EnumValueInfo, ( P.Definition @_ @P.EnumValueInfo Name.__PLACEHOLDER (Just $ G.Description "placeholder (do not use)") Nothing [] P.EnumValueInfo,
Nothing Nothing
) )
@ -264,6 +264,7 @@ filterVariablesFromQuery = foldMap \case
RFRemote remote -> foldOf (traverse . _SessionPresetVariable . to match) remote RFRemote remote -> foldOf (traverse . _SessionPresetVariable . to match) remote
RFAction actionQ -> foldMap remoteFieldPred actionQ RFAction actionQ -> foldMap remoteFieldPred actionQ
RFRaw {} -> mempty RFRaw {} -> mempty
RFMulti {} -> mempty
where where
_SessionPresetVariable :: Traversal' RemoteSchemaVariable SessionVariable _SessionPresetVariable :: Traversal' RemoteSchemaVariable SessionVariable
_SessionPresetVariable f (SessionPresetVariable a b c) = _SessionPresetVariable f (SessionPresetVariable a b c) =
@ -484,6 +485,10 @@ runGQ env logger reqId userInfo ipAddress reqHeaders queryType reqUnparsed = do
E.ExecStepRaw json -> do E.ExecStepRaw json -> do
logQueryLog logger $ QueryLog reqUnparsed Nothing reqId QueryLogKindIntrospection logQueryLog logger $ QueryLog reqUnparsed Nothing reqId QueryLogKindIntrospection
buildRaw json buildRaw json
-- For `ExecStepMulti`, execute all the steps and concatenate the results into a list
E.ExecStepMulti lst -> do
_all <- traverse (executeQueryStep httpManager fieldName) lst
pure $ AnnotatedResponsePart 0 Telem.Local (encJFromList (map arpResponse _all)) []
executeMutationStep :: executeMutationStep ::
HTTP.Manager -> HTTP.Manager ->
@ -514,6 +519,10 @@ runGQ env logger reqId userInfo ipAddress reqHeaders queryType reqUnparsed = do
E.ExecStepRaw json -> do E.ExecStepRaw json -> do
logQueryLog logger $ QueryLog reqUnparsed Nothing reqId QueryLogKindIntrospection logQueryLog logger $ QueryLog reqUnparsed Nothing reqId QueryLogKindIntrospection
buildRaw json buildRaw json
-- For `ExecStepMulti`, execute all the steps and concatenate the results into a list
E.ExecStepMulti lst -> do
_all <- traverse (executeQueryStep httpManager fieldName) lst
pure $ AnnotatedResponsePart 0 Telem.Local (encJFromList (map arpResponse _all)) []
runRemoteGQ httpManager fieldName rsi resultCustomizer gqlReq remoteJoins = do runRemoteGQ httpManager fieldName rsi resultCustomizer gqlReq remoteJoins = do
(telemTimeIO_DT, remoteResponseHeaders, resp) <- (telemTimeIO_DT, remoteResponseHeaders, resp) <-
@ -498,38 +498,43 @@ onStart env enabledLogTypes serverEnv wsConn (StartMsg opId q) onMessageActions
Nothing -> do Nothing -> do
conclusion <- runExceptT $ conclusion <- runExceptT $
runLimits $ runLimits $
forWithKey queryPlan $ \fieldName -> \case forWithKey queryPlan $ \fieldName ->
E.ExecStepDB _headers exists remoteJoins -> doQErr $ do let getResponse = \case
(telemTimeIO_DT, resp) <- E.ExecStepDB _headers exists remoteJoins -> doQErr $ do
AB.dispatchAnyBackend @BackendTransport (telemTimeIO_DT, resp) <-
exists AB.dispatchAnyBackend @BackendTransport
\(EB.DBStepInfo _ sourceConfig genSql tx :: EB.DBStepInfo b) -> exists
runDBQuery @b \(EB.DBStepInfo _ sourceConfig genSql tx :: EB.DBStepInfo b) ->
requestId runDBQuery @b
q requestId
fieldName q
userInfo fieldName
logger userInfo
sourceConfig logger
tx sourceConfig
genSql tx
finalResponse <- genSql
RJ.processRemoteJoins requestId logger env httpMgr reqHdrs userInfo resp remoteJoins q finalResponse <-
pure $ AnnotatedResponsePart telemTimeIO_DT Telem.Local finalResponse [] RJ.processRemoteJoins requestId logger env httpMgr reqHdrs userInfo resp remoteJoins q
E.ExecStepRemote rsi resultCustomizer gqlReq remoteJoins -> do pure $ AnnotatedResponsePart telemTimeIO_DT Telem.Local finalResponse []
logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindRemoteSchema E.ExecStepRemote rsi resultCustomizer gqlReq remoteJoins -> do
runRemoteGQ requestId q fieldName userInfo reqHdrs rsi resultCustomizer gqlReq remoteJoins logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindRemoteSchema
E.ExecStepAction actionExecPlan _ remoteJoins -> do runRemoteGQ requestId q fieldName userInfo reqHdrs rsi resultCustomizer gqlReq remoteJoins
logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindAction E.ExecStepAction actionExecPlan _ remoteJoins -> do
(time, (resp, _)) <- doQErr $ do logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindAction
(time, (resp, hdrs)) <- EA.runActionExecution userInfo actionExecPlan (time, (resp, _)) <- doQErr $ do
finalResponse <- (time, (resp, hdrs)) <- EA.runActionExecution userInfo actionExecPlan
RJ.processRemoteJoins requestId logger env httpMgr reqHdrs userInfo resp remoteJoins q finalResponse <-
pure (time, (finalResponse, hdrs)) RJ.processRemoteJoins requestId logger env httpMgr reqHdrs userInfo resp remoteJoins q
pure $ AnnotatedResponsePart time Telem.Empty resp [] pure (time, (finalResponse, hdrs))
E.ExecStepRaw json -> do pure $ AnnotatedResponsePart time Telem.Empty resp []
logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindIntrospection E.ExecStepRaw json -> do
buildRaw json logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindIntrospection
buildRaw json
E.ExecStepMulti lst -> do
allResponses <- traverse getResponse lst
pure $ AnnotatedResponsePart 0 Telem.Local (encJFromList (map arpResponse allResponses)) []
in getResponse
sendResultFromFragments Telem.Query timerTot requestId conclusion opName parameterizedQueryHash gqlOpType sendResultFromFragments Telem.Query timerTot requestId conclusion opName parameterizedQueryHash gqlOpType
case conclusion of case conclusion of
Left _ -> pure () Left _ -> pure ()
@ -568,39 +573,44 @@ onStart env enabledLogTypes serverEnv wsConn (StartMsg opId q) onMessageActions
Nothing -> do Nothing -> do
conclusion <- runExceptT $ conclusion <- runExceptT $
runLimits $ runLimits $
forWithKey mutationPlan $ \fieldName -> \case forWithKey mutationPlan $ \fieldName ->
-- Ignoring response headers since we can't send them over WebSocket let getResponse = \case
E.ExecStepDB _responseHeaders exists remoteJoins -> doQErr $ do -- Ignoring response headers since we can't send them over WebSocket
(telemTimeIO_DT, resp) <- E.ExecStepDB _responseHeaders exists remoteJoins -> doQErr $ do
AB.dispatchAnyBackend @BackendTransport (telemTimeIO_DT, resp) <-
exists AB.dispatchAnyBackend @BackendTransport
\(EB.DBStepInfo _ sourceConfig genSql tx :: EB.DBStepInfo b) -> exists
runDBMutation @b \(EB.DBStepInfo _ sourceConfig genSql tx :: EB.DBStepInfo b) ->
requestId runDBMutation @b
q requestId
fieldName q
userInfo fieldName
logger userInfo
sourceConfig logger
tx sourceConfig
genSql tx
finalResponse <- genSql
RJ.processRemoteJoins requestId logger env httpMgr reqHdrs userInfo resp remoteJoins q finalResponse <-
pure $ AnnotatedResponsePart telemTimeIO_DT Telem.Local finalResponse [] RJ.processRemoteJoins requestId logger env httpMgr reqHdrs userInfo resp remoteJoins q
E.ExecStepAction actionExecPlan _ remoteJoins -> do pure $ AnnotatedResponsePart telemTimeIO_DT Telem.Local finalResponse []
logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindAction E.ExecStepAction actionExecPlan _ remoteJoins -> do
(time, (resp, hdrs)) <- doQErr $ do logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindAction
(time, (resp, hdrs)) <- EA.runActionExecution userInfo actionExecPlan (time, (resp, hdrs)) <- doQErr $ do
finalResponse <- (time, (resp, hdrs)) <- EA.runActionExecution userInfo actionExecPlan
RJ.processRemoteJoins requestId logger env httpMgr reqHdrs userInfo resp remoteJoins q finalResponse <-
pure (time, (finalResponse, hdrs)) RJ.processRemoteJoins requestId logger env httpMgr reqHdrs userInfo resp remoteJoins q
pure $ AnnotatedResponsePart time Telem.Empty resp $ fromMaybe [] hdrs pure (time, (finalResponse, hdrs))
E.ExecStepRemote rsi resultCustomizer gqlReq remoteJoins -> do pure $ AnnotatedResponsePart time Telem.Empty resp $ fromMaybe [] hdrs
logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindRemoteSchema E.ExecStepRemote rsi resultCustomizer gqlReq remoteJoins -> do
runRemoteGQ requestId q fieldName userInfo reqHdrs rsi resultCustomizer gqlReq remoteJoins logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindRemoteSchema
E.ExecStepRaw json -> do runRemoteGQ requestId q fieldName userInfo reqHdrs rsi resultCustomizer gqlReq remoteJoins
logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindIntrospection E.ExecStepRaw json -> do
buildRaw json logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindIntrospection
buildRaw json
E.ExecStepMulti lst -> do
allResponses <- traverse getResponse lst
pure $ AnnotatedResponsePart 0 Telem.Local (encJFromList (map arpResponse allResponses)) []
in getResponse
sendResultFromFragments Telem.Query timerTot requestId conclusion opName parameterizedQueryHash gqlOpType sendResultFromFragments Telem.Query timerTot requestId conclusion opName parameterizedQueryHash gqlOpType
liftIO $ sendCompleted (Just requestId) (Just parameterizedQueryHash) liftIO $ sendCompleted (Just requestId) (Just parameterizedQueryHash)
E.SubscriptionExecutionPlan subExec -> do E.SubscriptionExecutionPlan subExec -> do
@ -577,3 +577,32 @@ _Node = [G.name|Node|]
___hasura_internal_typename :: G.Name ___hasura_internal_typename :: G.Name
___hasura_internal_typename = [G.name|__hasura_internal_typename|] ___hasura_internal_typename = [G.name|__hasura_internal_typename|]
-- * Apollo Federation
__service :: G.Name
__service = [G.name|_service|]
_key :: G.Name
_key = [G.name|key|]
_fields :: G.Name
_fields = [G.name|fields|]
_representations :: G.Name
_representations = [G.name|representations|]
__Any :: G.Name
__Any = [G.name|_Any|]
_sdl :: G.Name
_sdl = [G.name|sdl|]
__Service :: G.Name
__Service = [G.name|_Service|]
__Entity :: G.Name
__Entity = [G.name|_Entity|]
__entities :: G.Name
__entities = [G.name|_entities|]
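For reference, these names correspond to the federation-specific parts of the generated schema. A rough SDL sketch, with the shapes inferred from the introspection test fixture further down (treat the exact nullability as illustrative, and `user` as a stand-in for any table with `apollo_federation_config` enabled):

```graphql
scalar _Any          # opaque entity representation sent by the gateway

type _Service {
  sdl: String        # serialized subgraph schema
}

union _Entity = user # one member per Apollo-federation-enabled table

type Query {
  # other root fields elided
  _service: _Service!
  _entities(representations: [_Any!]!): _Entity
}
```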
@ -5,7 +5,9 @@
-- | Types/functions shared between modules that implement "Hasura.RQL.DDL.Schema.Cache". Other -- | Types/functions shared between modules that implement "Hasura.RQL.DDL.Schema.Cache". Other
-- modules should not import this module directly. -- modules should not import this module directly.
module Hasura.RQL.DDL.Schema.Cache.Common module Hasura.RQL.DDL.Schema.Cache.Common
( BuildOutputs (..), ( ApolloFederationConfig (..),
ApolloFederationVersion (..),
BuildOutputs (..),
CacheBuild, CacheBuild,
CacheBuildParams (CacheBuildParams), CacheBuildParams (CacheBuildParams),
InvalidationKeys (..), InvalidationKeys (..),
@ -112,9 +114,10 @@ invalidateKeys CacheInvalidations {..} InvalidationKeys {..} =
invalidate = M.alter $ Just . maybe Inc.initialInvalidationKey Inc.invalidate invalidate = M.alter $ Just . maybe Inc.initialInvalidationKey Inc.invalidate
data TableBuildInput b = TableBuildInput data TableBuildInput b = TableBuildInput
{ _tbiName :: !(TableName b), { _tbiName :: TableName b,
_tbiIsEnum :: !Bool, _tbiIsEnum :: Bool,
_tbiConfiguration :: !(TableConfig b) _tbiConfiguration :: TableConfig b,
_tbiApolloFederationConfig :: Maybe ApolloFederationConfig
} }
deriving (Show, Eq, Generic) deriving (Show, Eq, Generic)
@ -151,7 +154,7 @@ mkTableInputs ::
mkTableInputs TableMetadata {..} = mkTableInputs TableMetadata {..} =
(buildInput, nonColumns, permissions) (buildInput, nonColumns, permissions)
where where
buildInput = TableBuildInput _tmTable _tmIsEnum _tmConfiguration buildInput = TableBuildInput _tmTable _tmIsEnum _tmConfiguration _tmApolloFederationConfig
nonColumns = nonColumns =
NonColumnTableInputs NonColumnTableInputs
_tmTable _tmTable
@ -64,7 +64,8 @@ import Language.GraphQL.Draft.Syntax qualified as G
data TrackTable b = TrackTable data TrackTable b = TrackTable
{ tSource :: !SourceName, { tSource :: !SourceName,
tName :: !(TableName b), tName :: !(TableName b),
tIsEnum :: !Bool tIsEnum :: !Bool,
tApolloFedConfig :: !(Maybe ApolloFederationConfig)
} }
deriving instance (Backend b) => Show (TrackTable b) deriving instance (Backend b) => Show (TrackTable b)
@ -79,7 +80,8 @@ instance (Backend b) => FromJSON (TrackTable b) where
<$> o .:? "source" .!= defaultSource <$> o .:? "source" .!= defaultSource
<*> o .: "table" <*> o .: "table"
<*> o .:? "is_enum" .!= False <*> o .:? "is_enum" .!= False
withoutOptions = TrackTable defaultSource <$> parseJSON v <*> pure False <*> o .:? "apollo_federation_config"
withoutOptions = TrackTable defaultSource <$> parseJSON v <*> pure False <*> pure Nothing
data SetTableIsEnum = SetTableIsEnum data SetTableIsEnum = SetTableIsEnum
{ stieSource :: !SourceName, { stieSource :: !SourceName,
@ -211,8 +213,9 @@ trackExistingTableOrViewP2 ::
TableName b -> TableName b ->
Bool -> Bool ->
TableConfig b -> TableConfig b ->
Maybe ApolloFederationConfig ->
m EncJSON m EncJSON
trackExistingTableOrViewP2 source tableName isEnum config = do trackExistingTableOrViewP2 source tableName isEnum config apolloFedConfig = do
sc <- askSchemaCache sc <- askSchemaCache
{- {-
The next line does more than what it says on the tin. Removing the following The next line does more than what it says on the tin. Removing the following
@ -223,7 +226,7 @@ trackExistingTableOrViewP2 source tableName isEnum config = do
memory usage happens even when no substantial GraphQL schema is generated. memory usage happens even when no substantial GraphQL schema is generated.
-} -}
checkConflictingNode sc $ snakeCaseTableName @b tableName checkConflictingNode sc $ snakeCaseTableName @b tableName
let metadata = mkTableMeta tableName isEnum config let metadata = tmApolloFederationConfig .~ apolloFedConfig $ mkTableMeta tableName isEnum config
buildSchemaCacheFor buildSchemaCacheFor
( MOSourceObjId source $ ( MOSourceObjId source $
AB.mkAnyBackend $ AB.mkAnyBackend $
@ -238,9 +241,9 @@ runTrackTableQ ::
(MonadError QErr m, CacheRWM m, MetadataM m, BackendMetadata b) => (MonadError QErr m, CacheRWM m, MetadataM m, BackendMetadata b) =>
TrackTable b -> TrackTable b ->
m EncJSON m EncJSON
runTrackTableQ (TrackTable source qt isEnum) = do runTrackTableQ (TrackTable source qt isEnum apolloFedConfig) = do
trackExistingTableOrViewP1 @b source qt trackExistingTableOrViewP1 @b source qt
trackExistingTableOrViewP2 @b source qt isEnum emptyTableConfig trackExistingTableOrViewP2 @b source qt isEnum emptyTableConfig apolloFedConfig
data TrackTableV2 b = TrackTableV2 data TrackTableV2 b = TrackTableV2
{ ttv2Table :: !(TrackTable b), { ttv2Table :: !(TrackTable b),
@ -259,9 +262,9 @@ runTrackTableV2Q ::
(MonadError QErr m, CacheRWM m, MetadataM m, BackendMetadata b) => (MonadError QErr m, CacheRWM m, MetadataM m, BackendMetadata b) =>
TrackTableV2 b -> TrackTableV2 b ->
m EncJSON m EncJSON
runTrackTableV2Q (TrackTableV2 (TrackTable source qt isEnum) config) = do runTrackTableV2Q (TrackTableV2 (TrackTable source qt isEnum apolloFedConfig) config) = do
trackExistingTableOrViewP1 @b source qt trackExistingTableOrViewP1 @b source qt
trackExistingTableOrViewP2 @b source qt isEnum config trackExistingTableOrViewP2 @b source qt isEnum config apolloFedConfig
runSetExistingTableIsEnumQ :: (MonadError QErr m, CacheRWM m, MetadataM m) => SetTableIsEnum -> m EncJSON runSetExistingTableIsEnumQ :: (MonadError QErr m, CacheRWM m, MetadataM m) => SetTableIsEnum -> m EncJSON
runSetExistingTableIsEnumQ (SetTableIsEnum source tableName isEnum) = do runSetExistingTableIsEnumQ (SetTableIsEnum source tableName isEnum) = do
@ -468,7 +471,7 @@ buildTableCache = Inc.cache proc (source, sourceConfig, dbTablesMeta, tableBuild
) )
(TableCoreInfoG b (RawColumnInfo b) (Column b)) (TableCoreInfoG b (RawColumnInfo b) (Column b))
buildRawTableInfo = Inc.cache proc (tableBuildInput, maybeInfo, sourceConfig, reloadMetadataInvalidationKey) -> do buildRawTableInfo = Inc.cache proc (tableBuildInput, maybeInfo, sourceConfig, reloadMetadataInvalidationKey) -> do
let TableBuildInput name isEnum config = tableBuildInput let TableBuildInput name isEnum config apolloFedConfig = tableBuildInput
metadataTable <- metadataTable <-
(| (|
onNothingA onNothingA
@ -505,7 +508,8 @@ buildTableCache = Inc.cache proc (source, sourceConfig, dbTablesMeta, tableBuild
_tciEnumValues = enumValues, _tciEnumValues = enumValues,
_tciCustomConfig = config, _tciCustomConfig = config,
_tciDescription = description, _tciDescription = description,
_tciExtraTableMetadata = _ptmiExtraTableMetadata metadataTable _tciExtraTableMetadata = _ptmiExtraTableMetadata metadataTable,
_tciApolloFederationConfig = apolloFedConfig
} }
-- Step 2: Process the raw table cache to replace Postgres column types with logical column -- Step 2: Process the raw table cache to replace Postgres column types with logical column
@ -42,6 +42,7 @@ data RootField (db :: BackendType -> Type) remote action raw where
RFRemote :: remote -> RootField db remote action raw RFRemote :: remote -> RootField db remote action raw
RFAction :: action -> RootField db remote action raw RFAction :: action -> RootField db remote action raw
RFRaw :: raw -> RootField db remote action raw RFRaw :: raw -> RootField db remote action raw
RFMulti :: [RootField db remote action raw] -> RootField db remote action raw
data MutationDB (b :: BackendType) (r :: Type) v data MutationDB (b :: BackendType) (r :: Type) v
= MDBInsert (AnnotatedInsert b r v) = MDBInsert (AnnotatedInsert b r v)
@ -45,10 +45,14 @@ module Hasura.RQL.Types.Common
commentToMaybeText, commentToMaybeText,
commentFromMaybeText, commentFromMaybeText,
EnvRecord (..), EnvRecord (..),
ApolloFederationConfig (..),
ApolloFederationVersion (..),
isApolloFedV1enabled,
) )
where where
import Data.Aeson import Data.Aeson
import Data.Aeson qualified as J
import Data.Aeson.Casing import Data.Aeson.Casing
import Data.Aeson.TH import Data.Aeson.TH
import Data.Aeson.Types (prependFailure, typeMismatch) import Data.Aeson.Types (prependFailure, typeMismatch)
@ -544,3 +548,34 @@ instance (ToJSON a) => ToJSON (EnvRecord a) where
toJSON (EnvRecord envVar _envValue) = object ["env_var" .= envVar] toJSON (EnvRecord envVar _envValue) = object ["env_var" .= envVar]
instance (FromJSON a) => FromJSON (EnvRecord a) instance (FromJSON a) => FromJSON (EnvRecord a)
data ApolloFederationVersion = V1 deriving (Show, Eq, Generic)
instance Cacheable ApolloFederationVersion
instance ToJSON ApolloFederationVersion where
toJSON V1 = J.String "v1"
instance FromJSON ApolloFederationVersion where
parseJSON = withText "ApolloFederationVersion" $
\case
"v1" -> pure V1
_ -> fail "enable takes the version of apollo federation. Supported value is v1 only."
instance NFData ApolloFederationVersion
data ApolloFederationConfig = ApolloFederationConfig
{ enable :: !ApolloFederationVersion
}
deriving (Show, Eq, Generic)
instance Cacheable ApolloFederationConfig
instance ToJSON ApolloFederationConfig
instance FromJSON ApolloFederationConfig
instance NFData ApolloFederationConfig
isApolloFedV1enabled :: Maybe ApolloFederationConfig -> Bool
isApolloFedV1enabled = isJust
@ -91,6 +91,7 @@ module Hasura.RQL.Types.Metadata
tmComputedFields, tmComputedFields,
tmConfiguration, tmConfiguration,
tmDeletePermissions, tmDeletePermissions,
tmApolloFederationConfig,
tmEventTriggers, tmEventTriggers,
tmInsertPermissions, tmInsertPermissions,
tmIsEnum, tmIsEnum,
@ -323,7 +324,8 @@ data TableMetadata b = TableMetadata
_tmSelectPermissions :: !(Permissions (SelPermDef b)), _tmSelectPermissions :: !(Permissions (SelPermDef b)),
_tmUpdatePermissions :: !(Permissions (UpdPermDef b)), _tmUpdatePermissions :: !(Permissions (UpdPermDef b)),
_tmDeletePermissions :: !(Permissions (DelPermDef b)), _tmDeletePermissions :: !(Permissions (DelPermDef b)),
_tmEventTriggers :: !(EventTriggers b) _tmEventTriggers :: !(EventTriggers b),
_tmApolloFederationConfig :: !(Maybe ApolloFederationConfig)
} }
deriving (Generic) deriving (Generic)
@ -353,6 +355,7 @@ mkTableMeta qt isEnum config =
mempty mempty
mempty mempty
mempty mempty
Nothing
instance (Backend b) => FromJSON (TableMetadata b) where instance (Backend b) => FromJSON (TableMetadata b) where
parseJSON = withObject "Object" $ \o -> do parseJSON = withObject "Object" $ \o -> do
@ -375,6 +378,7 @@ instance (Backend b) => FromJSON (TableMetadata b) where
<*> parseListAsMap "update permissions" _pdRole (o .:? upKey .!= []) <*> parseListAsMap "update permissions" _pdRole (o .:? upKey .!= [])
<*> parseListAsMap "delete permissions" _pdRole (o .:? dpKey .!= []) <*> parseListAsMap "delete permissions" _pdRole (o .:? dpKey .!= [])
<*> parseListAsMap "event triggers" etcName (o .:? etKey .!= []) <*> parseListAsMap "event triggers" etcName (o .:? etKey .!= [])
<*> o .:? enableAFKey
where where
tableKey = "table" tableKey = "table"
isEnumKey = "is_enum" isEnumKey = "is_enum"
@ -388,6 +392,7 @@ instance (Backend b) => FromJSON (TableMetadata b) where
etKey = "event_triggers" etKey = "event_triggers"
cfKey = "computed_fields" cfKey = "computed_fields"
rrKey = "remote_relationships" rrKey = "remote_relationships"
enableAFKey = "apollo_federation_config"
getUnexpectedKeys o = getUnexpectedKeys o =
HS.fromList (KM.keys o) `HS.difference` expectedKeySet HS.fromList (KM.keys o) `HS.difference` expectedKeySet
@ -405,7 +410,8 @@ instance (Backend b) => FromJSON (TableMetadata b) where
dpKey, dpKey,
etKey, etKey,
cfKey, cfKey,
rrKey rrKey,
enableAFKey
] ]
data FunctionMetadata b = FunctionMetadata data FunctionMetadata b = FunctionMetadata
@ -932,6 +938,7 @@ metadataToOrdJSON
updatePermissions updatePermissions
deletePermissions deletePermissions
eventTriggers eventTriggers
enableApolloFed
) = ) =
AO.object $ AO.object $
[("table", AO.toOrdered table)] [("table", AO.toOrdered table)]
@ -946,10 +953,12 @@ metadataToOrdJSON
selectPermissionsPair, selectPermissionsPair,
updatePermissionsPair, updatePermissionsPair,
deletePermissionsPair, deletePermissionsPair,
eventTriggersPair eventTriggersPair,
apolloFedConfigPair
] ]
where where
isEnumPair = if isEnum then Just ("is_enum", AO.toOrdered isEnum) else Nothing isEnumPair = if isEnum then Just ("is_enum", AO.toOrdered isEnum) else Nothing
apolloFedConfigPair = fmap (\afConfig -> ("apollo_federation_config", AO.toOrdered afConfig)) enableApolloFed
configPair = configPair =
if config == emptyTableConfig if config == emptyTableConfig
then Nothing then Nothing
@ -66,6 +66,7 @@ module Hasura.RQL.Types.Table
tcColumnConfig, tcColumnConfig,
tciCustomConfig, tciCustomConfig,
tciDescription, tciDescription,
tciApolloFederationConfig,
tciEnumValues, tciEnumValues,
tciExtraTableMetadata, tciExtraTableMetadata,
tciFieldInfoMap, tciFieldInfoMap,
@ -867,7 +868,8 @@ data TableCoreInfoG (b :: BackendType) field primaryKeyColumn = TableCoreInfo
_tciViewInfo :: Maybe ViewInfo, _tciViewInfo :: Maybe ViewInfo,
_tciEnumValues :: Maybe EnumValues, _tciEnumValues :: Maybe EnumValues,
_tciCustomConfig :: TableConfig b, _tciCustomConfig :: TableConfig b,
_tciExtraTableMetadata :: ExtraTableMetadata b _tciExtraTableMetadata :: ExtraTableMetadata b,
_tciApolloFederationConfig :: Maybe ApolloFederationConfig
} }
deriving (Generic) deriving (Generic)
@ -798,11 +798,12 @@ parseExperimentalFeatures =
experimentalFeaturesEnv :: (String, String) experimentalFeaturesEnv :: (String, String)
experimentalFeaturesEnv = experimentalFeaturesEnv =
( "HASURA_GRAPHQL_EXPERIMENTAL_FEATURES", ( "HASURA_GRAPHQL_EXPERIMENTAL_FEATURES",
"Comma separated list of experimental features. (all: inherited_roles,optimize_permission_filters and naming_convention, streaming_subscriptions). " "Comma separated list of experimental features. (all: inherited_roles,optimize_permission_filters and naming_convention, streaming_subscriptions, apollo_federation). "
<> "optimize_permission_filters: Use experimental SQL optimization" <> "optimize_permission_filters: Use experimental SQL optimization"
<> "transformations for permission filters. " <> "transformations for permission filters. "
<> "inherited_roles: ignored; inherited roles cannot be switched off" <> "inherited_roles: ignored; inherited roles cannot be switched off"
<> "naming_convention: apply naming convention (graphql-default/hasura-default) based on source customization" <> "naming_convention: apply naming convention (graphql-default/hasura-default) based on source customization"
<> "apollo_federation: use hasura as a subgraph in an Apollo gateway"
-- TODO(SOLOMON): Write a description of this experimental feature: -- TODO(SOLOMON): Write a description of this experimental feature:
-- <> "streaming_subscriptions: ..." -- <> "streaming_subscriptions: ..."
) )
@ -170,10 +170,11 @@ instance FromEnv [ExperimentalFeature] where
"streaming_subscriptions" -> Right EFStreamingSubscriptions "streaming_subscriptions" -> Right EFStreamingSubscriptions
"optimize_permission_filters" -> Right EFOptimizePermissionFilters "optimize_permission_filters" -> Right EFOptimizePermissionFilters
"naming_convention" -> Right EFNamingConventions "naming_convention" -> Right EFNamingConventions
"apollo_federation" -> Right EFApolloFederation
_ -> _ ->
Left $ Left $
"Only expecting list of comma separated experimental features, options are:" "Only expecting list of comma separated experimental features, options are:"
++ "inherited_roles, streaming_subscriptions, optimize_permission_filters, naming_convention" ++ "inherited_roles, streaming_subscriptions, optimize_permission_filters, naming_convention, apollo_federation"
instance FromEnv ES.BatchSize where instance FromEnv ES.BatchSize where
fromEnv s = do fromEnv s = do
@ -70,6 +70,7 @@ data ExperimentalFeature
| EFOptimizePermissionFilters | EFOptimizePermissionFilters
| EFNamingConventions | EFNamingConventions
| EFStreamingSubscriptions | EFStreamingSubscriptions
| EFApolloFederation
deriving (Show, Eq, Generic) deriving (Show, Eq, Generic)
instance Hashable ExperimentalFeature instance Hashable ExperimentalFeature
@ -80,7 +81,8 @@ instance FromJSON ExperimentalFeature where
"optimize_permission_filters" -> pure EFOptimizePermissionFilters "optimize_permission_filters" -> pure EFOptimizePermissionFilters
"naming_convention" -> pure EFNamingConventions "naming_convention" -> pure EFNamingConventions
"streaming_subscriptions" -> pure EFStreamingSubscriptions "streaming_subscriptions" -> pure EFStreamingSubscriptions
_ -> fail "ExperimentalFeature can only be one of these value: inherited_roles, optimize_permission_filters, naming_convention or streaming_subscriptions" "apollo_federation" -> pure EFApolloFederation
_ -> fail "ExperimentalFeature can only be one of these value: inherited_roles, optimize_permission_filters, naming_convention, streaming_subscriptions or apollo_federation"
instance ToJSON ExperimentalFeature where instance ToJSON ExperimentalFeature where
toJSON = \case toJSON = \case
@ -88,6 +90,7 @@ instance ToJSON ExperimentalFeature where
EFOptimizePermissionFilters -> "optimize_permission_filters" EFOptimizePermissionFilters -> "optimize_permission_filters"
EFNamingConventions -> "naming_convention" EFNamingConventions -> "naming_convention"
EFStreamingSubscriptions -> "streaming_subscriptions" EFStreamingSubscriptions -> "streaming_subscriptions"
EFApolloFederation -> "apollo_federation"
data MaintenanceMode a = MaintenanceModeEnabled a | MaintenanceModeDisabled data MaintenanceMode a = MaintenanceModeEnabled a | MaintenanceModeDisabled
deriving (Show, Eq) deriving (Show, Eq)


@@ -38,12 +38,12 @@ fakeInputFieldValue (InputFieldInfo t _) = go t
    go :: forall k. ('Input <: k) => Type k -> G.Value Variable
    go = \case
      TList _ t' -> G.VList [go t', go t']
-     TNamed _ (Definition name _ _ info) -> case (info, subKind @'Input @k) of
+     TNamed _ (Definition name _ _ _ info) -> case (info, subKind @'Input @k) of
        (TIScalar, _) -> fakeScalar name
        (TIEnum ei, _) -> G.VEnum $ G.EnumValue $ dName $ NE.head ei
        (TIInputObject (InputObjectInfo oi), _) -> G.VObject $
          M.fromList $ do
-           Definition fieldName _ _ fieldInfo <- oi
+           Definition fieldName _ _ _ fieldInfo <- oi
            pure (fieldName, fakeInputFieldValue fieldInfo)
        _ -> error "fakeInputFieldValue: non-exhaustive. FIXME"

@@ -51,5 +51,5 @@ fakeDirective :: DirectiveInfo -> G.Directive Variable
fakeDirective DirectiveInfo {..} =
  G.Directive diName $
    M.fromList $
-     diArguments <&> \(Definition argName _ _ argInfo) ->
+     diArguments <&> \(Definition argName _ _ _ argInfo) ->
        (argName, fakeInputFieldValue argInfo)


@@ -146,7 +146,8 @@ mkParser table cib =
        _tciViewInfo = Nothing,
        _tciEnumValues = Nothing,
        _tciCustomConfig = tableConfig,
-       _tciExtraTableMetadata = ()
+       _tciExtraTableMetadata = (),
+       _tciApolloFederationConfig = Nothing
      }

    pk :: Maybe (PrimaryKey PG (ColumnInfo PG))


@@ -0,0 +1,28 @@
description: Resolve a user entity through the Apollo federation _entities root field
url: /v1/graphql
status: 200
query:
  query: |
    query EntitiesTest($representations: [_Any!]!) {
      _entities(representations: $representations) {
        ... on user {
          id
          email
          name
          is_admin
        }
      }
    }
  variables:
    representations:
    - __typename: user
      id: 1
response:
  data:
    _entities:
    - id: 1
      email: foo@email.com
      name: foo
      is_admin: false


@@ -0,0 +1,68 @@
description: Introspection to check query fields and their types
url: /v1/graphql
status: 200
query:
  query: |
    query {
      __schema {
        queryType {
          fields {
            name
            type {
              name
              kind
              ofType {
                name
                kind
                ofType {
                  name
                  kind
                }
              }
            }
          }
        }
      }
    }
response:
  data:
    __schema:
      queryType:
        fields:
        - name: _entities
          type:
            name: _Entity
            kind: UNION
            ofType:
        - name: _service
          type:
            name:
            kind: NON_NULL
            ofType:
              name: _Service
              kind: OBJECT
              ofType:
        - name: user
          type:
            name:
            kind: NON_NULL
            ofType:
              name:
              kind: LIST
              ofType:
                name:
                kind: NON_NULL
        - name: user_aggregate
          type:
            name:
            kind: NON_NULL
            ofType:
              name: user_aggregate
              kind: OBJECT
              ofType:
        - name: user_by_pk
          type:
            name: user
            kind: OBJECT
            ofType:


@@ -0,0 +1,24 @@
type: bulk
args:
- type: run_sql
  args:
    sql: |
      CREATE TABLE "user"(
        id SERIAL PRIMARY KEY,
        name TEXT NOT NULL,
        email TEXT NOT NULL,
        is_admin BOOLEAN NOT NULL DEFAULT false
      );
      INSERT INTO "user" (id, name, email) VALUES
        (1, 'foo', 'foo@email.com'),
        (2, 'bar', 'bar@email.com'),
        (3, 'bar', 'bar@email.com'),
        (4, 'baz', 'baz@email.com');
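# Track the table and enable Apollo federation for it via apollo_federation_config.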
- type: track_table
  args:
    table: user
    schema: public
    apollo_federation_config:
      enable: v1


@@ -0,0 +1,7 @@
type: bulk
args:
- type: run_sql
  args:
    cascade: true
    sql: |
      DROP TABLE "user";


@@ -0,0 +1,21 @@
const { ApolloServer } = require('apollo-server');
const { ApolloGateway } = require("@apollo/gateway");
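
// Federated gateway used by test_apollo_federation.py: composes the HGE GraphQL
// endpoint (HGE_URL) with the standalone subgraph on localhost:4003; serves on port 4004.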
const gateway = new ApolloGateway({
  serviceList: [
    { name: 'hge', url: process.env.HGE_URL + "/v1/graphql" },
    { name: 'other', url: 'http://localhost:4003/' }
  ],
  introspectionHeaders: {
    'x-hasura-admin-secret': process.env.HASURA_GRAPHQL_ADMIN_SECRET
  }
});

const server = new ApolloServer({
  gateway,
  subscriptions: false
});

server.listen(4004).then(({ url }) => {
  console.log(`🚀 Server ready at ${url}`);
});


@@ -0,0 +1,20 @@
const { ApolloServer } = require('apollo-server');
const { ApolloGateway } = require("@apollo/gateway");
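
// Federated gateway with HGE as the only subgraph; serves on PORT (default 4002).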
const gateway = new ApolloGateway({
  serviceList: [
    { name: 'hge', url: process.env.HGE_URL + "/v1/graphql" }
  ],
  introspectionHeaders: {
    'x-hasura-admin-secret': process.env.HASURA_GRAPHQL_ADMIN_SECRET
  }
});

const server = new ApolloServer({
  gateway,
  subscriptions: false
});

server.listen({ port: process.env.PORT || 4002 }).then(({ url }) => {
  console.log(`🚀 Server ready at ${url}`);
});


@@ -0,0 +1,54 @@
const { ApolloServer, gql } = require('apollo-server');
const { buildSubgraphSchema } = require('@apollo/subgraph');
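
// Standalone Apollo subgraph: extends the user entity owned by HGE (keyed on id)
// with a city field, and serves on port 4003.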
const user = [
  {
    id: 1,
    city: 'New York'
  },
  {
    id: 2,
    city: 'Bangalore'
  },
  {
    id: 3,
    city: 'Melbourne'
  },
  {
    id: 4,
    city: 'New Delhi'
  }
];

const typeDefs = gql`
  extend schema
    @link(url: "https://specs.apollo.dev/federation/v2.0",
          import: ["@key", "@extends", "@external", "@shareable"])

  type Query {
    getUserData(id: Int!): user
  }

  type user @key(fields: "id") @extends {
    id: Int! @external
    city: String
  }
`;

const resolvers = {
  Query: {
    getUserData(parent, args, context, info) {
      return user.find(user => user.id === args.id);
    }
  }
}

const server = new ApolloServer({
  schema: buildSubgraphSchema({ typeDefs, resolvers })
});

server.listen(4003).then(({ url }) => {
  console.log(`🚀 Server ready at ${url}`);
});

(File diff suppressed because it is too large.)


@@ -10,8 +10,10 @@
   "author": "",
   "license": "ISC",
   "dependencies": {
-    "apollo-server": "2.1.0",
-    "graphql": "14.2.1",
-    "graphql-tag": "2.10.1"
+    "apollo-server": "3.8.1",
+    "@apollo/gateway": "2.0.3",
+    "graphql": "16.5.0",
+    "graphql-tag": "2.10.1",
+    "@apollo/subgraph": "2.0.4"
   }
 }


@@ -0,0 +1,87 @@
#!/usr/bin/env python3
import os
import pytest
import requests
from remote_server import NodeGraphQL
from validate import check_query_f
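
# Helper used by the gateway tests below: POST a GraphQL query to the given URL.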
def make_request(url, query):
    print('Sending request to the local federated server')
    payload = {'query': query}
    resp = requests.post(url, json=payload)
    return resp

@pytest.mark.skipif(
    os.getenv('HASURA_GRAPHQL_EXPERIMENTAL_FEATURES') is None or
    not 'apollo_federation' in os.getenv('HASURA_GRAPHQL_EXPERIMENTAL_FEATURES'),
    reason="This test expects the (apollo_federation) experimental feature turned on")
@pytest.mark.usefixtures('per_class_tests_db_state')
class TestApolloFederation:

    @classmethod
    def dir(cls):
        return 'queries/apollo_federation'

    def test_apollo_federated_server_with_hge_only(self, hge_ctx):
        # start the node server
        fed_server = NodeGraphQL(["node", "remote_schemas/nodejs/apollo_federated_server_with_hge_only.js"])
        fed_server.start()
        url = 'http://localhost:4002'

        # run a GQL query
        gql_query = """
            query {
              user_by_pk(id: 1) {
                id
                name
              }
            }
        """
        resp = make_request(url, gql_query)

        # stop the node server
        fed_server.stop()

        # check if everything was okay
        assert resp.status_code == 200, resp.text
        assert 'data' in resp.text

    def test_apollo_federated_server_with_hge_and_apollo_graphql_server(self, hge_ctx):
        # start the node servers
        server_1 = NodeGraphQL(["node", "remote_schemas/nodejs/apollo_server_1.js"])
        fed_server = NodeGraphQL(["node", "remote_schemas/nodejs/apollo_federated_server_with_hge_and_server1.js"])
        server_1.start()
        fed_server.start()
        url = 'http://localhost:4004'

        # run a GQL query
        gql_query = """
            query {
              getUserData(id: 1) {
                id
                name
                city
                email
              }
            }
        """
        resp = make_request(url, gql_query)

        # stop the node servers
        fed_server.stop()
        server_1.stop()

        # check if everything was okay
        assert resp.status_code == 200, resp.text
        assert 'data' in resp.text

    def test_apollo_federation_fields(self, hge_ctx):
        check_query_f(hge_ctx, self.dir() + '/root_fields.yaml')

    def test_apollo_federation_entities(self, hge_ctx):
        check_query_f(hge_ctx, self.dir() + '/entities.yaml')