Remote Schema Customization take 2 using parser transformations

https://github.com/hasura/graphql-engine-mono/pull/1740

GitOrigin-RevId: e807952058243a97f67cd9969fa434933a08652f
David Overton 2021-07-30 21:33:06 +10:00 committed by hasura-bot
parent 61663ec901
commit 1abb1dee69
75 changed files with 6720 additions and 324 deletions

View File

@ -83,6 +83,7 @@ NOTE: This only includes the diff between v2.0.0 and v2.0.0-beta.2
faster query responses.
- server: BigQuery: various bug fixes related to aggregations
- server: fix add source API wiping out source's metadata when replace_configuration is true
- server: add support for customization of field names and type names when adding a remote schema
- console: add foreign key CRUD functionality to ms sql server tables
- console: allow tracking of custom SQL functions having composite type (rowtype) input arguments
- console: allow input object presets in remote schema permissions

View File

@ -45,7 +45,25 @@ An example request as follows:
"url": "https://remote-server.com/graphql",
"headers": [{"name": "X-Server-Request-From", "value": "Hasura"}],
"forward_client_headers": false,
"timeout_seconds": 60
"timeout_seconds": 60,
"customization": {
"root_fields_namespace": "some_field_name",
"type_names": {
"prefix": "some_type_name_prefix",
"suffix": "some_type_name_suffix",
"mapping": {
"some_type_name": "some_new_type_name"
}
},
"field_names": [ {
"parent_type": "some_type_name",
"prefix": "some_field_name_prefix",
"suffix": "some_field_name_suffix",
"mapping": {
"some_field_name": "some_new_field_name"
}
} ]
}
},
"comment": "some optional comment"
}

View File

@ -1115,9 +1115,124 @@ RemoteSchemaDef
}
],
"forward_client_headers": boolean,
"timeout_seconds": integer
"timeout_seconds": integer,
"customization": RemoteSchemaCustomization_
}
.. _RemoteSchemaCustomization:
RemoteSchemaCustomization
^^^^^^^^^^^^^^^^^^^^^^^^^
.. parsed-literal::
:class: haskell-pre
{
"root_fields_namespace": String,
"type_names": {
"prefix": String,
"suffix": String,
"mapping": {
String: String
}
},
"field_names": [
{ "parent_type": String,
"prefix": String,
"suffix": String,
"mapping": {
String: String
}
}
]
}
.. list-table::
:header-rows: 1
* - Key
- Required
- Schema
- Description
* - ``root_fields_namespace``
- false
- String
- If provided, the fields of the remote schema will be nested under this top-level field
* - ``type_names``
- false
- RemoteTypeCustomization_
- Customization of type names in the remote schema
* - ``field_names``
- false
- [RemoteFieldCustomization_]
- Customization of field names for types in the remote schema
.. _RemoteTypeCustomization:
RemoteTypeCustomization
^^^^^^^^^^^^^^^^^^^^^^^
.. list-table::
:header-rows: 1
* - Key
- Required
- Schema
- Description
* - ``prefix``
- false
- String
- Prefix applied to type names in the remote schema
* - ``suffix``
- false
- String
- Suffix applied to type names in the remote schema
* - ``mapping``
- false
- ``{String: String}``
- Explicit mapping of type names in the remote schema
Note: explicit mapping takes precedence over ``prefix`` and ``suffix``.
- Type name prefix and suffix will be applied to all types in the schema
except the root types (for query, mutation and subscription),
types starting with ``__``, standard scalar types (``Int``, ``Float``, ``String``, ``Boolean``, and ``ID``),
and types with an explicit mapping.
- Root types, types starting with ``__``, and standard scalar types may only be customized with an explicit mapping.
.. _RemoteFieldCustomization:
RemoteFieldCustomization
^^^^^^^^^^^^^^^^^^^^^^^^
.. list-table::
:header-rows: 1
* - Key
- Required
- Schema
- Description
* - ``parent_type``
- true
- String
- Name of the parent type (in the original remote schema) for fields to be customized
* - ``prefix``
- false
- String
- Prefix applied to field names in the parent type
* - ``suffix``
- false
- String
- Suffix applied to field names in the parent type
* - ``mapping``
- false
- ``{String: String}``
- Explicit mapping of field names in the parent type
Note: explicit mapping takes precedence over ``prefix`` and ``suffix``.
- Fields that are part of an interface must be renamed consistently across all object types that implement that interface.
.. _CollectionName:
CollectionName

View File

@ -0,0 +1,44 @@
# Remote Schema Customization
## Motivation
- Prevent name conflicts between remote schemas and other sources
- Allow customization of third-party schemas to better fit with local naming conventions
## Spec
Add an optional `customization` object to the `add_remote_schema` API with the following form:
```yaml
customization:
# if root_fields_namespace is absent, the fields
# are merged into the query root directly
root_fields_namespace: "something"
type_names:
prefix: some_prefix
suffix: some_suffix
# mapping takes precedence over prefix and suffix
mapping:
old_name: new_name
field_names:
- parent_type: old_type_name
prefix: some_prefix
suffix: some_suffix
# mapping takes precedence over prefix and suffix
mapping:
old_name: new_name
```
- Type name prefix and suffix will be applied to all types in the schema except the root types (for query, mutation and subscription), types starting with `__`, standard scalar types (`Int`, `Float`, `String`, `Boolean`, and `ID`), and types with an explicit mapping (see the sketch after this list).
- Root types, types starting with `__`, and standard scalar types may only be customized with an explicit mapping.
- Fields that are part of an interface must be renamed consistently across all object types that implement that interface.
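As an illustrative sketch only (the names `TypeCustomization`, `isExempt`, and `customizeTypeName` are hypothetical, not the engine's actual types), the renaming rules above can be expressed as a small pure function: explicit mapping wins, exempt types pass through unchanged, and everything else gets the prefix and suffix.

```haskell
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards   #-}
module TypeNameCustomizationSketch where

import qualified Data.HashMap.Strict as Map
import           Data.Maybe          (fromMaybe)
import           Data.Text           (Text)
import qualified Data.Text           as T

-- Hypothetical representation of the `type_names` customization above.
data TypeCustomization = TypeCustomization
  { tcPrefix  :: Maybe Text
  , tcSuffix  :: Maybe Text
  , tcMapping :: Map.HashMap Text Text
  }

-- Root types, "__"-prefixed types and the standard scalars are exempt from
-- prefix/suffix; they may only be renamed via an explicit mapping entry.
isExempt :: [Text] -> Text -> Bool
isExempt rootTypes name =
  name `elem` rootTypes
    || "__" `T.isPrefixOf` name
    || name `elem` ["Int", "Float", "String", "Boolean", "ID"]

-- Explicit mapping takes precedence over prefix and suffix.
customizeTypeName :: [Text] -> TypeCustomization -> Text -> Text
customizeTypeName rootTypes TypeCustomization{..} name =
  case Map.lookup name tcMapping of
    Just newName -> newName
    Nothing
      | isExempt rootTypes name -> name
      | otherwise -> fromMaybe "" tcPrefix <> name <> fromMaybe "" tcSuffix
```

For example, with prefix `foo_`, suffix `_bar`, and mapping `{Droid: Android}`, the type `Character` becomes `foo_Character_bar`, `Droid` becomes `Android`, and `Int` is left unchanged.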
## Implementation approach
- After obtaining the remote schema via introspection, we build customization functions (and their inverses) for the types and fields in the schema.
- Customizations are validated against the schema to ensure that:
- Field renamings of objects and interfaces are consistent
- Customization does not result in two types, or two fields of the same type, being renamed to the same name.
- The remote schema is customized using the customization functions to rename types and fields.
- The field parser generators are modified so that they recognise the customized schema, but generate GraphQL queries with the original names to be sent to the remote server
- We can use aliases on the fields to make sure the response object has the customized field names (see the sketch below).
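A minimal sketch of the forward/inverse bookkeeping and the aliasing trick described above, using hypothetical helpers (`buildRenameMaps`, `aliasedField`) rather than the engine's real API:

```haskell
{-# LANGUAGE OverloadedStrings #-}
module CustomizationInverseSketch where

import qualified Data.HashMap.Strict as Map
import           Data.Text           (Text)

-- Build forward (original -> customized) and inverse (customized -> original)
-- maps from a rename function, failing if two names collide after renaming.
-- Illustrative only; this is not the engine's actual implementation.
buildRenameMaps
  :: [Text]         -- ^ distinct names from the original remote schema
  -> (Text -> Text) -- ^ customization function (prefix/suffix/mapping)
  -> Either Text (Map.HashMap Text Text, Map.HashMap Text Text)
buildRenameMaps originals rename
  | Map.size inverse /= Map.size forward =
      Left "customization renames two different names to the same name"
  | otherwise = Right (forward, inverse)
  where
    forward = Map.fromList [(o, rename o) | o <- originals]
    inverse = Map.fromList [(rename o, o) | o <- originals]

-- The outgoing query uses the original field name but aliases it to the
-- customized name (e.g. "foo_hello: hello"), so the JSON response from the
-- remote server already carries the customized field names.
aliasedField :: Map.HashMap Text Text -> Text -> Text
aliasedField inverse customized =
  case Map.lookup customized inverse of
    Just original | original /= customized -> customized <> ": " <> original
    _ -> customized
```

For example, `buildRenameMaps ["hello", "big"] ("foo_" <>)` yields the forward map `{hello -> foo_hello, big -> foo_big}` and its inverse, and `aliasedField` then renders the field as `foo_hello: hello` in the query sent to the remote server.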

View File

@ -13,7 +13,7 @@ cat <<EOL
Run hasura benchmarks
Usage:
Usage:
$ $0 <benchmark_dir> [<hasura_docker_image>] [<sleep_time_sec_before_bench>]
The first argument chooses the particular benchmark set to run e.g. "chinook"
@ -29,7 +29,7 @@ exit 1
}
[ ! -d "benchmark_sets/${1-}" ] && die_usage
BENCH_DIR="benchmark_sets/$1"
BENCH_DIR="$(pwd)/benchmark_sets/$1"
REQUESTED_HASURA_DOCKER_IMAGE="${2-}"
# We may wish to sleep after setting up the schema, etc. to e.g. allow memory
# to settle to a baseline before we measure it:
@ -47,9 +47,17 @@ function cleanup {
|| echo "Stopping hasura failed, maybe it never started?"
fi
pg_cleanup || echo "Stopping postgres failed, maybe it never started?"
custom_cleanup || echo "Custom cleanup failed"
}
trap cleanup EXIT
if [ $(uname -s) = Darwin ]; then
DOCKER_LOCALHOST=host.docker.internal
else
DOCKER_LOCALHOST=127.0.0.1
fi
# The beefy c4.8xlarge EC2 instance has two sockets, so we'll try our best to
# pin hasura on one and postgres on the other
if taskset -c 17 sleep 0 ; then
@ -157,7 +165,7 @@ CONF_FLAGS=$(echo "$CONF" | sed -e 's/^/-c /' | tr '\n' ' ')
# numbers we get here are useful in absolute terms as well, representing ideal
# performance)
#
# [1]: https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSPerformance.html
# [1]: https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSPerformance.html
# [2]: https://performance.sunlight.io/postgres/
function pg_launch_container(){
echo_pretty "Launching postgres container: $PG_CONTAINER_NAME"
@ -188,10 +196,11 @@ function pg_cleanup(){
######################
# graphql-engine #
######################
# This matches the default we use in `dev.sh graphql-engine`
HASURA_GRAPHQL_SERVER_PORT=8181
HASURA_URL="http://127.0.0.1:$HASURA_GRAPHQL_SERVER_PORT"
HASURA_DOCKER_URL="http://$DOCKER_LOCALHOST:$HASURA_GRAPHQL_SERVER_PORT"
# Maybe launch the hasura instance we'll benchmark
function maybe_launch_hasura_container() {
@ -214,7 +223,7 @@ function maybe_launch_hasura_container() {
function hasura_wait() {
# Wait for the graphql-engine under bench to be ready
echo -n "Waiting for graphql-engine"
echo -n "Waiting for graphql-engine at $HASURA_URL"
until curl -s "$HASURA_URL/v1/query" &>/dev/null; do
echo -n '.' && sleep 0.2
done
@ -235,12 +244,12 @@ function install_latest_graphql_bench() {
echo_pretty "Installing/updating graphql-bench"
graphql_bench_git=$(mktemp -d -t graphql-bench-XXXXXXXXXX)
git clone --depth=1 https://github.com/hasura/graphql-bench.git "$graphql_bench_git"
cd "$graphql_bench_git"
# We name this 'graphql-bench-ci' so it doesn't interfere with other versions
# (e.g. local dev of `graphql-bench`, installed with `make
# build_local_docker_image`:
docker build -t graphql-bench-ci:latest ./app
docker build -t graphql-bench-ci:latest ./app
cd -
echo_pretty "Done"
}
@ -254,19 +263,49 @@ function run_benchmarks() {
$TASKSET_K6 docker run --net=host -v "$PWD":/app/tmp -i $K6_DOCKER_t_OR_init \
graphql-bench-ci query \
--config="./tmp/config.query.yaml" \
--outfile="./tmp/report.json" --url "$HASURA_URL/v1/graphql"
--outfile="./tmp/report.json" --url "$HASURA_DOCKER_URL/v1/graphql"
echo_pretty "Done. Report at $PWD/report.json"
cd -
}
function custom_setup() {
cd "$BENCH_DIR"
if [ -x setup.sh ]; then
echo_pretty "Running custom setup script"
./setup.sh
fi
cd -
}
function custom_cleanup() {
cd "$BENCH_DIR"
if [ -x cleanup.sh ]; then
echo_pretty "Running custom cleanup script"
./cleanup.sh
fi
cd -
}
function load_data_and_schema() {
echo_pretty "Loading data and adding schema"
cd "$BENCH_DIR"
gunzip -c dump.sql.gz | $PSQL_DOCKER &> /dev/null
# --fail-with-body is what we want, but is not available on older curl:
# TODO LATER: use /v1/metadata once stable
curl --fail -X POST -H "Content-Type: application/json" -d @replace_metadata.json "$HASURA_URL/v1/query"
if [ -f dump.sql.gz ]; then
gunzip -c dump.sql.gz | $PSQL_DOCKER &> /dev/null
else
echo_pretty "No data to load"
fi
if [ -f replace_metadata.json ]; then
# --fail-with-body is what we want, but is not available on older curl:
# TODO LATER: use /v1/metadata once stable
curl --fail -X POST -H "Content-Type: application/json" -d @replace_metadata.json "$HASURA_URL/v1/query"
else
echo_pretty "No metadata to replace"
fi
cd -
}
@ -284,5 +323,7 @@ pg_wait
maybe_launch_hasura_container
hasura_wait
custom_setup
load_data_and_schema
run_benchmarks

View File

@ -0,0 +1,6 @@
#!/usr/bin/env bash
CONTAINER_NAME=graphql-remote-server
echo "Stopping graphql remote server container: $CONTAINER_NAME"
docker stop $CONTAINER_NAME

View File

@ -0,0 +1,229 @@
# This tells graphql-bench that it's testing a hasura instance and should
# collect some additional metrics:
extended_hasura_checks: true
# headers:
# X-Hasura-Admin-Secret: my-secret
# Anchors to help us DRY below; settings here may be overridden selectively
constants:
scalars:
- &very_low_load 1
- &low_load 20
- &high_load 500
k6_custom: &k6_custom
tools: [k6]
execution_strategy: CUSTOM
settings: &settings
# This is equivalent to wrk2's approach:
executor: 'constant-arrival-rate'
timeUnit: '1s'
maxVUs: 500 # NOTE: required, else defaults to `preAllocatedVUs`
duration: '60s'
queries:
############################################################################
# A very simple query returning no rows. A baseline for the benchmark below.
- name: small_query_low_load
<<: *k6_custom
options:
k6:
scenarios:
main:
<<: *settings
rate: *low_load
# tune this so it's just high enough that we can expect to not need
# to allocate during the test:
preAllocatedVUs: 10
query: |
query MyQuery {
heroes {
name
id
__typename
}
}
- name: small_query_low_load_customized
<<: *k6_custom
options:
k6:
scenarios:
main:
<<: *settings
rate: *low_load
# tune this so it's just high enough that we can expect to not need
# to allocate during the test:
preAllocatedVUs: 10
query: |
query MyQuery {
foo {
heroes {
foo_name
foo_id
__typename
}
}
}
- name: big_query_low_load
<<: *k6_custom
options:
k6:
scenarios:
main:
<<: *settings
rate: *very_low_load
# tune this so it's just high enough that we can expect to not need
# to allocate during the test:
preAllocatedVUs: 10
query: |
query MyQuery {
start {
hello
__typename
big {
hello
... on Big {
hello
big {
hello
__typename
... on Big {
hello
__typename
big {
hello
__typename
... on Big {
hello
__typename
big {
hello
__typename
... on Big {
hello
big {
hello
__typename
}
many {
hello
}
}
}
}
}
}
}
}
}
many {
... on Big {
hello
__typename
many {
... on Big {
hello
__typename
many {
... on Big {
hello
__typename
many {
... on Big {
hello
__typename
}
}
}
}
}
}
}
}
}
}
- name: big_query_low_load_customized
<<: *k6_custom
options:
k6:
scenarios:
main:
<<: *settings
rate: *very_low_load
# tune this so it's just high enough that we can expect to not need
# to allocate during the test:
preAllocatedVUs: 10
query: |
query MyQuery {
big_foo {
start {
foo_hello
__typename
foo_big {
foo_hello
... on foo_Big_bar {
foo_hello
foo_big {
foo_hello
__typename
... on foo_Big_bar {
foo_hello
__typename
foo_big {
foo_hello
__typename
... on foo_Big_bar {
foo_hello
__typename
foo_big {
foo_hello
__typename
... on foo_Big_bar {
foo_hello
foo_big {
foo_hello
__typename
}
foo_many {
foo_hello
}
}
}
}
}
}
}
}
}
foo_many {
... on foo_Big_bar {
foo_hello
__typename
foo_many {
... on foo_Big_bar {
foo_hello
__typename
foo_many {
... on foo_Big_bar {
foo_hello
__typename
foo_many {
... on foo_Big_bar {
foo_hello
__typename
}
}
}
}
}
}
}
}
}
}
}

View File

@ -0,0 +1,825 @@
# -*- coding: utf-8 -*-
from http import HTTPStatus
import graphene
import copy
from webserver import RequestHandler, WebServer, MkHandlers, Response
from enum import Enum
import time
import ssl
import sys
from graphql import GraphQLError
from urllib.parse import urlparse
HGE_URLS=[]
def mkJSONResp(graphql_result):
return Response(HTTPStatus.OK, graphql_result.to_dict(),
{'Content-Type': 'application/json'})
class HelloWorldHandler(RequestHandler):
def get(self, request):
return Response(HTTPStatus.OK, 'hello world')
def post(self, request):
return Response(HTTPStatus.METHOD_NOT_ALLOWED)
class Hello(graphene.ObjectType):
hello = graphene.String(arg=graphene.String(default_value="world"))
delayedHello = graphene.String(arg=graphene.String(default_value="world"))
def resolve_hello(self, info, arg):
return "Hello " + arg
def resolve_delayedHello(self, info, arg):
time.sleep(10)
return "Hello " + arg
hello_schema = graphene.Schema(query=Hello, subscription=Hello)
class HelloGraphQL(RequestHandler):
def get(self, request):
return Response(HTTPStatus.METHOD_NOT_ALLOWED)
def post(self, request):
if not request.json:
return Response(HTTPStatus.BAD_REQUEST)
res = hello_schema.execute(request.json['query'])
return mkJSONResp(res)
class User(graphene.ObjectType):
id = graphene.Int()
username = graphene.String()
generateError = graphene.String()
def __init__(self, id, username):
self.id = id
self.username = username
def resolve_id(self, info):
return self.id
def resolve_username(self, info):
return self.username
def resolve_generateError(self, info):
return GraphQLError ('Cannot query field "generateError" on type "User".')
@staticmethod
def get_by_id(_id):
xs = list(filter(lambda u: u.id == _id, all_users))
if not xs:
return None
return xs[0]
all_users = [
User(1, 'jane'),
User(2, 'john'),
User(3, 'joe'),
]
class UserDetailsInput(graphene.InputObjectType):
id = graphene.Int(required=True)
username = graphene.String(required=True)
class CreateUserInputObject(graphene.Mutation):
class Arguments:
user_data = UserDetailsInput(required=True)
ok = graphene.Boolean()
user = graphene.Field(lambda: User)
def mutate(self, info, user_data=None):
user = User(
id = user_data.id,
username = user_data.username
)
all_users.append(user)
return CreateUserInputObject(ok=True, user = user)
class CreateUser(graphene.Mutation):
class Arguments:
id = graphene.Int(required=True)
username = graphene.String(required=True)
ok = graphene.Boolean()
user = graphene.Field(lambda: User)
def mutate(self, info, id, username):
user = User(id, username)
all_users.append(user)
return CreateUser(ok=True, user=user)
class UserQuery(graphene.ObjectType):
user = graphene.Field( User
, id=graphene.Int(required=True)
, user_info=graphene.Argument(graphene.List(UserDetailsInput), required=False))
allUsers = graphene.List(User)
def resolve_user(self, info, id, user_info=None):
return User.get_by_id(id)
def resolve_allUsers(self, info):
return all_users
class UserMutation(graphene.ObjectType):
createUser = CreateUser.Field()
createUserInputObj = CreateUserInputObject.Field()
user_schema = graphene.Schema(query=UserQuery, mutation=UserMutation)
class UserGraphQL(RequestHandler):
def get(self, req):
return Response(HTTPStatus.METHOD_NOT_ALLOWED)
def post(self, req):
if not req.json:
return Response(HTTPStatus.BAD_REQUEST)
res = user_schema.execute(req.json['query'])
return mkJSONResp(res)
class timestamptz(graphene.types.Scalar):
@staticmethod
def serialize(t):
return "2018-12-20"
@staticmethod
def parse_literal(s):
return "2018-12-20"
@staticmethod
def parse_value(s):
return "2018-12-20"
class Country(graphene.ObjectType):
name = graphene.String()
def __init__(self, name):
self.name = name
def resolve_name(self, info):
return self.name
class CountryQuery(graphene.ObjectType):
country = graphene.Field(Country)
def resolve_country(self, info):
return Country("India")
country_schema = graphene.Schema(query=CountryQuery)
class CountryGraphQL(RequestHandler):
def get(self, req):
return Response(HTTPStatus.METHOD_NOT_ALLOWED)
def post(self, req):
if not req.json:
return Response(HTTPStatus.BAD_REQUEST)
res = country_schema.execute(req.json['query'])
return mkJSONResp(res)
class person(graphene.ObjectType):
id = graphene.Int(required=True)
name = graphene.String()
created = graphene.Field(timestamptz)
def resolve_id(self, info):
return 42
def resolve_name(self, info):
return 'Arthur Dent'
def resolve_created(self, info):
return '2018-12-20'
class PersonQuery(graphene.ObjectType):
person_ = graphene.Field(person)
def resolve_person_(self, info):
return person()
person_schema = graphene.Schema(query=PersonQuery)
class PersonGraphQL(RequestHandler):
def get(self, req):
return Response(HTTPStatus.METHOD_NOT_ALLOWED)
def post(self, req):
if not req.json:
return Response(HTTPStatus.BAD_REQUEST)
res = person_schema.execute(req.json['query'])
return mkJSONResp(res)
# GraphQL server that returns Set-Cookie response header
class SampleAuth(graphene.ObjectType):
hello = graphene.String(arg=graphene.String(default_value="world"))
def resolve_hello(self, info, arg):
return "Hello " + arg
sample_auth_schema = graphene.Schema(query=SampleAuth,
subscription=SampleAuth)
class SampleAuthGraphQL(RequestHandler):
def get(self, request):
return Response(HTTPStatus.METHOD_NOT_ALLOWED)
def post(self, request):
if not request.json:
return Response(HTTPStatus.BAD_REQUEST)
res = sample_auth_schema.execute(request.json['query'])
resp = mkJSONResp(res)
resp.headers['Set-Cookie'] = 'abcd'
resp.headers['Custom-Header'] = 'custom-value'
return resp
# GraphQL server that can return arbitrary size result
class BigInterface(graphene.Interface):
hello = graphene.Field(graphene.String)
class Big(graphene.ObjectType):
class Meta:
interfaces = (BigInterface, )
big = graphene.Field(BigInterface, required=False)
many = graphene.Field(graphene.List(BigInterface), required=False, arg=graphene.Int(default_value=10))
# hello = graphene.Field(graphene.String)
def resolve_hello(self, info):
return "Hello"
def resolve_big(self, info):
return self
def resolve_many(self, info, arg):
for i in range(arg):
yield self
class BigQuery(graphene.ObjectType):
# start = graphene.Field(BigInterface)
start = graphene.Field(Big)
def resolve_start(self, info):
return Big()
big_schema = graphene.Schema(query=BigQuery)
class BigGraphQL(RequestHandler):
def get(self, request):
return Response(HTTPStatus.METHOD_NOT_ALLOWED)
def post(self, request):
if not request.json:
return Response(HTTPStatus.BAD_REQUEST)
res = big_schema.execute(request.json['query'])
resp = mkJSONResp(res)
resp.headers['Set-Cookie'] = 'abcd'
resp.headers['Custom-Header'] = 'custom-value'
return resp
# GraphQL server with interfaces
class Character(graphene.Interface):
id = graphene.ID(required=True)
name = graphene.String(required=True)
def __init__(self, id, name):
self.id = id
self.name = name
class Droid(graphene.ObjectType):
class Meta:
interfaces = (Character, )
primary_function = graphene.String()
def __init__(self, primary_function, character):
self.primary_function = primary_function
self.character = character
def resolve_id(self, info):
return self.character.id
def resolve_name(self, info):
return self.character.name
def resolve_primary_function(self, info):
return self.primary_function
class Human(graphene.ObjectType):
class Meta:
interfaces = (Character, )
home_planet = graphene.String()
droid = graphene.Field(Droid, required=False)
def __init__(self, home_planet, droid, character):
self.home_planet = home_planet
self.character = character
self.droid = droid
def resolve_id(self, info):
return self.character.id
def resolve_name(self, info):
return self.character.name
def resolve_primary_function(self, info):
return self.home_planet
def resolve_droid(self, info):
return self.droid
class CharacterSearchResult(graphene.Union):
class Meta:
types = (Human,Droid)
r2 = Droid("Astromech", Character(1,'R2-D2'))
all_characters = {
4: r2,
5: Human("Tatooine", r2, Character(2, "Luke Skywalker")),
}
character_search_results = {
1: Droid("Astromech", Character(6,'R2-D2')),
2: Human("Tatooine", r2, Character(7, "Luke Skywalker")),
}
class CharacterIFaceQuery(graphene.ObjectType):
hero = graphene.Field(
Character,
required=False,
episode=graphene.Int(required=True)
)
heroes = graphene.Field(
graphene.List(Character),
required=False
)
def resolve_hero(_, info, episode):
return all_characters.get(episode)
def resolve_heroes(_, info):
return all_characters.values()
schema = graphene.Schema(query=CharacterIFaceQuery, types=[Human, Droid])
character_interface_schema = graphene.Schema(query=CharacterIFaceQuery, types=[Human, Droid])
class CharacterInterfaceGraphQL(RequestHandler):
def get(self, req):
return Response(HTTPStatus.METHOD_NOT_ALLOWED)
def post(self, req):
if not req.json:
return Response(HTTPStatus.BAD_REQUEST)
res = character_interface_schema.execute(req.json['query'], variable_values=req.json.get('variables'))
return mkJSONResp(res)
class InterfaceGraphQLErrEmptyFieldList(RequestHandler):
def get(self, req):
return Response(HTTPStatus.METHOD_NOT_ALLOWED)
def post(self, req):
if not req.json:
return Response(HTTPStatus.BAD_REQUEST)
res = character_interface_schema.execute(req.json['query'])
respDict = res.to_dict()
typesList = respDict.get('data',{}).get('__schema',{}).get('types',None)
if typesList is not None:
for t in typesList:
if t['kind'] == 'INTERFACE':
t['fields'] = []
return Response(HTTPStatus.OK, respDict,
{'Content-Type': 'application/json'})
class InterfaceGraphQLErrUnknownInterface(RequestHandler):
def get(self, req):
return Response(HTTPStatus.METHOD_NOT_ALLOWED)
def post(self, req):
if not req.json:
return Response(HTTPStatus.BAD_REQUEST)
res = character_interface_schema.execute(req.json['query'])
respDict = res.to_dict()
typesList = respDict.get('data',{}).get('__schema',{}).get('types',None)
if typesList is not None:
for t in typesList:
if t['kind'] == 'OBJECT' and t['name'] == 'Droid':
t['interfaces'][0]['name'] = 'UnknownIFace'
return Response(HTTPStatus.OK, respDict,
{'Content-Type': 'application/json'})
class InterfaceGraphQLErrWrongFieldType(RequestHandler):
def get(self, req):
return Response(HTTPStatus.METHOD_NOT_ALLOWED)
def post(self, req):
if not req.json:
return Response(HTTPStatus.BAD_REQUEST)
res = character_interface_schema.execute(req.json['query'])
respDict = res.to_dict()
typesList = respDict.get('data',{}).get('__schema',{}).get('types',None)
if typesList is not None:
for t in typesList:
#Remove id field from Droid
if t['kind'] == 'OBJECT' and t['name'] == 'Droid':
for f in t['fields'].copy():
if f['name'] == 'id':
f['type']['ofType']['name'] = 'String'
return Response(HTTPStatus.OK, respDict,
{'Content-Type': 'application/json'})
class InterfaceGraphQLErrMissingField(RequestHandler):
def get(self, req):
return Response(HTTPStatus.METHOD_NOT_ALLOWED)
def post(self, req):
if not req.json:
return Response(HTTPStatus.BAD_REQUEST)
res = character_interface_schema.execute(req.json['query'])
respDict = res.to_dict()
typesList = respDict.get('data',{}).get('__schema',{}).get('types',None)
if typesList is not None:
for t in typesList:
#Remove id field from Droid
if t['kind'] == 'OBJECT' and t['name'] == 'Droid':
for f in t['fields'].copy():
if f['name'] == 'id':
t['fields'].remove(f)
return Response(HTTPStatus.OK, respDict,
{'Content-Type': 'application/json'})
ifaceArg = {
"name": "ifaceArg",
"description": None,
"type": {
"kind": "NON_NULL",
"name": None,
"ofType": {
"kind": "SCALAR",
"name": "Int",
"ofType": None
}
},
"defaultValue": None
}
class InterfaceGraphQLErrMissingArg(RequestHandler):
def get(self, req):
return Response(HTTPStatus.METHOD_NOT_ALLOWED)
def post(self, req):
if not req.json:
return Response(HTTPStatus.BAD_REQUEST)
res = character_interface_schema.execute(req.json['query'])
respDict = res.to_dict()
typesList = respDict.get('data',{}).get('__schema',{}).get('types',None)
if typesList is not None:
for t in typesList:
if t['kind'] == 'INTERFACE':
for f in t['fields']:
if f['name'] == 'id':
f['args'].append(ifaceArg)
return Response(HTTPStatus.OK, respDict,
{'Content-Type': 'application/json'})
class InterfaceGraphQLErrWrongArgType(RequestHandler):
def get(self, req):
return Response(HTTPStatus.METHOD_NOT_ALLOWED)
def post(self, req):
if not req.json:
return Response(HTTPStatus.BAD_REQUEST)
res = character_interface_schema.execute(req.json['query'])
respDict = res.to_dict()
objArg = copy.deepcopy(ifaceArg)
objArg['type']['ofType']['name'] = 'String'
typesList = respDict.get('data',{}).get('__schema',{}).get('types',None)
if typesList is not None:
for t in filter(lambda ty : ty['kind'] == 'INTERFACE', typesList):
for f in filter(lambda fld: fld['name'] == 'id', t['fields']):
f['args'].append(ifaceArg)
for t in filter(lambda ty: ty['name'] in ['Droid','Human'], typesList):
for f in filter(lambda fld: fld['name'] == 'id', t['fields']):
f['args'].append(ifaceArg if t['name'] == 'Droid' else objArg)
return Response(HTTPStatus.OK, respDict,
{'Content-Type': 'application/json'})
class InterfaceGraphQLErrExtraNonNullArg(RequestHandler):
def get(self, req):
return Response(HTTPStatus.METHOD_NOT_ALLOWED)
def post(self, req):
if not req.json:
return Response(HTTPStatus.BAD_REQUEST)
res = character_interface_schema.execute(req.json['query'])
respDict = res.to_dict()
typesList = respDict.get('data',{}).get('__schema',{}).get('types',None)
if typesList is not None:
for t in typesList:
if t['kind'] == 'OBJECT' and t['name'] == 'Droid':
for f in t['fields']:
if f['name'] == 'id':
f['args'].append({
"name": "extraArg",
"description": None,
"type": {
"kind": "NON_NULL",
"name": None,
"ofType": {
"kind": "SCALAR",
"name": "Int",
"ofType": None
}
},
"defaultValue": None
})
return Response(HTTPStatus.OK, respDict,
{'Content-Type': 'application/json'})
#GraphQL server involving union type
class UnionQuery(graphene.ObjectType):
search = graphene.Field(
CharacterSearchResult,
required=False,
episode=graphene.Int(required=True)
)
def resolve_search(_, info, episode):
return character_search_results.get(episode)
union_schema = graphene.Schema(query=UnionQuery, types=[Human, Droid])
class UnionGraphQL(RequestHandler):
def get(self, req):
return Response(HTTPStatus.METHOD_NOT_ALLOWED)
def post(self, req):
if not req.json:
return Response(HTTPStatus.BAD_REQUEST)
res = union_schema.execute(req.json['query'])
return mkJSONResp(res)
class UnionGraphQLSchemaErrUnknownTypes(RequestHandler):
def get(self, req):
return Response(HTTPStatus.METHOD_NOT_ALLOWED)
def post(self, req):
if not req.json:
return Response(HTTPStatus.BAD_REQUEST)
res = union_schema.execute(req.json['query'])
respDict = res.to_dict()
typesList = respDict.get('data',{}).get('__schema',{}).get('types',None)
if typesList is not None:
for t in typesList:
if t['kind'] == 'UNION':
for i, p in enumerate(t['possibleTypes']):
p['name'] = 'Unknown' + str(i)
return Response(HTTPStatus.OK, respDict,
{'Content-Type': 'application/json'})
class UnionGraphQLSchemaErrSubTypeInterface(RequestHandler):
def get(self, req):
return Response(HTTPStatus.METHOD_NOT_ALLOWED)
def post(self, req):
if not req.json:
return Response(HTTPStatus.BAD_REQUEST)
res = union_schema.execute(req.json['query'])
respDict = res.to_dict()
typesList = respDict.get('data',{}).get('__schema',{}).get('types',None)
if typesList is not None:
for t in typesList:
if t['kind'] == 'UNION':
for p in t['possibleTypes']:
p['name'] = 'Character'
return Response(HTTPStatus.OK, respDict,
{'Content-Type': 'application/json'})
class UnionGraphQLSchemaErrNoMemberTypes(RequestHandler):
def get(self, req):
return Response(HTTPStatus.METHOD_NOT_ALLOWED)
def post(self, req):
if not req.json:
return Response(HTTPStatus.BAD_REQUEST)
res = union_schema.execute(req.json['query'])
respDict = res.to_dict()
typesList = respDict.get('data',{}).get('__schema',{}).get('types',None)
if typesList is not None:
for t in typesList:
if t['kind'] == 'UNION':
t['possibleTypes'] = []
return Response(HTTPStatus.OK, respDict,
{'Content-Type': 'application/json'})
class UnionGraphQLSchemaErrWrappedType(RequestHandler):
def get(self, req):
return Response(HTTPStatus.METHOD_NOT_ALLOWED)
def post(self, req):
if not req.json:
return Response(HTTPStatus.BAD_REQUEST)
res = union_schema.execute(req.json['query'])
respDict = res.to_dict()
typesList = respDict.get('data',{}).get('__schema',{}).get('types',None)
if typesList is not None:
for t in typesList:
if t['kind'] == 'UNION':
for i, p in enumerate(t['possibleTypes']):
t['possibleTypes'][i] = {
"kind": "NON_NULL",
"name": None,
"ofType": p
}
return Response(HTTPStatus.OK, respDict,
{'Content-Type': 'application/json'})
#GraphQL server with default values for inputTypes
class InpObjType(graphene.InputObjectType):
@classmethod
def default(cls):
meta = cls._meta
fields = meta.fields
default_fields = {name: field.default_value for name, field in fields.items()}
container = meta.container
return container(**default_fields)
class SizeObj(graphene.ObjectType):
width = graphene.Int()
height = graphene.Float()
shape = graphene.String()
hasTag = graphene.Boolean()
class Color(Enum):
RED = 1
GREEN = 2
BLUE = 3
GQColorEnum = graphene.Enum.from_enum(Color)
class SizeInput(InpObjType):
width = graphene.Int(default_value=100)
height = graphene.Float(default_value=100.1)
shape = graphene.String(default_value="cube")
hasTag = graphene.Boolean(default_value=False)
def asSizeObj(self):
return SizeObj(width=self.width, height=self.height, shape=self.shape, hasTag=self.hasTag)
class Echo(graphene.ObjectType):
intFld = graphene.Int()
listFld = graphene.List(graphene.String)
objFld = graphene.Field(SizeObj)
enumFld = graphene.Field(GQColorEnum)
class EchoQuery(graphene.ObjectType):
echo = graphene.Field(
Echo,
int_input=graphene.Int(default_value=1234),
list_input=graphene.Argument(graphene.List(graphene.String), default_value=["hi","there"]),
obj_input=graphene.Argument(SizeInput, default_value=SizeInput.default()),
enum_input=graphene.Argument(GQColorEnum, default_value=GQColorEnum.RED.name),
r_int_input=graphene.Int(required=True, default_value=1234),
r_list_input=graphene.Argument(graphene.List(graphene.String, required=True), default_value=["general","Kenobi"]),
r_obj_input=graphene.Argument(SizeInput, required=True, default_value=SizeInput.default()),
r_enum_input=graphene.Argument(GQColorEnum, required=True, default_value=GQColorEnum.RED.name),
)
def resolve_echo(self, info, int_input, list_input, obj_input, enum_input):
#print (int_input, list_input, obj_input)
return Echo(intFld=int_input, listFld=list_input, objFld=obj_input, enumFld=enum_input)
echo_schema = graphene.Schema(query=EchoQuery)
class EchoGraphQL(RequestHandler):
def get(self, req):
return Response(HTTPStatus.METHOD_NOT_ALLOWED)
def post(self, req):
if not req.json:
return Response(HTTPStatus.BAD_REQUEST)
res = echo_schema.execute(req.json['query'])
resp_dict = res.to_dict()
types_list = resp_dict.get('data',{}).get('__schema',{}).get('types', None)
#Hack around enum default_value serialization issue: https://github.com/graphql-python/graphql-core/issues/166
if types_list is not None:
for t in filter(lambda ty: ty['name'] == 'EchoQuery', types_list):
for f in filter(lambda fld: fld['name'] == 'echo', t['fields']):
for a in filter(lambda arg: arg['name'] == 'enumInput', f['args']):
a['defaultValue'] = 'RED'
return Response(HTTPStatus.OK, resp_dict,
{'Content-Type': 'application/json'})
class HeaderTest(graphene.ObjectType):
wassup = graphene.String(arg=graphene.String(default_value='world'))
def resolve_wassup(self, info, arg):
headers = info.context
hosts = list(map(lambda o: urlparse(o).netloc, HGE_URLS))
if not (headers.get_all('x-hasura-test') == ['abcd'] and
headers.get_all('x-hasura-role') == ['user'] and
headers.get_all('x-hasura-user-id') == ['abcd1234'] and
headers.get_all('content-type') == ['application/json'] and
headers.get_all('Authorization') == ['Bearer abcdef'] and
len(headers.get_all('x-forwarded-host')) == 1 and
all(host in headers.get_all('x-forwarded-host') for host in hosts) and
headers.get_all('x-forwarded-user-agent') == ['python-requests/2.22.0']):
raise Exception('headers dont match. Received: ' + str(headers))
return "Hello " + arg
header_test_schema = graphene.Schema(query=HeaderTest)
class HeaderTestGraphQL(RequestHandler):
def get(self, request):
return Response(HTTPStatus.METHOD_NOT_ALLOWED)
def post(self, request):
if not request.json:
return Response(HTTPStatus.BAD_REQUEST)
res = header_test_schema.execute(request.json['query'],
context=request.headers)
return mkJSONResp(res)
class Message(graphene.ObjectType):
id = graphene.Int()
msg = graphene.String()
def __init__(self, id, msg):
self.id = id
self.msg = msg
def resolve_id(self, info):
return self.id
def resolve_msg(self, info):
return self.msg
@staticmethod
def get_by_id(_id):
xs = list(filter(lambda u: u.id == _id, all_messages))
if not xs:
return None
return xs[0]
all_messages = [
Message(1, 'You win!'),
Message(2, 'You lose!')
]
class MessagesQuery(graphene.ObjectType):
message = graphene.Field(Message, id=graphene.Int(required=True))
messages = graphene.List(Message)
def resolve_message(self, info, id):
return Message.get_by_id(id)
def resolve_messages(self, info):
return all_messages
messages_schema = graphene.Schema(query=MessagesQuery)
class MessagesGraphQL(RequestHandler):
def get(self, request):
return Response(HTTPStatus.METHOD_NOT_ALLOWED)
def post(self, request):
if not request.json:
return Response(HTTPStatus.BAD_REQUEST)
res = messages_schema.execute(request.json['query'])
return mkJSONResp(res)
handlers = MkHandlers({
'/hello': HelloWorldHandler,
'/hello-graphql': HelloGraphQL,
'/user-graphql': UserGraphQL,
'/country-graphql': CountryGraphQL,
'/character-iface-graphql' : CharacterInterfaceGraphQL,
'/iface-graphql-err-empty-field-list' : InterfaceGraphQLErrEmptyFieldList,
'/iface-graphql-err-unknown-iface' : InterfaceGraphQLErrUnknownInterface,
'/iface-graphql-err-missing-field' : InterfaceGraphQLErrMissingField,
'/iface-graphql-err-wrong-field-type' : InterfaceGraphQLErrWrongFieldType,
'/iface-graphql-err-missing-arg' : InterfaceGraphQLErrMissingArg,
'/iface-graphql-err-wrong-arg-type' : InterfaceGraphQLErrWrongArgType,
'/iface-graphql-err-extra-non-null-arg' : InterfaceGraphQLErrExtraNonNullArg,
'/union-graphql' : UnionGraphQL,
'/union-graphql-err-unknown-types' : UnionGraphQLSchemaErrUnknownTypes,
'/union-graphql-err-subtype-iface' : UnionGraphQLSchemaErrSubTypeInterface,
'/union-graphql-err-no-member-types' : UnionGraphQLSchemaErrNoMemberTypes,
'/union-graphql-err-wrapped-type' : UnionGraphQLSchemaErrWrappedType,
'/default-value-echo-graphql' : EchoGraphQL,
'/person-graphql': PersonGraphQL,
'/header-graphql': HeaderTestGraphQL,
'/messages-graphql' : MessagesGraphQL,
'/auth-graphql': SampleAuthGraphQL,
'/big': BigGraphQL
})
def create_server(host='0.0.0.0', port=5000):
return WebServer((host, port), handlers)
def stop_server(server):
server.shutdown()
server.server_close()
def set_hge_urls(hge_urls = []):
global HGE_URLS
HGE_URLS=hge_urls
if __name__ == '__main__':
s = create_server()
s.serve_forever()

View File

@ -0,0 +1,2 @@
graphene
graphql-core

View File

@ -0,0 +1,7 @@
#!/bin/sh
set -e
pip install -r requirements-graphql-server.txt
python graphql_server.py

View File

@ -0,0 +1,139 @@
# -*- coding: utf-8 -*-
"""
Helper module which exposes abstractions to write webservers easily
"""
from abc import ABC, abstractmethod
import socket
import http.server as http
from http import HTTPStatus
from urllib.parse import parse_qs, urlparse
import json
class Response():
""" Represents a HTTP `Response` object """
def __init__(self, status, body=None, headers=None):
if not isinstance(status, HTTPStatus):
raise TypeError('status has to be of type http.HTTPStatus')
if body and not isinstance(body, (str, dict)):
raise TypeError('body has to be of type str or dict')
if headers and not isinstance(headers, dict):
raise TypeError('headers has to be of type dict')
self.status = status
self.body = body
self.headers = headers
def get_body(self):
if not self.body:
return ''
if isinstance(self.body, dict):
return json.dumps(self.body)
return self.body
class Request():
""" Represents a HTTP `Request` object """
def __init__(self, path, qs=None, body=None, json=None, headers=None):
self.path = path
self.qs = qs
self.body = body
self.json = json
self.headers = headers
class RequestHandler(ABC):
"""
The class that users should sub-class and provide implementation. Each of
these functions **should** return an instance of the `Response` class
"""
@abstractmethod
def get(self, request):
pass
@abstractmethod
def post(self, request):
pass
def MkHandlers(handlers):
class HTTPHandler(http.BaseHTTPRequestHandler):
def not_found(self):
self.send_response(HTTPStatus.NOT_FOUND)
self.end_headers()
self.wfile.write('<h1> Not Found </h1>'.encode('utf-8'))
def parse_path(self):
return urlparse(self.path)
def append_headers(self, headers):
for k, v in headers.items():
self.send_header(k, v)
def do_GET(self):
try:
raw_path = self.parse_path()
path = raw_path.path
handler = handlers[path]()
qs = parse_qs(raw_path.query)
req = Request(path, qs, None, None, self.headers)
resp = handler.get(req)
self.send_response(resp.status)
if resp.headers:
self.append_headers(resp.headers)
self.end_headers()
self.wfile.write(resp.get_body().encode('utf-8'))
except KeyError:
self.not_found()
def do_POST(self):
try:
raw_path = self.parse_path()
path = raw_path.path
handler = handlers[path]()
content_len = self.headers.get('Content-Length')
qs = None
req_body = self.rfile.read(int(content_len)).decode("utf-8")
req_json = None
if self.headers.get('Content-Type') == 'application/json':
req_json = json.loads(req_body)
req = Request(self.path, qs, req_body, req_json, self.headers)
resp = handler.post(req)
self.send_response(resp.status)
if resp.headers:
self.append_headers(resp.headers)
#Required for graphiql to work with the graphQL test server
self.send_header('Access-Control-Allow-Origin', self.headers['Origin'])
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Methods', 'GET,POST,PUT,PATCH,DELETE,OPTIONS')
self.end_headers()
self.wfile.write(resp.get_body().encode('utf-8'))
except KeyError:
self.not_found()
def do_OPTIONS(self):
self.send_response(204)
#Required for graphiql to work with the graphQL test server
self.send_header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS')
self.send_header('Access-Control-Max-Age', '1728000')
self.send_header('Access-Control-Allow-Headers', 'content-type,x-apollo-tracing')
self.send_header('Content-Type', 'text/plain charset=UTF-8')
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', self.headers['Origin'])
self.send_header('Access-Control-Allow-Methods', 'GET,POST,PUT,PATCH,DELETE,OPTIONS')
self.end_headers()
def log_message(self, format, *args):
return
return HTTPHandler
class WebServer(http.HTTPServer):
def __init__(self, server_address, handler):
super().__init__(server_address, handler)
def server_bind(self):
print('Running http server on {0}:{1}'.format(self.server_address[0],
self.server_address[1]))
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.socket.bind(self.server_address)

View File

@ -0,0 +1,75 @@
{
"type": "replace_metadata",
"args": {
"version": 3,
"sources": [],
"remote_schemas": [
{
"name": "character",
"definition": {
"url": "http://localhost:5000//character-iface-graphql/character-iface-graphql",
"timeout_seconds": 60
}
},
{
"name": "character_foo",
"definition": {
"url": "http://localhost:5000//character-iface-graphql/character-iface-graphql",
"timeout_seconds": 60,
"customization": {
"root_fields_namespace": "foo",
"type_names": {
"prefix": "foo_",
"suffix": "_bar"
},
"field_names": [
{
"parent_type": "Character",
"prefix": "foo_"
},
{
"parent_type": "Human",
"prefix": "foo_"
},
{
"parent_type": "Droid",
"prefix": "foo_"
}
]
}
}
},
{
"name": "big",
"definition": {
"url": "http://localhost:5000//big/big",
"timeout_seconds": 60
}
},
{
"name": "big_foo",
"definition": {
"url": "http://localhost:5000//big/big",
"timeout_seconds": 60,
"customization": {
"root_fields_namespace": "big_foo",
"type_names": {
"prefix": "foo_",
"suffix": "_bar"
},
"field_names": [
{
"parent_type": "Big",
"prefix": "foo_"
},
{
"parent_type": "BigInterface",
"prefix": "foo_"
}
]
}
}
}
]
}
}

View File

@ -0,0 +1,17 @@
#!/usr/bin/env bash
CONTAINER_NAME=graphql-remote-server
PORT=5000
echo "Launching graphql remote server container: $CONTAINER_NAME"
docker run \
--name "$CONTAINER_NAME" \
-v $(pwd)/graphql_server:/app \
-w /app \
-p "$PORT":$PORT \
--rm \
-d python:3.8-slim-buster \
./run_graphql_server.sh
until curl -s "http://127.0.0.1:$PORT" &>/dev/null; do
echo -n '.' && sleep 0.2
done

View File

@ -623,6 +623,8 @@ library
, Network.URI.Extended
, Network.Wai.Extended
, Network.Wai.Handler.WebSockets.Custom
, Test.QuickCheck.Arbitrary.Extended
, Test.QuickCheck.Arbitrary.Partial
executable graphql-engine
import: common-all, common-exe
@ -650,6 +652,7 @@ test-suite graphql-engine-tests
, cron
, dependent-map
, dependent-sum
, generic-arbitrary
, graphql-engine
, graphql-parser
, hspec >=2.6.1 && <3
@ -659,6 +662,7 @@ test-suite graphql-engine-tests
, http-client
, http-client-tls
, http-types
, insert-ordered-containers
, jose
, kan-extensions
, lens
@ -673,12 +677,14 @@ test-suite graphql-engine-tests
, process
, QuickCheck
, safe
, scientific
, split
, template-haskell
, text
, time
, transformers-base
, unordered-containers
, vector
hs-source-dirs: src-test
main-is: Main.hs
other-modules:
@ -689,10 +695,13 @@ test-suite graphql-engine-tests
Data.Text.RawString
Data.TimeSpec
Hasura.EventingSpec
Hasura.Generator
Hasura.GraphQL.Parser.DirectivesTest
Hasura.GraphQL.Parser.TestUtils
Hasura.GraphQL.Schema.RemoteTest
Hasura.IncrementalSpec
Hasura.SessionSpec
Hasura.GraphQL.RemoteServerSpec
Hasura.RQL.Types.EndpointSpec
Hasura.SQL.WKTSpec
Hasura.Server.AuthSpec

View File

@ -20,6 +20,7 @@ module Data.Aeson.Ordered
, array
, insert
, delete
, adjust
, empty
, eitherDecode
, toEncJSON
@ -127,6 +128,9 @@ lookup key (Object_ omap) = OMap.lookup key omap
delete :: Text -> Object -> Object
delete key (Object_ omap) = Object_ (OMap.delete key omap)
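-- | Modify the value at the given key by applying a function; the object is unchanged if the key is absent (delegates to 'OMap.adjust').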
adjust :: (Value -> Value) -> Text -> Object -> Object
adjust f key (Object_ omap) = Object_ (OMap.adjust f key omap)
-- | Convert an 'Object' to an association list of key-value pairs.
toList :: Object -> [(Text,Value)]
toList (Object_ omap) = OMap.toList omap

View File

@ -15,6 +15,7 @@ import Hasura.Prelude
import qualified Data.Map as M
import qualified System.Environment
-- | Server process environment variables
newtype Environment = Environment (M.Map String String) deriving (Eq, Show, Generic)
instance FromJSON Environment

View File

@ -4,6 +4,7 @@ module Data.List.Extended
, getDifference
, getDifferenceOn
, getOverlapWith
, hasNoDuplicates
, module L
) where
@ -34,3 +35,6 @@ getOverlapWith getKey left right =
Map.elems $ Map.intersectionWith (,) (mkMap left) (mkMap right)
where
mkMap = Map.fromList . map (\v -> (getKey v, v))
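-- | Check whether a list contains no duplicate elements.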
hasNoDuplicates :: (Eq a, Hashable a) => [a] -> Bool
hasNoDuplicates xs = Set.size (Set.fromList xs) == length xs

View File

@ -173,7 +173,6 @@ data JoinSource
= JoinSelect Select
-- We're not using existingJoins at the moment, which was used to
-- avoid re-joining on the same table twice.
-- | JoinReselect Reselect
deriving (Eq, Ord, Show, Generic, Data, Lift)
instance FromJSON JoinSource
instance Hashable JoinSource
@ -700,8 +699,8 @@ liberalDecimalParser fromText json = viaText <|> viaNumber
-- Parsing scientific is safe; it doesn't normalise until we ask
-- it to.
case readP_to_S scientificP (T.unpack text) of
[(_)] -> pure (fromText text)
_ -> fail ("String containing decimal places is invalid: " ++ show text)
[_] -> pure (fromText text)
_ -> fail ("String containing decimal places is invalid: " ++ show text)
viaNumber = do
d <- J.parseJSON json
-- Converting a scientific to an unbounded number is unsafe, but

View File

@ -12,7 +12,6 @@ module Hasura.GraphQL.Execute
, checkQueryInAllowlist
, MultiplexedLiveQueryPlan(..)
, LiveQueryPlan (..)
, getQueryParts -- this function is exposed for testing in parameterized query hash
) where
import Hasura.Prelude
@ -27,13 +26,10 @@ import qualified Language.GraphQL.Draft.Syntax as G
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Types as HTTP
import Data.Text.Extended
import qualified Hasura.GraphQL.Context as C
import qualified Hasura.GraphQL.Execute.Action as EA
import qualified Hasura.GraphQL.Execute.Backend as EB
import qualified Hasura.GraphQL.Execute.Common as EC
import qualified Hasura.GraphQL.Execute.Inline as EI
import qualified Hasura.GraphQL.Execute.LiveQuery.Plan as EL
import qualified Hasura.GraphQL.Execute.Mutation as EM
import qualified Hasura.GraphQL.Execute.Query as EQ
@ -61,8 +57,6 @@ import Hasura.QueryTags
import Hasura.Server.Types (RequestId (..))
type QueryParts = G.TypedOperationDefinition G.FragmentSpread G.Name
-- | Execution context
data ExecutionCtx
= ExecutionCtx
@ -74,40 +68,15 @@ data ExecutionCtx
, _ecxEnableAllowList :: !Bool
}
-- | Depending on the request parameters, fetch the correct typed operation
-- definition from the GraphQL query
getQueryParts
:: MonadError QErr m
=> GQLReqParsed
-> m QueryParts
getQueryParts (GQLReq opNameM q _varValsM) = do
let (selSets, opDefs, _fragDefsL) = G.partitionExDefs $ unGQLExecDoc q
case (opNameM, selSets, opDefs) of
(Just opName, [], _) -> do
let n = _unOperationName opName
opDefM = find (\opDef -> G._todName opDef == Just n) opDefs
onNothing opDefM $ throw400 ValidationFailed $
"no such operation found in the document: " <> dquote n
(Just _, _, _) ->
throw400 ValidationFailed $ "operationName cannot be used when " <>
"an anonymous operation exists in the document"
(Nothing, [selSet], []) ->
return $ G.TypedOperationDefinition G.OperationTypeQuery Nothing [] [] selSet
(Nothing, [], [opDef]) ->
return opDef
(Nothing, _, _) ->
throw400 ValidationFailed $ "exactly one operation has to be present " <>
"in the document when operationName is not specified"
getExecPlanPartial
:: (MonadError QErr m)
=> UserInfo
-> SchemaCache
-> ET.GraphQLQueryType
-> GQLReqParsed
-> m (C.GQLContext, QueryParts)
-> m (C.GQLContext, SingleOperation)
getExecPlanPartial userInfo sc queryType req =
(getGCtx ,) <$> getQueryParts req
(getGCtx ,) <$> getSingleOperation req
where
role = _uiRole userInfo
@ -130,7 +99,6 @@ getExecPlanPartial userInfo sc queryType req =
BOFAAllowed -> fromMaybe frontend backend
BOFADisallowed -> frontend
-- The graphql query is resolved into a sequence of execution operations
data ResolvedExecutionPlan
= QueryExecutionPlan EB.ExecutionPlan [IR.QueryRootField UnpreparedValue] DirectiveMap
@ -283,13 +251,6 @@ getResolvedExecPlan
-> m (ParameterizedQueryHash, ResolvedExecutionPlan)
getResolvedExecPlan env logger userInfo sqlGenCtx
sc _scVer queryType httpManager reqHeaders (reqUnparsed, reqParsed) reqId = do
-- GraphQL requests may incorporate fragments which insert a pre-defined
-- part of a GraphQL query. Here we make sure to remember those
-- pre-defined sections, so that when we encounter a fragment spread
-- later, we can inline it instead.
let takeFragment = \case G.ExecutableDefinitionFragment f -> Just f; _ -> Nothing
fragments =
mapMaybe takeFragment $ unGQLExecDoc $ _grQuery reqParsed
(gCtx, queryParts) <- getExecPlanPartial userInfo sc queryType reqParsed
let maybeOperationName = (Just <$> _unOperationName) =<< _grOperationName reqParsed
@ -297,25 +258,19 @@ getResolvedExecPlan env logger userInfo sqlGenCtx
(parameterizedQueryHash, resolvedExecPlan) <-
case queryParts of
G.TypedOperationDefinition G.OperationTypeQuery _ varDefs directives selSet -> do
-- (Here the above fragment inlining is actually executed.)
inlinedSelSet <- EI.inlineSelectionSet fragments selSet
G.TypedOperationDefinition G.OperationTypeQuery _ varDefs directives inlinedSelSet -> do
(executionPlan, queryRootFields, dirMap, parameterizedQueryHash) <-
EQ.convertQuerySelSet env logger gCtx userInfo httpManager reqHeaders directives inlinedSelSet
varDefs (_grVariables reqUnparsed) (scSetGraphqlIntrospectionOptions sc)
reqId maybeOperationName queryTagsConfig
pure $ (parameterizedQueryHash, QueryExecutionPlan executionPlan queryRootFields dirMap)
G.TypedOperationDefinition G.OperationTypeMutation _ varDefs directives selSet -> do
-- (Here the above fragment inlining is actually executed.)
inlinedSelSet <- EI.inlineSelectionSet fragments selSet
G.TypedOperationDefinition G.OperationTypeMutation _ varDefs directives inlinedSelSet -> do
(executionPlan, parameterizedQueryHash) <-
EM.convertMutationSelectionSet env logger gCtx sqlGenCtx userInfo httpManager reqHeaders
directives inlinedSelSet varDefs (_grVariables reqUnparsed) (scSetGraphqlIntrospectionOptions sc)
reqId maybeOperationName queryTagsConfig
pure $ (parameterizedQueryHash, MutationExecutionPlan executionPlan)
G.TypedOperationDefinition G.OperationTypeSubscription _ varDefs directives selSet -> do
-- (Here the above fragment inlining is actually executed.)
inlinedSelSet <- EI.inlineSelectionSet fragments selSet
G.TypedOperationDefinition G.OperationTypeSubscription _ varDefs directives inlinedSelSet -> do
-- Parse as query to check correctness
(unpreparedAST, normalizedDirectives, normalizedSelectionSet) <-
EQ.parseGraphQLQuery gCtx varDefs (_grVariables reqUnparsed) directives inlinedSelSet

View File

@ -149,6 +149,7 @@ data ExecutionStep where
-- | A graphql query to execute against a remote schema
ExecStepRemote
:: !RemoteSchemaInfo
-> !RemoteResultCustomizer
-> !GH.GQLReqOutgoing
-> ExecutionStep
-- | Output a plain JSON object

View File

@ -117,8 +117,8 @@ convertMutationSelectionSet env logger gqlContext SQLGenCtx{stringifyNum} userIn
dbStepInfo <- mkDBMutationPlan @b userInfo stringifyNum sourceName sourceConfig noRelsDBAST mutationQueryTags
pure $ ExecStepDB [] (AB.mkAnyBackend dbStepInfo) remoteJoins
RFRemote remoteField -> do
RemoteFieldG remoteSchemaInfo resolvedRemoteField <- runVariableCache $ resolveRemoteField userInfo remoteField
pure $ buildExecStepRemote remoteSchemaInfo G.OperationTypeMutation $ [G.SelectionField resolvedRemoteField]
RemoteFieldG remoteSchemaInfo resultCustomizer resolvedRemoteField <- runVariableCache $ resolveRemoteField userInfo remoteField
pure $ buildExecStepRemote remoteSchemaInfo resultCustomizer G.OperationTypeMutation $ getRemoteFieldSelectionSet resolvedRemoteField
RFAction action -> do
let (noRelsDBAST, remoteJoins) = RJ.getRemoteJoinsActionMutation action
(actionName, _fch) <- pure $ case noRelsDBAST of

View File

@ -107,8 +107,8 @@ convertQuerySelSet env logger gqlContext userInfo manager reqHeaders directives
dbStepInfo <- mkDBQueryPlan @b userInfo sourceName sourceConfig noRelsDBAST queryTagsText
pure $ ExecStepDB [] (AB.mkAnyBackend dbStepInfo) remoteJoins
RFRemote rf -> do
RemoteFieldG remoteSchemaInfo remoteField <- runVariableCache $ for rf $ resolveRemoteVariable userInfo
pure $ buildExecStepRemote remoteSchemaInfo G.OperationTypeQuery [G.SelectionField remoteField]
RemoteFieldG remoteSchemaInfo resultCustomizer remoteField <- runVariableCache $ for rf $ resolveRemoteVariable userInfo
pure $ buildExecStepRemote remoteSchemaInfo resultCustomizer G.OperationTypeQuery $ getRemoteFieldSelectionSet remoteField
RFAction action -> do
let (noRelsDBAST, remoteJoins) = RJ.getRemoteJoinsActionQuery action
(actionExecution, actionName, fch) <- pure $ case noRelsDBAST of

View File

@ -70,10 +70,11 @@ collectVariablesFromSelectionSet =
buildExecStepRemote
:: RemoteSchemaInfo
-> RemoteResultCustomizer
-> G.OperationType
-> G.SelectionSet G.NoFragments Variable
-> ExecutionStep
buildExecStepRemote remoteSchemaInfo tp selSet =
buildExecStepRemote remoteSchemaInfo resultCustomizer tp selSet =
let unresolvedSelSet = unresolveVariables selSet
allVars = map mkVariableDefinitionAndValue $ Set.toList $ collectVariables selSet
varValues = Map.fromList $ map snd allVars
@ -81,7 +82,7 @@ buildExecStepRemote remoteSchemaInfo tp selSet =
varDefs = map fst allVars
_grQuery = G.TypedOperationDefinition tp Nothing varDefs [] unresolvedSelSet
_grVariables = varValsM
in ExecStepRemote remoteSchemaInfo GH.GQLReq{_grOperationName = Nothing, ..}
in ExecStepRemote remoteSchemaInfo resultCustomizer GH.GQLReq{_grOperationName = Nothing, ..}
-- | resolveRemoteVariable resolves a `RemoteSchemaVariable` into a GraphQL `Variable`. A
@ -193,10 +194,10 @@ resolveRemoteVariable userInfo = \case
QueryVariable variable -> pure variable
resolveRemoteField
:: (MonadError QErr m)
:: (MonadError QErr m, Traversable f)
=> UserInfo
-> RemoteField
-> StateT (HashMap J.Value Int) m (RemoteFieldG Variable)
-> RemoteFieldG f RemoteSchemaVariable
-> StateT (HashMap J.Value Int) m (RemoteFieldG f Variable)
resolveRemoteField userInfo = traverse (resolveRemoteVariable userInfo)
runVariableCache

View File

@ -266,7 +266,7 @@ transformAnnFields path fields = do
RemoteSelectSource _s -> error "remote source relationshsip found"
remoteJoins = remoteSelects <&> \(fieldName, remoteSelect) ->
let RemoteSchemaSelect argsMap selSet hasuraFields remoteFields rsi = remoteSelect
let RemoteSchemaSelect relationshipName argsMap resultCustomizer selSet hasuraFields remoteFields rsi = remoteSelect
hasuraFieldNames = HS.map dbJoinFieldToName hasuraFields
-- See Note [Phantom fields in Remote Joins]
@ -276,7 +276,7 @@ transformAnnFields path fields = do
phantomFields = HS.filter (not . fieldPresentInSelection) hasuraFields
phantomFieldNames = toList $ HS.map dbJoinFieldToName phantomFields
in (phantomFields, RemoteJoin fieldName argsMap selSet hasuraFieldNames remoteFields rsi phantomFieldNames)
in (phantomFields, RemoteJoin fieldName relationshipName argsMap resultCustomizer selSet hasuraFieldNames remoteFields rsi phantomFieldNames)
transformedFields <- forM fields $ \(fieldName, field') -> do
let fieldPath = appendPath fieldName path

View File

@ -114,11 +114,12 @@ compositeValueToJSON = \case
-- and made GraphQL request to remote server to fetch remote join values.
data RemoteJoinField
= RemoteJoinField
{ _rjfRemoteSchema :: !RemoteSchemaInfo -- ^ The remote schema server info.
, _rjfAlias :: !Alias -- ^ Top level alias of the field
, _rjfField :: !(G.Field G.NoFragments RemoteSchemaVariable) -- ^ The field AST
, _rjfFieldCall :: ![G.Name] -- ^ Path to remote join value
} deriving (Show, Eq)
{ _rjfRemoteSchema :: !RemoteSchemaInfo -- ^ The remote schema server info.
, _rjfAlias :: !Alias -- ^ Top level alias of the field
, _rjfResultCustomizer :: !RemoteResultCustomizer -- ^ Customizer for JSON results from the remote server
, _rjfField :: !(G.Field G.NoFragments RemoteSchemaVariable) -- ^ The field AST
, _rjfFieldCall :: ![G.Name] -- ^ Path to remote join value
}
-- | Generate composite JSON ('CompositeValue') parameterised over 'RemoteJoinField'
-- from remote join map and query response JSON from Postgres.
@ -153,7 +154,7 @@ traverseQueryResponseJSON rjm =
-> n (Maybe RemoteJoinField)
mkRemoteSchemaField siblingFields remoteJoin = runMaybeT $ do
counter <- getCounter
let RemoteJoin fieldName inputArgs selSet hasuraFields fieldCall rsi _ = remoteJoin
let RemoteJoin fieldName relationshipName inputArgs resultCustomizer selSet hasuraFields fieldCall rsi _ = remoteJoin
-- when any of the joining fields are `NULL`, we don't query
-- the remote schema
--
@ -206,8 +207,10 @@ traverseQueryResponseJSON rjm =
hasuraFieldArgs = flip Map.filterWithKey siblingFieldArgs $ \k _ -> k `elem` hasuraFieldVariables
fieldAlias <- lift $ pathToAlias (appendPath fieldName path) counter
queryField <- lift $ fieldCallsToField (inputArgsToMap inputArgs) hasuraFieldArgs selSet fieldAlias fieldCall
let resultCustomizer' = applyAliasMapping (singletonAliasMapping relationshipName fieldAlias) resultCustomizer
pure $ RemoteJoinField rsi
fieldAlias
resultCustomizer'
queryField
(map fcName $ toList $ NE.tail fieldCall)
where
@ -288,7 +291,7 @@ fetchRemoteJoinFields env manager reqHdrs userInfo remoteJoins = do
resolvedRemoteFields <- runVariableCache $ traverse (traverse (resolveRemoteVariable userInfo)) $ _rjfField <$> batch
let gqlReq = fieldsToRequest resolvedRemoteFields
-- NOTE: discard remote headers (for now):
(_, _, respBody) <- execRemoteGQ env manager userInfo reqHdrs rsi gqlReq
(_, _, respBody) <- execRemoteGQ env manager userInfo reqHdrs (rsDef rsi) gqlReq
case AO.eitherDecode respBody of
Left e -> throw500 $ "Remote server response is not valid JSON: " <> T.pack e
Right r -> do
@ -297,7 +300,9 @@ fetchRemoteJoinFields env manager reqHdrs userInfo remoteJoins = do
if | isNothing errors || errors == Just AO.Null ->
case AO.lookup "data" respObj of
Nothing -> throw400 Unexpected "\"data\" field not found in remote response"
Just v -> onLeft (AO.asObject v) throw500
Just v -> do
let v' = applyRemoteResultCustomizer (foldMap _rjfResultCustomizer batch) v
AO.asObject v' `onLeft` throw500
| otherwise ->
throwError (err400 Unexpected "Errors from remote server")
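The folded result customizer applied to the "data" object above rewrites the remote server's response so the client sees the customized schema; the clearest observable effect is on __typename values, which come back as the remote's real type names and are mapped to their customized counterparts. A minimal aeson illustration, assuming a hypothetical type rename of User to app_User_v1 (the names and the fragment shape here are illustrative, not the engine's code):

{-# LANGUAGE OverloadedStrings #-}
import Data.Aeson (Value, object, (.=))

-- response fragment as returned by the remote server
fromRemote :: Value
fromRemote = object ["__typename" .= ("User" :: String), "name" .= ("Alice" :: String)]

-- the same fragment after the result customizer has mapped the type name
afterCustomization :: Value
afterCustomization = object ["__typename" .= ("app_User_v1" :: String), "name" .= ("Alice" :: String)]

main :: IO ()
main = mapM_ print [fromRemote, afterCustomization]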

View File

@ -59,16 +59,18 @@ and hasura session arguments.
-- | A 'RemoteJoin' represents the context of remote relationship to be extracted from 'AnnFieldG's.
data RemoteJoin
= RemoteJoin
{ _rjName :: !FieldName -- ^ The remote join field name.
, _rjArgs :: ![RemoteFieldArgument] -- ^ User-provided arguments with variables.
, _rjSelSet :: !(G.SelectionSet G.NoFragments RemoteSchemaVariable) -- ^ User-provided selection set of remote field.
, _rjHasuraFields :: !(HashSet FieldName) -- ^ Table fields.
, _rjFieldCall :: !(NonEmpty FieldCall) -- ^ Remote server fields.
, _rjRemoteSchema :: !RemoteSchemaInfo -- ^ The remote schema server info.
, _rjPhantomFields :: ![FieldName]
{ _rjName :: !FieldName -- ^ The remote join field name in the remote schema
, _rjRelationshipName :: !G.Name -- ^ The name of the remote join relationship in the Hasura schema
, _rjArgs :: ![RemoteFieldArgument] -- ^ User-provided arguments with variables.
, _rjResultCustomizer :: !RemoteResultCustomizer -- ^ Customizer for JSON result from the remote server
, _rjSelSet :: !(G.SelectionSet G.NoFragments RemoteSchemaVariable) -- ^ User-provided selection set of remote field.
, _rjHasuraFields :: !(HashSet FieldName) -- ^ Table fields.
, _rjFieldCall :: !(NonEmpty FieldCall) -- ^ Remote server fields.
, _rjRemoteSchema :: !RemoteSchemaInfo -- ^ The remote schema server info.
, _rjPhantomFields :: ![FieldName]
-- ^ Hasura fields which are not in the selection set, but are required as
-- parameters to satisfy the remote join. See Note [Phantom fields in Remote Joins].
} deriving (Eq)
}
type RemoteJoins = NE.NonEmpty (FieldPath, NE.NonEmpty RemoteJoin)
type RemoteJoinMap = Map.HashMap FieldPath (NE.NonEmpty RemoteJoin)

View File

@ -15,7 +15,6 @@ import Control.Monad.Trans.Control (MonadBaseControl)
import qualified Hasura.GraphQL.Execute as E
import qualified Hasura.GraphQL.Execute.Action as E
import qualified Hasura.GraphQL.Execute.Inline as E
import qualified Hasura.GraphQL.Execute.Query as E
import qualified Hasura.GraphQL.Execute.RemoteJoin.Collect as RJ
import qualified Hasura.GraphQL.Transport.HTTP.Protocol as GH
@ -88,26 +87,19 @@ explainGQLQuery sc (GQLExplain query userVarsRaw maybeIsRelay) = do
mkUserInfo (URBFromSessionVariablesFallback adminRoleName) UAdminSecretSent
sessionVariables
-- we don't need to check in allow list as we consider it an admin endpoint
let takeFragment =
\case G.ExecutableDefinitionFragment f -> Just f; _ -> Nothing
fragments = mapMaybe takeFragment $ GH.unGQLExecDoc $ GH._grQuery query
(graphQLContext, queryParts) <- E.getExecPlanPartial userInfo sc queryType query
case queryParts of
G.TypedOperationDefinition G.OperationTypeQuery _ varDefs directives selSet -> do
-- (Here the above fragment inlining is actually executed.)
inlinedSelSet <- E.inlineSelectionSet fragments selSet
G.TypedOperationDefinition G.OperationTypeQuery _ varDefs directives inlinedSelSet -> do
(unpreparedQueries, _, _) <-
E.parseGraphQLQuery graphQLContext varDefs (GH._grVariables query) directives inlinedSelSet
-- TODO: validate directives here
-- TODO: validate directives here
encJFromList <$>
for (OMap.toList unpreparedQueries) (uncurry (explainQueryField userInfo))
G.TypedOperationDefinition G.OperationTypeMutation _ _ _ _ ->
throw400 InvalidParams "only queries can be explained"
G.TypedOperationDefinition G.OperationTypeSubscription _ varDefs directives selSet -> do
-- (Here the above fragment inlining is actually executed.)
inlinedSelSet <- E.inlineSelectionSet fragments selSet
G.TypedOperationDefinition G.OperationTypeSubscription _ varDefs directives inlinedSelSet -> do
(unpreparedQueries, _, normalizedSelectionSet) <- E.parseGraphQLQuery graphQLContext varDefs (GH._grVariables query) directives inlinedSelSet
let parameterizedQueryHash = calculateParameterizedQueryHash normalizedSelectionSet
-- TODO: validate directives here

View File

@ -2,14 +2,22 @@ module Hasura.GraphQL.RemoteServer
( fetchRemoteSchema
, IntrospectionResult
, execRemoteGQ
, identityCustomizer
, customizeIntrospectionResult
-- The following exports are needed for unit tests
, introspectionResultToJSON
, parseIntrospectionResult
, getCustomizer
) where
import Hasura.Prelude
import qualified Data.Aeson as J
import qualified Data.Aeson.Types as J
import qualified Data.ByteString.Lazy as BL
import qualified Data.Environment as Env
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.Text as T
import qualified Hasura.Tracing as Tracing
import qualified Language.GraphQL.Draft.Parser as G
@ -19,17 +27,23 @@ import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Types as N
import qualified Network.Wreq as Wreq
import Control.Arrow.Extended (left)
import Control.Exception (try)
import Control.Lens ((^.))
import Control.Monad.Unique
import Data.Aeson ((.:), (.:?))
import Data.Aeson ((.:), (.:?), (.=))
import Data.FileEmbed (makeRelativeToProject)
import Data.List.Extended (duplicates)
import Data.Text.Extended (dquoteList, toTxt, (<<>))
import Data.Tuple (swap)
import Network.URI (URI)
import qualified Hasura.GraphQL.Parser.Monad as P
import Hasura.Base.Error
import Hasura.GraphQL.Schema.Remote
import Hasura.GraphQL.Parser.Collect ()
-- Needed for GHCi and HLS due to TH in cyclically dependent modules (see https://gitlab.haskell.org/ghc/ghc/-/issues/1012)
import Hasura.GraphQL.Schema.Remote (buildRemoteParser)
import Hasura.GraphQL.Transport.HTTP.Protocol
import Hasura.HTTP
import Hasura.RQL.DDL.Headers (makeHeadersFromConf)
@ -39,66 +53,131 @@ import Hasura.Server.Version (HasVersion)
import Hasura.Session
introspectionQuery :: GQLReqParsed
introspectionQuery :: GQLReqOutgoing
introspectionQuery =
$(do
fp <- makeRelativeToProject "src-rsr/introspection.json"
TH.qAddDependentFile fp
eitherResult <- TH.runIO $ J.eitherDecodeFileStrict fp
case eitherResult of
Left e -> fail e
Right (r::GQLReqParsed) -> TH.lift r
either fail TH.lift $ do
r@GQLReq{..} <- eitherResult
op <- left show $ getSingleOperation r
pure GQLReq{_grQuery = op, ..}
)
validateSchemaCustomizations
:: forall m
. MonadError QErr m
=> RemoteSchemaCustomizer
-> RemoteSchemaIntrospection
-> m ()
validateSchemaCustomizations remoteSchemaCustomizer (RemoteSchemaIntrospection typeDefinitions) = do
traverse_ validateInterfaceFields typeDefinitions
validateTypeMappingsAreDistinct
traverse_ validateFieldMappingsAreDistinct typeDefinitions
where
customizeTypeName = remoteSchemaCustomizeTypeName remoteSchemaCustomizer
customizeFieldName = remoteSchemaCustomizeFieldName remoteSchemaCustomizer
validateInterfaceFields :: G.TypeDefinition [G.Name] a -> m ()
validateInterfaceFields = \case
G.TypeDefinitionInterface G.InterfaceTypeDefinition{..} ->
for_ _itdPossibleTypes $ \typeName ->
for_ _itdFieldsDefinition $ \G.FieldDefinition{..} -> do
let interfaceCustomizedFieldName = customizeFieldName _itdName _fldName
typeCustomizedFieldName = customizeFieldName typeName _fldName
when (interfaceCustomizedFieldName /= typeCustomizedFieldName) $
throwRemoteSchema
$ "Remote schema customization inconsistency: field name mapping for field "
<> _fldName <<> " of interface " <> _itdName
<<> " is inconsistent with mapping for type " <> typeName
<<> ". Interface field name maps to " <> interfaceCustomizedFieldName
<<> ". Type field name maps to " <> typeCustomizedFieldName <<> "."
_ -> pure ()
validateTypeMappingsAreDistinct :: m ()
validateTypeMappingsAreDistinct = do
let dups = duplicates $ (customizeTypeName . typeDefinitionName) <$> typeDefinitions
unless (Set.null dups) $
throwRemoteSchema $
"Type name mappings are not distinct; the following types appear more than once: " <>
dquoteList dups
validateFieldMappingsAreDistinct :: G.TypeDefinition a b -> m ()
validateFieldMappingsAreDistinct = \case
G.TypeDefinitionInterface G.InterfaceTypeDefinition{..} -> do
let dups = duplicates $ (customizeFieldName _itdName . G._fldName) <$> _itdFieldsDefinition
unless (Set.null dups) $
throwRemoteSchema $
"Field name mappings for interface type " <> _itdName <<>
" are not distinct; the following fields appear more than once: " <>
dquoteList dups
G.TypeDefinitionObject G.ObjectTypeDefinition{..} -> do
let dups = duplicates $ (customizeFieldName _otdName . G._fldName) <$> _otdFieldsDefinition
unless (Set.null dups) $
throwRemoteSchema $
"Field name mappings for object type " <> _otdName <<>
" are not distinct; the following fields appear more than once: " <>
dquoteList dups
_ -> pure ()
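A standalone sketch (plain Strings, not the engine's types) of the kind of clash validateTypeMappingsAreDistinct rejects: an explicit mapping and a suffix rule can send two different remote types to the same customized name, and duplicates among the customized names reveal it:

import qualified Data.HashMap.Strict as Map

-- apply an explicit mapping if present, otherwise the suffix rule
customizeType :: Map.HashMap String String -> (String -> String) -> String -> String
customizeType explicit applySuffix n = Map.lookupDefault (applySuffix n) n explicit

main :: IO ()
main = do
  let explicit    = Map.fromList [("User", "AccountX")]   -- explicit type mapping
      sourceTypes = ["User", "Account", "Order"]
      customized  = map (customizeType explicit (<> "X")) sourceTypes
      -- count each customized name; any count > 1 is a collision
      collisions  = Map.keys (Map.filter (> (1 :: Int)) (Map.fromListWith (+) [(n, 1) | n <- customized]))
  print collisions  -- ["AccountX"]: "User" (explicit) and "Account" (suffixed) collide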
-- | Make an introspection query to the remote graphql server for the data we
-- need to present and stitch the remote schema. This powers add_remote_schema,
-- and is also called by the schema cache rebuilding code in "Hasura.RQL.DDL.Schema.Cache".
fetchRemoteSchema
:: forall m
. (HasVersion, MonadIO m, MonadUnique m, MonadError QErr m)
. (HasVersion, MonadIO m, MonadUnique m, MonadError QErr m, Tracing.MonadTrace m)
=> Env.Environment
-> HTTP.Manager
-> RemoteSchemaName
-> RemoteSchemaInfo
-> ValidatedRemoteSchemaDef
-> m RemoteSchemaCtx
fetchRemoteSchema env manager schemaName schemaInfo@(RemoteSchemaInfo url headerConf _ timeout) = do
headers <- makeHeadersFromConf env headerConf
let hdrsWithDefaults = addDefaultHeaders headers
initReqE <- liftIO $ try $ HTTP.parseRequest (show url)
initReq <- onLeft initReqE (throwRemoteSchemaHttp url)
let req = initReq
{ HTTP.method = "POST"
, HTTP.requestHeaders = hdrsWithDefaults
, HTTP.requestBody = HTTP.RequestBodyLBS $ J.encode introspectionQuery
, HTTP.responseTimeout = HTTP.responseTimeoutMicro (timeout * 1000000)
}
res <- liftIO $ try $ HTTP.httpLbs req manager
resp <- onLeft res (throwRemoteSchemaHttp url)
let respData = resp ^. Wreq.responseBody
statusCode = resp ^. Wreq.responseStatus . Wreq.statusCode
when (statusCode /= 200) $ throwNon200 statusCode respData
fetchRemoteSchema env manager _rscName rsDef@ValidatedRemoteSchemaDef{..} = do
(_, _, rscRawIntrospectionResultDirty) <-
execRemoteGQ env manager adminUserInfo [] rsDef introspectionQuery
-- Parse the JSON into flat GraphQL type AST
(FromIntrospection introspectRes) :: (FromIntrospection IntrospectionResult) <-
onLeft (J.eitherDecode respData) (throwRemoteSchema . T.pack)
FromIntrospection _rscIntroOriginal <-
J.eitherDecode rscRawIntrospectionResultDirty `onLeft` (throwRemoteSchema . T.pack)
-- possibly transform type names from the remote schema, per the user's 'RemoteSchemaDef'
let rsCustomizer = getCustomizer (addDefaultRoots _rscIntroOriginal) _vrsdCustomization
validateSchemaCustomizations rsCustomizer (irDoc _rscIntroOriginal)
let customizedIntro = customizeIntrospectionResult rsCustomizer _rscIntroOriginal
_rscRawIntrospectionResult = J.encode $ FromIntrospection customizedIntro
let _rscInfo = RemoteSchemaInfo{..}
-- Check that the parsed GraphQL type info is valid by running the schema generation
(queryParsers, mutationParsers, subscriptionParsers) <-
P.runSchemaT @m @(P.ParseT Identity) $ buildRemoteParser introspectRes schemaInfo
let parsedIntrospection = ParsedIntrospection queryParsers mutationParsers subscriptionParsers
(piQuery, piMutation, piSubscription) <-
P.runSchemaT @m @(P.ParseT Identity) $ buildRemoteParser _rscIntroOriginal _rscInfo
-- The 'rawIntrospectionResult' contains the 'ByteString' response of
-- the introspection result of the remote server. We store this in the
-- 'RemoteSchemaCtx' so that when 'introspect_remote_schema' is called
-- we can answer it by simply encoding the stored result to JSON.
return $ RemoteSchemaCtx schemaName introspectRes schemaInfo respData parsedIntrospection mempty
return RemoteSchemaCtx
{ _rscPermissions = mempty
, _rscParsed = ParsedIntrospection{..}
, ..}
where
throwNon200 st = throwRemoteSchemaWithInternal (non200Msg st) . decodeNon200Resp
non200Msg st = T.pack $ "introspection query to " <> show url
<> " has responded with " <> show st <> " status code"
decodeNon200Resp bs = case J.eitherDecode bs of
Right a -> J.object ["response" J..= (a :: J.Value)]
Left _ -> J.object ["raw_body" J..= bsToTxt (BL.toStrict bs)]
-- If there is no explicit mutation or subscription root type we need to check for
-- object type definitions with the default names "Mutation" and "Subscription".
-- If found, we add the default roots explicitly to the IntrospectionResult.
-- This simplifies the customization code.
addDefaultRoots :: IntrospectionResult -> IntrospectionResult
addDefaultRoots IntrospectionResult {..} =
IntrospectionResult
{ irMutationRoot = getRootTypeName $$(G.litName "Mutation") irMutationRoot
, irSubscriptionRoot = getRootTypeName $$(G.litName "Subscription") irSubscriptionRoot
, ..
}
where
getRootTypeName defaultName providedName =
providedName <|> (defaultName <$ lookupObject irDoc defaultName)
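The defaulting rule above reads naturally as a small standalone function; this sketch mirrors getRootTypeName, with plain Strings and a membership predicate standing in for lookupObject (names are illustrative):

import Control.Applicative ((<|>))

pickRoot :: Maybe String -> String -> (String -> Bool) -> Maybe String
pickRoot provided conventional existsInSchema =
  provided <|> (if existsInSchema conventional then Just conventional else Nothing)

main :: IO ()
main = do
  print (pickRoot Nothing "Mutation" (== "Mutation"))            -- Just "Mutation": default root adopted
  print (pickRoot Nothing "Subscription" (const False))          -- Nothing: no such object type, root omitted
  print (pickRoot (Just "MutationRoot") "Mutation" (const True)) -- Just "MutationRoot": explicit root wins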
-- | Parsing the introspection query result. We use this newtype wrapper to
-- avoid orphan instances and parse JSON in the way that we need for GraphQL
@ -107,15 +186,26 @@ newtype FromIntrospection a
= FromIntrospection { fromIntrospection :: a }
deriving (Show, Eq, Generic, Functor)
-- | Include a map from type name to kind. This allows us to pass
-- extra type information required to convert our schema
-- back into JSON.
data WithKinds a
= WithKinds !(HashMap G.Name Text) !a
deriving (Show, Eq, Generic, Functor)
pErr :: (MonadFail m) => Text -> m a
pErr = fail . T.unpack
kindErr :: (MonadFail m) => Text -> Text -> m a
kindErr gKind eKind = pErr $ "Invalid `kind: " <> gKind <> "` in " <> eKind
instance J.FromJSON (FromIntrospection G.Description) where
parseJSON = fmap (FromIntrospection . G.Description) . J.parseJSON
instance J.ToJSON (FromIntrospection G.Description) where
toJSON = J.toJSON . G.unDescription . fromIntrospection
instance J.FromJSON (FromIntrospection G.ScalarTypeDefinition) where
parseJSON = J.withObject "ScalarTypeDefinition" $ \o -> do
kind <- o .: "kind"
@ -126,6 +216,13 @@ instance J.FromJSON (FromIntrospection G.ScalarTypeDefinition) where
r = G.ScalarTypeDefinition desc' name []
return $ FromIntrospection r
instance J.ToJSON (FromIntrospection G.ScalarTypeDefinition) where
toJSON (FromIntrospection G.ScalarTypeDefinition {..}) = objectWithoutNullValues
[ "kind" .= J.String "SCALAR"
, "name" .= _stdName
, "description" .= fmap FromIntrospection _stdDescription
]
instance J.FromJSON (FromIntrospection (G.ObjectTypeDefinition G.InputValueDefinition)) where
parseJSON = J.withObject "ObjectTypeDefinition" $ \o -> do
kind <- o .: "kind"
@ -140,6 +237,18 @@ instance J.FromJSON (FromIntrospection (G.ObjectTypeDefinition G.InputValueDefin
r = G.ObjectTypeDefinition desc' name implIfaces [] flds
return $ FromIntrospection r
instance J.ToJSON (WithKinds (G.ObjectTypeDefinition G.InputValueDefinition)) where
toJSON (WithKinds kinds G.ObjectTypeDefinition {..}) = objectWithoutNullValues
[ "kind" .= J.String "OBJECT"
, "name" .= _otdName
, "description" .= fmap FromIntrospection _otdDescription
, "fields" .= fmap (WithKinds kinds) _otdFieldsDefinition
, "interfaces" .= fmap (WithKinds kinds . toInterfaceTypeDefinition) _otdImplementsInterfaces
]
where
toInterfaceTypeDefinition :: G.Name -> G.InterfaceTypeDefinition [G.Name] G.InputValueDefinition
toInterfaceTypeDefinition name = G.InterfaceTypeDefinition Nothing name [] [] []
instance (J.FromJSON (FromIntrospection a)) => J.FromJSON (FromIntrospection (G.FieldDefinition a)) where
parseJSON = J.withObject "FieldDefinition" $ \o -> do
name <- o .: "name"
@ -151,6 +260,14 @@ instance (J.FromJSON (FromIntrospection a)) => J.FromJSON (FromIntrospection (G.
(fromIntrospection _type) []
return $ FromIntrospection r
instance J.ToJSON (WithKinds a) => J.ToJSON (WithKinds (G.FieldDefinition a)) where
toJSON (WithKinds kinds G.FieldDefinition {..}) = objectWithoutNullValues
[ "name" .= _fldName
, "description" .= fmap FromIntrospection _fldDescription
, "args" .= fmap (WithKinds kinds) _fldArgumentsDefinition
, "type" .= WithKinds kinds _fldType
]
instance J.FromJSON (FromIntrospection G.GType) where
parseJSON = J.withObject "GType" $ \o -> do
kind <- o .: "kind"
@ -171,6 +288,25 @@ instance J.FromJSON (FromIntrospection G.GType) where
G.TypeList _ ty -> G.TypeList (G.Nullability False) ty
G.TypeNamed _ n -> G.TypeNamed (G.Nullability False) n
instance J.ToJSON (WithKinds G.GType) where
toJSON (WithKinds kinds gtype) = objectWithoutNullValues $ case gtype of
G.TypeNamed (G.Nullability True) name ->
[ "kind" .= Map.lookup name kinds
, "name" .= name
]
G.TypeNamed (G.Nullability False) name ->
[ "kind" .= J.String "NON_NULL"
, "ofType" .= WithKinds kinds (G.TypeNamed (G.Nullability True) name)
]
G.TypeList (G.Nullability True) ty ->
[ "kind" .= J.String "LIST"
, "ofType" .= WithKinds kinds ty
]
G.TypeList (G.Nullability False) ty ->
[ "kind" .= J.String "NON_NULL"
, "ofType" .= WithKinds kinds (G.TypeList (G.Nullability True) ty)
]
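To make the unrolling concrete, this is the introspection JSON the WithKinds G.GType instance above should produce for the wrapped type [Int!]!, written out by hand with aeson (assuming Int appears in the kinds map as a SCALAR):

{-# LANGUAGE OverloadedStrings #-}
import qualified Data.Aeson as J
import Data.Aeson ((.=))

listOfNonNullInt :: J.Value
listOfNonNullInt = J.object
  [ "kind" .= ("NON_NULL" :: String)
  , "ofType" .= J.object
      [ "kind" .= ("LIST" :: String)
      , "ofType" .= J.object
          [ "kind" .= ("NON_NULL" :: String)
          , "ofType" .= J.object
              [ "kind" .= ("SCALAR" :: String)
              , "name" .= ("Int" :: String)
              ]
          ]
      ]
  ]

main :: IO ()
main = print (J.encode listOfNonNullInt)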
instance J.FromJSON (FromIntrospection G.InputValueDefinition) where
parseJSON = J.withObject "InputValueDefinition" $ \o -> do
name <- o .: "name"
@ -182,11 +318,22 @@ instance J.FromJSON (FromIntrospection G.InputValueDefinition) where
r = G.InputValueDefinition desc' name (fromIntrospection _type) defVal' []
return $ FromIntrospection r
instance J.ToJSON (WithKinds G.InputValueDefinition) where
toJSON (WithKinds kinds G.InputValueDefinition {..}) = objectWithoutNullValues
[ "name" .= _ivdName
, "description" .= fmap FromIntrospection _ivdDescription
, "type" .= WithKinds kinds _ivdType
, "defaultValue" .= fmap FromIntrospection _ivdDefaultValue
]
instance J.FromJSON (FromIntrospection (G.Value Void)) where
parseJSON = J.withText "Value Void" $ \t ->
let parseValueConst = G.runParser G.value
in FromIntrospection <$> onLeft (parseValueConst t) (fail . T.unpack)
instance J.ToJSON (FromIntrospection (G.Value Void)) where
toJSON = J.String . toTxt . fromIntrospection
instance J.FromJSON (FromIntrospection (G.InterfaceTypeDefinition [G.Name] G.InputValueDefinition)) where
parseJSON = J.withObject "InterfaceTypeDefinition" $ \o -> do
kind <- o .: "kind"
@ -203,6 +350,15 @@ instance J.FromJSON (FromIntrospection (G.InterfaceTypeDefinition [G.Name] G.Inp
let r = G.InterfaceTypeDefinition desc' name [] flds possTps
return $ FromIntrospection r
instance J.ToJSON (WithKinds (G.InterfaceTypeDefinition [G.Name] G.InputValueDefinition)) where
toJSON (WithKinds kinds G.InterfaceTypeDefinition {..}) = objectWithoutNullValues
[ "kind" .= J.String "INTERFACE"
, "name" .= _itdName
, "description" .= fmap FromIntrospection _itdDescription
, "fields" .= fmap (WithKinds kinds) _itdFieldsDefinition
, "possibleTypes" .= fmap (WithKinds kinds . toObjectTypeDefinition) _itdPossibleTypes
]
instance J.FromJSON (FromIntrospection G.UnionTypeDefinition) where
parseJSON = J.withObject "UnionTypeDefinition" $ \o -> do
kind <- o .: "kind"
@ -215,6 +371,14 @@ instance J.FromJSON (FromIntrospection G.UnionTypeDefinition) where
let r = G.UnionTypeDefinition desc' name [] possibleTypes'
return $ FromIntrospection r
instance J.ToJSON (WithKinds G.UnionTypeDefinition) where
toJSON (WithKinds kinds G.UnionTypeDefinition {..}) = objectWithoutNullValues
[ "kind" .= J.String "UNION"
, "name" .= _utdName
, "description" .= fmap FromIntrospection _utdDescription
, "possibleTypes" .= fmap (WithKinds kinds . toObjectTypeDefinition) _utdMemberTypes
]
instance J.FromJSON (FromIntrospection G.EnumTypeDefinition) where
parseJSON = J.withObject "EnumTypeDefinition" $ \o -> do
kind <- o .: "kind"
@ -226,6 +390,14 @@ instance J.FromJSON (FromIntrospection G.EnumTypeDefinition) where
let r = G.EnumTypeDefinition desc' name [] (fmap fromIntrospection vals)
return $ FromIntrospection r
instance J.ToJSON (FromIntrospection G.EnumTypeDefinition) where
toJSON (FromIntrospection G.EnumTypeDefinition {..}) = objectWithoutNullValues
[ "kind" .= J.String "ENUM"
, "name" .= _etdName
, "description" .= fmap FromIntrospection _etdDescription
, "enumValues" .= fmap FromIntrospection _etdValueDefinitions
]
instance J.FromJSON (FromIntrospection G.EnumValueDefinition) where
parseJSON = J.withObject "EnumValueDefinition" $ \o -> do
name <- o .: "name"
@ -234,6 +406,12 @@ instance J.FromJSON (FromIntrospection G.EnumValueDefinition) where
let r = G.EnumValueDefinition desc' name []
return $ FromIntrospection r
instance J.ToJSON (FromIntrospection G.EnumValueDefinition) where
toJSON (FromIntrospection G.EnumValueDefinition {..}) = objectWithoutNullValues
[ "name" .= _evdName
, "description" .= fmap FromIntrospection _evdDescription
]
instance J.FromJSON (FromIntrospection (G.InputObjectTypeDefinition G.InputValueDefinition)) where
parseJSON = J.withObject "InputObjectTypeDefinition" $ \o -> do
kind <- o .: "kind"
@ -246,6 +424,14 @@ instance J.FromJSON (FromIntrospection (G.InputObjectTypeDefinition G.InputValue
let r = G.InputObjectTypeDefinition desc' name [] inputFields
return $ FromIntrospection r
instance J.ToJSON (WithKinds (G.InputObjectTypeDefinition G.InputValueDefinition)) where
toJSON (WithKinds kinds G.InputObjectTypeDefinition {..}) = objectWithoutNullValues
[ "kind" .= J.String "INPUT_OBJECT"
, "name" .= _iotdName
, "description" .= fmap FromIntrospection _iotdDescription
, "inputFields" .= fmap (WithKinds kinds) _iotdValueDefinitions
]
instance J.FromJSON (FromIntrospection (G.TypeDefinition [G.Name] G.InputValueDefinition)) where
parseJSON = J.withObject "TypeDefinition" $ \o -> do
kind :: Text <- o .: "kind"
@ -265,6 +451,15 @@ instance J.FromJSON (FromIntrospection (G.TypeDefinition [G.Name] G.InputValueDe
_ -> pErr $ "unknown kind: " <> kind
return $ FromIntrospection r
instance J.ToJSON (WithKinds (G.TypeDefinition [G.Name] G.InputValueDefinition)) where
toJSON (WithKinds kinds typeDefinition) = case typeDefinition of
G.TypeDefinitionScalar scalarTypeDefinition -> J.toJSON $ FromIntrospection scalarTypeDefinition
G.TypeDefinitionObject objectTypeDefinition -> J.toJSON $ WithKinds kinds objectTypeDefinition
G.TypeDefinitionInterface interfaceTypeDefinition -> J.toJSON $ WithKinds kinds interfaceTypeDefinition
G.TypeDefinitionUnion unionTypeDefinition -> J.toJSON $ WithKinds kinds unionTypeDefinition
G.TypeDefinitionEnum enumTypeDefinition -> J.toJSON $ FromIntrospection enumTypeDefinition
G.TypeDefinitionInputObject inputObjectTypeDefinition -> J.toJSON $ WithKinds kinds inputObjectTypeDefinition
instance J.FromJSON (FromIntrospection IntrospectionResult) where
parseJSON = J.withObject "SchemaDocument" $ \o -> do
_data <- o .: "data"
@ -303,6 +498,45 @@ instance J.FromJSON (FromIntrospection IntrospectionResult) where
queryRoot mutationRoot subsRoot
return $ FromIntrospection r
instance J.ToJSON (FromIntrospection IntrospectionResult) where
toJSON (FromIntrospection IntrospectionResult{..}) = objectWithoutNullValues ["data" .= _data]
where
_data = objectWithoutNullValues ["__schema" .= schema]
schema = objectWithoutNullValues
[ "types" .= fmap (WithKinds kinds . fmap _rsitdDefinition) types
, "queryType" .= queryType
, "mutationType" .= mutationType
, "subscriptionType" .= subscriptionType
]
RemoteSchemaIntrospection types = irDoc
kinds = Map.fromList $ types <&> \case
G.TypeDefinitionScalar G.ScalarTypeDefinition{..} -> (_stdName, "SCALAR")
G.TypeDefinitionObject G.ObjectTypeDefinition{..} -> (_otdName, "OBJECT")
G.TypeDefinitionInterface G.InterfaceTypeDefinition{..} -> (_itdName, "INTERFACE")
G.TypeDefinitionUnion G.UnionTypeDefinition{..} -> (_utdName, "UNION")
G.TypeDefinitionEnum G.EnumTypeDefinition{..} -> (_etdName, "ENUM")
G.TypeDefinitionInputObject G.InputObjectTypeDefinition{..} -> (_iotdName, "INPUT_OBJECT")
named :: G.Name -> J.Object
named = ("name" .=)
queryType = named irQueryRoot
mutationType = named <$> irMutationRoot
subscriptionType = named <$> irSubscriptionRoot
parseIntrospectionResult :: J.Value -> J.Parser IntrospectionResult
parseIntrospectionResult value = fromIntrospection <$> J.parseJSON value
introspectionResultToJSON :: IntrospectionResult -> J.Value
introspectionResultToJSON = J.toJSON . FromIntrospection
objectWithoutNullValues :: [J.Pair] -> J.Value
objectWithoutNullValues = J.object . filter notNull
where
notNull (_, J.Null) = False
notNull _ = True
toObjectTypeDefinition :: G.Name -> G.ObjectTypeDefinition G.InputValueDefinition
toObjectTypeDefinition name = G.ObjectTypeDefinition Nothing name [] [] []
execRemoteGQ
:: ( HasVersion
, MonadIO m
@ -313,16 +547,16 @@ execRemoteGQ
-> HTTP.Manager
-> UserInfo
-> [N.Header]
-> RemoteSchemaInfo
-> ValidatedRemoteSchemaDef
-> GQLReqOutgoing
-> m (DiffTime, [N.Header], BL.ByteString)
-- ^ Returns the response body and headers, along with the time taken for the
-- HTTP request to complete
execRemoteGQ env manager userInfo reqHdrs rsi gqlReq@GQLReq{..} = do
execRemoteGQ env manager userInfo reqHdrs rsdef gqlReq@GQLReq{..} = do
let gqlReqUnparsed = renderGQLReqOutgoing gqlReq
when (G._todType _grQuery == G.OperationTypeSubscription) $
throw400 NotSupported "subscription to remote server is not supported"
throwRemoteSchema "subscription to remote server is not supported"
confHdrs <- makeHeadersFromConf env hdrConf
let clientHdrs = bool [] (mkClientHeadersForward reqHdrs) fwdClientHdrs
-- filter out duplicate headers
@ -346,10 +580,200 @@ execRemoteGQ env manager userInfo reqHdrs rsi gqlReq@GQLReq{..} = do
resp <- onLeft res (throwRemoteSchemaHttp url)
pure (time, mkSetCookieHeaders resp, resp ^. Wreq.responseBody)
where
RemoteSchemaInfo url hdrConf fwdClientHdrs timeout = rsi
ValidatedRemoteSchemaDef url hdrConf fwdClientHdrs timeout _mPrefix = rsdef
userInfoToHdrs = sessionVariablesToHeaders $ _uiSession userInfo
identityCustomizer :: RemoteSchemaCustomizer
identityCustomizer = RemoteSchemaCustomizer Nothing mempty mempty mempty mempty
typeDefinitionName :: G.TypeDefinition a b -> G.Name
typeDefinitionName = \case
G.TypeDefinitionScalar G.ScalarTypeDefinition{..} -> _stdName
G.TypeDefinitionObject G.ObjectTypeDefinition{..} -> _otdName
G.TypeDefinitionInterface G.InterfaceTypeDefinition{..} -> _itdName
G.TypeDefinitionUnion G.UnionTypeDefinition{..} -> _utdName
G.TypeDefinitionEnum G.EnumTypeDefinition{..} -> _etdName
G.TypeDefinitionInputObject G.InputObjectTypeDefinition{..} -> _iotdName
getCustomizer :: IntrospectionResult -> Maybe RemoteSchemaCustomization -> RemoteSchemaCustomizer
getCustomizer _ Nothing = identityCustomizer
getCustomizer IntrospectionResult{..} (Just RemoteSchemaCustomization{..}) = RemoteSchemaCustomizer{..}
where
mapMap f = Map.fromList . map f . Map.toList
invertMap = mapMap swap -- key collisions are checked for later in validateSchemaCustomizations
rootTypeNames = if isNothing _rscRootFieldsNamespace
then catMaybes [Just irQueryRoot, irMutationRoot, irSubscriptionRoot]
else []
-- root type names should not be prefixed or suffixed unless
-- there is a custom root namespace field
scalarTypeNames = [intScalar, floatScalar, stringScalar, boolScalar, idScalar]
protectedTypeNames = scalarTypeNames ++ rootTypeNames
nameFilter name = not $ "__" `T.isPrefixOf` G.unName name || name `elem` protectedTypeNames
mkPrefixSuffixMap :: Maybe G.Name -> Maybe G.Name -> [G.Name] -> HashMap G.Name G.Name
mkPrefixSuffixMap mPrefix mSuffix names = Map.fromList $ case (mPrefix, mSuffix) of
(Nothing, Nothing) -> []
(Just prefix, Nothing) -> map (\name -> (name, prefix <> name)) names
(Nothing, Just suffix) -> map (\name -> (name, name <> suffix)) names
(Just prefix, Just suffix) -> map (\name -> (name, prefix <> name <> suffix)) names
RemoteSchemaIntrospection typeDefinitions = irDoc
typesToRename = filter nameFilter $ typeDefinitionName <$> typeDefinitions
typeRenameMap =
case _rscTypeNames of
Nothing -> Map.empty
Just RemoteTypeCustomization{..} ->
_rtcMapping <> mkPrefixSuffixMap _rtcPrefix _rtcSuffix typesToRename
typeFieldMap :: HashMap G.Name [G.Name] -- typeName -> fieldNames
typeFieldMap = Map.fromList $ typeDefinitions >>= \case
G.TypeDefinitionObject G.ObjectTypeDefinition{..} -> pure (_otdName, G._fldName <$> _otdFieldsDefinition)
G.TypeDefinitionInterface G.InterfaceTypeDefinition{..} -> pure (_itdName, G._fldName <$> _itdFieldsDefinition)
_ -> []
mkFieldRenameMap RemoteFieldCustomization{..} fieldNames =
_rfcMapping <> mkPrefixSuffixMap _rfcPrefix _rfcSuffix fieldNames
fieldRenameMap =
case _rscFieldNames of
Nothing -> Map.empty
Just fieldNameCustomizations ->
let customizationMap = Map.fromList $ map (\rfc -> (_rfcParentType rfc, rfc)) fieldNameCustomizations
in Map.intersectionWith mkFieldRenameMap customizationMap typeFieldMap
mapLookup :: (Eq a, Hashable a) => HashMap a a -> a -> a
mapLookup m a = fromMaybe a $ Map.lookup a m
_rscNamespaceFieldName = _rscRootFieldsNamespace
_rscCustomizeTypeName = typeRenameMap
_rscCustomizeFieldName = fieldRenameMap
_rscDecustomizeTypeName = invertMap typeRenameMap
_rscDecustomizeFieldName = mapMap (mapLookup typeRenameMap *** invertMap) fieldRenameMap
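One detail worth spelling out: in typeRenameMap and mkFieldRenameMap the explicit mapping sits on the left of (<>), and HashMap's Semigroup is left-biased union, so explicit entries win over generated prefix/suffix entries. A tiny standalone sketch with made-up names:

import qualified Data.HashMap.Strict as Map

typeRenameMap :: Map.HashMap String String
typeRenameMap =
  Map.fromList [("User", "Person")]                 -- explicit mapping
    <> Map.fromList [ (n, "App" <> n <> "V1")       -- prefix/suffix rule
                    | n <- ["User", "Order"] ]

main :: IO ()
main = mapM_ print (Map.toList typeRenameMap)
-- ("Order","AppOrderV1") and ("User","Person"): "User" keeps its explicit name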
customizeIntrospectionResult :: RemoteSchemaCustomizer -> IntrospectionResult -> IntrospectionResult
customizeIntrospectionResult remoteSchemaCustomizer IntrospectionResult{..} = IntrospectionResult
{ irDoc = customizeRemoteSchemaIntrospection irDoc
, irQueryRoot = customizedQueryRoot
, irMutationRoot = customizedMutationRoot
, irSubscriptionRoot = customizedSubscriptionRoot
}
where
namespaceField = _rscNamespaceFieldName remoteSchemaCustomizer
customizeTypeName = remoteSchemaCustomizeTypeName remoteSchemaCustomizer
customizeFieldName = remoteSchemaCustomizeFieldName remoteSchemaCustomizer
-- Create customized root type names by appending "Query", "Mutation" or "Subscription" to the custom namespace field name
customizeRootTypeName suffix = maybe id (const . (<> suffix)) namespaceField
customizedQueryRoot = customizeRootTypeName $$(G.litName "Query") irQueryRoot
customizedMutationRoot = customizeRootTypeName $$(G.litName "Mutation") <$> irMutationRoot
customizedSubscriptionRoot = customizeRootTypeName $$(G.litName "Subscription") <$> irSubscriptionRoot
-- Create object type definitions for each of the custom namespace root types.
-- Each object type has a single field where the field name is
-- the custom namespace and the type is the original root type.
namespaceRootTypeDefinitions = case namespaceField of
Nothing -> []
Just namespaceFieldName ->
let mkNamespaceTypeDef originalRootTypeName customizedRootTypeName =
G.TypeDefinitionObject $ G.ObjectTypeDefinition (Just "custom namespace root type") customizedRootTypeName [] []
[G.FieldDefinition (Just "custom namespace field") namespaceFieldName []
(G.TypeNamed (G.Nullability True) $ customizeTypeName originalRootTypeName) []]
in catMaybes
[ pure $ mkNamespaceTypeDef irQueryRoot customizedQueryRoot
, mkNamespaceTypeDef <$> irMutationRoot <*> customizedMutationRoot
, mkNamespaceTypeDef <$> irSubscriptionRoot <*> customizedSubscriptionRoot
]
customizeRemoteSchemaIntrospection :: RemoteSchemaIntrospection -> RemoteSchemaIntrospection
customizeRemoteSchemaIntrospection (RemoteSchemaIntrospection typeDefinitions) =
RemoteSchemaIntrospection $ namespaceRootTypeDefinitions ++ customizeTypeDefinitions typeDefinitions
where
customizeTypeDefinitions =
if hasTypeOrFieldCustomizations remoteSchemaCustomizer
then fmap customizeTypeDefinition
else id -- no need to traverse the schema if there are no type or field name customizations
customizeTypeDefinition :: G.TypeDefinition [G.Name] RemoteSchemaInputValueDefinition -> G.TypeDefinition [G.Name] RemoteSchemaInputValueDefinition
customizeTypeDefinition = \case
G.TypeDefinitionScalar scalarTypeDefinition -> G.TypeDefinitionScalar $ customizeScalarTypeDefinition scalarTypeDefinition
G.TypeDefinitionObject objectTypeDefinition -> G.TypeDefinitionObject $ customizeObjectTypeDefinition objectTypeDefinition
G.TypeDefinitionInterface interfaceTypeDefinition -> G.TypeDefinitionInterface $ customizeInterfaceTypeDefinition interfaceTypeDefinition
G.TypeDefinitionUnion unionTypeDefinition -> G.TypeDefinitionUnion $ customizeUnionTypeDefinition unionTypeDefinition
G.TypeDefinitionEnum enumTypeDefinition -> G.TypeDefinitionEnum $ customizeEnumTypeDefinition enumTypeDefinition
G.TypeDefinitionInputObject inputObjectTypeDefinition -> G.TypeDefinitionInputObject $ customizeInputObjectTypeDefinition inputObjectTypeDefinition
customizeScalarTypeDefinition :: G.ScalarTypeDefinition -> G.ScalarTypeDefinition
customizeScalarTypeDefinition G.ScalarTypeDefinition{..} =
G.ScalarTypeDefinition { _stdName = customizeTypeName _stdName, ..}
customizeObjectTypeDefinition :: G.ObjectTypeDefinition RemoteSchemaInputValueDefinition -> G.ObjectTypeDefinition RemoteSchemaInputValueDefinition
customizeObjectTypeDefinition G.ObjectTypeDefinition{..} =
G.ObjectTypeDefinition
{ _otdName = customizeTypeName _otdName
, _otdImplementsInterfaces = customizeTypeName <$> _otdImplementsInterfaces
, _otdFieldsDefinition = customizeFieldDefinition (customizeFieldName _otdName) <$> _otdFieldsDefinition
, ..
}
customizeType :: G.GType -> G.GType
customizeType = \case
G.TypeNamed nullability name -> G.TypeNamed nullability $ customizeTypeName name
G.TypeList nullability gtype -> G.TypeList nullability $ customizeType gtype
customizeFieldDefinition :: (G.Name -> G.Name) -> G.FieldDefinition RemoteSchemaInputValueDefinition -> G.FieldDefinition RemoteSchemaInputValueDefinition
customizeFieldDefinition customizeFieldName' G.FieldDefinition{..} =
G.FieldDefinition
{ _fldName = customizeFieldName' _fldName
, _fldType = customizeType _fldType
, _fldArgumentsDefinition = customizeRemoteSchemaInputValueDefinition <$> _fldArgumentsDefinition
, ..
}
customizeRemoteSchemaInputValueDefinition :: RemoteSchemaInputValueDefinition -> RemoteSchemaInputValueDefinition
customizeRemoteSchemaInputValueDefinition RemoteSchemaInputValueDefinition{..} =
RemoteSchemaInputValueDefinition
{ _rsitdDefinition = customizeInputValueDefinition _rsitdDefinition
, ..
}
customizeInputValueDefinition :: G.InputValueDefinition -> G.InputValueDefinition
customizeInputValueDefinition G.InputValueDefinition{..} =
G.InputValueDefinition
{ _ivdType = customizeType _ivdType
, ..
}
customizeInterfaceTypeDefinition :: G.InterfaceTypeDefinition [G.Name] RemoteSchemaInputValueDefinition -> G.InterfaceTypeDefinition [G.Name] RemoteSchemaInputValueDefinition
customizeInterfaceTypeDefinition G.InterfaceTypeDefinition{..} =
G.InterfaceTypeDefinition
{ _itdName = customizeTypeName _itdName
, _itdFieldsDefinition = customizeFieldDefinition (customizeFieldName _itdName) <$> _itdFieldsDefinition
, _itdPossibleTypes = customizeTypeName <$> _itdPossibleTypes
, ..
}
customizeUnionTypeDefinition :: G.UnionTypeDefinition -> G.UnionTypeDefinition
customizeUnionTypeDefinition G.UnionTypeDefinition{..} =
G.UnionTypeDefinition
{ _utdName = customizeTypeName _utdName
, _utdMemberTypes = customizeTypeName <$> _utdMemberTypes
, ..
}
customizeEnumTypeDefinition :: G.EnumTypeDefinition -> G.EnumTypeDefinition
customizeEnumTypeDefinition G.EnumTypeDefinition{..} =
G.EnumTypeDefinition { _etdName = customizeTypeName _etdName, ..}
customizeInputObjectTypeDefinition :: G.InputObjectTypeDefinition RemoteSchemaInputValueDefinition -> G.InputObjectTypeDefinition RemoteSchemaInputValueDefinition
customizeInputObjectTypeDefinition G.InputObjectTypeDefinition{..} =
G.InputObjectTypeDefinition
{ _iotdName = customizeTypeName _iotdName
, _iotdValueDefinitions = customizeRemoteSchemaInputValueDefinition <$> _iotdValueDefinitions
, ..
}
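As a concrete picture of the namespace wrapping performed by namespaceRootTypeDefinitions above (the namespace is hypothetical, the remote's query root is assumed to be named Query, and this SDL is only a hand-written rendering, not engine output): with root_fields_namespace set to "remote" and no other customization, the original query root is nested under a single field of a new wrapper root type, so client queries take the shape { remote { ... } }.

wrappedQueryRootSDL :: String
wrappedQueryRootSDL = unlines
  [ "type remoteQuery {"
  , "  remote: Query"
  , "}"
  ]

main :: IO ()
main = putStr wrappedQueryRootSDL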
throwRemoteSchema
:: QErrM m
=> Text -> m a

View File

@ -76,8 +76,8 @@ buildGQLContext queryType sources allRemoteSchemas allActions nonObjectCustomTyp
allTableRoles = Set.fromList $ getTableRoles =<< Map.elems sources
adminRemoteRelationshipQueryCtx =
allRemoteSchemas
<&> (\(remoteSchemaCtx, _metadataObj) ->
(_rscIntro remoteSchemaCtx, _rscParsed remoteSchemaCtx))
<&> (\(RemoteSchemaCtx{..}, _metadataObj) ->
RemoteRelationshipQueryContext _rscIntroOriginal _rscParsed $ rsCustomizer _rscInfo)
allRoles :: Set.HashSet RoleName
allRoles = nonTableRoles <> allTableRoles
-- The function permissions context doesn't actually matter because the
@ -115,8 +115,8 @@ buildGQLContext queryType sources allRemoteSchemas allActions nonObjectCustomTyp
let (remotes, remoteErrors) =
runState (remoteSchemaFields queryFieldNames mutationFieldNames allRemoteSchemas) mempty
let adminQueryRemotes = concatMap (piQuery . snd . snd) remotes
adminMutationRemotes = concatMap (concat . piMutation . snd . snd) remotes
let adminQueryRemotes = concatMap (piQuery . _rrscParsedIntrospection . snd) remotes
adminMutationRemotes = concatMap (concat . piMutation . _rrscParsedIntrospection . snd) remotes
roleContexts <-
Set.toMap allRoles & Map.traverseWithKey \role () ->
@ -138,7 +138,7 @@ buildRoleContext
-> RemoteSchemaCache
-> [ActionInfo]
-> NonObjectTypeMap
-> [( RemoteSchemaName , (IntrospectionResult, ParsedIntrospection))]
-> [( RemoteSchemaName , RemoteRelationshipQueryContext)]
-> RoleName
-> RemoteSchemaPermsCtx
-> m (RoleContext GQLContext)
@ -158,8 +158,9 @@ buildRoleContext
-- when remote schema permissions are not enabled, then remote schemas
-- are a public entity which is accessible to all roles
| otherwise -> pure remotes
let queryRemotes = getQueryRemotes $ snd . snd <$> roleBasedRemoteSchemas
mutationRemotes = getMutationRemotes $ snd . snd <$> roleBasedRemoteSchemas
let parsedIntrospections = _rrscParsedIntrospection . snd <$> roleBasedRemoteSchemas
queryRemotes = getQueryRemotes parsedIntrospections
mutationRemotes = getMutationRemotes parsedIntrospections
remoteRelationshipQueryContext = Map.fromList roleBasedRemoteSchemas
roleQueryContext = QueryContext
stringifyNum
@ -361,16 +362,17 @@ buildRoleBasedRemoteSchemaParser
. (MonadError QErr m, MonadUnique m, MonadIO m)
=> RoleName
-> RemoteSchemaCache
-> m [(RemoteSchemaName, (IntrospectionResult, ParsedIntrospection))]
-> m [(RemoteSchemaName, RemoteRelationshipQueryContext )]
buildRoleBasedRemoteSchemaParser roleName remoteSchemaCache = do
let remoteSchemaIntroInfos = map fst $ toList remoteSchemaCache
remoteSchemaPerms <-
for remoteSchemaIntroInfos $ \(RemoteSchemaCtx remoteSchemaName _ remoteSchemaInfo _ _ permissions) ->
for (Map.lookup roleName permissions) $ \introspectRes -> do
for remoteSchemaIntroInfos $ \RemoteSchemaCtx{..} ->
for (Map.lookup roleName _rscPermissions) $ \introspectRes -> do
let customizer = rsCustomizer _rscInfo
(queryParsers, mutationParsers, subscriptionParsers) <-
P.runSchemaT @m @(P.ParseT Identity) $ buildRemoteParser introspectRes remoteSchemaInfo
P.runSchemaT @m @(P.ParseT Identity) $ buildRemoteParser introspectRes _rscInfo
let parsedIntrospection = ParsedIntrospection queryParsers mutationParsers subscriptionParsers
return (remoteSchemaName, (introspectRes, parsedIntrospection))
return (_rscName, RemoteRelationshipQueryContext introspectRes parsedIntrospection customizer)
return $ catMaybes remoteSchemaPerms
-- checks that there are no conflicting root field names between remotes and
@ -381,18 +383,18 @@ remoteSchemaFields
=> [G.Name]
-> [G.Name]
-> HashMap RemoteSchemaName (RemoteSchemaCtx, MetadataObject)
-> m [( RemoteSchemaName , (IntrospectionResult, ParsedIntrospection))]
-> m [( RemoteSchemaName , RemoteRelationshipQueryContext)]
remoteSchemaFields queryFieldNames mutationFieldNames allRemoteSchemas = do
foldlM go [] $ Map.toList allRemoteSchemas
where
go :: [( RemoteSchemaName , (IntrospectionResult, ParsedIntrospection))]
go :: [( RemoteSchemaName , RemoteRelationshipQueryContext)]
-> (RemoteSchemaName, (RemoteSchemaCtx, MetadataObject))
-> m [( RemoteSchemaName , (IntrospectionResult, ParsedIntrospection))]
go okSchemas (newSchemaName, (newSchemaContext, newMetadataObject)) = do
-> m [( RemoteSchemaName , RemoteRelationshipQueryContext)]
go okSchemas (newSchemaName, (RemoteSchemaCtx{..}, newMetadataObject)) = do
let (queryOld, mutationOld) =
unzip $ fmap ((\case ParsedIntrospection q m _ -> (q,m)) . snd . snd) okSchemas
unzip $ fmap ((\case ParsedIntrospection q m _ -> (q,m)) . _rrscParsedIntrospection . snd) okSchemas
let ParsedIntrospection queryNew mutationNew _subscriptionNew
= _rscParsed newSchemaContext
= _rscParsed
checkedDuplicates <- runExceptT do
-- First we check for conflicts in query_root
-- Check for conflicts between remotes
@ -417,7 +419,7 @@ remoteSchemaFields queryFieldNames mutationFieldNames allRemoteSchemas = do
withRecordInconsistency' reason meta
return $ okSchemas
Right () ->
return $ (newSchemaName, ( _rscIntro newSchemaContext,_rscParsed newSchemaContext)):okSchemas
return $ (newSchemaName, RemoteRelationshipQueryContext _rscIntroOriginal _rscParsed $ rsCustomizer _rscInfo):okSchemas
-- variant of 'withRecordInconsistency' that works with 'MonadState' rather than 'ArrowWriter'
withRecordInconsistency' reason metadata = modify' (InconsistentObject reason Nothing metadata Seq.:<|)

View File

@ -31,12 +31,19 @@ type AnnotatedField b = IR.AnnFieldG b (IR.RemoteSelect Unprepare
type ConnectionFields b = IR.ConnectionFields b (IR.RemoteSelect UnpreparedValue) (UnpreparedValue b)
type EdgeFields b = IR.EdgeFields b (IR.RemoteSelect UnpreparedValue) (UnpreparedValue b)
data RemoteRelationshipQueryContext
= RemoteRelationshipQueryContext
{ _rrscIntrospectionResultOriginal :: !IntrospectionResult
, _rrscParsedIntrospection :: !ParsedIntrospection
, _rrscRemoteSchemaCustomizer :: !RemoteSchemaCustomizer
}
data QueryContext =
QueryContext
{ qcStringifyNum :: !Bool
, qcDangerousBooleanCollapse :: !Bool
, qcQueryType :: !ET.GraphQLQueryType
, qcRemoteRelationshipContext :: !(HashMap RemoteSchemaName (IntrospectionResult, ParsedIntrospection))
, qcRemoteRelationshipContext :: !(HashMap RemoteSchemaName RemoteRelationshipQueryContext)
, qcFunctionPermsContext :: !FunctionPermissionsCtx
}

View File

@ -1,19 +1,26 @@
{-# LANGUAGE RecursiveDo #-}
{-# LANGUAGE ViewPatterns #-}
module Hasura.GraphQL.Schema.Remote
( buildRemoteParser
, remoteField
, customizeFieldParser
) where
import Hasura.Prelude
import qualified Control.Monad.State.Lazy as Lazy
import qualified Data.HashMap.Strict as Map
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Data.HashMap.Strict.InsOrd.Extended as OMap
import qualified Data.List.NonEmpty as NE
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
import Control.Lens.Extended (Lens', _1, _2, _3, _4, set, use, (%=),
(^.))
import Data.Monoid (Any (..))
import Data.Parser.JSONPath
import Data.Text.Extended
import Data.Type.Equality
@ -22,13 +29,15 @@ import qualified Hasura.GraphQL.Parser.Internal.TypeChecking as P
import Hasura.Base.Error
import Hasura.GraphQL.Parser as P
import Hasura.RQL.Types.Common (stringScalar)
import Hasura.RQL.Types.RemoteSchema
import Hasura.RQL.Types.SchemaCache
import Hasura.RQL.Types.SchemaCache (IntrospectionResult (IntrospectionResult, irMutationRoot, irQueryRoot, irSubscriptionRoot))
--------------------------------------------------------------------------------
-- Top level function
-- TODO return ParsedIntrospection ?
buildRemoteParser
:: forall m n
. (MonadSchema n m, MonadError QErr m)
@ -38,17 +47,37 @@ buildRemoteParser
, Maybe [P.FieldParser n RemoteField]
, Maybe [P.FieldParser n RemoteField]
)
buildRemoteParser (IntrospectionResult sdoc queryRoot mutationRoot subscriptionRoot) info = do
-- ^ parsers for, respectively: queries, mutations, and subscriptions
buildRemoteParser introspectionResult remoteSchemaInfo = do
(rawQueryParsers, rawMutationParsers, rawSubscriptionParsers) <- buildRawRemoteParser introspectionResult remoteSchemaInfo
pure $ evalMemoState $ do
queryParsers <- customizeFieldParsers remoteSchemaInfo (irQueryRoot introspectionResult) rawQueryParsers
mutationParsers <- sequence $ customizeFieldParsers remoteSchemaInfo <$> irMutationRoot introspectionResult <*> rawMutationParsers
subscriptionParsers <- sequence $ customizeFieldParsers remoteSchemaInfo <$> irSubscriptionRoot introspectionResult <*> rawSubscriptionParsers
pure (queryParsers, mutationParsers, subscriptionParsers)
buildRawRemoteParser
:: forall m n
. (MonadSchema n m, MonadError QErr m)
=> IntrospectionResult
-> RemoteSchemaInfo
-> m ( [P.FieldParser n RawRemoteField]
, Maybe [P.FieldParser n RawRemoteField]
, Maybe [P.FieldParser n RawRemoteField]
)
-- ^ parsers for, respectively: queries, mutations, and subscriptions
buildRawRemoteParser (IntrospectionResult sdoc queryRoot mutationRoot subscriptionRoot) info@RemoteSchemaInfo{..} = do
queryT <- makeParsers queryRoot
mutationT <- makeNonQueryRootFieldParser mutationRoot $$(G.litName "Mutation")
subscriptionT <- makeNonQueryRootFieldParser subscriptionRoot $$(G.litName "Subscription")
return (queryT, mutationT, subscriptionT)
where
makeFieldParser :: G.FieldDefinition RemoteSchemaInputValueDefinition -> m (P.FieldParser n RemoteField)
makeFieldParser :: G.FieldDefinition RemoteSchemaInputValueDefinition -> m (P.FieldParser n RawRemoteField)
makeFieldParser fieldDef = do
fldParser <- remoteFieldFromDefinition sdoc fieldDef
pure $ (RemoteFieldG info) <$> fldParser
makeParsers :: G.Name -> m [P.FieldParser n RemoteField]
pure $ RemoteFieldG info mempty <$> fldParser
makeParsers :: G.Name -> m [P.FieldParser n RawRemoteField]
makeParsers rootName =
case lookupType sdoc rootName of
Just (G.TypeDefinitionObject o) ->
@ -62,7 +91,7 @@ buildRemoteParser (IntrospectionResult sdoc queryRoot mutationRoot subscriptionR
-- schema document, then an error is thrown. If no root name has been provided, we look up the
-- schema document for an object with the default name; if that's not found, we omit the
-- corresponding root from the schema.
makeNonQueryRootFieldParser :: Maybe G.Name -> G.Name -> m (Maybe [P.FieldParser n RemoteField])
makeNonQueryRootFieldParser :: Maybe G.Name -> G.Name -> m (Maybe [P.FieldParser n RawRemoteField])
makeNonQueryRootFieldParser userProvidedRootName defaultRootName =
case userProvidedRootName of
Just _rootName -> traverse makeParsers userProvidedRootName
@ -71,7 +100,6 @@ buildRemoteParser (IntrospectionResult sdoc queryRoot mutationRoot subscriptionR
in bool (pure Nothing) (traverse makeParsers $ Just defaultRootName) $ isDefaultRootObjectExists
--------------------------------------------------------------------------------
-- Remote schema input parsers
@ -819,13 +847,20 @@ remoteField sdoc fieldName description argsDefn typeDefn = do
remoteSchemaUnion sdoc unionTypeDefn <&> mkFieldParserWithSelectionSet argsParser
_ -> throw400 RemoteSchemaError "expected output type, but got input type"
where
mkField :: Maybe G.Name
-> HashMap G.Name (G.Value RemoteSchemaVariable)
-> G.SelectionSet G.NoFragments RemoteSchemaVariable
-> G.Field G.NoFragments RemoteSchemaVariable
mkField alias args selSet =
G.Field alias fieldName args mempty selSet
mkFieldParserWithoutSelectionSet
:: InputFieldsParser n (Altered, HashMap G.Name (G.Value RemoteSchemaVariable))
-> Parser 'Both n ()
-> FieldParser n (G.Field G.NoFragments RemoteSchemaVariable)
mkFieldParserWithoutSelectionSet argsParser outputParser =
P.rawSelection fieldName description argsParser outputParser
<&> \(alias, _, (_, args)) -> G.Field alias fieldName args mempty []
<&> \(alias, _, (_, args)) -> mkField alias args []
mkFieldParserWithSelectionSet
:: InputFieldsParser n (Altered, HashMap G.Name (G.Value RemoteSchemaVariable))
@ -833,7 +868,7 @@ remoteField sdoc fieldName description argsDefn typeDefn = do
-> FieldParser n (G.Field G.NoFragments RemoteSchemaVariable)
mkFieldParserWithSelectionSet argsParser outputParser =
P.rawSubselection fieldName description argsParser outputParser
<&> \(alias, _, (_, args), selSet) -> G.Field alias fieldName args mempty selSet
<&> \(alias, _, (_, args), selSet) -> mkField alias args selSet
-- | helper function to get a parser of an object with its name
-- This function is called from 'remoteSchemaInterface' and
@ -850,3 +885,246 @@ getObjectParser
getObjectParser schemaDoc getObject objName = do
obj <- remoteSchemaObject schemaDoc =<< getObject objName
return $ (objName,) <$> obj
addCustomNamespace
:: forall m. MonadParse m
=> RemoteSchemaInfo
-> G.Name
-> G.Name
-> [P.FieldParser m RawRemoteField]
-> P.FieldParser m RemoteField
addCustomNamespace remoteSchemaInfo rootTypeName namespace fieldParsers =
P.subselection_ namespace Nothing remoteFieldParser
where
rawRemoteFieldsParser :: Parser 'Output m [RawRemoteField]
rawRemoteFieldsParser = P.selectionSet rootTypeName Nothing fieldParsers <&>
toList . OMap.mapWithKey (\alias -> \case
P.SelectField fld -> fld
P.SelectTypename fld ->
-- In P.selectionSet we lose the resultCustomizer from __typename fields so we need to put it back
let resultCustomizer = modifyFieldByName alias $ customizeTypeNameString $ _rscCustomizeTypeName $ rsCustomizer remoteSchemaInfo
in RemoteFieldG remoteSchemaInfo resultCustomizer $ G.Field (Just alias) $$(G.litName "__typename") mempty mempty mempty)
remoteFieldParser :: Parser 'Output m RemoteField
remoteFieldParser = rawRemoteFieldsParser <&> \remoteFields ->
RemoteFieldG remoteSchemaInfo
(foldMap _rfResultCustomizer remoteFields)
(RRFNamespaceField $ G.SelectionField . _rfField <$> remoteFields)
customizeFieldParsers
:: forall m n. (MonadState MemoState m, MonadFix m, MonadParse n)
=> RemoteSchemaInfo
-> G.Name
-> [P.FieldParser n RawRemoteField]
-> m [P.FieldParser n RemoteField]
customizeFieldParsers remoteSchemaInfo@RemoteSchemaInfo{..} rootTypeName fieldParsers = do
fieldParsers' <-
if hasTypeOrFieldCustomizations rsCustomizer
then
traverse (customizeFieldParser' (set rfResultCustomizer) rsCustomizer rootTypeName) fieldParsers
else
-- no need to customize individual FieldParsers if there are no type or field name customizations
pure fieldParsers
pure $ case _rscNamespaceFieldName rsCustomizer of
Nothing -> fmap realRemoteField <$> fieldParsers'
Just namespace -> [addCustomNamespace remoteSchemaInfo rootTypeName namespace fieldParsers']
customizeFieldParser
:: forall n a b. (MonadParse n)
=> (RemoteResultCustomizer -> a -> b)
-> RemoteSchemaCustomizer
-> G.Name
-> P.FieldParser n a
-> (P.FieldParser n b)
customizeFieldParser setResultCustomizer remoteSchemaCustomizer rootTypeName =
if hasTypeOrFieldCustomizations remoteSchemaCustomizer
then evalMemoState . customizeFieldParser' setResultCustomizer remoteSchemaCustomizer rootTypeName
else fmap $ setResultCustomizer mempty
customizeFieldParser'
:: forall m n a b. (MonadState MemoState m, MonadFix m, MonadParse n)
=> (RemoteResultCustomizer -> a -> b)
-> RemoteSchemaCustomizer
-> G.Name
-> P.FieldParser n a
-> m (P.FieldParser n b)
customizeFieldParser' setResultCustomizer remoteSchemaCustomizer rootTypeName P.FieldParser{..} = do
customizedDefinition <- customizeFieldDefinition remoteSchemaCustomizer rootTypeName fDefinition
let customizedRootTypeName = remoteSchemaCustomizeTypeName remoteSchemaCustomizer rootTypeName
pure P.FieldParser
{ fParser =
fParserWithResultCustomizer <=<
customizeField customizedRootTypeName (dInfo customizedDefinition) . fmap customizeVariable
, fDefinition = customizedDefinition
}
where
fParserWithResultCustomizer :: (RemoteResultCustomizer, G.Field G.NoFragments Variable) -> n b
fParserWithResultCustomizer (resultCustomizer, fld) =
setResultCustomizer resultCustomizer <$> fParser fld
customizeVariable :: Variable -> Variable
customizeVariable Variable{..} = Variable{vType = customizeGraphQLType vType, ..}
customizeGraphQLType :: G.GType -> G.GType
customizeGraphQLType = \case
G.TypeNamed nullability name -> G.TypeNamed nullability $ remoteSchemaDecustomizeTypeName remoteSchemaCustomizer name
G.TypeList nullability gtype -> G.TypeList nullability $ customizeGraphQLType gtype
customizeField :: G.Name -> P.FieldInfo -> G.Field G.NoFragments var -> n (RemoteResultCustomizer, G.Field G.NoFragments var)
customizeField parentTypeName (P.FieldInfo _ fieldType) (G.Field alias fieldName args directives selSet) = do
let fieldName' = if "__" `T.isPrefixOf` G.unName fieldName
then fieldName
else remoteSchemaDecustomizeFieldName remoteSchemaCustomizer parentTypeName fieldName
alias' = alias <|> if fieldName' == fieldName then Nothing else Just fieldName
selSet' :: [(RemoteResultCustomizer, G.Selection G.NoFragments var)] <- withPath (++ [Key "selectionSet"]) $
case fieldType ^. definitionLens of
typeDef@(Definition _ _ _ TIObject{}) -> traverse (customizeSelection typeDef) selSet
typeDef@(Definition _ _ _ TIInterface{}) -> traverse (customizeSelection typeDef) selSet
typeDef@(Definition _ _ _ TIUnion{}) -> traverse (customizeSelection typeDef) selSet
_ -> pure $ (mempty,) <$> selSet
let resultCustomizer = modifyFieldByName (fromMaybe fieldName' alias') $
if fieldName' == $$(G.litName "__typename")
then customizeTypeNameString (_rscCustomizeTypeName remoteSchemaCustomizer)
else foldMap fst selSet'
pure $ (resultCustomizer, G.Field alias' fieldName' args directives $ snd <$> selSet')
customizeSelection :: Definition (TypeInfo 'Output) -> G.Selection G.NoFragments var -> n (RemoteResultCustomizer, G.Selection G.NoFragments var)
customizeSelection parentTypeDef = \case
G.SelectionField fld@G.Field{..} ->
withPath (++ [Key $ G.unName _fName]) $ do
let parentTypeName = getName parentTypeDef
fieldInfo <- findField _fName parentTypeName $ dInfo parentTypeDef
second G.SelectionField <$> customizeField parentTypeName fieldInfo fld
G.SelectionInlineFragment G.InlineFragment{..} -> do
inlineFragmentType <-
case _ifTypeCondition of
Nothing -> pure parentTypeDef
Just typeName -> findSubtype typeName parentTypeDef
customizedSelectionSet <- traverse (customizeSelection inlineFragmentType) _ifSelectionSet
pure $ (foldMap fst customizedSelectionSet, G.SelectionInlineFragment G.InlineFragment
{ _ifTypeCondition = remoteSchemaDecustomizeTypeName remoteSchemaCustomizer <$> _ifTypeCondition
, _ifSelectionSet = snd <$> customizedSelectionSet
, .. })
findField :: G.Name -> G.Name -> TypeInfo 'Output -> n P.FieldInfo
findField fieldName parentTypeName parentTypeInfo =
if fieldName == $$(G.litName "__typename") -- TODO can we avoid checking for __typename in two different places?
then pure $ P.FieldInfo [] $ NonNullable $ TNamed $ mkDefinition stringScalar Nothing TIScalar
else do
fields <- case parentTypeInfo of
TIObject objectInfo -> pure $ oiFields objectInfo
TIInterface interfaceInfo -> pure $ iiFields interfaceInfo
_ -> parseError $ "Type " <> parentTypeName <<> " has no fields"
fld <- find ((== fieldName) . dName) fields `onNothing` parseError ("field " <> fieldName <<> " not found in type: " <> squote parentTypeName)
pure $ dInfo fld
findSubtype :: G.Name -> Definition (TypeInfo 'Output) -> n (Definition (TypeInfo 'Output))
findSubtype typeName parentTypeDef =
if typeName == getName parentTypeDef
then pure parentTypeDef
else do
possibleTypes <-
case dInfo parentTypeDef of
TIInterface interfaceInfo -> pure $ iiPossibleTypes interfaceInfo
TIUnion unionInfo -> pure $ uiPossibleTypes unionInfo
_ -> parseError $ "Type " <> getName parentTypeDef <<> " has no possible subtypes"
fmap TIObject <$> find ((== typeName) . dName) possibleTypes `onNothing`
parseError ("Type " <> typeName <<> " is not a subtype of " <>> getName parentTypeDef)
type MemoState = (HashMap G.Name ObjectInfo, HashMap G.Name InterfaceInfo, HashMap G.Name UnionInfo, HashMap G.Name InputObjectInfo)
evalMemoState :: Lazy.State MemoState a -> a
evalMemoState = flip Lazy.evalState (mempty, mempty, mempty, mempty)
-- | memo function used to "tie the knot" and preserve sharing in the customized type definitions
-- It would be nice if we could just re-use MonadSchema and memoizeOn, but the types there are too
-- parser-specific.
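-- For example, 'customizeObjectInfo' below is defined via @memo _1@, so mutually
-- recursive object and interface definitions are customized at most once per type
-- name and sharing between them is preserved.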
memo :: (MonadState s m, MonadFix m, Hashable k, Eq k) => Lens' s (HashMap k v) -> (k -> v -> m v) -> k -> v -> m v
memo lens f k v = do
m <- use lens
Map.lookup k m `onNothing` mdo
-- Note: v' is added to the state _before_ it is produced
lens %= Map.insert k v'
v' <- f k v
pure v'
customizeFieldDefinition
:: forall m. (MonadState MemoState m, MonadFix m)
=> RemoteSchemaCustomizer
-> G.Name
-> Definition P.FieldInfo
-> m (Definition P.FieldInfo)
customizeFieldDefinition remoteSchemaCustomizer = customizeFieldDefinition'
where
customizeFieldDefinition' :: G.Name -> Definition P.FieldInfo -> m (Definition P.FieldInfo)
customizeFieldDefinition' parentTypeName Definition{..} = do
dInfo' <- customizeFieldInfo dInfo
pure Definition
{ dName = remoteSchemaCustomizeFieldName remoteSchemaCustomizer parentTypeName dName
, dInfo = dInfo'
, ..
}
customizeFieldInfo :: P.FieldInfo -> m P.FieldInfo
customizeFieldInfo (P.FieldInfo args typ) =
P.FieldInfo <$> traverse (traverse $ customizeInputFieldInfo) args <*> customizeType typ
customizeTypeDefinition :: (G.Name -> b -> m b) -> Definition b -> m (Definition b)
customizeTypeDefinition f Definition{..} = do
dInfo' <- f dName dInfo
pure Definition
{ dName = remoteSchemaCustomizeTypeName remoteSchemaCustomizer dName
, dInfo = dInfo'
, ..
}
customizeType :: Type k -> m (Type k)
customizeType = \case
NonNullable nn -> NonNullable <$> customizeNonNullableType nn
Nullable nn -> Nullable <$> customizeNonNullableType nn
customizeNonNullableType :: NonNullableType k -> m (NonNullableType k)
customizeNonNullableType = \case
TList typ -> TList <$> customizeType typ
TNamed definition -> TNamed <$> customizeTypeDefinition customizeTypeInfo definition
customizeTypeInfo :: G.Name -> TypeInfo k -> m (TypeInfo k)
customizeTypeInfo typeName = \case
ti@TIScalar -> pure ti
ti@TIEnum{} -> pure ti
TIInputObject ioi -> TIInputObject <$> customizeInputObjectInfo typeName ioi
TIObject oi -> TIObject <$> customizeObjectInfo typeName oi
TIInterface ii -> TIInterface <$> customizeInterfaceInfo typeName ii
TIUnion ui -> TIUnion <$> customizeUnionInfo typeName ui
customizeInputFieldInfo :: InputFieldInfo -> m InputFieldInfo
customizeInputFieldInfo = \case
IFRequired nnType -> IFRequired <$> customizeNonNullableType nnType
IFOptional typ value -> IFOptional <$> customizeType typ <*> pure value
customizeObjectInfo :: G.Name -> ObjectInfo -> m ObjectInfo
customizeObjectInfo = memo _1 $ \typeName ObjectInfo{..} -> do
oiFields' <- traverse (customizeFieldDefinition' typeName) oiFields
oiImplements' <- traverse (customizeTypeDefinition customizeInterfaceInfo) oiImplements
pure ObjectInfo
{ oiFields = oiFields'
, oiImplements = oiImplements'
}
customizeInterfaceInfo :: G.Name -> InterfaceInfo -> m InterfaceInfo
customizeInterfaceInfo = memo _2 $ \typeName InterfaceInfo{..} -> do
iiFields' <- traverse (customizeFieldDefinition' typeName) iiFields
iiPossibleTypes' <- traverse (customizeTypeDefinition customizeObjectInfo) iiPossibleTypes
pure InterfaceInfo
{ iiFields = iiFields'
, iiPossibleTypes = iiPossibleTypes'
}
customizeUnionInfo :: G.Name -> UnionInfo -> m UnionInfo
customizeUnionInfo = memo _3 $ \_typeName (UnionInfo possibleTypes) ->
UnionInfo <$> traverse (customizeTypeDefinition customizeObjectInfo) possibleTypes
customizeInputObjectInfo :: G.Name -> InputObjectInfo -> m InputObjectInfo
customizeInputObjectInfo = memo _4 $ \_typeName (InputObjectInfo args) ->
InputObjectInfo <$> traverse (traverse $ customizeInputFieldInfo) args


@ -57,7 +57,9 @@ import Hasura.Base.Error
import Hasura.GraphQL.Parser (FieldParser, InputFieldsParser,
Kind (..), Parser,
UnpreparedValue (..), mkParameter)
import Hasura.GraphQL.Parser.Class
import Hasura.GraphQL.Parser.Class (MonadParse (parseErrorWith, withPath),
MonadSchema (..), MonadTableInfo,
askRoleName, askTableInfo, parseError)
import Hasura.GraphQL.Schema.Backend
import Hasura.GraphQL.Schema.BoolExp
import Hasura.GraphQL.Schema.Common
@ -900,30 +902,32 @@ tableAggregationFields sourceName tableInfo selectPermissions = memoizeOn 'table
in P.subselection_ operator Nothing subselectionParser
<&> IR.AFOp . IR.AggregateOp opText
lookupRemoteField'
lookupNestedFieldType'
:: (MonadSchema n m, MonadError QErr m)
=> [P.Definition P.FieldInfo]
=> G.Name
-> RemoteSchemaIntrospection
-> FieldCall
-> m P.FieldInfo
lookupRemoteField' fieldInfos (FieldCall fcName _) =
case find ((== fcName) . P.dName) fieldInfos of
Nothing -> throw400 RemoteSchemaError $ "field with name " <> fcName <<> " not found"
Just (P.Definition _ _ _ fieldInfo) -> pure fieldInfo
-> m G.GType
lookupNestedFieldType' parentTypeName remoteSchemaIntrospection (FieldCall fcName _) =
case lookupObject remoteSchemaIntrospection parentTypeName of
Nothing -> throw400 RemoteSchemaError $ "object with name " <> parentTypeName <<> " not found"
Just G.ObjectTypeDefinition{..} ->
case find ((== fcName) . G._fldName) _otdFieldsDefinition of
Nothing -> throw400 RemoteSchemaError $ "field with name " <> fcName <<> " not found"
Just G.FieldDefinition{..} -> pure _fldType
lookupRemoteField
lookupNestedFieldType
:: (MonadSchema n m, MonadError QErr m)
=> [P.Definition P.FieldInfo]
=> G.Name
-> RemoteSchemaIntrospection
-> NonEmpty FieldCall
-> m P.FieldInfo
lookupRemoteField fieldInfos (fieldCall :| rest) =
-> m G.GType
lookupNestedFieldType parentTypeName remoteSchemaIntrospection (fieldCall :| rest) = do
fieldType <- lookupNestedFieldType' parentTypeName remoteSchemaIntrospection fieldCall
case NE.nonEmpty rest of
Nothing -> lookupRemoteField' fieldInfos fieldCall
Nothing -> pure $ fieldType
Just rest' -> do
(P.FieldInfo _ type') <- lookupRemoteField' fieldInfos fieldCall
(P.Definition _ _ _ (P.ObjectInfo objFieldInfos _))
<- onNothing (P.getObjectInfo type') $
throw400 RemoteSchemaError $ "field " <> fcName fieldCall <<> " is expected to be an object"
lookupRemoteField objFieldInfos rest'
lookupNestedFieldType (G.getBaseType fieldType) remoteSchemaIntrospection rest'
-- | An individual field of a table
--
@ -1122,12 +1126,11 @@ remoteRelationshipField remoteFieldInfo = runMaybeT do
case remoteFieldInfo of
RFISource _remoteSource -> throw500 "not supported yet"
RFISchema remoteSchema -> do
let RemoteSchemaFieldInfo name _params hasuraFields remoteFields remoteSchemaInfo remoteSchemaInputValueDefns remoteSchemaName (table, source) = remoteSchema
(roleIntrospectionResult, parsedIntrospection) <-
let RemoteSchemaFieldInfo name params hasuraFields remoteFields remoteSchemaInfo remoteSchemaInputValueDefns remoteSchemaName (table, source) = remoteSchema
RemoteRelationshipQueryContext roleIntrospectionResultOriginal _ remoteSchemaCustomizer <-
-- The remote relationship field should not be accessible
-- if the remote schema is not accessible to the said role
hoistMaybe $ Map.lookup remoteSchemaName remoteRelationshipQueryCtx
let fieldDefns = map P.fDefinition (piQuery parsedIntrospection)
role <- askRoleName
let hasuraFieldNames = Set.map dbJoinFieldToName hasuraFields
relationshipDef = RemoteSchemaRelationshipDef remoteSchemaName hasuraFieldNames remoteFields
@ -1137,44 +1140,55 @@ remoteRelationshipField remoteFieldInfo = runMaybeT do
-- we don't validate the remote relationship when the role is admin
-- because it's already been validated, when the remote relationship
-- was created
pure (remoteSchemaInputValueDefns, _rfiParamMap remoteSchema)
pure (remoteSchemaInputValueDefns, params)
else do
fieldInfoMap <- (_tciFieldInfoMap . _tiCoreInfo) <$> askTableInfo @b source table
roleRemoteField <-
afold @(Either _) $
validateRemoteSchemaRelationship relationshipDef table name source (remoteSchemaInfo, roleIntrospectionResult) fieldInfoMap
validateRemoteSchemaRelationship relationshipDef table name source (remoteSchemaInfo, roleIntrospectionResultOriginal) fieldInfoMap
pure $ (_rfiInputValueDefinitions roleRemoteField, _rfiParamMap roleRemoteField)
let RemoteSchemaIntrospection typeDefns = irDoc roleIntrospectionResult
let roleIntrospection@(RemoteSchemaIntrospection typeDefns) = irDoc roleIntrospectionResultOriginal
-- add the new input value definitions created by the remote relationship
-- to the existing schema introspection of the role
remoteRelationshipIntrospection = RemoteSchemaIntrospection $ typeDefns <> newInpValDefns
fieldName <- textToName $ remoteRelationshipNameToText $ _rfiName remoteSchema
fieldName <- textToName $ remoteRelationshipNameToText name
-- This selection set parser, should be of the remote node's selection set parser, which comes
-- from the fieldCall
nestedFieldInfo <- lift $ lookupRemoteField fieldDefns $ unRemoteFields $ _rfiRemoteFields remoteSchema
case nestedFieldInfo of
P.FieldInfo{ P.fType = fieldType } -> do
let typeName = P.getName fieldType
fieldTypeDefinition <- onNothing (lookupType (irDoc roleIntrospectionResult) typeName)
-- the below case will never happen because we get the type name
-- from the schema document itself i.e. if a field exists for the
-- given role, then its return type must also exist
$ throw500 $ "unexpected: " <> typeName <<> " not found "
-- These are the arguments that are given by the user while executing a query
let remoteFieldUserArguments = map snd $ Map.toList remoteFieldParamMap
remoteFld <-
lift $ remoteField remoteRelationshipIntrospection fieldName Nothing remoteFieldUserArguments fieldTypeDefinition
pure $ pure $ remoteFld
`P.bindField` \G.Field{ G._fArguments = args, G._fSelectionSet = selSet } -> do
let remoteArgs =
Map.toList args <&> \(argName, argVal) -> IR.RemoteFieldArgument argName $ P.GraphQLValue $ argVal
pure $ IR.AFRemote $ IR.RemoteSelectRemoteSchema $ IR.RemoteSchemaSelect
{ _rselArgs = remoteArgs
, _rselSelection = selSet
, _rselHasuraFields = _rfiHasuraFields remoteSchema
, _rselFieldCall = unRemoteFields $ _rfiRemoteFields remoteSchema
, _rselRemoteSchema = _rfiRemoteSchema remoteSchema
}
let fieldCalls = unRemoteFields remoteFields
parentTypeName = irQueryRoot roleIntrospectionResultOriginal
nestedFieldType <- lift $ lookupNestedFieldType parentTypeName roleIntrospection fieldCalls
let typeName = G.getBaseType nestedFieldType
fieldTypeDefinition <- onNothing (lookupType roleIntrospection typeName)
-- the below case will never happen because we get the type name
-- from the schema document itself i.e. if a field exists for the
-- given role, then its return type must also exist
$ throw500 $ "unexpected: " <> typeName <<> " not found "
-- These are the arguments that are given by the user while executing a query
let remoteFieldUserArguments = map snd $ Map.toList remoteFieldParamMap
remoteFld <-
lift $
customizeFieldParser (,) remoteSchemaCustomizer parentTypeName <$>
remoteField remoteRelationshipIntrospection fieldName Nothing remoteFieldUserArguments fieldTypeDefinition
pure $ pure $ remoteFld
`P.bindField` \(resultCustomizer, G.Field{ G._fArguments = args, G._fSelectionSet = selSet }) -> do
let remoteArgs =
Map.toList args <&> \(argName, argVal) -> IR.RemoteFieldArgument argName $ P.GraphQLValue $ argVal
let resultCustomizer' = applyFieldCalls fieldCalls resultCustomizer
pure $ IR.AFRemote $ IR.RemoteSelectRemoteSchema $ IR.RemoteSchemaSelect
{ _rselName = fieldName
, _rselArgs = remoteArgs
, _rselResultCustomizer = resultCustomizer'
, _rselSelection = selSet
, _rselHasuraFields = hasuraFields
, _rselFieldCall = fieldCalls
, _rselRemoteSchema = remoteSchemaInfo
}
where
-- Apply parent field calls so that the result customizer modifies the nested field
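-- e.g. for field calls a.b.c this wraps the customizer built for the leaf field c
-- as @modifyFieldByName a (modifyFieldByName b customizer)@.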
applyFieldCalls :: NonEmpty FieldCall -> RemoteResultCustomizer -> RemoteResultCustomizer
applyFieldCalls fieldCalls resultCustomizer =
foldr (modifyFieldByName . fcName) resultCustomizer $ NE.init fieldCalls
-- | The custom SQL functions' input "args" field parser
-- > function_name(args: function_args)


@ -275,9 +275,9 @@ runGQ env logger reqId userInfo ipAddress reqHeaders queryType reqUnparsed = do
(RJ.processRemoteJoins env httpManager reqHeaders userInfo $ encJToLBS resp)
remoteJoins
return $ ResultsFragment telemTimeIO_DT Telem.Local finalResponse []
E.ExecStepRemote rsi gqlReq -> do
E.ExecStepRemote rsi resultCustomizer gqlReq -> do
logQueryLog logger $ QueryLog reqUnparsed Nothing reqId QueryLogKindRemoteSchema
runRemoteGQ httpManager fieldName rsi gqlReq
runRemoteGQ httpManager fieldName rsi resultCustomizer gqlReq
E.ExecStepAction aep _ remoteJoins -> do
logQueryLog logger $ QueryLog reqUnparsed Nothing reqId QueryLogKindAction
@ -346,9 +346,9 @@ runGQ env logger reqId userInfo ipAddress reqHeaders queryType reqUnparsed = do
(RJ.processRemoteJoins env httpManager reqHeaders userInfo $ encJToLBS resp)
remoteJoins
return $ ResultsFragment telemTimeIO_DT Telem.Local finalResponse responseHeaders
E.ExecStepRemote rsi gqlReq -> do
E.ExecStepRemote rsi resultCustomizer gqlReq -> do
logQueryLog logger $ QueryLog reqUnparsed Nothing reqId QueryLogKindRemoteSchema
runRemoteGQ httpManager fieldName rsi gqlReq
runRemoteGQ httpManager fieldName rsi resultCustomizer gqlReq
E.ExecStepAction aep _ remoteJoins -> do
logQueryLog logger $ QueryLog reqUnparsed Nothing reqId QueryLogKindAction
(time, (resp, hdrs)) <- doQErr $ do
@ -382,10 +382,10 @@ runGQ env logger reqId userInfo ipAddress reqHeaders queryType reqUnparsed = do
forWithKey = flip OMap.traverseWithKey
runRemoteGQ httpManager fieldName rsi gqlReq = do
runRemoteGQ httpManager fieldName rsi resultCustomizer gqlReq = do
(telemTimeIO_DT, remoteResponseHeaders, resp) <-
doQErr $ E.execRemoteGQ env httpManager userInfo reqHeaders rsi gqlReq
value <- extractFieldFromResponse (G.unName fieldName) resp
doQErr $ E.execRemoteGQ env httpManager userInfo reqHeaders (rsDef rsi) gqlReq
value <- extractFieldFromResponse fieldName rsi resultCustomizer resp
let filteredHeaders = filter ((== "Set-Cookie") . fst) remoteResponseHeaders
pure $ ResultsFragment telemTimeIO_DT Telem.Remote (JO.toEncJSON value) filteredHeaders
@ -466,19 +466,34 @@ coalescePostgresMutations plan = do
Just (oneSourceConfig, mutations)
extractFieldFromResponse
:: Monad m => Text -> LBS.ByteString -> ExceptT (Either GQExecError QErr) m JO.Value
extractFieldFromResponse fieldName bs = do
val <- onLeft (JO.eitherDecode bs) $ do400 . T.pack
:: forall m. Monad m
=> G.Name
-> RemoteSchemaInfo
-> RemoteResultCustomizer
-> LBS.ByteString
-> ExceptT (Either GQExecError QErr) m JO.Value
extractFieldFromResponse fieldName rsi resultCustomizer resp = do
let namespace = fmap G.unName $ _rscNamespaceFieldName $ rsCustomizer rsi
fieldName' = G.unName fieldName
val <- onLeft (JO.eitherDecode resp) $ do400 . T.pack
valObj <- onLeft (JO.asObject val) do400
dataVal <- case JO.toList valObj of
dataVal <- applyRemoteResultCustomizer resultCustomizer <$> case JO.toList valObj of
[("data", v)] -> pure v
_ -> case JO.lookup "errors" valObj of
Just (JO.Array err) -> doGQExecError $ toList $ fmap JO.fromOrdered err
_ -> do400 "Received invalid JSON value from remote"
dataObj <- onLeft (JO.asObject dataVal) do400
fieldVal <- onNothing (JO.lookup fieldName dataObj) $
do400 $ "expecting key " <> fieldName
return fieldVal
case namespace of
Just _ ->
-- If using a custom namespace field then the response from the remote server
-- will already be unwrapped so just return it.
return dataVal
_ -> do
-- No custom namespace so we need to look up the field name in the data
-- object.
dataObj <- onLeft (JO.asObject dataVal) do400
fieldVal <- onNothing (JO.lookup fieldName' dataObj) $
do400 $ "expecting key " <> fieldName'
return fieldVal
where
do400 = withExceptT Right . throw400 RemoteSchemaError
doGQExecError = withExceptT Left . throwError . GQExecError


@ -5,6 +5,8 @@ module Hasura.GraphQL.Transport.HTTP.Protocol
, GQLReqParsed
, GQLReqOutgoing
, renderGQLReqOutgoing
, SingleOperation
, getSingleOperation
, toParsed
, GQLQueryText(..)
, GQLExecDoc(..)
@ -20,6 +22,7 @@ module Hasura.GraphQL.Transport.HTTP.Protocol
, isExecError
) where
import Data.Text.Extended (dquote)
import Hasura.Prelude
import qualified Data.Aeson as J
@ -27,6 +30,7 @@ import qualified Data.Aeson.Casing as J
import qualified Data.Aeson.TH as J
import qualified Data.ByteString.Lazy as BL
import qualified Data.HashMap.Strict as Map
import qualified Hasura.GraphQL.Execute.Inline as EI
import qualified Language.GraphQL.Draft.Parser as G
import qualified Language.GraphQL.Draft.Printer as G
import qualified Language.GraphQL.Draft.Syntax as G
@ -119,7 +123,12 @@ type GQLReqParsed = GQLReq GQLExecDoc
-- '_todName' if present.
--
-- These could maybe benefit from an HKD refactoring.
type GQLReqOutgoing = GQLReq (G.TypedOperationDefinition G.NoFragments G.Name)
type GQLReqOutgoing = GQLReq SingleOperation
-- | A single graphql operation to be executed, with fragment definitions
-- inlined. This is the simplified form of 'GQLExecDoc' or
-- 'G.ExecutableDocument':
type SingleOperation = G.TypedOperationDefinition G.NoFragments G.Name
renderGQLReqOutgoing :: GQLReqOutgoing -> GQLReqUnparsed
renderGQLReqOutgoing = fmap (GQLQueryText . G.renderExecutableDoc . toExecDoc . inlineFrags)
@ -132,6 +141,40 @@ renderGQLReqOutgoing = fmap (GQLQueryText . G.renderExecutableDoc . toExecDoc .
toExecDoc =
G.ExecutableDocument . pure . G.ExecutableDefinitionOperation . G.OperationDefinitionTyped
-- | Obtain the actual single operation to be executed, from the possibly-
-- multi-operation document, validating per the spec and inlining any
-- fragment definitions (pre-defined parts of a graphql query) at fragment
-- spreads (locations where fragments are "spliced"). See:
--
-- https://spec.graphql.org/June2018/#sec-Executable-Definitions and...
-- https://graphql.org/learn/serving-over-http/
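-- For example (illustrative): for a request body
--   {"query": "query A { x } query B { y }", "operationName": "A"}
-- the operation named A is returned (with fragments inlined); without
-- "operationName" the same document is rejected, and a document containing just
-- "{ x }" is treated as a single anonymous query.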
getSingleOperation
:: MonadError QErr m
=> GQLReqParsed
-> m SingleOperation
getSingleOperation (GQLReq opNameM q _varValsM) = do
let (selSets, opDefs, fragments) = G.partitionExDefs $ unGQLExecDoc q
G.TypedOperationDefinition{..} <-
case (opNameM, selSets, opDefs) of
(Just opName, [], _) -> do
let n = _unOperationName opName
opDefM = find (\opDef -> G._todName opDef == Just n) opDefs
onNothing opDefM $ throw400 ValidationFailed $
"no such operation found in the document: " <> dquote n
(Just _, _, _) ->
throw400 ValidationFailed $ "operationName cannot be used when " <>
"an anonymous operation exists in the document"
(Nothing, [selSet], []) ->
return $ G.TypedOperationDefinition G.OperationTypeQuery Nothing [] [] selSet
(Nothing, [], [opDef]) ->
return opDef
(Nothing, _, _) ->
throw400 ValidationFailed $ "exactly one operation has to be present " <>
"in the document when operationName is not specified"
inlinedSelSet <- EI.inlineSelectionSet fragments _todSelectionSet
pure $ G.TypedOperationDefinition{_todSelectionSet = inlinedSelSet, ..}
toParsed :: (MonadError QErr m ) => GQLReqUnparsed -> m GQLReqParsed
toParsed req = case G.parseExecutableDoc gqlText of
Left _ -> withPathK "query" $ throw400 ValidationFailed "not a valid graphql query"


@ -427,9 +427,9 @@ onStart env enabledLogTypes serverEnv wsConn (StartMsg opId q) = catchAndIgnore
(RJ.processRemoteJoins env httpMgr reqHdrs userInfo $ encJToLBS resp)
remoteJoins
return $ ResultsFragment telemTimeIO_DT Telem.Local finalResponse []
E.ExecStepRemote rsi gqlReq -> do
E.ExecStepRemote rsi resultCustomizer gqlReq -> do
logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindRemoteSchema
runRemoteGQ fieldName userInfo reqHdrs rsi gqlReq
runRemoteGQ fieldName userInfo reqHdrs rsi resultCustomizer gqlReq
E.ExecStepAction actionExecPlan _ remoteJoins -> do
logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindAction
(time, (resp, _)) <- doQErr $ do
@ -504,9 +504,9 @@ onStart env enabledLogTypes serverEnv wsConn (StartMsg opId q) = catchAndIgnore
remoteJoins
pure (time, (finalResponse, hdrs))
pure $ ResultsFragment time Telem.Empty resp $ fromMaybe [] hdrs
E.ExecStepRemote rsi gqlReq -> do
E.ExecStepRemote rsi resultCustomizer gqlReq -> do
logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindRemoteSchema
runRemoteGQ fieldName userInfo reqHdrs rsi gqlReq
runRemoteGQ fieldName userInfo reqHdrs rsi resultCustomizer gqlReq
E.ExecStepRaw json -> do
logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindIntrospection
buildRaw json
@ -601,10 +601,10 @@ onStart env enabledLogTypes serverEnv wsConn (StartMsg opId q) = catchAndIgnore
-- Telemetry. NOTE: don't time network IO:
Telem.recordTimingMetric Telem.RequestDimensions{..} Telem.RequestTimings{..}
runRemoteGQ fieldName userInfo reqHdrs rsi gqlReq = do
runRemoteGQ fieldName userInfo reqHdrs rsi resultCustomizer gqlReq = do
(telemTimeIO_DT, _respHdrs, resp) <-
doQErr $ E.execRemoteGQ env httpMgr userInfo reqHdrs rsi gqlReq
value <- mapExceptT lift $ extractFieldFromResponse (G.unName fieldName) resp
doQErr $ E.execRemoteGQ env httpMgr userInfo reqHdrs (rsDef rsi) gqlReq
value <- mapExceptT lift $ extractFieldFromResponse fieldName rsi resultCustomizer resp
return $ ResultsFragment telemTimeIO_DT Telem.Remote (JO.toEncJSON value) []
WSServerEnv logger lqMap getSchemaCache httpMgr _ sqlGenCtx


@ -164,10 +164,10 @@ buildRemoteFieldInfo sourceSource sourceTable fields RemoteRelationship{..} allS
]
pure (RFISource $ mkAnyBackend @b' rsri, tableDependencies <> columnDependencies)
RemoteSchemaRelDef _ remoteRelationship@RemoteSchemaRelationshipDef{..} -> do
(RemoteSchemaCtx _ introspectionResult remoteSchemaInfo _ _ _) <-
RemoteSchemaCtx {..} <-
onNothing (Map.lookup _rrdRemoteSchemaName remoteSchemaMap)
$ throw400 RemoteSchemaError $ "remote schema with name " <> _rrdRemoteSchemaName <<> " not found"
remoteField <- validateRemoteSchemaRelationship remoteRelationship _rtrTable _rtrName _rtrSource (remoteSchemaInfo, introspectionResult) fields
remoteField <- validateRemoteSchemaRelationship remoteRelationship _rtrTable _rtrName _rtrSource (_rscInfo, _rscIntroOriginal) fields
`onLeft` (throw400 RemoteSchemaError . errorToText)
let tableDep = SchemaDependency (SOSourceObj _rtrSource $ AB.mkAnyBackend $ SOITable @b _rtrTable) DRTable
remoteSchemaDep = SchemaDependency (SORemoteSchema _rrdRemoteSchemaName) DRRemoteSchema


@ -24,6 +24,8 @@ import Control.Monad.Unique
import Data.Text.Extended
import Network.HTTP.Client.Extended
import qualified Hasura.Tracing as Tracing
import Hasura.Base.Error
import Hasura.EncJSON
import Hasura.GraphQL.RemoteServer
@ -40,6 +42,7 @@ runAddRemoteSchema
, MonadUnique m
, HasHttpManagerM m
, MetadataM m
, Tracing.MonadTrace m
)
=> Env.Environment
-> AddRemoteSchemaQuery
@ -52,6 +55,8 @@ runAddRemoteSchema env q@(AddRemoteSchemaQuery name defn comment) = do
MetadataModifier $ metaRemoteSchemas %~ OMap.insert name remoteSchemaMeta
pure successMsg
where
-- NOTE: permissions here are empty, manipulated via a separate API with
-- runAddRemoteSchemaPermissions below
remoteSchemaMeta = RemoteSchemaMetadata name defn comment mempty
runAddRemoteSchemaPermissions
@ -95,10 +100,10 @@ runDropRemoteSchemaPermissions
-> m EncJSON
runDropRemoteSchemaPermissions (DropRemoteSchemaPermissions name roleName) = do
remoteSchemaMap <- scRemoteSchemas <$> askSchemaCache
RemoteSchemaCtx _ _ _ _ _ perms <-
RemoteSchemaCtx {..} <-
onNothing (Map.lookup name remoteSchemaMap) $
throw400 NotExists $ "remote schema " <> name <<> " doesn't exist"
onNothing (Map.lookup roleName perms) $
onNothing (Map.lookup roleName _rscPermissions) $
throw400 NotExists $ "permissions for role: " <> roleName <<> " for remote schema:"
<> name <<> " doesn't exist"
buildSchemaCacheFor (MORemoteSchemaPermissions name roleName) $
@ -115,7 +120,7 @@ addRemoteSchemaP1 name = do
<> name <<> " already exists"
addRemoteSchemaP2Setup
:: (HasVersion, QErrM m, MonadIO m, MonadUnique m, HasHttpManagerM m)
:: (HasVersion, QErrM m, MonadIO m, MonadUnique m, HasHttpManagerM m, Tracing.MonadTrace m)
=> Env.Environment
-> AddRemoteSchemaQuery -> m RemoteSchemaCtx
addRemoteSchemaP2Setup env (AddRemoteSchemaQuery name def _) = do
@ -188,9 +193,9 @@ runIntrospectRemoteSchema
:: (CacheRM m, QErrM m) => RemoteSchemaNameQuery -> m EncJSON
runIntrospectRemoteSchema (RemoteSchemaNameQuery rsName) = do
sc <- askSchemaCache
RemoteSchemaCtx _ _ _ introspectionByteString _ _ <-
RemoteSchemaCtx {..} <-
Map.lookup rsName (scRemoteSchemas sc) `onNothing` throw400 NotExists ("remote schema: " <> rsName <<> " not found")
pure $ encJFromLBS introspectionByteString
pure $ encJFromLBS _rscRawIntrospectionResult
runUpdateRemoteSchema
:: (HasVersion
@ -200,6 +205,7 @@ runUpdateRemoteSchema
, MonadUnique m
, HasHttpManagerM m
, MetadataM m
, Tracing.MonadTrace m
)
=> Env.Environment
-> AddRemoteSchemaQuery


@ -954,7 +954,7 @@ resolveRoleBasedRemoteSchema (G.SchemaDocument providedTypeDefns) upstreamRemote
flip onLeft (throw400 ValidationFailed . showErrors)
=<< runValidateT
(flip runReaderT providedSchemaDocWithDefaultScalars
$ validateRemoteSchema $ irDoc $ _rscIntro upstreamRemoteCtx)
$ validateRemoteSchema $ irDoc $ _rscIntroOriginal upstreamRemoteCtx)
pure (introspectionRes, [schemaDependency])
where
showErrors :: [RoleBasedSchemaValidationError] -> Text


@ -92,6 +92,7 @@ buildRebuildableSchemaCacheWithReason reason env metadata = do
Inc.build (buildSchemaCacheRule env) (metadata, initialInvalidationKeys)
pure $ RebuildableSchemaCache (Inc.result result) initialInvalidationKeys (Inc.rebuildRule result)
newtype CacheRWT m a
-- The CacheInvalidations component of the state could actually be collected using WriterT, but
-- WriterT implementations prior to transformers-0.5.6.0 (which added
@ -847,11 +848,16 @@ buildSchemaCacheRule env = proc (metadata, invalidationKeys) -> do
-- We want to cache this call because it fetches the remote schema over HTTP, and we don't
-- want to re-run that if the remote schema definition hasn't changed.
buildRemoteSchema = Inc.cache proc (invalidationKeys, remoteSchema@(RemoteSchemaMetadata name defn comment _)) -> do
-- TODO is it strange how we convert from RemoteSchemaMetadata back
-- to AddRemoteSchemaQuery here? Document types please.
let addRemoteSchemaQuery = AddRemoteSchemaQuery name defn comment
Inc.dependOn -< Inc.selectKeyD name invalidationKeys
(| withRecordInconsistency (liftEitherA <<< bindA -<
runExceptT $ addRemoteSchemaP2Setup env addRemoteSchemaQuery)
runExceptT $ noopTrace $ addRemoteSchemaP2Setup env addRemoteSchemaQuery)
|) (mkRemoteSchemaMetadataObject remoteSchema)
-- TODO continue propagating MonadTrace up calls so that we can get tracing for remote schema introspection.
-- This will require modifying CacheBuild.
noopTrace = Tracing.runTraceTWithReporter Tracing.noReporter "buildSchemaCacheRule"
{- Note [Keep invalidation keys for inconsistent objects]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~


@ -401,11 +401,13 @@ data RemoteFieldArgument
data RemoteSchemaSelect (b :: BackendType)
= RemoteSchemaSelect
{ _rselArgs :: ![RemoteFieldArgument]
, _rselSelection :: !(G.SelectionSet G.NoFragments RemoteSchemaVariable)
, _rselHasuraFields :: !(HashSet (DBJoinField b))
, _rselFieldCall :: !(NonEmpty FieldCall)
, _rselRemoteSchema :: !RemoteSchemaInfo
{ _rselName :: !G.Name
, _rselArgs :: ![RemoteFieldArgument]
, _rselResultCustomizer :: !RemoteResultCustomizer
, _rselSelection :: !(G.SelectionSet G.NoFragments RemoteSchemaVariable)
, _rselHasuraFields :: !(HashSet (DBJoinField b))
, _rselFieldCall :: !(NonEmpty FieldCall)
, _rselRemoteSchema :: !RemoteSchemaInfo
}
-- | Captures the selection set of a remote source relationship.


@ -104,7 +104,7 @@ instance ToJSON JsonAggSelect where
data RelType
= ObjRel
| ArrRel
deriving (Show, Eq, Generic)
deriving (Show, Eq, Generic, Data)
instance NFData RelType
instance Hashable RelType
instance Cacheable RelType


@ -651,10 +651,11 @@ metadataToOrdJSON ( Metadata
] <> catMaybes [maybeCommentToMaybeOrdPair permComment]
remoteSchemaDefToOrdJSON :: RemoteSchemaDef -> AO.Value
remoteSchemaDefToOrdJSON (RemoteSchemaDef url urlFromEnv headers frwrdClientHdrs timeout) =
remoteSchemaDefToOrdJSON (RemoteSchemaDef url urlFromEnv headers frwrdClientHdrs timeout customization) =
AO.object $ catMaybes [ maybeToPair "url" url
, maybeToPair "url_from_env" urlFromEnv
, maybeToPair "timeout_seconds" timeout
, maybeToPair "customization" customization
, headers >>= listToMaybeOrdPair "headers" AO.toOrdered
] <> [("forward_client_headers", AO.toOrdered frwrdClientHdrs) | frwrdClientHdrs]
where


@ -3,8 +3,10 @@ module Hasura.RQL.Types.RemoteSchema where
import Hasura.Prelude
import qualified Data.Aeson as J
import qualified Data.Aeson.Ordered as JO
import qualified Data.Aeson.TH as J
import qualified Data.Environment as Env
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.Text as T
import qualified Database.PG.Query as Q
@ -13,9 +15,11 @@ import qualified Language.GraphQL.Draft.Syntax as G
import qualified Network.URI.Extended as N
import qualified Text.Builder as TB
import Control.Lens.TH (makeLenses)
import Data.Text.Extended
import Data.Text.NonEmpty
import Data.Monoid (Endo (..))
import Hasura.Base.Error
import Hasura.GraphQL.Parser.Schema (Variable)
import Hasura.Incremental (Cacheable)
@ -39,13 +43,113 @@ newtype RemoteSchemaName
, Generic, Cacheable
)
-- NOTE: Prefix and suffix use 'G.Name' so that we can '<>' to form a new valid
-- by-construction 'G.Name'.
data RemoteTypeCustomization
= RemoteTypeCustomization
{ _rtcPrefix :: !(Maybe G.Name)
, _rtcSuffix :: !(Maybe G.Name)
, _rtcMapping :: !(HashMap G.Name G.Name)
} deriving (Show, Eq, Generic)
instance NFData RemoteTypeCustomization
instance Cacheable RemoteTypeCustomization
instance Hashable RemoteTypeCustomization
$(J.deriveToJSON hasuraJSON{J.omitNothingFields=True} ''RemoteTypeCustomization)
instance J.FromJSON RemoteTypeCustomization where
parseJSON = J.withObject "RemoteTypeCustomization" $ \o ->
RemoteTypeCustomization
<$> o J..:? "prefix"
<*> o J..:? "suffix"
<*> o J..:? "mapping" J..!= mempty
data RemoteFieldCustomization
= RemoteFieldCustomization
{ _rfcParentType :: !G.Name
, _rfcPrefix :: !(Maybe G.Name)
, _rfcSuffix :: !(Maybe G.Name)
, _rfcMapping :: !(HashMap G.Name G.Name)
} deriving (Show, Eq, Generic)
instance NFData RemoteFieldCustomization
instance Cacheable RemoteFieldCustomization
instance Hashable RemoteFieldCustomization
$(J.deriveToJSON hasuraJSON{J.omitNothingFields=True} ''RemoteFieldCustomization)
instance J.FromJSON RemoteFieldCustomization where
parseJSON = J.withObject "RemoteFieldCustomization" $ \o ->
RemoteFieldCustomization
<$> o J..: "parent_type"
<*> o J..:? "prefix"
<*> o J..:? "suffix"
<*> o J..:? "mapping" J..!= mempty
data RemoteSchemaCustomization
= RemoteSchemaCustomization
{ _rscRootFieldsNamespace :: !(Maybe G.Name)
, _rscTypeNames :: !(Maybe RemoteTypeCustomization)
, _rscFieldNames :: !(Maybe [RemoteFieldCustomization])
} deriving (Show, Eq, Generic)
instance NFData RemoteSchemaCustomization
instance Cacheable RemoteSchemaCustomization
instance Hashable RemoteSchemaCustomization
$(J.deriveJSON hasuraJSON{J.omitNothingFields=True} ''RemoteSchemaCustomization)
-- | 'RemoteSchemaDef' after validation and baking-in of defaults in 'validateRemoteSchemaDef'.
data ValidatedRemoteSchemaDef
= ValidatedRemoteSchemaDef
{ _vrsdUrl :: !N.URI
, _vrsdHeaders :: ![HeaderConf]
, _vrsdFwdClientHeaders :: !Bool
, _vrsdTimeoutSeconds :: !Int
, _vrsdCustomization :: !(Maybe RemoteSchemaCustomization)
-- ^ See '_rsdCustomization'.
} deriving (Show, Eq, Generic)
instance NFData ValidatedRemoteSchemaDef
instance Cacheable ValidatedRemoteSchemaDef
instance Hashable ValidatedRemoteSchemaDef
$(J.deriveJSON hasuraJSON ''ValidatedRemoteSchemaDef)
data RemoteSchemaCustomizer
= RemoteSchemaCustomizer
{ _rscNamespaceFieldName :: !(Maybe G.Name)
, _rscCustomizeTypeName :: !(HashMap G.Name G.Name) -- ^ type name -> type name
, _rscCustomizeFieldName :: !(HashMap G.Name (HashMap G.Name G.Name)) -- ^ type name -> field name -> field name
, _rscDecustomizeTypeName :: !(HashMap G.Name G.Name) -- ^ type name -> type name
, _rscDecustomizeFieldName :: !(HashMap G.Name (HashMap G.Name G.Name)) -- ^ type name -> field name -> field name
} deriving (Show, Eq, Generic)
instance NFData RemoteSchemaCustomizer
instance Cacheable RemoteSchemaCustomizer
instance Hashable RemoteSchemaCustomizer
$(J.deriveJSON hasuraJSON ''RemoteSchemaCustomizer)
remoteSchemaCustomizeTypeName :: RemoteSchemaCustomizer -> G.Name -> G.Name
remoteSchemaCustomizeTypeName RemoteSchemaCustomizer{..} typeName =
Map.lookupDefault typeName typeName _rscCustomizeTypeName
remoteSchemaCustomizeFieldName :: RemoteSchemaCustomizer -> G.Name -> G.Name -> G.Name
remoteSchemaCustomizeFieldName RemoteSchemaCustomizer{..} typeName fieldName =
Map.lookup typeName _rscCustomizeFieldName >>= Map.lookup fieldName & fromMaybe fieldName
remoteSchemaDecustomizeTypeName :: RemoteSchemaCustomizer -> G.Name -> G.Name
remoteSchemaDecustomizeTypeName RemoteSchemaCustomizer{..} typeName =
Map.lookupDefault typeName typeName _rscDecustomizeTypeName
remoteSchemaDecustomizeFieldName :: RemoteSchemaCustomizer -> G.Name -> G.Name -> G.Name
remoteSchemaDecustomizeFieldName RemoteSchemaCustomizer{..} typeName fieldName =
Map.lookup typeName _rscDecustomizeFieldName >>= Map.lookup fieldName & fromMaybe fieldName
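-- For instance (hypothetical values, writing names informally as strings), a
-- customizer with @_rscCustomizeTypeName = Map.singleton "User" "AppUser"@ and
-- @_rscDecustomizeTypeName = Map.singleton "AppUser" "User"@ gives
-- @remoteSchemaCustomizeTypeName c "User" == "AppUser"@ and
-- @remoteSchemaDecustomizeTypeName c "AppUser" == "User"@; any name without an
-- entry is returned unchanged.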
hasTypeOrFieldCustomizations :: RemoteSchemaCustomizer -> Bool
hasTypeOrFieldCustomizations RemoteSchemaCustomizer{..} =
not $ Map.null _rscCustomizeTypeName && Map.null _rscCustomizeFieldName
-- | 'RemoteSchemaDef' after the RemoteSchemaCustomizer has been generated
-- by fetchRemoteSchema
data RemoteSchemaInfo
= RemoteSchemaInfo
{ rsUrl :: !N.URI
, rsHeaders :: ![HeaderConf]
, rsFwdClientHeaders :: !Bool
, rsTimeoutSeconds :: !Int
{ rsDef :: !ValidatedRemoteSchemaDef
, rsCustomizer :: !RemoteSchemaCustomizer
} deriving (Show, Eq, Generic)
instance NFData RemoteSchemaInfo
instance Cacheable RemoteSchemaInfo
@ -53,7 +157,7 @@ instance Hashable RemoteSchemaInfo
$(J.deriveJSON hasuraJSON ''RemoteSchemaInfo)
-- | From the user's API request
-- | Unvalidated remote schema config, from the user's API request
data RemoteSchemaDef
= RemoteSchemaDef
{ _rsdUrl :: !(Maybe InputWebhook)
@ -61,6 +165,13 @@ data RemoteSchemaDef
, _rsdHeaders :: !(Maybe [HeaderConf])
, _rsdForwardClientHeaders :: !Bool
, _rsdTimeoutSeconds :: !(Maybe Int)
, _rsdCustomization :: !(Maybe RemoteSchemaCustomization)
-- NOTE: In the future we might extend this API to support a small DSL of
-- name transformations; this might live at a different layer, and be part of
-- the schema customization story.
--
-- See: https://github.com/hasura/graphql-engine-mono/issues/144
-- TODO we probably want to move this into a sub-field "transformations"?
} deriving (Show, Eq, Generic)
instance NFData RemoteSchemaDef
instance Cacheable RemoteSchemaDef
@ -74,6 +185,7 @@ instance J.FromJSON RemoteSchemaDef where
<*> o J..:? "headers"
<*> o J..:? "forward_client_headers" J..!= False
<*> o J..:? "timeout_seconds"
<*> o J..:? "customization"
-- | The payload for 'add_remote_schema', and a component of 'Metadata'.
data AddRemoteSchemaQuery
@ -82,6 +194,7 @@ data AddRemoteSchemaQuery
-- ^ An internal identifier for this remote schema.
, _arsqDefinition :: !RemoteSchemaDef
, _arsqComment :: !(Maybe Text)
-- ^ An opaque description or comment. We might display this in the UI, for instance.
} deriving (Show, Eq, Generic)
instance NFData AddRemoteSchemaQuery
instance Cacheable AddRemoteSchemaQuery
@ -103,21 +216,36 @@ getUrlFromEnv env urlFromEnv = do
invalidUri x = "not a valid URI: " <> T.pack x
envNotFoundMsg e = "environment variable '" <> e <> "' not set"
validateRemoteSchemaCustomization
:: (MonadError QErr m)
=> Maybe RemoteSchemaCustomization
-> m ()
validateRemoteSchemaCustomization Nothing = pure ()
validateRemoteSchemaCustomization (Just RemoteSchemaCustomization{..}) =
for_ _rscFieldNames $ \fieldCustomizations ->
for_ fieldCustomizations $ \RemoteFieldCustomization{..} ->
for_ (Map.keys _rfcMapping) $ \fieldName ->
when (isReservedName fieldName) $
throw400 InvalidParams $ "attempt to customize reserved field name " <>> fieldName
where
isReservedName = ("__" `T.isPrefixOf`) . G.unName
validateRemoteSchemaDef
:: (MonadError QErr m, MonadIO m)
=> Env.Environment
-> RemoteSchemaDef
-> m RemoteSchemaInfo
validateRemoteSchemaDef env (RemoteSchemaDef mUrl mUrlEnv hdrC fwdHdrs mTimeout) =
-> m ValidatedRemoteSchemaDef
validateRemoteSchemaDef env (RemoteSchemaDef mUrl mUrlEnv hdrC fwdHdrs mTimeout customization) = do
validateRemoteSchemaCustomization customization
case (mUrl, mUrlEnv) of
(Just url, Nothing) -> do
resolvedWebhookTxt <- unResolvedWebhook <$> resolveWebhook env url
case N.parseURI $ T.unpack resolvedWebhookTxt of
Nothing -> throw400 InvalidParams $ "not a valid URI: " <> resolvedWebhookTxt
Just uri -> return $ RemoteSchemaInfo uri hdrs fwdHdrs timeout
Just uri -> return $ ValidatedRemoteSchemaDef uri hdrs fwdHdrs timeout customization
(Nothing, Just urlEnv) -> do
url <- getUrlFromEnv env urlEnv
return $ RemoteSchemaInfo url hdrs fwdHdrs timeout
return $ ValidatedRemoteSchemaDef url hdrs fwdHdrs timeout customization
(Nothing, Nothing) ->
throw400 InvalidParams "both `url` and `url_from_env` can't be empty"
(Just _, Just _) ->
@ -197,13 +325,90 @@ newtype RemoteSchemaIntrospection
= RemoteSchemaIntrospection [(G.TypeDefinition [G.Name] RemoteSchemaInputValueDefinition)]
deriving (Show, Eq, Generic, Hashable, Cacheable, Ord)
data RemoteFieldG var
-- | A RemoteRootField could either be a real field on the remote server
-- or represent a virtual namespace that only exists in the Hasura schema.
data RemoteRootField var
= RRFNamespaceField !(G.SelectionSet G.NoFragments var) -- ^ virtual namespace field
| RRFRealField !(G.Field G.NoFragments var) -- ^ a real field on the remote server
deriving (Functor, Foldable, Traversable)
-- | For a real remote field gives a SelectionSet for selecting the field itself.
-- For a virtual field gives the unwrapped SelectionSet for the field.
getRemoteFieldSelectionSet :: RemoteRootField var -> G.SelectionSet G.NoFragments var
getRemoteFieldSelectionSet = \case
RRFNamespaceField selSet -> selSet
RRFRealField fld -> [G.SelectionField fld]
-- | Mapping that can be provided to a RemoteResultCustomizer
-- to map field aliases that were not available at field parse time.
-- E.g. for aliases created in the remote server query for remote joins.
type AliasMapping = G.Name -> G.Name
-- | AliasMapping that maps a single field name to an alias
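-- e.g. @singletonAliasMapping "user" "u"@ maps the field name "user" to "u" and
-- leaves every other field name unchanged.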
singletonAliasMapping :: G.Name -> G.Name -> AliasMapping
singletonAliasMapping fieldName alias fieldName' =
if fieldName == fieldName' then alias else fieldName'
-- | Function to modify JSON values returned from the remote server
-- e.g. to map values of __typename fields to customized type names.
-- The customizer uses Maybe to allow short-circuiting subtrees
-- where no customizations are needed.
newtype RemoteResultCustomizer =
RemoteResultCustomizer { unRemoteResultCustomizer :: Maybe (AliasMapping -> Endo JO.Value) }
deriving (Semigroup, Monoid)
-- | Apply a RemoteResultCustomizer to a JSON value
applyRemoteResultCustomizer :: RemoteResultCustomizer -> JO.Value -> JO.Value
applyRemoteResultCustomizer = maybe id (appEndo . ($ id)) . unRemoteResultCustomizer
-- | Apply an AliasMapping to a RemoteResultCustomizer.
-- Once an alias mapping is applied to a customizer, any further
-- alias mapping applications will be ignored.
applyAliasMapping :: AliasMapping -> RemoteResultCustomizer -> RemoteResultCustomizer
applyAliasMapping aliasMapping (RemoteResultCustomizer m) = RemoteResultCustomizer $
m <&> \g _ -> g aliasMapping
-- | Take a RemoteResultCustomizer for a JSON subtree, and a fieldName,
-- and produce a RemoteResultCustomizer for a parent object or array of objects
-- that applies the subtree customizer to the subtree at the given fieldName.
modifyFieldByName :: G.Name -> RemoteResultCustomizer -> RemoteResultCustomizer
modifyFieldByName fieldName (RemoteResultCustomizer m) = RemoteResultCustomizer $
m <&> \g aliasMapping -> Endo $
let Endo f = g id -- AliasMapping is only applied to the top level so use id for nested customizers
modifyFieldByName' = \case
JO.Object o -> JO.Object $ JO.adjust f (G.unName $ aliasMapping fieldName) o
JO.Array a -> JO.Array $ modifyFieldByName' <$> a
v -> v
in modifyFieldByName'
-- | Create a RemoteResultCustomizer that applies the typeNameMap
-- to a JSON string value, e.g. for use in customizing a __typename field value.
customizeTypeNameString :: HashMap G.Name G.Name -> RemoteResultCustomizer
customizeTypeNameString typeNameMap =
if Map.null typeNameMap
then mempty
else RemoteResultCustomizer $ Just $ const $ Endo $ \case
JO.String t -> JO.String $ G.unName $ customizeTypeName $ G.unsafeMkName t
v -> v
where
customizeTypeName :: G.Name -> G.Name
customizeTypeName typeName = Map.lookupDefault typeName typeName typeNameMap
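-- A minimal sketch (assuming the combinators above, 'Data.Aeson' imported as @J@
-- and a @Text@ import) of how result customizers compose: rename the value of a
-- top-level @__typename@ field from "User" to "AppUser".
--
-- > let typeNameMap = Map.singleton (G.unsafeMkName "User") (G.unsafeMkName "AppUser")
-- >     customizer  = modifyFieldByName (G.unsafeMkName "__typename")
-- >                     (customizeTypeNameString typeNameMap)
-- > in applyRemoteResultCustomizer customizer
-- >      (JO.toOrdered (J.object ["__typename" J..= ("User" :: Text)]))
-- > -- evaluates to the ordered JSON object {"__typename": "AppUser"}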
data RemoteFieldG f var
= RemoteFieldG
{ _rfRemoteSchemaInfo :: !RemoteSchemaInfo
, _rfField :: !(G.Field G.NoFragments var)
, _rfResultCustomizer :: !RemoteResultCustomizer
, _rfField :: !(f var)
} deriving (Functor, Foldable, Traversable)
type RemoteField = RemoteFieldG RemoteSchemaVariable
$(makeLenses ''RemoteFieldG)
type RawRemoteField = RemoteFieldG (G.Field G.NoFragments) RemoteSchemaVariable
type RemoteField = RemoteFieldG RemoteRootField RemoteSchemaVariable
realRemoteField :: RawRemoteField -> RemoteField
realRemoteField RemoteFieldG{..} = RemoteFieldG{_rfField = RRFRealField _rfField, ..}
data RemoteSchemaPermsCtx
= RemoteSchemaPermsEnabled


@ -47,10 +47,15 @@ module Hasura.RQL.Types.SchemaCache
, IntrospectionResult(..)
, ParsedIntrospection(..)
, RemoteSchemaCustomizer(..)
, remoteSchemaCustomizeTypeName
, remoteSchemaCustomizeFieldName
, remoteSchemaDecustomizeTypeName
, remoteSchemaDecustomizeFieldName
, RemoteSchemaCtx(..)
, rscName
, rscInfo
, rscIntro
, rscIntroOriginal
, rscParsed
, rscRawIntrospectionResult
, rscPermissions
@ -220,10 +225,10 @@ type WithDeps a = (a, [SchemaDependency])
data IntrospectionResult
= IntrospectionResult
{ irDoc :: RemoteSchemaIntrospection
, irQueryRoot :: G.Name
, irMutationRoot :: Maybe G.Name
, irSubscriptionRoot :: Maybe G.Name
{ irDoc :: !RemoteSchemaIntrospection
, irQueryRoot :: !G.Name
, irMutationRoot :: !(Maybe G.Name)
, irSubscriptionRoot :: !(Maybe G.Name)
} deriving (Show, Eq, Generic)
instance Cacheable IntrospectionResult
@ -238,21 +243,22 @@ data ParsedIntrospection
data RemoteSchemaCtx
= RemoteSchemaCtx
{ _rscName :: !RemoteSchemaName
, _rscIntro :: !IntrospectionResult
, _rscIntroOriginal :: !IntrospectionResult -- ^ Original remote schema without customizations
, _rscInfo :: !RemoteSchemaInfo
, _rscRawIntrospectionResult :: !BL.ByteString
-- ^ The raw response from the introspection query against the remote server.
-- ^ The raw response from the introspection query against the remote server,
-- or the serialized customized introspection result if there are schema customizations.
-- We store this so we can efficiently service 'introspect_remote_schema'.
, _rscParsed :: ParsedIntrospection
, _rscParsed :: ParsedIntrospection -- ^ FieldParsers with schema customizations applied
, _rscPermissions :: !(M.HashMap RoleName IntrospectionResult)
}
$(makeLenses ''RemoteSchemaCtx)
instance ToJSON RemoteSchemaCtx where
toJSON (RemoteSchemaCtx name _ info _ _ _) =
toJSON RemoteSchemaCtx {..} =
object $
[ "name" .= name
, "info" .= toJSON info
[ "name" .= _rscName
, "info" .= toJSON _rscInfo
]
type RemoteSchemaMap = M.HashMap RemoteSchemaName RemoteSchemaCtx


@ -160,8 +160,11 @@ commonResponseHeadersIgnored =
, "Content-Type", "Content-Length"
]
sessionVariablePrefix :: Text
sessionVariablePrefix = "x-hasura-"
isSessionVariable :: Text -> Bool
isSessionVariable = T.isPrefixOf "x-hasura-" . T.toLower
isSessionVariable = T.isPrefixOf sessionVariablePrefix . T.toLower
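-- e.g. @isSessionVariable "X-Hasura-User-Id"@ is True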
isReqUserId :: Text -> Bool
isReqUserId = (== "req_user_id") . T.toLower


@ -51,11 +51,11 @@ newtype Reporter = Reporter
:: forall io a
. MonadIO io
=> TraceContext
-- ^ the current trace context
-- the current trace context
-> Text
-- ^ human-readable name for this block of code
-- human-readable name for this block of code
-> io (a, TracingMetadata)
-- ^ the action whose execution we want to report, returning
-- the action whose execution we want to report, returning
-- any metadata emitted
-> io a
}


@ -0,0 +1,22 @@
module Test.QuickCheck.Arbitrary.Extended
( arbitraryExcluding
, distinctExcluding
, distinctExcluding1
, sublistOf1
) where
import Data.Containers.ListUtils (nubOrd)
import Hasura.Prelude
import Test.QuickCheck
arbitraryExcluding :: (Arbitrary a, Eq a) => [a] -> Gen a
arbitraryExcluding exclusions = arbitrary `suchThat` (`notElem` exclusions)
distinctExcluding :: (Arbitrary a, Ord a) => [a] -> Gen [a]
distinctExcluding = fmap nubOrd . listOf . arbitraryExcluding
distinctExcluding1 :: (Arbitrary a, Ord a) => [a] -> Gen [a]
distinctExcluding1 = fmap nubOrd . listOf1 . arbitraryExcluding
sublistOf1 :: [a] -> Gen [a]
sublistOf1 xs = sublistOf xs `suchThat` (not . null)


@ -0,0 +1,112 @@
{-# LANGUAGE UndecidableInstances #-}
{- |
Generic re-implementation of Arbitrary that discards unrepresentable
values. Modified from the existing generic-arbitrary.
This is a parallel reimplementation of Arbitrary, in which each type might fail
to produce a value if it contains a dreaded @Void@ field or any otherwise
unrepresentable type that cannot have an 'Arbitrary' instance. The default
generic implementation automatically disregards constructors whose values
contain such a field, allowing all structures with other constructors to succeed.
-}
module Test.QuickCheck.Arbitrary.Partial
( PartialArbitrary (..)
, genericPartialArbitrary
) where
import Control.Applicative
import Data.Maybe
import Data.Proxy
import Data.Void
import GHC.Generics
import GHC.TypeLits
import Prelude
import Test.QuickCheck
-- | A partial version of Arbitrary, for types that cannot or might not be able
-- to have an instance because of unrepresentable types such as Void. By
-- default, the generic implementation implements it by recursively calling
-- `partialArbitrary`. Any branch that fails to produce a value gets discarded.
--
-- At the top of the tree: a type that needs arbitrary but has potentially
-- unrepresentable fields can implement its own arbitrary instance with:
--
-- instance Arbitrary MyType where
-- arbitrary = fromJust partialArbitrary
--
-- This will succeed as long as there's one possible representation.
--
-- At the bottom of the tree: a global overlappable instance means you only need
-- to provide PartialArbitrary instances for said unrepresentable types, or
-- containers; any other type will use this catch-all PartialArbitrary instance,
-- which delegates back to Arbitrary.
class PartialArbitrary a where
partialArbitrary :: Maybe (Gen a)
default partialArbitrary :: (Generic a, GPArbitrary ga, ga ~ Rep a) => Maybe (Gen a)
partialArbitrary = genericPartialArbitrary
instance PartialArbitrary Void where
partialArbitrary = Nothing
instance (PartialArbitrary a, PartialArbitrary b) => PartialArbitrary (a,b) where
partialArbitrary = liftA2 (,) <$> partialArbitrary <*> partialArbitrary
instance PartialArbitrary a => PartialArbitrary [a] where
partialArbitrary = liftArbitrary <$> partialArbitrary
instance PartialArbitrary a => PartialArbitrary (Maybe a) where
partialArbitrary = liftArbitrary <$> partialArbitrary
instance {-# OVERLAPPABLE #-} Arbitrary a => PartialArbitrary a where
partialArbitrary = Just arbitrary
genericPartialArbitrary :: (Generic a, GPArbitrary ga, ga ~ Rep a) => Maybe (Gen a)
genericPartialArbitrary = (fmap . fmap) to gPArbitrary
-- | Generic version of PartialArbitrary, used to implement
-- 'genericPartialArbitrary'. This was originally taken verbatim from
-- generic-arbitrary but was adapted for our @Maybe@ approach.
class GPArbitrary a where
gPArbitrary :: Maybe (Gen (a x))
instance GPArbitrary U1 where
gPArbitrary = Just $ pure U1
instance PartialArbitrary c => GPArbitrary (K1 i c) where
gPArbitrary = (fmap . fmap) K1 partialArbitrary
instance GPArbitrary f => GPArbitrary (M1 i c f) where
gPArbitrary = (fmap . fmap) M1 gPArbitrary
instance (GPArbitrary a, GPArbitrary b) => GPArbitrary (a :*: b) where
gPArbitrary = (liftA2 . liftA2) (:*:) gPArbitrary gPArbitrary
instance
( GPArbitrary a
, GPArbitrary b
, KnownNat (SumLen a)
, KnownNat (SumLen b)
) => GPArbitrary (a :+: b) where
gPArbitrary = case (l1, r1) of
-- both branches are representable: distribute evenly
(Just a, Just b) -> Just $ frequency [(lfreq, a), (rfreq, b)]
-- pick whichever is representable
(a, b ) -> a <|> b
where
l1 = (fmap . fmap) L1 gPArbitrary
r1 = (fmap . fmap) R1 gPArbitrary
lfreq = fromIntegral $ natVal (Proxy :: Proxy (SumLen a))
rfreq = fromIntegral $ natVal (Proxy :: Proxy (SumLen b))
-- | Calculates count of constructors encoded by particular ':+:'. This is used
-- to ensure that we consider all constructors of a type evenly.
type family SumLen a :: Nat where
SumLen (a :+: b) = (SumLen a) + (SumLen b)
SumLen a = 1


@ -0,0 +1,223 @@
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Hasura.Generator () where
import Hasura.Prelude
import qualified Data.Aeson as J
import qualified Data.HashMap.Strict as Map
import qualified Data.HashMap.Strict.InsOrd as OM
import qualified Data.HashSet as Set
import qualified Data.HashSet.InsOrd as SetIns
import qualified Data.Text as T
import qualified Data.Vector as V
import qualified Language.GraphQL.Draft.Syntax as G
import Data.Containers.ListUtils (nubOrd)
import Data.Maybe (fromJust)
import Data.Scientific
import Test.QuickCheck
import Test.QuickCheck.Arbitrary.Extended
import Test.QuickCheck.Arbitrary.Generic
import Test.QuickCheck.Arbitrary.Partial
import Hasura.GraphQL.Parser.Schema (InputValue, Variable, VariableInfo)
import Hasura.RQL.Types
import Hasura.Server.Utils
import Hasura.Session
-- -- Containers
instance (Arbitrary k, Eq k, Hashable k, Arbitrary v) => Arbitrary (InsOrdHashMap k v) where
arbitrary = OM.fromList <$> arbitrary
instance (Arbitrary k, Eq k, Hashable k, Arbitrary v) => Arbitrary (HashMap k v) where
arbitrary = Map.fromList <$> arbitrary
instance (Arbitrary a, Eq a, Hashable a) => Arbitrary (SetIns.InsOrdHashSet a) where
arbitrary = SetIns.fromList <$> arbitrary
instance (Arbitrary a, Eq a, Hashable a) => Arbitrary (Set.HashSet a) where
arbitrary = Set.fromList <$> arbitrary
instance (PartialArbitrary k, Eq k, Hashable k, PartialArbitrary v) => PartialArbitrary (HashMap k v) where
partialArbitrary = (fmap . fmap) Map.fromList partialArbitrary
-- -- Arbitrary instances
-- -- Those types, like Metadata, need an arbitrary instance, but may hit @Void@,
-- -- and therefore delegate their arbitrary instance to 'PartialArbitrary'
instance PartialArbitrary a => Arbitrary (G.Directive a) where
arbitrary = fromJust genericPartialArbitrary
instance PartialArbitrary a => Arbitrary (G.Value a) where
arbitrary = fromJust genericPartialArbitrary
-- Regular types.
-- All those types are known to be representable, and we can write a regular
-- Arbitrary instance for each of them. They will use the default generic
-- overlappable instance of PartialArbitrary that simply defers back to
-- Arbitrary.
instance Arbitrary Text where
arbitrary = T.pack <$> listOf (elements alphaNumerics)
instance Arbitrary SessionVariable where
arbitrary = (mkSessionVariable . (sessionVariablePrefix <>)) <$> arbitrary
instance Arbitrary G.Nullability where
arbitrary = genericArbitrary
instance Arbitrary G.GType where
arbitrary = genericArbitrary
instance Arbitrary G.ScalarTypeDefinition where
arbitrary = genericArbitrary
instance Arbitrary G.InputValueDefinition where
arbitrary = genericArbitrary
instance Arbitrary G.RootOperationTypeDefinition where
arbitrary = genericArbitrary
instance Arbitrary G.OperationType where
arbitrary = genericArbitrary
instance Arbitrary G.UnionTypeDefinition where
arbitrary = genericArbitrary
instance Arbitrary G.EnumValueDefinition where
arbitrary = genericArbitrary
instance Arbitrary G.EnumTypeDefinition where
arbitrary = genericArbitrary
instance (Arbitrary a) => Arbitrary (G.FieldDefinition a) where
arbitrary = genericArbitrary
instance (Arbitrary a) => Arbitrary (G.InputObjectTypeDefinition a) where
arbitrary = genericArbitrary
instance (Arbitrary a) => Arbitrary (G.ObjectTypeDefinition a) where
arbitrary = genericArbitrary
instance (Arbitrary a, Arbitrary b) => Arbitrary (G.InterfaceTypeDefinition a b) where
arbitrary = genericArbitrary
instance (Arbitrary a, Arbitrary b) => Arbitrary (G.TypeDefinition a b) where
arbitrary = genericArbitrary
instance Arbitrary G.TypeSystemDefinition where
arbitrary = genericArbitrary
instance Arbitrary G.SchemaDefinition where
arbitrary = genericArbitrary
instance Arbitrary G.SchemaDocument where
arbitrary = genericArbitrary
instance Arbitrary RemoteSchemaPermissionDefinition where
arbitrary = genericArbitrary
instance Arbitrary (InputValue Void) where
arbitrary = genericArbitrary
instance Arbitrary VariableInfo where
arbitrary = genericArbitrary
instance Arbitrary Variable where
arbitrary = genericArbitrary
instance Arbitrary SessionArgumentPresetInfo where
arbitrary = genericArbitrary
instance Arbitrary RemoteSchemaVariable where
arbitrary = genericArbitrary
instance Arbitrary RemoteSchemaInputValueDefinition where
arbitrary = genericArbitrary
instance Arbitrary RemoteSchemaIntrospection where
arbitrary = genericArbitrary
instance Arbitrary IntrospectionResult where
arbitrary = do
scalarTypeNames <- nubOrd <$> arbitrary
objectTypeNames <- distinctExcluding1 scalarTypeNames
interfaceTypeNames <- distinctExcluding $ scalarTypeNames ++ objectTypeNames
unionTypeNames <- distinctExcluding $ scalarTypeNames ++ objectTypeNames ++ interfaceTypeNames
enumTypeNames <- distinctExcluding $ scalarTypeNames ++ objectTypeNames ++ interfaceTypeNames ++ unionTypeNames
let outputTypeNames = scalarTypeNames ++ objectTypeNames ++ interfaceTypeNames ++ unionTypeNames ++ enumTypeNames
inputObjectTypeNames <- distinctExcluding outputTypeNames
let inputTypeNames = scalarTypeNames ++ enumTypeNames ++ inputObjectTypeNames
let genType typeNames = oneof
[ G.TypeNamed <$> arbitrary <*> elements typeNames
, G.TypeList <$> arbitrary <*> genType typeNames]
let genInputValueDefinition =
G.InputValueDefinition <$> arbitrary <*> arbitrary <*> genType inputTypeNames <*> arbitrary <*> pure []
let genRemoteSchemaInputValueDefinition = RemoteSchemaInputValueDefinition <$> genInputValueDefinition <*> pure Nothing
let genRemoteSchemaInputValueDefinitions = case inputTypeNames of
[] -> pure []
_ -> listOf genRemoteSchemaInputValueDefinition
let genFieldDefinitions = do
fieldNames <- nubOrd <$> listOf1 arbitrary
for fieldNames $ \n ->
G.FieldDefinition <$> arbitrary <*> pure n <*> genRemoteSchemaInputValueDefinitions <*> genType outputTypeNames <*> pure []
let genEnumValueDefinition = G.EnumValueDefinition <$> arbitrary <*> arbitrary <*> pure []
scalarTypeDefinitions <- for scalarTypeNames $ \n ->
G.ScalarTypeDefinition <$> arbitrary <*> pure n <*> pure []
objectTypeDefinitions <- for objectTypeNames $ \n ->
G.ObjectTypeDefinition <$> arbitrary <*> pure n <*> sublistOf interfaceTypeNames <*> pure [] <*> genFieldDefinitions
interfaceTypeDefinitions <- for interfaceTypeNames $ \n ->
G.InterfaceTypeDefinition <$> arbitrary <*> pure n <*> pure [] <*> genFieldDefinitions <*> listOf1 arbitrary
unionTypeDefinitions <- for unionTypeNames $ \n ->
G.UnionTypeDefinition <$> arbitrary <*> pure n <*> pure [] <*> sublistOf1 objectTypeNames
enumTypeDefinitions <- for enumTypeNames $ \n ->
G.EnumTypeDefinition <$> arbitrary <*> pure n <*> pure [] <*> listOf1 genEnumValueDefinition
inputObjectTypeDefinitions <- for inputObjectTypeNames $ \n ->
G.InputObjectTypeDefinition <$> arbitrary <*> pure n <*> pure [] <*> genRemoteSchemaInputValueDefinitions
let irDoc = RemoteSchemaIntrospection $
map G.TypeDefinitionScalar scalarTypeDefinitions ++
map G.TypeDefinitionObject objectTypeDefinitions ++
map G.TypeDefinitionInterface interfaceTypeDefinitions ++
map G.TypeDefinitionUnion unionTypeDefinitions ++
map G.TypeDefinitionEnum enumTypeDefinitions ++
map G.TypeDefinitionInputObject inputObjectTypeDefinitions
irQueryRoot <- elements objectTypeNames
let maybeObjectTypeName = elements $ Nothing : (Just <$> objectTypeNames)
irMutationRoot <- maybeObjectTypeName
irSubscriptionRoot <- maybeObjectTypeName
pure $ IntrospectionResult {..}
-- Custom instances
-- All non-generic non-partial instances.
instance Arbitrary G.Name where
arbitrary = G.unsafeMkName . T.pack <$> listOf1 (elements ['a'..'z'])
instance Arbitrary G.Description where
arbitrary = G.Description <$> arbitrary
instance Arbitrary G.EnumValue where
arbitrary = G.EnumValue <$> arbitrary
instance Arbitrary Scientific where
arbitrary = ((fromRational . toRational) :: Int -> Scientific) <$> arbitrary
instance Arbitrary J.Value where
arbitrary = sized sizedArbitraryValue
where
sizedArbitraryValue n
| n <= 0 = oneof [pure J.Null, boolean, number, string]
| otherwise = resize n' $ oneof [pure J.Null, boolean, number, string, array, object']
where
n' = n `div` 2
boolean = J.Bool <$> arbitrary
number = J.Number <$> arbitrary
string = J.String <$> arbitrary
array = J.Array . V.fromList <$> arbitrary
object' = J.Object <$> arbitrary

File diff suppressed because it is too large

View File

@ -22,6 +22,7 @@ import Hasura.GraphQL.Execute.Resolve
import Hasura.GraphQL.Parser.Monad
import Hasura.GraphQL.Parser.Schema
import Hasura.GraphQL.Parser.TestUtils
import Hasura.GraphQL.RemoteServer (identityCustomizer)
import Hasura.GraphQL.Schema.Remote
import Hasura.RQL.Types.RemoteSchema
import Hasura.RQL.Types.SchemaCache
@ -95,9 +96,14 @@ buildQueryParsers introspection = do
(query, _, _) <- runError
$ runSchemaT
$ buildRemoteParser introResult
$ RemoteSchemaInfo
N.nullURI [] False 60
pure $ head query <&> \(RemoteFieldG _ f) -> f
$ RemoteSchemaInfo (ValidatedRemoteSchemaDef N.nullURI [] False 60 Nothing) identityCustomizer
pure $ head query <&> \(RemoteFieldG _ _ abstractField) ->
case abstractField of
RRFRealField f -> f
RRFNamespaceField _ ->
error "buildQueryParsers: unexpected RRFNamespaceField"
-- Shouldn't happen if we're using identityCustomizer
-- TODO: add some tests for remote schema customization
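-- A possible sketch for such a test (hypothetical, not part of this change):
-- build the parser with a non-identity customizer derived from a
-- RemoteSchemaCustomization (e.g. a type-name prefix plus a
-- root_fields_namespace) and assert that the generated parsers expose the
-- renamed fields/types and produce RRFNamespaceField for the namespace root.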
runQueryParser

View File

@ -0,0 +1,14 @@
module Hasura.SessionSpec (spec) where
import Hasura.Generator ()
import Hasura.Prelude
import Hasura.Server.Utils
import Hasura.Session
import Test.Hspec
import Test.Hspec.QuickCheck
spec :: Spec
spec = describe "SessionVariable" $ do
prop "Arbitrary instance generates valid session variables" $ \v ->
sessionVariableToText v `shouldSatisfy` isSessionVariable

View File

@ -9,7 +9,6 @@ import qualified Data.Environment as Env
import qualified Data.NonNegativeIntSpec as NonNegetiveIntSpec
import qualified Data.Parser.CacheControlSpec as CacheControlParser
import qualified Data.Parser.JSONPathSpec as JsonPath
import qualified Data.Parser.URLTemplate as URLTemplate
import qualified Data.TimeSpec as TimeSpec
import qualified Database.PG.Query as Q
import qualified Network.HTTP.Client as HTTP
@ -27,6 +26,7 @@ import Test.Hspec
import qualified Hasura.EventingSpec as EventingSpec
import qualified Hasura.GraphQL.Parser.DirectivesTest as GraphQLDirectivesSpec
import qualified Hasura.GraphQL.RemoteServerSpec as RemoteServerSpec
import qualified Hasura.GraphQL.Schema.RemoteTest as GraphRemoteSchemaSpec
import qualified Hasura.IncrementalSpec as IncrementalSpec
import qualified Hasura.RQL.Types.EndpointSpec as EndpointSpec
@ -73,13 +73,13 @@ unitSpecs = do
describe "Data.NonNegativeInt" NonNegetiveIntSpec.spec
describe "Data.Parser.CacheControl" CacheControlParser.spec
describe "Data.Parser.JSONPath" JsonPath.spec
describe "Data.Parser.URLTemplate" URLTemplate.spec
describe "Data.Time" TimeSpec.spec
describe "Hasura.Eventing" EventingSpec.spec
describe "Hasura.GraphQL.Parser.Directives" GraphQLDirectivesSpec.spec
describe "Hasura.GraphQL.Schema.Remote" GraphRemoteSchemaSpec.spec
describe "Hasura.Incremental" IncrementalSpec.spec
describe "Hasura.RQL.Types.Endpoint" EndpointSpec.spec
describe "Hasura.GraphQL.RemoteServer" RemoteServerSpec.spec
describe "Hasura.SQL.WKT" WKTSpec.spec
describe "Hasura.Server.Auth" AuthSpec.spec
describe "Hasura.Server.Telemetry" TelemetrySpec.spec

View File

@ -228,6 +228,52 @@ class SampleAuthGraphQL(RequestHandler):
resp.headers['Custom-Header'] = 'custom-value'
return resp
# GraphQL server that can return arbitrarily large results
class BigInterface(graphene.Interface):
hello = graphene.Field(graphene.String)
class Big(graphene.ObjectType):
class Meta:
interfaces = (BigInterface, )
big = graphene.Field(BigInterface, required=False)
many = graphene.Field(graphene.List(BigInterface), required=False, arg=graphene.Int(default_value=10))
# hello = graphene.Field(graphene.String)
def resolve_hello(self, info):
return "Hello"
def resolve_big(self, info):
return self
def resolve_many(self, info, arg):
for i in range(arg):
yield self
class BigQuery(graphene.ObjectType):
# start = graphene.Field(BigInterface)
start = graphene.Field(Big)
def resolve_start(self, info):
return Big()
big_schema = graphene.Schema(query=BigQuery)
class BigGraphQL(RequestHandler):
def get(self, request):
return Response(HTTPStatus.METHOD_NOT_ALLOWED)
def post(self, request):
if not request.json:
return Response(HTTPStatus.BAD_REQUEST)
res = big_schema.execute(request.json['query'])
resp = mkJSONResp(res)
resp.headers['Set-Cookie'] = 'abcd'
resp.headers['Custom-Header'] = 'custom-value'
return resp
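# Illustrative query against this handler (not part of the test suite):
#   { start { hello many(arg: 3) { hello } } }
# "start" resolves to a single Big object and "many" yields that object `arg`
# times, so the response grows with the requested arg value.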
# GraphQL server with interfaces
class Character(graphene.Interface):
@ -238,25 +284,6 @@ class Character(graphene.Interface):
self.id = id
self.name = name
class Human(graphene.ObjectType):
class Meta:
interfaces = (Character, )
home_planet = graphene.String()
def __init__(self, home_planet, character):
self.home_planet = home_planet
self.character = character
def resolve_id(self, info):
return self.character.id
def resolve_name(self, info):
return self.character.name
def refolve_primary_function(self, info):
return self.home_planet
class Droid(graphene.ObjectType):
class Meta:
interfaces = (Character, )
@ -276,18 +303,44 @@ class Droid(graphene.ObjectType):
def resolve_primary_function(self, info):
return self.primary_function
class Human(graphene.ObjectType):
class Meta:
interfaces = (Character, )
home_planet = graphene.String()
droid = graphene.Field(Droid, required=False)
def __init__(self, home_planet, droid, character):
self.home_planet = home_planet
self.character = character
self.droid = droid
def resolve_id(self, info):
return self.character.id
def resolve_name(self, info):
return self.character.name
def resolve_primary_function(self, info):
return self.home_planet
def resolve_droid(self, info):
return self.droid
class CharacterSearchResult(graphene.Union):
class Meta:
types = (Human,Droid)
r2 = Droid("Astromech", Character(1,'R2-D2'))
all_characters = {
4: Droid("Astromech", Character(1,'R2-D2')),
5: Human("Tatooine", Character(2, "Luke Skywalker")),
4: r2,
5: Human("Tatooine", r2, Character(2, "Luke Skywalker")),
}
character_search_results = {
1: Droid("Astromech", Character(6,'R2-D2')),
2: Human("Tatooine", Character(7, "Luke Skywalker")),
2: Human("Tatooine", r2, Character(7, "Luke Skywalker")),
}
class CharacterIFaceQuery(graphene.ObjectType):
@ -297,9 +350,17 @@ class CharacterIFaceQuery(graphene.ObjectType):
episode=graphene.Int(required=True)
)
heroes = graphene.Field(
graphene.List(Character),
required=False
)
def resolve_hero(_, info, episode):
return all_characters.get(episode)
def resolve_heroes(_, info):
return all_characters.values()
schema = graphene.Schema(query=CharacterIFaceQuery, types=[Human, Droid])
character_interface_schema = graphene.Schema(query=CharacterIFaceQuery, types=[Human, Droid])
@ -310,7 +371,7 @@ class CharacterInterfaceGraphQL(RequestHandler):
def post(self, req):
if not req.json:
return Response(HTTPStatus.BAD_REQUEST)
res = character_interface_schema.execute(req.json['query'])
res = character_interface_schema.execute(req.json['query'], variable_values=req.json.get('variables'))
return mkJSONResp(res)
class InterfaceGraphQLErrEmptyFieldList(RequestHandler):
@ -743,7 +804,8 @@ handlers = MkHandlers({
'/person-graphql': PersonGraphQL,
'/header-graphql': HeaderTestGraphQL,
'/messages-graphql' : MessagesGraphQL,
'/auth-graphql': SampleAuthGraphQL
'/auth-graphql': SampleAuthGraphQL,
'/big': BigGraphQL
})

View File

@ -4,7 +4,7 @@ status: 400
response:
path: $.args
error: |-
Interface field argument 'Character'."id"("ifaceArg":) required, but Object field 'Human'."id" does not provide it
Interface field argument 'Character'."id"("ifaceArg":) required, but Object field 'Droid'."id" does not provide it
code: remote-schema-error
query:
type: add_remote_schema

View File

@ -0,0 +1,51 @@
type: add_remote_schema_permissions
args:
remote_schema: my-remote-schema
role: user
definition:
schema: |
type User {
user_id: Int
userMessages(whered: MessageWhereInpObj): [Message]
gimmeText(text: String): String
}
interface Communication {
id: Int!
msg: String!
}
type Message implements Communication {
id: Int!
name: String!
msg: String!
errorMsg: String
}
input MessageWhereInpObj {
id: IntCompareObj
name: StringCompareObj
}
input IntCompareObj {
eq : Int
gt : Int
lt : Int
}
input StringCompareObj {
eq : String
}
type Query {
hello: String
messages(where: MessageWhereInpObj): [Message]
user(user_id: Int!): User
users(user_ids: [Int]!): [User]
message(id: Int!) : Message
communications(id: Int): [Communication]
}
schema {
query: Query
}

View File

@ -0,0 +1,18 @@
type: add_remote_schema_permissions
args:
remote_schema: my-remote-schema
role: user
definition:
schema: |
type User {
user_id: Int
}
type Query {
hello: String
user(user_id: Int!): User
}
schema {
query: Query
}

View File

@ -0,0 +1,69 @@
- description: query the remote schema with only args and fields that have been exposed to the role 'user'
url: /v1/graphql
status: 200
headers:
X-Hasura-Role: user
query:
query: |
{
my_remote_schema {
messages(where: {id: {eq: 1}}) {
id
name
msg
}
}
}
response:
data:
my_remote_schema:
messages:
- id: 1
name: alice
msg: You win!
- description: query the remote schema with argument that has not been exposed to the role 'user'
url: /v1/graphql
status: 200
headers:
X-Hasura-Role: user
query:
query: |
{
my_remote_schema {
messages(where: {id: { eq: 1}}, includes: {name: "alice"}) {
id
name
msg
}
}
}
response:
errors:
- extensions:
path: $.selectionSet.my_remote_schema.selectionSet.messages
code: validation-failed
message: '"messages" has no argument named "includes"'
- description: run the above query as the admin role
url: /v1/graphql
status: 200
query:
query: |
{
my_remote_schema {
messages(where: {id : {eq: 1}}, includes: {name:"alice"}) {
id
name
msg
}
}
}
response:
data:
my_remote_schema:
messages:
- id: 1
name: alice
msg: You win!

View File

@ -0,0 +1,110 @@
- description: query the remote schema with only fields that have been exposed to the role 'user'
url: /v1/graphql
status: 200
headers:
X-Hasura-Role: user
query:
query: |
query {
my_remote_schema {
user (user_id: 1) {
foo_user_id
}
}
}
response:
data:
my_remote_schema:
user:
foo_user_id: 1
- description: query the remote schema with only fields that have been exposed to the role 'user' using a fragment
url: /v1/graphql
status: 200
headers:
X-Hasura-Role: user
query:
query: |
query {
my_remote_schema {
user (user_id: 1) {
... userFrag
}
}
}
fragment userFrag on FooUser {
foo_user_id
}
response:
data:
my_remote_schema:
user:
foo_user_id: 1
- description: query the remote schema with only fields that have been exposed to the role 'user' using an inline fragment
url: /v1/graphql
status: 200
headers:
X-Hasura-Role: user
query:
query: |
query {
my_remote_schema {
user (user_id: 1) {
... on FooUser {
foo_user_id
}
}
}
}
response:
data:
my_remote_schema:
user:
foo_user_id: 1
- description: query the remote schema with fields that have not been exposed to the role 'user'
url: /v1/graphql
status: 200
headers:
X-Hasura-Role: user
query:
query: |
query {
my_remote_schema {
hello
user (user_id: 1) {
foo_user_id
foo_gimmeText(text: "hello")
}
}
}
response:
errors:
- extensions:
path: $.selectionSet.my_remote_schema.selectionSet.user.selectionSet.foo_gimmeText
code: validation-failed
message: "field \"foo_gimmeText\" not found in type: 'FooUser'"
- description: run the above query as admin
url: /v1/graphql
status: 200
query:
query: |
query {
my_remote_schema {
hello
user (user_id: 1) {
foo_user_id
foo_gimmeText(text: "hello")
}
}
}
response:
data:
my_remote_schema:
hello: world
user:
foo_user_id: 1
foo_gimmeText: hello

View File

@ -0,0 +1,13 @@
type: add_remote_schema
args:
name: my-remote-schema
definition:
url: http://localhost:4020
forward_client_headers: false
customization:
root_fields_namespace: my_remote_schema
type_names:
prefix: Foo
field_names:
- parent_type: User
prefix: foo_

View File

@ -0,0 +1,3 @@
type: remove_remote_schema
args:
name: my-remote-schema

View File

@ -0,0 +1,29 @@
description: Simple remote relationship GraphQL query
url: /v1/graphql
status: 200
response:
data:
profiles:
- id: 1
name: alice
userNestedFields:
- foo_name: alice
- id: 2
name: bob
userNestedFields:
- foo_name: bob
- id: 3
name: alice
userNestedFields:
- foo_name: alice
query:
query: |
query {
profiles {
id
name
userNestedFields {
foo_name
}
}
}

View File

@ -0,0 +1,29 @@
description: Simple remote relationship GraphQL query
url: /v1/graphql
status: 200
response:
data:
profiles:
- id: 1
messageBasic:
foo_name: alice
__typename: FooMessage
- id: 2
messageBasic:
foo_name: bob
__typename: FooMessage
- id: 3
messageBasic:
foo_name: alice
__typename: FooMessage
query:
query: |
query {
profiles {
id
messageBasic {
foo_name
__typename
}
}
}

View File

@ -0,0 +1,79 @@
type: bulk
args:
# To model this:
# query {
# profiles {
# id
# message {
# id
# msg
# }
# }
# }
#Profile table
- type: run_sql
args:
sql: |
create table profiles (
id serial primary key,
name text
);
insert into profiles (name) values
( 'alice' ),
( 'bob' ),
( 'alice');
create table authors (
id serial primary key,
name text
);
create table employees (
id serial primary key,
name text
);
insert into employees (name) values ('alice'),(NULL),('bob');
- type: track_table
args:
schema: public
name: profiles
- type: add_remote_schema
args:
name: my-remote-schema
definition:
url: http://localhost:4000
forward_client_headers: false
customization:
root_fields_namespace: foo_root
type_names:
mapping:
Message: FooMessage
field_names:
- parent_type: query_root
prefix: foo_root_
mapping:
message: foo_message
- parent_type: Communication
prefix: foo_comm_
mapping:
name: foo_name
- parent_type: Message
prefix: foo_comm_
mapping:
name: foo_name
- parent_type: User
prefix: foo_user_
- type: track_table
args:
schema: public
name: authors
- type: track_table
args:
schema: public
name: employees
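# With the customization above, the remote "Message" type is exposed as
# "FooMessage" and its fields carry the "foo_comm_" prefix (with "name" mapped
# to "foo_name"), which is what the relationship query fixtures in this
# directory select. The create_remote_relationship definitions, by contrast,
# refer to the original remote field and argument names ("message", "user",
# "user_id", "whered").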

View File

@ -0,0 +1,11 @@
type: create_remote_relationship
args:
name: messageBasic
table: profiles
hasura_fields:
- id
remote_schema: my-remote-schema
remote_field:
message:
arguments:
id: "$id"

View File

@ -0,0 +1,19 @@
type: create_remote_relationship
args:
name: userNestedFields
table: profiles
hasura_fields:
- id
- name
remote_schema: my-remote-schema
remote_field:
user:
arguments:
user_id: $id
field:
userMessages:
arguments:
whered:
name:
eq: $name

View File

@ -0,0 +1,14 @@
type: bulk
args:
- type: run_sql
args:
sql: |
drop table if exists profiles;
drop table if exists user_profiles;
drop table if exists authors;
drop table if exists employees;
# also drops remote relationship as direct dep
- type: remove_remote_schema
args:
name: my-remote-schema

View File

@ -0,0 +1,387 @@
- description: query with prefixed type name in inline fragment
url: /v1/graphql
status: 200
query:
query: |
{
star_wars {
super_hero(episode: 4) {
ident
foo_name_f
... on Android {
ident
foo_name_f
foo_primaryFunction_f
}
}
}
}
response:
data:
star_wars:
super_hero:
ident: "1"
foo_name_f: R2-D2
foo_primaryFunction_f: Astromech
- description: query with prefixed type name in inline fragment and non-existing field
url: /v1/graphql
status: 200
query:
query: |
{
star_wars {
super_hero(episode: 4) {
ident
foo_name_f
... on Android {
ident
foo_name_f
foo_primaryFunction_f
non_existing_field
}
}
}
}
response:
errors:
- extensions:
path: $.selectionSet.star_wars.selectionSet.super_hero.selectionSet.non_existing_field
code: validation-failed
message: 'field "non_existing_field" not found in type: ''Android'''
- description: query with original (non-customized) type name in inline fragment
url: /v1/graphql
status: 200
query:
query: |
{
star_wars {
super_hero(episode: 4) {
ident
foo_name_f
... on Droid {
ident
foo_name_f
foo_primaryFunction_f
}
}
}
}
response:
errors:
- extensions:
path: $.selectionSet.star_wars.selectionSet.super_hero.selectionSet
code: validation-failed
message: Type "Droid" is not a subtype of "FooCharacter_x"
- description: query with fragment
url: /v1/graphql
status: 200
query:
query: |
{
star_wars {
super_hero(episode: 4) {
... characterFields
}
}
}
fragment characterFields on FooCharacter_x {
ident
foo_name_f
__typename
}
response:
data:
star_wars:
super_hero:
ident: "1"
foo_name_f: R2-D2
__typename: Android
- description: query with variable
url: /v1/graphql
status: 200
query:
query: |
query Hero($ep: MyInt!) {
star_wars {
super_hero(episode: $ep) {
ident
foo_name_f
}
}
}
variables:
ep: 4
response:
data:
star_wars:
super_hero:
ident: "1"
foo_name_f: R2-D2
# - description: query with variable with wrong type name
# url: /v1/graphql
# status: 200
# query:
# query: |
# query Hero($ep: Int!) {
# star_wars {
# super_hero(episode: $ep) {
# ident
# foo_name_f
# }
# }
# }
# variables:
# ep: 4
# response:
# errors:
# - extensions:
# path: $.selectionSet.star_wars.selectionSet.super_hero.args.episode
# code: validation-failed
# message: variable "ep" is declared as Int!, but used where MyInt! is expected
- description: query with __type introspection
url: /v1/graphql
status: 200
query:
query: |
{
__type(name: "Android") {
kind
name
}
}
response:
data:
__type:
kind: OBJECT
name: Android
- description: query with __type introspection on original type should return empty result
url: /v1/graphql
status: 200
query:
query: |
{
__type(name: "Droid") {
kind
name
}
}
response:
data:
__type:
# __typename tests
- description: query with __typename
url: /v1/graphql
status: 200
query:
query: |
{
star_wars {
super_hero(episode: 4) {
ident
foo_name_f
__typename
}
}
}
response:
data:
star_wars:
super_hero:
ident: "1"
foo_name_f: R2-D2
__typename: Android
- description: query with __typename in list field
url: /v1/graphql
status: 200
query:
query: |
{
star_wars {
super_heroes {
ident
foo_name_f
__typename
... on FooHuman_x {
android {
foo_name_f
__typename
}
}
}
}
}
response:
data:
star_wars:
super_heroes:
- ident: "1"
foo_name_f: R2-D2
__typename: Android
- ident: "2"
foo_name_f: Luke Skywalker
__typename: FooHuman_x
android:
foo_name_f: R2-D2
__typename: Android
- description: query with __typename and field aliases
url: /v1/graphql
status: 200
query:
query: |
{
space_wars: star_wars {
my_hero: super_hero(episode: 4) {
my_id: ident
my_name: foo_name_f
my_typename: __typename
}
}
}
response:
data:
space_wars:
my_hero:
my_id: "1"
my_name: R2-D2
my_typename: Android
- description: query with __typename in list field and field aliases
url: /v1/graphql
status: 200
query:
query: |
{
star_wars {
my_heroes: super_heroes {
my_id: ident
my_name: foo_name_f
my_typename: __typename
... on FooHuman_x {
my_droid: android {
my_droid_name: foo_name_f
my_droid_typename: __typename
}
}
}
}
}
response:
data:
star_wars:
my_heroes:
- my_id: "1"
my_name: R2-D2
my_typename: Android
- my_id: "2"
my_name: Luke Skywalker
my_typename: FooHuman_x
my_droid:
my_droid_name: R2-D2
my_droid_typename: Android
- description: query with prefixed field name in inline fragment
url: /v1/graphql
status: 200
query:
query: |
{
star_wars {
super_hero(episode: 4) {
ident
foo_name_f
... on Android {
ident
foo_name_f
foo_primaryFunction_f
}
}
}
}
response:
data:
star_wars:
super_hero:
ident: '1'
foo_name_f: R2-D2
foo_primaryFunction_f: Astromech
- description: query with prefixed field name and non-existing (renamed) field
url: /v1/graphql
status: 200
query:
query: |
{
star_wars {
super_hero(episode: 4) {
id
}
}
}
response:
errors:
- extensions:
path: $.selectionSet.star_wars.selectionSet.super_hero.selectionSet.id
code: validation-failed
message: "field \"id\" not found in type: 'FooCharacter_x'"
- description: query aliases
url: /v1/graphql
status: 200
query:
query: |
{
star_wars {
super_hero(episode: 4) {
name: ident
id: foo_name_f
__typename: ident
... on Android {
foo_name: foo_primaryFunction_f
android_typename: __typename
}
}
}
}
response:
data:
star_wars:
super_hero:
name: '1'
id: R2-D2
__typename: '1'
foo_name: Astromech
android_typename: Android
- description: query with __typename (__typename should _not_ be affected by field prefix/suffix customization)
url: /v1/graphql
status: 200
query:
query: |
{
star_wars {
super_hero(episode: 4) {
ident
foo_name_f
__typename
}
}
}
response:
data:
star_wars:
super_hero:
ident: "1"
foo_name_f: R2-D2
__typename: Android

View File

@ -0,0 +1,79 @@
- description: query with prefixed field name in inline fragment
url: /v1/graphql
status: 200
query:
query: |
{
hero(episode: 4) {
foo_id
foo_name
... on Droid {
foo_id
foo_name
foo_primaryFunction
}
}
}
response:
data:
hero:
foo_id: '1'
foo_name: R2-D2
foo_primaryFunction: Astromech
- description: query with prefixed field name and non-existing (renamed) field
url: /v1/graphql
status: 200
query:
query: |
{
hero(episode: 4) {
id
}
}
response:
errors:
- extensions:
path: $.selectionSet.hero.selectionSet.id
code: validation-failed
message: "field \"id\" not found in type: 'Character'"
- description: query aliases
url: /v1/graphql
status: 200
query:
query: |
{
hero(episode: 4) {
name: foo_id
id: foo_name
... on Droid {
foo_name: foo_primaryFunction
}
}
}
response:
data:
hero:
name: '1'
id: R2-D2
foo_name: Astromech
- description: query with __typename (__typename should _not_ be affected by field prefix/suffix customization)
url: /v1/graphql
status: 200
query:
query: |
{
hero(episode: 4) {
foo_id
foo_name
__typename
}
}
response:
data:
hero:
foo_id: "1"
foo_name: R2-D2
__typename: Droid

View File

@ -0,0 +1,75 @@
- description: Schema customization with field mapping inconsistency between interface and object type
url: /v1/metadata
status: 400
query:
{
"type": "add_remote_schema",
"args": {
"name": "character-foo",
"definition": {
"url": "http://localhost:5000/character-iface-graphql",
"customization": { "field_names": [{"parent_type": "Droid", "mapping": {"primaryFunction": "name", "name": "primaryFunction"}}] }
}
}
}
response:
path: $.args
error: 'Remote schema customization inconsistency: field name mapping for field "name" of interface "Character" is inconsistent with mapping for type "Droid". Interface field name maps to "name". Type field name maps to "primaryFunction".'
code: remote-schema-error
- description: Schema customization with two types mapping to same name
url: /v1/metadata
status: 400
query:
{
"type": "add_remote_schema",
"args": {
"name": "character-foo",
"definition": {
"url": "http://localhost:5000/character-iface-graphql",
"customization": { "type_names": {"mapping": {"Droid": "Foo", "Human": "Foo"}} }
}
}
}
response:
path: $.args
error: 'Type name mappings are not distinct; the following types appear more than once: "Foo"'
code: remote-schema-error
- description: Schema customization with two fields mapping to same name
url: /v1/metadata
status: 400
query:
{
"type": "add_remote_schema",
"args": {
"name": "character-foo",
"definition": {
"url": "http://localhost:5000/character-iface-graphql",
"customization": { "field_names": [{"parent_type": "Droid", "mapping": {"primaryFunction": "name"}}] }
}
}
}
response:
path: $.args
error: 'Field name mappings for object type "Droid" are not distinct; the following fields appear more than once: "name"'
code: remote-schema-error
- description: Attempt to customize __typename field
url: /v1/metadata
status: 400
query:
{
"type": "add_remote_schema",
"args": {
"name": "character-foo",
"definition": {
"url": "http://localhost:5000/character-iface-graphql",
"customization": { "field_names": [{"parent_type": "Droid", "mapping": {"__typename": "my_typename"}}] }
}
}
}
response:
path: $.args
error: 'attempt to customize reserved field name "__typename"'
code: invalid-params

View File

@ -0,0 +1,25 @@
- description: query with namespace
url: /v1/graphql
status: 200
query:
query: |
{
foo {
hero(episode: 4) {
id
name
... on BarDroid {
id
name
primaryFunction
}
}
}
}
response:
data:
foo:
hero:
id: '1'
name: R2-D2
primaryFunction: Astromech

View File

@ -0,0 +1,263 @@
- description: query with prefixed type name in inline fragment
url: /v1/graphql
status: 200
query:
query: |
{
hero(episode: 4) {
id
name
... on FooDroid {
id
name
primaryFunction
}
}
}
response:
data:
hero:
id: "1"
name: R2-D2
primaryFunction: Astromech
- description: query with prefixed type name in inline fragment and non-existing field
url: /v1/graphql
status: 200
query:
query: |
{
hero(episode: 4) {
id
name
... on FooDroid {
id
name
primaryFunction
non_existing_field
}
}
}
response:
errors:
- extensions:
path: $.selectionSet.hero.selectionSet.non_existing_field
code: validation-failed
message: 'field "non_existing_field" not found in type: ''FooDroid'''
- description: query with original (non-customized) type name in inline fragment
url: /v1/graphql
status: 200
query:
query: |
{
hero(episode: 4) {
id
name
... on Droid {
id
name
primaryFunction
}
}
}
response:
errors:
- extensions:
path: $.selectionSet.hero.selectionSet
code: validation-failed
message: Type "Droid" is not a subtype of "FooCharacter"
- description: query with fragment
url: /v1/graphql
status: 200
query:
query: |
{
hero(episode: 4) {
... characterFields
}
}
fragment characterFields on FooCharacter {
id
name
}
response:
data:
hero:
id: "1"
name: R2-D2
- description: query with variable
url: /v1/graphql
status: 200
query:
query: |
query Hero($ep: MyInt!) {
hero(episode: $ep) {
id
name
}
}
variables:
ep: 4
response:
data:
hero:
id: "1"
name: R2-D2
# - description: query with variable with wrong type name
# url: /v1/graphql
# status: 200
# query:
# query: |
# query Hero($ep: Int!) {
# hero(episode: $ep) {
# id
# name
# }
# }
# variables:
# ep: 4
# response:
# errors:
# - extensions:
# path: $.selectionSet.hero.args.episode
# code: validation-failed
# message: variable "ep" is declared as Int!, but used where MyInt! is expected
- description: query with __type introspection
url: /v1/graphql
status: 200
query:
query: |
{
__type(name: "FooDroid") {
kind
name
}
}
response:
data:
__type:
kind: OBJECT
name: FooDroid
- description: query with __type introspection on original type should return empty result
url: /v1/graphql
status: 200
query:
query: |
{
__type(name: "Droid") {
kind
name
}
}
response:
data:
__type:
# __typename tests
- description: query with __typename
url: /v1/graphql
status: 200
query:
query: |
{
hero(episode: 4) {
id
name
__typename
}
}
response:
data:
hero:
id: "1"
name: R2-D2
__typename: FooDroid
- description: query with __typename in list field
url: /v1/graphql
status: 200
query:
query: |
{
heroes {
id
name
__typename
... on FooHuman {
droid {
name
__typename
}
}
}
}
response:
data:
heroes:
- id: "1"
name: R2-D2
__typename: FooDroid
- id: "2"
name: Luke Skywalker
__typename: FooHuman
droid:
name: R2-D2
__typename: FooDroid
- description: query with __typename and field aliases
url: /v1/graphql
status: 200
query:
query: |
{
my_hero: hero(episode: 4) {
my_id: id
my_name: name
my_typename: __typename
}
}
response:
data:
my_hero:
my_id: "1"
my_name: R2-D2
my_typename: FooDroid
- description: query with __typename in list field and field aliases
url: /v1/graphql
status: 200
query:
query: |
{
my_heroes: heroes {
my_id: id
my_name: name
my_typename: __typename
... on FooHuman {
my_droid: droid {
my_droid_name: name
my_droid_typename: __typename
}
}
}
}
response:
data:
my_heroes:
- my_id: "1"
my_name: R2-D2
my_typename: FooDroid
- my_id: "2"
my_name: Luke Skywalker
my_typename: FooHuman
my_droid:
my_droid_name: R2-D2
my_droid_typename: FooDroid

View File

@ -368,6 +368,24 @@ class TestWithRelay:
assert st_code == 200, resp
check_query_f(hge_ctx, self.dir() + "with_relay.yaml")
@use_test_fixtures
class TestExecutionWithCustomization:
@classmethod
def dir(cls):
return "queries/remote_schemas/remote_relationships/schema_customization/"
def test_basic_relationship(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
assert st_code == 200, resp
check_query_f(hge_ctx, self.dir() + 'basic_relationship.yaml')
def test_nested_fields(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_nested_fields.yaml')
assert st_code == 200, resp
check_query_f(hge_ctx, self.dir() + 'basic_nested_fields.yaml')
class TestComputedFieldsInRemoteRelationship:
@classmethod

View File

@ -126,6 +126,23 @@ class TestRemoteSchemaPermissionsExecution:
def test_execution_with_unknown_role(self, hge_ctx):
check_query_f(hge_ctx, self.dir() + 'unknown_role_execution.yaml')
@use_test_fixtures
class TestCustomizedRemoteSchemaPermissionsExecution:
@classmethod
def dir(cls):
return "queries/remote_schemas/permissions/schema_customization/"
def test_execution_with_subset_of_fields_exposed_to_role(self, hge_ctx):
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_valid_subset_of_fields.yaml')
assert st_code == 200, resp
check_query_f(hge_ctx, self.dir() + 'execution_with_partial_fields_exposed_to_role.yaml')
def test_execution_with_subset_of_arguments_exposed_to_role(self, hge_ctx):
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_valid_subset_of_arguments.yaml')
assert st_code == 200, resp
check_query_f(hge_ctx, self.dir() + 'execution_with_partial_args_exposed_to_role.yaml')
@use_test_fixtures
class TestRemoteSchemaPermissionsArgumentPresets:

View File

@ -14,7 +14,7 @@ import pytest
from validate import check_query_f, check_query
from graphql import GraphQLError
def mk_add_remote_q(name, url, headers=None, client_hdrs=False, timeout=None):
def mk_add_remote_q(name, url, headers=None, client_hdrs=False, timeout=None, customization=None):
return {
"type": "add_remote_schema",
"args": {
@ -24,11 +24,15 @@ def mk_add_remote_q(name, url, headers=None, client_hdrs=False, timeout=None):
"url": url,
"headers": headers,
"forward_client_headers": client_hdrs,
"timeout_seconds": timeout
"timeout_seconds": timeout,
"customization": customization
}
}
}
def type_prefix_customization(type_prefix, mapping={}):
return { "type_names": {"prefix": type_prefix, "mapping": mapping }}
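# For example, type_prefix_customization("Foo", {"Int": "MyInt"}) produces
# {"type_names": {"prefix": "Foo", "mapping": {"Int": "MyInt"}}}, the
# customization payload passed to mk_add_remote_q in the test classes below.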
def mk_update_remote_q(name, url, headers=None, client_hdrs=False, timeout=None):
return {
"type": "update_remote_schema",
@ -669,3 +673,145 @@ class TestValidateRemoteSchemaQuery:
""" test to check that the graphql-engine throws an validation error
when an remote object is queried with an unknown field """
check_query_f(hge_ctx, self.dir() + '/field_validation.yaml')
class TestRemoteSchemaTypePrefix:
""" basic => no hasura tables are tracked """
teardown = {"type": "clear_metadata", "args": {}}
dir = 'queries/remote_schemas'
@pytest.fixture(autouse=True)
def transact(self, request, hge_ctx):
config = request.config
# This is needed for supporting server upgrade tests
# Some marked tests in this class will be run as server upgrade tests
if not config.getoption('--skip-schema-setup'):
q = mk_add_remote_q('simple 2', 'http://localhost:5000/user-graphql', customization=type_prefix_customization("Foo"))
st_code, resp = hge_ctx.v1q(q)
assert st_code == 200, resp
yield
if request.session.testsfailed > 0 or not config.getoption('--skip-schema-teardown'):
hge_ctx.v1q(self.teardown)
def test_add_schema(self, hge_ctx):
""" check if the remote schema is added in the metadata """
st_code, resp = hge_ctx.v1q(export_metadata_q)
assert st_code == 200, resp
assert resp['remote_schemas'][0]['name'] == "simple 2"
# assert resp['remote_schemas'][0]['definition']['type_prefix'] == "foo"
@pytest.mark.allow_server_upgrade_test
def test_introspection(self, hge_ctx):
#check_query_f(hge_ctx, 'queries/graphql_introspection/introspection.yaml')
with open('queries/graphql_introspection/introspection.yaml') as f:
query = yaml.safe_load(f)
resp, _ = check_query(hge_ctx, query)
assert check_introspection_result(resp, ['FooUser', 'FooCreateUser', 'FooCreateUserInputObject', 'FooUserDetailsInput'], ['user', 'allUsers'])
class TestValidateRemoteSchemaTypePrefixQuery:
teardown = {"type": "clear_metadata", "args": {}}
@pytest.fixture(autouse=True)
def transact(self, request, hge_ctx):
config = request.config
if not config.getoption('--skip-schema-setup'):
q = mk_add_remote_q('character-foo', 'http://localhost:5000/character-iface-graphql', customization=type_prefix_customization("Foo", {"Int": "MyInt"}))
st_code, resp = hge_ctx.v1q(q)
assert st_code == 200, resp
yield
if request.session.testsfailed > 0 or not config.getoption('--skip-schema-teardown'):
hge_ctx.v1q(self.teardown)
@classmethod
def dir(cls):
return "queries/remote_schemas/validation/"
def test_remote_schema_type_prefix_validation(self, hge_ctx):
check_query_f(hge_ctx, self.dir() + '/type_prefix_validation.yaml')
class TestValidateRemoteSchemaFieldPrefixQuery:
teardown = {"type": "clear_metadata", "args": {}}
@pytest.fixture(autouse=True)
def transact(self, request, hge_ctx):
config = request.config
if not config.getoption('--skip-schema-setup'):
customization = { "field_names": [{"parent_type": "Character", "prefix": "foo_"},{"parent_type": "Human", "prefix": "foo_"},{"parent_type": "Droid", "prefix": "foo_"}] }
q = mk_add_remote_q('character-foo', 'http://localhost:5000/character-iface-graphql', customization=customization)
st_code, resp = hge_ctx.v1q(q)
assert st_code == 200, resp
yield
if request.session.testsfailed > 0 or not config.getoption('--skip-schema-teardown'):
hge_ctx.v1q(self.teardown)
@classmethod
def dir(cls):
return "queries/remote_schemas/validation/"
def test_remote_schema_field_prefix_validation(self, hge_ctx):
check_query_f(hge_ctx, self.dir() + '/field_prefix_validation.yaml')
class TestValidateRemoteSchemaCustomization:
@classmethod
def dir(cls):
return "queries/remote_schemas/validation/"
def test_remote_schema_interface_field_validation(self, hge_ctx):
check_query_f(hge_ctx, self.dir() + '/interface_field_validation.yaml')
class TestValidateRemoteSchemaNamespaceQuery:
teardown = {"type": "clear_metadata", "args": {}}
@pytest.fixture(autouse=True)
def transact(self, request, hge_ctx):
config = request.config
if not config.getoption('--skip-schema-setup'):
customization = { "root_fields_namespace": "foo", "type_names": {"prefix": "Bar" }}
q = mk_add_remote_q('character-foo', 'http://localhost:5000/character-iface-graphql', customization=customization)
st_code, resp = hge_ctx.v1q(q)
assert st_code == 200, resp
yield
if request.session.testsfailed > 0 or not config.getoption('--skip-schema-teardown'):
hge_ctx.v1q(self.teardown)
@classmethod
def dir(cls):
return "queries/remote_schemas/validation/"
def test_remote_schema_namespace_validation(self, hge_ctx):
check_query_f(hge_ctx, self.dir() + '/namespace_validation.yaml')
class TestValidateRemoteSchemaCustomizeAllTheThings:
teardown = {"type": "clear_metadata", "args": {}}
@pytest.fixture(autouse=True)
def transact(self, request, hge_ctx):
config = request.config
if not config.getoption('--skip-schema-setup'):
customization = {
"root_fields_namespace": "star_wars",
"type_names": {"prefix": "Foo", "suffix": "_x", "mapping": { "Droid": "Android", "Int": "MyInt"}},
"field_names": [
{"parent_type": "Character", "prefix": "foo_", "suffix": "_f", "mapping": {"id": "ident"}},
{"parent_type": "Human", "mapping": {"id": "ident", "name": "foo_name_f", "droid": "android"}},
{"parent_type": "Droid", "prefix": "foo_", "suffix": "_f", "mapping": {"id": "ident"}},
{"parent_type": "CharacterIFaceQuery", "prefix": "super_" }
]
}
q = mk_add_remote_q('character-foo', 'http://localhost:5000/character-iface-graphql', customization=customization)
st_code, resp = hge_ctx.v1q(q)
assert st_code == 200, resp
yield
if request.session.testsfailed > 0 or not config.getoption('--skip-schema-teardown'):
hge_ctx.v1q(self.teardown)
@classmethod
def dir(cls):
return "queries/remote_schemas/validation/"
def test_remote_schema_customize_all_the_things(self, hge_ctx):
check_query_f(hge_ctx, self.dir() + '/customize_all_the_things.yaml')