2018-11-23 16:02:46 +03:00
|
|
|
#!/usr/bin/env python3
|
|
|
|
|
|
|
|
import pytest
|
|
|
|
import yaml
|
2019-01-28 15:38:38 +03:00
|
|
|
import requests
|
2018-11-23 16:02:46 +03:00
|
|
|
|
|
|
|
from validate import check_query_f, check_query
|
|
|
|
|
2018-11-26 16:08:16 +03:00
|
|
|
|
2018-11-23 16:02:46 +03:00
|
|
|
def mk_add_remote_q(name, url):
    """Build an ``add_remote_schema`` metadata API payload.

    The remote is registered under *name* with a fixed comment and
    client-header forwarding disabled.
    """
    definition = {
        "url": url,
        "forward_client_headers": False
    }
    args = {
        "name": name,
        "comment": "testing " + name,
        "definition": definition
    }
    return {"type": "add_remote_schema", "args": args}
|
|
|
|
|
2019-01-28 15:38:38 +03:00
|
|
|
def mk_delete_remote_q(name):
    """Build a ``remove_remote_schema`` metadata API payload for *name*."""
    return {
        "type": "remove_remote_schema",
        "args": {"name": name}
    }
|
|
|
|
|
2018-11-23 16:02:46 +03:00
|
|
|
|
|
|
|
class TestRemoteSchemaBasic:
    """ basic => no hasura tables are tracked """

    teardown = {"type": "clear_metadata", "args": {}}
    dir = 'queries/remote_schemas'

    @pytest.fixture(autouse=True)
    def transact(self, hge_ctx):
        """Register the 'simple 1' remote before each test; clear metadata after."""
        q = mk_add_remote_q('simple 1', 'http://localhost:5000/hello-graphql')
        st_code, resp = hge_ctx.v1q(q)
        assert st_code == 200, resp
        yield
        # best-effort cleanup; result deliberately ignored
        hge_ctx.v1q(self.teardown)

    def test_add_schema(self, hge_ctx):
        """ check if the remote schema is added in the db """
        # context manager instead of bare connect()/close() so the
        # connection is released even when an assertion fails
        with hge_ctx.engine.connect() as conn:
            res = conn.execute('select * from hdb_catalog.remote_schemas')
            row = res.fetchone()
            assert row['name'] == "simple 1"

    def test_introspection(self, hge_ctx):
        """Introspect through Hasura and check the remote's type/node show up."""
        with open('queries/graphql_introspection/introspection.yaml') as f:
            # safe_load: yaml.load without an explicit Loader is
            # deprecated and unsafe on untrusted input
            query = yaml.safe_load(f)
        st_code, resp = check_query(hge_ctx, query)
        assert st_code == 200, resp
        assert check_introspection_result(resp, ['Hello'], ['hello'])

    def test_introspection_as_user(self, hge_ctx):
        check_query_f(hge_ctx, 'queries/graphql_introspection/introspection_user_role.yaml')

    def test_remote_query(self, hge_ctx):
        check_query_f(hge_ctx, self.dir + '/basic_query.yaml')

    def test_remote_subscription(self, hge_ctx):
        check_query_f(hge_ctx, self.dir + '/basic_subscription_not_supported.yaml')

    def test_add_schema_conflicts(self, hge_ctx):
        """add 2 remote schemas with same node or types"""
        q = mk_add_remote_q('simple 2', 'http://localhost:5000/hello-graphql')
        st_code, resp = hge_ctx.v1q(q)
        assert st_code == 400
        assert resp['code'] == 'remote-schema-conflicts'

    def test_add_second_remote_schema(self, hge_ctx):
        """add 2 remote schemas with different node and types"""
        q = mk_add_remote_q('my remote', 'http://localhost:5000/user-graphql')
        st_code, resp = hge_ctx.v1q(q)
        assert st_code == 200, resp
        # BUG FIX: the remove call's result was previously discarded, so the
        # following assert re-checked the stale st_code/resp from the add call
        st_code, resp = hge_ctx.v1q({"type": "remove_remote_schema", "args": {"name": "my remote"}})
        assert st_code == 200, resp

    def test_bulk_remove_add_remote_schema(self, hge_ctx):
        st_code, resp = hge_ctx.v1q_f(self.dir + '/basic_bulk_remove_add.yaml')
        assert st_code == 200, resp
|
|
|
|
|
2018-11-23 16:02:46 +03:00
|
|
|
|
|
|
|
class TestAddRemoteSchemaTbls:
    """ tests with adding a table in hasura """

    dir = 'queries/remote_schemas'

    @pytest.fixture(autouse=True)
    def transact(self, hge_ctx):
        """Set up tables + remote schema before each test; tear down after."""
        st_code, resp = hge_ctx.v1q_f('queries/remote_schemas/tbls_setup.yaml')
        assert st_code == 200, resp
        yield
        st_code, resp = hge_ctx.v1q_f('queries/remote_schemas/tbls_teardown.yaml')
        assert st_code == 200, resp

    def test_add_schema(self, hge_ctx):
        """ check if the remote schema is added in the db """
        # context manager instead of bare connect()/close() so the
        # connection is released even when an assertion fails
        with hge_ctx.engine.connect() as conn:
            res = conn.execute('select * from hdb_catalog.remote_schemas')
            row = res.fetchone()
            assert row['name'] == "simple2-graphql"

    def test_add_schema_conflicts_with_tables(self, hge_ctx):
        """add remote schema which conflicts with hasura tables"""
        q = mk_add_remote_q('simple2', 'http://localhost:5000/hello-graphql')
        st_code, resp = hge_ctx.v1q(q)
        assert st_code == 400
        assert resp['code'] == 'remote-schema-conflicts'

    def test_add_second_remote_schema(self, hge_ctx):
        """add 2 remote schemas with different node and types"""
        q = mk_add_remote_q('my remote2', 'http://localhost:5000/country-graphql')
        st_code, resp = hge_ctx.v1q(q)
        assert st_code == 200, resp
        # BUG FIX: capture the remove call's result; previously the assert
        # below re-checked the stale st_code/resp from the add call
        st_code, resp = hge_ctx.v1q({"type": "remove_remote_schema", "args": {"name": "my remote2"}})
        assert st_code == 200, resp

    def test_remote_query(self, hge_ctx):
        check_query_f(hge_ctx, self.dir + '/simple2_query.yaml')

    def test_remote_mutation(self, hge_ctx):
        check_query_f(hge_ctx, self.dir + '/simple2_mutation.yaml')

    def test_add_conflicting_table(self, hge_ctx):
        st_code, resp = hge_ctx.v1q_f(self.dir + '/create_conflicting_table.yaml')
        assert st_code == 400
        assert resp['code'] == 'remote-schema-conflicts'

    def test_introspection(self, hge_ctx):
        with open('queries/graphql_introspection/introspection.yaml') as f:
            # safe_load: yaml.load without an explicit Loader is
            # deprecated and unsafe on untrusted input
            query = yaml.safe_load(f)
        st_code, resp = check_query(hge_ctx, query)
        assert st_code == 200, resp
        assert check_introspection_result(resp, ['User', 'hello'], ['user', 'hello'])

    def test_add_schema_duplicate_name(self, hge_ctx):
        q = mk_add_remote_q('simple2-graphql', 'http://localhost:5000/country-graphql')
        st_code, resp = hge_ctx.v1q(q)
        assert st_code == 500, resp
        assert resp['code'] == 'postgres-error'

    def test_add_schema_same_type(self, hge_ctx):
        """
        test types get merged when remote schema has type with same name and
        same structure
        """
        st_code, resp = hge_ctx.v1q_f(self.dir + '/person_table.yaml')
        assert st_code == 200, resp
        q = mk_add_remote_q('person-graphql', 'http://localhost:5000/person-graphql')
        st_code, resp = hge_ctx.v1q(q)
        assert st_code == 200, resp
        st_code, resp = hge_ctx.v1q_f(self.dir + '/drop_person_table.yaml')
        assert st_code == 200, resp
        # BUG FIX: capture the remove call's result; previously the assert
        # below re-checked the stale st_code/resp from the drop call
        st_code, resp = hge_ctx.v1q({"type": "remove_remote_schema", "args": {"name": "person-graphql"}})
        assert st_code == 200, resp
|
|
|
|
|
2019-01-28 15:38:38 +03:00
|
|
|
|
|
|
|
class TestAddRemoteSchemaCompareRootQueryFields:
    """Compare Hasura's merged root query type against the remote's own schema."""

    remote = 'http://localhost:5000/default-value-echo-graphql'

    @pytest.fixture(autouse=True)
    def transact(self, hge_ctx):
        """Register the default-value echo remote for each test; remove it after."""
        st_code, resp = hge_ctx.v1q(mk_add_remote_q('default_value_test', self.remote))
        assert st_code == 200, resp
        yield
        st_code, resp = hge_ctx.v1q(mk_delete_remote_q('default_value_test'))
        assert st_code == 200, resp

    def test_schema_check_arg_default_values_and_field_and_arg_types(self, hge_ctx):
        """Every root field and argument of the remote must appear in Hasura's
        schema with the same type, and every argument with the same default
        value."""
        with open('queries/graphql_introspection/introspection.yaml') as f:
            # safe_load: yaml.load without an explicit Loader is
            # deprecated and unsafe on untrusted input
            query = yaml.safe_load(f)
        st_code, introspect_hasura = check_query(hge_ctx, query)
        assert st_code == 200, introspect_hasura
        # introspect the remote server directly for the ground truth
        resp = requests.post(
            self.remote,
            json=query['query']
        )
        introspect_remote = resp.json()
        assert resp.status_code == 200, introspect_remote
        remote_root_ty_info = get_query_root_info(introspect_remote)
        hasura_root_ty_info = get_query_root_info(introspect_hasura)
        has_fld = dict()
        for fr in remote_root_ty_info['fields']:
            has_fld[fr['name']] = False
            # lambda param renamed from `f` to avoid shadowing the file handle
            for fh in filter(lambda fld: fld['name'] == fr['name'], hasura_root_ty_info['fields']):
                has_fld[fr['name']] = True
                assert fr['type'] == fh['type'], yaml.dump({
                    'error' : 'Types do not match for fld ' + fr['name'],
                    'remote_type' : fr['type'],
                    'hasura_type' : fh['type']
                })
                has_arg = dict()
                for ar in fr['args']:
                    ar_path = fr['name'] + '(' + ar['name'] + ':)'
                    has_arg[ar_path] = False
                    for ah in filter(lambda a: a['name'] == ar['name'], fh['args']):
                        has_arg[ar_path] = True
                        assert ar['type'] == ah['type'], yaml.dump({
                            'error' : 'Types do not match for arg ' + ar_path,
                            'remote_type' : ar['type'],
                            'hasura_type' : ah['type']
                        })
                        assert ar['defaultValue'] == ah['defaultValue'], yaml.dump({
                            'error' : 'Default values do not match for arg ' + ar_path,
                            'remote_default_value' : ar['defaultValue'],
                            'hasura_default_value' : ah['defaultValue']
                        })
                    assert has_arg[ar_path], 'Argument ' + ar_path + ' in the remote schema root query type not found in Hasura schema'
            # message typo fixed: "shema" -> "schema"
            assert has_fld[fr['name']], 'Field ' + fr['name'] + ' in the remote schema root query type not found in Hasura schema'
|
|
|
|
|
|
|
|
|
2018-11-26 16:08:16 +03:00
|
|
|
# def test_remote_query_variables(self, hge_ctx):
|
|
|
|
# pass
|
2018-11-23 16:02:46 +03:00
|
|
|
# def test_add_schema_url_from_env(self, hge_ctx):
|
|
|
|
# pass
|
|
|
|
# def test_add_schema_header_from_env(self, hge_ctx):
|
|
|
|
# pass
|
2018-11-26 16:08:16 +03:00
|
|
|
|
|
|
|
|
|
|
|
def _map(f, l):
|
|
|
|
return list(map(f, l))
|
|
|
|
|
|
|
|
def _filter(f, l):
|
|
|
|
return list(filter(f, l))
|
|
|
|
|
2019-01-28 15:38:38 +03:00
|
|
|
def get_query_root_info(res):
    """Return the introspected type record of the schema's query root type.

    Raises IndexError if the named root type is absent from the type list.
    """
    schema = res['data']['__schema']
    root_ty_name = schema['queryType']['name']
    matches = [ty for ty in schema['types'] if ty['name'] == root_ty_name]
    return matches[0]
|
|
|
|
|
|
|
|
def get_types(res):
    """Return the full type list from an introspection response."""
    schema = res['data']['__schema']
    return schema['types']
|
|
|
|
|
2018-11-26 16:08:16 +03:00
|
|
|
def check_introspection_result(res, types, node_names):
    """Return True iff every name in *types* appears among the schema's types
    and every name in *node_names* appears among query_root's fields.

    Prints the discovered type and node names for debugging, like before.
    """
    schema_types = res['data']['__schema']['types']
    all_types = [t['name'] for t in schema_types]
    print(all_types)
    q_root = [t for t in schema_types if t['name'] == 'query_root'][0]
    all_nodes = [fld['name'] for fld in q_root['fields']]
    print(all_nodes)
    satisfy_ty = all(ty_name in all_types for ty_name in types)
    satisfy_node = all(nn in all_nodes for nn in node_names)
    return satisfy_node and satisfy_ty
|