server/tests-py: Reduce the number of locations where we check the status code.

We have a lot of `assert st_code == 200` assertions scattered about. This is a
problem because (a) it makes the code harder to parse and (b) the error
message is lacking; I have seen a few flaky tests that were impossible
to diagnose because I didn't know what the response _should_ be.

This reduces the number of places in which we perform this assertion
(moving most of them to `HGECtx.execute_query`), so that we can have a
better chance of seeing a useful error message on test failure.
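
As an illustration (not part of the diff), here is a minimal standalone sketch
of the pattern the helpers move to, assuming plain `requests`; the real
implementation lives on `HGECtx.execute_query` and reuses the test context's
HTTP session and admin-secret headers:

    import json
    import requests

    def execute_query(url, q, headers={}, expected_status_code=200):
        # Post the query and decode the body up front so a failure can show it.
        resp = requests.post(url, json=q, headers=headers)
        body = {} if resp.status_code == 500 else resp.json()
        if expected_status_code:
            assert resp.status_code == expected_status_code, \
                f'Expected {resp.status_code} to be {expected_status_code}. ' \
                f'Response:\n{json.dumps(body, indent=2)}'
        return body

Call sites then read `resp = hge_ctx.v1q_f(self.dir() + '/setup.yaml')` for the
common case, or pass `expected_status_code = 400` when a failure is the
expected outcome, instead of unpacking and asserting on a status code at every
call.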

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/4957
GitOrigin-RevId: 3ff388bccf49f96569aa6b7db85266a0c5ee27ea
Samir Talwar 2022-07-05 20:00:08 +02:00 committed by hasura-bot
parent b21d058bec
commit 987b55f981
22 changed files with 511 additions and 884 deletions


@ -200,7 +200,7 @@ class HGE:
def v1q(self, q, exp_status=200):
resp = requests.post(self.url + '/v1/query', json.dumps(q), headers=self.admin_auth_headers())
assert resp.status_code == exp_status, (resp.status_code, resp.json())
assert resp.status_code == exp_status, f'Expected {resp.status_code} to be {exp_status}. Response:\n{json.dumps(resp.json(), indent=2)}'
return resp.json()
def graphql_q(self, query, variables={}, exp_status = 200):


@ -2,14 +2,11 @@ import pytest
import time
from context import HGECtx, HGECtxError, ActionsWebhookServer, EvtsWebhookServer, HGECtxGQLServer, GQLWsClient, PytestConf, GraphQLWSClient
import threading
from auth_webhook_server import create_server, stop_server
import random
from datetime import datetime
import sys
import os
from collections import OrderedDict
from validate import assert_response_code
import json
def pytest_addoption(parser):
parser.addoption(
@ -564,6 +561,8 @@ def db_state_context(request, hge_ctx):
request, hge_ctx, 'setup_files', setup, 'teardown_files',
teardown, schema_setup, schema_teardown, pre_setup, post_teardown, check_file_exists
)
else:
raise NotImplementedError('Invalid API version.')
else:
# setup the metadata using the "/v1/metadata" and the DB schema using the `/v2/query` endpoints
db_context = db_context_with_schema_common_new (
@ -652,10 +651,11 @@ def setup_and_teardown_v1q(request, hge_ctx, setup_files, teardown_files, check_
if check_file_exists:
for o in [setup_files, teardown_files]:
run_on_elem_or_list(assert_file_exists, o)
def v1q_f(f):
if os.path.isfile(f):
st_code, resp = hge_ctx.v1q_f(f)
assert st_code == 200, f'Expected {st_code} to be 200. Response:\n{json.dumps(resp, indent=2)}'
def v1q_f(filepath):
if os.path.isfile(filepath):
return hge_ctx.v1q_f(filepath)
if not skip_setup:
run_on_elem_or_list(v1q_f, setup_files)
yield
@ -669,10 +669,11 @@ def setup_and_teardown_v2q(request, hge_ctx, setup_files, teardown_files, check_
if check_file_exists:
for o in [setup_files, teardown_files]:
run_on_elem_or_list(assert_file_exists, o)
def v2q_f(f):
if os.path.isfile(f):
st_code, resp = hge_ctx.v2q_f(f)
assert st_code == 200, f'Expected {st_code} to be 200. Response:\n{json.dumps(resp, indent=2)}'
def v2q_f(filepath):
if os.path.isfile(filepath):
return hge_ctx.v2q_f(filepath)
if not skip_setup:
run_on_elem_or_list(v2q_f, setup_files)
yield
@ -691,22 +692,29 @@ def setup_and_teardown(request, hge_ctx, setup_files, teardown_files,
run_on_elem_or_list(assert_file_exists, o)
def v2q_f(f):
if os.path.isfile(f):
st_code, resp = hge_ctx.v2q_f(f)
if st_code != 200:
run_on_elem_or_list(pre_post_metadataq_f, post_teardown_file)
assert_response_code('/v2/query', f, st_code, 200, resp)
try:
hge_ctx.v2q_f(f)
except AssertionError:
try:
run_on_elem_or_list(pre_post_metadataq_f, post_teardown_file)
except:
pass
raise
def metadataq_f(f):
if os.path.isfile(f):
st_code, resp = hge_ctx.v1metadataq_f(f)
if st_code != 200:
# drop the sql setup, if the metadata calls fail
run_on_elem_or_list(v2q_f, sql_schema_teardown_file)
run_on_elem_or_list(pre_post_metadataq_f, post_teardown_file)
assert_response_code('/v1/metadata', f, st_code, 200, resp)
try:
hge_ctx.v1metadataq_f(f)
except AssertionError:
try:
# drop the sql setup, if the metadata calls fail
run_on_elem_or_list(v2q_f, sql_schema_teardown_file)
run_on_elem_or_list(pre_post_metadataq_f, post_teardown_file)
except:
pass
raise
def pre_post_metadataq_f(f):
if os.path.isfile(f):
st_code, resp = hge_ctx.v1metadataq_f(f)
assert_response_code('/v1/metadata', f, st_code, 200, resp)
hge_ctx.v1metadataq_f(f)
if not skip_setup:
run_on_elem_or_list(pre_post_metadataq_f, pre_setup_file)
run_on_elem_or_list(v2q_f, sql_schema_setup_file)


@ -376,7 +376,7 @@ class ActionsWebhookHandler(http.server.BaseHTTPRequestHandler):
elif req_path == "/json-response":
resp, status = self.json_response()
self._send_response(status, resp)
elif req_path == "/custom-scalar-array-response":
resp, status = self.custom_scalar_array_response()
self._send_response(status, resp)
@ -610,13 +610,13 @@ class ActionsWebhookHandler(http.server.BaseHTTPRequestHandler):
def null_response(self):
response = None
return response, HTTPStatus.OK
def json_response(self):
response = {
'foo': 'bar'
}
return response, HTTPStatus.OK
def custom_scalar_array_response(self):
response = [{
'foo': 'bar'
@ -832,11 +832,10 @@ class HGECtx:
self.version = result.stdout.decode('utf-8').strip()
if self.is_default_backend and not self.metadata_disabled and not config.getoption('--skip-schema-setup'):
try:
st_code, resp = self.v2q_f("queries/" + self.backend_suffix("clear_db")+ ".yaml")
self.v2q_f("queries/" + self.backend_suffix("clear_db")+ ".yaml")
except requests.exceptions.RequestException as e:
self.teardown()
raise HGECtxError(repr(e))
assert st_code == 200, resp
# Postgres version
if self.is_default_backend:
@ -901,7 +900,7 @@ class HGECtx:
conn.close()
return res
def execute_query(self, q, url_path, headers = {}):
def execute_query(self, q, url_path, headers = {}, expected_status_code = 200):
h = headers.copy()
if self.hge_key is not None:
h['X-Hasura-Admin-Secret'] = self.hge_key
@ -914,26 +913,29 @@ class HGECtx:
# properties in the graphql spec properly
# Don't assume `resp` is JSON object
resp_obj = {} if resp.status_code == 500 else resp.json(object_pairs_hook=OrderedDict)
return resp.status_code, resp_obj
if expected_status_code:
assert \
resp.status_code == expected_status_code, \
f'Expected {resp.status_code} to be {expected_status_code}. Response:\n{json.dumps(resp_obj, indent=2)}'
return resp_obj
def v1q(self, q, headers = {}, expected_status_code = 200):
return self.execute_query(q, "/v1/query", headers, expected_status_code)
def v1q(self, q, headers = {}):
return self.execute_query(q, "/v1/query", headers)
def v1q_f(self, fn):
with open(fn) as f:
def v1q_f(self, filepath, headers = {}, expected_status_code = 200):
with open(filepath) as f:
# NOTE: preserve ordering with ruamel
yml = yaml.YAML()
return self.v1q(yml.load(f))
return self.v1q(yml.load(f), headers, expected_status_code)
def v2q(self, q, headers = {}):
return self.execute_query(q, "/v2/query", headers)
def v2q(self, q, headers = {}, expected_status_code = 200):
return self.execute_query(q, "/v2/query", headers, expected_status_code)
def v2q_f(self, fn):
with open(fn) as f:
def v2q_f(self, filepath, headers = {}, expected_status_code = 200):
with open(filepath) as f:
# NOTE: preserve ordering with ruamel
yml = yaml.YAML()
return self.v2q(yml.load(f))
return self.v2q(yml.load(f), headers, expected_status_code)
def backend_suffix(self, filename):
if self.is_default_backend:
@ -941,23 +943,23 @@ class HGECtx:
else:
return filename + "_" + self.backend
def v1metadataq(self, q, headers = {}):
return self.execute_query(q, "/v1/metadata", headers)
def v1metadataq(self, q, headers = {}, expected_status_code = 200):
return self.execute_query(q, "/v1/metadata", headers, expected_status_code)
def v1metadataq_f(self, fn):
with open(fn) as f:
def v1metadataq_f(self, filepath, headers = {}, expected_status_code = 200):
with open(filepath) as f:
# NOTE: preserve ordering with ruamel
yml = yaml.YAML()
return self.v1metadataq(yml.load(f))
return self.v1metadataq(yml.load(f), headers, expected_status_code)
def v1graphqlq(self, q, headers = {}):
return self.execute_query(q, "/v1/graphql", headers)
def v1graphqlq(self, q, headers = {}, expected_status_code = 200):
return self.execute_query(q, "/v1/graphql", headers, expected_status_code)
def v1graphql_f(self, fn):
with open(fn) as f:
def v1graphql_f(self, filepath, headers = {}, expected_status_code = 200):
with open(filepath) as f:
# NOTE: preserve ordering with ruamel
yml = yaml.YAML()
return self.v1graphqlq(yml.load(f))
return self.v1graphqlq(yml.load(f), headers, expected_status_code)
def teardown(self):
self.http.close()
@ -968,13 +970,5 @@ class HGECtx:
self.ws_client_relay.teardown()
self.ws_client_graphql_ws.teardown()
def v1GraphqlExplain(self, q, hdrs=None):
headers = {}
if hdrs != None:
headers = hdrs
if self.hge_key != None:
headers['X-Hasura-Admin-Secret'] = self.hge_key
resp = self.http.post(self.hge_url + '/v1/graphql/explain', json=q, headers=headers)
return resp.status_code, resp.json()
def v1GraphqlExplain(self, q, headers = {}, expected_status_code = 200):
return self.execute_query(q, '/v1/graphql/explain', headers, expected_status_code)


@ -6,14 +6,12 @@ class DefaultTestQueries(ABC):
def do_setup(self, setup_ctrl, hge_ctx):
if not setup_ctrl['setupDone']:
st_code, resp = hge_ctx.v1q_f(self.dir() + '/setup.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + '/setup.yaml')
setup_ctrl['setupDone'] = True
def do_teardown(self, setup_ctrl, hge_ctx):
if setup_ctrl['setupDone'] and not hge_ctx.may_skip_test_teardown:
st_code, resp = hge_ctx.v1q_f(self.dir() + '/teardown.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + '/teardown.yaml')
setup_ctrl['setupDone'] = False
@pytest.fixture(autouse=True)
@ -23,31 +21,27 @@ class DefaultTestQueries(ABC):
self.do_teardown(setup_ctrl, hge_ctx);
@abstractmethod
def dir(self):
def dir(self) -> str:
pass
class DefaultTestMutations(ABC):
@pytest.fixture(scope='class')
def schema_transact(self, request, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + '/schema_setup.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + '/schema_setup.yaml')
yield
st_code, resp = hge_ctx.v1q_f(self.dir() + '/schema_teardown.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + '/schema_teardown.yaml')
@pytest.fixture(autouse=True)
def init_values_transact(self, schema_transact, hge_ctx):
setupValFile = self.dir() + '/values_setup.yaml'
if os.path.isfile(setupValFile):
st_code, resp = hge_ctx.v1q_f(setupValFile)
assert st_code == 200, resp
hge_ctx.v1q_f(setupValFile)
yield
st_code, resp = hge_ctx.v1q_f(self.dir() + '/values_teardown.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + '/values_teardown.yaml')
@abstractmethod
def dir(self):
def dir(self) -> str:
pass
@ -57,18 +51,16 @@ class GraphQLEngineTest(ABC):
@pytest.fixture(scope='class')
def transact(self, request, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + '/setup.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + '/setup.yaml')
yield
st_code, resp = hge_ctx.v1q_f(self.dir() + '/teardown.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + '/teardown.yaml')
@pytest.fixture(autouse=True)
def ensure_transact(self, transact):
pass
@abstractmethod
def dir(self):
def dir(self) -> str:
pass
class DefaultTestSelectQueries(GraphQLEngineTest):


@ -24,7 +24,7 @@ class TestAllowlistQueries:
def test_query_non_allowlist(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/query_non_allowlist.yaml', transport)
def test_query_user_fragment(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/query_user_fragment.yaml', transport)
@ -44,101 +44,82 @@ class TestAllowlistQueries:
def export_metadata(hge_ctx):
st, resp = hge_ctx.v1metadataq({
resp = hge_ctx.v1metadataq({
'type': 'export_metadata',
'version': 2,
'args': {}
})
assert st == 200, resp
return resp["metadata"]
def assert_allowlist_metadata(hge_ctx, expected):
metadata = export_metadata(hge_ctx)
assert metadata.get("allowlist") == expected
def replace_allowlist_metadata_unchecked(hge_ctx, base_metadata, allowlist):
def replace_allowlist_metadata_with_check(hge_ctx, base_metadata, allowlist, expected_status_code):
metadata = dict(base_metadata)
metadata["allowlist"] = allowlist
st, resp = hge_ctx.v1metadataq({
return hge_ctx.v1metadataq({
'type': 'replace_metadata',
'version': 2,
'args': {
'metadata': metadata
}
})
return st, resp
}, expected_status_code = expected_status_code)
def replace_allowlist_metadata(hge_ctx, base_metadata, allowlist):
st, resp = replace_allowlist_metadata_unchecked(hge_ctx, base_metadata, allowlist)
assert st == 200, resp
return replace_allowlist_metadata_with_check(hge_ctx, base_metadata, allowlist, expected_status_code = 200)
def fail_replace_allowlist_metadata(hge_ctx, base_metadata, allowlist, expected_error):
st, resp = replace_allowlist_metadata_unchecked(hge_ctx, base_metadata, allowlist)
assert st == 400
resp = replace_allowlist_metadata_with_check(hge_ctx, base_metadata, allowlist, expected_status_code = 400)
assert resp == expected_error
def add_collection_to_allowlist_unchecked(hge_ctx, args):
def add_collection_to_allowlist_with_check(hge_ctx, args, expected_status_code):
payload = {
"type": "add_collection_to_allowlist",
"args": args,
}
return hge_ctx.v1metadataq(payload)
assert st == 200, resp
return hge_ctx.v1metadataq(payload, expected_status_code = expected_status_code)
def add_collection_to_allowlist(hge_ctx, args):
st, resp = add_collection_to_allowlist_unchecked(hge_ctx, args)
assert st == 200, resp
return add_collection_to_allowlist_with_check(hge_ctx, args, expected_status_code = 200)
def fail_add_collection_to_allowlist(hge_ctx, args, expected_error):
st, resp = add_collection_to_allowlist_unchecked(hge_ctx, args)
assert st == 400
resp = add_collection_to_allowlist_with_check(hge_ctx, args, expected_status_code = 400)
assert resp == expected_error
def update_scope_of_collection_in_allowlist(hge_ctx, args):
def update_scope_of_collection_in_allowlist_with_check(hge_ctx, args, expected_status_code):
payload = {
"type": "update_scope_of_collection_in_allowlist",
"args": args,
}
st, resp = hge_ctx.v1metadataq(payload)
assert st == 200, resp
def update_scope_of_collection_in_allowlist_unchecked(hge_ctx, args):
payload = {
"type": "update_scope_of_collection_in_allowlist",
"args": args,
}
return hge_ctx.v1metadataq(payload)
return hge_ctx.v1metadataq(payload, expected_status_code = expected_status_code)
def update_scope_of_collection_in_allowlist(hge_ctx, args):
st, resp = update_scope_of_collection_in_allowlist_unchecked(hge_ctx, args)
assert st == 200, resp
update_scope_of_collection_in_allowlist_with_check(hge_ctx, args, expected_status_code = 200)
def fail_update_scope_of_collection_in_allowlist(hge_ctx, args, expected_error):
st, resp = update_scope_of_collection_in_allowlist_unchecked(hge_ctx, args)
assert st == 400
resp = update_scope_of_collection_in_allowlist_with_check(hge_ctx, args, expected_status_code = 400)
assert resp == expected_error
def drop_collection_from_allowlist_unchecked(hge_ctx, args):
def drop_collection_from_allowlist_with_check(hge_ctx, args, expected_status_code):
payload = {
"type": "drop_collection_from_allowlist",
"args": args
}
return hge_ctx.v1metadataq(payload)
return hge_ctx.v1metadataq(payload, expected_status_code = expected_status_code)
def drop_collection_from_allowlist(hge_ctx, args):
st, resp = drop_collection_from_allowlist_unchecked(hge_ctx, args)
assert st == 200, resp
return drop_collection_from_allowlist_with_check(hge_ctx, args, expected_status_code = 200)
def fail_drop_collection_from_allowlist(hge_ctx, args, expected_error):
st, resp = drop_collection_from_allowlist_unchecked(hge_ctx, args)
assert st == 400
resp = drop_collection_from_allowlist_with_check(hge_ctx, args, expected_status_code = 400)
assert resp == expected_error
@pytest.fixture(scope="function")
def clean_allowlist(hge_ctx):
yield
drop_collection_from_allowlist_unchecked(hge_ctx, {"collection": "collection_1"})
drop_collection_from_allowlist_unchecked(hge_ctx, {"collection": "collection_2"})
drop_collection_from_allowlist_with_check(hge_ctx, {"collection": "collection_1"}, expected_status_code = None)
drop_collection_from_allowlist_with_check(hge_ctx, {"collection": "collection_2"}, expected_status_code = None)
@usefixtures('clean_allowlist', 'per_class_tests_db_state')
class TestAllowlistMetadata:
@ -382,14 +363,13 @@ class TestAllowlistMetadata:
add_collection_to_allowlist(hge_ctx, orig)
assert_allowlist_metadata(hge_ctx, [orig])
st, resp = add_collection_to_allowlist_unchecked(hge_ctx, {
resp = add_collection_to_allowlist(hge_ctx, {
"collection": "collection_1",
"scope": {
"global": False,
"roles": ["foo"]
}
})
assert st == 200, resp
assert resp == {
"message": 'collection "collection_1" already exists in the allowlist, scope ignored; to change scope, use update_scope_of_collection_in_allowlist'
}
@ -405,13 +385,12 @@ class TestAllowlistMetadata:
add_collection_to_allowlist(hge_ctx, orig2)
assert_allowlist_metadata(hge_ctx, [orig, orig2])
st, resp = add_collection_to_allowlist_unchecked(hge_ctx, {
resp = add_collection_to_allowlist(hge_ctx, {
"collection": "collection_2",
"scope": {
"global": True
}
})
assert st == 200, resp
assert resp == {
"message": 'collection "collection_2" already exists in the allowlist, scope ignored; to change scope, use update_scope_of_collection_in_allowlist'
}

File diff suppressed because it is too large.


@ -96,11 +96,9 @@ class TestGraphQLInsertIdentityColumn:
}
}
if hge_ctx.pg_version >= 100000:
st_code, resp = hge_ctx.v1q(setup_q)
assert st_code == 200, resp
hge_ctx.v1q(setup_q)
yield
st_code, resp = hge_ctx.v1q(teardown_q)
assert st_code == 200, resp
hge_ctx.v1q(teardown_q)
else:
pytest.skip("Identity columns are not supported in Postgres version < 10")
@ -648,8 +646,7 @@ class TestGraphqlDeleteConstraintsMSSQL:
# where 1B29035F changes with each call.
# This makes it hard to write an equality-based test for it, so we just check the error code.
def test_author_delete_foreign_key_violation(self, hge_ctx, transport):
st_code, resp = hge_ctx.v1graphql_f(self.dir() + '/author_foreign_key_violation_mssql.yaml')
assert st_code == 200, resp
resp = hge_ctx.v1graphql_f(self.dir() + '/author_foreign_key_violation_mssql.yaml')
assert len(resp['errors']) == 1, resp
@classmethod
@ -824,18 +821,14 @@ class TestGraphQLMutationFunctions:
# Ensure select permissions on the corresponding SETOF table apply to
# the return set of the mutation field backed by the tracked function.
def test_functions_as_mutations_permissions(self, hge_ctx, transport):
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + '/create_function_permission_add_to_score.yaml')
assert st_code == 200, resp
hge_ctx.v1metadataq_f(self.dir() + '/create_function_permission_add_to_score.yaml')
check_query_f(hge_ctx, self.dir() + '/function_as_mutations_permissions.yaml', transport)
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + '/drop_function_permission_add_to_score.yaml')
assert st_code == 200, resp
hge_ctx.v1metadataq_f(self.dir() + '/drop_function_permission_add_to_score.yaml')
def test_single_row_function_as_mutation(self, hge_ctx, transport):
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + '/create_function_permission_add_to_score_by_user_id.yaml')
assert st_code == 200, resp
hge_ctx.v1metadataq_f(self.dir() + '/create_function_permission_add_to_score_by_user_id.yaml')
check_query_f(hge_ctx, self.dir() + '/single_row_function_as_mutation.yaml', transport)
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + '/drop_function_permission_add_to_score_by_user_id.yaml')
assert st_code == 200, resp
hge_ctx.v1metadataq_f(self.dir() + '/drop_function_permission_add_to_score_by_user_id.yaml')
@pytest.mark.parametrize('transport', ['http', 'websocket'])
@use_mutation_fixtures


@ -382,8 +382,7 @@ class TestGraphQLQueryBasicPostgres:
check_query_f(hge_ctx, self.dir() + "/select_query_batching_with_one_error.yaml", transport)
def test_create_invalid_fkey_relationship(self, hge_ctx, transport):
st_code, resp = hge_ctx.v1q_f(self.dir() + '/setup_invalid_fkey_relationship.yaml')
assert st_code == 400, resp
resp = hge_ctx.v1q_f(self.dir() + '/setup_invalid_fkey_relationship.yaml', expected_status_code = 400)
assert resp['error'] == "Expecting object { table, columns }."
def test_select_query_author_pk(self, hge_ctx, transport):
@ -426,8 +425,7 @@ class TestGraphQLQueryBasicCitus:
check_query_f(hge_ctx, self.dir() + "/select_query_disaster_functions.yaml", transport)
def test_create_invalid_fkey_relationship(self, hge_ctx, transport):
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + '/setup_invalid_fkey_relationship.yaml')
assert st_code == 400, resp
resp = hge_ctx.v1metadataq_f(self.dir() + '/setup_invalid_fkey_relationship.yaml', expected_status_code = 400)
assert resp['error'] == "Error when parsing command create_array_relationship.\nSee our documentation at https://hasura.io/docs/latest/graphql/core/api-reference/metadata-api/index.html#metadata-apis.\nInternal error message: Expecting object { table, columns }."
@classmethod
@ -670,8 +668,7 @@ class TestGraphQLQueryBoolExpBasicMSSQL:
check_query_f(hge_ctx, self.dir() + '/select_bools_mssql.yaml', transport)
def test_create_invalid_fkey_relationship(self, hge_ctx, transport):
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + '/setup_invalid_fkey_relationship_mssql.yaml')
assert st_code == 400, resp
resp = hge_ctx.v1metadataq_f(self.dir() + '/setup_invalid_fkey_relationship_mssql.yaml', expected_status_code = 400)
assert resp['error'] == "Error when parsing command create_array_relationship.\nSee our documentation at https://hasura.io/docs/latest/graphql/core/api-reference/metadata-api/index.html#metadata-apis.\nInternal error message: Expecting object { table, columns }."
@classmethod
@ -788,8 +785,7 @@ class TestGraphQLInheritedRolesSchema:
"type": "export_metadata",
"args": {}
}
st_code, resp = hge_ctx.v1q(export_metadata_query)
assert st_code == 200, resp
resp = hge_ctx.v1q(export_metadata_query)
circular_roles_metadata = [
{
"role_name": "intermediate_circular_role_1",
@ -821,8 +817,7 @@ class TestGraphQLInheritedRolesSchema:
"metadata": resp
}
}
st_code, resp = hge_ctx.v1q(import_metadata_query)
assert st_code == 400, resp
resp = hge_ctx.v1q(import_metadata_query, expected_status_code = 400)
assert resp['error'] == '''found cycle(s) in roles: ["circular_role","intermediate_circular_role_2","intermediate_circular_role_1","circular_role"]'''
def test_explicit_metadata_permission_should_override_role_inheritance(self, hge_ctx, transport):
@ -1272,13 +1267,11 @@ class TestGraphQLExplainPostgresMSSQLMySQL:
def test_simple_query_as_admin(self, hge_ctx, backend):
q = {"query": {"query": "query abc { __typename }", "operationName": "abc"}}
st_code, resp = hge_ctx.v1GraphqlExplain(q)
assert st_code == 200, resp
hge_ctx.v1GraphqlExplain(q)
def test_simple_query_as_user(self, hge_ctx, backend):
q = {"query": {"query": "query abc { __typename }", "operationName": "abc"}}
st_code, resp = hge_ctx.v1GraphqlExplain(q, {"x-hasura-role": "random_user"})
assert st_code == 400, resp
hge_ctx.v1GraphqlExplain(q, {"x-hasura-role": "random_user"}, expected_status_code = 400)
@pytest.mark.parametrize("backend", ['postgres', 'mssql', 'mysql'])
def test_simple_query(self, hge_ctx, backend):
@ -1502,8 +1495,7 @@ class TestGraphQLQueryFunctionPermissions:
check_query_f(hge_ctx, self.dir() + 'get_articles_without_permission_configured.yaml')
def test_access_function_with_permission_configured(self, hge_ctx, transport):
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + 'add_function_permission_get_articles.yaml')
assert st_code == 200, resp
hge_ctx.v1metadataq_f(self.dir() + 'add_function_permission_get_articles.yaml')
check_query_f(hge_ctx, self.dir() + 'get_articles_with_permission_configured.yaml')
@pytest.mark.parametrize('transport', ['http', 'websocket'])


@ -1,5 +1,6 @@
import pytest
import time
import json
import jsondiff
from context import PytestConf
from ruamel.yaml import YAML
@ -20,8 +21,7 @@ class TestHorizontalScaleBasic():
self.servers['2'] = hge_ctx.hge_scale_url
yield
# teardown
st_code, resp = hge_ctx.v1q_f(self.dir() + '/teardown.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + '/teardown.yaml')
def test_horizontal_scale_basic(self, hge_ctx):
with open(self.dir() + "/steps.yaml") as c:
@ -34,9 +34,10 @@ class TestHorizontalScaleBasic():
self.servers[step['operation']['server']] + "/v1/query",
json=step['operation']['query']
)
st_code = response.status_code
resp = response.json()
assert st_code == 200, resp
assert \
response.status_code == 200, \
f'Expected {response.status_code} to be 200. Response:\n{json.dumps(resp, indent=2)}'
# wait for 20 sec
time.sleep(20)
@ -45,9 +46,10 @@ class TestHorizontalScaleBasic():
self.servers[step['validate']['server']] + "/v1alpha1/graphql",
json=step['validate']['query']
)
st_code = response.status_code
resp = response.json()
assert st_code == 200, resp
assert \
response.status_code == 200, \
f'Expected {response.status_code} to be 200. Response:\n{json.dumps(resp, indent=2)}'
if 'response' in step['validate']:
assert resp == step['validate']['response'], yaml.dump({


@ -25,16 +25,14 @@ class TestInconsistentObjects():
test = yaml.load(c)
# setup
st_code, resp = hge_ctx.v1q(json.loads(json.dumps(test['setup'])))
assert st_code == 200, resp
resp = hge_ctx.v1q(json.loads(json.dumps(test['setup'])))
try:
# exec sql to cause inconsistency
sql_res = hge_ctx.sql(test['sql'])
hge_ctx.sql(test['sql'])
# reload metadata
st_code, resp = hge_ctx.v1q(q=self.reload_metadata)
assert st_code == 200, resp
resp = hge_ctx.v1q(q=self.reload_metadata)
# check inconsistent objects
incons_objs_test = test['inconsistent_objects']
incons_objs_resp = resp['inconsistent_objects']
@ -47,32 +45,28 @@ class TestInconsistentObjects():
})
# export metadata
st_code, export = hge_ctx.v1q(q=self.export_metadata)
assert st_code == 200, export
export = hge_ctx.v1q(q=self.export_metadata)
# apply metadata
st_code, resp = hge_ctx.v1q(
hge_ctx.v1q(
q={
"type": "replace_metadata",
"args": export
}
},
expected_status_code = 400
)
assert st_code == 400, resp
finally:
# drop inconsistent objects
st_code, resp = hge_ctx.v1q(q=self.drop_inconsistent_metadata)
assert st_code == 200, resp
hge_ctx.v1q(q=self.drop_inconsistent_metadata)
# reload metadata
st_code, resp = hge_ctx.v1q(q=self.reload_metadata)
assert st_code == 200, resp
resp = hge_ctx.v1q(q=self.reload_metadata)
# check inconsistent objects
assert resp['is_consistent'] == True, resp
# teardown
st_code, resp = hge_ctx.v1q(json.loads(json.dumps(test['teardown'])))
assert st_code == 200, resp
hge_ctx.v1q(json.loads(json.dumps(test['teardown'])))
@classmethod
def dir(cls):


@ -121,11 +121,9 @@ class TestJWTExpirySkew():
@pytest.fixture(scope='class')
def setup(self, request, hge_ctx):
self.dir = 'queries/graphql_query/permissions'
st_code, resp = hge_ctx.v1q_f(self.dir + '/setup.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir + '/setup.yaml')
yield
st_code, resp = hge_ctx.v1q_f(self.dir + '/teardown.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir + '/teardown.yaml')
@pytest.mark.parametrize('endpoint', ['/v1/graphql', '/v1alpha1/graphql'])
class TestJWTBasic():
@ -392,11 +390,9 @@ class TestJWTBasic():
@pytest.fixture(scope='class')
def setup(self, request, hge_ctx):
self.dir = 'queries/graphql_query/permissions'
st_code, resp = hge_ctx.v1q_f(self.dir + '/setup.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir + '/setup.yaml')
yield
st_code, resp = hge_ctx.v1q_f(self.dir + '/teardown.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir + '/teardown.yaml')
def gen_rsa_key():
@ -518,11 +514,9 @@ class TestJwtAudienceCheck():
@pytest.fixture(scope='class')
def setup(self, request, hge_ctx):
self.dir = 'queries/graphql_query/permissions'
st_code, resp = hge_ctx.v1q_f(self.dir + '/setup.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir + '/setup.yaml')
yield
st_code, resp = hge_ctx.v1q_f(self.dir + '/teardown.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir + '/teardown.yaml')
@pytest.mark.parametrize('endpoint', ['/v1/graphql', '/v1alpha1/graphql'])
class TestJwtIssuerCheck():
@ -600,8 +594,6 @@ class TestJwtIssuerCheck():
@pytest.fixture(scope='class')
def setup(self, request, hge_ctx):
self.dir = 'queries/graphql_query/permissions'
st_code, resp = hge_ctx.v1q_f(self.dir + '/setup.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir + '/setup.yaml')
yield
st_code, resp = hge_ctx.v1q_f(self.dir + '/teardown.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir + '/teardown.yaml')


@ -200,11 +200,9 @@ class TestJWTClaimsMapBasic():
@pytest.fixture(scope='class')
def setup(self, request, hge_ctx):
self.dir = 'queries/graphql_query/permissions'
st_code, resp = hge_ctx.v1q_f(self.dir + '/setup.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir + '/setup.yaml')
yield
st_code, resp = hge_ctx.v1q_f(self.dir + '/teardown.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir + '/teardown.yaml')
# The values of 'x-hasura-allowed-roles' and 'x-hasura-default-role' has
# been set in the JWT config
@ -284,8 +282,6 @@ class TestJWTClaimsMapWithStaticHasuraClaimsMapValues():
@pytest.fixture(scope='class')
def setup(self, request, hge_ctx):
self.dir = 'queries/graphql_query/permissions'
st_code, resp = hge_ctx.v1q_f(self.dir + '/setup.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir + '/setup.yaml')
yield
st_code, resp = hge_ctx.v1q_f(self.dir + '/teardown.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir + '/teardown.yaml')


@ -28,14 +28,12 @@ class TestLogging():
success_query = {'query': 'query { hello {code name} }'}
def _teardown(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir + '/teardown.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir + '/teardown.yaml')
@pytest.fixture(autouse=True)
def transact(self, hge_ctx):
# setup some tables
st_code, resp = hge_ctx.v1q_f(self.dir + '/setup.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir + '/setup.yaml')
try:
# make a successful query
@ -70,7 +68,7 @@ class TestLogging():
'args': {
"table": {
"name": "hdb_function",
"schema": "hdb_catalog"
"schema": "hdb_catalog"
},
"columns": ["function_name", "function_schema", "is_system_defined"],
"where": { "function_schema": "public" }
@ -223,14 +221,12 @@ class TestWebsocketLogging():
query_id = 'successful-ws-log-test'
def _teardown(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir + '/teardown.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir + '/teardown.yaml')
@pytest.fixture(autouse=True)
def transact(self, hge_ctx):
# setup some tables
st_code, resp = hge_ctx.v1q_f(self.dir + '/setup.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir + '/setup.yaml')
try:
# make a successful websocket query


@ -1,4 +1,3 @@
import ruamel.yaml as yaml
from validate import check_query_f
import pytest
import os
@ -47,16 +46,15 @@ class TestMetadata:
'/replace_metadata_allow_inconsistent.yaml')
def test_replace_metadata_disallow_inconsistent_metadata(self, hge_ctx):
st_code, resp = hge_ctx.v1metadataq({"type": "export_metadata", "args": {}})
assert st_code == 200, resp
resp = hge_ctx.v1metadataq({"type": "export_metadata", "args": {}})
default_source_config = {}
default_source = list(filter(lambda source: (source["name"] == "default"), resp["sources"]))
if default_source:
default_source_config = default_source[0]["configuration"]
else:
assert False, "default source config not found"
return
st_code, resp = hge_ctx.v1metadataq({
resp = hge_ctx.v1metadataq(
{
"type": "replace_metadata",
"version": 2,
"args": {
@ -116,8 +114,9 @@ class TestMetadata:
]
}
}
})
assert st_code == 400, resp
},
expected_status_code = 400
)
assert resp == {
"internal": [
{
@ -139,16 +138,14 @@ class TestMetadata:
"""Test that missing "kind" key in metadata source defaults to "postgres".
Regression test for https://github.com/hasura/graphql-engine-mono/issues/4501"""
def test_replace_metadata_default_kind(self, hge_ctx):
st_code, resp = hge_ctx.v1metadataq({"type": "export_metadata", "args": {}})
assert st_code == 200, resp
resp = hge_ctx.v1metadataq({"type": "export_metadata", "args": {}})
default_source_config = {}
default_source = list(filter(lambda source: (source["name"] == "default"), resp["sources"]))
if default_source:
default_source_config = default_source[0]["configuration"]
else:
assert False, "default source config not found"
return
st_code, resp = hge_ctx.v1metadataq({
hge_ctx.v1metadataq({
"type": "replace_metadata",
"version": 2,
"args": {
@ -164,9 +161,7 @@ class TestMetadata:
}
}
})
assert st_code == 200, resp
st_code, resp = hge_ctx.v1metadataq({"type": "export_metadata", "args": {}})
assert st_code == 200, resp
resp = hge_ctx.v1metadataq({"type": "export_metadata", "args": {}})
assert resp["sources"][0]["kind"] == "postgres"
def test_dump_internal_state(self, hge_ctx):
@ -176,7 +171,7 @@ class TestMetadata:
check_query_f(hge_ctx, self.dir() + '/pg_add_source.yaml')
def test_pg_add_source_with_replace_config(self, hge_ctx):
st_code, resp = hge_ctx.v1metadataq({
hge_ctx.v1metadataq({
"type": "pg_add_source",
"args": {
"name": "pg1",
@ -189,8 +184,7 @@ class TestMetadata:
}
}
})
assert st_code == 200, resp
st_code, resp = hge_ctx.v1metadataq({
hge_ctx.v1metadataq({
"type": "pg_add_source",
"args": {
"name": "pg1",
@ -209,20 +203,18 @@ class TestMetadata:
"replace_configuration": True
}
})
assert st_code == 200, resp
st_code, resp = hge_ctx.v1metadataq({"type": "export_metadata", "args": {}})
assert st_code == 200, resp
resp = hge_ctx.v1metadataq({"type": "export_metadata", "args": {}})
assert resp["sources"][1]["customization"]["root_fields"]["namespace"] == "some_namespace"
st_code, resp = hge_ctx.v1metadataq({
hge_ctx.v1metadataq({
"type": "pg_drop_source",
"args": {
"name": "pg1"
}
})
assert st_code == 200, resp
def test_pg_update_unknown_source(self, hge_ctx):
st_code, resp = hge_ctx.v1metadataq({
resp = hge_ctx.v1metadataq(
{
"type": "pg_update_source",
"args": {
"name": "pg-not-previously-added",
@ -234,12 +226,13 @@ class TestMetadata:
}
}
}
})
assert st_code == 400, resp
},
expected_status_code = 400
)
assert resp["error"] == "source with name \"pg-not-previously-added\" does not exist"
def test_pg_update_source(self, hge_ctx):
st_code, resp = hge_ctx.v1metadataq({
hge_ctx.v1metadataq({
"type": "pg_add_source",
"args": {
"name": "pg1",
@ -255,8 +248,7 @@ class TestMetadata:
}
}
})
assert st_code == 200, resp
st_code, resp = hge_ctx.v1metadataq({
hge_ctx.v1metadataq({
"type": "pg_update_source",
"args": {
"name": "pg1",
@ -267,12 +259,10 @@ class TestMetadata:
}
}
})
assert st_code == 200, resp
st_code, resp = hge_ctx.v1metadataq({"type": "export_metadata", "args": {}})
assert st_code == 200, resp
resp = hge_ctx.v1metadataq({"type": "export_metadata", "args": {}})
assert resp["sources"][1]["customization"]["root_fields"]["namespace"] == "some_namespace"
assert resp["sources"][1]["configuration"]["connection_info"]["pool_settings"]["max_connections"] == 10
st_code, resp = hge_ctx.v1metadataq({
hge_ctx.v1metadataq({
"type": "pg_update_source",
"args": {
"name": "pg1",
@ -288,18 +278,15 @@ class TestMetadata:
}
}
})
assert st_code == 200, resp
st_code, resp = hge_ctx.v1metadataq({"type": "export_metadata", "args": {}})
assert st_code == 200, resp
resp = hge_ctx.v1metadataq({"type": "export_metadata", "args": {}})
assert resp["sources"][1]["customization"]["root_fields"]["namespace"] == "some_namespace"
assert resp["sources"][1]["configuration"]["connection_info"]["pool_settings"]["max_connections"] == 50
st_code, resp = hge_ctx.v1metadataq({
hge_ctx.v1metadataq({
"type": "pg_drop_source",
"args": {
"name": "pg1"
}
})
assert st_code == 200, resp
@pytest.mark.skipif(
os.getenv('HASURA_GRAPHQL_PG_SOURCE_URL_1') != 'postgresql://gql_test@localhost:5432/pg_source_1',
@ -362,8 +349,8 @@ class TestMetadata:
if hge_ctx.hge_key is not None:
headers['x-hasura-admin-secret'] = hge_ctx.hge_key
st, resp, _ = hge_ctx.anyq(url, query, headers)
assert st == 200, resp
status_code, resp, _ = hge_ctx.anyq(url, query, headers)
assert status_code == 200, f'Expected {status_code} to be 200. Response:\n{resp}'
fn_name = 'search_authors_s1'
fn_description = 'this function helps fetch articles based on the title'


@ -56,13 +56,12 @@ class TestOpenAPISpec:
def test_inconsistent_schema_openAPI(self, hge_ctx, transport):
# export metadata and create a backup
st_code, backup_metadata = hge_ctx.v1q(
backup_metadata = hge_ctx.v1q(
q = {
"type": "export_metadata",
"args": {}
}
)
assert st_code == 200, backup_metadata
new_metadata = backup_metadata.copy()
@ -92,7 +91,7 @@ class TestOpenAPISpec:
new_metadata["rest_endpoints"] = res_endpoint
# apply inconsistent metadata
st_code, resp = hge_ctx.v1q(
hge_ctx.v1q(
q={
"type": "replace_metadata",
"version": 2,
@ -102,16 +101,14 @@ class TestOpenAPISpec:
}
}
)
assert st_code == 200, resp
# check openAPI schema
check_query_f(hge_ctx, self.dir() + '/openapi_inconsistent_schema.yaml', transport)
# revert to old metadata
st_code, resp = hge_ctx.v1q(
hge_ctx.v1q(
q={
"type": "replace_metadata",
"args": backup_metadata
}
)
assert st_code == 200, resp


@ -30,78 +30,48 @@ class TestCreateRemoteRelationship:
# print("In setup method")
# graphql_service.start()
# try:
# st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup.yaml')
# assert st_code == 200, resp
# hge_ctx.v1q_f(self.dir() + 'setup.yaml')
# yield
# st_code, resp = hge_ctx.v1q_f(self.dir() + 'teardown.yaml')
# assert st_code == 200, resp
# hge_ctx.v1q_f(self.dir() + 'teardown.yaml')
# finally:
# graphql_service.stop()
def test_create_valid(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
assert st_code == 200, resp
files = [
'setup_remote_rel_basic.yaml',
'setup_remote_rel_nested_args.yaml',
'setup_remote_rel_array.yaml',
'setup_remote_rel_nested_fields.yaml',
'setup_remote_rel_multiple_fields.yaml',
'setup_remote_rel_joining_singleton_with_array.yaml',
'setup_remote_rel_with_interface.yaml',
'setup_remote_rel_with_union.yaml',
'setup_remote_rel_with_enum.yaml',
'setup_remote_rel_computed_fields.yaml',
]
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_nested_args.yaml')
assert st_code == 200, resp
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_array.yaml')
assert st_code == 200, resp
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_nested_fields.yaml')
assert st_code == 200, resp
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_multiple_fields.yaml')
assert st_code == 200, resp
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_joining_singleton_with_array.yaml')
assert st_code == 200, resp
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_with_interface.yaml')
assert st_code == 200, resp
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_with_union.yaml')
assert st_code == 200, resp
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_with_enum.yaml')
assert st_code == 200, resp
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_computed_fields.yaml')
assert st_code == 200, resp
for f in files:
hge_ctx.v1q_f(self.dir() + f)
def test_create_invalid(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_invalid_remote_rel_hasura_field.yaml')
assert st_code == 400, resp
files = [
'setup_invalid_remote_rel_hasura_field.yaml',
'setup_invalid_remote_rel_literal.yaml',
'setup_invalid_remote_rel_variable.yaml',
'setup_invalid_remote_rel_remote_args.yaml',
'setup_invalid_remote_rel_remote_schema.yaml',
'setup_invalid_remote_rel_remote_field.yaml',
'setup_invalid_remote_rel_nested_args.yaml',
'setup_invalid_remote_rel_array.yaml',
'setup_invalid_remote_rel_computed_field.yaml',
]
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_invalid_remote_rel_literal.yaml')
assert st_code == 400, resp
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_invalid_remote_rel_variable.yaml')
assert st_code == 400, resp
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_invalid_remote_rel_remote_args.yaml')
assert st_code == 400, resp
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_invalid_remote_rel_remote_schema.yaml')
assert st_code == 400, resp
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_invalid_remote_rel_remote_field.yaml')
assert st_code == 400, resp
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_invalid_remote_rel_nested_args.yaml')
assert st_code == 400, resp
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_invalid_remote_rel_array.yaml')
assert st_code == 400, resp
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_invalid_remote_rel_computed_field.yaml')
assert st_code == 400, resp
for f in files:
hge_ctx.v1q_f(self.dir() + f, expected_status_code = 400)
def test_generation(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
assert st_code == 200, resp
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_nested_args.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_nested_args.yaml')
check_query_f(hge_ctx, self.dir() + 'select_remote_fields.yaml')
@ -112,18 +82,13 @@ class TestDeleteRemoteRelationship:
return "queries/remote_schemas/remote_relationships/"
def test_delete(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
assert st_code == 200, resp
st_code, resp = hge_ctx.v1q_f(self.dir() + 'delete_remote_rel.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
hge_ctx.v1q_f(self.dir() + 'delete_remote_rel.yaml')
def test_delete_dependencies(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
assert st_code == 200, resp
st_code, resp = hge_ctx.v1q_f(self.dir() + 'remove_remote_schema.yaml')
assert st_code == 400, resp
st_code, resp = hge_ctx.v1q_f(self.dir() + 'delete_remote_rel.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
hge_ctx.v1q_f(self.dir() + 'remove_remote_schema.yaml', expected_status_code = 400)
hge_ctx.v1q_f(self.dir() + 'delete_remote_rel.yaml')
def test_deleting_column_with_remote_relationship_dependency(self, hge_ctx):
check_query_f(hge_ctx, self.dir() + 'drop_col_with_remote_rel_dependency.yaml')
@ -138,8 +103,7 @@ class TestDeleteRemoteRelationship:
'type': 'export_metadata',
'args': {}
}
status_code, resp = hge_ctx.v1q(export_metadata_q)
assert status_code == 200, resp
resp = hge_ctx.v1q(export_metadata_q)
tables = resp['sources'][0]['tables']
for t in tables:
if t['table']['name'] == table:
@ -152,11 +116,9 @@ class TestUpdateRemoteRelationship:
return "queries/remote_schemas/remote_relationships/"
def test_update(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
check_query_f(hge_ctx, self.dir() + 'basic_relationship.yaml')
st_code, resp = hge_ctx.v1q_f(self.dir() + 'update_remote_rel_basic.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'update_remote_rel_basic.yaml')
check_query_f(hge_ctx, self.dir() + 'update_basic_query.yaml')
@use_test_fixtures
@ -170,131 +132,106 @@ class TestExecution:
# check_query_f(hge_ctx, self.dir() + 'basic_mixed.yaml')
def test_basic_relationship(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
check_query_f(hge_ctx, self.dir() + 'basic_relationship.yaml')
def test_basic_relationship_on_object(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
check_query_f(hge_ctx, self.dir() + 'query_with_object_rel.yaml')
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_nested_args.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_nested_args.yaml')
check_query_f(hge_ctx, self.dir() + 'query_with_arr_rel.yaml')
def test_regression_7172(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_regression_7172.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_regression_7172.yaml')
check_query_f(hge_ctx, self.dir() + 'regression_7172.yaml')
def test_basic_relationship_joining_singleton_to_array(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_joining_singleton_with_array.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_joining_singleton_with_array.yaml')
check_query_f(hge_ctx, self.dir() + 'basic_relationship_joining_singleton_with_array.yaml')
def test_basic_array(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_array.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_array.yaml')
check_query_f(hge_ctx, self.dir() + 'basic_array.yaml')
def test_basic_array_without_join_key(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_array.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_array.yaml')
check_query_f(hge_ctx, self.dir() + 'basic_array_without_join_key.yaml')
def test_multiple_fields(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_multiple_fields.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_multiple_fields.yaml')
check_query_f(hge_ctx, self.dir() + 'basic_multiple_fields.yaml')
# https://github.com/hasura/graphql-engine/issues/5448
def test_remote_join_fields_with_null_joining_fields(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_null_joining_fields.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_null_joining_fields.yaml')
check_query_f(hge_ctx, self.dir() + 'remote_rel_with_null_joining_fields.yaml')
def test_nested_fields(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_nested_fields.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_nested_fields.yaml')
check_query_f(hge_ctx, self.dir() + 'basic_nested_fields.yaml')
def test_arguments(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_nested_args.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_nested_args.yaml')
check_query_f(hge_ctx, self.dir() + 'query_with_arguments.yaml')
def test_with_variables(self, hge_ctx):
# check_query_f(hge_ctx, self.dir() + 'mixed_variables.yaml') -- uses heterogeneous execution, due to which this assert fails
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_nested_args.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_nested_args.yaml')
check_query_f(hge_ctx, self.dir() + 'remote_rel_variables.yaml')
def test_with_fragments(self, hge_ctx):
check_query_f(hge_ctx, self.dir() + 'mixed_fragments.yaml')
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
check_query_f(hge_ctx, self.dir() + 'remote_rel_fragments.yaml')
def test_with_interface(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_with_interface.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_with_interface.yaml')
check_query_f(hge_ctx, self.dir() + 'mixed_interface.yaml')
check_query_f(hge_ctx, self.dir() + 'remote_rel_interface.yaml')
def test_with_union(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_with_union.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_with_union.yaml')
check_query_f(hge_ctx, self.dir() + 'remote_rel_union.yaml')
def test_with_enum(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_with_enum.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_with_enum.yaml')
check_query_f(hge_ctx, self.dir() + 'remote_rel_enum.yaml')
def test_with_errors(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
check_query_f(hge_ctx, self.dir() + 'query_with_errors_obj.yaml')
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_nested_args.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_nested_args.yaml')
check_query_f(hge_ctx, self.dir() + 'query_with_errors_arr.yaml')
def test_with_aliased_remote_join_keys(self, hge_ctx):
"""
Regression test for https://github.com/hasura/graphql-engine/issues/7180.
"""
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
assert st_code == 200, resp
print(resp)
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
check_query_f(hge_ctx, self.dir() + 'basic_relationship_alias.yaml')
def test_with_scalar_relationship(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_scalar.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_scalar.yaml')
check_query_f(hge_ctx, self.dir() + 'query_with_scalar_rel.yaml')
def test_renaming_column_with_remote_relationship_dependency(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_nested_args.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_nested_args.yaml')
check_query_f(hge_ctx, self.dir() + 'rename_col_with_remote_rel_dependency.yaml')
def test_renaming_table_with_remote_relationship_dependency(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_nested_args.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_nested_args.yaml')
check_query_f(hge_ctx, self.dir() + 'rename_table_with_remote_rel_dependency.yaml')
# The check for the presence of the remote relationships is deferred to later stage
# in the server source code. To run this test we need to use proper websocket client
# instead of HTTP.
# def test_remote_joins_with_subscription_should_throw_error(self, hge_ctx):
# st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
# assert st_code == 200, resp
# hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
# check_query_f(hge_ctx, self.dir() + 'subscription_with_remote_join_fields.yaml')
def test_remote_joins_in_mutation_response(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic_with_authors.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic_with_authors.yaml')
check_query_f(hge_ctx, self.dir() + 'mutation_output_with_remote_join_fields.yaml')
class TestDeepExecution:
@ -306,35 +243,27 @@ class TestDeepExecution:
@pytest.fixture(autouse=True)
def transact(self, hge_ctx, graphql_service):
print("In setup method")
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup.yaml')
assert st_code == 200, resp
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_address.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup.yaml')
hge_ctx.v1q_f(self.dir() + 'setup_address.yaml')
yield
st_code, resp = hge_ctx.v1q_f(self.dir() + 'teardown_address.yaml')
assert st_code == 200, resp
st_code, resp = hge_ctx.v1q_f(self.dir() + 'teardown.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'teardown_address.yaml')
hge_ctx.v1q_f(self.dir() + 'teardown.yaml')
def test_with_deep_object(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
check_query_f(hge_ctx, self.dir() + 'query_with_deep_nesting_obj.yaml')
def test_with_deep_array(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_nested_args.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_nested_args.yaml')
check_query_f(hge_ctx, self.dir() + 'query_with_deep_nesting_arr.yaml')
def test_with_complex_path_object(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
check_query_f(hge_ctx, self.dir() + 'query_with_deep_nesting_complex_path_obj.yaml')
check_query_f(hge_ctx, self.dir() + 'query_with_deep_nesting_complex_path_obj2.yaml')
def test_with_complex_path_array(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
check_query_f(hge_ctx, self.dir() + 'query_with_deep_nesting_complex_path_arr.yaml')
check_query_f(hge_ctx, self.dir() + 'query_with_deep_nesting_complex_path_arr2.yaml')
@ -348,23 +277,19 @@ class TestExecutionWithPermissions:
@pytest.fixture(autouse=True)
def transact(self, hge_ctx, graphql_service):
print("In setup method")
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_with_permissions.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_with_permissions.yaml')
yield
st_code, resp = hge_ctx.v1q_f(self.dir() + 'teardown.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'teardown.yaml')
def test_basic_relationship(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
check_query_f(hge_ctx, self.dir() + 'basic_relationship_with_permissions1.yaml')
check_query_f(hge_ctx, self.dir() + 'basic_relationship_with_permissions2.yaml')
# Test queries that combine several remote relationships, nested in
# different ways, variously filtering different bits using permissions.
def test_complex_multiple_joins(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_multiple_remote_rel.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_multiple_remote_rel.yaml')
check_query_f(hge_ctx, self.dir() + 'complex_multiple_joins.yaml')
@use_test_fixtures
@ -375,8 +300,7 @@ class TestWithRelay:
return "queries/remote_schemas/remote_relationships/"
def test_with_relay_fail(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
check_query_f(hge_ctx, self.dir() + "with_relay.yaml")
@use_test_fixtures
@ -387,13 +311,11 @@ class TestExecutionWithCustomization:
return "queries/remote_schemas/remote_relationships/schema_customization/"
def test_basic_relationship(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
check_query_f(hge_ctx, self.dir() + 'basic_relationship.yaml')
def test_nested_fields(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_nested_fields.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_nested_fields.yaml')
check_query_f(hge_ctx, self.dir() + 'basic_nested_fields.yaml')
@ -406,13 +328,10 @@ class TestComputedFieldsInRemoteRelationship:
@pytest.fixture(autouse=True)
def transact(self, hge_ctx, graphql_service):
print("In setup method")
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup.yaml')
assert st_code == 200, resp
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_computed_fields.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup.yaml')
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_computed_fields.yaml')
yield
st_code, resp = hge_ctx.v1q_f(self.dir() + 'teardown.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'teardown.yaml')
def test_remote_join_with_computed_field(self, hge_ctx):
check_query_f(hge_ctx, self.dir() + 'remote_join_with_computed_field.yaml')
@ -428,6 +347,5 @@ class TestRemoteRelationshipFieldType:
return "queries/remote_schemas/remote_relationships"
def test_remote_relationship_field_type(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + '/setup_remote_rel_nested_args.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + '/setup_remote_rel_nested_args.yaml')
check_query_f(hge_ctx, self.dir() + '/remote_relationship_field_type.yaml')


@ -1,8 +1,6 @@
#!/usr/bin/env python3
import pytest
import subprocess
import time
from validate import check_query_f
from remote_server import NodeGraphQL
@ -47,37 +45,28 @@ class TestAddRemoteSchemaPermissions:
return "queries/remote_schemas/permissions/"
def test_add_permission_with_valid_subset_of_fields(self, hge_ctx):
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_valid_subset_of_fields.yaml')
assert st_code == 200, resp
hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_valid_subset_of_fields.yaml')
""" Here the schemas are compatible """
def test_update_remote_schema_details_with_permissions_set(self, hge_ctx):
""" Permissions check """
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_valid_subset_of_fields.yaml')
assert st_code == 200, resp
hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_valid_subset_of_fields.yaml')
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + 'update_remote_schema/update_schema.yaml')
assert st_code == 200, resp
hge_ctx.v1metadataq_f(self.dir() + 'update_remote_schema/update_schema.yaml')
""" check the details of remote schema in metadata """
st_code, resp = hge_ctx.v1metadataq({"type": "export_metadata", "args": {}})
assert st_code == 200, resp
resp = hge_ctx.v1metadataq({"type": "export_metadata", "args": {}})
assert resp['remote_schemas'][0]['definition']['url'] == "http://localhost:4021"
assert resp['remote_schemas'][0]['comment'] == 'this is from update query', resp
assert resp['remote_schemas'][0]['definition']['timeout_seconds'] == 120, resp
""" reset the changes to the original config """
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + 'update_remote_schema/revert_to_original_config.yaml')
assert st_code == 200, resp
hge_ctx.v1metadataq_f(self.dir() + 'update_remote_schema/revert_to_original_config.yaml')
def test_update_remote_schema_details_with_permissions_set_with_error(self, hge_ctx):
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_valid_subset_of_fields.yaml')
assert st_code == 200, resp
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + 'update_remote_schema/update_schema_error.yaml')
assert st_code == 400, resp
hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_valid_subset_of_fields.yaml')
hge_ctx.v1metadataq_f(self.dir() + 'update_remote_schema/update_schema_error.yaml', expected_status_code = 400)
def test_add_permission_with_valid_subset_of_arguments(self, hge_ctx):
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_valid_subset_of_arguments.yaml')
assert st_code == 200, resp
hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_valid_subset_of_arguments.yaml')
def test_role_based_schema_enums_validation(self, hge_ctx):
check_query_f(hge_ctx, self.dir() + 'role_based_schema_enum_validations.yaml')
@ -108,19 +97,16 @@ class TestRemoteSchemaPermissionsExecution:
return "queries/remote_schemas/permissions/"
def test_execution_with_subset_of_fields_exposed_to_role(self, hge_ctx):
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_valid_subset_of_fields.yaml')
assert st_code == 200, resp
hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_valid_subset_of_fields.yaml')
check_query_f(hge_ctx, self.dir() + 'execution_with_partial_fields_exposed_to_role.yaml')
@pytest.mark.skipif(not PytestConf.config.getoption('--redis-url'), reason="Must enable redis")
def test_execution_with_subset_of_fields_exposed_to_role_with_caching(self, hge_ctx):
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_valid_subset_of_fields.yaml')
assert st_code == 200, resp
hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_valid_subset_of_fields.yaml')
check_query_f(hge_ctx, self.dir() + 'execution_with_partial_fields_exposed_to_role_cached.yaml')
def test_execution_with_subset_of_arguments_exposed_to_role(self, hge_ctx):
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_valid_subset_of_arguments.yaml')
assert st_code == 200, resp
hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_valid_subset_of_arguments.yaml')
check_query_f(hge_ctx, self.dir() + 'execution_with_partial_args_exposed_to_role.yaml')
def test_execution_with_unknown_role(self, hge_ctx):
@ -134,13 +120,11 @@ class TestCustomizedRemoteSchemaPermissionsExecution:
return "queries/remote_schemas/permissions/schema_customization/"
def test_execution_with_subset_of_fields_exposed_to_role(self, hge_ctx):
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_valid_subset_of_fields.yaml')
assert st_code == 200, resp
hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_valid_subset_of_fields.yaml')
check_query_f(hge_ctx, self.dir() + 'execution_with_partial_fields_exposed_to_role.yaml')
def test_execution_with_subset_of_arguments_exposed_to_role(self, hge_ctx):
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_valid_subset_of_arguments.yaml')
assert st_code == 200, resp
hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_valid_subset_of_arguments.yaml')
check_query_f(hge_ctx, self.dir() + 'execution_with_partial_args_exposed_to_role.yaml')
@use_test_fixtures
@ -151,13 +135,11 @@ class TestRemoteSchemaPermissionsArgumentPresets:
return "queries/remote_schemas/permissions/argument_presets/"
def test_execution_with_static_argument_preset(self, hge_ctx):
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_static_preset_argument.yaml')
assert st_code == 200, resp
hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_static_preset_argument.yaml')
check_query_f(hge_ctx, self.dir() + 'execution_with_static_preset_args.yaml')
def test_execution_with_session_argument_preset(self, hge_ctx):
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_session_preset_argument.yaml')
assert st_code == 200, resp
hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_session_preset_argument.yaml')
check_query_f(hge_ctx, self.dir() + 'execution_with_session_preset_args.yaml')
class TestRemoteRelationshipPermissions:
@ -168,33 +150,26 @@ class TestRemoteRelationshipPermissions:
@pytest.fixture(autouse=True)
def transact(self, hge_ctx, graphql_service):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_with_permissions.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_with_permissions.yaml')
yield
st_code, resp = hge_ctx.v1q_f(self.dir() + 'teardown.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'teardown.yaml')
def test_basic_relationship(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
assert st_code == 200, resp
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic_user.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic_user.yaml')
check_query_f(hge_ctx, self.dir() + 'basic_remote_relationship_without_remote_schema_permissions_configured.yaml')
check_query_f(hge_ctx, self.dir() + 'basic_remote_relationship_with_remote_schema_permissions_configured.yaml')
# Test queries that combine several remote relationships, nested in
# different ways, variously filtering different bits using permissions.
def test_complex_multiple_joins(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_multiple_remote_rel.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_multiple_remote_rel.yaml')
check_query_f(hge_ctx, self.dir() + 'complex_multiple_joins.yaml')
def test_remote_relationship_with_field_containing_preset_argument(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_basic.yaml')
check_query_f(hge_ctx, self.dir() + 'derive_remote_relationship_with_joining_field_containing_preset.yaml')
def test_partial_arguments_of_remote_relationship_from_preset(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_messages_single_field.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir() + 'setup_remote_rel_messages_single_field.yaml')
check_query_f(hge_ctx, self.dir() + 'partial_arguments_from_preset.yaml')
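
For orientation, the add_permission_with_valid_subset_of_fields.yaml and related fixtures used throughout this file configure role-based remote schema permissions. A hedged sketch of what such a fixture presumably contains, written as the equivalent metadata API payload (the remote schema name, role, and SDL below are illustrative, not taken from the actual fixture):

# Illustrative only: the real YAML fixtures live under
# queries/remote_schemas/permissions/ and are not reproduced in this diff.
add_permission_sketch = {
    "type": "add_remote_schema_permissions",
    "args": {
        "remote_schema": "my remote",   # assumed name
        "role": "user",                 # assumed role
        "definition": {
            # Only the types and fields listed in this SDL are exposed to
            # the role; everything else in the remote schema stays hidden.
            "schema": "schema { query: Query } type Query { hello: String }",
        },
    },
}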

View File

@ -4,9 +4,7 @@ import pytest
from datetime import datetime,timedelta
from croniter import croniter
from validate import validate_event_webhook,validate_event_headers
from queue import Empty
import json
import time
from utils import until_asserts_pass
# The create and delete tests should ideally go in setup and teardown YAML files,
@ -77,8 +75,7 @@ class TestScheduledEvent(object):
}
]
}
st, resp = hge_ctx.v1q(query)
assert st == 200,resp
resp = hge_ctx.v1q(query)
assert len(resp) == 3, resp
# ensuring that valid event_id is returned for all requests
assert all(['event_id' in r for r in resp]), resp
@ -103,8 +100,7 @@ class TestScheduledEvent(object):
'''
}
}
st, resp = hge_ctx.v1q(query)
assert st == 200, resp
resp = hge_ctx.v1q(query)
db_created_at = resp['result'][1][0]
validate_event_webhook(event_success['path'],'/test')
validate_event_headers(event_success['headers'],{"header-key":"header-value"})
@ -122,8 +118,7 @@ class TestScheduledEvent(object):
"sql":"select status,tries from hdb_catalog.hdb_scheduled_events order by status desc"
}
}
st, resp = hge_ctx.v1q(query)
assert st == 200, resp
resp = hge_ctx.v1q(query)
scheduled_event_statuses = resp['result']
# 3 scheduled events have been created
# one should be dead because the timestamp was past the tolerance limit
@ -164,17 +159,16 @@ class TestCronTrigger(object):
"include_in_metadata":True
}
}
cron_st_code,cron_st_resp = hge_ctx.v1q(cron_st_api_query)
resp = hge_ctx.v1q(cron_st_api_query)
TestCronTrigger.init_time = datetime.utcnow()
# the cron events will be generated based on the current time; they
# will not be exactly the same, though (the server's "now" differs from the "now" recorded here)
assert cron_st_code == 200,cron_st_resp
assert cron_st_resp['message'] == 'success'
assert resp['message'] == 'success'
def test_check_generated_cron_scheduled_events(self,hge_ctx):
expected_schedule_timestamps = []
iter = croniter(self.cron_schedule,self.init_time)
for i in range(100):
for _ in range(100):
expected_schedule_timestamps.append(iter.next(datetime))
# Get timestamps in UTC from the db to compare it with
# the croniter generated timestamps
@ -188,8 +182,7 @@ class TestCronTrigger(object):
"sql":sql.format(self.cron_trigger_name)
}
}
st,resp = hge_ctx.v1q(q)
assert st == 200,resp
resp = hge_ctx.v1q(q)
ts_resp = resp['result'][1:]
assert len(ts_resp) == 100
# 100 scheduled events are generated in a single batch when the
@ -203,7 +196,7 @@ class TestCronTrigger(object):
def test_update_existing_cron_trigger(self,hge_ctx):
expected_schedule_timestamps = []
iter = croniter(self.cron_schedule,datetime.utcnow())
for i in range(100):
for _ in range(100):
expected_schedule_timestamps.append(iter.next(datetime))
q = {
"type":"create_cron_trigger",
@ -222,11 +215,9 @@ class TestCronTrigger(object):
"replace":True
}
}
st,resp = hge_ctx.v1q(q)
assert st == 200, resp
hge_ctx.v1q(q)
st, resp = hge_ctx.v1q({'type': 'export_metadata', 'args': {}})
assert st == 200,resp
resp = hge_ctx.v1q({'type': 'export_metadata', 'args': {}})
all_cron_triggers = resp['cron_triggers']
for cron_trigger in all_cron_triggers:
@ -250,8 +241,7 @@ class TestCronTrigger(object):
"sql":sql.format(self.cron_trigger_name)
}
}
st,resp = hge_ctx.v1q(q)
assert st == 200,resp
resp = hge_ctx.v1q(q)
ts_resp = resp['result'][1:]
assert len(ts_resp) == 100
actual_schedule_timestamps = []
@ -277,8 +267,7 @@ class TestCronTrigger(object):
"include_in_metadata":False
}
}
st,resp = hge_ctx.v1q(q)
assert st == 200, resp
hge_ctx.v1q(q)
# The maximum timeout is set to 75s because the cron timestamps
# that are generated will start from the next minute, suppose
# the cron schedule is "* * * * *" and the time the cron trigger
@ -299,8 +288,7 @@ class TestCronTrigger(object):
"type": "get_cron_triggers",
"args": {}
}
st, resp = hge_ctx.v1metadataq(q)
assert st == 200, resp
resp = hge_ctx.v1metadataq(q)
respDict = json.loads(json.dumps(resp))
assert respDict['cron_triggers'] == [
{
@ -354,8 +342,7 @@ class TestCronTrigger(object):
"type": "export_metadata",
"args": {}
}
st, resp = hge_ctx.v1q(q)
assert st == 200, resp
resp = hge_ctx.v1q(q)
respDict = json.loads(json.dumps(resp))
# Only the cron triggers with `include_in_metadata` set to `True`
# should be exported
@ -382,7 +369,7 @@ class TestCronTrigger(object):
"metadata": resp
}
}
st, resp = hge_ctx.v1q(q)
resp = hge_ctx.v1q(q)
sql = '''
select count(1) as count
from hdb_catalog.hdb_cron_events
@ -394,8 +381,7 @@ class TestCronTrigger(object):
"sql": sql.format(self.cron_trigger_name)
}
}
st, resp = hge_ctx.v1q(run_sql_query)
assert st == 200, resp
resp = hge_ctx.v1q(run_sql_query)
count_resp = resp['result'][1][0]
# Check that the future cron events are created
# for a cron trigger when it is imported from the metadata
@ -418,8 +404,7 @@ class TestCronTrigger(object):
"include_in_metadata":False
}
}
st, resp = hge_ctx.v1q(q)
assert st == 400, dict(resp)
resp = hge_ctx.v1q(q, expected_status_code = 400)
assert dict(resp) == {
"code": "already-exists",
"error": 'cron trigger with name: test_cron_trigger already exists',
@ -444,5 +429,4 @@ class TestCronTrigger(object):
}
]
}
st,resp = hge_ctx.v1q(q)
assert st == 200,resp
hge_ctx.v1q(q)
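
The cron trigger tests above compute their expected timestamps with croniter and compare them against rows fetched from hdb_catalog via run_sql. A small, self-contained sketch of the generation step, mirroring the iterator usage in the tests (the schedule and start time are made up for illustration):

from datetime import datetime
from croniter import croniter

def expected_timestamps(schedule, start, count):
    # croniter yields successive fire times strictly after `start`.
    it = croniter(schedule, start)
    return [it.next(datetime) for _ in range(count)]

# A "* * * * *" schedule fires every minute, so starting from midnight the
# first events land on 00:01, 00:02, 00:03, ...
print(expected_timestamps("* * * * *", datetime(2022, 1, 1, 0, 0), count=3))

The tests then skip the header row of the run_sql result (resp['result'][0]) before comparing the remaining rows against these generated timestamps.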

View File

@ -1,11 +1,7 @@
#!/usr/bin/env python3
import string
import random
from ruamel.yaml import YAML
import json
import graphql
import queue
import requests
import time
@ -14,7 +10,6 @@ import pytest
yaml=YAML(typ='safe', pure=True)
from validate import check_query_f, check_query
from graphql import GraphQLError
def mk_add_remote_q(name, url, headers=None, client_hdrs=False, timeout=None, customization=None):
return {
@ -82,44 +77,37 @@ class TestRemoteSchemaBasic:
# Some marked tests in this class will be run as server upgrade tests
if not config.getoption('--skip-schema-setup'):
q = mk_add_remote_q('simple 1', 'http://localhost:5000/hello-graphql')
st_code, resp = hge_ctx.v1q(q)
assert st_code == 200, resp
hge_ctx.v1q(q)
yield
if request.session.testsfailed > 0 or not config.getoption('--skip-schema-teardown'):
hge_ctx.v1q(self.teardown)
def test_add_schema(self, hge_ctx):
""" check if the remote schema is added in the metadata """
st_code, resp = hge_ctx.v1q(export_metadata_q)
assert st_code == 200, resp
resp = hge_ctx.v1q(export_metadata_q)
assert resp['remote_schemas'][0]['name'] == "simple 1"
def test_update_schema_with_no_url_change(self, hge_ctx):
""" call update_remote_schema API and check the details stored in metadata """
q = mk_update_remote_q('simple 1', 'http://localhost:5000/hello-graphql', None, True, 120)
st_code, resp = hge_ctx.v1q(q)
assert st_code == 200, resp
hge_ctx.v1q(q)
st_code, resp = hge_ctx.v1q(export_metadata_q)
assert st_code == 200, resp
resp = hge_ctx.v1q(export_metadata_q)
assert resp['remote_schemas'][0]['name'] == "simple 1"
assert resp['remote_schemas'][0]['definition']['timeout_seconds'] == 120
assert resp['remote_schemas'][0]['definition']['forward_client_headers'] == True
""" revert to original config for remote schema """
q = mk_update_remote_q('simple 1', 'http://localhost:5000/hello-graphql', None, False, 60)
st_code, resp = hge_ctx.v1q(q)
assert st_code == 200, resp
hge_ctx.v1q(q)
def test_update_schema_with_url_change(self, hge_ctx):
""" call update_remote_schema API and check the details stored in metadata """
q = mk_update_remote_q('simple 1', 'http://localhost:5000/user-graphql', None, True, 80)
st_code, resp = hge_ctx.v1q(q)
# This should succeed since there aren't any conflicting relations or permissions set up
assert st_code == 200, resp
q = mk_update_remote_q('simple 1', 'http://localhost:5000/user-graphql', None, True, 80)
hge_ctx.v1q(q)
st_code, resp = hge_ctx.v1q(export_metadata_q)
assert st_code == 200, resp
resp = hge_ctx.v1q(export_metadata_q)
assert resp['remote_schemas'][0]['name'] == "simple 1"
assert resp['remote_schemas'][0]['definition']['url'] == 'http://localhost:5000/user-graphql'
assert resp['remote_schemas'][0]['definition']['timeout_seconds'] == 80
@ -127,19 +115,16 @@ class TestRemoteSchemaBasic:
""" revert to original config for remote schema """
q = mk_update_remote_q('simple 1', 'http://localhost:5000/hello-graphql', None, False, 60)
st_code, resp = hge_ctx.v1q(q)
assert st_code == 200, resp
hge_ctx.v1q(q)
def test_update_schema_with_customization_change(self, hge_ctx):
""" call update_remote_schema API and check the details stored in metadata """
# This should succeed since there aren't any conflicting relations or permissions set up
customization = {'type_names': { 'prefix': 'Foo', 'mapping': {'String': 'MyString'}}, 'field_names': [{'parent_type': 'Hello', 'prefix': 'my_', 'mapping': {}}]}
q = mk_update_remote_q('simple 1', 'http://localhost:5000/hello-graphql', None, False, 60, customization=customization)
st_code, resp = hge_ctx.v1q(q)
# This should succeed since there aren't any conflicting relations or permissions set up
assert st_code == 200, resp
hge_ctx.v1q(q)
st_code, resp = hge_ctx.v1q(export_metadata_q)
assert st_code == 200, resp
resp = hge_ctx.v1q(export_metadata_q)
assert resp['remote_schemas'][0]['name'] == "simple 1"
assert resp['remote_schemas'][0]['definition']['url'] == 'http://localhost:5000/hello-graphql'
assert resp['remote_schemas'][0]['definition']['timeout_seconds'] == 60
@ -154,25 +139,21 @@ class TestRemoteSchemaBasic:
""" revert to original config for remote schema """
q = mk_update_remote_q('simple 1', 'http://localhost:5000/hello-graphql', None, False, 60)
st_code, resp = hge_ctx.v1q(q)
assert st_code == 200, resp
hge_ctx.v1q(q)
st_code, resp = hge_ctx.v1q(export_metadata_q)
assert st_code == 200, resp
resp = hge_ctx.v1q(export_metadata_q)
assert 'customization' not in resp['remote_schemas'][0]['definition']
def test_update_schema_with_customization_change_invalid(self, hge_ctx):
""" call update_remote_schema API and check the details stored in metadata """
customization = {'type_names': { 'mapping': {'String': 'Foo', 'Hello': 'Foo'} } }
q = mk_update_remote_q('simple 1', 'http://localhost:5000/hello-graphql', None, False, 60, customization=customization)
st_code, resp = hge_ctx.v1q(q)
assert st_code == 400, resp
resp = hge_ctx.v1q(q, expected_status_code = 400)
assert resp['error'] == 'Inconsistent object: Type name mappings are not distinct; the following types appear more than once: "Foo"'
""" revert to original config for remote schema """
q = mk_update_remote_q('simple 1', 'http://localhost:5000/hello-graphql', None, False, 60)
st_code, resp = hge_ctx.v1q(q)
assert st_code == 200, resp
hge_ctx.v1q(q)
@pytest.mark.allow_server_upgrade_test
def test_introspection(self, hge_ctx):
@ -196,52 +177,43 @@ class TestRemoteSchemaBasic:
def test_add_schema_conflicts(self, hge_ctx):
"""add 2 remote schemas with same node or types"""
q = mk_add_remote_q('simple 2', 'http://localhost:5000/hello-graphql')
st_code, resp = hge_ctx.v1q(q)
assert st_code == 400, resp
resp = hge_ctx.v1q(q, expected_status_code = 400)
assert resp['code'] == 'unexpected'
@pytest.mark.allow_server_upgrade_test
def test_remove_schema_error(self, hge_ctx):
"""remove remote schema which is not added"""
q = mk_delete_remote_q('random name')
st_code, resp = hge_ctx.v1q(q)
assert st_code == 400
resp = hge_ctx.v1q(q, expected_status_code = 400)
assert resp['code'] == 'not-exists'
@pytest.mark.allow_server_upgrade_test
def test_reload_remote_schema(self, hge_ctx):
"""reload a remote schema"""
q = mk_reload_remote_q('simple 1')
st_code, resp = hge_ctx.v1q(q)
assert st_code == 200
hge_ctx.v1q(q)
@pytest.mark.allow_server_upgrade_test
def test_add_second_remote_schema(self, hge_ctx):
"""add 2 remote schemas with different node and types"""
q = mk_add_remote_q('my remote', 'http://localhost:5000/user-graphql')
st_code, resp = hge_ctx.v1q(q)
assert st_code == 200, resp
st_code, resp = hge_ctx.v1q(mk_delete_remote_q('my remote'))
assert st_code == 200, resp
hge_ctx.v1q(q)
hge_ctx.v1q(mk_delete_remote_q('my remote'))
@pytest.mark.allow_server_upgrade_test
def test_json_scalar_dict(self, hge_ctx):
q = mk_add_remote_q('my remote', 'http://localhost:5000/json-scalar-graphql')
st_code, resp = hge_ctx.v1q(q)
assert st_code == 200, resp
hge_ctx.v1q(q)
check_query_f(hge_ctx, self.dir + '/json_scalar.yaml')
st_code, resp = hge_ctx.v1q(mk_delete_remote_q('my remote'))
assert st_code == 200, resp
hge_ctx.v1q(mk_delete_remote_q('my remote'))
@pytest.mark.allow_server_upgrade_test
def test_add_remote_schema_with_interfaces(self, hge_ctx):
"""add a remote schema with interfaces in it"""
q = mk_add_remote_q('my remote interface one', 'http://localhost:5000/character-iface-graphql')
st_code, resp = hge_ctx.v1q(q)
assert st_code == 200, resp
hge_ctx.v1q(q)
check_query_f(hge_ctx, self.dir + '/character_interface_query.yaml')
st_code, resp = hge_ctx.v1q(mk_delete_remote_q('my remote interface one'))
assert st_code == 200, resp
hge_ctx.v1q(mk_delete_remote_q('my remote interface one'))
def test_add_remote_schema_with_interface_err_empty_fields_list(self, hge_ctx):
"""add a remote schema with an interface having no fields"""
@ -282,11 +254,9 @@ class TestRemoteSchemaBasic:
def test_add_remote_schema_with_union(self, hge_ctx):
"""add a remote schema with union in it"""
q = mk_add_remote_q('my remote union one', 'http://localhost:5000/union-graphql')
st_code, resp = hge_ctx.v1q(q)
assert st_code == 200, resp
hge_ctx.v1q(q)
check_query_f(hge_ctx, self.dir + '/search_union_type_query.yaml')
hge_ctx.v1q({"type": "remove_remote_schema", "args": {"name": "my remote union one"}})
assert st_code == 200, resp
def test_add_remote_schema_with_union_err_no_member_types(self, hge_ctx):
"""add a remote schema with a union having no member types"""
@ -305,8 +275,7 @@ class TestRemoteSchemaBasic:
check_query_f(hge_ctx, self.dir + '/add_remote_schema_with_union_err_wrapped_type.yaml')
def test_bulk_remove_add_remote_schema(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir + '/basic_bulk_remove_add.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir + '/basic_bulk_remove_add.yaml')
class TestRemoteSchemaBasicExtensions:
""" basic => no hasura tables are tracked """
@ -321,8 +290,7 @@ class TestRemoteSchemaBasicExtensions:
# Some marked tests in this class will be run as server upgrade tests
if not config.getoption('--skip-schema-setup'):
q = mk_add_remote_q('simple 1', 'http://localhost:5000/hello-graphql-extensions')
st_code, resp = hge_ctx.v1q(q)
assert st_code == 200, resp
hge_ctx.v1q(q)
yield
if request.session.testsfailed > 0 or not config.getoption('--skip-schema-teardown'):
hge_ctx.v1q(self.teardown)
@ -338,34 +306,28 @@ class TestAddRemoteSchemaTbls:
@pytest.fixture(autouse=True)
def transact(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f('queries/remote_schemas/tbls_setup.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f('queries/remote_schemas/tbls_setup.yaml')
yield
st_code, resp = hge_ctx.v1q_f('queries/remote_schemas/tbls_teardown.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f('queries/remote_schemas/tbls_teardown.yaml')
@pytest.mark.allow_server_upgrade_test
def test_add_schema(self, hge_ctx):
""" check if the remote schema is added in the metadata """
st_code, resp = hge_ctx.v1q(export_metadata_q)
assert st_code == 200, resp
resp = hge_ctx.v1q(export_metadata_q)
assert resp['remote_schemas'][0]['name'] == "simple2-graphql"
def test_add_schema_conflicts_with_tables(self, hge_ctx):
"""add remote schema which conflicts with hasura tables"""
q = mk_add_remote_q('simple2', 'http://localhost:5000/hello-graphql')
st_code, resp = hge_ctx.v1q(q)
assert st_code == 400, resp
resp = hge_ctx.v1q(q, expected_status_code = 400)
assert resp['code'] == 'invalid-configuration'
@pytest.mark.allow_server_upgrade_test
def test_add_second_remote_schema(self, hge_ctx):
"""add 2 remote schemas with different node and types"""
q = mk_add_remote_q('my remote2', 'http://localhost:5000/country-graphql')
st_code, resp = hge_ctx.v1q(q)
assert st_code == 200, resp
hge_ctx.v1q(q)
hge_ctx.v1q({"type": "remove_remote_schema", "args": {"name": "my remote2"}})
assert st_code == 200, resp
def test_remote_query(self, hge_ctx):
check_query_f(hge_ctx, self.dir + '/simple2_query.yaml')
@ -375,12 +337,10 @@ class TestAddRemoteSchemaTbls:
@pytest.mark.allow_server_upgrade_test
def test_add_conflicting_table(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir + '/create_conflicting_table.yaml')
assert st_code == 400
resp = hge_ctx.v1q_f(self.dir + '/create_conflicting_table.yaml', expected_status_code = 400)
assert resp['code'] == 'remote-schema-conflicts'
# Drop "user" table which is created in the previous test
st_code, resp = hge_ctx.v1q_f(self.dir + '/drop_user_table.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir + '/drop_user_table.yaml')
def test_introspection(self, hge_ctx):
with open('queries/graphql_introspection/introspection.yaml') as f:
@ -390,8 +350,7 @@ class TestAddRemoteSchemaTbls:
def test_add_schema_duplicate_name(self, hge_ctx):
q = mk_add_remote_q('simple2-graphql', 'http://localhost:5000/country-graphql')
st_code, resp = hge_ctx.v1q(q)
assert st_code == 400, resp
resp = hge_ctx.v1q(q, expected_status_code = 400)
assert resp['code'] == 'already-exists'
@pytest.mark.allow_server_upgrade_test
@ -400,16 +359,12 @@ class TestAddRemoteSchemaTbls:
test that types get merged when the remote schema has a type with the same name,
the same structure, and the same custom scalar
"""
st_code, resp = hge_ctx.v1q_f(self.dir + '/person_table.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir + '/person_table.yaml')
q = mk_add_remote_q('person-graphql', 'http://localhost:5000/person-graphql')
st_code, resp = hge_ctx.v1q(q)
assert st_code == 200, resp
st_code, resp = hge_ctx.v1q_f(self.dir + '/drop_person_table.yaml')
assert st_code == 200, resp
hge_ctx.v1q(q)
hge_ctx.v1q_f(self.dir + '/drop_person_table.yaml')
hge_ctx.v1q({"type": "remove_remote_schema", "args": {"name": "person-graphql"}})
assert st_code == 200, resp
@pytest.mark.allow_server_upgrade_test
def test_remote_schema_forward_headers(self, hge_ctx):
@ -423,8 +378,7 @@ class TestAddRemoteSchemaTbls:
add_remote = mk_add_remote_q('header-graphql',
'http://localhost:5000/header-graphql',
headers=conf_hdrs, client_hdrs=True)
st_code, resp = hge_ctx.v1q(add_remote)
assert st_code == 200, resp
hge_ctx.v1q(add_remote)
q = {'query': '{ wassup }'}
hdrs = {
'x-hasura-test': 'xyzz',
@ -444,9 +398,7 @@ class TestAddRemoteSchemaTbls:
assert 'data' in res
assert res['data']['wassup'] == 'Hello world'
hge_ctx.v1q({'type': 'remove_remote_schema',
'args': {'name': 'header-graphql'}})
assert st_code == 200, resp
hge_ctx.v1q({'type': 'remove_remote_schema', 'args': {'name': 'header-graphql'}})
class TestRemoteSchemaQueriesOverWebsocket:
@ -455,15 +407,12 @@ class TestRemoteSchemaQueriesOverWebsocket:
@pytest.fixture(autouse=True)
def transact(self, hge_ctx, ws_client):
st_code, resp = hge_ctx.v1q_f('queries/remote_schemas/tbls_setup.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f('queries/remote_schemas/tbls_setup.yaml')
ws_client.init_as_admin()
yield
# teardown
st_code, resp = hge_ctx.v1q_f('queries/remote_schemas/tbls_teardown.yaml')
assert st_code == 200, resp
st_code, resp = hge_ctx.v1q(self.teardown)
assert st_code == 200, resp
hge_ctx.v1q_f('queries/remote_schemas/tbls_teardown.yaml')
hge_ctx.v1q(self.teardown)
@pytest.mark.allow_server_upgrade_test
def test_remote_query(self, ws_client):
@ -539,8 +488,7 @@ class TestRemoteSchemaResponseHeaders():
@pytest.fixture(autouse=True)
def transact(self, hge_ctx):
q = mk_add_remote_q('sample-auth', 'http://localhost:5000/auth-graphql')
st_code, resp = hge_ctx.v1q(q)
assert st_code == 200, resp
hge_ctx.v1q(q)
yield
hge_ctx.v1q(self.teardown)
@ -565,11 +513,9 @@ class TestAddRemoteSchemaCompareRootQueryFields:
@pytest.fixture(autouse=True)
def transact(self, hge_ctx):
st_code, resp = hge_ctx.v1q(mk_add_remote_q('default_value_test', self.remote))
assert st_code == 200, resp
hge_ctx.v1q(mk_add_remote_q('default_value_test', self.remote))
yield
st_code, resp = hge_ctx.v1q(mk_delete_remote_q('default_value_test'))
assert st_code == 200, resp
hge_ctx.v1q(mk_delete_remote_q('default_value_test'))
@pytest.mark.allow_server_upgrade_test
def test_schema_check_arg_default_values_and_field_and_arg_types(self, hge_ctx):
@ -600,8 +546,7 @@ class TestRemoteSchemaTimeout:
@pytest.fixture(autouse=True)
def transact(self, hge_ctx):
q = mk_add_remote_q('simple 1', '{{REMOTE_SCHEMAS_WEBHOOK_DOMAIN}}/hello-graphql', timeout = 5)
st_code, resp = hge_ctx.v1q(q)
assert st_code == 200, resp
hge_ctx.v1q(q)
yield
hge_ctx.v1q(self.teardown)
@ -614,10 +559,10 @@ class TestRemoteSchemaTimeout:
# tests for query timeout
assert resp["errors"][0]["extensions"]["internal"]["type"] == "http_exception"
assert resp["errors"][0]["extensions"]["internal"]["message"] == "Response timeout"
# tests that the graphql server url environment variable template is not substituted in the error message
assert resp["errors"][0]["message"] == 'HTTP exception occurred while sending the request to "{{REMOTE_SCHEMAS_WEBHOOK_DOMAIN}}/hello-graphql"'
# wait for graphql server to finish else teardown throws
time.sleep(6)
@ -720,28 +665,23 @@ class TestRemoteSchemaReload:
def test_inconsistent_remote_schema_reload_metadata(self, gql_server, hge_ctx):
# Add remote schema
st_code, resp = hge_ctx.v1q(mk_add_remote_q('simple 1', 'http://127.0.0.1:5991/hello-graphql'))
assert st_code == 200, resp
hge_ctx.v1q(mk_add_remote_q('simple 1', 'http://127.0.0.1:5991/hello-graphql'))
# stop remote graphql server
gql_server.stop_server()
# Reload metadata with remote schemas
st_code, resp = hge_ctx.v1q(reload_metadata_q)
assert st_code == 200, resp
resp = hge_ctx.v1q(reload_metadata_q)
# Check if the remote schema present in inconsistent metadata
assert resp['is_consistent'] == False, resp
assert resp['inconsistent_objects'][0]['type'] == 'remote_schema', resp
# Restart remote graphql server
gql_server.start_server()
# Reload the inconsistent remote schema
st_code, resp = hge_ctx.v1q(mk_reload_remote_q('simple 1'))
assert st_code == 200, resp
hge_ctx.v1q(mk_reload_remote_q('simple 1'))
# Check if metadata is consistent
st_code, resp = hge_ctx.v1q(get_inconsistent_metadata_q)
assert st_code == 200, resp
resp = hge_ctx.v1q(get_inconsistent_metadata_q)
assert resp['is_consistent'] == True, resp
# Delete remote schema
st_code, resp = hge_ctx.v1q(mk_delete_remote_q('simple 1'))
assert st_code == 200, resp
hge_ctx.v1q(mk_delete_remote_q('simple 1'))
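
The reload_metadata_q and get_inconsistent_metadata_q values used in this test are defined outside the hunks shown here; presumably they are plain metadata API payloads along these lines (the exact arguments are an assumption):

reload_metadata_q = {
    "type": "reload_metadata",
    "args": {"reload_remote_schemas": True},
}

get_inconsistent_metadata_q = {
    "type": "get_inconsistent_metadata",
    "args": {},
}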
@pytest.mark.usefixtures('per_class_tests_db_state')
class TestValidateRemoteSchemaQuery:
@ -773,16 +713,14 @@ class TestRemoteSchemaTypePrefix:
# Some marked tests in this class will be run as server upgrade tests
if not config.getoption('--skip-schema-setup'):
q = mk_add_remote_q('simple 2', 'http://localhost:5000/user-graphql', customization=type_prefix_customization("Foo"))
st_code, resp = hge_ctx.v1q(q)
assert st_code == 200, resp
hge_ctx.v1q(q)
yield
if request.session.testsfailed > 0 or not config.getoption('--skip-schema-teardown'):
hge_ctx.v1q(self.teardown)
def test_add_schema(self, hge_ctx):
""" check if the remote schema is added in the metadata """
st_code, resp = hge_ctx.v1q(export_metadata_q)
assert st_code == 200, resp
resp = hge_ctx.v1q(export_metadata_q)
assert resp['remote_schemas'][0]['name'] == "simple 2"
# assert resp['remote_schemas'][0]['definition']['type_prefix'] == "foo"
@ -803,8 +741,7 @@ class TestValidateRemoteSchemaTypePrefixQuery:
config = request.config
if not config.getoption('--skip-schema-setup'):
q = mk_add_remote_q('character-foo', 'http://localhost:5000/character-iface-graphql', customization=type_prefix_customization("Foo"))
st_code, resp = hge_ctx.v1q(q)
assert st_code == 200, resp
hge_ctx.v1q(q)
yield
if request.session.testsfailed > 0 or not config.getoption('--skip-schema-teardown'):
hge_ctx.v1q(self.teardown)
@ -826,8 +763,7 @@ class TestValidateRemoteSchemaFieldPrefixQuery:
if not config.getoption('--skip-schema-setup'):
customization = { "field_names": [{"parent_type": "Character", "prefix": "foo_"},{"parent_type": "Human", "prefix": "foo_"},{"parent_type": "Droid", "prefix": "foo_"}] }
q = mk_add_remote_q('character-foo', 'http://localhost:5000/character-iface-graphql', customization=customization)
st_code, resp = hge_ctx.v1q(q)
assert st_code == 200, resp
hge_ctx.v1q(q)
yield
if request.session.testsfailed > 0 or not config.getoption('--skip-schema-teardown'):
hge_ctx.v1q(self.teardown)
@ -857,8 +793,7 @@ class TestValidateRemoteSchemaNamespaceQuery:
if not config.getoption('--skip-schema-setup'):
customization = { "root_fields_namespace": "foo" }
q = mk_add_remote_q('character-foo', 'http://localhost:5000/character-iface-graphql', customization=customization)
st_code, resp = hge_ctx.v1q(q)
assert st_code == 200, resp
hge_ctx.v1q(q)
yield
if request.session.testsfailed > 0 or not config.getoption('--skip-schema-teardown'):
hge_ctx.v1q(self.teardown)
@ -889,8 +824,7 @@ class TestValidateRemoteSchemaCustomizeAllTheThings:
]
}
q = mk_add_remote_q('character-foo', 'http://localhost:5000/character-iface-graphql', customization=customization)
st_code, resp = hge_ctx.v1q(q)
assert st_code == 200, resp
hge_ctx.v1q(q)
yield
if request.session.testsfailed > 0 or not config.getoption('--skip-schema-teardown'):
hge_ctx.v1q(self.teardown)
@ -909,8 +843,7 @@ class TestRemoteSchemaRequestPayload:
@pytest.fixture(autouse=True)
def transact(self, hge_ctx):
q = mk_add_remote_q('echo request', 'http://localhost:5000/hello-echo-request-graphql')
st_code, resp = hge_ctx.v1q(q)
assert st_code == 200, resp
hge_ctx.v1q(q)
yield
hge_ctx.v1q(self.teardown)
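
Many tests in this file build their payloads with mk_add_remote_q, whose body is cut off by the hunk near the top of the file. A hedged reconstruction of the shape it presumably returns (field names follow the add_remote_schema metadata API; the defaults and any omitted fields are assumptions):

def mk_add_remote_q_sketch(name, url, headers=None, client_hdrs=False, timeout=None, customization=None):
    # Build an add_remote_schema call for the given endpoint.
    return {
        "type": "add_remote_schema",
        "args": {
            "name": name,
            "definition": {
                "url": url,
                "headers": headers,
                "forward_client_headers": client_hdrs,
                "timeout_seconds": timeout,
                "customization": customization,
            },
        },
    }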

View File

@ -454,8 +454,7 @@ class TestStreamingSubscription:
articles_to_insert = []
for i in range(10):
articles_to_insert.append({"id": i + 1, "title": "Article title {}".format(i + 1)})
st_code, resp = insert_many(hge_ctx, {"schema": "hge_tests", "name": "articles"}, articles_to_insert)
assert st_code == 200, resp
insert_many(hge_ctx, {"schema": "hge_tests", "name": "articles"}, articles_to_insert)
if hge_ctx.hge_key is not None:
headers['X-Hasura-Admin-Secret'] = hge_ctx.hge_key
subscrPayload = { 'query': query, 'variables': { 'batch_size': 2 } }

View File

@ -1,10 +1,8 @@
import json
import threading
from urllib.parse import urlparse
import websocket
import pytest
from validate import check_query
from context import PytestConf
if not PytestConf.config.getoption("--test-ws-init-cookie"):
@ -23,11 +21,9 @@ class TestWebsocketInitCookie():
@pytest.fixture(autouse=True)
def transact(self, hge_ctx):
st_code, resp = hge_ctx.v1q_f(self.dir + '/person_table.yaml')
assert st_code == 200, resp
hge_ctx.v1q_f(self.dir + '/person_table.yaml')
yield
assert st_code == 200, resp
st_code, resp = hge_ctx.v1q_f(self.dir + '/drop_person_table.yaml')
hge_ctx.v1q_f(self.dir + '/drop_person_table.yaml')
def _send_query(self, hge_ctx):
ws_url = url(hge_ctx)
@ -92,4 +88,4 @@ class TestWebsocketInitCookie():
print(frame)
assert False
break
it = it + 1
it = it + 1

View File

@ -30,8 +30,7 @@ def insert_many(hge_ctx, table, rows, returning=[], headers = {}):
"returning": returning
}
}
st_code, resp = hge_ctx.v1q(q, headers = headers)
return st_code, resp
return hge_ctx.v1q(q, headers = headers)
def update(hge_ctx, table, where_exp, set_exp, headers = {}):
@ -43,8 +42,7 @@ def update(hge_ctx, table, where_exp, set_exp, headers = {}):
"$set": set_exp
}
}
st_code, resp = hge_ctx.v1q(q, headers = headers)
return st_code, resp
return hge_ctx.v1q(q, headers = headers)
def delete(hge_ctx, table, where_exp, headers = {}):
@ -55,5 +53,4 @@ def delete(hge_ctx, table, where_exp, headers = {}):
"where": where_exp
}
}
st_code, resp = hge_ctx.v1q(q, headers = headers)
return st_code, resp
return hge_ctx.v1q(q, headers = headers)
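
For reference, a payload like the one insert_many builds and hands to v1q looks roughly as follows (values are illustrative; the "objects" key is an assumption based on the /v1/query insert API):

example_insert_payload = {
    "type": "insert",
    "args": {
        "table": {"schema": "hge_tests", "name": "articles"},
        "objects": [{"id": 1, "title": "Article title 1"}],
        "returning": [],
    },
}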