#!/usr/bin/env python3
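"""
Helpers for the GraphQL engine Python test suite: validating event trigger
webhook deliveries, and running YAML-driven query test cases over HTTP and
WebSocket transports with admin-secret, JWT, or webhook authentication.
"""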

import yaml
import json
import os
import base64
import jsondiff
import jwt
import random
import time


def check_keys(keys, obj):
    for k in keys:
        assert k in obj, obj


def check_ev_payload_shape(ev_payload):
    top_level_keys = ["created_at", "event", "id", "table", "trigger"]
    check_keys(top_level_keys, ev_payload)

    event_keys = ["data", "op"]
    check_keys(event_keys, ev_payload['event'])

    trigger_keys = ["name"]
    check_keys(trigger_keys, ev_payload['trigger'])


def validate_event_payload(ev_payload, trig_name, table):
    check_ev_payload_shape(ev_payload)
    assert ev_payload['table'] == table, ev_payload
    assert ev_payload['trigger']['name'] == trig_name, ev_payload


def validate_event_headers(ev_headers, headers):
    for key, value in headers.items():
        v = ev_headers.get(key)
        assert v == value, (key, v)


def validate_event_webhook(ev_webhook_path, webhook_path):
    assert ev_webhook_path == webhook_path


def check_event(hge_ctx, evts_webhook, trig_name, table, operation, exp_ev_data,
                headers={},
                webhook_path='/',
                session_variables={'x-hasura-role': 'admin'}):
    ev_full = evts_webhook.get_event(3)
    validate_event_webhook(ev_full['path'], webhook_path)
    validate_event_headers(ev_full['headers'], headers)
    validate_event_payload(ev_full['body'], trig_name, table)
    ev = ev_full['body']['event']
    assert ev['op'] == operation, ev
    assert ev['session_variables'] == session_variables, ev
    assert ev['data'] == exp_ev_data, ev
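
# For reference, a hypothetical delivery shape that satisfies the checks above
# (keys come from check_ev_payload_shape and check_event; values are
# illustrative only, not taken from a real delivery):
#
#   {
#     "id": "...",
#     "created_at": "...",
#     "table": {...},                   # compared against the `table` argument
#     "trigger": {"name": "<trigger name>"},
#     "event": {
#       "op": "INSERT",
#       "data": {...},                  # compared against exp_ev_data
#       "session_variables": {"x-hasura-role": "admin"}
#     }
#   }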


def test_forbidden_when_admin_secret_reqd(hge_ctx, conf):
    headers = {}
    if 'headers' in conf:
        headers = conf['headers']

    # Test without admin secret
    code, resp = hge_ctx.anyq(conf['url'], conf['query'], headers)
    assert code in [401, 404], "\n" + yaml.dump({
        "expected": "Should be access denied as admin secret is not provided",
        "actual": {
            "code": code,
            "response": resp
        }
    })

    # Test with random admin secret
    headers['X-Hasura-Admin-Secret'] = base64.b64encode(os.urandom(30))
    code, resp = hge_ctx.anyq(conf['url'], conf['query'], headers)
    assert code in [401, 404], "\n" + yaml.dump({
        "expected": "Should be access denied as an incorrect admin secret is provided",
        "actual": {
            "code": code,
            "response": resp
        }
    })


def test_forbidden_webhook(hge_ctx, conf):
    h = {'Authorization': 'Bearer ' + base64.b64encode(base64.b64encode(os.urandom(30))).decode('utf-8')}
    code, resp = hge_ctx.anyq(conf['url'], conf['query'], h)
    assert code in [401, 404], "\n" + yaml.dump({
        "expected": "Should be access denied as it is denied from webhook",
        "actual": {
            "code": code,
            "response": resp
        }
    })


def check_query(hge_ctx, conf, transport='http', add_auth=True):
    headers = {}
    if 'headers' in conf:
        headers = conf['headers']

    # No headers in conf => Admin role
    # Set the X-Hasura-Role header randomly
    # If header is set, jwt/webhook auth will happen
    # Otherwise admin-secret will be set
    if len(headers) == 0 and random.choice([True, False]):
        headers['X-Hasura-Role'] = 'admin'

    if add_auth:
        # Use the hasura role specified in the test case, and create a JWT token
        if hge_ctx.hge_jwt_key is not None and len(headers) > 0 and 'X-Hasura-Role' in headers:
            hClaims = dict()
            hClaims['X-Hasura-Allowed-Roles'] = [headers['X-Hasura-Role']]
            hClaims['X-Hasura-Default-Role'] = headers['X-Hasura-Role']
            for key in headers:
                if key != 'X-Hasura-Role':
                    hClaims[key] = headers[key]
            claim = {
                "sub": "foo",
                "name": "bar",
                "https://hasura.io/jwt/claims": hClaims
            }
            headers['Authorization'] = 'Bearer ' + jwt.encode(
                claim, hge_ctx.hge_jwt_key, algorithm='RS512').decode('UTF-8')

        # Use the hasura role specified in the test case, and create an
        # authorization token which will be verified by webhook
        if hge_ctx.hge_webhook is not None and len(headers) > 0:
            if not hge_ctx.webhook_insecure:
                # Check whether the output is also forbidden when webhook returns forbidden
                test_forbidden_webhook(hge_ctx, conf)
            headers['X-Hasura-Auth-Mode'] = 'webhook'
            headers_new = dict()
            headers_new['Authorization'] = 'Bearer ' + base64.b64encode(
                json.dumps(headers).encode('utf-8')).decode('utf-8')
            headers = headers_new

        # The case as admin with admin-secret and jwt/webhook
        elif ((hge_ctx.hge_webhook is not None or hge_ctx.hge_jwt_key is not None)
              and hge_ctx.hge_key is not None and len(headers) == 0):
            headers['X-Hasura-Admin-Secret'] = hge_ctx.hge_key

        # The case as admin with only admin-secret
        elif hge_ctx.hge_key is not None and hge_ctx.hge_webhook is None and hge_ctx.hge_jwt_key is None:
            # Test whether it is forbidden when incorrect/no admin_secret is specified
            test_forbidden_when_admin_secret_reqd(hge_ctx, conf)
            headers['X-Hasura-Admin-Secret'] = hge_ctx.hge_key

    assert transport in ['websocket', 'http'], "Unknown transport type " + transport
    if transport == 'websocket':
        assert 'response' in conf
        assert conf['url'].endswith('/graphql')
        print('running on websocket')
        return validate_gql_ws_q(
            hge_ctx,
            conf['query'],
            headers,
            conf['response'],
            True
        )
    elif transport == 'http':
        print('running on http')
        return validate_http_anyq(
            hge_ctx,
            conf['url'],
            conf['query'],
            headers,
            conf['status'],
            conf.get('response')
        )


def validate_gql_ws_q(hge_ctx, query, headers, exp_http_response, retry=False):
    ws_client = hge_ctx.ws_client
    if not headers or len(headers) == 0:
        ws_client.init({})

    query_resp = ws_client.send_query(query, headers=headers, timeout=15)
    resp = next(query_resp)

    if resp.get('type') == 'complete':
        if retry:
            # Got query complete before payload. Retry once more
            print("Got query complete before getting query response payload. Retrying")
            ws_client.recreate_conn()
            time.sleep(3)
            return validate_gql_ws_q(hge_ctx, query, headers, exp_http_response, False)
        else:
            assert resp['type'] in ['data', 'error'], resp

    if 'errors' in exp_http_response:
        assert resp['type'] in ['data', 'error'], resp

        exp_ws_response1 = exp_http_response['errors'][0]['extensions']
        exp_ws_response1['error'] = exp_http_response['errors'][0]['message']

        exp_ws_response2 = {'errors': []}
        for err in exp_http_response['errors']:
            ws_err = err['extensions']
            ws_err['error'] = err['message']
            exp_ws_response2['errors'].append(ws_err)
        exp_ws_response2['data'] = None

        if resp['type'] == 'error':
            exp_ws_response = exp_ws_response1
        elif resp['type'] == 'data':
            exp_ws_response = exp_ws_response2
    else:
        assert resp['type'] == 'data', resp
        exp_ws_response = exp_http_response

    assert 'payload' in resp, resp
    assert resp['payload'] == exp_ws_response, yaml.dump({
        'response': resp['payload'],
        'expected': exp_ws_response,
        'diff': jsondiff.diff(exp_ws_response, resp['payload'])
    })
    respDone = next(query_resp)
    assert respDone['type'] == 'complete'
    return resp['payload']


def validate_http_anyq(hge_ctx, url, query, headers, exp_code, exp_response):
    code, resp = hge_ctx.anyq(url, query, headers)
    print(headers)
    assert code == exp_code, resp
    if exp_response:
        assert json_ordered(resp) == json_ordered(exp_response), yaml.dump({
            'response': resp,
            'expected': exp_response,
            'diff': jsondiff.diff(exp_response, resp)
        })
    return resp


def check_query_f(hge_ctx, f, transport='http', add_auth=True):
    print("Test file: " + f)
    hge_ctx.may_skip_test_teardown = False
    print("transport=" + transport)
    with open(f) as c:
        conf = yaml.safe_load(c)
        if isinstance(conf, list):
            for sconf in conf:
                check_query(hge_ctx, sconf)
        else:
            if conf['status'] != 200:
                hge_ctx.may_skip_test_teardown = True
            check_query(hge_ctx, conf, transport, add_auth)
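
# For reference, a minimal sketch of the YAML test case shape that
# check_query_f loads. The key names (url, status, headers, query, response)
# are the ones read by check_query and validate_http_anyq above; the concrete
# values here are illustrative only:
#
#   url: /v1/graphql
#   status: 200
#   headers:
#     X-Hasura-Role: user
#   query:
#     query: |
#       query { author { id name } }
#   response:
#     data:
#       author: []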


def json_ordered(obj):
    if isinstance(obj, dict):
        return sorted((k, json_ordered(v)) for k, v in obj.items())
    if isinstance(obj, list):
        return list(json_ordered(x) for x in obj)
    else:
        return obj
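
# A quick illustration (hypothetical values) of why validate_http_anyq compares
# responses via json_ordered: dict key order is ignored, list order still matters.
#
#   json_ordered({'a': 1, 'b': [2, 3]}) == json_ordered({'b': [2, 3], 'a': 1})  # True
#   json_ordered({'xs': [1, 2]}) == json_ordered({'xs': [2, 1]})                # False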