run graphql tests on both http and websocket; add parallelism (close #1868) (#1921)

Examples 
1)  `
pytest --hge-urls "http://127.0.0.1:8080" --pg-urls "postgresql://admin@127.0.0.1:5432/hge_tests" -vv
`
2)  `pytest --hge-urls "http://127.0.0.1:8080"   "http://127.0.0.1:8081" --pg-urls "postgresql://admin@127.0.0.1:5432/hge_tests"  "postgresql://admin@127.0.0.1:5432/hge_tests2" -vv
`
### Solution and Design
<!-- How is this issue solved/fixed? What is the design? -->
<!-- It's better if we elaborate -->
#### Reducing execution time of tests
- The Schema setup and teardown, which were earlier done per test method, usually takes around 1 sec. 
- For mutations, the model has now been changed to only do schema setup and teardown once per test class.
-  A data setup and teardown will be done once per test instead (usually takes ~10ms).
- For the test class to get this behaviour, one can extend the class `DefaultTestMutations`.
    - The function `dir()` should be defined, which returns the location of the configuration folder.
    - Inside the configuration folder, there should be 
        - Files `<conf_dir>/schema_setup.yaml` and `<conf_dir>/schema_teardown.yaml`, which has the metadata query executed during schema setup and teardown respectively
        - Files named `<conf_dir>/values_setup.yaml` and `<conf_dir>/values_teardown.yaml`. These files are executed to setup and remove data from the tables respectively. 

#### Running Graphql queries on both http and websockets
- Each GraphQL query/mutation is run on both the HTTP and websocket protocols
- Pytests test parameterisation is used to achieve this
- The errors over websockets are slightly different from that on HTTP
   - The code takes care of converting the errors in HTTP to errors in websockets

#### Parallel execution of tests
- The plugin pytest-xdist helps in running tests on parallel workers.
- We are using this plugin to group tests by file and run on different workers.
- Parallel test worker processes operate on separate postgres databases (and separate graphql-engines connected to these databases). Thus tests on one worker will not affect the tests on the other worker.
- With two workers, this decreases execution times by half, as the tests on event triggers usually takes a long time, but does not consume much CPU.
This commit is contained in:
nizar-m 2019-04-08 12:52:38 +05:30 committed by Shahidh K Muhammed
parent 3708f95036
commit a40bf10b9f
108 changed files with 2205 additions and 1601 deletions

3
.circleci/.gitignore vendored Normal file
View File

@ -0,0 +1,3 @@
test-server-flags-output
test-server-output
pgbouncer

View File

@ -109,10 +109,17 @@ refs:
mkdir -p /usr/share/man/man{1,7}
apt-get update
apt install --yes pgbouncer jq curl postgresql-client
- run:
name: Ensure databases are present
environment:
DATABASE_URL: 'postgres://gql_test:@localhost:5432/gql_test'
command: |
psql "$DATABASE_URL" -c "SELECT 1 FROM pg_database WHERE datname = 'gql_test2'" | grep -q -F '(1 row)' || psql "$DATABASE_URL" -c 'CREATE DATABASE gql_test2;'
- run:
name: Run Python tests
environment:
HASURA_GRAPHQL_DATABASE_URL: 'postgres://gql_test:@localhost:5432/gql_test'
HASURA_GRAPHQL_DATABASE_URL_2: 'postgres://gql_test:@localhost:5432/gql_test2'
GRAPHQL_ENGINE: '/build/_server_output/graphql-engine'
command: |
OUTPUT_FOLDER=/build/_server_test_output/$PG_VERSION .circleci/test-server.sh

View File

@ -1,11 +0,0 @@
[databases]
hs_hge_test = host=localhost port=5432 dbname=hs_hge_test user=gql_test
[pgbouncer]
listen_port = 6543
listen_addr = 127.0.0.1
logfile = pgbouncer/pgbouncer.log
pidfile = pgbouncer/pgbouncer.pid
auth_type = md5
auth_file = pgbouncer/users.txt
admin_users = postgres

View File

@ -4,8 +4,21 @@ set -euo pipefail
### Functions
stop_services() {
kill -INT $PID
kill $WH_PID
kill -INT $HGE_PIDS || true
kill $WH_PID || true
kill -INT $WHC_PID || true
}
time_elapsed(){
	# Print the script's elapsed wall-clock time as "(MM:SS)", based on
	# bash's builtin SECONDS counter.
	# Use the standard $(( )) arithmetic expansion; the original $[ ]
	# form is deprecated and undocumented in modern bash.
	printf "(%02d:%02d)" $((SECONDS / 60)) $((SECONDS % 60))
}
fail_if_port_busy() {
	# Abort the whole run if something is already listening on the given
	# port, so a stale server from a previous run cannot skew the tests.
	local PORT=$1
	if nc -z localhost $PORT ; then
		echo "Port $PORT is busy. Exiting"
		exit 1
	fi
}
wait_for_port() {
@ -61,22 +74,61 @@ IP.1 = 127.0.0.1'
cd "$CUR_DIR"
}
combine_hpc_reports() {
(stack --allow-different-user exec -- hpc combine graphql-engine.tix graphql-engine-combined.tix --union > graphql-engine-combined.tix2 && mv graphql-engine-combined.tix2 graphql-engine-combined.tix ) || true
rm graphql-engine.tix || true
combine_all_hpc_reports() {
	# Merge every per-server hpc coverage file (accumulated colon-separated
	# in TIX_FILES by run_hge_with_args) into one combined report at
	# $OUTPUT_FOLDER/graphql-engine.tix.
	combined_file="${OUTPUT_FOLDER}/graphql-engine.tix"
	combined_file_intermediate="${OUTPUT_FOLDER}/hpc/graphql-engine-combined-intermediate.tix"
	rm -f "$combined_file"
	# Split TIX_FILES on ':' into an array; restore IFS immediately after.
	IFS=: tix_files_arr=($TIX_FILES)
	unset IFS
	for tix_file in "${tix_files_arr[@]}"
	do
	# A server that never started (or never exited cleanly) leaves no file.
	if ! [ -f "$tix_file" ] ; then
	continue
	fi
	if [ -f "$combined_file" ] ; then
	# hpc cannot combine in place: write to an intermediate file, then
	# move it over. `|| true` keeps coverage hiccups from failing the run.
	(set -x && stack --allow-different-user exec -- hpc combine "$combined_file" "$tix_file" --union --output="$combined_file_intermediate" && set +x && mv "$combined_file_intermediate" "$combined_file" && rm "$tix_file" ) || true
	else
	# The first report found seeds the combined file.
	mv "$tix_file" "$combined_file" || true
	fi
	done
}
kill_hge_and_combine_hpc_reports() {
kill -INT $PID
wait $PID || true
combine_hpc_reports
kill_hge_servers() {
	# Gracefully stop every graphql-engine started by run_hge_with_args.
	# SIGINT (rather than SIGKILL) lets each server flush its hpc .tix
	# coverage data on shutdown; the PID list is then reset.
	kill -INT $HGE_PIDS || true
	wait $HGE_PIDS || true
	HGE_PIDS=""
}
run_hge_with_args() {
	# Launch one graphql-engine instance in the background with the given
	# arguments. Each launch gets its own numbered hpc .tix file (recorded
	# in TIX_FILES for combine_all_hpc_reports) and its own log file; the
	# PID is appended to HGE_PIDS so kill_hge_servers can stop it later.
	i=$((TIX_FILE_INDEX++))
	export HPCTIXFILE="${OUTPUT_FOLDER}/hpc/graphql-engine-${i}-${TEST_TYPE}.tix"
	rm -f "$HPCTIXFILE"
	TIX_FILES="$TIX_FILES:$HPCTIXFILE"
	set -x
	# Redirect stdout first, then dup stderr onto it. The original order
	# (`2>&1 > file`) pointed stderr at the terminal instead of the log;
	# the rest of this script consistently uses `> file 2>&1`.
	"$GRAPHQL_ENGINE" "$@" > "$OUTPUT_FOLDER/graphql-engine-${i}-${TEST_TYPE}.log" 2>&1 & HGE_PIDS="$HGE_PIDS $!"
	set +x
}
start_multiple_hge_servers() {
	# Start the primary HGE server on 8080, plus a second one on 8081 when
	# a second database URL is configured (needed for parallel xdist workers).
	run_hge_with_args --database-url "$HASURA_GRAPHQL_DATABASE_URL" serve "$@"
	# Quote the expansion: with an unquoted empty value the test collapses
	# to `[ -n ]`, which is always true, so a second server would be
	# started (and waited on) even without a second database.
	if [ -n "${HASURA_GRAPHQL_DATABASE_URL_2:-}" ] ; then
		run_hge_with_args --database-url "$HASURA_GRAPHQL_DATABASE_URL_2" serve --server-port 8081 "$@"
		wait_for_port 8081
	fi
	wait_for_port 8080
}
if [ -z "${HASURA_GRAPHQL_DATABASE_URL:-}" ] ; then
echo "Env var HASURA_GRAPHQL_DATABASE_URL is not set"
exit 1
fi
if [ -z "${HASURA_GRAPHQL_DATABASE_URL_2:-}" ] ; then
echo "Env var HASURA_GRAPHQL_DATABASE_URL_2 is not set"
exit 1
fi
if ! stack --allow-different-user exec which hpc ; then
echo "hpc not found; Install it with 'stack install hpc'"
exit 1
@ -101,6 +153,11 @@ PYTEST_ROOT="$CIRCLECI_FOLDER/../server/tests-py"
OUTPUT_FOLDER=${OUTPUT_FOLDER:-"$CIRCLECI_FOLDER/test-server-output"}
mkdir -p "$OUTPUT_FOLDER"
TEST_TYPE="no-auth"
HPCTIXFILE=""
TIX_FILE_INDEX="1"
TIX_FILES=""
cd $PYTEST_ROOT
if ! stack --allow-different-user exec -- which graphql-engine > /dev/null && [ -z "${GRAPHQL_ENGINE:-}" ] ; then
@ -114,95 +171,119 @@ if ! [ -x "$GRAPHQL_ENGINE" ] ; then
fi
RUN_WEBHOOK_TESTS=true
for port in 8080 8081 9876 5592
do
fail_if_port_busy $port
done
echo -e "\nINFO: GraphQL Executable : $GRAPHQL_ENGINE"
echo -e "INFO: Logs Folder : $OUTPUT_FOLDER\n"
pip3 install -r requirements.txt
mkdir -p "$OUTPUT_FOLDER"
mkdir -p "$OUTPUT_FOLDER/hpc"
export EVENT_WEBHOOK_HEADER="MyEnvValue"
export HGE_URL="http://localhost:8080"
export HGE_URL_2=""
if [ -n ${HASURA_GRAPHQL_DATABASE_URL_2:-} ] ; then
HGE_URL_2="http://localhost:8081"
fi
export WEBHOOK_FROM_ENV="http://127.0.0.1:5592"
export HASURA_GRAPHQL_STRINGIFY_NUMERIC_TYPES=true
PID=""
HGE_PIDS=""
WH_PID=""
WHC_PID=""
HS_PID=""
trap stop_services ERR
trap stop_services INT
echo -e "\n<########## TEST GRAPHQL-ENGINE WITHOUT ADMIN SECRET ###########################################>\n"
run_pytest_parallel() {
	# Run the pytest suite. With a second database/HGE pair available, fan
	# out over two xdist workers grouped by file (--dist=loadfile);
	# otherwise run on a single worker.
	trap stop_services ERR
	# Quote the expansion: an unquoted empty value makes `[ -n ]` always
	# true, which would select the two-worker branch with a blank URL.
	if [ -n "${HASURA_GRAPHQL_DATABASE_URL_2:-}" ] ; then
		set -x
		pytest -vv --hge-urls "$HGE_URL" "${HGE_URL_2:-}" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" "${HASURA_GRAPHQL_DATABASE_URL_2:-}" -n 2 --dist=loadfile "$@"
		set +x
	else
		set -x
		pytest -vv --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" -n 1 "$@"
		set +x
	fi
}
"$GRAPHQL_ENGINE" serve > "$OUTPUT_FOLDER/graphql-engine.log" & PID=$!
echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITHOUT ADMIN SECRET ###########################################>\n"
TEST_TYPE="no-auth"
wait_for_port 8080
start_multiple_hge_servers
pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL"
run_pytest_parallel
kill -INT $PID
sleep 4
mv graphql-engine.tix graphql-engine-combined.tix || true
kill_hge_servers
##########
echo -e "\n<########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET #####################################>\n"
echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET #####################################>\n"
TEST_TYPE="admin-secret"
export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM"
"$GRAPHQL_ENGINE" serve >> "$OUTPUT_FOLDER/graphql-engine.log" & PID=$!
start_multiple_hge_servers
wait_for_port 8080
run_pytest_parallel --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET"
pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET"
kill_hge_and_combine_hpc_reports
kill_hge_servers
##########
echo -e "\n<########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET AND JWT #####################################>\n"
echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET AND JWT #####################################>\n"
TEST_TYPE="jwt"
init_jwt
export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/jwt_public.key)" '{ type: "RS512", key: $key }')"
"$GRAPHQL_ENGINE" serve >> "$OUTPUT_FOLDER/graphql-engine.log" & PID=$!
start_multiple_hge_servers
pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-jwt-key-file="$OUTPUT_FOLDER/ssl/jwt_private.key" --hge-jwt-conf="$HASURA_GRAPHQL_JWT_SECRET"
run_pytest_parallel --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-jwt-key-file="$OUTPUT_FOLDER/ssl/jwt_private.key" --hge-jwt-conf="$HASURA_GRAPHQL_JWT_SECRET"
kill_hge_and_combine_hpc_reports
kill_hge_servers
unset HASURA_GRAPHQL_JWT_SECRET
echo -e "\n<########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET AND JWT (in stringified mode) #####################################>\n"
echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET AND JWT (in stringified mode) #####################################>\n"
TEST_TYPE="jwt-stringified"
export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/jwt_public.key)" '{ type: "RS512", key: $key , claims_format: "stringified_json"}')"
"$GRAPHQL_ENGINE" serve >> "$OUTPUT_FOLDER/graphql-engine.log" & PID=$!
run_hge_with_args serve
wait_for_port 8080
pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-jwt-key-file="$OUTPUT_FOLDER/ssl/jwt_private.key" --hge-jwt-conf="$HASURA_GRAPHQL_JWT_SECRET" test_jwt.py
pytest -n 1 -vv --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-jwt-key-file="$OUTPUT_FOLDER/ssl/jwt_private.key" --hge-jwt-conf="$HASURA_GRAPHQL_JWT_SECRET" test_jwt.py
kill_hge_and_combine_hpc_reports
kill_hge_servers
unset HASURA_GRAPHQL_JWT_SECRET
# test with CORS modes
echo -e "\n<########## TEST GRAPHQL-ENGINE WITH CORS DOMAINS ########>\n"
echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH CORS DOMAINS ########>\n"
export HASURA_GRAPHQL_CORS_DOMAIN="http://*.localhost, http://localhost:3000, https://*.foo.bar.com"
TEST_TYPE="cors-domains"
"$GRAPHQL_ENGINE" serve >> "$OUTPUT_FOLDER/graphql-engine.log" 2>&1 & PID=$!
run_hge_with_args serve
wait_for_port 8080
pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-cors test_cors.py
pytest -n 1 -vv --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-cors test_cors.py
kill_hge_and_combine_hpc_reports
kill_hge_servers
unset HASURA_GRAPHQL_CORS_DOMAIN
# test websocket transport with initial cookie header
echo -e "\n<########## TEST GRAPHQL-ENGINE WITH COOKIE IN WEBSOCKET INIT ########>\n"
echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH COOKIE IN WEBSOCKET INIT ########>\n"
TEST_TYPE="ws-init-cookie-read-cors-enabled"
export HASURA_GRAPHQL_AUTH_HOOK="http://localhost:9876/auth"
export HASURA_GRAPHQL_AUTH_HOOK_MODE="POST"
@ -210,85 +291,79 @@ python3 test_cookie_webhook.py > "$OUTPUT_FOLDER/cookie_webhook.log" 2>&1 & WHC
wait_for_port 9876
"$GRAPHQL_ENGINE" serve >> "$OUTPUT_FOLDER/graphql-engine.log" 2>&1 & PID=$!
run_hge_with_args serve
wait_for_port 8080
echo "$(time_elapsed): testcase 1: read cookie, cors enabled"
pytest -n 1 -vv --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-ws-init-cookie=read test_websocket_init_cookie.py
kill_hge_servers
echo "$(time_elapsed): testcase 2: no read cookie, cors disabled"
TEST_TYPE="ws-init-cookie-noread"
run_hge_with_args serve --disable-cors
wait_for_port 8080
echo "testcase 1: read cookie, cors enabled"
pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-ws-init-cookie=read test_websocket_init_cookie.py
pytest -n 1 -vv --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-ws-init-cookie=noread test_websocket_init_cookie.py
kill -INT $PID
sleep 1
kill_hge_servers
echo "testcase 2: no read cookie, cors disabled"
"$GRAPHQL_ENGINE" serve --disable-cors >> "$OUTPUT_FOLDER/graphql-engine.log" 2>&1 & PID=$!
wait_for_port 8080
pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-ws-init-cookie=noread test_websocket_init_cookie.py
kill -INT $PID
sleep 1
echo "testcase 3: read cookie, cors disabled and ws-read-cookie"
echo "$(time_elapsed): testcase 3: read cookie, cors disabled and ws-read-cookie"
TEST_TYPE="ws-init-cookie-read-cors-disabled"
export HASURA_GRAPHQL_WS_READ_COOKIE="true"
"$GRAPHQL_ENGINE" serve --disable-cors >> "$OUTPUT_FOLDER/graphql-engine.log" 2>&1 & PID=$!
run_hge_with_args serve --disable-cors
wait_for_port 8080
pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-ws-init-cookie=read test_websocket_init_cookie.py
pytest -n 1 -vv --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-ws-init-cookie=read test_websocket_init_cookie.py
kill -INT $PID
kill -INT $WHC_PID
kill_hge_servers
kill $WHC_PID
unset HASURA_GRAPHQL_WS_READ_COOKIE
unset HASURA_GRAPHQL_AUTH_HOOK
unset HASURA_GRAPHQL_AUTH_HOOK_MODE
sleep 4
combine_hpc_reports
echo -e "\n<########## TEST GRAPHQL-ENGINE WITH GRAPHQL DISABLED ########>\n"
echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH GRAPHQL DISABLED ########>\n"
TEST_TYPE="ws-graphql-api-disabled"
export HASURA_GRAPHQL_ENABLED_APIS="metadata"
"$GRAPHQL_ENGINE" serve >> "$OUTPUT_FOLDER/graphql-engine.log" 2>&1 & PID=$!
run_hge_with_args serve
wait_for_port 8080
pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-graphql-disabled test_apis_disabled.py
pytest -n 1 -vv --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-graphql-disabled test_apis_disabled.py
kill_hge_and_combine_hpc_reports
kill_hge_servers
unset HASURA_GRAPHQL_ENABLED_APIS
"$GRAPHQL_ENGINE" serve --enabled-apis metadata >> "$OUTPUT_FOLDER/graphql-engine.log" 2>&1 & PID=$!
run_hge_with_args serve --enabled-apis metadata
wait_for_port 8080
pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-graphql-disabled test_apis_disabled.py
pytest -n 1 -vv --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-graphql-disabled test_apis_disabled.py
kill_hge_and_combine_hpc_reports
kill_hge_servers
echo -e "\n<########## TEST GRAPHQL-ENGINE WITH METADATA DISABLED ########>\n"
echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH METADATA DISABLED ########>\n"
TEST_TYPE="ws-metadata-api-disabled"
export HASURA_GRAPHQL_ENABLED_APIS="graphql"
"$GRAPHQL_ENGINE" serve >> "$OUTPUT_FOLDER/graphql-engine.log" 2>&1 & PID=$!
run_hge_with_args serve
wait_for_port 8080
pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-metadata-disabled test_apis_disabled.py
pytest -n 1 -vv --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-metadata-disabled test_apis_disabled.py
kill_hge_and_combine_hpc_reports
kill_hge_servers
unset HASURA_GRAPHQL_ENABLED_APIS
"$GRAPHQL_ENGINE" serve --enabled-apis graphql >> "$OUTPUT_FOLDER/graphql-engine.log" 2>&1 & PID=$!
run_hge_with_args serve --enabled-apis graphql
wait_for_port 8080
pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-metadata-disabled test_apis_disabled.py
pytest -n 1 -vv --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-metadata-disabled test_apis_disabled.py
kill_hge_and_combine_hpc_reports
kill_hge_servers
# webhook tests
@ -300,62 +375,61 @@ fi
if [ "$RUN_WEBHOOK_TESTS" == "true" ] ; then
echo -e "\n<########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET & WEBHOOK (GET) #########################>\n"
TEST_TYPE="post-webhook"
echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET & WEBHOOK (GET) #########################>\n"
export HASURA_GRAPHQL_AUTH_HOOK="https://localhost:9090/"
init_ssl
"$GRAPHQL_ENGINE" serve >> "$OUTPUT_FOLDER/graphql-engine.log" 2>&1 & PID=$!
start_multiple_hge_servers
python3 webhook.py 9090 "$OUTPUT_FOLDER/ssl/webhook-key.pem" "$OUTPUT_FOLDER/ssl/webhook.pem" > "$OUTPUT_FOLDER/webhook.log" 2>&1 & WH_PID=$!
wait_for_port 8080
wait_for_port 9090
pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-webhook="$HASURA_GRAPHQL_AUTH_HOOK"
run_pytest_parallel --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-webhook="$HASURA_GRAPHQL_AUTH_HOOK"
kill_hge_and_combine_hpc_reports
kill_hge_servers
echo -e "\n<########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET & WEBHOOK (POST) #########################>\n"
export HASURA_GRAPHQL_AUTH_HOOK_MODE="POST"
echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET & WEBHOOK (POST) #########################>\n"
TEST_TYPE="get-webhook"
export HASURA_GRAPHQL_AUTH_HOOK_MODE="POST"
"$GRAPHQL_ENGINE" serve >> "$OUTPUT_FOLDER/graphql-engine.log" 2>&1 & PID=$!
start_multiple_hge_servers
wait_for_port 8080
pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-webhook="$HASURA_GRAPHQL_AUTH_HOOK"
run_pytest_parallel --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-webhook="$HASURA_GRAPHQL_AUTH_HOOK"
rm /etc/ssl/certs/webhook.crt
update-ca-certificates
kill_hge_and_combine_hpc_reports
kill_hge_servers
echo -e "\n<########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET & HTTPS INSECURE WEBHOOK (GET) ########>\n"
export HASURA_GRAPHQL_AUTH_HOOK_MODE="GET"
echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET & HTTPS INSECURE WEBHOOK (GET) ########>\n"
TEST_TYPE="insecure-webhook"
export HASURA_GRAPHQL_AUTH_HOOK_MODE="GET"
"$GRAPHQL_ENGINE" serve >> "$OUTPUT_FOLDER/graphql-engine.log" 2>&1 & PID=$!
run_hge_with_args serve
wait_for_port 8080
pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-webhook="$HASURA_GRAPHQL_AUTH_HOOK" --test-webhook-insecure test_webhook_insecure.py
pytest -n 1 -vv --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-webhook="$HASURA_GRAPHQL_AUTH_HOOK" --test-webhook-insecure test_webhook_insecure.py
kill_hge_and_combine_hpc_reports
kill_hge_servers
echo -e "\n<########## TEST GRAPHQL-ENGINE WITH ADMIN_SECRET & HTTPS INSECURE WEBHOOK (POST) ########>\n"
export HASURA_GRAPHQL_AUTH_HOOK_MODE="POST"
echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH ADMIN_SECRET & HTTPS INSECURE WEBHOOK (POST) ########>\n"
TEST_TYPE="insecure-webhook-with-admin-secret"
export HASURA_GRAPHQL_AUTH_HOOK_MODE="POST"
"$GRAPHQL_ENGINE" serve >> "$OUTPUT_FOLDER/graphql-engine.log" 2>&1 & PID=$!
run_hge_with_args serve
wait_for_port 8080
pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-webhook="$HASURA_GRAPHQL_AUTH_HOOK" --test-webhook-insecure test_webhook_insecure.py
pytest -n 1 -vv --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-webhook="$HASURA_GRAPHQL_AUTH_HOOK" --test-webhook-insecure test_webhook_insecure.py
kill_hge_and_combine_hpc_reports
kill_hge_servers
kill $WH_PID
fi
# horizontal scale test
@ -363,33 +437,62 @@ unset HASURA_GRAPHQL_AUTH_HOOK
unset HASURA_GRAPHQL_AUTH_HOOK_MODE
unset HASURA_GRAPHQL_ADMIN_SECRET
echo -e "\n<########## TEST GRAPHQL-ENGINE WITH HORIZONTAL SCALING ########>\n"
echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH HORIZONTAL SCALING ########>\n"
TEST_TYPE="horizontal-scaling"
HASURA_HS_TEST_DB='postgres://postgres:postgres@localhost:6543/hs_hge_test'
psql "$HASURA_GRAPHQL_DATABASE_URL" -c "create database hs_hge_test;"
if ! psql "$HASURA_GRAPHQL_DATABASE_URL" -c "SELECT 1 FROM pg_database WHERE datname = 'hs_hge_test'" | grep -q -F '(1 row)'
then
psql "$HASURA_GRAPHQL_DATABASE_URL" -c 'CREATE DATABASE hs_hge_test;'
fi
pgUserInfo=$( python3 -c '
import os
from urllib.parse import urlparse
uri = urlparse( os.environ["HASURA_GRAPHQL_DATABASE_URL"] )
if uri.password:
print("password="+uri.password+" user="+uri.username)
else:
print("user="+uri.username)' )
pgDbInfo=$(psql "$HASURA_GRAPHQL_DATABASE_URL" -c "SELECT concat(' host=',inet_server_addr(),' port=', inet_server_port(),' dbname=',current_database())" | sed -n '3 p')
# create pgbouncer user
useradd pgbouncer
id pgbouncer || useradd pgbouncer
cd $CIRCLECI_FOLDER
mkdir -p pgbouncer
chown -R pgbouncer:pgbouncer pgbouncer
echo '[databases]
hs_hge_test = '"$pgDbInfo" "$pgUserInfo"'
[pgbouncer]
listen_port = 6543
listen_addr = 127.0.0.1
logfile = pgbouncer/pgbouncer.log
pidfile = pgbouncer/pgbouncer.pid
auth_type = md5
auth_file = pgbouncer/users.txt
admin_users = postgres' > pgbouncer/pgbouncer.ini
# start pgbouncer
pgbouncer -u pgbouncer -d pgbouncer/pgbouncer.ini
cd $PYTEST_ROOT
sleep 2
# start 1st server
"$GRAPHQL_ENGINE" --database-url "$HASURA_HS_TEST_DB" serve >> "$OUTPUT_FOLDER/graphql-engine.log" 2>&1 & PID=$!
run_hge_with_args --database-url "$HASURA_HS_TEST_DB" serve
wait_for_port 8080
# start 2nd server
"$GRAPHQL_ENGINE" --database-url "$HASURA_HS_TEST_DB" serve \
--server-port 8081 \
>> "$OUTPUT_FOLDER/hs-graphql-engine.log" 2>&1 & HS_PID=$!
run_hge_with_args --database-url "$HASURA_HS_TEST_DB" serve \
--server-port 8081
wait_for_port 8081
# run test
pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --test-hge-scale-url="http://localhost:8081" test_horizontal_scale.py
pytest -vv --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --test-hge-scale-url="http://localhost:8081" test_horizontal_scale.py
# Shutdown pgbouncer
psql "postgres://postgres:postgres@localhost:6543/pgbouncer" -c "SHUTDOWN;" || true
@ -401,23 +504,24 @@ pgbouncer -u pgbouncer -d pgbouncer/pgbouncer.ini
cd $PYTEST_ROOT
# sleep for 30 seconds
sleep 30
# sleep for 20 seconds
sleep 20
# run test
pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --test-hge-scale-url="http://localhost:8081" test_horizontal_scale.py
pytest -vv --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --test-hge-scale-url="http://localhost:8081" test_horizontal_scale.py
# Shutdown pgbouncer
psql "postgres://postgres:postgres@localhost:6543/pgbouncer" -c "SHUTDOWN;" || true
kill $PID
kill $HS_PID
kill_hge_servers
psql "$HASURA_GRAPHQL_DATABASE_URL" -c "drop database hs_hge_test;"
sleep 4
combine_hpc_reports
unset HASURA_HS_TEST_DB
# end horizontal scale test
mv graphql-engine-combined.tix "$OUTPUT_FOLDER/graphql-engine.tix" || true
echo -e "\n$(time_elapsed): <########## COMBINE ALL HPC REPORTS ########>\n"
combine_all_hpc_reports || true
echo -e "\n$(time_elapsed): <########## DONE ########>\n"

View File

@ -106,7 +106,7 @@ export WEBHOOK_FROM_ENV="http://127.0.0.1:5592"
```
cd tests-py
pytest -vv --hge-url=http://127.0.0.1:8080 --pg-url=<database_url>
pytest --hge-urls http://127.0.0.1:8080 --pg-urls <database_url> -vv
```
### Create Pull Request

View File

@ -135,4 +135,3 @@ ca.srl
webhook-req.cnf
webhook.csr
*.tix
*.tix2

View File

@ -2,5 +2,27 @@
```bash
pip install -r requirements.txt
pytest -vv --hge-url="http://127.0.0.1:8080" --pg-url="postgresql://admin@127.0.0.1:5432/hge_tests"
pytest --hge-urls "http://127.0.0.1:8080" --pg-urls "postgresql://admin@127.0.0.1:5432/hge_tests" -vv
```
## Tests Structure
- Tests are grouped as test classes in test modules (names starting with `test_`)
- The configuration files (if needed) for the tests in a class are usually kept in one folder.
- The folder name is usually either the `dir` variable or the `dir()` function
- Some tests (like in `test_graphql_queries.py`) requires a setup and teardown per class.
- Here we are extending the `DefaultTestSelectQueries` class.
- This class defines a fixture which will run the configurations in `setup.yaml` and `teardown.yaml` once per class
    - The extending test class should define a function named `dir()`, which returns the configuration folder
- For mutation tests (like in `test_graphql_mutations.py`)
- We need a `schema_setup` and `schema_teardown` per class
- And `values_setup` and `values_teardown` per test
- Doing schema setup and teardown per test is expensive.
- We are extending the `DefaultTestMutations` class for this.
- This class defines a fixture which will run the configuration in `setup.yaml` and `teardown.yaml` once per class.
    - Another fixture defined in this class runs the configuration in `values_setup.yaml` and `values_teardown.yaml` once per test.

View File

@ -1,14 +1,25 @@
import pytest
import time
from context import HGECtx, HGECtxError
from context import HGECtx, HGECtxError, EvtsWebhookServer, HGECtxGQLServer, GQLWsClient
import threading
import random
from datetime import datetime
import sys
import os
def pytest_addoption(parser):
parser.addoption(
"--hge-url", metavar="HGE_URL", help="url for graphql-engine", required=True
"--hge-urls",
metavar="HGE_URLS",
help="csv list of urls for graphql-engine",
required=False,
nargs='+'
)
parser.addoption(
"--pg-url", metavar="PG_URL", help="url for connecting to Postgres directly", required=True
"--pg-urls", metavar="PG_URLS",
help="csv list of urls for connecting to Postgres directly",
required=False,
nargs='+'
)
parser.addoption(
"--hge-key", metavar="HGE_KEY", help="admin secret key for graphql-engine", required=False
@ -57,20 +68,62 @@ def pytest_addoption(parser):
help="Run testcases for horizontal scaling"
)
#By default,
#1) Set default parallelism to one
#2) Set test grouping to by filename (--dist=loadfile)
def pytest_cmdline_preparse(config, args):
    """Inject default pytest-xdist options: one worker, grouped by file.

    Only the top-level pytest invocation is touched; xdist worker
    processes (identified by the PYTEST_XDIST_WORKER env var) and runs
    without the xdist plugin loaded are left alone.
    """
    worker_id = os.environ.get('PYTEST_XDIST_WORKER')
    xdist_loaded = 'xdist' in sys.modules
    if xdist_loaded and not worker_id:
        args[:] = ['-n1', '--dist=loadfile'] + args
@pytest.fixture(scope='session')
def pytest_configure(config):
    """Validate CLI options and stash per-session state on the config.

    On the xdist master (or a non-xdist run) this also starts the shared
    GraphQL test server used by remote-schema tests.
    """
    if is_master(config):
        config.hge_ctx_gql_server = HGECtxGQLServer()
        if not config.getoption('--hge-urls'):
            print("hge-urls should be specified")
        if not config.getoption('--pg-urls'):
            print("pg-urls should be specified")
        config.hge_url_list = config.getoption('--hge-urls')
        config.pg_url_list = config.getoption('--pg-urls')
        # Each xdist worker needs its own HGE/Postgres pair, so there must
        # be at least as many urls as workers (-n).
        if config.getoption('-n', default=None):
            xdist_threads = config.getoption('-n')
            assert xdist_threads <= len(config.hge_url_list), "Not enough hge_urls specified, Required " + str(xdist_threads) + ", got " + str(len(config.hge_url_list))
            assert xdist_threads <= len(config.pg_url_list), "Not enough pg_urls specified, Required " + str(xdist_threads) + ", got " + str(len(config.pg_url_list))
    random.seed(datetime.now())
@pytest.hookimpl(optionalhook=True)
def pytest_configure_node(node):
    """xdist hook: assign each worker node its own HGE url and Postgres url.

    Popping from the shared lists guarantees no two workers share a server
    or a database. optionalhook=True keeps this from erroring when
    pytest-xdist is not installed.
    """
    node.slaveinput["hge-url"] = node.config.hge_url_list.pop()
    node.slaveinput["pg-url"] = node.config.pg_url_list.pop()
def pytest_unconfigure(config):
    """Tear down the shared GraphQL test server at session end.

    NOTE(review): hge_ctx_gql_server is only set on the master node (see
    pytest_configure); verify this hook is not reached on xdist workers.
    """
    config.hge_ctx_gql_server.teardown()
@pytest.fixture(scope='module')
def hge_ctx(request):
config = request.config
print("create hge_ctx")
hge_url = request.config.getoption('--hge-url')
pg_url = request.config.getoption('--pg-url')
hge_key = request.config.getoption('--hge-key')
hge_webhook = request.config.getoption('--hge-webhook')
webhook_insecure = request.config.getoption('--test-webhook-insecure')
hge_jwt_key_file = request.config.getoption('--hge-jwt-key-file')
hge_jwt_conf = request.config.getoption('--hge-jwt-conf')
ws_read_cookie = request.config.getoption('--test-ws-init-cookie')
metadata_disabled = request.config.getoption('--test-metadata-disabled')
hge_scale_url = request.config.getoption('--test-hge-scale-url')
if is_master(config):
hge_url = config.hge_url_list[0]
else:
hge_url = config.slaveinput["hge-url"]
if is_master(config):
pg_url = config.pg_url_list[0]
else:
pg_url = config.slaveinput["pg-url"]
hge_key = config.getoption('--hge-key')
hge_webhook = config.getoption('--hge-webhook')
webhook_insecure = config.getoption('--test-webhook-insecure')
hge_jwt_key_file = config.getoption('--hge-jwt-key-file')
hge_jwt_conf = config.getoption('--hge-jwt-conf')
ws_read_cookie = config.getoption('--test-ws-init-cookie')
metadata_disabled = config.getoption('--test-metadata-disabled')
hge_scale_url = config.getoption('--test-hge-scale-url')
try:
hge_ctx = HGECtx(
hge_url=hge_url,
@ -90,7 +143,24 @@ def hge_ctx(request):
yield hge_ctx # provide the fixture value
print("teardown hge_ctx")
hge_ctx.teardown()
time.sleep(2)
time.sleep(1)
@pytest.fixture(scope='class')
def evts_webhook(request):
    """Class-scoped webhook HTTP server for event-trigger tests.

    Serves on 127.0.0.1:5592 in a background thread and is shut down and
    joined cleanly once the test class finishes.
    """
    webhook_httpd = EvtsWebhookServer(server_address=('127.0.0.1', 5592))
    web_server = threading.Thread(target=webhook_httpd.serve_forever)
    web_server.start()
    yield webhook_httpd
    webhook_httpd.shutdown()
    webhook_httpd.server_close()
    web_server.join()
@pytest.fixture(scope='class')
def ws_client(request, hge_ctx):
    """Class-scoped GraphQL websocket client bound to this test's HGE."""
    client = GQLWsClient(hge_ctx)
    # Brief pause to let the websocket connection establish before use.
    time.sleep(0.1)
    yield client
    client.teardown()
@pytest.fixture(scope='class')
def setup_ctrl(request, hge_ctx):
@ -102,3 +172,9 @@ def setup_ctrl(request, hge_ctx):
yield setup_ctrl
hge_ctx.may_skip_test_teardown = False
request.cls().do_teardown(setup_ctrl, hge_ctx)
def is_master(config):
    """Return True on the xdist master node, or when xdist is not in use.

    pytest-xdist attaches a ``slaveinput`` attribute to the config of
    every worker process; its absence identifies the master (or a plain,
    non-distributed run).
    """
    is_worker = hasattr(config, 'slaveinput')
    return not is_worker

View File

@ -10,6 +10,9 @@ import queue
import socket
import subprocess
import time
import uuid
import string
import random
import yaml
import requests
@ -17,13 +20,136 @@ import websocket
from sqlalchemy import create_engine
from sqlalchemy.schema import MetaData
import graphql_server
import graphql
class HGECtxError(Exception):
    """Error raised while building or operating an HGE test context."""
class GQLWsClient:
    """GraphQL-over-websocket client for a graphql-engine instance.

    Speaks the Apollo graphql-ws sub-protocol: a ``connection_init``
    handshake followed by ``start``/``stop`` frames. Events that carry a
    query id are routed to per-query queues so concurrent queries and
    subscriptions can be consumed independently; connection-level events
    (``connection_ack``, errors) land in the main queue.
    """

    def __init__(self, hge_ctx):
        self.hge_ctx = hge_ctx
        # Queue for events not tied to a particular query id.
        self.ws_queue = queue.Queue(maxsize=-1)
        # Derive the ws:// endpoint from the engine's HTTP URL.
        self.ws_url = urlparse(hge_ctx.hge_url)
        self.ws_url = self.ws_url._replace(scheme='ws')
        self.ws_url = self.ws_url._replace(path='/v1alpha1/graphql')
        self.create_conn()

    def create_conn(self):
        """(Re)initialise all connection state and start the socket thread."""
        self.ws_queue.queue.clear()
        self.ws_id_query_queues = dict()
        self.ws_active_query_ids = set()
        # FIX: on_open was previously never registered, so self.connected
        # could only become True via the first keep-alive frame.
        self._ws = websocket.WebSocketApp(self.ws_url.geturl(),
                                          on_open=self._on_open,
                                          on_message=self._on_message,
                                          on_close=self._on_close)
        self.wst = threading.Thread(target=self._ws.run_forever)
        self.wst.daemon = True
        self.wst.start()
        self.remote_closed = False
        self.connected = False
        self.init_done = False

    def recreate_conn(self):
        self.teardown()
        self.create_conn()

    def get_ws_event(self, timeout):
        """Next connection-level event, waiting up to `timeout` seconds."""
        return self.ws_queue.get(timeout=timeout)

    def has_ws_query_events(self, query_id):
        """True if events are pending for the given query id."""
        return not self.ws_id_query_queues[query_id].empty()

    def get_ws_query_event(self, query_id, timeout):
        """Next event for `query_id`, waiting up to `timeout` seconds."""
        return self.ws_id_query_queues[query_id].get(timeout=timeout)

    def send(self, frame):
        """Send a protocol frame, reconnecting first if the socket is down."""
        if not self.connected:
            self.recreate_conn()
            time.sleep(1)
        if frame.get('type') == 'stop':
            self.ws_active_query_ids.discard(frame.get('id'))
        elif frame.get('type') == 'start' and 'id' in frame:
            # Allocate the per-query event queue before the first response
            # can possibly arrive.
            self.ws_id_query_queues[frame['id']] = queue.Queue(maxsize=-1)
        self._ws.send(json.dumps(frame))

    def init_as_admin(self):
        """Run the connection_init handshake with admin credentials, if any."""
        headers = {}
        if self.hge_ctx.hge_key:
            headers = {'x-hasura-admin-secret': self.hge_ctx.hge_key}
        self.init(headers)

    def init(self, headers=None):
        """Perform the connection_init handshake; assert on connection_ack."""
        payload = {'type': 'connection_init', 'payload': {}}
        if headers:
            payload['payload']['headers'] = headers
        self.send(payload)
        ev = self.get_ws_event(3)
        assert ev['type'] == 'connection_ack', ev
        self.init_done = True

    def stop(self, query_id):
        """Send a stop frame for `query_id` and forget it locally."""
        data = {'id': query_id, 'type': 'stop'}
        self.send(data)
        self.ws_active_query_ids.discard(query_id)

    def gen_id(self, size=6, chars=string.ascii_letters + string.digits):
        """Generate a random query id not currently in use.

        FIX: the previous implementation retried via a bare recursive call
        to the undefined name ``gen_id``, which raised NameError whenever a
        collision with an active query id occurred. Retry with a loop instead.
        """
        while True:
            new_id = ''.join(random.choice(chars) for _ in range(size))
            if new_id not in self.ws_active_query_ids:
                return new_id

    def send_query(self, query, query_id=None, headers=None, timeout=60):
        """Start a query/subscription; yields its events as they arrive.

        This is a generator: iterate it to receive successive frames for
        the query, each waiting at most `timeout` seconds.
        """
        # Validate the GraphQL document client-side before sending.
        graphql.parse(query['query'])
        if headers:
            # Re-run the handshake when explicit headers are provided.
            self.init(headers)
        elif not self.init_done:
            self.init()
        if query_id is None:
            query_id = self.gen_id()
        frame = {
            'id': query_id,
            'type': 'start',
            'payload': query,
        }
        self.ws_active_query_ids.add(query_id)
        self.send(frame)
        while True:
            yield self.get_ws_query_event(query_id, timeout)

    def _on_open(self):
        self.connected = True

    def _on_message(self, message):
        json_msg = json.loads(message)
        if 'id' in json_msg:
            query_id = json_msg['id']
            if json_msg.get('type') == 'stop':
                # Remove from the active queries list.
                self.ws_active_query_ids.discard(query_id)
            if query_id not in self.ws_id_query_queues:
                self.ws_id_query_queues[query_id] = queue.Queue(maxsize=-1)
            # Put the event in the corresponding per-query queue.
            self.ws_id_query_queues[query_id].put(json_msg)
        elif json_msg['type'] == 'ka':
            # Keep-alive frame: the connection is demonstrably up.
            self.connected = True
        else:
            # Connection-level event: put it in the main queue.
            self.ws_queue.put(json_msg)

    def _on_close(self):
        self.remote_closed = True
        self.connected = False
        self.init_done = False

    def teardown(self):
        if not self.remote_closed:
            self._ws.close()
        self.wst.join()
class EvtsWebhookHandler(http.server.BaseHTTPRequestHandler):
def do_GET(self):
    # Respond 200 OK with no body; used as a simple liveness probe.
    self.send_response(HTTPStatus.OK)
    self.end_headers()
@ -62,79 +188,16 @@ class WebhookHandler(http.server.BaseHTTPRequestHandler):
"body": req_json,
"headers": req_headers})
class WebhookServer(http.server.HTTPServer):
    """HTTP server whose request handler reports deliveries via queues.

    The queues are stored on the server object so the handler can reach
    them through ``self.server`` while processing requests.
    """

    def __init__(self, resp_queue, error_queue, server_address):
        # Keep references to the caller-supplied queues before binding.
        self.resp_queue = resp_queue
        self.error_queue = error_queue
        super().__init__(server_address, WebhookHandler)
class EvtsWebhookServer(http.server.HTTPServer):
    """HTTP server that receives event-trigger deliveries from graphql-engine."""

    def __init__(self, server_address):
        # Single-slot response queue: each delivery is consumed by the test
        # before the next one is enqueued. Errors go to an unbounded queue.
        self.resp_queue = queue.Queue(maxsize=1)
        self.error_queue = queue.Queue()
        super().__init__(server_address, EvtsWebhookHandler)

    def server_bind(self):
        # SO_REUSEADDR lets consecutive test runs rebind the same port
        # without waiting for TIME_WAIT to expire.
        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.socket.bind(self.server_address)
class HGECtx:
def __init__(self, hge_url, pg_url, hge_key, hge_webhook, webhook_insecure,
             hge_jwt_key_file, hge_jwt_conf, metadata_disabled, ws_read_cookie, hge_scale_url):
    """Set up the test context for one graphql-engine instance.

    Starts an auth-webhook HTTP server, a websocket connection to the
    engine, and a local mock GraphQL server; then (unless metadata is
    disabled) clears the database via the metadata API.

    Raises:
        HGECtxError: if the initial clear_db request fails at the HTTP level.
    """
    # Auth-webhook server on a fixed port, fed by single-slot queues.
    server_address = ('0.0.0.0', 5592)
    self.resp_queue = queue.Queue(maxsize=1)
    self.error_queue = queue.Queue()
    self.ws_queue = queue.Queue(maxsize=-1)
    self.httpd = WebhookServer(self.resp_queue, self.error_queue, server_address)
    self.web_server = threading.Thread(target=self.httpd.serve_forever)
    self.web_server.start()
    # Postgres connection (lazy; no connection is made until first use).
    self.pg_url = pg_url
    self.engine = create_engine(self.pg_url)
    self.meta = MetaData()
    self.http = requests.Session()
    self.hge_url = hge_url
    self.hge_key = hge_key
    self.hge_webhook = hge_webhook
    # JWT key is read eagerly from file when configured.
    if hge_jwt_key_file is None:
        self.hge_jwt_key = None
    else:
        with open(hge_jwt_key_file) as f:
            self.hge_jwt_key = f.read()
    self.hge_jwt_conf = hge_jwt_conf
    self.webhook_insecure = webhook_insecure
    self.metadata_disabled = metadata_disabled
    self.may_skip_test_teardown = False
    # Websocket endpoint derived from the engine's HTTP URL.
    self.ws_url = urlparse(hge_url)
    self.ws_url = self.ws_url._replace(scheme='ws')
    self.ws_url = self.ws_url._replace(path='/v1alpha1/graphql')
    self.ws = websocket.WebSocketApp(self.ws_url.geturl(), on_message=self._on_message)
    self.wst = threading.Thread(target=self.ws.run_forever)
    self.wst.daemon = True
    self.wst.start()
    # start the graphql server (mock remote-schema target)
    self.graphql_server = graphql_server.create_server('127.0.0.1', 5000)
    self.gql_srvr_thread = threading.Thread(target=self.graphql_server.serve_forever)
    self.gql_srvr_thread.start()
    self.ws_read_cookie = ws_read_cookie
    self.hge_scale_url = hge_scale_url
    # Record the server version via the repo's helper script.
    result = subprocess.run(['../../scripts/get-version.sh'], shell=False, stdout=subprocess.PIPE, check=True)
    self.version = result.stdout.decode('utf-8').strip()
    if not self.metadata_disabled:
        try:
            st_code, resp = self.v1q_f('queries/clear_db.yaml')
        except requests.exceptions.RequestException as e:
            # Tear down the servers we already started before re-raising.
            self.teardown()
            raise HGECtxError(repr(e))
        assert st_code == 200, resp
def _on_message(self, message):
    """Websocket callback: queue every frame except keep-alives.

    Note the raw JSON string (not the decoded dict) is what gets queued;
    get_ws_event decodes it on the consumer side.
    """
    event = json.loads(message)
    if event['type'] != 'ka':
        self.ws_queue.put(message)
def get_event(self, timeout):
    # Next webhook delivery from the response queue, waiting up to
    # `timeout` seconds (raises queue.Empty on expiry).
    return self.resp_queue.get(timeout=timeout)
@ -145,8 +208,63 @@ class HGECtx:
sz = sz + 1
return sz
def get_ws_event(self, timeout):
    # Frames are queued as raw JSON strings; decode before returning.
    return json.loads(self.ws_queue.get(timeout=timeout))
def teardown(self):
    # Shut down the event-trigger webhook server and the mock GraphQL
    # server, then join their serving threads.
    # NOTE(review): the evt_trggr_* attributes are not assigned in the
    # __init__ visible here — presumably set elsewhere; verify before use.
    self.evt_trggr_httpd.shutdown()
    self.evt_trggr_httpd.server_close()
    graphql_server.stop_server(self.graphql_server)
    self.gql_srvr_thread.join()
    self.evt_trggr_web_server.join()
class HGECtxGQLServer:
    """Owns the local mock GraphQL server used as a remote-schema target."""

    def __init__(self):
        # start the graphql server on a fixed local port and serve it
        # from a background thread
        self.graphql_server = graphql_server.create_server('127.0.0.1', 5000)
        self.gql_srvr_thread = threading.Thread(target=self.graphql_server.serve_forever)
        self.gql_srvr_thread.start()

    def teardown(self):
        # Stop serving and wait for the serving thread to exit.
        graphql_server.stop_server(self.graphql_server)
        self.gql_srvr_thread.join()
class HGECtx:
def __init__(self, hge_url, pg_url, hge_key, hge_webhook, webhook_insecure,
             hge_jwt_key_file, hge_jwt_conf, metadata_disabled, ws_read_cookie, hge_scale_url):
    """Per-worker test context for one graphql-engine + postgres pair.

    Unlike the earlier incarnation, this version no longer owns the
    webhook/mock servers itself — those live in separate fixtures — and
    the websocket is wrapped in a GQLWsClient.

    Raises:
        HGECtxError: if the initial clear_db request fails at the HTTP level.
    """
    self.http = requests.Session()
    self.hge_key = hge_key
    self.hge_url = hge_url
    self.pg_url = pg_url
    self.hge_webhook = hge_webhook
    # JWT key is read eagerly from file when configured.
    if hge_jwt_key_file is None:
        self.hge_jwt_key = None
    else:
        with open(hge_jwt_key_file) as f:
            self.hge_jwt_key = f.read()
    self.hge_jwt_conf = hge_jwt_conf
    self.webhook_insecure = webhook_insecure
    self.metadata_disabled = metadata_disabled
    self.may_skip_test_teardown = False
    # Postgres engine is created lazily; no connection until first use.
    self.engine = create_engine(self.pg_url)
    self.meta = MetaData()
    self.ws_read_cookie = ws_read_cookie
    self.hge_scale_url = hge_scale_url
    # Websocket client for running GraphQL over ws:// against this engine.
    self.ws_client = GQLWsClient(self)
    # Record the server version via the repo's helper script.
    result = subprocess.run(['../../scripts/get-version.sh'], shell=False, stdout=subprocess.PIPE, check=True)
    self.version = result.stdout.decode('utf-8').strip()
    if not self.metadata_disabled:
        try:
            st_code, resp = self.v1q_f('queries/clear_db.yaml')
        except requests.exceptions.RequestException as e:
            # Release what we already acquired before re-raising.
            self.teardown()
            raise HGECtxError(repr(e))
        assert st_code == 200, resp
def reflect_tables(self):
    # Populate self.meta with the current table definitions from postgres.
    self.meta.reflect(bind=self.engine)
@ -178,15 +296,8 @@ class HGECtx:
def v1q_f(self, fn):
    """Run the v1/query payload stored in YAML file `fn`; returns (status, body).

    FIX: this span carried both the old ``yaml.load`` call and its
    replacement as diff residue (two return statements); only the
    ``yaml.safe_load`` form is kept. safe_load also avoids arbitrary
    object construction from the fixture files.
    """
    with open(fn) as f:
        return self.v1q(yaml.safe_load(f))
def teardown(self):
    """Release client-side resources owned by this context.

    FIX: this span contained leftover deleted lines (``self.httpd``,
    ``self.ws``, ``self.graphql_server`` shutdown calls) referencing
    attributes the current __init__ never creates — those servers are
    owned by separate fixtures now. Only the HTTP session and the
    SQLAlchemy engine belong to this object.
    """
    self.http.close()
    self.engine.dispose()

View File

@ -6,7 +6,7 @@ args:
args:
sql: |
create table author(
id serial primary key,
id serial primary key,
name text unique
);
- type: track_table
@ -49,31 +49,3 @@ args:
table: article
column: author_id
#Insert Author table data
- type: insert
args:
table: author
objects:
- name: Author 1
- name: Author 2
#Insert article table data
- type: insert
args:
table: article
objects:
- content: Sample article content 1
title: Article 1
author_id: 1
- content: Sample article content 2
title: Article 2
author_id: 1
- content: Sample article content 3
author_id: 1
title: Article 3
- content: Sample article content 4
author_id: 2
title: Article 4
- content: Sample article content 5
author_id: 2
title: Article 5

View File

@ -0,0 +1,10 @@
type: bulk
args:
#Drop the tables (cascade removes dependent relationships)
- type: run_sql
args:
sql: |
drop table article;
drop table author;
cascade: true

View File

@ -1,79 +0,0 @@
type: bulk
args:
#Author table
- type: run_sql
args:
sql: |
create table author(
id serial primary key,
name text unique
);
- type: track_table
args:
schema: public
name: author
#Article table
- type: run_sql
args:
sql: |
CREATE TABLE article (
id SERIAL PRIMARY KEY,
title TEXT,
content TEXT,
author_id INTEGER NOT NULL REFERENCES author(id),
is_published BOOLEAN,
published_on TIMESTAMP
)
- type: track_table
args:
schema: public
name: article
#Object relationship
- type: create_object_relationship
args:
table: article
name: author
using:
foreign_key_constraint_on: author_id
#Array relationship
- type: create_array_relationship
args:
table: author
name: articles
using:
foreign_key_constraint_on:
table: article
column: author_id
#Insert Author table data
- type: insert
args:
table: author
objects:
- name: Author 1
- name: Author 2
#Insert article table data
- type: insert
args:
table: article
objects:
- content: Sample article content 1
title: Article 1
author_id: 1
- content: Sample article content 2
title: Article 2
author_id: 1
- content: Sample article content 3
author_id: 1
title: Article 3
- content: Sample article content 4
author_id: 2
title: Article 4
- content: Sample article content 5
author_id: 2
title: Article 5

View File

@ -0,0 +1,31 @@
type: bulk
args:
#Insert Author table data
- type: insert
args:
table: author
objects:
- name: Author 1
- name: Author 2
#Insert article table data
- type: insert
args:
table: article
objects:
- content: Sample article content 1
title: Article 1
author_id: 1
- content: Sample article content 2
title: Article 2
author_id: 1
- content: Sample article content 3
author_id: 1
title: Article 3
- content: Sample article content 4
author_id: 2
title: Article 4
- content: Sample article content 5
author_id: 2
title: Article 5

View File

@ -0,0 +1,12 @@
type: bulk
args:
- type: run_sql
args:
sql: |
delete from article;
SELECT setval('article_id_seq', 1, FALSE);
delete from author;
SELECT setval('author_id_seq', 1, FALSE);

View File

@ -0,0 +1,50 @@
type: bulk
args:
#Author table
- type: run_sql
args:
sql: |
create table author(
id serial primary key,
name text unique
);
- type: track_table
args:
schema: public
name: author
#Article table
- type: run_sql
args:
sql: |
CREATE TABLE article (
id SERIAL PRIMARY KEY,
title TEXT,
content TEXT,
author_id INTEGER NOT NULL REFERENCES author(id),
is_published BOOLEAN,
published_on TIMESTAMP
)
- type: track_table
args:
schema: public
name: article
#Object relationship
- type: create_object_relationship
args:
table: article
name: author
using:
foreign_key_constraint_on: author_id
#Array relationship
- type: create_array_relationship
args:
table: author
name: articles
using:
foreign_key_constraint_on:
table: article
column: author_id

View File

@ -0,0 +1,31 @@
type: bulk
args:
#Insert Author table data
- type: insert
args:
table: author
objects:
- name: Author 1
- name: Author 2
#Insert article table data
- type: insert
args:
table: article
objects:
- content: Sample article content 1
title: Article 1
author_id: 1
- content: Sample article content 2
title: Article 2
author_id: 1
- content: Sample article content 3
author_id: 1
title: Article 3
- content: Sample article content 4
author_id: 2
title: Article 4
- content: Sample article content 5
author_id: 2
title: Article 5

View File

@ -0,0 +1,12 @@
type: bulk
args:
- type: run_sql
args:
sql: |
delete from article;
SELECT setval('article_id_seq', 1, FALSE);
delete from author;
SELECT setval('author_id_seq', 1, FALSE);

View File

@ -6,7 +6,7 @@ args:
args:
sql: |
create table author(
id serial primary key,
id serial primary key,
name text unique,
payments_done boolean not null default false
);
@ -50,37 +50,6 @@ args:
table: article
column: author_id
#Insert Author table data
- type: insert
args:
table: author
objects:
- name: Author 1
- name: Author 2
- name: Author 3
payments_done: true
#Insert article table data
- type: insert
args:
table: article
objects:
- content: Sample article content 1
title: Article 1
author_id: 1
- content: Sample article content 2
title: Article 2
author_id: 1
- content: Sample article content 3
author_id: 1
title: Article 3
- content: Sample article content 4
author_id: 2
title: Article 4
- content: Sample article content 5
author_id: 2
title: Article 5
#Prevent deletion if payments to the author is not yet done
- type: create_delete_permission
args:
@ -132,15 +101,6 @@ args:
schema: public
name: resident
- type: insert
args:
table: resident
objects:
- name: Griffin
age: 25
- name: Clarke
age: 26
- type: create_delete_permission
args:
table: resident

View File

@ -0,0 +1,9 @@
type: bulk
args:
- type: run_sql
args:
sql: |
drop table article;
drop table author;
drop table resident;

View File

@ -1,22 +0,0 @@
type: bulk
args:
#Drop relationship first
- type: drop_relationship
args:
relationship: articles
table:
schema: public
name: author
- type: run_sql
args:
sql: |
drop table article
- type: run_sql
args:
sql: |
drop table author
- type: run_sql
args:
sql: |
drop table resident

View File

@ -0,0 +1,43 @@
type: bulk
args:
- type: insert
args:
table: resident
objects:
- name: Griffin
age: 25
- name: Clarke
age: 26
#Insert Author table data
- type: insert
args:
table: author
objects:
- name: Author 1
- name: Author 2
- name: Author 3
payments_done: true
#Insert article table data
- type: insert
args:
table: article
objects:
- content: Sample article content 1
title: Article 1
author_id: 1
- content: Sample article content 2
title: Article 2
author_id: 1
- content: Sample article content 3
author_id: 1
title: Article 3
- content: Sample article content 4
author_id: 2
title: Article 4
- content: Sample article content 5
author_id: 2
title: Article 5

View File

@ -0,0 +1,15 @@
type: bulk
args:
- type: run_sql
args:
sql: |
delete from article;
SELECT setval('article_id_seq', 1, FALSE);
delete from author;
SELECT setval('author_id_seq', 1, FALSE);
delete from resident;
SELECT setval('resident_id_seq', 1, FALSE);

View File

@ -98,15 +98,4 @@
content
}
}
}
- description: Delete the inserted articles
url: /v1/query
status: 200
query:
type: run_sql
args:
sql: |
delete from article;
delete from author;
SELECT setval('article_id_seq', 1, FALSE);
SELECT setval('author_id_seq', 1, FALSE);
}

View File

@ -26,14 +26,3 @@
}
}
}
- description: Delete the inserted test_types rows
url: /v1/query
status: 200
query:
type: run_sql
args:
sql: |
delete from test_types;
SELECT setval('test_types_c10_bigserial_seq', 1, FALSE);
SELECT setval('test_types_c9_serial_seq', 1, FALSE);
SELECT setval('test_types_c8_smallserial_seq', 1, FALSE);

View File

@ -146,14 +146,3 @@
}
}
}
- description: Delete the inserted rows in test_types table
url: /v1/query
status: 200
query:
type: run_sql
args:
sql: |
delete from test_types;
SELECT setval('test_types_c10_bigserial_seq', 1, FALSE);
SELECT setval('test_types_c9_serial_seq', 1, FALSE);
SELECT setval('test_types_c8_smallserial_seq', 1, FALSE);

View File

@ -18,12 +18,3 @@
affected_rows
}
}
- description: Delete the inserted orders
url: /v1/query
status: 200
query:
type: run_sql
args:
sql: |
delete from orders;
SELECT setval('orders_id_seq', 1, FALSE);

View File

@ -25,12 +25,3 @@
}
}
}
- description: Delete the inserted persons
url: /v1/query
status: 200
query:
type: run_sql
args:
sql: |
delete from person;
SELECT setval('person_id_seq', 1, FALSE);

View File

@ -31,12 +31,3 @@
}
}
}
- description: Delete the inserted persons
url: /v1/query
status: 200
query:
type: run_sql
args:
sql: |
delete from person;
SELECT setval('person_id_seq', 1, FALSE);

View File

@ -22,12 +22,3 @@
}
}
}
- description: Delete the inserted persons
url: /v1/query
status: 200
query:
type: run_sql
args:
sql: |
delete from person;
SELECT setval('person_id_seq', 1, FALSE);

View File

@ -25,12 +25,3 @@
}
}
}
- description: Delete the inserted persons
url: /v1/query
status: 200
query:
type: run_sql
args:
sql: |
delete from person;
SELECT setval('person_id_seq', 1, FALSE);

View File

@ -6,7 +6,7 @@ args:
args:
sql: |
create table author(
id serial primary key,
id serial primary key,
name text unique
);
- type: track_table

View File

@ -7,7 +7,7 @@ args:
table:
schema: public
name: author
- type: run_sql
args:
sql: |

View File

@ -0,0 +1,22 @@
type: bulk
args:
- type: run_sql
args:
sql: |
delete from person;
SELECT setval('person_id_seq', 1, FALSE);
delete from orders;
SELECT setval('orders_id_seq', 1, FALSE);
delete from article;
SELECT setval('article_id_seq', 1, FALSE);
delete from author;
SELECT setval('author_id_seq', 1, FALSE);
delete from test_types;
SELECT setval('test_types_c10_bigserial_seq', 1, FALSE);
SELECT setval('test_types_c9_serial_seq', 1, FALSE);
SELECT setval('test_types_c8_smallserial_seq', 1, FALSE);

View File

@ -0,0 +1,29 @@
type: bulk
args:
- type: run_sql
args:
sql: |
DELETE from drone_3d_location;
DELETE from landmark;
SELECT setval('landmark_id_seq', 1, FALSE);
DELETE from road;
SELECT setval('road_id_seq', 1, FALSE);
DELETE from service_locations;
SELECT setval('service_locations_id_seq', 1, FALSE);
DELETE from route;
SELECT setval('route_id_seq', 1, FALSE);
DELETE from area;
SELECT setval('area_id_seq', 1, FALSE);
DELETE from compounds;
DELETE from geometry_collection;
SELECT setval('geometry_collection_id_seq', 1, FALSE);

View File

@ -50,11 +50,11 @@ response:
title: "Article 1 by Author 3"
content: "Article content for Article 1 by Author 3"
author:
id: 4
id: 3
name: Author 3
- id: 5
title: "Article 1 by Author 4"
content: "Article content for Article 1 by Author 4"
author:
id: 5
id: 4
name: Author 4

View File

@ -6,8 +6,8 @@ args:
args:
sql: |
create table author(
id serial primary key,
name text unique,
id serial primary key,
name text unique,
is_registered boolean not null default false
);
- type: track_table
@ -49,22 +49,3 @@ args:
table: article
column: author_id
#Insert Author table data
- type: insert
args:
table: author
objects:
- name: Author 1
- name: Author 2
#Insert article table data
- type: insert
args:
table: article
objects:
- content: Sample article content
title: Article 1
- content: Sample article content
title: Article 2
- content: Sample article content
title: Article 3

View File

@ -7,7 +7,7 @@ args:
table:
schema: public
name: author
- type: run_sql
args:
sql: |

View File

@ -1,18 +0,0 @@
type: bulk
args:
#Drop relationship first
- type: drop_relationship
args:
relationship: articles
table:
schema: public
name: author
- type: run_sql
args:
sql: |
drop table article
- type: run_sql
args:
sql: |
drop table author

View File

@ -0,0 +1,22 @@
type: bulk
args:
#Insert Author table data
- type: insert
args:
table: author
objects:
- name: Author 1
- name: Author 2
#Insert article table data
- type: insert
args:
table: article
objects:
- content: Sample article content
title: Article 1
- content: Sample article content
title: Article 2
- content: Sample article content
title: Article 3

View File

@ -0,0 +1,12 @@
type: bulk
args:
#Delete table data and reset sequences
- type: run_sql
args:
sql: |
delete from article;
SELECT setval('article_id_seq', 1, FALSE);
delete from author;
SELECT setval('author_id_seq', 1, FALSE);

View File

@ -6,7 +6,7 @@ args:
args:
sql: |
create table author(
id serial primary key,
id serial primary key,
name text unique
);
- type: track_table
@ -31,6 +31,33 @@ args:
schema: public
name: article
#Person table
- type: run_sql
args:
sql: |
CREATE TABLE person (
id SERIAL PRIMARY KEY,
details JSONB NOT NULL
)
- type: track_table
args:
schema: public
name: person
#Order table
- type: run_sql
args:
sql: |
CREATE TABLE orders (
id SERIAL PRIMARY KEY,
placed TIMESTAMPTZ NOT NULL,
shipped TIMESTAMPTZ
)
- type: track_table
args:
schema: public
name: orders
#Object relationship
- type: create_object_relationship
@ -50,35 +77,3 @@ args:
table: article
column: author_id
#Person table
- type: run_sql
args:
sql: |
CREATE TABLE person (
id SERIAL PRIMARY KEY,
details JSONB NOT NULL
)
- type: track_table
args:
schema: public
name: person
#Insert Author table data
- type: insert
args:
table: author
objects:
- name: Author 1
- name: Author 2
#Insert Person table data
- type: insert
args:
table: person
objects:
- details:
name:
first: foo
last: bar
address: foobar

View File

@ -1,99 +0,0 @@
type: bulk
args:
#Author table
- type: run_sql
args:
sql: |
create table author(
id serial primary key,
name text unique
);
- type: track_table
args:
schema: public
name: author
#Article table
- type: run_sql
args:
sql: |
CREATE TABLE article (
id SERIAL PRIMARY KEY,
title TEXT,
content TEXT,
author_id INTEGER REFERENCES author(id),
is_published BOOLEAN,
published_on TIMESTAMP
)
- type: track_table
args:
schema: public
name: article
#Person table
- type: run_sql
args:
sql: |
CREATE TABLE person (
id SERIAL PRIMARY KEY,
details JSONB NOT NULL
)
- type: track_table
args:
schema: public
name: person
#Order table
- type: run_sql
args:
sql: |
CREATE TABLE orders (
id SERIAL PRIMARY KEY,
placed TIMESTAMPTZ NOT NULL,
shipped TIMESTAMPTZ
)
- type: track_table
args:
schema: public
name: orders
#Object relationship
- type: create_object_relationship
args:
table: article
name: author
using:
foreign_key_constraint_on: author_id
#Array relationship
- type: create_array_relationship
args:
table: author
name: articles
using:
foreign_key_constraint_on:
table: article
column: author_id
#Insert Author table data
- type: insert
args:
table: author
objects:
- name: Author 1
- name: Author 2
#Insert article table data
- type: insert
args:
table: article
objects:
- content: Sample article content
title: Article 1
- content: Sample article content
title: Article 2
- content: Sample article content
title: Article 3

View File

@ -0,0 +1,23 @@
type: bulk
args:
#Insert Author table data
- type: insert
args:
table: author
objects:
- name: Author 1
- name: Author 2
#Insert article table data
- type: insert
args:
table: article
objects:
- content: Sample article content
title: Article 1
- content: Sample article content
title: Article 2
- content: Sample article content
title: Article 3

View File

@ -0,0 +1,18 @@
type: bulk
args:
#Delete table data and reset sequences
- type: run_sql
args:
sql: |
DELETE from person;
SELECT setval('person_id_seq', 1, FALSE);
DELETE from orders;
SELECT setval('orders_id_seq', 1, FALSE);
DELETE from article;
SELECT setval('article_id_seq', 1, FALSE);
DELETE from author;
SELECT setval('author_id_seq', 1, FALSE);

View File

@ -6,8 +6,8 @@ args:
args:
sql: |
create table author(
id serial primary key,
name text unique,
id serial primary key,
name text unique,
bio text,
is_registered boolean not null default false
);
@ -173,29 +173,6 @@ args:
bio:
_is_null: false
#Insert Author table data
- type: insert
args:
table: author
objects:
- name: Author 1
- name: Author 2
#Insert article table data
- type: insert
args:
table: article
objects:
- content: Sample article content
title: Article 1
author_id: 1
- content: Sample article content
title: Article 2
author_id: 1
- content: Sample article content
title: Article 3
author_id: 2
#Company insert permission for user
- type: create_insert_permission
args:
@ -307,17 +284,6 @@ args:
filter:
id: X-Hasura-Resident-Id
#Insert residents
- type: insert
args:
table: resident
objects:
- id: 5
name: Resident 5
age: 21
- id: 6
name: Resident 6
age: 22
#Create blog table
- type: run_sql
@ -331,14 +297,12 @@ args:
last_updated timestamptz,
updated_by INTEGER REFERENCES author(id)
);
INSERT INTO blog (id, title, author_id) VALUES
(1, 'first blog', 1), (2, 'second blog', 2);
- type: track_table
args:
name: blog
schema: public
- type: create_select_permission
args:
table: blog

View File

@ -3,7 +3,7 @@ args:
#Drop relationship first
- type: drop_relationship
args:
table:
table:
name: author
schema: public
relationship: articles

View File

@ -0,0 +1,46 @@
type: bulk
args:
#Insert Author table data
- type: insert
args:
table: author
objects:
- name: Author 1
- name: Author 2
#Insert article table data
- type: insert
args:
table: article
objects:
- content: Sample article content
title: Article 1
author_id: 1
- content: Sample article content
title: Article 2
author_id: 1
- content: Sample article content
title: Article 3
author_id: 2
#Create blog table
- type: run_sql
args:
sql: |
INSERT INTO blog (id, title, author_id) VALUES
(1, 'first blog', 1), (2, 'second blog', 2);
#Insert residents
- type: insert
args:
table: resident
objects:
- id: 5
name: Resident 5
age: 21
- id: 6
name: Resident 6
age: 22

View File

@ -0,0 +1,23 @@
type: bulk
args:
- type: run_sql
args:
sql: |
delete from address;
SELECT setval('address_id_seq', 1, FALSE);
delete from resident;
SELECT setval('resident_id_seq', 1, FALSE);
delete from article;
SELECT setval('article_id_seq', 1, FALSE);
delete from blog;
SELECT setval('blog_id_seq', 1, FALSE);
delete from author;
SELECT setval('author_id_seq', 1, FALSE);
delete from "Company";
SELECT setval('"Company_id_seq"', 1, FALSE);

View File

@ -6,8 +6,8 @@ args:
args:
sql: |
create table author(
id serial primary key,
name text unique,
id serial primary key,
name text unique,
is_registered boolean not null default false
);
- type: track_table

View File

@ -17,7 +17,7 @@ args:
schema: public
name: article
#Drop views
#Drop views
- type: run_sql
args:
sql: |

View File

@ -0,0 +1,12 @@
type: bulk
args:
#Delete table data and reset sequences
- type: run_sql
args:
sql: |
delete from article;
SELECT setval('article_id_seq', 1, FALSE);
delete from author;
SELECT setval('author_id_seq', 1, FALSE);

View File

@ -6,7 +6,7 @@ args:
args:
sql: |
create table author(
id serial primary key,
id serial primary key,
name text unique
);
- type: track_table
@ -31,6 +31,7 @@ args:
schema: public
name: article
#Object relationship
- type: create_object_relationship
args:
@ -49,23 +50,16 @@ args:
table: article
column: author_id
#Insert Author table data
- type: insert
args:
table: author
objects:
- name: Author 1
- name: Author 2
#Person table
#Insert article table data
- type: insert
- type: run_sql
args:
table: article
objects:
- content: Sample article content
title: Article 1
- content: Sample article content
title: Article 2
- content: Sample article content
title: Article 3
sql: |
CREATE TABLE person (
id SERIAL PRIMARY KEY,
details JSONB NOT NULL
)
- type: track_table
args:
schema: public
name: person

View File

@ -3,7 +3,7 @@ args:
#Drop relationship first
- type: drop_relationship
args:
table:
table:
name: author
schema: public
relationship: articles

View File

@ -0,0 +1,21 @@
type: bulk
args:
#Insert Author table data
- type: insert
args:
table: author
objects:
- name: Author 1
- name: Author 2
#Insert Person table data
- type: insert
args:
table: person
objects:
- details:
name:
first: foo
last: bar
address: foobar

View File

@ -0,0 +1,15 @@
type: bulk
args:
#Delete table data and reset sequences
- type: run_sql
args:
sql: |
delete from article;
SELECT setval('article_id_seq', 1, FALSE);
delete from author;
SELECT setval('author_id_seq', 1, FALSE);
delete from person;
SELECT setval('person_id_seq', 1, FALSE);

View File

@ -0,0 +1,16 @@
type: bulk
args:
#Person table
- type: run_sql
args:
sql: |
CREATE TABLE person (
id SERIAL PRIMARY KEY,
details JSONB NOT NULL
)
- type: track_table
args:
schema: public
name: person

View File

@ -1,39 +0,0 @@
type: bulk
args:
#Person table
- type: run_sql
args:
sql: |
CREATE TABLE person (
id SERIAL PRIMARY KEY,
details JSONB NOT NULL
)
- type: track_table
args:
schema: public
name: person
#Insert Person table data
- type: insert
args:
table: person
objects:
- details:
name:
first: John
last: Taylor
- details:
- address:
city: Copenhagen
country: Denmark
- address:
city: Canterbury
country: United Kingdom
- details:
address:
city: Copenhagen
country: Denmark
name:
first: Robert
last: Wilson

View File

@ -0,0 +1,26 @@
type: bulk
args:
#Insert Person table data
- type: insert
args:
table: person
objects:
- details:
name:
first: John
last: Taylor
- details:
- address:
city: Copenhagen
country: Denmark
- address:
city: Canterbury
country: United Kingdom
- details:
address:
city: Copenhagen
country: Denmark
name:
first: Robert
last: Wilson

View File

@ -0,0 +1,7 @@
type: bulk
args:
- type: run_sql
args:
sql: |
delete from person;
SELECT setval('person_id_seq', 1, FALSE);

View File

@ -6,7 +6,7 @@ args:
args:
sql: |
create table author(
id serial primary key,
id serial primary key,
name text unique
);
- type: track_table
@ -66,41 +66,6 @@ args:
schema: public
name: person
#Insert Author table data
- type: insert
args:
table: author
objects:
- id: 1
name: Author 1
- id: 2
name: Author 2
#Insert article table data
- type: insert
args:
table: article
objects:
- id: 1
version: 1.0.0
content: Sample article 1, content version 1.0.0
title: Article 1
author_id: 1
is_published: true
- id: 2
content: Sample article 1, content version 1.0.1
version: 1.0.1
title: Article 1
author_id: 1
is_published: false
- id: 3
content: Sample article 2, content version 1.0.2
version: 1.0.0
title: Article 2
author_id: 2
is_published: false
#Author select permission for user
- type: create_select_permission
@ -153,11 +118,7 @@ args:
age INTEGER NOT NULL,
city TEXT NOT NULL
);
INSERT INTO resident (name, age, city)
VALUES ('phillips', 25, 'canberra')
, ('george', 26, 'sydney')
, ('clarke', 21, 'perth')
;
- type: track_table
args:
name: resident

View File

@ -11,7 +11,8 @@ args:
- type: run_sql
args:
sql: |
DROP TABLE article;
DROP TABLE author;
DROP TABLE person;
DROP TABLE resident;
DROP table article;
DROP table author;
DROP table person;
DROP table resident;
cascade: true

View File

@ -0,0 +1,46 @@
type: bulk
args:
#Insert Author table data
- type: insert
args:
table: author
objects:
- id: 1
name: Author 1
- id: 2
name: Author 2
#Insert article table data
- type: insert
args:
table: article
objects:
- id: 1
version: 1.0.0
content: Sample article 1, content version 1.0.0
title: Article 1
author_id: 1
is_published: true
- id: 2
content: Sample article 1, content version 1.0.1
version: 1.0.1
title: Article 1
author_id: 1
is_published: false
- id: 3
content: Sample article 2, content version 1.0.2
version: 1.0.0
title: Article 2
author_id: 2
is_published: false
#Resident table
- type: run_sql
args:
sql: |
INSERT INTO resident (name, age, city)
VALUES ('phillips', 25, 'canberra')
, ('george', 26, 'sydney')
, ('clarke', 21, 'perth')
;

View File

@ -0,0 +1,17 @@
type: bulk
args:
- type: run_sql
args:
sql: |
DELETE from article;
SELECT setval('article_id_seq', 1, FALSE);
DELETE from author;
SELECT setval('author_id_seq', 1, FALSE);
DELETE from person;
SELECT setval('person_id_seq', 1, FALSE);
DELETE from resident;
SELECT setval('resident_id_seq', 1, FALSE);

View File

@ -1,6 +1,13 @@
description: Query aggregations on article with user role (Error)
url: /v1alpha1/graphql
status: 400
response:
errors:
- extensions:
path: $.selectionSet.article_agg
code: validation-failed
message: |-
field "article_agg" not found in type: 'query_root'
headers:
X-Hasura-Role: user
query:

View File

@ -1,6 +1,13 @@
description: Select author with it's article aggregations with user role (Error)
url: /v1alpha1/graphql
status: 400
response:
errors:
- extensions:
path: $.selectionSet.author.selectionSet.articles_agg
code: validation-failed
message: |-
field "articles_agg" not found in type: 'author'
headers:
X-Hasura-Role: user
query:

View File

@ -1,6 +1,12 @@
description: Nested select on article with limit expecting error
url: /v1alpha1/graphql
status: 400
response:
errors:
- extensions:
code: not-supported
path: $.selectionSet.article.args.limit
message: unexpected negative value
query:
query: |
query {

View File

@ -1,6 +1,12 @@
description: Nested select on article with limit expecting error
url: /v1alpha1/graphql
status: 400
response:
errors:
- extensions:
code: validation-failed
path: $.selectionSet.article.args.limit
message: expecting Integer value for "limit"
query:
query: |
query {

View File

@ -6,7 +6,7 @@ args:
args:
sql: |
create table author(
id serial primary key,
id serial primary key,
name text unique
);
- type: track_table

View File

@ -0,0 +1,10 @@
type: bulk
args:
- type: run_sql
args:
sql: |
drop table article;
drop table author;
drop table orders;
cascade: true

View File

@ -1,25 +0,0 @@
type: bulk
args:
#Drop relationship first
- type: drop_relationship
args:
relationship: articles
table:
schema: public
name: author
- type: run_sql
args:
sql: |
drop table article
- type: run_sql
args:
sql: |
drop table author
- type: run_sql
args:
sql: |
drop table orders

View File

@ -0,0 +1,15 @@
type: bulk
args:
- type: run_sql
args:
sql: |
delete from article;
select setval('article_id_seq', 1, false);
delete from author;
select setval('author_id_seq', 1, false);
delete from orders;
select setval('orders_id_seq', 1, false);
cascade: true

View File

@ -0,0 +1,51 @@
type: bulk
args:
#Author table
- type: run_sql
args:
sql: |
create table author(
id serial primary key,
name text unique
);
- type: track_table
args:
schema: public
name: author
#Article table
- type: run_sql
args:
sql: |
CREATE TABLE article (
id SERIAL PRIMARY KEY,
title TEXT,
content TEXT,
author_id INTEGER REFERENCES author(id),
is_published BOOLEAN,
published_on TIMESTAMP
)
- type: track_table
args:
schema: public
name: article
#Object relationship
- type: create_object_relationship
args:
table: article
name: author
using:
foreign_key_constraint_on: author_id
#Array relationship
- type: create_array_relationship
args:
table: author
name: articles
using:
foreign_key_constraint_on:
table: article
column: author_id

View File

@ -0,0 +1,9 @@
type: bulk
args:
- type: run_sql
args:
sql: |
drop table article;
drop table author;
cascade: true

View File

@ -1,18 +0,0 @@
type: bulk
args:
#Drop relationship first
- type: drop_relationship
args:
relationship: articles
table:
schema: public
name: author
- type: run_sql
args:
sql: |
drop table article
- type: run_sql
args:
sql: |
drop table author

View File

@ -0,0 +1,23 @@
type: bulk
args:
#Insert Author table data
- type: insert
args:
table: author
objects:
- name: Author 1
- name: Author 2
#Insert article table data
- type: insert
args:
table: article
objects:
- content: Sample article content
title: Article 1
- content: Sample article content
title: Article 2
- content: Sample article content
title: Article 3

View File

@ -0,0 +1,12 @@
type: bulk
args:
- type: run_sql
args:
sql: |
delete from article;
select setval('article_id_seq', 1, false);
delete from author;
select setval('author_id_seq', 1, false);

View File

@ -6,8 +6,8 @@ args:
args:
sql: |
create table author(
id serial primary key,
name text unique,
id serial primary key,
name text unique,
bio text,
is_registered boolean not null default false
);
@ -184,37 +184,3 @@ args:
filter:
id: X-Hasura-Resident-Id
#Insert residents
- type: insert
args:
table: resident
objects:
- id: 1
name: Resident 1
age: 21
- id: 2
name: Resident 2
age: 22
#Insert Author table data
- type: insert
args:
table: author
objects:
- name: Author 1
- name: Author 2
#Insert article table data
- type: insert
args:
table: article
objects:
- content: Sample article content
title: Article 1
author_id: 1
- content: Sample article content
title: Article 2
author_id: 1
- content: Sample article content
title: Article 3
author_id: 2

View File

@ -0,0 +1,11 @@
type: bulk
args:
- type: run_sql
args:
sql: |
drop table address;
drop table resident;
drop table article;
drop table author;
cascade: true

View File

@ -1,29 +0,0 @@
type: bulk
args:
#Drop relationship first
- type: drop_relationship
args:
table:
name: author
schema: public
relationship: articles
- type: run_sql
args:
sql: |
drop table address
- type: run_sql
args:
sql: |
drop table resident
- type: run_sql
args:
sql: |
drop table article
- type: run_sql
args:
sql: |
drop table author

View File

@ -0,0 +1,37 @@
type: bulk
args:
#Insert residents
- type: insert
args:
table: resident
objects:
- id: 1
name: Resident 1
age: 21
- id: 2
name: Resident 2
age: 22
#Insert Author table data
- type: insert
args:
table: author
objects:
- name: Author 1
- name: Author 2
#Insert article table data
- type: insert
args:
table: article
objects:
- content: Sample article content
title: Article 1
author_id: 1
- content: Sample article content
title: Article 2
author_id: 1
- content: Sample article content
title: Article 3
author_id: 2

View File

@ -0,0 +1,17 @@
type: bulk
args:
- type: run_sql
args:
sql:
delete from address;
select setval('address_id_seq', 1, false);
delete from resident;
select setval('resident_id_seq', 1, false);
delete from article;
select setval('article_id_seq', 1, false);
delete from author;
select setval('author_id_seq', 1, false);

View File

@ -77,32 +77,3 @@ args:
schema: public
name: products
#Insert Author table data
- type: insert
args:
table: author
objects:
- name: Author 1
- name: Author 2
#Insert Person table data
- type: insert
args:
table: person
objects:
- details:
name:
first: foo
last: bar
address: foobar
#Insert Products table data
- type: insert
args:
table: products
objects:
- name: Product 1
price: 5
- name: Product 2
price: 15

View File

@ -0,0 +1,11 @@
type: bulk
args:
#Drop relationship first
- type: run_sql
args:
sql: |
drop table article;
drop table author;
drop table person;
drop table products;

View File

@ -1,29 +0,0 @@
type: bulk
args:
#Drop relationship first
- type: drop_relationship
args:
table:
name: author
schema: public
relationship: articles
- type: run_sql
args:
sql: |
drop table article
- type: run_sql
args:
sql: |
drop table author
- type: run_sql
args:
sql: |
drop table person
- type: run_sql
args:
sql: |
drop table products

View File

@ -0,0 +1,31 @@
type: bulk
args:
#Insert Author table data
- type: insert
args:
table: author
objects:
- name: Author 1
- name: Author 2
#Insert Person table data
- type: insert
args:
table: person
objects:
- details:
name:
first: foo
last: bar
address: foobar
#Insert Products table data
- type: insert
args:
table: products
objects:
- name: Product 1
price: 5
- name: Product 2
price: 15

View File

@ -0,0 +1,18 @@
type: bulk
args:
#Drop relationship first
- type: run_sql
args:
sql: |
delete from article;
select setval('article_id_seq', 1, false);
delete from author;
select setval('author_id_seq', 1, false);
delete from person;
select setval('person_id_seq', 1, false);
delete from products;
select setval('products_product_id_seq', 1, false);

View File

@ -66,41 +66,6 @@ args:
schema: public
name: person
#Insert Author table data
- type: insert
args:
table: author
objects:
- id: 1
name: Author 1
- id: 2
name: Author 2
#Insert aticle table data
- type: insert
args:
table: article
objects:
- id: 1
version: 1.0.0
content: Sample article 1, content version 1.0.0
title: Article 1
author_id: 1
is_published: true
- id: 2
content: Sample article 1, content version 1.0.1
version: 1.0.1
title: Article 1
author_id: 1
is_published: false
- id: 3
content: Sample article 2, content version 1.0.2
version: 1.0.0
title: Article 2
author_id: 2
is_published: false
#Author select permission for user
- type: create_select_permission
@ -154,11 +119,6 @@ args:
age INTEGER NOT NULL,
city TEXT NOT NULL
);
INSERT INTO resident (name, age, city)
VALUES ('phillips', 25, 'canberra')
, ('george', 26, 'sydney')
, ('clarke', 21, 'perth')
;
- type: track_table
args:
name: resident
@ -172,6 +132,7 @@ args:
permission:
filter: {}
columns: '*'
- type: create_update_permission
args:
table: resident

View File

@ -1,12 +1,5 @@
type: bulk
args:
#Drop relationship first
- type: drop_relationship
args:
table:
name: author
schema: public
relationship: articles
- type: run_sql
args:
@ -15,3 +8,4 @@ args:
DROP TABLE author;
DROP TABLE person;
DROP TABLE resident;
cascade: true

View File

@ -0,0 +1,45 @@
type: bulk
args:
- type: run_sql
args:
sql: |
INSERT INTO resident (name, age, city)
VALUES ('phillips', 25, 'canberra')
, ('george', 26, 'sydney')
, ('clarke', 21, 'perth')
;
#Insert Author table data
- type: insert
args:
table: author
objects:
- id: 1
name: Author 1
- id: 2
name: Author 2
#Insert aticle table data
- type: insert
args:
table: article
objects:
- id: 1
version: 1.0.0
content: Sample article 1, content version 1.0.0
title: Article 1
author_id: 1
is_published: true
- id: 2
content: Sample article 1, content version 1.0.1
version: 1.0.1
title: Article 1
author_id: 1
is_published: false
- id: 3
content: Sample article 2, content version 1.0.2
version: 1.0.0
title: Article 2
author_id: 2
is_published: false

View File

@ -0,0 +1,17 @@
type: bulk
args:
- type: run_sql
args:
sql: |
delete from article;
select setval('article_id_seq', 1, false);
delete from author;
select setval('author_id_seq', 1, false);
delete from person;
select setval('person_id_seq', 1, false);
delete from resident;
select setval('resident_id_seq', 1, false);

View File

@ -1,6 +1,7 @@
psycopg2-binary
sqlalchemy
pytest
pytest-xdist
requests
pyyaml
websocket-client

View File

@ -1,5 +1,6 @@
import pytest
from abc import ABC, abstractmethod
import os
class DefaultTestQueries(ABC):
@ -25,6 +26,31 @@ class DefaultTestQueries(ABC):
def dir(self):
pass
class DefaultTestMutations(ABC):
    """Base class for GraphQL mutation tests.

    Schema setup/teardown (``schema_setup.yaml`` / ``schema_teardown.yaml``)
    runs once per test class; data setup/teardown
    (``values_setup.yaml`` / ``values_teardown.yaml``) runs once per test.
    Subclasses must implement :meth:`dir` to point at the config folder.
    """

    @pytest.fixture(scope='class')
    def schema_transact(self, request, hge_ctx):
        """Create the schema before the class's tests and drop it after."""
        st_code, resp = hge_ctx.v1q_f(self.dir() + '/schema_setup.yaml')
        assert st_code == 200, resp
        yield
        st_code, resp = hge_ctx.v1q_f(self.dir() + '/schema_teardown.yaml')
        assert st_code == 200, resp

    @pytest.fixture(autouse=True)
    def init_values_transact(self, schema_transact, hge_ctx):
        """Insert per-test data before each test and remove it after.

        Both values files are optional; setup and teardown are guarded
        symmetrically so a class that ships neither file still works.
        """
        setup_values_file = self.dir() + '/values_setup.yaml'
        if os.path.isfile(setup_values_file):
            st_code, resp = hge_ctx.v1q_f(setup_values_file)
            assert st_code == 200, resp
        yield
        # Mirror the setup guard: previously teardown ran unconditionally,
        # which crashed when values_teardown.yaml did not exist.
        teardown_values_file = self.dir() + '/values_teardown.yaml'
        if os.path.isfile(teardown_values_file):
            st_code, resp = hge_ctx.v1q_f(teardown_values_file)
            assert st_code == 200, resp

    @abstractmethod
    def dir(self):
        """Return the directory holding this class's yaml config files."""
        pass
class DefaultTestSelectQueries(ABC):

View File

@ -58,6 +58,7 @@ def delete(hge_ctx, table, where_exp, headers = {}):
st_code, resp = hge_ctx.v1q(q, headers = headers)
return st_code, resp
@pytest.mark.usefixtures("evts_webhook")
class TestCreateAndDelete(DefaultTestQueries):
def test_create_delete(self, hge_ctx):
@ -76,7 +77,7 @@ class TestCreateAndDelete(DefaultTestQueries):
class TestCreateEvtQuery(object):
@pytest.fixture(autouse=True)
def transact(self, request, hge_ctx):
def transact(self, request, hge_ctx, evts_webhook):
print("In setup method")
st_code, resp = hge_ctx.v1q_f('queries/event_triggers/basic/setup.yaml')
assert st_code == 200, resp
@ -84,7 +85,7 @@ class TestCreateEvtQuery(object):
st_code, resp = hge_ctx.v1q_f('queries/event_triggers/basic/teardown.yaml')
assert st_code == 200, resp
def test_basic(self, hge_ctx):
def test_basic(self, hge_ctx, evts_webhook):
table = {"schema": "hge_tests", "name": "test_t1"}
init_row = {"c1": 1, "c2": "hello"}
@ -94,7 +95,7 @@ class TestCreateEvtQuery(object):
}
st_code, resp = insert(hge_ctx, table, init_row)
assert st_code == 200, resp
check_event(hge_ctx, "t1_all", table, "INSERT", exp_ev_data)
check_event(hge_ctx, evts_webhook, "t1_all", table, "INSERT", exp_ev_data)
where_exp = {"c1": 1}
set_exp = {"c2": "world"}
@ -104,7 +105,7 @@ class TestCreateEvtQuery(object):
}
st_code, resp = update(hge_ctx, table, where_exp, set_exp)
assert st_code == 200, resp
check_event(hge_ctx, "t1_all", table, "UPDATE", exp_ev_data)
check_event(hge_ctx, evts_webhook, "t1_all", table, "UPDATE", exp_ev_data)
exp_ev_data = {
"old": {"c1": 1, "c2": "world"},
@ -112,13 +113,13 @@ class TestCreateEvtQuery(object):
}
st_code, resp = delete(hge_ctx, table, where_exp)
assert st_code == 200, resp
check_event(hge_ctx, "t1_all", table, "DELETE", exp_ev_data)
check_event(hge_ctx, evts_webhook, "t1_all", table, "DELETE", exp_ev_data)
class TestRetryConf(object):
@pytest.fixture(autouse=True)
def transact(self, request, hge_ctx):
def transact(self, request, hge_ctx, evts_webhook):
print("In setup method")
st_code, resp = hge_ctx.v1q_f('queries/event_triggers/retry_conf/setup.yaml')
assert st_code == 200, resp
@ -126,7 +127,7 @@ class TestRetryConf(object):
st_code, resp = hge_ctx.v1q_f('queries/event_triggers/retry_conf/teardown.yaml')
assert st_code == 200, resp
def test_basic(self, hge_ctx):
def test_basic(self, hge_ctx, evts_webhook):
table = {"schema": "hge_tests", "name": "test_t1"}
init_row = {"c1": 1, "c2": "hello"}
@ -137,10 +138,10 @@ class TestRetryConf(object):
st_code, resp = insert(hge_ctx, table, init_row)
assert st_code == 200, resp
time.sleep(15)
tries = hge_ctx.get_error_queue_size()
tries = evts_webhook.get_error_queue_size()
assert tries == 5, tries
def test_timeout_short(self, hge_ctx):
def test_timeout_short(self, hge_ctx, evts_webhook):
table = {"schema": "hge_tests", "name": "test_t2"}
init_row = {"c1": 1, "c2": "hello"}
@ -151,10 +152,10 @@ class TestRetryConf(object):
st_code, resp = insert(hge_ctx, table, init_row)
assert st_code == 200, resp
time.sleep(20)
tries = hge_ctx.get_error_queue_size()
tries = evts_webhook.get_error_queue_size()
assert tries == 3, tries
def test_timeout_long(self, hge_ctx):
def test_timeout_long(self, hge_ctx, evts_webhook):
table = {"schema": "hge_tests", "name": "test_t3"}
init_row = {"c1": 1, "c2": "hello"}
@ -165,12 +166,12 @@ class TestRetryConf(object):
st_code, resp = insert(hge_ctx, table, init_row)
assert st_code == 200, resp
time.sleep(15)
check_event(hge_ctx, "t3_timeout_long", table, "INSERT", exp_ev_data, webhook_path = "/timeout_long")
check_event(hge_ctx, evts_webhook, "t3_timeout_long", table, "INSERT", exp_ev_data, webhook_path = "/timeout_long")
class TestEvtHeaders(object):
@pytest.fixture(autouse=True)
def transact(self, request, hge_ctx):
def transact(self, request, hge_ctx, evts_webhook):
print("In setup method")
st_code, resp = hge_ctx.v1q_f('queries/event_triggers/headers/setup.yaml')
assert st_code == 200, resp
@ -178,7 +179,7 @@ class TestEvtHeaders(object):
st_code, resp = hge_ctx.v1q_f('queries/event_triggers/headers/teardown.yaml')
assert st_code == 200, resp
def test_basic(self, hge_ctx):
def test_basic(self, hge_ctx, evts_webhook):
table = {"schema": "hge_tests", "name": "test_t1"}
init_row = {"c1": 1, "c2": "hello"}
@ -189,13 +190,13 @@ class TestEvtHeaders(object):
headers = {"X-Header-From-Value": "MyValue", "X-Header-From-Env": "MyEnvValue"}
st_code, resp = insert(hge_ctx, table, init_row)
assert st_code == 200, resp
check_event(hge_ctx, "t1_all", table, "INSERT", exp_ev_data, headers = headers)
check_event(hge_ctx, evts_webhook, "t1_all", table, "INSERT", exp_ev_data, headers = headers)
class TestUpdateEvtQuery(object):
@pytest.fixture(autouse=True)
def transact(self, request, hge_ctx):
def transact(self, request, hge_ctx, evts_webhook):
print("In setup method")
st_code, resp = hge_ctx.v1q_f('queries/event_triggers/update_query/create-setup.yaml')
assert st_code == 200, resp
@ -206,7 +207,7 @@ class TestUpdateEvtQuery(object):
st_code, resp = hge_ctx.v1q_f('queries/event_triggers/update_query/teardown.yaml')
assert st_code == 200, resp
def test_update_basic(self, hge_ctx):
def test_update_basic(self, hge_ctx, evts_webhook):
table = {"schema": "hge_tests", "name": "test_t1"}
init_row = {"c1": 1, "c2": "hello"}
@ -217,7 +218,7 @@ class TestUpdateEvtQuery(object):
st_code, resp = insert(hge_ctx, table, init_row)
assert st_code == 200, resp
with pytest.raises(queue.Empty):
check_event(hge_ctx, "t1_cols", table, "INSERT", exp_ev_data, webhook_path = "/new")
check_event(hge_ctx, evts_webhook, "t1_cols", table, "INSERT", exp_ev_data, webhook_path = "/new")
where_exp = {"c1": 1}
set_exp = {"c2": "world"}
@ -225,7 +226,7 @@ class TestUpdateEvtQuery(object):
st_code, resp = update(hge_ctx, table, where_exp, set_exp)
assert st_code == 200, resp
with pytest.raises(queue.Empty):
check_event(hge_ctx, "t1_cols", table, "UPDATE", exp_ev_data, webhook_path = "/new")
check_event(hge_ctx, evts_webhook, "t1_cols", table, "UPDATE", exp_ev_data, webhook_path = "/new")
where_exp = {"c1": 1}
set_exp = {"c1": 2}
@ -235,7 +236,7 @@ class TestUpdateEvtQuery(object):
}
st_code, resp = update(hge_ctx, table, where_exp, set_exp)
assert st_code == 200, resp
check_event(hge_ctx, "t1_cols", table, "UPDATE", exp_ev_data, webhook_path ="/new")
check_event(hge_ctx, evts_webhook, "t1_cols", table, "UPDATE", exp_ev_data, webhook_path ="/new")
where_exp = {"c1": 2}
exp_ev_data = {
@ -244,13 +245,13 @@ class TestUpdateEvtQuery(object):
}
st_code, resp = delete(hge_ctx, table, where_exp)
assert st_code == 200, resp
check_event(hge_ctx, "t1_cols", table, "DELETE", exp_ev_data, webhook_path = "/new")
check_event(hge_ctx, evts_webhook, "t1_cols", table, "DELETE", exp_ev_data, webhook_path = "/new")
class TestDeleteEvtQuery(object):
@pytest.fixture(autouse=True)
def transact(self, request, hge_ctx):
def transact(self, request, hge_ctx, evts_webhook):
print("In setup method")
st_code, resp = hge_ctx.v1q_f('queries/event_triggers/basic/setup.yaml')
assert st_code == 200, resp
@ -260,7 +261,7 @@ class TestDeleteEvtQuery(object):
st_code, resp = hge_ctx.v1q_f('queries/event_triggers/delete_query/teardown.yaml')
assert st_code == 200, resp
def test_delete_basic(self, hge_ctx):
def test_delete_basic(self, hge_ctx, evts_webhook):
table = {"schema": "hge_tests", "name": "test_t1"}
init_row = {"c1": 1, "c2": "hello"}
@ -271,7 +272,7 @@ class TestDeleteEvtQuery(object):
st_code, resp = insert(hge_ctx, table, init_row)
assert st_code == 200, resp
with pytest.raises(queue.Empty):
check_event(hge_ctx, "t1_all", table, "INSERT", exp_ev_data)
check_event(hge_ctx, evts_webhook, "t1_all", table, "INSERT", exp_ev_data)
where_exp = {"c1": 1}
set_exp = {"c2": "world"}
@ -282,7 +283,7 @@ class TestDeleteEvtQuery(object):
st_code, resp = update(hge_ctx, table, where_exp, set_exp)
assert st_code == 200, resp
with pytest.raises(queue.Empty):
check_event(hge_ctx, "t1_all", table, "UPDATE", exp_ev_data)
check_event(hge_ctx, evts_webhook, "t1_all", table, "UPDATE", exp_ev_data)
exp_ev_data = {
"old": {"c1": 1, "c2": "world"},
@ -291,13 +292,13 @@ class TestDeleteEvtQuery(object):
st_code, resp = delete(hge_ctx, table, where_exp)
assert st_code == 200, resp
with pytest.raises(queue.Empty):
check_event(hge_ctx, "t1_all", table, "DELETE", exp_ev_data)
check_event(hge_ctx, evts_webhook, "t1_all", table, "DELETE", exp_ev_data)
class TestEvtSelCols:
@pytest.fixture(autouse=True)
def transact(self, request, hge_ctx):
def transact(self, request, hge_ctx, evts_webhook):
print("In setup method")
st_code, resp = hge_ctx.v1q_f('queries/event_triggers/selected_cols/setup.yaml')
assert st_code == 200, resp
@ -305,7 +306,7 @@ class TestEvtSelCols:
st_code, resp = hge_ctx.v1q_f('queries/event_triggers/selected_cols/teardown.yaml')
assert st_code == 200, resp
def test_selected_cols(self, hge_ctx):
def test_selected_cols(self, hge_ctx, evts_webhook):
table = {"schema": "hge_tests", "name": "test_t1"}
init_row = {"c1": 1, "c2": "hello"}
@ -315,7 +316,7 @@ class TestEvtSelCols:
}
st_code, resp = insert(hge_ctx, table, init_row)
assert st_code == 200, resp
check_event(hge_ctx, "t1_cols", table, "INSERT", exp_ev_data)
check_event(hge_ctx, evts_webhook, "t1_cols", table, "INSERT", exp_ev_data)
where_exp = {"c1": 1}
set_exp = {"c2": "world"}
@ -323,7 +324,7 @@ class TestEvtSelCols:
st_code, resp = update(hge_ctx, table, where_exp, set_exp)
assert st_code == 200, resp
with pytest.raises(queue.Empty):
check_event(hge_ctx, "t1_cols", table, "UPDATE", exp_ev_data)
check_event(hge_ctx, evts_webhook, "t1_cols", table, "UPDATE", exp_ev_data)
where_exp = {"c1": 1}
set_exp = {"c1": 2}
@ -333,7 +334,7 @@ class TestEvtSelCols:
}
st_code, resp = update(hge_ctx, table, where_exp, set_exp)
assert st_code == 200, resp
check_event(hge_ctx, "t1_cols", table, "UPDATE", exp_ev_data)
check_event(hge_ctx, evts_webhook, "t1_cols", table, "UPDATE", exp_ev_data)
where_exp = {"c1": 2}
exp_ev_data = {
@ -342,9 +343,9 @@ class TestEvtSelCols:
}
st_code, resp = delete(hge_ctx, table, where_exp)
assert st_code == 200, resp
check_event(hge_ctx, "t1_cols", table, "DELETE", exp_ev_data)
check_event(hge_ctx, evts_webhook, "t1_cols", table, "DELETE", exp_ev_data)
def test_selected_cols_dep(self, hge_ctx):
def test_selected_cols_dep(self, hge_ctx, evts_webhook):
st_code, resp = hge_ctx.v1q({
"type": "run_sql",
"args": {
@ -366,7 +367,7 @@ class TestEvtSelCols:
class TestEvtInsertOnly:
@pytest.fixture(autouse=True)
def transact(self, request, hge_ctx):
def transact(self, request, hge_ctx, evts_webhook):
print("In setup method")
st_code, resp = hge_ctx.v1q_f('queries/event_triggers/insert_only/setup.yaml')
assert st_code == 200, resp
@ -374,7 +375,7 @@ class TestEvtInsertOnly:
st_code, resp = hge_ctx.v1q_f('queries/event_triggers/insert_only/teardown.yaml')
assert st_code == 200, resp
def test_insert_only(self, hge_ctx):
def test_insert_only(self, hge_ctx, evts_webhook):
table = {"schema": "hge_tests", "name": "test_t1"}
init_row = {"c1": 1, "c2": "hello"}
@ -384,7 +385,7 @@ class TestEvtInsertOnly:
}
st_code, resp = insert(hge_ctx, table, init_row)
assert st_code == 200, resp
check_event(hge_ctx, "t1_insert", table, "INSERT", exp_ev_data)
check_event(hge_ctx, evts_webhook, "t1_insert", table, "INSERT", exp_ev_data)
where_exp = {"c1": 1}
set_exp = {"c2": "world"}
@ -395,7 +396,7 @@ class TestEvtInsertOnly:
st_code, resp = update(hge_ctx, table, where_exp, set_exp)
assert st_code == 200, resp
with pytest.raises(queue.Empty):
check_event(hge_ctx, "t1_insert", table, "UPDATE", exp_ev_data)
check_event(hge_ctx, evts_webhook, "t1_insert", table, "UPDATE", exp_ev_data)
exp_ev_data = {
"old": {"c1": 1, "c2": "world"},
@ -404,13 +405,13 @@ class TestEvtInsertOnly:
st_code, resp = delete(hge_ctx, table, where_exp)
assert st_code == 200, resp
with pytest.raises(queue.Empty):
check_event(hge_ctx, "t1_insert", table, "DELETE", exp_ev_data)
check_event(hge_ctx, evts_webhook, "t1_insert", table, "DELETE", exp_ev_data)
class TestEvtSelPayload:
@pytest.fixture(autouse=True)
def transact(self, request, hge_ctx):
def transact(self, request, hge_ctx, evts_webhook):
print("In setup method")
st_code, resp = hge_ctx.v1q_f('queries/event_triggers/selected_payload/setup.yaml')
assert st_code == 200, resp
@ -418,7 +419,7 @@ class TestEvtSelPayload:
st_code, resp = hge_ctx.v1q_f('queries/event_triggers/selected_payload/teardown.yaml')
assert st_code == 200, resp
def test_selected_payload(self, hge_ctx):
def test_selected_payload(self, hge_ctx, evts_webhook):
table = {"schema": "hge_tests", "name": "test_t1"}
init_row = {"c1": 1, "c2": "hello"}
@ -428,7 +429,7 @@ class TestEvtSelPayload:
}
st_code, resp = insert(hge_ctx, table, init_row)
assert st_code == 200, resp
check_event(hge_ctx, "t1_payload", table, "INSERT", exp_ev_data)
check_event(hge_ctx, evts_webhook, "t1_payload", table, "INSERT", exp_ev_data)
where_exp = {"c1": 1}
set_exp = {"c2": "world"}
@ -438,7 +439,7 @@ class TestEvtSelPayload:
}
st_code, resp = update(hge_ctx, table, where_exp, set_exp)
assert st_code == 200, resp
check_event(hge_ctx, "t1_payload", table, "UPDATE", exp_ev_data)
check_event(hge_ctx, evts_webhook, "t1_payload", table, "UPDATE", exp_ev_data)
where_exp = {"c1": 1}
set_exp = {"c1": 2}
@ -448,7 +449,7 @@ class TestEvtSelPayload:
}
st_code, resp = update(hge_ctx, table, where_exp, set_exp)
assert st_code == 200, resp
check_event(hge_ctx, "t1_payload", table, "UPDATE", exp_ev_data)
check_event(hge_ctx, evts_webhook, "t1_payload", table, "UPDATE", exp_ev_data)
where_exp = {"c1": 2}
exp_ev_data = {
@ -457,7 +458,7 @@ class TestEvtSelPayload:
}
st_code, resp = delete(hge_ctx, table, where_exp)
assert st_code == 200, resp
check_event(hge_ctx, "t1_payload", table, "DELETE", exp_ev_data)
check_event(hge_ctx, evts_webhook, "t1_payload", table, "DELETE", exp_ev_data)
def test_selected_payload_dep(self, hge_ctx):
st_code, resp = hge_ctx.v1q({
@ -481,7 +482,7 @@ class TestEvtSelPayload:
class TestWebhookEnv(object):
@pytest.fixture(autouse=True)
def transact(self, request, hge_ctx):
def transact(self, request, hge_ctx, evts_webhook):
print("In setup method")
st_code, resp = hge_ctx.v1q_f('queries/event_triggers/webhook_env/setup.yaml')
assert st_code == 200, resp
@ -489,7 +490,7 @@ class TestWebhookEnv(object):
st_code, resp = hge_ctx.v1q_f('queries/event_triggers/webhook_env/teardown.yaml')
assert st_code == 200, resp
def test_basic(self, hge_ctx):
def test_basic(self, hge_ctx, evts_webhook):
table = {"schema": "hge_tests", "name": "test_t1"}
init_row = {"c1": 1, "c2": "hello"}
@ -499,7 +500,7 @@ class TestWebhookEnv(object):
}
st_code, resp = insert(hge_ctx, table, init_row)
assert st_code == 200, resp
check_event(hge_ctx, "t1_all", table, "INSERT", exp_ev_data)
check_event(hge_ctx, evts_webhook, "t1_all", table, "INSERT", exp_ev_data)
where_exp = {"c1": 1}
set_exp = {"c2": "world"}
@ -509,7 +510,7 @@ class TestWebhookEnv(object):
}
st_code, resp = update(hge_ctx, table, where_exp, set_exp)
assert st_code == 200, resp
check_event(hge_ctx, "t1_all", table, "UPDATE", exp_ev_data)
check_event(hge_ctx, evts_webhook, "t1_all", table, "UPDATE", exp_ev_data)
exp_ev_data = {
"old": {"c1": 1, "c2": "world"},
@ -517,12 +518,12 @@ class TestWebhookEnv(object):
}
st_code, resp = delete(hge_ctx, table, where_exp)
assert st_code == 200, resp
check_event(hge_ctx, "t1_all", table, "DELETE", exp_ev_data)
check_event(hge_ctx, evts_webhook, "t1_all", table, "DELETE", exp_ev_data)
class TestSessionVariables(object):
@pytest.fixture(autouse=True)
def transact(self, request, hge_ctx):
def transact(self, request, hge_ctx, evts_webhook):
print("In setup method")
st_code, resp = hge_ctx.v1q_f('queries/event_triggers/basic/setup.yaml')
assert st_code == 200, resp
@ -530,7 +531,7 @@ class TestSessionVariables(object):
st_code, resp = hge_ctx.v1q_f('queries/event_triggers/basic/teardown.yaml')
assert st_code == 200, resp
def test_basic(self, hge_ctx):
def test_basic(self, hge_ctx, evts_webhook):
table = {"schema": "hge_tests", "name": "test_t1"}
init_row = {"c1": 1, "c2": "hello"}
@ -541,7 +542,7 @@ class TestSessionVariables(object):
session_variables = { 'x-hasura-role': 'admin', 'x-hasura-allowed-roles': "['admin','user']", 'x-hasura-user-id': '1'}
st_code, resp = insert(hge_ctx, table, init_row, headers = session_variables)
assert st_code == 200, resp
check_event(hge_ctx, "t1_all", table, "INSERT", exp_ev_data, session_variables = session_variables)
check_event(hge_ctx, evts_webhook, "t1_all", table, "INSERT", exp_ev_data, session_variables = session_variables)
where_exp = {"c1": 1}
set_exp = {"c2": "world"}
@ -553,7 +554,7 @@ class TestSessionVariables(object):
st_code, resp = update(hge_ctx, table, where_exp, set_exp, headers = session_variables)
assert st_code == 200, resp
session_variables.pop('X-Random-Header')
check_event(hge_ctx, "t1_all", table, "UPDATE", exp_ev_data, session_variables = session_variables)
check_event(hge_ctx, evts_webhook, "t1_all", table, "UPDATE", exp_ev_data, session_variables = session_variables)
exp_ev_data = {
"old": {"c1": 1, "c2": "world"},
@ -561,4 +562,4 @@ class TestSessionVariables(object):
}
st_code, resp = delete(hge_ctx, table, where_exp)
assert st_code == 200, resp
check_event(hge_ctx, "t1_all", table, "DELETE", exp_ev_data)
check_event(hge_ctx, evts_webhook, "t1_all", table, "DELETE", exp_ev_data)

View File

@ -7,9 +7,8 @@ class TestGraphqlIntrospection(DefaultTestSelectQueries):
def test_introspection(self, hge_ctx):
with open(self.dir() + "/introspection.yaml") as c:
conf = yaml.load(c)
code, resp = check_query(hge_ctx, conf)
assert code == 200, resp
conf = yaml.safe_load(c)
resp = check_query(hge_ctx, conf)
hasArticle = False
hasArticleAuthorFKRel = False
hasArticleAuthorManualRel = False

View File

@ -1,69 +1,61 @@
import pytest
import yaml
from validate import check_query_f
from super_classes import DefaultTestQueries
from super_classes import DefaultTestQueries, DefaultTestMutations
class TestGraphQLInsert(DefaultTestQueries):
@pytest.mark.parametrize("transport", ['http','websocket'])
class TestGraphQLInsert(DefaultTestMutations):
def test_inserts_author_article(self, hge_ctx):
check_query_f(hge_ctx, self.dir() + "/author_article.yaml")
hge_ctx.may_skip_test_teardown = True
def test_inserts_author_article(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + "/author_article.yaml", transport)
def test_inserts_various_postgres_types(self, hge_ctx):
def test_inserts_various_postgres_types(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + "/insert_various_postgres_types.yaml")
hge_ctx.may_skip_test_teardown = True
@pytest.mark.xfail(reason="Refer https://github.com/hasura/graphql-engine/issues/348")
def test_insert_into_array_col_with_array_input(self, hge_ctx):
def test_insert_into_array_col_with_array_input(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + "/insert_into_array_col_with_array_input.yaml")
hge_ctx.may_skip_test_teardown = True
def test_insert_using_variable(self, hge_ctx):
def test_insert_using_variable(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + "/person_jsonb_variable.yaml")
hge_ctx.may_skip_test_teardown = True
def test_insert_using_array_variable(self, hge_ctx):
def test_insert_using_array_variable(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + "/person_jsonb_variable_array.yaml")
hge_ctx.may_skip_test_teardown = True
def test_insert_person(self, hge_ctx):
def test_insert_person(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + "/person_jsonb.yaml")
hge_ctx.may_skip_test_teardown = True
def test_insert_person_array(self, hge_ctx):
def test_insert_person_array(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + "/person_jsonb_array.yaml")
hge_ctx.may_skip_test_teardown = True
def test_insert_null_col_value(self, hge_ctx):
def test_insert_null_col_value(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + "/order_col_shipped_null.yaml")
hge_ctx.may_skip_test_teardown = True
@classmethod
def dir(cls):
return "queries/graphql_mutation/insert/basic"
class TestGraphqlInsertOnConflict(DefaultTestQueries):
@pytest.mark.parametrize("transport", ['http','websocket'])
class TestGraphqlInsertOnConflict(DefaultTestMutations):
def test_on_conflict_update(self, hge_ctx):
def test_on_conflict_update(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + "/article_on_conflict_update.yaml")
def test_on_conflict_ignore(self, hge_ctx):
def test_on_conflict_ignore(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + "/article_on_conflict_ignore_constraint.yaml")
hge_ctx.may_skip_test_teardown = True
def test_on_conflict_update_empty_cols(self, hge_ctx):
def test_on_conflict_update_empty_cols(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + "/article_on_conflict_empty_update_columns.yaml")
hge_ctx.may_skip_test_teardown = True
def test_err_missing_article_constraint(self, hge_ctx):
def test_err_missing_article_constraint(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + "/article_on_conflict_error_missing_article_constraint.yaml")
def test_err_unexpected_action(self, hge_ctx):
def test_err_unexpected_action(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + "/article_unexpected_on_conflict_action.yaml")
def test_err_unexpected_constraint(self, hge_ctx):
def test_err_unexpected_constraint(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + "/article_unexpected_on_conflict_constraint_error.yaml")
@classmethod
@ -71,64 +63,64 @@ class TestGraphqlInsertOnConflict(DefaultTestQueries):
return "queries/graphql_mutation/insert/onconflict"
@pytest.mark.parametrize("transport", ['http', 'websocket'])
class TestGraphqlInsertPermission(DefaultTestMutations):
    """Role-based permission checks on insert mutations.

    Covers per-role on_conflict rules, insert check expressions, and
    column presets, over both HTTP and websocket transports.
    """

    def test_user_role_on_conflict_update(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/article_on_conflict_user_role.yaml", transport)

    def test_user_role_on_conflict_constraint_on_error(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/article_on_conflict_constraint_on_user_role_error.yaml", transport)

    def test_user_role_on_conflict_ignore(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/author_on_conflict_ignore_user_role.yaml", transport)
        # Ignored conflict leaves data untouched; teardown may be skipped.
        hge_ctx.may_skip_test_teardown = True

    def test_user_err_missing_article_constraint(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/user_article_on_conflict_error_missing_article_constraint.yaml", transport)

    def test_user_err_unexpected_action(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/user_article_error_unexpected_on_conflict_action.yaml", transport)

    def test_user_err_unexpected_constraint(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/user_article_unexpected_on_conflict_constraint_error.yaml", transport)

    def test_role_has_no_permissions_err(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/address_permission_error.yaml", transport)

    def test_author_user_role_insert_check_perm_success(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/author_user_role_insert_check_perm_success.yaml", transport)

    def test_user_role_insert_check_is_registered_fail(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/author_user_role_insert_check_is_registered_fail.yaml", transport)

    def test_user_role_insert_check_user_id_fail(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/author_user_role_insert_check_user_id_fail.yaml", transport)

    def test_student_role_insert_check_bio_success(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/author_student_role_insert_check_bio_success.yaml", transport)

    def test_student_role_insert_check_bio_fail(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/author_student_role_insert_check_bio_fail.yaml", transport)

    def test_company_user_role_insert(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/company_user_role.yaml", transport)

    def test_company_user_role_insert_on_conflict(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/company_user_role_on_conflict.yaml", transport)

    def test_resident_user_role_insert(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/resident_user.yaml", transport)

    def test_resident_infant_role_insert(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/resident_infant.yaml", transport)

    def test_resident_infant_role_insert_fail(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/resident_infant_fail.yaml", transport)

    def test_resident_5_modifies_resident_6_upsert(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/resident_5_modifies_resident_6_upsert.yaml", transport)

    def test_blog_on_conflict_update_preset(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/blog_on_conflict_update_preset.yaml", transport)

    @classmethod
    def dir(cls):
        # Config folder with schema_/values_ setup and teardown YAML files.
        return "queries/graphql_mutation/insert/permissions"
@pytest.mark.parametrize("transport", ['http', 'websocket'])
class TestGraphqlInsertConstraints(DefaultTestQueries):
    """Inserts that violate NOT NULL / UNIQUE constraints must error.

    NOTE(review): this class keeps the DefaultTestQueries base (per-test
    schema setup), unlike its siblings which moved to DefaultTestMutations
    — presumably because constraint-violation tests never commit data.
    """

    def test_address_not_null_constraint_err(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/address_not_null_constraint_error.yaml", transport)

    def test_insert_unique_constraint_err(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/author_unique_constraint_error.yaml", transport)

    @classmethod
    def dir(cls):
        # Config folder for this class's YAML fixtures.
        return "queries/graphql_mutation/insert/constraints"
@pytest.mark.parametrize("transport", ['http', 'websocket'])
class TestGraphqlInsertGeoJson(DefaultTestMutations):
    """Insert mutations for PostGIS geometry columns via GeoJSON values.

    Covers every GeoJSON geometry type plus the malformed-input error
    cases, over both HTTP and websocket transports.
    """

    def test_insert_point_landmark(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/insert_landmark.yaml", transport)
        hge_ctx.may_skip_test_teardown = True

    def test_insert_3d_point_drone_loc(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/insert_drone_3d_location.yaml", transport)
        hge_ctx.may_skip_test_teardown = True

    def test_insert_landmark_single_position_err(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/insert_landmark_single_position_err.yaml", transport)

    def test_insert_line_string_road(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/insert_road.yaml", transport)
        hge_ctx.may_skip_test_teardown = True

    def test_insert_road_single_point_err(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/insert_road_single_point_err.yaml", transport)

    def test_insert_multi_point_service_locations(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/insert_service_locations.yaml", transport)
        hge_ctx.may_skip_test_teardown = True

    def test_insert_multi_line_string_route(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/insert_route.yaml", transport)
        hge_ctx.may_skip_test_teardown = True

    def test_insert_polygon(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/insert_area.yaml", transport)
        hge_ctx.may_skip_test_teardown = True

    def test_insert_linear_ring_less_than_4_points_err(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/insert_area_less_than_4_points_err.yaml", transport)

    def test_insert_linear_ring_last_point_not_equal_to_first_err(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/insert_linear_ring_last_point_not_equal_to_first_err.yaml", transport)

    def test_insert_multi_polygon_compounds(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/insert_compounds.yaml", transport)
        hge_ctx.may_skip_test_teardown = True

    def test_insert_geometry_collection(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/insert_geometry_collection.yaml", transport)
        hge_ctx.may_skip_test_teardown = True

    def test_insert_unexpected_geometry_type_err(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/insert_geometry_unexpected_type_err.yaml", transport)

    @classmethod
    def dir(cls):
        # Config folder with schema_/values_ setup and teardown YAML files.
        return "queries/graphql_mutation/insert/geojson"
@pytest.mark.parametrize("transport", ['http', 'websocket'])
class TestGraphqlNestedInserts(DefaultTestMutations):
    """Nested insert mutations across object/array relationships.

    Exercises author<->articles nesting, empty/null nested payloads, and
    upsert restrictions on nested inserts, over both transports.
    """

    def test_author_with_articles(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/author_with_articles.yaml", transport)

    def test_author_with_articles_empty(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/author_with_articles_empty.yaml", transport)

    def test_author_with_articles_null(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/author_with_articles_null.yaml", transport)

    def test_author_with_articles_author_id_fail(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/author_with_articles_author_id_fail.yaml", transport)

    def test_articles_with_author(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/articles_with_author.yaml", transport)

    def test_articles_with_author_author_id_fail(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/articles_with_author_author_id_fail.yaml", transport)

    def test_author_upsert_articles_fail(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/author_upsert_articles_fail.yaml", transport)

    def test_articles_author_upsert_fail(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/articles_author_upsert_fail.yaml", transport)

    def test_articles_with_author_returning(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/articles_with_author_returning.yaml", transport)

    @classmethod
    def dir(cls):
        # Config folder with schema_/values_ setup and teardown YAML files.
        return "queries/graphql_mutation/insert/nested"
@pytest.mark.parametrize("transport", ['http', 'websocket'])
class TestGraphqlInsertViews(DefaultTestMutations):
    """Insert mutations targeting SQL views.

    Simple (insertable) views should accept inserts; complex views
    should fail — directly and through nested inserts.
    """

    def test_insert_view_author_simple(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/insert_view_author_simple.yaml", transport)

    def test_insert_view_author_complex_fail(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/insert_view_author_complex_fail.yaml", transport)

    def test_nested_insert_article_author_simple_view(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/nested_insert_article_author_simple_view.yaml", transport)

    def test_nested_insert_article_author_complex_view_fail(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/nested_insert_article_author_complex_view_fail.yaml", transport)

    @classmethod
    def dir(cls):
        # Config folder with schema_/values_ setup and teardown YAML files.
        return "queries/graphql_mutation/insert/views"
@pytest.mark.parametrize("transport", ['http', 'websocket'])
class TestGraphqlUpdateBasic(DefaultTestMutations):
    """Basic update mutations: _set, _inc, and operator error cases."""

    def test_set_author_name(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/author_set_name.yaml", transport)

    def test_empty_set_author(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/author_empty_set.yaml", transport)
        # An empty _set changes nothing; teardown may be skipped.
        hge_ctx.may_skip_test_teardown = True

    def test_set_person_details(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/person_set_details.yaml", transport)

    def test_person_id_inc(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/person_inc.yaml", transport)

    def test_no_operator_err(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/person_error_no_operator.yaml", transport)

    @classmethod
    def dir(cls):
        # Config folder with schema_/values_ setup and teardown YAML files.
        return "queries/graphql_mutation/update/basic"
@pytest.mark.parametrize("transport", ['http', 'websocket'])
class TestGraphqlUpdateJsonB(DefaultTestMutations):
    """Update mutations using the jsonb operators.

    Covers _append, _prepend, _delete_at_path, _delete_elem and
    _delete_key, over both HTTP and websocket transports.
    """

    def test_jsonb_append_object(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/person_append_object.yaml", transport)

    def test_jsonb_append_array(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/person_append_array.yaml", transport)

    def test_jsonb_prepend_array(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/person_prepend_array.yaml", transport)

    def test_jsonb_delete_at_path(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/person_delete_at_path.yaml", transport)

    def test_jsonb_delete_array_element(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/person_delete_array_element.yaml", transport)

    def test_jsonb_delete_key(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/person_delete_key.yaml", transport)

    @classmethod
    def dir(cls):
        # Config folder with schema_/values_ setup and teardown YAML files.
        return "queries/graphql_mutation/update/jsonb"
@pytest.mark.parametrize("transport", ['http', 'websocket'])
class TestGraphqlUpdatePermissions(DefaultTestMutations):
    """Role-based permission checks on update mutations.

    Verifies which rows/columns a user role may update and that column
    presets (literal and session-variable) are applied.
    """

    def test_user_can_update_unpublished_article(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/user_can_update_unpublished_article.yaml", transport)

    def test_user_cannot_update_published_version_col(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/user_cannot_update_published_article_version.yaml", transport)
        # Permission failure mutates nothing; teardown may be skipped.
        hge_ctx.may_skip_test_teardown = True

    def test_user_cannot_update_another_users_article(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/user_cannot_update_another_users_article.yaml", transport)
        hge_ctx.may_skip_test_teardown = True

    def test_user_cannot_update_id_col(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/user_cannot_update_id_col_article.yaml", transport)
        hge_ctx.may_skip_test_teardown = True

    def test_user_update_resident_preset(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + '/user_update_resident_preset.yaml', transport)

    def test_user_update_resident_preset_session_var(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + '/user_update_resident_preset_session_var.yaml', transport)

    @classmethod
    def dir(cls):
        # Config folder with schema_/values_ setup and teardown YAML files.
        return "queries/graphql_mutation/update/permissions"
@pytest.mark.parametrize("transport", ['http', 'websocket'])
class TestGraphqlDeleteBasic(DefaultTestMutations):
    """Basic delete mutations, including returning clauses and
    relationship traversal in the returning set."""

    def test_article_delete(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/article.yaml", transport)

    def test_article_delete_returning(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/article_returning.yaml", transport)

    def test_article_delete_returning_author(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/article_returning_author.yaml", transport)

    @classmethod
    def dir(cls):
        # Config folder with schema_/values_ setup and teardown YAML files.
        return "queries/graphql_mutation/delete/basic"
@pytest.mark.parametrize("transport", ['http', 'websocket'])
class TestGraphqlDeleteConstraints(DefaultTestMutations):
    """Deletes that would break referential integrity must error."""

    def test_author_delete_foreign_key_violation(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/author_foreign_key_violation.yaml", transport)

    @classmethod
    def dir(cls):
        # Config folder with schema_/values_ setup and teardown YAML files.
        return "queries/graphql_mutation/delete/constraints"
class TestGraphqlDeletePermissions(DefaultTestQueries):
@pytest.mark.parametrize("transport", ['http','websocket'])
class TestGraphqlDeletePermissions(DefaultTestMutations):
def test_author_can_delete_his_articles(self, hge_ctx):
def test_author_can_delete_his_articles(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + "/author_can_delete_his_articles.yaml")
def test_author_cannot_delete_other_users_articles(self, hge_ctx):
def test_author_cannot_delete_other_users_articles(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + "/author_cannot_delete_other_users_articles.yaml")
hge_ctx.may_skip_test_teardown = True
def test_resident_delete_without_select_perm_fail(self, hge_ctx):
def test_resident_delete_without_select_perm_fail(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + "/resident_delete_without_select_perm_fail.yaml")
hge_ctx.may_skip_test_teardown = True
@classmethod
def dir(cls):

# NOTE: diff view truncated here — remaining changed files are not shown in this chunk.