server: generalize integration tests

Co-authored-by: Antoine Leblanc <1618949+nicuveo@users.noreply.github.com>
GitOrigin-RevId: 647edb8d293bf783c504b7e15ce02c56858b3b72
Abby Sassel 2021-03-11 18:17:41 +00:00 committed by hasura-bot
parent 067a38f74d
commit 64d52f5fa3
20 changed files with 346 additions and 37 deletions

View File

@@ -142,6 +142,17 @@ else
echo_warn "Pyenv not installed. Proceeding with system python version: $(python3 --version)"
fi
function cleanup_any_backends(){
# run cleanup only if there are any backend containers running
if ( $DOCKER_PSQL -c '\l' ) &>/dev/null; then
cleanup_postgres
fi
if $DOCKER_MSSQL -Q "SELECT 1" &>/dev/null; then
cleanup_mssql
fi
}
####################################
### Shared environment stuff ###
####################################
@@ -376,9 +387,12 @@ EOL
fi
case "$MODE" in
test|postgres)
postgres)
cleanup_postgres
;;
test)
cleanup_any_backends
;;
graphql-engine)
;;
esac
@@ -413,10 +427,17 @@ fi
### MSSQL Container ###
#################################
function cleanup_mssql(){
echo_pretty "Removing $MSSQL_CONTAINER_NAME and its volumes in 5 seconds!"
echo_pretty " PRESS CTRL-C TO ABORT removal, or ENTER to clean up right away"
read -t5 || true
docker stop "$MSSQL_CONTAINER_NAME"
docker rm "$MSSQL_CONTAINER_NAME"
}
function launch_mssql_container(){
echo_pretty "Launching MSSQL container: $MSSQL_CONTAINER_NAME"
docker run --rm --name $MSSQL_CONTAINER_NAME --net=host \
-e 'ACCEPT_EULA=Y' -e "SA_PASSWORD=$MSSQL_PASSWORD" \
docker run --name $MSSQL_CONTAINER_NAME -e 'ACCEPT_EULA=Y' -e "SA_PASSWORD=$MSSQL_PASSWORD" \
-p 127.0.0.1:"$MSSQL_PORT":1433 -d mcr.microsoft.com/mssql/server:2019-CU8-ubuntu-16.04
# Since launching the SQL Server container worked, we can set up cleanup routines. This will catch CTRL-C
@@ -429,12 +450,11 @@ function launch_mssql_container(){
fi
case "$MODE" in
test|mssql)
echo_pretty "Removing $MSSQL_CONTAINER_NAME and its volumes in 5 seconds!"
echo_pretty " PRESS CTRL-C TO ABORT removal, or ENTER to clean up right away"
read -t5 || true
docker stop "$MSSQL_CONTAINER_NAME"
# container will be removed automatically as it was started using the --rm option
mssql)
cleanup_mssql
;;
test)
cleanup_any_backends
;;
graphql-engine)
;;
@@ -498,10 +518,16 @@ elif [ "$MODE" = "test" ]; then
fi
if [ "$RUN_INTEGRATION_TESTS" = true ]; then
launch_mssql_container
wait_mssql
GRAPHQL_ENGINE_TEST_LOG=/tmp/hasura-dev-test-engine.log
echo_pretty "Starting graphql-engine, logging to $GRAPHQL_ENGINE_TEST_LOG"
export HASURA_GRAPHQL_SERVER_PORT=8088
cabal new-run --project-file=cabal.project.dev-sh -- exe:graphql-engine --database-url="$POSTGRES_DB_URL" serve --stringify-numeric-types \
# Use the --metadata-database-url flag (instead of --database-url) so that sources for multiple backends can be added via the metadata API
cabal new-run --project-file=cabal.project.dev-sh -- exe:graphql-engine \
--metadata-database-url="$POSTGRES_DB_URL" serve --stringify-numeric-types \
--enable-console --console-assets-dir ../console/static/dist \
&> "$GRAPHQL_ENGINE_TEST_LOG" & GRAPHQL_ENGINE_PID=$!
@@ -514,8 +540,26 @@ elif [ "$MODE" = "test" ]; then
exit 666
fi
done
echo ""
echo " Ok"
METADATA_URL=http://127.0.0.1:$HASURA_GRAPHQL_SERVER_PORT/v1/metadata
echo ""
echo "Adding Postgres source"
curl "$METADATA_URL" \
--data-raw '{"type":"pg_add_source","args":{"name":"default","configuration":{"connection_info":{"database_url":"'"$POSTGRES_DB_URL"'","pool_settings":{}}}}}'
echo ""
echo "Adding SQL Server source"
curl "$METADATA_URL" \
--data-raw '{"type":"mssql_add_source","args":{"name":"mssql","configuration":{"connection_info":{"connection_string":"'"$MSSQL_DB_URL"'","pool_settings":{}}}}}'
echo ""
echo "Sources added:"
curl "$METADATA_URL" --data-raw '{"type":"export_metadata","args":{}}'
cd "$PROJECT_ROOT/server/tests-py"
## Install misc test dependencies:

View File

@@ -178,7 +178,7 @@ class HGE:
print(Fore.YELLOW + "Stopping graphql engine at port:", self.port, Style.RESET_ALL)
pgrp = os.getpgid(self.proc.pid)
os.killpg(pgrp, signal.SIGTERM)
# NOTE this doesn't seem to work, although a SIGINT from terminal does ...
# self.proc.send_signal(signal.SIGINT)
self.proc.wait()
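
As the NOTE above hints, signalling only the child PID is unreliable here; killpg works because the server runs in its own process group. A minimal sketch of the pattern (POSIX only, with a hypothetical command):

    import os
    import signal
    import subprocess

    # start_new_session gives the child its own session and process group,
    # so a group-wide signal reaches the whole process tree.
    proc = subprocess.Popen(["graphql-engine", "serve"], start_new_session=True)

    os.killpg(os.getpgid(proc.pid), signal.SIGTERM)
    proc.wait()
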

View File

@@ -35,8 +35,8 @@ data RQLQuery
| RQUpdate !UpdateQuery
| RQDelete !DeleteQuery
| RQCount !CountQuery
| RMMssqlRunSql !MSSQL.MSSQLRunSQL
| RQRunSql !RunSQL
| RQMssqlRunSql !MSSQL.MSSQLRunSQL
| RQBulk ![RQLQuery]
deriving (Show)
@@ -119,5 +119,5 @@ runQueryM env = \case
RQDelete q -> runDelete env q
RQCount q -> runCount q
RQRunSql q -> runRunSQL q
RMMssqlRunSql q -> MSSQL.runSQL q
RQMssqlRunSql q -> MSSQL.runSQL q
RQBulk l -> encJFromList <$> indexedMapM (runQueryM env) l

View File

@@ -174,6 +174,11 @@ This option may result in test failures if the schema has to change between the
default=False
)
parser.addoption(
"--backend",
help="run integration tests using a particular backend",
default="postgres"
)
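
With this option registered, a backend-specific suite can be selected from the command line, e.g. (hypothetical invocation):

    pytest --backend mssql test_graphql_queries.py

Running without the flag keeps the default Postgres behaviour.
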
#By default,
#1) Set default parallelism to one
@@ -411,16 +416,46 @@ def per_method_db_data_for_mutation_tests(request, hge_ctx, per_class_db_schema_
False, False, False
)
def db_state_context(request, hge_ctx):
yield from db_context_with_schema_common(
request, hge_ctx, 'setup_files', 'setup.yaml', 'teardown_files',
'teardown.yaml', True
)
@pytest.fixture(scope='function')
def per_backend_tests(hge_ctx, backend):
"""
This fixture skips backend-specific tests unless the relevant --backend flag has been passed.
"""
# Currently, all tests default to running on Postgres, whether or not a --backend flag is passed.
# As our test suite develops, we may consider running backend-agnostic tests on all
# backends, unless a specific `--backend` flag is passed.
if hge_ctx.backend != backend:
pytest.skip(
'Skipping test. Add --backend ' + backend + ' to run backend-specific tests'
)
return
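
A test class opts into this fixture by parametrizing backend at class level, as the test_graphql_queries.py changes below do; a minimal sketch with a hypothetical class:

    import pytest

    @pytest.mark.parametrize("backend", ['mssql'])
    @pytest.mark.usefixtures('per_backend_tests')
    class TestMSSQLOnly:
        # Collected always, but skipped unless invoked with --backend mssql.
        def test_something(self, hge_ctx):
            ...
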
def db_state_context_new(request, hge_ctx):
def db_state_context(request, hge_ctx):
# Tests for non-default backends (Postgres is the default) expect separate setup and
# schema_setup files for v1/metadata and v2/query requests, respectively.
(setup, teardown, schema_setup, schema_teardown) = [
hge_ctx.backend_suffix(filename) + ".yaml"
for filename in ['setup', 'teardown', 'schema_setup', 'schema_teardown']
]
if hge_ctx.backend == 'postgres':
db_context = db_context_with_schema_common(
request, hge_ctx, 'setup_files', 'setup.yaml', 'teardown_files',
'teardown.yaml', True
)
else:
db_context = db_context_with_schema_common_new(
request, hge_ctx, 'setup_files', setup, 'teardown_files',
teardown, schema_setup, schema_teardown, True
)
yield from db_context
def db_state_context_new(
request, hge_ctx, setup='setup.yaml', teardown='teardown.yaml',
schema_setup='schema_setup.yaml', schema_teardown='schema_teardown.yaml'):
yield from db_context_with_schema_common_new(
request, hge_ctx, 'setup_files', 'setup.yaml', 'teardown_files',
'teardown.yaml', 'sql_schema_setup.yaml', 'sql_schema_teardown.yaml', True
request, hge_ctx, 'setup_files', setup, 'teardown_files',
teardown, schema_setup, schema_teardown, True
)
def db_context_with_schema_common(

View File

@@ -486,22 +486,24 @@ class HGECtx:
self.ws_client = GQLWsClient(self, '/v1/graphql')
self.backend = config.getoption('--backend')
# HGE version
result = subprocess.run(['../../scripts/get-version.sh'], shell=False, stdout=subprocess.PIPE, check=True)
env_version = os.getenv('VERSION')
self.version = env_version if env_version else result.stdout.decode('utf-8').strip()
if not self.metadata_disabled and not config.getoption('--skip-schema-setup'):
try:
st_code, resp = self.v1q_f('queries/clear_db.yaml')
st_code, resp = self.v2q_f("queries/" + self.backend_suffix("clear_db") + ".yaml")
except requests.exceptions.RequestException as e:
self.teardown()
raise HGECtxError(repr(e))
assert st_code == 200, resp
# Postgres version
pg_version_text = self.sql('show server_version_num').fetchone()['server_version_num']
self.pg_version = int(pg_version_text)
if self.backend == 'postgres':
pg_version_text = self.sql('show server_version_num').fetchone()['server_version_num']
self.pg_version = int(pg_version_text)
def reflect_tables(self):
self.meta.reflect(bind=self.engine)
@@ -586,6 +588,12 @@ class HGECtx:
yml = yaml.YAML()
return self.v2q(yml.load(f))
def backend_suffix(self, filename):
if self.backend == 'postgres':
return filename
else:
return filename + "_" + self.backend
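
Concretely, fixture lookups resolve to the shared files for Postgres and to suffixed per-backend files otherwise, for example:

    # hge_ctx.backend == 'postgres'  ->  backend_suffix('setup') == 'setup'
    # hge_ctx.backend == 'mssql'     ->  backend_suffix('setup') == 'setup_mssql'
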
def v1metadataq(self, q, headers = {}):
return self.execute_query(q, "/v1/metadata", headers)

View File

@@ -0,0 +1,2 @@
type: bulk
args: []

View File

@@ -0,0 +1,105 @@
type: bulk
args:
- type: mssql_run_sql
args:
source: mssql
sql: |
create table test_types (
c1_smallint smallint
, c2_integer integer
, c3_bigint bigint
, c4_decimal decimal(5, 2)
, c5_numeric numeric (4, 3)
, c6_real real
, c7_double_precision double precision
, c11_varchar_3 varchar(3)
, c12_char_4 char(4)
, c13_text text
, c16_date date
, c17_time time
, c44_xml xml
, c45_money money
, c47_smallmoney smallmoney
, c48_bit bit
, c49_tinyint tinyint
, c50_float float
, c51_real real
, c52_datetime datetime
, c53_datetime2 datetime2
, c54_datetimeoffset datetimeoffset
, c55_smalldatetime smalldatetime
, c56_binary binary(4)
, c57_varbinary varbinary(4)
, c58_hierarchyid hierarchyid
, c59_uniqueidentifier uniqueidentifier
);
insert into test_types(
c1_smallint
, c2_integer
, c3_bigint
, c4_decimal
, c5_numeric
, c6_real
, c7_double_precision
, c11_varchar_3
, c12_char_4
, c13_text
, c16_date
, c17_time
, c44_xml
, c45_money
, c47_smallmoney
, c48_bit
, c49_tinyint
, c50_float
, c51_real
, c52_datetime
, c53_datetime2
, c54_datetimeoffset
, c55_smalldatetime
, c56_binary
, c57_varbinary
, c58_hierarchyid
, c59_uniqueidentifier
)
values(
3277 -- c1_smallint
, 2147483647 -- c2_integer
, 9223372036854775807 -- c3_bigint
, 123.45 -- c4_decimal
, 1.234 -- c5_numeric
, 0.00390625 -- c6_real
, 16.0001220703125 -- c7_double_precision
, 'abc' -- c11_varchar_3
, 'baaz' -- c12_char_4
, 'foo bar baz' -- c13_text
, '2014-09-14' -- c16_date
, '11:09:23' -- c17_time
, '<foo>bar</foo>' -- c44_xml
, 123.45 -- c45_money
, -123.45 -- c47_smallmoney
, 0 -- c48_bit
, 254 -- c49_tinyint
, 2.23E -308 -- c50_float (T-SQL parses this as 2.23 - 308 = -305.77; see the expected response)
, 1.18E - 38 -- c51_real (likewise 1.18 - 38 = -36.82)
, '04-15-96 4am' -- c52_datetime
, '04-15-9999 23:59:59.9999999' -- c53_datetime2
, '2007-05-08 12:35:29.1234567 +12:15' -- c54_datetimeoffset
, '1955-12-13 12:43:10' -- c55_smalldatetime
, 0x0001e240 -- c56_binary
, 0x0001e240 -- c57_varbinary
, '/0.1/0.2/' -- c58_hierarchyid
, '0E984725-C51C-4BF4-9960-E1C80E27ABA0' -- c59_uniqueidentifier
);
create table author(
id int identity(1,1)
, name nvarchar(450) unique
, createdAt datetime
);
insert into author (name, createdAt)
values
('Author 1', '2017-09-21T09:39:44Z'),
('Author 2', '2017-09-21T09:50:44Z');

View File

@@ -0,0 +1,14 @@
type: bulk
args:
- type: mssql_run_sql
args:
source: mssql
sql: |
drop table test_types
- type: mssql_run_sql
args:
source: mssql
sql: |
drop table author

View File

@@ -6,10 +6,10 @@ response:
author:
- id: 1
name: Author 1
createdAt: '2017-09-21T09:39:44+00:00'
createdAt: '2017-09-21T09:39:44'
- id: 2
name: Author 2
createdAt: '2017-09-21T09:50:44+00:00'
createdAt: '2017-09-21T09:50:44'
query:
query: |
query {

View File

@@ -0,0 +1,66 @@
description: GraphQL query to test different data types of SQL Server
url: /v1/graphql
status: 200
response:
data:
test_types:
- c1_smallint: 3277
c2_integer: 2147483647
c3_bigint: 9223372036854775807
c4_decimal: 123.45
c5_numeric: 1.234
c6_real: 0.00390625
c7_double_precision: 16.0001220703125
c11_varchar_3: abc
c12_char_4: baaz
c13_text: foo bar baz
c16_date: '2014-09-14'
c17_time: 11:09:23
c44_xml: <foo>bar</foo>
c45_money: 123.45
c47_smallmoney: -123.45
c48_bit: false
c49_tinyint: 254
c50_float: -305.77
c51_real: -36.82
c52_datetime: '1996-04-15T04:00:00'
c53_datetime2: '9999-04-15T23:59:59.9999999'
c54_datetimeoffset: '2007-05-08T12:35:29.1234567+12:15'
c55_smalldatetime: '1955-12-13T12:43:00'
c56_binary: AAHiQA==
c57_varbinary: AAHiQA==
c58_hierarchyid: /0.1/0.2/
c59_uniqueidentifier: 0E984725-C51C-4BF4-9960-E1C80E27ABA0
query:
query: |
query {
test_types {
c1_smallint
c2_integer
c3_bigint
c4_decimal
c5_numeric
c6_real
c7_double_precision
c11_varchar_3
c12_char_4
c13_text
c16_date
c17_time
c44_xml
c45_money
c47_smallmoney
c48_bit
c49_tinyint
c50_float
c51_real
c52_datetime
c53_datetime2
c54_datetimeoffset
c55_smalldatetime
c56_binary
c57_varbinary
c58_hierarchyid
c59_uniqueidentifier
}
}
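
Note that the binary and varbinary columns come back base64-encoded: AAHiQA== is exactly the 0x0001e240 literal inserted by the setup file, as a quick standard-library check confirms:

    import base64

    assert base64.b64encode(bytes.fromhex("0001e240")) == b"AAHiQA=="
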

View File

@@ -68,7 +68,7 @@ args:
create table author(
id serial primary key,
name text unique,
"createdAt" timestamptz
"createdAt" timestamp
);
CREATE TABLE article (
id SERIAL PRIMARY KEY,
@@ -170,8 +170,8 @@ args:
);
insert into author (name, "createdAt")
values
('Author 1', '2017-09-21T09:39:44Z'),
('Author 2', '2017-09-21T09:50:44Z');
('Author 1', '2017-09-21T09:39:44'),
('Author 2', '2017-09-21T09:50:44');
insert into article (title,content,author_id,is_published)
values
(

View File

@@ -0,0 +1,15 @@
type: bulk
args:
- type: mssql_track_table
args:
source: mssql
table:
name: test_types
# Author table
- type: mssql_track_table
args:
source: mssql
table:
name: author

View File

@@ -0,0 +1,2 @@
type: bulk
args: []

View File

@@ -9,9 +9,31 @@ pytestmark = pytest.mark.allow_server_upgrade_test
usefixtures = pytest.mark.usefixtures
@pytest.mark.parametrize("transport", ['http', 'websocket'])
@usefixtures('per_class_tests_db_state')
class TestGraphQLQueryBasic:
@pytest.mark.parametrize("backend", ['mssql', 'postgres'])
@usefixtures('per_class_tests_db_state', 'per_backend_tests')
class TestGraphQLQueryBasicCommon:
def test_select_query_author_quoted_col(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/select_query_author_col_quoted.yaml', transport)
@classmethod
def dir(cls):
return 'queries/graphql_query/basic'
@pytest.mark.parametrize("transport", ['http', 'websocket'])
@pytest.mark.parametrize("backend", ['mssql'])
@usefixtures('per_class_tests_db_state', 'per_backend_tests')
class TestGraphQLQueryBasicMSSQL:
def test_select_various_mssql_types(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/select_query_test_types_mssql.yaml', transport)
@classmethod
def dir(cls):
return 'queries/graphql_query/basic'
@pytest.mark.parametrize("transport", ['http', 'websocket'])
@pytest.mark.parametrize("backend", ['postgres'])
@usefixtures('per_class_tests_db_state', 'per_backend_tests')
class TestGraphQLQueryBasicPostgres:
# This also exercises support for multiple operations in a document:
def test_select_query_author(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/select_query_author.yaml', transport)
@@ -25,10 +47,7 @@ class TestGraphQLQueryBasic:
# Can't run server upgrade tests, as this test has a schema change
@pytest.mark.skip_server_upgrade_test
def test_select_various_postgres_types(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/select_query_test_types.yaml', transport)
def test_select_query_author_quoted_col(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/select_query_author_col_quoted.yaml', transport)
check_query_f(hge_ctx, self.dir() + '/select_query_test_types_postgres.yaml', transport)
def test_select_query_author_pk(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/select_query_author_by_pkey.yaml', transport)
@@ -87,7 +106,6 @@ class TestGraphQLQueryBasic:
def dir(cls):
return 'queries/graphql_query/basic'
@pytest.mark.parametrize("transport", ['http', 'websocket'])
@usefixtures('per_class_tests_db_state')
class TestGraphQLQueryFragments: