2018-09-18 09:21:57 +03:00
|
|
|
import pytest
|
|
|
|
import queue
|
2018-09-19 15:12:57 +03:00
|
|
|
import time
|
2022-08-22 10:21:17 +03:00
|
|
|
|
2022-12-21 18:55:24 +03:00
|
|
|
import fixtures.postgres
|
2022-09-28 12:19:47 +03:00
|
|
|
from context import EvtsWebhookServer, HGECtx
|
2022-04-22 22:53:12 +03:00
|
|
|
from utils import *
|
2022-08-22 10:21:17 +03:00
|
|
|
from validate import check_query_f, check_event, check_event_transformed, check_events
|
2020-02-13 12:14:02 +03:00
|
|
|
|
|
|
|
# Shorthand for pytest's usefixtures mark, used by the test classes below.
usefixtures = pytest.mark.usefixtures

# Every test in this module requires the events webhook to be running first.
pytestmark = usefixtures('evts_webhook')
|
2018-10-30 12:21:58 +03:00
|
|
|
|
2018-09-19 15:12:57 +03:00
|
|
|
def select_last_event_fromdb(hge_ctx):
    """Fetch the most recently created row from ``hdb_catalog.event_log``.

    Issues a v1 ``select`` query ordered by ``created_at`` descending with
    a limit of 1 and returns the raw response from ``hge_ctx.v1q``.
    """
    return hge_ctx.v1q({
        "type": "select",
        "args": {
            "table": {"schema": "hdb_catalog", "name": "event_log"},
            "columns": ["*"],
            "order_by": ["-created_at"],
            "limit": 1
        }
    })
|
2018-09-19 15:12:57 +03:00
|
|
|
|
2022-04-21 10:19:37 +03:00
|
|
|
def insert_mutation(hge_ctx, table, row, headers=None):
    """Insert a single *row* into *table* via a GraphQL insert mutation.

    Thin wrapper around :func:`insert_many_mutation` for the one-row case.
    ``headers`` defaults to no extra HTTP headers; using ``None`` instead of
    a ``{}`` default avoids sharing one mutable dict across calls.
    """
    return insert_many_mutation(hge_ctx, table, [row], {} if headers is None else headers)
|
|
|
|
|
|
|
|
def insert_many_mutation(hge_ctx, table, rows, headers=None):
    """Insert *rows* into *table* through a GraphQL insert mutation.

    Builds the ``insert_<schema>_<name>`` mutation (schema-qualified only
    when ``table["schema"]`` is truthy), sends it via ``hge_ctx.v1graphqlq``
    and returns the response — mirroring ``update_mutation`` and
    ``delete_mutation``, which also return their responses.

    ``headers`` defaults to no extra HTTP headers; using ``None`` instead of
    a ``{}`` default avoids the shared mutable-default pitfall.
    """
    if headers is None:
        headers = {}

    mutation_name = "insert" + "_" + table["name"]

    # Schema-qualify the generated field/type names when a schema is given,
    # e.g. insert_hge_tests_test_t1 / hge_tests_test_t1_insert_input.
    if table["schema"]:
        insert_value_type = table["schema"] + "_" + table["name"] + "_" + "insert" + "_" + "input"
        insert_mutation_field = "insert" + "_" + table["schema"] + "_" + table["name"]
    else:
        insert_value_type = table["name"] + "_" + "insert" + "_" + "input"
        insert_mutation_field = "insert" + "_" + table["name"]

    insert_mutation_query = """
    mutation {mutation_name}($values: [{insert_value_type}!]!) {{
      {insert_mutation_field}(objects: $values) {{
        affected_rows
      }}
    }}
    """.format(mutation_name=mutation_name,
               insert_value_type=insert_value_type,
               insert_mutation_field=insert_mutation_field)

    variables = {'values': rows}
    graphql_query = {'query': insert_mutation_query, 'variables': variables}

    # Return the response for parity with update_mutation/delete_mutation.
    return hge_ctx.v1graphqlq(graphql_query, headers=headers)
|
2022-04-21 10:19:37 +03:00
|
|
|
|
|
|
|
def update_mutation(hge_ctx, table, where_exp, set_exp, headers=None):
    """Run a GraphQL update mutation against *table* and return the response.

    *where_exp* and *set_exp* are raw GraphQL expression strings (e.g.
    ``'{c1: {_eq: 1}}'``) and are interpolated verbatim into the query.
    The mutation field is schema-qualified when ``table["schema"]`` is truthy.

    ``headers`` defaults to no extra HTTP headers; using ``None`` instead of
    a ``{}`` default avoids the shared mutable-default pitfall.
    """
    if headers is None:
        headers = {}

    mutation_name = "update" + "_" + table["name"]

    # e.g. update_hge_tests_test_t1 when a schema is present.
    if table["schema"]:
        update_mutation_field = "update" + "_" + table["schema"] + "_" + table["name"]
    else:
        update_mutation_field = "update" + "_" + table["name"]

    update_mutation_query = """
    mutation {mutation_name} {{
      {update_mutation_field}(where: {where_exp}, _set: {set_exp}) {{
        affected_rows
      }}
    }}
    """.format(mutation_name=mutation_name,
               update_mutation_field=update_mutation_field,
               where_exp=where_exp,
               set_exp=set_exp)

    print("--- UPDATE MUTATION QUERY ---- \n", update_mutation_query)

    graphql_query = {'query': update_mutation_query}
    resp = hge_ctx.v1graphqlq(graphql_query, headers=headers)

    #print(" ---- UPDATE MUTATION RESP ----", resp)
    return resp
|
2022-04-21 10:19:37 +03:00
|
|
|
|
|
|
|
def delete_mutation(hge_ctx, table, where_exp, headers=None):
    """Run a GraphQL delete mutation against *table* and return the response.

    *where_exp* is a raw GraphQL expression string (e.g. ``'{c1: {_eq: 1}}'``)
    interpolated verbatim into the query. The mutation field is
    schema-qualified when ``table["schema"]`` is truthy.

    ``headers`` defaults to no extra HTTP headers; using ``None`` instead of
    a ``{}`` default avoids the shared mutable-default pitfall.
    """
    if headers is None:
        headers = {}

    mutation_name = "delete" + "_" + table["name"]

    # e.g. delete_hge_tests_test_t1 when a schema is present.
    if table["schema"]:
        delete_mutation_field = "delete" + "_" + table["schema"] + "_" + table["name"]
    else:
        delete_mutation_field = "delete" + "_" + table["name"]

    delete_mutation_query = """
    mutation {mutation_name} {{
      {delete_mutation_field}(where: {where_exp}) {{
        affected_rows
      }}
    }}
    """.format(mutation_name=mutation_name,
               delete_mutation_field=delete_mutation_field,
               where_exp=where_exp)

    print("--- DELETE MUTATION QUERY ---- \n", delete_mutation_query)

    graphql_query = {'query': delete_mutation_query}
    resp = hge_ctx.v1graphqlq(graphql_query, headers=headers)

    print(" ---- DELETE MUTATION RESP ----", resp)
    return resp
|
2022-04-21 10:19:37 +03:00
|
|
|
|
2020-02-13 12:14:02 +03:00
|
|
|
@usefixtures("per_method_tests_db_state")
|
2022-04-21 10:19:37 +03:00
|
|
|
class TestEventCreateAndDelete:
|
2018-12-19 09:34:27 +03:00
|
|
|
|
|
|
|
def test_create_delete(self, hge_ctx):
|
|
|
|
check_query_f(hge_ctx, self.dir() + "/create_and_delete.yaml")
|
|
|
|
|
|
|
|
def test_create_reset(self, hge_ctx):
|
|
|
|
check_query_f(hge_ctx, self.dir() + "/create_and_reset.yaml")
|
|
|
|
|
2019-02-01 12:37:38 +03:00
|
|
|
def test_create_operation_spec_not_provider_err(self, hge_ctx):
|
|
|
|
check_query_f(hge_ctx, self.dir() + "/create_trigger_operation_specs_not_provided_err.yaml")
|
|
|
|
|
2018-12-19 09:34:27 +03:00
|
|
|
@classmethod
|
|
|
|
def dir(cls):
|
|
|
|
return 'queries/event_triggers/create-delete'
|
2018-09-19 15:12:57 +03:00
|
|
|
|
2022-04-11 14:24:11 +03:00
|
|
|
@usefixtures("per_method_tests_db_state")
|
2022-04-21 10:19:37 +03:00
|
|
|
class TestEventCreateAndResetNonDefaultSource:
|
2022-04-11 14:24:11 +03:00
|
|
|
|
2022-12-21 18:55:24 +03:00
|
|
|
@pytest.fixture(scope='class', autouse=True)
|
|
|
|
def another_source(self, owner_engine, add_source):
|
|
|
|
backend: fixtures.postgres.Backend = add_source('postgres')
|
|
|
|
backend_database = backend.engine.url.database
|
|
|
|
assert backend_database is not None
|
|
|
|
|
|
|
|
with fixtures.postgres.switch_schema(owner_engine, backend_database).connect() as connection:
|
|
|
|
connection.execute('DROP SCHEMA IF EXISTS hge_tests CASCADE')
|
|
|
|
with backend.engine.connect() as connection:
|
|
|
|
connection.execute('CREATE SCHEMA hge_tests')
|
|
|
|
|
|
|
|
yield backend
|
2022-04-11 14:24:11 +03:00
|
|
|
|
2022-12-21 18:55:24 +03:00
|
|
|
# TODO: remove once parallelization work is completed
|
|
|
|
# cleanup will no longer be required
|
|
|
|
with backend.engine.connect() as connection:
|
|
|
|
connection.execute('DROP SCHEMA IF EXISTS hge_tests CASCADE')
|
2022-08-22 10:21:17 +03:00
|
|
|
|
2022-12-21 18:55:24 +03:00
|
|
|
def test_create_reset_non_default_source(self, hge_ctx, another_source):
|
|
|
|
check_query_f(hge_ctx, self.dir() + "/create_and_reset_non_default_source.yaml")
|
|
|
|
|
|
|
|
with another_source.engine.connect() as connection:
|
2022-08-22 10:21:17 +03:00
|
|
|
# Check that the event log table exists.
|
|
|
|
# This must be run against the source database.
|
|
|
|
result = connection.execute("SELECT EXISTS (SELECT * FROM information_schema.tables WHERE table_schema = 'hdb_catalog' and table_name = 'event_log')")
|
|
|
|
row = result.first()
|
|
|
|
assert row == (True,), f'Result: {row!r}'
|
|
|
|
|
|
|
|
# We plan on clearing the metadata in code in the future, so this is not run as YAML input.
|
|
|
|
hge_ctx.v1metadataq({
|
|
|
|
"type": "clear_metadata",
|
|
|
|
"args": {}
|
|
|
|
})
|
|
|
|
|
|
|
|
# Check that the event log table has been dropped.
|
|
|
|
# This must be run against the source database.
|
|
|
|
result = connection.execute("SELECT EXISTS (SELECT * FROM information_schema.tables WHERE table_schema = 'hdb_catalog' and table_name = 'event_log')")
|
|
|
|
row = result.first()
|
|
|
|
assert row == (False,), f'Result: {row!r}'
|
|
|
|
|
2022-04-11 14:24:11 +03:00
|
|
|
@classmethod
|
|
|
|
def dir(cls):
|
|
|
|
return 'queries/event_triggers/create_and_reset_non_default'
|
2022-08-02 22:32:46 +03:00
|
|
|
|
|
|
|
@pytest.mark.backend('mssql')
@usefixtures("per_method_tests_db_state")
class TestEventCreateAndDeleteMSSQL:
    """MSSQL variants of the event-trigger create/delete lifecycle tests."""

    def test_create_delete(self, hge_ctx):
        check_query_f(hge_ctx, self.dir() + "/create_and_delete_mssql.yaml")

    def test_create_reset(self, hge_ctx):
        check_query_f(hge_ctx, self.dir() + "/create_and_reset_mssql.yaml")

        # Fire the trigger once so the reset flow has an event to work with.
        table = {"schema": "hge_tests", "name": "test_t1"}
        insert_mutation(hge_ctx, table, {"c1": 1, "c2": "world"})

        check_query_f(hge_ctx, self.dir() + "/create_and_reset_mssql_2.yaml")

    def test_create_operation_spec_not_provider_err(self, hge_ctx):
        check_query_f(hge_ctx, self.dir() + "/create_trigger_operation_specs_not_provided_err_mssql.yaml")

    @classmethod
    def dir(cls):
        # Location of the YAML fixtures driving these tests.
        return 'queries/event_triggers/create-delete'
|
|
|
|
|
2022-04-11 14:24:11 +03:00
|
|
|
|
2021-04-29 07:01:06 +03:00
|
|
|
# Generates a backlog of events, then:
|
|
|
|
# - checks that we're processing with the concurrency and backpressure
|
2021-09-16 14:03:01 +03:00
|
|
|
# characteristics we expect
|
2021-04-29 07:01:06 +03:00
|
|
|
# - ensures all events are successfully processed
|
2022-09-28 12:19:47 +03:00
|
|
|
@pytest.mark.backend('mssql', 'postgres')
# Set a known batch size for assertions.
@pytest.mark.hge_env('HASURA_GRAPHQL_EVENTS_FETCH_BATCH_SIZE', str(100))
# Set the HTTP pool size to trigger backpressure upon flooding.
@pytest.mark.hge_env('HASURA_GRAPHQL_EVENTS_HTTP_POOL_SIZE', str(8))
@usefixtures("per_method_tests_db_state")
class TestEventFloodPostgresMSSQL(object):
    """Floods the event system with a backlog, verifies the expected
    concurrency/backpressure characteristics, then checks that every
    event is eventually delivered."""

    @classmethod
    def dir(cls):
        # Location of the YAML fixtures driving these tests.
        return 'queries/event_triggers/flood'

    def test_flood(self, hge_ctx: HGECtx, evts_webhook: EvtsWebhookServer):
        table = {"schema": "hge_tests", "name": "test_flood"}

        # Trigger a bunch of events; hasura will begin processing but block on /block
        payload = range(1, 1001)
        rows = [{"c1": n, "c2": "hello"} for n in payload]

        if hge_ctx.backend == "postgres":
            insert_many(hge_ctx, table, rows)
        elif hge_ctx.backend == "mssql":
            insert_many_mutation(hge_ctx, table, rows)

        def check_backpressure():
            # Expect that HASURA_GRAPHQL_EVENTS_HTTP_POOL_SIZE webhooks are pending:
            assert evts_webhook.blocked_count == 8
            # ...Great, so presumably:
            # - event handlers are run concurrently
            # - with concurrency limited by HASURA_GRAPHQL_EVENTS_HTTP_POOL_SIZE

            if hge_ctx.backend == "postgres":
                locked_counts = {
                    "type": "run_sql",
                    "args": {
                        "sql": '''
                        select
                        (select count(*) from hdb_catalog.event_log where locked IS NOT NULL) as num_locked,
                        count(*) as total
                        from hdb_catalog.event_log
                        where table_name = 'test_flood'
                        '''
                    }
                }
                resp = hge_ctx.v1q(locked_counts)
            elif hge_ctx.backend == "mssql":
                locked_counts = {
                    "type": "mssql_run_sql",
                    "args": {
                        "source": "mssql",
                        "sql": '''
                        select
                        (select count(*) from hdb_catalog.event_log where locked IS NOT NULL) as num_locked,
                        count(*) as total
                        from hdb_catalog.event_log
                        where table_name = 'test_flood'
                        '''
                    }
                }
                resp = hge_ctx.v2q(locked_counts)
            else:
                raise NotImplementedError('Unknown backend.')

            # Make sure we have 2*HASURA_GRAPHQL_EVENTS_FETCH_BATCH_SIZE events checked out:
            # - 100 prefetched
            # - 100 being processed right now (but blocked on HTTP_POOL capacity)
            # TODO it seems like we have some shared state in CI causing this to fail when we check 1000 below
            # assert resp['result'][1] == ['200', '1000']
            if hge_ctx.backend == "postgres":
                assert resp['result'][1][0] == '200'
            elif hge_ctx.backend == "mssql":
                assert resp['result'][1][0] == 200

        # Rather than sleep arbitrarily, loop until assertions pass:
        until_asserts_pass(30, check_backpressure)
        # ...then make sure we're truly stable:
        time.sleep(3)
        check_backpressure()

        # unblock open and future requests to /block; check all events processed
        evts_webhook.unblock()

        def get_evt():
            # TODO ThreadedHTTPServer helps locally (I only need a timeout of
            # 10 here), but we still need a bit of a long timeout here for CI
            # it seems, since webhook can't keep up there:
            ev_full = evts_webhook.get_event(600)
            return ev_full['body']['event']['data']['new']['c1']

        # Make sure we got all payloads (probably out of order):
        received = sorted(get_evt() for _ in payload)
        assert received == list(payload)
|
|
|
|
|
2022-08-15 17:29:02 +03:00
|
|
|
@usefixtures('postgis', 'per_class_tests_db_state')
class TestEventDataFormat(object):
    """Checks the JSON wire format of event payloads for column types
    that need special serialization (bigint, PostGIS geometry)."""

    @classmethod
    def dir(cls):
        # Location of the YAML fixtures driving these tests.
        return 'queries/event_triggers/data_format'

    def test_bigint(self, hge_ctx, evts_webhook):
        # bigint values arrive as strings in the payload (JSON numbers would
        # lose precision beyond 2^53).
        table = {"schema": "hge_tests", "name": "test_bigint"}

        init_row = {"id": 50755254975729665, "name": "hello"}
        exp_ev_data = {
            "old": None,
            "new": {"id": "50755254975729665", "name": "hello"}
        }

        insert(hge_ctx, table, init_row)
        check_event(hge_ctx, evts_webhook, "bigint_all", table, "INSERT", exp_ev_data)

    def test_geojson(self, hge_ctx, evts_webhook):
        # geometry columns arrive as GeoJSON objects in the payload.
        table = {"schema": "hge_tests", "name": "test_geojson"}

        # Old and new rows share the same point; only the id changes.
        location = {
            "coordinates": [
                -43.77,
                45.64
            ],
            "crs": {
                "type": "name",
                "properties": {
                    "name": "urn:ogc:def:crs:EPSG::4326"
                }
            },
            "type": "Point"
        }
        exp_ev_data = {
            "old": {"id": 1, "location": location},
            "new": {"id": 2, "location": location}
        }

        where_exp = {"id": 1}
        set_exp = {"id": 2}
        update(hge_ctx, table, where_exp, set_exp)
        check_event(hge_ctx, evts_webhook, "geojson_all", table, "UPDATE", exp_ev_data)
|
2021-05-20 15:26:37 +03:00
|
|
|
|
2022-08-02 22:32:46 +03:00
|
|
|
@pytest.mark.backend('mssql')
@usefixtures("per_class_tests_db_state")
class TestEventDataFormatBigIntMSSQL(object):
    """Checks bigint event payload formatting on the MSSQL backend."""

    @classmethod
    def dir(cls):
        # Location of the YAML fixtures driving these tests.
        return 'queries/event_triggers/data_format/mssql/bigint'

    def test_bigint(self, hge_ctx, evts_webhook):
        table = {"schema": "hge_tests", "name": "test_bigint"}

        #init_row = {"id": 50755254975729665, "name": "hello"}
        exp_ev_data = {
            "old": None,
            "new": {"id": 50755254975729665, "name": "hello"}
        }

        # TODO: Naveen: Insert mutation on big int values in MSSQL source
        # does not work as of now, hence using 'run_sql' to directly insert rows
        # and trigger the event trigger. When they are supported in future, we
        # might wanna use the insert_mutation here for consistency.
        #
        # resp = insert_mutation(hge_ctx, table, init_row)
        insert_bigint_sql = {
            "type": "mssql_run_sql",
            "args": {
                "source": "mssql",
                "sql": '''
                INSERT INTO hge_tests.test_bigint ([id], [name]) VALUES (50755254975729665, 'hello')
                '''
            }
        }
        resp = hge_ctx.v2q(insert_bigint_sql)
        print("----------- resp ----------\n", resp)

        check_event(hge_ctx, evts_webhook, "bigint_all", table, "INSERT", exp_ev_data)
|
|
|
|
|
2022-08-02 22:32:46 +03:00
|
|
|
@pytest.mark.backend('mssql')
@usefixtures("per_class_tests_db_state")
class TestEventDataFormatGeoJSONMSSQL(object):
    """Checks GeoJSON event trigger creation on the MSSQL backend."""

    @classmethod
    def dir(cls):
        # Location of the YAML fixtures driving these tests.
        return 'queries/event_triggers/data_format/mssql/geojson'

    def test_geojson(self, hge_ctx, evts_webhook):
        check_query_f(hge_ctx, self.dir() + '/create_geojson_event_trigger.yaml')
|
2020-03-11 09:27:31 +03:00
|
|
|
|
2022-08-02 22:32:46 +03:00
|
|
|
@pytest.mark.backend('mssql','postgres')
@usefixtures("per_class_tests_db_state")
class TestCreateEventQueryPostgresMSSQL(object):
    """End-to-end payload checks for a basic trigger covering the
    INSERT, UPDATE and DELETE operations on both backends."""

    @classmethod
    def dir(cls):
        # Location of the YAML fixtures driving these tests.
        return 'queries/event_triggers/basic'

    def test_basic(self, hge_ctx, evts_webhook):
        table = {"schema": "hge_tests", "name": "test_t1"}
        backend = hge_ctx.backend

        # Check Insert Event Trigger Payload
        init_row = {"c1": 1, "c2": "hello"}
        exp_ev_data = {"old": None, "new": init_row}
        if backend == "postgres":
            insert(hge_ctx, table, init_row)
        elif backend == "mssql":
            insert_mutation(hge_ctx, table, init_row)
        check_event(hge_ctx, evts_webhook, "t1_all", table, "INSERT", exp_ev_data)

        # Check Update Event Trigger Payload
        if backend == "postgres":
            update(hge_ctx, table, {"c1": 1}, {"c2": "world"})
        elif backend == "mssql":
            # MSSQL goes through GraphQL, so the expressions are raw strings.
            update_mutation(hge_ctx, table, '{c1: {_eq: 1}}', '{c2: "world"}')
        exp_ev_data = {"old": init_row, "new": {"c1": 1, "c2": "world"}}
        check_event(hge_ctx, evts_webhook, "t1_all", table, "UPDATE", exp_ev_data)

        # Check Delete Event Trigger Payload
        if backend == "postgres":
            delete(hge_ctx, table, {"c1": 1})
        elif backend == "mssql":
            delete_mutation(hge_ctx, table, '{c1: {_eq: 1}}')
        exp_ev_data = {"old": {"c1": 1, "c2": "world"}, "new": None}
        check_event(hge_ctx, evts_webhook, "t1_all", table, "DELETE", exp_ev_data)
|
2018-09-18 09:21:57 +03:00
|
|
|
|
2022-12-14 19:56:04 +03:00
|
|
|
@pytest.mark.backend('postgres')
@usefixtures("per_class_tests_db_state")
class TestCreateEventQueryPostgres(object):
    """Postgres-only event trigger tests (partitioned tables)."""

    @classmethod
    def dir(cls):
        # Location of the YAML fixtures driving these tests.
        return 'queries/event_triggers/basic'

    def test_partitioned_table_basic_insert(self, hge_ctx, evts_webhook):
        # Partition setup/teardown is per-test, not part of the class fixtures.
        hge_ctx.v1q_f(self.dir() + '/partition_table_setup.yaml')
        table = {"schema": "hge_tests", "name": "measurement"}

        init_row = {"city_id": 1, "logdate": "2006-02-02", "peaktemp": 1, "unitsales": 1}
        exp_ev_data = {"old": None, "new": init_row}
        insert(hge_ctx, table, init_row)
        check_event(hge_ctx, evts_webhook, "measurement_all", table, "INSERT", exp_ev_data)
        hge_ctx.v1q_f(self.dir() + '/partition_table_teardown.yaml')
|
2021-01-06 23:21:39 +03:00
|
|
|
|
2022-08-02 22:32:46 +03:00
|
|
|
@pytest.mark.backend('mssql','postgres')
|
2020-02-13 12:14:02 +03:00
|
|
|
@usefixtures('per_method_tests_db_state')
|
server/tests: Fix `BigQuery test failure Job exceeded rate limits` error in CI
Fixes https://github.com/hasura/graphql-engine-mono/issues/3695.
Error: [BigQuery test failure Job exceeded rate limits](https://github.com/hasura/graphql-engine-mono/issues/3695)
Cause:
1. [this command](https://github.com/hasura/graphql-engine/blob/2325755954bb3a777403503d709b412e01219ba9/.circleci/test-server.sh#L1263) runs tests matching the `Bigquery or Common` string, for the `test-oss-server-bigquery` CI job.
2. in this case, the pytest filter matched on `TestGraphQLQueryBoolExpSearchCommon`. Although unrelated pytests are skipped, BQ setup and teardown runs uneccesarily for the [MSSQL and Postgres backends](https://github.com/hasura/graphql-engine/blob/e444cf1f5d5eb1762357266d8b298b1dfb48d937/server/tests-py/test_graphql_queries.py#L868).
4. the setup and teardown runs three times in quick succession, _for each of_ SQL Server, Postgres and BigQuery. Occasionally, this surpassed [BigQuery's maximum rate of 5 table update operations in 10 seconds](https://cloud.google.com/bigquery/quotas#load_job_per_table.long).
Fix: restrict setup/teardown to only the relevant backends...
- Hotfix (this PR): ...by renaming pytest classes and changing the pytest filters in `test-server`
- ok, this is faintly horrifying and an inelegant convention change. On the bright side, it shaves a minute or so off our integration test suite run by skipping fewer tests. Anecdata for `test-oss-server-bigquery`
- before: 87 passed, 299 skipped, 1 warning, 1 error in 192.99s
- after: 87 passed, 20 skipped, 1 warning in 170.82s
- [`Common` was a terrible name, anyway](https://github.com/hasura/graphql-engine-mono/issues/2079), for `AnyCombinationOfBackends`.
- Better fix: ...by refactoring the `conftest.py` helpers. I ran out of a timebox so will write up a separate issue. Given we're actively [porting pytests over to hspec](https://github.com/hasura/graphql-engine/issues/8432), I don't know how much it's worth investing time in a refactor.
To verify the fix: I ran a full CI build a few times [[1]](https://buildkite.com/hasura/graphql-engine-mono/builds/8069#078c781a-c8ef-44f2-a400-15f91fb88e42)[[2]](https://buildkite.com/hasura/graphql-engine-mono/builds/8072#f9e7f59d-264f-46a4-973d-21aa762cca35)[[3]](https://buildkite.com/hasura/graphql-engine-mono/builds/8075#bb104e80-ff76-408c-a46b-6f40e92e6317) whilst troubleshooting to convince myself this fixed the problem.
PR-URL: https://github.com/hasura/graphql-engine-mono/pull/4362
GitOrigin-RevId: 4c3283f0654b70e9dcda642d9012f6376aa95290
2022-04-27 21:39:40 +03:00
|
|
|
class TestEventRetryConfPostgresMSSQL(object):
    """Event-delivery retry behaviour (Postgres and MSSQL backends).

    The trigger retry configuration lives in the YAML under dir(); the
    relevant webhook path and retry settings are noted in the comment
    above each test.
    """

    @classmethod
    def dir(cls):
        # Directory holding the setup/teardown YAML for these tests.
        return 'queries/event_triggers/retry_conf'

    # webhook: http://127.0.0.1:5592/fail
    # retry_conf:
    #   num_retries: 4
    #   interval_sec: 1
    def test_basic(self, hge_ctx, evts_webhook):
        """An always-failing webhook gets the initial attempt plus 4 retries."""
        table = {"schema": "hge_tests", "name": "test_t1"}

        init_row = {"c1": 1, "c2": "hello"}
        exp_ev_data = {
            "old": None,
            "new": init_row
        }
        # Insert via the backend-appropriate helper (SQL for Postgres,
        # GraphQL mutation for MSSQL).
        if (hge_ctx.backend == "postgres"):
            insert(hge_ctx, table, init_row)
        elif (hge_ctx.backend == "mssql"):
            insert_mutation(hge_ctx, table, init_row)
        # retry = 0 is the initial delivery; 1..4 are the configured retries.
        check_event(hge_ctx, evts_webhook, "t1_retry", table, "INSERT", exp_ev_data, webhook_path = "/fail", retry = 0)
        check_event(hge_ctx, evts_webhook, "t1_retry", table, "INSERT", exp_ev_data, webhook_path = "/fail", retry = 1)
        check_event(hge_ctx, evts_webhook, "t1_retry", table, "INSERT", exp_ev_data, webhook_path = "/fail", retry = 2)
        check_event(hge_ctx, evts_webhook, "t1_retry", table, "INSERT", exp_ev_data, webhook_path = "/fail", retry = 3)
        check_event(hge_ctx, evts_webhook, "t1_retry", table, "INSERT", exp_ev_data, webhook_path = "/fail", retry = 4)

    # webhook: http://127.0.0.1:5592/sleep_2s
    # retry_conf:
    #   num_retries: 2
    #   interval_sec: 1
    #   timeout_sec: 1
    def test_timeout_short(self, hge_ctx, evts_webhook):
        """A 1s delivery timeout against a 2s-slow webhook: every attempt times out."""
        table = {"schema": "hge_tests", "name": "test_t2"}

        init_row = {"c1": 1, "c2": "hello"}
        exp_ev_data = {
            "old": None,
            "new": init_row
        }
        if (hge_ctx.backend == "postgres"):
            insert(hge_ctx, table, init_row)
        elif (hge_ctx.backend == "mssql"):
            insert_mutation(hge_ctx, table, init_row)
        check_event(hge_ctx, evts_webhook, "t2_timeout_short", table, "INSERT", exp_ev_data, webhook_path = "/sleep_2s", retry = 0, get_timeout = 5)
        check_event(hge_ctx, evts_webhook, "t2_timeout_short", table, "INSERT", exp_ev_data, webhook_path = "/sleep_2s", retry = 1, get_timeout = 5)
        check_event(hge_ctx, evts_webhook, "t2_timeout_short", table, "INSERT", exp_ev_data, webhook_path = "/sleep_2s", retry = 2, get_timeout = 5)

    # webhook: http://127.0.0.1:5592/sleep_2s
    # retry_conf:
    #   num_retries: 0
    #   interval_sec: 2
    #   timeout_sec: 10
    def test_timeout_long(self, hge_ctx, evts_webhook):
        """A generous 10s timeout lets the slow (2s) webhook succeed first try."""
        table = {"schema": "hge_tests", "name": "test_t3"}

        init_row = {"c1": 1, "c2": "hello"}
        exp_ev_data = {
            "old": None,
            "new": init_row
        }
        if (hge_ctx.backend == "postgres"):
            insert(hge_ctx, table, init_row)
        elif (hge_ctx.backend == "mssql"):
            insert_mutation(hge_ctx, table, init_row)
        # Give the slow webhook time to respond before asserting delivery.
        time.sleep(2)
        check_event(hge_ctx, evts_webhook, "t3_timeout_long", table, "INSERT", exp_ev_data, webhook_path = "/sleep_2s")

    # Keep this one last
    def test_queue_empty(self, hge_ctx, evts_webhook):
        """No stray events should remain after the preceding tests."""
        # Consistency fix: every other negative check in this module uses
        # pytest.raises(queue.Empty); the original hand-rolled
        # try / assert False / except block behaved the same but read worse.
        with pytest.raises(queue.Empty):
            evts_webhook.get_event(3)
|
|
2022-09-28 12:19:47 +03:00
|
|
|
@pytest.mark.backend('mssql', 'postgres')
@pytest.mark.hge_env('EVENT_WEBHOOK_HEADER', 'MyEnvValue')
@usefixtures('per_method_tests_db_state')
class TestEventHeadersPostgresMSSQL(object):
    """Checks that both static and environment-sourced headers configured on
    an event trigger are forwarded to the webhook."""

    @classmethod
    def dir(cls):
        # Setup/teardown YAML for the header-forwarding trigger.
        return 'queries/event_triggers/headers'

    def test_basic(self, hge_ctx, evts_webhook):
        """Insert a row and assert the delivered event carries both headers."""
        table = {"schema": "hge_tests", "name": "test_t1"}

        init_row = {"c1": 1, "c2": "hello"}
        exp_ev_data = {
            "old": None,
            "new": init_row
        }
        # X-Header-From-Env must resolve from the EVENT_WEBHOOK_HEADER env var
        # set via the hge_env mark above.
        headers = {"X-Header-From-Value": "MyValue", "X-Header-From-Env": "MyEnvValue"}
        # Insert via the backend-appropriate helper.
        if (hge_ctx.backend == "postgres"):
            insert(hge_ctx, table, init_row)
        elif (hge_ctx.backend == "mssql"):
            insert_mutation(hge_ctx, table, init_row)
        check_event(hge_ctx, evts_webhook, "t1_all", table, "INSERT", exp_ev_data, headers = headers)
|
|
2022-04-21 10:19:37 +03:00
|
|
|
class TestUpdateEventQuery(object):
    """Checks that replacing (updating) an event trigger's definition takes
    effect: events fire only for the columns/operations of the *updated*
    trigger, not the original one."""

    @classmethod
    def dir(cls):
        # Setup/teardown YAML for the update-trigger tests.
        return 'queries/event_triggers/update_query'

    @pytest.fixture(autouse=True)
    def transact(self, request, hge_ctx, evts_webhook):
        """Create a trigger, then overwrite it with a different definition."""
        print("In setup method")
        # Adds trigger on 'test_t1' with
        # insert:
        #   columns: '*'
        # update:
        #   columns: [c2, c3]
        resp = hge_ctx.v1q_f(self.dir() + '/create-setup.yaml')

        # overwrites trigger added above, with
        # delete:
        #   columns: "*"
        # update:
        #   columns: ["c1", "c3"]
        resp = hge_ctx.v1q_f(self.dir() + '/update-setup.yaml')
        # The updated trigger should now point at the '/new' webhook path.
        assert resp[1]["sources"][0]["tables"][0]["event_triggers"][0]["webhook"] == '{{EVENT_WEBHOOK_HANDLER}}/new'

        yield
        resp = hge_ctx.v1q_f(self.dir() + '/teardown.yaml')

    def test_update_basic(self, hge_ctx, evts_webhook):
        """Only updates touching c1/c3 (and deletes) should fire events."""
        table = {"schema": "hge_tests", "name": "test_t1"}

        # Expect that inserting a row (which would have triggered in original
        # create_event_trigger) does not trigger
        init_row = {"c1": 1, "c2": "hello", "c3": {"name": "clarke"}}
        insert(hge_ctx, table, init_row)
        with pytest.raises(queue.Empty):
            check_event(hge_ctx, evts_webhook, "t1_cols", table, "INSERT", {}, webhook_path = "/new", get_timeout = 0)

        # Likewise for an update on c2:
        where_exp = {"c1": 1}
        set_exp = {"c2": "world"}
        update(hge_ctx, table, where_exp, set_exp)
        with pytest.raises(queue.Empty):
            check_event(hge_ctx, evts_webhook, "t1_cols", table, "UPDATE", {}, webhook_path = "/new", get_timeout = 0)

        # An update on c3 (a listened-to column) must fire an event.
        where_exp = {"c1": 1}
        set_exp = {"c3": {"name": "bellamy"}}
        exp_ev_data = {
            "old": {"c1": 1, "c2": "world", "c3": {"name": "clarke"}},
            "new": {"c1": 1, "c2": "world", "c3": {"name": "bellamy"}}
        }
        update(hge_ctx, table, where_exp, set_exp)
        check_event(hge_ctx, evts_webhook, "t1_cols", table, "UPDATE", exp_ev_data, webhook_path ="/new")

        # An update on c1 (also listened to) must fire an event.
        where_exp = {"c1": 1}
        set_exp = {"c1": 2}
        exp_ev_data = {
            "old": {"c1": 1, "c2": "world", "c3": {"name": "bellamy"}},
            "new": {"c1": 2, "c2": "world", "c3": {"name": "bellamy"}}
        }
        update(hge_ctx, table, where_exp, set_exp)
        check_event(hge_ctx, evts_webhook, "t1_cols", table, "UPDATE", exp_ev_data, webhook_path ="/new")

        # Deletes fire for all columns under the updated trigger.
        where_exp = {"c1": 2}
        exp_ev_data = {
            "old": {"c1": 2, "c2": "world", "c3": {"name": "bellamy"}},
            "new": None
        }
        delete(hge_ctx, table, where_exp)
        check_event(hge_ctx, evts_webhook, "t1_cols", table, "DELETE", exp_ev_data, webhook_path = "/new")
|
2022-08-02 22:32:46 +03:00
|
|
|
@pytest.mark.backend('mssql')
class TestUpdateEventQueryMSSQL(object):
    """MSSQL variant of TestUpdateEventQuery: replace a trigger's definition
    and verify events fire only per the updated column list."""

    @classmethod
    def dir(cls):
        # Setup/teardown YAML (shared directory with the Postgres variant).
        return 'queries/event_triggers/update_query'

    @pytest.fixture(autouse=True)
    def transact(self, request, hge_ctx, evts_webhook):
        """Create the MSSQL schema + trigger, then overwrite the trigger."""
        print("In setup method")
        # Adds trigger on 'test_t1' with
        # insert:
        #   columns: '*'
        # update:
        #   columns: ["c3", "c4"]
        hge_ctx.v2q_f(self.dir() + '/schema-setup-mssql.yaml')
        hge_ctx.v1metadataq_f(self.dir() + '/create-setup-mssql.yaml')

        # overwrites trigger added above, with
        # delete:
        #   columns: "*"
        # update:
        #   columns: ["c1", "c2", "c4"]

        resp = hge_ctx.v1metadataq_f(self.dir() + '/update-setup-mssql.yaml')
        sources = resp[1]["sources"]
        # Find the mssql source and confirm the trigger now targets '/new'.
        for source in sources:
            if source["name"] == "mssql":
                assert source["tables"][0]["event_triggers"][0]["webhook"] == '{{EVENT_WEBHOOK_HANDLER}}/new'

        yield
        print("--- TEARDOWN STARTED -----")
        resp = hge_ctx.v2q_f(self.dir() + '/teardown-mssql.yaml')

    def test_update_basic(self, hge_ctx, evts_webhook):
        """Only updates touching c1/c2/c4 (and deletes) should fire events."""
        table = {"schema": "hge_tests", "name": "test_t1"}

        # Expect that inserting a row (which would have triggered in original
        # create_event_trigger) does not trigger
        init_row = {"c1": 1, "c2": 100, "c3": "hello", "c4": "{'name': 'clarke'}"}
        resp = insert_mutation(hge_ctx, table, init_row)
        with pytest.raises(queue.Empty):
            check_event(hge_ctx, evts_webhook, "t1_cols", table, "INSERT", {}, webhook_path = "/new", get_timeout = 0)

        # Likewise for an update on c3:
        where_exp = '{c1: {_eq: 1}}'
        set_exp = '{c3: "world"}'
        resp = update_mutation(hge_ctx, table, where_exp, set_exp)
        print("--- RESP 1 ---", resp)
        with pytest.raises(queue.Empty):
            check_event(hge_ctx, evts_webhook, "t1_cols", table, "UPDATE", {}, webhook_path = "/new", get_timeout = 0)

        # Update on row c4 should initiate the event trigger
        where_exp = '{c1: {_eq: 1}}'
        set_exp = '{c4: "{\'name\': \'bellamy\'}"}'
        exp_ev_data = {
            "old": {"c1": 1, "c2":100, "c3": "world", "c4": "{'name': 'clarke'}"},
            "new": {"c1": 1, "c2":100, "c3": "world", "c4": "{'name': 'bellamy'}"}
        }
        resp = update_mutation(hge_ctx, table, where_exp, set_exp)
        print("----- RESP 2 ----", resp)

        check_event(hge_ctx, evts_webhook, "t1_cols", table, "UPDATE", exp_ev_data, webhook_path ="/new")

        # Update on row c2 should initiate the event trigger
        where_exp = '{c1: {_eq: 1}}'
        set_exp = '{c2: 101}'
        exp_ev_data = {
            "old": {"c1": 1, "c2":100, "c3": "world", "c4": "{'name': 'bellamy'}"},
            "new": {"c1": 1, "c2":101, "c3": "world", "c4": "{'name': 'bellamy'}"}
        }
        resp = update_mutation(hge_ctx, table, where_exp, set_exp)
        print("----- RESP 3 ----", resp)
        check_event(hge_ctx, evts_webhook, "t1_cols", table, "UPDATE", exp_ev_data, webhook_path ="/new")

        # Test Delete Event Trigger
        where_exp = '{c1: {_eq: 1}}'
        exp_ev_data = {
            "old": {"c1": 1, "c2":101, "c3": "world", "c4": "{'name': 'bellamy'}"},
            "new": None
        }
        resp = delete_mutation(hge_ctx, table, where_exp)
        print("----- RESP 4 ----", resp)
        check_event(hge_ctx, evts_webhook, "t1_cols", table, "DELETE", exp_ev_data, webhook_path = "/new")
|
|
2020-02-13 12:14:02 +03:00
|
|
|
@usefixtures('per_method_tests_db_state')
class TestDeleteEventQuery(object):
    """Checks that deleting an event trigger stops all event delivery:
    inserts, updates and deletes after trigger removal must produce no events."""

    # Base directory for the fixture YAML below.
    directory = 'queries/event_triggers'

    # Creates the trigger (basic/setup) and then deletes it (delete_query/setup).
    setup_files = [
        directory + '/basic/setup.yaml',
        directory + '/delete_query/setup.yaml'
    ]

    teardown_files = [ directory + '/delete_query/teardown.yaml']

    # Ensure deleting an event trigger works
    def test_delete_basic(self, hge_ctx, evts_webhook):
        """No operation should fire an event once the trigger is gone."""
        table = {"schema": "hge_tests", "name": "test_t1"}

        init_row = {"c1": 1, "c2": "hello"}
        exp_ev_data = {
            "old": None,
            "new": init_row
        }
        insert(hge_ctx, table, init_row)
        with pytest.raises(queue.Empty):
            check_event(hge_ctx, evts_webhook, "t1_all", table, "INSERT", exp_ev_data, get_timeout=0)

        where_exp = {"c1": 1}
        set_exp = {"c2": "world"}
        exp_ev_data = {
            "old": init_row,
            "new": {"c1": 1, "c2": "world"}
        }
        update(hge_ctx, table, where_exp, set_exp)
        with pytest.raises(queue.Empty):
            check_event(hge_ctx, evts_webhook, "t1_all", table, "UPDATE", exp_ev_data, get_timeout=0)

        exp_ev_data = {
            "old": {"c1": 1, "c2": "world"},
            "new": None
        }
        delete(hge_ctx, table, where_exp)
        with pytest.raises(queue.Empty):
            # NOTE: use a bit of a delay here, to catch any stray events generated above
            check_event(hge_ctx, evts_webhook, "t1_all", table, "DELETE", exp_ev_data, get_timeout=2)
|
2022-08-02 22:32:46 +03:00
|
|
|
@pytest.mark.backend('mssql')
@usefixtures('per_method_tests_db_state')
class TestDeleteEventQueryMSSQL(object):
    """MSSQL variant of TestDeleteEventQuery: after the trigger is deleted,
    no operation should produce an event."""

    @classmethod
    def dir(cls):
        # Setup/teardown YAML for the delete-trigger tests.
        return 'queries/event_triggers/delete_query'

    # Ensure deleting an event trigger works
    def test_delete_basic(self, hge_ctx, evts_webhook):
        """No operation should fire an event once the trigger is gone."""
        table = {"schema": "hge_tests", "name": "test_t1"}

        init_row = {"c1": 1, "c2": "hello"}
        exp_ev_data = {
            "old": None,
            "new": init_row
        }
        resp = insert_mutation(hge_ctx, table, init_row)
        print("----- RESP 1 -----", resp)
        with pytest.raises(queue.Empty):
            check_event(hge_ctx, evts_webhook, "t1_all", table, "INSERT", exp_ev_data, get_timeout=0)

        where_exp = '{c1: {_eq: 1}}'
        set_exp = '{c2: "world"}'
        exp_ev_data = {
            "old": init_row,
            "new": {"c1": 1, "c2": "world"}
        }
        resp = update_mutation(hge_ctx, table, where_exp, set_exp)
        print("----- RESP 2 -----", resp)
        with pytest.raises(queue.Empty):
            check_event(hge_ctx, evts_webhook, "t1_all", table, "UPDATE", exp_ev_data, get_timeout=0)

        exp_ev_data = {
            "old": {"c1": 1, "c2": "world"},
            "new": None
        }
        resp = delete_mutation(hge_ctx, table, where_exp)
        print("----- RESP 3 -----", resp)
        with pytest.raises(queue.Empty):
            # NOTE: use a bit of a delay here, to catch any stray events generated above
            check_event(hge_ctx, evts_webhook, "t1_all", table, "DELETE", exp_ev_data, get_timeout=2)
|
|
2022-04-21 10:19:37 +03:00
|
|
|
|
2020-02-13 12:14:02 +03:00
|
|
|
@usefixtures('per_class_tests_db_state')
class TestEventSelCols:
    """Checks triggers defined on a *subset* of columns: only updates that
    touch the listened-to columns fire events, and those columns cannot be
    dropped while the trigger depends on them."""

    @classmethod
    def dir(cls):
        # Setup/teardown YAML for the selected-columns trigger.
        return 'queries/event_triggers/selected_cols'

    def test_selected_cols(self, hge_ctx, evts_webhook):
        """Updates on unlistened columns are silent; listened ones fire."""
        table = {"schema": "hge_tests", "name": "test_t1"}

        init_row = {"c1": 1, "c2": "hello"}
        exp_ev_data = {
            "old": None,
            "new": {"c1": 1, "c2": "hello"}
        }
        insert(hge_ctx, table, init_row)
        check_event(hge_ctx, evts_webhook, "t1_cols", table, "INSERT", exp_ev_data)

        where_exp = {"c1": 1}
        set_exp = {"c2": "world"}
        # expected no event hence previous expected data
        update(hge_ctx, table, where_exp, set_exp)
        with pytest.raises(queue.Empty):
            check_event(hge_ctx, evts_webhook, "t1_cols", table, "UPDATE", exp_ev_data, get_timeout=0)

        # c1 is a listened-to column, so this update must fire.
        where_exp = {"c1": 1}
        set_exp = {"c1": 2}
        exp_ev_data = {
            "old": {"c1": 1, "c2": "world"},
            "new": {"c1": 2, "c2": "world"}
        }
        update(hge_ctx, table, where_exp, set_exp)
        check_event(hge_ctx, evts_webhook, "t1_cols", table, "UPDATE", exp_ev_data)

        where_exp = {"c1": 2}
        exp_ev_data = {
            "old": {"c1": 2, "c2": "world"},
            "new": None
        }
        delete(hge_ctx, table, where_exp)
        check_event(hge_ctx, evts_webhook, "t1_cols", table, "DELETE", exp_ev_data)

    def test_selected_cols_dep(self, hge_ctx, evts_webhook):
        """Dropping a column the trigger listens on must be rejected."""
        # c1 is referenced by the trigger, so the drop must 400 with a
        # dependency error.
        resp = hge_ctx.v1q({
            "type": "run_sql",
            "args": {
                "sql": "alter table hge_tests.test_t1 drop column c1"
            }
        }, expected_status_code = 400)
        assert resp['code'] == "dependency-error", resp

        # c2 is not listened on, so dropping it should succeed.
        resp = hge_ctx.v1q({
            "type": "run_sql",
            "args": {
                "sql": "alter table hge_tests.test_t1 drop column c2"
            }
        })
|
|
2022-08-02 22:32:46 +03:00
|
|
|
@pytest.mark.backend('mssql')
@usefixtures('per_class_tests_db_state')
class TestEventSelColsMSSQL:
    """MSSQL variant of TestEventSelCols: column-scoped update triggers and
    the dependency rules for dropping columns."""

    @classmethod
    def dir(cls):
        # Setup/teardown YAML (shared directory with the Postgres variant).
        return 'queries/event_triggers/selected_cols'

    def test_selected_cols(self, hge_ctx, evts_webhook):
        """Updates on unlistened columns are silent; listened ones fire."""
        table = {"schema": "hge_tests", "name": "test_t1"}

        init_row = {"c1": 1, "c2": "hello", "c3": "bellamy"}
        exp_ev_data = {
            "old": None,
            "new": {"c1": 1, "c2": "hello", "c3": "bellamy"}
        }
        resp = insert_mutation(hge_ctx, table, init_row)
        print("----- RESP 1 -----", resp)
        check_event(hge_ctx, evts_webhook, "t1_cols", table, "INSERT", exp_ev_data)

        where_exp = '{c1: {_eq: 1}}'
        set_exp = '{c1: 2}'

        # expected no event hence previous expected data
        resp = update_mutation(hge_ctx, table, where_exp, set_exp)
        print("----- RESP 2 -----", resp)
        with pytest.raises(queue.Empty):
            check_event(hge_ctx, evts_webhook, "t1_cols", table, "UPDATE", exp_ev_data, get_timeout=0)

        # c2 is a listened-to column, so this update must fire.
        where_exp = '{c1: {_eq: 2}}'
        set_exp = '{c2: "world"}'
        exp_ev_data = {
            "old": {"c1": 2, "c2": "hello", "c3": "bellamy"},
            "new": {"c1": 2, "c2": "world", "c3": "bellamy"}
        }

        resp = update_mutation(hge_ctx, table, where_exp, set_exp)
        print("----- RESP 3 -----", resp)
        check_event(hge_ctx, evts_webhook, "t1_cols", table, "UPDATE", exp_ev_data)

        where_exp = '{c1: {_eq: 2}}'
        exp_ev_data = {
            "old": {"c1": 2, "c2": "world", "c3": "bellamy"},
            "new": None
        }
        resp = delete_mutation(hge_ctx, table, where_exp)
        print("----- RESP 4 -----", resp)
        check_event(hge_ctx, evts_webhook, "t1_cols", table, "DELETE", exp_ev_data)

    def test_selected_cols_dep(self, hge_ctx, evts_webhook):
        """Column-drop dependency rules on MSSQL."""
        # Dropping Primary Key is not allowed
        resp = hge_ctx.v2q({
            "type": "mssql_run_sql",
            "args": {
                "source": "mssql",
                "sql": "alter table hge_tests.test_t1 drop column c1"
            }
        }, expected_status_code = 400)
        assert resp['code'] == "bad-request", resp

        # 'C2' cannot be dropped because event trigger is created on that column
        resp = hge_ctx.v2q({
            "type": "mssql_run_sql",
            "args": {
                "source": "mssql",
                "sql": "alter table hge_tests.test_t1 drop column c2"
            }
        }, expected_status_code = 400)
        print("----- RESP 5 -----", resp)
        assert resp['code'] == "dependency-error", resp

        # c3 is not listened on, so dropping it should succeed.
        resp = hge_ctx.v2q({
            "type": "mssql_run_sql",
            "args": {
                "source": "mssql",
                "sql": "alter table hge_tests.test_t1 drop column c3"
            }
        })
        print("----- RESP 6 -----", resp)
|
|
2020-02-13 12:14:02 +03:00
|
|
|
@usefixtures('per_method_tests_db_state')
class TestEventInsertOnly:
    """Checks a trigger configured for INSERT only: updates and deletes must
    not produce events."""

    @classmethod
    def dir(cls):
        # Setup/teardown YAML for the insert-only trigger.
        return 'queries/event_triggers/insert_only'

    def test_insert_only(self, hge_ctx, evts_webhook):
        """INSERT fires an event; UPDATE and DELETE stay silent."""
        table = {"schema": "hge_tests", "name": "test_t1"}

        init_row = {"c1": 1, "c2": "hello"}
        exp_ev_data = {
            "old": None,
            "new": init_row
        }
        insert(hge_ctx, table, init_row)
        check_event(hge_ctx, evts_webhook, "t1_insert", table, "INSERT", exp_ev_data)

        where_exp = {"c1": 1}
        set_exp = {"c2": "world"}
        exp_ev_data = {
            "old": init_row,
            "new": {"c1": 1, "c2": "world"}
        }
        update(hge_ctx, table, where_exp, set_exp)
        with pytest.raises(queue.Empty):
            check_event(hge_ctx, evts_webhook, "t1_insert", table, "UPDATE", exp_ev_data, get_timeout=0)

        exp_ev_data = {
            "old": {"c1": 1, "c2": "world"},
            "new": None
        }
        delete(hge_ctx, table, where_exp)
        with pytest.raises(queue.Empty):
            # NOTE: use a bit of a delay here, to catch any stray events generated above
            check_event(hge_ctx, evts_webhook, "t1_insert", table, "DELETE", exp_ev_data, get_timeout=2)
|
|
2022-08-02 22:32:46 +03:00
|
|
|
@pytest.mark.backend('mssql')
@usefixtures('per_method_tests_db_state')
class TestEventInsertOnlyMSSQL:
    """MSSQL variant of TestEventInsertOnly: only INSERT operations may
    produce events."""

    @classmethod
    def dir(cls):
        # Setup/teardown YAML (shared directory with the Postgres variant).
        return 'queries/event_triggers/insert_only'

    def test_insert_only(self, hge_ctx, evts_webhook):
        """INSERT fires an event; UPDATE and DELETE stay silent."""
        table = {"schema": "hge_tests", "name": "test_t1"}

        init_row = {"c1": 1, "c2": "hello"}
        exp_ev_data = {
            "old": None,
            "new": init_row
        }
        resp = insert_mutation(hge_ctx, table, init_row)
        print("----- RESP 1 -----", resp)
        check_event(hge_ctx, evts_webhook, "t1_insert", table, "INSERT", exp_ev_data)

        where_exp = '{c1: {_eq: 1}}'
        set_exp = '{c2: "world"}'
        exp_ev_data = {
            "old": init_row,
            "new": {"c1": 1, "c2": "world"}
        }
        resp = update_mutation(hge_ctx, table, where_exp, set_exp)
        print("----- RESP 2 -----", resp)
        with pytest.raises(queue.Empty):
            check_event(hge_ctx, evts_webhook, "t1_insert", table, "UPDATE", exp_ev_data, get_timeout=0)

        exp_ev_data = {
            "old": {"c1": 1, "c2": "world"},
            "new": None
        }
        resp = delete_mutation(hge_ctx, table, where_exp)
        print("----- RESP 3 -----", resp)
        with pytest.raises(queue.Empty):
            # NOTE: use a bit of a delay here, to catch any stray events generated above
            check_event(hge_ctx, evts_webhook, "t1_insert", table, "DELETE", exp_ev_data, get_timeout=2)
|
|
|
2022-08-02 22:32:46 +03:00
|
|
|
@pytest.mark.backend('mssql')
|
2022-04-21 10:19:37 +03:00
|
|
|
@usefixtures('per_method_tests_db_state')
|
|
|
|
class TestEventUpdateOnlyMSSQL:
|
|
|
|
|
|
|
|
@classmethod
|
|
|
|
def dir(cls):
|
|
|
|
return 'queries/event_triggers/update_only'
|
|
|
|
|
|
|
|
def test_update_only(self, hge_ctx, evts_webhook):
|
|
|
|
table = {"schema": "hge_tests", "name": "test_t1"}
|
|
|
|
|
|
|
|
init_row = {"c1": 1, "c2": "hello"}
|
|
|
|
exp_ev_data = {
|
|
|
|
"old": None,
|
|
|
|
"new": init_row
|
|
|
|
}
|
2022-07-05 21:00:08 +03:00
|
|
|
resp = insert_mutation(hge_ctx, table, init_row)
|
2022-04-21 10:19:37 +03:00
|
|
|
print("----- RESP 1 -----", resp)
|
|
|
|
# INSERT operations will not fire event triggers
|
|
|
|
with pytest.raises(queue.Empty):
|
|
|
|
check_event(hge_ctx, evts_webhook, "t1_update", table, "INSERT", exp_ev_data, get_timeout=0)
|
|
|
|
|
|
|
|
# CASE 1: Primary key is not changed, and some updates happen
|
|
|
|
where_exp = '{c1: {_eq: 1}}'
|
|
|
|
set_exp = '{c2: "world"}'
|
|
|
|
exp_ev_data = {
|
|
|
|
"old": {"c1": 1, "c2": "hello"},
|
|
|
|
"new": {"c1": 1, "c2": "world"}
|
|
|
|
}
|
2022-07-05 21:00:08 +03:00
|
|
|
resp = update_mutation(hge_ctx, table, where_exp, set_exp)
|
2022-04-21 10:19:37 +03:00
|
|
|
print("----- RESP 2 -----", resp)
|
|
|
|
check_event(hge_ctx, evts_webhook, "t1_update", table, "UPDATE", exp_ev_data)
|
|
|
|
|
|
|
|
# CASE 2: Primary key has changed.
|
|
|
|
# When PK of MSSQL changes, then old data will be NULL
|
|
|
|
where_exp = '{c1: {_eq: 1}}'
|
|
|
|
set_exp = '{c1: 2}'
|
|
|
|
exp_ev_data = {
|
|
|
|
"old": None,
|
|
|
|
"new": {"c1": 2, "c2": "world"}
|
|
|
|
}
|
2022-07-05 21:00:08 +03:00
|
|
|
resp = update_mutation(hge_ctx, table, where_exp, set_exp)
|
2022-04-21 10:19:37 +03:00
|
|
|
print("----- RESP 3 -----", resp)
|
|
|
|
check_event(hge_ctx, evts_webhook, "t1_update", table, "UPDATE", exp_ev_data)
|
|
|
|
|
|
|
|
# DELETE operations will not fire event triggers
|
|
|
|
where_exp = '{c1: {_eq: 2}}'
|
|
|
|
exp_ev_data = {
|
|
|
|
"old": {"c1": 2, "c2": "world"},
|
|
|
|
"new": None
|
|
|
|
}
|
2022-07-05 21:00:08 +03:00
|
|
|
resp = delete_mutation(hge_ctx, table, where_exp)
|
2022-04-21 10:19:37 +03:00
|
|
|
print("----- RESP 4 -----", resp)
|
|
|
|
with pytest.raises(queue.Empty):
|
|
|
|
# NOTE: use a bit of a delay here, to catch any stray events generated above
|
|
|
|
check_event(hge_ctx, evts_webhook, "t1_update", table, "DELETE", exp_ev_data, get_timeout=2)
|
2022-04-22 22:53:12 +03:00
|
|
|
|
2022-04-21 10:19:37 +03:00
|
|
|
# CASE 3: An Update transaction, which can give rise to both CASE 1 and CASE 2
|
2022-04-22 22:53:12 +03:00
|
|
|
# described above.
|
2022-04-21 10:19:37 +03:00
|
|
|
# i.e for a single update transaction which changes the primary key of a row
|
|
|
|
# and a non primary key of another row, 2 event triggers should be fired.
|
|
|
|
def test_update_both_cases(self, hge_ctx, evts_webhook):
    """Run one MSSQL UPDATE transaction that exercises both payload cases:
    a changed row whose PK value still exists (old and new both present)
    and a row whose PK becomes a brand-new value (old is NULL).
    Expects exactly two UPDATE events from the single transaction."""
    table = {"schema": "hge_tests", "name": "test_t1"}
    # The seeding INSERT below must not produce any event payload.
    exp_insert_ev_data = {}

    # Set up the table values to check the update transaction
    insert_values_sql = {
        "type":"mssql_run_sql",
        "args":{
            "source": "mssql",
            "sql":'''
            INSERT INTO hge_tests.test_t1 ([c1], [c2]) VALUES (1, 'hello'), (2, 'world')
            '''
        }
    }
    resp = hge_ctx.v2q(insert_values_sql)
    print("----------- resp ----------\n", resp)

    # INSERT operations will not fire event triggers (the trigger listens
    # only for UPDATE), so the webhook queue must stay empty.
    with pytest.raises(queue.Empty):
        check_event(hge_ctx, evts_webhook, "t1_update", table, "INSERT", exp_insert_ev_data, get_timeout=0)

    # An UPDATE SQL which will create two events, one for each case
    # described above.
    # i.e for a single update transaction which changes the primary key of a row
    # and a non primary key of another row, 2 event triggers should be fired.
    # The following update transaction does the following changes
    # We have the following values in table [(1, 'hello'), (2, 'world')]
    # UPDATE transaction changes that to [(2, 'hello'), (3, 'clarke')]
    update_values_sql = {
        "type":"mssql_run_sql",
        "args":{
            "source": "mssql",
            "sql":'''
            UPDATE hge_tests.test_t1
            SET c1 = (CASE WHEN c1 = 1 THEN 2
                           WHEN c1 = 2 THEN 3
                           ELSE c1 END),
                c2 = (CASE WHEN c1 = 2 THEN N'clarke' ELSE c2 END)
            '''
        }
    }

    # CASE 2.1 : Primary key ('id') is updated, but the updated primary key value
    # is already present in the table, then both data.old and data.new will be
    # constructed
    exp_ev_data_case_1 = {
        "old": {"c1": 2, "c2": "world"},
        "new": {"c1": 2, "c2": "hello"}
    }

    # CASE 2.2: Primary key ('id') is updated to a value which did not exist before
    # in the table, so "data.old" will be NULL
    exp_ev_data_case_2 = {
        "old": None,
        "new": {"c1": 3, "c2": "clarke"}
    }

    resp = hge_ctx.v2q(update_values_sql)
    print("----------- resp ----------\n", resp)

    exp_ev_datas = [exp_ev_data_case_1, exp_ev_data_case_2]

    # The UPDATE SQL above will trigger exactly two triggers, one for each case
    check_events(hge_ctx, evts_webhook, "t1_update", table, "UPDATE", 2, exp_ev_datas)
|
2018-10-30 12:21:58 +03:00
|
|
|
|
2020-02-13 12:14:02 +03:00
|
|
|
@usefixtures('per_class_tests_db_state')
class TestEventSelPayload:
    """Event trigger configured to deliver only selected columns (Postgres).

    The 't1_payload' trigger reports c1 on UPDATE and c2 on DELETE, so every
    payload below contains only those columns rather than the whole row.
    """

    @classmethod
    def dir(cls):
        return 'queries/event_triggers/selected_payload'

    def test_selected_payload(self, hge_ctx, evts_webhook):
        target = {"schema": "hge_tests", "name": "test_t1"}

        # INSERT: the full new row is delivered, there is no old image.
        seed_row = {"c1": 1, "c2": "hello"}
        insert(hge_ctx, target, seed_row)
        check_event(hge_ctx, evts_webhook, "t1_payload", target, "INSERT",
                    {"old": None, "new": {"c1": 1, "c2": "hello"}})

        # UPDATE of a non-selected column: only c1 shows up in the payload.
        update(hge_ctx, target, {"c1": 1}, {"c2": "world"})
        check_event(hge_ctx, evts_webhook, "t1_payload", target, "UPDATE",
                    {"old": {"c1": 1}, "new": {"c1": 1}})

        # UPDATE of the selected column itself: both images carry c1 only.
        update(hge_ctx, target, {"c1": 1}, {"c1": 2})
        check_event(hge_ctx, evts_webhook, "t1_payload", target, "UPDATE",
                    {"old": {"c1": 1}, "new": {"c1": 2}})

        # DELETE: only the delete-payload column (c2) is reported.
        delete(hge_ctx, target, {"c1": 2})
        check_event(hge_ctx, evts_webhook, "t1_payload", target, "DELETE",
                    {"old": {"c2": "world"}, "new": None})

    def test_selected_payload_dep(self, hge_ctx):
        # Columns referenced by the trigger's payload definition cannot be
        # dropped; the server must reject both DDL statements.
        for column in ("c1", "c2"):
            resp = hge_ctx.v1q({
                "type": "run_sql",
                "args": {
                    "sql": "alter table hge_tests.test_t1 drop column " + column
                }
            }, expected_status_code = 400)
            assert resp['code'] == "dependency-error", resp
|
2018-11-14 10:13:01 +03:00
|
|
|
|
2022-08-02 22:32:46 +03:00
|
|
|
@pytest.mark.backend('mssql')
@usefixtures('per_class_tests_db_state')
class TestEventSelPayloadMSSQL:
    """Selected-column event payloads on MSSQL, driven via GraphQL mutations.

    The 't1_payload' trigger is declared on specific columns, so the delivered
    "old"/"new" images below contain only those columns, not the full row.
    """

    @classmethod
    def dir(cls):
        return 'queries/event_triggers/selected_payload'

    def test_selected_payload(self, hge_ctx, evts_webhook):
        table = {"schema": "hge_tests", "name": "test_t1"}

        # INSERT delivers the complete new row; there is no old image.
        init_row = {"c1": 1, "c2": "hello", "c3": "bellamy"}
        exp_ev_data = {
            "old": None,
            "new": {"c1": 1, "c2": "hello", "c3": "bellamy"}
        }
        resp = insert_mutation(hge_ctx, table, init_row)
        print("----- RESP 1 -----", resp)
        check_event(hge_ctx, evts_webhook, "t1_payload", table, "INSERT", exp_ev_data)

        # UPDATE of c2: payload is restricted to the selected column c2.
        where_exp = '{c1: {_eq: 1}}'
        set_exp = '{c2: "world"}'
        exp_ev_data = {
            "old": {"c2": "hello"},
            "new": {"c2": "world"}
        }
        resp = update_mutation(hge_ctx, table, where_exp, set_exp)
        print("----- RESP 2 -----", resp)
        check_event(hge_ctx, evts_webhook, "t1_payload", table, "UPDATE", exp_ev_data)

        # UPDATE of c3 (not in the UPDATE payload selection): the delivered
        # images still show only c2, which is unchanged.
        where_exp = '{c1: {_eq: 1}}'
        set_exp = '{c3: "harry"}'
        exp_ev_data = {
            "old": {"c2": "world"},
            "new": {"c2": "world"}
        }
        resp = update_mutation(hge_ctx, table, where_exp, set_exp)
        print("----- RESP 3 -----", resp)
        check_event(hge_ctx, evts_webhook, "t1_payload", table, "UPDATE", exp_ev_data)

        # DELETE: only the delete-payload column (c3) is reported.
        where_exp = '{c1: {_eq: 1}}'
        exp_ev_data = {
            "old": {"c3": "harry"},
            "new": None
        }
        resp = delete_mutation(hge_ctx, table, where_exp)
        print("----- RESP 4 -----", resp)
        check_event(hge_ctx, evts_webhook, "t1_payload", table, "DELETE", exp_ev_data)

    def test_selected_payload_dep(self, hge_ctx):
        """Columns the trigger depends on must be protected from DDL drops."""
        # Dropping Primary Key is not allowed
        resp = hge_ctx.v2q({
            "type": "mssql_run_sql",
            "args": {
                "source": "mssql",
                "sql": "alter table hge_tests.test_t1 drop column c1"
            }
        }, expected_status_code = 400)
        print("----- RESP 5 -----", resp)
        assert resp['code'] == "bad-request", resp

        # 'C2' cannot be dropped because event trigger is created on that column
        resp = hge_ctx.v2q({
            "type": "mssql_run_sql",
            "args": {
                "source": "mssql",
                "sql": "alter table hge_tests.test_t1 drop column c2"
            }
        }, expected_status_code = 400)
        print("----- RESP 6 -----", resp)
        assert resp['code'] == "dependency-error", resp

        # 'C3' cannot be dropped because event trigger is created on that column
        resp = hge_ctx.v2q({
            "type": "mssql_run_sql",
            "args": {
                "source": "mssql",
                "sql": "alter table hge_tests.test_t1 drop column c3"
            }
        }, expected_status_code = 400)
        print("----- RESP 7 -----", resp)
        assert resp['code'] == "dependency-error", resp
|
|
|
|
|
2020-02-13 12:14:02 +03:00
|
|
|
@usefixtures('per_method_tests_db_state')
class TestWebhookEvent(object):
    """The 't1_all' trigger (webhook taken from an env var) fires for
    INSERT, UPDATE and DELETE, delivering full old/new row images."""

    @classmethod
    def dir(cls):
        return 'queries/event_triggers/webhook_env'

    def test_basic(self, hge_ctx, evts_webhook):
        target = {"schema": "hge_tests", "name": "test_t1"}
        seed_row = {"c1": 1, "c2": "hello"}
        updated_row = {"c1": 1, "c2": "world"}

        # INSERT: new image only.
        insert(hge_ctx, target, seed_row)
        check_event(hge_ctx, evts_webhook, "t1_all", target, "INSERT",
                    {"old": None, "new": seed_row})

        # UPDATE: both before- and after-images.
        update(hge_ctx, target, {"c1": 1}, {"c2": "world"})
        check_event(hge_ctx, evts_webhook, "t1_all", target, "UPDATE",
                    {"old": seed_row, "new": updated_row})

        # DELETE: old image only.
        delete(hge_ctx, target, {"c1": 1})
        check_event(hge_ctx, evts_webhook, "t1_all", target, "DELETE",
                    {"old": updated_row, "new": None})
|
2019-01-28 09:12:52 +03:00
|
|
|
|
2022-08-02 22:32:46 +03:00
|
|
|
@pytest.mark.backend('mssql')
@usefixtures('per_method_tests_db_state')
class TestWebhookEventMSSQL(object):
    """MSSQL flavour of the env-var webhook test, driven via GraphQL mutations."""

    @classmethod
    def dir(cls):
        return 'queries/event_triggers/webhook_env'

    def test_basic(self, hge_ctx, evts_webhook):
        target = {"schema": "hge_tests", "name": "test_t1"}
        seed_row = {"c1": 1, "c2": "hello"}
        updated_row = {"c1": 1, "c2": "world"}

        # INSERT: new image only.
        resp = insert_mutation(hge_ctx, target, seed_row)
        print("----- RESP 1 -----", resp)
        check_event(hge_ctx, evts_webhook, "t1_all", target, "INSERT",
                    {"old": None, "new": seed_row})

        # UPDATE: both before- and after-images.
        resp = update_mutation(hge_ctx, target, '{c1: {_eq: 1}}', '{c2: "world"}')
        print("----- RESP 2 -----", resp)
        check_event(hge_ctx, evts_webhook, "t1_all", target, "UPDATE",
                    {"old": seed_row, "new": updated_row})

        # DELETE: old image only.
        resp = delete_mutation(hge_ctx, target, '{c1: {_eq: 1}}')
        print("----- RESP 3 -----", resp)
        check_event(hge_ctx, evts_webhook, "t1_all", target, "DELETE",
                    {"old": updated_row, "new": None})
|
|
|
|
|
2020-09-10 12:30:34 +03:00
|
|
|
@usefixtures('per_method_tests_db_state')
class TestEventWebhookTemplateURL(object):
    """Trigger whose webhook URL is rendered from a template; deliveries
    must land on the rendered path ('/trigger')."""

    @classmethod
    def dir(cls):
        return 'queries/event_triggers/webhook_template_url'

    def test_basic(self, hge_ctx, evts_webhook):
        target = {"schema": "hge_tests", "name": "test_t1"}
        seed_row = {"c1": 1, "c2": "hello"}
        updated_row = {"c1": 1, "c2": "world"}

        # INSERT event arrives at the templated path.
        insert(hge_ctx, target, seed_row)
        check_event(hge_ctx, evts_webhook, "t1_all", target, "INSERT",
                    {"old": None, "new": seed_row}, webhook_path = '/trigger')

        # UPDATE event arrives at the templated path.
        update(hge_ctx, target, {"c1": 1}, {"c2": "world"})
        check_event(hge_ctx, evts_webhook, "t1_all", target, "UPDATE",
                    {"old": seed_row, "new": updated_row}, webhook_path = '/trigger')

        # DELETE event arrives at the templated path.
        delete(hge_ctx, target, {"c1": 1})
        check_event(hge_ctx, evts_webhook, "t1_all", target, "DELETE",
                    {"old": updated_row, "new": None}, webhook_path = '/trigger')
|
|
|
|
|
2022-08-02 22:32:46 +03:00
|
|
|
@pytest.mark.backend('mssql')
@usefixtures('per_method_tests_db_state')
class TestEventWebhookTemplateURLMSSQL(object):
    """Templated webhook URL on MSSQL: every delivery must arrive on the
    rendered '/trigger' path."""

    @classmethod
    def dir(cls):
        return 'queries/event_triggers/webhook_template_url'

    def test_basic(self, hge_ctx, evts_webhook):
        table = {"schema": "hge_tests", "name": "test_t1"}

        # INSERT: new image only, delivered on '/trigger'.
        init_row = {"c1": 1, "c2": "hello"}
        exp_ev_data = {
            "old": None,
            "new": init_row
        }
        resp = insert_mutation(hge_ctx, table, init_row)
        print("----- RESP 1 -----", resp)
        check_event(hge_ctx, evts_webhook, "t1_all", table, "INSERT", exp_ev_data, webhook_path = '/trigger')

        # UPDATE: both images, delivered on '/trigger'.
        where_exp = '{c1: {_eq: 1}}'
        set_exp = '{c2: "world"}'
        exp_ev_data = {
            "old": init_row,
            "new": {"c1": 1, "c2": "world"}
        }
        resp = update_mutation(hge_ctx, table, where_exp, set_exp)
        print("----- RESP 2 -----", resp)
        check_event(hge_ctx, evts_webhook, "t1_all", table, "UPDATE", exp_ev_data, webhook_path = '/trigger')

        # DELETE: old image only, delivered on '/trigger'.
        exp_ev_data = {
            "old": {"c1": 1, "c2": "world"},
            "new": None
        }
        resp = delete_mutation(hge_ctx, table, where_exp)
        print("----- RESP 3 -----", resp)
        check_event(hge_ctx, evts_webhook, "t1_all", table, "DELETE", exp_ev_data, webhook_path = '/trigger')
|
|
|
|
|
2020-02-13 12:14:02 +03:00
|
|
|
@usefixtures('per_method_tests_db_state')
class TestEventSessionVariables(object):
    """x-hasura-* request headers must be forwarded as session variables in
    the event payload; other HTTP headers must not be."""

    @classmethod
    def dir(cls):
        return 'queries/event_triggers/basic'

    def test_basic(self, hge_ctx, evts_webhook):
        table = {"schema": "hge_tests", "name": "test_t1"}

        init_row = {"c1": 1, "c2": "hello"}
        exp_ev_data = {
            "old": None,
            "new": init_row
        }
        # Every header here is an x-hasura-* session variable and should
        # round-trip into the delivered event.
        session_variables = { 'x-hasura-role': 'admin', 'x-hasura-allowed-roles': "['admin','user']", 'x-hasura-user-id': '1'}
        insert(hge_ctx, table, init_row, headers = session_variables)
        check_event(hge_ctx, evts_webhook, "t1_all", table, "INSERT", exp_ev_data, session_variables = session_variables)

        where_exp = {"c1": 1}
        set_exp = {"c2": "world"}
        exp_ev_data = {
            "old": init_row,
            "new": {"c1": 1, "c2": "world"}
        }
        # 'X-Random-Header' is not an x-hasura-* header, so it must NOT show
        # up among the event's session variables.
        session_variables = { 'x-hasura-role': 'admin', 'x-hasura-random': 'some_random_info', 'X-Random-Header': 'not_session_variable'}
        update(hge_ctx, table, where_exp, set_exp, headers = session_variables)
        # Drop the plain header before comparing against the event payload.
        session_variables.pop('X-Random-Header')
        check_event(hge_ctx, evts_webhook, "t1_all", table, "UPDATE", exp_ev_data, session_variables = session_variables)

        # DELETE without custom headers: only the row images are checked.
        exp_ev_data = {
            "old": {"c1": 1, "c2": "world"},
            "new": None
        }
        delete(hge_ctx, table, where_exp)
        check_event(hge_ctx, evts_webhook, "t1_all", table, "DELETE", exp_ev_data)
|
2019-05-13 12:41:07 +03:00
|
|
|
|
2022-08-02 22:32:46 +03:00
|
|
|
@pytest.mark.backend('mssql')
@usefixtures('per_method_tests_db_state')
class TestEventSessionVariablesMSSQL(object):
    """MSSQL flavour of the session-variable forwarding test: x-hasura-*
    headers appear in the event payload, other headers do not."""

    @classmethod
    def dir(cls):
        return 'queries/event_triggers/basic'

    def test_basic(self, hge_ctx, evts_webhook):
        table = {"schema": "hge_tests", "name": "test_t1"}

        init_row = {"c1": 1, "c2": "hello"}
        exp_ev_data = {
            "old": None,
            "new": init_row
        }
        # Every header here is an x-hasura-* session variable and should
        # round-trip into the delivered event.
        session_variables = { 'x-hasura-role': 'admin', 'x-hasura-allowed-roles': "['admin','user']", 'x-hasura-user-id': '1'}
        resp = insert_mutation(hge_ctx, table, init_row, headers = session_variables)
        print("----- RESP 1 -----", resp)
        check_event(hge_ctx, evts_webhook, "t1_all", table, "INSERT", exp_ev_data, session_variables = session_variables)

        where_exp = '{c1: {_eq: 1}}'
        set_exp = '{c2: "world"}'
        exp_ev_data = {
            "old": init_row,
            "new": {"c1": 1, "c2": "world"}
        }
        # 'X-Random-Header' is not an x-hasura-* header, so it must NOT show
        # up among the event's session variables.
        session_variables = { 'x-hasura-role': 'admin', 'x-hasura-random': 'some_random_info', 'X-Random-Header': 'not_session_variable'}
        resp = update_mutation(hge_ctx, table, where_exp, set_exp, headers = session_variables)
        print("----- RESP 2 -----", resp)
        # Drop the plain header before comparing against the event payload.
        session_variables.pop('X-Random-Header')
        check_event(hge_ctx, evts_webhook, "t1_all", table, "UPDATE", exp_ev_data, session_variables = session_variables)

        # DELETE without custom headers: only the row images are checked.
        exp_ev_data = {
            "old": {"c1": 1, "c2": "world"},
            "new": None
        }
        resp = delete_mutation(hge_ctx, table, where_exp)
        print("----- RESP 3 -----", resp)
        check_event(hge_ctx, evts_webhook, "t1_all", table, "DELETE", exp_ev_data)
|
2019-05-13 12:41:07 +03:00
|
|
|
|
2020-02-13 12:14:02 +03:00
|
|
|
@usefixtures('per_method_tests_db_state')
class TestManualEvents(object):
    """Manually invoked (invoke_event_trigger) events: allowed only when the
    trigger has manual delivery enabled, and must survive reload_metadata."""

    @classmethod
    def dir(cls):
        return 'queries/event_triggers/manual_events'

    def test_basic(self, hge_ctx, evts_webhook):
        # Manual invocation succeeds when the trigger enables it...
        resp = hge_ctx.v1metadataq_f(
            'queries/event_triggers/manual_events/enabled.yaml')
        print("----- RESP 1 -----", resp)
        # ...and is rejected (400) when it does not.
        resp = hge_ctx.v1metadataq_f(
            'queries/event_triggers/manual_events/disabled.yaml',
            expected_status_code = 400)
        print("----- RESP 2 -----", resp)

    # This test is being added to ensure that the manual events
    # are not failing after any reload_metadata operation, this
    # has been an issue of concern in some of the recent releases(v2.0.1 onwards)
    def test_basic_with_reload_metadata(self, hge_ctx, evts_webhook):
        reload_metadata_q = {
            "type": "reload_metadata",
            "args": {
                # NOTE(review): this class runs against Postgres, yet the
                # reload targets the "mssql" source — the MSSQL variant of
                # this test omits "source" entirely. Looks like a copy-paste
                # slip; confirm the intended source before changing it.
                "source": "mssql",
                "reload_sources": True
            }
        }

        # Manual events must keep working across repeated runs...
        for _ in range(5):
            self.test_basic(hge_ctx, evts_webhook)

        # ...and immediately after a metadata reload.
        resp = hge_ctx.v1metadataq(reload_metadata_q)
        print("----- RESP 3 -----", resp)

        self.test_basic(hge_ctx, evts_webhook)
|
|
|
|
|
2022-08-02 22:32:46 +03:00
|
|
|
@pytest.mark.backend('mssql')
@usefixtures('per_method_tests_db_state')
class TestManualEventsMSSQL(object):
    """MSSQL flavour of the manual-event tests: manual invocation is allowed
    only when enabled on the trigger, and must survive reload_metadata."""

    @classmethod
    def dir(cls):
        return 'queries/event_triggers/manual_events'

    def test_basic(self, hge_ctx, evts_webhook):
        # Manual invocation succeeds when the trigger enables it...
        hge_ctx.v1metadataq_f(
            'queries/event_triggers/manual_events/enabled-mssql.yaml')
        # ...and is rejected (400) when it does not.
        hge_ctx.v1metadataq_f(
            'queries/event_triggers/manual_events/disabled-mssql.yaml',
            expected_status_code = 400)

    # This test is being added to ensure that the manual events
    # are not failing after any reload_metadata operation, this
    # has been an issue of concern in some of the recent releases(v2.0.1 onwards)
    def test_basic_with_reload_metadata(self, hge_ctx, evts_webhook):
        reload_metadata_q = {
            "type": "reload_metadata",
            "args": {
                "reload_sources": True
            }
        }

        # Manual events must keep working across repeated runs...
        for _ in range(5):
            self.test_basic(hge_ctx, evts_webhook)

        # ...and immediately after a metadata reload.
        hge_ctx.v1metadataq(reload_metadata_q)

        self.test_basic(hge_ctx, evts_webhook)
|
2021-08-17 13:21:56 +03:00
|
|
|
|
2022-08-02 22:32:46 +03:00
|
|
|
@pytest.mark.backend('mssql','postgres')
@usefixtures('per_method_tests_db_state')
class TestEventsAsynchronousExecutionPostgresMSSQL(object):
    """Event delivery must be asynchronous on both Postgres and MSSQL."""

    @classmethod
    def dir(cls):
        return 'queries/event_triggers/async_execution'

    def test_async_execution(self,hge_ctx,evts_webhook):
        """
        A test to check if the events generated by the graphql-engine are
        processed asynchronously. This test measures the time taken to process
        all the events and that time should definitely be lesser than the time
        taken if the events were to be executed sequentially.

        This test inserts 5 rows and the webhook(/sleep_2s) takes
        ~2 seconds to process one request. So, if the graphql-engine
        were to process the events sequentially it will take 5 * 2 = 10 seconds.
        Theoretically, all the events should have been processed in ~2 seconds,
        adding a 5 seconds buffer to the comparison, so that this test
        doesn't flake in the CI.
        """
        table = {"schema": "hge_tests", "name": "test_t1"}

        # Five rows -> five events, each costing the webhook ~2s to handle.
        payload = range(1,6)
        rows = [{"c1": x, "c2": "hello"} for x in payload]
        # Seed the rows through the backend-appropriate helper.
        if (hge_ctx.backend == "postgres"):
            resp = insert_many(hge_ctx, table, rows)
        elif (hge_ctx.backend == "mssql"):
            resp = insert_many_mutation(hge_ctx, table, rows)
        else:
            raise NotImplementedError("Unknown backend.")
        print("----- RESP 1 -----", resp)

        start_time = time.perf_counter()
        for _ in range(1,6):
            evts_webhook.get_event(5) # webhook takes 2 seconds to process a request (+ buffer)
        end_time = time.perf_counter()
        time_elapsed = end_time - start_time
        # Sequential delivery would take ~10s; asynchronous delivery must
        # comfortably beat that even with CI jitter.
        assert time_elapsed < 10
|
2021-09-16 14:03:01 +03:00
|
|
|
|
|
|
|
@usefixtures("per_class_tests_db_state")
class TestEventTransform(object):
    """A request transform on an event trigger rewrites the delivered
    request's path, headers and body."""

    @classmethod
    def dir(cls):
        return 'queries/event_triggers/transform'

    def test_basic(self, hge_ctx, evts_webhook):
        # Arrange: install the trigger together with its request transform.
        check_query_f(hge_ctx, self.dir() + '/basic_transform.yaml')

        # Act: insert a row so the trigger fires.
        target = {"schema": "hge_tests", "name": "test_t1"}
        new_row = {"id": 0, "first_name": "Simon", "last_name": "Marlow"}
        insert(hge_ctx, target, new_row)

        # Assert: the webhook sees the transformed path, the added header,
        # the removed header, and the row as the body.
        check_event_transformed(hge_ctx,
                                evts_webhook,
                                new_row,
                                headers={"foo": "bar"},
                                removedHeaders=["user-agent"],
                                webhook_path="/?foo=bar")
|
2022-04-21 10:19:37 +03:00
|
|
|
|
2022-08-02 22:32:46 +03:00
|
|
|
@pytest.mark.backend('mssql')
@usefixtures("per_method_tests_db_state")
class TestEventTransformMSSQL(object):
    """MSSQL flavour of the request-transform test: the delivered request's
    path, headers and body must reflect the configured transform."""

    @classmethod
    def dir(cls):
        return 'queries/event_triggers/transform'

    def test_basic(self, hge_ctx, evts_webhook):
        # GIVEN: the trigger is installed together with its request transform.
        check_query_f(hge_ctx, self.dir() + '/basic_transform_mssql.yaml')

        # WHEN: a row is inserted so the trigger fires.
        table = {"schema": "hge_tests", "name": "test_t1"}
        insert_row = {"id": 0, "first_name": "Simon", "last_name": "Marlow"}
        resp = insert_mutation(hge_ctx, table, insert_row)
        print("----- RESP 1 -----", resp)

        # THEN: the webhook sees the transformed path, the added header,
        # the removed header, and the inserted row as the body.
        expectedPath = "/?foo=bar"
        expectedBody = insert_row

        check_event_transformed(hge_ctx,
                                evts_webhook,
                                expectedBody,
                                headers={"foo": "bar"},
                                removedHeaders=["user-agent"],
                                webhook_path=expectedPath)
|