2020-05-13 15:33:16 +03:00
|
|
|
from croniter import croniter
|
2022-12-21 18:55:24 +03:00
|
|
|
from datetime import datetime, timedelta
|
|
|
|
import itertools
|
2020-06-23 18:21:34 +03:00
|
|
|
import json
|
2022-12-21 18:55:24 +03:00
|
|
|
import sqlalchemy
|
2022-09-28 12:19:47 +03:00
|
|
|
|
2022-12-21 18:55:24 +03:00
|
|
|
from validate import validate_event_headers, validate_event_webhook
|
2021-10-22 08:54:59 +03:00
|
|
|
from utils import until_asserts_pass
|
2020-05-13 15:33:16 +03:00
|
|
|
|
|
|
|
# The create and delete tests should ideally go in setup and teardown YAML files.
# We can't use that here because the payload is dynamic, i.e. in case of one-off scheduled events
# the value is the current timestamp, and in case of cron triggers, the cron schedule is
# derived based on the current timestamp.
|
|
|
|
|
|
|
|
def stringify_datetime(dt):
    """Render a naive datetime as an ISO-8601 UTC string with microseconds and a 'Z' suffix."""
    # Fixed-width output: microseconds are always emitted (zero-padded), unlike isoformat().
    return "{:%Y-%m-%dT%H:%M:%S.%f}Z".format(dt)
|
|
|
|
class TestScheduledEvent(object):
    """Integration tests for one-off scheduled events.

    Creates three events designed to reach the three terminal states
    ('delivered', 'dead', 'error') and verifies both the webhook traffic and
    the rows recorded in the hdb_catalog metadata tables.
    """

    @classmethod
    def dir(cls):
        # Directory holding the YAML fixtures for scheduled-trigger tests.
        return 'queries/scheduled_triggers'

    # Body sent to the webhook for every scheduled event created below.
    webhook_payload = {"foo": "baz"}

    # Header configuration attached to every scheduled event created below;
    # echoed back by the webhook and checked via validate_event_headers.
    header_conf = [
        {
            "name": "header-key",
            "value": "header-value"
        }
    ]

    def test_scheduled_events(self, hge_ctx, scheduled_triggers_evts_webhook, metadata_schema_url):
        """Create three one-off scheduled events and verify their delivery outcomes.

        One event succeeds, one is scheduled far in the past (past the tolerance
        window, so it goes 'dead' without a webhook call), and one targets an
        always-failing endpoint (retried once, then marked 'error').
        """
        metadata_engine = sqlalchemy.engine.create_engine(metadata_schema_url)

        query = {
            "type": "bulk",
            "args": [
                # Succeeds
                {
                    "type": "create_scheduled_event",
                    "args": {
                        "webhook": f'{scheduled_triggers_evts_webhook.url}/test',
                        "schedule_at": stringify_datetime(datetime.utcnow()),
                        "payload": self.webhook_payload,
                        "headers": self.header_conf,
                        # The comment is used below to locate this event's row in the DB.
                        "comment": "test scheduled event",
                    },
                },
                # Fails immediately, with 'dead'
                {
                    "type": "create_scheduled_event",
                    "args": {
                        "webhook": f'{scheduled_triggers_evts_webhook.url}/',
                        # Far in the past: beyond the default tolerance window.
                        "schedule_at": "2020-01-01T00:00:00Z",
                        "payload": self.webhook_payload,
                        "headers": self.header_conf,
                    },
                },
                # Fails on request, trying twice:
                {
                    "type": "create_scheduled_event",
                    "args": {
                        "webhook": f'{scheduled_triggers_evts_webhook.url}/fail',
                        "schedule_at": stringify_datetime(datetime.utcnow()),
                        "payload": self.webhook_payload,
                        "headers": self.header_conf,
                        "retry_conf": {
                            "num_retries": 1,
                            "retry_interval_seconds": 1,
                            "timeout_seconds": 1,
                            "tolerance_seconds": 21600,
                        },
                    },
                },
            ],
        }
        resp = hge_ctx.v1q(query)
        assert len(resp) == 3, resp
        # ensuring that valid event_id is returned for all requests
        assert all(['event_id' in r for r in resp]), resp

        # Here we check the three requests received by the webhook.
        # Collect the three generated events (they may arrive out of order):
        e1 = scheduled_triggers_evts_webhook.get_event(12) # at least 10 sec, see processScheduledTriggers.sleep
        e2 = scheduled_triggers_evts_webhook.get_event(12)
        e3 = scheduled_triggers_evts_webhook.get_event(12)
        # Sorting by path gives a deterministic order: '/fail', '/fail', '/test'.
        [event_fail1, event_fail2, event_success] = sorted([e1,e2,e3], key=lambda e: e['path'])
        # Check the two failures:
        validate_event_webhook(event_fail1['path'],'/fail')
        validate_event_webhook(event_fail2['path'],'/fail')

        # Check the one successful webhook call:
        with metadata_engine.connect() as connection:
            query = '''
                select to_json(timezone('utc', created_at)) as created_at
                from hdb_catalog.hdb_scheduled_events
                where comment = 'test scheduled event'
            '''
            result = connection.execute(query).fetchone()
            assert result is not None
            db_created_at = result['created_at']

        validate_event_webhook(event_success['path'], '/test')
        validate_event_headers(event_success['headers'], {"header-key": "header-value"})
        assert event_success['body']['payload'] == self.webhook_payload
        # The DB renders the timestamp with a space separator; normalize it to the
        # ISO-8601 form ('T' separator + 'Z' suffix) used in the webhook body.
        assert event_success['body']['created_at'] == db_created_at.replace(" ","T") + "Z"
        payload_keys = dict.keys(event_success['body'])
        for k in ["scheduled_time","created_at","id"]: # additional keys
            assert k in payload_keys
        assert scheduled_triggers_evts_webhook.is_queue_empty()

        def try_check_events_statuses():
            # Polled via until_asserts_pass below: the final statuses may take a
            # while to be written, e.g. the 'error' event needs its retry to elapse.
            with metadata_engine.connect() as connection:
                scheduled_event_statuses = list(
                    connection.execute(
                        "select status, tries from hdb_catalog.hdb_scheduled_events order by status desc"
                    ).fetchall()
                )
            # 3 scheduled events have been created
            # one should be dead because the timestamp was past the tolerance limit
            # one should be delivered because all the parameters were reasonable
            # one should be error because the webhook returns an error state
            assert scheduled_event_statuses == [
                # status tries
                ( 'error', 2), # num_retries + 1
                ( 'delivered', 1),
                ( 'dead', 0),
            ]

        until_asserts_pass(100, try_check_events_statuses)
|
2020-05-13 15:33:16 +03:00
|
|
|
|
2022-12-21 18:55:24 +03:00
|
|
|
# WARNING: The tests in this class are not independent; they depend on the side effects of previous tests.
|
2020-05-13 15:33:16 +03:00
|
|
|
class TestCronTrigger(object):
    """Integration tests for cron triggers: creation, generated events, update
    (replace), fired-event delivery, listing, export/import and deletion.

    NOTE: these tests are not independent; later tests rely on the triggers
    created (and events generated) by earlier ones.
    """

    # Name of the main cron trigger created and reused throughout the class.
    cron_trigger_name = "cron_trigger"
    # setting the test to be after 30 mins, to make sure that
    # any of the events are not delivered.
    min_after_30_mins = (datetime.utcnow() + timedelta(minutes=30)).minute
    # Fires once an hour, at the minute computed above.
    cron_schedule = "{} * * * *".format(min_after_30_mins)
    # Reference time (class-definition time) used to recompute the expected
    # scheduled timestamps with croniter in later tests.
    init_time = datetime.utcnow()

    def test_create_cron_schedule_triggers(self, hge_ctx, scheduled_triggers_evts_webhook):
        """Create the main cron trigger used by the rest of this class."""
        cron_st_api_query = {
            "type": "create_cron_trigger",
            "args": {
                "name": self.cron_trigger_name,
                "webhook": f"{scheduled_triggers_evts_webhook.url}/foo",
                "schedule": self.cron_schedule,
                "headers": [
                    {
                        "name": "foo",
                        "value": "baz",
                    },
                ],
                "payload": {"foo": "baz"},
                "include_in_metadata": True,
            },
        }
        resp = hge_ctx.v1q(cron_st_api_query)
        # the cron events will be generated based on the current time, they
        # will not be exactly the same though(the server now and now here)
        assert resp['message'] == 'success'

    def test_check_generated_cron_scheduled_events(self, metadata_schema_url):
        """Verify the server generated 100 future events matching the cron schedule."""
        metadata_engine = sqlalchemy.engine.create_engine(metadata_schema_url)

        # Recompute the expected timestamps from the same schedule and start time.
        schedule = croniter(self.cron_schedule, self.init_time)
        expected_scheduled_timestamps = list(itertools.islice(schedule.all_next(datetime), 100))
        self.verify_timestamps(metadata_engine, expected_scheduled_timestamps)

    def test_update_existing_cron_trigger(self ,hge_ctx, metadata_schema_url, scheduled_triggers_evts_webhook):
        """Replace the existing cron trigger, then check the exported headers and
        that future events were regenerated."""
        metadata_engine = sqlalchemy.engine.create_engine(metadata_schema_url)

        expected_scheduled_timestamps = []
        iter = croniter(self.cron_schedule,datetime.utcnow())
        for _ in range(100):
            expected_scheduled_timestamps.append(iter.next(datetime))
        q = {
            "type": "create_cron_trigger",
            "args": {
                "name": self.cron_trigger_name,
                "webhook": f"{scheduled_triggers_evts_webhook.url}/foo",
                "schedule": self.cron_schedule,
                "headers": [
                    {
                        "name": "header-name",
                        "value": "header-value",
                    },
                ],
                "payload": {"foo": "baz"},
                "include_in_metadata": True,
                # replace=True turns the create call into an update of the
                # existing trigger with the same name.
                "replace": True,
            },
        }
        hge_ctx.v1q(q)

        resp = hge_ctx.v1q({'type': 'export_metadata', 'args': {}})

        # The exported metadata must reflect the updated header configuration.
        all_cron_triggers = resp['cron_triggers']
        for cron_trigger in all_cron_triggers:
            if cron_trigger['name'] == self.cron_trigger_name:
                assert cron_trigger['headers'] == [{
                    "name": "header-name",
                    "value": "header-value",
                }]

        # After updating the cron trigger, the future events should have been created
        self.verify_timestamps(metadata_engine, expected_scheduled_timestamps)

    def test_check_fired_webhook_event(self, hge_ctx, scheduled_triggers_evts_webhook):
        """Create an every-minute cron trigger and verify the first delivered
        event's path, headers and body."""
        q = {
            "type": "create_cron_trigger",
            "args": {
                "name": "test_cron_trigger",
                "webhook": f"{scheduled_triggers_evts_webhook.url}/test",
                "schedule": "* * * * *",
                "headers": [
                    {
                        "name": "header-key",
                        "value": "header-value",
                    },
                ],
                "payload": {"foo": "baz"},
                "include_in_metadata": False,
            },
        }
        hge_ctx.v1q(q)
        # The maximum timeout is set to 75s because, the cron timestamps
        # that are generated will start from the next minute, suppose
        # the cron schedule is "* * * * *" and the time the cron trigger
        # is created is 10:00:00, then the next event will be scheduled
        # at 10:01:00, but the events processor will not process it
        # exactly at the zeroeth second of 10:01. The only guarantee
        # is that, the event processor will start to process the event before
        # 10:01:10 (seel sleep in processScheduledTriggers). So, in the worst
        # case, it will take 70 seconds to process the first scheduled event.
        event = scheduled_triggers_evts_webhook.get_event(75)
        validate_event_webhook(event['path'], '/test')
        validate_event_headers(event['headers'], {"header-key":"header-value"})
        assert event['body']['payload'] == {"foo": "baz"}
        assert event['body']['name'] == 'test_cron_trigger'

    def test_get_cron_triggers(self, hge_ctx, scheduled_triggers_evts_webhook):
        """get_cron_triggers must return both triggers created by earlier tests,
        including the one with include_in_metadata=False."""
        q = {
            "type": "get_cron_triggers",
            "args": {}
        }
        resp = hge_ctx.v1metadataq(q)
        # Round-trip through JSON to normalize the response for comparison.
        respDict = json.loads(json.dumps(resp))
        assert respDict['cron_triggers'] == [
            {
                "headers": [
                    {
                        "name": "header-name",
                        "value": "header-value",
                    }
                ],
                "include_in_metadata": True,
                "name": self.cron_trigger_name,
                "payload": {
                    "foo": "baz",
                },
                "retry_conf": {
                    "num_retries": 0,
                    "retry_interval_seconds": 10,
                    "timeout_seconds": 60,
                    "tolerance_seconds": 21600,
                },
                "schedule": self.cron_schedule,
                "webhook": f"{scheduled_triggers_evts_webhook.url}/foo",
            },
            {
                "headers": [
                    {
                        "name": "header-key",
                        "value": "header-value",
                    },
                ],
                "include_in_metadata": False,
                "name": "test_cron_trigger",
                "payload": {
                    "foo": "baz",
                },
                "retry_conf": {
                    "num_retries": 0,
                    "retry_interval_seconds": 10,
                    "timeout_seconds": 60,
                    "tolerance_seconds": 21600,
                },
                "schedule": "* * * * *",
                "webhook": f"{scheduled_triggers_evts_webhook.url}/test",
            },
        ]

    def test_export_and_import_cron_triggers(self, hge_ctx, metadata_schema_url, scheduled_triggers_evts_webhook):
        """Export metadata, re-import it via replace_metadata, and check that
        future cron events are regenerated for the imported trigger."""
        metadata_engine = sqlalchemy.engine.create_engine(metadata_schema_url)

        q = {
            "type": "export_metadata",
            "args": {}
        }
        resp = hge_ctx.v1q(q)
        respDict = json.loads(json.dumps(resp))
        # Only the cron triggers with `include_in_metadata` set to `True`
        # should be exported
        assert respDict['cron_triggers'] == [
            {
                "headers": [
                    {
                        "name": "header-name",
                        "value": "header-value",
                    }
                ],
                "include_in_metadata": True,
                "name": self.cron_trigger_name,
                "payload": {
                    "foo": "baz"
                },
                "schedule": self.cron_schedule,
                "webhook": f"{scheduled_triggers_evts_webhook.url}/foo",
            },
        ]
        q = {
            "type": "replace_metadata",
            "args": {
                "metadata": resp,
            },
        }
        resp = hge_ctx.v1q(q)

        with metadata_engine.connect() as connection:
            sql = '''
                select count(1) as count
                from hdb_catalog.hdb_cron_events
                where trigger_name = %s
            '''
            result = connection.execute(sql, (self.cron_trigger_name,)).fetchone()
            assert result is not None
            count = result['count']
        # Check if the future cron events are created for
        # for a cron trigger while imported from the metadata
        assert int(count) == 100

    def test_attempt_to_create_duplicate_cron_trigger_fail(self, hge_ctx, scheduled_triggers_evts_webhook):
        """Creating a cron trigger with an already-used name must fail with
        status 400 and code 'already-exists'."""
        q = {
            "type": "create_cron_trigger",
            "args": {
                "name": "test_cron_trigger",
                "webhook": f"{scheduled_triggers_evts_webhook.url}/test",
                "schedule": "* * * * *",
                "headers": [
                    {
                        "name": "header-key",
                        "value": "header-value",
                    },
                ],
                "payload": {"foo": "baz"},
                "include_in_metadata": False,
            },
        }
        resp = hge_ctx.v1q(q, expected_status_code = 400)
        assert dict(resp) == {
            "code": "already-exists",
            "error": 'cron trigger with name: test_cron_trigger already exists',
            "path": "$.args"
        }

    def test_delete_cron_scheduled_trigger(self,hge_ctx):
        """Clean up: delete both cron triggers created by this class."""
        q = {
            "type": "bulk",
            "args": [
                {
                    "type": "delete_cron_trigger",
                    "args": {
                        "name": self.cron_trigger_name,
                    },
                },
                {
                    "type": "delete_cron_trigger",
                    "args": {
                        "name": "test_cron_trigger",
                    },
                },
            ],
        }
        hge_ctx.v1q(q)

    def verify_timestamps(self, metadata_engine, expected_scheduled_timestamps):
        """Assert the scheduled times stored for this trigger in the metadata DB
        match the expected croniter-generated list exactly (order included)."""
        # Get timestamps in UTC from the db to compare them with the croniter-generated timestamps
        with metadata_engine.connect() as connection:
            sql = '''
                select timezone('utc', scheduled_time) as scheduled_time
                from hdb_catalog.hdb_cron_events
                where trigger_name = %s
                order by scheduled_time asc
            '''
            actual_scheduled_timestamps = list(scheduled_time for (scheduled_time,) in connection.execute(sql, (self.cron_trigger_name,)).fetchall())

        assert actual_scheduled_timestamps == expected_scheduled_timestamps
|