diff --git a/server/graphql-engine.cabal b/server/graphql-engine.cabal
index 5de5c358c05..6301ebac0d1 100644
--- a/server/graphql-engine.cabal
+++ b/server/graphql-engine.cabal
@@ -1199,6 +1199,7 @@ test-suite tests-hspec
     Test.DataConnector.SelectPermissionsSpec
     Test.DirectivesSpec
+    Test.EventTriggersRecreationSpec
     Test.EventTriggersRunSQLSpec
     Test.HelloWorldSpec
     Test.InsertCheckPermissionSpec
     Test.InsertDefaultsSpec
diff --git a/server/tests-hspec/Harness/GraphqlEngine.hs b/server/tests-hspec/Harness/GraphqlEngine.hs
index 99166c2a5ce..1838683f308 100644
--- a/server/tests-hspec/Harness/GraphqlEngine.hs
+++ b/server/tests-hspec/Harness/GraphqlEngine.hs
@@ -22,6 +22,7 @@ module Harness.GraphqlEngine
     postWithHeadersStatus,
     clearMetadata,
     postV2Query,
+    postV2Query_,
 
     -- ** Misc.
     setSource,
@@ -185,6 +186,10 @@ postV2Query :: HasCallStack => Int -> TestEnvironment -> Value -> IO Value
 postV2Query statusCode testEnvironment =
   withFrozenCallStack $ postWithHeadersStatus statusCode testEnvironment "/v2/query" mempty
 
+postV2Query_ :: HasCallStack => TestEnvironment -> Value -> IO ()
+postV2Query_ testEnvironment =
+  withFrozenCallStack $ post_ testEnvironment "/v2/query"
+
 -------------------------------------------------------------------------------
 -- HTTP Calls - Misc.
diff --git a/server/tests-hspec/Harness/Webhook.hs b/server/tests-hspec/Harness/Webhook.hs
index ad2f90541ef..13a7f798145 100644
--- a/server/tests-hspec/Harness/Webhook.hs
+++ b/server/tests-hspec/Harness/Webhook.hs
@@ -48,6 +48,8 @@ run = do
   Spock.spockT id $ do
     Spock.get "/" $
       Spock.json $ Aeson.String "OK"
+    Spock.post "/hello" $
+      Spock.json $ Aeson.String "world"
     Spock.post "/echo" $ do
       req <- Spock.request
       body <- liftIO $ Wai.strictRequestBody req
diff --git a/server/tests-hspec/Test/EventTriggersRecreationSpec.hs b/server/tests-hspec/Test/EventTriggersRecreationSpec.hs
new file mode 100644
index 00000000000..ab232354dd0
--- /dev/null
+++ b/server/tests-hspec/Test/EventTriggersRecreationSpec.hs
@@ -0,0 +1,362 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module Test.EventTriggersRecreationSpec (spec) where
+
+import Harness.Backend.Postgres qualified as Postgres
+import Harness.GraphqlEngine qualified as GraphqlEngine
+import Harness.Quoter.Yaml
+import Harness.Test.Context qualified as Context
+import Harness.Test.Schema (Table (..), table)
+import Harness.Test.Schema qualified as Schema
+import Harness.TestEnvironment (TestEnvironment, stopServer)
+import Harness.Webhook qualified as Webhook
+import Test.Hspec (SpecWith, it)
+import Prelude
+
+--------------------------------------------------------------------------------
+-- Preamble
+
+spec :: SpecWith TestEnvironment
+spec =
+  Context.runWithLocalTestEnvironment
+    [ Context.Context
+        { name = Context.Backend Context.Postgres,
+          mkLocalTestEnvironment = webhookServerMkLocalTestEnvironment,
+          setup = postgresSetup,
+          teardown = postgresTeardown,
+          customOptions = Nothing
+        }
+    ]
+    tests
+
+--------------------------------------------------------------------------------
+
+-- * Backend
+
+-- ** Schema
+
+usersTable :: Schema.Table
+usersTable =
+  (table "users")
+    { tableColumns =
+        [ Schema.column "id" Schema.TStr,
+          Schema.column "name" Schema.TStr,
+          Schema.column "created_at" Schema.TUTCTime
+        ],
+      tablePrimaryKey = ["id"]
+    }
+
+schema :: [Schema.Table]
+schema = [usersTable]
+
+--------------------------------------------------------------------------------
+
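+-- A usage note (illustrative sketch, not part of the test flow): the
+-- `postV2Query_` helper added to Harness.GraphqlEngine above posts a payload
+-- to /v2/query and discards the response, so setup and teardown steps can
+-- run DDL without asserting on the result:
+--
+-- > GraphqlEngine.postV2Query_ testEnvironment
+-- >   [yaml|
+-- >     type: run_sql
+-- >     args:
+-- >       source: postgres
+-- >       sql: SELECT 1;
+-- >   |]
+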
+-- ** Setup and teardown override
+
+postgresSetup :: (TestEnvironment, (GraphqlEngine.Server, Webhook.EventsQueue)) -> IO ()
+postgresSetup (testEnvironment, _) = do
+  -- In the setup, we create a Postgres event trigger that captures every DDL
+  -- change made in the database and stores it in a table called
+  -- `ddl_history`, which records metadata about each DDL query: the query
+  -- that was executed, the time at which it was executed, its type, etc.
+  Postgres.setup schema (testEnvironment, ())
+  GraphqlEngine.postV2Query_
+    testEnvironment
+    [yaml|
+      type: run_sql
+      args:
+        source: postgres
+        sql: |
+          DROP TABLE IF EXISTS hdb_catalog.hdb_source_catalog_version;
+
+          DROP INDEX IF EXISTS hdb_catalog.hdb_source_catalog_version_one_row;
+
+          DROP TABLE IF EXISTS hdb_catalog.event_invocation_logs CASCADE;
+
+          DROP TABLE IF EXISTS hdb_catalog.event_log;
+
+          DROP INDEX IF EXISTS hdb_catalog.event_log_trigger_name;
+
+          DROP INDEX IF EXISTS hdb_catalog.event_invocation_logs_event_id;
+
+          CREATE TABLE hasura.ddl_history (
+            id serial PRIMARY KEY,
+            event text,
+            tag text,
+            object_type text,
+            schema_name text,
+            object_identity text,
+            query text,
+            created_at timestamptz DEFAULT now()
+          );
+
+          CREATE OR REPLACE FUNCTION hasura.log_ddl_command()
+            RETURNS event_trigger AS
+          $$
+          DECLARE
+            v1 text;
+            r record;
+          BEGIN
+            SELECT query INTO v1
+            FROM pg_stat_activity
+            WHERE pid = pg_backend_pid();
+            -- RAISE NOTICE 'ddl event:%, command:%', tg_event, tg_tag;
+            -- NB: since ddl_command_end cannot collect the details of a DROP
+            -- statement, we use sql_drop for those.
+            IF TG_EVENT = 'ddl_command_end' THEN
+              SELECT * INTO r
+              FROM pg_event_trigger_ddl_commands();
+              IF r.classid > 0 THEN
+                INSERT INTO hasura.ddl_history
+                  (event, tag, object_type, schema_name, object_identity, query)
+                VALUES
+                  (TG_EVENT, TG_TAG, r.object_type, r.schema_name, r.object_identity, v1);
+              END IF;
+            END IF;
+            -- To avoid collecting the same change twice, we filter out
+            -- 'ALTER TABLE' and 'ALTER FOREIGN TABLE' here.
+            IF TG_EVENT = 'sql_drop' THEN
+              IF TG_TAG != 'ALTER TABLE' AND TG_TAG != 'ALTER FOREIGN TABLE' THEN
+                SELECT * INTO r
+                FROM pg_event_trigger_dropped_objects();
+                INSERT INTO hasura.ddl_history
+                  (event, tag, object_type, schema_name, object_identity, query)
+                VALUES
+                  (TG_EVENT, TG_TAG, r.object_type, r.schema_name, r.object_identity, v1);
+              END IF;
+            END IF;
+          END;
+          $$ LANGUAGE plpgsql;
+
+          CREATE EVENT TRIGGER pg_get_ddl_command ON ddl_command_end
+            EXECUTE PROCEDURE hasura.log_ddl_command();
+    |]
+
+postgresTeardown :: (TestEnvironment, (GraphqlEngine.Server, Webhook.EventsQueue)) -> IO ()
+postgresTeardown (testEnvironment, (server, _)) = do
+  GraphqlEngine.postV2Query_
+    testEnvironment
+    [yaml|
+      type: run_sql
+      args:
+        source: postgres
+        sql: |
+          DROP EVENT TRIGGER pg_get_ddl_command;
+
+          DROP FUNCTION hasura.log_ddl_command;
+
+          DROP TABLE hasura.ddl_history;
+    |]
+  stopServer server
+  Postgres.teardown schema (testEnvironment, ())
+
+webhookServerMkLocalTestEnvironment ::
+  TestEnvironment -> IO (GraphqlEngine.Server, Webhook.EventsQueue)
+webhookServerMkLocalTestEnvironment _ =
+  Webhook.run
+
+--------------------------------------------------------------------------------
+
+-- * Tests
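+
+-- A note on the assertions below: the reload and ALTER TABLE tests first
+-- truncate `hasura.ddl_history` and then check which DDL statements the
+-- Postgres event trigger recorded, reusing this query:
+--
+--   SELECT tag, object_identity
+--   FROM hasura.ddl_history
+--   WHERE schema_name = 'hdb_catalog'
+--   ORDER BY object_identity;
+--
+-- An empty result set therefore means no SQL triggers were (re)created.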
+
+tests :: Context.Options -> SpecWith (TestEnvironment, (GraphqlEngine.Server, Webhook.EventsQueue))
+tests opts = do
+  it "Creating an event trigger should create the SQL triggers" $ \(testEnvironment, (webhookServer, _)) -> do
+    let webhookEndpoint = GraphqlEngine.serverUrl webhookServer ++ "/hello"
+    shouldReturnYaml
+      opts
+      ( GraphqlEngine.postMetadata
+          testEnvironment
+          [yaml|
+            type: pg_create_event_trigger
+            args:
+              source: postgres
+              table:
+                schema: hasura
+                name: users
+              name: users_INSERT
+              webhook: *webhookEndpoint
+              insert:
+                columns: "*"
+          |]
+      )
+      [yaml|
+        message: success
+      |]
+  it "The source catalog should have been initialized along with the creation of the SQL trigger" $ \(testEnvironment, _) ->
+    shouldReturnYaml
+      opts
+      ( GraphqlEngine.postV2Query
+          200
+          testEnvironment
+          [yaml|
+            type: run_sql
+            args:
+              source: postgres
+              sql: SELECT tag, object_identity FROM hasura.ddl_history ORDER BY object_identity;
+          |]
+      )
+      [yaml|
+        result:
+        - - tag
+          - object_identity
+        - - CREATE TRIGGER
+          - '"notify_hasura_users_INSERT_INSERT" on hasura.users'
+        - - CREATE FUNCTION
+          - hdb_catalog."notify_hasura_users_INSERT_INSERT"()
+        - - CREATE TABLE
+          - hdb_catalog.event_invocation_logs
+        - - CREATE INDEX
+          - hdb_catalog.event_invocation_logs_event_id_idx
+        - - CREATE TABLE
+          - hdb_catalog.event_log
+        - - CREATE INDEX
+          - hdb_catalog.event_log_fetch_events
+        - - CREATE INDEX
+          - hdb_catalog.event_log_trigger_name_idx
+        - - CREATE FUNCTION
+          - hdb_catalog.gen_hasura_uuid()
+        - - CREATE TABLE
+          - hdb_catalog.hdb_source_catalog_version
+        - - CREATE INDEX
+          - hdb_catalog.hdb_source_catalog_version_one_row
+        - - CREATE FUNCTION
+          - hdb_catalog.insert_event_log(pg_catalog.text,pg_catalog.text,pg_catalog.text,pg_catalog.text,pg_catalog.json)
+        result_type: TuplesOk
+      |]
+  it "Only reloading the metadata should not recreate the SQL triggers" $ \(testEnvironment, _) -> do
+    -- Truncate ddl_history so we only see DDL caused by the reload below.
+    GraphqlEngine.postV2Query_
+      testEnvironment
+      [yaml|
+        type: run_sql
+        args:
+          source: postgres
+          sql: TRUNCATE hasura.ddl_history RESTART IDENTITY;
+      |]
+    shouldReturnYaml
+      opts
+      ( GraphqlEngine.postMetadata
+          testEnvironment
+          [yaml|
+            type: reload_metadata
+            args: {}
+          |]
+      )
+      [yaml|
+        is_consistent: true
+        message: success
+      |]
+    shouldReturnYaml
+      opts
+      ( GraphqlEngine.postV2Query
+          200
+          testEnvironment
+          [yaml|
+            type: run_sql
+            args:
+              source: postgres
+              sql: SELECT tag, object_identity FROM hasura.ddl_history WHERE schema_name = 'hdb_catalog' ORDER BY object_identity;
+          |]
+      )
+      [yaml|
+        result:
+        - - tag
+          - object_identity
+        result_type: TuplesOk
+      |]
+  it "Reloading the metadata with `recreate_event_triggers: true` should recreate the SQL triggers" $ \(testEnvironment, _) -> do
+    -- Truncate ddl_history again.
+    GraphqlEngine.postV2Query_
+      testEnvironment
+      [yaml|
+        type: run_sql
+        args:
+          source: postgres
+          sql: TRUNCATE hasura.ddl_history RESTART IDENTITY;
+      |]
+    shouldReturnYaml
+      opts
+      ( GraphqlEngine.postMetadata
+          testEnvironment
+          [yaml|
+            type: reload_metadata
+            args:
+              recreate_event_triggers: true
+          |]
+      )
+      [yaml|
+        is_consistent: true
+        message: success
+      |]
+    shouldReturnYaml
+      opts
+      ( GraphqlEngine.postV2Query
+          200
+          testEnvironment
+          [yaml|
+            type: run_sql
+            args:
+              source: postgres
+              sql: SELECT tag, object_identity FROM hasura.ddl_history WHERE schema_name = 'hdb_catalog' ORDER BY object_identity;
+          |]
+      )
+      [yaml|
+        result:
+        - - tag
+          - object_identity
+        - - CREATE FUNCTION
+          - hdb_catalog."notify_hasura_users_INSERT_INSERT"()
+        result_type: TuplesOk
+      |]
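+  -- Why an ALTER TABLE should recreate the trigger function (the expectation
+  -- encoded in the next test): the generated
+  -- `notify_hasura_users_INSERT_INSERT` function serializes the row's
+  -- columns into the event payload, so a schema change such as
+  --
+  --   ALTER TABLE hasura.users ADD COLUMN last_name TEXT;
+  --
+  -- must cause graphql-engine to regenerate it.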
+  it "Adding a new column to the table should recreate the SQL trigger" $ \(testEnvironment, _) -> do
+    -- Truncate ddl_history, then alter the users table.
+    GraphqlEngine.postV2Query_
+      testEnvironment
+      [yaml|
+        type: run_sql
+        args:
+          source: postgres
+          sql: |
+            TRUNCATE hasura.ddl_history RESTART IDENTITY;
+            ALTER TABLE hasura.users ADD COLUMN last_name TEXT;
+      |]
+    shouldReturnYaml
+      opts
+      ( GraphqlEngine.postV2Query
+          200
+          testEnvironment
+          [yaml|
+            type: run_sql
+            args:
+              source: postgres
+              sql: SELECT tag, object_identity FROM hasura.ddl_history WHERE schema_name = 'hdb_catalog' ORDER BY object_identity;
+          |]
+      )
+      [yaml|
+        result:
+        - - tag
+          - object_identity
+        - - CREATE FUNCTION
+          - hdb_catalog."notify_hasura_users_INSERT_INSERT"()
+        - - CREATE FUNCTION
+          - hdb_catalog."notify_hasura_users_INSERT_INSERT"()
+        result_type: TuplesOk
+      |]
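+
+-- Note on the webhook endpoint: the event trigger above points at the
+-- `/hello` route added to Harness.Webhook in this change, which simply
+-- responds with "world" and ignores the delivery payload (unlike `/echo`,
+-- which reads the request body). These tests assert only on the DDL
+-- recorded in `hasura.ddl_history`, not on event delivery.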