diff --git a/server/lib/api-tests/api-tests.cabal b/server/lib/api-tests/api-tests.cabal
index d73ad2fe1b7..bebc29673c4 100644
--- a/server/lib/api-tests/api-tests.cabal
+++ b/server/lib/api-tests/api-tests.cabal
@@ -154,7 +154,6 @@ library
     Test.EventTriggers.PG.EventTriggersClearMetadataSpec
     Test.EventTriggers.PG.EventTriggersRunSQLSpec
     Test.EventTriggers.PG.EventTriggersUniqueNameSpec
-    Test.EventTriggers.PG.EventTriggersCreatedAtUTCSpec
     Test.EventTriggers.PG.EventTriggersUntrackTableCleanupSpec
     Test.Harness.Quoter.YamlSpec
     Test.HealthCheckSpec
diff --git a/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersCreatedAtUTCSpec.hs b/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersCreatedAtUTCSpec.hs
deleted file mode 100644
index 05e98e2c822..00000000000
--- a/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersCreatedAtUTCSpec.hs
+++ /dev/null
@@ -1,165 +0,0 @@
-{-# LANGUAGE QuasiQuotes #-}
-{-# LANGUAGE ViewPatterns #-}
-
--- | Test that the `created_at` value sent in the event payload is
--- correct.
-module Test.EventTriggers.PG.EventTriggersCreatedAtUTCSpec (spec) where
-
-import Control.Concurrent.Chan qualified as Chan
-import Data.Aeson.Internal qualified as Aeson
-import Data.Aeson.Key qualified as Key
-import Data.Aeson.KeyMap qualified as KM
-import Data.List.NonEmpty qualified as NE
-import Data.Time.Clock (addUTCTime, getCurrentTime)
-import Harness.Backend.Postgres qualified as Postgres
-import Harness.GraphqlEngine qualified as GraphqlEngine
-import Harness.Quoter.Yaml
-import Harness.Test.Fixture qualified as Fixture
-import Harness.Test.Schema (Table (..), table)
-import Harness.Test.Schema qualified as Schema
-import Harness.TestEnvironment (GlobalTestEnvironment, TestEnvironment (options))
-import Harness.Webhook qualified as Webhook
-import Harness.Yaml (fromObject, shouldReturnYaml)
-import Hasura.Base.Error (iResultToMaybe)
-import Hasura.Prelude
-import System.Timeout (timeout)
-import Test.HUnit.Base (assertFailure)
-import Test.Hspec (SpecWith, describe, it, shouldSatisfy)
-
---------------------------------------------------------------------------------
--- Preamble
-
-spec :: SpecWith GlobalTestEnvironment
-spec =
-  Fixture.runWithLocalTestEnvironment
-    ( NE.fromList
-        [ (Fixture.fixture $ Fixture.Backend Postgres.backendTypeMetadata)
-            { -- setup the webhook server as the local test environment,
-              -- so that the server can be referenced while testing
-              Fixture.mkLocalTestEnvironment = const Webhook.run,
-              Fixture.setupTeardown = \(testEnvironment, (webhookServer, _)) ->
-                [ Postgres.setupTablesAction schema testEnvironment,
-                  Fixture.SetupAction
-                    { Fixture.setupAction = postgresSetup testEnvironment webhookServer,
-                      Fixture.teardownAction = \_ -> postgresTeardown testEnvironment
-                    }
-                ]
-            }
-        ]
-    )
-    tests
-
---------------------------------------------------------------------------------
-
--- * Backend
-
--- ** Schema
-
-schema :: [Schema.Table]
-schema = [authorsTable]
-
-authorsTable :: Schema.Table
-authorsTable =
-  (table "authors")
-    { tableColumns =
-        [ Schema.column "id" Schema.TInt,
-          Schema.column "name" Schema.TStr
-        ],
-      tablePrimaryKey = ["id"],
-      tableData =
-        [ [Schema.VInt 1, Schema.VStr "Author 1"],
-          [Schema.VInt 2, Schema.VStr "Author 2"]
-        ]
-    }
-
---------------------------------------------------------------------------------
--- Tests
-
-tests :: SpecWith (TestEnvironment, (GraphqlEngine.Server, Webhook.EventsQueue))
-tests =
-  describe "created_at captures the correct value of the timestamp at which the event was created at." do
-    it "inserting a row in a non UTC timezone shouldn't affect the value of the `created_at`" $
-      \(testEnvironment, (_, (Webhook.EventsQueue eventsQueue))) -> do
-        let schemaName :: Schema.SchemaName
-            schemaName = Schema.getSchemaName testEnvironment
-        let insertQuery =
-              [interpolateYaml|
-                type: run_sql
-                args:
-                  source: postgres
-                  sql: "SET timezone to 'Asia/Kolkata';INSERT INTO #{schemaName}.authors (id, name) values (3, 'john');"
-              |]
-
-            expectedResponse =
-              [yaml|
-                result_type: CommandOk
-                result: null
-              |]
-
-        -- Insert a row into the table with event trigger
-        shouldReturnYaml
-          (options testEnvironment)
-          (GraphqlEngine.postV2Query 200 testEnvironment insertQuery)
-          expectedResponse
-
-        -- Check if there was a payload generated due to the insert statement
-        eventPayload <-
-          -- wait for the event for a maximum of 5 seconds
-          timeout (5 * 1000000) (Chan.readChan eventsQueue)
-            >>= (`onNothing` (assertFailure "Event expected, but not fired"))
-
-        let eventCreatedAtUTCMaybe =
-              iResultToMaybe
-                =<< Aeson.ifromJSON
-                <$> (Key.fromString "created_at")
-                `KM.lookup` (fromObject eventPayload)
-
-        eventCreatedAtUTC <-
-          onNothing eventCreatedAtUTCMaybe (assertFailure "Error in parsing the `created_at` of the event")
-
-        currentTimeUTC <- liftIO getCurrentTime
-
-        -- The current timestamp in UTC should always be greater than the
-        -- event's creation timestamp and also the current time should not
-        -- be later than 10 seconds from the event's creation time, since it
-        -- is the only event that has to be delivered. By making both the checks,
-        -- we can assure that the `created_at_tz` captures the correct value.
-        eventCreatedAtUTC
-          `shouldSatisfy` (\eventCreatedTime -> eventCreatedTime < currentTimeUTC && addUTCTime 10 eventCreatedTime > currentTimeUTC)
-
---------------------------------------------------------------------------------
-
--- ** Setup and teardown override
-
-postgresSetup :: TestEnvironment -> GraphqlEngine.Server -> IO ()
-postgresSetup testEnvironment webhookServer = do
-  let schemaName :: Schema.SchemaName
-      schemaName = Schema.getSchemaName testEnvironment
-  let webhookEndpoint = GraphqlEngine.serverUrl webhookServer ++ "/whole_event_payload"
-  GraphqlEngine.postMetadata_ testEnvironment $
-    [interpolateYaml|
-      type: bulk
-      args:
-      - type: pg_create_event_trigger
-        args:
-          name: authors_all
-          source: postgres
-          table:
-            name: authors
-            schema: #{schemaName}
-          webhook: #{webhookEndpoint}
-          insert:
-            columns: "*"
-    |]
-
-postgresTeardown :: TestEnvironment -> IO ()
-postgresTeardown testEnvironment = do
-  GraphqlEngine.postMetadata_ testEnvironment $
-    [yaml|
-      type: bulk
-      args:
-      - type: pg_delete_event_trigger
-        args:
-          name: authors_all
-          source: postgres
-    |]
diff --git a/server/lib/test-harness/src/Harness/Webhook.hs b/server/lib/test-harness/src/Harness/Webhook.hs
index c7a34da4507..51d61eb8c4c 100644
--- a/server/lib/test-harness/src/Harness/Webhook.hs
+++ b/server/lib/test-harness/src/Harness/Webhook.hs
@@ -47,14 +47,14 @@ run = mkTestResource do
   port <- bracket (Warp.openFreePort) (Socket.close . snd) (pure . fst)
   eventsQueueChan <- Chan.newChan
   let eventsQueue = EventsQueue eventsQueueChan
-      extractEventDataInsertIntoEventQueue jsonPathString = do
+      extractEventDataInsertIntoEventQueue = do
         req <- Spock.request
         body <- liftIO $ Wai.strictRequestBody req
         let jsonBody = Aeson.decode body
         let eventDataPayload =
               -- Only extract the data payload from the request body
               let mkJSONPathE = either (error . T.unpack) id . parseJSONPath
-                  eventJSONPath = mkJSONPathE jsonPathString
+                  eventJSONPath = mkJSONPathE "$.event.data"
               in iResultToMaybe =<< executeJSONPath eventJSONPath <$> jsonBody
         liftIO $
           Chan.writeChan eventsQueueChan $
@@ -69,14 +69,11 @@
            Spock.json $ Aeson.String "world"
 
          Spock.post "/echo" $ do
-           extractEventDataInsertIntoEventQueue "$.event.data"
+           extractEventDataInsertIntoEventQueue
            Spock.json $ Aeson.object ["success" Aeson..= True]
 
          Spock.post "/nextRetry" $ do
-           extractEventDataInsertIntoEventQueue "$.event.data"
+           extractEventDataInsertIntoEventQueue
            Spock.setStatus HTTP.status503
-         Spock.post "/whole_event_payload" $ do
-           extractEventDataInsertIntoEventQueue "$"
-           Spock.json $ Aeson.object ["success" Aeson..= True]
   let server = Server {port = fromIntegral port, urlPrefix, thread}
   pure
diff --git a/server/src-lib/Hasura/Backends/Postgres/DDL/EventTrigger.hs b/server/src-lib/Hasura/Backends/Postgres/DDL/EventTrigger.hs
index 17ed7c60e15..88954b31ca6 100644
--- a/server/src-lib/Hasura/Backends/Postgres/DDL/EventTrigger.hs
+++ b/server/src-lib/Hasura/Backends/Postgres/DDL/EventTrigger.hs
@@ -1057,7 +1057,7 @@ deleteEventTriggerLogsTx TriggerLogCleanupConfig {..} = do
        [ST.st|
          SELECT id FROM hdb_catalog.event_log
          WHERE ((delivered = true OR error = true) AND trigger_name = $1)
-          AND created_at < (now() - interval '#{qRetentionPeriod}') AT TIME ZONE 'UTC'
+          AND created_at < now() - interval '#{qRetentionPeriod}'
          AND locked IS NULL
          LIMIT $2
        |]
diff --git a/server/src-lib/Hasura/Backends/Postgres/DDL/Source/Version.hs b/server/src-lib/Hasura/Backends/Postgres/DDL/Source/Version.hs
index d2485f705aa..070dc031269 100644
--- a/server/src-lib/Hasura/Backends/Postgres/DDL/Source/Version.hs
+++ b/server/src-lib/Hasura/Backends/Postgres/DDL/Source/Version.hs
@@ -27,7 +27,7 @@ initialSourceCatalogVersion :: SourceCatalogVersion pgKind
 initialSourceCatalogVersion = Version.SourceCatalogVersion 0
 
 latestSourceCatalogVersion :: SourceCatalogVersion pgKind
-latestSourceCatalogVersion = Version.SourceCatalogVersion 4
+latestSourceCatalogVersion = Version.SourceCatalogVersion 3
 
 previousSourceCatalogVersions :: [SourceCatalogVersion pgKind]
 previousSourceCatalogVersions = [initialSourceCatalogVersion .. pred latestSourceCatalogVersion]
diff --git a/server/src-lib/Hasura/Eventing/EventTrigger.hs b/server/src-lib/Hasura/Eventing/EventTrigger.hs
index 887d02c5c2d..abf533b71cb 100644
--- a/server/src-lib/Hasura/Eventing/EventTrigger.hs
+++ b/server/src-lib/Hasura/Eventing/EventTrigger.hs
@@ -66,6 +66,7 @@ import Data.Text qualified as T
 import Data.Text.Extended
 import Data.Text.NonEmpty
 import Data.Time.Clock
+import Data.Time.Clock qualified as Time
 import Hasura.Backends.Postgres.SQL.Types hiding (TableName)
 import Hasura.Base.Error
 import Hasura.Eventing.Common
@@ -162,7 +163,7 @@ data EventPayload (b :: BackendType) = EventPayload
     epTrigger :: TriggerMetadata,
     epEvent :: J.Value,
     epDeliveryInfo :: DeliveryInfo,
-    epCreatedAt :: UTCTime
+    epCreatedAt :: Time.UTCTime
   }
   deriving (Generic)
 
diff --git a/server/src-lib/Hasura/RQL/Types/EventTrigger.hs b/server/src-lib/Hasura/RQL/Types/EventTrigger.hs
index 91f0c93f3b0..2e648f55e12 100644
--- a/server/src-lib/Hasura/RQL/Types/EventTrigger.hs
+++ b/server/src-lib/Hasura/RQL/Types/EventTrigger.hs
@@ -497,6 +497,9 @@ deriving instance Backend b => Show (Event b)
 
 deriving instance Backend b => Eq (Event b)
 
+instance Backend b => FromJSON (Event b) where
+  parseJSON = genericParseJSON hasuraJSON {omitNothingFields = True}
+
 -- | The event payload processed by 'processEvent'
 data EventWithSource (b :: BackendType) = EventWithSource
   { _ewsEvent :: Event b,
diff --git a/server/src-lib/Hasura/Server/Metrics.hs b/server/src-lib/Hasura/Server/Metrics.hs
index ab9ea72f8e4..ee6180d71fc 100644
--- a/server/src-lib/Hasura/Server/Metrics.hs
+++ b/server/src-lib/Hasura/Server/Metrics.hs
@@ -103,18 +103,18 @@ data
 -- | Mutable references for the server metrics. See `ServerMetricsSpec` for a
 -- description of each metric.
 data ServerMetrics = ServerMetrics
-  { smWarpThreads :: Gauge,
-    smWebsocketConnections :: Gauge,
-    smActiveSubscriptions :: Gauge,
-    smNumEventsFetchedPerBatch :: Distribution,
-    smNumEventHTTPWorkers :: Gauge,
-    smEventQueueTime :: Distribution,
-    smSchemaCacheMetadataResourceVersion :: Gauge,
-    smActiveLiveQueries :: Gauge,
-    smActiveStreamingSubscriptions :: Gauge,
-    smEventFetchTimePerBatch :: Distribution,
-    smEventWebhookProcessingTime :: Distribution,
-    smEventProcessingTime :: Distribution
+  { smWarpThreads :: !Gauge,
+    smWebsocketConnections :: !Gauge,
+    smActiveSubscriptions :: !Gauge,
+    smNumEventsFetchedPerBatch :: !Distribution,
+    smNumEventHTTPWorkers :: !Gauge,
+    smEventQueueTime :: !Distribution,
+    smSchemaCacheMetadataResourceVersion :: !Gauge,
+    smActiveLiveQueries :: !Gauge,
+    smActiveStreamingSubscriptions :: !Gauge,
+    smEventFetchTimePerBatch :: !Distribution,
+    smEventWebhookProcessingTime :: !Distribution,
+    smEventProcessingTime :: !Distribution
   }
 
 createServerMetrics :: Store ServerMetricsSpec -> IO ServerMetrics
diff --git a/server/src-rsr/init_pg_source.sql b/server/src-rsr/init_pg_source.sql
index 224cbba868d..ef3745fce2f 100644
--- a/server/src-rsr/init_pg_source.sql
+++ b/server/src-rsr/init_pg_source.sql
@@ -41,7 +41,7 @@ CREATE TABLE hdb_catalog.event_log
   delivered BOOLEAN NOT NULL DEFAULT FALSE,
   error BOOLEAN NOT NULL DEFAULT FALSE,
   tries INTEGER NOT NULL DEFAULT 0,
-  created_at TIMESTAMP DEFAULT (NOW() AT TIME ZONE 'utc'),
+  created_at TIMESTAMP DEFAULT NOW(),
   /* when locked IS NULL the event is unlocked and can be processed */
   locked TIMESTAMPTZ,
   next_retry_at TIMESTAMP,
@@ -67,7 +67,7 @@ CREATE TABLE hdb_catalog.event_invocation_logs
   status INTEGER,
   request JSON,
   response JSON,
-  created_at TIMESTAMP DEFAULT (NOW() AT TIME ZONE 'utc')
+  created_at TIMESTAMP DEFAULT NOW()
 );
 
 /* This index improves the performance of deletes by event_id, so that if somebody
diff --git a/server/src-rsr/pg_source_migrations/3_to_4.sql b/server/src-rsr/pg_source_migrations/3_to_4.sql
deleted file mode 100644
index 898d0f7418b..00000000000
--- a/server/src-rsr/pg_source_migrations/3_to_4.sql
+++ /dev/null
@@ -1,5 +0,0 @@
-ALTER table hdb_catalog.event_log
-  ALTER COLUMN created_at SET DEFAULT NOW() AT TIME ZONE 'utc';
-
-ALTER table hdb_catalog.event_invocation_logs
-  ALTER COLUMN created_at SET DEFAULT NOW() AT TIME ZONE 'utc';