Mirror of https://github.com/hasura/graphql-engine.git, synced 2024-12-14 08:02:15 +03:00
server: revert changes to created_at column of 'hdb_catalog.event_log'
This PR reverts the following two commits:

1. https://github.com/hasura/graphql-engine-mono/pull/8287
2. https://github.com/hasura/graphql-engine-mono/pull/8467

It undoes a migration of the `hdb_catalog.event_log` table that was introduced in d4ae6a517da63f2f43567dc16fda135b3cd1d7e6. As a result, users who were using event triggers on that version will run into the following error:

```json
{"detail":{"info":{"code":"not-supported","error":"Expected source catalog version <= 3, but the current version is 4","path":"$"},"kind":"catalog_migrate"},"level":"error","timestamp":"2023-03-28T10:17:24.289+0530","type":"startup"}
{"code":"not-supported","error":"Expected source catalog version <= 3, but the current version is 4","path":"$"}
```

To fix these errors, run the following SQL on the source where the event triggers were created:

```sql
UPDATE hdb_catalog.hdb_source_catalog_version SET version = 3, upgraded_on = NOW();

ALTER TABLE hdb_catalog.event_log
ALTER COLUMN created_at SET DEFAULT NOW();

ALTER TABLE hdb_catalog.event_invocation_logs
ALTER COLUMN created_at SET DEFAULT NOW();
```

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/8534
GitOrigin-RevId: b6bbcce0163c8beed80619d3cea056e643b8c180
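If it is unclear whether a particular source was migrated, the stored source catalog version can be checked first. This is a sketch, assuming the same `hdb_catalog.hdb_source_catalog_version` table used by the fix above; run it against the database of the source that has the event triggers:

```sql
-- Sketch: inspect the source catalog version before applying the fix above.
-- A value of 4 means the source was migrated by the reverted commits and the
-- UPDATE/ALTER statements from the commit message are needed; 3 means no action.
SELECT version, upgraded_on
FROM hdb_catalog.hdb_source_catalog_version;
```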
This commit is contained in: parent b0643d32d5, commit 4e3dbed938
@@ -154,7 +154,6 @@ library
     Test.EventTriggers.PG.EventTriggersClearMetadataSpec
     Test.EventTriggers.PG.EventTriggersRunSQLSpec
     Test.EventTriggers.PG.EventTriggersUniqueNameSpec
-    Test.EventTriggers.PG.EventTriggersCreatedAtUTCSpec
     Test.EventTriggers.PG.EventTriggersUntrackTableCleanupSpec
     Test.Harness.Quoter.YamlSpec
     Test.HealthCheckSpec

@@ -1,165 +0,0 @@
-{-# LANGUAGE QuasiQuotes #-}
-{-# LANGUAGE ViewPatterns #-}
-
--- | Test that the `created_at` value sent in the event payload is
--- correct.
-module Test.EventTriggers.PG.EventTriggersCreatedAtUTCSpec (spec) where
-
-import Control.Concurrent.Chan qualified as Chan
-import Data.Aeson.Internal qualified as Aeson
-import Data.Aeson.Key qualified as Key
-import Data.Aeson.KeyMap qualified as KM
-import Data.List.NonEmpty qualified as NE
-import Data.Time.Clock (addUTCTime, getCurrentTime)
-import Harness.Backend.Postgres qualified as Postgres
-import Harness.GraphqlEngine qualified as GraphqlEngine
-import Harness.Quoter.Yaml
-import Harness.Test.Fixture qualified as Fixture
-import Harness.Test.Schema (Table (..), table)
-import Harness.Test.Schema qualified as Schema
-import Harness.TestEnvironment (GlobalTestEnvironment, TestEnvironment (options))
-import Harness.Webhook qualified as Webhook
-import Harness.Yaml (fromObject, shouldReturnYaml)
-import Hasura.Base.Error (iResultToMaybe)
-import Hasura.Prelude
-import System.Timeout (timeout)
-import Test.HUnit.Base (assertFailure)
-import Test.Hspec (SpecWith, describe, it, shouldSatisfy)
-
---------------------------------------------------------------------------------
--- Preamble
-
-spec :: SpecWith GlobalTestEnvironment
-spec =
-  Fixture.runWithLocalTestEnvironment
-    ( NE.fromList
-        [ (Fixture.fixture $ Fixture.Backend Postgres.backendTypeMetadata)
-            { -- setup the webhook server as the local test environment,
-              -- so that the server can be referenced while testing
-              Fixture.mkLocalTestEnvironment = const Webhook.run,
-              Fixture.setupTeardown = \(testEnvironment, (webhookServer, _)) ->
-                [ Postgres.setupTablesAction schema testEnvironment,
-                  Fixture.SetupAction
-                    { Fixture.setupAction = postgresSetup testEnvironment webhookServer,
-                      Fixture.teardownAction = \_ -> postgresTeardown testEnvironment
-                    }
-                ]
-            }
-        ]
-    )
-    tests
-
---------------------------------------------------------------------------------
-
--- * Backend
-
--- ** Schema
-
-schema :: [Schema.Table]
-schema = [authorsTable]
-
-authorsTable :: Schema.Table
-authorsTable =
-  (table "authors")
-    { tableColumns =
-        [ Schema.column "id" Schema.TInt,
-          Schema.column "name" Schema.TStr
-        ],
-      tablePrimaryKey = ["id"],
-      tableData =
-        [ [Schema.VInt 1, Schema.VStr "Author 1"],
-          [Schema.VInt 2, Schema.VStr "Author 2"]
-        ]
-    }
-
---------------------------------------------------------------------------------
--- Tests
-
-tests :: SpecWith (TestEnvironment, (GraphqlEngine.Server, Webhook.EventsQueue))
-tests =
-  describe "created_at captures the correct value of the timestamp at which the event was created at." do
-    it "inserting a row in a non UTC timezone shouldn't affect the value of the `created_at`" $
-      \(testEnvironment, (_, (Webhook.EventsQueue eventsQueue))) -> do
-        let schemaName :: Schema.SchemaName
-            schemaName = Schema.getSchemaName testEnvironment
-        let insertQuery =
-              [interpolateYaml|
-                type: run_sql
-                args:
-                  source: postgres
-                  sql: "SET timezone to 'Asia/Kolkata';INSERT INTO #{schemaName}.authors (id, name) values (3, 'john');"
-              |]
-
-            expectedResponse =
-              [yaml|
-                result_type: CommandOk
-                result: null
-              |]
-
-        -- Insert a row into the table with event trigger
-        shouldReturnYaml
-          (options testEnvironment)
-          (GraphqlEngine.postV2Query 200 testEnvironment insertQuery)
-          expectedResponse
-
-        -- Check if there was a payload generated due to the insert statement
-        eventPayload <-
-          -- wait for the event for a maximum of 5 seconds
-          timeout (5 * 1000000) (Chan.readChan eventsQueue)
-            >>= (`onNothing` (assertFailure "Event expected, but not fired"))
-
-        let eventCreatedAtUTCMaybe =
-              iResultToMaybe
-                =<< Aeson.ifromJSON
-                <$> (Key.fromString "created_at")
-                `KM.lookup` (fromObject eventPayload)
-
-        eventCreatedAtUTC <-
-          onNothing eventCreatedAtUTCMaybe (assertFailure "Error in parsing the `created_at` of the event")
-
-        currentTimeUTC <- liftIO getCurrentTime
-
-        -- The current timestamp in UTC should always be greater than the
-        -- event's creation timestamp and also the current time should not
-        -- be later than 10 seconds from the event's creation time, since it
-        -- is the only event that has to be delivered. By making both the checks,
-        -- we can assure that the `created_at_tz` captures the correct value.
-        eventCreatedAtUTC
-          `shouldSatisfy` (\eventCreatedTime -> eventCreatedTime < currentTimeUTC && addUTCTime 10 eventCreatedTime > currentTimeUTC)
-
---------------------------------------------------------------------------------
-
--- ** Setup and teardown override
-
-postgresSetup :: TestEnvironment -> GraphqlEngine.Server -> IO ()
-postgresSetup testEnvironment webhookServer = do
-  let schemaName :: Schema.SchemaName
-      schemaName = Schema.getSchemaName testEnvironment
-  let webhookEndpoint = GraphqlEngine.serverUrl webhookServer ++ "/whole_event_payload"
-  GraphqlEngine.postMetadata_ testEnvironment $
-    [interpolateYaml|
-      type: bulk
-      args:
-      - type: pg_create_event_trigger
-        args:
-          name: authors_all
-          source: postgres
-          table:
-            name: authors
-            schema: #{schemaName}
-          webhook: #{webhookEndpoint}
-          insert:
-            columns: "*"
-    |]
-
-postgresTeardown :: TestEnvironment -> IO ()
-postgresTeardown testEnvironment = do
-  GraphqlEngine.postMetadata_ testEnvironment $
-    [yaml|
-      type: bulk
-      args:
-      - type: pg_delete_event_trigger
-        args:
-          name: authors_all
-          source: postgres
-    |]

@@ -47,14 +47,14 @@ run = mkTestResource do
   port <- bracket (Warp.openFreePort) (Socket.close . snd) (pure . fst)
   eventsQueueChan <- Chan.newChan
   let eventsQueue = EventsQueue eventsQueueChan
-      extractEventDataInsertIntoEventQueue jsonPathString = do
+      extractEventDataInsertIntoEventQueue = do
         req <- Spock.request
         body <- liftIO $ Wai.strictRequestBody req
         let jsonBody = Aeson.decode body
         let eventDataPayload =
               -- Only extract the data payload from the request body
               let mkJSONPathE = either (error . T.unpack) id . parseJSONPath
-                   eventJSONPath = mkJSONPathE jsonPathString
+                   eventJSONPath = mkJSONPathE "$.event.data"
                in iResultToMaybe =<< executeJSONPath eventJSONPath <$> jsonBody
         liftIO $
           Chan.writeChan eventsQueueChan $

@@ -69,14 +69,11 @@ run = mkTestResource do
         Spock.json $
           Aeson.String "world"
       Spock.post "/echo" $ do
-        extractEventDataInsertIntoEventQueue "$.event.data"
+        extractEventDataInsertIntoEventQueue
         Spock.json $ Aeson.object ["success" Aeson..= True]
       Spock.post "/nextRetry" $ do
-        extractEventDataInsertIntoEventQueue "$.event.data"
+        extractEventDataInsertIntoEventQueue
         Spock.setStatus HTTP.status503
-      Spock.post "/whole_event_payload" $ do
-        extractEventDataInsertIntoEventQueue "$"
-        Spock.json $ Aeson.object ["success" Aeson..= True]

   let server = Server {port = fromIntegral port, urlPrefix, thread}
   pure

@@ -1057,7 +1057,7 @@ deleteEventTriggerLogsTx TriggerLogCleanupConfig {..} = do
     [ST.st|
       SELECT id FROM hdb_catalog.event_log
       WHERE ((delivered = true OR error = true) AND trigger_name = $1)
-      AND created_at < (now() - interval '#{qRetentionPeriod}') AT TIME ZONE 'UTC'
+      AND created_at < now() - interval '#{qRetentionPeriod}'
      AND locked IS NULL
      LIMIT $2
    |]
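The change above drops the `AT TIME ZONE 'UTC'` conversion from the log-cleanup cutoff so that it matches the reverted `NOW()` default on `created_at`. A minimal sketch of the two cutoff expressions, with an illustrative retention interval that is not taken from the repo:

```sql
-- Sketch: cutoff expressions compared against the TIMESTAMP column created_at.
-- The old cutoff produced a UTC wall-clock timestamp to match the old
-- NOW() AT TIME ZONE 'utc' default; the reverted cutoff is a plain timestamptz,
-- which Postgres compares against created_at using the session time zone,
-- matching the reverted NOW() default.
SELECT
  now() - interval '7 days'                      AS reverted_cutoff, -- timestamptz
  (now() - interval '7 days') AT TIME ZONE 'UTC' AS old_cutoff;      -- timestamp, UTC wall clock
```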
@@ -27,7 +27,7 @@ initialSourceCatalogVersion :: SourceCatalogVersion pgKind
 initialSourceCatalogVersion = Version.SourceCatalogVersion 0

 latestSourceCatalogVersion :: SourceCatalogVersion pgKind
-latestSourceCatalogVersion = Version.SourceCatalogVersion 4
+latestSourceCatalogVersion = Version.SourceCatalogVersion 3

 previousSourceCatalogVersions :: [SourceCatalogVersion pgKind]
 previousSourceCatalogVersions = [initialSourceCatalogVersion .. pred latestSourceCatalogVersion]

@@ -66,6 +66,7 @@ import Data.Text qualified as T
 import Data.Text.Extended
 import Data.Text.NonEmpty
 import Data.Time.Clock
+import Data.Time.Clock qualified as Time
 import Hasura.Backends.Postgres.SQL.Types hiding (TableName)
 import Hasura.Base.Error
 import Hasura.Eventing.Common

@@ -162,7 +163,7 @@ data EventPayload (b :: BackendType) = EventPayload
     epTrigger :: TriggerMetadata,
     epEvent :: J.Value,
     epDeliveryInfo :: DeliveryInfo,
-    epCreatedAt :: UTCTime
+    epCreatedAt :: Time.UTCTime
   }
   deriving (Generic)

@@ -497,6 +497,9 @@ deriving instance Backend b => Show (Event b)

 deriving instance Backend b => Eq (Event b)

+instance Backend b => FromJSON (Event b) where
+  parseJSON = genericParseJSON hasuraJSON {omitNothingFields = True}
+
 -- | The event payload processed by 'processEvent'
 data EventWithSource (b :: BackendType) = EventWithSource
   { _ewsEvent :: Event b,

@@ -103,18 +103,18 @@ data
 -- | Mutable references for the server metrics. See `ServerMetricsSpec` for a
 -- description of each metric.
 data ServerMetrics = ServerMetrics
-  { smWarpThreads :: Gauge,
-    smWebsocketConnections :: Gauge,
-    smActiveSubscriptions :: Gauge,
-    smNumEventsFetchedPerBatch :: Distribution,
-    smNumEventHTTPWorkers :: Gauge,
-    smEventQueueTime :: Distribution,
-    smSchemaCacheMetadataResourceVersion :: Gauge,
-    smActiveLiveQueries :: Gauge,
-    smActiveStreamingSubscriptions :: Gauge,
-    smEventFetchTimePerBatch :: Distribution,
-    smEventWebhookProcessingTime :: Distribution,
-    smEventProcessingTime :: Distribution
+  { smWarpThreads :: !Gauge,
+    smWebsocketConnections :: !Gauge,
+    smActiveSubscriptions :: !Gauge,
+    smNumEventsFetchedPerBatch :: !Distribution,
+    smNumEventHTTPWorkers :: !Gauge,
+    smEventQueueTime :: !Distribution,
+    smSchemaCacheMetadataResourceVersion :: !Gauge,
+    smActiveLiveQueries :: !Gauge,
+    smActiveStreamingSubscriptions :: !Gauge,
+    smEventFetchTimePerBatch :: !Distribution,
+    smEventWebhookProcessingTime :: !Distribution,
+    smEventProcessingTime :: !Distribution
   }

 createServerMetrics :: Store ServerMetricsSpec -> IO ServerMetrics

@@ -41,7 +41,7 @@ CREATE TABLE hdb_catalog.event_log
   delivered BOOLEAN NOT NULL DEFAULT FALSE,
   error BOOLEAN NOT NULL DEFAULT FALSE,
   tries INTEGER NOT NULL DEFAULT 0,
-  created_at TIMESTAMP DEFAULT (NOW() AT TIME ZONE 'utc'),
+  created_at TIMESTAMP DEFAULT NOW(),
   /* when locked IS NULL the event is unlocked and can be processed */
   locked TIMESTAMPTZ,
   next_retry_at TIMESTAMP,

@@ -67,7 +67,7 @@ CREATE TABLE hdb_catalog.event_invocation_logs
   status INTEGER,
   request JSON,
   response JSON,
-  created_at TIMESTAMP DEFAULT (NOW() AT TIME ZONE 'utc')
+  created_at TIMESTAMP DEFAULT NOW()
 );

 /* This index improves the performance of deletes by event_id, so that if somebody
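Taken together, the two `init.sql` hunks above restore the pre-migration behaviour: `created_at` is filled with the session-local wall-clock time rather than UTC, which is exactly the difference the deleted `EventTriggersCreatedAtUTCSpec` exercised with a non-UTC session. A small psql sketch of what each default stores in a `TIMESTAMP` (without time zone) column; the time zone and example values are illustrative only:

```sql
-- Sketch: what each default stores when the session time zone is not UTC.
SET timezone TO 'Asia/Kolkata';

SELECT
  NOW()::timestamp         AS reverted_default,  -- local wall clock, e.g. 2023-03-28 15:47:24
  NOW() AT TIME ZONE 'utc' AS previous_default;  -- UTC wall clock,   e.g. 2023-03-28 10:17:24
```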
@@ -1,5 +0,0 @@
-ALTER table hdb_catalog.event_log
-ALTER COLUMN created_at SET DEFAULT NOW() AT TIME ZONE 'utc';
-
-ALTER table hdb_catalog.event_invocation_logs
-ALTER COLUMN created_at SET DEFAULT NOW() AT TIME ZONE 'utc';