server: generalize event triggers - incremental PR 2

https://github.com/hasura/graphql-engine-mono/pull/2270

GitOrigin-RevId: d7644b25d3ee57ffa630de15ae692c1bfa03b4f6
Karthikeyan Chinnakonda 2021-09-09 17:24:19 +05:30 committed by hasura-bot
parent 9ebc07ef4f
commit 3247c8bd71
32 changed files with 463 additions and 430 deletions

View File

@@ -702,6 +702,7 @@ test-suite graphql-engine-tests
     , time
     , transformers-base
     , unordered-containers
+    , utf8-string
     , vector
     -- mssql support
     , odbc

View File

@@ -2,7 +2,6 @@ module Hasura.Backends.BigQuery.DDL
   ( buildComputedFieldInfo
   , fetchAndValidateEnumValues
   , createTableEventTrigger
-  , buildEventTriggerInfo
   , buildFunctionInfo
   , updateColumnInEventTrigger
   , parseBoolExpOperations
@@ -13,8 +12,6 @@ where
 import Hasura.Prelude
-import qualified Data.Environment as Env
 import Data.Aeson
 import qualified Hasura.Backends.BigQuery.Types as BigQuery
@@ -72,16 +69,6 @@ createTableEventTrigger
 createTableEventTrigger _ _ _ _ _ _ = runExceptT $
   throw400 NotSupported "Cannot create table event triggers in BigQuery sources"
-buildEventTriggerInfo
-  :: MonadError QErr m
-  => Env.Environment
-  -> SourceName
-  -> TableName 'BigQuery
-  -> EventTriggerConf 'BigQuery
-  -> m (EventTriggerInfo 'BigQuery, [SchemaDependency])
-buildEventTriggerInfo _ _ _ _ =
-  throw400 NotSupported "Table event triggers are not supported for BigQuery sources"
 buildFunctionInfo
   :: (MonadError QErr m)
   => SourceName

View File

@@ -16,7 +16,6 @@ instance BackendMetadata 'BigQuery where
   resolveSourceConfig = BigQuery.resolveSourceConfig
   resolveDatabaseMetadata = BigQuery.resolveSource
   createTableEventTrigger = BigQuery.createTableEventTrigger
-  buildEventTriggerInfo = BigQuery.buildEventTriggerInfo
   parseBoolExpOperations = BigQuery.parseBoolExpOperations
   buildFunctionInfo = BigQuery.buildFunctionInfo
   updateColumnInEventTrigger = BigQuery.updateColumnInEventTrigger

View File

@@ -2,7 +2,6 @@ module Hasura.Backends.MSSQL.DDL
   ( buildComputedFieldInfo
   , fetchAndValidateEnumValues
   , createTableEventTrigger
-  , buildEventTriggerInfo
   , buildFunctionInfo
   , updateColumnInEventTrigger
   , parseCollectableType
@@ -14,8 +13,6 @@ import Hasura.Prelude
 import Data.Aeson
-import qualified Data.Environment as Env
 import Hasura.Base.Error
 import Hasura.RQL.IR.BoolExp
 import Hasura.RQL.Types.Backend
@@ -72,16 +69,6 @@ createTableEventTrigger
 createTableEventTrigger _ _ _ _ _ _ = runExceptT $
   throw400 NotSupported "Cannot create table event triggers in MSSQL sources"
-buildEventTriggerInfo
-  :: MonadError QErr m
-  => Env.Environment
-  -> SourceName
-  -> TableName 'MSSQL
-  -> EventTriggerConf 'MSSQL
-  -> m (EventTriggerInfo 'MSSQL, [SchemaDependency])
-buildEventTriggerInfo _ _ _ _ =
-  throw400 NotSupported "Table event triggers are not supported for MSSQL sources"
 buildFunctionInfo
   :: (MonadError QErr m)
   => SourceName

View File

@@ -15,7 +15,6 @@ instance BackendMetadata 'MSSQL where
   resolveSourceConfig = MSSQL.resolveSourceConfig
   resolveDatabaseMetadata = MSSQL.resolveDatabaseMetadata
   createTableEventTrigger = MSSQL.createTableEventTrigger
-  buildEventTriggerInfo = MSSQL.buildEventTriggerInfo
   parseBoolExpOperations = MSSQL.parseBoolExpOperations
   buildFunctionInfo = MSSQL.buildFunctionInfo
   updateColumnInEventTrigger = MSSQL.updateColumnInEventTrigger

View File

@@ -13,7 +13,6 @@ instance BackendMetadata 'MySQL where
   resolveSourceConfig = MySQL.resolveSourceConfig
   resolveDatabaseMetadata = MySQL.resolveDatabaseMetadata
   createTableEventTrigger = error "createTableEventTrigger: MySQL backend does not support this operation yet."
-  buildEventTriggerInfo = error "buildEventTriggerInfo: MySQL backend does not support this operation yet."
   parseBoolExpOperations = error "parseBoolExpOperations: MySQL backend does not support this operation yet."
   buildFunctionInfo = error "buildFunctionInfo: MySQL backend does not support this operation yet."
   updateColumnInEventTrigger = error "updateColumnInEventTrigger: MySQL backend does not support this operation yet."

View File

@@ -7,6 +7,7 @@ where
 import Data.Aeson
 import Hasura.Backends.Postgres.DDL.BoolExp as M
+import Hasura.Backends.Postgres.DDL.EventTrigger as M
 import Hasura.Backends.Postgres.DDL.Field as M
 import Hasura.Backends.Postgres.DDL.Function as M
 import Hasura.Backends.Postgres.DDL.Source as M

View File

@@ -1,25 +1,43 @@
 module Hasura.Backends.Postgres.DDL.EventTrigger
   ( insertManualEvent
   , redeliverEvent
+  , dropTriggerAndArchiveEvents
+  , createTableEventTrigger
+  , dropTriggerQ
+  , mkAllTriggersQ
   ) where

 import Hasura.Prelude

+import qualified Data.Text.Lazy as TL
 import qualified Database.PG.Query as Q
+import qualified Text.Shakespeare.Text as ST

+import Control.Monad.Trans.Control (MonadBaseControl)
 import Data.Aeson
+import Data.FileEmbed (makeRelativeToProject)

 import qualified Hasura.Tracing as Tracing

 import Hasura.Backends.Postgres.Connection
+import Hasura.Backends.Postgres.SQL.DML
 import Hasura.Backends.Postgres.SQL.Types hiding (TableName)
+import Hasura.Backends.Postgres.Translate.Column
 import Hasura.Base.Error
-import Hasura.RQL.Types.Backend (SourceConfig, TableName)
+import Hasura.RQL.Types.Backend (Backend, SourceConfig, TableName)
+import Hasura.RQL.Types.Column
+import Hasura.RQL.Types.Common
 import Hasura.RQL.Types.EventTrigger
 import Hasura.RQL.Types.Table ()
 import Hasura.SQL.Backend
+import Hasura.SQL.Types
+import Hasura.Server.Types
 import Hasura.Session

+-- Corresponds to the 'OLD' and 'NEW' Postgres records; see
+-- https://www.postgresql.org/docs/current/plpgsql-trigger.html
+data OpVar = OLD | NEW deriving (Show)
insertManualEvent
  :: (MonadIO m, MonadError QErr m)
  => SourceConfig ('Postgres pgKind)
@@ -50,6 +68,34 @@ redeliverEvent
redeliverEvent sourceConfig eventId =
  liftEitherM $ liftIO $ runPgSourceWriteTx sourceConfig (redeliverEventTx eventId)
dropTriggerAndArchiveEvents
  :: ( MonadIO m
     , MonadError QErr m
     )
  => SourceConfig ('Postgres pgKind)
  -> TriggerName
  -> m ()
dropTriggerAndArchiveEvents sourceConfig triggerName =
  liftEitherM $ liftIO $ runPgSourceWriteTx sourceConfig $ do
    dropTriggerQ triggerName
    archiveEvents triggerName

createTableEventTrigger
  :: (Backend ('Postgres pgKind), MonadIO m, MonadBaseControl IO m)
  => ServerConfigCtx
  -> PGSourceConfig
  -> QualifiedTable
  -> [ColumnInfo ('Postgres pgKind)]
  -> TriggerName
  -> TriggerOpsDef ('Postgres pgKind)
  -> m (Either QErr ())
createTableEventTrigger serverConfigCtx sourceConfig table columns triggerName opsDefinition = runPgSourceWriteTx sourceConfig $ do
  -- Clean up all existing triggers (this executes DROP FUNCTION IF EXISTS .. SQL)
  liftTx $ dropTriggerQ triggerName
  -- Create the given triggers
  flip runReaderT serverConfigCtx $
    mkAllTriggersQ triggerName table columns opsDefinition
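The Either QErr () returned here is the transaction result, and callers are expected to unwrap it. A hedged sketch of such a call site (the wrapper name is illustrative, not part of this commit):

-- Hypothetical wrapper: recreate the SQL triggers for one table and
-- rethrow any transaction failure in the caller's error monad.
recreateTriggers
  :: (Backend ('Postgres pgKind), MonadIO m, MonadBaseControl IO m, MonadError QErr m)
  => ServerConfigCtx -> PGSourceConfig -> QualifiedTable
  -> [ColumnInfo ('Postgres pgKind)] -> TriggerName -> TriggerOpsDef ('Postgres pgKind)
  -> m ()
recreateTriggers ctx cfg table cols name opsDef =
  createTableEventTrigger ctx cfg table cols name opsDef
    >>= either throwError pure  -- unwrap the Either QErr ()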
---- DATABASE QUERIES ---------------------
--
--   The API for our in-database work queue:
@@ -65,7 +111,6 @@ insertPGManualEvent (QualifiedObject schemaName tableName) triggerName rowData =
    SELECT hdb_catalog.insert_event_log($1, $2, $3, $4, $5)
  |] (schemaName, tableName, triggerName, (tshow MANUAL), Q.AltJ rowData) False

checkEvent :: EventId -> Q.TxE QErr ()
checkEvent eid = do
  events <- Q.listQE defaultTxErrorHandler
@@ -98,3 +143,118 @@ redeliverEventTx :: EventId -> Q.TxE QErr ()
redeliverEventTx eventId = do
  checkEvent eventId
  markForDelivery eventId
dropTriggerQ :: TriggerName -> Q.TxE QErr ()
dropTriggerQ trn =
  mapM_ (\op -> Q.unitQE
          defaultTxErrorHandler
          (Q.fromText $ getDropFuncSql op) () False) [INSERT, UPDATE, DELETE]
  where
    getDropFuncSql :: Ops -> Text
    getDropFuncSql op =
      "DROP FUNCTION IF EXISTS"
      <> " hdb_catalog." <> pgIdenTrigger op trn <> "()"
      <> " CASCADE"
archiveEvents :: TriggerName -> Q.TxE QErr ()
archiveEvents trn =
  Q.unitQE defaultTxErrorHandler [Q.sql|
    UPDATE hdb_catalog.event_log
    SET archived = 't'
    WHERE trigger_name = $1
  |] (Identity trn) False
---- Postgres event trigger utility functions ---------------------

-- | 'pgIdenTrigger' constructs the name of the Postgres function, living in
-- the hdb_catalog schema, that backs an event trigger.
pgIdenTrigger :: Ops -> TriggerName -> Text
pgIdenTrigger op trn = pgFmtIdentifier . qualifyTriggerName op $ triggerNameToTxt trn
  where
    qualifyTriggerName op' trn' = "notify_hasura_" <> trn' <> "_" <> tshow op'
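A standalone model of this naming scheme, with quoting simplified to plain double quotes (the real code formats the identifier via pgFmtIdentifier):

-- Runnable sketch: reproduces the notify_hasura_<name>_<OP> shape.
data Op = INSERT | UPDATE | DELETE deriving (Show)

pgIdenTriggerModel :: Op -> String -> String
pgIdenTriggerModel op trn = "\"notify_hasura_" <> trn <> "_" <> show op <> "\""

-- pgIdenTriggerModel INSERT "user_created"
--   == "\"notify_hasura_user_created_INSERT\""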
-- | Define the PL/pgSQL trigger functions for database events.
mkTriggerQ
  :: forall pgKind m
   . (Backend ('Postgres pgKind), MonadTx m, MonadReader ServerConfigCtx m)
  => TriggerName
  -> QualifiedTable
  -> [ColumnInfo ('Postgres pgKind)]
  -> Ops
  -> SubscribeOpSpec ('Postgres pgKind)
  -> m ()
mkTriggerQ trn qt@(QualifiedObject schema table) allCols op (SubscribeOpSpec listenColumns deliveryColumns') = do
  strfyNum <- stringifyNum . _sccSQLGenCtx <$> ask
  liftTx $ Q.multiQE defaultTxErrorHandler $ Q.fromText . TL.toStrict $
    let
      -- If the user has not selected specific delivery columns, all columns
      -- are delivered in the payload, hence 'SubCStar'.
      deliveryColumns = fromMaybe SubCStar deliveryColumns'
      getApplicableColumns = \case
        SubCStar -> allCols
        SubCArray cols -> getColInfos cols allCols
      -- Columns that should be present in the payload. By default, all columns are present.
      applicableDeliveryCols = getApplicableColumns deliveryColumns
      getRowExpression opVar = applyRowToJson' $ mkRowExpression opVar strfyNum applicableDeliveryCols
      -- Columns the user subscribed to for listening to changes. By default, we listen on all columns.
      applicableListenCols = getApplicableColumns listenColumns
      renderRow opVar = applyRow $ mkRowExpression opVar strfyNum applicableListenCols
      oldDataExp = case op of
        INSERT -> SENull
        UPDATE -> getRowExpression OLD
        DELETE -> getRowExpression OLD
        MANUAL -> SENull
      newDataExp = case op of
        INSERT -> getRowExpression NEW
        UPDATE -> getRowExpression NEW
        DELETE -> SENull
        MANUAL -> SENull
      name = triggerNameToTxt trn
      qualifiedTriggerName = pgIdenTrigger op trn
      qualifiedTable = toSQLTxt qt
      schemaName = pgFmtLit $ getSchemaTxt schema
      tableName = pgFmtLit $ getTableTxt table
      operation = tshow op
      oldRow = toSQLTxt $ renderRow OLD
      newRow = toSQLTxt $ renderRow NEW
      oldPayloadExpression = toSQLTxt oldDataExp
      newPayloadExpression = toSQLTxt newDataExp
    in $(makeRelativeToProject "src-rsr/trigger.sql.shakespeare" >>= ST.stextFile)
  where
    applyRowToJson' e = SEFnApp "row_to_json" [e] Nothing
    applyRow e = SEFnApp "row" [e] Nothing
    opToQual = QualVar . tshow
    mkRowExpression opVar strfyNum columns =
      mkRowExp $ map (\col -> toExtractor (mkQId opVar strfyNum col) col) columns
    mkQId opVar strfyNum colInfo = toJSONableExp strfyNum (pgiType colInfo) False $
      SEQIdentifier $ QIdentifier (opToQual opVar) $ toIdentifier $ pgiColumn colInfo
    -- Generate the SQL expression
    toExtractor sqlExp column
      -- If the column type is 'Geography' or 'Geometry', alias the result of
      -- 'ST_AsGeoJSON' with the column name; otherwise Postgres would name
      -- the output column `st_asgeojson`.
      | isScalarColumnWhere isGeoType (pgiType column) = Extractor sqlExp (Just $ getAlias column)
      | otherwise = Extractor sqlExp Nothing
    getAlias col = toAlias $ Identifier $ getPGColTxt (pgiColumn col)
mkAllTriggersQ
  :: forall pgKind m
   . (Backend ('Postgres pgKind), MonadTx m, MonadReader ServerConfigCtx m)
  => TriggerName
  -> QualifiedTable
  -> [ColumnInfo ('Postgres pgKind)]
  -> TriggerOpsDef ('Postgres pgKind)
  -> m ()
mkAllTriggersQ trn qt allCols fullspec = do
  onJust (tdInsert fullspec) (mkTriggerQ trn qt allCols INSERT)
  onJust (tdUpdate fullspec) (mkTriggerQ trn qt allCols UPDATE)
  onJust (tdDelete fullspec) (mkTriggerQ trn qt allCols DELETE)
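Note that MANUAL is deliberately absent here: manual events never get a SQL trigger and instead go through insertManualEvent above. A standalone sketch of the dispatch shape, using for_ over Maybe in place of onJust (type names here are illustrative):

import Data.Foldable (for_)

data OpsDef spec = OpsDef
  { insertSpec, updateSpec, deleteSpec :: Maybe spec }

-- Run 'mk' once per operation that actually has a subscription spec.
mkAll :: Monad m => (String -> spec -> m ()) -> OpsDef spec -> m ()
mkAll mk def = do
  for_ (insertSpec def) (mk "INSERT")
  for_ (updateSpec def) (mk "UPDATE")
  for_ (deleteSpec def) (mk "DELETE")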

View File

@@ -6,32 +6,33 @@ module Hasura.Backends.Postgres.DDL.RunSQL
 import Hasura.Prelude

 import qualified Data.HashMap.Strict as M
 import qualified Data.HashSet as HS
 import qualified Database.PG.Query as Q
 import qualified Text.Regex.TDFA as TDFA

 import Control.Monad.Trans.Control (MonadBaseControl)
 import Data.Aeson
 import Data.Text.Extended

 import qualified Hasura.SQL.AnyBackend as AB
 import qualified Hasura.Tracing as Tracing

+import Hasura.Backends.Postgres.DDL.EventTrigger
-import Hasura.Backends.Postgres.DDL.Source (ToMetadataFetchQuery, fetchFunctionMetadata,
-                                            fetchTableMetadata)
-import Hasura.Backends.Postgres.DDL.Table
+import Hasura.Backends.Postgres.DDL.Source (ToMetadataFetchQuery,
+                                            fetchFunctionMetadata,
+                                            fetchTableMetadata)
 import Hasura.Backends.Postgres.SQL.Types
 import Hasura.Base.Error
 import Hasura.EncJSON
 import Hasura.RQL.DDL.Deps (reportDepsExt)
 import Hasura.RQL.DDL.Schema
 import Hasura.RQL.DDL.Schema.Common
 import Hasura.RQL.DDL.Schema.Diff
 import Hasura.RQL.Types hiding (ConstraintName, fmFunction,
                                 tmComputedFields, tmTable)
 import Hasura.RQL.Types.Run
 import Hasura.Server.Utils (quoteRegex)
 import Hasura.Session

 data RunSQL
@@ -193,7 +194,7 @@ withMetadataCheck source cascade txAccess action = do
   -- Drop event triggers so no interference is caused to the sql query
   forM_ (M.elems preActionTables) $ \tableInfo -> do
     let eventTriggers = _tiEventTriggerInfoMap tableInfo
-    forM_ (M.keys eventTriggers) (liftTx . delTriggerQ)
+    forM_ (M.keys eventTriggers) (liftTx . dropTriggerQ)

   -- Get the metadata before the sql query, everything, need to filter this
   (preActionTableMeta, preActionFunctionMeta) <- fetchMeta preActionTables preActionFunctions

View File

@@ -1,279 +1,35 @@
 module Hasura.Backends.Postgres.DDL.Table
-  ( createTableEventTrigger
-  , buildEventTriggerInfo
-  , updateColumnInEventTrigger
+  ( updateColumnInEventTrigger
   , fetchAndValidateEnumValues
-  , delTriggerQ
-  , mkAllTriggersQ
-  , getHeaderInfosFromConf
   )
 where

 import Hasura.Prelude

-import qualified Data.Environment as Env
 import qualified Data.HashMap.Strict as Map
 import qualified Data.List.NonEmpty as NE
 import qualified Data.Sequence as Seq
 import qualified Data.Sequence.NonEmpty as NESeq
-import qualified Data.Text as T
-import qualified Data.Text.Lazy as TL
 import qualified Database.PG.Query as Q
 import qualified Language.GraphQL.Draft.Syntax as G
-import qualified Text.Shakespeare.Text as ST

 import Control.Monad.Trans.Control (MonadBaseControl)
 import Control.Monad.Validate
-import Data.FileEmbed (makeRelativeToProject)
 import Data.List (delete)
 import Data.Text.Extended

-import qualified Hasura.SQL.AnyBackend as AB

 import Hasura.Backends.Postgres.Connection
 import Hasura.Backends.Postgres.SQL.DML
 import Hasura.Backends.Postgres.SQL.Types
 import Hasura.Base.Error
-import Hasura.RQL.DDL.Headers
 import Hasura.RQL.Types.Backend
 import Hasura.RQL.Types.Column
-import Hasura.RQL.Types.Common
 import Hasura.RQL.Types.EventTrigger
-import Hasura.RQL.Types.SchemaCache
-import Hasura.RQL.Types.SchemaCacheTypes
 import Hasura.RQL.Types.Table
 import Hasura.SQL.Backend
 import Hasura.SQL.Types
-import Hasura.Server.Types
 import Hasura.Server.Utils
-- | Create the table event trigger in the database in a @'/v1/query' API
-- transaction as soon as after @'runCreateEventTriggerQuery' is called and
-- in building schema cache.
createTableEventTrigger
  :: (Backend ('Postgres pgKind), MonadIO m, MonadBaseControl IO m)
  => ServerConfigCtx
  -> PGSourceConfig
  -> QualifiedTable
  -> [ColumnInfo ('Postgres pgKind)]
  -> TriggerName
  -> TriggerOpsDef ('Postgres pgKind)
  -> m (Either QErr ())
createTableEventTrigger serverConfigCtx sourceConfig table columns triggerName opsDefinition = runPgSourceWriteTx sourceConfig $ do
  -- Clean all existing triggers
  liftTx $ delTriggerQ triggerName -- executes DROP IF EXISTS.. sql
  -- Create the given triggers
  flip runReaderT serverConfigCtx $
    mkAllTriggersQ triggerName table columns opsDefinition

delTriggerQ :: TriggerName -> Q.TxE QErr ()
delTriggerQ trn =
  mapM_ (\op -> Q.unitQE
          defaultTxErrorHandler
          (Q.fromText $ getDropFuncSql op) () False) [INSERT, UPDATE, DELETE]
  where
    getDropFuncSql :: Ops -> T.Text
    getDropFuncSql op =
      "DROP FUNCTION IF EXISTS"
      <> " hdb_catalog." <> pgIdenTrigger op trn <> "()"
      <> " CASCADE"

-- pgIdenTrigger is a method used to construct the name of the pg function
-- used for event triggers which are present in the hdb_catalog schema.
pgIdenTrigger :: Ops -> TriggerName -> Text
pgIdenTrigger op trn = pgFmtIdentifier . qualifyTriggerName op $ triggerNameToTxt trn
  where
    qualifyTriggerName op' trn' = "notify_hasura_" <> trn' <> "_" <> tshow op'

mkAllTriggersQ
  :: forall pgKind m
   . (Backend ('Postgres pgKind), MonadTx m, MonadReader ServerConfigCtx m)
  => TriggerName
  -> QualifiedTable
  -> [ColumnInfo ('Postgres pgKind)]
  -> TriggerOpsDef ('Postgres pgKind)
  -> m ()
mkAllTriggersQ trn qt allCols fullspec = do
  onJust (tdInsert fullspec) (mkTriggerQ trn qt allCols INSERT)
  onJust (tdUpdate fullspec) (mkTriggerQ trn qt allCols UPDATE)
  onJust (tdDelete fullspec) (mkTriggerQ trn qt allCols DELETE)
data OpVar = OLD | NEW deriving (Show)

-- | Formats each columns to appropriate SQL expression
toJSONableExp :: Bool -> ColumnType ('Postgres pgKind) -> Bool -> SQLExp -> SQLExp
toJSONableExp strfyNum colTy asText expn
  -- If its a numeric column greater than a 32-bit integer, we have to stringify it as JSON spec doesn't support >32-bit integers
  | asText || (isScalarColumnWhere isBigNum colTy && strfyNum) =
      expn `SETyAnn` textTypeAnn
  -- If the column is either a `Geometry` or `Geography` then apply the `ST_AsGeoJSON` function to convert it into GeoJSON format
  | isScalarColumnWhere isGeoType colTy =
      SEFnApp "ST_AsGeoJSON"
      [ expn
      , SEUnsafe "15" -- max decimal digits
      , SEUnsafe "4"  -- to print out crs
      ] Nothing `SETyAnn` jsonTypeAnn
  | otherwise = expn

-- | Define the pgSQL trigger functions on database events.
mkTriggerQ
  :: forall pgKind m
   . (Backend ('Postgres pgKind), MonadTx m, MonadReader ServerConfigCtx m)
  => TriggerName
  -> QualifiedTable
  -> [ColumnInfo ('Postgres pgKind)]
  -> Ops
  -> SubscribeOpSpec ('Postgres pgKind)
  -> m ()
mkTriggerQ trn qt@(QualifiedObject schema table) allCols op (SubscribeOpSpec listenColumns deliveryColumns') = do
  strfyNum <- stringifyNum . _sccSQLGenCtx <$> ask
  liftTx $ Q.multiQE defaultTxErrorHandler $ Q.fromText . TL.toStrict $
    let
      -- If there are no specific delivery columns selected by user then all the columns will be delivered
      -- in payload hence 'SubCStar'.
      deliveryColumns = fromMaybe SubCStar deliveryColumns'
      getApplicableColumns = \case
        SubCStar -> allCols
        SubCArray cols -> getColInfos cols allCols
      -- Columns that should be present in the payload. By default, all columns are present.
      applicableDeliveryCols = getApplicableColumns deliveryColumns
      getRowExpression opVar = applyRowToJson' $ mkRowExpression opVar strfyNum applicableDeliveryCols
      -- Columns that user subscribed to listen for changes. By default, we listen on all columns.
      applicableListenCols = getApplicableColumns listenColumns
      renderRow opVar = applyRow $ mkRowExpression opVar strfyNum applicableListenCols
      oldDataExp = case op of
        INSERT -> SENull
        UPDATE -> getRowExpression OLD
        DELETE -> getRowExpression OLD
        MANUAL -> SENull
      newDataExp = case op of
        INSERT -> getRowExpression NEW
        UPDATE -> getRowExpression NEW
        DELETE -> SENull
        MANUAL -> SENull
      name = triggerNameToTxt trn
      qualifiedTriggerName = pgIdenTrigger op trn
      qualifiedTable = toSQLTxt qt
      schemaName = pgFmtLit $ getSchemaTxt schema
      tableName = pgFmtLit $ getTableTxt table
      operation = tshow op
      oldRow = toSQLTxt $ renderRow OLD
      newRow = toSQLTxt $ renderRow NEW
      oldPayloadExpression = toSQLTxt oldDataExp
      newPayloadExpression = toSQLTxt newDataExp
    in $(makeRelativeToProject "src-rsr/trigger.sql.shakespeare" >>= ST.stextFile)
  where
    applyRowToJson' e = SEFnApp "row_to_json" [e] Nothing
    applyRow e = SEFnApp "row" [e] Nothing
    opToQual = QualVar . tshow
    mkRowExpression opVar strfyNum columns =
      mkRowExp $ map (\col -> toExtractor (mkQId opVar strfyNum col) col) columns
    mkQId opVar strfyNum colInfo = toJSONableExp strfyNum (pgiType colInfo) False $
      SEQIdentifier $ QIdentifier (opToQual opVar) $ toIdentifier $ pgiColumn colInfo
    -- Generate the SQL expression
    toExtractor sqlExp column
      -- If the column type is either 'Geography' or 'Geometry', then after applying the 'ST_AsGeoJSON' function
      -- to the column, alias the value of the expression with the column name else it uses `st_asgeojson` as
      -- the column name.
      | isScalarColumnWhere isGeoType (pgiType column) = Extractor sqlExp (Just $ getAlias column)
      | otherwise = Extractor sqlExp Nothing
    getAlias col = toAlias $ Identifier $ getPGColTxt (pgiColumn col)
buildEventTriggerInfo
  :: forall (pgKind :: PostgresKind) m
   . (Backend ('Postgres pgKind), QErrM m)
  => Env.Environment
  -> SourceName
  -> QualifiedTable
  -> EventTriggerConf ('Postgres pgKind)
  -> m (EventTriggerInfo ('Postgres pgKind), [SchemaDependency])
buildEventTriggerInfo env source qt (EventTriggerConf name def webhook webhookFromEnv rconf mheaders) = do
  webhookConf <- case (webhook, webhookFromEnv) of
    (Just w, Nothing) -> return $ WCValue w
    (Nothing, Just wEnv) -> return $ WCEnv wEnv
    _ -> throw500 "expected webhook or webhook_from_env"
  let headerConfs = fromMaybe [] mheaders
  webhookInfo <- getWebhookInfoFromConf env webhookConf
  headerInfos <- getHeaderInfosFromConf env headerConfs
  let eTrigInfo = EventTriggerInfo name def rconf webhookInfo headerInfos
      tabDep = SchemaDependency
                 (SOSourceObj source
                   $ AB.mkAnyBackend
                   $ SOITable @('Postgres pgKind) qt)
                 DRParent
  pure (eTrigInfo, tabDep:getTrigDefDeps @pgKind source qt def)

getTrigDefDeps
  :: forall (pgKind :: PostgresKind)
   . (Backend ('Postgres pgKind))
  => SourceName
  -> QualifiedTable
  -> TriggerOpsDef ('Postgres pgKind)
  -> [SchemaDependency]
getTrigDefDeps source qt (TriggerOpsDef mIns mUpd mDel _) =
  mconcat $ catMaybes [ subsOpSpecDeps <$> mIns
                      , subsOpSpecDeps <$> mUpd
                      , subsOpSpecDeps <$> mDel
                      ]
  where
    subsOpSpecDeps :: SubscribeOpSpec ('Postgres pgKind) -> [SchemaDependency]
    subsOpSpecDeps os =
      let cols = getColsFromSub $ sosColumns os
          colDeps = flip map cols $ \col ->
            SchemaDependency
              (SOSourceObj source
                $ AB.mkAnyBackend
                $ SOITableObj @('Postgres pgKind) qt (TOCol @('Postgres pgKind) col))
              DRColumn
          payload = maybe [] getColsFromSub (sosPayload os)
          payloadDeps = flip map payload $ \col ->
            SchemaDependency
              (SOSourceObj source
                $ AB.mkAnyBackend
                $ SOITableObj qt (TOCol @('Postgres pgKind) col))
              DRPayload
      in colDeps <> payloadDeps
    getColsFromSub sc = case sc of
      SubCStar -> []
      SubCArray pgcols -> pgcols

getHeaderInfosFromConf
  :: QErrM m
  => Env.Environment
  -> [HeaderConf]
  -> m [EventHeaderInfo]
getHeaderInfosFromConf env = mapM getHeader
  where
    getHeader :: QErrM m => HeaderConf -> m EventHeaderInfo
    getHeader hconf = case hconf of
      (HeaderConf _ (HVValue val)) -> return $ EventHeaderInfo hconf val
      (HeaderConf _ (HVEnv val)) -> do
        envVal <- getEnv env val
        return $ EventHeaderInfo hconf envVal

getWebhookInfoFromConf
  :: QErrM m
  => Env.Environment
  -> WebhookConf
  -> m WebhookConfInfo
getWebhookInfoFromConf env wc = case wc of
  WCValue w -> do
    resolvedWebhook <- resolveWebhook env w
    return $ WebhookConfInfo wc $ unResolvedWebhook resolvedWebhook
  WCEnv we -> do
    envVal <- getEnv env we
    return $ WebhookConfInfo wc envVal
updateColumnInEventTrigger
  :: QualifiedTable
  -> PGCol

View File

@@ -24,9 +24,6 @@ instance BackendAPI ('Postgres 'Vanilla) where
     , commandParser "add_computed_field" RMAddComputedField
     , commandParser "drop_computed_field" RMDropComputedField
-    , commandParser "create_event_trigger" RMPgCreateEventTrigger
-    , commandParser "delete_event_trigger" RMPgDeleteEventTrigger
     ]
   ]

View File

@@ -118,7 +118,6 @@ instance
   resolveSourceConfig = PG.resolveSourceConfig
   resolveDatabaseMetadata = PG.resolveDatabaseMetadata
   createTableEventTrigger = PG.createTableEventTrigger
-  buildEventTriggerInfo = PG.buildEventTriggerInfo @pgKind
   parseBoolExpOperations = PG.parseBoolExpOperations
   buildFunctionInfo = PG.buildFunctionInfo
   updateColumnInEventTrigger = PG.updateColumnInEventTrigger

View File

@@ -1,10 +1,12 @@
 module Hasura.Backends.Postgres.Translate.Column
   ( toTxtValue
+  , toJSONableExp
   ) where

 import Hasura.Prelude

 import Hasura.Backends.Postgres.SQL.DML
+import Hasura.Backends.Postgres.SQL.Types
 import Hasura.Backends.Postgres.SQL.Value
 import Hasura.Backends.Postgres.Types.Column
 import Hasura.RQL.Types.Column
@@ -16,3 +18,19 @@ toTxtValue ColumnValue{..} =
   withTyAnn ty . withConstructorFn ty $ txtEncoder cvValue
   where
     ty = unsafePGColumnToBackend cvType
-- | Formats each column to an appropriate SQL expression.
toJSONableExp :: Bool -> ColumnType ('Postgres pgKind) -> Bool -> SQLExp -> SQLExp
toJSONableExp stringifyNum colType asText expression
  -- If it's a numeric column larger than a 32-bit integer, we have to
  -- stringify it, as the JSON spec doesn't support >32-bit integers.
  | asText || (isScalarColumnWhere isBigNum colType && stringifyNum) =
      expression `SETyAnn` textTypeAnn
  -- If the column is either a `Geometry` or `Geography`, apply the
  -- `ST_AsGeoJSON` function to convert it into GeoJSON format.
  | isScalarColumnWhere isGeoType colType =
      SEFnApp "ST_AsGeoJSON"
      [ expression
      , SEUnsafe "15" -- max decimal digits
      , SEUnsafe "4"  -- to print out crs
      ] Nothing
      `SETyAnn` jsonTypeAnn
  | otherwise = expression
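The three guards map onto three SQL shapes; illustrative fragments (column names hypothetical):

--   big-number column, stringifyNum on:  "amount"::text
--   geospatial column:                   ST_AsGeoJSON("location", 15, 4)::json
--   anything else:                       "name"   (left unchanged)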

View File

@@ -25,6 +25,7 @@ import qualified Hasura.Backends.Postgres.SQL.DML as S
 import Hasura.Backends.Postgres.SQL.Rewrite
 import Hasura.Backends.Postgres.SQL.Types
 import Hasura.Backends.Postgres.Translate.BoolExp
+import Hasura.Backends.Postgres.Translate.Column (toJSONableExp)
 import Hasura.Backends.Postgres.Translate.Types
 import Hasura.Base.Error
 import Hasura.EncJSON

View File

@@ -135,13 +135,13 @@ import qualified Hasura.Backends.Postgres.SQL.DML as S
 import qualified Hasura.Logging as L
 import qualified Hasura.Tracing as Tracing

-import Hasura.Backends.Postgres.DDL.Table (getHeaderInfosFromConf)
 import Hasura.Backends.Postgres.SQL.Types
 import Hasura.Base.Error
 import Hasura.Eventing.Common
 import Hasura.Eventing.HTTP
 import Hasura.Eventing.ScheduledTrigger.Types
 import Hasura.Metadata.Class
+import Hasura.RQL.DDL.EventTrigger (getHeaderInfosFromConf)
 import Hasura.RQL.DDL.Headers
 import Hasura.RQL.Types
 import Hasura.SQL.Types

View File

@@ -25,6 +25,7 @@ module Hasura.Logging
   , defaultEnabledEngineLogTypes
   , isEngineLogTypeEnabled
   , readLogTypes
+  , getFormattedTime
   ) where

 import Hasura.Prelude

View File

@@ -9,28 +9,30 @@ module Hasura.RQL.DDL.EventTrigger
   , InvokeEventTriggerQuery
   , runInvokeEventTrigger
   -- TODO(from master): review
-  , archiveEvents
+  , getHeaderInfosFromConf
+  , getWebhookInfoFromConf
+  , buildEventTriggerInfo
   ) where

 import Hasura.Prelude

 import qualified Data.ByteString.Lazy as LBS
+import qualified Data.Environment as Env
 import qualified Data.HashMap.Strict as HM
 import qualified Data.HashMap.Strict.InsOrd as OMap
 import qualified Data.Text as T
-import qualified Database.PG.Query as Q
 import qualified Text.Regex.TDFA as TDFA

 import Control.Lens ((.~))
 import Data.Aeson
 import Data.Text.Extended

-import qualified Hasura.Backends.Postgres.DDL.Table as PG
 import qualified Hasura.SQL.AnyBackend as AB
 import qualified Hasura.Tracing as Tracing

 import Hasura.Base.Error
 import Hasura.EncJSON
+import Hasura.RQL.DDL.Headers
 import Hasura.RQL.Types
 import Hasura.RQL.Types.Eventing.Backend
 import Hasura.Session
@@ -130,15 +132,6 @@ instance Backend b => FromJSON (InvokeEventTriggerQuery b) where
     <*> o .:? "source" .!= defaultSource
     <*> o .: "payload"
archiveEvents :: TriggerName -> Q.TxE QErr ()
archiveEvents trn =
  Q.unitQE defaultTxErrorHandler [Q.sql|
    UPDATE hdb_catalog.event_log
    SET archived = 't'
    WHERE trigger_name = $1
  |] (Identity trn) False
resolveEventTriggerQuery
  :: forall b m
   . (Backend b, UserInfoM m, QErrM m, CacheRM m)
@@ -159,14 +152,14 @@ resolveEventTriggerQuery (CreateEventTriggerQuery source name qt insert update d
   return (ti, replace, EventTriggerConf name (TriggerOpsDef insert update delete enableManual) webhook webhookFromEnv rconf mheaders)
   where
     assertCols ti opSpec = onJust opSpec \sos -> case sosColumns sos of
       SubCStar -> return ()
-      SubCArray pgcols -> forM_ pgcols (assertPGCol (_tciFieldInfoMap ti) "")
+      SubCArray columns -> forM_ columns (assertColumnExists @b (_tciFieldInfoMap ti) "")
 createEventTriggerQueryMetadata
-  :: forall pgKind m
-   . (BackendMetadata ('Postgres pgKind), QErrM m, UserInfoM m, CacheRWM m, MetadataM m)
-  => CreateEventTriggerQuery ('Postgres pgKind)
-  -> m (TableCoreInfo ('Postgres pgKind), EventTriggerConf ('Postgres pgKind))
+  :: forall b m
+   . (BackendMetadata b, QErrM m, UserInfoM m, CacheRWM m, MetadataM m)
+  => CreateEventTriggerQuery b
+  -> m (TableCoreInfo b, EventTriggerConf b)
 createEventTriggerQueryMetadata q = do
   (tableCoreInfo, replace, triggerConf) <- resolveEventTriggerQuery q
   let table = _cetqTable q
@@ -175,34 +168,33 @@ createEventTriggerQueryMetadata q = do
       metadataObj =
         MOSourceObjId source
           $ AB.mkAnyBackend
-          $ SMOTableObj @('Postgres pgKind) table
+          $ SMOTableObj @b table
           $ MTOTrigger triggerName
   buildSchemaCacheFor metadataObj
     $ MetadataModifier
-    $ tableMetadataSetter @('Postgres pgKind) source table.tmEventTriggers %~
+    $ tableMetadataSetter @b source table.tmEventTriggers %~
       if replace then ix triggerName .~ triggerConf
       else OMap.insert triggerName triggerConf
   pure (tableCoreInfo, triggerConf)

 runCreateEventTriggerQuery
-  :: forall pgKind m
-   . (BackendMetadata ('Postgres pgKind), QErrM m, UserInfoM m, CacheRWM m, MetadataM m)
-  => CreateEventTriggerQuery ('Postgres pgKind)
+  :: forall b m
+   . (BackendMetadata b, QErrM m, UserInfoM m, CacheRWM m, MetadataM m)
+  => CreateEventTriggerQuery b
   -> m EncJSON
 runCreateEventTriggerQuery q = do
-  void $ createEventTriggerQueryMetadata @pgKind q
+  void $ createEventTriggerQueryMetadata @b q
   pure successMsg

 runDeleteEventTriggerQuery
-  :: forall pgKind m
-   . (BackendMetadata ('Postgres pgKind), MonadError QErr m, CacheRWM m, MonadIO m, MetadataM m)
-  => DeleteEventTriggerQuery ('Postgres pgKind)
+  :: forall b m
+   . (BackendEventTrigger b, BackendMetadata b, MonadError QErr m, CacheRWM m, MonadIO m, MetadataM m)
+  => DeleteEventTriggerQuery b
   -> m EncJSON
 runDeleteEventTriggerQuery (DeleteEventTriggerQuery source name) = do
-  -- liftTx $ delEventTriggerFromCatalog name
   sourceInfo <- askSourceInfo source
   let maybeTable = HM.lookup name $ HM.unions $
-        flip map (HM.toList $ _siTables @('Postgres pgKind) sourceInfo) $ \(table, tableInfo) ->
+        flip map (HM.toList $ _siTables @b sourceInfo) $ \(table, tableInfo) ->
           HM.map (const table) $ _tiEventTriggerInfoMap tableInfo
   table <- onNothing maybeTable $ throw400 NotExists $
     "event trigger with name " <> name <<> " does not exist"
@@ -210,11 +202,10 @@ runDeleteEventTriggerQuery (DeleteEventTriggerQuery source name) = do
   withNewInconsistentObjsCheck
     $ buildSchemaCache
     $ MetadataModifier
-    $ tableMetadataSetter @('Postgres pgKind) source table %~ dropEventTriggerInMetadata name
+    $ tableMetadataSetter @b source table %~ dropEventTriggerInMetadata name

-  liftEitherM $ liftIO $ runPgSourceWriteTx (_siConfiguration sourceInfo) $ do
-    PG.delTriggerQ name
-    archiveEvents name
+  dropTriggerAndArchiveEvents @b (_siConfiguration sourceInfo) name

   pure successMsg
 dropEventTriggerInMetadata :: TriggerName -> TableMetadata b -> TableMetadata b
@@ -287,3 +278,84 @@ askEventTriggerInfo sourceName triggerName = do
 -- 63 - (notify_hasura_) - (_INSERT | _UPDATE | _DELETE)
 maxTriggerNameLength :: Int
 maxTriggerNameLength = 42
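The 42 follows from Postgres's 63-byte identifier limit (NAMEDATALEN - 1) minus the longest wrapper that pgIdenTrigger puts around the user-supplied name:

-- 63 bytes, minus the "notify_hasura_" prefix (14 bytes) and the longest
-- operation suffix "_INSERT"/"_UPDATE"/"_DELETE" (7 bytes): 63 - 14 - 7 = 42.
maxTriggerNameLength' :: Int
maxTriggerNameLength' = 63 - length "notify_hasura_" - length "_DELETE"  -- == 42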
getHeaderInfosFromConf
  :: QErrM m
  => Env.Environment
  -> [HeaderConf]
  -> m [EventHeaderInfo]
getHeaderInfosFromConf env = mapM getHeader
  where
    getHeader :: QErrM m => HeaderConf -> m EventHeaderInfo
    getHeader hconf = case hconf of
      (HeaderConf _ (HVValue val)) -> return $ EventHeaderInfo hconf val
      (HeaderConf _ (HVEnv val)) -> do
        envVal <- getEnv env val
        return $ EventHeaderInfo hconf envVal

getWebhookInfoFromConf
  :: QErrM m
  => Env.Environment
  -> WebhookConf
  -> m WebhookConfInfo
getWebhookInfoFromConf env wc = case wc of
  WCValue w -> do
    resolvedWebhook <- resolveWebhook env w
    return $ WebhookConfInfo wc $ unResolvedWebhook resolvedWebhook
  WCEnv we -> do
    envVal <- getEnv env we
    return $ WebhookConfInfo wc envVal
buildEventTriggerInfo
  :: forall b m
   . (Backend b, QErrM m)
  => Env.Environment
  -> SourceName
  -> TableName b
  -> EventTriggerConf b
  -> m (EventTriggerInfo b, [SchemaDependency])
buildEventTriggerInfo env source tableName (EventTriggerConf name def webhook webhookFromEnv rconf mheaders) = do
  webhookConf <- case (webhook, webhookFromEnv) of
    (Just w, Nothing) -> return $ WCValue w
    (Nothing, Just wEnv) -> return $ WCEnv wEnv
    _ -> throw500 "expected webhook or webhook_from_env"
  let headerConfs = fromMaybe [] mheaders
  webhookInfo <- getWebhookInfoFromConf env webhookConf
  headerInfos <- getHeaderInfosFromConf env headerConfs
  let eTrigInfo = EventTriggerInfo name def rconf webhookInfo headerInfos
      tabDep = SchemaDependency
                 (SOSourceObj source
                   $ AB.mkAnyBackend
                   $ SOITable @b tableName)
                 DRParent
  pure (eTrigInfo, tabDep:getTrigDefDeps @b source tableName def)
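The webhook validation above demands exactly one of webhook / webhook_from_env. A small standalone model of that rule (constructor names shortened; not the real WebhookConf):

data WebhookConfModel = WCValueM String | WCEnvM String deriving (Show)

resolveWebhookConfModel :: Maybe String -> Maybe String -> Either String WebhookConfModel
resolveWebhookConfModel (Just w) Nothing = Right (WCValueM w)
resolveWebhookConfModel Nothing (Just e) = Right (WCEnvM e)
resolveWebhookConfModel _ _              = Left "expected webhook or webhook_from_env"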
getTrigDefDeps
  :: forall b
   . Backend b
  => SourceName
  -> TableName b
  -> TriggerOpsDef b
  -> [SchemaDependency]
getTrigDefDeps source tableName (TriggerOpsDef mIns mUpd mDel _) =
  mconcat $ catMaybes [ subsOpSpecDeps <$> mIns
                      , subsOpSpecDeps <$> mUpd
                      , subsOpSpecDeps <$> mDel
                      ]
  where
    subsOpSpecDeps :: SubscribeOpSpec b -> [SchemaDependency]
    subsOpSpecDeps os =
      let cols = getColsFromSub $ sosColumns os
          mkColDependency dependencyReason col =
            SchemaDependency
              (SOSourceObj source
                $ AB.mkAnyBackend
                $ SOITableObj @b tableName (TOCol @b col))
              dependencyReason
          colDeps = map (mkColDependency DRColumn) cols
          payload = maybe [] getColsFromSub (sosPayload os)
          payloadDeps = map (mkColDependency DRPayload) payload
      in colDeps <> payloadDeps
    getColsFromSub sc = case sc of
      SubCStar -> []
      SubCArray cols -> cols

View File

@@ -24,14 +24,16 @@ import qualified Data.HashMap.Strict as Map
 import qualified Data.HashMap.Strict.InsOrd.Extended as OMap
 import qualified Data.HashSet as HS
 import qualified Data.List as L
+import qualified Data.TByteString as TBS

 import Control.Lens ((.~), (^?))
 import Data.Aeson
+import Data.Has (Has, getter)
 import Data.Text.Extended ((<<>))

+import qualified Hasura.Logging as HL
 import qualified Hasura.SQL.AnyBackend as AB

-import Hasura.Backends.Postgres.DDL.Table (delTriggerQ)
 import Hasura.Metadata.Class
 import Hasura.RQL.DDL.Action
 import Hasura.RQL.DDL.ComputedField
@@ -51,6 +53,7 @@ import Hasura.Base.Error
 import Hasura.EncJSON
 import Hasura.RQL.DDL.Metadata.Types
 import Hasura.RQL.Types
+import Hasura.RQL.Types.Eventing.Backend (BackendEventTrigger (..))
 import Hasura.Server.Types (ExperimentalFeature (..))
@@ -60,6 +63,8 @@ runClearMetadata
   , MetadataM m
   , HasServerConfigCtx m
   , MonadMetadataStorageQueryAPI m
+  , MonadReader r m
+  , Has (HL.Logger HL.Hasura) r
   )
   => ClearMetadata -> m EncJSON
 runClearMetadata _ = do
@@ -82,7 +87,7 @@ runClearMetadata _ = do
     & metaSources %~ OMap.insert defaultSource emptyDefaultSource
   runReplaceMetadataV1 $ RMWithSources emptyMetadata'

-{- Note [Clear postgres schema for dropped triggers]
+{- Note [Cleanup for dropped triggers]
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 There was an issue (https://github.com/hasura/graphql-engine/issues/5461)
 fixed (via https://github.com/hasura/graphql-engine/pull/6137) related to
@@ -90,7 +95,7 @@ event triggers while replacing metadata in the catalog prior to metadata
 separation. The metadata separation solves the issue naturally, since the
 'hdb_catalog.event_triggers' table is no more in use and new/updated event
 triggers are processed in building schema cache. But we need to drop the
-pg trigger and archive events for dropped event triggers. This is handled
+database trigger and archive events for dropped event triggers. This is handled
 explicitly in @'runReplaceMetadata' function.
-}
@@ -102,6 +107,8 @@ runReplaceMetadata
   , MonadIO m
   , MonadMetadataStorageQueryAPI m
   , HasServerConfigCtx m
+  , MonadReader r m
+  , Has (HL.Logger HL.Hasura) r
   )
   => ReplaceMetadata -> m EncJSON
 runReplaceMetadata = \case
@@ -115,22 +122,27 @@ runReplaceMetadataV1
   , MonadIO m
   , MonadMetadataStorageQueryAPI m
   , HasServerConfigCtx m
+  , MonadReader r m
+  , Has (HL.Logger HL.Hasura) r
   )
   => ReplaceMetadataV1 -> m EncJSON
 runReplaceMetadataV1 =
   (successMsg <$) . runReplaceMetadataV2 . ReplaceMetadataV2 NoAllowInconsistentMetadata

 runReplaceMetadataV2
-  :: forall m
+  :: forall m r
   . ( QErrM m
     , CacheRWM m
     , MetadataM m
     , MonadIO m
     , HasServerConfigCtx m
     , MonadMetadataStorageQueryAPI m
+    , MonadReader r m
+    , Has (HL.Logger HL.Hasura) r
     )
   => ReplaceMetadataV2 -> m EncJSON
 runReplaceMetadataV2 ReplaceMetadataV2{..} = do
+  logger :: (HL.Logger HL.Hasura) <- asks getter
   -- we drop all the future cron trigger events before inserting the new metadata
   -- and re-populating future cron events below
   experimentalFeatures <- _sccExperimentalFeatures <$> askServerConfigCtx
@@ -180,14 +192,11 @@ runReplaceMetadataV2 ReplaceMetadataV2{..} = do
   for_ cronTriggersToBeAdded $ \CronTriggerMetadata {..} ->
     populateInitialCronTriggerEvents ctSchedule ctName

-  -- See Note [Clear postgres schema for dropped triggers]
-  dropPostgresTriggers (getOnlyPGSources oldMetadata) (getOnlyPGSources metadata)
+  -- See Note [Cleanup for dropped triggers]
+  dropSourceSQLTriggers logger (_metaSources oldMetadata) (_metaSources metadata)

   encJFromJValue . formatInconsistentObjs . scInconsistentObjs <$> askSchemaCache
   where
-    getOnlyPGSources :: Metadata -> InsOrdHashMap SourceName (SourceMetadata ('Postgres 'Vanilla))
-    getOnlyPGSources = OMap.mapMaybe AB.unpackAnyBackend . _metaSources

{- Note [Cron triggers behaviour with replace metadata]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -243,33 +252,50 @@ runReplaceMetadataV2 ReplaceMetadataV2{..} = do
       pure $ allNewCronTriggers <> oldCronTriggersNotIncludedInMetadata
     pure $ (cronTriggers, cronTriggersToBeAdded)
-    dropPostgresTriggers
-      :: InsOrdHashMap SourceName (SourceMetadata ('Postgres 'Vanilla)) -- ^ old pg sources
-      -> InsOrdHashMap SourceName (SourceMetadata ('Postgres 'Vanilla)) -- ^ new pg sources
-      -> m ()
-    dropPostgresTriggers oldSources newSources =
-      for_ (OMap.toList newSources) $ \(source, newSourceCache) ->
-        onJust (OMap.lookup source oldSources) $ \oldSourceCache -> do
-          let oldTriggersMap = getPGTriggersMap oldSourceCache
-              newTriggersMap = getPGTriggersMap newSourceCache
-              droppedTriggers = OMap.keys $ oldTriggersMap `OMap.difference` newTriggersMap
-              catcher e@QErr{ qeCode }
-                | qeCode == Unexpected = pure () -- NOTE: This information should be returned by the inconsistent_metadata response, so doesn't need additional logging.
-                | otherwise = throwError e -- rethrow other errors
-          -- This will swallow Unexpected exceptions for sources if allow_inconsistent_metadata is enabled
-          -- This should be ok since if the sources are already missing from the cache then they should
-          -- not need to be removed.
-          --
-          -- TODO: Determine if any errors should be thrown from askSourceConfig at all if the errors are just being discarded
-          flip catchError catcher do
-            sourceConfig <- askSourceConfig @('Postgres 'Vanilla) source
-            for_ droppedTriggers $
-              \name ->
-                liftIO $ runPgSourceWriteTx sourceConfig $ delTriggerQ name >> archiveEvents name
-      where
-        getPGTriggersMap = OMap.unions . map _tmEventTriggers . OMap.elems . _smTables
+    dropSourceSQLTriggers
+      :: HL.Logger HL.Hasura
+      -> InsOrdHashMap SourceName BackendSourceMetadata -- ^ old sources
+      -> InsOrdHashMap SourceName BackendSourceMetadata -- ^ new sources
+      -> m ()
+    dropSourceSQLTriggers (HL.Logger logger) oldSources newSources = do
+      -- NOTE: the current implementation of this function has an edge case.
+      -- The edge case is that when a `SourceA` which contained some event triggers
+      -- is modified to point to a new database, this function will try to drop the
+      -- SQL triggers of the dropped event triggers on the new database which doesn't exist.
+      -- In the current implementation, this doesn't throw an error because the trigger is dropped
+      -- using `DROP IF EXISTS..` meaning this silently fails without throwing an error.
+      for_ (OMap.toList newSources) $ \(source, newBackendSourceMetadata) -> do
+        onJust (OMap.lookup source oldSources) $ \oldBackendSourceMetadata ->
+          compose source newBackendSourceMetadata oldBackendSourceMetadata \(newSourceMetadata :: SourceMetadata b) -> do
+            dispatch oldBackendSourceMetadata \oldSourceMetadata -> do
+              let oldTriggersMap = getTriggersMap oldSourceMetadata
+                  newTriggersMap = getTriggersMap newSourceMetadata
+                  droppedTriggers = OMap.keys $ oldTriggersMap `OMap.difference` newTriggersMap
+                  catcher e@QErr{ qeCode }
+                    | qeCode == Unexpected = pure () -- NOTE: This information should be returned by the inconsistent_metadata response, so doesn't need additional logging.
+                    | otherwise = throwError e -- rethrow other errors
+              -- This will swallow Unexpected exceptions for sources if allow_inconsistent_metadata is enabled
+              -- This should be ok since if the sources are already missing from the cache then they should
+              -- not need to be removed.
+              --
+              -- TODO: Determine if any errors should be thrown from askSourceConfig at all if the errors are just being discarded
+              return $
+                flip catchError catcher do
+                  sourceConfig <- askSourceConfig @b source
+                  for_ droppedTriggers $ dropTriggerAndArchiveEvents @b sourceConfig
+      where
+        getTriggersMap = OMap.unions . map _tmEventTriggers . OMap.elems . _smTables
+        dispatch = AB.dispatchAnyBackend @BackendEventTrigger
+        compose
+          :: SourceName
+          -> AB.AnyBackend i
+          -> AB.AnyBackend i
+          -> (forall b. BackendEventTrigger b => i b -> i b -> m ()) -> m ()
+        compose sourceName x y f = AB.composeAnyBackend @BackendEventTrigger f x y (logger $ HL.UnstructuredLog HL.LevelInfo $ TBS.fromText $ "Event trigger clean up couldn't be done on the source " <> sourceName <<> " because it has changed its type")
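AB.composeAnyBackend only runs the continuation when both existentially wrapped sources carry the same backend; otherwise the fallback action (here, the info log) runs. A simplified standalone model of that shape (the real AnyBackend is an existential over backend types, not a plain sum):

data BackendTag = PostgresTag | MSSQLTag deriving (Eq, Show)
data AnyB i = AnyB BackendTag i

-- Apply 'f' only when the two backend tags match, else run the fallback.
composeModel :: Applicative m => (i -> i -> m ()) -> AnyB i -> AnyB i -> m () -> m ()
composeModel f (AnyB t1 x) (AnyB t2 y) fallback
  | t1 == t2  = f x y
  | otherwise = fallback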
processExperimentalFeatures :: HasServerConfigCtx m => Metadata -> m Metadata
processExperimentalFeatures metadata = do

View File

@@ -10,21 +10,21 @@ module Hasura.RQL.DDL.ScheduledTrigger
   , populateInitialCronTriggerEvents
   ) where

 import System.Cron.Types (CronSchedule)

 import Hasura.Prelude

 import qualified Data.Aeson as J
 import qualified Data.Environment as Env
 import qualified Data.HashMap.Strict as Map
 import qualified Data.HashMap.Strict.InsOrd as OMap
 import qualified Data.Time.Clock as C

-import Hasura.Backends.Postgres.DDL.Table (getHeaderInfosFromConf)
 import Hasura.Base.Error
 import Hasura.EncJSON
 import Hasura.Eventing.ScheduledTrigger
 import Hasura.Metadata.Class
+import Hasura.RQL.DDL.EventTrigger (getHeaderInfosFromConf)
 import Hasura.RQL.Types
populateInitialCronTriggerEvents populateInitialCronTriggerEvents

View File

@@ -58,6 +58,7 @@ import Hasura.GraphQL.Schema (buildGQLContext)
 import Hasura.Metadata.Class
 import Hasura.RQL.DDL.Action
 import Hasura.RQL.DDL.CustomTypes
+import Hasura.RQL.DDL.EventTrigger (buildEventTriggerInfo)
 import Hasura.RQL.DDL.InheritedRoles (resolveInheritedRole)
 import Hasura.RQL.DDL.RemoteRelationship (PartiallyResolvedSource (..))
 import Hasura.RQL.DDL.RemoteSchema

View File

@@ -86,7 +86,7 @@ validateCountQWith sessVarBldr prepValBldr (CountQuery qt _ mDistCols mWhere) =
   forM_ mDistCols $ \distCols -> do
     let distColAsrns = [ checkSelOnCol selPerm
-                       , assertPGCol colInfoMap relInDistColsErr]
+                       , assertColumnExists colInfoMap relInDistColsErr]
     withPathK "distinct" $ verifyAsrns distColAsrns distCols

   -- convert the where clause

View File

@@ -350,19 +350,6 @@ dmlTxErrorHandler = mkTxErrorHandler $ \case
                        , PGInvalidColumnReference ]
   _ -> False

-toJSONableExp :: Bool -> ColumnType ('Postgres pgKind) -> Bool -> S.SQLExp -> S.SQLExp
-toJSONableExp strfyNum colTy asText expn
-  | asText || (isScalarColumnWhere isBigNum colTy && strfyNum) =
-    expn `S.SETyAnn` S.textTypeAnn
-  | isScalarColumnWhere isGeoType colTy =
-    S.SEFnApp "ST_AsGeoJSON"
-    [ expn
-    , S.SEUnsafe "15" -- max decimal digits
-    , S.SEUnsafe "4" -- to print out crs
-    ] Nothing
-    `S.SETyAnn` S.jsonTypeAnn
-  | otherwise = expn

 -- validate headers
 validateHeaders :: (UserInfoM m, QErrM m) => HashSet Text -> m ()
 validateHeaders depHeaders = do

View File

@@ -23,7 +23,7 @@ module Hasura.RQL.Types
   , askTableCoreInfoSource
   , askFieldInfoMap
   , askFieldInfoMapSource
-  , assertPGCol
+  , assertColumnExists
   , askRelType
   , askComputedFieldInfo
   , askRemoteRel
@@ -288,14 +288,15 @@ askComputedFieldInfo fields computedField = do
     , computedField <<> " is a " <> fieldType <> "; "
     ]

-assertPGCol :: (MonadError QErr m, Backend backend)
-            => FieldInfoMap (FieldInfo backend)
-            -> Text
-            -> Column backend
-            -> m ()
-assertPGCol m msg c = do
-  _ <- askColInfo m c msg
-  return ()
+assertColumnExists
+  :: forall backend m
+   . (MonadError QErr m, Backend backend)
+  => FieldInfoMap (FieldInfo backend)
+  -> Text
+  -> Column backend
+  -> m ()
+assertColumnExists m msg c = do
+  void $ askColInfo m c msg

 askRelType :: (MonadError QErr m)
            => FieldInfoMap (FieldInfo backend)
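
The body rewrite above is a standard idiom swap; a minimal standalone sketch of the equivalence (the names here are illustrative, not from the codebase):

import Control.Monad (void)

-- Both run the action and throw away its result; 'void' is the idiomatic
-- one-liner for the "_ <- action; return ()" pattern.
discardOld :: Monad m => m a -> m ()
discardOld action = do
  _ <- action
  return ()

discardNew :: Functor f => f a -> f ()
discardNew = void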

View File

@@ -40,22 +40,38 @@ class Backend b => BackendEventTrigger (b :: BackendType) where
     -> EventId
     -> m ()

+  -- | @dropTriggerAndArchiveEvents@ drops the database trigger and
+  -- marks all the events related to the event trigger as archived.
+  -- See Note [Cleanup for dropped triggers]
+  dropTriggerAndArchiveEvents
+    :: ( MonadIO m
+       , MonadError QErr m
+       )
+    => SourceConfig b
+    -> TriggerName
+    -> m ()
+
 instance BackendEventTrigger ('Postgres 'Vanilla) where
   insertManualEvent = PG.insertManualEvent
   redeliverEvent = PG.redeliverEvent
+  dropTriggerAndArchiveEvents = PG.dropTriggerAndArchiveEvents

 instance BackendEventTrigger ('Postgres 'Citus) where
   insertManualEvent _ _ _ _ _ _ = throw400 NotSupported $ "Event triggers are not supported for Citus sources"
   redeliverEvent _ _ = throw400 NotSupported $ "Event triggers are not supported for Citus sources"
+  dropTriggerAndArchiveEvents _ _ = throw400 NotSupported $ "Event triggers are not supported for Citus sources"

 instance BackendEventTrigger 'MSSQL where
   insertManualEvent _ _ _ _ _ _ = throw400 NotSupported "Event triggers are not supported for MS-SQL sources"
   redeliverEvent _ _ = throw400 NotSupported "Event triggers are not supported for MS-SQL sources"
+  dropTriggerAndArchiveEvents _ _ = throw400 NotSupported $ "Event triggers are not supported for MS-SQL sources"

 instance BackendEventTrigger 'BigQuery where
   insertManualEvent _ _ _ _ _ _ = throw400 NotSupported "Event triggers are not supported for BigQuery sources"
   redeliverEvent _ _ = throw400 NotSupported "Event triggers are not supported for BigQuery sources"
+  dropTriggerAndArchiveEvents _ _ = throw400 NotSupported $ "Event triggers are not supported for BigQuery sources"

 instance BackendEventTrigger 'MySQL where
   insertManualEvent _ _ _ _ _ _ = throw400 NotSupported "Event triggers are not supported for MySQL sources"
   redeliverEvent _ _ = throw400 NotSupported "Event triggers are not supported for MySQL sources"
+  dropTriggerAndArchiveEvents _ _ = throw400 NotSupported $ "Event triggers are not supported for MySQL sources"
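
For orientation, a minimal sketch of the shape a new instance must now take; the backend tag 'Foo is hypothetical and not part of this PR. Backends without event-trigger support stub every method, exactly as the Citus/MS-SQL/BigQuery/MySQL instances above do:

-- Hypothetical backend tag 'Foo: each class method either delegates to a
-- backend-specific implementation or uniformly rejects the request.
instance BackendEventTrigger 'Foo where
  insertManualEvent _ _ _ _ _ _   = throw400 NotSupported "Event triggers are not supported for Foo sources"
  redeliverEvent _ _              = throw400 NotSupported "Event triggers are not supported for Foo sources"
  dropTriggerAndArchiveEvents _ _ = throw400 NotSupported "Event triggers are not supported for Foo sources"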

View File

@@ -71,14 +71,6 @@ class ( Backend b
     -> TriggerOpsDef b
     -> m (Either QErr ())

-  buildEventTriggerInfo
-    :: MonadError QErr m
-    => Env.Environment
-    -> SourceName
-    -> TableName b
-    -> EventTriggerConf b
-    -> m (EventTriggerInfo b, [SchemaDependency])
-
   parseBoolExpOperations
     :: (MonadError QErr m, TableCoreInfoRM b m)
     => ValueParser b m v

View File

@@ -10,6 +10,7 @@ module Hasura.SQL.AnyBackend
   , dispatchAnyBackend
   , dispatchAnyBackend'
   , dispatchAnyBackendArrow
+  , dispatchAnyBackendWithTwoConstraints
   , unpackAnyBackend
   , composeAnyBackend
   , runBackend
@@ -272,6 +273,19 @@ dispatchAnyBackend
   -> r
 dispatchAnyBackend e f = $(mkDispatch 'f 'e)

+dispatchAnyBackendWithTwoConstraints
+  :: forall
+       (c1 :: BackendType -> Constraint)
+       (c2 :: BackendType -> Constraint)
+       (i :: BackendType -> Type)
+       (r :: Type)
+   . AllBackendsSatisfy c1
+  => AllBackendsSatisfy c2
+  => AnyBackend i
+  -> (forall (b :: BackendType). c1 b => c2 b => i b -> r)
+  -> r
+dispatchAnyBackendWithTwoConstraints e f = $(mkDispatch 'f 'e)
+
 -- | Unlike 'dispatchAnyBackend', the expected constraint has a different kind.
 -- Use for classes like 'Show', 'ToJSON', etc.
 dispatchAnyBackend'
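
A sketch of how a caller uses the new dispatcher, mirroring the dispatchMetadataAndEventTrigger helper this PR adds to the metadata API further down; the function name here is illustrative:

-- Given an existentially wrapped 'AnyBackend i', run a continuation that
-- needs evidence of two classes for the hidden backend. The two type
-- applications choose the constraints; the TH-generated case does the rest.
dispatchBoth
  :: (forall b. (BackendMetadata b, BackendEventTrigger b) => i b -> r)
  -> AnyBackend i
  -> r
dispatchBoth f x = dispatchAnyBackendWithTwoConstraints @BackendMetadata @BackendEventTrigger x f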

View File

@@ -92,5 +92,7 @@ remoteRelationshipCommands =
     ]
   eventTriggerCommands =
     [ commandParser "invoke_event_trigger" $ RMInvokeEventTrigger . mkAnyBackend @b
+    , commandParser "create_event_trigger" $ RMCreateEventTrigger . mkAnyBackend @b
+    , commandParser "delete_event_trigger" $ RMDeleteEventTrigger . mkAnyBackend @b
    , commandParser "redeliver_event" $ RMRedeliverEvent . mkAnyBackend @b
    ]

View File

@@ -105,11 +105,9 @@ data RQLMetadataV1
   | RMAddComputedField !(AddComputedField ('Postgres 'Vanilla))
   | RMDropComputedField !(DropComputedField ('Postgres 'Vanilla))

-  -- Tables event triggers (PG-specific)
-  | RMPgCreateEventTrigger !(CreateEventTriggerQuery ('Postgres 'Vanilla))
-  | RMPgDeleteEventTrigger !(DeleteEventTriggerQuery ('Postgres 'Vanilla))
-
-  -- Event Trigger APIs
+  -- Tables event triggers
+  | RMCreateEventTrigger !(AnyBackend CreateEventTriggerQuery)
+  | RMDeleteEventTrigger !(AnyBackend DeleteEventTriggerQuery)
   | RMRedeliverEvent !(AnyBackend RedeliverEventQuery)
   | RMInvokeEventTrigger !(AnyBackend InvokeEventTriggerQuery)
@@ -466,8 +464,8 @@ runMetadataQueryV1M env currentResourceVersion = \case
   RMAddComputedField q -> runAddComputedField q
   RMDropComputedField q -> runDropComputedField q

-  RMPgCreateEventTrigger q -> runCreateEventTriggerQuery q
-  RMPgDeleteEventTrigger q -> runDeleteEventTriggerQuery q
+  RMCreateEventTrigger q -> dispatchMetadata runCreateEventTriggerQuery q
+  RMDeleteEventTrigger q -> dispatchMetadataAndEventTrigger runDeleteEventTriggerQuery q
   RMRedeliverEvent q -> dispatchEventTrigger runRedeliverEvent q
   RMInvokeEventTrigger q -> dispatchEventTrigger runInvokeEventTrigger q
@@ -541,7 +539,11 @@ runMetadataQueryV1M env currentResourceVersion = \case
    dispatchEventTrigger :: (forall b. BackendEventTrigger b => i b -> a) -> AnyBackend i -> a
    dispatchEventTrigger f x = dispatchAnyBackend @BackendEventTrigger x f

+    dispatchMetadataAndEventTrigger
+      :: (forall b. (BackendMetadata b, BackendEventTrigger b) => i b -> a)
+      -> AnyBackend i
+      -> a
+    dispatchMetadataAndEventTrigger f x = dispatchAnyBackendWithTwoConstraints @BackendMetadata @BackendEventTrigger x f
+
 runMetadataQueryV2M
   :: ( MonadIO m
@@ -549,6 +551,8 @@ runMetadataQueryV2M
      , MetadataM m
      , MonadMetadataStorageQueryAPI m
      , HasServerConfigCtx m
+     , MonadReader r m
+     , Has (L.Logger L.Hasura) r
      )
   => MetadataResourceVersion
  -> RQLMetadataV2
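
The new MonadReader r m / Has (L.Logger L.Hasura) r pair is how the logger now travels without an explicit parameter. A minimal sketch of retrieving it downstream, assuming the getter method from Data.Has (the helper name askLogger is illustrative):

-- Any action running under these constraints can recover the logger from
-- its reader environment; 'getter' projects the logger component out of 'r'.
askLogger :: (MonadReader r m, Has (L.Logger L.Hasura) r) => m (L.Logger L.Hasura)
askLogger = asks getter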

View File

@@ -64,10 +64,10 @@ data RQLMetadataV1
   | RMAddComputedField !(AddComputedField ('Postgres 'Vanilla))
   | RMDropComputedField !(DropComputedField ('Postgres 'Vanilla))

-  -- Tables event triggers (PG-specific)
-  | RMPgCreateEventTrigger !(CreateEventTriggerQuery ('Postgres 'Vanilla))
-  | RMPgDeleteEventTrigger !(DeleteEventTriggerQuery ('Postgres 'Vanilla))
+  -- Tables event triggers
+  | RMCreateEventTrigger !(AnyBackend CreateEventTriggerQuery)
+  | RMDeleteEventTrigger !(AnyBackend DeleteEventTriggerQuery)
   | RMRedeliverEvent !(AnyBackend RedeliverEventQuery)
   | RMInvokeEventTrigger !(AnyBackend InvokeEventTriggerQuery)

   -- Remote schemas

View File

@@ -13,8 +13,10 @@ import Control.Monad.Unique
 import Data.Aeson
 import Data.Aeson.Casing
 import Data.Aeson.TH
+import Data.Has (Has)
 import Network.HTTP.Client.Extended

+import qualified Hasura.Logging as L
 import qualified Hasura.Tracing as Tracing

 import Hasura.Backends.Postgres.DDL.RunSQL
@@ -182,12 +184,13 @@ runQuery
      , MonadQueryTags m
      )
   => Env.Environment
+  -> L.Logger L.Hasura
   -> InstanceId
   -> UserInfo -> RebuildableSchemaCache -> HTTP.Manager
   -> ServerConfigCtx -> RQLQuery -> m (EncJSON, RebuildableSchemaCache)
-runQuery env instanceId userInfo sc hMgr serverConfigCtx query = do
+runQuery env logger instanceId userInfo sc hMgr serverConfigCtx query = do
   (metadata, currentResourceVersion) <- fetchMetadata
-  result <- runQueryM env query & Tracing.interpTraceT \x -> do
+  result <- runReaderT (runQueryM env query) logger & Tracing.interpTraceT \x -> do
     (((js, tracemeta), meta), rsc, ci) <-
       x & runMetadataT metadata
         & runCacheRWT sc
@@ -355,6 +358,8 @@ runQueryM
      , MetadataM m
      , MonadMetadataStorageQueryAPI m
      , MonadQueryTags m
+     , MonadReader r m
+     , Has (L.Logger L.Hasura) r
      )
   => Env.Environment
   -> RQLQuery
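
runQuery discharges those constraints by making the logger itself the reader environment (r ~ L.Logger L.Hasura, which satisfies the Has constraint via Data.Has's reflexive instance, assuming the data-has package) — the same flip runReaderT logger pattern the MigrateSpec change below uses. A stripped-down sketch, with the helper name withLogger being illustrative:

-- The environment is just the logger, so 'Has (L.Logger L.Hasura) r' holds
-- trivially and no larger record type is needed.
withLogger :: L.Logger L.Hasura -> ReaderT (L.Logger L.Hasura) m a -> m a
withLogger logger action = runReaderT action logger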

View File

@@ -443,10 +443,10 @@ v1QueryHandler query = do
   (liftEitherM . authorizeV1QueryApi query) =<< ask
   scRef <- asks (scCacheRef . hcServerCtx)
   logger <- asks (scLogger . hcServerCtx)
-  res <- bool (fst <$> action) (withSCUpdate scRef logger Nothing action) $ queryModifiesSchemaCache query
+  res <- bool (fst <$> (action logger)) (withSCUpdate scRef logger Nothing (action logger)) $ queryModifiesSchemaCache query
   return $ HttpResponse res []
   where
-    action = do
+    action logger = do
      userInfo <- asks hcUser
      scRef <- asks (scCacheRef . hcServerCtx)
      schemaCache <- fmap fst $ liftIO $ readIORef $ _scrCache scRef
@@ -459,7 +459,7 @@ v1QueryHandler query = do
      maintenanceMode <- asks (scEnableMaintenanceMode . hcServerCtx)
      experimentalFeatures <- asks (scExperimentalFeatures . hcServerCtx)
      let serverConfigCtx = ServerConfigCtx functionPermsCtx remoteSchemaPermsCtx sqlGenCtx maintenanceMode experimentalFeatures
-      runQuery env instanceId userInfo schemaCache httpMgr
+      runQuery env logger instanceId userInfo schemaCache httpMgr
        serverConfigCtx query
v1MetadataHandler v1MetadataHandler

View File

@@ -4,6 +4,7 @@ module Hasura.Server.MigrateSpec (CacheRefT(..), spec) where

 import Hasura.Prelude

+import qualified Data.ByteString.Lazy.UTF8 as LBS
 import qualified Data.Environment as Env
 import qualified Database.PG.Query as Q
 import qualified Network.HTTP.Client.Extended as HTTP
@@ -13,12 +14,14 @@ import Control.Monad.Morph
 import Control.Monad.Trans.Control (MonadBaseControl)
 import Control.Monad.Unique
 import Control.Natural ((:~>) (..))
+import Data.Aeson (encode)
 import Data.Time.Clock (getCurrentTime)
 import Test.Hspec.Core.Spec
 import Test.Hspec.Expectations.Lifted

 import Hasura.Backends.Postgres.Connection
 import Hasura.Base.Error
+import Hasura.Logging
 import Hasura.Metadata.Class
 import Hasura.RQL.DDL.Metadata (ClearMetadata (..), runClearMetadata)
 import Hasura.RQL.DDL.Schema
@@ -137,6 +140,10 @@ spec srcConfig pgExecCtx pgConnInfo = do
   describe "recreateSystemMetadata" $ do
     let dumpMetadata = execPGDump (PGDumpReqBody defaultSource ["--schema=hdb_catalog"] False) pgConnInfo
+        logger :: Logger Hasura = Logger $ \l -> do
+          let (logLevel, logType :: EngineLogType Hasura, logDetail) = toEngineLog l
+          t <- liftIO $ getFormattedTime Nothing
+          liftIO $ putStrLn $ LBS.toString $ encode $ EngineLog t logLevel logType logDetail

     it "is idempotent" \(NT transact) -> do
       env <- Env.getEnvironment
@@ -157,7 +164,7 @@ spec srcConfig pgExecCtx pgConnInfo = do
       time <- getCurrentTime
       transact (dropAndInit env time) `shouldReturn` MRInitialized
       firstDump <- transact dumpMetadata
-      transact (runClearMetadata ClearMetadata) `shouldReturn` successMsg
+      transact (flip runReaderT logger $ runClearMetadata ClearMetadata) `shouldReturn` successMsg
       secondDump <- transact dumpMetadata
       secondDump `shouldBe` firstDump