trigger webhooks on column level changes instead of row (close #547, close #680) (#550)

This commit is contained in:
Tirumarai Selvan 2018-10-25 12:52:51 +05:30 committed by Shahidh K Muhammed
parent ab9692da4d
commit 810b440089
17 changed files with 447 additions and 153 deletions

View File

@ -0,0 +1,214 @@
Schema/Metadata API Reference: Event Triggers
=============================================
Event triggers are used to capture database changes and send them to a configured webhook.
.. _create_event_trigger:
create_event_trigger
--------------------
``create_event_trigger`` is used to create a new event trigger or replace an existing event trigger.
.. code-block:: http
POST /v1/query HTTP/1.1
Content-Type: application/json
X-Hasura-Role: admin
{
"type" : "create_event_trigger",
"args" : {
"name": "sample_trigger",
"table": "users",
"webhook": "https://httpbin.org/post",
"insert": {
"columns": "*",
"payload": ["username"]
},
"update": {
"columns": ["username", "real_name"],
"payload": "*"
},
"delete": {
"columns": "*"
},
"headers":[
{
"name": "X-Hasura-From-Val",
"value": "myvalue"
},
{
"name": "X-Hasura-From-Env",
"value_from_env": "EVENT_WEBHOOK_HEADER"
}
],
"replace": false
}
}
.. _create_event_trigger_syntax:
Args syntax
^^^^^^^^^^^
.. list-table::
:header-rows: 1
* - Key
- Required
- Schema
- Description
* - name
- true
- TriggerName_
- Name of the event trigger
* - table
- true
- :ref:`TableName <TableName>`
- Name of the table
* - webhook
- true
- String
- Full URL of the webhook
* - insert
- false
- OperationSpec_
- Specification for insert operation
* - update
- false
- OperationSpec_
- Specification for update operation
* - delete
- false
- OperationSpec_
- Specification for delete operation
* - headers
- false
- [ HeaderFromValue_ | HeaderFromEnv_ ]
- List of headers to be sent with the webhook
* - replace
- false
- Boolean
- If set to true, event trigger is replaced with the new definition
.. _delete_event_trigger:
delete_event_trigger
--------------------
``delete_event_trigger`` is used to delete an event trigger.
.. code-block:: http
POST /v1/query HTTP/1.1
Content-Type: application/json
X-Hasura-Role: admin
{
"type" : "delete_event_trigger",
"args" : {
"name": "sample_trigger"
}
}
.. _delete_event_trigger_syntax:
Args syntax
^^^^^^^^^^^
.. list-table::
:header-rows: 1
* - Key
- Required
- Schema
- Description
* - name
- true
- TriggerName_
- Name of the event trigger
.. _TriggerName:
``TriggerName``
&&&&&&&&&&&&&&&
.. parsed-literal::
String
.. _OperationSpec:
``OperationSpec``
&&&&&&&&&&&&&&&&&
.. list-table::
:header-rows: 1
* - Key
- Required
- Schema
- Description
* - columns
- true
- EventTriggerColumns_
- List of columns or "*" to listen to changes on
* - payload
- false
- EventTriggerColumns_
- List of columns or "*" to send as part of the webhook payload
.. _HeaderFromValue:
``HeaderFromValue``
&&&&&&&&&&&&&&&&&&&
.. list-table::
:header-rows: 1
* - Key
- Required
- Schema
- Description
* - name
- true
- String
- Name of the header
* - value
- true
- String
- Value of the header
.. _HeaderFromEnv:
``HeaderFromEnv``
&&&&&&&&&&&&&&&&&
.. list-table::
:header-rows: 1
* - Key
- Required
- Schema
- Description
* - name
- true
- String
- Name of the header
* - value_from_env
- true
- String
- Name of the environment variable which holds the value of the header
.. _EventTriggerColumns:
``EventTriggerColumns``
&&&&&&&&&&&&&&&&&&&&&&&
.. parsed-literal::
:class: haskell-pre
"*" | [:ref:`PGColumn`]

View File

@ -125,12 +125,21 @@ The various types of queries are listed in the following table:
- :ref:`Query <query_syntax>` array
- Execute multiple operations in a single query
* - :ref:`create_event_trigger`
- :ref:`create_event_trigger_args <create_event_trigger_syntax>`
- Create or replace event trigger
* - :ref:`delete_event_trigger`
- :ref:`delete_event_trigger_args <delete_event_trigger_syntax>`
- Delete existing event trigger
**See**
- :doc:`Run SQL <run-sql>`
- :doc:`Tables/Views <table-view>`
- :doc:`Relationships <relationship>`
- :doc:`Permissions <permission>`
- :doc:`Event Triggers <event-triggers>`
Response structure
------------------
@ -191,5 +200,6 @@ Error codes
Tables/Views <table-view>
Relationships <relationship>
Permissions <permission>
Event Triggers <event-triggers>
Syntax definitions <syntax-defs>

View File

@ -99,11 +99,11 @@ Args syntax
- Description
* - table
- true
- :ref:`TableName <TableName>`
- :ref:`TableName`
- Name of the table
* - role
- true
- :ref:`RoleName <RoleName>`
- :ref:`RoleName`
- Role
* - permission
- true
@ -128,7 +128,7 @@ Args syntax
- Description
* - check
- true
- :ref:`BoolExp <BoolExp>`
- :ref:`BoolExp`
- This expression has to hold true for every new row that is inserted
.. _drop_insert_permission:
@ -152,11 +152,11 @@ Args syntax
- Description
* - table
- true
- :ref:`TableName <TableName>`
- :ref:`TableName`
- Name of the table
* - role
- true
- :ref:`RoleName <RoleName>`
- :ref:`RoleName`
- Role
.. _create_select_permission:
@ -213,11 +213,11 @@ Args syntax
- Description
* - table
- true
- :ref:`TableName <TableName>`
- :ref:`TableName`
- Name of the table
* - role
- true
- :ref:`RoleName <RoleName>`
- :ref:`RoleName`
- Role
* - permission
- true
@ -242,11 +242,11 @@ Args syntax
- Description
* - columns
- true
- :ref:`PGColumn <PGColumn>` array (or) ``'*'``
- :ref:`PGColumn` array (or) ``'*'``
- Only these columns are selectable (or all when ``'*'`` is specified)
* - filter
- true
- :ref:`BoolExp <BoolExp>`
- :ref:`BoolExp`
- Only the rows where this expression holds true are selectable
.. _drop_select_permission:
@ -270,11 +270,11 @@ Args syntax
- Description
* - table
- true
- :ref:`TableName <TableName>`
- :ref:`TableName`
- Name of the table
* - role
- true
- :ref:`RoleName <RoleName>`
- :ref:`RoleName`
- Role
.. _create_update_permission:
@ -335,11 +335,11 @@ Args syntax
- Description
* - table
- true
- :ref:`TableName <TableName>`
- :ref:`TableName`
- Name of the table
* - role
- true
- :ref:`RoleName <RoleName>`
- :ref:`RoleName`
- Role
* - permission
- true
@ -364,11 +364,11 @@ Args syntax
- Description
* - columns
- true
- :ref:`PGColumn <PGColumn>` array
- :ref:`PGColumn` array
- Only these columns are updatable
* - filter
- true
- :ref:`BoolExp <BoolExp>`
- :ref:`BoolExp`
- Only the rows where this expression holds true are deletable
.. _drop_update_permission:
@ -392,11 +392,11 @@ Args syntax
- Description
* - table
- true
- :ref:`TableName <TableName>`
- :ref:`TableName`
- Name of the table
* - role
- true
- :ref:`RoleName <RoleName>`
- :ref:`RoleName`
- Role
.. _create_delete_permission:
@ -446,11 +446,11 @@ Args syntax
- Description
* - table
- true
- :ref:`TableName <TableName>`
- :ref:`TableName`
- Name of the table
* - role
- true
- :ref:`RoleName <RoleName>`
- :ref:`RoleName`
- Role
* - permission
- true
@ -475,7 +475,7 @@ Args syntax
- Description
* - filter
- true
- :ref:`BoolExp <BoolExp>`
- :ref:`BoolExp`
- Only the rows where this expression holds true are deletable
.. _drop_delete_permission:
@ -499,11 +499,11 @@ Args syntax
- Description
* - table
- true
- :ref:`TableName <TableName>`
- :ref:`TableName`
- Name of the table
* - role
- true
- :ref:`RoleName <RoleName>`
- :ref:`RoleName`
- Role
.. _set_permission_comment:
@ -547,11 +547,11 @@ Args syntax
- Description
* - table
- true
- :ref:`TableName <TableName>`
- :ref:`TableName`
- Name of the table
* - role
- true
- :ref:`RoleName <RoleName>`
- :ref:`RoleName`
- The role in the permission
* - type
- true

View File

@ -166,11 +166,11 @@ Args syntax
- Description
* - remote_table
- true
- :ref:`TableName <TableName>`
- :ref:`TableName`
- The table to which the relationship has to be established
* - column_mapping
- true
- Object (:ref:`PGColumn <PGColumn>` : :ref:`PGColumn <PGColumn>`)
- Object (:ref:`PGColumn` : :ref:`PGColumn`)
- Mapping of columns from current table to remote table
.. _create_array_relationship:
@ -269,11 +269,11 @@ Args syntax
- Description
* - table
- true
- :ref:`TableName <TableName>`
- :ref:`TableName`
- Name of the table
* - name
- true
- :ref:`RelationshipName <RelationshipName>`
- :ref:`RelationshipName`
- Name of the new relationship
* - using
- true
@ -317,11 +317,11 @@ Args syntax
- Description
* - table
- true
- :ref:`TableName <TableName>`
- :ref:`TableName`
- Name of the table
* - column
- true
- :ref:`PGColumn <PGColumn>`
- :ref:`PGColumn`
- Name of the column with foreign key constraint
``ArrRelUsingManualMapping``
@ -336,11 +336,11 @@ Args syntax
- Description
* - remote_table
- true
- :ref:`TableName <TableName>`
- :ref:`TableName`
- The table to which the relationship has to be established
* - column_mapping
- true
- Object (:ref:`PGColumn <PGColumn>` : :ref:`PGColumn <PGColumn>`)
- Object (:ref:`PGColumn` : :ref:`PGColumn`)
- Mapping of columns from current table to remote table
.. _drop_relationship:
@ -384,11 +384,11 @@ Args syntax
- Description
* - table
- true
- :ref:`TableName <TableName>`
- :ref:`TableName`
- Name of the table
* - name
- true
- :ref:`RelationshipName <RelationshipName>`
- :ref:`RelationshipName`
- Name of the relationship that needs to be dropped
* - cascade
- false
@ -439,11 +439,11 @@ Args syntax
- Description
* - table
- true
- :ref:`TableName <TableName>`
- :ref:`TableName`
- Name of the table
* - name
- true
- :ref:`RelationshipName <RelationshipName>`
- :ref:`RelationshipName`
- The relationship
* - comment
- false

View File

@ -3,7 +3,7 @@ Guides: Integration/migration tutorials
Articles:
^^^^^^^^^
- `Move from firebase to realtime GraphQL on Postgres <https://blog.hasura.io/firebase2graphql-moving-from-firebase-to-realtime-graphql-on-postgres-4d36cb7f4eaf>`_.
- `Create a Gatsby site using GraphQL on Postgres <https://blog.hasura.io/create-gatsby-sites-using-graphql-on-postgres-603b5dd1e516>`_.
- `Instant GraphQL on AWS RDS <https://blog.hasura.io/instant-graphql-on-aws-rds-1edfb85b5985>`_.
- `Using TimescaleDB with Hasura GraphQL <https://blog.hasura.io/using-timescaledb-with-hasura-graphql-d05f030c4b10>`_.
- `Move from firebase to realtime GraphQL on Postgres <https://blog.hasura.io/firebase2graphql-moving-from-firebase-to-realtime-graphql-on-postgres-4d36cb7f4eaf>`__.
- `Create a Gatsby site using GraphQL on Postgres <https://blog.hasura.io/create-gatsby-sites-using-graphql-on-postgres-603b5dd1e516>`__.
- `Instant GraphQL on AWS RDS <https://blog.hasura.io/instant-graphql-on-aws-rds-1edfb85b5985>`__.
- `Using TimescaleDB with Hasura GraphQL <https://blog.hasura.io/using-timescaledb-with-hasura-graphql-d05f030c4b10>`__.

View File

@ -6,5 +6,5 @@ monitoring frameworks:
Articles:
^^^^^^^^^
- `GraphQL Observability with Hasura GraphQL Engine and Honeycomb <https://blog.hasura.io/graphql-observability-with-hasura-graphql-engine-and-honeycomb-ee0a1a836c41>`_
- `Uptime Monitoring for Hasura GraphQL Engine with DataDog on GKE <https://blog.hasura.io/uptime-monitoring-for-hasura-graphql-engine-with-datadog-on-gke-4faff5832e7f>`_
- `GraphQL Observability with Hasura GraphQL Engine and Honeycomb <https://blog.hasura.io/graphql-observability-with-hasura-graphql-engine-and-honeycomb-ee0a1a836c41>`__
- `Uptime Monitoring for Hasura GraphQL Engine with DataDog on GKE <https://blog.hasura.io/uptime-monitoring-for-hasura-graphql-engine-with-datadog-on-gke-4faff5832e7f>`__

View File

@ -379,7 +379,7 @@ buildSchemaCache = flip execStateT emptySchemaCache $ do
forM_ eventTriggers $ \(sn, tn, trid, trn, Q.AltJ tDefVal, webhook, nr, rint, Q.AltJ mheaders) -> do
let headerConfs = fromMaybe [] mheaders
qt = QualifiedTable sn tn
allCols <- (getCols . tiFieldInfoMap) <$> askTabInfo qt
allCols <- getCols . tiFieldInfoMap <$> askTabInfo qt
headers <- getHeadersFromConf headerConfs
tDef <- decodeValue tDefVal
addEventTriggerToCache (QualifiedTable sn tn) trid trn tDef (RetryConf nr rint) webhook headers

View File

@ -21,10 +21,8 @@ import qualified Hasura.SQL.DML as S
import qualified Data.FileEmbed as FE
import qualified Data.HashMap.Strict as HashMap
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Database.PG.Query as Q
data Ops = INSERT | UPDATE | DELETE deriving (Show)
data OpVar = OLD | NEW deriving (Show)
@ -63,11 +61,13 @@ getTriggerSql op trid trn qt allCols spec =
in
spec >> renderGingerTmplt context <$> triggerTmplt
where
createOpCtx op1 (SubscribeOpSpec columns) =
createOpCtx op1 (SubscribeOpSpec columns payload) =
HashMap.fromList
[ (T.pack "OPERATION", T.pack $ show op1)
, (T.pack "OLD_DATA_EXPRESSION", toSQLTxt $ renderOldDataExp op1 columns )
, (T.pack "NEW_DATA_EXPRESSION", toSQLTxt $ renderNewDataExp op1 columns )
, (T.pack "OLD_ROW", toSQLTxt $ renderRow OLD columns )
, (T.pack "NEW_ROW", toSQLTxt $ renderRow NEW columns )
, (T.pack "OLD_PAYLOAD_EXPRESSION", toSQLTxt $ renderOldDataExp op1 $ fromMaybePayload payload )
, (T.pack "NEW_PAYLOAD_EXPRESSION", toSQLTxt $ renderNewDataExp op1 $ fromMaybePayload payload )
]
renderOldDataExp op2 scs =
case op2 of
@ -87,6 +87,7 @@ getTriggerSql op trid trn qt allCols spec =
getColInfos cols allCols
applyRowToJson e = S.SEFnApp "row_to_json" [e] Nothing
applyRow e = S.SEFnApp "row" [e] Nothing
toExtr = flip S.Extractor Nothing
mkQId opVar colInfo = toJSONableExp (pgiType colInfo) $
S.SEQIden $ S.QIden (opToQual opVar) $ toIden $ pgiName colInfo
@ -94,6 +95,14 @@ getTriggerSql op trid trn qt allCols spec =
opToQual = S.QualVar . opToTxt
opToTxt = T.pack . show
renderRow opVar scs =
case scs of
SubCStar -> applyRow $ S.SEUnsafe $ opToTxt opVar
SubCArray cols -> applyRow $
S.mkRowExp $ map (toExtr . mkQId opVar) $
getColInfos cols allCols
fromMaybePayload = fromMaybe SubCStar
mkTriggerQ
:: TriggerId
@ -108,7 +117,7 @@ mkTriggerQ trid trn qt allCols (TriggerOpsDef insert update delete) = do
<> getTriggerSql DELETE trid trn qt allCols delete
case msql of
Just sql -> Q.multiQE defaultTxErrorHandler (Q.fromText sql)
Nothing -> throw500 "no trigger sql generated"
Nothing -> throw500 "no trigger sql generated"
addEventTriggerToCatalog
:: QualifiedTable
@ -176,7 +185,12 @@ fetchEventTrigger trn = do
getTrigger triggers
where
getTrigger [] = throw400 NotExists ("could not find event trigger '" <> trn <> "'")
getTrigger (x:_) = return $ EventTrigger (QualifiedTable sn tn) trn' tDef webhook (RetryConf nr rint)
getTrigger (x:_) = return $ EventTrigger
(QualifiedTable sn tn)
trn'
tDef
webhook
(RetryConf nr rint)
where (sn, tn, trn', Q.AltJ tDef, webhook, nr, rint) = x
fetchEvent :: EventId -> Q.TxE QErr (EventId, Bool)
@ -234,7 +248,7 @@ subTableP1 (CreateEventTriggerQuery name qt insert update delete retryConf webho
subTableP2 :: (P2C m) => QualifiedTable -> Bool -> EventTriggerDef -> m ()
subTableP2 qt replace q@(EventTriggerDef name def webhook rconf mheaders) = do
allCols <- (getCols . tiFieldInfoMap) <$> askTabInfo qt
allCols <- getCols . tiFieldInfoMap <$> askTabInfo qt
trid <- if replace
then do
delEventTriggerFromCache qt name

View File

@ -425,7 +425,7 @@ data ViewInfo
$(deriveToJSON (aesonDrop 2 snakeCase) ''ViewInfo)
isMutable :: (ViewInfo -> Bool) -> Maybe ViewInfo -> Bool
isMutable _ Nothing = True
isMutable _ Nothing = True
isMutable f (Just vi) = f vi
mutableView :: (MonadError QErr m) => QualifiedTable
@ -786,11 +786,20 @@ getOpInfo trn ti mos= fromSubscrOpSpec <$> mos
fromSubscrOpSpec :: SubscribeOpSpec -> OpTriggerInfo
fromSubscrOpSpec os =
let qt = tiName ti
tableDep = SchemaDependency (SOTable qt) ("event trigger " <> trn <> " is dependent on table")
cols = getColsFromSub $ sosColumns os
schemaDeps = SchemaDependency (SOTable qt) "event trigger is dependent on table"
: map (\col -> SchemaDependency (SOTableObj qt (TOCol col)) "event trigger is dependent on column") (toList cols)
colDeps = map (\col ->
SchemaDependency (SOTableObj qt (TOCol col))
("event trigger " <> trn <> " is dependent on column " <> getPGColTxt col))
(toList cols)
payload = maybe HS.empty getColsFromSub (sosPayload os)
payloadDeps = map (\col ->
SchemaDependency (SOTableObj qt (TOCol col))
("event trigger " <> trn <> " is dependent on column " <> getPGColTxt col))
(toList payload)
schemaDeps = tableDep : colDeps ++ payloadDeps
in OpTriggerInfo qt trn os schemaDeps
where
getColsFromSub sc = case sc of
SubCStar -> HS.fromList $ map pgiName $ getCols $ tiFieldInfoMap ti
SubCStar -> HS.fromList []
SubCArray pgcols -> HS.fromList pgcols

View File

@ -8,6 +8,7 @@ module Hasura.RQL.Types.Subscribe
, SubscribeColumns(..)
, TriggerName
, TriggerId
, Ops(..)
, EventId
, TriggerOpsDef(..)
, EventTrigger(..)
@ -35,6 +36,8 @@ type TriggerId = T.Text
type EventId = T.Text
type HeaderName = T.Text
data Ops = INSERT | UPDATE | DELETE deriving (Show)
data SubscribeColumns = SubCStar | SubCArray [PGCol] deriving (Show, Eq, Lift)
instance FromJSON SubscribeColumns where
@ -51,6 +54,7 @@ instance ToJSON SubscribeColumns where
data SubscribeOpSpec
= SubscribeOpSpec
{ sosColumns :: !SubscribeColumns
, sosPayload :: !(Maybe SubscribeColumns)
} deriving (Show, Eq, Lift)
$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''SubscribeOpSpec)
@ -117,7 +121,14 @@ instance FromJSON CreateEventTriggerQuery where
case insert <|> update <|> delete of
Just _ -> return ()
Nothing -> fail "must provide operation spec(s)"
mapM_ checkEmptyCols [insert, update, delete]
return $ CreateEventTriggerQuery name table insert update delete retryConf webhook headers replace
where
checkEmptyCols spec
= case spec of
Just (SubscribeOpSpec (SubCArray cols) _) -> when (null cols) (fail "found empty column specification")
Just (SubscribeOpSpec _ (Just (SubCArray cols)) ) -> when (null cols) (fail "found empty payload specification")
_ -> return ()
parseJSON _ = fail "expecting an object"
$(deriveToJSON (aesonDrop 4 snakeCase){omitNothingFields=True} ''CreateEventTriggerQuery)

View File

@ -2,23 +2,35 @@ CREATE OR REPLACE function hdb_views.notify_hasura_{{NAME}}_{{OPERATION}}() RETU
LANGUAGE plpgsql
AS $$
DECLARE
payload json;
_data json;
id text;
_old record;
_new record;
_data json;
payload json;
BEGIN
id := gen_random_uuid();
IF TG_OP = 'UPDATE' THEN
_old := {{OLD_ROW}};
_new := {{NEW_ROW}};
ELSE
/* initialize _old and _new with dummy values */
_old := row((select 1));
_new := row((select 1));
END IF;
_data := json_build_object(
'old', {{OLD_DATA_EXPRESSION}},
'new', {{NEW_DATA_EXPRESSION}}
'old', {{OLD_PAYLOAD_EXPRESSION}},
'new', {{NEW_PAYLOAD_EXPRESSION}}
);
payload := json_build_object(
'op', TG_OP,
'data', _data
)::text;
INSERT INTO
hdb_catalog.event_log (id, schema_name, table_name, trigger_name, trigger_id, payload)
VALUES
(id, TG_TABLE_SCHEMA, TG_TABLE_NAME, '{{NAME}}', '{{ID}}', payload);
IF (TG_OP <> 'UPDATE') OR (_old <> _new) THEN
INSERT INTO
hdb_catalog.event_log (id, schema_name, table_name, trigger_name, trigger_id, payload)
VALUES
(id, TG_TABLE_SCHEMA, TG_TABLE_NAME, '{{NAME}}', '{{ID}}', payload);
END IF;
RETURN NULL;
END;
$$;

View File

@ -5,8 +5,7 @@ args:
sql: |
create table hge_tests.test_t1(
c1 int,
c2 text,
c3 text
c2 text
);
- type: track_table
args:
@ -19,11 +18,11 @@ args:
schema: hge_tests
name: test_t1
insert:
columns: ["c2"]
columns: "*"
update:
columns: ["c1"]
delete:
columns: ["c1", "c2"]
columns: "*"
webhook: http://127.0.0.1:5592
retry_conf:
num_retries: 10

View File

@ -5,8 +5,7 @@ args:
sql: |
create table hge_tests.test_t1(
c1 int,
c2 text,
c3 text
c2 text
);
- type: track_table
args:
@ -14,16 +13,19 @@ args:
name: test_t1
- type: create_event_trigger
args:
name: t1_empty
name: t1_payload
table:
schema: hge_tests
name: test_t1
insert:
columns: []
columns: "*"
payload: "*"
update:
columns: []
columns: "*"
payload: ["c1"]
delete:
columns: []
columns: "*"
payload: ["c2"]
webhook: http://127.0.0.1:5592
retry_conf:
num_retries: 10

View File

@ -2,7 +2,7 @@ type: bulk
args:
- type: delete_event_trigger
args:
name: t1_empty
name: t1_payload
- type: run_sql
args:
sql: |

View File

@ -18,10 +18,7 @@ args:
schema: hge_tests
name: test_t1
insert:
columns: ["c2"]
columns: "*"
update:
columns: ["c1"]
columns: ["c2"]
webhook: http://127.0.0.1:5592
retry_conf:
num_retries: 5
interval_sec: 5

View File

@ -9,7 +9,7 @@ args:
insert:
columns: ["c1"]
update:
columns: ["c2"]
columns: ["c1"]
delete:
columns: "*"
webhook: http://127.0.0.1:5592/new

View File

@ -98,17 +98,6 @@ class TestCreateEvtQuery(object):
assert st_code == 200, resp
check_event(hge_ctx, "t1_all", table, "DELETE", exp_ev_data, headers, "/")
def test_basic_dep(self,hge_ctx):
st_code, resp = hge_ctx.v1q({
"type": "run_sql",
"args": {
"sql": "alter table hge_tests.test_t1 drop column c1"
}
})
assert st_code == 400, resp
assert resp['code'] == "dependency-error", resp
class TestRetryConf(object):
@pytest.fixture(autouse=True)
@ -179,7 +168,7 @@ class TestUpdateEvtQuery(object):
init_row = {"c1" : 1, "c2" : "hello"}
exp_ev_data = {
"old": None,
"new": {"c1": 1}
"new": {"c1": 1, "c2": "hello"}
}
headers = {}
st_code, resp = insert(hge_ctx, table, init_row)
@ -188,16 +177,25 @@ class TestUpdateEvtQuery(object):
where_exp = {"c1": 1}
set_exp = {"c2" : "world"}
# expected no event hence previous expected data
st_code, resp = update(hge_ctx, table, where_exp, set_exp)
assert st_code == 200, resp
with pytest.raises(queue.Empty):
check_event(hge_ctx, "t1_cols", table, "UPDATE", exp_ev_data, headers, "/new")
where_exp = {"c1": 1}
set_exp = {"c1" : 2}
exp_ev_data = {
"old": {"c2" : "hello"},
"new": {"c2" : "world"}
"old": {"c1" : 1, "c2": "world"},
"new": {"c1" : 2, "c2": "world"}
}
st_code, resp = update(hge_ctx, table, where_exp, set_exp)
assert st_code == 200, resp
check_event(hge_ctx, "t1_cols", table, "UPDATE", exp_ev_data, headers, "/new")
where_exp = {"c1": 2}
exp_ev_data = {
"old": {"c1" : 1, "c2" : "world"},
"old": {"c1" : 2, "c2" : "world"},
"new": None
}
st_code, resp = delete(hge_ctx, table, where_exp)
@ -270,7 +268,7 @@ class TestEvtSelCols:
init_row = {"c1" : 1, "c2" : "hello"}
exp_ev_data = {
"old": None,
"new": {"c2": "hello"}
"new": {"c1": 1, "c2": "hello"}
}
headers = {}
st_code, resp = insert(hge_ctx, table, init_row)
@ -279,16 +277,25 @@ class TestEvtSelCols:
where_exp = {"c1": 1}
set_exp = {"c2" : "world"}
# expected no event hence previous expected data
st_code, resp = update(hge_ctx, table, where_exp, set_exp)
assert st_code == 200, resp
with pytest.raises(queue.Empty):
check_event(hge_ctx, "t1_cols", table, "UPDATE", exp_ev_data, headers, "/")
where_exp = {"c1": 1}
set_exp = {"c1" : 2}
exp_ev_data = {
"old": {"c1" : 1},
"new": {"c1" : 1}
"old": {"c1" : 1, "c2": "world"},
"new": {"c1" : 2, "c2": "world"}
}
st_code, resp = update(hge_ctx, table, where_exp, set_exp)
assert st_code == 200, resp
check_event(hge_ctx, "t1_cols", table, "UPDATE", exp_ev_data, headers, "/")
where_exp = {"c1": 2}
exp_ev_data = {
"old": {"c1" : 1, "c2" : "world"},
"old": {"c1" : 2, "c2" : "world"},
"new": None
}
st_code, resp = delete(hge_ctx, table, where_exp)
@ -313,62 +320,8 @@ class TestEvtSelCols:
"sql": "alter table hge_tests.test_t1 drop column c2"
}
})
assert st_code == 400, resp
assert resp['code'] == "dependency-error", resp
st_code, resp = hge_ctx.v1q({
"type": "run_sql",
"args": {
"sql": "alter table hge_tests.test_t1 drop column c3"
}
})
assert st_code == 200, resp
class TestEvtEmptyCols:
@pytest.fixture(autouse=True)
def transact(self, request, hge_ctx):
print ("In setup method")
st_code, resp = hge_ctx.v1q_f('queries/event_triggers/empty_cols/setup.yaml')
assert st_code == 200, resp
yield
st_code, resp = hge_ctx.v1q_f('queries/event_triggers/empty_cols/teardown.yaml')
assert st_code == 200, resp
def test_empty_cols(self, hge_ctx):
table = {"schema" : "hge_tests", "name": "test_t1"}
init_row = {"c1" : 1, "c2" : "hello"}
exp_ev_data = {
"old": None,
"new": {}
}
headers = {}
st_code, resp = insert(hge_ctx, table, init_row)
assert st_code == 200, resp
check_event(hge_ctx, "t1_empty", table, "INSERT", exp_ev_data, headers, "/")
where_exp = {"c1": 1}
set_exp = {"c2" : "world"}
exp_ev_data = {
"old": {},
"new": {}
}
st_code, resp = update(hge_ctx, table, where_exp, set_exp)
assert st_code == 200, resp
check_event(hge_ctx, "t1_empty", table, "UPDATE", exp_ev_data, headers, "/")
exp_ev_data = {
"old": {},
"new": None
}
st_code, resp = delete(hge_ctx, table, where_exp)
assert st_code == 200, resp
check_event(hge_ctx, "t1_empty", table, "DELETE", exp_ev_data, headers, "/")
class TestEvtInsertOnly:
@pytest.fixture(autouse=True)
@ -415,3 +368,76 @@ class TestEvtInsertOnly:
with pytest.raises(queue.Empty):
check_event(hge_ctx, "t1_insert", table, "DELETE", exp_ev_data, headers, "/")
class TestEvtSelPayload:
@pytest.fixture(autouse=True)
def transact(self, request, hge_ctx):
print ("In setup method")
st_code, resp = hge_ctx.v1q_f('queries/event_triggers/selected_payload/setup.yaml')
assert st_code == 200, resp
yield
st_code, resp = hge_ctx.v1q_f('queries/event_triggers/selected_payload/teardown.yaml')
assert st_code == 200, resp
def test_selected_payload(self, hge_ctx):
table = {"schema" : "hge_tests", "name": "test_t1"}
init_row = {"c1" : 1, "c2" : "hello"}
exp_ev_data = {
"old": None,
"new": {"c1": 1, "c2": "hello"}
}
headers = {}
st_code, resp = insert(hge_ctx, table, init_row)
assert st_code == 200, resp
check_event(hge_ctx, "t1_payload", table, "INSERT", exp_ev_data, headers, "/")
where_exp = {"c1": 1}
set_exp = {"c2" : "world"}
exp_ev_data = {
"old": {"c1": 1},
"new": {"c1": 1}
}
st_code, resp = update(hge_ctx, table, where_exp, set_exp)
assert st_code == 200, resp
check_event(hge_ctx, "t1_payload", table, "UPDATE", exp_ev_data, headers, "/")
where_exp = {"c1": 1}
set_exp = {"c1" : 2}
exp_ev_data = {
"old": {"c1": 1},
"new": {"c1": 2}
}
st_code, resp = update(hge_ctx, table, where_exp, set_exp)
assert st_code == 200, resp
check_event(hge_ctx, "t1_payload", table, "UPDATE", exp_ev_data, headers, "/")
where_exp = {"c1": 2}
exp_ev_data = {
"old": {"c2" : "world"},
"new": None
}
st_code, resp = delete(hge_ctx, table, where_exp)
assert st_code == 200, resp
check_event(hge_ctx, "t1_payload", table, "DELETE", exp_ev_data, headers, "/")
def test_selected_payload_dep(self, hge_ctx):
st_code, resp = hge_ctx.v1q({
"type": "run_sql",
"args": {
"sql": "alter table hge_tests.test_t1 drop column c1"
}
})
assert st_code == 400, resp
assert resp['code'] == "dependency-error", resp
st_code, resp = hge_ctx.v1q({
"type": "run_sql",
"args": {
"sql": "alter table hge_tests.test_t1 drop column c2"
}
})
assert st_code == 400, resp
assert resp['code'] == "dependency-error", resp