server/mssql: fix handling of special language characters in event trigger payload

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/7448
GitOrigin-RevId: d87357fe2a54d755c893938db5fb6193316b0b79
Naveen Naidu 2023-01-10 14:19:32 +05:30 committed by hasura-bot
parent cccc0dc309
commit 99fb6ac22f
5 changed files with 222 additions and 13 deletions

View File

@@ -26,8 +26,13 @@ $ export HASURA_BIGQUERY_PROJECT_ID=??? # The project ID
$ export HASURA_BIGQUERY_SERVICE_KEY=??? # The service account key
```
-After that, BigQuery will be ready to test. For everything else, run
-`docker-compose up` in the root of `graphql-engine`.
+After that, BigQuery will be ready to test.
+For everything else, run the following in the root of `graphql-engine`:
+```
+$ docker-compose up
+```
_Note to Hasura team: a service account is already set up for internal use,
please check the wiki for further details._
@@ -37,29 +42,29 @@ please check the wiki for further details._
To run all the tests, execute the following command:
```bash
-$ cabal run api-tests
+$ cabal run api-tests:exe:api-tests
```
To run only tests whose name contains a certain string, use the `-m` or
`--match=` flag:
```sh
-$ cabal run api-tests -- -m "SQLServer" # SQLServer tests only
-$ cabal run api-tests -- --match="Views" # All tests concerning views
+$ cabal run api-tests:exe:api-tests -- -m "SQLServer" # SQLServer tests only
+$ cabal run api-tests:exe:api-tests -- --match="Views" # All tests concerning views
```
The opposite flag is `-s` or `--skip=`, which will ignore tests containing the
given string:
```sh
-$ cabal run api-tests -- -s "BigQuery" # Skip BigQuery tests
-$ cabal run api-tests -- --skip="Mutations" # Skip tests around mutations
+$ cabal run api-tests:exe:api-tests -- -s "BigQuery" # Skip BigQuery tests
+$ cabal run api-tests:exe:api-tests -- --skip="Mutations" # Skip tests around mutations
```
For additional information, consult the help section:
```bash
-cabal run api-tests -- --help
+cabal run api-tests:exe:api-tests -- --help
```
The local databases persist even after shutting down the containers. If this is

View File

@@ -104,6 +104,7 @@ library
Test.DataConnector.MockAgent.TransformedConfigurationSpec
Test.DataConnector.QuerySpec
Test.DataConnector.SelectPermissionsSpec
+Test.EventTriggers.EventTriggersSpecialCharactersSpec
Test.EventTriggers.MSSQL.EventTriggerDropSourceCleanupSpec
Test.EventTriggers.MSSQL.EventTriggersForReplicationSpec
Test.EventTriggers.MSSQL.EventTriggersUniqueNameSpec

View File

@@ -0,0 +1,198 @@
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE ViewPatterns #-}
-- | Test that special characters are handled correctly in the event trigger payload
module Test.EventTriggers.EventTriggersSpecialCharactersSpec (spec) where
import Control.Concurrent.Chan qualified as Chan
import Data.List.NonEmpty qualified as NE
import Harness.Backend.Postgres qualified as Postgres
import Harness.Backend.Sqlserver qualified as Sqlserver
import Harness.GraphqlEngine qualified as GraphqlEngine
import Harness.Quoter.Yaml
import Harness.Test.BackendType qualified as BackendType
import Harness.Test.Fixture qualified as Fixture
import Harness.Test.Schema qualified as Schema
import Harness.Test.SetupAction (permitTeardownFail)
import Harness.TestEnvironment (GlobalTestEnvironment, TestEnvironment, getBackendTypeConfig)
import Harness.Webhook qualified as Webhook
import Harness.Yaml (shouldBeYaml, shouldReturnYaml)
import Hasura.Prelude
import System.Timeout (timeout)
import Test.HUnit.Base (assertFailure)
import Test.Hspec (SpecWith, describe, it)
--------------------------------------------------------------------------------
-- Preamble
spec :: SpecWith GlobalTestEnvironment
spec =
Fixture.runWithLocalTestEnvironment
( NE.fromList
[ (Fixture.fixture $ Fixture.Backend Sqlserver.backendTypeMetadata)
{ -- set up the webhook server as the local test environment,
-- so that the server can be referenced while testing
Fixture.mkLocalTestEnvironment = const Webhook.run,
Fixture.setupTeardown = \(testEnvironment, (webhookServer, _)) ->
[ permitTeardownFail (Sqlserver.setupTablesAction schema testEnvironment),
Fixture.SetupAction
{ Fixture.setupAction = dbSetup testEnvironment webhookServer,
Fixture.teardownAction = \_ -> pure ()
}
]
},
(Fixture.fixture $ Fixture.Backend Postgres.backendTypeMetadata)
{ -- set up the webhook server as the local test environment,
-- so that the server can be referenced while testing
Fixture.mkLocalTestEnvironment = const Webhook.run,
Fixture.setupTeardown = \(testEnvironment, (webhookServer, _)) ->
[ permitTeardownFail (Postgres.setupTablesAction schema testEnvironment),
Fixture.SetupAction
{ Fixture.setupAction = dbSetup testEnvironment webhookServer,
Fixture.teardownAction = \_ -> pure ()
}
]
}
]
)
tests
--------------------------------------------------------------------------------
-- * Backend
-- ** Schema
-- | Create a dummy table, since schema creation has been moved into the
-- 'create_table' function; a table is therefore necessary to create the schema for the test.
dummyTable :: Schema.Table
dummyTable =
(Schema.table "dummy_table")
{ Schema.tableColumns =
[Schema.column "dummy_column" Schema.TStr],
Schema.tablePrimaryKey = ["dummy_column"]
}
schema :: [Schema.Table]
schema = [dummyTable]
--------------------------------------------------------------------------------
-- Tests
tests :: Fixture.Options -> SpecWith (TestEnvironment, (GraphqlEngine.Server, Webhook.EventsQueue))
tests opts = do
handleSpecialCharsInEventTriggersPayload opts
handleSpecialCharsInEventTriggersPayload :: Fixture.Options -> SpecWith (TestEnvironment, (GraphqlEngine.Server, Webhook.EventsQueue))
handleSpecialCharsInEventTriggersPayload opts =
describe "special characters of different languages in event trigger payload are encoded in UTF-8" do
it "check: inserting a new row invokes a event trigger" $
\(testEnvironment, (_, (Webhook.EventsQueue eventsQueue))) -> do
let backendTypeMetadata = fromMaybe (error "Expected a backend type but got nothing") $ getBackendTypeConfig testEnvironment
sourceName = BackendType.backendSourceName backendTypeMetadata
schemaName = Schema.getSchemaName testEnvironment
-- TODO: backendPrefixRunSql will no longer be needed, once
-- https://github.com/hasura/graphql-engine-mono/pull/7465 is merged.
backendPrefixRunSql =
case BackendType.backendTypeString backendTypeMetadata of
"pg" -> ""
x -> x <> "_"
insertQuery =
[interpolateYaml|
type: #{backendPrefixRunSql}run_sql
args:
source: #{sourceName}
sql: "INSERT INTO #{schemaName}.authors (id, specialøñámé) values (3, 'john')"
|]
expectedResponse =
[yaml|
result_type: CommandOk
result: null
|]
-- The column name that has special language characters is displayed
-- correctly in the payload
expectedEventPayload =
[yaml|
old: null
new:
specialøñámé: john
id: 3
|]
-- Insert a row into the table with event trigger
shouldReturnYaml
opts
(GraphqlEngine.postV2Query 200 testEnvironment insertQuery)
expectedResponse
-- Check if there was a payload generated due to the insert statement
eventPayload <-
-- wait for the event for a maximum of 5 seconds
timeout (5 * 1000000) (Chan.readChan eventsQueue)
>>= (`onNothing` (assertFailure "Event expected, but not fired"))
eventPayload `shouldBeYaml` expectedEventPayload
--------------------------------------------------------------------------------
-- ** Setup and teardown override
dbSetup :: TestEnvironment -> GraphqlEngine.Server -> IO ()
dbSetup testEnvironment webhookServer = do
let backendTypeMetadata = fromMaybe (error "Expected a backend type but got nothing") $ getBackendTypeConfig testEnvironment
schemaName = Schema.getSchemaName testEnvironment
sourceName = BackendType.backendSourceName backendTypeMetadata
backendPrefix = BackendType.backendTypeString backendTypeMetadata
-- TODO: backendPrefixRunSql will no longer be needed, once
-- https://github.com/hasura/graphql-engine-mono/pull/7465 is merged.
backendPrefixRunSql =
case BackendType.backendTypeString backendTypeMetadata of
"pg" -> ""
x -> x <> "_"
webhookServerEchoEndpoint = GraphqlEngine.serverUrl webhookServer ++ "/echo"
-- Create table via run_sql
GraphqlEngine.postV2Query_
testEnvironment
[interpolateYaml|
type: #{backendPrefixRunSql}run_sql
args:
source: #{sourceName}
sql: |
CREATE TABLE #{schemaName}.authors (
id INT PRIMARY KEY,
specialøñámé VARCHAR(255)
);
|]
-- Track the table, using a custom_name for the special-character column, since
-- the GraphQL spec does not support special characters in names
GraphqlEngine.postMetadata_ testEnvironment $
[interpolateYaml|
type: bulk
args:
- type: #{backendPrefix}_track_table
args:
source: #{sourceName}
table:
schema: #{schemaName}
name: authors
configuration:
column_config:
specialøñámé:
custom_name: special_
- type: #{backendPrefix}_create_event_trigger
args:
name: authors_all
source: #{sourceName}
table:
name: authors
schema: #{schemaName}
webhook: #{webhookServerEchoEndpoint}
insert:
columns: "*"
|]

View File

@@ -31,12 +31,13 @@ where
import Control.Monad.Trans.Control (MonadBaseControl)
import Data.Aeson qualified as J
import Data.ByteString qualified as B
-import Data.ByteString.Lazy qualified as BL
+import Data.ByteString.Lazy (fromStrict)
import Data.FileEmbed (makeRelativeToProject)
import Data.HashMap.Strict qualified as Map
import Data.HashSet qualified as HashSet
import Data.Set.NonEmpty qualified as NE
import Data.Text qualified as T
+import Data.Text.Encoding qualified as TE
import Data.Text.Extended (ToTxt, commaSeparated, toTxt)
import Data.Text.Lazy qualified as LT
import Data.Text.NonEmpty (mkNonEmptyTextUnsafe)
@@ -444,7 +445,7 @@ fetchEvents source triggerNames (FetchBatchSize fetchBatchSize) = do
-- 'IN' MSSQL operator.
triggerNamesTxt = "(" <> commaSeparated (map (\t -> "'" <> toTxt t <> "'") triggerNames) <> ")"
-uncurryEvent (id', sn, tn, trn, payload' :: BL.ByteString, tries, created_at :: B.ByteString, next_retry_at :: Maybe B.ByteString) = do
+uncurryEvent (id', sn, tn, trn, payload' :: Text, tries, created_at :: B.ByteString, next_retry_at :: Maybe B.ByteString) = do
payload <- encodePayload payload'
createdAt <- convertTime created_at
retryAt <- traverse convertTime next_retry_at
@@ -469,10 +470,14 @@ fetchEvents source triggerNames (FetchBatchSize fetchBatchSize) = do
-- We ensure that the values in 'hdb_catalog.event_log' are always JSON by
-- using the 'FOR JSON PATH' MSSQL clause when inserting values into the
-- 'hdb_catalog.event_log' table.
-encodePayload :: (J.FromJSON a, QErrM m) => BL.ByteString -> m a
+encodePayload :: (J.FromJSON a, QErrM m) => Text -> m a
encodePayload payload =
onLeft
-(J.eitherDecode payload)
+-- The NVARCHAR column has UTF-16 or UCS-2 encoding. Ref: https://learn.microsoft.com/en-us/sql/t-sql/data-types/nchar-and-nvarchar-transact-sql?view=sql-server-ver16#nvarchar---n--max--
+-- But JSON strings are expected to have UTF-8 encoding as per spec. Ref: https://www.rfc-editor.org/rfc/rfc8259#section-8.1
+-- Hence it's important to encode the payload into UTF-8, otherwise the decoding
+-- of text to JSON will fail.
+(J.eitherDecode $ fromStrict $ TE.encodeUtf8 payload)
(\_ -> throw500 $ T.pack "payload decode failed while fetching MSSQL events")
-- Note: The ODBC server does not have a FromJSON instance of UTCTime and only
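Taken in isolation, the new decoding path amounts to the minimal, runnable sketch below. This is not the engine's actual code: `decodePayload` is a hypothetical name, and the sketch assumes only what the comment above states, namely that ODBC surfaces the NVARCHAR payload as Haskell `Text` and that Aeson consumes UTF-8 bytes.

```haskell
{-# LANGUAGE ImportQualifiedPost #-}
{-# LANGUAGE OverloadedStrings #-}

import Data.Aeson qualified as J
import Data.ByteString.Lazy (fromStrict)
import Data.Text (Text)
import Data.Text.Encoding qualified as TE

-- ODBC hands the NVARCHAR payload back as 'Text' (already decoded from
-- UTF-16); Aeson expects UTF-8 bytes, so re-encode before decoding.
decodePayload :: Text -> Either String J.Value
decodePayload = J.eitherDecode . fromStrict . TE.encodeUtf8

main :: IO ()
main = print (decodePayload "{\"specialøñámé\": \"john\"}")
```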

View File

@@ -1,6 +1,6 @@
UPDATE hdb_catalog.event_log
SET locked = SYSDATETIMEOFFSET()
-OUTPUT CONVERT(varchar(MAX), inserted.id), inserted.schema_name, inserted.table_name, inserted.trigger_name, CONVERT(varchar(MAX), inserted.payload), inserted.tries, CONVERT(varchar(MAX), inserted.created_at), CONVERT(varchar(MAX), inserted.next_retry_at)
+OUTPUT CONVERT(varchar(MAX), inserted.id), inserted.schema_name, inserted.table_name, inserted.trigger_name, inserted.payload, inserted.tries, CONVERT(varchar(MAX), inserted.created_at), CONVERT(varchar(MAX), inserted.next_retry_at)
WHERE id in
(SELECT TOP(#{fetchBatchSize}) l.id
FROM hdb_catalog.event_log l WITH (UPDLOCK, READPAST)
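For context on why the `CONVERT(varchar(MAX), inserted.payload)` call had to go: `varchar` is a single-byte-codepage type, so converting the `NVARCHAR` payload to it can replace any character outside the database's codepage before the payload ever reaches the engine. Below is a rough, runnable Haskell analogy of that loss versus the lossless UTF-8 round trip; the function name and the ASCII-only codepage are illustrative assumptions, not from the codebase.

```haskell
{-# LANGUAGE ImportQualifiedPost #-}

import Data.Char (ord)
import Data.Text qualified as T
import Data.Text.Encoding qualified as TE

-- Crude stand-in for CONVERT(varchar, ...) under a codepage that lacks the
-- payload's characters: anything it cannot represent becomes '?'.
toVarcharLossy :: T.Text -> T.Text
toVarcharLossy = T.map (\c -> if ord c < 128 then c else '?')

main :: IO ()
main = do
  let payload = T.pack "{\"specialøñámé\": \"john\"}"
  -- Keeping the column as NVARCHAR and round-tripping through UTF-8 is lossless.
  print (TE.decodeUtf8 (TE.encodeUtf8 payload) == payload) -- True
  -- The old CONVERT-to-varchar path could mangle the special characters.
  putStrLn (T.unpack (toVarcharLossy payload))             -- non-ASCII chars become '?'
```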