server/mssql: respect custom field names in delete, insert and update mutations
PR-URL: https://github.com/hasura/graphql-engine-mono/pull/3987 GitOrigin-RevId: c1c4e32c7553e9f1febd55cd4ed49d8c1a83ea03
This commit is contained in: parent ccea1da1d5, commit f06bff4008
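The gist of the fix: the MSSQL translation layer was deriving SQL column names from IR.ciName (the GraphQL field name, which may be a custom name configured via table customization) instead of IR.ciColumn (the actual database column). A minimal sketch of that distinction, using simplified stand-in types for illustration only (these are not the real Hasura ColumnInfo/ColumnName definitions):

import Data.Text (Text)

-- Hypothetical, simplified stand-ins.
newtype GraphQLName = GraphQLName Text
newtype ColumnName = ColumnName Text

data ColumnInfo = ColumnInfo
  { ciName :: GraphQLName, -- field name exposed in the GraphQL schema (may be a custom name)
    ciColumn :: ColumnName -- actual column name in the database
  }

-- Before the fix: generated SQL referenced the GraphQL field name, which breaks
-- once a custom field name is configured (e.g. column "AuthorId" exposed as "Id").
sqlColumnsBefore :: [ColumnInfo] -> [ColumnName]
sqlColumnsBefore = map (\ci -> let GraphQLName n = ciName ci in ColumnName n)

-- After the fix: generated SQL references the real database column.
sqlColumnsAfter :: [ColumnInfo] -> [ColumnName]
sqlColumnsAfter = map ciColumn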
@@ -152,6 +152,7 @@ function:
 - server: column presets for SQL Server were broken and consequently insert and upsert mutations were failing with constraint violations. This change fixes this behavior (#8221).
 - server: fix caching bug with session variables in remote joins
 - server: fix regression where JWKs are refreshed once per second when both must-revalidate and max-age are specified in the Cache-Control header (#8299)
+- server: respect custom field names in delete, insert and update mutations on SQL Server (#8314)
 - console: fixed an issue where editing both a column's name and its GraphQL field name at the same time caused an error
 - console: enable searching tables within a schema
 - console: fixed the ability to create updated_at and created_at in the modify page (#8239)
@@ -1040,6 +1040,7 @@ test-suite tests-hspec
         Test.ArrayRelationshipsSpec
         Test.BasicFieldsSpec
         Test.ColumnPresetsSpec
+        Test.CustomFieldNamesSpec
         Test.DirectivesSpec
         Test.HelloWorldSpec
         Test.LimitOffsetSpec
@@ -10,7 +10,6 @@ import Hasura.Prelude
 import Hasura.RQL.IR qualified as IR
 import Hasura.RQL.Types.Column qualified as IR
 import Hasura.SQL.Backend
-import Language.GraphQL.Draft.Syntax (unName)

 fromDelete :: IR.AnnDel 'MSSQL -> FromIr Delete
 fromDelete (IR.AnnDel table (permFilter, whereClause) _ allColumns) = do
@@ -19,7 +18,7 @@ fromDelete (IR.AnnDel table (permFilter, whereClause) _ allColumns) = do
     ( do
         permissionsFilter <- fromGBoolExp permFilter
         whereExpression <- fromGBoolExp whereClause
-        let columnNames = map (ColumnName . unName . IR.ciName) allColumns
+        let columnNames = map IR.ciColumn allColumns
         pure
           Delete
             { deleteTable =
@@ -19,7 +19,6 @@ import Hasura.Prelude
 import Hasura.RQL.IR qualified as IR
 import Hasura.RQL.Types.Column qualified as IR
 import Hasura.SQL.Backend
-import Language.GraphQL.Draft.Syntax (unName)

 fromInsert :: IR.AnnInsert 'MSSQL Void Expression -> Insert
 fromInsert IR.AnnInsert {..} =
@@ -27,7 +26,7 @@ fromInsert IR.AnnInsert {..} =
       insertRows = normalizeInsertRows _aiDefVals $ map IR.getInsertColumns _aiInsObj
       insertColumnNames = maybe [] (map fst) $ listToMaybe insertRows
       insertValues = map (Values . map snd) insertRows
-      allColumnNames = map (ColumnName . unName . IR.ciName) _aiTableCols
+      allColumnNames = map IR.ciColumn _aiTableCols
       insertOutput = Output Inserted $ map OutputColumn allColumnNames
       tempTable = TempTable tempTableNameInserted allColumnNames
    in Insert _aiTableName insertColumnNames insertOutput tempTable insertValues
@@ -82,7 +81,7 @@ toMerge ::
 toMerge tableName insertRows allColumns IfMatched {..} = do
   let normalizedInsertRows = normalizeInsertRows _imColumnPresets $ map IR.getInsertColumns insertRows
       insertColumnNames = maybe [] (map fst) $ listToMaybe normalizedInsertRows
-      allColumnNames = map (ColumnName . unName . IR.ciName) allColumns
+      allColumnNames = map IR.ciColumn allColumns

   matchConditions <-
     flip runReaderT (EntityAlias "target") $ -- the table is aliased as "target" in MERGE sql
@@ -11,7 +11,6 @@ import Hasura.Backends.MSSQL.Types.Internal as TSQL
 import Hasura.Prelude
 import Hasura.RQL.Types.Column qualified as IR
 import Hasura.SQL.Backend
-import Language.GraphQL.Draft.Syntax (unName)

 -- | Create a temporary table with the same schema as the given table.
 toSelectIntoTempTable :: TempTableName -> TableName -> [IR.ColumnInfo 'MSSQL] -> SITTConstraints -> SelectIntoTempTable
@@ -29,12 +28,12 @@ columnInfoToUnifiedColumn colInfo =
   case IR.ciType colInfo of
     IR.ColumnScalar t ->
       UnifiedColumn
-        { name = unName $ IR.ciName colInfo,
+        { name = IR.ciColumn colInfo,
          type' = t
        }
    -- Enum values are represented as text value so they will always be of type text
    IR.ColumnEnumReference {} ->
      UnifiedColumn
-        { name = unName $ IR.ciName colInfo,
+        { name = IR.ciColumn colInfo,
          type' = TextType
        }
@@ -18,7 +18,6 @@ import Hasura.Prelude
 import Hasura.RQL.IR qualified as IR
 import Hasura.RQL.Types.Column qualified as IR
 import Hasura.SQL.Backend
-import Language.GraphQL.Draft.Syntax (unName)

 fromUpdate :: IR.AnnotatedUpdate 'MSSQL -> FromIr Update
 fromUpdate (IR.AnnotatedUpdateG table (permFilter, whereClause) _ backendUpdate _ allColumns) = do
@@ -27,7 +26,7 @@ fromUpdate (IR.AnnotatedUpdateG table (permFilter, whereClause) _ backendUpdate
     ( do
         permissionsFilter <- fromGBoolExp permFilter
         whereExpression <- fromGBoolExp whereClause
-        let columnNames = map (ColumnName . unName . IR.ciName) allColumns
+        let columnNames = map IR.ciColumn allColumns
         pure
           Update
             { updateTable =
@@ -467,8 +467,8 @@ fromSelectIntoTempTable SelectIntoTempTable {sittTempTableName, sittColumns, sit
       -- So, the "timestamp" type is neither insertable nor explicitly updatable. Its values are unique binary numbers within a database.
      -- We're using "binary" type instead so that we can copy a timestamp row value safely into the temporary table.
      -- See https://docs.microsoft.com/en-us/sql/t-sql/data-types/rowversion-transact-sql for more details.
-      TimestampType -> "CAST(" <+> fromNameText columnName <+> " AS binary(8)) AS " <+> fromNameText columnName
-      _ -> fromNameText columnName
+      TimestampType -> "CAST(" <+> fromColumnName columnName <+> " AS binary(8)) AS " <+> fromColumnName columnName
+      _ -> fromColumnName columnName

 -- | @TempTableName "deleted"@ becomes @\#deleted@
 fromTempTableName :: TempTableName -> Printer
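The comment above explains why: SQL Server's rowversion/timestamp columns cannot be inserted directly, so when rows are copied into the OUTPUT temp table the printer casts them to binary(8). As an illustration only (not the repo's Printer code, and assuming MSSQL bracket quoting), the emitted projection for a rowversion column looks roughly like this:

{-# LANGUAGE OverloadedStrings #-}
import Data.Text (Text)

-- Illustrative sketch of the fragment the printer produces for a
-- rowversion/timestamp column; the real code goes through the Printer machinery.
castTimestampColumn :: Text -> Text
castTimestampColumn col =
  "CAST([" <> col <> "] AS binary(8)) AS [" <> col <> "]"

-- e.g. castTimestampColumn "version"
--   == "CAST([version] AS binary(8)) AS [version]"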
@@ -123,7 +123,7 @@ type Value = ODBC.Value
 --------------------------------------------------------------------------------

 data UnifiedColumn = UnifiedColumn
-  { name :: Text,
+  { name :: ColumnName,
    type' :: ScalarType
  }

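A side benefit of storing a ColumnName rather than raw Text in UnifiedColumn: the type checker now rejects accidentally feeding a GraphQL field name where a database column is required. A hedged sketch, assuming ColumnName is essentially a newtype over Text as in the stand-in definitions earlier:

-- With `name :: ColumnName`, this no longer type-checks:
--   UnifiedColumn {name = unName (ciName colInfo), ...}  -- Text, rejected
-- while the database column flows through unchanged:
--   UnifiedColumn {name = IR.ciColumn colInfo, ...}      -- ColumnName, accepted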
@@ -123,7 +123,7 @@ scalarType = \case
 mkColumn :: Schema.Column -> Text
 mkColumn Schema.Column {columnName, columnType, columnNullable, columnDefault} =
   T.unwords
-    [ columnName,
+    [ wrapIdentifier columnName,
      scalarType columnType,
      bool "NOT NULL" "DEFAULT NULL" columnNullable,
      maybe "" ("DEFAULT " <>) columnDefault
@@ -134,7 +134,7 @@ mkPrimaryKey key =
   T.unwords
     [ "PRIMARY KEY",
      "(",
-      commaSeparated key,
+      commaSeparated $ map wrapIdentifier key,
      ")"
    ]

@@ -143,12 +143,12 @@ mkReference Schema.Reference {referenceLocalColumn, referenceTargetTable, refere
   T.unwords
     [ "CONSTRAINT FOREIGN KEY",
      "(",
-      referenceLocalColumn,
+      wrapIdentifier referenceLocalColumn,
      ")",
      "REFERENCES",
      referenceTargetTable,
      "(",
-      referenceTargetColumn,
+      wrapIdentifier referenceTargetColumn,
      ")",
      "ON DELETE CASCADE",
      "ON UPDATE CASCADE"
@@ -163,15 +163,19 @@ insertTable Schema.Table {tableName, tableColumns, tableData}
   T.unpack $
     T.unwords
       [ "INSERT INTO",
-        T.pack Constants.citusDb <> "." <> tableName,
+        T.pack Constants.citusDb <> "." <> wrapIdentifier tableName,
        "(",
-        commaSeparated (Schema.columnName <$> tableColumns),
+        commaSeparated (wrapIdentifier . Schema.columnName <$> tableColumns),
        ")",
        "VALUES",
        commaSeparated $ mkRow <$> tableData,
        ";"
      ]

+-- | Citus identifiers which may be case-sensitive need to be wrapped in @""@.
+wrapIdentifier :: Text -> Text
+wrapIdentifier identifier = "\"" <> identifier <> "\""
+
 mkRow :: [Schema.ScalarValue] -> Text
 mkRow row =
   T.unwords
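The new wrapIdentifier helper (added here for Citus and, below, for the Postgres harness) matters because unquoted identifiers are folded to lower case by Postgres and Citus, so a mixed-case column such as AuthorId would otherwise stop matching the tracked column names. A small usage sketch, assuming the module's own imports and extensions (Data.Text as T, OverloadedStrings):

-- wrapIdentifier "AuthorId" == "\"AuthorId\""
exampleSelect :: Text
exampleSelect =
  T.unwords ["SELECT", wrapIdentifier "AuthorId", "FROM", wrapIdentifier "author"]
-- == "SELECT \"AuthorId\" FROM \"author\""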
@@ -130,7 +130,7 @@ scalarType = \case
 mkColumn :: Schema.Column -> Text
 mkColumn Schema.Column {columnName, columnType, columnNullable, columnDefault} =
   T.unwords
-    [ columnName,
+    [ wrapIdentifier columnName,
      scalarType columnType,
      bool "NOT NULL" "DEFAULT NULL" columnNullable,
      maybe "" ("DEFAULT " <>) columnDefault
@@ -141,7 +141,7 @@ mkPrimaryKey key =
   T.unwords
     [ "PRIMARY KEY",
      "(",
-      commaSeparated key,
+      commaSeparated $ map wrapIdentifier key,
      ")"
    ]

@@ -150,12 +150,12 @@ mkReference Schema.Reference {referenceLocalColumn, referenceTargetTable, refere
   T.unwords
     [ "CONSTRAINT FOREIGN KEY",
      "(",
-      referenceLocalColumn,
+      wrapIdentifier referenceLocalColumn,
      ")",
      "REFERENCES",
      referenceTargetTable,
      "(",
-      referenceTargetColumn,
+      wrapIdentifier referenceTargetColumn,
      ")",
      "ON DELETE CASCADE",
      "ON UPDATE CASCADE"
@@ -170,15 +170,22 @@ insertTable Schema.Table {tableName, tableColumns, tableData}
   T.unpack $
     T.unwords
       [ "INSERT INTO",
-        T.pack Constants.postgresDb <> "." <> tableName,
+        T.pack Constants.postgresDb <> "." <> wrapIdentifier tableName,
        "(",
-        commaSeparated (Schema.columnName <$> tableColumns),
+        commaSeparated (wrapIdentifier . Schema.columnName <$> tableColumns),
        ")",
        "VALUES",
        commaSeparated $ mkRow <$> tableData,
        ";"
      ]

+-- | Identifiers which may be case-sensitive need to be wrapped in @""@.
+--
+-- More information can be found in the postgres docs:
+-- https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS
+wrapIdentifier :: Text -> Text
+wrapIdentifier identifier = "\"" <> identifier <> "\""
+
 mkRow :: [Schema.ScalarValue] -> Text
 mkRow row =
   T.unwords
@@ -127,7 +127,7 @@ scalarType = \case
 mkColumn :: Schema.Column -> Text
 mkColumn Schema.Column {columnName, columnType, columnNullable, columnDefault} =
   T.unwords
-    [ columnName,
+    [ wrapIdentifier columnName,
      scalarType columnType,
      bool "NOT NULL" "DEFAULT NULL" columnNullable,
      maybe "" ("DEFAULT " <>) columnDefault
@@ -138,7 +138,7 @@ mkPrimaryKey key =
   T.unwords
     [ "PRIMARY KEY",
      "(",
-      commaSeparated key,
+      commaSeparated $ map wrapIdentifier key,
      ")"
    ]

@@ -147,12 +147,12 @@ mkReference Schema.Reference {referenceLocalColumn, referenceTargetTable, refere
   T.unwords
     [ "FOREIGN KEY",
      "(",
-      referenceLocalColumn,
+      wrapIdentifier referenceLocalColumn,
      ")",
      "REFERENCES",
      referenceTargetTable,
      "(",
-      referenceTargetColumn,
+      wrapIdentifier referenceTargetColumn,
      ")",
      "ON DELETE CASCADE",
      "ON UPDATE CASCADE"
@@ -167,15 +167,22 @@ insertTable Schema.Table {tableName, tableColumns, tableData}
   T.unpack $
     T.unwords
       [ "INSERT INTO",
-        T.pack Constants.sqlserverDb <> "." <> tableName,
+        T.pack Constants.sqlserverDb <> "." <> wrapIdentifier tableName,
        "(",
-        commaSeparated (Schema.columnName <$> tableColumns),
+        commaSeparated (wrapIdentifier . Schema.columnName <$> tableColumns),
        ")",
        "VALUES",
        commaSeparated $ mkRow <$> tableData,
        ";"
      ]

+-- | MSSQL identifiers which may contain spaces or be case-sensitive need to be wrapped in @[]@.
+--
+-- More information can be found in the mssql docs:
+-- https://docs.microsoft.com/en-us/sql/relational-databases/databases/database-identifiers
+wrapIdentifier :: Text -> Text
+wrapIdentifier identifier = "[" <> identifier <> "]"
+
 mkRow :: [Schema.ScalarValue] -> Text
 mkRow row =
   T.unwords
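The SQL Server variant of wrapIdentifier mirrors the quote-wrapping helpers above but uses brackets, SQL Server's native delimited-identifier syntax, which also keeps names containing spaces safe to splice into the generated statements. A small usage sketch, again assuming the module's own imports and extensions (Data.Text as T, OverloadedStrings):

-- wrapIdentifier "AuthorId" == "[AuthorId]"
exampleInsert :: Text
exampleInsert =
  T.unwords ["INSERT INTO", wrapIdentifier "author", "(", wrapIdentifier "AuthorId", ")"]
-- == "INSERT INTO [author] ( [AuthorId] )"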
@@ -33,14 +33,13 @@ where
 -------------------------------------------------------------------------------

 import Control.Concurrent (forkIO, threadDelay)
-import Control.Exception.Safe (bracket)
 import Control.Monad.Trans.Managed (ManagedT (..), lowerManagedT)
 import Data.Aeson (Value, object, (.=))
 import Data.Environment qualified as Env
 import Data.Text qualified as T
 import Data.Time (getCurrentTime)
-import GHC.Stack (HasCallStack)
 import Harness.Constants qualified as Constants
+import Harness.Exceptions (HasCallStack, bracket, withFrozenCallStack)
 import Harness.Http qualified as Http
 import Harness.Quoter.Yaml (yaml)
 import Harness.State (Server (..), State, getServer, serverUrl)
@@ -65,64 +64,84 @@ import System.Metrics qualified as EKG
 -- failure.
 --
 -- See 'postWithHeaders' to issue a request with 'Http.RequestHeaders'.
+--
+-- Note: We add 'withFrozenCallStack' to reduce stack trace clutter.
 post :: HasCallStack => State -> String -> Value -> IO Value
-post state path = postWithHeaders state path mempty
+post state path = withFrozenCallStack . postWithHeaders state path mempty

 -- | Same as 'post', but ignores the value.
 --
 -- See 'postWithHeaders_' to issue a request with 'Http.RequestHeaders'.
+--
+-- Note: We add 'withFrozenCallStack' to reduce stack trace clutter.
 post_ :: HasCallStack => State -> String -> Value -> IO ()
-post_ state path = void . postWithHeaders_ state path mempty
+post_ state path = void . withFrozenCallStack . postWithHeaders_ state path mempty

 -- | Post some JSON to graphql-engine, getting back more JSON.
 --
 -- Optimistically assumes success; use another function if you want to test for
 -- failure.
+--
+-- Note: We add 'withFrozenCallStack' to reduce stack trace clutter.
 postWithHeaders ::
   HasCallStack => State -> String -> Http.RequestHeaders -> Value -> IO Value
-postWithHeaders (getServer -> Server {urlPrefix, port}) path =
-  Http.postValue (urlPrefix ++ ":" ++ show port ++ path)
+postWithHeaders (getServer -> Server {urlPrefix, port}) path headers =
+  withFrozenCallStack . Http.postValue (urlPrefix ++ ":" ++ show port ++ path) headers

 -- | Post some JSON to graphql-engine, getting back more JSON.
 --
 -- Optimistically assumes success; use another function if you want to test for
 -- failure.
+--
+-- Note: We add 'withFrozenCallStack' to reduce stack trace clutter.
 postWithHeaders_ ::
   HasCallStack => State -> String -> Http.RequestHeaders -> Value -> IO ()
 postWithHeaders_ state path headers =
-  void . postWithHeaders state path headers
+  void . withFrozenCallStack . postWithHeaders state path headers

 -- | Same as 'post', but defaults to the graphql end-point.
+--
+-- Note: We add 'withFrozenCallStack' to reduce stack trace clutter.
 postGraphqlYaml ::
   HasCallStack => State -> Value -> IO Value
-postGraphqlYaml state = postGraphqlYamlWithHeaders state mempty
+postGraphqlYaml state = withFrozenCallStack . postGraphqlYamlWithHeaders state mempty

 -- | Same as 'postWithHeaders', but defaults to the graphql end-point.
+--
+-- Note: We add 'withFrozenCallStack' to reduce stack trace clutter.
 postGraphqlYamlWithHeaders ::
   HasCallStack => State -> Http.RequestHeaders -> Value -> IO Value
 postGraphqlYamlWithHeaders state headers =
-  postWithHeaders state "/v1/graphql" headers
+  withFrozenCallStack $ postWithHeaders state "/v1/graphql" headers

 -- | Same as 'postGraphqlYaml', but adds the @{query:..}@ wrapper.
+--
+-- Note: We add 'withFrozenCallStack' to reduce stack trace clutter.
 postGraphql :: HasCallStack => State -> Value -> IO Value
 postGraphql state value =
-  postGraphqlYaml state (object ["query" .= value])
+  withFrozenCallStack $ postGraphqlYaml state (object ["query" .= value])

 -- | Same as 'postGraphqlYamlWithHeaders', but adds the @{query:..}@ wrapper.
+--
+-- Note: We add 'withFrozenCallStack' to reduce stack trace clutter.
 postGraphqlWithHeaders ::
   HasCallStack => State -> Http.RequestHeaders -> Value -> IO Value
 postGraphqlWithHeaders state headers value =
-  postGraphqlYamlWithHeaders state headers (object ["query" .= value])
+  withFrozenCallStack $ postGraphqlYamlWithHeaders state headers (object ["query" .= value])

 -- | Same as 'post_', but defaults to the @"v1/metadata"@ endpoint.
 --
 -- @headers@ are mostly irrelevant for the admin endpoint @v1/metadata@.
+--
+-- Note: We add 'withFrozenCallStack' to reduce stack trace clutter.
 postMetadata_ :: HasCallStack => State -> Value -> IO ()
-postMetadata_ state = post_ state "/v1/metadata"
+postMetadata_ state = withFrozenCallStack $ post_ state "/v1/metadata"

 -- | Resets metadata, removing all sources or remote schemas.
+--
+-- Note: We add 'withFrozenCallStack' to reduce stack trace clutter.
 clearMetadata :: HasCallStack => State -> IO ()
-clearMetadata s = postMetadata_ s [yaml|{type: clear_metadata, args: {}}|]
+clearMetadata s = withFrozenCallStack $ postMetadata_ s [yaml|{type: clear_metadata, args: {}}|]

 -------------------------------------------------------------------------------
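The repeated "reduce stack trace clutter" notes refer to GHC's HasCallStack machinery: wrapping a helper's body in withFrozenCallStack stops further frames from being pushed, so a failure is reported at the test that called post/postGraphql rather than deep inside the harness. A minimal standalone sketch of the mechanism (not the harness code itself):

import GHC.Stack (HasCallStack, callStack, prettyCallStack, withFrozenCallStack)

-- A low-level check that reports the call stack on failure.
check :: HasCallStack => Bool -> IO ()
check True = pure ()
check False = errorWithoutStackTrace ("check failed, called at:\n" ++ prettyCallStack callStack)

-- Without freezing, failures also blame this wrapper's internal call to 'check'.
checkVerbose :: HasCallStack => Bool -> IO ()
checkVerbose = check

-- With freezing, the reported stack stops at the caller of 'checkQuiet',
-- which is what the harness wants for post/postGraphql/postMetadata_ etc.
checkQuiet :: HasCallStack => Bool -> IO ()
checkQuiet b = withFrozenCallStack (check b)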
server/tests-hspec/Test/CustomFieldNamesSpec.hs (new file, 206 lines):
{-# LANGUAGE QuasiQuotes #-}

-- | Testing custom field names.
-- See the main hasura documentation for more information.
--
-- - Postgres: https://hasura.io/docs/latest/graphql/core/databases/postgres/schema/custom-field-names.html
-- - MSSQL: https://hasura.io/docs/latest/graphql/core/databases/ms-sql-server/schema/custom-field-names.html
module Test.CustomFieldNamesSpec (spec) where

import Harness.Backend.Postgres qualified as Postgres
import Harness.Backend.Sqlserver qualified as Sqlserver
import Harness.GraphqlEngine qualified as GraphqlEngine
import Harness.Quoter.Graphql (graphql)
import Harness.Quoter.Yaml (shouldReturnYaml, yaml)
import Harness.State (State)
import Harness.Test.Context qualified as Context
import Harness.Test.Schema qualified as Schema
import Test.Hspec (SpecWith, it)
import Prelude

--------------------------------------------------------------------------------

-- * Preamble

spec :: SpecWith State
spec =
  Context.run
    [ Context.Context
        { name = Context.Backend Context.SQLServer,
          mkLocalState = Context.noLocalState,
          setup = sqlserverSetup,
          teardown = Sqlserver.teardown schema,
          customOptions = Nothing
        },
      Context.Context
        { name = Context.Backend Context.Postgres,
          mkLocalState = Context.noLocalState,
          setup = postgresSetup,
          teardown = Postgres.teardown schema,
          customOptions = Nothing
        }
    ]
    tests

--------------------------------------------------------------------------------

-- * Tests

tests :: Context.Options -> SpecWith State
tests opts = do
  it "Delete respects custom names" $ \state ->
    shouldReturnYaml
      opts
      ( GraphqlEngine.postGraphql
          state
          [graphql|
mutation author {
  delete_hasura_author(
    where: {Id: {_eq: 1}}
  ) {
    returning {
      Id
      Name
    }
  }
}
|]
      )
      [yaml|
data:
  delete_hasura_author:
    returning:
    - Id: 1
      Name: 'Mercer'
|]

  it "Update respects custom names" $ \state ->
    shouldReturnYaml
      opts
      ( GraphqlEngine.postGraphql
          state
          [graphql|
mutation author {
  update_hasura_author(
    where: {Id: {_eq: 2}}
    _set: { Name: "Johnson" }
  ) {
    returning {
      Id
      Name
    }
  }
}
|]
      )
      [yaml|
data:
  update_hasura_author:
    returning:
    - Id: 2
      Name: 'Johnson'
|]

  it "Insert respects custom names" $ \state ->
    shouldReturnYaml
      opts
      ( GraphqlEngine.postGraphql
          state
          [graphql|
mutation author {
  insert_hasura_author(objects:
    { Id: 3
      Name: "Jaffe"
    }) {
    returning {
      Id
      Name
    }
  }
}
|]
      )
      [yaml|
data:
  insert_hasura_author:
    returning:
    - Id: 3
      Name: 'Jaffe'
|]

--------------------------------------------------------------------------------

-- * Backend

-- ** Schema

schema :: [Schema.Table]
schema =
  [ Schema.Table
      { tableName = "author",
        tableColumns =
          [ Schema.column "AuthorId" Schema.TInt,
            Schema.column "AuthorName" Schema.TStr
          ],
        tablePrimaryKey = ["AuthorId"],
        tableReferences = [],
        tableData =
          [ [Schema.VInt 1, Schema.VStr "Mercer"],
            [Schema.VInt 2, Schema.VStr "Ray"]
          ]
      }
  ]

--------------------------------------------------------------------------------

-- ** Postgres backend

postgresSetup :: (State, ()) -> IO ()
postgresSetup (state, localState) = do
  Postgres.setup schema (state, localState)
  postgresCreateCustomNames state

postgresCreateCustomNames :: State -> IO ()
postgresCreateCustomNames state = do
  let source = Context.defaultBackendTypeString Context.Postgres
   in GraphqlEngine.postMetadata_
        state
        [yaml|
type: pg_set_table_customization
args:
  source: *source
  table:
    schema: hasura
    name: author
  configuration:
    custom_column_names:
      "AuthorId": "Id"
      "AuthorName": "Name"
|]

--------------------------------------------------------------------------------

-- ** SQL Server backend

sqlserverSetup :: (State, ()) -> IO ()
sqlserverSetup (state, localState) = do
  Sqlserver.setup schema (state, localState)
  sqlserverCreateCustomNames state

sqlserverCreateCustomNames :: State -> IO ()
sqlserverCreateCustomNames state = do
  let source = Context.defaultBackendTypeString Context.SQLServer
   in GraphqlEngine.postMetadata_
        state
        [yaml|
type: mssql_set_table_customization
args:
  source: *source
  table:
    schema: hasura
    name: author
  configuration:
    custom_column_names:
      "AuthorId": "Id"
      "AuthorName": "Name"
|]