Replace "identity column" with "column mutability" data for all backends
PR-URL: https://github.com/hasura/graphql-engine-mono/pull/3373
GitOrigin-RevId: bf08cc9008a4b0b3ece4952528c15c45e57fc74c

parent bd2fce3691
commit e1918adb52
@@ -102,6 +102,11 @@ count (
 - cli: fix regression in `migrate create` command (#7971)
 - cli: stop using `/healthz` endpoint to determine server health
 - cli: fix regression with `--address` flag of `hasura console` command (#8005)
+- server: (Postgres, Citus, and MSSQL backends) Identity columns and computed
+  columns are now marked immutable, removing them from the schema of insert and
+  update mutations.
+- server: Fix graphql-engine/issues/4633: We can now insert multiple objects
+  that have generated columns in Postgres.
 
 ## v2.1.1
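To illustrate the first server entry, here is a hypothetical Postgres table (not taken from the commit): with this change its identity and generated columns are marked immutable and disappear from the insert and update mutation inputs.

```sql
-- Hypothetical example: `id` (identity) and `full_name` (generated) become
-- non-insertable and non-updatable, so the GraphQL insert/update inputs for
-- this table expose only first_name and last_name.
CREATE TABLE author (
  id INT GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
  first_name TEXT NOT NULL,
  last_name TEXT NOT NULL,
  full_name TEXT GENERATED ALWAYS AS (first_name || ' ' || last_name) STORED
);
```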
@@ -507,6 +507,7 @@ elif [ "$MODE" = "test" ]; then
 export HASURA_GRAPHQL_PG_SOURCE_URL_1=${HASURA_GRAPHQL_PG_SOURCE_URL_1-$PG_DB_URL}
 export HASURA_GRAPHQL_PG_SOURCE_URL_2=${HASURA_GRAPHQL_PG_SOURCE_URL_2-$PG_DB_URL}
 export HASURA_GRAPHQL_EXPERIMENTAL_FEATURES="inherited_roles"
+export HASURA_GRAPHQL_MSSQL_SOURCE_URL=$MSSQL_CONN_STR
 
 # Using --metadata-database-url flag to test multiple backends
 # HASURA_GRAPHQL_PG_SOURCE_URL_* For a couple multi-source pytests:
@@ -87,6 +87,7 @@ For postgres tests, you will want to run `setup.yaml` and maybe `values_setup.ya
 We will set up an API call to graphql-engine per setup file:
 
 ```sh
 cat server/tests-py/queries/graphql_query/basic/schema_setup.yaml | yaml2json | curl -d @- localhost:8181/v1/query
+cat server/tests-py/queries/graphql_query/basic/schema_setup_mssql.yaml | yaml2json | curl -d @- localhost:8181/v2/query
 cat server/tests-py/queries/graphql_query/basic/setup_mssql.yaml | yaml2json | curl -d @- localhost:8181/v1/metadata
 ```
@@ -125,6 +126,7 @@ But it is also possible to run the teardown files against graphql-engine. Like t
 ```sh
 cat server/tests-py/queries/graphql_query/basic/teardown_mssql.yaml | yaml2json | curl -d @- localhost:8181/v1/metadata
+cat server/tests-py/queries/graphql_query/basic/schema_teardown_mssql.yaml | yaml2json | curl -d @- localhost:8181/v2/query
 cat server/tests-py/queries/graphql_query/basic/schema_teardown.yaml | yaml2json | curl -d @- localhost:8181/v1/query
 ```
 
 ## Run a remote MSSQL instance with dev.sh
@@ -68,32 +68,20 @@ executeInsert userInfo stringifyNum sourceConfig annInsert = do
 -- a. Create an empty temporary table with name #inserted to store affected rows (for the response)
 --
 -- > SELECT column1, column2 INTO #inserted FROM some_table WHERE (1 <> 1)
--- > UNION ALL SELECT column1, column2 FROM some_table;
+-- > UNION ALL SELECT column1, column2 FROM some_table WHERE (1 <> 1);
 --
--- c. If 'if_matched' is found: Use MERGE statment to perform upsert
--- c.1 Use #values temporary table to store input object values
+-- b. If 'if_matched' is found: Use MERGE statement to perform upsert
+-- b.1 Use #values temporary table to store input object values
 --
 -- > SELECT column1, column2 INTO #values FROM some_table WHERE (1 <> 1)
 --
--- c.2 Before and after the insert, Set IDENTITY_INSERT to ON/OFF if any insert row contains
--- at least one identity column.
---
--- > SET IDENTITY_INSERT #values ON;
--- > <INSERT>
--- > SET IDENTITY_INSERT #values OFF;
---
--- c.3 Insert input object values into the temporary table
+-- b.2 Insert input object values into the temporary table
 --
 -- > INSERT INTO #values (column1, column2) VALUES (value1, value2), (value3, value4)
 --
--- c.4 Before and after the MERGE, Set IDENTITY_INSERT to ON/OFF if any insert row contains
--- at least one identity column.
---
--- > SET IDENTITY_INSERT some_table ON;
--- > <INSERT>
--- > SET IDENTITY_INSERT some_table OFF;
---
--- c.5 Generate an SQL Merge statement to perform either update or insert (upsert) to the table
+-- b.3 Generate an SQL Merge statement to perform either update or insert (upsert) to the table
 --
 -- > MERGE some_table AS [target]
 -- > USING (SELECT column1, column2 from #values) AS [source](column1, column2) ON ([target].column1 = [source].column1)
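The steps above can be read as one transaction. A hand-assembled sketch, assuming a table some_table(column1 INT PRIMARY KEY, column2 INT) and two input rows (this is illustrative, not the literal output of the code):

```sql
-- Step a: empty temp table that will collect the affected rows.
SELECT column1, column2 INTO #inserted FROM some_table WHERE (1 <> 1)
UNION ALL SELECT column1, column2 FROM some_table WHERE (1 <> 1);

-- Steps b.1 / b.2: stage the input objects in #values.
SELECT column1, column2 INTO #values FROM some_table WHERE (1 <> 1);
INSERT INTO #values (column1, column2) VALUES (1, 10), (2, 20);

-- Step b.3: upsert from #values, capturing the result rows for the response.
MERGE some_table AS [target]
USING (SELECT column1, column2 FROM #values) AS [source](column1, column2)
  ON ([target].column1 = [source].column1)
WHEN MATCHED THEN UPDATE SET column2 = [source].column2
WHEN NOT MATCHED THEN INSERT (column1, column2) VALUES ([source].column1, [source].column2)
OUTPUT INSERTED.column1, INSERTED.column2 INTO #inserted (column1, column2);

DROP TABLE #values;
```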
@@ -156,7 +144,10 @@ buildInsertTx tableName withAlias stringifyNum insert = do
 --
 -- Affected rows will be inserted into the #inserted temporary table regardless.
 case ifMatchedField of
-  Nothing -> buildRegularInsertTx tableName insert
+  Nothing -> do
+    -- Insert values into the table using INSERT query
+    let insertQuery = toQueryFlat $ TQ.fromInsert $ TSQL.fromInsert insert
+    Tx.unitQueryE fromMSSQLTxError insertQuery
   Just ifMatched -> buildUpsertTx tableName insert ifMatched
 
 -- Build a response to the user using the values in the temporary table named #inserted
@@ -171,21 +162,6 @@ buildInsertTx tableName withAlias stringifyNum insert = do
 
   pure $ encJFromText responseText
 
--- | Translate an IR Insert mutation into a simple insert SQL statement,
--- which is surrounded by @SET IDENTITY_INSERT <table> ON/OFF@ if needed.
---
--- Should be used as part of a bigger transaction in 'buildInsertTx'.
-buildRegularInsertTx :: TSQL.TableName -> AnnInsert 'MSSQL Void Expression -> Tx.TxET QErr IO ()
-buildRegularInsertTx tableName insert = do
-  let identityColumns = _biIdentityColumns $ _aiBackendInsert $ _aiData insert
-      insertColumns = concatMap (map fst . getInsertColumns) $ _aiInsObj $ _aiData insert
-  -- Set identity insert to ON/OFF before/after inserting into the table
-  -- if insert object contains identity columns
-  withIdentityInsert identityColumns insertColumns (RegularTableName tableName) $ do
-    -- Insert values into the table using INSERT query
-    let insertQuery = toQueryFlat $ TQ.fromInsert $ TSQL.fromInsert insert
-    Tx.unitQueryE fromMSSQLTxError insertQuery
-
 -- | Translates an IR IfMatched clause to SQL and
 -- builds a corresponding transaction to run against MS SQL Server.
 --
@@ -200,65 +176,35 @@ buildRegularInsertTx tableName insert = do
 -- Should be used as part of a bigger transaction in 'buildInsertTx'.
 buildUpsertTx :: TSQL.TableName -> AnnInsert 'MSSQL Void Expression -> IfMatched Expression -> Tx.TxET QErr IO ()
 buildUpsertTx tableName insert ifMatched = do
-  let identityColumns = _biIdentityColumns $ _aiBackendInsert $ _aiData insert
-      insertColumns = concatMap (map fst . getInsertColumns) $ _aiInsObj $ _aiData insert
-      tableColumns = _aiTableCols $ _aiData insert
+  let insertColumnNames = concatMap (map fst . getInsertColumns) $ _aiInsObj $ _aiData insert
+      allTableColumns = _aiTableCols $ _aiData insert
+      insertColumns = filter (\c -> ciColumn c `elem` insertColumnNames) allTableColumns
       createValuesTempTableQuery =
         toQueryFlat $
           TQ.fromSelectIntoTempTable $
-            TSQL.toSelectIntoTempTable tempTableNameValues tableName tableColumns KeepConstraints
+            -- We want to KeepConstraints here so the user can omit values for identity columns such as `id`
+            TSQL.toSelectIntoTempTable tempTableNameValues tableName insertColumns KeepConstraints
   -- Create #values temporary table
   Tx.unitQueryE fromMSSQLTxError createValuesTempTableQuery
 
-  -- Set identity insert to ON if insert object contains identity columns for temporary #values table
-  withIdentityInsert identityColumns insertColumns (TemporaryTableName tempTableNameValues) $ do
-    -- Store values in #values temporary table
-    let insertValuesIntoTempTableQuery =
-          toQueryFlat $
-            TQ.fromInsertValuesIntoTempTable $
-              TSQL.toInsertValuesIntoTempTable tempTableNameValues insert
-    Tx.unitQueryE fromMSSQLTxError insertValuesIntoTempTableQuery
+  -- Store values in #values temporary table
+  let insertValuesIntoTempTableQuery =
+        toQueryFlat $
+          TQ.fromInsertValuesIntoTempTable $
+            TSQL.toInsertValuesIntoTempTable tempTableNameValues insert
+  Tx.unitQueryE fromMSSQLTxError insertValuesIntoTempTableQuery
 
-  -- Set identity insert to ON if insert object contains identity columns
-  -- before inserting into the original table
-  withIdentityInsert identityColumns insertColumns (RegularTableName tableName) $ do
-    -- Run the MERGE query and store the mutated rows in #inserted temporary table
-    merge <-
-      (V.runValidate . runFromIr)
-        (toMerge tableName (_aiInsObj $ _aiData insert) identityColumns tableColumns ifMatched)
-        `onLeft` (throw500 . tshow)
-    let mergeQuery = toQueryFlat $ TQ.fromMerge merge
-    Tx.unitQueryE fromMSSQLTxError mergeQuery
+  -- Run the MERGE query and store the mutated rows in #inserted temporary table
+  merge <-
+    (V.runValidate . runFromIr)
+      (toMerge tableName (_aiInsObj $ _aiData insert) allTableColumns ifMatched)
+      `onLeft` (throw500 . tshow)
+  let mergeQuery = toQueryFlat $ TQ.fromMerge merge
+  Tx.unitQueryE fromMSSQLTxError mergeQuery
 
   -- After @MERGE@ we no longer need this temporary table
   Tx.unitQueryE fromMSSQLTxError $ toQueryFlat $ dropTempTableQuery tempTableNameValues
 
--- | Sets @IDENTITY_INSERT@ to ON before running some statements and afterwards OFF
--- if there are identity columns in the table.
---
--- This is done so we can insert identity columns explicitly.
-withIdentityInsert :: [ColumnName] -> [ColumnName] -> SomeTableName -> Tx.TxET QErr IO a -> Tx.TxET QErr IO a
-withIdentityInsert identityColumns insertColumns table statements = do
-  let setIdentityInsertIf mode =
-        when (any (`elem` identityColumns) insertColumns) $
-          Tx.unitQueryE fromMSSQLTxError $
-            toQueryFlat $
-              TQ.fromSetIdentityInsert $ SetIdentityInsert table mode
-
-  -- Set identity insert to ON if insert object contains identity columns
-  setIdentityInsertIf SetON
-
-  -- Run the statements that should run while @IDENTITY_INSERT@ is set to ON
-  result <- statements
-
-  -- Set identity insert to OFF if insert object contains identity columns,
-  -- because only one table can have @IDENTITY_INSERT@ set to ON in a session :(
-  -- See https://stackoverflow.com/questions/23832598/identity-insert-is-already-on-for-table-x-cannot-perform-set-operation-for-ta
-  setIdentityInsertIf SetOFF
-
-  pure result
-
 -- | Builds a response to the user using the values in the temporary table named #inserted.
 buildInsertResponseTx :: Bool -> Text -> AnnInsert 'MSSQL Void Expression -> Tx.TxET QErr IO (Text, Int)
 buildInsertResponseTx stringifyNum withAlias insert = do
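With identity columns excluded from the generated schema, the removed @SET IDENTITY_INSERT@ dance becomes unnecessary. A before/after sketch in T-SQL, using a hypothetical table and values (and recalling, per the removed comment, that only one table per session may have IDENTITY_INSERT ON):

```sql
-- Before: explicit identity values forced the generated transaction to
-- toggle IDENTITY_INSERT around the statement.
SET IDENTITY_INSERT some_table ON;
INSERT INTO some_table (id, column2) VALUES (1, 10);
SET IDENTITY_INSERT some_table OFF;

-- After: identity columns are immutable and never appear in the column list,
-- so a plain INSERT suffices and SQL Server assigns the identity value.
INSERT INTO some_table (column2) VALUES (10);
```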
@@ -1,4 +1,5 @@
 {-# LANGUAGE ViewPatterns #-}
+{-# OPTIONS_HADDOCK ignore-exports #-}
 
 -- | Translate from the DML to the TSql dialect.
 --
@@ -37,6 +38,7 @@ module Hasura.Backends.MSSQL.FromIr
   where
 
 import Control.Monad.Validate
+import Data.Containers.ListUtils (nubOrd)
 import Data.HashMap.Strict qualified as HM
 import Data.Map.Strict (Map)
 import Data.Map.Strict qualified as M
@@ -44,7 +46,7 @@ import Data.Proxy
 import Data.Text qualified as T
 import Database.ODBC.SQLServer qualified as ODBC
 import Hasura.Backends.MSSQL.Instances.Types ()
-import Hasura.Backends.MSSQL.Types.Insert as TSQL (BackendInsert (..), IfMatched (..))
+import Hasura.Backends.MSSQL.Types.Insert as TSQL (IfMatched (..))
 import Hasura.Backends.MSSQL.Types.Internal as TSQL
 import Hasura.Backends.MSSQL.Types.Update as TSQL (BackendUpdate (..), Update (..))
 import Hasura.Prelude
@@ -1072,7 +1074,7 @@ fromGBoolExp =
 fromInsert :: IR.AnnInsert 'MSSQL Void Expression -> Insert
 fromInsert IR.AnnInsert {..} =
   let IR.AnnIns {..} = _aiData
-      insertRows = normalizeInsertRows (_biIdentityColumns _aiBackendInsert) _aiTableCols $ map (IR.getInsertColumns) _aiInsObj
+      insertRows = normalizeInsertRows $ map (IR.getInsertColumns) _aiInsObj
       insertColumnNames = maybe [] (map fst) $ listToMaybe insertRows
       insertValues = map (Values . map snd) insertRows
       allColumnNames = map (ColumnName . unName . IR.ciName) _aiTableCols
@@ -1080,36 +1082,37 @@ fromInsert IR.AnnInsert {..} =
       tempTable = TempTable tempTableNameInserted allColumnNames
    in Insert _aiTableName insertColumnNames insertOutput tempTable insertValues
 
--- | Normalize a row by adding missing columns with 'DEFAULT' value and sort by column name to make sure
--- all rows are consistent in column values and order.
+-- | Normalize a row by adding missing columns with @DEFAULT@ value and sort by
+-- column name to make sure all rows are consistent in column values and order.
 --
--- Example: A table "author" is defined as
+-- Example: A table "author" is defined as:
 --
--- CREATE TABLE author ([id] INTEGER NOT NULL PRIMARY KEY, name TEXT NOT NULL, age INTEGER)
+-- > CREATE TABLE author ([id] INTEGER NOT NULL PRIMARY KEY, name TEXT NOT NULL, age INTEGER)
 --
--- Consider the following mutation;
+-- Consider the following mutation:
 --
--- mutation {
---   insert_author(
---     objects: [{id: 1, name: "Foo", age: 21}, {id: 2, name: "Bar"}]
---   ){
---     affected_rows
---   }
--- }
+-- > mutation {
+-- >   insert_author(
+-- >     objects: [{id: 1, name: "Foo", age: 21}, {id: 2, name: "Bar"}]
+-- >   ){
+-- >     affected_rows
+-- >   }
+-- > }
 --
--- We consider 'DEFAULT' value for "age" column which is missing in second insert row. The INSERT statement look like
+-- We consider @DEFAULT@ value for @age@ column which is missing in second
+-- insert row.
 --
--- INSERT INTO author (id, name, age) OUTPUT INSERTED.id VALUES (1, 'Foo', 21), (2, 'Bar', DEFAULT)
+-- The corresponding @INSERT@ statement looks like:
+--
+-- > INSERT INTO author (id, name, age)
+-- > OUTPUT INSERTED.id
+-- > VALUES (1, 'Foo', 21), (2, 'Bar', DEFAULT)
 normalizeInsertRows ::
-  [ColumnName] ->
-  [IR.ColumnInfo 'MSSQL] ->
   [[(Column 'MSSQL, Expression)]] ->
   [[(Column 'MSSQL, Expression)]]
-normalizeInsertRows identityColumnNames tableColumns insertRows =
-  let isIdentityColumn column = IR.ciColumn column `elem` identityColumnNames
-      allColumnsWithDefaultValue =
-        -- DEFAULT or NULL are not allowed as explicit identity values.
-        map ((,DefaultExpression) . IR.ciColumn) $ filter (not . isIdentityColumn) tableColumns
+normalizeInsertRows insertRows =
+  let insertColumns = nubOrd (concatMap (map fst) insertRows)
+      allColumnsWithDefaultValue = map (,DefaultExpression) $ insertColumns
       addMissingColumns insertRow =
         HM.toList $ HM.fromList insertRow `HM.union` HM.fromList allColumnsWithDefaultValue
       sortByColumn = sortBy (\l r -> compare (fst l) (fst r))
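Reading the normalization off the doc comment's example: the union of columns across all rows is {id, name, age}, missing cells become DEFAULT, and rows are sorted by column name. A sketch of the resulting statement with the sorted column order made explicit:

```sql
-- Row 2 omits `age`, so it is padded with DEFAULT; columns are sorted by
-- name, so every row shares the order (age, id, name).
INSERT INTO author (age, id, name)
OUTPUT INSERTED.id
VALUES (21, 1, 'Foo'), (DEFAULT, 2, 'Bar');
```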
@@ -1121,16 +1124,13 @@ normalizeInsertRows identityColumnNames tableColumns insertRows =
 toMerge ::
   TableName ->
   [IR.AnnotatedInsertRow 'MSSQL Expression] ->
-  [ColumnName] ->
   [IR.ColumnInfo 'MSSQL] ->
   IfMatched Expression ->
   FromIr Merge
-toMerge tableName insertRows identityColumnNames tableColumns IfMatched {..} = do
-  let normalizedInsertRows =
-        normalizeInsertRows identityColumnNames tableColumns $
-          map (IR.getInsertColumns) insertRows
+toMerge tableName insertRows allColumns IfMatched {..} = do
+  let normalizedInsertRows = normalizeInsertRows $ map (IR.getInsertColumns) insertRows
       insertColumnNames = maybe [] (map fst) $ listToMaybe normalizedInsertRows
-      allColumnNames = map (ColumnName . unName . IR.ciName) tableColumns
+      allColumnNames = map (ColumnName . unName . IR.ciName) allColumns
 
   matchConditions <-
     flip runReaderT (EntityAlias "target") $ -- the table is aliased as "target" in MERGE sql
@@ -1139,7 +1139,7 @@ toMerge tableName insertRows identityColumnNames tableColumns IfMatched {..} = d
   pure $
     Merge
       { mergeTargetTable = tableName,
-        mergeUsing = MergeUsing tempTableNameValues allColumnNames,
+        mergeUsing = MergeUsing tempTableNameValues insertColumnNames,
         mergeOn = MergeOn _imMatchColumns,
         mergeWhenMatched = MergeWhenMatched _imUpdateColumns matchConditions _imColumnPresets,
         mergeWhenNotMatched = MergeWhenNotMatched insertColumnNames,
@@ -1157,7 +1157,7 @@ toMerge tableName insertRows identityColumnNames tableColumns IfMatched {..} = d
 toInsertValuesIntoTempTable :: TempTableName -> IR.AnnInsert 'MSSQL Void Expression -> InsertValuesIntoTempTable
 toInsertValuesIntoTempTable tempTable IR.AnnInsert {..} =
   let IR.AnnIns {..} = _aiData
-      insertRows = normalizeInsertRows (_biIdentityColumns _aiBackendInsert) _aiTableCols $ map IR.getInsertColumns _aiInsObj
+      insertRows = normalizeInsertRows $ map IR.getInsertColumns _aiInsObj
       insertColumnNames = maybe [] (map fst) $ listToMaybe insertRows
       insertValues = map (Values . map snd) insertRows
   in InsertValuesIntoTempTable
@@ -91,6 +91,7 @@ data SysColumn = SysColumn
     scUserTypeId :: Int,
     scIsNullable :: Bool,
     scIsIdentity :: Bool,
+    scIsComputed :: Bool,
     scJoinedSysType :: SysType,
     scJoinedForeignKeyColumns :: [SysForeignKeyColumn]
   }
@@ -171,7 +172,8 @@ transformColumn columnInfo =
           _fkColumnMapping = HM.singleton rciName $ ColumnName $ sfkcJoinedReferencedColumnName foreignKeyColumn
        in ForeignKey {..}
 
-    rciMutability = ColumnMutability {_cmIsInsertable = True, _cmIsUpdatable = True}
+    colIsImmutable = scIsComputed columnInfo || scIsIdentity columnInfo
+    rciMutability = ColumnMutability {_cmIsInsertable = not colIsImmutable, _cmIsUpdatable = not colIsImmutable}
    in (RawColumnInfo {..}, foreignKeys)
 
 transformPrimaryKey :: SysPrimaryKey -> PrimaryKey 'MSSQL (Column 'MSSQL)
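For reference, a hypothetical MSSQL table where both flags fire: `id` is an identity column and `total` a computed column, so both resolve to a ColumnMutability of all-false, while `price` and `qty` stay fully mutable.

```sql
-- Hypothetical table: sys.columns reports is_identity = 1 for id and
-- is_computed = 1 for total, so both are dropped from insert/update schemas.
CREATE TABLE line_item (
  id INT IDENTITY PRIMARY KEY,
  price MONEY NOT NULL,
  qty INT NOT NULL,
  total AS (price * qty)
);
```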
@@ -20,12 +20,7 @@ import Hasura.SQL.Backend (BackendType (MSSQL))
 data BackendInsert v = BackendInsert
   { -- | @if_matched@ can be omitted (and in that case will be @Nothing@).
     -- If omitted, we only insert new rows (without upserting).
-    _biIfMatched :: Maybe (IfMatched v),
-    -- | identity columns are needed for the sql generation and are not part
-    -- of the user input. If the table has identity columns we need to add
-    -- the SQL statements @SET IDENTITY_INSERT ...@ to be able to insert
-    -- into that table.
-    _biIdentityColumns :: [ColumnName]
+    _biIfMatched :: Maybe (IfMatched v)
   }
 
 deriving instance (Backend 'MSSQL, Show (IfMatched v), Show v) => Show (BackendInsert v)
@@ -134,6 +134,8 @@ resolveDatabaseMetadata ::
   SourceTypeCustomization ->
   m (Either QErr (ResolvedSource ('Postgres pgKind)))
 resolveDatabaseMetadata sourceConfig sourceCustomization = runExceptT do
+  runTx (_pscExecCtx sourceConfig) Q.ReadWrite ensureMetadataSupportingDefinitions
+
   (tablesMeta, functionsMeta, pgScalars) <- runTx (_pscExecCtx sourceConfig) Q.ReadOnly $ do
     tablesMeta <- fetchTableMetadata
     functionsMeta <- fetchFunctionMetadata
@@ -292,6 +294,11 @@ upMigrationsUntil43 =
       (migrationsFromFile [5 .. 40]) ++ migrationsFromFile [42 .. 43]
   )
 
+-- | Ensure that the supporting definitions used in metadata fetching have been
+-- loaded.
+ensureMetadataSupportingDefinitions :: forall m. MonadTx m => m ()
+ensureMetadataSupportingDefinitions = liftTx $ Q.multiQE defaultTxErrorHandler $(makeRelativeToProject "src-rsr/pg_metadata_lib.sql" >>= Q.sqlFromFile)
+
 -- | Fetch Postgres metadata of all user tables
 fetchTableMetadata ::
   forall pgKind m.
@@ -320,7 +320,12 @@ mkInsertObject objects tableInfo backendInsert insertPerms updatePerms =
     updateCheck = (fmap . fmap . fmap) partialSQLExpToUnpreparedValue $ upiCheck =<< updatePerms
     defaultValues =
       Map.union (partialSQLExpToUnpreparedValue <$> ipiSet insertPerms) $
-        Map.fromList [(column, UVLiteral $ columnDefaultValue @b column) | column <- ciColumn <$> columns]
+        Map.fromList
+          [ (column, UVLiteral $ columnDefaultValue @b column)
+            | ci <- columns,
+              _cmIsInsertable (ciMutability ci),
+              let column = ciColumn ci
+          ]
 
 -- delete
 
@@ -230,8 +230,8 @@ buildInsPermInfo source tn fieldInfoMap (PermDef _rn (InsPerm checkCond set mCol
   return (InsPermInfo (HS.fromList insColsWithoutPresets) be setColsSQL backendOnly reqHdrs, deps)
   where
     backendOnly = Just True == mBackendOnly
-    allCols = map ciColumn $ getCols fieldInfoMap
-    insCols = maybe allCols (convColSpec fieldInfoMap) mCols
+    allInsCols = map ciColumn $ filter (_cmIsInsertable . ciMutability) $ getCols fieldInfoMap
+    insCols = interpColSpec allInsCols (fromMaybe PCStar mCols)
     relInInsErr = "Only table columns can have insert permissions defined, not relationships or other field types"
 
 instance IsPerm InsPerm where
@@ -251,7 +251,7 @@ buildSelPermInfo ::
   SelPerm b ->
   m (WithDeps (SelPermInfo b))
 buildSelPermInfo source tn fieldInfoMap sp = withPathK "permission" $ do
-  let pgCols = convColSpec fieldInfoMap $ spColumns sp
+  let pgCols = interpColSpec (map ciColumn $ (getCols fieldInfoMap)) $ spColumns sp
 
   (boolExp, boolExpDeps) <-
     withPathK "filter" $
@@ -338,7 +338,7 @@ buildUpdPermInfo source tn fieldInfoMap (UpdPerm colSpec set fltr check) = do
         <<> " is not updatable and so cannot have update permissions defined"
     )
 
-  let updColDeps = map (mkColDep @b DRUntyped source tn) updCols
+  let updColDeps = map (mkColDep @b DRUntyped source tn) allUpdCols
       deps = mkParentDep @b source tn : beDeps ++ maybe [] snd checkExpr ++ updColDeps ++ setColDeps
       depHeaders = getDependentHeaders fltr
       reqHeaders = depHeaders `HS.union` (HS.fromList setHeaders)
@@ -346,7 +346,8 @@ buildUpdPermInfo source tn fieldInfoMap (UpdPerm colSpec set fltr check) = do
 
   return (UpdPermInfo (HS.fromList updColsWithoutPreSets) tn be (fst <$> checkExpr) setColsSQL reqHeaders, deps)
   where
-    updCols = convColSpec fieldInfoMap colSpec
+    allUpdCols = map ciColumn $ filter (_cmIsUpdatable . ciMutability) $ getCols fieldInfoMap
+    updCols = interpColSpec allUpdCols colSpec
    relInUpdErr = "Only table columns can have update permissions defined, not relationships or other field types"
 
 instance IsPerm UpdPerm where
|
||||
( CreatePerm (..),
|
||||
DropPerm (..),
|
||||
assertPermDefined,
|
||||
convColSpec,
|
||||
interpColSpec,
|
||||
getDepHeadersFromVal,
|
||||
getDependentHeaders,
|
||||
procBoolExp,
|
||||
@ -23,9 +23,11 @@ import Hasura.RQL.Types
|
||||
import Hasura.Server.Utils
|
||||
import Hasura.Session
|
||||
|
||||
convColSpec :: FieldInfoMap (FieldInfo b) -> PermColSpec b -> [Column b]
|
||||
convColSpec _ (PCCols cols) = cols
|
||||
convColSpec cim PCStar = map ciColumn $ getCols cim
|
||||
-- | Intrepet a 'PermColSpec' column specification, which can either refer to a
|
||||
-- list of named columns or all columns.
|
||||
interpColSpec :: [Column b] -> PermColSpec b -> [Column b]
|
||||
interpColSpec _ (PCCols cols) = cols
|
||||
interpColSpec allColumns PCStar = allColumns
|
||||
|
||||
permissionIsDefined ::
|
||||
Maybe (RolePermInfo backend) -> PermAccessor backend a -> Bool
|
||||
|
@@ -172,7 +172,12 @@ convInsertQuery objsParser sessVarBldr prepFn (InsertQuery tableName _ val oC mR
 
   let mutOutput = mkDefaultMutFlds mAnnRetCols
 
-  let defInsVals = HM.fromList [(column, S.columnDefaultValue) | column <- ciColumn <$> getCols fieldInfoMap]
+  let defInsVals =
+        HM.fromList
+          [ (ciColumn column, S.columnDefaultValue)
+            | column <- getCols fieldInfoMap,
+              _cmIsInsertable (ciMutability column)
+          ]
       allCols = getCols fieldInfoMap
       insCols = HM.keys defInsVals
 
@@ -48,9 +48,25 @@ LEFT JOIN LATERAL
         'type', coalesce(base_type.typname, "type".typname),
         'is_nullable', NOT "column".attnotnull,
         'description', pg_catalog.col_description("table".oid, "column".attnum),
-        'mutability', jsonb_build_object('is_insertable', true, 'is_updatable', true)
+        'mutability', jsonb_build_object(
+          'is_insertable', NOT (identitypolyfill.attidentity = 'a' OR generatedpolyfill.attgenerated = 's'),
+          'is_updatable', NOT (identitypolyfill.attidentity = 'a' OR generatedpolyfill.attgenerated = 's'))
       )) AS info
   FROM pg_catalog.pg_attribute "column"
+  -- The columns 'pg_attribute.attidentity' and 'pg_attribute.attgenerated' are
+  -- not available in older versions of Postgres, because those versions do not
+  -- implement the concepts the catalog columns represent.
+  -- Therefore we define and use the polyfill functions
+  -- 'hdb_lib.pg_attidentity' and 'hdb_lib.pg_attgenerated', which ensure the
+  -- presence of these columns in this script.
+  INNER JOIN hdb_lib.pg_attidentity() identitypolyfill
+    ON identitypolyfill.attrelid = "column".attrelid
+    AND identitypolyfill.attnum = "column".attnum
+    AND identitypolyfill.attname = "column".attname
+  INNER JOIN hdb_lib.pg_attgenerated() generatedpolyfill
+    ON generatedpolyfill.attrelid = "column".attrelid
+    AND generatedpolyfill.attnum = "column".attnum
+    AND generatedpolyfill.attname = "column".attname
   LEFT JOIN pg_catalog.pg_type "type"
     ON "type".oid = "column".atttypid
   LEFT JOIN pg_catalog.pg_type base_type
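For orientation: in pg_attribute, attidentity = 'a' marks a GENERATED ALWAYS AS IDENTITY column and attgenerated = 's' a stored generated column; both now map to is_insertable/is_updatable = false. A quick check against the hypothetical author table from earlier:

```sql
-- Inspect the catalog flags this query keys on (assumes the example
-- `author` table defined above exists).
SELECT attname, attidentity, attgenerated
FROM pg_catalog.pg_attribute
WHERE attrelid = 'author'::regclass AND attnum > 0;
-- id        -> attidentity  = 'a'
-- full_name -> attgenerated = 's'
```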
@@ -5,7 +5,7 @@ SELECT ISNULL(
     JSON_QUERY([column].json) AS [joined_sys_column],
     JSON_QUERY([primary_key].json) AS [joined_sys_primary_key]
 FROM sys.objects object
-CROSS APPLY (SELECT [column].name, [column].column_id, [column].is_nullable, [column].is_identity, [column].user_type_id,
+CROSS APPLY (SELECT [column].name, [column].column_id, [column].is_nullable, [column].is_identity, [column].is_computed, [column].user_type_id,
     JSON_QUERY([types].json) AS [joined_sys_type],
     JSON_QUERY(ISNULL([relationships].json,'[]')) AS [joined_foreign_key_columns]
 FROM sys.columns [column]
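The newly selected is_computed flag comes straight from SQL Server's catalog; an equivalent standalone check (using the hypothetical line_item table from earlier):

```sql
-- sys.columns already tracks both flags per column.
SELECT name, is_identity, is_computed
FROM sys.columns
WHERE object_id = OBJECT_ID('line_item');
```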
server/src-rsr/pg_metadata_lib.sql (new file, 49 lines)
@@ -0,0 +1,49 @@
+CREATE SCHEMA IF NOT EXISTS hdb_lib;
+
+CREATE OR REPLACE FUNCTION
+  hdb_lib.pg_attidentity()
+RETURNS TABLE (attrelid oid, attname name, attnum smallint, attidentity char) AS $$
+BEGIN
+  IF current_setting('server_version_num')::int >= 100000
+  THEN RETURN QUERY
+    SELECT a.attrelid, a.attname, a.attnum, a.attidentity::char
+    FROM pg_catalog.pg_attribute a;
+  ELSE
+    -- Always return attidentity = '', indicating that the column is not an
+    -- identity column.
+    RETURN QUERY
+    SELECT a.attrelid, a.attname, a.attnum, ''::char as attidentity
+    FROM pg_catalog.pg_attribute a;
+  END IF;
+END;
+$$ LANGUAGE plpgsql;
+COMMENT ON FUNCTION hdb_lib.pg_attidentity() IS
+'The column "pg_catalog.pg_attribute(attidentity)" was only introduced in PG 10,
+along with the introduction of identity columns.
+This function provides the "attidentity" column in a cross-version compatible way.
+See https://www.postgresql.org/docs/10/catalog-pg-attribute.html for details.
+';
+
+CREATE OR REPLACE FUNCTION
+  hdb_lib.pg_attgenerated()
+RETURNS TABLE (attrelid oid, attname name, attnum smallint, attgenerated char) AS $$
+BEGIN
+  IF current_setting('server_version_num')::int >= 120000
+  THEN RETURN QUERY
+    SELECT a.attrelid, a.attname, a.attnum, a.attgenerated::char
+    FROM pg_catalog.pg_attribute a;
+  ELSE
+    -- Always return attgenerated = '', indicating that the column is not a
+    -- generated column.
+    RETURN QUERY
+    SELECT a.attrelid, a.attname, a.attnum, ''::char as attgenerated
+    FROM pg_catalog.pg_attribute a;
+  END IF;
+END;
+$$ LANGUAGE plpgsql;
+COMMENT ON FUNCTION hdb_lib.pg_attgenerated() IS
+'The column "pg_catalog.pg_attribute(attgenerated)" was only introduced in PG 12,
+along with the introduction of generated columns.
+This function provides the "attgenerated" column in a cross-version compatible way.
+See https://www.postgresql.org/docs/12/catalog-pg-attribute.html for details.
+';
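On servers older than PG 10 (respectively PG 12) the polyfills return '' for every column, so the mutability computation degrades gracefully to "everything mutable". A sanity check one might run by hand, assuming the example author table:

```sql
-- Join the two polyfills the same way the metadata query does.
SELECT i.attname, i.attidentity, g.attgenerated
FROM hdb_lib.pg_attidentity() i
JOIN hdb_lib.pg_attgenerated() g USING (attrelid, attname, attnum)
WHERE i.attrelid = 'author'::regclass AND i.attnum > 0;
```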
@@ -40,9 +40,25 @@ LEFT JOIN LATERAL
         'type', coalesce(base_type.typname, "type".typname),
         'is_nullable', NOT "column".attnotnull,
         'description', pg_catalog.col_description("table".oid, "column".attnum),
-        'mutability', jsonb_build_object('is_insertable', true, 'is_updatable', true)
+        'mutability', jsonb_build_object(
+          'is_insertable', NOT (identitypolyfill.attidentity = 'a' OR generatedpolyfill.attgenerated = 's'),
+          'is_updatable', NOT (identitypolyfill.attidentity = 'a' OR generatedpolyfill.attgenerated = 's'))
       )) AS info
   FROM pg_catalog.pg_attribute "column"
+  -- The columns 'pg_attribute.attidentity' and 'pg_attribute.attgenerated' are
+  -- not available in older versions of Postgres, because those versions do not
+  -- implement the concepts the catalog columns represent.
+  -- Therefore we define and use the polyfill functions
+  -- 'hdb_lib.pg_attidentity' and 'hdb_lib.pg_attgenerated', which ensure the
+  -- presence of these columns in this script.
+  INNER JOIN hdb_lib.pg_attidentity() identitypolyfill
+    ON identitypolyfill.attrelid = "column".attrelid
+    AND identitypolyfill.attnum = "column".attnum
+    AND identitypolyfill.attname = "column".attname
+  INNER JOIN hdb_lib.pg_attgenerated() generatedpolyfill
+    ON generatedpolyfill.attrelid = "column".attrelid
+    AND generatedpolyfill.attnum = "column".attnum
+    AND generatedpolyfill.attname = "column".attname
   LEFT JOIN pg_catalog.pg_type "type"
     ON "type".oid = "column".atttypid
   LEFT JOIN pg_catalog.pg_type base_type
@@ -151,4 +167,4 @@ LEFT JOIN
 WHERE "table".relkind IN ('r', 't', 'v', 'm', 'f', 'p')
   -- and tables not from any system schemas
   AND schema.nspname NOT LIKE 'pg_%'
-  AND schema.nspname NOT IN ('information_schema', 'hdb_catalog');
+  AND schema.nspname NOT IN ('information_schema', 'hdb_catalog', 'hdb_lib');
@@ -8,14 +8,12 @@ query:
   mutation($float_value: Float) {
     insert_test_types(
       objects: [
-        { c_pkey: 1
-        , c50_float: $float_value
+        { c50_float: $float_value
         }
       ]
     ){
       affected_rows
       returning{
         c_pkey
         c50_float
       }
     }
@@ -25,5 +23,4 @@ response:
     insert_test_types:
       affected_rows: 1
       returning:
-      - c_pkey: 1
-        c50_float: 2.23e-308
+      - c50_float: 2.23e-308
@@ -6,11 +6,9 @@ query:
   mutation{
     insert_test_types(
       objects: [
-        { c_pkey: 1
-        , c13_text: "one"
+        { c13_text: "one"
         },
-        { c_pkey: 2
-        , c13_text: "two"
+        { c13_text: "two"
         }
       ]
     ){
@@ -6,8 +6,7 @@ query:
   mutation {
     insert_test_types(
       objects: [
-        { c_pkey: 1
-        , c1_smallint: 3277
+        { c1_smallint: 3277
         , c2_integer: 2147483647
         , c4_decimal: 123.45
         , c5_numeric: 1.234
@@ -6,3 +6,4 @@ args:
   source: mssql
   sql: |
     delete from test_types;
+    DBCC CHECKIDENT ('test_types', RESEED, 0);
@@ -13,15 +13,15 @@
       objects: [
         {
           content: "Updated Article 1 content",
-          id: 1
+          title: "Article 1"
         },
         {
           content: "Updated Article 2 content",
-          id: 2
+          title: "Article 2"
         }
       ],
       if_matched: {
-        match_columns: id,
+        match_columns: title,
         update_columns: []
       }
     ) {
@@ -47,15 +47,15 @@
       objects: [
         {
           content: "Updated Article 1 content",
-          id: 1
+          title: "Article 1"
         },
         {
           content: "Updated Article 2 content",
-          id: 2
+          title: "Article 2"
         }
       ],
       if_matched: {
-        match_columns: id,
+        match_columns: title,
       }
     ) {
       returning {
@@ -16,15 +16,15 @@ query:
       objects: [
         {
           content: "Updated Article 1 content",
-          id: 1
+          title: "Article 1"
         },
         {
           content: "Updated Article 2 content",
-          id: 2
+          title: "Article 2"
         }
       ],
       if_matched: {
-        match_columns: id,
+        match_columns: title,
         update_columns: content
       }
     ) {
@@ -20,11 +20,11 @@ args:
         published_on DATETIME
       );
       CREATE TABLE person (
-        id INT IDENTITY PRIMARY KEY,
+        id INT PRIMARY KEY,
         details TEXT NOT NULL
       );
       CREATE TABLE orders (
-        id INT IDENTITY PRIMARY KEY,
+        id INT PRIMARY KEY,
         placed DATETIME NOT NULL,
         shipped DATETIME
       );
@@ -33,7 +33,7 @@ args:
       )
       ;
 
-      INSERT INTO orders (placed)
+      INSERT INTO orders (id, placed)
       VALUES
-        ('2017-08-19 14:22:11')
+        (1, '2017-08-19 14:22:11')
       ;
@@ -7,18 +7,18 @@ response:
     affected_rows: 2
     returning:
     - id: 5
-      title: Article by Author 5
+      title: Article by Author 3
       content: Content for Article 5
       tags: '[]'
       author:
-        id: 5
-        name: Author 5
+        id: 3
+        name: Author 3
         articles_aggregate:
           aggregate:
             count: 1
           articles:
           - id: 5
-            title: Article by Author 5
+            title: Article by Author 3
             content: Content for Article 5
 query:
   query: |
@@ -26,13 +26,12 @@ query:
     insert_article(
       objects: [{
-        id: 5
-        title: "Article by Author 5"
+        title: "Article by Author 3"
         content: "Content for Article 5"
         tags: "[]"
         author: {
           data: {
-            id: 5
-            name: "Author 5"
+            name: "Author 3"
           }
         }
       }]
@@ -22,6 +22,9 @@ query:
           }
         ]
       }
     },
+    {
+      name: "Author 4"
+    }
   ]
 ) {
@@ -47,7 +50,7 @@ query:
 response:
   data:
     insert_author:
-      affected_rows: 3
+      affected_rows: 4
       returning:
       - id: 3
         name: Author 3
@@ -67,3 +70,7 @@ response:
       - id: 5
         title: Article 2 by Author 3
         content: Content for Article 2 by Author 3
+    - id: 4
+      name: Author 4
+      articles: []
+      fetch_articles: []
@@ -7,7 +7,6 @@ query:
     insert_author(
       objects: [
         {
-          id: 4
           name: "Author 4"
           articles: {
             data: []
@@ -17,7 +16,6 @@ query:
     ){
       affected_rows
       returning{
-        id
         name
         articles{
           id
@@ -37,7 +35,6 @@ response:
     insert_author:
       affected_rows: 1
       returning:
-      - id: 4
-        name: Author 4
+      - name: Author 4
         articles: []
         fetch_articles: []
@@ -7,7 +7,6 @@ query:
     insert_author(
       objects: [
         {
-          id: 4
           name: "Author 4"
           articles: null
         }
@@ -15,7 +14,6 @@ query:
     ){
       affected_rows
       returning{
-        id
         name
         articles{
           id
@@ -30,6 +28,5 @@ response:
     insert_author:
       affected_rows: 1
       returning:
-      - id: 4
-        name: Author 4
+      - name: Author 4
         articles: []
@@ -6,12 +6,32 @@ args:
 - type: run_sql
   args:
     sql: |
-      create table author(
+      -- Use identity columns if PG version 10 or later
+      CREATE FUNCTION create_author() RETURNS int AS
+      $$
+      BEGIN
+        IF current_setting('server_version_num')::int >= 100000
+        THEN
+          execute $c$create table author(
+            id int generated always as identity primary key,
+            name text unique,
+            is_registered boolean not null default false,
+            emails text[] not null default '{}'::text[]
+          )$c$;
+        ELSE
+          execute $c$ create table author(
             id serial primary key,
             name text unique,
             is_registered boolean not null default false,
             emails text[] not null default '{}'::text[]
-      );
+          )$c$;
+        END IF;
+        RETURN 0;
+      END;
+      $$ LANGUAGE plpgsql;
+
+      select from create_author();
 
       CREATE TABLE article (
         id SERIAL PRIMARY KEY,
         title TEXT,
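The version gate works because server_version_num encodes the server version as a single integer (major * 10000 + minor):

```sql
-- e.g. '100017' on PG 10.17 and '130005' on PG 13.5; 100000 is the first
-- PG 10 release, where identity columns were introduced.
SELECT current_setting('server_version_num')::int;
```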
@@ -28,4 +28,5 @@ args:
       drop table author_detail cascade;
       drop table article cascade;
       drop table author;
+      drop function create_author();
     cascade: true
@@ -15,13 +15,13 @@
     insert_article (
       objects: [
         {
-          id: 1
+          title: "Article 1"
           content: "Updated Article 1 content"
           author_id: 1
         }
       ],
       if_matched: {
-        match_columns: id
+        match_columns: title
         update_columns: []
       }
     ) {
@@ -19,14 +19,13 @@
     insert_article (
       objects: [
         {
-          id: 1
-          title: "unexpected"
+          title: "Article 1"
           content: "Updated Article 1 content"
           author_id: 1
         }
       ],
       if_matched: {
-        match_columns: id
+        match_columns: title
         update_columns: [content]
       }
     ) {
@@ -58,14 +57,13 @@
     insert_article (
       objects: [
         {
-          id: 1
-          title: "unexpected"
+          title: "Article 1"
           content: "Updated Article 1 content"
           author_id: 1
         }
       ],
       if_matched: {
-        match_columns: id
+        match_columns: title
         update_columns: [content]
       }
     ) {
@@ -91,14 +89,13 @@
     insert_article (
      objects: [
         {
-          id: 1
-          title: "unexpected"
+          title: "Article"
           content: "Updated Article 1 content"
           author_id: 1
         }
       ],
       if_matched: {
-        match_columns: id
+        match_columns: title
         update_columns: [content]
       }
     ) {
@@ -13,7 +13,7 @@ args:
 
       CREATE TABLE article (
         id int identity NOT NULL PRIMARY KEY,
-        title TEXT,
+        title varchar(100),
         content TEXT,
         author_id INTEGER REFERENCES author(id),
         is_published bit,
@@ -42,7 +42,7 @@ args:
   args:
     source: mssql
     table: article
-    role: user_with_select_and_inesrt
+    role: user_with_select_and_insert
     permission:
       columns: '*'
       filter: