server: reduce the number of backend dispatches

Fixes https://github.com/hasura/graphql-engine-mono/issues/712

Main point of interest: the `Hasura.SQL.AnyBackend` module.

This PR creates an `Exists` type, indexed by the backend-indexed type it wraps and the constraint it packs, while hiding all of its complexity by not exporting the constructor.
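The module itself is not visible in the hunks below, so here is a rough, self-contained sketch of the idea. The helper names (`mkAnyBackend`, `unpackAnyBackend`, `dispatchAnyBackend`) are taken from the call sites in this diff; the `BackendTag`/`GEq` plumbing is an assumption (hinted at by the `Data.GADT.Compare.Extended` addition to the cabal file), and this sketch threads the constraint at dispatch time rather than packing it, so the real definitions will differ in detail:

```haskell
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE ConstraintKinds     #-}
{-# LANGUAGE DataKinds           #-}
{-# LANGUAGE GADTs               #-}
{-# LANGUAGE KindSignatures      #-}
{-# LANGUAGE RankNTypes          #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications    #-}

-- Simplified stand-in for the new module; not the real definitions.
module AnyBackendSketch
  ( BackendType (..)
  , HasTag
  , AnyBackend          -- constructor deliberately not exported
  , mkAnyBackend
  , unpackAnyBackend
  , dispatchAnyBackend
  ) where

import Data.GADT.Compare  (GEq (..))  -- from the "some"/"dependent-sum" package
import Data.Kind          (Constraint, Type)
import Data.Type.Equality ((:~:) (Refl))

data BackendType = Postgres | MSSQL

-- One value-level tag per backend; 'geq' recovers a type-equality proof,
-- which is what replaces 'Typeable.cast'.
data BackendTag (b :: BackendType) where
  PostgresTag :: BackendTag 'Postgres
  MSSQLTag    :: BackendTag 'MSSQL

instance GEq BackendTag where
  geq PostgresTag PostgresTag = Just Refl
  geq MSSQLTag    MSSQLTag    = Just Refl
  geq _           _           = Nothing

class HasTag (b :: BackendType) where
  backendTag :: BackendTag b

instance HasTag 'Postgres where backendTag = PostgresTag
instance HasTag 'MSSQL    where backendTag = MSSQLTag

-- The existential: a backend-indexed value packed with its tag.
data AnyBackend (i :: BackendType -> Type) where
  AnyBackend :: BackendTag b -> i b -> AnyBackend i

-- Pack a value for a known backend.
mkAnyBackend :: forall b i. HasTag b => i b -> AnyBackend i
mkAnyBackend = AnyBackend (backendTag @b)

-- Recover the value at a *specific* backend, if the tags match.
unpackAnyBackend :: forall b i. HasTag b => AnyBackend i -> Maybe (i b)
unpackAnyBackend (AnyBackend tag value) =
  case geq tag (backendTag @b) of
    Just Refl -> Just value
    Nothing   -> Nothing

-- Run a backend-polymorphic continuation, given that every backend satisfies
-- the constraint chosen at the call site (e.g. @BackendExecute in this diff).
dispatchAnyBackend
  :: forall (c :: BackendType -> Constraint) i r
   . (c 'Postgres, c 'MSSQL)
  => AnyBackend i
  -> (forall b. c b => i b -> r)
  -> r
dispatchAnyBackend (AnyBackend tag value) f = case tag of
  PostgresTag -> f value
  MSSQLTag    -> f value
```

Matching on the tag (or dispatching through the constraint) hands the concrete backend type back to the compiler, which is what lets every call site drop `Typeable.cast` and every per-backend `case backendTag @b of ...` match.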

Existential constructors/types which are no longer (directly) existential:
- [x] BackendSourceInfo :: BackendSourceInfo
- [x] BackendSourceMetadata :: BackendSourceMetadata
- [x] MOSourceObjId :: MetadataObjId
- [x] SOSourceObj :: SchemaObjId
- [x] RFDB :: RootField
- [x] LQP :: LiveQueryPlan
- [x] ExecStepDB :: ExecutionStep

This PR also removes ALL usages of `Typeable.cast` from our codebase. We still need to derive `Typeable` in a few places in order to be able to derive `Data` in one place. I have not dug deeper to see why this is needed.
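For illustration only, a minimal usage example against the sketch above, mirroring how the hunks below swap `cast` calls and explicit backend-tag matches for `unpackAnyBackend` / `dispatchAnyBackend` (`SourceLabel` is a made-up stand-in for backend-indexed types such as `SourceInfo` or `DBStepInfo`):

```haskell
{-# LANGUAGE DataKinds        #-}
{-# LANGUAGE KindSignatures   #-}
{-# LANGUAGE TypeApplications #-}

module AnyBackendUsage where

import AnyBackendSketch

-- A made-up backend-indexed type, standing in for SourceInfo, DBStepInfo, etc.
newtype SourceLabel (b :: BackendType) = SourceLabel String

packed :: AnyBackend SourceLabel
packed = mkAnyBackend (SourceLabel "default" :: SourceLabel 'Postgres)

-- Replaces 'Typeable.cast': succeeds only if the packed backend is Postgres.
asPostgres :: Maybe (SourceLabel 'Postgres)
asPostgres = unpackAnyBackend packed

-- Replaces 'case backendTag @b of PostgresTag -> ...; MSSQLTag -> ...':
-- the continuation is written once and works for whichever backend is inside.
label :: String
label = dispatchAnyBackend @HasTag packed (\(SourceLabel l) -> l)
```

In the hunks below, the dispatched constraint is a real backend class (`BackendExecute`, `BackendSchema`, `BackendMetadata`, `BackendTransport`), picked via a type application at each dispatch site.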

GitOrigin-RevId: bb47e957192e4bb0af4c4116aee7bb92f7983445
Vladimir Ciobanu 2021-03-15 15:02:58 +02:00 committed by hasura-bot
parent 7fe46423b8
commit da8f6981d4
47 changed files with 1219 additions and 719 deletions

View File

@ -537,6 +537,7 @@ library
, Hasura.Eventing.Common
, Data.GADT.Compare.Extended
, Data.Tuple.Extended
, Hasura.SQL.AnyBackend
, Hasura.SQL.Backend
, Hasura.SQL.GeoJSON
, Hasura.SQL.Time

View File

@ -13,6 +13,8 @@ import qualified Network.HTTP.Types as HTTP
import Data.Text.Extended
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.Backends.MSSQL.Connection
import Hasura.Backends.MSSQL.Plan
import Hasura.Backends.MSSQL.ToQuery
@ -63,7 +65,10 @@ msDBQueryPlan _env _manager _reqHeaders userInfo _directives sourceConfig qrf =
let queryString = ODBC.renderQuery $ toQueryPretty select
pool = _mscConnectionPool sourceConfig
odbcQuery = encJFromText <$> runJSONPathQuery pool (toQueryFlat select)
pure $ ExecStepDB sourceConfig (Just queryString) [] odbcQuery
pure
$ ExecStepDB []
. AB.mkAnyBackend
$ DBStepInfo sourceConfig (Just queryString) odbcQuery
-- mutation

View File

@ -4,6 +4,8 @@ module Hasura.Backends.Postgres.DDL.Field
)
where
import Hasura.Prelude
import qualified Control.Monad.Validate as MV
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as S
@ -12,9 +14,10 @@ import qualified Language.GraphQL.Draft.Syntax as G
import Data.Text.Extended
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.Backends.Postgres.DDL.Function
import Hasura.Backends.Postgres.SQL.Types
import Hasura.Prelude
import Hasura.RQL.DDL.RemoteRelationship.Validate
import Hasura.RQL.Types.Column
import Hasura.RQL.Types.ComputedField
@ -222,10 +225,19 @@ buildRemoteFieldInfo remoteRelationship
let table = rtrTable remoteRelationship
source = rtrSource remoteRelationship
schemaDependencies =
let tableDep = SchemaDependency (SOSourceObj source $ SOITable table) DRTable
let tableDep = SchemaDependency
(SOSourceObj source
$ AB.mkAnyBackend
$ SOITable table)
DRTable
columnsDep =
map
(flip SchemaDependency DRRemoteRelationship . SOSourceObj source . SOITableObj table . TOCol . pgiColumn)
(flip SchemaDependency DRRemoteRelationship
. SOSourceObj source
. AB.mkAnyBackend
. SOITableObj table
. TOCol
. pgiColumn)
$ S.toList $ _rfiHasuraFields remoteField
remoteSchemaDep =
SchemaDependency (SORemoteSchema remoteSchemaName) DRRemoteSchema

View File

@ -4,6 +4,8 @@ module Hasura.Backends.Postgres.DDL.Function
)
where
import Hasura.Prelude
import qualified Control.Monad.Validate as MV
import qualified Data.HashSet as S
import qualified Data.Sequence as Seq
@ -13,8 +15,9 @@ import qualified Language.GraphQL.Draft.Syntax as G
import Control.Lens hiding (from, index, op, (.=))
import Data.Text.Extended
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.Backends.Postgres.SQL.Types
import Hasura.Prelude
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.Error
import Hasura.RQL.Types.Function
@ -88,7 +91,7 @@ buildFunctionInfo source qf systemDefined FunctionConfig{..} permissions rawFunc
-- commited to this check, and it would be backwards compatible to remove
-- it, but this seemed like an obvious case:
when (funVol /= FTVOLATILE && _fcExposedAs == Just FEAMutation) $
throwValidateError $ NonVolatileFunctionAsMutation
throwValidateError NonVolatileFunctionAsMutation
-- If 'exposed_as' is omitted we'll infer it from the volatility:
let exposeAs = flip fromMaybe _fcExposedAs $ case funVol of
FTVOLATILE -> FEAMutation
@ -107,7 +110,11 @@ buildFunctionInfo source qf systemDefined FunctionConfig{..} permissions rawFunc
retJsonAggSelect
pure ( functionInfo
, SchemaDependency (SOSourceObj @'Postgres source $ SOITable retTable) DRTable
, SchemaDependency
(SOSourceObj source
$ AB.mkAnyBackend
$ SOITable retTable)
DRTable
)
validateFunctionArgNames = do

View File

@ -14,7 +14,8 @@ import Control.Monad.Trans.Control (MonadBaseControl)
import Data.Aeson.TH
import Data.List.Extended (duplicates)
import Data.Text.Extended
import Data.Typeable (cast)
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.Backends.Postgres.DDL.Table
import Hasura.Backends.Postgres.SQL.Types hiding (TableName)
@ -169,16 +170,28 @@ getTableChangeDeps source tn tableDiff = do
sc <- askSchemaCache
-- for all the dropped columns
droppedColDeps <- fmap concat $ forM droppedCols $ \droppedCol -> do
let objId = SOSourceObj source $ SOITableObj tn $ TOCol droppedCol
let objId = SOSourceObj source
$ AB.mkAnyBackend
$ SOITableObj tn
$ TOCol droppedCol
return $ getDependentObjs sc objId
-- for all dropped constraints
droppedConsDeps <- fmap concat $ forM droppedFKeyConstraints $ \droppedCons -> do
let objId = SOSourceObj source $ SOITableObj tn $ TOForeignKey droppedCons
let objId = SOSourceObj source
$ AB.mkAnyBackend
$ SOITableObj tn
$ TOForeignKey droppedCons
return $ getDependentObjs sc objId
return $ droppedConsDeps <> droppedColDeps <> droppedComputedFieldDeps
where
TableDiff _ droppedCols _ _ droppedFKeyConstraints computedFieldDiff _ _ = tableDiff
droppedComputedFieldDeps = map (SOSourceObj source . SOITableObj tn . TOComputedField) $ _cfdDropped computedFieldDiff
droppedComputedFieldDeps =
map
(SOSourceObj source
. AB.mkAnyBackend
. SOITableObj tn
. TOComputedField)
$ _cfdDropped computedFieldDiff
data SchemaDiff (b :: BackendType)
= SchemaDiff
@ -201,7 +214,10 @@ getSchemaChangeDeps
getSchemaChangeDeps source schemaDiff = do
-- Get schema cache
sc <- askSchemaCache
let tableIds = map (SOSourceObj source . SOITable) droppedTables
let tableIds =
map
(SOSourceObj source . AB.mkAnyBackend . SOITable)
droppedTables
-- Get the dependent of the dropped tables
let tableDropDeps = concatMap (getDependentObjs sc) tableIds
tableModDeps <- concat <$> traverse (uncurry (getTableChangeDeps source)) alteredTables
@ -211,15 +227,16 @@ getSchemaChangeDeps source schemaDiff = do
where
SchemaDiff droppedTables alteredTables = schemaDiff
getIndirectDep (SOSourceObj s srcObjId') =
cast srcObjId' >>= \srcObjId -> case srcObjId of
SOITableObj tn _ ->
getIndirectDep :: SchemaObjId -> Maybe (SourceObjId 'Postgres)
getIndirectDep (SOSourceObj s exists) =
AB.unpackAnyBackend exists >>= \case
srcObjId@(SOITableObj tn _) ->
-- Indirect dependancy shouldn't be of same source and not among dropped tables
if not (s == source && tn `HS.member` HS.fromList droppedTables) then
Just srcObjId
else Nothing
_ -> Just srcObjId
getIndirectDep _ = Nothing
if not (s == source && tn `HS.member` HS.fromList droppedTables)
then Just srcObjId
else Nothing
srcObjId -> Just srcObjId
getIndirectDep _ = Nothing
data FunctionDiff
= FunctionDiff
@ -282,7 +299,10 @@ withMetadataCheck source cascade txAccess action = do
indirectSourceDeps <- getSchemaChangeDeps source schemaDiff
let indirectDeps = map (SOSourceObj source) indirectSourceDeps
let indirectDeps =
map
(SOSourceObj source . AB.mkAnyBackend)
indirectSourceDeps
-- Report back with an error if cascade is not set
when (indirectDeps /= [] && not cascade) $ reportDepsExt indirectDeps []
@ -381,7 +401,7 @@ processTableChanges source ti tableDiff = do
modifiedCustomColumnNames = foldl' (flip M.delete) customColumnNames droppedCols
when (modifiedCustomColumnNames /= customColumnNames) $
tell $ MetadataModifier $
tableMetadataSetter source tn.tmConfiguration .~ (TableConfig customFields modifiedCustomColumnNames customName)
tableMetadataSetter source tn.tmConfiguration .~ TableConfig customFields modifiedCustomColumnNames customName
procAlteredCols sc tn = for_ alteredCols $
\( RawColumnInfo oldName _ oldType _ _
@ -390,7 +410,11 @@ processTableChanges source ti tableDiff = do
renameColumnInMetadata oldName newName source tn (_tciFieldInfoMap ti)
| oldType /= newType -> do
let colId = SOSourceObj source $ SOITableObj tn $ TOCol oldName
let colId =
SOSourceObj source
$ AB.mkAnyBackend
$ SOITableObj tn
$ TOCol oldName
typeDepObjs = getDependentObjsWith (== DROnType) sc colId
unless (null typeDepObjs) $ throw400 DependencyError $

View File

@ -9,6 +9,8 @@ module Hasura.Backends.Postgres.DDL.Table
)
where
import Hasura.Prelude
import qualified Data.Environment as Env
import qualified Data.HashMap.Strict as Map
import qualified Data.List.NonEmpty as NE
@ -25,10 +27,11 @@ import Control.Monad.Validate
import Data.List (delete)
import Data.Text.Extended
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.Backends.Postgres.Connection
import Hasura.Backends.Postgres.SQL.DML
import Hasura.Backends.Postgres.SQL.Types
import Hasura.Prelude
import Hasura.RQL.DDL.Headers
import Hasura.RQL.Types.Column
import Hasura.RQL.Types.Common
@ -37,7 +40,7 @@ import Hasura.RQL.Types.EventTrigger
import Hasura.RQL.Types.SchemaCache
import Hasura.RQL.Types.SchemaCacheTypes
import Hasura.RQL.Types.Table
import Hasura.SQL.Backend
import Hasura.SQL.Backend (BackendType (Postgres))
import Hasura.SQL.Types
import Hasura.Server.Types
import Hasura.Server.Utils
@ -179,7 +182,11 @@ buildEventTriggerInfo env source qt (EventTriggerConf name def webhook webhookFr
webhookInfo <- getWebhookInfoFromConf env webhookConf
headerInfos <- getHeaderInfosFromConf env headerConfs
let eTrigInfo = EventTriggerInfo () name def rconf webhookInfo headerInfos
tabDep = SchemaDependency (SOSourceObj source $ SOITable qt) DRParent
tabDep = SchemaDependency
(SOSourceObj source
$ AB.mkAnyBackend
$ SOITable qt)
DRParent
pure (eTrigInfo, tabDep:getTrigDefDeps source qt def)
getTrigDefDeps :: SourceName -> QualifiedTable -> TriggerOpsDef -> [SchemaDependency]
@ -193,10 +200,18 @@ getTrigDefDeps source qt (TriggerOpsDef mIns mUpd mDel _) =
subsOpSpecDeps os =
let cols = getColsFromSub $ sosColumns os
colDeps = flip map cols $ \col ->
SchemaDependency (SOSourceObj source $ SOITableObj qt (TOCol col)) DRColumn
SchemaDependency
(SOSourceObj source
$ AB.mkAnyBackend
$ SOITableObj qt (TOCol col))
DRColumn
payload = maybe [] getColsFromSub (sosPayload os)
payloadDeps = flip map payload $ \col ->
SchemaDependency (SOSourceObj source $ SOITableObj qt (TOCol col)) DRPayload
SchemaDependency
(SOSourceObj source
$ AB.mkAnyBackend
$ SOITableObj qt (TOCol col))
DRPayload
in colDeps <> payloadDeps
getColsFromSub sc = case sc of
SubCStar -> []

View File

@ -18,6 +18,7 @@ import qualified Hasura.RQL.IR.Insert as IR
import qualified Hasura.RQL.IR.Returning as IR
import qualified Hasura.RQL.IR.Select as IR
import qualified Hasura.RQL.IR.Update as IR
import qualified Hasura.SQL.AnyBackend as AB
import qualified Hasura.Tracing as Tracing
import Hasura.Backends.Postgres.Connection
@ -64,7 +65,10 @@ pgDBQueryPlan env manager reqHeaders userInfo _directives sourceConfig qrf = do
(preparedQuery, PlanningSt _ _ planVals expectedVariables) <- flip runStateT initPlanningSt $ traverseQueryDB prepareWithPlan qrf
validateSessionVariables expectedVariables $ _uiSession userInfo
let (action, preparedSQL) = mkCurPlanTx env manager reqHeaders userInfo $ irToRootFieldPlan planVals preparedQuery
pure $ ExecStepDB sourceConfig preparedSQL [] action
pure
$ ExecStepDB []
. AB.mkAnyBackend
$ DBStepInfo sourceConfig preparedSQL action
-- mutation
@ -164,7 +168,7 @@ pgDBMutationPlan
-> MutationDB 'Postgres (UnpreparedValue 'Postgres)
-> m ExecutionStep
pgDBMutationPlan env manager reqHeaders userInfo stringifyNum sourceConfig mrf =
ExecStepDB sourceConfig Nothing [] <$> case mrf of
go <$> case mrf of
MDBInsert s -> convertInsert env userSession remoteJoinCtx s stringifyNum
MDBUpdate s -> convertUpdate env userSession remoteJoinCtx s stringifyNum
MDBDelete s -> convertDelete env userSession remoteJoinCtx s stringifyNum
@ -172,7 +176,7 @@ pgDBMutationPlan env manager reqHeaders userInfo stringifyNum sourceConfig mrf =
where
userSession = _uiSession userInfo
remoteJoinCtx = (manager, reqHeaders, userInfo)
go = ExecStepDB [] . AB.mkAnyBackend . DBStepInfo sourceConfig Nothing
-- subscription

View File

@ -21,6 +21,7 @@ import qualified Hasura.GraphQL.Schema.Backend as BS
import qualified Hasura.GraphQL.Schema.Build as GSB
import qualified Hasura.RQL.IR.Select as IR
import qualified Hasura.RQL.IR.Update as IR
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.Backends.Postgres.SQL.DML as PG hiding (CountType)
import Hasura.Backends.Postgres.SQL.Types as PG hiding (FunctionName, TableName)
@ -85,7 +86,10 @@ buildTableRelayQueryFields
-> m (Maybe (FieldParser n (QueryRootField UnpreparedValue)))
buildTableRelayQueryFields sourceName sourceInfo tableName tableInfo gqlName pkeyColumns selPerms = do
let
mkRF = RFDB sourceName sourceInfo . QDBR
mkRF = RFDB sourceName
. AB.mkAnyBackend
. SourceConfigWith sourceInfo
. QDBR
fieldName = gqlName <> $$(G.litName "_connection")
fieldDesc = Just $ G.Description $ "fetch data from the table: " <>> tableName
optionalFieldParser (mkRF . QDBConnection) $ selectTableConnection tableName fieldName fieldDesc pkeyColumns selPerms
@ -103,7 +107,10 @@ buildFunctionRelayQueryFields
buildFunctionRelayQueryFields sourceName sourceInfo functionName functionInfo tableName pkeyColumns selPerms = do
funcName <- functionGraphQLName @'Postgres functionName `onLeft` throwError
let
mkRF = RFDB sourceName sourceInfo . QDBR
mkRF = RFDB sourceName
. AB.mkAnyBackend
. SourceConfigWith sourceInfo
. QDBR
fieldName = funcName <> $$(G.litName "_connection")
fieldDesc = Just $ G.Description $ "execute function " <> functionName <<> " which returns " <>> tableName
optionalFieldParser (mkRF . QDBConnection) $ selectFunctionConnection functionInfo fieldName fieldDesc pkeyColumns selPerms

View File

@ -12,6 +12,7 @@ import qualified Data.HashMap.Strict as M
import Data.Monoid
import qualified Hasura.Backends.Postgres.SQL.DML as S
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.Backends.Postgres.SQL.Types hiding (TableName)
import Hasura.RQL.Types
@ -242,7 +243,11 @@ getColExpDeps source tn = \case
AVRel relInfo relBoolExp ->
let rn = riName relInfo
relTN = riRTable relInfo
pd = SchemaDependency (SOSourceObj @b source $ SOITableObj tn (TORel rn)) DROnType
pd = SchemaDependency
(SOSourceObj source
$ AB.mkAnyBackend
$ SOITableObj tn (TORel rn))
DROnType
in pd : getBoolExpDeps source relTN relBoolExp
getBoolExpDeps
@ -254,7 +259,11 @@ getBoolExpDeps source tn = \case
BoolOr exps -> procExps exps
BoolNot e -> getBoolExpDeps source tn e
BoolExists (GExists refqt whereExp) ->
let tableDep = SchemaDependency (SOSourceObj @b source $ SOITable refqt) DRRemoteTable
let tableDep = SchemaDependency
(SOSourceObj source
$ AB.mkAnyBackend
$ SOITable refqt)
DRRemoteTable
in tableDep:getBoolExpDeps source refqt whereExp
BoolFld fld -> getColExpDeps source tn fld
where

View File

@ -4,6 +4,7 @@ module Hasura.GraphQL.Context
( RoleContext(..)
, GQLContext(..)
, ParserFn
, SourceConfigWith(..)
, RootField(..)
, QueryDB(..)
, MutationDB(..)
@ -23,10 +24,10 @@ module Hasura.GraphQL.Context
import Hasura.Prelude
import qualified Data.Aeson as J
import qualified Data.Kind as T
import qualified Language.GraphQL.Draft.Syntax as G
import Data.Aeson.TH
import Data.Typeable (Typeable)
import qualified Hasura.RQL.IR.Delete as IR
import qualified Hasura.RQL.IR.Insert as IR
@ -36,11 +37,11 @@ import qualified Hasura.RQL.Types.Action as RQL
import qualified Hasura.RQL.Types.Backend as RQL
import qualified Hasura.RQL.Types.Common as RQL
import qualified Hasura.RQL.Types.RemoteSchema as RQL
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.GraphQL.Parser
import Hasura.SQL.Backend
-- | For storing both a normal GQLContext and one for the backend variant.
-- Currently, this is to enable the backend variant to have certain insert
-- permissions which the frontend variant does not.
@ -64,13 +65,13 @@ type ParserFn a
= G.SelectionSet G.NoFragments Variable
-> Either (NESeq ParseError) (a, QueryReusability)
data RootField db remote action raw where
data SourceConfigWith (db :: BackendType -> T.Type) (b :: BackendType) =
SourceConfigWith (RQL.SourceConfig b) (db b)
data RootField (db :: BackendType -> T.Type) remote action raw where
RFDB
:: forall (b :: BackendType) db remote action raw
. (RQL.Backend b, Typeable db)
=> RQL.SourceName
-> RQL.SourceConfig b
-> db b
:: RQL.SourceName
-> AB.AnyBackend (SourceConfigWith db)
-> RootField db remote action raw
RFRemote :: remote -> RootField db remote action raw
RFAction :: action -> RootField db remote action raw

View File

@ -9,6 +9,7 @@ module Hasura.GraphQL.Execute
, ExecutionCtx(..)
, MonadGQLExecutionCheck(..)
, checkQueryInAllowlist
, MultiplexedLiveQueryPlan(..)
, LiveQueryPlan (..)
, createSubscriptionPlan
) where
@ -25,7 +26,6 @@ import qualified Network.HTTP.Types as HTTP
import qualified Network.Wai.Extended as Wai
import Data.Text.Extended
import Data.Typeable
import qualified Hasura.GraphQL.Context as C
import qualified Hasura.GraphQL.Execute.Backend as EB
@ -37,6 +37,7 @@ import qualified Hasura.GraphQL.Execute.Query as EQ
import qualified Hasura.GraphQL.Execute.RemoteJoin as RJ
import qualified Hasura.GraphQL.Execute.Types as ET
import qualified Hasura.Logging as L
import qualified Hasura.SQL.AnyBackend as AB
import qualified Hasura.Server.Telemetry.Counters as Telem
import qualified Hasura.Tracing as Tracing
@ -163,9 +164,10 @@ data ResolvedExecutionPlan
| SubscriptionExecutionPlan SourceName LiveQueryPlan
-- ^ live query execution; remote schemas and introspection not supported
-- See Note [Existentially Quantified Types]
data LiveQueryPlan where
LQP :: forall b. EB.BackendExecute b => EL.LiveQueryPlan b (EB.MultiplexedQuery b) -> LiveQueryPlan
newtype MultiplexedLiveQueryPlan (b :: BackendType) =
MultiplexedLiveQueryPlan (EL.LiveQueryPlan b (EB.MultiplexedQuery b))
newtype LiveQueryPlan = LQP (AB.AnyBackend MultiplexedLiveQueryPlan)
createSubscriptionPlan
@ -178,34 +180,36 @@ createSubscriptionPlan
-> m (SourceName, LiveQueryPlan)
createSubscriptionPlan userInfo rootFields = do
subscriptions <- for rootFields \case
C.RFDB src e x -> pure $ C.RFDB src e x
C.RFDB src e -> pure $ C.RFDB src e
C.RFAction (C.AQAsync _) -> throw400 NotSupported "async action queries are temporarily not supported in subscription"
C.RFAction (C.AQQuery _) -> throw400 NotSupported "query actions cannot be run as a subscription"
C.RFRemote _ -> throw400 NotSupported "subscription to remote server is not supported"
C.RFRaw _ -> throw400 NotSupported "Introspection not supported over subscriptions"
for_ subscriptions \(C.RFDB _ _ (C.QDBR qdb)) -> do
unless (isNothing $ RJ.getRemoteJoins qdb) $
throw400 NotSupported "Remote relationships are not allowed in subscriptions"
for_ subscriptions \(C.RFDB _ exists) -> do
AB.dispatchAnyBackend @EB.BackendExecute exists \(C.SourceConfigWith _ (C.QDBR qdb)) ->
unless (isNothing $ RJ.getRemoteJoins qdb) $
throw400 NotSupported "Remote relationships are not allowed in subscriptions"
case toList subscriptions of
[] -> throw500 "empty selset for subscription"
(sub:_) -> buildAction sub subscriptions
where
buildAction (C.RFDB sourceName (sourceConfig :: SourceConfig b) _) allFields = do
qdbs <- traverse (checkField @b sourceName) allFields
lqp <- case backendTag @b of
PostgresTag -> LQP <$> EB.mkDBSubscriptionPlan userInfo sourceConfig qdbs
MSSQLTag -> LQP <$> EB.mkDBSubscriptionPlan userInfo sourceConfig qdbs
buildAction (C.RFDB sourceName exists) allFields = do
lqp <- AB.dispatchAnyBackend @EB.BackendExecute exists
\(C.SourceConfigWith (sourceConfig :: SourceConfig b) _) -> do
qdbs <- traverse (checkField @b sourceName) allFields
LQP . AB.mkAnyBackend . MultiplexedLiveQueryPlan
<$> EB.mkDBSubscriptionPlan userInfo sourceConfig qdbs
pure (sourceName, lqp)
checkField
:: forall b. Backend b
=> SourceName
-> C.SubscriptionRootField UnpreparedValue
-> m (C.QueryDB b (UnpreparedValue b))
checkField sourceName (C.RFDB src _ (C.QDBR qdb))
checkField sourceName (C.RFDB src exists)
| sourceName /= src = throw400 NotSupported "all fields of a subscription must be from the same source"
| otherwise = cast qdb `onNothing`
throw500 "internal error: two sources share the same name but are tied to different backends"
| otherwise = case AB.unpackAnyBackend exists of
Nothing -> throw500 "internal error: two sources share the same name but are tied to different backends"
Just (C.SourceConfigWith _ (C.QDBR qdb)) -> pure qdb
checkQueryInAllowlist
:: (MonadError QErr m) => Bool -> UserInfo -> GQLReqParsed -> SchemaCache -> m ()

View File

@ -14,10 +14,12 @@ import Data.Kind (Type)
import Data.Text.Extended
import qualified Hasura.GraphQL.Transport.HTTP.Protocol as GH
import qualified Hasura.RQL.IR.RemoteJoin as IR
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.EncJSON
import Hasura.GraphQL.Context
import Hasura.GraphQL.Execute.Action.Types
import Hasura.GraphQL.Execute.Action.Types (ActionExecutionPlan)
import Hasura.GraphQL.Execute.LiveQuery.Plan
import Hasura.GraphQL.Parser hiding (Type)
import Hasura.RQL.IR.RemoteJoin
@ -79,16 +81,17 @@ class ( Backend b
-> InsOrdHashMap G.Name (QueryDB b (UnpreparedValue b))
-> m (LiveQueryPlan b (MultiplexedQuery b))
data DBStepInfo b =
DBStepInfo
(SourceConfig b)
(Maybe (PreparedQuery b))
(ExecutionMonad b EncJSON)
-- | One execution step to processing a GraphQL query (e.g. one root field).
data ExecutionStep where
ExecStepDB
:: forall (b :: BackendType)
. BackendExecute b
=> SourceConfig b
-> Maybe (PreparedQuery b)
-> HTTP.ResponseHeaders
-> ExecutionMonad b EncJSON
:: HTTP.ResponseHeaders
-> AB.AnyBackend DBStepInfo
-> ExecutionStep
-- ^ A query to execute against the database
ExecStepAction
@ -111,3 +114,11 @@ data ExecutionStep where
-- independent. In the future, when we implement a client-side dataloader and generalized joins,
-- this will need to be changed into an annotated tree.
type ExecutionPlan = InsOrdHashMap G.Name ExecutionStep
getRemoteSchemaInfo
:: forall b
. BackendExecute b
=> DBStepInfo b
-> [RemoteSchemaInfo]
getRemoteSchemaInfo (DBStepInfo _ genSql _) =
IR._rjRemoteSchema <$> maybe [] (getRemoteJoins @b) genSql

View File

@ -14,8 +14,11 @@ import qualified Network.HTTP.Types as HTTP
import qualified Hasura.GraphQL.Transport.HTTP.Protocol as GH
import qualified Hasura.Logging as L
import qualified Hasura.SQL.AnyBackend as AB
import qualified Hasura.Tracing as Tracing
import Hasura.Backends.MSSQL.Instances.Execute ()
import Hasura.Backends.Postgres.Instances.Execute ()
import Hasura.GraphQL.Context
import Hasura.GraphQL.Execute.Action
import Hasura.GraphQL.Execute.Backend
@ -27,10 +30,6 @@ import Hasura.RQL.Types
import Hasura.Server.Version (HasVersion)
import Hasura.Session
-- backend instances
import Hasura.Backends.MSSQL.Instances.Execute ()
import Hasura.Backends.Postgres.Instances.Execute ()
convertMutationAction
::( HasVersion
@ -87,9 +86,10 @@ convertMutationSelectionSet env logger gqlContext SQLGenCtx{stringifyNum} userIn
-- Transform the RQL AST into a prepared SQL query
txs <- for unpreparedQueries \case
RFDB _ (sourceConfig :: SourceConfig b) (MDBR db) -> case backendTag @b of
PostgresTag -> mkDBMutationPlan env manager reqHeaders userInfo stringifyNum sourceConfig db
MSSQLTag -> mkDBMutationPlan env manager reqHeaders userInfo stringifyNum sourceConfig db
RFDB _ exists ->
AB.dispatchAnyBackend @BackendExecute exists
\(SourceConfigWith sourceConfig (MDBR db)) ->
mkDBMutationPlan env manager reqHeaders userInfo stringifyNum sourceConfig db
RFRemote remoteField -> do
RemoteFieldG remoteSchemaInfo resolvedRemoteField <- resolveRemoteField userInfo remoteField
pure $ buildExecStepRemote remoteSchemaInfo G.OperationTypeMutation $ [G.SelectionField resolvedRemoteField]

View File

@ -20,8 +20,11 @@ import qualified Network.HTTP.Types as HTTP
import qualified Hasura.GraphQL.Transport.HTTP.Protocol as GH
import qualified Hasura.Logging as L
import qualified Hasura.SQL.AnyBackend as AB
import qualified Hasura.Tracing as Tracing
import Hasura.Backends.MSSQL.Instances.Execute ()
import Hasura.Backends.Postgres.Instances.Execute ()
import Hasura.GraphQL.Context
import Hasura.GraphQL.Execute.Action
import Hasura.GraphQL.Execute.Backend
@ -33,10 +36,6 @@ import Hasura.RQL.Types
import Hasura.Server.Version (HasVersion)
import Hasura.Session
-- backend instances
import Hasura.Backends.MSSQL.Instances.Execute ()
import Hasura.Backends.Postgres.Instances.Execute ()
parseGraphQLQuery
:: MonadError QErr m
@ -84,9 +83,10 @@ convertQuerySelSet env logger gqlContext userInfo manager reqHeaders directives
-- Transform the query plans into an execution plan
let usrVars = _uiSession userInfo
executionPlan <- for unpreparedQueries \case
RFDB _ (sourceConfig :: SourceConfig b) (QDBR db) -> case backendTag @b of
PostgresTag -> mkDBQueryPlan env manager reqHeaders userInfo directives sourceConfig db
MSSQLTag -> mkDBQueryPlan env manager reqHeaders userInfo directives sourceConfig db
RFDB _ exists ->
AB.dispatchAnyBackend @BackendExecute exists
\(SourceConfigWith sourceConfig (QDBR db)) ->
mkDBQueryPlan env manager reqHeaders userInfo directives sourceConfig db
RFRemote rf -> do
RemoteFieldG remoteSchemaInfo remoteField <- for rf $ resolveRemoteVariable userInfo
pure $ buildExecStepRemote remoteSchemaInfo G.OperationTypeQuery [G.SelectionField remoteField]

View File

@ -15,18 +15,16 @@ import qualified Language.GraphQL.Draft.Syntax as G
import Control.Monad.Trans.Control (MonadBaseControl)
import Data.Text.Extended
import Data.Typeable (cast)
import qualified Hasura.Backends.Postgres.SQL.DML as S
import qualified Hasura.Backends.Postgres.Translate.Select as DS
import qualified Hasura.GraphQL.Execute as E
import qualified Hasura.GraphQL.Execute.Backend as E
import qualified Hasura.GraphQL.Execute.Inline as E
import qualified Hasura.GraphQL.Execute.LiveQuery.Explain as E
import qualified Hasura.GraphQL.Execute.LiveQuery.Plan as EL
import qualified Hasura.GraphQL.Execute.Query as E
import qualified Hasura.GraphQL.Execute.RemoteJoin as RR
import qualified Hasura.GraphQL.Transport.HTTP.Protocol as GH
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.Backends.Postgres.SQL.Value
import Hasura.Backends.Postgres.Translate.Column (toTxtValue)
@ -94,14 +92,14 @@ explainQueryField userInfo fieldName rootField = do
RFRemote _ -> throw400 InvalidParams "only hasura queries can be explained"
RFAction _ -> throw400 InvalidParams "query actions cannot be explained"
RFRaw _ -> pure $ Just $ FieldPlan fieldName Nothing Nothing
RFDB _ config (QDBR qDB) -> runMaybeT $ do
RFDB _ exists -> runMaybeT $ do
-- TEMPORARY!!!
-- We don't handle non-Postgres backends yet: for now, we filter root fields to only keep those
-- that are targeting postgres, and we *silently* discard all the others. This is fine for now, as
-- we haven't integrated any other backend yet, but will need to be fixed as soon as possible for
-- other backends to work.
pgConfig <- hoistMaybe $ cast config
pgQDB <- hoistMaybe $ cast qDB
SourceConfigWith pgConfig (QDBR pgQDB) <-
hoistMaybe $ AB.unpackAnyBackend exists
lift $ do
resolvedQuery <- E.traverseQueryDB (resolveUnpreparedValue userInfo) pgQDB
let (querySQL, remoteJoins) = case resolvedQuery of
@ -157,10 +155,12 @@ explainGQLQuery sc (GQLExplain query userVarsRaw maybeIsRelay) = do
-- (Here the above fragment inlining is actually executed.)
inlinedSelSet <- E.inlineSelectionSet fragments selSet
(unpreparedQueries, _) <- E.parseGraphQLQuery graphQLContext varDefs (GH._grVariables query) inlinedSelSet
(_, E.LQP (execPlan :: EL.LiveQueryPlan b (E.MultiplexedQuery b))) <- E.createSubscriptionPlan userInfo unpreparedQueries
case backendTag @b of
PostgresTag -> encJFromJValue <$> E.explainLiveQueryPlan execPlan
MSSQLTag -> pure mempty
(_, E.LQP exists) <-
E.createSubscriptionPlan userInfo unpreparedQueries
case AB.unpackAnyBackend exists of
Nothing -> pure mempty
Just (E.MultiplexedLiveQueryPlan execPlan) ->
encJFromJValue <$> E.explainLiveQueryPlan execPlan
where
queryType = bool E.QueryHasura E.QueryRelay $ Just True == maybeIsRelay
sessionVariables = mkSessionVariablesText $ fromMaybe mempty userVarsRaw

View File

@ -19,6 +19,7 @@ import Data.Has
import Data.List.Extended (duplicates)
import qualified Hasura.GraphQL.Parser as P
import qualified Hasura.SQL.AnyBackend as AB
import Data.Text.Extended
import Hasura.GraphQL.Context
@ -173,9 +174,8 @@ buildRoleContext (SQLGenCtx stringifyNum, queryType, functionPermsCtx) sources
<$> buildQueryFields sourceName sourceConfig validTables validFunctions
<*> buildMutationFields Frontend sourceName sourceConfig validTables validFunctions
<*> buildMutationFields Backend sourceName sourceConfig validTables validFunctions
buildBackendSource = withBackendSchema buildSource
fieldsList <- traverse buildBackendSource $ toList sources
fieldsList <- traverse (buildBackendSource buildSource) $ toList sources
let (queryFields, mutationFrontendFields, mutationBackendFields) = mconcat fieldsList
-- It's okay to run the rest of this while assuming that the backend is 'Postgres:
@ -240,9 +240,8 @@ buildRelayRoleContext (SQLGenCtx stringifyNum, queryType, functionPermsCtx) sour
<$> buildRelayQueryFields sourceName sourceConfig validTables validFunctions
<*> buildMutationFields Frontend sourceName sourceConfig validTables validFunctions
<*> buildMutationFields Backend sourceName sourceConfig validTables validFunctions
buildBackendSource = withBackendSchema buildSource
fieldsList <- traverse buildBackendSource $ toList sources
fieldsList <- traverse (buildBackendSource buildSource) $ toList sources
-- It's okay to run the rest of this while assuming that the backend is 'Postgres:
-- the only remaining parsers are for actions, that are postgres specific, or for
@ -298,9 +297,8 @@ buildFullestDBSchema queryContext sources allActionInfos nonObjectCustomTypes =
(,)
<$> buildQueryFields sourceName sourceConfig validTables validFunctions
<*> buildMutationFields Frontend sourceName sourceConfig validTables validFunctions
buildBackendSource = withBackendSchema buildSource
fieldsList <- traverse buildBackendSource $ toList sources
fieldsList <- traverse (buildBackendSource buildSource) $ toList sources
let (queryFields, mutationFrontendFields) = mconcat fieldsList
-- It's okay to run the rest of this while assuming that the backend is 'Postgres:
@ -695,12 +693,13 @@ runMonadSchema
runMonadSchema roleName queryContext pgSources extensions m =
flip runReaderT (roleName, pgSources, queryContext, extensions) $ P.runSchemaT m
withBackendSchema :: (forall b. BackendSchema b => SourceInfo b -> r) -> BackendSourceInfo -> r
withBackendSchema f (BackendSourceInfo (bsi :: SourceInfo b)) = case backendTag @b of
PostgresTag -> f bsi
MSSQLTag -> f bsi
-- | Whether the request is sent with `x-hasura-use-backend-only-permissions` set to `true`.
data Scenario = Backend | Frontend deriving (Enum, Show, Eq)
type RemoteSchemaCache = HashMap RemoteSchemaName (RemoteSchemaCtx, MetadataObject)
buildBackendSource
:: (forall b. BackendSchema b => SourceInfo b -> r)
-> AB.AnyBackend SourceInfo
-> r
buildBackendSource f e = AB.dispatchAnyBackend @BackendSchema e f

View File

@ -10,6 +10,8 @@ import qualified Language.GraphQL.Draft.Syntax as G
import Data.Text.Extended
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.GraphQL.Context
import Hasura.GraphQL.Parser hiding (EnumValueInfo, field)
import Hasura.GraphQL.Schema.Backend (MonadBuildSchema)
@ -31,7 +33,10 @@ buildTableQueryFields
-> m [FieldParser n (QueryRootField UnpreparedValue)]
buildTableQueryFields sourceName sourceInfo tableName tableInfo gqlName selPerms = do
let
mkRF = RFDB sourceName sourceInfo . QDBR
mkRF = RFDB sourceName
. AB.mkAnyBackend
. SourceConfigWith sourceInfo
. QDBR
customRootFields = _tcCustomRootFields $ _tciCustomConfig $ _tiCoreInfo tableInfo
-- select table
selectName = fromMaybe gqlName $ _tcrfSelect customRootFields
@ -62,7 +67,10 @@ buildTableInsertMutationFields
-> m [FieldParser n (MutationRootField UnpreparedValue)]
buildTableInsertMutationFields sourceName sourceInfo tableName tableInfo gqlName insPerms mSelPerms mUpdPerms = do
let
mkRF = RFDB sourceName sourceInfo . MDBR
mkRF = RFDB sourceName
. AB.mkAnyBackend
. SourceConfigWith sourceInfo
. MDBR
customRootFields = _tcCustomRootFields $ _tciCustomConfig $ _tiCoreInfo tableInfo
-- insert into table
insertName = fromMaybe ($$(G.litName "insert_") <> gqlName) $ _tcrfInsert customRootFields
@ -91,7 +99,10 @@ buildTableUpdateMutationFields
-> m [FieldParser n (MutationRootField UnpreparedValue)]
buildTableUpdateMutationFields sourceName sourceInfo tableName tableInfo gqlName updPerms mSelPerms = do
let
mkRF = RFDB sourceName sourceInfo . MDBR
mkRF = RFDB sourceName
. AB.mkAnyBackend
. SourceConfigWith sourceInfo
. MDBR
customRootFields = _tcCustomRootFields $ _tciCustomConfig $ _tiCoreInfo tableInfo
-- update table
updateName = fromMaybe ($$(G.litName "update_") <> gqlName) $ _tcrfUpdate customRootFields
@ -119,7 +130,10 @@ buildTableDeleteMutationFields
-> m [FieldParser n (MutationRootField UnpreparedValue)]
buildTableDeleteMutationFields sourceName sourceInfo tableName tableInfo gqlName delPerms mSelPerms = do
let
mkRF = RFDB sourceName sourceInfo . MDBR
mkRF = RFDB sourceName
. AB.mkAnyBackend
. SourceConfigWith sourceInfo
. MDBR
customRootFields = _tcCustomRootFields $ _tciCustomConfig $ _tiCoreInfo tableInfo
-- delete from table
deleteName = fromMaybe ($$(G.litName "delete_") <> gqlName) $ _tcrfDelete customRootFields
@ -147,7 +161,10 @@ buildFunctionQueryFields
buildFunctionQueryFields sourceName sourceInfo functionName functionInfo tableName selPerms = do
funcName <- functionGraphQLName @b functionName `onLeft` throwError
let
mkRF = RFDB sourceName sourceInfo . QDBR
mkRF = RFDB sourceName
. AB.mkAnyBackend
. SourceConfigWith sourceInfo
. QDBR
-- select function
funcDesc = Just $ G.Description $ "execute function " <> functionName <<> " which returns " <>> tableName
-- select function agg
@ -175,7 +192,10 @@ buildFunctionMutationFields
buildFunctionMutationFields sourceName sourceInfo functionName functionInfo tableName selPerms = do
funcName <- functionGraphQLName @b functionName `onLeft` throwError
let
mkRF = RFDB sourceName sourceInfo . MDBR
mkRF = RFDB sourceName
. AB.mkAnyBackend
. SourceConfigWith sourceInfo
. MDBR
funcDesc = Just $ G.Description $ "execute VOLATILE function " <> functionName <<> " which returns " <>> tableName
jsonAggSelect = _fiJsonAggSelect functionInfo
catMaybes <$> sequenceA

View File

@ -49,6 +49,7 @@ import qualified Hasura.GraphQL.Parser.Internal.Parser as P
import qualified Hasura.RQL.IR.BoolExp as IR
import qualified Hasura.RQL.IR.OrderBy as IR
import qualified Hasura.RQL.IR.Select as IR
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.GraphQL.Context
import Hasura.GraphQL.Parser (FieldParser, InputFieldsParser,
@ -1419,19 +1420,25 @@ nodeField = do
onNothing (Map.lookup table parseds) $
withArgsPath $ throwInvalidNodeId $ "the table " <>> ident
whereExp <- buildNodeIdBoolExp columnValues pkeyColumns
return $ RFDB source sourceConfig $ QDBR $ QDBSingleRow $ IR.AnnSelectG
{ IR._asnFields = fields
, IR._asnFrom = IR.FromTable table
, IR._asnPerm = tablePermissionsInfo perms
, IR._asnArgs = IR.SelectArgs
{ IR._saWhere = Just whereExp
, IR._saOrderBy = Nothing
, IR._saLimit = Nothing
, IR._saOffset = Nothing
, IR._saDistinct = Nothing
return
$ RFDB source
$ AB.mkAnyBackend
$ SourceConfigWith sourceConfig
$ QDBR
$ QDBSingleRow
$ IR.AnnSelectG
{ IR._asnFields = fields
, IR._asnFrom = IR.FromTable table
, IR._asnPerm = tablePermissionsInfo perms
, IR._asnArgs = IR.SelectArgs
{ IR._saWhere = Just whereExp
, IR._saOrderBy = Nothing
, IR._saLimit = Nothing
, IR._saOffset = Nothing
, IR._saDistinct = Nothing
}
, IR._asnStrfyNum = stringifyNum
}
, IR._asnStrfyNum = stringifyNum
}
where
parseNodeId :: Text -> n NodeId
parseNodeId =

View File

@ -41,10 +41,12 @@ import qualified Hasura.GraphQL.Execute as E
import qualified Hasura.GraphQL.Execute.Action as EA
import qualified Hasura.GraphQL.Execute.Backend as EB
import qualified Hasura.Logging as L
import qualified Hasura.RQL.IR.RemoteJoin as IR
import qualified Hasura.SQL.AnyBackend as AB
import qualified Hasura.Server.Telemetry.Counters as Telem
import qualified Hasura.Tracing as Tracing
import Hasura.Backends.MSSQL.Instances.Transport ()
import Hasura.Backends.Postgres.Instances.Transport ()
import Hasura.EncJSON
import Hasura.GraphQL.Context
import Hasura.GraphQL.Logging (MonadQueryLog)
@ -60,10 +62,6 @@ import Hasura.Server.Version (HasVersion)
import Hasura.Session
import Hasura.Tracing (MonadTrace, TraceT, trace)
-- backend instances
import Hasura.Backends.MSSQL.Instances.Transport ()
import Hasura.Backends.Postgres.Instances.Transport ()
data QueryCacheKey = QueryCacheKey
{ qckQueryString :: !GQLReqParsed
@ -159,8 +157,10 @@ filterVariablesFromQuery query = fold $ rootToSessVarPreds =<< query
where
rootToSessVarPreds :: RootField (QueryDBRoot UnpreparedValue) c h d -> [SessVarPred]
rootToSessVarPreds = \case
RFDB _ _ (QDBR db) -> toPred <$> toListOf traverseQueryDB db
_ -> []
RFDB _ exists ->
AB.runBackend exists \case
SourceConfigWith _ (QDBR db) -> toPred <$> toListOf traverseQueryDB db
_ -> []
toPred :: UnpreparedValue bet -> SessVarPred
-- if we see a reference to the whole session variables object,
@ -212,10 +212,8 @@ runGQ env logger reqId userInfo ipAddress reqHeaders queryType reqUnparsed = do
let filteredSessionVars = runSessVarPred (filterVariablesFromQuery asts) (_uiSession userInfo)
cacheKey = QueryCacheKey reqParsed (_uiRole userInfo) filteredSessionVars
remoteJoins = OMap.elems queryPlans >>= \case
E.ExecStepDB (_ :: SourceConfig b) genSql _headers _tx ->
case backendTag @b of
PostgresTag -> IR._rjRemoteSchema <$> maybe [] (EB.getRemoteJoins @b) genSql
MSSQLTag -> IR._rjRemoteSchema <$> maybe [] (EB.getRemoteJoins @b) genSql
E.ExecStepDB _headers exists ->
AB.dispatchAnyBackend @BackendTransport exists EB.getRemoteSchemaInfo
_ -> []
(responseHeaders, cachedValue) <- Tracing.interpTraceT (liftEitherM . runExceptT) $ cacheLookup remoteJoins cacheKey
case fmap decodeGQResp cachedValue of
@ -223,10 +221,19 @@ runGQ env logger reqId userInfo ipAddress reqHeaders queryType reqUnparsed = do
pure (Telem.Query, 0, Telem.Local, HttpResponse cachedResponseData responseHeaders)
Nothing -> do
conclusion <- runExceptT $ forWithKey queryPlans $ \fieldName -> \case
E.ExecStepDB (sourceConfig :: SourceConfig b) genSql _headers tx -> doQErr $ do
(telemTimeIO_DT, resp) <- case backendTag @b of
PostgresTag -> runDBQuery reqId reqUnparsed fieldName userInfo logger sourceConfig tx genSql
MSSQLTag -> runDBQuery reqId reqUnparsed fieldName userInfo logger sourceConfig tx genSql
E.ExecStepDB _headers exists -> doQErr $ do
(telemTimeIO_DT, resp) <-
AB.dispatchAnyBackend @BackendTransport exists
\(EB.DBStepInfo sourceConfig genSql tx) ->
runDBQuery
reqId
reqUnparsed
fieldName
userInfo
logger
sourceConfig
tx
genSql
return $ ResultsFragment telemTimeIO_DT Telem.Local resp []
E.ExecStepRemote rsi gqlReq ->
runRemoteGQ httpManager fieldName rsi gqlReq
@ -241,10 +248,19 @@ runGQ env logger reqId userInfo ipAddress reqHeaders queryType reqUnparsed = do
E.MutationExecutionPlan mutationPlans -> do
conclusion <- runExceptT $ forWithKey mutationPlans $ \fieldName -> \case
E.ExecStepDB (sourceConfig :: SourceConfig b) genSql responseHeaders tx -> doQErr $ do
(telemTimeIO_DT, resp) <- case backendTag @b of
PostgresTag -> runDBMutation reqId reqUnparsed fieldName userInfo logger sourceConfig tx genSql
MSSQLTag -> runDBMutation reqId reqUnparsed fieldName userInfo logger sourceConfig tx genSql
E.ExecStepDB responseHeaders exists -> doQErr $ do
(telemTimeIO_DT, resp) <-
AB.dispatchAnyBackend @BackendTransport exists
\(EB.DBStepInfo sourceConfig genSql tx) ->
runDBMutation
reqId
reqUnparsed
fieldName
userInfo
logger
sourceConfig
tx
genSql
return $ ResultsFragment telemTimeIO_DT Telem.Local resp responseHeaders
E.ExecStepRemote rsi gqlReq ->
runRemoteGQ httpManager fieldName rsi gqlReq
@ -359,3 +375,4 @@ runGQBatched env logger reqId responseErrorsConfig userInfo ipAddress reqHdrs qu
removeHeaders <$> traverse (try . (fmap . fmap) snd . runGQ env logger reqId userInfo ipAddress reqHdrs queryType) reqs
where
try = flip catchError (pure . Left) . fmap Right

View File

@ -50,15 +50,16 @@ import GHC.AssertNF
import qualified Hasura.GraphQL.Execute as E
import qualified Hasura.GraphQL.Execute.Action as EA
import qualified Hasura.GraphQL.Execute.Backend as EB
import qualified Hasura.GraphQL.Execute.LiveQuery.Plan as LQ
import qualified Hasura.GraphQL.Execute.LiveQuery.Poll as LQ
import qualified Hasura.GraphQL.Execute.LiveQuery.State as LQ
import qualified Hasura.GraphQL.Transport.WebSocket.Server as WS
import qualified Hasura.Logging as L
import qualified Hasura.RQL.IR.RemoteJoin as IR
import qualified Hasura.SQL.AnyBackend as AB
import qualified Hasura.Server.Telemetry.Counters as Telem
import qualified Hasura.Tracing as Tracing
import Hasura.Backends.MSSQL.Instances.Transport ()
import Hasura.Backends.Postgres.Instances.Transport ()
import Hasura.EncJSON
import Hasura.GraphQL.Logging
import Hasura.GraphQL.Transport.Backend
@ -80,10 +81,6 @@ import Hasura.Server.Types (RequestId, getReq
import Hasura.Server.Version (HasVersion)
import Hasura.Session
-- backend instances
import Hasura.Backends.MSSQL.Instances.Transport ()
import Hasura.Backends.Postgres.Instances.Transport ()
-- | 'LQ.LiveQueryId' comes from 'Hasura.GraphQL.Execute.LiveQuery.State.addLiveQuery'. We use
-- this to track a connection's operations so we can remove them from 'LiveQueryState', and
@ -383,10 +380,8 @@ onStart env serverEnv wsConn (StartMsg opId q) = catchAndIgnore $ do
let filteredSessionVars = runSessVarPred (filterVariablesFromQuery asts) (_uiSession userInfo)
cacheKey = QueryCacheKey reqParsed (_uiRole userInfo) filteredSessionVars
remoteJoins = OMap.elems queryPlan >>= \case
E.ExecStepDB (_ :: SourceConfig b) genSql _headers _tx ->
case backendTag @b of
PostgresTag -> IR._rjRemoteSchema <$> maybe [] (EB.getRemoteJoins @b) genSql
MSSQLTag -> IR._rjRemoteSchema <$> maybe [] (EB.getRemoteJoins @b) genSql
E.ExecStepDB _remoteHeaders exists ->
AB.dispatchAnyBackend @BackendTransport exists EB.getRemoteSchemaInfo
_ -> []
-- We ignore the response headers (containing TTL information) because
-- WebSockets don't support them.
@ -396,10 +391,19 @@ onStart env serverEnv wsConn (StartMsg opId q) = catchAndIgnore $ do
sendSuccResp cachedResponseData $ LQ.LiveQueryMetadata 0
Nothing -> do
conclusion <- runExceptT $ forWithKey queryPlan $ \fieldName -> \case
E.ExecStepDB (sourceConfig :: SourceConfig b) genSql _headerss tx -> doQErr $ do
(telemTimeIO_DT, resp) <- case backendTag @b of
PostgresTag -> runDBQuery requestId q fieldName userInfo logger sourceConfig tx genSql
MSSQLTag -> runDBQuery requestId q fieldName userInfo logger sourceConfig tx genSql
E.ExecStepDB _headers exists -> doQErr $ do
(telemTimeIO_DT, resp) <-
AB.dispatchAnyBackend @BackendTransport exists
\(EB.DBStepInfo sourceConfig genSql tx) ->
runDBQuery
requestId
q
fieldName
userInfo
logger
sourceConfig
tx
genSql
return $ ResultsFragment telemTimeIO_DT Telem.Local resp []
E.ExecStepRemote rsi gqlReq -> do
runRemoteGQ fieldName userInfo reqHdrs rsi gqlReq
@ -418,10 +422,19 @@ onStart env serverEnv wsConn (StartMsg opId q) = catchAndIgnore $ do
E.MutationExecutionPlan mutationPlan -> do
conclusion <- runExceptT $ forWithKey mutationPlan $ \fieldName -> \case
-- Ignoring response headers since we can't send them over WebSocket
E.ExecStepDB (sourceConfig :: SourceConfig b) genSql _responseHeaders tx -> doQErr $ do
(telemTimeIO_DT, resp) <- case backendTag @b of
PostgresTag -> runDBMutation requestId q fieldName userInfo logger sourceConfig tx genSql
MSSQLTag -> runDBMutation requestId q fieldName userInfo logger sourceConfig tx genSql
E.ExecStepDB _responseHeaders exists -> doQErr $ do
(telemTimeIO_DT, resp) <-
AB.dispatchAnyBackend @BackendTransport exists
\(EB.DBStepInfo sourceConfig genSql tx) ->
runDBMutation
requestId
q
fieldName
userInfo
logger
sourceConfig
tx
genSql
return $ ResultsFragment telemTimeIO_DT Telem.Local resp []
E.ExecStepAction actionExecPlan hdrs -> do
(time, r) <- doQErr $ EA.runActionExecution actionExecPlan
@ -433,7 +446,7 @@ onStart env serverEnv wsConn (StartMsg opId q) = catchAndIgnore $ do
buildResult Telem.Query telemCacheHit timerTot requestId conclusion
sendCompleted (Just requestId)
E.SubscriptionExecutionPlan sourceName (E.LQP (liveQueryPlan :: LQ.LiveQueryPlan b (EB.MultiplexedQuery b))) -> do
E.SubscriptionExecutionPlan sourceName (E.LQP exists) -> do
-- log the graphql query
logQueryLog logger $ QueryLog q Nothing requestId
let subscriberMetadata = LQ.mkSubscriberMetadata $ J.object
@ -442,9 +455,9 @@ onStart env serverEnv wsConn (StartMsg opId q) = catchAndIgnore $ do
]
-- NOTE!: we mask async exceptions higher in the call stack, but it's
-- crucial we don't lose lqId after addLiveQuery returns successfully.
!lqId <- liftIO $ case backendTag @b of
PostgresTag -> LQ.addLiveQuery logger subscriberMetadata lqMap sourceName liveQueryPlan liveQOnChange
MSSQLTag -> LQ.addLiveQuery logger subscriberMetadata lqMap sourceName liveQueryPlan liveQOnChange
!lqId <- liftIO $ AB.dispatchAnyBackend @BackendTransport exists
\(E.MultiplexedLiveQueryPlan liveQueryPlan) ->
LQ.addLiveQuery logger subscriberMetadata lqMap sourceName liveQueryPlan liveQOnChange
let !opName = _grOperationName q
#ifndef PROFILING
liftIO $ $assertNFHere (lqId, opName) -- so we don't write thunks to mutable vars

View File

@ -16,7 +16,8 @@ import qualified Data.HashMap.Strict.InsOrd as OMap
import Data.Aeson
import Data.Text.Extended
import Data.Typeable (cast)
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.EncJSON
import Hasura.Incremental (Cacheable)
@ -52,8 +53,10 @@ instance (Backend b) => FromJSON (AddComputedField b) where
runAddComputedField :: (MonadError QErr m, CacheRWM m, MetadataM m) => AddComputedField 'Postgres -> m EncJSON
runAddComputedField q = do
withPathK "table" $ askTabInfo source table
let metadataObj = MOSourceObjId source $
SMOTableObj table $ MTOComputedField computedFieldName
let metadataObj = MOSourceObjId source
$ AB.mkAnyBackend
$ SMOTableObj table
$ MTOComputedField computedFieldName
metadata = ComputedFieldMetadata computedFieldName (_afcDefinition q) (_afcComment q)
buildSchemaCacheFor metadataObj
$ MetadataModifier
@ -86,7 +89,8 @@ instance (Backend b) => FromJSON (DropComputedField b) where
<*> o .:? "cascade" .!= False
runDropComputedField
:: (QErrM m, CacheRWM m, MetadataM m, BackendMetadata b)
:: forall b m
. (QErrM m, CacheRWM m, MetadataM m, BackendMetadata b)
=> DropComputedField b -> m EncJSON
runDropComputedField (DropComputedField source table computedField cascade) = do
-- Validation
@ -95,23 +99,28 @@ runDropComputedField (DropComputedField source table computedField cascade) = do
-- Dependencies check
sc <- askSchemaCache
let deps = getDependentObjs sc $ SOSourceObj source $
SOITableObj table $ TOComputedField computedField
let deps = getDependentObjs sc
$ SOSourceObj source
$ AB.mkAnyBackend
$ SOITableObj table
$ TOComputedField computedField
when (not cascade && not (null deps)) $ reportDeps deps
withNewInconsistentObjsCheck do
metadataModifiers <- mapM purgeComputedFieldDependency deps
buildSchemaCache $ MetadataModifier $
tableMetadataSetter source table
%~ (dropComputedFieldInMetadata computedField) . foldl' (.) id metadataModifiers
%~ dropComputedFieldInMetadata computedField . foldl' (.) id metadataModifiers
pure successMsg
where
purgeComputedFieldDependency = \case
-- TODO: do a better check of ensuring that the dependency is as expected.
-- i.e, the only allowed dependent objects on a computed fields are permissions
-- on the same table
(SOSourceObj _ (SOITableObj qt (TOPerm roleName permType))) | cast qt == Just table ->
pure $ dropPermissionInMetadata roleName permType
SOSourceObj _ exists
| Just (SOITableObj _ (TOPerm roleName permType))
<- AB.unpackAnyBackend @b exists ->
pure $ dropPermissionInMetadata roleName permType
d -> throw500 $ "unexpected dependency for computed field "
<> computedField <<> "; " <> reportSchemaObj d

View File

@ -23,6 +23,7 @@ import Control.Lens ((.~))
import Data.Aeson
import Data.Text.Extended
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.Backends.Postgres.DDL.Table
import Hasura.Backends.Postgres.Execute.Types
@ -94,7 +95,11 @@ createEventTriggerQueryMetadata q = do
let table = cetqTable q
source = cetqSource q
triggerName = etcName triggerConf
metadataObj = MOSourceObjId source $ SMOTableObj table $ MTOTrigger triggerName
metadataObj =
MOSourceObjId source
$ AB.mkAnyBackend
$ SMOTableObj table
$ MTOTrigger triggerName
buildSchemaCacheFor metadataObj
$ MetadataModifier
$ tableMetadataSetter source table.tmEventTriggers %~

View File

@ -27,7 +27,8 @@ import qualified Data.List as L
import Control.Lens ((.~), (^?))
import Data.Aeson
import Data.Typeable (cast)
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.Backends.Postgres.DDL.Table (delTriggerQ)
import Hasura.Metadata.Class
@ -60,11 +61,14 @@ runClearMetadata _ = do
let maybeDefaultSourceMetadata = metadata ^? metaSources.ix defaultSource
emptyMetadata' = case maybeDefaultSourceMetadata of
Nothing -> emptyMetadata
Just (BackendSourceMetadata defaultSourceMetadata) ->
Just exists ->
-- If default postgres source is defined, we need to set metadata
-- which contains only default source without any tables and functions.
let emptyDefaultSource = BackendSourceMetadata $
SourceMetadata defaultSource mempty mempty $ _smConfiguration defaultSourceMetadata
let emptyDefaultSource =
AB.dispatchAnyBackend @Backend exists
$ AB.mkAnyBackend
. SourceMetadata defaultSource mempty mempty
. _smConfiguration
in emptyMetadata
& metaSources %~ OMap.insert defaultSource emptyDefaultSource
runReplaceMetadataV1 $ RMWithSources emptyMetadata'
@ -127,8 +131,8 @@ runReplaceMetadataV2 ReplaceMetadataV2{..} = do
RMWithoutSources MetadataNoSources{..} -> do
let maybeDefaultSourceMetadata = oldMetadata ^? metaSources.ix defaultSource.toSourceMetadata
defaultSourceMetadata <- onNothing maybeDefaultSourceMetadata $
throw400 NotSupported $ "cannot import metadata without sources since no default source is defined"
let newDefaultSourceMetadata = BackendSourceMetadata defaultSourceMetadata
throw400 NotSupported "cannot import metadata without sources since no default source is defined"
let newDefaultSourceMetadata = AB.mkAnyBackend defaultSourceMetadata
{ _smTables = _mnsTables
, _smFunctions = _mnsFunctions
}
@ -151,7 +155,7 @@ runReplaceMetadataV2 ReplaceMetadataV2{..} = do
pure $ encJFromJValue $ formatInconsistentObjs $ scInconsistentObjs sc
where
getOnlyPGSources :: Metadata -> InsOrdHashMap SourceName (SourceMetadata 'Postgres)
getOnlyPGSources = OMap.mapMaybe (\(BackendSourceMetadata sm) -> cast sm) . _metaSources
getOnlyPGSources = OMap.mapMaybe AB.unpackAnyBackend . _metaSources
dropPostgresTriggers
:: InsOrdHashMap SourceName (SourceMetadata 'Postgres) -- ^ old pg sources
@ -242,45 +246,36 @@ runDropInconsistentMetadata _ = do
-- seems to work well enough for now.
metadataModifier <- execWriterT $ mapM_ (tell . purgeMetadataObj) (reverse inconsSchObjs)
metadata <- getMetadata
putMetadata $ unMetadataModifier metadataModifier $ metadata
putMetadata $ unMetadataModifier metadataModifier metadata
buildSchemaCacheStrict
return successMsg
purgeMetadataObj :: MetadataObjId -> MetadataModifier
purgeMetadataObj = \case
MOSource source -> MetadataModifier $ metaSources %~ OMap.delete source
MOSourceObjId source (sourceObjId :: SourceMetadataObjId b) ->
case backendTag @b of
PostgresTag -> case sourceObjId of
SMOTable qt -> dropTableInMetadata source qt
SMOTableObj qt tableObj -> MetadataModifier $
tableMetadataSetter source qt %~ case tableObj of
MOSource source -> MetadataModifier $ metaSources %~ OMap.delete source
MOSourceObjId source exists -> AB.dispatchAnyBackend @BackendMetadata exists $ handleSourceObj source
MORemoteSchema rsn -> dropRemoteSchemaInMetadata rsn
MORemoteSchemaPermissions rsName role -> dropRemoteSchemaPermissionInMetadata rsName role
MOCustomTypes -> clearCustomTypesInMetadata
MOAction action -> dropActionInMetadata action -- Nothing
MOActionPermission action role -> dropActionPermissionInMetadata action role
MOCronTrigger ctName -> dropCronTriggerInMetadata ctName
MOEndpoint epName -> dropEndpointInMetadata epName
MOInheritedRole role -> dropInheritedRoleInMetadata role
where
handleSourceObj :: BackendMetadata b => SourceName -> SourceMetadataObjId b -> MetadataModifier
handleSourceObj source = \case
SMOTable qt -> dropTableInMetadata source qt
SMOFunction qf -> dropFunctionInMetadata source qf
SMOFunctionPermission qf rn -> dropFunctionPermissionInMetadata source qf rn
SMOTableObj qt tableObj ->
MetadataModifier
$ tableMetadataSetter source qt %~ case tableObj of
MTORel rn _ -> dropRelationshipInMetadata rn
MTOPerm rn pt -> dropPermissionInMetadata rn pt
MTOTrigger trn -> dropEventTriggerInMetadata trn
MTOComputedField ccn -> dropComputedFieldInMetadata ccn
MTORemoteRelationship rn -> dropRemoteRelationshipInMetadata rn
SMOFunction qf -> dropFunctionInMetadata source qf
SMOFunctionPermission qf rn -> dropFunctionPermissionInMetadata source qf rn
MSSQLTag -> case sourceObjId of
SMOTable qt -> dropTableInMetadata source qt
SMOTableObj qt tableObj -> MetadataModifier $
tableMetadataSetter source qt %~ case tableObj of
MTORel rn _ -> dropRelationshipInMetadata rn
MTOPerm rn pt -> dropPermissionInMetadata rn pt
MTOTrigger trn -> dropEventTriggerInMetadata trn
MTOComputedField ccn -> dropComputedFieldInMetadata ccn
MTORemoteRelationship rn -> dropRemoteRelationshipInMetadata rn
SMOFunction qf -> dropFunctionInMetadata source qf
SMOFunctionPermission qf rn -> dropFunctionPermissionInMetadata source qf rn
MORemoteSchema rsn -> dropRemoteSchemaInMetadata rsn
MORemoteSchemaPermissions rsName role -> dropRemoteSchemaPermissionInMetadata rsName role
MOCustomTypes -> clearCustomTypesInMetadata
MOAction action -> dropActionInMetadata action -- Nothing
MOActionPermission action role -> dropActionPermissionInMetadata action role
MOCronTrigger ctName -> dropCronTriggerInMetadata ctName
MOInheritedRole role -> dropInheritedRoleInMetadata role
MOEndpoint epName -> dropEndpointInMetadata epName
runGetCatalogState
:: (MonadMetadataStorageQueryAPI m) => GetCatalogState -> m EncJSON

View File

@ -72,10 +72,6 @@ instance (Backend b) => Arbitrary (SourceMetadata b) where
instance Arbitrary FunctionPermissionMetadata where
arbitrary = genericArbitrary
instance Arbitrary BackendSourceMetadata where
-- FIXME: Derive instance for any b using Backend b
arbitrary = BackendSourceMetadata @'Postgres <$> genericArbitrary
instance Arbitrary TableCustomRootFields where
arbitrary = uniqueRootFields
where

View File

@ -43,6 +43,8 @@ import Control.Lens ((.~))
import Data.Aeson
import Data.Text.Extended
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.EncJSON
import Hasura.RQL.DDL.Permission.Internal
import Hasura.RQL.DML.Internal hiding (askPermInfo)
@ -130,7 +132,8 @@ class (ToJSON a) => IsPerm b a where
:: PermDef a -> TableMetadata b -> TableMetadata b
runCreatePerm
:: (UserInfoM m, CacheRWM m, IsPerm b a, MonadError QErr m, MetadataM m, BackendMetadata b)
:: forall m b a
. (UserInfoM m, CacheRWM m, IsPerm b a, MonadError QErr m, MetadataM m, BackendMetadata b)
=> CreatePerm b a -> m EncJSON
runCreatePerm (WithTable source tn pd) = do
tableInfo <- askTabInfo source tn
@ -138,7 +141,10 @@ runCreatePerm (WithTable source tn pd) = do
pt = permAccToType permAcc
ptText = permTypeToCode pt
role = _pdRole pd
metadataObject = MOSourceObjId source $ SMOTableObj tn $ MTOPerm role pt
metadataObject = MOSourceObjId source
$ AB.mkAnyBackend
$ SMOTableObj tn
$ MTOPerm role pt
onJust (getPermInfoMaybe role permAcc tableInfo) $ const $ throw400 AlreadyExists $
ptText <> " permission already defined on table " <> tn <<> " with role " <>> role
buildSchemaCacheFor metadataObject
@ -353,7 +359,8 @@ instance (Backend b) => FromJSON (SetPermComment b) where
<*> o .:? "comment"
runSetPermComment
:: (QErrM m, CacheRWM m, MetadataM m, BackendMetadata b)
:: forall m b
. (QErrM m, CacheRWM m, MetadataM m, BackendMetadata b)
=> SetPermComment b -> m EncJSON
runSetPermComment (SetPermComment source table roleName permType comment) = do
tableInfo <- askTabInfo source table
@ -373,8 +380,10 @@ runSetPermComment (SetPermComment source table roleName permType comment) = do
assertPermDefined roleName PADelete tableInfo
pure $ tmDeletePermissions.ix roleName.pdComment .~ comment
let metadataObject = MOSourceObjId source $
SMOTableObj table $ MTOPerm roleName permType
let metadataObject = MOSourceObjId source
$ AB.mkAnyBackend
$ SMOTableObj table
$ MTOPerm roleName permType
buildSchemaCacheFor metadataObject
$ MetadataModifier
$ tableMetadataSetter source table %~ permModifier

View File

@ -21,13 +21,16 @@ import Data.Aeson.Types
import Data.Text.Extended
import Data.Tuple (swap)
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.EncJSON
import Hasura.RQL.DDL.Deps
import Hasura.RQL.DDL.Permission
import Hasura.RQL.Types
runCreateRelationship
:: (MonadError QErr m, CacheRWM m, ToJSON a, MetadataM m, Backend b, BackendMetadata b)
:: forall m b a
. (MonadError QErr m, CacheRWM m, ToJSON a, MetadataM m, Backend b, BackendMetadata b)
=> RelType -> WithTable b (RelDef a) -> m EncJSON
runCreateRelationship relType (WithTable source tableName relDef) = do
let relName = _rdName relDef
@ -37,8 +40,10 @@ runCreateRelationship relType (WithTable source tableName relDef) = do
throw400 AlreadyExists $
"field with name " <> relName <<> " already exists in table " <>> tableName
let comment = _rdComment relDef
metadataObj = MOSourceObjId source $
SMOTableObj tableName $ MTORel relName relType
metadataObj = MOSourceObjId source
$ AB.mkAnyBackend
$ SMOTableObj tableName
$ MTORel relName relType
addRelationshipToMetadata <- case relType of
ObjRel -> do
value <- decodeValue $ toJSON $ _rdUsing relDef
@ -53,7 +58,8 @@ runCreateRelationship relType (WithTable source tableName relDef) = do
pure successMsg
runDropRel
:: (MonadError QErr m, CacheRWM m, MetadataM m, BackendMetadata b)
:: forall b m
. (MonadError QErr m, CacheRWM m, MetadataM m, BackendMetadata b)
=> DropRel b -> m EncJSON
runDropRel (DropRel source qt rn cascade) = do
depObjs <- collectDependencies
@ -68,7 +74,12 @@ runDropRel (DropRel source qt rn cascade) = do
tabInfo <- askTableCoreInfo source qt
void $ askRelType (_tciFieldInfoMap tabInfo) rn ""
sc <- askSchemaCache
let depObjs = getDependentObjs sc (SOSourceObj source $ SOITableObj qt $ TORel rn)
let depObjs = getDependentObjs
sc
(SOSourceObj source
$ AB.mkAnyBackend
$ SOITableObj qt
$ TORel rn)
when (depObjs /= [] && not cascade) $ reportDeps depObjs
pure depObjs
@ -82,7 +93,8 @@ dropRelationshipInMetadata relName =
. (tmArrayRelationships %~ OMap.delete relName)
objRelP2Setup
:: (QErrM m, Backend b)
:: forall b m
. (QErrM m, Backend b)
=> SourceName
-> TableName b
-> HashMap (TableName b) (HashSet (ForeignKey b))
@ -94,7 +106,12 @@ objRelP2Setup source qt foreignKeys (RelDef rn ru _) fieldInfoMap = case ru of
let refqt = rmTable rm
(lCols, rCols) = unzip $ HM.toList $ rmColumns rm
io = fromMaybe BeforeParent $ rmInsertOrder rm
mkDependency tableName reason col = SchemaDependency (SOSourceObj source $ SOITableObj tableName $ TOCol col) reason
mkDependency tableName reason col = SchemaDependency
(SOSourceObj source
$ AB.mkAnyBackend
$ SOITableObj tableName
$ TOCol col)
reason
dependencies = map (mkDependency qt DRLeftColumn) lCols
<> map (mkDependency refqt DRRightColumn) rCols
pure (RelInfo rn ObjRel (rmColumns rm) refqt True True io, dependencies)
@ -102,11 +119,25 @@ objRelP2Setup source qt foreignKeys (RelDef rn ru _) fieldInfoMap = case ru of
foreignTableForeignKeys <- findTable qt foreignKeys
ForeignKey constraint foreignTable colMap <- getRequiredFkey columnName (HS.toList foreignTableForeignKeys)
let dependencies =
[ SchemaDependency (SOSourceObj source $ SOITableObj qt $ TOForeignKey (_cName constraint)) DRFkey
, SchemaDependency (SOSourceObj source $ SOITableObj qt $ TOCol columnName) DRUsingColumn
[ SchemaDependency
(SOSourceObj source
$ AB.mkAnyBackend
$ SOITableObj qt
$ TOForeignKey (_cName constraint))
DRFkey
, SchemaDependency
(SOSourceObj source
$ AB.mkAnyBackend
$ SOITableObj qt
$ TOCol columnName)
DRUsingColumn
-- this needs to be added explicitly to handle the remote table being untracked. In this case,
-- neither the using_col nor the constraint name will help.
, SchemaDependency (SOSourceObj source $ SOITable foreignTable) DRRemoteTable
, SchemaDependency
(SOSourceObj source
$ AB.mkAnyBackend
$ SOITable foreignTable)
DRRemoteTable
]
colInfo <- HM.lookup (fromCol columnName) fieldInfoMap
`onNothing` throw500 "could not find column info in schema cache"
@ -116,54 +147,102 @@ objRelP2Setup source qt foreignKeys (RelDef rn ru _) fieldInfoMap = case ru of
foreignTableForeignKeys <- findTable remoteTable foreignKeys
ForeignKey constraint _foreignTable colMap <- getRequiredRemoteFkey remoteCol (HS.toList foreignTableForeignKeys)
let dependencies =
[ SchemaDependency (SOSourceObj source $ SOITableObj remoteTable $ TOForeignKey (_cName constraint)) DRRemoteFkey
, SchemaDependency (SOSourceObj source $ SOITableObj qt $ TOCol remoteCol) DRUsingColumn
, SchemaDependency (SOSourceObj source $ SOITable remoteTable) DRRemoteTable
[ SchemaDependency
(SOSourceObj source
$ AB.mkAnyBackend
$ SOITableObj remoteTable
$ TOForeignKey (_cName constraint))
DRRemoteFkey
, SchemaDependency
(SOSourceObj source
$ AB.mkAnyBackend
$ SOITableObj qt
$ TOCol remoteCol)
DRUsingColumn
, SchemaDependency
(SOSourceObj source
$ AB.mkAnyBackend
$ SOITable remoteTable)
DRRemoteTable
]
pure (RelInfo rn ObjRel colMap remoteTable False False AfterParent, dependencies)
arrRelP2Setup
:: (QErrM m, Backend b)
:: forall b m
. (QErrM m, Backend b)
=> HashMap (TableName b) (HashSet (ForeignKey b))
-> SourceName
-> TableName b
-> (ArrRelDef b)
-> ArrRelDef b
-> m (RelInfo b, [SchemaDependency])
arrRelP2Setup foreignKeys source qt (RelDef rn ru _) = case ru of
RUManual rm -> do
let refqt = rmTable rm
(lCols, rCols) = unzip $ HM.toList $ rmColumns rm
deps = map (\c -> SchemaDependency (SOSourceObj source $ SOITableObj qt $ TOCol c) DRLeftColumn) lCols
<> map (\c -> SchemaDependency (SOSourceObj source $ SOITableObj refqt $ TOCol c) DRRightColumn) rCols
deps = map (\c -> SchemaDependency
(SOSourceObj source
$ AB.mkAnyBackend
$ SOITableObj qt
$ TOCol c) DRLeftColumn)
lCols
<> map (\c -> SchemaDependency
(SOSourceObj source
$ AB.mkAnyBackend
$ SOITableObj refqt
$ TOCol c)
DRRightColumn)
rCols
pure (RelInfo rn ArrRel (rmColumns rm) refqt True True BeforeParent, deps)
RUFKeyOn (ArrRelUsingFKeyOn refqt refCol) -> do
foreignTableForeignKeys <- findTable refqt foreignKeys
let keysThatReferenceUs = filter ((== qt) . _fkForeignTable) (HS.toList foreignTableForeignKeys)
ForeignKey constraint _ colMap <- getRequiredFkey refCol keysThatReferenceUs
let deps = [ SchemaDependency (SOSourceObj source $ SOITableObj refqt $ TOForeignKey (_cName constraint)) DRRemoteFkey
, SchemaDependency (SOSourceObj source $ SOITableObj refqt $ TOCol refCol) DRUsingColumn
let deps = [ SchemaDependency
(SOSourceObj source
$ AB.mkAnyBackend
$ SOITableObj refqt
$ TOForeignKey (_cName constraint))
DRRemoteFkey
, SchemaDependency
(SOSourceObj source
$ AB.mkAnyBackend
$ SOITableObj refqt
$ TOCol refCol)
DRUsingColumn
-- we don't need to necessarily track the remote table like we did in
-- case of obj relationships as the remote table is indirectly
-- tracked by tracking the constraint name and 'using_col'
, SchemaDependency (SOSourceObj source $ SOITable refqt) DRRemoteTable
, SchemaDependency
(SOSourceObj source
$ AB.mkAnyBackend
$ SOITable refqt)
DRRemoteTable
]
mapping = HM.fromList $ map swap $ HM.toList colMap
pure (RelInfo rn ArrRel mapping refqt False False BeforeParent, deps)
purgeRelDep
:: (QErrM m)
:: forall b m
. QErrM m
=> Backend b
=> SchemaObjId -> m (TableMetadata b -> TableMetadata b)
purgeRelDep (SOSourceObj _ (SOITableObj _ (TOPerm rn pt))) = pure $ dropPermissionInMetadata rn pt
purgeRelDep (SOSourceObj _ exists)
| Just (SOITableObj _ (TOPerm rn pt)) <- AB.unpackAnyBackend @b exists =
pure $ dropPermissionInMetadata rn pt
purgeRelDep d = throw500 $ "unexpected dependency of relationship : "
<> reportSchemaObj d
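
purgeRelDep above uses unpackAnyBackend as a type-directed partial projection: the caller names a backend via a type application, and a mismatch falls through to the error case. The same pattern-guard style against the simplified wrapper from the first sketch, with SourceObjId as a hypothetical stand-in for the real type:

-- Illustrative stand-in, not the real SourceObjId.
data SourceObjId (b :: BackendType) = SOITable String | SOITableObj String String

purgePostgresDep :: AnyBackend SourceObjId -> Either String String
purgePostgresDep exists
  | Just (SOITableObj tbl obj) <- unpackAnyBackend @'Postgres exists =
      Right ("drop " <> obj <> " on table " <> tbl)
  | otherwise = Left "unexpected dependency of relationship"
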
runSetRelComment
:: (CacheRWM m, MonadError QErr m, MetadataM m, BackendMetadata b)
:: forall m b
. (CacheRWM m, MonadError QErr m, MetadataM m, BackendMetadata b)
=> SetRelComment b -> m EncJSON
runSetRelComment defn = do
tabInfo <- askTableCoreInfo source qt
relType <- riType <$> askRelType (_tciFieldInfoMap tabInfo) rn ""
let metadataObj = MOSourceObjId source $ SMOTableObj qt $ MTORel rn relType
let metadataObj = MOSourceObjId source
$ AB.mkAnyBackend
$ SMOTableObj qt
$ MTORel rn relType
buildSchemaCacheFor metadataObj
$ MetadataModifier
$ tableMetadataSetter source qt %~ case relType of

View File

@ -10,15 +10,22 @@ import Hasura.Prelude
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.EncJSON
import Hasura.RQL.Types
runCreateRemoteRelationship
:: (MonadError QErr m, CacheRWM m, MetadataM m, BackendMetadata b) => RemoteRelationship b -> m EncJSON
:: forall m b
. (MonadError QErr m, CacheRWM m, MetadataM m, BackendMetadata b)
=> RemoteRelationship b
-> m EncJSON
runCreateRemoteRelationship RemoteRelationship{..} = do
void $ askTabInfo rtrSource rtrTable
let metadataObj = MOSourceObjId rtrSource $
SMOTableObj rtrTable $ MTORemoteRelationship rtrName
let metadataObj = MOSourceObjId rtrSource
$ AB.mkAnyBackend
$ SMOTableObj rtrTable
$ MTORemoteRelationship rtrName
metadata = RemoteRelationshipMetadata rtrName $
RemoteRelationshipDef rtrRemoteSchema rtrHasuraFields rtrRemoteField
buildSchemaCacheFor metadataObj
@ -27,12 +34,18 @@ runCreateRemoteRelationship RemoteRelationship{..} = do
%~ OMap.insert rtrName metadata
pure successMsg
runUpdateRemoteRelationship :: (MonadError QErr m, CacheRWM m, MetadataM m, BackendMetadata b) => RemoteRelationship b -> m EncJSON
runUpdateRemoteRelationship
:: forall m b
. (MonadError QErr m, CacheRWM m, MetadataM m, BackendMetadata b)
=> RemoteRelationship b
-> m EncJSON
runUpdateRemoteRelationship RemoteRelationship{..} = do
fieldInfoMap <- askFieldInfoMap rtrSource rtrTable
void $ askRemoteRel fieldInfoMap rtrName
let metadataObj = MOSourceObjId rtrSource $
SMOTableObj rtrTable $ MTORemoteRelationship rtrName
let metadataObj = MOSourceObjId rtrSource
$ AB.mkAnyBackend
$ SMOTableObj rtrTable
$ MTORemoteRelationship rtrName
metadata = RemoteRelationshipMetadata rtrName $
RemoteRelationshipDef rtrRemoteSchema rtrHasuraFields rtrRemoteField
buildSchemaCacheFor metadataObj
@ -42,12 +55,16 @@ runUpdateRemoteRelationship RemoteRelationship{..} = do
pure successMsg
runDeleteRemoteRelationship
:: (MonadError QErr m, CacheRWM m, MetadataM m) => DeleteRemoteRelationship -> m EncJSON
:: (MonadError QErr m, CacheRWM m, MetadataM m)
=> DeleteRemoteRelationship
-> m EncJSON
runDeleteRemoteRelationship (DeleteRemoteRelationship source table relName)= do
fieldInfoMap <- askFieldInfoMap source table
void $ askRemoteRel fieldInfoMap relName
let metadataObj = MOSourceObjId source $
SMOTableObj table $ MTORemoteRelationship relName
let metadataObj = MOSourceObjId source
$ AB.mkAnyBackend
$ SMOTableObj table
$ MTORemoteRelationship relName
buildSchemaCacheFor metadataObj
$ MetadataModifier
$ tableMetadataSetter source table %~ dropRemoteRelationshipInMetadata relName

View File

@ -2,8 +2,6 @@
{-# LANGUAGE OverloadedLabels #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -O0 #-}
{-| Top-level functions concerned specifically with operations on the schema cache, such as
rebuilding it from the catalog and incorporating schema changes. See the module documentation for
"Hasura.RQL.DDL.Schema" for more details.
@ -40,6 +38,7 @@ import Data.Text.Extended
import Network.HTTP.Client.Extended
import qualified Hasura.Incremental as Inc
import qualified Hasura.SQL.AnyBackend as AB
import qualified Hasura.Tracing as Tracing
import Hasura.Backends.Postgres.Connection
@ -63,7 +62,7 @@ import Hasura.Server.Version (HasVersion)
import Hasura.Session
buildRebuildableSchemaCache
:: (HasVersion)
:: HasVersion
=> Env.Environment
-> Metadata
-> CacheBuild RebuildableSchemaCache
@ -71,7 +70,7 @@ buildRebuildableSchemaCache =
buildRebuildableSchemaCacheWithReason CatalogSync
buildRebuildableSchemaCacheWithReason
:: (HasVersion)
:: HasVersion
=> BuildReason
-> Env.Environment
-> Metadata
@ -320,8 +319,16 @@ buildSchemaCacheRule env = proc (metadata, invalidationKeys) -> do
>-> (| Inc.keyed (\_ (FunctionMetadata qf config functionPermissions) -> do
let systemDefined = SystemDefined False
definition = toJSON $ TrackFunction qf
metadataObject = MetadataObject (MOSourceObjId @b source $ SMOFunction qf) definition
schemaObject = SOSourceObj @b source $ SOIFunction qf
metadataObject =
MetadataObject
(MOSourceObjId source
$ AB.mkAnyBackend
$ SMOFunction qf)
definition
schemaObject =
SOSourceObj source
$ AB.mkAnyBackend
$ SOIFunction qf
addFunctionContext e = "in function " <> qf <<> ": " <> e
(| withRecordInconsistency (
(| modifyErrA (do
@ -334,7 +341,7 @@ buildSchemaCacheRule env = proc (metadata, invalidationKeys) -> do
|) metadataObject) |)
>-> (\infos -> M.catMaybes infos >- returnA)
returnA -< BackendSourceInfo $ SourceInfo source tableCache functionCache sourceConfig
returnA -< AB.mkAnyBackend $ SourceInfo source tableCache functionCache sourceConfig
buildSourceOutput
:: forall arr m b
@ -374,7 +381,8 @@ buildSchemaCacheRule env = proc (metadata, invalidationKeys) -> do
remoteSchemaRoles = map _rspmRole . _rsmPermissions =<< OMap.elems remoteSchemas
sourceRoles =
HS.fromList $ concat $
OMap.elems sources >>= \(BackendSourceMetadata (SourceMetadata _ tables _functions _) ) -> do
OMap.elems sources >>=
\e -> AB.dispatchAnyBackend @Backend e \(SourceMetadata _ tables _functions _) -> do
table <- OMap.elems tables
pure ( OMap.keys (_tmInsertPermissions table) <> OMap.keys (_tmSelectPermissions table)
<> OMap.keys (_tmUpdatePermissions table) <> OMap.keys (_tmDeletePermissions table))
@ -423,10 +431,9 @@ buildSchemaCacheRule env = proc (metadata, invalidationKeys) -> do
let remoteSchemaCtxMap = M.map fst remoteSchemaMap
sourcesOutput <-
(| Inc.keyed (\_ (BackendSourceMetadata (sourceMetadata :: SourceMetadata b)) ->
case backendTag @b of
PostgresTag -> buildSourceOutput @arr @m -< (invalidationKeys, remoteSchemaCtxMap, sourceMetadata :: SourceMetadata 'Postgres, inheritedRoles)
MSSQLTag -> buildSourceOutput @arr @m -< (invalidationKeys, remoteSchemaCtxMap, sourceMetadata :: SourceMetadata 'MSSQL, inheritedRoles)
(| Inc.keyed (\_ exists ->
AB.dispatchAnyBackendArrow @BackendMetadata (buildSourceOutput @arr @m)
-< (invalidationKeys, remoteSchemaCtxMap, exists, inheritedRoles)
)
|) (M.fromList $ OMap.toList sources)
>-> (\infos -> M.catMaybes infos >- returnA)
@ -530,9 +537,17 @@ buildSchemaCacheRule env = proc (metadata, invalidationKeys) -> do
pure lq
mkEventTriggerMetadataObject
:: forall b a c
. Backend b
=> (a, SourceName, c, TableName b, EventTriggerConf)
-> MetadataObject
mkEventTriggerMetadataObject (_, source, _, table, eventTriggerConf) =
let objectId = MOSourceObjId source $
SMOTableObj table $ MTOTrigger $ etcName eventTriggerConf
let objectId = MOSourceObjId source
$ AB.mkAnyBackend
$ SMOTableObj table
$ MTOTrigger
$ etcName eventTriggerConf
definition = object ["table" .= table, "configuration" .= eventTriggerConf]
in MetadataObject objectId definition
@ -599,7 +614,7 @@ buildSchemaCacheRule env = proc (metadata, invalidationKeys) -> do
, MonadReader BuildReason m, HasServerConfigCtx m, BackendMetadata b)
=> ( SourceName, SourceConfig b, TableCoreInfo b
, [EventTriggerConf], Inc.Dependency Inc.InvalidationKey
) `arr` (EventTriggerInfoMap b)
) `arr` EventTriggerInfoMap b
buildTableEventTriggers = proc (source, sourceConfig, tableInfo, eventTriggerConfs, metadataInvalidationKey) ->
buildInfoMap (etcName . (^. _5)) mkEventTriggerMetadataObject buildEventTrigger
-< (tableInfo, map (metadataInvalidationKey, source, sourceConfig, _tciName tableInfo,) eventTriggerConfs)
@ -607,8 +622,10 @@ buildSchemaCacheRule env = proc (metadata, invalidationKeys) -> do
buildEventTrigger = proc (tableInfo, (metadataInvalidationKey, source, sourceConfig, table, eventTriggerConf)) -> do
let triggerName = etcName eventTriggerConf
metadataObject = mkEventTriggerMetadataObject (metadataInvalidationKey, source, sourceConfig, table, eventTriggerConf)
schemaObjectId = SOSourceObj @b source $
SOITableObj table $ TOTrigger triggerName
schemaObjectId = SOSourceObj source
$ AB.mkAnyBackend
$ SOITableObj table
$ TOTrigger triggerName
addTriggerContext e = "in event trigger " <> triggerName <<> ": " <> e
(| withRecordInconsistency (
(| modifyErrA (do
@ -629,8 +646,15 @@ buildSchemaCacheRule env = proc (metadata, invalidationKeys) -> do
buildReason <- ask
serverConfigCtx <- askServerConfigCtx
-- we don't modify the existing event trigger definitions in the maintenance mode
when (buildReason == CatalogUpdate && (_sccMaintenanceMode serverConfigCtx) == MaintenanceModeDisabled) $
liftEitherM $ createTableEventTrigger serverConfigCtx sourceConfig tableName tableColumns triggerName triggerDefinition
when (buildReason == CatalogUpdate && _sccMaintenanceMode serverConfigCtx == MaintenanceModeDisabled)
$ liftEitherM
$ createTableEventTrigger
serverConfigCtx
sourceConfig
tableName
tableColumns
triggerName
triggerDefinition
buildCronTriggers
:: ( ArrowChoice arr

View File

@ -13,12 +13,12 @@ import Data.Aeson
import Data.List (nub)
import Data.Monoid (First)
import Data.Text.Extended
import Data.Typeable (cast)
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.RQL.DDL.Schema.Cache.Common
import Hasura.RQL.Types
-- | Processes collected 'CIDependency' values into a 'DepMap', performing integrity checking to
-- ensure the dependencies actually exist. If a dependency is missing, its transitive dependents are
-- removed from the cache, and 'InconsistentMetadata's are returned.
@ -96,45 +96,46 @@ pruneDanglingDependents cache = fmap (M.filter (not . null)) . traverse do
unless (roleName `M.member` _rscPermissions (fst remoteSchema)) $
Left $ "no permission defined on remote schema " <> remoteSchemaName
<<> " for role " <>> roleName
SOSourceObj source sourceObjId -> do
sourceInfo <- castSourceInfo source sourceObjId
case sourceObjId of
SOITable tableName -> do
void $ resolveTable sourceInfo tableName
SOIFunction functionName -> void $
M.lookup functionName (_siFunctions sourceInfo)
`onNothing` Left ("function " <> functionName <<> " is not tracked")
SOITableObj tableName tableObjectId -> do
tableInfo <- resolveTable sourceInfo tableName
case tableObjectId of
TOCol columnName ->
void $ resolveField tableInfo (columnToFieldName tableInfo columnName) _FIColumn "column"
TORel relName ->
void $ resolveField tableInfo (fromRel relName) _FIRelationship "relationship"
TOComputedField fieldName ->
void $ resolveField tableInfo (fromComputedField fieldName) _FIComputedField "computed field"
TORemoteRel fieldName ->
void $ resolveField tableInfo (fromRemoteRelationship fieldName) _FIRemoteRelationship "remote relationship"
TOForeignKey constraintName -> do
let foreignKeys = _tciForeignKeys $ _tiCoreInfo tableInfo
unless (isJust $ find ((== constraintName) . _cName . _fkConstraint) foreignKeys) $
Left $ "no foreign key constraint named " <> constraintName <<> " is "
<> "defined for table " <>> tableName
TOPerm roleName permType -> withPermType permType \accessor -> do
let permLens = permAccToLens accessor
unless (has (tiRolePermInfoMap.ix roleName.permLens._Just) tableInfo) $
Left $ "no " <> permTypeToCode permType <> " permission defined on table "
<> tableName <<> " for role " <>> roleName
TOTrigger triggerName ->
unless (M.member triggerName (_tiEventTriggerInfoMap tableInfo)) $ Left $
"no event trigger named " <> triggerName <<> " is defined for table " <>> tableName
SOSourceObj source exists -> do
AB.dispatchAnyBackend @BackendMetadata exists $ \sourceObjId -> do
sourceInfo <- castSourceInfo source sourceObjId
case sourceObjId of
SOITable tableName -> do
void $ resolveTable sourceInfo tableName
SOIFunction functionName -> void $
M.lookup functionName (_siFunctions sourceInfo)
`onNothing` Left ("function " <> functionName <<> " is not tracked")
SOITableObj tableName tableObjectId -> do
tableInfo <- resolveTable sourceInfo tableName
case tableObjectId of
TOCol columnName ->
void $ resolveField tableInfo (columnToFieldName tableInfo columnName) _FIColumn "column"
TORel relName ->
void $ resolveField tableInfo (fromRel relName) _FIRelationship "relationship"
TOComputedField fieldName ->
void $ resolveField tableInfo (fromComputedField fieldName) _FIComputedField "computed field"
TORemoteRel fieldName ->
void $ resolveField tableInfo (fromRemoteRelationship fieldName) _FIRemoteRelationship "remote relationship"
TOForeignKey constraintName -> do
let foreignKeys = _tciForeignKeys $ _tiCoreInfo tableInfo
unless (isJust $ find ((== constraintName) . _cName . _fkConstraint) foreignKeys) $
Left $ "no foreign key constraint named " <> constraintName <<> " is "
<> "defined for table " <>> tableName
TOPerm roleName permType -> withPermType permType \accessor -> do
let permLens = permAccToLens accessor
unless (has (tiRolePermInfoMap.ix roleName.permLens._Just) tableInfo) $
Left $ "no " <> permTypeToCode permType <> " permission defined on table "
<> tableName <<> " for role " <>> roleName
TOTrigger triggerName ->
unless (M.member triggerName (_tiEventTriggerInfoMap tableInfo)) $ Left $
"no event trigger named " <> triggerName <<> " is defined for table " <>> tableName
castSourceInfo
:: (Backend b) => SourceName -> SourceObjId b -> Either Text (SourceInfo b)
castSourceInfo sourceName _ =
-- TODO: if the cast returns Nothing, we should be throwing an internal error
-- the type of the dependency in sources is not as recorded
(M.lookup sourceName (_boSources cache) >>= \(BackendSourceInfo si) -> cast si)
(M.lookup sourceName (_boSources cache) >>= unsafeSourceInfo)
`onNothing` Left ("no such source found " <>> sourceName)
resolveTable sourceInfo tableName =
@ -160,7 +161,7 @@ deleteMetadataObject
:: MetadataObjId -> BuildOutputs -> BuildOutputs
deleteMetadataObject = \case
MOSource name -> boSources %~ M.delete name
MOSourceObjId source sourceObjId -> boSources %~ M.adjust (deleteObjId sourceObjId) source
MOSourceObjId source exists -> AB.dispatchAnyBackend @Backend exists (\sourceObjId -> boSources %~ M.adjust (deleteObjId sourceObjId) source)
MORemoteSchema name -> boRemoteSchemas %~ M.delete name
MORemoteSchemaPermissions name role -> boRemoteSchemas.ix name._1.rscPermissions %~ M.delete role
MOCronTrigger name -> boCronTriggers %~ M.delete name
@ -170,8 +171,13 @@ deleteMetadataObject = \case
MOActionPermission name role -> boActions.ix name.aiPermissions %~ M.delete role
MOInheritedRole name -> boInheritedRoles %~ M.delete name
where
deleteObjId :: (Backend b) => SourceMetadataObjId b -> BackendSourceInfo -> BackendSourceInfo
deleteObjId sourceObjId sourceInfo = maybe sourceInfo (BackendSourceInfo . deleteObjFn sourceObjId) $ unsafeSourceInfo sourceInfo
deleteObjId :: forall b. (Backend b) => SourceMetadataObjId b -> BackendSourceInfo -> BackendSourceInfo
deleteObjId sourceObjId sourceInfo =
maybe
sourceInfo
(AB.mkAnyBackend . deleteObjFn sourceObjId)
$ unsafeSourceInfo sourceInfo
deleteObjFn :: (Backend b) => SourceMetadataObjId b -> SourceInfo b -> SourceInfo b
deleteObjFn = \case
SMOTable name -> siTables %~ M.delete name

View File

@ -17,6 +17,7 @@ import Data.Aeson
import Data.Text.Extended
import qualified Hasura.Incremental as Inc
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.RQL.DDL.ComputedField
import Hasura.RQL.DDL.Relationship
@ -125,11 +126,14 @@ addNonColumnFields = proc ( source
returnA -< FIColumn columnInfo
mkRelationshipMetadataObject
:: (ToJSON a, Backend b)
:: forall a b
. (ToJSON a, Backend b)
=> RelType -> (SourceName, TableName b, RelDef a) -> MetadataObject
mkRelationshipMetadataObject relType (source, table, relDef) =
let objectId = MOSourceObjId source $
SMOTableObj table $ MTORel (_rdName relDef) relType
let objectId = MOSourceObjId source
$ AB.mkAnyBackend
$ SMOTableObj table
$ MTORel (_rdName relDef) relType
in MetadataObject objectId $ toJSON $ WithTable source table relDef
buildObjectRelationship
@ -164,21 +168,25 @@ buildArrayRelationship = proc (fkeysMap, (source, table, relDef)) -> do
buildRelationship -< (source, table, buildRelInfo, ArrRel, relDef)
buildRelationship
:: ( ArrowChoice arr
:: forall b arr a
. ( ArrowChoice arr
, ArrowWriter (Seq CollectedInfo) arr
, ToJSON a
, Backend b
)
=> ( SourceName
, TableName b
, (RelDef a -> Either QErr (RelInfo b, [SchemaDependency]))
, RelDef a -> Either QErr (RelInfo b, [SchemaDependency])
, RelType
, RelDef a
) `arr` Maybe (RelInfo b)
buildRelationship = proc (source, table, buildRelInfo, relType, relDef) -> do
let relName = _rdName relDef
metadataObject = mkRelationshipMetadataObject relType (source, table, relDef)
schemaObject = SOSourceObj source $ SOITableObj table $ TORel relName
schemaObject = SOSourceObj source
$ AB.mkAnyBackend
$ SOITableObj table
$ TORel relName
addRelationshipContext e = "in relationship " <> relName <<> ": " <> e
(| withRecordInconsistency (
(| modifyErrA (do
@ -189,9 +197,15 @@ buildRelationship = proc (source, table, buildRelInfo, relType, relDef) -> do
|) metadataObject
mkComputedFieldMetadataObject
:: (Backend b) => (SourceName, TableName b, ComputedFieldMetadata b) -> MetadataObject
:: forall b
. (Backend b)
=> (SourceName, TableName b, ComputedFieldMetadata b)
-> MetadataObject
mkComputedFieldMetadataObject (source, table, ComputedFieldMetadata{..}) =
let objectId = MOSourceObjId source $ SMOTableObj table $ MTOComputedField _cfmName
let objectId = MOSourceObjId source
$ AB.mkAnyBackend
$ SMOTableObj table
$ MTOComputedField _cfmName
definition = AddComputedField source table _cfmName _cfmDefinition _cfmComment
in MetadataObject objectId (toJSON definition)
@ -213,17 +227,22 @@ buildComputedField = proc (trackedTableNames, (source, pgFunctions, table, cf@Co
|) (mkComputedFieldMetadataObject (source, table, cf))
mkRemoteRelationshipMetadataObject
:: (Backend b)
=> (SourceName, TableName b, RemoteRelationshipMetadata) -> MetadataObject
:: forall b
. Backend b
=> (SourceName, TableName b, RemoteRelationshipMetadata)
-> MetadataObject
mkRemoteRelationshipMetadataObject (source, table, RemoteRelationshipMetadata{..}) =
let objectId = MOSourceObjId source $
SMOTableObj table $ MTORemoteRelationship _rrmName
let objectId = MOSourceObjId source
$ AB.mkAnyBackend
$ SMOTableObj table
$ MTORemoteRelationship _rrmName
RemoteRelationshipDef{..} = _rrmDefinition
in MetadataObject objectId $ toJSON $
RemoteRelationship _rrmName source table _rrdHasuraFields _rrdRemoteSchema _rrdRemoteField
buildRemoteRelationship
:: ( ArrowChoice arr, ArrowWriter (Seq CollectedInfo) arr
:: forall b arr m
. ( ArrowChoice arr, ArrowWriter (Seq CollectedInfo) arr
, ArrowKleisli m arr, MonadError QErr m, BackendMetadata b)
=> ( ([ColumnInfo b], RemoteSchemaMap)
, (SourceName, TableName b, RemoteRelationshipMetadata)
@ -232,8 +251,10 @@ buildRemoteRelationship = proc ( (pgColumns, remoteSchemaMap)
, (source, table, rrm@RemoteRelationshipMetadata{..})
) -> do
let metadataObject = mkRemoteRelationshipMetadataObject (source, table, rrm)
schemaObj = SOSourceObj source $
SOITableObj table $ TORemoteRel _rrmName
schemaObj = SOSourceObj source
$ AB.mkAnyBackend
$ SOITableObj table
$ TORemoteRel _rrmName
addRemoteRelationshipContext e = "in remote relationship " <> _rrmName <<> ": " <> e
RemoteRelationshipDef{..} = _rrmDefinition
remoteRelationship = RemoteRelationship _rrmName source table _rrdHasuraFields

View File

@ -22,6 +22,7 @@ import Data.Aeson
import Data.Text.Extended
import qualified Hasura.Incremental as Inc
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.RQL.DDL.Permission
import Hasura.RQL.DDL.Permission.Internal
@ -201,8 +202,10 @@ mkPermissionMetadataObject
=> SourceName -> TableName b -> PermDef a -> MetadataObject
mkPermissionMetadataObject source table permDef =
let permType = permAccToType (permAccessor :: PermAccessor b (PermInfo b a))
objectId = MOSourceObjId source $
SMOTableObj table $ MTOPerm (_pdRole permDef) permType
objectId = MOSourceObjId source
$ AB.mkAnyBackend
$ SMOTableObj table
$ MTOPerm (_pdRole permDef) permType
definition = toJSON $ WithTable source table permDef
in MetadataObject objectId definition
@ -221,8 +224,10 @@ withPermission f = proc (e, ((source, table, permission), s)) -> do
let metadataObject = mkPermissionMetadataObject source table permission
permType = permAccToType (permAccessor :: PermAccessor bknd (PermInfo bknd c))
roleName = _pdRole permission
schemaObject = SOSourceObj source $
SOITableObj table $ TOPerm roleName permType
schemaObject = SOSourceObj source
$ AB.mkAnyBackend
$ SOITableObj table
$ TOPerm roleName permType
addPermContext err = "in permission for role " <> roleName <<> ": " <> err
(| withRecordInconsistency (
(| withRecordDependencies (

View File

@ -1,7 +1,13 @@
module Hasura.RQL.DDL.Schema.Common where
import Hasura.Backends.Postgres.SQL.Types
import Hasura.Prelude
import qualified Data.HashMap.Strict as HM
import qualified Database.PG.Query as Q
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.Backends.Postgres.SQL.Types
import Hasura.RQL.DDL.ComputedField
import Hasura.RQL.DDL.EventTrigger
import Hasura.RQL.DDL.Permission
@ -10,8 +16,6 @@ import Hasura.RQL.DDL.RemoteRelationship
import Hasura.RQL.DDL.Schema.Function
import Hasura.RQL.Types
import qualified Data.HashMap.Strict as HM
import qualified Database.PG.Query as Q
purgeDependentObject
:: forall b m
@ -28,7 +32,9 @@ purgeDependentObject source sourceObjId = case sourceObjId of
_ -> id
SOIFunction qf -> pure $ dropFunctionInMetadata source qf
_ ->
throw500 $ "unexpected dependent object: " <> reportSchemaObj (SOSourceObj source sourceObjId)
throw500
$ "unexpected dependent object: "
<> reportSchemaObj (SOSourceObj source $ AB.mkAnyBackend sourceObjId)
-- | Fetch Postgres metadata of all user tables
fetchTableMetadata :: (MonadTx m) => m (DBTablesMetadata 'Postgres)

View File

@ -29,7 +29,8 @@ import qualified Data.List.NonEmpty as NE
import Data.Aeson.TH
import Data.List.Extended (duplicates)
import Data.Typeable (cast)
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.Backends.Postgres.SQL.Types hiding (TableName)
import Hasura.RQL.DDL.Schema.Common
@ -179,16 +180,28 @@ getTableChangeDeps source tn tableDiff = do
sc <- askSchemaCache
-- for all the dropped columns
droppedColDeps <- fmap concat $ forM droppedCols $ \droppedCol -> do
let objId = SOSourceObj source $ SOITableObj tn $ TOCol droppedCol
let objId = SOSourceObj source
$ AB.mkAnyBackend
$ SOITableObj tn
$ TOCol droppedCol
return $ getDependentObjs sc objId
-- for all dropped constraints
droppedConsDeps <- fmap concat $ forM droppedFKeyConstraints $ \droppedCons -> do
let objId = SOSourceObj source $ SOITableObj tn $ TOForeignKey droppedCons
let objId = SOSourceObj source
$ AB.mkAnyBackend
$ SOITableObj tn
$ TOForeignKey droppedCons
return $ getDependentObjs sc objId
return $ droppedConsDeps <> droppedColDeps <> droppedComputedFieldDeps
where
TableDiff _ droppedCols _ _ droppedFKeyConstraints computedFieldDiff _ _ = tableDiff
droppedComputedFieldDeps = map (SOSourceObj source . SOITableObj tn . TOComputedField) $ _cfdDropped computedFieldDiff
droppedComputedFieldDeps =
map
(SOSourceObj source
. AB.mkAnyBackend
. SOITableObj tn
. TOComputedField)
$ _cfdDropped computedFieldDiff
data SchemaDiff (b :: BackendType)
= SchemaDiff
@ -211,7 +224,10 @@ getSchemaChangeDeps
getSchemaChangeDeps source schemaDiff = do
-- Get schema cache
sc <- askSchemaCache
let tableIds = map (SOSourceObj source . SOITable) droppedTables
let tableIds =
map
(SOSourceObj source . AB.mkAnyBackend . SOITable)
droppedTables
-- Get the dependent of the dropped tables
let tableDropDeps = concatMap (getDependentObjs sc) tableIds
tableModDeps <- concat <$> traverse (uncurry (getTableChangeDeps source)) alteredTables
@ -220,10 +236,11 @@ getSchemaChangeDeps source schemaDiff = do
where
SchemaDiff droppedTables alteredTables = schemaDiff
isDirectDep (SOSourceObj s (SOITableObj tn _)) =
case cast tn of
Nothing -> False
Just pgTable -> s == source && pgTable `HS.member` HS.fromList droppedTables
isDirectDep (SOSourceObj s exists) =
case AB.unpackAnyBackend exists of
Just (SOITableObj pgTable _) ->
s == source && pgTable `HS.member` HS.fromList droppedTables
_ -> False
isDirectDep _ = False
data FunctionDiff

View File

@ -12,14 +12,15 @@ import qualified Data.HashMap.Strict.InsOrd as OMap
import Data.Aeson
import Data.Text.Extended
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.EncJSON
import Hasura.RQL.Types
import Hasura.Session
newtype TrackFunction b
= TrackFunction
{ tfName :: (FunctionName b)}
{ tfName :: FunctionName b }
deriving instance (Backend b) => Show (TrackFunction b)
deriving instance (Backend b) => Eq (TrackFunction b)
deriving instance (Backend b) => FromJSON (TrackFunction b)
@ -41,10 +42,12 @@ trackFunctionP1 sourceName qf = do
throw400 NotSupported $ "table with name " <> qf <<> " already exists"
trackFunctionP2
:: (MonadError QErr m, CacheRWM m, MetadataM m, BackendMetadata b)
:: forall b m
. (MonadError QErr m, CacheRWM m, MetadataM m, BackendMetadata b)
=> SourceName -> FunctionName b -> FunctionConfig -> m EncJSON
trackFunctionP2 sourceName qf config = do
buildSchemaCacheFor (MOSourceObjId sourceName $ SMOFunction qf)
buildSchemaCacheFor
(MOSourceObjId sourceName $ AB.mkAnyBackend $ SMOFunction qf)
$ MetadataModifier
$ metaSources.ix sourceName.toSourceMetadata.smFunctions
%~ OMap.insert qf (FunctionMetadata qf config mempty)
@ -175,14 +178,18 @@ runCreateFunctionPermission (CreateFunctionPermission functionName source role)
<> role <<> " already exists for function " <> functionName <<> " in source: " <>> source
functionTableInfo <-
unsafeTableInfo @b source (_fiReturnType functionInfo) sourceCache
`onNothing` throw400 NotExists ("function's return table " <> (_fiReturnType functionInfo) <<> " not found in the cache")
`onNothing` throw400 NotExists ("function's return table " <> _fiReturnType functionInfo <<> " not found in the cache")
unless (role `Map.member` _tiRolePermInfoMap functionTableInfo) $
throw400 NotSupported $
"function permission can only be added when the function's return table "
<> _fiReturnType functionInfo <<> " has select permission configured for role: " <>> role
buildSchemaCacheFor (MOSourceObjId source $ SMOFunctionPermission functionName role)
buildSchemaCacheFor
(MOSourceObjId source
$ AB.mkAnyBackend (SMOFunctionPermission functionName role))
$ MetadataModifier
$ metaSources.ix source.toSourceMetadata.smFunctions.ix functionName.fmPermissions
$ metaSources.ix
source.toSourceMetadata.smFunctions.ix
functionName.fmPermissions
%~ (:) (FunctionPermissionMetadata role)
pure successMsg
@ -195,7 +202,8 @@ dropFunctionPermissionInMetadata source function role = MetadataModifier $
type DropFunctionPermission = CreateFunctionPermission
runDropFunctionPermission
:: ( CacheRWM m
:: forall m b
. ( CacheRWM m
, MonadError QErr m
, MetadataM m
, BackendMetadata b
@ -208,6 +216,9 @@ runDropFunctionPermission (CreateFunctionPermission functionName source role) =
throw400 NotExists $
"permission of role "
<> role <<> " does not exist for function " <> functionName <<> " in source: " <>> source
buildSchemaCacheFor (MOSourceObjId source $ SMOFunctionPermission functionName role)
buildSchemaCacheFor
(MOSourceObjId source
$ AB.mkAnyBackend
$ SMOFunctionPermission functionName role)
$ dropFunctionPermissionInMetadata source functionName role
pure successMsg

View File

@ -8,17 +8,7 @@ module Hasura.RQL.DDL.Schema.Rename
)
where
import Control.Lens.Combinators
import Control.Lens.Operators
-- import Hasura.Backends.Postgres.SQL.Types
import Hasura.Prelude
import Hasura.RQL.DDL.Permission
import Hasura.RQL.Types
import Hasura.Session
import Data.Aeson
import Data.Text.Extended
import Data.Typeable (cast)
import qualified Data.HashMap.Strict as M
import qualified Data.HashMap.Strict.InsOrd as OMap
@ -26,6 +16,18 @@ import qualified Data.HashSet as Set
import qualified Data.List.NonEmpty as NE
import qualified Language.GraphQL.Draft.Syntax as G
import Control.Lens.Combinators
import Control.Lens.Operators
import Data.Aeson
import Data.Text.Extended
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.RQL.DDL.Permission
import Hasura.RQL.Types
import Hasura.Session
data RenameItem (b :: BackendType) a
= RenameItem
{ _riTable :: !(TableName b)
@ -51,7 +53,8 @@ otherDeps errMsg d =
<> reportSchemaObj d <> "; " <> errMsg
renameTableInMetadata
:: ( MonadError QErr m
:: forall b m
. ( MonadError QErr m
, CacheRM m
, MonadWriter MetadataModifier m
, BackendMetadata b
@ -59,18 +62,23 @@ renameTableInMetadata
=> SourceName -> TableName b -> TableName b -> m ()
renameTableInMetadata source newQT oldQT = do
sc <- askSchemaCache
let allDeps = getDependentObjs sc $ SOSourceObj source $ SOITable oldQT
let allDeps = getDependentObjs sc
$ SOSourceObj source
$ AB.mkAnyBackend
$ SOITable oldQT
-- update all dependant schema objects
forM_ allDeps $ \case
(SOSourceObj _ (SOITableObj refQT (TORel rn))) ->
onJust (cast refQT) \tn -> updateRelDefs source tn rn (oldQT, newQT)
(SOSourceObj _ (SOITableObj refQT (TOPerm rn pt))) ->
onJust (cast refQT) \tn -> updatePermFlds source tn rn pt $ RTable (oldQT, newQT)
-- A trigger's definition is not dependent on the table directly
(SOSourceObj _ (SOITableObj _ (TOTrigger _))) -> pure ()
-- A remote relationship's definition is not dependent on the table directly
(SOSourceObj _ (SOITableObj _ (TORemoteRel _))) -> pure ()
sobj@(SOSourceObj _ exists) -> case AB.unpackAnyBackend exists of
Just (SOITableObj refQT (TORel rn)) ->
updateRelDefs source refQT rn (oldQT, newQT)
Just (SOITableObj refQT (TOPerm rn pt)) ->
updatePermFlds source refQT rn pt $ RTable (oldQT, newQT)
-- A trigger's definition is not dependent on the table directly
Just (SOITableObj _ (TOTrigger _)) -> pure ()
-- A remote relationship's definition is not dependent on the table directly
Just (SOITableObj _ (TORemoteRel _)) -> pure ()
_ -> otherDeps errMsg sobj
d -> otherDeps errMsg d
-- Update table name in metadata
@ -93,20 +101,28 @@ renameColumnInMetadata oCol nCol source qt fieldInfo = do
-- Check if any relation exists with new column name
assertFldNotExists
-- Fetch dependent objects
let depObjs = getDependentObjs sc $ SOSourceObj source $
SOITableObj qt $ TOCol oCol
let depObjs = getDependentObjs sc
$ SOSourceObj source
$ AB.mkAnyBackend
$ SOITableObj qt
$ TOCol oCol
renameFld = RFCol $ RenameItem qt oCol nCol
-- Update dependent objects
forM_ depObjs $ \case
(SOSourceObj _ (SOITableObj refQT (TOPerm role pt))) ->
onJust (cast refQT) \tn -> updatePermFlds source tn role pt $ RField renameFld
(SOSourceObj _ (SOITableObj refQT (TORel rn))) ->
onJust (cast refQT) \tn -> updateColInRel source tn rn $ RenameItem qt oCol nCol
(SOSourceObj _ (SOITableObj refQT (TOTrigger triggerName))) ->
onJust (cast refQT) \tn -> tell $ MetadataModifier $
tableMetadataSetter source tn.tmEventTriggers.ix triggerName %~ (updateColumnInEventTrigger tn oCol nCol qt)
(SOSourceObj _ (SOITableObj _ (TORemoteRel remoteRelName))) ->
updateColInRemoteRelationship source remoteRelName $ RenameItem qt oCol nCol
sobj@(SOSourceObj _ exists) -> case AB.unpackAnyBackend exists of
Just (SOITableObj refQT (TOPerm role pt)) ->
updatePermFlds source refQT role pt $ RField renameFld
Just (SOITableObj refQT (TORel rn)) ->
updateColInRel source refQT rn $ RenameItem qt oCol nCol
Just (SOITableObj refQT (TOTrigger triggerName)) ->
tell
$ MetadataModifier
$ tableMetadataSetter source refQT.tmEventTriggers.ix triggerName
%~ updateColumnInEventTrigger refQT oCol nCol qt
Just (SOITableObj _ (TORemoteRel remoteRelName)) ->
updateColInRemoteRelationship source remoteRelName
$ RenameItem qt oCol nCol
_ -> otherDeps errMsg sobj
d -> otherDeps errMsg d
-- Update custom column names
possiblyUpdateCustomColumnNames source qt oCol nCol
@ -121,7 +137,8 @@ renameColumnInMetadata oCol nCol source qt fieldInfo = do
_ -> pure ()
renameRelationshipInMetadata
:: ( MonadError QErr m
:: forall b m
. ( MonadError QErr m
, CacheRM m
, MonadWriter MetadataModifier m
, BackendMetadata b
@ -129,13 +146,18 @@ renameRelationshipInMetadata
=> SourceName -> TableName b -> RelName -> RelType -> RelName -> m ()
renameRelationshipInMetadata source qt oldRN relType newRN = do
sc <- askSchemaCache
let depObjs = getDependentObjs sc $ SOSourceObj source $
SOITableObj qt $ TORel oldRN
let depObjs = getDependentObjs sc
$ SOSourceObj source
$ AB.mkAnyBackend
$ SOITableObj qt
$ TORel oldRN
renameFld = RFRel $ RenameItem qt oldRN newRN
forM_ depObjs $ \case
(SOSourceObj _ (SOITableObj refQT (TOPerm role pt))) ->
onJust (cast refQT) \tn -> updatePermFlds source tn role pt $ RField renameFld
sobj@(SOSourceObj _ exists) -> case AB.unpackAnyBackend exists of
Just (SOITableObj refQT (TOPerm role pt)) ->
updatePermFlds source refQT role pt $ RField renameFld
_ -> otherDeps errMsg sobj
d -> otherDeps errMsg d
tell $ MetadataModifier $ tableMetadataSetter source qt %~ case relType of
ObjRel -> tmObjectRelationships %~ rewriteRelationships

View File

@ -1,22 +1,25 @@
module Hasura.RQL.DDL.Schema.Source where
import Control.Lens (at, (^.))
import Control.Monad.Trans.Control (MonadBaseControl)
import Data.Text.Extended
import Data.Typeable (cast)
import Hasura.Backends.Postgres.Connection
import Hasura.EncJSON
import Hasura.Prelude
import Hasura.RQL.DDL.Deps
import Hasura.RQL.DDL.Schema.Common
import Hasura.RQL.Types
import qualified Data.Environment as Env
import qualified Data.HashMap.Strict as HM
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Database.PG.Query as Q
import Control.Lens (at, (^.))
import Control.Monad.Trans.Control (MonadBaseControl)
import Data.Text.Extended
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.Backends.Postgres.Connection
import Hasura.EncJSON
import Hasura.RQL.DDL.Deps
import Hasura.RQL.DDL.Schema.Common
import Hasura.RQL.Types
mkPgSourceResolver :: Q.PGLogger -> SourceResolver
mkPgSourceResolver pgLogger _ config = runExceptT do
env <- lift Env.getEnvironment
@ -53,7 +56,8 @@ runDropSource (DropSource name cascade) = do
let sources = scSources sc
case HM.lookup name sources of
Just backendSourceInfo ->
dropSource' sc backendSourceInfo
AB.dispatchAnyBackend @BackendMetadata backendSourceInfo $ dropSource sc
Nothing -> do
metadata <- getMetadata
void $ onNothing (metadata ^. metaSources . at name) $
@ -67,19 +71,16 @@ runDropSource (DropSource name cascade) = do
buildSchemaCacheFor (MOSource name) dropSourceMetadataModifier
pure successMsg
where
dropSource' :: SchemaCache -> BackendSourceInfo -> m ()
dropSource' sc (BackendSourceInfo (sourceInfo :: SourceInfo b)) =
case backendTag @b of
PostgresTag -> dropSource sc (sourceInfo :: SourceInfo 'Postgres)
MSSQLTag -> dropSource sc (sourceInfo :: SourceInfo 'MSSQL)
dropSource :: forall b. (BackendMetadata b) => SchemaCache -> SourceInfo b -> m ()
dropSource sc sourceInfo = do
let sourceConfig = _siConfiguration sourceInfo
let indirectDeps = mapMaybe getIndirectDep $
getDependentObjs sc (SOSource name)
when (not cascade && indirectDeps /= []) $ reportDepsExt (map (SOSourceObj name) indirectDeps) []
when (not cascade && indirectDeps /= [])
$ reportDepsExt
(map (SOSourceObj name . AB.mkAnyBackend) indirectDeps)
[]
metadataModifier <- execWriterT $ do
mapM_ (purgeDependentObject name >=> tell) indirectDeps
@ -90,7 +91,11 @@ runDropSource (DropSource name cascade) = do
where
getIndirectDep :: SchemaObjId -> Maybe (SourceObjId b)
getIndirectDep = \case
SOSourceObj s o -> if s == name then Nothing else cast o -- consider only *this* backend specific dependencies
SOSourceObj s o ->
if s == name
then Nothing
-- consider only *this* backend specific dependencies
else AB.unpackAnyBackend o
_ -> Nothing
dropSourceMetadataModifier = MetadataModifier $ metaSources %~ OMap.delete name

View File

@ -39,9 +39,9 @@ import Control.Monad.Trans.Control (MonadBaseControl)
import Data.Aeson
import Data.Aeson.TH
import Data.Text.Extended
import Data.Typeable (cast)
import qualified Hasura.Incremental as Inc
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.Backends.Postgres.SQL.Types (QualifiedTable)
import Hasura.EncJSON
@ -178,7 +178,8 @@ checkConflictingNode sc tnGQL = do
_ -> pure ()
trackExistingTableOrViewP2
:: (MonadError QErr m, CacheRWM m, MetadataM m, BackendMetadata b)
:: forall m b
. (MonadError QErr m, CacheRWM m, MetadataM m, BackendMetadata b)
=> SourceName -> TableName b -> Bool -> TableConfig b -> m EncJSON
trackExistingTableOrViewP2 source tableName isEnum config = do
sc <- askSchemaCache
@ -192,7 +193,10 @@ trackExistingTableOrViewP2 source tableName isEnum config = do
-}
checkConflictingNode sc $ snakeCaseTableName tableName
let metadata = mkTableMeta tableName isEnum config
buildSchemaCacheFor (MOSourceObjId source $ SMOTable tableName)
buildSchemaCacheFor
(MOSourceObjId source
$ AB.mkAnyBackend
$ SMOTable tableName)
$ MetadataModifier
$ metaSources.ix source.toSourceMetadata.smTables %~ OMap.insert tableName metadata
pure successMsg
@ -226,7 +230,8 @@ runTrackTableV2Q (TrackTableV2 (TrackTable source qt isEnum) config) = do
runSetExistingTableIsEnumQ :: (MonadError QErr m, CacheRWM m, MetadataM m) => SetTableIsEnum -> m EncJSON
runSetExistingTableIsEnumQ (SetTableIsEnum source tableName isEnum) = do
void $ askTabInfo source tableName -- assert that table is tracked
buildSchemaCacheFor (MOSourceObjId source $ SMOTable tableName)
buildSchemaCacheFor
(MOSourceObjId source $ AB.mkAnyBackend $ SMOTable tableName)
$ MetadataModifier
$ tableMetadataSetter source tableName.tmIsEnum .~ isEnum
return successMsg
@ -268,7 +273,8 @@ runSetTableCustomFieldsQV2
runSetTableCustomFieldsQV2 (SetTableCustomFields source tableName rootFields columnNames) = do
void $ askTabInfo source tableName -- assert that table is tracked
let tableConfig = TableConfig rootFields columnNames Nothing
buildSchemaCacheFor (MOSourceObjId source $ SMOTable tableName)
buildSchemaCacheFor
(MOSourceObjId source $ AB.mkAnyBackend $ SMOTable tableName)
$ MetadataModifier
$ tableMetadataSetter source tableName.tmConfiguration .~ tableConfig
return successMsg
@ -277,7 +283,8 @@ runSetTableCustomization
:: (QErrM m, CacheRWM m, MetadataM m) => SetTableCustomization -> m EncJSON
runSetTableCustomization (SetTableCustomization source table config) = do
void $ askTabInfo source table
buildSchemaCacheFor (MOSourceObjId source $ SMOTable table)
buildSchemaCacheFor
(MOSourceObjId source $ AB.mkAnyBackend $ SMOTable table)
$ MetadataModifier
$ tableMetadataSetter source table.tmConfiguration .~ config
return successMsg
@ -299,10 +306,16 @@ unTrackExistingTableOrViewP2 (UntrackTable source qtn cascade) = withNewInconsis
sc <- askSchemaCache
-- Get relational, query template and function dependants
let allDeps = getDependentObjs sc (SOSourceObj source $ SOITable qtn)
let allDeps =
getDependentObjs
sc
(SOSourceObj source $ AB.mkAnyBackend $ SOITable qtn)
indirectDeps = mapMaybe getIndirectDep allDeps
-- Report back with an error if cascade is not set
when (indirectDeps /= [] && not cascade) $ reportDepsExt (map (SOSourceObj source) indirectDeps) []
when (indirectDeps /= [] && not cascade)
$ reportDepsExt
(map (SOSourceObj source . AB.mkAnyBackend) indirectDeps)
[]
-- Purge all the dependents from state
metadataModifier <- execWriterT do
mapM_ (purgeDependentObject source >=> tell) indirectDeps
@ -313,11 +326,11 @@ unTrackExistingTableOrViewP2 (UntrackTable source qtn cascade) = withNewInconsis
where
getIndirectDep :: SchemaObjId -> Maybe (SourceObjId b)
getIndirectDep = \case
SOSourceObj s srcObjId ->
SOSourceObj s exists ->
-- If the dependency is to any other source, it automatically is an
-- indirect dependency, hence the cast is safe here. However, we don't
-- have these cross source dependencies yet
cast srcObjId >>= \case
AB.unpackAnyBackend exists >>= \case
v@(SOITableObj dtn _) ->
if not (s == source && qtn == dtn) then Just v else Nothing
v -> Just v
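
getIndirectDep above is used with mapMaybe to keep only the dependencies that unpack for this backend; the core of that idiom, against the simplified wrapper and the hypothetical SourceObjId stand-in from earlier, is just:

import Data.Maybe (mapMaybe)

-- Keep the dependencies packed for one specific backend, drop the rest.
postgresDeps :: [AnyBackend SourceObjId] -> [SourceObjId 'Postgres]
postgresDeps = mapMaybe (unpackAnyBackend @'Postgres)
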
@ -373,7 +386,12 @@ buildTableCache = Inc.cache proc (source, sourceConfig, dbTablesMeta, tableBuild
withTable :: ErrorA QErr arr (e, s) a -> arr (e, ((SourceName, TableName b), s)) (Maybe a)
withTable f = withRecordInconsistency f <<<
second (first $ arr \(source, name) -> MetadataObject (MOSourceObjId source $ SMOTable name) (toJSON name))
second (first $ arr \(source, name) ->
MetadataObject
(MOSourceObjId source
$ AB.mkAnyBackend
$ SMOTable name)
(toJSON name))
noDuplicateTables = proc tables -> case tables of
table :| [] -> returnA -< table

View File

@ -9,7 +9,7 @@ import qualified Language.GraphQL.Draft.Syntax as G
import Data.Aeson
import Data.Kind (Type)
import Data.Text.Extended
import Data.Typeable
import Data.Typeable (Typeable)
import Hasura.Incremental (Cacheable)
import Hasura.RQL.DDL.Headers ()
@ -63,7 +63,6 @@ class
, Data (TableName b)
, Data (ScalarType b)
, Data (SQLExpression b)
, Typeable (SourceConfig b)
, Typeable b
, ToSQL (SQLExpression b)
, FromJSON (BasicOrderType b)
@ -128,7 +127,7 @@ class
type XDistinct b :: Type
-- functions on types
backendTag :: BackendTag b
backendTag :: BackendTag b
functionArgScalarType :: FunctionArgType b -> ScalarType b
isComparableType :: ScalarType b -> Bool
isNumType :: ScalarType b -> Bool

View File

@ -35,7 +35,6 @@ module Hasura.RQL.Types.CustomTypes
import Control.Lens.TH (makeLenses)
import Data.Text.Extended
import Data.Typeable (cast)
import qualified Data.Aeson as J
import qualified Data.Aeson.TH as J
@ -249,7 +248,7 @@ instance Backend b => Monoid (ScalarSet b) where
instance Eq AnnotatedScalarType where
(ASTCustom std1) == (ASTCustom std2) = std1 == std2
(ASTReusedScalar g1 st1) == (ASTReusedScalar g2 st2) = g1 == g2 && Just st1 == cast st2
(ASTReusedScalar g1 st1) == (ASTReusedScalar g2 st2) = g1 == g2 && st1 == st2
_ == _ = False
instance J.ToJSON AnnotatedScalarType where

View File

@ -9,7 +9,6 @@ import qualified Data.HashMap.Strict.Extended as M
import qualified Data.HashMap.Strict.InsOrd.Extended as OM
import qualified Data.HashSet as HS
import qualified Data.HashSet.InsOrd as HSIns
import Data.Int (Int64)
import qualified Data.List.Extended as L
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
@ -18,7 +17,9 @@ import Control.Lens hiding (set, (.=))
import Data.Aeson.Casing
import Data.Aeson.TH
import Data.Aeson.Types
import Data.Typeable (cast)
import Data.Int (Int64)
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.Incremental (Cacheable)
import Hasura.RQL.Types.Action
@ -255,38 +256,21 @@ instance (Backend b) => FromJSON (SourceMetadata b) where
pure SourceMetadata{..}
mkSourceMetadata
:: BackendMetadata b
=> SourceName -> SourceConnConfiguration b -> BackendSourceMetadata
:: forall (b :: BackendType)
. BackendMetadata b
=> SourceName
-> SourceConnConfiguration b
-> BackendSourceMetadata
mkSourceMetadata name config =
BackendSourceMetadata $ SourceMetadata name mempty mempty config
AB.mkAnyBackend $ SourceMetadata name mempty mempty config
-- See Note [Existentially Quantified Types]
data BackendSourceMetadata =
forall b. (BackendMetadata b) => BackendSourceMetadata (SourceMetadata b)
type BackendSourceMetadata = AB.AnyBackend SourceMetadata
instance Show BackendSourceMetadata where
show (BackendSourceMetadata sm) = show sm
instance Eq BackendSourceMetadata where
BackendSourceMetadata sm1 == BackendSourceMetadata sm2 =
(Just sm1) == (cast sm2)
instance FromJSON BackendSourceMetadata where
parseJSON = withObject "Object" $ \o -> do
backendKind :: Text <- fromMaybe "postgres" <$> o .:? "kind"
-- TODO: Make backendKind a concrete type or re-use `BackendType`
case backendKind of
"postgres" -> BackendSourceMetadata @'Postgres <$> parseJSON (Object o)
"mssql" -> BackendSourceMetadata @'MSSQL <$> parseJSON (Object o)
_ -> fail "expected postgres or mssql"
toSourceMetadata :: (BackendMetadata b) => Prism' BackendSourceMetadata (SourceMetadata b)
toSourceMetadata = prism' BackendSourceMetadata getSourceMetadata
where
getSourceMetadata (BackendSourceMetadata sm) = cast sm
toSourceMetadata :: forall b. (BackendMetadata b) => Prism' BackendSourceMetadata (SourceMetadata b)
toSourceMetadata = prism' AB.mkAnyBackend AB.unpackAnyBackend
getSourceName :: BackendSourceMetadata -> SourceName
getSourceName (BackendSourceMetadata sm) = _smName sm
getSourceName e = AB.dispatchAnyBackend @BackendMetadata e _smName
type Sources = InsOrdHashMap SourceName BackendSourceMetadata
@ -478,18 +462,17 @@ metadataToOrdJSON ( Metadata
else Just ("metrics_config", AO.toOrdered metricsConfig)
sourceMetaToOrdJSON :: BackendSourceMetadata -> AO.Value
sourceMetaToOrdJSON (BackendSourceMetadata (SourceMetadata{..} :: SourceMetadata b)) =
let sourceNamePair = ("name", AO.toOrdered _smName)
sourceKind = case backendTag @b of
PostgresTag -> "postgres"
MSSQLTag -> "mssql"
sourceKindPair = ("kind", AO.String sourceKind)
tablesPair = ("tables", AO.array $ map tableMetaToOrdJSON $ sortOn _tmTable $ OM.elems _smTables)
functionsPair = listToMaybeOrdPairSort "functions" functionMetadataToOrdJSON _fmFunction _smFunctions
sourceMetaToOrdJSON exists =
AB.dispatchAnyBackend @BackendMetadata exists $ \(SourceMetadata {..} :: SourceMetadata b) ->
let sourceNamePair = ("name", AO.toOrdered _smName)
sourceKind = backendName $ backendTag @b
sourceKindPair = ("kind", AO.String sourceKind)
tablesPair = ("tables", AO.array $ map tableMetaToOrdJSON $ sortOn _tmTable $ OM.elems _smTables)
functionsPair = listToMaybeOrdPairSort "functions" functionMetadataToOrdJSON _fmFunction _smFunctions
configurationPair = [("configuration", AO.toOrdered _smConfiguration)]
configurationPair = [("configuration", AO.toOrdered _smConfiguration)]
in AO.object $ [sourceNamePair, sourceKindPair, tablesPair] <> maybeToList functionsPair <> configurationPair
in AO.object $ [sourceNamePair, sourceKindPair, tablesPair] <> maybeToList functionsPair <> configurationPair
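
sourceMetaToOrdJSON above shows the other dispatch idiom: give the continuation's argument a pattern type signature so the backend type variable is in scope, then use any class method on it (backendTag, backendName). A sketch with the simplified wrapper, and string literals standing in for backendName:

-- Needs ScopedTypeVariables (for the pattern signature) and TypeApplications.
backendNameOf :: forall i. AnyBackend i -> String
backendNameOf = dispatchAnyBackend (\(_ :: i b) -> nameOf (backendTag @b))
  where
    nameOf :: BackendTag b -> String
    nameOf PostgresTag = "postgres"
    nameOf MSSQLTag    = "mssql"
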
tableMetaToOrdJSON :: (Backend b) => TableMetadata b -> AO.Value
tableMetaToOrdJSON ( TableMetadata
@ -615,24 +598,27 @@ metadataToOrdJSON ( Metadata
if _fmConfiguration == emptyFunctionConfig then []
else pure ("configuration", AO.toOrdered _fmConfiguration)
permissionsKeyPair =
if (null _fmPermissions) then []
if null _fmPermissions then []
else pure ("permissions", AO.toOrdered _fmPermissions)
in AO.object $ [("function", AO.toOrdered _fmFunction)] <> confKeyPair <> permissionsKeyPair
inheritedRolesQToOrdJSON :: AddInheritedRole -> AO.Value
inheritedRolesQToOrdJSON AddInheritedRole{..} =
AO.object $ [ ("role_name", AO.toOrdered _adrRoleName)
, ("role_set", AO.toOrdered _adrRoleSet)
]
AO.object [ ("role_name", AO.toOrdered _adrRoleName)
, ("role_set", AO.toOrdered _adrRoleSet)
]
remoteSchemaQToOrdJSON :: RemoteSchemaMetadata -> AO.Value
remoteSchemaQToOrdJSON (RemoteSchemaMetadata name definition comment permissions) =
AO.object $ [ ("name", AO.toOrdered name)
, ("definition", remoteSchemaDefToOrdJSON definition)
]
<> (catMaybes [ maybeCommentToMaybeOrdPair comment
, listToMaybeOrdPair "permissions" permsToMaybeOrdJSON permissions
])
<> catMaybes [ maybeCommentToMaybeOrdPair comment
, listToMaybeOrdPair
"permissions"
permsToMaybeOrdJSON
permissions
]
where
permsToMaybeOrdJSON :: RemoteSchemaPermissionMetadata -> AO.Value
permsToMaybeOrdJSON (RemoteSchemaPermissionMetadata role defn permComment) =

View File

@ -2,14 +2,17 @@ module Hasura.RQL.Types.Metadata.Object where
import Hasura.Prelude
import qualified Data.HashMap.Strict.Extended as M
import qualified Data.HashMap.Strict.Extended as M
import Control.Lens hiding (set, (.=))
import Control.Lens hiding (set, (.=))
import Data.Aeson.Types
import Data.Hashable
import Data.Text.Extended
import Data.Typeable (cast)
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.Backends.MSSQL.Instances.Types ()
import Hasura.Backends.Postgres.Instances.Types ()
import Hasura.RQL.Types.Action
import Hasura.RQL.Types.Backend
import Hasura.RQL.Types.Common
@ -44,7 +47,7 @@ instance (Backend b) => Hashable (SourceMetadataObjId b)
-- See Note [Existentially Quantified Types]
data MetadataObjId
= MOSource !SourceName
| forall b. (Backend b) => MOSourceObjId !SourceName !(SourceMetadataObjId b)
| MOSourceObjId !SourceName !(AB.AnyBackend SourceMetadataObjId)
| MORemoteSchema !RemoteSchemaName
-- ^ Originates from user-defined '_arsqName'
| MORemoteSchemaPermissions !RemoteSchemaName !RoleName
@ -70,55 +73,45 @@ instance Hashable MetadataObjId where
MOEndpoint endpoint -> hashWithSalt salt endpoint
instance Eq MetadataObjId where
(MOSource s1) == (MOSource s2) = s1 == s2
(MOSourceObjId s1 id1) == (MOSourceObjId s2 id2) = s1 == s2 && Just id1 == cast id2
(MORemoteSchema n1) == (MORemoteSchema n2) = n1 == n2
(MOSource s1) == (MOSource s2) = s1 == s2
(MOSourceObjId s1 id1) == (MOSourceObjId s2 id2) = s1 == s2 && id1 == id2
(MORemoteSchema n1) == (MORemoteSchema n2) = n1 == n2
(MORemoteSchemaPermissions n1 r1) == (MORemoteSchemaPermissions n2 r2) = n1 == n2 && r1 == r2
MOCustomTypes == MOCustomTypes = True
(MOActionPermission an1 r1) == (MOActionPermission an2 r2) = an1 == an2 && r1 == r2
(MOCronTrigger trn1) == (MOCronTrigger trn2) = trn1 == trn2
(MOInheritedRole rn1) == (MOInheritedRole rn2) = rn1 == rn2
_ == _ = False
MOCustomTypes == MOCustomTypes = True
(MOActionPermission an1 r1) == (MOActionPermission an2 r2) = an1 == an2 && r1 == r2
(MOCronTrigger trn1) == (MOCronTrigger trn2) = trn1 == trn2
(MOInheritedRole rn1) == (MOInheritedRole rn2) = rn1 == rn2
_ == _ = False
moiTypeName :: MetadataObjId -> Text
moiTypeName = \case
MOSource _ -> "source"
MOSourceObjId _ sourceObjId -> case sourceObjId of
SMOTable _ -> "table"
SMOFunction _ -> "function"
SMOFunctionPermission _ _ -> "function_permission"
SMOTableObj _ tableObjectId -> case tableObjectId of
MTORel _ relType -> relTypeToTxt relType <> "_relation"
MTOPerm _ permType -> permTypeToCode permType <> "_permission"
MTOTrigger _ -> "event_trigger"
MTOComputedField _ -> "computed_field"
MTORemoteRelationship _ -> "remote_relationship"
MORemoteSchema _ -> "remote_schema"
MOSource _ -> "source"
MOSourceObjId _ exists -> AB.dispatchAnyBackend @Backend exists handleSourceObj
MORemoteSchema _ -> "remote_schema"
MORemoteSchemaPermissions _ _ -> "remote_schema_permission"
MOCronTrigger _ -> "cron_trigger"
MOCustomTypes -> "custom_types"
MOAction _ -> "action"
MOActionPermission _ _ -> "action_permission"
MOInheritedRole _ -> "inherited_role"
MOEndpoint _ -> "endpoint"
MOCronTrigger _ -> "cron_trigger"
MOCustomTypes -> "custom_types"
MOAction _ -> "action"
MOActionPermission _ _ -> "action_permission"
MOInheritedRole _ -> "inherited_role"
MOEndpoint _ -> "endpoint"
where
handleSourceObj :: forall b. SourceMetadataObjId b -> Text
handleSourceObj = \case
SMOTable _ -> "table"
SMOFunction _ -> "function"
SMOFunctionPermission _ _ -> "function_permission"
SMOTableObj _ tableObjectId -> case tableObjectId of
MTORel _ relType -> relTypeToTxt relType <> "_relation"
MTOPerm _ permType -> permTypeToCode permType <> "_permission"
MTOTrigger _ -> "event_trigger"
MTOComputedField _ -> "computed_field"
MTORemoteRelationship _ -> "remote_relationship"
moiName :: MetadataObjId -> Text
moiName objectId = moiTypeName objectId <> " " <> case objectId of
MOSource name -> toTxt name
MOSourceObjId source sourceObjId -> case sourceObjId of
SMOTable name -> toTxt name <> " in source " <> toTxt source
SMOFunction name -> toTxt name <> " in source " <> toTxt source
SMOFunctionPermission functionName roleName ->
toTxt roleName <> " permission for function "
<> toTxt functionName <> " in source " <> toTxt source
SMOTableObj tableName tableObjectId ->
let tableObjectName = case tableObjectId of
MTORel name _ -> toTxt name
MTOComputedField name -> toTxt name
MTORemoteRelationship name -> toTxt name
MTOPerm name _ -> toTxt name
MTOTrigger name -> toTxt name
in tableObjectName <> " in " <> moiName (MOSourceObjId source $ SMOTable tableName)
MOSourceObjId source exists -> AB.dispatchAnyBackend @Backend exists (handleSourceObj source)
MORemoteSchema name -> toTxt name
MORemoteSchemaPermissions name roleName ->
toTxt roleName <> " permission in remote schema " <> toTxt name
@ -128,6 +121,31 @@ moiName objectId = moiTypeName objectId <> " " <> case objectId of
MOActionPermission name roleName -> toTxt roleName <> " permission in " <> toTxt name
MOInheritedRole inheritedRoleName -> "inherited role " <> toTxt inheritedRoleName
MOEndpoint name -> toTxt name
where
handleSourceObj
:: forall b
. Backend b
=> SourceName
-> SourceMetadataObjId b
-> Text
handleSourceObj source = \case
SMOTable name -> toTxt name <> " in source " <> toTxt source
SMOFunction name -> toTxt name <> " in source " <> toTxt source
SMOFunctionPermission functionName roleName ->
toTxt roleName <> " permission for function "
<> toTxt functionName <> " in source " <> toTxt source
SMOTableObj tableName tableObjectId ->
let tableObjectName = case tableObjectId of
MTORel name _ -> toTxt name
MTOComputedField name -> toTxt name
MTORemoteRelationship name -> toTxt name
MTOPerm name _ -> toTxt name
MTOTrigger name -> toTxt name
in tableObjectName
<> " in "
<> moiName (MOSourceObjId source
$ AB.mkAnyBackend
$ SMOTable tableName)
data MetadataObject
= MetadataObject


@ -120,24 +120,26 @@ module Hasura.RQL.Types.SchemaCache
import Hasura.Prelude
import qualified Data.ByteString.Lazy as BL
import qualified Data.HashMap.Strict as M
import qualified Data.HashSet as HS
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Data.ByteString.Lazy as BL
import qualified Data.HashMap.Strict as M
import qualified Data.HashSet as HS
import qualified Language.GraphQL.Draft.Syntax as G
import Control.Lens (makeLenses)
import Control.Lens (makeLenses)
import Data.Aeson
import Data.Aeson.TH
import Data.Text.Extended
import Data.Typeable (cast)
import System.Cron.Types
import qualified Hasura.Backends.Postgres.Connection as PG
import qualified Hasura.GraphQL.Parser as P
import qualified Hasura.Backends.Postgres.Connection as PG
import qualified Hasura.GraphQL.Parser as P
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.GraphQL.Context (GQLContext, RemoteField, RoleContext)
import Hasura.Incremental (Cacheable, Dependency, MonadDepend (..),
selectKeyD)
import Hasura.Backends.MSSQL.Instances.Types ()
import Hasura.Backends.Postgres.Instances.Types ()
import Hasura.GraphQL.Context (GQLContext, RemoteField, RoleContext)
import Hasura.Incremental (Cacheable, Dependency, MonadDepend (..),
selectKeyD)
import Hasura.RQL.IR.BoolExp
import Hasura.RQL.Types.Action
import Hasura.RQL.Types.ApiLimit
@ -158,27 +160,49 @@ import Hasura.RQL.Types.SchemaCacheTypes
import Hasura.RQL.Types.Source
import Hasura.RQL.Types.Table
import Hasura.Session
import Hasura.Tracing (TraceT)
import Hasura.Tracing (TraceT)
reportSchemaObjs :: [SchemaObjId] -> Text
reportSchemaObjs = commaSeparated . sort . map reportSchemaObj
mkParentDep :: forall b. (Backend b) => SourceName -> TableName b -> SchemaDependency
mkParentDep s tn = SchemaDependency (SOSourceObj @b s $ SOITable tn) DRTable
mkParentDep
:: forall b
. Backend b
=> SourceName
-> TableName b
-> SchemaDependency
mkParentDep s tn =
SchemaDependency (SOSourceObj s $ AB.mkAnyBackend (SOITable tn)) DRTable
mkColDep
:: forall b
. (Backend b)
=> DependencyReason -> SourceName -> TableName b -> Column b -> SchemaDependency
=> DependencyReason
-> SourceName
-> TableName b
-> Column b
-> SchemaDependency
mkColDep reason source tn col =
flip SchemaDependency reason . SOSourceObj @b source . SOITableObj tn $ TOCol col
flip SchemaDependency reason
. SOSourceObj source
. AB.mkAnyBackend
. SOITableObj tn
$ TOCol col
mkComputedFieldDep
:: forall b. (Backend b)
=> DependencyReason -> SourceName -> TableName b -> ComputedFieldName -> SchemaDependency
=> DependencyReason
-> SourceName
-> TableName b
-> ComputedFieldName
-> SchemaDependency
mkComputedFieldDep reason s tn computedField =
flip SchemaDependency reason . SOSourceObj @b s . SOITableObj tn $ TOComputedField computedField
flip SchemaDependency reason
. SOSourceObj s
. AB.mkAnyBackend
. SOITableObj tn
$ TOComputedField computedField
type WithDeps a = (a, [SchemaDependency])
@ -404,15 +428,17 @@ getDependentObjs = getDependentObjsWith (const True)
getDependentObjsWith
:: (DependencyReason -> Bool) -> SchemaCache -> SchemaObjId -> [SchemaObjId]
getDependentObjsWith f sc objId =
-- [ sdObjId sd | sd <- filter (f . sdReason) allDeps]
map fst $ filter (isDependency . snd) $ M.toList $ scDepMap sc
where
isDependency deps = not $ HS.null $ flip HS.filter deps $
\(SchemaDependency depId reason) -> objId `induces` depId && f reason
-- induces a b : is b dependent on a
induces (SOSource s1) (SOSource s2) = s1 == s2
induces (SOSource s1) (SOSourceObj s2 _) = s1 == s2
induces (SOSourceObj s1 (SOITable tn1)) (SOSourceObj s2 (SOITable tn2)) = s1 == s2 && Just tn1 == (cast tn2)
induces (SOSourceObj s1 (SOITable tn1)) (SOSourceObj s2 (SOITableObj tn2 _)) = s1 == s2 && Just tn1 == (cast tn2)
induces objId1 objId2 = objId1 == objId2
-- allDeps = toList $ fromMaybe HS.empty $ M.lookup objId $ scDepMap sc
induces (SOSource s1) (SOSource s2) = s1 == s2
induces (SOSource s1) (SOSourceObj s2 _) = s1 == s2
induces o1@(SOSourceObj s1 e1) o2@(SOSourceObj s2 e2) =
s1 == s2 && fromMaybe (o1 == o2) (AB.composeAnyBackend @Backend go e1 e2 Nothing)
induces o1 o2 = o1 == o2
go (SOITable tn1) (SOITable tn2) = Just $ tn1 == tn2
go (SOITable tn1) (SOITableObj tn2 _) = Just $ tn1 == tn2
go _ _ = Nothing
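
-- A sketch of how the check above behaves, assuming two Postgres objects in
-- the default source ('authorTable' and 'idColumn' are hypothetical names).
-- 'composeAnyBackend' only calls 'go' when both sides wrap the same backend;
-- otherwise the 'Nothing' fallback leaves the plain equality check in place:
--
--   tableObj  = SOSourceObj defaultSource $ AB.mkAnyBackend $ SOITable @'Postgres authorTable
--   columnObj = SOSourceObj defaultSource $ AB.mkAnyBackend $ SOITableObj @'Postgres authorTable (TOCol idColumn)
--
--   tableObj  `induces` columnObj   -- True: a table induces its own table objects
--   columnObj `induces` tableObj    -- False: 'go' yields Nothing and the two ids differ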


@ -2,7 +2,7 @@ module Hasura.RQL.Types.SchemaCacheTypes where
import Hasura.Prelude
import qualified Data.Text as T
import qualified Data.Text as T
import Data.Aeson
import Data.Aeson.TH
@ -10,9 +10,11 @@ import Data.Aeson.Types
import Data.Hashable
import Data.Text.Extended
import Data.Text.NonEmpty
import Data.Typeable (cast)
-- import Hasura.Backends.Postgres.SQL.Types
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.Backends.MSSQL.Instances.Types ()
import Hasura.Backends.Postgres.Instances.Types ()
import Hasura.RQL.Types.Backend
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.ComputedField
@ -43,48 +45,49 @@ data SourceObjId (b :: BackendType)
deriving (Show, Eq, Generic)
instance (Backend b) => Hashable (SourceObjId b)
-- See Note [Existentially Quantified Types]
data SchemaObjId
= SOSource !SourceName
| forall b . (Backend b) => SOSourceObj !SourceName !(SourceObjId b)
| SOSourceObj !SourceName !(AB.AnyBackend SourceObjId)
| SORemoteSchema !RemoteSchemaName
| SORemoteSchemaPermission !RemoteSchemaName !RoleName
instance Hashable SchemaObjId where
hashWithSalt salt = \case
SOSource sourceName -> hashWithSalt salt sourceName
SOSourceObj sourceName sourceObjId -> hashWithSalt salt (sourceName, sourceObjId)
SOSourceObj sourceName exists ->
AB.dispatchAnyBackend' @Hashable exists (hashWithSalt salt . (sourceName,))
SORemoteSchema remoteSchemaName -> hashWithSalt salt remoteSchemaName
SORemoteSchemaPermission remoteSchemaName roleName -> hashWithSalt salt (remoteSchemaName, roleName)
instance Eq SchemaObjId where
(SOSource s1) == (SOSource s2) = s1 == s2
(SORemoteSchema s1) == (SORemoteSchema s2) = s1 == s2
(SOSourceObj s1 id1) == (SOSourceObj s2 id2) = (s1 == s2) && Just id1 == cast id2
(SOSource s1) == (SOSource s2) = s1 == s2
(SORemoteSchema s1) == (SORemoteSchema s2) = s1 == s2
(SOSourceObj s1 id1) == (SOSourceObj s2 id2) = (s1 == s2) && id1 == id2
(SORemoteSchemaPermission s1 r1) == (SORemoteSchemaPermission s2 r2) = (s1, r1) == (s2, r2)
_ == _ = False
_ == _ = False
reportSchemaObj :: SchemaObjId -> T.Text
reportSchemaObj = \case
SOSource source -> "source " <> sourceNameToText source
SOSourceObj source sourceObjId -> inSource source $
case sourceObjId of
SOITable tn -> "table " <> toTxt tn
SOIFunction fn -> "function " <> toTxt fn
SOITableObj tn (TOCol cn) ->
"column " <> toTxt tn <> "." <> toTxt cn
SOITableObj tn (TORel cn) ->
"relationship " <> toTxt tn <> "." <> toTxt cn
SOITableObj tn (TOForeignKey cn) ->
"constraint " <> toTxt tn <> "." <> toTxt cn
SOITableObj tn (TOPerm rn pt) ->
"permission " <> toTxt tn <> "." <> roleNameToTxt rn <> "." <> permTypeToCode pt
SOITableObj tn (TOTrigger trn ) ->
"event-trigger " <> toTxt tn <> "." <> triggerNameToTxt trn
SOITableObj tn (TOComputedField ccn) ->
"computed field " <> toTxt tn <> "." <> computedFieldNameToText ccn
SOITableObj tn (TORemoteRel rn) ->
"remote relationship " <> toTxt tn <> "." <> remoteRelationshipNameToText rn
SOSourceObj source exists -> inSource source $
AB.dispatchAnyBackend @Backend exists
\case
SOITable tn -> "table " <> toTxt tn
SOIFunction fn -> "function " <> toTxt fn
SOITableObj tn (TOCol cn) ->
"column " <> toTxt tn <> "." <> toTxt cn
SOITableObj tn (TORel cn) ->
"relationship " <> toTxt tn <> "." <> toTxt cn
SOITableObj tn (TOForeignKey cn) ->
"constraint " <> toTxt tn <> "." <> toTxt cn
SOITableObj tn (TOPerm rn pt) ->
"permission " <> toTxt tn <> "." <> roleNameToTxt rn <> "." <> permTypeToCode pt
SOITableObj tn (TOTrigger trn ) ->
"event-trigger " <> toTxt tn <> "." <> triggerNameToTxt trn
SOITableObj tn (TOComputedField ccn) ->
"computed field " <> toTxt tn <> "." <> computedFieldNameToText ccn
SOITableObj tn (TORemoteRel rn) ->
"remote relationship " <> toTxt tn <> "." <> remoteRelationshipNameToText rn
SORemoteSchema remoteSchemaName ->
"remote schema " <> unNonEmptyText (unRemoteSchemaName remoteSchemaName)
SORemoteSchemaPermission remoteSchemaName roleName ->


@ -4,15 +4,16 @@ module Hasura.RQL.Types.Source where
import Hasura.Prelude
import qualified Data.HashMap.Strict as M
import qualified Data.HashMap.Strict as M
import Control.Lens
import Data.Aeson
import Data.Aeson.TH
import Data.Typeable (cast)
import qualified Hasura.Tracing as Tracing
import qualified Hasura.SQL.AnyBackend as AB
import qualified Hasura.Tracing as Tracing
import Hasura.Backends.MSSQL.Instances.Types ()
import Hasura.Backends.Postgres.Connection
import Hasura.RQL.Types.Backend
import Hasura.RQL.Types.Common
@ -22,71 +23,6 @@ import Hasura.RQL.Types.Table
import Hasura.SQL.Backend
import Hasura.Session
{- Note [Existentially Quantified Types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This note contains a brief introduction to existential types, along with some
examples from this codebase on how to deal with such types.
If we consider the identity function:
id :: forall a. a -> a
Then for all /callers/ of this function, the type variable 'a' is universally
quantified: the caller can pick any type for 'a' when calling the function.
On the other hand, the /implementer/ of this function cannot pick an 'a'. From
this perspective, the type variable 'a' is existentially quantified.
Let's consider a rank-2 function:
rank2 :: forall a. (forall b. b -> String) -> a -> String
In this example, the /caller/ gets to pick 'a' since it's universally quantified,
but 'b' is existentially quantified from this perspective. We have to provide
a function that works for any 'b' the implementer may pick!
From the perspective of the /implementer/, 'a' is existentially quantified,
whereas 'b' is universally quantified: we (the implementers) get to pick
'b' (and we may call it multiple times with different types!).
One thing that we cannot do is return an existentially quantified
value. In order to do that, we need to wrap it in a constructor, e.g.:
data Exists = forall a. Exists a
Normally, type variables that appear on the right hand side of a type declaration
also appear on the left hand side. This is precisely what existential quantification
relaxes.
IMPORTANT: please keep in mind that existential types /erase/ all type information.
Similarly to implementing the 'id' function, there are only a few functions we can write
without more context:
idExists :: Exists -> Exists
idExists (Exists a) = Exists a
existsList :: [Exists]
existsList = [ Exists "hello", Exists (Just '1'), Exists (42 :: Int) ]
However, we can't do anything else: we cannot recover the original values or do
any operations. The way to deal with this problem is to pack a dictionary along
with the value. The most common example is 'Showable':
data Showable = forall a. Show a => Showable a
showShowable :: Showable -> String
showShowable (Showable a) = show a
We are able to call 'show' on 'a' because we are /packing/ the 'Show' constraint
along with the value. This is key to using existential types.
For details on how we use existentials in our code, please see note
[Recovering Existentially Quantified Type Information] -}
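
{- A minimal, self-contained sketch of the pattern above, using the 'Showable'
   wrapper from the note (plain GHC with ExistentialQuantification):

     showAll :: [Showable] -> [String]
     showAll = map showShowable

     -- showAll [Showable (1 :: Int), Showable "hi", Showable True]
     --   == ["1", "\"hi\"", "True"]

   The packed 'Show' dictionary is what lets a single function handle every
   element, even though each element wraps a different type. -}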
data SourceInfo b
= SourceInfo
{ _siName :: !SourceName
@ -98,12 +34,7 @@ $(makeLenses ''SourceInfo)
instance Backend b => ToJSON (SourceInfo b) where
toJSON = genericToJSON hasuraJSON
-- See Note [Existentially Quantified Types]
data BackendSourceInfo =
forall b. Backend b => BackendSourceInfo (SourceInfo b)
instance ToJSON BackendSourceInfo where
toJSON (BackendSourceInfo si) = toJSON si
type BackendSourceInfo = AB.AnyBackend SourceInfo
type SourceCache = HashMap SourceName BackendSourceInfo
@ -115,10 +46,12 @@ type SourceCache = HashMap SourceName BackendSourceInfo
-- uses the schema cache.
unsafeSourceInfo :: forall b. Backend b => BackendSourceInfo -> Maybe (SourceInfo b)
unsafeSourceInfo (BackendSourceInfo si) = cast si
unsafeSourceInfo = AB.unpackAnyBackend
unsafeSourceName :: BackendSourceInfo -> SourceName
unsafeSourceName (BackendSourceInfo (SourceInfo name _ _ _)) = name
unsafeSourceName bsi = AB.dispatchAnyBackend @Backend bsi go
where
go (SourceInfo name _ _ _) = name
unsafeSourceTables :: forall b. Backend b => BackendSourceInfo -> Maybe (TableCache b)
unsafeSourceTables = fmap _siTables . unsafeSourceInfo @b
@ -129,9 +62,10 @@ unsafeSourceFunctions = fmap _siFunctions . unsafeSourceInfo @b
unsafeSourceConfiguration :: forall b. Backend b => BackendSourceInfo -> Maybe (SourceConfig b)
unsafeSourceConfiguration = fmap _siConfiguration . unsafeSourceInfo @b
getTableRoles :: BackendSourceInfo -> [RoleName]
getTableRoles (BackendSourceInfo si) = M.keys . _tiRolePermInfoMap =<< M.elems (_siTables si)
getTableRoles bsi = AB.dispatchAnyBackend @Backend bsi go
where
go si = M.keys . _tiRolePermInfoMap =<< M.elems (_siTables si)
-- | Contains Postgres connection configuration and essential metadata from the


@ -0,0 +1,191 @@
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE Arrows #-}
{-# LANGUAGE UndecidableInstances #-}
module Hasura.SQL.AnyBackend
( AnyBackend
, mkAnyBackend
, dispatchAnyBackend
, dispatchAnyBackend'
, dispatchAnyBackendArrow
, unpackAnyBackend
, composeAnyBackend
, runBackend
) where
import Hasura.Prelude
import qualified Data.Text as T
import qualified Data.Text.Extended as T
import Control.Arrow.Extended (ArrowChoice)
import Data.Aeson (FromJSON (..), ToJSON (..), Value (..), withObject,
(.:?))
import Data.Hashable (Hashable (hashWithSalt))
import Data.Kind (Constraint, Type)
import Test.QuickCheck (oneof)
import Hasura.RQL.Types.Backend
import Hasura.SQL.Backend
-- | This type is essentially an unlabeled box for types indexed by BackendType.
-- Given some type defined as 'data T (b :: BackendType) = ...', we can define
-- 'AnyBackend T' without mentioning any 'BackendType'.
--
-- This is useful for having generic containers of potentially different types
-- of T.
data AnyBackend (i :: BackendType -> Type)
= PostgresValue (i 'Postgres)
| MSSQLValue (i 'MSSQL)
instance
( Arbitrary (i 'Postgres)
, Arbitrary (i 'MSSQL)
) => Arbitrary (AnyBackend i) where
arbitrary = oneof
[ PostgresValue <$> arbitrary
, MSSQLValue <$> arbitrary
]
mkAnyBackend
:: forall
(b :: BackendType)
(i :: BackendType -> Type)
. Backend b
=> i b
-> AnyBackend i
mkAnyBackend =
case (backendTag @b) of
PostgresTag -> PostgresValue
MSSQLTag -> MSSQLValue
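
-- A usage sketch (not part of this module): values built for different
-- backends can live in one container.  'pgInfo' and 'msInfo' are hypothetical
-- values of type SourceInfo 'Postgres and SourceInfo 'MSSQL respectively:
--
--   packed :: [AnyBackend SourceInfo]
--   packed = [mkAnyBackend pgInfo, mkAnyBackend msInfo]
--
-- This is exactly how 'SourceCache' stores sources for every backend in a
-- single 'HashMap SourceName (AnyBackend SourceInfo)'.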
runBackend
:: forall
(i :: BackendType -> Type)
(r :: Type)
. AnyBackend i
-> (forall (b :: BackendType). i b -> r)
-> r
runBackend b f =
case b of
PostgresValue pg -> f pg
MSSQLValue ms -> f ms
-- | Dispatch an existential using a universally quantified function while
-- also resolving a different constraint.
-- Use this to dispatch Backend* instances.
-- This is essentially a wrapper around 'runAnyBackend f . repackAnyBackend @c'.
dispatchAnyBackend
:: forall
(c :: BackendType -> Constraint)
(i :: BackendType -> Type)
(r :: Type)
. c 'Postgres
=> c 'MSSQL
=> AnyBackend i
-> (forall (b :: BackendType). c b => i b -> r)
-> r
dispatchAnyBackend e f =
case e of
PostgresValue pg -> f pg
MSSQLValue ms -> f ms
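
-- For instance (a sketch, not part of this module; 'sourceKind' is a
-- hypothetical name), the packed 'Backend' constraint is enough to ask any
-- wrapped source for the name of its backend:
--
--   sourceKind :: AnyBackend SourceInfo -> Text
--   sourceKind e = dispatchAnyBackend @Backend e go
--     where
--       go :: forall b. Backend b => SourceInfo b -> Text
--       go _ = backendName (backendTag @b)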
-- | Unlike 'dispatchAnyBackend', the expected constraint has a different kind.
-- Use for classes like 'Show', 'ToJSON', etc.
dispatchAnyBackend'
:: forall
(c :: Type -> Constraint)
(i :: BackendType -> Type)
(r :: Type)
. c (i 'Postgres)
=> c (i 'MSSQL)
=> AnyBackend i
-> (forall (b :: BackendType). c (i b) => i b -> r)
-> r
dispatchAnyBackend' e f =
case e of
PostgresValue pg -> f pg
MSSQLValue ms -> f ms
-- | Sometimes we need to run operations on two backends of the same type.
-- If the backends don't contain the same type, the given 'r' value is returned.
-- Otherwise, the function is called with the two wrapped values.
composeAnyBackend
:: forall
(c :: BackendType -> Constraint)
(i :: BackendType -> Type)
(r :: Type)
. c 'Postgres
=> c 'MSSQL
=> (forall (b :: BackendType). c b => i b -> i b -> r)
-> AnyBackend i
-> AnyBackend i
-> r
-> r
composeAnyBackend f e1 e2 owise =
case (e1, e2) of
(PostgresValue p1, PostgresValue p2) -> f p1 p2
(MSSQLValue m1, MSSQLValue m2) -> f m1 m2
_ -> owise
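
-- For instance (a sketch; 'sameTable' is a hypothetical name), comparing two
-- packed ids while treating a backend mismatch as "not the same table":
--
--   sameTable :: AnyBackend SourceObjId -> AnyBackend SourceObjId -> Bool
--   sameTable e1 e2 = composeAnyBackend @Backend go e1 e2 False
--     where
--       go :: forall b. Backend b => SourceObjId b -> SourceObjId b -> Bool
--       go (SOITable t1) (SOITable t2) = t1 == t2
--       go _             _             = False
--
-- The 'induces' check in 'getDependentObjsWith' (Hasura.RQL.Types.SchemaCache)
-- uses the same shape.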
-- | Dispatch variant for use with arrow syntax. The universally quantified
-- dispatch function is an arrow instead.
-- TODO: How do we only "target" the 'i b' and ditch the tuple details (x, y, z)?
dispatchAnyBackendArrow
:: forall
(c :: BackendType -> Constraint)
(i :: BackendType -> Type)
(r :: Type)
(arr :: Type -> Type -> Type)
x y z
. ArrowChoice arr
=> c 'Postgres
=> c 'MSSQL
=> (forall b. c b => arr (x, y, i b, z) r)
-> arr (x, y, AnyBackend i, z) r
dispatchAnyBackendArrow arrow = proc (x, y, exists, z) -> do
case exists of
PostgresValue pg -> arrow @'Postgres -< (x, y, pg, z)
MSSQLValue ms -> arrow @'MSSQL -< (x, y, ms, z)
-- | Try to guess the type of an existential. 'Just' means you were right.
unpackAnyBackend
:: forall
(b :: BackendType)
(i :: BackendType -> Type)
. Backend b
=> AnyBackend i
-> Maybe (i b)
unpackAnyBackend exists =
case (backendTag @b, exists) of
(PostgresTag, PostgresValue pg) -> Just pg
(MSSQLTag , MSSQLValue ms) -> Just ms
_ -> Nothing
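
-- For instance (a sketch; 'pgTables' is a hypothetical name), a Postgres-only
-- view of a packed source, mirroring 'unsafeSourceTables' from
-- Hasura.RQL.Types.Source:
--
--   pgTables :: AnyBackend SourceInfo -> Maybe (TableCache 'Postgres)
--   pgTables = fmap _siTables . unpackAnyBackend @'Postgres
--
-- The result is 'Nothing' whenever the wrapped source belongs to another backend.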
instance (ToJSON (i 'Postgres), ToJSON (i 'MSSQL)) => ToJSON (AnyBackend i) where
toJSON e = dispatchAnyBackend' @ToJSON e toJSON
instance (Show (i 'Postgres), Show (i 'MSSQL)) => Show (AnyBackend i) where
show e = dispatchAnyBackend' @Show e show
instance (Eq (i 'Postgres), Eq (i 'MSSQL)) => Eq (AnyBackend i) where
e1 == e2 =
case (e1, e2) of
(PostgresValue p1, PostgresValue p2) -> p1 == p2
(MSSQLValue m1, MSSQLValue m2) -> m1 == m2
_ -> False
instance (FromJSON (i 'Postgres), FromJSON (i 'MSSQL)) => FromJSON (AnyBackend i) where
parseJSON = withObject "Object" $ \o -> do
backendKind :: Text <- fromMaybe "postgres" <$> o .:? "kind"
-- TODO: Make backendKind a concrete type or re-use `BackendType`
case backendKind of
"postgres" -> PostgresValue <$> parseJSON (Object o)
"mssql" -> MSSQLValue <$> parseJSON (Object o)
_ -> fail
$ "expected one of: "
<> T.unpack (T.commaSeparated (T.toLower <$> supportedBackends))
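  -- The whole object is handed to the backend-specific parser, so the "kind"
  -- key only selects the branch (a sketch; the remaining fields depend on 'i'):
  --
  --   { "name": "default", "kind": "postgres", ... }   -> PostgresValue
  --   { "name": "mssql1",  "kind": "mssql",    ... }   -> MSSQLValue
  --
  -- An object without a "kind" key defaults to the "postgres" branch.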
instance (Hashable (i 'Postgres), Hashable (i 'MSSQL)) => Hashable (AnyBackend i) where
hashWithSalt salt e = dispatchAnyBackend' @Hashable e (hashWithSalt salt)


@ -1,10 +1,16 @@
module Hasura.SQL.Backend where
module Hasura.SQL.Backend
( BackendType(..)
, BackendTag(..)
, reify
, backendName
, supportedBackends
) where
import Hasura.Prelude
import Data.GADT.Compare
import Type.Reflection
import Unsafe.Coerce
import Data.GADT.Compare (GCompare (..), GEq (..), GOrdering (GEQ, GGT, GLT))
import Type.Reflection (type (:~:) (Refl))
import Unsafe.Coerce (unsafeCoerce)
-- | An enum that represents each backend we support.
@ -25,6 +31,13 @@ reify = \case
PostgresTag -> Postgres
MSSQLTag -> MSSQL
backendName :: BackendTag b -> Text
backendName = \case
PostgresTag -> "postgres"
MSSQLTag -> "mssql"
supportedBackends :: [Text]
supportedBackends = tshow <$> [minBound @BackendType .. maxBound]
-- We need those instances to be able to use a @BackendTag@ as a key in a
-- dependent map. Using @BackendType@ as a data kind, makes it difficult to use
@ -40,74 +53,3 @@ instance GCompare BackendTag where
LT -> GLT
GT -> GGT
{- Note [Recovering Existentially Quantified Type Information]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
See Note [Existentially Quantified Types] for a brief introduction to existential
quantification.
In our codebase, we have a significant number of types indexed by 'BackendType',
and a few existential wrappers on top of them. One such example can be found in
Hasura.RQL.Types.Source: the 'SourceInfo b' type is existentially quantified as
'BackendSourceInfo'.
The idea is that we want to have a heterogeneous list (or map) of 'BackendSourceInfo'
values. However, we also want to be able to recover the information about the
specific source.
One way would be to have multiple constructors on 'BackendSourceInfo':
data BackendSourceInfo where
PostgresSourceInfo :: SourceInfo 'Postgres -> BackendSourceInfo
MssqlSourceInfo :: SourceInfo 'MSSQL -> BackendSourceInfo
-- etc
However, this would mean that we need to change a significant amount of code
when we add a new backend: every wrapper needs a new constructor, and every
pattern match on those wrappers needs updating.
Instead, we use existential quantification, which means we don't have to add
constructors when adding backends. The problem then is that we
might want to make decisions depending on the backend, but this information
is erased.
data BackendSourceInfo =
forall b. Backend b => BackendSourceInfo (SourceInfo b)
In order to circumvent this problem, we use a trick similar to the one found
in the 'singletons' library.
The trick involves creating a tag type, like BackendTag, which is isomorphic
to the backend type (in general, the tag type needs one element for every
type we need to distinguish inside the existential).
Using this type, we can add the following method to the `Backend` class:
backendTag :: BackendTag b
and implement it in each backend's instance. Now we can recover the type:
f :: BackendSourceInfo -> String
f (BackendSourceInfo (so :: SourceInfo b)) =
case backendTag @b of
PostgresTag -> "Got Postgres!"
MSSQLTag -> "Got MSSQL!"
But that's not all! In the context of each case branch, the type of 'so'
is actually `so :: SourceInfo 'Postgres`, and `so :: SourceInfo 'MSSQL`
respectively, so we can call backend-specific functions!
IMPORTANT: Please note that this function cannot be written:
impossible :: forall b. BackendSourceInfo -> SourceInfo b
The problem here is that 'BackendSourceInfo' contains one specific instance
of a 'SourceInfo', but the function 'impossible' advertises that a caller
may pick any backend.
However, this function may be written:
tryCastPostgres :: BackendSourceInfo -> Maybe (SourceInfo 'Postgres)
tryCastPostgres (BackendSourceInfo (so :: SourceInfo b)) =
case backendTag @b of
PostgresTag -> Just so
MSSQLTag -> Nothing -}
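
{- With the 'AnyBackend' wrapper from Hasura.SQL.AnyBackend, the two functions
   above no longer need to touch the tag directly (a sketch, reusing the
   hypothetical names from this note):

     f :: AB.AnyBackend SourceInfo -> String
     f e = AB.dispatchAnyBackend @Backend e $ \(_ :: SourceInfo b) ->
             case backendTag @b of
               PostgresTag -> "Got Postgres!"
               MSSQLTag    -> "Got MSSQL!"

     tryCastPostgres :: AB.AnyBackend SourceInfo -> Maybe (SourceInfo 'Postgres)
     tryCastPostgres = AB.unpackAnyBackend -}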


@ -34,9 +34,10 @@ import qualified Language.Haskell.TH.Syntax as TH
import Control.Monad.Trans.Control (MonadBaseControl)
import Data.Time.Clock (UTCTime)
import Data.Typeable (cast)
import System.Directory (doesFileExist)
import qualified Hasura.SQL.AnyBackend as AB
import Hasura.Backends.Postgres.SQL.Types
import Hasura.Logging (Hasura, LogLevel (..), ToEngineLog (..))
import Hasura.RQL.DDL.Schema
@ -132,7 +133,7 @@ migrateCatalog maybeDefaultSourceConfig maintenanceMode migrationTime = do
Nothing -> emptyMetadata
Just defaultSourceConfig ->
-- insert metadata with default source
let defaultSourceMetadata = BackendSourceMetadata $
let defaultSourceMetadata = AB.mkAnyBackend $
SourceMetadata defaultSource mempty mempty defaultSourceConfig
sources = OMap.singleton defaultSource defaultSourceMetadata
in emptyMetadata{_metaSources = sources}
@ -277,7 +278,7 @@ migrations maybeDefaultSourceConfig dryRun maintenanceMode =
"cannot migrate to catalog version 43 without --database-url or env var " <> tshow (fst databaseUrlEnv)
let metadataV3 =
let MetadataNoSources{..} = metadataV2
defaultSourceMetadata = BackendSourceMetadata $
defaultSourceMetadata = AB.mkAnyBackend $
SourceMetadata defaultSource _mnsTables _mnsFunctions defaultSourceConfig
in Metadata (OMap.singleton defaultSource defaultSourceMetadata)
_mnsRemoteSchemas _mnsQueryCollections _mnsAllowlist _mnsCustomTypes _mnsActions _mnsCronTriggers mempty
@ -294,8 +295,8 @@ migrations maybeDefaultSourceConfig dryRun maintenanceMode =
MetadataNoSources mempty mempty mempty mempty mempty emptyCustomTypes mempty mempty
metadataV2 <- case OMap.toList _metaSources of
[] -> pure emptyMetadataNoSources
[(_, BackendSourceMetadata sm)] ->
pure $ case cast sm of
[(_, exists)] ->
pure $ case AB.unpackAnyBackend exists of
Nothing -> emptyMetadataNoSources
Just SourceMetadata{..} ->
MetadataNoSources _smTables _smFunctions _metaRemoteSchemas _metaQueryCollections