{-| This module (along with the various @Hasura.RQL.DDL.Schema.*@ modules) provides operations to
load and modify the Hasura catalog and schema cache.

* The /catalog/ refers to the set of PostgreSQL tables and views that store all schema information
  known by Hasura. This includes any tracked Postgres tables, views, and functions, all remote
  schemas, and any additional Hasura-specific information such as permissions and relationships.

  Primitive functions for loading and modifying the catalog are defined in
  "Hasura.RQL.DDL.Schema.Catalog", but most uses are wrapped by other functions to synchronize
  catalog information with the information in the schema cache.

* The /schema cache/ is a process-global value of type 'SchemaCache' that stores an in-memory
  representation of the data stored in the catalog. The in-memory representation is not identical
  to the data in the catalog, since it has some post-processing applied to it in order to make it
  easier to consume for other parts of the system, such as GraphQL schema generation. For example,
  although column information is represented by 'PGRawColumnInfo', the schema cache contains
  “processed” 'PGColumnInfo' values instead.

  Ultimately, the catalog is the source of truth for all information contained in the schema
  cache, but to avoid rebuilding the entire schema cache on every change to the catalog, various
  functions incrementally update the cache when they modify the catalog.
-}
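
{- A minimal, hypothetical sketch of how the two layers are kept in sync (the
   function names below are illustrative only, not the real API): a DDL
   operation first persists its change to the catalog, then updates the
   in-memory cache, either incrementally or by rebuilding it.

   > trackTableSketch tableName = do
   >   insertTableIntoCatalog tableName -- write to the catalog tables
   >   refreshSchemaCacheIncrementally  -- update the process-global cache
-}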

{-# LANGUAGE RecordWildCards #-}

module Hasura.RQL.DDL.Schema
  ( module Hasura.RQL.DDL.Schema.Cache
  , module Hasura.RQL.DDL.Schema.Catalog
  , module Hasura.RQL.DDL.Schema.Function
  , module Hasura.RQL.DDL.Schema.Rename
  , module Hasura.RQL.DDL.Schema.Table

  , RunSQL(..)
  , runRunSQL
  ) where

import           Hasura.Prelude

import qualified Data.Text                  as T
import qualified Data.Text.Encoding         as TE
import qualified Database.PG.Query          as Q
import qualified Database.PostgreSQL.LibPQ  as PQ

import           Data.Aeson
import           Data.Aeson.Casing
import           Data.Aeson.TH
import           Language.Haskell.TH.Syntax (Lift)

import           Hasura.EncJSON
import           Hasura.RQL.DDL.Schema.Cache
import           Hasura.RQL.DDL.Schema.Catalog
import           Hasura.RQL.DDL.Schema.Function
import           Hasura.RQL.DDL.Schema.Rename
import           Hasura.RQL.DDL.Schema.Table
import           Hasura.RQL.Instances       ()
import           Hasura.RQL.Types
import           Hasura.Server.Utils        (matchRegex)

data RunSQL
  = RunSQL
  { rSql                      :: Text
  , rCascade                  :: !Bool
  , rCheckMetadataConsistency :: !(Maybe Bool)
  , rTxAccessMode             :: !Q.TxAccess
  } deriving (Show, Eq, Lift)
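
-- Illustrative only (not from the original source): the argument object that
-- the 'FromJSON' instance below accepts. Only @sql@ is required; @cascade@ and
-- @read_only@ default to @false@, and leaving out @check_metadata_consistency@
-- lets 'runRunSQL' decide from the SQL text.
--
-- > {
-- >   "sql": "alter table authors add column bio text",
-- >   "cascade": false,
-- >   "read_only": false
-- > }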

instance FromJSON RunSQL where
  parseJSON = withObject "RunSQL" $ \o -> do
    rSql                      <- o .: "sql"
    rCascade                  <- o .:? "cascade" .!= False
    rCheckMetadataConsistency <- o .:? "check_metadata_consistency"
    isReadOnly                <- o .:? "read_only" .!= False
    let rTxAccessMode = if isReadOnly then Q.ReadOnly else Q.ReadWrite
    pure RunSQL{..}

instance ToJSON RunSQL where
  toJSON RunSQL {..} =
    object
      [ "sql" .= rSql
      , "cascade" .= rCascade
      , "check_metadata_consistency" .= rCheckMetadataConsistency
      , "read_only" .=
        case rTxAccessMode of
          Q.ReadOnly  -> True
          Q.ReadWrite -> False
      ]

runRunSQL :: (CacheBuildM m) => RunSQL -> m EncJSON
runRunSQL RunSQL {..} = do
  -- when the request does not set check_metadata_consistency, infer it from the SQL text
  metadataCheckNeeded <- onNothing rCheckMetadataConsistency $ isAltrDropReplace rSql
  -- run the SQL either directly or wrapped in a metadata consistency check
  bool (execRawSQL rSql) (withMetadataCheck rCascade $ execRawSQL rSql) metadataCheckNeeded
  where
    execRawSQL :: (MonadTx m) => Text -> m EncJSON
    execRawSQL =
      fmap (encJFromJValue @RunSQLRes) . liftTx . Q.multiQE rawSqlErrHandler . Q.fromText
      where
        rawSqlErrHandler txe =
          let e = err400 PostgresError "query execution failed"
          in e {qeInternal = Just $ toJSON txe}

    isAltrDropReplace :: QErrM m => T.Text -> m Bool
    isAltrDropReplace = either throwErr return . matchRegex regex False
      where
        throwErr s = throw500 $ "compiling regex failed: " <> T.pack s
        regex = "alter|drop|replace|create function|comment on"
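
-- For example (illustrative; 'isAltrDropReplace' is local to 'runRunSQL' above),
-- assuming the regex match behaves as written:
--
-- > isAltrDropReplace "alter table authors add column bio text" -- yields True (metadata check)
-- > isAltrDropReplace "select * from authors"                   -- yields False (raw execution)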

data RunSQLRes
  = RunSQLRes
  { rrResultType :: !Text
  , rrResult     :: !Value
  } deriving (Show, Eq)
$(deriveJSON (aesonDrop 2 snakeCase) ''RunSQLRes)
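
-- Illustrative response shapes (the keys follow from @aesonDrop 2 snakeCase@
-- above, the values from the 'Q.FromRes' instance below; the column name and
-- value are made up):
--
-- > {"result_type": "CommandOk", "result": null}
-- > {"result_type": "TuplesOk", "result": [["count"], ["42"]]}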

instance Q.FromRes RunSQLRes where
  fromRes (Q.ResultOkEmpty _) =
    return $ RunSQLRes "CommandOk" Null
  fromRes (Q.ResultOkData res) = do
    csvRows <- resToCSV res
    return $ RunSQLRes "TuplesOk" $ toJSON csvRows
    where
      -- convert the libpq result to rows of text, with the column names as the first row
      resToCSV :: PQ.Result -> ExceptT T.Text IO [[Text]]
      resToCSV r = do
        nr <- liftIO $ PQ.ntuples r
        nc <- liftIO $ PQ.nfields r

        hdr <- forM [0..pred nc] $ \ic -> do
          colNameBS <- liftIO $ PQ.fname r ic
          maybe (return "unknown") decodeBS colNameBS

        rows <- forM [0..pred nr] $ \ir ->
          forM [0..pred nc] $ \ic -> do
            cellValBS <- liftIO $ PQ.getvalue r ir ic
            maybe (return "NULL") decodeBS cellValBS

        return $ hdr:rows

      decodeBS = either (throwError . T.pack . show) return . TE.decodeUtf8'