-- | This module (along with the various @Hasura.RQL.DDL.Schema.*@ modules) provides operations to
-- load and modify the Hasura catalog and schema cache.
--
-- * The /catalog/ refers to the set of PostgreSQL tables and views that store all schema information
--   known by Hasura. This includes any tracked Postgres tables, views, and functions, all remote
--   schemas, and any additional Hasura-specific information such as permissions and relationships.
--
--   Primitive functions for loading and modifying the catalog are defined in
--   "Hasura.RQL.DDL.Schema.Catalog", but most uses are wrapped by other functions to synchronize
--   catalog information with the information in the schema cache.
--
-- * The /schema cache/ is a process-global value of type 'SchemaCache' that stores an in-memory
--   representation of the data stored in the catalog. The in-memory representation is not identical
--   to the data in the catalog, since it has some post-processing applied to it in order to make it
--   easier to consume for other parts of the system, such as GraphQL schema generation. For example,
--   although column information is represented by 'RawColumnInfo', the schema cache contains
--   “processed” 'ColumnInfo' values, instead.
--
-- Ultimately, the catalog is the source of truth for all information contained in the schema
-- cache, but to avoid rebuilding the entire schema cache on every change to the catalog, various
-- functions incrementally update the cache when they modify the catalog.
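--
-- As a rough illustration of that pattern (the names in this sketch are hypothetical, not
-- exports of this module), a DDL operation typically writes to the catalog first, since the
-- catalog is the source of truth, and then applies a matching incremental update to the
-- schema cache instead of rebuilding it wholesale:
--
-- > trackTableHandler tableName = do
-- >   insertTableIntoCatalog tableName -- hypothetical helper: persist the change in the catalog
-- >   addTableToSchemaCache tableName -- hypothetical helper: patch the in-memory cache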
module Hasura.RQL.DDL.Schema
  ( module M,
    RunSQLRes (..),
  )
where

import Data.Aeson
import Data.Aeson.TH
import Data.Text.Encoding qualified as TE
import Database.PG.Query qualified as Q
import Database.PostgreSQL.LibPQ qualified as PQ
import Hasura.Prelude
import Hasura.RQL.DDL.Schema.Cache as M
import Hasura.RQL.DDL.Schema.Catalog as M
import Hasura.RQL.DDL.Schema.Function as M
import Hasura.RQL.DDL.Schema.Rename as M
import Hasura.RQL.DDL.Schema.Table as M

data RunSQLRes = RunSQLRes
  { rrResultType :: !Text,
    rrResult :: !Value
  }
  deriving (Show, Eq)

$(deriveJSON hasuraJSON ''RunSQLRes)
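
-- A sketch of the resulting JSON, assuming 'hasuraJSON' strips the @rr@ field prefix and
-- snake_cases what remains (the payloads below are illustrative, not captured output):
--
--   RunSQLRes "CommandOk" Null
--     ~ {"result_type":"CommandOk","result":null}
--   RunSQLRes "TuplesOk" (toJSON [["id"], ["1"]])
--     ~ {"result_type":"TuplesOk","result":[["id"],["1"]]}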

instance Q.FromRes RunSQLRes where
  fromRes (Q.ResultOkEmpty _) =
    return $ RunSQLRes "CommandOk" Null
  fromRes (Q.ResultOkData res) = do
    csvRows <- resToCSV res
    return $ RunSQLRes "TuplesOk" $ toJSON csvRows
    where
      resToCSV :: PQ.Result -> ExceptT Text IO [[Text]]
      resToCSV r = do
        nr <- liftIO $ PQ.ntuples r
        nc <- liftIO $ PQ.nfields r

        hdr <- forM [0 .. pred nc] $ \ic -> do
          colNameBS <- liftIO $ PQ.fname r ic
          maybe (return "unknown") decodeBS colNameBS

        rows <- forM [0 .. pred nr] $ \ir ->
          forM [0 .. pred nc] $ \ic -> do
            cellValBS <- liftIO $ PQ.getvalue r ir ic
            maybe (return "NULL") decodeBS cellValBS

        return $ hdr : rows

      decodeBS = either (throwError . tshow) return . TE.decodeUtf8'
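
-- For orientation only (the values below are made up, not taken from a real response): for a
-- query that returns two columns and one row, the instance above yields something like
--
--   RunSQLRes "TuplesOk" (toJSON [["id", "name"], ["1", "alice"]])
--
-- where the first inner list holds the column names, each subsequent list holds one row's cells
-- rendered as text, and SQL NULL cells appear as the literal string "NULL".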