-- graphql-engine/server/src-exec/Main.hs

module Main where

import           Migrate                    (migrateCatalog)
import           Ops

import           Control.Monad.STM          (atomically)
import           Data.Time.Clock            (getCurrentTime)
import           Options.Applicative
import           System.Environment         (getEnvironment, lookupEnv)
import           System.Exit                (exitFailure)

import qualified Control.Concurrent         as C
import qualified Data.Aeson                 as A
import qualified Data.ByteString.Char8      as BC
import qualified Data.ByteString.Lazy       as BL
import qualified Data.ByteString.Lazy.Char8 as BLC
import qualified Data.Text                  as T
import qualified Data.Yaml                  as Y
import qualified Network.HTTP.Client        as HTTP
import qualified Network.HTTP.Client.TLS    as HTTP
import qualified Network.Wai.Handler.Warp   as Warp

import           Hasura.Events.Lib
import           Hasura.Logging             (Logger (..), defaultLoggerSettings,
                                             mkLogger, mkLoggerCtx)
import           Hasura.Prelude
import           Hasura.RQL.DDL.Metadata    (fetchMetadata)
import           Hasura.RQL.Types           (QErr, adminUserInfo,
                                             emptySchemaCache)
import           Hasura.Server.App          (mkWaiApp)
import           Hasura.Server.Auth
import           Hasura.Server.CheckUpdates (checkForUpdates)
import           Hasura.Server.Init
import           Hasura.Server.Query        (peelRun)
import           Hasura.Server.Telemetry
import           Hasura.Server.Version      (currentVersion)

import qualified Database.PG.Query          as Q
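
-- Helpers that print an error message (plain text or JSON-encoded) and
-- then exit with a non-zero status; used for fatal failures below.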
printErrExit :: forall a . String -> IO a
printErrExit = (>> exitFailure) . putStrLn

printErrJExit :: A.ToJSON a => forall b . a -> IO b
printErrJExit = (>> exitFailure) . printJSON
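
-- Parser for the top-level subcommands (serve, export, clean, execute,
-- version), built with optparse-applicative.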
parseHGECommand :: Parser RawHGECommand
parseHGECommand =
  subparser
    ( command "serve" (info (helper <*> (HCServe <$> serveOpts))
          ( progDesc "Start the GraphQL Engine Server"
            <> footerDoc (Just serveCmdFooter)
          ))
        <> command "export" (info (pure HCExport)
          ( progDesc "Export graphql-engine's metadata to stdout" ))
        <> command "clean" (info (pure HCClean)
          ( progDesc "Clean graphql-engine's metadata to start afresh" ))
        <> command "execute" (info (pure HCExecute)
          ( progDesc "Execute a query" ))
        <> command "version" (info (pure HCVersion)
          (progDesc "Prints the version of GraphQL Engine"))
    )
  where
    serveOpts = RawServeOptions
                <$> parseServerPort
                <*> parseServerHost
                <*> parseConnParams
                <*> parseTxIsolation
                <*> (parseAdminSecret <|> parseAccessKey)
                <*> parseWebHook
                <*> parseJwtSecret
                <*> parseUnAuthRole
                <*> parseCorsConfig
                <*> parseEnableConsole
                <*> parseEnableTelemetry
                <*> parseWsReadCookie
                <*> parseStringifyNum
                <*> parseEnabledAPIs
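
-- Parse the command line and resolve the raw options against the
-- process environment (runWithEnv).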
parseArgs :: IO HGEOptions
parseArgs = do
  rawHGEOpts <- execParser opts
  env <- getEnvironment
  let eitherOpts = runWithEnv env $ mkHGEOptions rawHGEOpts
  either printErrExit return eitherOpts
  where
    opts = info (helper <*> hgeOpts)
           ( fullDesc <>
             header "Hasura GraphQL Engine: Realtime GraphQL API over Postgres with access control" <>
             footerDoc (Just mainCmdFooter)
           )
    hgeOpts = HGEOptionsG <$> parseRawConnInfo <*> parseHGECommand

printJSON :: (A.ToJSON a) => a -> IO ()
printJSON = BLC.putStrLn . A.encode

printYaml :: (A.ToJSON a) => a -> IO ()
printYaml = BC.putStrLn . Y.encode
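
-- Entry point: parse the options, set up logging and a shared HTTP
-- manager, then dispatch on the requested command.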
main :: IO ()
main = do
  (HGEOptionsG rci hgeCmd) <- parseArgs

  -- global http manager
  httpManager <- HTTP.newManager HTTP.tlsManagerSettings
  loggerCtx   <- mkLoggerCtx $ defaultLoggerSettings True
  let logger = mkLogger loggerCtx

  case hgeCmd of
    HCServe so@(ServeOptions port host cp isoL mAdminSecret mAuthHook mJwtSecret
                mUnAuthRole corsCfg enableConsole enableTelemetry strfyNum enabledAPIs) -> do

      -- log serve options
      unLogger logger $ serveOptsToLog so
      hloggerCtx <- mkLoggerCtx $ defaultLoggerSettings False

      authModeRes <- runExceptT $ mkAuthMode mAdminSecret mAuthHook mJwtSecret
                                  mUnAuthRole httpManager loggerCtx
      am <- either (printErrExit . T.unpack) return authModeRes

      ci <- procConnInfo rci
      -- log postgres connection info
      unLogger logger $ connInfoToLog ci

      -- initialise the catalog (and migrate it if necessary)
      initRes <- initialise logger ci httpManager

      -- prepare event triggers data
      prepareEvents logger ci

      pool <- Q.initPGPool ci cp
      (app, cacheRef) <- mkWaiApp isoL loggerCtx pool httpManager
                         strfyNum am corsCfg enableConsole enableTelemetry enabledAPIs

      let warpSettings = Warp.setPort port $ Warp.setHost host Warp.defaultSettings
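
      -- event trigger worker settings come from environment variables,
      -- falling back to the compiled-in defaults when they are unset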
      maxEvThrds <- getFromEnv defaultMaxEventThreads "HASURA_GRAPHQL_EVENTS_HTTP_POOL_SIZE"
      evFetchMilliSec <- getFromEnv defaultFetchIntervalMilliSec "HASURA_GRAPHQL_EVENTS_FETCH_INTERVAL"
      logEnvHeaders <- getFromEnv False "LOG_HEADERS_FROM_ENV"

      eventEngineCtx <- atomically $ initEventEngineCtx maxEvThrds evFetchMilliSec

      unLogger logger $
        mkGenericStrLog "event_triggers" "starting workers"
      void $ C.forkIO $ processEventQueue hloggerCtx logEnvHeaders httpManager pool cacheRef eventEngineCtx

      -- start a background thread to check for updates
      void $ C.forkIO $ checkForUpdates loggerCtx httpManager

      -- start a background thread for telemetry
      when enableTelemetry $ do
        unLogger logger $ mkGenericStrLog "telemetry" telemetryNotice
        void $ C.forkIO $ runTelemetry logger httpManager cacheRef initRes

      unLogger logger $
        mkGenericStrLog "server" "starting API server"
      Warp.runSettings warpSettings app
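
    -- the remaining commands perform a one-off action and then exit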
    HCExport -> do
      ci <- procConnInfo rci
      res <- runTx ci fetchMetadata
      either printErrJExit printJSON res

    HCClean -> do
      ci <- procConnInfo rci
      res <- runTx ci cleanCatalog
      either printErrJExit (const cleanSuccess) res

    HCExecute -> do
      queryBs <- BL.getContents
      ci <- procConnInfo rci
      res <- runAsAdmin ci httpManager $ execQuery queryBs
      either printErrJExit BLC.putStrLn res

    HCVersion -> putStrLn $ "Hasura GraphQL Engine: " ++ T.unpack currentVersion
  where
    runTx :: Q.ConnInfo -> Q.TxE QErr a -> IO (Either QErr a)
    runTx ci tx = do
      pool <- getMinimalPool ci
      runExceptT $ Q.runTx pool (Q.Serializable, Nothing) tx

    runAsAdmin ci httpManager m = do
      pool <- getMinimalPool ci
      res <- runExceptT $ peelRun emptySchemaCache adminUserInfo
             httpManager False pool Q.Serializable m
      return $ fmap fst res

    procConnInfo rci =
      either (printErrExit . connInfoErrModifier) return $
        mkConnInfo rci

    getMinimalPool ci = do
      let connParams = Q.defaultConnParams { Q.cpConns = 1 }
      Q.initPGPool ci connParams
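
    -- initialise (and, if required, migrate) the catalog, then return the
    -- database id and fingerprint that the telemetry thread reports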
    initialise (Logger logger) ci httpMgr = do
      currentTime <- getCurrentTime

      -- initialise the catalog
      initRes <- runAsAdmin ci httpMgr $ initCatalogSafe currentTime
      either printErrJExit (logger . mkGenericStrLog "db_init") initRes

      -- migrate catalog if necessary
      migRes <- runAsAdmin ci httpMgr $ migrateCatalog currentTime
      either printErrJExit (logger . mkGenericStrLog "db_migrate") migRes

      -- generate and retrieve uuids
      getUniqIds ci

    prepareEvents (Logger logger) ci = do
      logger $ mkGenericStrLog "event_triggers" "preparing data"
      res <- runTx ci unlockAllEvents
      either printErrJExit return res

    getUniqIds ci = do
      eDbId <- runTx ci getDbId
      dbId <- either printErrJExit return eDbId
      fp <- liftIO generateFingerprint
      return (dbId, fp)
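
    -- read a 'Read'-able value from an environment variable, using the
    -- given default when the variable is unset;
    -- e.g. HASURA_GRAPHQL_EVENTS_HTTP_POOL_SIZE=200 sets maxEvThrds above to 200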
    getFromEnv :: (Read a) => a -> String -> IO a
    getFromEnv defaults env = do
      mEnv <- lookupEnv env
      let mRes = case mEnv of
            Nothing  -> Just defaults
            Just val -> readMaybe val
          eRes = maybe (Left $ "Wrong expected type for environment variable: " <> env) Right mRes
      either printErrExit return eRes

    cleanSuccess =
      putStrLn "successfully cleaned graphql-engine related data"

telemetryNotice :: String
telemetryNotice =
  "Help us improve Hasura! The graphql-engine server collects anonymized "
  <> "usage stats which allows us to keep improving Hasura at warp speed. "
  <> "To read more or opt-out, visit https://docs.hasura.io/1.0/graphql/manual/guides/telemetry.html"