server: remove ApplicativeDo from default extensions

I spent half the day reducing a weird compile failure (https://github.com/hasura/graphql-engine-mono/pull/1593/files#r713102990) down to this GHC issue: https://gitlab.haskell.org/ghc/ghc/-/issues/17768#note_378004. It seems ApplicativeDo makes a mess of non-applicative monadic do-blocks in some cases. Given our rather localized use of ApplicativeDo, it seemed like a good idea to remove it from the list of default extensions and enable it per module where it is actually needed.
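
For context, a minimal sketch (not part of this diff) of what the extension does: when the statements of a do-block don't depend on each other, ApplicativeDo lets GHC desugar the block with <$> and <*> instead of >>=, and it is this rewriting of do-notation that can go wrong for ordinary monadic code, as in the GHC issue linked above.

{-# LANGUAGE ApplicativeDo #-}
module Main where

-- With ApplicativeDo, this do-block desugars to (roughly)
--   (,) <$> fa <*> fb
-- so an Applicative constraint is enough; without the extension GHC would
-- insist on Monad f. The same rewriting applied to genuinely monadic code is
-- what caused the trouble above.
independent :: Applicative f => f Int -> f Int -> f (Int, Int)
independent fa fb = do
  a <- fa
  b <- fb
  pure (a, b)

main :: IO ()
main = print =<< independent (pure 1) (pure 2)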

It also appears that ApplicativeDo buries some unused-return-value warnings, so this PR silences those explicitly, mostly by wrapping the offending calls in void. We should check that none of those warnings were actually warranted, though.
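
The warnings in question are presumably of the -Wunused-do-bind variety: a do-statement whose result type is not () gets flagged unless the discard is made explicit, which is why the diff wraps the offending calls in void from Control.Monad. A minimal standalone illustration (not taken from this diff):

{-# OPTIONS_GHC -Wunused-do-bind #-}
module Main where

import Control.Monad (void)
import Data.IORef (atomicModifyIORef', newIORef, readIORef)

main :: IO ()
main = do
  ref <- newIORef (0 :: Int)
  -- atomicModifyIORef' returns the second component of the tuple (the old
  -- value here); leaving that IO Int result unbound would trip the warning,
  -- so the discard is made explicit with void, just as the diff does for
  -- EKG.register, forkIO, attoparsec's char, and friends.
  void $ atomicModifyIORef' ref (\n -> (n + 1, n))
  readIORef ref >>= print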

https://github.com/hasura/graphql-engine-mono/pull/2413

GitOrigin-RevId: 1874c1a82230431849265755b1407beebc947041
Authored by Robert on 2021-09-22 17:34:53 +02:00, committed by hasura-bot
parent 40c3d08d4a
commit bdacf1bd23
18 changed files with 31 additions and 23 deletions

@@ -38,7 +38,6 @@ common common-all
default-language: Haskell2010
default-extensions:
AllowAmbiguousTypes
ApplicativeDo
BangPatterns
BlockArguments
ConstraintKinds

@@ -56,11 +56,11 @@ runApp env (HGEOptionsG rci metadataDbUrl hgeCmd) = do
HCServe serveOptions -> do
(ekgStore, serverMetrics) <- liftIO $ do
store <- EKG.newStore @AppMetricsSpec
EKG.register (EKG.subset GcSubset store) EKG.registerGcMetrics
void $ EKG.register (EKG.subset GcSubset store) EKG.registerGcMetrics
let getTimeMs :: IO Int64
getTimeMs = (round . (* 1000)) `fmap` getPOSIXTime
EKG.register store $ EKG.registerCounter ServerTimestampMs () getTimeMs
void $ EKG.register store $ EKG.registerCounter ServerTimestampMs () getTimeMs
serverMetrics <-
liftIO $ createServerMetrics $ EKG.subset ServerSubset store

@@ -75,7 +75,7 @@ cacheDirectiveParser = tokenWithValue <|> onlyToken
onlyToken = CCDOnlyToken <$> tokenParser
tokenWithValue = do
tok <- tokenParser
AT.char '='
void $ AT.char '='
val <- tokenParser <|> quotedStringParser
return $ CCDTokenWithVal tok val

@@ -9,6 +9,7 @@ import qualified Data.Text as T
import Prelude
import Control.Applicative
import Control.Monad (void)
import Data.Aeson.Internal (JSONPath, JSONPathElement (..))
import Data.Attoparsec.Text
@@ -40,17 +41,17 @@ name = go <?> "property name" where
-- > ['你好']
bracketElement :: Parser JSONPathElement
bracketElement = do
optional (char '.') *> char '['
void $ optional (char '.') *> char '['
result <- Index <$> decimal
<|> Key <$> quotedString '"'
<|> Key <$> quotedString '\''
char ']'
void $ char ']'
pure result
where
quotedString delimiter = do
char delimiter
void $ char delimiter
result <- T.pack <$> many' (charOrEscape delimiter)
char delimiter
void $ char delimiter
pure result
charOrEscape delimiter = (char '\\' *> anyChar) <|> notChar delimiter
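
In the two attoparsec hunks above, char (Data.Attoparsec.Text.char :: Char -> Parser Char) returns the character it matched; the parsers only need it consumed, so the result is now dropped explicitly. A small standalone sketch of the pattern, with hypothetical names rather than the repo's code:

{-# LANGUAGE OverloadedStrings #-}
module Main where

import Control.Monad (void)
import Data.Attoparsec.Text (Parser, char, decimal, parseOnly, takeWhile1)
import qualified Data.Text as T

-- A hypothetical key=value parser showing the same pattern: char returns the
-- matched character, which we only needed to consume, so void makes the
-- discard explicit instead of leaving an unused Parser Char result.
keyValue :: Parser (T.Text, Int)
keyValue = do
  key <- takeWhile1 (/= '=')
  void $ char '='
  value <- decimal
  pure (key, value)

main :: IO ()
main = print (parseOnly keyValue "max-age=3600")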

@@ -1,3 +1,4 @@
{-# LANGUAGE ApplicativeDo #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Hasura.Backends.BigQuery.Instances.Schema () where

@@ -1,3 +1,4 @@
{-# LANGUAGE ApplicativeDo #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Hasura.Backends.MSSQL.Instances.Schema () where

@@ -1,3 +1,4 @@
{-# LANGUAGE ApplicativeDo #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Hasura.Backends.MySQL.Instances.Schema where

@@ -1,3 +1,4 @@
{-# LANGUAGE ApplicativeDo #-}
module Hasura.GraphQL.Schema.BoolExp
( boolExp
, mkBoolOperator

@@ -1,3 +1,4 @@
{-# LANGUAGE ApplicativeDo #-}
module Hasura.GraphQL.Schema.Introspect where
import Hasura.Prelude

@@ -1,4 +1,4 @@
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE ApplicativeDo #-}
module Hasura.GraphQL.Schema.Mutation
( insertIntoTable

@@ -1,3 +1,4 @@
{-# LANGUAGE ApplicativeDo #-}
module Hasura.GraphQL.Schema.OrderBy
( orderByExp
) where

@@ -1,3 +1,4 @@
{-# LANGUAGE ApplicativeDo #-}
{-# LANGUAGE ViewPatterns #-}
-- | Generate table selection schema both for ordinary Hasura-type and
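
The {-# LANGUAGE ApplicativeDo #-} pragmas above re-enable the extension only in the schema modules that rely on it, presumably because they use do-notation over parser types that are Applicative but not Monad. A cut-down, hypothetical illustration of why such a module stops compiling once the extension is no longer a default:

{-# LANGUAGE ApplicativeDo #-}
{-# LANGUAGE DeriveFunctor #-}
module Main where

-- A hypothetical Applicative-only "fields" type, standing in for the kind of
-- schema-parser type that motivates a per-module ApplicativeDo pragma: it
-- collects field names and deliberately has no Monad instance.
data Fields a = Fields [String] a
  deriving (Functor)

instance Applicative Fields where
  pure = Fields []
  Fields ns f <*> Fields ms a = Fields (ns ++ ms) (f a)

field :: String -> a -> Fields a
field name def = Fields [name] def

-- With ApplicativeDo this desugars to <$>/<*>; without the extension, GHC
-- would demand a Monad instance that Fields doesn't (and shouldn't) have.
person :: Fields (String, Int)
person = do
  name <- field "name" "unknown"
  age  <- field "age" 0
  pure (name, age)

collectedFieldNames :: [String]
collectedFieldNames = let Fields names _ = person in names

main :: IO ()
main = print collectedFieldNames  -- ["name","age"]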

@@ -295,7 +295,7 @@ getNewWSTimer :: Seconds -> IO WSConnInitTimer
getNewWSTimer timeout = do
timerState <- newTVarIO Running
timer <- newEmptyTMVarIO
forkIO $ do
void $ forkIO $ do
sleep (seconds timeout)
atomically $ do
runTimerState <- readTVar timerState
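
Here the discarded value is the ThreadId returned by forkIO :: IO () -> IO ThreadId; the code shown never cancels the timer thread, so the id isn't needed. A standalone sketch of both variants, for illustration only:

module Main where

import Control.Concurrent (forkIO, killThread, threadDelay)
import Control.Monad (void)

-- forkIO :: IO () -> IO ThreadId. If the ThreadId is never used (as in the
-- timer above), the fork is wrapped in void; keep the id only when the
-- thread actually needs to be cancelled later.
fireAndForget :: IO ()
fireAndForget = void $ forkIO $ threadDelay 100000 >> putStrLn "timer fired"

cancellable :: IO ()
cancellable = do
  tid <- forkIO $ threadDelay 100000 >> putStrLn "this never prints"
  killThread tid

main :: IO ()
main = do
  fireAndForget
  cancellable
  threadDelay 200000  -- give the first timer a chance to fire before exit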

@@ -53,7 +53,7 @@ runAddComputedField
=> AddComputedField b
-> m EncJSON
runAddComputedField q = do
withPathK "table" $ askTabInfo @b source table
void $ withPathK "table" $ askTabInfo @b source table
let metadataObj = MOSourceObjId source
$ AB.mkAnyBackend
$ SMOTableObj @b table

@@ -86,7 +86,7 @@ runAddRemoteSchemaPermissions q = do
when (doesRemoteSchemaPermissionExist metadata name role) $
throw400 AlreadyExists $ "permissions for role: " <> role <<> " for remote schema:"
<> name <<> " already exists"
resolveRoleBasedRemoteSchema providedSchemaDoc remoteSchemaCtx
void $ resolveRoleBasedRemoteSchema providedSchemaDoc remoteSchemaCtx
buildSchemaCacheFor (MORemoteSchemaPermissions name role) $
MetadataModifier $ metaRemoteSchemas.ix name.rsmPermissions %~ (:) remoteSchemaPermMeta
pure successMsg
@@ -107,7 +107,7 @@ runDropRemoteSchemaPermissions
runDropRemoteSchemaPermissions (DropRemoteSchemaPermissions name roleName) = do
metadata <- getMetadata
remoteSchemaMap <- scRemoteSchemas <$> askSchemaCache
onNothing (Map.lookup name remoteSchemaMap) $
void $ onNothing (Map.lookup name remoteSchemaMap) $
throw400 NotExists $ "remote schema " <> name <<> " doesn't exist"
unless (doesRemoteSchemaPermissionExist metadata name roleName)$
throw400 NotExists $ "permissions for role: " <> roleName <<> " for remote schema:"
@@ -138,7 +138,7 @@ runRemoveRemoteSchema
:: (QErrM m, UserInfoM m, CacheRWM m, MetadataM m)
=> RemoteSchemaNameQuery -> m EncJSON
runRemoveRemoteSchema (RemoteSchemaNameQuery rsn) = do
removeRemoteSchemaP1 rsn
void $ removeRemoteSchemaP1 rsn
withNewInconsistentObjsCheck $ buildSchemaCache $
dropRemoteSchemaInMetadata rsn
pure successMsg

@@ -566,7 +566,7 @@ validateEnumTypeDefinition providedEnum upstreamEnum = do
when (providedName /= upstreamName) $
dispute $ pure $
UnexpectedNonMatchingNames providedName upstreamName Enum
validateDirectives providedDirectives upstreamDirectives G.TSDLENUM $ (Enum, providedName)
void $ validateDirectives providedDirectives upstreamDirectives G.TSDLENUM $ (Enum, providedName)
onJust (NE.nonEmpty $ S.toList $ duplicates providedEnumValNames) $ \dups -> do
refute $ pure $ DuplicateEnumValues providedName dups
onJust (NE.nonEmpty $ S.toList fieldsDifference) $ \nonExistingEnumVals ->
@@ -654,7 +654,7 @@ validateInputObjectTypeDefinition providedInputObj upstreamInputObj = do
when (providedName /= upstreamName) $
dispute $ pure $
UnexpectedNonMatchingNames providedName upstreamName InputObject
validateDirectives providedDirectives upstreamDirectives G.TSDLINPUT_OBJECT $ (InputObject, providedName)
void $ validateDirectives providedDirectives upstreamDirectives G.TSDLINPUT_OBJECT $ (InputObject, providedName)
args <- validateArguments providedArgs upstreamArgs $ providedName
pure $ providedInputObj { G._iotdValueDefinitions = args }
where
@@ -674,7 +674,7 @@ validateFieldDefinition providedFieldDefinition upstreamFieldDefinition (parentT
when (providedName /= upstreamName) $
dispute $ pure $
UnexpectedNonMatchingNames providedName upstreamName (Field parentType)
validateDirectives providedDirectives upstreamDirectives G.TSDLFIELD_DEFINITION $ (Field parentType, parentTypeName)
void $ validateDirectives providedDirectives upstreamDirectives G.TSDLFIELD_DEFINITION $ (Field parentType, parentTypeName)
when (providedType /= upstreamType) $
dispute $ pure $ NonMatchingType providedName (Field parentType) upstreamType providedType
args <- validateArguments providedArgs upstreamArgs $ providedName
@@ -714,7 +714,7 @@ validateInterfaceDefinition providedInterfaceDefn upstreamInterfaceDefn = do
when (providedName /= upstreamName) $
dispute $ pure $
UnexpectedNonMatchingNames providedName upstreamName Interface
validateDirectives providedDirectives upstreamDirectives G.TSDLINTERFACE $ (Interface, providedName)
void $ validateDirectives providedDirectives upstreamDirectives G.TSDLINTERFACE $ (Interface, providedName)
fieldDefinitions <- validateFieldDefinitions providedFieldDefns upstreamFieldDefns $ (InterfaceField, providedName)
pure $ providedInterfaceDefn { G._itdFieldsDefinition = fieldDefinitions }
where
@@ -770,7 +770,7 @@ validateObjectDefinition providedObj upstreamObj interfacesDeclared = do
when (providedName /= upstreamName) $
dispute $ pure $
UnexpectedNonMatchingNames providedName upstreamName Object
validateDirectives providedDirectives upstreamDirectives G.TSDLOBJECT $ (Object, providedName)
void $ validateDirectives providedDirectives upstreamDirectives G.TSDLOBJECT $ (Object, providedName)
onJust (NE.nonEmpty $ S.toList customInterfaces) $ \ifaces ->
dispute $ pure $ CustomInterfacesNotAllowed providedName ifaces
onJust (NE.nonEmpty nonExistingInterfaces) $ \ifaces ->

@@ -529,7 +529,7 @@ buildTableCache = Inc.cache proc (source, sourceConfig, dbTablesMeta, tableBuild
<> commaSeparated (map (dquote . erTable) $ toList enumReferences) <> ")"
assertNoDuplicateFieldNames columns =
flip Map.traverseWithKey (Map.groupOn pgiName columns) \name columnsWithName ->
void $ flip Map.traverseWithKey (Map.groupOn pgiName columns) \name columnsWithName ->
case columnsWithName of
one:two:more -> throw400 AlreadyExists $ "the definitions of columns "
<> englishList "and" (dquote . pgiColumn <$> (one:|two:more))
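
Data.Map's traverseWithKey :: Applicative t => (k -> a -> t b) -> Map k a -> t (Map k b) returns a rebuilt map, so even a traversal run only for its effect, like the duplicate-field check above, yields a value that must be discarded. A standalone sketch of the same idea, with hypothetical names rather than the repo's code:

module Main where

import Control.Monad (void)
import qualified Data.Map.Strict as Map

-- traverseWithKey rebuilds the Map from the traversal's results, so an
-- effect-only traversal (here, pure validation in Either) still produces a
-- Map that has to be discarded explicitly with void.
checkNoEmptyValues :: Map.Map String String -> Either String ()
checkNoEmptyValues m =
  void $ flip Map.traverseWithKey m $ \key val ->
    if null val
      then Left ("empty value for key " ++ show key)
      else Right ()

main :: IO ()
main = print (checkNoEmptyValues (Map.fromList [("a", "x"), ("b", "")]))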

@@ -1,5 +1,6 @@
-- | Types and functions related to the server initialisation
{-# OPTIONS_GHC -O0 #-}
{-# LANGUAGE ApplicativeDo #-}
{-# LANGUAGE CPP #-}
module Hasura.Server.Init
( module Hasura.Server.Init