Mirror of https://github.com/hasura/graphql-engine.git, synced 2024-12-14 08:02:15 +03:00
Clean up Hasura.Prelude a bit

- Remove `onJust` in favor of the more general `for_`
- Remove `withJust` which was used only once
- Remove `hashNub` in favor of `Ord`-based `uniques`
- Simplify some of the implementations in `Hasura.Prelude`
- Add `hlint` hint from `maybe True` to `all`, and `maybe False` to `any`

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/6173
GitOrigin-RevId: 2c6ebbe2d04f60071d2a53a2d43c6d62dbc4b84e
This commit is contained in:
parent 6602ad44b1
commit 05b3a64e8f
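The hunks below are easier to follow with the replacement identities in mind. The following is a minimal, standalone sketch — not part of the commit, with invented example values — that checks the `onJust` → `for_`, `maybe False`/`maybe True` → `any`/`all`, and `hashNub` → `uniques` rewrites against base's `Foldable` instance for `Maybe` (and `nubOrd` from the containers package, which the diff itself imports):

```haskell
-- Standalone sketch; the values below are assumptions used only to
-- illustrate the rewrites, not code taken from the diff.
import Data.Containers.ListUtils (nubOrd)
import Data.Foldable (for_)

main :: IO ()
main = do
  -- `onJust m f` was `maybe (pure ()) f m`; `for_` behaves identically on Maybe:
  for_ (Just "starting") putStrLn             -- prints "starting"
  for_ (Nothing :: Maybe String) putStrLn     -- does nothing
  -- Via Maybe's Foldable instance, `maybe False p` is `any p`
  -- and `maybe True p` is `all p`:
  print (any even (Just 2), any even (Nothing :: Maybe Int))  -- (True,False)
  print (all even (Just 3), all even (Nothing :: Maybe Int))  -- (False,True)
  -- Ord-based de-duplication (as `uniques = nubOrd` in the diff) keeps the
  -- first occurrence of each element:
  print (nubOrd [3, 1, 2, 3, 1 :: Int])       -- [3,1,2]
```

The rewrites work because `Maybe`'s `Foldable` instance already encodes the "do nothing / use a default on `Nothing`" behaviour that the bespoke helpers re-implemented.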
@ -109,6 +109,11 @@
|
|||||||
# To generate a suitable file for HLint do:
|
# To generate a suitable file for HLint do:
|
||||||
# $ hlint --default > .hlint.yaml
|
# $ hlint --default > .hlint.yaml
|
||||||
|
|
||||||
|
- error: {lhs: maybe False, rhs: any}
|
||||||
|
- error: {lhs: maybe True, rhs: all}
|
||||||
|
- error: {lhs: either (const False), rhs: any}
|
||||||
|
- error: {lhs: either (const True), rhs: all}
|
||||||
|
- suggest: {lhs: onJust, rhs: for_}
|
||||||
|
|
||||||
- group:
|
- group:
|
||||||
name: hasura-prelude
|
name: hasura-prelude
|
||||||
@ -120,8 +125,8 @@
|
|||||||
- error: {lhs: "a /= []", rhs: "not (null a)"}
|
- error: {lhs: "a /= []", rhs: "not (null a)"}
|
||||||
- error: {lhs: "maybe b return a", rhs: "onNothing a b"}
|
- error: {lhs: "maybe b return a", rhs: "onNothing a b"}
|
||||||
- error: {lhs: "maybe b pure a", rhs: "onNothing a b"}
|
- error: {lhs: "maybe b pure a", rhs: "onNothing a b"}
|
||||||
- error: {lhs: "maybe (return ()) b a", rhs: "onJust a b"}
|
- error: {lhs: "maybe (return ()) b a", rhs: "for_ a b"}
|
||||||
- error: {lhs: "maybe (pure ()) b a", rhs: "onJust a b"}
|
- error: {lhs: "maybe (pure ()) b a", rhs: "for_ a b"}
|
||||||
- error: {lhs: "either b return a", rhs: "onLeft a b"}
|
- error: {lhs: "either b return a", rhs: "onLeft a b"}
|
||||||
- error: {lhs: "either b pure a", rhs: "onLeft a b"}
|
- error: {lhs: "either b pure a", rhs: "onLeft a b"}
|
||||||
- error: {lhs: "maybe (f x) f", rhs: "f . fromMaybe x"}
|
- error: {lhs: "maybe (f x) f", rhs: "f . fromMaybe x"}
|
||||||
|
@ -89,7 +89,7 @@ spec TestData {..} api sourceName config subqueryComparisonCapabilities = descri
|
|||||||
let filterCustomersBySupportRepCountry (customer :: HashMap FieldName FieldValue) =
|
let filterCustomersBySupportRepCountry (customer :: HashMap FieldName FieldValue) =
|
||||||
let customerCountry = customer ^? Data.field "Country" . Data._ColumnFieldString
|
let customerCountry = customer ^? Data.field "Country" . Data._ColumnFieldString
|
||||||
supportRepCountry = customer ^.. Data.field "SupportRep" . subqueryRows . Data.field "Country" . Data._ColumnFieldString
|
supportRepCountry = customer ^.. Data.field "SupportRep" . subqueryRows . Data.field "Country" . Data._ColumnFieldString
|
||||||
in maybe False (`elem` supportRepCountry) customerCountry
|
in any (`elem` supportRepCountry) customerCountry
|
||||||
|
|
||||||
let expectedCustomers = filter filterCustomersBySupportRepCountry $ Data.filterColumnsByQueryFields (query ^. qrQuery) . joinInSupportRep <$> _tdCustomersRows
|
let expectedCustomers = filter filterCustomersBySupportRepCountry $ Data.filterColumnsByQueryFields (query ^. qrQuery) . joinInSupportRep <$> _tdCustomersRows
|
||||||
Data.responseRows receivedCustomers `rowsShouldBe` expectedCustomers
|
Data.responseRows receivedCustomers `rowsShouldBe` expectedCustomers
|
||||||
@ -119,7 +119,7 @@ spec TestData {..} api sourceName config subqueryComparisonCapabilities = descri
|
|||||||
let filterEmployeesByCustomerCountry (employee :: HashMap FieldName FieldValue) =
|
let filterEmployeesByCustomerCountry (employee :: HashMap FieldName FieldValue) =
|
||||||
let employeeCountry = employee ^? Data.field "Country" . Data._ColumnFieldString
|
let employeeCountry = employee ^? Data.field "Country" . Data._ColumnFieldString
|
||||||
customerCountries = employee ^.. Data.field "SupportRepForCustomers" . subqueryRows . Data.field "Country" . Data._ColumnFieldString
|
customerCountries = employee ^.. Data.field "SupportRepForCustomers" . subqueryRows . Data.field "Country" . Data._ColumnFieldString
|
||||||
in maybe False (`elem` customerCountries) employeeCountry
|
in any (`elem` customerCountries) employeeCountry
|
||||||
|
|
||||||
let expectedEmployees = filter filterEmployeesByCustomerCountry $ Data.filterColumnsByQueryFields (query ^. qrQuery) . joinInCustomers <$> _tdEmployeesRows
|
let expectedEmployees = filter filterEmployeesByCustomerCountry $ Data.filterColumnsByQueryFields (query ^. qrQuery) . joinInCustomers <$> _tdEmployeesRows
|
||||||
Data.responseRows receivedEmployees `rowsShouldBe` expectedEmployees
|
Data.responseRows receivedEmployees `rowsShouldBe` expectedEmployees
|
||||||
|
@ -12,7 +12,6 @@ module Hasura.Prelude
|
|||||||
onNothing,
|
onNothing,
|
||||||
onNothingM,
|
onNothingM,
|
||||||
onJust,
|
onJust,
|
||||||
withJust,
|
|
||||||
mapMaybe,
|
mapMaybe,
|
||||||
maybeToEither,
|
maybeToEither,
|
||||||
eitherToMaybe,
|
eitherToMaybe,
|
||||||
@ -31,7 +30,6 @@ module Hasura.Prelude
|
|||||||
hoistEither,
|
hoistEither,
|
||||||
readJson,
|
readJson,
|
||||||
tshow,
|
tshow,
|
||||||
hashNub,
|
|
||||||
|
|
||||||
-- * Trace debugging
|
-- * Trace debugging
|
||||||
ltrace,
|
ltrace,
|
||||||
@ -156,9 +154,13 @@ import Text.Read as M (readEither, readMaybe)
|
|||||||
import Witherable (catMaybes, mapMaybe)
|
import Witherable (catMaybes, mapMaybe)
|
||||||
import Prelude as M hiding (fail, init, lookup)
|
import Prelude as M hiding (fail, init, lookup)
|
||||||
|
|
||||||
|
-- Don't inline, to avoid the risk of unreasonably long code being generated
|
||||||
|
{-# NOINLINE alphabet #-}
|
||||||
alphabet :: String
|
alphabet :: String
|
||||||
alphabet = ['a' .. 'z'] ++ ['A' .. 'Z']
|
alphabet = ['a' .. 'z'] ++ ['A' .. 'Z']
|
||||||
|
|
||||||
|
-- Don't inline, to avoid the risk of unreasonably long code being generated
|
||||||
|
{-# NOINLINE alphaNumerics #-}
|
||||||
alphaNumerics :: String
|
alphaNumerics :: String
|
||||||
alphaNumerics = alphabet ++ "0123456789"
|
alphaNumerics = alphabet ++ "0123456789"
|
||||||
|
|
||||||
@ -169,17 +171,12 @@ onNothingM :: Monad m => m (Maybe a) -> m a -> m a
|
|||||||
onNothingM m act = m >>= (`onNothing` act)
|
onNothingM m act = m >>= (`onNothing` act)
|
||||||
|
|
||||||
onJust :: Applicative m => Maybe a -> (a -> m ()) -> m ()
|
onJust :: Applicative m => Maybe a -> (a -> m ()) -> m ()
|
||||||
onJust m action = maybe (pure ()) action m
|
onJust = for_
|
||||||
|
|
||||||
withJust :: Applicative m => Maybe a -> (a -> m (Maybe b)) -> m (Maybe b)
|
|
||||||
withJust m action = maybe (pure Nothing) action m
|
|
||||||
|
|
||||||
-- | Transform a 'Maybe' into an 'Either' given a default value.
|
-- | Transform a 'Maybe' into an 'Either' given a default value.
|
||||||
--
|
|
||||||
-- > maybeToEither def Nothing == Left def
|
|
||||||
-- > maybeToEither _def (Just b) == Right b
|
|
||||||
maybeToEither :: a -> Maybe b -> Either a b
|
maybeToEither :: a -> Maybe b -> Either a b
|
||||||
maybeToEither a = maybe (Left a) Right
|
maybeToEither def Nothing = Left def
|
||||||
|
maybeToEither _def (Just b) = Right b
|
||||||
|
|
||||||
-- | Convert an 'Either' to a 'Maybe', forgetting the 'Left' values.
|
-- | Convert an 'Either' to a 'Maybe', forgetting the 'Left' values.
|
||||||
--
|
--
|
||||||
@ -238,8 +235,8 @@ spanMaybeM f = go . toList
|
|||||||
|
|
||||||
findWithIndex :: (a -> Bool) -> [a] -> Maybe (a, Int)
|
findWithIndex :: (a -> Bool) -> [a] -> Maybe (a, Int)
|
||||||
findWithIndex p l = do
|
findWithIndex p l = do
|
||||||
v <- find p l
|
|
||||||
i <- findIndex p l
|
i <- findIndex p l
|
||||||
|
let v = l !! i
|
||||||
pure (v, i)
|
pure (v, i)
|
||||||
|
|
||||||
-- TODO (from main): Move to Data.HashMap.Strict.Extended; rename to fromListWith?
|
-- TODO (from main): Move to Data.HashMap.Strict.Extended; rename to fromListWith?
|
||||||
@ -252,13 +249,12 @@ oMapFromL f = OMap.fromList . map (\v -> (f v, v))
|
|||||||
-- | Time an IO action, returning the time with microsecond precision. The
|
-- | Time an IO action, returning the time with microsecond precision. The
|
||||||
-- result of the input action will be evaluated to WHNF.
|
-- result of the input action will be evaluated to WHNF.
|
||||||
--
|
--
|
||||||
-- The result 'DiffTime' is guarenteed to be >= 0.
|
-- The result 'DiffTime' is guaranteed to be >= 0.
|
||||||
withElapsedTime :: MonadIO m => m a -> m (DiffTime, a)
|
withElapsedTime :: MonadIO m => m a -> m (DiffTime, a)
|
||||||
withElapsedTime ma = do
|
withElapsedTime ma = do
|
||||||
bef <- liftIO Clock.getMonotonicTimeNSec
|
stopTimer <- startTimer
|
||||||
!a <- ma
|
!a <- ma
|
||||||
aft <- liftIO Clock.getMonotonicTimeNSec
|
dur <- stopTimer
|
||||||
let !dur = nanoseconds $ fromIntegral (aft - bef)
|
|
||||||
return (dur, a)
|
return (dur, a)
|
||||||
|
|
||||||
-- | Start timing and return an action to return the elapsed time since 'startTimer' was called.
|
-- | Start timing and return an action to return the elapsed time since 'startTimer' was called.
|
||||||
@ -334,15 +330,6 @@ traceToFileM filepath x =
|
|||||||
]
|
]
|
||||||
{-# WARNING traceToFileM "traceToFileM left in code" #-}
|
{-# WARNING traceToFileM "traceToFileM left in code" #-}
|
||||||
|
|
||||||
-- | Remove duplicates from a list. Like 'nub' but runs in @O(n * log_16(n))@
|
|
||||||
-- time and requires 'Hashable' and `Eq` instances. hashNub is faster than
|
|
||||||
-- ordNub when there're not so many different values in the list.
|
|
||||||
--
|
|
||||||
-- >>> hashNub [1,3,2,9,4,1,5,7,3,3,1,2,5,4,3,2,1,0]
|
|
||||||
-- [0,1,2,3,4,5,7,9]
|
|
||||||
hashNub :: (Hashable a, Eq a) => [a] -> [a]
|
|
||||||
hashNub = HSet.toList . HSet.fromList
|
|
||||||
|
|
||||||
-- | Convert a non-empty sequence to a non-empty list.
|
-- | Convert a non-empty sequence to a non-empty list.
|
||||||
nonEmptySeqToNonEmptyList :: NESeq a -> NonEmpty a
|
nonEmptySeqToNonEmptyList :: NESeq a -> NonEmpty a
|
||||||
nonEmptySeqToNonEmptyList (x NESeq.:<|| xs) =
|
nonEmptySeqToNonEmptyList (x NESeq.:<|| xs) =
|
||||||
|
@ -27,6 +27,7 @@ import Control.Monad.IO.Class (MonadIO (liftIO))
|
|||||||
import Control.Monad.Trans.Class (lift)
|
import Control.Monad.Trans.Class (lift)
|
||||||
import Control.Monad.Trans.Control (MonadBaseControl)
|
import Control.Monad.Trans.Control (MonadBaseControl)
|
||||||
import Control.Monad.Trans.Except (ExceptT, runExceptT)
|
import Control.Monad.Trans.Except (ExceptT, runExceptT)
|
||||||
|
import Data.Foldable
|
||||||
import Data.Kind (Type)
|
import Data.Kind (Type)
|
||||||
import Data.String (IsString)
|
import Data.String (IsString)
|
||||||
import Data.Text qualified as T
|
import Data.Text qualified as T
|
||||||
@ -95,7 +96,7 @@ listen pool channel handler = catchConnErr $
|
|||||||
processNotifs conn = do
|
processNotifs conn = do
|
||||||
-- Collect notification
|
-- Collect notification
|
||||||
mNotify <- PQ.notifies conn
|
mNotify <- PQ.notifies conn
|
||||||
onJust mNotify $ \n -> do
|
for_ mNotify $ \n -> do
|
||||||
-- Apply notify handler on arrived notification
|
-- Apply notify handler on arrived notification
|
||||||
handler $ PNEPQNotify n
|
handler $ PNEPQNotify n
|
||||||
-- Process remaining notifications if any
|
-- Process remaining notifications if any
|
||||||
@ -112,7 +113,3 @@ waitForReadReadiness conn = do
|
|||||||
where
|
where
|
||||||
ioErrorToPGConnErr :: IOError -> PGConnErr
|
ioErrorToPGConnErr :: IOError -> PGConnErr
|
||||||
ioErrorToPGConnErr = PGConnErr . T.pack . displayException
|
ioErrorToPGConnErr = PGConnErr . T.pack . displayException
|
||||||
|
|
||||||
onJust :: Monad m => Maybe a -> (a -> m ()) -> m ()
|
|
||||||
onJust Nothing _ = return ()
|
|
||||||
onJust (Just v) act = act v
|
|
||||||
|
@ -330,7 +330,7 @@ withExpiringPGconn pool f = do
|
|||||||
let microseconds = realToFrac (1000000 * diffUTCTime now old)
|
let microseconds = realToFrac (1000000 * diffUTCTime now old)
|
||||||
liftIO (EKG.Distribution.add (_poolConnAcquireLatency (_stats pool)) microseconds)
|
liftIO (EKG.Distribution.add (_poolConnAcquireLatency (_stats pool)) microseconds)
|
||||||
let connectionStale =
|
let connectionStale =
|
||||||
maybe False (\lifetime -> now `diffUTCTime` pgCreatedAt > lifetime) pgMbLifetime
|
any (\lifetime -> now `diffUTCTime` pgCreatedAt > lifetime) pgMbLifetime
|
||||||
when connectionStale $ do
|
when connectionStale $ do
|
||||||
-- Throwing is the only way to signal to resource pool to discard the
|
-- Throwing is the only way to signal to resource pool to discard the
|
||||||
-- connection at this time, so we need to use it for control flow:
|
-- connection at this time, so we need to use it for control flow:
|
||||||
|
@ -101,7 +101,7 @@ customDirectives = [cachedDirective @m, multipleRootFieldsDirective @m]
|
|||||||
-- Example use:
|
-- Example use:
|
||||||
--
|
--
|
||||||
-- dMap <- parseDirectives customDirectives (DLExecutable EDLQUERY) directives
|
-- dMap <- parseDirectives customDirectives (DLExecutable EDLQUERY) directives
|
||||||
-- withDirective dMap cached $ onJust \_ -> tagAsCached
|
-- withDirective dMap cached $ for_ \_ -> tagAsCached
|
||||||
parseDirectives ::
|
parseDirectives ::
|
||||||
forall origin m.
|
forall origin m.
|
||||||
MonadParse m =>
|
MonadParse m =>
|
||||||
|
@ -10,21 +10,25 @@ module Data.List.Extended
|
|||||||
)
|
)
|
||||||
where
|
where
|
||||||
|
|
||||||
|
import Data.Containers.ListUtils (nubOrd)
|
||||||
import Data.Function (on)
|
import Data.Function (on)
|
||||||
import Data.HashMap.Strict.Extended qualified as Map
|
import Data.HashMap.Strict.Extended qualified as Map
|
||||||
import Data.HashSet qualified as Set
|
import Data.HashSet qualified as Set
|
||||||
import Data.Hashable (Hashable)
|
import Data.Hashable (Hashable)
|
||||||
import Data.List qualified as L
|
import Data.List qualified as L
|
||||||
import Data.List.NonEmpty qualified as NE
|
import Data.List.NonEmpty qualified as NE
|
||||||
import Data.Set qualified as S
|
|
||||||
import Prelude
|
import Prelude
|
||||||
|
|
||||||
duplicates :: (Eq a, Hashable a) => [a] -> Set.HashSet a
|
duplicates :: (Eq a, Hashable a) => [a] -> Set.HashSet a
|
||||||
duplicates =
|
duplicates =
|
||||||
Set.fromList . Map.keys . Map.filter (> 1) . Map.fromListWith (+) . map (,1 :: Int)
|
Set.fromList . Map.keys . Map.filter (> 1) . Map.fromListWith (+) . map (,1 :: Int)
|
||||||
|
|
||||||
|
-- | Remove duplicates from a list. Like 'nub' but runs in @O(n * log(n))@
|
||||||
|
-- time and requires 'Ord' instances.
|
||||||
|
-- >>> uniques [1,3,2,9,4,1,5,7,3,3,1,2,5,4,3,2,1,0]
|
||||||
|
-- [0,1,2,3,4,5,7,9]
|
||||||
uniques :: (Ord a) => [a] -> [a]
|
uniques :: (Ord a) => [a] -> [a]
|
||||||
uniques = S.toList . S.fromList
|
uniques = nubOrd
|
||||||
|
|
||||||
getDifference :: (Eq a, Hashable a) => [a] -> [a] -> Set.HashSet a
|
getDifference :: (Eq a, Hashable a) => [a] -> [a] -> Set.HashSet a
|
||||||
getDifference = Set.difference `on` Set.fromList
|
getDifference = Set.difference `on` Set.fromList
|
||||||
|
@ -597,7 +597,7 @@ runHGEServer setupHook env serveOptions serveCtx initTime postPollHook serverMet
|
|||||||
. Warp.setHost (soHost serveOptions)
|
. Warp.setHost (soHost serveOptions)
|
||||||
. Warp.setGracefulShutdownTimeout (Just 30) -- 30s graceful shutdown
|
. Warp.setGracefulShutdownTimeout (Just 30) -- 30s graceful shutdown
|
||||||
. Warp.setInstallShutdownHandler shutdownHandler
|
. Warp.setInstallShutdownHandler shutdownHandler
|
||||||
. Warp.setBeforeMainLoop (onJust startupStatusHook id)
|
. Warp.setBeforeMainLoop (for_ startupStatusHook id)
|
||||||
. setForkIOWithMetrics
|
. setForkIOWithMetrics
|
||||||
$ Warp.defaultSettings
|
$ Warp.defaultSettings
|
||||||
|
|
||||||
@ -862,9 +862,9 @@ mkHGEServer setupHook env ServeOptions {..} ServeCtx {..} initTime postPollHook
|
|||||||
AB.dispatchAnyBackend @BackendEventTrigger backendSourceInfo \(SourceInfo sourceName _ _ sourceConfig _ _ :: SourceInfo b) -> do
|
AB.dispatchAnyBackend @BackendEventTrigger backendSourceInfo \(SourceInfo sourceName _ _ sourceConfig _ _ :: SourceInfo b) -> do
|
||||||
let sourceNameText = sourceNameToText sourceName
|
let sourceNameText = sourceNameToText sourceName
|
||||||
logger $ mkGenericLog LevelInfo "event_triggers" $ "unlocking events of source: " <> sourceNameText
|
logger $ mkGenericLog LevelInfo "event_triggers" $ "unlocking events of source: " <> sourceNameText
|
||||||
onJust (HM.lookup sourceName lockedEvents) $ \sourceLockedEvents -> do
|
for_ (HM.lookup sourceName lockedEvents) $ \sourceLockedEvents -> do
|
||||||
-- No need to execute unlockEventsTx when events are not present
|
-- No need to execute unlockEventsTx when events are not present
|
||||||
onJust (NE.nonEmptySet sourceLockedEvents) $ \nonEmptyLockedEvents -> do
|
for_ (NE.nonEmptySet sourceLockedEvents) $ \nonEmptyLockedEvents -> do
|
||||||
res <- Retry.retrying Retry.retryPolicyDefault isRetryRequired (return $ unlockEventsInSource @b sourceConfig nonEmptyLockedEvents)
|
res <- Retry.retrying Retry.retryPolicyDefault isRetryRequired (return $ unlockEventsInSource @b sourceConfig nonEmptyLockedEvents)
|
||||||
case res of
|
case res of
|
||||||
Left err ->
|
Left err ->
|
||||||
|
@ -278,7 +278,7 @@ getFinalRecordSet recordSet =
|
|||||||
fmap
|
fmap
|
||||||
( OMap.filterWithKey
|
( OMap.filterWithKey
|
||||||
( \(FieldNameText k) _ ->
|
( \(FieldNameText k) _ ->
|
||||||
maybe True (elem k) (wantedFields recordSet)
|
all (elem k) (wantedFields recordSet)
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
(rows recordSet)
|
(rows recordSet)
|
||||||
|
@ -164,7 +164,7 @@ recordError' ::
|
|||||||
recordError' sourceConfig event invocation processEventError maintenanceModeVersion =
|
recordError' sourceConfig event invocation processEventError maintenanceModeVersion =
|
||||||
liftIO $
|
liftIO $
|
||||||
runMSSQLSourceWriteTx sourceConfig $ do
|
runMSSQLSourceWriteTx sourceConfig $ do
|
||||||
onJust invocation $ insertInvocation (tmName (eTrigger event))
|
for_ invocation $ insertInvocation (tmName (eTrigger event))
|
||||||
case processEventError of
|
case processEventError of
|
||||||
PESetRetry retryTime -> do
|
PESetRetry retryTime -> do
|
||||||
setRetryTx event retryTime maintenanceModeVersion
|
setRetryTx event retryTime maintenanceModeVersion
|
||||||
@ -237,9 +237,9 @@ createMissingSQLTriggers ::
|
|||||||
createMissingSQLTriggers sourceConfig table@(TableName tableNameText (SchemaName schemaText)) (allCols, primaryKeyMaybe) triggerName opsDefinition = do
|
createMissingSQLTriggers sourceConfig table@(TableName tableNameText (SchemaName schemaText)) (allCols, primaryKeyMaybe) triggerName opsDefinition = do
|
||||||
liftEitherM $
|
liftEitherM $
|
||||||
runMSSQLSourceWriteTx sourceConfig $ do
|
runMSSQLSourceWriteTx sourceConfig $ do
|
||||||
onJust (tdInsert opsDefinition) (doesSQLTriggerExist INSERT)
|
for_ (tdInsert opsDefinition) (doesSQLTriggerExist INSERT)
|
||||||
onJust (tdUpdate opsDefinition) (doesSQLTriggerExist UPDATE)
|
for_ (tdUpdate opsDefinition) (doesSQLTriggerExist UPDATE)
|
||||||
onJust (tdDelete opsDefinition) (doesSQLTriggerExist DELETE)
|
for_ (tdDelete opsDefinition) (doesSQLTriggerExist DELETE)
|
||||||
where
|
where
|
||||||
doesSQLTriggerExist op opSpec = do
|
doesSQLTriggerExist op opSpec = do
|
||||||
let triggerNameWithOp = "notify_hasura_" <> triggerNameToTxt triggerName <> "_" <> tshow op
|
let triggerNameWithOp = "notify_hasura_" <> triggerNameToTxt triggerName <> "_" <> tshow op
|
||||||
@ -637,9 +637,9 @@ mkAllTriggersQ ::
|
|||||||
Maybe (PrimaryKey 'MSSQL (ColumnInfo 'MSSQL)) ->
|
Maybe (PrimaryKey 'MSSQL (ColumnInfo 'MSSQL)) ->
|
||||||
m ()
|
m ()
|
||||||
mkAllTriggersQ triggerName tableName allCols fullSpec primaryKey = do
|
mkAllTriggersQ triggerName tableName allCols fullSpec primaryKey = do
|
||||||
onJust (tdInsert fullSpec) (mkInsertTriggerQ triggerName tableName allCols)
|
for_ (tdInsert fullSpec) (mkInsertTriggerQ triggerName tableName allCols)
|
||||||
onJust (tdDelete fullSpec) (mkDeleteTriggerQ triggerName tableName allCols)
|
for_ (tdDelete fullSpec) (mkDeleteTriggerQ triggerName tableName allCols)
|
||||||
onJust (tdUpdate fullSpec) (mkUpdateTriggerQ triggerName tableName allCols primaryKey)
|
for_ (tdUpdate fullSpec) (mkUpdateTriggerQ triggerName tableName allCols primaryKey)
|
||||||
|
|
||||||
getApplicableColumns :: [ColumnInfo 'MSSQL] -> SubscribeColumns 'MSSQL -> [ColumnInfo 'MSSQL]
|
getApplicableColumns :: [ColumnInfo 'MSSQL] -> SubscribeColumns 'MSSQL -> [ColumnInfo 'MSSQL]
|
||||||
getApplicableColumns allColumnInfos = \case
|
getApplicableColumns allColumnInfos = \case
|
||||||
|
@ -353,7 +353,7 @@ validateVariables sourceConfig sessionVariableValues prepState = do
|
|||||||
selectFrom = sessionOpenJson occSessionVars
|
selectFrom = sessionOpenJson occSessionVars
|
||||||
}
|
}
|
||||||
|
|
||||||
onJust
|
for_
|
||||||
canaryQuery
|
canaryQuery
|
||||||
( \q -> do
|
( \q -> do
|
||||||
_ :: [[ODBC.Value]] <- liftEitherM $ runExceptT $ mssqlRunReadOnly (_mscExecCtx sourceConfig) (Tx.multiRowQueryE defaultMSSQLTxErrorHandler q)
|
_ :: [[ODBC.Value]] <- liftEitherM $ runExceptT $ mssqlRunReadOnly (_mscExecCtx sourceConfig) (Tx.multiRowQueryE defaultMSSQLTxErrorHandler q)
|
||||||
|
@ -63,7 +63,9 @@ ifMatchedFieldParser ::
|
|||||||
SchemaT r m (InputFieldsParser n (Maybe (IfMatched (UnpreparedValue 'MSSQL))))
|
SchemaT r m (InputFieldsParser n (Maybe (IfMatched (UnpreparedValue 'MSSQL))))
|
||||||
ifMatchedFieldParser sourceInfo tableInfo = do
|
ifMatchedFieldParser sourceInfo tableInfo = do
|
||||||
maybeObject <- ifMatchedObjectParser sourceInfo tableInfo
|
maybeObject <- ifMatchedObjectParser sourceInfo tableInfo
|
||||||
return $ withJust maybeObject $ P.fieldOptional Name._if_matched (Just "upsert condition")
|
pure case maybeObject of
|
||||||
|
Nothing -> pure Nothing
|
||||||
|
Just object -> P.fieldOptional Name._if_matched (Just "upsert condition") object
|
||||||
|
|
||||||
-- | Parse a @tablename_if_matched@ object.
|
-- | Parse a @tablename_if_matched@ object.
|
||||||
ifMatchedObjectParser ::
|
ifMatchedObjectParser ::
|
||||||
|
@ -230,7 +230,7 @@ getFinalRecordSet HeadAndTail {..} = do
|
|||||||
fmap
|
fmap
|
||||||
( OMap.filterWithKey
|
( OMap.filterWithKey
|
||||||
( \(FieldName k) _ ->
|
( \(FieldName k) _ ->
|
||||||
maybe True (elem k) (wantedFields headSet)
|
all (elem k) (wantedFields headSet)
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
(rows tailSet)
|
(rows tailSet)
|
||||||
@ -365,7 +365,7 @@ joinArrayRows wantedFields fieldName leftRow rightRow =
|
|||||||
( RecordOutputValue
|
( RecordOutputValue
|
||||||
. OMap.filterWithKey
|
. OMap.filterWithKey
|
||||||
( \(DataLoaderPlan.FieldName k) _ ->
|
( \(DataLoaderPlan.FieldName k) _ ->
|
||||||
maybe True (elem k) wantedFields
|
all (elem k) wantedFields
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
rightRow
|
rightRow
|
||||||
@ -391,7 +391,7 @@ joinObjectRows wantedFields fieldName leftRow rightRows
|
|||||||
(DataLoaderPlan.FieldName fieldName)
|
(DataLoaderPlan.FieldName fieldName)
|
||||||
( RecordOutputValue
|
( RecordOutputValue
|
||||||
( OMap.filterWithKey
|
( OMap.filterWithKey
|
||||||
(\(DataLoaderPlan.FieldName k) _ -> maybe True (elem k) wantedFields)
|
(\(DataLoaderPlan.FieldName k) _ -> all (elem k) wantedFields)
|
||||||
row
|
row
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
@ -171,7 +171,7 @@ recordError' ::
|
|||||||
recordError' sourceConfig event invocation processEventError maintenanceModeVersion =
|
recordError' sourceConfig event invocation processEventError maintenanceModeVersion =
|
||||||
liftIO $
|
liftIO $
|
||||||
runPgSourceWriteTx sourceConfig $ do
|
runPgSourceWriteTx sourceConfig $ do
|
||||||
onJust invocation $ insertInvocation (tmName (eTrigger event))
|
for_ invocation $ insertInvocation (tmName (eTrigger event))
|
||||||
case processEventError of
|
case processEventError of
|
||||||
PESetRetry retryTime -> setRetryTx event retryTime maintenanceModeVersion
|
PESetRetry retryTime -> setRetryTx event retryTime maintenanceModeVersion
|
||||||
PESetError -> setErrorTx event maintenanceModeVersion
|
PESetError -> setErrorTx event maintenanceModeVersion
|
||||||
@ -216,9 +216,9 @@ createMissingSQLTriggers sourceConfig table (allCols, _) triggerName opsDefiniti
|
|||||||
serverConfigCtx <- askServerConfigCtx
|
serverConfigCtx <- askServerConfigCtx
|
||||||
liftEitherM $
|
liftEitherM $
|
||||||
runPgSourceWriteTx sourceConfig $ do
|
runPgSourceWriteTx sourceConfig $ do
|
||||||
onJust (tdInsert opsDefinition) (doesSQLTriggerExist serverConfigCtx INSERT)
|
for_ (tdInsert opsDefinition) (doesSQLTriggerExist serverConfigCtx INSERT)
|
||||||
onJust (tdUpdate opsDefinition) (doesSQLTriggerExist serverConfigCtx UPDATE)
|
for_ (tdUpdate opsDefinition) (doesSQLTriggerExist serverConfigCtx UPDATE)
|
||||||
onJust (tdDelete opsDefinition) (doesSQLTriggerExist serverConfigCtx DELETE)
|
for_ (tdDelete opsDefinition) (doesSQLTriggerExist serverConfigCtx DELETE)
|
||||||
where
|
where
|
||||||
doesSQLTriggerExist serverConfigCtx op opSpec = do
|
doesSQLTriggerExist serverConfigCtx op opSpec = do
|
||||||
let opTriggerName = pgTriggerName op triggerName
|
let opTriggerName = pgTriggerName op triggerName
|
||||||
@ -821,9 +821,9 @@ mkAllTriggersQ ::
|
|||||||
TriggerOpsDef ('Postgres pgKind) ->
|
TriggerOpsDef ('Postgres pgKind) ->
|
||||||
m ()
|
m ()
|
||||||
mkAllTriggersQ triggerName table allCols fullspec = do
|
mkAllTriggersQ triggerName table allCols fullspec = do
|
||||||
onJust (tdInsert fullspec) (mkTrigger triggerName table allCols INSERT)
|
for_ (tdInsert fullspec) (mkTrigger triggerName table allCols INSERT)
|
||||||
onJust (tdUpdate fullspec) (mkTrigger triggerName table allCols UPDATE)
|
for_ (tdUpdate fullspec) (mkTrigger triggerName table allCols UPDATE)
|
||||||
onJust (tdDelete fullspec) (mkTrigger triggerName table allCols DELETE)
|
for_ (tdDelete fullspec) (mkTrigger triggerName table allCols DELETE)
|
||||||
|
|
||||||
-- | Add cleanup logs for given trigger names and cleanup configs. This will perform the following steps:
|
-- | Add cleanup logs for given trigger names and cleanup configs. This will perform the following steps:
|
||||||
--
|
--
|
||||||
|
@ -254,7 +254,7 @@ pollStreamingQuery pollerId lqOpts (sourceName, sourceConfig) roleName parameter
|
|||||||
-- associating every batch with their BatchId
|
-- associating every batch with their BatchId
|
||||||
pure $ zip (BatchId <$> [1 ..]) cohortBatches
|
pure $ zip (BatchId <$> [1 ..]) cohortBatches
|
||||||
|
|
||||||
onJust testActionMaybe id -- IO action intended to run after the cohorts have been snapshotted
|
for_ testActionMaybe id -- IO action intended to run after the cohorts have been snapshotted
|
||||||
|
|
||||||
-- concurrently process each batch and also get the processed cohort with the new updated cohort key
|
-- concurrently process each batch and also get the processed cohort with the new updated cohort key
|
||||||
batchesDetailsAndProcessedCohorts <- A.forConcurrently cohortBatches $ \(batchId, cohorts) -> do
|
batchesDetailsAndProcessedCohorts <- A.forConcurrently cohortBatches $ \(batchId, cohorts) -> do
|
||||||
|
@ -193,7 +193,7 @@ addLiveQuery
|
|||||||
|
|
||||||
-- we can then attach a polling thread if it is new the livequery can only be
|
-- we can then attach a polling thread if it is new the livequery can only be
|
||||||
-- cancelled after putTMVar
|
-- cancelled after putTMVar
|
||||||
onJust pollerMaybe $ \poller -> do
|
for_ pollerMaybe $ \poller -> do
|
||||||
pollerId <- PollerId <$> UUID.nextRandom
|
pollerId <- PollerId <$> UUID.nextRandom
|
||||||
threadRef <- forkImmortal ("pollLiveQuery." <> show pollerId) logger $
|
threadRef <- forkImmortal ("pollLiveQuery." <> show pollerId) logger $
|
||||||
forever $ do
|
forever $ do
|
||||||
@ -283,7 +283,7 @@ addStreamSubscriptionQuery
|
|||||||
|
|
||||||
-- we can then attach a polling thread if it is new the subscription can only be
|
-- we can then attach a polling thread if it is new the subscription can only be
|
||||||
-- cancelled after putTMVar
|
-- cancelled after putTMVar
|
||||||
onJust handlerM $ \handler -> do
|
for_ handlerM $ \handler -> do
|
||||||
pollerId <- PollerId <$> UUID.nextRandom
|
pollerId <- PollerId <$> UUID.nextRandom
|
||||||
threadRef <- forkImmortal ("pollStreamingQuery." <> show (unPollerId pollerId)) logger $
|
threadRef <- forkImmortal ("pollStreamingQuery." <> show (unPollerId pollerId)) logger $
|
||||||
forever $ do
|
forever $ do
|
||||||
|
@ -75,11 +75,10 @@ runCreateAction ::
|
|||||||
runCreateAction createAction = do
|
runCreateAction createAction = do
|
||||||
-- check if action with same name exists already
|
-- check if action with same name exists already
|
||||||
actionMap <- scActions <$> askSchemaCache
|
actionMap <- scActions <$> askSchemaCache
|
||||||
void $
|
for_ (Map.lookup actionName actionMap) $
|
||||||
onJust (Map.lookup actionName actionMap) $
|
const $
|
||||||
const $
|
throw400 AlreadyExists $
|
||||||
throw400 AlreadyExists $
|
"action with name " <> actionName <<> " already exists"
|
||||||
"action with name " <> actionName <<> " already exists"
|
|
||||||
let metadata =
|
let metadata =
|
||||||
ActionMetadata
|
ActionMetadata
|
||||||
actionName
|
actionName
|
||||||
|
@ -223,7 +223,7 @@ resolveEventTriggerQuery (CreateEventTriggerQuery source name qt insert update d
|
|||||||
return (replace, EventTriggerConf name (TriggerOpsDef insert update delete enableManual) webhook webhookFromEnv rconf mheaders reqTransform respTransform cleanupConfig)
|
return (replace, EventTriggerConf name (TriggerOpsDef insert update delete enableManual) webhook webhookFromEnv rconf mheaders reqTransform respTransform cleanupConfig)
|
||||||
where
|
where
|
||||||
assertCols :: TableCoreInfo b -> Maybe (SubscribeOpSpec b) -> m ()
|
assertCols :: TableCoreInfo b -> Maybe (SubscribeOpSpec b) -> m ()
|
||||||
assertCols ti opSpec = onJust opSpec \sos -> case sosColumns sos of
|
assertCols ti opSpec = for_ opSpec \sos -> case sosColumns sos of
|
||||||
SubCStar -> return ()
|
SubCStar -> return ()
|
||||||
SubCArray columns -> forM_ columns (assertColumnExists @b (_tciFieldInfoMap ti) "")
|
SubCArray columns -> forM_ columns (assertColumnExists @b (_tciFieldInfoMap ti) "")
|
||||||
|
|
||||||
@ -278,13 +278,13 @@ createEventTriggerQueryMetadata q = do
|
|||||||
oldConfig <- etiCleanupConfig <$> askEventTriggerInfo @b source triggerName
|
oldConfig <- etiCleanupConfig <$> askEventTriggerInfo @b source triggerName
|
||||||
when (hasCleanupCronScheduleUpdated oldConfig newConfig) do
|
when (hasCleanupCronScheduleUpdated oldConfig newConfig) do
|
||||||
deleteAllScheduledCleanups @b sourceConfig triggerName
|
deleteAllScheduledCleanups @b sourceConfig triggerName
|
||||||
onJust newConfig \cleanupConfig -> do
|
for_ newConfig \cleanupConfig -> do
|
||||||
(`onLeft` logQErr) =<< generateCleanupSchedules (AB.mkAnyBackend sourceInfo) triggerName cleanupConfig
|
(`onLeft` logQErr) =<< generateCleanupSchedules (AB.mkAnyBackend sourceInfo) triggerName cleanupConfig
|
||||||
else do
|
else do
|
||||||
doesTriggerExists <- checkIfTriggerExists @b sourceConfig triggerName (Set.fromList [INSERT, UPDATE, DELETE])
|
doesTriggerExists <- checkIfTriggerExists @b sourceConfig triggerName (Set.fromList [INSERT, UPDATE, DELETE])
|
||||||
if doesTriggerExists
|
if doesTriggerExists
|
||||||
then throw400 AlreadyExists ("Event trigger with name " <> triggerNameToTxt triggerName <<> " already exists")
|
then throw400 AlreadyExists ("Event trigger with name " <> triggerNameToTxt triggerName <<> " already exists")
|
||||||
else onJust newConfig \cleanupConfig -> do
|
else for_ newConfig \cleanupConfig -> do
|
||||||
(`onLeft` logQErr) =<< generateCleanupSchedules (AB.mkAnyBackend sourceInfo) triggerName cleanupConfig
|
(`onLeft` logQErr) =<< generateCleanupSchedules (AB.mkAnyBackend sourceInfo) triggerName cleanupConfig
|
||||||
|
|
||||||
buildSchemaCacheFor metadataObj $
|
buildSchemaCacheFor metadataObj $
|
||||||
@ -609,7 +609,7 @@ toggleEventTriggerCleanupAction conf cleanupSwitch = do
|
|||||||
let tableName = (_tciName . _tiCoreInfo) tableInfo
|
let tableName = (_tciName . _tiCoreInfo) tableInfo
|
||||||
eventTriggerInfoMap = _tiEventTriggerInfoMap tableInfo
|
eventTriggerInfoMap = _tiEventTriggerInfoMap tableInfo
|
||||||
ifor_ eventTriggerInfoMap $ \triggerName eventTriggerInfo -> do
|
ifor_ eventTriggerInfoMap $ \triggerName eventTriggerInfo -> do
|
||||||
onJust (etiCleanupConfig eventTriggerInfo) $ \cleanupConfig ->
|
for_ (etiCleanupConfig eventTriggerInfo) $ \cleanupConfig ->
|
||||||
updateCleanupStatusInMetadata @b cleanupConfig switch sourceName tableName triggerName
|
updateCleanupStatusInMetadata @b cleanupConfig switch sourceName tableName triggerName
|
||||||
|
|
||||||
runEventTriggerResumeCleanup ::
|
runEventTriggerResumeCleanup ::
|
||||||
|
@ -33,6 +33,7 @@ import Data.HashMap.Strict.InsOrd.Extended qualified as OMap
|
|||||||
import Data.HashSet qualified as HS
|
import Data.HashSet qualified as HS
|
||||||
import Data.HashSet qualified as Set
|
import Data.HashSet qualified as Set
|
||||||
import Data.List qualified as L
|
import Data.List qualified as L
|
||||||
|
import Data.List.Extended qualified as L
|
||||||
import Data.SerializableBlob qualified as SB
|
import Data.SerializableBlob qualified as SB
|
||||||
import Data.Text qualified as T
|
import Data.Text qualified as T
|
||||||
import Data.Text.Encoding qualified as TE
|
import Data.Text.Encoding qualified as TE
|
||||||
@ -257,10 +258,10 @@ runReplaceMetadataV2 ReplaceMetadataV2 {..} = do
|
|||||||
|
|
||||||
-- Check for duplicate trigger names in the new source metadata
|
-- Check for duplicate trigger names in the new source metadata
|
||||||
for_ (OMap.toList newSources) $ \(source, newBackendSourceMetadata) -> do
|
for_ (OMap.toList newSources) $ \(source, newBackendSourceMetadata) -> do
|
||||||
onJust (OMap.lookup source oldSources) $ \_oldBackendSourceMetadata ->
|
for_ (OMap.lookup source oldSources) $ \_oldBackendSourceMetadata ->
|
||||||
dispatch newBackendSourceMetadata \(newSourceMetadata :: SourceMetadata b) -> do
|
dispatch newBackendSourceMetadata \(newSourceMetadata :: SourceMetadata b) -> do
|
||||||
let newTriggerNames = concatMap (OMap.keys . _tmEventTriggers) (OMap.elems $ _smTables newSourceMetadata)
|
let newTriggerNames = concatMap (OMap.keys . _tmEventTriggers) (OMap.elems $ _smTables newSourceMetadata)
|
||||||
duplicateTriggerNamesInNewMetadata = newTriggerNames \\ (hashNub newTriggerNames)
|
duplicateTriggerNamesInNewMetadata = newTriggerNames \\ (L.uniques newTriggerNames)
|
||||||
unless (null duplicateTriggerNamesInNewMetadata) $ do
|
unless (null duplicateTriggerNamesInNewMetadata) $ do
|
||||||
throw400 NotSupported ("Event trigger with duplicate names not allowed: " <> dquoteList (map triggerNameToTxt duplicateTriggerNamesInNewMetadata))
|
throw400 NotSupported ("Event trigger with duplicate names not allowed: " <> dquoteList (map triggerNameToTxt duplicateTriggerNamesInNewMetadata))
|
||||||
|
|
||||||
@ -360,7 +361,7 @@ runReplaceMetadataV2 ReplaceMetadataV2 {..} = do
|
|||||||
-- In the current implementation, this doesn't throw an error because the trigger is dropped
|
-- In the current implementation, this doesn't throw an error because the trigger is dropped
|
||||||
-- using `DROP IF EXISTS..` meaning this silently fails without throwing an error.
|
-- using `DROP IF EXISTS..` meaning this silently fails without throwing an error.
|
||||||
for_ (OMap.toList newSources) $ \(source, newBackendSourceMetadata) -> do
|
for_ (OMap.toList newSources) $ \(source, newBackendSourceMetadata) -> do
|
||||||
onJust (OMap.lookup source oldSources) $ \oldBackendSourceMetadata ->
|
for_ (OMap.lookup source oldSources) $ \oldBackendSourceMetadata ->
|
||||||
compose source (unBackendSourceMetadata newBackendSourceMetadata) (unBackendSourceMetadata oldBackendSourceMetadata) \(newSourceMetadata :: SourceMetadata b) -> do
|
compose source (unBackendSourceMetadata newBackendSourceMetadata) (unBackendSourceMetadata oldBackendSourceMetadata) \(newSourceMetadata :: SourceMetadata b) -> do
|
||||||
dispatch oldBackendSourceMetadata \oldSourceMetadata -> do
|
dispatch oldBackendSourceMetadata \oldSourceMetadata -> do
|
||||||
let oldTriggersMap = getTriggersMap oldSourceMetadata
|
let oldTriggersMap = getTriggersMap oldSourceMetadata
|
||||||
@ -414,7 +415,7 @@ runReplaceMetadataV2 ReplaceMetadataV2 {..} = do
|
|||||||
-- If there are any event trigger cleanup configs with different cron then delete the older schedules
|
-- If there are any event trigger cleanup configs with different cron then delete the older schedules
|
||||||
-- generate cleanup logs for new event trigger cleanup config
|
-- generate cleanup logs for new event trigger cleanup config
|
||||||
for_ (OMap.toList newSources) $ \(source, newBackendSourceMetadata) -> do
|
for_ (OMap.toList newSources) $ \(source, newBackendSourceMetadata) -> do
|
||||||
onJust (OMap.lookup source oldSources) $ \oldBackendSourceMetadata ->
|
for_ (OMap.lookup source oldSources) $ \oldBackendSourceMetadata ->
|
||||||
AB.dispatchAnyBackend @BackendEventTrigger (unBackendSourceMetadata newBackendSourceMetadata) \(newSourceMetadata :: SourceMetadata b) -> do
|
AB.dispatchAnyBackend @BackendEventTrigger (unBackendSourceMetadata newBackendSourceMetadata) \(newSourceMetadata :: SourceMetadata b) -> do
|
||||||
dispatch oldBackendSourceMetadata \oldSourceMetadata -> do
|
dispatch oldBackendSourceMetadata \oldSourceMetadata -> do
|
||||||
sourceInfo@(SourceInfo _ _ _ sourceConfig _ _) <- askSourceInfo @b source
|
sourceInfo@(SourceInfo _ _ _ sourceConfig _ _) <- askSourceInfo @b source
|
||||||
|
@ -57,7 +57,7 @@ assertPermDefined ::
|
|||||||
TableInfo backend ->
|
TableInfo backend ->
|
||||||
m ()
|
m ()
|
||||||
assertPermDefined role pt tableInfo =
|
assertPermDefined role pt tableInfo =
|
||||||
unless (maybe False (permissionIsDefined pt) rpi) $
|
unless (any (permissionIsDefined pt) rpi) $
|
||||||
throw400 PermissionDenied $
|
throw400 PermissionDenied $
|
||||||
"'" <> tshow pt <> "'"
|
"'" <> tshow pt <> "'"
|
||||||
<> " permission on "
|
<> " permission on "
|
||||||
|
@ -45,7 +45,7 @@ runCreateCollection ::
|
|||||||
runCreateCollection cc = do
|
runCreateCollection cc = do
|
||||||
collDetM <- getCollectionDefM collName
|
collDetM <- getCollectionDefM collName
|
||||||
withPathK "name" $
|
withPathK "name" $
|
||||||
onJust collDetM $
|
for_ collDetM $
|
||||||
const $
|
const $
|
||||||
throw400 AlreadyExists $
|
throw400 AlreadyExists $
|
||||||
"query collection with name " <> collName <<> " already exists"
|
"query collection with name " <> collName <<> " already exists"
|
||||||
@ -66,7 +66,7 @@ runRenameCollection (RenameCollection oldName newName) = do
|
|||||||
_ <- getCollectionDef oldName
|
_ <- getCollectionDef oldName
|
||||||
newCollDefM <- getCollectionDefM newName
|
newCollDefM <- getCollectionDefM newName
|
||||||
withPathK "new_name" $
|
withPathK "new_name" $
|
||||||
onJust newCollDefM $
|
for_ newCollDefM $
|
||||||
const $
|
const $
|
||||||
throw400 AlreadyExists $
|
throw400 AlreadyExists $
|
||||||
"query collection with name " <> newName <<> " already exists"
|
"query collection with name " <> newName <<> " already exists"
|
||||||
|
@ -55,7 +55,7 @@ runCreateRelationship relType (WithTable source tableName relDef) = do
|
|||||||
let relName = _rdName relDef
|
let relName = _rdName relDef
|
||||||
-- Check if any field with relationship name already exists in the table
|
-- Check if any field with relationship name already exists in the table
|
||||||
tableFields <- _tciFieldInfoMap <$> askTableCoreInfo @b source tableName
|
tableFields <- _tciFieldInfoMap <$> askTableCoreInfo @b source tableName
|
||||||
onJust (Map.lookup (fromRel relName) tableFields) $
|
for_ (Map.lookup (fromRel relName) tableFields) $
|
||||||
const $
|
const $
|
||||||
throw400 AlreadyExists $
|
throw400 AlreadyExists $
|
||||||
"field with name " <> relName <<> " already exists in table " <>> tableName
|
"field with name " <> relName <<> " already exists in table " <>> tableName
|
||||||
|
@ -530,7 +530,7 @@ validateDirective providedDirective upstreamDirective (parentType, parentTypeNam
|
|||||||
dispute $
|
dispute $
|
||||||
pure $
|
pure $
|
||||||
UnexpectedNonMatchingNames providedName upstreamName Directive
|
UnexpectedNonMatchingNames providedName upstreamName Directive
|
||||||
onJust (NE.nonEmpty $ Map.keys argsDiff) $ \argNames ->
|
for_ (NE.nonEmpty $ Map.keys argsDiff) $ \argNames ->
|
||||||
dispute $
|
dispute $
|
||||||
pure $
|
pure $
|
||||||
NonExistingDirectiveArgument parentTypeName parentType providedName argNames
|
NonExistingDirectiveArgument parentTypeName parentType providedName argNames
|
||||||
@ -551,7 +551,7 @@ validateDirectives ::
|
|||||||
(GraphQLType, G.Name) ->
|
(GraphQLType, G.Name) ->
|
||||||
m (Maybe (G.Directive a))
|
m (Maybe (G.Directive a))
|
||||||
validateDirectives providedDirectives upstreamDirectives directiveLocation parentType = do
|
validateDirectives providedDirectives upstreamDirectives directiveLocation parentType = do
|
||||||
onJust (NE.nonEmpty $ S.toList $ duplicates $ map G._dName nonPresetDirectives) $ \dups -> do
|
for_ (NE.nonEmpty $ S.toList $ duplicates $ map G._dName nonPresetDirectives) $ \dups -> do
|
||||||
refute $ pure $ DuplicateDirectives parentType dups
|
refute $ pure $ DuplicateDirectives parentType dups
|
||||||
for_ nonPresetDirectives $ \dir -> do
|
for_ nonPresetDirectives $ \dir -> do
|
||||||
let directiveName = G._dName dir
|
let directiveName = G._dName dir
|
||||||
@ -599,9 +599,9 @@ validateEnumTypeDefinition providedEnum upstreamEnum = do
|
|||||||
pure $
|
pure $
|
||||||
UnexpectedNonMatchingNames providedName upstreamName Enum
|
UnexpectedNonMatchingNames providedName upstreamName Enum
|
||||||
void $ validateDirectives providedDirectives upstreamDirectives G.TSDLENUM $ (Enum, providedName)
|
void $ validateDirectives providedDirectives upstreamDirectives G.TSDLENUM $ (Enum, providedName)
|
||||||
onJust (NE.nonEmpty $ S.toList $ duplicates providedEnumValNames) $ \dups -> do
|
for_ (NE.nonEmpty $ S.toList $ duplicates providedEnumValNames) $ \dups -> do
|
||||||
refute $ pure $ DuplicateEnumValues providedName dups
|
refute $ pure $ DuplicateEnumValues providedName dups
|
||||||
onJust (NE.nonEmpty $ S.toList fieldsDifference) $ \nonExistingEnumVals ->
|
for_ (NE.nonEmpty $ S.toList fieldsDifference) $ \nonExistingEnumVals ->
|
||||||
dispute $ pure $ NonExistingEnumValues providedName nonExistingEnumVals
|
dispute $ pure $ NonExistingEnumValues providedName nonExistingEnumVals
|
||||||
pure providedEnum
|
pure providedEnum
|
||||||
where
|
where
|
||||||
@ -664,10 +664,10 @@ validateArguments ::
|
|||||||
G.Name ->
|
G.Name ->
|
||||||
m [RemoteSchemaInputValueDefinition]
|
m [RemoteSchemaInputValueDefinition]
|
||||||
validateArguments providedArgs upstreamArgs parentTypeName = do
|
validateArguments providedArgs upstreamArgs parentTypeName = do
|
||||||
onJust (NE.nonEmpty $ S.toList $ duplicates $ map G._ivdName providedArgs) $ \dups -> do
|
for_ (NE.nonEmpty $ S.toList $ duplicates $ map G._ivdName providedArgs) $ \dups -> do
|
||||||
refute $ pure $ DuplicateArguments parentTypeName dups
|
refute $ pure $ DuplicateArguments parentTypeName dups
|
||||||
let argsDiff = getDifference nonNullableUpstreamArgs nonNullableProvidedArgs
|
let argsDiff = getDifference nonNullableUpstreamArgs nonNullableProvidedArgs
|
||||||
onJust (NE.nonEmpty $ S.toList argsDiff) $ \nonNullableArgs -> do
|
for_ (NE.nonEmpty $ S.toList argsDiff) $ \nonNullableArgs -> do
|
||||||
refute $ pure $ MissingNonNullableArguments parentTypeName nonNullableArgs
|
refute $ pure $ MissingNonNullableArguments parentTypeName nonNullableArgs
|
||||||
for providedArgs $ \providedArg@(G.InputValueDefinition _ name _ _ _) -> do
|
for providedArgs $ \providedArg@(G.InputValueDefinition _ name _ _ _) -> do
|
||||||
upstreamArg <-
|
upstreamArg <-
|
||||||
@ -734,7 +734,7 @@ validateFieldDefinitions ::
|
|||||||
(FieldDefinitionType, G.Name) ->
|
(FieldDefinitionType, G.Name) ->
|
||||||
m [(G.FieldDefinition RemoteSchemaInputValueDefinition)]
|
m [(G.FieldDefinition RemoteSchemaInputValueDefinition)]
|
||||||
validateFieldDefinitions providedFldDefnitions upstreamFldDefinitions parentType = do
|
validateFieldDefinitions providedFldDefnitions upstreamFldDefinitions parentType = do
|
||||||
onJust (NE.nonEmpty $ S.toList $ duplicates $ map G._fldName providedFldDefnitions) $ \dups -> do
|
for_ (NE.nonEmpty $ S.toList $ duplicates $ map G._fldName providedFldDefnitions) $ \dups -> do
|
||||||
refute $ pure $ DuplicateFields parentType dups
|
refute $ pure $ DuplicateFields parentType dups
|
||||||
for providedFldDefnitions $ \fldDefn@(G.FieldDefinition _ name _ _ _) -> do
|
for providedFldDefnitions $ \fldDefn@(G.FieldDefinition _ name _ _ _) -> do
|
||||||
upstreamFldDefn <-
|
upstreamFldDefn <-
|
||||||
@ -792,7 +792,7 @@ validateUnionDefinition providedUnion upstreamUnion = do
|
|||||||
pure $
|
pure $
|
||||||
UnexpectedNonMatchingNames providedName upstreamName Union
|
UnexpectedNonMatchingNames providedName upstreamName Union
|
||||||
void $ validateDirectives providedDirectives upstreamDirectives G.TSDLUNION $ (Union, providedName)
|
void $ validateDirectives providedDirectives upstreamDirectives G.TSDLUNION $ (Union, providedName)
|
||||||
onJust (NE.nonEmpty $ S.toList memberTypesDiff) $ \nonExistingMembers ->
|
for_ (NE.nonEmpty $ S.toList memberTypesDiff) $ \nonExistingMembers ->
|
||||||
refute $ pure $ NonExistingUnionMemberTypes providedName nonExistingMembers
|
refute $ pure $ NonExistingUnionMemberTypes providedName nonExistingMembers
|
||||||
pure providedUnion
|
pure providedUnion
|
||||||
where
|
where
|
||||||
@ -817,9 +817,9 @@ validateObjectDefinition providedObj upstreamObj interfacesDeclared = do
|
|||||||
pure $
|
pure $
|
||||||
UnexpectedNonMatchingNames providedName upstreamName Object
|
UnexpectedNonMatchingNames providedName upstreamName Object
|
||||||
void $ validateDirectives providedDirectives upstreamDirectives G.TSDLOBJECT $ (Object, providedName)
|
void $ validateDirectives providedDirectives upstreamDirectives G.TSDLOBJECT $ (Object, providedName)
|
||||||
onJust (NE.nonEmpty $ S.toList customInterfaces) $ \ifaces ->
|
for_ (NE.nonEmpty $ S.toList customInterfaces) $ \ifaces ->
|
||||||
dispute $ pure $ CustomInterfacesNotAllowed providedName ifaces
|
dispute $ pure $ CustomInterfacesNotAllowed providedName ifaces
|
||||||
onJust (NE.nonEmpty nonExistingInterfaces) $ \ifaces ->
|
for_ (NE.nonEmpty nonExistingInterfaces) $ \ifaces ->
|
||||||
dispute $ pure $ ObjectImplementsNonExistingInterfaces providedName ifaces
|
dispute $ pure $ ObjectImplementsNonExistingInterfaces providedName ifaces
|
||||||
fieldDefinitions <-
|
fieldDefinitions <-
|
||||||
validateFieldDefinitions providedFldDefnitions upstreamFldDefnitions $ (ObjectField, providedName)
|
validateFieldDefinitions providedFldDefnitions upstreamFldDefnitions $ (ObjectField, providedName)
|
||||||
@ -930,7 +930,7 @@ validateRemoteSchema upstreamRemoteSchemaIntrospection = do
|
|||||||
let (providedSchemaDefinitions, providedTypeDefinitions) =
|
let (providedSchemaDefinitions, providedTypeDefinitions) =
|
||||||
partitionTypeSystemDefinitions providedTypeSystemDefinitions
|
partitionTypeSystemDefinitions providedTypeSystemDefinitions
|
||||||
duplicateTypesList = S.toList $ duplicates (getTypeName <$> providedTypeDefinitions)
|
duplicateTypesList = S.toList $ duplicates (getTypeName <$> providedTypeDefinitions)
|
||||||
onJust (NE.nonEmpty duplicateTypesList) $ \duplicateTypeNames ->
|
for_ (NE.nonEmpty duplicateTypesList) $ \duplicateTypeNames ->
|
||||||
refute $ pure $ DuplicateTypeNames duplicateTypeNames
|
refute $ pure $ DuplicateTypeNames duplicateTypeNames
|
||||||
rootTypeNames <- validateSchemaDefinitions providedSchemaDefinitions
|
rootTypeNames <- validateSchemaDefinitions providedSchemaDefinitions
|
||||||
let providedInterfacesTypes =
|
let providedInterfacesTypes =
|
||||||
|
@ -172,7 +172,7 @@ pruneDanglingDependents cache =
|
|||||||
"no foreign key constraint named " <> constraintName <<> " is "
|
"no foreign key constraint named " <> constraintName <<> " is "
|
||||||
<> "defined for table " <>> tableName
|
<> "defined for table " <>> tableName
|
||||||
TOPerm roleName permType -> do
|
TOPerm roleName permType -> do
|
||||||
unless (maybe False (permissionIsDefined permType) (tableInfo ^? (tiRolePermInfoMap . ix roleName))) $
|
unless (any (permissionIsDefined permType) (tableInfo ^? (tiRolePermInfoMap . ix roleName))) $
|
||||||
Left $
|
Left $
|
||||||
"no " <> permTypeToCode permType <> " permission defined on table "
|
"no " <> permTypeToCode permType <> " permission defined on table "
|
||||||
<> tableName <<> " for role " <>> roleName
|
<> tableName <<> " for role " <>> roleName
|
||||||
|
@ -153,7 +153,7 @@ parseScalarValueColumnType columnType value = case columnType of
|
|||||||
where
|
where
|
||||||
parseEnumValue :: Maybe G.Name -> m (ScalarValue b)
|
parseEnumValue :: Maybe G.Name -> m (ScalarValue b)
|
||||||
parseEnumValue enumValueName = do
|
parseEnumValue enumValueName = do
|
||||||
onJust enumValueName \evn -> do
|
for_ enumValueName \evn -> do
|
||||||
let enums = map getEnumValue $ M.keys enumValues
|
let enums = map getEnumValue $ M.keys enumValues
|
||||||
unless (evn `elem` enums) $
|
unless (evn `elem` enums) $
|
||||||
throw400 UnexpectedPayload $
|
throw400 UnexpectedPayload $
|
||||||
|
@ -99,7 +99,7 @@ fromRemoteRelationship = FieldName . relNameToTxt
|
|||||||
data RelType
|
data RelType
|
||||||
= ObjRel
|
= ObjRel
|
||||||
| ArrRel
|
| ArrRel
|
||||||
deriving (Show, Eq, Generic, Data)
|
deriving (Show, Eq, Ord, Generic, Data)
|
||||||
|
|
||||||
instance NFData RelType
|
instance NFData RelType
|
||||||
|
|
||||||
|
@ -66,7 +66,7 @@ data TableMetadataObjId
|
|||||||
| MTOPerm RoleName PermType
|
| MTOPerm RoleName PermType
|
||||||
| MTOTrigger TriggerName
|
| MTOTrigger TriggerName
|
||||||
| MTORemoteRelationship RelName
|
| MTORemoteRelationship RelName
|
||||||
deriving (Show, Eq, Generic)
|
deriving (Show, Eq, Ord, Generic)
|
||||||
|
|
||||||
instance Hashable TableMetadataObjId
|
instance Hashable TableMetadataObjId
|
||||||
|
|
||||||
@ -81,6 +81,8 @@ deriving instance (Backend b) => Show (SourceMetadataObjId b)
|
|||||||
|
|
||||||
deriving instance (Backend b) => Eq (SourceMetadataObjId b)
|
deriving instance (Backend b) => Eq (SourceMetadataObjId b)
|
||||||
|
|
||||||
|
deriving instance (Backend b) => Ord (SourceMetadataObjId b)
|
||||||
|
|
||||||
instance (Backend b) => Hashable (SourceMetadataObjId b)
|
instance (Backend b) => Hashable (SourceMetadataObjId b)
|
||||||
|
|
||||||
data MetadataObjId
|
data MetadataObjId
|
||||||
@ -103,7 +105,7 @@ data MetadataObjId
|
|||||||
| MOHostTlsAllowlist String
|
| MOHostTlsAllowlist String
|
||||||
| MOQueryCollectionsQuery CollectionName ListedQuery
|
| MOQueryCollectionsQuery CollectionName ListedQuery
|
||||||
| MODataConnectorAgent DataConnectorName
|
| MODataConnectorAgent DataConnectorName
|
||||||
deriving (Show, Eq, Generic)
|
deriving (Show, Eq, Ord, Generic)
|
||||||
|
|
||||||
$(makePrisms ''MetadataObjId)
|
$(makePrisms ''MetadataObjId)
|
||||||
|
|
||||||
@ -196,7 +198,7 @@ data MetadataObject = MetadataObject
|
|||||||
{ _moId :: MetadataObjId,
|
{ _moId :: MetadataObjId,
|
||||||
_moDefinition :: Value
|
_moDefinition :: Value
|
||||||
}
|
}
|
||||||
deriving (Show, Eq, Generic)
|
deriving (Show, Eq, Ord, Generic)
|
||||||
|
|
||||||
instance Hashable MetadataObject
|
instance Hashable MetadataObject
|
||||||
|
|
||||||
@ -211,7 +213,7 @@ data InconsistentRoleEntity
|
|||||||
-- use it with `AB.AnyBackend`
|
-- use it with `AB.AnyBackend`
|
||||||
PermType
|
PermType
|
||||||
| InconsistentRemoteSchemaPermission RemoteSchemaName
|
| InconsistentRemoteSchemaPermission RemoteSchemaName
|
||||||
deriving stock (Show, Eq, Generic)
|
deriving stock (Show, Eq, Ord, Generic)
|
||||||
|
|
||||||
instance Hashable InconsistentRoleEntity
|
instance Hashable InconsistentRoleEntity
|
||||||
|
|
||||||
@ -245,7 +247,7 @@ data InconsistentMetadata
|
|||||||
| InvalidRestSegments Text MetadataObject
|
| InvalidRestSegments Text MetadataObject
|
||||||
| AmbiguousRestEndpoints Text [MetadataObject]
|
| AmbiguousRestEndpoints Text [MetadataObject]
|
||||||
| ConflictingInheritedPermission RoleName InconsistentRoleEntity
|
| ConflictingInheritedPermission RoleName InconsistentRoleEntity
|
||||||
deriving stock (Show, Eq, Generic)
|
deriving stock (Show, Eq, Ord, Generic)
|
||||||
|
|
||||||
instance Hashable InconsistentMetadata
|
instance Hashable InconsistentMetadata
|
||||||
|
|
||||||
|
@ -59,12 +59,14 @@ data PermType
|
|||||||
| PTSelect
|
| PTSelect
|
||||||
| PTUpdate
|
| PTUpdate
|
||||||
| PTDelete
|
| PTDelete
|
||||||
deriving (Eq, Generic)
|
deriving (Eq, Ord, Generic)
|
||||||
|
|
||||||
instance NFData PermType
|
instance NFData PermType
|
||||||
|
|
||||||
instance Cacheable PermType
|
instance Cacheable PermType
|
||||||
|
|
||||||
|
instance Hashable PermType
|
||||||
|
|
||||||
instance PG.FromCol PermType where
|
instance PG.FromCol PermType where
|
||||||
fromCol bs = flip PG.fromColHelper bs $
|
fromCol bs = flip PG.fromColHelper bs $
|
||||||
PD.enum $ \case
|
PD.enum $ \case
|
||||||
@ -75,13 +77,7 @@ instance PG.FromCol PermType where
|
|||||||
_ -> Nothing
|
_ -> Nothing
|
||||||
|
|
||||||
permTypeToCode :: PermType -> Text
|
permTypeToCode :: PermType -> Text
|
||||||
permTypeToCode PTInsert = "insert"
|
permTypeToCode = tshow
|
||||||
permTypeToCode PTSelect = "select"
|
|
||||||
permTypeToCode PTUpdate = "update"
|
|
||||||
permTypeToCode PTDelete = "delete"
|
|
||||||
|
|
||||||
instance Hashable PermType where
|
|
||||||
hashWithSalt salt a = hashWithSalt salt $ permTypeToCode a
|
|
||||||
|
|
||||||
instance Show PermType where
|
instance Show PermType where
|
||||||
show PTInsert = "insert"
|
show PTInsert = "insert"
|
||||||
|
@ -77,7 +77,7 @@ data ListedQuery = ListedQuery
|
|||||||
{ _lqName :: QueryName,
|
{ _lqName :: QueryName,
|
||||||
_lqQuery :: GQLQueryWithText
|
_lqQuery :: GQLQueryWithText
|
||||||
}
|
}
|
||||||
deriving (Show, Eq, Generic)
|
deriving (Show, Eq, Ord, Generic)
|
||||||
|
|
||||||
instance NFData ListedQuery
|
instance NFData ListedQuery
|
||||||
|
|
||||||
|
@ -39,6 +39,7 @@ import Data.HashMap.Strict.Extended qualified as Map
|
|||||||
import Data.HashMap.Strict.InsOrd qualified as OMap
|
import Data.HashMap.Strict.InsOrd qualified as OMap
|
||||||
import Data.HashMap.Strict.Multi qualified as MultiMap
|
import Data.HashMap.Strict.Multi qualified as MultiMap
|
||||||
import Data.List qualified as L
|
import Data.List qualified as L
|
||||||
|
import Data.List.Extended qualified as L
|
||||||
import Data.Sequence qualified as Seq
|
import Data.Sequence qualified as Seq
|
||||||
import Data.Text.Extended
|
import Data.Text.Extended
|
||||||
import Data.Text.NonEmpty (unNonEmptyText)
|
import Data.Text.NonEmpty (unNonEmptyText)
|
||||||
@ -332,7 +333,7 @@ withNewInconsistentObjsCheck action = do
|
|||||||
|
|
||||||
let diffInconsistentObjects = Map.difference `on` groupInconsistentMetadataById
|
let diffInconsistentObjects = Map.difference `on` groupInconsistentMetadataById
|
||||||
newInconsistentObjects =
|
newInconsistentObjects =
|
||||||
hashNub $ concatMap toList $ Map.elems (currentObjects `diffInconsistentObjects` originalObjects)
|
L.uniques $ concatMap toList $ Map.elems (currentObjects `diffInconsistentObjects` originalObjects)
|
||||||
unless (null newInconsistentObjects) $
|
unless (null newInconsistentObjects) $
|
||||||
throwError
|
throwError
|
||||||
(err500 Unexpected "cannot continue due to newly found inconsistent metadata")
|
(err500 Unexpected "cannot continue due to newly found inconsistent metadata")
|
||||||
|
@@ -463,6 +463,8 @@ deriving instance i `SatisfiesForAllBackends` Show => Show (AnyBackend i)
 
 deriving instance i `SatisfiesForAllBackends` Eq => Eq (AnyBackend i)
 
+deriving instance i `SatisfiesForAllBackends` Ord => Ord (AnyBackend i)
+
 instance i `SatisfiesForAllBackends` Hashable => Hashable (AnyBackend i)
 
 instance i `SatisfiesForAllBackends` Cacheable => Cacheable (AnyBackend i)

@@ -1152,7 +1152,7 @@ httpApp setupHook corsCfg serverCtx enableConsole consoleAssetsDir consoleSentry
     Spock.get Spock.root $ Spock.redirect "console"
 
     -- serve static files if consoleAssetsDir is set
-    onJust consoleAssetsDir $ \dir ->
+    for_ consoleAssetsDir $ \dir ->
       Spock.get ("console/assets" <//> Spock.wildcard) $ \path -> do
         consoleAssetsHandler logger (scLoggingSettings serverCtx) dir (T.unpack path)
 
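`for_` specialised to `Maybe` does exactly what the removed `onJust` did: it runs the action on `Just` and is a no-op on `Nothing`, because `Maybe`'s `Foldable` instance holds at most one element. A small sketch (the strings are placeholders):

    import Data.Foldable (for_)

    main :: IO ()
    main = do
      -- Runs the action with the wrapped value.
      for_ (Just "static/assets") putStrLn
      -- Does nothing at all.
      for_ (Nothing :: Maybe String) putStrLn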
@@ -104,7 +104,7 @@ mkDefaultCorsPolicy cfg =
 
 inWildcardList :: Domains -> Text -> Bool
 inWildcardList (Domains _ wildcards) origin =
-  either (const False) (`Set.member` wildcards) $ parseOrigin origin
+  any (`Set.member` wildcards) $ parseOrigin origin
 
 -- | Parsers for wildcard domains
 runParser :: AT.Parser a -> Text -> Either String a
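The CORS change relies on `Either`'s `Foldable` instance, which folds over the `Right` value only, so `any p (Left _)` is `False` and `any p (Right x)` is `p x`, exactly the behaviour of the replaced `either (const False) p`. A quick sketch, with a hypothetical wildcard set standing in for the real `Domains` value:

    import qualified Data.Set as Set

    main :: IO ()
    main = do
      let wildcards = Set.fromList ["example.com"]
      -- A failed parse contributes nothing, so the origin is rejected.
      print (any (`Set.member` wildcards) (Left "bad origin" :: Either String String))    -- False
      -- A successful parse is checked against the wildcard set.
      print (any (`Set.member` wildcards) (Right "example.com" :: Either String String))  -- True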
@@ -590,7 +590,7 @@ logDeprecatedEnvVars logger env sources = do
   -- When a source named 'default' is present, it means that it is a migrated v2
   -- hasura project. In such cases log those environment variables that are moved
   -- to the metadata
-  onJust (HM.lookup SNDefault sources) $ \_defSource -> do
+  for_ (HM.lookup SNDefault sources) $ \_defSource -> do
     let deprecated = checkDeprecatedEnvVars (unEnvVarsMovedToMetadata envVarsMovedToMetadata)
     unless (null deprecated) $
      unLogger logger $

@@ -291,8 +291,8 @@ streamingSubscriptionPollingSpec srcConfig = do
 
      -- Checking below that the newly added subscriber is not added in the updated cohort
      cohortKey2CohortSnapshot <- STM.atomically $ traverse getStaticCohortSnapshot cohortKey2Cohort
-     _cssNewSubscribers <$> cohortKey2CohortSnapshot `shouldSatisfy` maybe True (notElem temporarySubscriberId)
-     _cssExistingSubscribers <$> cohortKey2CohortSnapshot `shouldSatisfy` maybe True (notElem temporarySubscriberId)
+     _cssNewSubscribers <$> cohortKey2CohortSnapshot `shouldSatisfy` all (notElem temporarySubscriberId)
+     _cssExistingSubscribers <$> cohortKey2CohortSnapshot `shouldSatisfy` all (notElem temporarySubscriberId)
      STM.atomically $
        TMap.delete temporarySubscriberId (_cNewSubscribers cohort1)
 
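In the same way, `all p` over a `Maybe` is vacuously `True` on `Nothing` and `p x` on `Just x`, which is why it can stand in for `maybe True p` in the subscriber assertions above. A tiny sketch with placeholder values:

    main :: IO ()
    main = do
      print (all (notElem 'x') (Nothing :: Maybe String))  -- True: no snapshot, nothing to check
      print (all (notElem 'x') (Just "abc"))               -- True
      print (all (notElem 'x') (Just "xyz"))               -- False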
@@ -336,5 +336,5 @@ runMockedTest opts TestCase {..} (testEnvironment, MockAgentEnvironment {..}) =
   I.writeIORef maeQueryConfig Nothing
 
   -- Assert that the 'API.QueryRequest' was constructed how we expected.
-  onJust _whenQuery ((query `shouldBe`) . Just)
-  onJust _whenConfig ((queryConfig `shouldBe`) . Just)
+  for_ _whenQuery ((query `shouldBe`) . Just)
+  for_ _whenConfig ((queryConfig `shouldBe`) . Just)

@@ -541,12 +541,12 @@ lhsRemoteServerMkLocalTestEnvironment _ = do
     -- Returns True iif the given track matches the given boolean expression.
     matchTrack trackInfo@(trackId, trackTitle, maybeAlbumId) (LHSHasuraTrackBoolExp {..}) =
       and
-        [ maybe True (all (matchTrack trackInfo)) tbe__and,
-          maybe True (any (matchTrack trackInfo)) tbe__or,
-          maybe True (not . matchTrack trackInfo) tbe__not,
-          maybe True (matchInt trackId) tbe_id,
-          maybe True (matchString trackTitle) tbe_title,
-          maybe True (matchMaybeInt maybeAlbumId) tbe_album_id
+        [ all (all (matchTrack trackInfo)) tbe__and,
+          all (any (matchTrack trackInfo)) tbe__or,
+          not (any (matchTrack trackInfo) tbe__not),
+          all (matchInt trackId) tbe_id,
+          all (matchString trackTitle) tbe_title,
+          all (matchMaybeInt maybeAlbumId) tbe_album_id
         ]
     matchInt intField IntCompExp {..} = Just intField == _eq
     matchString stringField StringCompExp {..} = Just stringField == _eq
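The only non-mechanical rewrite in these matcher hunks is the `_not` field, where `maybe True (not . p)` becomes `not (any p ...)`. The two agree by case analysis: on `Nothing` both sides are `True` (`any` over an empty structure is `False`), and on `Just x` both reduce to `not (p x)`. A small property-style sketch with arbitrary example inputs:

    -- Checks the tbe__not / abe__not rewrite on a handful of inputs.
    propNotRewrite :: (Int -> Bool) -> Maybe Int -> Bool
    propNotRewrite p m = maybe True (not . p) m == not (any p m)

    main :: IO ()
    main = print (and [propNotRewrite even m | m <- [Nothing, Just 1, Just 2]])  -- True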
@@ -473,11 +473,11 @@ lhsRemoteServerMkLocalTestEnvironment _ = do
     -- Returns True iif the given artist matches the given boolean expression.
     matchArtist artistInfo@(artistId, artistName) (HasuraArtistBoolExp {..}) =
       and
-        [ maybe True (all (matchArtist artistInfo)) abe__and,
-          maybe True (any (matchArtist artistInfo)) abe__or,
-          maybe True (not . matchArtist artistInfo) abe__not,
-          maybe True (matchMaybeInt artistId) abe_id,
-          maybe True (matchString artistName) abe_name
+        [ all (all (matchArtist artistInfo)) abe__and,
+          all (any (matchArtist artistInfo)) abe__or,
+          not (any (matchArtist artistInfo) abe__not),
+          all (matchMaybeInt artistId) abe_id,
+          all (matchString artistName) abe_name
         ]
     matchString stringField StringCompExp {..} = Just stringField == _eq
     matchMaybeInt maybeIntField IntCompExp {..} = maybeIntField == _eq

@@ -471,12 +471,12 @@ lhsRemoteServerMkLocalTestEnvironment _ = do
     -- Returns True iif the given track matches the given boolean expression.
     matchTrack trackInfo@(trackId, trackTitle, maybeAlbumId) (HasuraTrackBoolExp {..}) =
       and
-        [ maybe True (all (matchTrack trackInfo)) tbe__and,
-          maybe True (any (matchTrack trackInfo)) tbe__or,
-          maybe True (not . matchTrack trackInfo) tbe__not,
-          maybe True (matchInt trackId) tbe_id,
-          maybe True (matchString trackTitle) tbe_title,
-          maybe True (matchMaybeInt maybeAlbumId) tbe_album_id
+        [ all (all (matchTrack trackInfo)) tbe__and,
+          all (any (matchTrack trackInfo)) tbe__or,
+          not (any (matchTrack trackInfo) tbe__not),
+          all (matchInt trackId) tbe_id,
+          all (matchString trackTitle) tbe_title,
+          all (matchMaybeInt maybeAlbumId) tbe_album_id
         ]
     matchInt intField IntCompExp {..} = Just intField == _eq
     matchString stringField StringCompExp {..} = Just stringField == _eq

@@ -357,12 +357,12 @@ lhsRemoteServerMkLocalTestEnvironment _ = do
     -- Returns True iif the given track matches the given boolean expression.
     matchTrack trackInfo@(trackId, trackTitle, maybeAlbumId) (LHSHasuraTrackBoolExp {..}) =
       and
-        [ maybe True (all (matchTrack trackInfo)) tbe__and,
-          maybe True (any (matchTrack trackInfo)) tbe__or,
-          maybe True (not . matchTrack trackInfo) tbe__not,
-          maybe True (matchInt trackId) tbe_id,
-          maybe True (matchString trackTitle) tbe_title,
-          maybe True (matchMaybeInt maybeAlbumId) tbe_album_id
+        [ all (all (matchTrack trackInfo)) tbe__and,
+          all (any (matchTrack trackInfo)) tbe__or,
+          not (any (matchTrack trackInfo) tbe__not),
+          all (matchInt trackId) tbe_id,
+          all (matchString trackTitle) tbe_title,
+          all (matchMaybeInt maybeAlbumId) tbe_album_id
        ]
     matchInt intField IntCompExp {..} = Just intField == _eq
     matchString stringField StringCompExp {..} = Just stringField == _eq