Mirror of https://github.com/facebook/Haxl.git (synced 2024-12-25 01:31:31 +03:00)
Collect time spent in each data source in each round
Summary: Collect the time spent in each data source in each round.

Test Plan: Tested in haxlsh; tested the overhead in replay.

Reviewed By: smarlow@fb.com
Subscribers: anfarmer, ldbrandy, watashi, smarlow, akr, bnitka, jcoens
FB internal diff: D1521346
Tasks: 4589842
commit 668a8adc2e
parent 23e7d06d7d
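Before the diff itself, a quick usage note: with this change, running a Haxl computation with the `report` flag at 1 or higher records per-round, per-data-source fetch counts, and `report >= 2` additionally records the time spent in each data source. The following minimal sketch of reading the collected stats back out is not part of the diff; `dumpStats` is a hypothetical helper, and it assumes this version of the library exports `Env(..)` (with the `statsRef` field the updated test below also reads) and `Stats` from `Haxl.Core`.

import Data.Aeson (encode)
import qualified Data.ByteString.Lazy.Char8 as BL
import Data.IORef (readIORef)
import Haxl.Core

-- Dump the per-round statistics collected so far as JSON. Each round
-- carries its wall-clock time plus a map from data source name to the
-- fetch count and (at report >= 2) the time spent in that source.
dumpStats :: Env u -> IO ()
dumpStats env = do
  stats <- readIORef (statsRef env)  -- Stats [RoundStats], as extended below
  BL.putStrLn (encode stats)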
@@ -10,6 +10,7 @@
 {-# LANGUAGE ExistentialQuantification #-}
 {-# LANGUAGE FlexibleContexts #-}
 {-# LANGUAGE GADTs #-}
+{-# LANGUAGE MultiWayIf #-}
 {-# LANGUAGE OverloadedStrings #-}
 {-# LANGUAGE PatternGuards #-}
 {-# LANGUAGE ScopedTypeVariables #-}
@@ -42,6 +43,7 @@ import Haxl.Core.Exception
 import Haxl.Core.RequestStore
 import Haxl.Core.Util
 import Haxl.Core.DataCache as DataCache
+import Util.Time
 
 import qualified Data.Text as Text
 import Control.Exception (Exception(..), SomeException)
@@ -401,10 +403,6 @@ performFetches env reqs = do
       getReq :: [BlockedFetch r] -> r a
       getReq = undefined
 
-  ifReport f 1 $
-    modifyIORef' sref $ \(Stats rounds) ->
-      Stats (RoundStats (HashMap.fromList roundstats) : rounds)
-
   ifTrace f 1 $
     printf "Batch data fetch (%s)\n" $
       intercalate (", "::String) $
@@ -423,24 +421,44 @@ performFetches env reqs = do
           e = DataSourceError $
             "data source not initialized: " <> dataSourceName req
         Just state ->
-          return $ wrapFetch reqs $ fetch state f (userEnv env) reqs
+          return $ wrapFetchInCatch reqs $ fetch state f (userEnv env) reqs
 
   fetches <- mapM applyFetch jobs
 
-  scheduleFetches fetches
+  times <-
+    if report f >= 2
+    then do
+      (refs, timedfetches) <- mapAndUnzipM wrapFetchInTimer fetches
+      scheduleFetches timedfetches
+      mapM (fmap Just . readIORef) refs
+    else do
+      scheduleFetches fetches
+      return $ repeat Nothing
+
+  let dsroundstats = HashMap.fromList
+        [ (name, DataSourceRoundStats { dataSourceFetches = fetches
+                                      , dataSourceTime = time
+                                      })
+        | ((name, fetches), time) <- zip roundstats times]
+
+  t1 <- getCurrentTime
+  let roundtime = realToFrac (diffUTCTime t1 t0) :: Double
+
+  ifReport f 1 $
+    modifyIORef' sref $ \(Stats rounds) ->
+      Stats (RoundStats (microsecs roundtime) dsroundstats: rounds)
 
-  ifTrace f 1 $ do
-    t1 <- getCurrentTime
-    printf "Batch data fetch done (%.2fs)\n"
-      (realToFrac (diffUTCTime t1 t0) :: Double)
+  ifTrace f 1 $
+    printf "Batch data fetch done (%.2fs)\n" (realToFrac roundtime :: Double)
 
 -- Catch exceptions arising from the data source and stuff them into
 -- the appropriate requests. We don't want any exceptions propagating
 -- directly from the data sources, because we want the exception to be
 -- thrown by dataFetch instead.
 --
-wrapFetch :: [BlockedFetch req] -> PerformFetch -> PerformFetch
-wrapFetch reqs fetch =
+wrapFetchInCatch :: [BlockedFetch req] -> PerformFetch -> PerformFetch
+wrapFetchInCatch reqs fetch =
   case fetch of
     SyncFetch io ->
       SyncFetch (io `Control.Exception.catch` handler)
@@ -456,6 +474,24 @@ wrapFetch reqs fetch =
         void $ tryTakeResult rvar
         putResult rvar (except e)
 
+wrapFetchInTimer :: PerformFetch -> IO (IORef Microseconds, PerformFetch)
+wrapFetchInTimer f = do
+  r <- newIORef 0
+  case f of
+    SyncFetch io -> return (r, SyncFetch (time io >>= writeIORef r))
+    AsyncFetch f -> do
+      inner_r <- newIORef 0
+      return (r, AsyncFetch $ \inner -> do
+        total <- time (f (time inner >>= writeIORef inner_r))
+        inner_t <- readIORef inner_r
+        writeIORef r (total - inner_t))
+
+time :: IO () -> IO Microseconds
+time io = microsecs . fst <$> timeIt io
+
+microsecs :: Double -> Microseconds
+microsecs t = round (t * 10^(6::Int))
+
 -- | Start all the async fetches first, then perform the sync fetches before
 -- getting the results of the async fetches.
 scheduleFetches :: [PerformFetch] -> IO()
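The AsyncFetch case of wrapFetchInTimer is the subtle part: the time charged to a data source is the total time of its outer action minus the time spent running the inner continuation it is handed, so nested user work is not billed to the source. The standalone sketch below illustrates that accounting; it is not part of the diff, and it uses Data.Time in place of Util.Time's internal timeIt, whose exact interface is assumed here.

import Control.Concurrent (threadDelay)
import Data.IORef (newIORef, readIORef, writeIORef)
import Data.Time.Clock (diffUTCTime, getCurrentTime)

type Microseconds = Int

-- Run an IO action and return how long it took, in microseconds
-- (a stand-in for `microsecs . fst <$> timeIt io` above).
timeIO :: IO () -> IO Microseconds
timeIO io = do
  t0 <- getCurrentTime
  io
  t1 <- getCurrentTime
  return (round (realToFrac (diffUTCTime t1 t0) * 1e6 :: Double))

main :: IO ()
main = do
  innerRef <- newIORef 0
  let inner     = threadDelay 30000                             -- pretend user work
      outer run = threadDelay 20000 >> run >> threadDelay 20000 -- pretend data-source work
  -- Time the whole fetch, while separately recording the inner action's time.
  total  <- timeIO (outer (timeIO inner >>= writeIORef innerRef))
  innerT <- readIORef innerRef
  putStrLn ("total: " ++ show total ++ "us, charged to the data source: "
            ++ show (total - innerT) ++ "us")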
@@ -13,6 +13,7 @@
 {-# LANGUAGE MultiParamTypeClasses #-}
 {-# LANGUAGE OverloadedStrings #-}
 {-# LANGUAGE Rank2Types #-}
+{-# LANGUAGE RecordWildCards #-}
 {-# LANGUAGE ScopedTypeVariables #-}
 {-# LANGUAGE TypeFamilies #-}
 
@@ -31,6 +32,8 @@ module Haxl.Core.Types (
   -- * Statistics
   Stats(..),
   RoundStats(..),
+  DataSourceRoundStats(..),
+  Microseconds,
   emptyStats,
   numRounds,
   numFetches,
@@ -65,15 +68,15 @@ module Haxl.Core.Types (
   ) where
 
 import Control.Applicative
+import Control.Concurrent.MVar
 import Control.Exception
-import Data.Typeable
-import Data.Text (Text)
+import Control.Monad
 import Data.Aeson
 import Data.Hashable
-import Control.Concurrent.MVar
-import Control.Monad
-import qualified Data.HashMap.Strict as HashMap
 import Data.HashMap.Strict (HashMap)
+import qualified Data.HashMap.Strict as HashMap
+import Data.Text (Text)
+import Data.Typeable (Typeable)
 
 #if __GLASGOW_HASKELL__ < 708
 import Haxl.Core.Util (tryReadMVar)
@@ -113,6 +116,8 @@ ifTrace flags i = when (trace flags >= i) . void
 ifReport :: (Functor m, Monad m) => Flags -> Int -> m a -> m ()
 ifReport flags i = when (report flags >= i) . void
 
+type Microseconds = Int
+
 -- | Stats that we collect along the way.
 newtype Stats = Stats [RoundStats]
   deriving ToJSON
@@ -120,11 +125,32 @@ newtype Stats = Stats [RoundStats]
 -- | Maps data source name to the number of requests made in that round.
 -- The map only contains entries for sources that made requests in that
 -- round.
-newtype RoundStats = RoundStats (HashMap Text Int)
-  deriving ToJSON
+data RoundStats = RoundStats
+  { roundTime :: Microseconds
+  , roundDataSources :: HashMap Text DataSourceRoundStats
+  }
+
+instance ToJSON RoundStats where
+  toJSON RoundStats{..} = object
+    [ "time" .= roundTime
+    , "dataSources" .= roundDataSources
+    ]
+
+-- | Detailed stats of each data source in each round.
+data DataSourceRoundStats = DataSourceRoundStats
+  { dataSourceFetches :: Int
+  , dataSourceTime :: Maybe Microseconds
+  }
+
+instance ToJSON DataSourceRoundStats where
+  toJSON DataSourceRoundStats{..} = object [k .= v | (k, Just v) <-
+    [ ("fetches", Just dataSourceFetches)
+    , ("time", dataSourceTime)
+    ]]
 
 fetchesInRound :: RoundStats -> Int
-fetchesInRound (RoundStats hm) = sum $ HashMap.elems hm
+fetchesInRound (RoundStats _ hm) =
+  sum $ map dataSourceFetches $ HashMap.elems hm
 
 emptyStats :: Stats
 emptyStats = Stats []
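To make the reporting format concrete, the following standalone sketch (not part of the diff) copies the two types above together with their ToJSON instances and prints what one round serializes to. It assumes only the aeson, text, and unordered-containers packages; JSON key order may differ.

{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
import Data.Aeson (ToJSON (..), encode, object, (.=))
import qualified Data.ByteString.Lazy.Char8 as BL
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HashMap
import Data.Text (Text)

type Microseconds = Int

data RoundStats = RoundStats
  { roundTime        :: Microseconds
  , roundDataSources :: HashMap Text DataSourceRoundStats
  }

instance ToJSON RoundStats where
  toJSON RoundStats{..} = object
    [ "time"        .= roundTime
    , "dataSources" .= roundDataSources
    ]

data DataSourceRoundStats = DataSourceRoundStats
  { dataSourceFetches :: Int
  , dataSourceTime    :: Maybe Microseconds
  }

-- "time" is only emitted when it was measured, i.e. when report >= 2.
instance ToJSON DataSourceRoundStats where
  toJSON DataSourceRoundStats{..} = object [k .= v | (k, Just v) <-
    [ ("fetches", Just dataSourceFetches)
    , ("time", dataSourceTime)
    ]]

main :: IO ()
main = BL.putStrLn $ encode $ RoundStats
  { roundTime = 152340
  , roundDataSources = HashMap.fromList
      [ ("ExampleDataSource", DataSourceRoundStats 2 (Just 1200)) ]
  }
-- Prints something like:
-- {"time":152340,"dataSources":{"ExampleDataSource":{"fetches":2,"time":1200}}}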
@@ -1,4 +1,4 @@
-{-# LANGUAGE OverloadedStrings, RebindableSyntax #-}
+{-# LANGUAGE OverloadedStrings, RebindableSyntax, MultiWayIf #-}
 module TestExampleDataSource (tests) where
 
 import Haxl.Prelude as Haxl
@@ -47,12 +47,15 @@ exampleTest = TestCase $ do
   assertEqual "runTests" x (2 + 3)
 
   -- Should be just one fetching round:
-  Stats s <- readIORef (statsRef env)
-  assertEqual "rounds" 1 (length s)
+  Stats stats <- readIORef (statsRef env)
+  assertEqual "rounds" 1 (length stats)
 
   -- With two fetches:
-  let reqs = case head s of RoundStats m -> HashMap.lookup "ExampleDataSource" m
-  assertEqual "reqs" (Just 2) reqs
+  assertBool "reqs" $
+    if | RoundStats { roundDataSources = m } : _ <- stats,
+         Just (DataSourceRoundStats { dataSourceFetches = 2 })
+           <- HashMap.lookup "ExampleDataSource" m -> True
+       | otherwise -> False
 
   -- Test side-effect ordering
 