ref: Use ExceptT String IO a instead of IO (Either String a).

This increases composability and avoids some ugly case handling. We
re-export runExceptT in Hledger.Read.

The return types of the following functions have been changed from
IO (Either String a) to ExceptT String IO a: readJournal,
readJournalFiles, readJournalFile. If this causes a problem, you can
get the old behaviour back by calling runExceptT on the result.

Or you can use the easy functions readJournal', readJournalFiles', and
readJournalFile', which assume default options and run directly in the
IO monad, raising an error on failure.
Stephen Morgan 2022-03-12 21:16:33 +11:00 committed by Simon Michael
parent ce169d0543
commit 603b2e9f09
17 changed files with 229 additions and 221 deletions
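
For illustration, a minimal sketch of the migration described above (the file path and the surrounding main are invented; readJournalFile, readJournalFile' and definputopts are the hledger-lib names used in this commit):

import Control.Monad.Except (runExceptT)
import Hledger

main :: IO ()
main = do
  -- readJournalFile now returns ExceptT String IO Journal;
  -- runExceptT recovers the old IO (Either String Journal) shape.
  ej <- runExceptT $ readJournalFile definputopts "examples/sample.journal"
  case ej of
    Left err -> putStrLn ("journal error: " ++ err)
    Right _j -> putStrLn "journal parsed"
  -- Or assume default options and fail directly in IO:
  -- _j <- readJournalFile' "examples/sample.journal"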

View File

@@ -98,6 +98,7 @@ module Main where
import Control.Arrow (first)
import Control.Monad (mplus, mzero, unless, void)
import Control.Monad.Except (runExceptT)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.State.Strict (runStateT)
import Data.String (fromString)
@@ -167,7 +168,7 @@ main :: IO ()
main = do
opts <- execParser args
journalFile <- maybe H.defaultJournalPath pure (file opts)
ejournal <- H.readJournalFile (set H.ignore_assertions (ignoreAssertions opts) H.definputopts) journalFile
ejournal <- runExceptT $ H.readJournalFile (set H.ignore_assertions (ignoreAssertions opts) H.definputopts) journalFile
case ejournal of
Right j -> do
(journal, starting) <- fixupJournal opts j

View File

@@ -22,14 +22,20 @@ module Hledger.Read (
PrefixedFilePath,
defaultJournal,
defaultJournalPath,
readJournalFiles,
readJournalFile,
requireJournalFileExists,
ensureJournalFileExists,
-- * Journal parsing
readJournal,
readJournalFile,
readJournalFiles,
runExceptT,
-- * Easy journal parsing
readJournal',
readJournalFile',
readJournalFiles',
orDieTrying,
-- * Re-exported
JournalReader.tmpostingrulep,
@@ -45,10 +51,9 @@ module Hledger.Read (
) where
--- ** imports
import Control.Arrow (right)
import qualified Control.Exception as C
import Control.Monad (unless, when)
import "mtl" Control.Monad.Except (runExceptT)
import "mtl" Control.Monad.Except (ExceptT(..), runExceptT, liftIO)
import Data.Default (def)
import Data.Foldable (asum)
import Data.List (group, sort, sortBy)
@@ -89,36 +94,9 @@ journalEnvVar = "LEDGER_FILE"
journalEnvVar2 = "LEDGER"
journalDefaultFilename = ".hledger.journal"
-- | Read a Journal from the given text, assuming journal format; or
-- throw an error.
readJournal' :: Text -> IO Journal
readJournal' t = readJournal definputopts Nothing t >>= either error' return -- PARTIAL:
-- | @readJournal iopts mfile txt@
--
-- Read a Journal from some text, or return an error message.
--
-- The reader (data format) is chosen based on, in this order:
--
-- - a reader name provided in @iopts@
--
-- - a reader prefix in the @mfile@ path
--
-- - a file extension in @mfile@
--
-- If none of these is available, or if the reader name is unrecognised,
-- we use the journal reader. (We used to try all readers in this case;
-- since hledger 1.17, we prefer predictability.)
readJournal :: InputOpts -> Maybe FilePath -> Text -> IO (Either String Journal)
readJournal iopts mpath txt = do
let r :: Reader IO =
fromMaybe JournalReader.reader $ findReader (mformat_ iopts) mpath
dbg6IO "trying reader" (rFormat r)
(runExceptT . (rReadFn r) iopts (fromMaybe "(string)" mpath)) txt
-- | Read the default journal file specified by the environment, or raise an error.
defaultJournal :: IO Journal
defaultJournal = defaultJournalPath >>= readJournalFile definputopts >>= either error' return -- PARTIAL:
defaultJournal = defaultJournalPath >>= runExceptT . readJournalFile definputopts >>= either error' return -- PARTIAL:
-- | Get the default journal file path specified by the environment.
-- Like ledger, we look first for the LEDGER_FILE environment
@@ -144,17 +122,27 @@ defaultJournalPath = do
-- (journal:, csv:, timedot:, etc.).
type PrefixedFilePath = FilePath
-- | Read a Journal from each specified file path and combine them into one.
-- Or, return the first error message.
-- | @readJournal iopts mfile txt@
--
-- Combining Journals means concatenating them, basically.
-- The parse state resets at the start of each file, which means that
-- directives & aliases do not affect subsequent sibling or parent files.
-- They do affect included child files though.
-- Also the final parse state saved in the Journal does span all files.
readJournalFiles :: InputOpts -> [PrefixedFilePath] -> IO (Either String Journal)
readJournalFiles iopts =
fmap (right (maybe def sconcat . nonEmpty) . sequence) . mapM (readJournalFile iopts)
-- Read a Journal from some text, or return an error message.
--
-- The reader (data format) is chosen based on, in this order:
--
-- - a reader name provided in @iopts@
--
-- - a reader prefix in the @mfile@ path
--
-- - a file extension in @mfile@
--
-- If none of these is available, or if the reader name is unrecognised,
-- we use the journal reader. (We used to try all readers in this case;
-- since hledger 1.17, we prefer predictability.)
readJournal :: InputOpts -> Maybe FilePath -> Text -> ExceptT String IO Journal
readJournal iopts mpath txt = do
let r :: Reader IO =
fromMaybe JournalReader.reader $ findReader (mformat_ iopts) mpath
dbg6IO "trying reader" (rFormat r)
rReadFn r iopts (fromMaybe "(string)" mpath) txt
-- | Read a Journal from this file, or from stdin if the file path is -,
-- or return an error message. The file path can have a READER: prefix.
@@ -167,26 +155,56 @@ readJournalFiles iopts =
--
-- The input options can also configure balance assertion checking, automated posting
-- generation, a rules file for converting CSV data, etc.
readJournalFile :: InputOpts -> PrefixedFilePath -> IO (Either String Journal)
readJournalFile :: InputOpts -> PrefixedFilePath -> ExceptT String IO Journal
readJournalFile iopts prefixedfile = do
let
(mfmt, f) = splitReaderPrefix prefixedfile
iopts' = iopts{mformat_=asum [mfmt, mformat_ iopts]}
requireJournalFileExists f
t <- readFileOrStdinPortably f
liftIO $ requireJournalFileExists f
t <- liftIO $ readFileOrStdinPortably f
-- <- T.readFile f -- or without line ending translation, for testing
ej <- readJournal iopts' (Just f) t
case ej of
Left e -> return $ Left e
Right j | new_ iopts -> do
ds <- previousLatestDates f
let (newj, newds) = journalFilterSinceLatestDates ds j
when (new_save_ iopts && not (null newds)) $ saveLatestDates newds f
return $ Right newj
Right j -> return $ Right j
j <- readJournal iopts' (Just f) t
if new_ iopts
then do
ds <- liftIO $ previousLatestDates f
let (newj, newds) = journalFilterSinceLatestDates ds j
when (new_save_ iopts && not (null newds)) . liftIO $ saveLatestDates newds f
return newj
else return j
-- | Read a Journal from each specified file path and combine them into one.
-- Or, return the first error message.
--
-- Combining Journals means concatenating them, basically.
-- The parse state resets at the start of each file, which means that
-- directives & aliases do not affect subsequent sibling or parent files.
-- They do affect included child files though.
-- Also the final parse state saved in the Journal does span all files.
readJournalFiles :: InputOpts -> [PrefixedFilePath] -> ExceptT String IO Journal
readJournalFiles iopts =
fmap (maybe def sconcat . nonEmpty) . mapM (readJournalFile iopts)
-- | An easy version of 'readJournal' which assumes default options, and fails
-- in the IO monad.
readJournal' :: Text -> IO Journal
readJournal' = orDieTrying . readJournal definputopts Nothing
-- | An easy version of 'readJournalFile' which assumes default options, and fails
-- in the IO monad.
readJournalFile' :: PrefixedFilePath -> IO Journal
readJournalFile' = orDieTrying . readJournalFile definputopts
-- | An easy version of 'readJournalFiles' which assumes default options, and fails
-- in the IO monad.
readJournalFiles' :: [PrefixedFilePath] -> IO Journal
readJournalFiles' = orDieTrying . readJournalFiles definputopts
--- ** utilities
-- | Extract ExceptT to the IO monad, failing with an error message if necessary.
orDieTrying :: ExceptT String IO a -> IO a
orDieTrying a = either fail return =<< runExceptT a
-- | If the specified journal file does not exist (and is not "-"),
-- give a helpful error and quit.
requireJournalFileExists :: FilePath -> IO ()
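
As a rough sketch of the composability gain (loadTwo and the file paths are invented; readJournalFile, definputopts, Journal and jtxns are from hledger-lib): independent reads and extra checks now sequence in a single ExceptT do-block and short-circuit on the first error, with one runExceptT (or orDieTrying above) at the outer edge instead of nested case expressions.

import Control.Monad.Except (ExceptT, runExceptT, throwError)
import Hledger

loadTwo :: FilePath -> FilePath -> ExceptT String IO (Journal, Journal)
loadTwo f1 f2 = do
  j1 <- readJournalFile definputopts f1            -- short-circuits on a parse error
  j2 <- readJournalFile definputopts f2
  if null (jtxns j1) && null (jtxns j2)
    then throwError "both journals are empty"      -- custom errors mix in freely
    else return (j1, j2)

main :: IO ()
main = runExceptT (loadTwo "a.journal" "b.journal")
       >>= either putStrLn (\_ -> putStrLn "loaded both journals")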

View File

@@ -38,15 +38,15 @@ where
--- ** imports
import Control.Applicative (liftA2)
import Control.Exception (IOException, handle, throw)
import Control.Monad (unless, when)
import Control.Monad.Except (ExceptT, throwError)
import Control.Monad.Except (ExceptT(..), liftEither, throwError)
import qualified Control.Monad.Fail as Fail
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.State.Strict (StateT, get, modify', evalStateT)
import Control.Monad.Trans.Class (lift)
import Data.Char (toLower, isDigit, isSpace, isAlphaNum, ord)
import Data.Bifunctor (first)
import Data.Functor ((<&>))
import Data.List (elemIndex, foldl', intersperse, mapAccumL, nub, sortBy)
import Data.Maybe (catMaybes, fromMaybe, isJust)
import Data.MemoUgly (memo)
@@ -60,7 +60,7 @@ import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Builder as TB
import Data.Time.Calendar (Day)
import Data.Time.Format (parseTimeM, defaultTimeLocale)
import Safe (atMay, headMay, lastMay, readDef, readMay)
import Safe (atMay, headMay, lastMay, readMay)
import System.Directory (doesFileExist)
import System.FilePath ((</>), takeDirectory, takeExtension, takeFileName)
import qualified Data.Csv as Cassava
@@ -103,23 +103,20 @@ reader = Reader
parse :: InputOpts -> FilePath -> Text -> ExceptT String IO Journal
parse iopts f t = do
let rulesfile = mrules_file_ iopts
r <- liftIO $ readJournalFromCsv rulesfile f t
case r of Left e -> throwError e
Right pj ->
-- journalFinalise assumes the journal's items are
-- reversed, as produced by JournalReader's parser.
-- But here they are already properly ordered. So we'd
-- better preemptively reverse them once more. XXX inefficient
let pj' = journalReverse pj
-- apply any command line account aliases. Can fail with a bad replacement pattern.
in case journalApplyAliases (aliasesFromOpts iopts) pj' of
Left e -> throwError e
Right pj'' -> journalFinalise iopts{balancingopts_=(balancingopts_ iopts){ignore_assertions_=True}} f t pj''
readJournalFromCsv rulesfile f t
-- journalFinalise assumes the journal's items are
-- reversed, as produced by JournalReader's parser.
-- But here they are already properly ordered. So we'd
-- better preemptively reverse them once more. XXX inefficient
<&> journalReverse
-- apply any command line account aliases. Can fail with a bad replacement pattern.
>>= liftEither . journalApplyAliases (aliasesFromOpts iopts)
>>= journalFinalise iopts{balancingopts_=(balancingopts_ iopts){ignore_assertions_=True}} f t
--- ** reading rules files
--- *** rules utilities
-- Not used by hledger; just for lib users,
-- Not used by hledger; just for lib users,
-- | A pure-exception-throwing IO action that parses this file's content
-- as CSV conversion rules, interpolating any included files first,
-- and runs some extra validation checks.
@@ -226,7 +223,7 @@ parseCsvRules = runParser (evalStateT rulesp defrules)
-- | Return the validated rules, or an error.
validateRules :: CsvRules -> Either String CsvRules
validateRules rules = do
unless (isAssigned "date") $ Left "Please specify (at top level) the date field. Eg: date %1\n"
unless (isAssigned "date") $ Left "Please specify (at top level) the date field. Eg: date %1"
Right rules
where
isAssigned f = isJust $ getEffectiveAssignment rules [] f
@@ -568,7 +565,7 @@ conditionalblockp = do
, fmap Just fieldassignmentp
])
when (null as) $
customFailure $ parseErrorAt start $ "start of conditional block found, but no assignment rules afterward\n(assignment rules in a conditional block should be indented)\n"
customFailure $ parseErrorAt start $ "start of conditional block found, but no assignment rules afterward\n(assignment rules in a conditional block should be indented)"
return $ CB{cbMatchers=ms, cbAssignments=as}
<?> "conditional block"
@@ -588,10 +585,10 @@ conditionaltablep = do
m <- matcherp' (char sep >> return ())
vs <- T.split (==sep) . T.pack <$> lift restofline
if (length vs /= length fields)
then customFailure $ parseErrorAt off $ ((printf "line of conditional table should have %d values, but this one has only %d\n" (length fields) (length vs)) :: String)
then customFailure $ parseErrorAt off $ ((printf "line of conditional table should have %d values, but this one has only %d" (length fields) (length vs)) :: String)
else return (m,vs)
when (null body) $
customFailure $ parseErrorAt start $ "start of conditional table found, but no assignment rules afterward\n"
customFailure $ parseErrorAt start $ "start of conditional table found, but no assignment rules afterward"
return $ flip map body $ \(m,vs) ->
CB{cbMatchers=[m], cbAssignments=zip fields vs}
<?> "conditional table"
@@ -614,7 +611,7 @@ recordmatcherp end = do
r <- regexp end
return $ RecordMatcher p r
-- when (null ps) $
-- Fail.fail "start of record matcher found, but no patterns afterward\n(patterns should not be indented)\n"
-- Fail.fail "start of record matcher found, but no patterns afterward\n(patterns should not be indented)"
<?> "record matcher"
-- | A single matcher for a specific field. A csv field reference
@@ -686,93 +683,85 @@ regexp end = do
-- 4. if the rules file didn't exist, create it with the default rules and filename
--
-- 5. return the transactions as a Journal
--
readJournalFromCsv :: Maybe FilePath -> FilePath -> Text -> IO (Either String Journal)
readJournalFromCsv Nothing "-" _ = return $ Left "please use --rules-file when reading CSV from stdin"
readJournalFromCsv mrulesfile csvfile csvdata =
handle (\(e::IOException) -> return $ Left $ show e) $ do
--
readJournalFromCsv :: Maybe FilePath -> FilePath -> Text -> ExceptT String IO Journal
readJournalFromCsv Nothing "-" _ = throwError "please use --rules-file when reading CSV from stdin"
readJournalFromCsv mrulesfile csvfile csvdata = do
-- parse the csv rules
let rulesfile = fromMaybe (rulesFileFor csvfile) mrulesfile
rulesfileexists <- liftIO $ doesFileExist rulesfile
rulestext <- liftIO $ if rulesfileexists
then do
dbg6IO "using conversion rules file" rulesfile
readFilePortably rulesfile >>= expandIncludes (takeDirectory rulesfile)
else
return $ defaultRulesText rulesfile
rules <- liftEither $ parseAndValidateCsvRules rulesfile rulestext
dbg6IO "csv rules" rules
-- make and throw an IO exception.. which we catch and convert to an Either above ?
let throwerr = throw . userError
-- parse the skip directive's value, if any
skiplines <- case getDirective "skip" rules of
Nothing -> return 0
Just "" -> return 1
Just s -> maybe (throwError $ "could not parse skip value: " ++ show s) return . readMay $ T.unpack s
-- parse the csv rules
let rulesfile = fromMaybe (rulesFileFor csvfile) mrulesfile
rulesfileexists <- doesFileExist rulesfile
rulestext <-
if rulesfileexists
then do
dbg6IO "using conversion rules file" rulesfile
readFilePortably rulesfile >>= expandIncludes (takeDirectory rulesfile)
else
return $ defaultRulesText rulesfile
rules <- either throwerr return $ parseAndValidateCsvRules rulesfile rulestext
dbg6IO "csv rules" rules
-- parse csv
let
-- parsec seems to fail if you pass it "-" here TODO: try again with megaparsec
parsecfilename = if csvfile == "-" then "(stdin)" else csvfile
separator =
case getDirective "separator" rules >>= parseSeparator of
Just c -> c
_ | ext == "ssv" -> ';'
_ | ext == "tsv" -> '\t'
_ -> ','
where
ext = map toLower $ drop 1 $ takeExtension csvfile
dbg6IO "using separator" separator
csv <- dbg7 "parseCsv" <$> parseCsv separator parsecfilename csvdata
records <- liftEither $ dbg7 "validateCsv" <$> validateCsv rules skiplines csv
dbg6IO "first 3 csv records" $ take 3 records
-- parse the skip directive's value, if any
let skiplines = case getDirective "skip" rules of
Nothing -> 0
Just "" -> 1
Just s -> readDef (throwerr $ "could not parse skip value: " ++ show s) $ T.unpack s
-- identify header lines
-- let (headerlines, datalines) = identifyHeaderLines records
-- mfieldnames = lastMay headerlines
-- parse csv
let
-- parsec seems to fail if you pass it "-" here TODO: try again with megaparsec
parsecfilename = if csvfile == "-" then "(stdin)" else csvfile
separator =
case getDirective "separator" rules >>= parseSeparator of
Just c -> c
_ | ext == "ssv" -> ';'
_ | ext == "tsv" -> '\t'
_ -> ','
let
-- convert CSV records to transactions, saving the CSV line numbers for error positions
txns = dbg7 "csv txns" $ snd $ mapAccumL
(\pos r ->
let
SourcePos name line col = pos
line' = (mkPos . (+1) . unPos) line
pos' = SourcePos name line' col
in
(pos', transactionFromCsvRecord pos rules r)
)
(initialPos parsecfilename) records
-- Ensure transactions are ordered chronologically.
-- First, if the CSV records seem to be most-recent-first (because
-- there's an explicit "newest-first" directive, or there's more
-- than one date and the first date is more recent than the last):
-- reverse them to get same-date transactions ordered chronologically.
txns' =
(if newestfirst || mdataseemsnewestfirst == Just True
then dbg7 "reversed csv txns" . reverse else id)
txns
where
ext = map toLower $ drop 1 $ takeExtension csvfile
dbg6IO "using separator" separator
records <- (either throwerr id .
dbg7 "validateCsv" . validateCsv rules skiplines .
dbg7 "parseCsv")
`fmap` parseCsv separator parsecfilename csvdata
dbg6IO "first 3 csv records" $ take 3 records
newestfirst = dbg6 "newestfirst" $ isJust $ getDirective "newest-first" rules
mdataseemsnewestfirst = dbg6 "mdataseemsnewestfirst" $
case nub $ map tdate txns of
ds | length ds > 1 -> Just $ head ds > last ds
_ -> Nothing
-- Second, sort by date.
txns'' = dbg7 "date-sorted csv txns" $ sortBy (comparing tdate) txns'
-- identify header lines
-- let (headerlines, datalines) = identifyHeaderLines records
-- mfieldnames = lastMay headerlines
liftIO $ when (not rulesfileexists) $ do
dbg1IO "creating conversion rules file" rulesfile
T.writeFile rulesfile rulestext
let
-- convert CSV records to transactions, saving the CSV line numbers for error positions
txns = dbg7 "csv txns" $ snd $ mapAccumL
(\pos r ->
let
SourcePos name line col = pos
line' = (mkPos . (+1) . unPos) line
pos' = SourcePos name line' col
in
(pos', transactionFromCsvRecord pos rules r)
)
(initialPos parsecfilename) records
-- Ensure transactions are ordered chronologically.
-- First, if the CSV records seem to be most-recent-first (because
-- there's an explicit "newest-first" directive, or there's more
-- than one date and the first date is more recent than the last):
-- reverse them to get same-date transactions ordered chronologically.
txns' =
(if newestfirst || mdataseemsnewestfirst == Just True
then dbg7 "reversed csv txns" . reverse else id)
txns
where
newestfirst = dbg6 "newestfirst" $ isJust $ getDirective "newest-first" rules
mdataseemsnewestfirst = dbg6 "mdataseemsnewestfirst" $
case nub $ map tdate txns of
ds | length ds > 1 -> Just $ head ds > last ds
_ -> Nothing
-- Second, sort by date.
txns'' = dbg7 "date-sorted csv txns" $ sortBy (comparing tdate) txns'
when (not rulesfileexists) $ do
dbg1IO "creating conversion rules file" rulesfile
T.writeFile rulesfile rulestext
return $ Right nulljournal{jtxns=txns''}
return nulljournal{jtxns=txns''}
-- | Parse special separator names TAB and SPACE, or return the first
-- character. Return Nothing on empty string
@@ -782,8 +771,8 @@ parseSeparator = specials . T.toLower
specials "tab" = Just '\t'
specials xs = fst <$> T.uncons xs
parseCsv :: Char -> FilePath -> Text -> IO (Either String CSV)
parseCsv separator filePath csvdata =
parseCsv :: Char -> FilePath -> Text -> ExceptT String IO CSV
parseCsv separator filePath csvdata = ExceptT $
case filePath of
"-" -> parseCassava separator "(stdin)" <$> T.getContents
_ -> return $ if T.null csvdata then Right mempty else parseCassava separator filePath csvdata
@@ -811,9 +800,8 @@ printCSV = TB.toLazyText . unlinesB . map printRecord
printField = wrap "\"" "\"" . T.replace "\"" "\"\""
-- | Return the cleaned up and validated CSV data (can be empty), or an error.
validateCsv :: CsvRules -> Int -> Either String CSV -> Either String [CsvRecord]
validateCsv _ _ (Left err) = Left err
validateCsv rules numhdrlines (Right rs) = validate $ applyConditionalSkips $ drop numhdrlines $ filternulls rs
validateCsv :: CsvRules -> Int -> CSV -> Either String [CsvRecord]
validateCsv rules numhdrlines = validate . applyConditionalSkips . drop numhdrlines . filternulls
where
filternulls = filter (/=[""])
skipCount r =
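
The CsvReader changes above use a handful of standard ways to get into ExceptT String IO. A small self-contained sketch of those patterns (pureCheck and legacyIO are invented stand-ins; the combinators are the mtl ones imported in the diff):

import Control.Monad.Except (ExceptT(..), liftEither, liftIO, throwError)

pureCheck :: Int -> Either String Int     -- a pure Either step, like parseAndValidateCsvRules
pureCheck n = if n < 0 then Left "negative" else Right n

legacyIO :: IO (Either String Int)        -- an old-style IO (Either ...) action
legacyIO = return (Right 42)

combined :: ExceptT String IO Int
combined = do
  n <- ExceptT legacyIO                   -- wrap IO (Either e a) with the ExceptT constructor
  m <- liftEither (pureCheck n)           -- lift a pure Either with liftEither
  liftIO (putStrLn ("got " ++ show m))    -- lift plain IO with liftIO
  if m > 100 then throwError "too big"    -- raise an error directly with throwError
             else return m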

View File

@@ -15,7 +15,7 @@ where
import Brick
-- import Brick.Widgets.Border ("border")
import Control.Monad
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Except (liftIO)
import Data.Time.Calendar (Day)
import Data.Void (Void)
import Graphics.Vty (Event(..),Key(..),Modifier(..))
@@ -155,7 +155,7 @@ uiReloadJournal :: CliOpts -> Day -> UIState -> IO UIState
uiReloadJournal copts d ui = do
ej <-
let copts' = enableForecastPreservingPeriod ui copts
in journalReload copts'
in runExceptT $ journalReload copts'
return $ case ej of
Right j -> regenerateScreens j d ui
Left err ->
@@ -168,13 +168,11 @@ uiReloadJournal copts d ui = do
-- since the provided options or today-date may have changed.
uiReloadJournalIfChanged :: CliOpts -> Day -> Journal -> UIState -> IO UIState
uiReloadJournalIfChanged copts d j ui = do
(ej, _changed) <-
let copts' = enableForecastPreservingPeriod ui copts
in journalReloadIfChanged copts' d j
let copts' = enableForecastPreservingPeriod ui copts
ej <- runExceptT $ journalReloadIfChanged copts' d j
return $ case ej of
Right j' -> regenerateScreens j' d ui
Left err ->
case ui of
Right (j', _) -> regenerateScreens j' d ui
Left err -> case ui of
UIState{aScreen=s@ErrorScreen{}} -> ui{aScreen=s{esError=err}}
_ -> screenEnter d errorScreen{esError=err} ui

View File

@@ -9,7 +9,7 @@ module Hledger.UI.TransactionScreen
) where
import Control.Monad
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Except (liftIO)
import Data.List
import Data.Maybe
import qualified Data.Text as T
@@ -174,7 +174,7 @@ tsHandle ui@UIState{aScreen=TransactionScreen{tsTransaction=(i,t), tsTransaction
p = reportPeriod ui
e | e `elem` [VtyEvent (EvKey (KChar 'g') []), AppEvent FileChange] -> do
-- plog (if e == AppEvent FileChange then "file change" else "manual reload") "" `seq` return ()
ej <- liftIO $ journalReload copts
ej <- liftIO . runExceptT $ journalReload copts
case ej of
Left err -> continue $ screenEnter d errorScreen{esError=err} ui
Right j' -> continue $ regenerateScreens j' d ui

View File

@@ -1,6 +1,6 @@
cabal-version: 1.12
-- This file has been generated from package.yaml by hpack version 0.34.4.
-- This file has been generated from package.yaml by hpack version 0.34.6.
--
-- see: https://github.com/sol/hpack
@@ -82,6 +82,7 @@ executable hledger-ui
, megaparsec >=7.0.0 && <9.3
, microlens >=0.4
, microlens-platform >=0.2.3.1
, mtl >=2.2.1
, process >=1.2
, safe >=0.2
, split >=0.1

View File

@@ -57,6 +57,7 @@ dependencies:
- microlens >=0.4
- microlens-platform >=0.2.3.1
- megaparsec >=7.0.0 && <9.3
- mtl >=2.2.1
- process >=1.2
- safe >=0.2
- split >=0.1

View File

@@ -17,6 +17,7 @@ module Hledger.Web.Foundation where
import Control.Applicative ((<|>))
import Control.Monad (join, when)
import Control.Monad.Except (runExceptT)
import qualified Data.ByteString.Char8 as BC
import Data.Traversable (for)
import Data.IORef (IORef, readIORef, writeIORef)
@@ -256,16 +257,16 @@ shouldShowSidebar = do
-- ui message.
getCurrentJournal :: IORef Journal -> CliOpts -> Day -> Handler (Journal, Maybe String)
getCurrentJournal jref opts d = do
-- XXX put this inside atomicModifyIORef' for thread safety
j <- liftIO (readIORef jref)
(ej, changed) <- liftIO $ journalReloadIfChanged opts d j
-- re-apply any initial filter specified at startup
let initq = _rsQuery $ reportspec_ opts
case (changed, filterJournalTransactions initq <$> ej) of
(False, _) -> return (j, Nothing)
(True, Right j') -> do
liftIO $ writeIORef jref j'
return (j',Nothing)
(True, Left e) -> do
-- XXX put this inside atomicModifyIORef' for thread safety
j <- liftIO (readIORef jref)
ej <- liftIO . runExceptT $ journalReloadIfChanged opts d j
case ej of
Left e -> do
setMessage "error while reading journal"
return (j, Just e)
Right (j', True) -> do
liftIO . writeIORef jref $ filterJournalTransactions initq j'
return (j',Nothing)
Right (_, False) -> return (j, Nothing)

View File

@@ -10,6 +10,7 @@ module Hledger.Web.Handler.EditR
, postEditR
) where
import Control.Monad.Except (runExceptT)
import Hledger.Web.Import
import Hledger.Web.Widget.Common
(fromFormSuccess, helplink, journalFile404, writeJournalTextIfValidAndChanged)
@@ -36,7 +37,7 @@ postEditR f = do
(f', txt) <- journalFile404 f j
((res, view), enctype) <- runFormPost (editForm f' txt)
newtxt <- fromFormSuccess (showForm view enctype) res
writeJournalTextIfValidAndChanged f newtxt >>= \case
runExceptT (writeJournalTextIfValidAndChanged f newtxt) >>= \case
Left e -> do
setMessage $ "Failed to load journal: " <> toHtml e
showForm view enctype

View File

@@ -9,6 +9,7 @@ module Hledger.Web.Handler.UploadR
, postUploadR
) where
import Control.Monad.Except (runExceptT)
import qualified Data.ByteString.Lazy as BL
import Data.Conduit (connect)
import Data.Conduit.Binary (sinkLbs)
@@ -52,7 +53,7 @@ postUploadR f = do
"where the transcoding should be handled by the browser."
showForm view enctype
Right newtxt -> return newtxt
writeJournalTextIfValidAndChanged f newtxt >>= \case
runExceptT (writeJournalTextIfValidAndChanged f newtxt) >>= \case
Left e -> do
setMessage $ "Failed to load journal: " <> toHtml e
showForm view enctype

View File

@@ -4,7 +4,6 @@ module Hledger.Web.Test (
hledgerWebTest
) where
import Control.Monad.Except (runExceptT)
import qualified Data.Text as T
import Test.Hspec (hspec)
import Yesod.Default.Config

View File

@@ -19,6 +19,7 @@ module Hledger.Web.Widget.Common
, replaceInacct
) where
import Control.Monad.Except (ExceptT, mapExceptT)
import Data.Foldable (find, for_)
import Data.List (elemIndex)
import Data.Text (Text)
@@ -59,17 +60,15 @@ fromFormSuccess _ (FormSuccess a) = pure a
-- The file will be written (if changed) with the current system's native
-- line endings (see writeFileWithBackupIfChanged).
--
writeJournalTextIfValidAndChanged :: MonadHandler m => FilePath -> Text -> m (Either String ())
writeJournalTextIfValidAndChanged f t = do
writeJournalTextIfValidAndChanged :: MonadHandler m => FilePath -> Text -> ExceptT String m ()
writeJournalTextIfValidAndChanged f t = mapExceptT liftIO $ do
-- Ensure unix line endings, since both readJournal (cf
-- formatdirectivep, #1194) writeFileWithBackupIfChanged require them.
-- XXX klunky. Any equivalent of "hSetNewlineMode h universalNewlineMode" for form posts ?
let t' = T.replace "\r" "" t
liftIO (readJournal definputopts (Just f) t') >>= \case
Left e -> return (Left e)
Right _ -> do
_ <- liftIO (writeFileWithBackupIfChanged f t')
return (Right ())
j <- readJournal definputopts (Just f) t'
_ <- liftIO $ j `seq` writeFileWithBackupIfChanged f t' -- Only write backup if the journal didn't error
return ()
-- | Link to a topic in the manual.
helplink :: Text -> Text -> HtmlUrl r
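
The new writeJournalTextIfValidAndChanged builds its body in ExceptT String IO and then re-bases it with mapExceptT liftIO so it runs in any MonadIO monad (such as Yesod's handler monad). A minimal sketch of that trick, with invented names ioStep and generalised:

import Control.Monad.Except (ExceptT, liftIO, mapExceptT)
import Control.Monad.IO.Class (MonadIO)

ioStep :: ExceptT String IO ()
ioStep = liftIO (putStrLn "doing some IO work")

-- Here liftIO is used at type IO (Either String ()) -> m (Either String ()),
-- so mapExceptT swaps the base monad from IO to any MonadIO m.
generalised :: MonadIO m => ExceptT String m ()
generalised = mapExceptT liftIO ioStep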

View File

@@ -292,8 +292,8 @@ tests_Commands = testGroup "Commands" [
let
ignoresourcepos j = j{jtxns=map (\t -> t{tsourcepos=nullsourcepos}) (jtxns j)}
sameParse str1 str2 = do
j1 <- readJournal definputopts Nothing str1 >>= either error' (return . ignoresourcepos) -- PARTIAL:
j2 <- readJournal definputopts Nothing str2 >>= either error' (return . ignoresourcepos)
j1 <- ignoresourcepos <$> readJournal' str1 -- PARTIAL:
j2 <- ignoresourcepos <$> readJournal' str2 -- PARTIAL:
j1 @?= j2{jlastreadtime=jlastreadtime j1, jfiles=jfiles j1} --, jparsestate=jparsestate j1}
sameParse
("2008/12/07 One\n alpha $-1\n beta $1\n" <>
@@ -310,19 +310,19 @@
)
,testCase "preserves \"virtual\" posting type" $ do
j <- readJournal definputopts Nothing "apply account test\n2008/12/07 One\n (from) $-1\n (to) $1\n" >>= either error' return -- PARTIAL:
j <- readJournal' "apply account test\n2008/12/07 One\n (from) $-1\n (to) $1\n" -- PARTIAL:
let p = head $ tpostings $ head $ jtxns j
paccount p @?= "test:from"
ptype p @?= VirtualPosting
]
,testCase "alias directive" $ do
j <- readJournal definputopts Nothing "!alias expenses = equity:draw:personal\n1/1\n (expenses:food) 1\n" >>= either error' return -- PARTIAL:
j <- readJournal' "!alias expenses = equity:draw:personal\n1/1\n (expenses:food) 1\n" -- PARTIAL:
let p = head $ tpostings $ head $ jtxns j
paccount p @?= "equity:draw:personal:food"
,testCase "Y default year directive" $ do
j <- readJournal definputopts Nothing defaultyear_journal_txt >>= either error' return -- PARTIAL:
j <- readJournal' defaultyear_journal_txt -- PARTIAL:
tdate (head $ jtxns j) @?= fromGregorian 2009 1 1
,testCase "ledgerAccountNames" $

View File

@@ -12,6 +12,7 @@ module Hledger.Cli.Commands.Diff (
,diff
) where
import Control.Monad.Except (runExceptT)
import Data.List.Extra ((\\), groupSortOn, nubBy, sortBy)
import Data.Function (on)
import Data.Ord (comparing)
@@ -82,7 +83,8 @@ matching ppl ppr = do
readJournalFile' :: FilePath -> IO Journal
readJournalFile' fn =
readJournalFile definputopts{balancingopts_=defbalancingopts{ignore_assertions_=True}} fn >>= either error' return -- PARTIAL:
runExceptT (readJournalFile definputopts{balancingopts_=defbalancingopts{ignore_assertions_=True}} fn)
>>= either error' return -- PARTIAL:
matchingPostings :: AccountName -> Journal -> [PostingWithPath]
matchingPostings acct j = filter ((== acct) . paccount . ppposting) $ allPostingsWithPath j

View File

@@ -8,6 +8,7 @@ module Hledger.Cli.Commands.Import (
where
import Control.Monad
import Control.Monad.Except (runExceptT)
import Data.List
import qualified Data.Text.IO as T
import Hledger
@@ -46,7 +47,7 @@ importcmd opts@CliOpts{rawopts_=rawopts,inputopts_=iopts} j = do
case inputfiles of
[] -> error' "please provide one or more input files as arguments" -- PARTIAL:
fs -> do
enewj <- readJournalFiles iopts' fs
enewj <- runExceptT $ readJournalFiles iopts' fs
case enewj of
Left e -> error' e
Right newj ->

View File

@@ -29,7 +29,9 @@ module Hledger.Cli.Utils
tests_Cli_Utils,
)
where
import Control.Exception as C
import Control.Monad.Except (ExceptT, runExceptT, liftIO)
import Data.List
import Data.Maybe
@@ -72,9 +74,8 @@ withJournalDo opts cmd = do
-- it's stdin, or it doesn't exist and we are adding. We read it strictly
-- to let the add command work.
journalpaths <- journalFilePathFromOpts opts
files <- readJournalFiles (inputopts_ opts) journalpaths
let transformed = journalTransform opts <$> files
either error' cmd transformed -- PARTIAL:
j <- runExceptT $ journalTransform opts <$> readJournalFiles (inputopts_ opts) journalpaths
either error' cmd j -- PARTIAL:
-- | Apply some extra post-parse transformations to the journal, if
-- specified by options. These happen after journal validation, but
@@ -132,29 +133,28 @@ writeOutputLazyText opts s = do
-- Returns a journal or error message, and a flag indicating whether
-- it was re-read or not. Like withJournalDo and journalReload, reads
-- the full journal, without filtering.
journalReloadIfChanged :: CliOpts -> Day -> Journal -> IO (Either String Journal, Bool)
journalReloadIfChanged :: CliOpts -> Day -> Journal -> ExceptT String IO (Journal, Bool)
journalReloadIfChanged opts _d j = do
let maybeChangedFilename f = do newer <- journalFileIsNewer j f
return $ if newer then Just f else Nothing
changedfiles <- catMaybes `fmap` mapM maybeChangedFilename (journalFilePaths j)
changedfiles <- liftIO $ catMaybes <$> mapM maybeChangedFilename (journalFilePaths j)
if not $ null changedfiles
then do
-- XXX not sure why we use cmdarg's verbosity here, but keep it for now
verbose <- isLoud
when (verbose || debugLevel >= 6) $ printf "%s has changed, reloading\n" (head changedfiles)
ej <- journalReload opts
return (ej, True)
verbose <- liftIO isLoud
when (verbose || debugLevel >= 6) . liftIO $ printf "%s has changed, reloading\n" (head changedfiles)
newj <- journalReload opts
return (newj, True)
else
return (Right j, False)
return (j, False)
-- | Re-read the journal file(s) specified by options, applying any
-- transformations specified by options. Or return an error string.
-- Reads the full journal, without filtering.
journalReload :: CliOpts -> IO (Either String Journal)
journalReload :: CliOpts -> ExceptT String IO Journal
journalReload opts = do
journalpaths <- dbg6 "reloading files" <$> journalFilePathFromOpts opts
files <- readJournalFiles (inputopts_ opts) journalpaths
return $ journalTransform opts <$> files
journalpaths <- liftIO $ dbg6 "reloading files" <$> journalFilePathFromOpts opts
journalTransform opts <$> readJournalFiles (inputopts_ opts) journalpaths
-- | Has the specified file changed since the journal was last read ?
-- Typically this is one of the journal's journalFilePaths. These are

View File

@@ -759,7 +759,7 @@ $ ./csvtest.sh
>=0
# 39. Insfficient number of values in tabular rules error
# 39. Insufficient number of values in tabular rules error
<
10/2009/09,Flubber Co,50
10/2009/09,Blubber Co,150
@@ -774,13 +774,12 @@ if|account2|comment
%description Flubber|acct|
$ ./csvtest.sh
>2
hledger: user error (input.rules:6:1:
hledger: input.rules:6:1:
|
6 | %amount 150|acct2
| ^
line of conditional table should have 2 values, but this one has only 1
)
>=1
# 40. unindented condition block error
@@ -797,14 +796,13 @@ account2 acct
comment cmt
$ ./csvtest.sh
>2
hledger: user error (input.rules:5:1:
hledger: input.rules:5:1:
|
5 | if Flubber
| ^
start of conditional block found, but no assignment rules afterward
(assignment rules in a conditional block should be indented)
)
>=1
# 41. Assignment to custom field (#1264) + spaces after the if (#1120)
@@ -824,13 +822,13 @@ if Flubber
account2 %myaccount2
$ ./csvtest.sh
>2
hledger: user error (input.rules:6:3:
hledger: input.rules:6:3:
|
6 | myaccount2 acct
| ^^^^^^^^^^^^
unexpected "myaccount2 a"
expecting conditional block
)
>=1
# 42. Rules override each other in the order listed in the file
@@ -872,14 +870,13 @@ if account2 comment
%description Flubber acct
$ ./csvtest.sh
>2
hledger: user error (input.rules:5:1:
hledger: input.rules:5:1:
|
5 | if account2 comment
| ^
start of conditional block found, but no assignment rules afterward
(assignment rules in a conditional block should be indented)
)
>=1
# 44. handle conditions with & operator