ref: Use ExceptT String IO a instead of IO (Either String a).

This increases composability and avoids some ugly case handling. We
re-export runExceptT from Hledger.Read.

The return types of the following functions have been changed from
IO (Either String a) to ExceptT String IO a:
readJournal, readJournalFile, readJournalFiles

If this causes a problem, you can get the old behaviour back by
calling runExceptT on the result.

Alternatively, you can use the easy functions readJournal',
readJournalFile', and readJournalFiles', which assume default options
and return in the IO monad.
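For illustration, a caller that previously pattern-matched on the
Either result could be adapted roughly like this (a minimal sketch;
the loadAndCount wrapper is hypothetical, not part of this commit):

    import Control.Monad.Except (runExceptT)
    import Hledger  -- readJournalFile, definputopts, jtxns, ...

    -- Recover the old IO (Either String Journal) shape with runExceptT.
    loadAndCount :: FilePath -> IO ()
    loadAndCount f = do
      ej <- runExceptT $ readJournalFile definputopts f
      case ej of
        Left err -> putStrLn $ "journal error: " ++ err
        Right j  -> print $ length $ jtxns j

    -- Or, with default options and errors raised in IO:
    -- j <- readJournalFile' f
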
Stephen Morgan, 2022-03-12 21:16:33 +11:00 (committed by Simon Michael)
parent ce169d0543
commit 603b2e9f09
17 changed files with 229 additions and 221 deletions

View File

@@ -98,6 +98,7 @@ module Main where
 import Control.Arrow (first)
 import Control.Monad (mplus, mzero, unless, void)
+import Control.Monad.Except (runExceptT)
 import Control.Monad.Trans.Class (lift)
 import Control.Monad.Trans.State.Strict (runStateT)
 import Data.String (fromString)
@@ -167,7 +168,7 @@ main :: IO ()
 main = do
   opts <- execParser args
   journalFile <- maybe H.defaultJournalPath pure (file opts)
-  ejournal <- H.readJournalFile (set H.ignore_assertions (ignoreAssertions opts) H.definputopts) journalFile
+  ejournal <- runExceptT $ H.readJournalFile (set H.ignore_assertions (ignoreAssertions opts) H.definputopts) journalFile
   case ejournal of
     Right j -> do
       (journal, starting) <- fixupJournal opts j

View File

@@ -22,14 +22,20 @@ module Hledger.Read (
   PrefixedFilePath,
   defaultJournal,
   defaultJournalPath,
-  readJournalFiles,
-  readJournalFile,
   requireJournalFileExists,
   ensureJournalFileExists,
   -- * Journal parsing
   readJournal,
+  readJournalFile,
+  readJournalFiles,
+  runExceptT,
+  -- * Easy journal parsing
   readJournal',
+  readJournalFile',
+  readJournalFiles',
+  orDieTrying,
   -- * Re-exported
   JournalReader.tmpostingrulep,
@@ -45,10 +51,9 @@ module Hledger.Read (
 ) where
 --- ** imports
-import Control.Arrow (right)
 import qualified Control.Exception as C
 import Control.Monad (unless, when)
-import "mtl" Control.Monad.Except (runExceptT)
+import "mtl" Control.Monad.Except (ExceptT(..), runExceptT, liftIO)
 import Data.Default (def)
 import Data.Foldable (asum)
 import Data.List (group, sort, sortBy)
@@ -89,36 +94,9 @@ journalEnvVar = "LEDGER_FILE"
 journalEnvVar2 = "LEDGER"
 journalDefaultFilename = ".hledger.journal"
--- | Read a Journal from the given text, assuming journal format; or
--- throw an error.
-readJournal' :: Text -> IO Journal
-readJournal' t = readJournal definputopts Nothing t >>= either error' return -- PARTIAL:
--- | @readJournal iopts mfile txt@
---
--- Read a Journal from some text, or return an error message.
---
--- The reader (data format) is chosen based on, in this order:
---
--- - a reader name provided in @iopts@
---
--- - a reader prefix in the @mfile@ path
---
--- - a file extension in @mfile@
---
--- If none of these is available, or if the reader name is unrecognised,
--- we use the journal reader. (We used to try all readers in this case;
--- since hledger 1.17, we prefer predictability.)
-readJournal :: InputOpts -> Maybe FilePath -> Text -> IO (Either String Journal)
-readJournal iopts mpath txt = do
-  let r :: Reader IO =
-        fromMaybe JournalReader.reader $ findReader (mformat_ iopts) mpath
-  dbg6IO "trying reader" (rFormat r)
-  (runExceptT . (rReadFn r) iopts (fromMaybe "(string)" mpath)) txt
 -- | Read the default journal file specified by the environment, or raise an error.
 defaultJournal :: IO Journal
-defaultJournal = defaultJournalPath >>= readJournalFile definputopts >>= either error' return -- PARTIAL:
+defaultJournal = defaultJournalPath >>= runExceptT . readJournalFile definputopts >>= either error' return -- PARTIAL:
 -- | Get the default journal file path specified by the environment.
 -- Like ledger, we look first for the LEDGER_FILE environment
@@ -144,17 +122,27 @@ defaultJournalPath = do
 -- (journal:, csv:, timedot:, etc.).
 type PrefixedFilePath = FilePath
--- | Read a Journal from each specified file path and combine them into one.
--- Or, return the first error message.
---
--- Combining Journals means concatenating them, basically.
--- The parse state resets at the start of each file, which means that
--- directives & aliases do not affect subsequent sibling or parent files.
--- They do affect included child files though.
--- Also the final parse state saved in the Journal does span all files.
-readJournalFiles :: InputOpts -> [PrefixedFilePath] -> IO (Either String Journal)
-readJournalFiles iopts =
-  fmap (right (maybe def sconcat . nonEmpty) . sequence) . mapM (readJournalFile iopts)
+-- | @readJournal iopts mfile txt@
+--
+-- Read a Journal from some text, or return an error message.
+--
+-- The reader (data format) is chosen based on, in this order:
+--
+-- - a reader name provided in @iopts@
+--
+-- - a reader prefix in the @mfile@ path
+--
+-- - a file extension in @mfile@
+--
+-- If none of these is available, or if the reader name is unrecognised,
+-- we use the journal reader. (We used to try all readers in this case;
+-- since hledger 1.17, we prefer predictability.)
+readJournal :: InputOpts -> Maybe FilePath -> Text -> ExceptT String IO Journal
+readJournal iopts mpath txt = do
+  let r :: Reader IO =
+        fromMaybe JournalReader.reader $ findReader (mformat_ iopts) mpath
+  dbg6IO "trying reader" (rFormat r)
+  rReadFn r iopts (fromMaybe "(string)" mpath) txt
 -- | Read a Journal from this file, or from stdin if the file path is -,
 -- or return an error message. The file path can have a READER: prefix.
@@ -167,26 +155,56 @@ readJournalFiles iopts =
 --
 -- The input options can also configure balance assertion checking, automated posting
 -- generation, a rules file for converting CSV data, etc.
-readJournalFile :: InputOpts -> PrefixedFilePath -> IO (Either String Journal)
+readJournalFile :: InputOpts -> PrefixedFilePath -> ExceptT String IO Journal
 readJournalFile iopts prefixedfile = do
   let
     (mfmt, f) = splitReaderPrefix prefixedfile
     iopts' = iopts{mformat_=asum [mfmt, mformat_ iopts]}
-  requireJournalFileExists f
-  t <- readFileOrStdinPortably f
+  liftIO $ requireJournalFileExists f
+  t <- liftIO $ readFileOrStdinPortably f
   -- <- T.readFile f -- or without line ending translation, for testing
-  ej <- readJournal iopts' (Just f) t
-  case ej of
-    Left e  -> return $ Left e
-    Right j | new_ iopts -> do
-      ds <- previousLatestDates f
-      let (newj, newds) = journalFilterSinceLatestDates ds j
-      when (new_save_ iopts && not (null newds)) $ saveLatestDates newds f
-      return $ Right newj
-    Right j -> return $ Right j
+  j <- readJournal iopts' (Just f) t
+  if new_ iopts
+    then do
+      ds <- liftIO $ previousLatestDates f
+      let (newj, newds) = journalFilterSinceLatestDates ds j
+      when (new_save_ iopts && not (null newds)) . liftIO $ saveLatestDates newds f
+      return newj
+    else return j
+-- | Read a Journal from each specified file path and combine them into one.
+-- Or, return the first error message.
+--
+-- Combining Journals means concatenating them, basically.
+-- The parse state resets at the start of each file, which means that
+-- directives & aliases do not affect subsequent sibling or parent files.
+-- They do affect included child files though.
+-- Also the final parse state saved in the Journal does span all files.
+readJournalFiles :: InputOpts -> [PrefixedFilePath] -> ExceptT String IO Journal
+readJournalFiles iopts =
+  fmap (maybe def sconcat . nonEmpty) . mapM (readJournalFile iopts)
+-- | An easy version of 'readJournal' which assumes default options, and fails
+-- in the IO monad.
+readJournal' :: Text -> IO Journal
+readJournal' = orDieTrying . readJournal definputopts Nothing
+-- | An easy version of 'readJournalFile' which assumes default options, and fails
+-- in the IO monad.
+readJournalFile' :: PrefixedFilePath -> IO Journal
+readJournalFile' = orDieTrying . readJournalFile definputopts
+-- | An easy version of 'readJournalFiles'' which assumes default options, and fails
+-- in the IO monad.
+readJournalFiles' :: [PrefixedFilePath] -> IO Journal
+readJournalFiles' = orDieTrying . readJournalFiles definputopts
 --- ** utilities
+-- | Extract ExceptT to the IO monad, failing with an error message if necessary.
+orDieTrying :: ExceptT String IO a -> IO a
+orDieTrying a = either fail return =<< runExceptT a
 -- | If the specified journal file does not exist (and is not "-"),
 -- give a helpful error and quit.
 requireJournalFileExists :: FilePath -> IO ()

View File

@@ -38,15 +38,15 @@ where
 --- ** imports
 import Control.Applicative (liftA2)
-import Control.Exception (IOException, handle, throw)
 import Control.Monad (unless, when)
-import Control.Monad.Except (ExceptT, throwError)
+import Control.Monad.Except (ExceptT(..), liftEither, throwError)
 import qualified Control.Monad.Fail as Fail
 import Control.Monad.IO.Class (MonadIO, liftIO)
 import Control.Monad.State.Strict (StateT, get, modify', evalStateT)
 import Control.Monad.Trans.Class (lift)
 import Data.Char (toLower, isDigit, isSpace, isAlphaNum, ord)
 import Data.Bifunctor (first)
+import Data.Functor ((<&>))
 import Data.List (elemIndex, foldl', intersperse, mapAccumL, nub, sortBy)
 import Data.Maybe (catMaybes, fromMaybe, isJust)
 import Data.MemoUgly (memo)
@@ -60,7 +60,7 @@ import qualified Data.Text.Lazy as TL
 import qualified Data.Text.Lazy.Builder as TB
 import Data.Time.Calendar (Day)
 import Data.Time.Format (parseTimeM, defaultTimeLocale)
-import Safe (atMay, headMay, lastMay, readDef, readMay)
+import Safe (atMay, headMay, lastMay, readMay)
 import System.Directory (doesFileExist)
 import System.FilePath ((</>), takeDirectory, takeExtension, takeFileName)
 import qualified Data.Csv as Cassava
@@ -103,23 +103,20 @@ reader = Reader
 parse :: InputOpts -> FilePath -> Text -> ExceptT String IO Journal
 parse iopts f t = do
   let rulesfile = mrules_file_ iopts
-  r <- liftIO $ readJournalFromCsv rulesfile f t
-  case r of Left e -> throwError e
-            Right pj ->
-              -- journalFinalise assumes the journal's items are
-              -- reversed, as produced by JournalReader's parser.
-              -- But here they are already properly ordered. So we'd
-              -- better preemptively reverse them once more. XXX inefficient
-              let pj' = journalReverse pj
-              -- apply any command line account aliases. Can fail with a bad replacement pattern.
-              in case journalApplyAliases (aliasesFromOpts iopts) pj' of
-                   Left e -> throwError e
-                   Right pj'' -> journalFinalise iopts{balancingopts_=(balancingopts_ iopts){ignore_assertions_=True}} f t pj''
+  readJournalFromCsv rulesfile f t
+  -- journalFinalise assumes the journal's items are
+  -- reversed, as produced by JournalReader's parser.
+  -- But here they are already properly ordered. So we'd
+  -- better preemptively reverse them once more. XXX inefficient
+    <&> journalReverse
+  -- apply any command line account aliases. Can fail with a bad replacement pattern.
+    >>= liftEither . journalApplyAliases (aliasesFromOpts iopts)
+    >>= journalFinalise iopts{balancingopts_=(balancingopts_ iopts){ignore_assertions_=True}} f t
 --- ** reading rules files
 --- *** rules utilities
 -- Not used by hledger; just for lib users,
 -- | An pure-exception-throwing IO action that parses this file's content
 -- as CSV conversion rules, interpolating any included files first,
 -- and runs some extra validation checks.
@@ -226,7 +223,7 @@ parseCsvRules = runParser (evalStateT rulesp defrules)
 -- | Return the validated rules, or an error.
 validateRules :: CsvRules -> Either String CsvRules
 validateRules rules = do
-  unless (isAssigned "date") $ Left "Please specify (at top level) the date field. Eg: date %1\n"
+  unless (isAssigned "date") $ Left "Please specify (at top level) the date field. Eg: date %1"
   Right rules
   where
     isAssigned f = isJust $ getEffectiveAssignment rules [] f
@@ -568,7 +565,7 @@ conditionalblockp = do
     , fmap Just fieldassignmentp
     ])
   when (null as) $
-    customFailure $ parseErrorAt start $ "start of conditional block found, but no assignment rules afterward\n(assignment rules in a conditional block should be indented)\n"
+    customFailure $ parseErrorAt start $ "start of conditional block found, but no assignment rules afterward\n(assignment rules in a conditional block should be indented)"
   return $ CB{cbMatchers=ms, cbAssignments=as}
   <?> "conditional block"
@@ -588,10 +585,10 @@ conditionaltablep = do
     m <- matcherp' (char sep >> return ())
     vs <- T.split (==sep) . T.pack <$> lift restofline
     if (length vs /= length fields)
-      then customFailure $ parseErrorAt off $ ((printf "line of conditional table should have %d values, but this one has only %d\n" (length fields) (length vs)) :: String)
+      then customFailure $ parseErrorAt off $ ((printf "line of conditional table should have %d values, but this one has only %d" (length fields) (length vs)) :: String)
      else return (m,vs)
   when (null body) $
-    customFailure $ parseErrorAt start $ "start of conditional table found, but no assignment rules afterward\n"
+    customFailure $ parseErrorAt start $ "start of conditional table found, but no assignment rules afterward"
   return $ flip map body $ \(m,vs) ->
     CB{cbMatchers=[m], cbAssignments=zip fields vs}
   <?> "conditional table"
@@ -614,7 +611,7 @@ recordmatcherp end = do
   r <- regexp end
   return $ RecordMatcher p r
   -- when (null ps) $
-  --   Fail.fail "start of record matcher found, but no patterns afterward\n(patterns should not be indented)\n"
+  --   Fail.fail "start of record matcher found, but no patterns afterward\n(patterns should not be indented)"
   <?> "record matcher"
 -- | A single matcher for a specific field. A csv field reference
@@ -686,93 +683,85 @@ regexp end = do
 -- 4. if the rules file didn't exist, create it with the default rules and filename
 --
 -- 5. return the transactions as a Journal
 --
-readJournalFromCsv :: Maybe FilePath -> FilePath -> Text -> IO (Either String Journal)
-readJournalFromCsv Nothing "-" _ = return $ Left "please use --rules-file when reading CSV from stdin"
-readJournalFromCsv mrulesfile csvfile csvdata =
-  handle (\(e::IOException) -> return $ Left $ show e) $ do
-  -- make and throw an IO exception.. which we catch and convert to an Either above ?
-  let throwerr = throw . userError
-  -- parse the csv rules
-  let rulesfile = fromMaybe (rulesFileFor csvfile) mrulesfile
-  rulesfileexists <- doesFileExist rulesfile
-  rulestext <-
-    if rulesfileexists
-    then do
-      dbg6IO "using conversion rules file" rulesfile
-      readFilePortably rulesfile >>= expandIncludes (takeDirectory rulesfile)
-    else
-      return $ defaultRulesText rulesfile
-  rules <- either throwerr return $ parseAndValidateCsvRules rulesfile rulestext
-  dbg6IO "csv rules" rules
-  -- parse the skip directive's value, if any
-  let skiplines = case getDirective "skip" rules of
-                    Nothing -> 0
-                    Just "" -> 1
-                    Just s -> readDef (throwerr $ "could not parse skip value: " ++ show s) $ T.unpack s
-  -- parse csv
-  let
-    -- parsec seems to fail if you pass it "-" here TODO: try again with megaparsec
-    parsecfilename = if csvfile == "-" then "(stdin)" else csvfile
-    separator =
-      case getDirective "separator" rules >>= parseSeparator of
-        Just c -> c
-        _ | ext == "ssv" -> ';'
-        _ | ext == "tsv" -> '\t'
-        _ -> ','
-      where
-        ext = map toLower $ drop 1 $ takeExtension csvfile
-  dbg6IO "using separator" separator
-  records <- (either throwerr id .
-              dbg7 "validateCsv" . validateCsv rules skiplines .
-              dbg7 "parseCsv")
-             `fmap` parseCsv separator parsecfilename csvdata
-  dbg6IO "first 3 csv records" $ take 3 records
-  -- identify header lines
-  -- let (headerlines, datalines) = identifyHeaderLines records
-  --     mfieldnames = lastMay headerlines
-  let
-    -- convert CSV records to transactions, saving the CSV line numbers for error positions
-    txns = dbg7 "csv txns" $ snd $ mapAccumL
-                   (\pos r ->
-                      let
-                        SourcePos name line col = pos
-                        line' = (mkPos . (+1) . unPos) line
-                        pos' = SourcePos name line' col
-                      in
-                        (pos', transactionFromCsvRecord pos rules r)
-                   )
-                   (initialPos parsecfilename) records
-    -- Ensure transactions are ordered chronologically.
-    -- First, if the CSV records seem to be most-recent-first (because
-    -- there's an explicit "newest-first" directive, or there's more
-    -- than one date and the first date is more recent than the last):
-    -- reverse them to get same-date transactions ordered chronologically.
-    txns' =
-      (if newestfirst || mdataseemsnewestfirst == Just True
-        then dbg7 "reversed csv txns" . reverse else id)
-      txns
-      where
-        newestfirst = dbg6 "newestfirst" $ isJust $ getDirective "newest-first" rules
-        mdataseemsnewestfirst = dbg6 "mdataseemsnewestfirst" $
-          case nub $ map tdate txns of
-            ds | length ds > 1 -> Just $ head ds > last ds
-            _ -> Nothing
-    -- Second, sort by date.
-    txns'' = dbg7 "date-sorted csv txns" $ sortBy (comparing tdate) txns'
-  when (not rulesfileexists) $ do
-    dbg1IO "creating conversion rules file" rulesfile
-    T.writeFile rulesfile rulestext
-  return $ Right nulljournal{jtxns=txns''}
+readJournalFromCsv :: Maybe FilePath -> FilePath -> Text -> ExceptT String IO Journal
+readJournalFromCsv Nothing "-" _ = throwError "please use --rules-file when reading CSV from stdin"
+readJournalFromCsv mrulesfile csvfile csvdata = do
+    -- parse the csv rules
+    let rulesfile = fromMaybe (rulesFileFor csvfile) mrulesfile
+    rulesfileexists <- liftIO $ doesFileExist rulesfile
+    rulestext <- liftIO $ if rulesfileexists
+      then do
+        dbg6IO "using conversion rules file" rulesfile
+        readFilePortably rulesfile >>= expandIncludes (takeDirectory rulesfile)
+      else
+        return $ defaultRulesText rulesfile
+    rules <- liftEither $ parseAndValidateCsvRules rulesfile rulestext
+    dbg6IO "csv rules" rules
+    -- parse the skip directive's value, if any
+    skiplines <- case getDirective "skip" rules of
+                      Nothing -> return 0
+                      Just "" -> return 1
+                      Just s -> maybe (throwError $ "could not parse skip value: " ++ show s) return . readMay $ T.unpack s
+    -- parse csv
+    let
+      -- parsec seems to fail if you pass it "-" here TODO: try again with megaparsec
+      parsecfilename = if csvfile == "-" then "(stdin)" else csvfile
+      separator =
+        case getDirective "separator" rules >>= parseSeparator of
+          Just c -> c
+          _ | ext == "ssv" -> ';'
+          _ | ext == "tsv" -> '\t'
+          _ -> ','
+        where
+          ext = map toLower $ drop 1 $ takeExtension csvfile
+    dbg6IO "using separator" separator
+    csv <- dbg7 "parseCsv" <$> parseCsv separator parsecfilename csvdata
+    records <- liftEither $ dbg7 "validateCsv" <$> validateCsv rules skiplines csv
+    dbg6IO "first 3 csv records" $ take 3 records
+    -- identify header lines
+    -- let (headerlines, datalines) = identifyHeaderLines records
+    --     mfieldnames = lastMay headerlines
+    let
+      -- convert CSV records to transactions, saving the CSV line numbers for error positions
+      txns = dbg7 "csv txns" $ snd $ mapAccumL
                     (\pos r ->
+                        let
+                          SourcePos name line col = pos
+                          line' = (mkPos . (+1) . unPos) line
+                          pos' = SourcePos name line' col
+                        in
+                          (pos', transactionFromCsvRecord pos rules r)
+                     )
+                     (initialPos parsecfilename) records
+      -- Ensure transactions are ordered chronologically.
+      -- First, if the CSV records seem to be most-recent-first (because
+      -- there's an explicit "newest-first" directive, or there's more
+      -- than one date and the first date is more recent than the last):
+      -- reverse them to get same-date transactions ordered chronologically.
+      txns' =
+        (if newestfirst || mdataseemsnewestfirst == Just True
+          then dbg7 "reversed csv txns" . reverse else id)
+        txns
+        where
+          newestfirst = dbg6 "newestfirst" $ isJust $ getDirective "newest-first" rules
+          mdataseemsnewestfirst = dbg6 "mdataseemsnewestfirst" $
+            case nub $ map tdate txns of
+              ds | length ds > 1 -> Just $ head ds > last ds
+              _ -> Nothing
+      -- Second, sort by date.
+      txns'' = dbg7 "date-sorted csv txns" $ sortBy (comparing tdate) txns'
+    liftIO $ when (not rulesfileexists) $ do
+      dbg1IO "creating conversion rules file" rulesfile
+      T.writeFile rulesfile rulestext
+    return nulljournal{jtxns=txns''}
 -- | Parse special separator names TAB and SPACE, or return the first
 -- character. Return Nothing on empty string
@@ -782,8 +771,8 @@ parseSeparator = specials . T.toLower
     specials "tab" = Just '\t'
     specials xs = fst <$> T.uncons xs
-parseCsv :: Char -> FilePath -> Text -> IO (Either String CSV)
-parseCsv separator filePath csvdata =
+parseCsv :: Char -> FilePath -> Text -> ExceptT String IO CSV
+parseCsv separator filePath csvdata = ExceptT $
   case filePath of
     "-" -> parseCassava separator "(stdin)" <$> T.getContents
     _ -> return $ if T.null csvdata then Right mempty else parseCassava separator filePath csvdata
@@ -811,9 +800,8 @@ printCSV = TB.toLazyText . unlinesB . map printRecord
     printField = wrap "\"" "\"" . T.replace "\"" "\"\""
 -- | Return the cleaned up and validated CSV data (can be empty), or an error.
-validateCsv :: CsvRules -> Int -> Either String CSV -> Either String [CsvRecord]
-validateCsv _ _ (Left err) = Left err
-validateCsv rules numhdrlines (Right rs) = validate $ applyConditionalSkips $ drop numhdrlines $ filternulls rs
+validateCsv :: CsvRules -> Int -> CSV -> Either String [CsvRecord]
+validateCsv rules numhdrlines = validate . applyConditionalSkips . drop numhdrlines . filternulls
   where
     filternulls = filter (/=[""])
     skipCount r =

View File

@@ -15,7 +15,7 @@ where
 import Brick
 -- import Brick.Widgets.Border ("border")
 import Control.Monad
-import Control.Monad.IO.Class (liftIO)
+import Control.Monad.Except (liftIO)
 import Data.Time.Calendar (Day)
 import Data.Void (Void)
 import Graphics.Vty (Event(..),Key(..),Modifier(..))
@@ -155,7 +155,7 @@ uiReloadJournal :: CliOpts -> Day -> UIState -> IO UIState
 uiReloadJournal copts d ui = do
   ej <-
     let copts' = enableForecastPreservingPeriod ui copts
-    in journalReload copts'
+    in runExceptT $ journalReload copts'
   return $ case ej of
     Right j -> regenerateScreens j d ui
     Left err ->
@@ -168,13 +168,11 @@ uiReloadJournal copts d ui = do
 -- since the provided options or today-date may have changed.
 uiReloadJournalIfChanged :: CliOpts -> Day -> Journal -> UIState -> IO UIState
 uiReloadJournalIfChanged copts d j ui = do
-  (ej, _changed) <-
-    let copts' = enableForecastPreservingPeriod ui copts
-    in journalReloadIfChanged copts' d j
+  let copts' = enableForecastPreservingPeriod ui copts
+  ej <- runExceptT $ journalReloadIfChanged copts' d j
   return $ case ej of
-    Right j' -> regenerateScreens j' d ui
-    Left err ->
-      case ui of
+    Right (j', _) -> regenerateScreens j' d ui
+    Left err -> case ui of
       UIState{aScreen=s@ErrorScreen{}} -> ui{aScreen=s{esError=err}}
       _ -> screenEnter d errorScreen{esError=err} ui

View File

@@ -9,7 +9,7 @@ module Hledger.UI.TransactionScreen
 ) where
 import Control.Monad
-import Control.Monad.IO.Class (liftIO)
+import Control.Monad.Except (liftIO)
 import Data.List
 import Data.Maybe
 import qualified Data.Text as T
@@ -174,7 +174,7 @@ tsHandle ui@UIState{aScreen=TransactionScreen{tsTransaction=(i,t), tsTransaction
       p = reportPeriod ui
     e | e `elem` [VtyEvent (EvKey (KChar 'g') []), AppEvent FileChange] -> do
       -- plog (if e == AppEvent FileChange then "file change" else "manual reload") "" `seq` return ()
-      ej <- liftIO $ journalReload copts
+      ej <- liftIO . runExceptT $ journalReload copts
       case ej of
        Left err -> continue $ screenEnter d errorScreen{esError=err} ui
        Right j' -> continue $ regenerateScreens j' d ui

View File

@ -1,6 +1,6 @@
cabal-version: 1.12 cabal-version: 1.12
-- This file has been generated from package.yaml by hpack version 0.34.4. -- This file has been generated from package.yaml by hpack version 0.34.6.
-- --
-- see: https://github.com/sol/hpack -- see: https://github.com/sol/hpack
@ -82,6 +82,7 @@ executable hledger-ui
, megaparsec >=7.0.0 && <9.3 , megaparsec >=7.0.0 && <9.3
, microlens >=0.4 , microlens >=0.4
, microlens-platform >=0.2.3.1 , microlens-platform >=0.2.3.1
, mtl >=2.2.1
, process >=1.2 , process >=1.2
, safe >=0.2 , safe >=0.2
, split >=0.1 , split >=0.1

View File

@@ -57,6 +57,7 @@ dependencies:
 - microlens >=0.4
 - microlens-platform >=0.2.3.1
 - megaparsec >=7.0.0 && <9.3
+- mtl >=2.2.1
 - process >=1.2
 - safe >=0.2
 - split >=0.1

View File

@@ -17,6 +17,7 @@ module Hledger.Web.Foundation where
 import Control.Applicative ((<|>))
 import Control.Monad (join, when)
+import Control.Monad.Except (runExceptT)
 import qualified Data.ByteString.Char8 as BC
 import Data.Traversable (for)
 import Data.IORef (IORef, readIORef, writeIORef)
@@ -256,16 +257,16 @@ shouldShowSidebar = do
 -- ui message.
 getCurrentJournal :: IORef Journal -> CliOpts -> Day -> Handler (Journal, Maybe String)
 getCurrentJournal jref opts d = do
-  -- XXX put this inside atomicModifyIORef' for thread safety
-  j <- liftIO (readIORef jref)
-  (ej, changed) <- liftIO $ journalReloadIfChanged opts d j
   -- re-apply any initial filter specified at startup
   let initq = _rsQuery $ reportspec_ opts
-  case (changed, filterJournalTransactions initq <$> ej) of
-    (False, _) -> return (j, Nothing)
-    (True, Right j') -> do
-      liftIO $ writeIORef jref j'
-      return (j',Nothing)
-    (True, Left e) -> do
+  -- XXX put this inside atomicModifyIORef' for thread safety
+  j <- liftIO (readIORef jref)
+  ej <- liftIO . runExceptT $ journalReloadIfChanged opts d j
+  case ej of
+    Left e -> do
       setMessage "error while reading journal"
       return (j, Just e)
+    Right (j', True) -> do
+      liftIO . writeIORef jref $ filterJournalTransactions initq j'
+      return (j',Nothing)
+    Right (_, False) -> return (j, Nothing)

View File

@@ -10,6 +10,7 @@ module Hledger.Web.Handler.EditR
   , postEditR
   ) where
+import Control.Monad.Except (runExceptT)
 import Hledger.Web.Import
 import Hledger.Web.Widget.Common
   (fromFormSuccess, helplink, journalFile404, writeJournalTextIfValidAndChanged)
@@ -36,7 +37,7 @@ postEditR f = do
   (f', txt) <- journalFile404 f j
   ((res, view), enctype) <- runFormPost (editForm f' txt)
   newtxt <- fromFormSuccess (showForm view enctype) res
-  writeJournalTextIfValidAndChanged f newtxt >>= \case
+  runExceptT (writeJournalTextIfValidAndChanged f newtxt) >>= \case
    Left e -> do
      setMessage $ "Failed to load journal: " <> toHtml e
      showForm view enctype

View File

@@ -9,6 +9,7 @@ module Hledger.Web.Handler.UploadR
   , postUploadR
   ) where
+import Control.Monad.Except (runExceptT)
 import qualified Data.ByteString.Lazy as BL
 import Data.Conduit (connect)
 import Data.Conduit.Binary (sinkLbs)
@@ -52,7 +53,7 @@ postUploadR f = do
        "where the transcoding should be handled by the browser."
      showForm view enctype
    Right newtxt -> return newtxt
-  writeJournalTextIfValidAndChanged f newtxt >>= \case
+  runExceptT (writeJournalTextIfValidAndChanged f newtxt) >>= \case
    Left e -> do
      setMessage $ "Failed to load journal: " <> toHtml e
      showForm view enctype

View File

@@ -4,7 +4,6 @@ module Hledger.Web.Test (
   hledgerWebTest
 ) where
-import Control.Monad.Except (runExceptT)
 import qualified Data.Text as T
 import Test.Hspec (hspec)
 import Yesod.Default.Config

View File

@@ -19,6 +19,7 @@ module Hledger.Web.Widget.Common
   , replaceInacct
   ) where
+import Control.Monad.Except (ExceptT, mapExceptT)
 import Data.Foldable (find, for_)
 import Data.List (elemIndex)
 import Data.Text (Text)
@@ -59,17 +60,15 @@ fromFormSuccess _ (FormSuccess a) = pure a
 -- The file will be written (if changed) with the current system's native
 -- line endings (see writeFileWithBackupIfChanged).
 --
-writeJournalTextIfValidAndChanged :: MonadHandler m => FilePath -> Text -> m (Either String ())
-writeJournalTextIfValidAndChanged f t = do
+writeJournalTextIfValidAndChanged :: MonadHandler m => FilePath -> Text -> ExceptT String m ()
+writeJournalTextIfValidAndChanged f t = mapExceptT liftIO $ do
   -- Ensure unix line endings, since both readJournal (cf
   -- formatdirectivep, #1194) writeFileWithBackupIfChanged require them.
   -- XXX klunky. Any equivalent of "hSetNewlineMode h universalNewlineMode" for form posts ?
   let t' = T.replace "\r" "" t
-  liftIO (readJournal definputopts (Just f) t') >>= \case
-    Left e -> return (Left e)
-    Right _ -> do
-      _ <- liftIO (writeFileWithBackupIfChanged f t')
-      return (Right ())
+  j <- readJournal definputopts (Just f) t'
+  _ <- liftIO $ j `seq` writeFileWithBackupIfChanged f t' -- Only write backup if the journal didn't error
+  return ()
 -- | Link to a topic in the manual.
 helplink :: Text -> Text -> HtmlUrl r

View File

@@ -292,8 +292,8 @@ tests_Commands = testGroup "Commands" [
     let
       ignoresourcepos j = j{jtxns=map (\t -> t{tsourcepos=nullsourcepos}) (jtxns j)}
      sameParse str1 str2 = do
-        j1 <- readJournal definputopts Nothing str1 >>= either error' (return . ignoresourcepos) -- PARTIAL:
-        j2 <- readJournal definputopts Nothing str2 >>= either error' (return . ignoresourcepos)
+        j1 <- ignoresourcepos <$> readJournal' str1 -- PARTIAL:
+        j2 <- ignoresourcepos <$> readJournal' str2 -- PARTIAL:
        j1 @?= j2{jlastreadtime=jlastreadtime j1, jfiles=jfiles j1} --, jparsestate=jparsestate j1}
     sameParse
       ("2008/12/07 One\n alpha $-1\n beta $1\n" <>
@@ -310,19 +310,19 @@ tests_Commands = testGroup "Commands" [
      )
  ,testCase "preserves \"virtual\" posting type" $ do
-    j <- readJournal definputopts Nothing "apply account test\n2008/12/07 One\n (from) $-1\n (to) $1\n" >>= either error' return -- PARTIAL:
+    j <- readJournal' "apply account test\n2008/12/07 One\n (from) $-1\n (to) $1\n" -- PARTIAL:
    let p = head $ tpostings $ head $ jtxns j
    paccount p @?= "test:from"
    ptype p @?= VirtualPosting
  ]
  ,testCase "alias directive" $ do
-    j <- readJournal definputopts Nothing "!alias expenses = equity:draw:personal\n1/1\n (expenses:food) 1\n" >>= either error' return -- PARTIAL:
+    j <- readJournal' "!alias expenses = equity:draw:personal\n1/1\n (expenses:food) 1\n" -- PARTIAL:
    let p = head $ tpostings $ head $ jtxns j
    paccount p @?= "equity:draw:personal:food"
  ,testCase "Y default year directive" $ do
-    j <- readJournal definputopts Nothing defaultyear_journal_txt >>= either error' return -- PARTIAL:
+    j <- readJournal' defaultyear_journal_txt -- PARTIAL:
    tdate (head $ jtxns j) @?= fromGregorian 2009 1 1
  ,testCase "ledgerAccountNames" $

View File

@@ -12,6 +12,7 @@ module Hledger.Cli.Commands.Diff (
   ,diff
 ) where
+import Control.Monad.Except (runExceptT)
 import Data.List.Extra ((\\), groupSortOn, nubBy, sortBy)
 import Data.Function (on)
 import Data.Ord (comparing)
@@ -82,7 +83,8 @@ matching ppl ppr = do
 readJournalFile' :: FilePath -> IO Journal
 readJournalFile' fn =
-  readJournalFile definputopts{balancingopts_=defbalancingopts{ignore_assertions_=True}} fn >>= either error' return -- PARTIAL:
+  runExceptT (readJournalFile definputopts{balancingopts_=defbalancingopts{ignore_assertions_=True}} fn)
+    >>= either error' return -- PARTIAL:
 matchingPostings :: AccountName -> Journal -> [PostingWithPath]
 matchingPostings acct j = filter ((== acct) . paccount . ppposting) $ allPostingsWithPath j

View File

@@ -8,6 +8,7 @@ module Hledger.Cli.Commands.Import (
 where
 import Control.Monad
+import Control.Monad.Except (runExceptT)
 import Data.List
 import qualified Data.Text.IO as T
 import Hledger
@@ -46,7 +47,7 @@ importcmd opts@CliOpts{rawopts_=rawopts,inputopts_=iopts} j = do
   case inputfiles of
    [] -> error' "please provide one or more input files as arguments" -- PARTIAL:
    fs -> do
-      enewj <- readJournalFiles iopts' fs
+      enewj <- runExceptT $ readJournalFiles iopts' fs
      case enewj of
        Left e -> error' e
        Right newj ->

View File

@@ -29,7 +29,9 @@ module Hledger.Cli.Utils
     tests_Cli_Utils,
   )
 where
 import Control.Exception as C
+import Control.Monad.Except (ExceptT, runExceptT, liftIO)
 import Data.List
 import Data.Maybe
@@ -72,9 +74,8 @@ withJournalDo opts cmd = do
   -- it's stdin, or it doesn't exist and we are adding. We read it strictly
   -- to let the add command work.
   journalpaths <- journalFilePathFromOpts opts
-  files <- readJournalFiles (inputopts_ opts) journalpaths
-  let transformed = journalTransform opts <$> files
-  either error' cmd transformed -- PARTIAL:
+  j <- runExceptT $ journalTransform opts <$> readJournalFiles (inputopts_ opts) journalpaths
+  either error' cmd j -- PARTIAL:
 -- | Apply some extra post-parse transformations to the journal, if
 -- specified by options. These happen after journal validation, but
@@ -132,29 +133,28 @@ writeOutputLazyText opts s = do
 -- Returns a journal or error message, and a flag indicating whether
 -- it was re-read or not. Like withJournalDo and journalReload, reads
 -- the full journal, without filtering.
-journalReloadIfChanged :: CliOpts -> Day -> Journal -> IO (Either String Journal, Bool)
+journalReloadIfChanged :: CliOpts -> Day -> Journal -> ExceptT String IO (Journal, Bool)
 journalReloadIfChanged opts _d j = do
   let maybeChangedFilename f = do newer <- journalFileIsNewer j f
                                   return $ if newer then Just f else Nothing
-  changedfiles <- catMaybes `fmap` mapM maybeChangedFilename (journalFilePaths j)
+  changedfiles <- liftIO $ catMaybes <$> mapM maybeChangedFilename (journalFilePaths j)
   if not $ null changedfiles
    then do
      -- XXX not sure why we use cmdarg's verbosity here, but keep it for now
-     verbose <- isLoud
-     when (verbose || debugLevel >= 6) $ printf "%s has changed, reloading\n" (head changedfiles)
-     ej <- journalReload opts
-     return (ej, True)
+     verbose <- liftIO isLoud
+     when (verbose || debugLevel >= 6) . liftIO $ printf "%s has changed, reloading\n" (head changedfiles)
+     newj <- journalReload opts
+     return (newj, True)
    else
-     return (Right j, False)
+     return (j, False)
 -- | Re-read the journal file(s) specified by options, applying any
 -- transformations specified by options. Or return an error string.
 -- Reads the full journal, without filtering.
-journalReload :: CliOpts -> IO (Either String Journal)
+journalReload :: CliOpts -> ExceptT String IO Journal
 journalReload opts = do
-  journalpaths <- dbg6 "reloading files" <$> journalFilePathFromOpts opts
-  files <- readJournalFiles (inputopts_ opts) journalpaths
-  return $ journalTransform opts <$> files
+  journalpaths <- liftIO $ dbg6 "reloading files" <$> journalFilePathFromOpts opts
+  journalTransform opts <$> readJournalFiles (inputopts_ opts) journalpaths
 -- | Has the specified file changed since the journal was last read ?
 -- Typically this is one of the journal's journalFilePaths. These are

View File

@@ -759,7 +759,7 @@ $ ./csvtest.sh
 >=0
-# 39. Insfficient number of values in tabular rules error
+# 39. Insufficient number of values in tabular rules error
 <
 10/2009/09,Flubber Co,50
 10/2009/09,Blubber Co,150
@@ -774,13 +774,12 @@ if|account2|comment
 %description Flubber|acct|
 $ ./csvtest.sh
 >2
-hledger: user error (input.rules:6:1:
+hledger: input.rules:6:1:
 |
 6 | %amount 150|acct2
 | ^
 line of conditional table should have 2 values, but this one has only 1
-)
 >=1
 # 40. unindented condition block error
@@ -797,14 +796,13 @@ account2 acct
 comment cmt
 $ ./csvtest.sh
 >2
-hledger: user error (input.rules:5:1:
+hledger: input.rules:5:1:
 |
 5 | if Flubber
 | ^
 start of conditional block found, but no assignment rules afterward
 (assignment rules in a conditional block should be indented)
-)
 >=1
 # 41. Assignment to custom field (#1264) + spaces after the if (#1120)
@@ -824,13 +822,13 @@ if Flubber
 account2 %myaccount2
 $ ./csvtest.sh
 >2
-hledger: user error (input.rules:6:3:
+hledger: input.rules:6:3:
 |
 6 | myaccount2 acct
 | ^^^^^^^^^^^^
 unexpected "myaccount2 a"
 expecting conditional block
-)
 >=1
 # 42. Rules override each other in the order listed in the file
@@ -872,14 +870,13 @@ if account2 comment
 %description Flubber acct
 $ ./csvtest.sh
 >2
-hledger: user error (input.rules:5:1:
+hledger: input.rules:5:1:
 |
 5 | if account2 comment
 | ^
 start of conditional block found, but no assignment rules afterward
 (assignment rules in a conditional block should be indented)
-)
 >=1
 # 44. handle conditions with & operator