Change Handle, Stdio, File unfold APIs (#1998)

Harendra Kumar, 2022-10-20 17:27:02 +05:30 (committed by GitHub)
parent 2ea9fb04e6
commit e03cceb344
16 changed files with 307 additions and 261 deletions
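
Every hunk below follows the same convention: combinators that return an Unfold get an "-er" name (read becomes reader, readChunks becomes chunkReader, readWith becomes readerWith, readChunksWith becomes chunkReaderWith), while the stream-returning variants take over the plain names (getBytes becomes read, getChunks becomes readChunks). A minimal before/after sketch; the imports and the name drainHandle are illustrative, assuming the public Streamly.FileSystem.Handle exports introduced by this commit:

import System.IO (Handle)
import qualified Streamly.FileSystem.Handle as Handle
import qualified Streamly.Prelude as Stream

-- Before this commit: Stream.drain $ Stream.unfold Handle.read h
-- After it, the unfold is named 'reader'; 'Handle.read' stays available as a
-- deprecated alias in the public module (see the Handle diff below).
drainHandle :: Handle -> IO ()
drainHandle h = Stream.drain $ Stream.unfold Handle.reader h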

View File

@ -83,7 +83,7 @@ benchIO name src sink =
-- | Get the last byte from a file bytestream.
toChunksLast :: Handle -> IO (Maybe Word8)
toChunksLast inh = do
let s = Handle.getChunks inh
let s = Handle.readChunks inh
larr <- Stream.last s
return $ case larr of
Nothing -> Nothing
@ -97,7 +97,7 @@ inspect $ 'toChunksLast `hasNoType` ''Step
-- | Count the number of bytes in a file.
toChunksSumLengths :: Handle -> IO Int
toChunksSumLengths inh =
let s = Handle.getChunks inh
let s = Handle.readChunks inh
in Stream.sum (Stream.map Array.length s)
#ifdef INSPECTION
@ -109,7 +109,7 @@ inspect $ 'toChunksSumLengths `hasNoType` ''Step
toChunksCountBytes :: Handle -> IO Word8
toChunksCountBytes inh = do
let foldlArr' f z = runIdentity . Stream.foldl' f z . Array.toStream
let s = Handle.getChunks inh
let s = Handle.readChunks inh
Stream.foldl' (\acc arr -> acc + foldlArr' (+) 0 arr) 0 s
#ifdef INSPECTION
@ -119,7 +119,7 @@ inspect $ 'toChunksCountBytes `hasNoType` ''Step
toChunksDecodeUtf8Arrays :: Handle -> IO ()
toChunksDecodeUtf8Arrays =
Stream.drain . Unicode.decodeUtf8Arrays . Handle.getChunks
Stream.drain . Unicode.decodeUtf8Arrays . Handle.readChunks
#ifdef INSPECTION
inspect $ hasNoTypeClasses 'toChunksDecodeUtf8Arrays
@ -133,7 +133,7 @@ inspect $ hasNoTypeClasses 'toChunksDecodeUtf8Arrays
-- | Count the number of lines in a file.
toChunksSplitOnSuffix :: Handle -> IO Int
toChunksSplitOnSuffix =
Stream.length . ArrayStream.splitOnSuffix 10 . Handle.getChunks
Stream.length . ArrayStream.splitOnSuffix 10 . Handle.readChunks
#ifdef INSPECTION
inspect $ hasNoTypeClasses 'toChunksSplitOnSuffix
@ -143,7 +143,7 @@ inspect $ 'toChunksSplitOnSuffix `hasNoType` ''Step
-- XXX use a word splitting combinator instead of splitOn and test it.
-- | Count the number of words in a file.
toChunksSplitOn :: Handle -> IO Int
toChunksSplitOn = Stream.length . ArrayStream.splitOn 32 . Handle.getChunks
toChunksSplitOn = Stream.length . ArrayStream.splitOn 32 . Handle.readChunks
#ifdef INSPECTION
inspect $ hasNoTypeClasses 'toChunksSplitOn
@ -183,7 +183,7 @@ copyChunksSplitInterposeSuffix inh outh =
Stream.fold (Handle.write outh)
$ ArrayStream.interposeSuffix 10
$ ArrayStream.splitOnSuffix 10
$ Handle.getChunks inh
$ Handle.readChunks inh
#ifdef INSPECTION
inspect $ hasNoTypeClassesExcept 'copyChunksSplitInterposeSuffix [''Unboxed]
@ -198,7 +198,7 @@ copyChunksSplitInterpose inh outh =
$ ArrayStream.interpose 32
-- XXX this is not correct word splitting combinator
$ ArrayStream.splitOn 32
$ Handle.getChunks inh
$ Handle.readChunks inh
#ifdef INSPECTION
inspect $ hasNoTypeClassesExcept 'copyChunksSplitInterpose [''Unboxed]
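
The chunked benchmarks above now obtain their input from Handle.readChunks, the stream previously named getChunks. A sketch of the same byte-counting idea as toChunksSumLengths, but going through the renamed chunkReader unfold so it composes with Streamly.Prelude; the imports and the name countBytesChunked are illustrative assumptions:

import System.IO (Handle)
import qualified Streamly.Data.Array.Unboxed as Array
import qualified Streamly.Internal.FileSystem.Handle as Handle
import qualified Streamly.Prelude as Stream

-- Total byte count of a handle, computed as the sum of its chunk lengths.
countBytesChunked :: Handle -> IO Int
countBytesChunked h =
    Stream.sum $ Stream.map Array.length $ Stream.unfold Handle.chunkReader h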

View File

@ -330,7 +330,7 @@ parseManyChunksOfSum n inh =
Stream.fold Fold.length
$ Stream.parseMany
(PR.fromFold $ Fold.take n Fold.sum)
(Stream.unfold Handle.read inh)
(Stream.unfold Handle.reader inh)
-------------------------------------------------------------------------------
-- Parsing with unfolds

View File

@ -149,7 +149,7 @@ o_1_space_serial_exceptions length =
-- | Send the file contents to /dev/null with exception handling
readWriteOnExceptionStream :: Handle -> Handle -> IO ()
readWriteOnExceptionStream inh devNull =
let readEx = Stream.onException (hClose inh) (Stream.unfold FH.read inh)
let readEx = Stream.onException (hClose inh) (Stream.unfold FH.reader inh)
in Stream.fold (FH.write devNull) readEx
#ifdef INSPECTION
@ -160,7 +160,7 @@ inspect $ hasNoTypeClasses 'readWriteOnExceptionStream
readWriteHandleExceptionStream :: Handle -> Handle -> IO ()
readWriteHandleExceptionStream inh devNull =
let handler (_e :: SomeException) = Stream.fromEffect (hClose inh >> return 10)
readEx = Stream.handle handler (Stream.unfold FH.read inh)
readEx = Stream.handle handler (Stream.unfold FH.reader inh)
in Stream.fold (FH.write devNull) readEx
#ifdef INSPECTION
@ -170,7 +170,7 @@ inspect $ hasNoTypeClasses 'readWriteHandleExceptionStream
-- | Send the file contents to /dev/null with exception handling
readWriteFinally_Stream :: Handle -> Handle -> IO ()
readWriteFinally_Stream inh devNull =
let readEx = Stream.finally_ (hClose inh) (Stream.unfold FH.read inh)
let readEx = Stream.finally_ (hClose inh) (Stream.unfold FH.reader inh)
in Stream.fold (FH.write devNull) readEx
#ifdef INSPECTION
@ -179,14 +179,14 @@ inspect $ hasNoTypeClasses 'readWriteFinally_Stream
readWriteFinallyStream :: Handle -> Handle -> IO ()
readWriteFinallyStream inh devNull =
let readEx = Stream.finally (hClose inh) (Stream.unfold FH.read inh)
let readEx = Stream.finally (hClose inh) (Stream.unfold FH.reader inh)
in Stream.fold (FH.write devNull) readEx
-- | Send the file contents to /dev/null with exception handling
fromToBytesBracket_Stream :: Handle -> Handle -> IO ()
fromToBytesBracket_Stream inh devNull =
let readEx = Stream.bracket_ (return ()) (\_ -> hClose inh)
(\_ -> IFH.getBytes inh)
(\_ -> IFH.read inh)
in IFH.putBytes devNull readEx
#ifdef INSPECTION
@ -196,14 +196,14 @@ inspect $ hasNoTypeClasses 'fromToBytesBracket_Stream
fromToBytesBracketStream :: Handle -> Handle -> IO ()
fromToBytesBracketStream inh devNull =
let readEx = Stream.bracket (return ()) (\_ -> hClose inh)
(\_ -> IFH.getBytes inh)
(\_ -> IFH.read inh)
in IFH.putBytes devNull readEx
readWriteBeforeAfterStream :: Handle -> Handle -> IO ()
readWriteBeforeAfterStream inh devNull =
let readEx =
Stream.after (hClose inh)
$ Stream.before (hPutChar devNull 'A') (Stream.unfold FH.read inh)
$ Stream.before (hPutChar devNull 'A') (Stream.unfold FH.reader inh)
in Stream.fold (FH.write devNull) readEx
#ifdef INSPECTION
@ -212,7 +212,7 @@ inspect $ 'readWriteBeforeAfterStream `hasNoType` ''D.Step
readWriteAfterStream :: Handle -> Handle -> IO ()
readWriteAfterStream inh devNull =
let readEx = Stream.after (hClose inh) (Stream.unfold FH.read inh)
let readEx = Stream.after (hClose inh) (Stream.unfold FH.reader inh)
in Stream.fold (FH.write devNull) readEx
#ifdef INSPECTION
@ -221,7 +221,7 @@ inspect $ 'readWriteAfterStream `hasNoType` ''D.Step
readWriteAfter_Stream :: Handle -> Handle -> IO ()
readWriteAfter_Stream inh devNull =
let readEx = Stream.after_ (hClose inh) (Stream.unfold FH.read inh)
let readEx = Stream.after_ (hClose inh) (Stream.unfold FH.reader inh)
in Stream.fold (FH.write devNull) readEx
#ifdef INSPECTION
@ -262,7 +262,7 @@ o_1_space_copy_stream_exceptions env =
-- | Send the file contents to /dev/null with exception handling
readChunksOnException :: Handle -> Handle -> IO ()
readChunksOnException inh devNull =
let readEx = IUF.onException (\_ -> hClose inh) FH.readChunks
let readEx = IUF.onException (\_ -> hClose inh) FH.chunkReader
in IUF.fold (IFH.writeChunks devNull) readEx inh
#ifdef INSPECTION
@ -272,7 +272,7 @@ inspect $ hasNoTypeClasses 'readChunksOnException
-- | Send the file contents to /dev/null with exception handling
readChunksBracket_ :: Handle -> Handle -> IO ()
readChunksBracket_ inh devNull =
let readEx = IUF.bracket_ return (\_ -> hClose inh) FH.readChunks
let readEx = IUF.bracket_ return (\_ -> hClose inh) FH.chunkReader
in IUF.fold (IFH.writeChunks devNull) readEx inh
#ifdef INSPECTION
@ -281,7 +281,7 @@ inspect $ hasNoTypeClasses 'readChunksBracket_
readChunksBracket :: Handle -> Handle -> IO ()
readChunksBracket inh devNull =
let readEx = IUF.bracket return (\_ -> hClose inh) FH.readChunks
let readEx = IUF.bracket return (\_ -> hClose inh) FH.chunkReader
in IUF.fold (IFH.writeChunks devNull) readEx inh
o_1_space_copy_exceptions_readChunks :: BenchEnv -> [Benchmark]
@ -306,7 +306,7 @@ toChunksBracket_ inh devNull =
let readEx = Stream.bracket_
(return ())
(\_ -> hClose inh)
(\_ -> IFH.getChunks inh)
(\_ -> IFH.readChunks inh)
in Stream.fold (IFH.writeChunks devNull) readEx
#ifdef INSPECTION
@ -318,7 +318,7 @@ toChunksBracket inh devNull =
let readEx = Stream.bracket
(return ())
(\_ -> hClose inh)
(\_ -> IFH.getChunks inh)
(\_ -> IFH.readChunks inh)
in Stream.fold (IFH.writeChunks devNull) readEx
o_1_space_copy_exceptions_toChunks :: BenchEnv -> [Benchmark]
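
Chunk-level reads with cleanup now wrap IFH.chunkReader instead of the old FH.readChunks unfold. A sketch mirroring readChunksBracket above; the module aliases and the name copyChunksBracket are assumptions matching this benchmark module:

import System.IO (Handle, hClose)
import qualified Streamly.Internal.Data.Unfold as IUF
import qualified Streamly.Internal.FileSystem.Handle as IFH

-- Copy a handle to /dev/null chunk by chunk, closing the input handle even if
-- an exception interrupts the copy.
copyChunksBracket :: Handle -> Handle -> IO ()
copyChunksBracket inh devNull =
    let readEx = IUF.bracket return (\_ -> hClose inh) IFH.chunkReader
     in IUF.fold (IFH.writeChunks devNull) readEx inh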

View File

@ -707,7 +707,7 @@ lf = fromIntegral (ord '\n')
-- | Split on line feed.
foldManySepBy :: Handle -> IO Int
foldManySepBy =
let u = UF.foldMany (FL.takeEndBy_ (== lf) FL.drain) FH.read
let u = UF.foldMany (FL.takeEndBy_ (== lf) FL.drain) FH.reader
in UF.fold FL.length u
o_1_space_nested :: BenchEnv -> Int -> [Benchmark]
@ -748,7 +748,7 @@ o_n_space_nested size =
-- | Send the file contents to /dev/null with exception handling
readWriteOnExceptionUnfold :: Handle -> Handle -> IO ()
readWriteOnExceptionUnfold inh devNull =
let readEx = UF.onException (\_ -> hClose inh) FH.read
let readEx = UF.onException (\_ -> hClose inh) FH.reader
in S.fold (FH.write devNull) $ S.unfold readEx inh
#ifdef INSPECTION
@ -760,7 +760,7 @@ inspect $ hasNoTypeClasses 'readWriteOnExceptionUnfold
readWriteHandleExceptionUnfold :: Handle -> Handle -> IO ()
readWriteHandleExceptionUnfold inh devNull =
let handler (_e :: SomeException) = hClose inh >> return 10
readEx = UF.handle (UF.functionM handler) FH.read
readEx = UF.handle (UF.functionM handler) FH.reader
in S.fold (FH.write devNull) $ S.unfold readEx inh
#ifdef INSPECTION
@ -771,7 +771,7 @@ inspect $ hasNoTypeClasses 'readWriteHandleExceptionUnfold
-- | Send the file contents to /dev/null with exception handling
readWriteFinally_Unfold :: Handle -> Handle -> IO ()
readWriteFinally_Unfold inh devNull =
let readEx = UF.finally_ (\_ -> hClose inh) FH.read
let readEx = UF.finally_ (\_ -> hClose inh) FH.reader
in S.fold (FH.write devNull) $ S.unfold readEx inh
#ifdef INSPECTION
@ -781,13 +781,13 @@ inspect $ hasNoTypeClasses 'readWriteFinally_Unfold
readWriteFinallyUnfold :: Handle -> Handle -> IO ()
readWriteFinallyUnfold inh devNull =
let readEx = UF.finally (\_ -> hClose inh) FH.read
let readEx = UF.finally (\_ -> hClose inh) FH.reader
in S.fold (FH.write devNull) $ S.unfold readEx inh
-- | Send the file contents to /dev/null with exception handling
readWriteBracket_Unfold :: Handle -> Handle -> IO ()
readWriteBracket_Unfold inh devNull =
let readEx = UF.bracket_ return (\_ -> hClose inh) FH.read
let readEx = UF.bracket_ return (\_ -> hClose inh) FH.reader
in S.fold (FH.write devNull) $ S.unfold readEx inh
#ifdef INSPECTION
@ -797,7 +797,7 @@ inspect $ hasNoTypeClasses 'readWriteBracket_Unfold
readWriteBracketUnfold :: Handle -> Handle -> IO ()
readWriteBracketUnfold inh devNull =
let readEx = UF.bracket return (\_ -> hClose inh) FH.read
let readEx = UF.bracket return (\_ -> hClose inh) FH.reader
in S.fold (FH.write devNull) $ S.unfold readEx inh
o_1_space_copy_read_exceptions :: BenchEnv -> [Benchmark]
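
The byte-level unfold combinators wrap the renamed FH.reader in the same way. A sketch mirroring readWriteFinallyUnfold above; the aliases and the name copyFinallyUnfold are assumptions:

import System.IO (Handle, hClose)
import qualified Streamly.Internal.Data.Unfold as UF
import qualified Streamly.Internal.FileSystem.Handle as FH
import qualified Streamly.Prelude as S

-- Send a handle's bytes to /dev/null, closing the handle when the unfold ends.
copyFinallyUnfold :: Handle -> Handle -> IO ()
copyFinallyUnfold inh devNull =
    let readEx = UF.finally (\_ -> hClose inh) FH.reader
     in S.fold (FH.write devNull) $ S.unfold readEx inh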

View File

@ -59,7 +59,7 @@ import Test.Inspection
-- | Get the last byte from a file bytestream.
readLast :: Handle -> IO (Maybe Word8)
readLast = S.last . S.unfold FH.read
readLast = S.last . S.unfold FH.reader
#ifdef INSPECTION
inspect $ hasNoTypeClasses 'readLast
@ -71,7 +71,7 @@ inspect $ 'readLast `hasNoType` ''MA.ArrayUnsafe -- FH.read/A.read
-- assert that flattenArrays constructors are not present
-- | Count the number of bytes in a file.
readCountBytes :: Handle -> IO Int
readCountBytes = S.length . S.unfold FH.read
readCountBytes = S.length . S.unfold FH.reader
#ifdef INSPECTION
inspect $ hasNoTypeClasses 'readCountBytes
@ -86,7 +86,7 @@ readCountLines =
S.length
. IUS.lines FL.drain
. SS.decodeLatin1
. S.unfold FH.read
. S.unfold FH.reader
#ifdef INSPECTION
inspect $ hasNoTypeClasses 'readCountLines
@ -101,7 +101,7 @@ readCountWords =
S.length
. IUS.words FL.drain
. SS.decodeLatin1
. S.unfold FH.read
. S.unfold FH.reader
#ifdef INSPECTION
inspect $ hasNoTypeClasses 'readCountWords
@ -110,7 +110,7 @@ inspect $ hasNoTypeClasses 'readCountWords
-- | Sum the bytes in a file.
readSumBytes :: Handle -> IO Word8
readSumBytes = S.sum . S.unfold FH.read
readSumBytes = S.sum . S.unfold FH.reader
#ifdef INSPECTION
inspect $ hasNoTypeClasses 'readSumBytes
@ -130,7 +130,7 @@ inspect $ 'readSumBytes `hasNoType` ''MA.ArrayUnsafe -- FH.read/A.read
-- fusion-plugin to propagate INLINE phase information such that this problem
-- does not occur.
readDrain :: Handle -> IO ()
readDrain inh = S.drain $ S.unfold FH.read inh
readDrain inh = S.drain $ S.unfold FH.reader inh
-- XXX investigate why we need an INLINE in this case (GHC)
{-# INLINE readDecodeLatin1 #-}
@ -138,13 +138,13 @@ readDecodeLatin1 :: Handle -> IO ()
readDecodeLatin1 inh =
S.drain
$ SS.decodeLatin1
$ S.unfold FH.read inh
$ S.unfold FH.reader inh
readDecodeUtf8 :: Handle -> IO ()
readDecodeUtf8 inh =
S.drain
$ SS.decodeUtf8
$ S.unfold FH.read inh
$ S.unfold FH.reader inh
#ifdef INSPECTION
inspect $ hasNoTypeClasses 'readDecodeUtf8
@ -189,7 +189,7 @@ getChunksConcatUnfoldCountLines inh =
$ IUS.lines FL.drain
$ SS.decodeLatin1
-- XXX replace with toBytes
$ S.unfoldMany A.read (IFH.getChunks inh)
$ S.unfoldMany A.read (IFH.readChunks inh)
#ifdef INSPECTION
inspect $ hasNoTypeClasses 'getChunksConcatUnfoldCountLines
@ -210,15 +210,15 @@ o_1_space_reduce_toBytes env =
-------------------------------------------------------------------------------
chunksOfSum :: Int -> Handle -> IO Int
chunksOfSum n inh = S.length $ S.chunksOf n FL.sum (S.unfold FH.read inh)
chunksOfSum n inh = S.length $ S.chunksOf n FL.sum (S.unfold FH.reader inh)
foldManyPostChunksOfSum :: Int -> Handle -> IO Int
foldManyPostChunksOfSum n inh =
S.length $ IP.foldManyPost (FL.take n FL.sum) (S.unfold FH.read inh)
S.length $ IP.foldManyPost (FL.take n FL.sum) (S.unfold FH.reader inh)
foldManyChunksOfSum :: Int -> Handle -> IO Int
foldManyChunksOfSum n inh =
S.length $ IP.foldMany (FL.take n FL.sum) (S.unfold FH.read inh)
S.length $ IP.foldMany (FL.take n FL.sum) (S.unfold FH.reader inh)
-- XXX investigate why we need an INLINE in this case (GHC)
-- Even though allocations remain the same in both cases inlining improves time
@ -228,7 +228,7 @@ foldManyChunksOfSum n inh =
chunksOf :: Int -> Handle -> IO Int
chunksOf n inh =
-- writeNUnsafe gives 2.5x boost here over writeN.
S.length $ S.chunksOf n (AT.writeNUnsafe n) (S.unfold FH.read inh)
S.length $ S.chunksOf n (AT.writeNUnsafe n) (S.unfold FH.reader inh)
#ifdef INSPECTION
inspect $ hasNoTypeClasses 'chunksOf
@ -241,7 +241,7 @@ inspect $ 'chunksOf `hasNoType` ''IUF.ConcatState -- FH.read/UF.many
{-# INLINE arraysOf #-}
arraysOf :: Int -> Handle -> IO Int
arraysOf n inh = S.length $ IP.arraysOf n (S.unfold FH.read inh)
arraysOf n inh = S.length $ IP.arraysOf n (S.unfold FH.reader inh)
o_1_space_reduce_read_grouped :: BenchEnv -> [Benchmark]
o_1_space_reduce_read_grouped env =
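
The grouping benchmarks consume bytes from the renamed FH.reader unfold before folding them in fixed-size groups. A sketch in the style of chunksOfSum above, assuming the Streamly.Prelude chunksOf combinator; the group size and the name groupSums are illustrative:

import Data.Word (Word8)
import System.IO (Handle)
import qualified Streamly.Data.Fold as FL
import qualified Streamly.Internal.FileSystem.Handle as FH
import qualified Streamly.Prelude as S

-- Sum every group of 1000 bytes read from the handle, one sum per group.
groupSums :: Handle -> IO [Word8]
groupSums h = S.toList $ S.chunksOf 1000 FL.sum (S.unfold FH.reader h)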

View File

@ -52,7 +52,7 @@ import Test.Inspection
-- | Copy file
copyChunks :: Handle -> Handle -> IO ()
copyChunks inh outh = S.fold (IFH.writeChunks outh) $ IFH.getChunks inh
copyChunks inh outh = S.fold (IFH.writeChunks outh) $ IFH.readChunks inh
#ifdef INSPECTION
inspect $ hasNoTypeClasses 'copyChunks
@ -75,7 +75,7 @@ o_1_space_copy_chunked env =
-- | Copy file
copyStream :: Handle -> Handle -> IO ()
copyStream inh outh = S.fold (FH.write outh) (S.unfold FH.read inh)
copyStream inh outh = S.fold (FH.write outh) (S.unfold FH.reader inh)
#ifdef INSPECTION
inspect $ hasNoTypeClasses 'copyStream
@ -102,7 +102,7 @@ o_1_space_copy_read env =
-- | Send the file contents to /dev/null
readFromBytesNull :: Handle -> Handle -> IO ()
readFromBytesNull inh devNull = IFH.putBytes devNull $ S.unfold FH.read inh
readFromBytesNull inh devNull = IFH.putBytes devNull $ S.unfold FH.reader inh
#ifdef INSPECTION
inspect $ hasNoTypeClasses 'readFromBytesNull
@ -116,7 +116,7 @@ inspect $ 'readFromBytesNull `hasNoType` ''D.FoldMany
readWithFromBytesNull :: Handle -> Handle -> IO ()
readWithFromBytesNull inh devNull =
IFH.putBytes devNull
$ S.unfold FH.readWith (defaultChunkSize, inh)
$ S.unfold FH.readerWith (defaultChunkSize, inh)
#ifdef INSPECTION
inspect $ hasNoTypeClasses 'readWithFromBytesNull
@ -134,7 +134,7 @@ _readChunks inh devNull = IUF.fold fld unf inh
where
fld = FH.write devNull
unf = IUF.many A.read FH.readChunks
unf = IUF.many A.read FH.chunkReader
-- | Send the chunk content to /dev/null
-- Implicitly benchmarked via 'readWithFromBytesNull'
@ -144,7 +144,7 @@ _readChunksWith inh devNull = IUF.fold fld unf (defaultChunkSize, inh)
where
fld = FH.write devNull
unf = IUF.many A.read FH.readChunksWith
unf = IUF.many A.read FH.chunkReaderWith
o_1_space_copy_fromBytes :: BenchEnv -> [Benchmark]
o_1_space_copy_fromBytes env =
@ -163,7 +163,7 @@ writeReadWith inh devNull = IUF.fold fld unf (defaultChunkSize, inh)
where
fld = FH.writeWith defaultChunkSize devNull
unf = FH.readWith
unf = FH.readerWith
#ifdef INSPECTION
inspect $ hasNoTypeClasses 'writeReadWith
@ -180,7 +180,7 @@ writeRead inh devNull = IUF.fold fld unf inh
where
fld = FH.write devNull
unf = FH.read
unf = FH.reader
#ifdef INSPECTION
inspect $ hasNoTypeClasses 'writeRead
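
Reads with an explicit buffer size now go through FH.readerWith, which unfolds the tuple (bufsize, handle). A sketch combining it with FH.write, as copyStream and readWithFromBytesNull do above; the buffer size and the name copyWithBuffer are illustrative:

import System.IO (Handle)
import qualified Streamly.Internal.FileSystem.Handle as FH
import qualified Streamly.Prelude as S

-- Copy one handle to another using 256 KiB read requests.
copyWithBuffer :: Handle -> Handle -> IO ()
copyWithBuffer inh outh =
    S.fold (FH.write outh) $ S.unfold FH.readerWith (262144, inh)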

View File

@ -23,7 +23,6 @@ import System.IO (Handle)
import qualified Streamly.Data.Array.Unboxed as Array
import qualified Streamly.Data.Fold as Fold
import qualified Streamly.FileSystem.Handle as Handle
import qualified Streamly.Internal.Data.Stream.IsStream as Stream
import qualified Streamly.Internal.Data.Unfold as Unfold
import qualified Streamly.Internal.FileSystem.Handle as Handle
@ -54,7 +53,7 @@ copyCodecUtf8ArraysLenient inh outh =
Stream.fold (Handle.write outh)
$ Unicode.encodeUtf8'
$ Unicode.decodeUtf8Arrays
$ Handle.getChunks inh
$ Handle.readChunks inh
#ifdef INSPECTION
inspect $ hasNoTypeClasses 'copyCodecUtf8ArraysLenient
@ -82,7 +81,7 @@ linesUnlinesCopy inh outh =
$ Unicode.unlines Unfold.fromList
$ Stream.splitOnSuffix (== '\n') Fold.toList
$ Unicode.decodeLatin1
$ Stream.unfold Handle.read inh
$ Stream.unfold Handle.reader inh
{-# NOINLINE linesUnlinesArrayWord8Copy #-}
linesUnlinesArrayWord8Copy :: Handle -> Handle -> IO ()
@ -90,7 +89,7 @@ linesUnlinesArrayWord8Copy inh outh =
Stream.fold (Handle.write outh)
$ Stream.interposeSuffix 10 Array.read
$ Stream.splitOnSuffix (== 10) Array.write
$ Stream.unfold Handle.read inh
$ Stream.unfold Handle.reader inh
-- XXX splitSuffixOn requires -funfolding-use-threshold=150 for better fusion
-- | Lines and unlines
@ -102,7 +101,7 @@ linesUnlinesArrayCharCopy inh outh =
$ UnicodeArr.unlines
$ UnicodeArr.lines
$ Unicode.decodeLatin1
$ Stream.unfold Handle.read inh
$ Stream.unfold Handle.reader inh
#ifdef INSPECTION
inspect $ hasNoTypeClassesExcept 'linesUnlinesArrayCharCopy [''Unboxed]
@ -130,7 +129,7 @@ wordsUnwordsCopyWord8 inh outh =
Stream.fold (Handle.write outh)
$ Stream.interposeSuffix 32 Unfold.fromList
$ Stream.wordsBy isSp Fold.toList
$ Stream.unfold Handle.read inh
$ Stream.unfold Handle.reader inh
#ifdef INSPECTION
inspect $ hasNoTypeClasses 'wordsUnwordsCopyWord8
@ -154,7 +153,7 @@ wordsUnwordsCopy inh outh =
$ Stream.wordsBy isSpace Fold.toList
-- -- $ Stream.splitOn isSpace Fold.toList
$ Unicode.decodeLatin1
$ Stream.unfold Handle.read inh
$ Stream.unfold Handle.reader inh
#ifdef INSPECTION
-- inspect $ hasNoTypeClasses 'wordsUnwordsCopy
@ -169,7 +168,7 @@ wordsUnwordsCharArrayCopy inh outh =
$ UnicodeArr.unwords
$ UnicodeArr.words
$ Unicode.decodeLatin1
$ Stream.unfold Handle.read inh
$ Stream.unfold Handle.reader inh
o_1_space_copy_read_group_ungroup :: BenchEnv -> [Benchmark]
o_1_space_copy_read_group_ungroup env =
@ -201,7 +200,7 @@ copyStreamLatin1' inh outh =
Stream.fold (Handle.write outh)
$ Unicode.encodeLatin1'
$ Unicode.decodeLatin1
$ Stream.unfold Handle.read inh
$ Stream.unfold Handle.reader inh
#ifdef INSPECTION
inspect $ hasNoTypeClasses 'copyStreamLatin1'
@ -221,7 +220,7 @@ copyStreamLatin1 inh outh =
Stream.fold (Handle.write outh)
$ Unicode.encodeLatin1
$ Unicode.decodeLatin1
$ Stream.unfold Handle.read inh
$ Stream.unfold Handle.reader inh
#ifdef INSPECTION
inspect $ hasNoTypeClasses 'copyStreamLatin1
@ -241,7 +240,7 @@ _copyStreamUtf8' inh outh =
Stream.fold (Handle.write outh)
$ Unicode.encodeUtf8'
$ Unicode.decodeUtf8'
$ Stream.unfold Handle.read inh
$ Stream.unfold Handle.reader inh
#ifdef INSPECTION
inspect $ hasNoTypeClasses '_copyStreamUtf8'
@ -257,7 +256,7 @@ copyStreamUtf8 inh outh =
Stream.fold (Handle.write outh)
$ Unicode.encodeUtf8
$ Unicode.decodeUtf8
$ Stream.unfold Handle.read inh
$ Stream.unfold Handle.reader inh
#ifdef INSPECTION
inspect $ hasNoTypeClasses 'copyStreamUtf8
@ -272,7 +271,7 @@ _copyStreamUtf8'Fold inh outh =
Stream.fold (Handle.write outh)
$ Unicode.encodeUtf8
$ Stream.foldMany Unicode.writeCharUtf8'
$ Stream.unfold Handle.read inh
$ Stream.unfold Handle.reader inh
{-# NOINLINE _copyStreamUtf8Parser #-}
_copyStreamUtf8Parser :: Handle -> Handle -> IO ()
@ -281,7 +280,7 @@ _copyStreamUtf8Parser inh outh =
$ Unicode.encodeUtf8
$ Stream.parseMany
(Unicode.parseCharUtf8With Unicode.TransliterateCodingFailure)
$ Stream.unfold Handle.read inh
$ Stream.unfold Handle.reader inh
o_1_space_decode_encode_read :: BenchEnv -> [Benchmark]
o_1_space_decode_encode_read env =

View File

@ -13,9 +13,9 @@
module Streamly.Console.Stdio
(
-- * Read (stdin)
read
, readChunks
-- * Unfolds (stdin)
reader
, chunkReader
-- * Write (stdout)
, write
@ -24,8 +24,33 @@ module Streamly.Console.Stdio
-- * Write (stderr)
, writeErr
, writeErrChunks
-- * Deprecated
, read
, readChunks
)
where
import Streamly.Internal.Console.Stdio
import Control.Monad.IO.Class (MonadIO(..))
import Data.Word (Word8)
import Streamly.Internal.Data.Array.Unboxed.Type (Array)
import Streamly.Internal.Data.Unfold (Unfold)
import Streamly.Internal.Console.Stdio hiding (read, readChunks)
import Prelude hiding (read)
-- Same as 'reader'
--
-- @since 0.8.0
{-# DEPRECATED read "Please use 'reader' instead" #-}
{-# INLINE read #-}
read :: MonadIO m => Unfold m () Word8
read = reader
-- Same as 'chunkReader'
--
-- @since 0.8.0
{-# DEPRECATED readChunks "Please use 'chunkReader' instead" #-}
{-# INLINE readChunks #-}
readChunks :: MonadIO m => Unfold m () (Array Word8)
readChunks = chunkReader
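
With this change the public Streamly.Console.Stdio module exposes the stdin unfolds as reader and chunkReader, keeping read and readChunks only as deprecated aliases. A minimal cat-style sketch driving the chunk unfold with Streamly.Prelude; the name cat is illustrative:

import qualified Streamly.Console.Stdio as Stdio
import qualified Streamly.Prelude as Stream

-- Echo stdin to stdout chunk by chunk.
cat :: IO ()
cat = Stream.fold Stdio.writeChunks $ Stream.unfold Stdio.chunkReader ()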

View File

@ -75,12 +75,19 @@ module Streamly.FileSystem.Handle
-- position of the file handle. The stream ends as soon as EOF is
-- encountered.
, read
, readWith
, readChunks
, readChunksWith
-- -- *** Streams
-- , read
-- , readWith
-- , readChunks
-- , readChunksWith
-- ** Writing
-- -- *** Unfolds
, reader
, readerWith
, chunkReader
, chunkReaderWith
-- ** Folds
-- | 'TextEncoding', 'NewLineMode', and 'Buffering' options of the
-- underlying handle are ignored. The write occurs from the current seek
-- position of the file handle. The write behavior depends on the 'IOMode'
@ -91,12 +98,36 @@ module Streamly.FileSystem.Handle
, writeChunks
-- * Deprecated
, readChunksWithBufferOf
, read
, readWithBufferOf
, readChunks
, readChunksWithBufferOf
, writeChunksWithBufferOf
, writeWithBufferOf
)
where
import Streamly.Internal.FileSystem.Handle
import Control.Monad.IO.Class (MonadIO(..))
import Data.Word (Word8)
import Streamly.Internal.Data.Array.Unboxed.Type (Array)
import Streamly.Internal.Data.Unfold.Type (Unfold)
import System.IO (Handle)
import Streamly.Internal.FileSystem.Handle hiding (read, readChunks)
import Prelude hiding (read)
-- | Same as 'reader'
--
-- @since 0.7.0
{-# DEPRECATED read "Please use 'reader' instead" #-}
{-# INLINE read #-}
read :: MonadIO m => Unfold m Handle Word8
read = reader
-- | Same as 'chunkReader'
--
-- @since 0.7.0
{-# DEPRECATED readChunks "Please use 'chunkReader' instead" #-}
{-# INLINE readChunks #-}
readChunks :: MonadIO m => Unfold m Handle (Array Word8)
readChunks = chunkReader

View File

@ -9,25 +9,29 @@
module Streamly.Internal.Console.Stdio
(
-- * Read
-- * Streams
read
, getBytes
, getChars
, readChars
, readChunks
, getChunks
-- , getChunksLn
-- , getStringsWith -- get strings using the supplied decoding
-- , getStrings -- get strings of complete chars,
-- leave any partial chars for next string
-- , getStringsLn -- get lines decoded as char strings
-- * Write
-- * Unfolds
, reader
, chunkReader
-- * Folds
, write
, writeChunks
, writeErr
, writeErrChunks
-- * Stream writes
, putBytes -- Buffered (32K)
, putChars
, writeChunks
, writeErrChunks
, putChunks -- Unbuffered
, putStringsWith
, putStrings
@ -59,51 +63,51 @@ import qualified Streamly.Internal.Unicode.Stream as Unicode
-- | Unfold standard input into a stream of 'Word8'.
--
-- @since 0.8.0
{-# INLINE read #-}
read :: MonadIO m => Unfold m () Word8
read = Unfold.lmap (\() -> stdin) Handle.read
-- @since 0.9.0
{-# INLINE reader #-}
reader :: MonadIO m => Unfold m () Word8
reader = Unfold.lmap (\() -> stdin) Handle.reader
-- | Read a byte stream from standard input.
--
-- > getBytes = Handle.getBytes stdin
-- > getBytes = Stream.unfold Stdio.read ()
-- > read = Handle.read stdin
-- > read = Stream.unfold Stdio.reader ()
--
-- /Pre-release/
--
{-# INLINE getBytes #-}
getBytes :: MonadIO m => Stream m Word8
getBytes = Handle.getBytes stdin
{-# INLINE read #-}
read :: MonadIO m => Stream m Word8
read = Handle.read stdin
-- | Read a character stream from Utf8 encoded standard input.
--
-- > getChars = Unicode.decodeUtf8 Stdio.getBytes
-- > readChars = Unicode.decodeUtf8 Stdio.read
--
-- /Pre-release/
--
{-# INLINE getChars #-}
getChars :: MonadIO m => Stream m Char
getChars = Unicode.decodeUtf8 getBytes
{-# INLINE readChars #-}
readChars :: MonadIO m => Stream m Char
readChars = Unicode.decodeUtf8 read
-- | Unfolds standard input into a stream of 'Word8' arrays.
--
-- @since 0.8.0
{-# INLINE readChunks #-}
readChunks :: MonadIO m => Unfold m () (Array Word8)
readChunks = Unfold.lmap (\() -> stdin) Handle.readChunks
-- @since 0.9.0
{-# INLINE chunkReader #-}
chunkReader :: MonadIO m => Unfold m () (Array Word8)
chunkReader = Unfold.lmap (\() -> stdin) Handle.chunkReader
-- | Read a stream of chunks from standard input. The maximum size of a single
-- chunk is limited to @defaultChunkSize@. The actual size read may be less
-- than @defaultChunkSize@.
--
-- > getChunks = Handle.getChunks stdin
-- > getChunks = Stream.unfold Stdio.readChunks ()
-- > readChunks = Handle.readChunks stdin
-- > readChunks = Stream.unfold Stdio.chunkReader ()
--
-- /Pre-release/
--
{-# INLINE getChunks #-}
getChunks :: MonadIO m => Stream m (Array Word8)
getChunks = Handle.getChunks stdin
{-# INLINE readChunks #-}
readChunks :: MonadIO m => Stream m (Array Word8)
readChunks = Handle.readChunks stdin
{-
-- | Read UTF8 encoded lines from standard input.
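
In the internal module the stream-returning names now mirror the Handle API: read, readChars, and readChunks are streams, while reader and chunkReader remain unfolds. A sketch pairing the renamed readChunks stream with this module's own putChunks writer, assuming putChunks consumes a stream of Word8 arrays as its export comment suggests; the name echo is illustrative:

import qualified Streamly.Internal.Console.Stdio as Stdio

-- Unbuffered, chunk-wise echo of stdin to stdout.
echo :: IO ()
echo = Stdio.putChunks Stdio.readChunks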

View File

@ -30,41 +30,41 @@ module Streamly.Internal.FileSystem.Handle
, getChunkOf
, putChunk
-- * Byte Stream Read
-- * Streams
, read
, readWith
, readChunksWith
, readChunks
-- * Unfolds
, reader
-- , readUtf8
-- , readLines
-- , readFrames
, readWith
, readerWith
, chunkReader
, chunkReaderWith
, getBytes
, getBytesWith
-- * Chunked Stream Read
, readChunks
, readChunksWith
, getChunksWith
, getChunks
-- * Byte Stream Write
-- Byte stream write (Folds)
-- * Folds
, write
, consumer
-- , writeUtf8
-- , writeUtf8ByLines
-- , writeByFrames
-- , writeLines
, writeWith
, writeMaybesWith
, putBytes
, putBytesWith
-- * Chunked Stream Write
, writeChunks
, writeChunksWith
, writeMaybesWith
-- * Refolds
, writer
, writerWith
, chunkWriter
-- , chunkWriterWith
-- * Stream writes
, putBytes
, putBytesWith
, putChunksWith
, putChunks
@ -86,7 +86,7 @@ module Streamly.Internal.FileSystem.Handle
-- , readChunksFrom
-- , readChunksFromTo
, readChunksFromToWith
, chunkReaderFromToWith
-- , readChunksFromThenToWith
-- , writeIndex
@ -101,6 +101,7 @@ module Streamly.Internal.FileSystem.Handle
-- , writeChunksFromTo
-- , writeChunksFromToWith
-- , writeChunksFromThenToWith
-- * Deprecated
, readChunksWithBufferOf
, readWithBufferOf
@ -145,9 +146,9 @@ import qualified Streamly.Internal.Data.Stream.StreamK.Type as K (mkStream)
-- >>> import qualified Streamly.Data.Array.Unboxed as Array
-- >>> import qualified Streamly.Data.Fold as Fold
-- >>> import qualified Streamly.Data.Unfold as Unfold
-- >>> import qualified Streamly.FileSystem.Handle as Handle
-- >>> import qualified Streamly.Prelude as Stream
--
-- >>> import qualified Streamly.Internal.Data.Array.Unboxed.Type as Array (writeNUnsafe)
-- >>> import qualified Streamly.Internal.Data.Stream.IsStream as Stream
-- >>> import qualified Streamly.Internal.Data.Unfold as Unfold (first)
-- >>> import qualified Streamly.Internal.FileSystem.Handle as Handle
@ -220,16 +221,16 @@ _getChunksWith size h = S.fromStreamK go
then stp
else yld arr go
-- | @getChunksWith size handle@ reads a stream of arrays from the file
-- | @readChunksWith size handle@ reads a stream of arrays from the file
-- handle @handle@. The maximum size of a single array is limited to @size@.
-- The actual size read may be less than or equal to @size@.
--
-- >>> getChunksWith size h = Stream.unfold Handle.readChunksWith (size, h)
-- >>> readChunksWith size h = Stream.unfold Handle.chunkReaderWith (size, h)
--
-- @since 0.9.0
{-# INLINE_NORMAL getChunksWith #-}
getChunksWith :: MonadIO m => Int -> Handle -> Stream m (Array Word8)
getChunksWith size h = S.fromStreamD (D.Stream step ())
{-# INLINE_NORMAL readChunksWith #-}
readChunksWith :: MonadIO m => Int -> Handle -> Stream m (Array Word8)
readChunksWith size h = S.fromStreamD (D.Stream step ())
where
{-# INLINE_LATE step #-}
step _ _ = do
@ -245,20 +246,19 @@ getChunksWith size h = S.fromStreamD (D.Stream step ())
-- or equal to @bufsize@.
--
-- @since 0.9.0
{-# INLINE_NORMAL readChunksWith #-}
readChunksWith :: MonadIO m => Unfold m (Int, Handle) (Array Word8)
readChunksWith =
{-# INLINE_NORMAL chunkReaderWith #-}
chunkReaderWith :: MonadIO m => Unfold m (Int, Handle) (Array Word8)
chunkReaderWith =
UF.lmap (uncurry getChunk) UF.repeatM
& UF.takeWhile ((/= 0) . byteLength)
-- | Same as 'readChunksWith'
-- | Same as 'chunkReaderWith'
--
-- @since 0.7.0
{-# DEPRECATED readChunksWithBufferOf "Please use readChunksWith instead." #-}
{-# DEPRECATED readChunksWithBufferOf "Please use chunkReaderWith instead." #-}
{-# INLINE_NORMAL readChunksWithBufferOf #-}
readChunksWithBufferOf :: MonadIO m => Unfold m (Int, Handle) (Array Word8)
readChunksWithBufferOf = readChunksWith
readChunksWithBufferOf = chunkReaderWith
-- There are two ways to implement this.
--
@ -267,15 +267,14 @@ readChunksWithBufferOf = readChunksWith
-- 2. Simply implement it from scratch like readChunksWith.
--
-- XXX Change this to readChunksWithFromTo (bufferSize, from, to, h)?
--
-- | The input to the unfold is @(from, to, bufferSize, handle)@. It starts
-- reading from the offset `from` in the file and reads up to the offset `to`.
--
--
{-# INLINE_NORMAL readChunksFromToWith #-}
readChunksFromToWith :: MonadIO m =>
{-# INLINE_NORMAL chunkReaderFromToWith #-}
chunkReaderFromToWith :: MonadIO m =>
Unfold m (Int, Int, Int, Handle) (Array Word8)
readChunksFromToWith = Unfold step inject
chunkReaderFromToWith = Unfold step inject
where
@ -297,20 +296,17 @@ readChunksFromToWith = Unfold step inject
assert (len <= remaining)
$ D.Yield arr (remaining - len, bufSize, h)
-- XXX read 'Array a' instead of Word8
--
-- | @getChunks handle@ reads a stream of arrays from the specified file
-- handle. The maximum size of a single array is limited to
-- @defaultChunkSize@. The actual size read may be less than or equal to
-- @defaultChunkSize@.
--
-- >>> getChunks = Handle.getChunksWith IO.defaultChunkSize
-- >>> readChunks = Handle.readChunksWith IO.defaultChunkSize
--
-- @since 0.9.0
{-# INLINE getChunks #-}
getChunks :: MonadIO m => Handle -> Stream m (Array Word8)
getChunks = getChunksWith defaultChunkSize
-- /Pre-release/
{-# INLINE readChunks #-}
readChunks :: MonadIO m => Handle -> Stream m (Array Word8)
readChunks = readChunksWith defaultChunkSize
-- | Unfolds a handle into a stream of 'Word8' arrays. Requests to the IO
-- device are performed using a buffer of size
@ -318,12 +314,12 @@ getChunks = getChunksWith defaultChunkSize
-- size of arrays in the resulting stream are therefore less than or equal to
-- 'Streamly.Internal.Data.Array.Unboxed.Type.defaultChunkSize'.
--
-- >>> readChunks = Unfold.first IO.defaultChunkSize Handle.readChunksWith
-- >>> chunkReader = Unfold.first IO.defaultChunkSize Handle.chunkReaderWith
--
-- @since 0.7.0
{-# INLINE readChunks #-}
readChunks :: MonadIO m => Unfold m Handle (Array Word8)
readChunks = UF.first defaultChunkSize readChunksWith
-- @since 0.9.0
{-# INLINE chunkReader #-}
chunkReader :: MonadIO m => Unfold m Handle (Array Word8)
chunkReader = UF.first defaultChunkSize chunkReaderWith
-------------------------------------------------------------------------------
-- Read File to Stream
@ -336,54 +332,50 @@ readChunks = UF.first defaultChunkSize readChunksWith
-- | Unfolds the tuple @(bufsize, handle)@ into a byte stream, read requests
-- to the IO device are performed using buffers of @bufsize@.
--
-- >>> readWith = Unfold.many Array.read Handle.readChunksWith
-- >>> readerWith = Unfold.many Array.read Handle.chunkReaderWith
--
-- @since 0.9.0
{-# INLINE readWith #-}
readWith :: MonadIO m => Unfold m (Int, Handle) Word8
readWith = UF.many A.read readChunksWith
{-# INLINE readerWith #-}
readerWith :: MonadIO m => Unfold m (Int, Handle) Word8
readerWith = UF.many A.read chunkReaderWith
-- | Same as 'readWith'
-- | Same as 'readerWith'
--
-- @since 0.7.0
{-# DEPRECATED readWithBufferOf "Please use readWith instead." #-}
{-# DEPRECATED readWithBufferOf "Please use 'readerWith' instead." #-}
{-# INLINE readWithBufferOf #-}
readWithBufferOf :: MonadIO m => Unfold m (Int, Handle) Word8
readWithBufferOf = readWith
readWithBufferOf = readerWith
-- | @getBytesWith bufsize handle@ reads a byte stream from a file
-- | @readWith bufsize handle@ reads a byte stream from a file
-- handle, reads are performed in chunks of up to @bufsize@.
--
-- >>> getBytesWith size h = Stream.unfoldMany Array.read $ Handle.getChunksWith size h
-- >>> readWith size h = Stream.unfoldMany Array.read $ Handle.readChunksWith size h
--
-- /Pre-release/
{-# INLINE getBytesWith #-}
getBytesWith :: MonadIO m => Int -> Handle -> Stream m Word8
getBytesWith size h = AS.concat $ getChunksWith size h
{-# INLINE readWith #-}
readWith :: MonadIO m => Int -> Handle -> Stream m Word8
readWith size h = AS.concat $ readChunksWith size h
-- TODO
-- Generate a stream of elements of the given type from a file 'Handle'.
-- read :: (MonadIO m, Unboxed a) => Handle -> Stream m a
--
-- | Unfolds a file handle into a byte stream. IO requests to the device are
-- performed in sizes of
-- 'Streamly.Internal.Data.Array.Unboxed.Type.defaultChunkSize'.
--
-- >>> read = Unfold.many Handle.readChunks Array.read
-- >>> reader = Unfold.many Array.read chunkReader
--
-- @since 0.7.0
{-# INLINE read #-}
read :: MonadIO m => Unfold m Handle Word8
read = UF.many A.read readChunks
-- @since 0.9.0
{-# INLINE reader #-}
reader :: MonadIO m => Unfold m Handle Word8
reader = UF.many A.read chunkReader
-- | Generate a byte stream from a file 'Handle'.
--
-- >>> getBytes h = Stream.unfoldMany Array.read $ Handle.getChunks h
-- >>> read h = Stream.unfoldMany Array.read $ Handle.readChunks h
--
-- /Pre-release/
{-# INLINE getBytes #-}
getBytes :: MonadIO m => Handle -> Stream m Word8
getBytes = AS.concat . getChunks
{-# INLINE read #-}
read :: MonadIO m => Handle -> Stream m Word8
read = AS.concat . readChunks
-------------------------------------------------------------------------------
-- Writing
@ -479,12 +471,12 @@ writeChunks h = FL.drainBy (putChunk h)
-- | Like writeChunks but uses the experimental 'Refold' API.
--
-- /Internal/
{-# INLINE consumeChunks #-}
consumeChunks :: MonadIO m => Refold m Handle (Array a) ()
consumeChunks = Refold.drainBy putChunk
{-# INLINE chunkWriter #-}
chunkWriter :: MonadIO m => Refold m Handle (Array a) ()
chunkWriter = Refold.drainBy putChunk
-- XXX lpackArraysChunksOf should be written idiomatically
--
-- | @writeChunksWith bufsize handle@ writes a stream of arrays
-- to @handle@ after coalescing the adjacent arrays in chunks of @bufsize@.
-- We never split an array, if a single array is bigger than the specified size
@ -515,7 +507,7 @@ writeChunksWithBufferOf = writeChunksWith
-- writes as well.
-- XXX Maybe we should have a Fold.arraysOf like we have Stream.arraysOf
--
-- | @writeWith reqSize handle@ writes the input stream to @handle@.
-- Bytes in the input stream are collected into a buffer until we have a chunk
-- of @reqSize@ and then written to the IO device.
@ -548,13 +540,13 @@ writeMaybesWith n h =
writeOnNothing = FL.takeEndBy_ isNothing writeNJusts
in FL.many writeOnNothing (writeChunks h)
-- | Like 'writeWith' but uses the experimental 'Refold' API.
-- | Like 'writeWith' but uses the experimental 'Refold' API.
--
-- /Internal/
{-# INLINE consumerWith #-}
consumerWith :: MonadIO m => Int -> Refold m Handle Word8 ()
consumerWith n =
FL.refoldMany (FL.take n $ writeNUnsafe n) consumeChunks
{-# INLINE writerWith #-}
writerWith :: MonadIO m => Int -> Refold m Handle Word8 ()
writerWith n =
FL.refoldMany (FL.take n $ writeNUnsafe n) chunkWriter
-- | Write a byte stream to a file handle. Accumulates the input in chunks of
-- up to 'Streamly.Internal.Data.Array.Unboxed.Type.defaultChunkSize' before writing
@ -570,15 +562,9 @@ write = writeWith defaultChunkSize
-- | Like 'write' but uses the experimental 'Refold' API.
--
-- /Internal/
{-# INLINE consumer #-}
consumer :: MonadIO m => Refold m Handle Word8 ()
consumer = consumerWith defaultChunkSize
{-
{-# INLINE write #-}
write :: (MonadIO m, Unboxed a) => Handle -> Stream m a -> m ()
write = toHandleWith A.defaultChunkSize
-}
{-# INLINE writer #-}
writer :: MonadIO m => Refold m Handle Word8 ()
writer = writerWith defaultChunkSize
-- XXX mmap a file into an array. This could be useful for in-place operations
-- on a file. For example, we can quicksort the contents of a file by mmapping
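
After the renames, the stream-level sides of this module pair up naturally: read with putBytes and readChunks with putChunks, while reader, readerWith, chunkReader, and chunkReaderWith are the unfolds. A sketch of a handle-to-handle copy kept inside this one module so the stream types line up; the name copyHandle is illustrative:

import System.IO (Handle)
import qualified Streamly.Internal.FileSystem.Handle as Handle

-- Copy one handle to another; reads are chunked at defaultChunkSize internally.
copyHandle :: Handle -> Handle -> IO ()
copyHandle inh outh = Handle.putBytes outh (Handle.read inh)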

View File

@ -34,13 +34,15 @@ cradle:
component: "bench:Data.Stream"
- path: "./benchmark/Streamly/Benchmark/Data/Stream/Expand.hs"
component: "bench:Data.Stream"
- path: "./benchmark/Streamly/Benchmark/Data/Stream/Exceptions.hs"
component: "bench:Data.Stream"
- path: "./benchmark/Streamly/Benchmark/Data/Stream/Generate.hs"
component: "bench:Data.Stream"
- path: "./benchmark/Streamly/Benchmark/Data/Stream/Reduce.hs"
component: "bench:Data.Stream"
- path: "./benchmark/Streamly/Benchmark/Data/Stream/Transform.hs"
component: "bench:Data.Stream"
- path: "./benchmark/Streamly/Benchmark/Data/Stream/Async.hs"
- path: "./benchmark/Streamly/Benchmark/Data/Stream/Concurrent.hs"
component: "bench:Data.Stream.Concurrent"
- path: "./benchmark/Streamly/Benchmark/Data/Stream/ConcurrentCommon.hs"
component: "bench:Data.Stream.Concurrent"

View File

@ -512,7 +512,7 @@ readOneEvent = do
watchToStream :: Watch -> Stream IO Event
watchToStream (Watch handle _ _) =
S.parseMany readOneEvent $ S.unfold FH.read handle
S.parseMany readOneEvent $ S.unfold FH.reader handle
-- XXX Write tests for all the points in macOS specific behavior.
--

View File

@ -879,7 +879,7 @@ watchToStream cfg wt@(Watch handle _) = do
-- sizeof(struct inotify_event) + NAME_MAX + 1
--
-- will be sufficient to read at least one event.
S.parseMany (readOneEvent cfg wt) $ S.unfold FH.read handle
S.parseMany (readOneEvent cfg wt) $ S.unfold FH.reader handle
-- XXX We should not go across the mount points of network file systems or file
-- systems that are known to not generate any events.

View File

@ -43,37 +43,36 @@ module Streamly.Internal.FileSystem.File
-- ** File IO Using Handle
withFile
-- ** Read From File
, readWith
-- ** Streams
, read
, readChunksWith
, readChunks
-- ** Unfolds
, readerWith
, reader
-- , readShared
-- , readUtf8
-- , readLines
-- , readFrames
, toBytes
-- -- * Array Read
, readChunksWith
, readChunksFromToWith
, readChunks
, toChunksWith
, toChunks
, chunkReaderWith
, chunkReaderFromToWith
, chunkReader
-- ** Write To File
, putChunk -- writeChunk?
-- ** Folds
, write
-- , writeUtf8
-- , writeUtf8ByLines
-- , writeByFrames
, writeWith
, fromBytes
, fromBytesWith
-- -- * Array Write
, putChunk
, writeChunks
-- ** Writing Streams
, fromBytes -- putBytes?
, fromBytesWith
, fromChunks
-- ** Append To File
@ -212,30 +211,30 @@ appendArray file arr = SIO.withFile file AppendMode (`FH.putChunk` arr)
-- Stream of Arrays IO
-------------------------------------------------------------------------------
-- | @toChunksWith size file@ reads a stream of arrays from file @file@.
-- | @readChunksWith size file@ reads a stream of arrays from file @file@.
-- The maximum size of a single array is specified by @size@. The actual size
-- read may be less than or equal to @size@.
--
-- @since 0.9.0
{-# INLINE toChunksWith #-}
toChunksWith :: (MonadCatch m, MonadAsync m)
{-# INLINE readChunksWith #-}
readChunksWith :: (MonadCatch m, MonadAsync m)
=> Int -> FilePath -> Stream m (Array Word8)
toChunksWith size file =
withFile file ReadMode (FH.getChunksWith size)
readChunksWith size file =
withFile file ReadMode (FH.readChunksWith size)
-- XXX read 'Array a' instead of Word8
--
-- | @toChunks file@ reads a stream of arrays from file @file@.
-- | @readChunks file@ reads a stream of arrays from file @file@.
-- The maximum size of a single array is limited to @defaultChunkSize@. The
-- actual size read may be less than @defaultChunkSize@.
--
-- > toChunks = toChunksWith defaultChunkSize
-- > readChunks = readChunksWith defaultChunkSize
--
-- @since 0.7.0
{-# INLINE toChunks #-}
toChunks :: (MonadCatch m, MonadAsync m)
{-# INLINE readChunks #-}
readChunks :: (MonadCatch m, MonadAsync m)
=> FilePath -> Stream m (Array Word8)
toChunks = toChunksWith defaultChunkSize
readChunks = readChunksWith defaultChunkSize
-------------------------------------------------------------------------------
-- Read File to Stream
@ -252,10 +251,10 @@ toChunks = toChunksWith defaultChunkSize
--
-- /Pre-release/
--
{-# INLINE readChunksWith #-}
readChunksWith :: (MonadCatch m, MonadAsync m)
{-# INLINE chunkReaderWith #-}
chunkReaderWith :: (MonadCatch m, MonadAsync m)
=> Unfold m (Int, FilePath) (Array Word8)
readChunksWith = usingFile2 FH.readChunksWith
chunkReaderWith = usingFile2 FH.chunkReaderWith
-- | Unfold the tuple @(from, to, bufsize, filepath)@ into a stream
-- of 'Word8' arrays.
@ -265,10 +264,10 @@ readChunksWith = usingFile2 FH.readChunksWith
-- less than or equal to @bufsize@.
--
-- /Pre-release/
{-# INLINE readChunksFromToWith #-}
readChunksFromToWith :: (MonadCatch m, MonadAsync m) =>
{-# INLINE chunkReaderFromToWith #-}
chunkReaderFromToWith :: (MonadCatch m, MonadAsync m) =>
Unfold m (Int, Int, Int, FilePath) (Array Word8)
readChunksFromToWith = usingFile3 FH.readChunksFromToWith
chunkReaderFromToWith = usingFile3 FH.chunkReaderFromToWith
-- | Unfolds a 'FilePath' into a stream of 'Word8' arrays. Requests to the IO
-- device are performed using a buffer of size
@ -277,26 +276,26 @@ readChunksFromToWith = usingFile3 FH.readChunksFromToWith
-- 'Streamly.Internal.Data.Array.Unboxed.Type.defaultChunkSize'.
--
-- /Pre-release/
{-# INLINE readChunks #-}
readChunks :: (MonadCatch m, MonadAsync m) => Unfold m FilePath (Array Word8)
readChunks = usingFile FH.readChunks
{-# INLINE chunkReader #-}
chunkReader :: (MonadCatch m, MonadAsync m) => Unfold m FilePath (Array Word8)
chunkReader = usingFile FH.chunkReader
-- | Unfolds the tuple @(bufsize, filepath)@ into a byte stream, read requests
-- to the IO device are performed using buffers of @bufsize@.
--
-- /Pre-release/
{-# INLINE readWith #-}
readWith :: (MonadCatch m, MonadAsync m) => Unfold m (Int, FilePath) Word8
readWith = usingFile2 FH.readWith
{-# INLINE readerWith #-}
readerWith :: (MonadCatch m, MonadAsync m) => Unfold m (Int, FilePath) Word8
readerWith = usingFile2 FH.readerWith
-- | Unfolds a file path into a byte stream. IO requests to the device are
-- performed in sizes of
-- 'Streamly.Internal.Data.Array.Unboxed.Type.defaultChunkSize'.
--
-- @since 0.7.0
{-# INLINE read #-}
read :: (MonadCatch m, MonadAsync m) => Unfold m FilePath Word8
read = UF.many A.read (usingFile FH.readChunks)
-- /Pre-release/
{-# INLINE reader #-}
reader :: (MonadCatch m, MonadAsync m) => Unfold m FilePath Word8
reader = UF.many A.read (usingFile FH.chunkReader)
-- | Generate a stream of bytes from a file specified by path. The stream ends
-- when EOF is encountered. File is locked using multiple reader and single
@ -304,9 +303,9 @@ read = UF.many A.read (usingFile FH.readChunks)
--
-- /Pre-release/
--
{-# INLINE toBytes #-}
toBytes :: (MonadCatch m, MonadAsync m) => FilePath -> Stream m Word8
toBytes file = AS.concat $ withFile file ReadMode FH.getChunks
{-# INLINE read #-}
read :: (MonadCatch m, MonadAsync m) => FilePath -> Stream m Word8
read file = AS.concat $ withFile file ReadMode FH.readChunks
{-
-- | Generate a stream of elements of the given type from a file 'Handle'. The
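
The File module follows the same naming scheme: path-based streams keep the plain names (read, readChunks) and the unfolds gain "-er" names (reader, chunkReader, readerWith, chunkReaderWith). A sketch of a whole-file copy using only this module's stream API, assuming fromBytes takes the destination path first; copyFile and the paths are illustrative:

import qualified Streamly.Internal.FileSystem.File as File

-- Copy a file byte by byte via the renamed path-based stream APIs.
copyFile :: FilePath -> FilePath -> IO ()
copyFile src dst = File.fromBytes dst (File.read src)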

View File

@ -77,20 +77,20 @@ executor f =
readFromHandle :: IO (Stream IO Char)
readFromHandle =
let f = Unicode.decodeUtf8 . Stream.unfold Handle.read
let f = Unicode.decodeUtf8 . Stream.unfold Handle.reader
in executor f
readWithBufferFromHandle :: IO (Stream IO Char)
readWithBufferFromHandle =
let f1 = (\h -> (1024, h))
f2 = Unicode.decodeUtf8 . Stream.unfold Handle.readWith . f1
f2 = Unicode.decodeUtf8 . Stream.unfold Handle.readerWith . f1
in executor f2
readChunksFromHandle :: IO (Stream IO Char)
readChunksFromHandle =
let f = Unicode.decodeUtf8
. Stream.concatMap Array.toStream
. Stream.unfold Handle.readChunks
. Stream.unfold Handle.chunkReader
in executor f
readChunksWithBuffer :: IO (Stream IO Char)
@ -99,7 +99,7 @@ readChunksWithBuffer =
f2 =
Unicode.decodeUtf8
. Stream.concatMap Array.toStream
. Stream.unfold Handle.readChunksWith
. Stream.unfold Handle.chunkReaderWith
. f1
in executor f2
@ -129,7 +129,7 @@ testWrite hfold =
_ <- Stream.fold (hfold h) $ Stream.fromList list
hFlush h
hSeek h AbsoluteSeek 0
ls <- toList $ Stream.unfold Handle.read h
ls <- toList $ Stream.unfold Handle.reader h
hClose h
return (ls == list)
@ -151,10 +151,10 @@ testWriteWithChunk =
hw <- openFile fpathWrite ReadWriteMode
hSeek hw AbsoluteSeek 0
_ <- Stream.fold (Handle.writeChunks hw)
$ Stream.unfold Handle.readChunksWith (1024, hr)
$ Stream.unfold Handle.chunkReaderWith (1024, hr)
hFlush hw
hSeek hw AbsoluteSeek 0
ls <- toList $ Stream.unfold Handle.read hw
ls <- toList $ Stream.unfold Handle.reader hw
let arr = Array.fromList ls
return (testDataLarge == utf8ToString arr)
@ -171,7 +171,7 @@ testReadChunksFromToWith from to buffSize res = monadicIO $ run go
ls <-
toList
$ Stream.unfold
Handle.readChunksFromToWith (from, to, buffSize, h)
Handle.chunkReaderFromToWith (from, to, buffSize, h)
return (res `shouldBe` fmap Array.toList ls)
-- Test for first byte
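
The range reader unfolds the tuple (from, to, bufferSize, handle), as exercised by testReadChunksFromToWith above. A sketch collecting a byte range as chunks; the imports, the 1024-byte buffer, and the name readRange are illustrative assumptions, and the from/to offset semantics are as documented for chunkReaderFromToWith in the Handle diff:

import System.IO (Handle)
import Data.Word (Word8)
import Streamly.Internal.Data.Array.Unboxed.Type (Array)
import qualified Streamly.Internal.FileSystem.Handle as Handle
import qualified Streamly.Prelude as Stream

-- Collect the chunks covering the given offset range of a handle.
readRange :: Int -> Int -> Handle -> IO [Array Word8]
readRange from to h =
    Stream.toList
        $ Stream.unfold Handle.chunkReaderFromToWith (from, to, 1024, h)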