Remove more redundancy.

Matus Tejiscak 2020-09-19 14:57:45 +02:00
parent 63c3ebf124
commit f3984fff46
2 changed files with 2 additions and 52 deletions

@@ -1,45 +0,0 @@
--- This is a copy of the module in contrib.
--- We can't use that module because it would break bootstrapping.
-module Data.String.Iterator
-
-export
-record StringIterator where
-  constructor MkSI
-  string : String
-
-  -- backend-dependent offset into the string
-  -- see prim__readChar below
-  offset : Int
-
-export
-fromString : String -> StringIterator
-fromString s = MkSI s 0
-
-private
-data ReadResult
-  = EOF
-  | Character Char Int -- character, width
-
--- takes a backend-dependent offset into the string
--- on ML-based backends, this is in bytes
--- in Scheme, this is in codepoints
-private
-%foreign "scheme:read-string-char"
-prim__readChar : Int -> String -> ReadResult
-
-export
-uncons : StringIterator -> Maybe (Char, StringIterator)
-uncons (MkSI s ofs) =
-  case prim__readChar ofs s of
-    EOF => Nothing
-    Character ch width => Just (ch, MkSI s (ofs + width))
-
-export
-foldl : (a -> Char -> a) -> a -> String -> a
-foldl f acc s = loop 0 acc
-  where
-    loop : Int -> a -> a
-    loop ofs acc =
-      case prim__readChar ofs s of
-        EOF => acc
-        Character ch width => loop (ofs + width) (f acc ch)
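Note (illustration only, not part of this commit): the removed module's API is small. Assuming the contrib version it mirrors behaves the same way, typical use looks like the sketch below; countChars and firstChar are hypothetical names.

import Data.String.Iterator

-- Count characters without materialising a List Char; the module's own
-- foldl is qualified to avoid a clash with Prelude.foldl.
countChars : String -> Int
countChars = Data.String.Iterator.foldl (\n, _ => n + 1) 0

-- Peek at the first character via uncons.
firstChar : String -> Maybe Char
firstChar s = fst <$> uncons (fromString s)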

@@ -164,11 +164,6 @@ tokenise pred line col acc tmap str
                                      line', col', rest)
            Nothing => getFirstToken ts str
 
--- to avoid breaking the bootstrap,
--- we can't depend on the stdlib version of fastUnpack here
-%foreign "scheme:string-unpack"
-fastUnpack : String -> List Char
-
 ||| Given a mapping from lexers to token generating functions (the
 ||| TokenMap a) and an input string, return a list of recognised tokens,
 ||| and the line, column, and remainder of the input at the first point in the
@@ -176,12 +171,12 @@ fastUnpack : String -> List Char
 export
 lex : TokenMap a -> String -> (List (WithBounds a), (Int, Int, String))
 lex tmap str
-    = let (ts, (l, c, str')) = tokenise (const False) 0 0 [] tmap (Lexer.Core.fastUnpack str) in
+    = let (ts, (l, c, str')) = tokenise (const False) 0 0 [] tmap (fastUnpack str) in
           (ts, (l, c, fastPack str'))
 
 export
 lexTo : (WithBounds a -> Bool) ->
         TokenMap a -> String -> (List (WithBounds a), (Int, Int, String))
 lexTo pred tmap str
-    = let (ts, (l, c, str')) = tokenise pred 0 0 [] tmap (Lexer.Core.fastUnpack str) in
+    = let (ts, (l, c, str')) = tokenise pred 0 0 [] tmap (fastUnpack str) in
           (ts, (l, c, fastPack str'))
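Note (illustration only, not part of this commit): with the local %foreign copy deleted, the unqualified fastUnpack here resolves to the standard library's version, which the bootstrap can now depend on. A hedged sketch of the round trip the lexer relies on (prop_roundTrip is a hypothetical name):

-- fastPack . fastUnpack should reproduce the input string.
prop_roundTrip : String -> Bool
prop_roundTrip s = fastPack (fastUnpack s) == s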