diff --git a/src/Idris/Parser.idr b/src/Idris/Parser.idr
index c5e69813b..d800fa5f8 100644
--- a/src/Idris/Parser.idr
+++ b/src/Idris/Parser.idr
@@ -49,13 +49,17 @@ plhs = MkParseOpts False False
 atom : FileName -> Rule PTerm
 atom fname
     = do start <- location
-         x <- constant
-         end <- location
-         pure (PPrimVal (MkFC fname start end) x)
-  <|> do start <- location
          exactIdent "Type"
          end <- location
          pure (PType (MkFC fname start end))
+  <|> do start <- location
+         x <- name
+         end <- location
+         pure (PRef (MkFC fname start end) x)
+  <|> do start <- location
+         x <- constant
+         end <- location
+         pure (PPrimVal (MkFC fname start end) x)
   <|> do start <- location
          symbol "_"
          end <- location
@@ -80,10 +84,6 @@ atom fname
          pragma "search"
          end <- location
          pure (PSearch (MkFC fname start end) 50)
-  <|> do start <- location
-         x <- name
-         end <- location
-         pure (PRef (MkFC fname start end) x)
 
 whereBlock : FileName -> Int -> Rule (List PDecl)
 whereBlock fname col
diff --git a/src/Parser/Support.idr b/src/Parser/Support.idr
index d9201c260..72d74a46d 100644
--- a/src/Parser/Support.idr
+++ b/src/Parser/Support.idr
@@ -155,23 +155,23 @@ escape' ('\\' :: 'o' :: xs)
 escape' ('\\' :: xs)
     = case span isDigit xs of
           ([], (a :: b :: c :: rest)) =>
-              case getEsc (pack (the (List _) [a, b, c])) of
+              case getEsc (fastPack (the (List _) [a, b, c])) of
                   Just v => Just (v :: !(assert_total (escape' rest)))
-                  Nothing => case getEsc (pack (the (List _) [a, b])) of
+                  Nothing => case getEsc (fastPack (the (List _) [a, b])) of
                                  Just v => Just (v :: !(assert_total (escape' (c :: rest))))
                                  Nothing => escape' xs
          ([], (a :: b :: [])) =>
-              case getEsc (pack (the (List _) [a, b])) of
+              case getEsc (fastPack (the (List _) [a, b])) of
                   Just v => Just (v :: [])
                   Nothing => escape' xs
          ([], rest) => assert_total (escape' rest)
-         (ds, rest) => Just $ cast (cast {to=Int} (pack ds)) ::
+         (ds, rest) => Just $ cast (cast {to=Int} (fastPack ds)) ::
                            !(assert_total (escape' rest))
 escape' (x :: xs) = Just $ x :: !(escape' xs)
 
 export
 escape : String -> Maybe String
-escape x = pure $ pack !(escape' (unpack x))
+escape x = pure $ fastPack !(escape' (unpack x))
 
 export
 getCharLit : String -> Maybe Char
diff --git a/src/Text/Lexer/Core.idr b/src/Text/Lexer/Core.idr
index af222e4e4..3f5757fbe 100644
--- a/src/Text/Lexer/Core.idr
+++ b/src/Text/Lexer/Core.idr
@@ -168,23 +168,11 @@ tokenise pred line col acc tmap str
     getFirstToken [] str = Nothing
    getFirstToken ((lex, fn) :: ts) str
        = case scan lex [] str of
-              Just (tok, rest) => Just (MkToken line col (fn (pack (reverse tok))),
+              Just (tok, rest) => Just (MkToken line col (fn (fastPack (reverse tok))),
                                         line + cast (countNLs tok),
                                         getCols tok col, rest)
               Nothing => getFirstToken ts str
 
--- faster than the prelude version since there's no intermediate StrM
-funpack' : String -> List Char -> List Char
-funpack' "" acc = reverse acc
-funpack' str acc
-    = assert_total $
-      let x = prim__strHead str
-          xs = prim__strTail str in
-          funpack' xs (x :: acc)
-
-funpack : String -> List Char
-funpack str = funpack' str []
-
 ||| Given a mapping from lexers to token generating functions (the
 ||| TokenMap a) and an input string, return a list of recognised tokens,
 ||| and the line, column, and remainder of the input at the first point in the
@@ -193,11 +181,11 @@ export
 lex : TokenMap a -> String -> (List (TokenData a), (Int, Int, String))
 lex tmap str
     = let (ts, (l, c, str')) = tokenise (const False) 0 0 [] tmap (unpack str) in
-          (ts, (l, c, pack str'))
+          (ts, (l, c, fastPack str'))
 
 export
 lexTo : (TokenData a -> Bool) ->
         TokenMap a -> String -> (List (TokenData a), (Int, Int, String))
 lexTo pred tmap str
     = let (ts, (l, c, str')) = tokenise pred 0 0 [] tmap (unpack str) in
-          (ts, (l, c, pack str'))
+          (ts, (l, c, fastPack str'))
diff --git a/src/Text/Parser/Core.idr b/src/Text/Parser/Core.idr
index 441291a5d..fdbf80c1d 100644
--- a/src/Text/Parser/Core.idr
+++ b/src/Text/Parser/Core.idr
@@ -38,7 +38,7 @@ data Grammar : (tok : Type) -> (consumes : Bool) -> Type -> Type where
 ||| guaranteed to consume some input. If the first one consumes input, the
 ||| second is allowed to be recursive (because it means some input has been
 ||| consumed and therefore the input is smaller)
-export -- %inline
+export %inline
 (>>=) : {c1, c2 : Bool} ->
         Grammar tok c1 a ->
         inf c1 (a -> Grammar tok c2 b) ->
@@ -49,7 +49,7 @@ export -- %inline
 ||| Sequence two grammars. If either consumes some input, the sequence is
 ||| guaranteed to consume input. This is an explicitly non-infinite version
 ||| of `>>=`.
-export
+export %inline
 seq : {c1,c2 : Bool} ->
       Grammar tok c1 a ->
       (a -> Grammar tok c2 b) ->
@@ -57,7 +57,7 @@ seq : {c1,c2 : Bool} ->
 seq = SeqEmpty
 
 ||| Sequence a grammar followed by the grammar it returns.
-export
+export %inline
 join : {c1,c2 : Bool} ->
        Grammar tok c1 (Grammar tok c2 a) ->
        Grammar tok (c1 || c2) a
@@ -66,7 +66,7 @@ join {c1 = True} p = SeqEat p id
 
 ||| Give two alternative grammars. If both consume, the combination is
 ||| guaranteed to consume.
-export
+export %inline
 (<|>) : {c1,c2 : Bool} ->
         Grammar tok c1 ty ->
         Lazy (Grammar tok c2 ty) ->
@@ -94,7 +94,7 @@ Functor (Grammar tok c) where
 ||| with value type `a`. If both succeed, apply the function
 ||| from the first grammar to the value from the second grammar.
 ||| Guaranteed to consume if either grammar consumes.
-export
+export %inline
 (<*>) : {c1, c2 : Bool} ->
         Grammar tok c1 (a -> b) ->
         Grammar tok c2 a ->
@@ -103,7 +103,7 @@ export
 
 ||| Sequence two grammars. If both succeed, use the value of the first one.
 ||| Guaranteed to consume if either grammar consumes.
-export
+export %inline
 (<*) : {c1,c2 : Bool} ->
        Grammar tok c1 a ->
        Grammar tok c2 b ->
@@ -112,7 +112,7 @@ export
 
 ||| Sequence two grammars. If both succeed, use the value of the second one.
 ||| Guaranteed to consume if either grammar consumes.
-export
+export %inline
 (*>) : {c1,c2 : Bool} ->
        Grammar tok c1 a ->
        Grammar tok c2 b ->
@@ -135,48 +135,48 @@ mapToken f (SeqEmpty act next) = SeqEmpty (mapToken f act) (\x => mapToken f (ne
 mapToken f (Alt x y) = Alt (mapToken f x) (mapToken f y)
 
 ||| Always succeed with the given value.
-export
+export %inline
 pure : (val : ty) -> Grammar tok False ty
 pure = Empty
 
 ||| Check whether the next token satisfies a predicate
-export
+export %inline
 nextIs : String -> (tok -> Bool) -> Grammar tok False tok
 nextIs = NextIs
 
 ||| Look at the next token in the input
-export
+export %inline
 peek : Grammar tok False tok
 peek = nextIs "Unrecognised token" (const True)
 
 ||| Succeeds if running the predicate on the next token returns Just x,
 ||| returning x. Otherwise fails.
-export
+export %inline
 terminal : String -> (tok -> Maybe a) -> Grammar tok True a
 terminal = Terminal
 
 ||| Always fail with a message
-export
+export %inline
 fail : String -> Grammar tok c ty
 fail = Fail False
 
-export
+export %inline
 fatalError : String -> Grammar tok c ty
 fatalError = Fail True
 
 ||| Succeed if the input is empty
-export
+export %inline
 eof : Grammar tok False ()
 eof = EOF
 
 ||| Commit to an alternative; if the current branch of an alternative
 ||| fails to parse, no more branches will be tried
-export
+export %inline
 commit : Grammar tok False ()
 commit = Commit
 
 ||| If the parser fails, treat it as a fatal error
-export
+export %inline
 mustWork : {c : Bool} -> Grammar tok c ty -> Grammar tok c ty
 mustWork = MustWork