fix: parsing of comments

This commit is contained in:
felipegchi 2022-12-05 10:55:50 -03:00
parent 4067027420
commit d4d8684f8c
4 changed files with 19 additions and 9 deletions

View File

@ -11,11 +11,13 @@ impl<'a> Lexer<'a> {
/// Single line comments
pub fn lex_comment(&mut self, start: usize) -> (Token, Range) {
self.next_char();
let mut is_doc = false;
if let Some('/') = self.peekable.peek() {
self.next_char();
is_doc = true;
}
let cmt = self.accumulate_while(&|x| x != '\n');
(
Token::Comment(is_doc, cmt.to_string()),

View File

@ -57,8 +57,9 @@ impl<'a> Lexer<'a> {
}
}
pub fn get_next_no_error(&mut self, vec: Sender<Box<dyn Diagnostic>>) -> (Token, Range) {
pub fn get_next_no_error(&mut self, vec: Sender<Box<dyn Diagnostic>>) -> (bool, Token, Range) {
loop {
let is_break = self.is_linebreak();
let (token, span) = self.lex_token();
match token {
Token::Error(x) => {
@ -68,11 +69,11 @@ impl<'a> Lexer<'a> {
Token::Comment(false, _) => continue,
_ => (),
}
return (token, span);
return (is_break, token, span);
}
}
pub fn lex_token(&mut self) -> (Token, Range) {
fn lex_token(&mut self) -> (Token, Range) {
let start = self.span();
match self.peekable.peek() {
None => (Token::Eof, self.mk_range(start)),

View File

@ -169,7 +169,8 @@ impl fmt::Display for Token {
Token::Bang => write!(f, "!"),
Token::HashHash => write!(f, "##"),
Token::Hash => write!(f, "#"),
Token::Comment(_, _) => write!(f, "docstring comment"),
Token::Comment(true, comment) => write!(f, "docstring '{comment}'"),
Token::Comment(false, comment) => write!(f, "comment '{comment}'"),
Token::Eof => write!(f, "End of file"),
Token::Error(_) => write!(f, "ERROR"),
Token::Return => write!(f, "return"),

View File

@ -33,9 +33,12 @@ impl<'a> Parser<'a> {
pub fn new(mut lexer: Lexer<'a>, sender: Sender<Box<dyn Diagnostic>>) -> Parser<'a> {
let mut queue = VecDeque::with_capacity(3);
let mut breaks = VecDeque::with_capacity(3);
for _ in 0..3 {
breaks.push_back(lexer.is_linebreak());
queue.push_back(lexer.get_next_no_error(sender.clone()));
let (is_break, token, range) = lexer.get_next_no_error(sender.clone());
breaks.push_back(is_break);
queue.push_back((token, range));
}
Parser {
lexer,
@ -50,9 +53,12 @@ impl<'a> Parser<'a> {
/// Pops the current token off the front of the lookahead queue and
/// refills the back with the next token from the lexer, keeping the
/// `breaks` deque (one linebreak flag per queued token) in lockstep
/// with `queue`.
///
/// Returns the token (and its source range) that was at the front of
/// the queue before this call.
pub fn advance(&mut self) -> (Token, Range) {
    let cur = self.queue.pop_front().unwrap();
    self.breaks.pop_front();
    // Fetch exactly one new token; get_next_no_error also reports
    // whether a linebreak preceded it, so both deques stay aligned.
    // NOTE: the rendered diff showed stale pre-change lines here that
    // called the lexer a second time per advance; only the single
    // fetch below is correct.
    let (is_break, token, range) = self.lexer.get_next_no_error(self.dignostic_channel.clone());
    self.breaks.push_back(is_break);
    self.queue.push_back((token, range));
    self.eaten += 1;
    cur
}