From ad43c509022858dc6ae618def735db1f50e56dfc Mon Sep 17 00:00:00 2001
From: Alessandro Coglio
Date: Tue, 31 May 2022 22:24:17 -0700
Subject: [PATCH] Remove remnant mentions of tendril. Rename a function. Rename two variables. Update some comments. Remove a README entry that is no longer there.

---
 compiler/parser/src/tokenizer/lexer.rs        | 28 +++++++++----------
 leo/errors/README.md                          |  1 -
 leo/errors/src/errors/parser/parser_errors.rs |  4 +--
 3 files changed, 16 insertions(+), 17 deletions(-)

diff --git a/compiler/parser/src/tokenizer/lexer.rs b/compiler/parser/src/tokenizer/lexer.rs
index 6c295873ff..3f87f2a808 100644
--- a/compiler/parser/src/tokenizer/lexer.rs
+++ b/compiler/parser/src/tokenizer/lexer.rs
@@ -50,7 +50,7 @@ impl Token {
         } else if let Some(c) = input.next() {
             return Err(ParserError::lexer_unopened_escaped_unicode_char(c).into());
         } else {
-            return Err(ParserError::lexer_empty_input_tendril().into());
+            return Err(ParserError::lexer_empty_input().into());
         }

         while let Some(c) = input.next_if(|c| c != &'}') {
@@ -96,7 +96,7 @@ impl Token {
         } else if let Some(c) = input.next() {
             return Err(ParserError::lexer_expected_valid_hex_char(c).into());
         } else {
-            return Err(ParserError::lexer_empty_input_tendril().into());
+            return Err(ParserError::lexer_empty_input().into());
         }

         // Second hex character.
@@ -106,7 +106,7 @@ impl Token {
         } else if let Some(c) = input.next() {
             return Err(ParserError::lexer_expected_valid_hex_char(c).into());
         } else {
-            return Err(ParserError::lexer_empty_input_tendril().into());
+            return Err(ParserError::lexer_empty_input().into());
         }

         if let Ok(ascii_number) = u8::from_str_radix(&hex, 16) {
@@ -123,7 +123,7 @@ impl Token {

     fn eat_escaped_char(input: &mut Peekable<impl Iterator<Item = char>>) -> Result<(usize, Char)> {
         match input.next() {
-            None => Err(ParserError::lexer_empty_input_tendril().into()),
+            None => Err(ParserError::lexer_empty_input().into()),
             // Length of 2 to account the '\'.
             Some('0') => Ok((2, Char::Scalar(0 as char))),
             Some('t') => Ok((2, Char::Scalar(9 as char))),
@@ -141,17 +141,17 @@ impl Token {
     /// Returns a `char` if a character can be eaten, otherwise returns [`None`].
     fn eat_char(input: &mut Peekable<impl Iterator<Item = char>>) -> Result<(usize, Char)> {
         match input.next() {
-            None => Err(ParserError::lexer_empty_input_tendril().into()),
+            None => Err(ParserError::lexer_empty_input().into()),
             Some('\\') => Self::eat_escaped_char(input),
             Some(c) => Ok((c.len_utf8(), Char::Scalar(c))),
         }
     }

     /// Returns a tuple: [(integer length, integer token)] if an integer can be eaten, otherwise returns [`None`].
-    /// An integer can be eaten if its bytes are at the front of the given `input_tendril` string.
+    /// An integer can be eaten if its bytes are at the front of the given `input` string.
     fn eat_integer(input: &mut Peekable<impl Iterator<Item = char>>) -> Result<(usize, Token)> {
         if input.peek().is_none() {
-            return Err(ParserError::lexer_empty_input_tendril().into());
+            return Err(ParserError::lexer_empty_input().into());
         }

         let mut int = String::new();
@@ -169,13 +169,13 @@ impl Token {
     }

     /// Returns a tuple: [(token length, token)] if the next token can be eaten, otherwise returns [`None`].
-    /// The next token can be eaten if the bytes at the front of the given `input_tendril` string can be scanned into a token.
-    pub(crate) fn eat(input_tendril: &str) -> Result<(usize, Token)> {
-        if input_tendril.is_empty() {
-            return Err(ParserError::lexer_empty_input_tendril().into());
+    /// The next token can be eaten if the bytes at the front of the given `input` string can be scanned into a token.
+    pub(crate) fn eat(input: &str) -> Result<(usize, Token)> {
+        if input.is_empty() {
+            return Err(ParserError::lexer_empty_input().into());
         }

-        let mut input = input_tendril.chars().peekable();
+        let mut input = input.chars().peekable();

         match input.peek() {
             Some(x) if x.is_ascii_whitespace() => {
@@ -224,7 +224,7 @@ impl Token {
                 if input.next_if_eq(&'&').is_some() {
                     return Ok((2, Token::And));
                 }
-                return Err(ParserError::lexer_empty_input_tendril().into());
+                return Err(ParserError::lexer_empty_input().into());
             }
             Some('(') => {
                 input.next();
@@ -364,7 +364,7 @@ impl Token {
                 } else if let Some(found) = input.next() {
                     return Err(ParserError::lexer_expected_but_found(found, '|').into());
                 } else {
-                    return Err(ParserError::lexer_empty_input_tendril().into());
+                    return Err(ParserError::lexer_empty_input().into());
                 }
             }
             _ => (),
diff --git a/leo/errors/README.md b/leo/errors/README.md
index 0a0c4aa726..1e334f0f1d 100644
--- a/leo/errors/README.md
+++ b/leo/errors/README.md
@@ -23,7 +23,6 @@ The common section of this crate contains a few sub files:
 - [Formatted Error](./src/common/formatted.rs): Which contains the information needed to create a formatted error for Leo.
 - [Macros](./src/common/macros.rs): Which contains the logic to make creating errors easy through a DSL. It also figures out the error codes for each error via a **top down** method. Meaning all new errors should be added to the bottom of the file. You can specify whether an error is formatted or backtraced through a decorator above a function name, where the formatted ones require a Span as an argument by default. The body takes any additional arguments you want provided to the function, the message, and the optional help message for the error. The additional arguments are just specified to implement traits to avoid as many type conversions in other Leo crates.
 - [Span](./src/common/span.rs): Which contains the span object used throughout the other Leo crates (with the exception of the Input crate see more [below](#input)).
-- [Tendril JSON](./src/common/tendril_json.rs): Which contains the common logic for how to searlize a StrTendril from the tendril crate.
 - [Traits](./src/common/traits.rs): Which contains the common traits in errors to make defining errors easier.

 ## Error Types
diff --git a/leo/errors/src/errors/parser/parser_errors.rs b/leo/errors/src/errors/parser/parser_errors.rs
index 721ee49c2e..f750fc57ad 100644
--- a/leo/errors/src/errors/parser/parser_errors.rs
+++ b/leo/errors/src/errors/parser/parser_errors.rs
@@ -200,9 +200,9 @@ create_messages!(
         help: None,
     }

-    /// When an empty input tendril was expected but not found.
+    /// When more input was expected but not found.
     @backtraced
-    lexer_empty_input_tendril {
+    lexer_empty_input {
         args: (),
         msg: "Expected more characters to lex but found none.",
         help: None,
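
For context on how the renamed error is consumed, the sketch below imitates the call-site pattern visible in the lexer hunks: a backtraced, constructor-style error (standing in for the `lexer_empty_input` entry that `create_messages!` generates in leo/errors) is converted into the crate-wide result type with `.into()` whenever the lexer runs out of characters. The `ParserError` and `LeoError` stand-ins and the simplified `eat` function are illustrative assumptions only, not the actual leo_errors or leo-parser API.

// Minimal, self-contained sketch of the error-construction pattern from this
// patch. All types here are stand-ins; only the shape of the calls mirrors
// the diff above.
#[derive(Debug)]
struct ParserError(String);

impl ParserError {
    // Stands in for the `lexer_empty_input` entry defined via `create_messages!`.
    fn lexer_empty_input() -> Self {
        ParserError("Expected more characters to lex but found none.".to_string())
    }
}

// Stand-in for the crate-wide error type that `.into()` targets.
#[derive(Debug)]
struct LeoError(String);

impl From<ParserError> for LeoError {
    fn from(e: ParserError) -> Self {
        LeoError(e.0)
    }
}

type Result<T> = std::result::Result<T, LeoError>;

// Mirrors the shape of `Token::eat` after the rename: empty input is rejected
// up front with `lexer_empty_input` instead of `lexer_empty_input_tendril`.
fn eat(input: &str) -> Result<(usize, char)> {
    if input.is_empty() {
        return Err(ParserError::lexer_empty_input().into());
    }
    let mut chars = input.chars().peekable();
    let c = *chars.peek().expect("non-empty input has a first char");
    Ok((c.len_utf8(), c))
}

fn main() {
    assert!(eat("").is_err());
    assert_eq!(eat("a").unwrap(), (1, 'a'));
}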