make comments consistent

collin 2022-05-04 15:44:41 -07:00
parent 039a71359c
commit 214f6bf5d7
6 changed files with 1 addition and 37 deletions
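
The pattern is the same in every hunk: the empty `///` lines padding each doc comment are dropped, and comments that are not documentation (such as the note on `disallow_circuit_construction`) use plain `//` instead of `///`. Below is a minimal sketch of the convention using a hypothetical function, not code taken from the diff:

// Old style: the doc comment is padded with empty `///` lines.
///
/// Doubles the given value.
///
pub fn double_old(x: u32) -> u32 {
    x * 2
}

// New style: the doc comment body stands on its own, and internal
// remarks use plain `//` comments rather than `///`.
/// Doubles the given value.
pub fn double_new(x: u32) -> u32 {
    // non-doc implementation note
    x * 2
}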

View File

@@ -37,10 +37,8 @@ pub struct ParserContext<'a> {
     /// The previous token, i.e., if `p.tokens = ['3', *, '4']`,
     /// then after two `p.bump()`s, we'll have `p.token = '*'` and `p.prev_token = '3'`.
     pub(crate) prev_token: SpannedToken,
-    /// true if parsing an expression for if and loop statements -- means circuit inits are not legal
+    // true if parsing an expression for if and loop statements -- means circuit inits are not legal
     pub(crate) disallow_circuit_construction: bool,
     /// HACK(Centril): Place to store a dummy EOF.
     /// Exists to appease borrow checker for now.
     dummy_eof: SpannedToken,
@@ -166,10 +164,8 @@ impl<'a> ParserContext<'a> {
             .ok_or_else(|| ParserError::unexpected_str(&self.token.token, "ident", &self.token.span).into())
     }
-    ///
     /// Returns a reference to the next token if it is a [`GroupCoordinate`], or [None] if
     /// the next token is not a [`GroupCoordinate`].
-    ///
     fn peek_group_coordinate(&self, dist: &mut usize) -> Option<GroupCoordinate> {
         let (advanced, gc) = self.look_ahead(*dist, |t0| match &t0.token {
             Token::Add => Some((1, GroupCoordinate::SignHigh)),

View File

@@ -35,10 +35,8 @@ const INT_TYPES: &[Token] = &[
 ];
 impl ParserContext<'_> {
-    ///
     /// Returns an [`Expression`] AST node if the next token is an expression.
     /// Includes circuit init expressions.
-    ///
     pub fn parse_expression(&mut self) -> Result<Expression> {
         // Store current parser state.
         let prior_fuzzy_state = self.disallow_circuit_construction;

View File

@@ -20,9 +20,7 @@ use leo_errors::{ParserError, ParserWarning, Result};
 use leo_span::sym;
 impl ParserContext<'_> {
-    ///
     /// Returns a [`Program`] AST if all tokens can be consumed and represent a valid Leo program.
-    ///
     pub fn parse_program(&mut self) -> Result<Program> {
         let mut functions = IndexMap::new();
@@ -60,9 +58,7 @@ impl ParserContext<'_> {
         )
     }
-    ///
     /// Returns a [`ParamMode`] AST node if the next tokens represent a function parameter mode.
-    ///
     pub fn parse_function_parameter_mode(&mut self) -> Result<ParamMode> {
         let public = self.eat(&Token::Public).then(|| self.prev_token.span.clone());
         let constant = self.eat(&Token::Constant).then(|| self.prev_token.span.clone());
@@ -86,9 +82,7 @@ impl ParserContext<'_> {
         }
     }
-    ///
     /// Returns a [`FunctionInput`] AST node if the next tokens represent a function parameter.
-    ///
     pub fn parse_function_parameter(&mut self) -> Result<FunctionInput> {
         let mode = self.parse_function_parameter_mode()?;
         let mutable = self.eat(&Token::Mut).then(|| self.prev_token.clone());
@@ -143,10 +137,8 @@ impl ParserContext<'_> {
         ))
     }
-    ///
     /// Returns an [`(String, DefinitionStatement)`] AST node if the next tokens represent a global
     /// constant declaration.
-    ///
     pub fn parse_global_const_declaration(&mut self) -> Result<(Vec<Identifier>, DefinitionStatement)> {
         let statement = self.parse_definition_statement()?;
         let variable_names = statement

View File

@@ -22,10 +22,8 @@ use leo_span::sym;
 const ASSIGN_TOKENS: &[Token] = &[Token::Assign];
 impl ParserContext<'_> {
-    ///
     /// Returns an [`Identifier`] AST node if the given [`Expression`] AST node evaluates to an
     /// identifier access. The access is stored in the given accesses.
-    ///
     pub fn construct_assignee_access(expr: Expression, _accesses: &mut [AssigneeAccess]) -> Result<Identifier> {
         match expr {
             Expression::Identifier(id) => Ok(id),
@@ -33,9 +31,7 @@ impl ParserContext<'_> {
         }
     }
-    ///
     /// Returns an [`Assignee`] AST node from the given [`Expression`] AST node with accesses.
-    ///
     pub fn construct_assignee(expr: Expression) -> Result<Assignee> {
         let expr_span = expr.span().clone();
         let mut accesses = Vec::new();
@@ -48,9 +44,7 @@ impl ParserContext<'_> {
         })
     }
-    ///
     /// Returns a [`Statement`] AST node if the next tokens represent a statement.
-    ///
     pub fn parse_statement(&mut self) -> Result<Statement> {
         match &self.token.token {
             Token::Return => Ok(Statement::Return(self.parse_return_statement()?)),
@@ -63,9 +57,7 @@ impl ParserContext<'_> {
         }
     }
-    ///
     /// Returns a [`Block`] AST node if the next tokens represent a assign, or expression statement.
-    ///
     pub fn parse_assign_statement(&mut self) -> Result<Statement> {
         let expr = self.parse_expression()?;

View File

@@ -35,9 +35,7 @@ pub(crate) const TYPE_TOKENS: &[Token] = &[
 ];
 impl ParserContext<'_> {
-    ///
     /// Returns a [`IntegerType`] AST node if the given token is a supported integer type, or [`None`].
-    ///
     pub fn token_to_int_type(token: &Token) -> Option<IntegerType> {
         Some(match token {
             Token::I8 => IntegerType::I8,

View File

@@ -23,10 +23,8 @@ use serde::{Deserialize, Serialize};
 use std::{fmt, iter::Peekable, str::FromStr};
-///
 /// Returns a new `StrTendril` string if an identifier can be eaten, otherwise returns [`None`].
 /// An identifier can be eaten if its bytes are at the front of the given `input_tendril` string.
-///
 fn eat_identifier(input: &mut Peekable<impl Iterator<Item = char>>) -> Option<String> {
     match input.peek() {
         None => return None,
@@ -41,9 +39,7 @@ fn eat_identifier(input: &mut Peekable<impl Iterator<Item = char>>) -> Option<St
     Some(ident)
 }
-///
 /// Checks if a char is a Unicode Bidirectional Override code point
-///
 fn is_bidi_override(c: char) -> bool {
     let i = c as u32;
     (0x202A..=0x202E).contains(&i) || (0x2066..=0x2069).contains(&i)
@@ -150,9 +146,7 @@ impl Token {
         }
     }
-    ///
     /// Returns a `char` if a character can be eaten, otherwise returns [`None`].
-    ///
     fn eat_char(input: &mut Peekable<impl Iterator<Item = char>>) -> Result<(usize, Char)> {
         match input.next() {
             None => Err(ParserError::lexer_empty_input_tendril().into()),
@@ -161,10 +155,8 @@ impl Token {
         }
     }
-    ///
     /// Returns a tuple: [(integer length, integer token)] if an integer can be eaten, otherwise returns [`None`].
     /// An integer can be eaten if its bytes are at the front of the given `input_tendril` string.
-    ///
     fn eat_integer(input: &mut Peekable<impl Iterator<Item = char>>) -> Result<(usize, Token)> {
         if input.peek().is_none() {
             return Err(ParserError::lexer_empty_input_tendril().into());
@@ -184,10 +176,8 @@ impl Token {
         Ok((int.len(), Token::Int(int)))
     }
-    ///
     /// Returns a tuple: [(token length, token)] if the next token can be eaten, otherwise returns [`None`].
     /// The next token can be eaten if the bytes at the front of the given `input_tendril` string can be scanned into a token.
-    ///
     pub(crate) fn eat(input_tendril: &str) -> Result<(usize, Token)> {
         if input_tendril.is_empty() {
             return Err(ParserError::lexer_empty_input_tendril().into());
@@ -481,9 +471,7 @@ impl fmt::Debug for SpannedToken {
     }
 }
-///
 /// Returns true if the given string is a valid Aleo address.
-///
 pub(crate) fn check_address(address: &str) -> bool {
     Address::<Testnet2>::from_str(address).is_ok()
 }