got input parser working, stashing changes

This commit is contained in:
damirka 2022-02-07 18:10:05 +03:00
parent 5f40bb3647
commit aa5af63c28
6 changed files with 874 additions and 9 deletions

53
leo/commands/verify.rs Normal file
View File

@ -0,0 +1,53 @@
// Copyright (C) 2019-2022 Aleo Systems Inc.
// This file is part of the Leo library.
// The Leo library is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// The Leo library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use crate::{commands::Command, context::Context};
use leo_errors::Result;
use std::path::PathBuf;
use structopt::StructOpt;
use tracing::span::Span;
/// Verify proof
#[derive(StructOpt, Debug)]
#[structopt(setting = structopt::clap::AppSettings::ColoredHelp)]
pub struct Verify {
    /// Optional path to the verifying key file; when absent, a default
    /// location is presumably used — TODO confirm once `apply` is implemented.
    #[structopt(long = "verifying-key", help = "Path to the verifying key", parse(from_os_str))]
    pub(crate) verifying_key: Option<PathBuf>,
    /// Positional path to the proof file to verify.
    #[structopt(parse(from_os_str))]
    pub(crate) proof: Option<PathBuf>
}
impl<'a> Command<'a> for Verify {
    type Input = ();
    type Output = ();

    /// Returns the tracing span under which this command's log output is grouped.
    fn log_span(&self) -> Span {
        tracing::span!(tracing::Level::INFO, "Verifying")
    }

    /// No setup work is needed before verification.
    fn prelude(&self, _: Context<'a>) -> Result<Self::Input> {
        Ok(())
    }

    /// Runs the verification step.
    ///
    /// NOTE(review): the stashed draft contained a dangling `ProvingKey::new()`
    /// expression (no trailing semicolon, and `ProvingKey` is not in scope),
    /// which does not compile. It has been removed; actual proof verification
    /// is still to be implemented.
    fn apply(self, _: Context<'a>, _: Self::Input) -> Result<Self::Output> {
        // TODO: load the verifying key and the proof, then verify.
        Ok(())
    }
}

View File

@ -28,8 +28,15 @@ fn to_leo_tree(filepath: &Path) -> Result<String> {
// Parses the Leo file constructing an ast which is then serialized.
create_session_if_not_set_then(|_| {
let handler = Handler::default();
let ast = leo_parser::parse_program_input( program_string, filepath.to_str().unwrap())?;
let _ast = leo_parser::parse_program_input(
&handler,
program_string.clone(),
filepath.to_str().unwrap(),
program_string,
filepath.to_str().unwrap()
)?;
// Ok(Input::to_json_string(&ast).expect("serialization failed"))
Ok("aa".to_string())
})
}

View File

@ -0,0 +1,450 @@
// Copyright (C) 2019-2022 Aleo Systems Inc.
// This file is part of the Leo library.
// The Leo library is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// The Leo library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use crate::{assert_no_whitespace, tokenizer::*, Token, KEYWORD_TOKENS};
use leo_ast::*;
use leo_errors::emitter::Handler;
use leo_errors::{LeoError, ParserError, Result};
use leo_span::{Span, Symbol};
use std::{borrow::Cow, unreachable};
use tendril::format_tendril;
/// Stores a program in tokenized format plus additional context.
/// May be converted into a [`Program`] AST by parsing all tokens.
pub struct InputParserContext<'a> {
    /// Error/warning sink shared with the rest of the compiler.
    #[allow(dead_code)]
    pub(crate) handler: &'a Handler,
    // Token stream stored in reverse source order (see `new`), so `Vec::pop`
    // in `bump` yields the next token in source order.
    tokens: Vec<SpannedToken>,
    // Span of the last meaningful token; used for unexpected-EOF errors.
    end_span: Span,
    // true if parsing an expression for an if statement -- means circuit inits are not legal
    pub(crate) fuzzy_struct_state: bool,
}
impl Iterator for InputParserContext<'_> {
type Item = SpannedToken;
fn next(&mut self) -> Option<SpannedToken> {
self.bump()
}
}
impl<'a> InputParserContext<'a> {
    /// Returns a new [`InputParserContext`] type given a vector of tokens.
    ///
    /// Comment tokens are stripped and the stream is reversed so that
    /// [`Self::bump`] can pop tokens in source order.
    pub fn new(handler: &'a Handler, mut tokens: Vec<SpannedToken>) -> Self {
        tokens.reverse();
        // todo: performance optimization here: drain filter
        tokens = tokens
            .into_iter()
            .filter(|x| !matches!(x.token, Token::CommentLine(_) | Token::CommentBlock(_)))
            .collect();
        Self {
            handler,
            // Tokens are reversed above, so the first non-blank entry found here
            // is the last meaningful token of the source — used for EOF spans.
            end_span: tokens
                .iter()
                .find(|x| !x.span.content.trim().is_empty())
                .map(|x| x.span.clone())
                .unwrap_or_default(),
            tokens,
            fuzzy_struct_state: false,
        }
    }

    /// Returns the current token if there is one.
    pub fn peek_option(&self) -> Option<&SpannedToken> {
        self.tokens.last()
    }

    /// Emit the error `err`.
    pub(crate) fn emit_err(&self, err: ParserError) {
        self.handler.emit_err(err.into());
    }

    /// Returns an unexpected end of function [`SyntaxError`].
    pub fn eof(&self) -> LeoError {
        ParserError::unexpected_eof(&self.end_span).into()
    }

    /// Returns a reference to the next SpannedToken or error if it does not exist.
    pub fn peek_next(&self) -> Result<&SpannedToken> {
        // `checked_sub` avoids the `usize` underflow panic the previous
        // `self.tokens.len() - 2` hit when fewer than two tokens remain.
        self.tokens
            .len()
            .checked_sub(2)
            .and_then(|i| self.tokens.get(i))
            .ok_or_else(|| self.eof())
    }

    /// Returns a reference to the current SpannedToken or error if it does not exist.
    pub fn peek(&self) -> Result<&SpannedToken> {
        self.tokens.last().ok_or_else(|| self.eof())
    }

    /// Returns a reference to the next Token.
    pub fn peek_token(&self) -> Cow<'_, Token> {
        self.peek_option()
            .map(|x| &x.token)
            .map(Cow::Borrowed)
            .unwrap_or_else(|| Cow::Owned(Token::Eof))
    }

    /// Returns true if the next token exists.
    pub fn has_next(&self) -> bool {
        !self.tokens.is_empty()
    }

    /// Advances the current token.
    pub fn bump(&mut self) -> Option<SpannedToken> {
        self.tokens.pop()
    }

    /// Removes the next token if it exists and returns it, or [None] if
    /// the next token does not exist.
    pub fn eat(&mut self, token: Token) -> Option<SpannedToken> {
        if let Some(SpannedToken { token: inner, .. }) = self.peek_option() {
            if &token == inner {
                return self.bump();
            }
        }
        None
    }

    /// Appends a token to the back of the vector.
    pub fn backtrack(&mut self, token: SpannedToken) {
        self.tokens.push(token);
    }

    /// Removes the next token if it is a [`Token::Ident(_)`] and returns it, or [None] if
    /// the next token is not a [`Token::Ident(_)`] or if the next token does not exist.
    pub fn eat_identifier(&mut self) -> Option<Identifier> {
        if let Some(SpannedToken {
            token: Token::Ident(_), ..
        }) = self.peek_option()
        {
            if let SpannedToken {
                token: Token::Ident(name),
                span,
            } = self.bump().unwrap()
            {
                return Some(Identifier { name, span });
            } else {
                unreachable!("eat_identifier_ shouldn't produce this")
            }
        }
        None
    }

    /// Returns a reference to the next token if it is a [`GroupCoordinate`], or [None] if
    /// the next token is not a [`GroupCoordinate`].
    ///
    /// `*i` is one past the index of the token being inspected (the stream is
    /// stored reversed) and is decremented for every token consumed, so the
    /// caller can continue scanning from where this left off.
    fn peek_group_coordinate(&self, i: &mut usize) -> Option<GroupCoordinate> {
        if *i < 1 {
            return None;
        }
        let token = self.tokens.get(*i - 1)?;
        *i -= 1;
        Some(match &token.token {
            Token::Add => GroupCoordinate::SignHigh,
            Token::Minus if *i > 0 => match self.tokens.get(*i - 1) {
                Some(SpannedToken {
                    token: Token::Int(value),
                    span,
                }) => {
                    if *i < 1 {
                        return None;
                    }
                    *i -= 1;
                    // A minus followed by an integer folds into one negative number.
                    GroupCoordinate::Number(format_tendril!("-{}", value), span.clone())
                }
                _ => GroupCoordinate::SignLow,
            },
            Token::Underscore => GroupCoordinate::Inferred,
            Token::Int(value) => GroupCoordinate::Number(value.clone(), token.span.clone()),
            _ => return None,
        })
    }

    /// Returns `true` if the next token is Function or if it is a Const followed by Function.
    /// Returns `false` otherwise.
    pub fn peek_is_function(&self) -> Result<bool> {
        let first = &self.peek()?.token;
        let next = if self.tokens.len() >= 2 {
            &self.peek_next()?.token
        } else {
            return Ok(false);
        };
        Ok(matches!(
            (first, next),
            (Token::Function | Token::At, _) | (Token::Const, Token::Function)
        ))
    }

    /// Removes the next two tokens if they are a pair of [`GroupCoordinate`] and returns them,
    /// or [None] if the next token is not a [`GroupCoordinate`].
    ///
    /// Expects the shape `x, y)group` (the opening `(` was consumed by the
    /// caller). Tokens are only drained from the stream once the whole shape
    /// has matched, so a failed probe leaves the stream untouched.
    pub fn eat_group_partial(&mut self) -> Option<Result<(GroupCoordinate, GroupCoordinate, Span)>> {
        let mut i = self.tokens.len();
        if i < 1 {
            return None;
        }
        let start_span = self.tokens.get(i - 1)?.span.clone();
        let first = self.peek_group_coordinate(&mut i)?;
        if i < 1 {
            return None;
        }
        match self.tokens.get(i - 1) {
            Some(SpannedToken {
                token: Token::Comma, ..
            }) => {
                i -= 1;
            }
            _ => {
                return None;
            }
        }
        let second = self.peek_group_coordinate(&mut i)?;
        if i < 1 {
            return None;
        }
        let right_paren_span;
        match self.tokens.get(i - 1) {
            Some(SpannedToken {
                token: Token::RightParen,
                span,
            }) => {
                right_paren_span = span.clone();
                i -= 1;
            }
            _ => {
                return None;
            }
        }
        if i < 1 {
            return None;
        }
        let end_span;
        match self.tokens.get(i - 1) {
            Some(SpannedToken {
                token: Token::Group,
                span,
            }) => {
                end_span = span.clone();
                i -= 1;
            }
            _ => {
                return None;
            }
        }
        // Commit: everything from index `i` to the top of the (reversed)
        // stream has been matched, so drop it in one go.
        self.tokens.drain(i..);
        if let Err(e) = assert_no_whitespace(
            &right_paren_span,
            &end_span,
            &format!("({},{})", first, second),
            "group",
        ) {
            return Some(Err(e));
        }
        Some(Ok((first, second, start_span + end_span)))
    }

    /// Removes the next token if it is a [`Token::Int(_)`] and returns it, or [None] if
    /// the next token is not a [`Token::Int(_)`] or if the next token does not exist.
    pub fn eat_int(&mut self) -> Option<(PositiveNumber, Span)> {
        if let Some(SpannedToken {
            token: Token::Int(_), ..
        }) = self.peek_option()
        {
            if let SpannedToken {
                token: Token::Int(value),
                span,
            } = self.bump().unwrap()
            {
                return Some((PositiveNumber { value }, span));
            } else {
                unreachable!("eat_int_ shouldn't produce this")
            }
        }
        None
    }

    /// Removes the next token if it exists and returns it, or [None] if
    /// the next token does not exist.
    pub fn eat_any(&mut self, token: &[Token]) -> Option<SpannedToken> {
        if let Some(SpannedToken { token: inner, .. }) = self.peek_option() {
            if token.iter().any(|x| x == inner) {
                return self.bump();
            }
        }
        None
    }

    /// Returns the span of the next token if it is equal to the given [`Token`], or error.
    pub fn expect(&mut self, token: Token) -> Result<Span> {
        if let Some(SpannedToken { token: inner, span }) = self.peek_option() {
            if &token == inner {
                Ok(self.bump().unwrap().span)
            } else {
                Err(ParserError::unexpected(inner, token, span).into())
            }
        } else {
            Err(self.eof())
        }
    }

    /// Returns the span of the next token if it is equal to one of the given [`Token`]s, or error.
    pub fn expect_oneof(&mut self, token: &[Token]) -> Result<SpannedToken> {
        if let Some(SpannedToken { token: inner, span }) = self.peek_option() {
            if token.iter().any(|x| x == inner) {
                Ok(self.bump().unwrap())
            } else {
                return Err(ParserError::unexpected(
                    inner,
                    token.iter().map(|x| format!("'{}'", x)).collect::<Vec<_>>().join(", "),
                    span,
                )
                .into());
            }
        } else {
            Err(self.eof())
        }
    }

    /// Returns the [`Identifier`] of the next token if it is a keyword,
    /// [`Token::Int(_)`], or an [`Identifier`], or error.
    pub fn expect_loose_identifier(&mut self) -> Result<Identifier> {
        if let Some(token) = self.eat_any(KEYWORD_TOKENS) {
            return Ok(Identifier {
                name: token.token.keyword_to_symbol().unwrap(),
                span: token.span,
            });
        }
        if let Some((int, span)) = self.eat_int() {
            let name = Symbol::intern(&int.value);
            return Ok(Identifier { name, span });
        }
        self.expect_ident()
    }

    /// Returns the [`Identifier`] of the next token if it is an [`Identifier`], or error.
    pub fn expect_ident(&mut self) -> Result<Identifier> {
        if let Some(SpannedToken { token: inner, span }) = self.peek_option() {
            if let Token::Ident(_) = inner {
                if let SpannedToken {
                    token: Token::Ident(name),
                    span,
                } = self.bump().unwrap()
                {
                    Ok(Identifier { name, span })
                } else {
                    unreachable!("expect_ident_ shouldn't produce this")
                }
            } else {
                Err(ParserError::unexpected_str(inner, "ident", span).into())
            }
        } else {
            Err(self.eof())
        }
    }

    /// Returns the next token if it exists or return end of function.
    pub fn expect_any(&mut self) -> Result<SpannedToken> {
        if let Some(x) = self.tokens.pop() {
            Ok(x)
        } else {
            Err(self.eof())
        }
    }

    /// Parses a list of `T`s using `inner`
    /// The opening and closing delimiters are `bra` and `ket`,
    /// and elements in the list are separated by `sep`.
    /// When `(list, true)` is returned, `sep` was a terminator.
    pub(super) fn parse_list<T>(
        &mut self,
        open: Token,
        close: Token,
        sep: Token,
        mut inner: impl FnMut(&mut Self) -> Result<Option<T>>,
    ) -> Result<(Vec<T>, bool, Span)> {
        let mut list = Vec::new();
        let mut trailing = false;
        // Parse opening delimiter.
        let open_span = self.expect(open)?;
        while self.peek()?.token != close {
            // Parse the element. We allow inner parser recovery through the `Option`.
            if let Some(elem) = inner(self)? {
                list.push(elem);
            }
            // Parse the separator.
            if self.eat(sep.clone()).is_none() {
                trailing = false;
                break;
            }
            trailing = true;
        }
        // Parse closing delimiter.
        let close_span = self.expect(close)?;
        Ok((list, trailing, open_span + close_span))
    }

    /// Parse a list separated by `,` and delimited by parens.
    pub(super) fn parse_paren_comma_list<T>(
        &mut self,
        f: impl FnMut(&mut Self) -> Result<Option<T>>,
    ) -> Result<(Vec<T>, bool, Span)> {
        self.parse_list(Token::LeftParen, Token::RightParen, Token::Comma, f)
    }

    /// Returns true if the current token is `(`.
    pub(super) fn peek_is_left_par(&self) -> bool {
        matches!(self.peek_option().map(|t| &t.token), Some(Token::LeftParen))
    }
}

View File

@ -0,0 +1,345 @@
// Copyright (C) 2019-2022 Aleo Systems Inc.
// This file is part of the Leo library.
// The Leo library is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// The Leo library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use super::*;
use crate::KEYWORD_TOKENS;
use smallvec::smallvec;
use leo_errors::{ParserError, Result};
use leo_span::sym;
use crate::{Token, SpannedToken};
use tendril::format_tendril;
// Tokens accepted as a type suffix immediately after an integer literal
// (e.g. `1u8`, `2field`, `3group`) in `parse_primary_expression`.
const INT_TYPES: &[Token] = &[
    Token::I8,
    Token::I16,
    Token::I32,
    Token::I64,
    Token::I128,
    Token::U8,
    Token::U16,
    Token::U32,
    Token::U64,
    Token::U128,
    Token::Field,
    Token::Group,
];

// Tokens that may begin a primitive type in `parse_type`.
pub(crate) const TYPE_TOKENS: &[Token] = &[
    Token::I8,
    Token::I16,
    Token::I32,
    Token::I64,
    Token::I128,
    Token::U8,
    Token::U16,
    Token::U32,
    Token::U64,
    Token::U128,
    Token::Field,
    Token::Group,
    Token::Address,
    Token::Bool,
    Token::Char,
];
impl InputParserContext<'_> {
pub fn token_to_int_type(token: Token) -> Option<IntegerType> {
Some(match token {
Token::I8 => IntegerType::I8,
Token::I16 => IntegerType::I16,
Token::I32 => IntegerType::I32,
Token::I64 => IntegerType::I64,
Token::I128 => IntegerType::I128,
Token::U8 => IntegerType::U8,
Token::U16 => IntegerType::U16,
Token::U32 => IntegerType::U32,
Token::U64 => IntegerType::U64,
Token::U128 => IntegerType::U128,
_ => return None,
})
}
///
/// Returns a [`Program`] AST if all tokens can be consumed and represent a valid Leo program.
///
pub fn parse_input(&mut self) -> Result<Input> {
while self.has_next() {
let token = self.peek()?;
match token.token {
Token::LeftSquare => {
let (section, definitions) = self.parse_section()?;
println!("Section: {}, Definitions (len): {}", section, definitions.len());
},
_ => ()
};
}
Ok(Input::new())
}
pub fn parse_section(&mut self) -> Result<(Identifier, IndexMap<Identifier, (Type, Expression)>)> {
self.expect(Token::LeftSquare)?;
let section = self.expect_ident()?;
self.expect(Token::RightSquare)?;
let mut assignments = IndexMap::new();
while self.has_next() {
let token = &self.peek()?.token;
if let Token::Ident(_) = token {
let (ident, (type_, value)) = self.parse_assignment()?;
assignments.insert(ident, (type_, value));
} else {
break;
}
}
Ok((section, assignments))
}
pub fn parse_assignment(&mut self) -> Result<(Identifier, (Type, Expression))> {
let var = self.expect_ident()?;
self.expect(Token::Colon)?;
let (type_, _span) = self.parse_type()?;
self.expect(Token::Assign)?;
let value = self.parse_primary_expression()?;
self.expect(Token::Semicolon)?;
Ok((var, (type_, value)))
}
/// Returns a [`(Type, Span)`] tuple of AST nodes if the next token represents a type.
/// Also returns the span of the parsed token.
pub fn parse_type(&mut self) -> Result<(Type, Span)> {
Ok(if let Some(token) = self.eat(Token::BigSelf) {
(Type::SelfType, token.span)
} else if let Some(ident) = self.eat_identifier() {
let span = ident.span.clone();
(Type::Identifier(ident), span)
} else if self.peek_is_left_par() {
let (types, _, span) = self.parse_paren_comma_list(|p| p.parse_type().map(|t| Some(t.0)))?;
(Type::Tuple(types), span)
} else if let Some(token) = self.eat(Token::LeftSquare) {
let (inner, _) = self.parse_type()?;
self.expect(Token::Semicolon)?;
let dimensions = self.parse_array_dimensions()?;
let end_span = self.expect(Token::RightSquare)?;
(Type::Array(Box::new(inner), dimensions), token.span + end_span)
} else {
let token = self.expect_oneof(TYPE_TOKENS)?;
(
match token.token {
Token::Field => Type::Field,
Token::Group => Type::Group,
Token::Address => Type::Address,
Token::Bool => Type::Boolean,
Token::Char => Type::Char,
x => Type::IntegerType(Self::token_to_int_type(x).expect("invalid int type")),
},
token.span,
)
})
}
/// Returns an [`ArrayDimensions`] AST node if the next tokens represent dimensions for an array type.
pub fn parse_array_dimensions(&mut self) -> Result<ArrayDimensions> {
Ok(if let Some(dim) = self.parse_array_dimension() {
ArrayDimensions(smallvec![dim])
} else {
let mut had_item_err = false;
let (dims, _, span) = self.parse_paren_comma_list(|p| {
Ok(if let Some(dim) = p.parse_array_dimension() {
Some(dim)
} else {
let token = p.expect_any()?;
p.emit_err(ParserError::unexpected_str(&token.token, "int", &token.span));
had_item_err = true;
None
})
})?;
if dims.is_empty() && !had_item_err {
self.emit_err(ParserError::array_tuple_dimensions_empty(&span));
}
ArrayDimensions(dims.into())
})
}
/// Parses a basic array dimension, i.e., an integer or `_`.
fn parse_array_dimension(&mut self) -> Option<Dimension> {
if let Some((int, _)) = self.eat_int() {
Some(Dimension::Number(int))
} else if self.eat(Token::Underscore).is_some() {
Some(Dimension::Unspecified)
} else {
None
}
}
///
/// Returns an [`Expression`] AST node if the next token is a primary expression:
/// - Literals: field, group, unsigned integer, signed integer, boolean, address
/// - Aggregate types: array, tuple
/// - Identifiers: variables, keywords
/// - self
///
/// Returns an expression error if the token cannot be matched.
///
pub fn parse_primary_expression(&mut self) -> Result<Expression> {
let SpannedToken { token, span } = self.expect_any()?;
Ok(match token {
Token::Int(value) => {
let type_ = self.eat_any(INT_TYPES);
match type_ {
Some(SpannedToken {
token: Token::Field,
span: type_span,
}) => {
assert_no_whitespace(&span, &type_span, &value, "field")?;
Expression::Value(ValueExpression::Field(value, span + type_span))
}
Some(SpannedToken {
token: Token::Group,
span: type_span,
}) => {
assert_no_whitespace(&span, &type_span, &value, "group")?;
Expression::Value(ValueExpression::Group(Box::new(GroupValue::Single(
value,
span + type_span,
))))
}
Some(SpannedToken { token, span: type_span }) => {
assert_no_whitespace(&span, &type_span, &value, &token.to_string())?;
Expression::Value(ValueExpression::Integer(
Self::token_to_int_type(token).expect("unknown int type token"),
value,
span + type_span,
))
}
None => Expression::Value(ValueExpression::Implicit(value, span)),
}
}
Token::True => Expression::Value(ValueExpression::Boolean("true".into(), span)),
Token::False => Expression::Value(ValueExpression::Boolean("false".into(), span)),
Token::AddressLit(value) => Expression::Value(ValueExpression::Address(value, span)),
Token::CharLit(value) => Expression::Value(ValueExpression::Char(CharValue {
character: value.into(),
span,
})),
Token::StringLit(value) => Expression::Value(ValueExpression::String(value, span)),
Token::LeftParen => self.parse_tuple_expression(&span)?,
Token::LeftSquare => self.parse_array_expression(&span)?,
Token::Ident(name) => Expression::Identifier(Identifier { name, span }),
Token::Input => Expression::Identifier(Identifier { name: sym::input, span }),
t if crate::type_::TYPE_TOKENS.contains(&t) => Expression::Identifier(Identifier {
name: t.keyword_to_symbol().unwrap(),
span,
}),
token => {
return Err(ParserError::unexpected_str(token, "expression", &span).into());
}
})
}
pub fn parse_tuple_expression(&mut self, span: &Span) -> Result<Expression> {
if let Some((left, right, span)) = self.eat_group_partial().transpose()? {
return Ok(Expression::Value(ValueExpression::Group(Box::new(GroupValue::Tuple(
GroupTuple {
span,
x: left,
y: right,
},
)))));
}
let mut args = Vec::new();
let end_span;
loop {
let end = self.eat(Token::RightParen);
if let Some(end) = end {
end_span = end.span;
break;
}
let expr = self.parse_primary_expression()?;
args.push(expr);
if self.eat(Token::Comma).is_none() {
end_span = self.expect(Token::RightParen)?;
break;
}
}
if args.len() == 1 {
Ok(args.remove(0))
} else {
Ok(Expression::TupleInit(TupleInitExpression {
span: span + &end_span,
elements: args,
}))
}
}
///
/// Returns an [`Expression`] AST node if the next tokens represent an
/// array initialization expression.
///
pub fn parse_array_expression(&mut self, span: &Span) -> Result<Expression> {
if let Some(end) = self.eat(Token::RightSquare) {
return Ok(Expression::ArrayInline(ArrayInlineExpression {
elements: Vec::new(),
span: span + &end.span,
}));
}
let first = self.parse_primary_expression()?;
if self.eat(Token::Semicolon).is_some() {
let dimensions = self
.parse_array_dimensions()
.map_err(|_| ParserError::unable_to_parse_array_dimensions(span))?;
let end = self.expect(Token::RightSquare)?;
Ok(Expression::ArrayInit(ArrayInitExpression {
span: span + &end,
element: Box::new(first),
dimensions,
}))
} else {
let end_span;
let mut elements = vec![first];
loop {
if let Some(token) = self.eat(Token::RightSquare) {
end_span = token.span;
break;
}
if elements.len() == 1 {
self.expect(Token::Comma)?;
if let Some(token) = self.eat(Token::RightSquare) {
end_span = token.span;
break;
}
}
elements.push(self.parse_primary_expression()?);
if self.eat(Token::Comma).is_none() {
end_span = self.expect(Token::RightSquare)?;
break;
}
}
Ok(Expression::ArrayInline(ArrayInlineExpression {
elements: elements.into_iter().map(|expr| SpreadOrExpression::Expression(expr)).collect(),
span: span + &end_span,
}))
}
}
}

View File

@ -14,15 +14,23 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use crate::common::ParserContext;
mod context;
use context::*;
pub mod file;
use leo_ast::*;
use leo_errors::emitter::Handler;
use leo_errors::Result;
use leo_errors::{ParserError, Result};
use leo_span::{Span, Symbol};
use indexmap::IndexMap;
/// Creates a new program from a given file path and source code text.
pub fn parse(handler: &Handler, path: &str, source: &str) -> Result<Program> {
let mut tokens = ParserContext::new(handler, crate::tokenize(path, source.into())?);
pub fn parse(handler: &Handler, path: &str, source: &str) -> Result<Input> {
let mut tokens = InputParserContext::new(handler, crate::tokenize(path, source.into())?);
tokens.parse_program()
tokens.parse_input()
}
pub(crate) use super::assert_no_whitespace;

View File

@ -30,6 +30,9 @@ pub(crate) use tokenizer::*;
pub mod parser;
pub use parser::*;
pub mod input_parser;
pub use input_parser::*;
use leo_ast::{Ast, Input};
use leo_errors::emitter::Handler;
use leo_errors::Result;
@ -44,15 +47,14 @@ pub fn parse_ast<T: AsRef<str>, Y: AsRef<str>>(handler: &Handler, path: T, sourc
/// Parses program input from from the input file path and state file path
pub fn parse_program_input<T: AsRef<str>, Y: AsRef<str>, T2: AsRef<str>, Y2: AsRef<str>>(
handler: &Handler,
input_string: T,
input_path: Y,
_state_string: T2,
_state_path: Y2,
) -> Result<Input> {
Ok(Input::default())
input_parser::parse(handler, input_path.as_ref(), input_string.as_ref())
// let input_syntax_tree = LeoInputParser::parse_file(input_string.as_ref()).map_err(|mut e| {
// e.set_path(