Merge pull request #831 from AleoHQ/test-framework

Test Framework for Parser
Collin Chin 2021-04-13 15:16:40 -07:00 committed by GitHub
commit d1847747c0
672 changed files with 29243 additions and 77 deletions

Cargo.lock (generated)
View File

@@ -640,6 +640,12 @@ dependencies = [
"winapi 0.3.9",
]
[[package]]
name = "dtoa"
version = "0.4.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56899898ce76aaf4a0f24d914c97ea6ed976d42fec6ad33fcbb0a1103e07b2b0"
[[package]]
name = "either"
version = "1.6.1"
@@ -1409,6 +1415,7 @@ dependencies = [
"leo-ast",
"serde",
"serde_json",
"serde_yaml",
"tendril",
"thiserror",
"tracing",
@@ -1498,6 +1505,12 @@ dependencies = [
"vcpkg",
]
[[package]]
name = "linked-hash-map"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3"
[[package]]
name = "lock_api"
version = "0.4.2"
@@ -2508,6 +2521,18 @@ dependencies = [
"serde",
]
[[package]]
name = "serde_yaml"
version = "0.8.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "15654ed4ab61726bf918a39cb8d98a2e2995b002387807fa6ba58fdf7f59bb23"
dependencies = [
"dtoa",
"linked-hash-map",
"serde",
"yaml-rust",
]
[[package]]
name = "sha-1"
version = "0.8.2"
@@ -3421,6 +3446,15 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85e60b0d1b5f99db2556934e21937020776a5d31520bf169e851ac44e6420214"
[[package]]
name = "yaml-rust"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85"
dependencies = [
"linked-hash-map",
]
[[package]]
name = "zip"
version = "0.5.10"

View File

@@ -75,7 +75,7 @@ fn underline(mut start: usize, mut end: usize) -> String {
impl fmt::Display for FormattedError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let underline = underline(self.col_start - 1, self.col_stop - 1);
let underline = underline(self.col_start, self.col_stop);
write!(
f,
@@ -99,7 +99,7 @@ impl fmt::Display for FormattedError {
write!(
f,
"{indent } | {underline}\n\
"{indent } |{underline}\n\
{indent } |\n\
{indent } = {message}",
indent = INDENT,
@@ -121,8 +121,8 @@ fn test_error() {
path: std::sync::Arc::new("file.leo".to_string()),
line_start: 2,
line_stop: 2,
col_start: 8,
col_stop: 9,
col_start: 9,
col_stop: 10,
content: "let a = x;".into(),
message: "undefined value `x`".to_string(),
};
@@ -130,7 +130,7 @@ fn test_error() {
assert_eq!(
err.to_string(),
vec![
" --> file.leo: 2:8",
" --> file.leo: 2:9",
" |",
" 2 | let a = x;",
" | ^",

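(The column shift in this fixture follows from the indexing change above: spans now carry fully 1-indexed columns, so the x in "let a = x;" sits at columns 9..10, and the "- 1" adjustment plus the separating space move out of the Display code and into the computed underline.)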
View File

@@ -51,6 +51,9 @@ version = "0.4"
[dev-dependencies.criterion]
version = "0.3"
[dev-dependencies.serde_yaml]
version = "0.8"
[features]
default = [ ]
ci_skip = [ ]

View File

@@ -16,7 +16,7 @@
use leo_ast::{FormattedError, LeoError, Span};
use crate::{DeprecatedError, Token, TokenError};
use crate::{DeprecatedError, SyntaxResult, Token, TokenError};
#[derive(Debug, Error)]
pub enum SyntaxError {
@@ -32,6 +32,17 @@ pub enum SyntaxError {
impl LeoError for SyntaxError {}
pub fn assert_no_whitespace(left_span: &Span, right_span: &Span, left: &str, right: &str) -> SyntaxResult<()> {
if left_span.col_stop != right_span.col_start {
let mut error_span = left_span + right_span;
error_span.col_start = left_span.col_stop - 1;
error_span.col_stop = right_span.col_start - 1;
return Err(SyntaxError::unexpected_whitespace(left, right, &error_span));
}
Ok(())
}
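The new assert_no_whitespace helper (hoisted into the errors module; its predecessor unexpected_whitespace is deleted from the parser root further down) rejects any gap between two tokens that must be adjacent, such as a literal and its type suffix. A minimal standalone sketch of the column arithmetic, using a simplified stand-in for leo_ast::Span (the real type also tracks line numbers, path, and content):

struct Span {
    col_start: usize, // 1-indexed column of the first character
    col_stop: usize,  // 1-indexed column one past the last character
}

// Adjacent tokens satisfy left.col_stop == right.col_start;
// any other value means whitespace crept in between them.
fn no_whitespace_between(left: &Span, right: &Span) -> bool {
    left.col_stop == right.col_start
}

fn main() {
    let lit = Span { col_start: 1, col_stop: 2 };    // "1"
    let tight = Span { col_start: 2, col_stop: 7 };  // "group" in "1group"
    let spaced = Span { col_start: 3, col_stop: 8 }; // "group" in "1 group"
    assert!(no_whitespace_between(&lit, &tight));
    assert!(!no_whitespace_between(&lit, &spaced));
}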
impl SyntaxError {
fn new_from_span(message: String, span: &Span) -> Self {
SyntaxError::Error(FormattedError::new_from_span(message, span))

View File

@@ -14,9 +14,9 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use std::unimplemented;
use std::{borrow::Cow, unimplemented};
use crate::{tokenizer::*, unexpected_whitespace, SyntaxError, SyntaxResult, Token, KEYWORD_TOKENS};
use crate::{assert_no_whitespace, tokenizer::*, SyntaxError, SyntaxResult, Token, KEYWORD_TOKENS};
use leo_ast::*;
use tendril::format_tendril;
@@ -49,7 +49,11 @@ impl ParserContext {
.filter(|x| !matches!(x.token, Token::CommentLine(_) | Token::CommentBlock(_)))
.collect();
ParserContext {
end_span: tokens.last().map(|x| x.span.clone()).unwrap_or_default(),
end_span: tokens
.iter()
.find(|x| !x.span.content.trim().is_empty())
.map(|x| x.span.clone())
.unwrap_or_default(),
tokens,
fuzzy_struct_state: false,
}
@@ -69,6 +73,14 @@ impl ParserContext {
self.tokens.last().ok_or_else(|| self.eof())
}
pub fn peek_token(&self) -> Cow<'_, Token> {
self.tokens
.last()
.map(|x| &x.token)
.map(Cow::Borrowed)
.unwrap_or_else(|| Cow::Owned(Token::Eof))
}
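The new peek_token gives lookahead without the SyntaxResult that peek forces on callers: the token vector keeps the next token last, and an empty stream yields the new Token::Eof marker instead of an error. A sketch of the pattern with a pared-down token enum (illustrative, not the real Leo token set):

use std::borrow::Cow;

#[derive(Clone, PartialEq, Debug)]
enum Token { LeftCurly, Eof }

// Borrow the next token when one exists; otherwise hand back an
// owned Eof so call sites can compare without unwrapping a Result.
fn peek_token(tokens: &[Token]) -> Cow<'_, Token> {
    tokens.last().map(Cow::Borrowed).unwrap_or(Cow::Owned(Token::Eof))
}

fn main() {
    assert_eq!(peek_token(&[Token::LeftCurly]).as_ref(), &Token::LeftCurly);
    assert_eq!(peek_token(&[]).as_ref(), &Token::Eof);
}

The expression and file parsers below switch their self.peek()?.token == ... checks to self.peek_token().as_ref() == &... accordingly.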
// pub fn peek_oneof(&self, token: &[Token]) -> SyntaxResult<&SpannedToken> {
// if let Some(spanned_token) = self.inner.last() {
// if token.iter().any(|x| x == &spanned_token.token) {
@@ -140,15 +152,21 @@ impl ParserContext {
/// the next token is not a [`GroupCoordinate`].
///
fn peek_group_coordinate(&self, i: &mut usize) -> Option<GroupCoordinate> {
let token = self.tokens.get(*i)?;
if *i < 1 {
return None;
}
let token = self.tokens.get(*i - 1)?;
*i -= 1;
Some(match &token.token {
Token::Add => GroupCoordinate::SignHigh,
Token::Minus => match self.tokens.get(*i) {
Token::Minus if *i > 0 => match self.tokens.get(*i - 1) {
Some(SpannedToken {
token: Token::Int(value),
span,
}) => {
if *i < 1 {
return None;
}
*i -= 1;
GroupCoordinate::Number(format_tendril!("-{}", value), span.clone())
}
@@ -164,32 +182,32 @@ impl ParserContext {
/// Removes the next two tokens if they are a pair of [`GroupCoordinate`] and returns them,
/// or [None] if the next token is not a [`GroupCoordinate`].
///
pub fn eat_group_partial(&mut self) -> SyntaxResult<Option<(GroupCoordinate, GroupCoordinate, Span)>> {
let mut i = self.tokens.len() - 1;
let start_span = match self.tokens.get(i) {
Some(span) => span.span.clone(),
None => return Ok(None),
};
let first = match self.peek_group_coordinate(&mut i) {
Some(coord) => coord,
None => return Ok(None),
};
match self.tokens.get(i) {
pub fn eat_group_partial(&mut self) -> Option<SyntaxResult<(GroupCoordinate, GroupCoordinate, Span)>> {
let mut i = self.tokens.len();
if i < 1 {
return None;
}
let start_span = self.tokens.get(i - 1)?.span.clone();
let first = self.peek_group_coordinate(&mut i)?;
if i < 1 {
return None;
}
match self.tokens.get(i - 1) {
Some(SpannedToken {
token: Token::Comma, ..
}) => {
i -= 1;
}
_ => {
return Ok(None);
return None;
}
}
let second = match self.peek_group_coordinate(&mut i) {
Some(coord) => coord,
None => return Ok(None),
};
let second = self.peek_group_coordinate(&mut i)?;
if i < 1 {
return None;
}
let right_paren_span;
match self.tokens.get(i) {
match self.tokens.get(i - 1) {
Some(SpannedToken {
token: Token::RightParen,
span,
@@ -198,11 +216,14 @@ impl ParserContext {
i -= 1;
}
_ => {
return Ok(None);
return None;
}
}
if i < 1 {
return None;
}
let end_span;
match self.tokens.get(i) {
match self.tokens.get(i - 1) {
Some(SpannedToken {
token: Token::Group,
span,
@ -211,18 +232,20 @@ impl ParserContext {
i -= 1;
}
_ => {
return Ok(None);
return None;
}
}
self.tokens.drain((i + 1)..);
unexpected_whitespace(
self.tokens.drain(i..);
if let Err(e) = assert_no_whitespace(
&right_paren_span,
&end_span,
&format!("({},{})", first, second),
"group",
)?;
Ok(Some((first, second, start_span + end_span)))
) {
return Some(Err(e));
}
Some(Ok((first, second, start_span + end_span)))
}
///
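The signature flip on eat_group_partial (from SyntaxResult<Option<...>> to Option<SyntaxResult<...>>) lets the speculative lookahead above use ? on plain Options internally; the call site restores the old shape with Option::transpose, as in this small illustration:

// transpose() swaps the nesting: Option<Result<T, E>> becomes
// Result<Option<T>, E>, so eat_group_partial().transpose()?
// still bubbles a SyntaxError up while None stays None.
fn demo() -> Result<Option<(i32, i32)>, String> {
    let speculative: Option<Result<(i32, i32), String>> = Some(Ok((1, 2)));
    Ok(speculative.transpose()?)
}

fn main() {
    assert_eq!(demo(), Ok(Some((1, 2))));
}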

View File

@@ -404,7 +404,7 @@ impl ParserContext {
match token.token {
Token::LeftSquare => {
if self.eat(Token::DotDot).is_some() {
let right = if self.peek()?.token != Token::RightSquare {
let right = if self.peek_token().as_ref() != &Token::RightSquare {
Some(Box::new(self.parse_expression()?))
} else {
None
@@ -422,7 +422,7 @@
let left = self.parse_expression()?;
if self.eat(Token::DotDot).is_some() {
let right = if self.peek()?.token != Token::RightSquare {
let right = if self.peek_token().as_ref() != &Token::RightSquare {
Some(Box::new(self.parse_expression()?))
} else {
None
@@ -554,7 +554,7 @@ impl ParserContext {
/// tuple initialization expression.
///
pub fn parse_tuple_expression(&mut self, span: &Span) -> SyntaxResult<Expression> {
if let Some((left, right, span)) = self.eat_group_partial()? {
if let Some((left, right, span)) = self.eat_group_partial().transpose()? {
return Ok(Expression::Value(ValueExpression::Group(Box::new(GroupValue::Tuple(
GroupTuple {
span,
@@ -625,6 +625,10 @@ impl ParserContext {
}
if elements.len() == 1 {
self.expect(Token::Comma)?;
if let Some(token) = self.eat(Token::RightSquare) {
end_span = token.span;
break;
}
}
elements.push(self.parse_spread_or_expression()?);
if self.eat(Token::Comma).is_none() {
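(The four inserted lines above let a trailing comma close a single-element array literal, so an input such as [0u8,] now parses instead of demanding a second element.)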
@@ -658,21 +662,21 @@
token: Token::Field,
span: type_span,
}) => {
unexpected_whitespace(&span, &type_span, &value, "field")?;
assert_no_whitespace(&span, &type_span, &value, "field")?;
Expression::Value(ValueExpression::Field(value, span + type_span))
}
Some(SpannedToken {
token: Token::Group,
span: type_span,
}) => {
unexpected_whitespace(&span, &type_span, &value, "group")?;
assert_no_whitespace(&span, &type_span, &value, "group")?;
Expression::Value(ValueExpression::Group(Box::new(GroupValue::Single(
value,
span + type_span,
))))
}
Some(SpannedToken { token, span: type_span }) => {
unexpected_whitespace(&span, &type_span, &value, &token.to_string())?;
assert_no_whitespace(&span, &type_span, &value, &token.to_string())?;
Expression::Value(ValueExpression::Integer(
Self::token_to_int_type(token).expect("unknown int type token"),
value,
@@ -705,7 +709,7 @@ impl ParserContext {
Token::LeftSquare => self.parse_array_expression(&span)?,
Token::Ident(name) => {
let ident = Identifier { name, span };
if !self.fuzzy_struct_state && self.peek()?.token == Token::LeftCurly {
if !self.fuzzy_struct_state && self.peek_token().as_ref() == &Token::LeftCurly {
self.parse_circuit_expression(ident)?
} else {
Expression::Identifier(ident)
@@ -716,7 +720,7 @@
name: token.to_string().into(),
span,
};
if !self.fuzzy_struct_state && self.peek()?.token == Token::LeftCurly {
if !self.fuzzy_struct_state && self.peek_token().as_ref() == &Token::LeftCurly {
self.parse_circuit_expression(ident)?
} else {
Expression::Identifier(ident)

View File

@@ -91,7 +91,7 @@ impl ParserContext {
)));
}
unexpected_whitespace(&start, &name.span, &name.name, "@")?;
assert_no_whitespace(&start, &name.span, &name.name, "@")?;
let end_span;
let arguments = if self.eat(Token::LeftParen).is_some() {
@@ -153,7 +153,7 @@ impl ParserContext {
Ok(PackageAccess::Star(span))
} else {
let name = self.expect_ident()?;
if self.peek()?.token == Token::Dot {
if self.peek_token().as_ref() == &Token::Dot {
self.backtrack(SpannedToken {
token: Token::Ident(name.name),
span: name.span,
@@ -188,7 +188,7 @@ impl ParserContext {
// Build the rest of the package name including dashes.
loop {
match &self.peek()?.token {
match &self.peek_token().as_ref() {
Token::Minus => {
let span = self.expect(Token::Minus)?;
base.span = base.span + span;
@@ -369,7 +369,7 @@
///
pub fn parse_function_declaration(&mut self) -> SyntaxResult<(Identifier, Function)> {
let mut annotations = Vec::new();
while self.peek()?.token == Token::At {
while self.peek_token().as_ref() == &Token::At {
annotations.push(self.parse_annotation()?);
}
let start = self.expect(Token::Function)?;

View File

@@ -20,16 +20,16 @@
//! method to create a new program ast.
mod context;
use context::*;
pub use context::*;
mod expression;
mod file;
mod statement;
mod type_;
pub mod expression;
pub mod file;
pub mod statement;
pub mod type_;
use std::unimplemented;
use crate::{tokenizer::*, DeprecatedError, SyntaxError, Token};
use crate::{errors::assert_no_whitespace, tokenizer::*, DeprecatedError, SyntaxError, Token};
use indexmap::IndexMap;
use leo_ast::*;
@@ -41,14 +41,3 @@ pub fn parse(path: &str, source: &str) -> SyntaxResult<Program> {
tokens.parse_program()
}
pub fn unexpected_whitespace(left_span: &Span, right_span: &Span, left: &str, right: &str) -> SyntaxResult<()> {
if left_span.col_stop != right_span.col_start {
let mut error_span = left_span + right_span;
error_span.col_start = left_span.col_stop - 1;
error_span.col_stop = right_span.col_start - 1;
return Err(SyntaxError::unexpected_whitespace(left, right, &error_span));
}
Ok(())
}

View File

@@ -167,7 +167,7 @@ impl ParserContext {
pub fn parse_return_statement(&mut self) -> SyntaxResult<ReturnStatement> {
let start = self.expect(Token::Return)?;
let expr = self.parse_expression()?;
self.eat(Token::Comma);
self.eat(Token::Semicolon);
Ok(ReturnStatement {
span: &start + expr.span(),
@@ -316,10 +316,17 @@
let mut variable_names = Vec::new();
if self.eat(Token::LeftParen).is_some() {
variable_names.push(self.parse_variable_name(&declare)?);
let mut eaten_ending = false;
while self.eat(Token::Comma).is_some() {
if self.eat(Token::RightParen).is_some() {
eaten_ending = true;
break;
}
variable_names.push(self.parse_variable_name(&declare)?);
}
self.expect(Token::RightParen)?;
if !eaten_ending {
self.expect(Token::RightParen)?;
}
} else {
variable_names.push(self.parse_variable_name(&declare)?);
}
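(The rewritten loop gives parenthesized binding lists the same trailing-comma treatment: a comma followed directly by ")" ends the list, so "let (a, b,) = ...;" parses, and eaten_ending records that the closing parenthesis was already consumed so it is not expected a second time.)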

View File

@@ -15,15 +15,69 @@
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use std::{
fmt,
fs,
path::{Path, PathBuf},
sync::Arc,
};
use crate::SyntaxError;
use leo_ast::{Expression, ExpressionStatement, Program, Span, Statement, ValueExpression};
use serde_yaml::Value;
use tokenizer::Token;
use crate::{tokenizer, DeprecatedError, ParserContext, SyntaxError, TokenError};
struct TestFailure {
path: String,
error: SyntaxError,
errors: Vec<TestError>,
}
#[derive(Debug)]
enum TestError {
UnexpectedOutput {
index: usize,
expected: String,
output: String,
},
PassedAndShouldntHave {
index: usize,
},
FailedAndShouldntHave {
index: usize,
error: String,
},
UnexpectedError {
index: usize,
expected: String,
output: String,
},
MismatchedTestExpectationLength,
}
impl fmt::Display for TestError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
TestError::UnexpectedOutput {
index,
expected,
output,
} => {
write!(f, "test #{} expected\n{}\ngot\n{}", index + 1, expected, output)
}
TestError::PassedAndShouldntHave { index } => write!(f, "test #{} passed and shouldn't have", index + 1),
TestError::FailedAndShouldntHave { index, error } => {
write!(f, "test #{} failed and shouldn't have:\n{}", index + 1, error)
}
TestError::UnexpectedError {
expected,
output,
index,
} => {
write!(f, "test #{} expected error\n{}\ngot\n{}", index + 1, expected, output)
}
TestError::MismatchedTestExpectationLength => write!(f, "invalid number of test expectations"),
}
}
}
pub fn find_tests<T: AsRef<Path>>(path: T, out: &mut Vec<(String, String)>) {
@@ -40,13 +94,411 @@ pub fn find_tests<T: AsRef<Path>>(path: T, out: &mut Vec<(String, String)>) {
}
}
#[derive(serde::Serialize, serde::Deserialize, PartialEq, Debug, Clone)]
enum TestNamespace {
Parse,
ParseStatement,
ParseExpression,
Token,
}
#[derive(serde::Serialize, serde::Deserialize, PartialEq, Debug, Clone)]
enum TestExpectationMode {
Pass,
Fail,
}
#[derive(serde::Serialize, serde::Deserialize)]
struct TestConfig {
namespace: TestNamespace,
expectation: TestExpectationMode,
}
#[derive(serde::Serialize, serde::Deserialize, Clone)]
struct TestExpectation {
namespace: TestNamespace,
expectation: TestExpectationMode,
outputs: Vec<Value>,
}
fn extract_test_config(source: &str) -> Option<TestConfig> {
let first_comment_start = source.find("/*")?;
let end_first_comment = source[first_comment_start + 2..].find("*/")?;
let comment_inner = &source[first_comment_start + 2..first_comment_start + 2 + end_first_comment];
Some(serde_yaml::from_str(comment_inner).expect("invalid test configuration"))
}
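extract_test_config treats the file's first /* ... */ block comment as YAML. An illustrative test-file header (variant names drawn from the two enums above; the test body follows the closing */):

/*
namespace: ParseStatement
expectation: Pass
*/

let x = 1u32;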
fn split_tests_oneline(source: &str) -> Vec<&str> {
source.lines().map(|x| x.trim()).filter(|x| !x.is_empty()).collect()
}
fn split_tests_twoline(source: &str) -> Vec<String> {
let mut out = vec![];
let mut lines = vec![];
for line in source.lines() {
let line = line.trim();
if line.is_empty() {
if !lines.is_empty() {
out.push(lines.join("\n"));
}
lines.clear();
continue;
}
lines.push(line);
}
let last_test = lines.join("\n");
if !last_test.trim().is_empty() {
out.push(last_test.trim().to_string());
}
out
}
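split_tests_twoline groups consecutive non-blank lines into one test case, so a ParseStatement file can hold multi-line statements separated by blank lines. A quick unit-style check of that behavior (assuming the function is in scope):

#[test]
fn twoline_split_groups_on_blank_lines() {
    let src = "let x = 1;\nlet y = 2;\n\nreturn x;\n";
    assert_eq!(split_tests_twoline(src), vec![
        "let x = 1;\nlet y = 2;".to_string(),
        "return x;".to_string(),
    ]);
}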
fn run_individual_token_test(path: &str, source: &str) -> Result<String, String> {
let output = tokenizer::tokenize(path, source.into());
output
.map(|tokens| {
tokens
.into_iter()
.map(|x| x.to_string())
.collect::<Vec<String>>()
.join(",")
})
.map_err(|x| strip_path_syntax_error(x.into()))
}
fn not_fully_consumed(tokens: &mut ParserContext) -> Result<(), String> {
if !tokens.has_next() {
return Ok(());
}
let mut out = "did not consume all input: ".to_string();
while tokens.has_next() {
out.push_str(&tokens.expect_any().unwrap().to_string());
out.push('\n');
}
Err(out)
}
fn run_individual_expression_test(path: &str, source: &str) -> Result<Expression, String> {
let tokenizer = tokenizer::tokenize(path, source.into()).map_err(|x| strip_path_syntax_error(x.into()))?;
if tokenizer
.iter()
.all(|x| matches!(x.token, Token::CommentLine(_) | Token::CommentBlock(_)))
{
return Ok(Expression::Value(ValueExpression::Implicit("".into(), Span::default())));
}
let mut tokens = ParserContext::new(tokenizer);
let parsed = tokens.parse_expression().map_err(strip_path_syntax_error)?;
not_fully_consumed(&mut tokens)?;
Ok(parsed)
}
fn run_individual_statement_test(path: &str, source: &str) -> Result<Statement, String> {
let tokenizer = tokenizer::tokenize(path, source.into()).map_err(|x| strip_path_syntax_error(x.into()))?;
if tokenizer
.iter()
.all(|x| matches!(x.token, Token::CommentLine(_) | Token::CommentBlock(_)))
{
return Ok(Statement::Expression(ExpressionStatement {
expression: Expression::Value(ValueExpression::Implicit("".into(), Span::default())),
span: Span::default(),
}));
}
let mut tokens = ParserContext::new(tokenizer);
let parsed = tokens.parse_statement().map_err(strip_path_syntax_error)?;
not_fully_consumed(&mut tokens)?;
Ok(parsed)
}
fn strip_path_syntax_error(mut err: SyntaxError) -> String {
let inner = match &mut err {
SyntaxError::DeprecatedError(DeprecatedError::Error(x)) => x,
SyntaxError::Error(x) => x,
SyntaxError::TokenError(TokenError::Error(x)) => x,
};
inner.path = Arc::new("test".to_string());
err.to_string()
}
fn run_individual_parse_test(path: &str, source: &str) -> Result<Program, String> {
let tokenizer = tokenizer::tokenize(path, source.into()).map_err(|x| strip_path_syntax_error(x.into()))?;
let mut tokens = ParserContext::new(tokenizer);
let parsed = tokens.parse_program().map_err(strip_path_syntax_error)?;
not_fully_consumed(&mut tokens)?;
Ok(parsed)
}
fn emit_errors<T: PartialEq + ToString + serde::de::DeserializeOwned>(
output: Result<&T, &str>,
mode: &TestExpectationMode,
expected_output: Option<Value>,
test_index: usize,
) -> Option<TestError> {
match (output, mode) {
(Ok(output), TestExpectationMode::Pass) => {
let expected_output: Option<T> =
expected_output.map(|x| serde_yaml::from_value(x).expect("test expectation deserialize failed"));
// passed and should have
if let Some(expected_output) = expected_output.as_ref() {
if output != expected_output {
// invalid output
return Some(TestError::UnexpectedOutput {
index: test_index,
expected: expected_output.to_string(),
output: output.to_string(),
});
}
}
None
}
(Ok(_tokens), TestExpectationMode::Fail) => Some(TestError::PassedAndShouldntHave { index: test_index }),
(Err(err), TestExpectationMode::Pass) => Some(TestError::FailedAndShouldntHave {
error: err.to_string(),
index: test_index,
}),
(Err(err), TestExpectationMode::Fail) => {
let expected_output: Option<String> =
expected_output.map(|x| serde_yaml::from_value(x).expect("test expectation deserialize failed"));
if let Some(expected_output) = expected_output.as_deref() {
if err != expected_output {
// invalid output
return Some(TestError::UnexpectedError {
expected: expected_output.to_string(),
output: err.to_string(),
index: test_index,
});
}
}
None
}
}
}
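emit_errors covers all four (result, expectation) combinations: Ok/Pass compares the output against the recorded expectation when one exists, Ok/Fail and Err/Pass are unconditional test errors, and Err/Fail compares the error text against the expectation. In both comparing cases a missing expectation is accepted silently, since the harness below records fresh outputs in that situation.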
fn run_test(
config: &TestConfig,
path: &str,
source: &str,
expectations: Option<&TestExpectation>,
errors: &mut Vec<TestError>,
) -> Vec<Value> {
let end_of_header = source.find("*/").expect("failed to find header block in test");
let source = &source[end_of_header + 2..];
let mut outputs = vec![];
match &config.namespace {
TestNamespace::Token => {
let tests = split_tests_oneline(source);
if let Some(expectations) = expectations.as_ref() {
if tests.len() != expectations.outputs.len() {
errors.push(TestError::MismatchedTestExpectationLength);
}
}
let mut expected_output = expectations.as_ref().map(|x| x.outputs.iter());
for (i, test) in tests.into_iter().enumerate() {
let expected_output = expected_output
.as_mut()
.map(|x| x.next())
.flatten()
.map(|x| x.as_str())
.flatten();
let output = run_individual_token_test(path, test);
if let Some(error) = emit_errors(
output.as_ref().map_err(|x| &**x),
&config.expectation,
expected_output.map(|x| Value::String(x.to_string())),
i,
) {
errors.push(error);
} else {
outputs.push(serde_yaml::to_value(output.unwrap_or_else(|e| e)).expect("serialization failed"));
}
}
}
TestNamespace::Parse => {
if let Some(expectations) = expectations.as_ref() {
if expectations.outputs.len() != 1 {
errors.push(TestError::MismatchedTestExpectationLength);
}
}
let expected_output = expectations
.map(|x| x.outputs.get(0))
.flatten()
.map(|x| serde_yaml::from_value(x.clone()).expect("invalid test expectation form"));
let output = run_individual_parse_test(path, source);
if let Some(error) = emit_errors(
output.as_ref().map_err(|x| &**x),
&config.expectation,
expected_output,
0,
) {
errors.push(error);
} else {
outputs.push(
output
.map(|x| serde_yaml::to_value(x).expect("serialization failed"))
.unwrap_or_else(|e| serde_yaml::to_value(e).expect("serialization failed")),
);
}
}
TestNamespace::ParseStatement => {
let tests = split_tests_twoline(source);
if let Some(expectations) = expectations.as_ref() {
if tests.len() != expectations.outputs.len() {
errors.push(TestError::MismatchedTestExpectationLength);
}
}
let mut expected_output = expectations.as_ref().map(|x| x.outputs.iter());
for (i, test) in tests.into_iter().enumerate() {
let expected_output = expected_output
.as_mut()
.map(|x| x.next())
.flatten()
.map(|x| serde_yaml::from_value(x.clone()).expect("invalid test expectation form"));
let output = run_individual_statement_test(path, &test);
if let Some(error) = emit_errors(
output.as_ref().map_err(|x| &**x),
&config.expectation,
expected_output,
i,
) {
errors.push(error);
} else {
outputs.push(
output
.map(|x| serde_yaml::to_value(x).expect("serialization failed"))
.unwrap_or_else(|e| serde_yaml::to_value(e).expect("serialization failed")),
);
}
}
}
TestNamespace::ParseExpression => {
let tests = split_tests_oneline(source);
if let Some(expectations) = expectations.as_ref() {
if tests.len() != expectations.outputs.len() {
errors.push(TestError::MismatchedTestExpectationLength);
}
}
let mut expected_output = expectations.as_ref().map(|x| x.outputs.iter());
for (i, test) in tests.into_iter().enumerate() {
let expected_output = expected_output
.as_mut()
.map(|x| x.next())
.flatten()
.map(|x| serde_yaml::from_value(x.clone()).expect("invalid test expectation form"));
let output = run_individual_expression_test(path, test);
if let Some(error) = emit_errors(
output.as_ref().map_err(|x| &**x),
&config.expectation,
expected_output,
i,
) {
errors.push(error);
} else {
outputs.push(
output
.map(|x| serde_yaml::to_value(x).expect("serialization failed"))
.unwrap_or_else(|e| serde_yaml::to_value(e).expect("serialization failed")),
);
}
}
}
}
outputs
}
#[test]
pub fn parser_tests() {
let mut pass = 0;
let mut fail = Vec::new();
let mut tests = Vec::new();
let mut test_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
test_dir.push("../tests/parser/");
find_tests(&test_dir, &mut tests);
let mut outputs = vec![];
for (path, content) in tests.into_iter() {
let config = extract_test_config(&content);
if config.is_none() {
panic!("missing configuration for {}", path);
}
let config = config.unwrap();
let mut expectation_path = path.clone();
expectation_path += ".out";
let expectations: Option<TestExpectation> = if std::path::Path::new(&expectation_path).exists() {
if !std::env::var("CLEAR_LEO_TEST_EXPECTATIONS")
.unwrap_or_default()
.trim()
.is_empty()
{
None
} else {
let raw = std::fs::read_to_string(&expectation_path).expect("failed to read expectations file");
Some(serde_yaml::from_str(&raw).expect("invalid yaml in expectations file"))
}
} else {
None
};
let mut errors = vec![];
let raw_path = Path::new(&path);
let new_outputs = run_test(
&config,
raw_path.file_name().unwrap_or_default().to_str().unwrap_or_default(),
&content,
expectations.as_ref(),
&mut errors,
);
if errors.is_empty() {
if expectations.is_none() {
outputs.push((expectation_path, TestExpectation {
namespace: config.namespace,
expectation: config.expectation,
outputs: new_outputs,
}));
}
pass += 1;
} else {
fail.push(TestFailure {
path: path.clone(),
errors,
})
}
}
if !fail.is_empty() {
for (i, fail) in fail.iter().enumerate() {
println!(
"\n\n-----------------TEST #{} FAILED (and shouldn't have)-----------------",
i + 1
);
println!("File: {}", fail.path);
for error in &fail.errors {
println!("{}", error);
}
}
panic!("failed {}/{} tests", fail.len(), fail.len() + pass);
} else {
for (path, new_expectation) in outputs {
std::fs::write(
&path,
serde_yaml::to_string(&new_expectation).expect("failed to serialize expectation yaml"),
)
.expect("failed to write expectation file");
}
println!("passed {}/{} tests", pass, pass);
}
}
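Two consequences of this flow: a test without a .out expectations file has one generated from a passing run, and setting the CLEAR_LEO_TEST_EXPECTATIONS environment variable to any non-blank value makes the harness ignore existing .out files so they are regenerated the same way. In either case the files are only written once every test in the suite passes.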
#[test]
pub fn parser_pass_tests() {
let mut pass = 0;
let mut fail = Vec::new();
let mut tests = Vec::new();
let mut test_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
test_dir.push("../tests/pass/parse/");
test_dir.push("../tests/old/pass/");
find_tests(&test_dir, &mut tests);
for (path, content) in tests.into_iter() {
match crate::parse(&path, &content) {
@@ -54,7 +506,13 @@ pub fn parser_pass_tests() {
pass += 1;
}
Err(e) => {
fail.push(TestFailure { path, error: e });
fail.push(TestFailure {
path,
errors: vec![TestError::FailedAndShouldntHave {
index: 0,
error: e.to_string(),
}],
});
}
}
}
@@ -65,7 +523,9 @@ pub fn parser_pass_tests() {
i + 1
);
println!("File: {}", fail.path);
println!("{}", fail.error);
for error in &fail.errors {
println!("{}", error);
}
}
panic!("failed {}/{} tests", fail.len(), fail.len() + pass);
} else {
@@ -79,7 +539,7 @@ pub fn parser_fail_tests() {
let mut fail = Vec::new();
let mut tests = Vec::new();
let mut test_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
test_dir.push("../tests/fail/parse/");
test_dir.push("../tests/old/fail/");
find_tests(&test_dir, &mut tests);
for (path, content) in tests.into_iter() {
match crate::parse(&path, &content) {

View File

@@ -306,11 +306,7 @@ impl Token {
return (
ident.len(),
Some(match &*ident {
x if x.starts_with("aleo1")
&& x.chars().skip(5).all(|x| x.is_ascii_lowercase() || x.is_ascii_digit()) =>
{
Token::AddressLit(ident)
}
x if x.starts_with("aleo1") => Token::AddressLit(ident),
"address" => Token::Address,
"as" => Token::As,
"bool" => Token::Bool,

View File

@@ -146,6 +146,9 @@ pub enum Token {
// ModEq,
// OrEq,
// AndEq,
// Meta Tokens
Eof,
}
/// Represents all valid Leo keyword tokens.
@@ -289,6 +292,7 @@ impl fmt::Display for Token {
Return => write!(f, "return"),
Static => write!(f, "static"),
String => write!(f, "string"),
Eof => write!(f, ""),
// BitAnd => write!(f, "&"),
// BitAndEq => write!(f, "&="),
// BitOr => write!(f, "|"),

Some files were not shown because too many files have changed in this diff.