fix test build issues. parser errors on dash + number names

collin 2021-03-04 15:58:00 -08:00
parent 10ecc6e6e2
commit d7d84c0ea7
5 changed files with 34 additions and 37 deletions

View File

@@ -15,16 +15,12 @@
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use leo_asg::*;
use leo_ast::Ast;
use leo_parser::parse_ast;
use std::path::Path;
mod fail;
mod pass;
const TESTING_FILEPATH: &str = "input.leo";
const TESTING_PROGRAM_NAME: &str = "test_program";
fn load_asg(program_string: &str) -> Result<Program<'static>, AsgConvertError> {
load_asg_imports(make_test_context(), program_string, &mut NullImportResolver)
@@ -35,8 +31,7 @@ fn load_asg_imports<'a, T: ImportResolver<'a>>(
program_string: &str,
imports: &mut T,
) -> Result<Program<'a>, AsgConvertError> {
// let grammar = Grammar::new(Path::new(&TESTING_FILEPATH), program_string)?;
let ast = Ast::new(TESTING_PROGRAM_NAME, &grammar)?;
let ast = parse_ast(&TESTING_FILEPATH, program_string)?;
InternalProgram::new(context, &ast.as_repr(), imports)
}
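
For reference, a minimal sketch of the helper after this change, using only the calls visible in the hunk above (parse_ast, as_repr, InternalProgram::new); the exact crate signatures and the 'static lifetime are assumed from the surrounding diff rather than verified against the crates:

use leo_asg::*;
use leo_parser::parse_ast;

const TESTING_FILEPATH: &str = "input.leo";

// Sketch only: builds an ASG from source in a single parse step, mirroring the
// helper above. `make_test_context` and `NullImportResolver` are the test
// helpers already used by this module.
fn load_asg_sketch(program_string: &str) -> Result<Program<'static>, AsgConvertError> {
    // `parse_ast` replaces the former Grammar::new + Ast::new two-step.
    let ast = parse_ast(&TESTING_FILEPATH, program_string)?;
    InternalProgram::new(make_test_context(), &ast.as_repr(), &mut NullImportResolver)
}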

View File

@@ -15,10 +15,7 @@
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use crate::{load_asg, make_test_context};
use leo_ast::Ast;
use leo_grammar::Grammar;
use std::path::Path;
use leo_parser::parse_ast;
#[test]
fn test_basic() {
@@ -56,6 +53,7 @@ fn test_function_rename() {
#[test]
fn test_imports() {
let import_name = "test-import".to_string();
let context = make_test_context();
let mut imports = crate::mocked_resolver(&context);
let test_import = r#"
@@ -70,7 +68,7 @@ fn test_imports() {
"#;
imports
.packages
.insert("test-import".to_string(), load_asg(test_import).unwrap());
.insert(import_name.clone(), load_asg(test_import).unwrap());
let program_string = r#"
import test-import.foo;
@@ -79,17 +77,11 @@ fn test_imports() {
}
"#;
let test_import_grammar = Grammar::new(Path::new("test-import.leo"), test_import).unwrap();
println!(
"{}",
serde_json::to_string(Ast::new("test-import", &test_import_grammar).unwrap().as_repr()).unwrap()
);
let test_import_ast = parse_ast(&import_name, test_import).unwrap();
println!("{}", serde_json::to_string(test_import_ast.as_repr()).unwrap());
let test_grammar = Grammar::new(Path::new("test.leo"), program_string).unwrap();
println!(
"{}",
serde_json::to_string(Ast::new("test", &test_grammar).unwrap().as_repr()).unwrap()
);
let test_ast = parse_ast("test.leo", program_string).unwrap();
println!("{}", serde_json::to_string(test_ast.as_repr()).unwrap());
let asg = crate::load_asg_imports(&context, program_string, &mut imports).unwrap();
let reformed_ast = leo_asg::reform_ast(&asg);
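
As a compact illustration of what these tests now exercise, a hedged sketch of a standalone check that a dashed package name parses; the body of main is a placeholder assumption, since the hunk elides the original program text:

use leo_parser::parse_ast;

#[test]
fn test_dashed_import_parses() {
    // `test-import` contains a dash, the case this commit fixes in the parser.
    let program_string = r#"
import test-import.foo;

function main() {
}
"#;
    // Parse by (file name, source) and dump the AST, as the test above does.
    let test_ast = parse_ast("test.leo", program_string).unwrap();
    println!("{}", serde_json::to_string(test_ast.as_repr()).unwrap());
}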

View File

@@ -57,21 +57,21 @@ impl ParserContext {
self.inner.last().ok_or_else(|| self.eof())
}
pub fn peek_oneof(&self, token: &[Token]) -> SyntaxResult<&SpannedToken> {
if let Some(spanned_token) = self.inner.last() {
if token.iter().any(|x| x == &spanned_token.token) {
Ok(spanned_token)
} else {
Err(SyntaxError::unexpected(
&spanned_token.token,
token,
&spanned_token.span,
))
}
} else {
Err(self.eof())
}
}
// pub fn peek_oneof(&self, token: &[Token]) -> SyntaxResult<&SpannedToken> {
// if let Some(spanned_token) = self.inner.last() {
// if token.iter().any(|x| x == &spanned_token.token) {
// Ok(spanned_token)
// } else {
// Err(SyntaxError::unexpected(
// &spanned_token.token,
// token,
// &spanned_token.span,
// ))
// }
// } else {
// Err(self.eof())
// }
// }
pub fn has_next(&self) -> bool {
!self.inner.is_empty()

View File

@@ -161,7 +161,10 @@ impl ParserContext {
}
pub fn parse_package_name(&mut self) -> SyntaxResult<Identifier> {
// Build the package name, starting with valid characters up to a dash `-` (Token::Minus).
let mut base = self.expect_loose_ident()?;
// Build the rest of the package name including dashes.
while let Some(token) = self.eat(Token::Minus) {
if token.span.line_start == base.span.line_stop && token.span.col_start == base.span.col_stop {
base.name += "-";
@@ -173,9 +176,13 @@
break;
}
}
// Return an error if the package name contains a keyword.
if let Some(token) = KEYWORD_TOKENS.iter().find(|x| x.to_string() == base.name) {
return Err(SyntaxError::unexpected_str(token, "package name", &base.span));
}
// Return an error if the package name contains invalid characters.
if !base
.name
.chars()
@@ -183,6 +190,8 @@
{
return Err(SyntaxError::invalid_package_name(&base.span));
}
// Return the package name.
Ok(base)
}
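
To make the adjacency check in the loop above concrete, here is a self-contained sketch; the Span struct and the column convention are illustrative assumptions, not the parser's actual types:

// Two tokens are joined into one package name only when the second starts
// exactly where the first ends, so `test-import` becomes a single name while
// `test - import` does not.
struct Span {
    line_start: usize,
    line_stop: usize,
    col_start: usize,
    col_stop: usize,
}

fn is_adjacent(base: &Span, next: &Span) -> bool {
    // Mirrors the condition used by `parse_package_name` above.
    next.line_start == base.line_stop && next.col_start == base.col_stop
}

fn main() {
    // `test` occupies columns 1..5 on line 1; a dash starting at column 5 is adjacent.
    let base = Span { line_start: 1, line_stop: 1, col_start: 1, col_stop: 5 };
    let dash = Span { line_start: 1, line_stop: 1, col_start: 5, col_stop: 6 };
    assert!(is_adjacent(&base, &dash));

    // A dash preceded by a space is not adjacent, so it ends the package name.
    let spaced = Span { line_start: 1, line_stop: 1, col_start: 6, col_stop: 7 };
    assert!(!is_adjacent(&base, &spaced));
}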

View File

@@ -16,7 +16,8 @@
//! The tokenizer to convert Leo code text into tokens.
//!
//! This module contains the [`tokenize()`] method.
//! This module contains the [`tokenize()`] method which breaks down string text into tokens,
//! separated by whitespace.
pub(crate) mod token;
pub(crate) use self::token::*;