Mirror of https://github.com/AleoHQ/leo.git (synced 2024-12-20 08:01:42 +03:00)

Commit bacc0d7510: merge testnet3
@@ -17,7 +17,7 @@
 use leo_errors::Result;
 use leo_span::{Span, Symbol};
 
-use crate::Node;
+use crate::{simple_node_impl, Node};
 use serde::{
     de::{
         Visitor, {self},
@@ -43,15 +43,7 @@ pub struct Identifier {
     pub span: Span,
 }
 
-impl Node for Identifier {
-    fn span(&self) -> &Span {
-        &self.span
-    }
-
-    fn set_span(&mut self, span: Span) {
-        self.span = span;
-    }
-}
+simple_node_impl!(Identifier);
 
 impl Identifier {
     /// Constructs a new identifier with `name` and a default span.
@@ -120,12 +120,4 @@ impl fmt::Display for BinaryExpression {
     }
 }
 
-impl Node for BinaryExpression {
-    fn span(&self) -> &Span {
-        &self.span
-    }
-
-    fn set_span(&mut self, span: Span) {
-        self.span = span;
-    }
-}
+crate::simple_node_impl!(BinaryExpression);
@@ -41,12 +41,4 @@ impl fmt::Display for CallExpression {
     }
 }
 
-impl Node for CallExpression {
-    fn span(&self) -> &Span {
-        &self.span
-    }
-
-    fn set_span(&mut self, span: Span) {
-        self.span = span;
-    }
-}
+crate::simple_node_impl!(CallExpression);
@@ -29,12 +29,4 @@ impl fmt::Display for ErrExpression {
     }
 }
 
-impl Node for ErrExpression {
-    fn span(&self) -> &Span {
-        &self.span
-    }
-
-    fn set_span(&mut self, span: Span) {
-        self.span = span;
-    }
-}
+crate::simple_node_impl!(ErrExpression);
@@ -54,9 +54,9 @@ pub enum Expression {
 }
 
 impl Node for Expression {
-    fn span(&self) -> &Span {
+    fn span(&self) -> Span {
         use Expression::*;
-        match &self {
+        match self {
            Identifier(n) => n.span(),
            Value(n) => n.span(),
            Binary(n) => n.span(),
@@ -35,12 +35,4 @@ impl fmt::Display for TernaryExpression {
     }
 }
 
-impl Node for TernaryExpression {
-    fn span(&self) -> &Span {
-        &self.span
-    }
-
-    fn set_span(&mut self, span: Span) {
-        self.span = span;
-    }
-}
+crate::simple_node_impl!(TernaryExpression);
@@ -52,12 +52,4 @@ impl fmt::Display for UnaryExpression {
     }
 }
 
-impl Node for UnaryExpression {
-    fn span(&self) -> &Span {
-        &self.span
-    }
-
-    fn set_span(&mut self, span: Span) {
-        self.span = span;
-    }
-}
+crate::simple_node_impl!(UnaryExpression);
@@ -60,14 +60,14 @@ impl fmt::Display for ValueExpression {
 }
 
 impl Node for ValueExpression {
-    fn span(&self) -> &Span {
+    fn span(&self) -> Span {
         use ValueExpression::*;
         match &self {
-            Address(_, span) | Boolean(_, span) | Field(_, span) | Integer(_, _, span) | String(_, span) => span,
-            Char(character) => &character.span,
+            Address(_, span) | Boolean(_, span) | Field(_, span) | Integer(_, _, span) | String(_, span) => *span,
+            Char(character) => character.span,
             Group(group) => match &**group {
-                GroupValue::Single(_, span) => span,
-                GroupValue::Tuple(tuple) => &tuple.span,
+                GroupValue::Single(_, span) => *span,
+                GroupValue::Tuple(tuple) => tuple.span,
             },
         }
     }
@@ -82,12 +82,4 @@ impl fmt::Debug for Function {
     }
 }
 
-impl Node for Function {
-    fn span(&self) -> &Span {
-        &self.span
-    }
-
-    fn set_span(&mut self, span: Span) {
-        self.span = span;
-    }
-}
+crate::simple_node_impl!(Function);
@@ -87,12 +87,4 @@ impl fmt::Debug for FunctionInputVariable {
     }
 }
 
-impl Node for FunctionInputVariable {
-    fn span(&self) -> &Span {
-        &self.span
-    }
-
-    fn set_span(&mut self, span: Span) {
-        self.span = span;
-    }
-}
+crate::simple_node_impl!(FunctionInputVariable);
@@ -70,10 +70,10 @@ impl PartialEq for FunctionInput {
 impl Eq for FunctionInput {}
 
 impl Node for FunctionInput {
-    fn span(&self) -> &Span {
+    fn span(&self) -> Span {
         use FunctionInput::*;
         match self {
-            Variable(variable) => &variable.span,
+            Variable(variable) => variable.span,
         }
     }
 
@@ -38,7 +38,7 @@ impl TryFrom<(Type, Expression)> for InputValue {
         match (type_, value) {
             (Type::Address, ValueExpression::Address(value, _)) => Self::Address(value),
             (Type::Boolean, ValueExpression::Boolean(value, span)) => {
-                let bool_value = value.parse::<bool>().map_err(|_| ParserError::unexpected_eof(&span))?; // TODO: change error
+                let bool_value = value.parse::<bool>().map_err(|_| ParserError::unexpected_eof(span))?; // TODO: change error
                 Self::Boolean(bool_value)
             }
             (Type::Char, ValueExpression::Char(value)) => Self::Char(value),
@@ -48,7 +48,7 @@ impl TryFrom<(Type, Expression)> for InputValue {
                 if expected == actual {
                     Self::Integer(expected, value)
                 } else {
-                    return Err(InputError::unexpected_type(expected.to_string(), actual, &span).into());
+                    return Err(InputError::unexpected_type(expected.to_string(), actual, span).into());
                 }
             }
             (x, y) => {
@@ -35,7 +35,7 @@ impl TryFrom<InputAst> for ProgramInput {
                sym::registers => &mut registers,
                _ => {
                    return Err(
-                        InputError::unexpected_section(&["main", "registers"], section.name, &section.span).into(),
+                        InputError::unexpected_section(&["main", "registers"], section.name, section.span).into(),
                    )
                }
            };
@@ -39,7 +39,7 @@ impl TryFrom<InputAst> for ProgramState {
                return Err(InputError::unexpected_section(
                    &["state", "record", "state_leaf"],
                    section.name,
-                    &section.span,
+                    section.span,
                )
                .into());
            }
@@ -21,8 +21,23 @@ pub trait Node:
     std::fmt::Debug + std::fmt::Display + Clone + PartialEq + Eq + serde::Serialize + serde::de::DeserializeOwned
 {
     /// Returns the span of the node.
-    fn span(&self) -> &Span;
+    fn span(&self) -> Span;
 
     /// Sets the span of the node.
     fn set_span(&mut self, span: Span);
 }
+
+#[macro_export]
+macro_rules! simple_node_impl {
+    ($ty:ty) => {
+        impl Node for $ty {
+            fn span(&self) -> Span {
+                self.span
+            }
+
+            fn set_span(&mut self, span: Span) {
+                self.span = span;
+            }
+        }
+    };
+}
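
The `simple_node_impl!` macro added above is what lets the hand-written `impl Node` blocks elsewhere in this diff be deleted. Note that `span(&self)` now returns `Span` by value, which only works because the new `Span` is a plain copyable value. A minimal sketch under that assumption; `shift_span` is a hypothetical helper, not part of this commit:

use leo_ast::Node;
use leo_span::Span;

// Hypothetical generic helper: works for any AST node because the trait now
// hands out `Span` by value and takes it back by value.
fn shift_span<N: Node>(node: &mut N, new_span: Span) -> Span {
    let old = node.span(); // a plain copy; no `.clone()`, no `&`
    node.set_span(new_span);
    old
}
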
@@ -206,7 +206,7 @@ impl<R: ReconstructingReducer> ReconstructingDirector<R> {
 
     pub fn reduce_iteration(&mut self, iteration: &IterationStatement) -> Result<IterationStatement> {
         let variable = self.reduce_identifier(&iteration.variable)?;
-        let type_ = self.reduce_type(&iteration.type_, iteration.span())?;
+        let type_ = self.reduce_type(&iteration.type_, &iteration.span())?;
         let start = self.reduce_expression(&iteration.start)?;
         let stop = self.reduce_expression(&iteration.stop)?;
         let block = self.reduce_block(&iteration.block)?;
@@ -227,13 +227,13 @@ impl<R: ReconstructingReducer> ReconstructingDirector<R> {
                 let formatted = ConsoleArgs {
                     string: args.string.clone(),
                     parameters,
-                    span: args.span.clone(),
+                    span: args.span,
                 };
 
                 match &console_function_call.function {
                     ConsoleFunction::Error(_) => ConsoleFunction::Error(formatted),
                     ConsoleFunction::Log(_) => ConsoleFunction::Log(formatted),
-                    _ => return Err(AstError::impossible_console_assert_call(&args.span).into()),
+                    _ => return Err(AstError::impossible_console_assert_call(args.span).into()),
                 }
             }
         };
@@ -43,7 +43,7 @@ pub trait ReconstructingReducer {
     fn reduce_identifier(&mut self, identifier: &Identifier) -> Result<Identifier> {
         Ok(Identifier {
             name: identifier.name,
-            span: identifier.span.clone(),
+            span: identifier.span,
         })
     }
 
@@ -51,7 +51,7 @@ pub trait ReconstructingReducer {
         Ok(GroupTuple {
             x: group_tuple.x.clone(),
             y: group_tuple.y.clone(),
-            span: group_tuple.span.clone(),
+            span: group_tuple.span,
         })
     }
 
@@ -60,10 +60,7 @@ pub trait ReconstructingReducer {
     }
 
     fn reduce_string(&mut self, string: &[Char], span: &Span) -> Result<Expression> {
-        Ok(Expression::Value(ValueExpression::String(
-            string.to_vec(),
-            span.clone(),
-        )))
+        Ok(Expression::Value(ValueExpression::String(string.to_vec(), *span)))
     }
 
     fn reduce_value(&mut self, _value: &ValueExpression, new: Expression) -> Result<Expression> {
@@ -81,7 +78,7 @@ pub trait ReconstructingReducer {
             left: Box::new(left),
             right: Box::new(right),
             op,
-            span: binary.span.clone(),
+            span: binary.span,
         })
     }
 
@@ -94,7 +91,7 @@ pub trait ReconstructingReducer {
         Ok(UnaryExpression {
             inner: Box::new(inner),
             op,
-            span: unary.span.clone(),
+            span: unary.span,
         })
     }
 
@@ -109,7 +106,7 @@ pub trait ReconstructingReducer {
             condition: Box::new(condition),
             if_true: Box::new(if_true),
             if_false: Box::new(if_false),
-            span: ternary.span.clone(),
+            span: ternary.span,
         })
     }
 
@@ -122,7 +119,7 @@ pub trait ReconstructingReducer {
         Ok(CallExpression {
             function: Box::new(function),
             arguments,
-            span: call.span.clone(),
+            span: call.span,
         })
     }
 
@@ -134,7 +131,7 @@ pub trait ReconstructingReducer {
     fn reduce_return(&mut self, return_statement: &ReturnStatement, expression: Expression) -> Result<ReturnStatement> {
         Ok(ReturnStatement {
             expression,
-            span: return_statement.span.clone(),
+            span: return_statement.span,
         })
     }
 
@@ -142,7 +139,7 @@ pub trait ReconstructingReducer {
         Ok(VariableName {
             mutable: variable_name.mutable,
             identifier,
-            span: variable_name.span.clone(),
+            span: variable_name.span,
         })
     }
 
@@ -158,7 +155,7 @@ pub trait ReconstructingReducer {
             variable_names,
             type_,
             value,
-            span: definition.span.clone(),
+            span: definition.span,
         })
     }
 
@@ -175,7 +172,7 @@ pub trait ReconstructingReducer {
         Ok(Assignee {
             identifier,
             accesses,
-            span: assignee.span.clone(),
+            span: assignee.span,
         })
     }
 
@@ -189,7 +186,7 @@ pub trait ReconstructingReducer {
             operation: assign.operation,
             assignee,
             value,
-            span: assign.span.clone(),
+            span: assign.span,
         })
     }
 
@@ -204,7 +201,7 @@ pub trait ReconstructingReducer {
             condition,
             block,
             next: statement.map(|statement| Box::new(statement)),
-            span: conditional.span.clone(),
+            span: conditional.span,
         })
     }
 
@@ -224,14 +221,14 @@ pub trait ReconstructingReducer {
             stop,
             inclusive: iteration.inclusive,
             block,
-            span: iteration.span.clone(),
+            span: iteration.span,
         })
     }
 
     fn reduce_console(&mut self, console: &ConsoleStatement, function: ConsoleFunction) -> Result<ConsoleStatement> {
         Ok(ConsoleStatement {
             function,
-            span: console.span.clone(),
+            span: console.span,
         })
     }
 
@@ -242,14 +239,14 @@ pub trait ReconstructingReducer {
     ) -> Result<ExpressionStatement> {
         Ok(ExpressionStatement {
             expression,
-            span: expression_statement.span.clone(),
+            span: expression_statement.span,
         })
     }
 
     fn reduce_block(&mut self, block: &Block, statements: Vec<Statement>) -> Result<Block> {
         Ok(Block {
             statements,
-            span: block.span.clone(),
+            span: block.span,
         })
     }
 
@@ -278,7 +275,7 @@ pub trait ReconstructingReducer {
             identifier,
             variable.mode(),
             type_,
-            variable.span.clone(),
+            variable.span,
         ))
     }
 
@@ -305,7 +302,7 @@ pub trait ReconstructingReducer {
             output,
             block,
             core_mapping: function.core_mapping.clone(),
-            span: function.span.clone(),
+            span: function.span,
         })
     }
 }
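
Every change in this block is the same mechanical edit: `span: x.span.clone()` becomes `span: x.span`. That only compiles if `Span` is now a cheap copyable value, which the rest of the diff supports (the new `Span` is a `lo`/`hi` pair of byte positions rather than owned line, column, and content data). A hedged sketch of the pattern with a stand-in node type:

use leo_span::Span;

// Stand-in for the real AST nodes rebuilt by the reducers above.
struct Rebuilt {
    value: u32,
    span: Span,
}

// With `Span: Copy`, reusing the old node's span is a plain field copy.
fn rebuild(old: &Rebuilt, value: u32) -> Rebuilt {
    Rebuilt { value, span: old.span }
}
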
@@ -100,12 +100,4 @@ impl fmt::Display for AssignStatement {
     }
 }
 
-impl Node for AssignStatement {
-    fn span(&self) -> &Span {
-        &self.span
-    }
-
-    fn set_span(&mut self, span: Span) {
-        self.span = span;
-    }
-}
+crate::simple_node_impl!(AssignStatement);
@@ -43,12 +43,4 @@ impl fmt::Display for Block {
     }
 }
 
-impl Node for Block {
-    fn span(&self) -> &Span {
-        &self.span
-    }
-
-    fn set_span(&mut self, span: Span) {
-        self.span = span;
-    }
-}
+crate::simple_node_impl!(Block);
@@ -43,12 +43,4 @@ impl fmt::Display for ConditionalStatement {
     }
 }
 
-impl Node for ConditionalStatement {
-    fn span(&self) -> &Span {
-        &self.span
-    }
-
-    fn set_span(&mut self, span: Span) {
-        self.span = span;
-    }
-}
+crate::simple_node_impl!(ConditionalStatement);
@@ -46,12 +46,4 @@ impl fmt::Display for ConsoleArgs {
     }
 }
 
-impl Node for ConsoleArgs {
-    fn span(&self) -> &Span {
-        &self.span
-    }
-
-    fn set_span(&mut self, span: Span) {
-        self.span = span;
-    }
-}
+crate::simple_node_impl!(ConsoleArgs);
@@ -45,10 +45,10 @@ impl fmt::Display for ConsoleFunction {
 }
 
 impl Node for ConsoleFunction {
-    fn span(&self) -> &Span {
+    fn span(&self) -> Span {
         match self {
             ConsoleFunction::Assert(assert) => assert.span(),
-            ConsoleFunction::Error(formatted) | ConsoleFunction::Log(formatted) => &formatted.span,
+            ConsoleFunction::Error(formatted) | ConsoleFunction::Log(formatted) => formatted.span,
         }
     }
 
@@ -41,12 +41,4 @@ impl fmt::Debug for ConsoleStatement {
     }
 }
 
-impl Node for ConsoleStatement {
-    fn span(&self) -> &Span {
-        &self.span
-    }
-
-    fn set_span(&mut self, span: Span) {
-        self.span = span;
-    }
-}
+crate::simple_node_impl!(ConsoleStatement);
@@ -64,12 +64,4 @@ impl fmt::Display for DefinitionStatement {
     }
 }
 
-impl Node for DefinitionStatement {
-    fn span(&self) -> &Span {
-        &self.span
-    }
-
-    fn set_span(&mut self, span: Span) {
-        self.span = span;
-    }
-}
+crate::simple_node_impl!(DefinitionStatement);
@@ -37,12 +37,4 @@ impl fmt::Display for VariableName {
     }
 }
 
-impl Node for VariableName {
-    fn span(&self) -> &Span {
-        &self.span
-    }
-
-    fn set_span(&mut self, span: Span) {
-        self.span = span;
-    }
-}
+crate::simple_node_impl!(VariableName);
@@ -35,12 +35,4 @@ impl fmt::Display for ExpressionStatement {
     }
 }
 
-impl Node for ExpressionStatement {
-    fn span(&self) -> &Span {
-        &self.span
-    }
-
-    fn set_span(&mut self, span: Span) {
-        self.span = span;
-    }
-}
+crate::simple_node_impl!(ExpressionStatement);
@@ -51,12 +51,4 @@ impl fmt::Display for IterationStatement {
     }
 }
 
-impl Node for IterationStatement {
-    fn span(&self) -> &Span {
-        &self.span
-    }
-
-    fn set_span(&mut self, span: Span) {
-        self.span = span;
-    }
-}
+crate::simple_node_impl!(IterationStatement);
@@ -35,12 +35,4 @@ impl fmt::Display for ReturnStatement {
     }
 }
 
-impl Node for ReturnStatement {
-    fn span(&self) -> &Span {
-        &self.span
-    }
-
-    fn set_span(&mut self, span: Span) {
-        self.span = span;
-    }
-}
+crate::simple_node_impl!(ReturnStatement);
@@ -59,9 +59,9 @@ impl fmt::Display for Statement {
 }
 
 impl Node for Statement {
-    fn span(&self) -> &Span {
+    fn span(&self) -> Span {
         use Statement::*;
-        match &self {
+        match self {
            Return(n) => n.span(),
            Definition(n) => n.span(),
            Assign(n) => n.span(),
@@ -37,7 +37,7 @@ version = "1.5.3"
 [dependencies.sha2]
 version = "0.10"
 
-[dev-dependencies.leo-span]
+[dependencies.leo-span]
 path = "../../leo/span"
 version = "1.5.3"
 
@@ -31,6 +31,8 @@ use leo_errors::emitter::Handler;
 use leo_errors::{CompilerError, Result};
 pub use leo_passes::SymbolTable;
 use leo_passes::*;
+use leo_span::source_map::FileName;
+use leo_span::symbol::with_session_globals;
 
 use sha2::{Digest, Sha256};
 use std::fs;
@@ -77,13 +79,12 @@ impl<'a> Compiler<'a> {
     }
 
     // Parses and stores a program file content from a string, constructs a syntax tree, and generates a program.
-    pub fn parse_program_from_string(&mut self, program_string: &str) -> Result<()> {
+    pub fn parse_program_from_string(&mut self, program_string: &str, name: FileName) -> Result<()> {
+        // Register the source (`program_string`) in the source map.
+        let prg_sf = with_session_globals(|s| s.source_map.new_source(program_string, name));
+
         // Use the parser to construct the abstract syntax tree (ast).
-        let ast: leo_ast::Ast = leo_parser::parse_ast(
-            self.handler,
-            self.main_file_path.to_str().unwrap_or_default(),
-            program_string,
-        )?;
+        let ast: leo_ast::Ast = leo_parser::parse_ast(self.handler, &prg_sf.src, prg_sf.start_pos)?;
         // Write the AST snapshot post parsing.
         ast.to_json_file_without_keys(self.output_directory.clone(), "initial_ast.json", &["span"])?;
 
@@ -96,31 +97,24 @@ impl<'a> Compiler<'a> {
     pub fn parse_program(&mut self) -> Result<()> {
         // Load the program file.
         let program_string = fs::read_to_string(&self.main_file_path)
-            .map_err(|e| CompilerError::file_read_error(self.main_file_path.clone(), e))?;
+            .map_err(|e| CompilerError::file_read_error(&self.main_file_path, e))?;
 
-        self.parse_program_from_string(&program_string)
-    }
-
-    /// Parses and stores the input file, constructs a syntax tree, and generates a program input.
-    pub fn parse_input_from_string(&mut self, input_file_path: PathBuf, input_string: &str) -> Result<()> {
-        let input_ast =
-            leo_parser::parse_input(self.handler, input_file_path.to_str().unwrap_or_default(), input_string)?;
-        input_ast.to_json_file_without_keys(self.output_directory.clone(), "inital_input_ast.json", &["span"])?;
-
-        self.input_ast = Some(input_ast);
-        Ok(())
+        self.parse_program_from_string(&program_string, FileName::Real(self.main_file_path.clone()))
     }
 
     /// Parses and stores the input file, constructs a syntax tree, and generates a program input.
     pub fn parse_input(&mut self, input_file_path: PathBuf) -> Result<()> {
         // Load the input file if it exists.
         if input_file_path.exists() {
-            let input_string = fs::read_to_string(&input_file_path)
-                .map_err(|e| CompilerError::file_read_error(input_file_path.clone(), e))?;
+            // Load the input file into the source map.
+            let input_sf = with_session_globals(|s| s.source_map.load_file(&input_file_path))
+                .map_err(|e| CompilerError::file_read_error(&input_file_path, e))?;
 
-            self.parse_input_from_string(input_file_path, &input_string)?;
+            // Parse and serialize it.
+            let input_ast = leo_parser::parse_input(self.handler, &input_sf.src, input_sf.start_pos)?;
+            input_ast.to_json_file_without_keys(self.output_directory.clone(), "inital_input_ast.json", &["span"])?;
+
+            self.input_ast = Some(input_ast);
         }
 
         Ok(())
     }
 
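
Callers now have to say where a source came from. In this diff, real files are registered as `FileName::Real(path)` and in-memory strings (tests, benches) as `FileName::Custom(name)`. A small sketch of the two forms; the `src/main.leo` path is illustrative only:

use std::path::PathBuf;

use leo_span::source_map::FileName;

fn example_names() -> (FileName, FileName) {
    // A program loaded from disk keeps its real path for diagnostics.
    let on_disk = FileName::Real(PathBuf::from("src/main.leo"));
    // An in-memory source (e.g. a test) gets a descriptive label instead.
    let in_memory = FileName::Custom("compiler-test".into());
    (on_disk, in_memory)
}
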
@@ -28,7 +28,7 @@ use leo_errors::{
     LeoError, LeoWarning,
 };
 use leo_passes::SymbolTable;
-use leo_span::symbol::create_session_if_not_set_then;
+use leo_span::{source_map::FileName, symbol::create_session_if_not_set_then};
 use leo_test_framework::{
     runner::{Namespace, ParseType, Runner},
     Test,
@@ -48,8 +48,9 @@ fn parse_program<'a>(
     program_string: &str,
     cwd: Option<PathBuf>,
 ) -> Result<Compiler<'a>, LeoError> {
-    let mut compiler = new_compiler(handler, cwd.unwrap_or_else(|| "compiler-test".into()));
-    compiler.parse_program_from_string(program_string)?;
+    let mut compiler = new_compiler(handler, cwd.clone().unwrap_or_else(|| "compiler-test".into()));
+    let name = cwd.map_or_else(|| FileName::Custom("compiler-test".into()), FileName::Real);
+    compiler.parse_program_from_string(program_string, name)?;
 
     Ok(compiler)
 }
@@ -16,14 +16,15 @@
 
 use leo_ast::Ast;
 use leo_errors::emitter::Handler;
-use leo_span::symbol::create_session_if_not_set_then;
+use leo_span::{source_map::FileName, symbol::create_session_if_not_set_then};
 
 use criterion::{criterion_group, criterion_main, Criterion};
 use std::time::Duration;
 
 fn parse_ast(path: &str, input: &str) -> Ast {
-    create_session_if_not_set_then(|_| {
-        leo_parser::parse_ast(&Handler::default(), path, input).expect("failed to parse benchmark")
+    create_session_if_not_set_then(|s| {
+        let sf = s.source_map.new_source(input, FileName::Custom(path.into()));
+        leo_parser::parse_ast(&Handler::default(), &sf.src, sf.start_pos).expect("failed to parse benchmark")
     })
 }
 
@@ -34,6 +35,8 @@ macro_rules! bench {
                 concat!("./", $file_name, ".leo"),
                 include_str!(concat!("./", $file_name, ".leo"),),
             );
+            // TODO(Centril): This benchmark seems like it actually does nothing
+            // but take a reference to `&ast`, which should be optimized out?
             c.bench_function(concat!("Ast::", $file_name), |b| b.iter(|| &ast));
         }
     };
@@ -44,11 +44,14 @@ struct Opt {
 
 fn main() -> Result<(), String> {
     let opt = Opt::from_args();
-    let input_string = fs::read_to_string(&opt.input_path).expect("failed to open an input file");
-    let input_tree = create_session_if_not_set_then(|_| {
+    let input_tree = create_session_if_not_set_then(|s| {
+        let input_string = s
+            .source_map
+            .load_file(&opt.input_path)
+            .expect("failed to open an input file");
+
         Handler::with(|handler| {
-            let input =
-                leo_parser::parse_program_inputs(handler, input_string.clone(), opt.input_path.to_str().unwrap())?;
+            let input = leo_parser::parse_program_inputs(handler, &input_string.src, input_string.start_pos)?;
             input.to_json_string()
         })
         .map_err(|e| e.to_string())
@@ -42,12 +42,12 @@ struct Opt {
 
 fn main() -> Result<(), String> {
     let opt = Opt::from_args();
-    let code = fs::read_to_string(&opt.input_path).expect("failed to open file");
-
     // Parses the Leo file constructing an ast which is then serialized.
-    let serialized_leo_tree = create_session_if_not_set_then(|_| {
+    let serialized_leo_tree = create_session_if_not_set_then(|s| {
+        let code = s.source_map.load_file(&opt.input_path).expect("failed to open file");
+
         Handler::with(|h| {
-            let ast = leo_parser::parse_ast(h, opt.input_path.to_str().unwrap(), &code)?;
+            let ast = leo_parser::parse_ast(h, &code.src, code.start_pos)?;
             let json = Ast::to_json_string(&ast)?;
             println!("{}", json);
             Ok(json)
@@ -23,6 +23,7 @@
 #![doc = include_str!("../README.md")]
 
 pub(crate) mod tokenizer;
+use leo_span::span::BytePos;
 pub use tokenizer::KEYWORD_TOKENS;
 pub(crate) use tokenizer::*;
 
@@ -37,18 +38,13 @@ use leo_errors::Result;
 mod test;
 
 /// Creates a new AST from a given file path and source code text.
-pub fn parse_ast<T: AsRef<str>, Y: AsRef<str>>(handler: &Handler, path: T, source: Y) -> Result<Ast> {
-    Ok(Ast::new(parser::parse(handler, path.as_ref(), source.as_ref())?))
+pub fn parse_ast(handler: &Handler, source: &str, start_pos: BytePos) -> Result<Ast> {
+    Ok(Ast::new(parser::parse(handler, source, start_pos)?))
 }
 
 /// Parses program inputs from from the input file path and state file path
-pub fn parse_program_inputs<T: AsRef<str>, Y: AsRef<str>>(
-    handler: &Handler,
-    input_string: T,
-    input_path: Y,
-) -> Result<Input> {
-    let program_input: ProgramInput =
-        parser::parse_input(handler, input_path.as_ref(), input_string.as_ref())?.try_into()?;
+pub fn parse_program_inputs(handler: &Handler, input_string: &str, start_pos: BytePos) -> Result<Input> {
+    let program_input: ProgramInput = parser::parse_input(handler, input_string, start_pos)?.try_into()?;
 
     Ok(Input {
         program_input,
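
The public parser entry points no longer take a path: callers register the source with the session's source map and hand the parser the raw text plus its starting `BytePos`, which is how the benches, bins, and tests elsewhere in this diff call them. A hedged sketch of a standalone caller; the `snippet.leo` label is illustrative:

use leo_ast::Ast;
use leo_errors::{emitter::Handler, Result};
use leo_span::{source_map::FileName, symbol::create_session_if_not_set_then};

fn parse_snippet(source: &str) -> Result<Ast> {
    create_session_if_not_set_then(|s| {
        // Registering the text yields a source file whose `start_pos` anchors every span.
        let sf = s.source_map.new_source(source, FileName::Custom("snippet.leo".into()));
        leo_parser::parse_ast(&Handler::default(), &sf.src, sf.start_pos)
    })
}
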
@@ -39,11 +39,14 @@ pub struct ParserContext<'a> {
     pub(crate) prev_token: SpannedToken,
     /// true if parsing an expression for if and loop statements -- means circuit inits are not legal
     pub(crate) disallow_circuit_construction: bool,
-    /// HACK(Centril): Place to store a dummy EOF.
-    /// Exists to appease borrow checker for now.
-    dummy_eof: SpannedToken,
 }
 
+/// Dummy span used to appease borrow checker.
+const DUMMY_EOF: SpannedToken = SpannedToken {
+    token: Token::Eof,
+    span: Span::dummy(),
+};
+
 impl<'a> ParserContext<'a> {
     /// Returns a new [`ParserContext`] type given a vector of tokens.
     pub fn new(handler: &'a Handler, mut tokens: Vec<SpannedToken>) -> Self {
@@ -53,14 +56,9 @@ impl<'a> ParserContext<'a> {
         tokens.reverse();
 
         let token = SpannedToken::dummy();
-        let dummy_eof = SpannedToken {
-            token: Token::Eof,
-            span: token.span.clone(),
-        };
         let mut p = Self {
             handler,
             disallow_circuit_construction: false,
-            dummy_eof,
             prev_token: token.clone(),
             token,
             tokens,
@@ -80,9 +78,9 @@ impl<'a> ParserContext<'a> {
         }
 
         // Extract next token, or `Eof` if there was none.
-        let next_token = self.tokens.pop().unwrap_or_else(|| SpannedToken {
+        let next_token = self.tokens.pop().unwrap_or(SpannedToken {
             token: Token::Eof,
-            span: self.token.span.clone(),
+            span: self.token.span,
         });
 
         // Set the new token.
@@ -108,11 +106,11 @@ impl<'a> ParserContext<'a> {
         }
 
         let idx = match self.tokens.len().checked_sub(dist) {
-            None => return looker(&self.dummy_eof),
+            None => return looker(&DUMMY_EOF),
             Some(idx) => idx,
         };
 
-        looker(self.tokens.get(idx).unwrap_or(&self.dummy_eof))
+        looker(self.tokens.get(idx).unwrap_or(&DUMMY_EOF))
     }
 
     /// Emit the error `err`.
@@ -132,7 +130,7 @@ impl<'a> ParserContext<'a> {
 
     /// At the previous token, return and make an identifier with `name`.
     fn mk_ident_prev(&self, name: Symbol) -> Identifier {
-        let span = self.prev_token.span.clone();
+        let span = self.prev_token.span;
         Identifier { name, span }
     }
 
@@ -161,7 +159,7 @@ impl<'a> ParserContext<'a> {
     /// Expects an [`Identifier`], or errors.
     pub fn expect_ident(&mut self) -> Result<Identifier> {
         self.eat_identifier()
-            .ok_or_else(|| ParserError::unexpected_str(&self.token.token, "ident", &self.token.span).into())
+            .ok_or_else(|| ParserError::unexpected_str(&self.token.token, "ident", self.token.span).into())
     }
 
     /// Returns a reference to the next token if it is a [`GroupCoordinate`], or [None] if
@@ -170,11 +168,11 @@ impl<'a> ParserContext<'a> {
         let (advanced, gc) = self.look_ahead(*dist, |t0| match &t0.token {
             Token::Add => Some((1, GroupCoordinate::SignHigh)),
             Token::Minus => self.look_ahead(*dist + 1, |t1| match &t1.token {
-                Token::Int(value) => Some((2, GroupCoordinate::Number(format!("-{}", value), t1.span.clone()))),
+                Token::Int(value) => Some((2, GroupCoordinate::Number(format!("-{}", value), t1.span))),
                 _ => Some((1, GroupCoordinate::SignLow)),
             }),
             Token::Underscore => Some((1, GroupCoordinate::Inferred)),
-            Token::Int(value) => Some((1, GroupCoordinate::Number(value.clone(), t0.span.clone()))),
+            Token::Int(value) => Some((1, GroupCoordinate::Number(value.clone(), t0.span))),
             _ => None,
         })?;
         *dist += advanced;
@@ -200,7 +198,7 @@ impl<'a> ParserContext<'a> {
         let mut dist = 1; // 0th is `(` so 1st is first gc's start.
         let first_gc = self.peek_group_coordinate(&mut dist)?;
 
-        let check_ahead = |d, token: &_| self.look_ahead(d, |t| (&t.token == token).then(|| t.span.clone()));
+        let check_ahead = |d, token: &_| self.look_ahead(d, |t| (&t.token == token).then(|| t.span));
 
         // Peek at `,`.
         check_ahead(dist, &Token::Comma)?;
@@ -228,7 +226,7 @@ impl<'a> ParserContext<'a> {
             self.bump();
         }
 
-        if let Err(e) = assert_no_whitespace(&right_paren_span, &end_span, &format!("({},{})", gt.x, gt.y), "group") {
+        if let Err(e) = assert_no_whitespace(right_paren_span, end_span, &format!("({},{})", gt.x, gt.y), "group") {
             return Some(Err(e));
         }
 
@@ -252,13 +250,13 @@ impl<'a> ParserContext<'a> {
 
     /// Returns an unexpected error at the current token.
     fn unexpected<T>(&self, expected: impl Display) -> Result<T> {
-        Err(ParserError::unexpected(&self.token.token, expected, &self.token.span).into())
+        Err(ParserError::unexpected(&self.token.token, expected, self.token.span).into())
     }
 
     /// Eats the expected `token`, or errors.
     pub fn expect(&mut self, token: &Token) -> Result<Span> {
         if self.eat(token) {
-            Ok(self.prev_token.span.clone())
+            Ok(self.prev_token.span)
         } else {
             self.unexpected(token)
         }
@@ -267,7 +265,7 @@ impl<'a> ParserContext<'a> {
     /// Eats one of the expected `tokens`, or errors.
     pub fn expect_any(&mut self, tokens: &[Token]) -> Result<Span> {
         if self.eat_any(tokens) {
-            Ok(self.prev_token.span.clone())
+            Ok(self.prev_token.span)
         } else {
             self.unexpected(tokens.iter().map(|x| format!("'{}'", x)).collect::<Vec<_>>().join(", "))
         }
@@ -205,12 +205,12 @@ impl ParserContext<'_> {
                 Token::Minus => UnaryOperation::Negate,
                 _ => unreachable!("parse_unary_expression_ shouldn't produce this"),
             };
-            ops.push((operation, self.prev_token.span.clone()));
+            ops.push((operation, self.prev_token.span));
         }
         let mut inner = self.parse_postfix_expression()?;
         for (op, op_span) in ops.into_iter().rev() {
             inner = Expression::Unary(UnaryExpression {
-                span: &op_span + inner.span(),
+                span: op_span + inner.span(),
                 op,
                 inner: Box::new(inner),
             });
@@ -230,7 +230,7 @@ impl ParserContext<'_> {
         loop {
             if self.eat(&Token::Dot) {
                 let curr = &self.token;
-                return Err(ParserError::unexpected_str(&curr.token, "int or ident", &curr.span).into());
+                return Err(ParserError::unexpected_str(&curr.token, "int or ident", curr.span).into());
             }
 
             if !self.check(&Token::LeftParen) {
@@ -239,7 +239,7 @@ impl ParserContext<'_> {
 
             let (arguments, _, span) = self.parse_paren_comma_list(|p| p.parse_expression().map(Some))?;
             expr = Expression::Call(CallExpression {
-                span: expr.span() + &span,
+                span: expr.span() + span,
                 function: Box::new(expr),
                 arguments,
             });
@@ -261,7 +261,7 @@ impl ParserContext<'_> {
         if !trailing && tuple.len() == 1 {
             Ok(tuple.remove(0))
         } else {
-            Err(ParserError::unexpected("A tuple expression.", "A valid expression.", &span).into())
+            Err(ParserError::unexpected("A tuple expression.", "A valid expression.", span).into())
         }
     }
 
@@ -282,9 +282,9 @@ impl ParserContext<'_> {
 
         Ok(match token {
             Token::Int(value) => {
-                let suffix_span = self.token.span.clone();
-                let full_span = &span + &suffix_span;
-                let assert_no_whitespace = |x| assert_no_whitespace(&span, &suffix_span, &value, x);
+                let suffix_span = self.token.span;
+                let full_span = span + suffix_span;
+                let assert_no_whitespace = |x| assert_no_whitespace(span, suffix_span, &value, x);
                 match self.eat_any(INT_TYPES).then(|| &self.prev_token.token) {
                     // Literal followed by `field`, e.g., `42field`.
                     Some(Token::Field) => {
@@ -302,7 +302,7 @@ impl ParserContext<'_> {
                         let int_ty = Self::token_to_int_type(suffix).expect("unknown int type token");
                         Expression::Value(ValueExpression::Integer(int_ty, value, full_span))
                     }
-                    None => return Err(ParserError::implicit_values_not_allowed(value, &span).into()),
+                    None => return Err(ParserError::implicit_values_not_allowed(value, span).into()),
                 }
             }
             Token::True => Expression::Value(ValueExpression::Boolean("true".into(), span)),
@@ -323,7 +323,7 @@ impl ParserContext<'_> {
                 span,
             }),
             token => {
-                return Err(ParserError::unexpected_str(token, "expression", &span).into());
+                return Err(ParserError::unexpected_str(token, "expression", span).into());
             }
         })
     }
@@ -26,7 +26,7 @@ impl ParserContext<'_> {
 
         while self.has_next() {
             match &self.token.token {
-                Token::Ident(sym::test) => return Err(ParserError::test_function(&self.token.span).into()),
+                Token::Ident(sym::test) => return Err(ParserError::test_function(self.token.span).into()),
                 // Const functions share the first token with the global Const.
                 Token::Const if self.peek_is_function() => {
                     let (id, function) = self.parse_function_declaration()?;
@@ -54,17 +54,17 @@ impl ParserContext<'_> {
                 .map(|x| format!("'{}'", x))
                 .collect::<Vec<_>>()
                 .join(", "),
-            &token.span,
+            token.span,
         )
     }
 
     /// Returns a [`ParamMode`] AST node if the next tokens represent a function parameter mode.
     pub fn parse_function_parameter_mode(&mut self) -> Result<ParamMode> {
-        let public = self.eat(&Token::Public).then(|| self.prev_token.span.clone());
-        let constant = self.eat(&Token::Constant).then(|| self.prev_token.span.clone());
-        let const_ = self.eat(&Token::Const).then(|| self.prev_token.span.clone());
+        let public = self.eat(&Token::Public).then(|| self.prev_token.span);
+        let constant = self.eat(&Token::Constant).then(|| self.prev_token.span);
+        let const_ = self.eat(&Token::Const).then(|| self.prev_token.span);
 
-        if let Some(span) = &const_ {
+        if let Some(span) = const_ {
             self.emit_warning(ParserWarning::const_parameter_or_input(span));
         }
 
@@ -74,10 +74,10 @@ impl ParserContext<'_> {
             (None, None, None) => Ok(ParamMode::Private),
             (Some(_), None, None) => Ok(ParamMode::Public),
             (Some(m1), Some(m2), None) | (Some(m1), None, Some(m2)) | (None, Some(m1), Some(m2)) => {
-                Err(ParserError::inputs_multiple_variable_types_specified(&(m1 + m2)).into())
+                Err(ParserError::inputs_multiple_variable_types_specified(m1 + m2).into())
            }
            (Some(m1), Some(m2), Some(m3)) => {
-                Err(ParserError::inputs_multiple_variable_types_specified(&(m1 + m2 + m3)).into())
+                Err(ParserError::inputs_multiple_variable_types_specified(m1 + m2 + m3).into())
            }
        }
    }
@@ -90,7 +90,7 @@ impl ParserContext<'_> {
         let name = self.expect_ident()?;
 
         if let Some(mutable) = &mutable {
-            self.emit_err(ParserError::mut_function_input(&(&mutable.span + &name.span)));
+            self.emit_err(ParserError::mut_function_input(mutable.span + name.span));
         }
 
         self.expect(&Token::Colon)?;
@@ -126,7 +126,7 @@ impl ParserContext<'_> {
                 identifier: name,
                 input: inputs,
                 output,
-                span: start + block.span.clone(),
+                span: start + block.span,
                 block,
                 core_mapping: <_>::default(),
             },
@@ -27,7 +27,7 @@ impl ParserContext<'_> {
             if self.check(&Token::LeftSquare) {
                 sections.push(self.parse_section()?);
             } else {
-                return Err(ParserError::unexpected_token(self.token.token.clone(), &self.token.span).into());
+                return Err(ParserError::unexpected_token(self.token.token.clone(), self.token.span).into());
             }
         }
 
@@ -27,6 +27,7 @@ use leo_errors::{ParserError, Result};
 use leo_span::Span;
 
 use indexmap::IndexMap;
+use leo_span::span::BytePos;
 use std::unreachable;
 
 mod context;
@@ -38,27 +39,25 @@ pub mod input;
 pub mod statement;
 pub mod type_;
 
-pub(crate) fn assert_no_whitespace(left_span: &Span, right_span: &Span, left: &str, right: &str) -> Result<()> {
-    if left_span.col_stop != right_span.col_start {
-        let mut error_span = left_span + right_span;
-        error_span.col_start = left_span.col_stop - 1;
-        error_span.col_stop = right_span.col_start - 1;
-        return Err(ParserError::unexpected_whitespace(left, right, &error_span).into());
+pub(crate) fn assert_no_whitespace(left_span: Span, right_span: Span, left: &str, right: &str) -> Result<()> {
+    if left_span.hi != right_span.lo {
+        let error_span = Span::new(left_span.hi, right_span.lo); // The span between them.
+        return Err(ParserError::unexpected_whitespace(left, right, error_span).into());
     }
 
     Ok(())
 }
 
 /// Creates a new program from a given file path and source code text.
-pub fn parse(handler: &Handler, path: &str, source: &str) -> Result<Program> {
-    let mut tokens = ParserContext::new(handler, crate::tokenize(path, source)?);
+pub fn parse(handler: &Handler, source: &str, start_pos: BytePos) -> Result<Program> {
+    let mut tokens = ParserContext::new(handler, crate::tokenize(source, start_pos)?);
 
     tokens.parse_program()
 }
 
 /// Parses an input file at the given file `path` and `source` code text.
-pub fn parse_input<T: AsRef<str>, Y: AsRef<str>>(handler: &Handler, path: T, source: Y) -> Result<InputAst> {
-    let mut tokens = ParserContext::new(handler, crate::tokenize(path.as_ref(), source.as_ref())?);
+pub fn parse_input(handler: &Handler, source: &str, start_pos: BytePos) -> Result<InputAst> {
+    let mut tokens = ParserContext::new(handler, crate::tokenize(source, start_pos)?);
 
     tokens.parse_input()
 }
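
With spans reduced to byte offsets, the whitespace check becomes pure arithmetic: two pieces of text are adjacent exactly when `left.hi == right.lo`, and the gap between them is itself a span. A hedged sketch with made-up offsets, e.g. a literal `42group` whose `42` and `group` parts touch:

use leo_span::{
    span::{BytePos, Pos},
    Span,
};

fn adjacency_sketch() {
    // Illustrative offsets only: "42" at bytes 10..12, "group" at bytes 12..17.
    let left = Span::new(BytePos::from_usize(10), BytePos::from_usize(12));
    let right = Span::new(BytePos::from_usize(12), BytePos::from_usize(17));

    // This is the condition assert_no_whitespace checks; the error span, if any,
    // would simply be Span::new(left.hi, right.lo).
    assert!(left.hi == right.lo);
}
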
@@ -27,19 +27,16 @@ impl ParserContext<'_> {
     pub fn construct_assignee_access(expr: Expression, _accesses: &mut [AssigneeAccess]) -> Result<Identifier> {
         match expr {
             Expression::Identifier(id) => Ok(id),
-            _ => return Err(ParserError::invalid_assignment_target(expr.span()).into()),
+            _ => Err(ParserError::invalid_assignment_target(expr.span()).into()),
         }
     }
 
     /// Returns an [`Assignee`] AST node from the given [`Expression`] AST node with accesses.
     pub fn construct_assignee(expr: Expression) -> Result<Assignee> {
-        let expr_span = expr.span().clone();
         let mut accesses = Vec::new();
-        let identifier = Self::construct_assignee_access(expr, &mut accesses)?;
-
         Ok(Assignee {
-            span: expr_span,
-            identifier,
+            span: expr.span(),
+            identifier: Self::construct_assignee_access(expr, &mut accesses)?,
             accesses,
         })
     }
@@ -66,7 +63,7 @@ impl ParserContext<'_> {
             let assignee = Self::construct_assignee(expr)?;
             self.expect(&Token::Semicolon)?;
             Ok(Statement::Assign(Box::new(AssignStatement {
-                span: &assignee.span + value.span(),
+                span: assignee.span + value.span(),
                 assignee,
                 // Currently only `=` so this is alright.
                 operation: AssignOperation::Assign,
@@ -75,7 +72,7 @@ impl ParserContext<'_> {
         } else {
             self.expect(&Token::Semicolon)?;
             Ok(Statement::Expression(ExpressionStatement {
-                span: expr.span().clone(),
+                span: expr.span(),
                 expression: expr,
             }))
         }
@@ -89,7 +86,7 @@ impl ParserContext<'_> {
         loop {
             if self.eat(&Token::RightCurly) {
                 return Ok(Block {
-                    span: &start + &self.prev_token.span,
+                    span: start + self.prev_token.span,
                     statements,
                 });
             }
@@ -103,7 +100,7 @@ impl ParserContext<'_> {
         let start = self.expect(&Token::Return)?;
         let expression = self.parse_expression()?;
         self.expect(&Token::Semicolon)?;
-        let span = &start + expression.span();
+        let span = start + expression.span();
         Ok(ReturnStatement { span, expression })
     }
 
@@ -125,7 +122,7 @@ impl ParserContext<'_> {
         };
 
         Ok(ConditionalStatement {
-            span: &start + next.as_ref().map(|x| x.span()).unwrap_or(&body.span),
+            span: start + next.as_ref().map(|x| x.span()).unwrap_or(body.span),
             condition: expr,
             block: body,
             next,
@@ -151,7 +148,7 @@ impl ParserContext<'_> {
         let block = self.parse_block()?;
 
         Ok(IterationStatement {
-            span: start_span + block.span.clone(),
+            span: start_span + block.span,
             variable: ident,
             type_: type_.0,
             start,
@@ -171,7 +168,7 @@ impl ParserContext<'_> {
             string = Some(match token {
                 Token::StringLit(chars) => chars,
                 _ => {
-                    p.emit_err(ParserError::unexpected_str(token, "formatted string", &span));
+                    p.emit_err(ParserError::unexpected_str(token, "formatted string", span));
                     Vec::new()
                 }
             });
@@ -207,7 +204,7 @@ impl ParserContext<'_> {
                 self.emit_err(ParserError::unexpected_ident(
                     x,
                     &["assert", "error", "log"],
-                    &function.span,
+                    function.span,
                 ));
                 ConsoleFunction::Log(self.parse_console_args()?)
             }
@@ -215,21 +212,21 @@ impl ParserContext<'_> {
         self.expect(&Token::Semicolon)?;
 
         Ok(ConsoleStatement {
-            span: &keyword + function.span(),
+            span: keyword + function.span(),
             function,
         })
     }
 
     /// Returns a [`VariableName`] AST node if the next tokens represent a variable name with
     /// valid keywords.
-    pub fn parse_variable_name(&mut self, decl_ty: Declare, span: &Span) -> Result<VariableName> {
+    pub fn parse_variable_name(&mut self, decl_ty: Declare, span: Span) -> Result<VariableName> {
         if self.eat(&Token::Mut) {
-            self.emit_err(ParserError::let_mut_statement(&(&self.prev_token.span + span)));
+            self.emit_err(ParserError::let_mut_statement(self.prev_token.span + span));
         }
 
         let name = self.expect_ident()?;
         Ok(VariableName {
-            span: name.span.clone(),
+            span: name.span,
             mutable: matches!(decl_ty, Declare::Let),
             identifier: name,
         })
@@ -238,7 +235,7 @@ impl ParserContext<'_> {
     /// Returns a [`DefinitionStatement`] AST node if the next tokens represent a definition statement.
     pub fn parse_definition_statement(&mut self) -> Result<DefinitionStatement> {
         self.expect_any(&[Token::Let, Token::Const])?;
-        let decl_span = self.prev_token.span.clone();
+        let decl_span = self.prev_token.span;
         let decl_type = match &self.prev_token.token {
             Token::Let => Declare::Let,
             Token::Const => Declare::Const,
@@ -247,7 +244,7 @@ impl ParserContext<'_> {
         // Parse variable names.
         let variable_names = if self.peek_is_left_par() {
             let vars = self
-                .parse_paren_comma_list(|p| p.parse_variable_name(decl_type, &decl_span).map(Some))
+                .parse_paren_comma_list(|p| p.parse_variable_name(decl_type, decl_span).map(Some))
                 .map(|(vars, ..)| vars)?;
 
             if vars.len() == 1 {
@@ -256,7 +253,7 @@ impl ParserContext<'_> {
 
             vars
         } else {
-            vec![self.parse_variable_name(decl_type, &decl_span)?]
+            vec![self.parse_variable_name(decl_type, decl_span)?]
         };
 
         self.expect(&Token::Colon)?;
@@ -267,7 +264,7 @@ impl ParserContext<'_> {
         self.expect(&Token::Semicolon)?;
 
         Ok(DefinitionStatement {
-            span: &decl_span + expr.span(),
+            span: decl_span + expr.span(),
             declaration_type: decl_type,
             variable_names,
             type_: type_.0,
@@ -17,7 +17,11 @@
 use crate::{tokenizer, ParserContext, SpannedToken};
 use leo_ast::{Expression, ExpressionStatement, Statement, ValueExpression};
 use leo_errors::{emitter::Handler, LeoError};
-use leo_span::{symbol::create_session_if_not_set_then, Span};
+use leo_span::{
+    source_map::FileName,
+    symbol::{create_session_if_not_set_then, SessionGlobals},
+    Span,
+};
 use leo_test_framework::{
     runner::{Namespace, ParseType, Runner},
     Test,
@@ -34,9 +38,8 @@ impl Namespace for TokenNamespace {
     }
 
     fn run_test(&self, test: Test) -> Result<Value, String> {
-        create_session_if_not_set_then(|_| {
-            tokenizer::tokenize("test", &test.content)
-                .map(|tokens| {
+        create_session_if_not_set_then(|s| {
+            tokenize(test, s).map(|tokens| {
                 Value::String(
                     tokens
                         .into_iter()
@@ -45,7 +48,6 @@ impl Namespace for TokenNamespace {
                         .join(","),
                 )
             })
-                .map_err(|x| x.to_string())
        })
    }
 }
@@ -76,8 +78,9 @@ fn with_handler<T>(
     Ok(parsed)
 }
 
-fn tokenize(test: Test) -> Result<Vec<SpannedToken>, String> {
-    tokenizer::tokenize("test", &test.content).map_err(|x| x.to_string())
+fn tokenize(test: Test, s: &SessionGlobals) -> Result<Vec<SpannedToken>, String> {
+    let sf = s.source_map.new_source(&test.content, FileName::Custom("test".into()));
+    tokenizer::tokenize(&sf.src, sf.start_pos).map_err(|x| x.to_string())
 }
 
 fn all_are_comments(tokens: &[SpannedToken]) -> bool {
@@ -98,8 +101,8 @@ impl Namespace for ParseExpressionNamespace {
     }
 
     fn run_test(&self, test: Test) -> Result<Value, String> {
-        create_session_if_not_set_then(|_| {
-            let tokenizer = tokenize(test)?;
+        create_session_if_not_set_then(|s| {
+            let tokenizer = tokenize(test, s)?;
             if all_are_comments(&tokenizer) {
                 return Ok(yaml_or_fail(""));
             }
@@ -116,8 +119,8 @@ impl Namespace for ParseStatementNamespace {
     }
 
     fn run_test(&self, test: Test) -> Result<Value, String> {
-        create_session_if_not_set_then(|_| {
-            let tokenizer = tokenize(test)?;
+        create_session_if_not_set_then(|s| {
+            let tokenizer = tokenize(test, s)?;
             if all_are_comments(&tokenizer) {
                 return Ok(yaml_or_fail(Statement::Expression(ExpressionStatement {
                     expression: Expression::Value(ValueExpression::String(Vec::new(), Default::default())),
@@ -137,7 +140,7 @@ impl Namespace for ParseNamespace {
     }
 
     fn run_test(&self, test: Test) -> Result<Value, String> {
-        create_session_if_not_set_then(|_| with_handler(tokenize(test)?, |p| p.parse_program()).map(yaml_or_fail))
+        create_session_if_not_set_then(|s| with_handler(tokenize(test, s)?, |p| p.parse_program()).map(yaml_or_fail))
     }
 }
 
@@ -193,8 +196,8 @@ impl Namespace for SerializeNamespace {
     }
 
     fn run_test(&self, test: Test) -> Result<Value, String> {
-        create_session_if_not_set_then(|_| {
-            let tokenizer = tokenize(test)?;
+        create_session_if_not_set_then(|s| {
+            let tokenizer = tokenize(test, s)?;
             let parsed = with_handler(tokenizer, |p| p.parse_program())?;
 
             let mut json = serde_json::to_value(parsed).expect("failed to convert to json value");
@@ -214,7 +217,7 @@ impl Namespace for InputNamespace {
     }
 
     fn run_test(&self, test: Test) -> Result<Value, String> {
-        create_session_if_not_set_then(|_| with_handler(tokenize(test)?, |p| p.parse_input()).map(yaml_or_fail))
+        create_session_if_not_set_then(|s| with_handler(tokenize(test, s)?, |p| p.parse_input()).map(yaml_or_fail))
     }
 }
 
@@ -449,7 +449,7 @@ pub struct SpannedToken {
 
 impl SpannedToken {
     /// Returns a dummy token at a dummy span.
-    pub fn dummy() -> Self {
+    pub const fn dummy() -> Self {
         Self {
             token: Token::Question,
             span: Span::dummy(),
@@ -21,7 +21,6 @@
 
 pub(crate) mod token;
 use std::iter;
-use std::sync::Arc;
 
 pub use self::token::KEYWORD_TOKENS;
 pub(crate) use self::token::*;
@@ -30,78 +29,38 @@ pub(crate) mod lexer;
 pub(crate) use self::lexer::*;
 
 use leo_errors::{ParserError, Result};
-use leo_span::Span;
+use leo_span::{
+    span::{BytePos, Pos},
+    Span,
+};
 
 /// Creates a new vector of spanned tokens from a given file path and source code text.
-pub(crate) fn tokenize(path: &str, input: &str) -> Result<Vec<SpannedToken>> {
-    tokenize_iter(path, input).collect()
+pub(crate) fn tokenize(input: &str, start_pos: BytePos) -> Result<Vec<SpannedToken>> {
+    tokenize_iter(input, start_pos).collect()
 }
 
-/// Yields spanned tokens from a given file path and source code text.
-pub(crate) fn tokenize_iter<'a>(path: &'a str, input: &'a str) -> impl 'a + Iterator<Item = Result<SpannedToken>> {
-    let path = Arc::new(path.to_string());
+/// Yields spanned tokens from the given source code text.
+///
+/// The `lo` byte position determines where spans will start.
+pub(crate) fn tokenize_iter(input: &str, mut lo: BytePos) -> impl '_ + Iterator<Item = Result<SpannedToken>> {
     let mut index = 0usize;
-    let mut line_no = 1usize;
-    let mut line_start = 0usize;
     iter::from_fn(move || {
         while input.len() > index {
-            let token = match Token::eat(&input[index..]) {
+            let (token_len, token) = match Token::eat(&input[index..]) {
                 Err(e) => return Some(Err(e)),
                 Ok(t) => t,
            };
+            index += token_len;
+
+            let span = Span::new(lo, lo + BytePos::from_usize(token_len));
+            lo = span.hi;
+
             match token {
-                (token_len, Token::WhiteSpace) => {
-                    let bytes = input.as_bytes();
-                    if bytes[index] == 0x000D && matches!(bytes.get(index + 1), Some(0x000A)) {
-                        // Check carriage return followed by newline.
-                        line_no += 1;
-                        line_start = index + token_len + 1;
-                        index += token_len;
-                    } else if matches!(bytes[index], 0x000A | 0x000D) {
-                        // Check new-line or carriage-return
-                        line_no += 1;
-                        line_start = index + token_len;
-                    }
-                    index += token_len;
-                }
-                (token_len, token) => {
-                    let mut span = Span::new(
-                        line_no,
-                        line_no,
-                        index - line_start + 1,
-                        index - line_start + token_len + 1,
-                        path.clone(),
-                        input[line_start
-                            ..input[line_start..]
-                                .find('\n')
-                                .map(|i| i + line_start)
-                                .unwrap_or(input.len())]
-                            .to_string(),
-                    );
-                    match &token {
-                        Token::CommentLine(_) => {
-                            line_no += 1;
-                            line_start = index + token_len;
-                        }
-                        Token::CommentBlock(block) => {
-                            let line_ct = block.chars().filter(|x| *x == '\n').count();
-                            line_no += line_ct;
-                            if line_ct > 0 {
-                                let last_line_index = block.rfind('\n').unwrap();
-                                line_start = index + last_line_index + 1;
-                                span.col_stop = index + token_len - line_start + 1;
-                            }
-                            span.line_stop = line_no;
-                        }
-                        Token::AddressLit(address) if !check_address(address) => {
-                            return Some(Err(ParserError::invalid_address_lit(address, &span).into()));
-                        }
-                        _ => (),
-                    }
-                    index += token_len;
-                    return Some(Ok(SpannedToken { token, span }));
-                }
+                Token::WhiteSpace => continue,
+                Token::AddressLit(address) if !check_address(&address) => {
+                    return Some(Err(ParserError::invalid_address_lit(address, span).into()));
+                }
+                _ => return Some(Ok(SpannedToken { token, span })),
            }
        }
 
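
Token spans are now derived purely from byte offsets and resolved back to text through the source map; the updated `test_spans` test below relies on exactly that. A hedged in-crate sketch of the same round trip (it mirrors the tests, since `tokenize` is crate-private and not a public API):

use leo_span::{source_map::FileName, symbol::create_session_if_not_set_then};

// Sketch only: callable inside the parser crate, the same way the tests use it.
fn dump_tokens(src: &str) {
    create_session_if_not_set_then(|s| {
        let sf = s.source_map.new_source(src, FileName::Custom("sketch".into()));
        for spanned in tokenize(&sf.src, sf.start_pos).unwrap() {
            // Any token's original text can be recovered from the map alone.
            let text = s.source_map.contents_of_span(spanned.span);
            println!("{} -> {:?}", spanned.token, text);
        }
    })
}
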
@@ -112,14 +71,12 @@ pub(crate) fn tokenize_iter<'a>(path: &'a str, input: &'a str) -> impl 'a + Iter
 #[cfg(test)]
 mod tests {
     use super::*;
-    use leo_span::symbol::create_session_if_not_set_then;
+    use leo_span::{source_map::FileName, symbol::create_session_if_not_set_then};
 
     #[test]
     fn test_tokenizer() {
-        create_session_if_not_set_then(|_| {
-            let tokens = tokenize(
-                "test_path",
-                r#"
+        create_session_if_not_set_then(|s| {
+            let raw = r#"
     'a'
     '😭'
     "test"
@@ -194,9 +151,9 @@ mod tests {
     ?
     // test
     /* test */
-    //"#,
-            )
-            .unwrap();
+    //"#;
+            let sf = s.source_map.new_source(raw, FileName::Custom("test".into()));
+            let tokens = tokenize(&sf.src, sf.start_pos).unwrap();
             let mut output = String::new();
             for SpannedToken { token, .. } in tokens.iter() {
                 output += &format!("{} ", token);
@@ -212,7 +169,7 @@ mod tests {
 
     #[test]
     fn test_spans() {
-        create_session_if_not_set_then(|_| {
+        create_session_if_not_set_then(|s| {
             let raw = r#"
 ppp test
 // test
@@ -223,7 +180,10 @@ ppp test
 test */
 test
 "#;
-            let tokens = tokenize("test_path", raw).unwrap();
+
+            let sm = &s.source_map;
+            let sf = sm.new_source(raw, FileName::Custom("test".into()));
+            let tokens = tokenize(&sf.src, sf.start_pos).unwrap();
             let mut line_indicies = vec![0];
             for (i, c) in raw.chars().enumerate() {
                 if c == '\n' {
@@ -231,11 +191,7 @@ ppp test
                 }
             }
             for token in tokens.iter() {
-                let token_raw = token.token.to_string();
-                let start = line_indicies.get(token.span.line_start - 1).unwrap();
-                let stop = line_indicies.get(token.span.line_stop - 1).unwrap();
-                let original = &raw[*start + token.span.col_start - 1..*stop + token.span.col_stop - 1];
-                assert_eq!(original, &token_raw);
+                assert_eq!(token.token.to_string(), sm.contents_of_span(token.span).unwrap());
             }
         })
     }
@@ -37,7 +37,7 @@ pub struct SymbolTable<'a> {
 impl<'a> SymbolTable<'a> {
     pub fn check_shadowing(&self, symbol: &Symbol) -> Result<()> {
         if let Some(function) = self.functions.get(symbol) {
-            Err(AstError::shadowed_function(symbol, &function.span).into())
+            Err(AstError::shadowed_function(symbol, function.span).into())
         } else {
             self.variables.check_shadowing(symbol)?;
             Ok(())
@@ -41,7 +41,7 @@ impl Display for Declaration {
 #[derive(Clone, Debug, Eq, PartialEq)]
 pub struct VariableSymbol<'a> {
     pub type_: &'a Type,
-    pub span: &'a Span,
+    pub span: Span,
     pub declaration: Declaration,
 }
 
@ -39,7 +39,7 @@ fn return_incorrect_type(t1: Option<Type>, t2: Option<Type>, expected: Option<Ty
}

impl<'a> TypeChecker<'a> {
pub(crate) fn compare_expr_type(&mut self, expr: &Expression, expected: Option<Type>, span: &Span) -> Option<Type> {
pub(crate) fn compare_expr_type(&mut self, expr: &Expression, expected: Option<Type>, span: Span) -> Option<Type> {
match expr {
Expression::Identifier(ident) => {
if let Some(var) = self.symbol_table.lookup_variable(&ident.name) {

@ -73,7 +73,7 @@ impl<'a> StatementVisitor<'a> for TypeChecker<'a> {
Some(var.type_.clone())
} else {
self.handler.emit_err(
TypeCheckerError::unknown_sym("variable", &input.assignee.identifier.name, &input.assignee.span).into(),
TypeCheckerError::unknown_sym("variable", &input.assignee.identifier.name, input.assignee.span).into(),
);

None

@ -79,7 +79,7 @@ impl<'a> TypeChecker<'a> {
}
}

pub(crate) fn assert_type(&self, type_: Type, expected: Option<Type>, span: &Span) -> Type {
pub(crate) fn assert_type(&self, type_: Type, expected: Option<Type>, span: Span) -> Type {
if let Some(expected) = expected {
if type_ != expected {
self.handler
@ -90,7 +90,7 @@ impl<'a> TypeChecker<'a> {
type_
}

pub(crate) fn assert_one_of_types(&self, type_: Option<Type>, expected: &[Type], span: &Span) -> Option<Type> {
pub(crate) fn assert_one_of_types(&self, type_: Option<Type>, expected: &[Type], span: Span) -> Option<Type> {
if let Some(type_) = type_.clone() {
for t in expected.iter() {
if &type_ == t {
@ -111,15 +111,15 @@ impl<'a> TypeChecker<'a> {
type_
}

pub(crate) fn assert_arith_type(&self, type_: Option<Type>, span: &Span) -> Option<Type> {
pub(crate) fn assert_arith_type(&self, type_: Option<Type>, span: Span) -> Option<Type> {
self.assert_one_of_types(type_, ARITHMETIC_TYPES, span)
}

pub(crate) fn assert_field_or_int_type(&self, type_: Option<Type>, span: &Span) -> Option<Type> {
pub(crate) fn assert_field_or_int_type(&self, type_: Option<Type>, span: Span) -> Option<Type> {
self.assert_one_of_types(type_, FIELD_AND_INT_TYPES, span)
}

pub(crate) fn assert_int_type(&self, type_: Option<Type>, span: &Span) -> Option<Type> {
pub(crate) fn assert_int_type(&self, type_: Option<Type>, span: Span) -> Option<Type> {
self.assert_one_of_types(type_, INT_TYPES, span)
}
}
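Because `Span` is now `Copy`, these helpers take their span by value and call sites simply forward it, with no borrowing or cloning. A hypothetical helper in the same spirit (not part of the commit) showing how the by-value span threads through:

impl<'a> TypeChecker<'a> {
    // Hypothetical convenience wrapper: infer the expression's type and then
    // assert it against `expected`, reusing the same by-value span for both steps.
    pub(crate) fn check_expr(&mut self, expr: &Expression, expected: Option<Type>, span: Span) -> Option<Type> {
        let actual = self.compare_expr_type(expr, expected.clone(), span)?;
        Some(self.assert_type(actual, expected, span))
    }
}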
@ -20,6 +20,7 @@ use backtrace::Backtrace;
use color_backtrace::{BacktracePrinter, Verbosity};
use colored::Colorize;
use derivative::Derivative;
use leo_span::source_map::is_not_test_framework;

/// The indent for an error message.
pub(crate) const INDENT: &str = " ";
@ -120,12 +121,7 @@ impl fmt::Display for Backtraced {
let message = format!("{kind} [{code}]: {message}", message = self.message,);

// To avoid the color enabling characters for comparison with test expectations.
if std::env::var("LEO_TESTFRAMEWORK")
.unwrap_or_default()
.trim()
.to_owned()
.is_empty()
{
if is_not_test_framework() {
if self.error {
write!(f, "{}", message.bold().red())?;
} else {
@ -16,7 +16,7 @@

use crate::{Backtraced, INDENT};

use leo_span::Span;
use leo_span::{source_map::SpanLocation, symbol::with_session_globals, Span};

use backtrace::Backtrace;
use color_backtrace::{BacktracePrinter, Verbosity};
@ -50,14 +50,14 @@ impl Formatted {
code_identifier: i8,
type_: String,
error: bool,
span: &Span,
span: Span,
backtrace: Backtrace,
) -> Self
where
S: ToString,
{
Self {
span: span.clone(),
span,
backtrace: Backtraced::new_from_backtrace(
message.to_string(),
help,
@ -107,7 +107,18 @@ impl fmt::Display for Formatted {
underline
};

let underlined = underline(self.span.col_start, self.span.col_stop);
let (loc, contents) = with_session_globals(|s| {
(
s.source_map
.span_to_location(self.span)
.unwrap_or_else(SpanLocation::dummy),
s.source_map
.line_contents_of_span(self.span)
.unwrap_or_else(|| "<contents unavailable>".to_owned()),
)
});

let underlined = underline(loc.col_start, loc.col_stop);

let (kind, code) = if self.backtrace.error {
("Error", self.error_code())
@ -138,17 +149,17 @@ impl fmt::Display for Formatted {
"\n{indent }--> {path}:{line_start}:{start}\n\
{indent } |\n",
indent = INDENT,
path = &*self.span.path,
line_start = self.span.line_start,
start = self.span.col_start,
path = &loc.source_file.name,
line_start = loc.line_start,
start = loc.col_start,
)?;

for (line_no, line) in self.span.content.lines().enumerate() {
for (line_no, line) in contents.lines().enumerate() {
writeln!(
f,
"{line_no:width$} | {text}",
width = INDENT.len(),
line_no = self.span.line_start + line_no,
line_no = loc.line_start + line_no,
text = line,
)?;
}
@ -96,7 +96,7 @@ macro_rules! create_messages {
// Formatted errors always takes a span.
$(#[$error_func_docs])*
// Expands additional arguments for the error defining function.
pub fn $name($($arg_names: $arg_types,)* span: &leo_span::Span) -> Self {
pub fn $name($($arg_names: $arg_types,)* span: leo_span::Span) -> Self {
Self::Formatted(
Formatted::new_from_span(
$message,

@ -265,7 +265,7 @@ impl Handler {
mod tests {
use super::*;
use crate::ParserError;
use leo_span::Span;
use leo_span::{symbol::create_session_if_not_set_then, Span};

#[test]
fn fresh_no_errors() {
@ -276,28 +276,30 @@ mod tests {

#[test]
fn buffer_works() {
create_session_if_not_set_then(|_| {
let count_err = |s: String| s.lines().filter(|l| l.contains("Error")).count();

let res: Result<(), _> = Handler::with(|h| {
let s = Span::default();
assert_eq!(h.err_count(), 0);
h.emit_err(ParserError::invalid_import_list(&s).into());
h.emit_err(ParserError::invalid_import_list(s).into());
assert_eq!(h.err_count(), 1);
h.emit_err(ParserError::unexpected_eof(&s).into());
h.emit_err(ParserError::unexpected_eof(s).into());
assert_eq!(h.err_count(), 2);
Err(ParserError::spread_in_array_init(&s).into())
Err(ParserError::spread_in_array_init(s).into())
});

assert_eq!(count_err(res.unwrap_err().to_string()), 3);

let res: Result<(), _> = Handler::with(|h| {
let s = Span::default();
h.emit_err(ParserError::invalid_import_list(&s).into());
h.emit_err(ParserError::unexpected_eof(&s).into());
h.emit_err(ParserError::invalid_import_list(s).into());
h.emit_err(ParserError::unexpected_eof(s).into());
Ok(())
});
assert_eq!(count_err(res.unwrap_err().to_string()), 2);

let () = Handler::with(|_| Ok(())).unwrap();
})
}
}
@ -18,16 +18,8 @@ license = "GPL-3.0"
edition = "2021"
rust-version = "1.56.1"

[dependencies.fxhash]
version = "0.2.1"

[dependencies.indexmap]
version = "1.8"
features = ["serde"]

[dependencies.serde]
version = "1.0.133"
features = [ "derive", "rc" ]

[dependencies.scoped-tls]
version = "1.0.0"
[dependencies]
fxhash = "0.2.1"
indexmap = { version = "1.8", features = ["serde"] }
serde = { version = "1.0.133", features = [ "derive", "rc" ] }
scoped-tls = { version = "1.0.0" }
@ -23,3 +23,5 @@ pub mod span;
pub use span::Span;

pub mod span_json;

pub mod source_map;
441 leo/span/src/source_map.rs Normal file
@ -0,0 +1,441 @@
use crate::span::{BytePos, CharPos, Pos, Span};
use std::{
cell::RefCell,
fmt, fs, io,
path::{Path, PathBuf},
rc::Rc,
};

/// The source map containing all recorded sources,
/// methods to register new ones,
/// and methods to query about spans in relation to recorded sources.
#[derive(Default)]
pub struct SourceMap {
/// The actual source map data.
inner: RefCell<SourceMapInner>,
}

/// Actual data of the source map.
/// We use this setup for purposes of interior mutability.
#[derive(Default)]
struct SourceMapInner {
/// The address space below this value is currently used by the files in the source map.
used_address_space: u32,

/// All the source files recorded thus far.
///
/// The list is append-only with mappings from the start byte position
/// for fast lookup from a `Span` to its `SourceFile`.
source_files: Vec<Rc<SourceFile>>,
}

impl SourceMap {
/// Loads the given `path` and returns a `SourceFile` for it.
pub fn load_file(&self, path: &Path) -> io::Result<Rc<SourceFile>> {
Ok(self.new_source(&fs::read_to_string(path)?, FileName::Real(path.to_owned())))
}

/// Registers `source` under the given file `name`, returning a `SourceFile` back.
pub fn new_source(&self, source: &str, name: FileName) -> Rc<SourceFile> {
let len = u32::try_from(source.len()).unwrap();
let mut inner = self.inner.borrow_mut();
let start_pos = inner.try_allocate_address_space(len).unwrap();
let source_file = Rc::new(SourceFile::new(name, source.to_owned(), start_pos));
inner.source_files.push(source_file.clone());
source_file
}

/// Find the index for the source file containing `pos`.
fn find_source_file_index(&self, pos: BytePos) -> Option<usize> {
self.inner
.borrow()
.source_files
.binary_search_by_key(&pos, |file| file.start_pos)
.map_or_else(|p| p.checked_sub(1), Some)
}

/// Find the source file containing `pos`.
fn find_source_file(&self, pos: BytePos) -> Option<Rc<SourceFile>> {
Some(self.inner.borrow().source_files[self.find_source_file_index(pos)?].clone())
}

/// Finds line column info about a given `pos`.
fn find_line_col(&self, pos: BytePos) -> Option<LineCol> {
let source_file = self.find_source_file(pos)?;
let (line, col) = source_file.lookup_file_pos(pos);
Some(LineCol { source_file, line, col })
}

/// Retrieves the location (source file, line, col) on the given span.
pub fn span_to_location(&self, sp: Span) -> Option<SpanLocation> {
let lo = self.find_line_col(sp.lo)?;
let hi = self.find_line_col(sp.hi)?;
Some(SpanLocation {
source_file: lo.source_file,
line_start: lo.line,
line_stop: hi.line,
col_start: lo.col.to_usize() + 1,
col_stop: hi.col.to_usize() + 1,
})
}

/// Returns a displayable representation of the `span` as a string.
pub fn span_to_string(&self, span: Span) -> String {
let loc = match self.span_to_location(span) {
None => return "no-location".to_string(),
Some(l) => l,
};

if loc.line_start == loc.line_stop {
format!("{}:{}-{}", loc.line_start, loc.col_start, loc.col_stop)
} else {
format!(
"{}:{}-{}:{}",
loc.line_start, loc.col_start, loc.line_stop, loc.col_stop
)
}
}

/// Returns the source contents that is spanned by `span`.
pub fn contents_of_span(&self, span: Span) -> Option<String> {
let begin = self.find_source_file(span.lo)?;
let end = self.find_source_file(span.hi)?;
assert_eq!(begin.start_pos, end.start_pos);
Some(begin.contents_of_span(span))
}

/// Returns the source contents of the lines that `span` is within.
///
/// That is, if the span refers to `x = 4` in the source code:
///
/// > ```text
/// > // Line 1
/// > let x
/// > = 4;
/// > // Line 4
/// > ```
///
/// then the contents on lines 2 and 3 are returned.
pub fn line_contents_of_span(&self, span: Span) -> Option<String> {
let begin = self.find_source_file(span.lo)?;
let end = self.find_source_file(span.hi)?;
assert_eq!(begin.start_pos, end.start_pos);

let idx_lo = begin.lookup_line(span.lo).unwrap_or(0);
let idx_hi = begin.lookup_line(span.hi).unwrap_or(0) + 1;
let lo_line_pos = begin.lines[idx_lo];
let hi_line_pos = if idx_hi < begin.lines.len() {
begin.lines[idx_hi]
} else {
begin.end_pos
};
Some(begin.contents_of_span(Span::new(lo_line_pos, hi_line_pos)))
}
}

impl SourceMapInner {
/// Attempt reserving address space for `size` number of bytes.
fn try_allocate_address_space(&mut self, size: u32) -> Option<BytePos> {
let current = self.used_address_space;
// By adding one, we can distinguish between files, even when they are empty.
self.used_address_space = current.checked_add(size)?.checked_add(1)?;
Some(BytePos(current))
}
}

/// A file name.
///
/// For now it's simply a wrapper around `PathBuf`,
/// but may become more complicated in the future.
#[derive(Clone)]
pub enum FileName {
/// A real file.
Real(PathBuf),
/// Any sort of description for a source.
Custom(String),
}

impl fmt::Display for FileName {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Real(x) if is_not_test_framework() => x.display().fmt(f),
Self::Real(_) => Ok(()),
Self::Custom(x) => f.write_str(x),
}
}
}

/// Is the env var `LEO_TESTFRAMEWORK` not enabled?
pub fn is_not_test_framework() -> bool {
std::env::var("LEO_TESTFRAMEWORK")
.unwrap_or_default()
.trim()
.to_owned()
.is_empty()
}

/// A single source in the [`SourceMap`].
pub struct SourceFile {
/// The name of the file that the source came from.
pub name: FileName,
/// The complete source code.
pub src: String,
/// The start position of this source in the `SourceMap`.
pub start_pos: BytePos,
/// The end position of this source in the `SourceMap`.
pub end_pos: BytePos,
/// Locations of line beginnings in the source code.
lines: Vec<BytePos>,
/// Locations of multi-byte characters in the source code.
multibyte_chars: Vec<MultiByteChar>,
}

impl SourceFile {
/// Creates a new `SourceFile` given the file `name`,
/// source contents, and the `start_pos`ition.
///
/// This position is used for analysis purposes.
fn new(name: FileName, mut src: String, start_pos: BytePos) -> Self {
normalize_src(&mut src);
let end_pos = start_pos + BytePos::from_usize(src.len());
let (lines, multibyte_chars) = analyze_source_file(&src, start_pos);
Self {
name,
src,
start_pos,
end_pos,
lines,
multibyte_chars,
}
}

/// Converts an absolute `BytePos` to a `CharPos` relative to the `SourceFile`.
fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos {
// The number of extra bytes due to multibyte chars in the `SourceFile`.
let mut total_extra_bytes = 0;

for mbc in self.multibyte_chars.iter() {
if mbc.pos < bpos {
// Every character is at least one byte, so we only
// count the actual extra bytes.
total_extra_bytes += mbc.bytes as u32 - 1;
// We should never see a byte position in the middle of a
// character.
assert!(bpos.to_u32() >= mbc.pos.to_u32() + mbc.bytes as u32);
} else {
break;
}
}

assert!(self.start_pos.to_u32() + total_extra_bytes <= bpos.to_u32());
CharPos(bpos.to_usize() - self.start_pos.to_usize() - total_extra_bytes as usize)
}

/// Finds the line containing the given position. The return value is the
/// index into the `lines` array of this `SourceFile`, not the 1-based line
/// number. If the source_file is empty or the position is located before the
/// first line, `None` is returned.
fn lookup_line(&self, pos: BytePos) -> Option<usize> {
match self.lines.binary_search(&pos) {
Ok(idx) => Some(idx),
Err(0) => None,
Err(idx) => Some(idx - 1),
}
}

/// Looks up the file's (1-based) line number and (0-based `CharPos`) column offset, for a
/// given `BytePos`.
fn lookup_file_pos(&self, pos: BytePos) -> (usize, CharPos) {
let chpos = self.bytepos_to_file_charpos(pos);
match self.lookup_line(pos) {
Some(a) => {
let line = a + 1; // Line numbers start at 1
let linebpos = self.lines[a];
let linechpos = self.bytepos_to_file_charpos(linebpos);
let col = chpos - linechpos;
assert!(chpos >= linechpos);
(line, col)
}
None => (0, chpos),
}
}

/// Returns contents of a `span` assumed to be within the given file.
fn contents_of_span(&self, span: Span) -> String {
let begin_pos = self.bytepos_to_file_charpos(span.lo).to_usize();
let end_pos = self.bytepos_to_file_charpos(span.hi).to_usize();
String::from_utf8_lossy(&self.src.as_bytes()[begin_pos..end_pos]).into_owned()
}
}

/// Detailed information on a `Span`.
pub struct SpanLocation {
pub source_file: Rc<SourceFile>,
pub line_start: usize,
pub line_stop: usize,
pub col_start: usize,
pub col_stop: usize,
}

impl SpanLocation {
/// Returns a dummy location.
pub fn dummy() -> Self {
let dummy = "<dummy>".to_owned();
let span = Span::dummy();
Self {
source_file: Rc::new(SourceFile {
name: FileName::Custom(dummy.clone()),
src: dummy,
start_pos: span.lo,
end_pos: span.hi,
lines: Vec::new(),
multibyte_chars: Vec::new(),
}),
line_start: 0,
line_stop: 0,
col_start: 0,
col_stop: 0,
}
}
}

/// File / Line / Column information on a `BytePos`.
pub struct LineCol {
/// Information on the original source.
pub source_file: Rc<SourceFile>,
/// The 1-based line number.
pub line: usize,
/// The (0-based) column offset into the line.
pub col: CharPos,
}

/// Normalizes the source code and records the normalizations.
fn normalize_src(src: &mut String) {
remove_bom(src);
normalize_newlines(src);
}

/// Removes UTF-8 BOM, if any.
fn remove_bom(src: &mut String) {
if src.starts_with('\u{feff}') {
src.drain(..3);
}
}

/// Replaces `\r\n` with `\n` in-place in `src`.
///
/// Returns error if there's a lone `\r` in the string.
fn normalize_newlines(src: &mut String) {
if !src.as_bytes().contains(&b'\r') {
return;
}

// We replace `\r\n` with `\n` in-place, which doesn't break utf-8 encoding.
// While we *can* call `as_mut_vec` and do surgery on the live string
// directly, let's rather steal the contents of `src`. This makes the code
// safe even if a panic occurs.

let mut buf = std::mem::take(src).into_bytes();
let mut gap_len = 0;
let mut tail = buf.as_mut_slice();
loop {
let idx = match find_crlf(&tail[gap_len..]) {
None => tail.len(),
Some(idx) => idx + gap_len,
};
tail.copy_within(gap_len..idx, 0);
tail = &mut tail[idx - gap_len..];
if tail.len() == gap_len {
break;
}
gap_len += 1;
}

// Account for removed `\r`.
// After `set_len`, `buf` is guaranteed to contain utf-8 again.
let new_len = buf.len() - gap_len;
unsafe {
buf.set_len(new_len);
*src = String::from_utf8_unchecked(buf);
}

fn find_crlf(src: &[u8]) -> Option<usize> {
let mut search_idx = 0;
while let Some(idx) = find_cr(&src[search_idx..]) {
if src[search_idx..].get(idx + 1) != Some(&b'\n') {
search_idx += idx + 1;
continue;
}
return Some(search_idx + idx);
}
None
}

fn find_cr(src: &[u8]) -> Option<usize> {
src.iter().position(|&b| b == b'\r')
}
}

/// Identifies an offset of a multi-byte character in a `SourceFile`.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
struct MultiByteChar {
/// The absolute offset of the character in the `SourceMap`.
pub pos: BytePos,
/// The number of bytes, `>= 2`.
pub bytes: u8,
}

/// Finds all newlines, multi-byte characters, and non-narrow characters in a
/// SourceFile.
///
/// This function will use an SSE2 enhanced implementation if hardware support
/// is detected at runtime.
fn analyze_source_file(src: &str, source_file_start_pos: BytePos) -> (Vec<BytePos>, Vec<MultiByteChar>) {
let mut lines = vec![source_file_start_pos];
let mut multi_byte_chars = vec![];

let mut i = 0;
let src_bytes = src.as_bytes();

while i < src.len() {
// SAFETY: We verified that i < src.len().
let i_usize = i as usize;
let byte = unsafe { *src_bytes.get_unchecked(i_usize) };

// How much to advance to get to the next UTF-8 char in the string.
let mut char_len = 1;

let pos = BytePos::from_usize(i) + source_file_start_pos;

if let b'\n' = byte {
lines.push(pos + BytePos(1));
} else if byte >= 127 {
// The slow path:
// This is either ASCII control character "DEL" or the beginning of
// a multibyte char. Just decode to `char`.
let c = (&src[i..]).chars().next().unwrap();
char_len = c.len_utf8();

if char_len > 1 {
assert!((2..=4).contains(&char_len));
let bytes = char_len as u8;
let mbc = MultiByteChar { pos, bytes };
multi_byte_chars.push(mbc);
}
}

i += char_len;
}

// The code above optimistically registers a new line *after* each \n it encounters.
// If that point is already outside the source_file, remove it again.
if let Some(&last_line_start) = lines.last() {
let source_file_end = source_file_start_pos + BytePos::from_usize(src.len());
assert!(source_file_end >= last_line_start);
if last_line_start == source_file_end {
lines.pop();
}
}

(lines, multi_byte_chars)
}
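The new `SourceMap` is the single authority for mapping a `Span`'s byte offsets back to file, line, and column information. A minimal usage sketch against the API above; the file name and source text are illustrative only, not from the commit:

use leo_span::source_map::{FileName, SourceMap};
use leo_span::span::{BytePos, Span};

fn demo() {
    let sm = SourceMap::default();
    // Register a source; the map hands back a SourceFile with a global start offset.
    let sf = sm.new_source("let x = 4;\nlet y = x;\n", FileName::Custom("demo".into()));
    // A span over `x = 4` on the first line, expressed as absolute byte positions.
    let span = Span::new(sf.start_pos + BytePos(4), sf.start_pos + BytePos(9));
    assert_eq!(sm.contents_of_span(span).as_deref(), Some("x = 4"));
    // Prints "1:5-10": columns are 1-based, and line_start equals line_stop here.
    println!("{}", sm.span_to_string(span));
}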
@ -16,103 +16,47 @@

//! Defines the `Span` type used to track where code comes from.

use std::{fmt, sync::Arc, usize};
use core::ops::{Add, Sub};
use serde::{Deserialize, Serialize};
use std::{fmt, usize};

use serde::ser::{Serialize, SerializeStruct, Serializer};
use serde::Deserialize;
use crate::symbol::with_session_globals;

/// The span type which tracks where formatted errors originate from in a Leo file.
/// This is used in many spots throughout the rest of the Leo crates.
#[derive(Clone, Debug, Default, Deserialize, Eq, Hash, PartialEq)]
#[derive(Copy, Clone, Debug, Default, Deserialize, Eq, Hash, PartialEq, Serialize)]
pub struct Span {
// TODO(Centril): All of could be optimized to just `{ lo: u32, hi: u32 }`,
// i.e. 8 bytes by indexing into a global source map of all files concatenated.
// That would also give us `Copy` which is quite nice!
/// The line number where the error started.
pub line_start: usize,
/// The line number where the error stopped.
pub line_stop: usize,
/// The column number where the error started.
pub col_start: usize,
/// The column number where the error stopped.
pub col_stop: usize,
/// The path to the Leo file containing the error.
pub path: Arc<String>,
/// The content of the line(s) that the span is found on.
pub content: String,
/// The start position of the span.
pub lo: BytePos,
/// The end position of the span.
/// The length is simply `end - start`.
pub hi: BytePos,
}

impl Span {
/// Generate a new span from:
/// - Where the Leo line starts.
/// - Where the Leo line stops.
/// - Where the Leo column starts.
/// - Where the Leo column stops.
/// - The path to the Leo file.
/// - The content of those specified bounds.
pub fn new(
line_start: usize,
line_stop: usize,
col_start: usize,
col_stop: usize,
path: Arc<String>,
content: String,
) -> Self {
Self {
line_start,
line_stop,
col_start,
col_stop,
path,
content,
}
/// Generate a new span from the `start`ing and `end`ing positions.
pub fn new(start: BytePos, end: BytePos) -> Self {
Self { lo: start, hi: end }
}

/// Generates a dummy span with all defaults.
/// Should only be used in temporary situations.
pub fn dummy() -> Self {
Self::new(0, 0, 0, 0, <_>::default(), <_>::default())
pub const fn dummy() -> Self {
Self {
lo: BytePos(0),
hi: BytePos(0),
}
}

impl Serialize for Span {
/// Custom serialization for testing purposes.
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut state = serializer.serialize_struct("Color", 3)?;
state.serialize_field("line_start", &self.line_start)?;
state.serialize_field("line_stop", &self.line_stop)?;
state.serialize_field("col_start", &self.col_start)?;
state.serialize_field("col_stop", &self.col_stop)?;
// This is for testing purposes since the tests are run on a variety of OSes.
if std::env::var("LEO_TESTFRAMEWORK")
.unwrap_or_default()
.trim()
.to_owned()
.is_empty()
{
state.serialize_field("path", &self.path)?;
} else {
state.serialize_field("path", "")?;
}
state.serialize_field("content", &self.content)?;
state.end()
/// Is the span a dummy?
pub fn is_dummy(&self) -> bool {
self == &Self::dummy()
}
}

impl fmt::Display for Span {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if self.line_start == self.line_stop {
write!(f, "{}:{}-{}", self.line_start, self.col_start, self.col_stop)
} else {
write!(
f,
"{}:{}-{}:{}",
self.line_start, self.col_start, self.line_stop, self.col_stop
)
}
with_session_globals(|s| write!(f, "{}", s.source_map.span_to_string(*self)))
}
}

@ -120,69 +64,95 @@ impl std::ops::Add for &Span {
type Output = Span;

fn add(self, other: &Span) -> Span {
self.clone() + other.clone()
*self + *other
}
}

impl std::ops::Add for Span {
type Output = Self;

#[allow(clippy::comparison_chain)]
fn add(self, other: Self) -> Self {
if self.line_start == other.line_stop {
Span {
line_start: self.line_start,
line_stop: self.line_stop,
col_start: self.col_start.min(other.col_start),
col_stop: self.col_stop.max(other.col_stop),
path: self.path,
content: self.content,
}
} else {
let mut new_content = vec![];
let self_lines = self.content.lines().collect::<Vec<_>>();
let other_lines = other.content.lines().collect::<Vec<_>>();
for line in self.line_start.min(other.line_start)..self.line_stop.max(other.line_stop) + 1 {
if line >= self.line_start && line <= self.line_stop {
new_content.push(
self_lines
.get(line - self.line_start)
.copied()
.unwrap_or_default()
.to_string(),
);
} else if line >= other.line_start && line <= other.line_stop {
new_content.push(
other_lines
.get(line - other.line_start)
.copied()
.unwrap_or_default()
.to_string(),
);
} else if new_content.last().map(|x| *x != "...").unwrap_or(true) {
new_content.push(format!("{:<1$}...", " ", other.col_start + 4));
let lo = self.lo.min(other.lo);
let hi = self.hi.max(other.hi);
Self::new(lo, hi)
}
}
let new_content = new_content.join("\n");
if self.line_start < other.line_stop {
Span {
line_start: self.line_start,
line_stop: other.line_stop,
col_start: self.col_start,
col_stop: other.col_stop,
path: self.path,
content: new_content,

// _____________________________________________________________________________
// Pos, BytePos, CharPos
//

pub trait Pos {
fn from_usize(n: usize) -> Self;
fn to_usize(&self) -> usize;
fn from_u32(n: u32) -> Self;
fn to_u32(&self) -> u32;
}
} else {
Span {
line_start: other.line_start,
line_stop: self.line_stop,
col_start: other.col_start,
col_stop: self.col_stop,
path: self.path,
content: new_content,

macro_rules! impl_pos {
(
$(
$(#[$attr:meta])*
$vis:vis struct $ident:ident($inner_vis:vis $inner_ty:ty);
)*
) => {
$(
$(#[$attr])*
$vis struct $ident($inner_vis $inner_ty);

impl Pos for $ident {
#[inline(always)]
fn from_usize(n: usize) -> $ident {
$ident(n as $inner_ty)
}

#[inline(always)]
fn to_usize(&self) -> usize {
self.0 as usize
}

#[inline(always)]
fn from_u32(n: u32) -> $ident {
$ident(n as $inner_ty)
}

#[inline(always)]
fn to_u32(&self) -> u32 {
self.0 as u32
}
}

impl Add for $ident {
type Output = $ident;

#[inline(always)]
fn add(self, rhs: $ident) -> $ident {
$ident(self.0 + rhs.0)
}
}

impl Sub for $ident {
type Output = $ident;

#[inline(always)]
fn sub(self, rhs: $ident) -> $ident {
$ident(self.0 - rhs.0)
}
}
)*
};
}

impl_pos! {
/// A byte offset.
#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Debug, Serialize, Deserialize, Default)]
pub struct BytePos(pub u32);

/// A character offset.
///
/// Because of multibyte UTF-8 characters,
/// a byte offset is not equivalent to a character offset.
/// The [`SourceMap`] will convert [`BytePos`] values to `CharPos` values as necessary.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub struct CharPos(pub usize);
}
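With `Span` reduced to a pair of `BytePos` offsets it derives `Copy`, and merging two spans via `Add` is just a min/max on the offsets. A small illustrative sketch of the new semantics (the helper name is an assumption):

use leo_span::span::{BytePos, Span};

fn enclosing(lhs: Span, rhs: Span) -> Span {
    // `Add for Span` takes the union: min of the `lo`s, max of the `hi`s.
    lhs + rhs
}

fn example() {
    let a = Span::new(BytePos(2), BytePos(5));
    let b = Span::new(BytePos(4), BytePos(9));
    let c = enclosing(a, b); // `a` and `b` remain usable because Span is Copy.
    assert_eq!(c, Span::new(BytePos(2), BytePos(9)));
    assert!(Span::dummy().is_dummy() && !c.is_dummy());
}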
@ -15,6 +15,7
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.

use crate::dropless::DroplessArena;
use crate::source_map::SourceMap;

use core::cmp::PartialEq;
use core::convert::AsRef;
@ -287,12 +288,15 @@ impl fmt::Display for SymbolStr {
pub struct SessionGlobals {
/// The interner for `Symbol`s used in the compiler.
symbol_interner: Interner,
/// The source map used in the compiler.
pub source_map: SourceMap,
}

impl SessionGlobals {
fn new() -> Self {
Self {
symbol_interner: Interner::prefilled(),
source_map: SourceMap::default(),
}
}
}
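Since the source map now lives on `SessionGlobals`, any code running inside a session can resolve spans without threading a `SourceMap` handle through every call, which is what the `Display` impls for `Span` and `Formatted` rely on. Roughly, with an illustrative snippet name:

use leo_span::source_map::FileName;
use leo_span::span::Span;
use leo_span::symbol::{create_session_if_not_set_then, with_session_globals};

fn resolve(span: Span) -> String {
    // Same pattern the error formatter uses to recover file/line/column data.
    with_session_globals(|s| s.source_map.span_to_string(span))
}

fn example() {
    create_session_if_not_set_then(|s| {
        let sf = s.source_map.new_source("function main() {}\n", FileName::Custom("snippet".into()));
        let span = Span::new(sf.start_pos, sf.end_pos);
        println!("{}", resolve(span));
    });
}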
@ -3,6 +3,6 @@ namespace: Compile
|
||||
expectation: Pass
|
||||
outputs:
|
||||
- output:
|
||||
- initial_input_ast: bbe973798ab14152165455260924a0de60131d31da32d2e223228954365dc609
|
||||
initial_ast: fa5f2ab70ae04bbd7684fc630de6cd5d6d85e791ccf9f06e65b7ae6a03e2ec48
|
||||
- initial_input_ast: c7315faf1ac3ceeb90260e64e4a411a27a8aa732892a64c15f49e81adf464beb
|
||||
initial_ast: b80eed2960509f11bce6294687558fb7b907f1d83455287f944dfa981ebe1ec8
|
||||
symbol_table: 9d42b1d8f167826635e5169bc3a50c14f722fba8e5ce2480fbde3b8cf2e75237
|
||||
|
@ -3,7 +3,7 @@ namespace: Compile
|
||||
expectation: Pass
|
||||
outputs:
|
||||
- output:
|
||||
- initial_input_ast: 98da6a76f2370e9311042851dde02ebaa4e64528d9461a5e722858f394d25f93
|
||||
- initial_input_ast: 85307a4f16278d38b746d321756e41c3cf48bbb2dc5bad2f0e9a7b8c4dd2541e
|
||||
initial_ast: 998b6e02af1be3c649cecd0129810b237f33289b801d24a90c38260561cc0318
|
||||
- initial_input_ast: dc6b4b00185dd6c1f2b83a1bfae619c1d6e3f68ac0f1d3d87ae3bd0ed5caf083
|
||||
- initial_input_ast: 73a38568160c3d2be402043d04ccdc2290abe27647bc81c4bd50367834c206cf
|
||||
initial_ast: 6514080a9452d6e193510250dec3b87081e0741d05cc59ca456f2b2f3f36ec72
|
||||
symbol_table: 7ec407fabcae0eeef889009b8ba99beac3d18b2d79cc49e7760261d80bd59728
|
||||
|
@ -3,7 +3,7 @@ namespace: Compile
|
||||
expectation: Pass
|
||||
outputs:
|
||||
- output:
|
||||
- initial_input_ast: 98da6a76f2370e9311042851dde02ebaa4e64528d9461a5e722858f394d25f93
|
||||
- initial_input_ast: 85307a4f16278d38b746d321756e41c3cf48bbb2dc5bad2f0e9a7b8c4dd2541e
|
||||
initial_ast: c1865cfe898ea72b3f411b63364f3643ad452860c6d1db92e843f2c61094eec9
|
||||
- initial_input_ast: b6371958e735320861c84ed514f258ae8a9858b34615364b2f9ebbaa2aaadd8c
|
||||
- initial_input_ast: d384cfea1a36220e9ea4e246ece89d8fffa320f90aeeb85660bc445ab62a0829
|
||||
initial_ast: 7085f8bf0a01a4fd7b73b5e3fc1c2c812d0cd459a5b6ea85791fc3c01118c7a0
|
||||
symbol_table: 5a12f141aef86a7a00b86650e23cfd9af657d6f418df7b1ee9eab06714305d31
|
||||
|
@ -3,9 +3,9 @@ namespace: Compile
|
||||
expectation: Pass
|
||||
outputs:
|
||||
- output:
|
||||
- initial_input_ast: 51b03881ad0ef3af7d105c163071fb69fb38048bea44c4bc380fd17367ce94e0
|
||||
- initial_input_ast: 59b58754cc7667404c6bba5d90a9e53b7f9f36b6d7c9783e5b88d12728127e66
|
||||
- initial_input_ast: 55ff7a9a3f210ea871c438a89f07da6c54ca1b8129e7344593017d22305297b4
|
||||
- initial_input_ast: c89564770d1984e4e8c0c17c6c50b66b4a5d4ade85899562f506afc22e50496d
|
||||
initial_ast: 30050aaa080b73f03ccc9a5fda9bdb9fcba0aea90171b8cdf39c50c5eb8ac7ab
|
||||
- initial_input_ast: e6457724e4c3bb27eca30df861f711f962ac47fb0e7d0b9dc959be0feaeb7763
|
||||
- initial_input_ast: c8d27a86795a6d56815a681066b7f462f5476be6d56ec910b74d90c60d8b3cc9
|
||||
- initial_input_ast: 4ff2fb01e2d10a59bf4fcd1ed3b510c6860167dbd3bd4d099c6b8a78d2a767af
|
||||
- initial_input_ast: 96ddbb84cba723df65571d6537a303189e6274389593980996fd7ee50eab996e
|
||||
initial_ast: e17f5c2d253ffc1e8e3389253f89feca418b429eb912840a21172d63d117c7ec
|
||||
symbol_table: f36863240edb9fb5fb852c212a9ae1db491ee8243d0469fc155592964595e7d0
|
||||
|
@ -3,9 +3,9 @@ namespace: Compile
|
||||
expectation: Pass
|
||||
outputs:
|
||||
- output:
|
||||
- initial_input_ast: 51b03881ad0ef3af7d105c163071fb69fb38048bea44c4bc380fd17367ce94e0
|
||||
- initial_input_ast: 59b58754cc7667404c6bba5d90a9e53b7f9f36b6d7c9783e5b88d12728127e66
|
||||
- initial_input_ast: 55ff7a9a3f210ea871c438a89f07da6c54ca1b8129e7344593017d22305297b4
|
||||
- initial_input_ast: c89564770d1984e4e8c0c17c6c50b66b4a5d4ade85899562f506afc22e50496d
|
||||
initial_ast: 73e2d3f95cac34a231f20df1de3c53fd08ae3d9e604ee28557c1098299e8170c
|
||||
- initial_input_ast: e7e9fd77647ac56ed68e547bfb8d0c767313030072a510ec138027ffb62fc368
|
||||
- initial_input_ast: e43c024d6fad8a7a04672fa318936703a4798699283f7b66d9383d52acc104a0
|
||||
- initial_input_ast: 695d879ad212b23fb3e91fae782c701c5f0469bbcaabdcfc6e5dcadc5b7e6c9a
|
||||
- initial_input_ast: 390e951d2b90cf150acd9bc6eeeffbc3a8d7af3ce3781f14ebdce3f1054de4c8
|
||||
initial_ast: adb3d21c4da2e7538a95ba689373f870d370001c8fb60e38a96ed6e6cf87ae6c
|
||||
symbol_table: 4fd4e476609947028fbffe357ffb9d962e96c30a9abe3677d75675ae37b12587
|
||||
|
@ -3,9 +3,9 @@ namespace: Compile
|
||||
expectation: Pass
|
||||
outputs:
|
||||
- output:
|
||||
- initial_input_ast: 51b03881ad0ef3af7d105c163071fb69fb38048bea44c4bc380fd17367ce94e0
|
||||
- initial_input_ast: 59b58754cc7667404c6bba5d90a9e53b7f9f36b6d7c9783e5b88d12728127e66
|
||||
- initial_input_ast: 55ff7a9a3f210ea871c438a89f07da6c54ca1b8129e7344593017d22305297b4
|
||||
- initial_input_ast: c89564770d1984e4e8c0c17c6c50b66b4a5d4ade85899562f506afc22e50496d
|
||||
initial_ast: 3c8d1d820525dc4e77214dfa0628a8fb73890ee1b90cfd443a4a736e6506bbad
|
||||
- initial_input_ast: e6457724e4c3bb27eca30df861f711f962ac47fb0e7d0b9dc959be0feaeb7763
|
||||
- initial_input_ast: c8d27a86795a6d56815a681066b7f462f5476be6d56ec910b74d90c60d8b3cc9
|
||||
- initial_input_ast: 4ff2fb01e2d10a59bf4fcd1ed3b510c6860167dbd3bd4d099c6b8a78d2a767af
|
||||
- initial_input_ast: 96ddbb84cba723df65571d6537a303189e6274389593980996fd7ee50eab996e
|
||||
initial_ast: 487ec91d706447fafcff6e448723fa41dd02e6d8ea1b3b8670460ef9a7931c90
|
||||
symbol_table: c8dd46774e298ef70fc87f89ecb8b5f23f63b1f2401f337fc97ad83b54e85871
|
||||
|
@ -3,9 +3,9 @@ namespace: Compile
|
||||
expectation: Pass
|
||||
outputs:
|
||||
- output:
|
||||
- initial_input_ast: 51b03881ad0ef3af7d105c163071fb69fb38048bea44c4bc380fd17367ce94e0
|
||||
- initial_input_ast: 59b58754cc7667404c6bba5d90a9e53b7f9f36b6d7c9783e5b88d12728127e66
|
||||
- initial_input_ast: 55ff7a9a3f210ea871c438a89f07da6c54ca1b8129e7344593017d22305297b4
|
||||
- initial_input_ast: c89564770d1984e4e8c0c17c6c50b66b4a5d4ade85899562f506afc22e50496d
|
||||
initial_ast: f94ce8212b4164b85de34920f7da472033fbafe03e8e1d00a0ec7f9bae692b6c
|
||||
- initial_input_ast: e6457724e4c3bb27eca30df861f711f962ac47fb0e7d0b9dc959be0feaeb7763
|
||||
- initial_input_ast: c8d27a86795a6d56815a681066b7f462f5476be6d56ec910b74d90c60d8b3cc9
|
||||
- initial_input_ast: 4ff2fb01e2d10a59bf4fcd1ed3b510c6860167dbd3bd4d099c6b8a78d2a767af
|
||||
- initial_input_ast: 96ddbb84cba723df65571d6537a303189e6274389593980996fd7ee50eab996e
|
||||
initial_ast: 4c24b2ad645f55d42b011b1173fc756330dfa1df4734c2c3275ff53a364ba28c
|
||||
symbol_table: 8ed9a73e996562abfe75837cfbf2103a4d9213291298206f4f63a7dac808cbc1
|
||||
|
@ -3,9 +3,9 @@ namespace: Compile
|
||||
expectation: Pass
|
||||
outputs:
|
||||
- output:
|
||||
- initial_input_ast: 51b03881ad0ef3af7d105c163071fb69fb38048bea44c4bc380fd17367ce94e0
|
||||
- initial_input_ast: 59b58754cc7667404c6bba5d90a9e53b7f9f36b6d7c9783e5b88d12728127e66
|
||||
- initial_input_ast: 55ff7a9a3f210ea871c438a89f07da6c54ca1b8129e7344593017d22305297b4
|
||||
- initial_input_ast: c89564770d1984e4e8c0c17c6c50b66b4a5d4ade85899562f506afc22e50496d
|
||||
initial_ast: ceac1c88ce7915eb171fef59cf7788b1d83cb53d313e0325b4b6e7ca7c0b0eea
|
||||
- initial_input_ast: e6457724e4c3bb27eca30df861f711f962ac47fb0e7d0b9dc959be0feaeb7763
|
||||
- initial_input_ast: c8d27a86795a6d56815a681066b7f462f5476be6d56ec910b74d90c60d8b3cc9
|
||||
- initial_input_ast: 4ff2fb01e2d10a59bf4fcd1ed3b510c6860167dbd3bd4d099c6b8a78d2a767af
|
||||
- initial_input_ast: 96ddbb84cba723df65571d6537a303189e6274389593980996fd7ee50eab996e
|
||||
initial_ast: 69f7732147079d749ad863cb4442be4722f1b7eb16f26b7f972beed0d006044c
|
||||
symbol_table: 91630eda77eaf1e355744e663ceba26a0c3f860d3f69e8e46b03f5464d16950f
|
||||
|
@ -3,20 +3,20 @@ namespace: Compile
|
||||
expectation: Pass
|
||||
outputs:
|
||||
- output:
|
||||
- initial_input_ast: c5014ebe0b04ecf0cfca9befc3cad1725daf451b4a926741ef909fe396007891
|
||||
- initial_input_ast: e754ce377bf760178cf72cf8b0e1597c94eb833bf18e3524c99cd86c6ac4b003
|
||||
- initial_input_ast: ff9ecea579f37e0abbac30d7065f5293b0bf8affc53cfd627e0e1e43227e9b27
|
||||
- initial_input_ast: 72a3e3befe5feac1a922a42ce7fa7f1bc495eeeab43b3be86df09db770c93e73
|
||||
- initial_input_ast: 45237feb75335b96d98682c743ededad68167fcc6ad151eca3caa55b1625b647
|
||||
- initial_input_ast: c14a6deb2a1f0f60320260b189fea801ee942e3e44e0d6334067d6d7972eb4be
|
||||
- initial_input_ast: 38431bcff38b26387d2f41af68d2a6e22d184be6d44dbcdc6e5eda62ae822d40
|
||||
- initial_input_ast: 767918cd56f1bc8a9c6b64cc0e92dab97698e500588a0cf480e0b5838e7067f0
|
||||
- initial_input_ast: c7ba648f22d3b30d4f62a1093a2dcab8def815aa9f7083ee4fe9236dd63b1c0e
|
||||
- initial_input_ast: d6acd30374e64196b072f5a1c68ba379cce1eb5e866afac4acd98643f68d1c03
|
||||
- initial_input_ast: 2aa446f52a535b7918039280d90d098deea58ae0d84445dd31a8c66f05f52449
|
||||
- initial_input_ast: 69a6b4dcd3e363cce68ccdae93f77ead9319af6886aecf775a09a0ed535bb02b
|
||||
- initial_input_ast: c2f4b8b92cb0ee71de7fd532b483c2fb2c9ebcc8a59249a763fc30d05eb5d38e
|
||||
- initial_input_ast: 9196de328711651e75edb7ffba6b85b37ca468d509b0e96e6b5271d6d0785949
|
||||
- initial_input_ast: 012468d79c66ea8de1b7357678f0dd4f12a4188b384ed38696934add946dc72f
|
||||
initial_ast: 5ee529ed9657976f562ba09ffbd0c642283ac523a4ed4df18520a26b3769e408
|
||||
- initial_input_ast: 1a59d72ff9c08c97f0faa7d32e701487abce48630c3da2ec76dee451ed806c3b
|
||||
- initial_input_ast: 8c79c9fa5fa8422d837bd092a848293d1772fdc97b80c7394146fc759768c868
|
||||
- initial_input_ast: cbf3cfc28c6c396dd5dd24ec3cb59121d2d6615feb4aa9a452ea310248dfb238
|
||||
- initial_input_ast: 8a914ea590d1b978e7f0da8feeebcbe195e925a20424fe0a6611b2115b072c90
|
||||
- initial_input_ast: f37e2792fd64626a0270041c37d587d4929ac2afe6b3b9a576c3ec400271f0db
|
||||
- initial_input_ast: d9a2bd44fa3b8e850b79be10ab046af8d61b5e781cc991250969555eaccf9d23
|
||||
- initial_input_ast: c10a3ffcb8bbbaaa3028e8037fc183214933291d5ecf8ec0354df1b27a83c99a
|
||||
- initial_input_ast: 32628105ebaaed3207a26402f2f7a5075f2d7d865d51e56f2e8f0bade905058c
|
||||
- initial_input_ast: 2bc88f79d4b48e6321c08ba4a6cfeb2a7363b238d79b3a558dbc4383c14a3ebc
|
||||
- initial_input_ast: 5d6794f9c9406df4df4673433c806588bb9ed72992bb58cc62fa63a3a872e153
|
||||
- initial_input_ast: 833c67bb4946711a01be5f564905d83eaf4c4fbc72ff77599ff3b8dd5a5c8691
|
||||
- initial_input_ast: 2a4441d425a2e35c929d92256601e3713c870ef3c9c6206dccb238e42be532cb
|
||||
- initial_input_ast: 3edf030ed195c3ab8278af07fa1592314e2e3f9ad1e6c0ca3e27856be5e6f72c
|
||||
- initial_input_ast: f3dcb662ae0facb7afdd7cee9416261a48d37baa8a4d5d7f09d81e4621b53f3f
|
||||
- initial_input_ast: aebc549a2d2d87e1cca5e6d814b1b6dfdc4c32bb21045ad967bc67978a8ef871
|
||||
initial_ast: 8c9ec5e2c1544bc45c6d4dd9f21caa3121c2ba61c44fb66d8148883460b20820
|
||||
symbol_table: cf3569155d9961e6cab441ea9a60f5c92d2b18e6cd2ecaa64b1529d1774d3585
|
||||
|
@ -3,20 +3,20 @@ namespace: Compile
|
||||
expectation: Pass
|
||||
outputs:
|
||||
- output:
|
||||
- initial_input_ast: c5014ebe0b04ecf0cfca9befc3cad1725daf451b4a926741ef909fe396007891
|
||||
- initial_input_ast: e754ce377bf760178cf72cf8b0e1597c94eb833bf18e3524c99cd86c6ac4b003
|
||||
- initial_input_ast: ff9ecea579f37e0abbac30d7065f5293b0bf8affc53cfd627e0e1e43227e9b27
|
||||
- initial_input_ast: 72a3e3befe5feac1a922a42ce7fa7f1bc495eeeab43b3be86df09db770c93e73
|
||||
- initial_input_ast: 45237feb75335b96d98682c743ededad68167fcc6ad151eca3caa55b1625b647
|
||||
- initial_input_ast: c14a6deb2a1f0f60320260b189fea801ee942e3e44e0d6334067d6d7972eb4be
|
||||
- initial_input_ast: 38431bcff38b26387d2f41af68d2a6e22d184be6d44dbcdc6e5eda62ae822d40
|
||||
- initial_input_ast: 767918cd56f1bc8a9c6b64cc0e92dab97698e500588a0cf480e0b5838e7067f0
|
||||
- initial_input_ast: c7ba648f22d3b30d4f62a1093a2dcab8def815aa9f7083ee4fe9236dd63b1c0e
|
||||
- initial_input_ast: d6acd30374e64196b072f5a1c68ba379cce1eb5e866afac4acd98643f68d1c03
|
||||
- initial_input_ast: 2aa446f52a535b7918039280d90d098deea58ae0d84445dd31a8c66f05f52449
|
||||
- initial_input_ast: 69a6b4dcd3e363cce68ccdae93f77ead9319af6886aecf775a09a0ed535bb02b
|
||||
- initial_input_ast: c2f4b8b92cb0ee71de7fd532b483c2fb2c9ebcc8a59249a763fc30d05eb5d38e
|
||||
- initial_input_ast: 9196de328711651e75edb7ffba6b85b37ca468d509b0e96e6b5271d6d0785949
|
||||
- initial_input_ast: 012468d79c66ea8de1b7357678f0dd4f12a4188b384ed38696934add946dc72f
|
||||
initial_ast: 3965395dcb477379853989110bf621aefc6a052ee39e06b9e1d15dbe0f0de184
|
||||
- initial_input_ast: 44e0b05b0da70ab554408e1fa7a8578282522b358320cde13231c2136e4df5b1
|
||||
- initial_input_ast: fa8ef1a0d3743224c55ca78079611c87374266313d2254c5b18a99443e038d51
|
||||
- initial_input_ast: 6aa65f48c47c0a70326ee6034a46c38ed25116b02b748827fde3a4a46cb9cb95
|
||||
- initial_input_ast: de106cf7de15ce9b35d00a97dccf6b90077303e8bfcec568d65352107c2a81f8
|
||||
- initial_input_ast: a6badf7eba50a21d6ebc1c82eb70cd4081e37680dd8d4b597ea04192ccbf4e5a
|
||||
- initial_input_ast: 72e2f6ef3f3cb3135dbb46dd1c5808c206cfa762a708a8d5483b6bc2958a33ba
|
||||
- initial_input_ast: 0a5209868d802596f6094912deb7adaef5f21e0ee269eeaa3a661d403a64701e
|
||||
- initial_input_ast: be71fcee4adbb8c7de279825c5fd87d8c55597a97f57f9af28d88e3f51781c79
|
||||
- initial_input_ast: 55aae45600898144d628c1211e2de07794ce4784eef21a6b978aa064d3fa89f4
|
||||
- initial_input_ast: 8f73004838aee69e847d99ab0b284d99c61a8283b91227e1dd890994220da8e1
|
||||
- initial_input_ast: 6de398471c245e863855e68b0b672bcda53467f638ccf2ded60930a930f3de25
|
||||
- initial_input_ast: 1d22c4edd4f3fec707b8cb6e60bf57b4e278913fbb6c15c4fa2bce0e318de232
|
||||
- initial_input_ast: 7642187a32f44ae4d5743ae8f46bb669937490ff926c46cf8af8b36b5bc747c5
|
||||
- initial_input_ast: 4d7b20c6c94064e08e0f19b0f45cbdd740f668fefcafd361865226c5af087440
|
||||
- initial_input_ast: 9de9e1877e523f5aa44d019bba11e7bb58ad1f9952aae3254908e8b151b3ae55
|
||||
initial_ast: 526f77f4386ed6b5dd40f7e352f48e43c04655123bf329dbed9f6dadd43a4963
|
||||
symbol_table: 1cad55eef598e4d05af7c5f88119636a2e6d23d81213bbaad908d66a32906780
|
||||
|
@ -3,6 +3,6 @@ namespace: Compile
|
||||
expectation: Pass
|
||||
outputs:
|
||||
- output:
|
||||
- initial_input_ast: 204a90b23ba88927aabdc72aed02effa556d73742caf7d370527f79f0b30f621
|
||||
initial_ast: 10539810602f7b65e0ac424d7e92b4565c965fdbe139b267f5fd4aae4c67d470
|
||||
- initial_input_ast: 9698e866b0330be095c65ca93f17ed5fe3d31c61d5622eaf54c774d79d3b6950
|
||||
initial_ast: d771cc51e9c822faf8cb1ca6f80d19c22379ac37c9ae9e357010e79029c6baf2
|
||||
symbol_table: f8c971e501487f7a368a50fd1941c3fb70684b041478fe615a91f088796e301b
|
||||
|
@ -3,7 +3,7 @@ namespace: Compile
|
||||
expectation: Pass
|
||||
outputs:
|
||||
- output:
|
||||
- initial_input_ast: 204a90b23ba88927aabdc72aed02effa556d73742caf7d370527f79f0b30f621
|
||||
- initial_input_ast: cbc36a842b21e80aee167b65bd1a59861d59a43e27915b785410f032ae19a570
|
||||
initial_ast: 1e37339f631e947b4f85cf4f43ba4c1a26439ab565dc559f27c4923afc3ef34a
|
||||
- initial_input_ast: e9253dc5764d8870dc6842860993ce0b2495925b3bdb18891b7c4aa67fe0a81d
|
||||
- initial_input_ast: 3153e33ab1630d74ad221b5ce6d5e50c17fb86d91a2aae4ce67b46fec12c1ef4
|
||||
initial_ast: 8fdb85c9c133687f89fe5c2ca9b177a4a777269eab425f9008ab330cce85542e
|
||||
symbol_table: f4e056be00b25dfd854a5be8197aeb205436bb0ee11cfe06701531ea086e038c
|
||||
|
@ -3,6 +3,6 @@ namespace: Compile
|
||||
expectation: Pass
|
||||
outputs:
|
||||
- output:
|
||||
- initial_input_ast: 38e6b369397fcf11fa921808be61baf33a705d4509bc15a46ec69e1a0aaa5031
|
||||
initial_ast: d1d00796ff928991e5c898877a2f6e540dc744807d696b3b14af935e380eafb4
|
||||
- initial_input_ast: 6b9e5227fdce9f916cd2398ea85c2d7e7b2f7d706bfa730b8cd1acdeb3f168cd
|
||||
initial_ast: e3b69d0b4355afd331edf8c572b64746bc763a714626043a9edc8eba42b08ec8
|
||||
symbol_table: d46f6eb98259f34d32a60788aa178efa34166bcc6ba1058e2ff5f8327a129b9c
|
||||
|
@ -3,6 +3,6 @@ namespace: Compile
|
||||
expectation: Pass
|
||||
outputs:
|
||||
- output:
|
||||
- initial_input_ast: 38e6b369397fcf11fa921808be61baf33a705d4509bc15a46ec69e1a0aaa5031
|
||||
initial_ast: a2e9492d07dd4600eb79386662e4fd1f56b5c95fe4b05ef149a346e1b935f908
|
||||
- initial_input_ast: 89959164cbf734ac0d261c7459b9c1214eb2f4b3ab9ec57a0b22db487d6537e4
|
||||
initial_ast: cb79b9db64a7e92b85706e517b586b4fe9cd591850e7130cc7bfad6dd92b4d3f
|
||||
symbol_table: 559484bc163178bf54b169f5dd573167771566aa993055b6a28f0c1a759339bc
|
||||
|
@ -3,7 +3,7 @@ namespace: Compile
|
||||
expectation: Pass
|
||||
outputs:
|
||||
- output:
|
||||
- initial_input_ast: b9ca1f9e4746ba9f99375205e309b977a522109054d058039197767b39fa6833
|
||||
- initial_input_ast: 398a847f3f6c8826a10942c856b67d9acf37a51bf4ec9109be549b24b2dfff94
|
||||
initial_ast: a80b061288d4070d42ab6f4b37404004faab51e8a58cd301632c88bf940e9630
|
||||
- initial_input_ast: 4132cf36ac66f6b23e249f81b5c2aafa58e4e5e945920cc29752edc5d6d8057f
|
||||
- initial_input_ast: 586ed72429932a1aafcd0f8eed983a4babff8eada9c028b88bbeef24dab1cbc0
|
||||
initial_ast: 0360f7fba87784c290f73693b81ca1b71e3259794f0fb3d3cbe39cc143e92d82
|
||||
symbol_table: 560afbb790df70bfc770d5c2f966428a37baf94a5c0f5312ad445456d33a2cd9
|
||||
|
@ -3,6 +3,6 @@ namespace: Compile
|
||||
expectation: Pass
|
||||
outputs:
|
||||
- output:
|
||||
- initial_input_ast: 38e6b369397fcf11fa921808be61baf33a705d4509bc15a46ec69e1a0aaa5031
|
||||
initial_ast: fd62f417968d0212bd96f8069d2afafeb1f6f44e3270d1b729687cb82d882a12
|
||||
- initial_input_ast: 5411bd17943bb0aa7b0bb27e8b38f57fd27f06f2080b13a32eee50c53de66f6c
|
||||
initial_ast: ab02de15b37b07d52d385468d72b03f8f9ecff3c9f130b8a3985be326f9f6edf
|
||||
symbol_table: 720c2aafae77c261ed1640d4080f9a73657638baa30e54a5e10e2323b6f6eca0
|
||||
|
@ -3,6 +3,6 @@ namespace: Compile
|
||||
expectation: Pass
|
||||
outputs:
|
||||
- output:
|
||||
- initial_input_ast: 38e6b369397fcf11fa921808be61baf33a705d4509bc15a46ec69e1a0aaa5031
|
||||
initial_ast: d95ed89ada21401d204e86326c24904a1155dce3b2c9189741152f2b0c0d2f2b
|
||||
- initial_input_ast: 18e8a4118829470d3150268bbf8d298954e1f566ea2d42ed9f3660dc25c23fcc
|
||||
initial_ast: 4fa505d3542f2df2abcdbbf7d487ff65e1e311366fc2eaee286f8ba253876883
|
||||
symbol_table: e5159343ab03573032873783b28058a482dd401d534a0d3af03790a5286ba470
|
||||
|
@ -3,6 +3,6 @@ namespace: Compile
|
||||
expectation: Pass
|
||||
outputs:
|
||||
- output:
|
||||
- initial_input_ast: 38e6b369397fcf11fa921808be61baf33a705d4509bc15a46ec69e1a0aaa5031
|
||||
initial_ast: 916cd6ab87496b67555c23de086a4a309a9a7630c4d1e198f04d3eb591fabecf
|
||||
- initial_input_ast: caa45de3b412d749048b9be732a4593f3094c671701093103f580817a741acbb
|
||||
initial_ast: 8dc13a9cb4d48b8a5e33d4e16a3071869a6ac7034962af5baf8f5f19a2b218dd
|
||||
symbol_table: 757bb967973b87466c01be1a9dc78d30701964e0d234e0e65d1bbcbd3072370f
|
||||
|
@ -3,6 +3,6 @@ namespace: Compile
|
||||
expectation: Pass
|
||||
outputs:
|
||||
- output:
|
||||
- initial_input_ast: 3674d8249023145121ff35eb4d9e59b0ce79cda7e17f9f98b5dbaa6ba4f80cf2
|
||||
initial_ast: ea05e8a68b86c5cd7189870b309bb5a4107dd6f84c9ccd359aacbfee82045242
|
||||
- initial_input_ast: 4a7171bfd4cb5b69729e26e4c6b0915f261d3f51b2937d8de5009069f56abfc1
|
||||
initial_ast: 8b6fdc73a1397af850bcb47a623f82ff773919308fec52eeb890c8b4a2e686e7
|
||||
symbol_table: 66779461e33acc9c5c732509637db91bd72aff3e9dae6aee0c137d0537446878
|
||||
|
@ -3,6 +3,6 @@ namespace: Compile
|
||||
expectation: Pass
|
||||
outputs:
|
||||
- output:
|
||||
- initial_input_ast: 3a50bcc0c4416f93de77861848ac00cd1b40e17f4c023ab3faea0fc0c332f148
|
||||
initial_ast: e0c7874ac5b43acb60bfb111cfafba0b5151b202f86c459913a9d4dd8ca151b0
|
||||
- initial_input_ast: 770cad45d17364fd3acd19fb67c66ea8f58ea54c5c42386d1a0fe02f241e9f2b
|
||||
initial_ast: 5c6f3087ccb1ce8c559638d4a4308a784a1a69735fd3a4cbd7eccdcb6b76aeb7
|
||||
symbol_table: d666098c1c0d7c670730cfa6548d47fa89d9a1dd33642f8021b0622f9abc0e5e
|
||||
|
@ -3,6 +3,6 @@ namespace: Compile
|
||||
expectation: Pass
|
||||
outputs:
|
||||
- output:
|
||||
- initial_input_ast: 3a50bcc0c4416f93de77861848ac00cd1b40e17f4c023ab3faea0fc0c332f148
|
||||
initial_ast: 149443378c6b75f71b33ad90306cf4b01616284ddfa2136fc19fe41e23c73178
|
||||
- initial_input_ast: 3b1682d80f44da8eb1ea0d1236b5b9be15f7d4792fe7f31139a4362b2952d6a0
|
||||
initial_ast: cda0e01cadbcd6488f11d1a79fd9e4d132560ff75dad986cb4502d74f3e464be
|
||||
symbol_table: 38cbfecf35fb5189618a9767d3245d02e133d59ce2a0fc0f3aba37a8fa14fe8e
|
||||
|
@ -3,6 +3,6 @@ namespace: Compile
|
||||
expectation: Pass
|
||||
outputs:
|
||||
- output:
|
||||
- initial_input_ast: 3a50bcc0c4416f93de77861848ac00cd1b40e17f4c023ab3faea0fc0c332f148
initial_ast: 51eba8b00503b23218feec128cda498464acd2a9459fc717795d4137495a9820
- initial_input_ast: 1d6d705c0d5363431af8b58173f1061d4315c4ffe9ae175d6dd1c7ea2a01488f
initial_ast: fa376065aea93f4819afe923b978fd8addc4565c13b33cef8416cdd599f331b2
symbol_table: 0879cd6e4cc609ecdbdfc87ff0f08b4f3ae54367e0a1c02116304eb1411d2c23

@ -3,6 +3,6 @@ namespace: Compile
expectation: Pass
outputs:
- output:
- initial_input_ast: 3a50bcc0c4416f93de77861848ac00cd1b40e17f4c023ab3faea0fc0c332f148
initial_ast: d8d9ee6d763397372e62451e15980cde07796859d8ce319e4de5955d310f0cf6
- initial_input_ast: ccecfe74d5a1f89e892c982f5bf5bb59e094ade3b745b615ab1dcdc31b43dcd7
initial_ast: 15c31493b92dfe33d788f1259cc4208e47b050ed23a78febaf1611a89bea18a1
symbol_table: 879c99134415a9bae5a26b0d2dccfab01b9374218b810853c86bcf36a76d979c

@ -3,6 +3,6 @@ namespace: Compile
expectation: Pass
outputs:
- output:
- initial_input_ast: 3a50bcc0c4416f93de77861848ac00cd1b40e17f4c023ab3faea0fc0c332f148
initial_ast: d95922ca56dfdffdea1e8c9668d67b17755ca66ff1c5387f034ecfb2ba961df6
- initial_input_ast: 770cad45d17364fd3acd19fb67c66ea8f58ea54c5c42386d1a0fe02f241e9f2b
initial_ast: 85b903cc00f43ea3e7c890d66ac568daded94a6c215028ef28d454e42bd6a25a
symbol_table: 47782aad84b54a835bead341b6113b471712ddd6d19005040d16c5d199a0920a

@ -3,6 +3,6 @@ namespace: Compile
expectation: Pass
outputs:
- output:
- initial_input_ast: 3a50bcc0c4416f93de77861848ac00cd1b40e17f4c023ab3faea0fc0c332f148
initial_ast: 31535a250bbc7eaedc9a40720b2668544615862297f229f633f51b3311d0ac0e
- initial_input_ast: a2440344211fa1dec9858bba8ae80c44b17dcf10d6d75bf639bd9019de97a843
initial_ast: f0f1be1e9efbb5455c0188bb7e646379eb2094914382378767d90d2deecb533b
symbol_table: e20aa1c0f5d1b64b310c0e6d6bb306713f8696f092d080eab4031eacc0dcb798

@ -3,6 +3,6 @@ namespace: Compile
expectation: Pass
outputs:
- output:
- initial_input_ast: 3a50bcc0c4416f93de77861848ac00cd1b40e17f4c023ab3faea0fc0c332f148
initial_ast: 7b7c0226a27a7655fd6e4780fdfcf5d2bc923d2aa48585954b47b8cd852bc998
- initial_input_ast: 195a6921720db473ae0b5093da0353391308e0e31a698e5ef105127e94113ff6
initial_ast: 66e17dd0b33e9a4f2e4312c5d6605a5d63e5bdf537df5f08f1a2fdc30f24c3f5
symbol_table: c04c06d2f689387637bac27fff30cdaa87ec9b49fc03e1fe56b1e04029b6f925

@ -3,6 +3,6 @@ namespace: Compile
expectation: Pass
outputs:
- output:
- initial_input_ast: 3a50bcc0c4416f93de77861848ac00cd1b40e17f4c023ab3faea0fc0c332f148
initial_ast: 586028215ccf26abab187d6bff25c5dd332f1397be5474c468ca1411abec89a0
- initial_input_ast: 195a6921720db473ae0b5093da0353391308e0e31a698e5ef105127e94113ff6
initial_ast: b4e5d9d62d4ed1e5f963b0d957c9be73b8ebe09e749d1658396672ea7d938454
symbol_table: 5527b2434b61b94d365ba1e8bd1c2173b9feef5aa21c99440920259fb7df2052

@ -3,6 +3,6 @@ namespace: Compile
expectation: Pass
outputs:
- output:
- initial_input_ast: 3a50bcc0c4416f93de77861848ac00cd1b40e17f4c023ab3faea0fc0c332f148
initial_ast: a9df57a8047f8562e1c08ac89cc25997590cb3759d7cc930714de57e8ac30624
- initial_input_ast: 3b1682d80f44da8eb1ea0d1236b5b9be15f7d4792fe7f31139a4362b2952d6a0
initial_ast: fa1fdad66c8c909e5820c04faa4709ef8c13d92933bf2d310202293b6851ac01
symbol_table: f601b6a1652f79ac2853737ecf09f4e5f23c05873e2bb3967137a7b2b0085b04

@ -3,6 +3,6 @@ namespace: Compile
expectation: Pass
outputs:
- output:
- initial_input_ast: 3a50bcc0c4416f93de77861848ac00cd1b40e17f4c023ab3faea0fc0c332f148
initial_ast: 3cec4b6f5256f2e9299abbc632cc999d268bfaad48790abeb6efaad6f8081b2f
- initial_input_ast: c3b606138d1dc5f4dc541ddc113fb7d6e07cad4cbd1f382fcc0f9b8517077448
initial_ast: 3bf00992729530c37d6bd18b45096638d49ae243d31d52d633f065844f2523a4
symbol_table: 5bb0a34e488683807eef29093f204fb2f1cfe8da3c2e2370d61e427a109a2d4a

@ -3,6 +3,6 @@ namespace: Compile
expectation: Pass
outputs:
- output:
- initial_input_ast: 26679d48a4f878012c9f9cacccf9d2d1ef568126030a21abf74a5a4d8e5116d4
initial_ast: cc14440f67a0aecc81bc6e1aac0b5571fa1b9937af6ff3dde20addfce27be9e4
- initial_input_ast: 138147dfed32a1089f1d6a4ce19ef4f5278dcdbc2012c432ab460bc0601aaa11
initial_ast: 8ab7c0eefb3da8ee71f77391565e5d0ee349e690d8cb6f8d8cda9e6582b9d3c5
symbol_table: 577abb859b2f33b9e81c5e94c82b559601f44025143fa7a6757561b47e78efa5

@ -2,4 +2,4 @@
namespace: Compile
expectation: Fail
outputs:
- "Error [EAST0372014]: function `main` shadowed\n --> compiler-test:3:1\n |\n 3 | function main(y: bool) -> bool {\n 4 | ...\n 5 | ...\n 6 | }\n | ^\nError [EAST0372014]: function `main` shadowed\n --> compiler-test:3:1\n |\n 3 | function main(y: bool) -> bool {\n 4 | ...\n 5 | ...\n 6 | }\n | ^"
- "Error [EAST0372014]: function `main` shadowed\n --> compiler-test:3:1\n |\n 3 | function main(y: bool) -> bool {\n 4 | console.log(\"{}\", 1u8);\n 5 | return y;\n 6 | }\n | ^\nError [EAST0372014]: function `main` shadowed\n --> compiler-test:3:1\n |\n 3 | function main(y: bool) -> bool {\n 4 | console.log(\"{}\", 1u8);\n 5 | return y;\n 6 | }\n | ^"
@ -3,6 +3,6 @@ namespace: Compile
expectation: Pass
outputs:
- output:
- initial_input_ast: e518f4721bb7a7b6c63e380710a5a8cf4e489ccf66461bf9a68dc4b369e16445
initial_ast: d350d059ac4c8b9d1ed1ea39e7345770d9587f8c77ca9f9a5debb1d6ef41038c
- initial_input_ast: 78b65cde248c05f4abfe2d3cf794fbd44de082303631db7e3002aa724099fee1
initial_ast: b9c41b81bba799989ce6abcae9ea82f5ba0564a66709c675db033456ac1ef862
symbol_table: 6754c028b1d3793f022a7da93be8510a6844da8a2e45f5dcaa9566252e418ee2

@ -3,6 +3,6 @@ namespace: Compile
expectation: Pass
outputs:
- output:
- initial_input_ast: e518f4721bb7a7b6c63e380710a5a8cf4e489ccf66461bf9a68dc4b369e16445
initial_ast: 58d1ae0bbcc2c4cf4fa16dc074a64a7a0c4bedef9b0a4230968211edf9b81b26
- initial_input_ast: 14d0aff05a3b8673ac44d18a969bd03157e19a724ebe2b6e805fdc82aa1e070d
initial_ast: 7266dc80dc8dc35c544868574e0c7654debd2e16f791a9a5ce711685950219a1
symbol_table: c45d23aa877641cbf1853709cc103d389f3e3105b5c873f8bb90c3a0c48bd2ff

@ -3,6 +3,6 @@ namespace: Compile
expectation: Pass
outputs:
- output:
- initial_input_ast: e518f4721bb7a7b6c63e380710a5a8cf4e489ccf66461bf9a68dc4b369e16445
initial_ast: 0bc692fc558896d4c90a4826d54ed1c6d41ce578ee930c3546d1d9d4169b4844
- initial_input_ast: 5b2906e1b93966fe1b141bb06b4aa45f7a6e60ae0c0895b96cf870eb246e98b4
initial_ast: 07480168f37e751b264e4de7e4ef66cea4db7fb1893de65accc9d1da3435f917
symbol_table: 7c82d098d4b483b968c5b928f68a4a6f040bf961bbf5192bf323ddabbe592da8

@ -3,6 +3,6 @@ namespace: Compile
expectation: Pass
outputs:
- output:
- initial_input_ast: e518f4721bb7a7b6c63e380710a5a8cf4e489ccf66461bf9a68dc4b369e16445
initial_ast: 8e4af69e677c1eb3afc851864e57fc88768d97bbfbd96b1680b263ba3a24ed98
- initial_input_ast: 1ee96076151487dc5e1988331d53506585dd380909cbeab8c32d3f6e6913456d
initial_ast: c45cde3ccb382a43e8920b48605cbd571b113a699bfa53adfc986e7ce3ab46eb
symbol_table: 8bddbedba52c66dc7a86530a2df470eb3222992c10b75842431a82afc7e936d4

@ -2,4 +2,4 @@
namespace: Compile
expectation: Fail
outputs:
- "Error [EAST0372014]: function `hi` shadowed\n --> compiler-test:3:1\n |\n 3 | function hi() -> u8 {\n 4 | ...\n 5 | }\n | ^\nError [EAST0372014]: function `hi` shadowed\n --> compiler-test:3:1\n |\n 3 | function hi() -> u8 {\n 4 | ...\n 5 | }\n | ^"
- "Error [EAST0372014]: function `hi` shadowed\n --> compiler-test:3:1\n |\n 3 | function hi() -> u8 {\n 4 | return 0u8;\n 5 | }\n | ^\nError [EAST0372014]: function `hi` shadowed\n --> compiler-test:3:1\n |\n 3 | function hi() -> u8 {\n 4 | return 0u8;\n 5 | }\n | ^"
@ -3,6 +3,6 @@ namespace: Compile
expectation: Pass
outputs:
- output:
- initial_input_ast: ee823464d3be14662261697b47c73a67a47c47558987333869ea6e72d6e34ebf
initial_ast: 9be3781304a8515dd1a5e35ce3a23574d3b73d5403a46dbd33bb698c7f5f235a
- initial_input_ast: f4e1b23f37abb9bcb386ddfd37ee066395d8e84f8ace0f4eb467264131e89fb0
initial_ast: 526ba2fdb0342e958bc77572fab3680301af8e1f576a462bb7d94a348fa5f45e
symbol_table: b10964224747af7f8ba12f1b3c0dfa228842b3e08b9b82d785b71def31387144

@ -3,6 +3,6 @@ namespace: Compile
expectation: Pass
outputs:
- output:
- initial_input_ast: ecd9a5086d5d85f1794e023ff6c06e68cc0b4ae67e3f9abc88cd1354ed8fdfad
initial_ast: bd47ab2f7c4c1013c5d401a6e1d6af4f28117203c9decaf9864cb0cc7806efaf
- initial_input_ast: a183384b085186e92efdf0ccd221ba0f3de6e75cffc5610ed583ccd95aa4adcb
initial_ast: bdee31d5ffcb2f4a27fb4b9deb14fea6a514d72323d827a0c0f8f44cd96aa4b6
symbol_table: 584d3ba9f7908f1b2e0c918710e78d0a483c12aa3f4644edada2eac31ac689ca

@ -4,5 +4,5 @@ expectation: Pass
outputs:
- output:
- initial_input_ast: no input
initial_ast: 640119c4e45f3df44391fe0e08e45a44dfac3a996610e10590666400ceebaff8
initial_ast: a5068b93a19a2a1062918085bd20112cf08451b84508236869220df920fefb0a
symbol_table: 9a61702119ebc681917d7cb7e40ecafa00354849326bf1182635f27a28da35e9

@ -4,5 +4,5 @@ expectation: Pass
outputs:
- output:
- initial_input_ast: no input
initial_ast: d1df1007245f08ea1398244ba810f689c018c288a85307aabb1632a6a8044de4
initial_ast: 11ac6a7372ddf4362cd7eb5a1823c0393db61def4f16c7f6185c4048462e3846
symbol_table: e4a96223c049893c904a90f24d069592b33fc137de0f4816cf92089e63663693

@ -4,5 +4,5 @@ expectation: Pass
outputs:
- output:
- initial_input_ast: no input
initial_ast: 36e2dac16d3b386145e585988c6dbd41890556deaa10e6776d0bb2e5de8654e3
initial_ast: cae7822867b84b0eaedc184a5f5824b45b5a9f2fa3d3262f463b89fd2932de33
symbol_table: 1817d91b99941ddc2590c6a2777ad8f7d4ba26a8b2a3baa3932f1a08eb540206
Some files were not shown because too many files have changed in this diff.