27: Ecma parser errors r=kdy1 a=kdy1

I stored context in lexer because lexer should have access to parser's context.
This commit is contained in:
bors[bot] 2018-01-27 04:42:40 +00:00
commit d4e2c90caf
28 changed files with 1301 additions and 472 deletions

View File

@ -36,6 +36,7 @@ notifications:
on_success: never
env:
global:
- RUST_MIN_STACK=4194304
- RUSTFLAGS="--cfg procmacro2_semver_exempt"
- secure: Z4RSNlpg/12Qx2fIjS+7TToYxPJQgK70X7u9A5lJiCIa0JfzWCxr1ZEKXfAVLG9o4nQok+nWOZa+vxR1IgyUVnf6oSanjjWl1pSRbvccxMS799NvHmGzIAiqSKAlxiSJuxf7MQbs1XBuI3XahsWLfXGPo7vPT6sKe4AAf9gT6igJr61D5hpHkVIXK7P6rnrWQALQYplyaox0NlU9UlqSXXBjdJfp3138rl7FIeYRsMMow44unBNPvs+mhVP8PWpeFWeop0jxbNbTHwnJUbCm4ZWrvqnJ/m70IMlBMN1AskLmz4KeXOhPx+XR9VtdWBX4q8lJ7s9J0hMBxrEnxgiYVBPMlLoEX+wW3zwZ5F+DQs7uLpRHYRUpxpi/7ZuQjp+uT3mN9PMMSvbHwHLH2r/CC9olKYWySXIsGsGVyyMnZeUwvgzwxiYLoeCWe4zZY99zc7jvGKbSmk0RtPu6hApPwL5A6novXbXL2QsXzqqeWpgMLmZyb7KYhM5IGIAB1oPQIqI++Re9Z+/ea/DRSUJOsA96yRQ+vVbiuClrVgDhaAaJOGYCtR1XZ5N2zRb9+Spu/ECtfisLOb9Xs1584DyRbqG69nRdjuscjYOTFZUlOoOeFvuADY65Jt0kF6u7g8NIDkJ1ROb3heKQtY/bAQUrBNUJydOQnn5tBwn8Z618+Ac=

View File

@ -60,6 +60,7 @@ fn main() {
"async",
"as",
// future reserved words?
"enum",
"implements",
"interface",
"package",

View File

@ -3,7 +3,11 @@ use swc_macros::ast_node;
#[ast_node]
pub enum Lit {
Str(String),
Str {
value: String,
/// This includes line escape.
has_escape: bool,
},
Bool(bool),
Null,
Num(Number),

View File

@ -14,7 +14,7 @@ pub(crate) struct Eof<'a> {
impl<'a> From<Eof<'a>> for Diagnostic<'a> {
fn from(Eof { handler, last }: Eof<'a>) -> Self {
handler.error("expected some tokens after here").span(last)
handler.error("Unexpected eof").span(last)
}
}
@ -24,18 +24,31 @@ impl<'a> Debug for Eof<'a> {
}
}
pub(crate) struct Error<'a> {
pub(crate) struct ErrorToDiag<'a> {
pub handler: &'a Handler,
pub span: Span,
pub error: SyntaxError,
}
#[derive(Debug)]
#[derive(Debug, Clone, PartialEq)]
pub(crate) struct Error {
pub span: Span,
pub error: SyntaxError,
}
#[derive(Debug, Clone, PartialEq)]
pub(crate) enum SyntaxError {
LegacyDecimal,
LegacyOctal,
InvalidIdentChar,
NonUtf8Char {
val: u32,
},
ExpectedDigit {
radix: u8,
},
UnterminatedBlockComment,
// #[fail(display = "unterminated string constant: {:?}", start)]
UnterminatedStrLit,
// #[fail(display = "expected unicode escape sequence: {:?}", pos)]
@ -46,20 +59,22 @@ pub(crate) enum SyntaxError {
},
// #[fail(display = "unterminated regexp (regexp started at {:?})", start)]
UnterminatedRegxp,
UnterminatedTpl,
// #[fail(display = "identifier directly after number at {:?}", pos)]
IdentAfterNum,
// #[fail(display = "Unexpected character '{}' at {:?}", c, pos)]
UnexpectedChar {
c: char,
},
// #[fail(display = "Invalid string escape at {:?}", start)]
InvalidStrEscape,
// #[fail(display = "Invalid unciode escape at {:?}", pos)]
InvalidUnicodeEscape,
// #[fail(display = "Invalid unciode code point at {:?}", pos)]
InvalidCodePoint,
ExpectedHexChars {
/// Number of expected characters.
count: u8,
},
NumLitTerminatedWithExp,
LegacyCommentInModule,
/// "implements", "interface", "let", "package",\
/// "private", "protected", "public", "static", or "yield"
@ -71,12 +86,13 @@ pub(crate) enum SyntaxError {
left_span: Span,
},
LineBreakInThrow,
LineBreakBeforeArrow,
/// Unexpected token
Unexpected,
Expected(&'static Token),
/// "await* has been removed from the async functions proposal. Use
/// Promise.all() instead."
AwaitStar,
/// "cannot use a reserved word as a shorthand property"
ReservedWordInObjShorthandOrPat,
MultipleDefault,
@ -85,33 +101,66 @@ pub(crate) enum SyntaxError {
SpreadInParenExpr,
/// `()`
EmptyParenExpr,
InvalidPat,
NotSimpleAssign,
ExpectedIdent,
ExpctedSemi,
DuplicateLabel(JsWord),
AsyncGenerator,
NonTopLevelImportExport,
PatVarWithoutInit,
WithInStrict,
ReturnNotAllowed,
TooManyVarInForInHead,
VarInitializerInForInHead,
LabelledGenerator,
YieldParamInGen,
}
impl<'a> From<Error<'a>> for Diagnostic<'a> {
fn from(e: Error<'a>) -> Self {
let msg: Cow<'static, _> = match e.error {
LegacyDecimal => "Legacy decimal literal is not permitted in strict mode".into(),
LegacyOctal => "Legacy octal literal is not permitted in strict mode".into(),
InvalidIdentChar => "Invalid character in identifier".into(),
impl<'a> From<ErrorToDiag<'a>> for Error {
#[inline(always)]
fn from(e: ErrorToDiag<'a>) -> Self {
Error {
span: e.span,
error: e.error,
}
}
}
impl<'a> From<ErrorToDiag<'a>> for Diagnostic<'a> {
#[inline(always)]
fn from(e: ErrorToDiag<'a>) -> Self {
let msg: Cow<'static, _> = match e.error {
LegacyDecimal => "Legacy decimal escape is not permitted in strict mode".into(),
LegacyOctal => "Legacy octal escape is not permitted in strict mode".into(),
InvalidIdentChar => "Invalid character in identifier".into(),
NonUtf8Char { val } => format!("Not an utf-8 character: {}", val).into(),
ExpectedDigit { radix } => format!(
"Expected {} digit",
match radix {
2 => "a binary",
8 => "an octal",
10 => "a decimal",
16 => "a hexadecimal",
_ => unreachable!(),
}
).into(),
UnterminatedBlockComment => "Unterminated block comment".into(),
UnterminatedStrLit => "Unterminated string constant".into(),
ExpectedUnicodeEscape => "Expected unicode escape".into(),
EscapeInReservedWord { word } => {
format!("unexpected escape sequence in reserved word: {}", word).into()
format!("Unexpected escape sequence in reserved word: {}", word).into()
}
UnterminatedRegxp => "Unterminated regexp literal".into(),
UnterminatedTpl => "Unterminated template".into(),
IdentAfterNum => "Identifier cannot follow number".into(),
UnexpectedChar { c } => format!("Unexpected character '{}'", c).into(),
InvalidStrEscape => "Invalid string escape".into(),
InvalidUnicodeEscape => "Invalid unciode escape".into(),
InvalidCodePoint => "Invalid unciode code point".into(),
ExpectedHexChars { count } => format!("Expected {} hex characters", count).into(),
LegacyCommentInModule => "Legacy comments cannot be used in module code".into(),
NumLitTerminatedWithExp => "Expected +, - or decimal digit after e".into(),
InvalidIdentInStrict => "'implements', 'interface', 'let', 'package', 'private', \
'protected', 'public', 'static', or 'yield' cannot be used \
@ -122,11 +171,12 @@ impl<'a> From<Error<'a>> for Diagnostic<'a> {
}
UnaryInExp { .. } => "** cannot be applied to unary expression".into(),
LineBreakInThrow => "LineBreak cannot follow 'throw'".into(),
LineBreakBeforeArrow => "Unexpected line break between arrow head and arrow".into(),
Unexpected => "Unexpected token".into(),
Expected(token) => format!("Expected {:?}", token).into(),
AwaitStar => "await* has been removed from the async functions proposal. Use
\
// Promise.all() instead."
AwaitStar => "await* has been removed from the async functions proposal. Use \
Promise.all() instead."
.into(),
ReservedWordInObjShorthandOrPat => {
@ -137,7 +187,10 @@ impl<'a> From<Error<'a>> for Diagnostic<'a> {
CommaAfterRestElement => "Trailing comma isn't permitted after a rest element".into(),
NonLastRestParam => "Rest element must be final element".into(),
SpreadInParenExpr => "Parenthesized expression cannot contain spread operator".into(),
EmptyParenExpr => "Parenthized exprssion cannot be empty".into(),
EmptyParenExpr => "Parenthized expression cannot be empty".into(),
InvalidPat => "Not a pattern".into(),
// TODO
NotSimpleAssign => "Cannot assign to this".into(),
ExpectedIdent => "Expected ident".into(),
ExpctedSemi => "Expected ';' or line break".into(),
DuplicateLabel(label) => format!("Label {} is already declared", label).into(),
@ -145,6 +198,12 @@ impl<'a> From<Error<'a>> for Diagnostic<'a> {
NonTopLevelImportExport => "'import', and 'export' are not permitted here".into(),
PatVarWithoutInit => "Destructuring bindings require initializers".into(),
WithInStrict => "With statement are not allowed in strict mode".into(),
ReturnNotAllowed => "Return statement is not allowed here".into(),
TooManyVarInForInHead => "Expected one variable binding".into(),
VarInitializerInForInHead => "Unexpected initializer in for in/of loop".into(),
LabelledGenerator => "Generator cannot be labelled".into(),
YieldParamInGen => "'yield' cannot be used as a parameter within generator".into(),
};
e.handler.error(&msg).span(e.span)

View File

@ -1,3 +1,4 @@
use super::util::CharExt;
use std::str;
use swc_common::{BytePos, FileMap};
@ -9,22 +10,32 @@ pub(super) struct LexerInput<I: Input> {
}
impl<I: Input> LexerInput<I> {
pub const fn new(input: I) -> Self {
LexerInput {
pub fn new(input: I) -> Self {
let mut i = LexerInput {
input,
last_pos: BytePos(0),
cur: None,
}
};
i.input.record_new_line(i.last_pos);
i
}
pub fn bump(&mut self) {
let pos = match self.cur.take() {
Some((p, prev_c)) => BytePos(p.0 + prev_c.len_utf8() as u32),
let is_new_line = match self.cur.take() {
Some((p, prev_c)) => {
self.last_pos = BytePos(p.0 + prev_c.len_utf8() as u32);
prev_c.is_line_break()
}
None => unreachable!("bump is called without knowing current character"),
};
self.cur = self.input.next();
self.last_pos = pos;
if is_new_line {
match self.cur {
Some((p, _)) => self.input.record_new_line(p),
None => {}
}
}
}
pub fn peek(&mut self) -> Option<char> {
@ -102,12 +113,16 @@ impl<'a> Input for FileMapInput<'a> {
//TODO?
None
}
fn record_new_line(&self, pos: BytePos) {
self.fm.next_line(pos)
}
}
pub trait Input: Iterator<Item = (BytePos, char)> {
fn peek(&mut self) -> Option<(BytePos, char)>;
fn peek_ahead(&mut self) -> Option<(BytePos, char)>;
fn record_new_line(&self, _pos: BytePos) {}
///Takes items from stream, testing each one with predicate. returns the
/// range of items which passed predicate.

View File

@ -4,17 +4,17 @@
#![allow(unused_mut)]
#![allow(unused_variables)]
pub use self::input::Input;
use self::input::LexerInput;
use self::state::State;
use self::util::*;
use Session;
use {Context, Session};
use error::SyntaxError;
use parser_macros::parser;
use std::char;
use swc_atoms::JsWord;
use swc_common::{BytePos, Span};
use swc_common::errors::Diagnostic;
use token::*;
#[macro_use]
@ -26,10 +26,11 @@ mod state;
mod tests;
pub mod util;
pub type LexResult<'a, T> = Result<T, Diagnostic<'a>>;
pub(crate) type LexResult<T> = Result<T, ::error::Error>;
pub struct Lexer<'a, I: Input> {
pub(crate) struct Lexer<'a, I: Input> {
session: Session<'a>,
pub ctx: Context,
input: LexerInput<I>,
state: State,
}
@ -39,11 +40,12 @@ impl<'a, I: Input> Lexer<'a, I> {
Lexer {
session,
input: LexerInput::new(input),
state: State::new(),
state: Default::default(),
ctx: Default::default(),
}
}
fn read_token(&mut self) -> LexResult<'a, Option<Token>> {
fn read_token(&mut self) -> LexResult<Option<Token>> {
let c = match self.input.current() {
Some(c) => c,
None => return Ok(None),
@ -206,8 +208,15 @@ impl<'a, I: Input> Lexer<'a, I> {
// Handle -->
if self.state.had_line_break && c == '-' && is!(self, '>') {
if self.ctx.module {
syntax_error!(
self,
span!(self, start),
SyntaxError::LegacyCommentInModule
)
}
self.skip_line_comment(1);
self.skip_space();
self.skip_space()?;
return self.read_token();
}
@ -273,7 +282,7 @@ impl<'a, I: Input> Lexer<'a, I> {
}
/// Read an escaped charater for string literal.
fn read_escaped_char(&mut self, in_template: bool) -> LexResult<'a, Option<char>> {
fn read_escaped_char(&mut self, in_template: bool) -> LexResult<Option<char>> {
assert_eq!(cur!(self), Some('\\'));
let start = cur_pos!(self);
bump!(self); // '\'
@ -305,7 +314,7 @@ impl<'a, I: Input> Lexer<'a, I> {
// read hexadecimal escape sequences
'x' => {
bump!(self); // 'x'
return self.read_hex_char(2).map(Some);
return self.read_hex_char(start, 2).map(Some);
}
// read unicode escape sequences
@ -318,11 +327,22 @@ impl<'a, I: Input> Lexer<'a, I> {
let first_c = if c == '0' {
match cur!(self) {
Some(next) if next.is_digit(8) => c,
// \0 is not an octal literal nor decimal literal.
_ => return Ok(Some('\u{0000}')),
}
} else {
c
};
// TODO: Show template instead of strict mode
if in_template {
syntax_error!(self, span!(self, start), SyntaxError::LegacyOctal)
}
if self.ctx.strict {
syntax_error!(self, span!(self, start), SyntaxError::LegacyOctal)
}
let mut value: u8 = first_c.to_digit(8).unwrap() as u8;
macro_rules! one {
($check:expr) => {{
@ -362,7 +382,7 @@ impl<'a, I: Input> Lexer<'a, I> {
#[parser]
impl<'a, I: Input> Lexer<'a, I> {
fn read_slash(&mut self) -> LexResult<'a, Option<Token>> {
fn read_slash(&mut self) -> LexResult<Option<Token>> {
debug_assert_eq!(cur!(), Some('/'));
let start = cur_pos!();
@ -382,18 +402,18 @@ impl<'a, I: Input> Lexer<'a, I> {
}))
}
fn read_token_lt_gt(&mut self) -> LexResult<'a, Option<Token>> {
fn read_token_lt_gt(&mut self) -> LexResult<Option<Token>> {
assert!(cur!() == Some('<') || cur!() == Some('>'));
let c = cur!().unwrap();
bump!();
// XML style comment. `<!--`
if !self.session.cfg.module && c == '<' && is!('!') && peek!() == Some('-')
if !self.ctx.module && c == '<' && is!('!') && peek!() == Some('-')
&& peek_ahead!() == Some('-')
{
self.skip_line_comment(3);
self.skip_space();
self.skip_space()?;
return self.read_token();
}
@ -428,25 +448,27 @@ impl<'a, I: Input> Lexer<'a, I> {
}
/// See https://tc39.github.io/ecma262/#sec-names-and-keywords
fn read_ident_or_keyword(&mut self) -> LexResult<'a, Token> {
fn read_ident_or_keyword(&mut self) -> LexResult<Token> {
assert!(cur!().is_some());
let start = cur_pos!();
let (word, has_escape) = self.read_word_as_str()?;
// TODO: Use extension trait instead of into/from
let word = Word::from(word);
if has_escape && word.is_reserved_word(self.session.cfg.strict) {
// Note: ctx is store in lexer because of this error.
// 'await' and 'yield' may have semantic of reserved word, which means lexer
// should know context or parser should handle this error. Our approach to this
// problem is former one.
if has_escape && self.ctx.is_reserved_word(&word) {
syntax_error!(
span!(start),
SyntaxError::EscapeInReservedWord { word: word.into() }
);
} else {
Ok(Word(word))
Ok(Word(word.into()))
}
}
fn may_read_word_as_str(&mut self) -> LexResult<'a, Option<(JsWord, bool)>> {
fn may_read_word_as_str(&mut self) -> LexResult<Option<(JsWord, bool)>> {
match cur!() {
Some(c) if c.is_ident_start() => self.read_word_as_str().map(Some),
_ => Ok(None),
@ -454,7 +476,7 @@ impl<'a, I: Input> Lexer<'a, I> {
}
/// returns (word, has_escape)
fn read_word_as_str(&mut self) -> LexResult<'a, (JsWord, bool)> {
fn read_word_as_str(&mut self) -> LexResult<(JsWord, bool)> {
assert!(cur!().is_some());
let mut has_escape = false;
@ -497,7 +519,7 @@ impl<'a, I: Input> Lexer<'a, I> {
Ok((word.into(), has_escape))
}
fn read_unicode_escape(&mut self, start: BytePos) -> LexResult<'a, char> {
fn read_unicode_escape(&mut self, start: BytePos) -> LexResult<char> {
assert_eq!(cur!(), Some('u'));
bump!();
@ -511,25 +533,25 @@ impl<'a, I: Input> Lexer<'a, I> {
Ok(c)
} else {
self.read_hex_char(4)
self.read_hex_char(start, 4)
}
}
fn read_hex_char(&mut self, count: u8) -> LexResult<'a, char> {
fn read_hex_char(&mut self, start: BytePos, count: u8) -> LexResult<char> {
debug_assert!(count == 2 || count == 4);
let pos = cur_pos!();
match self.read_int(16, count)? {
Some(val) => match char::from_u32(val) {
Some(c) => Ok(c),
None => unimplemented!("Syntax Error: not char? val = {}", val),
None => syntax_error!(span!(start), SyntaxError::NonUtf8Char { val }),
},
None => unimplemented!("Syntax Error: expected {} hex chars", count),
None => syntax_error!(span!(start), SyntaxError::ExpectedHexChars { count }),
}
}
/// Read `CodePoint`.
fn read_code_point(&mut self) -> LexResult<'a, char> {
fn read_code_point(&mut self) -> LexResult<char> {
let start = cur_pos!();
let val = self.read_int(16, 0)?;
match val {
@ -542,13 +564,14 @@ impl<'a, I: Input> Lexer<'a, I> {
}
/// See https://tc39.github.io/ecma262/#sec-literals-string-literals
fn read_str_lit(&mut self) -> LexResult<'a, Token> {
fn read_str_lit(&mut self) -> LexResult<Token> {
assert!(cur!() == Some('\'') || cur!() == Some('"'));
let start = cur_pos!();
let quote = cur!().unwrap();
bump!(); // '"'
let mut out = String::new();
let mut has_escape = false;
//TODO: Optimize (Cow, Chunk)
@ -556,9 +579,15 @@ impl<'a, I: Input> Lexer<'a, I> {
match c {
c if c == quote => {
bump!();
return Ok(Str(out, c == '"'));
return Ok(Str {
value: out,
has_escape,
});
}
'\\' => {
out.extend(self.read_escaped_char(false)?);
has_escape = true
}
'\\' => out.extend(self.read_escaped_char(false)?),
c if c.is_line_break() => {
syntax_error!(span!(start), SyntaxError::UnterminatedStrLit)
}
@ -573,7 +602,7 @@ impl<'a, I: Input> Lexer<'a, I> {
}
/// Expects current char to be '/'
fn read_regexp(&mut self) -> LexResult<'a, Token> {
fn read_regexp(&mut self) -> LexResult<Token> {
assert_eq!(cur!(), Some('/'));
let start = cur_pos!();
bump!();
@ -624,7 +653,7 @@ impl<'a, I: Input> Lexer<'a, I> {
Ok(Regex(content, flags))
}
fn read_tmpl_token(&mut self) -> LexResult<'a, Token> {
fn read_tmpl_token(&mut self, start_of_tpl: BytePos) -> LexResult<Token> {
let start = cur_pos!();
// TODO: Optimize
@ -666,7 +695,7 @@ impl<'a, I: Input> Lexer<'a, I> {
}
}
unimplemented!("error: unterminated template");
syntax_error!(span!(start_of_tpl), SyntaxError::UnterminatedTpl)
}
pub fn had_line_break_before_last(&self) -> bool {

View File

@ -12,7 +12,7 @@ impl<'a, I: Input> Lexer<'a, I> {
/// Reads an integer, octal integer, or floating-point number
///
///
pub(super) fn read_number(&mut self, starts_with_dot: bool) -> LexResult<'a, Number> {
pub(super) fn read_number(&mut self, starts_with_dot: bool) -> LexResult<Number> {
assert!(cur!().is_some());
if starts_with_dot {
debug_assert_eq!(
@ -51,7 +51,7 @@ impl<'a, I: Input> Lexer<'a, I> {
// strict mode hates non-zero decimals starting with zero.
// e.g. 08.1 is strict mode violation but 0.1 is valid float.
if self.session.cfg.strict {
if self.ctx.strict {
syntax_error!(span!(start), SyntaxError::LegacyDecimal);
}
@ -113,7 +113,7 @@ impl<'a, I: Input> Lexer<'a, I> {
if eat!('e') || eat!('E') {
let next = match cur!() {
Some(next) => next,
None => unimplemented!("expected +, - or digit after e"),
None => syntax_error!(span!(start), SyntaxError::NumLitTerminatedWithExp),
};
let positive = if next == '+' || next == '-' {
@ -123,7 +123,6 @@ impl<'a, I: Input> Lexer<'a, I> {
true
};
// TODO: Optimize this
let exp = self.read_number_no_dot(10)?;
let flag = if positive { '+' } else { '-' };
// TODO:
@ -137,7 +136,7 @@ impl<'a, I: Input> Lexer<'a, I> {
Ok(Number(val))
}
pub(super) fn read_radix_number(&mut self, radix: u8) -> LexResult<'a, Number> {
pub(super) fn read_radix_number(&mut self, radix: u8) -> LexResult<Number> {
debug_assert!(
radix == 2 || radix == 8 || radix == 16,
"radix should be one of 2, 8, 16, but got {}",
@ -156,20 +155,29 @@ impl<'a, I: Input> Lexer<'a, I> {
/// This can read long integers like
/// "13612536612375123612312312312312312312312".
fn read_number_no_dot(&mut self, radix: u8) -> LexResult<'a, f64> {
fn read_number_no_dot(&mut self, radix: u8) -> LexResult<f64> {
debug_assert!(
radix == 2 || radix == 8 || radix == 10 || radix == 16,
"radix for read_number_no_dot should be one of 2, 8, 10, 16, but got {}",
radix
);
let start = cur_pos!();
self.read_digits(radix, |total, radix, v| {
let mut read_any = false;
let res = self.read_digits(radix, |total, radix, v| {
read_any = true;
(f64::mul_add(total, radix as f64, v as f64), true)
})
});
if !read_any {
syntax_error!(span!(start), SyntaxError::ExpectedDigit { radix });
}
res
}
/// Ensure that ident cannot directly follow numbers.
fn ensure_not_ident(&mut self) -> LexResult<'a, ()> {
fn ensure_not_ident(&mut self) -> LexResult<()> {
match cur!() {
Some(c) if c.is_ident_start() => {
syntax_error!(pos_span(cur_pos!()), SyntaxError::IdentAfterNum)
@ -182,17 +190,22 @@ impl<'a, I: Input> Lexer<'a, I> {
/// were read, the integer value otherwise.
/// When `len` is not zero, this
/// will return `None` unless the integer has exactly `len` digits.
pub(super) fn read_int(&mut self, radix: u8, len: u8) -> LexResult<'a, Option<u32>> {
pub(super) fn read_int(&mut self, radix: u8, len: u8) -> LexResult<Option<u32>> {
let mut count = 0;
self.read_digits(radix, |opt: Option<u32>, radix, val| {
let v = self.read_digits(radix, |opt: Option<u32>, radix, val| {
count += 1;
let total = opt.unwrap_or_default() * radix as u32 + val as u32;
(Some(total), count != len)
})
})?;
if len != 0 && count != len {
Ok(None)
} else {
Ok(v)
}
}
/// `op`- |total, radix, value| -> (total * radix + value, continue)
fn read_digits<F, Ret>(&mut self, radix: u8, mut op: F) -> LexResult<'a, Ret>
fn read_digits<F, Ret>(&mut self, radix: u8, mut op: F) -> LexResult<Ret>
where
F: FnMut(Ret, u8, u32) -> (Ret, bool),
Ret: Copy + Default,
@ -239,23 +252,23 @@ impl<'a, I: Input> Lexer<'a, I> {
let val = if let Some(val) = c.to_digit(radix as _) {
val
} else {
break;
return Ok(total);
};
bump!();
let (t, cont) = op(total, radix, val);
total = t;
if !cont {
break;
return Ok(total);
}
}
Ok(total)
}
fn make_legacy_octal(&mut self, start: BytePos, val: f64) -> LexResult<'a, Number> {
fn make_legacy_octal(&mut self, start: BytePos, val: f64) -> LexResult<Number> {
self.ensure_not_ident()?;
return if self.session.cfg.strict {
return if self.ctx.strict {
syntax_error!(span!(start), SyntaxError::LegacyOctal)
} else {
// FIXME
@ -282,23 +295,13 @@ mod tests {
}
fn num(s: &'static str) -> f64 {
lex(s, |l| {
l.read_number(s.starts_with("."))
.unwrap_or_else(|err| {
err.emit();
unreachable!()
})
.0
})
lex(s, |l| l.read_number(s.starts_with(".")).unwrap().0)
}
fn int(radix: u8, s: &'static str) -> u32 {
lex(s, |l| {
l.read_int(radix, 0)
.unwrap_or_else(|err| {
err.emit();
unreachable!()
})
.unwrap()
.expect("read_int returned None")
})
}
@ -345,10 +348,7 @@ mod tests {
fn read_radix_number() {
assert_eq!(
Number(0o73 as f64),
lex("0o73", |l| l.read_radix_number(8).unwrap_or_else(|err| {
err.emit();
unreachable!()
}))
lex("0o73", |l| l.read_radix_number(8).unwrap())
);
}
@ -371,10 +371,9 @@ mod tests {
let vec = panic::catch_unwind(|| {
::with_test_sess(case, |mut sess, input| {
sess.cfg.strict = strict;
Lexer::new(sess, input)
.map(|ts| ts.token)
.collect::<Vec<_>>()
let mut l = Lexer::new(sess, input);
l.ctx.strict = strict;
l.map(|ts| ts.token).collect::<Vec<_>>()
})
});

View File

@ -31,25 +31,41 @@ impl<'a, I: Input> Iterator for Lexer<'a, I> {
// skip spaces before getting next character, if we are allowed to.
if self.state.can_skip_space() {
self.skip_space()
let start = cur_pos!();
match self.skip_space() {
Err(err) => {
return Some(Token::Error(err)).map(|token| {
// Attatch span to token.
TokenAndSpan {
token,
had_line_break: self.had_line_break_before_last(),
span: span!(start),
}
});
}
_ => {}
}
};
let start = cur_pos!();
let res = if self.state.is_in_template() {
self.read_tmpl_token().map(Some)
let res = if let Some(Type::Tpl {
start: start_pos_of_tpl,
}) = self.state.context.current()
{
self.read_tmpl_token(start_pos_of_tpl).map(Some)
} else {
self.read_token()
};
let token = res.unwrap_or_else(|err| {
// Report error
err.emit();
Some(Token::Error)
});
let token = match res.map_err(Token::Error).map_err(Some) {
Ok(t) => t,
Err(e) => e,
};
if let Some(ref token) = token {
self.state.update(&self.session.logger, &token)
self.state.update(&self.session.logger, start, &token)
}
token.map(|token| {
@ -63,8 +79,8 @@ impl<'a, I: Input> Iterator for Lexer<'a, I> {
}
}
impl State {
pub fn new() -> Self {
impl Default for State {
fn default() -> Self {
State {
is_expr_allowed: true,
octal_pos: None,
@ -74,7 +90,9 @@ impl State {
token_type: None,
}
}
}
impl State {
pub fn can_skip_space(&self) -> bool {
!self.context
.current()
@ -82,10 +100,6 @@ impl State {
.unwrap_or(false)
}
fn is_in_template(&self) -> bool {
self.context.current() == Some(Type::Tpl)
}
pub fn last_was_tpl_element(&self) -> bool {
match self.token_type {
Some(Template(..)) => true,
@ -93,7 +107,7 @@ impl State {
}
}
fn update(&mut self, logger: &Logger, next: &Token) {
fn update(&mut self, logger: &Logger, start: BytePos, next: &Token) {
trace!(
logger,
"updating state: next={:?}, had_line_break={} ",
@ -107,6 +121,7 @@ impl State {
logger,
&mut self.context,
prev,
start,
next,
self.had_line_break,
self.is_expr_allowed,
@ -114,10 +129,12 @@ impl State {
}
/// `is_expr_allowed`: previous value.
/// `start`: start of newly produced token.
fn is_expr_allowed_on_next(
logger: &Logger,
context: &mut Context,
prev: Option<Token>,
start: BytePos,
next: &Token,
had_line_break: bool,
is_expr_allowed: bool,
@ -227,10 +244,10 @@ impl State {
tok!('`') => {
// If we are in template, ` terminates template.
if context.current() == Some(Type::Tpl) {
if let Some(Type::Tpl { .. }) = context.current() {
context.pop(logger);
} else {
context.push(logger, Type::Tpl);
context.push(logger, Type::Tpl { start });
}
return false;
}
@ -324,6 +341,10 @@ enum Type {
is_for_loop: bool,
},
#[kind(is_expr)] ParenExpr,
#[kind(is_expr, preserve_space)] Tpl,
#[kind(is_expr, preserve_space)]
Tpl {
/// Start of a template literal.
start: BytePos,
},
#[kind(is_expr)] FnExpr,
}

View File

@ -126,7 +126,11 @@ fn test262_lexer_error_0001() {
fn test262_lexer_error_0002() {
assert_eq!(
vec![
Str("use strict".into(), false).span(0..15).lb(),
Str {
value: "use strict".into(),
has_escape: true,
}.span(0..15)
.lb(),
Semi.span(15..16),
],
lex(r#"'use\x20strict';"#)
@ -162,7 +166,13 @@ fn ident_escape_unicode_2() {
fn str_escape_hex() {
assert_eq!(
lex(r#"'\x61'"#),
vec![Str("a".into(), false).span(0..6).lb()]
vec![
Str {
value: "a".into(),
has_escape: true,
}.span(0..6)
.lb(),
]
);
}
@ -170,7 +180,13 @@ fn str_escape_hex() {
fn str_escape_octal() {
assert_eq!(
lex(r#"'Hello\012World'"#),
vec![Str("Hello\nWorld".into(), false).span(0..16).lb()]
vec![
Str {
value: "Hello\nWorld".into(),
has_escape: true,
}.span(0..16)
.lb(),
]
)
}
@ -178,7 +194,13 @@ fn str_escape_octal() {
fn str_escape_unicode_long() {
assert_eq!(
lex(r#"'\u{00000000034}'"#),
vec![Str("4".into(), false).span(0..17).lb()]
vec![
Str {
value: "4".into(),
has_escape: true,
}.span(0..17)
.lb(),
]
);
}
@ -502,9 +524,24 @@ fn migrated_0006() {
#[test]
fn str_lit() {
assert_eq!(vec![Str("abcde".into(), false)], lex_tokens("'abcde'"));
assert_eq!(vec![Str("abcde".into(), true)], lex_tokens(r#""abcde""#));
assert_eq!(vec![Str("abc".into(), false)], lex_tokens("'\\\nabc'"));
assert_eq!(
vec![
Str {
value: "abcde".into(),
has_escape: false,
},
],
lex_tokens("'abcde'")
);
assert_eq!(
vec![
Str {
value: "abc".into(),
has_escape: true,
},
],
lex_tokens("'\\\nabc'")
);
}
#[test]

View File

@ -7,9 +7,9 @@
//!
//! Note: Currently this use xid instead of id. (because unicode_xid crate
//! exists)
use super::Lexer;
use super::{LexResult, Lexer};
use super::input::Input;
use error::SyntaxError;
use parser_macros::parser;
use unicode_xid::UnicodeXID;
@ -25,7 +25,7 @@ impl<'a, I: Input> Lexer<'a, I> {
/// Skip comments or whitespaces.
///
/// See https://tc39.github.io/ecma262/#sec-white-space
pub(super) fn skip_space(&mut self) {
pub(super) fn skip_space(&mut self) -> LexResult<()> {
let mut line_break = false;
while let Some(c) = cur!() {
@ -42,7 +42,7 @@ impl<'a, I: Input> Lexer<'a, I> {
self.skip_line_comment(2);
continue;
} else if peek!() == Some('*') {
self.skip_block_comment();
self.skip_block_comment()?;
continue;
}
break;
@ -53,6 +53,8 @@ impl<'a, I: Input> Lexer<'a, I> {
bump!();
}
Ok(())
}
pub(super) fn skip_line_comment(&mut self, start_skip: usize) {
@ -77,7 +79,7 @@ impl<'a, I: Input> Lexer<'a, I> {
}
/// Expects current char to be '/' and next char to be '*'.
pub(super) fn skip_block_comment(&mut self) {
pub(super) fn skip_block_comment(&mut self) -> LexResult<()> {
let start = cur_pos!();
debug_assert_eq!(cur!(), Some('/'));
@ -92,7 +94,7 @@ impl<'a, I: Input> Lexer<'a, I> {
if was_star && is!('/') {
bump!();
// TODO: push comment
return;
return Ok(());
}
if c.is_line_break() {
self.state.had_line_break = true;
@ -102,7 +104,7 @@ impl<'a, I: Input> Lexer<'a, I> {
bump!();
}
unimplemented!("error: unterminated block comment");
syntax_error!(span!(start), SyntaxError::UnterminatedBlockComment)
}
}

View File

@ -39,16 +39,30 @@ mod parser;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct Config {
pub strict: bool,
/// Support numeric separator.
pub num_sep: bool,
/// Support function bind expression.
pub fn_bind: bool,
}
/// Syntatic context.
#[derive(Debug, Clone, Copy, Default)]
struct Context {
/// Is in module code?
pub module: bool,
module: bool,
strict: bool,
include_in_expr: bool,
/// If true, await expression is parsed, and "await" is treated as a
/// keyword.
in_async: bool,
/// If true, yield expression is parsed, and "yield" is treated as a
/// keyword.
in_generator: bool,
in_function: bool,
in_parameters: bool,
}
#[derive(Clone, Copy)]

View File

@ -121,12 +121,12 @@ macro_rules! spanned {
}
macro_rules! syntax_error {
($p:expr, $err:expr) => {{
($p:expr, $err:expr) => {
syntax_error!($p, $p.input.cur_span(), $err)
}};
};
($p:expr, $span:expr, $err:expr) => {{
let err = $crate::error::Error {
let err = $crate::error::ErrorToDiag {
handler: $p.session.handler,
span: $span,
error: $err,

View File

@ -53,29 +53,31 @@ impl<'a, I: Input> Parser<'a, I> {
T: OutputType,
Self: MaybeOptionalIdentParser<'a, T::Ident>,
{
let start = cur_pos!();
expect!("class");
self.strict_mode().parse_with(|p| {
let start = cur_pos!();
expect!("class");
let ident = self.parse_maybe_opt_binding_ident()?;
let ident = p.parse_maybe_opt_binding_ident()?;
let super_class = if eat!("extends") {
self.parse_lhs_expr().map(Some)?
} else {
None
};
let super_class = if eat!("extends") {
p.parse_lhs_expr().map(Some)?
} else {
None
};
expect!('{');
let body = self.parse_class_body()?;
expect!('}');
let end = last_pos!();
Ok(T::finish_class(
ident,
Class {
span: Span::new(start, end, Default::default()),
super_class,
body,
},
))
expect!('{');
let body = p.parse_class_body()?;
expect!('}');
let end = last_pos!();
Ok(T::finish_class(
ident,
Class {
span: Span::new(start, end, Default::default()),
super_class,
body,
},
))
})
}
fn parse_class_body(&mut self) -> PResult<'a, Vec<ClassMethod>> {
@ -120,24 +122,42 @@ impl<'a, I: Input> Parser<'a, I> {
let is_generator = eat!('*');
let ident = self.parse_maybe_opt_binding_ident()?;
let ctx = Context {
in_async: is_async,
in_generator: is_generator,
..self.ctx()
};
expect!('(');
let params = self.parse_formal_params()?;
expect!(')');
let ident = if T::is_fn_expr() {
//
self.with_ctx(ctx).parse_maybe_opt_binding_ident()?
} else {
// function declaration does not change context for `BindingIdentifier`.
self.parse_maybe_opt_binding_ident()?
};
let body = self.parse_fn_body(is_async, is_generator)?;
self.with_ctx(ctx).parse_with(|p| {
expect!('(');
let params_ctx = Context {
in_parameters: true,
..p.ctx()
};
let params = p.with_ctx(params_ctx).parse_formal_params()?;
expect!(')');
Ok(T::finish_fn(
ident,
Function {
span: span!(start),
is_async,
is_generator,
params,
body,
},
))
let body = p.parse_fn_body(is_async, is_generator)?;
Ok(T::finish_fn(
ident,
Function {
span: span!(start),
is_async,
is_generator,
params,
body,
},
))
})
}
/// `parse_args` closure should not eat '(' or ')'.
@ -154,12 +174,16 @@ impl<'a, I: Input> Parser<'a, I> {
let ctx = Context {
in_async: is_async,
in_generator: is_generator,
..self.ctx
..self.ctx()
};
self.with_ctx(ctx).parse_with(|mut p| {
self.with_ctx(ctx).parse_with(|p| {
expect!('(');
let params = parse_args(&mut p)?;
let arg_ctx = Context {
in_parameters: true,
..p.ctx()
};
let params = p.with_ctx(arg_ctx).parse_with(|mut p| parse_args(&mut p))?;
expect!(')');
@ -281,7 +305,8 @@ impl<'a, I: Input> Parser<'a, I> {
let ctx = Context {
in_async: is_async,
in_generator: is_generator,
..self.ctx
in_function: true,
..self.ctx()
};
self.with_ctx(ctx).parse_fn_body_inner()
}
@ -290,6 +315,20 @@ impl<'a, I: Input> Parser<'a, I> {
/// Abstracts over the node kinds a function/class parse can produce
/// (expression vs. declaration), so one parsing routine serves both.
trait OutputType {
    /// Identifier type carried by this output; e.g. `Option<Ident>` for the
    /// expression form, where the name may be omitted.
    type Ident;
    /// From babel..
    ///
    /// When parsing function expression, the binding identifier is parsed
    /// according to the rules inside the function.
    /// e.g. (function* yield() {}) is invalid because "yield" is disallowed in
    /// generators.
    /// This isn't the case with function declarations: function* yield() {} is
    /// valid because yield is parsed as if it was outside the generator.
    /// Therefore, this.state.inGenerator is set before or after parsing the
    /// function id according to the "isStatement" parameter.
    fn is_fn_expr() -> bool {
        false
    }
    /// Builds the output node from the parsed identifier and `Function`.
    fn finish_fn(ident: Self::Ident, f: Function) -> Self;
    /// Builds the output node from the parsed identifier and `Class`.
    fn finish_class(ident: Self::Ident, class: Class) -> Self;
}
@ -297,6 +336,10 @@ trait OutputType {
impl OutputType for Box<Expr> {
type Ident = Option<Ident>;
fn is_fn_expr() -> bool {
true
}
fn finish_fn(ident: Option<Ident>, function: Function) -> Self {
box Expr {
span: function.span,
@ -341,7 +384,7 @@ pub(super) trait FnBodyParser<'a, Body> {
impl<'a, I: Input> FnBodyParser<'a, BlockStmtOrExpr> for Parser<'a, I> {
fn parse_fn_body_inner(&mut self) -> PResult<'a, BlockStmtOrExpr> {
if is!('{') {
self.parse_block().map(BlockStmtOrExpr::BlockStmt)
self.parse_block(false).map(BlockStmtOrExpr::BlockStmt)
} else {
self.parse_assignment_expr().map(BlockStmtOrExpr::Expr)
}
@ -350,7 +393,7 @@ impl<'a, I: Input> FnBodyParser<'a, BlockStmtOrExpr> for Parser<'a, I> {
impl<'a, I: Input> FnBodyParser<'a, BlockStmt> for Parser<'a, I> {
fn parse_fn_body_inner(&mut self) -> PResult<'a, BlockStmt> {
self.parse_block()
self.parse_block(true)
}
}

View File

@ -1,9 +1,11 @@
use super::*;
use std::iter;
use super::pat::PatType;
use super::util::ExprExt;
mod ops;
#[cfg(test)]
mod tests;
mod verifier;
#[parser]
impl<'a, I: Input> Parser<'a, I> {
@ -30,19 +32,16 @@ impl<'a, I: Input> Parser<'a, I> {
/// operators like `+=`.
///
pub(super) fn parse_assignment_expr(&mut self) -> PResult<'a, Box<Expr>> {
if self.ctx.in_generator && is!("yield") {
if self.ctx().in_generator && is!("yield") {
return self.parse_yield_expr();
}
let start = cur_pos!();
self.state.potential_arrow_start = match *cur!()? {
Word(Ident(..)) | tok!('(') | tok!("yield") => Some(start),
Word(Ident(..)) | tok!('(') | tok!("yield") => Some(cur_pos!()),
_ => None,
};
// self.parse_arrow_fn();
// self.parse_async_arrow_fn();
let start = cur_pos!();
// Try to parse conditional expression.
let cond = self.parse_cond_expr()?;
@ -60,6 +59,20 @@ impl<'a, I: Input> Parser<'a, I> {
match cur!() {
Ok(&AssignOp(op)) => {
let left = if op == Assign {
self.reparse_expr_as_pat(PatType::AssignPat, cond)
.map(PatOrExpr::Pat)?
} else {
//It is an early Reference Error if IsValidSimpleAssignmentTarget of
// LeftHandSideExpression is false.
if !cond.is_valid_simple_assignment_target(self.ctx().strict) {
syntax_error!(cond.span, SyntaxError::NotSimpleAssign)
}
// TODO
PatOrExpr::Expr(cond)
};
bump!();
let right = self.parse_assignment_expr()?;
Ok(box Expr {
@ -67,7 +80,7 @@ impl<'a, I: Input> Parser<'a, I> {
node: ExprKind::Assign(AssignExpr {
op,
// TODO:
left: PatOrExpr::Expr(cond),
left,
right,
}),
})
@ -96,6 +109,8 @@ impl<'a, I: Input> Parser<'a, I> {
/// Parse a primary expression or arrow function
fn parse_primary_expr(&mut self) -> PResult<'a, Box<Expr>> {
let _ = cur!();
let can_be_arrow = self.state
.potential_arrow_start
.map(|s| s == cur_pos!())
@ -134,7 +149,7 @@ impl<'a, I: Input> Parser<'a, I> {
// Literals
if {
match *cur!()? {
tok!("null") | tok!("true") | tok!("false") | Num(..) | Str(..) => true,
tok!("null") | tok!("true") | tok!("false") | Num(..) | Str { .. } => true,
_ => false,
}
} {
@ -182,9 +197,7 @@ impl<'a, I: Input> Parser<'a, I> {
is_async: true,
is_generator: false,
}))
} else if can_be_arrow && !is!(';') && eat!("=>") {
// async is parameter
} else if can_be_arrow && !p.input.had_line_break_before_cur() && eat!("=>") {
let params = vec![id.into()];
let body = p.parse_fn_body(false, false)?;
Ok(ExprKind::Arrow(ArrowExpr {
@ -206,20 +219,17 @@ impl<'a, I: Input> Parser<'a, I> {
self.spanned(|p| {
assert_and_bump!('[');
let mut elems = vec![];
let mut comma = 1;
let mut allow_elem = true;
while !eof!() && !is!(']') {
if eat!(',') {
comma += 1;
if is!(',') || !allow_elem {
expect!(',');
elems.push(None);
allow_elem = true;
continue;
}
allow_elem = false;
// Should have at least one comma between elements.
if comma == 0 {
expect!(',');
}
elems.extend(iter::repeat(None).take(comma - 1));
comma = 0;
elems.push(p.include_in_expr(true).parse_expr_or_spread().map(Some)?);
}
@ -261,6 +271,8 @@ impl<'a, I: Input> Parser<'a, I> {
// 'NewExpression' allows new call without paren.
let callee = self.parse_member_expr_or_new_expr(is_new_expr)?;
return_if_arrow!(callee);
if !is_new_expr || is!('(') {
// Parsed with 'MemberExpression' production.
let args = self.parse_args().map(Some)?;
@ -287,8 +299,10 @@ impl<'a, I: Input> Parser<'a, I> {
let base = ExprOrSuper::Super(span!(start));
return self.parse_subscripts(base, true);
}
let obj = self.parse_primary_expr().map(ExprOrSuper::Expr)?;
self.parse_subscripts(obj, true)
let obj = self.parse_primary_expr()?;
return_if_arrow!(obj);
self.parse_subscripts(ExprOrSuper::Expr(obj), true)
}
/// Parse `NewExpression`.
@ -348,6 +362,9 @@ impl<'a, I: Input> Parser<'a, I> {
// we parse arrow function at here, to handle it efficiently.
if is!("=>") {
if self.input.had_line_break_before_cur() {
syntax_error!(span!(start), SyntaxError::LineBreakBeforeArrow);
}
if !can_be_arrow {
unexpected!();
}
@ -369,12 +386,21 @@ impl<'a, I: Input> Parser<'a, I> {
// It was not head of arrow function.
// ParenthesizedExpression cannot contain spread.
if expr_or_spreads.len() == 0 {
syntax_error!(SyntaxError::EmptyParenExpr);
} else if expr_or_spreads.len() == 1 {
syntax_error!(
Span::new(start, last_pos!(), Default::default()),
SyntaxError::EmptyParenExpr
);
}
// TODO: Verify that invalid expression like {a = 1} does not exists.
// ParenthesizedExpression cannot contain spread.
if expr_or_spreads.len() == 1 {
let expr = match expr_or_spreads.into_iter().next().unwrap() {
ExprOrSpread::Spread(_) => syntax_error!(SyntaxError::SpreadInParenExpr),
ExprOrSpread::Spread(expr) => {
syntax_error!(expr.span, SyntaxError::SpreadInParenExpr)
}
ExprOrSpread::Expr(expr) => expr,
};
return Ok(box Expr {
@ -387,13 +413,15 @@ impl<'a, I: Input> Parser<'a, I> {
let mut exprs = Vec::with_capacity(expr_or_spreads.len());
for expr in expr_or_spreads {
match expr {
ExprOrSpread::Spread(_) => syntax_error!(SyntaxError::SpreadInParenExpr),
ExprOrSpread::Spread(expr) => {
syntax_error!(expr.span, SyntaxError::SpreadInParenExpr)
}
ExprOrSpread::Expr(expr) => exprs.push(expr),
}
}
assert!(exprs.len() >= 2);
// span of sequence expression should not include '(' and ')'
// span of sequence expression should not include '(', ')'
let seq_expr = box Expr {
span: Span::new(
exprs.first().unwrap().span.lo(),
@ -467,6 +495,8 @@ impl<'a, I: Input> Parser<'a, I> {
obj: ExprOrSuper,
no_call: bool,
) -> PResult<'a, (Box<Expr>, bool)> {
let _ = cur!();
let start = cur_pos!();
// member expression
// $obj.name
@ -602,13 +632,15 @@ impl<'a, I: Input> Parser<'a, I> {
fn parse_yield_expr(&mut self) -> PResult<'a, Box<Expr>> {
self.spanned(|p| {
assert_and_bump!("yield");
assert!(p.ctx.in_generator);
assert!(p.ctx().in_generator);
//TODO
// Spec says
// YieldExpression cannot be used within the FormalParameters of a generator
// function because any expressions that are part of FormalParameters are
// evaluated before the resulting generator object is in a resumable state.
if p.ctx().in_parameters {
syntax_error!(p.input.prev_span(), SyntaxError::YieldParamInGen)
}
if is!(';') || (!is!('*') && !cur!().map(Token::starts_expr).unwrap_or(true)) {
Ok(ExprKind::Yield(YieldExpr {
@ -639,9 +671,8 @@ impl<'a, I: Input> Parser<'a, I> {
bump!();
Lit::Bool(v)
}
Str(..) => match bump!() {
//FIXME
Str(s, _) => Lit::Str(s),
Str { .. } => match bump!() {
Str { value, has_escape } => Lit::Str { value, has_escape },
_ => unreachable!(),
},
Num(..) => match bump!() {

View File

@ -1,6 +1,6 @@
//! Parser for unary operations and binary operations.
use super::*;
use super::util::ExprExt;
#[parser]
impl<'a, I: Input> Parser<'a, I> {
@ -29,7 +29,7 @@ impl<'a, I: Input> Parser<'a, I> {
Err(..) => return Ok(left),
}
} {
&Word(Keyword(In)) if self.ctx.include_in_expr => op!("in"),
&Word(Keyword(In)) if self.ctx().include_in_expr => op!("in"),
&Word(Keyword(InstanceOf)) => op!("instanceof"),
&BinOp(op) => op.into(),
_ => {
@ -111,6 +111,10 @@ impl<'a, I: Input> Parser<'a, I> {
};
let arg = self.parse_unary_expr()?;
if !arg.is_valid_simple_assignment_target(self.ctx().strict) {
// This is an early ReferenceError
syntax_error!(arg.span, SyntaxError::NotSimpleAssign)
}
return Ok(box Expr {
span: span!(start),
node: ExprKind::Update(UpdateExpr {
@ -140,7 +144,7 @@ impl<'a, I: Input> Parser<'a, I> {
});
}
if self.ctx.in_async && is!("await") {
if self.ctx().in_async && is!("await") {
return self.parse_await_expr();
}
@ -154,6 +158,11 @@ impl<'a, I: Input> Parser<'a, I> {
}
if is_one_of!("++", "--") {
if !expr.is_valid_simple_assignment_target(self.ctx().strict) {
// This is an early ReferenceError
syntax_error!(expr.span, SyntaxError::NotSimpleAssign)
}
let start = cur_pos!();
let op = if bump!() == PlusPlus {
op!("++")
@ -176,7 +185,7 @@ impl<'a, I: Input> Parser<'a, I> {
fn parse_await_expr(&mut self) -> PResult<'a, Box<Expr>> {
self.spanned(|p| {
assert_and_bump!("await");
assert!(p.ctx.in_async);
assert!(p.ctx().in_async);
if is!('*') {
syntax_error!(SyntaxError::AwaitStar);

View File

@ -40,17 +40,37 @@ fn expr(s: &'static str) -> Box<Expr> {
#[allow(non_upper_case_globals)]
const span: Span = DUMMY_SP;
#[test]
fn arrow_assign() {
    // `a = b => false` must parse as an assignment whose left-hand side is
    // the pattern `a` and whose right-hand side is the arrow `b => false`
    // (i.e. `=` binds looser than `=>`).
    assert_eq_ignore_span!(
        expr("a = b => false"),
        box Expr {
            span,
            node: ExprKind::Assign(AssignExpr {
                left: PatOrExpr::Pat(
                    Ident {
                        span,
                        sym: "a".into(),
                    }.into()
                ),
                op: op!("="),
                right: expr("b => false"),
            }),
        }
    );
}
#[test]
fn new_expr_should_not_eat_too_much() {
assert_eq_ignore_span!(
new_expr("new Date().toString()"),
box Expr {
span: Default::default(),
span,
node: ExprKind::Member(MemberExpr {
obj: ExprOrSuper::Expr(member_expr("new Date()")),
prop: Ident {
sym: "toString".into(),
span: Default::default(),
span,
}.into(),
computed: false,
}),
@ -62,7 +82,7 @@ fn lhs_expr_as_new_expr_prod() {
assert_eq_ignore_span!(
lhs("new Date.toString()"),
box Expr {
span: Default::default(),
span,
node: ExprKind::New(NewExpr {
callee: lhs("Date.toString"),
args: Some(vec![]),
@ -76,7 +96,7 @@ fn lhs_expr_as_call() {
assert_eq_ignore_span!(
lhs("new Date.toString()()"),
box Expr {
span: Default::default(),
span,
node: ExprKind::Call(CallExpr {
callee: ExprOrSuper::Expr(lhs("new Date.toString()")),
args: vec![],

View File

@ -0,0 +1,57 @@
use super::*;
use swc_common::{FoldWith, Folder, Span};
impl<'a, I: Input> Parser<'a, I> {
pub(in parser) fn verify_expr(&self, expr: Box<Expr>) -> PResult<'a, Box<Expr>> {
let mut v = Verifier { errors: vec![] };
let expr = v.fold(expr);
if v.errors.is_empty() {
return Ok(expr);
}
//TODO
let (span, error) = v.errors.into_iter().next().unwrap();
syntax_error!(self, span, error)
}
}
/// Walks an expression tree after parsing, collecting `(Span, SyntaxError)`
/// pairs for constructs that are only detectable once the tree is built
/// (e.g. `{ a = 1 }` outside of a destructuring pattern).
pub(super) struct Verifier {
    // Errors found during the walk; consumed by `verify_expr`.
    pub errors: Vec<(Span, SyntaxError)>,
}
impl Folder<ExprKind> for Verifier {
    /// Folds child expressions, but leaves function and arrow expressions
    /// untouched — the walk does not descend into their bodies.
    fn fold(&mut self, e: ExprKind) -> ExprKind {
        match e {
            ExprKind::Fn(..) | ExprKind::Arrow(..) => e,
            other => other.fold_children(self),
        }
    }
}
impl Folder<ArrayLit> for Verifier {
    /// Drops trailing holes (`None` elements) from the array literal, then
    /// folds the remaining children.
    fn fold(&mut self, mut arr: ArrayLit) -> ArrayLit {
        let trailing_holes = arr.elems.iter().rev().take_while(|e| e.is_none()).count();
        let kept = arr.elems.len() - trailing_holes;

        arr.elems.truncate(kept);
        arr.elems.shrink_to_fit();

        arr.fold_children(self)
    }
}
impl Folder<Prop> for Verifier {
    /// An `Assign` property (e.g. `{ a = 1 }`) is invalid in an expression
    /// context; record the error and stop descending into that property.
    fn fold(&mut self, p: Prop) -> Prop {
        if let PropKind::Assign { .. } = p.node {
            self.errors.push((p.span, SyntaxError::Unexpected));
            return p;
        }
        p.fold_children(self)
    }
}

View File

@ -6,14 +6,14 @@ use super::*;
impl<'a, I: Input> Parser<'a, I> {
/// IdentifierReference
pub(super) fn parse_ident_ref(&mut self) -> PResult<'a, Ident> {
let ctx = self.ctx;
let ctx = self.ctx();
self.parse_ident(!ctx.in_generator, !ctx.in_async)
}
/// LabelIdentifier
pub(super) fn parse_label_ident(&mut self) -> PResult<'a, Ident> {
let ctx = self.ctx;
let ctx = self.ctx();
self.parse_ident(!ctx.in_generator, !ctx.in_async)
}
@ -39,7 +39,7 @@ impl<'a, I: Input> Parser<'a, I> {
/// In strict mode, "yield" is SyntaxError if matched.
pub(super) fn parse_ident(&mut self, incl_yield: bool, incl_await: bool) -> PResult<'a, Ident> {
self.spanned(|p| {
let strict = p.session.cfg.strict;
let strict = p.ctx().strict;
let w = match cur!() {
Ok(&Word(..)) => match bump!() {
Word(w) => w,
@ -52,37 +52,43 @@ impl<'a, I: Input> Parser<'a, I> {
// It is a Syntax Error if this phrase is contained in strict mode code and the
// StringValue of IdentifierName is: "implements", "interface", "let",
// "package", "private", "protected", "public", "static", or "yield".
if strict {
match w {
Keyword(Yield)
| Ident(js_word!("static"))
| Ident(js_word!("implements"))
| Ident(js_word!("interface"))
| Ident(js_word!("let"))
| Ident(js_word!("package"))
| Ident(js_word!("private"))
| Ident(js_word!("protected"))
| Ident(js_word!("public")) => syntax_error!(SyntaxError::InvalidIdentInStrict),
_ => {}
match w {
Ident(js_word!("enum")) => {
syntax_error!(p.input.prev_span(), SyntaxError::InvalidIdentInStrict)
}
Keyword(Yield)
| Ident(js_word!("static"))
| Ident(js_word!("implements"))
| Ident(js_word!("interface"))
| Ident(js_word!("let"))
| Ident(js_word!("package"))
| Ident(js_word!("private"))
| Ident(js_word!("protected"))
| Ident(js_word!("public")) if strict =>
{
syntax_error!(p.input.prev_span(), SyntaxError::InvalidIdentInStrict)
}
_ => {}
}
//TODO
// Spec:
// It is a Syntax Error if the goal symbol of the syntactic grammar is Module
// and the StringValue of IdentifierName is "await".
//TODO
// Spec:
// It is a Syntax Error if StringValue of IdentifierName is the same String
// value as the StringValue of any ReservedWord except for yield or await.
match w {
// It is a Syntax Error if the goal symbol of the syntactic grammar is Module
// and the StringValue of IdentifierName is "await".
Keyword(Await) if p.ctx().module => {
syntax_error!(p.input.prev_span(), SyntaxError::ExpectedIdent)
}
Keyword(Let) => Ok(w.into()),
Ident(ident) => Ok(ident),
Keyword(Yield) if incl_yield => Ok(js_word!("yield")),
Keyword(Await) if incl_await => Ok(js_word!("await")),
Keyword(..) | Null | True | False => syntax_error!(SyntaxError::ExpectedIdent),
Keyword(..) | Null | True | False => {
syntax_error!(p.input.prev_span(), SyntaxError::ExpectedIdent)
}
}
})
}

View File

@ -1,4 +1,5 @@
//! Note: this module requires `#![feature(nll)]`.
use Context;
use lexer::{Input, Lexer};
use swc_common::{BytePos, Span, DUMMY_SP};
use token::*;
@ -7,7 +8,7 @@ use token::*;
pub(super) struct ParserInput<'a, I: Input> {
iter: Lexer<'a, I>,
/// Span of the previous token.
last_span: Span,
prev_span: Span,
cur: Option<TokenAndSpan>,
/// Peeked token
next: Option<TokenAndSpan>,
@ -18,16 +19,16 @@ impl<'a, I: Input> ParserInput<'a, I> {
ParserInput {
iter: lexer,
cur: None,
last_span: DUMMY_SP,
prev_span: DUMMY_SP,
next: None,
}
}
fn bump_inner(&mut self) -> Option<Token> {
let prev = self.cur.take();
self.last_span = match prev {
self.prev_span = match prev {
Some(TokenAndSpan { span, .. }) => span,
_ => self.last_span,
_ => self.prev_span,
};
// If we have peeked a token, take it instead of calling lexer.next()
@ -42,9 +43,17 @@ impl<'a, I: Input> ParserInput<'a, I> {
/// Returns current token.
pub fn bump(&mut self) -> Token {
self.bump_inner().expect(
"Current token is `None`. Parser should not call bump()without knowing current token",
)
let prev = match self.cur.take() {
Some(t) => t,
None => unreachable!(
"Current token is `None`. Parser should not call bump()without knowing current \
token"
),
};
self.prev_span = prev.span;
prev.token
}
pub fn knows_cur(&self) -> bool {
@ -64,12 +73,14 @@ impl<'a, I: Input> ParserInput<'a, I> {
self.next.as_ref().map(|ts| &ts.token)
}
/// This returns true on eof.
pub fn had_line_break_before_cur(&self) -> bool {
/// Returns true on eof.
pub fn had_line_break_before_cur(&mut self) -> bool {
self.cur();
self.cur
.as_ref()
.map(|it| it.had_line_break)
.unwrap_or(true)
.unwrap_or_else(|| true)
}
/// This returns true on eof.
@ -117,28 +128,41 @@ impl<'a, I: Input> ParserInput<'a, I> {
pub fn eat_keyword(&mut self, kwd: Keyword) -> bool {
self.eat(&Word(Keyword(kwd)))
}
/// Returns start of current token.
pub fn cur_pos(&self) -> BytePos {
pub fn cur_pos(&mut self) -> BytePos {
let _ = self.cur();
self.cur
.as_ref()
.map(|item| item.span.lo())
.unwrap_or_else(|| self.last_pos())
.unwrap_or_else(|| {
// eof
self.last_pos()
})
}
pub fn cur_span(&self) -> Span {
self.cur
.as_ref()
.map(|item| item.span)
.unwrap_or(self.last_span)
.unwrap_or(self.prev_span)
}
/// Returns last byte position of previous token.
pub fn last_pos(&self) -> BytePos {
self.last_span.hi()
self.prev_span.hi()
}
/// Returns span of the previous token.
pub const fn last_span(&self) -> Span {
self.last_span
pub const fn prev_span(&self) -> Span {
self.prev_span
}
pub const fn get_ctx(&self) -> Context {
self.iter.ctx
}
pub fn set_ctx(&mut self, ctx: Context) {
self.iter.ctx = ctx;
}
}

View File

@ -1,8 +1,7 @@
macro_rules! unexpected {
($p:expr) => {{
let pos = cur_pos!($p);
let cur = cur!($p)?;
unimplemented!("unexpected token: {:?} at {:?}", cur, pos);
// unimplemented!("Unexpected token")
syntax_error!($p, $p.input.cur_span(), SyntaxError::Unexpected)
}};
}
@ -11,17 +10,17 @@ macro_rules! unexpected {
/// Returns bool.
macro_rules! is {
($p:expr, BindingIdent) => {{
let ctx = $p.ctx();
match cur!($p) {
// TODO: Exclude some keywords
Ok(&Word(ref w)) => !w.is_reserved_word($p.session.cfg.strict),
Ok(&Word(ref w)) => !ctx.is_reserved_word(&w.clone().into()),
_ => false,
}
}};
($p:expr, IdentRef) => {{
let ctx = $p.ctx();
match cur!($p) {
// TODO: Exclude some keywords
Ok(&Word(ref w)) => !w.is_reserved_word($p.session.cfg.strict),
Ok(&Word(ref w)) => !ctx.is_reserved_word(&w.clone().into()),
_ => false,
}
}};
@ -115,7 +114,7 @@ macro_rules! expect {
($p:expr, $t:tt) => {{
const TOKEN: &Token = &token_including_semi!($t);
if !eat!($p, $t) {
syntax_error!($p, SyntaxError::Expected(TOKEN))
syntax_error!($p, $p.input.cur_span(), SyntaxError::Expected(TOKEN))
}
}};
}
@ -124,7 +123,7 @@ macro_rules! expect_exact {
($p:expr, $t:tt) => {{
const TOKEN: &Token = &token_including_semi!($t);
if !eat_exact!($p, $t) {
syntax_error!($p, SyntaxError::Expected(TOKEN))
syntax_error!($p, $p.input.cur_span(), SyntaxError::Expected(TOKEN))
}
}};
}
@ -133,6 +132,24 @@ macro_rules! cur {
($p:expr) => {{
let pos = $p.input.last_pos();
let last = Span::new(pos, pos, Default::default());
let is_err_token = match $p.input.cur() {
Some(&$crate::token::Token::Error(..)) => { true },
_ => false,
};
if is_err_token {
match $p.input.bump() {
$crate::token::Token::Error(e) => {
let err: Result<!, _> = Err($crate::error::ErrorToDiag {
handler: &$p.session.handler,
span: e.span,
error: e.error,
});
err?
}
_ => unreachable!(),
}
}
match $p.input.cur() {
Some(c) => Ok(c),
None => Err($crate::error::Eof {
@ -183,20 +200,24 @@ macro_rules! cur_pos {
}
macro_rules! last_pos {
($p:expr) => { $p.input.last_span().hi() };
($p:expr) => { $p.input.prev_span().hi() };
}
macro_rules! return_if_arrow {
($p:expr, $expr:expr) => {{
let is_cur = match $p.state.potential_arrow_start {
Some(start) => $expr.span.lo() == start,
None => false
};
if is_cur {
// FIXME:
//
//
// let is_cur = match $p.state.potential_arrow_start {
// Some(start) => $expr.span.lo() == start,
// None => false
// };
// if is_cur {
match $expr.node {
ExprKind::Arrow{..} => return Ok($expr),
_ => {},
}
}
// }
}};
}

View File

@ -2,7 +2,7 @@
#![deny(non_snake_case)]
use self::input::ParserInput;
use self::util::ParseObject;
use Session;
use {Context, Session};
use ast::*;
use error::SyntaxError;
use lexer::Input;
@ -30,20 +30,9 @@ pub type PResult<'a, T> = Result<T, Diagnostic<'a>>;
/// EcmaScript parser.
pub struct Parser<'a, I: Input> {
session: Session<'a>,
ctx: Context,
state: State,
input: ParserInput<'a, I>,
}
#[derive(Debug, Clone, Copy, Default)]
struct Context {
include_in_expr: bool,
/// If true, await expression is parsed, and "await" is treated as a
/// keyword.
in_async: bool,
/// If true, yield expression is parsed, and "yield" is treated as a
/// keyword.
in_generator: bool,
}
#[derive(Debug, Default)]
struct State {
@ -57,27 +46,39 @@ impl<'a, I: Input> Parser<'a, I> {
Parser {
session,
input: ParserInput::new(Lexer::new(session, input)),
ctx: Default::default(),
state: Default::default(),
}
}
#[parser]
pub fn parse_script(&mut self) -> PResult<'a, Vec<Stmt>> {
self.session.cfg.module = false;
let ctx = Context {
module: false,
..self.ctx()
};
self.set_ctx(ctx);
self.parse_block_body(true, None)
self.parse_block_body(true, true, None)
}
#[parser]
pub fn parse_module(&mut self) -> PResult<'a, Module> {
//TODO: parse() -> PResult<'a, Program>
self.session.cfg.module = true;
self.session.cfg.strict = true;
let ctx = Context {
module: true,
strict: true,
..self.ctx()
};
// module code is always in strict mode
self.set_ctx(ctx);
self.parse_block_body(true, None)
self.parse_block_body(true, true, None)
.map(|body| Module { body })
}
const fn ctx(&self) -> Context {
self.input.get_ctx()
}
}
#[cfg(test)]

View File

@ -38,8 +38,8 @@ impl<'a, I: Input> Parser<'a, I> {
let start = cur_pos!();
let v = match *cur!()? {
Str(_, _) => match bump!() {
Str(s, _) => PropName::Str(s),
Str { .. } => match bump!() {
Str { value, .. } => PropName::Str(value),
_ => unreachable!(),
},
Num(_) => match bump!() {
@ -119,7 +119,7 @@ impl<'a, I: Input> ParseObject<'a, Box<Expr>> for Parser<'a, I> {
});
}
let mut ident = match key {
let ident = match key {
PropName::Ident(ident) => ident,
_ => unexpected!(),
};
@ -127,18 +127,9 @@ impl<'a, I: Input> ParseObject<'a, Box<Expr>> for Parser<'a, I> {
// `ident` from parse_prop_name is parsed as 'IdentifierName'
// It means we should check for invalid expressions like { for, }
if is_one_of!('=', ',', '}') {
let is_reserved_word = {
// FIXME: Use extension trait instead of this.
let word = Word::from(ident.sym);
let r = word.is_reserved_word(self.session.cfg.strict);
ident = Ident {
sym: word.into(),
..ident
};
r
};
let is_reserved_word = { self.ctx().is_reserved_word(&ident.sym) };
if is_reserved_word {
syntax_error!(SyntaxError::ReservedWordInObjShorthandOrPat);
syntax_error!(ident.span, SyntaxError::ReservedWordInObjShorthandOrPat);
}
if eat!('=') {
@ -214,6 +205,7 @@ impl<'a, I: Input> ParseObject<'a, Pat> for Parser<'a, I> {
let key = self.parse_prop_name()?;
if eat!(':') {
let value = box self.parse_binding_element()?;
return Ok(ObjectPatProp::KeyValue { key, value });
}
let key = match key {
@ -226,6 +218,10 @@ impl<'a, I: Input> ParseObject<'a, Pat> for Parser<'a, I> {
.parse_assignment_expr()
.map(Some)?
} else {
if self.ctx().is_reserved_word(&key.sym) {
syntax_error!(key.span, SyntaxError::ReservedWordInObjShorthandOrPat);
}
None
};

View File

@ -1,5 +1,6 @@
//! 13.3.3 Destructuring Binding Patterns
use super::*;
use super::util::ExprExt;
use std::iter;
#[parser]
@ -18,11 +19,17 @@ impl<'a, I: Input> Parser<'a, I> {
pub(super) fn parse_binding_ident(&mut self) -> PResult<'a, Ident> {
// "yield" and "await" is **lexically** accepted.
let ident = self.parse_ident(true, true)?;
if self.session.cfg.strict {
if self.ctx().strict {
if &*ident.sym == "arguments" || &*ident.sym == "eval" {
syntax_error!(SyntaxError::EvalAndArgumentsInStrict);
}
}
if self.ctx().in_async && ident.sym == js_word!("await") {
syntax_error!(ident.span, SyntaxError::ExpectedIdent)
}
if self.ctx().in_generator && ident.sym == js_word!("yield") {
syntax_error!(ident.span, SyntaxError::ExpectedIdent)
}
Ok(ident)
}
@ -32,12 +39,12 @@ impl<'a, I: Input> Parser<'a, I> {
tok!("yield") | Word(..) => self.parse_binding_ident().map(Pat::from),
tok!('[') => self.parse_array_binding_pat(),
tok!('{') => self.parse_object(),
tok!('(') => {
bump!();
let pat = self.parse_binding_pat_or_ident()?;
expect!(')');
Ok(pat)
}
// tok!('(') => {
// bump!();
// let pat = self.parse_binding_pat_or_ident()?;
// expect!(')');
// Ok(pat)
// }
_ => unexpected!(),
}
}
@ -85,6 +92,7 @@ impl<'a, I: Input> Parser<'a, I> {
node: PatKind::Rest(box pat),
};
elems.push(Some(pat));
// Trailing comma isn't allowed
break;
} else {
elems.push(p.parse_binding_element().map(Some)?);
@ -142,31 +150,112 @@ impl<'a, I: Input> Parser<'a, I> {
self.parse_formal_params()
}
}
///
/// Kind of pattern currently being (re)parsed.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum PatType {
    /// BindingPattern
    BindingPat,
    /// BindingElement
    BindingElement,
    /// AssignmentPattern
    AssignPat,
    /// AssignmentElement
    AssignElement,
}

impl PatType {
    /// Returns the element-level counterpart of this pattern type:
    /// binding kinds map to `BindingElement`, assignment kinds to
    /// `AssignElement`. Element kinds map to themselves.
    pub fn element(self) -> Self {
        match self {
            PatType::BindingPat => PatType::BindingElement,
            PatType::BindingElement => PatType::BindingElement,
            PatType::AssignPat => PatType::AssignElement,
            PatType::AssignElement => PatType::AssignElement,
        }
    }
}
#[parser]
impl<'a, I: Input> Parser<'a, I> {
/// This does not return 'rest' pattern because non-last parameter cannot be
/// rest.
pub(super) fn reparse_expr_as_pat(&mut self, box expr: Box<Expr>) -> PResult<'a, Pat> {
pub(super) fn reparse_expr_as_pat(
&mut self,
pat_ty: PatType,
box expr: Box<Expr>,
) -> PResult<'a, Pat> {
let span = expr.span;
match expr.node {
// syntatically valid left-hand-side expressions.
ExprKind::Member(..) | ExprKind::Call(..) | ExprKind::New(..) => {
return Ok(Pat {
span,
node: PatKind::Expr(box expr),
})
}
if pat_ty == PatType::AssignPat {
match expr.node {
ExprKind::Object(..) | ExprKind::Array(..) => {
// It is a Syntax Error if LeftHandSideExpression is either an
// ObjectLiteral or an ArrayLiteral and LeftHandSideExpression cannot
// be reparsed as an AssignmentPattern.
ExprKind::Paren(inner) => {
// FIXME: Check if this is correct?
let inner_pat = self.reparse_expr_as_pat(inner)?;
return Ok(Pat {
span,
node: inner_pat.node,
});
}
_ => {
// It is an early Reference Error if LeftHandSideExpression is neither
// an ObjectLiteral nor an ArrayLiteral and
// IsValidSimpleAssignmentTarget of LeftHandSideExpression is false.
if !expr.is_valid_simple_assignment_target(self.ctx().strict) {
syntax_error!(span, SyntaxError::NotSimpleAssign)
}
match expr.node {
// It is a Syntax Error if the LeftHandSideExpression is
// CoverParenthesizedExpressionAndArrowParameterList:(Expression) and
// Expression derives a phrase that would produce a Syntax Error according
// to these rules if that phrase were substituted for
// LeftHandSideExpression. This rule is recursively applied.
ExprKind::Paren(expr) => return self.reparse_expr_as_pat(pat_ty, expr),
ExprKind::Ident(i) => return Ok(i.into()),
_ => {
return Ok(Pat {
span,
node: PatKind::Expr(box expr),
});
}
}
}
}
}
// AssignmentElement:
// DestructuringAssignmentTarget Initializer[+In]?
//
// DestructuringAssignmentTarget:
// LeftHandSideExpression
if pat_ty == PatType::AssignElement {
match expr.node {
ExprKind::Array(..) | ExprKind::Object(..) => {}
ExprKind::Member(..)
| ExprKind::Call(..)
| ExprKind::New(..)
| ExprKind::Lit(..)
| ExprKind::Ident(..)
| ExprKind::Fn(..)
| ExprKind::Class(..)
| ExprKind::Tpl(..) => {
if !expr.node
.is_valid_simple_assignment_target(self.ctx().strict)
{
syntax_error!(span, SyntaxError::NotSimpleAssign)
}
match expr.node {
ExprKind::Ident(i) => return Ok(i.into()),
_ => {
return Ok(Pat {
span,
node: PatKind::Expr(box expr),
});
}
}
}
// It's special because of the optional initializer
ExprKind::Assign(..) => {}
_ => syntax_error!(span, SyntaxError::InvalidPat),
}
}
match expr.node {
ExprKind::Paren(inner) => syntax_error!(span, SyntaxError::InvalidPat),
ExprKind::Assign(AssignExpr {
left,
op: Assign,
@ -176,7 +265,7 @@ impl<'a, I: Input> Parser<'a, I> {
span,
node: PatKind::Assign {
left: match left {
PatOrExpr::Expr(left) => box self.reparse_expr_as_pat(left)?,
PatOrExpr::Expr(left) => box self.reparse_expr_as_pat(pat_ty, left)?,
PatOrExpr::Pat(left) => box left,
},
right,
@ -196,18 +285,14 @@ impl<'a, I: Input> Parser<'a, I> {
}),
PropKind::KeyValue { key, value } => Ok(ObjectPatProp::KeyValue {
key,
value: box self.reparse_expr_as_pat(value)?,
value: box self.reparse_expr_as_pat(pat_ty.element(), value)?,
}),
PropKind::Assign { key, value } => Ok(ObjectPatProp::Assign {
key,
value: Some(value),
}),
_ => unimplemented!(
"error reporting: object pattern cannot contain method property: \
{:?}",
prop
),
_ => syntax_error!(prop.span, SyntaxError::InvalidPat),
})
.collect::<PResult<'a, _>>()?),
});
@ -221,48 +306,83 @@ impl<'a, I: Input> Parser<'a, I> {
});
}
let len = exprs.len();
let mut params = Vec::with_capacity(exprs.len());
// Trailing comma may exist. We should remove those commas.
let count_of_trailing_comma =
exprs.iter().rev().take_while(|e| e.is_none()).count();
for expr in exprs.drain(..len - 1) {
let len = exprs.len();
let mut params = Vec::with_capacity(exprs.len() - count_of_trailing_comma);
// Comma or other pattern cannot follow a rest pattern.
let idx_of_rest_not_allowed = if count_of_trailing_comma == 0 {
len - 1
} else {
// last element is comma, so rest is not allowed for every pattern element.
len - count_of_trailing_comma
};
for expr in exprs.drain(..idx_of_rest_not_allowed) {
match expr {
Some(ExprOrSpread::Spread(expr)) => {
syntax_error!(SyntaxError::NonLastRestParam)
syntax_error!(expr.span, SyntaxError::NonLastRestParam)
}
Some(ExprOrSpread::Expr(expr)) => {
params.push(self.reparse_expr_as_pat(expr).map(Some)?)
params.push(self.reparse_expr_as_pat(pat_ty.element(), expr).map(Some)?)
}
None => params.push(None),
}
}
assert_eq!(exprs.len(), 1);
let expr = exprs.into_iter().next().unwrap();
let last = match expr {
// Rest
Some(ExprOrSpread::Spread(expr)) => {
// FIXME: Span should start from ...
let span = expr.span;
self.reparse_expr_as_pat(expr)
.map(|pat| Pat {
span,
node: PatKind::Rest(box pat),
})
.map(Some)?
}
Some(ExprOrSpread::Expr(expr)) => self.reparse_expr_as_pat(expr).map(Some)?,
// TODO: syntax error if last element is an elision and ...rest exists.
None => None,
};
params.push(last);
if count_of_trailing_comma == 0 {
let expr = exprs.into_iter().next().unwrap();
let last = match expr {
// Rest
Some(ExprOrSpread::Spread(expr)) => {
// FIXME: Span should start from ...
let span = expr.span;
// TODO: is BindingPat correct?
self.reparse_expr_as_pat(pat_ty.element(), expr)
.map(|pat| Pat {
span,
node: PatKind::Rest(box pat),
})
.map(Some)?
}
Some(ExprOrSpread::Expr(expr)) => {
// TODO: is BindingPat correct?
self.reparse_expr_as_pat(pat_ty.element(), expr).map(Some)?
}
// TODO: syntax error if last element is an elision and ...rest exists.
None => None,
};
params.push(last);
}
return Ok(Pat {
span,
node: PatKind::Array(params),
});
}
_ => unimplemented!("reparse_expr_as_pat: {:?}", expr),
// Invalid patterns.
// Note that assignment expression with '=' is valid, and handled above.
ExprKind::Lit(..) | ExprKind::Member(..) | ExprKind::Assign(..) => {
syntax_error!(span, SyntaxError::InvalidPat);
}
ExprKind::Yield(..) if self.ctx().in_generator => {
syntax_error!(span, SyntaxError::YieldParamInGen);
}
_ => {
// syntax_error!(span, SyntaxError::InvalidPat)
unimplemented!(
"reparse_expr_as_pat, pat_ty = {:?}, expr = {:?}",
pat_ty,
expr
)
}
}
}
@ -270,6 +390,8 @@ impl<'a, I: Input> Parser<'a, I> {
&mut self,
mut exprs: Vec<ExprOrSpread>,
) -> PResult<'a, Vec<Pat>> {
let pat_ty = PatType::BindingPat;
let len = exprs.len();
if len == 0 {
return Ok(vec![]);
@ -279,8 +401,10 @@ impl<'a, I: Input> Parser<'a, I> {
for expr in exprs.drain(..len - 1) {
match expr {
ExprOrSpread::Spread(expr) => syntax_error!(SyntaxError::NonLastRestParam),
ExprOrSpread::Expr(expr) => params.push(self.reparse_expr_as_pat(expr)?),
ExprOrSpread::Spread(expr) => {
syntax_error!(expr.span, SyntaxError::NonLastRestParam)
}
ExprOrSpread::Expr(expr) => params.push(self.reparse_expr_as_pat(pat_ty, expr)?),
}
}
@ -291,12 +415,12 @@ impl<'a, I: Input> Parser<'a, I> {
ExprOrSpread::Spread(expr) => {
let span = expr.span; //TODO
self.reparse_expr_as_pat(expr).map(|pat| Pat {
self.reparse_expr_as_pat(pat_ty, expr).map(|pat| Pat {
span,
node: PatKind::Rest(box pat),
})?
}
ExprOrSpread::Expr(expr) => self.reparse_expr_as_pat(expr)?,
ExprOrSpread::Expr(expr) => self.reparse_expr_as_pat(pat_ty, expr)?,
};
params.push(last);

View File

@ -1,4 +1,5 @@
use super::*;
use super::pat::PatType;
use swc_macros::ast_node;
mod module_item;
@ -7,19 +8,41 @@ mod module_item;
impl<'a, I: Input> Parser<'a, I> {
pub(super) fn parse_block_body<Type>(
&mut self,
mut allow_directives: bool,
top_level: bool,
end: Option<&Token>,
) -> PResult<'a, Vec<Type>>
where
Self: StmtLikeParser<'a, Type>,
Type: From<Stmt>,
Type: IsDirective + From<Stmt>,
{
let old_ctx = self.ctx();
let mut stmts = vec![];
while {
let b = cur!().ok() != end;
b
} {
let stmt = self.parse_stmt_like(true, top_level)?;
if allow_directives {
allow_directives = false;
if stmt.is_use_strict() {
let ctx = Context {
strict: true,
..old_ctx
};
self.set_ctx(ctx);
if self.input.knows_cur() && !is_one_of!('}') {
unreachable!(
"'use strict'; directive requires parser.input.cur to be empty or \
'}}', but current token was: {:?}",
self.input.cur()
)
}
}
}
stmts.push(stmt);
}
@ -27,6 +50,8 @@ impl<'a, I: Input> Parser<'a, I> {
bump!();
}
self.set_ctx(old_ctx);
Ok(stmts)
}
@ -42,7 +67,7 @@ impl<'a, I: Input> Parser<'a, I> {
fn parse_stmt_like<Type>(&mut self, include_decl: bool, top_level: bool) -> PResult<'a, Type>
where
Self: StmtLikeParser<'a, Type>,
Type: From<Stmt>,
Type: IsDirective + From<Stmt>,
{
if <Self as StmtLikeParser<Type>>::accept_import_export() {
if is_one_of!("import", "export") {
@ -146,8 +171,9 @@ impl<'a, I: Input> Parser<'a, I> {
// 'let' can start an identifier reference.
if include_decl && is!("let") {
let strict = self.ctx().strict;
let is_keyword = match peek!() {
Ok(t) => t.follows_keyword_let(self.session.cfg.strict),
Ok(t) => t.follows_keyword_let(strict),
_ => false,
};
@ -161,7 +187,7 @@ impl<'a, I: Input> Parser<'a, I> {
}
match *cur!()? {
LBrace => return self.spanned(|p| p.parse_block().map(StmtKind::Block)),
LBrace => return self.spanned(|p| p.parse_block(false).map(StmtKind::Block)),
Semi => {
return self.spanned(|p| {
@ -199,7 +225,11 @@ impl<'a, I: Input> Parser<'a, I> {
node: ExprKind::Ident(ident),
}
}
expr => expr,
expr => {
let expr = self.verify_expr(expr)?;
expr
}
};
expect!(';');
@ -219,7 +249,7 @@ impl<'a, I: Input> Parser<'a, I> {
let cons = {
// Annex B
if !p.session.cfg.strict && is!("function") {
if !p.ctx().strict && is!("function") {
// TODO: report error?
}
box p.parse_stmt(false)?
@ -236,7 +266,9 @@ impl<'a, I: Input> Parser<'a, I> {
}
fn parse_return_stmt(&mut self) -> PResult<'a, Stmt> {
self.spanned(|p| {
let start = cur_pos!();
let stmt = self.spanned(|p| {
assert_and_bump!("return");
let arg = if is!(';') {
@ -246,7 +278,17 @@ impl<'a, I: Input> Parser<'a, I> {
};
expect!(';');
Ok(StmtKind::Return(ReturnStmt { arg }))
})
});
if !self.ctx().in_function {
match stmt {
Ok(_) => {}
Err(e) => e.emit(),
}
syntax_error!(span!(start), SyntaxError::ReturnNotAllowed)
} else {
stmt
}
}
fn parse_switch_stmt(&mut self) -> PResult<'a, Stmt> {
@ -288,7 +330,9 @@ impl<'a, I: Input> Parser<'a, I> {
}
}
}
assert_and_bump!('}');
// eof or rbrace
expect!('}');
cases.extend(cur);
Ok(StmtKind::Switch(SwitchStmt {
@ -318,11 +362,11 @@ impl<'a, I: Input> Parser<'a, I> {
self.spanned(|p| {
assert_and_bump!("try");
let block = p.parse_block()?;
let block = p.parse_block(false)?;
let handler = if eat!("catch") {
let param = p.parse_catch_param()?;
p.parse_block()
p.parse_block(false)
.map(|body| CatchClause { param, body })
.map(Some)?
} else {
@ -330,7 +374,7 @@ impl<'a, I: Input> Parser<'a, I> {
};
let finalizer = if eat!("finally") {
p.parse_block().map(Some)?
p.parse_block(false).map(Some)?
} else {
if handler.is_none() {
unexpected!();
@ -437,6 +481,10 @@ impl<'a, I: Input> Parser<'a, I> {
}
fn parse_with_stmt(&mut self) -> PResult<'a, Stmt> {
if self.ctx().strict {
syntax_error!(SyntaxError::WithInStrict)
}
self.spanned(|p| {
assert_and_bump!("with");
@ -449,11 +497,11 @@ impl<'a, I: Input> Parser<'a, I> {
})
}
pub(super) fn parse_block(&mut self) -> PResult<'a, BlockStmt> {
pub(super) fn parse_block(&mut self, allow_directives: bool) -> PResult<'a, BlockStmt> {
self.spanned(|p| {
expect!('{');
let stmts = p.parse_block_body(false, Some(&RBrace))?;
let stmts = p.parse_block_body(allow_directives, false, Some(&RBrace))?;
Ok(stmts)
})
@ -468,7 +516,21 @@ impl<'a, I: Input> Parser<'a, I> {
}
}
let body = box if is!("function") {
self.parse_fn_decl().map(Stmt::from)?
let f = self.parse_fn_decl()?;
match f {
Decl::Fn(FnDecl {
function:
Function {
span,
is_generator: true,
..
},
..
}) => syntax_error!(span, SyntaxError::LabelledGenerator),
_ => {}
}
f.into()
} else {
self.parse_stmt(false)?
};
@ -502,13 +564,19 @@ impl<'a, I: Input> Parser<'a, I> {
fn parse_for_head(&mut self) -> PResult<'a, ForHead> {
let start = cur_pos!();
let strict = self.ctx().strict;
if is_one_of!("const", "var")
|| (is!("let") && peek!()?.follows_keyword_let(self.session.cfg.strict))
{
if is_one_of!("const", "var") || (is!("let") && peek!()?.follows_keyword_let(strict)) {
let decl = self.parse_var_stmt(true)?;
if is_one_of!("of", "in") {
if decl.decls.len() != 1 {
syntax_error!(decl.span, SyntaxError::TooManyVarInForInHead);
}
if decl.decls[0].init.is_some() {
syntax_error!(decl.span, SyntaxError::VarInitializerInForInHead);
}
return self.parse_for_each_head(VarDeclOrPat::VarDecl(decl));
}
@ -524,12 +592,13 @@ impl<'a, I: Input> Parser<'a, I> {
// for (a of b)
if is_one_of!("of", "in") {
let pat = self.reparse_expr_as_pat(init)?;
let pat = self.reparse_expr_as_pat(PatType::AssignPat, init)?;
return self.parse_for_each_head(VarDeclOrPat::Pat(pat));
}
expect_exact!(';');
let init = self.verify_expr(init)?;
self.parse_normal_for_head(Some(VarDeclOrExpr::Expr(init)))
}
@ -580,7 +649,30 @@ enum ForHead {
},
}
pub(super) trait StmtLikeParser<'a, Type> {
pub(super) trait IsDirective {
fn as_ref(&self) -> Option<&StmtKind>;
fn is_use_strict(&self) -> bool {
match self.as_ref() {
Some(&StmtKind::Expr(box Expr {
node:
ExprKind::Lit(Lit::Str {
ref value,
has_escape: false,
}),
..
})) => value == "use strict",
_ => false,
}
}
}
impl IsDirective for Stmt {
fn as_ref(&self) -> Option<&StmtKind> {
Some(&self.node)
}
}
pub(super) trait StmtLikeParser<'a, Type: IsDirective> {
fn accept_import_export() -> bool;
fn handle_import_export(&mut self, top_level: bool) -> PResult<'a, Type>;
}
@ -643,21 +735,21 @@ mod tests {
#[test]
fn no_empty_without_semi() {
assert_eq_ignore_span!(
stmt("{ return 1 }"),
stmt("(function foo() { return 1 })"),
stmt(
"{
"(function foo () {
return 1
}"
})"
)
);
assert_eq_ignore_span!(
stmt("{ return 1; }"),
stmt("{ 1; }"),
Stmt {
span,
node: StmtKind::Block(BlockStmt {
span,
stmts: vec![stmt("return 1")],
stmts: vec![stmt("1")],
}),
}
);

View File

@ -8,13 +8,13 @@ impl<'a, I: Input> Parser<'a, I> {
// Handle import 'mod.js'
match *cur!()? {
Str(..) => match bump!() {
Str(src, _) => {
Str { .. } => match bump!() {
Str { value, .. } => {
expect!(';');
return Ok(ModuleDecl {
span: span!(start),
node: ModuleDeclKind::Import {
src,
src: value,
specifiers: vec![],
},
});
@ -94,7 +94,14 @@ impl<'a, I: Input> Parser<'a, I> {
});
}
// TODO: Check if it's binding ident.
// Handle difference between
//
// 'ImportedBinding'
// 'IdentifierName' as 'ImportedBinding'
if self.ctx().is_reserved_word(&orig_name.sym) {
syntax_error!(orig_name.span, SyntaxError::Unexpected)
}
let local = orig_name;
return Ok(ImportSpecifier {
span: span!(start),
@ -114,7 +121,7 @@ impl<'a, I: Input> Parser<'a, I> {
let ctx = Context {
in_async: false,
in_generator: false,
..self.ctx
..self.ctx()
};
self.with_ctx(ctx).parse_binding_ident()
}
@ -226,10 +233,10 @@ impl<'a, I: Input> Parser<'a, I> {
fn parse_from_clause_and_semi(&mut self) -> PResult<'a, String> {
expect!("from");
match *cur!()? {
Str(..) => match bump!() {
Str(src, _) => {
Str { .. } => match bump!() {
Str { value, .. } => {
expect!(';');
Ok(src)
Ok(value)
}
_ => unreachable!(),
},
@ -238,6 +245,15 @@ impl<'a, I: Input> Parser<'a, I> {
}
}
impl IsDirective for ModuleItem {
fn as_ref(&self) -> Option<&StmtKind> {
match *self {
ModuleItem::Stmt(ref s) => Some(&s.node),
_ => None,
}
}
}
#[parser]
impl<'a, I: Input> StmtLikeParser<'a, ModuleItem> for Parser<'a, I> {
fn accept_import_export() -> bool {

View File

@ -1,23 +1,96 @@
use super::*;
use swc_common::Spanned;
impl Context {
pub fn is_reserved_word(self, word: &JsWord) -> bool {
match *word {
js_word!("let") => self.strict,
js_word!("await") => self.in_async || self.strict,
js_word!("yield") => self.in_generator || self.strict,
js_word!("null")
| js_word!("true")
| js_word!("false")
| js_word!("break")
| js_word!("case")
| js_word!("catch")
| js_word!("continue")
| js_word!("debugger")
| js_word!("default")
| js_word!("do")
| js_word!("export")
| js_word!("else")
| js_word!("finally")
| js_word!("for")
| js_word!("function")
| js_word!("if")
| js_word!("return")
| js_word!("switch")
| js_word!("throw")
| js_word!("try")
| js_word!("var")
| js_word!("const")
| js_word!("while")
| js_word!("with")
| js_word!("new")
| js_word!("this")
| js_word!("super")
| js_word!("class")
| js_word!("extends")
| js_word!("import")
| js_word!("in")
| js_word!("instanceof")
| js_word!("typeof")
| js_word!("void")
| js_word!("delete") => true,
// Future reserved word
js_word!("enum") => true,
js_word!("implements")
| js_word!("package")
| js_word!("protected")
| js_word!("interface")
| js_word!("private")
| js_word!("public") if self.strict =>
{
return true
}
_ => false,
}
}
}
#[parser]
impl<'a, I: Input> Parser<'a, I> {
/// Original context is restored when returned guard is dropped.
pub(super) fn with_ctx<'w>(&'w mut self, ctx: Context) -> WithCtx<'w, 'a, I> {
let orig_ctx = self.ctx;
self.ctx = ctx;
let orig_ctx = self.ctx();
self.set_ctx(ctx);
WithCtx {
orig_ctx,
inner: self,
}
}
pub(super) fn set_ctx(&mut self, ctx: Context) {
self.input.set_ctx(ctx);
}
pub(super) fn strict_mode<'w>(&'w mut self) -> WithCtx<'w, 'a, I> {
let ctx = Context {
strict: true,
..self.ctx()
};
self.with_ctx(ctx)
}
/// Original context is restored when returned guard is dropped.
pub(super) fn include_in_expr<'w>(&'w mut self, include_in_expr: bool) -> WithCtx<'w, 'a, I> {
let ctx = Context {
include_in_expr,
..self.ctx
..self.ctx()
};
self.with_ctx(ctx)
}
@ -54,11 +127,13 @@ pub struct WithCtx<'w, 'a: 'w, I: 'w + Input> {
}
impl<'w, 'a, I: Input> Deref for WithCtx<'w, 'a, I> {
type Target = Parser<'a, I>;
#[inline(always)]
fn deref(&self) -> &Parser<'a, I> {
&self.inner
}
}
impl<'w, 'a, I: Input> DerefMut for WithCtx<'w, 'a, I> {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Parser<'a, I> {
&mut self.inner
}
@ -66,6 +141,67 @@ impl<'w, 'a, I: Input> DerefMut for WithCtx<'w, 'a, I> {
impl<'w, 'a, I: Input> Drop for WithCtx<'w, 'a, I> {
fn drop(&mut self) {
self.inner.ctx = self.orig_ctx;
self.inner.set_ctx(self.orig_ctx);
}
}
pub(super) trait ExprExt {
fn as_expr_kind(&self) -> &ExprKind;
/// "IsValidSimpleAssignmentTarget" from spec.
fn is_valid_simple_assignment_target(&self, strict: bool) -> bool {
match *self.as_expr_kind() {
ExprKind::Ident(Ident { ref sym, .. }) => {
if strict {
if &*sym == "arguments" || &*sym == "eval" {
return false;
}
}
true
}
ExprKind::This
| ExprKind::Lit(..)
| ExprKind::Array(..)
| ExprKind::Object(..)
| ExprKind::Fn(..)
| ExprKind::Class(..)
| ExprKind::Tpl(..) => false,
ExprKind::Paren(ref expr) => expr.is_valid_simple_assignment_target(strict),
ExprKind::Member(..) => true,
ExprKind::New(..) | ExprKind::Call(..) => false,
// TODO: Spec only mentions `new.target`
ExprKind::MetaProp(..) => false,
ExprKind::Update(..) => false,
ExprKind::Unary(..) | ExprKind::Await(..) => false,
ExprKind::Bin(..) => false,
ExprKind::Cond(..) => false,
ExprKind::Yield(..) | ExprKind::Arrow(..) | ExprKind::Assign(..) => false,
ExprKind::Seq(..) => false,
}
}
}
impl ExprExt for Box<Expr> {
fn as_expr_kind(&self) -> &ExprKind {
&self.node
}
}
impl ExprExt for Expr {
fn as_expr_kind(&self) -> &ExprKind {
&self.node
}
}
impl ExprExt for ExprKind {
fn as_expr_kind(&self) -> &ExprKind {
self
}
}

View File

@ -1,22 +1,21 @@
//! Ported from [babel/babylon][]
//!
//! [babel/babylon]:https://github.com/babel/babel/blob/2d378d076eb0c5fe63234a8b509886005c01d7ee/packages/babylon/src/tokenizer/types.js
pub use self::AssignOpToken::*;
pub use self::BinOpToken::*;
pub use self::Keyword::*;
pub use self::Token::*;
pub use self::Word::*;
pub use ast::AssignOp as AssignOpToken;
pub(crate) use self::AssignOpToken::*;
pub(crate) use self::BinOpToken::*;
pub(crate) use self::Keyword::*;
pub(crate) use self::Token::*;
pub(crate) use self::Word::*;
pub(crate) use ast::AssignOp as AssignOpToken;
use ast::BinaryOp;
pub use ast::Number;
pub(crate) use ast::Number;
use std::fmt::{self, Debug, Display, Formatter};
use swc_atoms::JsWord;
use swc_common::Span;
#[derive(Kind, Debug, Clone, PartialEq)]
#[kind(functions(starts_expr = "bool", before_expr = "bool"))]
pub enum Token {
pub(crate) enum Token {
/// Identifier, "null", "true", "false".
///
/// Contains `null` and ``
@ -98,9 +97,13 @@ pub enum Token {
Tilde,
/// String literal.
/// bool field is true if it's enclosed by '"' ( double quote).
#[kind(starts_expr)]
Str(String, bool),
Str {
value: String,
        /// This field exists because 'use\x20strict' is **not** a 'use strict'
        /// directive.
has_escape: bool,
},
/// Regexp literal.
#[kind(starts_expr)]
@ -110,7 +113,7 @@ pub enum Token {
#[kind(starts_expr)]
Num(Number),
Error,
Error(::error::Error),
}
#[derive(Kind, Debug, Clone, Copy, Eq, PartialEq, Hash)]
@ -181,7 +184,7 @@ impl BinOpToken {
}
#[derive(Debug, Clone, PartialEq)]
pub struct TokenAndSpan {
pub(crate) struct TokenAndSpan {
pub token: Token,
/// Had a line break before this token?
pub had_line_break: bool,
@ -316,6 +319,7 @@ impl From<Word> for JsWord {
}
}
}
impl Debug for Word {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match *self {
@ -328,31 +332,6 @@ impl Debug for Word {
}
}
impl Word {
pub(crate) fn is_reserved_word(&self, strict: bool) -> bool {
match *self {
Keyword(Let) => strict,
Keyword(Await) | Keyword(Yield) => strict,
Keyword(_) => true,
Null | True | False => true,
Ident(ref name) => {
if name == "enum" {
return true;
}
if strict {
match &**name {
"implements" | "package" | "protected" | "interface" | "private"
| "public" => return true,
_ => {}
}
}
false
}
}
}
}
/// Keywords
#[derive(Kind, Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[kind(function(before_expr = "bool", starts_expr = "bool"))]

View File

@ -14,7 +14,6 @@ use std::env;
use std::fs::File;
use std::fs::read_dir;
use std::io::{self, Read};
use std::panic::{catch_unwind, resume_unwind};
use std::path::Path;
use std::rc::Rc;
use swc_common::{FoldWith, Folder};
@ -92,7 +91,85 @@ fn add_test<F: FnOnce() + Send + 'static>(
});
}
fn unit_tests(tests: &mut Vec<TestDescAndFn>) -> Result<(), io::Error> {
fn error_tests(tests: &mut Vec<TestDescAndFn>) -> Result<(), io::Error> {
const IGNORED_ERROR_TESTS: &[&str] = &[
// Wrong tests
"0d5e450f1da8a92a.js",
"748656edbfb2d0bb.js",
"79f882da06f88c9f.js",
"92b6af54adef3624.js",
"ef2d369cccc5386c.js",
// Temporarily ignore tests for using octal escape before use strict
"147fa078a7436e0e.js",
"15a6123f6b825c38.js",
"3bc2b27a7430f818.js",
];
let root = {
let mut root = Path::new(env!("CARGO_MANIFEST_DIR")).to_path_buf();
root.push("tests");
root.push("test262-parser");
root
};
eprintln!("Loading tests from {}", root.display());
const TYPES: &[&str] = &[
"fail" /* TODO
* "early" */
];
for err_type in TYPES {
let dir = root.join(err_type);
for entry in read_dir(&dir)? {
let entry = entry?;
let file_name = entry
.path()
.strip_prefix(&dir)
.expect("failed to string prefix")
.to_str()
.unwrap()
.to_string();
let input = {
let mut buf = String::new();
File::open(entry.path())?.read_to_string(&mut buf)?;
buf
};
let ignore = IGNORED_ERROR_TESTS.contains(&&*file_name);
let module = file_name.contains("module");
let name = format!("parser::error::{}::{}", err_type, file_name);
add_test(tests, name, ignore, move || {
eprintln!(
"\n\nRunning error reporting test {}\nSource:\n{}\n",
file_name, input
);
let mut sess = TestSess::new();
// Parse source
let err = if module {
sess.parse_module(&file_name, &input)
.expect_err("should fail, but parsed as")
} else {
sess.parse_script(&file_name, &input)
.expect_err("should fail, but parsed as")
};
// Diff it.
err.emit();
});
}
}
Ok(())
}
fn identity_tests(tests: &mut Vec<TestDescAndFn>) -> Result<(), io::Error> {
let root = {
let mut root = Path::new(env!("CARGO_MANIFEST_DIR")).to_path_buf();
root.push("tests");
@ -132,42 +209,39 @@ fn unit_tests(tests: &mut Vec<TestDescAndFn>) -> Result<(), io::Error> {
let module = file_name.contains("module");
let name = format!("test262_parser_pass_{}", file_name);
let name = format!("test262-identity-{}", file_name);
add_test(tests, name, ignore, move || {
println!(
eprintln!(
"\n\n\nRunning test {}\nSource:\n{}\nExplicit:\n{}",
file_name, input, explicit
);
let res = catch_unwind(move || {
let mut sess = TestSess::new();
let mut sess = TestSess::new();
if module {
let mut p = |ty, s| {
sess.parse_module(&file_name, s).unwrap_or_else(|err| {
if module {
let mut p = |ty, s| {
sess.parse_module(&file_name, s)
.map(normalize)
.unwrap_or_else(|err| {
err.emit();
panic!("failed to parse {} code:\n{}", ty, s)
})
};
let src = p("", &input);
let expected = p("explicit ", &explicit);
assert_eq!(src, expected);
} else {
let mut p = |ty, s| {
sess.parse_script(&file_name, s).unwrap_or_else(|err| {
};
let src = p("", &input);
let expected = p("explicit ", &explicit);
assert_eq!(src, expected);
} else {
let mut p = |ty, s| {
sess.parse_script(&file_name, s)
.map(normalize)
.unwrap_or_else(|err| {
err.emit();
panic!("failed to parse {} code:\n{}", ty, s)
})
};
let src = p("", &input);
let expected = p("explicit ", &explicit);
assert_eq!(src, expected);
}
});
match res {
Ok(()) => {}
Err(err) => resume_unwind(err),
};
let src = p("", &input);
let expected = p("explicit ", &explicit);
assert_eq!(src, expected);
}
});
}
@ -203,10 +277,10 @@ impl TestSess {
}
}
fn parse_script<'a>(&'a mut self, file_name: &str, s: &str) -> PResult<'a, Vec<Stmt>> {
self.with_parser(file_name, s, |p| p.parse_script().map(normalize))
self.with_parser(file_name, s, |p| p.parse_script())
}
fn parse_module<'a>(&'a mut self, file_name: &str, s: &str) -> PResult<'a, Module> {
self.with_parser(file_name, s, |p| p.parse_module().map(normalize))
self.with_parser(file_name, s, |p| p.parse_module())
}
fn with_parser<'a, F, Ret>(&'a mut self, file_name: &str, src: &str, f: F) -> PResult<'a, Ret>
@ -251,6 +325,17 @@ impl Folder<Span> for Normalizer {
Span::default()
}
}
impl Folder<Lit> for Normalizer {
fn fold(&mut self, lit: Lit) -> Lit {
match lit {
Lit::Str { value, .. } => Lit::Str {
value,
has_escape: false,
},
_ => lit,
}
}
}
impl Folder<ExprKind> for Normalizer {
fn fold(&mut self, e: ExprKind) -> ExprKind {
match e {
@ -303,10 +388,17 @@ impl Folder<PropName> for Normalizer {
}
#[test]
// #[main]
fn main() {
fn identity() {
let args: Vec<_> = env::args().collect();
let mut tests = Vec::new();
unit_tests(&mut tests).unwrap();
identity_tests(&mut tests).unwrap();
test_main(&args, tests, Options::new());
}
#[test]
fn error() {
let args: Vec<_> = env::args().collect();
let mut tests = Vec::new();
error_tests(&mut tests).unwrap();
test_main(&args, tests, Options::new());
}