Improve error reporting of the ECMAScript parser

And drop the nll feature, because it's currently too slow.
강동윤 2018-01-22 10:45:08 +09:00
parent e75836882b
commit 0a2473fe1c
31 changed files with 1019 additions and 721 deletions
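
For orientation, a minimal sketch (not part of the commit) of how the reworked error reporting fits together, pieced from the `with_test_sess` helper and the lexer/parser tests in this diff. The function name is made up, `testing::logger` is the test-only helper shown below, and the exact meaning of the two boolean flags passed to the emitter is an assumption:

fn lex_with_reporting(src: &'static str) -> Vec<Token> {
    // Errors now flow through a shared swc_common Handler held by a Session
    // instead of a lexer-local Error enum; the emitter arguments are copied
    // from the with_test_sess helper added in this commit.
    let handler = ::swc_common::errors::Handler::with_tty_emitter(
        ::swc_common::errors::ColorConfig::Never,
        true,
        false,
        None,
    );
    let logger = ::testing::logger().new(o!("src" => src));
    let session = Session {
        handler: &handler,
        logger: &logger,
        cfg: Default::default(),
    };

    // On a lex error the iterator emits the Diagnostic through the handler
    // and yields Token::Error instead of panicking (see the state.rs hunk).
    Lexer::new(session, CharIndices(src.char_indices()))
        .map(|ts| ts.token)
        .collect()
}

Parser-side call sites follow the same pattern: methods now return PResult<'a, T>, and the updated tests call err.emit() before bailing instead of unwrapping an enum error.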

.gitignore

@ -1,5 +1,6 @@
/target/
target/
**/*.bk
core
*.log


@ -165,3 +165,9 @@ impl<'a> Diagnostic<'a> {
// self.children.extend(from.children.iter().cloned())
// }
}
impl<'a> From<Builder<'a>> for Diagnostic<'a> {
fn from(db: Builder<'a>) -> Self {
Diagnostic { db: box db }
}
}


@ -1,4 +1,7 @@
use rustc_errors::{CodeMapper, ColorConfig, Handler as RustcHandler, HandlerFlags};
use super::Diagnostic;
use Span;
use rustc_errors::{CodeMapper, ColorConfig, Diagnostic as RustcDiagnostic,
Handler as RustcHandler, HandlerFlags, Level};
use std::rc::Rc;
/// A handler deals with errors.
@ -23,6 +26,37 @@ impl Handler {
) -> Self {
RustcHandler::with_tty_emitter_and_flags(color_config, cm, flags).into()
}
pub fn note<'a, 'b>(&'a self, sp: Span, msg: &'b str) -> Diagnostic<'a> {
Diagnostic::new(self, Level::Note, msg).span(sp)
}
pub fn warn<'a, 'b>(&'a self, msg: &'b str) -> Diagnostic<'a> {
self.inner.struct_warn(msg).into()
}
pub fn error<'a, 'b>(&'a self, msg: &'b str) -> Diagnostic<'a> {
Diagnostic::new(self, Level::Error, msg)
}
pub fn fatal<'a, 'b>(&'a self, msg: &'b str) -> Diagnostic<'a> {
Diagnostic::new(self, Level::Fatal, msg)
}
pub fn has_errors(&self) -> bool {
self.inner.has_errors()
}
pub fn abort_if_errors(&self) {
self.inner.abort_if_errors()
}
pub fn track_diagnostics<F, R>(&self, f: F) -> (R, Vec<RustcDiagnostic>)
where
F: FnOnce() -> R,
{
self.inner.track_diagnostics(f)
}
}
impl From<RustcHandler> for Handler {


@ -9,12 +9,12 @@
pub use self::codemap::{CodeMap, FileLoader, FilePathMapping, RealFileLoader};
pub use self::diagnostic::*;
pub use self::handler::Handler;
pub use self::handler::*;
pub use rustc_errors::{ColorConfig, Level};
pub use rustc_errors::Level::*;
mod codemap;
mod diagnostic;
pub mod handler;
mod handler;
#[cfg(test)]
mod tests;


@ -10,7 +10,6 @@ swc_common = { path = "../../common" }
swc_ecma_ast = { path = "../ast" }
parser_macros = { path = "../parser_macros" }
unicode-xid = "0.1"
failure = "0.1"
slog = "2.1"
either = { version = "1.4" }


@ -1,15 +1,75 @@
use self::SyntaxError::*;
use std::borrow::Cow;
use std::fmt::{self, Debug, Formatter};
use swc_atoms::JsWord;
use swc_common::Span;
use swc_common::errors::{Diagnostic, Handler};
use token::Token;
#[derive(Copy, Clone)]
pub(crate) struct Eof<'a> {
pub last: Span,
pub handler: &'a Handler,
}
impl<'a> From<Eof<'a>> for Diagnostic<'a> {
fn from(Eof { handler, last }: Eof<'a>) -> Self {
handler.error("expected some tokens after here").span(last)
}
}
impl<'a> Debug for Eof<'a> {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
Debug::fmt("<eof>", f)
}
}
pub(crate) struct Error<'a> {
pub handler: &'a Handler,
pub span: Span,
pub error: SyntaxError,
}
#[derive(Debug)]
pub enum SyntaxError {
pub(crate) enum SyntaxError {
LegacyDecimal,
LegacyOctal,
InvalidIdentChar,
// #[fail(display = "unterminated string constant: {:?}", start)]
UnterminatedStrLit,
// #[fail(display = "expected unicode escape sequence: {:?}", pos)]
ExpectedUnicodeEscape,
// #[fail(display = "unexpected escape sequence in reserved word: {:?}", word)]
EscapeInReservedWord {
word: JsWord,
},
// #[fail(display = "unterminated regexp (regexp started at {:?})", start)]
UnterminatedRegxp,
// #[fail(display = "identifier directly after number at {:?}", pos)]
IdentAfterNum,
// #[fail(display = "Unexpected character '{}' at {:?}", c, pos)]
UnexpectedChar {
c: char,
},
// #[fail(display = "Invalid string escape at {:?}", start)]
InvalidStrEscape,
// #[fail(display = "Invalid unicode escape at {:?}", pos)]
InvalidUnicodeEscape,
// #[fail(display = "Invalid unicode code point at {:?}", pos)]
InvalidCodePoint,
/// "implements", "interface", "let", "package",\
/// "private", "protected", "public", "static", or "yield"
InvalidIdentInStrict,
/// 'eval' and 'arguments' are invalid identifiers in strict mode.
EvalAndArgumentsInStrict,
UnaryInExp,
UnaryInExp {
left: String,
left_span: Span,
},
LineBreakInThrow,
Expected(&'static Token),
@ -31,8 +91,62 @@ pub enum SyntaxError {
DuplicateLabel(JsWord),
AsyncGenerator,
NonTopLevelImportExport,
/// Destructuring bindings require initializers.
PatVarWithoutInit {
span: Span,
},
PatVarWithoutInit,
}
impl<'a> From<Error<'a>> for Diagnostic<'a> {
fn from(e: Error<'a>) -> Self {
let msg: Cow<'static, _> = match e.error {
LegacyDecimal => "Legacy decimal literal is not permitted in strict mode".into(),
LegacyOctal => "Legacy octal literal is not permitted in strict mode".into(),
InvalidIdentChar => "Invalid character in identifier".into(),
UnterminatedStrLit => "Unterminated string constant".into(),
ExpectedUnicodeEscape => "Expected unicode escape".into(),
EscapeInReservedWord { word } => {
format!("unexpected escape sequence in reserved word: {}", word).into()
}
UnterminatedRegxp => "Unterminated regexp literal".into(),
IdentAfterNum => "Identifier cannot follow number".into(),
UnexpectedChar { c } => format!("Unexpected character '{}'", c).into(),
InvalidStrEscape => "Invalid string escape".into(),
InvalidUnicodeEscape => "Invalid unicode escape".into(),
InvalidCodePoint => "Invalid unicode code point".into(),
InvalidIdentInStrict => "'implements', 'interface', 'let', 'package', 'private', \
'protected', 'public', 'static', or 'yield' cannot be used \
as an identifier in strict mode"
.into(),
EvalAndArgumentsInStrict => {
r#"'eval' and 'arguments' cannot be used as a binding identifier in strict mode"#.into()
}
UnaryInExp { .. } => "** cannot be applied to a unary expression".into(),
LineBreakInThrow => "A line break cannot follow 'throw'".into(),
Expected(token) => format!("Expected {:?}", token).into(),
AwaitStar => "await* has been removed from the async functions proposal. \
Use Promise.all() instead."
.into(),
ReservedWordInObjShorthandOrPat => {
"Cannot use a reserved word as a shorthand property".into()
}
MultipleDefault => "A switch block cannot have multiple defaults".into(),
CommaAfterRestElement => "Trailing comma isn't permitted after a rest element".into(),
NonLastRestParam => "Rest element must be final element".into(),
SpreadInParenExpr => "Parenthesized expression cannot contain spread operator".into(),
EmptyParenExpr => "Parenthesized expression cannot be empty".into(),
ExpectedIdent => "Expected ident".into(),
ExpctedSemi => "Expected ';' or line break".into(),
DuplicateLabel(label) => format!("Label {} is already declared", label).into(),
AsyncGenerator => "An async function cannot be a generator".into(),
NonTopLevelImportExport => "'import' and 'export' are not permitted here".into(),
PatVarWithoutInit => "Destructuring bindings require initializers".into(),
};
e.handler.error(&msg).span(e.span)
}
}


@ -1,4 +1,3 @@
use std::fmt::Debug;
use std::str;
use swc_common::BytePos;
@ -61,8 +60,6 @@ impl<'a, I: Input> LexerInput<I> {
pub struct CharIndices<'a>(pub str::CharIndices<'a>);
impl<'a> Input for CharIndices<'a> {
type Error = ();
fn peek(&mut self) -> Option<(BytePos, char)> {
self.clone().nth(0)
}
@ -87,7 +84,6 @@ impl<'a> Iterator for CharIndices<'a> {
}
pub trait Input: Iterator<Item = (BytePos, char)> {
type Error: Debug;
fn peek(&mut self) -> Option<(BytePos, char)>;
fn peek_ahead(&mut self) -> Option<(BytePos, char)>;
@ -103,8 +99,6 @@ impl<'a, I> Input for &'a mut I
where
I: Input,
{
type Error = I::Error;
fn peek(&mut self) -> Option<(BytePos, char)> {
<I as Input>::peek(*self)
}


@ -8,12 +8,13 @@ pub use self::input::Input;
use self::input::LexerInput;
use self::state::State;
use self::util::*;
use Config;
use Session;
use error::SyntaxError;
use parser_macros::parser;
use slog::Logger;
use std::char;
use swc_atoms::JsWord;
use swc_common::{BytePos, Span, SpanData};
use swc_common::{BytePos, Span};
use swc_common::errors::Diagnostic;
use token::*;
#[macro_use]
@ -25,84 +26,24 @@ mod state;
mod tests;
pub mod util;
#[derive(Fail, Debug, PartialEq, Eq, Hash)]
pub enum Error<InputError> {
#[fail(display = "input error: {}", err)] Input {
err: InputError,
},
#[fail(display = "unterminated string constant: {:?}", start)]
UnterminatedStrLit {
start: BytePos,
},
#[fail(display = "expected unicode escape sequence: {:?}", pos)]
ExpectedUnicodeEscape {
pos: BytePos,
},
#[fail(display = "unexpected escape sequence in reserved word: {:?}", word)]
EscapeInReservedWord {
word: Word,
},
#[fail(display = "unterminated regexp (regexp started at {:?})", start)]
UnterminatedRegxp {
start: BytePos,
},
#[fail(display = "identifier directly after number at {:?}", pos)]
IdentAfterNum {
pos: BytePos,
},
#[fail(display = "Decimals with leading zeros (at {:?}) are not allowed in strict mode",
start)]
DecimalStartsWithZero {
start: BytePos,
},
#[fail(display = "Octals with leading zeros (at {:?}) are not allowed in strict mode", start)]
ImplicitOctalOnStrict {
start: BytePos,
},
#[fail(display = "Unexpected character '{}' at {:?}", c, pos)]
UnexpectedChar {
pos: BytePos,
c: char,
},
#[fail(display = "Invalid string escape at {:?}", start)]
InvalidStrEscape {
start: BytePos,
},
#[fail(display = "Invalid unciode escape at {:?}", pos)]
InvalidUnicodeEscape {
pos: SpanData,
},
#[fail(display = "Invalid unciode code point at {:?}", pos)]
InvalidCodePoint {
pos: SpanData,
},
#[fail(display = "Invalid identifier character at {:?}", pos)]
InvalidIdentChar {
pos: SpanData,
},
}
pub type LexResult<'a, T> = Result<T, Diagnostic<'a>>;
pub struct Lexer<'a, I: Input> {
logger: &'a Logger,
session: Session<'a>,
input: LexerInput<I>,
cfg: Config,
state: State,
}
impl<'a, I: Input> Lexer<'a, I> {
pub fn new(logger: &'a Logger, cfg: Config, input: I) -> Self {
pub fn new(session: Session<'a>, input: I) -> Self {
Lexer {
logger,
session,
input: LexerInput::new(input),
cfg,
state: State::new(),
}
}
fn read_token(&mut self) -> Result<Option<Token>, Error<I::Error>> {
fn read_token(&mut self) -> LexResult<'a, Option<Token>> {
let c = match self.input.current() {
Some(c) => c,
None => return Ok(None),
@ -166,7 +107,7 @@ impl<'a, I: Input> Lexer<'a, I> {
':' => {
self.input.bump();
if self.cfg.fn_bind && self.input.current() == Some(':') {
if self.session.cfg.fn_bind && self.input.current() == Some(':') {
self.input.bump();
return Ok(Some(tok!("::")));
}
@ -325,21 +266,21 @@ impl<'a, I: Input> Lexer<'a, I> {
}
// unexpected character
c => return Err(Error::UnexpectedChar { c, pos: start }),
c => syntax_error!(self, pos_span(start), SyntaxError::UnexpectedChar { c }),
};
Ok(Some(token))
}
/// Read an escaped character for a string literal.
fn read_escaped_char(&mut self, in_template: bool) -> Result<Option<char>, Error<I::Error>> {
fn read_escaped_char(&mut self, in_template: bool) -> LexResult<'a, Option<char>> {
assert_eq!(cur!(self), Some('\\'));
let start = cur_pos!(self);
bump!(self); // '\'
let c = match cur!(self) {
Some(c) => c,
None => return Err(Error::InvalidStrEscape { start }),
None => syntax_error!(self, pos_span(start), SyntaxError::InvalidStrEscape),
};
let c = match c {
'n' => '\n',
@ -421,7 +362,7 @@ impl<'a, I: Input> Lexer<'a, I> {
#[parser]
impl<'a, I: Input> Lexer<'a, I> {
fn read_slash(&mut self) -> Result<Option<Token>, Error<I::Error>> {
fn read_slash(&mut self) -> LexResult<'a, Option<Token>> {
debug_assert_eq!(cur!(), Some('/'));
let start = cur_pos!();
@ -441,14 +382,14 @@ impl<'a, I: Input> Lexer<'a, I> {
}))
}
fn read_token_lt_gt(&mut self) -> Result<Option<Token>, Error<I::Error>> {
fn read_token_lt_gt(&mut self) -> LexResult<'a, Option<Token>> {
assert!(cur!() == Some('<') || cur!() == Some('>'));
let c = cur!().unwrap();
bump!();
// XML style comment. `<!--`
if !self.cfg.module && c == '<' && is!('!') && peek!() == Some('-')
if !self.session.cfg.module && c == '<' && is!('!') && peek!() == Some('-')
&& peek_ahead!() == Some('-')
{
self.skip_line_comment(3);
@ -487,20 +428,25 @@ impl<'a, I: Input> Lexer<'a, I> {
}
/// See https://tc39.github.io/ecma262/#sec-names-and-keywords
fn read_ident_or_keyword(&mut self) -> Result<Token, Error<I::Error>> {
fn read_ident_or_keyword(&mut self) -> LexResult<'a, Token> {
assert!(cur!().is_some());
let start = cur_pos!();
let (word, has_escape) = self.read_word_as_str()?;
// TODO: Use extension trait instead of into/from
let word = Word::from(word);
if has_escape && word.is_reserved_word(self.cfg.strict) {
return Err(Error::EscapeInReservedWord { word });
if has_escape && word.is_reserved_word(self.session.cfg.strict) {
syntax_error!(
span!(start),
SyntaxError::EscapeInReservedWord { word: word.into() }
);
} else {
Ok(Word(word))
}
Ok(Word(word))
}
fn may_read_word_as_str(&mut self) -> Result<Option<(JsWord, bool)>, Error<I::Error>> {
fn may_read_word_as_str(&mut self) -> LexResult<'a, Option<(JsWord, bool)>> {
match cur!() {
Some(c) if c.is_ident_start() => self.read_word_as_str().map(Some),
_ => Ok(None),
@ -508,7 +454,7 @@ impl<'a, I: Input> Lexer<'a, I> {
}
/// returns (word, has_escape)
fn read_word_as_str(&mut self) -> Result<(JsWord, bool), Error<I::Error>> {
fn read_word_as_str(&mut self) -> LexResult<'a, (JsWord, bool)> {
assert!(cur!().is_some());
let mut has_escape = false;
@ -527,7 +473,7 @@ impl<'a, I: Input> Lexer<'a, I> {
'\\' => {
bump!();
if !is!('u') {
return Err(Error::ExpectedUnicodeEscape { pos: cur_pos!() });
syntax_error!(pos_span(start), SyntaxError::ExpectedUnicodeEscape);
}
let c = self.read_unicode_escape(start)?;
let valid = if first {
@ -537,9 +483,7 @@ impl<'a, I: Input> Lexer<'a, I> {
};
if !valid {
return Err(Error::InvalidIdentChar {
pos: span!(start).data(),
});
syntax_error!(span!(start), SyntaxError::InvalidIdentChar);
}
word.push(c);
}
@ -553,7 +497,7 @@ impl<'a, I: Input> Lexer<'a, I> {
Ok((word.into(), has_escape))
}
fn read_unicode_escape(&mut self, start: BytePos) -> Result<char, Error<I::Error>> {
fn read_unicode_escape(&mut self, start: BytePos) -> LexResult<'a, char> {
assert_eq!(cur!(), Some('u'));
bump!();
@ -562,9 +506,7 @@ impl<'a, I: Input> Lexer<'a, I> {
let c = self.read_code_point()?;
if !eat!('}') {
return Err(Error::InvalidUnicodeEscape {
pos: span!(start).data(),
});
syntax_error!(span!(start), SyntaxError::InvalidUnicodeEscape);
}
Ok(c)
@ -573,7 +515,7 @@ impl<'a, I: Input> Lexer<'a, I> {
}
}
fn read_hex_char(&mut self, count: u8) -> Result<char, Error<I::Error>> {
fn read_hex_char(&mut self, count: u8) -> LexResult<'a, char> {
debug_assert!(count == 2 || count == 4);
let pos = cur_pos!();
@ -587,28 +529,20 @@ impl<'a, I: Input> Lexer<'a, I> {
}
/// Read `CodePoint`.
fn read_code_point(&mut self) -> Result<char, Error<I::Error>> {
fn read_code_point(&mut self) -> LexResult<'a, char> {
let start = cur_pos!();
let val = self.read_int(16, 0)?;
match val {
Some(val) if 0x10FFFF >= val => match char::from_u32(val) {
Some(c) => Ok(c),
None => {
return Err(Error::InvalidCodePoint {
pos: span!(start).data(),
})
}
None => syntax_error!(span!(start), SyntaxError::InvalidCodePoint),
},
_ => {
return Err(Error::InvalidCodePoint {
pos: span!(start).data(),
})
}
_ => syntax_error!(span!(start), SyntaxError::InvalidCodePoint),
}
}
/// See https://tc39.github.io/ecma262/#sec-literals-string-literals
fn read_str_lit(&mut self) -> Result<Token, Error<I::Error>> {
fn read_str_lit(&mut self) -> LexResult<'a, Token> {
assert!(cur!() == Some('\'') || cur!() == Some('"'));
let start = cur_pos!();
let quote = cur!().unwrap();
@ -625,7 +559,9 @@ impl<'a, I: Input> Lexer<'a, I> {
return Ok(Str(out, c == '"'));
}
'\\' => out.extend(self.read_escaped_char(false)?),
c if c.is_line_break() => return Err(Error::UnterminatedStrLit { start }),
c if c.is_line_break() => {
syntax_error!(span!(start), SyntaxError::UnterminatedStrLit)
}
_ => {
out.push(c);
bump!();
@ -633,11 +569,11 @@ impl<'a, I: Input> Lexer<'a, I> {
}
}
Err(Error::UnterminatedStrLit { start })
syntax_error!(span!(start), SyntaxError::UnterminatedStrLit)
}
/// Expects current char to be '/'
fn read_regexp(&mut self) -> Result<Token, Error<I::Error>> {
fn read_regexp(&mut self) -> LexResult<'a, Token> {
assert_eq!(cur!(), Some('/'));
let start = cur_pos!();
bump!();
@ -650,7 +586,7 @@ impl<'a, I: Input> Lexer<'a, I> {
// This is ported from babel.
// Seems like a regexp literal cannot contain a line break.
if c.is_line_break() {
return Err(Error::UnterminatedRegxp { start });
syntax_error!(span!(start), SyntaxError::UnterminatedRegxp);
}
if escaped {
@ -671,7 +607,7 @@ impl<'a, I: Input> Lexer<'a, I> {
// input is terminated without following `/`
if cur!() != Some('/') {
return Err(Error::UnterminatedRegxp { start });
syntax_error!(span!(start), SyntaxError::UnterminatedRegxp);
}
bump!(); // '/'
@ -688,7 +624,7 @@ impl<'a, I: Input> Lexer<'a, I> {
Ok(Regex(content, flags))
}
fn read_tmpl_token(&mut self) -> Result<Token, Error<I::Error>> {
fn read_tmpl_token(&mut self) -> LexResult<'a, Token> {
let start = cur_pos!();
// TODO: Optimize
@ -737,3 +673,7 @@ impl<'a, I: Input> Lexer<'a, I> {
self.state.had_line_break
}
}
fn pos_span(p: BytePos) -> Span {
Span::new(p, p, Default::default())
}


@ -4,6 +4,7 @@
//! See https://tc39.github.io/ecma262/#sec-literals-numeric-literals
use super::*;
use error::SyntaxError;
use std::fmt::Display;
#[parser]
@ -11,7 +12,7 @@ impl<'a, I: Input> Lexer<'a, I> {
/// Reads an integer, octal integer, or floating-point number
///
///
pub(super) fn read_number(&mut self, starts_with_dot: bool) -> Result<Number, Error<I::Error>> {
pub(super) fn read_number(&mut self, starts_with_dot: bool) -> LexResult<'a, Number> {
assert!(cur!().is_some());
if starts_with_dot {
debug_assert_eq!(
@ -50,8 +51,8 @@ impl<'a, I: Input> Lexer<'a, I> {
// strict mode hates non-zero decimals starting with zero.
// e.g. 08.1 is a strict mode violation but 0.1 is a valid float.
if self.cfg.strict {
return Err(Error::DecimalStartsWithZero { start });
if self.session.cfg.strict {
syntax_error!(span!(start), SyntaxError::LegacyDecimal);
}
let s = format!("{}", val); // TODO: Remove allocation.
@ -136,7 +137,7 @@ impl<'a, I: Input> Lexer<'a, I> {
Ok(Number(val))
}
pub(super) fn read_radix_number(&mut self, radix: u8) -> Result<Number, Error<I::Error>> {
pub(super) fn read_radix_number(&mut self, radix: u8) -> LexResult<'a, Number> {
debug_assert!(
radix == 2 || radix == 8 || radix == 16,
"radix should be one of 2, 8, 16, but got {}",
@ -155,7 +156,7 @@ impl<'a, I: Input> Lexer<'a, I> {
/// This can read long integers like
/// "13612536612375123612312312312312312312312".
fn read_number_no_dot(&mut self, radix: u8) -> Result<f64, Error<I::Error>> {
fn read_number_no_dot(&mut self, radix: u8) -> LexResult<'a, f64> {
debug_assert!(
radix == 2 || radix == 8 || radix == 10 || radix == 16,
"radix for read_number_no_dot should be one of 2, 8, 10, 16, but got {}",
@ -168,9 +169,11 @@ impl<'a, I: Input> Lexer<'a, I> {
}
/// Ensure that ident cannot directly follow numbers.
fn ensure_not_ident(&mut self) -> Result<(), Error<I::Error>> {
fn ensure_not_ident(&mut self) -> LexResult<'a, ()> {
match cur!() {
Some(c) if c.is_ident_start() => Err(Error::IdentAfterNum { pos: cur_pos!() }),
Some(c) if c.is_ident_start() => {
syntax_error!(pos_span(cur_pos!()), SyntaxError::IdentAfterNum)
}
_ => Ok(()),
}
}
@ -179,7 +182,7 @@ impl<'a, I: Input> Lexer<'a, I> {
/// were read, the integer value otherwise.
/// When `len` is not zero, this
/// will return `None` unless the integer has exactly `len` digits.
pub(super) fn read_int(&mut self, radix: u8, len: u8) -> Result<Option<u32>, Error<I::Error>> {
pub(super) fn read_int(&mut self, radix: u8, len: u8) -> LexResult<'a, Option<u32>> {
let mut count = 0;
self.read_digits(radix, |opt: Option<u32>, radix, val| {
count += 1;
@ -189,7 +192,7 @@ impl<'a, I: Input> Lexer<'a, I> {
}
/// `op`- |total, radix, value| -> (total * radix + value, continue)
fn read_digits<F, Ret>(&mut self, radix: u8, mut op: F) -> Result<Ret, Error<I::Error>>
fn read_digits<F, Ret>(&mut self, radix: u8, mut op: F) -> LexResult<'a, Ret>
where
F: FnMut(Ret, u8, u32) -> (Ret, bool),
Ret: Copy + Default,
@ -200,7 +203,7 @@ impl<'a, I: Input> Lexer<'a, I> {
radix
);
debug!(
self.logger,
self.session.logger,
"read_digits(radix = {}), cur = {:?}",
radix,
cur!(self)
@ -211,7 +214,7 @@ impl<'a, I: Input> Lexer<'a, I> {
let mut total: Ret = Default::default();
while let Some(c) = cur!() {
if self.cfg.num_sep {
if self.session.cfg.num_sep {
// let prev: char = unimplemented!("prev");
// let next = self.input.peek();
@ -250,10 +253,10 @@ impl<'a, I: Input> Lexer<'a, I> {
Ok(total)
}
fn make_legacy_octal(&mut self, start: BytePos, val: f64) -> Result<Number, Error<I::Error>> {
fn make_legacy_octal(&mut self, start: BytePos, val: f64) -> LexResult<'a, Number> {
self.ensure_not_ident()?;
return if self.cfg.strict {
Err(Error::ImplicitOctalOnStrict { start })
return if self.session.cfg.strict {
syntax_error!(span!(start), SyntaxError::LegacyOctal)
} else {
// FIXME
Ok(Number(val))
@ -272,15 +275,19 @@ mod tests {
where
F: FnOnce(&mut Lexer<CharIndices>) -> Ret,
{
let l = ::testing::logger().new(o!("src" => s));
let mut lexer = Lexer::new(&l, Default::default(), CharIndices(s.char_indices()));
f(&mut lexer)
::with_test_sess(s, |sess| {
let mut l = Lexer::new(sess, CharIndices(s.char_indices()));
f(&mut l)
})
}
fn num(s: &'static str) -> f64 {
lex(s, |l| {
l.read_number(s.starts_with("."))
.expect("read_number failed")
.unwrap_or_else(|err| {
err.emit();
unreachable!()
})
.0
})
}
@ -288,7 +295,10 @@ mod tests {
fn int(radix: u8, s: &'static str) -> u32 {
lex(s, |l| {
l.read_int(radix, 0)
.expect("read_int failed")
.unwrap_or_else(|err| {
err.emit();
unreachable!()
})
.expect("read_int returned None")
})
}
@ -334,8 +344,11 @@ mod tests {
#[test]
fn read_radix_number() {
assert_eq!(
Ok(Number(0o73 as f64)),
lex("0o73", |l| l.read_radix_number(8))
Number(0o73 as f64),
lex("0o73", |l| l.read_radix_number(8).unwrap_or_else(|err| {
err.emit();
unreachable!()
}))
);
}
@ -346,12 +359,11 @@ mod tests {
fn test_floats(strict: bool, success: bool, cases: &'static [&'static str]) {
for case in cases {
let logger = ::testing::logger().new(o!("src" => case,
"strict" => strict,
"expected" => if success { "success" } else { "error" }
));
// lazy way to get expected value..
println!(
"Testing {} (when strict = {}); Expects success = {}",
case, strict, success
);
// lazy way to get expected values
let expected: f64 = (i64::from_str_radix(case, 8).map(|v| v as f64))
.or_else(|_| case.parse::<i64>().map(|v| v as f64))
.or_else(|_| case.parse::<f64>())
@ -359,15 +371,12 @@ mod tests {
let input = CharIndices(case.char_indices());
let vec = panic::catch_unwind(|| {
Lexer::new(
&logger,
Config {
strict,
..Default::default()
},
input,
).map(|ts| ts.token)
.collect::<Vec<_>>()
::with_test_sess(case, |mut sess| {
sess.cfg.strict = strict;
Lexer::new(sess, input)
.map(|ts| ts.token)
.collect::<Vec<_>>()
})
});
if success {
@ -380,7 +389,7 @@ mod tests {
assert_eq!(Num(Number(expected)), token);
} else {
match vec {
Ok(vec) => assert!(vec![Num(Number(expected))] != vec),
Ok(vec) => assert_ne!(vec![Num(Number(expected))], vec),
_ => {}
}
}


@ -1,7 +1,7 @@
use super::{Input, Lexer};
use parser_macros::parser;
use slog::Logger;
use swc_common::{BytePos, Span};
use swc_common::BytePos;
use token::*;
/// State of lexer.
@ -35,27 +35,31 @@ impl<'a, I: Input> Iterator for Lexer<'a, I> {
};
let start = cur_pos!();
if self.state.is_in_template() {
let token = self.read_tmpl_token()
.unwrap_or_else(|err| unimplemented!("error handling: {:?}", err));
self.state.update(&self.logger, &token);
return Some(TokenAndSpan {
token,
span: span!(start),
});
let res = if self.state.is_in_template() {
self.read_tmpl_token().map(Some)
} else {
self.read_token()
};
let token = res.unwrap_or_else(|err| {
// Report error
err.emit();
Some(Token::Error)
});
if let Some(ref token) = token {
self.state.update(&self.session.logger, &token)
}
if let Some(token) = self.read_token()
.unwrap_or_else(|err| unimplemented!("error handling: {:?}", err))
{
self.state.update(&self.logger, &token);
return Some(TokenAndSpan {
token.map(|token| {
// Attach span to token.
TokenAndSpan {
token,
had_line_break: self.had_line_break_before_last(),
span: span!(start),
});
}
None
}
})
}
}


@ -7,10 +7,10 @@ fn with_lexer<F, Ret>(s: &'static str, f: F) -> Ret
where
F: FnOnce(&mut Lexer<CharIndices>) -> Ret,
{
let l = ::testing::logger().new(o!("src" => s));
let mut lexer = Lexer::new(&l, Default::default(), CharIndices(s.char_indices()));
f(&mut lexer)
::with_test_sess(s, |sess| {
let mut l = Lexer::new(sess, CharIndices(s.char_indices()));
f(&mut l)
})
}
fn lex(s: &'static str) -> Vec<TokenAndSpan> {
@ -21,6 +21,16 @@ fn lex_tokens(s: &'static str) -> Vec<Token> {
with_lexer(s, |l| l.map(|ts| ts.token).collect())
}
trait LineBreak: Into<TokenAndSpan> {
fn lb(self) -> TokenAndSpan {
TokenAndSpan {
had_line_break: true,
..self.into()
}
}
}
impl LineBreak for TokenAndSpan {}
trait SpanRange: Sized {
fn into_span(self) -> Span;
}
@ -50,6 +60,7 @@ trait WithSpan: Sized {
{
TokenAndSpan {
token: self.into_token(),
had_line_break: false,
span: span.into_span(),
}
}
@ -100,7 +111,7 @@ impl WithSpan for AssignOpToken {
fn test262_lexer_error_0001() {
assert_eq!(
vec![
123f64.span(0..4),
123f64.span(0..4).lb(),
Dot.span(4..5),
"a".span(5..6),
LParen.span(6..7),
@ -115,7 +126,7 @@ fn test262_lexer_error_0001() {
fn test262_lexer_error_0002() {
assert_eq!(
vec![
Str("use strict".into(), false).span(0..15),
Str("use strict".into(), false).span(0..15).lb(),
Semi.span(15..16),
],
lex(r#"'use\x20strict';"#)
@ -124,7 +135,7 @@ fn test262_lexer_error_0002() {
#[test]
fn test262_lexer_error_0003() {
assert_eq!(vec!["a".span(0..6)], lex(r#"\u0061"#));
assert_eq!(vec!["a".span(0..6).lb()], lex(r#"\u0061"#));
}
#[test]
@ -137,26 +148,29 @@ fn test262_lexer_error_0004() {
#[test]
fn ident_escape_unicode() {
assert_eq!(vec!["aa".span(0..7)], lex(r#"a\u0061"#));
assert_eq!(vec!["aa".span(0..7).lb()], lex(r#"a\u0061"#));
}
#[test]
fn ident_escape_unicode_2() {
assert_eq!(lex("℘℘"), vec!["℘℘".span(0..6)]);
assert_eq!(lex("℘℘"), vec!["℘℘".span(0..6).lb()]);
assert_eq!(lex(r#"℘\u2118"#), vec!["℘℘".span(0..9)]);
assert_eq!(lex(r#"℘\u2118"#), vec!["℘℘".span(0..9).lb()]);
}
#[test]
fn str_escape_hex() {
assert_eq!(lex(r#"'\x61'"#), vec![Str("a".into(), false).span(0..6)]);
assert_eq!(
lex(r#"'\x61'"#),
vec![Str("a".into(), false).span(0..6).lb()]
);
}
#[test]
fn str_escape_octal() {
assert_eq!(
lex(r#"'Hello\012World'"#),
vec![Str("Hello\nWorld".into(), false).span(0..16)]
vec![Str("Hello\nWorld".into(), false).span(0..16).lb()]
)
}
@ -164,7 +178,7 @@ fn str_escape_octal() {
fn str_escape_unicode_long() {
assert_eq!(
lex(r#"'\u{00000000034}'"#),
vec![Str("4".into(), false).span(0..17)]
vec![Str("4".into(), false).span(0..17).lb()]
);
}
@ -172,12 +186,15 @@ fn str_escape_unicode_long() {
fn regexp_unary_void() {
assert_eq!(
lex("void /test/"),
vec![Void.span(0..4), Regex("test".into(), "".into()).span(5..11)]
vec![
Void.span(0..4).lb(),
Regex("test".into(), "".into()).span(5..11),
]
);
assert_eq!(
lex("void (/test/)"),
vec![
Void.span(0..4),
Void.span(0..4).lb(),
LParen.span(5..6),
Regex("test".into(), "".into()).span(6..12),
RParen.span(12..13),
@ -190,7 +207,7 @@ fn non_regexp_unary_plus() {
assert_eq!(
lex("+{} / 1"),
vec![
tok!('+').span(0..1),
tok!('+').span(0..1).lb(),
tok!('{').span(1..2),
tok!('}').span(2..3),
tok!('/').span(4..5),
@ -204,7 +221,7 @@ fn non_regexp_unary_plus() {
#[test]
fn invalid_but_lexable() {
assert_eq!(
vec![LParen.span(0), LBrace.span(1), Semi.span(2)],
vec![LParen.span(0).lb(), LBrace.span(1), Semi.span(2)],
lex("({;")
);
}
@ -212,7 +229,7 @@ fn invalid_but_lexable() {
#[test]
fn paren_semi() {
assert_eq!(
vec![LParen.span(0), RParen.span(1), Semi.span(2)],
vec![LParen.span(0).lb(), RParen.span(1), Semi.span(2)],
lex("();")
);
}
@ -221,7 +238,7 @@ fn paren_semi() {
fn ident_paren() {
assert_eq!(
vec![
"a".span(0),
"a".span(0).lb(),
LParen.span(1),
"bc".span(2..4),
RParen.span(4),
@ -233,21 +250,27 @@ fn ident_paren() {
#[test]
fn read_word() {
assert_eq!(vec!["a".span(0), "b".span(2), "c".span(4)], lex("a b c"),)
assert_eq!(
vec!["a".span(0).lb(), "b".span(2), "c".span(4)],
lex("a b c"),
)
}
#[test]
fn simple_regex() {
assert_eq!(
lex("x = /42/i"),
vec![
"x".span(0),
"x".span(0).lb(),
Assign.span(2),
Regex("42".into(), "i".into()).span(4..9),
],
lex("x = /42/i")
);
assert_eq!(vec![Regex("42".into(), "".into()).span(0..4)], lex("/42/"));
assert_eq!(
lex("/42/"),
vec![Regex("42".into(), "".into()).span(0..4).lb()]
);
}
#[test]
@ -273,7 +296,10 @@ fn complex_regex() {
#[test]
fn simple_div() {
assert_eq!(vec!["a".span(0), Div.span(2), "b".span(4)], lex("a / b"));
assert_eq!(
vec!["a".span(0).lb(), Div.span(2), "b".span(4)],
lex("a / b")
);
}
#[test]
@ -338,8 +364,9 @@ fn spec_001() {
#[test]
fn after_if() {
assert_eq!(
lex("if(x){} /y/.test(z)"),
vec![
Keyword::If.span(0..2),
Keyword::If.span(0..2).lb(),
LParen.span(2),
"x".span(3),
RParen.span(4),
@ -352,7 +379,6 @@ fn after_if() {
"z".span(17),
RParen.span(18),
],
lex("if(x){} /y/.test(z)"),
)
}
@ -398,7 +424,7 @@ fn invalid_number_failure() {
fn migrated_0002() {
assert_eq!(
vec![
"tokenize".span(0..8),
"tokenize".span(0..8).lb(),
LParen.span(8),
Regex("42".into(), "".into()).span(9..13),
RParen.span(13),
@ -411,7 +437,7 @@ fn migrated_0002() {
fn migrated_0003() {
assert_eq!(
vec![
LParen.span(0),
LParen.span(0).lb(),
Word::False.span(1..6),
RParen.span(6),
Div.span(8),
@ -426,7 +452,7 @@ fn migrated_0003() {
fn migrated_0004() {
assert_eq!(
vec![
Function.span(0..8),
Function.span(0..8).lb(),
"f".span(9),
LParen.span(10),
RParen.span(11),
@ -460,13 +486,13 @@ fn migrated_0004() {
fn migrated_0006() {
// This test seems wrong.
// assert_eq!(
// vec![LBrace.span(0), RBrace.span(1), Div.span(3), 42.span(4..6)],
// vec![LBrace.span(0).lb(), RBrace.span(1), Div.span(3), 42.span(4..6)],
// lex("{} /42")
// )
assert_eq!(
vec![
LBrace.span(0),
LBrace.span(0).lb(),
RBrace.span(1),
Regex("42".into(), "".into()).span(3..7),
],


@ -3,7 +3,7 @@
#![feature(const_fn)]
#![feature(specialization)]
#![feature(never_type)]
#![feature(nll)]
// #![feature(nll)]
#![feature(proc_macro)]
#![feature(try_from)]
#![feature(try_trait)]
@ -12,29 +12,27 @@
#![deny(unsafe_code)]
extern crate either;
#[macro_use]
extern crate failure;
extern crate parser_macros;
#[macro_use]
extern crate slog;
#[macro_use(js_word)]
extern crate swc_atoms;
extern crate swc_common;
pub extern crate swc_atoms;
pub extern crate swc_common;
pub extern crate swc_ecma_ast as ast;
#[macro_use]
extern crate swc_macros;
pub extern crate swc_macros;
#[cfg(test)]
#[macro_use]
extern crate testing;
extern crate unicode_xid;
pub use self::error::*;
pub use self::lexer::input::{CharIndices, Input};
pub use self::parser::*;
use slog::Logger;
use swc_common::errors::Handler;
#[macro_use]
mod macros;
pub mod error;
mod error;
mod lexer;
mod token;
mod parser;
@ -53,8 +51,29 @@ pub struct Config {
pub module: bool,
}
#[derive(Debug, Clone, Copy)]
#[derive(Clone, Copy)]
pub struct Session<'a> {
pub cfg: Config,
pub logger: &'a Logger,
pub handler: &'a Handler,
}
#[cfg(test)]
fn with_test_sess<F, Ret>(src: &'static str, f: F) -> Ret
where
F: FnOnce(Session) -> Ret,
{
let handler = ::swc_common::errors::Handler::with_tty_emitter(
::swc_common::errors::ColorConfig::Never,
true,
false,
None,
);
let logger = ::testing::logger().new(o!("src" => src));
f(Session {
handler: &handler,
logger: &logger,
cfg: Default::default(),
})
}


@ -39,10 +39,13 @@ macro_rules! tok {
("async") => { Token::Word(Word::Ident(js_word!("async"))) };
("as") => { Token::Word(Word::Ident(js_word!("as"))) };
("await") => { Token::Word(Keyword(Await)) };
("break") => { Token::Word(Keyword(Break)) };
("case") => { Token::Word(Keyword(Case)) };
("catch") => { Token::Word(Keyword(Catch)) };
("class") => { Token::Word(Keyword(Class)) };
("const") => { Token::Word(Keyword(Const)) };
("continue") => { Token::Word(Keyword(Continue)) };
("debugger") => { Token::Word(Keyword(Debugger)) };
("default") => { Token::Word(Keyword(Default_)) };
("delete") => { Token::Word(Keyword(Delete)) };
("do") => { Token::Word(Keyword(Do)) };
@ -92,7 +95,7 @@ macro_rules! span {
unreachable!("assertion failed: (span.start <= span.end).
start = {}, end = {}", start.0, end.0)
}
Span::new(start, end, Default::default())
::swc_common::Span::new(start, end, Default::default())
}};
}
@ -100,11 +103,10 @@ macro_rules! spanned {
(
$p:expr, { $($body:tt)* }
) => {{
let start = cur_pos!($p);
let start = { cur_pos!($p) };
let val: Result<_, _> = {
$($body)*
};
#[allow(unreachable_code)]
{
match val {
Ok(val) => {
@ -117,3 +119,19 @@ macro_rules! spanned {
}
}};
}
macro_rules! syntax_error {
($p:expr, $err:expr) => {{
syntax_error!($p, $p.input.cur_span(), $err)
}};
($p:expr, $span:expr, $err:expr) => {{
let err = $crate::error::Error {
handler: $p.session.handler,
span: $span,
error: $err,
};
let res: Result<!, _> = Err(err);
res?
}};
}


@ -5,53 +5,53 @@ use super::ident::MaybeOptionalIdentParser;
#[parser]
impl<'a, I: Input> Parser<'a, I> {
pub(super) fn parse_async_fn_expr(&mut self) -> PResult<Box<Expr>> {
pub(super) fn parse_async_fn_expr(&mut self) -> PResult<'a, Box<Expr>> {
let start = cur_pos!();
expect!("async");
self.parse_fn(Some(start))
}
/// Parse function expression
pub(super) fn parse_fn_expr(&mut self) -> PResult<Box<Expr>> {
pub(super) fn parse_fn_expr(&mut self) -> PResult<'a, Box<Expr>> {
self.parse_fn(None)
}
pub(super) fn parse_async_fn_decl(&mut self) -> PResult<Decl> {
pub(super) fn parse_async_fn_decl(&mut self) -> PResult<'a, Decl> {
let start = cur_pos!();
expect!("async");
self.parse_fn(Some(start))
}
pub(super) fn parse_fn_decl(&mut self) -> PResult<Decl> {
pub(super) fn parse_fn_decl(&mut self) -> PResult<'a, Decl> {
self.parse_fn(None)
}
pub(super) fn parse_default_async_fn(&mut self) -> PResult<ExportDefaultDecl> {
pub(super) fn parse_default_async_fn(&mut self) -> PResult<'a, ExportDefaultDecl> {
let start = cur_pos!();
expect!("async");
self.parse_fn(Some(start))
}
pub(super) fn parse_default_fn(&mut self) -> PResult<ExportDefaultDecl> {
pub(super) fn parse_default_fn(&mut self) -> PResult<'a, ExportDefaultDecl> {
self.parse_fn(None)
}
pub(super) fn parse_class_decl(&mut self) -> PResult<Decl> {
pub(super) fn parse_class_decl(&mut self) -> PResult<'a, Decl> {
self.parse_class()
}
pub(super) fn parse_class_expr(&mut self) -> PResult<Box<Expr>> {
pub(super) fn parse_class_expr(&mut self) -> PResult<'a, Box<Expr>> {
self.parse_class()
}
pub(super) fn parse_default_class(&mut self) -> PResult<ExportDefaultDecl> {
pub(super) fn parse_default_class(&mut self) -> PResult<'a, ExportDefaultDecl> {
self.parse_class()
}
fn parse_class<T>(&mut self) -> PResult<T>
fn parse_class<T>(&mut self) -> PResult<'a, T>
where
T: OutputType,
Self: MaybeOptionalIdentParser<T::Ident>,
Self: MaybeOptionalIdentParser<'a, T::Ident>,
{
let start = cur_pos!();
expect!("class");
@ -78,7 +78,7 @@ impl<'a, I: Input> Parser<'a, I> {
))
}
fn parse_class_body(&mut self) -> PResult<Vec<ClassMethod>> {
fn parse_class_body(&mut self) -> PResult<'a, Vec<ClassMethod>> {
let mut elems = vec![];
while !eof!() && !is!('}') {
if eat_exact!(';') {
@ -90,7 +90,7 @@ impl<'a, I: Input> Parser<'a, I> {
Ok(elems)
}
fn parse_class_element(&mut self) -> PResult<ClassMethod> {
fn parse_class_element(&mut self) -> PResult<'a, ClassMethod> {
// ignore semi
let start_of_static = {
@ -105,17 +105,17 @@ impl<'a, I: Input> Parser<'a, I> {
self.parse_method_def(start_of_static)
}
fn parse_fn<T>(&mut self, start_of_async: Option<BytePos>) -> PResult<T>
fn parse_fn<T>(&mut self, start_of_async: Option<BytePos>) -> PResult<'a, T>
where
T: OutputType,
Self: MaybeOptionalIdentParser<T::Ident>,
Self: MaybeOptionalIdentParser<'a, T::Ident>,
{
let start = start_of_async.unwrap_or(cur_pos!());
assert_and_bump!("function");
let is_async = start_of_async.is_some();
if is_async && is!('*') {
syntax_error!(SyntaxError::AsyncGenerator);
syntax_error!(SyntaxError::AsyncGenerator {});
}
let is_generator = eat!('*');
@ -147,25 +147,26 @@ impl<'a, I: Input> Parser<'a, I> {
parse_args: F,
is_async: bool,
is_generator: bool,
) -> PResult<Function>
) -> PResult<'a, Function>
where
F: FnOnce(&mut Self) -> PResult<Vec<Pat>>,
F: FnOnce(&mut Self) -> PResult<'a, Vec<Pat>>,
{
self.with_ctx(Context {
let ctx = Context {
in_async: is_async,
in_generator: is_generator,
..self.ctx
}).parse_with(|mut p| {
expect!(p, '(');
};
self.with_ctx(ctx).parse_with(|mut p| {
expect!('(');
let params = parse_args(&mut p)?;
expect!(p, ')');
expect!(')');
let body = p.parse_fn_body(is_async, is_generator)?;
Ok(Function {
span: span!(p, start),
span: span!(start),
params,
body,
is_async,
@ -174,7 +175,7 @@ impl<'a, I: Input> Parser<'a, I> {
})
}
fn parse_method_def(&mut self, start_of_static: Option<BytePos>) -> PResult<ClassMethod> {
fn parse_method_def(&mut self, start_of_static: Option<BytePos>) -> PResult<'a, ClassMethod> {
let is_static = start_of_static.is_some();
let start = start_of_static.unwrap_or(cur_pos!());
@ -273,15 +274,16 @@ impl<'a, I: Input> Parser<'a, I> {
}
}
pub(super) fn parse_fn_body<T>(&mut self, is_async: bool, is_generator: bool) -> PResult<T>
pub(super) fn parse_fn_body<T>(&mut self, is_async: bool, is_generator: bool) -> PResult<'a, T>
where
Self: FnBodyParser<T>,
Self: FnBodyParser<'a, T>,
{
self.with_ctx(Context {
let ctx = Context {
in_async: is_async,
in_generator: is_generator,
..self.ctx
}).parse_fn_body_inner()
};
self.with_ctx(ctx).parse_fn_body_inner()
}
}
@ -331,13 +333,13 @@ impl OutputType for Decl {
}
}
pub(super) trait FnBodyParser<Body> {
fn parse_fn_body_inner(&mut self) -> PResult<Body>;
pub(super) trait FnBodyParser<'a, Body> {
fn parse_fn_body_inner(&mut self) -> PResult<'a, Body>;
}
#[parser]
impl<'a, I: Input> FnBodyParser<BlockStmtOrExpr> for Parser<'a, I> {
fn parse_fn_body_inner(&mut self) -> PResult<BlockStmtOrExpr> {
impl<'a, I: Input> FnBodyParser<'a, BlockStmtOrExpr> for Parser<'a, I> {
fn parse_fn_body_inner(&mut self) -> PResult<'a, BlockStmtOrExpr> {
if is!('{') {
self.parse_block().map(BlockStmtOrExpr::BlockStmt)
} else {
@ -346,8 +348,8 @@ impl<'a, I: Input> FnBodyParser<BlockStmtOrExpr> for Parser<'a, I> {
}
}
impl<'a, I: Input> FnBodyParser<BlockStmt> for Parser<'a, I> {
fn parse_fn_body_inner(&mut self) -> PResult<BlockStmt> {
impl<'a, I: Input> FnBodyParser<'a, BlockStmt> for Parser<'a, I> {
fn parse_fn_body_inner(&mut self) -> PResult<'a, BlockStmt> {
self.parse_block()
}
}
@ -359,13 +361,19 @@ mod tests {
fn lhs(s: &'static str) -> Box<Expr> {
test_parser(s, |p| {
p.parse_lhs_expr().expect("failed to parse lhs expression")
p.parse_lhs_expr().unwrap_or_else(|err| {
err.emit();
unreachable!("failed to parse a left-hand-side expression")
})
})
}
fn expr(s: &'static str) -> Box<Expr> {
test_parser(s, |p| {
p.parse_expr().expect("failed to parse an expression")
p.parse_expr().unwrap_or_else(|err| {
err.emit();
unreachable!("failed to parse an expression")
})
})
}


@ -7,7 +7,7 @@ mod tests;
#[parser]
impl<'a, I: Input> Parser<'a, I> {
pub fn parse_expr(&mut self) -> PResult<Box<Expr>> {
pub fn parse_expr(&mut self) -> PResult<'a, Box<Expr>> {
let expr = self.parse_assignment_expr()?;
let start = expr.span.lo();
@ -29,7 +29,7 @@ impl<'a, I: Input> Parser<'a, I> {
/// Parse an assignment expression. This includes applications of
/// operators like `+=`.
///
pub(super) fn parse_assignment_expr(&mut self) -> PResult<Box<Expr>> {
pub(super) fn parse_assignment_expr(&mut self) -> PResult<'a, Box<Expr>> {
if self.ctx.in_generator && is!("yield") {
return self.parse_yield_expr();
}
@ -59,7 +59,7 @@ impl<'a, I: Input> Parser<'a, I> {
}
match cur!() {
Some(&AssignOp(op)) => {
Ok(&AssignOp(op)) => {
bump!();
let right = self.parse_assignment_expr()?;
Ok(box Expr {
@ -77,7 +77,7 @@ impl<'a, I: Input> Parser<'a, I> {
}
/// Spec: 'ConditionalExpression'
fn parse_cond_expr(&mut self) -> PResult<Box<Expr>> {
fn parse_cond_expr(&mut self) -> PResult<'a, Box<Expr>> {
spanned!({
let test = self.parse_bin_expr()?;
return_if_arrow!(test);
@ -95,79 +95,87 @@ impl<'a, I: Input> Parser<'a, I> {
}
/// Parse a primary expression or arrow function
fn parse_primary_expr(&mut self) -> PResult<Box<Expr>> {
fn parse_primary_expr(&mut self) -> PResult<'a, Box<Expr>> {
let can_be_arrow = self.state
.potential_arrow_start
.map(|s| s == cur_pos!())
.unwrap_or(false);
// debug!(
// self.logger,
// "Parsing a primary expression. cur={:?} can_be_arrow={}",
// cur!(),
// can_be_arrow
// );
if is!("this") {
return self.spanned(|p| {
assert_and_bump!("this");
Ok(ExprKind::This)
});
}
let t = cur!()?;
match *t {
tok!("this") => {
return spanned!({
assert_and_bump!("this");
Ok(ExprKind::This)
});
// Handle async function expression
if { is!("async") } && { peeked_is!("function") } && {
!self.input.has_linebreak_between_cur_and_peeked()
} {
return self.parse_async_fn_expr();
}
if is!('[') {
return self.parse_array_lit();
}
if is!('{') {
return self.parse_object();
}
// Handle FunctionExpression and GeneratorExpression
if is!("function") {
return self.parse_fn_expr();
}
if is!("class") {
return self.parse_class_expr();
}
// Literals
if {
match *cur!()? {
tok!("null") | tok!("true") | tok!("false") | Num(..) | Str(..) => true,
_ => false,
}
} {
return self.spanned(|p| p.parse_lit().map(ExprKind::Lit));
}
tok!("async") => {
// Handle async function expression
if peeked_is!("function") && !self.input.has_linebreak_between_cur_and_peeked() {
return self.parse_async_fn_expr();
}
// Regexp
if {
match *cur!()? {
Regex(..) => true,
_ => false,
}
} {
return self.spanned(|p| match bump!() {
Regex(exp, flags) => Ok(ExprKind::Lit(Lit::Regex(Regex { exp, flags }))),
_ => unreachable!(),
});
}
tok!("null") | tok!("true") | tok!("false") | Num(..) | Str(..) => {
return spanned!({ self.parse_lit().map(ExprKind::Lit) })
}
tok!('[') => return self.parse_array_lit(),
tok!('{') => return self.parse_object(),
if is!('`') {
return self.spanned(|p| {
// parse template literal
Ok(ExprKind::Tpl(p.parse_tpl_lit(None)?))
});
}
// Handle FunctionExpression and GeneratorExpression
tok!("function") => return self.parse_fn_expr(),
tok!("class") => return self.parse_class_expr(),
Regex(_, _) => {
return spanned!({
match bump!() {
Regex(exp, flags) => Ok(ExprKind::Lit(Lit::Regex(Regex { exp, flags }))),
_ => unreachable!(),
}
});
}
tok!('`') => {
return spanned!({
// parse template literal
Ok(ExprKind::Tpl(self.parse_tpl_lit(None)?))
});
}
tok!('(') => {
return self.parse_paren_expr_or_arrow_fn(can_be_arrow);
}
_ => {}
if is!('(') {
return self.parse_paren_expr_or_arrow_fn(can_be_arrow);
}
if is!("let") || is!(IdentRef) {
return spanned!({
return self.spanned(|p| {
// TODO: Handle [Yield, Await]
let id = self.parse_ident_ref()?;
let id = p.parse_ident_ref()?;
if can_be_arrow && id.sym == js_word!("async") && is!(BindingIdent) {
// async a => body
let arg = self.parse_binding_ident().map(Pat::from)?;
let arg = p.parse_binding_ident().map(Pat::from)?;
let params = vec![arg];
expect!("=>");
let body = self.parse_fn_body(true, false)?;
let body = p.parse_fn_body(true, false)?;
Ok(ExprKind::Arrow(ArrowExpr {
body,
params,
@ -178,7 +186,7 @@ impl<'a, I: Input> Parser<'a, I> {
// async is parameter
let params = vec![id.into()];
let body = self.parse_fn_body(false, false)?;
let body = p.parse_fn_body(false, false)?;
Ok(ExprKind::Arrow(ArrowExpr {
body,
params,
@ -186,7 +194,7 @@ impl<'a, I: Input> Parser<'a, I> {
is_generator: false,
}))
} else {
return Ok(id.into());
return Ok(ExprKind::Ident(id.into()));
}
});
}
@ -194,8 +202,8 @@ impl<'a, I: Input> Parser<'a, I> {
unexpected!()
}
fn parse_array_lit(&mut self) -> PResult<Box<Expr>> {
spanned!({
fn parse_array_lit(&mut self) -> PResult<'a, Box<Expr>> {
self.spanned(|p| {
assert_and_bump!('[');
let mut elems = vec![];
let mut comma = 1;
@ -212,7 +220,7 @@ impl<'a, I: Input> Parser<'a, I> {
}
elems.extend(iter::repeat(None).take(comma - 1));
comma = 0;
elems.push(self.include_in_expr(true).parse_expr_or_spread().map(Some)?);
elems.push(p.include_in_expr(true).parse_expr_or_spread().map(Some)?);
}
expect!(']');
@ -221,12 +229,12 @@ impl<'a, I: Input> Parser<'a, I> {
})
}
fn parse_member_expr(&mut self) -> PResult<Box<Expr>> {
fn parse_member_expr(&mut self) -> PResult<'a, Box<Expr>> {
self.parse_member_expr_or_new_expr(false)
}
/// `is_new_expr`: true iff we are parsing production 'NewExpression'.
fn parse_member_expr_or_new_expr(&mut self, is_new_expr: bool) -> PResult<Box<Expr>> {
fn parse_member_expr_or_new_expr(&mut self, is_new_expr: bool) -> PResult<'a, Box<Expr>> {
let start = cur_pos!();
if eat!("new") {
let span_of_new = span!(start);
@ -257,14 +265,14 @@ impl<'a, I: Input> Parser<'a, I> {
// Parsed with 'MemberExpression' production.
let args = self.parse_args().map(Some)?;
let new_expr = ExprOrSuper::Expr(box Expr {
span: span!(start),
node: ExprKind::New(NewExpr { callee, args }),
});
// We should parse subscripts for MemberExpression.
return self.parse_subscripts(
ExprOrSuper::Expr(box Expr {
span: span!(start),
node: ExprKind::New(NewExpr { callee, args }),
}),
true,
);
// Because it's left recursive.
return self.parse_subscripts(new_expr, true);
}
// Parsed with 'NewExpression' production.
@ -285,12 +293,12 @@ impl<'a, I: Input> Parser<'a, I> {
/// Parse `NewExpression`.
/// This includes `MemberExpression`.
fn parse_new_expr(&mut self) -> PResult<Box<Expr>> {
fn parse_new_expr(&mut self) -> PResult<'a, Box<Expr>> {
self.parse_member_expr_or_new_expr(true)
}
/// Parse `Arguments[Yield, Await]`
pub(super) fn parse_args(&mut self) -> PResult<Vec<ExprOrSpread>> {
pub(super) fn parse_args(&mut self) -> PResult<'a, Vec<ExprOrSpread>> {
expect!('(');
let mut first = true;
@ -316,7 +324,7 @@ impl<'a, I: Input> Parser<'a, I> {
/// AssignmentExpression[+In, ?Yield, ?Await]
/// ...AssignmentExpression[+In, ?Yield, ?Await]
pub(super) fn parse_expr_or_spread(&mut self) -> PResult<ExprOrSpread> {
pub(super) fn parse_expr_or_spread(&mut self) -> PResult<'a, ExprOrSpread> {
if eat!("...") {
self.include_in_expr(true)
.parse_assignment_expr()
@ -328,7 +336,7 @@ impl<'a, I: Input> Parser<'a, I> {
/// Parse paren expression or arrow function expression.
///
fn parse_paren_expr_or_arrow_fn(&mut self, can_be_arrow: bool) -> PResult<Box<Expr>> {
fn parse_paren_expr_or_arrow_fn(&mut self, can_be_arrow: bool) -> PResult<'a, Box<Expr>> {
let start = cur_pos!();
// At this point, we can't know if it's parenthesized
@ -363,7 +371,7 @@ impl<'a, I: Input> Parser<'a, I> {
// ParenthesizedExpression cannot contain spread.
if expr_or_spreads.len() == 0 {
syntax_error!(SyntaxError::EmptyParenExpr)
syntax_error!(SyntaxError::EmptyParenExpr);
} else if expr_or_spreads.len() == 1 {
let expr = match expr_or_spreads.into_iter().next().unwrap() {
ExprOrSpread::Spread(_) => syntax_error!(SyntaxError::SpreadInParenExpr),
@ -401,7 +409,7 @@ impl<'a, I: Input> Parser<'a, I> {
}
}
fn parse_tpl_lit(&mut self, tag: Option<Box<Expr>>) -> PResult<TplLit> {
fn parse_tpl_lit(&mut self, tag: Option<Box<Expr>>) -> PResult<'a, TplLit> {
assert_and_bump!('`');
let is_tagged = tag.is_some();
@ -426,7 +434,7 @@ impl<'a, I: Input> Parser<'a, I> {
Ok(TplLit { tag, exprs, quasis })
}
fn parse_tpl_element(&mut self, is_tagged: bool) -> PResult<TplElement> {
fn parse_tpl_element(&mut self, is_tagged: bool) -> PResult<'a, TplElement> {
let raw = match *cur!()? {
Template(_) => match bump!() {
Template(s) => s,
@ -444,7 +452,7 @@ impl<'a, I: Input> Parser<'a, I> {
})
}
fn parse_subscripts(&mut self, mut obj: ExprOrSuper, no_call: bool) -> PResult<Box<Expr>> {
fn parse_subscripts(&mut self, mut obj: ExprOrSuper, no_call: bool) -> PResult<'a, Box<Expr>> {
loop {
obj = match self.parse_subscript(obj, no_call)? {
(expr, false) => return Ok(expr),
@ -454,7 +462,11 @@ impl<'a, I: Input> Parser<'a, I> {
}
/// returned bool is true if this method should be called again.
fn parse_subscript(&mut self, obj: ExprOrSuper, no_call: bool) -> PResult<(Box<Expr>, bool)> {
fn parse_subscript(
&mut self,
obj: ExprOrSuper,
no_call: bool,
) -> PResult<'a, (Box<Expr>, bool)> {
let start = cur_pos!();
// member expression
// $obj.name
@ -528,7 +540,7 @@ impl<'a, I: Input> Parser<'a, I> {
/// Parse call, dot, and `[]`-subscript expressions.
///
///
pub(super) fn parse_lhs_expr(&mut self) -> PResult<Box<Expr>> {
pub(super) fn parse_lhs_expr(&mut self) -> PResult<'a, Box<Expr>> {
let start = cur_pos!();
// `super()` can't be handled from parse_new_expr()
@ -542,9 +554,10 @@ impl<'a, I: Input> Parser<'a, I> {
match callee.node {
// If this is parsed using 'NewExpression' rule, just return it.
// Because it's not left-recursive.
ExprKind::New(NewExpr { args: None, .. }) => {
assert_ne!(
cur!(),
cur!().ok(),
Some(&LParen),
"parse_new_expr() should eat paren if it exists"
);
@ -575,10 +588,10 @@ impl<'a, I: Input> Parser<'a, I> {
Ok(callee)
}
pub(super) fn parse_expr_or_pat(&mut self) -> PResult<Box<Expr>> {
pub(super) fn parse_expr_or_pat(&mut self) -> PResult<'a, Box<Expr>> {
self.parse_expr()
}
pub(super) fn parse_args_or_pats(&mut self) -> PResult<Vec<ExprOrSpread>> {
pub(super) fn parse_args_or_pats(&mut self) -> PResult<'a, Vec<ExprOrSpread>> {
self.parse_args()
}
}
@ -586,10 +599,10 @@ impl<'a, I: Input> Parser<'a, I> {
/// simple leaf methods.
#[parser]
impl<'a, I: Input> Parser<'a, I> {
fn parse_yield_expr(&mut self) -> PResult<Box<Expr>> {
spanned!({
fn parse_yield_expr(&mut self) -> PResult<'a, Box<Expr>> {
self.spanned(|p| {
assert_and_bump!("yield");
assert!(self.ctx.in_generator);
assert!(p.ctx.in_generator);
//TODO
// Spec says
@ -604,7 +617,7 @@ impl<'a, I: Input> Parser<'a, I> {
}))
} else {
let has_star = eat!('*');
let arg = self.parse_assignment_expr()?;
let arg = p.parse_assignment_expr()?;
Ok(ExprKind::Yield(YieldExpr {
arg: Some(arg),
@ -615,7 +628,7 @@ impl<'a, I: Input> Parser<'a, I> {
}
/// 12.2.5 Array Initializer
fn parse_lit(&mut self) -> PResult<Lit> {
fn parse_lit(&mut self) -> PResult<'a, Lit> {
let v = match *cur!()? {
Word(Null) => {
bump!();


@ -5,7 +5,7 @@ use super::*;
#[parser]
impl<'a, I: Input> Parser<'a, I> {
/// Name from spec: 'LogicalORExpression'
pub(super) fn parse_bin_expr(&mut self) -> PResult<Box<Expr>> {
pub(super) fn parse_bin_expr(&mut self) -> PResult<'a, Box<Expr>> {
let left = self.parse_unary_expr()?;
return_if_arrow!(left);
@ -17,12 +17,16 @@ impl<'a, I: Input> Parser<'a, I> {
/// `minPrec` provides context that allows the function to stop and
/// defer further parsing to one of its callers when it encounters an
/// operator that has a lower precedence than the set it is parsing.
fn parse_bin_op_recursively(&mut self, left: Box<Expr>, min_prec: u8) -> PResult<Box<Expr>> {
fn parse_bin_op_recursively(
&mut self,
left: Box<Expr>,
min_prec: u8,
) -> PResult<'a, Box<Expr>> {
let op = match {
// Return left on eof
match cur!() {
Some(cur) => cur,
None => return Ok(left),
Ok(cur) => cur,
Err(..) => return Ok(left),
}
} {
&Word(Keyword(In)) if self.ctx.include_in_expr => op!("in"),
@ -35,7 +39,7 @@ impl<'a, I: Input> Parser<'a, I> {
if op.precedence() <= min_prec {
trace!(
self.logger,
self.session.logger,
"returning {:?} without parsing {:?} because min_prec={}, prec={}",
left,
op,
@ -47,7 +51,7 @@ impl<'a, I: Input> Parser<'a, I> {
}
bump!();
trace!(
self.logger,
self.session.logger,
"parsing binary op {:?} min_prec={}, prec={}",
op,
min_prec,
@ -61,7 +65,11 @@ impl<'a, I: Input> Parser<'a, I> {
// returning "unexpected token '**'" on next.
// But it's not a useful error message.
syntax_error!(SyntaxError::UnaryInExp)
syntax_error!(SyntaxError::UnaryInExp {
// FIXME: Use display
left: format!("{:?}", left),
left_span: left.span,
})
}
_ => {}
}
@ -91,7 +99,7 @@ impl<'a, I: Input> Parser<'a, I> {
/// Parse unary expression and update expression.
///
/// spec: 'UnaryExpression'
fn parse_unary_expr(&mut self) -> PResult<Box<Expr>> {
fn parse_unary_expr(&mut self) -> PResult<'a, Box<Expr>> {
let start = cur_pos!();
// Parse update expression
@ -165,16 +173,16 @@ impl<'a, I: Input> Parser<'a, I> {
Ok(expr)
}
fn parse_await_expr(&mut self) -> PResult<Box<Expr>> {
spanned!({
fn parse_await_expr(&mut self) -> PResult<'a, Box<Expr>> {
self.spanned(|p| {
assert_and_bump!("await");
assert!(self.ctx.in_async);
assert!(p.ctx.in_async);
if is!('*') {
syntax_error!(SyntaxError::AwaitStar)
syntax_error!(SyntaxError::AwaitStar);
}
let arg = self.parse_unary_expr()?;
let arg = p.parse_unary_expr()?;
Ok(ExprKind::Await(AwaitExpr { arg }))
})
}
@ -187,7 +195,9 @@ mod tests {
fn bin(s: &'static str) -> Box<Expr> {
test_parser(s, |p| {
p.parse_bin_expr().unwrap_or_else(|err| {
panic!("failed to parse '{}' as a binary expression: {:?}", s, err)
err.emit();
panic!("failed to parse '{}' as a binary expression", s)
})
})
}


@ -3,26 +3,37 @@ use swc_common::DUMMY_SP;
fn lhs(s: &'static str) -> Box<Expr> {
test_parser(s, |p| {
p.parse_lhs_expr().expect("failed to parse lhs expression")
p.parse_lhs_expr().unwrap_or_else(|err| {
err.emit();
unreachable!("failed to parse lhs expression")
})
})
}
fn new_expr(s: &'static str) -> Box<Expr> {
test_parser(s, |p| {
p.parse_new_expr().expect("failed to parse an expression")
p.parse_new_expr().unwrap_or_else(|err| {
err.emit();
unreachable!("failed to parse an expression")
})
})
}
fn member_expr(s: &'static str) -> Box<Expr> {
test_parser(s, |p| {
p.parse_member_expr()
.expect("failed to parse an expression")
p.parse_member_expr().unwrap_or_else(|err| {
err.emit();
unreachable!("failed to parse an expression")
})
})
}
fn expr(s: &'static str) -> Box<Expr> {
test_parser(s, |p| {
p.parse_expr().expect("failed to parse an expression")
p.parse_expr().unwrap_or_else(|err| {
err.emit();
unreachable!("failed to parse an expression")
})
})
}


@ -5,14 +5,14 @@ use super::*;
#[parser]
impl<'a, I: Input> Parser<'a, I> {
/// IdentifierReference
pub(super) fn parse_ident_ref(&mut self) -> PResult<Ident> {
pub(super) fn parse_ident_ref(&mut self) -> PResult<'a, Ident> {
let ctx = self.ctx;
self.parse_ident(!ctx.in_generator, !ctx.in_async)
}
/// LabelIdentifier
pub(super) fn parse_label_ident(&mut self) -> PResult<Ident> {
pub(super) fn parse_label_ident(&mut self) -> PResult<'a, Ident> {
let ctx = self.ctx;
self.parse_ident(!ctx.in_generator, !ctx.in_async)
@ -20,10 +20,10 @@ impl<'a, I: Input> Parser<'a, I> {
/// Use this when the spec says "IdentifierName".
/// This allows idents like `catch`.
pub(super) fn parse_ident_name(&mut self) -> PResult<Ident> {
spanned!({
pub(super) fn parse_ident_name(&mut self) -> PResult<'a, Ident> {
self.spanned(|p| {
let w = match cur!() {
Some(&Word(..)) => match bump!() {
Ok(&Word(..)) => match bump!() {
Word(w) => w,
_ => unreachable!(),
},
@ -37,11 +37,11 @@ impl<'a, I: Input> Parser<'a, I> {
/// Identifier
///
/// In strict mode, "yield" is SyntaxError if matched.
pub(super) fn parse_ident(&mut self, incl_yield: bool, incl_await: bool) -> PResult<Ident> {
spanned!({
let strict = self.cfg.strict;
pub(super) fn parse_ident(&mut self, incl_yield: bool, incl_await: bool) -> PResult<'a, Ident> {
self.spanned(|p| {
let strict = p.session.cfg.strict;
let w = match cur!() {
Some(&Word(..)) => match bump!() {
Ok(&Word(..)) => match bump!() {
Word(w) => w,
_ => unreachable!(),
},
@ -88,16 +88,16 @@ impl<'a, I: Input> Parser<'a, I> {
}
}
pub(super) trait MaybeOptionalIdentParser<Ident> {
fn parse_maybe_opt_binding_ident(&mut self) -> PResult<Ident>;
pub(super) trait MaybeOptionalIdentParser<'a, Ident> {
fn parse_maybe_opt_binding_ident(&mut self) -> PResult<'a, Ident>;
}
impl<'a, I: Input> MaybeOptionalIdentParser<Ident> for Parser<'a, I> {
fn parse_maybe_opt_binding_ident(&mut self) -> PResult<Ident> {
impl<'a, I: Input> MaybeOptionalIdentParser<'a, Ident> for Parser<'a, I> {
fn parse_maybe_opt_binding_ident(&mut self) -> PResult<'a, Ident> {
self.parse_binding_ident()
}
}
impl<'a, I: Input> MaybeOptionalIdentParser<Option<Ident>> for Parser<'a, I> {
fn parse_maybe_opt_binding_ident(&mut self) -> PResult<Option<Ident>> {
impl<'a, I: Input> MaybeOptionalIdentParser<'a, Option<Ident>> for Parser<'a, I> {
fn parse_maybe_opt_binding_ident(&mut self) -> PResult<'a, Option<Ident>> {
self.parse_opt_binding_ident()
}
}


@ -1,55 +1,33 @@
//! Note: this module requires `#![feature(nll)]`.
use lexer::{Input, Lexer};
use swc_common::{BytePos, Span};
use swc_common::{BytePos, Span, DUMMY_SP};
use token::*;
/// This struct is responsible for managing current token and peeked token.
pub(super) struct ParserInput<'a, I: Input> {
iter: ItemIter<'a, I>,
cur: Option<Item>,
/// Last of previous span
last_pos: BytePos,
iter: Lexer<'a, I>,
/// Span of the previous token.
last_span: Span,
cur: Option<TokenAndSpan>,
/// Peeked token
next: Option<Item>,
}
/// One token
#[derive(Debug)]
struct Item {
token: Token,
/// Had a line break before this token?
had_line_break: bool,
span: Span,
}
struct ItemIter<'a, I: Input>(Lexer<'a, I>);
impl<'a, I: Input> ItemIter<'a, I> {
fn next(&mut self) -> Option<Item> {
match self.0.next() {
Some(TokenAndSpan { token, span }) => Some(Item {
token,
span,
had_line_break: self.0.had_line_break_before_last(),
}),
None => None,
}
}
next: Option<TokenAndSpan>,
}
impl<'a, I: Input> ParserInput<'a, I> {
pub fn new(lexer: Lexer<'a, I>) -> Self {
ParserInput {
iter: ItemIter(lexer),
iter: lexer,
cur: None,
last_pos: BytePos(0),
last_span: DUMMY_SP,
next: None,
}
}
fn bump_inner(&mut self) -> Option<Token> {
let prev = self.cur.take();
self.last_pos = match prev {
Some(Item { span, .. }) => span.hi(),
_ => self.last_pos,
self.last_span = match prev {
Some(TokenAndSpan { span, .. }) => span,
_ => self.last_span,
};
// If we have peeked a token, take it instead of calling lexer.next()
@ -129,18 +107,13 @@ impl<'a, I: Input> ParserInput<'a, I> {
}
pub fn eat(&mut self, expected: &Token) -> bool {
match self.cur() {
Some(t) => {
if *expected == *t {
self.bump();
true
} else {
false
}
}
_ => false,
let v = self.is(expected);
if v {
self.bump();
}
v
}
pub fn eat_keyword(&mut self, kwd: Keyword) -> bool {
self.eat(&Word(Keyword(kwd)))
}
@ -149,10 +122,23 @@ impl<'a, I: Input> ParserInput<'a, I> {
self.cur
.as_ref()
.map(|item| item.span.lo())
.unwrap_or(self.last_pos)
.unwrap_or_else(|| self.last_pos())
}
/// Returns the last position of the previous token.
pub const fn last_pos(&self) -> BytePos {
self.last_pos
pub fn cur_span(&self) -> Span {
self.cur
.as_ref()
.map(|item| item.span)
.unwrap_or(self.last_span)
}
/// Returns last byte position of previous token.
pub fn last_pos(&self) -> BytePos {
self.last_span.hi()
}
/// Returns span of the previous token.
pub const fn last_span(&self) -> Span {
self.last_span
}
}
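Illustrative aside (not part of this commit): a minimal, self-contained sketch of why the full previous span is kept instead of only a `last_pos` — the EOF diagnostic needs a span to point at. `Span`, `MiniInput` and `eof_span` below are simplified stand-ins, not the swc types.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span { lo: u32, hi: u32 } // stand-in for swc_common::Span
const DUMMY_SP: Span = Span { lo: 0, hi: 0 };
struct MiniInput {
    tokens: Vec<(&'static str, Span)>,
    pos: usize,
    /// Span of the previously consumed token.
    last_span: Span,
}
impl MiniInput {
    fn bump(&mut self) -> Option<&'static str> {
        let (tok, span) = *self.tokens.get(self.pos)?;
        self.last_span = span; // remember the span of the token we are consuming
        self.pos += 1;
        Some(tok)
    }
    /// Where an "expected some tokens after here" error would point on EOF.
    fn eof_span(&self) -> Span {
        Span { lo: self.last_span.hi, hi: self.last_span.hi }
    }
}
fn main() {
    let mut input = MiniInput {
        tokens: vec![("let", Span { lo: 0, hi: 3 }), ("x", Span { lo: 4, hi: 5 })],
        pos: 0,
        last_span: DUMMY_SP,
    };
    while input.bump().is_some() {}
    println!("report EOF at {:?}", input.eof_span());
}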

View File

@ -1,20 +1,11 @@
macro_rules! unexpected {
($p:expr) => {{
let pos = cur_pos!($p);
let cur = cur!($p);
let cur = cur!($p)?;
unimplemented!("unexpected token: {:?} at {:?}", cur, pos);
}};
}
macro_rules! syntax_error {
($p:expr, $s:expr) => {{
let err = Error::Syntax($p.input.cur().cloned(), cur_pos!($p), $s, file!(), line!());
error!($p.logger, "failed to parse: {:?}", err);
let res: PResult<!> = Err(err);
res?
}};
}
/// This handles automatic semicolon insertion.
///
/// Returns bool.
@ -22,7 +13,7 @@ macro_rules! is {
($p:expr, BindingIdent) => {{
match cur!($p) {
// TODO: Exclude some keywords
Some(&Word(ref w)) => !w.is_reserved_word($p.cfg.strict),
Ok(&Word(ref w)) => !w.is_reserved_word($p.session.cfg.strict),
_ => false,
}
}};
@ -30,20 +21,20 @@ macro_rules! is {
($p:expr, IdentRef) => {{
match cur!($p) {
// TODO: Exclude some keywords
Some(&Word(ref w)) => !w.is_reserved_word($p.cfg.strict),
Ok(&Word(ref w)) => !w.is_reserved_word($p.session.cfg.strict),
_ => false,
}
}};
($p:expr, IdentName) => {{
match cur!($p) {
Some(&Word(..)) => true,
Ok(&Word(..)) => true,
_ => false,
}
}};
($p:expr, ';') => {{
$p.input.is(&Token::Semi) || cur!($p) == None || is!($p, '}')
$p.input.is(&Token::Semi) || eof!($p) || is!($p, '}')
|| $p.input.had_line_break_before_cur()
}};
@ -55,9 +46,7 @@ macro_rules! is {
/// Returns true on eof.
macro_rules! eof {
($p:expr) => {
cur!($p) == None
};
($p:expr) => { cur!($p).is_err() };
}
macro_rules! peeked_is {
@ -92,8 +81,8 @@ macro_rules! assert_and_bump {
/// if token has data like string.
macro_rules! eat {
($p:expr, ';') => {{
debug!($p.logger, "eat(';'): cur={:?}", cur!($p));
$p.input.eat(&Token::Semi) || cur!($p) == None || is!($p, '}')
debug!($p.session.logger, "eat(';'): cur={:?}", cur!($p));
$p.input.eat(&Token::Semi) || eof!($p) || is!($p, '}')
|| $p.input.had_line_break_before_cur()
}};
@ -141,9 +130,17 @@ macro_rules! expect_exact {
}
macro_rules! cur {
($parser:expr) => {
$parser.input.cur()
};
($p:expr) => {{
let pos = $p.input.last_pos();
let last = Span::new(pos, pos, Default::default());
match $p.input.cur() {
Some(c) => Ok(c),
None => Err($crate::error::Eof {
last,
handler: &$p.session.handler,
}),
}
}};
}
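Illustrative aside (not part of this commit): a rough, self-contained model of what the reworked `cur!` gives call sites — end of input becomes an `Err` carrying a position, so plain `?` replaces the old `Option`/`NoneError` handling. `Eof`, `cur` and `parse_semi` below are stand-ins, not this crate's API.
#[derive(Debug)]
struct Eof { last: (u32, u32) } // stand-in for error::Eof (no Handler here)
fn cur<'a>(tokens: &[&'a str], idx: usize, last: (u32, u32)) -> Result<&'a str, Eof> {
    tokens.get(idx).cloned().ok_or(Eof { last })
}
fn parse_semi(tokens: &[&str], idx: usize, last: (u32, u32)) -> Result<(), Eof> {
    // `?` propagates the EOF error, like `cur!($p)?` inside the parser.
    let tok = cur(tokens, idx, last)?;
    assert_eq!(tok, ";");
    Ok(())
}
fn main() {
    assert!(parse_semi(&["let", "x", ";"], 2, (5, 6)).is_ok());
    assert!(parse_semi(&["let", "x"], 2, (4, 5)).is_err()); // EOF carries a position to report
}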
macro_rules! peek {
@ -151,9 +148,20 @@ macro_rules! peek {
assert!(
$p.input.knows_cur(),
"parser should not call peek() without knowing current token.
Current token is {:?}", cur!($p)
Current token is {:?}",
cur!($p),
);
$p.input.peek()
let pos = cur_pos!($p);
let last = Span::new(pos, pos, Default::default());
match $p.input.peek() {
Some(c) => Ok(c),
None => Err($crate::error::Eof {
//TODO: Use whole span
last,
handler: &$p.session.handler,
}),
}
}};
}
@ -168,11 +176,14 @@ macro_rules! bump {
}
macro_rules! cur_pos {
($p:expr) => { $p.input.cur_pos() }
($p:expr) => {{
let pos = $p.input.cur_pos();
pos
}}
}
macro_rules! last_pos {
($p:expr) => { $p.input.last_pos()};
($p:expr) => { $p.input.last_span().hi() };
}
macro_rules! return_if_arrow {

View File

@ -2,17 +2,16 @@
#![deny(non_snake_case)]
use self::input::ParserInput;
use self::util::ParseObject;
use Config;
use Session;
use ast::*;
use error::SyntaxError;
use lexer::Input;
use lexer::Lexer;
use parser_macros::parser;
use slog::Logger;
use std::ops::{Deref, DerefMut};
use std::option::NoneError;
use swc_atoms::JsWord;
use swc_common::{BytePos, Span};
use swc_common::errors::Diagnostic;
use token::*;
#[macro_use]
@ -26,23 +25,11 @@ mod pat;
pub mod input;
mod util;
pub type PResult<T> = Result<T, Error>;
pub type PResult<'a, T> = Result<T, Diagnostic<'a>>;
#[derive(Debug)]
pub enum Error {
Eof,
Syntax(Option<Token>, BytePos, SyntaxError, &'static str, u32),
}
impl From<NoneError> for Error {
fn from(_: NoneError) -> Self {
Error::Eof
}
}
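Illustrative aside (not part of this commit): the caller-side pattern the new `PResult<'a, T>` enables — intermediate code propagates with `?`, and only the outermost caller decides whether to emit. `Diagnostic`, `PResult` and `parse_number` below are simplified stand-ins, not swc_common's types.
struct Diagnostic { msg: String }
impl Diagnostic {
    fn emit(self) {
        eprintln!("error: {}", self.msg); // the real Handler also renders spans
    }
}
type PResult<T> = Result<T, Diagnostic>;
fn parse_number(src: &str) -> PResult<u32> {
    src.trim().parse().map_err(|_| Diagnostic {
        msg: format!("expected a number, found {:?}", src),
    })
}
fn main() {
    match parse_number("abc") {
        Ok(n) => println!("parsed {}", n),
        Err(err) => err.emit(), // mirrors the `err.emit()` calls in the updated tests
    }
}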
/// EcmaScript parser.
pub struct Parser<'a, I: Input> {
logger: &'a Logger,
cfg: Config,
session: Session<'a>,
ctx: Context,
state: State,
input: ParserInput<'a, I>,
@ -66,28 +53,27 @@ struct State {
}
impl<'a, I: Input> Parser<'a, I> {
pub fn new(logger: &'a Logger, cfg: Config, input: I) -> Self {
pub fn new(session: Session<'a>, input: I) -> Self {
Parser {
logger,
input: ParserInput::new(Lexer::new(logger, cfg, input)),
session,
input: ParserInput::new(Lexer::new(session, input)),
ctx: Default::default(),
cfg: cfg,
state: Default::default(),
}
}
#[parser]
pub fn parse_script(&mut self) -> PResult<Vec<Stmt>> {
self.cfg.module = false;
pub fn parse_script(&mut self) -> PResult<'a, Vec<Stmt>> {
self.session.cfg.module = false;
self.parse_block_body(true, None)
}
#[parser]
pub fn parse_module(&mut self) -> PResult<Module> {
//TODO: parse() -> PResult<Program>
self.cfg.module = true;
self.cfg.strict = true;
pub fn parse_module(&mut self) -> PResult<'a, Module> {
//TODO: parse() -> PResult<'a, Program>
self.session.cfg.module = true;
self.session.cfg.strict = true;
self.parse_block_body(true, None)
.map(|body| Module { body })
@ -99,7 +85,7 @@ fn test_parser<F, Ret>(s: &'static str, f: F) -> Ret
where
F: FnOnce(&mut Parser<::CharIndices>) -> Ret,
{
let logger = ::testing::logger().new(o!("src" => s));
let mut p = Parser::new(&logger, Default::default(), ::CharIndices(s.char_indices()));
f(&mut p)
::with_test_sess(s, |session| {
f(&mut Parser::new(session, ::CharIndices(s.char_indices())))
})
}

View File

@ -5,9 +5,9 @@ use super::*;
#[parser]
impl<'a, I: Input> Parser<'a, I> {
/// Parse an object literal or object pattern.
pub(super) fn parse_object<T>(&mut self) -> PResult<T>
pub(super) fn parse_object<T>(&mut self) -> PResult<'a, T>
where
Self: ParseObject<T>,
Self: ParseObject<'a, T>,
{
let start = cur_pos!();
assert_and_bump!('{');
@ -34,7 +34,7 @@ impl<'a, I: Input> Parser<'a, I> {
}
/// spec: 'PropertyName'
pub(super) fn parse_prop_name(&mut self) -> PResult<PropName> {
pub(super) fn parse_prop_name(&mut self) -> PResult<'a, PropName> {
let start = cur_pos!();
let v = match *cur!()? {
@ -69,7 +69,7 @@ impl<'a, I: Input> Parser<'a, I> {
}
#[parser]
impl<'a, I: Input> ParseObject<Box<Expr>> for Parser<'a, I> {
impl<'a, I: Input> ParseObject<'a, Box<Expr>> for Parser<'a, I> {
type Prop = Prop;
fn make_object(span: Span, props: Vec<Self::Prop>) -> Box<Expr> {
@ -80,7 +80,7 @@ impl<'a, I: Input> ParseObject<Box<Expr>> for Parser<'a, I> {
}
/// spec: 'PropertyDefinition'
fn parse_object_prop(&mut self) -> PResult<Self::Prop> {
fn parse_object_prop(&mut self) -> PResult<'a, Self::Prop> {
let start = cur_pos!();
// Parse as 'MethodDefinition'
@ -130,7 +130,7 @@ impl<'a, I: Input> ParseObject<Box<Expr>> for Parser<'a, I> {
let is_reserved_word = {
// FIXME: Use extension trait instead of this.
let word = Word::from(ident.sym);
let r = word.is_reserved_word(self.cfg.strict);
let r = word.is_reserved_word(self.session.cfg.strict);
ident = Ident {
sym: word.into(),
..ident
@ -138,7 +138,7 @@ impl<'a, I: Input> ParseObject<Box<Expr>> for Parser<'a, I> {
r
};
if is_reserved_word {
syntax_error!(SyntaxError::ReservedWordInObjShorthandOrPat)
syntax_error!(SyntaxError::ReservedWordInObjShorthandOrPat);
}
if eat!('=') {
@ -199,7 +199,7 @@ impl<'a, I: Input> ParseObject<Box<Expr>> for Parser<'a, I> {
}
#[parser]
impl<'a, I: Input> ParseObject<Pat> for Parser<'a, I> {
impl<'a, I: Input> ParseObject<'a, Pat> for Parser<'a, I> {
type Prop = ObjectPatProp;
fn make_object(span: Span, props: Vec<Self::Prop>) -> Pat {
@ -210,7 +210,7 @@ impl<'a, I: Input> ParseObject<Pat> for Parser<'a, I> {
}
/// Production 'BindingProperty'
fn parse_object_prop(&mut self) -> PResult<Self::Prop> {
fn parse_object_prop(&mut self) -> PResult<'a, Self::Prop> {
let key = self.parse_prop_name()?;
if eat!(':') {
let value = box self.parse_binding_element()?;

View File

@ -4,7 +4,7 @@ use std::iter;
#[parser]
impl<'a, I: Input> Parser<'a, I> {
pub(super) fn parse_opt_binding_ident(&mut self) -> PResult<Option<Ident>> {
pub(super) fn parse_opt_binding_ident(&mut self) -> PResult<'a, Option<Ident>> {
if is!(BindingIdent) {
self.parse_binding_ident().map(Some)
} else {
@ -15,19 +15,19 @@ impl<'a, I: Input> Parser<'a, I> {
/// babel: `parseBindingIdentifier`
///
/// spec: `BindingIdentifier`
pub(super) fn parse_binding_ident(&mut self) -> PResult<Ident> {
pub(super) fn parse_binding_ident(&mut self) -> PResult<'a, Ident> {
// "yield" and "await" is **lexically** accepted.
let ident = self.parse_ident(true, true)?;
if self.cfg.strict {
if self.session.cfg.strict {
if &*ident.sym == "arguments" || &*ident.sym == "eval" {
syntax_error!(SyntaxError::EvalAndArgumentsInStrict)
syntax_error!(SyntaxError::EvalAndArgumentsInStrict);
}
}
Ok(ident)
}
pub(super) fn parse_binding_pat_or_ident(&mut self) -> PResult<Pat> {
pub(super) fn parse_binding_pat_or_ident(&mut self) -> PResult<'a, Pat> {
match *cur!()? {
tok!("yield") | Word(..) => self.parse_binding_ident().map(Pat::from),
tok!('[') => self.parse_array_binding_pat(),
@ -43,7 +43,7 @@ impl<'a, I: Input> Parser<'a, I> {
}
/// babel: `parseBindingAtom`
pub(super) fn parse_binding_element(&mut self) -> PResult<Pat> {
pub(super) fn parse_binding_element(&mut self) -> PResult<'a, Pat> {
let start = cur_pos!();
let left = self.parse_binding_pat_or_ident()?;
@ -61,8 +61,8 @@ impl<'a, I: Input> Parser<'a, I> {
Ok(left)
}
fn parse_array_binding_pat(&mut self) -> PResult<Pat> {
spanned!({
fn parse_array_binding_pat(&mut self) -> PResult<'a, Pat> {
self.spanned(|p| {
assert_and_bump!('[');
let mut elems = vec![];
@ -79,7 +79,7 @@ impl<'a, I: Input> Parser<'a, I> {
let start = cur_pos!();
if eat!("...") {
let pat = self.parse_binding_pat_or_ident()?;
let pat = p.parse_binding_pat_or_ident()?;
let pat = Pat {
span: span!(start),
node: PatKind::Rest(box pat),
@ -87,7 +87,7 @@ impl<'a, I: Input> Parser<'a, I> {
elems.push(Some(pat));
break;
} else {
elems.push(self.parse_binding_element().map(Some)?);
elems.push(p.parse_binding_element().map(Some)?);
}
}
@ -98,13 +98,13 @@ impl<'a, I: Input> Parser<'a, I> {
}
/// spec: 'FormalParameter'
pub(super) fn parse_formal_param(&mut self) -> PResult<Pat> {
pub(super) fn parse_formal_param(&mut self) -> PResult<'a, Pat> {
self.parse_binding_element()
}
///
/// spec: 'FormalParameterList'
pub(super) fn parse_formal_params(&mut self) -> PResult<Vec<Pat>> {
pub(super) fn parse_formal_params(&mut self) -> PResult<'a, Vec<Pat>> {
let mut first = true;
let mut params = vec![];
@ -137,7 +137,7 @@ impl<'a, I: Input> Parser<'a, I> {
Ok(params)
}
pub(super) fn parse_unique_formal_params(&mut self) -> PResult<Vec<Pat>> {
pub(super) fn parse_unique_formal_params(&mut self) -> PResult<'a, Vec<Pat>> {
// FIXME: This is wrong.
self.parse_formal_params()
}
@ -147,7 +147,7 @@ impl<'a, I: Input> Parser<'a, I> {
impl<'a, I: Input> Parser<'a, I> {
/// This does not return 'rest' pattern because non-last parameter cannot be
/// rest.
pub(super) fn reparse_expr_as_pat(&mut self, box expr: Box<Expr>) -> PResult<Pat> {
pub(super) fn reparse_expr_as_pat(&mut self, box expr: Box<Expr>) -> PResult<'a, Pat> {
let span = expr.span;
match expr.node {
@ -209,7 +209,7 @@ impl<'a, I: Input> Parser<'a, I> {
prop
),
})
.collect::<PResult<_>>()?),
.collect::<PResult<'a, _>>()?),
});
}
ExprKind::Ident(ident) => return Ok(ident.into()),
@ -269,7 +269,7 @@ impl<'a, I: Input> Parser<'a, I> {
pub(super) fn parse_exprs_as_params(
&mut self,
mut exprs: Vec<ExprOrSpread>,
) -> PResult<Vec<Pat>> {
) -> PResult<'a, Vec<Pat>> {
let len = exprs.len();
if len == 0 {
return Ok(vec![]);

View File

@ -9,19 +9,20 @@ impl<'a, I: Input> Parser<'a, I> {
&mut self,
top_level: bool,
end: Option<&Token>,
) -> PResult<Vec<Type>>
) -> PResult<'a, Vec<Type>>
where
Self: StmtLikeParser<Type>,
Self: StmtLikeParser<'a, Type>,
Type: From<Stmt>,
{
let mut stmts = vec![];
while {
let b = cur!() != end;
let b = cur!().ok() != end;
b
} {
let stmt = self.parse_stmt_like(true, top_level)?;
stmts.push(stmt);
}
if end.is_some() {
bump!();
}
@ -29,18 +30,18 @@ impl<'a, I: Input> Parser<'a, I> {
Ok(stmts)
}
fn parse_stmt(&mut self, top_level: bool) -> PResult<Stmt> {
fn parse_stmt(&mut self, top_level: bool) -> PResult<'a, Stmt> {
self.parse_stmt_internal(false, top_level)
}
fn parse_stmt_list_item(&mut self, top_level: bool) -> PResult<Stmt> {
fn parse_stmt_list_item(&mut self, top_level: bool) -> PResult<'a, Stmt> {
self.parse_stmt_internal(true, top_level)
}
/// Parse a statement, declaration or module item.
fn parse_stmt_like<Type>(&mut self, include_decl: bool, top_level: bool) -> PResult<Type>
fn parse_stmt_like<Type>(&mut self, include_decl: bool, top_level: bool) -> PResult<'a, Type>
where
Self: StmtLikeParser<Type>,
Self: StmtLikeParser<'a, Type>,
Type: From<Stmt>,
{
if <Self as StmtLikeParser<Type>>::accept_import_export() {
@ -52,91 +53,118 @@ impl<'a, I: Input> Parser<'a, I> {
.map(From::from)
}
fn parse_stmt_internal(&mut self, include_decl: bool, top_level: bool) -> PResult<Stmt> {
fn parse_stmt_internal(&mut self, include_decl: bool, top_level: bool) -> PResult<'a, Stmt> {
let start = cur_pos!();
if is_one_of!("break", "continue") {
return self.spanned(|p| {
let is_break = is!("break");
bump!();
let label = if eat!(';') {
None
} else {
let i = p.parse_label_ident().map(Some)?;
expect!(';');
i
};
Ok(if is_break {
StmtKind::Break(BreakStmt { label })
} else {
StmtKind::Continue(ContinueStmt { label })
})
});
}
if is!("debugger") {
return self.spanned(|p| {
bump!();
expect!(';');
Ok(StmtKind::Debugger)
});
}
if is!("do") {
return self.parse_do_stmt();
}
if is!("for") {
return self.parse_for_stmt();
}
if is!("function") {
if !include_decl {
unexpected!()
}
return self.parse_fn_decl().map(Stmt::from);
}
if is!("class") {
if !include_decl {
unexpected!()
}
return self.parse_class_decl().map(Stmt::from);
}
if is!("if") {
return self.parse_if_stmt();
}
if is!("return") {
return self.parse_return_stmt();
}
if is!("switch") {
return self.parse_switch_stmt();
}
if is!("throw") {
return self.parse_throw_stmt();
}
if is!("try") {
return self.parse_try_stmt();
}
if is!("with") {
return self.parse_with_stmt();
}
if is!("while") {
return self.parse_while_stmt();
}
if is!("var") || (include_decl && is!("const")) {
let v = self.parse_var_stmt(false)?;
return Ok(Stmt {
span: v.span,
node: StmtKind::Decl(Decl::Var(v)),
});
}
// 'let' can start an identifier reference.
if include_decl && is!("let") {
let is_keyword = match peek!() {
Ok(t) => t.follows_keyword_let(self.session.cfg.strict),
_ => false,
};
if is_keyword {
let v = self.parse_var_stmt(false)?;
return Ok(Stmt {
span: v.span,
node: StmtKind::Decl(Decl::Var(v)),
});
}
}
match *cur!()? {
Word(Keyword(w)) => match w {
Break | Continue => {
return spanned!({
bump!();
let is_break = w == Break;
let label = if eat!(';') {
None
} else {
let i = self.parse_label_ident().map(Some)?;
expect!(';');
i
};
Ok(if is_break {
StmtKind::Break(BreakStmt { label })
} else {
StmtKind::Continue(ContinueStmt { label })
})
})
}
Debugger => {
return spanned!({
bump!();
expect!(';');
Ok(StmtKind::Debugger)
})
}
Do => return self.parse_do_stmt(),
For => return self.parse_for_stmt(),
Function => {
if !include_decl {
unexpected!()
}
return self.parse_fn_decl().map(Stmt::from);
}
Class if !include_decl => unexpected!(),
Class => return self.parse_class_decl().map(Stmt::from),
If => return self.parse_if_stmt(),
Return => return self.parse_return_stmt(),
Switch => return self.parse_switch_stmt(),
Throw => return self.parse_throw_stmt(),
Try => return self.parse_try_stmt(),
// `let;` is an identifier reference.
Let if include_decl => match peek!() {
Some(t) if t.follows_keyword_let(self.cfg.strict) => {
let v = self.parse_var_stmt(false)?;
return Ok(Stmt {
span: v.span,
node: StmtKind::Decl(Decl::Var(v)),
});
}
_ => {}
},
Const | Var => {
if w == Var || include_decl {
let v = self.parse_var_stmt(false)?;
return Ok(Stmt {
span: v.span,
node: StmtKind::Decl(Decl::Var(v)),
});
}
// Handle `let;` by forwarding it to expression statement
}
While => return self.parse_while_stmt(),
With => return self.parse_with_stmt(),
_ => {}
},
LBrace => return spanned!({ self.parse_block().map(StmtKind::Block) }),
LBrace => return self.spanned(|p| p.parse_block().map(StmtKind::Block)),
Semi => {
return spanned!({
return self.spanned(|p| {
bump!();
Ok(StmtKind::Empty)
})
@ -181,24 +209,24 @@ impl<'a, I: Input> Parser<'a, I> {
}.into())
}
fn parse_if_stmt(&mut self) -> PResult<Stmt> {
spanned!({
fn parse_if_stmt(&mut self) -> PResult<'a, Stmt> {
self.spanned(|p| {
assert_and_bump!("if");
expect!('(');
let test = self.include_in_expr(true).parse_expr()?;
let test = p.include_in_expr(true).parse_expr()?;
expect!(')');
let cons = {
// Annex B
if !self.cfg.strict && is!("function") {
if !p.session.cfg.strict && is!("function") {
// TODO: report error?
}
box self.parse_stmt(false)?
box p.parse_stmt(false)?
};
let alt = if eat!("else") {
Some(box self.parse_stmt(false)?)
Some(box p.parse_stmt(false)?)
} else {
None
};
@ -207,26 +235,26 @@ impl<'a, I: Input> Parser<'a, I> {
})
}
fn parse_return_stmt(&mut self) -> PResult<Stmt> {
spanned!({
fn parse_return_stmt(&mut self) -> PResult<'a, Stmt> {
self.spanned(|p| {
assert_and_bump!("return");
let arg = if is!(';') {
None
} else {
self.include_in_expr(true).parse_expr().map(Some)?
p.include_in_expr(true).parse_expr().map(Some)?
};
expect!(';');
Ok(StmtKind::Return(ReturnStmt { arg }))
})
}
fn parse_switch_stmt(&mut self) -> PResult<Stmt> {
spanned!({
fn parse_switch_stmt(&mut self) -> PResult<'a, Stmt> {
self.spanned(|p| {
assert_and_bump!("switch");
expect!('(');
let discriminant = self.include_in_expr(true).parse_expr()?;
let discriminant = p.include_in_expr(true).parse_expr()?;
expect!(')');
let mut cur = None;
@ -240,10 +268,10 @@ impl<'a, I: Input> Parser<'a, I> {
bump!();
cases.extend(cur.take());
let test = if is_case {
self.include_in_expr(true).parse_expr().map(Some)?
p.include_in_expr(true).parse_expr().map(Some)?
} else {
if has_default {
syntax_error!(SyntaxError::MultipleDefault)
syntax_error!(SyntaxError::MultipleDefault);
}
has_default = true;
None
@ -254,7 +282,7 @@ impl<'a, I: Input> Parser<'a, I> {
} else {
match cur {
Some(ref mut cur) => {
cur.cons.push(self.parse_stmt_list_item(false)?);
cur.cons.push(p.parse_stmt_list_item(false)?);
}
None => unexpected!(),
}
@ -270,30 +298,31 @@ impl<'a, I: Input> Parser<'a, I> {
})
}
fn parse_throw_stmt(&mut self) -> PResult<Stmt> {
spanned!({
fn parse_throw_stmt(&mut self) -> PResult<'a, Stmt> {
self.spanned(|p| {
assert_and_bump!("throw");
if self.input.had_line_break_before_cur() {
syntax_error!(SyntaxError::LineBreakInThrow)
if p.input.had_line_break_before_cur() {
// TODO: Suggest throw arg;
syntax_error!(SyntaxError::LineBreakInThrow);
}
let arg = self.include_in_expr(true).parse_expr()?;
let arg = p.include_in_expr(true).parse_expr()?;
expect!(';');
Ok(StmtKind::Throw(ThrowStmt { arg }))
})
}
fn parse_try_stmt(&mut self) -> PResult<Stmt> {
spanned!({
fn parse_try_stmt(&mut self) -> PResult<'a, Stmt> {
self.spanned(|p| {
assert_and_bump!("try");
let block = self.parse_block()?;
let block = p.parse_block()?;
let handler = if eat!("catch") {
let param = self.parse_catch_param()?;
self.parse_block()
let param = p.parse_catch_param()?;
p.parse_block()
.map(|body| CatchClause { param, body })
.map(Some)?
} else {
@ -301,7 +330,7 @@ impl<'a, I: Input> Parser<'a, I> {
};
let finalizer = if eat!("finally") {
self.parse_block().map(Some)?
p.parse_block().map(Some)?
} else {
if handler.is_none() {
unexpected!();
@ -317,14 +346,14 @@ impl<'a, I: Input> Parser<'a, I> {
})
}
fn parse_catch_param(&mut self) -> PResult<Pat> {
fn parse_catch_param(&mut self) -> PResult<'a, Pat> {
expect!('(');
let pat = self.parse_binding_pat_or_ident()?;
expect!(')');
Ok(pat)
}
fn parse_var_stmt(&mut self, for_loop: bool) -> PResult<VarDecl> {
fn parse_var_stmt(&mut self, for_loop: bool) -> PResult<'a, VarDecl> {
let start = cur_pos!();
let kind = match bump!() {
tok!("const") => VarDeclKind::Const,
@ -351,7 +380,7 @@ impl<'a, I: Input> Parser<'a, I> {
})
}
fn parse_var_declarator(&mut self, for_loop: bool) -> PResult<VarDeclarator> {
fn parse_var_declarator(&mut self, for_loop: bool) -> PResult<'a, VarDeclarator> {
let start = cur_pos!();
let name = self.parse_binding_pat_or_ident()?;
@ -363,7 +392,7 @@ impl<'a, I: Input> Parser<'a, I> {
// Destructuring bindings require initializers.
match name.node {
PatKind::Ident(..) => None,
_ => syntax_error!(SyntaxError::PatVarWithoutInit { span: span!(start) }),
_ => syntax_error!(SyntaxError::PatVarWithoutInit),
}
}
} else {
@ -378,13 +407,13 @@ impl<'a, I: Input> Parser<'a, I> {
});
}
fn parse_do_stmt(&mut self) -> PResult<Stmt> {
spanned!({
fn parse_do_stmt(&mut self) -> PResult<'a, Stmt> {
self.spanned(|p| {
assert_and_bump!("do");
let body = box self.parse_stmt(false)?;
let body = box p.parse_stmt(false)?;
expect!("while");
let test = self.include_in_expr(true).parse_expr()?;
let test = p.include_in_expr(true).parse_expr()?;
// We *may* eat semicolon.
let _ = eat!(';');
@ -393,49 +422,49 @@ impl<'a, I: Input> Parser<'a, I> {
})
}
fn parse_while_stmt(&mut self) -> PResult<Stmt> {
spanned!({
fn parse_while_stmt(&mut self) -> PResult<'a, Stmt> {
self.spanned(|p| {
assert_and_bump!("while");
expect!('(');
let test = self.include_in_expr(true).parse_expr()?;
let test = p.include_in_expr(true).parse_expr()?;
expect!(')');
let body = box self.parse_stmt(false)?;
let body = box p.parse_stmt(false)?;
Ok(StmtKind::While(WhileStmt { test, body }))
})
}
fn parse_with_stmt(&mut self) -> PResult<Stmt> {
spanned!({
fn parse_with_stmt(&mut self) -> PResult<'a, Stmt> {
self.spanned(|p| {
assert_and_bump!("with");
expect!('(');
let obj = self.include_in_expr(true).parse_expr()?;
let obj = p.include_in_expr(true).parse_expr()?;
expect!(')');
let body = box self.parse_stmt(false)?;
let body = box p.parse_stmt(false)?;
Ok(StmtKind::With(WithStmt { obj, body }))
})
}
pub(super) fn parse_block(&mut self) -> PResult<BlockStmt> {
spanned!({
pub(super) fn parse_block(&mut self) -> PResult<'a, BlockStmt> {
self.spanned(|p| {
expect!('{');
let stmts = self.parse_block_body(false, Some(&RBrace))?;
let stmts = p.parse_block_body(false, Some(&RBrace))?;
Ok(stmts)
})
}
fn parse_labelled_stmt(&mut self, label: Ident) -> PResult<Stmt> {
fn parse_labelled_stmt(&mut self, label: Ident) -> PResult<'a, Stmt> {
let start = label.span.lo();
for l in &self.state.labels {
if label.sym == *l {
syntax_error!(SyntaxError::DuplicateLabel(label.sym.clone()))
syntax_error!(SyntaxError::DuplicateLabel(label.sym.clone()));
}
}
let body = box if is!("function") {
@ -450,13 +479,13 @@ impl<'a, I: Input> Parser<'a, I> {
})
}
fn parse_for_stmt(&mut self) -> PResult<Stmt> {
spanned!({
fn parse_for_stmt(&mut self) -> PResult<'a, Stmt> {
self.spanned(|p| {
assert_and_bump!("for");
expect!('(');
let head = self.parse_for_head()?;
let head = p.parse_for_head()?;
expect!(')');
let body = box self.parse_stmt(false)?;
let body = box p.parse_stmt(false)?;
Ok(match head {
ForHead::For { init, test, update } => StmtKind::For(ForStmt {
@ -471,11 +500,11 @@ impl<'a, I: Input> Parser<'a, I> {
})
}
fn parse_for_head(&mut self) -> PResult<ForHead> {
fn parse_for_head(&mut self) -> PResult<'a, ForHead> {
let start = cur_pos!();
if is_one_of!("const", "var")
|| (is!("let") && peek!()?.follows_keyword_let(self.cfg.strict))
|| (is!("let") && peek!()?.follows_keyword_let(self.session.cfg.strict))
{
let decl = self.parse_var_stmt(true)?;
@ -504,7 +533,7 @@ impl<'a, I: Input> Parser<'a, I> {
self.parse_normal_for_head(Some(VarDeclOrExpr::Expr(init)))
}
fn parse_for_each_head(&mut self, left: VarDeclOrPat) -> PResult<ForHead> {
fn parse_for_each_head(&mut self, left: VarDeclOrPat) -> PResult<'a, ForHead> {
let of = bump!() == tok!("of");
if of {
let right = self.include_in_expr(true).parse_assignment_expr()?;
@ -515,7 +544,7 @@ impl<'a, I: Input> Parser<'a, I> {
}
}
fn parse_normal_for_head(&mut self, init: Option<VarDeclOrExpr>) -> PResult<ForHead> {
fn parse_normal_for_head(&mut self, init: Option<VarDeclOrExpr>) -> PResult<'a, ForHead> {
let test = if eat_exact!(';') {
None
} else {
@ -551,16 +580,16 @@ enum ForHead {
},
}
pub(super) trait StmtLikeParser<Type> {
pub(super) trait StmtLikeParser<'a, Type> {
fn accept_import_export() -> bool;
fn handle_import_export(&mut self, top_level: bool) -> PResult<Type>;
fn handle_import_export(&mut self, top_level: bool) -> PResult<'a, Type>;
}
impl<'a, I: Input> StmtLikeParser<Stmt> for Parser<'a, I> {
impl<'a, I: Input> StmtLikeParser<'a, Stmt> for Parser<'a, I> {
fn accept_import_export() -> bool {
false
}
fn handle_import_export(&mut self, top_level: bool) -> PResult<Stmt> {
fn handle_import_export(&mut self, top_level: bool) -> PResult<'a, Stmt> {
unreachable!()
}
}
@ -572,12 +601,18 @@ mod tests {
fn stmt(s: &'static str) -> Stmt {
test_parser(s, |p| {
p.parse_stmt(true).expect("failed to parse a statement")
p.parse_stmt(true).unwrap_or_else(|err| {
err.emit();
unreachable!("failed to parse a statement")
})
})
}
fn expr(s: &'static str) -> Box<Expr> {
test_parser(s, |p| {
p.parse_expr().expect("failed to parse an expression")
p.parse_expr().unwrap_or_else(|err| {
err.emit();
unreachable!("failed to parse an expression")
})
})
}

View File

@ -2,7 +2,7 @@ use super::*;
#[parser]
impl<'a, I: Input> Parser<'a, I> {
fn parse_import(&mut self) -> PResult<ModuleDecl> {
fn parse_import(&mut self) -> PResult<'a, ModuleDecl> {
let start = cur_pos!();
assert_and_bump!("import");
@ -77,7 +77,7 @@ impl<'a, I: Input> Parser<'a, I> {
}
/// Parse `foo`, `foo2 as bar` in `import { foo, foo2 as bar }`
fn parse_import_specifier(&mut self) -> PResult<ImportSpecifier> {
fn parse_import_specifier(&mut self) -> PResult<'a, ImportSpecifier> {
let start = cur_pos!();
match *cur!()? {
Word(..) => {
@ -106,19 +106,20 @@ impl<'a, I: Input> Parser<'a, I> {
}
}
fn parse_imported_default_binding(&mut self) -> PResult<Ident> {
fn parse_imported_default_binding(&mut self) -> PResult<'a, Ident> {
self.parse_imported_binding()
}
fn parse_imported_binding(&mut self) -> PResult<Ident> {
self.with_ctx(Context {
fn parse_imported_binding(&mut self) -> PResult<'a, Ident> {
let ctx = Context {
in_async: false,
in_generator: false,
..self.ctx
}).parse_binding_ident()
};
self.with_ctx(ctx).parse_binding_ident()
}
fn parse_export(&mut self) -> PResult<ModuleDecl> {
fn parse_export(&mut self) -> PResult<'a, ModuleDecl> {
let start = cur_pos!();
assert_and_bump!("export");
@ -211,7 +212,7 @@ impl<'a, I: Input> Parser<'a, I> {
});
}
fn parse_export_specifier(&mut self) -> PResult<ExportSpecifier> {
fn parse_export_specifier(&mut self) -> PResult<'a, ExportSpecifier> {
let orig = self.parse_ident_name()?;
let exported = if eat!("as") {
@ -222,7 +223,7 @@ impl<'a, I: Input> Parser<'a, I> {
Ok(ExportSpecifier { orig, exported })
}
fn parse_from_clause_and_semi(&mut self) -> PResult<String> {
fn parse_from_clause_and_semi(&mut self) -> PResult<'a, String> {
expect!("from");
match *cur!()? {
Str(..) => match bump!() {
@ -238,12 +239,12 @@ impl<'a, I: Input> Parser<'a, I> {
}
#[parser]
impl<'a, I: Input> StmtLikeParser<ModuleItem> for Parser<'a, I> {
impl<'a, I: Input> StmtLikeParser<'a, ModuleItem> for Parser<'a, I> {
fn accept_import_export() -> bool {
true
}
fn handle_import_export(&mut self, top_level: bool) -> PResult<ModuleItem> {
fn handle_import_export(&mut self, top_level: bool) -> PResult<'a, ModuleItem> {
if !top_level {
syntax_error!(SyntaxError::NonTopLevelImportExport);
}

View File

@ -1,5 +1,7 @@
use super::*;
use swc_common::Spanned;
#[parser]
impl<'a, I: Input> Parser<'a, I> {
/// Original context is restored when returned guard is dropped.
pub(super) fn with_ctx<'w>(&'w mut self, ctx: Context) -> WithCtx<'w, 'a, I> {
@ -13,10 +15,11 @@ impl<'a, I: Input> Parser<'a, I> {
/// Original context is restored when returned guard is dropped.
pub(super) fn include_in_expr<'w>(&'w mut self, include_in_expr: bool) -> WithCtx<'w, 'a, I> {
self.with_ctx(Context {
let ctx = Context {
include_in_expr,
..self.ctx
})
};
self.with_ctx(ctx)
}
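Illustrative aside (not part of this commit): the temporary `ctx` binding above appears to be a workaround for dropping `#![feature(nll)]` — without two-phase borrows, reading `self.ctx` inside the argument of `self.with_ctx(...)` is rejected while `self` is mutably borrowed for the call. A stand-in sketch of the pattern (`Ctx` and `CtxParser` are not the real types; the real `with_ctx` returns a guard):
#[derive(Clone, Copy)]
struct Ctx { include_in_expr: bool, strict: bool }
struct CtxParser { ctx: Ctx }
impl CtxParser {
    fn with_ctx(&mut self, ctx: Ctx) -> &mut Self {
        self.ctx = ctx;
        self
    }
    fn include_in_expr(&mut self, include_in_expr: bool) -> &mut Self {
        // Copy the current context out *before* mutably borrowing `self` for the call.
        let ctx = Ctx { include_in_expr, ..self.ctx };
        self.with_ctx(ctx)
    }
}
fn main() {
    let mut p = CtxParser { ctx: Ctx { include_in_expr: false, strict: true } };
    p.include_in_expr(true);
    assert!(p.ctx.include_in_expr && p.ctx.strict);
}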
/// Parse with given closure
@ -26,11 +29,23 @@ impl<'a, I: Input> Parser<'a, I> {
{
f(self)
}
pub(super) fn spanned<F, Node, Ret>(&mut self, f: F) -> PResult<'a, Node>
where
F: FnOnce(&mut Self) -> PResult<'a, Ret>,
Node: Spanned<Ret>,
{
let start = self.input.cur_pos();
let val = f(self)?;
let span = span!(start);
Ok(Spanned::from_unspanned(val, span))
}
}
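Illustrative aside (not part of this commit): a simplified, self-contained model of the `spanned` combinator added above — record the start position, run the closure, then attach the span it covered. `MiniParser`, `Span` and `Node` are stand-ins, not the real parser types.
#[derive(Debug, Clone, Copy)]
struct Span { lo: usize, hi: usize }
#[derive(Debug)]
struct Node<T> { span: Span, node: T }
struct MiniParser { pos: usize }
impl MiniParser {
    fn spanned<T, F>(&mut self, f: F) -> Result<Node<T>, String>
    where
        F: FnOnce(&mut Self) -> Result<T, String>,
    {
        let start = self.pos;
        let node = f(self)?; // an Err propagates before any span is built
        Ok(Node { span: Span { lo: start, hi: self.pos }, node })
    }
}
fn main() -> Result<(), String> {
    let mut p = MiniParser { pos: 0 };
    let stmt = p.spanned(|p| {
        p.pos += 5; // pretend the closure consumed five bytes of input
        Ok("empty statement")
    })?;
    println!("{:?}", stmt);
    Ok(())
}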
pub trait ParseObject<Obj> {
pub trait ParseObject<'a, Obj> {
type Prop;
fn make_object(span: Span, props: Vec<Self::Prop>) -> Obj;
fn parse_object_prop(&mut self) -> PResult<Self::Prop>;
fn parse_object_prop(&mut self) -> PResult<'a, Self::Prop>;
}
pub struct WithCtx<'w, 'a: 'w, I: 'w + Input> {

View File

@ -109,6 +109,8 @@ pub enum Token {
/// TODO: Make Num as enum and separate decimal, binary, ..etc
#[kind(starts_expr)]
Num(Number),
Error,
}
#[derive(Kind, Debug, Clone, Copy, Eq, PartialEq, Hash)]
@ -181,6 +183,8 @@ impl BinOpToken {
#[derive(Debug, Clone, PartialEq)]
pub struct TokenAndSpan {
pub token: Token,
/// Had a line break before this token?
pub had_line_break: bool,
pub span: Span,
}
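Illustrative aside (not part of this commit): why the lexer now records `had_line_break` on every token — automatic semicolon insertion (the `is!(';')`/`eat!(';')` macros above) needs it. `Tok` and `semi_or_asi` below are stand-ins, not the real lexer types.
struct Tok { text: &'static str, had_line_break: bool } // stand-in, not TokenAndSpan
/// Mirrors the `is!(p, ';')` rule: an explicit `;`, EOF, a `}`, or a preceding line break.
fn semi_or_asi(cur: Option<&Tok>) -> bool {
    match cur {
        None => true,
        Some(t) => t.text == ";" || t.text == "}" || t.had_line_break,
    }
}
fn main() {
    let after_newline = Tok { text: "foo", had_line_break: true };
    assert!(semi_or_asi(Some(&after_newline))); // `return\nfoo` gets a semicolon inserted
    assert!(semi_or_asi(None));                 // end of input
    assert!(!semi_or_asi(Some(&Tok { text: "+", had_line_break: false })));
}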

View File

@ -18,7 +18,9 @@ use std::panic::{catch_unwind, resume_unwind};
use std::path::Path;
use swc_common::{FoldWith, Folder};
use swc_common::Span;
use swc_ecma_parser::{CharIndices, PResult, Parser};
use swc_common::errors::Handler;
use swc_ecma_parser::{CharIndices, PResult, Parser, Session};
use swc_ecma_parser::ast::*;
use test::{test_main, Options, TestDesc, TestDescAndFn, TestFn, TestName};
use test::ShouldPanic::No;
@ -136,19 +138,23 @@ fn unit_tests(tests: &mut Vec<TestDescAndFn>) -> Result<(), io::Error> {
);
let res = catch_unwind(move || {
let mut sess = TestSess::new();
if module {
let p = |ty, s| {
parse_module(&file_name, s).unwrap_or_else(|err| {
panic!("failed to parse {}: {:?}\ncode:\n{}", ty, err, s)
let mut p = |ty, s| {
sess.parse_module(&file_name, s).unwrap_or_else(|err| {
err.emit();
panic!("failed to parse {} code:\n{}", ty, s)
})
};
let src = p("", &input);
let expected = p("explicit ", &explicit);
assert_eq!(src, expected);
} else {
let p = |ty, s| {
parse_script(&file_name, s).unwrap_or_else(|err| {
panic!("failed to parse {}: {:?}\ncode:\n{}", ty, err, s)
let mut p = |ty, s| {
sess.parse_script(&file_name, s).unwrap_or_else(|err| {
err.emit();
panic!("failed to parse {} code:\n{}", ty, s)
})
};
let src = p("", &input);
@ -172,24 +178,45 @@ fn logger(file_name: &str, src: &str) -> Logger {
::testing::logger().new(o!("file name" => f, "src" => s,))
}
fn with_parser<F, Ret>(file_name: &str, src: &str, f: F) -> Ret
where
F: FnOnce(&mut Parser<CharIndices>) -> Ret,
{
let logger = logger(file_name, src);
let mut p = Parser::new(
&logger,
Default::default(),
::CharIndices(src.char_indices()),
);
f(&mut p)
struct TestSess {
handler: Handler,
logger: Logger,
}
fn parse_script(file_name: &str, s: &str) -> PResult<Vec<Stmt>> {
with_parser(file_name, s, |p| p.parse_script().map(normalize))
}
fn parse_module(file_name: &str, s: &str) -> PResult<Module> {
with_parser(file_name, s, |p| p.parse_module().map(normalize))
impl TestSess {
fn new() -> Self {
let handler = ::swc_common::errors::Handler::with_tty_emitter(
::swc_common::errors::ColorConfig::Never,
true,
false,
None,
);
TestSess {
handler,
logger: ::testing::logger(),
}
}
fn parse_script<'a>(&'a mut self, file_name: &str, s: &str) -> PResult<'a, Vec<Stmt>> {
self.with_parser(file_name, s, |p| p.parse_script().map(normalize))
}
fn parse_module<'a>(&'a mut self, file_name: &str, s: &str) -> PResult<'a, Module> {
self.with_parser(file_name, s, |p| p.parse_module().map(normalize))
}
fn with_parser<'a, F, Ret>(&'a mut self, file_name: &str, src: &str, f: F) -> PResult<'a, Ret>
where
F: FnOnce(&mut Parser<'a, CharIndices>) -> PResult<'a, Ret>,
{
self.logger = logger(file_name, src);
f(&mut Parser::new(
Session {
logger: &self.logger,
handler: &self.handler,
cfg: Default::default(),
},
::CharIndices(src.char_indices()),
))
}
}
fn normalize<T>(mut t: T) -> T

View File

@ -3,10 +3,12 @@ use syn::fold::{self, Fold};
use syn::synom::Synom;
pub fn expand(_attr: TokenStream, item: Item) -> Item {
MyFolder { parser: None }.fold_item(item)
let item = InjectSelf { parser: None }.fold_item(item);
item
}
struct MyFolder {
struct InjectSelf {
parser: Option<Ident>,
}
@ -36,15 +38,39 @@ where
.0
}
impl Fold for MyFolder {
impl Fold for InjectSelf {
fn fold_expr_method_call(&mut self, i: ExprMethodCall) -> ExprMethodCall {
/// Extract `p` from `self.parse_with(|p|{})`
fn get_parser_arg(call: &ExprMethodCall) -> Ident {
assert_eq!(call.args.len(), 1);
let expr = call.args.iter().next().unwrap();
let inputs = match expr {
&Expr::Closure(ref c) => &c.inputs,
_ => unreachable!("Parser.parse_with and Parser.spanned accepts a closure"),
};
assert_eq!(inputs.len(), 1);
let p = inputs.clone().into_iter().next().unwrap();
match p {
FnArg::Inferred(Pat::Ident(PatIdent { ident, .. })) => ident,
_ => unreachable!("Expected (|p| {..})"),
}
}
match i.method.as_ref() {
"parse_with" => {
"parse_with" | "spanned" => {
//TODO
return fold::fold_expr_method_call(&mut MyFolder { parser: None }, i);
let parser = get_parser_arg(&i);
return fold::fold_expr_method_call(
&mut InjectSelf {
parser: Some(parser),
},
i,
);
}
_ => {}
}
};
fold::fold_expr_method_call(self, i)
}
@ -68,16 +94,12 @@ impl Fold for MyFolder {
i
}
fn fold_expr_closure(&mut self, i: ExprClosure) -> ExprClosure {
if self.parser.is_none() {
// if we don't know which closure this is, don't do anything.
i
} else {
fold::fold_expr_closure(self, i)
}
}
fn fold_macro(&mut self, i: Macro) -> Macro {
let parser = match self.parser {
Some(s) => s,
_ => return i,
};
let name = i.path.dump().to_string();
let span = get_joinned_span(&i.path);
@ -104,7 +126,7 @@ impl Fold for MyFolder {
parse(i.tts.into()).expect("failed to parse input to spanned as a block");
let block = self.fold_block(block);
return Macro {
tts: TokenStream::from(quote_spanned!(span => self,))
tts: TokenStream::from(quote_spanned!(span => #parser, ))
.into_iter()
.chain(TokenStream::from(block.dump()))
.collect(),
@ -118,7 +140,7 @@ impl Fold for MyFolder {
| "peek" | "peek_ahead" | "last_pos" | "return_if_arrow" | "span" | "syntax_error"
| "unexpected" => {
let tts = if i.tts.is_empty() {
quote_spanned!(span => self).into()
quote_spanned!(span => #parser).into()
} else {
let mut args: Punctuated<Expr, token::Comma> = parse_args(i.tts.into());
let args = args.into_pairs()
@ -126,7 +148,7 @@ impl Fold for MyFolder {
.map(|arg| arg.dump())
.flat_map(|t| TokenStream::from(t));
TokenStream::from(quote_spanned!(span => self,))
TokenStream::from(quote_spanned!(span => #parser,))
.into_iter()
.chain(args)
.collect()

3
rls.toml Normal file
View File

@ -0,0 +1,3 @@
workspace_mode = true
unstable_features = true
build_lib = true

View File

@ -5,13 +5,15 @@ pub extern crate swc_common;
pub extern crate swc_ecmascript;
pub extern crate swc_macros;
use slog::Logger;
use swc_common::errors::Handler;
use swc_ecmascript::ast::Module;
use swc_ecmascript::parser::{CharIndices, Config, PResult, Parser};
use swc_ecmascript::parser::{CharIndices, PResult, Parser, Session as ParseSess};
#[derive(Debug)]
pub struct Compiler {
threads: rayon::ThreadPool,
logger: Logger,
handler: Handler,
}
impl Compiler {
@ -20,12 +22,12 @@ impl Compiler {
}
/// TODO
pub fn parse_js(&mut self, src: &str) -> PResult<Module> {
pub fn parse_js(&self, src: &str) -> PResult<Module> {
Parser::new(
&self.logger,
Config {
module: true,
..Default::default()
ParseSess {
handler: &self.handler,
logger: &self.logger,
cfg: Default::default(),
},
CharIndices(src.char_indices()),
).parse_module()