Mirror of https://github.com/swc-project/swc.git, synced 2024-11-23 09:38:16 +03:00
Fix rls (again) (#33)
* Don't use `#[parser]` in lexer
* Don't use `#[parser]` in parser/util.rs
* Pin rust toolchain to nightly-2018-02-28
parent 67daa4c4e4
commit 10f83f3f02
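The change is almost entirely mechanical: the lexer's helper macros (`cur!`, `bump!`, `eat!`, `cur_pos!`, and friends) and the `#[parser]` attribute are replaced by plain inherent methods on `Lexer`, presumably so that RLS copes better with the code, as the commit title suggests. Below is a hedged, minimal sketch of that pattern, not swc's actual code; the `Input` trait and `StringInput` type here are invented for illustration.

// Hedged sketch of the migration this commit performs, not swc's real types.
// Before: call sites used macros such as `cur!(self)` / `bump!(self)` that
// expanded to `self.input.current()` / `self.input.bump()`.
// After: the same operations are thin inherent methods, so tooling sees
// ordinary method calls instead of macro expansions.

trait Input {
    fn current(&mut self) -> Option<char>;
    fn bump(&mut self);
}

struct Lexer<I: Input> {
    input: I,
}

impl<I: Input> Lexer<I> {
    // Inherent helpers standing in for the old `cur!` / `bump!` / `eat!` macros.
    fn cur(&mut self) -> Option<char> {
        self.input.current()
    }
    fn bump(&mut self) {
        self.input.bump()
    }
    fn eat(&mut self, c: char) -> bool {
        if self.cur() == Some(c) {
            self.bump();
            true
        } else {
            false
        }
    }
}

// A toy Input over a string, just to make the sketch runnable.
struct StringInput {
    chars: Vec<char>,
    pos: usize,
}

impl Input for StringInput {
    fn current(&mut self) -> Option<char> {
        self.chars.get(self.pos).copied()
    }
    fn bump(&mut self) {
        self.pos += 1;
    }
}

fn main() {
    let mut lexer = Lexer {
        input: StringInput { chars: "=>".chars().collect(), pos: 0 },
    };
    assert!(lexer.eat('='));  // consumes '='
    assert!(!lexer.eat('=')); // next char is '>', not '='
    assert!(lexer.eat('>'));
    assert_eq!(lexer.cur(), None);
}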
.vscode/settings.json (vendored): 3 changed lines

@@ -7,8 +7,7 @@
 "**/.DS_Store": true,
 "**/*.bk": true,
 },
-"rust-client.channel": "nightly",
-"rust-client.logToFile": false,
+"rust-client.channel": "nightly-2018-02-28",
 "rust.unstable_features": true,
 "rust.workspace_mode": true,
 "rust.rustflags": "--cfg procmacro2_semver_exempt",
@@ -1,49 +0,0 @@
-macro_rules! cur {
-($l:expr) => {{
-$l.input.current()
-}};
-}
-macro_rules! bump {
-($l:expr) => {{
-$l.input.bump()
-}};
-}
-macro_rules! peek {
-($l:expr) => {{
-$l.input.peek()
-}};
-}
-macro_rules! peek_ahead {
-($l:expr) => {{
-$l.input.peek_ahead()
-}};
-}
-
-macro_rules! cur_pos {
-($l:expr) => {{
-$l.input.cur_pos()
-}};
-}
-
-macro_rules! last_pos {
-($l:expr) => {{
-$l.input.last_pos()
-}};
-}
-
-macro_rules! is {
-($l:expr, $t:tt) => {{
-cur!($l) == Some($t)
-}};
-}
-
-macro_rules! eat {
-($l:expr, $t:tt) => {{
-if is!($l, $t) {
-bump!($l);
-true
-} else {
-false
-}
-}};
-}
@@ -11,14 +11,11 @@ use self::state::State;
 use self::util::*;
 use {Context, Session};
 use error::SyntaxError;
-use parser_macros::parser;
 use std::char;
 use swc_atoms::JsWord;
 use swc_common::{BytePos, Span};
 use token::*;

-#[macro_use]
-mod macros;
 pub mod input;
 mod number;
 mod state;
@@ -50,7 +47,7 @@ impl<'a, I: Input> Lexer<'a, I> {
 Some(c) => c,
 None => return Ok(None),
 };
-let start = cur_pos!(self);
+let start = self.cur_pos();

 let token = match c {
 // Identifier or keyword. '\uXXXX' sequences are allowed in
@@ -102,7 +99,7 @@ impl<'a, I: Input> Lexer<'a, I> {
 }

 '`' => {
-bump!(self);
+self.bump();
 return Ok(Some(tok!('`')));
 }

@@ -207,13 +204,9 @@ impl<'a, I: Input> Lexer<'a, I> {
 self.input.bump();

 // Handle -->
-if self.state.had_line_break && c == '-' && eat!(self, '>') {
+if self.state.had_line_break && c == '-' && self.eat('>') {
 if self.ctx.module {
-syntax_error!(
-self,
-span!(self, start),
-SyntaxError::LegacyCommentInModule
-)
+self.error(start, SyntaxError::LegacyCommentInModule)?
 }
 self.skip_line_comment(0);
 self.skip_space()?;
@@ -275,7 +268,7 @@ impl<'a, I: Input> Lexer<'a, I> {
 }

 // unexpected character
-c => syntax_error!(self, pos_span(start), SyntaxError::UnexpectedChar { c }),
+c => self.error_span(pos_span(start), SyntaxError::UnexpectedChar { c })?,
 };

 Ok(Some(token))
@@ -283,13 +276,13 @@ impl<'a, I: Input> Lexer<'a, I> {

 /// Read an escaped charater for string literal.
 fn read_escaped_char(&mut self, in_template: bool) -> LexResult<Option<char>> {
-assert_eq!(cur!(self), Some('\\'));
-let start = cur_pos!(self);
-bump!(self); // '\'
+assert_eq!(self.cur(), Some('\\'));
+let start = self.cur_pos();
+self.bump(); // '\'

-let c = match cur!(self) {
+let c = match self.cur() {
 Some(c) => c,
-None => syntax_error!(self, pos_span(start), SyntaxError::InvalidStrEscape),
+None => self.error_span(pos_span(start), SyntaxError::InvalidStrEscape)?,
 };
 let c = match c {
 'n' => '\n',
@@ -299,21 +292,21 @@ impl<'a, I: Input> Lexer<'a, I> {
 'v' => '\u{000b}',
 'f' => '\u{000c}',
 '\r' => {
-bump!(self); // remove '\r'
+self.bump(); // remove '\r'

-if cur!(self) == Some('\n') {
-bump!(self);
+if self.cur() == Some('\n') {
+self.bump();
 }
 return Ok(None);
 }
 '\n' | '\u{2028}' | '\u{2029}' => {
-bump!(self);
+self.bump();
 return Ok(None);
 }

 // read hexadecimal escape sequences
 'x' => {
-bump!(self); // 'x'
+self.bump(); // 'x'
 return self.read_hex_char(start, 2).map(Some);
 }

@@ -323,9 +316,9 @@ impl<'a, I: Input> Lexer<'a, I> {
 }
 // octal escape sequences
 '0'...'7' => {
-bump!(self);
+self.bump();
 let first_c = if c == '0' {
-match cur!(self) {
+match self.cur() {
 Some(next) if next.is_digit(8) => c,
 // \0 is not an octal literal nor decimal literal.
 _ => return Ok(Some('\u{0000}')),
@@ -336,17 +329,17 @@ impl<'a, I: Input> Lexer<'a, I> {

 // TODO: Show template instead of strict mode
 if in_template {
-syntax_error!(self, span!(self, start), SyntaxError::LegacyOctal)
+self.error(start, SyntaxError::LegacyOctal)?
 }

 if self.ctx.strict {
-syntax_error!(self, span!(self, start), SyntaxError::LegacyOctal)
+self.error(start, SyntaxError::LegacyOctal)?
 }

 let mut value: u8 = first_c.to_digit(8).unwrap() as u8;
 macro_rules! one {
 ($check:expr) => {{
-match cur!(self).and_then(|c| c.to_digit(8)) {
+match self.cur().and_then(|c| c.to_digit(8)) {
 Some(v) => {
 value = if $check {
 let new_val = value
@@ -359,7 +352,7 @@ impl<'a, I: Input> Lexer<'a, I> {
 } else {
 value * 8 + v as u8
 };
-bump!(self);
+self.bump();
 }
 _ => {
 return Ok(Some(value as char))
@@ -380,11 +373,10 @@ impl<'a, I: Input> Lexer<'a, I> {
 }
 }

-#[parser]
 impl<'a, I: Input> Lexer<'a, I> {
-fn read_slash(&mut self) -> LexResult<(Option<Token>)> {
-debug_assert_eq!(cur!(), Some('/'));
-let start = cur_pos!();
+fn read_slash(&mut self) -> LexResult<Option<Token>> {
+debug_assert_eq!(self.cur(), Some('/'));
+let start = self.cur_pos();

 // Regex
 if self.state.is_expr_allowed {
@@ -392,25 +384,20 @@ impl<'a, I: Input> Lexer<'a, I> {
 }

 // Divide operator
-bump!();
+self.bump();

-Ok(Some(if cur!() == Some('=') {
-bump!();
-tok!("/=")
-} else {
-tok!('/')
-}))
+Ok(Some(if self.eat('=') { tok!("/=") } else { tok!('/') }))
 }

-fn read_token_lt_gt(&mut self) -> LexResult<(Option<Token>)> {
-assert!(cur!() == Some('<') || cur!() == Some('>'));
+fn read_token_lt_gt(&mut self) -> LexResult<Option<Token>> {
+assert!(self.cur() == Some('<') || self.cur() == Some('>'));

-let c = cur!().unwrap();
-bump!();
+let c = self.cur().unwrap();
+self.bump();

 // XML style comment. `<!--`
-if !self.ctx.module && c == '<' && is!('!') && peek!() == Some('-')
-&& peek_ahead!() == Some('-')
+if !self.ctx.module && c == '<' && self.is('!') && self.peek() == Some('-')
+&& self.peek_ahead() == Some('-')
 {
 self.skip_line_comment(3);
 self.skip_space()?;
@@ -420,18 +407,18 @@ impl<'a, I: Input> Lexer<'a, I> {
 let mut op = if c == '<' { Lt } else { Gt };

 // '<<', '>>'
-if cur!() == Some(c) {
-bump!();
+if self.cur() == Some(c) {
+self.bump();
 op = if c == '<' { LShift } else { RShift };

 //'>>>'
-if c == '>' && cur!() == Some(c) {
-bump!();
+if c == '>' && self.cur() == Some(c) {
+self.bump();
 op = ZeroFillRShift;
 }
 }

-let token = if eat!('=') {
+let token = if self.eat('=') {
 match op {
 Lt => BinOp(LtEq),
 Gt => BinOp(GtEq),
@@ -449,8 +436,8 @@ impl<'a, I: Input> Lexer<'a, I> {

 /// See https://tc39.github.io/ecma262/#sec-names-and-keywords
 fn read_ident_or_keyword(&mut self) -> LexResult<Token> {
-assert!(cur!().is_some());
-let start = cur_pos!();
+assert!(self.cur().is_some());
+let start = self.cur_pos();

 let (word, has_escape) = self.read_word_as_str()?;

@@ -459,17 +446,17 @@ impl<'a, I: Input> Lexer<'a, I> {
 // should know context or parser should handle this error. Our approach to this
 // problem is former one.
 if has_escape && self.ctx.is_reserved_word(&word) {
-syntax_error!(
-span!(start),
-SyntaxError::EscapeInReservedWord { word: word.into() }
-);
+self.error(
+start,
+SyntaxError::EscapeInReservedWord { word: word.into() },
+)?
 } else {
 Ok(Word(word.into()))
 }
 }

 fn may_read_word_as_str(&mut self) -> LexResult<(Option<(JsWord, bool)>)> {
-match cur!() {
+match self.cur() {
 Some(c) if c.is_ident_start() => self.read_word_as_str().map(Some),
 _ => Ok(None),
 }
@@ -477,25 +464,25 @@ impl<'a, I: Input> Lexer<'a, I> {

 /// returns (word, has_escape)
 fn read_word_as_str(&mut self) -> LexResult<(JsWord, bool)> {
-assert!(cur!().is_some());
+assert!(self.cur().is_some());

 let mut has_escape = false;
 let mut word = String::new();
 let mut first = true;

-while let Some(c) = cur!() {
-let start = cur_pos!();
+while let Some(c) = self.cur() {
+let start = self.cur_pos();
 // TODO: optimize (cow / chunk)
 match c {
 c if c.is_ident_part() => {
-bump!();
+self.bump();
 word.push(c);
 }
 // unicode escape
 '\\' => {
-bump!();
-if !is!('u') {
-syntax_error!(pos_span(start), SyntaxError::ExpectedUnicodeEscape);
+self.bump();
+if !self.is('u') {
+self.error_span(pos_span(start), SyntaxError::ExpectedUnicodeEscape)?
 }
 let c = self.read_unicode_escape(start)?;
 let valid = if first {
@@ -505,7 +492,7 @@ impl<'a, I: Input> Lexer<'a, I> {
 };

 if !valid {
-syntax_error!(span!(start), SyntaxError::InvalidIdentChar);
+self.error(start, SyntaxError::InvalidIdentChar)?
 }
 word.push(c);
 }
@@ -520,15 +507,15 @@ impl<'a, I: Input> Lexer<'a, I> {
 }

 fn read_unicode_escape(&mut self, start: BytePos) -> LexResult<char> {
-assert_eq!(cur!(), Some('u'));
-bump!();
+assert_eq!(self.cur(), Some('u'));
+self.bump();

-if eat!('{') {
-let cp_start = cur_pos!();
+if self.eat('{') {
+let cp_start = self.cur_pos();
 let c = self.read_code_point()?;

-if !eat!('}') {
-syntax_error!(span!(start), SyntaxError::InvalidUnicodeEscape);
+if !self.eat('}') {
+self.error(start, SyntaxError::InvalidUnicodeEscape)?
 }

 Ok(c)
@@ -540,45 +527,45 @@ impl<'a, I: Input> Lexer<'a, I> {
 fn read_hex_char(&mut self, start: BytePos, count: u8) -> LexResult<char> {
 debug_assert!(count == 2 || count == 4);

-let pos = cur_pos!();
+let pos = self.cur_pos();
 match self.read_int(16, count)? {
 Some(val) => match char::from_u32(val) {
 Some(c) => Ok(c),
-None => syntax_error!(span!(start), SyntaxError::NonUtf8Char { val }),
+None => self.error(start, SyntaxError::NonUtf8Char { val })?,
 },
-None => syntax_error!(span!(start), SyntaxError::ExpectedHexChars { count }),
+None => self.error(start, SyntaxError::ExpectedHexChars { count })?,
 }
 }

 /// Read `CodePoint`.
 fn read_code_point(&mut self) -> LexResult<char> {
-let start = cur_pos!();
+let start = self.cur_pos();
 let val = self.read_int(16, 0)?;
 match val {
 Some(val) if 0x10FFFF >= val => match char::from_u32(val) {
 Some(c) => Ok(c),
-None => syntax_error!(span!(start), SyntaxError::InvalidCodePoint),
+None => self.error(start, SyntaxError::InvalidCodePoint)?,
 },
-_ => syntax_error!(span!(start), SyntaxError::InvalidCodePoint),
+_ => self.error(start, SyntaxError::InvalidCodePoint)?,
 }
 }

 /// See https://tc39.github.io/ecma262/#sec-literals-string-literals
 fn read_str_lit(&mut self) -> LexResult<Token> {
-assert!(cur!() == Some('\'') || cur!() == Some('"'));
-let start = cur_pos!();
-let quote = cur!().unwrap();
-bump!(); // '"'
+assert!(self.cur() == Some('\'') || self.cur() == Some('"'));
+let start = self.cur_pos();
+let quote = self.cur().unwrap();
+self.bump(); // '"'

 let mut out = String::new();
 let mut has_escape = false;

 //TODO: Optimize (Cow, Chunk)

-while let Some(c) = cur!() {
+while let Some(c) = self.cur() {
 match c {
 c if c == quote => {
-bump!();
+self.bump();
 return Ok(Str {
 value: out,
 has_escape,
@@ -588,34 +575,32 @@ impl<'a, I: Input> Lexer<'a, I> {
 out.extend(self.read_escaped_char(false)?);
 has_escape = true
 }
-c if c.is_line_break() => {
-syntax_error!(span!(start), SyntaxError::UnterminatedStrLit)
-}
+c if c.is_line_break() => self.error(start, SyntaxError::UnterminatedStrLit)?,
 _ => {
 out.push(c);
-bump!();
+self.bump();
 }
 }
 }

-syntax_error!(span!(start), SyntaxError::UnterminatedStrLit)
+self.error(start, SyntaxError::UnterminatedStrLit)?
 }

 /// Expects current char to be '/'
 fn read_regexp(&mut self) -> LexResult<Token> {
-assert_eq!(cur!(), Some('/'));
-let start = cur_pos!();
-bump!();
+assert_eq!(self.cur(), Some('/'));
+let start = self.cur_pos();
+self.bump();

 let (mut escaped, mut in_class) = (false, false);
 // TODO: Optimize (chunk, cow)
 let mut content = String::new();

-while let Some(c) = cur!() {
+while let Some(c) = self.cur() {
 // This is ported from babel.
 // Seems like regexp literal cannot contain linebreak.
 if c.is_line_break() {
-syntax_error!(span!(start), SyntaxError::UnterminatedRegxp);
+self.error(start, SyntaxError::UnterminatedRegxp)?;
 }

 if escaped {
@@ -630,16 +615,16 @@ impl<'a, I: Input> Lexer<'a, I> {
 }
 escaped = c == '\\';
 }
-bump!();
+self.bump();
 content.push(c);
 }

 // input is terminated without following `/`
-if cur!() != Some('/') {
-syntax_error!(span!(start), SyntaxError::UnterminatedRegxp);
+if !self.is('/') {
+self.error(start, SyntaxError::UnterminatedRegxp)?;
 }

-bump!(); // '/'
+self.bump(); // '/'

 // Spec says "It is a Syntax Error if IdentifierPart contains a Unicode escape
 // sequence." TODO: check for escape
@@ -654,20 +639,20 @@ impl<'a, I: Input> Lexer<'a, I> {
 }

 fn read_tmpl_token(&mut self, start_of_tpl: BytePos) -> LexResult<Token> {
-let start = cur_pos!();
+let start = self.cur_pos();

 // TODO: Optimize
 let mut out = String::new();

-while let Some(c) = cur!() {
-if c == '`' || (c == '$' && peek!() == Some('{')) {
-if start == cur_pos!() && self.state.last_was_tpl_element() {
+while let Some(c) = self.cur() {
+if c == '`' || (c == '$' && self.peek() == Some('{')) {
+if start == self.cur_pos() && self.state.last_was_tpl_element() {
 if c == '$' {
-bump!();
-bump!();
+self.bump();
+self.bump();
 return Ok(tok!("${"));
 } else {
-bump!();
+self.bump();
 return Ok(tok!('`'));
 }
 }
@@ -681,21 +666,21 @@ impl<'a, I: Input> Lexer<'a, I> {
 out.extend(ch);
 } else if c.is_line_break() {
 self.state.had_line_break = true;
-let c = if c == '\r' && peek!() == Some('\n') {
-bump!(); // '\r'
+let c = if c == '\r' && self.peek() == Some('\n') {
+self.bump(); // '\r'
 '\n'
 } else {
 c
 };
-bump!();
+self.bump();
 out.push(c);
 } else {
-bump!();
+self.bump();
 out.push(c);
 }
 }

-syntax_error!(span!(start_of_tpl), SyntaxError::UnterminatedTpl)
+self.error(start_of_tpl, SyntaxError::UnterminatedTpl)?
 }

 pub fn had_line_break_before_last(&self) -> bool {
@@ -7,23 +7,22 @@ use super::*;
 use error::SyntaxError;
 use std::fmt::Display;

-#[parser]
 impl<'a, I: Input> Lexer<'a, I> {
 /// Reads an integer, octal integer, or floating-point number
 ///
 ///
 pub(super) fn read_number(&mut self, starts_with_dot: bool) -> LexResult<Number> {
-assert!(cur!().is_some());
+assert!(self.cur().is_some());
 if starts_with_dot {
 debug_assert_eq!(
-cur!(),
+self.cur(),
 Some('.'),
 "read_number(starts_with_dot = true) expects current char to be '.'"
 );
 }
-let start = cur_pos!();
+let start = self.cur_pos();

-let starts_with_zero = cur!().unwrap() == '0';
+let starts_with_zero = self.cur().unwrap() == '0';

 let val = if starts_with_dot {
 // first char is '.'
@@ -42,7 +41,7 @@ impl<'a, I: Input> Lexer<'a, I> {
 // e.g. `0` is decimal (so it can be part of float)
 //
 // e.g. `000` is octal
-if start.0 != last_pos!().0 - 1 {
+if start.0 != self.last_pos().0 - 1 {
 // `-1` is utf 8 length of `0`

 return self.make_legacy_octal(start, 0f64);
@@ -52,7 +51,7 @@ impl<'a, I: Input> Lexer<'a, I> {
 // e.g. 08.1 is strict mode violation but 0.1 is valid float.

 if self.ctx.strict {
-syntax_error!(span!(start), SyntaxError::LegacyDecimal);
+self.error(start, SyntaxError::LegacyDecimal)?
 }

 let s = format!("{}", val); // TODO: Remove allocation.
@@ -82,11 +81,11 @@ impl<'a, I: Input> Lexer<'a, I> {
 // `0.a`, `08.a`, `102.a` are invalid.
 //
 // `.1.a`, `.1e-4.a` are valid,
-if cur!() == Some('.') {
-bump!();
+if self.cur() == Some('.') {
+self.bump();
 if starts_with_dot {
-debug_assert!(cur!().is_some());
-debug_assert!(cur!().unwrap().is_digit(10));
+debug_assert!(self.cur().is_some());
+debug_assert!(self.cur().unwrap().is_digit(10));
 }

 // Read numbers after dot
@@ -110,14 +109,14 @@ impl<'a, I: Input> Lexer<'a, I> {
 // 1e2 = 100
 // 1e+2 = 100
 // 1e-2 = 0.01
-if eat!('e') || eat!('E') {
-let next = match cur!() {
+if self.eat('e') || self.eat('E') {
+let next = match self.cur() {
 Some(next) => next,
-None => syntax_error!(span!(start), SyntaxError::NumLitTerminatedWithExp),
+None => self.error(start, SyntaxError::NumLitTerminatedWithExp)?,
 };

 let positive = if next == '+' || next == '-' {
-bump!(); // remove '+', '-'
+self.bump(); // remove '+', '-'
 next == '+'
 } else {
 true
@@ -142,10 +141,10 @@ impl<'a, I: Input> Lexer<'a, I> {
 "radix should be one of 2, 8, 16, but got {}",
 radix
 );
-debug_assert_eq!(cur!(), Some('0'));
+debug_assert_eq!(self.cur(), Some('0'));

-let start = bump!(); // 0
-bump!(); // x
+let start = self.bump(); // 0
+self.bump(); // x

 let val = self.read_number_no_dot(radix)?;
 self.ensure_not_ident()?;
@@ -161,7 +160,7 @@ impl<'a, I: Input> Lexer<'a, I> {
 "radix for read_number_no_dot should be one of 2, 8, 10, 16, but got {}",
 radix
 );
-let start = cur_pos!();
+let start = self.cur_pos();

 let mut read_any = false;

@@ -171,16 +170,17 @@ impl<'a, I: Input> Lexer<'a, I> {
 });

 if !read_any {
-syntax_error!(span!(start), SyntaxError::ExpectedDigit { radix });
+self.error(start, SyntaxError::ExpectedDigit { radix })?;
 }
 res
 }

 /// Ensure that ident cannot directly follow numbers.
 fn ensure_not_ident(&mut self) -> LexResult<()> {
-match cur!() {
+match self.cur() {
 Some(c) if c.is_ident_start() => {
-syntax_error!(pos_span(cur_pos!()), SyntaxError::IdentAfterNum)
+let span = pos_span(self.cur_pos());
+self.error_span(span, SyntaxError::IdentAfterNum)?
 }
 _ => Ok(()),
 }
@@ -190,7 +190,7 @@ impl<'a, I: Input> Lexer<'a, I> {
 /// were read, the integer value otherwise.
 /// When `len` is not zero, this
 /// will return `None` unless the integer has exactly `len` digits.
-pub(super) fn read_int(&mut self, radix: u8, len: u8) -> LexResult<(Option<u32>)> {
+pub(super) fn read_int(&mut self, radix: u8, len: u8) -> LexResult<Option<u32>> {
 let mut count = 0;
 let v = self.read_digits(radix, |opt: Option<u32>, radix, val| {
 count += 1;
@@ -219,14 +219,14 @@ impl<'a, I: Input> Lexer<'a, I> {
 self.session.logger,
 "read_digits(radix = {}), cur = {:?}",
 radix,
-cur!(self)
+self.cur()
 );

-let start = cur_pos!();
+let start = self.cur_pos();

 let mut total: Ret = Default::default();

-while let Some(c) = cur!() {
+while let Some(c) = self.cur() {
 if self.session.cfg.num_sep {
 // let prev: char = unimplemented!("prev");
 // let next = self.input.peek();
@@ -255,7 +255,7 @@ impl<'a, I: Input> Lexer<'a, I> {
 return Ok(total);
 };

-bump!();
+self.bump();
 let (t, cont) = op(total, radix, val);
 total = t;
 if !cont {
@@ -269,7 +269,7 @@ impl<'a, I: Input> Lexer<'a, I> {
 fn make_legacy_octal(&mut self, start: BytePos, val: f64) -> LexResult<Number> {
 self.ensure_not_ident()?;
 return if self.ctx.strict {
-syntax_error!(span!(start), SyntaxError::LegacyOctal)
+self.error(start, SyntaxError::LegacyOctal)?
 } else {
 // FIXME
 Ok(Number(val))
@@ -1,5 +1,4 @@
 use super::{Input, Lexer};
-use parser_macros::parser;
 use slog::Logger;
 use swc_common::BytePos;
 use token::*;
@@ -22,7 +21,6 @@ pub(super) struct State {
 token_type: Option<Token>,
 }

-#[parser]
 impl<'a, I: Input> Iterator for Lexer<'a, I> {
 type Item = TokenAndSpan;
 fn next(&mut self) -> Option<Self::Item> {
@@ -31,7 +29,7 @@ impl<'a, I: Input> Iterator for Lexer<'a, I> {

 // skip spaces before getting next character, if we are allowed to.
 if self.state.can_skip_space() {
-let start = cur_pos!();
+let start = self.cur_pos();

 match self.skip_space() {
 Err(err) => {
@@ -40,7 +38,7 @@ impl<'a, I: Input> Iterator for Lexer<'a, I> {
 TokenAndSpan {
 token,
 had_line_break: self.had_line_break_before_last(),
-span: span!(start),
+span: self.span(start),
 }
 });
 }
@@ -48,7 +46,7 @@ impl<'a, I: Input> Iterator for Lexer<'a, I> {
 }
 };

-let start = cur_pos!();
+let start = self.cur_pos();

 let res = if let Some(Type::Tpl {
 start: start_pos_of_tpl,
@@ -73,7 +71,7 @@ impl<'a, I: Input> Iterator for Lexer<'a, I> {
 TokenAndSpan {
 token,
 had_line_break: self.had_line_break_before_last(),
-span: span!(start),
+span: self.span(start),
 }
 })
 }
@@ -334,17 +332,21 @@ impl Context {
 #[kind(fucntion(is_expr = "bool", preserve_space = "bool"))]
 enum Type {
 BraceStmt,
-#[kind(is_expr)] BraceExpr,
-#[kind(is_expr)] TplQuasi,
+#[kind(is_expr)]
+BraceExpr,
+#[kind(is_expr)]
+TplQuasi,
 ParenStmt {
 /// Is this `for` loop?
 is_for_loop: bool,
 },
-#[kind(is_expr)] ParenExpr,
+#[kind(is_expr)]
+ParenExpr,
 #[kind(is_expr, preserve_space)]
 Tpl {
 /// Start of a template literal.
 start: BytePos,
 },
-#[kind(is_expr)] FnExpr,
+#[kind(is_expr)]
+FnExpr,
 }
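For reference, the hunk above keeps `Lexer` as an `Iterator` that yields `TokenAndSpan` and only drops the `#[parser]` attribute. A hedged, self-contained sketch of that iterator shape, using toy types rather than swc's `Token`, `BytePos`, or span machinery:

// Hedged sketch of the Iterator-based lexer shape, with invented toy types.
#[derive(Debug, PartialEq)]
struct TokenAndSpan {
    token: char, // stand-in for swc's Token
    lo: usize,   // span start (byte offset)
    hi: usize,   // span end (byte offset)
}

struct Lexer<'a> {
    chars: std::str::CharIndices<'a>,
}

impl<'a> Iterator for Lexer<'a> {
    type Item = TokenAndSpan;

    // Each call reads one "token" (here just one char) and records its span,
    // mirroring how the real `next` pairs a Token with the span built from
    // the recorded start position.
    fn next(&mut self) -> Option<Self::Item> {
        let (lo, c) = self.chars.next()?;
        Some(TokenAndSpan { token: c, lo, hi: lo + c.len_utf8() })
    }
}

fn main() {
    let lexer = Lexer { chars: "a+b".char_indices() };
    let tokens: Vec<_> = lexer.collect();
    assert_eq!(tokens[1], TokenAndSpan { token: '+', lo: 1, hi: 2 });
    println!("{:?}", tokens);
}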
@@ -5,12 +5,12 @@
 //! https://github.com/babel/babel/blob/master/packages/babylon/src/util/identifier.js
 //!
 //!
-//! Note: Currently this use xid instead of id. (because unicode_xid crate
-//! exists)
+//! Note: Currently this use xid instead of id because unicode_xid crate
+//! exists.
 use super::{LexResult, Lexer};
 use super::input::Input;
-use error::SyntaxError;
-use parser_macros::parser;
+use error::{ErrorToDiag, SyntaxError};
 use swc_common::{BytePos, Span};
 use unicode_xid::UnicodeXID;

 // pub const BACKSPACE: char = 8 as char;
@@ -20,15 +20,75 @@ use unicode_xid::UnicodeXID;
 // pub const LINE_SEPARATOR: char = '\u{2028}';
 // pub const PARAGRAPH_SEPARATOR: char = '\u{2029}';

-#[parser]
 impl<'a, I: Input> Lexer<'a, I> {
+pub(super) fn span(&self, start: BytePos) -> Span {
+let end = self.last_pos();
+if cfg!(debug_assertions) && start > end {
+unreachable!(
+"assertion failed: (span.start <= span.end).
+ start = {}, end = {}",
+start.0, end.0
+)
+}
+Span::new(start, end, Default::default())
+}
+
+pub(super) fn bump(&mut self) {
+self.input.bump()
+}
+
+pub(super) fn is(&mut self, c: char) -> bool {
+self.cur() == Some(c)
+}
+
+pub(super) fn eat(&mut self, c: char) -> bool {
+if self.is(c) {
+self.bump();
+true
+} else {
+false
+}
+}
+
+pub(super) fn cur(&mut self) -> Option<char> {
+self.input.current()
+}
+pub(super) fn peek(&mut self) -> Option<char> {
+self.input.peek()
+}
+pub(super) fn peek_ahead(&mut self) -> Option<char> {
+self.input.peek_ahead()
+}
+
+pub(super) fn cur_pos(&mut self) -> BytePos {
+self.input.cur_pos()
+}
+pub(super) fn last_pos(&self) -> BytePos {
+self.input.last_pos()
+}
+
+/// Shorthand for `let span = self.span(start); self.error_span(span)`
+pub(super) fn error(&mut self, start: BytePos, kind: SyntaxError) -> LexResult<!> {
+let span = self.span(start);
+self.error_span(span, kind)
+}
+
+pub(super) fn error_span(&mut self, span: Span, kind: SyntaxError) -> LexResult<!> {
+let err = ErrorToDiag {
+handler: self.session.handler,
+span,
+error: kind,
+};
+Err(err)?
+}
+
 /// Skip comments or whitespaces.
 ///
 /// See https://tc39.github.io/ecma262/#sec-white-space
 pub(super) fn skip_space(&mut self) -> LexResult<()> {
 let mut line_break = false;

-while let Some(c) = cur!() {
+while let Some(c) = self.cur() {
 match c {
 // white spaces
 _ if c.is_ws() => {}
@@ -38,10 +98,10 @@ impl<'a, I: Input> Lexer<'a, I> {
 self.state.had_line_break = true;
 }
 '/' => {
-if peek!() == Some('/') {
+if self.peek() == Some('/') {
 self.skip_line_comment(2);
 continue;
-} else if peek!() == Some('*') {
+} else if self.peek() == Some('*') {
 self.skip_block_comment()?;
 continue;
 }
@@ -51,20 +111,20 @@ impl<'a, I: Input> Lexer<'a, I> {
 _ => break,
 }

-bump!();
+self.bump();
 }

 Ok(())
 }

 pub(super) fn skip_line_comment(&mut self, start_skip: usize) {
-let start = cur_pos!();
+let start = self.cur_pos();
 for _ in 0..start_skip {
-bump!();
+self.bump();
 }

-while let Some(c) = cur!() {
-bump!();
+while let Some(c) = self.cur() {
+self.bump();
 if c.is_line_break() {
 self.state.had_line_break = true;
 }
@@ -80,19 +140,18 @@ impl<'a, I: Input> Lexer<'a, I> {

 /// Expects current char to be '/' and next char to be '*'.
 pub(super) fn skip_block_comment(&mut self) -> LexResult<()> {
-let start = cur_pos!();
+let start = self.cur_pos();

-debug_assert_eq!(cur!(), Some('/'));
-debug_assert_eq!(peek!(), Some('*'));
+debug_assert_eq!(self.cur(), Some('/'));
+debug_assert_eq!(self.peek(), Some('*'));

-bump!();
-bump!();
+self.bump();
+self.bump();

 let mut was_star = false;

-while let Some(c) = cur!() {
-if was_star && is!('/') {
-bump!();
+while let Some(c) = self.cur() {
+if was_star && self.eat('/') {
 // TODO: push comment
 return Ok(());
 }
@@ -100,11 +159,11 @@ impl<'a, I: Input> Lexer<'a, I> {
 self.state.had_line_break = true;
 }

-was_star = is!('*');
-bump!();
+was_star = self.is('*');
+self.bump();
 }

-syntax_error!(span!(start), SyntaxError::UnterminatedBlockComment)
+self.error(start, SyntaxError::UnterminatedBlockComment)?
 }
 }
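The new `error` and `error_span` helpers above return `LexResult<!>`, so writing `self.error(...)?` both propagates the error and still type-checks wherever an expression of any type is expected (for example as a match arm), which is how the old `syntax_error!` macro behaved. A hedged, standalone sketch of that trick with toy types; it needs a nightly compiler for `#![feature(never_type)]`, in line with the nightly toolchain this commit pins:

// Hedged sketch, not swc's code: a helper returning `Result<!, Error>`
// can replace a `syntax_error!`-style macro at call sites.
#![feature(never_type)]

#[derive(Debug)]
struct LexError(String);

type LexResult<T> = Result<T, LexError>;

struct Lexer {
    pos: usize,
}

impl Lexer {
    // Always returns `Err`, so the `Ok` type can be `!`; after `?`, the
    // expression has type `!` and coerces to whatever the context needs.
    fn error(&self, msg: &str) -> LexResult<!> {
        Err(LexError(format!("{} at byte {}", msg, self.pos)))
    }

    fn digit_value(&self, c: char) -> LexResult<u32> {
        match c.to_digit(10) {
            Some(v) => Ok(v),
            // Usable directly in expression position, like the old macro.
            None => self.error("expected a digit")?,
        }
    }
}

fn main() {
    let lexer = Lexer { pos: 7 };
    assert_eq!(lexer.digit_value('5').unwrap(), 5);
    assert!(lexer.digit_value('x').is_err());
}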
@@ -85,53 +85,3 @@ macro_rules! token_including_semi {
 (';') => { Token::Semi };
 ($t:tt) => { tok!($t) };
 }
-
-/// This macro requires macro named 'last_pos' to be in scope.
-macro_rules! span {
-($p:expr, $start:expr) => {{
-let start: ::swc_common::BytePos = $start;
-let end: ::swc_common::BytePos = last_pos!($p);
-if cfg!(debug_assertions) && start > end {
-unreachable!("assertion failed: (span.start <= span.end).
-start = {}, end = {}", start.0, end.0)
-}
-::swc_common::Span::new(start, end, Default::default())
-}};
-}
-
-macro_rules! spanned {
-(
-$p:expr, { $($body:tt)* }
-) => {{
-let start = { cur_pos!($p) };
-let val: Result<_, _> = {
-$($body)*
-};
-{
-match val {
-Ok(val) => {
-let span = span!($p, start);
-let val = ::swc_common::Spanned::from_unspanned(val, span);
-Ok(val)
-},
-Err(err) => Err(err),
-}
-}
-}};
-}
-
-macro_rules! syntax_error {
-($p:expr, $err:expr) => {
-syntax_error!($p, $p.input.cur_span(), $err)
-};
-
-($p:expr, $span:expr, $err:expr) => {{
-let err = $crate::error::ErrorToDiag {
-handler: $p.session.handler,
-span: $span,
-error: $err,
-};
-let res: Result<!, _> = Err(err);
-res?
-}};
-}
@@ -221,3 +221,53 @@ macro_rules! return_if_arrow {
 // }
 }};
 }
+
+/// This macro requires macro named 'last_pos' to be in scope.
+macro_rules! span {
+($p:expr, $start:expr) => {{
+let start: ::swc_common::BytePos = $start;
+let end: ::swc_common::BytePos = last_pos!($p);
+if cfg!(debug_assertions) && start > end {
+unreachable!("assertion failed: (span.start <= span.end).
+start = {}, end = {}", start.0, end.0)
+}
+::swc_common::Span::new(start, end, Default::default())
+}};
+}
+
+macro_rules! spanned {
+(
+$p:expr, { $($body:tt)* }
+) => {{
+let start = { cur_pos!($p) };
+let val: Result<_, _> = {
+$($body)*
+};
+{
+match val {
+Ok(val) => {
+let span = span!($p, start);
+let val = ::swc_common::Spanned::from_unspanned(val, span);
+Ok(val)
+},
+Err(err) => Err(err),
+}
+}
+}};
+}
+
+macro_rules! syntax_error {
+($p:expr, $err:expr) => {
+syntax_error!($p, $p.input.cur_span(), $err)
+};
+
+($p:expr, $span:expr, $err:expr) => {{
+let err = $crate::error::ErrorToDiag {
+handler: $p.session.handler,
+span: $span,
+error: $err,
+};
+let res: Result<!, _> = Err(err);
+res?
+}};
+}
@@ -62,7 +62,6 @@ impl Context {
 }
 }

-#[parser]
 impl<'a, I: Input> Parser<'a, I> {
 /// Original context is restored when returned guard is dropped.
 pub(super) fn with_ctx<'w>(&'w mut self, ctx: Context) -> WithCtx<'w, 'a, I> {
@@ -103,6 +102,18 @@ impl<'a, I: Input> Parser<'a, I> {
 f(self)
 }

+pub(super) fn span(&mut self, start: BytePos) -> Span {
+let end = last_pos!(self);
+if cfg!(debug_assertions) && start > end {
+unreachable!(
+"assertion failed: (span.start <= span.end).
+ start = {}, end = {}",
+start.0, end.0
+)
+}
+Span::new(start, end, Default::default())
+}
+
 pub(super) fn spanned<F, Node, Ret>(&mut self, f: F) -> PResult<'a, Node>
 where
 F: FnOnce(&mut Self) -> PResult<'a, Ret>,
@@ -111,7 +122,7 @@ impl<'a, I: Input> Parser<'a, I> {
 let start = self.input.cur_pos();
 let val = f(self)?;

-let span = span!(start);
+let span = self.span(start);
 Ok(Spanned::from_unspanned(val, span))
 }
 }
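parser/util.rs gains the same kind of inherent `span(start)` helper, and its `spanned` method shows the general pattern: record the start position, run a sub-parser, then wrap the result in the span covering what it consumed. A hedged toy version of that combinator, with simplified `Span` and result types instead of swc_common's `Spanned` and `PResult`:

// Hedged sketch of the `spanned` combinator pattern, with invented toy types.
#[derive(Debug, Clone, Copy, PartialEq)]
struct Span {
    lo: usize,
    hi: usize,
}

#[derive(Debug, PartialEq)]
struct Spanned<T> {
    node: T,
    span: Span,
}

type ParseResult<T> = Result<T, String>;

struct Parser {
    src: Vec<char>,
    pos: usize,
}

impl Parser {
    fn cur_pos(&self) -> usize {
        self.pos
    }

    // Build a span from a recorded start up to the current position,
    // like `self.span(start)` in the diff (with its debug assertion).
    fn span(&self, start: usize) -> Span {
        debug_assert!(start <= self.pos, "span.start must be <= span.end");
        Span { lo: start, hi: self.pos }
    }

    // Run `f`, then attach the span covering everything it consumed.
    fn spanned<T, F>(&mut self, f: F) -> ParseResult<Spanned<T>>
    where
        F: FnOnce(&mut Self) -> ParseResult<T>,
    {
        let start = self.cur_pos();
        let node = f(self)?;
        let span = self.span(start);
        Ok(Spanned { node, span })
    }

    // A tiny sub-parser: consume consecutive ASCII digits into a number.
    fn parse_number(&mut self) -> ParseResult<u64> {
        let mut n: Option<u64> = None;
        while let Some(d) = self.src.get(self.pos).and_then(|c| c.to_digit(10)) {
            n = Some(n.unwrap_or(0) * 10 + u64::from(d));
            self.pos += 1;
        }
        n.ok_or_else(|| "expected a number".to_string())
    }
}

fn main() {
    let mut p = Parser { src: "42+x".chars().collect(), pos: 0 };
    let num = p.spanned(|p| p.parse_number()).unwrap();
    assert_eq!(num, Spanned { node: 42, span: Span { lo: 0, hi: 2 } });
    println!("{:?}", num);
}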
rust-toolchain (new file): 1 line

@@ -0,0 +1 @@
+nightly-2018-02-28