Improve ux using swc as a rust library (#893)

swc_ecma_parser:
 - Improve parser's error reporting story
 - Replace `Session`, `Handler`, `Emitter` with Parser.take_errors()
 - Expose `Error`
 - Remove useless lifetime parameters
This commit is contained in:
강동윤 2020-07-25 20:26:04 +09:00 committed by GitHub
parent c7a5d5fef9
commit a1d33d023a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
82 changed files with 997 additions and 1229 deletions

View File

@ -37,7 +37,7 @@ install:
- npm install
- npm install browserslist regenerator
- npm install -g jest
- RUST_BACKTRACE=0 cargo test --no-run --color always --all --all-features
- travis_wait 50 RUST_BACKTRACE=0 cargo test --no-run --color always --all --all-features
script:
# - RUST_BACKTRACE=0 cargo check --color always --all --all-targets

View File

@ -95,7 +95,6 @@ After cloning the project there are a few steps required to get the project runn
3. Set up some environment variables, which are required for tests.
```bash
export RUST_MIN_STACK=16777216
export RUSTFLAGS='--cfg procmacro2_semver_exempt'
export RUST_BACKTRACE=full
export PATH="$PATH:$PWD/ecmascript/transforms/node_modules/.bin"

View File

@ -1,6 +1,6 @@
[package]
name = "swc_ecma_codegen"
version = "0.26.0"
version = "0.27.0"
authors = ["강동윤 <kdy1997.dev@gmail.com>"]
license = "Apache-2.0/MIT"
repository = "https://github.com/swc-project/swc.git"
@ -19,5 +19,5 @@ sourcemap = "6"
num-bigint = { version = "0.2", features = ["serde"] }
[dev-dependencies]
swc_ecma_parser = { version = "0.28", path ="../parser" }
swc_ecma_parser = { version = "0.29", path ="../parser" }
testing = { version = "0.7", path ="../../testing" }

View File

@ -5,7 +5,7 @@ extern crate test;
use std::hint::black_box;
use swc_common::FileName;
use swc_ecma_codegen::{self, Emitter};
use swc_ecma_parser::{Parser, Session, SourceFileInput, Syntax};
use swc_ecma_parser::{Parser, SourceFileInput, Syntax};
use test::Bencher;
const COLORS_JS: &str = r#"
@ -86,23 +86,19 @@ fn bench_emitter(b: &mut Bencher, s: &str) {
b.bytes = s.len() as _;
let _ = ::testing::run_test(true, |cm, handler| {
let session = Session { handler: &handler };
let fm = cm.new_source_file(FileName::Anon, s.into());
let mut parser = Parser::new(
session,
Syntax::default(),
SourceFileInput::from(&*fm),
None,
);
let mut parser = Parser::new(Syntax::default(), SourceFileInput::from(&*fm), None);
let mut src_map_buf = vec![];
let module = parser
.parse_module()
.map_err(|mut e| {
e.emit();
})
.map_err(|e| e.into_diagnostic(handler).emit())
.unwrap();
for err in parser.take_errors() {
err.into_diagnostic(handler).emit();
}
b.iter(|| {
let mut buf = vec![];
{

View File

@ -5,7 +5,7 @@ extern crate test;
use std::hint::black_box;
use swc_common::FileName;
use swc_ecma_codegen::{self, Emitter};
use swc_ecma_parser::{Parser, Session, SourceFileInput, Syntax};
use swc_ecma_parser::{Parser, SourceFileInput, Syntax};
use test::Bencher;
const COLORS_JS: &str = r#"
@ -86,24 +86,19 @@ fn bench_emitter(b: &mut Bencher, s: &str) {
b.bytes = s.len() as _;
let _ = ::testing::run_test(true, |cm, handler| {
let session = Session { handler: &handler };
b.iter(|| {
let fm = cm.new_source_file(FileName::Anon, s.into());
let mut parser = Parser::new(
session,
Syntax::default(),
SourceFileInput::from(&*fm),
None,
);
let mut parser = Parser::new(Syntax::default(), SourceFileInput::from(&*fm), None);
let mut src_map_buf = vec![];
let module = parser
.parse_module()
.map_err(|mut e| {
e.emit();
})
.map_err(|e| e.into_diagnostic(handler).emit())
.unwrap();
for err in parser.take_errors() {
err.into_diagnostic(handler).emit();
}
let mut buf = vec![];
{
let handlers = Box::new(MyHandlers);

View File

@ -1,4 +1,4 @@
use self::swc_ecma_parser::{EsConfig, Parser, Session, SourceFileInput, Syntax};
use self::swc_ecma_parser::{EsConfig, Parser, SourceFileInput, Syntax};
use super::*;
use crate::config::Config;
use std::{
@ -58,15 +58,16 @@ fn parse_then_emit(from: &str, cfg: Config, syntax: Syntax) -> String {
let comments = Default::default();
let res = {
let mut parser = Parser::new(
Session { handler: &handler },
syntax,
SourceFileInput::from(&*src),
Some(&comments),
);
parser.parse_module().map_err(|mut e| {
e.emit();
})?
let mut parser = Parser::new(syntax, SourceFileInput::from(&*src), Some(&comments));
let res = parser
.parse_module()
.map_err(|e| e.into_diagnostic(handler).emit());
for err in parser.take_errors() {
err.into_diagnostic(handler).emit()
}
res?
};
let out = Builder { cfg, cm, comments }.text(from, |e| e.emit_module(&res).unwrap());

View File

@ -11,7 +11,7 @@ use std::{
};
use swc_common::comments::Comments;
use swc_ecma_codegen::{self, Emitter};
use swc_ecma_parser::{lexer::Lexer, Parser, Session, SourceFileInput, Syntax};
use swc_ecma_parser::{lexer::Lexer, Parser, SourceFileInput, Syntax};
use test::{
test_main, DynTestFn, Options, ShouldPanic::No, TestDesc, TestDescAndFn, TestName, TestType,
};
@ -150,14 +150,12 @@ fn error_tests(tests: &mut Vec<TestDescAndFn>) -> Result<(), io::Error> {
let comments = Comments::default();
let handlers = Box::new(MyHandlers);
let lexer = Lexer::new(
Session { handler: &handler },
Syntax::default(),
Default::default(),
(&*src).into(),
Some(&comments),
);
let mut parser: Parser<'_, Lexer<'_, SourceFileInput<'_>>> =
Parser::new_from(Session { handler: &handler }, lexer);
let mut parser: Parser<Lexer<SourceFileInput>> = Parser::new_from(lexer);
{
let mut emitter = Emitter {
@ -173,15 +171,19 @@ fn error_tests(tests: &mut Vec<TestDescAndFn>) -> Result<(), io::Error> {
// Parse source
if module {
emitter
.emit_module(&parser.parse_module().map_err(|mut e| {
e.emit();
})?)
.emit_module(
&parser
.parse_module()
.map_err(|e| e.into_diagnostic(handler).emit())?,
)
.unwrap();
} else {
emitter
.emit_script(&parser.parse_script().map_err(|mut e| {
e.emit();
})?)
.emit_script(
&parser
.parse_script()
.map_err(|e| e.into_diagnostic(handler).emit())?,
)
.unwrap();
}
}

View File

@ -1,6 +1,6 @@
[package]
name = "swc_ecma_parser"
version = "0.28.0"
version = "0.29.0"
authors = ["강동윤 <kdy1997.dev@gmail.com>"]
license = "Apache-2.0/MIT"
repository = "https://github.com/swc-project/swc.git"

View File

@ -4,7 +4,7 @@ extern crate test;
use std::hint::black_box;
use swc_common::FileName;
use swc_ecma_parser::{lexer::Lexer, Session, SourceFileInput, Syntax};
use swc_ecma_parser::{lexer::Lexer, SourceFileInput, Syntax};
use test::Bencher;
#[bench]
@ -75,13 +75,11 @@ fn yui(b: &mut Bencher) {
fn bench_module(b: &mut Bencher, syntax: Syntax, src: &'static str) {
b.bytes = src.len() as _;
let _ = ::testing::run_test(false, |cm, handler| {
let session = Session { handler: &handler };
let _ = ::testing::run_test(false, |cm, _| {
let fm = cm.new_source_file(FileName::Anon, src.into());
b.iter(|| {
let lexer = Lexer::new(
session,
syntax,
Default::default(),
SourceFileInput::from(&*fm),

View File

@ -3,7 +3,7 @@
extern crate test;
use swc_common::FileName;
use swc_ecma_parser::{lexer::Lexer, Parser, Session, SourceFileInput, Syntax};
use swc_ecma_parser::{lexer::Lexer, Parser, SourceFileInput, Syntax};
use test::Bencher;
#[bench]
@ -156,20 +156,18 @@ fn large(b: &mut Bencher) {
fn bench_module(b: &mut Bencher, syntax: Syntax, src: &'static str) {
b.bytes = src.len() as _;
let _ = ::testing::run_test(false, |cm, handler| {
let session = Session { handler: &handler };
let _ = ::testing::run_test(false, |cm, _| {
let fm = cm.new_source_file(FileName::Anon, src.into());
b.iter(|| {
let _ = test::black_box({
let lexer = Lexer::new(
session,
syntax,
Default::default(),
SourceFileInput::from(&*fm),
None,
);
let mut parser = Parser::new_from(session, lexer);
let mut parser = Parser::new_from(lexer);
parser.parse_module()
});
});

View File

@ -4,15 +4,13 @@ use swc_common::{
errors::{ColorConfig, Handler},
FileName, SourceMap,
};
use swc_ecma_parser::{lexer::Lexer, Capturing, Parser, Session, SourceFileInput, Syntax};
use swc_ecma_parser::{lexer::Lexer, Capturing, Parser, SourceFileInput, Syntax};
fn main() {
swc_common::GLOBALS.set(&swc_common::Globals::new(), || {
let cm: Arc<SourceMap> = Default::default();
let handler = Handler::with_tty_emitter(ColorConfig::Auto, true, false, Some(cm.clone()));
let session = Session { handler: &handler };
// Real usage
// let fm = cm
// .load_file(Path::new("test.js"))
@ -24,7 +22,6 @@ fn main() {
);
let lexer = Lexer::new(
session,
Syntax::Es(Default::default()),
Default::default(),
SourceFileInput::from(&*fm),
@ -33,13 +30,15 @@ fn main() {
let capturing = Capturing::new(lexer);
let mut parser = Parser::new_from(session, capturing);
let mut parser = Parser::new_from(capturing);
for e in parser.take_errors() {
e.into_diagnostic(&handler).emit();
}
let _module = parser
.parse_module()
.map_err(|mut e| {
e.emit();
})
.map_err(|e| e.into_diagnostic(&handler).emit())
.expect("Failed to parse module.");
println!("Tokens: {:?}", parser.input().take());

View File

@ -5,15 +5,13 @@ use swc_common::{
errors::{ColorConfig, Handler},
FileName, SourceMap,
};
use swc_ecma_parser::{lexer::Lexer, Capturing, Parser, Session, SourceFileInput, Syntax};
use swc_ecma_parser::{lexer::Lexer, Capturing, Parser, SourceFileInput, Syntax};
fn main() {
swc_common::GLOBALS.set(&swc_common::Globals::new(), || {
let cm: Arc<SourceMap> = Default::default();
let handler = Handler::with_tty_emitter(ColorConfig::Auto, true, false, Some(cm.clone()));
let session = Session { handler: &handler };
// Real usage
// let fm = cm
// .load_file(Path::new("test.js"))
@ -25,7 +23,6 @@ fn main() {
);
let lexer = Lexer::new(
session,
Syntax::Typescript(Default::default()),
Default::default(),
SourceFileInput::from(&*fm),
@ -34,13 +31,15 @@ fn main() {
let capturing = Capturing::new(lexer);
let mut parser = Parser::new_from(session, capturing);
let mut parser = Parser::new_from(capturing);
for e in parser.take_errors() {
e.into_diagnostic(&handler).emit();
}
let _module = parser
.parse_typescript_module()
.map_err(|mut e| {
e.emit();
})
.map_err(|e| e.into_diagnostic(&handler).emit())
.expect("Failed to parse module.");
println!("Tokens: {:?}", parser.input().take());

View File

@ -2,50 +2,23 @@
use self::SyntaxError::*;
use crate::token::Token;
use std::{
borrow::Cow,
fmt::{self, Debug, Formatter},
};
use std::{borrow::Cow, fmt::Debug};
use swc_atoms::JsWord;
use swc_common::{
errors::{DiagnosticBuilder, Handler},
Span,
};
#[derive(Copy, Clone)]
pub(crate) struct Eof<'a> {
pub last: Span,
pub handler: &'a Handler,
}
impl<'a> From<Eof<'a>> for DiagnosticBuilder<'a> {
fn from(Eof { handler, last }: Eof<'a>) -> Self {
let mut db = handler.struct_err("Unexpected eof");
db.set_span(last);
db
}
}
impl<'a> Debug for Eof<'a> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
Debug::fmt("<eof>", f)
}
}
pub(crate) struct ErrorToDiag<'a> {
pub handler: &'a Handler,
pub span: Span,
pub error: SyntaxError,
}
#[derive(Debug, Clone, PartialEq)]
pub struct Error {
pub span: Span,
pub error: SyntaxError,
pub error: Box<SyntaxError>,
}
#[derive(Debug, Clone, PartialEq)]
#[non_exhaustive]
pub enum SyntaxError {
Eof,
TopLevelAwait,
LegacyDecimal,
@ -217,20 +190,10 @@ pub enum SyntaxError {
TS2703,
}
impl<'a> From<ErrorToDiag<'a>> for Error {
impl Error {
#[cold]
fn from(e: ErrorToDiag<'a>) -> Self {
Error {
span: e.span,
error: e.error,
}
}
}
impl<'a> From<ErrorToDiag<'a>> for DiagnosticBuilder<'a> {
#[cold]
fn from(e: ErrorToDiag<'a>) -> Self {
let msg: Cow<'static, _> = match e.error {
pub fn into_diagnostic(self, handler: &Handler) -> DiagnosticBuilder {
let msg: Cow<'static, _> = match *self.error {
TopLevelAwait => "top level await requires target to es2017 or higher and \
topLevelAwait:true for ecmascript"
.into(),
@ -380,14 +343,16 @@ impl<'a> From<ErrorToDiag<'a>> for DiagnosticBuilder<'a> {
TS1110 => "type expected".into(),
TS1141 => "literal in an import type should be string literal".into(),
Eof => "Unexpected eof".into(),
// TODO:
_ => format!("{:?}", e.error).into(),
_ => format!("{:?}", self.error).into(),
};
let mut db = e.handler.struct_err(&msg);
db.set_span(e.span);
let mut db = handler.struct_err(&msg);
db.set_span(self.span);
match e.error {
match *self.error {
ExpectedSemiForExprStmt { expr } => {
db.span_note(
expr,
@ -403,3 +368,8 @@ impl<'a> From<ErrorToDiag<'a>> for DiagnosticBuilder<'a> {
db
}
}
#[test]
fn size_of_error() {
assert_eq!(std::mem::size_of::<Error>(), 16);
}

View File

@ -12,11 +12,11 @@ use self::{state::State, util::*};
use crate::{
error::{Error, SyntaxError},
token::*,
Context, JscTarget, Session, Syntax,
Context, JscTarget, Syntax,
};
use either::Either::{Left, Right};
use smallvec::{smallvec, SmallVec};
use std::{char, iter::FusedIterator, mem::take};
use std::{cell::RefCell, char, iter::FusedIterator, mem::take, rc::Rc};
use swc_atoms::{js_word, JsWord};
use swc_common::{
comments::{Comment, Comments},
@ -95,7 +95,6 @@ impl FusedIterator for CharIter {}
#[derive(Clone)]
pub struct Lexer<'a, I: Input> {
session: Session<'a>,
comments: Option<&'a Comments>,
leading_comments_buffer: Option<Vec<Comment>>,
pub(crate) ctx: Context,
@ -104,6 +103,8 @@ pub struct Lexer<'a, I: Input> {
pub(crate) syntax: Syntax,
pub(crate) target: JscTarget,
errors: Rc<RefCell<Vec<Error>>>,
buf: String,
}
@ -111,14 +112,12 @@ impl<I: Input> FusedIterator for Lexer<'_, I> {}
impl<'a, I: Input> Lexer<'a, I> {
pub fn new(
session: Session<'a>,
syntax: Syntax,
target: JscTarget,
input: I,
comments: Option<&'a Comments>,
) -> Self {
Lexer {
session,
leading_comments_buffer: if comments.is_some() {
Some(Default::default())
} else {
@ -130,6 +129,7 @@ impl<'a, I: Input> Lexer<'a, I> {
ctx: Default::default(),
syntax,
target,
errors: Default::default(),
buf: String::with_capacity(16),
}
}

View File

@ -444,9 +444,8 @@ mod tests {
where
F: FnOnce(&mut Lexer<'_, SourceFileInput<'_>>) -> Ret,
{
crate::with_test_sess(s, |sess, fm| {
crate::with_test_sess(s, |handler, fm| {
let mut l = Lexer::new(
sess,
Syntax::Es(EsConfig {
num_sep: true,
..Default::default()
@ -576,9 +575,8 @@ mod tests {
});
let vec = panic::catch_unwind(|| {
crate::with_test_sess(case, |mut sess, input| {
let mut l =
Lexer::new(sess, Syntax::default(), Default::default(), input, None);
crate::with_test_sess(case, |handler, input| {
let mut l = Lexer::new(Syntax::default(), Default::default(), input, None);
l.ctx.strict = strict;
Ok(l.map(|ts| ts.token).collect::<Vec<_>>())
})

View File

@ -1,8 +1,8 @@
use super::{Context, Input, Lexer};
use crate::{input::Tokens, lexer::util::CharExt, token::*, JscTarget, Syntax};
use crate::{error::Error, input::Tokens, lexer::util::CharExt, token::*, JscTarget, Syntax};
use enum_kind::Kind;
use log::trace;
use std::mem;
use std::{mem, mem::take};
use swc_common::BytePos;
/// State of lexer.
@ -129,6 +129,14 @@ impl<I: Input> Tokens for Lexer<'_, I> {
fn set_token_context(&mut self, c: TokenContexts) {
self.state.context = c;
}
fn add_error(&self, error: Error) {
self.errors.borrow_mut().push(error);
}
fn take_errors(&mut self) -> Vec<Error> {
take(&mut self.errors.borrow_mut())
}
}
impl<'a, I: Input> Iterator for Lexer<'a, I> {
@ -613,8 +621,8 @@ pub(crate) fn with_lexer<F, Ret>(
where
F: FnOnce(&mut Lexer<'_, crate::lexer::input::SourceFileInput<'_>>) -> Result<Ret, ()>,
{
crate::with_test_sess(s, |sess, fm| {
let mut l = Lexer::new(sess, syntax, Default::default(), fm, None);
crate::with_test_sess(s, |handler, fm| {
let mut l = Lexer::new(syntax, Default::default(), fm, None);
let res = f(&mut l);
let c = vec![TokenContext::BraceStmt];

View File

@ -130,7 +130,7 @@ fn module_legacy_decimal() {
lex_module(Syntax::default(), "08"),
vec![Token::Error(Error {
span: make_span(sp(0..2)),
error: SyntaxError::LegacyDecimal,
error: Box::new(SyntaxError::LegacyDecimal),
})
.span(0..2)
.lb(),]
@ -143,7 +143,7 @@ fn module_legacy_comment_1() {
lex_module(Syntax::default(), "<!-- foo oo"),
vec![Token::Error(Error {
span: make_span(sp(0..11)),
error: SyntaxError::LegacyCommentInModule,
error: Box::new(SyntaxError::LegacyCommentInModule),
})
.span(0..11)
.lb(),]
@ -156,7 +156,7 @@ fn module_legacy_comment_2() {
lex_module(Syntax::default(), "-->"),
vec![Token::Error(Error {
span: make_span(sp(0..3)),
error: SyntaxError::LegacyCommentInModule,
error: Box::new(SyntaxError::LegacyCommentInModule),
})
.span(0..3)
.lb(),]

View File

@ -6,11 +6,10 @@
//!
//! [babylon/util/identifier.js]:https://github.com/babel/babel/blob/master/packages/babylon/src/util/identifier.js
use super::{input::Input, Char, LexResult, Lexer};
use crate::error::{ErrorToDiag, SyntaxError};
use crate::error::{Error, SyntaxError};
use std::char;
use swc_common::{
comments::{Comment, CommentKind},
errors::DiagnosticBuilder,
BytePos, Span, SpanData, SyntaxContext,
};
use unicode_xid::UnicodeXID;
@ -99,12 +98,10 @@ impl<'a, I: Input> Lexer<'a, I> {
#[cold]
pub(super) fn error_span<T>(&mut self, span: Span, kind: SyntaxError) -> LexResult<T> {
let err = ErrorToDiag {
handler: self.session.handler,
Err(Error {
span,
error: kind,
};
Err(err.into())
error: Box::new(kind),
})
}
#[cold]
@ -115,12 +112,11 @@ impl<'a, I: Input> Lexer<'a, I> {
#[cold]
pub(super) fn emit_error_span(&mut self, span: Span, kind: SyntaxError) {
let err = ErrorToDiag {
handler: self.session.handler,
let err = Error {
span,
error: kind,
error: Box::new(kind),
};
DiagnosticBuilder::from(err).emit();
self.errors.borrow_mut().push(err);
}
/// Skip comments or whitespaces.

View File

@ -45,43 +45,43 @@
//! errors::{ColorConfig, Handler},
//! FileName, FilePathMapping, SourceMap,
//! };
//! use swc_ecma_parser::{lexer::Lexer, Parser, Session, SourceFileInput, Syntax};
//! use swc_ecma_parser::{lexer::Lexer, Parser, SourceFileInput, Syntax};
//!
//! fn main() {
//! swc_common::GLOBALS.set(&swc_common::Globals::new(), || {
//! let cm: Arc<SourceMap> = Default::default();
//! let handler =
//! Handler::with_tty_emitter(ColorConfig::Auto, true, false,
//! Some(cm.clone()));
//!
//! let session = Session { handler: &handler };
//! Some(cm.clone()));
//!
//! // Real usage
//! // let fm = cm
//! // .load_file(Path::new("test.js"))
//! // .expect("failed to load test.js");
//!
//!
//! let fm = cm.new_source_file(
//! FileName::Custom("test.js".into()),
//! "function foo() {}".into(),
//! );
//! let lexer = Lexer::new(
//! session,
//! // We want to parse ecmascript
//! Syntax::Es(Default::default()),
//! Default::default(),
//! // JscTarget defaults to es5
//! Default::default(),
//! SourceFileInput::from(&*fm),
//! None,
//! );
//!
//! let mut parser = Parser::new_from(session, lexer);
//! let mut parser = Parser::new_from(lexer);
//!
//! for e in parser.take_errors() {
//! e.into_diagnostic(&handler).emit();
//! }
//!
//! let _module = parser
//! .parse_module()
//! .map_err(|mut e| {
//! e.emit();
//! ()
//! // Unrecoverable fatal error occurred
//! e.into_diagnostic(&handler).emit()
//! })
//! .expect("failed to parser module");
//! });
@ -100,11 +100,11 @@ pub use self::{
parser::*,
};
use serde::{Deserialize, Serialize};
use swc_common::{errors::Handler, Span, SpanData};
use swc_common::{Span, SpanData};
#[macro_use]
mod macros;
mod error;
pub mod error;
pub mod lexer;
mod parser;
pub mod token;
@ -427,22 +427,17 @@ pub struct Context {
in_case_cond: bool,
}
#[derive(Clone, Copy)]
pub struct Session<'a> {
pub handler: &'a Handler,
}
#[cfg(test)]
fn with_test_sess<F, Ret>(src: &str, f: F) -> Result<Ret, ::testing::StdErr>
where
F: FnOnce(Session<'_>, SourceFileInput<'_>) -> Result<Ret, ()>,
F: FnOnce(&swc_common::errors::Handler, SourceFileInput<'_>) -> Result<Ret, ()>,
{
use swc_common::FileName;
::testing::run_test(false, |cm, handler| {
let fm = cm.new_source_file(FileName::Real("testing".into()), src.into());
f(Session { handler: &handler }, (&*fm).into())
f(handler, (&*fm).into())
})
}

View File

@ -7,32 +7,32 @@ use swc_ecma_parser_macros::parser;
#[parser]
/// Parser for function expression and function declaration.
impl<'a, I: Tokens> Parser<'a, I> {
pub(super) fn parse_async_fn_expr(&mut self) -> PResult<'a, Box<Expr>> {
impl<'a, I: Tokens> Parser<I> {
pub(super) fn parse_async_fn_expr(&mut self) -> PResult<Box<Expr>> {
let start = cur_pos!();
expect!("async");
self.parse_fn(Some(start), vec![])
}
/// Parse function expression
pub(super) fn parse_fn_expr(&mut self) -> PResult<'a, Box<Expr>> {
pub(super) fn parse_fn_expr(&mut self) -> PResult<Box<Expr>> {
self.parse_fn(None, vec![])
}
pub(super) fn parse_async_fn_decl(&mut self, decorators: Vec<Decorator>) -> PResult<'a, Decl> {
pub(super) fn parse_async_fn_decl(&mut self, decorators: Vec<Decorator>) -> PResult<Decl> {
let start = cur_pos!();
expect!("async");
self.parse_fn(Some(start), decorators)
}
pub(super) fn parse_fn_decl(&mut self, decorators: Vec<Decorator>) -> PResult<'a, Decl> {
pub(super) fn parse_fn_decl(&mut self, decorators: Vec<Decorator>) -> PResult<Decl> {
self.parse_fn(None, decorators)
}
pub(super) fn parse_default_async_fn(
&mut self,
decorators: Vec<Decorator>,
) -> PResult<'a, ExportDefaultDecl> {
) -> PResult<ExportDefaultDecl> {
let start = cur_pos!();
expect!("async");
self.parse_fn(Some(start), decorators)
@ -41,7 +41,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
pub(super) fn parse_default_fn(
&mut self,
decorators: Vec<Decorator>,
) -> PResult<'a, ExportDefaultDecl> {
) -> PResult<ExportDefaultDecl> {
self.parse_fn(None, decorators)
}
@ -50,7 +50,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
start: BytePos,
class_start: BytePos,
decorators: Vec<Decorator>,
) -> PResult<'a, Decl> {
) -> PResult<Decl> {
self.parse_class(start, class_start, decorators)
}
@ -58,7 +58,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
&mut self,
start: BytePos,
decorators: Vec<Decorator>,
) -> PResult<'a, Box<Expr>> {
) -> PResult<Box<Expr>> {
self.parse_class(start, start, decorators)
}
@ -67,7 +67,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
start: BytePos,
class_start: BytePos,
decorators: Vec<Decorator>,
) -> PResult<'a, ExportDefaultDecl> {
) -> PResult<ExportDefaultDecl> {
self.parse_class(start, class_start, decorators)
}
@ -76,10 +76,10 @@ impl<'a, I: Tokens> Parser<'a, I> {
start: BytePos,
class_start: BytePos,
decorators: Vec<Decorator>,
) -> PResult<'a, T>
) -> PResult<T>
where
T: OutputType,
Self: MaybeOptionalIdentParser<'a, T::Ident>,
Self: MaybeOptionalIdentParser<T::Ident>,
{
self.strict_mode().parse_with(|p| {
expect!("class");
@ -181,7 +181,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
})
}
pub(super) fn parse_decorators(&mut self, allow_export: bool) -> PResult<'a, Vec<Decorator>> {
pub(super) fn parse_decorators(&mut self, allow_export: bool) -> PResult<Vec<Decorator>> {
if !self.syntax().decorators() {
return Ok(vec![]);
}
@ -211,7 +211,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
Ok(decorators)
}
fn parse_decorator(&mut self) -> PResult<'a, Decorator> {
fn parse_decorator(&mut self) -> PResult<Decorator> {
let start = cur_pos!();
assert_and_bump!('@');
@ -251,7 +251,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
})
}
fn parse_maybe_decorator_args(&mut self, expr: Box<Expr>) -> PResult<'a, Box<Expr>> {
fn parse_maybe_decorator_args(&mut self, expr: Box<Expr>) -> PResult<Box<Expr>> {
let type_args = if self.input.syntax().typescript() && is!('<') {
Some(self.parse_ts_type_args()?)
} else {
@ -271,7 +271,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
})))
}
fn parse_class_body(&mut self) -> PResult<'a, Vec<ClassMember>> {
fn parse_class_body(&mut self) -> PResult<Vec<ClassMember>> {
let mut elems = vec![];
while !eof!() && !is!('}') {
if eat_exact!(';') {
@ -287,7 +287,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
Ok(elems)
}
pub(super) fn parse_access_modifier(&mut self) -> PResult<'a, Option<Accessibility>> {
pub(super) fn parse_access_modifier(&mut self) -> PResult<Option<Accessibility>> {
Ok(self
.parse_ts_modifier(&["public", "protected", "private"])?
.map(|s| match s {
@ -298,7 +298,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}))
}
fn parse_class_member(&mut self) -> PResult<'a, ClassMember> {
fn parse_class_member(&mut self) -> PResult<ClassMember> {
let start = cur_pos!();
let decorators = self.parse_decorators(false)?;
@ -425,7 +425,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
accessibility: Option<Accessibility>,
static_token: Option<Span>,
decorators: Vec<Decorator>,
) -> PResult<'a, ClassMember> {
) -> PResult<ClassMember> {
let is_static = static_token.is_some();
let modifier = self.parse_ts_modifier(&["abstract", "readonly"])?;
let modifier_span = if let Some(..) = modifier {
@ -728,7 +728,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
is_optional: bool,
readonly: bool,
is_abstract: bool,
) -> PResult<'a, ClassMember> {
) -> PResult<ClassMember> {
if !self.input.syntax().class_props() {
syntax_error!(span!(start), SyntaxError::ClassProperty)
}
@ -803,11 +803,11 @@ impl<'a, I: Tokens> Parser<'a, I> {
})
}
fn is_class_method(&mut self) -> PResult<'a, bool> {
fn is_class_method(&mut self) -> PResult<bool> {
Ok(is!('(') || (self.input.syntax().typescript() && is!('<')))
}
fn is_class_property(&mut self) -> PResult<'a, bool> {
fn is_class_property(&mut self) -> PResult<bool> {
Ok((self.input.syntax().typescript() && is_one_of!('!', ':')) || is_one_of!('=', ';', '}'))
}
@ -815,10 +815,10 @@ impl<'a, I: Tokens> Parser<'a, I> {
&mut self,
start_of_async: Option<BytePos>,
decorators: Vec<Decorator>,
) -> PResult<'a, T>
) -> PResult<T>
where
T: OutputType,
Self: MaybeOptionalIdentParser<'a, T::Ident>,
Self: MaybeOptionalIdentParser<T::Ident>,
T::Ident: Spanned,
{
let start = start_of_async.unwrap_or(cur_pos!());
@ -890,9 +890,9 @@ impl<'a, I: Tokens> Parser<'a, I> {
parse_args: F,
is_async: bool,
is_generator: bool,
) -> PResult<'a, Function>
) -> PResult<Function>
where
F: FnOnce(&mut Self) -> PResult<'a, Vec<Param>>,
F: FnOnce(&mut Self) -> PResult<Vec<Param>>,
{
// let prev_in_generator = self.ctx().in_generator;
let ctx = Context {
@ -959,7 +959,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
})
}
fn parse_class_prop_name(&mut self) -> PResult<'a, Either<PrivateName, PropName>> {
fn parse_class_prop_name(&mut self) -> PResult<Either<PrivateName, PropName>> {
if is!('#') {
self.parse_private_name().map(Either::Left)
} else {
@ -967,9 +967,9 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
}
pub(super) fn parse_fn_body<T>(&mut self, is_async: bool, is_generator: bool) -> PResult<'a, T>
pub(super) fn parse_fn_body<T>(&mut self, is_async: bool, is_generator: bool) -> PResult<T>
where
Self: FnBodyParser<'a, T>,
Self: FnBodyParser<T>,
{
if self.ctx().in_declare && self.syntax().typescript() && is!('{') {
// self.emit_err(
@ -995,7 +995,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
}
impl<'a, I: Tokens> Parser<'a, I> {
impl<'a, I: Tokens> Parser<I> {
fn make_method<F>(
&mut self,
parse_args: F,
@ -1011,9 +1011,9 @@ impl<'a, I: Tokens> Parser<'a, I> {
is_async,
is_generator,
}: MakeMethodArgs,
) -> PResult<'a, ClassMember>
) -> PResult<ClassMember>
where
F: FnOnce(&mut Self) -> PResult<'a, Vec<Param>>,
F: FnOnce(&mut Self) -> PResult<Vec<Param>>,
{
let is_static = static_token.is_some();
let ctx = Context {
@ -1178,13 +1178,13 @@ impl OutputType for Decl {
}
}
pub(super) trait FnBodyParser<'a, Body> {
fn parse_fn_body_inner(&mut self) -> PResult<'a, Body>;
pub(super) trait FnBodyParser<Body> {
fn parse_fn_body_inner(&mut self) -> PResult<Body>;
}
#[parser]
impl<'a, I: Tokens> FnBodyParser<'a, BlockStmtOrExpr> for Parser<'a, I> {
fn parse_fn_body_inner(&mut self) -> PResult<'a, BlockStmtOrExpr> {
impl<I: Tokens> FnBodyParser<BlockStmtOrExpr> for Parser<I> {
fn parse_fn_body_inner(&mut self) -> PResult<BlockStmtOrExpr> {
if is!('{') {
self.parse_block(false).map(BlockStmtOrExpr::BlockStmt)
} else {
@ -1194,8 +1194,8 @@ impl<'a, I: Tokens> FnBodyParser<'a, BlockStmtOrExpr> for Parser<'a, I> {
}
#[parser]
impl<'a, I: Tokens> FnBodyParser<'a, Option<BlockStmt>> for Parser<'a, I> {
fn parse_fn_body_inner(&mut self) -> PResult<'a, Option<BlockStmt>> {
impl<I: Tokens> FnBodyParser<Option<BlockStmt>> for Parser<I> {
fn parse_fn_body_inner(&mut self) -> PResult<Option<BlockStmt>> {
// allow omitting body and allow placing `{` on next line
if self.input.syntax().typescript() && !is!('{') && eat!(';') {
return Ok(None);
@ -1248,19 +1248,11 @@ mod tests {
use swc_ecma_visit::assert_eq_ignore_span;
fn lhs(s: &'static str) -> Box<Expr> {
test_parser(s, Syntax::default(), |p| {
p.parse_lhs_expr().map_err(|mut e| {
e.emit();
})
})
test_parser(s, Syntax::default(), |p| p.parse_lhs_expr())
}
fn expr(s: &'static str) -> Box<Expr> {
test_parser(s, Syntax::default(), |p| {
p.parse_expr().map_err(|mut e| {
e.emit();
})
})
test_parser(s, Syntax::default(), |p| p.parse_expr())
}
#[test]

View File

@ -10,8 +10,8 @@ mod tests;
mod verifier;
#[parser]
impl<'a, I: Tokens> Parser<'a, I> {
pub fn parse_expr(&mut self) -> PResult<'a, Box<Expr>> {
impl<'a, I: Tokens> Parser<I> {
pub fn parse_expr(&mut self) -> PResult<Box<Expr>> {
trace_cur!(parse_expr);
let expr = self.parse_assignment_expr()?;
@ -33,7 +33,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
///`parseMaybeAssign` (overridden)
pub(super) fn parse_assignment_expr(&mut self) -> PResult<'a, Box<Expr>> {
pub(super) fn parse_assignment_expr(&mut self) -> PResult<Box<Expr>> {
trace_cur!(parse_assignment_expr);
if self.input.syntax().typescript() {
@ -97,7 +97,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
/// operators like `+=`.
///
/// `parseMaybeAssign`
fn parse_assignment_expr_base(&mut self) -> PResult<'a, Box<Expr>> {
fn parse_assignment_expr_base(&mut self) -> PResult<Box<Expr>> {
trace_cur!(parse_assignment_expr_base);
if self.ctx().in_generator && is!("yield") {
@ -126,11 +126,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
self.finish_assignment_expr(start, cond)
}
fn finish_assignment_expr(
&mut self,
start: BytePos,
cond: Box<Expr>,
) -> PResult<'a, Box<Expr>> {
fn finish_assignment_expr(&mut self, start: BytePos, cond: Box<Expr>) -> PResult<Box<Expr>> {
trace_cur!(finish_assignment_expr);
match cur!(false) {
@ -177,7 +173,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// Spec: 'ConditionalExpression'
fn parse_cond_expr(&mut self) -> PResult<'a, Box<Expr>> {
fn parse_cond_expr(&mut self) -> PResult<Box<Expr>> {
trace_cur!(parse_cond_expr);
let start = cur_pos!();
@ -212,7 +208,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
/// Parse a primary expression or arrow function
#[allow(clippy::cognitive_complexity)]
pub(super) fn parse_primary_expr(&mut self) -> PResult<'a, Box<Expr>> {
pub(super) fn parse_primary_expr(&mut self) -> PResult<Box<Expr>> {
trace_cur!(parse_primary_expr);
let _ = cur!(false);
@ -384,7 +380,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
unexpected!()
}
fn parse_array_lit(&mut self) -> PResult<'a, Box<Expr>> {
fn parse_array_lit(&mut self) -> PResult<Box<Expr>> {
trace_cur!(parse_array_lit);
let start = cur_pos!();
@ -414,12 +410,12 @@ impl<'a, I: Tokens> Parser<'a, I> {
Ok(Box::new(Expr::Array(ArrayLit { span, elems })))
}
fn parse_member_expr(&mut self) -> PResult<'a, Box<Expr>> {
fn parse_member_expr(&mut self) -> PResult<Box<Expr>> {
self.parse_member_expr_or_new_expr(false)
}
/// `parseImportMetaProperty`
pub(super) fn parse_import_meta_prop(&mut self, import: Ident) -> PResult<'a, MetaPropExpr> {
pub(super) fn parse_import_meta_prop(&mut self, import: Ident) -> PResult<MetaPropExpr> {
let start = cur_pos!();
let meta = import;
@ -436,7 +432,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `is_new_expr`: true iff we are parsing production 'NewExpression'.
fn parse_member_expr_or_new_expr(&mut self, is_new_expr: bool) -> PResult<'a, Box<Expr>> {
fn parse_member_expr_or_new_expr(&mut self, is_new_expr: bool) -> PResult<Box<Expr>> {
trace_cur!(parse_member_expr_or_new_expr);
let start = cur_pos!();
@ -510,14 +506,14 @@ impl<'a, I: Tokens> Parser<'a, I> {
/// Parse `NewExpresion`.
/// This includes `MemberExpression`.
pub(super) fn parse_new_expr(&mut self) -> PResult<'a, Box<Expr>> {
pub(super) fn parse_new_expr(&mut self) -> PResult<Box<Expr>> {
trace_cur!(parse_new_expr);
self.parse_member_expr_or_new_expr(true)
}
/// Parse `Arguments[Yield, Await]`
pub(super) fn parse_args(&mut self, is_dynamic_import: bool) -> PResult<'a, Vec<ExprOrSpread>> {
pub(super) fn parse_args(&mut self, is_dynamic_import: bool) -> PResult<Vec<ExprOrSpread>> {
trace_cur!(parse_args);
let start = cur_pos!();
@ -550,7 +546,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
/// AssignmentExpression[+In, ?Yield, ?Await]
/// ...AssignmentExpression[+In, ?Yield, ?Await]
pub(super) fn parse_expr_or_spread(&mut self) -> PResult<'a, ExprOrSpread> {
pub(super) fn parse_expr_or_spread(&mut self) -> PResult<ExprOrSpread> {
trace_cur!(parse_expr_or_spread);
let start = cur_pos!();
@ -571,7 +567,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
&mut self,
can_be_arrow: bool,
async_span: Option<Span>,
) -> PResult<'a, Box<Expr>> {
) -> PResult<Box<Expr>> {
trace_cur!(parse_paren_expr_or_arrow_fn);
let expr_start = async_span.map(|x| x.lo()).unwrap_or(cur_pos!());
@ -676,7 +672,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
let expr_or_spreads = paren_items
.into_iter()
.map(|item| -> PResult<'a, _> {
.map(|item| -> PResult<_> {
match item {
PatOrExprOrSpread::ExprOrSpread(e) => Ok(e),
_ => syntax_error!(item.span(), SyntaxError::InvalidExpr),
@ -755,7 +751,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
fn parse_tpl_elements(
&mut self,
is_tagged: bool,
) -> PResult<'a, (Vec<Box<Expr>>, Vec<TplElement>)> {
) -> PResult<(Vec<Box<Expr>>, Vec<TplElement>)> {
trace_cur!(parse_tpl_elements);
let mut exprs = vec![];
@ -780,7 +776,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
&mut self,
tag: Box<Expr>,
type_params: Option<TsTypeParamInstantiation>,
) -> PResult<'a, TaggedTpl> {
) -> PResult<TaggedTpl> {
let tagged_tpl_start = tag.span().lo();
trace_cur!(parse_tagged_tpl);
@ -800,7 +796,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
})
}
pub(super) fn parse_tpl(&mut self) -> PResult<'a, Tpl> {
pub(super) fn parse_tpl(&mut self) -> PResult<Tpl> {
trace_cur!(parse_tpl);
let start = cur_pos!();
@ -818,7 +814,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
})
}
fn parse_tpl_element(&mut self, is_tagged: bool) -> PResult<'a, TplElement> {
fn parse_tpl_element(&mut self, is_tagged: bool) -> PResult<TplElement> {
let start = cur_pos!();
let (raw, cooked) = match *cur!(true)? {
@ -853,7 +849,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
})
}
fn parse_subscripts(&mut self, mut obj: ExprOrSuper, no_call: bool) -> PResult<'a, Box<Expr>> {
fn parse_subscripts(&mut self, mut obj: ExprOrSuper, no_call: bool) -> PResult<Box<Expr>> {
loop {
obj = match self.parse_subscript(obj, no_call)? {
(expr, false) => return Ok(expr),
@ -864,11 +860,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
/// returned bool is true if this method should be called again.
#[allow(clippy::cognitive_complexity)]
fn parse_subscript(
&mut self,
obj: ExprOrSuper,
no_call: bool,
) -> PResult<'a, (Box<Expr>, bool)> {
fn parse_subscript(&mut self, obj: ExprOrSuper, no_call: bool) -> PResult<(Box<Expr>, bool)> {
let _ = cur!(false);
let start = obj.span().lo();
@ -1048,7 +1040,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
}
/// Parse call, dot, and `[]`-subscript expressions.
pub(super) fn parse_lhs_expr(&mut self) -> PResult<'a, Box<Expr>> {
pub(super) fn parse_lhs_expr(&mut self) -> PResult<Box<Expr>> {
let start = cur_pos!();
// parse jsx
@ -1149,12 +1141,12 @@ impl<'a, I: Tokens> Parser<'a, I> {
Ok(callee)
}
pub(super) fn parse_expr_or_pat(&mut self) -> PResult<'a, Box<Expr>> {
pub(super) fn parse_expr_or_pat(&mut self) -> PResult<Box<Expr>> {
self.parse_expr()
}
#[allow(clippy::cognitive_complexity)]
pub(super) fn parse_args_or_pats(&mut self) -> PResult<'a, Vec<PatOrExprOrSpread>> {
pub(super) fn parse_args_or_pats(&mut self) -> PResult<Vec<PatOrExprOrSpread>> {
trace_cur!(parse_args_or_pats);
expect!('(');
@ -1426,8 +1418,8 @@ pub(in crate::parser) enum PatOrExprOrSpread {
/// simple leaf methods.
#[parser]
impl<'a, I: Tokens> Parser<'a, I> {
fn parse_yield_expr(&mut self) -> PResult<'a, Box<Expr>> {
impl<'a, I: Tokens> Parser<I> {
fn parse_yield_expr(&mut self) -> PResult<Box<Expr>> {
let start = cur_pos!();
assert_and_bump!("yield");
@ -1462,7 +1454,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
}
fn at_possible_async(&mut self, expr: &Expr) -> PResult<'a, bool> {
fn at_possible_async(&mut self, expr: &Expr) -> PResult<bool> {
// TODO(kdy1): !this.state.containsEsc &&
Ok(self.state.potential_arrow_start == Some(expr.span().lo())
@ -1476,7 +1468,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// 12.2.5 Array Initializer
pub(super) fn parse_lit(&mut self) -> PResult<'a, Lit> {
pub(super) fn parse_lit(&mut self) -> PResult<Lit> {
let start = cur_pos!();
let v = match *cur!(true)? {
@ -1523,7 +1515,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
&mut self,
start: BytePos,
import_ident: Ident,
) -> PResult<'a, Box<Expr>> {
) -> PResult<Box<Expr>> {
if !self.input.syntax().dynamic_import() {
syntax_error!(span!(start), SyntaxError::DynamicImport);
}

View File

@ -5,14 +5,14 @@ use log::trace;
use swc_common::Spanned;
#[parser]
impl<'a, I: Tokens> Parser<'a, I> {
impl<'a, I: Tokens> Parser<I> {
/// Name from spec: 'LogicalORExpression'
pub(super) fn parse_bin_expr(&mut self) -> PResult<'a, Box<Expr>> {
pub(super) fn parse_bin_expr(&mut self) -> PResult<Box<Expr>> {
let ctx = self.ctx();
let left = match self.parse_unary_expr() {
Ok(v) => v,
Err(mut err) => {
Err(err) => {
match {
let is_err_token = match self.input.cur() {
Some(&Token::Error(..)) => true,
@ -28,22 +28,14 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
} {
&Word(Word::Keyword(Keyword::In)) if ctx.include_in_expr => {
err.cancel();
self.emit_err(self.input.cur_span(), SyntaxError::TS1109);
Box::new(Expr::Invalid(Invalid {
span: err.span.primary_span().unwrap(),
}))
Box::new(Expr::Invalid(Invalid { span: err.span }))
}
&Word(Word::Keyword(Keyword::InstanceOf)) | &Token::BinOp(..) => {
err.cancel();
self.emit_err(self.input.cur_span(), SyntaxError::TS1109);
Box::new(Expr::Invalid(Invalid {
span: err.span.primary_span().unwrap(),
}))
Box::new(Expr::Invalid(Invalid { span: err.span }))
}
_ => return Err(err),
}
@ -65,7 +57,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
&mut self,
left: Box<Expr>,
min_prec: u8,
) -> PResult<'a, Box<Expr>> {
) -> PResult<Box<Expr>> {
const PREC_OF_IN: u8 = 7;
if self.input.syntax().typescript()
@ -213,7 +205,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
/// Parse unary expression and update expression.
///
/// spec: 'UnaryExpression'
pub(in crate::parser) fn parse_unary_expr(&mut self) -> PResult<'a, Box<Expr>> {
pub(in crate::parser) fn parse_unary_expr(&mut self) -> PResult<Box<Expr>> {
let start = cur_pos!();
if !self.input.syntax().jsx() && self.input.syntax().typescript() && eat!('<') {
@ -267,8 +259,8 @@ impl<'a, I: Tokens> Parser<'a, I> {
let arg_start = cur_pos!() - BytePos(1);
let arg = match self.parse_unary_expr() {
Ok(expr) => expr,
Err(mut err) => {
err.emit();
Err(err) => {
self.emit_error(err);
Box::new(Expr::Invalid(Invalid {
span: Span::new(arg_start, arg_start, Default::default()),
}))
@ -338,7 +330,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
Ok(expr)
}
pub(crate) fn parse_await_expr(&mut self) -> PResult<'a, Box<Expr>> {
pub(crate) fn parse_await_expr(&mut self) -> PResult<Box<Expr>> {
let start = cur_pos!();
assert_and_bump!("await");
@ -369,11 +361,7 @@ mod tests {
use swc_ecma_visit::assert_eq_ignore_span;
fn bin(s: &'static str) -> Box<Expr> {
test_parser(s, Syntax::default(), |p| {
p.parse_bin_expr().map_err(|mut e| {
e.emit();
})
})
test_parser(s, Syntax::default(), |p| p.parse_bin_expr())
}
#[test]

View File

@ -15,39 +15,23 @@ fn syntax() -> Syntax {
}
fn lhs(s: &'static str) -> Box<Expr> {
test_parser(s, syntax(), |p| {
p.parse_lhs_expr().map_err(|mut e| {
e.emit();
})
})
test_parser(s, syntax(), |p| p.parse_lhs_expr())
}
fn new_expr(s: &'static str) -> Box<Expr> {
test_parser(s, syntax(), |p| {
p.parse_new_expr().map_err(|mut e| {
e.emit();
})
})
test_parser(s, syntax(), |p| p.parse_new_expr())
}
fn member_expr(s: &'static str) -> Box<Expr> {
test_parser(s, syntax(), |p| {
p.parse_member_expr().map_err(|mut e| {
e.emit();
})
})
test_parser(s, syntax(), |p| p.parse_member_expr())
}
fn expr(s: &'static str) -> Box<Expr> {
test_parser(s, syntax(), |p| {
p.parse_stmt(true)
.map(|stmt| match stmt {
Stmt::Expr(expr) => expr.expr,
_ => unreachable!(),
})
.map_err(|mut e| {
e.emit();
})
p.parse_stmt(true).map(|stmt| match stmt {
Stmt::Expr(expr) => expr.expr,
_ => unreachable!(),
})
})
}
@ -353,11 +337,7 @@ fn issue_328() {
dynamic_import: true,
..Default::default()
}),
|p| {
p.parse_stmt(true).map_err(|mut e| {
e.emit();
})
}
|p| { p.parse_stmt(true) }
),
Stmt::Expr(ExprStmt {
span,
@ -383,11 +363,7 @@ fn issue_337() {
test_parser(
"const foo = 'bar' in bas ? 'beep' : 'boop';",
Default::default(),
|p| {
p.parse_module().map_err(|mut e| {
e.emit();
})
},
|p| p.parse_module(),
);
}

View File

@ -2,8 +2,8 @@ use super::*;
use swc_common::{Span, Spanned, DUMMY_SP};
use swc_ecma_visit::{Node, Visit, VisitWith};
impl<'a, I: Tokens> Parser<'a, I> {
pub(in crate::parser) fn verify_expr(&self, expr: Box<Expr>) -> PResult<'a, Box<Expr>> {
impl<'a, I: Tokens> Parser<I> {
pub(in crate::parser) fn verify_expr(&mut self, expr: Box<Expr>) -> PResult<Box<Expr>> {
let mut v = Verifier { errors: vec![] };
v.visit_expr(&expr, &Invalid { span: DUMMY_SP } as _);

View File

@ -6,8 +6,8 @@ use swc_atoms::js_word;
use swc_ecma_parser_macros::parser;
#[parser]
impl<'a, I: Tokens> Parser<'a, I> {
pub(super) fn parse_maybe_private_name(&mut self) -> PResult<'a, Either<PrivateName, Ident>> {
impl<'a, I: Tokens> Parser<I> {
pub(super) fn parse_maybe_private_name(&mut self) -> PResult<Either<PrivateName, Ident>> {
let start = cur_pos!();
let is_private = is!('#');
@ -18,7 +18,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
}
pub(super) fn parse_private_name(&mut self) -> PResult<'a, PrivateName> {
pub(super) fn parse_private_name(&mut self) -> PResult<PrivateName> {
let start = cur_pos!();
assert_and_bump!('#');
@ -35,14 +35,14 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// IdentifierReference
pub(super) fn parse_ident_ref(&mut self) -> PResult<'a, Ident> {
pub(super) fn parse_ident_ref(&mut self) -> PResult<Ident> {
let ctx = self.ctx();
self.parse_ident(!ctx.in_generator, !ctx.in_async)
}
/// LabelIdentifier
pub(super) fn parse_label_ident(&mut self) -> PResult<'a, Ident> {
pub(super) fn parse_label_ident(&mut self) -> PResult<Ident> {
let ctx = self.ctx();
self.parse_ident(!ctx.in_generator, !ctx.in_async)
@ -50,7 +50,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
/// Use this when spec says "IdentifierName".
/// This allows idents like `catch`.
pub(super) fn parse_ident_name(&mut self) -> PResult<'a, Ident> {
pub(super) fn parse_ident_name(&mut self) -> PResult<Ident> {
let start = cur_pos!();
let w = match cur!(true) {
@ -67,7 +67,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
/// Identifier
///
/// In strict mode, "yield" is SyntaxError if matched.
pub(super) fn parse_ident(&mut self, incl_yield: bool, incl_await: bool) -> PResult<'a, Ident> {
pub(super) fn parse_ident(&mut self, incl_yield: bool, incl_await: bool) -> PResult<Ident> {
trace_cur!(parse_ident);
let start = cur_pos!();
@ -144,16 +144,16 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
}
pub(super) trait MaybeOptionalIdentParser<'a, Ident> {
fn parse_maybe_opt_binding_ident(&mut self) -> PResult<'a, Ident>;
pub(super) trait MaybeOptionalIdentParser<Ident> {
fn parse_maybe_opt_binding_ident(&mut self) -> PResult<Ident>;
}
impl<'a, I: Tokens> MaybeOptionalIdentParser<'a, Ident> for Parser<'a, I> {
fn parse_maybe_opt_binding_ident(&mut self) -> PResult<'a, Ident> {
impl<I: Tokens> MaybeOptionalIdentParser<Ident> for Parser<I> {
fn parse_maybe_opt_binding_ident(&mut self) -> PResult<Ident> {
self.parse_binding_ident()
}
}
impl<'a, I: Tokens> MaybeOptionalIdentParser<'a, Option<Ident>> for Parser<'a, I> {
fn parse_maybe_opt_binding_ident(&mut self) -> PResult<'a, Option<Ident>> {
impl<I: Tokens> MaybeOptionalIdentParser<Option<Ident>> for Parser<I> {
fn parse_maybe_opt_binding_ident(&mut self) -> PResult<Option<Ident>> {
self.parse_opt_binding_ident()
}
}

View File

@ -1,11 +1,12 @@
use super::Parser;
use crate::{
error::Error,
lexer::{self},
token::*,
Context, JscTarget, Syntax,
};
use lexer::TokenContexts;
use std::{cell::RefCell, mem, rc::Rc};
use std::{cell::RefCell, mem, mem::take, rc::Rc};
use swc_common::{BytePos, Span, SpanData, DUMMY_SP};
pub trait Tokens: Clone + Iterator<Item = TokenAndSpan> {
@ -18,6 +19,13 @@ pub trait Tokens: Clone + Iterator<Item = TokenAndSpan> {
fn token_context(&self) -> &lexer::TokenContexts;
fn token_context_mut(&mut self) -> &mut lexer::TokenContexts;
fn set_token_context(&mut self, _c: lexer::TokenContexts);
/// Implementors should use Rc<RefCell<Vec<Error>>>.
///
/// It is required because parser should backtrack while parsing typescript
/// code.
fn add_error(&self, error: Error);
fn take_errors(&mut self) -> Vec<Error>;
}
#[derive(Clone)]
@ -27,6 +35,7 @@ pub struct TokensInput {
syntax: Syntax,
target: JscTarget,
token_ctx: TokenContexts,
errors: Rc<RefCell<Vec<Error>>>,
}
impl TokensInput {
@ -37,6 +46,7 @@ impl TokensInput {
syntax,
target,
token_ctx: Default::default(),
errors: Default::default(),
}
}
}
@ -78,6 +88,14 @@ impl Tokens for TokensInput {
fn set_token_context(&mut self, c: TokenContexts) {
self.token_ctx = c;
}
fn add_error(&self, error: Error) {
self.errors.borrow_mut().push(error);
}
fn take_errors(&mut self) -> Vec<Error> {
take(&mut self.errors.borrow_mut())
}
}
/// Note: Lexer need access to parser's context to lex correctly.
@ -168,6 +186,14 @@ impl<I: Tokens> Tokens for Capturing<I> {
fn set_token_context(&mut self, c: TokenContexts) {
self.inner.set_token_context(c)
}
fn add_error(&self, error: Error) {
self.inner.add_error(error);
}
fn take_errors(&mut self) -> Vec<Error> {
self.inner.take_errors()
}
}
/// This struct is responsible for managing current token and peeked token.
@ -181,10 +207,13 @@ pub(super) struct Buffer<I: Tokens> {
next: Option<TokenAndSpan>,
}
impl<I: Tokens> Parser<'_, I> {
impl<I: Tokens> Parser<I> {
pub fn input(&mut self) -> &mut I {
&mut self.input.iter
}
pub(crate) fn input_ref(&self) -> &I {
&self.input.iter
}
}
impl<I: Tokens> Buffer<I> {

View File

@ -7,9 +7,9 @@ use swc_common::{Span, Spanned, SyntaxContext};
mod tests;
#[parser]
impl<'a, I: Tokens> Parser<'a, I> {
impl<'a, I: Tokens> Parser<I> {
/// Parse next token as JSX identifier
pub(super) fn parse_jsx_ident(&mut self) -> PResult<'a, Ident> {
pub(super) fn parse_jsx_ident(&mut self) -> PResult<Ident> {
debug_assert!(self.input.syntax().jsx());
let ctx = self.ctx();
@ -27,7 +27,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// Parse namespaced identifier.
pub(super) fn parse_jsx_namespaced_name(&mut self) -> PResult<'a, JSXAttrName> {
pub(super) fn parse_jsx_namespaced_name(&mut self) -> PResult<JSXAttrName> {
debug_assert!(self.input.syntax().jsx());
let ns = self.parse_jsx_ident()?;
@ -44,7 +44,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
/// Parses element name in any form - namespaced, member or single
/// identifier.
pub(super) fn parse_jsx_element_name(&mut self) -> PResult<'a, JSXElementName> {
pub(super) fn parse_jsx_element_name(&mut self) -> PResult<JSXElementName> {
debug_assert!(self.input.syntax().jsx());
let start_pos = cur_pos!();
@ -70,7 +70,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
/// Parses any type of JSX attribute value.
///
/// TODO(kdy1): Change return type to JSXAttrValue
pub(super) fn parse_jsx_attr_value(&mut self) -> PResult<'a, JSXAttrValue> {
pub(super) fn parse_jsx_attr_value(&mut self) -> PResult<JSXAttrValue> {
debug_assert!(self.input.syntax().jsx());
let start = cur_pos!();
@ -105,7 +105,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
/// JSXEmptyExpression is unique type since it doesn't actually parse
/// anything, and so it should start at the end of last read token (left
/// brace) and finish at the beginning of the next one (right brace).
pub(super) fn parse_jsx_empty_expr(&mut self) -> PResult<'a, JSXEmptyExpr> {
pub(super) fn parse_jsx_empty_expr(&mut self) -> PResult<JSXEmptyExpr> {
debug_assert!(self.input.syntax().jsx());
let start = cur_pos!();
@ -115,7 +115,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// Parse JSX spread child
pub(super) fn parse_jsx_spread_child(&mut self) -> PResult<'a, JSXSpreadChild> {
pub(super) fn parse_jsx_spread_child(&mut self) -> PResult<JSXSpreadChild> {
debug_assert!(self.input.syntax().jsx());
let start = cur_pos!();
expect!('{');
@ -130,10 +130,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// Parses JSX expression enclosed into curly brackets.
pub(super) fn parse_jsx_expr_container(
&mut self,
start: BytePos,
) -> PResult<'a, JSXExprContainer> {
pub(super) fn parse_jsx_expr_container(&mut self, start: BytePos) -> PResult<JSXExprContainer> {
debug_assert!(self.input.syntax().jsx());
let start = cur_pos!();
@ -151,7 +148,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// Parses following JSX attribute name-value pair.
pub(super) fn parse_jsx_attr(&mut self) -> PResult<'a, JSXAttrOrSpread> {
pub(super) fn parse_jsx_attr(&mut self) -> PResult<JSXAttrOrSpread> {
debug_assert!(self.input.syntax().jsx());
let start = cur_pos!();
@ -183,7 +180,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
pub(super) fn parse_jsx_opening_element_at(
&mut self,
start: BytePos,
) -> PResult<'a, Either<JSXOpeningFragment, JSXOpeningElement>> {
) -> PResult<Either<JSXOpeningFragment, JSXOpeningElement>> {
debug_assert!(self.input.syntax().jsx());
if eat!(JSXTagEnd) {
@ -200,7 +197,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
&mut self,
start: BytePos,
name: JSXElementName,
) -> PResult<'a, JSXOpeningElement> {
) -> PResult<JSXOpeningElement> {
debug_assert!(self.input.syntax().jsx());
let type_args = if self.input.syntax().typescript() && is!('<') {
@ -235,7 +232,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
fn parse_jsx_closing_element_at(
&mut self,
start: BytePos,
) -> PResult<'a, Either<JSXClosingFragment, JSXClosingElement>> {
) -> PResult<Either<JSXClosingFragment, JSXClosingElement>> {
debug_assert!(self.input.syntax().jsx());
if eat!(JSXTagEnd) {
@ -257,7 +254,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
pub(super) fn parse_jsx_element_at(
&mut self,
start_pos: BytePos,
) -> PResult<'a, Either<JSXFragment, JSXElement>> {
) -> PResult<Either<JSXFragment, JSXElement>> {
debug_assert!(self.input.syntax().jsx());
let _ = cur!(true);
@ -374,7 +371,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
/// Parses entire JSX element from current position.
///
/// babel: `jsxParseElement`
pub(super) fn parse_jsx_element(&mut self) -> PResult<'a, Either<JSXFragment, JSXElement>> {
pub(super) fn parse_jsx_element(&mut self) -> PResult<Either<JSXFragment, JSXElement>> {
debug_assert!(self.input.syntax().jsx());
debug_assert!({
match *cur!(true)? {
@ -388,7 +385,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
self.parse_jsx_element_at(start_pos)
}
pub(super) fn parse_jsx_text(&mut self) -> PResult<'a, JSXText> {
pub(super) fn parse_jsx_text(&mut self) -> PResult<JSXText> {
debug_assert!(self.input.syntax().jsx());
debug_assert!({
match cur!(false) {

View File

@ -10,11 +10,7 @@ fn jsx(src: &'static str) -> Box<Expr> {
jsx: true,
..Default::default()
}),
|p| {
p.parse_expr().map_err(|mut e| {
e.emit();
})
},
|p| p.parse_expr(),
)
}

View File

@ -192,12 +192,10 @@ macro_rules! cur {
if is_err_token {
match $p.input.bump() {
$crate::token::Token::Error(e) => {
let err =
::swc_common::errors::DiagnosticBuilder::from($crate::error::ErrorToDiag {
handler: &$p.session.handler,
span: e.span,
error: e.error,
});
let err = crate::error::Error {
span: e.span,
error: e.error,
};
return Err(err.into());
}
_ => unreachable!(),
@ -208,15 +206,15 @@ macro_rules! cur {
Some(c) => Ok(c),
None => {
if $required {
let err = ::swc_common::errors::DiagnosticBuilder::from($crate::error::Eof {
last,
handler: &$p.session.handler,
});
let err = crate::error::Error {
span: last,
error: Box::new(crate::error::SyntaxError::Eof),
};
return Err(err.into());
}
Err($crate::error::Eof {
last,
handler: &$p.session.handler,
Err(crate::error::Error {
span: last,
error: Box::new(crate::error::SyntaxError::Eof),
})
}
}
@ -237,11 +235,10 @@ Current token is {:?}",
match $p.input.peek() {
Some(c) => Ok(c),
None => {
let err = ::swc_common::errors::DiagnosticBuilder::from($crate::error::Eof {
//TODO: Use whole span
last,
handler: &$p.session.handler,
});
let err = crate::error::Error {
span: last,
error: Box::new(crate::error::SyntaxError::Eof),
};
Err(err)
}
}
@ -312,11 +309,10 @@ macro_rules! span {
macro_rules! make_error {
($p:expr, $span:expr, $err:expr) => {{
::swc_common::errors::DiagnosticBuilder::from($crate::error::ErrorToDiag {
handler: $p.session.handler,
crate::error::Error {
span: $span,
error: $err,
})
error: Box::new($err),
}
}};
}

View File

@ -3,18 +3,19 @@
pub use self::input::{Capturing, Tokens, TokensInput};
use self::{input::Buffer, util::ParseObject};
use crate::{
error::{ErrorToDiag, SyntaxError},
error::SyntaxError,
lexer::Lexer,
token::{Token, Word},
Context, JscTarget, Session, Syntax,
Context, JscTarget, Syntax,
};
use std::ops::{Deref, DerefMut};
use swc_atoms::JsWord;
use swc_common::{comments::Comments, errors::DiagnosticBuilder, input::Input, BytePos, Span};
use swc_common::{comments::Comments, input::Input, BytePos, Span};
use swc_ecma_ast::*;
use swc_ecma_parser_macros::parser;
#[cfg(test)]
extern crate test;
use crate::error::Error;
#[cfg(test)]
use test::Bencher;
@ -32,14 +33,13 @@ mod typescript;
mod util;
/// When error occurs, error is emitted and parser returns Err(()).
pub type PResult<'a, T> = Result<T, DiagnosticBuilder<'a>>;
pub type PResult<T> = Result<T, Error>;
/// EcmaScript parser.
#[derive(Clone)]
pub struct Parser<'a, I: Tokens> {
pub struct Parser<I: Tokens> {
/// [false] while backtracking
emit_err: bool,
session: Session<'a>,
state: State,
input: Buffer<I>,
}
@ -51,37 +51,31 @@ struct State {
potential_arrow_start: Option<BytePos>,
}
impl<'a, I: Input> Parser<'a, Lexer<'a, I>> {
// #[deprecated(since = "0.12.3", note = "Please use new_from instead")]
pub fn new(
session: Session<'a>,
syntax: Syntax,
input: I,
comments: Option<&'a Comments>,
) -> Self {
Self::new_from(
session,
Lexer::new(session, syntax, Default::default(), input, comments),
)
impl<'a, I: Input> Parser<Lexer<'a, I>> {
pub fn new(syntax: Syntax, input: I, comments: Option<&'a Comments>) -> Self {
Self::new_from(Lexer::new(syntax, Default::default(), input, comments))
}
}
#[parser]
impl<'a, I: Tokens> Parser<'a, I> {
pub fn new_from(session: Session<'a>, input: I) -> Self {
impl<I: Tokens> Parser<I> {
pub fn new_from(input: I) -> Self {
Parser {
emit_err: true,
session,
input: Buffer::new(input),
state: Default::default(),
}
}
pub fn take_errors(&mut self) -> Vec<Error> {
self.input().take_errors()
}
pub(crate) fn target(&self) -> JscTarget {
self.input.target()
}
pub fn parse_script(&mut self) -> PResult<'a, Script> {
pub fn parse_script(&mut self) -> PResult<Script> {
trace_cur!(parse_script);
let ctx = Context {
@ -101,12 +95,12 @@ impl<'a, I: Tokens> Parser<'a, I> {
})
}
pub fn parse_typescript_module(&mut self) -> PResult<'a, Module> {
pub fn parse_typescript_module(&mut self) -> PResult<Module> {
trace_cur!(parse_typescript_module);
debug_assert!(self.syntax().typescript());
//TODO: parse() -> PResult<'a, Program>
//TODO: parse() -> PResult<Program>
let ctx = Context {
module: true,
strict: false,
@ -125,8 +119,8 @@ impl<'a, I: Tokens> Parser<'a, I> {
})
}
pub fn parse_module(&mut self) -> PResult<'a, Module> {
//TODO: parse() -> PResult<'a, Program>
pub fn parse_module(&mut self) -> PResult<Module> {
//TODO: parse() -> PResult<Program>
let ctx = Context {
module: true,
strict: true,
@ -145,7 +139,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
})
}
fn parse_shebang(&mut self) -> PResult<'a, Option<JsWord>> {
fn parse_shebang(&mut self) -> PResult<Option<JsWord>> {
match cur!(false) {
Ok(&Token::Shebang(..)) => match bump!() {
Token::Shebang(v) => Ok(Some(v)),
@ -164,23 +158,36 @@ impl<'a, I: Tokens> Parser<'a, I> {
return;
}
DiagnosticBuilder::from(ErrorToDiag {
handler: self.session.handler,
self.emit_error(Error {
span,
error,
error: Box::new(error),
})
.emit();
}
fn emit_error(&self, error: Error) {
if !self.emit_err || !self.syntax().early_errors() {
return;
}
self.input_ref().add_error(error);
}
}
#[cfg(test)]
pub fn test_parser<F, Ret>(s: &'static str, syntax: Syntax, f: F) -> Ret
where
F: for<'a> FnOnce(&'a mut Parser<'a, Lexer<'a, crate::SourceFileInput<'_>>>) -> Result<Ret, ()>,
F: FnOnce(&mut Parser<Lexer<crate::SourceFileInput<'_>>>) -> Result<Ret, Error>,
{
crate::with_test_sess(s, |sess, input| {
let lexer = Lexer::new(sess, syntax, JscTarget::Es2019, input, None);
f(&mut Parser::new_from(sess, lexer))
crate::with_test_sess(s, |handler, input| {
let lexer = Lexer::new(syntax, JscTarget::Es2019, input, None);
let mut p = Parser::new_from(lexer);
let ret = f(&mut p);
for err in p.take_errors() {
err.into_diagnostic(handler).emit();
}
ret.map_err(|err| err.into_diagnostic(handler).emit())
})
.unwrap_or_else(|output| panic!("test_parser(): failed to parse \n{}\n{}", s, output))
}
@ -188,11 +195,18 @@ where
#[cfg(test)]
pub fn test_parser_comment<F, Ret>(c: &Comments, s: &'static str, syntax: Syntax, f: F) -> Ret
where
F: for<'a> FnOnce(&'a mut Parser<'a, Lexer<'a, crate::SourceFileInput<'_>>>) -> Result<Ret, ()>,
F: FnOnce(&mut Parser<Lexer<crate::SourceFileInput<'_>>>) -> Result<Ret, Error>,
{
crate::with_test_sess(s, |sess, input| {
let lexer = Lexer::new(sess, syntax, JscTarget::Es2019, input, Some(&c));
f(&mut Parser::new_from(sess, lexer))
crate::with_test_sess(s, |handler, input| {
let lexer = Lexer::new(syntax, JscTarget::Es2019, input, Some(&c));
let mut p = Parser::new_from(lexer);
let ret = f(&mut p);
for err in p.take_errors() {
err.into_diagnostic(handler).emit();
}
ret.map_err(|err| err.into_diagnostic(handler).emit())
})
.unwrap_or_else(|output| panic!("test_parser(): failed to parse \n{}\n{}", s, output))
}
@ -200,16 +214,15 @@ where
#[cfg(test)]
pub fn bench_parser<F>(b: &mut Bencher, s: &'static str, syntax: Syntax, mut f: F)
where
F: for<'a> FnMut(&'a mut Parser<'a, Lexer<'a, crate::SourceFileInput<'_>>>) -> PResult<'a, ()>,
F: for<'a> FnMut(&'a mut Parser<Lexer<'a, crate::SourceFileInput<'_>>>) -> PResult<()>,
{
b.bytes = s.len() as u64;
let _ = crate::with_test_sess(s, |sess, input| {
let _ = crate::with_test_sess(s, |handler, input| {
b.iter(|| {
let lexer = Lexer::new(sess, syntax, Default::default(), input.clone(), None);
let _ = f(&mut Parser::new_from(sess, lexer)).map_err(|mut err| {
err.emit();
});
let lexer = Lexer::new(syntax, Default::default(), input.clone(), None);
let _ =
f(&mut Parser::new_from(lexer)).map_err(|err| err.into_diagnostic(handler).emit());
});
Ok(())

View File

@ -6,11 +6,11 @@ use swc_atoms::js_word;
use swc_common::Spanned;
#[parser]
impl<'a, I: Tokens> Parser<'a, I> {
impl<'a, I: Tokens> Parser<I> {
/// Parse a object literal or object pattern.
pub(super) fn parse_object<T>(&mut self) -> PResult<'a, T>
pub(super) fn parse_object<T>(&mut self) -> PResult<T>
where
Self: ParseObject<'a, T>,
Self: ParseObject<T>,
{
let start = cur_pos!();
assert_and_bump!('{');
@ -37,7 +37,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// spec: 'PropertyName'
pub(super) fn parse_prop_name(&mut self) -> PResult<'a, PropName> {
pub(super) fn parse_prop_name(&mut self) -> PResult<PropName> {
let ctx = self.ctx();
self.with_ctx(Context {
in_property_name: true,
@ -106,15 +106,15 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
#[parser]
impl<'a, I: Tokens> ParseObject<'a, Box<Expr>> for Parser<'a, I> {
impl<I: Tokens> ParseObject<Box<Expr>> for Parser<I> {
type Prop = PropOrSpread;
fn make_object(&mut self, span: Span, props: Vec<Self::Prop>) -> PResult<'a, Box<Expr>> {
fn make_object(&mut self, span: Span, props: Vec<Self::Prop>) -> PResult<Box<Expr>> {
Ok(Box::new(Expr::Object(ObjectLit { span, props })))
}
/// spec: 'PropertyDefinition'
fn parse_object_prop(&mut self) -> PResult<'a, Self::Prop> {
fn parse_object_prop(&mut self) -> PResult<Self::Prop> {
let start = cur_pos!();
// Parse as 'MethodDefinition'
@ -353,10 +353,10 @@ impl<'a, I: Tokens> ParseObject<'a, Box<Expr>> for Parser<'a, I> {
}
#[parser]
impl<'a, I: Tokens> ParseObject<'a, Pat> for Parser<'a, I> {
impl<I: Tokens> ParseObject<Pat> for Parser<I> {
type Prop = ObjectPatProp;
fn make_object(&mut self, span: Span, props: Vec<Self::Prop>) -> PResult<'a, Pat> {
fn make_object(&mut self, span: Span, props: Vec<Self::Prop>) -> PResult<Pat> {
let len = props.len();
for (i, p) in props.iter().enumerate() {
if i == len - 1 {
@ -387,7 +387,7 @@ impl<'a, I: Tokens> ParseObject<'a, Pat> for Parser<'a, I> {
}
/// Production 'BindingProperty'
fn parse_object_prop(&mut self) -> PResult<'a, Self::Prop> {
fn parse_object_prop(&mut self) -> PResult<Self::Prop> {
let start = cur_pos!();
if eat!("...") {

View File

@ -6,8 +6,8 @@ use swc_atoms::js_word;
use swc_common::Spanned;
#[parser]
impl<'a, I: Tokens> Parser<'a, I> {
pub(super) fn parse_opt_binding_ident(&mut self) -> PResult<'a, Option<Ident>> {
impl<'a, I: Tokens> Parser<I> {
pub(super) fn parse_opt_binding_ident(&mut self) -> PResult<Option<Ident>> {
trace_cur!(parse_opt_binding_ident);
if is!(BindingIdent) || (self.input.syntax().typescript() && is!("this")) {
@ -20,7 +20,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
/// babel: `parseBindingIdentifier`
///
/// spec: `BindingIdentifier`
pub(super) fn parse_binding_ident(&mut self) -> PResult<'a, Ident> {
pub(super) fn parse_binding_ident(&mut self) -> PResult<Ident> {
trace_cur!(parse_binding_ident);
// "yield" and "await" is **lexically** accepted.
@ -38,7 +38,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
Ok(ident)
}
pub(super) fn parse_binding_pat_or_ident(&mut self) -> PResult<'a, Pat> {
pub(super) fn parse_binding_pat_or_ident(&mut self) -> PResult<Pat> {
trace_cur!(parse_binding_pat_or_ident);
match *cur!(true)? {
@ -56,7 +56,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// babel: `parseBindingAtom`
pub(super) fn parse_binding_element(&mut self) -> PResult<'a, Pat> {
pub(super) fn parse_binding_element(&mut self) -> PResult<Pat> {
trace_cur!(parse_binding_element);
let start = cur_pos!();
@ -80,7 +80,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
Ok(left)
}
fn parse_array_binding_pat(&mut self) -> PResult<'a, Pat> {
fn parse_array_binding_pat(&mut self) -> PResult<Pat> {
let start = cur_pos!();
assert_and_bump!('[');
@ -130,7 +130,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}))
}
pub(super) fn eat_any_ts_modifier(&mut self) -> PResult<'a, bool> {
pub(super) fn eat_any_ts_modifier(&mut self) -> PResult<bool> {
let has_modifier = self.syntax().typescript()
&& match *cur!(false)? {
Word(Word::Ident(js_word!("public")))
@ -150,7 +150,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
/// spec: 'FormalParameter'
///
/// babel: `parseAssignableListItem`
pub(super) fn parse_formal_param_pat(&mut self) -> PResult<'a, Pat> {
pub(super) fn parse_formal_param_pat(&mut self) -> PResult<Pat> {
let start = cur_pos!();
let has_modifier = self.eat_any_ts_modifier()?;
@ -250,7 +250,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
Ok(pat)
}
pub(super) fn parse_constructor_params(&mut self) -> PResult<'a, Vec<ParamOrTsParamProp>> {
pub(super) fn parse_constructor_params(&mut self) -> PResult<Vec<ParamOrTsParamProp>> {
let mut first = true;
let mut params = vec![];
@ -304,7 +304,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
&mut self,
param_start: BytePos,
decorators: Vec<Decorator>,
) -> PResult<'a, ParamOrTsParamProp> {
) -> PResult<ParamOrTsParamProp> {
let (accessibility, readonly) = if self.input.syntax().typescript() {
let accessibility = self.parse_access_modifier()?;
(
@ -337,7 +337,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
}
pub(super) fn parse_formal_params(&mut self) -> PResult<'a, Vec<Param>> {
pub(super) fn parse_formal_params(&mut self) -> PResult<Vec<Param>> {
let mut first = true;
let mut params = vec![];
let mut dot3_token = Span::default();
@ -421,7 +421,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
Ok(params)
}
pub(super) fn parse_unique_formal_params(&mut self) -> PResult<'a, Vec<Param>> {
pub(super) fn parse_unique_formal_params(&mut self) -> PResult<Vec<Param>> {
// FIXME: This is wrong
self.parse_formal_params()
}
@ -445,14 +445,10 @@ impl PatType {
}
#[parser]
impl<'a, I: Tokens> Parser<'a, I> {
impl<'a, I: Tokens> Parser<I> {
/// This does not return 'rest' pattern because non-last parameter cannot be
/// rest.
pub(super) fn reparse_expr_as_pat(
&mut self,
pat_ty: PatType,
expr: Box<Expr>,
) -> PResult<'a, Pat> {
pub(super) fn reparse_expr_as_pat(&mut self, pat_ty: PatType, expr: Box<Expr>) -> PResult<Pat> {
if let Expr::Invalid(i) = *expr {
return Ok(Pat::Invalid(i));
}
@ -479,7 +475,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
&mut self,
pat_ty: PatType,
expr: Box<Expr>,
) -> PResult<'a, Pat> {
) -> PResult<Pat> {
// In dts, we do not reparse.
debug_assert!(!self.input.syntax().dts());
@ -621,7 +617,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
}
})
.collect::<PResult<'a, _>>()?,
.collect::<PResult<_>>()?,
optional: false,
type_ann: None,
}))
@ -735,7 +731,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
pub(super) fn parse_paren_items_as_params(
&mut self,
mut exprs: Vec<PatOrExprOrSpread>,
) -> PResult<'a, Vec<Pat>> {
) -> PResult<Vec<Pat>> {
let pat_ty = PatType::BindingPat;
let len = exprs.len();
@ -798,11 +794,7 @@ mod tests {
use swc_ecma_visit::assert_eq_ignore_span;
fn array_pat(s: &'static str) -> Pat {
test_parser(s, Syntax::default(), |p| {
p.parse_array_binding_pat().map_err(|mut e| {
e.emit();
})
})
test_parser(s, Syntax::default(), |p| p.parse_array_binding_pat())
}
fn ident(s: &str) -> Ident {

View File

@ -6,13 +6,13 @@ use swc_common::Spanned;
mod module_item;
#[parser]
impl<'a, I: Tokens> Parser<'a, I> {
impl<'a, I: Tokens> Parser<I> {
pub(super) fn parse_block_body<Type>(
&mut self,
mut allow_directives: bool,
top_level: bool,
end: Option<&Token>,
) -> PResult<'a, Vec<Type>>
) -> PResult<Vec<Type>>
where
Self: StmtLikeParser<'a, Type>,
Type: IsDirective + From<Stmt>,
@ -58,18 +58,18 @@ impl<'a, I: Tokens> Parser<'a, I> {
Ok(stmts)
}
pub fn parse_stmt(&mut self, top_level: bool) -> PResult<'a, Stmt> {
pub fn parse_stmt(&mut self, top_level: bool) -> PResult<Stmt> {
trace_cur!(parse_stmt);
self.parse_stmt_like(false, top_level)
}
fn parse_stmt_list_item(&mut self, top_level: bool) -> PResult<'a, Stmt> {
fn parse_stmt_list_item(&mut self, top_level: bool) -> PResult<Stmt> {
trace_cur!(parse_stmt_list_item);
self.parse_stmt_like(true, top_level)
}
/// Parse a statement, declaration or module item.
fn parse_stmt_like<Type>(&mut self, include_decl: bool, top_level: bool) -> PResult<'a, Type>
fn parse_stmt_like<Type>(&mut self, include_decl: bool, top_level: bool) -> PResult<Type>
where
Self: StmtLikeParser<'a, Type>,
Type: IsDirective + From<Stmt>,
@ -94,7 +94,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
include_decl: bool,
top_level: bool,
decorators: Vec<Decorator>,
) -> PResult<'a, Stmt> {
) -> PResult<Stmt> {
trace_cur!(parse_stmt_internal);
if top_level && is!("await") {
@ -368,7 +368,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
}
fn parse_if_stmt(&mut self) -> PResult<'a, Stmt> {
fn parse_if_stmt(&mut self) -> PResult<Stmt> {
let start = cur_pos!();
assert_and_bump!("if");
@ -413,7 +413,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}))
}
fn parse_return_stmt(&mut self) -> PResult<'a, Stmt> {
fn parse_return_stmt(&mut self) -> PResult<Stmt> {
let start = cur_pos!();
let stmt = self.parse_with(|p| {
@ -439,7 +439,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
#[allow(clippy::cognitive_complexity)]
fn parse_switch_stmt(&mut self) -> PResult<'a, Stmt> {
fn parse_switch_stmt(&mut self) -> PResult<Stmt> {
let switch_start = cur_pos!();
assert_and_bump!("switch");
@ -506,7 +506,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}))
}
fn parse_throw_stmt(&mut self) -> PResult<'a, Stmt> {
fn parse_throw_stmt(&mut self) -> PResult<Stmt> {
let start = cur_pos!();
assert_and_bump!("throw");
@ -523,7 +523,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
Ok(Stmt::Throw(ThrowStmt { span, arg }))
}
fn parse_try_stmt(&mut self) -> PResult<'a, Stmt> {
fn parse_try_stmt(&mut self) -> PResult<Stmt> {
let start = cur_pos!();
assert_and_bump!("try");
@ -546,7 +546,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}))
}
fn parse_catch_clause(&mut self) -> PResult<'a, Option<CatchClause>> {
fn parse_catch_clause(&mut self) -> PResult<Option<CatchClause>> {
let start = cur_pos!();
Ok(if eat!("catch") {
@ -564,7 +564,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
})
}
fn parse_finally_block(&mut self) -> PResult<'a, Option<BlockStmt>> {
fn parse_finally_block(&mut self) -> PResult<Option<BlockStmt>> {
Ok(if eat!("finally") {
self.parse_block(false).map(Some)?
} else {
@ -573,7 +573,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// It's optional since es2019
fn parse_catch_param(&mut self) -> PResult<'a, Option<Pat>> {
fn parse_catch_param(&mut self) -> PResult<Option<Pat>> {
if eat!('(') {
let pat = self.parse_binding_pat_or_ident()?;
@ -593,7 +593,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
}
pub(super) fn parse_var_stmt(&mut self, for_loop: bool) -> PResult<'a, VarDecl> {
pub(super) fn parse_var_stmt(&mut self, for_loop: bool) -> PResult<VarDecl> {
let start = cur_pos!();
let kind = match bump!() {
tok!("const") => VarDeclKind::Const,
@ -634,9 +634,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
decls: vec![],
});
}
Err(mut err) => {
err.cancel();
}
Err(..) => {}
_ => {}
}
}
@ -679,10 +677,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
if !eat!(';') {
self.emit_err(self.input.cur_span(), SyntaxError::TS1005);
let _ = self.parse_expr().map_err(|mut e| {
e.emit();
()
});
let _ = self.parse_expr();
while !eat!(';') {
bump!();
@ -698,7 +693,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
})
}
fn parse_var_declarator(&mut self, for_loop: bool) -> PResult<'a, VarDeclarator> {
fn parse_var_declarator(&mut self, for_loop: bool) -> PResult<VarDeclarator> {
let start = cur_pos!();
let mut name = self.parse_binding_pat_or_ident()?;
@ -770,7 +765,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
#[allow(clippy::cognitive_complexity)]
fn parse_do_stmt(&mut self) -> PResult<'a, Stmt> {
fn parse_do_stmt(&mut self) -> PResult<Stmt> {
let start = cur_pos!();
assert_and_bump!("do");
@ -793,7 +788,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
Ok(Stmt::DoWhile(DoWhileStmt { span, test, body }))
}
fn parse_while_stmt(&mut self) -> PResult<'a, Stmt> {
fn parse_while_stmt(&mut self) -> PResult<Stmt> {
let start = cur_pos!();
assert_and_bump!("while");
@ -813,7 +808,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
Ok(Stmt::While(WhileStmt { span, test, body }))
}
fn parse_with_stmt(&mut self) -> PResult<'a, Stmt> {
fn parse_with_stmt(&mut self) -> PResult<Stmt> {
if self.syntax().typescript() {
let span = self.input.cur_span();
self.emit_err(span, SyntaxError::TS2410);
@ -842,7 +837,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
Ok(Stmt::With(WithStmt { span, obj, body }))
}
pub(super) fn parse_block(&mut self, allow_directives: bool) -> PResult<'a, BlockStmt> {
pub(super) fn parse_block(&mut self, allow_directives: bool) -> PResult<BlockStmt> {
let start = cur_pos!();
expect!('{');
@ -853,7 +848,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
Ok(BlockStmt { span, stmts })
}
fn parse_labelled_stmt(&mut self, l: Ident) -> PResult<'a, Stmt> {
fn parse_labelled_stmt(&mut self, l: Ident) -> PResult<Stmt> {
let ctx = Context {
is_break_allowed: true,
..self.ctx()
@ -903,7 +898,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
})
}
fn parse_for_stmt(&mut self) -> PResult<'a, Stmt> {
fn parse_for_stmt(&mut self) -> PResult<Stmt> {
let start = cur_pos!();
assert_and_bump!("for");
@ -960,7 +955,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
})
}
fn parse_for_head(&mut self) -> PResult<'a, ForHead> {
fn parse_for_head(&mut self) -> PResult<ForHead> {
let start = cur_pos!();
let strict = self.ctx().strict;
@ -1037,7 +1032,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
self.parse_normal_for_head(Some(VarDeclOrExpr::Expr(init)))
}
fn parse_for_each_head(&mut self, left: VarDeclOrPat) -> PResult<'a, ForHead> {
fn parse_for_each_head(&mut self, left: VarDeclOrPat) -> PResult<ForHead> {
let of = bump!() == tok!("of");
if of {
let right = self.include_in_expr(true).parse_assignment_expr()?;
@ -1048,7 +1043,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
}
fn parse_normal_for_head(&mut self, init: Option<VarDeclOrExpr>) -> PResult<'a, ForHead> {
fn parse_normal_for_head(&mut self, init: Option<VarDeclOrExpr>) -> PResult<ForHead> {
let test = if eat_exact!(';') {
None
} else {
@ -1111,12 +1106,12 @@ pub(super) trait StmtLikeParser<'a, Type: IsDirective> {
&mut self,
top_level: bool,
decorators: Vec<Decorator>,
) -> PResult<'a, Type>;
) -> PResult<Type>;
}
#[parser]
impl<'a, I: Tokens> StmtLikeParser<'a, Stmt> for Parser<'a, I> {
fn handle_import_export(&mut self, top_level: bool, _: Vec<Decorator>) -> PResult<'a, Stmt> {
impl<'a, I: Tokens> StmtLikeParser<'a, Stmt> for Parser<I> {
fn handle_import_export(&mut self, top_level: bool, _: Vec<Decorator>) -> PResult<Stmt> {
let start = cur_pos!();
if self.input.syntax().dynamic_import() && is!("import") {
let expr = self.parse_expr()?;
@ -1154,26 +1149,14 @@ mod tests {
use swc_ecma_visit::assert_eq_ignore_span;
fn stmt(s: &'static str) -> Stmt {
test_parser(s, Syntax::default(), |p| {
p.parse_stmt(true).map_err(|mut e| {
e.emit();
})
})
test_parser(s, Syntax::default(), |p| p.parse_stmt(true))
}
fn module_item(s: &'static str) -> ModuleItem {
test_parser(s, Syntax::default(), |p| {
p.parse_stmt_like(true, true).map_err(|mut e| {
e.emit();
})
})
test_parser(s, Syntax::default(), |p| p.parse_stmt_like(true, true))
}
fn expr(s: &'static str) -> Box<Expr> {
test_parser(s, Syntax::default(), |p| {
p.parse_expr().map_err(|mut e| {
e.emit();
})
})
test_parser(s, Syntax::default(), |p| p.parse_expr())
}
#[test]
@ -1303,9 +1286,7 @@ mod tests {
decorators: true,
..Default::default()
}),
|p| p.parse_stmt_list_item(true).map_err(|mut e| {
e.emit();
}),
|p| p.parse_stmt_list_item(true),
),
Stmt::Decl(Decl::Class(ClassDecl {
ident: Ident::new("Foo".into(), span),
@ -1352,11 +1333,7 @@ ReactDOM.render(<App />, document.getElementById('root'))
jsx: true,
..Default::default()
}),
|p| {
p.parse_module().map_err(|mut e| {
e.emit();
})
},
|p| p.parse_module(),
);
}
@ -1375,11 +1352,7 @@ export default App"#;
jsx: true,
..Default::default()
}),
|p| {
p.parse_module().map_err(|mut e| {
e.emit();
})
},
|p| p.parse_module(),
);
}
@ -1392,11 +1365,7 @@ export default App"#;
export_default_from: true,
..Default::default()
}),
|p| {
p.parse_module().map_err(|mut e| {
e.emit();
})
},
|p| p.parse_module(),
);
}
@ -1409,11 +1378,7 @@ export default App"#;
export_default_from: true,
..Default::default()
}),
|p| {
p.parse_module().map_err(|mut e| {
e.emit();
})
},
|p| p.parse_module(),
);
}
@ -1426,11 +1391,7 @@ export default App"#;
export_default_from: true,
..Default::default()
}),
|p| {
p.parse_module().map_err(|mut e| {
e.emit();
})
},
|p| p.parse_module(),
);
}
@ -1443,11 +1404,7 @@ export default App"#;
export_default_from: true,
..Default::default()
}),
|p| {
p.parse_module().map_err(|mut e| {
e.emit();
})
},
|p| p.parse_module(),
);
}
@ -1459,11 +1416,7 @@ export default App"#;
Syntax::Es(EsConfig {
..Default::default()
}),
|p| {
p.parse_module().map_err(|mut e| {
e.emit();
})
},
|p| p.parse_module(),
);
}
@ -1476,11 +1429,7 @@ let x = 4";
Syntax::Es(EsConfig {
..Default::default()
}),
|p| {
p.parse_module().map_err(|mut e| {
e.emit();
})
},
|p| p.parse_module(),
);
}
@ -1491,11 +1440,7 @@ let x = 4";
Syntax::Es(EsConfig {
..Default::default()
}),
|p| {
p.parse_module().map_err(|mut e| {
e.emit();
})
},
|p| p.parse_module(),
);
}
@ -1508,11 +1453,7 @@ let x = 4";
export_namespace_from: true,
..Default::default()
}),
|p| {
p.parse_module().map_err(|mut e| {
e.emit();
})
},
|p| p.parse_module(),
);
}
@ -1524,11 +1465,7 @@ export default function waitUntil(callback, options = {}) {
var timeout = 'timeout' in options ? options.timeout : 1000;
}",
Default::default(),
|p| {
p.parse_module().map_err(|mut e| {
e.emit();
})
},
|p| p.parse_module(),
);
}
@ -1540,11 +1477,7 @@ export default function waitUntil(callback, options = {}) {
let timeout = 'timeout' in options ? options.timeout : 1000;
}",
Default::default(),
|p| {
p.parse_module().map_err(|mut e| {
e.emit();
})
},
|p| p.parse_module(),
);
}
@ -1553,11 +1486,7 @@ export default function waitUntil(callback, options = {}) {
test_parser(
";(function() {})(window, window.lib || (window.lib = {}))",
Default::default(),
|p| {
p.parse_module().map_err(|mut e| {
e.emit();
})
},
|p| p.parse_module(),
);
}
@ -1575,9 +1504,7 @@ export default function waitUntil(callback, options = {}) {
#[test]
fn issue_340_fn() {
test_parser("export default function(){};", Default::default(), |p| {
p.parse_module().map_err(|mut e| {
e.emit();
})
p.parse_module()
});
}
@ -1586,29 +1513,21 @@ export default function waitUntil(callback, options = {}) {
test_parser(
"export default async function(){};",
Default::default(),
|p| {
p.parse_module().map_err(|mut e| {
e.emit();
})
},
|p| p.parse_module(),
);
}
#[test]
fn issue_340_generator_fn() {
test_parser("export default function*(){};", Default::default(), |p| {
p.parse_module().map_err(|mut e| {
e.emit();
})
p.parse_module()
});
}
#[test]
fn issue_340_class() {
test_parser("export default class {};", Default::default(), |p| {
p.parse_module().map_err(|mut e| {
e.emit();
})
p.parse_module()
});
}
@ -1617,11 +1536,7 @@ export default function waitUntil(callback, options = {}) {
test_parser(
"var IS_IE11 = !global.ActiveXObject && 'ActiveXObject' in global;",
Default::default(),
|p| {
p.parse_module().map_err(|mut e| {
e.emit();
})
},
|p| p.parse_module(),
);
}
@ -1633,11 +1548,7 @@ export default function waitUntil(callback, options = {}) {
dynamic_import: true,
..Default::default()
}),
|p| {
p.parse_module().map_err(|mut e| {
e.emit();
})
},
|p| p.parse_module(),
);
}
@ -1654,11 +1565,7 @@ export default function waitUntil(callback, options = {}) {
dynamic_import: true,
..Default::default()
}),
|p| {
p.parse_module().map_err(|mut e| {
e.emit();
})
},
|p| p.parse_module(),
);
}
@ -1670,11 +1577,7 @@ export default function waitUntil(callback, options = {}) {
Syntax::Es(EsConfig {
..Default::default()
}),
|p| {
p.parse_module().map_err(|mut e| {
e.emit();
})
},
|p| p.parse_module(),
);
}
@ -1686,11 +1589,7 @@ export default function waitUntil(callback, options = {}) {
top_level_await: true,
..Default::default()
}),
|p| {
p.parse_module().map_err(|mut e| {
e.emit();
})
},
|p| p.parse_module(),
);
}
@ -1710,11 +1609,7 @@ export default function waitUntil(callback, options = {}) {
Syntax::Typescript(TsConfig {
..Default::default()
}),
|p| {
p.parse_typescript_module().map_err(|mut e| {
e.emit();
})
},
|p| p.parse_typescript_module(),
);
let (leading, trailing) = c.take_all();
@ -1739,11 +1634,7 @@ export default function waitUntil(callback, options = {}) {
Syntax::Typescript(TsConfig {
..Default::default()
}),
|p| {
p.parse_typescript_module().map_err(|mut e| {
e.emit();
})
},
|p| p.parse_typescript_module(),
);
let (leading, trailing) = c.take_all();
@ -1770,11 +1661,7 @@ export default function waitUntil(callback, options = {}) {
Syntax::Typescript(TsConfig {
..Default::default()
}),
|p| {
p.parse_typescript_module().map_err(|mut e| {
e.emit();
})
},
|p| p.parse_typescript_module(),
);
let (leading, trailing) = c.take_all();
@ -1796,11 +1683,7 @@ export default function waitUntil(callback, options = {}) {
Syntax::Typescript(TsConfig {
..Default::default()
}),
|p| {
p.parse_typescript_module().map_err(|mut e| {
e.emit();
})
},
|p| p.parse_typescript_module(),
);
let (leading, trailing) = c.take_all();

View File

@ -1,9 +1,9 @@
use super::*;
#[parser]
impl<'a, I: Tokens> Parser<'a, I> {
impl<'a, I: Tokens> Parser<I> {
#[allow(clippy::cognitive_complexity)]
fn parse_import(&mut self) -> PResult<'a, ModuleItem> {
fn parse_import(&mut self) -> PResult<ModuleItem> {
let start = cur_pos!();
if self.input.syntax().import_meta() && peeked_is!('.') {
@ -118,7 +118,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// Parse `foo`, `foo2 as bar` in `import { foo, foo2 as bar }`
fn parse_import_specifier(&mut self) -> PResult<'a, ImportSpecifier> {
fn parse_import_specifier(&mut self) -> PResult<ImportSpecifier> {
let start = cur_pos!();
match cur!(false) {
Ok(&Word(..)) => {
@ -152,11 +152,11 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
}
fn parse_imported_default_binding(&mut self) -> PResult<'a, Ident> {
fn parse_imported_default_binding(&mut self) -> PResult<Ident> {
self.parse_imported_binding()
}
fn parse_imported_binding(&mut self) -> PResult<'a, Ident> {
fn parse_imported_binding(&mut self) -> PResult<Ident> {
let ctx = Context {
in_async: false,
in_generator: false,
@ -166,7 +166,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
#[allow(clippy::cognitive_complexity)]
fn parse_export(&mut self, decorators: Vec<Decorator>) -> PResult<'a, ModuleDecl> {
fn parse_export(&mut self, decorators: Vec<Decorator>) -> PResult<ModuleDecl> {
let start = cur_pos!();
assert_and_bump!("export");
let _ = cur!(true);
@ -475,7 +475,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}))
}
fn parse_named_export_specifier(&mut self) -> PResult<'a, ExportNamedSpecifier> {
fn parse_named_export_specifier(&mut self) -> PResult<ExportNamedSpecifier> {
let start = cur_pos!();
let orig = self.parse_ident_name()?;
@ -493,7 +493,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
})
}
fn parse_from_clause_and_semi(&mut self) -> PResult<'a, Str> {
fn parse_from_clause_and_semi(&mut self) -> PResult<Str> {
expect!("from");
let str_start = cur_pos!();
@ -523,12 +523,12 @@ impl IsDirective for ModuleItem {
}
#[parser]
impl<'a, I: Tokens> StmtLikeParser<'a, ModuleItem> for Parser<'a, I> {
impl<'a, I: Tokens> StmtLikeParser<'a, ModuleItem> for Parser<I> {
fn handle_import_export(
&mut self,
top_level: bool,
decorators: Vec<Decorator>,
) -> PResult<'a, ModuleItem> {
) -> PResult<ModuleItem> {
if !top_level {
syntax_error!(SyntaxError::NonTopLevelImportExport);
}
@ -567,7 +567,7 @@ export default class Foo {
decorators_before_export: true,
..Default::default()
}),
|p| p.parse_module().map_err(|mut e| e.emit()),
|p| p.parse_module(),
);
}
}

View File

@ -5,9 +5,9 @@ use swc_atoms::js_word;
use swc_common::{Spanned, SyntaxContext};
#[parser]
impl<'a, I: Tokens> Parser<'a, I> {
impl<I: Tokens> Parser<I> {
/// `tsNextTokenCanFollowModifier`
fn ts_next_token_can_follow_modifier(&mut self) -> PResult<'a, bool> {
fn ts_next_token_can_follow_modifier(&mut self) -> PResult<bool> {
debug_assert!(self.input.syntax().typescript());
// Note: TypeScript's implementation is much more complicated because
@ -30,7 +30,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
pub(super) fn parse_ts_modifier(
&mut self,
allowed_modifiers: &[&'static str],
) -> PResult<'a, Option<&'static str>> {
) -> PResult<Option<&'static str>> {
if !self.input.syntax().typescript() {
return Ok(None);
}
@ -55,7 +55,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
/// `tsIsListTerminator`
fn is_ts_list_terminator(&mut self, kind: ParsingContext) -> PResult<'a, bool> {
fn is_ts_list_terminator(&mut self, kind: ParsingContext) -> PResult<bool> {
debug_assert!(self.input.syntax().typescript());
Ok(match kind {
@ -69,13 +69,9 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseList`
fn parse_ts_list<T, F>(
&mut self,
kind: ParsingContext,
mut parse_element: F,
) -> PResult<'a, Vec<T>>
fn parse_ts_list<T, F>(&mut self, kind: ParsingContext, mut parse_element: F) -> PResult<Vec<T>>
where
F: FnMut(&mut Self) -> PResult<'a, T>,
F: FnMut(&mut Self) -> PResult<T>,
{
debug_assert!(self.input.syntax().typescript());
@ -93,9 +89,9 @@ impl<'a, I: Tokens> Parser<'a, I> {
&mut self,
kind: ParsingContext,
mut parse_element: F,
) -> PResult<'a, Vec<T>>
) -> PResult<Vec<T>>
where
F: FnMut(&mut Self) -> PResult<'a, T>,
F: FnMut(&mut Self) -> PResult<T>,
{
self.parse_ts_delimited_list_inner(kind, |p| {
let start = p.input.cur_pos();
@ -109,9 +105,9 @@ impl<'a, I: Tokens> Parser<'a, I> {
&mut self,
kind: ParsingContext,
mut parse_element: F,
) -> PResult<'a, Vec<T>>
) -> PResult<Vec<T>>
where
F: FnMut(&mut Self) -> PResult<'a, (BytePos, T)>,
F: FnMut(&mut Self) -> PResult<(BytePos, T)>,
{
debug_assert!(self.input.syntax().typescript());
@ -160,9 +156,9 @@ impl<'a, I: Tokens> Parser<'a, I> {
parse_element: F,
bracket: bool,
skip_first_token: bool,
) -> PResult<'a, Vec<T>>
) -> PResult<Vec<T>>
where
F: FnMut(&mut Self) -> PResult<'a, T>,
F: FnMut(&mut Self) -> PResult<T>,
{
debug_assert!(self.input.syntax().typescript());
@ -186,7 +182,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseEntityName`
fn parse_ts_entity_name(&mut self, allow_reserved_words: bool) -> PResult<'a, TsEntityName> {
fn parse_ts_entity_name(&mut self, allow_reserved_words: bool) -> PResult<TsEntityName> {
debug_assert!(self.input.syntax().typescript());
let init = self.parse_ident_name()?;
@ -229,7 +225,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseTypeReference`
fn parse_ts_type_ref(&mut self) -> PResult<'a, TsTypeRef> {
fn parse_ts_type_ref(&mut self) -> PResult<TsTypeRef> {
debug_assert!(self.input.syntax().typescript());
let start = cur_pos!();
@ -260,7 +256,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
start: BytePos,
has_asserts_keyword: bool,
lhs: TsThisType,
) -> PResult<'a, TsTypePredicate> {
) -> PResult<TsTypePredicate> {
debug_assert!(self.input.syntax().typescript());
let param_name = TsThisTypeOrIdent::TsThisType(lhs);
@ -283,7 +279,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseThisTypeNode`
fn parse_ts_this_type_node(&mut self) -> PResult<'a, TsThisType> {
fn parse_ts_this_type_node(&mut self) -> PResult<TsThisType> {
debug_assert!(self.input.syntax().typescript());
expect!("this");
@ -294,7 +290,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseImportType`
fn parse_ts_import_type(&mut self) -> PResult<'a, TsImportType> {
fn parse_ts_import_type(&mut self) -> PResult<TsImportType> {
let start = cur_pos!();
assert_and_bump!("import");
@ -336,7 +332,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseTypeQuery`
fn parse_ts_type_query(&mut self) -> PResult<'a, TsTypeQuery> {
fn parse_ts_type_query(&mut self) -> PResult<TsTypeQuery> {
debug_assert!(self.input.syntax().typescript());
let start = cur_pos!();
@ -358,7 +354,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseTypeParameter`
fn parse_ts_type_param(&mut self) -> PResult<'a, TsTypeParam> {
fn parse_ts_type_param(&mut self) -> PResult<TsTypeParam> {
debug_assert!(self.input.syntax().typescript());
let start = cur_pos!();
@ -376,7 +372,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseTypeParameter`
pub(super) fn parse_ts_type_params(&mut self) -> PResult<'a, TsTypeParamDecl> {
pub(super) fn parse_ts_type_params(&mut self) -> PResult<TsTypeParamDecl> {
let start = cur_pos!();
if !is!('<') && !is!(JSXTagStart) {
@ -402,7 +398,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
pub(super) fn parse_ts_type_or_type_predicate_ann(
&mut self,
return_token: &'static Token,
) -> PResult<'a, TsTypeAnn> {
) -> PResult<TsTypeAnn> {
debug_assert!(self.input.syntax().typescript());
self.in_type().parse_with(|p| {
@ -459,9 +455,9 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsTryParse`
fn try_parse_ts_bool<F>(&mut self, op: F) -> PResult<'a, bool>
fn try_parse_ts_bool<F>(&mut self, op: F) -> PResult<bool>
where
F: FnOnce(&mut Self) -> PResult<'a, Option<bool>>,
F: FnOnce(&mut Self) -> PResult<Option<bool>>,
{
if !self.input.syntax().typescript() {
return Ok(false);
@ -475,10 +471,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
self.emit_err = true;
Ok(res)
}
Err(mut err) => {
err.cancel();
Ok(false)
}
Err(err) => Ok(false),
_ => Ok(false),
}
}
@ -486,7 +479,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
/// `tsTryParse`
pub(super) fn try_parse_ts<T, F>(&mut self, op: F) -> Option<T>
where
F: FnOnce(&mut Self) -> PResult<'a, Option<T>>,
F: FnOnce(&mut Self) -> PResult<Option<T>>,
{
if !self.input.syntax().typescript() {
return None;
@ -501,10 +494,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
Some(res)
}
Ok(None) => None,
Err(mut err) => {
err.cancel();
None
}
Err(..) => None,
}
}
@ -512,7 +502,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
&mut self,
eat_colon: bool,
start: BytePos,
) -> PResult<'a, TsTypeAnn> {
) -> PResult<TsTypeAnn> {
debug_assert!(self.input.syntax().typescript());
self.in_type().parse_with(|p| {
@ -533,7 +523,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
fn eat_then_parse_ts_type(
&mut self,
token_to_eat: &'static Token,
) -> PResult<'a, Option<Box<TsType>>> {
) -> PResult<Option<Box<TsType>>> {
self.in_type().parse_with(|p| {
if !p.input.eat(token_to_eat) {
return Ok(None);
@ -544,7 +534,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsExpectThenParseType`
fn expect_then_parse_ts_type(&mut self, token: &'static Token) -> PResult<'a, Box<TsType>> {
fn expect_then_parse_ts_type(&mut self, token: &'static Token) -> PResult<Box<TsType>> {
debug_assert!(self.input.syntax().typescript());
self.in_type().parse_with(|p| {
@ -557,7 +547,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsNextThenParseType`
pub(super) fn next_then_parse_ts_type(&mut self) -> PResult<'a, Box<TsType>> {
pub(super) fn next_then_parse_ts_type(&mut self) -> PResult<Box<TsType>> {
debug_assert!(self.input.syntax().typescript());
self.in_type().parse_with(|p| {
@ -568,7 +558,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseEnumMember`
fn parse_ts_enum_member(&mut self) -> PResult<'a, TsEnumMember> {
fn parse_ts_enum_member(&mut self) -> PResult<TsEnumMember> {
debug_assert!(self.input.syntax().typescript());
let start = cur_pos!();
@ -632,7 +622,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
&mut self,
start: BytePos,
is_const: bool,
) -> PResult<'a, TsEnumDecl> {
) -> PResult<TsEnumDecl> {
debug_assert!(self.input.syntax().typescript());
let id = self.parse_ident_name()?;
@ -651,7 +641,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseModuleBlock`
fn parse_ts_module_block(&mut self) -> PResult<'a, TsModuleBlock> {
fn parse_ts_module_block(&mut self) -> PResult<TsModuleBlock> {
trace_cur!(parse_ts_module_block);
debug_assert!(self.input.syntax().typescript());
@ -673,7 +663,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseModuleOrNamespaceDeclaration`
fn parse_ts_module_or_ns_decl(&mut self, start: BytePos) -> PResult<'a, TsModuleDecl> {
fn parse_ts_module_or_ns_decl(&mut self, start: BytePos) -> PResult<TsModuleDecl> {
debug_assert!(self.input.syntax().typescript());
let id = self.parse_ident_name()?;
@ -705,10 +695,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseAmbientExternalModuleDeclaration`
fn parse_ts_ambient_external_module_decl(
&mut self,
start: BytePos,
) -> PResult<'a, TsModuleDecl> {
fn parse_ts_ambient_external_module_decl(&mut self, start: BytePos) -> PResult<TsModuleDecl> {
debug_assert!(self.input.syntax().typescript());
let (global, id) = if is!("global") {
@ -743,7 +730,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
})
}
pub fn parse_type(&mut self) -> PResult<'a, Box<TsType>> {
pub fn parse_type(&mut self) -> PResult<Box<TsType>> {
debug_assert!(self.input.syntax().typescript());
self.in_type().parse_ts_type()
@ -752,7 +739,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
/// Be sure to be in a type context before calling self.
///
/// `tsParseType`
pub(super) fn parse_ts_type(&mut self) -> PResult<'a, Box<TsType>> {
pub(super) fn parse_ts_type(&mut self) -> PResult<Box<TsType>> {
trace_cur!(parse_ts_type);
debug_assert!(self.input.syntax().typescript());
@ -788,7 +775,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseNonConditionalType`
fn parse_ts_non_conditional_type(&mut self) -> PResult<'a, Box<TsType>> {
fn parse_ts_non_conditional_type(&mut self) -> PResult<Box<TsType>> {
trace_cur!(parse_ts_non_conditional_type);
debug_assert!(self.input.syntax().typescript());
@ -810,7 +797,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
self.parse_ts_union_type_or_higher()
}
fn is_ts_start_of_fn_type(&mut self) -> PResult<'a, bool> {
fn is_ts_start_of_fn_type(&mut self) -> PResult<bool> {
debug_assert!(self.input.syntax().typescript());
if is!('<') {
@ -821,10 +808,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseTypeAssertion`
pub(super) fn parse_ts_type_assertion(
&mut self,
start: BytePos,
) -> PResult<'a, TsTypeAssertion> {
pub(super) fn parse_ts_type_assertion(&mut self, start: BytePos) -> PResult<TsTypeAssertion> {
debug_assert!(self.input.syntax().typescript());
// Not actually necessary to set state.inType because we never reach here if JSX
@ -841,7 +825,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseHeritageClause`
pub(super) fn parse_ts_heritage_clause(&mut self) -> PResult<'a, Vec<TsExprWithTypeArgs>> {
pub(super) fn parse_ts_heritage_clause(&mut self) -> PResult<Vec<TsExprWithTypeArgs>> {
debug_assert!(self.input.syntax().typescript());
self.parse_ts_delimited_list(ParsingContext::HeritageClauseElement, |p| {
@ -850,7 +834,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseExpressionWithTypeArguments`
fn parse_expr_with_type_args(&mut self) -> PResult<'a, TsExprWithTypeArgs> {
fn parse_expr_with_type_args(&mut self) -> PResult<TsExprWithTypeArgs> {
debug_assert!(self.input.syntax().typescript());
let start = cur_pos!();
@ -871,10 +855,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
})
}
/// `tsParseInterfaceDeclaration`
pub(super) fn parse_ts_interface_decl(
&mut self,
start: BytePos,
) -> PResult<'a, TsInterfaceDecl> {
pub(super) fn parse_ts_interface_decl(&mut self, start: BytePos) -> PResult<TsInterfaceDecl> {
debug_assert!(self.input.syntax().typescript());
let id = self.parse_ident_name()?;
@ -933,7 +914,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseTypeAliasDeclaration`
fn parse_ts_type_alias_decl(&mut self, start: BytePos) -> PResult<'a, TsTypeAliasDecl> {
fn parse_ts_type_alias_decl(&mut self, start: BytePos) -> PResult<TsTypeAliasDecl> {
debug_assert!(self.input.syntax().typescript());
let id = self.parse_ident_name()?;
@ -954,7 +935,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
&mut self,
start: BytePos,
is_export: bool,
) -> PResult<'a, TsImportEqualsDecl> {
) -> PResult<TsImportEqualsDecl> {
debug_assert!(self.input.syntax().typescript());
let id = self.parse_ident_name()?;
@ -972,14 +953,14 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsIsExternalModuleReference`
fn is_ts_external_module_ref(&mut self) -> PResult<'a, bool> {
fn is_ts_external_module_ref(&mut self) -> PResult<bool> {
debug_assert!(self.input.syntax().typescript());
Ok(is!("require") && peeked_is!('('))
}
/// `tsParseModuleReference`
fn parse_ts_module_ref(&mut self) -> PResult<'a, TsModuleRef> {
fn parse_ts_module_ref(&mut self) -> PResult<TsModuleRef> {
debug_assert!(self.input.syntax().typescript());
if self.is_ts_external_module_ref()? {
@ -992,7 +973,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
/// `tsParseExternalModuleReference`
#[allow(clippy::cognitive_complexity)]
fn parse_ts_external_module_ref(&mut self) -> PResult<'a, TsExternalModuleRef> {
fn parse_ts_external_module_ref(&mut self) -> PResult<TsExternalModuleRef> {
debug_assert!(self.input.syntax().typescript());
let start = cur_pos!();
@ -1013,9 +994,9 @@ impl<'a, I: Tokens> Parser<'a, I> {
})
}
pub(super) fn ts_look_ahead<T, F>(&mut self, op: F) -> PResult<'a, T>
pub(super) fn ts_look_ahead<T, F>(&mut self, op: F) -> PResult<T>
where
F: FnOnce(&mut Self) -> PResult<'a, T>,
F: FnOnce(&mut Self) -> PResult<T>,
{
debug_assert!(self.input.syntax().typescript());
@ -1026,7 +1007,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsIsUnambiguouslyStartOfFunctionType`
fn is_ts_unambiguously_start_of_fn_type(&mut self) -> PResult<'a, bool> {
fn is_ts_unambiguously_start_of_fn_type(&mut self) -> PResult<bool> {
debug_assert!(self.input.syntax().typescript());
assert_and_bump!('(');
@ -1052,7 +1033,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsSkipParameterStart`
fn skip_ts_parameter_start(&mut self) -> PResult<'a, bool> {
fn skip_ts_parameter_start(&mut self) -> PResult<bool> {
debug_assert!(self.input.syntax().typescript());
let _ = self.eat_any_ts_modifier()?;
@ -1096,7 +1077,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseTypeMemberSemicolon`
fn parse_ts_type_member_semicolon(&mut self) -> PResult<'a, ()> {
fn parse_ts_type_member_semicolon(&mut self) -> PResult<()> {
debug_assert!(self.input.syntax().typescript());
if !eat!(',') {
@ -1110,7 +1091,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
fn parse_ts_signature_member(
&mut self,
kind: SignatureParsingMode,
) -> PResult<'a, Either<TsCallSignatureDecl, TsConstructSignatureDecl>> {
) -> PResult<Either<TsCallSignatureDecl, TsConstructSignatureDecl>> {
debug_assert!(self.input.syntax().typescript());
let start = cur_pos!();
@ -1153,7 +1134,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsIsUnambiguouslyIndexSignature`
fn is_ts_unambiguously_index_signature(&mut self) -> PResult<'a, bool> {
fn is_ts_unambiguously_index_signature(&mut self) -> PResult<bool> {
debug_assert!(self.input.syntax().typescript());
// Note: babel's comment is wrong
@ -1168,7 +1149,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
&mut self,
index_signature_start: BytePos,
readonly: bool,
) -> PResult<'a, Option<TsIndexSignature>> {
) -> PResult<Option<TsIndexSignature>> {
if !(is!('[') && self.ts_look_ahead(|p| p.is_ts_unambiguously_index_signature())?) {
return Ok(None);
}
@ -1209,7 +1190,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
&mut self,
start: BytePos,
readonly: bool,
) -> PResult<'a, Either<TsPropertySignature, TsMethodSignature>> {
) -> PResult<Either<TsPropertySignature, TsMethodSignature>> {
debug_assert!(self.input.syntax().typescript());
// ----- inlined self.parsePropertyName(node);
@ -1283,7 +1264,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseTypeMember`
fn parse_ts_type_member(&mut self) -> PResult<'a, TsTypeElement> {
fn parse_ts_type_member(&mut self) -> PResult<TsTypeElement> {
debug_assert!(self.input.syntax().typescript());
fn into_type_elem(
@ -1321,7 +1302,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsIsStartOfConstructSignature`
fn is_ts_start_of_construct_signature(&mut self) -> PResult<'a, bool> {
fn is_ts_start_of_construct_signature(&mut self) -> PResult<bool> {
debug_assert!(self.input.syntax().typescript());
bump!();
@ -1330,7 +1311,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseTypeLiteral`
fn parse_ts_type_lit(&mut self) -> PResult<'a, TsTypeLit> {
fn parse_ts_type_lit(&mut self) -> PResult<TsTypeLit> {
debug_assert!(self.input.syntax().typescript());
let start = cur_pos!();
@ -1342,7 +1323,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseObjectTypeMembers`
fn parse_ts_object_type_members(&mut self) -> PResult<'a, Vec<TsTypeElement>> {
fn parse_ts_object_type_members(&mut self) -> PResult<Vec<TsTypeElement>> {
debug_assert!(self.input.syntax().typescript());
expect!('{');
@ -1353,7 +1334,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsIsStartOfMappedType`
fn is_ts_start_of_mapped_type(&mut self) -> PResult<'a, bool> {
fn is_ts_start_of_mapped_type(&mut self) -> PResult<bool> {
debug_assert!(self.input.syntax().typescript());
bump!();
@ -1376,7 +1357,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseMappedTypeParameter`
fn parse_ts_mapped_type_param(&mut self) -> PResult<'a, TsTypeParam> {
fn parse_ts_mapped_type_param(&mut self) -> PResult<TsTypeParam> {
debug_assert!(self.input.syntax().typescript());
let start = cur_pos!();
@ -1393,7 +1374,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
/// `tsParseMappedType`
#[allow(clippy::cognitive_complexity)]
fn parse_ts_mapped_type(&mut self) -> PResult<'a, TsMappedType> {
fn parse_ts_mapped_type(&mut self) -> PResult<TsMappedType> {
debug_assert!(self.input.syntax().typescript());
let start = cur_pos!();
@ -1442,7 +1423,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseTupleType`
fn parse_ts_tuple_type(&mut self) -> PResult<'a, TsTupleType> {
fn parse_ts_tuple_type(&mut self) -> PResult<TsTupleType> {
debug_assert!(self.input.syntax().typescript());
let start = cur_pos!();
@ -1479,7 +1460,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseTupleElementType`
fn parse_ts_tuple_element_type(&mut self) -> PResult<'a, TsTupleElement> {
fn parse_ts_tuple_element_type(&mut self) -> PResult<TsTupleElement> {
debug_assert!(self.input.syntax().typescript());
// parses `...TsType[]`
@ -1527,7 +1508,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseParenthesizedType`
fn parse_ts_parenthesized_type(&mut self) -> PResult<'a, TsParenthesizedType> {
fn parse_ts_parenthesized_type(&mut self) -> PResult<TsParenthesizedType> {
debug_assert!(self.input.syntax().typescript());
let start = cur_pos!();
@ -1544,7 +1525,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
fn parse_ts_fn_or_constructor_type(
&mut self,
is_fn_type: bool,
) -> PResult<'a, TsFnOrConstructorType> {
) -> PResult<TsFnOrConstructorType> {
trace_cur!(parse_ts_fn_or_constructor_type);
debug_assert!(self.input.syntax().typescript());
@ -1579,7 +1560,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseLiteralTypeNode`
fn parse_ts_lit_type_node(&mut self) -> PResult<'a, TsLitType> {
fn parse_ts_lit_type_node(&mut self) -> PResult<TsLitType> {
debug_assert!(self.input.syntax().typescript());
let start = cur_pos!();
@ -1608,7 +1589,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseBindingListForSignature`
fn parse_ts_binding_list_for_signature(&mut self) -> PResult<'a, Vec<TsFnParam>> {
fn parse_ts_binding_list_for_signature(&mut self) -> PResult<Vec<TsFnParam>> {
debug_assert!(self.input.syntax().typescript());
let params = self.parse_formal_params()?;
@ -1631,7 +1612,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
/// `tsTryParseTypeOrTypePredicateAnnotation`
///
/// Used for parsing return types.
fn try_parse_ts_type_or_type_predicate_ann(&mut self) -> PResult<'a, Option<TsTypeAnn>> {
fn try_parse_ts_type_or_type_predicate_ann(&mut self) -> PResult<Option<TsTypeAnn>> {
if is!(':') {
self.parse_ts_type_or_type_predicate_ann(&tok!(':'))
.map(Some)
@ -1641,7 +1622,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsTryParseTypeAnnotation`
pub(super) fn try_parse_ts_type_ann(&mut self) -> PResult<'a, Option<TsTypeAnn>> {
pub(super) fn try_parse_ts_type_ann(&mut self) -> PResult<Option<TsTypeAnn>> {
if is!(':') {
let pos = cur_pos!();
return self.parse_ts_type_ann(/* eat_colon */ true, pos).map(Some);
@ -1651,12 +1632,12 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsTryParseType`
fn try_parse_ts_type(&mut self) -> PResult<'a, Option<Box<TsType>>> {
fn try_parse_ts_type(&mut self) -> PResult<Option<Box<TsType>>> {
self.eat_then_parse_ts_type(&tok!(':'))
}
/// `tsTryParseTypeParameters`
pub(super) fn try_parse_ts_type_params(&mut self) -> PResult<'a, Option<TsTypeParamDecl>> {
pub(super) fn try_parse_ts_type_params(&mut self) -> PResult<Option<TsTypeParamDecl>> {
if is!('<') {
return self.parse_ts_type_params().map(Some);
}
@ -1665,7 +1646,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
/// `tsParseNonArrayType`
#[allow(clippy::cognitive_complexity)]
fn parse_ts_non_array_type(&mut self) -> PResult<'a, Box<TsType>> {
fn parse_ts_non_array_type(&mut self) -> PResult<Box<TsType>> {
debug_assert!(self.input.syntax().typescript());
let start = cur_pos!();
@ -1804,7 +1785,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseArrayTypeOrHigher`
fn parse_ts_array_type_or_higher(&mut self, readonly: bool) -> PResult<'a, Box<TsType>> {
fn parse_ts_array_type_or_higher(&mut self, readonly: bool) -> PResult<Box<TsType>> {
debug_assert!(self.input.syntax().typescript());
let mut ty = self.parse_ts_non_array_type()?;
@ -1831,7 +1812,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseTypeOperator`
fn parse_ts_type_operator(&mut self, op: TsTypeOperatorOp) -> PResult<'a, TsTypeOperator> {
fn parse_ts_type_operator(&mut self, op: TsTypeOperatorOp) -> PResult<TsTypeOperator> {
debug_assert!(self.input.syntax().typescript());
let start = cur_pos!();
@ -1850,7 +1831,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseInferType`
fn parse_ts_infer_type(&mut self) -> PResult<'a, TsInferType> {
fn parse_ts_infer_type(&mut self) -> PResult<TsInferType> {
debug_assert!(self.input.syntax().typescript());
let start = cur_pos!();
@ -1869,7 +1850,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseTypeOperatorOrHigher`
fn parse_ts_type_operator_or_higher(&mut self) -> PResult<'a, Box<TsType>> {
fn parse_ts_type_operator_or_higher(&mut self) -> PResult<Box<TsType>> {
debug_assert!(self.input.syntax().typescript());
let operator = if is!("keyof") {
@ -1903,7 +1884,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
&mut self,
decorators: Vec<Decorator>,
expr: Ident,
) -> PResult<'a, Option<Decl>> {
) -> PResult<Option<Decl>> {
let start = expr.span().lo();
match &*expr.sym {
@ -1973,7 +1954,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
&mut self,
start: BytePos,
decorators: Vec<Decorator>,
) -> PResult<'a, Option<Decl>> {
) -> PResult<Option<Decl>> {
assert!(
!is!("declare"),
"try_parse_ts_declare should be called after eating `declare`"
@ -2085,7 +2066,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
decorators: Vec<Decorator>,
value: JsWord,
next: bool,
) -> PResult<'a, Option<Decl>> {
) -> PResult<Option<Decl>> {
match value {
js_word!("abstract") => {
if next || is!("class") {
@ -2188,7 +2169,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
pub(super) fn try_parse_ts_generic_async_arrow_fn(
&mut self,
start: BytePos,
) -> PResult<'a, Option<ArrowExpr>> {
) -> PResult<Option<ArrowExpr>> {
let res = if is_one_of!('<', JSXTagStart) {
self.try_parse_ts(|p| {
let type_params = p.parse_ts_type_params()?;
@ -2237,7 +2218,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseTypeArguments`
pub fn parse_ts_type_args(&mut self) -> PResult<'a, TsTypeParamInstantiation> {
pub fn parse_ts_type_args(&mut self) -> PResult<TsTypeParamInstantiation> {
debug_assert!(self.input.syntax().typescript());
let start = cur_pos!();
@ -2263,7 +2244,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseIntersectionTypeOrHigher`
fn parse_ts_intersection_type_or_higher(&mut self) -> PResult<'a, Box<TsType>> {
fn parse_ts_intersection_type_or_higher(&mut self) -> PResult<Box<TsType>> {
debug_assert!(self.input.syntax().typescript());
self.parse_ts_union_or_intersection_type(
@ -2274,7 +2255,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// `tsParseUnionTypeOrHigher`
fn parse_ts_union_type_or_higher(&mut self) -> PResult<'a, Box<TsType>> {
fn parse_ts_union_type_or_higher(&mut self) -> PResult<Box<TsType>> {
debug_assert!(self.input.syntax().typescript());
self.parse_ts_union_or_intersection_type(
@ -2290,9 +2271,9 @@ impl<'a, I: Tokens> Parser<'a, I> {
kind: UnionOrIntersection,
mut parse_constituent_type: F,
operator: &'static Token,
) -> PResult<'a, Box<TsType>>
) -> PResult<Box<TsType>>
where
F: FnMut(&mut Self) -> PResult<'a, Box<TsType>>,
F: FnMut(&mut Self) -> PResult<Box<TsType>>,
{
debug_assert!(self.input.syntax().typescript());
@ -2325,11 +2306,11 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
}
impl<'a, I: Tokens> Parser<'a, I> {
impl<I: Tokens> Parser<I> {
/// In no lexer context
fn ts_in_no_context<T, F>(&mut self, op: F) -> PResult<'a, T>
fn ts_in_no_context<T, F>(&mut self, op: F) -> PResult<T>
where
F: FnOnce(&mut Self) -> PResult<'a, T>,
F: FnOnce(&mut Self) -> PResult<T>,
{
debug_assert!(self.input.syntax().typescript());
@ -2393,7 +2374,7 @@ mod tests {
let actual = test_parser(
"type test = -1;",
Syntax::Typescript(Default::default()),
|p| p.parse_module().map_err(|mut e| e.emit()),
|p| p.parse_module(),
);
let expected = Module {
@ -2425,7 +2406,7 @@ mod tests {
let actual = test_parser(
"const t = -1;",
Syntax::Typescript(Default::default()),
|p| p.parse_module().map_err(|mut e| e.emit()),
|p| p.parse_module(),
);
let expected = Module {
@ -2462,9 +2443,8 @@ mod tests {
crate::with_test_sess(
"type Test = (
string | number);",
|sess, input| {
|handler, input| {
let lexer = Lexer::new(
sess,
Syntax::Typescript(TsConfig {
..Default::default()
}),
@ -2474,10 +2454,10 @@ mod tests {
);
let lexer = Capturing::new(lexer);
let mut parser = Parser::new_from(sess, lexer);
parser.parse_typescript_module().map_err(|mut e| {
e.emit();
})?;
let mut parser = Parser::new_from(lexer);
parser
.parse_typescript_module()
.map_err(|e| e.into_diagnostic(handler).emit())?;
let tokens: Vec<TokenAndSpan> = parser.input().take();
let tokens = tokens.into_iter().map(|t| t.token).collect::<Vec<_>>();
assert_eq!(tokens.len(), 9, "Tokens: {:#?}", tokens);
@ -2489,9 +2469,8 @@ mod tests {
#[test]
fn issue_751() {
crate::with_test_sess("t ? -(v >>> 1) : v >>> 1", |sess, input| {
crate::with_test_sess("t ? -(v >>> 1) : v >>> 1", |handler, input| {
let lexer = Lexer::new(
sess,
Syntax::Typescript(TsConfig {
..Default::default()
}),
@ -2501,10 +2480,10 @@ mod tests {
);
let lexer = Capturing::new(lexer);
let mut parser = Parser::new_from(sess, lexer);
parser.parse_typescript_module().map_err(|mut e| {
e.emit();
})?;
let mut parser = Parser::new_from(lexer);
parser
.parse_typescript_module()
.map_err(|e| e.into_diagnostic(handler).emit())?;
let tokens: Vec<TokenAndSpan> = parser.input().take();
let token = &tokens[10];
assert_eq!(

View File

@ -63,9 +63,9 @@ impl Context {
}
}
impl<'a, I: Tokens> Parser<'a, I> {
impl<'a, I: Tokens> Parser<I> {
/// Original context is restored when returned guard is dropped.
pub(super) fn with_ctx<'w>(&'w mut self, ctx: Context) -> WithCtx<'w, 'a, I> {
pub(super) fn with_ctx(&mut self, ctx: Context) -> WithCtx<I> {
let orig_ctx = self.ctx();
self.set_ctx(ctx);
WithCtx {
@ -75,7 +75,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// Original state is restored when returned guard is dropped.
pub(super) fn with_state<'w>(&'w mut self, state: State) -> WithState<'w, 'a, I> {
pub(super) fn with_state(&mut self, state: State) -> WithState<I> {
let orig_state = std::mem::replace(&mut self.state, state);
WithState {
orig_state,
@ -87,7 +87,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
self.input.set_ctx(ctx);
}
pub(super) fn strict_mode<'w>(&'w mut self) -> WithCtx<'w, 'a, I> {
pub(super) fn strict_mode(&mut self) -> WithCtx<I> {
let ctx = Context {
strict: true,
..self.ctx()
@ -96,7 +96,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// Original context is restored when returned guard is dropped.
pub(super) fn in_type<'w>(&'w mut self) -> WithCtx<'w, 'a, I> {
pub(super) fn in_type(&mut self) -> WithCtx<I> {
let ctx = Context {
in_type: true,
..self.ctx()
@ -105,7 +105,7 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// Original context is restored when returned guard is dropped.
pub(super) fn include_in_expr<'w>(&'w mut self, include_in_expr: bool) -> WithCtx<'w, 'a, I> {
pub(super) fn include_in_expr(&mut self, include_in_expr: bool) -> WithCtx<I> {
let ctx = Context {
include_in_expr,
..self.ctx()
@ -114,9 +114,9 @@ impl<'a, I: Tokens> Parser<'a, I> {
}
/// Parse with given closure
pub(super) fn parse_with<F, Ret>(&mut self, f: F) -> PResult<'a, Ret>
pub(super) fn parse_with<F, Ret>(&mut self, f: F) -> PResult<Ret>
where
F: FnOnce(&mut Self) -> PResult<'a, Ret>,
F: FnOnce(&mut Self) -> PResult<Ret>,
{
f(self)
}
@ -138,52 +138,52 @@ impl<'a, I: Tokens> Parser<'a, I> {
self.input.syntax()
}
}
pub trait ParseObject<'a, Obj> {
pub trait ParseObject<Obj> {
type Prop;
fn make_object(&mut self, span: Span, props: Vec<Self::Prop>) -> PResult<'a, Obj>;
fn parse_object_prop(&mut self) -> PResult<'a, Self::Prop>;
fn make_object(&mut self, span: Span, props: Vec<Self::Prop>) -> PResult<Obj>;
fn parse_object_prop(&mut self) -> PResult<Self::Prop>;
}
pub struct WithState<'w, 'a: 'w, I: 'w + Tokens> {
inner: &'w mut Parser<'a, I>,
pub struct WithState<'w, I: 'w + Tokens> {
inner: &'w mut Parser<I>,
orig_state: State,
}
impl<'w, 'a, I: Tokens> Deref for WithState<'w, 'a, I> {
type Target = Parser<'a, I>;
impl<'w, I: Tokens> Deref for WithState<'w, I> {
type Target = Parser<I>;
fn deref(&self) -> &Parser<'a, I> {
fn deref(&self) -> &Parser<I> {
&self.inner
}
}
impl<'w, 'a, I: Tokens> DerefMut for WithState<'w, 'a, I> {
fn deref_mut(&mut self) -> &mut Parser<'a, I> {
impl<'w, I: Tokens> DerefMut for WithState<'w, I> {
fn deref_mut(&mut self) -> &mut Parser<I> {
&mut self.inner
}
}
impl<'w, 'a, I: Tokens> Drop for WithState<'w, 'a, I> {
impl<'w, I: Tokens> Drop for WithState<'w, I> {
fn drop(&mut self) {
std::mem::swap(&mut self.inner.state, &mut self.orig_state);
}
}
pub struct WithCtx<'w, 'a: 'w, I: 'w + Tokens> {
inner: &'w mut Parser<'a, I>,
pub struct WithCtx<'w, I: 'w + Tokens> {
inner: &'w mut Parser<I>,
orig_ctx: Context,
}
impl<'w, 'a, I: Tokens> Deref for WithCtx<'w, 'a, I> {
type Target = Parser<'a, I>;
impl<'w, I: Tokens> Deref for WithCtx<'w, I> {
type Target = Parser<I>;
fn deref(&self) -> &Parser<'a, I> {
fn deref(&self) -> &Parser<I> {
&self.inner
}
}
impl<'w, 'a, I: Tokens> DerefMut for WithCtx<'w, 'a, I> {
fn deref_mut(&mut self) -> &mut Parser<'a, I> {
impl<'w, I: Tokens> DerefMut for WithCtx<'w, I> {
fn deref_mut(&mut self) -> &mut Parser<I> {
&mut self.inner
}
}
impl<'w, 'a, I: Tokens> Drop for WithCtx<'w, 'a, I> {
impl<'w, I: Tokens> Drop for WithCtx<'w, I> {
fn drop(&mut self) {
self.inner.set_ctx(self.orig_ctx);
}

View File

@ -2,14 +2,18 @@ use swc_common::Span;
use swc_ecma_ast::*;
use swc_ecma_visit::{Fold, FoldWith};
pub struct Normalizer;
pub struct Normalizer {
pub drop_span: bool,
pub is_test262: bool,
}
impl Fold for Normalizer {
fn fold_expr(&mut self, e: Expr) -> Expr {
let e = e.fold_children_with(self);
match e {
Expr::Paren(ParenExpr { expr, .. }) => *expr,
Expr::New(n @ NewExpr { args: None, .. }) => Expr::New(NewExpr {
Expr::Paren(ParenExpr { expr, .. }) if self.is_test262 => *expr,
Expr::New(n @ NewExpr { args: None, .. }) if self.is_test262 => Expr::New(NewExpr {
args: Some(vec![]),
..n
}),
@ -54,25 +58,32 @@ impl Fold for Normalizer {
let node = node.fold_children_with(self);
match node {
PatOrExpr::Expr(expr) => match *expr {
Expr::Ident(i) => PatOrExpr::Pat(Box::new(Pat::Ident(i))),
_ => PatOrExpr::Expr(expr),
},
PatOrExpr::Pat(pat) => match *pat {
Pat::Expr(expr) => PatOrExpr::Expr(expr),
_ => PatOrExpr::Pat(pat),
},
_ => node,
}
}
fn fold_prop_name(&mut self, n: PropName) -> PropName {
let n = n.fold_children_with(self);
if !self.is_test262 {
return n;
}
match n {
PropName::Ident(Ident { sym, .. }) => PropName::Str(Str {
span: Default::default(),
PropName::Ident(Ident { span, sym, .. }) => PropName::Str(Str {
span,
value: sym,
has_escape: false,
}),
PropName::Num(num) => PropName::Str(Str {
span: Default::default(),
span: num.span,
value: num.to_string().into(),
has_escape: false,
}),
@ -81,20 +92,34 @@ impl Fold for Normalizer {
}
fn fold_str(&mut self, s: Str) -> Str {
Str {
span: Default::default(),
has_escape: false,
..s
let span = s.span.fold_with(self);
if self.is_test262 {
Str {
span,
has_escape: false,
..s
}
} else {
Str { span, ..s }
}
}
fn fold_span(&mut self, _: Span) -> Span {
Span::default()
fn fold_span(&mut self, span: Span) -> Span {
if self.drop_span {
Span::default()
} else {
span
}
}
fn fold_class_members(&mut self, mut node: Vec<ClassMember>) -> Vec<ClassMember> {
node = node.fold_children_with(self);
if !self.is_test262 {
return node;
}
node.retain(|v| match v {
ClassMember::Empty(..) => false,
_ => true,

View File

@ -13,7 +13,7 @@ use std::{
};
use swc_common::{errors::Handler, SourceMap};
use swc_ecma_ast::*;
use swc_ecma_parser::{lexer::Lexer, PResult, Parser, Session, SourceFileInput};
use swc_ecma_parser::{lexer::Lexer, PResult, Parser, SourceFileInput};
use swc_ecma_visit::{Fold, FoldWith};
use test::{
test_main, DynTestFn, Options, ShouldPanic::No, TestDesc, TestDescAndFn, TestName, TestType,
@ -195,24 +195,26 @@ fn with_parser<F, Ret>(
f: F,
) -> Result<Ret, ()>
where
F: for<'a> FnOnce(&mut Parser<'a, Lexer<'a, SourceFileInput<'_>>>) -> PResult<'a, Ret>,
F: FnOnce(&mut Parser<Lexer<SourceFileInput<'_>>>) -> PResult<Ret>,
{
let fm = cm
.load_file(file_name)
.unwrap_or_else(|e| panic!("failed to load {}: {}", file_name.display(), e));
let res = f(&mut Parser::new(
Session { handler: &handler },
let mut p = Parser::new(
::swc_ecma_parser::Syntax::Es(::swc_ecma_parser::EsConfig {
jsx: true,
..Default::default()
}),
(&*fm).into(),
None,
))
.map_err(|mut e| {
e.emit();
});
);
let res = f(&mut p).map_err(|e| e.into_diagnostic(handler).emit());
for e in p.take_errors() {
e.into_diagnostic(&handler).emit();
}
res
}

View File

@ -1,12 +1,12 @@
error: TS1109
--> $DIR/tests/jsx/errors/attribute-empty-expression/input.js:1:13
|
1 | <foo bar={} />
| ^
error: Unexpected token Some(JSXTagEnd)
--> $DIR/tests/jsx/errors/attribute-empty-expression/input.js:1:14
|
1 | <foo bar={} />
| ^
error: TS1109
--> $DIR/tests/jsx/errors/attribute-empty-expression/input.js:1:13
|
1 | <foo bar={} />
| ^

View File

@ -1,12 +1,12 @@
error: top level await requires target to es2017 or higher and topLevelAwait:true for ecmascript
--> $DIR/tests/test262-parser/fail/1aefe47e20eb91fa.module.js:1:1
|
1 | await
| ^^^^^
error: Unexpected token None
--> $DIR/tests/test262-parser/fail/1aefe47e20eb91fa.module.js:1:1
|
1 | await
| ^^^^^
error: top level await requires target to es2017 or higher and topLevelAwait:true for ecmascript
--> $DIR/tests/test262-parser/fail/1aefe47e20eb91fa.module.js:1:1
|
1 | await
| ^^^^^

View File

@ -1,12 +1,12 @@
error: TS1048
--> $DIR/tests/test262-parser/fail/2d1410e37ecc3647.js:1:18
|
1 | function f(a, ...b = 0)
| ^
error: Expected LBrace, got None
--> $DIR/tests/test262-parser/fail/2d1410e37ecc3647.js:1:23
|
1 | function f(a, ...b = 0)
| ^
error: TS1048
--> $DIR/tests/test262-parser/fail/2d1410e37ecc3647.js:1:18
|
1 | function f(a, ...b = 0)
| ^

View File

@ -1,12 +1,12 @@
error: 'implements', 'interface', 'let', 'package', 'private', 'protected', 'public', 'static', or 'yield' cannot be used as an identifier in strict mode
--> $DIR/tests/test262-parser/fail/37cb7557997d4fd6.js:1:29
|
1 | "use strict"; for (let [a = let];;) {}
| ^^^
error: Destructuring bindings require initializers
--> $DIR/tests/test262-parser/fail/37cb7557997d4fd6.js:1:24
|
1 | "use strict"; for (let [a = let];;) {}
| ^^^^^^^^^
error: 'implements', 'interface', 'let', 'package', 'private', 'protected', 'public', 'static', or 'yield' cannot be used as an identifier in strict mode
--> $DIR/tests/test262-parser/fail/37cb7557997d4fd6.js:1:29
|
1 | "use strict"; for (let [a = let];;) {}
| ^^^

View File

@ -1,12 +1,12 @@
error: Legacy octal escape is not permitted in strict mode
--> $DIR/tests/test262-parser/fail/37e9fb0470e7ec3d.js:1:19
|
1 | 'use strict'; ('\000')
| ^^
error: Legacy octal escape is not permitted in strict mode
--> $DIR/tests/test262-parser/fail/37e9fb0470e7ec3d.js:1:17
|
1 | 'use strict'; ('\000')
| ^^
error: Legacy octal escape is not permitted in strict mode
--> $DIR/tests/test262-parser/fail/37e9fb0470e7ec3d.js:1:19
|
1 | 'use strict'; ('\000')
| ^^

View File

@ -4,9 +4,3 @@ error: TS1005
1 | export let[a] = 0 export let[b] = 0
| ^^^^^^
error: Unexpected token Some(Word(export))
--> $DIR/tests/test262-parser/fail/48dee14b7a3a3767.module.js:1:19
|
1 | export let[a] = 0 export let[b] = 0
| ^^^^^^

View File

@ -1,9 +1,3 @@
error: Expected RBracket, got Some(Comma)
--> $DIR/tests/test262-parser/fail/58707e130fe451a8.js:1:10
|
1 | void { [1, 2]: 3 };
| ^
error: Expected ';', '}' or <eof>
--> $DIR/tests/test262-parser/fail/58707e130fe451a8.js:1:13
|
@ -16,3 +10,9 @@ note: This is the expression part of an expression statement
1 | void { [1, 2]: 3 };
| ^^^^^^^^^^^^
error: Expected RBracket, got Some(Comma)
--> $DIR/tests/test262-parser/fail/58707e130fe451a8.js:1:10
|
1 | void { [1, 2]: 3 };
| ^

View File

@ -1,12 +1,12 @@
error: Legacy octal escape is not permitted in strict mode
--> $DIR/tests/test262-parser/fail/6ac4f95d48362a35.js:1:19
|
1 | 'use strict'; ('\001')
| ^^
error: Legacy octal escape is not permitted in strict mode
--> $DIR/tests/test262-parser/fail/6ac4f95d48362a35.js:1:17
|
1 | 'use strict'; ('\001')
| ^^
error: Legacy octal escape is not permitted in strict mode
--> $DIR/tests/test262-parser/fail/6ac4f95d48362a35.js:1:19
|
1 | 'use strict'; ('\001')
| ^^

View File

@ -1,12 +1,12 @@
error: TS1109
--> $DIR/tests/test262-parser/fail/7562c2148b3f455c.js:1:8
|
1 | ;/**/-->
| ^
error: Unexpected token None
--> $DIR/tests/test262-parser/fail/7562c2148b3f455c.js:1:8
|
1 | ;/**/-->
| ^
error: TS1109
--> $DIR/tests/test262-parser/fail/7562c2148b3f455c.js:1:8
|
1 | ;/**/-->
| ^

View File

@ -1,12 +1,12 @@
error: TS1109
--> $DIR/tests/test262-parser/fail/85ee036d67974729.js:1:7
|
1 | ({get +:3})
| ^
error: Unexpected token Some(Colon)
--> $DIR/tests/test262-parser/fail/85ee036d67974729.js:1:8
|
1 | ({get +:3})
| ^
error: TS1109
--> $DIR/tests/test262-parser/fail/85ee036d67974729.js:1:7
|
1 | ({get +:3})
| ^

View File

@ -1,12 +1,12 @@
error: TS1109
--> $DIR/tests/test262-parser/fail/983987033f0e1170.js:1:1
|
1 | **
| ^^
error: Unexpected token None
--> $DIR/tests/test262-parser/fail/983987033f0e1170.js:1:1
|
1 | **
| ^^
error: TS1109
--> $DIR/tests/test262-parser/fail/983987033f0e1170.js:1:1
|
1 | **
| ^^

View File

@ -1,9 +1,3 @@
error: TS1109
--> $DIR/tests/test262-parser/fail/a7be138dfd29025e.js:2:7
|
2 | { *[yield iter]() {} }
| ^
error: Expected ';', '}' or <eof>
--> $DIR/tests/test262-parser/fail/a7be138dfd29025e.js:2:23
|
@ -16,3 +10,9 @@ note: This is the expression part of an expression statement
2 | { *[yield iter]() {} }
| ^^^^^^^^^^^^^^^
error: TS1109
--> $DIR/tests/test262-parser/fail/a7be138dfd29025e.js:2:7
|
2 | { *[yield iter]() {} }
| ^

View File

@ -1,12 +1,12 @@
error: TS1109
--> $DIR/tests/test262-parser/fail/c45a4b00a2a24c2b.js:1:10
|
1 | p = { "q"/ }
| ^
error: Unexpected token Some(RBrace)
--> $DIR/tests/test262-parser/fail/c45a4b00a2a24c2b.js:1:12
|
1 | p = { "q"/ }
| ^
error: TS1109
--> $DIR/tests/test262-parser/fail/c45a4b00a2a24c2b.js:1:10
|
1 | p = { "q"/ }
| ^

View File

@ -4,9 +4,3 @@ error: TS1005
1 | var a.b;
| ^
error: Unexpected token Some(Dot)
--> $DIR/tests/test262-parser/fail/c91a41d48af2ef00.js:1:6
|
1 | var a.b;
| ^

View File

@ -1,12 +1,12 @@
error: Unexpected token Some(Arrow)
--> $DIR/tests/test262-parser/fail/ca3dd7ea0b4626dd.js:1:23
|
1 | console.log(typeof () => {});
| ^^
error: Expected Comma, got Some(Arrow)
--> $DIR/tests/test262-parser/fail/ca3dd7ea0b4626dd.js:1:23
|
1 | console.log(typeof () => {});
| ^^
error: Unexpected token Some(Arrow)
--> $DIR/tests/test262-parser/fail/ca3dd7ea0b4626dd.js:1:23
|
1 | console.log(typeof () => {});
| ^^

View File

@ -1,12 +1,12 @@
error: 'implements', 'interface', 'let', 'package', 'private', 'protected', 'public', 'static', or 'yield' cannot be used as an identifier in strict mode
--> $DIR/tests/test262-parser/fail/d17d3aebb6a3cf43.js:1:32
|
1 | "use strict"; for (let {a: b = let};;) {}
| ^^^
error: Destructuring bindings require initializers
--> $DIR/tests/test262-parser/fail/d17d3aebb6a3cf43.js:1:24
|
1 | "use strict"; for (let {a: b = let};;) {}
| ^^^^^^^^^^^^
error: 'implements', 'interface', 'let', 'package', 'private', 'protected', 'public', 'static', or 'yield' cannot be used as an identifier in strict mode
--> $DIR/tests/test262-parser/fail/d17d3aebb6a3cf43.js:1:32
|
1 | "use strict"; for (let {a: b = let};;) {}
| ^^^

View File

@ -1,12 +1,12 @@
error: TS1109
--> $DIR/tests/test262-parser/fail/e3a388ec11e4d061.js:1:18
|
1 | var obj = { *test** }
| ^^
error: Unexpected token Some(RBrace)
--> $DIR/tests/test262-parser/fail/e3a388ec11e4d061.js:1:21
|
1 | var obj = { *test** }
| ^
error: TS1109
--> $DIR/tests/test262-parser/fail/e3a388ec11e4d061.js:1:18
|
1 | var obj = { *test** }
| ^^

View File

@ -1,12 +1,12 @@
error: Not a pattern
--> $DIR/tests/test262-parser/fail/e4963d9605864d9a.js:1:3
|
1 | ([(a = b)] = []
| ^^^^^^^
error: Expected RParen, got None
--> $DIR/tests/test262-parser/fail/e4963d9605864d9a.js:1:15
|
1 | ([(a = b)] = []
| ^
error: Not a pattern
--> $DIR/tests/test262-parser/fail/e4963d9605864d9a.js:1:3
|
1 | ([(a = b)] = []
| ^^^^^^^

View File

@ -1,12 +1,12 @@
error: TS1014
--> $DIR/tests/test262-parser/fail/f0ff9ff5dab859aa.js:1:15
|
1 | function f(a, ...b, c)
| ^^^
error: Expected LBrace, got None
--> $DIR/tests/test262-parser/fail/f0ff9ff5dab859aa.js:1:22
|
1 | function f(a, ...b, c)
| ^
error: TS1014
--> $DIR/tests/test262-parser/fail/f0ff9ff5dab859aa.js:1:15
|
1 | function f(a, ...b, c)
| ^^^

View File

@ -1,12 +1,12 @@
error: TS1109
--> $DIR/tests/test262-parser/fail/fe233d87f280ed04.js:1:8
|
1 | p = { q/ }
| ^
error: Unexpected token Some(RBrace)
--> $DIR/tests/test262-parser/fail/fe233d87f280ed04.js:1:10
|
1 | p = { q/ }
| ^
error: TS1109
--> $DIR/tests/test262-parser/fail/fe233d87f280ed04.js:1:8
|
1 | p = { q/ }
| ^

View File

@ -10,7 +10,7 @@ use std::{
path::Path,
};
use swc_ecma_ast::*;
use swc_ecma_parser::{lexer::Lexer, PResult, Parser, Session, SourceFileInput, Syntax};
use swc_ecma_parser::{lexer::Lexer, PResult, Parser, SourceFileInput, Syntax};
use swc_ecma_visit::FoldWith;
use test::{
test_main, DynTestFn, Options, ShouldPanic::No, TestDesc, TestDescAndFn, TestName, TestType,
@ -274,7 +274,10 @@ fn identity_tests(tests: &mut Vec<TestDescAndFn>) -> Result<(), io::Error> {
err, json
)
})
.fold_with(&mut Normalizer);
.fold_with(&mut Normalizer {
drop_span: true,
is_test262: true,
});
assert_eq!(src, deser, "JSON:\n{}", json);
} else {
let p = |explicit| {
@ -305,22 +308,20 @@ fn parse_module<'a>(file_name: &Path) -> Result<Module, NormalizedOutput> {
fn with_parser<F, Ret>(file_name: &Path, f: F) -> Result<Ret, StdErr>
where
F: for<'a> FnOnce(&mut Parser<'a, Lexer<'a, SourceFileInput<'_>>>) -> PResult<'a, Ret>,
F: FnOnce(&mut Parser<Lexer<SourceFileInput<'_>>>) -> PResult<Ret>,
{
let output = ::testing::run_test(false, |cm, handler| {
let fm = cm
.load_file(file_name)
.unwrap_or_else(|e| panic!("failed to load {}: {}", file_name.display(), e));
let res = f(&mut Parser::new(
Session { handler: &handler },
Syntax::default(),
(&*fm).into(),
None,
))
.map_err(|mut e| {
e.emit();
});
let mut p = Parser::new(Syntax::default(), (&*fm).into(), None);
let res = f(&mut p).map_err(|e| e.into_diagnostic(handler).emit());
for e in p.take_errors() {
e.into_diagnostic(&handler).emit();
}
if handler.has_errors() {
return Err(());
@ -352,6 +353,9 @@ pub fn normalize<T>(t: T) -> T
where
T: FoldWith<Normalizer>,
{
let mut n = Normalizer;
let mut n = Normalizer {
drop_span: true,
is_test262: true,
};
t.fold_with(&mut n)
}

View File

@ -1,12 +1,12 @@
error: TS1005
--> $DIR/tests/typescript-errors/class/input.ts:2:5
|
2 | x?!: number;
| ^
error: Unexpected token Some(Bang)
--> $DIR/tests/typescript-errors/class/input.ts:2:5
|
2 | x?!: number;
| ^
error: TS1005
--> $DIR/tests/typescript-errors/class/input.ts:2:5
|
2 | x?!: number;
| ^

View File

@ -12,7 +12,7 @@ use std::{
};
use swc_ecma_ast::*;
use swc_ecma_parser::{
lexer::Lexer, JscTarget, PResult, Parser, Session, SourceFileInput, Syntax, TsConfig,
lexer::Lexer, JscTarget, PResult, Parser, SourceFileInput, Syntax, TsConfig,
};
use swc_ecma_visit::FoldWith;
use test::{
@ -158,7 +158,10 @@ fn reference_tests(tests: &mut Vec<TestDescAndFn>, errors: bool) -> Result<(), i
}
} else {
with_parser(is_backtrace_enabled(), &path, !errors, |p| {
let module = p.parse_typescript_module()?.fold_with(&mut Normalizer);
let module = p.parse_typescript_module()?.fold_with(&mut Normalizer {
drop_span: false,
is_test262: false,
});
let json = serde_json::to_string_pretty(&module)
.expect("failed to serialize module as json");
@ -170,8 +173,16 @@ fn reference_tests(tests: &mut Vec<TestDescAndFn>, errors: bool) -> Result<(), i
panic!()
}
let module = module.fold_with(&mut Normalizer {
drop_span: true,
is_test262: false,
});
let deser = match serde_json::from_str::<Module>(&json) {
Ok(v) => v.fold_with(&mut Normalizer),
Ok(v) => v.fold_with(&mut Normalizer {
drop_span: true,
is_test262: false,
}),
Err(err) => {
if err.to_string().contains("invalid type: null, expected f64") {
return Ok(());
@ -203,7 +214,7 @@ fn with_parser<F, Ret>(
f: F,
) -> Result<Ret, StdErr>
where
F: for<'a> FnOnce(&mut Parser<'a, Lexer<'a, SourceFileInput<'_>>>) -> PResult<'a, Ret>,
F: FnOnce(&mut Parser<Lexer<SourceFileInput<'_>>>) -> PResult<Ret>,
{
let fname = file_name.display().to_string();
let output = ::testing::run_test(treat_error_as_bug, |cm, handler| {
@ -212,7 +223,6 @@ where
.unwrap_or_else(|e| panic!("failed to load {}: {}", file_name.display(), e));
let lexer = Lexer::new(
Session { handler: &handler },
Syntax::Typescript(TsConfig {
dts: fname.ends_with(".d.ts"),
tsx: fname.contains("tsx"),
@ -226,10 +236,13 @@ where
None,
);
let res =
f(&mut Parser::new_from(Session { handler: &handler }, lexer)).map_err(|mut e| {
e.emit();
});
let mut p = Parser::new_from(lexer);
let res = f(&mut p).map_err(|e| e.into_diagnostic(&handler).emit());
for err in p.take_errors() {
err.into_diagnostic(&handler).emit();
}
if handler.has_errors() {
return Err(());

View File

@ -2,7 +2,7 @@
extern crate test;
use swc_common::{FileName, Mark};
use swc_ecma_parser::{Parser, Session, SourceFileInput, Syntax};
use swc_ecma_parser::{Parser, SourceFileInput, Syntax};
use swc_ecma_preset_env::{preset_env, Config};
use swc_ecma_visit::FoldWith;
use test::Bencher;
@ -13,18 +13,16 @@ fn run(b: &mut Bencher, src: &str, config: Config) {
let _ = ::testing::run_test(false, |cm, handler| {
let fm = cm.new_source_file(FileName::Anon, src.into());
let mut parser = Parser::new(
Session { handler: &handler },
Syntax::default(),
SourceFileInput::from(&*fm),
None,
);
let mut parser = Parser::new(Syntax::default(), SourceFileInput::from(&*fm), None);
let module = parser
.parse_module()
.map_err(|mut e| {
e.emit();
})
.map_err(|e| e.into_diagnostic(&handler).emit())
.unwrap();
for e in parser.take_errors() {
e.into_diagnostic(&handler).emit()
}
let mut folder = preset_env(Mark::fresh(Mark::root()), config);
b.iter(|| test::black_box(module.clone().fold_with(&mut folder)));

View File

@ -17,7 +17,7 @@ use std::{
use swc_common::{input::SourceFileInput, FromVariant, Mark};
use swc_ecma_ast::*;
use swc_ecma_codegen::Emitter;
use swc_ecma_parser::{EsConfig, Parser, Session, Syntax};
use swc_ecma_parser::{EsConfig, Parser, Syntax};
use swc_ecma_preset_env::{preset_env, Config, FeatureOrModule, Mode, Targets, Version};
use swc_ecma_visit::FoldWith;
use test::{test_main, ShouldPanic, TestDesc, TestDescAndFn, TestFn, TestName, TestType};
@ -225,7 +225,6 @@ fn exec(c: PresetConfig, dir: PathBuf) -> Result<(), Error> {
.load_file(&dir.join("input.mjs"))
.expect("failed to load file");
let mut p = Parser::new(
Session { handler: &handler },
Syntax::Es(EsConfig {
dynamic_import: true,
..Default::default()
@ -234,7 +233,14 @@ fn exec(c: PresetConfig, dir: PathBuf) -> Result<(), Error> {
None,
);
let module = p.parse_module().map_err(|mut e| e.emit())?;
let module = p
.parse_module()
.map_err(|e| e.into_diagnostic(&handler).emit())?;
for e in p.take_errors() {
e.into_diagnostic(&handler).emit()
}
let actual = module.fold_with(&mut pass);
// debug mode?
@ -256,7 +262,6 @@ fn exec(c: PresetConfig, dir: PathBuf) -> Result<(), Error> {
.expect("failed to load output file");
let mut p = Parser::new(
Session { handler: &handler },
Syntax::Es(EsConfig {
dynamic_import: true,
..Default::default()
@ -265,7 +270,13 @@ fn exec(c: PresetConfig, dir: PathBuf) -> Result<(), Error> {
None,
);
let mut m = p.parse_module().map_err(|mut e| e.emit())?;
let mut m = p
.parse_module()
.map_err(|e| e.into_diagnostic(&handler).emit())?;
for e in p.take_errors() {
e.into_diagnostic(&handler).emit()
}
m.body.sort_by(|a, b| match *a {
ModuleItem::ModuleDecl(ModuleDecl::Import(ImportDecl {

View File

@ -1,6 +1,6 @@
[package]
name = "swc_ecma_transforms"
version = "0.13.0"
version = "0.14.0"
authors = ["강동윤 <kdy1997.dev@gmail.com>"]
license = "Apache-2.0/MIT"
repository = "https://github.com/swc-project/swc.git"
@ -12,8 +12,8 @@ edition = "2018"
swc_atoms = { version = "0.2.0", path ="../../atoms" }
swc_common = { version = "0.7.0", path ="../../common" }
swc_ecma_ast = { version = "0.24.0", path ="../ast" }
swc_ecma_utils = { version = "0.12.0", path ="../utils" }
swc_ecma_parser = { version = "0.28.0", path ="../parser" }
swc_ecma_utils = { version = "0.13.0", path ="../utils" }
swc_ecma_parser = { version = "0.29.0", path ="../parser" }
swc_ecma_visit = { version = "0.9.0", path ="../visit" }
dashmap = "=3.5.1"
either = "1.5"
@ -34,7 +34,7 @@ log = "0.4.8"
[dev-dependencies]
testing = { version = "0.7", path ="../../testing" }
swc_ecma_codegen = { version = "0.26.0", path ="../codegen" }
swc_ecma_codegen = { version = "0.27.0", path ="../codegen" }
tempfile = "3"
pretty_assertions = "0.6"
sourcemap = "6"

View File

@ -3,7 +3,7 @@ extern crate test;
use swc_common::{FileName, DUMMY_SP};
use swc_ecma_ast::*;
use swc_ecma_parser::{lexer::Lexer, Parser, Session, SourceFileInput, Syntax};
use swc_ecma_parser::{lexer::Lexer, Parser, SourceFileInput, Syntax};
use swc_ecma_transforms::{pass::noop, util::ExprFactory};
use swc_ecma_visit::{FoldWith, Node, Visit, VisitWith};
use test::Bencher;
@ -86,20 +86,23 @@ fn module_clone(b: &mut Bencher) {
let _ = ::testing::run_test(false, |cm, handler| {
let fm = cm.new_source_file(FileName::Anon, SOURCE.into());
let lexer = Lexer::new(
Session { handler: &handler },
Syntax::default(),
Default::default(),
SourceFileInput::from(&*fm),
None,
);
let mut parser = Parser::new_from(Session { handler: &handler }, lexer);
let mut parser = Parser::new_from(lexer);
let module = parser
.parse_module()
.map_err(|mut e| {
e.emit();
.map_err(|e| {
e.into_diagnostic(handler).emit();
})
.unwrap();
for e in parser.take_errors() {
e.into_diagnostic(handler).emit();
}
b.iter(|| test::black_box(module.clone()));
Ok(())
});
@ -112,19 +115,23 @@ fn fold_empty(b: &mut Bencher) {
let _ = ::testing::run_test(false, |cm, handler| {
let fm = cm.new_source_file(FileName::Anon, SOURCE.into());
let lexer = Lexer::new(
Session { handler: &handler },
Syntax::default(),
Default::default(),
SourceFileInput::from(&*fm),
None,
);
let mut parser = Parser::new_from(Session { handler: &handler }, lexer);
let mut parser = Parser::new_from(lexer);
let module = parser
.parse_module()
.map_err(|mut e| {
e.emit();
.map_err(|e| {
e.into_diagnostic(&handler).emit();
})
.unwrap();
for e in parser.take_errors() {
e.into_diagnostic(&handler).emit();
}
let mut folder = noop();
b.iter(|| test::black_box(module.clone().fold_with(&mut folder)));
@ -141,19 +148,21 @@ fn fold_noop_impl_all(b: &mut Bencher) {
let fm = cm.new_source_file(FileName::Anon, SOURCE.into());
let lexer = Lexer::new(
Session { handler: &handler },
Syntax::default(),
Default::default(),
SourceFileInput::from(&*fm),
None,
);
let mut parser = Parser::new_from(Session { handler: &handler }, lexer);
let mut parser = Parser::new_from(lexer);
let module = parser
.parse_module()
.map_err(|mut e| {
e.emit();
})
.map_err(|e| e.into_diagnostic(&handler).emit())
.unwrap();
for e in parser.take_errors() {
e.into_diagnostic(&handler).emit();
}
let mut folder = noop();
b.iter(|| test::black_box(module.clone().fold_with(&mut folder)));
@ -169,19 +178,23 @@ fn fold_noop_impl_vec(b: &mut Bencher) {
let _ = ::testing::run_test(false, |cm, handler| {
let fm = cm.new_source_file(FileName::Anon, SOURCE.into());
let lexer = Lexer::new(
Session { handler: &handler },
Syntax::default(),
Default::default(),
SourceFileInput::from(&*fm),
None,
);
let mut parser = Parser::new_from(Session { handler: &handler }, lexer);
let mut parser = Parser::new_from(lexer);
let module = parser
.parse_module()
.map_err(|mut e| {
e.emit();
.map_err(|e| {
e.into_diagnostic(&handler).emit();
})
.unwrap();
for e in parser.take_errors() {
e.into_diagnostic(&handler).emit();
}
let mut folder = noop();
b.iter(|| test::black_box(module.clone().fold_with(&mut folder)));
@ -244,22 +257,16 @@ fn boxing_unboxed(b: &mut Bencher) {
fn visit_empty(b: &mut Bencher) {
b.bytes = SOURCE.len() as _;
let _ = ::testing::run_test(false, |cm, handler| {
let _ = ::testing::run_test(false, |cm, _| {
let fm = cm.new_source_file(FileName::Anon, SOURCE.into());
let lexer = Lexer::new(
Session { handler: &handler },
Syntax::default(),
Default::default(),
SourceFileInput::from(&*fm),
None,
);
let mut parser = Parser::new_from(Session { handler: &handler }, lexer);
let _module = parser
.parse_module()
.map_err(|mut e| {
e.emit();
})
.unwrap();
let mut parser = Parser::new_from(lexer);
let _module = parser.parse_module().map_err(|_| ()).unwrap();
b.iter(|| test::black_box(()));
Ok(())
@ -292,22 +299,16 @@ fn visit_contains_this(b: &mut Bencher) {
b.bytes = SOURCE.len() as _;
let _ = ::testing::run_test(false, |cm, handler| {
let _ = ::testing::run_test(false, |cm, _| {
let fm = cm.new_source_file(FileName::Anon, SOURCE.into());
let lexer = Lexer::new(
Session { handler: &handler },
Syntax::default(),
Default::default(),
SourceFileInput::from(&*fm),
None,
);
let mut parser = Parser::new_from(Session { handler: &handler }, lexer);
let module = parser
.parse_module()
.map_err(|mut e| {
e.emit();
})
.unwrap();
let mut parser = Parser::new_from(lexer);
let module = parser.parse_module().map_err(|_| ()).unwrap();
b.iter(|| test::black_box(contains_this_expr(&module)));
Ok(())

View File

@ -7,7 +7,7 @@ static GLOBAL: System = System;
use std::alloc::System;
use swc_common::{chain, FileName};
use swc_ecma_parser::{Parser, Session, SourceFileInput, Syntax};
use swc_ecma_parser::{Parser, SourceFileInput, Syntax};
use swc_ecma_transforms::helpers;
use swc_ecma_visit::FoldWith;
use test::Bencher;
@ -19,22 +19,11 @@ macro_rules! tr {
($b:expr, $tr:expr) => {
$b.bytes = SOURCE.len() as _;
let _ = ::testing::run_test(false, |cm, handler| {
let _ = ::testing::run_test(false, |cm, _| {
let fm = cm.new_source_file(FileName::Anon, SOURCE.into());
let mut parser = Parser::new(
Session { handler: &handler },
Syntax::default(),
SourceFileInput::from(&*fm),
None,
);
let module = parser
.parse_module()
.map_err(|mut e| {
e.emit();
()
})
.unwrap();
let mut parser = Parser::new(Syntax::default(), SourceFileInput::from(&*fm), None);
let module = parser.parse_module().map_err(|_| ()).unwrap();
helpers::HELPERS.set(&Default::default(), || {
let mut tr = $tr();

View File

@ -7,7 +7,7 @@ static GLOBAL: System = System;
use std::alloc::System;
use swc_common::{chain, FileName, Mark};
use swc_ecma_parser::{lexer::Lexer, Parser, Session, SourceFileInput};
use swc_ecma_parser::{lexer::Lexer, Parser, SourceFileInput};
use swc_ecma_transforms::{compat, helpers};
use test::Bencher;
@ -88,23 +88,16 @@ macro_rules! tr {
use swc_ecma_visit::FoldWith;
$b.bytes = SOURCE.len() as _;
let _ = ::testing::run_test(false, |cm, handler| {
let _ = ::testing::run_test(false, |cm, _| {
let fm = cm.new_source_file(FileName::Anon, SOURCE.into());
let lexer = Lexer::new(
Session { handler: &handler },
Default::default(),
Default::default(),
SourceFileInput::from(&*fm),
None,
);
let mut parser = Parser::new_from(Session { handler: &handler }, lexer);
let module = parser
.parse_module()
.map_err(|mut e| {
e.emit();
()
})
.unwrap();
let mut parser = Parser::new_from(lexer);
let module = parser.parse_module().map_err(|_| ()).unwrap();
helpers::HELPERS.set(&Default::default(), || {
let mut tr = $tr();

View File

@ -1,7 +1,4 @@
use crate::util::{
drop_span,
options::{CM, SESSION},
};
use crate::util::{drop_span, options::CM};
use dashmap::DashMap;
use once_cell::sync::Lazy;
use std::{collections::HashMap, sync::Arc};
@ -9,6 +6,7 @@ use swc_atoms::JsWord;
use swc_common::{util::move_map::MoveMap, FileName};
use swc_ecma_ast::*;
use swc_ecma_parser::{lexer::Lexer, Parser, SourceFileInput};
use swc_ecma_utils::HANDLER;
use swc_ecma_visit::{Fold, FoldWith};
pub fn const_modules(globals: HashMap<JsWord, HashMap<JsWord, String>>) -> impl Fold {
@ -41,16 +39,17 @@ fn parse_option(name: &str, src: String) -> Arc<Expr> {
}
let lexer = Lexer::new(
*SESSION,
Default::default(),
Default::default(),
SourceFileInput::from(&*fm),
None,
);
let expr = Parser::new_from(*SESSION, lexer)
let expr = Parser::new_from(lexer)
.parse_expr()
.map_err(|mut e| {
e.emit();
.map_err(|e| {
if HANDLER.is_set() {
HANDLER.with(|h| e.into_diagnostic(h).emit())
}
})
.map(drop_span)
.unwrap_or_else(|()| {

View File

@ -4,10 +4,7 @@ use std::sync::atomic::{AtomicBool, Ordering};
use swc_common::{FileName, Mark, Span, DUMMY_SP};
use swc_ecma_ast::*;
use swc_ecma_parser::{lexer::Lexer, Parser, SourceFileInput};
use swc_ecma_utils::{
options::{CM, SESSION},
prepend_stmts, quote_ident, quote_str, DropSpan,
};
use swc_ecma_utils::{options::CM, prepend_stmts, quote_ident, quote_str, DropSpan};
use swc_ecma_visit::{Fold, FoldWith};
#[macro_export]
@ -26,22 +23,20 @@ macro_rules! add_to {
let code = include_str!(concat!("helpers/_", stringify!($name), ".js"));
let fm = CM.new_source_file(FileName::Custom(stringify!($name).into()), code.into());
let lexer = Lexer::new(
*SESSION,
Default::default(),
Default::default(),
SourceFileInput::from(&*fm),
None,
);
let stmts = Parser::new_from(*SESSION, lexer)
let stmts = Parser::new_from(lexer)
.parse_script()
.map(|script| {
script.body.fold_with(&mut DropSpan {
preserve_ctxt: false,
})
})
.map_err(|mut e| {
e.emit();
()
.map_err(|e| {
unreachable!("Error occurred while parsing error: {:?}", e);
})
.unwrap();
stmts

View File

@ -86,9 +86,7 @@ where
let expected = {
let expected = tester.with_parser("expected.js", Syntax::default(), expected, |p| {
p.parse_module().map_err(|mut e| {
e.emit();
})
p.parse_module()
})?;
tester.print(&expected)
};

View File

@ -3,12 +3,10 @@ use inflector::Inflector;
use serde::{Deserialize, Serialize};
use std::{collections::HashMap, sync::Arc};
use swc_atoms::JsWord;
use swc_common::{
errors::{ColorConfig, Handler},
FileName, SourceMap,
};
use swc_common::{FileName, SourceMap};
use swc_ecma_ast::Expr;
use swc_ecma_parser::{lexer::Lexer, Parser, Session, SourceFileInput, Syntax};
use swc_ecma_parser::{lexer::Lexer, Parser, SourceFileInput, Syntax};
use swc_ecma_utils::HANDLER;
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
@ -22,10 +20,6 @@ pub struct Config {
impl Config {
pub(super) fn build(self, cm: Arc<SourceMap>) -> BuiltConfig {
let handler = Handler::with_tty_emitter(ColorConfig::Always, false, true, Some(cm.clone()));
let session = Session { handler: &handler };
BuiltConfig {
config: self.config,
globals: self
@ -37,16 +31,17 @@ impl Config {
.new_source_file(FileName::Custom(format!("<umd-config-{}.js>", s)), s);
let lexer = Lexer::new(
session,
Syntax::default(),
Default::default(),
SourceFileInput::from(&*fm),
None,
);
Parser::new_from(session, lexer)
Parser::new_from(lexer)
.parse_expr()
.map_err(|mut e| {
e.emit();
.map_err(|e| {
if HANDLER.is_set() {
HANDLER.with(|h| e.into_diagnostic(h).emit())
}
})
.unwrap()
};

View File

@ -1,8 +1,4 @@
use crate::util::{
drop_span,
options::{CM, SESSION},
ExprFactory, HANDLER,
};
use crate::util::{drop_span, options::CM, ExprFactory, HANDLER};
use dashmap::DashMap;
use once_cell::sync::Lazy;
use regex::Regex;
@ -67,23 +63,20 @@ fn parse_option(name: &str, src: String) -> Box<Expr> {
return expr.clone();
}
let expr = Parser::new(
*SESSION,
Syntax::default(),
SourceFileInput::from(&*fm),
None,
)
.parse_expr()
.map_err(|mut e| {
e.emit();
})
.map(drop_span)
.unwrap_or_else(|()| {
panic!(
"faield to parse jsx option {}: '{}' is not an expression",
name, fm.src,
)
});
let expr = Parser::new(Syntax::default(), SourceFileInput::from(&*fm), None)
.parse_expr()
.map_err(|e| {
if HANDLER.is_set() {
HANDLER.with(|h| e.into_diagnostic(h).emit())
}
})
.map(drop_span)
.unwrap_or_else(|()| {
panic!(
"faield to parse jsx option {}: '{}' is not an expression",
name, fm.src,
)
});
CACHE.insert(fm.src.clone(), expr.clone());

View File

@ -10,7 +10,7 @@ use std::{
use swc_common::{comments::Comments, errors::Handler, FileName, SourceMap};
use swc_ecma_ast::{Pat, *};
use swc_ecma_codegen::Emitter;
use swc_ecma_parser::{lexer::Lexer, Parser, Session, SourceFileInput, Syntax};
use swc_ecma_parser::{error::Error, lexer::Lexer, Parser, SourceFileInput, Syntax};
use swc_ecma_utils::{DropSpan, COMMENTS};
use swc_ecma_visit::{Fold, FoldWith};
use tempfile::tempdir_in;
@ -58,40 +58,29 @@ impl<'a> Tester<'a> {
op: F,
) -> Result<T, ()>
where
F: FnOnce(&mut Parser<'_, Lexer<'_, SourceFileInput<'_>>>) -> Result<T, ()>,
F: FnOnce(&mut Parser<Lexer<SourceFileInput>>) -> Result<T, Error>,
{
let fm = self
.cm
.new_source_file(FileName::Real(file_name.into()), src.into());
let sess = Session {
handler: &self.handler,
};
let mut p = Parser::new(syntax, SourceFileInput::from(&*fm), Some(&self.comments));
let res = op(&mut p).map_err(|e| e.into_diagnostic(&self.handler).emit());
let mut p = Parser::new(
sess,
syntax,
SourceFileInput::from(&*fm),
Some(&self.comments),
);
op(&mut p)
for e in p.take_errors() {
e.into_diagnostic(&self.handler).emit()
}
res
}
pub fn parse_module(&mut self, file_name: &str, src: &str) -> Result<Module, ()> {
self.with_parser(file_name, Syntax::default(), src, |p| {
p.parse_module().map_err(|mut e| {
e.emit();
})
})
self.with_parser(file_name, Syntax::default(), src, |p| p.parse_module())
}
pub fn parse_stmts(&mut self, file_name: &str, src: &str) -> Result<Vec<Stmt>, ()> {
let stmts = self.with_parser(file_name, Syntax::default(), src, |p| {
p.parse_script()
.map_err(|mut e| {
e.emit();
})
.map(|script| script.body)
p.parse_script().map(|script| script.body)
})?;
Ok(stmts)
@ -116,14 +105,16 @@ impl<'a> Tester<'a> {
.new_source_file(FileName::Real(name.into()), src.into());
let module = {
let sess = Session {
handler: &self.handler,
};
let mut p = Parser::new(syntax, SourceFileInput::from(&*fm), None);
let res = p
.parse_module()
.map_err(|e| e.into_diagnostic(&self.handler).emit());
let mut p = Parser::new(sess, syntax, SourceFileInput::from(&*fm), None);
p.parse_module().map_err(|mut e| {
e.emit();
})?
for e in p.take_errors() {
e.into_diagnostic(&self.handler).emit()
}
res?
};
let module = validate!(module)

View File

@ -12,7 +12,7 @@ use std::{
use swc_common::{chain, comments::Comments, errors::Handler, FileName, SourceMap};
use swc_ecma_ast::*;
use swc_ecma_codegen::Emitter;
use swc_ecma_parser::{lexer::Lexer, Parser, Session, SourceFileInput, Syntax};
use swc_ecma_parser::{error::Error, lexer::Lexer, Parser, SourceFileInput, Syntax};
use swc_ecma_transforms::helpers::{InjectHelpers, HELPERS};
use swc_ecma_utils::{DropSpan, COMMENTS};
use swc_ecma_visit::{Fold, FoldWith};
@ -85,31 +85,24 @@ impl<'a> Tester<'a> {
op: F,
) -> Result<T, ()>
where
F: FnOnce(&mut Parser<'_, Lexer<'_, SourceFileInput<'_>>>) -> Result<T, ()>,
F: FnOnce(&mut Parser<Lexer<SourceFileInput>>) -> Result<T, Error>,
{
let fm = self
.cm
.new_source_file(FileName::Real(file_name.into()), src.into());
let sess = Session {
handler: &self.handler,
};
let mut p = Parser::new(syntax, SourceFileInput::from(&*fm), Some(&self.comments));
let res = op(&mut p);
let mut p = Parser::new(
sess,
syntax,
SourceFileInput::from(&*fm),
Some(&self.comments),
);
op(&mut p)
for e in p.take_errors() {
e.into_diagnostic(&self.handler).emit();
}
res.map_err(|e| e.into_diagnostic(&self.handler).emit())
}
pub fn parse_module(&mut self, file_name: &str, src: &str) -> Result<Module, ()> {
self.with_parser(file_name, Syntax::default(), src, |p| {
p.parse_module().map_err(|mut e| {
e.emit();
})
})
self.with_parser(file_name, Syntax::default(), src, |p| p.parse_module())
}
pub fn apply_transform<T: Fold>(
@ -124,14 +117,16 @@ impl<'a> Tester<'a> {
.new_source_file(FileName::Real(name.into()), src.into());
let module = {
let sess = Session {
handler: &self.handler,
};
let mut p = Parser::new(syntax, SourceFileInput::from(&*fm), None);
let res = p.parse_module().map_err(|e| {
e.into_diagnostic(&self.handler).emit();
});
let mut p = Parser::new(sess, syntax, SourceFileInput::from(&*fm), None);
p.parse_module().map_err(|mut e| {
e.emit();
})?
for e in p.take_errors() {
e.into_diagnostic(&self.handler).emit();
}
res?
};
let module = COMMENTS.set(&Comments::default(), || {

View File

@ -11,7 +11,7 @@ use std::{
use swc_common::comments::Comments;
use swc_ecma_ast::*;
use swc_ecma_codegen::{self, Emitter};
use swc_ecma_parser::{lexer::Lexer, Parser, Session, SourceFileInput, Syntax};
use swc_ecma_parser::{lexer::Lexer, Parser, SourceFileInput, Syntax};
use swc_ecma_transforms::fixer;
use swc_ecma_utils::{DropSpan, COMMENTS};
use swc_ecma_visit::{Fold, FoldWith};
@ -170,12 +170,8 @@ fn identity_tests(tests: &mut Vec<TestDescAndFn>) -> Result<(), io::Error> {
let handlers = Box::new(MyHandlers);
let handlers2 = Box::new(MyHandlers);
let mut parser: Parser<'_, Lexer<'_, SourceFileInput<'_>>> = Parser::new(
Session { handler: &handler },
Syntax::default(),
(&*src).into(),
None,
);
let mut parser: Parser<Lexer<SourceFileInput>> =
Parser::new(Syntax::default(), (&*src).into(), None);
{
let mut emitter = Emitter {
@ -202,27 +198,22 @@ fn identity_tests(tests: &mut Vec<TestDescAndFn>) -> Result<(), io::Error> {
// Parse source
let mut e_parser: Parser<'_, Lexer<'_, SourceFileInput<'_>>> =
Parser::new(
Session { handler: &handler },
Syntax::default(),
(&*expected).into(),
None,
);
let mut e_parser: Parser<Lexer<SourceFileInput>> =
Parser::new(Syntax::default(), (&*expected).into(), None);
if module {
let module = parser
.parse_module()
.map(normalize)
.map(|p| p.fold_with(&mut fixer()))
.map_err(|mut e| {
e.emit();
.map_err(|e| {
e.into_diagnostic(handler).emit();
})?;
let module2 = e_parser
.parse_module()
.map(normalize)
.map_err(|mut e| {
e.emit();
.map_err(|e| {
e.into_diagnostic(handler).emit();
})
.expect("failed to parse reference file");
if module == module2 {
@ -235,15 +226,15 @@ fn identity_tests(tests: &mut Vec<TestDescAndFn>) -> Result<(), io::Error> {
.parse_script()
.map(normalize)
.map(|p| p.fold_with(&mut fixer()))
.map_err(|mut e| {
e.emit();
.map_err(|e| {
e.into_diagnostic(&handler).emit();
})?;
let script2 = e_parser
.parse_script()
.map(normalize)
.map(|p| p.fold_with(&mut fixer()))
.map_err(|mut e| {
e.emit();
.map_err(|e| {
e.into_diagnostic(&handler).emit();
})?;
if script == script2 {
@ -253,6 +244,11 @@ fn identity_tests(tests: &mut Vec<TestDescAndFn>) -> Result<(), io::Error> {
expected_emitter.emit_script(&script2).unwrap();
}
}
for e in parser.take_errors() {
e.into_diagnostic(handler).emit();
}
let output = String::from_utf8_lossy(&*wr.0.read().unwrap()).to_string();
let expected = String::from_utf8_lossy(&*wr2.0.read().unwrap()).to_string();
if output == expected {

View File

@ -1,6 +1,6 @@
[package]
name = "swc_ecma_utils"
version = "0.12.0"
version = "0.13.0"
authors = ["강동윤 <kdy1997.dev@gmail.com>"]
license = "Apache-2.0/MIT"
repository = "https://github.com/swc-project/swc.git"
@ -14,7 +14,7 @@ edition = "2018"
swc_ecma_ast = { version = "0.24.0", path ="../ast" }
swc_atoms = { version = "0.2.0", path ="../../atoms" }
swc_common = { version = "0.7.0", path ="../../common" }
swc_ecma_parser = { version = "0.28", path ="../parser" }
swc_ecma_parser = { version = "0.29", path ="../parser" }
swc_ecma_visit = { version = "0.9", path ="../visit" }
anyhow = "1.0.26"
once_cell = "1"

View File

@ -1622,6 +1622,7 @@ impl<'a> UsageFinder<'a> {
}
}
// Used for error reporting in transform.
scoped_thread_local!(pub static HANDLER: Handler);
scoped_thread_local!(pub static COMMENTS: Comments);

View File

@ -1,15 +1,7 @@
use once_cell::sync::Lazy;
use std::sync::Arc;
use swc_common::{
errors::{ColorConfig, Handler},
FilePathMapping, SourceMap,
};
use swc_ecma_parser::Session;
use swc_common::{FilePathMapping, SourceMap};
/// SourceMap used by transforms.
pub static CM: Lazy<Arc<SourceMap>> =
Lazy::new(|| Arc::new(SourceMap::new(FilePathMapping::empty())));
pub static HANDLER: Lazy<Handler> =
Lazy::new(|| Handler::with_tty_emitter(ColorConfig::Always, false, true, Some(CM.clone())));
pub static SESSION: Lazy<Session> = Lazy::new(|| Session { handler: &*HANDLER });

View File

@ -3,10 +3,7 @@ use std::sync::atomic::{AtomicBool, Ordering::SeqCst};
use swc_common::FileName;
use swc_ecma_ast::*;
use swc_ecma_parser::{lexer::Lexer, Parser, SourceFileInput};
use swc_ecma_utils::{
options::{CM, SESSION},
prepend_stmts, DropSpan,
};
use swc_ecma_utils::{options::CM, prepend_stmts, DropSpan};
use swc_ecma_visit::FoldWith;
#[derive(Debug, Default)]
@ -30,19 +27,17 @@ macro_rules! define {
let fm =
CM.new_source_file(FileName::Custom(stringify!($name).into()), code.into());
let lexer = Lexer::new(
*SESSION,
Default::default(),
Default::default(),
SourceFileInput::from(&*fm),
None,
);
let stmts = Parser::new_from(*SESSION, lexer)
let stmts = Parser::new_from(lexer)
.parse_module()
.map(|script| script.body.fold_with(&mut DropSpan {
preserve_ctxt:false,
}))
.map_err(|mut e| {
e.emit();
.map_err(|_| {
()
})
.unwrap();

View File

@ -17,7 +17,7 @@ use swc_common::{errors::Handler, FileName, Mark, SourceMap};
pub use swc_ecmascript::parser::JscTarget;
use swc_ecmascript::{
ast::{Expr, ExprStmt, ModuleItem, Stmt},
parser::{lexer::Lexer, Parser, Session as ParseSess, SourceFileInput, Syntax, TsConfig},
parser::{lexer::Lexer, Parser, SourceFileInput, Syntax, TsConfig},
preset_env,
transforms::{
const_modules, modules,
@ -637,20 +637,22 @@ impl GlobalPassOption {
};
let v_str = v.clone();
let fm = cm.new_source_file(FileName::Custom(format!("GLOBAL.{}", k)), v);
let session = ParseSess { handler };
let lexer = Lexer::new(
session,
Syntax::Es(Default::default()),
Default::default(),
SourceFileInput::from(&*fm),
None,
);
let mut module = Parser::new_from(session, lexer)
.parse_module()
.map_err(|mut e| {
e.emit();
})
let mut p = Parser::new_from(lexer);
let module = p.parse_module();
for e in p.take_errors() {
e.into_diagnostic(handler).emit()
}
let mut module = module
.map_err(|e| e.into_diagnostic(handler).emit())
.unwrap_or_else(|()| {
panic!(
"failed to parse global variable {}=`{}` as module",

View File

@ -18,7 +18,7 @@ pub use ecmascript::parser::SourceFileInput;
use ecmascript::{
ast::Program,
codegen::{self, Emitter, Node},
parser::{lexer::Lexer, Parser, Session as ParseSess, Syntax},
parser::{lexer::Lexer, Parser, Syntax},
transforms::{
helpers::{self, Helpers},
util,
@ -148,11 +148,7 @@ impl Compiler {
parse_comments: bool,
) -> Result<Program, Error> {
self.run(|| {
let session = ParseSess {
handler: &self.handler,
};
let lexer = Lexer::new(
session,
syntax,
target,
SourceFileInput::from(&*fm),
@ -162,23 +158,31 @@ impl Compiler {
None
},
);
let mut parser = Parser::new_from(session, lexer);
let mut parser = Parser::new_from(lexer);
let program = if is_module {
parser
.parse_module()
.map_err(|mut e| {
e.emit();
Error::msg("failed to parse module")
})
.map(Program::Module)?
let m = parser.parse_module();
for e in parser.take_errors() {
e.into_diagnostic(&self.handler).emit();
}
m.map_err(|e| {
e.into_diagnostic(&self.handler).emit();
Error::msg("failed to parse module")
})
.map(Program::Module)?
} else {
parser
.parse_script()
.map_err(|mut e| {
e.emit();
Error::msg("failed to parse module")
})
.map(Program::Script)?
let s = parser.parse_script();
for e in parser.take_errors() {
e.into_diagnostic(&self.handler).emit();
}
s.map_err(|e| {
e.into_diagnostic(&self.handler).emit();
Error::msg("failed to parse module")
})
.map(Program::Script)?
};
Ok(program)

View File

@ -2,7 +2,7 @@ use std::path::Path;
use swc::{
ecmascript::{
ast::Module,
parser::{lexer::Lexer, PResult, Parser, Session, Syntax},
parser::{lexer::Lexer, PResult, Parser, Syntax},
},
SourceFileInput,
};
@ -10,7 +10,7 @@ use testing::NormalizedOutput;
fn with_parser<F, Ret>(file_name: &str, f: F) -> Result<Ret, NormalizedOutput>
where
F: for<'a> FnOnce(&mut Parser<'a, Lexer<'a, SourceFileInput<'_>>>) -> PResult<'a, Ret>,
F: FnOnce(&mut Parser<Lexer<SourceFileInput>>) -> PResult<Ret>,
{
let output = ::testing::run_test(false, |cm, handler| {
let fm = cm
@ -18,7 +18,6 @@ where
.unwrap_or_else(|e| panic!("failed to load {}: {}", file_name, e));
let lexer = Lexer::new(
Session { handler: &handler },
if file_name.ends_with(".ts") {
Syntax::Typescript(Default::default())
} else {
@ -28,10 +27,12 @@ where
(&*fm).into(),
None,
);
let res =
f(&mut Parser::new_from(Session { handler: &handler }, lexer)).map_err(|mut e| {
e.emit();
});
let mut p = Parser::new_from(lexer);
let res = f(&mut p).map_err(|e| e.into_diagnostic(&handler).emit());
for e in p.take_errors() {
e.into_diagnostic(&handler).emit()
}
if handler.has_errors() {
return Err(());