Mirror of https://github.com/swc-project/swc.git (synced 2024-12-25 14:43:33 +03:00)
Fix issues (#1189)

swc_bundler:
  - Support emitting an IIFE. (Closes #1184)

swc_ecma_parser:
  - Auto-detect script / module. (Closes #1164)
  - lexer: Error recovery for strict mode and module mode.
  - More error-recovery logic for strict-mode violations.
  - Fix panic on invalid input. (Closes #1170)
This commit is contained in:
  parent 667a8c72c0
  commit b2aec35eb6
@@ -6,7 +6,7 @@ edition = "2018"
 license = "Apache-2.0/MIT"
 name = "swc_bundler"
 repository = "https://github.com/swc-project/swc.git"
-version = "0.12.0"
+version = "0.13.0"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [features]
@@ -29,9 +29,9 @@ retain_mut = "=0.1.1"
 swc_atoms = {version = "0.2.4", path = "../atoms"}
 swc_common = {version = "0.10.0", path = "../common"}
 swc_ecma_ast = {version = "0.33.0", path = "../ecmascript/ast"}
-swc_ecma_codegen = {version = "0.38.0", path = "../ecmascript/codegen"}
-swc_ecma_parser = {version = "0.40.0", path = "../ecmascript/parser"}
-swc_ecma_transforms = {version = "0.27.0", path = "../ecmascript/transforms"}
+swc_ecma_codegen = {version = "0.39.0", path = "../ecmascript/codegen"}
+swc_ecma_parser = {version = "0.41.0", path = "../ecmascript/parser"}
+swc_ecma_transforms = {version = "0.28.0", path = "../ecmascript/transforms"}
 swc_ecma_utils = {version = "0.23.0", path = "../ecmascript/utils"}
 swc_ecma_visit = {version = "0.19.0", path = "../ecmascript/visit"}
@@ -1,14 +1,16 @@
-use crate::{hash::calc_hash, Bundle, BundleKind, Bundler, Load, Resolve};
+use crate::{hash::calc_hash, Bundle, BundleKind, Bundler, Load, ModuleType, Resolve};
 use anyhow::Error;
 use relative_path::RelativePath;
 use std::{
     collections::HashMap,
     path::{Path, PathBuf},
 };
-use swc_common::{util::move_map::MoveMap, FileName};
-use swc_ecma_ast::{ImportDecl, Str};
+use swc_atoms::js_word;
+use swc_common::{util::move_map::MoveMap, FileName, DUMMY_SP};
+use swc_ecma_ast::*;
 use swc_ecma_transforms::{fixer, hygiene};
-use swc_ecma_visit::{noop_fold_type, Fold, FoldWith};
+use swc_ecma_utils::{find_ids, private_ident, ExprFactory};
+use swc_ecma_visit::{noop_fold_type, noop_visit_type, Fold, FoldWith, Node, Visit, VisitWith};

 impl<L, R> Bundler<'_, L, R>
 where
@@ -27,9 +29,12 @@ where

         for mut bundle in bundles {
             bundle.module = self.optimize(bundle.module);
+            bundle.module = self.may_wrap_with_iife(bundle.module);
+
             bundle.module = bundle.module.fold_with(&mut hygiene());
+
             bundle.module = bundle.module.fold_with(&mut fixer(None));

             match bundle.kind {
                 BundleKind::Named { .. } => {
                     // Inject helpers
@@ -114,6 +119,227 @@ where
             Ok(new)
         })
     }
+
+    fn may_wrap_with_iife(&self, module: Module) -> Module {
+        if self.config.module != ModuleType::Iife {
+            return module;
+        }
+
+        let mut top_level_await_finder = TopLevelAwaitFinder::default();
+        module.visit_with(&Invalid { span: DUMMY_SP }, &mut top_level_await_finder);
+
+        let is_async = top_level_await_finder.found;
+
+        // Properties of returned object
+        let mut props = vec![];
+
+        let mut body = BlockStmt {
+            span: module.span,
+            stmts: module
+                .body
+                .into_iter()
+                .filter_map(|item| {
+                    let decl = match item {
+                        ModuleItem::ModuleDecl(v) => v,
+                        ModuleItem::Stmt(stmt) => return Some(stmt),
+                    };
+
+                    match decl {
+                        ModuleDecl::ExportNamed(NamedExport { src: Some(..), .. })
+                        | ModuleDecl::TsImportEquals(_)
+                        | ModuleDecl::TsExportAssignment(_)
+                        | ModuleDecl::TsNamespaceExport(_)
+                        | ModuleDecl::Import(_) => None,
+
+                        ModuleDecl::ExportDecl(export) => {
+                            match &export.decl {
+                                Decl::Class(ClassDecl { ident, .. })
+                                | Decl::Fn(FnDecl { ident, .. }) => {
+                                    props.push(PropOrSpread::Prop(Box::new(Prop::Shorthand(
+                                        ident.clone(),
+                                    ))));
+                                }
+                                Decl::Var(decl) => {
+                                    let ids: Vec<Ident> = find_ids(decl);
+                                    props.extend(
+                                        ids.into_iter()
+                                            .map(Prop::Shorthand)
+                                            .map(Box::new)
+                                            .map(PropOrSpread::Prop),
+                                    );
+                                }
+                                _ => unreachable!(),
+                            }
+
+                            Some(Stmt::Decl(export.decl))
+                        }
+
+                        ModuleDecl::ExportNamed(NamedExport {
+                            specifiers,
+                            src: None,
+                            ..
+                        }) => {
+                            for s in specifiers {
+                                match s {
+                                    ExportSpecifier::Namespace(..) => {
+                                        // unreachable
+                                    }
+                                    ExportSpecifier::Default(s) => {
+                                        props.push(PropOrSpread::Prop(Box::new(Prop::KeyValue(
+                                            KeyValueProp {
+                                                key: PropName::Ident(Ident::new(
+                                                    js_word!("default"),
+                                                    DUMMY_SP,
+                                                )),
+                                                value: Box::new(Expr::Ident(s.exported)),
+                                            },
+                                        ))));
+                                    }
+                                    ExportSpecifier::Named(s) => match s.exported {
+                                        Some(exported) => {
+                                            props.push(PropOrSpread::Prop(Box::new(
+                                                Prop::KeyValue(KeyValueProp {
+                                                    key: PropName::Ident(exported),
+                                                    value: Box::new(Expr::Ident(s.orig)),
+                                                }),
+                                            )));
+                                        }
+                                        None => {
+                                            props.push(PropOrSpread::Prop(Box::new(
+                                                Prop::Shorthand(s.orig),
+                                            )));
+                                        }
+                                    },
+                                }
+                            }
+
+                            None
+                        }
+
+                        ModuleDecl::ExportDefaultDecl(export) => match export.decl {
+                            DefaultDecl::Class(expr) => {
+                                let ident = expr.ident;
+                                let ident =
+                                    ident.unwrap_or_else(|| private_ident!("_default_decl"));
+
+                                props.push(PropOrSpread::Prop(Box::new(Prop::KeyValue(
+                                    KeyValueProp {
+                                        key: PropName::Ident(Ident::new(
+                                            js_word!("default"),
+                                            export.span,
+                                        )),
+                                        value: Box::new(Expr::Ident(ident.clone())),
+                                    },
+                                ))));
+
+                                Some(Stmt::Decl(Decl::Class(ClassDecl {
+                                    ident,
+                                    class: expr.class,
+                                    declare: false,
+                                })))
+                            }
+                            DefaultDecl::Fn(expr) => {
+                                let ident = expr.ident;
+                                let ident =
+                                    ident.unwrap_or_else(|| private_ident!("_default_decl"));
+
+                                props.push(PropOrSpread::Prop(Box::new(Prop::KeyValue(
+                                    KeyValueProp {
+                                        key: PropName::Ident(Ident::new(
+                                            js_word!("default"),
+                                            export.span,
+                                        )),
+                                        value: Box::new(Expr::Ident(ident.clone())),
+                                    },
+                                ))));
+
+                                Some(Stmt::Decl(Decl::Fn(FnDecl {
+                                    ident,
+                                    function: expr.function,
+                                    declare: false,
+                                })))
+                            }
+                            DefaultDecl::TsInterfaceDecl(_) => None,
+                        },
+                        ModuleDecl::ExportDefaultExpr(export) => {
+                            let default_var = private_ident!("default");
+                            props.push(PropOrSpread::Prop(Box::new(Prop::Shorthand(
+                                default_var.clone(),
+                            ))));
+                            let var = VarDeclarator {
+                                span: DUMMY_SP,
+                                name: Pat::Ident(default_var),
+                                init: Some(export.expr),
+                                definite: false,
+                            };
+                            Some(Stmt::Decl(Decl::Var(VarDecl {
+                                span: DUMMY_SP,
+                                kind: VarDeclKind::Const,
+                                declare: false,
+                                decls: vec![var],
+                            })))
+                        }
+
+                        ModuleDecl::ExportAll(_) => None,
+                    }
+                })
+                .collect(),
+        };
+        body.stmts.push(Stmt::Return(ReturnStmt {
+            span: DUMMY_SP,
+            arg: Some(Box::new(Expr::Object(ObjectLit {
+                span: DUMMY_SP,
+                props,
+            }))),
+        }));
+
+        let f = Function {
+            is_generator: false,
+            is_async,
+            params: Default::default(),
+            decorators: Default::default(),
+            span: DUMMY_SP,
+            body: Some(body),
+            type_params: Default::default(),
+            return_type: Default::default(),
+        };
+
+        let invoked_fn_expr = FnExpr {
+            ident: None,
+            function: f,
+        };
+
+        let iife = Box::new(Expr::Call(CallExpr {
+            span: DUMMY_SP,
+            callee: invoked_fn_expr.as_callee(),
+            args: Default::default(),
+            type_args: Default::default(),
+        }));
+
+        Module {
+            span: DUMMY_SP,
+            shebang: None,
+            body: vec![ModuleItem::Stmt(Stmt::Expr(ExprStmt {
+                span: DUMMY_SP,
+                expr: iife,
+            }))],
+        }
+    }
 }
+
+#[derive(Default)]
+struct TopLevelAwaitFinder {
+    found: bool,
+}
+
+impl Visit for TopLevelAwaitFinder {
+    noop_visit_type!();
+
+    fn visit_stmts(&mut self, _: &[Stmt], _: &dyn Node) {}
+
+    fn visit_await_expr(&mut self, _: &AwaitExpr, _: &dyn Node) {
+        self.found = true;
+    }
+}

 /// Import renamer. This pass changes import path.
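For context, here is a rough sketch of the shape `may_wrap_with_iife` is expected to produce for a small bundle. This is not taken from the swc test suite; the input, identifier names, and formatting are assumptions, and the before/after text is held in string literals so the sketch compiles on its own.

// Hedged illustration of the IIFE output mode added in this commit.
fn main() {
    let es_module = r#"
export const a = 1;
export default function foo() {}
"#;
    // With ModuleType::Iife, the module body is moved into a function expression
    // that is invoked immediately; the exports come back as properties of the
    // returned object, with the default export stored under the key `default`.
    // Top-level `await` in the input would make the wrapper an async function.
    let expected_shape = r#"
(function() {
    const a = 1;
    function foo() {}
    return { a, default: foo };
})();
"#;
    println!("{}\n--- wraps to ---\n{}", es_module, expected_shape);
}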
@@ -30,6 +30,21 @@ pub struct Config {

     /// List of modules which should be preserved.
     pub external_modules: Vec<JsWord>,
+
+    /// Type of the emitted module.
+    pub module: ModuleType,
 }
+
+#[derive(Debug, PartialEq, Eq, Hash)]
+pub enum ModuleType {
+    Es,
+    Iife,
+}
+
+impl Default for ModuleType {
+    fn default() -> Self {
+        ModuleType::Es
+    }
+}

 #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
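A minimal sketch of how the new knob is meant to be used, assuming the `ModuleType` definition above and its re-export from the crate root shown later in this diff (the other `Config` fields are omitted):

use swc_bundler::ModuleType;

fn main() {
    // ModuleType::Es stays the default, so existing configs keep emitting ES
    // modules; opting into the new behaviour is a matter of setting `module`
    // on the bundler Config to ModuleType::Iife.
    let requested = ModuleType::Iife;
    assert_eq!(ModuleType::default(), ModuleType::Es);
    assert_ne!(requested, ModuleType::Es);
}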
@@ -132,6 +132,7 @@ impl TestBuilder {
                 require: true,
                 disable_inliner: true,
                 external_modules: vec![],
+                module: Default::default(),
             },
             Box::new(Hook),
         );
@@ -1,5 +1,5 @@
 pub use self::{
-    bundler::{Bundle, BundleKind, Bundler, Config},
+    bundler::{Bundle, BundleKind, Bundler, Config, ModuleType},
     hook::Hook,
     id::ModuleId,
     load::Load,
@@ -168,6 +168,7 @@ fn reference_tests(tests: &mut Vec<TestDescAndFn>, errors: bool) -> Result<(), i
                 .into_iter()
                 .map(From::from)
                 .collect(),
+            module: Default::default(),
         },
         Box::new(Hook),
     );
@@ -6,7 +6,7 @@ edition = "2018"
 license = "Apache-2.0/MIT"
 name = "swc_ecmascript"
 repository = "https://github.com/swc-project/swc.git"
-version = "0.11.2"
+version = "0.12.0"

 [features]
 codegen = ["swc_ecma_codegen"]
@@ -21,10 +21,10 @@ react = ["swc_ecma_transforms", "swc_ecma_transforms/react"]

 [dependencies]
 swc_ecma_ast = {version = "0.33.0", path = "./ast"}
-swc_ecma_codegen = {version = "0.38.0", path = "./codegen", optional = true}
-swc_ecma_dep_graph = {version = "0.6.0", path = "./dep-graph", optional = true}
-swc_ecma_parser = {version = "0.40.0", path = "./parser", optional = true}
-swc_ecma_transforms = {version = "0.27.0", path = "./transforms", optional = true}
+swc_ecma_codegen = {version = "0.39.0", path = "./codegen", optional = true}
+swc_ecma_dep_graph = {version = "0.7.0", path = "./dep-graph", optional = true}
+swc_ecma_parser = {version = "0.41.0", path = "./parser", optional = true}
+swc_ecma_transforms = {version = "0.28.0", path = "./transforms", optional = true}
 swc_ecma_utils = {version = "0.23.0", path = "./utils", optional = true}
 swc_ecma_visit = {version = "0.19.1", path = "./visit", optional = true}
@@ -7,7 +7,7 @@ include = ["Cargo.toml", "src/**/*.rs"]
 license = "Apache-2.0/MIT"
 name = "swc_ecma_codegen"
 repository = "https://github.com/swc-project/swc.git"
-version = "0.38.1"
+version = "0.39.0"

 [dependencies]
 bitflags = "1"
@@ -20,5 +20,5 @@ swc_ecma_codegen_macros = {version = "0.5", path = "./macros"}

 [dev-dependencies]
 swc_common = {version = "0.10.0", path = "../../common", features = ["sourcemap"]}
-swc_ecma_parser = {version = "0.40.0", path = "../parser"}
+swc_ecma_parser = {version = "0.41.0", path = "../parser"}
 testing = {version = "0.10.0", path = "../../testing"}
@@ -6,7 +6,7 @@ edition = "2018"
 license = "Apache-2.0/MIT"
 name = "swc_ecma_dep_graph"
 repository = "https://github.com/swc-project/swc.git"
-version = "0.6.0"
+version = "0.7.0"

 [dependencies]
 swc_atoms = {version = "0.2", path = "../../atoms"}
@@ -15,5 +15,5 @@ swc_ecma_ast = {version = "0.33.0", path = "../ast"}
 swc_ecma_visit = {version = "0.19.0", path = "../visit"}

 [dev-dependencies]
-swc_ecma_parser = {version = "0.40.0", path = "../parser"}
+swc_ecma_parser = {version = "0.41.0", path = "../parser"}
 testing = {version = "0.10.0", path = "../../testing"}
@@ -5,7 +5,7 @@ documentation = "https://swc.rs/rustdoc/jsdoc/"
 edition = "2018"
 license = "Apache-2.0/MIT"
 name = "jsdoc"
-version = "0.8.0"
+version = "0.9.0"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

@@ -19,6 +19,6 @@ swc_common = {version = "0.10.0", path = "../../common"}
 anyhow = "1"
 dashmap = "3"
 swc_ecma_ast = {version = "0.33.0", path = "../ast"}
-swc_ecma_parser = {version = "0.40.0", path = "../parser"}
+swc_ecma_parser = {version = "0.41.0", path = "../parser"}
 testing = {version = "0.10.0", path = "../../testing"}
 walkdir = "2"
@@ -7,7 +7,7 @@ include = ["Cargo.toml", "src/**/*.rs", "examples/**/*.rs"]
 license = "Apache-2.0/MIT"
 name = "swc_ecma_parser"
 repository = "https://github.com/swc-project/swc.git"
-version = "0.40.0"
+version = "0.41.0"

 [features]
 default = []
@@ -108,6 +108,7 @@ pub struct Lexer<'a, I: Input> {
     pub(crate) target: JscTarget,

     errors: Rc<RefCell<Vec<Error>>>,
+    module_errors: Rc<RefCell<Vec<Error>>>,

     buf: String,
 }
@@ -122,19 +123,20 @@ impl<'a, I: Input> Lexer<'a, I> {
         comments: Option<&'a dyn Comments>,
     ) -> Self {
         Lexer {
-            comments,
             leading_comments_buffer: if comments.is_some() {
                 Some(Default::default())
             } else {
                 None
            },
+            comments,
+            ctx: Default::default(),
             input,
             last_comment_pos: Rc::new(RefCell::new(BytePos(0))),
             state: State::new(syntax),
-            ctx: Default::default(),
             syntax,
             target,
             errors: Default::default(),
+            module_errors: Default::default(),
             buf: String::with_capacity(16),
         }
     }
@@ -372,9 +374,7 @@ impl<'a, I: Input> Lexer<'a, I> {

         // Handle -->
         if self.state.had_line_break && c == '-' && self.eat(b'>') {
-            if self.ctx.module {
-                return self.error(start, SyntaxError::LegacyCommentInModule)?;
-            }
+            self.emit_module_mode_error(start, SyntaxError::LegacyCommentInModule);
             self.skip_line_comment(0);
             self.skip_space()?;
             return self.read_token();
@@ -558,9 +558,7 @@ impl<'a, I: Input> Lexer<'a, I> {
             self.error(start, SyntaxError::LegacyOctal)?
         }

-        if self.ctx.strict {
-            self.error(start, SyntaxError::LegacyOctal)?
-        }
+        self.emit_strict_mode_error(start, SyntaxError::LegacyOctal);

         let mut value: u8 = first_c.to_digit(8).unwrap() as u8;
         macro_rules! one {
@@ -631,9 +629,8 @@ impl<'a, I: Input> Lexer<'a, I> {
         if c == '<' && self.is(b'!') && self.peek() == Some('-') && self.peek_ahead() == Some('-') {
             self.skip_line_comment(3);
             self.skip_space()?;
-            if self.ctx.module {
-                self.error(start, SyntaxError::LegacyCommentInModule)?;
-            }
+            self.emit_module_mode_error(start, SyntaxError::LegacyCommentInModule);

             return self.read_token();
         }
@@ -62,9 +62,8 @@ impl<'a, I: Input> Lexer<'a, I> {

         // if it contains '8' or '9', it's decimal.
         if d.clone().any(|v| v == 8 || v == 9) {
-            if self.ctx.strict {
-                self.error(start, SyntaxError::LegacyDecimal)?
-            }
+            // Continue parsing
+            self.emit_strict_mode_error(start, SyntaxError::LegacyDecimal);
         } else {
             // It's Legacy octal, and we should reinterpret value.
             let val = u64::from_str_radix(&val.to_string(), 8)
@@ -385,9 +384,7 @@ impl<'a, I: Input> Lexer<'a, I> {
         if self.syntax.typescript() && self.target >= JscTarget::Es5 {
             self.emit_error(start, SyntaxError::TS1085);
         }
-        if self.ctx.strict {
-            self.emit_error(start, SyntaxError::LegacyOctal);
-        }
+        self.emit_strict_mode_error(start, SyntaxError::LegacyOctal);

         return Ok(val);
     }
@@ -101,6 +101,10 @@ impl<'a> From<&'a Token> for TokenType {

 impl<I: Input> Tokens for Lexer<'_, I> {
     fn set_ctx(&mut self, ctx: Context) {
+        if ctx.module && !self.module_errors.borrow().is_empty() {
+            let mut module_errors = self.module_errors.borrow_mut();
+            self.errors.borrow_mut().append(&mut *module_errors);
+        }
         self.ctx = ctx
     }
@@ -137,6 +141,14 @@ impl<I: Input> Tokens for Lexer<'_, I> {
     fn take_errors(&mut self) -> Vec<Error> {
         take(&mut self.errors.borrow_mut())
     }
+
+    fn add_module_mode_error(&self, error: Error) {
+        if self.ctx.module {
+            self.add_error(error);
+            return;
+        }
+        self.module_errors.borrow_mut().push(error);
+    }
 }

 impl<'a, I: Input> Iterator for Lexer<'a, I> {
@@ -662,12 +674,14 @@ pub(crate) fn lex(syntax: Syntax, s: &'static str) -> Vec<TokenAndSpan> {

 /// lex `s` within module context.
 #[cfg(test)]
-pub(crate) fn lex_module(syntax: Syntax, s: &'static str) -> Vec<TokenAndSpan> {
+pub(crate) fn lex_module_errors(syntax: Syntax, s: &'static str) -> Vec<Error> {
     with_lexer(syntax, Default::default(), s, |l| {
         l.ctx.strict = true;
         l.ctx.module = true;

-        Ok(l.collect())
+        let _: Vec<_> = l.collect();
+
+        Ok(l.take_errors())
     })
     .unwrap()
 }
@@ -1,7 +1,7 @@
 extern crate test;

 use super::{
-    state::{lex, lex_module, lex_tokens, lex_tokens_with_target, with_lexer},
+    state::{lex, lex_module_errors, lex_tokens, lex_tokens_with_target, with_lexer},
     *,
 };
 use crate::error::{Error, SyntaxError};
@@ -123,36 +123,30 @@ impl WithSpan for AssignOpToken {
 #[test]
 fn module_legacy_decimal() {
     assert_eq!(
-        lex_module(Syntax::default(), "08"),
-        vec![Token::Error(Error {
+        lex_module_errors(Syntax::default(), "08"),
+        vec![Error {
             error: Box::new((sp(0..2), SyntaxError::LegacyDecimal)),
-        })
-        .span(0..2)
-        .lb(),]
+        }]
     );
 }

 #[test]
 fn module_legacy_comment_1() {
     assert_eq!(
-        lex_module(Syntax::default(), "<!-- foo oo"),
-        vec![Token::Error(Error {
+        lex_module_errors(Syntax::default(), "<!-- foo oo"),
+        vec![Error {
             error: Box::new((sp(0..11), SyntaxError::LegacyCommentInModule)),
-        })
-        .span(0..11)
-        .lb(),]
+        }]
     )
 }

 #[test]
 fn module_legacy_comment_2() {
     assert_eq!(
-        lex_module(Syntax::default(), "-->"),
-        vec![Token::Error(Error {
+        lex_module_errors(Syntax::default(), "-->"),
+        vec![Error {
             error: Box::new((sp(0..3), SyntaxError::LegacyCommentInModule)),
-        })
-        .span(0..3)
-        .lb(),]
+        }]
     )
 }
@@ -6,7 +6,10 @@
 //!
 //! [babylon/util/identifier.js]:https://github.com/babel/babel/blob/master/packages/babylon/src/util/identifier.js
 use super::{input::Input, Char, LexResult, Lexer};
-use crate::error::{Error, SyntaxError};
+use crate::{
+    error::{Error, SyntaxError},
+    Tokens,
+};
 use std::char;
 use swc_common::{
     comments::{Comment, CommentKind},
@@ -119,6 +122,47 @@ impl<'a, I: Input> Lexer<'a, I> {
         self.errors.borrow_mut().push(err);
     }

+    #[cold]
+    #[inline(never)]
+    pub(super) fn emit_strict_mode_error(&mut self, start: BytePos, kind: SyntaxError) {
+        let span = self.span(start);
+        self.emit_strict_mode_error_span(Span::new(span.lo, span.hi, span.ctxt), kind)
+    }
+
+    #[cold]
+    #[inline(never)]
+    pub(super) fn emit_strict_mode_error_span(&mut self, span: Span, kind: SyntaxError) {
+        if self.ctx.strict {
+            self.emit_error_span(span, kind);
+            return;
+        }
+
+        let err = Error {
+            error: Box::new((span, kind)),
+        };
+
+        self.add_module_mode_error(err);
+    }
+
+    #[cold]
+    #[inline(never)]
+    pub(super) fn emit_module_mode_error(&mut self, start: BytePos, kind: SyntaxError) {
+        let span = self.span(start);
+        self.emit_module_mode_error_span(Span::new(span.lo, span.hi, span.ctxt), kind)
+    }
+
+    /// Some codes are valid in a strict mode script but invalid in module
+    /// code.
+    #[cold]
+    #[inline(never)]
+    pub(super) fn emit_module_mode_error_span(&mut self, span: Span, kind: SyntaxError) {
+        let err = Error {
+            error: Box::new((span, kind)),
+        };
+
+        self.add_module_mode_error(err);
+    }
+
     /// Skip comments or whitespaces.
     ///
     /// See https://tc39.github.io/ecma262/#sec-white-space
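The helpers above encode one decision: a strict-mode violation is reported immediately when the lexer already knows it is in strict mode, and parked otherwise. A stripped-down sketch of that rule, with simplified names and plain Strings instead of the real swc error types:

#[derive(Default)]
struct Recovery {
    strict: bool,
    errors: Vec<String>,        // definitely errors
    module_errors: Vec<String>, // only errors if the file turns out to be a module
}

impl Recovery {
    // Mirrors emit_strict_mode_error: emit now under strict mode, otherwise
    // buffer the diagnostic and keep lexing instead of bailing out with Err.
    fn emit_strict_mode_error(&mut self, msg: &str) {
        if self.strict {
            self.errors.push(msg.to_string());
        } else {
            self.module_errors.push(msg.to_string());
        }
    }
}

fn main() {
    let mut r = Recovery::default();
    r.emit_strict_mode_error("LegacyOctal");
    assert!(r.errors.is_empty() && r.module_errors.len() == 1);
}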
@@ -149,9 +149,8 @@ impl<'a, I: Tokens> Parser<I> {
             }
             _ => false,
         };
-        if self.input.syntax().typescript() && self.ctx().strict && is_eval_or_arguments
-        {
-            self.emit_err(cond.span(), SyntaxError::TS1100);
+        if self.input.syntax().typescript() && is_eval_or_arguments {
+            self.emit_strict_mode_err(cond.span(), SyntaxError::TS1100);
         }

         // TODO
@@ -322,24 +321,25 @@ impl<'a, I: Tokens> Parser<I> {
         if is!("let") || (self.input.syntax().typescript() && is!(IdentName)) || is!(IdentRef) {
             // TODO: Handle [Yield, Await]
             let id = self.parse_ident_name()?;
-            if self.ctx().strict {
-                match id.sym {
-                    // js_word!("eval") | js_word!("arguments") => {
-                    //     self.emit_err(id.span,
-                    // SyntaxError::EvalAndArgumentsInStrict)
-                    // }
-                    js_word!("yield")
-                    | js_word!("static")
-                    | js_word!("implements")
-                    | js_word!("let")
-                    | js_word!("package")
-                    | js_word!("private")
-                    | js_word!("protected")
-                    | js_word!("public") => {
-                        self.emit_err(self.input.prev_span(), SyntaxError::InvalidIdentInStrict);
-                    }
-                    _ => {}
-                }
-            }
+            match id.sym {
+                // js_word!("eval") | js_word!("arguments") => {
+                //     self.emit_err(id.span,
+                // SyntaxError::EvalAndArgumentsInStrict)
+                // }
+                js_word!("yield")
+                | js_word!("static")
+                | js_word!("implements")
+                | js_word!("let")
+                | js_word!("package")
+                | js_word!("private")
+                | js_word!("protected")
+                | js_word!("public") => {
+                    self.emit_strict_mode_err(
+                        self.input.prev_span(),
+                        SyntaxError::InvalidIdentInStrict,
+                    );
+                }
+                _ => {}
+            }

             if can_be_arrow && id.sym == js_word!("async") && is!(BindingIdent) {
@@ -1352,7 +1352,14 @@ impl<'a, I: Tokens> Parser<I> {
                 *type_ann = new_type_ann;
             }
             Pat::Expr(ref expr) => unreachable!("invalid pattern: Expr({:?})", expr),
-            Pat::Invalid(ref i) => unreachable!("invalid pattern: {:?}", i.span),
+            Pat::Invalid(ref i) => {
+                // We don't have to panic here.
+                // See: https://github.com/swc-project/swc/issues/1170
+                //
+                // Also, as an exact error is added to the errors while
+                // creating `Invalid`, we don't have to emit a new
+                // error.
+            }
         }

         if eat!('=') {
@@ -1553,8 +1560,8 @@ impl<'a, I: Tokens> Parser<I> {
             _ => false,
         };

-        if self.ctx().strict && is_eval_or_arguments {
-            self.emit_err(expr.span(), SyntaxError::TS1100);
+        if is_eval_or_arguments {
+            self.emit_strict_mode_err(expr.span(), SyntaxError::TS1100);
         }

         fn should_deny(e: &Expr, deny_call: bool) -> bool {
@@ -282,12 +282,10 @@ impl<'a, I: Tokens> Parser<I> {
         };
         let span = Span::new(start, arg.span().hi(), Default::default());

-        if self.ctx().strict {
-            if op == op!("delete") {
-                match *arg {
-                    Expr::Ident(ref i) => self.emit_err(i.span, SyntaxError::TS1102),
-                    _ => {}
-                }
-            }
-        }
+        if op == op!("delete") {
+            match *arg {
+                Expr::Ident(ref i) => self.emit_strict_mode_err(i.span, SyntaxError::TS1102),
+                _ => {}
+            }
+        }

@@ -73,7 +73,6 @@ impl<'a, I: Tokens> Parser<I> {
         let start = cur_pos!();

         let word = self.parse_with(|p| {
-            let strict = p.ctx().strict;
             let w = match cur!(true) {
                 Ok(&Word(..)) => match bump!() {
                     Word(w) => w,
@@ -98,10 +97,8 @@ impl<'a, I: Tokens> Parser<I> {
                 | Word::Ident(js_word!("package"))
                 | Word::Ident(js_word!("private"))
                 | Word::Ident(js_word!("protected"))
-                | Word::Ident(js_word!("public"))
-                    if strict =>
-                {
-                    p.emit_err(p.input.prev_span(), SyntaxError::InvalidIdentInStrict);
+                | Word::Ident(js_word!("public")) => {
+                    p.emit_strict_mode_err(p.input.prev_span(), SyntaxError::InvalidIdentInStrict);
                 }
                 _ => {}
             }
@@ -27,6 +27,16 @@ pub trait Tokens: Clone + Iterator<Item = TokenAndSpan> {
     /// It is required because parser should backtrack while parsing typescript
     /// code.
     fn add_error(&self, error: Error);

+    /// Add an error which is valid syntax in script mode.
+    ///
+    /// This error should be dropped if the input is not a module.
+    ///
+    /// Implementors should check [Context].module and buffer errors if
+    /// module is false. Also, implementors should move errors to the error
+    /// buffer on set_ctx if the parser mode becomes module mode.
+    fn add_module_mode_error(&self, error: Error);
+
     fn take_errors(&mut self) -> Vec<Error>;
 }
@@ -38,6 +48,7 @@ pub struct TokensInput {
     target: JscTarget,
     token_ctx: TokenContexts,
     errors: Rc<RefCell<Vec<Error>>>,
+    module_errors: Rc<RefCell<Vec<Error>>>,
 }

 impl TokensInput {
@@ -49,6 +60,7 @@ impl TokensInput {
             target,
             token_ctx: Default::default(),
             errors: Default::default(),
+            module_errors: Default::default(),
         }
     }
 }
@@ -63,6 +75,10 @@ impl Iterator for TokensInput {

 impl Tokens for TokensInput {
     fn set_ctx(&mut self, ctx: Context) {
+        if ctx.module && !self.module_errors.borrow().is_empty() {
+            let mut module_errors = self.module_errors.borrow_mut();
+            self.errors.borrow_mut().append(&mut *module_errors);
+        }
         self.ctx = ctx;
     }
@@ -98,6 +114,14 @@ impl Tokens for TokensInput {
     fn take_errors(&mut self) -> Vec<Error> {
         take(&mut self.errors.borrow_mut())
     }
+
+    fn add_module_mode_error(&self, error: Error) {
+        if self.ctx.module {
+            self.add_error(error);
+            return;
+        }
+        self.module_errors.borrow_mut().push(error);
+    }
 }

 /// Note: Lexer need access to parser's context to lex correctly.
@@ -196,6 +220,10 @@ impl<I: Tokens> Tokens for Capturing<I> {
     fn take_errors(&mut self) -> Vec<Error> {
         self.inner.take_errors()
     }
+
+    fn add_module_mode_error(&self, error: Error) {
+        self.inner.add_module_mode_error(error)
+    }
 }

 /// This struct is responsible for managing current token and peeked token.
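The contract documented on `add_module_mode_error` — buffer while in script mode, then promote the buffered diagnostics when `set_ctx` switches the parser into module mode — is the same in both implementations above. A self-contained sketch of that promotion step, using simplified types rather than the actual swc API:

use std::{cell::RefCell, rc::Rc};

#[derive(Default)]
struct Buffers {
    errors: Rc<RefCell<Vec<String>>>,
    module_errors: Rc<RefCell<Vec<String>>>,
}

impl Buffers {
    // Counterpart of set_ctx: once we know the input is a module, everything
    // that was only "maybe an error" becomes a real error.
    fn switch_to_module(&self) {
        let mut buffered = self.module_errors.borrow_mut();
        self.errors.borrow_mut().append(&mut *buffered);
    }
}

fn main() {
    let b = Buffers::default();
    b.module_errors.borrow_mut().push("LegacyCommentInModule".into());
    b.switch_to_module();
    assert_eq!(b.errors.borrow().len(), 1);
    assert!(b.module_errors.borrow().is_empty());
}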
@@ -29,6 +29,8 @@ mod jsx;
 mod object;
 mod pat;
 mod stmt;
+#[cfg(test)]
+mod tests;
 mod typescript;
 mod util;

@@ -62,8 +64,8 @@ impl<I: Tokens> Parser<I> {
     pub fn new_from(input: I) -> Self {
         Parser {
             emit_err: true,
-            input: Buffer::new(input),
             state: Default::default(),
+            input: Buffer::new(input),
         }
     }

@@ -119,8 +121,53 @@ impl<I: Tokens> Parser<I> {
         })
     }

+    /// Returns [Module] if it's a module and returns [Script] if it's not a
+    /// module.
+    ///
+    /// Note: This is not perfect yet. It means, some strict mode violations may
+    /// not be reported even if the method returns [Module].
+    pub fn parse_program(&mut self) -> PResult<Program> {
+        let start = cur_pos!();
+        let shebang = self.parse_shebang()?;
+
+        let body: Vec<ModuleItem> = self.parse_block_body(true, true, None)?;
+        let has_module_item = body.iter().any(|item| match item {
+            ModuleItem::ModuleDecl(..) => true,
+            _ => false,
+        });
+        if has_module_item && !self.ctx().module {
+            let ctx = Context {
+                module: true,
+                strict: true,
+                ..self.ctx()
+            };
+            // Emit buffered strict mode / module code violations
+            self.input.set_ctx(ctx);
+        }
+
+        Ok(if has_module_item {
+            Program::Module(Module {
+                span: span!(start),
+                body,
+                shebang,
+            })
+        } else {
+            let body = body
+                .into_iter()
+                .map(|item| match item {
+                    ModuleItem::ModuleDecl(_) => unreachable!("Module is handled above"),
+                    ModuleItem::Stmt(stmt) => stmt,
+                })
+                .collect();
+            Program::Script(Script {
+                span: span!(start),
+                body,
+                shebang,
+            })
+        })
+    }
+
     pub fn parse_module(&mut self) -> PResult<Module> {
         //TODO: parse() -> PResult<Program>
         let ctx = Context {
             module: true,
             strict: true,
@@ -154,7 +201,6 @@ impl<I: Tokens> Parser<I> {
     }

     #[cold]
-    #[inline(never)]
     fn emit_err(&self, span: Span, error: SyntaxError) {
         if !self.emit_err || !self.syntax().early_errors() {
             return;
@@ -166,7 +212,6 @@ impl<I: Tokens> Parser<I> {
     }

     #[cold]
-    #[inline(never)]
     fn emit_error(&self, error: Error) {
         if !self.emit_err || !self.syntax().early_errors() {
             return;
@@ -174,6 +219,14 @@ impl<I: Tokens> Parser<I> {

         self.input_ref().add_error(error);
     }
+
+    #[cold]
+    fn emit_strict_mode_err(&self, span: Span, error: SyntaxError) {
+        let error = Error {
+            error: Box::new((span, error)),
+        };
+        self.input_ref().add_module_mode_error(error);
+    }
 }

 #[cfg(test)]
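A small usage sketch for the new entry point, assuming the `Program` enum from swc_ecma_ast as used above; how the `Parser` itself is constructed is left out:

use swc_ecma_ast::Program;

// parse_program picks the variant for you: any ModuleItem::ModuleDecl in the
// body promotes the result (and the buffered module-mode errors) to
// Program::Module.
fn describe(program: &Program) -> &'static str {
    match program {
        Program::Module(_) => "module (import/export seen, strict mode enforced)",
        Program::Script(_) => "script (no module syntax found)",
    }
}

fn main() {
    // With a real parser: let program = parser.parse_program()?;
    //                     println!("{}", describe(&program));
    let _ = describe; // keep the sketch compiling without a parser instance
}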
@@ -25,8 +25,8 @@ impl<'a, I: Tokens> Parser<I> {

         // "yield" and "await" is **lexically** accepted.
         let ident = self.parse_ident(true, true)?;
-        if self.ctx().strict && (&*ident.sym == "arguments" || &*ident.sym == "eval") {
-            self.emit_err(ident.span, SyntaxError::EvalAndArgumentsInStrict);
+        if ident.sym == js_word!("arguments") || ident.sym == js_word!("eval") {
+            self.emit_strict_mode_err(ident.span, SyntaxError::EvalAndArgumentsInStrict);
         }
         if self.ctx().in_async && ident.sym == js_word!("await") {
             self.emit_err(ident.span, SyntaxError::ExpectedIdent);
@@ -304,11 +304,8 @@ impl<'a, I: Tokens> Parser<I> {
             _ => self.verify_expr(expr)?,
         };
         if let Expr::Ident(ref ident) = *expr {
-            if *ident.sym == js_word!("interface")
-                && self.input.had_line_break_before_cur()
-                && self.ctx().strict
-            {
-                self.emit_err(ident.span, SyntaxError::InvalidIdentInStrict);
+            if *ident.sym == js_word!("interface") && self.input.had_line_break_before_cur() {
+                self.emit_strict_mode_err(ident.span, SyntaxError::InvalidIdentInStrict);
             }

             eat!(';');

@@ -325,16 +322,14 @@ impl<'a, I: Tokens> Parser<I> {
             }
         }

-        if self.ctx().strict {
-            match *expr {
-                Expr::Ident(Ident { ref sym, span, .. }) => match *sym {
-                    js_word!("enum") | js_word!("interface") => {
-                        self.emit_err(span, SyntaxError::InvalidIdentInStrict);
-                    }
-                    _ => {}
-                },
-                _ => {}
-            }
-        }
+        match *expr {
+            Expr::Ident(Ident { ref sym, span, .. }) => match *sym {
+                js_word!("enum") | js_word!("interface") => {
+                    self.emit_strict_mode_err(span, SyntaxError::InvalidIdentInStrict);
+                }
+                _ => {}
+            },
+            _ => {}
+        }

         if self.syntax().typescript() {
@@ -842,9 +837,9 @@ impl<'a, I: Tokens> Parser<I> {
             self.emit_err(span, SyntaxError::TS2410);
         }

-        if self.ctx().strict {
+        {
             let span = self.input.cur_span();
-            self.emit_err(span, SyntaxError::WithInStrict);
+            self.emit_strict_mode_err(span, SyntaxError::WithInStrict);
         }

         let start = cur_pos!();
@@ -30,6 +30,18 @@ impl<'a, I: Tokens> Parser<I> {
             .into());
         }

+        // It's now an import statement
+
+        if !self.ctx().module {
+            // Switch to module mode
+            let ctx = Context {
+                module: true,
+                strict: true,
+                ..self.ctx()
+            };
+            self.set_ctx(ctx);
+        }
+
         expect!("import");

         if self.input.syntax().typescript() && is!(IdentRef) && peeked_is!('=') {
@@ -195,6 +207,16 @@ impl<'a, I: Tokens> Parser<I> {

     #[allow(clippy::cognitive_complexity)]
     fn parse_export(&mut self, decorators: Vec<Decorator>) -> PResult<ModuleDecl> {
+        if !self.ctx().module {
+            // Switch to module mode
+            let ctx = Context {
+                module: true,
+                strict: true,
+                ..self.ctx()
+            };
+            self.set_ctx(ctx);
+        }
+
         let start = cur_pos!();
         assert_and_bump!("export");
         let _ = cur!(true);
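Both `parse_import` and `parse_export` now promote the parser context the moment module syntax is seen. A toy model of that switch, using a simplified `Context` rather than the swc struct:

#[derive(Clone, Copy, Default)]
struct Context {
    module: bool,
    strict: bool,
}

// Sketch of the shared "switch to module mode" step: module code is always
// strict, so both flags are raised together, and only once.
fn promote_to_module(ctx: Context) -> Context {
    if ctx.module {
        ctx
    } else {
        Context { module: true, strict: true }
    }
}

fn main() {
    let ctx = promote_to_module(Context::default());
    assert!(ctx.module && ctx.strict);
}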
 ecmascript/parser/src/parser/tests.rs (new file, 70 lines)
@@ -0,0 +1,70 @@
+use crate::test_parser;
+use swc_ecma_ast::*;
+
+fn program(src: &'static str) -> Program {
+    test_parser(src, Default::default(), |p| p.parse_program())
+}
+
+/// Assert that Parser.parse_program returns [Program::Module].
+fn module(src: &'static str) -> Module {
+    program(src).expect_module()
+}
+
+/// Assert that Parser.parse_program returns [Program::Script].
+fn script(src: &'static str) -> Script {
+    program(src).expect_script()
+}
+
+/// Assert that Parser.parse_program returns [Program::Module] and has errors.
+#[track_caller]
+fn assert_module_error(src: &'static str) -> Module {
+    test_parser(src, Default::default(), |p| {
+        let program = p.parse_program()?;
+
+        let errors = p.take_errors();
+        assert_ne!(errors, vec![]);
+
+        let module = program.expect_module();
+
+        Ok(module)
+    })
+}
+
+#[test]
+fn parse_program_module_01() {
+    module("import 'foo';");
+    module("export const a = 1;");
+}
+
+#[test]
+fn parse_program_script_01() {
+    script("let a = 5;");
+    script("function foo() {}");
+    script("const a = 00176;");
+}
+
+#[test]
+fn parse_program_module_02() {
+    module(
+        "
+        function foo() {}
+        export default foo;
+        ",
+    );
+    module(
+        "
+        export function foo() {}
+        export default foo;
+        ",
+    );
+}
+
+#[test]
+fn parse_program_module_error_01() {
+    assert_module_error(
+        "
+        const a = 01234;
+        export default a;
+        ",
+    );
+}
@@ -1,6 +1,6 @@
-error: Unterminated string constant
-  --> $DIR/tests/test262-parser/fail/19699bcdea35eb46.js:1:19
+error: Legacy octal escape is not permitted in strict mode
+  --> $DIR/tests/test262-parser/fail/19699bcdea35eb46.js:1:17
   |
 1 | 'use strict'; ('\4')
-  |                   ^^
+  |                 ^^
   |
@@ -4,9 +4,3 @@ error: Destructuring bindings require initializers
 1 | "use strict"; for (let [a = let];;) {}
   |                    ^^^^^^^^^
   |
-
-error: 'implements', 'interface', 'let', 'package', 'private', 'protected', 'public', 'static', or 'yield' cannot be used as an identifier in strict mode
-  --> $DIR/tests/test262-parser/fail/37cb7557997d4fd6.js:1:29
-  |
-1 | "use strict"; for (let [a = let];;) {}
-  |                             ^^^
@@ -4,9 +4,3 @@ error: Legacy octal escape is not permitted in strict mode
 1 | 'use strict'; ('\000')
   |                 ^^
   |
-
-error: Legacy octal escape is not permitted in strict mode
-  --> $DIR/tests/test262-parser/fail/37e9fb0470e7ec3d.js:1:19
-  |
-1 | 'use strict'; ('\000')
-  |                   ^^
@@ -1,6 +1,6 @@
-error: Unterminated string constant
-  --> $DIR/tests/test262-parser/fail/3990bb94b19b1071.module.js:1:5
+error: Legacy octal escape is not permitted in strict mode
+  --> $DIR/tests/test262-parser/fail/3990bb94b19b1071.module.js:1:3
   |
 1 | ('\1')
-  |     ^^
+  |   ^^
   |
@@ -4,9 +4,3 @@ error: Legacy octal escape is not permitted in strict mode
 1 | 'use strict'; ('\001')
   |                 ^^
   |
-
-error: Legacy octal escape is not permitted in strict mode
-  --> $DIR/tests/test262-parser/fail/6ac4f95d48362a35.js:1:19
-  |
-1 | 'use strict'; ('\001')
-  |                   ^^
@@ -1,6 +1,6 @@
-error: Unexpected eof
-  --> $DIR/tests/test262-parser/fail/ca2716d236c027cd.js:1:51
+error: Legacy octal escape is not permitted in strict mode
+  --> $DIR/tests/test262-parser/fail/ca2716d236c027cd.js:1:38
   |
 1 | function hello() { 'use strict'; ({ "\1": 42 }); }
-  |                                                   ^
+  |                                      ^^
   |
@@ -1,6 +1,6 @@
-error: Unterminated string constant
-  --> $DIR/tests/test262-parser/fail/d04aecd166354406.js:1:19
+error: Legacy octal escape is not permitted in strict mode
+  --> $DIR/tests/test262-parser/fail/d04aecd166354406.js:1:17
   |
 1 | 'use strict'; ('\1')
-  |                   ^^
+  |                 ^^
   |
@@ -4,9 +4,3 @@ error: Destructuring bindings require initializers
 1 | "use strict"; for (let {a: b = let};;) {}
   |                    ^^^^^^^^^^^^
   |
-
-error: 'implements', 'interface', 'let', 'package', 'private', 'protected', 'public', 'static', or 'yield' cannot be used as an identifier in strict mode
-  --> $DIR/tests/test262-parser/fail/d17d3aebb6a3cf43.js:1:32
-  |
-1 | "use strict"; for (let {a: b = let};;) {}
-  |                                ^^^
@@ -0,0 +1 @@
+const toString: (local)(this: Function) => string) = undefined;
@@ -0,0 +1,12 @@
+error: Expected a semicolon
+  --> $DIR/tests/typescript-errors/issue-1170-1/input.ts:1:24
+   |
+ 1 | const toString: (local)(this: Function) => string) = undefined;
+   |                        ^
+
+error: Not a pattern
+  --> $DIR/tests/typescript-errors/issue-1170-1/input.ts:1:25
+   |
+ 1 | const toString: (local)(this: Function) => string) = undefined;
+   |                         ^^^^
@@ -159,12 +159,12 @@ fn reference_tests(tests: &mut Vec<TestDescAndFn>, errors: bool) -> Result<(), i
         }
     } else {
         with_parser(is_backtrace_enabled(), &path, !errors, |p| {
-            let module = p.parse_typescript_module()?.fold_with(&mut Normalizer {
+            let program = p.parse_program()?.fold_with(&mut Normalizer {
                 drop_span: false,
                 is_test262: false,
             });

-            let json = serde_json::to_string_pretty(&module)
+            let json = serde_json::to_string_pretty(&program)
                 .expect("failed to serialize module as json");

             if StdErr::from(json.clone())
@@ -174,12 +174,12 @@ fn reference_tests(tests: &mut Vec<TestDescAndFn>, errors: bool) -> Result<(), i
                 panic!()
             }

-            let module = module.fold_with(&mut Normalizer {
+            let program = program.fold_with(&mut Normalizer {
                 drop_span: true,
                 is_test262: false,
             });

-            let deser = match serde_json::from_str::<Module>(&json) {
+            let deser = match serde_json::from_str::<Program>(&json) {
                 Ok(v) => v.fold_with(&mut Normalizer {
                     drop_span: true,
                     is_test262: false,
@@ -196,7 +196,7 @@ fn reference_tests(tests: &mut Vec<TestDescAndFn>, errors: bool) -> Result<(), i
                 }
             };

-            assert_eq!(module, deser, "JSON:\n{}", json);
+            assert_eq!(program, deser, "JSON:\n{}", json);

             Ok(())
         })
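The reference tests above now round-trip a `Program` (rather than a `Module`) through JSON. The core of that check, sketched with the serde_json calls the diff already relies on and with error handling simplified:

use swc_ecma_ast::Program;

fn round_trip(program: &Program) -> Program {
    // Serialize the parsed AST, then deserialize it back; the caller compares
    // the two with assert_eq!, as reference_tests does.
    let json = serde_json::to_string_pretty(program).expect("failed to serialize program as json");
    serde_json::from_str::<Program>(&json).expect("failed to deserialize program")
}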
@@ -1,5 +1,5 @@
 {
-    "type": "Module",
+    "type": "Script",
     "span": {
         "start": 0,
         "end": 25,
(The same one-line change — the root "type" switching from "Module" to "Script" — repeats across the remaining sixty-odd parser test snapshot .json fixtures touched by this commit; only the "span" start/end offsets differ from file to file.)
Some files were not shown because too many files have changed in this diff.