Mirror of https://github.com/swc-project/swc.git, synced 2024-11-22 15:25:01 +03:00
chore(ci): Configure clippy (#3250)

parent 8652b2df99, commit 978de5943e
.github/workflows/cargo-lints.yml (vendored, new file, 41 lines)
@@ -0,0 +1,41 @@
+name: Cargo lint
+
+on: [push, pull_request]
+
+env:
+  CARGO_INCREMENTAL: 0
+
+jobs:
+  fmt:
+    name: fmt
+    runs-on: ubuntu-latest
+    env:
+      RUST_LOG: "0"
+    steps:
+      - uses: actions/checkout@v2
+
+      # We explicitly do this to cache properly.
+      - name: Install Rust
+        uses: actions-rs/toolchain@v1
+        with:
+          profile: minimal
+          components: rustfmt
+
+      - name: Run cargo fmt
+        run: cargo fmt --all -- --check
+
+  clippy:
+    name: Clippy
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Install Rust
+        uses: actions-rs/toolchain@v1
+        with:
+          profile: minimal
+          components: clippy
+
+      - name: Check
+        run: |
+          cargo clippy --all
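Aside — the job above runs `cargo clippy --all` with no extra flags; the rest of this commit is the warnings it surfaced, fixed. As an illustrative Rust sketch (not part of the diff; `make_compiler` is a hypothetical stand-in), the most common fix here is dropping a `.clone()` whose source is never used again:

    // Illustrative only — not from this commit (clippy::redundant_clone).
    fn make_compiler(cm: String) -> String {
        // Hypothetical stand-in for `Compiler::new(cm)`.
        format!("Compiler({})", cm)
    }

    fn main() {
        let cm = String::from("source map");

        // Before: let compiler = make_compiler(cm.clone());
        // After (as done throughout this diff): pass ownership directly.
        let compiler = make_compiler(cm);
        println!("{}", compiler);
    }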
.github/workflows/cargo.yml (vendored, 18 lines removed)
@@ -38,24 +38,6 @@ jobs:
     run: |
       cargo deny check

-  fmt:
-    name: fmt
-    runs-on: ubuntu-latest
-    env:
-      RUST_LOG: "0"
-    steps:
-      - uses: actions/checkout@v2
-
-      # We explicitly do this to cache properly.
-      - name: Install Rust
-        uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          components: rustfmt
-
-      - name: Run cargo fmt
-        run: cargo fmt --all -- --check
-
   check:
     name: Check
     runs-on: ${{ matrix.os }}
.vscode/settings.json (vendored, 1 line added)
@@ -20,6 +20,7 @@
     "eslint.enable": false,
     "cSpell.allowCompoundWords": true,
     "cSpell.caseSensitive": true,
+    "rust-analyzer.checkOnSave.command": "clippy",
     "rust-analyzer.checkOnSave.features": [
         // We use this to make IDE faster
        "rust-analyzer"
@@ -168,7 +168,7 @@ pub fn expand(
     } else {
         fn make_pat(lit: Lit) -> (Pat, Pat) {
             let s = match lit.clone() {
-                Lit::Str(s) => s.value().clone(),
+                Lit::Str(s) => s.value(),
                 _ => {
                     unreachable!()
                 }
@@ -178,7 +178,7 @@ pub fn expand(
         attrs: Default::default(),
         expr: Box::new(Expr::Lit(ExprLit {
             attrs: Default::default(),
-            lit: lit.clone(),
+            lit,
         })),
     }),
     Pat::Lit(PatLit {
@@ -2,7 +2,6 @@

 extern crate proc_macro;

-use darling;
 use pmutil::{smart_quote, Quote, ToTokensExt};
 use swc_macros_common::prelude::*;
 use syn::{self, visit_mut::VisitMut, *};
@@ -130,15 +129,13 @@ pub fn ast_serde(
         }))
     });

-    let quote = item.quote_with(smart_quote!(Vars { input, serde_tag, serde_rename }, {
+    item.quote_with(smart_quote!(Vars { input, serde_tag, serde_rename }, {
         #[derive(::serde::Serialize, ::serde::Deserialize)]
         serde_tag
         #[serde(rename_all = "camelCase")]
         serde_rename
         input
-    }));
-
-    quote
+    }))
 }
 };
@@ -231,10 +228,9 @@ pub fn ast_node(
         }))
     });

-    let ast_node_impl = match args {
-        Some(ref args) => Some(ast_node_macro::expand_struct(args.clone(), input.clone())),
-        None => None,
-    };
+    let ast_node_impl = args
+        .as_ref()
+        .map(|args| ast_node_macro::expand_struct(args.clone(), input.clone()));

     let mut quote =
         item.quote_with(smart_quote!(Vars { input, serde_tag, serde_rename }, {
@@ -276,7 +272,7 @@ fn print_item<T: Into<TokenStream>>(
     let item = Quote::new(def_site::<Span>()).quote_with(smart_quote!(
         Vars {
             item: item.into(),
-            NAME: Ident::new(&const_name, Span::call_site())
+            NAME: Ident::new(const_name, Span::call_site())
         },
         {
             const NAME: () = { item };
@@ -90,8 +90,7 @@ impl DiffMinifiedCommand {
     terser_module.visit_mut_with(&mut Normalizer::default());

     let swc_output = print_js(cm.clone(), &swc_module, None).context("failed to print js")?;
-    let terser_output =
-        print_js(cm.clone(), &terser_module, None).context("failed to print js")?;
+    let terser_output = print_js(cm, &terser_module, None).context("failed to print js")?;

     if swc_output == terser_output {
         return Ok(());
@@ -149,7 +148,7 @@ impl VisitMut for Normalizer {
     e.visit_mut_children_with(self);

     if let Some(args) = &e.args {
-        if args.len() == 0 {
+        if args.is_empty() {
             e.args = None;
         }
     }
@@ -162,7 +161,6 @@ impl VisitMut for Normalizer {
         Stmt::Decl(Decl::Var(v)) => {
             if v.decls.is_empty() {
                 s.take();
-                return;
             }
         }
@@ -188,7 +186,6 @@ impl VisitMut for BeforeDiffNormalizer {
         Stmt::Block(bs) => {
             if bs.stmts.len() == 1 {
                 *s = bs.stmts[0].take();
-                return;
             }
         }
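Aside — the `args.len() == 0` → `args.is_empty()` change above is clippy's `len_zero` lint; `is_empty` states the intent directly and also works for types without a cheap `len`. A minimal illustrative sketch (not from the diff):

    // Illustrative only (clippy::len_zero).
    fn main() {
        let args: Vec<u32> = Vec::new();

        // Before: if args.len() == 0 { ... }
        // After:
        if args.is_empty() {
            println!("no arguments");
        }
    }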
@@ -57,7 +57,7 @@ impl DependencyCollector {
     info!("Loading {}", name);

     let fm = match &*name {
-        FileName::Real(path) => self.cm.load_file(&path)?,
+        FileName::Real(path) => self.cm.load_file(path)?,
         FileName::Custom(..) => return Ok(()),
         _ => {
             todo!("load({:?})", name)
@@ -49,7 +49,7 @@ impl ReduceMinCommand {
     };

     let mut runner = Runner {
-        cm: cm.clone(),
+        cm,
         comments: Default::default(),
         working_dir: self.working_dir,
         build_command: self.build_command,
@@ -40,7 +40,7 @@ pub(crate) fn parse(fm: &SourceFile) -> Result<Module> {

     parser.parse_module().map_err(|err| {
         HANDLER.with(|handler| {
-            err.into_diagnostic(&handler).emit();
+            err.into_diagnostic(handler).emit();
         });

         anyhow!("failed to parse module")
@@ -63,7 +63,7 @@ pub(crate) fn print_js(
         wr,
     };

-    emitter.emit_module(&m)?;
+    emitter.emit_module(m)?;
 }

 Ok(String::from_utf8(buf)?)
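Aside — `err.into_diagnostic(&handler)` → `err.into_diagnostic(handler)` is clippy's `needless_borrow`: `handler` is already a reference, so `&handler` creates a `&&T` the compiler immediately re-derefs. Illustrative sketch (the `Handler` type here is a stand-in, not swc's):

    // Illustrative only (clippy::needless_borrow).
    struct Handler;

    fn emit(handler: &Handler) {
        let _ = handler;
    }

    fn main() {
        let handler = &Handler;
        // Before: emit(&handler);  // passes &&Handler, auto-derefed
        emit(handler); // pass the existing reference through unchanged
    }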
@@ -1,7 +1,5 @@
 extern crate proc_macro;

-use syn;
-
 use swc_macros_common::prelude::*;

 mod expand;
@@ -52,10 +52,7 @@ pub fn parse_tag_item(i: Input) -> IResult<Input, TagItem> {
     "access" => {
         let (input, access) = parse_one_of(i, &["private", "protected", "package", "public"])?;
         i = input;
-        Tag::Access(AccessTag {
-            span,
-            access: access.into(),
-        })
+        Tag::Access(AccessTag { span, access })
     }

     "alias" => {
@@ -176,17 +173,14 @@ pub fn parse_tag_item(i: Input) -> IResult<Input, TagItem> {
         i = input;
         Tag::Exports(ExportsTag {
             span,
-            module_name: text.into(),
+            module_name: text,
         })
     }

     "external" | "host" => {
         let (input, name) = parse_line(i)?;
         i = input;
-        Tag::External(ExternalTag {
-            span,
-            name: name.into(),
-        })
+        Tag::External(ExternalTag { span, name })
     }

     "file" | "fileoverview" | "overview" => {
@@ -481,7 +475,7 @@ pub fn parse_tag_item(i: Input) -> IResult<Input, TagItem> {
     i,
     TagItem {
         span,
-        tag_name: tag_name.into(),
+        tag_name,
         tag,
     },
 ))
@@ -49,7 +49,7 @@ fn fixture(path: PathBuf) {
         continue;
     }

-    if !cmt.text.starts_with("*") {
+    if !cmt.text.starts_with('*') {
         continue;
     }
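Aside — `starts_with("*")` → `starts_with('*')` is clippy's `single_char_pattern`: a `char` pattern avoids the substring-search machinery a one-character `&str` goes through. Illustrative sketch (not from the diff):

    // Illustrative only (clippy::single_char_pattern).
    fn main() {
        let text = "* doc comment";

        // Before: text.starts_with("*")
        // After:
        if text.starts_with('*') {
            println!("looks like a doc comment");
        }
    }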
@@ -155,7 +155,7 @@ impl Task for BundleTask {

     Err(napi::Error::new(
         Status::GenericFailure,
-        format!("panic detected"),
+        "panic detected".to_string(),
     ))
 }
@@ -217,8 +217,8 @@ pub(crate) fn bundle(
     .config
     .alias
     .get(&target_env)
-    .map(|a| a.clone())
-    .unwrap_or_else(|| Default::default());
+    .cloned()
+    .unwrap_or_default();

 let resolver: Box<dyn Resolve> = if let Some((base_url, paths)) = paths {
     Box::new(paths_resolver(target_env, alias, base_url, paths))
@@ -228,7 +228,7 @@ pub(crate) fn bundle(

 Ok(AsyncTask::with_optional_signal(
     BundleTask {
-        swc: c.clone(),
+        swc: c,
         config: ConfigItem {
             loader,
             resolver,
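Aside — two lints land in one chain above: `.map(|a| a.clone())` → `.cloned()` (clippy's `map_clone`) and `.unwrap_or_else(|| Default::default())` → `.unwrap_or_default()`. Illustrative sketch (the `aliases` map is hypothetical):

    // Illustrative only (clippy::map_clone + unwrap_or_default).
    use std::collections::HashMap;

    fn main() {
        let aliases: HashMap<String, Vec<String>> = HashMap::new();

        // Before: aliases.get("node").map(|a| a.clone())
        //             .unwrap_or_else(|| Default::default());
        // After:
        let alias: Vec<String> = aliases.get("node").cloned().unwrap_or_default();
        println!("{:?}", alias);
    }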
@@ -22,7 +22,7 @@ mod util;
 static COMPILER: Lazy<Arc<Compiler>> = Lazy::new(|| {
     let cm = Arc::new(SourceMap::new(FilePathMapping::empty()));

-    Arc::new(Compiler::new(cm.clone()))
+    Arc::new(Compiler::new(cm))
 });

 #[napi::module_init]
@@ -58,7 +58,7 @@ impl Task for MinifyTask {
     try_with(self.c.cm.clone(), false, |handler| {
         let fm = input.to_file(self.c.cm.clone());

-        self.c.minify(fm, &handler, &options)
+        self.c.minify(fm, handler, &options)
     })
     .convert_err()
 }
@@ -89,5 +89,5 @@ pub fn minify_sync(code: Buffer, opts: Buffer) -> napi::Result<TransformOutput>
     let fm = code.to_file(c.cm.clone());

-    try_with(c.cm.clone(), false, |handler| c.minify(fm, &handler, &opts)).convert_err()
+    try_with(c.cm.clone(), false, |handler| c.minify(fm, handler, &opts)).convert_err()
 }
@@ -45,7 +45,7 @@ impl Task for ParseTask {
     let program = try_with(self.c.cm.clone(), false, |handler| {
         self.c.parse_js(
             fm,
-            &handler,
+            handler,
             options.target,
             options.syntax,
             options.is_module,
@@ -113,7 +113,7 @@ pub fn parse(

     AsyncTask::with_optional_signal(
         ParseTask {
-            c: c.clone(),
+            c,
             filename,
             src,
             options,
@@ -43,7 +43,7 @@ impl Task for PrintTask {
         .unwrap_or(SourceMapsConfig::Bool(false)),
     &Default::default(),
     None,
-    options.config.clone().minify,
+    options.config.minify,
     None,
 )
 .convert_err()
@@ -65,7 +65,7 @@ pub fn print(

 Ok(AsyncTask::with_optional_signal(
     PrintTask {
-        c: c.clone(),
+        c,
         program_json,
         options,
     },
@@ -77,7 +77,7 @@ pub fn print(
 pub fn print_sync(program: String, options: Buffer) -> napi::Result<TransformOutput> {
     let c = get_compiler();

-    let program: Program = deserialize_json(&program.as_str())?;
+    let program: Program = deserialize_json(program.as_str())?;

     let options: Options = get_deserialized(&options)?;
@@ -51,14 +51,14 @@ impl Task for TransformTask {
     self.c.run(|| match &self.input {
         Input::Program(ref s) => {
             let program: Program =
-                deserialize_json(&s).expect("failed to deserialize Program");
+                deserialize_json(s).expect("failed to deserialize Program");
             // TODO: Source map
-            self.c.process_js(&handler, program, &options)
+            self.c.process_js(handler, program, &options)
         }

         Input::File(ref path) => {
             let fm = self.c.cm.load_file(path).context("failed to load file")?;
-            self.c.process_js_file(fm, &handler, &options)
+            self.c.process_js_file(fm, handler, &options)
         }

         Input::Source { src } => {
@@ -71,7 +71,7 @@ impl Task for TransformTask {
                 src.to_string(),
             );

-            self.c.process_js_file(fm, &handler, &options)
+            self.c.process_js_file(fm, handler, &options)
         }
     })
 },
@@ -101,11 +101,7 @@ pub fn transform(
         Input::Source { src }
     };

-    let task = TransformTask {
-        c: c.clone(),
-        input,
-        options,
-    };
+    let task = TransformTask { c, input, options };
     Ok(AsyncTask::with_optional_signal(task, signal))
 }

@@ -124,7 +120,7 @@ pub fn transform_sync(s: String, is_module: bool, opts: Buffer) -> napi::Result<
     if is_module {
         let program: Program =
             deserialize_json(s.as_str()).context("failed to deserialize Program")?;
-        c.process_js(&handler, program, &options)
+        c.process_js(handler, program, &options)
     } else {
         let fm = c.cm.new_source_file(
             if options.filename.is_empty() {
@@ -134,7 +130,7 @@ pub fn transform_sync(s: String, is_module: bool, opts: Buffer) -> napi::Result<
         },
         s,
     );
-    c.process_js_file(fm, &handler, &options)
+    c.process_js_file(fm, handler, &options)
 }
 })
 })
@@ -153,7 +149,7 @@ pub fn transform_file(
     let options = String::from_utf8_lossy(options.as_ref()).to_string();
     let path = clean(&src);
     let task = TransformTask {
-        c: c.clone(),
+        c,
         input: Input::File(path.into()),
         options,
     };
@@ -179,10 +175,10 @@ pub fn transform_file_sync(
     if is_module {
         let program: Program =
             deserialize_json(s.as_str()).context("failed to deserialize Program")?;
-        c.process_js(&handler, program, &options)
+        c.process_js(handler, program, &options)
     } else {
         let fm = c.cm.load_file(Path::new(&s)).expect("failed to load file");
-        c.process_js_file(fm, &handler, &options)
+        c.process_js_file(fm, handler, &options)
    }
 })
 })
@@ -65,7 +65,7 @@ pub(crate) fn deserialize_json<T>(json: &str) -> Result<T, serde_json::Error>
 where
     T: DeserializeOwned,
 {
-    let mut deserializer = serde_json::Deserializer::from_str(&json);
+    let mut deserializer = serde_json::Deserializer::from_str(json);
     deserializer.disable_recursion_limit();

     T::deserialize(&mut deserializer)
@@ -104,7 +104,7 @@ fn derive_fmt(i: &DeriveInput, trait_path: TokenStream) -> ItemImpl {

 fn get_str_value(attrs: &[Attribute]) -> String {
     // TODO: Accept multiline string
-    let docs: Vec<_> = attrs.iter().map(doc_str).filter_map(|o| o).collect();
+    let docs: Vec<_> = attrs.iter().map(doc_str).flatten().collect();
     for raw_line in docs {
         let line = raw_line.trim();
         if line.starts_with('`') && line.ends_with('`') {
@@ -119,14 +119,14 @@ fn get_str_value(attrs: &[Attribute]) -> String {
 }

 fn make_from_str(i: &DeriveInput) -> ItemImpl {
-    let arms = Binder::new_from(&i)
+    let arms = Binder::new_from(i)
         .variants()
         .into_iter()
         .map(|v| {
             // Qualified path of variant.
             let qual_name = v.qual_path();

-            let str_value = get_str_value(&v.attrs());
+            let str_value = get_str_value(v.attrs());

             let pat: Pat = Quote::new(def_site::<Span>())
                 .quote_with(smart_quote!(Vars { str_value }, { str_value }))
@@ -196,14 +196,14 @@ fn make_from_str(i: &DeriveInput) -> ItemImpl {
 }

 fn make_as_str(i: &DeriveInput) -> ItemImpl {
-    let arms = Binder::new_from(&i)
+    let arms = Binder::new_from(i)
         .variants()
         .into_iter()
         .map(|v| {
             // Qualified path of variant.
             let qual_name = v.qual_path();

-            let str_value = get_str_value(&v.attrs());
+            let str_value = get_str_value(v.attrs());

             let body = Box::new(
                 Quote::new(def_site::<Span>())
@@ -12,9 +12,7 @@ use test::Bencher;
 fn mk() -> swc::Compiler {
     let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));

-    let c = swc::Compiler::new(cm.clone());
-
-    c
+    swc::Compiler::new(cm)
 }

 fn bench_file(b: &mut Bencher, path: &Path) {
@@ -29,7 +27,7 @@ fn bench_file(b: &mut Bencher, path: &Path) {

 let result = {
     c.process_js_file(
-        fm.clone(),
+        fm,
         &handler,
         &Options {
             is_module: IsModule::Bool(true),
@@ -22,9 +22,7 @@ static SOURCE: &str = include_str!("assets/Observable.ts");
 fn mk() -> swc::Compiler {
     let cm = Arc::new(SourceMap::new(FilePathMapping::empty()));

-    let c = swc::Compiler::new(cm.clone());
-
-    c
+    swc::Compiler::new(cm)
 }

 fn parse(c: &swc::Compiler) -> (Arc<SourceFile>, Program) {
@@ -122,7 +122,7 @@ impl<'a, 'b, P: swc_ecma_visit::Fold> PassBuilder<'a, 'b, P> {
     self,
     c: GlobalPassOption,
 ) -> PassBuilder<'a, 'b, impl swc_ecma_visit::Fold> {
-    let pass = c.build(&self.cm, &self.handler);
+    let pass = c.build(self.cm, self.handler);
     self.then(pass)
 }
@@ -176,7 +176,7 @@ impl<'a, 'b, P: swc_ecma_visit::Fold> PassBuilder<'a, 'b, P> {
     let compat_pass = if let Some(env) = self.env {
         Either::Left(swc_ecma_preset_env::preset_env(
             self.top_level_mark,
-            comments.clone(),
+            comments,
             env,
         ))
     } else {
@@ -225,7 +225,7 @@ impl<'a, 'b, P: swc_ecma_visit::Fold> PassBuilder<'a, 'b, P> {
     Optional::new(
         compat::es2015(
             self.top_level_mark,
-            comments.clone(),
+            comments,
             compat::es2015::Config {
                 computed_props: compat::es2015::computed_props::Config {
                     loose: self.loose
@@ -100,7 +100,7 @@ impl<'de> Deserialize<'de> for IsModule {
 where
     E: serde::de::Error,
 {
-    return Ok(IsModule::Bool(b));
+    Ok(IsModule::Bool(b))
 }

 fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
@@ -259,7 +259,7 @@ impl Options {
 where
     P: 'a + swc_ecma_visit::Fold,
 {
-    let mut config = config.unwrap_or_else(Default::default);
+    let mut config = config.unwrap_or_default();
     config.merge(&self.config);

     let mut source_maps = self.source_maps.clone();
@@ -357,13 +357,11 @@ impl Options {
     let enable_simplifier = optimizer.as_ref().map(|v| v.simplify).unwrap_or_default();

     let optimization = {
-        let pass = if let Some(opts) = optimizer.and_then(|o| o.globals) {
+        if let Some(opts) = optimizer.and_then(|o| o.globals) {
             Either::Left(opts.build(cm, handler))
         } else {
             Either::Right(noop())
-        };
-
-        pass
+        }
     };

     let top_level_mark = self
@@ -378,7 +376,7 @@ impl Options {
         json_parse_pass
     );

-    let pass = PassBuilder::new(&cm, &handler, loose, assumptions, top_level_mark, pass)
+    let pass = PassBuilder::new(cm, handler, loose, assumptions, top_level_mark, pass)
         .target(target)
         .skip_helper_injection(self.skip_helper_injection)
         .minify(js_minify)
@@ -424,7 +422,7 @@ impl Options {
     pragma_frag: Some(transform.react.pragma_frag.clone()),
     ..Default::default()
 },
-comments.clone(),
+comments,
 top_level_mark
 ),
 syntax.typescript()
@@ -434,12 +432,7 @@ impl Options {
 custom_before_pass(&program),
 // handle jsx
 Optional::new(
-    react::react(
-        cm.clone(),
-        comments.clone(),
-        transform.react,
-        top_level_mark
-    ),
+    react::react(cm.clone(), comments, transform.react, top_level_mark),
     syntax.jsx()
 ),
 pass,
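Aside — `return Ok(IsModule::Bool(b));` as the final statement becomes the tail expression `Ok(IsModule::Bool(b))`: clippy's `needless_return`. Illustrative sketch (simplified signature, not swc's):

    // Illustrative only (clippy::needless_return).
    fn is_module(b: bool) -> Result<bool, ()> {
        // Before: return Ok(b);
        // After: the last expression of the body is the return value.
        Ok(b)
    }

    fn main() {
        assert_eq!(is_module(true), Ok(true));
    }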
@@ -866,7 +859,7 @@ impl FileMatcher {
 }

 if !CACHE.contains_key(&*s) {
-    let re = Regex::new(&s).with_context(|| format!("invalid regex: {}", s))?;
+    let re = Regex::new(s).with_context(|| format!("invalid regex: {}", s))?;
     CACHE.insert(s.clone(), re);
 }
@@ -1244,7 +1237,7 @@ impl GlobalPassOption {
 match &self.envs {
     GlobalInliningPassEnvs::List(env_list) => {
         static CACHE: Lazy<DashMap<Vec<String>, ValuesMap, ahash::RandomState>> =
-            Lazy::new(|| Default::default());
+            Lazy::new(Default::default);

         let cache_key = env_list.iter().cloned().collect::<Vec<_>>();
         if let Some(v) = CACHE.get(&cache_key).as_deref().cloned() {
@@ -1266,7 +1259,7 @@ impl GlobalPassOption {
     GlobalInliningPassEnvs::Map(map) => {
         static CACHE: Lazy<
             DashMap<Vec<(JsWord, JsWord)>, ValuesMap, ahash::RandomState>,
-        > = Lazy::new(|| Default::default());
+        > = Lazy::new(Default::default);

         let cache_key = self
             .vars
@@ -1279,7 +1272,7 @@ impl GlobalPassOption {
         let map = mk_map(
             cm,
             handler,
-            map.into_iter().map(|(k, v)| (k.clone(), v.clone())),
+            map.iter().map(|(k, v)| (k.clone(), v.clone())),
             false,
         );
         CACHE.insert(cache_key, map.clone());
@@ -1291,7 +1284,7 @@ impl GlobalPassOption {

 let global_exprs = {
     static CACHE: Lazy<DashMap<Vec<(JsWord, JsWord)>, GlobalExprMap, ahash::RandomState>> =
-        Lazy::new(|| Default::default());
+        Lazy::new(Default::default);

     let cache_key = self
         .vars
@@ -1322,7 +1315,7 @@ impl GlobalPassOption {

 let global_map = {
     static CACHE: Lazy<DashMap<Vec<(JsWord, JsWord)>, ValuesMap, ahash::RandomState>> =
-        Lazy::new(|| Default::default());
+        Lazy::new(Default::default);

     let cache_key = self
         .vars
@@ -1599,7 +1592,7 @@ impl Merge for HiddenTransformConfig {

 fn build_resolver(base_url: PathBuf, paths: CompiledPaths) -> SwcImportResolver {
     static CACHE: Lazy<DashMap<(PathBuf, CompiledPaths), SwcImportResolver, ahash::RandomState>> =
-        Lazy::new(|| Default::default());
+        Lazy::new(Default::default);

     if let Some(cached) = CACHE.get(&(base_url.clone(), paths.clone())) {
         return (*cached).clone();
@@ -1608,7 +1601,7 @@ fn build_resolver(base_url: PathBuf, paths: CompiledPaths) -> SwcImportResolver
     let r = {
         let r = TsConfigResolver::new(
             NodeModulesResolver::default(),
-            base_url.clone().into(),
+            base_url.clone(),
             paths.clone(),
         );
         let r = CachingResolver::new(40, r);
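Aside — `Lazy::new(|| Default::default())` → `Lazy::new(Default::default)` is clippy's `redundant_closure`: the closure only forwards to a callable that already has the right signature, so the callable can be passed directly. Illustrative sketch (shown with a plain function pointer to stay dependency-free; the diff uses `once_cell`-style `Lazy`):

    // Illustrative only (clippy::redundant_closure).
    fn make<T>(f: fn() -> T) -> T {
        f()
    }

    fn main() {
        // Before: let v = make(|| Vec::<u32>::default());
        // After:
        let v = make(Vec::<u32>::default);
        assert!(v.is_empty());
    }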
@@ -224,8 +224,7 @@ where
 {
     let wr = Box::new(LockedWriter::default());

-    let e_wr =
-        EmitterWriter::new(wr.clone(), Some(cm.clone()), false, true).skip_filename(skip_filename);
+    let e_wr = EmitterWriter::new(wr.clone(), Some(cm), false, true).skip_filename(skip_filename);
     let handler = Handler::with_emitter(true, false, Box::new(e_wr));

     let ret = HANDLER.set(&handler, || op(&handler));
@@ -298,7 +297,7 @@ impl Compiler {
 where
     F: FnOnce() -> R,
 {
-    GLOBALS.set(&self.globals, || op())
+    GLOBALS.set(&self.globals, op)
 }

 fn get_orig_src_map(
@@ -316,10 +315,7 @@ impl Compiler {
 InputSourceMap::Bool(true) => {
     let s = "sourceMappingURL=";
     let idx = fm.src.rfind(s);
-    let src_mapping_url = match idx {
-        None => None,
-        Some(idx) => Some(&fm.src[idx + s.len()..]),
-    };
+    let src_mapping_url = idx.map(|idx| &fm.src[idx + s.len()..]);

     // Load original source map if possible
     match &name {
@@ -367,7 +363,7 @@ impl Compiler {
 }
 _ => {
     tracing::error!("Failed to load source map for non-file input");
-    return Ok(None);
+    Ok(None)
 }
 }
 }
@@ -501,7 +497,7 @@ impl Compiler {
     return true;
 }

-vc.retain(|c: &Comment| c.text.starts_with("!"));
+vc.retain(|c: &Comment| c.text.starts_with('!'));
 !vc.is_empty()
 };
 self.comments.leading.retain(preserve_excl);
@@ -690,7 +686,7 @@ impl Compiler {
     ..
 } = opts;

-let root = root.as_ref().unwrap_or_else(|| &CUR_DIR);
+let root = root.as_ref().unwrap_or(&CUR_DIR);

 let config_file = match config_file {
     Some(ConfigFile::Str(ref s)) => Some(load_swcrc(Path::new(&s))?),
@@ -754,7 +750,7 @@ impl Compiler {
     }
 }

-let config_file = config_file.unwrap_or_else(|| Rc::default());
+let config_file = config_file.unwrap_or_default();
 let config = config_file.into_config(Some(path))?;

 return Ok(config);
@@ -810,7 +806,7 @@ impl Compiler {
 },
 opts.output_path.as_deref(),
 opts.source_file_name.clone(),
-&handler,
+handler,
 opts.is_module,
 Some(config),
 Some(&self.comments),
@@ -825,9 +821,7 @@ impl Compiler {
 F: FnOnce() -> Ret,
 {
     self.run(|| {
-        helpers::HELPERS.set(&Helpers::new(external_helpers), || {
-            HANDLER.set(handler, || op())
-        })
+        helpers::HELPERS.set(&Helpers::new(external_helpers), || HANDLER.set(handler, op))
     })
 }
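Aside — the `match idx { None => None, Some(idx) => Some(...) }` collapsing to `idx.map(...)` above is clippy's `manual_map`: a match that wraps `Some` and forwards `None` is exactly `Option::map`. Illustrative sketch (not from the diff):

    // Illustrative only (clippy::manual_map).
    fn main() {
        let src = "code //# sourceMappingURL=out.map";
        let marker = "sourceMappingURL=";
        let idx = src.rfind(marker);

        // Before:
        // let url = match idx {
        //     None => None,
        //     Some(idx) => Some(&src[idx + marker.len()..]),
        // };
        // After:
        let url = idx.map(|idx| &src[idx + marker.len()..]);
        assert_eq!(url, Some("out.map"));
    }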
@@ -57,7 +57,7 @@ fn fixture(input: PathBuf) {

 match c.process_js_file(
     fm,
-    &handler,
+    handler,
     &Options {
         swcrc: true,
         is_module: IsModule::Bool(true),
@@ -749,7 +749,7 @@ fn should_visit() {

 if config.minify {
     let preserve_excl = |_: &BytePos, vc: &mut Vec<Comment>| -> bool {
-        vc.retain(|c: &Comment| c.text.starts_with("!"));
+        vc.retain(|c: &Comment| c.text.starts_with('!'));
         !vc.is_empty()
     };
     c.comments().leading.retain(preserve_excl);
@@ -856,7 +856,7 @@ fn tests(input_dir: PathBuf) {
 #[test]
 fn issue_1984() {
     testing::run_test2(false, |cm, handler| {
-        let c = Compiler::new(cm.clone());
+        let c = Compiler::new(cm);
         let fm = c.cm.new_source_file(
             FileName::Anon,
             "
@@ -8,7 +8,7 @@ fn with_parser<F, Ret>(file_name: &str, f: F) -> Result<Ret, NormalizedOutput>
 where
     F: FnOnce(&mut Parser<Lexer<StringInput>>) -> PResult<Ret>,
 {
-    let output = ::testing::run_test(false, |cm, handler| {
+    ::testing::run_test(false, |cm, handler| {
         let fm = cm
             .load_file(Path::new(file_name))
             .unwrap_or_else(|e| panic!("failed to load {}: {}", file_name, e));
@@ -24,10 +24,10 @@ where
         None,
     );
     let mut p = Parser::new_from(lexer);
-    let res = f(&mut p).map_err(|e| e.into_diagnostic(&handler).emit());
+    let res = f(&mut p).map_err(|e| e.into_diagnostic(handler).emit());

     for e in p.take_errors() {
-        e.into_diagnostic(&handler).emit()
+        e.into_diagnostic(handler).emit()
     }

     if handler.has_errors() {
@@ -35,9 +35,7 @@ where
     }

     res
-    });
-
-    output
+    })
 }

 #[test]
@@ -27,7 +27,7 @@ fn compile(src: &str, options: Options) -> String {
     if handler.has_errors() {
         Err(())
     } else {
-        Ok(v.code.into())
+        Ok(v.code)
     }
 }
 Err(..) => Err(()),
@@ -1,5 +1,3 @@
-use string_cache_codegen;
-
 use std::{env, path::Path};

 fn main() {
@@ -114,7 +114,7 @@ fn do_test(_entry: &Path, entries: HashMap<String, FileName>, inline: bool, mini
 modules = modules
     .into_iter()
     .map(|mut b| {
-        GLOBALS.set(&globals, || {
+        GLOBALS.set(globals, || {
             b.module = swc_ecma_minifier::optimize(
                 b.module,
                 cm.clone(),
@@ -146,7 +146,7 @@ fn do_test(_entry: &Path, entries: HashMap<String, FileName>, inline: bool, mini
 }

 {
-    let cm = cm.clone();
+    let cm = cm;
     print_bundles(cm, modules, minify);
 }

@@ -222,7 +222,7 @@ pub struct Loader {
 impl Load for Loader {
     fn load(&self, f: &FileName) -> Result<ModuleData, Error> {
         let fm = match f {
-            FileName::Real(path) => self.cm.load_file(&path)?,
+            FileName::Real(path) => self.cm.load_file(path)?,
             _ => unreachable!(),
         };

@@ -344,7 +344,7 @@ impl Resolve for NodeResolver {
 };

 // Absolute path
-if target.starts_with("/") {
+if target.starts_with('/') {
     let base_dir = &Path::new("/");

     let path = base_dir.join(target);
@@ -355,7 +355,7 @@ impl Resolve for NodeResolver {
 }

 let cwd = &Path::new(".");
-let mut base_dir = base.parent().unwrap_or(&cwd);
+let mut base_dir = base.parent().unwrap_or(cwd);

 if target.starts_with("./") || target.starts_with("../") {
     let win_target;
@@ -48,7 +48,7 @@ fn main() {
     cfg: swc_ecma_codegen::Config { minify: false },
     cm: cm.clone(),
     comments: None,
-    wr: Box::new(JsWriter::new(cm.clone(), "\n", wr.lock(), None)),
+    wr: Box::new(JsWriter::new(cm, "\n", wr.lock(), None)),
 };

 emitter.emit_module(&bundle.module).unwrap();
@@ -90,7 +90,7 @@ struct PathResolver;
 impl Resolve for PathResolver {
     fn resolve(&self, base: &FileName, module_specifier: &str) -> Result<FileName, Error> {
         assert!(
-            module_specifier.starts_with("."),
+            module_specifier.starts_with('.'),
             "We are not using node_modules within this example"
         );
@@ -138,13 +138,14 @@ fn wrap_module(
 });

 // var load = __swcpack_require__.bind(void 0, moduleDecl)
-let load_var_init = Stmt::Decl(Decl::Var(VarDecl {
+
+Stmt::Decl(Decl::Var(VarDecl {
     span: DUMMY_SP,
     kind: VarDeclKind::Var,
     declare: false,
     decls: vec![VarDeclarator {
         span: DUMMY_SP,
-        name: Pat::Ident(load_var.clone().into()),
+        name: Pat::Ident(load_var.into()),
         init: Some(Box::new(Expr::Call(CallExpr {
             span: DUMMY_SP,
             callee: Ident::new(
@@ -158,9 +159,7 @@ fn wrap_module(
         }))),
         definite: false,
     }],
-}));
-
-load_var_init
+}))
 }

 struct RequireReplacer<'a, 'b, L, R>
@@ -202,7 +201,7 @@ where
     type_args: None,
 };
 self.replaced = true;
-*node = load.clone();
+*node = load;

 tracing::trace!("Found, and replacing require");
 }
@@ -253,7 +252,7 @@ where
 *node = ModuleItem::Stmt(
     CallExpr {
         span: DUMMY_SP,
-        callee: load_var.clone().as_callee(),
+        callee: load_var.as_callee(),
         args: vec![],
         type_args: None,
     }
@@ -302,7 +301,7 @@ where
 init: Some(Box::new(
     CallExpr {
         span: DUMMY_SP,
-        callee: load_var.clone().as_callee(),
+        callee: load_var.as_callee(),
         args: vec![],
         type_args: None,
     }
@@ -331,14 +330,13 @@ where
 }),
 init: Some(Box::new(Expr::Call(CallExpr {
     span: DUMMY_SP,
-    callee: load_var.clone().as_callee(),
+    callee: load_var.as_callee(),
     type_args: None,
     args: vec![],
 }))),
 definite: false,
 }],
 })));
-return;
 }
 _ => {}
 }
@@ -366,7 +364,6 @@ impl VisitMut for DefaultHandler {
     ))),
     prop: MemberProp::Ident(quote_ident!("exports")),
 });
-return;
 }
 }
 _ => {}
@@ -74,7 +74,7 @@ where
     .get_module(id)
     .unwrap_or_else(|| unreachable!("Module {} is not registered", id));
 let mut module = self.apply_hooks(id, is_entry)?;
-module = self.prepare_for_merging(&ctx, &info, module)?;
+module = self.prepare_for_merging(ctx, &info, module)?;

 if !is_entry {
     module = self.wrap_cjs_module(ctx, &info, module)?;
@@ -71,7 +71,7 @@ where
 .map(|id| -> Result<_, Error> {
     self.run(|| {
         // TODO: is_entry should be false if it's dep of other entry.
-        let is_entry = plan.entries.contains_key(&id);
+        let is_entry = plan.entries.contains_key(id);
         let module = self.get_for_merging(&ctx, *id, is_entry)?;

         Ok((*id, module))
@@ -87,7 +87,7 @@ where
 let entries = all
     .iter()
     .filter_map(|(id, module)| {
-        if plan.entries.contains_key(&id) {
+        if plan.entries.contains_key(id) {
             return Some((*id, module.clone()));
         }
         None
@@ -20,7 +20,7 @@ fn assert_cycles(map: AHashMap<String, String>, cycle_entries: Vec<&str>) {

 let expected: AHashSet<_> = cycle_entries
     .iter()
-    .map(|name| tester.module(&name).id)
+    .map(|name| tester.module(name).id)
     .collect();

 assert_eq!(expected, actual);
@@ -171,8 +171,6 @@ where
     alias: None,
 }
 });
-
-return;
 }

 ModuleItem::ModuleDecl(ModuleDecl::ExportDefaultDecl(_decl)) => {
@@ -184,8 +182,6 @@ where
     local: Id::new(js_word!("default"), SyntaxContext::empty()),
     alias: None,
 });
-
-return;
 }

 ModuleItem::ModuleDecl(ModuleDecl::ExportDefaultExpr(_expr)) => {
@@ -197,8 +193,6 @@ where
     local: Id::new(js_word!("default"), SyntaxContext::empty()),
     alias: None,
 });
-
-return;
 }

 ModuleItem::ModuleDecl(ModuleDecl::ExportNamed(named)) => {
@@ -283,8 +277,6 @@ where
 if need_wrapping {
     self.mark_as_wrapping_required(&named.src.as_ref().unwrap().value);
 }
-
-return;
 }

 ModuleItem::ModuleDecl(ModuleDecl::ExportAll(all)) => {
@@ -423,7 +423,7 @@ where
 let value = v.as_str();
 return ImportDecl {
     src: Str {
-        value: if value.starts_with(".") {
+        value: if value.starts_with('.') {
             value.into()
         } else {
             format!("./{}", value).into()
@@ -36,7 +36,7 @@ macro_rules! define {
     .parse_module()
     .map(|script| drop_span(script.body))
-    .map_err(|_| {
-        ()
-    })
+    .map_err(|_| ())
     .unwrap();
 stmts
@@ -208,7 +208,6 @@ where
     .map(|import| import.src.value.clone())
 {
     self.info.forced_ns.insert(src);
-    return;
 }
 }
@@ -263,7 +262,6 @@ where
 }

 self.info.lazy_imports.push(decl);
-return;
 }

 // TODO: Uncomment this after implementing an option to make swc_bundler
@@ -429,7 +427,7 @@ where
 }
 }

-self.info.insert(&import);
+self.info.insert(import);
 return;
 }
@@ -671,11 +669,7 @@ where
     _ => true,
 });

-if var.decls.is_empty() {
-    false
-} else {
-    true
-}
+!var.decls.is_empty()
 }

 _ => true,
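Aside — `if var.decls.is_empty() { false } else { true }` → `!var.decls.is_empty()` is clippy's `needless_bool`: the condition already is the boolean, so branching to literal `true`/`false` only adds noise. Illustrative sketch (not from the diff):

    // Illustrative only (clippy::needless_bool).
    fn keep(decls: &[u32]) -> bool {
        // Before: if decls.is_empty() { false } else { true }
        !decls.is_empty()
    }

    fn main() {
        assert!(keep(&[1]));
        assert!(!keep(&[]));
    }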
@@ -62,7 +62,7 @@ impl VisitMut for KeywordRenamer {
 fn visit_mut_expr(&mut self, n: &mut Expr) {
     match n {
         Expr::Ident(n) => {
-            if let Some(renamed) = self.renamed(&n) {
+            if let Some(renamed) = self.renamed(n) {
                 *n = renamed;
             }
             return;
@@ -151,7 +151,7 @@ impl VisitMut for KeywordRenamer {
 fn visit_mut_prop(&mut self, n: &mut Prop) {
     match n {
         Prop::Shorthand(i) => {
-            if let Some(renamed) = self.renamed(&i) {
+            if let Some(renamed) = self.renamed(i) {
                 *n = Prop::KeyValue(KeyValueProp {
                     key: PropName::Ident(i.clone()),
                     value: Box::new(Expr::Ident(renamed)),
@@ -73,14 +73,14 @@ where
 tracing::trace!("load_transformed: ({})", file_name);

 // In case of common module
-if let Some(cached) = self.scope.get_module_by_path(&file_name) {
+if let Some(cached) = self.scope.get_module_by_path(file_name) {
     tracing::debug!("Cached: {}", file_name);
     return Ok(Some(cached));
 }

-let (_, data) = self.load(&file_name).context("Bundler.load() failed")?;
+let (_, data) = self.load(file_name).context("Bundler.load() failed")?;
 let (v, mut files) = self
-    .analyze(&file_name, data)
+    .analyze(file_name, data)
     .context("failed to analyze module")?;
 files.dedup_by_key(|v| v.1.clone());

@@ -117,7 +117,7 @@ where

 let data = self
     .loader
-    .load(&file_name)
+    .load(file_name)
     .with_context(|| format!("Bundler.loader.load({}) failed", file_name))?;
 self.scope.mark_as_loaded(module_id);
 Ok((module_id, data))
@@ -117,7 +117,7 @@ where
     config: Config,
     hook: Box<dyn 'a + Hook>,
 ) -> Self {
-    GLOBALS.set(&globals, || {
+    GLOBALS.set(globals, || {
         let helper_ctxt = SyntaxContext::empty().apply_mark(Mark::fresh(Mark::root()));
         tracing::debug!("Helper ctxt: {:?}", helper_ctxt);
         let synthesized_ctxt = SyntaxContext::empty().apply_mark(Mark::fresh(Mark::root()));
@@ -37,7 +37,7 @@ impl Scope {
 }

 pub fn get_module(&self, id: ModuleId) -> Option<TransformedModule> {
-    Some(self.transformed_modules.get(&id)?.clone())
+    self.transformed_modules.get(&id)
 }

 pub fn is_cjs(&self, id: ModuleId) -> bool {
@@ -18,7 +18,7 @@ pub(crate) fn calc_hash(cm: Lrc<SourceMap>, m: &Module) -> Result<String, Error>
 };

 emitter
-    .emit_module(&m)
+    .emit_module(m)
     .context("failed to emit module to calculate hash")?;
 }
 //
@@ -34,7 +34,7 @@ impl ModuleIdGenerator {
 pub fn gen(&self, file_name: &FileName) -> (ModuleId, Mark, Mark) {
     let mut w = self.cache.lock();
     if let Some(v) = w.get(file_name) {
-        return v.clone();
+        return *v;
     }

     let id = ModuleId(self.v.fetch_add(1, SeqCst));
@@ -143,7 +143,6 @@ impl VisitMut for Inliner {
     key: PropName::Ident(orig),
     value: Box::new(Expr::Ident(i.clone())),
 });
-return;
 }
 }
 _ => {
@@ -261,10 +261,7 @@ impl Modules {
 if module_span.is_dummy() {
     return None;
 }
-Some(format!(
-    "{}\n",
-    cm.lookup_source_file(module_span.lo).name.to_string()
-))
+Some(format!("{}\n", cm.lookup_source_file(module_span.lo).name))
 })
 .collect::<String>();
 let mut cloned = self.clone();
@@ -71,7 +71,7 @@ fn toposort_real_modules<'a>(

 #[cfg(not(target_arch = "wasm32"))]
 let start = Instant::now();
-let sorted_ids = toposort_real_module_ids(queue, graph, &cycles).collect::<Vec<_>>();
+let sorted_ids = toposort_real_module_ids(queue, graph, cycles).collect::<Vec<_>>();
 #[cfg(not(target_arch = "wasm32"))]
 let end = Instant::now();
 #[cfg(not(target_arch = "wasm32"))]
@@ -102,13 +102,11 @@ fn toposort_real_modules<'a>(
 }

 // Skip sorting statements if there is no import.
-if ids.len() == 1 {
-    if graph.neighbors_directed(ids[0], Outgoing).count() == 0 {
-        chunks.push(Chunk {
-            stmts: stmts.into_iter().next().unwrap(),
-        });
-        continue;
-    }
+if ids.len() == 1 && graph.neighbors_directed(ids[0], Outgoing).count() == 0 {
+    chunks.push(Chunk {
+        stmts: stmts.into_iter().next().unwrap(),
+    });
+    continue;
 }

 let stmts = sort_stmts(injected_ctxt, stmts, cm);
@@ -172,7 +170,7 @@ fn toposort_real_module_ids<'a>(

 let deps = graph
     .neighbors_directed(id, Outgoing)
-    .filter(|dep| !done.contains(&dep))
+    .filter(|dep| !done.contains(dep))
     .collect::<Vec<_>>();

 if deps.is_empty() {
@@ -208,7 +206,7 @@ fn toposort_real_module_ids<'a>(
 .flat_map(|&id| {
     graph
         .neighbors_directed(id, Outgoing)
-        .filter(|dep| !done.contains(&dep) && !all_modules_in_circle.contains(dep))
+        .filter(|dep| !done.contains(dep) && !all_modules_in_circle.contains(dep))
 })
 .collect::<Vec<_>>();
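Aside — folding the nested `if ids.len() == 1 { if ...count() == 0 {` into one condition is clippy's `collapsible_if`: nested `if`s with no `else` branches merge with `&&`. Illustrative sketch (not from the diff):

    // Illustrative only (clippy::collapsible_if).
    fn main() {
        let ids = vec![1];
        let neighbor_count = 0;

        // Before: if ids.len() == 1 { if neighbor_count == 0 { ... } }
        // After:
        if ids.len() == 1 && neighbor_count == 0 {
            println!("skip sorting: single module with no imports");
        }
    }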
@@ -184,17 +184,18 @@ fn iter<'a>(
 .neighbors_directed(idx, Dependencies)
 .filter(|dep| {
     let declared_in_same_module = match &current_range {
-        Some(v) => v.contains(&dep),
+        Some(v) => v.contains(dep),
         None => false,
     };
     if declared_in_same_module {
         return false;
     }

-    if !free.contains(&idx) && graph.has_a_path(*dep, idx) {
-        if !moves.insert((idx, *dep)) {
-            return false;
-        }
+    if !free.contains(&idx)
+        && graph.has_a_path(*dep, idx)
+        && !moves.insert((idx, *dep))
+    {
+        return false;
     }

     // Exclude emitted items
@@ -215,11 +216,9 @@ fn iter<'a>(
     || (can_ignore_weak_deps
         && graph.edge_weight(idx, dep) == Some(Required::Maybe));

-if can_ignore_dep {
-    if graph.has_a_path(dep, idx) {
-        // Just emit idx.
-        continue;
-    }
+if can_ignore_dep && graph.has_a_path(dep, idx) {
+    // Just emit idx.
+    continue;
 }

 deps_to_push.push(dep);
@@ -341,11 +340,11 @@ impl FieldInitFinter {
 fn check_lhs_of_assign(&mut self, lhs: &PatOrExpr) {
     match lhs {
         PatOrExpr::Expr(e) => {
-            self.check_lhs_expr_of_assign(&e);
+            self.check_lhs_expr_of_assign(e);
         }
         PatOrExpr::Pat(pat) => match &**pat {
             Pat::Expr(e) => {
-                self.check_lhs_expr_of_assign(&e);
+                self.check_lhs_expr_of_assign(e);
             }
             _ => {}
         },
@@ -518,7 +518,7 @@ fn mark(item: &mut ModuleItem, ctxt: SyntaxContext) {
     }
 },
 ModuleItem::Stmt(stmt) => match stmt {
-    Stmt::Empty(_) => return,
+    Stmt::Empty(_) => {}
     Stmt::Block(BlockStmt { span, .. })
     | Stmt::Debugger(DebuggerStmt { span, .. })
     | Stmt::With(WithStmt { span, .. })
@@ -128,11 +128,7 @@ where
 {
     #[cfg(feature = "concurrent")]
     pub fn get(&self, k: &K) -> Option<V> {
-        if let Some(v) = self.inner.get(k) {
-            Some(v.value().clone())
-        } else {
-            None
-        }
+        self.inner.get(k).map(|v| v.value().clone())
     }

     #[cfg(not(feature = "concurrent"))]
@@ -46,12 +46,10 @@ fn calc_cache_path(cache_dir: &Path, url: &Url) -> PathBuf {

 /// Load url. This method does caching.
 fn load_url(url: Url) -> Result<String, Error> {
-    let cache_dir = PathBuf::from(
-        current_dir()
-            .expect("the test requires an environment variable named `CARGO_MANIFEST_DIR`"),
-    )
-    .join("tests")
-    .join(".cache");
+    let cache_dir = current_dir()
+        .expect("the test requires an environment variable named `CARGO_MANIFEST_DIR`")
+        .join("tests")
+        .join(".cache");

     let cache_path = calc_cache_path(&cache_dir, &url).with_extension("ts");
@@ -94,7 +92,7 @@ impl Load for Loader {
 let fm = match f {
     FileName::Real(path) => {
         tsx = path.to_string_lossy().ends_with(".tsx");
-        self.cm.load_file(&path)?
+        self.cm.load_file(path)?
     }
     FileName::Custom(url) => {
         tsx = url.ends_with(".tsx");
@@ -106,7 +104,7 @@ impl Load for Loader {
     let src = load_url(url.clone())?;

     self.cm
-        .new_source_file(FileName::Custom(url.to_string()), src.to_string())
+        .new_source_file(FileName::Custom(url.to_string()), src)
 }
 _ => unreachable!(),
 };
@@ -273,7 +271,7 @@ impl Resolve for NodeResolver {
 let base = match base {
     FileName::Real(v) => v,
     FileName::Custom(base_url) => {
-        let base_url = Url::parse(&base_url).context("failed to parse url")?;
+        let base_url = Url::parse(base_url).context("failed to parse url")?;

         let options = Url::options();
         let base_url = options.base_url(Some(&base_url));
@@ -287,7 +285,7 @@ impl Resolve for NodeResolver {
 };

 // Absolute path
-if target.starts_with("/") {
+if target.starts_with('/') {
     let base_dir = &Path::new("/");

     let path = base_dir.join(target);
@@ -298,7 +296,7 @@ impl Resolve for NodeResolver {
 }

 let cwd = &Path::new(".");
-let mut base_dir = base.parent().unwrap_or(&cwd);
+let mut base_dir = base.parent().unwrap_or(cwd);

 if target.starts_with("./") || target.starts_with("../") {
     let win_target;
@@ -983,15 +983,12 @@ fn run(url: &str, exports: &[&str]) {

 ::testing::run_test2(false, |cm, _| {
     let fm = cm.load_file(&path).unwrap();
-    let loader = Loader { cm: cm.clone() };
+    let loader = Loader { cm };
     let module = loader.load(&fm.name).unwrap().module;

     let mut actual_exports = collect_exports(&module).into_iter().collect::<Vec<_>>();
     actual_exports.sort();
-    let mut expected_exports = exports
-        .into_iter()
-        .map(|s| s.to_string())
-        .collect::<Vec<_>>();
+    let mut expected_exports = exports.iter().map(|s| s.to_string()).collect::<Vec<_>>();
     expected_exports.sort();

     assert_eq!(expected_exports, actual_exports);
@@ -1021,7 +1018,7 @@ fn run(url: &str, exports: &[&str]) {
 }

 fn bundle(url: &str, minify: bool) -> String {
-    let result = testing::run_test2(false, |cm, _handler| {
+    testing::run_test2(false, |cm, _handler| {
         GLOBALS.with(|globals| {
             let mut bundler = Bundler::new(
                 globals,
@@ -1092,9 +1089,7 @@ fn bundle(url: &str, minify: bool) -> String {
         Ok(String::from_utf8_lossy(&buf).to_string())
     })
 })
-.unwrap();
-
-result
+.unwrap()
 }

 struct Hook;
@@ -116,18 +116,12 @@ fn pass(entry: PathBuf) {
 let entries = read_dir(&entry)
     .unwrap()
     .filter(|e| match e {
-        Ok(e) => {
-            if e.path()
-                .file_name()
-                .unwrap()
-                .to_string_lossy()
-                .starts_with("entry")
-            {
-                true
-            } else {
-                false
-            }
-        }
+        Ok(e) => e
+            .path()
+            .file_name()
+            .unwrap()
+            .to_string_lossy()
+            .starts_with("entry"),
         _ => false,
     })
     .map(|e| -> Result<_, io::Error> {
@@ -50,7 +50,7 @@ pub trait Comments {
     let cmts = self.take_leading(pos);

     let ret = if let Some(cmts) = &cmts {
-        f(&cmts)
+        f(cmts)
     } else {
         f(&[])
     };
@@ -70,7 +70,7 @@ pub trait Comments {
     let cmts = self.take_trailing(pos);

     let ret = if let Some(cmts) = &cmts {
-        f(&cmts)
+        f(cmts)
     } else {
         f(&[])
     };
@@ -453,13 +453,11 @@ impl Comments for SingleThreadedComments {
     let b = self.leading.borrow();
     let cmts = b.get(&pos);

-    let ret = if let Some(cmts) = &cmts {
-        f(&cmts)
+    if let Some(cmts) = &cmts {
+        f(cmts)
     } else {
         f(&[])
-    };
-
-    ret
+    }
 }
 }

 fn with_trailing<F, Ret>(&self, pos: BytePos, f: F) -> Ret
@@ -470,13 +468,11 @@ impl Comments for SingleThreadedComments {
     let b = self.trailing.borrow();
     let cmts = b.get(&pos);

-    let ret = if let Some(cmts) = &cmts {
-        f(&cmts)
+    if let Some(cmts) = &cmts {
+        f(cmts)
     } else {
         f(&[])
-    };
-
-    ret
+    }
 }
 }
 }
@@ -93,7 +93,7 @@ impl<'a> DiagnosticBuilder<'a> {
     return;
 }

-self.handler.emit_db(&self);
+self.handler.emit_db(self);
 self.cancel();
 }
@@ -87,11 +87,11 @@ impl Emitter for EmitterWriter {

 self.emit_messages_default(
     db.level,
-    &db.styled_message(),
+    db.styled_message(),
     &db.code,
     &primary_span,
     &children,
-    &suggestions,
+    suggestions,
 );
 }
@@ -381,7 +381,7 @@ impl EmitterWriter {
 // 4 | | }
 // | |_^ test
 if line.annotations.len() == 1 {
-    if let Some(ref ann) = line.annotations.get(0) {
+    if let Some(ann) = line.annotations.get(0) {
         if let AnnotationType::MultilineStart(depth) = ann.annotation_type {
             if source_string
                 .chars()
@@ -693,7 +693,7 @@ impl EmitterWriter {
     (pos + 2, annotation.start_col)
 };
 if let Some(ref label) = annotation.label {
-    buffer.puts(line_offset + pos, code_offset + col, &label, style);
+    buffer.puts(line_offset + pos, code_offset + col, label, style);
 }
 }
@@ -973,7 +973,7 @@ impl EmitterWriter {
 // only render error codes, not lint codes
 if let Some(DiagnosticId::Error(ref code)) = *code {
     buffer.append(0, "[", Style::Level(level));
-    buffer.append(0, &code, Style::Level(level));
+    buffer.append(0, code, Style::Level(level));
     buffer.append(0, "]", Style::Level(level));
 }
 if !level_str.is_empty() {
@@ -990,7 +990,7 @@ impl EmitterWriter {
 let mut annotated_files = self.preprocess_annotations(msp);

 // Make sure our primary file comes first
-let (primary_lo, sm) = if let (Some(sm), Some(ref primary_span)) =
+let (primary_lo, sm) = if let (Some(sm), Some(primary_span)) =
     (self.sm.as_ref(), msp.primary_span().as_ref())
 {
     if !primary_span.is_dummy() {
@@ -1369,8 +1369,8 @@ impl EmitterWriter {
 for child in children {
     let span = child.render_span.as_ref().unwrap_or(&child.span);
     if let Err(e) = self.emit_message_default(
-        &span,
-        &child.styled_message(),
+        span,
+        child.styled_message(),
         &None,
         child.level,
         max_line_num_len,
@@ -171,7 +171,7 @@ impl<'a> Input for StringInput<'a> {

 #[inline]
 fn is_byte(&mut self, c: u8) -> bool {
-    if self.iter.as_str().len() == 0 {
+    if self.iter.as_str().is_empty() {
         false
     } else {
         // Safety: We checked that `self.iter.as_str().len() > 0`
@@ -22,7 +22,7 @@ impl Serialized {
     len.try_into()
         .expect("Cannot determine size of the serialized bytes"),
 );
-vec.extend_from_slice(&bytes);
+vec.extend_from_slice(bytes);
 Serialized { field: vec }
 }
@@ -39,7 +39,7 @@ impl Serialized {
 W: rkyv::Serialize<rkyv::ser::serializers::AllocSerializer<512>>,
 {
     rkyv::to_bytes::<_, 512>(t)
-        .map(|v| Serialized::from(v))
+        .map(Serialized::from)
         .map_err(|err| match err {
             rkyv::ser::serializers::CompositeSerializerError::SerializerError(e) => e.into(),
             rkyv::ser::serializers::CompositeSerializerError::ScratchSpaceError(e) => {
@@ -82,7 +82,7 @@ where
 S: ?Sized + Spanned,
 {
     fn span(&self) -> Span {
-        <S as Spanned>::span(&*self)
+        <S as Spanned>::span(*self)
     }
 }
@@ -620,7 +620,7 @@ impl SourceMap {
 /// occurred while retrieving the code snippet.
 pub fn span_extend_to_prev_char(&self, sp: Span, c: char) -> Span {
     if let Ok(prev_source) = self.span_to_prev_source(sp) {
-        let prev_source = prev_source.rsplit(c).nth(0).unwrap_or("").trim_start();
+        let prev_source = prev_source.rsplit(c).next().unwrap_or("").trim_start();
         if !prev_source.is_empty() && !prev_source.contains('\n') {
             return sp.with_lo(BytePos(sp.lo().0 - prev_source.len() as u32));
         }
@@ -641,7 +641,7 @@ impl SourceMap {
 for ws in &[" ", "\t", "\n"] {
     let pat = pat.to_owned() + ws;
     if let Ok(prev_source) = self.span_to_prev_source(sp) {
-        let prev_source = prev_source.rsplit(&pat).nth(0).unwrap_or("").trim_start();
+        let prev_source = prev_source.rsplit(&pat).next().unwrap_or("").trim_start();
         if !prev_source.is_empty() && (!prev_source.contains('\n') || accept_newlines) {
             return sp.with_lo(BytePos(sp.lo().0 - prev_source.len() as u32));
         }
@@ -665,7 +665,7 @@ impl SourceMap {

 match self.span_to_source(sp, |src, start_index, end_index| {
     let snippet = &src[start_index..end_index];
-    let snippet = snippet.split(c).nth(0).unwrap_or("").trim_end();
+    let snippet = snippet.split(c).next().unwrap_or("").trim_end();
     if !snippet.is_empty() && !snippet.contains('\n') {
         sp.with_hi(BytePos(sp.lo().0 + snippet.len() as u32))
     } else {
@@ -967,7 +967,7 @@ impl SourceMap {
 pub fn lookup_source_file(&self, pos: BytePos) -> Lrc<SourceFile> {
     let files = self.files.borrow();
     let files = &files.source_files;
-    let fm = Self::lookup_source_file_in(&files, pos);
+    let fm = Self::lookup_source_file_in(files, pos);
     match fm {
         Some(fm) => fm,
         None => {
@@ -1163,9 +1163,9 @@ impl SourceMap {
     pos,
     linebpos,
 );
-let chpos = pos.to_u32() - self.calc_extra_bytes(&f, &mut ch_start, pos);
+let chpos = pos.to_u32() - self.calc_extra_bytes(f, &mut ch_start, pos);
 let linechpos =
-    linebpos.to_u32() - self.calc_extra_bytes(&f, &mut line_ch_start, linebpos);
+    linebpos.to_u32() - self.calc_extra_bytes(f, &mut line_ch_start, linebpos);

 let mut col = max(chpos, linechpos) - min(chpos, linechpos);
@@ -726,11 +726,7 @@ impl SourceFile {
 }

 let begin = {
-    let line = if let Some(line) = self.lines.get(line_number) {
-        line
-    } else {
-        return None;
-    };
+    let line = self.lines.get(line_number)?;
     let begin: BytePos = *line - self.start_pos;
     begin.to_usize()
 };
@@ -788,7 +784,7 @@ impl SourceFile {

 /// Remove utf-8 BOM if any.
 fn remove_bom(src: &mut String) {
-    if src.starts_with("\u{feff}") {
+    if src.starts_with('\u{feff}') {
         src.drain(..3);
     }
 }
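Aside — the `if let Some(line) = ... else { return None }` block above collapses to the `?` operator, which propagates `None` from an `Option`-returning function (clippy's `question_mark`). Illustrative sketch (simplified types, not swc's):

    // Illustrative only (clippy::question_mark).
    fn line_start(lines: &[usize], line_number: usize) -> Option<usize> {
        // Before:
        // let line = if let Some(line) = lines.get(line_number) {
        //     line
        // } else {
        //     return None;
        // };
        // After: `?` returns None early.
        let line = lines.get(line_number)?;
        Some(*line)
    }

    fn main() {
        assert_eq!(line_start(&[0, 10, 25], 1), Some(10));
        assert_eq!(line_start(&[0], 5), None);
    }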
@@ -103,7 +103,7 @@ cfg_if::cfg_if! {
 let ptr = src_bytes.as_ptr() as *const __m128i;
 // We don't know if the pointer is aligned to 16 bytes, so we
 // use `loadu`, which supports unaligned loading.
-let chunk = _mm_loadu_si128(ptr.offset(chunk_index as isize));
+let chunk = _mm_loadu_si128(ptr.add(chunk_index));

 // For character in the chunk, see if its byte value is < 0, which
 // indicates that it's part of a UTF-8 char.
@@ -259,7 +259,7 @@ fn analyze_source_file_generic(
 let pos = BytePos::from_usize(i) + output_offset;

 if char_len > 1 {
-    assert!(char_len >= 2 && char_len <= 4);
+    assert!((2..=4).contains(&char_len));
     let mbc = MultiByteChar {
         pos,
         bytes: char_len as u8,
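Aside — `char_len >= 2 && char_len <= 4` → `(2..=4).contains(&char_len)` is clippy's `manual_range_contains`: a bounds check reads more directly as range membership. Illustrative sketch (not from the diff):

    // Illustrative only (clippy::manual_range_contains).
    fn main() {
        let char_len = 3;

        // Before: assert!(char_len >= 2 && char_len <= 4);
        // After:
        assert!((2..=4).contains(&char_len));
    }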
@@ -212,7 +212,7 @@ impl SyntaxContext {
 /// that mark.
 pub fn apply_mark(self, mark: Mark) -> SyntaxContext {
     assert_ne!(mark, Mark::root());
-    return self.apply_mark_internal(mark);
+    self.apply_mark_internal(mark)
 }

 fn apply_mark_internal(self, mark: Mark) -> SyntaxContext {
@@ -11,15 +11,12 @@ fn no_overlap() {
 let files: Vec<Lrc<SourceFile>> = (0..100000)
     .into_par_iter()
     .map(|_| {
-        let fm = cm
-            .load_file(
-                &PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap())
-                    .join("tests")
-                    .join("concurrent.js"),
-            )
-            .unwrap();
-
-        fm
+        cm.load_file(
+            &PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap())
+                .join("tests")
+                .join("concurrent.js"),
+        )
+        .unwrap()
     })
     .collect::<Vec<_>>();

@@ -27,7 +24,7 @@ fn no_overlap() {

 let mut start = files.clone();
 start.sort_by_key(|f| f.start_pos);
-let mut end = files.clone();
+let mut end = files;
 end.sort_by_key(|f| f.end_pos);

 start
@@ -31,12 +31,12 @@ impl<'w, I: CssWriter> Deref for WithCtx<'w, I> {
 type Target = CodeGenerator<I>;

 fn deref(&self) -> &CodeGenerator<I> {
-    &self.inner
+    self.inner
 }
 }
 impl<'w, I: CssWriter> DerefMut for WithCtx<'w, I> {
 fn deref_mut(&mut self) -> &mut CodeGenerator<I> {
-    &mut self.inner
+    self.inner
 }
 }
@ -502,7 +502,7 @@ where
        punct!(self, "[");

        if let Some(values) = &n.children {
            self.emit_list(&values, ListFormat::SpaceDelimited)?;
            self.emit_list(values, ListFormat::SpaceDelimited)?;
        }

        punct!(self, "]");

@ -513,7 +513,7 @@ where
        punct!(self, "(");

        if let Some(values) = &n.children {
            self.emit_list(&values, ListFormat::CommaDelimited)?;
            self.emit_list(values, ListFormat::CommaDelimited)?;
        }

        punct!(self, ")");

@ -543,7 +543,7 @@ where
        match token {
            Token::AtKeyword { raw, .. } => {
                punct!(self, span, "@");
                self.wr.write_raw(Some(n.span), &raw)?;
                self.wr.write_raw(Some(n.span), raw)?;
            }
            Token::Delim { value } => {
                self.wr.write_raw_char(Some(n.span), *value)?;

@ -572,32 +572,32 @@ where
                raw_unit,
                ..
            } => {
                self.wr.write_raw(Some(span), &raw_value)?;
                self.wr.write_raw(Some(span), &raw_unit)?;
                self.wr.write_raw(Some(span), raw_value)?;
                self.wr.write_raw(Some(span), raw_unit)?;
            }
            Token::Ident { raw, .. } => {
                self.wr.write_raw(Some(n.span), &raw)?;
                self.wr.write_raw(Some(n.span), raw)?;
            }
            Token::Function { raw, .. } => {
                self.wr.write_raw(Some(n.span), &raw)?;
                self.wr.write_raw(Some(n.span), raw)?;
                punct!(self, "(");
            }
            Token::BadStr { raw, .. } => {
                self.wr.write_raw(Some(span), &raw)?;
                self.wr.write_raw(Some(span), raw)?;
            }
            Token::Str { raw, .. } => {
                self.wr.write_raw(Some(span), &raw)?;
                self.wr.write_raw(Some(span), raw)?;
            }
            Token::Url { raw, .. } => {
                self.wr.write_raw(Some(span), "url")?;
                punct!(self, "(");
                self.wr.write_raw(None, &raw)?;
                self.wr.write_raw(None, raw)?;
                punct!(self, ")");
            }
            Token::BadUrl { raw, .. } => {
                self.wr.write_raw(Some(span), "url")?;
                punct!(self, "(");
                self.wr.write_raw(None, &raw)?;
                self.wr.write_raw(None, raw)?;
                punct!(self, ")");
            }
            Token::Comma => {

@ -617,10 +617,10 @@ where
            }
            Token::Hash { raw, .. } => {
                punct!(self, "#");
                self.wr.write_raw(Some(span), &raw)?;
                self.wr.write_raw(Some(span), raw)?;
            }
            Token::WhiteSpace { value, .. } => {
                self.wr.write_raw(None, &value)?;
                self.wr.write_raw(None, value)?;
            }
            Token::CDC => {
                punct!(self, span, "-->");
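Most of the token-writer edits above are clippy's `needless_borrow`: writing `&raw` where `raw` is already a reference just adds a `&&` that auto-deref strips again. Sketch (hypothetical names):

    fn write_raw(out: &mut String, raw: &str) {
        out.push_str(raw);
    }

    fn emit(out: &mut String, raw: &str) {
        // Before: write_raw(out, &raw) — borrowing a reference yields `&&str`
        // that the compiler must peel off; clippy::needless_borrow drops the `&`.
        write_raw(out, raw);
    }
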
@ -975,7 +975,7 @@ impl PatOrExpr {
        match self {
            PatOrExpr::Pat(pat) => match *pat {
                Pat::Expr(expr) => PatOrExpr::Expr(expr),
                _ => return PatOrExpr::Pat(pat),
                _ => PatOrExpr::Pat(pat),
            },
            _ => self,
        }
@ -141,10 +141,8 @@ impl Ident {
        let mut chars = s.chars();

        if let Some(first) = chars.next() {
            if Self::is_valid_start(first) {
                if chars.all(Self::is_valid_continue) {
                    return Ok(());
                }
            if Self::is_valid_start(first) && chars.all(Self::is_valid_continue) {
                return Ok(());
            }
        }
    }
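The `verify_symbol` edit is clippy's `collapsible_if`: two nested `if`s with no `else` merge into one `&&` condition. Sketch (hypothetical predicate):

    fn is_simple_ident(first: char, rest: &str) -> bool {
        // Before: if first.is_alphabetic() { if rest.chars().all(char::is_alphanumeric) { .. } }
        first.is_alphabetic() && rest.chars().all(char::is_alphanumeric)
    }
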
@ -214,7 +214,7 @@ where
    ) -> Result<Self::Resolver, S::Error> {
        value
            .as_ref()
            .map(|value| rkyv::string::ArchivedString::serialize_from_str(&value, serializer))
            .map(|value| rkyv::string::ArchivedString::serialize_from_str(value, serializer))
            .transpose()
    }
}
@ -277,6 +277,8 @@ pub struct Number {

impl Eq for Number {}

#[allow(clippy::derive_hash_xor_eq)]
#[allow(clippy::transmute_float_to_int)]
impl Hash for Number {
    fn hash<H: Hasher>(&self, state: &mut H) {
        fn integer_decode(val: f64) -> (u64, i16, i8) {
@ -361,7 +361,7 @@ where
            ExportSpecifier::Namespace(spec) => {
                result.has_namespace_spec = true;
                // There can only be one namespace export specifier.
                if let None = result.namespace_spec {
                if result.namespace_spec.is_none() {
                    result.namespace_spec = Some(spec)
                }
                result
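`if let None = ..` → `.is_none()` is clippy's `redundant_pattern_matching`; the method form reads as a plain predicate. Sketch (hypothetical helper):

    fn set_once(slot: &mut Option<u32>, value: u32) {
        // Before: if let None = slot { .. }
        if slot.is_none() {
            *slot = Some(value);
        }
    }
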
@ -470,7 +470,7 @@ where
    }

    fn emit_js_word(&mut self, span: Span, value: &JsWord) -> Result {
        self.wr.write_str_lit(span, &value)?;
        self.wr.write_str_lit(span, value)?;

        Ok(())
    }
@ -526,23 +526,19 @@ where
                self.wr.write_str_lit(num.span, "-")?;
            }
            self.wr.write_str_lit(num.span, "Infinity")?;
        } else if num.value.is_sign_negative() && num.value == 0.0 {
            self.wr.write_str_lit(num.span, "-0")?;
        } else {
            if num.value.is_sign_negative() && num.value == 0.0 {
                self.wr.write_str_lit(num.span, "-0")?;
            } else {
                let mut s = num.value.to_string();
                if self.cfg.minify {
                    if !s.contains('.') && !s.contains('e') && s.ends_with("0000") {
                        let cnt = s.as_bytes().iter().rev().filter(|&&v| v == b'0').count() - 1;
            let mut s = num.value.to_string();
            if self.cfg.minify && !s.contains('.') && !s.contains('e') && s.ends_with("0000") {
                let cnt = s.as_bytes().iter().rev().filter(|&&v| v == b'0').count() - 1;

                        s.truncate(s.len() - cnt);
                        s.push('e');
                        s.push_str(&cnt.to_string());
                    }
                }

                self.wr.write_str_lit(num.span, &s)?;
                s.truncate(s.len() - cnt);
                s.push('e');
                s.push_str(&cnt.to_string());
            }

            self.wr.write_str_lit(num.span, &s)?;
        }
    }
@ -1627,7 +1623,7 @@ where
        }

        let emit_new_line = !self.cfg.minify
            && !(node.props.len() == 0 && is_empty_comments(&node.span(), &self.comments));
            && !(node.props.is_empty() && is_empty_comments(&node.span(), &self.comments));

        if emit_new_line {
            self.wr.write_line()?;

@ -1919,10 +1915,9 @@ where
                // to newline ,
                if format.contains(ListFormat::DelimitersMask)
                    && previous_sibling.hi != parent_node.hi()
                    && self.comments.is_some()
                {
                    if self.comments.is_some() {
                        self.emit_leading_comments(previous_sibling.span().hi(), true)?;
                    }
                    self.emit_leading_comments(previous_sibling.span().hi(), true)?;
                }

                self.write_delim(format)?;

@ -1991,11 +1986,12 @@ where
            }
        };

        if has_trailing_comma && format.contains(ListFormat::CommaDelimited) {
            if !self.cfg.minify || !format.contains(ListFormat::CanSkipTrailingComma) {
                punct!(self, ",");
                formatting_space!(self);
            }
        if has_trailing_comma
            && format.contains(ListFormat::CommaDelimited)
            && (!self.cfg.minify || !format.contains(ListFormat::CanSkipTrailingComma))
        {
            punct!(self, ",");
            formatting_space!(self);
        }

        {

@ -2018,10 +2014,9 @@ where
                if format.contains(ListFormat::DelimitersMask)
                    && previous_sibling.span().hi() != parent_node.hi()
                    && emit_trailing_comments
                    && self.comments.is_some()
                {
                    if self.comments.is_some() {
                        self.emit_leading_comments(previous_sibling.span().hi(), true)?;
                    }
                    self.emit_leading_comments(previous_sibling.span().hi(), true)?;
                }
            }
        }

@ -2316,7 +2311,7 @@ where
        }

        let emit_new_line = !self.cfg.minify
            && !(node.stmts.len() == 0 && is_empty_comments(&node.span(), &self.comments));
            && !(node.stmts.is_empty() && is_empty_comments(&node.span(), &self.comments));

        let mut list_format = ListFormat::MultiLineBlockStatements;
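`node.props.len() == 0` → `node.props.is_empty()` is clippy's `len_zero`; `is_empty` states the intent directly and stays cheap even for collections whose `len` is not O(1). Sketch (hypothetical function):

    fn needs_newline(stmts: &[u32], minify: bool) -> bool {
        // Before: !minify && !(stmts.len() == 0)
        !minify && !stmts.is_empty()
    }
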
@ -2396,7 +2391,7 @@ where
            }
        }
        Callee::Expr(callee) => {
            if self.has_leading_comment(&callee) {
            if self.has_leading_comment(callee) {
                return true;
            }
        }

@ -2445,12 +2440,10 @@ where
            .unwrap_or(false);
        if need_paren {
            punct!("(");
        } else if arg.starts_with_alpha_num() {
            space!();
        } else {
            if arg.starts_with_alpha_num() {
                space!();
            } else {
                formatting_space!();
            }
            formatting_space!();
        }

        emit!(arg);
@ -2998,14 +2991,14 @@ fn unescape_tpl_lit(s: &str, is_synthesized: bool) -> String {
        if c != '\\' {
            match c {
                '\r' => {
                    if chars.peek().map(|&v| v) == Some('\n') {
                    if chars.peek().copied() == Some('\n') {
                        continue;
                    }

                    result.push_str("\r");
                    result.push('\r');
                }
                '\n' => {
                    result.push_str("\n");
                    result.push('\n');
                }

                '`' if is_synthesized => {
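Two lints show up in `unescape_tpl_lit`: `peek().map(|&v| v)` → `.copied()` (clippy's `map_clone`) and `push_str("\n")` → `push('\n')` (`single_char_add_str`). Sketch combining both (hypothetical helper):

    fn normalize_newlines(s: &str) -> String {
        let mut out = String::new();
        let mut chars = s.chars().peekable();
        while let Some(c) = chars.next() {
            if c == '\r' {
                // .copied() instead of .map(|&v| v)
                if chars.peek().copied() == Some('\n') {
                    continue; // collapse \r\n into the upcoming \n
                }
                out.push('\n'); // push(char), not push_str("\n")
            } else {
                out.push(c);
            }
        }
        out
    }
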
@ -3074,7 +3067,7 @@ fn escape_without_source(v: &str, target: EsVersion, single_quote: bool) -> Stri

            '\\' => {
                if iter.peek() == Some(&'\0') {
                    buf.push_str("\\");
                    buf.push('\\');
                    iter.next();
                } else {
                    buf.push_str("\\\\")

@ -3149,10 +3142,8 @@ fn escape_with_source<'s>(
        || (single_quote == Some(false) && orig.starts_with('"'))
    {
        orig = &orig[1..orig.len() - 1];
    } else {
        if single_quote.is_some() {
            return escape_without_source(s, target, single_quote.unwrap_or(false));
        }
    } else if single_quote.is_some() {
        return escape_without_source(s, target, single_quote.unwrap_or(false));
    }

    let mut buf = String::with_capacity(s.len());
@ -3312,12 +3303,12 @@ fn is_space_require_before_rhs(rhs: &Expr) -> bool {

        Expr::Update(UpdateExpr { prefix: true, .. }) | Expr::Unary(..) => true,

        Expr::Bin(BinExpr { left, .. }) => is_space_require_before_rhs(&left),
        Expr::Bin(BinExpr { left, .. }) => is_space_require_before_rhs(left),

        _ => false,
    }
}

fn is_empty_comments(span: &Span, comments: &Option<&dyn Comments>) -> bool {
    return span.is_dummy() || comments.map_or(true, |c| !c.has_leading(span.hi() - BytePos(1)));
    span.is_dummy() || comments.map_or(true, |c| !c.has_leading(span.hi() - BytePos(1)))
}
@ -31,7 +31,7 @@ impl Builder {
            Box::new(writer)
        };

        let ret = {
        {
            let mut e = Emitter {
                cfg: self.cfg,
                cm: self.cm.clone(),

@ -40,9 +40,7 @@ impl Builder {
            };

            op(&mut e)
        };

        ret
        }
    }

    pub fn text<F>(self, src: &str, op: F) -> String

@ -133,7 +131,7 @@ pub(crate) fn assert_pretty(from: &str, to: &str) {

    println!("Expected: {:?}", to);
    println!("Actaul: {:?}", out);
    assert_eq!(DebugUsingDisplay(&out.trim()), DebugUsingDisplay(to),);
    assert_eq!(DebugUsingDisplay(out.trim()), DebugUsingDisplay(to),);
}

#[track_caller]

@ -618,7 +616,7 @@ fn issue_1619_3() {

fn check_latest(src: &str, expected: &str) {
    let actual = parse_then_emit(
        &src,
        src,
        Config { minify: false },
        Default::default(),
        EsVersion::latest(),
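`let ret = { .. }; ret` collapses to the block itself under clippy's `let_and_return`: binding a value only to return it on the next line adds nothing. Sketch (hypothetical functions):

    fn build() -> Vec<u32> {
        // Before: let ret = compute(); ret
        compute()
    }

    fn compute() -> Vec<u32> {
        vec![1, 2, 3]
    }
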
@ -69,7 +69,7 @@ impl<W: WriteJs> WriteJs for OmitTrailingSemi<W> {
            }
        }

        Ok(self.inner.write_punct(span, s)?)
        self.inner.write_punct(span, s)
    }

    fn target(&self) -> swc_ecma_ast::EsVersion {
@ -23,7 +23,7 @@ fn run(input: &Path, minify: bool) {
    };

    run_test2(false, |cm, _| {
        let fm = cm.load_file(&input).unwrap();
        let fm = cm.load_file(input).unwrap();

        let lexer = Lexer::new(
            Syntax::Typescript(Default::default()),

@ -48,7 +48,7 @@ fn run(input: &Path, minify: bool) {

        let mut emitter = Emitter {
            cfg: swc_ecma_codegen::Config { minify },
            cm: cm.clone(),
            cm,
            comments: None,
            wr,
        };
@ -96,7 +96,7 @@ fn do_test(entry: &Path, minify: bool) {

    let module = file_name.contains("module");

    let ref_dir = ref_dir.clone();
    let ref_dir = ref_dir;

    let msg = format!(
        "\n\n========== Running codegen test {}\nSource:\n{}\n",

@ -105,7 +105,7 @@ fn do_test(entry: &Path, minify: bool) {
    let mut wr = Buf(Arc::new(RwLock::new(vec![])));

    ::testing::run_test(false, |cm, handler| {
        let src = cm.load_file(&entry).expect("failed to load file");
        let src = cm.load_file(entry).expect("failed to load file");
        eprintln!(
            "{}\nPos: {:?} ~ {:?} (L{})",
            msg,

@ -137,7 +137,7 @@ fn do_test(entry: &Path, minify: bool) {

        let mut emitter = Emitter {
            cfg: swc_ecma_codegen::Config { minify },
            cm: cm.clone(),
            cm,
            wr,
            comments: if minify { None } else { Some(&comments) },
        };
@ -348,7 +348,7 @@ mod tests {
        file_name: &str,
        source: &str,
    ) -> Result<(ast::Module, SingleThreadedComments), testing::StdErr> {
        let output = ::testing::run_test(false, |cm, handler| {
        ::testing::run_test(false, |cm, handler| {
            let fm =
                cm.new_source_file(FileName::Custom(file_name.to_string()), source.to_string());

@ -370,10 +370,10 @@ mod tests {

            let res = p
                .parse_module()
                .map_err(|e| e.into_diagnostic(&handler).emit());
                .map_err(|e| e.into_diagnostic(handler).emit());

            for err in p.take_errors() {
                err.into_diagnostic(&handler).emit();
                err.into_diagnostic(handler).emit();
            }

            if handler.has_errors() {

@ -381,9 +381,7 @@ mod tests {
            }

            Ok((res.unwrap(), comments))
        });

        output
        })
    }

    #[test]
@ -415,7 +413,7 @@ try {
  // pass
}
"#;
        let (module, comments) = helper("test.ts", &source).unwrap();
        let (module, comments) = helper("test.ts", source).unwrap();
        let dependencies = analyze_dependencies(&module, &comments);
        assert_eq!(dependencies.len(), 8);
        assert_eq!(

@ -535,7 +533,7 @@ const d8 = await import("./d8.json", { assert: { type: bar } });
const d9 = await import("./d9.json", { assert: { type: "json", ...bar } });
const d10 = await import("./d10.json", { assert: { type: "json", ["type"]: "bad" } });
"#;
        let (module, comments) = helper("test.ts", &source).unwrap();
        let (module, comments) = helper("test.ts", source).unwrap();
        let expected_assertions1 = ImportAssertions::Known({
            let mut map = HashMap::new();
            map.insert(

@ -599,7 +597,7 @@ const d10 = await import("./d10.json", { assert: { type: "json", ["type"]: "bad"
                span: Span::new(BytePos(186), BytePos(239), Default::default()),
                specifier: JsWord::from("./foo.json"),
                specifier_span: Span::new(BytePos(202), BytePos(214), Default::default()),
                import_assertions: expected_assertions2.clone(),
                import_assertions: expected_assertions2,
            },
            DependencyDescriptor {
                kind: DependencyKind::Import,

@ -617,7 +615,7 @@ const d10 = await import("./d10.json", { assert: { type: "json", ["type"]: "bad"
                span: Span::new(BytePos(333), BytePos(386), Default::default()),
                specifier: JsWord::from("./buzz.json"),
                specifier_span: Span::new(BytePos(340), BytePos(353), Default::default()),
                import_assertions: dynamic_expected_assertions2.clone(),
                import_assertions: dynamic_expected_assertions2,
            },
            DependencyDescriptor {
                kind: DependencyKind::Import,
@ -23,9 +23,8 @@ fn parse(cm: Lrc<SourceMap>, path: &Path) -> Module {
    );

    let mut parser = Parser::new_from(lexer);
    let program = parser.parse_module().unwrap();

    program
    parser.parse_module().unwrap()
}

#[testing::fixture("tests/diff/**/l.js")]

@ -41,7 +40,7 @@ fn diff(l: PathBuf) {
        let res = l.diff(&mut r, &mut ctx);

        let l = print(cm.clone(), &[l]);
        let r = print(cm.clone(), &[r]);
        let r = print(cm, &[r]);

        Ok((format!("{}", res), l, r))
    })

@ -68,7 +67,7 @@ fn print<N: swc_ecma_codegen::Node>(cm: Lrc<SourceMap>, nodes: &[N]) -> String {

    let mut emitter = Emitter {
        cfg: swc_ecma_codegen::Config { minify: false },
        cm: cm.clone(),
        cm,
        comments: None,
        wr,
    };
@ -66,7 +66,7 @@ impl Visit for DuplicateExports {
            ModuleExportName::Ident(ident) => ident,
            ModuleExportName::Str(..) => return,
        };
        self.add(&exported.as_ref().unwrap_or(&orig));
        self.add(exported.as_ref().unwrap_or(&orig));
    }

    fn visit_export_namespace_specifier(&mut self, s: &ExportNamespaceSpecifier) {

@ -41,7 +41,7 @@ fn pass(input: PathBuf) {

        let rules = all();

        HANDLER.set(&handler, || {
        HANDLER.set(handler, || {
            for mut rule in rules {
                rule.lint_module(&m);
            }
@ -119,10 +119,8 @@ impl NodeModulesResolver {
        }

        // Try exact file after checking .js, for performance
        if !try_exact {
            if path.is_file() {
                return Ok(Some(path.to_path_buf()));
            }
        if !try_exact && path.is_file() {
            return Ok(Some(path.to_path_buf()));
        }

        if let Some(name) = path.file_name() {

@ -197,17 +195,13 @@ impl NodeModulesResolver {

        let main_fields = match self.target_env {
            TargetEnv::Node => {
                vec![pkg.module.as_ref().clone(), pkg.main.as_ref().clone()]
                vec![pkg.module.as_ref(), pkg.main.as_ref()]
            }
            TargetEnv::Browser => {
                if let Some(browser) = &pkg.browser {
                    match browser {
                        Browser::Str(path) => {
                            vec![
                                Some(path),
                                pkg.module.as_ref().clone(),
                                pkg.main.as_ref().clone(),
                            ]
                            vec![Some(path), pkg.module.as_ref(), pkg.main.as_ref()]
                        }
                        Browser::Obj(map) => {
                            let bucket = BROWSER_CACHE.entry(pkg_dir.to_path_buf()).or_default();

@ -260,11 +254,11 @@ impl NodeModulesResolver {
                                }
                            }
                        }
                        vec![pkg.module.as_ref().clone(), pkg.main.as_ref().clone()]
                        vec![pkg.module.as_ref(), pkg.main.as_ref()]
                    }
                }
                } else {
                    vec![pkg.module.as_ref().clone(), pkg.main.as_ref().clone()]
                    vec![pkg.module.as_ref(), pkg.main.as_ref()]
                }
            }
        };

@ -320,7 +314,7 @@ impl Resolve for NodeModulesResolver {

        let base_dir = if base.is_file() {
            let cwd = &Path::new(".");
            base.parent().unwrap_or(&cwd)
            base.parent().unwrap_or(cwd)
        } else {
            base
        };

@ -344,7 +338,7 @@ impl Resolve for NodeModulesResolver {
        // Handle builtin modules for nodejs
        if let TargetEnv::Node = self.target_env {
            if is_core_module(target) {
                return Ok(FileName::Custom(format!("node:{}", target.to_string())));
                return Ok(FileName::Custom(format!("node:{}", target)));
            }
        }

@ -396,7 +390,7 @@ impl Resolve for NodeModulesResolver {
        if let Some(item) = BROWSER_CACHE.get(&pkg_base) {
            let value = item.value();
            if value.ignores.contains(path) {
                return Ok(FileName::Custom(path.display().to_string().into()));
                return Ok(FileName::Custom(path.display().to_string()));
            }
            if let Some(rewrite) = value.rewrites.get(path) {
                return self.wrap(Some(rewrite.to_path_buf()));
@ -94,13 +94,12 @@ where
    R: Resolve,
{
    fn resolve(&self, base: &FileName, src: &str) -> Result<FileName, Error> {
        if src.starts_with(".") {
            if src == ".." || src.starts_with("./") || src.starts_with("../") {
                return self
                    .inner
                    .resolve(base, src)
                    .context("not processed by tsc resolver because it's relative import");
            }
        if src.starts_with('.') && (src == ".." || src.starts_with("./") || src.starts_with("../"))
        {
            return self
                .inner
                .resolve(base, src)
                .context("not processed by tsc resolver because it's relative import");
        }

        match base {

@ -196,8 +195,7 @@ where
    }

    fn compile_regex(src: String) -> Regex {
        static CACHE: Lazy<DashMap<String, Regex, ahash::RandomState>> =
            Lazy::new(|| Default::default());
        static CACHE: Lazy<DashMap<String, Regex, ahash::RandomState>> = Lazy::new(Default::default);

        if !CACHE.contains_key(&*src) {
            // Create capture group
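`Lazy::new(|| Default::default())` → `Lazy::new(Default::default)` is clippy's `redundant_closure`: a closure that only forwards to a function can be replaced by the function path itself. Sketch using once_cell (assumes the `once_cell` crate is a dependency, as it is here):

    use once_cell::sync::Lazy;
    use std::collections::HashMap;

    // The function path is passed directly; no forwarding closure needed.
    static CACHE: Lazy<HashMap<String, String>> = Lazy::new(Default::default);
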
@ -53,7 +53,7 @@ fn main() {
    let output = output.fold_with(&mut hygiene());
    let output = output.fold_with(&mut fixer(None));

    let code = print(cm.clone(), &[output], true);
    let code = print(cm, &[output], true);

    fs::write("output.js", code.as_bytes()).expect("failed to write output");

@ -70,7 +70,7 @@ fn print<N: swc_ecma_codegen::Node>(cm: Lrc<SourceMap>, nodes: &[N], minify: boo
        cfg: swc_ecma_codegen::Config { minify },
        cm: cm.clone(),
        comments: None,
        wr: Box::new(JsWriter::new(cm.clone(), "\n", &mut buf, None)),
        wr: Box::new(JsWriter::new(cm, "\n", &mut buf, None)),
    };

    for n in nodes {
@ -8,7 +8,7 @@ use crate::{
    mode::Mode,
    option::CompressOptions,
    util::{now, unit::CompileUnit, Optional},
    MAX_PAR_DEPTH,
    DISABLE_BUGGY_PASSES, MAX_PAR_DEPTH,
};
#[cfg(feature = "pretty_assertions")]
use pretty_assertions::assert_eq;

@ -437,7 +437,7 @@ where
    noop_visit_mut_type!();

    fn visit_mut_fn_expr(&mut self, n: &mut FnExpr) {
        if false && n.function.span.has_mark(self.marks.standalone) {
        if !DISABLE_BUGGY_PASSES && n.function.span.has_mark(self.marks.standalone) {
            self.optimize_unit_repeatedly(n);
            return;
        }

@ -446,7 +446,8 @@ where
    }

    fn visit_mut_module(&mut self, n: &mut Module) {
        let is_bundle_mode = false && n.span.has_mark(self.marks.bundle_of_standalone);
        let is_bundle_mode =
            !DISABLE_BUGGY_PASSES && n.span.has_mark(self.marks.bundle_of_standalone);

        // Disable
        if is_bundle_mode {
@ -804,10 +804,6 @@ where
                Mergable::Var(_) => break,
                Mergable::Expr(e2) => {
                    if !self.is_skippable_for_seq(Some(a), &*e2) {
                        if cfg!(feature = "debug") && false {
                            tracing::trace!("Cannot skip: {}", dump(&**e2, false));
                        }

                        break;
                    }
@ -27,15 +27,13 @@ impl PrivateNameMangler {
        let new_sym = if let Some(cached) = self.renamed_private.get(&id) {
            cached.clone()
        } else {
            loop {
                let sym = incr_base54(&mut self.private_n).1;
            let sym = incr_base54(&mut self.private_n).1;

                let sym: JsWord = sym.into();
            let sym: JsWord = sym.into();

                self.renamed_private.insert(id.clone(), sym.clone());
            self.renamed_private.insert(id.clone(), sym.clone());

                break sym;
            }
            sym
        };

        private_name.id.sym = new_sym;
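The mangler's `loop { .. break sym; }` wrapper ran its body exactly once, which clippy flags as `never_loop`; the body now stands on its own. Sketch (hypothetical symbol minting):

    fn next_symbol(counter: &mut u32) -> String {
        // Before: loop { let s = mint(counter); break s; }
        let s = format!("v{}", *counter);
        *counter += 1;
        s
    }
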
@ -56,14 +56,7 @@ impl ManglePropertiesState {
    }

    fn can_mangle(&self, name: &JsWord) -> bool {
        if self.unmangleable.contains(name) {
            false
        } else if self.is_reserved(name) {
            false
        } else {
            // TODO only_cache, check if it's a name that doesn't need quotes
            true
        }
        !(self.unmangleable.contains(name) || self.is_reserved(name))
    }

    fn matches_regex_option(&self, name: &JsWord) -> bool {

@ -75,9 +68,7 @@ impl ManglePropertiesState {
    }

    fn should_mangle(&self, name: &JsWord) -> bool {
        if !self.matches_regex_option(name) {
            false
        } else if self.is_reserved(name) {
        if !self.matches_regex_option(name) || self.is_reserved(name) {
            false
        } else {
            self.cache.contains_key(name) || self.names_to_mangle.contains(name)

@ -93,13 +84,11 @@ impl ManglePropertiesState {
        if let Some(cached) = self.cache.get(name) {
            Some(cached.clone())
        } else {
            loop {
                let sym = incr_base54(&mut self.n).1;
            let sym = incr_base54(&mut self.n).1;

                let mangled_name: JsWord = sym.into();
                self.cache.insert(name.clone(), mangled_name.clone());
                return Some(mangled_name);
            }
            let mangled_name: JsWord = sym.into();
            self.cache.insert(name.clone(), mangled_name.clone());
            return Some(mangled_name);
        }
        } else {
            None
@ -63,17 +63,15 @@ fn is_ignored(path: &Path) -> bool {
        return true;
    }

    if env::var("SKIP_GOLDEN").unwrap_or_default() == "1" {
        if GOLDEN.iter().any(|ignored| s.contains(&**ignored)) {
            return true;
        }
    if env::var("SKIP_GOLDEN").unwrap_or_default() == "1"
        && GOLDEN.iter().any(|ignored| s.contains(&**ignored))
    {
        return true;
    }

    if let Ok(one) = env::var("GOLDEN_ONLY") {
        if one == "1" {
            if GOLDEN.iter().all(|golden| !s.contains(&**golden)) {
                return true;
            }
        if one == "1" && GOLDEN.iter().all(|golden| !s.contains(&**golden)) {
            return true;
        }
    }
@ -119,9 +117,9 @@ fn run(

    let disable_hygiene = mangle.is_some() || skip_hygiene;

    let (_module, config) = parse_compressor_config(cm.clone(), &config);
    let (_module, config) = parse_compressor_config(cm.clone(), config);

    let fm = cm.load_file(&input).expect("failed to load input.js");
    let fm = cm.load_file(input).expect("failed to load input.js");
    let comments = SingleThreadedComments::default();

    eprintln!("---- {} -----\n{}", Color::Green.paint("Input"), fm.src);

@ -164,7 +162,7 @@ fn run(
    let program = parser
        .parse_module()
        .map_err(|err| {
            err.into_diagnostic(&handler).emit();
            err.into_diagnostic(handler).emit();
        })
        .map(|module| module.fold_with(&mut resolver_with_mark(top_level_mark)));

@ -179,7 +177,7 @@ fn run(
    let optimization_start = Instant::now();
    let mut output = optimize(
        program,
        cm.clone(),
        cm,
        Some(&comments),
        None,
        &MinifyOptions {

@ -258,7 +256,7 @@ fn find_config(dir: &Path) -> String {
#[testing::fixture("tests/compress/fixture/**/input.js")]
fn base_fixture(input: PathBuf) {
    let dir = input.parent().unwrap();
    let config = find_config(&dir);
    let config = find_config(dir);
    eprintln!("---- {} -----\n{}", Color::Green.paint("Config"), config);

    testing::run_test2(false, |cm, handler| {

@ -268,7 +266,7 @@ fn base_fixture(input: PathBuf) {
            None => return Ok(()),
        };

        let output = print(cm.clone(), &[output_module.clone()], false, false);
        let output = print(cm, &[output_module], false, false);

        eprintln!("---- {} -----\n{}", Color::Green.paint("Ourput"), output);

@ -297,7 +295,7 @@ fn projects(input: PathBuf) {
            None => return Ok(()),
        };

        let output = print(cm.clone(), &[output_module.clone()], false, false);
        let output = print(cm, &[output_module], false, false);

        eprintln!("---- {} -----\n{}", Color::Green.paint("Ourput"), output);

@ -322,7 +320,7 @@ fn projects(input: PathBuf) {
fn base_exec(input: PathBuf) {
    let dir = input.parent().unwrap();

    let config = find_config(&dir);
    let config = find_config(dir);
    eprintln!("---- {} -----\n{}", Color::Green.paint("Config"), config);

    let mangle = dir.join("mangle.json");

@ -351,7 +349,7 @@ fn base_exec(input: PathBuf) {

        let output = run(cm.clone(), &handler, &input, &config, mangle, false);
        let output = output.expect("Parsing in base test should not fail");
        let output = print(cm.clone(), &[output], false, false);
        let output = print(cm, &[output], false, false);

        eprintln!(
            "---- {} -----\n{}",

@ -468,12 +466,7 @@ fn fixture(input: PathBuf) {
        }
    }

    let output_str = print(
        cm.clone(),
        &[drop_span(output_module.clone())],
        false,
        false,
    );
    let output_str = print(cm, &[drop_span(output_module)], false, false);

    if env::var("UPDATE").map(|s| s == "1").unwrap_or(false) {
        let _ = catch_unwind(|| {

@ -506,7 +499,7 @@ fn print<N: swc_ecma_codegen::Node>(

    let mut emitter = Emitter {
        cfg: swc_ecma_codegen::Config { minify },
        cm: cm.clone(),
        cm,
        comments: None,
        wr,
    };

@ -1357,7 +1350,7 @@ impl Visit for Shower<'_> {
#[testing::fixture("tests/full/**/input.js")]
fn full(input: PathBuf) {
    let dir = input.parent().unwrap();
    let config = find_config(&dir);
    let config = find_config(dir);
    eprintln!("---- {} -----\n{}", Color::Green.paint("Config"), config);

    testing::run_test2(false, |cm, handler| {

@ -1377,7 +1370,7 @@ fn full(input: PathBuf) {
            None => return Ok(()),
        };

        let output = print(cm.clone(), &[output_module.clone()], true, true);
        let output = print(cm, &[output_module], true, true);

        eprintln!("---- {} -----\n{}", Color::Green.paint("Output"), output);
@ -101,7 +101,7 @@ impl PartialInliner {
            eval: Default::default(),
        };

        op(cm.clone(), module, &mut inliner);
        op(cm, module, &mut inliner);

        Ok(())
    })

@ -151,7 +151,7 @@ impl PartialInliner {
            cfg: Default::default(),
            cm: cm.clone(),
            comments: None,
            wr: Box::new(JsWriter::new(cm.clone(), "\n", &mut buf, None)),
            wr: Box::new(JsWriter::new(cm, "\n", &mut buf, None)),
        };

        emitter.emit_module(&module).unwrap();

@ -39,7 +39,7 @@ fn print(cm: Lrc<SourceMap>, m: &Module, minify: bool) -> String {

    let mut emitter = Emitter {
        cfg: swc_ecma_codegen::Config { minify },
        cm: cm.clone(),
        cm,
        comments: None,
        wr,
    };

@ -77,7 +77,7 @@ fn run(
        .thread_name(|i| format!("rayon-{}", i + 1))
        .build_global();

    let fm = cm.load_file(&input).expect("failed to load input.js");
    let fm = cm.load_file(input).expect("failed to load input.js");
    let comments = SingleThreadedComments::default();

    eprintln!("---- {} -----\n{}", Color::Green.paint("Input"), fm.src);

@ -95,7 +95,7 @@ fn run(
    let program = parser
        .parse_module()
        .map_err(|err| {
            err.into_diagnostic(&handler).emit();
            err.into_diagnostic(handler).emit();
        })
        .map(|module| module.fold_with(&mut resolver_with_mark(top_level_mark)));

@ -109,7 +109,7 @@ fn run(

    let output = optimize(
        program,
        cm.clone(),
        cm,
        Some(&comments),
        None,
        &MinifyOptions {

@ -215,7 +215,7 @@ fn fixture(input: PathBuf) {
        &ExtraOptions { top_level_mark },
    );

    let mangled = print(cm.clone(), &m, false);
    let mangled = print(cm, &m, false);

    NormalizedOutput::from(mangled)
        .compare_to_file(input.parent().unwrap().join("output.js"))

@ -253,7 +253,7 @@ fn exec(input: PathBuf) {
    );

    let output = output.expect("Parsing in base test should not fail");
    let output = print(cm.clone(), &output, false);
    let output = print(cm, &output, false);

    eprintln!(
        "---- {} -----\n{}",
@ -370,26 +370,15 @@ impl<'a, I: Input> Lexer<'a, I> {
            }

            if radix == 16 {
                match c.unwrap() {
                    '.' | 'X' | '_' | 'x' => true,
                    _ => false,
                }
                matches!(c.unwrap(), '.' | 'X' | '_' | 'x')
            } else {
                match c.unwrap() {
                    '.' | 'B' | 'E' | 'O' | '_' | 'b' | 'e' | 'o' => true,
                    _ => false,
                }
                matches!(c.unwrap(), '.' | 'B' | 'E' | 'O' | '_' | 'b' | 'e' | 'o')
            }
        };

        let next = self.input.peek();

        if !is_allowed(next) {
            self.emit_error(
                start,
                SyntaxError::NumericSeparatorIsAllowedOnlyBetweenTwoDigits,
            );
        } else if is_forbidden(prev) || is_forbidden(next) {
        if !is_allowed(next) || is_forbidden(prev) || is_forbidden(next) {
            self.emit_error(
                start,
                SyntaxError::NumericSeparatorIsAllowedOnlyBetweenTwoDigits,
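The lexer edit uses clippy's `match_like_matches_macro`: a `match` whose arms only produce `true`/`false` becomes a `matches!` invocation. Sketch (hypothetical predicate):

    fn is_hex_separator_neighbor(c: char) -> bool {
        // Before: match c { '.' | 'X' | '_' | 'x' => true, _ => false }
        matches!(c, '.' | 'X' | '_' | 'x')
    }
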
@ -86,7 +86,7 @@ impl Visit for CommentPrinter<'_> {
        self.comments.with_leading(n.lo, |comments| {
            for c in comments {
                DiagnosticBuilder::new(
                    &self.handler,
                    self.handler,
                    swc_common::errors::Level::Note,
                    "Leading (lo)",
                )

@ -98,7 +98,7 @@ impl Visit for CommentPrinter<'_> {
        self.comments.with_trailing(n.lo, |comments| {
            for c in comments {
                DiagnosticBuilder::new(
                    &self.handler,
                    self.handler,
                    swc_common::errors::Level::Note,
                    "Trailing (lo)",
                )

@ -110,7 +110,7 @@ impl Visit for CommentPrinter<'_> {
        self.comments.with_leading(n.hi - BytePos(1), |comments| {
            for c in comments {
                DiagnosticBuilder::new(
                    &self.handler,
                    self.handler,
                    swc_common::errors::Level::Note,
                    "Leading (hi)",
                )

@ -125,7 +125,7 @@ impl Visit for CommentPrinter<'_> {
        self.comments.with_trailing(n.hi, |comments| {
            for c in comments {
                DiagnosticBuilder::new(
                    &self.handler,
                    self.handler,
                    swc_common::errors::Level::Note,
                    "Trailing (hi)",
                )
@ -1,5 +1,4 @@
use pretty_assertions::assert_eq;
use serde_json;
use std::{
    fs::read_to_string,
    path::{Path, PathBuf},
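Dropping `use serde_json;` follows clippy's `single_component_path_imports`: on the 2018 edition, a one-segment `use` of a crate adds nothing, since the crate path already works. Sketch (assumes `serde_json` as a dependency):

    // Before (2018 edition):
    // use serde_json;  // redundant single-component import
    // After: just use the crate path directly.
    fn to_json(value: &serde_json::Value) -> String {
        serde_json::to_string(value).unwrap()
    }
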
@ -39,7 +38,7 @@ where
        let res = f(&mut p).map_err(|e| e.into_diagnostic(handler).emit());

        for e in p.take_errors() {
            e.into_diagnostic(&handler).emit();
            e.into_diagnostic(handler).emit();
        }

        res

@ -56,7 +55,7 @@ fn references(entry: PathBuf) {
        );

        // Parse source
        let module = parse_module(cm.clone(), handler, &entry)?.fold_with(&mut Normalizer);
        let module = parse_module(cm, handler, &entry)?.fold_with(&mut Normalizer);
        let json =
            serde_json::to_string_pretty(&module).expect("failed to serialize module as json");
        if StdErr::from(json.clone())
@ -319,7 +319,7 @@ fn with_parser<F, Ret>(file_name: &Path, f: F) -> Result<Ret, StdErr>
where
    F: FnOnce(&mut Parser<Lexer<StringInput<'_>>>) -> PResult<Ret>,
{
    let output = ::testing::run_test(false, |cm, handler| {
    ::testing::run_test(false, |cm, handler| {
        let fm = cm
            .load_file(file_name)
            .unwrap_or_else(|e| panic!("failed to load {}: {}", file_name.display(), e));

@ -329,7 +329,7 @@ where
        let res = f(&mut p).map_err(|e| e.into_diagnostic(handler).emit());

        for e in p.take_errors() {
            e.into_diagnostic(&handler).emit();
            e.into_diagnostic(handler).emit();
        }

        if handler.has_errors() {

@ -337,9 +337,7 @@ where
        }

        res
    });

    output
    })
}

#[test]
@ -43,7 +43,7 @@ fn shifted(file: PathBuf) {
        let json =
            serde_json::to_string_pretty(&program).expect("failed to serialize module as json");

        if StdErr::from(json.clone())
        if StdErr::from(json)
            .compare_to_file(&format!("{}.json", file.display()))
            .is_err()
        {

@ -188,7 +188,8 @@ where
    F: FnOnce(&mut Parser<Lexer<StringInput<'_>>>, &SingleThreadedComments) -> PResult<Ret>,
{
    let fname = file_name.display().to_string();
    let output = ::testing::run_test(treat_error_as_bug, |cm, handler| {

    ::testing::run_test(treat_error_as_bug, |cm, handler| {
        if shift {
            cm.new_source_file(FileName::Anon, "".into());
        }

@ -214,10 +215,10 @@ where

        let mut p = Parser::new_from(lexer);

        let res = f(&mut p, &comments).map_err(|e| e.into_diagnostic(&handler).emit());
        let res = f(&mut p, &comments).map_err(|e| e.into_diagnostic(handler).emit());

        for err in p.take_errors() {
            err.into_diagnostic(&handler).emit();
            err.into_diagnostic(handler).emit();
        }

        if handler.has_errors() {

@ -225,9 +226,7 @@ where
        }

        res
    });

    output
    })
}

#[testing::fixture("tests/typescript-errors/**/*.ts")]
@ -16,11 +16,11 @@ fn run(b: &mut Bencher, src: &str, config: Config) {
    let mut parser = Parser::new(Syntax::default(), StringInput::from(&*fm), None);
    let module = parser
        .parse_module()
        .map_err(|e| e.into_diagnostic(&handler).emit())
        .map_err(|e| e.into_diagnostic(handler).emit())
        .unwrap();

    for e in parser.take_errors() {
        e.into_diagnostic(&handler).emit()
        e.into_diagnostic(handler).emit()
    }

    let mut folder = preset_env(
@ -30,13 +30,13 @@ impl Entry {
            imports: Default::default(),
        };
        if is_any_target || is_web_target {
            v.imports.insert("web.timers".into());
            v.imports.insert("web.immediate".into());
            v.imports.insert("web.dom.iterable".into());
            v.imports.insert("web.timers");
            v.imports.insert("web.immediate");
            v.imports.insert("web.dom.iterable");
        }

        if regenerator {
            v.imports.insert("regenerator-runtime/runtime.js".into());
            v.imports.insert("regenerator-runtime/runtime.js");
        }

        v

@ -50,7 +50,7 @@ impl Entry {
        }

        for (feature, version) in BUILTINS.iter() {
            self.add_inner(&feature, *version);
            self.add_inner(feature, *version);
        }

        true

@ -110,10 +109,9 @@ impl Fold for Entry {
            },
            _ => false,
        }
            && self.add_all("@swc/polyfill")
        {
            if self.add_all("@swc/polyfill") {
                return None;
            }
            return None;
        }
    }

@ -41,16 +41,15 @@ impl UsageVisitor {
        // "web.dom.iterable"]); }
        // v

        let v = Self {
            is_any_target: target.is_any_target(),
            target,
            required: Default::default(),
        };
        //if target.is_any_target() || target.node.is_none() {
        //    v.add(&["web.timers", "web.immediate", "web.dom.iterable"]);
        //}

        v
        Self {
            is_any_target: target.is_any_target(),
            target,
            required: Default::default(),
        }
    }

    /// Add imports

@ -77,14 +76,14 @@ impl UsageVisitor {

    fn add_property_deps_inner(&mut self, obj: Option<&JsWord>, prop: &JsWord) {
        if let Some(obj) = obj {
            if let Some(map) = STATIC_PROPERTIES.get_data(&obj) {
                if let Some(features) = map.get_data(&prop) {
            if let Some(map) = STATIC_PROPERTIES.get_data(obj) {
                if let Some(features) = map.get_data(prop) {
                    self.add(features);
                }
            }
        }

        if let Some(features) = INSTANCE_PROPERTIES.get_data(&prop) {
        if let Some(features) = INSTANCE_PROPERTIES.get_data(prop) {
            self.add(features);
        }
    }

@ -141,7 +140,7 @@ impl Visit for UsageVisitor {
        if let Some(ref init) = d.init {
            match d.name {
                // const { keys, values } = Object
                Pat::Object(ref o) => self.visit_object_pat_props(&init, &o.props),
                Pat::Object(ref o) => self.visit_object_pat_props(init, &o.props),
                _ => {}
            }
        } else {

@ -309,7 +308,7 @@ impl Visit for UsageVisitor {
            Expr::Member(MemberExpr {
                prop: MemberProp::Computed(ComputedPropName { expr, .. }),
                ..
            }) if is_symbol_iterator(&expr) => true,
            }) if is_symbol_iterator(expr) => true,
            _ => false,
        },
        _ => false,

@ -104,7 +104,7 @@ impl UsageVisitor {
        self.add_builtin(prop);
    }

    if let Some(map) = STATIC_PROPERTIES.get_data(&obj) {
        if let Some(features) = map.get_data(&prop) {
    if let Some(map) = STATIC_PROPERTIES.get_data(obj) {
        if let Some(features) = map.get_data(prop) {
            self.add(features);
        }
    }

    if let Some(features) = INSTANCE_PROPERTIES.get_data(&prop) {
    if let Some(features) = INSTANCE_PROPERTIES.get_data(prop) {
        self.add(features);
    }

@ -245,7 +245,7 @@ impl Visit for UsageVisitor {
        if let Some(ref init) = d.init {
            match d.name {
                // const { keys, values } = Object
                Pat::Object(ref o) => self.visit_object_pat_props(&init, &o.props),
                Pat::Object(ref o) => self.visit_object_pat_props(init, &o.props),
                _ => {}
            }
        } else {
Some files were not shown because too many files have changed in this diff.