Mirror of https://github.com/swc-project/swc.git, synced 2024-12-23 21:54:36 +03:00
swc_bundler (#943)
swc_bundler:
- Split from spack

swc_ecma_parser:
- Fix unexpected EOF problem which occurs if the log level is trace

swc_ecma_transforms:
- Fix bugs of the dce pass

spack:
- Support cyclic dependencies
parent 4ab3c58f6a
commit 2fedf32747
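For orientation, the sketch below condenses how the new `swc_bundler` crate is driven, based on `bundler/examples/path.rs` added in this commit. `PathLoader` and `PathResolver` are that example's own `Load`/`Resolve` implementations (defined further down in this diff), not part of the published API, and the signatures reflect this revision only.

```rust
use swc_bundler::{Bundler, Config};
use swc_common::{sync::Lrc, FileName, FilePathMapping, Globals, SourceMap};

fn main() {
    let globals = Globals::new();
    let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));

    // PathLoader / PathResolver: the example's Load/Resolve impls (see below).
    let bundler = Bundler::new(
        &globals,
        cm.clone(),
        PathLoader { cm: cm.clone() },
        PathResolver,
        Config {
            require: true,            // also follow `require(...)` calls
            external_modules: vec![], // nothing is treated as external here
        },
    );

    // One entry per output bundle, keyed by the bundle name.
    let mut entries = fxhash::FxHashMap::default();
    entries.insert("main".to_string(), FileName::Real("assets/main.js".into()));

    // Each returned Bundle carries the merged swc_ecma_ast::Module, ready to be
    // emitted with swc_ecma_codegen (the example prints it to stdout).
    let bundles = bundler.bundle(entries).expect("failed to bundle");
    assert_eq!(bundles.len(), 1);
}
```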
.github/workflows/compilation.yml (vendored, 4 lines changed)
@@ -42,3 +42,7 @@ jobs:
      - name: Check swc_ecma_transforms
        run: |
          (cd ecmascript/transforms && cargo hack check --feature-powerset --no-dev-deps)

      - name: Check swc_bundler
        run: |
          (cd bundler && cargo hack check --feature-powerset --no-dev-deps)
@@ -15,6 +15,7 @@ package-lock.json
wasm/

# Reduce package size
**/tests.rs
**/tests/
**/benches/
**/target/
@@ -31,7 +31,7 @@ serde_json = "1"
once_cell = "1"
regex = "1"
either = "1"
-dashmap = "=3.5.1"
+dashmap = "3"
sourcemap = "6"
base64 = "0.12.0"
bundler/Cargo.toml (new file, 39 lines)
@@ -0,0 +1,39 @@
[package]
name = "swc_bundler"
version = "0.1.0"
authors = ["강동윤 <kdy1997.dev@gmail.com>"]
license = "Apache-2.0/MIT"
repository = "https://github.com/swc-project/swc.git"
documentation = "https://swc-project.github.io/rustdoc/swc_bundler/"
description = "Very fast ecmascript bundler"
edition = "2018"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[features]
default = []
concurrent = ["swc_common/concurrent", "dashmap", "rayon"]


[dependencies]
swc_atoms = { version = "0.2", path = "../atoms" }
swc_common = { version = "0.9", path = "../common" }
swc_ecma_ast = { version = "0.28", path = "../ecmascript/ast" }
swc_ecma_codegen = { version = "0.31", path = "../ecmascript/codegen" }
swc_ecma_parser = { version = "0.33", path = "../ecmascript/parser" }
swc_ecma_transforms = { version = "0.19", path = "../ecmascript/transforms" }
swc_ecma_utils = { version = "0.17", path = "../ecmascript/utils" }
swc_ecma_visit = { version = "0.13", path = "../ecmascript/visit" }
anyhow = "1"
crc = "1.8"
radix_fmt = "1"
relative-path = "1.2"
log = "0.4"
petgraph = "0.5"
once_cell = "1"
dashmap = { version = "3", optional = true }
rayon = { version = "1", optional = true }
fxhash = "0.2.1"
is-macro = "0.1"

[dev-dependencies]
testing = { path = "../testing" }
bundler/README.md (new file, 13 lines)
@@ -0,0 +1,13 @@
# swc_bundler

Bundler for the swc project.

## Features

- Clean merging (generated code is easy to optimize)
- Parallel file loading
- Tree shaking
- Common js support (aka `require`)
- Circular imports

Tests live at `/spack`.
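The feature list above maps onto the `Config` struct introduced in `bundler/src/bundler/mod.rs` later in this commit. A minimal sketch of the two knobs, assuming this revision's field names; the `"fs"` and `"path"` entries are purely illustrative:

```rust
use swc_atoms::JsWord;
use swc_bundler::Config;

fn example_config() -> Config {
    Config {
        // "Common js support (aka `require`)": when true, the bundler also
        // follows `require("...")` calls, not only ES module imports.
        require: true,
        // Import specifiers listed here stay as external imports instead of
        // being resolved and inlined; "fs" and "path" are only illustrative.
        external_modules: vec![JsWord::from("fs"), JsWord::from("path")],
    }
}
```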
bundler/assets/a.js (new file, 7 lines)
@@ -0,0 +1,7 @@
export const FOO = 1;


export class A {
    foo() {
    }
}
bundler/assets/main.js (new file, 3 lines)
@@ -0,0 +1,3 @@
import { A, FOO } from './a';

console.log(A, FOO);
bundler/examples/path.rs (new file, 112 lines)
@ -0,0 +1,112 @@
|
||||
use anyhow::Error;
|
||||
use fxhash::FxHashMap;
|
||||
use std::io::stdout;
|
||||
use swc_bundler::{BundleKind, Bundler, Config, Load, Resolve};
|
||||
use swc_common::{sync::Lrc, FileName, FilePathMapping, Globals, SourceMap};
|
||||
use swc_ecma_codegen::{text_writer::JsWriter, Emitter};
|
||||
use swc_ecma_parser::{lexer::Lexer, EsConfig, Parser, StringInput, Syntax};
|
||||
|
||||
fn main() {
|
||||
testing::init();
|
||||
|
||||
let globals = Globals::new();
|
||||
let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
|
||||
// This example does not use core modules.
|
||||
let external_modules = vec![];
|
||||
let bundler = Bundler::new(
|
||||
&globals,
|
||||
cm.clone(),
|
||||
PathLoader { cm: cm.clone() },
|
||||
PathResolver,
|
||||
Config {
|
||||
require: true,
|
||||
external_modules,
|
||||
},
|
||||
);
|
||||
let mut entries = FxHashMap::default();
|
||||
entries.insert("main".to_string(), FileName::Real("assets/main.js".into()));
|
||||
|
||||
let mut bundles = bundler.bundle(entries).expect("failed to bundle");
|
||||
assert_eq!(
|
||||
bundles.len(),
|
||||
1,
|
||||
"There's no conditional / dynamic imports and we provided only one entry"
|
||||
);
|
||||
let bundle = bundles.pop().unwrap();
|
||||
assert_eq!(
|
||||
bundle.kind,
|
||||
BundleKind::Named {
|
||||
name: "main".into()
|
||||
},
|
||||
"We provided it"
|
||||
);
|
||||
|
||||
let wr = stdout();
|
||||
let mut emitter = Emitter {
|
||||
cfg: swc_ecma_codegen::Config { minify: false },
|
||||
cm: cm.clone(),
|
||||
comments: None,
|
||||
wr: Box::new(JsWriter::new(cm.clone(), "\n", wr.lock(), None)),
|
||||
handlers: Box::new(Handllers),
|
||||
};
|
||||
|
||||
emitter.emit_module(&bundle.module).unwrap();
|
||||
}
|
||||
|
||||
/// I should remove this...
|
||||
struct Handllers;
|
||||
|
||||
impl swc_ecma_codegen::Handlers for Handllers {}
|
||||
|
||||
struct PathLoader {
|
||||
cm: Lrc<SourceMap>,
|
||||
}
|
||||
|
||||
impl Load for PathLoader {
|
||||
fn load(
|
||||
&self,
|
||||
file: &FileName,
|
||||
) -> Result<(Lrc<swc_common::SourceFile>, swc_ecma_ast::Module), Error> {
|
||||
let file = match file {
|
||||
FileName::Real(v) => v,
|
||||
_ => unreachable!(),
|
||||
};
|
||||
|
||||
let fm = self.cm.load_file(file)?;
|
||||
let lexer = Lexer::new(
|
||||
Syntax::Es(EsConfig {
|
||||
..Default::default()
|
||||
}),
|
||||
Default::default(),
|
||||
StringInput::from(&*fm),
|
||||
None,
|
||||
);
|
||||
|
||||
let mut parser = Parser::new_from(lexer);
|
||||
let module = parser.parse_module().expect("This should not happen");
|
||||
|
||||
Ok((fm, module))
|
||||
}
|
||||
}
|
||||
struct PathResolver;
|
||||
|
||||
impl Resolve for PathResolver {
|
||||
fn resolve(&self, base: &FileName, module_specifier: &str) -> Result<FileName, Error> {
|
||||
assert!(
|
||||
module_specifier.starts_with("."),
|
||||
"We are not using node_modules within this example"
|
||||
);
|
||||
|
||||
let base = match base {
|
||||
FileName::Real(v) => v,
|
||||
_ => unreachable!(),
|
||||
};
|
||||
|
||||
Ok(FileName::Real(
|
||||
base.parent()
|
||||
.unwrap()
|
||||
.join(module_specifier)
|
||||
.with_extension("js"),
|
||||
))
|
||||
}
|
||||
}
|
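Assuming the standard Cargo layout, the example above can be run with `cargo run --example path` from the `bundler` directory; it bundles `assets/main.js` (which imports `assets/a.js`, both added in this commit) and writes the merged module to stdout.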
bundler/src/bundler/chunk/circular.rs (new file, 242 lines)
@ -0,0 +1,242 @@
|
||||
use super::merge::{LocalMarker, Unexporter};
|
||||
use crate::{bundler::load::TransformedModule, Bundler, Load, ModuleId, Resolve};
|
||||
use hygiene::top_level_ident_folder;
|
||||
use std::iter::once;
|
||||
use swc_common::DUMMY_SP;
|
||||
use swc_ecma_ast::*;
|
||||
use swc_ecma_visit::{FoldWith, Node, Visit, VisitWith};
|
||||
|
||||
mod hygiene;
|
||||
|
||||
/// Circular imports are hard to handle.
|
||||
///
|
||||
/// We use some dedicated method to handle circular dependencies.
|
||||
impl<L, R> Bundler<'_, L, R>
|
||||
where
|
||||
L: Load,
|
||||
R: Resolve,
|
||||
{
|
||||
pub(super) fn merge_circular_modules(
|
||||
&self,
|
||||
entry_id: ModuleId,
|
||||
circular_modules: &mut Vec<ModuleId>,
|
||||
) -> Module {
|
||||
assert!(
|
||||
circular_modules.len() >= 1,
|
||||
"# of circular modules should be 2 or greater than 2 including entry. Got {:?}",
|
||||
circular_modules
|
||||
);
|
||||
debug_assert!(
|
||||
self.scope.is_circular(entry_id),
|
||||
"merge_circular_modules should only be called for circular entries"
|
||||
);
|
||||
|
||||
let entry_module = self.scope.get_module(entry_id).unwrap();
|
||||
|
||||
let modules = circular_modules
|
||||
.iter()
|
||||
.chain(once(&entry_id))
|
||||
.map(|&id| self.scope.get_module(id).unwrap())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let mut entry = self.process_circular_module(&modules, entry_module);
|
||||
|
||||
for &dep in &*circular_modules {
|
||||
let new_module = self.merge_two_circular_modules(&modules, entry, dep);
|
||||
|
||||
entry = new_module;
|
||||
}
|
||||
|
||||
// All circular modules are inlined
|
||||
circular_modules.clear();
|
||||
circular_modules.push(entry_id);
|
||||
|
||||
entry
|
||||
}
|
||||
|
||||
/// Merges `a` and `b` into one module.
|
||||
fn merge_two_circular_modules(
|
||||
&self,
|
||||
circular_modules: &[TransformedModule],
|
||||
mut entry: Module,
|
||||
dep: ModuleId,
|
||||
) -> Module {
|
||||
self.run(|| {
|
||||
// print_hygiene("START: merge_two_circular_modules", &self.cm, &entry);
|
||||
|
||||
let dep_info = self.scope.get_module(dep).unwrap();
|
||||
let mut dep = self.process_circular_module(circular_modules, dep_info);
|
||||
|
||||
dep = dep.fold_with(&mut Unexporter);
|
||||
|
||||
// Merge code
|
||||
entry.body = merge_respecting_order(entry.body, dep.body);
|
||||
|
||||
// print_hygiene("END :merge_two_circular_modules", &self.cm, &entry);
|
||||
entry
|
||||
})
|
||||
}
|
||||
|
||||
///
|
||||
/// - Remove circular imports
|
||||
fn process_circular_module(
|
||||
&self,
|
||||
circular_modules: &[TransformedModule],
|
||||
entry: TransformedModule,
|
||||
) -> Module {
|
||||
let mut module = (*entry.module).clone();
|
||||
// print_hygiene("START: process_circular_module", &self.cm, &module);
|
||||
|
||||
module.body.retain(|item| {
|
||||
match item {
|
||||
ModuleItem::ModuleDecl(ModuleDecl::Import(import)) => {
|
||||
// Drop if it's one of circular import
|
||||
for circular_module in circular_modules {
|
||||
if entry
|
||||
.imports
|
||||
.specifiers
|
||||
.iter()
|
||||
.any(|v| v.0.module_id == circular_module.id && v.0.src == import.src)
|
||||
{
|
||||
log::debug!("Dropping circular import");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
true
|
||||
});
|
||||
|
||||
for circular_module in circular_modules {
|
||||
for (src, specifiers) in entry.imports.specifiers.iter() {
|
||||
if circular_module.id == src.module_id {
|
||||
module = module.fold_with(&mut LocalMarker {
|
||||
mark: circular_module.mark(),
|
||||
specifiers: &specifiers,
|
||||
excluded: vec![],
|
||||
});
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module = module.fold_with(&mut top_level_ident_folder(
|
||||
self.top_level_mark,
|
||||
entry.mark(),
|
||||
));
|
||||
|
||||
// print_hygiene("END: process_circular_module", &self.cm, &module);
|
||||
|
||||
module
|
||||
}
|
||||
}
|
||||
|
||||
/// Originally, this method should create a dependency graph, but
|
||||
fn merge_respecting_order(mut entry: Vec<ModuleItem>, mut dep: Vec<ModuleItem>) -> Vec<ModuleItem> {
|
||||
let mut new = Vec::with_capacity(entry.len() + dep.len());
|
||||
|
||||
// While looping over items from entry, we check for dependency.
|
||||
loop {
|
||||
if entry.is_empty() {
|
||||
log::debug!("entry is empty");
|
||||
break;
|
||||
}
|
||||
let item = entry.drain(..=0).next().unwrap();
|
||||
|
||||
// Everything from dep is injected
|
||||
if dep.is_empty() {
|
||||
log::trace!("dep is empty");
|
||||
new.push(item);
|
||||
new.extend(entry);
|
||||
break;
|
||||
}
|
||||
|
||||
// If the code of entry depends on dependency, we insert dependency source code
|
||||
// at the position.
|
||||
if let Some(pos) = dependency_index(&item, &dep) {
|
||||
log::trace!("Found depndency: {}", pos);
|
||||
|
||||
new.extend(dep.drain(..=pos));
|
||||
new.push(item);
|
||||
continue;
|
||||
}
|
||||
|
||||
// We checked the length of `dep`
|
||||
if let Some(pos) = dependency_index(&dep[0], &[item.clone()]) {
|
||||
log::trace!("Found reverse depndency (index[0]): {}", pos);
|
||||
|
||||
new.extend(entry.drain(..=pos));
|
||||
new.extend(dep.drain(..=0));
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(pos) = dependency_index(&dep[0], &entry) {
|
||||
log::trace!("Found reverse depndency: {}", pos);
|
||||
|
||||
new.extend(entry.drain(..=pos));
|
||||
new.extend(dep.drain(..=0));
|
||||
continue;
|
||||
}
|
||||
|
||||
log::debug!("No dependency");
|
||||
|
||||
new.push(item);
|
||||
}
|
||||
|
||||
// Append remaining statements.
|
||||
new.extend(dep);
|
||||
|
||||
new
|
||||
}
|
||||
|
||||
fn dependency_index(item: &ModuleItem, deps: &[ModuleItem]) -> Option<usize> {
|
||||
let mut v = DepFinder { deps, idx: None };
|
||||
item.visit_with(&Invalid { span: DUMMY_SP }, &mut v);
|
||||
v.idx
|
||||
}
|
||||
|
||||
struct DepFinder<'a> {
|
||||
deps: &'a [ModuleItem],
|
||||
idx: Option<usize>,
|
||||
}
|
||||
|
||||
impl Visit for DepFinder<'_> {
|
||||
fn visit_ident(&mut self, i: &Ident, _: &dyn Node) {
|
||||
if self.idx.is_some() {
|
||||
return;
|
||||
}
|
||||
|
||||
for (idx, dep) in self.deps.iter().enumerate() {
|
||||
match dep {
|
||||
ModuleItem::Stmt(Stmt::Decl(Decl::Class(decl))) => {
|
||||
log::trace!(
|
||||
"Decl (from dep) = {}{:?}, Ident = {}{:?}",
|
||||
decl.ident.sym,
|
||||
decl.ident.span.ctxt,
|
||||
i.sym,
|
||||
i.span.ctxt
|
||||
);
|
||||
if decl.ident.sym == i.sym && decl.ident.span.ctxt == i.span.ctxt {
|
||||
self.idx = Some(idx);
|
||||
break;
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_member_expr(&mut self, e: &MemberExpr, _: &dyn Node) {
|
||||
e.obj.visit_with(e as _, self);
|
||||
|
||||
if e.computed {
|
||||
e.prop.visit_with(e as _, self)
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_class_member(&mut self, _: &ClassMember, _: &dyn Node) {}
|
||||
fn visit_function(&mut self, _: &Function, _: &dyn Node) {}
|
||||
fn visit_arrow_expr(&mut self, _: &ArrowExpr, _: &dyn Node) {}
|
||||
}
|
bundler/src/bundler/chunk/circular/hygiene.rs (new file, 164 lines)
@ -0,0 +1,164 @@
|
||||
use crate::id::Id;
|
||||
use fxhash::FxHashSet;
|
||||
use swc_common::{Mark, SyntaxContext};
|
||||
use swc_ecma_ast::*;
|
||||
use swc_ecma_visit::{Fold, FoldWith};
|
||||
|
||||
pub fn top_level_ident_folder(top_level_mark: Mark, module_mark: Mark) -> impl 'static + Fold {
|
||||
MergeFolder {
|
||||
scope: Default::default(),
|
||||
top_level_mark,
|
||||
module_mark,
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct Scope<'a> {
|
||||
parent: Option<&'a Scope<'a>>,
|
||||
binding_idents: FxHashSet<Id>,
|
||||
}
|
||||
|
||||
/// Modifies mark of top-level identifiers so they can be merged cleanly.
|
||||
struct MergeFolder<'a> {
|
||||
scope: Scope<'a>,
|
||||
/// Global marker for the top-level identifiers
|
||||
top_level_mark: Mark,
|
||||
/// The marker for the module's top-level identifiers.
|
||||
module_mark: Mark,
|
||||
}
|
||||
|
||||
impl<'a> Scope<'a> {
|
||||
pub fn new(parent: Option<&'a Scope<'a>>) -> Self {
|
||||
Scope {
|
||||
parent,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn contains(&self, i: &Ident) -> bool {
|
||||
if self.binding_idents.contains(&Id::from(i)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
self.parent.map(|p| p.contains(i)).unwrap_or(false)
|
||||
}
|
||||
}
|
||||
|
||||
/// TODO: This is incomplete
|
||||
impl<'a> MergeFolder<'a> {
|
||||
fn fold_bindine_ident(&mut self, mut i: Ident) -> Ident {
|
||||
log::trace!("BindingIdent: {}{:?}", i.sym, i.span.ctxt);
|
||||
|
||||
let mut ctxt = i.span.clone();
|
||||
if self.top_level_mark == ctxt.remove_mark() {
|
||||
i.span = i
|
||||
.span
|
||||
.with_ctxt(SyntaxContext::empty().apply_mark(self.module_mark));
|
||||
}
|
||||
|
||||
self.scope.binding_idents.insert((&i).into());
|
||||
i
|
||||
}
|
||||
|
||||
fn fold_ref_ident(&mut self, mut i: Ident) -> Ident {
|
||||
// Skip reference to globals.
|
||||
if !self.scope.contains(&i) {
|
||||
// eprintln!("Preserving {}{:?}", i.sym, i.span.ctxt);
|
||||
return i;
|
||||
}
|
||||
log::trace!("Changing context of ident ref: {}{:?}", i.sym, i.span.ctxt);
|
||||
|
||||
let mut ctxt = i.span.clone();
|
||||
if self.top_level_mark == ctxt.remove_mark() {
|
||||
i.span = i
|
||||
.span
|
||||
.with_ctxt(SyntaxContext::empty().apply_mark(self.module_mark));
|
||||
}
|
||||
i
|
||||
}
|
||||
|
||||
fn child(&'a self) -> MergeFolder<'a> {
|
||||
MergeFolder {
|
||||
top_level_mark: self.top_level_mark,
|
||||
module_mark: self.module_mark,
|
||||
scope: Scope::new(Some(&self.scope)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Fold for MergeFolder<'_> {
|
||||
fn fold_class_decl(&mut self, c: ClassDecl) -> ClassDecl {
|
||||
ClassDecl {
|
||||
ident: self.fold_bindine_ident(c.ident),
|
||||
class: c.class.fold_with(self),
|
||||
..c
|
||||
}
|
||||
}
|
||||
|
||||
fn fold_class_expr(&mut self, c: ClassExpr) -> ClassExpr {
|
||||
ClassExpr {
|
||||
ident: c.ident.map(|i| self.fold_bindine_ident(i)),
|
||||
class: c.class.fold_with(self),
|
||||
..c
|
||||
}
|
||||
}
|
||||
|
||||
fn fold_member_expr(&mut self, e: MemberExpr) -> MemberExpr {
|
||||
if e.computed {
|
||||
MemberExpr {
|
||||
obj: e.obj.fold_with(self),
|
||||
prop: e.prop.fold_with(self),
|
||||
..e
|
||||
}
|
||||
} else {
|
||||
MemberExpr {
|
||||
obj: e.obj.fold_with(self),
|
||||
..e
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn fold_expr(&mut self, mut e: Expr) -> Expr {
|
||||
e = e.fold_children_with(self);
|
||||
|
||||
match e {
|
||||
Expr::Ident(i) => Expr::Ident(self.fold_ref_ident(i)),
|
||||
_ => e,
|
||||
}
|
||||
}
|
||||
|
||||
fn fold_fn_decl(&mut self, decl: FnDecl) -> FnDecl {
|
||||
let ident = self.fold_bindine_ident(decl.ident);
|
||||
|
||||
let mut child = self.child();
|
||||
let function = decl.function.fold_with(&mut child);
|
||||
|
||||
FnDecl {
|
||||
ident,
|
||||
function,
|
||||
..decl
|
||||
}
|
||||
}
|
||||
|
||||
fn fold_fn_expr(&mut self, f: FnExpr) -> FnExpr {
|
||||
let ident = f.ident.map(|i| self.fold_bindine_ident(i));
|
||||
|
||||
let mut child = self.child();
|
||||
let function = f.function.fold_with(&mut child);
|
||||
|
||||
FnExpr {
|
||||
ident,
|
||||
function,
|
||||
..f
|
||||
}
|
||||
}
|
||||
|
||||
fn fold_pat(&mut self, mut p: Pat) -> Pat {
|
||||
p = p.fold_children_with(self);
|
||||
|
||||
match p {
|
||||
Pat::Ident(i) => Pat::Ident(self.fold_bindine_ident(i)),
|
||||
_ => p,
|
||||
}
|
||||
}
|
||||
}
|
@ -1,7 +1,9 @@
|
||||
use super::Bundler;
|
||||
use crate::{
|
||||
bundler::{export::Exports, load_transformed::Specifier},
|
||||
Id, ModuleId,
|
||||
bundler::{export::Exports, load::Specifier},
|
||||
id::{Id, ModuleId},
|
||||
load::Load,
|
||||
resolve::Resolve,
|
||||
Bundler,
|
||||
};
|
||||
use anyhow::{Context, Error};
|
||||
use std::{
|
||||
@ -12,57 +14,89 @@ use std::{
|
||||
use swc_atoms::{js_word, JsWord};
|
||||
use swc_common::{Mark, Span, Spanned, SyntaxContext, DUMMY_SP};
|
||||
use swc_ecma_ast::*;
|
||||
use swc_ecma_transforms::noop_fold_type;
|
||||
use swc_ecma_utils::{
|
||||
find_ids, prepend, private_ident, undefined, DestructuringFinder, ExprFactory, StmtLike,
|
||||
};
|
||||
use swc_ecma_visit::{Fold, FoldWith, VisitMut, VisitMutWith, VisitWith};
|
||||
|
||||
impl Bundler<'_> {
|
||||
impl<L, R> Bundler<'_, L, R>
|
||||
where
|
||||
L: Load,
|
||||
R: Resolve,
|
||||
{
|
||||
/// Merge `targets` into `entry`.
|
||||
pub(super) fn merge_modules(
|
||||
&self,
|
||||
entry: ModuleId,
|
||||
targets: &mut Vec<ModuleId>,
|
||||
) -> Result<Module, Error> {
|
||||
log::trace!("merge_modules({})", entry);
|
||||
self.run(|| {
|
||||
let is_circular = self.scope.is_circular(entry);
|
||||
|
||||
log::trace!(
|
||||
"merge_modules({}) <- {:?}; circular = {}",
|
||||
entry,
|
||||
targets,
|
||||
is_circular
|
||||
);
|
||||
|
||||
self.swc.run(|| {
|
||||
let info = self.scope.get_module(entry).unwrap();
|
||||
|
||||
let mut entry: Module = (*info.module).clone();
|
||||
if targets.is_empty() {
|
||||
return Ok((*info.module).clone());
|
||||
}
|
||||
|
||||
log::info!("Merge: {} <= {:?}", info.fm.name, targets);
|
||||
if is_circular {
|
||||
log::info!("Circular dependency detected");
|
||||
// TODO: provide only circular imports.
|
||||
return Ok(self.merge_circular_modules(entry, targets));
|
||||
}
|
||||
|
||||
// {
|
||||
// let code = self
|
||||
// .swc
|
||||
// .print(
|
||||
// &entry.clone().fold_with(&mut HygieneVisualizer),
|
||||
// SourceMapsConfig::Bool(false),
|
||||
// None,
|
||||
// false,
|
||||
// )
|
||||
// .unwrap()
|
||||
// .code;
|
||||
//
|
||||
// println!("Before merging:\n{}\n\n\n", code);
|
||||
// }
|
||||
let mut entry: Module = (*info.module).clone();
|
||||
|
||||
log::info!("Merge: ({}){} <= {:?}", info.id, info.fm.name, targets);
|
||||
|
||||
// print_hygiene("before:merge", &self.cm, &entry);
|
||||
|
||||
for (src, specifiers) in &info.imports.specifiers {
|
||||
if !targets.contains(&src.module_id) {
|
||||
// Already merged by recursive call to merge_modules.
|
||||
log::debug!(
|
||||
"Not merging: not in target: ({}):{} <= ({}):{}",
|
||||
"Not merging: already merged: ({}):{} <= ({}):{}",
|
||||
info.id,
|
||||
info.fm.name,
|
||||
src.module_id,
|
||||
src.src.value,
|
||||
);
|
||||
|
||||
if let Some(imported) = self.scope.get_module(src.module_id) {
|
||||
// Respan using imported module's syntax context.
|
||||
entry = entry.fold_with(&mut LocalMarker {
|
||||
mark: imported.mark(),
|
||||
specifiers: &specifiers,
|
||||
excluded: vec![],
|
||||
});
|
||||
}
|
||||
|
||||
// Drop imports, as they are already merged.
|
||||
entry.body.retain(|item| {
|
||||
match item {
|
||||
ModuleItem::ModuleDecl(ModuleDecl::Import(import)) => {
|
||||
// Drop if it's one of circular import
|
||||
if info.imports.specifiers.iter().any(|v| {
|
||||
v.0.module_id == src.module_id && v.0.src == import.src
|
||||
}) {
|
||||
log::debug!("Dropping import");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
true
|
||||
});
|
||||
continue;
|
||||
}
|
||||
|
||||
log::debug!("Merging: {} <= {}", info.fm.name, src.src.value);
|
||||
|
||||
if specifiers.iter().any(|v| v.is_namespace()) {
|
||||
@ -75,6 +109,10 @@ impl Bundler<'_> {
|
||||
if let Some(imported) = self.scope.get_module(src.module_id) {
|
||||
info.helpers.extend(&imported.helpers);
|
||||
|
||||
if let Some(pos) = targets.iter().position(|x| *x == src.module_id) {
|
||||
targets.remove(pos);
|
||||
}
|
||||
|
||||
// In the case of
|
||||
//
|
||||
// a <- b
|
||||
@ -93,43 +131,13 @@ impl Bundler<'_> {
|
||||
)
|
||||
})?;
|
||||
|
||||
if let Some(pos) = targets.iter().position(|x| *x == info.id) {
|
||||
targets.remove(pos);
|
||||
}
|
||||
|
||||
if imported.is_es6 {
|
||||
//{
|
||||
// let code = self
|
||||
// .swc
|
||||
// .print(
|
||||
// &dep.clone().fold_with(&mut HygieneVisualizer),
|
||||
// info.fm.clone(),
|
||||
// false,
|
||||
// false,
|
||||
// )
|
||||
// .unwrap()
|
||||
// .code;
|
||||
//
|
||||
// println!("Dep before drop_unused:\n{}\n\n\n", code);
|
||||
//}
|
||||
// print_hygiene("dep:before:tree-shaking", &self.cm, &dep);
|
||||
|
||||
// Tree-shaking
|
||||
dep = self.drop_unused(imported.fm.clone(), dep, Some(&specifiers));
|
||||
dep = self.drop_unused(dep, Some(&specifiers));
|
||||
|
||||
//{
|
||||
// let code = self
|
||||
// .swc
|
||||
// .print(
|
||||
// &dep.clone().fold_with(&mut HygieneVisualizer),
|
||||
// info.fm.clone(),
|
||||
// false,
|
||||
// false,
|
||||
// )
|
||||
// .unwrap()
|
||||
// .code;
|
||||
//
|
||||
// println!("Dep after drop_unused:\n{}\n\n\n", code);
|
||||
//}
|
||||
// print_hygiene("dep:after:tree-shaking", &self.cm, &dep);
|
||||
|
||||
if let Some(imports) = info
|
||||
.imports
|
||||
@ -163,11 +171,15 @@ impl Bundler<'_> {
|
||||
// });
|
||||
}
|
||||
|
||||
// print_hygiene("dep:before:global-mark", &self.cm, &dep);
|
||||
|
||||
dep = dep.fold_with(&mut GlobalMarker {
|
||||
used_mark: self.used_mark,
|
||||
module_mark: imported.mark(),
|
||||
});
|
||||
|
||||
// print_hygiene("dep:after:global-mark", &self.cm, &dep);
|
||||
|
||||
// {
|
||||
// let code = self
|
||||
// .swc
|
||||
@ -183,21 +195,6 @@ impl Bundler<'_> {
|
||||
// println!("Dep:\n{}\n\n\n", code);
|
||||
// }
|
||||
|
||||
// {
|
||||
// let code = self
|
||||
// .swc
|
||||
// .print(
|
||||
// &entry.clone().fold_with(&mut HygieneVisualizer),
|
||||
// SourceMapsConfig::Bool(false),
|
||||
// None,
|
||||
// false,
|
||||
// )
|
||||
// .unwrap()
|
||||
// .code;
|
||||
//
|
||||
// println!("@: Before merging:\n{}\n\n\n", code);
|
||||
// }
|
||||
|
||||
// Replace import statement / require with module body
|
||||
let mut injector = Es6ModuleInjector {
|
||||
imported: dep.body.clone(),
|
||||
@ -205,28 +202,14 @@ impl Bundler<'_> {
|
||||
};
|
||||
entry.body.visit_mut_with(&mut injector);
|
||||
|
||||
// {
|
||||
// let code = self
|
||||
// .swc
|
||||
// .print(
|
||||
// &entry.clone().fold_with(&mut
|
||||
// HygieneVisualizer),
|
||||
// SourceMapsConfig::Bool(false),
|
||||
// None,
|
||||
// false,
|
||||
// )
|
||||
// .unwrap()
|
||||
// .code;
|
||||
//
|
||||
// println!("Merged:\n{}\n\n\n", code);
|
||||
// }
|
||||
// print_hygiene("entry:after:injection", &self.cm, &entry);
|
||||
|
||||
if injector.imported.is_empty() {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
if self.config.require {
|
||||
// common js module is transpiled as
|
||||
//
|
||||
// Src:
|
||||
@ -356,16 +339,44 @@ impl Bundler<'_> {
|
||||
}
|
||||
|
||||
/// `export var a = 1` => `var a = 1`
|
||||
struct Unexporter;
|
||||
|
||||
noop_fold_type!(Unexporter);
|
||||
pub(super) struct Unexporter;
|
||||
|
||||
impl Fold for Unexporter {
|
||||
fn fold_module_item(&mut self, item: ModuleItem) -> ModuleItem {
|
||||
match item {
|
||||
ModuleItem::ModuleDecl(decl) => match decl {
|
||||
ModuleDecl::ExportDecl(decl) => ModuleItem::Stmt(Stmt::Decl(decl.decl)),
|
||||
ModuleDecl::ExportDefaultExpr(..) => {
|
||||
|
||||
ModuleDecl::ExportDefaultDecl(export) => match export.decl {
|
||||
DefaultDecl::Class(ClassExpr { ident: None, .. })
|
||||
| DefaultDecl::Fn(FnExpr { ident: None, .. }) => {
|
||||
ModuleItem::Stmt(Stmt::Empty(EmptyStmt { span: DUMMY_SP }))
|
||||
}
|
||||
DefaultDecl::TsInterfaceDecl(decl) => {
|
||||
ModuleItem::Stmt(Stmt::Decl(Decl::TsInterface(decl)))
|
||||
}
|
||||
|
||||
DefaultDecl::Class(ClassExpr {
|
||||
ident: Some(ident),
|
||||
class,
|
||||
}) => ModuleItem::Stmt(Stmt::Decl(Decl::Class(ClassDecl {
|
||||
declare: false,
|
||||
ident,
|
||||
class,
|
||||
}))),
|
||||
|
||||
DefaultDecl::Fn(FnExpr {
|
||||
ident: Some(ident),
|
||||
function,
|
||||
}) => ModuleItem::Stmt(Stmt::Decl(Decl::Fn(FnDecl {
|
||||
declare: false,
|
||||
function,
|
||||
ident,
|
||||
}))),
|
||||
},
|
||||
|
||||
// Empty statement
|
||||
ModuleDecl::ExportAll(..) | ModuleDecl::ExportDefaultExpr(..) => {
|
||||
ModuleItem::Stmt(Stmt::Empty(EmptyStmt { span: DUMMY_SP }))
|
||||
}
|
||||
ModuleDecl::ExportNamed(ref n) if n.src.is_none() => {
|
||||
@ -373,8 +384,7 @@ impl Fold for Unexporter {
|
||||
}
|
||||
ModuleDecl::Import(..) => ModuleItem::ModuleDecl(decl),
|
||||
|
||||
// TODO: Handle all
|
||||
_ => unimplemented!("Unexporter: {:?}", decl),
|
||||
_ => unimplemented!("Unexported: {:?}", decl),
|
||||
},
|
||||
|
||||
_ => item,
|
||||
@ -392,8 +402,6 @@ struct ExportRenamer<'a> {
|
||||
extras: Vec<Stmt>,
|
||||
}
|
||||
|
||||
noop_fold_type!(ExportRenamer<'_>);
|
||||
|
||||
impl ExportRenamer<'_> {
|
||||
pub fn aliased_import(&self, sym: &JsWord) -> Option<Id> {
|
||||
log::debug!("aliased_import({})\n{:?}\n\n\n", sym, self.imports);
|
||||
@ -575,8 +583,6 @@ struct ActualMarker<'a> {
|
||||
imports: &'a [Specifier],
|
||||
}
|
||||
|
||||
noop_fold_type!(ActualMarker<'_>);
|
||||
|
||||
impl Fold for ActualMarker<'_> {
|
||||
fn fold_expr(&mut self, node: Expr) -> Expr {
|
||||
node
|
||||
@ -605,15 +611,13 @@ impl Fold for ActualMarker<'_> {
|
||||
}
|
||||
|
||||
/// Applied to the importer module, and marks (connects) imported idents.
|
||||
struct LocalMarker<'a> {
|
||||
pub(super) struct LocalMarker<'a> {
|
||||
/// Mark applied to imported idents.
|
||||
mark: Mark,
|
||||
specifiers: &'a [Specifier],
|
||||
excluded: Vec<Id>,
|
||||
pub mark: Mark,
|
||||
pub specifiers: &'a [Specifier],
|
||||
pub excluded: Vec<Id>,
|
||||
}
|
||||
|
||||
noop_fold_type!(LocalMarker<'_>);
|
||||
|
||||
impl<'a> LocalMarker<'a> {
|
||||
/// Searches for i, and fold T.
|
||||
#[allow(dead_code)]
|
||||
@ -772,13 +776,11 @@ impl VisitMut for Es6ModuleInjector {
|
||||
}
|
||||
}
|
||||
|
||||
struct GlobalMarker {
|
||||
used_mark: Mark,
|
||||
module_mark: Mark,
|
||||
pub(super) struct GlobalMarker {
|
||||
pub used_mark: Mark,
|
||||
pub module_mark: Mark,
|
||||
}
|
||||
|
||||
noop_fold_type!(GlobalMarker);
|
||||
|
||||
impl GlobalMarker {
|
||||
fn is_marked_as_used(&self, span: Span) -> bool {
|
||||
let mut ctxt = span.ctxt();
|
@ -1,15 +1,16 @@
|
||||
use super::Bundler;
|
||||
use super::{load::TransformedModule, Bundler};
|
||||
use crate::{
|
||||
bundler::{load_transformed::TransformedModule, Bundle, BundleKind},
|
||||
ModuleId,
|
||||
id::ModuleId, load::Load, resolve::Resolve, util::IntoParallelIterator, Bundle, BundleKind,
|
||||
};
|
||||
use anyhow::{Context, Error};
|
||||
use fxhash::{FxHashMap, FxHashSet};
|
||||
use petgraph::{graphmap::DiGraphMap, visit::Bfs};
|
||||
use rayon::prelude::*;
|
||||
use swc_ecma_transforms::{fixer, hygiene, optimization::simplify::dce::dce};
|
||||
#[cfg(feature = "rayon")]
|
||||
use rayon::iter::ParallelIterator;
|
||||
use swc_ecma_transforms::{hygiene, optimization::simplify::dce};
|
||||
use swc_ecma_visit::FoldWith;
|
||||
|
||||
mod circular;
|
||||
mod merge;
|
||||
|
||||
pub(super) type ModuleGraph = DiGraphMap<ModuleId, usize>;
|
||||
@ -34,7 +35,11 @@ struct State {
|
||||
common_libs: FxHashSet<ModuleId>,
|
||||
}
|
||||
|
||||
impl Bundler<'_> {
|
||||
impl<L, R> Bundler<'_, L, R>
|
||||
where
|
||||
L: Load,
|
||||
R: Resolve,
|
||||
{
|
||||
/// `entries` - Entry modules (provided by user) by it's basename.
|
||||
///
|
||||
/// # How it works
|
||||
@ -50,16 +55,15 @@ impl Bundler<'_> {
|
||||
.into_par_iter()
|
||||
.map(
|
||||
|(kind, id, mut module_ids_to_merge): (BundleKind, ModuleId, _)| {
|
||||
self.swc().run(|| {
|
||||
self.run(|| {
|
||||
let module = self
|
||||
.merge_modules(id, &mut module_ids_to_merge)
|
||||
.context("failed to merge module")
|
||||
.unwrap(); // TODO
|
||||
|
||||
let module = module
|
||||
.fold_with(&mut dce(Default::default()))
|
||||
.fold_with(&mut hygiene())
|
||||
.fold_with(&mut fixer(Some(&self.swc.comments() as _)));
|
||||
.fold_with(&mut dce::dce(Default::default()))
|
||||
.fold_with(&mut hygiene());
|
||||
|
||||
Bundle { kind, id, module }
|
||||
})
|
||||
@ -139,6 +143,8 @@ impl Bundler<'_> {
|
||||
}
|
||||
|
||||
fn add_to_graph(&self, graph: &mut ModuleGraph, module_id: ModuleId) {
|
||||
let contains = graph.contains_node(module_id);
|
||||
|
||||
graph.add_node(module_id);
|
||||
|
||||
let m = self
|
||||
@ -146,8 +152,20 @@ impl Bundler<'_> {
|
||||
.get_module(module_id)
|
||||
.expect("failed to get module");
|
||||
|
||||
// Prevent dejavu
|
||||
if contains {
|
||||
for (src, _) in &m.imports.specifiers {
|
||||
if graph.contains_node(src.module_id) {
|
||||
self.scope.mark_as_circular(module_id);
|
||||
self.scope.mark_as_circular(src.module_id);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (src, _) in &*m.imports.specifiers {
|
||||
//
|
||||
|
||||
self.add_to_graph(graph, src.module_id);
|
||||
graph.add_edge(
|
||||
module_id,
|
@ -1,17 +1,20 @@
|
||||
use super::Bundler;
|
||||
use crate::{
|
||||
bundler::load_transformed::{Source, Specifier},
|
||||
Id,
|
||||
use super::{
|
||||
load::{Source, Specifier},
|
||||
Bundler,
|
||||
};
|
||||
use crate::{id::Id, load::Load, resolve::Resolve};
|
||||
use fxhash::FxHashMap;
|
||||
use swc_atoms::js_word;
|
||||
use swc_common::{SyntaxContext, DUMMY_SP};
|
||||
use swc_ecma_ast::*;
|
||||
use swc_ecma_transforms::noop_visit_type;
|
||||
use swc_ecma_utils::find_ids;
|
||||
use swc_ecma_visit::{Node, Visit, VisitWith};
|
||||
|
||||
impl Bundler<'_> {
|
||||
impl<L, R> Bundler<'_, L, R>
|
||||
where
|
||||
L: Load,
|
||||
R: Resolve,
|
||||
{
|
||||
/// This method removes exported pure constants from the module.
|
||||
///
|
||||
/// A pure constant is an exported literal.
|
||||
@ -20,7 +23,7 @@ impl Bundler<'_> {
|
||||
/// TODO: Support pattern like
|
||||
/// export const [a, b] = [1, 2]
|
||||
pub(super) fn extract_export_info(&self, module: &Module) -> RawExports {
|
||||
self.swc.run(|| {
|
||||
self.run(|| {
|
||||
let mut v = ExportFinder::default();
|
||||
|
||||
module.visit_with(&Invalid { span: DUMMY_SP } as _, &mut v);
|
||||
@ -32,14 +35,12 @@ impl Bundler<'_> {
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub(super) struct RawExports {
|
||||
pub pure_constants: Vec<(Id, Lit)>,
|
||||
/// Key is None if it's exported from the module itself.
|
||||
pub items: FxHashMap<Option<Str>, Vec<Specifier>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub(super) struct Exports {
|
||||
pub pure_constants: Vec<(Id, Lit)>,
|
||||
pub items: Vec<Specifier>,
|
||||
pub reexports: FxHashMap<Source, Vec<Specifier>>,
|
||||
}
|
||||
@ -49,8 +50,6 @@ struct ExportFinder {
|
||||
info: RawExports,
|
||||
}
|
||||
|
||||
noop_visit_type!(ExportFinder);
|
||||
|
||||
impl Visit for ExportFinder {
|
||||
fn visit_module_item(&mut self, item: &ModuleItem, _: &dyn Node) {
|
||||
match item {
|
bundler/src/bundler/finalize.rs (new file, 170 lines)
@ -0,0 +1,170 @@
|
||||
use crate::{hash::calc_hash, Bundle, BundleKind, Bundler, Load, Resolve};
|
||||
use anyhow::Error;
|
||||
use fxhash::FxHashMap;
|
||||
use relative_path::RelativePath;
|
||||
use std::path::{Path, PathBuf};
|
||||
use swc_common::{util::move_map::MoveMap, FileName};
|
||||
use swc_ecma_ast::{ImportDecl, Str};
|
||||
use swc_ecma_transforms::noop_fold_type;
|
||||
use swc_ecma_visit::{Fold, FoldWith};
|
||||
|
||||
impl<L, R> Bundler<'_, L, R>
|
||||
where
|
||||
L: Load,
|
||||
R: Resolve,
|
||||
{
|
||||
/// This method does the following:
|
||||
///
|
||||
/// - inject helpers
|
||||
/// - rename chunks
|
||||
pub(super) fn finalize(&self, bundles: Vec<Bundle>) -> Result<Vec<Bundle>, Error> {
|
||||
self.run(|| {
|
||||
let mut new = Vec::with_capacity(bundles.len());
|
||||
let mut renamed = FxHashMap::default();
|
||||
|
||||
for mut bundle in bundles {
|
||||
match bundle.kind {
|
||||
BundleKind::Named { .. } => {
|
||||
// Inject helpers
|
||||
let helpers = self
|
||||
.scope
|
||||
.get_module(bundle.id)
|
||||
.expect("module should exist at this point")
|
||||
.helpers;
|
||||
|
||||
helpers.append_to(&mut bundle.module.body);
|
||||
|
||||
new.push(Bundle { ..bundle });
|
||||
}
|
||||
BundleKind::Lib { name } => {
|
||||
let hash = calc_hash(self.cm.clone(), &bundle.module)?;
|
||||
let mut new_name = PathBuf::from(name);
|
||||
let key = new_name.clone();
|
||||
let file_name = new_name
|
||||
.file_name()
|
||||
.map(|path| -> PathBuf {
|
||||
let path = Path::new(path);
|
||||
let ext = path.extension();
|
||||
if let Some(ext) = ext {
|
||||
return format!(
|
||||
"{}-{}.{}",
|
||||
path.file_stem().unwrap().to_string_lossy(),
|
||||
hash,
|
||||
ext.to_string_lossy()
|
||||
)
|
||||
.into();
|
||||
}
|
||||
return format!(
|
||||
"{}-{}",
|
||||
path.file_stem().unwrap().to_string_lossy(),
|
||||
hash,
|
||||
)
|
||||
.into();
|
||||
})
|
||||
.expect("javascript file should have name");
|
||||
new_name.pop();
|
||||
new_name = new_name.join(file_name.clone());
|
||||
|
||||
renamed.insert(key, new_name.to_string_lossy().to_string());
|
||||
|
||||
new.push(Bundle {
|
||||
kind: BundleKind::Named {
|
||||
name: file_name.display().to_string(),
|
||||
},
|
||||
..bundle
|
||||
})
|
||||
}
|
||||
_ => new.push(bundle),
|
||||
}
|
||||
}
|
||||
|
||||
if new.len() == 1 {
|
||||
return Ok(new);
|
||||
}
|
||||
|
||||
new = new.move_map(|bundle| {
|
||||
let path = match self.scope.get_module(bundle.id).unwrap().fm.name {
|
||||
FileName::Real(ref v) => v.clone(),
|
||||
_ => {
|
||||
log::error!("Cannot rename: not a real file");
|
||||
return bundle;
|
||||
}
|
||||
};
|
||||
|
||||
let module = {
|
||||
// Change imports
|
||||
let mut v = Renamer {
|
||||
resolver: &self.resolver,
|
||||
base: &path,
|
||||
renamed: &renamed,
|
||||
};
|
||||
bundle.module.fold_with(&mut v)
|
||||
};
|
||||
|
||||
Bundle { module, ..bundle }
|
||||
});
|
||||
|
||||
Ok(new)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Import renamer. This pass changes import path.
|
||||
struct Renamer<'a, R>
|
||||
where
|
||||
R: Resolve,
|
||||
{
|
||||
resolver: R,
|
||||
base: &'a PathBuf,
|
||||
renamed: &'a FxHashMap<PathBuf, String>,
|
||||
}
|
||||
|
||||
noop_fold_type!(Renamer<'_, '_>);
|
||||
|
||||
impl<R> Fold for Renamer<'_, R>
|
||||
where
|
||||
R: Resolve,
|
||||
{
|
||||
fn fold_import_decl(&mut self, import: ImportDecl) -> ImportDecl {
|
||||
let resolved = match self
|
||||
.resolver
|
||||
.resolve(&FileName::Real(self.base.clone()), &import.src.value)
|
||||
{
|
||||
Ok(v) => match v {
|
||||
FileName::Real(v) => v,
|
||||
_ => panic!("rename_bundles called with non-path module"),
|
||||
},
|
||||
Err(_) => return import,
|
||||
};
|
||||
|
||||
if let Some(v) = self.renamed.get(&resolved) {
|
||||
// We use parent because RelativePath uses ../common-[hash].js
|
||||
// if we use `entry-a.js` as a base.
|
||||
//
|
||||
// entry-a.js
|
||||
// common.js
|
||||
let base = self
|
||||
.base
|
||||
.parent()
|
||||
.unwrap_or(self.base)
|
||||
.as_os_str()
|
||||
.to_string_lossy();
|
||||
let base = RelativePath::new(&*base);
|
||||
let v = base.relative(&*v);
|
||||
let value = v.as_str();
|
||||
return ImportDecl {
|
||||
src: Str {
|
||||
value: if value.starts_with(".") {
|
||||
value.into()
|
||||
} else {
|
||||
format!("./{}", value).into()
|
||||
},
|
||||
..import.src
|
||||
},
|
||||
..import
|
||||
};
|
||||
}
|
||||
|
||||
import
|
||||
}
|
||||
}
|
@ -1,30 +1,30 @@
|
||||
use super::Bundler;
|
||||
use crate::{load::Load, resolve::Resolve};
|
||||
use anyhow::{Context, Error};
|
||||
use fxhash::{FxHashMap, FxHashSet};
|
||||
use node_resolve::is_core_module;
|
||||
use std::{
|
||||
mem::replace,
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
};
|
||||
use std::mem::replace;
|
||||
use swc_atoms::{js_word, JsWord};
|
||||
use swc_common::{util::move_map::MoveMap, Mark, Spanned, DUMMY_SP};
|
||||
use swc_common::{sync::Lrc, util::move_map::MoveMap, FileName, Mark, Spanned, DUMMY_SP};
|
||||
use swc_ecma_ast::*;
|
||||
use swc_ecma_transforms::noop_fold_type;
|
||||
use swc_ecma_utils::{find_ids, ident::IdentLike, Id};
|
||||
use swc_ecma_visit::{Fold, FoldWith};
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
impl Bundler<'_> {
|
||||
/// This de-globs imports if possible.
|
||||
impl<L, R> Bundler<'_, L, R>
|
||||
where
|
||||
L: Load,
|
||||
R: Resolve,
|
||||
{
|
||||
/// This method de-globs imports if possible.
|
||||
pub(super) fn extract_import_info(
|
||||
&self,
|
||||
path: &Path,
|
||||
path: &FileName,
|
||||
module: &mut Module,
|
||||
_mark: Mark,
|
||||
) -> RawImports {
|
||||
self.run(|| {
|
||||
let body = replace(&mut module.body, vec![]);
|
||||
|
||||
let mut v = ImportHandler {
|
||||
@ -42,16 +42,21 @@ impl Bundler<'_> {
|
||||
module.body = body;
|
||||
|
||||
v.info
|
||||
})
|
||||
}
|
||||
|
||||
pub(super) fn resolve(&self, base: &Path, s: &str) -> Result<Arc<PathBuf>, Error> {
|
||||
self.swc.run(|| {
|
||||
pub(super) fn resolve(
|
||||
&self,
|
||||
base: &FileName,
|
||||
module_specifier: &str,
|
||||
) -> Result<Lrc<FileName>, Error> {
|
||||
self.run(|| {
|
||||
let path = self
|
||||
.resolver
|
||||
.resolve(base, s)
|
||||
.with_context(|| format!("failed to resolve {} from {}", s, base.display()))?;
|
||||
.resolve(base, module_specifier)
|
||||
.with_context(|| format!("failed to resolve {} from {}", module_specifier, base))?;
|
||||
|
||||
let path = Arc::new(path);
|
||||
let path = Lrc::new(path);
|
||||
|
||||
Ok(path)
|
||||
})
|
||||
@ -77,9 +82,13 @@ pub(super) struct RawImports {
|
||||
pub dynamic_imports: Vec<Str>,
|
||||
}
|
||||
|
||||
struct ImportHandler<'a, 'b> {
|
||||
path: &'a Path,
|
||||
bundler: &'a Bundler<'b>,
|
||||
struct ImportHandler<'a, 'b, L, R>
|
||||
where
|
||||
L: Load,
|
||||
R: Resolve,
|
||||
{
|
||||
path: &'a FileName,
|
||||
bundler: &'a Bundler<'b, L, R>,
|
||||
top_level: bool,
|
||||
info: RawImports,
|
||||
/// Contains namespace imports accessed with computed key.
|
||||
@ -99,11 +108,20 @@ struct ImportHandler<'a, 'b> {
|
||||
deglob_phase: bool,
|
||||
}
|
||||
|
||||
noop_fold_type!(ImportHandler<'_, '_>);
|
||||
|
||||
impl ImportHandler<'_, '_> {
|
||||
impl<L, R> ImportHandler<'_, '_, L, R>
|
||||
where
|
||||
L: Load,
|
||||
R: Resolve,
|
||||
{
|
||||
fn mark_for(&self, src: &str) -> Option<Mark> {
|
||||
if is_core_module(src) {
|
||||
// Don't apply mark if it's a core module.
|
||||
if self
|
||||
.bundler
|
||||
.config
|
||||
.external_modules
|
||||
.iter()
|
||||
.any(|v| v == src)
|
||||
{
|
||||
return None;
|
||||
}
|
||||
let path = self.bundler.resolve(self.path, src).ok()?;
|
||||
@ -112,10 +130,20 @@ impl ImportHandler<'_, '_> {
|
||||
}
|
||||
}
|
||||
|
||||
impl Fold for ImportHandler<'_, '_> {
|
||||
impl<L, R> Fold for ImportHandler<'_, '_, L, R>
|
||||
where
|
||||
L: Load,
|
||||
R: Resolve,
|
||||
{
|
||||
fn fold_import_decl(&mut self, import: ImportDecl) -> ImportDecl {
|
||||
if !self.deglob_phase {
|
||||
if is_core_module(&import.src.value) {
|
||||
// Ignore if it's a core module.
|
||||
if self
|
||||
.bundler
|
||||
.config
|
||||
.external_modules
|
||||
.contains(&import.src.value)
|
||||
{
|
||||
return import;
|
||||
}
|
||||
|
||||
@ -334,7 +362,8 @@ impl Fold for ImportHandler<'_, '_> {
|
||||
|
||||
match &e.callee {
|
||||
ExprOrSuper::Expr(callee)
|
||||
if match &**callee {
|
||||
if self.bundler.config.require
|
||||
&& match &**callee {
|
||||
Expr::Ident(Ident {
|
||||
sym: js_word!("require"),
|
||||
..
|
||||
@ -399,13 +428,15 @@ impl Fold for ImportHandler<'_, '_> {
|
||||
callee: ExprOrSuper::Expr(ref callee),
|
||||
ref args,
|
||||
..
|
||||
}) if match &**callee {
|
||||
}) if self.bundler.config.require
|
||||
&& match &**callee {
|
||||
Expr::Ident(Ident {
|
||||
sym: js_word!("require"),
|
||||
..
|
||||
}) => true,
|
||||
_ => false,
|
||||
} && args.len() == 1 =>
|
||||
}
|
||||
&& args.len() == 1 =>
|
||||
{
|
||||
let span = *span;
|
||||
let src = match args.first().unwrap() {
|
||||
@ -415,7 +446,8 @@ impl Fold for ImportHandler<'_, '_> {
|
||||
},
|
||||
_ => return node,
|
||||
};
|
||||
if is_core_module(&src.value) {
|
||||
// Ignore core modules.
|
||||
if self.bundler.config.external_modules.contains(&src.value) {
|
||||
return node;
|
||||
}
|
||||
|
@ -1,5 +1,7 @@
|
||||
use crate::bundler::{import::ImportHandler, tests::test_bundler};
|
||||
use super::ImportHandler;
|
||||
use crate::bundler::tests::test_bundler;
|
||||
use std::path::Path;
|
||||
use swc_common::FileName;
|
||||
use swc_ecma_visit::FoldWith;
|
||||
|
||||
#[test]
|
||||
@ -13,7 +15,7 @@ ns.foo();
|
||||
",
|
||||
);
|
||||
let mut v = ImportHandler {
|
||||
path: &Path::new("index.js"),
|
||||
path: &FileName::Real(Path::new("index.js").to_path_buf()),
|
||||
bundler: &t.bundler,
|
||||
top_level: false,
|
||||
info: Default::default(),
|
||||
@ -41,7 +43,7 @@ ns.bar();
|
||||
",
|
||||
);
|
||||
let mut v = ImportHandler {
|
||||
path: &Path::new("index.js"),
|
||||
path: &FileName::Real(Path::new("index.js").to_path_buf()),
|
||||
bundler: &t.bundler,
|
||||
top_level: false,
|
||||
info: Default::default(),
|
bundler/src/bundler/load.rs (new file, 422 lines)
@ -0,0 +1,422 @@
|
||||
use super::{export::Exports, helpers::Helpers, Bundler};
|
||||
use crate::{
|
||||
bundler::{export::RawExports, import::RawImports},
|
||||
id::{Id, ModuleId},
|
||||
util,
|
||||
util::IntoParallelIterator,
|
||||
Load, Resolve,
|
||||
};
|
||||
use anyhow::{Context, Error};
|
||||
use is_macro::Is;
|
||||
#[cfg(feature = "rayon")]
|
||||
use rayon::iter::ParallelIterator;
|
||||
use swc_atoms::js_word;
|
||||
use swc_common::{sync::Lrc, FileName, Mark, SourceFile, DUMMY_SP};
|
||||
use swc_ecma_ast::{
|
||||
Expr, ExprOrSuper, ImportDecl, ImportSpecifier, Invalid, MemberExpr, Module, ModuleDecl, Str,
|
||||
};
|
||||
use swc_ecma_transforms::resolver_with_mark;
|
||||
use swc_ecma_visit::{FoldWith, Node, Visit, VisitWith};
|
||||
/// Module after applying transformations.
|
||||
#[derive(Debug, Clone)]
|
||||
pub(super) struct TransformedModule {
|
||||
pub id: ModuleId,
|
||||
pub fm: Lrc<SourceFile>,
|
||||
pub module: Lrc<Module>,
|
||||
pub imports: Lrc<Imports>,
|
||||
pub exports: Lrc<Exports>,
|
||||
|
||||
/// If false, the module will be wrapped with a small helper function.
|
||||
pub is_es6: bool,
|
||||
|
||||
/// Used helpers
|
||||
pub helpers: Lrc<Helpers>,
|
||||
|
||||
mark: Mark,
|
||||
}
|
||||
|
||||
impl TransformedModule {
|
||||
/// The marker for the module's top-level identifiers.
|
||||
pub fn mark(&self) -> Mark {
|
||||
self.mark
|
||||
}
|
||||
}
|
||||
|
||||
impl<L, R> Bundler<'_, L, R>
|
||||
where
|
||||
L: Load,
|
||||
R: Resolve,
|
||||
{
|
||||
/// Phase 1 (discovery)
|
||||
///
|
||||
/// We apply transforms at this phase to make cache efficient.
|
||||
/// As we cache in this phase, changing dependency does not affect cache.
|
||||
pub(super) fn load_transformed(
|
||||
&self,
|
||||
file_name: &FileName,
|
||||
) -> Result<Option<TransformedModule>, Error> {
|
||||
self.run(|| {
|
||||
log::trace!("load_transformed: ({})", file_name);
|
||||
|
||||
// In case of common module
|
||||
if let Some(cached) = self.scope.get_module_by_path(&file_name) {
|
||||
log::info!("Cached: {}", file_name);
|
||||
return Ok(Some(cached));
|
||||
}
|
||||
|
||||
let (_, fm, module) = self.load(&file_name).context("Bundler.load() failed")?;
|
||||
let (v, mut files) = self
|
||||
.analyze(&file_name, fm.clone(), module)
|
||||
.context("failed to analyze module")?;
|
||||
files.dedup_by_key(|v| v.1.clone());
|
||||
|
||||
log::info!("Storing module: {}", file_name);
|
||||
self.scope.store_module(v.clone());
|
||||
|
||||
// Load dependencies and store them in the `Scope`
|
||||
let results = files
|
||||
.into_par_iter()
|
||||
.map(|(_src, path)| {
|
||||
log::debug!("loading dependency: {}", path);
|
||||
self.load_transformed(&path)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// Do tasks in parallel, and then wait for result
|
||||
for result in results {
|
||||
let res = result?;
|
||||
dbg!(res.is_none());
|
||||
}
|
||||
|
||||
Ok(Some(v))
|
||||
})
|
||||
}
|
||||
|
||||
fn load(&self, file_name: &FileName) -> Result<(ModuleId, Lrc<SourceFile>, Module), Error> {
|
||||
self.run(|| {
|
||||
let (module_id, _) = self.scope.module_id_gen.gen(file_name);
|
||||
|
||||
let (fm, module) = self
|
||||
.loader
|
||||
.load(&file_name)
|
||||
.with_context(|| format!("Bundler.loader.load({}) failed", file_name))?;
|
||||
self.scope.mark_as_loaded(module_id);
|
||||
Ok((module_id, fm, module))
|
||||
})
|
||||
}
|
||||
|
||||
/// This method returns [Source]s which should be loaded.
|
||||
fn analyze(
|
||||
&self,
|
||||
file_name: &FileName,
|
||||
fm: Lrc<SourceFile>,
|
||||
mut module: Module,
|
||||
) -> Result<(TransformedModule, Vec<(Source, Lrc<FileName>)>), Error> {
|
||||
self.run(|| {
|
||||
log::trace!("transform_module({})", fm.name);
|
||||
module = module.fold_with(&mut resolver_with_mark(self.top_level_mark));
|
||||
|
||||
let (id, mark) = self.scope.module_id_gen.gen(file_name);
|
||||
|
||||
// {
|
||||
// let code = self
|
||||
// .swc
|
||||
// .print(
|
||||
// &module.clone().fold_with(&mut HygieneVisualizer),
|
||||
// SourceMapsConfig::Bool(false),
|
||||
// None,
|
||||
// false,
|
||||
// )
|
||||
// .unwrap()
|
||||
// .code;
|
||||
//
|
||||
// println!("Resolved:\n{}\n\n", code);
|
||||
// }
|
||||
|
||||
let imports = self.extract_import_info(file_name, &mut module, mark);
|
||||
|
||||
// {
|
||||
// let code = self
|
||||
// .swc
|
||||
// .print(
|
||||
// &module.clone().fold_with(&mut HygieneVisualizer),
|
||||
// SourceMapsConfig::Bool(false),
|
||||
// None,
|
||||
// false,
|
||||
// )
|
||||
// .unwrap()
|
||||
// .code;
|
||||
//
|
||||
// println!("After imports:\n{}\n", code,);
|
||||
// }
|
||||
|
||||
let exports = self.extract_export_info(&module);
|
||||
|
||||
let is_es6 = {
|
||||
let mut v = Es6ModuleDetector {
|
||||
forced_es6: false,
|
||||
found_other: false,
|
||||
};
|
||||
module.visit_with(&Invalid { span: DUMMY_SP } as _, &mut v);
|
||||
v.forced_es6 || !v.found_other
|
||||
};
|
||||
if is_es6 {
|
||||
module = self.drop_unused(module, None);
|
||||
}
|
||||
|
||||
let (imports, exports) = util::join(
|
||||
|| self.resolve_imports(file_name, imports),
|
||||
|| self.resolve_exports(file_name, exports),
|
||||
);
|
||||
let (imports, mut import_files) = imports?;
|
||||
let (exports, reexport_files) = exports?;
|
||||
import_files.extend(reexport_files);
|
||||
|
||||
let module = Lrc::new(module);
|
||||
|
||||
Ok((
|
||||
TransformedModule {
|
||||
id,
|
||||
fm,
|
||||
module,
|
||||
imports: Lrc::new(imports),
|
||||
exports: Lrc::new(exports),
|
||||
is_es6,
|
||||
helpers: Default::default(),
|
||||
mark,
|
||||
},
|
||||
import_files,
|
||||
))
|
||||
})
|
||||
}
|
||||
|
||||
/// Resolve re-exports.
|
||||
fn resolve_exports(
|
||||
&self,
|
||||
base: &FileName,
|
||||
raw: RawExports,
|
||||
) -> Result<(Exports, Vec<(Source, Lrc<FileName>)>), Error> {
|
||||
self.run(|| {
|
||||
log::trace!("resolve_exports({})", base);
|
||||
let mut files = vec![];
|
||||
|
||||
let mut exports = Exports::default();
|
||||
|
||||
let items = raw
|
||||
.items
|
||||
.into_par_iter()
|
||||
.map(|(src, ss)| -> Result<_, Error> {
|
||||
self.run(|| {
|
||||
let info = match src {
|
||||
Some(src) => {
|
||||
let name = self.resolve(base, &src.value)?;
|
||||
let (id, _) = self.scope.module_id_gen.gen(&name);
|
||||
Some((id, name, src))
|
||||
}
|
||||
None => None,
|
||||
};
|
||||
|
||||
Ok((info, ss))
|
||||
})
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
for res in items {
|
||||
let (info, specifiers) = res?;
|
||||
|
||||
match info {
|
||||
None => exports.items.extend(specifiers),
|
||||
Some((id, name, src)) => {
|
||||
//
|
||||
let src = Source {
|
||||
is_loaded_synchronously: true,
|
||||
is_unconditional: false,
|
||||
module_id: id,
|
||||
src,
|
||||
};
|
||||
exports
|
||||
.reexports
|
||||
.entry(src.clone())
|
||||
.or_default()
|
||||
.extend(specifiers);
|
||||
files.push((src, name));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok((exports, files))
|
||||
})
|
||||
}
|
||||
|
||||
/// Resolve dependencies
|
||||
fn resolve_imports(
|
||||
&self,
|
||||
base: &FileName,
|
||||
info: RawImports,
|
||||
) -> Result<(Imports, Vec<(Source, Lrc<FileName>)>), Error> {
|
||||
self.run(|| {
|
||||
log::trace!("resolve_imports({})", base);
|
||||
let mut files = vec![];
|
||||
|
||||
let mut merged = Imports::default();
|
||||
let RawImports {
|
||||
imports,
|
||||
lazy_imports,
|
||||
dynamic_imports,
|
||||
} = info;
|
||||
|
||||
let loaded = imports
|
||||
.into_par_iter()
|
||||
.map(|v| (v, false, true))
|
||||
.chain(lazy_imports.into_par_iter().map(|v| (v, false, false)))
|
||||
.chain(dynamic_imports.into_par_iter().map(|src| {
|
||||
(
|
||||
ImportDecl {
|
||||
span: src.span,
|
||||
specifiers: vec![],
|
||||
src,
|
||||
type_only: false,
|
||||
},
|
||||
true,
|
||||
false,
|
||||
)
|
||||
}))
|
||||
.map(|(decl, dynamic, unconditional)| -> Result<_, Error> {
|
||||
self.run(|| {
|
||||
//
|
||||
let file_name = self.resolve(base, &decl.src.value)?;
|
||||
let (id, _) = self.scope.module_id_gen.gen(&file_name);
|
||||
|
||||
Ok((id, file_name, decl, dynamic, unconditional))
|
||||
})
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
for res in loaded {
|
||||
// TODO: Report error and proceed instead of returning an error
|
||||
let (id, file_name, decl, is_dynamic, is_unconditional) = res?;
|
||||
|
||||
let src = Source {
|
||||
is_loaded_synchronously: !is_dynamic,
|
||||
is_unconditional,
|
||||
module_id: id,
|
||||
src: decl.src,
|
||||
};
|
||||
files.push((src.clone(), file_name));
|
||||
|
||||
// TODO: Handle rename
|
||||
let mut specifiers = vec![];
|
||||
for s in decl.specifiers {
|
||||
match s {
|
||||
ImportSpecifier::Named(s) => specifiers.push(Specifier::Specific {
|
||||
local: s.local.into(),
|
||||
alias: s.imported.map(From::from),
|
||||
}),
|
||||
ImportSpecifier::Default(s) => specifiers.push(Specifier::Specific {
|
||||
local: s.local.into(),
|
||||
alias: Some(Id::new(js_word!("default"), s.span.ctxt())),
|
||||
}),
|
||||
ImportSpecifier::Namespace(s) => {
|
||||
specifiers.push(Specifier::Namespace {
|
||||
local: s.local.into(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
merged.specifiers.push((src, specifiers));
|
||||
}
|
||||
|
||||
Ok((merged, files))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub(super) struct Imports {
|
||||
/// If imported ids are empty, it is a side-effect import.
|
||||
pub specifiers: Vec<(Source, Vec<Specifier>)>,
|
||||
}
|
||||
|
||||
/// Clone is relatively cheap
|
||||
#[derive(Debug, Clone, Is)]
|
||||
pub(super) enum Specifier {
|
||||
Specific { local: Id, alias: Option<Id> },
|
||||
Namespace { local: Id },
|
||||
}
|
||||
|
||||
impl Specifier {
|
||||
pub fn local(&self) -> &Id {
|
||||
match self {
|
||||
Specifier::Specific { local, .. } => local,
|
||||
Specifier::Namespace { local, .. } => local,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub(super) struct Source {
|
||||
pub is_loaded_synchronously: bool,
|
||||
pub is_unconditional: bool,
|
||||
|
||||
pub module_id: ModuleId,
|
||||
// Clone is relatively cheap, thanks to string_cache.
|
||||
pub src: Str,
|
||||
}
|
||||
|
||||
struct Es6ModuleDetector {
|
||||
/// If import statement or export is detected, it's an es6 module regardless
|
||||
/// of other codes.
|
||||
forced_es6: bool,
|
||||
/// True if other module system is detected.
|
||||
found_other: bool,
|
||||
}
|
||||
|
||||
impl Visit for Es6ModuleDetector {
|
||||
fn visit_member_expr(&mut self, e: &MemberExpr, _: &dyn Node) {
|
||||
e.obj.visit_with(e as _, self);
|
||||
|
||||
if e.computed {
|
||||
e.prop.visit_with(e as _, self);
|
||||
}
|
||||
|
||||
match &e.obj {
|
||||
ExprOrSuper::Expr(e) => {
|
||||
match &**e {
|
||||
Expr::Ident(i) => {
|
||||
// TODO: Check syntax context (Check if marker is the global mark)
|
||||
if i.sym == *"module" {
|
||||
self.found_other = true;
|
||||
}
|
||||
|
||||
if i.sym == *"exports" {
|
||||
self.found_other = true;
|
||||
}
|
||||
}
|
||||
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
//
|
||||
}
|
||||
|
||||
fn visit_module_decl(&mut self, decl: &ModuleDecl, _: &dyn Node) {
|
||||
match decl {
|
||||
ModuleDecl::Import(_)
|
||||
| ModuleDecl::ExportDecl(_)
|
||||
| ModuleDecl::ExportNamed(_)
|
||||
| ModuleDecl::ExportDefaultDecl(_)
|
||||
| ModuleDecl::ExportDefaultExpr(_)
|
||||
| ModuleDecl::ExportAll(_) => {
|
||||
self.forced_es6 = true;
|
||||
}
|
||||
|
||||
ModuleDecl::TsImportEquals(_) => {}
|
||||
ModuleDecl::TsExportAssignment(_) => {}
|
||||
ModuleDecl::TsNamespaceExport(_) => {}
|
||||
}
|
||||
}
|
||||
}
|
bundler/src/bundler/mod.rs (new file, 150 lines)
@ -0,0 +1,150 @@
|
||||
use self::scope::Scope;
|
||||
use crate::{Load, ModuleId, Resolve};
|
||||
use anyhow::{Context, Error};
|
||||
use fxhash::FxHashMap;
|
||||
use swc_atoms::JsWord;
|
||||
use swc_common::{sync::Lrc, FileName, Globals, Mark, SourceMap, DUMMY_SP, GLOBALS};
|
||||
use swc_ecma_ast::Module;
|
||||
|
||||
mod chunk;
|
||||
mod export;
|
||||
mod finalize;
|
||||
mod helpers;
|
||||
mod import;
|
||||
mod load;
|
||||
mod scope;
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
mod usage_analysis;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Config {
|
||||
/// If it's true, [Bundler] searches for require calls.
|
||||
pub require: bool,
|
||||
/// List of modules which should be preserved.
|
||||
pub external_modules: Vec<JsWord>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub enum BundleKind {
|
||||
/// User-provided entry
|
||||
Named { name: String },
|
||||
/// Auto-generated entry (created by import expression)
|
||||
Dynamic,
|
||||
/// A lazy-loaded shared library
|
||||
Lib { name: String },
|
||||
}
|
||||
|
||||
/// Built bundle
|
||||
#[derive(Debug)]
|
||||
pub struct Bundle {
|
||||
pub kind: BundleKind,
|
||||
pub id: ModuleId,
|
||||
/// Merged module.
|
||||
///
|
||||
/// You **should** run the fixer.
|
||||
pub module: Module,
|
||||
}
|
||||
|
||||
pub struct Bundler<'a, L, R>
|
||||
where
|
||||
L: Load,
|
||||
R: Resolve,
|
||||
{
|
||||
config: Config,
|
||||
|
||||
globals: &'a Globals,
|
||||
cm: Lrc<SourceMap>,
|
||||
loader: L,
|
||||
resolver: R,
|
||||
|
||||
/// [Mark] used while tree shaking
|
||||
used_mark: Mark,
|
||||
/// [Mark] applied to top-level bindings
|
||||
top_level_mark: Mark,
|
||||
|
||||
scope: Scope,
|
||||
}
|
||||
|
||||
impl<'a, L, R> Bundler<'a, L, R>
|
||||
where
|
||||
L: Load,
|
||||
R: Resolve,
|
||||
{
|
||||
pub fn new(
|
||||
globals: &'a Globals,
|
||||
cm: Lrc<SourceMap>,
|
||||
loader: L,
|
||||
resolver: R,
|
||||
config: Config,
|
||||
) -> Self {
|
||||
GLOBALS.set(&globals, || {
|
||||
let used_mark = Mark::fresh(Mark::root());
|
||||
log::info!("Used mark: {:?}", DUMMY_SP.apply_mark(used_mark).ctxt());
|
||||
let top_level_mark = Mark::fresh(Mark::root());
|
||||
log::info!(
|
||||
"top-level mark: {:?}",
|
||||
DUMMY_SP.apply_mark(top_level_mark).ctxt()
|
||||
);
|
||||
|
||||
Bundler {
|
||||
cm,
|
||||
loader,
|
||||
resolver,
|
||||
used_mark,
|
||||
top_level_mark,
|
||||
scope: Default::default(),
|
||||
globals,
|
||||
config,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
///
|
||||
///
|
||||
///
|
||||
/// Note: This method will panic if the entries reference each other in a
|
||||
/// circular manner. However, this applies only to the provided `entries`;
|
||||
/// dependencies with circular references are fine.
|
||||
pub fn bundle(&self, entries: FxHashMap<String, FileName>) -> Result<Vec<Bundle>, Error> {
|
||||
let results = entries
|
||||
.into_iter()
|
||||
.map(|(name, path)| -> Result<_, Error> {
|
||||
let res = self
|
||||
.load_transformed(&path)
|
||||
.context("load_transformed failed")?;
|
||||
Ok((name, res))
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// We collect here to handle dynamic imports
|
||||
// TODO: Handle dynamic imports
|
||||
|
||||
let local = {
|
||||
let mut output = FxHashMap::default();
|
||||
|
||||
for res in results {
|
||||
let (name, m) = res?;
|
||||
let m = m.unwrap();
|
||||
|
||||
output.insert(name, m);
|
||||
}
|
||||
|
||||
output
|
||||
};
|
||||
|
||||
let bundles = self.chunk(local)?;
|
||||
|
||||
let bundles = self.finalize(bundles)?;
|
||||
Ok(bundles)
|
||||
}
|
||||
|
||||
/// Sets `swc_common::GLOBALS`
|
||||
#[inline]
|
||||
fn run<F, Ret>(&self, op: F) -> Ret
|
||||
where
|
||||
F: FnOnce() -> Ret,
|
||||
{
|
||||
GLOBALS.set(self.globals, op)
|
||||
}
|
||||
}
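
For orientation, a minimal consumer-side sketch of how this new public API is meant to be driven. The `Load`/`Resolve` implementations are assumed to come from the caller (e.g. the spack loaders), and `assets/main.js` is just the sample asset added in this commit:

```rust
use anyhow::Error;
use fxhash::FxHashMap;
use swc_bundler::{Bundler, Config, Load, Resolve};
use swc_common::{sync::Lrc, FileName, Globals, SourceMap};

fn bundle_entry<L: Load, R: Resolve>(loader: L, resolver: R) -> Result<(), Error> {
    let globals = Globals::new();
    let cm: Lrc<SourceMap> = Default::default();

    let bundler = Bundler::new(
        &globals,
        cm.clone(),
        loader,
        resolver,
        Config {
            require: true,
            external_modules: vec![],
        },
    );

    // Entry name -> entry file; dynamic (auto-generated) entries are handled internally.
    let mut entries = FxHashMap::default();
    entries.insert("main".to_string(), FileName::Real("assets/main.js".into()));

    for bundle in bundler.bundle(entries)? {
        // Each bundle carries the merged `Module`; run the fixer before emitting it.
        println!("{:?}: {} top-level items", bundle.kind, bundle.module.body.len());
    }
    Ok(())
}
```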
|
47
bundler/src/bundler/scope.rs
Normal file
@ -0,0 +1,47 @@
|
||||
use super::load::TransformedModule;
|
||||
use crate::{
|
||||
id::{ModuleId, ModuleIdGenerator},
|
||||
util::CloneMap,
|
||||
};
|
||||
use swc_common::FileName;
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub(super) struct Scope {
|
||||
pub module_id_gen: ModuleIdGenerator,
|
||||
|
||||
circular_modules: CloneMap<ModuleId, ()>,
|
||||
loaded_modules: CloneMap<ModuleId, ()>,
|
||||
|
||||
/// Cached after applying basic transformations.
|
||||
transformed_modules: CloneMap<ModuleId, TransformedModule>,
|
||||
}
|
||||
|
||||
impl Scope {
|
||||
pub fn is_circular(&self, id: ModuleId) -> bool {
|
||||
self.circular_modules.get(&id).is_some()
|
||||
}
|
||||
|
||||
pub fn mark_as_circular(&self, id: ModuleId) {
|
||||
self.circular_modules.insert(id, ());
|
||||
}
|
||||
|
||||
pub fn mark_as_loaded(&self, id: ModuleId) {
|
||||
self.loaded_modules.insert(id, ());
|
||||
}
|
||||
|
||||
/// Stores module information. The information should contain only
|
||||
/// information obtained from the module itself. In other words, it should not
|
||||
/// contain information from a dependency.
|
||||
pub fn store_module(&self, info: TransformedModule) {
|
||||
self.transformed_modules.insert(info.id, info);
|
||||
}
|
||||
|
||||
pub fn get_module_by_path(&self, file_name: &FileName) -> Option<TransformedModule> {
|
||||
let (id, _) = self.module_id_gen.gen(file_name);
|
||||
self.get_module(id)
|
||||
}
|
||||
|
||||
pub fn get_module(&self, id: ModuleId) -> Option<TransformedModule> {
|
||||
Some(self.transformed_modules.get(&id)?.clone())
|
||||
}
|
||||
}
|
89
bundler/src/bundler/tests.rs
Normal file
@ -0,0 +1,89 @@
|
||||
//! Utilities for testing.
|
||||
use super::{Bundler, Config};
|
||||
use crate::{util::HygieneRemover, Load, Resolve};
|
||||
use anyhow::Error;
|
||||
use std::path::PathBuf;
|
||||
use swc_common::{sync::Lrc, FileName, SourceFile, SourceMap, GLOBALS};
|
||||
use swc_ecma_ast::*;
|
||||
use swc_ecma_parser::{lexer::Lexer, Parser, StringInput};
|
||||
use swc_ecma_utils::drop_span;
|
||||
use swc_ecma_visit::FoldWith;
|
||||
|
||||
pub struct Tester<'a> {
|
||||
pub cm: Lrc<SourceMap>,
|
||||
pub bundler: Bundler<'a, Loader, Resolver>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct Loader;
|
||||
|
||||
impl Load for Loader {
|
||||
fn load(&self, _: &FileName) -> Result<(Lrc<SourceFile>, Module), Error> {
|
||||
unreachable!("swc_bundler: tester.load")
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct Resolver;
|
||||
|
||||
impl Resolve for Resolver {
|
||||
fn resolve(&self, _: &FileName, _: &str) -> Result<FileName, Error> {
|
||||
unreachable!("swc_bundler: tester.resolve")
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Tester<'a> {
|
||||
pub fn parse(&self, s: &str) -> Module {
|
||||
let fm = self
|
||||
.cm
|
||||
.new_source_file(FileName::Real(PathBuf::from("input.js")), s.into());
|
||||
|
||||
let lexer = Lexer::new(
|
||||
Default::default(),
|
||||
Default::default(),
|
||||
StringInput::from(&*fm),
|
||||
None,
|
||||
);
|
||||
let mut parser = Parser::new_from(lexer);
|
||||
parser.parse_module().unwrap()
|
||||
}
|
||||
|
||||
pub fn assert_eq(&self, m: &Module, expected: &str) {
|
||||
let expected = self.parse(expected);
|
||||
|
||||
let m = drop_span(m.clone().fold_with(&mut HygieneRemover));
|
||||
let expected = drop_span(expected);
|
||||
|
||||
assert_eq!(m, expected)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn test_bundler<F>(op: F)
|
||||
where
|
||||
F: FnOnce(&mut Tester),
|
||||
{
|
||||
testing::run_test2(true, |cm, _| {
|
||||
GLOBALS.with(|globals| {
|
||||
let bundler = Bundler::new(
|
||||
globals,
|
||||
cm.clone(),
|
||||
Default::default(),
|
||||
Default::default(),
|
||||
Config {
|
||||
require: true,
|
||||
external_modules: vec![],
|
||||
},
|
||||
);
|
||||
|
||||
let mut t = Tester {
|
||||
cm: cm.clone(),
|
||||
bundler,
|
||||
};
|
||||
|
||||
op(&mut t);
|
||||
|
||||
Ok(())
|
||||
})
|
||||
})
|
||||
.expect("WTF?");
|
||||
}
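
A usage sketch for the helper above, mirroring the `basic` test that the old spack tester had (the snippet is illustrative):

```rust
#[test]
fn tester_roundtrip() {
    test_bundler(|t| {
        let m = t.parse("export const FOO = 1;");
        t.assert_eq(&m, "export const FOO = 1;");
    });
}
```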
|
@ -1,23 +1,22 @@
|
||||
use crate::{bundler::load_transformed::Specifier, Bundler};
|
||||
use std::{borrow::Cow, sync::Arc};
|
||||
use swc_common::SourceFile;
|
||||
use super::load::Specifier;
|
||||
use crate::{Bundler, Load, Resolve};
|
||||
use std::borrow::Cow;
|
||||
use swc_ecma_ast::*;
|
||||
use swc_ecma_transforms::optimization::simplify::dce;
|
||||
use swc_ecma_utils::ident::IdentLike;
|
||||
use swc_ecma_visit::FoldWith;
|
||||
|
||||
impl Bundler<'_> {
|
||||
impl<L, R> Bundler<'_, L, R>
|
||||
where
|
||||
L: Load,
|
||||
R: Resolve,
|
||||
{
|
||||
/// If used_exports is [None], all exports are treated as exported.
|
||||
///
|
||||
/// Note: Context of used_exports is ignored, as the specifiers come from
|
||||
/// another module.
|
||||
pub(super) fn drop_unused(
|
||||
&self,
|
||||
_fm: Arc<SourceFile>,
|
||||
node: Module,
|
||||
used_exports: Option<&[Specifier]>,
|
||||
) -> Module {
|
||||
self.swc.run(|| {
|
||||
pub(super) fn drop_unused(&self, node: Module, used_exports: Option<&[Specifier]>) -> Module {
|
||||
self.run(|| {
|
||||
let mut used = vec![];
|
||||
|
||||
if let Some(used_exports) = used_exports {
|
41
bundler/src/debug/mod.rs
Normal file
@ -0,0 +1,41 @@
|
||||
#![allow(dead_code)]
|
||||
|
||||
use std::io::{stdout, Write};
|
||||
use swc_common::{sync::Lrc, SourceMap};
|
||||
use swc_ecma_ast::{Ident, Module};
|
||||
use swc_ecma_codegen::{text_writer::JsWriter, Emitter};
|
||||
use swc_ecma_visit::{Fold, FoldWith};
|
||||
|
||||
pub(crate) fn print_hygiene(event: &str, cm: &Lrc<SourceMap>, t: &Module) {
|
||||
let module = t.clone().fold_with(&mut HygieneVisualizer);
|
||||
|
||||
let stdout = stdout();
|
||||
let mut w = stdout.lock();
|
||||
|
||||
writeln!(w, "==================== @ {} ====================", event).unwrap();
|
||||
Emitter {
|
||||
cfg: swc_ecma_codegen::Config { minify: false },
|
||||
cm: cm.clone(),
|
||||
comments: None,
|
||||
wr: Box::new(JsWriter::new(cm.clone(), "\n", &mut w, None)),
|
||||
handlers: Box::new(Handlers),
|
||||
}
|
||||
.emit_module(&module)
|
||||
.unwrap();
|
||||
writeln!(w, "==================== @ ====================").unwrap();
|
||||
}
|
||||
|
||||
impl swc_ecma_codegen::Handlers for Handlers {}
|
||||
|
||||
struct Handlers;
|
||||
|
||||
struct HygieneVisualizer;
|
||||
|
||||
impl Fold for HygieneVisualizer {
|
||||
fn fold_ident(&mut self, node: Ident) -> Ident {
|
||||
Ident {
|
||||
sym: format!("{}{:?}", node.sym, node.span.ctxt()).into(),
|
||||
..node
|
||||
}
|
||||
}
|
||||
}
|
117
bundler/src/hash.rs
Normal file
@ -0,0 +1,117 @@
|
||||
use anyhow::{Context, Error};
|
||||
use crc::{crc64, crc64::Digest, Hasher64};
|
||||
use std::io;
|
||||
use swc_common::{sync::Lrc, SourceMap, Span};
|
||||
use swc_ecma_ast::Module;
|
||||
use swc_ecma_codegen::{text_writer::WriteJs, Emitter};
|
||||
|
||||
pub(crate) fn calc_hash(cm: Lrc<SourceMap>, m: &Module) -> Result<String, Error> {
|
||||
let digest = crc64::Digest::new(crc64::ECMA);
|
||||
let mut buf = Hasher { digest };
|
||||
|
||||
{
|
||||
let mut emitter = Emitter {
|
||||
cfg: Default::default(),
|
||||
cm,
|
||||
comments: None,
|
||||
wr: Box::new(&mut buf) as Box<dyn WriteJs>,
|
||||
handlers: Box::new(Handlers),
|
||||
};
|
||||
|
||||
emitter
|
||||
.emit_module(&m)
|
||||
.context("failed to emit module to calculate hash")?;
|
||||
}
|
||||
//
|
||||
|
||||
let result = buf.digest.sum64();
|
||||
Ok(radix_fmt::radix(result, 36).to_string())
|
||||
}
|
||||
|
||||
impl swc_ecma_codegen::Handlers for Handlers {}
|
||||
struct Handlers;
|
||||
|
||||
struct Hasher {
|
||||
digest: Digest,
|
||||
}
|
||||
|
||||
impl Hasher {
|
||||
fn w(&mut self, s: &str) {
|
||||
self.digest.write(s.as_bytes());
|
||||
}
|
||||
}
|
||||
|
||||
impl WriteJs for &mut Hasher {
|
||||
fn increase_indent(&mut self) -> io::Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn decrease_indent(&mut self) -> io::Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_semi(&mut self) -> io::Result<()> {
|
||||
self.w(";");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_space(&mut self) -> io::Result<()> {
|
||||
self.w(" ");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_keyword(&mut self, _: Option<Span>, s: &'static str) -> io::Result<()> {
|
||||
self.w(s);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_operator(&mut self, s: &str) -> io::Result<()> {
|
||||
self.w(s);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_param(&mut self, s: &str) -> io::Result<()> {
|
||||
self.w(s);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_property(&mut self, s: &str) -> io::Result<()> {
|
||||
self.w(s);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_line(&mut self) -> io::Result<()> {
|
||||
self.w("\n");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_lit(&mut self, _: Span, s: &str) -> io::Result<()> {
|
||||
self.w(s);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_comment(&mut self, _: Span, s: &str) -> io::Result<()> {
|
||||
self.w(s);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_str_lit(&mut self, _: Span, s: &str) -> io::Result<()> {
|
||||
self.w(s);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_str(&mut self, s: &str) -> io::Result<()> {
|
||||
self.w(s);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_symbol(&mut self, _: Span, s: &str) -> io::Result<()> {
|
||||
self.w(s);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_punct(&mut self, s: &'static str) -> io::Result<()> {
|
||||
self.w(s);
|
||||
Ok(())
|
||||
}
|
||||
}
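
The chunk-name hashing boils down to a CRC-64 over the emitted code, rendered in base 36. A stand-alone illustration of that scheme (not part of the crate; it feeds the digest a plain string instead of going through the emitter):

```rust
use crc::{crc64, Hasher64};

/// Same digest and radix as `calc_hash`, but over arbitrary text.
fn toy_hash(code: &str) -> String {
    let mut digest = crc64::Digest::new(crc64::ECMA);
    digest.write(code.as_bytes());
    radix_fmt::radix(digest.sum64(), 36).to_string()
}
```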
|
@ -1,14 +1,10 @@
|
||||
use dashmap::DashMap;
|
||||
use fxhash::FxHashMap;
|
||||
use std::{
|
||||
fmt,
|
||||
path::PathBuf,
|
||||
sync::{
|
||||
atomic::{AtomicU64, Ordering::SeqCst},
|
||||
Arc,
|
||||
},
|
||||
sync::atomic::{AtomicU64, Ordering::SeqCst},
|
||||
};
|
||||
use swc_atoms::JsWord;
|
||||
use swc_common::{Mark, SyntaxContext, DUMMY_SP};
|
||||
use swc_common::{sync::Lock, FileName, Mark, SyntaxContext, DUMMY_SP};
|
||||
use swc_ecma_ast::Ident;
|
||||
use swc_ecma_utils::ident::IdentLike;
|
||||
|
||||
@ -24,18 +20,19 @@ impl fmt::Display for ModuleId {
|
||||
#[derive(Debug, Default)]
|
||||
pub(crate) struct ModuleIdGenerator {
|
||||
v: AtomicU64,
|
||||
cache: DashMap<Arc<PathBuf>, (ModuleId, Mark)>,
|
||||
cache: Lock<FxHashMap<FileName, (ModuleId, Mark)>>,
|
||||
}
|
||||
|
||||
impl ModuleIdGenerator {
|
||||
pub fn gen(&self, path: &Arc<PathBuf>) -> (ModuleId, Mark) {
|
||||
if let Some(v) = self.cache.get(path) {
|
||||
return *v.value();
|
||||
pub fn gen(&self, file_name: &FileName) -> (ModuleId, Mark) {
|
||||
let mut w = self.cache.lock();
|
||||
if let Some(v) = w.get(file_name) {
|
||||
return v.clone();
|
||||
}
|
||||
|
||||
let id = ModuleId(self.v.fetch_add(1, SeqCst));
|
||||
let mark = Mark::fresh(Mark::root());
|
||||
self.cache.insert(path.clone(), (id, mark));
|
||||
w.insert(file_name.clone(), (id, mark));
|
||||
(id, mark)
|
||||
}
|
||||
}
|
||||
@ -62,11 +59,6 @@ impl Id {
|
||||
Ident::new(self.0, DUMMY_SP.with_ctxt(self.1))
|
||||
}
|
||||
|
||||
pub fn append_mark(mut self, mark: Mark) -> Self {
|
||||
self.1 = self.1.apply_mark(mark);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn replace_mark(mut self, mark: Mark) -> Self {
|
||||
self.1 = SyntaxContext::empty().apply_mark(mark);
|
||||
self
|
||||
@ -110,5 +102,3 @@ impl PartialEq<JsWord> for Id {
|
||||
self.0 == *other
|
||||
}
|
||||
}
|
||||
|
||||
pub type QualifiedId = (ModuleId, Id);
|
14
bundler/src/lib.rs
Normal file
@ -0,0 +1,14 @@
|
||||
pub use self::{
|
||||
bundler::{Bundle, BundleKind, Bundler, Config},
|
||||
id::ModuleId,
|
||||
load::Load,
|
||||
resolve::Resolve,
|
||||
};
|
||||
|
||||
mod bundler;
|
||||
mod debug;
|
||||
mod hash;
|
||||
mod id;
|
||||
mod load;
|
||||
mod resolve;
|
||||
mod util;
|
27
bundler/src/load.rs
Normal file
@ -0,0 +1,27 @@
|
||||
use anyhow::Error;
|
||||
use swc_common::{sync::Lrc, FileName, SourceFile};
|
||||
use swc_ecma_ast::Module;
|
||||
|
||||
/// Responsible for providing files to the bundler.
|
||||
///
|
||||
/// Note: Resolve and Load are separate traits because multiple modules can depend
|
||||
/// on a single module. Due to the possibility of a 'common' module, the bundler
|
||||
/// should implement some caching. The bundler uses [FileName] as the key of the
|
||||
/// cache.
|
||||
///
|
||||
/// This trait is designed to allow passing a pre-parsed module.
|
||||
pub trait Load: swc_common::sync::Send + swc_common::sync::Sync {
|
||||
fn load(&self, file: &FileName) -> Result<(Lrc<SourceFile>, Module), Error>;
|
||||
}
|
||||
|
||||
impl<T: ?Sized + Load> Load for Box<T> {
|
||||
fn load(&self, file: &FileName) -> Result<(Lrc<SourceFile>, Module), Error> {
|
||||
(**self).load(file)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: ?Sized + Load> Load for &'a T {
|
||||
fn load(&self, file: &FileName) -> Result<(Lrc<SourceFile>, Module), Error> {
|
||||
(**self).load(file)
|
||||
}
|
||||
}
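
A minimal sketch of a filesystem-backed implementor, reusing the parser defaults seen elsewhere in this commit. `FsLoader` is hypothetical; real callers such as spack plug in their own loader:

```rust
use anyhow::{anyhow, Error};
use swc_common::{sync::Lrc, FileName, SourceFile, SourceMap};
use swc_ecma_ast::Module;
use swc_ecma_parser::{lexer::Lexer, Parser, StringInput};

/// Hypothetical loader that reads and parses files from disk.
struct FsLoader {
    cm: Lrc<SourceMap>,
}

impl Load for FsLoader {
    fn load(&self, file: &FileName) -> Result<(Lrc<SourceFile>, Module), Error> {
        let path = match file {
            FileName::Real(p) => p.clone(),
            _ => return Err(anyhow!("FsLoader only handles real files: {}", file)),
        };
        let src = std::fs::read_to_string(&path)?;
        let fm = self.cm.new_source_file(FileName::Real(path), src);

        let lexer = Lexer::new(
            Default::default(), // syntax
            Default::default(), // target
            StringInput::from(&*fm),
            None, // comments
        );
        let mut parser = Parser::new_from(lexer);
        let module = parser
            .parse_module()
            .map_err(|e| anyhow!("failed to parse {}: {:?}", fm.name, e))?;

        Ok((fm, module))
    }
}
```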
|
18
bundler/src/resolve.rs
Normal file
@ -0,0 +1,18 @@
|
||||
use anyhow::Error;
|
||||
use swc_common::FileName;
|
||||
|
||||
pub trait Resolve: swc_common::sync::Send + swc_common::sync::Sync {
|
||||
fn resolve(&self, base: &FileName, module_specifier: &str) -> Result<FileName, Error>;
|
||||
}
|
||||
|
||||
impl<T: ?Sized + Resolve> Resolve for Box<T> {
|
||||
fn resolve(&self, base: &FileName, module_specifier: &str) -> Result<FileName, Error> {
|
||||
(**self).resolve(base, module_specifier)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: ?Sized + Resolve> Resolve for &'a T {
|
||||
fn resolve(&self, base: &FileName, module_specifier: &str) -> Result<FileName, Error> {
|
||||
(**self).resolve(base, module_specifier)
|
||||
}
|
||||
}
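
And a correspondingly small sketch of a resolver that only understands relative specifiers. It is hypothetical; the Node-style resolution used by spack lives in its `NodeResolver`:

```rust
use anyhow::{bail, Error};
use swc_common::FileName;

/// Hypothetical resolver: joins relative specifiers onto the importer's directory.
struct RelativeResolver;

impl Resolve for RelativeResolver {
    fn resolve(&self, base: &FileName, module_specifier: &str) -> Result<FileName, Error> {
        let base_dir = match base {
            FileName::Real(p) => p.parent().unwrap_or_else(|| std::path::Path::new(".")),
            _ => bail!("cannot resolve `{}` from `{}`", module_specifier, base),
        };

        if module_specifier.starts_with('.') {
            // `./a` imported next to `src/main.js` becomes `src/./a.js`.
            Ok(FileName::Real(base_dir.join(module_specifier).with_extension("js")))
        } else {
            bail!("`{}` is not a relative specifier", module_specifier)
        }
    }
}
```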
|
96
bundler/src/util.rs
Normal file
@ -0,0 +1,96 @@
|
||||
use fxhash::FxBuildHasher;
|
||||
use std::hash::Hash;
|
||||
use swc_common::{Span, SyntaxContext};
|
||||
use swc_ecma_visit::Fold;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct CloneMap<K, V>
|
||||
where
|
||||
K: Eq + Hash,
|
||||
V: Clone,
|
||||
{
|
||||
#[cfg(feature = "concurrent")]
|
||||
inner: dashmap::DashMap<K, V, FxBuildHasher>,
|
||||
#[cfg(not(feature = "concurrent"))]
|
||||
inner: std::cell::RefCell<std::collections::HashMap<K, V, FxBuildHasher>>,
|
||||
}
|
||||
|
||||
impl<K, V> Default for CloneMap<K, V>
|
||||
where
|
||||
K: Eq + Hash,
|
||||
V: Clone,
|
||||
{
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
inner: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, V> CloneMap<K, V>
|
||||
where
|
||||
K: Eq + Hash,
|
||||
V: Clone,
|
||||
{
|
||||
#[cfg(feature = "concurrent")]
|
||||
pub fn get(&self, k: &K) -> Option<V> {
|
||||
if let Some(v) = self.inner.get(k) {
|
||||
Some(v.value().clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "concurrent"))]
|
||||
pub fn get(&self, k: &K) -> Option<V> {
|
||||
if let Some(v) = self.inner.borrow().get(k) {
|
||||
Some(v.clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "concurrent")]
|
||||
pub fn insert(&self, k: K, v: V) {
|
||||
self.inner.insert(k, v);
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "concurrent"))]
|
||||
pub fn insert(&self, k: K, v: V) {
|
||||
self.inner.borrow_mut().insert(k, v);
|
||||
}
|
||||
}
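
The point of `CloneMap` is that lookups hand back clones, so the same `&self` API works in both the single-threaded and the `concurrent` build. A quick crate-internal illustration (the type is `pub(crate)`):

```rust
// Values come back by clone, so no guard or borrow escapes the map.
fn demo(map: &CloneMap<u32, String>) -> Option<String> {
    map.insert(1, "one".to_string());
    map.get(&1) // -> Some("one".to_string())
}
```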
|
||||
|
||||
pub(crate) struct HygieneRemover;
|
||||
|
||||
impl Fold for HygieneRemover {
|
||||
fn fold_span(&mut self, s: Span) -> Span {
|
||||
s.with_ctxt(SyntaxContext::empty())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "rayon")]
|
||||
pub(crate) use rayon::join;
|
||||
|
||||
#[cfg(not(feature = "rayon"))]
|
||||
pub(crate) fn join<A, B, RA, RB>(oper_a: A, oper_b: B) -> (RA, RB)
|
||||
where
|
||||
A: FnOnce() -> RA,
|
||||
B: FnOnce() -> RB,
|
||||
{
|
||||
(oper_a(), oper_b())
|
||||
}
|
||||
|
||||
#[cfg(feature = "rayon")]
|
||||
pub(crate) use rayon::iter::IntoParallelIterator;
|
||||
|
||||
/// Fake trait that mirrors rayon's `IntoParallelIterator` when the `rayon` feature is disabled.
|
||||
#[cfg(not(feature = "rayon"))]
|
||||
pub(crate) trait IntoParallelIterator: Sized + IntoIterator {
|
||||
fn into_par_iter(self) -> <Self as IntoIterator>::IntoIter {
|
||||
self.into_iter()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "rayon"))]
|
||||
impl<T> IntoParallelIterator for T where T: IntoIterator {}
|
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "swc_common"
|
||||
version = "0.9.0"
|
||||
version = "0.9.1"
|
||||
authors = ["강동윤 <kdy1997.dev@gmail.com>"]
|
||||
license = "Apache-2.0/MIT"
|
||||
repository = "https://github.com/swc-project/swc.git"
|
||||
|
@ -103,28 +103,18 @@ impl<T> Lock<T> {
|
||||
// self.0.get_mut()
|
||||
// }
|
||||
|
||||
// #[cfg(parallel_compiler)]
|
||||
// #[cfg(feature = "concurrent")]
|
||||
// #[inline(always)]
|
||||
// pub fn try_lock(&self) -> Option<LockGuard<'_, T>> {
|
||||
// self.0.try_lock()
|
||||
// }
|
||||
//
|
||||
// #[cfg(not(parallel_compiler))]
|
||||
// #[cfg(not(feature = "concurrent"))]
|
||||
// #[inline(always)]
|
||||
// pub fn try_lock(&self) -> Option<LockGuard<'_, T>> {
|
||||
// self.0.try_borrow_mut().ok()
|
||||
// }
|
||||
|
||||
#[cfg(parallel_compiler)]
|
||||
#[inline(always)]
|
||||
pub fn lock(&self) -> LockGuard<'_, T> {
|
||||
if ERROR_CHECKING {
|
||||
self.0.try_lock().expect("lock was already held")
|
||||
} else {
|
||||
self.0.lock()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "concurrent")]
|
||||
#[inline(always)]
|
||||
pub fn lock(&self) -> LockGuard<'_, T> {
|
||||
@ -253,7 +243,7 @@ impl<T: Ord + Copy> Ord for LockCell<T> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Default)]
|
||||
pub struct RwLock<T>(InnerRwLock<T>);
|
||||
|
||||
impl<T> RwLock<T> {
|
||||
@ -278,6 +268,50 @@ impl<T> RwLock<T> {
|
||||
pub fn borrow(&self) -> ReadGuard<'_, T> {
|
||||
self.read()
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn get_mut(&mut self) -> &mut T {
|
||||
self.0.get_mut()
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn with_read_lock<F: FnOnce(&T) -> R, R>(&self, f: F) -> R {
|
||||
f(&*self.read())
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "concurrent"))]
|
||||
#[inline(always)]
|
||||
pub fn try_write(&self) -> Result<WriteGuard<'_, T>, ()> {
|
||||
self.0.try_borrow_mut().map_err(|_| ())
|
||||
}
|
||||
|
||||
#[cfg(feature = "concurrent")]
|
||||
#[inline(always)]
|
||||
pub fn try_write(&self) -> Result<WriteGuard<'_, T>, ()> {
|
||||
self.0.try_write().ok_or(())
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "concurrent"))]
|
||||
#[inline(always)]
|
||||
pub fn write(&self) -> WriteGuard<'_, T> {
|
||||
self.0.borrow_mut()
|
||||
}
|
||||
|
||||
#[cfg(feature = "concurrent")]
|
||||
#[inline(always)]
|
||||
pub fn write(&self) -> WriteGuard<'_, T> {
|
||||
self.0.write()
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn with_write_lock<F: FnOnce(&mut T) -> R, R>(&self, f: F) -> R {
|
||||
f(&mut *self.write())
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn borrow_mut(&self) -> WriteGuard<'_, T> {
|
||||
self.write()
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME: Probably a bad idea
|
||||
|
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "swc_ecma_parser"
|
||||
version = "0.33.2"
|
||||
version = "0.33.3"
|
||||
authors = ["강동윤 <kdy1997.dev@gmail.com>"]
|
||||
license = "Apache-2.0/MIT"
|
||||
repository = "https://github.com/swc-project/swc.git"
|
||||
|
@ -7,7 +7,6 @@ use swc_common::{
|
||||
use swc_ecma_parser::{lexer::Lexer, Capturing, Parser, StringInput, Syntax};
|
||||
|
||||
fn main() {
|
||||
swc_common::GLOBALS.set(&swc_common::Globals::new(), || {
|
||||
let cm: Lrc<SourceMap> = Default::default();
|
||||
let handler = Handler::with_tty_emitter(ColorConfig::Auto, true, false, Some(cm.clone()));
|
||||
|
||||
@ -42,5 +41,4 @@ fn main() {
|
||||
.expect("Failed to parse module.");
|
||||
|
||||
println!("Tokens: {:?}", parser.input().take());
|
||||
});
|
||||
}
|
||||
|
@ -7,7 +7,6 @@ use swc_common::{
|
||||
use swc_ecma_parser::{lexer::Lexer, Capturing, Parser, StringInput, Syntax};
|
||||
|
||||
fn main() {
|
||||
swc_common::GLOBALS.set(&swc_common::Globals::new(), || {
|
||||
let cm: Lrc<SourceMap> = Default::default();
|
||||
let handler = Handler::with_tty_emitter(ColorConfig::Auto, true, false, Some(cm.clone()));
|
||||
|
||||
@ -42,5 +41,4 @@ fn main() {
|
||||
.expect("Failed to parse module.");
|
||||
|
||||
println!("Tokens: {:?}", parser.input().take());
|
||||
});
|
||||
}
|
||||
|
@ -40,7 +40,7 @@
|
||||
//! #[macro_use]
|
||||
//! extern crate swc_common;
|
||||
//! extern crate swc_ecma_parser;
|
||||
//! use std::sync::Arc;
|
||||
//! use swc_common::sync::Lrc;
|
||||
//! use swc_common::{
|
||||
//! errors::{ColorConfig, Handler},
|
||||
//! FileName, FilePathMapping, SourceMap,
|
||||
@ -48,8 +48,7 @@
|
||||
//! use swc_ecma_parser::{lexer::Lexer, Parser, StringInput, Syntax};
|
||||
//!
|
||||
//! fn main() {
|
||||
//! swc_common::GLOBALS.set(&swc_common::Globals::new(), || {
|
||||
//! let cm: Arc<SourceMap> = Default::default();
|
||||
//! let cm: Lrc<SourceMap> = Default::default();
|
||||
//! let handler =
|
||||
//! Handler::with_tty_emitter(ColorConfig::Auto, true, false,
|
||||
//! Some(cm.clone()));
|
||||
@ -84,7 +83,6 @@
|
||||
//! e.into_diagnostic(&handler).emit()
|
||||
//! })
|
||||
//! .expect("failed to parser module");
|
||||
//! });
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
|
@ -115,7 +115,7 @@ macro_rules! assert_and_bump {
|
||||
/// if token has data like string.
|
||||
macro_rules! eat {
|
||||
($p:expr, ';') => {{
|
||||
log::trace!("eat(';'): cur={:?}", cur!($p, true));
|
||||
log::trace!("eat(';'): cur={:?}", cur!($p, false));
|
||||
$p.input.eat(&Token::Semi)
|
||||
|| eof!($p)
|
||||
|| is!($p, '}')
|
||||
|
1
ecmascript/parser/tests/typescript/eof-issue/input.tsx
Normal file
@ -0,0 +1 @@
|
||||
export const Divider = <div/>
|
83
ecmascript/parser/tests/typescript/eof-issue/input.tsx.json
Normal file
@ -0,0 +1,83 @@
|
||||
{
|
||||
"type": "Module",
|
||||
"span": {
|
||||
"start": 0,
|
||||
"end": 29,
|
||||
"ctxt": 0
|
||||
},
|
||||
"body": [
|
||||
{
|
||||
"type": "ExportDeclaration",
|
||||
"span": {
|
||||
"start": 0,
|
||||
"end": 29,
|
||||
"ctxt": 0
|
||||
},
|
||||
"declaration": {
|
||||
"type": "VariableDeclaration",
|
||||
"span": {
|
||||
"start": 7,
|
||||
"end": 29,
|
||||
"ctxt": 0
|
||||
},
|
||||
"kind": "const",
|
||||
"declare": false,
|
||||
"declarations": [
|
||||
{
|
||||
"type": "VariableDeclarator",
|
||||
"span": {
|
||||
"start": 13,
|
||||
"end": 29,
|
||||
"ctxt": 0
|
||||
},
|
||||
"id": {
|
||||
"type": "Identifier",
|
||||
"span": {
|
||||
"start": 13,
|
||||
"end": 20,
|
||||
"ctxt": 0
|
||||
},
|
||||
"value": "Divider",
|
||||
"typeAnnotation": null,
|
||||
"optional": false
|
||||
},
|
||||
"init": {
|
||||
"type": "JSXElement",
|
||||
"span": {
|
||||
"start": 23,
|
||||
"end": 29,
|
||||
"ctxt": 0
|
||||
},
|
||||
"opening": {
|
||||
"type": "JSXOpeningElement",
|
||||
"name": {
|
||||
"type": "Identifier",
|
||||
"span": {
|
||||
"start": 24,
|
||||
"end": 27,
|
||||
"ctxt": 0
|
||||
},
|
||||
"value": "div",
|
||||
"typeAnnotation": null,
|
||||
"optional": false
|
||||
},
|
||||
"span": {
|
||||
"start": 23,
|
||||
"end": 29,
|
||||
"ctxt": 0
|
||||
},
|
||||
"attributes": [],
|
||||
"selfClosing": true,
|
||||
"typeArguments": null
|
||||
},
|
||||
"children": [],
|
||||
"closing": null
|
||||
},
|
||||
"definite": false
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"interpreter": null
|
||||
}
|
@ -21,7 +21,7 @@ semver = { version = "0.9.0", features = ["serde"] }
|
||||
once_cell = "1.2.0"
|
||||
st-map = "0.1.2"
|
||||
fxhash = "0.2.1"
|
||||
dashmap = "=3.5.1"
|
||||
dashmap = "3"
|
||||
|
||||
[dev-dependencies]
|
||||
swc_ecma_codegen = { path = "../codegen" }
|
||||
|
@ -20,7 +20,7 @@ swc_ecma_ast = { version = "0.28.0", path ="../ast" }
|
||||
swc_ecma_utils = { version = "0.17.0", path ="../utils" }
|
||||
swc_ecma_parser = { version = "0.33.0", path ="../parser" }
|
||||
swc_ecma_visit = { version = "0.13.0", path ="../visit" }
|
||||
dashmap = { version = "=3.5.1", optional = true }
|
||||
dashmap = { version = "3", optional = true }
|
||||
either = "1.5"
|
||||
fxhash = "0.2"
|
||||
indexmap = "1"
|
||||
|
@ -153,11 +153,12 @@ impl Fold for Dce<'_> {
|
||||
return node;
|
||||
}
|
||||
|
||||
let stmts = node.stmts.fold_with(self);
|
||||
let mut stmts = node.stmts.fold_with(self);
|
||||
|
||||
let mut span = node.span;
|
||||
if stmts.iter().any(|stmt| self.is_marked(stmt.span())) {
|
||||
if self.marking_phase || stmts.iter().any(|stmt| self.is_marked(stmt.span())) {
|
||||
span = span.apply_mark(self.config.used_mark);
|
||||
stmts = self.fold_in_marking_phase(stmts);
|
||||
}
|
||||
|
||||
BlockStmt { span, stmts }
|
||||
@ -170,6 +171,7 @@ impl Fold for Dce<'_> {
|
||||
|
||||
if self.marking_phase || self.included.contains(&node.ident.to_id()) {
|
||||
node.class.span = node.class.span.apply_mark(self.config.used_mark);
|
||||
node.class.super_class = self.fold_in_marking_phase(node.class.super_class);
|
||||
}
|
||||
|
||||
node.fold_children_with(self)
|
||||
@ -307,7 +309,9 @@ impl Fold for Dce<'_> {
|
||||
|
||||
if self.marking_phase || self.included.contains(&f.ident.to_id()) {
|
||||
f.function.span = f.function.span.apply_mark(self.config.used_mark);
|
||||
f.function.params = self.fold_in_marking_phase(f.function.params);
|
||||
f.function.body = self.fold_in_marking_phase(f.function.body);
|
||||
return f;
|
||||
}
|
||||
|
||||
f.fold_children_with(self)
|
||||
@ -445,6 +449,7 @@ impl Fold for Dce<'_> {
|
||||
}
|
||||
|
||||
// Drop unused imports.
|
||||
log::debug!("Removing unused import specifiers");
|
||||
import.specifiers.retain(|s| self.should_include(s));
|
||||
|
||||
if !import.specifiers.is_empty() {
|
||||
@ -508,13 +513,9 @@ impl Fold for Dce<'_> {
|
||||
if self.is_marked(node.span) {
|
||||
return node;
|
||||
}
|
||||
|
||||
node.span = node.span.apply_mark(self.config.used_mark);
|
||||
|
||||
let mut node = node.fold_children_with(self);
|
||||
|
||||
if self.is_marked(node.arg.span()) {
|
||||
node.arg = self.fold_in_marking_phase(node.arg)
|
||||
}
|
||||
node.arg = self.fold_in_marking_phase(node.arg);
|
||||
|
||||
node
|
||||
}
|
||||
@ -611,7 +612,6 @@ impl Fold for Dce<'_> {
|
||||
return var;
|
||||
}
|
||||
|
||||
log::trace!("VarDecl");
|
||||
var = var.fold_children_with(self);
|
||||
|
||||
var.decls = var.decls.move_flat_map(|decl| {
|
||||
@ -676,6 +676,10 @@ impl Dce<'_> {
|
||||
T: StmtLike + FoldWith<Self> + Spanned + std::fmt::Debug,
|
||||
T: for<'any> VisitWith<SideEffectVisitor<'any>> + VisitWith<ImportDetector>,
|
||||
{
|
||||
if self.marking_phase {
|
||||
return items.move_map(|item| self.fold_in_marking_phase(item));
|
||||
}
|
||||
|
||||
let old = self.changed;
|
||||
|
||||
let mut preserved = FxHashSet::default();
|
||||
|
@ -424,3 +424,34 @@ var load = function(){}
|
||||
var { progress } = load();
|
||||
console.log(progress);"
|
||||
);
|
||||
|
||||
noop!(
|
||||
spack_issue_008,
|
||||
"class B {
|
||||
}
|
||||
class A extends B {
|
||||
}
|
||||
console.log('foo');
|
||||
new A();"
|
||||
);
|
||||
|
||||
noop!(
|
||||
spack_issue_009,
|
||||
"
|
||||
class A {
|
||||
|
||||
}
|
||||
function a() {
|
||||
return new A();
|
||||
}
|
||||
console.log(a, a());
|
||||
"
|
||||
);
|
||||
|
||||
noop!(
|
||||
spack_issue_010,
|
||||
"
|
||||
class A {}
|
||||
console.log(new A());
|
||||
"
|
||||
);
|
||||
|
@ -17,6 +17,7 @@ neon-build = "0.4.0"
|
||||
|
||||
[dependencies]
|
||||
swc = { path = "../" }
|
||||
swc_bundler = { path = "../bundler" }
|
||||
swc_common = { path = "../common", features = ["tty-emitter", "sourcemap"] }
|
||||
swc_ecma_ast = { path = "../ecmascript/ast" }
|
||||
swc_ecma_parser = { path = "../ecmascript/parser" }
|
||||
|
@ -3,16 +3,13 @@ use anyhow::{bail, Error};
|
||||
use fxhash::FxHashMap;
|
||||
use neon::prelude::*;
|
||||
use serde::Deserialize;
|
||||
use spack::{
|
||||
load::Load,
|
||||
resolve::{NodeResolver, Resolve},
|
||||
BundleKind,
|
||||
};
|
||||
use spack::resolvers::NodeResolver;
|
||||
use std::{
|
||||
panic::{catch_unwind, AssertUnwindSafe},
|
||||
sync::Arc,
|
||||
};
|
||||
use swc::{config::SourceMapsConfig, Compiler, TransformOutput};
|
||||
use swc_bundler::{BundleKind, Bundler, Load, Resolve};
|
||||
|
||||
struct ConfigItem {
|
||||
loader: Box<dyn Load>,
|
||||
@ -41,23 +38,55 @@ impl Task for BundleTask {
|
||||
|
||||
fn perform(&self) -> Result<Self::Output, Self::Error> {
|
||||
let res = catch_unwind(AssertUnwindSafe(|| {
|
||||
let bundler = spack::Bundler::new(
|
||||
self.swc.clone(),
|
||||
self.config
|
||||
.static_items
|
||||
.config
|
||||
.options
|
||||
.as_ref()
|
||||
.map(|options| options.clone())
|
||||
.unwrap_or_else(|| {
|
||||
serde_json::from_value(serde_json::Value::Object(Default::default()))
|
||||
.unwrap()
|
||||
}),
|
||||
&self.config.resolver,
|
||||
let bundler = Bundler::new(
|
||||
self.swc.globals(),
|
||||
self.swc.cm.clone(),
|
||||
&self.config.loader,
|
||||
&self.config.resolver,
|
||||
swc_bundler::Config {
|
||||
require: true,
|
||||
external_modules: vec![
|
||||
"assert",
|
||||
"buffer",
|
||||
"child_process",
|
||||
"console",
|
||||
"cluster",
|
||||
"crypto",
|
||||
"dgram",
|
||||
"dns",
|
||||
"events",
|
||||
"fs",
|
||||
"http",
|
||||
"http2",
|
||||
"https",
|
||||
"net",
|
||||
"os",
|
||||
"path",
|
||||
"perf_hooks",
|
||||
"process",
|
||||
"querystring",
|
||||
"readline",
|
||||
"repl",
|
||||
"stream",
|
||||
"string_decoder",
|
||||
"timers",
|
||||
"tls",
|
||||
"tty",
|
||||
"url",
|
||||
"util",
|
||||
"v8",
|
||||
"vm",
|
||||
"wasi",
|
||||
"worker",
|
||||
"zlib",
|
||||
]
|
||||
.into_iter()
|
||||
.map(From::from)
|
||||
.collect(),
|
||||
},
|
||||
);
|
||||
|
||||
let result = bundler.bundle(&self.config.static_items.config)?;
|
||||
let result = bundler.bundle(self.config.static_items.config.entry.clone().into())?;
|
||||
|
||||
let result = result
|
||||
.into_iter()
|
||||
@ -134,7 +163,7 @@ pub(crate) fn bundle(mut cx: MethodContext<JsCompiler>) -> JsResult<JsValue> {
|
||||
|
||||
let opt = cx.argument::<JsObject>(0)?;
|
||||
let callback = cx.argument::<JsFunction>(1)?;
|
||||
let static_items = neon_serde::from_value(&mut cx, opt.upcast())?;
|
||||
let static_items: StaticConfigItem = neon_serde::from_value(&mut cx, opt.upcast())?;
|
||||
|
||||
let loader = opt
|
||||
.get(&mut cx, "loader")?
|
||||
@ -150,7 +179,15 @@ pub(crate) fn bundle(mut cx: MethodContext<JsCompiler>) -> JsResult<JsValue> {
|
||||
.unwrap_or_else(|_| {
|
||||
Box::new(spack::loaders::swc::SwcLoader::new(
|
||||
c.clone(),
|
||||
Default::default(),
|
||||
static_items
|
||||
.config
|
||||
.options
|
||||
.as_ref()
|
||||
.cloned()
|
||||
.unwrap_or_else(|| {
|
||||
serde_json::from_value(serde_json::Value::Object(Default::default()))
|
||||
.unwrap()
|
||||
}),
|
||||
))
|
||||
});
|
||||
|
||||
@ -158,7 +195,7 @@ pub(crate) fn bundle(mut cx: MethodContext<JsCompiler>) -> JsResult<JsValue> {
|
||||
swc: c.clone(),
|
||||
config: ConfigItem {
|
||||
loader,
|
||||
resolver: Box::new(NodeResolver) as Box<_>,
|
||||
resolver: Box::new(NodeResolver::new()) as Box<_>,
|
||||
static_items,
|
||||
},
|
||||
}
|
||||
|
@ -16,8 +16,8 @@ it('should respect .swcrc', async () => {
|
||||
const result = await swc.bundle(path.join(__dirname, '../../tests/spack/config-swcrc/spack.config.js'));
|
||||
|
||||
expect(result.a).toBeTruthy();
|
||||
expect(result.a.code).toContain(`require("./common-`);
|
||||
expect(result.a.code).toContain(`require("./common`);
|
||||
|
||||
expect(result.b).toBeTruthy();
|
||||
expect(result.b.code).toContain(`require("./common-`);
|
||||
expect(result.b.code).toContain(`require("./common`);
|
||||
});
|
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@swc/core",
|
||||
"version": "1.2.18",
|
||||
"version": "1.2.19",
|
||||
"description": "Super-fast alternative for babel",
|
||||
"main": "./index.js",
|
||||
"author": "강동윤 <kdy1997.dev@gmail.com>",
|
||||
|
@ -12,6 +12,7 @@ edition = "2018"
|
||||
|
||||
[dependencies]
|
||||
swc_atoms = { path = "../atoms" }
|
||||
swc_bundler = { path = "../bundler", features = ["concurrent"] }
|
||||
swc_common = { path = "../common" }
|
||||
swc_ecma_ast = { path = "../ecmascript/ast" }
|
||||
swc_ecma_codegen = { path = "../ecmascript/codegen" }
|
||||
@ -25,18 +26,13 @@ regex = "1"
|
||||
once_cell = "1"
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
anyhow = "1"
|
||||
crc = "1.8"
|
||||
dashmap = "=3.5.1"
|
||||
radix_fmt = "1"
|
||||
rayon = "1"
|
||||
dashmap = "3"
|
||||
log = "0.4.8"
|
||||
node-resolve = "2.2.0"
|
||||
petgraph = "0.5"
|
||||
fxhash = "0.2.1"
|
||||
is-macro = "0.1.8"
|
||||
neon = { version = "0.4.0", features = ["event-handler-api"] }
|
||||
neon-sys = "0.4.0"
|
||||
relative-path = "1.2"
|
||||
|
||||
[dev-dependencies]
|
||||
pretty_assertions = "0.6.1"
|
||||
|
@ -1,57 +0,0 @@
|
||||
use crate::Bundler;
|
||||
use anyhow::{Error, Result};
|
||||
use dashmap::DashMap;
|
||||
use std::{collections::HashMap, env};
|
||||
use swc_atoms::JsWord;
|
||||
use swc_common::FileName;
|
||||
use swc_ecma_ast::Expr;
|
||||
use swc_ecma_parser::{lexer::Lexer, Parser, StringInput};
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub(super) struct Cache {
|
||||
exprs: DashMap<String, Expr>,
|
||||
}
|
||||
|
||||
impl Bundler<'_> {
|
||||
#[inline]
|
||||
fn get_or_parse_expr(&self, key: &str, s: String) -> Result<Expr> {
|
||||
if let Some(v) = self.cache.exprs.get(key) {
|
||||
return Ok((*v).clone());
|
||||
}
|
||||
|
||||
let cm = self.swc.cm.clone();
|
||||
let fm = cm.new_source_file(FileName::Anon, s);
|
||||
|
||||
let lexer = Lexer::new(
|
||||
Default::default(),
|
||||
Default::default(),
|
||||
StringInput::from(&*fm),
|
||||
None,
|
||||
);
|
||||
|
||||
let mut parser = Parser::new_from(lexer);
|
||||
|
||||
let expr = parser.parse_expr().map_err(|err| {
|
||||
Error::msg(format!(
|
||||
"config: failed parse `{}` as expression: (key = `{}`): {:?}",
|
||||
fm.src, key, err
|
||||
))
|
||||
})?;
|
||||
|
||||
self.cache.exprs.insert(key.to_string(), *expr.clone());
|
||||
|
||||
Ok(*expr)
|
||||
}
|
||||
|
||||
/// Has `NODE_ENV`.
|
||||
pub(super) fn envs(&self) -> Result<HashMap<JsWord, Expr>> {
|
||||
let mut envs = HashMap::with_capacity(1);
|
||||
|
||||
let node_env = env::var("NODE_ENV").unwrap_or_else(|_| "development".to_string());
|
||||
|
||||
let v = self.get_or_parse_expr("NODE_ENV", node_env)?;
|
||||
envs.insert("NODE_ENV".into(), v);
|
||||
|
||||
Ok(envs)
|
||||
}
|
||||
}
|
@ -1,466 +0,0 @@
|
||||
use super::Bundler;
|
||||
use crate::{
|
||||
bundler::{
|
||||
export::{Exports, RawExports},
|
||||
helpers::Helpers,
|
||||
import::RawImports,
|
||||
},
|
||||
debug::assert_clean,
|
||||
Id, ModuleId,
|
||||
};
|
||||
use anyhow::{Context, Error};
|
||||
use is_macro::Is;
|
||||
use rayon::prelude::*;
|
||||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
};
|
||||
use swc_atoms::js_word;
|
||||
use swc_common::{FileName, Mark, SourceFile, DUMMY_SP};
|
||||
use swc_ecma_ast::{
|
||||
Expr, ExprOrSuper, ImportDecl, ImportSpecifier, Invalid, MemberExpr, Module, ModuleDecl,
|
||||
Program, Str,
|
||||
};
|
||||
use swc_ecma_transforms::{
|
||||
optimization::{simplify::dead_branch_remover, InlineGlobals},
|
||||
resolver::resolver_with_mark,
|
||||
};
|
||||
use swc_ecma_visit::{FoldWith, Node, Visit, VisitWith};
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
/// Module after applying transformations.
|
||||
#[derive(Debug, Clone)]
|
||||
pub(super) struct TransformedModule {
|
||||
pub id: ModuleId,
|
||||
pub fm: Arc<SourceFile>,
|
||||
pub module: Arc<Module>,
|
||||
pub imports: Arc<Imports>,
|
||||
pub exports: Arc<Exports>,
|
||||
|
||||
/// If false, the module will be wrapped with helper function just like
|
||||
/// webpack.
|
||||
pub is_es6: bool,
|
||||
|
||||
/// Used helpers
|
||||
pub helpers: Arc<Helpers>,
|
||||
|
||||
mark: Mark,
|
||||
}
|
||||
|
||||
impl TransformedModule {
|
||||
/// Marks applied to bindings
|
||||
pub fn mark(&self) -> Mark {
|
||||
self.mark
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub(super) struct Imports {
|
||||
/// If imported ids are empty, it is a side-effect import.
|
||||
pub specifiers: Vec<(Source, Vec<Specifier>)>,
|
||||
}
|
||||
|
||||
/// Clone is relatively cheap
|
||||
#[derive(Debug, Clone, Is)]
|
||||
pub(super) enum Specifier {
|
||||
Specific { local: Id, alias: Option<Id> },
|
||||
Namespace { local: Id },
|
||||
}
|
||||
|
||||
impl Specifier {
|
||||
pub fn local(&self) -> &Id {
|
||||
match self {
|
||||
Specifier::Specific { local, .. } => local,
|
||||
Specifier::Namespace { local, .. } => local,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub(super) struct Source {
|
||||
pub is_loaded_synchronously: bool,
|
||||
pub is_unconditional: bool,
|
||||
|
||||
pub module_id: ModuleId,
|
||||
// Clone is relatively cheap, thanks to string_cache.
|
||||
pub src: Str,
|
||||
}
|
||||
|
||||
impl Bundler<'_> {
|
||||
/// Phase 1 (discovery)
|
||||
///
|
||||
/// We apply transforms at this phase to make cache efficient.
|
||||
/// As we cache in this phase, changing dependency does not affect cache.
|
||||
pub(super) fn load_transformed(&self, path: Arc<PathBuf>) -> Result<TransformedModule, Error> {
|
||||
Ok(self.load_transformed_inner(path)?.1)
|
||||
}
|
||||
|
||||
fn load_transformed_inner(
|
||||
&self,
|
||||
path: Arc<PathBuf>,
|
||||
) -> Result<(Arc<PathBuf>, TransformedModule), Error> {
|
||||
log::trace!("load_transformed: ({})", path.display());
|
||||
|
||||
self.swc.run(|| {
|
||||
if let Some(cached) = self.scope.get_module_by_path(&path) {
|
||||
return Ok((path, cached.clone()));
|
||||
}
|
||||
|
||||
let (_, fm, module) = self.load(&path).context("Bundler.load failed")?;
|
||||
let v = self
|
||||
.transform_module(&path, fm.clone(), module)
|
||||
.context("failed to transform module")?;
|
||||
|
||||
self.scope.store_module(path.clone(), v.clone());
|
||||
|
||||
//{
|
||||
// let code = self
|
||||
// .swc
|
||||
// .print(
|
||||
// &(*v.module).clone().fold_with(&mut HygieneVisualizer),
|
||||
// fm,
|
||||
// false,
|
||||
// false,
|
||||
// )
|
||||
// .unwrap()
|
||||
// .code;
|
||||
//
|
||||
// println!(
|
||||
// "Fully loaded:\n{}\nImports: {:?}\nExports: {:?}\n",
|
||||
// code, v.imports, v.exports
|
||||
// );
|
||||
//}
|
||||
|
||||
Ok((path, v))
|
||||
})
|
||||
}
|
||||
|
||||
fn load(&self, path: &Arc<PathBuf>) -> Result<(ModuleId, Arc<SourceFile>, Module), Error> {
|
||||
self.swc.run(|| {
|
||||
let (module_id, _) = self.scope.module_id_gen.gen(path);
|
||||
|
||||
let path = Arc::new(path);
|
||||
|
||||
let (fm, module) = self
|
||||
.loader
|
||||
.load(&path)
|
||||
.with_context(|| format!("Loader.load({}) failed", path.display()))?;
|
||||
assert_clean(&module);
|
||||
|
||||
Ok((module_id, fm, module))
|
||||
})
|
||||
}
|
||||
|
||||
fn transform_module(
|
||||
&self,
|
||||
path: &Arc<PathBuf>,
|
||||
fm: Arc<SourceFile>,
|
||||
mut module: Module,
|
||||
) -> Result<TransformedModule, Error> {
|
||||
self.swc.run(|| {
|
||||
log::trace!("transform_module({})", fm.name);
|
||||
module = module.fold_with(&mut resolver_with_mark(self.top_level_mark));
|
||||
module = module.fold_with(&mut InlineGlobals {
|
||||
envs: self.envs()?,
|
||||
globals: Default::default(),
|
||||
});
|
||||
module = module.fold_with(&mut dead_branch_remover());
|
||||
|
||||
let (id, mark) = self.scope.module_id_gen.gen(path);
|
||||
|
||||
// {
|
||||
// let code = self
|
||||
// .swc
|
||||
// .print(
|
||||
// &module.clone().fold_with(&mut HygieneVisualizer),
|
||||
// SourceMapsConfig::Bool(false),
|
||||
// None,
|
||||
// false,
|
||||
// )
|
||||
// .unwrap()
|
||||
// .code;
|
||||
//
|
||||
// println!("Resolved:\n{}\n\n", code);
|
||||
// }
|
||||
|
||||
let imports = self.extract_import_info(path, &mut module, mark);
|
||||
|
||||
// {
|
||||
// let code = self
|
||||
// .swc
|
||||
// .print(
|
||||
// &module.clone().fold_with(&mut HygieneVisualizer),
|
||||
// SourceMapsConfig::Bool(false),
|
||||
// None,
|
||||
// false,
|
||||
// )
|
||||
// .unwrap()
|
||||
// .code;
|
||||
//
|
||||
// println!("After imports:\n{}\n", code,);
|
||||
// }
|
||||
|
||||
let exports = self.extract_export_info(&module);
|
||||
|
||||
// TODO: Exclude resolver (for performance)
|
||||
let (module, (imports, exports)) = rayon::join(
|
||||
|| -> Result<_, Error> {
|
||||
self.swc.run(|| {
|
||||
// Process module
|
||||
let config = self
|
||||
.swc
|
||||
.config_for_file(&self.swc_options, &fm.name)
|
||||
.context("failed to parse .swcrc")?;
|
||||
|
||||
let program = self.swc.transform(
|
||||
Program::Module(module),
|
||||
config.external_helpers,
|
||||
config.pass,
|
||||
);
|
||||
|
||||
// {
|
||||
// let code = self
|
||||
// .swc
|
||||
// .print(
|
||||
// &program.clone().fold_with(&mut HygieneVisualizer),
|
||||
// SourceMapsConfig::Bool(false),
|
||||
// None,
|
||||
// false,
|
||||
// )
|
||||
// .unwrap()
|
||||
// .code;
|
||||
//
|
||||
// println!("loaded using swc:\n{}\n\n", code);
|
||||
// }
|
||||
|
||||
match program {
|
||||
Program::Module(module) => Ok(module),
|
||||
_ => unreachable!(),
|
||||
}
|
||||
})
|
||||
},
|
||||
|| {
|
||||
let p = match fm.name {
|
||||
FileName::Real(ref p) => p,
|
||||
_ => unreachable!("{} module in spack", fm.name),
|
||||
};
|
||||
|
||||
rayon::join(
|
||||
|| self.swc.run(|| self.load_imports(&p, imports)),
|
||||
|| self.swc.run(|| self.load_exports(&p, exports)),
|
||||
)
|
||||
},
|
||||
);
|
||||
|
||||
let imports = imports?;
|
||||
let exports = exports?;
|
||||
let mut module = module?;
|
||||
let is_es6 = {
|
||||
let mut v = Es6ModuleDetector {
|
||||
forced_es6: false,
|
||||
found_other: false,
|
||||
};
|
||||
module.visit_with(&Invalid { span: DUMMY_SP } as _, &mut v);
|
||||
v.forced_es6 || !v.found_other
|
||||
};
|
||||
if is_es6 {
|
||||
module = self.drop_unused(fm.clone(), module, None);
|
||||
}
|
||||
|
||||
let module = Arc::new(module);
|
||||
|
||||
Ok(TransformedModule {
|
||||
id,
|
||||
fm,
|
||||
module,
|
||||
imports: Arc::new(imports),
|
||||
exports: Arc::new(exports),
|
||||
is_es6,
|
||||
helpers: Default::default(),
|
||||
mark,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn load_exports(&self, base: &Path, raw: RawExports) -> Result<Exports, Error> {
|
||||
self.swc.run(|| {
|
||||
log::trace!("load_exports({})", base.display());
|
||||
|
||||
let mut exports = Exports::default();
|
||||
exports.pure_constants = raw.pure_constants;
|
||||
|
||||
let items = raw
|
||||
.items
|
||||
.into_par_iter()
|
||||
.map(|(src, ss)| -> Result<_, Error> {
|
||||
let info = match src {
|
||||
Some(src) => {
|
||||
let path = self.resolve(base, &src.value)?;
|
||||
Some((self.load_transformed_inner(path)?, src))
|
||||
}
|
||||
None => None,
|
||||
};
|
||||
|
||||
Ok((info, ss))
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
for res in items {
|
||||
let (info, specifiers): (Option<((Arc<PathBuf>, TransformedModule), Str)>, _) =
|
||||
res?;
|
||||
|
||||
match info {
|
||||
None => exports.items.extend(specifiers),
|
||||
Some(info) => exports
|
||||
.reexports
|
||||
.entry(Source {
|
||||
is_loaded_synchronously: true,
|
||||
is_unconditional: false,
|
||||
module_id: (info.0).1.id,
|
||||
src: info.1,
|
||||
})
|
||||
.or_default()
|
||||
.extend(specifiers),
|
||||
}
|
||||
}
|
||||
|
||||
Ok(exports)
|
||||
})
|
||||
}
|
||||
|
||||
/// Load dependencies
|
||||
fn load_imports(&self, base: &Path, info: RawImports) -> Result<Imports, Error> {
|
||||
self.swc.run(|| {
|
||||
log::trace!("load_imports({})", base.display());
|
||||
|
||||
let mut merged = Imports::default();
|
||||
let RawImports {
|
||||
imports,
|
||||
lazy_imports,
|
||||
dynamic_imports,
|
||||
} = info;
|
||||
|
||||
let loaded = imports
|
||||
.into_par_iter()
|
||||
.map(|v| (v, false, true))
|
||||
.chain(lazy_imports.into_par_iter().map(|v| (v, false, false)))
|
||||
.chain(dynamic_imports.into_par_iter().map(|src| {
|
||||
(
|
||||
ImportDecl {
|
||||
span: src.span,
|
||||
specifiers: vec![],
|
||||
src,
|
||||
type_only: false,
|
||||
},
|
||||
true,
|
||||
false,
|
||||
)
|
||||
}))
|
||||
.map(|(decl, dynamic, unconditional)| -> Result<_, Error> {
|
||||
//
|
||||
let path = self.resolve(base, &decl.src.value)?;
|
||||
let res = self.load_transformed_inner(path)?;
|
||||
|
||||
Ok((res, decl, dynamic, unconditional))
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
for res in loaded {
|
||||
// TODO: Report error and proceed instead of returning an error
|
||||
let ((path, _res), decl, is_dynamic, is_unconditional) = res?;
|
||||
|
||||
if let Some(src) = self.scope.get_module_by_path(&path) {
|
||||
let src = Source {
|
||||
is_loaded_synchronously: !is_dynamic,
|
||||
is_unconditional,
|
||||
module_id: src.id,
|
||||
src: decl.src,
|
||||
};
|
||||
|
||||
// TODO: Handle rename
|
||||
let mut specifiers = vec![];
|
||||
for s in decl.specifiers {
|
||||
match s {
|
||||
ImportSpecifier::Named(s) => specifiers.push(Specifier::Specific {
|
||||
local: s.local.into(),
|
||||
alias: s.imported.map(From::from),
|
||||
}),
|
||||
ImportSpecifier::Default(s) => specifiers.push(Specifier::Specific {
|
||||
local: s.local.into(),
|
||||
alias: Some(Id::new(js_word!("default"), s.span.ctxt())),
|
||||
}),
|
||||
ImportSpecifier::Namespace(s) => {
|
||||
specifiers.push(Specifier::Namespace {
|
||||
local: s.local.into(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
merged.specifiers.push((src, specifiers));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(merged)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
struct Es6ModuleDetector {
|
||||
/// If import statement or export is detected, it's an es6 module regardless
|
||||
/// of other codes.
|
||||
forced_es6: bool,
|
||||
/// True if other module system is detected.
|
||||
found_other: bool,
|
||||
}
|
||||
|
||||
impl Visit for Es6ModuleDetector {
|
||||
fn visit_member_expr(&mut self, e: &MemberExpr, _: &dyn Node) {
|
||||
e.obj.visit_with(e as _, self);
|
||||
|
||||
if e.computed {
|
||||
e.prop.visit_with(e as _, self);
|
||||
}
|
||||
|
||||
match &e.obj {
|
||||
ExprOrSuper::Expr(e) => {
|
||||
match &**e {
|
||||
Expr::Ident(i) => {
|
||||
// TODO: Check syntax context (Check if marker is the global mark)
|
||||
if i.sym == *"module" {
|
||||
self.found_other = true;
|
||||
}
|
||||
|
||||
if i.sym == *"exports" {
|
||||
self.found_other = true;
|
||||
}
|
||||
}
|
||||
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
//
|
||||
}
|
||||
|
||||
fn visit_module_decl(&mut self, decl: &ModuleDecl, _: &dyn Node) {
|
||||
match decl {
|
||||
ModuleDecl::Import(_)
|
||||
| ModuleDecl::ExportDecl(_)
|
||||
| ModuleDecl::ExportNamed(_)
|
||||
| ModuleDecl::ExportDefaultDecl(_)
|
||||
| ModuleDecl::ExportDefaultExpr(_)
|
||||
| ModuleDecl::ExportAll(_) => {
|
||||
self.forced_es6 = true;
|
||||
}
|
||||
|
||||
ModuleDecl::TsImportEquals(_) => {}
|
||||
ModuleDecl::TsExportAssignment(_) => {}
|
||||
ModuleDecl::TsNamespaceExport(_) => {}
|
||||
}
|
||||
}
|
||||
}
|
@ -1,8 +0,0 @@
|
||||
use crate::bundler::tests::test_bundler;
|
||||
|
||||
#[test]
|
||||
fn basic() {
|
||||
test_bundler(|t| {
|
||||
t.parse("");
|
||||
});
|
||||
}
|
@ -1,154 +0,0 @@
|
||||
use self::{config::Cache, scope::Scope};
|
||||
use crate::{
|
||||
bundler::load_transformed::TransformedModule,
|
||||
config::{Config, EntryConfig},
|
||||
load::Load,
|
||||
resolve::Resolve,
|
||||
ModuleId,
|
||||
};
|
||||
use anyhow::{Context, Error};
|
||||
use fxhash::FxHashMap;
|
||||
use rayon::prelude::*;
|
||||
use std::{path::PathBuf, sync::Arc};
|
||||
use swc::config::ModuleConfig;
|
||||
use swc_common::{Mark, DUMMY_SP};
|
||||
use swc_ecma_ast::Module;
|
||||
|
||||
mod chunk;
|
||||
mod config;
|
||||
mod export;
|
||||
mod helpers;
|
||||
mod import;
|
||||
mod load_transformed;
|
||||
mod rename;
|
||||
mod scope;
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
mod usage_analysis;
|
||||
|
||||
pub struct Bundler<'a> {
|
||||
cache: Cache,
|
||||
/// Javascript compiler.
|
||||
swc: Arc<swc::Compiler>,
|
||||
swc_options: swc::config::Options,
|
||||
used_mark: Mark,
|
||||
top_level_mark: Mark,
|
||||
|
||||
resolver: &'a dyn Resolve,
|
||||
loader: &'a dyn Load,
|
||||
|
||||
scope: Scope,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum BundleKind {
|
||||
/// User-provided entry
|
||||
Named { name: String },
|
||||
/// Auto-generated entry (created by import expression)
|
||||
Dynamic,
|
||||
/// A lazy-loaded shared library
|
||||
Lib { name: String },
|
||||
}
|
||||
|
||||
/// Built bundle
|
||||
#[derive(Debug)]
|
||||
pub struct Bundle {
|
||||
pub kind: BundleKind,
|
||||
pub id: ModuleId,
|
||||
/// Merged module
|
||||
pub module: Module,
|
||||
}
|
||||
|
||||
impl<'a> Bundler<'a> {
|
||||
pub fn new(
|
||||
swc: Arc<swc::Compiler>,
|
||||
mut swc_options: swc::config::Options,
|
||||
resolver: &'a dyn Resolve,
|
||||
loader: &'a dyn Load,
|
||||
) -> Self {
|
||||
let used_mark = swc.run(|| Mark::fresh(Mark::root()));
|
||||
log::info!("Used mark: {:?}", DUMMY_SP.apply_mark(used_mark).ctxt());
|
||||
let top_level_mark = swc.run(|| Mark::fresh(Mark::root()));
|
||||
log::info!(
|
||||
"top-level mark: {:?}",
|
||||
DUMMY_SP.apply_mark(top_level_mark).ctxt()
|
||||
);
|
||||
|
||||
swc_options.disable_fixer = true;
|
||||
swc_options.disable_hygiene = true;
|
||||
swc_options.global_mark = Some(top_level_mark);
|
||||
|
||||
if swc_options.config.is_none() {
|
||||
swc_options.config = Some(Default::default());
|
||||
}
|
||||
|
||||
if let Some(c) = &mut swc_options.config {
|
||||
// Preserve es6 modules
|
||||
c.module = Some(ModuleConfig::Es6);
|
||||
}
|
||||
|
||||
Bundler {
|
||||
swc,
|
||||
cache: Default::default(),
|
||||
swc_options,
|
||||
used_mark,
|
||||
top_level_mark,
|
||||
resolver,
|
||||
loader,
|
||||
scope: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn bundle(&self, config: &Config) -> Result<Vec<Bundle>, Error> {
|
||||
let entries = {
|
||||
let mut map = FxHashMap::default();
|
||||
match &config.entry {
|
||||
EntryConfig::File(f) => {
|
||||
map.insert(f.clone(), PathBuf::from(f.clone()));
|
||||
}
|
||||
EntryConfig::Multiple(files) => {
|
||||
for f in files {
|
||||
map.insert(f.clone(), f.clone().into());
|
||||
}
|
||||
}
|
||||
EntryConfig::Files(files) => map = files.clone(),
|
||||
}
|
||||
|
||||
map
|
||||
};
|
||||
|
||||
let results = entries
|
||||
.into_par_iter()
|
||||
.map(|(name, path)| -> Result<_, Error> {
|
||||
let path = self.resolve(&config.working_dir, &path.to_string_lossy())?;
|
||||
let res = self
|
||||
.load_transformed(path)
|
||||
.context("load_transformed failed")?;
|
||||
Ok((name, res))
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// We collect at here to handle dynamic imports
|
||||
// TODO: Handle dynamic imports
|
||||
|
||||
let local = self.swc.run(|| -> Result<_, Error> {
|
||||
let mut output = FxHashMap::default();
|
||||
|
||||
for res in results {
|
||||
let (name, m): (String, TransformedModule) = res?;
|
||||
|
||||
output.insert(name, m);
|
||||
}
|
||||
|
||||
Ok(output)
|
||||
})?;
|
||||
|
||||
let bundles = self.chunk(local)?;
|
||||
|
||||
Ok(self.finalize(bundles)?)
|
||||
}
|
||||
|
||||
pub fn swc(&self) -> &swc::Compiler {
|
||||
&self.swc
|
||||
}
|
||||
}
|
@ -1,276 +0,0 @@
use crate::{Bundle, BundleKind, Bundler};
use anyhow::{Context, Error};
use crc::{crc64, crc64::Digest, Hasher64};
use fxhash::FxHashMap;
use relative_path::RelativePath;
use std::{
    io,
    path::{Path, PathBuf},
};
use swc::config::Options;
use swc_common::{util::move_map::MoveMap, FileName, Span};
use swc_ecma_ast::{ImportDecl, Module, Str};
use swc_ecma_codegen::{text_writer::WriteJs, Emitter};
use swc_ecma_transforms::noop_fold_type;
use swc_ecma_visit::{Fold, FoldWith};

impl Bundler<'_> {
    pub(super) fn finalize(&self, bundles: Vec<Bundle>) -> Result<Vec<Bundle>, Error> {
        let mut new = Vec::with_capacity(bundles.len());
        let mut renamed = FxHashMap::default();

        for mut bundle in bundles {
            match bundle.kind {
                BundleKind::Named { .. } => {
                    // Inject helpers
                    let helpers = self
                        .scope
                        .get_module(bundle.id)
                        .expect("module should exist at this point")
                        .helpers;

                    self.swc
                        .run_transform(true, || helpers.append_to(&mut bundle.module.body));

                    new.push(Bundle { ..bundle });
                }
                BundleKind::Lib { name } => {
                    let hash = self.calc_hash(&bundle.module)?;
                    let mut new_name = PathBuf::from(name);
                    let key = new_name.clone();
                    let file_name = new_name
                        .file_name()
                        .map(|path| -> PathBuf {
                            let path = Path::new(path);
                            let ext = path.extension();
                            if let Some(ext) = ext {
                                return format!(
                                    "{}-{}.{}",
                                    path.file_stem().unwrap().to_string_lossy(),
                                    hash,
                                    ext.to_string_lossy()
                                )
                                .into();
                            }
                            return format!(
                                "{}-{}",
                                path.file_stem().unwrap().to_string_lossy(),
                                hash,
                            )
                            .into();
                        })
                        .expect("javascript file should have name");
                    new_name.pop();
                    new_name = new_name.join(file_name.clone());

                    renamed.insert(key, new_name.to_string_lossy().to_string());

                    new.push(Bundle {
                        kind: BundleKind::Named {
                            name: file_name.display().to_string(),
                        },
                        ..bundle
                    })
                }
                _ => new.push(bundle),
            }
        }

        new = new.move_map(|bundle| {
            let path = match self.scope.get_module(bundle.id).unwrap().fm.name {
                FileName::Real(ref v) => v.clone(),
                _ => {
                    log::error!("Cannot rename: not a real file");
                    return bundle;
                }
            };

            let module = {
                // Change imports
                let mut v = Renamer {
                    bundler: self,
                    path: &path,
                    renamed: &renamed,
                };
                bundle.module.fold_with(&mut v)
            };

            let module = self.swc.run(|| {
                let opts = Options {
                    ..self.swc_options.clone()
                };
                let file_name = FileName::Real(path);
                let config = self.swc.read_config(&opts, &file_name).unwrap_or_default();
                let mut module_pass = swc::config::ModuleConfig::build(
                    self.swc.cm.clone(),
                    self.top_level_mark,
                    config.module,
                );
                module.fold_with(&mut module_pass)
            });

            Bundle { module, ..bundle }
        });

        Ok(new)
    }

    fn calc_hash(&self, m: &Module) -> Result<String, Error> {
        let digest = crc64::Digest::new(crc64::ECMA);
        let mut buf = Hasher { digest };

        {
            let mut emitter = Emitter {
                cfg: Default::default(),
                cm: self.swc.cm.clone(),
                comments: None,
                wr: Box::new(&mut buf) as Box<dyn WriteJs>,
                handlers: Box::new(Handlers),
            };

            emitter
                .emit_module(&m)
                .context("failed to emit module to calculate hash")?;
        }

        let result = buf.digest.sum64();
        Ok(radix_fmt::radix(result, 36).to_string())
    }
}

/// Import renamer. This pass changes import paths.
struct Renamer<'a, 'b> {
    bundler: &'a Bundler<'b>,
    path: &'a Path,
    renamed: &'a FxHashMap<PathBuf, String>,
}

noop_fold_type!(Renamer<'_, '_>);

impl Fold for Renamer<'_, '_> {
    fn fold_import_decl(&mut self, import: ImportDecl) -> ImportDecl {
        let resolved = match self.bundler.resolve(self.path, &import.src.value) {
            Ok(v) => v,
            Err(_) => return import,
        };

        if let Some(v) = self.renamed.get(&*resolved) {
            // We use parent because RelativePath uses ../common-[hash].js
            // if we use `entry-a.js` as a base.
            //
            // entry-a.js
            // common.js
            let base = self
                .path
                .parent()
                .unwrap_or(self.path)
                .as_os_str()
                .to_string_lossy();
            let base = RelativePath::new(&*base);
            let v = base.relative(&*v);
            let value = v.as_str();
            return ImportDecl {
                src: Str {
                    value: if value.starts_with(".") {
                        value.into()
                    } else {
                        format!("./{}", value).into()
                    },
                    ..import.src
                },
                ..import
            };
        }

        import
    }
}

impl swc_ecma_codegen::Handlers for Handlers {}
struct Handlers;

struct Hasher {
    digest: Digest,
}

impl Hasher {
    fn w(&mut self, s: &str) {
        self.digest.write(s.as_bytes());
    }
}

impl WriteJs for &mut Hasher {
    fn increase_indent(&mut self) -> io::Result<()> {
        Ok(())
    }

    fn decrease_indent(&mut self) -> io::Result<()> {
        Ok(())
    }

    fn write_semi(&mut self) -> io::Result<()> {
        self.w(";");
        Ok(())
    }

    fn write_space(&mut self) -> io::Result<()> {
        self.w(" ");
        Ok(())
    }

    fn write_keyword(&mut self, _: Option<Span>, s: &'static str) -> io::Result<()> {
        self.w(s);
        Ok(())
    }

    fn write_operator(&mut self, s: &str) -> io::Result<()> {
        self.w(s);
        Ok(())
    }

    fn write_param(&mut self, s: &str) -> io::Result<()> {
        self.w(s);
        Ok(())
    }

    fn write_property(&mut self, s: &str) -> io::Result<()> {
        self.w(s);
        Ok(())
    }

    fn write_line(&mut self) -> io::Result<()> {
        self.w("\n");
        Ok(())
    }

    fn write_lit(&mut self, _: Span, s: &str) -> io::Result<()> {
        self.w(s);
        Ok(())
    }

    fn write_comment(&mut self, _: Span, s: &str) -> io::Result<()> {
        self.w(s);
        Ok(())
    }

    fn write_str_lit(&mut self, _: Span, s: &str) -> io::Result<()> {
        self.w(s);
        Ok(())
    }

    fn write_str(&mut self, s: &str) -> io::Result<()> {
        self.w(s);
        Ok(())
    }

    fn write_symbol(&mut self, _: Span, s: &str) -> io::Result<()> {
        self.w(s);
        Ok(())
    }

    fn write_punct(&mut self, s: &'static str) -> io::Result<()> {
        self.w(s);
        Ok(())
    }
}
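The `BundleKind::Lib` branch above boils down to splicing the content hash between the file stem and the extension, and recording the renamed path so imports can be rewritten afterwards. A standalone sketch of just that naming rule (the helper name and the sample hash are made up for illustration):

use std::path::Path;

// Illustrative: `common.js` + hash `3k9x` -> `common-3k9x.js`;
// extensionless names become `name-3k9x`.
fn hashed_file_name(file_name: &str, hash: &str) -> String {
    let path = Path::new(file_name);
    let stem = path.file_stem().unwrap().to_string_lossy();
    match path.extension() {
        Some(ext) => format!("{}-{}.{}", stem, hash, ext.to_string_lossy()),
        None => format!("{}-{}", stem, hash),
    }
}

The hash itself comes from `calc_hash` above: the module is emitted through the `Hasher` sink (a `WriteJs` implementation feeding CRC-64/ECMA) and the resulting 64-bit checksum is rendered in base 36.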
@ -1,30 +0,0 @@
use crate::{bundler::load_transformed::TransformedModule, id::ModuleIdGenerator, ModuleId};
use dashmap::DashMap;
use fxhash::FxBuildHasher;
use std::{path::PathBuf, sync::Arc};

#[derive(Debug, Default)]
pub(super) struct Scope {
    pub module_id_gen: ModuleIdGenerator,

    /// Phase 1 cache
    modules: DashMap<ModuleId, TransformedModule, FxBuildHasher>,
}

impl Scope {
    /// Stores module information. The information should contain only
    /// information gotten from the module itself. In other words, it should
    /// not contain information from a dependency.
    pub fn store_module(&self, _path: Arc<PathBuf>, info: TransformedModule) {
        self.modules.insert(info.id, info);
    }

    pub fn get_module_by_path(&self, path: &Arc<PathBuf>) -> Option<TransformedModule> {
        let (id, _) = self.module_id_gen.gen(path);
        self.get_module(id)
    }

    pub fn get_module(&self, id: ModuleId) -> Option<TransformedModule> {
        Some(self.modules.get(&id)?.value().clone())
    }
}
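A small sketch of the intended round trip through this phase-1 cache (crate-internal; it assumes it sits next to `Scope` and that `ModuleIdGenerator::gen` returns a stable id for a given path):

use std::{path::PathBuf, sync::Arc};

// Illustrative: store a transformed module under its path, then fetch it back
// either by the generated ModuleId or by the path itself.
fn cache_roundtrip(scope: &Scope, path: Arc<PathBuf>, module: TransformedModule) {
    let id = module.id;
    scope.store_module(path.clone(), module);

    assert!(scope.get_module(id).is_some());
    assert!(scope.get_module_by_path(&path).is_some());
}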
@ -1,78 +0,0 @@
//! Utilities for testing.
use super::Bundler;
use crate::{loaders::swc::SwcLoader, resolve::NodeResolver, util::HygieneRemover};
use pretty_assertions::assert_eq;
use std::{path::PathBuf, sync::Arc};
use swc_common::FileName;
use swc_ecma_ast::*;
use swc_ecma_parser::{EsConfig, Syntax};
use swc_ecma_utils::drop_span;
use swc_ecma_visit::FoldWith;

pub struct Tester<'a> {
    pub bundler: Bundler<'a>,
}

impl<'a> Tester<'a> {
    pub fn parse(&self, s: &str) -> Module {
        let fm = self
            .bundler
            .swc
            .cm
            .new_source_file(FileName::Real(PathBuf::from("input.js")), s.into());
        let p = self
            .bundler
            .swc
            .parse_js(
                fm,
                Default::default(),
                Syntax::Es(EsConfig {
                    dynamic_import: true,
                    ..Default::default()
                }),
                true,
                true,
            )
            .expect("failed to parse");

        match p {
            Program::Module(m) => m,
            Program::Script(_) => unreachable!(),
        }
    }

    pub fn assert_eq(&self, m: &Module, expected: &str) {
        let expected = self.parse(expected);

        let m = drop_span(m.clone().fold_with(&mut HygieneRemover));
        let expected = drop_span(expected);

        assert_eq!(m, expected)
    }
}

pub fn test_bundler<F>(op: F)
where
    F: FnOnce(&mut Tester),
{
    testing::run_test2(true, |cm, handler| {
        let compiler = Arc::new(swc::Compiler::new(cm.clone(), Arc::new(handler)));
        let loader = SwcLoader::new(compiler.clone(), Default::default());
        let bundler = Bundler::new(
            compiler.clone(),
            swc::config::Options {
                swcrc: true,
                ..Default::default()
            },
            &NodeResolver,
            &loader,
        );

        let mut t = Tester { bundler };

        op(&mut t);

        Ok(())
    })
    .expect("WTF?");
}
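A minimal example of how this helper is meant to be used from a unit test (the snippet itself is not in the diff):

#[test]
fn parse_and_compare() {
    test_bundler(|t| {
        let m = t.parse("const foo = 1; console.log(foo);");
        // Spans and hygiene are dropped before comparison, so structurally
        // identical programs compare equal.
        t.assert_eq(&m, "const foo = 1; console.log(foo);");
    });
}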
@ -8,6 +8,7 @@ use fxhash::FxHashMap;
use serde::Deserialize;
use std::{fmt, marker::PhantomData, path::PathBuf};
use string_enum::StringEnum;
use swc_common::FileName;

mod module;
mod optimization;
@ -56,7 +57,7 @@ impl Default for Mode {
    }
}

#[derive(Debug, Deserialize)]
#[derive(Debug, Clone, Deserialize)]
#[serde(untagged, rename = "Entry")]
pub enum EntryConfig {
    File(String),
@ -64,6 +65,36 @@ pub enum EntryConfig {
    Files(FxHashMap<String, PathBuf>),
}

impl From<EntryConfig> for FxHashMap<String, FileName> {
    fn from(c: EntryConfig) -> Self {
        let mut m = FxHashMap::default();

        match c {
            EntryConfig::File(f) => {
                let path = PathBuf::from(f);
                let file_name = path
                    .file_name()
                    .expect("entry must be a file, instead of a directory");
                m.insert(file_name.to_string_lossy().into(), FileName::Real(path));
            }
            EntryConfig::Multiple(files) => {
                for f in files {
                    let path = PathBuf::from(f);
                    let file_name = path
                        .file_name()
                        .expect("entry must be a file, instead of a directory");
                    m.insert(file_name.to_string_lossy().into(), FileName::Real(path));
                }
            }
            EntryConfig::Files(f) => {
                return f.into_iter().map(|(k, v)| (k, FileName::Real(v))).collect()
            }
        }

        m
    }
}

pub struct JsCallback<T, Ret> {
    _f: Box<dyn Send + Sync + Fn(T) -> Ret>,
    _phantom: PhantomData<(T, Ret)>,
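In short, single and multiple entries are keyed by their file name, while an explicit map is passed through with its paths wrapped in `FileName::Real`. A hedged sketch (the entry path is illustrative):

use fxhash::FxHashMap;
use spack::config::EntryConfig;
use swc_common::FileName;

fn single_entry_example() {
    let entries: FxHashMap<String, FileName> =
        EntryConfig::File("src/index.js".to_string()).into();
    // The key is the file name component, the value the real path.
    assert_eq!(
        entries.get("index.js"),
        Some(&FileName::Real("src/index.js".into()))
    );
}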
@ -1,30 +0,0 @@
use swc_common::{Span, SyntaxContext, DUMMY_SP};
use swc_ecma_ast::{Ident, Invalid};
use swc_ecma_visit::{Fold, Node, Visit, VisitWith};

pub(crate) struct HygieneVisualizer;

impl Fold for HygieneVisualizer {
    fn fold_ident(&mut self, node: Ident) -> Ident {
        Ident {
            sym: format!("{}{:?}", node.sym, node.span.ctxt()).into(),
            ..node
        }
    }
}

pub(crate) struct AssertClean;

impl Visit for AssertClean {
    fn visit_span(&mut self, s: &Span, _: &dyn Node) {
        debug_assert_eq!(
            s.ctxt(),
            SyntaxContext::empty(),
            "Hygiene info should be clean at this moment"
        );
    }
}

pub(crate) fn assert_clean<T: VisitWith<AssertClean>>(m: &T) {
    m.visit_with(&Invalid { span: DUMMY_SP } as _, &mut AssertClean)
}
@ -3,17 +3,6 @@
#[cfg(test)]
extern crate test;

pub use self::{
    bundler::{Bundle, BundleKind, Bundler},
    id::{Id, ModuleId, QualifiedId},
};

mod bundler;
pub mod config;
mod debug;
mod id;
pub mod load;
pub mod loaders;
mod normalize;
pub mod resolve;
mod util;
pub mod resolvers;
@ -1,20 +0,0 @@
use anyhow::Error;
use std::{path::Path, sync::Arc};
use swc_common::SourceFile;
use swc_ecma_ast::Module;

pub trait Load: Send + Sync {
    fn load(&self, path: &Path) -> Result<(Arc<SourceFile>, Module), Error>;
}

impl<T: ?Sized + Load> Load for Box<T> {
    fn load(&self, path: &Path) -> Result<(Arc<SourceFile>, Module), Error> {
        T::load(self, path)
    }
}

impl<'a, T: ?Sized + Load> Load for &'a T {
    fn load(&self, path: &Path) -> Result<(Arc<SourceFile>, Module), Error> {
        T::load(self, path)
    }
}
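Because the trait is object safe and blanket-implemented for `Box<T>` and `&T`, loaders compose easily. A hedged sketch of a pass-through wrapper (the wrapper type is invented for illustration):

use anyhow::Error;
use std::{path::Path, sync::Arc};
use swc_common::SourceFile;
use swc_ecma_ast::Module;

// Illustrative decorator: log every load, then delegate to the inner loader.
struct LoggingLoader<L: Load>(L);

impl<L: Load> Load for LoggingLoader<L> {
    fn load(&self, path: &Path) -> Result<(Arc<SourceFile>, Module), Error> {
        log::debug!("load({})", path.display());
        self.0.load(path)
    }
}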
@ -1,11 +1,7 @@
use crate::load::Load;
use anyhow::{Context as _, Error};
use neon::prelude::*;

use std::{
    path::Path,
    sync::{mpsc::channel, Arc},
};
use std::sync::{mpsc::channel, Arc};
use swc_bundler::Load;
use swc_common::{FileName, SourceFile};
use swc_ecma_ast::{Module, Program};

@ -16,8 +12,8 @@ pub struct NeonLoader {
}

impl Load for NeonLoader {
    fn load(&self, p: &Path) -> Result<(Arc<SourceFile>, Module), Error> {
        let path = p.to_string_lossy().to_string();
    fn load(&self, name: &FileName) -> Result<(Arc<SourceFile>, Module), Error> {
        let path = name.to_string();
        let (tx, rx) = channel();

        self.handler.schedule_with(move |cx, _value, f| {
@ -58,10 +54,7 @@ impl Load for NeonLoader {
            .context("failed to receive output from js loader")?;
        let code = code?;

        let fm = self
            .swc
            .cm
            .new_source_file(FileName::Real(p.to_path_buf()), code);
        let fm = self.swc.cm.new_source_file(name.clone(), code);

        let config = self.swc.config_for_file(
            &swc::config::Options {
@ -1,7 +1,7 @@
use crate::load::Load;
use anyhow::Error;
use std::{path::Path, sync::Arc};
use swc_common::SourceFile;
use anyhow::{bail, Context, Error};
use std::sync::Arc;
use swc_bundler::Load;
use swc_common::{FileName, SourceFile};
use swc_ecma_ast::{Module, Program};
use swc_ecma_parser::JscTarget;

@ -23,7 +23,28 @@ impl SwcLoader {
            v.module = None;
            v.minify = Some(false);

            v.jsc.target = JscTarget::Es2019;
            v.jsc.target = JscTarget::Es2020;

            if v.jsc.transform.is_none() {
                v.jsc.transform = Some(Default::default());
            }

            let mut transform = v.jsc.transform.as_mut().unwrap();
            if transform.optimizer.is_none() {
                transform.optimizer = Some(Default::default());
            }

            let mut opt = transform.optimizer.as_mut().unwrap();
            if opt.globals.is_none() {
                opt.globals = Some(Default::default());
            }

            // Always inline NODE_ENV
            opt.globals
                .as_mut()
                .unwrap()
                .envs
                .insert("NODE_ENV".to_string());
        }

        SwcLoader { compiler, options }
@ -31,11 +52,17 @@ impl SwcLoader {
}

impl Load for SwcLoader {
    fn load(&self, path: &Path) -> Result<(Arc<SourceFile>, Module), Error> {
        self.compiler.run(|| {
            log::debug!("JsLoader.load({})", path.display());
    fn load(&self, name: &FileName) -> Result<(Arc<SourceFile>, Module), Error> {
        log::debug!("JsLoader.load({})", name);

        let fm = self.compiler.cm.load_file(path)?;
        let fm = self
            .compiler
            .cm
            .load_file(match name {
                FileName::Real(v) => &v,
                _ => bail!("swc-loader only accepts path. Got `{}`", name),
            })
            .with_context(|| format!("failed to load file `{}`", name))?;

        log::trace!("JsLoader.load: loaded");

@ -60,6 +87,5 @@ impl Load for SwcLoader {
            Program::Module(module) => Ok((fm, module)),
            _ => unreachable!(),
        }
        })
    }
}
@ -1 +0,0 @@

@ -1,45 +0,0 @@
use anyhow::{Context, Error};
use std::path::{Path, PathBuf};

pub trait Resolve: Send + Sync {
    /// Returned filename will be hashed if possible and used to generate a
    /// module id.
    fn resolve(&self, base: &Path, import: &str) -> Result<PathBuf, Error>;
}

impl<T: ?Sized + Resolve> Resolve for Box<T> {
    fn resolve(&self, base: &Path, import: &str) -> Result<PathBuf, Error> {
        T::resolve(self, base, import)
    }
}

impl<'a, T: ?Sized + Resolve> Resolve for &'a T {
    fn resolve(&self, base: &Path, import: &str) -> Result<PathBuf, Error> {
        T::resolve(self, base, import)
    }
}

pub struct NodeResolver;

impl Resolve for NodeResolver {
    fn resolve(&self, base: &Path, import: &str) -> Result<PathBuf, Error> {
        let base_dir = base
            .parent()
            .map(Path::to_path_buf)
            .unwrap_or_else(|| PathBuf::from("."));

        Ok(node_resolve::Resolver::new()
            .with_extensions(&[".ts", ".tsx", ".js", ".jsx", ".json", ".node"])
            .with_main_fields(&["swc-main", "esnext", "main"])
            .with_basedir(base_dir.clone())
            .resolve(import)
            .with_context(|| {
                format!(
                    "node-resolve failed; basedir = {}, import = {}",
                    base_dir.display(),
                    import
                )
            })?)
    }
}
43
spack/src/resolvers/mod.rs
Normal file
@ -0,0 +1,43 @@
use anyhow::{bail, Context, Error};
use std::path::{Path, PathBuf};
use swc_bundler::Resolve;
use swc_common::FileName;

pub struct NodeResolver(node_resolve::Resolver);

impl NodeResolver {
    pub fn new() -> Self {
        Self(
            node_resolve::Resolver::new()
                .with_extensions(&[".ts", ".tsx", ".js", ".jsx", ".json", ".node"])
                .with_main_fields(&["swc-main", "esnext", "main"]),
        )
    }
}

impl Resolve for NodeResolver {
    fn resolve(&self, base: &FileName, module_specifier: &str) -> Result<FileName, Error> {
        let base = match base {
            FileName::Real(v) => v,
            _ => bail!("node-resolver supports only files"),
        };

        let base_dir = base
            .parent()
            .map(Path::to_path_buf)
            .unwrap_or_else(|| PathBuf::from("."));

        let path = self
            .0
            .with_basedir(base_dir.clone())
            .resolve(module_specifier)
            .with_context(|| {
                format!(
                    "node-resolver failed; basedir = {}, import = {}",
                    base_dir.display(),
                    module_specifier
                )
            })?;
        Ok(FileName::Real(path))
    }
}
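The same `Resolve` trait can back much simpler strategies than node resolution. A hedged sketch of a resolver that only consults a fixed alias table and otherwise fails (the type and its contents are illustrative, not part of the commit):

use anyhow::{bail, Error};
use std::collections::HashMap;
use swc_bundler::Resolve;
use swc_common::FileName;

// Illustrative alias-only resolver: maps bare specifiers to known files.
struct AliasResolver {
    aliases: HashMap<String, FileName>,
}

impl Resolve for AliasResolver {
    fn resolve(&self, _base: &FileName, module_specifier: &str) -> Result<FileName, Error> {
        match self.aliases.get(module_specifier) {
            Some(v) => Ok(v.clone()),
            None => bail!("unknown module specifier `{}`", module_specifier),
        }
    }
}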
@ -1,10 +0,0 @@
use swc_common::{Span, SyntaxContext};
use swc_ecma_visit::Fold;

pub struct HygieneRemover;

impl Fold for HygieneRemover {
    fn fold_span(&mut self, s: Span) -> Span {
        s.with_ctxt(SyntaxContext::empty())
    }
}
@ -3,11 +3,7 @@
extern crate test;

use fxhash::FxHashMap;
use spack::{
    config::{Config, EntryConfig},
    loaders::swc::SwcLoader,
    BundleKind, Bundler,
};
use spack::{loaders::swc::SwcLoader, resolvers::NodeResolver};
use std::{
    env,
    fs::{create_dir_all, read_dir},
@ -16,6 +12,8 @@ use std::{
    sync::Arc,
};
use swc::config::SourceMapsConfig;
use swc_bundler::{BundleKind, Bundler, Config};
use swc_common::{FileName, GLOBALS};
use test::{
    test_main, DynTestFn, Options, ShouldPanic::No, TestDesc, TestDescAndFn, TestName, TestType,
};
@ -93,7 +91,10 @@ fn reference_tests(tests: &mut Vec<TestDescAndFn>, errors: bool) -> Result<(), i
        })
        .map(|e| -> Result<_, io::Error> {
            let e = e?;
            Ok((e.file_name().to_string_lossy().to_string(), e.path()))
            Ok((
                e.file_name().to_string_lossy().to_string(),
                FileName::Real(e.path()),
            ))
        })
        .collect::<Result<FxHashMap<_, _>, _>>()?;

@ -116,6 +117,7 @@ fn reference_tests(tests: &mut Vec<TestDescAndFn>, errors: bool) -> Result<(), i
        eprintln!("\n\n========== Running reference test {}\n", dir_name);

        testing::run_test2(false, |cm, handler| {
            GLOBALS.with(|globals| {
            let compiler = Arc::new(swc::Compiler::new(cm.clone(), Arc::new(handler)));
            let loader = SwcLoader::new(
                compiler.clone(),
@ -124,36 +126,63 @@ fn reference_tests(tests: &mut Vec<TestDescAndFn>, errors: bool) -> Result<(), i
                    ..Default::default()
                },
            );
            let config = Config {
                working_dir: Default::default(),
                mode: Default::default(),
                entry: EntryConfig::Files(entries),
                output: None,
                module: Default::default(),
                optimization: None,
                resolve: None,
                options: None,
            };
            let bundler = Bundler::new(
                compiler.clone(),
                swc::config::Options {
                    swcrc: true,
                    ..Default::default()
                },
                &spack::resolve::NodeResolver,
                globals,
                cm.clone(),
                &loader,
                NodeResolver::new(),
                Config {
                    require: true,
                    external_modules: vec![
                        "assert",
                        "buffer",
                        "child_process",
                        "console",
                        "cluster",
                        "crypto",
                        "dgram",
                        "dns",
                        "events",
                        "fs",
                        "http",
                        "http2",
                        "https",
                        "net",
                        "os",
                        "path",
                        "perf_hooks",
                        "process",
                        "querystring",
                        "readline",
                        "repl",
                        "stream",
                        "string_decoder",
                        "timers",
                        "tls",
                        "tty",
                        "url",
                        "util",
                        "v8",
                        "vm",
                        "wasi",
                        "worker",
                        "zlib",
                    ]
                    .into_iter()
                    .map(From::from)
                    .collect(),
                },
            );

            let modules = bundler.bundle(&config).expect("failed to bundle module");
            let modules = bundler.bundle(entries).map_err(|_| ())?;
            log::info!("Bundled as {} modules", modules.len());

            let mut error = false;

            for bundled in modules {
                let code = bundler
                    .swc()
                let code = compiler
                    .print(&bundled.module, SourceMapsConfig::Bool(false), None, false)
                    .expect("failed to emit bundle")
                    .expect("failed to print?")
                    .code;

                let name = match bundled.kind {
@ -163,7 +192,8 @@ fn reference_tests(tests: &mut Vec<TestDescAndFn>, errors: bool) -> Result<(), i
                    BundleKind::Dynamic => format!("dynamic.{}.js", bundled.id).into(),
                };

                let output_path = entry.path().join("output").join(name.file_name().unwrap());
                let output_path =
                    entry.path().join("output").join(name.file_name().unwrap());

                log::info!("Printing {}", output_path.display());

@ -184,6 +214,7 @@ fn reference_tests(tests: &mut Vec<TestDescAndFn>, errors: bool) -> Result<(), i

                Ok(())
            })
            })
            .expect("failed to process a module");
        });
    }
5
spack/tests/pass/basic/class-inheritance/input/a.js
Normal file
@ -0,0 +1,5 @@
import { B } from './b';

export class A extends B {

}
3
spack/tests/pass/basic/class-inheritance/input/b.js
Normal file
@ -0,0 +1,3 @@
export class B {

}
5
spack/tests/pass/basic/class-inheritance/input/entry.ts
Normal file
@ -0,0 +1,5 @@
import { A } from './a';
import './b';

console.log('foo');
new A();
6
spack/tests/pass/basic/class-inheritance/output/entry.ts
Normal file
@ -0,0 +1,6 @@
class B {
}
class A extends B {
}
console.log('foo');
new A();
3
spack/tests/pass/basic/extends/input/a.js
Normal file
@ -0,0 +1,3 @@
import { B } from './b';

export class A extends B { }
1
spack/tests/pass/basic/extends/input/b.js
Normal file
@ -0,0 +1 @@
export class B { }
4
spack/tests/pass/basic/extends/input/entry.js
Normal file
@ -0,0 +1,4 @@
import { A } from './a';
import { B } from './b';

console.log(A, B);
5
spack/tests/pass/basic/extends/output/entry.js
Normal file
@ -0,0 +1,5 @@
class B {
}
class A extends B {
}
console.log(A, B);
13
spack/tests/pass/circular/complex-class-function/input/a.js
Normal file
@ -0,0 +1,13 @@
import { getC } from './c';

export function a() {
    return new A()
}

export class A extends getC() {

}

export function getA() {
    return A;
}
@ -0,0 +1,4 @@
import { A, getA, a } from './a';


export { A, getA, a }
@ -0,0 +1,7 @@
import './b';

export function getC() {
    return C;
}

export class C { }
@ -0,0 +1,3 @@
import { a } from './a';

console.log(a, a())
@ -0,0 +1,4 @@
function a() {
    return new A();
}
console.log(a, a());
@ -0,0 +1,3 @@
import { B } from './b';

export class A extends B { }
@ -0,0 +1,8 @@
import { A } from './a';
import { C } from './c';

export class B extends C {
    a() {
        return new A();
    }
}
@ -0,0 +1,10 @@
import { B } from './b';

export class C {
    a() {
        throw new Error('Unimplemented')
    }
    b() {
        return new B();
    }
}
@ -0,0 +1,4 @@
import { A } from './a';
import './b';
import './c';
console.log(A, 'Loaded!');
@ -0,0 +1,16 @@
class C {
    a() {
        throw new Error('Unimplemented');
    }
    b() {
        return new B();
    }
}
class B extends C {
    a() {
        return new A();
    }
}
class A extends B {
}
console.log(A, 'Loaded!');
1
spack/tests/pass/circular/many/input/a.js
Normal file
@ -0,0 +1 @@
import './b';
1
spack/tests/pass/circular/many/input/b.js
Normal file
@ -0,0 +1 @@
import './c';
1
spack/tests/pass/circular/many/input/c.js
Normal file
@ -0,0 +1 @@
import './d';
1
spack/tests/pass/circular/many/input/d.js
Normal file
@ -0,0 +1 @@
import './a';
1
spack/tests/pass/circular/many/input/entry.js
Normal file
@ -0,0 +1 @@
import './a';
9
spack/tests/pass/circular/mixed/input/a.js
Normal file
@ -0,0 +1,9 @@
import { B } from './b'
import './c';

export class A {
    method() {
        return new B();
    }
}

6
spack/tests/pass/circular/mixed/input/b.js
Normal file
@ -0,0 +1,6 @@
import { A } from "./a";
import './c';

export class B extends A {

}
1
spack/tests/pass/circular/mixed/input/c.js
Normal file
@ -0,0 +1 @@
console.log('c');
4
spack/tests/pass/circular/mixed/input/entry.js
Normal file
@ -0,0 +1,4 @@
import { A } from './a';
import { B } from './b';

console.log(A, B);
9
spack/tests/pass/circular/mixed/output/entry.js
Normal file
@ -0,0 +1,9 @@
class A {
    method() {
        return new B();
    }
}
class B extends A {
}
console.log('c');
console.log(A, B);
8
spack/tests/pass/circular/simple/input/a.js
Normal file
@ -0,0 +1,8 @@
import { B } from './b'

export class A {
    method() {
        return new B();
    }
}

5
spack/tests/pass/circular/simple/input/b.js
Normal file
@ -0,0 +1,5 @@
import { A } from "./a";

export class B extends A {

}
4
spack/tests/pass/circular/simple/input/entry.js
Normal file
@ -0,0 +1,4 @@
import { A } from './a';
import { B } from './b';

console.log(A, B);
8
spack/tests/pass/circular/simple/output/entry.js
Normal file
@ -0,0 +1,8 @@
class A {
    method() {
        return new B();
    }
}
class B extends A {
}
console.log(A, B);
3
spack/tests/pass/circular/top-level-idents/input/a.js
Normal file
@ -0,0 +1,3 @@
import './b';

console.log('a');
3
spack/tests/pass/circular/top-level-idents/input/b.js
Normal file
@ -0,0 +1,3 @@
import './c';

console.log('b');
Some files were not shown because too many files have changed in this diff.