swc_bundler (#943)

swc_bundler:
 - Split from spack

swc_ecma_parser:
 - Fix an unexpected EOF error that occurs when the log level is trace

swc_ecma_transforms:
 - Fix bugs in the dce pass

spack:
 - Support cyclic dependencies
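For orientation, here is a sketch of the two extension points the new crate asks callers to implement. This is a non-authoritative summary inferred from the `PathLoader`/`PathResolver` implementations in `bundler/examples/path.rs` and the test stubs added by this commit, not the crate's literal trait definitions.

```rust
use anyhow::Error;
use swc_common::{sync::Lrc, FileName, SourceFile};
use swc_ecma_ast::Module;

/// Loads and parses one module. Implementations in this commit read the file,
/// lex and parse it with swc_ecma_parser, and return the AST together with
/// its SourceFile (used for diagnostics and source maps).
pub trait Load {
    fn load(&self, file: &FileName) -> Result<(Lrc<SourceFile>, Module), Error>;
}

/// Resolves an import specifier (e.g. `./a`) relative to the importing file,
/// returning the FileName that should be loaded next.
pub trait Resolve {
    fn resolve(&self, base: &FileName, module_specifier: &str) -> Result<FileName, Error>;
}
```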
강동윤 2020-08-12 22:18:47 +09:00 committed by GitHub
parent 4ab3c58f6a
commit 2fedf32747
108 changed files with 2779 additions and 1660 deletions


@ -42,3 +42,7 @@ jobs:
- name: Check swc_ecma_transforms
run: |
(cd ecmascript/transforms && cargo hack check --feature-powerset --no-dev-deps)
- name: Check swc_bundler
run: |
(cd bundler && cargo hack check --feature-powerset --no-dev-deps)


@ -15,6 +15,7 @@ package-lock.json
wasm/
# Reduce package size
**/tests.rs
**/tests/
**/benches/
**/target/


@ -31,7 +31,7 @@ serde_json = "1"
once_cell = "1" once_cell = "1"
regex = "1" regex = "1"
either = "1" either = "1"
dashmap = "=3.5.1" dashmap = "3"
sourcemap = "6" sourcemap = "6"
base64 = "0.12.0" base64 = "0.12.0"

bundler/Cargo.toml Normal file

@ -0,0 +1,39 @@
[package]
name = "swc_bundler"
version = "0.1.0"
authors = ["강동윤 <kdy1997.dev@gmail.com>"]
license = "Apache-2.0/MIT"
repository = "https://github.com/swc-project/swc.git"
documentation = "https://swc-project.github.io/rustdoc/swc_bundler/"
description = "Very fast ecmascript bundler"
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[features]
default = []
concurrent = ["swc_common/concurrent", "dashmap", "rayon"]
[dependencies]
swc_atoms = { version = "0.2", path = "../atoms" }
swc_common = { version = "0.9", path = "../common" }
swc_ecma_ast = { version = "0.28", path = "../ecmascript/ast" }
swc_ecma_codegen = { version = "0.31", path = "../ecmascript/codegen" }
swc_ecma_parser = { version = "0.33", path = "../ecmascript/parser" }
swc_ecma_transforms = { version = "0.19", path = "../ecmascript/transforms" }
swc_ecma_utils = { version = "0.17", path = "../ecmascript/utils" }
swc_ecma_visit = { version = "0.13", path = "../ecmascript/visit" }
anyhow = "1"
crc = "1.8"
radix_fmt = "1"
relative-path = "1.2"
log = "0.4"
petgraph = "0.5"
once_cell = "1"
dashmap = { version = "3", optional = true }
rayon = { version = "1", optional = true }
fxhash = "0.2.1"
is-macro = "0.1"
[dev-dependencies]
testing = { path = "../testing" }

bundler/README.md Normal file

@ -0,0 +1,13 @@
# swc_bundler
Bundler for the swc project.
## Features
- Clean merging (generated code is easy to optimize)
- Parallel file loading
- Tree shaking
- CommonJS support (aka `require`)
- Circular imports
Tests live at `/spack`.
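## Example

A rough usage sketch, condensed from `bundler/examples/path.rs` in this commit. `PathLoader` and `PathResolver` are illustrative implementations of the `Load` and `Resolve` traits supplied by the caller, not part of the crate.

```rust
use anyhow::Error;
use fxhash::FxHashMap;
use swc_bundler::{Bundler, Config, Load, Resolve};
use swc_common::{sync::Lrc, FileName, FilePathMapping, Globals, SourceFile, SourceMap};
use swc_ecma_ast::Module;
use swc_ecma_parser::{lexer::Lexer, Parser, StringInput, Syntax};

/// Loads a module from disk and parses it as an ES module.
struct PathLoader {
    cm: Lrc<SourceMap>,
}

impl Load for PathLoader {
    fn load(&self, file: &FileName) -> Result<(Lrc<SourceFile>, Module), Error> {
        let path = match file {
            FileName::Real(p) => p,
            _ => unreachable!(),
        };
        let fm = self.cm.load_file(path)?;
        let lexer = Lexer::new(
            Syntax::Es(Default::default()),
            Default::default(),
            StringInput::from(&*fm),
            None,
        );
        let mut parser = Parser::new_from(lexer);
        let module = parser.parse_module().expect("failed to parse module");
        Ok((fm, module))
    }
}

/// Resolves relative specifiers such as `./a` against the importing file.
struct PathResolver;

impl Resolve for PathResolver {
    fn resolve(&self, base: &FileName, module_specifier: &str) -> Result<FileName, Error> {
        let base = match base {
            FileName::Real(p) => p,
            _ => unreachable!(),
        };
        Ok(FileName::Real(
            base.parent()
                .unwrap()
                .join(module_specifier)
                .with_extension("js"),
        ))
    }
}

fn main() {
    let globals = Globals::new();
    let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
    let bundler = Bundler::new(
        &globals,
        cm.clone(),
        PathLoader { cm: cm.clone() },
        PathResolver,
        Config {
            require: true,
            external_modules: vec![],
        },
    );

    let mut entries = FxHashMap::default();
    entries.insert("main".to_string(), FileName::Real("assets/main.js".into()));

    // One bundle per entry; see bundler/examples/path.rs for emitting the
    // merged module with swc_ecma_codegen.
    let bundles = bundler.bundle(entries).expect("failed to bundle");
    println!("generated {} bundle(s)", bundles.len());
}
```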

bundler/assets/a.js Normal file

@ -0,0 +1,7 @@
export const FOO = 1;
export class A {
foo() {
}
}

bundler/assets/main.js Normal file

@ -0,0 +1,3 @@
import { A, FOO } from './a';
console.log(A, FOO);

bundler/examples/path.rs Normal file

@ -0,0 +1,112 @@
use anyhow::Error;
use fxhash::FxHashMap;
use std::io::stdout;
use swc_bundler::{BundleKind, Bundler, Config, Load, Resolve};
use swc_common::{sync::Lrc, FileName, FilePathMapping, Globals, SourceMap};
use swc_ecma_codegen::{text_writer::JsWriter, Emitter};
use swc_ecma_parser::{lexer::Lexer, EsConfig, Parser, StringInput, Syntax};
fn main() {
testing::init();
let globals = Globals::new();
let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
// This example does not use core modules.
let external_modules = vec![];
let bundler = Bundler::new(
&globals,
cm.clone(),
PathLoader { cm: cm.clone() },
PathResolver,
Config {
require: true,
external_modules,
},
);
let mut entries = FxHashMap::default();
entries.insert("main".to_string(), FileName::Real("assets/main.js".into()));
let mut bundles = bundler.bundle(entries).expect("failed to bundle");
assert_eq!(
bundles.len(),
1,
"There's no conditional / dynamic imports and we provided only one entry"
);
let bundle = bundles.pop().unwrap();
assert_eq!(
bundle.kind,
BundleKind::Named {
name: "main".into()
},
"We provided it"
);
let wr = stdout();
let mut emitter = Emitter {
cfg: swc_ecma_codegen::Config { minify: false },
cm: cm.clone(),
comments: None,
wr: Box::new(JsWriter::new(cm.clone(), "\n", wr.lock(), None)),
handlers: Box::new(Handlers),
};
emitter.emit_module(&bundle.module).unwrap();
}
/// I should remove this...
struct Handlers;
impl swc_ecma_codegen::Handlers for Handlers {}
struct PathLoader {
cm: Lrc<SourceMap>,
}
impl Load for PathLoader {
fn load(
&self,
file: &FileName,
) -> Result<(Lrc<swc_common::SourceFile>, swc_ecma_ast::Module), Error> {
let file = match file {
FileName::Real(v) => v,
_ => unreachable!(),
};
let fm = self.cm.load_file(file)?;
let lexer = Lexer::new(
Syntax::Es(EsConfig {
..Default::default()
}),
Default::default(),
StringInput::from(&*fm),
None,
);
let mut parser = Parser::new_from(lexer);
let module = parser.parse_module().expect("This should not happen");
Ok((fm, module))
}
}
struct PathResolver;
impl Resolve for PathResolver {
fn resolve(&self, base: &FileName, module_specifier: &str) -> Result<FileName, Error> {
assert!(
module_specifier.starts_with("."),
"We are not using node_modules within this example"
);
let base = match base {
FileName::Real(v) => v,
_ => unreachable!(),
};
Ok(FileName::Real(
base.parent()
.unwrap()
.join(module_specifier)
.with_extension("js"),
))
}
}


@ -0,0 +1,242 @@
use super::merge::{LocalMarker, Unexporter};
use crate::{bundler::load::TransformedModule, Bundler, Load, ModuleId, Resolve};
use hygiene::top_level_ident_folder;
use std::iter::once;
use swc_common::DUMMY_SP;
use swc_ecma_ast::*;
use swc_ecma_visit::{FoldWith, Node, Visit, VisitWith};
mod hygiene;
/// Circular imports are hard to handle.
///
/// We use a dedicated method to handle circular dependencies.
impl<L, R> Bundler<'_, L, R>
where
L: Load,
R: Resolve,
{
pub(super) fn merge_circular_modules(
&self,
entry_id: ModuleId,
circular_modules: &mut Vec<ModuleId>,
) -> Module {
assert!(
circular_modules.len() >= 1,
"# of circular modules should be 2 or greater than 2 including entry. Got {:?}",
circular_modules
);
debug_assert!(
self.scope.is_circular(entry_id),
"merge_circular_modules should only be called for circular entries"
);
let entry_module = self.scope.get_module(entry_id).unwrap();
let modules = circular_modules
.iter()
.chain(once(&entry_id))
.map(|&id| self.scope.get_module(id).unwrap())
.collect::<Vec<_>>();
let mut entry = self.process_circular_module(&modules, entry_module);
for &dep in &*circular_modules {
let new_module = self.merge_two_circular_modules(&modules, entry, dep);
entry = new_module;
}
// All circular modules are inlined
circular_modules.clear();
circular_modules.push(entry_id);
entry
}
/// Merges `a` and `b` into one module.
fn merge_two_circular_modules(
&self,
circular_modules: &[TransformedModule],
mut entry: Module,
dep: ModuleId,
) -> Module {
self.run(|| {
// print_hygiene("START: merge_two_circular_modules", &self.cm, &entry);
let dep_info = self.scope.get_module(dep).unwrap();
let mut dep = self.process_circular_module(circular_modules, dep_info);
dep = dep.fold_with(&mut Unexporter);
// Merge code
entry.body = merge_respecting_order(entry.body, dep.body);
// print_hygiene("END :merge_two_circular_modules", &self.cm, &entry);
entry
})
}
///
/// - Removes circular imports
fn process_circular_module(
&self,
circular_modules: &[TransformedModule],
entry: TransformedModule,
) -> Module {
let mut module = (*entry.module).clone();
// print_hygiene("START: process_circular_module", &self.cm, &module);
module.body.retain(|item| {
match item {
ModuleItem::ModuleDecl(ModuleDecl::Import(import)) => {
// Drop the import if it comes from one of the circular modules
for circular_module in circular_modules {
if entry
.imports
.specifiers
.iter()
.any(|v| v.0.module_id == circular_module.id && v.0.src == import.src)
{
log::debug!("Dropping circular import");
return false;
}
}
}
_ => {}
}
true
});
for circular_module in circular_modules {
for (src, specifiers) in entry.imports.specifiers.iter() {
if circular_module.id == src.module_id {
module = module.fold_with(&mut LocalMarker {
mark: circular_module.mark(),
specifiers: &specifiers,
excluded: vec![],
});
break;
}
}
}
module = module.fold_with(&mut top_level_ident_folder(
self.top_level_mark,
entry.mark(),
));
// print_hygiene("END: process_circular_module", &self.cm, &module);
module
}
}
/// Originally, this method should create a dependency graph, but for now it
/// merges the two statement lists while checking statement-level dependencies.
fn merge_respecting_order(mut entry: Vec<ModuleItem>, mut dep: Vec<ModuleItem>) -> Vec<ModuleItem> {
let mut new = Vec::with_capacity(entry.len() + dep.len());
// While looping over items from entry, we check for dependency.
loop {
if entry.is_empty() {
log::debug!("entry is empty");
break;
}
let item = entry.drain(..=0).next().unwrap();
// Everything from dep is injected
if dep.is_empty() {
log::trace!("dep is empty");
new.push(item);
new.extend(entry);
break;
}
// If an item from the entry depends on the dependency, we insert the
// dependency's source code at that position.
if let Some(pos) = dependency_index(&item, &dep) {
log::trace!("Found depndency: {}", pos);
new.extend(dep.drain(..=pos));
new.push(item);
continue;
}
// We checked the length of `dep`
if let Some(pos) = dependency_index(&dep[0], &[item.clone()]) {
log::trace!("Found reverse depndency (index[0]): {}", pos);
new.extend(entry.drain(..=pos));
new.extend(dep.drain(..=0));
continue;
}
if let Some(pos) = dependency_index(&dep[0], &entry) {
log::trace!("Found reverse depndency: {}", pos);
new.extend(entry.drain(..=pos));
new.extend(dep.drain(..=0));
continue;
}
log::debug!("No dependency");
new.push(item);
}
// Append remaining statements.
new.extend(dep);
new
}
fn dependency_index(item: &ModuleItem, deps: &[ModuleItem]) -> Option<usize> {
let mut v = DepFinder { deps, idx: None };
item.visit_with(&Invalid { span: DUMMY_SP }, &mut v);
v.idx
}
struct DepFinder<'a> {
deps: &'a [ModuleItem],
idx: Option<usize>,
}
impl Visit for DepFinder<'_> {
fn visit_ident(&mut self, i: &Ident, _: &dyn Node) {
if self.idx.is_some() {
return;
}
for (idx, dep) in self.deps.iter().enumerate() {
match dep {
ModuleItem::Stmt(Stmt::Decl(Decl::Class(decl))) => {
log::trace!(
"Decl (from dep) = {}{:?}, Ident = {}{:?}",
decl.ident.sym,
decl.ident.span.ctxt,
i.sym,
i.span.ctxt
);
if decl.ident.sym == i.sym && decl.ident.span.ctxt == i.span.ctxt {
self.idx = Some(idx);
break;
}
}
_ => {}
}
}
}
fn visit_member_expr(&mut self, e: &MemberExpr, _: &dyn Node) {
e.obj.visit_with(e as _, self);
if e.computed {
e.prop.visit_with(e as _, self)
}
}
fn visit_class_member(&mut self, _: &ClassMember, _: &dyn Node) {}
fn visit_function(&mut self, _: &Function, _: &dyn Node) {}
fn visit_arrow_expr(&mut self, _: &ArrowExpr, _: &dyn Node) {}
}


@ -0,0 +1,164 @@
use crate::id::Id;
use fxhash::FxHashSet;
use swc_common::{Mark, SyntaxContext};
use swc_ecma_ast::*;
use swc_ecma_visit::{Fold, FoldWith};
pub fn top_level_ident_folder(top_level_mark: Mark, module_mark: Mark) -> impl 'static + Fold {
MergeFolder {
scope: Default::default(),
top_level_mark,
module_mark,
}
}
#[derive(Default)]
struct Scope<'a> {
parent: Option<&'a Scope<'a>>,
binding_idents: FxHashSet<Id>,
}
/// Modifies mark of top-level identifiers so they can be merged cleanly.
struct MergeFolder<'a> {
scope: Scope<'a>,
/// Global marker for the top-level identifiers
top_level_mark: Mark,
/// The marker for the module's top-level identifiers.
module_mark: Mark,
}
impl<'a> Scope<'a> {
pub fn new(parent: Option<&'a Scope<'a>>) -> Self {
Scope {
parent,
..Default::default()
}
}
pub fn contains(&self, i: &Ident) -> bool {
if self.binding_idents.contains(&Id::from(i)) {
return true;
}
self.parent.map(|p| p.contains(i)).unwrap_or(false)
}
}
/// TODO: This is incomplete
impl<'a> MergeFolder<'a> {
fn fold_binding_ident(&mut self, mut i: Ident) -> Ident {
log::trace!("BindingIdent: {}{:?}", i.sym, i.span.ctxt);
let mut ctxt = i.span.clone();
if self.top_level_mark == ctxt.remove_mark() {
i.span = i
.span
.with_ctxt(SyntaxContext::empty().apply_mark(self.module_mark));
}
self.scope.binding_idents.insert((&i).into());
i
}
fn fold_ref_ident(&mut self, mut i: Ident) -> Ident {
// Skip reference to globals.
if !self.scope.contains(&i) {
// eprintln!("Preserving {}{:?}", i.sym, i.span.ctxt);
return i;
}
log::trace!("Changing context of ident ref: {}{:?}", i.sym, i.span.ctxt);
let mut ctxt = i.span.clone();
if self.top_level_mark == ctxt.remove_mark() {
i.span = i
.span
.with_ctxt(SyntaxContext::empty().apply_mark(self.module_mark));
}
i
}
fn child(&'a self) -> MergeFolder<'a> {
MergeFolder {
top_level_mark: self.top_level_mark,
module_mark: self.module_mark,
scope: Scope::new(Some(&self.scope)),
}
}
}
impl Fold for MergeFolder<'_> {
fn fold_class_decl(&mut self, c: ClassDecl) -> ClassDecl {
ClassDecl {
ident: self.fold_binding_ident(c.ident),
class: c.class.fold_with(self),
..c
}
}
fn fold_class_expr(&mut self, c: ClassExpr) -> ClassExpr {
ClassExpr {
ident: c.ident.map(|i| self.fold_binding_ident(i)),
class: c.class.fold_with(self),
..c
}
}
fn fold_member_expr(&mut self, e: MemberExpr) -> MemberExpr {
if e.computed {
MemberExpr {
obj: e.obj.fold_with(self),
prop: e.prop.fold_with(self),
..e
}
} else {
MemberExpr {
obj: e.obj.fold_with(self),
..e
}
}
}
fn fold_expr(&mut self, mut e: Expr) -> Expr {
e = e.fold_children_with(self);
match e {
Expr::Ident(i) => Expr::Ident(self.fold_ref_ident(i)),
_ => e,
}
}
fn fold_fn_decl(&mut self, decl: FnDecl) -> FnDecl {
let ident = self.fold_binding_ident(decl.ident);
let mut child = self.child();
let function = decl.function.fold_with(&mut child);
FnDecl {
ident,
function,
..decl
}
}
fn fold_fn_expr(&mut self, f: FnExpr) -> FnExpr {
let ident = f.ident.map(|i| self.fold_binding_ident(i));
let mut child = self.child();
let function = f.function.fold_with(&mut child);
FnExpr {
ident,
function,
..f
}
}
fn fold_pat(&mut self, mut p: Pat) -> Pat {
p = p.fold_children_with(self);
match p {
Pat::Ident(i) => Pat::Ident(self.fold_binding_ident(i)),
_ => p,
}
}
}


@ -1,7 +1,9 @@
use super::Bundler;
use crate::{ use crate::{
bundler::{export::Exports, load_transformed::Specifier}, bundler::{export::Exports, load::Specifier},
Id, ModuleId, id::{Id, ModuleId},
load::Load,
resolve::Resolve,
Bundler,
}; };
use anyhow::{Context, Error}; use anyhow::{Context, Error};
use std::{ use std::{
@ -12,57 +14,89 @@ use std::{
use swc_atoms::{js_word, JsWord}; use swc_atoms::{js_word, JsWord};
use swc_common::{Mark, Span, Spanned, SyntaxContext, DUMMY_SP}; use swc_common::{Mark, Span, Spanned, SyntaxContext, DUMMY_SP};
use swc_ecma_ast::*; use swc_ecma_ast::*;
use swc_ecma_transforms::noop_fold_type;
use swc_ecma_utils::{ use swc_ecma_utils::{
find_ids, prepend, private_ident, undefined, DestructuringFinder, ExprFactory, StmtLike, find_ids, prepend, private_ident, undefined, DestructuringFinder, ExprFactory, StmtLike,
}; };
use swc_ecma_visit::{Fold, FoldWith, VisitMut, VisitMutWith, VisitWith}; use swc_ecma_visit::{Fold, FoldWith, VisitMut, VisitMutWith, VisitWith};
impl Bundler<'_> { impl<L, R> Bundler<'_, L, R>
where
L: Load,
R: Resolve,
{
/// Merge `targets` into `entry`. /// Merge `targets` into `entry`.
pub(super) fn merge_modules( pub(super) fn merge_modules(
&self, &self,
entry: ModuleId, entry: ModuleId,
targets: &mut Vec<ModuleId>, targets: &mut Vec<ModuleId>,
) -> Result<Module, Error> { ) -> Result<Module, Error> {
log::trace!("merge_modules({})", entry); self.run(|| {
let is_circular = self.scope.is_circular(entry);
log::trace!(
"merge_modules({}) <- {:?}; circular = {}",
entry,
targets,
is_circular
);
self.swc.run(|| {
let info = self.scope.get_module(entry).unwrap(); let info = self.scope.get_module(entry).unwrap();
let mut entry: Module = (*info.module).clone();
if targets.is_empty() { if targets.is_empty() {
return Ok((*info.module).clone()); return Ok((*info.module).clone());
} }
log::info!("Merge: {} <= {:?}", info.fm.name, targets); if is_circular {
log::info!("Circular dependency detected");
// TODO: provide only circular imports.
return Ok(self.merge_circular_modules(entry, targets));
}
// { let mut entry: Module = (*info.module).clone();
// let code = self
// .swc log::info!("Merge: ({}){} <= {:?}", info.id, info.fm.name, targets);
// .print(
// &entry.clone().fold_with(&mut HygieneVisualizer), // print_hygiene("before:merge", &self.cm, &entry);
// SourceMapsConfig::Bool(false),
// None,
// false,
// )
// .unwrap()
// .code;
//
// println!("Before merging:\n{}\n\n\n", code);
// }
for (src, specifiers) in &info.imports.specifiers { for (src, specifiers) in &info.imports.specifiers {
if !targets.contains(&src.module_id) { if !targets.contains(&src.module_id) {
// Already merged by recursive call to merge_modules.
log::debug!( log::debug!(
"Not merging: not in target: ({}):{} <= ({}):{}", "Not merging: already merged: ({}):{} <= ({}):{}",
info.id, info.id,
info.fm.name, info.fm.name,
src.module_id, src.module_id,
src.src.value, src.src.value,
); );
if let Some(imported) = self.scope.get_module(src.module_id) {
// Respan using imported module's syntax context.
entry = entry.fold_with(&mut LocalMarker {
mark: imported.mark(),
specifiers: &specifiers,
excluded: vec![],
});
}
// Drop imports, as they are already merged.
entry.body.retain(|item| {
match item {
ModuleItem::ModuleDecl(ModuleDecl::Import(import)) => {
// Drop if it's one of circular import
if info.imports.specifiers.iter().any(|v| {
v.0.module_id == src.module_id && v.0.src == import.src
}) {
log::debug!("Dropping import");
return false;
}
}
_ => {}
}
true
});
continue; continue;
} }
log::debug!("Merging: {} <= {}", info.fm.name, src.src.value); log::debug!("Merging: {} <= {}", info.fm.name, src.src.value);
if specifiers.iter().any(|v| v.is_namespace()) { if specifiers.iter().any(|v| v.is_namespace()) {
@ -75,6 +109,10 @@ impl Bundler<'_> {
if let Some(imported) = self.scope.get_module(src.module_id) { if let Some(imported) = self.scope.get_module(src.module_id) {
info.helpers.extend(&imported.helpers); info.helpers.extend(&imported.helpers);
if let Some(pos) = targets.iter().position(|x| *x == src.module_id) {
targets.remove(pos);
}
// In the case of // In the case of
// //
// a <- b // a <- b
@ -93,43 +131,13 @@ impl Bundler<'_> {
) )
})?; })?;
if let Some(pos) = targets.iter().position(|x| *x == info.id) {
targets.remove(pos);
}
if imported.is_es6 { if imported.is_es6 {
//{ // print_hygiene("dep:before:tree-shaking", &self.cm, &dep);
// let code = self
// .swc
// .print(
// &dep.clone().fold_with(&mut HygieneVisualizer),
// info.fm.clone(),
// false,
// false,
// )
// .unwrap()
// .code;
//
// println!("Dep before drop_unused:\n{}\n\n\n", code);
//}
// Tree-shaking // Tree-shaking
dep = self.drop_unused(imported.fm.clone(), dep, Some(&specifiers)); dep = self.drop_unused(dep, Some(&specifiers));
//{ // print_hygiene("dep:after:tree-shaking", &self.cm, &dep);
// let code = self
// .swc
// .print(
// &dep.clone().fold_with(&mut HygieneVisualizer),
// info.fm.clone(),
// false,
// false,
// )
// .unwrap()
// .code;
//
// println!("Dep after drop_unused:\n{}\n\n\n", code);
//}
if let Some(imports) = info if let Some(imports) = info
.imports .imports
@ -163,11 +171,15 @@ impl Bundler<'_> {
// }); // });
} }
// print_hygiene("dep:before:global-mark", &self.cm, &dep);
dep = dep.fold_with(&mut GlobalMarker { dep = dep.fold_with(&mut GlobalMarker {
used_mark: self.used_mark, used_mark: self.used_mark,
module_mark: imported.mark(), module_mark: imported.mark(),
}); });
// print_hygiene("dep:after:global-mark", &self.cm, &dep);
// { // {
// let code = self // let code = self
// .swc // .swc
@ -183,21 +195,6 @@ impl Bundler<'_> {
// println!("Dep:\n{}\n\n\n", code); // println!("Dep:\n{}\n\n\n", code);
// } // }
// {
// let code = self
// .swc
// .print(
// &entry.clone().fold_with(&mut HygieneVisualizer),
// SourceMapsConfig::Bool(false),
// None,
// false,
// )
// .unwrap()
// .code;
//
// println!("@: Before merging:\n{}\n\n\n", code);
// }
// Replace import statement / require with module body // Replace import statement / require with module body
let mut injector = Es6ModuleInjector { let mut injector = Es6ModuleInjector {
imported: dep.body.clone(), imported: dep.body.clone(),
@ -205,28 +202,14 @@ impl Bundler<'_> {
}; };
entry.body.visit_mut_with(&mut injector); entry.body.visit_mut_with(&mut injector);
// { // print_hygiene("entry:after:injection", &self.cm, &entry);
// let code = self
// .swc
// .print(
// &entry.clone().fold_with(&mut
// HygieneVisualizer),
// SourceMapsConfig::Bool(false),
// None,
// false,
// )
// .unwrap()
// .code;
//
// println!("Merged:\n{}\n\n\n", code);
// }
if injector.imported.is_empty() { if injector.imported.is_empty() {
continue; continue;
} }
} }
{ if self.config.require {
// common js module is transpiled as // common js module is transpiled as
// //
// Src: // Src:
@ -356,16 +339,44 @@ impl Bundler<'_> {
} }
/// `export var a = 1` => `var a = 1` /// `export var a = 1` => `var a = 1`
struct Unexporter; pub(super) struct Unexporter;
noop_fold_type!(Unexporter);
impl Fold for Unexporter { impl Fold for Unexporter {
fn fold_module_item(&mut self, item: ModuleItem) -> ModuleItem { fn fold_module_item(&mut self, item: ModuleItem) -> ModuleItem {
match item { match item {
ModuleItem::ModuleDecl(decl) => match decl { ModuleItem::ModuleDecl(decl) => match decl {
ModuleDecl::ExportDecl(decl) => ModuleItem::Stmt(Stmt::Decl(decl.decl)), ModuleDecl::ExportDecl(decl) => ModuleItem::Stmt(Stmt::Decl(decl.decl)),
ModuleDecl::ExportDefaultExpr(..) => {
ModuleDecl::ExportDefaultDecl(export) => match export.decl {
DefaultDecl::Class(ClassExpr { ident: None, .. })
| DefaultDecl::Fn(FnExpr { ident: None, .. }) => {
ModuleItem::Stmt(Stmt::Empty(EmptyStmt { span: DUMMY_SP }))
}
DefaultDecl::TsInterfaceDecl(decl) => {
ModuleItem::Stmt(Stmt::Decl(Decl::TsInterface(decl)))
}
DefaultDecl::Class(ClassExpr {
ident: Some(ident),
class,
}) => ModuleItem::Stmt(Stmt::Decl(Decl::Class(ClassDecl {
declare: false,
ident,
class,
}))),
DefaultDecl::Fn(FnExpr {
ident: Some(ident),
function,
}) => ModuleItem::Stmt(Stmt::Decl(Decl::Fn(FnDecl {
declare: false,
function,
ident,
}))),
},
// Empty statement
ModuleDecl::ExportAll(..) | ModuleDecl::ExportDefaultExpr(..) => {
ModuleItem::Stmt(Stmt::Empty(EmptyStmt { span: DUMMY_SP })) ModuleItem::Stmt(Stmt::Empty(EmptyStmt { span: DUMMY_SP }))
} }
ModuleDecl::ExportNamed(ref n) if n.src.is_none() => { ModuleDecl::ExportNamed(ref n) if n.src.is_none() => {
@ -373,8 +384,7 @@ impl Fold for Unexporter {
} }
ModuleDecl::Import(..) => ModuleItem::ModuleDecl(decl), ModuleDecl::Import(..) => ModuleItem::ModuleDecl(decl),
// TODO: Handle all _ => unimplemented!("Unexported: {:?}", decl),
_ => unimplemented!("Unexporter: {:?}", decl),
}, },
_ => item, _ => item,
@ -392,8 +402,6 @@ struct ExportRenamer<'a> {
extras: Vec<Stmt>, extras: Vec<Stmt>,
} }
noop_fold_type!(ExportRenamer<'_>);
impl ExportRenamer<'_> { impl ExportRenamer<'_> {
pub fn aliased_import(&self, sym: &JsWord) -> Option<Id> { pub fn aliased_import(&self, sym: &JsWord) -> Option<Id> {
log::debug!("aliased_import({})\n{:?}\n\n\n", sym, self.imports); log::debug!("aliased_import({})\n{:?}\n\n\n", sym, self.imports);
@ -575,8 +583,6 @@ struct ActualMarker<'a> {
imports: &'a [Specifier], imports: &'a [Specifier],
} }
noop_fold_type!(ActualMarker<'_>);
impl Fold for ActualMarker<'_> { impl Fold for ActualMarker<'_> {
fn fold_expr(&mut self, node: Expr) -> Expr { fn fold_expr(&mut self, node: Expr) -> Expr {
node node
@ -605,15 +611,13 @@ impl Fold for ActualMarker<'_> {
} }
/// Applied to the importer module, and marks (connects) imported idents. /// Applied to the importer module, and marks (connects) imported idents.
struct LocalMarker<'a> { pub(super) struct LocalMarker<'a> {
/// Mark applied to imported idents. /// Mark applied to imported idents.
mark: Mark, pub mark: Mark,
specifiers: &'a [Specifier], pub specifiers: &'a [Specifier],
excluded: Vec<Id>, pub excluded: Vec<Id>,
} }
noop_fold_type!(LocalMarker<'_>);
impl<'a> LocalMarker<'a> { impl<'a> LocalMarker<'a> {
/// Searches for i, and fold T. /// Searches for i, and fold T.
#[allow(dead_code)] #[allow(dead_code)]
@ -772,13 +776,11 @@ impl VisitMut for Es6ModuleInjector {
} }
} }
struct GlobalMarker { pub(super) struct GlobalMarker {
used_mark: Mark, pub used_mark: Mark,
module_mark: Mark, pub module_mark: Mark,
} }
noop_fold_type!(GlobalMarker);
impl GlobalMarker { impl GlobalMarker {
fn is_marked_as_used(&self, span: Span) -> bool { fn is_marked_as_used(&self, span: Span) -> bool {
let mut ctxt = span.ctxt(); let mut ctxt = span.ctxt();


@ -1,15 +1,16 @@
use super::Bundler; use super::{load::TransformedModule, Bundler};
use crate::{ use crate::{
bundler::{load_transformed::TransformedModule, Bundle, BundleKind}, id::ModuleId, load::Load, resolve::Resolve, util::IntoParallelIterator, Bundle, BundleKind,
ModuleId,
}; };
use anyhow::{Context, Error}; use anyhow::{Context, Error};
use fxhash::{FxHashMap, FxHashSet}; use fxhash::{FxHashMap, FxHashSet};
use petgraph::{graphmap::DiGraphMap, visit::Bfs}; use petgraph::{graphmap::DiGraphMap, visit::Bfs};
use rayon::prelude::*; #[cfg(feature = "rayon")]
use swc_ecma_transforms::{fixer, hygiene, optimization::simplify::dce::dce}; use rayon::iter::ParallelIterator;
use swc_ecma_transforms::{hygiene, optimization::simplify::dce};
use swc_ecma_visit::FoldWith; use swc_ecma_visit::FoldWith;
mod circular;
mod merge; mod merge;
pub(super) type ModuleGraph = DiGraphMap<ModuleId, usize>; pub(super) type ModuleGraph = DiGraphMap<ModuleId, usize>;
@ -34,7 +35,11 @@ struct State {
common_libs: FxHashSet<ModuleId>, common_libs: FxHashSet<ModuleId>,
} }
impl Bundler<'_> { impl<L, R> Bundler<'_, L, R>
where
L: Load,
R: Resolve,
{
/// `entries` - Entry modules (provided by user) by it's basename. /// `entries` - Entry modules (provided by user) by it's basename.
/// ///
/// # How it works /// # How it works
@ -50,16 +55,15 @@ impl Bundler<'_> {
.into_par_iter() .into_par_iter()
.map( .map(
|(kind, id, mut module_ids_to_merge): (BundleKind, ModuleId, _)| { |(kind, id, mut module_ids_to_merge): (BundleKind, ModuleId, _)| {
self.swc().run(|| { self.run(|| {
let module = self let module = self
.merge_modules(id, &mut module_ids_to_merge) .merge_modules(id, &mut module_ids_to_merge)
.context("failed to merge module") .context("failed to merge module")
.unwrap(); // TODO .unwrap(); // TODO
let module = module let module = module
.fold_with(&mut dce(Default::default())) .fold_with(&mut dce::dce(Default::default()))
.fold_with(&mut hygiene()) .fold_with(&mut hygiene());
.fold_with(&mut fixer(Some(&self.swc.comments() as _)));
Bundle { kind, id, module } Bundle { kind, id, module }
}) })
@ -139,6 +143,8 @@ impl Bundler<'_> {
} }
fn add_to_graph(&self, graph: &mut ModuleGraph, module_id: ModuleId) { fn add_to_graph(&self, graph: &mut ModuleGraph, module_id: ModuleId) {
let contains = graph.contains_node(module_id);
graph.add_node(module_id); graph.add_node(module_id);
let m = self let m = self
@ -146,8 +152,20 @@ impl Bundler<'_> {
.get_module(module_id) .get_module(module_id)
.expect("failed to get module"); .expect("failed to get module");
// Prevent dejavu
if contains {
for (src, _) in &m.imports.specifiers {
if graph.contains_node(src.module_id) {
self.scope.mark_as_circular(module_id);
self.scope.mark_as_circular(src.module_id);
return;
}
}
}
for (src, _) in &*m.imports.specifiers { for (src, _) in &*m.imports.specifiers {
// //
self.add_to_graph(graph, src.module_id); self.add_to_graph(graph, src.module_id);
graph.add_edge( graph.add_edge(
module_id, module_id,


@ -1,17 +1,20 @@
use super::Bundler; use super::{
use crate::{ load::{Source, Specifier},
bundler::load_transformed::{Source, Specifier}, Bundler,
Id,
}; };
use crate::{id::Id, load::Load, resolve::Resolve};
use fxhash::FxHashMap; use fxhash::FxHashMap;
use swc_atoms::js_word; use swc_atoms::js_word;
use swc_common::{SyntaxContext, DUMMY_SP}; use swc_common::{SyntaxContext, DUMMY_SP};
use swc_ecma_ast::*; use swc_ecma_ast::*;
use swc_ecma_transforms::noop_visit_type;
use swc_ecma_utils::find_ids; use swc_ecma_utils::find_ids;
use swc_ecma_visit::{Node, Visit, VisitWith}; use swc_ecma_visit::{Node, Visit, VisitWith};
impl Bundler<'_> { impl<L, R> Bundler<'_, L, R>
where
L: Load,
R: Resolve,
{
/// This method removes exported pure constants from the module. /// This method removes exported pure constants from the module.
/// ///
/// A pure constant is a exported literal. /// A pure constant is a exported literal.
@ -20,7 +23,7 @@ impl Bundler<'_> {
/// TODO: Support pattern like /// TODO: Support pattern like
/// export const [a, b] = [1, 2] /// export const [a, b] = [1, 2]
pub(super) fn extract_export_info(&self, module: &Module) -> RawExports { pub(super) fn extract_export_info(&self, module: &Module) -> RawExports {
self.swc.run(|| { self.run(|| {
let mut v = ExportFinder::default(); let mut v = ExportFinder::default();
module.visit_with(&Invalid { span: DUMMY_SP } as _, &mut v); module.visit_with(&Invalid { span: DUMMY_SP } as _, &mut v);
@ -32,14 +35,12 @@ impl Bundler<'_> {
#[derive(Debug, Default)] #[derive(Debug, Default)]
pub(super) struct RawExports { pub(super) struct RawExports {
pub pure_constants: Vec<(Id, Lit)>,
/// Key is None if it's exported from the module itself. /// Key is None if it's exported from the module itself.
pub items: FxHashMap<Option<Str>, Vec<Specifier>>, pub items: FxHashMap<Option<Str>, Vec<Specifier>>,
} }
#[derive(Debug, Default)] #[derive(Debug, Default)]
pub(super) struct Exports { pub(super) struct Exports {
pub pure_constants: Vec<(Id, Lit)>,
pub items: Vec<Specifier>, pub items: Vec<Specifier>,
pub reexports: FxHashMap<Source, Vec<Specifier>>, pub reexports: FxHashMap<Source, Vec<Specifier>>,
} }
@ -49,8 +50,6 @@ struct ExportFinder {
info: RawExports, info: RawExports,
} }
noop_visit_type!(ExportFinder);
impl Visit for ExportFinder { impl Visit for ExportFinder {
fn visit_module_item(&mut self, item: &ModuleItem, _: &dyn Node) { fn visit_module_item(&mut self, item: &ModuleItem, _: &dyn Node) {
match item { match item {


@ -0,0 +1,170 @@
use crate::{hash::calc_hash, Bundle, BundleKind, Bundler, Load, Resolve};
use anyhow::Error;
use fxhash::FxHashMap;
use relative_path::RelativePath;
use std::path::{Path, PathBuf};
use swc_common::{util::move_map::MoveMap, FileName};
use swc_ecma_ast::{ImportDecl, Str};
use swc_ecma_transforms::noop_fold_type;
use swc_ecma_visit::{Fold, FoldWith};
impl<L, R> Bundler<'_, L, R>
where
L: Load,
R: Resolve,
{
/// This method does the following:
///
/// - inject helpers
/// - rename chunks
pub(super) fn finalize(&self, bundles: Vec<Bundle>) -> Result<Vec<Bundle>, Error> {
self.run(|| {
let mut new = Vec::with_capacity(bundles.len());
let mut renamed = FxHashMap::default();
for mut bundle in bundles {
match bundle.kind {
BundleKind::Named { .. } => {
// Inject helpers
let helpers = self
.scope
.get_module(bundle.id)
.expect("module should exist at this point")
.helpers;
helpers.append_to(&mut bundle.module.body);
new.push(Bundle { ..bundle });
}
BundleKind::Lib { name } => {
let hash = calc_hash(self.cm.clone(), &bundle.module)?;
let mut new_name = PathBuf::from(name);
let key = new_name.clone();
let file_name = new_name
.file_name()
.map(|path| -> PathBuf {
let path = Path::new(path);
let ext = path.extension();
if let Some(ext) = ext {
return format!(
"{}-{}.{}",
path.file_stem().unwrap().to_string_lossy(),
hash,
ext.to_string_lossy()
)
.into();
}
return format!(
"{}-{}",
path.file_stem().unwrap().to_string_lossy(),
hash,
)
.into();
})
.expect("javascript file should have name");
new_name.pop();
new_name = new_name.join(file_name.clone());
renamed.insert(key, new_name.to_string_lossy().to_string());
new.push(Bundle {
kind: BundleKind::Named {
name: file_name.display().to_string(),
},
..bundle
})
}
_ => new.push(bundle),
}
}
if new.len() == 1 {
return Ok(new);
}
new = new.move_map(|bundle| {
let path = match self.scope.get_module(bundle.id).unwrap().fm.name {
FileName::Real(ref v) => v.clone(),
_ => {
log::error!("Cannot rename: not a real file");
return bundle;
}
};
let module = {
// Change imports
let mut v = Renamer {
resolver: &self.resolver,
base: &path,
renamed: &renamed,
};
bundle.module.fold_with(&mut v)
};
Bundle { module, ..bundle }
});
Ok(new)
})
}
}
/// Import renamer. This pass changes import paths.
struct Renamer<'a, R>
where
R: Resolve,
{
resolver: R,
base: &'a PathBuf,
renamed: &'a FxHashMap<PathBuf, String>,
}
noop_fold_type!(Renamer<'_, '_>);
impl<R> Fold for Renamer<'_, R>
where
R: Resolve,
{
fn fold_import_decl(&mut self, import: ImportDecl) -> ImportDecl {
let resolved = match self
.resolver
.resolve(&FileName::Real(self.base.clone()), &import.src.value)
{
Ok(v) => match v {
FileName::Real(v) => v,
_ => panic!("rename_bundles called with non-path module"),
},
Err(_) => return import,
};
if let Some(v) = self.renamed.get(&resolved) {
// We use parent because RelativePath uses ../common-[hash].js
// if we use `entry-a.js` as a base.
//
// entry-a.js
// common.js
let base = self
.base
.parent()
.unwrap_or(self.base)
.as_os_str()
.to_string_lossy();
let base = RelativePath::new(&*base);
let v = base.relative(&*v);
let value = v.as_str();
return ImportDecl {
src: Str {
value: if value.starts_with(".") {
value.into()
} else {
format!("./{}", value).into()
},
..import.src
},
..import
};
}
import
}
}


@ -1,57 +1,62 @@
use super::Bundler; use super::Bundler;
use crate::{load::Load, resolve::Resolve};
use anyhow::{Context, Error}; use anyhow::{Context, Error};
use fxhash::{FxHashMap, FxHashSet}; use fxhash::{FxHashMap, FxHashSet};
use node_resolve::is_core_module; use std::mem::replace;
use std::{
mem::replace,
path::{Path, PathBuf},
sync::Arc,
};
use swc_atoms::{js_word, JsWord}; use swc_atoms::{js_word, JsWord};
use swc_common::{util::move_map::MoveMap, Mark, Spanned, DUMMY_SP}; use swc_common::{sync::Lrc, util::move_map::MoveMap, FileName, Mark, Spanned, DUMMY_SP};
use swc_ecma_ast::*; use swc_ecma_ast::*;
use swc_ecma_transforms::noop_fold_type;
use swc_ecma_utils::{find_ids, ident::IdentLike, Id}; use swc_ecma_utils::{find_ids, ident::IdentLike, Id};
use swc_ecma_visit::{Fold, FoldWith}; use swc_ecma_visit::{Fold, FoldWith};
#[cfg(test)] #[cfg(test)]
mod tests; mod tests;
impl Bundler<'_> { impl<L, R> Bundler<'_, L, R>
/// This de-globs imports if possible. where
L: Load,
R: Resolve,
{
/// This method de-globs imports if possible.
pub(super) fn extract_import_info( pub(super) fn extract_import_info(
&self, &self,
path: &Path, path: &FileName,
module: &mut Module, module: &mut Module,
_mark: Mark, _mark: Mark,
) -> RawImports { ) -> RawImports {
let body = replace(&mut module.body, vec![]); self.run(|| {
let body = replace(&mut module.body, vec![]);
let mut v = ImportHandler { let mut v = ImportHandler {
path, path,
bundler: self, bundler: self,
top_level: false, top_level: false,
info: Default::default(), info: Default::default(),
forces_ns: Default::default(), forces_ns: Default::default(),
ns_usage: Default::default(), ns_usage: Default::default(),
deglob_phase: false, deglob_phase: false,
}; };
let body = body.fold_with(&mut v); let body = body.fold_with(&mut v);
v.deglob_phase = true; v.deglob_phase = true;
let body = body.fold_with(&mut v); let body = body.fold_with(&mut v);
module.body = body; module.body = body;
v.info v.info
})
} }
pub(super) fn resolve(&self, base: &Path, s: &str) -> Result<Arc<PathBuf>, Error> { pub(super) fn resolve(
self.swc.run(|| { &self,
base: &FileName,
module_specifier: &str,
) -> Result<Lrc<FileName>, Error> {
self.run(|| {
let path = self let path = self
.resolver .resolver
.resolve(base, s) .resolve(base, module_specifier)
.with_context(|| format!("failed to resolve {} from {}", s, base.display()))?; .with_context(|| format!("failed to resolve {} from {}", module_specifier, base))?;
let path = Arc::new(path); let path = Lrc::new(path);
Ok(path) Ok(path)
}) })
@ -77,9 +82,13 @@ pub(super) struct RawImports {
pub dynamic_imports: Vec<Str>, pub dynamic_imports: Vec<Str>,
} }
struct ImportHandler<'a, 'b> { struct ImportHandler<'a, 'b, L, R>
path: &'a Path, where
bundler: &'a Bundler<'b>, L: Load,
R: Resolve,
{
path: &'a FileName,
bundler: &'a Bundler<'b, L, R>,
top_level: bool, top_level: bool,
info: RawImports, info: RawImports,
/// Contains namespace imports accessed with computed key. /// Contains namespace imports accessed with computed key.
@ -99,11 +108,20 @@ struct ImportHandler<'a, 'b> {
deglob_phase: bool, deglob_phase: bool,
} }
noop_fold_type!(ImportHandler<'_, '_>); impl<L, R> ImportHandler<'_, '_, L, R>
where
impl ImportHandler<'_, '_> { L: Load,
R: Resolve,
{
fn mark_for(&self, src: &str) -> Option<Mark> { fn mark_for(&self, src: &str) -> Option<Mark> {
if is_core_module(src) { // Don't apply mark if it's a core module.
if self
.bundler
.config
.external_modules
.iter()
.any(|v| v == src)
{
return None; return None;
} }
let path = self.bundler.resolve(self.path, src).ok()?; let path = self.bundler.resolve(self.path, src).ok()?;
@ -112,10 +130,20 @@ impl ImportHandler<'_, '_> {
} }
} }
impl Fold for ImportHandler<'_, '_> { impl<L, R> Fold for ImportHandler<'_, '_, L, R>
where
L: Load,
R: Resolve,
{
fn fold_import_decl(&mut self, import: ImportDecl) -> ImportDecl { fn fold_import_decl(&mut self, import: ImportDecl) -> ImportDecl {
if !self.deglob_phase { if !self.deglob_phase {
if is_core_module(&import.src.value) { // Ignore if it's a core module.
if self
.bundler
.config
.external_modules
.contains(&import.src.value)
{
return import; return import;
} }
@ -334,13 +362,14 @@ impl Fold for ImportHandler<'_, '_> {
match &e.callee { match &e.callee {
ExprOrSuper::Expr(callee) ExprOrSuper::Expr(callee)
if match &**callee { if self.bundler.config.require
Expr::Ident(Ident { && match &**callee {
sym: js_word!("require"), Expr::Ident(Ident {
.. sym: js_word!("require"),
}) => true, ..
_ => false, }) => true,
} => _ => false,
} =>
{ {
let span = callee.span(); let span = callee.span();
@ -399,13 +428,15 @@ impl Fold for ImportHandler<'_, '_> {
callee: ExprOrSuper::Expr(ref callee), callee: ExprOrSuper::Expr(ref callee),
ref args, ref args,
.. ..
}) if match &**callee { }) if self.bundler.config.require
Expr::Ident(Ident { && match &**callee {
sym: js_word!("require"), Expr::Ident(Ident {
.. sym: js_word!("require"),
}) => true, ..
_ => false, }) => true,
} && args.len() == 1 => _ => false,
}
&& args.len() == 1 =>
{ {
let span = *span; let span = *span;
let src = match args.first().unwrap() { let src = match args.first().unwrap() {
@ -415,7 +446,8 @@ impl Fold for ImportHandler<'_, '_> {
}, },
_ => return node, _ => return node,
}; };
if is_core_module(&src.value) { // Ignore core modules.
if self.bundler.config.external_modules.contains(&src.value) {
return node; return node;
} }


@ -1,5 +1,7 @@
use crate::bundler::{import::ImportHandler, tests::test_bundler}; use super::ImportHandler;
use crate::bundler::tests::test_bundler;
use std::path::Path; use std::path::Path;
use swc_common::FileName;
use swc_ecma_visit::FoldWith; use swc_ecma_visit::FoldWith;
#[test] #[test]
@ -13,7 +15,7 @@ ns.foo();
", ",
); );
let mut v = ImportHandler { let mut v = ImportHandler {
path: &Path::new("index.js"), path: &FileName::Real(Path::new("index.js").to_path_buf()),
bundler: &t.bundler, bundler: &t.bundler,
top_level: false, top_level: false,
info: Default::default(), info: Default::default(),
@ -41,7 +43,7 @@ ns.bar();
", ",
); );
let mut v = ImportHandler { let mut v = ImportHandler {
path: &Path::new("index.js"), path: &FileName::Real(Path::new("index.js").to_path_buf()),
bundler: &t.bundler, bundler: &t.bundler,
top_level: false, top_level: false,
info: Default::default(), info: Default::default(),

bundler/src/bundler/load.rs Normal file

@ -0,0 +1,422 @@
use super::{export::Exports, helpers::Helpers, Bundler};
use crate::{
bundler::{export::RawExports, import::RawImports},
id::{Id, ModuleId},
util,
util::IntoParallelIterator,
Load, Resolve,
};
use anyhow::{Context, Error};
use is_macro::Is;
#[cfg(feature = "rayon")]
use rayon::iter::ParallelIterator;
use swc_atoms::js_word;
use swc_common::{sync::Lrc, FileName, Mark, SourceFile, DUMMY_SP};
use swc_ecma_ast::{
Expr, ExprOrSuper, ImportDecl, ImportSpecifier, Invalid, MemberExpr, Module, ModuleDecl, Str,
};
use swc_ecma_transforms::resolver_with_mark;
use swc_ecma_visit::{FoldWith, Node, Visit, VisitWith};
/// Module after applying transformations.
#[derive(Debug, Clone)]
pub(super) struct TransformedModule {
pub id: ModuleId,
pub fm: Lrc<SourceFile>,
pub module: Lrc<Module>,
pub imports: Lrc<Imports>,
pub exports: Lrc<Exports>,
/// If false, the module will be wrapped with a small helper function.
pub is_es6: bool,
/// Used helpers
pub helpers: Lrc<Helpers>,
mark: Mark,
}
impl TransformedModule {
/// The marker for the module's top-level identifiers.
pub fn mark(&self) -> Mark {
self.mark
}
}
impl<L, R> Bundler<'_, L, R>
where
L: Load,
R: Resolve,
{
/// Phase 1 (discovery)
///
/// We apply transforms at this phase to make caching efficient.
/// As we cache in this phase, changing a dependency does not invalidate the
/// cache entries of other modules.
pub(super) fn load_transformed(
&self,
file_name: &FileName,
) -> Result<Option<TransformedModule>, Error> {
self.run(|| {
log::trace!("load_transformed: ({})", file_name);
// In case of common module
if let Some(cached) = self.scope.get_module_by_path(&file_name) {
log::info!("Cached: {}", file_name);
return Ok(Some(cached));
}
let (_, fm, module) = self.load(&file_name).context("Bundler.load() failed")?;
let (v, mut files) = self
.analyze(&file_name, fm.clone(), module)
.context("failed to analyze module")?;
files.dedup_by_key(|v| v.1.clone());
log::info!("Storing module: {}", file_name);
self.scope.store_module(v.clone());
// Load dependencies and store them in the `Scope`
let results = files
.into_par_iter()
.map(|(_src, path)| {
log::debug!("loading dependency: {}", path);
self.load_transformed(&path)
})
.collect::<Vec<_>>();
// Do tasks in parallel, and then wait for result
for result in results {
let res = result?;
dbg!(res.is_none());
}
Ok(Some(v))
})
}
fn load(&self, file_name: &FileName) -> Result<(ModuleId, Lrc<SourceFile>, Module), Error> {
self.run(|| {
let (module_id, _) = self.scope.module_id_gen.gen(file_name);
let (fm, module) = self
.loader
.load(&file_name)
.with_context(|| format!("Bundler.loader.load({}) failed", file_name))?;
self.scope.mark_as_loaded(module_id);
Ok((module_id, fm, module))
})
}
/// This method returns the [Source]s which should be loaded.
fn analyze(
&self,
file_name: &FileName,
fm: Lrc<SourceFile>,
mut module: Module,
) -> Result<(TransformedModule, Vec<(Source, Lrc<FileName>)>), Error> {
self.run(|| {
log::trace!("transform_module({})", fm.name);
module = module.fold_with(&mut resolver_with_mark(self.top_level_mark));
let (id, mark) = self.scope.module_id_gen.gen(file_name);
// {
// let code = self
// .swc
// .print(
// &module.clone().fold_with(&mut HygieneVisualizer),
// SourceMapsConfig::Bool(false),
// None,
// false,
// )
// .unwrap()
// .code;
//
// println!("Resolved:\n{}\n\n", code);
// }
let imports = self.extract_import_info(file_name, &mut module, mark);
// {
// let code = self
// .swc
// .print(
// &module.clone().fold_with(&mut HygieneVisualizer),
// SourceMapsConfig::Bool(false),
// None,
// false,
// )
// .unwrap()
// .code;
//
// println!("After imports:\n{}\n", code,);
// }
let exports = self.extract_export_info(&module);
let is_es6 = {
let mut v = Es6ModuleDetector {
forced_es6: false,
found_other: false,
};
module.visit_with(&Invalid { span: DUMMY_SP } as _, &mut v);
v.forced_es6 || !v.found_other
};
if is_es6 {
module = self.drop_unused(module, None);
}
let (imports, exports) = util::join(
|| self.resolve_imports(file_name, imports),
|| self.resolve_exports(file_name, exports),
);
let (imports, mut import_files) = imports?;
let (exports, reexport_files) = exports?;
import_files.extend(reexport_files);
let module = Lrc::new(module);
Ok((
TransformedModule {
id,
fm,
module,
imports: Lrc::new(imports),
exports: Lrc::new(exports),
is_es6,
helpers: Default::default(),
mark,
},
import_files,
))
})
}
/// Resolve re-exports.
fn resolve_exports(
&self,
base: &FileName,
raw: RawExports,
) -> Result<(Exports, Vec<(Source, Lrc<FileName>)>), Error> {
self.run(|| {
log::trace!("resolve_exports({})", base);
let mut files = vec![];
let mut exports = Exports::default();
let items = raw
.items
.into_par_iter()
.map(|(src, ss)| -> Result<_, Error> {
self.run(|| {
let info = match src {
Some(src) => {
let name = self.resolve(base, &src.value)?;
let (id, _) = self.scope.module_id_gen.gen(&name);
Some((id, name, src))
}
None => None,
};
Ok((info, ss))
})
})
.collect::<Vec<_>>();
for res in items {
let (info, specifiers) = res?;
match info {
None => exports.items.extend(specifiers),
Some((id, name, src)) => {
//
let src = Source {
is_loaded_synchronously: true,
is_unconditional: false,
module_id: id,
src,
};
exports
.reexports
.entry(src.clone())
.or_default()
.extend(specifiers);
files.push((src, name));
}
}
}
Ok((exports, files))
})
}
/// Resolve dependencies
fn resolve_imports(
&self,
base: &FileName,
info: RawImports,
) -> Result<(Imports, Vec<(Source, Lrc<FileName>)>), Error> {
self.run(|| {
log::trace!("resolve_imports({})", base);
let mut files = vec![];
let mut merged = Imports::default();
let RawImports {
imports,
lazy_imports,
dynamic_imports,
} = info;
let loaded = imports
.into_par_iter()
.map(|v| (v, false, true))
.chain(lazy_imports.into_par_iter().map(|v| (v, false, false)))
.chain(dynamic_imports.into_par_iter().map(|src| {
(
ImportDecl {
span: src.span,
specifiers: vec![],
src,
type_only: false,
},
true,
false,
)
}))
.map(|(decl, dynamic, unconditional)| -> Result<_, Error> {
self.run(|| {
//
let file_name = self.resolve(base, &decl.src.value)?;
let (id, _) = self.scope.module_id_gen.gen(&file_name);
Ok((id, file_name, decl, dynamic, unconditional))
})
})
.collect::<Vec<_>>();
for res in loaded {
// TODO: Report error and proceed instead of returning an error
let (id, file_name, decl, is_dynamic, is_unconditional) = res?;
let src = Source {
is_loaded_synchronously: !is_dynamic,
is_unconditional,
module_id: id,
src: decl.src,
};
files.push((src.clone(), file_name));
// TODO: Handle rename
let mut specifiers = vec![];
for s in decl.specifiers {
match s {
ImportSpecifier::Named(s) => specifiers.push(Specifier::Specific {
local: s.local.into(),
alias: s.imported.map(From::from),
}),
ImportSpecifier::Default(s) => specifiers.push(Specifier::Specific {
local: s.local.into(),
alias: Some(Id::new(js_word!("default"), s.span.ctxt())),
}),
ImportSpecifier::Namespace(s) => {
specifiers.push(Specifier::Namespace {
local: s.local.into(),
});
}
}
}
merged.specifiers.push((src, specifiers));
}
Ok((merged, files))
})
}
}
#[derive(Debug, Default)]
pub(super) struct Imports {
/// If imported ids are empty, it is a side-effect import.
pub specifiers: Vec<(Source, Vec<Specifier>)>,
}
/// Clone is relatively cheap
#[derive(Debug, Clone, Is)]
pub(super) enum Specifier {
Specific { local: Id, alias: Option<Id> },
Namespace { local: Id },
}
impl Specifier {
pub fn local(&self) -> &Id {
match self {
Specifier::Specific { local, .. } => local,
Specifier::Namespace { local, .. } => local,
}
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub(super) struct Source {
pub is_loaded_synchronously: bool,
pub is_unconditional: bool,
pub module_id: ModuleId,
// Clone is relatively cheap, thanks to string_cache.
pub src: Str,
}
struct Es6ModuleDetector {
/// If an import or export statement is detected, it's an ES6 module regardless
/// of any other code.
forced_es6: bool,
/// True if other module system is detected.
found_other: bool,
}
impl Visit for Es6ModuleDetector {
fn visit_member_expr(&mut self, e: &MemberExpr, _: &dyn Node) {
e.obj.visit_with(e as _, self);
if e.computed {
e.prop.visit_with(e as _, self);
}
match &e.obj {
ExprOrSuper::Expr(e) => {
match &**e {
Expr::Ident(i) => {
// TODO: Check syntax context (Check if marker is the global mark)
if i.sym == *"module" {
self.found_other = true;
}
if i.sym == *"exports" {
self.found_other = true;
}
}
_ => {}
}
}
_ => {}
}
//
}
fn visit_module_decl(&mut self, decl: &ModuleDecl, _: &dyn Node) {
match decl {
ModuleDecl::Import(_)
| ModuleDecl::ExportDecl(_)
| ModuleDecl::ExportNamed(_)
| ModuleDecl::ExportDefaultDecl(_)
| ModuleDecl::ExportDefaultExpr(_)
| ModuleDecl::ExportAll(_) => {
self.forced_es6 = true;
}
ModuleDecl::TsImportEquals(_) => {}
ModuleDecl::TsExportAssignment(_) => {}
ModuleDecl::TsNamespaceExport(_) => {}
}
}
}

bundler/src/bundler/mod.rs Normal file

@ -0,0 +1,150 @@
use self::scope::Scope;
use crate::{Load, ModuleId, Resolve};
use anyhow::{Context, Error};
use fxhash::FxHashMap;
use swc_atoms::JsWord;
use swc_common::{sync::Lrc, FileName, Globals, Mark, SourceMap, DUMMY_SP, GLOBALS};
use swc_ecma_ast::Module;
mod chunk;
mod export;
mod finalize;
mod helpers;
mod import;
mod load;
mod scope;
#[cfg(test)]
mod tests;
mod usage_analysis;
#[derive(Debug)]
pub struct Config {
/// If it's true, [Bundler] searches for require calls.
pub require: bool,
/// List of modules which should be preserved.
pub external_modules: Vec<JsWord>,
}
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum BundleKind {
/// User-provided entry
Named { name: String },
/// Auto-generated entry (created by import expression)
Dynamic,
/// A lazy-loaded shared library
Lib { name: String },
}
/// Built bundle
#[derive(Debug)]
pub struct Bundle {
pub kind: BundleKind,
pub id: ModuleId,
/// Merged module.
///
/// You **should** run fixer.
pub module: Module,
}
pub struct Bundler<'a, L, R>
where
L: Load,
R: Resolve,
{
config: Config,
globals: &'a Globals,
cm: Lrc<SourceMap>,
loader: L,
resolver: R,
/// [Mark] used while tree shaking
used_mark: Mark,
/// [Mark] for top-level identifiers
top_level_mark: Mark,
scope: Scope,
}
impl<'a, L, R> Bundler<'a, L, R>
where
L: Load,
R: Resolve,
{
pub fn new(
globals: &'a Globals,
cm: Lrc<SourceMap>,
loader: L,
resolver: R,
config: Config,
) -> Self {
GLOBALS.set(&globals, || {
let used_mark = Mark::fresh(Mark::root());
log::info!("Used mark: {:?}", DUMMY_SP.apply_mark(used_mark).ctxt());
let top_level_mark = Mark::fresh(Mark::root());
log::info!(
"top-level mark: {:?}",
DUMMY_SP.apply_mark(top_level_mark).ctxt()
);
Bundler {
cm,
loader,
resolver,
used_mark,
top_level_mark,
scope: Default::default(),
globals,
config,
}
})
}
///
///
///
/// Note: This method will panic if the entries reference each other in a
/// circular manner. However, this applies only to the provided `entries`;
/// circular references among dependencies are fine.
pub fn bundle(&self, entries: FxHashMap<String, FileName>) -> Result<Vec<Bundle>, Error> {
let results = entries
.into_iter()
.map(|(name, path)| -> Result<_, Error> {
let res = self
.load_transformed(&path)
.context("load_transformed failed")?;
Ok((name, res))
})
.collect::<Vec<_>>();
// We collect here to handle dynamic imports
// TODO: Handle dynamic imports
let local = {
let mut output = FxHashMap::default();
for res in results {
let (name, m) = res?;
let m = m.unwrap();
output.insert(name, m);
}
output
};
let bundles = self.chunk(local)?;
let bundles = self.finalize(bundles)?;
Ok(bundles)
}
/// Sets `swc_common::GLOBALS`
#[inline]
fn run<F, Ret>(&self, op: F) -> Ret
where
F: FnOnce() -> Ret,
{
GLOBALS.set(self.globals, op)
}
}


@ -0,0 +1,47 @@
use super::load::TransformedModule;
use crate::{
id::{ModuleId, ModuleIdGenerator},
util::CloneMap,
};
use swc_common::FileName;
#[derive(Debug, Default)]
pub(super) struct Scope {
pub module_id_gen: ModuleIdGenerator,
circular_modules: CloneMap<ModuleId, ()>,
loaded_modules: CloneMap<ModuleId, ()>,
/// Cached after applying basic transformations.
transformed_modules: CloneMap<ModuleId, TransformedModule>,
}
impl Scope {
pub fn is_circular(&self, id: ModuleId) -> bool {
self.circular_modules.get(&id).is_some()
}
pub fn mark_as_circular(&self, id: ModuleId) {
self.circular_modules.insert(id, ());
}
pub fn mark_as_loaded(&self, id: ModuleId) {
self.loaded_modules.insert(id, ());
}
/// Stores module information. The information should contain only
/// information obtained from the module itself. In other words, it should
/// not contain information from a dependency.
pub fn store_module(&self, info: TransformedModule) {
self.transformed_modules.insert(info.id, info);
}
pub fn get_module_by_path(&self, file_name: &FileName) -> Option<TransformedModule> {
let (id, _) = self.module_id_gen.gen(file_name);
self.get_module(id)
}
pub fn get_module(&self, id: ModuleId) -> Option<TransformedModule> {
Some(self.transformed_modules.get(&id)?.clone())
}
}


@ -0,0 +1,89 @@
//! Utilities for testing.
use super::{Bundler, Config};
use crate::{util::HygieneRemover, Load, Resolve};
use anyhow::Error;
use std::path::PathBuf;
use swc_common::{sync::Lrc, FileName, SourceFile, SourceMap, GLOBALS};
use swc_ecma_ast::*;
use swc_ecma_parser::{lexer::Lexer, Parser, StringInput};
use swc_ecma_utils::drop_span;
use swc_ecma_visit::FoldWith;
pub struct Tester<'a> {
pub cm: Lrc<SourceMap>,
pub bundler: Bundler<'a, Loader, Resolver>,
}
#[derive(Debug, Default)]
pub struct Loader;
impl Load for Loader {
fn load(&self, _: &FileName) -> Result<(Lrc<SourceFile>, Module), Error> {
unreachable!("swc_bundler: tester.load")
}
}
#[derive(Debug, Default)]
pub struct Resolver;
impl Resolve for Resolver {
fn resolve(&self, _: &FileName, _: &str) -> Result<FileName, Error> {
unreachable!("swc_bundler: tester.resolve")
}
}
impl<'a> Tester<'a> {
pub fn parse(&self, s: &str) -> Module {
let fm = self
.cm
.new_source_file(FileName::Real(PathBuf::from("input.js")), s.into());
let lexer = Lexer::new(
Default::default(),
Default::default(),
StringInput::from(&*fm),
None,
);
let mut parser = Parser::new_from(lexer);
parser.parse_module().unwrap()
}
pub fn assert_eq(&self, m: &Module, expected: &str) {
let expected = self.parse(expected);
let m = drop_span(m.clone().fold_with(&mut HygieneRemover));
let expected = drop_span(expected);
assert_eq!(m, expected)
}
}
pub fn test_bundler<F>(op: F)
where
F: FnOnce(&mut Tester),
{
testing::run_test2(true, |cm, _| {
GLOBALS.with(|globals| {
let bundler = Bundler::new(
globals,
cm.clone(),
Default::default(),
Default::default(),
Config {
require: true,
external_modules: vec![],
},
);
let mut t = Tester {
cm: cm.clone(),
bundler,
};
op(&mut t);
Ok(())
})
})
.expect("WTF?");
}


@ -1,23 +1,22 @@
use crate::{bundler::load_transformed::Specifier, Bundler}; use super::load::Specifier;
use std::{borrow::Cow, sync::Arc}; use crate::{Bundler, Load, Resolve};
use swc_common::SourceFile; use std::borrow::Cow;
use swc_ecma_ast::*; use swc_ecma_ast::*;
use swc_ecma_transforms::optimization::simplify::dce; use swc_ecma_transforms::optimization::simplify::dce;
use swc_ecma_utils::ident::IdentLike; use swc_ecma_utils::ident::IdentLike;
use swc_ecma_visit::FoldWith; use swc_ecma_visit::FoldWith;
impl Bundler<'_> { impl<L, R> Bundler<'_, L, R>
where
L: Load,
R: Resolve,
{
/// If used_exports is [None], all exports are treated as exported. /// If used_exports is [None], all exports are treated as exported.
/// ///
/// Note: Context of used_exports is ignored, as the specifiers comes from /// Note: Context of used_exports is ignored, as the specifiers comes from
/// other module. /// other module.
pub(super) fn drop_unused( pub(super) fn drop_unused(&self, node: Module, used_exports: Option<&[Specifier]>) -> Module {
&self, self.run(|| {
_fm: Arc<SourceFile>,
node: Module,
used_exports: Option<&[Specifier]>,
) -> Module {
self.swc.run(|| {
let mut used = vec![]; let mut used = vec![];
if let Some(used_exports) = used_exports { if let Some(used_exports) = used_exports {

bundler/src/debug/mod.rs Normal file

@ -0,0 +1,41 @@
#![allow(dead_code)]
use std::io::{stdout, Write};
use swc_common::{sync::Lrc, SourceMap};
use swc_ecma_ast::{Ident, Module};
use swc_ecma_codegen::{text_writer::JsWriter, Emitter};
use swc_ecma_visit::{Fold, FoldWith};
pub(crate) fn print_hygiene(event: &str, cm: &Lrc<SourceMap>, t: &Module) {
let module = t.clone().fold_with(&mut HygieneVisualizer);
let stdout = stdout();
let mut w = stdout.lock();
writeln!(w, "==================== @ {} ====================", event).unwrap();
Emitter {
cfg: swc_ecma_codegen::Config { minify: false },
cm: cm.clone(),
comments: None,
wr: Box::new(JsWriter::new(cm.clone(), "\n", &mut w, None)),
handlers: Box::new(Handlers),
}
.emit_module(&module)
.unwrap();
writeln!(w, "==================== @ ====================").unwrap();
}
impl swc_ecma_codegen::Handlers for Handlers {}
struct Handlers;
struct HygieneVisualizer;
impl Fold for HygieneVisualizer {
fn fold_ident(&mut self, node: Ident) -> Ident {
Ident {
sym: format!("{}{:?}", node.sym, node.span.ctxt()).into(),
..node
}
}
}
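`print_hygiene` is a crate-internal debugging aid: it prints the module with each identifier suffixed by its `SyntaxContext`, wrapped in `==== @ event ====` markers, so hygiene problems can be spotted between bundling passes. A hypothetical call site (the surrounding names are illustrative only):

```rust
// Somewhere inside a bundler pass: dump the module before and after a step.
print_hygiene("before merge", &self.cm, &module);
let module = self.merge_modules(module)?; // hypothetical step
print_hygiene("after merge", &self.cm, &module);
```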

117
bundler/src/hash.rs Normal file
View File

@ -0,0 +1,117 @@
use anyhow::{Context, Error};
use crc::{crc64, crc64::Digest, Hasher64};
use std::io;
use swc_common::{sync::Lrc, SourceMap, Span};
use swc_ecma_ast::Module;
use swc_ecma_codegen::{text_writer::WriteJs, Emitter};
pub(crate) fn calc_hash(cm: Lrc<SourceMap>, m: &Module) -> Result<String, Error> {
let digest = crc64::Digest::new(crc64::ECMA);
let mut buf = Hasher { digest };
{
let mut emitter = Emitter {
cfg: Default::default(),
cm,
comments: None,
wr: Box::new(&mut buf) as Box<dyn WriteJs>,
handlers: Box::new(Handlers),
};
emitter
.emit_module(&m)
.context("failed to emit module to calculate hash")?;
}
//
let result = buf.digest.sum64();
Ok(radix_fmt::radix(result, 36).to_string())
}
impl swc_ecma_codegen::Handlers for Handlers {}
struct Handlers;
struct Hasher {
digest: Digest,
}
impl Hasher {
fn w(&mut self, s: &str) {
self.digest.write(s.as_bytes());
}
}
impl WriteJs for &mut Hasher {
fn increase_indent(&mut self) -> io::Result<()> {
Ok(())
}
fn decrease_indent(&mut self) -> io::Result<()> {
Ok(())
}
fn write_semi(&mut self) -> io::Result<()> {
self.w(";");
Ok(())
}
fn write_space(&mut self) -> io::Result<()> {
self.w(" ");
Ok(())
}
fn write_keyword(&mut self, _: Option<Span>, s: &'static str) -> io::Result<()> {
self.w(s);
Ok(())
}
fn write_operator(&mut self, s: &str) -> io::Result<()> {
self.w(s);
Ok(())
}
fn write_param(&mut self, s: &str) -> io::Result<()> {
self.w(s);
Ok(())
}
fn write_property(&mut self, s: &str) -> io::Result<()> {
self.w(s);
Ok(())
}
fn write_line(&mut self) -> io::Result<()> {
self.w("\n");
Ok(())
}
fn write_lit(&mut self, _: Span, s: &str) -> io::Result<()> {
self.w(s);
Ok(())
}
fn write_comment(&mut self, _: Span, s: &str) -> io::Result<()> {
self.w(s);
Ok(())
}
fn write_str_lit(&mut self, _: Span, s: &str) -> io::Result<()> {
self.w(s);
Ok(())
}
fn write_str(&mut self, s: &str) -> io::Result<()> {
self.w(s);
Ok(())
}
fn write_symbol(&mut self, _: Span, s: &str) -> io::Result<()> {
self.w(s);
Ok(())
}
fn write_punct(&mut self, s: &'static str) -> io::Result<()> {
self.w(s);
Ok(())
}
}
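The hash used for chunk file names is a CRC-64/ECMA digest of the emitted JavaScript, printed in base 36. A standalone sketch of the same scheme applied to a plain string, using the same `crc` 1.x and `radix_fmt` APIs as the code above (illustrative only):

```rust
use crc::{crc64, Hasher64};

fn hash_of(text: &str) -> String {
    // CRC-64/ECMA over the raw bytes, rendered in base 36 like calc_hash above.
    let mut digest = crc64::Digest::new(crc64::ECMA);
    digest.write(text.as_bytes());
    radix_fmt::radix(digest.sum64(), 36).to_string()
}
```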

View File

@ -1,14 +1,10 @@
use dashmap::DashMap; use fxhash::FxHashMap;
use std::{ use std::{
fmt, fmt,
path::PathBuf, sync::atomic::{AtomicU64, Ordering::SeqCst},
sync::{
atomic::{AtomicU64, Ordering::SeqCst},
Arc,
},
}; };
use swc_atoms::JsWord; use swc_atoms::JsWord;
use swc_common::{Mark, SyntaxContext, DUMMY_SP}; use swc_common::{sync::Lock, FileName, Mark, SyntaxContext, DUMMY_SP};
use swc_ecma_ast::Ident; use swc_ecma_ast::Ident;
use swc_ecma_utils::ident::IdentLike; use swc_ecma_utils::ident::IdentLike;
@ -24,18 +20,19 @@ impl fmt::Display for ModuleId {
#[derive(Debug, Default)] #[derive(Debug, Default)]
pub(crate) struct ModuleIdGenerator { pub(crate) struct ModuleIdGenerator {
v: AtomicU64, v: AtomicU64,
cache: DashMap<Arc<PathBuf>, (ModuleId, Mark)>, cache: Lock<FxHashMap<FileName, (ModuleId, Mark)>>,
} }
impl ModuleIdGenerator { impl ModuleIdGenerator {
pub fn gen(&self, path: &Arc<PathBuf>) -> (ModuleId, Mark) { pub fn gen(&self, file_name: &FileName) -> (ModuleId, Mark) {
if let Some(v) = self.cache.get(path) { let mut w = self.cache.lock();
return *v.value(); if let Some(v) = w.get(file_name) {
return v.clone();
} }
let id = ModuleId(self.v.fetch_add(1, SeqCst)); let id = ModuleId(self.v.fetch_add(1, SeqCst));
let mark = Mark::fresh(Mark::root()); let mark = Mark::fresh(Mark::root());
self.cache.insert(path.clone(), (id, mark)); w.insert(file_name.clone(), (id, mark));
(id, mark) (id, mark)
} }
} }
@ -62,11 +59,6 @@ impl Id {
Ident::new(self.0, DUMMY_SP.with_ctxt(self.1)) Ident::new(self.0, DUMMY_SP.with_ctxt(self.1))
} }
pub fn append_mark(mut self, mark: Mark) -> Self {
self.1 = self.1.apply_mark(mark);
self
}
pub fn replace_mark(mut self, mark: Mark) -> Self { pub fn replace_mark(mut self, mark: Mark) -> Self {
self.1 = SyntaxContext::empty().apply_mark(mark); self.1 = SyntaxContext::empty().apply_mark(mark);
self self
@ -110,5 +102,3 @@ impl PartialEq<JsWord> for Id {
self.0 == *other self.0 == *other
} }
} }
pub type QualifiedId = (ModuleId, Id);

14
bundler/src/lib.rs Normal file
View File

@ -0,0 +1,14 @@
pub use self::{
bundler::{Bundle, BundleKind, Bundler, Config},
id::ModuleId,
load::Load,
resolve::Resolve,
};
mod bundler;
mod debug;
mod hash;
mod id;
mod load;
mod resolve;
mod util;

27
bundler/src/load.rs Normal file
View File

@ -0,0 +1,27 @@
use anyhow::Error;
use swc_common::{sync::Lrc, FileName, SourceFile};
use swc_ecma_ast::Module;
/// Responsible for providing files to the bundler.
///
/// Note: Resolve and Load are separate traits because multiple modules can
/// depend on a single module. Because of such 'common' modules, the bundler
/// should implement some caching. The bundler uses [FileName] as the cache
/// key.
///
/// This trait is designed to allow passing a pre-parsed module.
pub trait Load: swc_common::sync::Send + swc_common::sync::Sync {
fn load(&self, file: &FileName) -> Result<(Lrc<SourceFile>, Module), Error>;
}
impl<T: ?Sized + Load> Load for Box<T> {
fn load(&self, file: &FileName) -> Result<(Lrc<SourceFile>, Module), Error> {
(**self).load(file)
}
}
impl<'a, T: ?Sized + Load> Load for &'a T {
fn load(&self, file: &FileName) -> Result<(Lrc<SourceFile>, Module), Error> {
(**self).load(file)
}
}
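A minimal `Load` implementation might parse source text directly with `swc_ecma_parser`. The sketch below serves every request from a hard-coded in-memory string and is purely illustrative; a real loader would read from disk and honor the requested `FileName`.

```rust
use anyhow::Error;
use swc_common::{sync::Lrc, FileName, SourceFile, SourceMap};
use swc_ecma_ast::Module;
use swc_ecma_parser::{lexer::Lexer, Parser, StringInput, Syntax};

struct MemoryLoader {
    cm: Lrc<SourceMap>,
}

impl Load for MemoryLoader {
    fn load(&self, file: &FileName) -> Result<(Lrc<SourceFile>, Module), Error> {
        // Every module resolves to the same hard-coded source in this sketch.
        let fm = self
            .cm
            .new_source_file(file.clone(), "export const foo = 1;".to_string());
        let lexer = Lexer::new(
            Syntax::Es(Default::default()),
            Default::default(),
            StringInput::from(&*fm),
            None,
        );
        let mut parser = Parser::new_from(lexer);
        let module = parser
            .parse_module()
            .map_err(|e| Error::msg(format!("failed to parse: {:?}", e)))?;
        Ok((fm, module))
    }
}
```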

18
bundler/src/resolve.rs Normal file
View File

@ -0,0 +1,18 @@
use anyhow::Error;
use swc_common::FileName;
pub trait Resolve: swc_common::sync::Send + swc_common::sync::Sync {
fn resolve(&self, base: &FileName, module_specifier: &str) -> Result<FileName, Error>;
}
impl<T: ?Sized + Resolve> Resolve for Box<T> {
fn resolve(&self, base: &FileName, module_specifier: &str) -> Result<FileName, Error> {
(**self).resolve(base, module_specifier)
}
}
impl<'a, T: ?Sized + Resolve> Resolve for &'a T {
fn resolve(&self, base: &FileName, module_specifier: &str) -> Result<FileName, Error> {
(**self).resolve(base, module_specifier)
}
}
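A minimal `Resolve` implementation can simply join relative specifiers onto the importing file's directory. The sketch below is illustrative only and skips extension probing and `node_modules` lookup; the Node-style resolver added near the end of this diff handles those cases.

```rust
use anyhow::{bail, Error};
use std::path::Path;
use swc_common::FileName;

struct RelativeResolver;

impl Resolve for RelativeResolver {
    fn resolve(&self, base: &FileName, module_specifier: &str) -> Result<FileName, Error> {
        let base = match base {
            FileName::Real(v) => v,
            _ => bail!("RelativeResolver only supports real files, got `{}`", base),
        };
        // Resolve `./a` relative to the directory containing the importer.
        let dir = base.parent().unwrap_or_else(|| Path::new("."));
        Ok(FileName::Real(dir.join(module_specifier)))
    }
}
```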

96
bundler/src/util.rs Normal file
View File

@ -0,0 +1,96 @@
use fxhash::FxBuildHasher;
use std::hash::Hash;
use swc_common::{Span, SyntaxContext};
use swc_ecma_visit::Fold;
#[derive(Debug)]
pub(crate) struct CloneMap<K, V>
where
K: Eq + Hash,
V: Clone,
{
#[cfg(feature = "concurrent")]
inner: dashmap::DashMap<K, V, FxBuildHasher>,
#[cfg(not(feature = "concurrent"))]
inner: std::cell::RefCell<std::collections::HashMap<K, V, FxBuildHasher>>,
}
impl<K, V> Default for CloneMap<K, V>
where
K: Eq + Hash,
V: Clone,
{
fn default() -> Self {
Self {
inner: Default::default(),
}
}
}
impl<K, V> CloneMap<K, V>
where
K: Eq + Hash,
V: Clone,
{
#[cfg(feature = "concurrent")]
pub fn get(&self, k: &K) -> Option<V> {
if let Some(v) = self.inner.get(k) {
Some(v.value().clone())
} else {
None
}
}
#[cfg(not(feature = "concurrent"))]
pub fn get(&self, k: &K) -> Option<V> {
if let Some(v) = self.inner.borrow().get(k) {
Some(v.clone())
} else {
None
}
}
#[cfg(feature = "concurrent")]
pub fn insert(&self, k: K, v: V) {
self.inner.insert(k, v);
}
#[cfg(not(feature = "concurrent"))]
pub fn insert(&self, k: K, v: V) {
self.inner.borrow_mut().insert(k, v);
}
}
pub(crate) struct HygieneRemover;
impl Fold for HygieneRemover {
fn fold_span(&mut self, s: Span) -> Span {
s.with_ctxt(SyntaxContext::empty())
}
}
#[cfg(feature = "rayon")]
pub(crate) use rayon::join;
#[cfg(not(feature = "rayon"))]
pub(crate) fn join<A, B, RA, RB>(oper_a: A, oper_b: B) -> (RA, RB)
where
A: FnOnce() -> RA,
B: FnOnce() -> RB,
{
(oper_a(), oper_b())
}
#[cfg(feature = "rayon")]
pub(crate) use rayon::iter::IntoParallelIterator;
/// Fake trait
#[cfg(not(feature = "rayon"))]
pub(crate) trait IntoParallelIterator: Sized + IntoIterator {
fn into_par_iter(self) -> <Self as IntoIterator>::IntoIter {
self.into_iter()
}
}
#[cfg(not(feature = "rayon"))]
impl<T> IntoParallelIterator for T where T: IntoIterator {}
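`CloneMap` is the bundler's small cache abstraction: it is backed by a `DashMap` when the `concurrent` feature is enabled and by a `RefCell<HashMap>` otherwise, and `get` always hands back a clone, so stored values should be cheap to clone (for example `Lrc`/`Arc`). A minimal in-crate usage sketch:

```rust
use std::sync::Arc;

fn cache_demo(cache: &CloneMap<String, Arc<String>>) {
    // Both backends take &self, so inserting through a shared reference works.
    cache.insert("key".to_string(), Arc::new("value".to_string()));
    // `get` returns a cloned value (here: a cheap Arc clone).
    assert!(cache.get(&"key".to_string()).is_some());
}
```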

View File

@ -1,6 +1,6 @@
[package] [package]
name = "swc_common" name = "swc_common"
version = "0.9.0" version = "0.9.1"
authors = ["강동윤 <kdy1997.dev@gmail.com>"] authors = ["강동윤 <kdy1997.dev@gmail.com>"]
license = "Apache-2.0/MIT" license = "Apache-2.0/MIT"
repository = "https://github.com/swc-project/swc.git" repository = "https://github.com/swc-project/swc.git"

View File

@ -103,28 +103,18 @@ impl<T> Lock<T> {
// self.0.get_mut() // self.0.get_mut()
// } // }
// #[cfg(parallel_compiler)] // #[cfg(feature = "concurrent")]
// #[inline(always)] // #[inline(always)]
// pub fn try_lock(&self) -> Option<LockGuard<'_, T>> { // pub fn try_lock(&self) -> Option<LockGuard<'_, T>> {
// self.0.try_lock() // self.0.try_lock()
// } // }
// //
// #[cfg(not(parallel_compiler))] // #[cfg(not(feature = "concurrent"))]
// #[inline(always)] // #[inline(always)]
// pub fn try_lock(&self) -> Option<LockGuard<'_, T>> { // pub fn try_lock(&self) -> Option<LockGuard<'_, T>> {
// self.0.try_borrow_mut().ok() // self.0.try_borrow_mut().ok()
// } // }
#[cfg(parallel_compiler)]
#[inline(always)]
pub fn lock(&self) -> LockGuard<'_, T> {
if ERROR_CHECKING {
self.0.try_lock().expect("lock was already held")
} else {
self.0.lock()
}
}
#[cfg(feature = "concurrent")] #[cfg(feature = "concurrent")]
#[inline(always)] #[inline(always)]
pub fn lock(&self) -> LockGuard<'_, T> { pub fn lock(&self) -> LockGuard<'_, T> {
@ -253,7 +243,7 @@ impl<T: Ord + Copy> Ord for LockCell<T> {
} }
} }
#[derive(Debug)] #[derive(Debug, Default)]
pub struct RwLock<T>(InnerRwLock<T>); pub struct RwLock<T>(InnerRwLock<T>);
impl<T> RwLock<T> { impl<T> RwLock<T> {
@ -278,6 +268,50 @@ impl<T> RwLock<T> {
pub fn borrow(&self) -> ReadGuard<'_, T> { pub fn borrow(&self) -> ReadGuard<'_, T> {
self.read() self.read()
} }
#[inline(always)]
pub fn get_mut(&mut self) -> &mut T {
self.0.get_mut()
}
#[inline(always)]
pub fn with_read_lock<F: FnOnce(&T) -> R, R>(&self, f: F) -> R {
f(&*self.read())
}
#[cfg(not(feature = "concurrent"))]
#[inline(always)]
pub fn try_write(&self) -> Result<WriteGuard<'_, T>, ()> {
self.0.try_borrow_mut().map_err(|_| ())
}
#[cfg(feature = "concurrent")]
#[inline(always)]
pub fn try_write(&self) -> Result<WriteGuard<'_, T>, ()> {
self.0.try_write().ok_or(())
}
#[cfg(not(feature = "concurrent"))]
#[inline(always)]
pub fn write(&self) -> WriteGuard<'_, T> {
self.0.borrow_mut()
}
#[cfg(feature = "concurrent")]
#[inline(always)]
pub fn write(&self) -> WriteGuard<'_, T> {
self.0.write()
}
#[inline(always)]
pub fn with_write_lock<F: FnOnce(&mut T) -> R, R>(&self, f: F) -> R {
f(&mut *self.write())
}
#[inline(always)]
pub fn borrow_mut(&self) -> WriteGuard<'_, T> {
self.write()
}
} }
// FIXME: Probably a bad idea // FIXME: Probably a bad idea
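The new `RwLock` helpers above (together with the `Default` derive added in this hunk) give callers a read/write cell that degrades to a `RefCell` when the `concurrent` feature is off. A small usage sketch under that assumption:

```rust
use swc_common::sync::RwLock;

fn rwlock_demo() {
    // `Default` is newly derived in this commit, so no explicit constructor is needed.
    let counter: RwLock<usize> = Default::default();
    counter.with_write_lock(|v| *v += 1);
    let value = counter.with_read_lock(|v| *v);
    assert_eq!(value, 1);
}
```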

View File

@ -1,6 +1,6 @@
[package] [package]
name = "swc_ecma_parser" name = "swc_ecma_parser"
version = "0.33.2" version = "0.33.3"
authors = ["강동윤 <kdy1997.dev@gmail.com>"] authors = ["강동윤 <kdy1997.dev@gmail.com>"]
license = "Apache-2.0/MIT" license = "Apache-2.0/MIT"
repository = "https://github.com/swc-project/swc.git" repository = "https://github.com/swc-project/swc.git"

View File

@ -7,40 +7,38 @@ use swc_common::{
use swc_ecma_parser::{lexer::Lexer, Capturing, Parser, StringInput, Syntax}; use swc_ecma_parser::{lexer::Lexer, Capturing, Parser, StringInput, Syntax};
fn main() { fn main() {
swc_common::GLOBALS.set(&swc_common::Globals::new(), || { let cm: Lrc<SourceMap> = Default::default();
let cm: Lrc<SourceMap> = Default::default(); let handler = Handler::with_tty_emitter(ColorConfig::Auto, true, false, Some(cm.clone()));
let handler = Handler::with_tty_emitter(ColorConfig::Auto, true, false, Some(cm.clone()));
// Real usage // Real usage
// let fm = cm // let fm = cm
// .load_file(Path::new("test.js")) // .load_file(Path::new("test.js"))
// .expect("failed to load test.js"); // .expect("failed to load test.js");
let fm = cm.new_source_file( let fm = cm.new_source_file(
FileName::Custom("test.js".into()), FileName::Custom("test.js".into()),
"function foo() {}".into(), "function foo() {}".into(),
); );
let lexer = Lexer::new( let lexer = Lexer::new(
Syntax::Es(Default::default()), Syntax::Es(Default::default()),
Default::default(), Default::default(),
StringInput::from(&*fm), StringInput::from(&*fm),
None, None,
); );
let capturing = Capturing::new(lexer); let capturing = Capturing::new(lexer);
let mut parser = Parser::new_from(capturing); let mut parser = Parser::new_from(capturing);
for e in parser.take_errors() { for e in parser.take_errors() {
e.into_diagnostic(&handler).emit(); e.into_diagnostic(&handler).emit();
} }
let _module = parser let _module = parser
.parse_module() .parse_module()
.map_err(|e| e.into_diagnostic(&handler).emit()) .map_err(|e| e.into_diagnostic(&handler).emit())
.expect("Failed to parse module."); .expect("Failed to parse module.");
println!("Tokens: {:?}", parser.input().take()); println!("Tokens: {:?}", parser.input().take());
});
} }

View File

@ -7,40 +7,38 @@ use swc_common::{
use swc_ecma_parser::{lexer::Lexer, Capturing, Parser, StringInput, Syntax}; use swc_ecma_parser::{lexer::Lexer, Capturing, Parser, StringInput, Syntax};
fn main() { fn main() {
swc_common::GLOBALS.set(&swc_common::Globals::new(), || { let cm: Lrc<SourceMap> = Default::default();
let cm: Lrc<SourceMap> = Default::default(); let handler = Handler::with_tty_emitter(ColorConfig::Auto, true, false, Some(cm.clone()));
let handler = Handler::with_tty_emitter(ColorConfig::Auto, true, false, Some(cm.clone()));
// Real usage // Real usage
// let fm = cm // let fm = cm
// .load_file(Path::new("test.js")) // .load_file(Path::new("test.js"))
// .expect("failed to load test.js"); // .expect("failed to load test.js");
let fm = cm.new_source_file( let fm = cm.new_source_file(
FileName::Custom("test.js".into()), FileName::Custom("test.js".into()),
"interface Foo {}".into(), "interface Foo {}".into(),
); );
let lexer = Lexer::new( let lexer = Lexer::new(
Syntax::Typescript(Default::default()), Syntax::Typescript(Default::default()),
Default::default(), Default::default(),
StringInput::from(&*fm), StringInput::from(&*fm),
None, None,
); );
let capturing = Capturing::new(lexer); let capturing = Capturing::new(lexer);
let mut parser = Parser::new_from(capturing); let mut parser = Parser::new_from(capturing);
for e in parser.take_errors() { for e in parser.take_errors() {
e.into_diagnostic(&handler).emit(); e.into_diagnostic(&handler).emit();
} }
let _module = parser let _module = parser
.parse_typescript_module() .parse_typescript_module()
.map_err(|e| e.into_diagnostic(&handler).emit()) .map_err(|e| e.into_diagnostic(&handler).emit())
.expect("Failed to parse module."); .expect("Failed to parse module.");
println!("Tokens: {:?}", parser.input().take()); println!("Tokens: {:?}", parser.input().take());
});
} }

View File

@ -40,7 +40,7 @@
//! #[macro_use] //! #[macro_use]
//! extern crate swc_common; //! extern crate swc_common;
//! extern crate swc_ecma_parser; //! extern crate swc_ecma_parser;
//! use std::sync::Arc; //! use swc_common::sync::Lrc;
//! use swc_common::{ //! use swc_common::{
//! errors::{ColorConfig, Handler}, //! errors::{ColorConfig, Handler},
//! FileName, FilePathMapping, SourceMap, //! FileName, FilePathMapping, SourceMap,
@ -48,43 +48,41 @@
//! use swc_ecma_parser::{lexer::Lexer, Parser, StringInput, Syntax}; //! use swc_ecma_parser::{lexer::Lexer, Parser, StringInput, Syntax};
//! //!
//! fn main() { //! fn main() {
//! swc_common::GLOBALS.set(&swc_common::Globals::new(), || { //! let cm: Lrc<SourceMap> = Default::default();
//! let cm: Arc<SourceMap> = Default::default(); //! let handler =
//! let handler = //! Handler::with_tty_emitter(ColorConfig::Auto, true, false,
//! Handler::with_tty_emitter(ColorConfig::Auto, true, false, //! Some(cm.clone()));
//! Some(cm.clone()));
//! //!
//! // Real usage //! // Real usage
//! // let fm = cm //! // let fm = cm
//! // .load_file(Path::new("test.js")) //! // .load_file(Path::new("test.js"))
//! // .expect("failed to load test.js"); //! // .expect("failed to load test.js");
//! let fm = cm.new_source_file( //! let fm = cm.new_source_file(
//! FileName::Custom("test.js".into()), //! FileName::Custom("test.js".into()),
//! "function foo() {}".into(), //! "function foo() {}".into(),
//! ); //! );
//! let lexer = Lexer::new( //! let lexer = Lexer::new(
//! // We want to parse ecmascript //! // We want to parse ecmascript
//! Syntax::Es(Default::default()), //! Syntax::Es(Default::default()),
//! // JscTarget defaults to es5 //! // JscTarget defaults to es5
//! Default::default(), //! Default::default(),
//! StringInput::from(&*fm), //! StringInput::from(&*fm),
//! None, //! None,
//! ); //! );
//! //!
//! let mut parser = Parser::new_from(lexer); //! let mut parser = Parser::new_from(lexer);
//! //!
//! for e in parser.take_errors() { //! for e in parser.take_errors() {
//! e.into_diagnostic(&handler).emit(); //! e.into_diagnostic(&handler).emit();
//! } //! }
//! //!
//! let _module = parser //! let _module = parser
//! .parse_module() //! .parse_module()
//! .map_err(|mut e| { //! .map_err(|mut e| {
//! // Unrecoverable fatal error occurred //! // Unrecoverable fatal error occurred
//! e.into_diagnostic(&handler).emit() //! e.into_diagnostic(&handler).emit()
//! }) //! })
//! .expect("failed to parser module"); //! .expect("failed to parser module");
//! });
//! } //! }
//! ``` //! ```
//! //!

View File

@ -115,7 +115,7 @@ macro_rules! assert_and_bump {
/// if token has data like string. /// if token has data like string.
macro_rules! eat { macro_rules! eat {
($p:expr, ';') => {{ ($p:expr, ';') => {{
log::trace!("eat(';'): cur={:?}", cur!($p, true)); log::trace!("eat(';'): cur={:?}", cur!($p, false));
$p.input.eat(&Token::Semi) $p.input.eat(&Token::Semi)
|| eof!($p) || eof!($p)
|| is!($p, '}') || is!($p, '}')

View File

@ -0,0 +1 @@
export const Divider = <div/>

View File

@ -0,0 +1,83 @@
{
"type": "Module",
"span": {
"start": 0,
"end": 29,
"ctxt": 0
},
"body": [
{
"type": "ExportDeclaration",
"span": {
"start": 0,
"end": 29,
"ctxt": 0
},
"declaration": {
"type": "VariableDeclaration",
"span": {
"start": 7,
"end": 29,
"ctxt": 0
},
"kind": "const",
"declare": false,
"declarations": [
{
"type": "VariableDeclarator",
"span": {
"start": 13,
"end": 29,
"ctxt": 0
},
"id": {
"type": "Identifier",
"span": {
"start": 13,
"end": 20,
"ctxt": 0
},
"value": "Divider",
"typeAnnotation": null,
"optional": false
},
"init": {
"type": "JSXElement",
"span": {
"start": 23,
"end": 29,
"ctxt": 0
},
"opening": {
"type": "JSXOpeningElement",
"name": {
"type": "Identifier",
"span": {
"start": 24,
"end": 27,
"ctxt": 0
},
"value": "div",
"typeAnnotation": null,
"optional": false
},
"span": {
"start": 23,
"end": 29,
"ctxt": 0
},
"attributes": [],
"selfClosing": true,
"typeArguments": null
},
"children": [],
"closing": null
},
"definite": false
}
]
}
}
],
"interpreter": null
}

View File

@ -21,7 +21,7 @@ semver = { version = "0.9.0", features = ["serde"] }
once_cell = "1.2.0" once_cell = "1.2.0"
st-map = "0.1.2" st-map = "0.1.2"
fxhash = "0.2.1" fxhash = "0.2.1"
dashmap = "=3.5.1" dashmap = "3"
[dev-dependencies] [dev-dependencies]
swc_ecma_codegen = { path = "../codegen" } swc_ecma_codegen = { path = "../codegen" }

View File

@ -20,7 +20,7 @@ swc_ecma_ast = { version = "0.28.0", path ="../ast" }
swc_ecma_utils = { version = "0.17.0", path ="../utils" } swc_ecma_utils = { version = "0.17.0", path ="../utils" }
swc_ecma_parser = { version = "0.33.0", path ="../parser" } swc_ecma_parser = { version = "0.33.0", path ="../parser" }
swc_ecma_visit = { version = "0.13.0", path ="../visit" } swc_ecma_visit = { version = "0.13.0", path ="../visit" }
dashmap = { version = "=3.5.1", optional = true } dashmap = { version = "3", optional = true }
either = "1.5" either = "1.5"
fxhash = "0.2" fxhash = "0.2"
indexmap = "1" indexmap = "1"

View File

@ -153,11 +153,12 @@ impl Fold for Dce<'_> {
return node; return node;
} }
let stmts = node.stmts.fold_with(self); let mut stmts = node.stmts.fold_with(self);
let mut span = node.span; let mut span = node.span;
if stmts.iter().any(|stmt| self.is_marked(stmt.span())) { if self.marking_phase || stmts.iter().any(|stmt| self.is_marked(stmt.span())) {
span = span.apply_mark(self.config.used_mark); span = span.apply_mark(self.config.used_mark);
stmts = self.fold_in_marking_phase(stmts);
} }
BlockStmt { span, stmts } BlockStmt { span, stmts }
@ -170,6 +171,7 @@ impl Fold for Dce<'_> {
if self.marking_phase || self.included.contains(&node.ident.to_id()) { if self.marking_phase || self.included.contains(&node.ident.to_id()) {
node.class.span = node.class.span.apply_mark(self.config.used_mark); node.class.span = node.class.span.apply_mark(self.config.used_mark);
node.class.super_class = self.fold_in_marking_phase(node.class.super_class);
} }
node.fold_children_with(self) node.fold_children_with(self)
@ -307,7 +309,9 @@ impl Fold for Dce<'_> {
if self.marking_phase || self.included.contains(&f.ident.to_id()) { if self.marking_phase || self.included.contains(&f.ident.to_id()) {
f.function.span = f.function.span.apply_mark(self.config.used_mark); f.function.span = f.function.span.apply_mark(self.config.used_mark);
f.function.params = self.fold_in_marking_phase(f.function.params);
f.function.body = self.fold_in_marking_phase(f.function.body); f.function.body = self.fold_in_marking_phase(f.function.body);
return f;
} }
f.fold_children_with(self) f.fold_children_with(self)
@ -445,6 +449,7 @@ impl Fold for Dce<'_> {
} }
// Drop unused imports. // Drop unused imports.
log::debug!("Removing unused import specifiers");
import.specifiers.retain(|s| self.should_include(s)); import.specifiers.retain(|s| self.should_include(s));
if !import.specifiers.is_empty() { if !import.specifiers.is_empty() {
@ -508,13 +513,9 @@ impl Fold for Dce<'_> {
if self.is_marked(node.span) { if self.is_marked(node.span) {
return node; return node;
} }
node.span = node.span.apply_mark(self.config.used_mark); node.span = node.span.apply_mark(self.config.used_mark);
node.arg = self.fold_in_marking_phase(node.arg);
let mut node = node.fold_children_with(self);
if self.is_marked(node.arg.span()) {
node.arg = self.fold_in_marking_phase(node.arg)
}
node node
} }
@ -611,7 +612,6 @@ impl Fold for Dce<'_> {
return var; return var;
} }
log::trace!("VarDecl");
var = var.fold_children_with(self); var = var.fold_children_with(self);
var.decls = var.decls.move_flat_map(|decl| { var.decls = var.decls.move_flat_map(|decl| {
@ -676,6 +676,10 @@ impl Dce<'_> {
T: StmtLike + FoldWith<Self> + Spanned + std::fmt::Debug, T: StmtLike + FoldWith<Self> + Spanned + std::fmt::Debug,
T: for<'any> VisitWith<SideEffectVisitor<'any>> + VisitWith<ImportDetector>, T: for<'any> VisitWith<SideEffectVisitor<'any>> + VisitWith<ImportDetector>,
{ {
if self.marking_phase {
return items.move_map(|item| self.fold_in_marking_phase(item));
}
let old = self.changed; let old = self.changed;
let mut preserved = FxHashSet::default(); let mut preserved = FxHashSet::default();

View File

@ -424,3 +424,34 @@ var load = function(){}
var { progress } = load(); var { progress } = load();
console.log(progress);" console.log(progress);"
); );
noop!(
spack_issue_008,
"class B {
}
class A extends B {
}
console.log('foo');
new A();"
);
noop!(
spack_issue_009,
"
class A {
}
function a() {
return new A();
}
console.log(a, a());
"
);
noop!(
spack_issue_010,
"
class A {}
console.log(new A());
"
);

View File

@ -17,6 +17,7 @@ neon-build = "0.4.0"
[dependencies] [dependencies]
swc = { path = "../" } swc = { path = "../" }
swc_bundler = { path = "../bundler" }
swc_common = { path = "../common", features = ["tty-emitter", "sourcemap"] } swc_common = { path = "../common", features = ["tty-emitter", "sourcemap"] }
swc_ecma_ast = { path = "../ecmascript/ast" } swc_ecma_ast = { path = "../ecmascript/ast" }
swc_ecma_parser = { path = "../ecmascript/parser" } swc_ecma_parser = { path = "../ecmascript/parser" }

View File

@ -3,16 +3,13 @@ use anyhow::{bail, Error};
use fxhash::FxHashMap; use fxhash::FxHashMap;
use neon::prelude::*; use neon::prelude::*;
use serde::Deserialize; use serde::Deserialize;
use spack::{ use spack::resolvers::NodeResolver;
load::Load,
resolve::{NodeResolver, Resolve},
BundleKind,
};
use std::{ use std::{
panic::{catch_unwind, AssertUnwindSafe}, panic::{catch_unwind, AssertUnwindSafe},
sync::Arc, sync::Arc,
}; };
use swc::{config::SourceMapsConfig, Compiler, TransformOutput}; use swc::{config::SourceMapsConfig, Compiler, TransformOutput};
use swc_bundler::{BundleKind, Bundler, Load, Resolve};
struct ConfigItem { struct ConfigItem {
loader: Box<dyn Load>, loader: Box<dyn Load>,
@ -41,23 +38,55 @@ impl Task for BundleTask {
fn perform(&self) -> Result<Self::Output, Self::Error> { fn perform(&self) -> Result<Self::Output, Self::Error> {
let res = catch_unwind(AssertUnwindSafe(|| { let res = catch_unwind(AssertUnwindSafe(|| {
let bundler = spack::Bundler::new( let bundler = Bundler::new(
self.swc.clone(), self.swc.globals(),
self.config self.swc.cm.clone(),
.static_items
.config
.options
.as_ref()
.map(|options| options.clone())
.unwrap_or_else(|| {
serde_json::from_value(serde_json::Value::Object(Default::default()))
.unwrap()
}),
&self.config.resolver,
&self.config.loader, &self.config.loader,
&self.config.resolver,
swc_bundler::Config {
require: true,
external_modules: vec![
"assert",
"buffer",
"child_process",
"console",
"cluster",
"crypto",
"dgram",
"dns",
"events",
"fs",
"http",
"http2",
"https",
"net",
"os",
"path",
"perf_hooks",
"process",
"querystring",
"readline",
"repl",
"stream",
"string_decoder",
"timers",
"tls",
"tty",
"url",
"util",
"v8",
"vm",
"wasi",
"worker",
"zlib",
]
.into_iter()
.map(From::from)
.collect(),
},
); );
let result = bundler.bundle(&self.config.static_items.config)?; let result = bundler.bundle(self.config.static_items.config.entry.clone().into())?;
let result = result let result = result
.into_iter() .into_iter()
@ -134,7 +163,7 @@ pub(crate) fn bundle(mut cx: MethodContext<JsCompiler>) -> JsResult<JsValue> {
let opt = cx.argument::<JsObject>(0)?; let opt = cx.argument::<JsObject>(0)?;
let callback = cx.argument::<JsFunction>(1)?; let callback = cx.argument::<JsFunction>(1)?;
let static_items = neon_serde::from_value(&mut cx, opt.upcast())?; let static_items: StaticConfigItem = neon_serde::from_value(&mut cx, opt.upcast())?;
let loader = opt let loader = opt
.get(&mut cx, "loader")? .get(&mut cx, "loader")?
@ -150,7 +179,15 @@ pub(crate) fn bundle(mut cx: MethodContext<JsCompiler>) -> JsResult<JsValue> {
.unwrap_or_else(|_| { .unwrap_or_else(|_| {
Box::new(spack::loaders::swc::SwcLoader::new( Box::new(spack::loaders::swc::SwcLoader::new(
c.clone(), c.clone(),
Default::default(), static_items
.config
.options
.as_ref()
.cloned()
.unwrap_or_else(|| {
serde_json::from_value(serde_json::Value::Object(Default::default()))
.unwrap()
}),
)) ))
}); });
@ -158,7 +195,7 @@ pub(crate) fn bundle(mut cx: MethodContext<JsCompiler>) -> JsResult<JsValue> {
swc: c.clone(), swc: c.clone(),
config: ConfigItem { config: ConfigItem {
loader, loader,
resolver: Box::new(NodeResolver) as Box<_>, resolver: Box::new(NodeResolver::new()) as Box<_>,
static_items, static_items,
}, },
} }
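The long list above marks every Node.js built-in as an external module, so the bundler does not try to resolve or inline them and leaves imports such as `fs` and `path` untouched. Building an equivalent (shorter) config by hand would look roughly like this sketch (illustrative only):

```rust
// Mark a few Node built-ins as external modules.
let config = swc_bundler::Config {
    require: true,
    external_modules: vec!["fs", "path", "crypto"]
        .into_iter()
        .map(From::from)
        .collect(),
};
```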

View File

@ -16,8 +16,8 @@ it('should respect .swcrc', async () => {
const result = await swc.bundle(path.join(__dirname, '../../tests/spack/config-swcrc/spack.config.js')); const result = await swc.bundle(path.join(__dirname, '../../tests/spack/config-swcrc/spack.config.js'));
expect(result.a).toBeTruthy(); expect(result.a).toBeTruthy();
expect(result.a.code).toContain(`require("./common-`); expect(result.a.code).toContain(`require("./common`);
expect(result.b).toBeTruthy(); expect(result.b).toBeTruthy();
expect(result.b.code).toContain(`require("./common-`); expect(result.b.code).toContain(`require("./common`);
}); });

View File

@ -1,6 +1,6 @@
{ {
"name": "@swc/core", "name": "@swc/core",
"version": "1.2.18", "version": "1.2.19",
"description": "Super-fast alternative for babel", "description": "Super-fast alternative for babel",
"main": "./index.js", "main": "./index.js",
"author": "강동윤 <kdy1997.dev@gmail.com>", "author": "강동윤 <kdy1997.dev@gmail.com>",

View File

@ -12,6 +12,7 @@ edition = "2018"
[dependencies] [dependencies]
swc_atoms = { path = "../atoms" } swc_atoms = { path = "../atoms" }
swc_bundler = { path = "../bundler", features = ["concurrent"] }
swc_common = { path = "../common" } swc_common = { path = "../common" }
swc_ecma_ast = { path = "../ecmascript/ast" } swc_ecma_ast = { path = "../ecmascript/ast" }
swc_ecma_codegen = { path = "../ecmascript/codegen" } swc_ecma_codegen = { path = "../ecmascript/codegen" }
@ -25,18 +26,13 @@ regex = "1"
once_cell = "1" once_cell = "1"
serde = { version = "1", features = ["derive"] } serde = { version = "1", features = ["derive"] }
anyhow = "1" anyhow = "1"
crc = "1.8" dashmap = "3"
dashmap = "=3.5.1"
radix_fmt = "1"
rayon = "1"
log = "0.4.8" log = "0.4.8"
node-resolve = "2.2.0" node-resolve = "2.2.0"
petgraph = "0.5"
fxhash = "0.2.1" fxhash = "0.2.1"
is-macro = "0.1.8" is-macro = "0.1.8"
neon = { version = "0.4.0", features = ["event-handler-api"] } neon = { version = "0.4.0", features = ["event-handler-api"] }
neon-sys = "0.4.0" neon-sys = "0.4.0"
relative-path = "1.2"
[dev-dependencies] [dev-dependencies]
pretty_assertions = "0.6.1" pretty_assertions = "0.6.1"

View File

@ -1,57 +0,0 @@
use crate::Bundler;
use anyhow::{Error, Result};
use dashmap::DashMap;
use std::{collections::HashMap, env};
use swc_atoms::JsWord;
use swc_common::FileName;
use swc_ecma_ast::Expr;
use swc_ecma_parser::{lexer::Lexer, Parser, StringInput};
#[derive(Debug, Default)]
pub(super) struct Cache {
exprs: DashMap<String, Expr>,
}
impl Bundler<'_> {
#[inline]
fn get_or_parse_expr(&self, key: &str, s: String) -> Result<Expr> {
if let Some(v) = self.cache.exprs.get(key) {
return Ok((*v).clone());
}
let cm = self.swc.cm.clone();
let fm = cm.new_source_file(FileName::Anon, s);
let lexer = Lexer::new(
Default::default(),
Default::default(),
StringInput::from(&*fm),
None,
);
let mut parser = Parser::new_from(lexer);
let expr = parser.parse_expr().map_err(|err| {
Error::msg(format!(
"config: failed parse `{}` as expression: (key = `{}`): {:?}",
fm.src, key, err
))
})?;
self.cache.exprs.insert(key.to_string(), *expr.clone());
Ok(*expr)
}
/// Has `NODE_ENV`
pub(super) fn envs(&self) -> Result<HashMap<JsWord, Expr>> {
let mut envs = HashMap::with_capacity(1);
let node_env = env::var("NODE_ENV").unwrap_or_else(|_| "development".to_string());
let v = self.get_or_parse_expr("NODE_ENV", node_env)?;
envs.insert("NODE_ENV".into(), v);
Ok(envs)
}
}

View File

@ -1,466 +0,0 @@
use super::Bundler;
use crate::{
bundler::{
export::{Exports, RawExports},
helpers::Helpers,
import::RawImports,
},
debug::assert_clean,
Id, ModuleId,
};
use anyhow::{Context, Error};
use is_macro::Is;
use rayon::prelude::*;
use std::{
path::{Path, PathBuf},
sync::Arc,
};
use swc_atoms::js_word;
use swc_common::{FileName, Mark, SourceFile, DUMMY_SP};
use swc_ecma_ast::{
Expr, ExprOrSuper, ImportDecl, ImportSpecifier, Invalid, MemberExpr, Module, ModuleDecl,
Program, Str,
};
use swc_ecma_transforms::{
optimization::{simplify::dead_branch_remover, InlineGlobals},
resolver::resolver_with_mark,
};
use swc_ecma_visit::{FoldWith, Node, Visit, VisitWith};
#[cfg(test)]
mod tests;
/// Module after applying transformations.
#[derive(Debug, Clone)]
pub(super) struct TransformedModule {
pub id: ModuleId,
pub fm: Arc<SourceFile>,
pub module: Arc<Module>,
pub imports: Arc<Imports>,
pub exports: Arc<Exports>,
    /// If false, the module will be wrapped with a helper function, just like
    /// webpack.
pub is_es6: bool,
/// Used helpers
pub helpers: Arc<Helpers>,
mark: Mark,
}
impl TransformedModule {
/// Marks applied to bindings
pub fn mark(&self) -> Mark {
self.mark
}
}
#[derive(Debug, Default)]
pub(super) struct Imports {
/// If imported ids are empty, it is a side-effect import.
pub specifiers: Vec<(Source, Vec<Specifier>)>,
}
/// Clone is relatively cheap
#[derive(Debug, Clone, Is)]
pub(super) enum Specifier {
Specific { local: Id, alias: Option<Id> },
Namespace { local: Id },
}
impl Specifier {
pub fn local(&self) -> &Id {
match self {
Specifier::Specific { local, .. } => local,
Specifier::Namespace { local, .. } => local,
}
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub(super) struct Source {
pub is_loaded_synchronously: bool,
pub is_unconditional: bool,
pub module_id: ModuleId,
// Clone is relatively cheap, thanks to string_cache.
pub src: Str,
}
impl Bundler<'_> {
/// Phase 1 (discovery)
///
    /// We apply transforms at this phase to make the cache efficient.
    /// As we cache at this phase, changing a dependency does not invalidate the cache.
pub(super) fn load_transformed(&self, path: Arc<PathBuf>) -> Result<TransformedModule, Error> {
Ok(self.load_transformed_inner(path)?.1)
}
fn load_transformed_inner(
&self,
path: Arc<PathBuf>,
) -> Result<(Arc<PathBuf>, TransformedModule), Error> {
log::trace!("load_transformed: ({})", path.display());
self.swc.run(|| {
if let Some(cached) = self.scope.get_module_by_path(&path) {
return Ok((path, cached.clone()));
}
let (_, fm, module) = self.load(&path).context("Bundler.load failed")?;
let v = self
.transform_module(&path, fm.clone(), module)
.context("failed to transform module")?;
self.scope.store_module(path.clone(), v.clone());
//{
// let code = self
// .swc
// .print(
// &(*v.module).clone().fold_with(&mut HygieneVisualizer),
// fm,
// false,
// false,
// )
// .unwrap()
// .code;
//
// println!(
// "Fully loaded:\n{}\nImports: {:?}\nExports: {:?}\n",
// code, v.imports, v.exports
// );
//}
Ok((path, v))
})
}
fn load(&self, path: &Arc<PathBuf>) -> Result<(ModuleId, Arc<SourceFile>, Module), Error> {
self.swc.run(|| {
let (module_id, _) = self.scope.module_id_gen.gen(path);
let path = Arc::new(path);
let (fm, module) = self
.loader
.load(&path)
.with_context(|| format!("Loader.load({}) failed", path.display()))?;
assert_clean(&module);
Ok((module_id, fm, module))
})
}
fn transform_module(
&self,
path: &Arc<PathBuf>,
fm: Arc<SourceFile>,
mut module: Module,
) -> Result<TransformedModule, Error> {
self.swc.run(|| {
log::trace!("transform_module({})", fm.name);
module = module.fold_with(&mut resolver_with_mark(self.top_level_mark));
module = module.fold_with(&mut InlineGlobals {
envs: self.envs()?,
globals: Default::default(),
});
module = module.fold_with(&mut dead_branch_remover());
let (id, mark) = self.scope.module_id_gen.gen(path);
// {
// let code = self
// .swc
// .print(
// &module.clone().fold_with(&mut HygieneVisualizer),
// SourceMapsConfig::Bool(false),
// None,
// false,
// )
// .unwrap()
// .code;
//
// println!("Resolved:\n{}\n\n", code);
// }
let imports = self.extract_import_info(path, &mut module, mark);
// {
// let code = self
// .swc
// .print(
// &module.clone().fold_with(&mut HygieneVisualizer),
// SourceMapsConfig::Bool(false),
// None,
// false,
// )
// .unwrap()
// .code;
//
// println!("After imports:\n{}\n", code,);
// }
let exports = self.extract_export_info(&module);
// TODO: Exclude resolver (for performance)
let (module, (imports, exports)) = rayon::join(
|| -> Result<_, Error> {
self.swc.run(|| {
// Process module
let config = self
.swc
.config_for_file(&self.swc_options, &fm.name)
.context("failed to parse .swcrc")?;
let program = self.swc.transform(
Program::Module(module),
config.external_helpers,
config.pass,
);
// {
// let code = self
// .swc
// .print(
// &program.clone().fold_with(&mut HygieneVisualizer),
// SourceMapsConfig::Bool(false),
// None,
// false,
// )
// .unwrap()
// .code;
//
// println!("loaded using swc:\n{}\n\n", code);
// }
match program {
Program::Module(module) => Ok(module),
_ => unreachable!(),
}
})
},
|| {
let p = match fm.name {
FileName::Real(ref p) => p,
_ => unreachable!("{} module in spack", fm.name),
};
rayon::join(
|| self.swc.run(|| self.load_imports(&p, imports)),
|| self.swc.run(|| self.load_exports(&p, exports)),
)
},
);
let imports = imports?;
let exports = exports?;
let mut module = module?;
let is_es6 = {
let mut v = Es6ModuleDetector {
forced_es6: false,
found_other: false,
};
module.visit_with(&Invalid { span: DUMMY_SP } as _, &mut v);
v.forced_es6 || !v.found_other
};
if is_es6 {
module = self.drop_unused(fm.clone(), module, None);
}
let module = Arc::new(module);
Ok(TransformedModule {
id,
fm,
module,
imports: Arc::new(imports),
exports: Arc::new(exports),
is_es6,
helpers: Default::default(),
mark,
})
})
}
fn load_exports(&self, base: &Path, raw: RawExports) -> Result<Exports, Error> {
self.swc.run(|| {
log::trace!("load_exports({})", base.display());
let mut exports = Exports::default();
exports.pure_constants = raw.pure_constants;
let items = raw
.items
.into_par_iter()
.map(|(src, ss)| -> Result<_, Error> {
let info = match src {
Some(src) => {
let path = self.resolve(base, &src.value)?;
Some((self.load_transformed_inner(path)?, src))
}
None => None,
};
Ok((info, ss))
})
.collect::<Vec<_>>();
for res in items {
let (info, specifiers): (Option<((Arc<PathBuf>, TransformedModule), Str)>, _) =
res?;
match info {
None => exports.items.extend(specifiers),
Some(info) => exports
.reexports
.entry(Source {
is_loaded_synchronously: true,
is_unconditional: false,
module_id: (info.0).1.id,
src: info.1,
})
.or_default()
.extend(specifiers),
}
}
Ok(exports)
})
}
/// Load dependencies
fn load_imports(&self, base: &Path, info: RawImports) -> Result<Imports, Error> {
self.swc.run(|| {
log::trace!("load_imports({})", base.display());
let mut merged = Imports::default();
let RawImports {
imports,
lazy_imports,
dynamic_imports,
} = info;
let loaded = imports
.into_par_iter()
.map(|v| (v, false, true))
.chain(lazy_imports.into_par_iter().map(|v| (v, false, false)))
.chain(dynamic_imports.into_par_iter().map(|src| {
(
ImportDecl {
span: src.span,
specifiers: vec![],
src,
type_only: false,
},
true,
false,
)
}))
.map(|(decl, dynamic, unconditional)| -> Result<_, Error> {
//
let path = self.resolve(base, &decl.src.value)?;
let res = self.load_transformed_inner(path)?;
Ok((res, decl, dynamic, unconditional))
})
.collect::<Vec<_>>();
for res in loaded {
// TODO: Report error and proceed instead of returning an error
let ((path, _res), decl, is_dynamic, is_unconditional) = res?;
if let Some(src) = self.scope.get_module_by_path(&path) {
let src = Source {
is_loaded_synchronously: !is_dynamic,
is_unconditional,
module_id: src.id,
src: decl.src,
};
// TODO: Handle rename
let mut specifiers = vec![];
for s in decl.specifiers {
match s {
ImportSpecifier::Named(s) => specifiers.push(Specifier::Specific {
local: s.local.into(),
alias: s.imported.map(From::from),
}),
ImportSpecifier::Default(s) => specifiers.push(Specifier::Specific {
local: s.local.into(),
alias: Some(Id::new(js_word!("default"), s.span.ctxt())),
}),
ImportSpecifier::Namespace(s) => {
specifiers.push(Specifier::Namespace {
local: s.local.into(),
});
}
}
}
merged.specifiers.push((src, specifiers));
}
}
Ok(merged)
})
}
}
struct Es6ModuleDetector {
    /// If an import or export statement is detected, it's an ES6 module
    /// regardless of the rest of the code.
forced_es6: bool,
/// True if other module system is detected.
found_other: bool,
}
impl Visit for Es6ModuleDetector {
fn visit_member_expr(&mut self, e: &MemberExpr, _: &dyn Node) {
e.obj.visit_with(e as _, self);
if e.computed {
e.prop.visit_with(e as _, self);
}
match &e.obj {
ExprOrSuper::Expr(e) => {
match &**e {
Expr::Ident(i) => {
// TODO: Check syntax context (Check if marker is the global mark)
if i.sym == *"module" {
self.found_other = true;
}
if i.sym == *"exports" {
self.found_other = true;
}
}
_ => {}
}
}
_ => {}
}
//
}
fn visit_module_decl(&mut self, decl: &ModuleDecl, _: &dyn Node) {
match decl {
ModuleDecl::Import(_)
| ModuleDecl::ExportDecl(_)
| ModuleDecl::ExportNamed(_)
| ModuleDecl::ExportDefaultDecl(_)
| ModuleDecl::ExportDefaultExpr(_)
| ModuleDecl::ExportAll(_) => {
self.forced_es6 = true;
}
ModuleDecl::TsImportEquals(_) => {}
ModuleDecl::TsExportAssignment(_) => {}
ModuleDecl::TsNamespaceExport(_) => {}
}
}
}

View File

@ -1,8 +0,0 @@
use crate::bundler::tests::test_bundler;
#[test]
fn basic() {
test_bundler(|t| {
t.parse("");
});
}

View File

@ -1,154 +0,0 @@
use self::{config::Cache, scope::Scope};
use crate::{
bundler::load_transformed::TransformedModule,
config::{Config, EntryConfig},
load::Load,
resolve::Resolve,
ModuleId,
};
use anyhow::{Context, Error};
use fxhash::FxHashMap;
use rayon::prelude::*;
use std::{path::PathBuf, sync::Arc};
use swc::config::ModuleConfig;
use swc_common::{Mark, DUMMY_SP};
use swc_ecma_ast::Module;
mod chunk;
mod config;
mod export;
mod helpers;
mod import;
mod load_transformed;
mod rename;
mod scope;
#[cfg(test)]
mod tests;
mod usage_analysis;
pub struct Bundler<'a> {
cache: Cache,
/// Javascript compiler.
swc: Arc<swc::Compiler>,
swc_options: swc::config::Options,
used_mark: Mark,
top_level_mark: Mark,
resolver: &'a dyn Resolve,
loader: &'a dyn Load,
scope: Scope,
}
#[derive(Debug)]
pub enum BundleKind {
/// User-provided entry
Named { name: String },
/// Auto-generated entry (created by import expression)
Dynamic,
/// A lazy-loaded shared library
Lib { name: String },
}
/// Built bundle
#[derive(Debug)]
pub struct Bundle {
pub kind: BundleKind,
pub id: ModuleId,
/// Merged module
pub module: Module,
}
impl<'a> Bundler<'a> {
pub fn new(
swc: Arc<swc::Compiler>,
mut swc_options: swc::config::Options,
resolver: &'a dyn Resolve,
loader: &'a dyn Load,
) -> Self {
let used_mark = swc.run(|| Mark::fresh(Mark::root()));
log::info!("Used mark: {:?}", DUMMY_SP.apply_mark(used_mark).ctxt());
let top_level_mark = swc.run(|| Mark::fresh(Mark::root()));
log::info!(
"top-level mark: {:?}",
DUMMY_SP.apply_mark(top_level_mark).ctxt()
);
swc_options.disable_fixer = true;
swc_options.disable_hygiene = true;
swc_options.global_mark = Some(top_level_mark);
if swc_options.config.is_none() {
swc_options.config = Some(Default::default());
}
if let Some(c) = &mut swc_options.config {
// Preserve es6 modules
c.module = Some(ModuleConfig::Es6);
}
Bundler {
swc,
cache: Default::default(),
swc_options,
used_mark,
top_level_mark,
resolver,
loader,
scope: Default::default(),
}
}
pub fn bundle(&self, config: &Config) -> Result<Vec<Bundle>, Error> {
let entries = {
let mut map = FxHashMap::default();
match &config.entry {
EntryConfig::File(f) => {
map.insert(f.clone(), PathBuf::from(f.clone()));
}
EntryConfig::Multiple(files) => {
for f in files {
map.insert(f.clone(), f.clone().into());
}
}
EntryConfig::Files(files) => map = files.clone(),
}
map
};
let results = entries
.into_par_iter()
.map(|(name, path)| -> Result<_, Error> {
let path = self.resolve(&config.working_dir, &path.to_string_lossy())?;
let res = self
.load_transformed(path)
.context("load_transformed failed")?;
Ok((name, res))
})
.collect::<Vec<_>>();
// We collect at here to handle dynamic imports
// TODO: Handle dynamic imports
let local = self.swc.run(|| -> Result<_, Error> {
let mut output = FxHashMap::default();
for res in results {
let (name, m): (String, TransformedModule) = res?;
output.insert(name, m);
}
Ok(output)
})?;
let bundles = self.chunk(local)?;
Ok(self.finalize(bundles)?)
}
pub fn swc(&self) -> &swc::Compiler {
&self.swc
}
}

View File

@ -1,276 +0,0 @@
use crate::{Bundle, BundleKind, Bundler};
use anyhow::{Context, Error};
use crc::{crc64, crc64::Digest, Hasher64};
use fxhash::FxHashMap;
use relative_path::RelativePath;
use std::{
io,
path::{Path, PathBuf},
};
use swc::config::Options;
use swc_common::{util::move_map::MoveMap, FileName, Span};
use swc_ecma_ast::{ImportDecl, Module, Str};
use swc_ecma_codegen::{text_writer::WriteJs, Emitter};
use swc_ecma_transforms::noop_fold_type;
use swc_ecma_visit::{Fold, FoldWith};
impl Bundler<'_> {
pub(super) fn finalize(&self, bundles: Vec<Bundle>) -> Result<Vec<Bundle>, Error> {
let mut new = Vec::with_capacity(bundles.len());
let mut renamed = FxHashMap::default();
for mut bundle in bundles {
match bundle.kind {
BundleKind::Named { .. } => {
// Inject helpers
let helpers = self
.scope
.get_module(bundle.id)
.expect("module should exist at this point")
.helpers;
self.swc
.run_transform(true, || helpers.append_to(&mut bundle.module.body));
new.push(Bundle { ..bundle });
}
BundleKind::Lib { name } => {
let hash = self.calc_hash(&bundle.module)?;
let mut new_name = PathBuf::from(name);
let key = new_name.clone();
let file_name = new_name
.file_name()
.map(|path| -> PathBuf {
let path = Path::new(path);
let ext = path.extension();
if let Some(ext) = ext {
return format!(
"{}-{}.{}",
path.file_stem().unwrap().to_string_lossy(),
hash,
ext.to_string_lossy()
)
.into();
}
return format!(
"{}-{}",
path.file_stem().unwrap().to_string_lossy(),
hash,
)
.into();
})
.expect("javascript file should have name");
new_name.pop();
new_name = new_name.join(file_name.clone());
renamed.insert(key, new_name.to_string_lossy().to_string());
new.push(Bundle {
kind: BundleKind::Named {
name: file_name.display().to_string(),
},
..bundle
})
}
_ => new.push(bundle),
}
}
new = new.move_map(|bundle| {
let path = match self.scope.get_module(bundle.id).unwrap().fm.name {
FileName::Real(ref v) => v.clone(),
_ => {
log::error!("Cannot rename: not a real file");
return bundle;
}
};
let module = {
// Change imports
let mut v = Renamer {
bundler: self,
path: &path,
renamed: &renamed,
};
bundle.module.fold_with(&mut v)
};
let module = self.swc.run(|| {
let opts = Options {
..self.swc_options.clone()
};
let file_name = FileName::Real(path);
let config = self.swc.read_config(&opts, &file_name).unwrap_or_default();
let mut module_pass = swc::config::ModuleConfig::build(
self.swc.cm.clone(),
self.top_level_mark,
config.module,
);
module.fold_with(&mut module_pass)
});
Bundle { module, ..bundle }
});
Ok(new)
}
fn calc_hash(&self, m: &Module) -> Result<String, Error> {
let digest = crc64::Digest::new(crc64::ECMA);
let mut buf = Hasher { digest };
{
let mut emitter = Emitter {
cfg: Default::default(),
cm: self.swc.cm.clone(),
comments: None,
wr: Box::new(&mut buf) as Box<dyn WriteJs>,
handlers: Box::new(Handlers),
};
emitter
.emit_module(&m)
.context("failed to emit module to calculate hash")?;
}
//
let result = buf.digest.sum64();
Ok(radix_fmt::radix(result, 36).to_string())
}
}
/// Import renamer. This pass changes import path.
struct Renamer<'a, 'b> {
bundler: &'a Bundler<'b>,
path: &'a Path,
renamed: &'a FxHashMap<PathBuf, String>,
}
noop_fold_type!(Renamer<'_, '_>);
impl Fold for Renamer<'_, '_> {
fn fold_import_decl(&mut self, import: ImportDecl) -> ImportDecl {
let resolved = match self.bundler.resolve(self.path, &import.src.value) {
Ok(v) => v,
Err(_) => return import,
};
if let Some(v) = self.renamed.get(&*resolved) {
// We use parent because RelativePath uses ../common-[hash].js
// if we use `entry-a.js` as a base.
//
// entry-a.js
// common.js
let base = self
.path
.parent()
.unwrap_or(self.path)
.as_os_str()
.to_string_lossy();
let base = RelativePath::new(&*base);
let v = base.relative(&*v);
let value = v.as_str();
return ImportDecl {
src: Str {
value: if value.starts_with(".") {
value.into()
} else {
format!("./{}", value).into()
},
..import.src
},
..import
};
}
import
}
}
impl swc_ecma_codegen::Handlers for Handlers {}
struct Handlers;
struct Hasher {
digest: Digest,
}
impl Hasher {
fn w(&mut self, s: &str) {
self.digest.write(s.as_bytes());
}
}
impl WriteJs for &mut Hasher {
fn increase_indent(&mut self) -> io::Result<()> {
Ok(())
}
fn decrease_indent(&mut self) -> io::Result<()> {
Ok(())
}
fn write_semi(&mut self) -> io::Result<()> {
self.w(";");
Ok(())
}
fn write_space(&mut self) -> io::Result<()> {
self.w(" ");
Ok(())
}
fn write_keyword(&mut self, _: Option<Span>, s: &'static str) -> io::Result<()> {
self.w(s);
Ok(())
}
fn write_operator(&mut self, s: &str) -> io::Result<()> {
self.w(s);
Ok(())
}
fn write_param(&mut self, s: &str) -> io::Result<()> {
self.w(s);
Ok(())
}
fn write_property(&mut self, s: &str) -> io::Result<()> {
self.w(s);
Ok(())
}
fn write_line(&mut self) -> io::Result<()> {
self.w("\n");
Ok(())
}
fn write_lit(&mut self, _: Span, s: &str) -> io::Result<()> {
self.w(s);
Ok(())
}
fn write_comment(&mut self, _: Span, s: &str) -> io::Result<()> {
self.w(s);
Ok(())
}
fn write_str_lit(&mut self, _: Span, s: &str) -> io::Result<()> {
self.w(s);
Ok(())
}
fn write_str(&mut self, s: &str) -> io::Result<()> {
self.w(s);
Ok(())
}
fn write_symbol(&mut self, _: Span, s: &str) -> io::Result<()> {
self.w(s);
Ok(())
}
fn write_punct(&mut self, s: &'static str) -> io::Result<()> {
self.w(s);
Ok(())
}
}

View File

@ -1,30 +0,0 @@
use crate::{bundler::load_transformed::TransformedModule, id::ModuleIdGenerator, ModuleId};
use dashmap::DashMap;
use fxhash::FxBuildHasher;
use std::{path::PathBuf, sync::Arc};
#[derive(Debug, Default)]
pub(super) struct Scope {
pub module_id_gen: ModuleIdGenerator,
/// Phase 1 cache
modules: DashMap<ModuleId, TransformedModule, FxBuildHasher>,
}
impl Scope {
    /// Stores module information. The information should contain only
    /// information obtained from the module itself. In other words, it should not
    /// contain information from a dependency.
pub fn store_module(&self, _path: Arc<PathBuf>, info: TransformedModule) {
self.modules.insert(info.id, info);
}
pub fn get_module_by_path(&self, path: &Arc<PathBuf>) -> Option<TransformedModule> {
let (id, _) = self.module_id_gen.gen(path);
self.get_module(id)
}
pub fn get_module(&self, id: ModuleId) -> Option<TransformedModule> {
Some(self.modules.get(&id)?.value().clone())
}
}

View File

@ -1,78 +0,0 @@
//! Utilities for testing.
use super::Bundler;
use crate::{loaders::swc::SwcLoader, resolve::NodeResolver, util::HygieneRemover};
use pretty_assertions::assert_eq;
use std::{path::PathBuf, sync::Arc};
use swc_common::FileName;
use swc_ecma_ast::*;
use swc_ecma_parser::{EsConfig, Syntax};
use swc_ecma_utils::drop_span;
use swc_ecma_visit::FoldWith;
pub struct Tester<'a> {
pub bundler: Bundler<'a>,
}
impl<'a> Tester<'a> {
pub fn parse(&self, s: &str) -> Module {
let fm = self
.bundler
.swc
.cm
.new_source_file(FileName::Real(PathBuf::from("input.js")), s.into());
let p = self
.bundler
.swc
.parse_js(
fm,
Default::default(),
Syntax::Es(EsConfig {
dynamic_import: true,
..Default::default()
}),
true,
true,
)
.expect("failed to parse");
match p {
Program::Module(m) => m,
Program::Script(_) => unreachable!(),
}
}
pub fn assert_eq(&self, m: &Module, expected: &str) {
let expected = self.parse(expected);
let m = drop_span(m.clone().fold_with(&mut HygieneRemover));
let expected = drop_span(expected);
assert_eq!(m, expected)
}
}
pub fn test_bundler<F>(op: F)
where
F: FnOnce(&mut Tester),
{
testing::run_test2(true, |cm, handler| {
let compiler = Arc::new(swc::Compiler::new(cm.clone(), Arc::new(handler)));
let loader = SwcLoader::new(compiler.clone(), Default::default());
let bundler = Bundler::new(
compiler.clone(),
swc::config::Options {
swcrc: true,
..Default::default()
},
&NodeResolver,
&loader,
);
let mut t = Tester { bundler };
op(&mut t);
Ok(())
})
.expect("WTF?");
}

View File

@ -8,6 +8,7 @@ use fxhash::FxHashMap;
use serde::Deserialize; use serde::Deserialize;
use std::{fmt, marker::PhantomData, path::PathBuf}; use std::{fmt, marker::PhantomData, path::PathBuf};
use string_enum::StringEnum; use string_enum::StringEnum;
use swc_common::FileName;
mod module; mod module;
mod optimization; mod optimization;
@ -56,7 +57,7 @@ impl Default for Mode {
} }
} }
#[derive(Debug, Deserialize)] #[derive(Debug, Clone, Deserialize)]
#[serde(untagged, rename = "Entry")] #[serde(untagged, rename = "Entry")]
pub enum EntryConfig { pub enum EntryConfig {
File(String), File(String),
@ -64,6 +65,36 @@ pub enum EntryConfig {
Files(FxHashMap<String, PathBuf>), Files(FxHashMap<String, PathBuf>),
} }
impl From<EntryConfig> for FxHashMap<String, FileName> {
fn from(c: EntryConfig) -> Self {
let mut m = FxHashMap::default();
match c {
EntryConfig::File(f) => {
let path = PathBuf::from(f);
let file_name = path
.file_name()
.expect("entry must be a file, instead of a directory");
m.insert(file_name.to_string_lossy().into(), FileName::Real(path));
}
EntryConfig::Multiple(files) => {
for f in files {
let path = PathBuf::from(f);
let file_name = path
.file_name()
.expect("entry must be a file, instead of a directory");
m.insert(file_name.to_string_lossy().into(), FileName::Real(path));
}
}
EntryConfig::Files(f) => {
return f.into_iter().map(|(k, v)| (k, FileName::Real(v))).collect()
}
}
m
}
}
pub struct JsCallback<T, Ret> { pub struct JsCallback<T, Ret> {
_f: Box<dyn Send + Sync + Fn(T) -> Ret>, _f: Box<dyn Send + Sync + Fn(T) -> Ret>,
_phantom: PhantomData<(T, Ret)>, _phantom: PhantomData<(T, Ret)>,

View File

@ -1,30 +0,0 @@
use swc_common::{Span, SyntaxContext, DUMMY_SP};
use swc_ecma_ast::{Ident, Invalid};
use swc_ecma_visit::{Fold, Node, Visit, VisitWith};
pub(crate) struct HygieneVisualizer;
impl Fold for HygieneVisualizer {
fn fold_ident(&mut self, node: Ident) -> Ident {
Ident {
sym: format!("{}{:?}", node.sym, node.span.ctxt()).into(),
..node
}
}
}
pub(crate) struct AssertClean;
impl Visit for AssertClean {
fn visit_span(&mut self, s: &Span, _: &dyn Node) {
debug_assert_eq!(
s.ctxt(),
SyntaxContext::empty(),
"Hygiene info should be clean at this moment"
);
}
}
pub(crate) fn assert_clean<T: VisitWith<AssertClean>>(m: &T) {
m.visit_with(&Invalid { span: DUMMY_SP } as _, &mut AssertClean)
}

View File

@ -3,17 +3,6 @@
#[cfg(test)] #[cfg(test)]
extern crate test; extern crate test;
pub use self::{
bundler::{Bundle, BundleKind, Bundler},
id::{Id, ModuleId, QualifiedId},
};
mod bundler;
pub mod config; pub mod config;
mod debug;
mod id;
pub mod load;
pub mod loaders; pub mod loaders;
mod normalize; pub mod resolvers;
pub mod resolve;
mod util;

View File

@ -1,20 +0,0 @@
use anyhow::Error;
use std::{path::Path, sync::Arc};
use swc_common::SourceFile;
use swc_ecma_ast::Module;
pub trait Load: Send + Sync {
fn load(&self, path: &Path) -> Result<(Arc<SourceFile>, Module), Error>;
}
impl<T: ?Sized + Load> Load for Box<T> {
fn load(&self, path: &Path) -> Result<(Arc<SourceFile>, Module), Error> {
T::load(self, path)
}
}
impl<'a, T: ?Sized + Load> Load for &'a T {
fn load(&self, path: &Path) -> Result<(Arc<SourceFile>, Module), Error> {
T::load(self, path)
}
}

View File

@ -1,11 +1,7 @@
use crate::load::Load;
use anyhow::{Context as _, Error}; use anyhow::{Context as _, Error};
use neon::prelude::*; use neon::prelude::*;
use std::sync::{mpsc::channel, Arc};
use std::{ use swc_bundler::Load;
path::Path,
sync::{mpsc::channel, Arc},
};
use swc_common::{FileName, SourceFile}; use swc_common::{FileName, SourceFile};
use swc_ecma_ast::{Module, Program}; use swc_ecma_ast::{Module, Program};
@ -16,8 +12,8 @@ pub struct NeonLoader {
} }
impl Load for NeonLoader { impl Load for NeonLoader {
fn load(&self, p: &Path) -> Result<(Arc<SourceFile>, Module), Error> { fn load(&self, name: &FileName) -> Result<(Arc<SourceFile>, Module), Error> {
let path = p.to_string_lossy().to_string(); let path = name.to_string();
let (tx, rx) = channel(); let (tx, rx) = channel();
self.handler.schedule_with(move |cx, _value, f| { self.handler.schedule_with(move |cx, _value, f| {
@ -58,10 +54,7 @@ impl Load for NeonLoader {
.context("failed to receive output from js loader")?; .context("failed to receive output from js loader")?;
let code = code?; let code = code?;
let fm = self let fm = self.swc.cm.new_source_file(name.clone(), code);
.swc
.cm
.new_source_file(FileName::Real(p.to_path_buf()), code);
let config = self.swc.config_for_file( let config = self.swc.config_for_file(
&swc::config::Options { &swc::config::Options {

View File

@ -1,7 +1,7 @@
use crate::load::Load; use anyhow::{bail, Context, Error};
use anyhow::Error; use std::sync::Arc;
use std::{path::Path, sync::Arc}; use swc_bundler::Load;
use swc_common::SourceFile; use swc_common::{FileName, SourceFile};
use swc_ecma_ast::{Module, Program}; use swc_ecma_ast::{Module, Program};
use swc_ecma_parser::JscTarget; use swc_ecma_parser::JscTarget;
@ -23,7 +23,28 @@ impl SwcLoader {
v.module = None; v.module = None;
v.minify = Some(false); v.minify = Some(false);
v.jsc.target = JscTarget::Es2019; v.jsc.target = JscTarget::Es2020;
if v.jsc.transform.is_none() {
v.jsc.transform = Some(Default::default());
}
let mut transform = v.jsc.transform.as_mut().unwrap();
if transform.optimizer.is_none() {
transform.optimizer = Some(Default::default());
}
let mut opt = transform.optimizer.as_mut().unwrap();
if opt.globals.is_none() {
opt.globals = Some(Default::default());
}
// Always inline NODE_ENV
opt.globals
.as_mut()
.unwrap()
.envs
.insert("NODE_ENV".to_string());
} }
SwcLoader { compiler, options } SwcLoader { compiler, options }
@ -31,35 +52,40 @@ impl SwcLoader {
} }
impl Load for SwcLoader { impl Load for SwcLoader {
fn load(&self, path: &Path) -> Result<(Arc<SourceFile>, Module), Error> { fn load(&self, name: &FileName) -> Result<(Arc<SourceFile>, Module), Error> {
self.compiler.run(|| { log::debug!("JsLoader.load({})", name);
log::debug!("JsLoader.load({})", path.display());
let fm = self.compiler.cm.load_file(path)?; let fm = self
.compiler
.cm
.load_file(match name {
FileName::Real(v) => &v,
_ => bail!("swc-loader only accepts path. Got `{}`", name),
})
.with_context(|| format!("failed to load file `{}`", name))?;
log::trace!("JsLoader.load: loaded"); log::trace!("JsLoader.load: loaded");
let config = self.compiler.config_for_file(&self.options, &fm.name)?; let config = self.compiler.config_for_file(&self.options, &fm.name)?;
log::trace!("JsLoader.load: loaded config"); log::trace!("JsLoader.load: loaded config");
// We run transform at this phase to strip out unused dependencies. // We run transform at this phase to strip out unused dependencies.
// //
// Note that we don't apply compat transform at loading phase. // Note that we don't apply compat transform at loading phase.
let program = let program =
self.compiler self.compiler
.parse_js(fm.clone(), JscTarget::Es2019, config.syntax, true, true)?; .parse_js(fm.clone(), JscTarget::Es2019, config.syntax, true, true)?;
log::trace!("JsLoader.load: parsed"); log::trace!("JsLoader.load: parsed");
let program = self.compiler.transform(program, true, config.pass); let program = self.compiler.transform(program, true, config.pass);
log::trace!("JsLoader.load: applied transforms"); log::trace!("JsLoader.load: applied transforms");
match program { match program {
Program::Module(module) => Ok((fm, module)), Program::Module(module) => Ok((fm, module)),
_ => unreachable!(), _ => unreachable!(),
} }
})
} }
} }

View File

@ -1 +0,0 @@

View File

@ -1,45 +0,0 @@
use anyhow::{Context, Error};
use std::path::{Path, PathBuf};
pub trait Resolve: Send + Sync {
///
/// Returned filename will be hashed if possible and used to generate module
/// id.
fn resolve(&self, base: &Path, import: &str) -> Result<PathBuf, Error>;
}
impl<T: ?Sized + Resolve> Resolve for Box<T> {
fn resolve(&self, base: &Path, import: &str) -> Result<PathBuf, Error> {
T::resolve(self, base, import)
}
}
impl<'a, T: ?Sized + Resolve> Resolve for &'a T {
fn resolve(&self, base: &Path, import: &str) -> Result<PathBuf, Error> {
T::resolve(self, base, import)
}
}
pub struct NodeResolver;
impl Resolve for NodeResolver {
fn resolve(&self, base: &Path, import: &str) -> Result<PathBuf, Error> {
let base_dir = base
.parent()
.map(Path::to_path_buf)
.unwrap_or_else(|| PathBuf::from("."));
Ok(node_resolve::Resolver::new()
.with_extensions(&[".ts", ".tsx", ".js", ".jsx", ".json", ".node"])
.with_main_fields(&["swc-main", "esnext", "main"])
.with_basedir(base_dir.clone())
.resolve(import)
.with_context(|| {
format!(
"node-resolve failed; basedir = {}, import = {}",
base_dir.display(),
import
)
})?)
}
}
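
Like Load, the Resolve trait now lives in swc_bundler and its definition is not shown in this commit; the NodeResolver implementation that replaces this file implies it works on FileName values instead of paths. A hedged sketch of the implied shape:

use anyhow::Error;
use swc_common::FileName;

pub trait Resolve: Send + Sync {
    // `base` identifies the importing module; `module_specifier` is the
    // literal import text. The returned FileName identifies the dependency.
    fn resolve(&self, base: &FileName, module_specifier: &str) -> Result<FileName, Error>;
}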

View File

@ -0,0 +1,43 @@
use anyhow::{bail, Context, Error};
use std::path::{Path, PathBuf};
use swc_bundler::Resolve;
use swc_common::FileName;
pub struct NodeResolver(node_resolve::Resolver);
impl NodeResolver {
pub fn new() -> Self {
Self(
node_resolve::Resolver::new()
.with_extensions(&[".ts", ".tsx", ".js", ".jsx", ".json", ".node"])
.with_main_fields(&["swc-main", "esnext", "main"]),
)
}
}
impl Resolve for NodeResolver {
fn resolve(&self, base: &FileName, module_specifier: &str) -> Result<FileName, Error> {
let base = match base {
FileName::Real(v) => v,
_ => bail!("node-resolver supports only files"),
};
let base_dir = base
.parent()
.map(Path::to_path_buf)
.unwrap_or_else(|| PathBuf::from("."));
let path = self
.0
.with_basedir(base_dir.clone())
.resolve(module_specifier)
.with_context(|| {
format!(
"node-resolver failed; basedir = {}, import = {}",
base_dir.display(),
module_specifier
)
})?;
Ok(FileName::Real(path))
}
}
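
For context, a minimal sketch of wiring a Load implementation and this resolver into the bundler, mirroring the test-harness usage later in this commit; `cm`, `loader`, and `entries` are assumed to come from the surrounding setup, and only the two Config fields shown in the diff are used:

use swc_bundler::{Bundler, Config};
use swc_common::GLOBALS;

GLOBALS.with(|globals| {
    let bundler = Bundler::new(
        globals,             // &swc_common::Globals
        cm.clone(),          // Lrc<SourceMap>
        &loader,             // any swc_bundler::Load implementation
        NodeResolver::new(), // the resolver defined above
        Config {
            require: true,
            external_modules: vec!["fs".into(), "path".into()],
        },
    );
    // `entries` maps a bundle name to its entry file, e.g.
    // ("main".to_string(), FileName::Real("assets/main.js".into())).
    let bundles = bundler.bundle(entries).expect("failed to bundle");
    log::info!("Bundled as {} modules", bundles.len());
});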

View File

@ -1,10 +0,0 @@
use swc_common::{Span, SyntaxContext};
use swc_ecma_visit::Fold;
pub struct HygieneRemover;
impl Fold for HygieneRemover {
fn fold_span(&mut self, s: Span) -> Span {
s.with_ctxt(SyntaxContext::empty())
}
}

View File

@ -3,11 +3,7 @@
extern crate test; extern crate test;
use fxhash::FxHashMap; use fxhash::FxHashMap;
use spack::{ use spack::{loaders::swc::SwcLoader, resolvers::NodeResolver};
config::{Config, EntryConfig},
loaders::swc::SwcLoader,
BundleKind, Bundler,
};
use std::{ use std::{
env, env,
fs::{create_dir_all, read_dir}, fs::{create_dir_all, read_dir},
@ -16,6 +12,8 @@ use std::{
sync::Arc, sync::Arc,
}; };
use swc::config::SourceMapsConfig; use swc::config::SourceMapsConfig;
use swc_bundler::{BundleKind, Bundler, Config};
use swc_common::{FileName, GLOBALS};
use test::{ use test::{
test_main, DynTestFn, Options, ShouldPanic::No, TestDesc, TestDescAndFn, TestName, TestType, test_main, DynTestFn, Options, ShouldPanic::No, TestDesc, TestDescAndFn, TestName, TestType,
}; };
@ -93,7 +91,10 @@ fn reference_tests(tests: &mut Vec<TestDescAndFn>, errors: bool) -> Result<(), i
}) })
.map(|e| -> Result<_, io::Error> { .map(|e| -> Result<_, io::Error> {
let e = e?; let e = e?;
Ok((e.file_name().to_string_lossy().to_string(), e.path())) Ok((
e.file_name().to_string_lossy().to_string(),
FileName::Real(e.path()),
))
}) })
.collect::<Result<FxHashMap<_, _>, _>>()?; .collect::<Result<FxHashMap<_, _>, _>>()?;
@ -116,73 +117,103 @@ fn reference_tests(tests: &mut Vec<TestDescAndFn>, errors: bool) -> Result<(), i
eprintln!("\n\n========== Running reference test {}\n", dir_name); eprintln!("\n\n========== Running reference test {}\n", dir_name);
testing::run_test2(false, |cm, handler| { testing::run_test2(false, |cm, handler| {
let compiler = Arc::new(swc::Compiler::new(cm.clone(), Arc::new(handler))); GLOBALS.with(|globals| {
let loader = SwcLoader::new( let compiler = Arc::new(swc::Compiler::new(cm.clone(), Arc::new(handler)));
compiler.clone(), let loader = SwcLoader::new(
swc::config::Options { compiler.clone(),
swcrc: true, swc::config::Options {
..Default::default() swcrc: true,
}, ..Default::default()
); },
let config = Config { );
working_dir: Default::default(), let bundler = Bundler::new(
mode: Default::default(), globals,
entry: EntryConfig::Files(entries), cm.clone(),
output: None, &loader,
module: Default::default(), NodeResolver::new(),
optimization: None, Config {
resolve: None, require: true,
options: None, external_modules: vec![
}; "assert",
let bundler = Bundler::new( "buffer",
compiler.clone(), "child_process",
swc::config::Options { "console",
swcrc: true, "cluster",
..Default::default() "crypto",
}, "dgram",
&spack::resolve::NodeResolver, "dns",
&loader, "events",
); "fs",
"http",
"http2",
"https",
"net",
"os",
"path",
"perf_hooks",
"process",
"querystring",
"readline",
"repl",
"stream",
"string_decoder",
"timers",
"tls",
"tty",
"url",
"util",
"v8",
"vm",
"wasi",
"worker",
"zlib",
]
.into_iter()
.map(From::from)
.collect(),
},
);
let modules = bundler.bundle(&config).expect("failed to bundle module"); let modules = bundler.bundle(entries).map_err(|_| ())?;
log::info!("Bundled as {} modules", modules.len()); log::info!("Bundled as {} modules", modules.len());
let mut error = false; let mut error = false;
for bundled in modules { for bundled in modules {
let code = bundler let code = compiler
.swc() .print(&bundled.module, SourceMapsConfig::Bool(false), None, false)
.print(&bundled.module, SourceMapsConfig::Bool(false), None, false) .expect("failed to print?")
.expect("failed to emit bundle") .code;
.code;
let name = match bundled.kind { let name = match bundled.kind {
BundleKind::Named { name } | BundleKind::Lib { name } => { BundleKind::Named { name } | BundleKind::Lib { name } => {
PathBuf::from(name) PathBuf::from(name)
} }
BundleKind::Dynamic => format!("dynamic.{}.js", bundled.id).into(), BundleKind::Dynamic => format!("dynamic.{}.js", bundled.id).into(),
}; };
let output_path = entry.path().join("output").join(name.file_name().unwrap()); let output_path =
entry.path().join("output").join(name.file_name().unwrap());
log::info!("Printing {}", output_path.display()); log::info!("Printing {}", output_path.display());
let s = NormalizedOutput::from(code); let s = NormalizedOutput::from(code);
match s.compare_to_file(&output_path) { match s.compare_to_file(&output_path) {
Ok(_) => {} Ok(_) => {}
Err(err) => { Err(err) => {
println!("{:?}", err); println!("{:?}", err);
error = true; error = true;
}
} }
} }
}
if error { if error {
return Err(()); return Err(());
} }
Ok(()) Ok(())
})
}) })
.expect("failed to process a module"); .expect("failed to process a module");
}); });

View File

@ -0,0 +1,5 @@
import { B } from './b';
export class A extends B {
}

View File

@ -0,0 +1,3 @@
export class B {
}

View File

@ -0,0 +1,5 @@
import { A } from './a';
import './b';
console.log('foo');
new A();

View File

@ -0,0 +1,6 @@
class B {
}
class A extends B {
}
console.log('foo');
new A();

View File

@ -0,0 +1,3 @@
import { B } from './b';
export class A extends B { }

View File

@ -0,0 +1 @@
export class B { }

View File

@ -0,0 +1,4 @@
import { A } from './a';
import { B } from './b';
console.log(A, B);

View File

@ -0,0 +1,5 @@
class B {
}
class A extends B {
}
console.log(A, B);

View File

@ -0,0 +1,13 @@
import { getC } from './c';
export function a() {
return new A()
}
export class A extends getC() {
}
export function getA() {
return A;
}

View File

@ -0,0 +1,4 @@
import { A, getA, a } from './a';
export { A, getA, a }

View File

@ -0,0 +1,7 @@
import './b';
export function getC() {
return C;
}
export class C { }

View File

@ -0,0 +1,3 @@
import { a } from './a';
console.log(a, a())

View File

@ -0,0 +1,4 @@
function a() {
return new A();
}
console.log(a, a());

View File

@ -0,0 +1,3 @@
import { B } from './b';
export class A extends B { }

View File

@ -0,0 +1,8 @@
import { A } from './a';
import { C } from './c';
export class B extends C {
a() {
return new A();
}
}

View File

@ -0,0 +1,10 @@
import { B } from './b';
export class C {
a() {
throw new Error('Unimplemented')
}
b() {
return new B();
}
}

View File

@ -0,0 +1,4 @@
import { A } from './a';
import './b';
import './c';
console.log(A, 'Loaded!');

View File

@ -0,0 +1,16 @@
class C {
a() {
throw new Error('Unimplemented');
}
b() {
return new B();
}
}
class B extends C {
a() {
return new A();
}
}
class A extends B {
}
console.log(A, 'Loaded!');

View File

@ -0,0 +1 @@
import './b';

View File

@ -0,0 +1 @@
import './c';

View File

@ -0,0 +1 @@
import './d';

View File

@ -0,0 +1 @@
import './a';

View File

@ -0,0 +1 @@
import './a';

View File

@ -0,0 +1,9 @@
import { B } from './b'
import './c';
export class A {
method() {
return new B();
}
}

View File

@ -0,0 +1,6 @@
import { A } from "./a";
import './c';
export class B extends A {
}

View File

@ -0,0 +1 @@
console.log('c');

View File

@ -0,0 +1,4 @@
import { A } from './a';
import { B } from './b';
console.log(A, B);

View File

@ -0,0 +1,9 @@
class A {
method() {
return new B();
}
}
class B extends A {
}
console.log('c');
console.log(A, B);

View File

@ -0,0 +1,8 @@
import { B } from './b'
export class A {
method() {
return new B();
}
}

View File

@ -0,0 +1,5 @@
import { A } from "./a";
export class B extends A {
}

View File

@ -0,0 +1,4 @@
import { A } from './a';
import { B } from './b';
console.log(A, B);

View File

@ -0,0 +1,8 @@
class A {
method() {
return new B();
}
}
class B extends A {
}
console.log(A, B);

View File

@ -0,0 +1,3 @@
import './b';
console.log('a');

View File

@ -0,0 +1,3 @@
import './c';
console.log('b');

Some files were not shown because too many files have changed in this diff.