Mirror of https://github.com/swc-project/swc.git (synced 2024-11-30 15:23:33 +03:00)

Fix swc_bundler (#1075)

swc_bundler:
- Skip least_common_ancestor for roots.
- Correct planning for circular imports mixed with normal imports.
- Correct merging of circular imports mixed with normal imports.
This commit is contained in:
parent ff0db8f122
commit 1af1840d01
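In rough outline, the fix threads a shared set of already-merged module ids through the merge entry points and makes least_common_ancestor prefer graph roots. A minimal sketch of the two ideas, simplified from the diff below (names follow the diff; this is not the full implementation):

    // Modules already merged into some chunk; the real code uses the
    // concurrent `CHashSet<ModuleId>` added in the util module below.
    fn should_merge(merged: &crate::util::CHashSet<ModuleId>, id: ModuleId) -> bool {
        // insert() returns false when the id was already present,
        // i.e. the module was merged earlier and must be skipped.
        merged.insert(id)
    }

    // Root handling added to least_common_ancestor: a candidate with no
    // incoming edges is an entry and wins outright.
    fn find_root(g: &ModuleGraph, module_ids: &[ModuleId]) -> Option<ModuleId> {
        module_ids
            .iter()
            .copied()
            .find(|&m| g.neighbors_directed(m, Incoming).count() == 0)
    }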
@@ -6,7 +6,7 @@ edition = "2018"
license = "Apache-2.0/MIT"
name = "swc_bundler"
repository = "https://github.com/swc-project/swc.git"
version = "0.7.1"
version = "0.7.2"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[features]
@@ -2,13 +2,16 @@ use super::{
merge::{ImportDropper, Unexporter},
plan::{CircularPlan, Plan},
};
use crate::{bundler::load::TransformedModule, Bundler, Load, ModuleId, Resolve};
use crate::{bundler::load::TransformedModule, util::CHashSet, Bundler, Load, ModuleId, Resolve};
use anyhow::{Context, Error};
use std::borrow::Borrow;
use swc_common::DUMMY_SP;
use swc_ecma_ast::*;
use swc_ecma_visit::{noop_visit_type, FoldWith, Node, Visit, VisitMutWith, VisitWith};

#[cfg(test)]
mod tests;

/// Circular imports are hard to handle.
///
/// We use some dedicated method to handle circular dependencies.
@@ -22,6 +25,7 @@ where
plan: &Plan,
circular_plan: &CircularPlan,
entry_id: ModuleId,
merged: &CHashSet<ModuleId>,
) -> Result<Module, Error> {
assert!(
circular_plan.chunks.len() >= 1,
@@ -29,28 +33,42 @@ where
circular_plan
);
let entry_module = self.scope.get_module(entry_id).unwrap();
let direct_deps = entry_module
.imports
.specifiers
.iter()
.map(|v| v.0.module_id)
.chain(entry_module.exports.reexports.iter().map(|v| v.0.module_id))
.collect::<Vec<_>>();

let modules = circular_plan
.chunks
.iter()
.map(|&id| self.scope.get_module(id).unwrap())
.collect::<Vec<_>>();

merged.insert(entry_id);
let mut entry = self
.merge_modules(plan, entry_id, false, true)
.merge_modules(plan, entry_id, false, true, merged)
.context("failed to merge dependency of a cyclic module")?;

entry.visit_mut_with(&mut ImportDropper {
imports: &entry_module.imports,
});
// print_hygiene("entry:drop_imports", &self.cm, &entry);
let mut deps = circular_plan.chunks.clone();
deps.sort_by_key(|&dep| (!direct_deps.contains(&dep), dep));

for &dep in &*circular_plan.chunks {
for dep in deps {
if dep == entry_id {
continue;
}
if !merged.insert(dep) {
log::debug!("[circular merge] Already merged: {:?}", dep);
continue;
}
log::debug!("Circular merge: {:?}", dep);

let new_module = self.merge_two_circular_modules(plan, &modules, entry, dep)?;
let new_module = self.merge_two_circular_modules(plan, &modules, entry, dep, merged)?;

entry = new_module;

@@ -68,10 +86,11 @@ where
_circular_modules: &[TransformedModule],
mut entry: Module,
dep: ModuleId,
merged: &CHashSet<ModuleId>,
) -> Result<Module, Error> {
self.run(|| {
let mut dep = self
.merge_modules(plan, dep, false, false)
.merge_modules(plan, dep, false, true, merged)
.context("failed to merge dependency of a cyclic module")?;

// print_hygiene("dep:init", &self.cm, &dep);
@@ -94,7 +113,7 @@ fn merge_respecting_order(mut entry: Vec<ModuleItem>, mut dep: Vec<ModuleItem>)
// While looping over items from entry, we check for dependency.
loop {
if entry.is_empty() {
log::debug!("entry is empty");
log::trace!("entry is empty");
break;
}
let item = entry.drain(..=0).next().unwrap();
@@ -103,7 +122,7 @@ fn merge_respecting_order(mut entry: Vec<ModuleItem>, mut dep: Vec<ModuleItem>)
if dep.is_empty() {
log::trace!("dep is empty");
new.push(item);
new.extend(entry);
new.append(&mut entry);
break;
}

@@ -121,7 +140,7 @@ fn merge_respecting_order(mut entry: Vec<ModuleItem>, mut dep: Vec<ModuleItem>)
if let Some(pos) = dependency_index(&dep[0], &[&item]) {
log::trace!("Found reverse depndency (index[0]): {}", pos);

new.extend(entry.drain(..=pos));
new.push(item);
new.extend(dep.drain(..=0));
continue;
}
@@ -129,16 +148,19 @@ fn merge_respecting_order(mut entry: Vec<ModuleItem>, mut dep: Vec<ModuleItem>)
if let Some(pos) = dependency_index(&dep[0], &entry) {
log::trace!("Found reverse depndency: {}", pos);

new.push(item);
new.extend(entry.drain(..=pos));
new.extend(dep.drain(..=0));
continue;
}

log::debug!("No dependency");
log::trace!("No dependency");

new.push(item);
}

new.extend(entry);

// Append remaining statements.
new.extend(dep);

@@ -176,8 +198,12 @@ where

for (idx, dep) in self.deps.iter().enumerate() {
match dep.borrow() {
ModuleItem::Stmt(Stmt::Decl(Decl::Class(decl))) => {
log::debug!(
ModuleItem::ModuleDecl(ModuleDecl::ExportDecl(ExportDecl {
decl: Decl::Class(decl),
..
}))
| ModuleItem::Stmt(Stmt::Decl(Decl::Class(decl))) => {
log::trace!(
"Decl (from dep) = {}{:?}, Ident = {}{:?}",
decl.ident.sym,
decl.ident.span.ctxt,
@@ -186,10 +212,11 @@ where
);
if decl.ident.sym == i.sym && decl.ident.span.ctxt == i.span.ctxt {
self.idx = Some(idx);
log::info!("Index is {}", idx);
log::debug!("Index is {}", idx);
break;
}
}

_ => {}
}
}
bundler/src/bundler/chunk/circular/tests.rs (new file, 83 lines)
@@ -0,0 +1,83 @@
use super::*;
use swc_common::{sync::Lrc, FileName, SourceMap};
use swc_ecma_parser::{lexer::Lexer, JscTarget, Parser, StringInput, Syntax};
use swc_ecma_utils::drop_span;
use testing::assert_eq;

fn parse(cm: Lrc<SourceMap>, name: &str, src: &str) -> Module {
let fm = cm.new_source_file(FileName::Custom(name.into()), src.into());
let lexer = Lexer::new(
Syntax::default(),
JscTarget::Es2020,
StringInput::from(&*fm),
None,
);
let mut parser = Parser::new_from(lexer);

let module = parser.parse_module().unwrap();

drop_span(module)
}

#[track_caller]
fn assert_merge_respecting_order(modules: &[&str], output: &str) {
for i in 0..modules.len() {
log::info!("[{}] Testing", i);
::testing::run_test2(false, |cm, _handler| {
let mut entry = parse(cm.clone(), &format!("entry-{}", i), modules[i]).body;

for j in 0..modules.len() {
if i == j {
continue;
}

let dep = parse(cm.clone(), &format!("deps-{}-{}", i, j), modules[j]);
entry = merge_respecting_order(entry, dep.body);
}

let output = parse(cm.clone(), "output", output);
assert_eq!(entry, output.body, "[{}]", i);

log::info!("[{}] Success", i);

Ok(())
})
.unwrap()
}
}

#[test]
fn simple_two() {
assert_merge_respecting_order(
&["export class A {}", "export class B extends A {}"],
"
export class A {}
export class B extends A {}
",
);
}

#[track_caller]
fn assert_dependency_index(entry: &str, dep: &str, expected: usize) {
::testing::run_test2(false, |cm, _handler| {
let entry = parse(cm.clone(), "entry", entry);
let dep = parse(cm.clone(), "dep", dep);

let calculated = dependency_index(&entry.body[0], &dep.body);

assert_eq!(calculated, Some(expected));

Ok(())
})
.unwrap();
}

#[test]
fn dep_index_class() {
assert_dependency_index("class A extends B {}", "class B {}", 0);
}

#[test]
fn dep_index_export_class() {
assert_dependency_index("class A extends B {}", "export class B {}", 0);
}
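For orientation, what the simple_two fixture above exercises: merge_respecting_order interleaves the two module bodies so that a declaration lands before the statement that references it, whichever module is treated as the entry. Illustrative comments only, mirroring that test:

    // entry: export class B extends A {}
    // dep:   export class A {}
    // merged output keeps A ahead of B because B's extends clause references A:
    //   export class A {}
    //   export class B extends A {}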
@@ -43,7 +43,7 @@ where
dep_info: &TransformedModule,
targets: &mut Vec<ModuleId>,
) -> Result<(), Error> {
log::info!("Merging as a common js module: {}", info.fm.name);
log::debug!("Merging as a common js module: {}", info.fm.name);
// If src is none, all requires are transpiled
let mut v = RequireReplacer {
is_entry,
@@ -315,7 +315,7 @@ impl VisitMut for RequireReplacer {
self.replaced = true;
*node = load.clone();

log::debug!("Found, and replacing require");
log::trace!("Found, and replacing require");
}
}
_ => {}
@@ -1,8 +1,8 @@
use super::plan::Plan;
use super::plan::{NormalPlan, Plan};
use crate::{
bundler::load::{Specifier, TransformedModule},
util::IntoParallelIterator,
Bundler, Load, Resolve,
util::{CHashSet, IntoParallelIterator},
Bundler, Load, ModuleId, Resolve,
};
use anyhow::{Context, Error};
#[cfg(feature = "concurrent")]
@@ -52,15 +52,25 @@ where
pub(super) fn merge_reexports(
&self,
plan: &Plan,
nomral_plan: &NormalPlan,
entry: &mut Module,
info: &TransformedModule,
merged: &CHashSet<ModuleId>,
) -> Result<(), Error> {
log::debug!("merge_reexports: {}", info.fm.name);
log::trace!("merge_reexports: {}", info.fm.name);

let deps = (&*info.exports.reexports)
let mut reexports = info.exports.reexports.clone();
// Remove transitive dependencies which is merged by parent moudle.
reexports.retain(|(src, _)| nomral_plan.chunks.contains(&src.module_id));

let deps = reexports
.into_par_iter()
.map(|(src, specifiers)| -> Result<_, Error> {
log::info!("Merging exports: {} <- {}", info.fm.name, src.src.value);
if !merged.insert(src.module_id) {
return Ok(None);
}

log::debug!("Merging exports: {} <- {}", info.fm.name, src.src.value);

let imported = self.scope.get_module(src.module_id).unwrap();
assert!(imported.is_es6, "Reexports are es6 only");
@@ -68,7 +78,7 @@ where
info.helpers.extend(&imported.helpers);

let mut dep = self
.merge_modules(plan, src.module_id, false, false)
.merge_modules(plan, src.module_id, false, false, merged)
.with_context(|| {
format!(
"failed to merge for reexport: ({}):{} <= ({}):{}",
@@ -98,12 +108,17 @@ where
// print_hygiene(&format!("dep: unexport"), &self.cm, &dep);
}

Ok((src, dep))
Ok(Some((src, dep)))
})
.collect::<Vec<_>>();

for dep in deps {
let (src, dep) = dep?;
let dep = dep?;
let dep = match dep {
Some(v) => v,
None => continue,
};
let (src, dep) = dep;

// Replace import statement / require with module body
let mut injector = ExportInjector {
@@ -241,7 +256,7 @@ impl VisitMut for UnexportAsVar<'_> {
}
}
None => {
log::debug!("Alias: {:?} -> {:?}", n.orig, self.dep_ctxt);
log::trace!("Alias: {:?} -> {:?}", n.orig, self.dep_ctxt);

decls.push(VarDeclarator {
span: n.span,
@@ -10,12 +10,14 @@ use crate::{
use anyhow::{Context, Error};
#[cfg(feature = "concurrent")]
use rayon::iter::ParallelIterator;
use retain_mut::RetainMut;
use std::{borrow::Cow, mem::take};
use swc_atoms::js_word;
use swc_common::{SyntaxContext, DUMMY_SP};
use swc_ecma_ast::*;
use swc_ecma_utils::prepend_stmts;
use swc_ecma_visit::{noop_fold_type, noop_visit_mut_type, Fold, FoldWith, VisitMut, VisitMutWith};
use util::CHashSet;

impl<L, R> Bundler<'_, L, R>
where
@@ -29,15 +31,18 @@ where
entry: ModuleId,
is_entry: bool,
force_not_cyclic: bool,
merged: &CHashSet<ModuleId>,
) -> Result<Module, Error> {
self.run(|| {
merged.insert(entry);

let info = self.scope.get_module(entry).unwrap();

if !force_not_cyclic {
// Handle circular imports
if let Some(circular_plan) = plan.entry_as_circular(info.id) {
log::info!("Circular dependency detected: ({})", info.fm.name);
return Ok(self.merge_circular_modules(plan, circular_plan, entry)?);
log::debug!("Circular dependency detected: ({})", info.fm.name);
return Ok(self.merge_circular_modules(plan, circular_plan, entry, merged)?);
}
}

@@ -65,7 +70,7 @@ where
plan.normal.get(&info.id)
);

self.merge_reexports(plan, &mut entry, &info)
self.merge_reexports(plan, module_plan, &mut entry, &info, merged)
.context("failed to merge reepxorts")?;

let to_merge: Vec<_> = info
@@ -94,8 +99,13 @@ where
|| {
to_merge
.into_par_iter()
.map(|(src, specifiers)| -> Result<_, Error> {
.map(|(src, specifiers)| -> Result<Option<_>, Error> {
self.run(|| {
if !merged.insert(src.module_id) {
log::debug!("Skipping: {} <= {}", info.fm.name, src.src.value);
return Ok(None);
}

log::debug!("Merging: {} <= {}", info.fm.name, src.src.value);

let dep_info = self.scope.get_module(src.module_id).unwrap();
@@ -110,7 +120,7 @@ where
// a <- b + chunk(c)
//
let mut dep = self
.merge_modules(plan, src.module_id, false, false)
.merge_modules(plan, src.module_id, false, false, merged)
.with_context(|| {
format!(
"failed to merge: ({}):{} <= ({}):{}",
@@ -194,7 +204,7 @@ where
}
// print_hygiene("dep:before-injection", &self.cm, &dep);

Ok((dep, dep_info))
Ok(Some((dep, dep_info)))
})
})
.collect::<Vec<_>>()
@@ -205,15 +215,19 @@ where
.clone()
.into_par_iter()
.map(|id| -> Result<_, Error> {
if !merged.insert(id) {
return Ok(None);
}

let dep_info = self.scope.get_module(id).unwrap();
let mut dep = self.merge_modules(plan, id, false, true)?;
let mut dep = self.merge_modules(plan, id, false, true, merged)?;

dep = self.remark_exports(dep, dep_info.ctxt(), None, true);
dep = dep.fold_with(&mut Unexporter);

// As transitive deps can have no direct relation with entry,
// remark_exports is not enough.
Ok((dep, dep_info))
Ok(Some((dep, dep_info)))
})
.collect::<Vec<_>>();

@@ -228,7 +242,14 @@ where
.map(|v| (v, true))
.chain(transitive_deps.into_iter().map(|v| (v, false)))
{
let (mut dep, dep_info) = dep?;
let dep = dep?;
let dep = match dep {
Some(v) => v,
None => continue,
};

let (mut dep, dep_info) = dep;

if let Some(idx) = targets.iter().position(|v| *v == dep_info.id) {
targets.remove(idx);
if let Some(v) = plan.normal.get(&dep_info.id) {
@@ -307,8 +328,12 @@ where
// }

if is_entry {
entry.body.retain(|item| {
entry.body.retain_mut(|item| {
match item {
ModuleItem::ModuleDecl(ModuleDecl::ExportNamed(export)) => {
export.src = None;
}

ModuleItem::ModuleDecl(ModuleDecl::Import(import)) => {
for (id, p) in &plan.normal {
if import.span.ctxt == self.scope.get_module(*id).unwrap().ctxt() {
@@ -352,6 +377,7 @@ where
}
}
}

_ => {}
}

@@ -44,6 +44,7 @@ where
entries: HashMap<String, TransformedModule>,
) -> Result<Vec<Bundle>, Error> {
let plan = self.determine_entries(entries).context("failed to plan")?;
let merged = Default::default();

Ok((&*plan.entries)
.into_par_iter()
@@ -58,7 +59,7 @@ where
.clone();

let module = self
.merge_modules(&plan, entry, true, false)
.merge_modules(&plan, entry, true, false, &merged)
.context("failed to merge module")
.unwrap(); // TODO
@@ -13,6 +13,13 @@ pub(super) fn least_common_ancestor(g: &ModuleGraph, module_ids: &[ModuleId]) ->
return module_ids[0];
}

// Check for roots
for &mid in module_ids {
if g.neighbors_directed(mid, Incoming).count() == 0 {
return mid;
}
}

let first = module_ids[0];
let second = module_ids[1];

@@ -22,11 +29,14 @@ pub(super) fn least_common_ancestor(g: &ModuleGraph, module_ids: &[ModuleId]) ->
}

if let Some(id) = check_itself_and_parent(g, &[first], &[second]) {
log::info!("Found lca: {:?}", id);
log::debug!("Found lca: {:?}", id);
return id;
}

unreachable!("failed to calculagte least common ancestor")
unreachable!(
"failed to calculate least common ancestors of {:?}",
module_ids
)
}

return module_ids
@@ -4,8 +4,16 @@ use crate::{
};
use anyhow::{bail, Error};
use lca::least_common_ancestor;
use petgraph::{algo::all_simple_paths, graphmap::DiGraphMap, visit::Bfs};
use std::collections::{hash_map::Entry, HashMap};
use petgraph::{
algo::all_simple_paths,
graphmap::DiGraphMap,
visit::Bfs,
EdgeDirection::{Incoming, Outgoing},
};
use std::{
collections::{hash_map::Entry, HashMap, HashSet},
ops::{Deref, DerefMut},
};

mod lca;
#[cfg(test)]
@@ -13,89 +21,75 @@ mod tests;

#[derive(Debug, Default)]
struct PlanBuilder {
entry_graph: ModuleGraph,
/// A hashmap to check if a module import is circular.
///
/// This contains all dependencies, including transitive ones. For example,
/// if `a` dependes on `b` and `b` depdends on `c`, all of
/// `(a, b)`, `(a, c)`,`(b, c)` will be inserted.
all_deps: HashSet<(ModuleId, ModuleId)>,

/// Graph to compute direct dependencies (direct means it will be merged
/// directly)
tracking_graph: ModuleGraph,
direct_deps: ModuleGraph,

circular: HashMap<ModuleId, Vec<ModuleId>>,
direct_deps: HashMap<ModuleId, Vec<ModuleId>>,

/// Used to calcuate transitive dependencies.
reverse: HashMap<ModuleId, Vec<ModuleId>>,

/// Used for normalization
///
/// This is required because we cannot know the order file is
/// loaded. It means we cannot know order
/// calls to add_to_graph.
/// Thus, we cannot track import order in add_to_graph.
pending_direct_deps: HashMap<ModuleId, Vec<ModuleId>>,
circular: Circulars,

kinds: HashMap<ModuleId, BundleKind>,
}

#[derive(Debug, Default)]
struct Circulars(Vec<HashSet<ModuleId>>);

impl Circulars {
pub fn get(&self, id: ModuleId) -> Option<&HashSet<ModuleId>> {
let pos = self.0.iter().position(|set| set.contains(&id))?;

Some(&self.0[pos])
}
// pub fn remove(&mut self, id: ModuleId) -> Option<HashSet<ModuleId>> {
// let pos = self.0.iter().position(|set| set.contains(&id))?;
// Some(self.0.remove(pos))
// }
}

impl Deref for Circulars {
type Target = Vec<HashSet<ModuleId>>;

fn deref(&self) -> &Self::Target {
&self.0
}
}

impl DerefMut for Circulars {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}

impl PlanBuilder {
fn mark_as_circular(&mut self, src: ModuleId, imported: ModuleId) {
if let Some(v) = self.circular.get_mut(&src) {
if !v.contains(&src) {
v.push(src);
for set in self.circular.iter_mut() {
if set.contains(&src) || set.contains(&imported) {
set.insert(src);
set.insert(imported);
return;
}
if !v.contains(&imported) {
v.push(imported);
}
return;
}

if let Some(v) = self.circular.iter_mut().find_map(|(_, v)| {
if v.contains(&src) || v.contains(&imported) {
Some(v)
} else {
None
}
}) {
if !v.contains(&src) {
v.push(src);
}
if !v.contains(&imported) {
v.push(imported);
}
return;
}

self.circular.insert(src, vec![imported]);
let mut set = HashSet::default();
set.insert(src);
set.insert(imported);
self.circular.push(set);
}

fn is_circular(&self, id: ModuleId) -> bool {
if self.circular.get(&id).is_some() {
return true;
for set in self.circular.iter() {
if set.contains(&id) {
return true;
}
}

self.circular
.iter()
.any(|(_, v)| v.iter().any(|&v| v == id))
}

fn try_add_direct_dep(&mut self, root_id: ModuleId, dep: ModuleId, dep_of_dep: ModuleId) {
if let None = self.tracking_graph.add_edge(root_id, dep_of_dep, 0) {
if self.circular.contains_key(&dep_of_dep) {
self.direct_deps.entry(root_id).or_default().push(dep);
return;
}

// Track direct dependencies, but exclude if it will be recursively merged.
self.direct_deps.entry(dep).or_default().push(dep_of_dep);
} else {
if self.circular.contains_key(&dep_of_dep) {
return;
}

self.pending_direct_deps
.entry(dep)
.or_default()
.push(dep_of_dep);
}
false
}
}

@@ -148,6 +142,19 @@ where
&self,
entries: HashMap<String, TransformedModule>,
) -> Result<Plan, Error> {
let plan = self.calculate_plan(entries)?;
let plan = self.handle_duplicates(plan);

Ok(plan)
}

/// 1. For entry -> a -> b -> a, entry -> a->c, entry -> b -> c,
/// we change c as transitive dependancy of entry.
fn handle_duplicates(&self, plan: Plan) -> Plan {
plan
}

fn calculate_plan(&self, entries: HashMap<String, TransformedModule>) -> Result<Plan, Error> {
let mut builder = PlanBuilder::default();

for (name, module) in entries {
@@ -156,16 +163,16 @@ where
None => {}
}

self.add_to_graph(&mut builder, module.id, module.id);
self.add_to_graph(&mut builder, module.id, &mut vec![]);
}

let mut metadata = HashMap::<ModuleId, Metadata>::default();

// Draw dependency graph to calculte
for (id, _) in &builder.kinds {
let mut bfs = Bfs::new(&builder.entry_graph, *id);
let mut bfs = Bfs::new(&builder.direct_deps, *id);

while let Some(dep) = bfs.next(&builder.entry_graph) {
while let Some(dep) = bfs.next(&builder.direct_deps) {
if dep == *id {
// Useless
continue;
@@ -201,160 +208,182 @@ where
plans.bundle_kinds.insert(*id, kind.clone());
}

// Fix direct dependencies. See the doc of pending_direct_deps for more
// information.
for (entry, deps) in builder.pending_direct_deps.drain() {
for (key, direct_deps) in builder.direct_deps.iter_mut() {
if direct_deps.contains(&entry) {
if *key == entry {
direct_deps.extend_from_slice(&deps);
} else {
direct_deps.retain(|&id| {
if *key == id {
return true;
}
if deps.contains(&id) {
return false;
}
true
});
}
}
}

if !builder.direct_deps.contains_key(&entry) {
builder.direct_deps.insert(entry, deps);
}
}

// Handle circular imports
for (k, members) in &builder.circular {
for (_entry, deps) in builder.direct_deps.iter_mut() {
deps.retain(|v| !members.contains(v));
}
for (root_entry, _) in builder.kinds.iter() {
let mut bfs = Bfs::new(&builder.direct_deps, *root_entry);

builder.direct_deps.remove(k);
}
while let Some(entry) = bfs.next(&builder.direct_deps) {
let deps: Vec<_> = builder
.direct_deps
.neighbors_directed(entry, Outgoing)
.collect();

// Calculate actual chunking plans
for (id, _) in builder.kinds.iter() {
let mut bfs = Bfs::new(&builder.entry_graph, *id);
for dep in deps {
// Check if it's circular.
if let Some(members) = builder.circular.get(dep) {
// Exclude circular imnports from normal dependencies
for &circular_member in members {
if entry == circular_member {
continue;
}

let mut prev = *id;

while let Some(dep) = bfs.next(&builder.entry_graph) {
if dep == *id {
// Useless
continue;
}
// Check if it's circular.
if builder.is_circular(dep) {
// Entry is `dep`.
match plans.circular.entry(dep) {
// Already added
Entry::Occupied(_) => {
// TODO: assert!
}

// We need to mark modules as circular.
Entry::Vacant(e) => {
let plan = e.insert(CircularPlan::default());
if let Some(mut v) = builder.circular.remove(&dep) {
if let Some(index) = v.iter().position(|&id| id == dep) {
v.remove(index);
}
plan.chunks.extend(v);
if builder
.direct_deps
.remove_edge(dep, circular_member)
.is_some()
{
log::debug!(
"[circular] Removing {:?} => {:?}",
dep,
circular_member
);
}
}
}

if !builder.is_circular(prev) {
plans.normal.entry(prev).or_default().chunks.push(dep);
// Add circular plans
match plans.circular.entry(dep) {
// Already added
Entry::Occupied(_) => {
// TODO: assert!
}

// We need to mark modules as circular.
Entry::Vacant(e) => {
let circular_plan = e.insert(CircularPlan::default());
if let Some(v) = builder.circular.get(dep) {
circular_plan
.chunks
.extend(v.iter().copied().filter(|&v| v != dep));
}
}
}

// if !builder.is_circular(dep) {
// plans.normal.entry(dep).or_default().chunks.
// push(entry); }
}
}
prev = dep;
}
}

for (id, deps) in builder.direct_deps.drain() {
for &dep in &deps {
if builder.circular.get(&id).is_some() {
plans.normal.entry(id).or_default().chunks.push(dep);
continue;
}
let is_es6 = self.scope.get_module(dep).unwrap().is_es6;
let dependants = builder.reverse.get(&dep).map(|s| &**s).unwrap_or(&[]);
for (root_entry, _) in &builder.kinds {
let root_entry = *root_entry;
let mut bfs = Bfs::new(&builder.direct_deps, root_entry);

if metadata.get(&dep).map(|md| md.bundle_cnt).unwrap_or(0) == 1 {
log::info!("Module dep: {} => {}", id, dep);
while let Some(entry) = bfs.next(&builder.direct_deps) {
let deps: Vec<_> = builder
.direct_deps
.neighbors_directed(entry, Outgoing)
.collect();

for &dep in &deps {
if builder.is_circular(entry) {
log::debug!(
"Adding a circular dependencuy {:?} to normal entry {:?}",
entry,
root_entry
);
plans.normal.entry(entry).or_default().chunks.push(dep);
continue;
}
let is_es6 = self.scope.get_module(entry).unwrap().is_es6;
let dependants = builder
.direct_deps
.neighbors_directed(dep, Incoming)
.collect::<Vec<_>>();

if metadata.get(&dep).map(|md| md.bundle_cnt).unwrap_or(0) == 1 {
log::debug!("{:?} depends on {:?}", entry, dep);

// Common js support.
if !is_es6 {
// Dependancy of
//
// a -> b
// b -> c
//
// results in
//
// a <- b
// b <- c
//
if dependants.len() <= 1 {
plans.normal.entry(entry).or_default().chunks.push(dep);
continue;
}

// We now have a module depended by multiple modules. Let's say
//
// a -> b
// a -> c
// b -> c
//
// results in
//
// a <- b
// a <- c
let module = least_common_ancestor(&builder.direct_deps, &dependants);

let normal_plan = plans.normal.entry(module).or_default();

for &dep in &deps {
if !normal_plan.chunks.contains(&dep)
&& !normal_plan.transitive_chunks.contains(&dep)
{
if dependants.contains(&module) {
// `entry` depends on `module` directly
normal_plan.chunks.push(dep);
} else {
normal_plan.transitive_chunks.push(dep);
}
}
}

// Common js support.
if !is_es6 {
// Dependancy of
//
// a -> b
// b -> c
//
// results in
//
// a <- b
// b <- c
//
if dependants.len() <= 1 {
plans.normal.entry(id).or_default().chunks.push(dep);
continue;
}

// We now have a module depended by multiple modules. Let's say
//
// a -> b
// a -> c
// b -> c
//
// results in
//
// a <- b
// a <- c
let module = least_common_ancestor(&builder.entry_graph, dependants);

let normal_plan = plans.normal.entry(module).or_default();
normal_plan.transitive_chunks.reserve(deps.len());

for &dep in &deps {
if !normal_plan.chunks.contains(&dep)
&& !normal_plan.transitive_chunks.contains(&dep)
if 2 <= dependants.len() {
// Should be merged as a transitive dependency.
let higher_module = if plans.entries.contains(&dependants[0]) {
dependants[0]
} else if dependants.len() == 2
&& plans.entries.contains(&dependants[1])
{
if dependants.contains(&module) {
// `entry` depends on `module` directly
dependants[1]
} else {
least_common_ancestor(&builder.direct_deps, &dependants)
};

if dependants.len() == 2 && dependants.contains(&higher_module) {
let mut entry =
*dependants.iter().find(|&&v| v != higher_module).unwrap();

// We choose higher node if import is circular
if builder.is_circular(entry) {
entry = higher_module;
}

let normal_plan = plans.normal.entry(entry).or_default();
if !normal_plan.chunks.contains(&dep) {
normal_plan.chunks.push(dep);
} else {
normal_plan.transitive_chunks.push(dep);
}
} else {
let t = &mut plans
.normal
.entry(higher_module)
.or_default()
.transitive_chunks;
if !t.contains(&dep) {
t.push(dep)
}
}
} else {
// Direct dependency.
plans.normal.entry(entry).or_default().chunks.push(dep);
}

continue;
}

if 2 <= dependants.len() {
// Should be merged as a transitive dependency.
let module = least_common_ancestor(&builder.entry_graph, dependants);

if dependants.len() == 2 && dependants.contains(&module) {
let entry = *dependants.iter().find(|&&v| v != module).unwrap();
plans.normal.entry(entry).or_default().chunks.push(dep);
} else {
let t = &mut plans.normal.entry(module).or_default().transitive_chunks;
if !t.contains(&dep) {
t.push(dep)
}
}
} else {
// Direct dependency.
plans.normal.entry(id).or_default().chunks.push(dep);
}

continue;
}
}
}
@@ -364,86 +393,67 @@ where
}

// dbg!(&plans);

Ok(plans)
}

fn add_to_graph(&self, builder: &mut PlanBuilder, module_id: ModuleId, root_id: ModuleId) {
let contains = builder.entry_graph.contains_node(module_id);

builder.entry_graph.add_node(module_id);
builder.tracking_graph.add_node(module_id);
fn add_to_graph(
&self,
builder: &mut PlanBuilder,
module_id: ModuleId,
path: &mut Vec<ModuleId>,
) {
builder.direct_deps.add_node(module_id);

let m = self
.scope
.get_module(module_id)
.expect("failed to get module");

for (src, _) in &m.imports.specifiers {
log::trace!("({:?}) {:?} => {:?}", root_id, module_id, src.module_id);
for src in m
.imports
.specifiers
.iter()
.map(|v| &v.0)
.chain(m.exports.reexports.iter().map(|v| &v.0))
{
log::debug!("Dependency: {:?} => {:?}", module_id, src.module_id);

builder.direct_deps.add_edge(module_id, src.module_id, 0);

for &id in &*path {
builder.all_deps.insert((id, src.module_id));
}
builder.all_deps.insert((module_id, src.module_id));

if !builder.all_deps.contains(&(src.module_id, module_id)) {
path.push(module_id);
self.add_to_graph(builder, src.module_id, path);
assert_eq!(path.pop(), Some(module_id));
}
}

// Prevent dejavu
if contains {
for (src, _) in &m.imports.specifiers {
if builder.entry_graph.contains_edge(module_id, src.module_id) {
log::debug!(
"({:?}) Maybe circular dep: {:?} => {:?}",
root_id,
module_id,
src.module_id
);
for (src, _) in &m.imports.specifiers {
if builder.all_deps.contains(&(src.module_id, module_id)) {
log::debug!("Circular dep: {:?} => {:?}", module_id, src.module_id);

// builder.mark_as_circular(module_id, src.module_id);
builder.mark_as_circular(module_id, src.module_id);

let circular_paths = all_simple_paths::<Vec<ModuleId>, _>(
&builder.entry_graph,
src.module_id,
module_id,
0,
None,
)
.collect::<Vec<_>>();
let circular_paths = all_simple_paths::<Vec<ModuleId>, _>(
&builder.direct_deps,
src.module_id,
module_id,
0,
None,
)
.collect::<Vec<_>>();

for path in circular_paths {
for dep in path {
builder.mark_as_circular(module_id, dep)
}
for path in circular_paths {
for dep in path {
builder.mark_as_circular(module_id, dep)
}
}

return;
} else {
builder.entry_graph.add_edge(
module_id,
src.module_id,
if src.is_unconditional { 2 } else { 1 },
);
}
}
}

for (src, _) in m.imports.specifiers.iter().chain(&m.exports.reexports) {
self.add_to_graph(builder, src.module_id, root_id);

builder.entry_graph.add_edge(
module_id,
src.module_id,
if src.is_unconditional { 2 } else { 1 },
);
if self.scope.get_module(src.module_id).unwrap().is_es6 {
builder.try_add_direct_dep(root_id, module_id, src.module_id);
} else {
// Common js support.
let v = builder.direct_deps.entry(module_id).or_default();
if !v.contains(&src.module_id) {
v.push(src.module_id);
}
}

let rev = builder.reverse.entry(src.module_id).or_default();
if !rev.contains(&module_id) {
rev.push(module_id);
}
}
}
}
@@ -54,6 +54,7 @@ fn assert_normal_transitive(
)
}

#[track_caller]
fn assert_circular(t: &mut Tester, p: &Plan, entry: &str, members: &[&str]) {
assert_eq!(
p.circular[&t.id(&format!("{}.js", entry))]
@@ -66,6 +67,9 @@ fn assert_circular(t: &mut Tester, p: &Plan, entry: &str, members: &[&str]) {
.map(|s| format!("{}.js", s))
.map(|s| t.id(&s))
.collect::<HashSet<_>>(),
"[circular] `{}` should merge {:?}",
entry,
members
);
}

@@ -101,7 +105,7 @@ fn concurrency_001() {
let mut entries = HashMap::default();
entries.insert("main.js".to_string(), module);

let p = t.bundler.determine_entries(entries)?;
let p = t.bundler.calculate_plan(entries)?;

assert_eq!(p.circular.len(), 0);

@@ -145,7 +149,7 @@ fn concurrency_002() {
let mut entries = HashMap::default();
entries.insert("main.js".to_string(), module);

let p = t.bundler.determine_entries(entries)?;
let p = t.bundler.calculate_plan(entries)?;

assert_eq!(p.circular.len(), 0);

@@ -191,7 +195,7 @@ fn concurrency_003() {
let mut entries = HashMap::default();
entries.insert("main.js".to_string(), module);

let p = t.bundler.determine_entries(entries)?;
let p = t.bundler.calculate_plan(entries)?;

assert_eq!(p.circular.len(), 0);
assert_eq!(p.normal.len(), 2);
@@ -219,21 +223,12 @@ fn circular_001() {
"a.js",
"
import { B } from './b'

export class A {
method() {
return new B();
}
}
",
)
.file(
"b.js",
"
import { A } from './a';

export class B extends A {
}
",
)
.run(|t| {
@@ -244,10 +239,12 @@ fn circular_001() {
let mut entries = HashMap::default();
entries.insert("main.js".to_string(), module.clone());

let p = t.bundler.determine_entries(entries)?;
let p = t.bundler.calculate_plan(entries)?;

dbg!(&p);

assert_circular(t, &p, "a", &["b"]);
assert_normal(t, &p, "main", &["a"]);
assert_normal(t, &p, "main", &["a", "b"]);
assert_normal(t, &p, "a", &[]);
assert_normal(t, &p, "b", &[]);

@@ -276,7 +273,7 @@ fn transitive_001() {
let mut entries = HashMap::default();
entries.insert("main.js".to_string(), module);

let p = t.bundler.determine_entries(entries)?;
let p = t.bundler.calculate_plan(entries)?;

dbg!(&p);

@@ -341,7 +338,7 @@ fn transitive_002() {
let mut entries = HashMap::default();
entries.insert("main.js".to_string(), module);

let p = t.bundler.determine_entries(entries)?;
let p = t.bundler.calculate_plan(entries)?;

dbg!(&p);

@@ -393,7 +390,7 @@ fn cjs_001() {
let mut entries = HashMap::default();
entries.insert("main.js".to_string(), module);

let p = t.bundler.determine_entries(entries)?;
let p = t.bundler.calculate_plan(entries)?;

dbg!(&p);

@@ -434,7 +431,7 @@ fn cjs_002() {
let mut entries = HashMap::default();
entries.insert("main.js".to_string(), module);

let p = t.bundler.determine_entries(entries)?;
let p = t.bundler.calculate_plan(entries)?;

dbg!(&p);

@@ -482,7 +479,7 @@ fn cjs_003() {
let mut entries = HashMap::default();
entries.insert("main.js".to_string(), module);

let p = t.bundler.determine_entries(entries)?;
let p = t.bundler.calculate_plan(entries)?;

dbg!(&p);

@@ -539,7 +536,7 @@ fn cjs_004() {
let mut entries = HashMap::default();
entries.insert("main.js".to_string(), module);

let p = t.bundler.determine_entries(entries)?;
let p = t.bundler.calculate_plan(entries)?;

dbg!(&p);

@@ -592,7 +589,7 @@ fn cjs_005() {
let mut entries = HashMap::default();
entries.insert("main.js".to_string(), module);

let p = t.bundler.determine_entries(entries)?;
let p = t.bundler.calculate_plan(entries)?;

dbg!(&p);

@@ -607,3 +604,99 @@ fn cjs_005() {
Ok(())
});
}

#[test]
fn deno_001() {
suite()
.file(
"main.js",
r#"
import { listenAndServe } from "./http-server";
"#,
)
.file(
"http-server.js",
r#"
import { BufReader, BufWriter } from "./io-bufio";
import { bodyReader } from "./_io";
"#,
)
.file(
"_io.js",
r#"
import { BufReader, BufWriter } from "./io-bufio";
import { ServerRequest, Response } from "./http-server";
"#,
)
.file(
"io-bufio.js",
r#"
"#,
)
.run(|t| {
let module = t
.bundler
.load_transformed(&FileName::Real("main.js".into()))?
.unwrap();
let mut entries = HashMap::default();
entries.insert("main.js".to_string(), module);

let p = t.bundler.calculate_plan(entries)?;

dbg!(&p);

assert_normal(t, &p, "main", &["http-server"]);
assert_normal(t, &p, "io-bufio", &[]);

assert_circular(t, &p, "http-server", &["_io"]);
// assert_circular(t, &p, "_io", &["http-server"]);

Ok(())
});
}

#[test]
fn circular_002() {
suite()
.file(
"main.js",
"
import './a';
",
)
.file(
"a.js",
"
import './b';
",
)
.file(
"b.js",
"
import './c';
",
)
.file(
"c.js",
"
import './a';
",
)
.run(|t| {
let module = t
.bundler
.load_transformed(&FileName::Real("main.js".into()))?
.unwrap();
let mut entries = HashMap::default();
entries.insert("main.js".to_string(), module.clone());

let p = t.bundler.calculate_plan(entries)?;

dbg!(&p);

assert_normal(t, &p, "main", &["a"]);
assert_circular(t, &p, "a", &["b", "c"]);

Ok(())
});
}
@@ -33,12 +33,14 @@ where
};
dep = dep.fold_with(&mut v);

log::info!("Remark map: {:?}", v.remark_map);
if !v.remark_map.is_empty() {
log::debug!("Remark map: {:?}", v.remark_map);

// Swap syntax context. Although name is remark, it's actually
// swapping because ExportRenamer inserts two-side conversion
// rule.
self.remark(&mut dep, &v.remark_map);
// Swap syntax context. Although name is remark, it's actually
// swapping because ExportRenamer inserts two-side conversion
// rule.
self.remark(&mut dep, &v.remark_map);
}

dep
}
@@ -63,7 +65,7 @@ struct ExportRenamer<'a> {
impl ExportRenamer<'_> {
/// Returns [SyntaxContext] for the name of variable.
fn mark_as_remarking_required(&mut self, exported: Id, orig: Id) -> SyntaxContext {
log::info!("Remarking required: {:?} -> {:?}", exported, orig);
log::debug!("Remarking required: {:?} -> {:?}", exported, orig);

let ctxt = SyntaxContext::empty().apply_mark(Mark::fresh(Mark::root()));
self.remark_map
@@ -167,7 +169,7 @@ impl Fold for ExportRenamer<'_> {
let ident = if let Some(id) = ident {
id
} else {
log::info!("Dropping export default declaration because it's not used");
log::debug!("Dropping export default declaration because it's not used");

return Stmt::Empty(EmptyStmt { span: DUMMY_SP }).into();
};
@@ -206,7 +208,7 @@ impl Fold for ExportRenamer<'_> {
})))
}
DefaultDecl::TsInterfaceDecl(_) => {
log::info!(
log::debug!(
"Dropping export default declaration because ts interface declaration \
is not supported yet"
);
@@ -239,7 +241,7 @@ impl Fold for ExportRenamer<'_> {
}],
})))
} else {
log::debug!("Removing default export expression as it's not imported");
log::trace!("Removing default export expression as it's not imported");

// Expression statement cannot start with function
ModuleItem::Stmt(Stmt::Expr(ExprStmt {
@@ -323,7 +325,7 @@ impl Fold for ExportRenamer<'_> {
definite: false,
})
} else {
log::debug!(
log::trace!(
"Removing export specifier {:?} as it's not imported",
specifier
);
@@ -417,7 +419,7 @@ impl Fold for ExportRenamer<'_> {
// definite: false,
// })
} else {
log::debug!(
log::trace!(
"Removing export specifier {:?} as it's not imported (`unexport` \
is false, but it's not used)",
specifier
@@ -581,7 +583,7 @@ impl VisitMut for RemarkIdents<'_> {
let id = (*n).to_id();
if let Some(&ctxt) = self.map.get(&id) {
n.span = n.span.with_ctxt(ctxt);
log::info!("Remark: {:?} -> {:?}", id, ctxt)
log::debug!("Remark: {:?} -> {:?}", id, ctxt)
}
}
}
@@ -65,7 +65,7 @@ where

// In case of common module
if let Some(cached) = self.scope.get_module_by_path(&file_name) {
log::info!("Cached: {}", file_name);
log::debug!("Cached: {}", file_name);
return Ok(Some(cached));
}

@@ -75,14 +75,14 @@ where
.context("failed to analyze module")?;
files.dedup_by_key(|v| v.1.clone());

log::info!("Storing module: {}", file_name);
log::info!("({}) Storing module: {}", v.id, file_name);
self.scope.store_module(v.clone());

// Load dependencies and store them in the `Scope`
let results = files
.into_par_iter()
.map(|(_src, path)| {
log::debug!("loading dependency: {}", path);
log::trace!("loading dependency: {}", path);
self.load_transformed(&path)
})
.collect::<Vec<_>>();
@@ -87,9 +87,9 @@ where
) -> Self {
GLOBALS.set(&globals, || {
let used_mark = Mark::fresh(Mark::root());
log::info!("Used mark: {:?}", DUMMY_SP.apply_mark(used_mark).ctxt());
log::debug!("Used mark: {:?}", DUMMY_SP.apply_mark(used_mark).ctxt());
let helper_ctxt = SyntaxContext::empty().apply_mark(Mark::fresh(Mark::root()));
log::info!("Helper ctxt: {:?}", helper_ctxt);
log::debug!("Helper ctxt: {:?}", helper_ctxt);

Bundler {
config,
@@ -2,6 +2,34 @@ use std::hash::Hash;
use swc_common::{Span, SyntaxContext};
use swc_ecma_visit::{noop_visit_mut_type, VisitMut};

#[derive(Debug)]
pub(crate) struct CHashSet<V>
where
V: Eq + Hash,
{
inner: CloneMap<V, ()>,
}

impl<V> CHashSet<V>
where
V: Eq + Hash,
{
pub fn insert(&self, v: V) -> bool {
self.inner.insert(v, ()).is_none()
}
}

impl<V> Default for CHashSet<V>
where
V: Eq + Hash,
{
fn default() -> Self {
Self {
inner: Default::default(),
}
}
}

#[derive(Debug)]
pub(crate) struct CloneMap<K, V>
where
@@ -50,13 +78,13 @@ where
}

#[cfg(feature = "concurrent")]
pub fn insert(&self, k: K, v: V) {
self.inner.insert(k, v);
pub fn insert(&self, k: K, v: V) -> Option<V> {
self.inner.insert(k, v)
}

#[cfg(not(feature = "concurrent"))]
pub fn insert(&self, k: K, v: V) {
self.inner.borrow_mut().insert(k, v);
pub fn insert(&self, k: K, v: V) -> Option<V> {
self.inner.borrow_mut().insert(k, v)
}
}
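A short usage sketch of the CHashSet dedup pattern relied on by the merge code above (the loop body is illustrative, not taken from the diff):

    // insert() returns true only the first time a value is added, so the
    // merge passes can use it to skip modules that were already merged.
    fn merge_once(merged: &CHashSet<ModuleId>, deps: &[ModuleId]) {
        for &dep in deps {
            if !merged.insert(dep) {
                // Already merged through another path; skip it.
                continue;
            }
            // ... merge `dep` here ...
        }
    }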
@@ -6,7 +6,7 @@ edition = "2018"
license = "Apache-2.0/MIT"
name = "swc_ecmascript"
repository = "https://github.com/swc-project/swc.git"
version = "0.7.8"
version = "0.7.9"

[features]
codegen = ["swc_ecma_codegen"]
@@ -24,7 +24,7 @@ swc_ecma_ast = {version = "0.31.0", path = "./ast"}
swc_ecma_codegen = {version = "0.35.2", path = "./codegen", optional = true}
swc_ecma_dep_graph = {version = "0.3.0", path = "./dep-graph", optional = true}
swc_ecma_parser = {version = "0.37.2", path = "./parser", optional = true}
swc_ecma_transforms = {version = "0.23.13", path = "./transforms", optional = true}
swc_ecma_transforms = {version = "0.23.14", path = "./transforms", optional = true}
swc_ecma_utils = {version = "0.21.0", path = "./utils", optional = true}
swc_ecma_visit = {version = "0.17.2", path = "./visit", optional = true}
|
||||
license = "Apache-2.0/MIT"
|
||||
name = "swc_ecma_transforms"
|
||||
repository = "https://github.com/swc-project/swc.git"
|
||||
version = "0.23.13"
|
||||
version = "0.23.14"
|
||||
|
||||
[features]
|
||||
const-modules = ["dashmap"]
|
||||
|
@@ -261,13 +261,13 @@ impl VisitMut for Dce<'_> {
}

fn visit_mut_expr_stmt(&mut self, node: &mut ExprStmt) {
log::debug!("ExprStmt ->");
log::trace!("ExprStmt ->");
if self.is_marked(node.span) {
return;
}

if self.should_include(&node.expr) {
log::debug!("\tIncluded");
log::trace!("\tIncluded");
node.span = node.span.apply_mark(self.config.used_mark);
self.mark(&mut node.expr);
return;
@@ -362,7 +362,7 @@ impl VisitMut for Dce<'_> {

if self.marking_phase {
if self.included.insert(i.to_id()) {
log::info!("{} is used", i.sym);
log::debug!("{} is used", i.sym);
self.changed = true;
}
}
@@ -404,7 +404,7 @@ impl VisitMut for Dce<'_> {
}

// Drop unused imports.
log::debug!("Removing unused import specifiers");
log::trace!("Removing unused import specifiers");
import.specifiers.retain(|s| self.should_include(s));

if !import.specifiers.is_empty() {
@@ -616,7 +616,7 @@ impl Dce<'_> {
preserved.reserve(items.len());

loop {
log::info!("loop start");
log::debug!("loop start");

self.changed = false;
let mut idx = 0u32;
@@ -737,7 +737,7 @@ impl Dce<'_> {
{
let old = self.marking_phase;
self.marking_phase = true;
log::info!("Marking: {}", type_name::<T>());
log::debug!("Marking: {}", type_name::<T>());
node.visit_mut_with(self);
self.marking_phase = old;
}
@@ -85,7 +85,7 @@ impl Visit for SideEffectVisitor<'_> {
noop_visit_type!();

fn visit_expr(&mut self, node: &Expr, _: &dyn Node) {
log::debug!("Visit<Expr>");
log::trace!("Visit<Expr>");

if self.found || node.is_pure_callee() {
return;
@@ -774,7 +774,7 @@ impl Fold for Inlining<'_> {
self.phase = Phase::Analysis;
items = items.fold_children_with(self);

log::debug!("Switching to Inlining phase");
log::trace!("Switching to Inlining phase");

// Inline
self.phase = Phase::Inlining;
@@ -76,7 +76,7 @@ impl Inlining<'_> {
}) {
let v: VarInfo = v;

log::debug!("Hoisting a variable {:?}", id);
log::trace!("Hoisting a variable {:?}", id);

if self.scope.unresolved_usages.contains(&id) {
v.inline_prevented.set(true)
@@ -192,7 +192,7 @@ impl Inlining<'_> {
}

if scope.kind == ScopeKind::Loop {
log::debug!("preventing inline as it's declared in a loop");
log::trace!("preventing inline as it's declared in a loop");
self.scope.prevent_inline(&id);
break;
}
@@ -342,11 +342,11 @@ impl<'a> Scope<'a> {
log::trace!("found");
break;
}
log::debug!("({}): {}: kind = {:?}", scope.depth(), id.0, scope.kind);
log::trace!("({}): {}: kind = {:?}", scope.depth(), id.0, scope.kind);

match scope.kind {
ScopeKind::Fn { .. } => {
log::debug!("{}: variable access from a nested function detected", id.0);
log::trace!("{}: variable access from a nested function detected", id.0);
return true;
}
ScopeKind::Loop | ScopeKind::Cond => {
@@ -363,7 +363,7 @@ impl<'a> Scope<'a> {

pub fn add_read(&mut self, id: &Id) {
if self.read_prevents_inlining(id) {
log::debug!("prevent inlining because of read: {}", id.0);
log::trace!("prevent inlining because of read: {}", id.0);

self.prevent_inline(id)
}
@@ -402,11 +402,11 @@ impl<'a> Scope<'a> {
if found {
break;
}
log::debug!("({}): {}: kind = {:?}", scope.depth(), id.0, scope.kind);
log::trace!("({}): {}: kind = {:?}", scope.depth(), id.0, scope.kind);

match scope.kind {
ScopeKind::Fn { .. } => {
log::debug!("{}: variable access from a nested function detected", id.0);
log::trace!("{}: variable access from a nested function detected", id.0);
return true;
}
ScopeKind::Loop | ScopeKind::Cond => {
@@ -423,7 +423,7 @@ impl<'a> Scope<'a> {

pub fn add_write(&mut self, id: &Id, force_no_inline: bool) {
if self.write_prevents_inline(id) {
log::debug!("prevent inlining because of write: {}", id.0);
log::trace!("prevent inlining because of write: {}", id.0);

self.prevent_inline(id)
}
@@ -569,7 +569,7 @@ impl<'a> Scope<'a> {
}

pub fn prevent_inline(&self, id: &Id) {
log::debug!("({}) Prevent inlining: {:?}", self.depth(), id);
log::trace!("({}) Prevent inlining: {:?}", self.depth(), id);

if let Some(v) = self.find_binding_from_current(id) {
v.inline_prevented.set(true);
spack/.gitignore (vendored, 1 line added)
@@ -1,3 +1,4 @@
/*.js
*.d.ts
*.map
deno-std/
spack/tests/deno-std/.swcrc (new file, 9 lines)
@@ -0,0 +1,9 @@
{
"jsc": {
"target": "es2020",
"parser": {
"syntax": "typescript",
"decorators": true
}
}
}
spack/tests/pass/circular/imports-same/input/a.js (new file, 5 lines)
@@ -0,0 +1,5 @@
import { foo } from './common'

console.log('a', foo);

export const a = foo + 1;

spack/tests/pass/circular/imports-same/input/b.js (new file, 5 lines)
@@ -0,0 +1,5 @@
import { foo } from './common'

console.log('b', foo);

export const b = foo + 2;

@@ -0,0 +1 @@
export const foo = 1;

spack/tests/pass/circular/imports-same/input/common.js (new file, 1 line)
@@ -0,0 +1 @@
export { foo } from './common-foo';

spack/tests/pass/circular/imports-same/input/entry.js (new file, 1 line)
@@ -0,0 +1 @@
export * from './a';

spack/tests/pass/circular/imports-same/output/entry.js (new file, 4 lines)
@@ -0,0 +1,4 @@
const foo = 1;
const foo1 = foo;
console.log('a', foo1);
export const a = foo1 + 1;
@@ -1,9 +1,9 @@
console.log('c');
class A {
method() {
return new B();
}
}
console.log('c');
class B extends A {
}
console.log(A, B);

@@ -2,5 +2,4 @@ const b = '1';
const a = '1';
console.log(b);
export { a };
const b = '1';
export { b };