feat(cli): Add tracing options for compile command (#3746)

OJ Kwon 2022-02-25 19:32:46 -08:00 committed by GitHub
parent 2ab150c603
commit 41f92d228b
6 changed files with 81 additions and 4 deletions

Cargo.lock (generated)

@@ -2777,6 +2777,10 @@ dependencies = [
"serde_json",
"swc",
"swc_common",
"tracing",
"tracing-chrome",
"tracing-futures",
"tracing-subscriber",
"walkdir",
]


@@ -707,6 +707,7 @@ impl Compiler {
}
}
#[tracing::instrument(level = "trace", skip_all)]
pub fn read_config(&self, opts: &Options, name: &FileName) -> Result<Option<Config>, Error> {
static CUR_DIR: Lazy<PathBuf> = Lazy::new(|| {
if cfg!(target_arch = "wasm32") {
@@ -812,6 +813,7 @@ impl Compiler {
/// This method handles merging of config.
///
/// This method does **not** parse module.
#[tracing::instrument(level = "trace", skip_all)]
pub fn parse_js_as_input<'a, P>(
&'a self,
fm: Lrc<SourceFile>,
@@ -859,6 +861,7 @@ impl Compiler {
})
}
#[tracing::instrument(level = "trace", skip_all)]
pub fn transform(
&self,
handler: &Handler,
@@ -886,6 +889,7 @@ impl Compiler {
///
/// This means, you can use `noop_visit_type`, `noop_fold_type` and
/// `noop_visit_mut_type` in your visitor to reduce the binary size.
#[tracing::instrument(level = "trace", skip_all)]
pub fn process_js_with_custom_pass<P1, P2>(
&self,
fm: Arc<SourceFile>,
@@ -946,6 +950,7 @@ impl Compiler {
.context("failed to process js file")
}
#[tracing::instrument(level = "trace", skip(self, handler, opts))]
pub fn process_js_file(
&self,
fm: Arc<SourceFile>,
@@ -955,6 +960,7 @@ impl Compiler {
self.process_js_with_custom_pass(fm, None, handler, opts, |_| noop(), |_| noop())
}
#[tracing::instrument(level = "trace", skip_all)]
pub fn minify(
&self,
fm: Arc<SourceFile>,
@@ -1076,6 +1082,7 @@ impl Compiler {
/// You can use custom pass with this method.
///
/// There exists a [PassBuilder] to help building custom passes.
#[tracing::instrument(level = "trace", skip_all)]
pub fn process_js(
&self,
handler: &Handler,
@@ -1088,6 +1095,7 @@ impl Compiler {
self.process_js_with_custom_pass(fm, Some(program), handler, opts, |_| noop(), |_| noop())
}
#[tracing::instrument(level = "trace", skip_all)]
fn process_js_inner(
&self,
handler: &Handler,
@@ -1130,6 +1138,7 @@ impl Compiler {
}
}
#[tracing::instrument(level = "trace", skip_all)]
fn load_swcrc(path: &Path) -> Result<Rc, Error> {
fn convert_json_err(e: serde_json::Error) -> Error {
let line = e.line();
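
The #[tracing::instrument] attributes added throughout this file wrap each compiler entry point in a trace-level span, so the CLI's new trace output can attribute time to individual compile phases. A minimal sketch of what the attribute amounts to (the function names here are hypothetical, not from this diff); with skip_all the arguments are kept out of the span's fields, and the span only costs anything when a subscriber that records trace-level spans is installed:

// --- illustrative sketch, not part of this commit ---
use tracing::Level;

#[tracing::instrument(level = "trace", skip_all)]
fn measure_me(input: &str) -> usize {
    input.len()
}

// Roughly what the attribute expands to:
fn measure_me_by_hand(input: &str) -> usize {
    let span = tracing::span!(Level::TRACE, "measure_me_by_hand");
    let _enter = span.enter();
    input.len()
}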


@@ -23,6 +23,10 @@ relative-path = "1.6.1"
serde = { version = "1", features = ["derive"] }
serde_json = { version = "1", features = ["unbounded_depth"] }
atty = "0.2.14"
tracing-chrome = "0.4.0"
tracing-futures = "0.2.5"
tracing-subscriber = { version = "0.3.9", features = ["env-filter"] }
tracing = "0.1.31"
[dependencies.path-absolutize]
version = "3.0.11"
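
For context on the four new dependencies: tracing provides the instrumentation macros and spans, tracing-subscriber supplies the registry that output layers plug into (its env-filter feature enables RUST_LOG-style filtering), tracing-chrome is the layer that writes spans out in Chrome's trace event format, and tracing-futures is presumably included so spans can follow futures across await points. They are wired together in init_trace in compile.rs below.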


@@ -15,6 +15,8 @@ use swc::{
try_with_handler, Compiler, TransformOutput,
};
use swc_common::{sync::Lazy, FileName, FilePathMapping, SourceMap};
use tracing_chrome::{ChromeLayerBuilder, FlushGuard};
use tracing_subscriber::{prelude::__tracing_subscriber_SubscriberExt, util::SubscriberInitExt};
use walkdir::WalkDir;
/// Configuration option for transform files.
@@ -77,6 +79,16 @@ pub struct CompileOptions {
/// Files to compile
#[clap(group = "input")]
files: Vec<PathBuf>,
/// Enable experimental trace profiling;
/// generates a trace compatible with the trace event format.
#[clap(group = "experimental_trace", long)]
experimental_trace: bool,
/// Set file name for the trace output. If not specified,
/// `trace-{unix epoch time}.json` will be used by default.
#[clap(group = "experimental_trace", long)]
trace_out_file: Option<String>,
//Flags legacy @swc/cli supports, might need some thoughts if we need support same.
//log_watch_compilation: bool,
//copy_files: bool,
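
With clap's derive API, the snake_case field names above surface as kebab-case long flags. A standalone sketch of how the two new options parse (hypothetical struct name; the arg groups are omitted for brevity):

// --- illustrative sketch, not part of this commit ---
use clap::Parser;

#[derive(Parser, Debug)]
struct TraceFlags {
    /// Enable experimental trace profiling.
    #[clap(long)]
    experimental_trace: bool,

    /// Optional file name for the trace output.
    #[clap(long)]
    trace_out_file: Option<String>,
}

fn main() {
    // A real invocation would pass these after the compile subcommand,
    // e.g. `... --experimental-trace --trace-out-file=trace.json`.
    let flags = TraceFlags::parse_from([
        "swc",
        "--experimental-trace",
        "--trace-out-file=trace.json",
    ]);
    assert!(flags.experimental_trace);
    assert_eq!(flags.trace_out_file.as_deref(), Some("trace.json"));
}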
@@ -97,6 +109,7 @@ static DEFAULT_EXTENSIONS: &[&str] = &["js", "jsx", "es6", "es", "mjs", "ts", "t
/// Infer list of files to be transformed from cli arguments.
/// If given input is a directory, it'll traverse it and collect all supported
/// files.
#[tracing::instrument(level = "trace", skip_all)]
fn get_files_list(
raw_files_input: &[PathBuf],
extensions: &[String],
@@ -246,8 +259,27 @@ fn collect_stdin_input() -> Option<String> {
)
}
fn init_trace(out_file: &Option<String>) -> Option<FlushGuard> {
let layer = if let Some(trace_out_file) = out_file {
ChromeLayerBuilder::new()
.file(trace_out_file.clone())
.include_args(true)
} else {
ChromeLayerBuilder::new().include_args(true)
};
let (chrome_layer, guard) = layer.build();
tracing_subscriber::registry()
.with(chrome_layer)
.try_init()
.expect("Should able to register trace");
Some(guard)
}
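
init_trace installs the chrome layer as the global subscriber, so every span created by the #[tracing::instrument] attributes ends up in a JSON file in Chrome's trace event format; that file can be loaded in chrome://tracing or Perfetto's UI to get a flame-chart view of the compile. When --trace-out-file is not given, tracing-chrome falls back to its trace-{unix epoch time}.json default, as the option's doc comment above notes.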
impl super::CommandRunner for CompileOptions {
#[tracing::instrument(level = "trace", skip_all)]
fn execute_inner(&self) -> anyhow::Result<()> {
let stdin_input = collect_stdin_input();
if stdin_input.is_some() && !self.files.is_empty() {
@@ -259,6 +291,8 @@ impl super::CommandRunner for CompileOptions {
}
if let Some(stdin_input) = stdin_input {
let span = tracing::span!(tracing::Level::TRACE, "compile_stdin");
let stdin_span_guard = span.enter();
let comp = COMPILER.clone();
let result = try_with_handler(comp.cm.clone(), false, |handler| {
@@ -286,10 +320,13 @@ impl super::CommandRunner for CompileOptions {
Err(e) => return Err(e),
};
drop(stdin_span_guard);
return Ok(());
}
if !self.files.is_empty() {
let span = tracing::span!(tracing::Level::TRACE, "compile_files");
let files_span_guard = span.enter();
let included_extensions = if let Some(extensions) = &self.extensions {
extensions.clone()
} else {
@@ -299,7 +336,7 @@ impl super::CommandRunner for CompileOptions {
let files = get_files_list(&self.files, &included_extensions, false)?;
let cm = COMPILER.clone();
let ret = files
.into_par_iter()
.try_for_each_with(cm, |compiler, file_path| {
let result = try_with_handler(compiler.cm.clone(), false, |handler| {
@@ -319,8 +356,26 @@ impl super::CommandRunner for CompileOptions {
Ok(())
});
drop(files_span_guard);
return ret;
}
Ok(())
}
fn execute(&self) -> anyhow::Result<()> {
let guard = if self.experimental_trace {
init_trace(&self.trace_out_file)
} else {
None
};
let ret = self.execute_inner();
if let Some(guard) = guard {
guard.flush();
}
ret
}
}
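
Splitting execute into a thin wrapper around execute_inner lets the FlushGuard outlive all of the traced work: the guard is created before any span is recorded and flushed after execute_inner returns, so buffered trace events should still reach the output file even when compilation bails out early with an error.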


@@ -13,7 +13,7 @@ pub enum Command {
#[clap(subcommand)]
Plugin(PluginSubcommand),
/// Run SWC's transformer.
Compile(Box<CompileOptions>),
}
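
Boxing the Compile variant is presumably about enum size: CompileOptions now carries more fields, and clippy's large_enum_variant lint pushes toward boxing the biggest variant so the Command enum itself stays small. A rough sketch of the effect (hypothetical types; exact sizes are illustrative):

// --- illustrative sketch, not part of this commit ---
use std::mem::size_of;

struct BigOptions {
    _payload: [u64; 32], // stand-in for a large options struct
}

enum Unboxed {
    Plugin(u8),
    Compile(BigOptions),
}

enum Boxed {
    Plugin(u8),
    Compile(Box<BigOptions>),
}

fn main() {
    // The unboxed enum must be at least as large as its biggest variant;
    // the boxed one stores just a pointer next to the discriminant.
    println!("unboxed: {} bytes", size_of::<Unboxed>()); // ~264 on a 64-bit target
    println!("boxed:   {} bytes", size_of::<Boxed>()); // ~16 on a 64-bit target
}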
#[derive(Parser)]
@@ -25,4 +25,5 @@ pub struct SwcCliOptions {
pub trait CommandRunner {
fn execute(&self) -> anyhow::Result<()>;
fn execute_inner(&self) -> anyhow::Result<()>;
}


@@ -109,6 +109,10 @@ fn write_ignore_file(base_path: &Path) -> Result<()> {
}
impl super::CommandRunner for PluginScaffoldOptions {
fn execute_inner(&self) -> Result<()> {
Ok(())
}
/// Create a rust project for the plugin from template.
/// This largely mimic https://github.com/rust-lang/cargo/blob/master/src/cargo/ops/cargo_new.rs,
/// but also thinner implementation based on some assumptions like skipping